First checkin of new depot_tools, just for win, for prototype testing.


git-svn-id: svn://svn.chromium.org/chrome/trunk@1241 0039d316-1c4b-4281-b951-d872f2087c98
diff --git a/depot_tools/release/win/bin/README b/depot_tools/release/win/bin/README
new file mode 100644
index 0000000..23685bb
--- /dev/null
+++ b/depot_tools/release/win/bin/README
@@ -0,0 +1,4 @@
+This directory contains the latest stable release of the depot tools.
+
+Please use https://svn/chrome/branches/depot_tools instead so that you are
+always using the most up-to-date version of the depot tools!
diff --git a/depot_tools/release/win/bin/chrome-update-create-task.bat b/depot_tools/release/win/bin/chrome-update-create-task.bat
new file mode 100644
index 0000000..b5e0c1c
--- /dev/null
+++ b/depot_tools/release/win/bin/chrome-update-create-task.bat
@@ -0,0 +1,49 @@
+@echo off
+setlocal
+
+:: This script will create a scheduled task to run chrome-update every day
+:: at the time you specify.  This script expects to be live in
+:: depot_tools\latest.
+::
+:: Usage: this-script <time to run task> <path to chrome trunk>
+
+set Out=%USERPROFILE%\chrome-update-task.bat
+set TaskTime=%1
+set Trunk=%~f2
+
+if not exist "%Trunk%" (
+  echo Usage: %~n0 ^<time^> ^<c:\path\to\chrome\trunk^>
+  echo ^<time^> is the time in HH:MM:SS format at which to run the task.
+  echo Example: %~n0 02:00:00 c:\src\chrome\trunk
+  exit 1
+)
+
+if not exist "%Out%" goto CreateScript
+
+echo WARNING: %Out% already exists.
+set Choice=
+set /P Choice=Overwrite file [Y/N]?
+if not "%Choice%"=="y" goto CreateTask
+
+:CreateScript
+
+echo.
+echo Creating %Out%
+
+echo>"%Out%" @echo off
+echo>>"%Out%" call "%~dp0\..\bootstrap\update.bat" "%~dp0\..\latest"
+echo>>"%Out%" "%~dp0\chrome-update.bat" "%Trunk%" ^> "%Trunk%\chrome-update-results.txt" 
+
+:CreateTask
+
+echo.
+echo ***********************************************************************
+echo Creating a Scheduled Task to run chrome-update each day at %TaskTime%.
+echo The batch file being run will live at %Out%.
+echo.
+echo WARNING: The password you enter will be displayed in cleartext.
+echo If you're paranoid, you can enter blank here and then fix the password
+echo by editing the scheduled task manually from the Control Panel.
+echo ***********************************************************************
+echo.
+schtasks /create /tn chrome-update /tr "\"%Out%\"" /sc daily /st %TaskTime%
diff --git a/depot_tools/release/win/bin/chrome-update.bat b/depot_tools/release/win/bin/chrome-update.bat
new file mode 100644
index 0000000..451d0088
--- /dev/null
+++ b/depot_tools/release/win/bin/chrome-update.bat
@@ -0,0 +1,10 @@
+@echo off
+
+:: This batch file assumes that the correct version of python can be found in
+:: the current directory, and that you have Visual Studio 8 installed in the
+:: default location.
+
+setlocal
+set PATH=%~dp0\..;%WINDIR%\system32;%PROGRAMFILES%\Microsoft Visual Studio 8\Common7\IDE
+
+"%~dp0\python_24\python.exe" "%~dp0\chrome-update.py" %*
diff --git a/depot_tools/release/win/bin/chrome-update.py b/depot_tools/release/win/bin/chrome-update.py
new file mode 100644
index 0000000..564da86
--- /dev/null
+++ b/depot_tools/release/win/bin/chrome-update.py
@@ -0,0 +1,159 @@
+#!/usr/bin/python
+# Author: mpcomplete
+#
+# This script updates and does a clean build of chrome for you.
+# Usage: python chrome-update.py C:\path\to\chrome\trunk
+#
+# It assumes the following:
+# - You have gclient.bat and devenv.com in your path (use the wrapper batch
+#   file to ensure this).
+
+import sys
+import os
+import subprocess
+import httplib
+import re
+import shutil
+import optparse
+
+def Message(str):
+  """Prints a status message."""
+  print "[chrome-update]", str
+
+def FixupPath(path):
+  """Returns the OS-ified version of a windows path."""
+  return os.path.sep.join(path.split("\\"))
+
+def GetRevision():
+  """Returns the revision number of the last build that was archived, or
+  None on failure."""
+  HOST = "chrome-dev.mtv.corp.google.com:8010"
+  PATH = "/waterfall"
+  EXPR = r"http://www.corp.google.com/eng-users/chrome-eng/snapshots/[^/]*/(\d+)"
+
+  connection = httplib.HTTPConnection(HOST)
+  connection.request("GET", PATH)
+  response = connection.getresponse()
+  text = response.read()
+  match = re.search(EXPR, text)
+  if match:
+    return int(match.group(1))
+  return None
+
+def SetRevisionForUpdate(chrome_root):
+  """Prepares environment so gclient syncs to a good revision, if possible."""
+  # Find a buildable revision.
+  rev = GetRevision()
+  if rev == None:
+    Message("WARNING: Failed to find a buildable revision.  Syncing to trunk.")
+    return "trunk"
+
+  # Read the .gclient file.
+  gclient_file = chrome_root + FixupPath("\\.gclient")
+  if not os.path.exists(gclient_file):
+    Message("WARNING: Failed to find .gclient file.  Syncing to trunk.")
+    return "trunk"
+  scope = {}
+  execfile(gclient_file, scope)
+  solutions = scope["solutions"]
+
+  # Edit the url of the chrome 'src' solution, unless the user wants a
+  # specific revision.
+  for solution in solutions:
+    if solution["name"] == "src":
+      splitter = solution["url"].split("@")
+      if len(splitter) == 1:
+	solution["url"] = splitter[0] + "@" + str(rev)
+      else:
+	rev = int(splitter[1])
+      break
+
+  # Write out the new .gclient file.
+  gclient_override = gclient_file + "-update-chrome"
+  f = open(gclient_override, "w")
+  f.write("solutions = " + str(solutions))
+  f.close()
+
+  # Set the env var that the gclient tool looks for.
+  os.environ["GCLIENT_FILE"] = gclient_override
+  return rev
+
+def DoUpdate(chrome_root):
+  """gclient sync to the latest build."""
+  # gclient sync
+  rev = SetRevisionForUpdate(chrome_root)
+
+  cmd = ["gclient.bat", "sync"]
+  Message("Updating to %s: %s" % (rev, cmd))
+  sys.stdout.flush()
+  return subprocess.call(cmd, cwd=chrome_root)
+
+def DoClean(chrome_root, type):
+  """Clean our build dir."""
+  # rm -rf src/chrome/Debug
+  rv = [0]
+  def onError(func, path, excinfo):
+    Message("Couldn't remove '%s': %s" % (path, excinfo))
+    rv[0] = [1]
+
+  build_path = chrome_root + FixupPath("\\src\\chrome\\" + type)
+  Message("Cleaning: %s" % build_path)
+  shutil.rmtree(build_path, False, onError)
+  return rv[0]
+
+def DoBuild(chrome_root, chrome_sln, clean, type):
+  """devenv /build what we just checked out."""
+  if clean:
+    rv = DoClean(chrome_root, type)
+    if rv != 0:
+      Message("WARNING: Clean failed.  Doing a build without clean.")
+
+  # devenv chrome.sln /build Debug
+  cmd = ["devenv.com", chrome_sln, "/build", type]
+
+  Message("Building: %s" % cmd)
+  sys.stdout.flush()
+  return subprocess.call(cmd, cwd=chrome_root)
+
+def Main():
+  parser = optparse.OptionParser()
+  parser.add_option("", "--clean", action="store_true", default=False,
+		    help="wipe Debug output directory before building")
+  parser.add_option("", "--solution", default="src\\chrome\\chrome.sln",
+		    help="path to the .sln file to build (absolute, or "
+		    "relative to chrome trunk")
+  parser.add_option("", "--release", action="store_true", default=False,
+		    help="build the release configuration in addition of the debug configuration.")
+  parser.add_option("", "--nosync", action="store_true", default=False,
+		    help="doesn't sync before building.")
+  options, args = parser.parse_args()
+
+  if not args:
+    Message("Usage: %s <path\\to\\chrome\\root> [options]" % sys.argv[0])
+    sys.exit(1)
+
+  chrome_root = args[0]
+
+  if options.nosync!=True:
+    rv = DoUpdate(chrome_root)
+    if rv != 0:
+      Message("Update Failed.  Bailing.")
+      sys.exit(rv)
+
+  chrome_sln = FixupPath(options.solution)
+  rv = DoBuild(chrome_root, chrome_sln, options.clean, "Debug")
+  if rv != 0:
+    Message("Debug build failed.  Sad face :(")
+
+  if options.release:
+    rv = DoBuild(chrome_root, chrome_sln, options.clean, "Release")
+    if rv != 0:
+      Message("Release build failed.  Sad face :(")
+
+  if rv != 0:
+    sys.exit(rv)
+
+  Message("Success!")
+
+if __name__ == "__main__":
+  Main()
diff --git a/depot_tools/release/win/bin/gcl.py b/depot_tools/release/win/bin/gcl.py
new file mode 100644
index 0000000..c3ba0f67
--- /dev/null
+++ b/depot_tools/release/win/bin/gcl.py
@@ -0,0 +1,618 @@
+#!/usr/bin/python
+# Wrapper script around Rietveld's upload.py that groups files into
+# changelists.
+
+import getpass
+import linecache
+import os
+import random
+import re
+import string
+import subprocess
+import sys
+import tempfile
+import upload
+import urllib2
+
+SERVER = 'chrome-reviews.prom.corp.google.com'  # Rietveld server to use
+SVN_REV = 'http://chrome-svn/viewvc/chrome?view=rev&revision='
+CC_LIST = 'chrome-reviews@google.com'  # CC'd on all emails.
+
+
+# Use a shell for subcommands on Windows to get a PATH search, and because svn
+# may be a batch file.
+use_shell = sys.platform.startswith("win")
+
+
+# globals that store the root of the current repository and the directory where
+# we store information about changelists.
+repository_root = ""
+gcl_info_dir = ""
+
+
+def GetSVNFileInfo(file, field):
+  """Returns a field from the svn info output for the given file."""
+  output = RunShell(["svn", "info", file])
+  for line in output.splitlines():
+    search = field + ": "
+    if line.startswith(search):
+      return line[len(search):]
+  return ""
+
+
+def GetRepositoryRoot():
+  """Returns the top level directory of the current repository."""
+  global repository_root
+  if not repository_root:
+    cur_dir_repo_root = GetSVNFileInfo(os.getcwd(), "Repository Root")
+    if not cur_dir_repo_root:
+      ErrorExit("gcl run outside of repository")
+
+    repository_root = os.getcwd()
+    while True:
+      parent = os.path.dirname(repository_root)
+      if GetSVNFileInfo(parent, "Repository Root") != cur_dir_repo_root:
+        break
+      repository_root = parent
+  return repository_root
+
+
+def GetInfoDir():
+  """Returns the directory where gcl info files are stored."""
+  global gcl_info_dir
+  if not gcl_info_dir:
+    gcl_info_dir = os.path.join(GetRepositoryRoot(), '.svn', 'gcl_info')
+  return gcl_info_dir
+
+
+def ErrorExit(msg):
+  """Print an error message to stderr and exit."""
+  print >>sys.stderr, msg
+  sys.exit(1)
+
+
+def RunShell(command, print_output=False):
+  """Executes a command and returns the output."""
+  p = subprocess.Popen(command, stdout = subprocess.PIPE,
+                       stderr = subprocess.STDOUT, shell = use_shell)
+  if print_output:
+    output_array = []
+    while True:
+      line = p.stdout.readline()
+      if not line:
+        break
+      if print_output:
+        print line.strip('\n')
+      output_array.append(line)
+    output = "".join(output_array)
+  else:
+    output = p.stdout.read()
+  p.wait()
+  p.stdout.close()
+  return output
+
+
+def ReadFile(filename):
+  """Returns the contents of a file."""
+  file = open(filename, 'r')
+  result = file.read()
+  file.close()
+  return result
+
+
+def WriteFile(filename, contents):
+  """Overwrites the file with the given contents."""
+  file = open(filename, 'w')
+  file.write(contents)
+  file.close()
+
+
+class ChangeInfo:
+  """Holds information about a changelist.
+  
+    issue: the Rietveld issue number, or "" if it hasn't been uploaded yet.
+    description: the description.
+    files: a list of 2-tuples containing (status, filename) of changed files,
+           with paths being relative to the top repository directory.
+  """
+  def __init__(self, name="", issue="", description="", files=[]):
+    self.name = name
+    self.issue = issue
+    self.description = description
+    self.files = files
+
+  def FileList(self):
+    """Returns a list of files."""
+    return [file[1] for file in self.files]
+
+  def Save(self):
+    """Writes the changelist information to disk."""
+    data = SEPARATOR.join([self.issue,
+                          "\n".join([f[0] + f[1] for f in self.files]),
+                          self.description])
+    WriteFile(GetChangelistInfoFile(self.name), data)
+
+  def Delete(self):
+    """Removes the changelist information from disk."""
+    os.remove(GetChangelistInfoFile(self.name))
+
+  def CloseIssue(self):
+    """Closes the Rietveld issue for this changelist."""
+    data = [("description", self.description),]
+    ctype, body = upload.EncodeMultipartFormData(data, [])
+    SendToRietveld("/" + self.issue + "/close", body, ctype)
+
+  def UpdateRietveldDescription(self):
+    """Sets the description for an issue on Rietveld."""
+    data = [("description", self.description),]
+    ctype, body = upload.EncodeMultipartFormData(data, [])
+    SendToRietveld("/" + self.issue + "/description", body, ctype)
+
+  
+SEPARATOR = "\n-----\n"
+# The info files have the following format:
+# issue_id\n
+# SEPARATOR\n
+# filepath1\n
+# filepath2\n
+# .
+# .
+# filepathn\n
+# SEPARATOR\n
+# description
+
+
+def GetChangelistInfoFile(changename):
+  """Returns the file that stores information about a changelist."""
+  return os.path.join(GetInfoDir(), changename)
+
+
+def LoadChangelistInfo(changename, fail_on_not_found=True,
+                       update_status=False):
+  """Gets information about a changelist.
+  
+  Args:
+    fail_on_not_found: if True, this function will quit the program if the
+      changelist doesn't exist.
+    update_status: if True, the svn status will be updated for all the files
+      and unchanged files will be removed.
+  
+  Returns: a ChangeInfo object.
+  """
+  info_file = GetChangelistInfoFile(changename)
+  if not os.path.exists(info_file):
+    if fail_on_not_found:
+      ErrorExit("Changelist " + changename + " not found.")
+    return ChangeInfo(changename)
+  data = ReadFile(info_file)
+  split_data = data.split(SEPARATOR, 2)
+  if len(split_data) != 3:
+    os.remove(info_file)
+    ErrorExit("Changelist file %s was corrupt and deleted" % info_file)
+  issue = split_data[0]
+  files = []
+  for line in split_data[1].splitlines():
+    status = line[:7]
+    file = line[7:]
+    files.append((status, file))
+  description = split_data[2]  
+  save = False
+  if update_status:
+    for file in files:
+      filename = os.path.join(GetRepositoryRoot(), file[1])
+      status = RunShell(["svn", "status", filename])[:7]
+      if not status:  # File has been reverted.
+        save = True
+        files.remove(file)
+      elif status != file[0]:
+        save = True
+        files[files.index(file)] = (status, file[1])
+  change_info = ChangeInfo(changename, issue, description, files)
+  if save:
+    change_info.Save()
+  return change_info
+
+
+def GetCLs():
+  """Returns a list of all the changelists in this repository."""
+  return os.listdir(GetInfoDir())
+
+
+def GenerateChangeName():
+  """Generate a random changelist name."""
+  random.seed()
+  current_cl_names = GetCLs()
+  while True:
+    cl_name = (random.choice(string.ascii_lowercase) +
+               random.choice(string.digits) +
+               random.choice(string.ascii_lowercase) +
+               random.choice(string.digits))
+    if cl_name not in current_cl_names:
+      return cl_name
+
+
+def GetModifiedFiles():
+  """Returns a set that maps from changelist name to (status,filename) tuples.
+
+  Files not in a changelist have an empty changelist name.  Filenames are in
+  relation to the top level directory of the current repository.  Note that
+  only the current directory and subdirectories are scanned, in order to
+  improve performance while still being flexible.
+  """
+  files = {}
+  
+  # Since the files are normalized to the root folder of the repository, figure
+  # out what we need to add to the paths.
+  dir_prefix = os.getcwd()[len(GetRepositoryRoot()):].strip(os.sep)
+
+  # Get a list of all files in changelists.
+  files_in_cl = {}
+  for cl in GetCLs():
+    change_info = LoadChangelistInfo(cl)
+    for status, filename in change_info.files:
+      files_in_cl[filename] = change_info.name
+
+  # Get all the modified files.
+  status = RunShell(["svn", "status"])
+  for line in status.splitlines():
+    if not len(line) or line[0] == "?":
+      continue
+    status = line[:7]
+    filename = line[7:]
+    if dir_prefix:
+      filename = os.path.join(dir_prefix, filename)
+    change_list_name = ""
+    if filename in files_in_cl:
+      change_list_name = files_in_cl[filename]
+    files.setdefault(change_list_name, []).append((status, filename))
+
+  return files
+
+
+def GetFilesNotInCL():
+  """Returns a list of tuples (status,filename) that aren't in any changelists.
+  
+  See docstring of GetModifiedFiles for information about path of files and
+  which directories are scanned.
+  """
+  modified_files = GetModifiedFiles()
+  if "" not in modified_files:
+    return []
+  return modified_files[""]
+
+
+def SendToRietveld(request_path, payload=None,
+                   content_type="application/octet-stream"):
+  """Send a POST/GET to Rietveld.  Returns the response body."""
+  def GetUserCredentials():
+    """Prompts the user for a username and password."""
+    email = raw_input("Email: ").strip()
+    password = getpass.getpass("Password for %s: " % email)
+    return email, password
+
+  rpc_server = upload.HttpRpcServer(SERVER,
+                                    GetUserCredentials,
+                                    host_override=SERVER,
+                                    save_cookies=True)
+  return rpc_server.Send(request_path, payload, content_type)
+
+
+def GetIssueDescription(issue):
+  """Returns the issue description from Rietveld."""
+  return SendToRietveld("/" + issue + "/description")
+
+
+def UnknownFiles(extra_args):
+  """Runs svn status and prints unknown files.
+
+  Any args in |extra_args| are passed to the tool to support giving alternate
+  code locations.
+  """
+  args = ["svn", "status"]
+  args += extra_args
+  p = subprocess.Popen(args, stdout = subprocess.PIPE,
+                       stderr = subprocess.STDOUT, shell = use_shell)
+  while 1:
+    line = p.stdout.readline()
+    if not line:
+      break
+    if line[0] != '?':
+      continue  # Not an unknown file to svn.
+      # The lines look like this:
+      # "?      foo.txt"
+      # and we want just "foo.txt"
+      line = line[7:]
+  p.wait()
+  p.stdout.close()
+
+
+def Opened():
+  """Prints a list of modified files in the current directory down."""
+  files = GetModifiedFiles()
+  cl_keys = files.keys()
+  cl_keys.sort()
+  for cl_name in cl_keys:
+    if cl_name:
+      note = ""
+      if len(LoadChangelistInfo(cl_name).files) != len(files[cl_name]):
+        note = " (Note: this changelist contains files outside this directory)"
+      print "\n--- Changelist " + cl_name + note + ":"
+    for file in files[cl_name]:
+      print "".join(file)
+
+
+def Help():
+  print ("GCL is a wrapper for Subversion that simplifies working with groups "
+         "of files.\n")
+  print "Basic commands:"
+  print "-----------------------------------------"
+  print "   gcl change change_name"
+  print ("      Add/remove files to a changelist.  Only scans the current "
+         "directory and subdirectories.\n")
+  print ("   gcl upload change_name [-r reviewer1@gmail.com,"
+         "reviewer2@gmail.com,...] [--send_mail]")
+  print "      Uploads the changelist to the server for review.\n"
+  print "   gcl commit change_name"
+  print "      Commits the changelist to the repository.\n"
+  print "Advanced commands:"
+  print "-----------------------------------------"
+  print "   gcl delete change_name"
+  print "      Deletes a changelist.\n"
+  print "   gcl diff change_name"
+  print "      Diffs all files in the changelist.\n"
+  print "   gcl diff"
+  print ("      Diffs all files in the current directory and subdirectories "
+         "that aren't in a changelist.\n")
+  print "   gcl changes"
+  print "      Lists all the the changelists and the files in them.\n"
+  print "   gcl nothave [optional directory]"
+  print "      Lists files unknown to Subversion.\n"
+  print "   gcl opened"
+  print ("      Lists modified files in the current directory and "
+         "subdirectories.\n")
+
+
+def GetEditor():
+  editor = os.environ.get("SVN_EDITOR")
+  if not editor:
+    editor = os.environ.get("EDITOR")
+
+  if not editor:
+    if sys.platform.startswith("win"):
+      editor = "notepad"
+    else:
+      editor = "vi"
+
+  return editor
+
+
+def GenerateDiff(files):
+  """Returns a string containing the diff for the given file list."""
+  diff = []
+  for file in files:
+    # Use svn info output instead of os.path.isdir because the latter fails
+    # when the file is deleted.
+    if GetSVNFileInfo(file, "Node Kind") == "directory":
+      continue
+    diff.append(RunShell(["svn", "diff", "--diff-cmd=diff", file]))
+  return "".join(diff)
+
+
+def UploadCL(change_info, args):
+  upload_arg = ["upload.py", "-y", "-l"]
+  upload_arg.append("--server=" + SERVER)
+  upload_arg.extend(args)
+
+  desc_file = ""
+  if change_info.issue:  # Uploading a new patchset.
+    upload_arg.append("--message=''")
+    upload_arg.append("--issue=" + change_info.issue)
+  else: # First time we upload.
+    handle, desc_file = tempfile.mkstemp(text=True)
+    os.write(handle, change_info.description)
+    os.close(handle)
+
+    upload_arg.append("--cc=" + CC_LIST)
+    upload_arg.append("--description_file=" + desc_file + "")
+    if change_info.description:
+      subject = change_info.description[:77]
+      if subject.find("\r\n") != -1:
+        subject = subject[:subject.find("\r\n")]
+      if subject.find("\n") != -1:
+        subject = subject[:subject.find("\n")]
+      if len(change_info.description) > 77:
+        subject = subject + "..."
+      upload_arg.append("--message=" + subject)
+  
+  # Change the current working directory before calling upload.py so that it
+  # shows the correct base.
+  os.chdir(GetRepositoryRoot())
+
+  # If we have a lot of files with long paths, then we won't be able to fit
+  # the command to "svn diff".  Instead, we generate the diff manually for
+  # each file and concatenate them before passing it to upload.py.
+  issue = upload.RealMain(upload_arg, GenerateDiff(change_info.FileList()))
+  if issue and issue != change_info.issue:
+    change_info.issue = issue
+    change_info.Save()
+
+  if desc_file:
+    os.remove(desc_file)
+
+
+def Commit(change_info):
+  commit_cmd = ["svn", "commit"]
+  filename = ''
+  if change_info.issue:
+    # Get the latest description from Rietveld.
+    change_info.description = GetIssueDescription(change_info.issue)
+
+  commit_message = change_info.description.replace('\r\n', '\n')
+  # Removed because we don't want to leak the url in the description.
+  # Fix this when the url is external.
+  # commit_message += '\nReview URL: ' + url
+
+  handle, commit_filename = tempfile.mkstemp(text=True)
+  os.write(handle, commit_message)
+  os.close(handle)
+
+  handle, targets_filename = tempfile.mkstemp(text=True)
+  os.write(handle, "\n".join(change_info.FileList()))
+  os.close(handle)
+
+  commit_cmd += ['--file=' + commit_filename]
+  commit_cmd += ['--targets=' + targets_filename]
+  # Change the current working directory before calling commit.
+  os.chdir(GetRepositoryRoot())
+  output = RunShell(commit_cmd, True)
+  os.remove(commit_filename)
+  os.remove(targets_filename)
+  if output.find("Committed revision") != -1:
+    change_info.Delete()
+
+    if change_info.issue:
+      revision = re.compile(".*?\nCommitted revision (\d+)",
+                            re.DOTALL).match(output).group(1)
+      change_info.description = (change_info.description +
+                                 "\n\nCommitted: " + SVN_REV + revision)
+      change_info.CloseIssue()
+
+
+def Change(change_info):
+  """Creates/edits a changelist."""
+  if change_info.issue:
+    try:
+      description = GetIssueDescription(change_info.issue)
+    except urllib2.HTTPError, err:
+      if err.code == 404:
+        # The user deleted the issue in Rietveld, so forget the old issue id.
+        description = change_info.description
+        change_info.issue = ""
+        change_info.Save()
+      else:
+        ErrorExit("Error getting the description from Rietveld: " + err)
+  else:
+    description = change_info.description
+
+  other_files = GetFilesNotInCL()
+
+  separator1 = ("\n---All lines above this line become the description.\n"
+                "---Repository Root: " + GetRepositoryRoot() + "\n"
+                "---Paths in this changelist (" + change_info.name + "):\n")
+  separator2 = "\n\n---Paths modified but not in any changelist:\n\n"
+  text = (description + separator1 + '\n' +
+          '\n'.join([f[0] + f[1] for f in change_info.files]) + separator2 +
+          '\n'.join([f[0] + f[1] for f in other_files]) + '\n')
+
+  handle, filename = tempfile.mkstemp(text=True)
+  os.write(handle, text)
+  os.close(handle)
+  
+  command = GetEditor() + " " + filename
+  os.system(command)
+
+  result = ReadFile(filename)
+  os.remove(filename)
+
+  if not result:
+    return
+
+  split_result = result.split(separator1, 1)
+  if len(split_result) != 2:
+    ErrorExit("Don't modify the text starting with ---!\n\n" + result)
+
+  new_description = split_result[0]
+  cl_files_text = split_result[1]
+  if new_description != description:
+    change_info.description = new_description
+    if change_info.issue:
+      # Update the Rietveld issue with the new description.
+      change_info.UpdateRietveldDescription()
+
+  new_cl_files = []
+  for line in cl_files_text.splitlines():
+    if not len(line):
+      continue
+    if line.startswith("---"):
+      break
+    status = line[:7]
+    file = line[7:]
+    new_cl_files.append((status, file))
+  change_info.files = new_cl_files
+
+  change_info.Save()
+  print change_info.name + " changelist saved."
+
+
+def Changes():
+  """Print all the changlists and their files."""
+  for cl in GetCLs():
+    change_info = LoadChangelistInfo(cl, True, True)
+    print "\n--- Changelist " + change_info.name + ":"
+    for file in change_info.files:
+      print "".join(file)
+
+
+def main(argv=None):
+  if argv is None:
+    argv = sys.argv
+  
+  if len(argv) == 1:
+    Help()
+    return 0;
+
+  # Create the directory where we store information about changelists if it
+  # doesn't exist.
+  if not os.path.exists(GetInfoDir()):
+    os.mkdir(GetInfoDir())
+
+  command = argv[1]
+  if command == "opened":
+    Opened()
+    return 0
+  if command == "nothave":
+    UnknownFiles(argv[2:])
+    return 0
+  if command == "changes":
+    Changes()
+    return 0
+  if command == "diff" and len(argv) == 2:
+    files = GetFilesNotInCL()
+    print GenerateDiff([os.path.join(GetRepositoryRoot(), x[1]) for x in files])
+    return 0
+
+  if len(argv) == 2:
+    if command == "change":
+      # Generate a random changelist name.
+      changename = GenerateChangeName()
+    elif command == "help":
+      Help()
+      return 0
+    else:
+      ErrorExit("Need a changelist name.")
+  else:
+    changename = argv[2]
+
+  fail_on_not_found = command != "change"
+  change_info = LoadChangelistInfo(changename, fail_on_not_found, True)
+
+  if command == "change":
+    Change(change_info)
+  elif command == "upload":
+    UploadCL(change_info, argv[3:])
+  elif command == "commit":
+    Commit(change_info)
+  elif command == "delete":
+    change_info.Delete()
+  else:
+    # Everything else that is passed into gcl we redirect to svn, after adding
+    # the files. This allows commands such as 'gcl diff xxx' to work.
+    args =["svn", command]
+    root = GetRepositoryRoot()
+    args.extend([os.path.join(root, x) for x in change_info.FileList()])
+    RunShell(args, True)
+  return 0
+
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/depot_tools/release/win/bin/gclient.bat b/depot_tools/release/win/bin/gclient.bat
new file mode 100644
index 0000000..c7c3e6f
--- /dev/null
+++ b/depot_tools/release/win/bin/gclient.bat
@@ -0,0 +1,11 @@
+@echo off
+
+:: This batch file assumes that the correct version of svn and python can be
+:: found in the current directory.
+
+setlocal
+set PATH=%~dp0\svn;%WINDIR%\system32
+
+xcopy %~dp0..\wrappers\*.* %~dp0..\..\bin /e /c /d /y /q > nul
+
+%~dp0..\python_24\python.exe %~dp0\gclient.py %*
diff --git a/depot_tools/release/win/bin/gclient.py b/depot_tools/release/win/bin/gclient.py
new file mode 100644
index 0000000..620de901
--- /dev/null
+++ b/depot_tools/release/win/bin/gclient.py
@@ -0,0 +1,898 @@
+#!/usr/bin/python
+#
+# Copyright 2008 Google Inc.  All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""A wrapper script to manage a set of client modules in (initially) svn.
+
+This script is intended to be used to help basic management of client
+program sources residing in one or more Subversion modules, along with
+other modules it depends on, also in Subversion, but possibly on
multiple repositories, making a wrapper system apparently necessary.
+
+Files
+  .gclient      : Current client configuration, written by 'config' command.
+                  Format is a Python script defining 'solutions', a list whose
+                  entries each are maps binding the strings "name" and "url"
+                  to strings specifying the name and location of the client
+                  module, as well as "custom_deps" to a map similar to the DEPS
+                  file below.
+  .gclient_entries : A cache constructed by 'update' command.  Format is a
+                  Python script defining 'entries', a list of the names
+                  of all modules in the client
+  <module>/DEPS : Python script defining var 'deps' as a map from each requisite
+                  submodule name to a URL where it can be found (via svn)
+"""
+
+__author__ = "darinf@gmail.com (Darin Fisher)"
+
+import optparse
+import os
+import subprocess
+import sys
+import urlparse
+
+SVN_COMMAND = "svn"
+CLIENT_FILE = os.environ.get("GCLIENT_FILE", ".gclient")
+CLIENT_ENTRIES_FILE = ".gclient_entries"
+DEPS_FILE = "DEPS"
+
+# default help text
+DEFAULT_USAGE_TEXT = (
+    """usage: %prog <subcommand> [options] [--] [svn options/args...]
+a wrapper for managing a set of client modules in svn.
+
+subcommands:
+   config
+   diff
+   status
+   sync
+   update
+
+Options and extra arguments can be passed to invoked svn commands by
+appending them to the command line.  Note that if the first such
+appended option starts with a dash (-) then the options must be
+preceded by -- to distinguish them from gclient options.
+
+For additional help on a subcommand or examples of usage, try
+   %prog help <subcommand>
+   %prog help files
+""")
+
# Usage text shared by the "sync" and "update" subcommands; parameterized by
# the invoked command name ("cmd") and the name of its alias ("alias").
GENERIC_UPDATE_USAGE_TEXT = (
    """Perform a checkout/update of the modules specified by the gclient
configuration; see 'help config'.  Unless --revision is specified,
then the latest revision of the root solutions is checked out, with
dependent submodule versions updated according to DEPS files.
If --revision is specified, then the given revision is used in place
of the latest, either for a single solution or for all solutions.
Unless the --force option is provided, solutions and modules whose
local revision matches the one to update (i.e., they have not changed
in the repository) are *not* modified.
This is a synonym for 'gclient %(alias)s'

usage: gclient %(cmd)s [options] [--] [svn update options/args]

Valid options:
  --force             : force update even for unchanged modules
  --revision REV      : update/checkout all solutions with specified revision
  --revision SOLUTION@REV : update given solution to specified revision
  --verbose           : output additional diagnostics

Examples:
  gclient %(cmd)s
      update files from SVN according to current configuration,
      *for modules which have changed since last update or sync*
  gclient %(cmd)s --force
      update files from SVN according to current configuration, for
      all modules (useful for recovering files deleted from local copy)
""")
+
+COMMAND_USAGE_TEXT = {
+    "config": """Create a .gclient file in the current directory; this
+specifies the configuration for further commands.  After update/sync,
+top-level DEPS files in each module are read to determine dependent
+modules to operate on as well.  If optional [svnurl] parameter is
+provided, then configuration is read from a specified Subversion server
+URL.  Otherwise, a --spec option must be provided.
+
+usage: config [option | svnurl]
+
+Valid options:
+  --spec=GCLIENT_SPEC   : contents of .gclient are read from string parameter.
+                          *Note that due to Cygwin/Python brokenness, it
+                          probably can't contain any newlines.*
+
+Examples:
+  gclient config https://gclient.googlecode.com/svn/trunk/gclient
+      configure a new client to check out gclient.py tool sources
+  gclient config --spec='solutions=[{"name":"gclient","""
+    '"url":"https://gclient.googlecode.com/svn/trunk/gclient",'
+    '"custom_deps":{}}]',
+    "diff": """Display the differences between two revisions of modules.
+(Does 'svn diff' for each checked out module and dependences.)
+Additional args and options to 'svn diff' can be passed after
+gclient options.
+
+usage: diff [options] [--] [svn args/options]
+
+Valid options:
+  --verbose            : output additional diagnostics
+
+Examples:
+  gclient diff
+      simple 'svn diff' for configured client and dependences
+  gclient diff -- -x -b
+      use 'svn diff -x -b' to suppress whitespace-only differences
+  gclient diff -- -r HEAD -x -b
+      diff versus the latest version of each module
+""",
+    "status":
+    """Show the status of client and dependent modules, using 'svn diff'
+for each module.  Additional options and args may be passed to 'svn diff'.
+
+usage: status [options] [--] [svn diff args/options]
+
+Valid options:
+  --verbose           : output additional diagnostics
+""",
+    "sync": GENERIC_UPDATE_USAGE_TEXT % {"cmd": "sync", "alias": "update"},
+    "update": GENERIC_UPDATE_USAGE_TEXT % {"cmd": "update", "alias": "sync"},
+    "help": """Describe the usage of this program or its subcommands.
+
+usage: help [options] [subcommand]
+
+Valid options:
+  --verbose           : output additional diagnostics
+""",
+}
+
+# parameterized by (solution_name, solution_svnurl)
+DEFAULT_CLIENT_FILE_TEXT = (
+    """
+# An element of this array (a \"solution\") describes a repository directory
+# that will be checked out into your working copy.  Each solution may
+# optionally define additional dependencies (via its DEPS file) to be
+# checked out alongside the solution's directory.  A solution may also
+# specify custom dependencies (via the \"custom_deps\" property) that
+# override or augment the dependencies specified by the DEPS file.
+solutions = [
+  { \"name\"        : \"%s\",
+    \"url\"         : \"%s\",
+    \"custom_deps\" : {
+      # To use the trunk of a component instead of what's in DEPS:
+      #\"component\": \"https://svnserver/component/trunk/\",
+      # To exclude a component from your working copy:
+      #\"data/really_large_component\": None,
+    }
+  }
+]
+""")
+
+# -----------------------------------------------------------------------------
+# generic utils:
+
+
class Error(Exception):
  """Base exception for all gclient-specific failures.

  Attributes:
    message: Human-readable description of what went wrong.
  """

  def __init__(self, message):
    # Stash the message first, then run the base initializer.
    self.message = message
    Exception.__init__(self)
+
+# -----------------------------------------------------------------------------
+# SVN utils:
+
+
def RunSVN(args, in_directory,
           output_stream=sys.stdout,
           call=subprocess.call,
           realpath=os.path.realpath):
  """Runs svn, sending output to stdout.

  Args:
    args: A sequence of command line parameters to be passed to svn.
    in_directory: The directory where svn is to be run.

    Dependencies (for testing)
    output_stream: stream for messages to the user
    call: "subprocess.call" function
    realpath: "os.path.realpath" function

  Returns:
    The svn exit code; effectively always 0, since any non-zero exit
    raises Error instead.

  Raises:
    Error: An error occurred while running the svn command.
  """
  c = [SVN_COMMAND]
  c.extend(args)
  # Announce the command before running it so the user can follow progress.
  print >> output_stream, (
      "\n________ running \'%s\' in \'%s\'" % (" ".join(c),
                                               realpath(in_directory)))
  output_stream.flush()  # flush our stdout so it shows up first.
  # shell=True so svn.bat/svn.exe resolution goes through the shell on
  # Windows (this tree is the win prototype of depot_tools).
  rv = call(c, cwd=in_directory, shell=True)
  if rv:
    raise Error("failed to run command: %s" % " ".join(c))
  return rv
+
+
def CaptureSVN(args, in_directory, verbose):
  """Runs svn, capturing output sent to stdout as a string.

  Args:
    args: A sequence of command line parameters to be passed to svn.
    in_directory: The directory where svn is to be run.
    verbose: Enables verbose output if true.

  Returns:
    The output sent to stdout as a string.
  """
  c = [SVN_COMMAND]
  c.extend(args)
  if verbose:
    print ("\n________ running \'%s\' in \'%s\'"
           % (" ".join(c), os.path.realpath(in_directory)))
    sys.stdout.flush()  # flush our stdout so it shows up first.
  # Only stdout is captured; stderr stays attached to the console so any
  # svn errors remain visible to the user.
  return subprocess.Popen(c, cwd=in_directory, shell=True,
                          stdout=subprocess.PIPE).communicate()[0]
+
+
def CaptureSVNInfo(relpath, in_directory, verbose):
  """Runs 'svn info' on an existing path and parses the output.

  Args:
    relpath: The directory where the working copy resides relative to
      the directory given by in_directory.
    in_directory: The directory where svn is to be run.
    verbose: Enables verbose output if true.

  Returns:
    A dict of fields corresponding to the output of 'svn info', e.g.
    {"URL": ..., "Revision": ..., "Repository Root": ...}.  Lines that do
    not look like "key: value" are ignored.
  """
  info = CaptureSVN(["info", relpath], in_directory, verbose)
  result = {}
  for line in info.splitlines():
    # Split on the first ": " only, so values that themselves contain
    # ": " (e.g. timestamps in "Last Changed Date") are not truncated.
    fields = line.split(": ", 1)
    if len(fields) > 1:
      result[fields[0]] = fields[1]
  return result
+
+
def RunSVNCommandForModule(command, relpath, root_dir, args):
  """Runs a single svn command against one checked-out module.

  Args:
    command: The svn command to use (e.g., "status" or "diff")
    relpath: The directory where the working copy should reside relative
      to the given root_dir.
    root_dir: The directory from which relpath is relative.
    args: list of str - extra arguments to add to the svn command line.

  Returns:
    The result of RunSVN (0 on success; a failure raises Error).
  """
  command_line = [command]
  command_line.extend(args)
  module_dir = os.path.join(root_dir, relpath)
  return RunSVN(command_line, module_dir)
+
+
def UpdateToURL(relpath, svnurl, root_dir, options, args,
                output_stream=sys.stdout,
                path_exists=os.path.exists,
                capture_svn_info=CaptureSVNInfo,
                run_svn=RunSVN):
  """Runs svn to checkout or update the working copy.

  Args:
    relpath: The directory where the working copy should reside relative
      to the given root_dir.
    svnurl: The svn URL to checkout or update the relpath to.  May carry a
      pinned revision in the form URL@REV.
    root_dir: The directory from which relpath is relative.
    options: The Options object; attributes we care about:
      verbose - If true, then print diagnostic output.
      force - If true, also update modules with unchanged repository version.
      relocate - If true, permit 'svn switch --relocate' when the target
        repository root differs but the repository UUID matches.
    args: list of str - extra arguments to add to the svn command line.

    output_stream: stream for user messages
    path_exists: os.path.exists (for testing)
    capture_svn_info: CaptureSVNInfo (for testing)
    run_svn: RunSVN (for testing)

  Returns:
    The result of run_svn for the checkout/update/switch, or None when the
    operation is skipped (already at the pinned revision, or relocation
    not possible/declined).

  Raises:
    Error: if can't get URL for relative path.
  """
  # comps[0] is the bare URL; comps[1], when present, is the pinned revision.
  comps = svnurl.split("@")
  # by default, we run the svn command at the root directory level
  run_dir = root_dir
  if path_exists(os.path.join(root_dir, relpath)):
    # get the existing svn url and revision number:
    from_info = capture_svn_info(relpath, root_dir, options.verbose)
    from_url = from_info.get("URL", None)
    if from_url is None:
      raise Error(
          "Couldn't get URL for relative path: '%s' under root directory: %s.\n"
          "\tSVN URL was:\n\t\t%s\n"
          "\tInfo dict was:\n\t\t%s"
          % (relpath, root_dir, svnurl, from_info))

    if comps[0] != from_url:

      # The target URL differs from the checked-out one: we will either
      # relocate (different repository root) or switch (different branch).
      to_info = capture_svn_info(svnurl, root_dir, options.verbose)
      from_repository_root = from_info.get("Repository Root", None)
      to_repository_root = to_info.get("Repository Root", None)

      if from_repository_root and from_repository_root != to_repository_root:

        # We have different roots, so check if we can switch --relocate.
        # Subversion only permits this if the repository UUIDs match.
        from_repository_uuid = from_info.get("Repository UUID", None)
        to_repository_uuid = to_info.get("Repository UUID", None)
        if from_repository_uuid != to_repository_uuid:
          print >>output_stream, ("Skipping update to %s;\n"
                                  "\tcan not relocate to URL with different"
                                  " Repository UUID.\n"
                                  % (svnurl))
          return

        if not options.relocate:
          print >>output_stream, ("Skipping update to %s;\n"
                                  "\tuse the --relocate option to switch\n"
                                  "\tfrom %s\n"
                                  "\tto   %s.\n"
                                  % (svnurl, from_url, comps[0]))
          return

        # Perform the switch --relocate, then rewrite the from_url
        # to reflect where we "are now."  (This is the same way that
        # Subversion itself handles the metadata when switch --relocate
        # is used.)  This makes the checks below for whether we
        # can update to a revision or have to switch to a different
        # branch work as expected.
        run_svn(["switch", "--relocate",
                 from_repository_root, to_repository_root], root_dir)
        from_url = from_url.replace(from_repository_root, to_repository_root)

    # by default, we assume that we cannot just use 'svn update'
    can_update = False

    # if the provided svn url has a revision number that matches the revision
    # number of the existing directory, then we don't need to bother updating.
    if comps[0] == from_url:
      can_update = True
      if (not options.force and
          len(comps) > 1 and comps[1] == from_info["Revision"]):
        if options.verbose:
          print >>output_stream, ("\n_____ %s at %s" %
                                  (from_info["URL"], from_info["Revision"]))
        return

    if can_update:
      # ok, we can update; adjust run_dir accordingly
      c = ["update"]
      if len(comps) > 1:
        c.extend(["-r", comps[1]])
      run_dir = os.path.join(root_dir, relpath)
    else:
      # otherwise, switch to the new svn url
      c = ["switch"]
      if len(comps) > 1:
        c.extend(["-r", comps[1]])
      c.extend([comps[0], relpath])
  else:
    # No working copy yet: perform a fresh checkout into relpath.
    c = ["checkout", svnurl, relpath]
  if args:
    c.extend(args)

  return run_svn(c, run_dir)
+
+# -----------------------------------------------------------------------------
+# gclient ops:
+
+
def CreateClientFileFromText(text):
  """Creates a .gclient file in the current directory from the given text.

  Args:
    text: The text of the .gclient file.
  """
  # Open outside the try block: if open() itself fails there is no file
  # object to close, and the previous code raised a confusing NameError
  # from the finally clause instead of the real IOError.
  f = open(CLIENT_FILE, "w")
  try:
    f.write(text)
  finally:
    f.close()
+
+
def CreateClientFile(solution_name, solution_url):
  """Creates a default .gclient file in the current directory.

  Args:
    solution_name: The name of the solution.
    solution_url: The svn URL of the solution.
  """
  # Fill the default template and delegate the actual write.
  CreateClientFileFromText(
      DEFAULT_CLIENT_FILE_TEXT % (solution_name, solution_url))
+
+
def CreateClientEntriesFile(client, entries):
  """Creates a .gclient_entries file to record the list of unique svn checkouts.

  The .gclient_entries file lives in the same directory as .gclient.

  Args:
    client: The client for which the entries file should be written.
    entries: A sequence of solution names.
  """
  text = "entries = [\n"
  for entry in entries:
    text += "  \"%s\",\n" % entry
  text += "]\n"
  # Build the path with os.path.join for consistency with
  # ReadClientEntriesFile, and close the file even if the write fails.
  f = open(os.path.join(client["root_dir"], CLIENT_ENTRIES_FILE), "w")
  try:
    f.write(text)
  finally:
    f.close()
+
+
def ReadClientEntriesFile(client):
  """Read the .gclient_entries file for the given client.

  Args:
    client: The client for which the entries file should be read.

  Returns:
    A sequence of solution names; empty if the entries file hasn't been
    created yet.
  """
  path = os.path.join(client["root_dir"], CLIENT_ENTRIES_FILE)
  if not os.path.exists(path):
    return []
  scope = {}
  # The entries file is a Python fragment written by CreateClientEntriesFile;
  # executing it defines 'entries' inside scope.
  execfile(path, scope)
  return scope["entries"]
+
+
def GetClient():
  """Searches for and loads a .gclient file relative to the current working dir.

  Walks from the current directory up toward the filesystem root, stopping
  at the first directory containing a .gclient file.

  Returns:
    A dict representing the contents of the .gclient file (plus "root_dir"
    and "source" keys), or an empty dict if no .gclient file exists.
  """
  path = os.path.realpath(os.curdir)
  client_file = os.path.join(path, CLIENT_FILE)
  while not os.path.exists(client_file):
    parent, leaf = os.path.split(path)
    if not leaf:
      # Reached the filesystem root without finding a .gclient file.
      return {}
    path = parent
    client_file = os.path.join(path, CLIENT_FILE)
  client = {}
  f = open(client_file)
  try:
    client_source = f.read()
    # The .gclient file is Python; executing it populates the client dict
    # (notably the 'solutions' list).
    exec(client_source, client)
  finally:
    f.close()
  # record the root directory and client source for later use
  client["root_dir"] = path
  client["source"] = client_source
  return client
+
+
class FromImpl:
  """Implements the From("module") construct available in DEPS files.

  A DEPS entry whose value is From("other") takes its URL from the DEPS
  file of the module named "other" instead of stating it inline.
  """

  def __init__(self, module_name):
    # Name of the module whose DEPS file defines this dependency.
    self.module_name = module_name

  def __str__(self):
    return 'From("%s")' % self.module_name
+
+
def GetDefaultSolutionDeps(client, solution_name, platform=None,
                           execf=execfile,
                           logger=sys.stdout):
  """Fetches the DEPS file for the specified solution.

  Args:
    client: The client containing the specified solution.
    solution_name: The name of the solution to query.
    platform: os platform (i.e. the output of sys.platform)
    execf: execfile function for testing
    logger: stream for user output

  Returns:
    A dict mapping module names (as relative paths) to svn URLs or an empty
    dict if the solution does not have a DEPS file.
  """
  deps_file = os.path.join(client["root_dir"], solution_name, DEPS_FILE)
  # Expose "From" to the DEPS file so it can reference other modules, and
  # default "deps_os" to empty so DEPS files may omit it entirely.
  scope = {"From": FromImpl, "deps_os": {}}
  try:
    execf(deps_file, scope)
  except EnvironmentError:
    # A missing/unreadable DEPS file is not fatal: the solution simply
    # contributes no dependencies.
    print >> logger, (
        "\nWARNING: DEPS file not found for solution: %s\n" % solution_name)
    return {}
  deps = scope["deps"]
  # load os specific dependencies if defined.  these dependencies may override
  # or extend the values defined by the 'deps' member.
  if platform is None:
    platform = sys.platform
  # Map a sys.platform value onto the deps_os key; anything unrecognized
  # is treated as "unix".
  deps_os_key = {
      "win32": "win",
      "win": "win",
      "darwin": "mac",
      "mac": "mac",
      "unix": "unix",
  }.get(platform, "unix")
  deps.update(scope["deps_os"].get(deps_os_key, {}))
  return deps
+
+
def GetAllDeps(client, solution_urls,
               get_default_solution_deps=GetDefaultSolutionDeps,
               capture_svn_info=CaptureSVNInfo):
  """Get the complete list of dependencies for the client.

  Args:
    client: The client for which to gather dependencies.
    solution_urls: A dict mapping module names (as relative paths) to svn URLs
      corresponding to the solutions specified by the client.  This parameter
      is passed as an optimization.

    get_default_solution_deps: GetDefaultSolutionDeps (for testing)
    capture_svn_info: CaptureSVNInfo (for testing)

  Returns:
    A dict mapping module names (as relative paths) to svn URLs -- or to
    From references, which UpdateAll resolves later -- corresponding to the
    entire set of dependencies to checkout for the given client.

  Raises:
    Error: If a dependency conflicts with another dependency or a solution.
  """
  deps = {}
  for solution in client["solutions"]:
    solution_deps = get_default_solution_deps(client, solution["name"])
    for d in solution_deps:
      if "custom_deps" in solution and d in solution["custom_deps"]:
        # custom_deps overrides the DEPS file; a None URL means "exclude".
        url = solution["custom_deps"][d]
        if url is None:
          continue
      else:
        url = solution_deps[d]
        #
        # if we have a From reference dependent on another solution, then just
        # skip the From reference.  when we pull deps for the solution, we will
        # take care of this dependency.
        #
        # if multiple solutions all have the same From reference, then we should
        # only add one to our list of dependencies.
        #
        if type(url) != str:
          if url.module_name in solution_urls:
            continue
          if d in deps and type(deps[d]) != str:
            if url.module_name == deps[d].module_name:
              continue
        else:
          # BUG FIX: only string URLs can be parsed.  Previously a From
          # reference that survived the checks above fell through to
          # urlparse.urlparse() and crashed; From references must instead
          # be recorded as-is so UpdateAll can resolve them.
          parsed_url = urlparse.urlparse(url)
          scheme = parsed_url[0]
          if not scheme:
            # Scheme-less entries are relative to the repository root of
            # the solution that declared them.
            path = parsed_url[2]
            if path[0] != "/":
              raise Error(
                  "relative DEPS entry \"%s\" must begin with a slash" % d)
            info = capture_svn_info(solution["url"], client["root_dir"], False)
            url = info["Repository Root"] + url
      if d in deps and deps[d] != url:
        raise Error(
            "solutions have conflicting versions of dependency \"%s\"" % d)
      if d in solution_urls and solution_urls[d] != url:
        raise Error(
            "dependency \"%s\" conflicts with specified solution" % d)
      deps[d] = url
  return deps
+
+
def RunSVNCommandForClientModules(
    command, client, verbose, args,
    run_svn_command_for_module=RunSVNCommandForModule,
    get_all_deps=GetAllDeps):
  """Runs an svn command on each svn module in a client and its dependencies.

  The module's dependencies are specified in its top-level DEPS files.

  Args:
    command: The svn command to use (e.g., "status" or "diff")
    client: The client for which to run the commands.
    verbose: If true, then print diagnostic output.  (Currently unused; kept
      for interface compatibility with the other Do* handlers.)
    args: list of str - extra arguments to add to the svn command line.
    run_svn_command_for_module: RunSVNCommandForModule (for testing)
    get_all_deps: GetAllDeps (for testing)

  Raises:
    Error: If the client has conflicting entries.
  """
  verbose = verbose  # Suppress lint warning.
  entries = {}

  # run on the base solutions first
  for s in client["solutions"]:
    name = s["name"]
    if name in entries:
      raise Error("solution specified more than once")
    entries[name] = s["url"]
    run_svn_command_for_module(command, name, client["root_dir"], args)

  # do the module dependencies next (sort alphanumerically for readability).
  # sorted() (Python 2.4+) replaces the keys()/sort() mutation and also
  # works if get_all_deps ever returns a keys view.
  for d in sorted(get_all_deps(client, entries)):
    run_svn_command_for_module(command, d, client["root_dir"], args)
+
+
def UpdateAll(client, options, args,
              update_to_url=UpdateToURL,
              get_all_deps=GetAllDeps,
              create_client_entries_file=CreateClientEntriesFile,
              read_client_entries_file=ReadClientEntriesFile,
              get_default_solution_deps=GetDefaultSolutionDeps,
              path_exists=os.path.exists,
              logger=sys.stdout):
  """Update all solutions and their dependencies.

  Args:
    client: The client to update.
    options: Options object; attributes we care about:
      verbose - If true, then print diagnostic output.
      force - If true, then also update modules with unchanged repo version.
      revision - If specified, a string SOLUTION@REV or just REV
    args: list of str - extra arguments to add to the svn command line.

    update_to_url: dependency (for testing)
    get_all_deps: dependency (for testing)
    create_client_entries_file: dependency (for testing)
    read_client_entries_file: dependency (for testing)
    get_default_solution_deps: dependency (for testing)
    path_exists: dependency (for testing)
    logger: dependency (for testing)

  Returns:
    0 on success; otherwise the first non-zero result returned by
    update_to_url.

  Raises:
    Error: If the client has conflicting entries.
  """
  entries = {}
  result = 0
  # update the solutions first so we can read their dependencies
  for s in client["solutions"]:
    name = s["name"]
    if name in entries:
      raise Error("solution specified more than once")
    url = s["url"]

    # Check if we should sync to a given revision
    if options.revision:
      url_elem = url.split("@")
      if options.revision.find("@") == -1:
        # We want to update all solutions.
        url = url_elem[0] + "@" + options.revision
      else:
        # Check if we want to update this solution.
        revision_elem = options.revision.split("@")
        if revision_elem[0] == name:
          url = url_elem[0] + "@" + revision_elem[1]

    entries[name] = url
    r = update_to_url(name, url, client["root_dir"], options, args)
    # Remember only the first failure; keep going so other modules still
    # get updated.
    if r and result == 0:
      result = r

  # update the dependencies next (sort alphanumerically to ensure that
  # containing directories get populated first)
  deps = get_all_deps(client, entries)
  deps_to_update = deps.keys()
  deps_to_update.sort()
  # first pass for explicit deps
  for d in deps_to_update:
    if type(deps[d]) == str:
      entries[d] = deps[d]
      r = update_to_url(d, deps[d], client["root_dir"], options, args)
      if r and result == 0:
        result = r
  # second pass for inherited deps (via the From keyword)
  for d in deps_to_update:
    if type(deps[d]) != str:
      # Resolve the From reference by reading the referenced module's DEPS.
      sub_deps = get_default_solution_deps(client, deps[d].module_name)
      entries[d] = sub_deps[d]
      r = update_to_url(d, sub_deps[d], client["root_dir"], options, args)
      if r and result == 0:
        result = r

  # notify the user if there is an orphaned entry in their working copy.
  # TODO(darin): we should delete this directory manually if it doesn't
  # have any changes in it.
  prev_entries = read_client_entries_file(client)
  for entry in prev_entries:
    e_dir = "%s/%s" % (client["root_dir"], entry)
    if entry not in entries and path_exists(e_dir):
      entries[entry] = None  # keep warning until removed
      print >> logger, (
          "\nWARNING: \"%s\" is no longer part of this client.  "
          "It is recommended that you manually remove it.\n") % entry

  # record the current list of entries for next time
  create_client_entries_file(client, entries)

  return result
+
+# -----------------------------------------------------------------------------
+
+
def DoConfig(options, args, client_file=CLIENT_FILE,
             path_exists=os.path.exists,
             create_client_file_from_text=CreateClientFileFromText,
             create_client_file=CreateClientFile):
  """Handle the config subcommand.

  Args:
    options: If options.spec set, a string providing contents of config file.
    args: The command line args.  If spec is not set,
          then args[0] is a string URL to get for config file.
    client_file: name of gclient file.

    path_exists: dependency (for testing)
    create_client_file_from_text: dependency (for testing)
    create_client_file: dependency (for testing)

  Raises:
    Error: on usage error
  """
  # Guard clauses: we need either --spec or a URL argument, and we refuse
  # to clobber an existing .gclient file.
  if not options.spec and len(args) < 1:
    raise Error("required argument missing; see 'gclient help config'")
  if path_exists(client_file):
    raise Error(".gclient file already exists in the current directory")
  if options.spec:
    create_client_file_from_text(options.spec)
    return
  # TODO(darin): it would be nice to be able to specify an alternate relpath
  # for the given svn URL.
  svnurl = args[0]
  create_client_file(svnurl.split("/")[-1], svnurl)
+
+
def DoHelp(options, args,
           output_stream=sys.stdout):
  """Handle the help subcommand giving help for another subcommand.

  Args:
    options: The command line options.
    args: The command line args.
    output_stream: dependency (for testing).

  Raises:
    Error: if the command is unknown.
  """
  options = options   # suppress lint warning
  # Guard clause: exactly one known subcommand name is required.
  if len(args) != 1 or args[0] not in COMMAND_USAGE_TEXT:
    raise Error("unknown subcommand; see 'gclient help'")
  # Equivalent to printing the usage text: write it plus a trailing newline.
  output_stream.write(COMMAND_USAGE_TEXT[args[0]] + "\n")
+
+
def DoStatus(options, args,
             get_client=GetClient,
             run_svn_command_for_client_modules=RunSVNCommandForClientModules):
  """Handle the status subcommand.

  Args:
    options: set options.verbose for extra logging
    args: list of str - extra arguments to add to the svn command line.
    get_client: dependency (for testing).
    run_svn_command_for_client_modules: dependency (for testing)

  Raises:
    Error: if client isn't configured properly.
  """
  client = get_client()
  if client:
    return run_svn_command_for_client_modules("status", client,
                                              options.verbose, args)
  raise Error("client not configured; see 'gclient config'")
+
+
def DoUpdate(options, args,
             get_client=GetClient,
             update_all=UpdateAll,
             output_stream=sys.stdout):
  """Handle the update and sync subcommands.

  Raises:
    Error: if client isn't configured properly.
  """
  client = get_client()
  if not client:
    raise Error("client not configured; see 'gclient config'")
  if options.verbose:
    # Echo the raw .gclient file rather than the parsed dict: it is more
    # legible and may contain helpful comments.
    output_stream.write(client["source"] + "\n")
  return update_all(client, options, args)
+
+
def DoDiff(options, args,
           get_client=GetClient,
           run_svn_command_for_client_modules=RunSVNCommandForClientModules,
           output_stream=sys.stdout):
  """Handle the diff subcommands.

  Raises:
    Error: if client isn't configured properly.
  """
  client = get_client()
  if not client:
    raise Error("client not configured; see 'gclient config'")
  if options.verbose:
    # Echo the raw .gclient file rather than the parsed dict: it is more
    # legible and may contain helpful comments.
    output_stream.write(client["source"] + "\n")
  return run_svn_command_for_client_modules("diff", client,
                                            options.verbose, args)
+
+
# Maps each subcommand name to its handler; "sync" and "update" are aliases
# for the same operation.
gclient_command_map = {
    "config": DoConfig,
    "diff": DoDiff,
    "help": DoHelp,
    "status": DoStatus,
    "sync": DoUpdate,
    "update": DoUpdate,
    }
+
+
def DispatchCommand(command, options, args, command_map=None):
  """Dispatches the appropriate subcommand based on command line arguments.

  Args:
    command: Name of the subcommand to run.
    options: Parsed option values, passed through to the handler.
    args: Remaining positional args, passed through to the handler.
    command_map: Optional map of name -> handler; defaults to
      gclient_command_map.

  Returns:
    Whatever the selected handler returns.

  Raises:
    Error: if the command is unknown.
  """
  if command_map is None:
    command_map = gclient_command_map
  # Guard clause instead of if/else: fail fast on unknown commands.
  if command not in command_map:
    raise Error("unknown subcommand; see 'gclient help'")
  return command_map[command](options, args)
+
+
def Main(argv):
  """Parse command line arguments and dispatch the selected subcommand.

  Args:
    argv: sys.argv-style list; argv[1] is the subcommand name.

  Returns:
    The result of the dispatched subcommand handler.

  Raises:
    Error: if no subcommand was given or the subcommand is unknown.
  """
  if len(argv) < 2:
    raise Error("required subcommand missing; see 'gclient help'")

  command = argv[1]

  parser = optparse.OptionParser()
  # Stop option parsing at the first positional argument so trailing svn
  # options pass through untouched.
  parser.disable_interspersed_args()
  parser.set_usage(DEFAULT_USAGE_TEXT)
  parser.add_option("", "--force", action="store_true", default=False,
                    help=("(update/sync only) force update even "
                          "for modules which haven't changed"))
  parser.add_option("", "--relocate", action="store_true",
                    default=False,
                    help="relocate")
  parser.add_option("", "--revision", default=None,
                    help=("(update/sync only) sync to a specific "
                          "revision"))
  parser.add_option("", "--spec", default=None,
                    help=("(config only) create a gclient file "
                          "containing the provided string"))
  parser.add_option("", "--verbose", action="store_true", default=False,
                    help="produce additional output for diagnostics")

  options, args = parser.parse_args(argv[2:])

  # A bare "gclient help" prints the top-level usage text and exits.
  if command == "help" and len(argv) < 3:
    parser.print_help()
    sys.exit(0)

  return DispatchCommand(command, options, args)
+
# Script entry point: run Main, converting gclient Errors into a friendly
# message on stdout and a non-zero exit code.
if "__main__" == __name__:
  try:
    result = Main(sys.argv)
  except Error, e:
    print "Error: %s" % e.message
    result = 1
  sys.exit(result)
diff --git a/depot_tools/release/win/bin/upload.py b/depot_tools/release/win/bin/upload.py
new file mode 100644
index 0000000..1e3ccf19
--- /dev/null
+++ b/depot_tools/release/win/bin/upload.py
@@ -0,0 +1,1004 @@
+#!/usr/bin/env python
+#
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Tool for uploading diffs from a version control system to the codereview app.
+
+Usage summary: upload.py [options] [-- diff_options]
+
+Diff options are passed to the diff command of the underlying system.
+
+Supported version control systems:
+  Git
+  Subversion
+
+(It is important for Git users to specify a tree-ish to diff against.)
+"""
+# This code is derived from appcfg.py in the App Engine SDK (open source),
+# and from ASPN recipe #146306.
+
+import cookielib
+import getpass
+import logging
+import md5
+import mimetypes
+import optparse
+import os
+import re
+import socket
+import subprocess
+import sys
+import urllib
+import urllib2
+import urlparse
+
+try:
+  import readline
+except ImportError:
+  pass
+
# The logging verbosity:
#  0: Errors only.
#  1: Status messages.
#  2: Info logs.
#  3: Debug logs.
# Overwritten from the --quiet/--verbose/--noisy flags in RealMain().
verbosity = 1

# Max size of patch or base file, in bytes; larger files are skipped with a
# message rather than uploaded.
MAX_UPLOAD_SIZE = 900 * 1024
+
+
+def StatusUpdate(msg):
+  """Print a status message to stdout.
+
+  If 'verbosity' is greater than 0, print the message.
+
+  Args:
+    msg: The string to print.
+  """
+  if verbosity > 0:
+    print msg
+
+
def ErrorExit(msg):
  """Write an error message to stderr and terminate with exit status 1."""
  sys.stderr.write("%s\n" % msg)
  sys.exit(1)
+
+
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""

  def __init__(self, url, code, msg, headers, args):
    # fp=None: the response body has already been consumed and parsed into
    # the args dict by the caller (_GetAuthToken).
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    # ClientLogin error code, e.g. "BadAuthentication" or "CaptchaRequired";
    # switched on in AbstractRpcServer._Authenticate.
    self.reason = args["Error"]
+
+
+class AbstractRpcServer(object):
+  """Provides a common interface for a simple RPC server."""
+
+  def __init__(self, host, auth_function, host_override=None, extra_headers={},
+               save_cookies=False):
+    """Creates a new HttpRpcServer.
+
+    Args:
+      host: The host to send requests to.
+      auth_function: A function that takes no arguments and returns an
+        (email, password) tuple when called. Will be called if authentication
+        is required.
+      host_override: The host header to send to the server (defaults to host).
+      extra_headers: A dict of extra headers to append to every request.
+      save_cookies: If True, save the authentication cookies to local disk.
+        If False, use an in-memory cookiejar instead.  Subclasses must
+        implement this functionality.  Defaults to False.
+    """
+    self.host = host
+    self.host_override = host_override
+    self.auth_function = auth_function
+    self.authenticated = False
+    self.extra_headers = extra_headers
+    self.save_cookies = save_cookies
+    self.opener = self._GetOpener()
+    if self.host_override:
+      logging.info("Server: %s; Host: %s", self.host, self.host_override)
+    else:
+      logging.info("Server: %s", self.host)
+
+  def _GetOpener(self):
+    """Returns an OpenerDirector for making HTTP requests.
+
+    Returns:
+      A urllib2.OpenerDirector object.
+    """
+    raise NotImplementedError()
+
+  def _CreateRequest(self, url, data=None):
+    """Creates a new urllib request."""
+    logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
+    req = urllib2.Request(url, data=data)
+    if self.host_override:
+      req.add_header("Host", self.host_override)
+    for key, value in self.extra_headers.iteritems():
+      req.add_header(key, value)
+    return req
+
+  def _GetAuthToken(self, email, password):
+    """Uses ClientLogin to authenticate the user, returning an auth token.
+
+    Args:
+      email:    The user's email address
+      password: The user's password
+
+    Raises:
+      ClientLoginError: If there was an error authenticating with ClientLogin.
+      HTTPError: If there was some other form of HTTP error.
+
+    Returns:
+      The authentication token returned by ClientLogin.
+    """
+    req = self._CreateRequest(
+        url="https://www.google.com/accounts/ClientLogin",
+        data=urllib.urlencode({
+            "Email": email,
+            "Passwd": password,
+            "service": "ah",
+            "source": "rietveld-codereview-upload",
+            "accountType": "HOSTED_OR_GOOGLE",
+        })
+    )
+    try:
+      response = self.opener.open(req)
+      response_body = response.read()
+      response_dict = dict(x.split("=")
+                           for x in response_body.split("\n") if x)
+      return response_dict["Auth"]
+    except urllib2.HTTPError, e:
+      if e.code == 403:
+        body = e.read()
+        response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
+        raise ClientLoginError(req.get_full_url(), e.code, e.msg,
+                               e.headers, response_dict)
+      else:
+        raise
+
+  def _GetAuthCookie(self, auth_token):
+    """Fetches authentication cookies for an authentication token.
+
+    Args:
+      auth_token: The authentication token returned by ClientLogin.
+
+    Raises:
+      HTTPError: If there was an error fetching the authentication cookies.
+    """
+    # This is a dummy value to allow us to identify when we're successful.
+    continue_location = "http://localhost/"
+    args = {"continue": continue_location, "auth": auth_token}
+    req = self._CreateRequest("http://%s/_ah/login?%s" %
+                              (self.host, urllib.urlencode(args)))
+    try:
+      response = self.opener.open(req)
+    except urllib2.HTTPError, e:
+      response = e
+    if (response.code != 302 or
+        response.info()["location"] != continue_location):
+      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
+                              response.headers, response.fp)
+    self.authenticated = True
+
+  def _Authenticate(self):
+    """Authenticates the user.
+
+    The authentication process works as follows:
+     1) We get a username and password from the user
+     2) We use ClientLogin to obtain an AUTH token for the user
+        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
+     3) We pass the auth token to /_ah/login on the server to obtain an
+        authentication cookie. If login was successful, it tries to redirect
+        us to the URL we provided.
+
+    If we attempt to access the upload API without first obtaining an
+    authentication cookie, it returns a 401 response and directs us to
+    authenticate ourselves with ClientLogin.
+    """
+    for i in range(3):
+      credentials = self.auth_function()
+      try:
+        auth_token = self._GetAuthToken(credentials[0], credentials[1])
+      except ClientLoginError, e:
+        if e.reason == "BadAuthentication":
+          print >>sys.stderr, "Invalid username or password."
+          continue
+        if e.reason == "CaptchaRequired":
+          print >>sys.stderr, (
+              "Please go to\n"
+              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
+              "and verify you are a human.  Then try again.")
+          break
+        if e.reason == "NotVerified":
+          print >>sys.stderr, "Account not verified."
+          break
+        if e.reason == "TermsNotAgreed":
+          print >>sys.stderr, "User has not agreed to TOS."
+          break
+        if e.reason == "AccountDeleted":
+          print >>sys.stderr, "The user account has been deleted."
+          break
+        if e.reason == "AccountDisabled":
+          print >>sys.stderr, "The user account has been disabled."
+          break
+        if e.reason == "ServiceDisabled":
+          print >>sys.stderr, ("The user's access to the service has been "
+                               "disabled.")
+          break
+        if e.reason == "ServiceUnavailable":
+          print >>sys.stderr, "The service is not available; try again later."
+          break
+        raise
+      self._GetAuthCookie(auth_token)
+      return
+
+  def Send(self, request_path, payload=None,
+           content_type="application/octet-stream",
+           timeout=None,
+           **kwargs):
+    """Sends an RPC and returns the response.
+
+    Args:
+      request_path: The path to send the request to, eg /api/appversion/create.
+      payload: The body of the request, or None to send an empty request.
+      content_type: The Content-Type header to use.
+      timeout: timeout in seconds; default None i.e. no timeout.
+        (Note: for large requests on OS X, the timeout doesn't work right.)
+      kwargs: Any keyword arguments are converted into query string parameters.
+
+    Returns:
+      The response body, as a string.
+    """
+    # TODO: Don't require authentication.  Let the server say
+    # whether it is necessary.
+    if not self.authenticated:
+      self._Authenticate()
+
+    old_timeout = socket.getdefaulttimeout()
+    socket.setdefaulttimeout(timeout)
+    try:
+      tries = 0
+      while True:
+        tries += 1
+        args = dict(kwargs)
+        url = "http://%s%s" % (self.host, request_path)
+        if args:
+          url += "?" + urllib.urlencode(args)
+        req = self._CreateRequest(url=url, data=payload)
+        req.add_header("Content-Type", content_type)
+        try:
+          f = self.opener.open(req)
+          response = f.read()
+          f.close()
+          return response
+        except urllib2.HTTPError, e:
+          if tries > 3:
+            raise
+          elif e.code == 401:
+            self._Authenticate()
+##           elif e.code >= 500 and e.code < 600:
+##             # Server Error - try again.
+##             continue
+          else:
+            raise
+    finally:
+      socket.setdefaulttimeout(old_timeout)
+
+
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    super(HttpRpcServer, self)._Authenticate()
    if self.save_cookies:
      StatusUpdate("Saving authentication cookies to %s" % self.cookie_file)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    # Deliberately built by hand (not urllib2.build_opener) so that NO
    # HTTPRedirectHandler is installed: the 302 from /_ah/login must reach
    # _GetAuthCookie instead of being transparently followed.
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(urllib2.HTTPSHandler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      # Persistent cookie jar in the user's home directory; it holds login
      # credentials, hence the 600 permissions below.
      self.cookie_file = os.path.expanduser("~/.codereview_upload_cookies")
      self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
      if os.path.exists(self.cookie_file):
        try:
          self.cookie_jar.load()
          # Loaded cookies let us skip the ClientLogin round-trip entirely.
          self.authenticated = True
          StatusUpdate("Loaded authentication cookies from %s" %
                       self.cookie_file)
        except (cookielib.LoadError, IOError):
          # Failed to load cookies - just ignore them.
          pass
      else:
        # Create an empty cookie file with mode 600
        fd = os.open(self.cookie_file, os.O_CREAT, 0600)
        os.close(fd)
      # Always chmod the cookie file
      os.chmod(self.cookie_file, 0600)
    else:
      # Don't save cookies across runs of update.py.
      self.cookie_jar = cookielib.CookieJar()
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
+
+
# Command-line option definitions for upload.py.  The parsed options are
# consumed by RealMain() and GetRpcServer().  NOTE: option ordering here
# determines --help output ordering.
parser = optparse.OptionParser(usage="%prog [options] [-- diff_options]")
parser.add_option("-y", "--assume_yes", action="store_true",
                  dest="assume_yes", default=False,
                  help="Assume that the answer to yes/no questions is 'yes'.")
# Logging options; these set the module-level 'verbosity' in RealMain().
group = parser.add_option_group("Logging options")
group.add_option("-q", "--quiet", action="store_const", const=0,
                 dest="verbose", help="Print errors only.")
group.add_option("-v", "--verbose", action="store_const", const=2,
                 dest="verbose", default=1,
                 help="Print info level logs (default).")
group.add_option("--noisy", action="store_const", const=3,
                 dest="verbose", help="Print all logs.")
# Review server options, consumed by GetRpcServer().
group = parser.add_option_group("Review server options")
group.add_option("-s", "--server", action="store", dest="server",
                 default="codereview.appspot.com",
                 metavar="SERVER",
                 help=("The server to upload to. The format is host[:port]. "
                       "Defaults to 'codereview.appspot.com'."))
group.add_option("-e", "--email", action="store", dest="email",
                 metavar="EMAIL", default=None,
                 help="The username to use. Will prompt if omitted.")
group.add_option("-H", "--host", action="store", dest="host",
                 metavar="HOST", default=None,
                 help="Overrides the Host header sent with all RPCs.")
group.add_option("--no_cookies", action="store_false",
                 dest="save_cookies", default=True,
                 help="Do not save authentication cookies to local disk.")
# Issue options
group = parser.add_option_group("Issue options")
group.add_option("-d", "--description", action="store", dest="description",
                 metavar="DESCRIPTION", default=None,
                 help="Optional description when creating an issue.")
group.add_option("-f", "--description_file", action="store",
                 dest="description_file", metavar="DESCRIPTION_FILE",
                 default=None,
                 help="Optional path of a file that contains "
                      "the description when creating an issue.")
group.add_option("-r", "--reviewers", action="store", dest="reviewers",
                 metavar="REVIEWERS", default=None,
                 help="Add reviewers (comma separated email addresses).")
group.add_option("--cc", action="store", dest="cc",
                 metavar="CC", default=None,
                 help="Add CC (comma separated email addresses).")
# Patch options
group = parser.add_option_group("Patch options")
group.add_option("-m", "--message", action="store", dest="message",
                 metavar="MESSAGE", default=None,
                 help="A message to identify the patch. "
                      "Will prompt if omitted.")
group.add_option("-i", "--issue", type="int", action="store",
                 metavar="ISSUE", default=None,
                 help="Issue number to which to add. Defaults to new issue.")
group.add_option("-l", "--local_base", action="store_true",
                 dest="local_base", default=False,
                 help="Base files will be uploaded.")
group.add_option("--send_mail", action="store_true",
                 dest="send_mail", default=False,
                 help="Send notification email to reviewers.")
+
+
def GetRpcServer(options):
  """Build the RPC server used to talk to the code review application.

  Args:
    options: parsed command-line options (server, host, email, save_cookies).

  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """

  def PromptUserCredentials():
    """Prompts the user for a username and password."""
    email = options.email
    if email is None:
      email = raw_input("Email: ").strip()
    password = getpass.getpass("Password for %s: " % email)
    return (email, password)

  # If this is the dev_appserver, use fake authentication: hand the server a
  # pre-cooked login cookie and skip ClientLogin entirely.
  target_host = (options.host or options.server).lower()
  if target_host == "localhost" or target_host.startswith("localhost:"):
    email = options.email
    if email is None:
      email = "test@example.com"
      logging.info("Using debug user %s.  Override with --email" % email)
    server = HttpRpcServer(
        options.server,
        lambda: (email, "password"),
        host_override=options.host,
        extra_headers={"Cookie":
                       'dev_appserver_login="%s:False"' % email},
        save_cookies=options.save_cookies)
    # Don't try to talk to ClientLogin.
    server.authenticated = True
    return server

  return HttpRpcServer(options.server, PromptUserCredentials,
                       host_override=options.host,
                       save_cookies=options.save_cookies)
+
+
def EncodeMultipartFormData(fields, files):
  """Encode form fields for multipart/form-data.

  Args:
    fields: A sequence of (name, value) elements for regular form fields.
    files: A sequence of (name, filename, value) elements for data to be
           uploaded as files.
  Returns:
    (content_type, body) ready for httplib.HTTP instance.

  Source:
    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/146306
  """
  BOUNDARY = '-M-A-G-I-C---B-O-U-N-D-A-R-Y-'
  CRLF = '\r\n'
  segments = []
  for (name, value) in fields:
    segments.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"' % name,
        '',
        value,
    ])
  for (name, filename, value) in files:
    segments.extend([
        '--' + BOUNDARY,
        'Content-Disposition: form-data; name="%s"; filename="%s"' %
            (name, filename),
        'Content-Type: %s' % GetContentType(filename),
        '',
        value,
    ])
  segments.append('--' + BOUNDARY + '--')
  segments.append('')
  content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
  return content_type, CRLF.join(segments)
+
+
def GetContentType(filename):
  """Guess the MIME content-type from a filename, defaulting to octet-stream."""
  mime_type = mimetypes.guess_type(filename)[0]
  if mime_type:
    return mime_type
  return 'application/octet-stream'
+
+
# Use a shell for subcommands on Windows to get a PATH search.
use_shell = sys.platform.startswith("win")


def RunShell(command, silent_ok=False, universal_newlines=False):
  """Run a subcommand, returning its combined stdout/stderr output.

  Args:
    command: the command to run (list, or string on Windows).
    silent_ok: if False, empty output is treated as an error.
    universal_newlines: passed through to subprocess.Popen.

  Exits via ErrorExit() if the command fails or (unless silent_ok) produces
  no output.
  """
  logging.info("Running %s", command)
  process = subprocess.Popen(command,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT,
                             shell=use_shell,
                             universal_newlines=universal_newlines)
  output = process.stdout.read()
  process.wait()
  process.stdout.close()
  if process.returncode:
    ErrorExit("Got error status from %s" % command)
  if not silent_ok and not output:
    ErrorExit("No output from %s" % command)
  return output
+
+
+class VersionControlSystem(object):
+  """Abstract base class providing an interface to the VCS."""
+
+  def GenerateDiff(self, args):
+    """Return the current diff as a string.
+
+    Args:
+      args: Extra arguments to pass to the diff command.
+    """
+    raise NotImplementedError(
+        "abstract method -- subclass %s must override" % self.__class__)
+
+  def GetUnknownFiles(self):
+    """Return a list of files unknown to the VCS."""
+    raise NotImplementedError(
+        "abstract method -- subclass %s must override" % self.__class__)
+
+  def CheckForUnknownFiles(self):
+    """Show an "are you sure?" prompt if there are unknown files."""
+    unknown_files = self.GetUnknownFiles()
+    if unknown_files:
+      print "The following files are not added to version control:"
+      for line in unknown_files:
+        print line
+      prompt = "Are you sure to continue?(y/N) "
+      answer = raw_input(prompt).strip()
+      if answer != "y":
+        ErrorExit("User aborted")
+
+  def GetBaseFile(self, filename):
+    """Get the content of the upstream version of a file.
+
+    Returns:
+      A tuple (content, status) representing the file content and the status of
+      the file.
+    """
+
+    raise NotImplementedError(
+        "abstract method -- subclass %s must override" % self.__class__)
+
+  def UploadBaseFiles(self, issue, rpc_server, patch_list, patchset, options):
+    """Uploads the base files."""
+    patches = dict()
+    [patches.setdefault(v, k) for k, v in patch_list]
+    for filename in patches.keys():
+      content, status = self.GetBaseFile(filename)
+      no_base_file = False
+      if len(content) > MAX_UPLOAD_SIZE:
+        print ("Not uploading the base file for " + filename +
+               " because the file is too large.")
+        no_base_file = True
+        content = ""
+      checksum = md5.new(content).hexdigest()
+      parts = []
+      while content:
+        parts.append(content[:800000])
+        content = content[800000:]
+      if not parts:
+        parts = [""] # empty file
+      for part, data in enumerate(parts):
+        if options.verbose > 0:
+          print "Uploading %s (%d/%d)" % (filename, part+1, len(parts))
+        url = "/%d/upload_content/%d/%d" % (int(issue), int(patchset),
+                                            int(patches.get(filename)))
+        current_checksum = md5.new(data).hexdigest()
+        form_fields = [("filename", filename),
+                       ("status", status),
+                       ("num_parts", str(len(parts))),
+                       ("checksum", checksum),
+                       ("current_part", str(part)),
+                       ("current_checksum", current_checksum),]
+        if no_base_file:
+          form_fields.append(("no_base_file", "1"))
+        if options.email:
+          form_fields.append(("user", options.email))
+        ctype, body = EncodeMultipartFormData(form_fields,
+                                              [("data", filename, data)])
+        response_body = rpc_server.Send(url, body,
+                                        content_type=ctype)
+        if not response_body.startswith("OK"):
+          StatusUpdate("  --> %s" % response_body)
+          sys.exit(False)
+
+
class SubversionVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Subversion."""

  def GuessBase(self, required):
    """Returns the SVN base URL.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
      words = line.split()
      if len(words) == 2 and words[0] == "URL:":
        url = words[1]
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
        username, netloc = urllib.splituser(netloc)
        if username:
          logging.info("Removed username from base URL")
        if netloc.endswith("svn.python.org"):
          if netloc == "svn.python.org":
            if path.startswith("/projects/"):
              # Strip "/projects" but keep the slash of what follows.
              path = path[9:]
          elif netloc != "pythondev@svn.python.org":
            # NOTE(review): the username was already stripped from netloc
            # above, so this comparison can never match -- any other
            # *.svn.python.org host falls through to the error. Confirm
            # intent before relying on this branch.
            ErrorExit("Unrecognized Python URL: %s" % url)
          base = "http://svn.python.org/view/*checkout*%s/" % path
          logging.info("Guessed Python base = %s", base)
        elif netloc.endswith("svn.collab.net"):
          if path.startswith("/repos/"):
            # Strip "/repos" but keep the slash of what follows.
            path = path[6:]
          base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
          logging.info("Guessed CollabNet base = %s", base)
        elif netloc.endswith(".googlecode.com"):
          path = path + "/"
          base = urlparse.urlunparse(("http", netloc, path, params,
                                      query, fragment))
          logging.info("Guessed Google Code base = %s", base)
        else:
          path = path + "/"
          base = urlparse.urlunparse((scheme, netloc, path, params,
                                      query, fragment))
          logging.info("Guessed base = %s", base)
        return base
    if required:
      ErrorExit("Can't find URL in output from svn info")
    return None

  def GenerateDiff(self, args):
    """Return the output of "svn diff", verifying it contains patches.

    Args:
      args: Extra arguments to pass to the diff command.
    """
    cmd = ["svn", "diff"]
    if not sys.platform.startswith("win"):
      # Force the external "diff" program so output format is predictable.
      cmd.append("--diff-cmd=diff")
    cmd.extend(args)
    data = RunShell(cmd)
    count = 0
    for line in data.splitlines():
      if line.startswith("Index:") or line.startswith("Property changes on:"):
        count += 1
        logging.info(line)
    if not count:
      ErrorExit("No valid patches found in output from svn diff")
    return data

  def _CollapseKeywords(self, content, keyword_str):
    """Collapses SVN keywords."""
    # svn cat translates keywords but svn diff doesn't. As a result of this
    # behavior patching.PatchChunks() fails with a chunk mismatch error.
    # This part was originally written by the Review Board development team
    # who had the same problem (http://reviews.review-board.org/r/276/).
    # Mapping of keywords to known aliases
    svn_keywords = {
      # Standard keywords
      'Date':                ['Date', 'LastChangedDate'],
      'Revision':            ['Revision', 'LastChangedRevision', 'Rev'],
      'Author':              ['Author', 'LastChangedBy'],
      'HeadURL':             ['HeadURL', 'URL'],
      'Id':                  ['Id'],

      # Aliases
      'LastChangedDate':     ['LastChangedDate', 'Date'],
      'LastChangedRevision': ['LastChangedRevision', 'Rev', 'Revision'],
      'LastChangedBy':       ['LastChangedBy', 'Author'],
      'URL':                 ['URL', 'HeadURL'],
    }
    def repl(m):
      # Fixed-length keywords ($Keyword:: value $) keep their width so diff
      # offsets still line up; plain keywords collapse to $Keyword$.
      if m.group(2):
        return "$%s::%s$" % (m.group(1), " " * len(m.group(3)))
      return "$%s$" % m.group(1)
    keywords = [keyword
                for name in keyword_str.split(" ")
                for keyword in svn_keywords.get(name, [])]
    return re.sub(r"\$(%s):(:?)([^\$]+)\$" % '|'.join(keywords), repl, content)

  def GetUnknownFiles(self):
    """Return the "svn status" lines for unversioned ('?') files."""
    status = RunShell(["svn", "status", "--ignore-externals"], silent_ok=True)
    unknown_files = []
    for line in status.split("\n"):
      if line and line[0] == "?":
        unknown_files.append(line)
    return unknown_files

  def GetBaseFile(self, filename):
    """Return (content, status) for the BASE revision of filename."""
    status = RunShell(["svn", "status", "--ignore-externals", filename])
    if not status:
      StatusUpdate("svn status returned no output for %s" % filename)
      # Exit with a failure status.  (Was sys.exit(False), which exits with
      # status 0 -- success -- because bool is an int subclass.)
      sys.exit(1)
    status_lines = status.splitlines()
    # If file is in a cl, the output will begin with
    # "\n--- Changelist 'cl_name':\n".  See
    # http://svn.collab.net/repos/svn/trunk/notes/changelist-design.txt
    if (len(status_lines) == 3 and
        not status_lines[0] and
        status_lines[1].startswith("--- Changelist")):
      status = status_lines[2]
    else:
      status = status_lines[0]
    # If a file is copied its status will be "A  +", which signifies
    # "addition-with-history".  See "svn st" for more information.  We need to
    # upload the original file or else diff parsing will fail if the file was
    # edited.
    if ((status[0] == "A" and status[3] != "+") or
        (status[0] == " " and status[1] == "M")):  # property changed
      content = ""
    elif (status[0] in ("M", "D", "R") or
          (status[0] == "A" and status[3] == "+")):
      mimetype = RunShell(["svn", "-rBASE", "propget", "svn:mime-type",
                           filename],
                          silent_ok=True)
      if mimetype.startswith("application/octet-stream"):
        # Binary file: upload an empty base; the server can't diff it anyway.
        content = ""
      else:
        # On Windows svn cat gives \r\n, and calling subprocess.Popen turns
        # them into \r\r\n, so use universal newlines to avoid the extra \r.
        if sys.platform.startswith("win"):
          nl = True
        else:
          nl = False
        content = RunShell(["svn", "cat", filename], universal_newlines=nl)
      keywords = RunShell(["svn", "-rBASE", "propget", "svn:keywords",
                           filename],
                          silent_ok=True)
      if keywords:
        content = self._CollapseKeywords(content, keywords)
    else:
      StatusUpdate("svn status returned unexpected output: %s" % status)
      # See above: sys.exit(False) would exit 0; use an explicit failure code.
      sys.exit(1)
    return content, status[0:5]
+
+
class GitVCS(VersionControlSystem):
  """Implementation of the VersionControlSystem interface for Git."""

  def __init__(self):
    # Map of filename -> hash of base file.
    self.base_hashes = {}

  def GenerateDiff(self, extra_args):
    """Return a git diff rewritten with svn-style "Index:" lines.

    Args:
      extra_args: extra arguments (e.g. a tree-ish) for "git diff".
    """
    # This is more complicated than svn's GenerateDiff because we must convert
    # the diff output to include an svn-style "Index:" line as well as record
    # the hashes of the base files, so we can upload them along with our diff.
    #
    # --full-index makes the "index" lines carry complete hashes that
    # "git show" can resolve in GetBaseFile.  BUG FIX: this previously called
    # RunShell("git diff", ["--full-index"] + extra_args), which passed the
    # option list as RunShell's silent_ok parameter and dropped it from the
    # command entirely.
    gitdiff = RunShell(["git", "diff", "--full-index"] + extra_args)
    svndiff = []
    filecount = 0
    filename = None
    for line in gitdiff.splitlines():
      match = re.match(r"diff --git a/(.*) b/.*$", line)
      if match:
        filecount += 1
        filename = match.group(1)
        svndiff.append("Index: %s\n" % filename)
      else:
        # The "index" line in a git diff looks like this (long hashes elided):
        #   index 82c0d44..b2cee3f 100755
        # We want to save the left hash, as that identifies the base file.
        match = re.match(r"index (\w+)\.\.", line)
        if match:
          self.base_hashes[filename] = match.group(1)
      svndiff.append(line + "\n")
    if not filecount:
      ErrorExit("No valid patches found in output from git diff")
    return "".join(svndiff)

  def GetUnknownFiles(self):
    """Return the list of files git is not tracking."""
    status = RunShell(["git", "ls-files", "--others"], silent_ok=True)
    return status.splitlines()

  def GetBaseFile(self, filename):
    """Return (content, status) for the base version of filename."""
    base_hash = self.base_hashes[filename]
    if base_hash == "0" * 40:  # All-zero hash indicates no base file.
      return ("", "A")
    else:
      # BUG FIX: was RunShell("git show", [hash]), which passed [hash] as
      # silent_ok so the hash never reached the command (and "hash" shadowed
      # the builtin).
      return (RunShell(["git", "show", base_hash]), "M")
+
+
# NOTE: this function is duplicated in engine.py, keep them in sync.
def SplitPatch(data):
  """Splits a patch into separate pieces for each file.

  Args:
    data: A string containing the output of svn diff.

  Returns:
    A list of 2-tuple (filename, text) where text is the svn diff output
      pertaining to filename.
  """
  patches = []
  current_file = None
  current_diff = []
  for line in data.splitlines(True):
    file_for_line = None
    if line.startswith('Index:'):
      _, file_for_line = line.split(':', 1)
      file_for_line = file_for_line.strip()
    elif line.startswith('Property changes on:'):
      _, prop_filename = line.split(':', 1)
      # When a file is modified, paths use '/' between directories, however
      # when a property is modified '\' is used on Windows.  Make them the same
      # otherwise the file shows up twice.
      prop_filename = prop_filename.strip().replace('\\', '/')
      if prop_filename != current_file:
        # File has property changes but no modifications, create a new diff.
        file_for_line = prop_filename
    if file_for_line:
      # Starting a new file: flush the one we were accumulating.
      if current_file and current_diff:
        patches.append((current_file, ''.join(current_diff)))
      current_file = file_for_line
      current_diff = [line]
    else:
      current_diff.append(line)
  if current_file and current_diff:
    patches.append((current_file, ''.join(current_diff)))
  return patches
+
+
+def UploadSeparatePatches(issue, rpc_server, patchset, data, options):
+  """Uploads a separate patch for each file in the diff output.
+
+  Returns a list of [patch_key, filename] for each file.
+  """
+  patches = SplitPatch(data)
+  rv = []
+  for patch in patches:
+    if len(patch[1]) > MAX_UPLOAD_SIZE:
+      print ("Not uploading the patch for " + patch[0] +
+             " because the file is too large.")
+      continue
+    form_fields = [("filename", patch[0])]
+    if options.local_base:
+      form_fields.append(("content_upload", "1"))
+    files = [("data", "data.diff", patch[1])]
+    ctype, body = EncodeMultipartFormData(form_fields, files)
+    url = "/%d/upload_patch/%d" % (int(issue), int(patchset))
+    print "Uploading patch for " + patch[0]
+    response_body = rpc_server.Send(url, body, content_type=ctype)
+    lines = response_body.splitlines()
+    if not lines or lines[0] != "OK":
+      StatusUpdate("  --> %s" % response_body)
+      sys.exit(False)
+    rv.append([lines[1], patch[0]])
+  return rv
+
+
+def GuessVCS():
+  """Helper to guess the version control system.
+
+  This examines the current directory, guesses which VersionControlSystem
+  we're using, and returns an instance of the appropriate class.  Exit with an
+  error if we can't figure it out.
+
+  Returns:
+    A VersionControlSystem instance. Exits if the VCS can't be guessed.
+  """
+  # Subversion has a .svn in all working directories.
+  if os.path.isdir('.svn'):
+    logging.info("Guessed VCS = Subversion")
+    return SubversionVCS()
+
+  # Git has a command to test if you're in a git tree.
+  # Try running it, but don't die if we don't have git installed.
+  try:
+    subproc = subprocess.Popen(["git", "rev-parse", "--is-inside-work-tree"],
+                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    if subproc.wait() == 0:
+      return GitVCS()
+  # NOTE(review): this Python 2 tuple-unpack shadows the name 'errno'
+  # locally with the error *number*; 2 is ENOENT.
+  except OSError, (errno, message):
+    if errno != 2:  # ENOENT -- they don't have git installed.
+      raise
+
+  # Neither .svn nor a git work tree was found: give up with an error.
+  ErrorExit(("Could not guess version control system. "
+             "Are you in a working copy directory?"))
+
+
+def RealMain(argv, data=None):
+  """Parses options, generates/accepts a diff and uploads it for review.
+
+  Args:
+    argv: Command-line arguments, including the program name in argv[0].
+    data: Optional diff text.  If None, the diff is generated from the
+      detected VCS using the remaining command-line arguments.
+
+  Returns:
+    The issue identifier (a string): the last path component of the URL
+    reported by the server.  Exits on error or on a non-create/update reply.
+  """
+  logging.basicConfig(format=("%(asctime).19s %(levelname)s %(filename)s:"
+                              "%(lineno)s %(message)s "))
+  # Force the C locale so child VCS commands emit unlocalized, parseable
+  # output.
+  os.environ['LC_ALL'] = 'C'
+  options, args = parser.parse_args(argv[1:])
+  global verbosity
+  verbosity = options.verbose
+  if verbosity >= 3:
+    logging.getLogger().setLevel(logging.DEBUG)
+  elif verbosity >= 2:
+    logging.getLogger().setLevel(logging.INFO)
+  vcs = GuessVCS()
+  if isinstance(vcs, SubversionVCS):
+    # base field is only allowed for Subversion.
+    # Note: Fetching base files may become deprecated in future releases.
+    base = vcs.GuessBase(not options.local_base)
+  else:
+    base = None
+  if not base and not options.local_base:
+    # Without a server-fetchable base we must upload base files ourselves.
+    options.local_base = True
+    logging.info("Enabled upload of base file")
+  if not options.assume_yes:
+    vcs.CheckForUnknownFiles()
+  if data is None:
+    data = vcs.GenerateDiff(args)
+  if verbosity >= 1:
+    print "Upload server:", options.server, "(change with -s/--server)"
+  if options.issue:
+    prompt = "Message describing this patch set: "
+  else:
+    prompt = "New issue subject: "
+  # -m/--message wins; otherwise prompt interactively.
+  message = options.message or raw_input(prompt).strip()
+  if not message:
+    ErrorExit("A non-empty message is required")
+  rpc_server = GetRpcServer(options)
+  form_fields = [("subject", message)]
+  if base:
+    form_fields.append(("base", base))
+  if options.issue:
+    form_fields.append(("issue", str(options.issue)))
+  if options.email:
+    form_fields.append(("user", options.email))
+  # Minimal sanity check on addresses: exactly one '@' and a dotted domain.
+  if options.reviewers:
+    for reviewer in options.reviewers.split(','):
+      if reviewer.count("@") != 1 or "." not in reviewer.split("@")[1]:
+        ErrorExit("Invalid email address: %s" % reviewer)
+    form_fields.append(("reviewers", options.reviewers))
+  if options.cc:
+    for cc in options.cc.split(','):
+      if cc.count("@") != 1 or "." not in cc.split("@")[1]:
+        ErrorExit("Invalid email address: %s" % cc)
+    form_fields.append(("cc", options.cc))
+  description = options.description
+  if options.description_file:
+    if options.description:
+      ErrorExit("Can't specify description and description_file")
+    file = open(options.description_file, 'r')
+    description = file.read()
+    file.close()
+  if description:
+    form_fields.append(("description", description))
+  # If we're uploading base files, don't send the email before the uploads, so
+  # that it contains the file status.
+  if options.send_mail and not options.local_base:
+    form_fields.append(("send_mail", "1"))
+  if options.local_base:
+    form_fields.append(("content_upload", "1"))
+  if len(data) > MAX_UPLOAD_SIZE:
+    # Too big for one request: tell the server patches will follow
+    # individually (see UploadSeparatePatches below).
+    print "Patch is large, so uploading file patches separately."
+    files = []
+    form_fields.append(("separate_patches", "1"))
+  else:
+    files = [("data", "data.diff", data)]
+  ctype, body = EncodeMultipartFormData(form_fields, files)
+  response_body = rpc_server.Send("/upload", body, content_type=ctype)
+  if options.local_base or not files:
+    # Multi-line reply: line 0 is the status message, line 1 the patchset
+    # id, and the remaining lines appear to be "key filename" pairs --
+    # TODO confirm against the server's /upload handler.
+    lines = response_body.splitlines()
+    if len(lines) >= 2:
+      msg = lines[0]
+      patchset = lines[1].strip()
+      patches = [x.split(" ", 1) for x in lines[2:]]
+    else:
+      msg = response_body
+  else:
+    msg = response_body
+  StatusUpdate(msg)
+  if not response_body.startswith("Issue created.") and \
+  not response_body.startswith("Issue updated."):
+    sys.exit(0)
+  # msg ends with the issue URL; the trailing path component is the issue id.
+  issue = msg[msg.rfind("/")+1:]
+
+  if not files:
+    # The diff was too large above: now upload each file's patch separately.
+    result = UploadSeparatePatches(issue, rpc_server, patchset, data, options)
+    if options.local_base:
+      patches = result
+
+  if options.local_base:
+    vcs.UploadBaseFiles(issue, rpc_server, patches, patchset, options)
+    if options.send_mail:
+      # Mail was deferred above so it can include file status; send it now.
+      rpc_server.Send("/" + issue + "/mail")
+  return issue
+
+
+def main():
+  """Script entry point: run RealMain, turning Ctrl-C into a clean exit."""
+  try:
+    RealMain(sys.argv)
+  except KeyboardInterrupt:
+    # Print a newline first so the status message starts on its own line.
+    print
+    StatusUpdate("Interrupted.")
+    sys.exit(1)
+
+
+if __name__ == "__main__":
+  main()
diff --git a/depot_tools/release/win/python_24/DLLs/_bsddb.pyd b/depot_tools/release/win/python_24/DLLs/_bsddb.pyd
new file mode 100644
index 0000000..132e0df6
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/_bsddb.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/_socket.pyd b/depot_tools/release/win/python_24/DLLs/_socket.pyd
new file mode 100644
index 0000000..a499193
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/_socket.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/_ssl.pyd b/depot_tools/release/win/python_24/DLLs/_ssl.pyd
new file mode 100644
index 0000000..00c1fb2
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/_ssl.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/_testcapi.pyd b/depot_tools/release/win/python_24/DLLs/_testcapi.pyd
new file mode 100644
index 0000000..e31eb1a
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/_testcapi.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/bz2.pyd b/depot_tools/release/win/python_24/DLLs/bz2.pyd
new file mode 100644
index 0000000..ef2f1862b
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/bz2.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/pyexpat.pyd b/depot_tools/release/win/python_24/DLLs/pyexpat.pyd
new file mode 100644
index 0000000..9a5fcd5c
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/pyexpat.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/select.pyd b/depot_tools/release/win/python_24/DLLs/select.pyd
new file mode 100644
index 0000000..6534f3b
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/select.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/unicodedata.pyd b/depot_tools/release/win/python_24/DLLs/unicodedata.pyd
new file mode 100644
index 0000000..ba78fc0
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/unicodedata.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/winsound.pyd b/depot_tools/release/win/python_24/DLLs/winsound.pyd
new file mode 100644
index 0000000..15a912d
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/winsound.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/DLLs/zlib.pyd b/depot_tools/release/win/python_24/DLLs/zlib.pyd
new file mode 100644
index 0000000..af3d4ba
--- /dev/null
+++ b/depot_tools/release/win/python_24/DLLs/zlib.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/BaseHTTPServer.py b/depot_tools/release/win/python_24/Lib/BaseHTTPServer.py
new file mode 100644
index 0000000..27ac513
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/BaseHTTPServer.py
@@ -0,0 +1,574 @@
+"""HTTP server base class.
+
+Note: the class in this module doesn't implement any HTTP request; see
+SimpleHTTPServer for simple implementations of GET, HEAD and POST
+(including CGI scripts).  It does, however, optionally implement HTTP/1.1
+persistent connections, as of version 0.3.
+
+Contents:
+
+- BaseHTTPRequestHandler: HTTP request handler base class
+- test: test function
+
+XXX To do:
+
+- log requests even later (to capture byte count)
+- log user-agent header and other interesting goodies
+- send error log to separate file
+"""
+
+
+# See also:
+#
+# HTTP Working Group                                        T. Berners-Lee
+# INTERNET-DRAFT                                            R. T. Fielding
+# <draft-ietf-http-v10-spec-00.txt>                     H. Frystyk Nielsen
+# Expires September 8, 1995                                  March 8, 1995
+#
+# URL: http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt
+#
+# and
+#
+# Network Working Group                                      R. Fielding
+# Request for Comments: 2616                                       et al
+# Obsoletes: 2068                                              June 1999
+# Category: Standards Track
+#
+# URL: http://www.faqs.org/rfcs/rfc2616.html
+
+# Log files
+# ---------
+#
+# Here's a quote from the NCSA httpd docs about log file format.
+#
+# | The logfile format is as follows. Each line consists of:
+# |
+# | host rfc931 authuser [DD/Mon/YYYY:hh:mm:ss] "request" ddd bbbb
+# |
+# |        host: Either the DNS name or the IP number of the remote client
+# |        rfc931: Any information returned by identd for this person,
+# |                - otherwise.
+# |        authuser: If user sent a userid for authentication, the user name,
+# |                  - otherwise.
+# |        DD: Day
+# |        Mon: Month (calendar name)
+# |        YYYY: Year
+# |        hh: hour (24-hour format, the machine's timezone)
+# |        mm: minutes
+# |        ss: seconds
+# |        request: The first line of the HTTP request as sent by the client.
+# |        ddd: the status code returned by the server, - if not available.
+# |        bbbb: the total number of bytes sent,
+# |              *not including the HTTP/1.0 header*, - if not available
+# |
+# | You can determine the name of the file accessed through request.
+#
+# (Actually, the latter is only true if you know the server configuration
+# at the time the request was made!)
+
+__version__ = "0.3"
+
+__all__ = ["HTTPServer", "BaseHTTPRequestHandler"]
+
+import sys
+import time
+import socket # For gethostbyaddr()
+import mimetools
+import SocketServer
+
+# Default error message
+# %(code)d, %(message)s and %(explain)s are substituted by
+# BaseHTTPRequestHandler.send_error() via error_message_format.
+DEFAULT_ERROR_MESSAGE = """\
+<head>
+<title>Error response</title>
+</head>
+<body>
+<h1>Error response</h1>
+<p>Error code %(code)d.
+<p>Message: %(message)s.
+<p>Error code explanation: %(code)s = %(explain)s.
+</body>
+"""
+
+
+class HTTPServer(SocketServer.TCPServer):
+
+    allow_reuse_address = 1    # Seems to make sense in testing environment
+
+    def server_bind(self):
+        """Override server_bind to store the server name."""
+        SocketServer.TCPServer.server_bind(self)
+        host, port = self.socket.getsockname()[:2]
+        # Resolve the bound address to a fully-qualified name once at bind
+        # time so request handlers don't have to repeat the lookup.
+        self.server_name = socket.getfqdn(host)
+        self.server_port = port
+
+
+class BaseHTTPRequestHandler(SocketServer.StreamRequestHandler):
+
+    """HTTP request handler base class.
+
+    The following explanation of HTTP serves to guide you through the
+    code as well as to expose any misunderstandings I may have about
+    HTTP (so you don't need to read the code to figure out I'm wrong
+    :-).
+
+    HTTP (HyperText Transfer Protocol) is an extensible protocol on
+    top of a reliable stream transport (e.g. TCP/IP).  The protocol
+    recognizes three parts to a request:
+
+    1. One line identifying the request type and path
+    2. An optional set of RFC-822-style headers
+    3. An optional data part
+
+    The headers and data are separated by a blank line.
+
+    The first line of the request has the form
+
+    <command> <path> <version>
+
+    where <command> is a (case-sensitive) keyword such as GET or POST,
+    <path> is a string containing path information for the request,
+    and <version> should be the string "HTTP/1.0" or "HTTP/1.1".
+    <path> is encoded using the URL encoding scheme (using %xx to signify
+    the ASCII character with hex code xx).
+
+    The specification specifies that lines are separated by CRLF but
+    for compatibility with the widest range of clients recommends
+    servers also handle LF.  Similarly, whitespace in the request line
+    is treated sensibly (allowing multiple spaces between components
+    and allowing trailing whitespace).
+
+    Similarly, for output, lines ought to be separated by CRLF pairs
+    but most clients grok LF characters just fine.
+
+    If the first line of the request has the form
+
+    <command> <path>
+
+    (i.e. <version> is left out) then this is assumed to be an HTTP
+    0.9 request; this form has no optional headers and data part and
+    the reply consists of just the data.
+
+    The reply form of the HTTP 1.x protocol again has three parts:
+
+    1. One line giving the response code
+    2. An optional set of RFC-822-style headers
+    3. The data
+
+    Again, the headers and data are separated by a blank line.
+
+    The response code line has the form
+
+    <version> <responsecode> <responsestring>
+
+    where <version> is the protocol version ("HTTP/1.0" or "HTTP/1.1"),
+    <responsecode> is a 3-digit response code indicating success or
+    failure of the request, and <responsestring> is an optional
+    human-readable string explaining what the response code means.
+
+    This server parses the request and the headers, and then calls a
+    function specific to the request type (<command>).  Specifically,
+    a request SPAM will be handled by a method do_SPAM().  If no
+    such method exists the server sends an error response to the
+    client.  If it exists, it is called with no arguments:
+
+    do_SPAM()
+
+    Note that the request name is case sensitive (i.e. SPAM and spam
+    are different requests).
+
+    The various request details are stored in instance variables:
+
+    - client_address is the client IP address in the form (host,
+    port);
+
+    - command, path and version are the broken-down request line;
+
+    - headers is an instance of mimetools.Message (or a derived
+    class) containing the header information;
+
+    - rfile is a file object open for reading positioned at the
+    start of the optional input data part;
+
+    - wfile is a file object open for writing.
+
+    IT IS IMPORTANT TO ADHERE TO THE PROTOCOL FOR WRITING!
+
+    The first thing to be written must be the response line.  Then
+    follow 0 or more header lines, then a blank line, and then the
+    actual data (if any).  The meaning of the header lines depends on
+    the command executed by the server; in most cases, when data is
+    returned, there should be at least one header line of the form
+
+    Content-type: <type>/<subtype>
+
+    where <type> and <subtype> should be registered MIME types,
+    e.g. "text/html" or "text/plain".
+
+    """
+
+    # The Python system version, truncated to its first component.
+    sys_version = "Python/" + sys.version.split()[0]
+
+    # The server software version.  You may want to override this.
+    # The format is multiple whitespace-separated strings,
+    # where each string is of the form name[/version].
+    server_version = "BaseHTTP/" + __version__
+
+    def parse_request(self):
+        """Parse a request (internal).
+
+        The request should be stored in self.raw_requestline; the results
+        are in self.command, self.path, self.request_version and
+        self.headers.
+
+        Return True for success, False for failure; on failure, an
+        error is sent back.
+
+        """
+        self.command = None  # set in case of error on the first line
+        self.request_version = version = "HTTP/0.9" # Default
+        self.close_connection = 1
+        requestline = self.raw_requestline
+        # Tolerate both CRLF and bare LF line endings on the request line.
+        if requestline[-2:] == '\r\n':
+            requestline = requestline[:-2]
+        elif requestline[-1:] == '\n':
+            requestline = requestline[:-1]
+        self.requestline = requestline
+        words = requestline.split()
+        # Three words: full request line "<command> <path> <version>".
+        if len(words) == 3:
+            [command, path, version] = words
+            if version[:5] != 'HTTP/':
+                self.send_error(400, "Bad request version (%r)" % version)
+                return False
+            try:
+                base_version_number = version.split('/', 1)[1]
+                version_number = base_version_number.split(".")
+                # RFC 2145 section 3.1 says there can be only one "." and
+                #   - major and minor numbers MUST be treated as
+                #      separate integers;
+                #   - HTTP/2.4 is a lower version than HTTP/2.13, which in
+                #      turn is lower than HTTP/12.3;
+                #   - Leading zeros MUST be ignored by recipients.
+                if len(version_number) != 2:
+                    raise ValueError
+                version_number = int(version_number[0]), int(version_number[1])
+            except (ValueError, IndexError):
+                self.send_error(400, "Bad request version (%r)" % version)
+                return False
+            # Persistent connections are the default when both sides speak
+            # HTTP/1.1 or later.
+            if version_number >= (1, 1) and self.protocol_version >= "HTTP/1.1":
+                self.close_connection = 0
+            if version_number >= (2, 0):
+                self.send_error(505,
+                          "Invalid HTTP Version (%s)" % base_version_number)
+                return False
+        # Two words: HTTP/0.9 request "<command> <path>"; only GET is legal
+        # and there are no headers.
+        elif len(words) == 2:
+            [command, path] = words
+            self.close_connection = 1
+            if command != 'GET':
+                self.send_error(400,
+                                "Bad HTTP/0.9 request type (%r)" % command)
+                return False
+        elif not words:
+            return False
+        else:
+            self.send_error(400, "Bad request syntax (%r)" % requestline)
+            return False
+        self.command, self.path, self.request_version = command, path, version
+
+        # Examine the headers and look for a Connection directive
+        self.headers = self.MessageClass(self.rfile, 0)
+
+        # An explicit Connection header overrides the version-based default
+        # computed above.
+        conntype = self.headers.get('Connection', "")
+        if conntype.lower() == 'close':
+            self.close_connection = 1
+        elif (conntype.lower() == 'keep-alive' and
+              self.protocol_version >= "HTTP/1.1"):
+            self.close_connection = 0
+        return True
+
+    def handle_one_request(self):
+        """Handle a single HTTP request.
+
+        You normally don't need to override this method; see the class
+        __doc__ string for information on how to handle specific HTTP
+        commands such as GET and POST.
+
+        """
+        self.raw_requestline = self.rfile.readline()
+        if not self.raw_requestline:
+            # Client closed the connection before sending a request line.
+            self.close_connection = 1
+            return
+        if not self.parse_request(): # An error code has been sent, just exit
+            return
+        # Dispatch to the do_<COMMAND> method provided by a subclass.
+        mname = 'do_' + self.command
+        if not hasattr(self, mname):
+            self.send_error(501, "Unsupported method (%r)" % self.command)
+            return
+        method = getattr(self, mname)
+        method()
+
+    def handle(self):
+        """Handle multiple requests if necessary."""
+        self.close_connection = 1
+
+        # Keep serving requests on this connection until something
+        # (protocol version, Connection header, EOF) sets close_connection.
+        self.handle_one_request()
+        while not self.close_connection:
+            self.handle_one_request()
+
+    def send_error(self, code, message=None):
+        """Send and log an error reply.
+
+        Arguments are the error code, and a detailed message.
+        The detailed message defaults to the short entry matching the
+        response code.
+
+        This sends an error response (so it must be called before any
+        output has been generated), logs the error, and finally sends
+        a piece of HTML explaining the error to the user.
+
+        """
+
+        try:
+            short, long = self.responses[code]
+        except KeyError:
+            short, long = '???', '???'
+        if message is None:
+            message = short
+        explain = long
+        self.log_error("code %d, message %s", code, message)
+        content = (self.error_message_format %
+                   {'code': code, 'message': message, 'explain': explain})
+        self.send_response(code, message)
+        self.send_header("Content-Type", "text/html")
+        self.send_header('Connection', 'close')
+        self.end_headers()
+        # No body for HEAD requests, 1xx informational codes, 204 and 304.
+        if self.command != 'HEAD' and code >= 200 and code not in (204, 304):
+            self.wfile.write(content)
+
+    # Override this (a %-format dict template) to customize error pages.
+    error_message_format = DEFAULT_ERROR_MESSAGE
+
+    def send_response(self, code, message=None):
+        """Send the response header and log the response code.
+
+        Also send two standard headers with the server software
+        version and the current date.
+
+        """
+        self.log_request(code)
+        if message is None:
+            if code in self.responses:
+                message = self.responses[code][0]
+            else:
+                message = ''
+        # HTTP/0.9 replies have no status line or headers, only the body.
+        if self.request_version != 'HTTP/0.9':
+            self.wfile.write("%s %d %s\r\n" %
+                             (self.protocol_version, code, message))
+            # print (self.protocol_version, code, message)
+        self.send_header('Server', self.version_string())
+        self.send_header('Date', self.date_time_string())
+
+    def send_header(self, keyword, value):
+        """Send a MIME header."""
+        if self.request_version != 'HTTP/0.9':
+            self.wfile.write("%s: %s\r\n" % (keyword, value))
+
+        # Track outgoing Connection headers so handle() knows whether to
+        # keep looping on this connection.
+        if keyword.lower() == 'connection':
+            if value.lower() == 'close':
+                self.close_connection = 1
+            elif value.lower() == 'keep-alive':
+                self.close_connection = 0
+
+    def end_headers(self):
+        """Send the blank line ending the MIME headers."""
+        if self.request_version != 'HTTP/0.9':
+            self.wfile.write("\r\n")
+
+    def log_request(self, code='-', size='-'):
+        """Log an accepted request.
+
+        This is called by send_response().
+
+        """
+
+        self.log_message('"%s" %s %s',
+                         self.requestline, str(code), str(size))
+
+    def log_error(self, *args):
+        """Log an error.
+
+        This is called when a request cannot be fulfilled.  By
+        default it passes the message on to log_message().
+
+        Arguments are the same as for log_message().
+
+        XXX This should go to the separate error log.
+
+        """
+
+        self.log_message(*args)
+
+    def log_message(self, format, *args):
+        """Log an arbitrary message.
+
+        This is used by all other logging functions.  Override
+        it if you have specific logging wishes.
+
+        The first argument, FORMAT, is a format string for the
+        message to be logged.  If the format string contains
+        any % escapes requiring parameters, they should be
+        specified as subsequent arguments (it's just like
+        printf!).
+
+        The client host and current date/time are prefixed to
+        every message.
+
+        """
+
+        sys.stderr.write("%s - - [%s] %s\n" %
+                         (self.address_string(),
+                          self.log_date_time_string(),
+                          format%args))
+
+    def version_string(self):
+        """Return the server software version string."""
+        return self.server_version + ' ' + self.sys_version
+
+    def date_time_string(self):
+        """Return the current date and time formatted for a message header."""
+        now = time.time()
+        year, month, day, hh, mm, ss, wd, y, z = time.gmtime(now)
+        s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
+                self.weekdayname[wd],
+                day, self.monthname[month], year,
+                hh, mm, ss)
+        return s
+
+    def log_date_time_string(self):
+        """Return the current time formatted for logging."""
+        now = time.time()
+        year, month, day, hh, mm, ss, x, y, z = time.localtime(now)
+        s = "%02d/%3s/%04d %02d:%02d:%02d" % (
+                day, self.monthname[month], year, hh, mm, ss)
+        return s
+
+    weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+
+    # monthname[0] is None so indices line up with time tuple months (1-12).
+    monthname = [None,
+                 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+
+    def address_string(self):
+        """Return the client address formatted for logging.
+
+        This version looks up the full hostname using gethostbyaddr(),
+        and tries to find a name that contains at least one dot.
+
+        """
+
+        host, port = self.client_address[:2]
+        return socket.getfqdn(host)
+
+    # Essentially static class variables
+
+    # The version of the HTTP protocol we support.
+    # Set this to HTTP/1.1 to enable automatic keepalive
+    protocol_version = "HTTP/1.0"
+
+    # The Message-like class used to parse headers
+    MessageClass = mimetools.Message
+
+    # Table mapping response codes to messages; entries have the
+    # form {code: (shortmessage, longmessage)}.
+    # See http://www.w3.org/hypertext/WWW/Protocols/HTTP/HTRESP.html
+    responses = {
+        100: ('Continue', 'Request received, please continue'),
+        101: ('Switching Protocols',
+              'Switching to new protocol; obey Upgrade header'),
+
+        200: ('OK', 'Request fulfilled, document follows'),
+        201: ('Created', 'Document created, URL follows'),
+        202: ('Accepted',
+              'Request accepted, processing continues off-line'),
+        203: ('Non-Authoritative Information', 'Request fulfilled from cache'),
+        204: ('No response', 'Request fulfilled, nothing follows'),
+        205: ('Reset Content', 'Clear input form for further input.'),
+        206: ('Partial Content', 'Partial content follows.'),
+
+        300: ('Multiple Choices',
+              'Object has several resources -- see URI list'),
+        301: ('Moved Permanently', 'Object moved permanently -- see URI list'),
+        302: ('Found', 'Object moved temporarily -- see URI list'),
+        303: ('See Other', 'Object moved -- see Method and URL list'),
+        304: ('Not modified',
+              'Document has not changed since given time'),
+        305: ('Use Proxy',
+              'You must use proxy specified in Location to access this '
+              'resource.'),
+        307: ('Temporary Redirect',
+              'Object moved temporarily -- see URI list'),
+
+        400: ('Bad request',
+              'Bad request syntax or unsupported method'),
+        401: ('Unauthorized',
+              'No permission -- see authorization schemes'),
+        402: ('Payment required',
+              'No payment -- see charging schemes'),
+        403: ('Forbidden',
+              'Request forbidden -- authorization will not help'),
+        404: ('Not Found', 'Nothing matches the given URI'),
+        405: ('Method Not Allowed',
+              'Specified method is invalid for this server.'),
+        406: ('Not Acceptable', 'URI not available in preferred format.'),
+        407: ('Proxy Authentication Required', 'You must authenticate with '
+              'this proxy before proceeding.'),
+        408: ('Request Time-out', 'Request timed out; try again later.'),
+        409: ('Conflict', 'Request conflict.'),
+        410: ('Gone',
+              'URI no longer exists and has been permanently removed.'),
+        411: ('Length Required', 'Client must specify Content-Length.'),
+        412: ('Precondition Failed', 'Precondition in headers is false.'),
+        413: ('Request Entity Too Large', 'Entity is too large.'),
+        414: ('Request-URI Too Long', 'URI is too long.'),
+        415: ('Unsupported Media Type', 'Entity body in unsupported format.'),
+        416: ('Requested Range Not Satisfiable',
+              'Cannot satisfy request range.'),
+        417: ('Expectation Failed',
+              'Expect condition could not be satisfied.'),
+
+        500: ('Internal error', 'Server got itself in trouble'),
+        501: ('Not Implemented',
+              'Server does not support this operation'),
+        502: ('Bad Gateway', 'Invalid responses from another server/proxy.'),
+        503: ('Service temporarily overloaded',
+              'The server cannot process the request due to a high load'),
+        504: ('Gateway timeout',
+              'The gateway server did not receive a timely response'),
+        505: ('HTTP Version not supported', 'Cannot fulfill request.'),
+        }
+
+
+def test(HandlerClass = BaseHTTPRequestHandler,
+         ServerClass = HTTPServer, protocol="HTTP/1.0"):
+    """Test the HTTP request handler class.
+
+    This runs an HTTP server on port 8000 (or the first command line
+    argument).
+
+    """
+
+    if sys.argv[1:]:
+        port = int(sys.argv[1])
+    else:
+        port = 8000
+    server_address = ('', port)
+
+    # Selecting "HTTP/1.1" here enables keep-alive handling in
+    # BaseHTTPRequestHandler.parse_request.
+    HandlerClass.protocol_version = protocol
+    httpd = ServerClass(server_address, HandlerClass)
+
+    sa = httpd.socket.getsockname()
+    print "Serving HTTP on", sa[0], "port", sa[1], "..."
+    httpd.serve_forever()
+
+
+if __name__ == '__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/Bastion.py b/depot_tools/release/win/python_24/Lib/Bastion.py
new file mode 100644
index 0000000..58cce97
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/Bastion.py
@@ -0,0 +1,177 @@
+"""Bastionification utility.
+
+A bastion (for another object -- the 'original') is an object that has
+the same methods as the original but does not give access to its
+instance variables.  Bastions have a number of uses, but the most
+obvious one is to provide code executing in restricted mode with a
+safe interface to an object implemented in unrestricted mode.
+
+The bastionification routine has an optional second argument which is
+a filter function.  Only those methods for which the filter method
+(called with the method name as argument) returns true are accessible.
+The default filter method returns true unless the method name begins
+with an underscore.
+
+There are a number of possible implementations of bastions.  We use a
+'lazy' approach where the bastion's __getattr__() discipline does all
+the work for a particular method the first time it is used.  This is
+usually fastest, especially if the user doesn't call all available
+methods.  The retrieved methods are stored as instance variables of
+the bastion, so the overhead is only incurred on the first use of each
+method.
+
+Detail: the bastion class has a __repr__() discipline which includes
+the repr() of the original object.  This is precomputed when the
+bastion is created.
+
+"""
+
+__all__ = ["BastionClass", "Bastion"]
+
+from types import MethodType
+
+
+class BastionClass:
+
+    """Helper class used by the Bastion() function.
+
+    You could subclass this and pass the subclass as the bastionclass
+    argument to the Bastion() function, as long as the constructor has
+    the same signature (a get() function and a name for the object).
+
+    """
+
+    def __init__(self, get, name):
+        """Constructor.
+
+        Arguments:
+
+        get - a function that gets the attribute value (by name)
+        name - a human-readable name for the original object
+               (suggestion: use repr(object))
+
+        """
+        self._get_ = get
+        self._name_ = name
+
+    def __repr__(self):
+        """Return a representation string.
+
+        This includes the name passed in to the constructor, so that
+        if you print the bastion during debugging, at least you have
+        some idea of what it is.
+
+        """
+        return "<Bastion for %s>" % self._name_
+
+    def __getattr__(self, name):
+        """Get an as-yet undefined attribute value.
+
+        This calls the get() function that was passed to the
+        constructor.  The result is stored as an instance variable so
+        that the next time the same attribute is requested,
+        __getattr__() won't be invoked.
+
+        If the get() function raises an exception, this is simply
+        passed on -- exceptions are not cached.
+
+        """
+        attribute = self._get_(name)
+        self.__dict__[name] = attribute
+        return attribute
+
+
+def Bastion(object, filter = lambda name: name[:1] != '_',
+            name=None, bastionclass=BastionClass):
+    """Create a bastion for an object, using an optional filter.
+
+    See the Bastion module's documentation for background.
+
+    Arguments:
+
+    object - the original object
+    filter - a predicate that decides whether a function name is OK;
+             by default all names are OK that don't start with '_'
+    name - the name of the object; default repr(object)
+    bastionclass - class used to create the bastion; default BastionClass
+
+    """
+
+    # Deliberately disabled: this always raises, so the implementation
+    # below is intentionally unreachable (kept for reference only).
+    raise RuntimeError, "This code is not secure in Python 2.2 and 2.3"
+
+    # Note: we define *two* ad-hoc functions here, get1 and get2.
+    # Both are intended to be called in the same way: get(name).
+    # It is clear that the real work (getting the attribute
+    # from the object and calling the filter) is done in get1.
+    # Why can't we pass get1 to the bastion?  Because the user
+    # would be able to override the filter argument!  With get2,
+    # overriding the default argument is no security loophole:
+    # all it does is call it.
+    # Also notice that we can't place the object and filter as
+    # instance variables on the bastion object itself, since
+    # the user has full access to all instance variables!
+
+    def get1(name, object=object, filter=filter):
+        """Internal function for Bastion().  See source comments."""
+        if filter(name):
+            attribute = getattr(object, name)
+            # Only bound methods pass through; plain data attributes
+            # are treated as inaccessible.
+            if type(attribute) == MethodType:
+                return attribute
+        raise AttributeError, name
+
+    def get2(name, get1=get1):
+        """Internal function for Bastion().  See source comments."""
+        return get1(name)
+
+    if name is None:
+        name = repr(object)
+    return bastionclass(get2, name)
+
+
+def _test():
+    """Test the Bastion() function."""
+    class Original:
+        def __init__(self):
+            self.sum = 0
+        def add(self, n):
+            self._add(n)
+        def _add(self, n):
+            self.sum = self.sum + n
+        def total(self):
+            return self.sum
+    o = Original()
+    b = Bastion(o)
+    # Probe the bastion: public methods should work, while private
+    # attributes/methods should be blocked by the default filter.
+    testcode = """if 1:
+    b.add(81)
+    b.add(18)
+    print "b.total() =", b.total()
+    try:
+        print "b.sum =", b.sum,
+    except:
+        print "inaccessible"
+    else:
+        print "accessible"
+    try:
+        print "b._add =", b._add,
+    except:
+        print "inaccessible"
+    else:
+        print "accessible"
+    try:
+        print "b._get_.func_defaults =", map(type, b._get_.func_defaults),
+    except:
+        print "inaccessible"
+    else:
+        print "accessible"
+    \n"""
+    exec testcode
+    print '='*20, "Using rexec:", '='*20
+    # Run the same probes under the rexec restricted-execution
+    # framework (itself deprecated as insecure in later Pythons).
+    import rexec
+    r = rexec.RExec()
+    m = r.add_module('__main__')
+    m.b = b
+    r.r_exec(testcode)
+
+
+if __name__ == '__main__':
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/CGIHTTPServer.py b/depot_tools/release/win/python_24/Lib/CGIHTTPServer.py
new file mode 100644
index 0000000..47a0e2c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/CGIHTTPServer.py
@@ -0,0 +1,341 @@
+"""CGI-savvy HTTP Server.
+
+This module builds on SimpleHTTPServer by implementing GET and POST
+requests to cgi-bin scripts.
+
+If the os.fork() function is not present (e.g. on Windows),
+os.popen2() is used as a fallback, with slightly altered semantics; if
+that function is not present either (e.g. on Macintosh), only Python
+scripts are supported, and they are executed by the current process.
+
+In all cases, the implementation is intentionally naive -- all
+requests are executed synchronously.
+
+SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
+-- it may execute arbitrary Python code or external programs.
+
+"""
+
+
+__version__ = "0.4"
+
+__all__ = ["CGIHTTPRequestHandler"]
+
+import os
+import sys
+import urllib
+import BaseHTTPServer
+import SimpleHTTPServer
+import select
+
+
+class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
+
+    """Complete HTTP server with GET, HEAD and POST commands.
+
+    GET and HEAD also support running CGI scripts.
+
+    The POST command is *only* implemented for CGI scripts.
+
+    """
+
+    # Determine platform specifics
+    have_fork = hasattr(os, 'fork')
+    have_popen2 = hasattr(os, 'popen2')
+    have_popen3 = hasattr(os, 'popen3')
+
+    # Make rfile unbuffered -- we need to read one line and then pass
+    # the rest to a subprocess, so we can't use buffered input.
+    rbufsize = 0
+
+    def do_POST(self):
+        """Serve a POST request.
+
+        This is only implemented for CGI scripts.
+
+        """
+
+        if self.is_cgi():
+            self.run_cgi()
+        else:
+            self.send_error(501, "Can only POST to CGI scripts")
+
+    def send_head(self):
+        """Version of send_head that supports CGI scripts"""
+        if self.is_cgi():
+            return self.run_cgi()
+        else:
+            return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
+
+    def is_cgi(self):
+        """Test whether self.path corresponds to a CGI script.
+
+        Returns True and sets self.cgi_info to a tuple (dir, rest)
+        if self.path requires running a CGI script, False if not.
+        Note that rest begins with a slash if it is not empty.
+
+        The default implementation tests whether the path
+        begins with one of the strings in the list
+        self.cgi_directories (and the next character is a '/'
+        or the end of the string).
+
+        """
+
+        path = self.path
+
+        for x in self.cgi_directories:
+            i = len(x)
+            if path[:i] == x and (not path[i:] or path[i] == '/'):
+                self.cgi_info = path[:i], path[i+1:]
+                return True
+        return False
+
+    # URL prefixes treated as CGI script directories.
+    cgi_directories = ['/cgi-bin', '/htbin']
+
+    def is_executable(self, path):
+        """Test whether argument path is an executable file."""
+        return executable(path)
+
+    def is_python(self, path):
+        """Test whether argument path is a Python script."""
+        head, tail = os.path.splitext(path)
+        return tail.lower() in (".py", ".pyw")
+
+    def run_cgi(self):
+        """Execute a CGI script."""
+        dir, rest = self.cgi_info
+        # Split off the query string (everything after the last '?').
+        i = rest.rfind('?')
+        if i >= 0:
+            rest, query = rest[:i], rest[i+1:]
+        else:
+            query = ''
+        # First path component is the script; the remainder is PATH_INFO.
+        i = rest.find('/')
+        if i >= 0:
+            script, rest = rest[:i], rest[i:]
+        else:
+            script, rest = rest, ''
+        scriptname = dir + '/' + script
+        scriptfile = self.translate_path(scriptname)
+        if not os.path.exists(scriptfile):
+            self.send_error(404, "No such CGI script (%r)" % scriptname)
+            return
+        if not os.path.isfile(scriptfile):
+            self.send_error(403, "CGI script is not a plain file (%r)" %
+                            scriptname)
+            return
+        ispy = self.is_python(scriptname)
+        if not ispy:
+            # Non-Python scripts need a subprocess mechanism and the
+            # executable bit; otherwise refuse to run them.
+            if not (self.have_fork or self.have_popen2 or self.have_popen3):
+                self.send_error(403, "CGI script is not a Python script (%r)" %
+                                scriptname)
+                return
+            if not self.is_executable(scriptfile):
+                self.send_error(403, "CGI script is not executable (%r)" %
+                                scriptname)
+                return
+
+        # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
+        # XXX Much of the following could be prepared ahead of time!
+        env = {}
+        env['SERVER_SOFTWARE'] = self.version_string()
+        env['SERVER_NAME'] = self.server.server_name
+        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
+        env['SERVER_PROTOCOL'] = self.protocol_version
+        env['SERVER_PORT'] = str(self.server.server_port)
+        env['REQUEST_METHOD'] = self.command
+        uqrest = urllib.unquote(rest)
+        env['PATH_INFO'] = uqrest
+        env['PATH_TRANSLATED'] = self.translate_path(uqrest)
+        env['SCRIPT_NAME'] = scriptname
+        if query:
+            env['QUERY_STRING'] = query
+        host = self.address_string()
+        if host != self.client_address[0]:
+            env['REMOTE_HOST'] = host
+        env['REMOTE_ADDR'] = self.client_address[0]
+        # Parse 'Authorization: Basic <b64>' into AUTH_TYPE/REMOTE_USER.
+        authorization = self.headers.getheader("authorization")
+        if authorization:
+            authorization = authorization.split()
+            if len(authorization) == 2:
+                import base64, binascii
+                env['AUTH_TYPE'] = authorization[0]
+                if authorization[0].lower() == "basic":
+                    try:
+                        authorization = base64.decodestring(authorization[1])
+                    except binascii.Error:
+                        # Malformed base64 -- silently skip REMOTE_USER.
+                        pass
+                    else:
+                        authorization = authorization.split(':')
+                        if len(authorization) == 2:
+                            env['REMOTE_USER'] = authorization[0]
+        # XXX REMOTE_IDENT
+        if self.headers.typeheader is None:
+            env['CONTENT_TYPE'] = self.headers.type
+        else:
+            env['CONTENT_TYPE'] = self.headers.typeheader
+        length = self.headers.getheader('content-length')
+        if length:
+            env['CONTENT_LENGTH'] = length
+        # Reassemble (possibly folded/continued) Accept headers.
+        accept = []
+        for line in self.headers.getallmatchingheaders('accept'):
+            if line[:1] in "\t\n\r ":
+                accept.append(line.strip())
+            else:
+                accept = accept + line[7:].split(',')
+        env['HTTP_ACCEPT'] = ','.join(accept)
+        ua = self.headers.getheader('user-agent')
+        if ua:
+            env['HTTP_USER_AGENT'] = ua
+        co = filter(None, self.headers.getheaders('cookie'))
+        if co:
+            env['HTTP_COOKIE'] = ', '.join(co)
+        # XXX Other HTTP_* headers
+        # Since we're setting the env in the parent, provide empty
+        # values to override previously set values
+        for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
+                  'HTTP_USER_AGENT', 'HTTP_COOKIE'):
+            env.setdefault(k, "")
+        # NOTE: mutates this process's environment so the child
+        # (fork/popen/in-process execfile) inherits the CGI variables.
+        os.environ.update(env)
+
+        self.send_response(200, "Script output follows")
+
+        decoded_query = query.replace('+', ' ')
+
+        if self.have_fork:
+            # Unix -- fork as we should
+            args = [script]
+            if '=' not in decoded_query:
+                args.append(decoded_query)
+            nobody = nobody_uid()
+            self.wfile.flush() # Always flush before forking
+            pid = os.fork()
+            if pid != 0:
+                # Parent
+                pid, sts = os.waitpid(pid, 0)
+                # throw away additional data [see bug #427345]
+                while select.select([self.rfile], [], [], 0)[0]:
+                    if not self.rfile.read(1):
+                        break
+                if sts:
+                    self.log_error("CGI script exit status %#x", sts)
+                return
+            # Child
+            try:
+                try:
+                    # Drop privileges if possible; ignore failure
+                    # (e.g. when not running as root).
+                    os.setuid(nobody)
+                except os.error:
+                    pass
+                os.dup2(self.rfile.fileno(), 0)
+                os.dup2(self.wfile.fileno(), 1)
+                os.execve(scriptfile, args, os.environ)
+            except:
+                self.server.handle_error(self.request, self.client_address)
+                os._exit(127)
+
+        elif self.have_popen2 or self.have_popen3:
+            # Windows -- use popen2 or popen3 to create a subprocess
+            import shutil
+            if self.have_popen3:
+                popenx = os.popen3
+            else:
+                popenx = os.popen2
+            cmdline = scriptfile
+            if self.is_python(scriptfile):
+                interp = sys.executable
+                if interp.lower().endswith("w.exe"):
+                    # On Windows, use python.exe, not pythonw.exe
+                    interp = interp[:-5] + interp[-4:]
+                cmdline = "%s -u %s" % (interp, cmdline)
+            if '=' not in query and '"' not in query:
+                cmdline = '%s "%s"' % (cmdline, query)
+            self.log_message("command: %s", cmdline)
+            try:
+                nbytes = int(length)
+            except (TypeError, ValueError):
+                nbytes = 0
+            files = popenx(cmdline, 'b')
+            fi = files[0]
+            fo = files[1]
+            if self.have_popen3:
+                fe = files[2]
+            # Forward the POST body to the script's stdin.
+            if self.command.lower() == "post" and nbytes > 0:
+                data = self.rfile.read(nbytes)
+                fi.write(data)
+            # throw away additional data [see bug #427345]
+            while select.select([self.rfile._sock], [], [], 0)[0]:
+                if not self.rfile._sock.recv(1):
+                    break
+            fi.close()
+            shutil.copyfileobj(fo, self.wfile)
+            if self.have_popen3:
+                errors = fe.read()
+                fe.close()
+                if errors:
+                    self.log_error('%s', errors)
+            sts = fo.close()
+            if sts:
+                self.log_error("CGI script exit status %#x", sts)
+            else:
+                self.log_message("CGI script exited OK")
+
+        else:
+            # Other O.S. -- execute script in this process
+            save_argv = sys.argv
+            save_stdin = sys.stdin
+            save_stdout = sys.stdout
+            save_stderr = sys.stderr
+            try:
+                save_cwd = os.getcwd()
+                try:
+                    sys.argv = [scriptfile]
+                    if '=' not in decoded_query:
+                        sys.argv.append(decoded_query)
+                    sys.stdout = self.wfile
+                    sys.stdin = self.rfile
+                    execfile(scriptfile, {"__name__": "__main__"})
+                finally:
+                    # Always restore the interpreter state, even if the
+                    # script raised.
+                    sys.argv = save_argv
+                    sys.stdin = save_stdin
+                    sys.stdout = save_stdout
+                    sys.stderr = save_stderr
+                    os.chdir(save_cwd)
+            except SystemExit, sts:
+                self.log_error("CGI script exit status %s", str(sts))
+            else:
+                self.log_message("CGI script exited OK")
+
+
+nobody = None
+
+def nobody_uid():
+    """Internal routine to get nobody's uid"""
+    global nobody
+    if nobody:
+        return nobody
+    try:
+        import pwd
+    except ImportError:
+        return -1
+    try:
+        nobody = pwd.getpwnam('nobody')[2]
+    except KeyError:
+        nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
+    return nobody
+
+
+def executable(path):
+    """Test for executable file."""
+    try:
+        st = os.stat(path)
+    except os.error:
+        return False
+    return st.st_mode & 0111 != 0
+
+
+def test(HandlerClass = CGIHTTPRequestHandler,
+         ServerClass = BaseHTTPServer.HTTPServer):
+    """Run SimpleHTTPServer's test harness with the CGI-aware handler."""
+    SimpleHTTPServer.test(HandlerClass, ServerClass)
+
+
+if __name__ == '__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/ConfigParser.py b/depot_tools/release/win/python_24/Lib/ConfigParser.py
new file mode 100644
index 0000000..ade96147
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/ConfigParser.py
@@ -0,0 +1,640 @@
+"""Configuration file parser.
+
+A setup file consists of sections, led by a "[section]" header,
+and followed by "name: value" entries, with continuations and such in
+the style of RFC 822.
+
+The option values can contain format strings which refer to other values in
+the same section, or values in a special [DEFAULT] section.
+
+For example:
+
+    something: %(dir)s/whatever
+
+would resolve the "%(dir)s" to the value of dir.  All reference
+expansions are done late, on demand.
+
+Intrinsic defaults can be specified by passing them into the
+ConfigParser constructor as a dictionary.
+
+class:
+
+ConfigParser -- responsible for parsing a list of
+                configuration files, and managing the parsed database.
+
+    methods:
+
+    __init__(defaults=None)
+        create the parser and specify a dictionary of intrinsic defaults.  The
+        keys must be strings, the values must be appropriate for %()s string
+        interpolation.  Note that `__name__' is always an intrinsic default;
+        its value is the section's name.
+
+    sections()
+        return all the configuration section names, sans DEFAULT
+
+    has_section(section)
+        return whether the given section exists
+
+    has_option(section, option)
+        return whether the given option exists in the given section
+
+    options(section)
+        return list of configuration options for the named section
+
+    read(filenames)
+        read and parse the list of named configuration files, given by
+        name.  A single filename is also allowed.  Non-existing files
+        are ignored.  Return list of successfully read files.
+
+    readfp(fp, filename=None)
+        read and parse one configuration file, given as a file object.
+        The filename defaults to fp.name; it is only used in error
+        messages (if fp has no `name' attribute, the string `<???>' is used).
+
+    get(section, option, raw=False, vars=None)
+        return a string value for the named option.  All % interpolations are
+        expanded in the return values, based on the defaults passed into the
+        constructor and the DEFAULT section.  Additional substitutions may be
+        provided using the `vars' argument, which must be a dictionary whose
+        contents override any pre-existing defaults.
+
+    getint(section, options)
+        like get(), but convert value to an integer
+
+    getfloat(section, options)
+        like get(), but convert value to a float
+
+    getboolean(section, options)
+        like get(), but convert value to a boolean (currently case
+        insensitively defined as 0, false, no, off for False, and 1, true,
+        yes, on for True).  Returns False or True.
+
+    items(section, raw=False, vars=None)
+        return a list of tuples with (name, value) for each option
+        in the section.
+
+    remove_section(section)
+        remove the given file section and all its options
+
+    remove_option(section, option)
+        remove the given option from the given section
+
+    set(section, option, value)
+        set the given option
+
+    write(fp)
+        write the configuration state in .ini format
+"""
+
+import re
+
+__all__ = ["NoSectionError", "DuplicateSectionError", "NoOptionError",
+           "InterpolationError", "InterpolationDepthError",
+           "InterpolationSyntaxError", "ParsingError",
+           "MissingSectionHeaderError",
+           "ConfigParser", "SafeConfigParser", "RawConfigParser",
+           "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"]
+
+DEFAULTSECT = "DEFAULT"
+
+MAX_INTERPOLATION_DEPTH = 10
+
+
+
+# exception classes
+class Error(Exception):
+    """Base class for ConfigParser exceptions."""
+
+    def __init__(self, msg=''):
+        # Keep the text on the instance so __str__/__repr__ can echo it.
+        Exception.__init__(self, msg)
+        self.message = msg
+
+    def __str__(self):
+        return self.message
+
+    __repr__ = __str__
+
+class NoSectionError(Error):
+    """Raised when no section matches a requested option."""
+
+    def __init__(self, section):
+        self.section = section
+        Error.__init__(self, 'No section: %r' % (section,))
+
+class DuplicateSectionError(Error):
+    """Raised when a section is multiply-created."""
+
+    def __init__(self, section):
+        self.section = section
+        Error.__init__(self, "Section %r already exists" % section)
+
+class NoOptionError(Error):
+    """A requested option was not found."""
+
+    def __init__(self, option, section):
+        self.option = option
+        self.section = section
+        Error.__init__(self, "No option %r in section: %r" %
+                       (option, section))
+
+class InterpolationError(Error):
+    """Base class for interpolation-related exceptions."""
+
+    def __init__(self, option, section, msg):
+        self.option = option
+        self.section = section
+        Error.__init__(self, msg)
+
+class InterpolationMissingOptionError(InterpolationError):
+    """A string substitution required a setting which was not available."""
+
+    def __init__(self, option, section, rawval, reference):
+        msg = ("Bad value substitution:\n"
+               "\tsection: [%s]\n"
+               "\toption : %s\n"
+               "\tkey    : %s\n"
+               "\trawval : %s\n"
+               % (section, option, reference, rawval))
+        InterpolationError.__init__(self, option, section, msg)
+        self.reference = reference
+
+class InterpolationSyntaxError(InterpolationError):
+    """Raised when the source text into which substitutions are made
+    does not conform to the required syntax.
+
+    Carries no extra state beyond InterpolationError.
+    """
+
+class InterpolationDepthError(InterpolationError):
+    """Raised when substitutions are nested too deeply."""
+
+    def __init__(self, option, section, rawval):
+        msg = ("Value interpolation too deeply recursive:\n"
+               "\tsection: [%s]\n"
+               "\toption : %s\n"
+               "\trawval : %s\n"
+               % (section, option, rawval))
+        InterpolationError.__init__(self, option, section, msg)
+
+class ParsingError(Error):
+    """Raised when a configuration file does not follow legal syntax."""
+
+    def __init__(self, filename):
+        self.filename = filename
+        self.errors = []
+        Error.__init__(self, 'File contains parsing errors: %s' % filename)
+
+    def append(self, lineno, line):
+        # Record the offending line and extend the exception message.
+        self.errors.append((lineno, line))
+        self.message += '\n\t[line %2d]: %s' % (lineno, line)
+
+class MissingSectionHeaderError(ParsingError):
+    """Raised when a key-value pair is found before any section header."""
+
+    def __init__(self, filename, lineno, line):
+        Error.__init__(
+            self,
+            'File contains no section headers.\nfile: %s, line: %d\n%r' %
+            (filename, lineno, line))
+        self.filename = filename
+        self.lineno = lineno
+        self.line = line
+
+
+
+class RawConfigParser:
+    def __init__(self, defaults=None):
+        self._sections = {}
+        self._defaults = {}
+        if defaults:
+            for key, value in defaults.items():
+                self._defaults[self.optionxform(key)] = value
+
+    def defaults(self):
+        """Return the dictionary of intrinsic defaults."""
+        return self._defaults
+
+    def sections(self):
+        """Return a list of section names, excluding [DEFAULT]"""
+        # self._sections will never have [DEFAULT] in it; defaults are
+        # kept separately in self._defaults.
+        return self._sections.keys()
+
+    def add_section(self, section):
+        """Create a new section in the configuration.
+
+        Raise DuplicateSectionError if a section by the specified name
+        already exists.
+        """
+        if section in self._sections:
+            raise DuplicateSectionError(section)
+        # New sections start with no options.
+        self._sections[section] = {}
+
+    def has_section(self, section):
+        """Indicate whether the named section is present in the configuration.
+
+        The DEFAULT section is not acknowledged.
+        """
+        return section in self._sections
+
+    def options(self, section):
+        """Return a list of option names for the given section name."""
+        try:
+            opts = self._sections[section].copy()
+        except KeyError:
+            raise NoSectionError(section)
+        opts.update(self._defaults)
+        if '__name__' in opts:
+            del opts['__name__']
+        return opts.keys()
+
+    def read(self, filenames):
+        """Read and parse a filename or a list of filenames.
+
+        Files that cannot be opened are silently ignored; this is
+        designed so that you can specify a list of potential
+        configuration file locations (e.g. current directory, user's
+        home directory, systemwide directory), and all existing
+        configuration files in the list will be read.  A single
+        filename may also be given.
+
+        Return list of successfully read files.
+        """
+        if isinstance(filenames, basestring):
+            filenames = [filenames]
+        read_ok = []
+        for filename in filenames:
+            try:
+                fp = open(filename)
+            except IOError:
+                continue
+            self._read(fp, filename)
+            fp.close()
+            read_ok.append(filename)
+        return read_ok
+
+    def readfp(self, fp, filename=None):
+        """Like read() but the argument must be a file-like object.
+
+        The `fp' argument must have a `readline' method.  Optional
+        second argument is the `filename', which if not given, is
+        taken from fp.name.  If fp has no `name' attribute, `<???>' is
+        used.
+
+        """
+        if filename is None:
+            try:
+                filename = fp.name
+            except AttributeError:
+                filename = '<???>'
+        self._read(fp, filename)
+
+    def get(self, section, option):
+        opt = self.optionxform(option)
+        if section not in self._sections:
+            if section != DEFAULTSECT:
+                raise NoSectionError(section)
+            if opt in self._defaults:
+                return self._defaults[opt]
+            else:
+                raise NoOptionError(option, section)
+        elif opt in self._sections[section]:
+            return self._sections[section][opt]
+        elif opt in self._defaults:
+            return self._defaults[opt]
+        else:
+            raise NoOptionError(option, section)
+
+    def items(self, section):
+        try:
+            d2 = self._sections[section]
+        except KeyError:
+            if section != DEFAULTSECT:
+                raise NoSectionError(section)
+            d2 = {}
+        d = self._defaults.copy()
+        d.update(d2)
+        if "__name__" in d:
+            del d["__name__"]
+        return d.items()
+
+    def _get(self, section, conv, option):
+        # Fetch the raw string value and coerce it with *conv*.
+        return conv(self.get(section, option))
+
+    def getint(self, section, option):
+        """Like get(), but convert the value to an integer."""
+        return self._get(section, int, option)
+
+    def getfloat(self, section, option):
+        """Like get(), but convert the value to a float."""
+        return self._get(section, float, option)
+
+    # Accepted spellings for boolean option values (compared
+    # case-insensitively in getboolean()).
+    _boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True,
+                       '0': False, 'no': False, 'false': False, 'off': False}
+
+    def getboolean(self, section, option):
+        """Like get(), but convert the value to a boolean.
+
+        Raises ValueError if the value is not one of the spellings
+        listed in _boolean_states.
+        """
+        v = self.get(section, option)
+        if v.lower() not in self._boolean_states:
+            raise ValueError, 'Not a boolean: %s' % v
+        return self._boolean_states[v.lower()]
+
+    def optionxform(self, optionstr):
+        """Normalize an option name for storage and lookup (lower-cased).
+
+        Subclasses may override this to change matching behavior.
+        """
+        return optionstr.lower()
+
+    def has_option(self, section, option):
+        """Check for the existence of a given option in a given section."""
+        if not section or section == DEFAULTSECT:
+            option = self.optionxform(option)
+            return option in self._defaults
+        elif section not in self._sections:
+            return False
+        else:
+            option = self.optionxform(option)
+            return (option in self._sections[section]
+                    or option in self._defaults)
+
+    def set(self, section, option, value):
+        """Set an option."""
+        if not section or section == DEFAULTSECT:
+            sectdict = self._defaults
+        else:
+            try:
+                sectdict = self._sections[section]
+            except KeyError:
+                raise NoSectionError(section)
+        sectdict[self.optionxform(option)] = value
+
+    def write(self, fp):
+        """Write an .ini-format representation of the configuration state."""
+        if self._defaults:
+            fp.write("[%s]\n" % DEFAULTSECT)
+            for (key, value) in self._defaults.items():
+                fp.write("%s = %s\n" % (key, str(value).replace('\n', '\n\t')))
+            fp.write("\n")
+        for section in self._sections:
+            fp.write("[%s]\n" % section)
+            for (key, value) in self._sections[section].items():
+                if key != "__name__":
+                    fp.write("%s = %s\n" %
+                             (key, str(value).replace('\n', '\n\t')))
+            fp.write("\n")
+
+    def remove_option(self, section, option):
+        """Remove an option."""
+        if not section or section == DEFAULTSECT:
+            sectdict = self._defaults
+        else:
+            try:
+                sectdict = self._sections[section]
+            except KeyError:
+                raise NoSectionError(section)
+        option = self.optionxform(option)
+        existed = option in sectdict
+        if existed:
+            del sectdict[option]
+        return existed
+
+    def remove_section(self, section):
+        """Remove a file section."""
+        existed = section in self._sections
+        if existed:
+            del self._sections[section]
+        return existed
+
    #
    # Regular expressions for parsing section headers and options.
    #
    # SECTCRE matches a "[section]" header line; the header group is
    # deliberately permissive and accepts anything except ']'.
    SECTCRE = re.compile(
        r'\['                                 # [
        r'(?P<header>[^]]+)'                  # very permissive!
        r'\]'                                 # ]
        )
    # OPTCRE matches an "option: value" / "option = value" line; the
    # 'vi' group records which separator character was used.
    OPTCRE = re.compile(
        r'(?P<option>[^:=\s][^:=]*)'          # very permissive!
        r'\s*(?P<vi>[:=])\s*'                 # any number of space/tab,
                                              # followed by separator
                                              # (either : or =), followed
                                              # by any # space/tab
        r'(?P<value>.*)$'                     # everything up to eol
        )
+
    def _read(self, fp, fpname):
        """Parse a sectioned setup file.

        The sections in setup file contains a title line at the top,
        indicated by a name in square brackets (`[]'), plus key/value
        options lines, indicated by `name: value' format lines.
        Continuations are represented by an embedded newline then
        leading whitespace.  Blank lines, lines beginning with a '#',
        and just about everything else are ignored.

        Parse errors are collected into a single ParsingError that is
        raised only after the whole file has been read.
        """
        cursect = None                            # None, or a dictionary
        optname = None
        lineno = 0
        e = None                                  # None, or an exception
        while True:
            line = fp.readline()
            if not line:
                break
            lineno = lineno + 1
            # comment or blank line?
            if line.strip() == '' or line[0] in '#;':
                continue
            # A line whose first word is "rem" (spelled with a leading
            # 'r' or 'R' and no leading whitespace) is also a comment --
            # apparently a concession to Windows-style .ini files.
            if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR":
                # no leading whitespace
                continue
            # continuation line?  (leading whitespace while an option is
            # in progress): its text is appended to the previous value
            # with an embedded newline.
            if line[0].isspace() and cursect is not None and optname:
                value = line.strip()
                if value:
                    cursect[optname] = "%s\n%s" % (cursect[optname], value)
            # a section header or option header?
            else:
                # is it a section header?
                mo = self.SECTCRE.match(line)
                if mo:
                    sectname = mo.group('header')
                    if sectname in self._sections:
                        cursect = self._sections[sectname]
                    elif sectname == DEFAULTSECT:
                        cursect = self._defaults
                    else:
                        cursect = {'__name__': sectname}
                        self._sections[sectname] = cursect
                    # So sections can't start with a continuation line
                    optname = None
                # no section header in the file?
                elif cursect is None:
                    raise MissingSectionHeaderError(fpname, lineno, line)
                # an option line?
                else:
                    mo = self.OPTCRE.match(line)
                    if mo:
                        optname, vi, optval = mo.group('option', 'vi', 'value')
                        if vi in ('=', ':') and ';' in optval:
                            # ';' is a comment delimiter only if it follows
                            # a spacing character
                            pos = optval.find(';')
                            if pos != -1 and optval[pos-1].isspace():
                                optval = optval[:pos]
                        optval = optval.strip()
                        # allow empty values
                        if optval == '""':
                            optval = ''
                        optname = self.optionxform(optname.rstrip())
                        cursect[optname] = optval
                    else:
                        # a non-fatal parsing error occurred.  set up the
                        # exception but keep going. the exception will be
                        # raised at the end of the file and will contain a
                        # list of all bogus lines
                        if not e:
                            e = ParsingError(fpname)
                        e.append(lineno, repr(line))
        # if any parsing errors occurred, raise an exception
        if e:
            raise e
+
+
class ConfigParser(RawConfigParser):
    """RawConfigParser subclass that adds %(name)s value interpolation."""

    def get(self, section, option, raw=False, vars=None):
        """Get an option value for a given section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        # Build the lookup dict: defaults, overridden by the section's
        # own options, overridden by caller-supplied vars.
        d = self._defaults.copy()
        try:
            d.update(self._sections[section])
        except KeyError:
            if section != DEFAULTSECT:
                raise NoSectionError(section)
        # Update with the entry specific variables
        if vars:
            for key, value in vars.items():
                d[self.optionxform(key)] = value
        option = self.optionxform(option)
        try:
            value = d[option]
        except KeyError:
            raise NoOptionError(option, section)

        if raw:
            return value
        else:
            return self._interpolate(section, option, value, d)

    def items(self, section, raw=False, vars=None):
        """Return a list of tuples with (name, value) for each option
        in the section.

        All % interpolations are expanded in the return values, based on the
        defaults passed into the constructor, unless the optional argument
        `raw' is true.  Additional substitutions may be provided using the
        `vars' argument, which must be a dictionary whose contents overrides
        any pre-existing defaults.

        The section DEFAULT is special.
        """
        d = self._defaults.copy()
        try:
            d.update(self._sections[section])
        except KeyError:
            if section != DEFAULTSECT:
                raise NoSectionError(section)
        # Update with the entry specific variables
        if vars:
            for key, value in vars.items():
                d[self.optionxform(key)] = value
        # Python 2: keys() returns a list, so .remove() works here.
        options = d.keys()
        if "__name__" in options:
            options.remove("__name__")
        if raw:
            return [(option, d[option])
                    for option in options]
        else:
            return [(option, self._interpolate(section, option, d[option], d))
                    for option in options]

    def _interpolate(self, section, option, rawval, vars):
        # do the string interpolation
        # Repeatedly applies %-formatting against `vars` until no
        # "%(" remains or MAX_INTERPOLATION_DEPTH passes have run.
        value = rawval
        depth = MAX_INTERPOLATION_DEPTH
        while depth:                    # Loop through this until it's done
            depth -= 1
            if "%(" in value:
                # Normalize referenced option names via optionxform first.
                value = self._KEYCRE.sub(self._interpolation_replace, value)
                try:
                    value = value % vars
                except KeyError, e:
                    # e[0] is the missing option name (Python 2 idiom).
                    raise InterpolationMissingOptionError(
                        option, section, rawval, e[0])
            else:
                break
        if "%(" in value:
            # Still unresolved after the depth limit: circular reference.
            raise InterpolationDepthError(option, section, rawval)
        return value

    # Matches either a "%(name)s" reference or any single character.
    _KEYCRE = re.compile(r"%\(([^)]*)\)s|.")

    def _interpolation_replace(self, match):
        # Rewrite "%(Name)s" to "%(name)s" using optionxform; pass
        # every other character through unchanged.
        s = match.group(1)
        if s is None:
            return match.group()
        else:
            return "%%(%s)s" % self.optionxform(s)
+
+
class SafeConfigParser(ConfigParser):
    """ConfigParser variant with a stricter, safer interpolation scheme."""

    def _interpolate(self, section, option, rawval, vars):
        # do the string interpolation
        L = []
        self._interpolate_some(option, L, rawval, section, vars, 1)
        return ''.join(L)

    # Matches a complete "%(name)s" reference at the start of a string.
    _interpvar_match = re.compile(r"%\(([^)]+)\)s").match

    def _interpolate_some(self, option, accum, rest, section, map, depth):
        """Recursively expand %(name)s references in `rest` into `accum`."""
        if depth > MAX_INTERPOLATION_DEPTH:
            raise InterpolationDepthError(option, section, rest)
        while rest:
            p = rest.find("%")
            if p < 0:
                accum.append(rest)
                return
            if p > 0:
                accum.append(rest[:p])
                rest = rest[p:]
            # p is no longer used
            c = rest[1:2]
            if c == "%":
                # "%%" is an escaped literal percent sign.
                accum.append("%")
                rest = rest[2:]
            elif c == "(":
                m = self._interpvar_match(rest)
                if m is None:
                    raise InterpolationSyntaxError(option, section,
                        "bad interpolation variable reference %r" % rest)
                var = self.optionxform(m.group(1))
                rest = rest[m.end():]
                try:
                    v = map[var]
                except KeyError:
                    # NOTE(review): `rest` has already been advanced past
                    # the reference, so the error reports the remainder of
                    # the value rather than the full raw value.
                    raise InterpolationMissingOptionError(
                        option, section, rest, var)
                if "%" in v:
                    # The substituted value itself contains references;
                    # recurse with an increased depth.
                    self._interpolate_some(option, accum, v,
                                           section, map, depth + 1)
                else:
                    accum.append(v)
            else:
                raise InterpolationSyntaxError(
                    option, section,
                    "'%%' must be followed by '%%' or '(', found: %r" % (rest,))

    def set(self, section, option, value):
        """Set an option.  Extend ConfigParser.set: check for string values."""
        # basestring covers both str and unicode (Python 2 only).
        if not isinstance(value, basestring):
            raise TypeError("option values must be strings")
        ConfigParser.set(self, section, option, value)
diff --git a/depot_tools/release/win/python_24/Lib/Cookie.py b/depot_tools/release/win/python_24/Lib/Cookie.py
new file mode 100644
index 0000000..a6ca6545
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/Cookie.py
@@ -0,0 +1,748 @@
+#!/usr/bin/env python
+#
+
+####
+# Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
+#
+#                All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software
+# and its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of
+# Timothy O'Malley  not be used in advertising or publicity
+# pertaining to distribution of the software without specific, written
+# prior permission.
+#
+# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
+# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
+# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+#
+####
+#
+# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
+#   by Timothy O'Malley <timo@alum.mit.edu>
+#
+#  Cookie.py is a Python module for the handling of HTTP
+#  cookies as a Python dictionary.  See RFC 2109 for more
+#  information on cookies.
+#
+#  The original idea to treat Cookies as a dictionary came from
+#  Dave Mitchell (davem@magnet.com) in 1995, when he released the
+#  first version of nscookie.py.
+#
+####
+
+r"""
+Here's a sample session to show how to use this module.
+At the moment, this is the only documentation.
+
+The Basics
+----------
+
+Importing is easy..
+
+   >>> import Cookie
+
+Most of the time you start by creating a cookie.  Cookies come in
+three flavors, each with slightly different encoding semantics, but
+more on that later.
+
+   >>> C = Cookie.SimpleCookie()
+   >>> C = Cookie.SerialCookie()
+   >>> C = Cookie.SmartCookie()
+
+[Note: Long-time users of Cookie.py will remember using
+Cookie.Cookie() to create a Cookie object.  Although deprecated, it
+is still supported by the code.  See the Backward Compatibility notes
+for more information.]
+
+Once you've created your Cookie, you can add values just as if it were
+a dictionary.
+
+   >>> C = Cookie.SmartCookie()
+   >>> C["fig"] = "newton"
+   >>> C["sugar"] = "wafer"
+   >>> print C
+   Set-Cookie: fig=newton;
+   Set-Cookie: sugar=wafer;
+
+Notice that the printable representation of a Cookie is the
+appropriate format for a Set-Cookie: header.  This is the
+default behavior.  You can change the header and printed
+attributes by using the .output() function
+
+   >>> C = Cookie.SmartCookie()
+   >>> C["rocky"] = "road"
+   >>> C["rocky"]["path"] = "/cookie"
+   >>> print C.output(header="Cookie:")
+   Cookie: rocky=road; Path=/cookie;
+   >>> print C.output(attrs=[], header="Cookie:")
+   Cookie: rocky=road;
+
+The load() method of a Cookie extracts cookies from a string.  In a
+CGI script, you would use this method to extract the cookies from the
+HTTP_COOKIE environment variable.
+
+   >>> C = Cookie.SmartCookie()
+   >>> C.load("chips=ahoy; vienna=finger")
+   >>> print C
+   Set-Cookie: chips=ahoy;
+   Set-Cookie: vienna=finger;
+
+The load() method is darn-tootin smart about identifying cookies
+within a string.  Escaped quotation marks, nested semicolons, and other
+such trickeries do not confuse it.
+
+   >>> C = Cookie.SmartCookie()
+   >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
+   >>> print C
+   Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;";
+
+Each element of the Cookie also supports all of the RFC 2109
+Cookie attributes.  Here's an example which sets the Path
+attribute.
+
+   >>> C = Cookie.SmartCookie()
+   >>> C["oreo"] = "doublestuff"
+   >>> C["oreo"]["path"] = "/"
+   >>> print C
+   Set-Cookie: oreo=doublestuff; Path=/;
+
+Each dictionary element has a 'value' attribute, which gives you
+back the value associated with the key.
+
+   >>> C = Cookie.SmartCookie()
+   >>> C["twix"] = "none for you"
+   >>> C["twix"].value
+   'none for you'
+
+
+A Bit More Advanced
+-------------------
+
+As mentioned before, there are three different flavors of Cookie
+objects, each with different encoding/decoding semantics.  This
+section briefly discusses the differences.
+
+SimpleCookie
+
+The SimpleCookie expects that all values should be standard strings.
+Just to be sure, SimpleCookie invokes the str() builtin to convert
+the value to a string, when the values are set dictionary-style.
+
+   >>> C = Cookie.SimpleCookie()
+   >>> C["number"] = 7
+   >>> C["string"] = "seven"
+   >>> C["number"].value
+   '7'
+   >>> C["string"].value
+   'seven'
+   >>> print C
+   Set-Cookie: number=7;
+   Set-Cookie: string=seven;
+
+
+SerialCookie
+
+The SerialCookie expects that all values should be serialized using
+cPickle (or pickle, if cPickle isn't available).  As a result of
+serializing, SerialCookie can save almost any Python object to a
+value, and recover the exact same object when the cookie has been
+returned.  (SerialCookie can yield some strange-looking cookie
+values, however.)
+
+   >>> C = Cookie.SerialCookie()
+   >>> C["number"] = 7
+   >>> C["string"] = "seven"
+   >>> C["number"].value
+   7
+   >>> C["string"].value
+   'seven'
+   >>> print C
+   Set-Cookie: number="I7\012.";
+   Set-Cookie: string="S'seven'\012p1\012.";
+
+Be warned, however, if SerialCookie cannot de-serialize a value (because
+it isn't a valid pickle'd object), IT WILL RAISE AN EXCEPTION.
+
+
+SmartCookie
+
+The SmartCookie combines aspects of each of the other two flavors.
+When setting a value in a dictionary-fashion, the SmartCookie will
+serialize (ala cPickle) the value *if and only if* it isn't a
+Python string.  String objects are *not* serialized.  Similarly,
+when the load() method parses out values, it attempts to de-serialize
+the value.  If it fails, then it falls back to treating the value
+as a string.
+
+   >>> C = Cookie.SmartCookie()
+   >>> C["number"] = 7
+   >>> C["string"] = "seven"
+   >>> C["number"].value
+   7
+   >>> C["string"].value
+   'seven'
+   >>> print C
+   Set-Cookie: number="I7\012.";
+   Set-Cookie: string=seven;
+
+
+Backwards Compatibility
+-----------------------
+
+In order to keep compatibility with earlier versions of Cookie.py,
+it is still possible to use Cookie.Cookie() to create a Cookie.  In
+fact, this simply returns a SmartCookie.
+
+   >>> C = Cookie.Cookie()
+   >>> print C.__class__.__name__
+   SmartCookie
+
+
+Finis.
+"""  #"
+#     ^
+#     |----helps out font-lock
+
+#
+# Import our required modules
+#
+import string
+
+try:
+    from cPickle import dumps, loads
+except ImportError:
+    from pickle import dumps, loads
+
+import re, warnings
+
# Public names exported by "from Cookie import *".
__all__ = ["CookieError","BaseCookie","SimpleCookie","SerialCookie",
           "SmartCookie","Cookie"]

# Pre-bound join methods: C-speed concatenation of string fragments.
_nulljoin = ''.join
_spacejoin = ' '.join
+
+#
+# Define an exception visible to External modules
+#
class CookieError(Exception):
    """Raised for illegal cookie keys or invalid Morsel attributes."""
+
+
+# These quoting routines conform to the RFC2109 specification, which in
+# turn references the character definitions from RFC2068.  They provide
+# a two-way quoting algorithm.  Any non-text character is translated
+# into a 4 character sequence: a forward-slash followed by the
+# three-digit octal equivalent of the character.  Any '\' or '"' is
+# quoted with a preceding '\' slash.
+#
+# These are taken from RFC2068 and RFC2109.
+#       _LegalChars       is the list of chars which don't require "'s
+#       _Translator       hash-table for fast quoting
+#
# Characters that may appear in a cookie value without quoting.
_LegalChars       = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~"
# Maps every control character (0-31, 127) and every 8-bit character
# (128-255) to its backslash-octal escape; '"' and '\' get a simple
# backslash escape.  Used by _quote() via _Translator.get.
_Translator       = {
    '\000' : '\\000',  '\001' : '\\001',  '\002' : '\\002',
    '\003' : '\\003',  '\004' : '\\004',  '\005' : '\\005',
    '\006' : '\\006',  '\007' : '\\007',  '\010' : '\\010',
    '\011' : '\\011',  '\012' : '\\012',  '\013' : '\\013',
    '\014' : '\\014',  '\015' : '\\015',  '\016' : '\\016',
    '\017' : '\\017',  '\020' : '\\020',  '\021' : '\\021',
    '\022' : '\\022',  '\023' : '\\023',  '\024' : '\\024',
    '\025' : '\\025',  '\026' : '\\026',  '\027' : '\\027',
    '\030' : '\\030',  '\031' : '\\031',  '\032' : '\\032',
    '\033' : '\\033',  '\034' : '\\034',  '\035' : '\\035',
    '\036' : '\\036',  '\037' : '\\037',

    '"' : '\\"',       '\\' : '\\\\',

    '\177' : '\\177',  '\200' : '\\200',  '\201' : '\\201',
    '\202' : '\\202',  '\203' : '\\203',  '\204' : '\\204',
    '\205' : '\\205',  '\206' : '\\206',  '\207' : '\\207',
    '\210' : '\\210',  '\211' : '\\211',  '\212' : '\\212',
    '\213' : '\\213',  '\214' : '\\214',  '\215' : '\\215',
    '\216' : '\\216',  '\217' : '\\217',  '\220' : '\\220',
    '\221' : '\\221',  '\222' : '\\222',  '\223' : '\\223',
    '\224' : '\\224',  '\225' : '\\225',  '\226' : '\\226',
    '\227' : '\\227',  '\230' : '\\230',  '\231' : '\\231',
    '\232' : '\\232',  '\233' : '\\233',  '\234' : '\\234',
    '\235' : '\\235',  '\236' : '\\236',  '\237' : '\\237',
    '\240' : '\\240',  '\241' : '\\241',  '\242' : '\\242',
    '\243' : '\\243',  '\244' : '\\244',  '\245' : '\\245',
    '\246' : '\\246',  '\247' : '\\247',  '\250' : '\\250',
    '\251' : '\\251',  '\252' : '\\252',  '\253' : '\\253',
    '\254' : '\\254',  '\255' : '\\255',  '\256' : '\\256',
    '\257' : '\\257',  '\260' : '\\260',  '\261' : '\\261',
    '\262' : '\\262',  '\263' : '\\263',  '\264' : '\\264',
    '\265' : '\\265',  '\266' : '\\266',  '\267' : '\\267',
    '\270' : '\\270',  '\271' : '\\271',  '\272' : '\\272',
    '\273' : '\\273',  '\274' : '\\274',  '\275' : '\\275',
    '\276' : '\\276',  '\277' : '\\277',  '\300' : '\\300',
    '\301' : '\\301',  '\302' : '\\302',  '\303' : '\\303',
    '\304' : '\\304',  '\305' : '\\305',  '\306' : '\\306',
    '\307' : '\\307',  '\310' : '\\310',  '\311' : '\\311',
    '\312' : '\\312',  '\313' : '\\313',  '\314' : '\\314',
    '\315' : '\\315',  '\316' : '\\316',  '\317' : '\\317',
    '\320' : '\\320',  '\321' : '\\321',  '\322' : '\\322',
    '\323' : '\\323',  '\324' : '\\324',  '\325' : '\\325',
    '\326' : '\\326',  '\327' : '\\327',  '\330' : '\\330',
    '\331' : '\\331',  '\332' : '\\332',  '\333' : '\\333',
    '\334' : '\\334',  '\335' : '\\335',  '\336' : '\\336',
    '\337' : '\\337',  '\340' : '\\340',  '\341' : '\\341',
    '\342' : '\\342',  '\343' : '\\343',  '\344' : '\\344',
    '\345' : '\\345',  '\346' : '\\346',  '\347' : '\\347',
    '\350' : '\\350',  '\351' : '\\351',  '\352' : '\\352',
    '\353' : '\\353',  '\354' : '\\354',  '\355' : '\\355',
    '\356' : '\\356',  '\357' : '\\357',  '\360' : '\\360',
    '\361' : '\\361',  '\362' : '\\362',  '\363' : '\\363',
    '\364' : '\\364',  '\365' : '\\365',  '\366' : '\\366',
    '\367' : '\\367',  '\370' : '\\370',  '\371' : '\\371',
    '\372' : '\\372',  '\373' : '\\373',  '\374' : '\\374',
    '\375' : '\\375',  '\376' : '\\376',  '\377' : '\\377'
    }
+
def _quote(str, LegalChars=_LegalChars,
           idmap=string._idmap, translate=string.translate):
    """Quote a string for use in a cookie header.

    Returns the string unchanged when it contains only characters
    from _LegalChars; otherwise wraps it in double quotes with
    special characters backslash-escaped via _Translator.
    """
    #
    # If the string does not need to be double-quoted,
    # then just return the string.  Otherwise, surround
    # the string in doublequotes and precede quote (with a \)
    # special characters.
    #
    # string.translate's third argument deletes LegalChars, so an
    # empty result means every character was legal.  (string._idmap
    # and this form of translate are Python 2 only.)
    if "" == translate(str, idmap, LegalChars):
        return str
    else:
        return '"' + _nulljoin( map(_Translator.get, str, str) ) + '"'
# end _quote
+
+
+_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
+_QuotePatt = re.compile(r"[\\].")
+
+def _unquote(str):
+    # If there aren't any doublequotes,
+    # then there can't be any special characters.  See RFC 2109.
+    if  len(str) < 2:
+        return str
+    if str[0] != '"' or str[-1] != '"':
+        return str
+
+    # We have to assume that we must decode this string.
+    # Down to work.
+
+    # Remove the "s
+    str = str[1:-1]
+
+    # Check for special sequences.  Examples:
+    #    \012 --> \n
+    #    \"   --> "
+    #
+    i = 0
+    n = len(str)
+    res = []
+    while 0 <= i < n:
+        Omatch = _OctalPatt.search(str, i)
+        Qmatch = _QuotePatt.search(str, i)
+        if not Omatch and not Qmatch:              # Neither matched
+            res.append(str[i:])
+            break
+        # else:
+        j = k = -1
+        if Omatch: j = Omatch.start(0)
+        if Qmatch: k = Qmatch.start(0)
+        if Qmatch and ( not Omatch or k < j ):     # QuotePatt matched
+            res.append(str[i:k])
+            res.append(str[k+1])
+            i = k+2
+        else:                                      # OctalPatt matched
+            res.append(str[i:j])
+            res.append( chr( int(str[j+1:j+4], 8) ) )
+            i = j+4
+    return _nulljoin(res)
+# end _unquote
+
+# The _getdate() routine is used to set the expiration time in
+# the cookie's HTTP header.      By default, _getdate() returns the
+# current time in the appropriate "expires" format for a
+# Set-Cookie header.     The one optional argument is an offset from
+# now, in seconds.      For example, an offset of -3600 means "one hour ago".
+# The offset may be a floating point number.
+#
+
+_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+
+_monthname = [None,
+              'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+              'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+
+def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname):
+    from time import gmtime, time
+    now = time()
+    year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
+    return "%s, %02d-%3s-%4d %02d:%02d:%02d GMT" % \
+           (weekdayname[wd], day, monthname[month], year, hh, mm, ss)
+
+
+#
+# A class to hold ONE key,value pair.
+# In a cookie, each such pair may have several attributes.
+#       so this class is used to keep the attributes associated
+#       with the appropriate key,value pair.
+# This class also includes a coded_value attribute, which
+#       is used to hold the network representation of the
+#       value.  This is most useful when Python objects are
+#       pickled for network transit.
+#
+
class Morsel(dict):
    """One cookie key/value pair plus its RFC 2109 attributes.

    The dict portion of a Morsel holds the attributes (path, domain,
    ...); the cookie's own name and value live in the `key`, `value`
    and `coded_value` instance attributes.
    """
    # RFC 2109 lists these attributes as reserved:
    #   path       comment         domain
    #   max-age    secure      version
    #
    # For historical reasons, these attributes are also reserved:
    #   expires
    #
    # This dictionary provides a mapping from the lowercase
    # variant on the left to the appropriate traditional
    # formatting on the right.
    _reserved = { "expires" : "expires",
                   "path"        : "Path",
                   "comment" : "Comment",
                   "domain"      : "Domain",
                   "max-age" : "Max-Age",
                   "secure"      : "secure",
                   "version" : "Version",
                   }

    def __init__(self):
        # Set defaults
        self.key = self.value = self.coded_value = None

        # Set default attributes
        # (every reserved attribute starts out as the empty string;
        # OutputString skips empty attributes when rendering)
        for K in self._reserved:
            dict.__setitem__(self, K, "")
    # end __init__

    def __setitem__(self, K, V):
        # Dict-style assignment is restricted to reserved attribute names.
        K = K.lower()
        if not K in self._reserved:
            raise CookieError("Invalid Attribute %s" % K)
        dict.__setitem__(self, K, V)
    # end __setitem__

    def isReservedKey(self, K):
        # True if K (case-insensitively) names a reserved attribute.
        return K.lower() in self._reserved
    # end isReservedKey

    def set(self, key, val, coded_val,
            LegalChars=_LegalChars,
            idmap=string._idmap, translate=string.translate ):
        # First we verify that the key isn't a reserved word
        # Second we make sure it only contains legal characters
        # (string.translate with a deletions argument is Python 2 only:
        # an empty result means every char of the key was legal).
        if key.lower() in self._reserved:
            raise CookieError("Attempt to set a reserved key: %s" % key)
        if "" != translate(key, idmap, LegalChars):
            raise CookieError("Illegal key value: %s" % key)

        # It's a good key, so save it.
        self.key                 = key
        self.value               = val
        self.coded_value         = coded_val
    # end set

    def output(self, attrs=None, header = "Set-Cookie:"):
        # Render as an HTTP header line; `attrs` restricts which
        # attributes are emitted (None means all reserved attributes).
        return "%s %s" % ( header, self.OutputString(attrs) )

    __str__ = output

    def __repr__(self):
        return '<%s: %s=%s>' % (self.__class__.__name__,
                                self.key, repr(self.value) )

    def js_output(self, attrs=None):
        # Print javascript
        # (an HTML/JS snippet that sets this cookie via document.cookie)
        return """
        <SCRIPT LANGUAGE="JavaScript">
        <!-- begin hiding
        document.cookie = \"%s\"
        // end hiding -->
        </script>
        """ % ( self.OutputString(attrs), )
    # end js_output()

    def OutputString(self, attrs=None):
        # Build up our result
        #
        result = []
        RA = result.append

        # First, the key=value pair
        RA("%s=%s;" % (self.key, self.coded_value))

        # Now add any defined attributes
        if attrs is None:
            attrs = self._reserved
        # Sorted for deterministic attribute order (Python 2: items()
        # returns a list, so in-place .sort() works).
        items = self.items()
        items.sort()
        for K,V in items:
            if V == "": continue
            if K not in attrs: continue
            if K == "expires" and type(V) == type(1):
                # An integer "expires" is an offset in seconds from now.
                RA("%s=%s;" % (self._reserved[K], _getdate(V)))
            elif K == "max-age" and type(V) == type(1):
                RA("%s=%d;" % (self._reserved[K], V))
            elif K == "secure":
                # "secure" is a boolean attribute, emitted without a value.
                RA("%s;" % self._reserved[K])
            else:
                RA("%s=%s;" % (self._reserved[K], V))

        # Return the result
        return _spacejoin(result)
    # end OutputString
# end Morsel class
+
+
+
+#
+# Pattern for finding cookie
+#
+# This used to be strict parsing based on the RFC2109 and RFC2068
+# specifications.  I have since discovered that MSIE 3.0x doesn't
+# follow the character rules outlined in those specs.  As a
+# result, the parsing rules here are less strict.
+#
+
# A single character that may appear unquoted in a cookie key or value.
_LegalCharsPatt  = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=]"
# Matches one "key=value" pair (value optionally double-quoted),
# optionally terminated by a semicolon.  Used by BaseCookie.load().
_CookiePattern = re.compile(
    r"(?x)"                       # This is a Verbose pattern
    r"(?P<key>"                   # Start of group 'key'
    ""+ _LegalCharsPatt +"+?"     # Any word of at least one letter, nongreedy
    r")"                          # End of group 'key'
    r"\s*=\s*"                    # Equal Sign
    r"(?P<val>"                   # Start of group 'val'
    r'"(?:[^\\"]|\\.)*"'            # Any doublequoted string
    r"|"                            # or
    ""+ _LegalCharsPatt +"*"        # Any word or empty string
    r")"                          # End of group 'val'
    r"\s*;?"                      # Probably ending in a semi-colon
    )
+
+
+# At long last, here is the cookie class.
+#   Using this class is almost just like using a dictionary.
+# See this module's docstring for example usage.
+#
+class BaseCookie(dict):
+    # A container class for a set of Morsels
+    #
+
+    def value_decode(self, val):
+        """real_value, coded_value = value_decode(STRING)
+        Called prior to setting a cookie's value from the network
+        representation.  The VALUE is the value read from HTTP
+        header.
+        Override this function to modify the behavior of cookies.
+        """
+        return val, val
+    # end value_encode
+
+    def value_encode(self, val):
+        """real_value, coded_value = value_encode(VALUE)
+        Called prior to setting a cookie's value from the dictionary
+        representation.  The VALUE is the value being assigned.
+        Override this function to modify the behavior of cookies.
+        """
+        strval = str(val)
+        return strval, strval
+    # end value_encode
+
+    def __init__(self, input=None):
+        if input: self.load(input)
+    # end __init__
+
+    def __set(self, key, real_value, coded_value):
+        """Private method for setting a cookie's value"""
+        M = self.get(key, Morsel())
+        M.set(key, real_value, coded_value)
+        dict.__setitem__(self, key, M)
+    # end __set
+
+    def __setitem__(self, key, value):
+        """Dictionary style assignment."""
+        rval, cval = self.value_encode(value)
+        self.__set(key, rval, cval)
+    # end __setitem__
+
+    def output(self, attrs=None, header="Set-Cookie:", sep="\n"):
+        """Return a string suitable for HTTP."""
+        result = []
+        items = self.items()
+        items.sort()
+        for K,V in items:
+            result.append( V.output(attrs, header) )
+        return sep.join(result)
+    # end output
+
+    __str__ = output
+
+    def __repr__(self):
+        L = []
+        items = self.items()
+        items.sort()
+        for K,V in items:
+            L.append( '%s=%s' % (K,repr(V.value) ) )
+        return '<%s: %s>' % (self.__class__.__name__, _spacejoin(L))
+
+    def js_output(self, attrs=None):
+        """Return a string suitable for JavaScript."""
+        result = []
+        items = self.items()
+        items.sort()
+        for K,V in items:
+            result.append( V.js_output(attrs) )
+        return _nulljoin(result)
+    # end js_output
+
    def load(self, rawdata):
        """Load cookies from a string (presumably HTTP_COOKIE) or
        from a dictionary.  Loading cookies from a dictionary 'd'
        is equivalent to calling:
            map(Cookie.__setitem__, d.keys(), d.values())
        """
        # NOTE: the type() check deliberately matches plain str only;
        # anything else (including mappings) goes through dict.update().
        if type(rawdata) == type(""):
            self.__ParseString(rawdata)
        else:
            self.update(rawdata)
        return
    # end load()
+
    def __ParseString(self, str, patt=_CookiePattern):
        """Parse a raw cookie-header string and merge its morsels into self."""
        i = 0            # Our starting point
        n = len(str)     # Length of string
        M = None         # current morsel

        while 0 <= i < n:
            # Start looking for a cookie
            match = patt.search(str, i)
            if not match: break          # No more cookies

            K,V = match.group("key"), match.group("val")
            i = match.end(0)

            # Parse the key, value in case it's metainfo
            if K[0] == "$":
                # We ignore attributes which pertain to the cookie
                # mechanism as a whole.  See RFC 2109.
                # (Does anyone care?)
                if M:
                    M[ K[1:] ] = V
            elif K.lower() in Morsel._reserved:
                # A reserved attribute (path, domain, ...) applies to the
                # most recently created morsel, if there is one.
                if M:
                    M[ K ] = _unquote(V)
            else:
                # An ordinary key=value pair starts a new morsel.
                rval, cval = self.value_decode(V)
                self.__set(K, rval, cval)
                M = self[K]
    # end __ParseString
+# end BaseCookie class
+
class SimpleCookie(BaseCookie):
    """SimpleCookie
    SimpleCookie supports strings as cookie values.  When setting
    the value using the dictionary assignment notation, SimpleCookie
    calls the builtin str() to convert the value to a string.  Values
    received from HTTP are kept as strings.
    """
    def value_decode(self, val):
        # Return (real_value, coded_value): the unquoted string plus
        # the original wire form.
        return _unquote( val ), val
    def value_encode(self, val):
        strval = str(val)
        # Real value is the plain string; coded value is quoted for HTTP.
        return strval, _quote( strval )
# end SimpleCookie
+
class SerialCookie(BaseCookie):
    """SerialCookie
    SerialCookie supports arbitrary objects as cookie values. All
    values are serialized (using cPickle) before being sent to the
    client.  All incoming values are assumed to be valid Pickle
    representations.  IF AN INCOMING VALUE IS NOT IN A VALID PICKLE
    FORMAT, THEN AN EXCEPTION WILL BE RAISED.

    Note: Large cookie values add overhead because they must be
    retransmitted on every HTTP transaction.

    Note: HTTP has a 2k limit on the size of a cookie.  This class
    does not check for this limit, so be careful!!!
    """
    def __init__(self, input=None):
        # Unpickling attacker-controlled cookie data can execute
        # arbitrary code, hence the deprecation warning.
        warnings.warn("SerialCookie class is insecure; do not use it",
                      DeprecationWarning)
        BaseCookie.__init__(self, input)
    # end __init__
    def value_decode(self, val):
        # This could raise an exception!
        return loads( _unquote(val) ), val
    def value_encode(self, val):
        return val, _quote( dumps(val) )
# end SerialCookie
+
class SmartCookie(BaseCookie):
    """SmartCookie
    SmartCookie supports arbitrary objects as cookie values.  If the
    object is a string, then it is quoted.  If the object is not a
    string, however, then SmartCookie will use cPickle to serialize
    the object into a string representation.

    Note: Large cookie values add overhead because they must be
    retransmitted on every HTTP transaction.

    Note: HTTP has a 2k limit on the size of a cookie.  This class
    does not check for this limit, so be careful!!!
    """
    def __init__(self, input=None):
        # Unpickling attacker-controlled cookie data can execute
        # arbitrary code, hence the deprecation warning.
        warnings.warn("Cookie/SmartCookie class is insecure; do not use it",
                      DeprecationWarning)
        BaseCookie.__init__(self, input)
    # end __init__
    def value_decode(self, val):
        # Try to unpickle the unquoted value; fall back to the plain
        # string when it is not a valid pickle.  Catch Exception (not a
        # bare except) so KeyboardInterrupt/SystemExit are not silently
        # swallowed -- loads() can raise many exception types on
        # malformed input, but all derive from Exception.
        strval = _unquote(val)
        try:
            return loads(strval), val
        except Exception:
            return strval, val
    def value_encode(self, val):
        # Plain strings are stored as-is (quoted); anything else is
        # pickled first.
        if type(val) == type(""):
            return val, _quote(val)
        else:
            return val, _quote( dumps(val) )
# end SmartCookie
+
+
###########################################################
# Backwards Compatibility:  Don't break any existing code!

# We provide Cookie() as an alias for SmartCookie()
Cookie = SmartCookie

#
###########################################################

def _test():
    """Run the module's doctests (used when executed as a script)."""
    import doctest, Cookie
    return doctest.testmod(Cookie)

if __name__ == "__main__":
    _test()
+
+
+#Local Variables:
+#tab-width: 4
+#end:
diff --git a/depot_tools/release/win/python_24/Lib/DocXMLRPCServer.py b/depot_tools/release/win/python_24/Lib/DocXMLRPCServer.py
new file mode 100644
index 0000000..20958b2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/DocXMLRPCServer.py
@@ -0,0 +1,303 @@
+"""Self documenting XML-RPC Server.
+
+This module can be used to create XML-RPC servers that
+serve pydoc-style documentation in response to HTTP
+GET requests. This documentation is dynamically generated
+based on the functions and methods registered with the
+server.
+
+This module is built upon the pydoc and SimpleXMLRPCServer
+modules.
+"""
+
+import pydoc
+import inspect
+import types
+import re
+import sys
+
+from SimpleXMLRPCServer import (SimpleXMLRPCServer,
+            SimpleXMLRPCRequestHandler,
+            CGIXMLRPCRequestHandler,
+            resolve_dotted_attribute)
+
class ServerHTMLDoc(pydoc.HTMLDoc):
    """Class used to generate pydoc HTML document for a server"""

    def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
        """Mark up some plain text, given a context of symbols to look for.
        Each context dictionary maps object names to anchor names."""
        escape = escape or self.escape
        results = []
        here = 0

        # XXX Note that this regular expression does not allow for the
        # hyperlinking of arbitrary strings being used as method
        # names. Only methods with names consisting of word characters
        # and '.'s are hyperlinked.
        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
                                r'RFC[- ]?(\d+)|'
                                r'PEP[- ]?(\d+)|'
                                r'(self\.)?((?:\w|\.)+))\b')
        while 1:
            match = pattern.search(text, here)
            if not match: break
            start, end = match.span()
            # Emit the literal text preceding this match, escaped.
            results.append(escape(text[here:start]))

            all, scheme, rfc, pep, selfdot, name = match.groups()
            if scheme:
                url = escape(all).replace('"', '&quot;')
                results.append('<a href="%s">%s</a>' % (url, url))
            elif rfc:
                url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif pep:
                url = 'http://www.python.org/peps/pep-%04d.html' % int(pep)
                results.append('<a href="%s">%s</a>' % (url, escape(all)))
            elif text[end:end+1] == '(':
                # A name followed by '(' looks like a call: link it as a
                # method/function/class reference.
                results.append(self.namelink(name, methods, funcs, classes))
            elif selfdot:
                results.append('self.<strong>%s</strong>' % name)
            else:
                results.append(self.namelink(name, classes))
            here = end
        results.append(escape(text[here:]))
        return ''.join(results)

    def docroutine(self, object, name=None, mod=None,
                   funcs={}, classes={}, methods={}, cl=None):
        """Produce HTML documentation for a function or method object."""

        anchor = (cl and cl.__name__ or '') + '-' + name
        note = ''

        title = '<a name="%s"><strong>%s</strong></a>' % (anchor, name)

        if inspect.ismethod(object):
            args, varargs, varkw, defaults = inspect.getargspec(object.im_func)
            # exclude the argument bound to the instance, it will be
            # confusing to the non-Python user
            argspec = inspect.formatargspec (
                    args[1:],
                    varargs,
                    varkw,
                    defaults,
                    formatvalue=self.formatvalue
                )
        elif inspect.isfunction(object):
            args, varargs, varkw, defaults = inspect.getargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, formatvalue=self.formatvalue)
        else:
            # Not introspectable (e.g. a built-in): show a generic signature.
            argspec = '(...)'

        # A (argspec, docstring) tuple may be passed instead of a callable
        # when the server supplies documentation explicitly (see
        # XMLRPCDocGenerator.generate_html_documentation).
        if isinstance(object, types.TupleType):
            argspec = object[0] or argspec
            docstring = object[1] or ""
        else:
            docstring = pydoc.getdoc(object)

        decl = title + argspec + (note and self.grey(
               '<font face="helvetica, arial">%s</font>' % note))

        doc = self.markup(
            docstring, self.preformat, funcs, classes, methods)
        doc = doc and '<dd><tt>%s</tt></dd>' % doc
        return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)

    def docserver(self, server_name, package_documentation, methods):
        """Produce HTML documentation for an XML-RPC server."""

        # Map each method name (and its value) to its anchor so markup()
        # can hyperlink references to it.
        fdict = {}
        for key, value in methods.items():
            fdict[key] = '#-' + key
            fdict[value] = fdict[key]

        head = '<big><big><strong>%s</strong></big></big>' % server_name
        result = self.heading(head, '#ffffff', '#7799ee')

        doc = self.markup(package_documentation, self.preformat, fdict)
        doc = doc and '<tt>%s</tt>' % doc
        result = result + '<p>%s</p>\n' % doc

        contents = []
        method_items = methods.items()
        method_items.sort()
        for key, value in method_items:
            contents.append(self.docroutine(value, key, funcs=fdict))
        result = result + self.bigsection(
            'Methods', '#ffffff', '#eeaa77', pydoc.join(contents))

        return result
+
class XMLRPCDocGenerator:
    """Generates documentation for an XML-RPC server.

    This class is designed as a mix-in and should not
    be constructed directly.
    """

    def __init__(self):
        # setup variables used for HTML documentation
        self.server_name = 'XML-RPC Server Documentation'
        self.server_documentation = \
            "This server exports the following methods through the XML-RPC "\
            "protocol."
        self.server_title = 'XML-RPC Server Documentation'

    def set_server_title(self, server_title):
        """Set the HTML title of the generated server documentation"""

        self.server_title = server_title

    def set_server_name(self, server_name):
        """Set the name of the generated HTML server documentation"""

        self.server_name = server_name

    def set_server_documentation(self, server_documentation):
        """Set the documentation string for the entire server."""

        self.server_documentation = server_documentation

    def generate_html_documentation(self):
        """generate_html_documentation() => html documentation for the server

        Generates HTML documentation for the server using introspection for
        installed functions and instances that do not implement the
        _dispatch method. Alternatively, instances can choose to implement
        the _get_method_argstring(method_name) method to provide the
        argument string used in the documentation and the
        _methodHelp(method_name) method to provide the help text used
        in the documentation."""

        # Maps method name -> callable, or -> (argspec, docstring) tuple
        # when the instance supplies documentation explicitly.
        # NOTE(review): self.system_listMethods, self.funcs and
        # self.instance are provided by the XML-RPC dispatcher this class
        # is mixed into (see DocXMLRPCServer below).
        methods = {}

        for method_name in self.system_listMethods():
            if self.funcs.has_key(method_name):
                method = self.funcs[method_name]
            elif self.instance is not None:
                method_info = [None, None] # argspec, documentation
                if hasattr(self.instance, '_get_method_argstring'):
                    method_info[0] = self.instance._get_method_argstring(method_name)
                if hasattr(self.instance, '_methodHelp'):
                    method_info[1] = self.instance._methodHelp(method_name)

                method_info = tuple(method_info)
                if method_info != (None, None):
                    method = method_info
                elif not hasattr(self.instance, '_dispatch'):
                    try:
                        method = resolve_dotted_attribute(
                                    self.instance,
                                    method_name
                                    )
                    except AttributeError:
                        method = method_info
                else:
                    # A custom _dispatch hides the real callables; fall
                    # back to the (None, None) info tuple.
                    method = method_info
            else:
                assert 0, "Could not find method in self.functions and no "\
                          "instance installed"

            methods[method_name] = method

        documenter = ServerHTMLDoc()
        documentation = documenter.docserver(
                                self.server_name,
                                self.server_documentation,
                                methods
                            )

        return documenter.page(self.server_title, documentation)
+
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
    """XML-RPC and documentation request handler class.

    Handles all HTTP POST requests and attempts to decode them as
    XML-RPC requests.

    Handles all HTTP GET requests and interprets them as requests
    for documentation.
    """

    def do_GET(self):
        """Handles the HTTP GET request.

        Interpret all HTTP GET requests as requests for server
        documentation.
        """

        # self.server is the serving instance, which mixes in
        # XMLRPCDocGenerator and so provides this method.
        response = self.server.generate_html_documentation()
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-length", str(len(response)))
        self.end_headers()
        self.wfile.write(response)

        # shut down the connection
        self.wfile.flush()
        self.connection.shutdown(1)
+
class DocXMLRPCServer(  SimpleXMLRPCServer,
                        XMLRPCDocGenerator):
    """XML-RPC and HTML documentation server.

    Adds the ability to serve server documentation to the capabilities
    of SimpleXMLRPCServer.
    """

    def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler,
                 logRequests=1):
        # Initialize both bases explicitly; the doc-generator mix-in has
        # its own state (server name/title/documentation) to set up.
        SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests)
        XMLRPCDocGenerator.__init__(self)
+
class DocCGIXMLRPCRequestHandler(   CGIXMLRPCRequestHandler,
                                    XMLRPCDocGenerator):
    """Handler for XML-RPC data and documentation requests passed through
    CGI"""

    def handle_get(self):
        """Handles the HTTP GET request.

        Interpret all HTTP GET requests as requests for server
        documentation.
        """

        response = self.generate_html_documentation()

        # CGI responses go to stdout: headers, a blank line, then the body.
        print 'Content-Type: text/html'
        print 'Content-Length: %d' % len(response)
        print
        sys.stdout.write(response)

    def __init__(self):
        CGIXMLRPCRequestHandler.__init__(self)
        XMLRPCDocGenerator.__init__(self)
+
if __name__ == '__main__':
    # Demo: serve documentation for a one-function math server on
    # localhost:8000 (GET shows the docs, POST is XML-RPC).  Runs forever.
    def deg_to_rad(deg):
        """deg_to_rad(90) => 1.5707963267948966

        Converts an angle in degrees to an angle in radians"""
        import math
        return deg * math.pi / 180

    server = DocXMLRPCServer(("localhost", 8000))

    server.set_server_title("Math Server")
    server.set_server_name("Math XML-RPC Server")
    server.set_server_documentation("""This server supports various mathematical functions.

You can use it from Python as follows:

>>> from xmlrpclib import ServerProxy
>>> s = ServerProxy("http://localhost:8000")
>>> s.deg_to_rad(90.0)
1.5707963267948966""")

    server.register_function(deg_to_rad)
    server.register_introspection_functions()

    server.serve_forever()
diff --git a/depot_tools/release/win/python_24/Lib/HTMLParser.py b/depot_tools/release/win/python_24/Lib/HTMLParser.py
new file mode 100644
index 0000000..8380466e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/HTMLParser.py
@@ -0,0 +1,369 @@
+"""A parser for HTML and XHTML."""
+
+# This file is based on sgmllib.py, but the API is slightly different.
+
+# XXX There should be a way to distinguish between PCDATA (parsed
+# character data -- the normal case), RCDATA (replaceable character
+# data -- only char and entity references and end tags are special)
+# and CDATA (character data -- only end tags are special).
+
+
+import markupbase
+import re
+
# Regular expressions used for parsing

# Characters that interrupt a run of plain data in normal (PCDATA) mode.
interesting_normal = re.compile('[&<]')
# In CDATA mode (script/style) only an end tag or end of input matters.
interesting_cdata = re.compile(r'<(/|\Z)')
# A '&' that might be the start of an entity or character reference.
incomplete = re.compile('&[a-zA-Z#]')

entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]')
charref = re.compile('&#(?:[0-9]+|[xX][0-9a-fA-F]+)[^0-9a-fA-F]')

starttagopen = re.compile('<[a-zA-Z]')
piclose = re.compile('>')
commentclose = re.compile(r'--\s*>')
tagfind = re.compile('[a-zA-Z][-.a-zA-Z0-9:_]*')
# Attribute name, optionally followed by '=' and a quoted or bare value.
attrfind = re.compile(
    r'\s*([a-zA-Z_][-.:a-zA-Z_0-9]*)(\s*=\s*'
    r'(\'[^\']*\'|"[^"]*"|[-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~@]*))?')

# Matches a complete start tag up to (but not including) '>' or '/>'.
locatestarttagend = re.compile(r"""
  <[a-zA-Z][-.a-zA-Z0-9:_]*          # tag name
  (?:\s+                             # whitespace before attribute name
    (?:[a-zA-Z_][-.:a-zA-Z0-9_]*     # attribute name
      (?:\s*=\s*                     # value indicator
        (?:'[^']*'                   # LITA-enclosed value
          |\"[^\"]*\"                # LIT-enclosed value
          |[^'\">\s]+                # bare value
         )
       )?
     )
   )*
  \s*                                # trailing whitespace
""", re.VERBOSE)
endendtag = re.compile('>')
endtagfind = re.compile('</\s*([a-zA-Z][-.a-zA-Z0-9:_]*)\s*>')
+
+
class HTMLParseError(Exception):
    """Exception raised for all parse errors."""

    def __init__(self, msg, position=(None, None)):
        # A message is mandatory; position is an optional (line, column)
        # pair, either element of which may be None.
        assert msg
        self.msg = msg
        self.lineno = position[0]
        self.offset = position[1]

    def __str__(self):
        parts = [self.msg]
        if self.lineno is not None:
            parts.append(", at line %d" % self.lineno)
        if self.offset is not None:
            # Column is reported 1-based.
            parts.append(", column %d" % (self.offset + 1))
        return "".join(parts)
+
+
class HTMLParser(markupbase.ParserBase):
    """Find tags and other markup and call handler functions.

    Usage:
        p = HTMLParser()
        p.feed(data)
        ...
        p.close()

    Start tags are handled by calling self.handle_starttag() or
    self.handle_startendtag(); end tags by self.handle_endtag().  The
    data between tags is passed from the parser to the derived class
    by calling self.handle_data() with the data as argument (the data
    may be split up in arbitrary chunks).  Entity references are
    passed by calling self.handle_entityref() with the entity
    reference as the argument.  Numeric character references are
    passed to self.handle_charref() with the string containing the
    reference as the argument.
    """

    # Elements whose content is treated as CDATA: only an end tag
    # terminates it (see set_cdata_mode / interesting_cdata).
    CDATA_CONTENT_ELEMENTS = ("script", "style")


    def __init__(self):
        """Initialize and reset this instance."""
        self.reset()

    def reset(self):
        """Reset this instance.  Loses all unprocessed data."""
        self.rawdata = ''
        self.lasttag = '???'
        self.interesting = interesting_normal
        markupbase.ParserBase.reset(self)

    def feed(self, data):
        """Feed data to the parser.

        Call this as often as you want, with as little or as much text
        as you want (may include '\n').
        """
        self.rawdata = self.rawdata + data
        self.goahead(0)

    def close(self):
        """Handle any buffered data."""
        self.goahead(1)

    def error(self, message):
        raise HTMLParseError(message, self.getpos())

    __starttag_text = None

    def get_starttag_text(self):
        """Return full source of start tag: '<...>'."""
        return self.__starttag_text

    def set_cdata_mode(self):
        self.interesting = interesting_cdata

    def clear_cdata_mode(self):
        self.interesting = interesting_normal

    # Internal -- handle data as far as reasonable.  May leave state
    # and data to be processed by a subsequent call.  If 'end' is
    # true, force handling all data as if followed by EOF marker.
    def goahead(self, end):
        rawdata = self.rawdata
        i = 0
        n = len(rawdata)
        while i < n:
            match = self.interesting.search(rawdata, i) # < or &
            if match:
                j = match.start()
            else:
                j = n
            if i < j: self.handle_data(rawdata[i:j])
            i = self.updatepos(i, j)
            if i == n: break
            startswith = rawdata.startswith
            if startswith('<', i):
                if starttagopen.match(rawdata, i): # < + letter
                    k = self.parse_starttag(i)
                elif startswith("</", i):
                    k = self.parse_endtag(i)
                elif startswith("<!--", i):
                    k = self.parse_comment(i)
                elif startswith("<?", i):
                    k = self.parse_pi(i)
                elif startswith("<!", i):
                    k = self.parse_declaration(i)
                elif (i + 1) < n:
                    self.handle_data("<")
                    k = i + 1
                else:
                    break
                if k < 0:
                    # Construct is incomplete: wait for more data unless
                    # we are at EOF.
                    if end:
                        self.error("EOF in middle of construct")
                    break
                i = self.updatepos(i, k)
            elif startswith("&#", i):
                match = charref.match(rawdata, i)
                if match:
                    name = match.group()[2:-1]
                    self.handle_charref(name)
                    k = match.end()
                    if not startswith(';', k-1):
                        # The terminating ';' is optional; don't consume
                        # the character that ended the reference.
                        k = k - 1
                    i = self.updatepos(i, k)
                    continue
                else:
                    break
            elif startswith('&', i):
                match = entityref.match(rawdata, i)
                if match:
                    name = match.group(1)
                    self.handle_entityref(name)
                    k = match.end()
                    if not startswith(';', k-1):
                        k = k - 1
                    i = self.updatepos(i, k)
                    continue
                match = incomplete.match(rawdata, i)
                if match:
                    # match.group() will contain at least 2 chars
                    if end and match.group() == rawdata[i:]:
                        self.error("EOF in middle of entity or char ref")
                    # incomplete
                    break
                elif (i + 1) < n:
                    # not the end of the buffer, and can't be confused
                    # with some other construct
                    self.handle_data("&")
                    i = self.updatepos(i, i + 1)
                else:
                    break
            else:
                assert 0, "interesting.search() lied"
        # end while
        if end and i < n:
            self.handle_data(rawdata[i:n])
            i = self.updatepos(i, n)
        self.rawdata = rawdata[i:]

    # Internal -- parse processing instr, return end or -1 if not terminated
    def parse_pi(self, i):
        rawdata = self.rawdata
        assert rawdata[i:i+2] == '<?', 'unexpected call to parse_pi()'
        match = piclose.search(rawdata, i+2) # >
        if not match:
            return -1
        j = match.start()
        self.handle_pi(rawdata[i+2: j])
        j = match.end()
        return j

    # Internal -- handle starttag, return end or -1 if not terminated
    def parse_starttag(self, i):
        self.__starttag_text = None
        endpos = self.check_for_whole_start_tag(i)
        if endpos < 0:
            return endpos
        rawdata = self.rawdata
        self.__starttag_text = rawdata[i:endpos]

        # Now parse the data between i+1 and j into a tag and attrs
        attrs = []
        match = tagfind.match(rawdata, i+1)
        assert match, 'unexpected call to parse_starttag()'
        k = match.end()
        self.lasttag = tag = rawdata[i+1:k].lower()

        while k < endpos:
            m = attrfind.match(rawdata, k)
            if not m:
                break
            attrname, rest, attrvalue = m.group(1, 2, 3)
            if not rest:
                # Attribute without a value, e.g. <input disabled>.
                attrvalue = None
            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
                attrvalue = self.unescape(attrvalue)
            attrs.append((attrname.lower(), attrvalue))
            k = m.end()

        end = rawdata[k:endpos].strip()
        if end not in (">", "/>"):
            # Compute the position of the junk for the error message,
            # accounting for newlines inside the tag text.
            lineno, offset = self.getpos()
            if "\n" in self.__starttag_text:
                lineno = lineno + self.__starttag_text.count("\n")
                offset = len(self.__starttag_text) \
                         - self.__starttag_text.rfind("\n")
            else:
                offset = offset + len(self.__starttag_text)
            self.error("junk characters in start tag: %r"
                       % (rawdata[k:endpos][:20],))
        if end.endswith('/>'):
            # XHTML-style empty tag: <span attr="value" />
            self.handle_startendtag(tag, attrs)
        else:
            self.handle_starttag(tag, attrs)
            if tag in self.CDATA_CONTENT_ELEMENTS:
                self.set_cdata_mode()
        return endpos

    # Internal -- check to see if we have a complete starttag; return end
    # or -1 if incomplete.
    def check_for_whole_start_tag(self, i):
        rawdata = self.rawdata
        m = locatestarttagend.match(rawdata, i)
        if m:
            j = m.end()
            next = rawdata[j:j+1]
            if next == ">":
                return j + 1
            if next == "/":
                if rawdata.startswith("/>", j):
                    return j + 2
                if rawdata.startswith("/", j):
                    # buffer boundary
                    return -1
                # else bogus input
                self.updatepos(i, j + 1)
                self.error("malformed empty start tag")
            if next == "":
                # end of input
                return -1
            if next in ("abcdefghijklmnopqrstuvwxyz=/"
                        "ABCDEFGHIJKLMNOPQRSTUVWXYZ"):
                # end of input in or before attribute value, or we have the
                # '/' from a '/>' ending
                return -1
            self.updatepos(i, j)
            self.error("malformed start tag")
        raise AssertionError("we should not get here!")

    # Internal -- parse endtag, return end or -1 if incomplete
    def parse_endtag(self, i):
        rawdata = self.rawdata
        assert rawdata[i:i+2] == "</", "unexpected call to parse_endtag"
        match = endendtag.search(rawdata, i+1) # >
        if not match:
            return -1
        j = match.end()
        match = endtagfind.match(rawdata, i) # </ + tag + >
        if not match:
            self.error("bad end tag: %r" % (rawdata[i:j],))
        tag = match.group(1)
        self.handle_endtag(tag.lower())
        # Any end tag takes us out of CDATA (script/style) mode.
        self.clear_cdata_mode()
        return j

    # Overridable -- finish processing of start+end tag: <tag.../>
    def handle_startendtag(self, tag, attrs):
        self.handle_starttag(tag, attrs)
        self.handle_endtag(tag)

    # Overridable -- handle start tag
    def handle_starttag(self, tag, attrs):
        pass

    # Overridable -- handle end tag
    def handle_endtag(self, tag):
        pass

    # Overridable -- handle character reference
    def handle_charref(self, name):
        pass

    # Overridable -- handle entity reference
    def handle_entityref(self, name):
        pass

    # Overridable -- handle data
    def handle_data(self, data):
        pass

    # Overridable -- handle comment
    def handle_comment(self, data):
        pass

    # Overridable -- handle declaration
    def handle_decl(self, decl):
        pass

    # Overridable -- handle processing instruction
    def handle_pi(self, data):
        pass

    def unknown_decl(self, data):
        self.error("unknown declaration: %r" % (data,))

    # Internal -- helper to remove special character quoting
    def unescape(self, s):
        if '&' not in s:
            return s
        s = s.replace("&lt;", "<")
        s = s.replace("&gt;", ">")
        s = s.replace("&apos;", "'")
        s = s.replace("&quot;", '"')
        s = s.replace("&amp;", "&") # Must be last
        return s
diff --git a/depot_tools/release/win/python_24/Lib/MimeWriter.py b/depot_tools/release/win/python_24/Lib/MimeWriter.py
new file mode 100644
index 0000000..58c0a0bc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/MimeWriter.py
@@ -0,0 +1,181 @@
+"""Generic MIME writer.
+
+This module defines the class MimeWriter.  The MimeWriter class implements
+a basic formatter for creating MIME multi-part files.  It doesn't seek around
+the output file nor does it use large amounts of buffer space. You must write
+the parts out in the order that they should occur in the final file.
+MimeWriter does buffer the headers you add, allowing you to rearrange their
+order.
+
+"""
+
+
+import mimetools
+
+__all__ = ["MimeWriter"]
+
class MimeWriter:

    """Generic MIME writer.

    Writes a MIME (possibly multipart) document to an output file
    strictly sequentially: headers added with addheader() are buffered
    (and may therefore still be reordered) until a startbody() or
    startmultipartbody() call flushes them, after which body data is
    written directly to the underlying file.  Parts must be produced in
    the order they should appear in the final document.

    Typical use:

        w = MimeWriter(f)
        w.addheader(key, value)           # zero or more times
        body = w.startbody(content_type)  # then body.write(...)

    or, for a multipart document:

        w.startmultipartbody(subtype)
        sub = w.nextpart()                # a fresh MimeWriter per part
        ...use sub like a toplevel MimeWriter...
        w.lastpart()                      # mandatory terminator

    Notes:

    - startbody() currently returns the constructor's file object, but
      callers should not rely on that detail.
    - startmultipartbody() also returns a file object, usable for the
      'if you can read this your mailer is not MIME-aware' preamble.
    - flushheaders() writes out the buffered headers without starting a
      body (useful e.g. for message/rfc822 subparts that only carry
      header-like data).
    - The 'prefix' flag of addheader()/start*body() selects insertion at
      the front (1) or the back (0) of the buffered header list.  It
      defaults to append for addheader() but to front-insertion for the
      start*body() methods, which use it to position Content-Type.

    XXX There is no state checking, so calls made in the wrong order
    are not detected.
    """

    def __init__(self, fp):
        self._fp = fp        # output file-like object
        self._headers = []   # buffered header lines, written by flushheaders()

    def addheader(self, key, value, prefix=0):
        """Buffer one header line for the MIME message.

        key names the header and value supplies its contents; prefix=1
        inserts the line at the start of the buffered list, prefix=0
        (the default) appends it.
        """
        parts = value.split("\n")
        # Trim blank lines from both ends, then indent every
        # continuation line so the folded header stays readable.
        while parts and not parts[-1]:
            del parts[-1]
        while parts and not parts[0]:
            del parts[0]
        folded = parts[:1]
        for cont in parts[1:]:
            folded.append("    " + cont.strip())
        header = key + ": " + "\n".join(folded) + "\n"
        if prefix:
            self._headers.insert(0, header)
        else:
            self._headers.append(header)

    def flushheaders(self):
        """Write out all buffered headers and forget them.

        Useful when a part needs headers but no body at all, e.g. a
        message/rfc822 subpart (mis)used to store header-like data.
        """
        self._fp.writelines(self._headers)
        self._headers = []

    def startbody(self, ctype, plist=[], prefix=1):
        """Emit a Content-Type header and return the body output file.

        ctype is the content type, optionally extended with the
        (name, value) parameters in plist.  prefix determines where the
        Content-Type header is inserted (1 = front, the default;
        0 = back).  All buffered headers are flushed before the body
        starts.
        """
        for name, value in plist:
            ctype = ctype + ';\n %s="%s"' % (name, value)
        self.addheader("Content-Type", ctype, prefix=prefix)
        self.flushheaders()
        self._fp.write("\n")   # blank line separates headers from body
        return self._fp

    def startmultipartbody(self, subtype, boundary=None, plist=[], prefix=1):
        """Begin a multipart/<subtype> body; returns the preamble file.

        A boundary is chosen automatically unless one is supplied via
        boundary; plist and prefix are forwarded to startbody().  Create
        the individual parts with nextpart() afterwards.
        """
        self._boundary = boundary or mimetools.choose_boundary()
        params = [("boundary", self._boundary)] + plist
        return self.startbody("multipart/" + subtype, params, prefix=prefix)

    def nextpart(self):
        """Write a part boundary and return a fresh MimeWriter.

        The returned writer represents one part of the multipart body
        and may itself produce nested multipart content; the message
        must have been initialized with startmultipartbody() first.
        """
        self._fp.write("\n--" + self._boundary + "\n")
        return self.__class__(self._fp)

    def lastpart(self):
        """Write the closing boundary of the multipart body.

        Always call this to terminate a multipart message.
        """
        self._fp.write("\n--" + self._boundary + "--\n")
+
+
if __name__ == '__main__':
    # Importing the test module presumably runs the MimeWriter self-test
    # as an import side effect (Python 2.4-era convention) -- confirm
    # against Lib/test/test_MimeWriter.py.
    import test.test_MimeWriter
diff --git a/depot_tools/release/win/python_24/Lib/Queue.py b/depot_tools/release/win/python_24/Lib/Queue.py
new file mode 100644
index 0000000..c6c608b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/Queue.py
@@ -0,0 +1,169 @@
+"""A multi-producer, multi-consumer queue."""
+
+from time import time as _time
+from collections import deque
+
+__all__ = ['Empty', 'Full', 'Queue']
+
class Empty(Exception):
    """Raised by Queue.get(block=0)/get_nowait() when no item is available."""
+
class Full(Exception):
    """Raised by Queue.put(block=0)/put_nowait() when no slot is free."""
+
class Queue:
    def __init__(self, maxsize=0):
        """Initialize a queue object with a given maximum size.

        If maxsize is <= 0, the queue size is infinite.
        """
        try:
            import threading
        except ImportError:
            # Interpreter built without threads: use the no-op stand-ins.
            import dummy_threading as threading
        self._init(maxsize)
        # mutex is held for every mutation of the queue.  Both condition
        # variables share it, so waiting on either one releases (and on
        # wakeup reacquires) the same lock.
        self.mutex = threading.Lock()
        # Signalled after an item is added; wakes threads blocked in get().
        self.not_empty = threading.Condition(self.mutex)
        # Signalled after an item is removed; wakes threads blocked in put().
        self.not_full = threading.Condition(self.mutex)

    def qsize(self):
        """Return the approximate size of the queue (not reliable!)."""
        self.mutex.acquire()
        size = self._qsize()
        self.mutex.release()
        return size

    def empty(self):
        """Return True if the queue is empty, False otherwise (not reliable!)."""
        self.mutex.acquire()
        is_empty = self._empty()
        self.mutex.release()
        return is_empty

    def full(self):
        """Return True if the queue is full, False otherwise (not reliable!)."""
        self.mutex.acquire()
        is_full = self._full()
        self.mutex.release()
        return is_full

    def put(self, item, block=True, timeout=None):
        """Put an item into the queue.

        With 'block' true and 'timeout' None (the default), wait until a
        free slot is available.  With 'block' true and a positive
        'timeout', wait at most that many seconds and then raise Full.
        With 'block' false, enqueue only if a slot is immediately free,
        otherwise raise Full ('timeout' is ignored in that case).
        """
        self.not_full.acquire()
        try:
            if not block:
                if self._full():
                    raise Full
            elif timeout is None:
                while self._full():
                    self.not_full.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                # Recompute the remaining wait each pass: wait() may
                # return early (notify or spurious wakeup).
                deadline = _time() + timeout
                while self._full():
                    delay = deadline - _time()
                    if delay <= 0.0:
                        raise Full
                    self.not_full.wait(delay)
            self._put(item)
            self.not_empty.notify()
        finally:
            self.not_full.release()

    def put_nowait(self, item):
        """Equivalent to put(item, block=False): enqueue only if a slot
        is immediately free, otherwise raise Full."""
        return self.put(item, False)

    def get(self, block=True, timeout=None):
        """Remove and return an item from the queue.

        With 'block' true and 'timeout' None (the default), wait until
        an item is available.  With 'block' true and a positive
        'timeout', wait at most that many seconds and then raise Empty.
        With 'block' false, return an item only if one is immediately
        available, otherwise raise Empty ('timeout' is ignored in that
        case).
        """
        self.not_empty.acquire()
        try:
            if not block:
                if self._empty():
                    raise Empty
            elif timeout is None:
                while self._empty():
                    self.not_empty.wait()
            else:
                if timeout < 0:
                    raise ValueError("'timeout' must be a positive number")
                deadline = _time() + timeout
                while self._empty():
                    delay = deadline - _time()
                    if delay <= 0.0:
                        raise Empty
                    self.not_empty.wait(delay)
            item = self._get()
            self.not_full.notify()
            return item
        finally:
            self.not_empty.release()

    def get_nowait(self):
        """Equivalent to get(block=False): return an item only if one is
        immediately available, otherwise raise Empty."""
        return self.get(False)

    # Override the methods below to implement other queue organizations
    # (e.g. stack or priority queue).  They are only ever called with
    # the appropriate lock already held.

    def _init(self, maxsize):
        # A deque gives O(1) append and popleft.
        self.maxsize = maxsize
        self.queue = deque()

    def _qsize(self):
        return len(self.queue)

    def _empty(self):
        return not self.queue

    def _full(self):
        # maxsize <= 0 means "unbounded", so the queue is never full.
        return self.maxsize > 0 and len(self.queue) == self.maxsize

    def _put(self, item):
        self.queue.append(item)

    def _get(self):
        return self.queue.popleft()
diff --git a/depot_tools/release/win/python_24/Lib/SimpleHTTPServer.py b/depot_tools/release/win/python_24/Lib/SimpleHTTPServer.py
new file mode 100644
index 0000000..4d8eb77
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/SimpleHTTPServer.py
@@ -0,0 +1,200 @@
+"""Simple HTTP Server.
+
+This module builds on BaseHTTPServer by implementing the standard GET
+and HEAD requests in a fairly straightforward manner.
+
+"""
+
+
+__version__ = "0.6"
+
+__all__ = ["SimpleHTTPRequestHandler"]
+
+import os
+import posixpath
+import BaseHTTPServer
+import urllib
+import cgi
+import shutil
+import mimetypes
+from StringIO import StringIO
+
+
class SimpleHTTPRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):

    """Simple HTTP request handler with GET and HEAD commands.

    This serves files from the current directory and any of its
    subdirectories.  The MIME type for files is determined by
    calling the .guess_type() method.

    The GET and HEAD requests are identical except that the HEAD
    request omits the actual contents of the file.

    """

    # NOTE(review): presumably consumed by the BaseHTTPServer base class
    # (not shown here) for the Server: response header -- confirm there.
    server_version = "SimpleHTTP/" + __version__

    def do_GET(self):
        """Serve a GET request."""
        f = self.send_head()
        if f:
            # Headers are already sent; stream the body and close.
            self.copyfile(f, self.wfile)
            f.close()

    def do_HEAD(self):
        """Serve a HEAD request."""
        # Same as GET but the body file is closed unsent.
        f = self.send_head()
        if f:
            f.close()

    def send_head(self):
        """Common code for GET and HEAD commands.

        This sends the response code and MIME headers.

        Return value is either a file object (which has to be copied
        to the outputfile by the caller unless the command was HEAD,
        and must be closed by the caller under all circumstances), or
        None, in which case the caller has nothing further to do.

        """
        path = self.translate_path(self.path)
        f = None
        if os.path.isdir(path):
            # Serve an index file if one exists, otherwise fall through
            # to a generated directory listing.
            for index in "index.html", "index.htm":
                index = os.path.join(path, index)
                if os.path.exists(index):
                    path = index
                    break
            else:
                return self.list_directory(path)
        ctype = self.guess_type(path)
        try:
            # Always read in binary mode. Opening files in text mode may cause
            # newline translations, making the actual size of the content
            # transmitted *less* than the content-length!
            f = open(path, 'rb')
        except IOError:
            self.send_error(404, "File not found")
            return None
        self.send_response(200)
        self.send_header("Content-type", ctype)
        # fstat index 6 is st_size: the exact byte count of the open file.
        self.send_header("Content-Length", str(os.fstat(f.fileno())[6]))
        self.end_headers()
        return f

    def list_directory(self, path):
        """Helper to produce a directory listing (absent index.html).

        Return value is either a file object, or None (indicating an
        error).  In either case, the headers are sent, making the
        interface the same as for send_head().

        """
        try:
            list = os.listdir(path)
        except os.error:
            self.send_error(404, "No permission to list directory")
            return None
        list.sort(key=lambda a: a.lower())
        # Build the HTML listing in memory so Content-Length can be sent.
        f = StringIO()
        f.write("<title>Directory listing for %s</title>\n" % self.path)
        f.write("<h2>Directory listing for %s</h2>\n" % self.path)
        f.write("<hr>\n<ul>\n")
        for name in list:
            fullname = os.path.join(path, name)
            displayname = linkname = name
            # Append / for directories or @ for symbolic links
            if os.path.isdir(fullname):
                displayname = name + "/"
                linkname = name + "/"
            if os.path.islink(fullname):
                displayname = name + "@"
                # Note: a link to a directory displays with @ and links with /
            # urllib.quote protects the href; cgi.escape the visible text.
            f.write('<li><a href="%s">%s</a>\n'
                    % (urllib.quote(linkname), cgi.escape(displayname)))
        f.write("</ul>\n<hr>\n")
        length = f.tell()
        f.seek(0)
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.send_header("Content-Length", str(length))
        self.end_headers()
        return f

    def translate_path(self, path):
        """Translate a /-separated PATH to the local filename syntax.

        Components that mean special things to the local file system
        (e.g. drive or directory names) are ignored.  (XXX They should
        probably be diagnosed.)

        """
        path = posixpath.normpath(urllib.unquote(path))
        words = path.split('/')
        words = filter(None, words)
        # NOTE(review): drive letters, directory prefixes and '.'/'..'
        # components are silently dropped rather than rejected, so the
        # result always stays rooted at os.getcwd().
        path = os.getcwd()
        for word in words:
            drive, word = os.path.splitdrive(word)
            head, word = os.path.split(word)
            if word in (os.curdir, os.pardir): continue
            path = os.path.join(path, word)
        return path

    def copyfile(self, source, outputfile):
        """Copy all data between two file objects.

        The SOURCE argument is a file object open for reading
        (or anything with a read() method) and the DESTINATION
        argument is a file object open for writing (or
        anything with a write() method).

        The only reason for overriding this would be to change
        the block size or perhaps to replace newlines by CRLF
        -- note however that this the default server uses this
        to copy binary data as well.

        """
        shutil.copyfileobj(source, outputfile)

    def guess_type(self, path):
        """Guess the type of a file.

        Argument is a PATH (a filename).

        Return value is a string of the form type/subtype,
        usable for a MIME Content-type header.

        The default implementation looks the file's extension
        up in the table self.extensions_map, using application/octet-stream
        as a default; however it would be permissible (if
        slow) to look inside the data to make a better guess.

        """

        # Exact extension first, then case-insensitive, then the ''
        # catch-all default entry.
        base, ext = posixpath.splitext(path)
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        ext = ext.lower()
        if ext in self.extensions_map:
            return self.extensions_map[ext]
        else:
            return self.extensions_map['']

    # Class-level copy of the mimetypes table, extended with plain-text
    # handling for common source files and a catch-all default.
    extensions_map = mimetypes.types_map.copy()
    extensions_map.update({
        '': 'application/octet-stream', # Default
        '.py': 'text/plain',
        '.c': 'text/plain',
        '.h': 'text/plain',
        })
+
+
def test(HandlerClass = SimpleHTTPRequestHandler,
         ServerClass = BaseHTTPServer.HTTPServer):
    # Delegates to BaseHTTPServer.test (not shown here), which
    # presumably parses the command line for a port and serves forever
    # -- confirm against BaseHTTPServer.py.
    BaseHTTPServer.test(HandlerClass, ServerClass)


if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/SimpleXMLRPCServer.py b/depot_tools/release/win/python_24/Lib/SimpleXMLRPCServer.py
new file mode 100644
index 0000000..315ce84
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/SimpleXMLRPCServer.py
@@ -0,0 +1,536 @@
+"""Simple XML-RPC Server.
+
+This module can be used to create simple XML-RPC servers
+by creating a server and either installing functions, a
+class instance, or by extending the SimpleXMLRPCServer
+class.
+
+It can also be used to handle XML-RPC requests in a CGI
+environment using CGIXMLRPCRequestHandler.
+
+A list of possible usage patterns follows:
+
+1. Install functions:
+
+server = SimpleXMLRPCServer(("localhost", 8000))
+server.register_function(pow)
+server.register_function(lambda x,y: x+y, 'add')
+server.serve_forever()
+
+2. Install an instance:
+
+class MyFuncs:
+    def __init__(self):
+        # make all of the string functions available through
+        # string.func_name
+        import string
+        self.string = string
+    def _listMethods(self):
+        # implement this method so that system.listMethods
+        # knows to advertise the strings methods
+        return list_public_methods(self) + \
+                ['string.' + method for method in list_public_methods(self.string)]
+    def pow(self, x, y): return pow(x, y)
+    def add(self, x, y) : return x + y
+
+server = SimpleXMLRPCServer(("localhost", 8000))
+server.register_introspection_functions()
+server.register_instance(MyFuncs())
+server.serve_forever()
+
+3. Install an instance with custom dispatch method:
+
+class Math:
+    def _listMethods(self):
+        # this method must be present for system.listMethods
+        # to work
+        return ['add', 'pow']
+    def _methodHelp(self, method):
+        # this method must be present for system.methodHelp
+        # to work
+        if method == 'add':
+            return "add(2,3) => 5"
+        elif method == 'pow':
+            return "pow(x, y[, z]) => number"
+        else:
+            # By convention, return empty
+            # string if no help is available
+            return ""
+    def _dispatch(self, method, params):
+        if method == 'pow':
+            return pow(*params)
+        elif method == 'add':
+            return params[0] + params[1]
+        else:
+            raise 'bad method'
+
+server = SimpleXMLRPCServer(("localhost", 8000))
+server.register_introspection_functions()
+server.register_instance(Math())
+server.serve_forever()
+
+4. Subclass SimpleXMLRPCServer:
+
+class MathServer(SimpleXMLRPCServer):
+    def _dispatch(self, method, params):
+        try:
+            # We are forcing the 'export_' prefix on methods that are
+            # callable through XML-RPC to prevent potential security
+            # problems
+            func = getattr(self, 'export_' + method)
+        except AttributeError:
+            raise Exception('method "%s" is not supported' % method)
+        else:
+            return func(*params)
+
+    def export_add(self, x, y):
+        return x + y
+
+server = MathServer(("localhost", 8000))
+server.serve_forever()
+
+5. CGI script:
+
+server = CGIXMLRPCRequestHandler()
+server.register_function(pow)
+server.handle_request()
+"""
+
+# Written by Brian Quinlan (brian@sweetapp.com).
+# Based on code written by Fredrik Lundh.
+
+import xmlrpclib
+from xmlrpclib import Fault
+import SocketServer
+import BaseHTTPServer
+import sys
+import os
+
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
    """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d

    Walk a (possibly dotted) attribute path starting at obj.  Any path
    segment that begins with an underscore is treated as private and
    rejected with AttributeError.

    If the optional allow_dotted_names argument is false, the whole
    string is treated as a single attribute name, so the function then
    behaves like getattr(obj, attr).
    """

    if allow_dotted_names:
        segments = attr.split('.')
    else:
        segments = [attr]

    for segment in segments:
        if segment.startswith('_'):
            raise AttributeError(
                'attempt to access private attribute "%s"' % segment
                )
        obj = getattr(obj, segment)
    return obj
+
def list_public_methods(obj):
    """Return the names of obj's public callable attributes.

    "Public" means the name does not start with an underscore; the
    result preserves dir() order.
    """
    public = []
    for name in dir(obj):
        if name.startswith('_'):
            continue
        if callable(getattr(obj, name)):
            public.append(name)
    return public
+
def remove_duplicates(lst):
    """remove_duplicates([2,2,2,1,3,3]) => [3,1,2]

    Returns a copy of a list without duplicates. Every list
    item must be hashable and the order of the items in the
    resulting list is not defined.
    """
    # A set gives the same "unique, unordered, hashable items" contract
    # as the original hand-rolled dict, in a single C-level pass, and
    # always yields a real list (callers sort the result in place).
    return list(set(lst))
+
+class SimpleXMLRPCDispatcher:
+    """Mix-in class that dispatches XML-RPC requests.
+
+    This class is used to register XML-RPC method handlers
+    and then to dispatch them. There should never be any
+    reason to instantiate this class directly.
+    """
+
+    def __init__(self):
+        self.funcs = {}
+        self.instance = None
+
+    def register_instance(self, instance, allow_dotted_names=False):
+        """Registers an instance to respond to XML-RPC requests.
+
+        Only one instance can be installed at a time.
+
+        If the registered instance has a _dispatch method then that
+        method will be called with the name of the XML-RPC method and
+        it's parameters as a tuple
+        e.g. instance._dispatch('add',(2,3))
+
+        If the registered instance does not have a _dispatch method
+        then the instance will be searched to find a matching method
+        and, if found, will be called. Methods beginning with an '_'
+        are considered private and will not be called by
+        SimpleXMLRPCServer.
+
+        If a registered function matches a XML-RPC request, then it
+        will be called instead of the registered instance.
+
+        If the optional allow_dotted_names argument is true and the
+        instance does not have a _dispatch method, method names
+        containing dots are supported and resolved, as long as none of
+        the name segments start with an '_'.
+
+            *** SECURITY WARNING: ***
+
+            Enabling the allow_dotted_names options allows intruders
+            to access your module's global variables and may allow
+            intruders to execute arbitrary code on your machine.  Only
+            use this option on a secure, closed network.
+
+        """
+
+        self.instance = instance
+        self.allow_dotted_names = allow_dotted_names
+
+    def register_function(self, function, name = None):
+        """Registers a function to respond to XML-RPC requests.
+
+        The optional name argument can be used to set a Unicode name
+        for the function.
+        """
+
+        if name is None:
+            name = function.__name__
+        self.funcs[name] = function
+
+    def register_introspection_functions(self):
+        """Registers the XML-RPC introspection methods in the system
+        namespace.
+
+        see http://xmlrpc.usefulinc.com/doc/reserved.html
+        """
+
+        self.funcs.update({'system.listMethods' : self.system_listMethods,
+                      'system.methodSignature' : self.system_methodSignature,
+                      'system.methodHelp' : self.system_methodHelp})
+
+    def register_multicall_functions(self):
+        """Registers the XML-RPC multicall method in the system
+        namespace.
+
+        see http://www.xmlrpc.com/discuss/msgReader$1208"""
+
+        self.funcs.update({'system.multicall' : self.system_multicall})
+
    def _marshaled_dispatch(self, data, dispatch_method = None):
        """Dispatches an XML-RPC method from marshalled (XML) data.

        XML-RPC methods are dispatched from the marshalled (XML) data
        using the _dispatch method and the result is returned as
        marshalled data. For backwards compatibility, a dispatch
        function can be provided as an argument (see comment in
        SimpleXMLRPCRequestHandler.do_POST) but overriding the
        existing method through subclassing is the preferred means
        of changing method dispatch behavior.
        """

        params, method = xmlrpclib.loads(data)

        # generate response
        try:
            if dispatch_method is not None:
                response = dispatch_method(method, params)
            else:
                response = self._dispatch(method, params)
            # wrap response in a singleton tuple
            response = (response,)
            response = xmlrpclib.dumps(response, methodresponse=1)
        except Fault, fault:
            # An explicit Fault raised by the handler is marshalled as-is.
            response = xmlrpclib.dumps(fault)
        except:
            # Deliberate bare except: any other error is reported back to
            # the client as a generic Fault instead of crashing the server.
            # (sys.exc_type/exc_value are the Python 2-era accessors.)
            response = xmlrpclib.dumps(
                xmlrpclib.Fault(1, "%s:%s" % (sys.exc_type, sys.exc_value))
                )

        return response
+
+    def system_listMethods(self):
+        """system.listMethods() => ['add', 'subtract', 'multiple']
+
+        Returns a list of the methods supported by the server."""
+
+        methods = self.funcs.keys()
+        if self.instance is not None:
+            # Instance can implement _listMethod to return a list of
+            # methods
+            if hasattr(self.instance, '_listMethods'):
+                methods = remove_duplicates(
+                        methods + self.instance._listMethods()
+                    )
+            # if the instance has a _dispatch method then we
+            # don't have enough information to provide a list
+            # of methods
+            elif not hasattr(self.instance, '_dispatch'):
+                methods = remove_duplicates(
+                        methods + list_public_methods(self.instance)
+                    )
+        methods.sort()
+        return methods
+
+    def system_methodSignature(self, method_name):
+        """system.methodSignature('add') => [double, int, int]
+
+        Returns a list describing the signature of the method. In the
+        above example, the add method takes two integers as arguments
+        and returns a double result.
+
+        This server does NOT support system.methodSignature."""
+
+        # See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html
+
+        return 'signatures not supported'
+
+    def system_methodHelp(self, method_name):
+        """system.methodHelp('add') => "Adds two integers together"
+
+        Returns a string containing documentation for the specified method."""
+
+        method = None
+        if self.funcs.has_key(method_name):
+            method = self.funcs[method_name]
+        elif self.instance is not None:
+            # Instance can implement _methodHelp to return help for a method
+            if hasattr(self.instance, '_methodHelp'):
+                return self.instance._methodHelp(method_name)
+            # if the instance has a _dispatch method then we
+            # don't have enough information to provide help
+            elif not hasattr(self.instance, '_dispatch'):
+                try:
+                    method = resolve_dotted_attribute(
+                                self.instance,
+                                method_name,
+                                self.allow_dotted_names
+                                )
+                except AttributeError:
+                    pass
+
+        # Note that we aren't checking that the method actually
+        # be a callable object of some kind
+        if method is None:
+            return ""
+        else:
+            import pydoc
+            return pydoc.getdoc(method)
+
    def system_multicall(self, call_list):
        """system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
[[4], ...]

        Allows the caller to package multiple XML-RPC calls into a single
        request.

        See http://www.xmlrpc.com/discuss/msgReader$1208
        """

        results = []
        for call in call_list:
            method_name = call['methodName']
            params = call['params']

            try:
                # XXX A marshalling error in any response will fail the entire
                # multicall. If someone cares they should fix this.
                # Per the multicall convention, a successful result is
                # wrapped in a one-element list...
                results.append([self._dispatch(method_name, params)])
            except Fault, fault:
                # ...while a failure is reported as a fault struct.
                results.append(
                    {'faultCode' : fault.faultCode,
                     'faultString' : fault.faultString}
                    )
            except:
                # Deliberate bare except: any other exception becomes a
                # generic fault struct so the remaining calls still run.
                results.append(
                    {'faultCode' : 1,
                     'faultString' : "%s:%s" % (sys.exc_type, sys.exc_value)}
                    )
        return results
+
+    def _dispatch(self, method, params):
+        """Dispatches the XML-RPC method.
+
+        XML-RPC calls are forwarded to a registered function that
+        matches the called XML-RPC method name. If no such function
+        exists then the call is forwarded to the registered instance,
+        if available.
+
+        If the registered instance has a _dispatch method then that
+        method will be called with the name of the XML-RPC method and
+        it's parameters as a tuple
+        e.g. instance._dispatch('add',(2,3))
+
+        If the registered instance does not have a _dispatch method
+        then the instance will be searched to find a matching method
+        and, if found, will be called.
+
+        Methods beginning with an '_' are considered private and will
+        not be called.
+        """
+
+        func = None
+        try:
+            # check to see if a matching function has been registered
+            func = self.funcs[method]
+        except KeyError:
+            if self.instance is not None:
+                # check for a _dispatch method
+                if hasattr(self.instance, '_dispatch'):
+                    return self.instance._dispatch(method, params)
+                else:
+                    # call instance method directly
+                    try:
+                        func = resolve_dotted_attribute(
+                            self.instance,
+                            method,
+                            self.allow_dotted_names
+                            )
+                    except AttributeError:
+                        pass
+
+        if func is not None:
+            return func(*params)
+        else:
+            raise Exception('method "%s" is not supported' % method)
+
+class SimpleXMLRPCRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+    """Simple XML-RPC request handler class.
+
+    Handles all HTTP POST requests and attempts to decode them as
+    XML-RPC requests.
+    """
+
+    def do_POST(self):
+        """Handles the HTTP POST request.
+
+        Attempts to interpret all HTTP POST requests as XML-RPC calls,
+        which are forwarded to the server's _dispatch method for handling.
+        """
+
+        try:
+            # get arguments: read exactly the number of bytes announced
+            # by the Content-Length request header.
+            data = self.rfile.read(int(self.headers["content-length"]))
+            # In previous versions of SimpleXMLRPCServer, _dispatch
+            # could be overridden in this class, instead of in
+            # SimpleXMLRPCDispatcher. To maintain backwards compatibility,
+            # check to see if a subclass implements _dispatch and dispatch
+            # using that method if present.
+            response = self.server._marshaled_dispatch(
+                    data, getattr(self, '_dispatch', None)
+                )
+        except: # This should only happen if the module is buggy
+            # internal error, report as HTTP server error
+            self.send_response(500)
+            self.end_headers()
+        else:
+            # got a valid XML RPC response
+            self.send_response(200)
+            self.send_header("Content-type", "text/xml")
+            self.send_header("Content-length", str(len(response)))
+            self.end_headers()
+            self.wfile.write(response)
+
+            # shut down the connection: shutdown(1) closes the sending
+            # side so the client sees end-of-data after the flush.
+            self.wfile.flush()
+            self.connection.shutdown(1)
+
+    def log_request(self, code='-', size='-'):
+        """Selectively log an accepted request.
+
+        Delegates to the base handler's logging, but only when the owning
+        server was created with logRequests enabled.
+        """
+
+        if self.server.logRequests:
+            BaseHTTPServer.BaseHTTPRequestHandler.log_request(self, code, size)
+
+class SimpleXMLRPCServer(SocketServer.TCPServer,
+                         SimpleXMLRPCDispatcher):
+    """Simple XML-RPC server.
+
+    Simple XML-RPC server that allows functions and a single instance
+    to be installed to handle requests. The default implementation
+    attempts to dispatch XML-RPC calls to the functions or instance
+    installed in the server. Override the _dispatch method inherited
+    from SimpleXMLRPCDispatcher to change this behavior.
+    """
+
+    def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
+                 logRequests=1):
+        # Consulted by the request handler's log_request(); truthy by default.
+        self.logRequests = logRequests
+
+        # Initialize both bases explicitly.  TCPServer.__init__ binds and
+        # activates the listening socket, so it runs last.
+        SimpleXMLRPCDispatcher.__init__(self)
+        SocketServer.TCPServer.__init__(self, addr, requestHandler)
+
+class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
+    """Simple handler for XML-RPC data passed through CGI."""
+
+    def __init__(self):
+        SimpleXMLRPCDispatcher.__init__(self)
+
+    def handle_xmlrpc(self, request_text):
+        """Handle a single XML-RPC request.
+
+        The marshaled response is written to stdout, preceded by the
+        Content-Type and Content-Length headers CGI requires.
+        """
+
+        response = self._marshaled_dispatch(request_text)
+
+        print 'Content-Type: text/xml'
+        print 'Content-Length: %d' % len(response)
+        print
+        sys.stdout.write(response)
+
+    def handle_get(self):
+        """Handle a single HTTP GET request.
+
+        Default implementation indicates an error because
+        XML-RPC uses the POST method.
+        """
+
+        # 400 Bad Request; the message/explanation text comes from the
+        # standard response table in BaseHTTPServer.
+        code = 400
+        message, explain = \
+                 BaseHTTPServer.BaseHTTPRequestHandler.responses[code]
+
+        response = BaseHTTPServer.DEFAULT_ERROR_MESSAGE % \
+            {
+             'code' : code,
+             'message' : message,
+             'explain' : explain
+            }
+        print 'Status: %d %s' % (code, message)
+        print 'Content-Type: text/html'
+        print 'Content-Length: %d' % len(response)
+        print
+        sys.stdout.write(response)
+
+    def handle_request(self, request_text = None):
+        """Handle a single XML-RPC request passed through a CGI post method.
+
+        If no XML data is given then it is read from stdin. The resulting
+        XML-RPC response is printed to stdout along with the correct HTTP
+        headers.
+        """
+
+        # GET requests cannot carry an XML-RPC payload, so answer them
+        # with an error page instead of dispatching.
+        if request_text is None and \
+            os.environ.get('REQUEST_METHOD', None) == 'GET':
+            self.handle_get()
+        else:
+            # POST data is normally available through stdin
+            if request_text is None:
+                request_text = sys.stdin.read()
+
+            self.handle_xmlrpc(request_text)
+
+if __name__ == '__main__':
+    # Self-test/demo: expose the built-in pow() and an 'add' function on
+    # localhost:8000 and serve until interrupted.
+    server = SimpleXMLRPCServer(("localhost", 8000))
+    server.register_function(pow)
+    server.register_function(lambda x,y: x+y, 'add')
+    server.serve_forever()
diff --git a/depot_tools/release/win/python_24/Lib/SocketServer.py b/depot_tools/release/win/python_24/Lib/SocketServer.py
new file mode 100644
index 0000000..06ed134
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/SocketServer.py
@@ -0,0 +1,584 @@
+"""Generic socket server classes.
+
+This module tries to capture the various aspects of defining a server:
+
+For socket-based servers:
+
+- address family:
+        - AF_INET{,6}: IP (Internet Protocol) sockets (default)
+        - AF_UNIX: Unix domain sockets
+        - others, e.g. AF_DECNET, are conceivable (see <socket.h>)
+- socket type:
+        - SOCK_STREAM (reliable stream, e.g. TCP)
+        - SOCK_DGRAM (datagrams, e.g. UDP)
+
+For request-based servers (including socket-based):
+
+- client address verification before further looking at the request
+        (This is actually a hook for any processing that needs to look
+         at the request before anything else, e.g. logging)
+- how to handle multiple requests:
+        - synchronous (one request is handled at a time)
+        - forking (each request is handled by a new process)
+        - threading (each request is handled by a new thread)
+
+The classes in this module favor the server type that is simplest to
+write: a synchronous TCP/IP server.  This is bad class design, but
+saves some typing.  (There's also the issue that a deep class hierarchy
+slows down method lookups.)
+
+There are five classes in an inheritance diagram, four of which represent
+synchronous servers of four types:
+
+        +------------+
+        | BaseServer |
+        +------------+
+              |
+              v
+        +-----------+        +------------------+
+        | TCPServer |------->| UnixStreamServer |
+        +-----------+        +------------------+
+              |
+              v
+        +-----------+        +--------------------+
+        | UDPServer |------->| UnixDatagramServer |
+        +-----------+        +--------------------+
+
+Note that UnixDatagramServer derives from UDPServer, not from
+UnixStreamServer -- the only difference between an IP and a Unix
+stream server is the address family, which is simply repeated in both
+unix server classes.
+
+Forking and threading versions of each type of server can be created
+using the ForkingServer and ThreadingServer mix-in classes.  For
+instance, a threading UDP server class is created as follows:
+
+        class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
+
+The Mix-in class must come first, since it overrides a method defined
+in UDPServer! Setting the various member variables also changes
+the behavior of the underlying server mechanism.
+
+To implement a service, you must derive a class from
+BaseRequestHandler and redefine its handle() method.  You can then run
+various versions of the service by combining one of the server classes
+with your request handler class.
+
+The request handler class must be different for datagram or stream
+services.  This can be hidden by using the mix-in request handler
+classes StreamRequestHandler or DatagramRequestHandler.
+
+Of course, you still have to use your head!
+
+For instance, it makes no sense to use a forking server if the service
+contains state in memory that can be modified by requests (since the
+modifications in the child process would never reach the initial state
+kept in the parent process and passed to each child).  In this case,
+you can use a threading server, but you will probably have to use
+locks to avoid two requests that come in nearly simultaneously applying
+conflicting changes to the server state.
+
+On the other hand, if you are building e.g. an HTTP server, where all
+data is stored externally (e.g. in the file system), a synchronous
+class will essentially render the service "deaf" while one request is
+being handled -- which may be for a very long time if a client is slow
+to read all the data it has requested.  Here a threading or forking
+server is appropriate.
+
+In some cases, it may be appropriate to process part of a request
+synchronously, but to finish processing in a forked child depending on
+the request data.  This can be implemented by using a synchronous
+server and doing an explicit fork in the request handler class
+handle() method.
+
+Another approach to handling multiple simultaneous requests in an
+environment that supports neither threads nor fork (or where these are
+too expensive or inappropriate for the service) is to maintain an
+explicit table of partially finished requests and to use select() to
+decide which request to work on next (or whether to handle a new
+incoming request).  This is particularly important for stream services
+where each client can potentially be connected for a long time (if
+threads or subprocesses cannot be used).
+
+Future work:
+- Standard classes for Sun RPC (which uses either UDP or TCP)
+- Standard mix-in classes to implement various authentication
+  and encryption schemes
+- Standard framework for select-based multiplexing
+
+XXX Open problems:
+- What to do with out-of-band data?
+
+BaseServer:
+- split generic "request" functionality out into BaseServer class.
+  Copyright (C) 2000  Luke Kenneth Casson Leighton <lkcl@samba.org>
+
+  example: read entries from a SQL database (requires overriding
+  get_request() to return a table entry from the database).
+  entry is processed by a RequestHandlerClass.
+
+"""
+
+# Author of the BaseServer patch: Luke Kenneth Casson Leighton
+
+# XXX Warning!
+# There is a test suite for this module, but it cannot be run by the
+# standard regression test.
+# To run it manually, run Lib/test/test_socketserver.py.
+
+__version__ = "0.4"
+
+
+import socket
+import sys
+import os
+
+__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer",
+           "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler",
+           "StreamRequestHandler","DatagramRequestHandler",
+           "ThreadingMixIn", "ForkingMixIn"]
+if hasattr(socket, "AF_UNIX"):
+    __all__.extend(["UnixStreamServer","UnixDatagramServer",
+                    "ThreadingUnixStreamServer",
+                    "ThreadingUnixDatagramServer"])
+
+class BaseServer:
+
+    """Base class for server classes.
+
+    Methods for the caller:
+
+    - __init__(server_address, RequestHandlerClass)
+    - serve_forever()
+    - handle_request()  # if you do not use serve_forever()
+    - fileno() -> int   # for select()
+
+    Methods that may be overridden:
+
+    - server_bind()
+    - server_activate()
+    - get_request() -> request, client_address
+    - verify_request(request, client_address)
+    - server_close()
+    - process_request(request, client_address)
+    - close_request(request)
+    - handle_error()
+
+    Methods for derived classes:
+
+    - finish_request(request, client_address)
+
+    Class variables that may be overridden by derived classes or
+    instances:
+
+    - address_family
+    - socket_type
+    - allow_reuse_address
+
+    Instance variables:
+
+    - RequestHandlerClass
+    - socket
+
+    """
+
+    def __init__(self, server_address, RequestHandlerClass):
+        """Constructor.  May be extended, do not override."""
+        self.server_address = server_address
+        self.RequestHandlerClass = RequestHandlerClass
+
+    def server_activate(self):
+        """Called by constructor to activate the server.
+
+        May be overridden.
+
+        """
+        pass
+
+    def serve_forever(self):
+        """Handle one request at a time until doomsday."""
+        while 1:
+            self.handle_request()
+
+    # The distinction between handling, getting, processing and
+    # finishing a request is fairly arbitrary.  Remember:
+    #
+    # - handle_request() is the top-level call.  It calls
+    #   get_request(), verify_request() and process_request()
+    # - get_request() is different for stream or datagram sockets
+    # - process_request() is the place that may fork a new process
+    #   or create a new thread to finish the request
+    # - finish_request() instantiates the request handler class;
+    #   this constructor will handle the request all by itself
+
+    def handle_request(self):
+        """Handle one request, possibly blocking."""
+        try:
+            request, client_address = self.get_request()
+        except socket.error:
+            # The underlying accept()/recvfrom() failed; nothing to
+            # handle, so return and let the caller try again.
+            return
+        if self.verify_request(request, client_address):
+            try:
+                self.process_request(request, client_address)
+            except:
+                # Report the failure via handle_error() and close the
+                # request; the server itself keeps running.
+                self.handle_error(request, client_address)
+                self.close_request(request)
+
+    def verify_request(self, request, client_address):
+        """Verify the request.  May be overridden.
+
+        Return True if we should proceed with this request.
+
+        """
+        return True
+
+    def process_request(self, request, client_address):
+        """Call finish_request.
+
+        Overridden by ForkingMixIn and ThreadingMixIn.
+
+        """
+        self.finish_request(request, client_address)
+        self.close_request(request)
+
+    def server_close(self):
+        """Called to clean-up the server.
+
+        May be overridden.
+
+        """
+        pass
+
+    def finish_request(self, request, client_address):
+        """Finish one request by instantiating RequestHandlerClass."""
+        # The handler's constructor does all the work (see
+        # BaseRequestHandler below).
+        self.RequestHandlerClass(request, client_address, self)
+
+    def close_request(self, request):
+        """Called to clean up an individual request."""
+        pass
+
+    def handle_error(self, request, client_address):
+        """Handle an error gracefully.  May be overridden.
+
+        The default is to print a traceback and continue.
+
+        """
+        print '-'*40
+        print 'Exception happened during processing of request from',
+        print client_address
+        import traceback
+        traceback.print_exc() # XXX But this goes to stderr!
+        print '-'*40
+
+
+class TCPServer(BaseServer):
+
+    """Base class for various socket-based server classes.
+
+    Defaults to synchronous IP stream (i.e., TCP).
+
+    Methods for the caller:
+
+    - __init__(server_address, RequestHandlerClass)
+    - serve_forever()
+    - handle_request()  # if you don't use serve_forever()
+    - fileno() -> int   # for select()
+
+    Methods that may be overridden:
+
+    - server_bind()
+    - server_activate()
+    - get_request() -> request, client_address
+    - verify_request(request, client_address)
+    - process_request(request, client_address)
+    - close_request(request)
+    - handle_error()
+
+    Methods for derived classes:
+
+    - finish_request(request, client_address)
+
+    Class variables that may be overridden by derived classes or
+    instances:
+
+    - address_family
+    - socket_type
+    - request_queue_size (only for stream sockets)
+    - allow_reuse_address
+
+    Instance variables:
+
+    - server_address
+    - RequestHandlerClass
+    - socket
+
+    """
+
+    address_family = socket.AF_INET
+
+    socket_type = socket.SOCK_STREAM
+
+    # Backlog passed to listen() in server_activate().
+    request_queue_size = 5
+
+    # When true, server_bind() sets SO_REUSEADDR before binding.
+    allow_reuse_address = False
+
+    def __init__(self, server_address, RequestHandlerClass):
+        """Constructor.  May be extended, do not override."""
+        BaseServer.__init__(self, server_address, RequestHandlerClass)
+        self.socket = socket.socket(self.address_family,
+                                    self.socket_type)
+        # The socket is bound and put into listening state here, so the
+        # server is ready to accept as soon as construction finishes.
+        self.server_bind()
+        self.server_activate()
+
+    def server_bind(self):
+        """Called by constructor to bind the socket.
+
+        May be overridden.
+
+        """
+        if self.allow_reuse_address:
+            self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        self.socket.bind(self.server_address)
+
+    def server_activate(self):
+        """Called by constructor to activate the server.
+
+        May be overridden.
+
+        """
+        self.socket.listen(self.request_queue_size)
+
+    def server_close(self):
+        """Called to clean-up the server.
+
+        May be overridden.
+
+        """
+        self.socket.close()
+
+    def fileno(self):
+        """Return socket file number.
+
+        Interface required by select().
+
+        """
+        return self.socket.fileno()
+
+    def get_request(self):
+        """Get the request and client address from the socket.
+
+        May be overridden.
+
+        """
+        # For stream servers the "request" object is the connected socket.
+        return self.socket.accept()
+
+    def close_request(self, request):
+        """Called to clean up an individual request."""
+        request.close()
+
+
+class UDPServer(TCPServer):
+
+    """UDP server class."""
+
+    allow_reuse_address = False
+
+    socket_type = socket.SOCK_DGRAM
+
+    max_packet_size = 8192
+
+    def get_request(self):
+        data, client_addr = self.socket.recvfrom(self.max_packet_size)
+        return (data, self.socket), client_addr
+
+    def server_activate(self):
+        # No need to call listen() for UDP.
+        pass
+
+    def close_request(self, request):
+        # No need to close anything.
+        pass
+
+class ForkingMixIn:
+
+    """Mix-in class to handle each request in a new process."""
+
+    # List of live child pids; created lazily on the first fork.
+    active_children = None
+    # Upper bound on concurrent children; reaching it makes
+    # collect_children() block until one exits.
+    max_children = 40
+
+    def collect_children(self):
+        """Internal routine to wait for died children."""
+        while self.active_children:
+            if len(self.active_children) < self.max_children:
+                # Below the limit: reap opportunistically without blocking.
+                options = os.WNOHANG
+            else:
+                # If the maximum number of children are already
+                # running, block while waiting for a child to exit
+                options = 0
+            try:
+                # waitpid(0, ...) waits for any child of this process.
+                pid, status = os.waitpid(0, options)
+            except os.error:
+                pid = None
+            if not pid: break
+            self.active_children.remove(pid)
+
+    def process_request(self, request, client_address):
+        """Fork a new subprocess to process the request."""
+        self.collect_children()
+        pid = os.fork()
+        if pid:
+            # Parent process
+            if self.active_children is None:
+                self.active_children = []
+            self.active_children.append(pid)
+            # The child owns the request now; close the parent's copy.
+            self.close_request(request)
+            return
+        else:
+            # Child process.
+            # This must never return, hence os._exit()!
+            try:
+                self.finish_request(request, client_address)
+                os._exit(0)
+            except:
+                try:
+                    self.handle_error(request, client_address)
+                finally:
+                    os._exit(1)
+
+
+class ThreadingMixIn:
+    """Mix-in class to handle each request in a new thread."""
+
+    # Decides how threads will act upon termination of the
+    # main process
+    daemon_threads = False
+
+    def process_request_thread(self, request, client_address):
+        """Same as in BaseServer but as a thread.
+
+        In addition, exception handling is done here.
+
+        """
+        try:
+            self.finish_request(request, client_address)
+            self.close_request(request)
+        except:
+            # Report the failure and make sure the request still gets
+            # closed on the error path.
+            self.handle_error(request, client_address)
+            self.close_request(request)
+
+    def process_request(self, request, client_address):
+        """Start a new thread to process the request."""
+        import threading
+        t = threading.Thread(target = self.process_request_thread,
+                             args = (request, client_address))
+        if self.daemon_threads:
+            # Daemon threads are terminated abruptly when the main
+            # thread exits.
+            t.setDaemon (1)
+        t.start()
+
+
+# Ready-made concurrent servers.  The mix-in is listed first so its
+# process_request() overrides the synchronous one from the server class.
+class ForkingUDPServer(ForkingMixIn, UDPServer): pass
+class ForkingTCPServer(ForkingMixIn, TCPServer): pass
+
+class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass
+class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass
+
+if hasattr(socket, 'AF_UNIX'):
+
+    # Unix-domain variants differ from their IP counterparts only in the
+    # address family; they are defined only where AF_UNIX is available.
+    class UnixStreamServer(TCPServer):
+        address_family = socket.AF_UNIX
+
+    class UnixDatagramServer(UDPServer):
+        address_family = socket.AF_UNIX
+
+    class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass
+
+    class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass
+
+class BaseRequestHandler:
+
+    """Base class for request handler classes.
+
+    This class is instantiated for each request to be handled.  The
+    constructor sets the instance variables request, client_address
+    and server, and then calls the handle() method.  To implement a
+    specific service, all you need to do is to derive a class which
+    defines a handle() method.
+
+    The handle() method can find the request as self.request, the
+    client address as self.client_address, and the server (in case it
+    needs access to per-server information) as self.server.  Since a
+    separate instance is created for each request, the handle() method
+    can define arbitrary other instance variables.
+
+    """
+
+    def __init__(self, request, client_address, server):
+        self.request = request
+        self.client_address = client_address
+        self.server = server
+        try:
+            self.setup()
+            self.handle()
+            self.finish()
+        finally:
+            sys.exc_traceback = None    # Help garbage collection
+
+    def setup(self):
+        # Hook for per-request initialization; overridden by subclasses.
+        pass
+
+    def handle(self):
+        # Subclasses must override this to implement the actual service.
+        pass
+
+    def finish(self):
+        # Hook for cleanup after handle(); note it is not reached when
+        # setup() or handle() raises.
+        pass
+
+
+# The following two classes make it possible to use the same service
+# class for stream or datagram servers.
+# Each class sets up these instance variables:
+# - rfile: a file object from which the request is read
+# - wfile: a file object to which the reply is written
+# When the handle() method returns, wfile is flushed properly
+
+
+class StreamRequestHandler(BaseRequestHandler):
+
+    """Define self.rfile and self.wfile for stream sockets."""
+
+    # Default buffer sizes for rfile, wfile.
+    # We default rfile to buffered because otherwise it could be
+    # really slow for large data (a getc() call per byte); we make
+    # wfile unbuffered because (a) often after a write() we want to
+    # read and we need to flush the line; (b) big writes to unbuffered
+    # files are typically optimized by stdio even when big reads
+    # aren't.
+    rbufsize = -1
+    wbufsize = 0
+
+    def setup(self):
+        # For stream servers the request object is the connected socket.
+        self.connection = self.request
+        self.rfile = self.connection.makefile('rb', self.rbufsize)
+        self.wfile = self.connection.makefile('wb', self.wbufsize)
+
+    def finish(self):
+        # Flush any buffered output before closing both file wrappers.
+        if not self.wfile.closed:
+            self.wfile.flush()
+        self.wfile.close()
+        self.rfile.close()
+
+
+class DatagramRequestHandler(BaseRequestHandler):
+
+    # XXX Regrettably, I cannot get this working on Linux;
+    # s.recvfrom() doesn't return a meaningful client address.
+
+    """Define self.rfile and self.wfile for datagram sockets."""
+
+    def setup(self):
+        import StringIO
+        # For datagram servers the request is a (data, socket) pair;
+        # wrap the payload in file-like objects for the handler.
+        self.packet, self.socket = self.request
+        self.rfile = StringIO.StringIO(self.packet)
+        self.wfile = StringIO.StringIO()
+
+    def finish(self):
+        # Whatever the handler wrote becomes the single reply datagram.
+        self.socket.sendto(self.wfile.getvalue(), self.client_address)
diff --git a/depot_tools/release/win/python_24/Lib/StringIO.py b/depot_tools/release/win/python_24/Lib/StringIO.py
new file mode 100644
index 0000000..5c463fb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/StringIO.py
@@ -0,0 +1,324 @@
+r"""File-like objects that read from or write to a string buffer.
+
+This implements (nearly) all stdio methods.
+
+f = StringIO()      # ready for writing
+f = StringIO(buf)   # ready for reading
+f.close()           # explicitly release resources held
+flag = f.isatty()   # always false
+pos = f.tell()      # get current position
+f.seek(pos)         # set current position
+f.seek(pos, mode)   # mode 0: absolute; 1: relative; 2: relative to EOF
+buf = f.read()      # read until EOF
+buf = f.read(n)     # read up to n bytes
+buf = f.readline()  # read until end of line ('\n') or EOF
+list = f.readlines()# list of f.readline() results until EOF
f.truncate([size])  # truncate file to at most size (default: current pos)
+f.write(buf)        # write at current position
+f.writelines(list)  # for line in list: f.write(line)
+f.getvalue()        # return whole file's contents as a string
+
+Notes:
+- Using a real file is often faster (but less convenient).
+- There's also a much faster implementation in C, called cStringIO, but
+  it's not subclassable.
+- fileno() is left unimplemented so that code which uses it triggers
+  an exception early.
+- Seeking far beyond EOF and then writing will insert real null
+  bytes that occupy space in the buffer.
+- There's a simple test set (see end of this file).
+"""
+try:
+    from errno import EINVAL
+except ImportError:
+    EINVAL = 22
+
+__all__ = ["StringIO"]
+
+def _complain_ifclosed(closed):
+    if closed:
+        raise ValueError, "I/O operation on closed file"
+
class StringIO:
    """class StringIO([buffer])

    When a StringIO object is created, it can be initialized to an existing
    string by passing the string to the constructor. If no string is given,
    the StringIO will start empty.

    The StringIO object can accept either Unicode or 8-bit strings, but
    mixing the two may take some care. If both are used, 8-bit strings that
    cannot be interpreted as 7-bit ASCII (that use the 8th bit) will cause
    a UnicodeError to be raised when getvalue() is called.
    """
    # Invariant: the logical contents are self.buf + ''.join(self.buflist).
    # Appends at EOF go to self.buflist and are only joined into self.buf
    # lazily (in seek/read/readline/getvalue), keeping repeated appends
    # amortized O(1) instead of quadratic string concatenation.
    def __init__(self, buf = ''):
        # Force self.buf to be a string or unicode
        if not isinstance(buf, basestring):
            buf = str(buf)
        self.buf = buf
        self.len = len(buf)
        self.buflist = []
        self.pos = 0
        self.closed = False
        self.softspace = 0

    def __iter__(self):
        return self

    def next(self):
        """A file object is its own iterator, for example iter(f) returns f
        (unless f is closed). When a file is used as an iterator, typically
        in a for loop (for example, for line in f: print line), the next()
        method is called repeatedly. This method returns the next input line,
        or raises StopIteration when EOF is hit.
        """
        # NOTE(review): a closed object raises StopIteration here instead of
        # the ValueError the other methods raise -- confirm against real
        # file-object behaviour before relying on it.
        if self.closed:
            raise StopIteration
        r = self.readline()
        if not r:
            raise StopIteration
        return r

    def close(self):
        """Free the memory buffer.
        """
        if not self.closed:
            self.closed = True
            # Dropping buf/pos makes any later access fail loudly.
            del self.buf, self.pos

    def isatty(self):
        """Returns False because StringIO objects are not connected to a
        tty-like device.
        """
        _complain_ifclosed(self.closed)
        return False

    def seek(self, pos, mode = 0):
        """Set the file's current position.

        The mode argument is optional and defaults to 0 (absolute file
        positioning); other values are 1 (seek relative to the current
        position) and 2 (seek relative to the file's end).

        There is no return value.
        """
        _complain_ifclosed(self.closed)
        # Coalesce pending appended fragments so positions index one string.
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if mode == 1:
            pos += self.pos
        elif mode == 2:
            pos += self.len
        # Negative results clamp to 0; seeking beyond EOF is allowed.
        self.pos = max(0, pos)

    def tell(self):
        """Return the file's current position."""
        _complain_ifclosed(self.closed)
        return self.pos

    def read(self, n = -1):
        """Read at most size bytes from the file
        (less if the read hits EOF before obtaining size bytes).

        If the size argument is negative or omitted, read all data until EOF
        is reached. The bytes are returned as a string object. An empty
        string is returned when EOF is encountered immediately.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        if n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos+n, self.len)
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readline(self, length=None):
        """Read one entire line from the file.

        A trailing newline character is kept in the string (but may be absent
        when a file ends with an incomplete line). If the size argument is
        present and non-negative, it is a maximum byte count (including the
        trailing newline) and an incomplete line may be returned.

        An empty string is returned only when EOF is encountered immediately.

        Note: Unlike stdio's fgets(), the returned string contains null
        characters ('\0') if they occurred in the input.
        """
        _complain_ifclosed(self.closed)
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        i = self.buf.find('\n', self.pos)
        if i < 0:
            # No newline before EOF: return the remainder of the buffer.
            newpos = self.len
        else:
            newpos = i+1
        if length is not None:
            if self.pos + length < newpos:
                newpos = self.pos + length
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readlines(self, sizehint = 0):
        """Read until EOF using readline() and return a list containing the
        lines thus read.

        If the optional sizehint argument is present, instead of reading up
        to EOF, whole lines totalling approximately sizehint bytes (or more
        to accommodate a final whole line).
        """
        # No closed-file check here: readline() performs it on first call.
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines

    def truncate(self, size=None):
        """Truncate the file's size.

        If the optional size argument is present, the file is truncated to
        (at most) that size. The size defaults to the current position.
        The current file position is not changed unless the position
        is beyond the new file size.

        If the specified size exceeds the file's current size, the
        file remains unchanged.
        """
        _complain_ifclosed(self.closed)
        if size is None:
            size = self.pos
        elif size < 0:
            raise IOError(EINVAL, "Negative size not allowed")
        elif size < self.pos:
            self.pos = size
        # NOTE(review): when size exceeds the real length, self.len is set
        # past the end of the data although the docstring promises no
        # change; this matches the upstream implementation -- confirm
        # before relying on len after an enlarging truncate.
        self.buf = self.getvalue()[:size]
        self.len = size

    def write(self, s):
        """Write a string to the file.

        There is no return value.
        """
        _complain_ifclosed(self.closed)
        if not s: return
        # Force s to be a string or unicode
        if not isinstance(s, basestring):
            s = str(s)
        spos = self.pos
        slen = self.len
        if spos == slen:
            # Common case: appending at EOF -- defer the expensive join.
            self.buflist.append(s)
            self.len = self.pos = spos + len(s)
            return
        if spos > slen:
            # Writing past EOF pads the gap with NUL bytes, like stdio.
            self.buflist.append('\0'*(spos - slen))
            slen = spos
        newpos = spos + len(s)
        if spos < slen:
            # Overwrite in the middle: splice s into the coalesced buffer.
            if self.buflist:
                self.buf += ''.join(self.buflist)
            self.buflist = [self.buf[:spos], s, self.buf[newpos:]]
            self.buf = ''
            if newpos > slen:
                slen = newpos
        else:
            self.buflist.append(s)
            slen = newpos
        self.len = slen
        self.pos = newpos

    def writelines(self, iterable):
        """Write a sequence of strings to the file. The sequence can be any
        iterable object producing strings, typically a list of strings. There
        is no return value.

        (The name is intended to match readlines(); writelines() does not add
        line separators.)
        """
        write = self.write
        for line in iterable:
            write(line)

    def flush(self):
        """Flush the internal buffer
        """
        _complain_ifclosed(self.closed)

    def getvalue(self):
        """
        Retrieve the entire contents of the "file" at any time before
        the StringIO object's close() method is called.

        The StringIO object can accept either Unicode or 8-bit strings,
        but mixing the two may take some care. If both are used, 8-bit
        strings that cannot be interpreted as 7-bit ASCII (that use the
        8th bit) will cause a UnicodeError to be raised when getvalue()
        is called.
        """
        if self.buflist:
            self.buf += ''.join(self.buflist)
            self.buflist = []
        return self.buf
+
+
+# A little test suite
+
def test():
    """Exercise StringIO against a real text file (default /etc/passwd).

    Checks write/writelines round-tripping, seek in all three modes,
    readline/readlines, and truncate, raising RuntimeError on mismatch.
    """
    import sys
    if sys.argv[1:]:
        file = sys.argv[1]
    else:
        file = '/etc/passwd'
    lines = open(file, 'r').readlines()
    text = open(file, 'r').read()
    f = StringIO()
    for line in lines[:-2]:
        f.write(line)
    f.writelines(lines[-2:])
    if f.getvalue() != text:
        raise RuntimeError, 'write failed'
    length = f.tell()
    print 'File length =', length
    # Overwrite the second line with identical content to exercise
    # mid-buffer writes without changing the expected data.
    f.seek(len(lines[0]))
    f.write(lines[1])
    f.seek(0)
    print 'First line =', repr(f.readline())
    print 'Position =', f.tell()
    line = f.readline()
    print 'Second line =', repr(line)
    # Relative seek back, then re-read and compare.
    f.seek(-len(line), 1)
    line2 = f.read(len(line))
    if line != line2:
        raise RuntimeError, 'bad result after seek back'
    f.seek(len(line2), 1)
    list = f.readlines()
    line = list[-1]
    f.seek(f.tell() - len(line))
    line2 = f.read()
    if line != line2:
        raise RuntimeError, 'bad result after seek back from EOF'
    print 'Read', len(list), 'more lines'
    print 'File length =', f.tell()
    if f.tell() != length:
        raise RuntimeError, 'bad length'
    f.truncate(length/2)
    # Seek to EOF (mode 2) to observe the truncated length.
    f.seek(0, 2)
    print 'Truncated length =', f.tell()
    if f.tell() != length/2:
        raise RuntimeError, 'truncate did not adjust length'
    f.close()

if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/UserDict.py b/depot_tools/release/win/python_24/Lib/UserDict.py
new file mode 100644
index 0000000..703f118
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/UserDict.py
@@ -0,0 +1,170 @@
+"""A more or less complete user-defined wrapper around dictionary objects."""
+
class UserDict:
    """Dict-like wrapper class; the real mapping is kept in self.data."""
    def __init__(self, dict=None, **kwargs):
        self.data = {}
        if dict is not None:
            self.update(dict)
        if len(kwargs):
            self.update(kwargs)
    def __repr__(self): return repr(self.data)
    def __cmp__(self, dict):
        # Compare underlying dicts when given another UserDict.
        if isinstance(dict, UserDict):
            return cmp(self.data, dict.data)
        else:
            return cmp(self.data, dict)
    def __len__(self): return len(self.data)
    def __getitem__(self, key): return self.data[key]
    def __setitem__(self, key, item): self.data[key] = item
    def __delitem__(self, key): del self.data[key]
    def clear(self): self.data.clear()
    def copy(self):
        if self.__class__ is UserDict:
            return UserDict(self.data.copy())
        import copy
        data = self.data
        try:
            # Temporarily detach self.data so copy.copy() does not
            # duplicate the (possibly large) dictionary; the copy is
            # re-populated via update() below.
            self.data = {}
            c = copy.copy(self)
        finally:
            self.data = data
        c.update(self)
        return c
    def keys(self): return self.data.keys()
    def items(self): return self.data.items()
    def iteritems(self): return self.data.iteritems()
    def iterkeys(self): return self.data.iterkeys()
    def itervalues(self): return self.data.itervalues()
    def values(self): return self.data.values()
    def has_key(self, key): return self.data.has_key(key)
    def update(self, dict=None, **kwargs):
        if dict is None:
            pass
        elif isinstance(dict, UserDict):
            self.data.update(dict.data)
        # Real dicts (and objects without items()) can be fed straight
        # to dict.update().
        elif isinstance(dict, type({})) or not hasattr(dict, 'items'):
            self.data.update(dict)
        else:
            # Other mappings: go through self[k] = v so a subclass's
            # overridden __setitem__ is honoured.
            for k, v in dict.items():
                self[k] = v
        if len(kwargs):
            self.data.update(kwargs)
    def get(self, key, failobj=None):
        if not self.has_key(key):
            return failobj
        return self[key]
    def setdefault(self, key, failobj=None):
        if not self.has_key(key):
            self[key] = failobj
        return self[key]
    def pop(self, key, *args):
        return self.data.pop(key, *args)
    def popitem(self):
        return self.data.popitem()
    def __contains__(self, key):
        return key in self.data
    def fromkeys(cls, iterable, value=None):
        # Alternate constructor: new instance with the given keys all
        # mapped to value.
        d = cls()
        for key in iterable:
            d[key] = value
        return d
    # Bound as a classmethod after the fact (pre-decorator style).
    fromkeys = classmethod(fromkeys)
+
class IterableUserDict(UserDict):
    """A UserDict that additionally supports direct iteration over its keys."""

    def __iter__(self):
        # Iterating the wrapped dict yields its keys, matching dict semantics.
        return self.data.__iter__()
+
class DictMixin:
    """Mixin supplying the full dictionary interface to classes that
    define at least __getitem__(), __setitem__(), __delitem__() and keys().
    """
    # Mixin defining all dictionary methods for classes that already have
    # a minimum dictionary interface including getitem, setitem, delitem,
    # and keys. Without knowledge of the subclass constructor, the mixin
    # does not define __init__() or copy().  In addition to the four base
    # methods, progressively more efficiency comes with defining
    # __contains__(), __iter__(), and iteritems().

    # second level definitions support higher levels
    def __iter__(self):
        for k in self.keys():
            yield k
    def has_key(self, key):
        # EAFP: a failed lookup means the key is absent.
        try:
            value = self[key]
        except KeyError:
            return False
        return True
    def __contains__(self, key):
        return self.has_key(key)

    # third level takes advantage of second level definitions
    def iteritems(self):
        for k in self:
            yield (k, self[k])
    def iterkeys(self):
        return self.__iter__()

    # fourth level uses definitions from lower levels
    def itervalues(self):
        for _, v in self.iteritems():
            yield v
    def values(self):
        return [v for _, v in self.iteritems()]
    def items(self):
        return list(self.iteritems())
    def clear(self):
        for key in self.keys():
            del self[key]
    def setdefault(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            self[key] = default
        return default
    def pop(self, key, *args):
        if len(args) > 1:
            # 1 + len(args) counts the key argument in the total.
            raise TypeError, "pop expected at most 2 arguments, got "\
                              + repr(1 + len(args))
        try:
            value = self[key]
        except KeyError:
            if args:
                return args[0]
            raise
        del self[key]
        return value
    def popitem(self):
        # Remove and return an arbitrary (key, value) pair.
        try:
            k, v = self.iteritems().next()
        except StopIteration:
            raise KeyError, 'container is empty'
        del self[k]
        return (k, v)
    def update(self, other=None, **kwargs):
        # Make progressively weaker assumptions about "other"
        if other is None:
            pass
        elif hasattr(other, 'iteritems'):  # iteritems saves memory and lookups
            for k, v in other.iteritems():
                self[k] = v
        elif hasattr(other, 'keys'):
            for k in other.keys():
                self[k] = other[k]
        else:
            # Fall back to an iterable of (key, value) pairs.
            for k, v in other:
                self[k] = v
        if kwargs:
            self.update(kwargs)
    def get(self, key, default=None):
        try:
            return self[key]
        except KeyError:
            return default
    def __repr__(self):
        return repr(dict(self.iteritems()))
    def __cmp__(self, other):
        # Any mapping compares greater than None.
        if other is None:
            return 1
        if isinstance(other, DictMixin):
            other = dict(other.iteritems())
        return cmp(dict(self.iteritems()), other)
    def __len__(self):
        return len(self.keys())
diff --git a/depot_tools/release/win/python_24/Lib/UserList.py b/depot_tools/release/win/python_24/Lib/UserList.py
new file mode 100644
index 0000000..072f6a7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/UserList.py
@@ -0,0 +1,85 @@
+"""A more or less complete user-defined wrapper around list objects."""
+
class UserList:
    """List-like wrapper class; the wrapped list is kept in self.data."""

    def __init__(self, initlist=None):
        self.data = []
        if initlist is None:
            return
        # XXX should this accept an arbitrary sequence?
        if type(initlist) == type(self.data):
            self.data[:] = initlist
        elif isinstance(initlist, UserList):
            self.data[:] = initlist.data[:]
        else:
            self.data = list(initlist)

    def __unwrap(self, other):
        # Comparisons operate on the underlying list of a UserList peer.
        if isinstance(other, UserList):
            return other.data
        return other

    def __aslist(self, other):
        # Return a plain list carrying other's items (unchanged when it
        # already is one).
        if isinstance(other, UserList):
            return other.data
        if isinstance(other, type(self.data)):
            return other
        return list(other)

    def __repr__(self):
        return repr(self.data)

    def __lt__(self, other):
        return self.data < self.__unwrap(other)

    def __le__(self, other):
        return self.data <= self.__unwrap(other)

    def __eq__(self, other):
        return self.data == self.__unwrap(other)

    def __ne__(self, other):
        return self.data != self.__unwrap(other)

    def __gt__(self, other):
        return self.data > self.__unwrap(other)

    def __ge__(self, other):
        return self.data >= self.__unwrap(other)

    def __cmp__(self, other):
        return cmp(self.data, self.__unwrap(other))

    def __contains__(self, item):
        return item in self.data

    def __len__(self):
        return len(self.data)

    def __getitem__(self, i):
        return self.data[i]

    def __setitem__(self, i, item):
        self.data[i] = item

    def __delitem__(self, i):
        del self.data[i]

    def __getslice__(self, i, j):
        # Old-style slice protocol: clamp negatives, wrap the result.
        i = max(i, 0)
        j = max(j, 0)
        return self.__class__(self.data[i:j])

    def __setslice__(self, i, j, other):
        i = max(i, 0)
        j = max(j, 0)
        self.data[i:j] = self.__aslist(other)

    def __delslice__(self, i, j):
        i = max(i, 0)
        j = max(j, 0)
        del self.data[i:j]

    def __add__(self, other):
        return self.__class__(self.data + self.__aslist(other))

    def __radd__(self, other):
        return self.__class__(self.__aslist(other) + self.data)

    def __iadd__(self, other):
        self.data += self.__aslist(other)
        return self

    def __mul__(self, n):
        return self.__class__(self.data * n)
    __rmul__ = __mul__

    def __imul__(self, n):
        self.data *= n
        return self

    def append(self, item):
        self.data.append(item)

    def insert(self, i, item):
        self.data.insert(i, item)

    def pop(self, i=-1):
        return self.data.pop(i)

    def remove(self, item):
        self.data.remove(item)

    def count(self, item):
        return self.data.count(item)

    def index(self, item, *args):
        return self.data.index(item, *args)

    def reverse(self):
        self.data.reverse()

    def sort(self, *args, **kwds):
        self.data.sort(*args, **kwds)

    def extend(self, other):
        self.data.extend(self.__aslist(other))
diff --git a/depot_tools/release/win/python_24/Lib/UserString.py b/depot_tools/release/win/python_24/Lib/UserString.py
new file mode 100644
index 0000000..e8e0fed
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/UserString.py
@@ -0,0 +1,187 @@
+#!/usr/bin/env python
+## vim:ts=4:et:nowrap
+"""A user-defined wrapper around string objects
+
+Note: string objects have grown methods in Python 1.6
+This module requires Python 1.6 or later.
+"""
+from types import StringTypes
+import sys
+
+__all__ = ["UserString","MutableString"]
+
class UserString:
    """String-like wrapper class.

    The wrapped str or unicode object is kept in self.data.  Methods
    delegate to it and re-wrap string results in self.__class__ so that
    subclasses keep their own type through string operations.
    """
    def __init__(self, seq):
        if isinstance(seq, StringTypes):
            self.data = seq
        elif isinstance(seq, UserString):
            self.data = seq.data[:]
        else:
            # Anything else is converted via str().
            self.data = str(seq)
    def __str__(self): return str(self.data)
    def __repr__(self): return repr(self.data)
    def __int__(self): return int(self.data)
    def __long__(self): return long(self.data)
    def __float__(self): return float(self.data)
    def __complex__(self): return complex(self.data)
    def __hash__(self): return hash(self.data)

    def __cmp__(self, string):
        if isinstance(string, UserString):
            return cmp(self.data, string.data)
        else:
            return cmp(self.data, string)
    def __contains__(self, char):
        return char in self.data

    def __len__(self): return len(self.data)
    def __getitem__(self, index): return self.__class__(self.data[index])
    def __getslice__(self, start, end):
        # Old-style slice protocol: clamp negatives, wrap the result.
        start = max(start, 0); end = max(end, 0)
        return self.__class__(self.data[start:end])

    def __add__(self, other):
        if isinstance(other, UserString):
            return self.__class__(self.data + other.data)
        elif isinstance(other, StringTypes):
            return self.__class__(self.data + other)
        else:
            return self.__class__(self.data + str(other))
    def __radd__(self, other):
        if isinstance(other, StringTypes):
            return self.__class__(other + self.data)
        else:
            return self.__class__(str(other) + self.data)
    def __mul__(self, n):
        return self.__class__(self.data*n)
    __rmul__ = __mul__
    def __mod__(self, args):
        return self.__class__(self.data % args)

    # the following methods are defined in alphabetical order:
    # sys.maxint stands in for "end of string" in the start/end defaults.
    def capitalize(self): return self.__class__(self.data.capitalize())
    def center(self, width, *args):
        return self.__class__(self.data.center(width, *args))
    def count(self, sub, start=0, end=sys.maxint):
        return self.data.count(sub, start, end)
    def decode(self, encoding=None, errors=None): # XXX improve this?
        if encoding:
            if errors:
                return self.__class__(self.data.decode(encoding, errors))
            else:
                return self.__class__(self.data.decode(encoding))
        else:
            return self.__class__(self.data.decode())
    def encode(self, encoding=None, errors=None): # XXX improve this?
        if encoding:
            if errors:
                return self.__class__(self.data.encode(encoding, errors))
            else:
                return self.__class__(self.data.encode(encoding))
        else:
            return self.__class__(self.data.encode())
    def endswith(self, suffix, start=0, end=sys.maxint):
        return self.data.endswith(suffix, start, end)
    def expandtabs(self, tabsize=8):
        return self.__class__(self.data.expandtabs(tabsize))
    def find(self, sub, start=0, end=sys.maxint):
        return self.data.find(sub, start, end)
    def index(self, sub, start=0, end=sys.maxint):
        return self.data.index(sub, start, end)
    def isalpha(self): return self.data.isalpha()
    def isalnum(self): return self.data.isalnum()
    def isdecimal(self): return self.data.isdecimal()
    def isdigit(self): return self.data.isdigit()
    def islower(self): return self.data.islower()
    def isnumeric(self): return self.data.isnumeric()
    def isspace(self): return self.data.isspace()
    def istitle(self): return self.data.istitle()
    def isupper(self): return self.data.isupper()
    # join() returns a plain string, matching str.join -- not re-wrapped.
    def join(self, seq): return self.data.join(seq)
    def ljust(self, width, *args):
        return self.__class__(self.data.ljust(width, *args))
    def lower(self): return self.__class__(self.data.lower())
    def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars))
    def replace(self, old, new, maxsplit=-1):
        return self.__class__(self.data.replace(old, new, maxsplit))
    def rfind(self, sub, start=0, end=sys.maxint):
        return self.data.rfind(sub, start, end)
    def rindex(self, sub, start=0, end=sys.maxint):
        return self.data.rindex(sub, start, end)
    def rjust(self, width, *args):
        return self.__class__(self.data.rjust(width, *args))
    def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars))
    def split(self, sep=None, maxsplit=-1):
        return self.data.split(sep, maxsplit)
    def rsplit(self, sep=None, maxsplit=-1):
        return self.data.rsplit(sep, maxsplit)
    def splitlines(self, keepends=0): return self.data.splitlines(keepends)
    def startswith(self, prefix, start=0, end=sys.maxint):
        return self.data.startswith(prefix, start, end)
    def strip(self, chars=None): return self.__class__(self.data.strip(chars))
    def swapcase(self): return self.__class__(self.data.swapcase())
    def title(self): return self.__class__(self.data.title())
    def translate(self, *args):
        return self.__class__(self.data.translate(*args))
    def upper(self): return self.__class__(self.data.upper())
    def zfill(self, width): return self.__class__(self.data.zfill(width))
+
class MutableString(UserString):
    """mutable string objects

    Python strings are immutable objects.  This has the advantage, that
    strings may be used as dictionary keys.  If this property isn't needed
    and you insist on changing string values in place instead, you may cheat
    and use MutableString.

    But the purpose of this class is an educational one: to prevent
    people from inventing their own mutable string class derived
    from UserString and then forgetting to remove (override) the
    __hash__ method inherited from UserString.  This would lead to
    errors that would be very hard to track down.

    A faster and better solution is to rewrite your program using lists."""
    def __init__(self, string=""):
        self.data = string
    def __hash__(self):
        # Mutable objects must not be hashable.  Call form of raise:
        # identical behaviour on Python 2, and forward-portable.
        raise TypeError("unhashable type (it is mutable)")
    def __setitem__(self, index, sub):
        # Replace the single character at index; negative indices are
        # rejected rather than wrapped.
        if index < 0 or index >= len(self.data): raise IndexError
        self.data = self.data[:index] + sub + self.data[index+1:]
    def __delitem__(self, index):
        if index < 0 or index >= len(self.data): raise IndexError
        self.data = self.data[:index] + self.data[index+1:]
    def __setslice__(self, start, end, sub):
        start = max(start, 0); end = max(end, 0)
        if isinstance(sub, UserString):
            self.data = self.data[:start]+sub.data+self.data[end:]
        elif isinstance(sub, StringTypes):
            self.data = self.data[:start]+sub+self.data[end:]
        else:
            self.data = self.data[:start]+str(sub)+self.data[end:]
    def __delslice__(self, start, end):
        start = max(start, 0); end = max(end, 0)
        self.data = self.data[:start] + self.data[end:]
    def immutable(self):
        # Return an immutable (hashable) snapshot of the current value.
        return UserString(self.data)
    def __iadd__(self, other):
        if isinstance(other, UserString):
            self.data += other.data
        elif isinstance(other, StringTypes):
            self.data += other
        else:
            self.data += str(other)
        return self
    def __imul__(self, n):
        self.data *= n
        return self
+
if __name__ == "__main__":
    # execute the regression test to stdout, if called as a script:
    import os
    called_in_dir, called_as = os.path.split(sys.argv[0])
    called_as, py = os.path.splitext(called_as)
    if '-q' in sys.argv:
        from test import test_support
        test_support.verbose = 0
    # Derive the test module from the script name, e.g. running
    # UserString.py imports test.test_userstring.
    __import__('test.test_' + called_as.lower())
diff --git a/depot_tools/release/win/python_24/Lib/_LWPCookieJar.py b/depot_tools/release/win/python_24/Lib/_LWPCookieJar.py
new file mode 100644
index 0000000..25a25286
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/_LWPCookieJar.py
@@ -0,0 +1,164 @@
+"""Load / save to libwww-perl (LWP) format files.
+
+Actually, the format is slightly extended from that used by LWP's
+(libwww-perl's) HTTP::Cookies, to avoid losing some RFC 2965 information
+not recorded by LWP.
+
+It uses the version string "2.0", though really there isn't an LWP Cookies
+2.0 format.  This indicates that there is extra information in here
+(domain_dot and # port_spec) while still being compatible with
+libwww-perl, I hope.
+
+"""
+
+import time, re, logging
+from cookielib import (reraise_unmasked_exceptions, FileCookieJar, Cookie,
+     MISSING_FILENAME_TEXT, join_header_words, split_header_words,
+     iso2time, time2isoz)
+
+def lwp_cookie_str(cookie):
+    """Return string representation of Cookie in the LWP cookie file format.
+
+    Actually, the format is extended a bit -- see module docstring.
+
+    """
+    # Standard attributes first, in a fixed order; boolean attributes are
+    # emitted as bare words (value None).
+    h = [(cookie.name, cookie.value),
+         ("path", cookie.path),
+         ("domain", cookie.domain)]
+    if cookie.port is not None: h.append(("port", cookie.port))
+    if cookie.path_specified: h.append(("path_spec", None))
+    if cookie.port_specified: h.append(("port_spec", None))
+    if cookie.domain_initial_dot: h.append(("domain_dot", None))
+    if cookie.secure: h.append(("secure", None))
+    if cookie.expires: h.append(("expires",
+                               time2isoz(float(cookie.expires))))
+    if cookie.discard: h.append(("discard", None))
+    if cookie.comment: h.append(("comment", cookie.comment))
+    if cookie.comment_url: h.append(("commenturl", cookie.comment_url))
+
+    # Non-standard attributes follow, sorted for deterministic output.
+    keys = cookie._rest.keys()
+    keys.sort()
+    for k in keys:
+        h.append((k, str(cookie._rest[k])))
+
+    # Version goes last, per the LWP convention.
+    h.append(("version", str(cookie.version)))
+
+    return join_header_words([h])
+
+class LWPCookieJar(FileCookieJar):
+    """
+    The LWPCookieJar saves a sequence of "Set-Cookie3" lines.
+    "Set-Cookie3" is the format used by the libwww-perl library, not known
+    to be compatible with any browser, but which is easy to read and
+    doesn't lose information about RFC 2965 cookies.
+
+    Additional methods
+
+    as_lwp_str(ignore_discard=True, ignore_expires=True)
+
+    """
+
+    def as_lwp_str(self, ignore_discard=True, ignore_expires=True):
+        """Return cookies as a string of "\n"-separated "Set-Cookie3" headers.
+
+        ignore_discard and ignore_expires: see docstring for FileCookieJar.save
+
+        """
+        now = time.time()
+        r = []
+        for cookie in self:
+            # Skip session-only or expired cookies unless the caller asked
+            # for them to be included.
+            if not ignore_discard and cookie.discard:
+                continue
+            if not ignore_expires and cookie.is_expired(now):
+                continue
+            r.append("Set-Cookie3: %s" % lwp_cookie_str(cookie))
+        # The trailing "" produces a final newline from the join.
+        return "\n".join(r+[""])
+
+    def save(self, filename=None, ignore_discard=False, ignore_expires=False):
+        """Write cookies to filename (default self.filename) in LWP format."""
+        if filename is None:
+            if self.filename is not None: filename = self.filename
+            else: raise ValueError(MISSING_FILENAME_TEXT)
+
+        f = open(filename, "w")
+        try:
+            # There really isn't an LWP Cookies 2.0 format, but this indicates
+            # that there is extra information in here (domain_dot and
+            # port_spec) while still being compatible with libwww-perl, I hope.
+            f.write("#LWP-Cookies-2.0\n")
+            f.write(self.as_lwp_str(ignore_discard, ignore_expires))
+        finally:
+            f.close()
+
+    def _really_load(self, f, filename, ignore_discard, ignore_expires):
+        """Parse "Set-Cookie3" lines from file object f into this jar."""
+        # The first line must match the magic header, or the file is not ours.
+        magic = f.readline()
+        if not re.search(self.magic_re, magic):
+            msg = "%s does not seem to contain cookies" % filename
+            raise IOError(msg)
+
+        now = time.time()
+
+        header = "Set-Cookie3:"
+        # Attributes written as bare words (no "=value") in the file.
+        boolean_attrs = ("port_spec", "path_spec", "domain_dot",
+                         "secure", "discard")
+        value_attrs = ("version",
+                       "port", "path", "domain",
+                       "expires",
+                       "comment", "commenturl")
+
+        try:
+            while 1:
+                line = f.readline()
+                if line == "": break
+                if not line.startswith(header):
+                    continue
+                line = line[len(header):].strip()
+
+                for data in split_header_words([line]):
+                    # data[0] is the (name, value) pair; the rest are
+                    # cookie attributes.
+                    name, value = data[0]
+                    standard = {}
+                    rest = {}
+                    for k in boolean_attrs:
+                        standard[k] = False
+                    for k, v in data[1:]:
+                        if k is not None:
+                            lc = k.lower()
+                        else:
+                            lc = None
+                        # don't lose case distinction for unknown fields
+                        if (lc in value_attrs) or (lc in boolean_attrs):
+                            k = lc
+                        if k in boolean_attrs:
+                            if v is None: v = True
+                            standard[k] = v
+                        elif k in value_attrs:
+                            standard[k] = v
+                        else:
+                            rest[k] = v
+
+                    h = standard.get
+                    expires = h("expires")
+                    discard = h("discard")
+                    if expires is not None:
+                        expires = iso2time(expires)
+                    # No expiry date means a session cookie: always discard.
+                    if expires is None:
+                        discard = True
+                    domain = h("domain")
+                    domain_specified = domain.startswith(".")
+                    c = Cookie(h("version"), name, value,
+                               h("port"), h("port_spec"),
+                               domain, domain_specified, h("domain_dot"),
+                               h("path"), h("path_spec"),
+                               h("secure"),
+                               expires,
+                               discard,
+                               h("comment"),
+                               h("commenturl"),
+                               rest)
+                    if not ignore_discard and c.discard:
+                        continue
+                    if not ignore_expires and c.is_expired(now):
+                        continue
+                    self.set_cookie(c)
+        except:
+            # Re-raise exceptions not masked by reraise_unmasked_exceptions;
+            # anything else is reported as a format error for this file.
+            reraise_unmasked_exceptions((IOError,))
+            raise IOError("invalid Set-Cookie3 format file %s" % filename)
diff --git a/depot_tools/release/win/python_24/Lib/_MozillaCookieJar.py b/depot_tools/release/win/python_24/Lib/_MozillaCookieJar.py
new file mode 100644
index 0000000..88e8492
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/_MozillaCookieJar.py
@@ -0,0 +1,148 @@
+"""Mozilla / Netscape cookie loading / saving."""
+
+import re, time, logging
+
+from cookielib import (reraise_unmasked_exceptions, FileCookieJar, Cookie,
+     MISSING_FILENAME_TEXT)
+
+class MozillaCookieJar(FileCookieJar):
+    """
+
+    WARNING: you may want to backup your browser's cookies file if you use
+    this class to save cookies.  I *think* it works, but there have been
+    bugs in the past!
+
+    This class differs from CookieJar only in the format it uses to save and
+    load cookies to and from a file.  This class uses the Mozilla/Netscape
+    `cookies.txt' format.  lynx uses this file format, too.
+
+    Don't expect cookies saved while the browser is running to be noticed by
+    the browser (in fact, Mozilla on unix will overwrite your saved cookies if
+    you change them on disk while it's running; on Windows, you probably can't
+    save at all while the browser is running).
+
+    Note that the Mozilla/Netscape format will downgrade RFC2965 cookies to
+    Netscape cookies on saving.
+
+    In particular, the cookie version and port number information is lost,
+    together with information about whether or not Path, Port and Discard were
+    specified by the Set-Cookie2 (or Set-Cookie) header, and whether or not the
+    domain as set in the HTTP header started with a dot (yes, I'm aware some
+    domains in Netscape files start with a dot and some don't -- trust me, you
+    really don't want to know any more about this).
+
+    Note that though Mozilla and Netscape use the same format, they use
+    slightly different headers.  The class saves cookies using the Netscape
+    header by default (Mozilla can cope with that).
+
+    """
+    # Regex matching the first line of a valid cookies.txt file.
+    magic_re = "#( Netscape)? HTTP Cookie File"
+    # Header written at the top of saved files (runtime data -- do not edit).
+    header = """\
+    # Netscape HTTP Cookie File
+    # http://www.netscape.com/newsref/std/cookie_spec.html
+    # This is a generated file!  Do not edit.
+
+"""
+
+    def _really_load(self, f, filename, ignore_discard, ignore_expires):
+        """Parse Netscape cookies.txt lines from file object f into this jar."""
+        now = time.time()
+
+        magic = f.readline()
+        if not re.search(self.magic_re, magic):
+            f.close()
+            raise IOError(
+                "%s does not look like a Netscape format cookies file" %
+                filename)
+
+        try:
+            while 1:
+                line = f.readline()
+                if line == "": break
+
+                # last field may be absent, so keep any trailing tab
+                if line.endswith("\n"): line = line[:-1]
+
+                # skip comments and blank lines XXX what is $ for?
+                if (line.strip().startswith("#") or
+                    line.strip().startswith("$") or
+                    line.strip() == ""):
+                    continue
+
+                # Each cookie is one tab-separated line of seven fields.
+                domain, domain_specified, path, secure, expires, name, value = \
+                        line.split("\t")
+                secure = (secure == "TRUE")
+                domain_specified = (domain_specified == "TRUE")
+                if name == "":
+                    # cookies.txt regards 'Set-Cookie: foo' as a cookie
+                    # with no name, whereas cookielib regards it as a
+                    # cookie with no value.
+                    name = value
+                    value = None
+
+                initial_dot = domain.startswith(".")
+                assert domain_specified == initial_dot
+
+                discard = False
+                if expires == "":
+                    # Missing expiry means a session cookie.
+                    expires = None
+                    discard = True
+
+                # assume path_specified is false
+                c = Cookie(0, name, value,
+                           None, False,
+                           domain, domain_specified, initial_dot,
+                           path, False,
+                           secure,
+                           expires,
+                           discard,
+                           None,
+                           None,
+                           {})
+                if not ignore_discard and c.discard:
+                    continue
+                if not ignore_expires and c.is_expired(now):
+                    continue
+                self.set_cookie(c)
+
+        except:
+            # Re-raise exceptions not masked by reraise_unmasked_exceptions;
+            # anything else is reported as a format error (with the bad line).
+            reraise_unmasked_exceptions((IOError,))
+            raise IOError("invalid Netscape format file %s: %s" %
+                          (filename, line))
+
+    def save(self, filename=None, ignore_discard=False, ignore_expires=False):
+        """Write cookies to filename (default self.filename) in Netscape format."""
+        if filename is None:
+            if self.filename is not None: filename = self.filename
+            else: raise ValueError(MISSING_FILENAME_TEXT)
+
+        f = open(filename, "w")
+        try:
+            f.write(self.header)
+            now = time.time()
+            for cookie in self:
+                # Skip session-only or expired cookies unless asked to keep.
+                if not ignore_discard and cookie.discard:
+                    continue
+                if not ignore_expires and cookie.is_expired(now):
+                    continue
+                if cookie.secure: secure = "TRUE"
+                else: secure = "FALSE"
+                if cookie.domain.startswith("."): initial_dot = "TRUE"
+                else: initial_dot = "FALSE"
+                if cookie.expires is not None:
+                    expires = str(cookie.expires)
+                else:
+                    expires = ""
+                if cookie.value is None:
+                    # cookies.txt regards 'Set-Cookie: foo' as a cookie
+                    # with no name, whereas cookielib regards it as a
+                    # cookie with no value.
+                    name = ""
+                    value = cookie.name
+                else:
+                    name = cookie.name
+                    value = cookie.value
+                f.write(
+                    "\t".join([cookie.domain, initial_dot, cookie.path,
+                               secure, expires, name, value])+
+                    "\n")
+        finally:
+            f.close()
diff --git a/depot_tools/release/win/python_24/Lib/__future__.py b/depot_tools/release/win/python_24/Lib/__future__.py
new file mode 100644
index 0000000..8940a95
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/__future__.py
@@ -0,0 +1,104 @@
+"""Record of phased-in incompatible language changes.
+
+Each line is of the form:
+
+    FeatureName = "_Feature(" OptionalRelease "," MandatoryRelease ","
+                              CompilerFlag ")"
+
+where, normally, OptionalRelease < MandatoryRelease, and both are 5-tuples
+of the same form as sys.version_info:
+
+    (PY_MAJOR_VERSION, # the 2 in 2.1.0a3; an int
+     PY_MINOR_VERSION, # the 1; an int
+     PY_MICRO_VERSION, # the 0; an int
+     PY_RELEASE_LEVEL, # "alpha", "beta", "candidate" or "final"; string
+     PY_RELEASE_SERIAL # the 3; an int
+    )
+
+OptionalRelease records the first release in which
+
+    from __future__ import FeatureName
+
+was accepted.
+
+In the case of MandatoryReleases that have not yet occurred,
+MandatoryRelease predicts the release in which the feature will become part
+of the language.
+
+Else MandatoryRelease records when the feature became part of the language;
+in releases at or after that, modules no longer need
+
+    from __future__ import FeatureName
+
+to use the feature in question, but may continue to use such imports.
+
+MandatoryRelease may also be None, meaning that a planned feature got
+dropped.
+
+Instances of class _Feature have two corresponding methods,
+.getOptionalRelease() and .getMandatoryRelease().
+
+CompilerFlag is the (bitfield) flag that should be passed in the fourth
+argument to the builtin function compile() to enable the feature in
+dynamically compiled code.  This flag is stored in the .compiler_flag
+attribute on _Future instances.  These values must match the appropriate
+#defines of CO_xxx flags in Include/compile.h.
+
+No feature line is ever to be deleted from this file.
+"""
+
+# Names of every __future__ feature, in the order they were introduced.
+all_feature_names = [
+    "nested_scopes",
+    "generators",
+    "division",
+]
+
+__all__ = ["all_feature_names"] + all_feature_names
+
+# The CO_xxx symbols are defined here under the same names used by
+# compile.h, so that an editor search will find them here.  However,
+# they're not exported in __all__, because they don't really belong to
+# this module.
+CO_NESTED            = 0x0010   # nested_scopes
+CO_GENERATOR_ALLOWED = 0x1000   # generators
+CO_FUTURE_DIVISION   = 0x2000   # division
+
+class _Feature:
+    """Record of one __future__ feature: its optional release, mandatory
+    release, and the compile() flag that enables it."""
+    def __init__(self, optionalRelease, mandatoryRelease, compiler_flag):
+        self.optional = optionalRelease
+        self.mandatory = mandatoryRelease
+        self.compiler_flag = compiler_flag
+
+    def getOptionalRelease(self):
+        """Return first release in which this feature was recognized.
+
+        This is a 5-tuple, of the same form as sys.version_info.
+        """
+
+        return self.optional
+
+    def getMandatoryRelease(self):
+        """Return release in which this feature will become mandatory.
+
+        This is a 5-tuple, of the same form as sys.version_info, or, if
+        the feature was dropped, is None.
+        """
+
+        return self.mandatory
+
+    def __repr__(self):
+        return "_Feature" + repr((self.optional,
+                                  self.mandatory,
+                                  self.compiler_flag))
+
+# Feature declarations: (optional release, mandatory release, compiler flag).
+nested_scopes = _Feature((2, 1, 0, "beta",  1),
+                         (2, 2, 0, "alpha", 0),
+                         CO_NESTED)
+
+generators = _Feature((2, 2, 0, "alpha", 1),
+                      (2, 3, 0, "final", 0),
+                      CO_GENERATOR_ALLOWED)
+
+division = _Feature((2, 2, 0, "alpha", 2),
+                    (3, 0, 0, "alpha", 0),
+                    CO_FUTURE_DIVISION)
diff --git a/depot_tools/release/win/python_24/Lib/__phello__.foo.py b/depot_tools/release/win/python_24/Lib/__phello__.foo.py
new file mode 100644
index 0000000..8e8623ee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/__phello__.foo.py
@@ -0,0 +1 @@
+# This file exists as a helper for the test.test_frozen module.
diff --git a/depot_tools/release/win/python_24/Lib/_strptime.py b/depot_tools/release/win/python_24/Lib/_strptime.py
new file mode 100644
index 0000000..a6b54f3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/_strptime.py
@@ -0,0 +1,436 @@
+"""Strptime-related classes and functions.
+
+CLASSES:
+    LocaleTime -- Discovers and stores locale-specific time information
+    TimeRE -- Creates regexes for pattern matching a string of text containing
+                time information
+
+FUNCTIONS:
+    _getlang -- Figure out what language is being used for the locale
+    strptime -- Calculates the time struct represented by the passed-in string
+
+"""
+import time
+import locale
+import calendar
+from re import compile as re_compile
+from re import IGNORECASE
+from re import escape as re_escape
+from datetime import date as datetime_date
+try:
+    from thread import allocate_lock as _thread_allocate_lock
+except:
+    from dummy_thread import allocate_lock as _thread_allocate_lock
+
+__author__ = "Brett Cannon"
+__email__ = "brett@python.org"
+
+__all__ = ['strptime']
+
+def _getlang():
+    """Return the current LC_TIME locale as a (language, encoding) tuple."""
+    # Figure out what the current language is set to.
+    return locale.getlocale(locale.LC_TIME)
+
+class LocaleTime(object):
+    """Stores and handles locale-specific information related to time.
+
+    ATTRIBUTES:
+        f_weekday -- full weekday names (7-item list)
+        a_weekday -- abbreviated weekday names (7-item list)
+        f_month -- full month names (13-item list; dummy value in [0], which
+                    is added by code)
+        a_month -- abbreviated month names (13-item list, dummy value in
+                    [0], which is added by code)
+        am_pm -- AM/PM representation (2-item list)
+        LC_date_time -- format string for date/time representation (string)
+        LC_date -- format string for date representation (string)
+        LC_time -- format string for time representation (string)
+        timezone -- daylight- and non-daylight-savings timezone representation
+                    (2-item list of sets)
+        lang -- Language used by instance (2-item tuple)
+    """
+
+    def __init__(self):
+        """Set all attributes.
+
+        Order of methods called matters for dependency reasons.
+
+        The locale language is set at the offset and then checked again before
+        exiting.  This is to make sure that the attributes were not set with a
+        mix of information from more than one locale.  This would most likely
+        happen when using threads where one thread calls a locale-dependent
+        function while another thread changes the locale while the function in
+        the other thread is still running.  Proper coding would call for
+        locks to prevent changing the locale while locale-dependent code is
+        running.  The check here is done in case someone does not think about
+        doing this.
+
+        Only other possible issue is if someone changed the timezone and did
+        not call tz.tzset .  That is an issue for the programmer, though,
+        since changing the timezone is worthless without that call.
+
+        """
+        self.lang = _getlang()
+        self.__calc_weekday()
+        self.__calc_month()
+        self.__calc_am_pm()
+        # __calc_date_time depends on the weekday/month/am_pm/timezone data
+        # computed above, so it must run last.
+        self.__calc_timezone()
+        self.__calc_date_time()
+        if _getlang() != self.lang:
+            raise ValueError("locale changed during initialization")
+
+    def __pad(self, seq, front):
+        # Add '' to seq to either the front (is True), else the back.
+        seq = list(seq)
+        if front:
+            seq.insert(0, '')
+        else:
+            seq.append('')
+        return seq
+
+    def __calc_weekday(self):
+        # Set self.a_weekday and self.f_weekday using the calendar
+        # module.  Names are lower-cased for case-insensitive matching.
+        a_weekday = [calendar.day_abbr[i].lower() for i in range(7)]
+        f_weekday = [calendar.day_name[i].lower() for i in range(7)]
+        self.a_weekday = a_weekday
+        self.f_weekday = f_weekday
+
+    def __calc_month(self):
+        # Set self.f_month and self.a_month using the calendar module.
+        # Index 0 is the dummy '' entry so month numbers index directly.
+        a_month = [calendar.month_abbr[i].lower() for i in range(13)]
+        f_month = [calendar.month_name[i].lower() for i in range(13)]
+        self.a_month = a_month
+        self.f_month = f_month
+
+    def __calc_am_pm(self):
+        # Set self.am_pm by using time.strftime().
+
+        # The magic date (1999,3,17,hour,44,55,2,76,0) is not really that
+        # magical; just happened to have used it everywhere else where a
+        # static date was needed.
+        am_pm = []
+        for hour in (01,22):
+            time_tuple = time.struct_time((1999,3,17,hour,44,55,2,76,0))
+            am_pm.append(time.strftime("%p", time_tuple).lower())
+        self.am_pm = am_pm
+
+    def __calc_date_time(self):
+        # Set self.date_time, self.date, & self.time by using
+        # time.strftime().
+
+        # Use (1999,3,17,22,44,55,2,76,0) for magic date because the amount of
+        # overloaded numbers is minimized.  The order in which searches for
+        # values within the format string is very important; it eliminates
+        # possible ambiguity for what something represents.
+        time_tuple = time.struct_time((1999,3,17,22,44,55,2,76,0))
+        date_time = [None, None, None]
+        date_time[0] = time.strftime("%c", time_tuple).lower()
+        date_time[1] = time.strftime("%x", time_tuple).lower()
+        date_time[2] = time.strftime("%X", time_tuple).lower()
+        # Map each literal value the magic date produced back to the
+        # directive that generated it, longest/most specific first.
+        replacement_pairs = [('%', '%%'), (self.f_weekday[2], '%A'),
+                    (self.f_month[3], '%B'), (self.a_weekday[2], '%a'),
+                    (self.a_month[3], '%b'), (self.am_pm[1], '%p'),
+                    ('1999', '%Y'), ('99', '%y'), ('22', '%H'),
+                    ('44', '%M'), ('55', '%S'), ('76', '%j'),
+                    ('17', '%d'), ('03', '%m'), ('3', '%m'),
+                    # '3' needed for when no leading zero.
+                    ('2', '%w'), ('10', '%I')]
+        replacement_pairs.extend([(tz, "%Z") for tz_values in self.timezone
+                                                for tz in tz_values])
+        for offset,directive in ((0,'%c'), (1,'%x'), (2,'%X')):
+            current_format = date_time[offset]
+            for old, new in replacement_pairs:
+                # Must deal with possible lack of locale info
+                # manifesting itself as the empty string (e.g., Swedish's
+                # lack of AM/PM info) or a platform returning a tuple of empty
+                # strings (e.g., MacOS 9 having timezone as ('','')).
+                if old:
+                    current_format = current_format.replace(old, new)
+            time_tuple = time.struct_time((1999,1,3,1,1,1,6,3,0))
+            # NOTE(review): .find('00') is truthy for both "not found" (-1)
+            # and any non-zero position; only a match at position 0 selects
+            # '%W'.  Looks intentional (probing which week numbering yields
+            # '00' for this date) -- confirm against upstream _strptime.
+            if time.strftime(directive, time_tuple).find('00'):
+                U_W = '%U'
+            else:
+                U_W = '%W'
+            date_time[offset] = current_format.replace('11', U_W)
+        self.LC_date_time = date_time[0]
+        self.LC_date = date_time[1]
+        self.LC_time = date_time[2]
+
+    def __calc_timezone(self):
+        # Set self.timezone by using time.tzname.
+        # Do not worry about possibility of time.tzname[0] == timetzname[1]
+        # and time.daylight; handle that in strptime .
+        try:
+            time.tzset()
+        except AttributeError:
+            # tzset is not available on all platforms (e.g. Windows).
+            pass
+        no_saving = frozenset(["utc", "gmt", time.tzname[0].lower()])
+        if time.daylight:
+            has_saving = frozenset([time.tzname[1].lower()])
+        else:
+            has_saving = frozenset()
+        self.timezone = (no_saving, has_saving)
+
+
+class TimeRE(dict):
+    """Handle conversion from format directives to regexes."""
+
+    def __init__(self, locale_time=None):
+        """Create keys/values.
+
+        Order of execution is important for dependency reasons.
+
+        """
+        if locale_time:
+            self.locale_time = locale_time
+        else:
+            self.locale_time = LocaleTime()
+        base = super(TimeRE, self)
+        base.__init__({
+            # The " \d" part of the regex is to make %c from ANSI C work
+            'd': r"(?P<d>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
+            'H': r"(?P<H>2[0-3]|[0-1]\d|\d)",
+            'I': r"(?P<I>1[0-2]|0[1-9]|[1-9])",
+            'j': r"(?P<j>36[0-6]|3[0-5]\d|[1-2]\d\d|0[1-9]\d|00[1-9]|[1-9]\d|0[1-9]|[1-9])",
+            'm': r"(?P<m>1[0-2]|0[1-9]|[1-9])",
+            'M': r"(?P<M>[0-5]\d|\d)",
+            'S': r"(?P<S>6[0-1]|[0-5]\d|\d)",
+            'U': r"(?P<U>5[0-3]|[0-4]\d|\d)",
+            'w': r"(?P<w>[0-6])",
+            # W is set below by using 'U'
+            'y': r"(?P<y>\d\d)",
+            #XXX: Does 'Y' need to worry about having less or more than
+            #     4 digits?
+            'Y': r"(?P<Y>\d\d\d\d)",
+            'A': self.__seqToRE(self.locale_time.f_weekday, 'A'),
+            'a': self.__seqToRE(self.locale_time.a_weekday, 'a'),
+            'B': self.__seqToRE(self.locale_time.f_month[1:], 'B'),
+            'b': self.__seqToRE(self.locale_time.a_month[1:], 'b'),
+            'p': self.__seqToRE(self.locale_time.am_pm, 'p'),
+            'Z': self.__seqToRE((tz for tz_names in self.locale_time.timezone
+                                        for tz in tz_names),
+                                'Z'),
+            '%': '%'})
+        # Composite directives reuse entries defined above.
+        base.__setitem__('W', base.__getitem__('U').replace('U', 'W'))
+        base.__setitem__('c', self.pattern(self.locale_time.LC_date_time))
+        base.__setitem__('x', self.pattern(self.locale_time.LC_date))
+        base.__setitem__('X', self.pattern(self.locale_time.LC_time))
+
+    def __seqToRE(self, to_convert, directive):
+        """Convert a list to a regex string for matching a directive.
+
+        Want possible matching values to be from longest to shortest.  This
+        prevents the possibility of a match occurring for a value that is
+        also a substring of a larger value that should have matched (e.g.,
+        'abc' matching when 'abcdef' should have been the match).
+
+        """
+        to_convert = sorted(to_convert, key=len, reverse=True)
+        # If every alternative is empty (missing locale info), emit nothing.
+        for value in to_convert:
+            if value != '':
+                break
+        else:
+            return ''
+        regex = '|'.join(re_escape(stuff) for stuff in to_convert)
+        regex = '(?P<%s>%s' % (directive, regex)
+        return '%s)' % regex
+
+    def pattern(self, format):
+        """Return regex pattern for the format string.
+
+        Need to make sure that any characters that might be interpreted as
+        regex syntax are escaped.
+
+        """
+        processed_format = ''
+        # The sub() call escapes all characters that might be misconstrued
+        # as regex syntax.  Cannot use re.escape since we have to deal with
+        # format directives (%m, etc.).
+        regex_chars = re_compile(r"([\\.^$*+?\(\){}\[\]|])")
+        format = regex_chars.sub(r"\\\1", format)
+        # Any run of whitespace in the format matches any run in the input.
+        whitespace_replacement = re_compile('\s+')
+        format = whitespace_replacement.sub('\s*', format)
+        # Expand each %X directive into its regex from the dict (self).
+        while '%' in format:
+            directive_index = format.index('%')+1
+            processed_format = "%s%s%s" % (processed_format,
+                                           format[:directive_index-1],
+                                           self[format[directive_index]])
+            format = format[directive_index+1:]
+        return "%s%s" % (processed_format, format)
+
+    def compile(self, format):
+        """Return a compiled re object for the format string."""
+        return re_compile(self.pattern(format), IGNORECASE)
+
+# Module-level caches shared by strptime(); guarded by _cache_lock.
+_cache_lock = _thread_allocate_lock()
+# DO NOT modify _TimeRE_cache or _regex_cache without acquiring the cache lock
+# first!
+_TimeRE_cache = TimeRE()
+_CACHE_MAX_SIZE = 5 # Max number of regexes stored in _regex_cache
+_regex_cache = {}
+
+def strptime(data_string, format="%a %b %d %H:%M:%S %Y"):
+    """Return a time struct based on the input string and the format string."""
+    global _TimeRE_cache
+    _cache_lock.acquire()
+    try:
+        time_re = _TimeRE_cache
+        locale_time = time_re.locale_time
+        if _getlang() != locale_time.lang:
+            _TimeRE_cache = TimeRE()
+        if len(_regex_cache) > _CACHE_MAX_SIZE:
+            _regex_cache.clear()
+        format_regex = _regex_cache.get(format)
+        if not format_regex:
+            format_regex = time_re.compile(format)
+            _regex_cache[format] = format_regex
+    finally:
+        _cache_lock.release()
+    found = format_regex.match(data_string)
+    if not found:
+        raise ValueError("time data did not match format:  data=%s  fmt=%s" %
+                         (data_string, format))
+    if len(data_string) != found.end():
+        raise ValueError("unconverted data remains: %s" %
+                          data_string[found.end():])
+    year = 1900
+    month = day = 1
+    hour = minute = second = 0
+    tz = -1
+    # Default to -1 to signify that values not known; not critical to have,
+    # though
+    week_of_year = -1
+    week_of_year_start = -1
+    # weekday and julian defaulted to -1 so as to signal need to calculate
+    # values
+    weekday = julian = -1
+    found_dict = found.groupdict()
+    for group_key in found_dict.iterkeys():
+        # Directives not explicitly handled below:
+        #   c, x, X
+        #      handled by making out of other directives
+        #   U, W
+        #      worthless without day of the week
+        if group_key == 'y':
+            year = int(found_dict['y'])
+            # Open Group specification for strptime() states that a %y
+            #value in the range of [00, 68] is in the century 2000, while
+            #[69,99] is in the century 1900
+            if year <= 68:
+                year += 2000
+            else:
+                year += 1900
+        elif group_key == 'Y':
+            year = int(found_dict['Y'])
+        elif group_key == 'm':
+            month = int(found_dict['m'])
+        elif group_key == 'B':
+            month = locale_time.f_month.index(found_dict['B'].lower())
+        elif group_key == 'b':
+            month = locale_time.a_month.index(found_dict['b'].lower())
+        elif group_key == 'd':
+            day = int(found_dict['d'])
+        elif group_key == 'H':
+            hour = int(found_dict['H'])
+        elif group_key == 'I':
+            hour = int(found_dict['I'])
+            ampm = found_dict.get('p', '').lower()
+            # If there was no AM/PM indicator, we'll treat this like AM
+            if ampm in ('', locale_time.am_pm[0]):
+                # We're in AM so the hour is correct unless we're
+                # looking at 12 midnight.
+                # 12 midnight == 12 AM == hour 0
+                if hour == 12:
+                    hour = 0
+            elif ampm == locale_time.am_pm[1]:
+                # We're in PM so we need to add 12 to the hour unless
+                # we're looking at 12 noon.
+                # 12 noon == 12 PM == hour 12
+                if hour != 12:
+                    hour += 12
+        elif group_key == 'M':
+            minute = int(found_dict['M'])
+        elif group_key == 'S':
+            second = int(found_dict['S'])
+        elif group_key == 'A':
+            weekday = locale_time.f_weekday.index(found_dict['A'].lower())
+        elif group_key == 'a':
+            weekday = locale_time.a_weekday.index(found_dict['a'].lower())
+        elif group_key == 'w':
+            weekday = int(found_dict['w'])
+            if weekday == 0:
+                weekday = 6
+            else:
+                weekday -= 1
+        elif group_key == 'j':
+            julian = int(found_dict['j'])
+        elif group_key in ('U', 'W'):
+            week_of_year = int(found_dict[group_key])
+            if group_key == 'U':
+                # U starts week on Sunday
+                week_of_year_start = 6
+            else:
+                # W starts week on Monday
+                week_of_year_start = 0
+        elif group_key == 'Z':
+            # Since -1 is default value only need to worry about setting tz if
+            # it can be something other than -1.
+            found_zone = found_dict['Z'].lower()
+            for value, tz_values in enumerate(locale_time.timezone):
+                if found_zone in tz_values:
+                    # Deal with bad locale setup where timezone names are the
+                    # same and yet time.daylight is true; too ambiguous to
+                    # be able to tell what timezone has daylight savings
+                    if (time.tzname[0] == time.tzname[1] and
+                       time.daylight and found_zone not in ("utc", "gmt")):
+                        break
+                    else:
+                        tz = value
+                        break
+    # If we know the week of the year and what day of that week, we can figure
+    # out the Julian day of the year
+    # Calculations below assume 0 is a Monday
+    if julian == -1 and week_of_year != -1 and weekday != -1:
+        # Calculate how many days in week 0
+        first_weekday = datetime_date(year, 1, 1).weekday()
+        preceeding_days = 7 - first_weekday
+        if preceeding_days == 7:
+            preceeding_days = 0
+        # Adjust for U directive so that calculations are not dependent on
+        # directive used to figure out week of year
+        if weekday == 6 and week_of_year_start == 6:
+            week_of_year -= 1
+        # If a year starts and ends on a Monday but a week is specified to
+        # start on a Sunday we need to up the week to counter-balance the fact
+        # that with %W that first Monday starts week 1 while with %U that is
+        # week 0 and thus shifts everything by a week
+        if weekday == 0 and first_weekday == 0 and week_of_year_start == 6:
+            week_of_year += 1
+        # If in week 0, then just figure out how many days from Jan 1 to day of
+        # week specified, else calculate by multiplying week of year by 7,
+        # adding in days in week 0, and the number of days from Monday to the
+        # day of the week
+        if week_of_year == 0:
+            julian = 1 + weekday - first_weekday
+        else:
+            days_to_week = preceeding_days + (7 * (week_of_year - 1))
+            julian = 1 + days_to_week + weekday
+    # Cannot pre-calculate datetime_date() since can change in Julian
+    #calculation and thus could have different value for the day of the week
+    #calculation
+    if julian == -1:
+        # Need to add 1 to result since first day of the year is 1, not 0.
+        julian = datetime_date(year, month, day).toordinal() - \
+                  datetime_date(year, 1, 1).toordinal() + 1
+    else:  # Assume that if they bothered to include Julian day it will
+           #be accurate
+        datetime_result = datetime_date.fromordinal((julian - 1) + datetime_date(year, 1, 1).toordinal())
+        year = datetime_result.year
+        month = datetime_result.month
+        day = datetime_result.day
+    if weekday == -1:
+        weekday = datetime_date(year, month, day).weekday()
+    return time.struct_time((year, month, day,
+                             hour, minute, second,
+                             weekday, julian, tz))
diff --git a/depot_tools/release/win/python_24/Lib/_threading_local.py b/depot_tools/release/win/python_24/Lib/_threading_local.py
new file mode 100644
index 0000000..c90fde0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/_threading_local.py
@@ -0,0 +1,237 @@
+"""Thread-local objects
+
(Note that this module provides a Python version of the
 threading.local class.  Depending on the version of Python you're
+ using, there may be a faster one available.  You should always import
+ the local class from threading.)
+
+Thread-local objects support the management of thread-local data.
+If you have data that you want to be local to a thread, simply create
+a thread-local object and use its attributes:
+
+  >>> mydata = local()
+  >>> mydata.number = 42
+  >>> mydata.number
+  42
+
+You can also access the local-object's dictionary:
+
+  >>> mydata.__dict__
+  {'number': 42}
+  >>> mydata.__dict__.setdefault('widgets', [])
+  []
+  >>> mydata.widgets
+  []
+
+What's important about thread-local objects is that their data are
+local to a thread. If we access the data in a different thread:
+
+  >>> log = []
+  >>> def f():
+  ...     items = mydata.__dict__.items()
+  ...     items.sort()
+  ...     log.append(items)
+  ...     mydata.number = 11
+  ...     log.append(mydata.number)
+
+  >>> import threading
+  >>> thread = threading.Thread(target=f)
+  >>> thread.start()
+  >>> thread.join()
+  >>> log
+  [[], 11]
+
+we get different data.  Furthermore, changes made in the other thread
+don't affect data seen in this thread:
+
+  >>> mydata.number
+  42
+
+Of course, values you get from a local object, including a __dict__
+attribute, are for whatever thread was current at the time the
+attribute was read.  For that reason, you generally don't want to save
+these values across threads, as they apply only to the thread they
+came from.
+
+You can create custom local objects by subclassing the local class:
+
+  >>> class MyLocal(local):
+  ...     number = 2
+  ...     initialized = False
+  ...     def __init__(self, **kw):
+  ...         if self.initialized:
+  ...             raise SystemError('__init__ called too many times')
+  ...         self.initialized = True
+  ...         self.__dict__.update(kw)
+  ...     def squared(self):
+  ...         return self.number ** 2
+
+This can be useful to support default values, methods and
+initialization.  Note that if you define an __init__ method, it will be
+called each time the local object is used in a separate thread.  This
+is necessary to initialize each thread's dictionary.
+
+Now if we create a local object:
+
+  >>> mydata = MyLocal(color='red')
+
+Now we have a default number:
+
+  >>> mydata.number
+  2
+
+an initial color:
+
+  >>> mydata.color
+  'red'
+  >>> del mydata.color
+
+And a method that operates on the data:
+
+  >>> mydata.squared()
+  4
+
+As before, we can access the data in a separate thread:
+
+  >>> log = []
+  >>> thread = threading.Thread(target=f)
+  >>> thread.start()
+  >>> thread.join()
+  >>> log
+  [[('color', 'red'), ('initialized', True)], 11]
+
+without affecting this thread's data:
+
+  >>> mydata.number
+  2
+  >>> mydata.color
+  Traceback (most recent call last):
+  ...
+  AttributeError: 'MyLocal' object has no attribute 'color'
+
+Note that subclasses can define slots, but they are not thread
+local. They are shared across threads:
+
+  >>> class MyLocal(local):
+  ...     __slots__ = 'number'
+
+  >>> mydata = MyLocal()
+  >>> mydata.number = 42
+  >>> mydata.color = 'red'
+
+So, the separate thread:
+
+  >>> thread = threading.Thread(target=f)
+  >>> thread.start()
+  >>> thread.join()
+
+affects what we see:
+
+  >>> mydata.number
+  11
+
+>>> del mydata
+"""
+
+# Threading import is at end
+
class _localbase(object):
    """Base class for local: allocates the bookkeeping slots.

    The actual per-thread attribute dict is stored on the current
    Thread object under a key unique to this instance; _patch() (below)
    swaps the right dict into self.__dict__ on every access.
    """
    __slots__ = '_local__key', '_local__args', '_local__lock'

    def __new__(cls, *args, **kw):
        self = object.__new__(cls)
        key = '_local__key', 'thread.local.' + str(id(self))
        object.__setattr__(self, '_local__key', key)
        object.__setattr__(self, '_local__args', (args, kw))
        object.__setattr__(self, '_local__lock', RLock())

        # Constructor arguments are only meaningful when a subclass
        # overrides __init__ to consume them.  The parentheses matter:
        # without them this read "args or (kw and ...)", which wrongly
        # rejected positional arguments even when __init__ was overridden.
        if (args or kw) and (cls.__init__ is object.__init__):
            raise TypeError("Initialization arguments are not supported")

        # We need to create the thread dict in anticipation of
        # __init__ being called, to make sure we don't call it
        # again ourselves.
        dict = object.__getattribute__(self, '__dict__')
        currentThread().__dict__[key] = dict

        return self
+
def _patch(self):
    """Install the calling thread's private dict as self.__dict__.

    Looks up this instance's dict on the current Thread object; if the
    thread has never touched this instance before, a fresh dict is
    created and the subclass __init__ (if any) is run with the
    originally supplied constructor arguments.
    """
    key = object.__getattribute__(self, '_local__key')
    d = currentThread().__dict__.get(key)
    if d is None:
        # First access from this thread: create and register its dict.
        d = {}
        currentThread().__dict__[key] = d
        object.__setattr__(self, '__dict__', d)

        # we have a new instance dict, so call our __init__ if we have
        # one
        cls = type(self)
        if cls.__init__ is not object.__init__:
            args, kw = object.__getattribute__(self, '_local__args')
            cls.__init__(self, *args, **kw)
    else:
        object.__setattr__(self, '__dict__', d)
+
class local(_localbase):
    """Thread-local data: each thread sees its own attribute namespace.

    Every attribute get/set/delete first calls _patch() (under the
    per-instance lock) so that self.__dict__ is the dict belonging to
    the calling thread.
    """

    def __getattribute__(self, name):
        lock = object.__getattribute__(self, '_local__lock')
        lock.acquire()
        try:
            _patch(self)
            return object.__getattribute__(self, name)
        finally:
            lock.release()

    def __setattr__(self, name, value):
        lock = object.__getattribute__(self, '_local__lock')
        lock.acquire()
        try:
            _patch(self)
            return object.__setattr__(self, name, value)
        finally:
            lock.release()

    def __delattr__(self, name):
        lock = object.__getattribute__(self, '_local__lock')
        lock.acquire()
        try:
            _patch(self)
            return object.__delattr__(self, name)
        finally:
            lock.release()


    # __del__ is produced by calling this factory exactly once at class
    # definition time (see the rebinding below).  The closure captures
    # the module-level names it needs so cleanup still works during
    # interpreter shutdown, when module globals may have been cleared.
    def __del__():
        threading_enumerate = enumerate
        __getattribute__ = object.__getattribute__

        def __del__(self):
            """Remove this instance's dicts from all live threads."""
            key = __getattribute__(self, '_local__key')

            try:
                threads = list(threading_enumerate())
            except:
                # if enumerate fails, as it seems to do during
                # shutdown, we'll skip cleanup under the assumption
                # that there is nothing to clean up
                return

            for thread in threads:
                try:
                    __dict__ = thread.__dict__
                except AttributeError:
                    # Thread is dying, rest in peace
                    continue

                if key in __dict__:
                    try:
                        del __dict__[key]
                    except KeyError:
                        pass # didn't have anything in this thread

        return __del__
    __del__ = __del__()
+
+from threading import currentThread, enumerate, RLock
diff --git a/depot_tools/release/win/python_24/Lib/aifc.py b/depot_tools/release/win/python_24/Lib/aifc.py
new file mode 100644
index 0000000..781d77ce
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/aifc.py
@@ -0,0 +1,959 @@
+"""Stuff to parse AIFF-C and AIFF files.
+
+Unless explicitly stated otherwise, the description below is true
+both for AIFF-C files and AIFF files.
+
+An AIFF-C file has the following structure.
+
+  +-----------------+
+  | FORM            |
+  +-----------------+
+  | <size>          |
+  +----+------------+
+  |    | AIFC       |
+  |    +------------+
+  |    | <chunks>   |
+  |    |    .       |
+  |    |    .       |
+  |    |    .       |
+  +----+------------+
+
+An AIFF file has the string "AIFF" instead of "AIFC".
+
+A chunk consists of an identifier (4 bytes) followed by a size (4 bytes,
+big endian order), followed by the data.  The size field does not include
+the size of the 8 byte header.
+
+The following chunk types are recognized.
+
+  FVER
+      <version number of AIFF-C defining document> (AIFF-C only).
+  MARK
+      <# of markers> (2 bytes)
+      list of markers:
+          <marker ID> (2 bytes, must be > 0)
+          <position> (4 bytes)
+          <marker name> ("pstring")
+  COMM
+      <# of channels> (2 bytes)
+      <# of sound frames> (4 bytes)
+      <size of the samples> (2 bytes)
+      <sampling frequency> (10 bytes, IEEE 80-bit extended
+          floating point)
+      in AIFF-C files only:
+      <compression type> (4 bytes)
+      <human-readable version of compression type> ("pstring")
+  SSND
+      <offset> (4 bytes, not used by this program)
+      <blocksize> (4 bytes, not used by this program)
+      <sound data>
+
+A pstring consists of 1 byte length, a string of characters, and 0 or 1
+byte pad to make the total length even.
+
+Usage.
+
+Reading AIFF files:
+  f = aifc.open(file, 'r')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods read(), seek(), and close().
+In some types of audio files, if the setpos() method is not used,
+the seek() method is not necessary.
+
+This returns an instance of a class with the following public methods:
+  getnchannels()  -- returns number of audio channels (1 for
+             mono, 2 for stereo)
+  getsampwidth()  -- returns sample width in bytes
+  getframerate()  -- returns sampling frequency
+  getnframes()    -- returns number of audio frames
+  getcomptype()   -- returns compression type ('NONE' for AIFF files)
+  getcompname()   -- returns human-readable version of
+             compression type ('not compressed' for AIFF files)
+  getparams() -- returns a tuple consisting of all of the
+             above in the above order
+  getmarkers()    -- get the list of marks in the audio file or None
+             if there are no marks
+  getmark(id) -- get mark with the specified id (raises an error
+             if the mark does not exist)
+  readframes(n)   -- returns at most n frames of audio
+  rewind()    -- rewind to the beginning of the audio stream
+  setpos(pos) -- seek to the specified position
+  tell()      -- return the current position
+  close()     -- close the instance (make it unusable)
+The position returned by tell(), the position given to setpos() and
+the position of marks are all compatible and have nothing to do with
+the actual position in the file.
+The close() method is called automatically when the class instance
+is destroyed.
+
+Writing AIFF files:
+  f = aifc.open(file, 'w')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods write(), tell(), seek(), and
+close().
+
+This returns an instance of a class with the following public methods:
+  aiff()      -- create an AIFF file (AIFF-C default)
+  aifc()      -- create an AIFF-C file
+  setnchannels(n) -- set the number of channels
+  setsampwidth(n) -- set the sample width
+  setframerate(n) -- set the frame rate
+  setnframes(n)   -- set the number of frames
+  setcomptype(type, name)
+          -- set the compression type and the
+             human-readable compression type
+  setparams(tuple)
+          -- set all parameters at once
+  setmark(id, pos, name)
+          -- add specified mark to the list of marks
+  tell()      -- return current position in output file (useful
+             in combination with setmark())
+  writeframesraw(data)
          -- write audio frames without patching up the
+             file header
+  writeframes(data)
+          -- write audio frames and patch up the file header
+  close()     -- patch up the file header and close the
+             output file
+You should set the parameters before the first writeframesraw or
+writeframes.  The total number of frames does not need to be set,
+but when it is set to the correct value, the header does not have to
+be patched up.
It is best to first set all parameters, except possibly the
+compression type, and then write audio frames using writeframesraw.
+When all frames have been written, either call writeframes('') or
+close() to patch up the sizes in the header.
Marks can be added anytime.  If there are any marks, you must call
+close() after all frames have been written.
+The close() method is called automatically when the class instance
+is destroyed.
+
+When a file is opened with the extension '.aiff', an AIFF file is
+written, otherwise an AIFF-C file is written.  This default can be
+changed by calling aiff() or aifc() before the first writeframes or
+writeframesraw.
+"""
+
+import struct
+import __builtin__
+
+__all__ = ["Error","open","openfp"]
+
class Error(Exception):
    """Raised for aifc-specific failures: malformed headers, bad
    parameter values, and unsupported compression types."""
    pass
+
_AIFC_version = 0xA2805140L     # Version 1 of AIFF-C

# Chunk types that are recognized but deliberately skipped when reading.
_skiplist = 'COMT', 'INST', 'MIDI', 'AESD', \
      'APPL', 'NAME', 'AUTH', '(c) ', 'ANNO'
+
def _read_long(file):
    """Read a 4-byte big-endian signed integer from file.

    Raises EOFError when fewer than 4 bytes remain.
    """
    data = file.read(4)
    try:
        (value,) = struct.unpack('>l', data)
    except struct.error:
        raise EOFError
    return value
+
def _read_ulong(file):
    """Read a 4-byte big-endian unsigned integer from file.

    Raises EOFError when fewer than 4 bytes remain.
    """
    data = file.read(4)
    try:
        (value,) = struct.unpack('>L', data)
    except struct.error:
        raise EOFError
    return value
+
def _read_short(file):
    """Read a 2-byte big-endian signed integer from file.

    Raises EOFError when fewer than 2 bytes remain.
    """
    data = file.read(2)
    try:
        (value,) = struct.unpack('>h', data)
    except struct.error:
        raise EOFError
    return value
+
def _read_string(file):
    """Read a "pstring": one length byte, that many data bytes, then a
    pad byte when the data length is even (total size stays even)."""
    length = ord(file.read(1))
    data = ''
    if length:
        data = file.read(length)
    if not length & 1:
        # Even data length means a pad byte follows; consume and discard.
        file.read(1)
    return data
+
+_HUGE_VAL = 1.79769313486231e+308 # See <limits.h>
+
def _read_float(f): # 10 bytes
    """Read an IEEE 80-bit extended-precision float (big-endian).

    Layout: sign bit + 15-bit exponent (bias 16383), then a 64-bit
    mantissa split into two unsigned longs.  Infinity/NaN (exponent
    0x7FFF) is clamped to _HUGE_VAL.  Used for the sampling frequency
    in COMM chunks.
    """
    expon = _read_short(f) # 2 bytes
    sign = 1
    if expon < 0:
        # The sign lives in the top bit of the first 16-bit word.
        sign = -1
        expon = expon + 0x8000
    himant = _read_ulong(f) # 4 bytes
    lomant = _read_ulong(f) # 4 bytes
    if expon == himant == lomant == 0:
        f = 0.0
    elif expon == 0x7FFF:
        f = _HUGE_VAL
    else:
        expon = expon - 16383
        f = (himant * 0x100000000L + lomant) * pow(2.0, expon - 63)
    return sign * f
+
def _write_short(f, x):
    """Write x to f as a 2-byte big-endian signed integer."""
    packed = struct.pack('>h', x)
    f.write(packed)
+
def _write_long(f, x):
    """Write x to f as a 4-byte big-endian integer.

    Despite the name, the '>L' format is unsigned.
    """
    packed = struct.pack('>L', x)
    f.write(packed)
+
def _write_string(f, s):
    """Write s to f as a "pstring": one length byte, the data, and a
    pad byte when the data length is even.

    Raises ValueError if s is longer than 255 bytes, since the length
    must fit in a single byte.
    """
    if len(s) > 255:
        raise ValueError("string exceeds maximum pstring length")
    f.write(chr(len(s)))
    f.write(s)
    if len(s) & 1 == 0:
        f.write(chr(0))
+
def _write_float(f, x):
    """Write x to f as an IEEE 80-bit extended-precision float:
    sign + 15-bit exponent (bias 16383), then a 64-bit mantissa
    emitted as two unsigned longs.  Inverse of _read_float()."""
    import math
    if x < 0:
        sign = 0x8000
        x = x * -1
    else:
        sign = 0
    if x == 0:
        expon = 0
        himant = 0
        lomant = 0
    else:
        fmant, expon = math.frexp(x)
        if expon > 16384 or fmant >= 1:     # Infinity or NaN
            expon = sign|0x7FFF
            himant = 0
            lomant = 0
        else:                   # Finite
            expon = expon + 16382
            if expon < 0:           # denormalized
                fmant = math.ldexp(fmant, expon)
                expon = 0
            expon = expon | sign
            fmant = math.ldexp(fmant, 32)
            fsmant = math.floor(fmant)
            himant = long(fsmant)
            fmant = math.ldexp(fmant - fsmant, 32)
            fsmant = math.floor(fmant)
            lomant = long(fsmant)
    _write_short(f, expon)
    _write_long(f, himant)
    _write_long(f, lomant)
+
+from chunk import Chunk
+
class Aifc_read:
    """Reader for AIFF and AIFF-C files; see the module docstring for
    the public API.  Instances are normally created via aifc.open()."""
    # Variables used in this class:
    #
    # These variables are available to the user through appropriate
    # methods of this class:
    # _file -- the open file with methods read(), close(), and seek()
    #       set through the __init__() method
    # _nchannels -- the number of audio channels
    #       available through the getnchannels() method
    # _nframes -- the number of audio frames
    #       available through the getnframes() method
    # _sampwidth -- the number of bytes per audio sample
    #       available through the getsampwidth() method
    # _framerate -- the sampling frequency
    #       available through the getframerate() method
    # _comptype -- the AIFF-C compression type ('NONE' if AIFF)
    #       available through the getcomptype() method
    # _compname -- the human-readable AIFF-C compression type
    #       available through the getcomptype() method
    # _markers -- the marks in the audio file
    #       available through the getmarkers() and getmark()
    #       methods
    # _soundpos -- the position in the audio stream
    #       available through the tell() method, set through the
    #       setpos() method
    #
    # These variables are used internally only:
    # _version -- the AIFF-C version number
    # _decomp -- the decompressor from builtin module cl
    # _comm_chunk_read -- 1 iff the COMM chunk has been read
    # _aifc -- 1 iff reading an AIFF-C file
    # _ssnd_seek_needed -- 1 iff positioned correctly in audio
    #       file for readframes()
    # _ssnd_chunk -- instantiation of a chunk class for the SSND chunk
    # _framesize -- size of one frame in the file

    def initfp(self, file):
        """Parse the FORM container from *file* (an open file object),
        reading the COMM, SSND, FVER and MARK chunks and skipping the
        rest.  Raises Error on a malformed or non-AIFF file."""
        self._version = 0
        self._decomp = None
        self._convert = None
        self._markers = []
        self._soundpos = 0
        self._file = Chunk(file)
        if self._file.getname() != 'FORM':
            raise Error, 'file does not start with FORM id'
        formdata = self._file.read(4)
        if formdata == 'AIFF':
            self._aifc = 0
        elif formdata == 'AIFC':
            self._aifc = 1
        else:
            raise Error, 'not an AIFF or AIFF-C file'
        self._comm_chunk_read = 0
        while 1:
            self._ssnd_seek_needed = 1
            try:
                chunk = Chunk(self._file)
            except EOFError:
                break
            chunkname = chunk.getname()
            if chunkname == 'COMM':
                self._read_comm_chunk(chunk)
                self._comm_chunk_read = 1
            elif chunkname == 'SSND':
                self._ssnd_chunk = chunk
                # Skip the 8-byte offset/blocksize prefix of SSND.
                dummy = chunk.read(8)
                self._ssnd_seek_needed = 0
            elif chunkname == 'FVER':
                self._version = _read_ulong(chunk)
            elif chunkname == 'MARK':
                self._readmark(chunk)
            elif chunkname in _skiplist:
                pass
            else:
                # NOTE(review): 'chunk.chunkname' looks wrong (the local is
                # 'chunkname'); if this branch is ever hit it would raise
                # AttributeError instead of Error -- confirm against upstream.
                raise Error, 'unrecognized chunk type '+chunk.chunkname
            chunk.skip()
        # NOTE(review): _ssnd_chunk is never pre-initialized, so a file
        # with a COMM chunk but no SSND chunk raises AttributeError here
        # rather than Error -- confirm intended behavior.
        if not self._comm_chunk_read or not self._ssnd_chunk:
            raise Error, 'COMM chunk and/or SSND chunk missing'
        if self._aifc and self._decomp:
            # Configure the SGI Compression Library decompressor opened
            # by _read_comm_chunk().
            import cl
            params = [cl.ORIGINAL_FORMAT, 0,
                  cl.BITS_PER_COMPONENT, self._sampwidth * 8,
                  cl.FRAME_RATE, self._framerate]
            if self._nchannels == 1:
                params[1] = cl.MONO
            elif self._nchannels == 2:
                params[1] = cl.STEREO_INTERLEAVED
            else:
                raise Error, 'cannot compress more than 2 channels'
            self._decomp.SetParams(params)

    def __init__(self, f):
        """Open for reading.  *f* is a filename or an open file object."""
        if type(f) == type(''):
            f = __builtin__.open(f, 'rb')
        # else, assume it is an open file object already
        self.initfp(f)

    #
    # User visible methods.
    #
    def getfp(self):
        """Return the underlying Chunk wrapping the FORM container."""
        return self._file

    def rewind(self):
        """Reset the audio position to the start of the stream."""
        self._ssnd_seek_needed = 1
        self._soundpos = 0

    def close(self):
        """Release the decompressor (if any) and drop the file reference."""
        if self._decomp:
            self._decomp.CloseDecompressor()
            self._decomp = None
        self._file = None

    def tell(self):
        """Return the current position, in frames."""
        return self._soundpos

    def getnchannels(self):
        """Return the number of audio channels."""
        return self._nchannels

    def getnframes(self):
        """Return the total number of audio frames."""
        return self._nframes

    def getsampwidth(self):
        """Return the sample width in bytes."""
        return self._sampwidth

    def getframerate(self):
        """Return the sampling frequency in frames/second."""
        return self._framerate

    def getcomptype(self):
        """Return the 4-character compression type ('NONE' for AIFF)."""
        return self._comptype

    def getcompname(self):
        """Return the human-readable compression name."""
        return self._compname

##  def getversion(self):
##      return self._version

    def getparams(self):
        """Return (nchannels, sampwidth, framerate, nframes, comptype,
        compname)."""
        return self.getnchannels(), self.getsampwidth(), \
              self.getframerate(), self.getnframes(), \
              self.getcomptype(), self.getcompname()

    def getmarkers(self):
        """Return the list of (id, pos, name) marks, or None if none."""
        if len(self._markers) == 0:
            return None
        return self._markers

    def getmark(self, id):
        """Return the (id, pos, name) mark with the given id; raise
        Error if it does not exist."""
        for marker in self._markers:
            if id == marker[0]:
                return marker
        raise Error, 'marker %r does not exist' % (id,)

    def setpos(self, pos):
        """Seek to frame position *pos* (0 <= pos <= nframes)."""
        if pos < 0 or pos > self._nframes:
            raise Error, 'position not in range'
        self._soundpos = pos
        self._ssnd_seek_needed = 1

    def readframes(self, nframes):
        """Return at most *nframes* frames of (possibly decompressed)
        audio data as a string."""
        if self._ssnd_seek_needed:
            # Re-seek the SSND chunk: skip the 8-byte offset/blocksize
            # prefix, then advance to the current frame position.
            self._ssnd_chunk.seek(0)
            dummy = self._ssnd_chunk.read(8)
            pos = self._soundpos * self._framesize
            if pos:
                self._ssnd_chunk.seek(pos + 8)
            self._ssnd_seek_needed = 0
        if nframes == 0:
            return ''
        data = self._ssnd_chunk.read(nframes * self._framesize)
        if self._convert and data:
            data = self._convert(data)
        self._soundpos = self._soundpos + len(data) / (self._nchannels * self._sampwidth)
        return data

    #
    # Internal methods.
    #

    def _decomp_data(self, data):
        # Decompress via the SGI Compression Library (IRIX only).
        import cl
        dummy = self._decomp.SetParam(cl.FRAME_BUFFER_SIZE,
                          len(data) * 2)
        return self._decomp.Decompress(len(data) / self._nchannels,
                           data)

    def _ulaw2lin(self, data):
        # Convert u-law samples to 2-byte linear.
        import audioop
        return audioop.ulaw2lin(data, 2)

    def _adpcm2lin(self, data):
        # Convert ADPCM (G722) samples to 2-byte linear, keeping the
        # codec state between calls.
        import audioop
        if not hasattr(self, '_adpcmstate'):
            # first time
            self._adpcmstate = None
        data, self._adpcmstate = audioop.adpcm2lin(data, 2,
                               self._adpcmstate)
        return data

    def _read_comm_chunk(self, chunk):
        """Parse the COMM chunk and, for compressed AIFF-C, set up the
        appropriate decompressor/converter."""
        self._nchannels = _read_short(chunk)
        self._nframes = _read_long(chunk)
        # Sample size is stored in bits; round up to whole bytes.
        self._sampwidth = (_read_short(chunk) + 7) / 8
        self._framerate = int(_read_float(chunk))
        self._framesize = self._nchannels * self._sampwidth
        if self._aifc:
            #DEBUG: SGI's soundeditor produces a bad size :-(
            kludge = 0
            if chunk.chunksize == 18:
                kludge = 1
                print 'Warning: bad COMM chunk size'
                chunk.chunksize = 23
            #DEBUG end
            self._comptype = chunk.read(4)
            #DEBUG start
            if kludge:
                length = ord(chunk.file.read(1))
                if length & 1 == 0:
                    length = length + 1
                chunk.chunksize = chunk.chunksize + length
                chunk.file.seek(-1, 1)
            #DEBUG end
            self._compname = _read_string(chunk)
            if self._comptype != 'NONE':
                if self._comptype == 'G722':
                    try:
                        import audioop
                    except ImportError:
                        pass
                    else:
                        self._convert = self._adpcm2lin
                        self._framesize = self._framesize / 4
                        return
                # for ULAW and ALAW try Compression Library
                try:
                    import cl
                except ImportError:
                    if self._comptype == 'ULAW':
                        try:
                            import audioop
                            self._convert = self._ulaw2lin
                            self._framesize = self._framesize / 2
                            return
                        except ImportError:
                            pass
                    raise Error, 'cannot read compressed AIFF-C files'
                if self._comptype == 'ULAW':
                    scheme = cl.G711_ULAW
                    self._framesize = self._framesize / 2
                elif self._comptype == 'ALAW':
                    scheme = cl.G711_ALAW
                    self._framesize = self._framesize / 2
                else:
                    raise Error, 'unsupported compression type'
                self._decomp = cl.OpenDecompressor(scheme)
                self._convert = self._decomp_data
        else:
            self._comptype = 'NONE'
            self._compname = 'not compressed'

    def _readmark(self, chunk):
        """Parse a MARK chunk into self._markers."""
        nmarkers = _read_short(chunk)
        # Some files appear to contain invalid counts.
        # Cope with this by testing for EOF.
        try:
            for i in range(nmarkers):
                id = _read_short(chunk)
                pos = _read_long(chunk)
                name = _read_string(chunk)
                if pos or name:
                    # some files appear to have
                    # dummy markers consisting of
                    # a position 0 and name ''
                    self._markers.append((id, pos, name))
        except EOFError:
            print 'Warning: MARK chunk contains only',
            print len(self._markers),
            if len(self._markers) == 1: print 'marker',
            else: print 'markers',
            print 'instead of', nmarkers
+
+class Aifc_write:
+    # Variables used in this class:
+    #
+    # These variables are user settable through appropriate methods
+    # of this class:
+    # _file -- the open file with methods write(), close(), tell(), seek()
+    #       set through the __init__() method
+    # _comptype -- the AIFF-C compression type ('NONE' in AIFF)
+    #       set through the setcomptype() or setparams() method
+    # _compname -- the human-readable AIFF-C compression type
+    #       set through the setcomptype() or setparams() method
+    # _nchannels -- the number of audio channels
+    #       set through the setnchannels() or setparams() method
+    # _sampwidth -- the number of bytes per audio sample
+    #       set through the setsampwidth() or setparams() method
+    # _framerate -- the sampling frequency
+    #       set through the setframerate() or setparams() method
+    # _nframes -- the number of audio frames written to the header
+    #       set through the setnframes() or setparams() method
+    # _aifc -- whether we're writing an AIFF-C file or an AIFF file
+    #       set through the aifc() method, reset through the
+    #       aiff() method
+    #
+    # These variables are used internally only:
+    # _version -- the AIFF-C version number
+    # _comp -- the compressor from builtin module cl
+    # _nframeswritten -- the number of audio frames actually written
+    # _datalength -- the size of the audio samples written to the header
+    # _datawritten -- the size of the audio samples actually written
+
    def __init__(self, f):
        """Open for writing.  *f* is a filename or an open file object.

        A filename ending in '.aiff' selects plain AIFF output; any
        other name (and any file object) defaults to AIFF-C.
        """
        if type(f) == type(''):
            filename = f
            f = __builtin__.open(f, 'wb')
        else:
            # else, assume it is an open file object already
            filename = '???'
        self.initfp(f)
        if filename[-5:] == '.aiff':
            self._aifc = 0
        else:
            self._aifc = 1
+
    def initfp(self, file):
        """Reset all writer state for output to *file* (an open file
        object); no data is written until frames are supplied."""
        self._file = file
        self._version = _AIFC_version
        self._comptype = 'NONE'
        self._compname = 'not compressed'
        self._comp = None
        self._convert = None
        self._nchannels = 0
        self._sampwidth = 0
        self._framerate = 0
        self._nframes = 0
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0
        self._markers = []
        self._marklength = 0
        self._aifc = 1      # AIFF-C is default
+
+    def __del__(self):
+        if self._file:
+            self.close()
+
+    #
+    # User visible methods.
+    #
+    def aiff(self):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        self._aifc = 0
+
+    def aifc(self):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        self._aifc = 1
+
+    def setnchannels(self, nchannels):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        if nchannels < 1:
+            raise Error, 'bad # of channels'
+        self._nchannels = nchannels
+
+    def getnchannels(self):
+        if not self._nchannels:
+            raise Error, 'number of channels not set'
+        return self._nchannels
+
+    def setsampwidth(self, sampwidth):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        if sampwidth < 1 or sampwidth > 4:
+            raise Error, 'bad sample width'
+        self._sampwidth = sampwidth
+
+    def getsampwidth(self):
+        if not self._sampwidth:
+            raise Error, 'sample width not set'
+        return self._sampwidth
+
+    def setframerate(self, framerate):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        if framerate <= 0:
+            raise Error, 'bad frame rate'
+        self._framerate = framerate
+
+    def getframerate(self):
+        if not self._framerate:
+            raise Error, 'frame rate not set'
+        return self._framerate
+
+    def setnframes(self, nframes):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        self._nframes = nframes
+
+    def getnframes(self):
+        return self._nframeswritten
+
+    def setcomptype(self, comptype, compname):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        if comptype not in ('NONE', 'ULAW', 'ALAW', 'G722'):
+            raise Error, 'unsupported compression type'
+        self._comptype = comptype
+        self._compname = compname
+
+    def getcomptype(self):
+        return self._comptype
+
+    def getcompname(self):
+        return self._compname
+
+##  def setversion(self, version):
+##      if self._nframeswritten:
+##          raise Error, 'cannot change parameters after starting to write'
+##      self._version = version
+
+    def setparams(self, (nchannels, sampwidth, framerate, nframes, comptype, compname)):
+        if self._nframeswritten:
+            raise Error, 'cannot change parameters after starting to write'
+        if comptype not in ('NONE', 'ULAW', 'ALAW', 'G722'):
+            raise Error, 'unsupported compression type'
+        self.setnchannels(nchannels)
+        self.setsampwidth(sampwidth)
+        self.setframerate(framerate)
+        self.setnframes(nframes)
+        self.setcomptype(comptype, compname)
+
+    def getparams(self):
+        if not self._nchannels or not self._sampwidth or not self._framerate:
+            raise Error, 'not all parameters set'
+        return self._nchannels, self._sampwidth, self._framerate, \
+              self._nframes, self._comptype, self._compname
+
+    def setmark(self, id, pos, name):
+        if id <= 0:
+            raise Error, 'marker ID must be > 0'
+        if pos < 0:
+            raise Error, 'marker position must be >= 0'
+        if type(name) != type(''):
+            raise Error, 'marker name must be a string'
+        for i in range(len(self._markers)):
+            if id == self._markers[i][0]:
+                self._markers[i] = id, pos, name
+                return
+        self._markers.append((id, pos, name))
+
+    def getmark(self, id):
+        for marker in self._markers:
+            if id == marker[0]:
+                return marker
+        raise Error, 'marker %r does not exist' % (id,)
+
+    def getmarkers(self):
+        if len(self._markers) == 0:
+            return None
+        return self._markers
+
+    def tell(self):
+        return self._nframeswritten
+
+    def writeframesraw(self, data):
+        self._ensure_header_written(len(data))
+        nframes = len(data) / (self._sampwidth * self._nchannels)
+        if self._convert:
+            data = self._convert(data)
+        self._file.write(data)
+        self._nframeswritten = self._nframeswritten + nframes
+        self._datawritten = self._datawritten + len(data)
+
+    def writeframes(self, data):
+        self.writeframesraw(data)
+        if self._nframeswritten != self._nframes or \
+              self._datalength != self._datawritten:
+            self._patchheader()
+
    def close(self):
        """Finish the file: pad data, write markers, patch the header.

        Note: the underlying file object is flushed but NOT closed here,
        even when __init__ opened it from a filename.
        """
        self._ensure_header_written(0)
        if self._datawritten & 1:
            # quick pad to even size
            self._file.write(chr(0))
            self._datawritten = self._datawritten + 1
        self._writemarkers()
        # rewrite the header length fields if the actual frame/byte counts
        # differ from the header's prediction, or if a MARK chunk was added
        if self._nframeswritten != self._nframes or \
              self._datalength != self._datawritten or \
              self._marklength:
            self._patchheader()
        if self._comp:
            self._comp.CloseCompressor()
            self._comp = None
        # drop the reference so __del__ does not try to close a second time
        self._file.flush()
        self._file = None
+
+    #
+    # Internal methods.
+    #
+
+    def _comp_data(self, data):
+        import cl
+        dummy = self._comp.SetParam(cl.FRAME_BUFFER_SIZE, len(data))
+        dummy = self._comp.SetParam(cl.COMPRESSED_BUFFER_SIZE, len(data))
+        return self._comp.Compress(self._nframes, data)
+
    def _lin2ulaw(self, data):
        """Convert 16-bit linear samples to u-LAW using audioop."""
        import audioop
        return audioop.lin2ulaw(data, 2)
+
    def _lin2adpcm(self, data):
        """Convert 16-bit linear samples to ADPCM, carrying codec state
        across calls in self._adpcmstate."""
        import audioop
        if not hasattr(self, '_adpcmstate'):
            # first call: let audioop create fresh encoder state
            self._adpcmstate = None
        data, self._adpcmstate = audioop.lin2adpcm(data, 2,
                               self._adpcmstate)
        return data
+
+    def _ensure_header_written(self, datasize):
+        if not self._nframeswritten:
+            if self._comptype in ('ULAW', 'ALAW'):
+                if not self._sampwidth:
+                    self._sampwidth = 2
+                if self._sampwidth != 2:
+                    raise Error, 'sample width must be 2 when compressing with ULAW or ALAW'
+            if self._comptype == 'G722':
+                if not self._sampwidth:
+                    self._sampwidth = 2
+                if self._sampwidth != 2:
+                    raise Error, 'sample width must be 2 when compressing with G7.22 (ADPCM)'
+            if not self._nchannels:
+                raise Error, '# channels not specified'
+            if not self._sampwidth:
+                raise Error, 'sample width not specified'
+            if not self._framerate:
+                raise Error, 'sampling rate not specified'
+            self._write_header(datasize)
+
    def _init_compression(self):
        """Select the data-conversion routine for self._comptype.

        G722 always uses the audioop ADPCM converter.  ULAW/ALAW first try
        the legacy SGI 'cl' compression module; if that is unavailable,
        ULAW falls back to audioop, otherwise Error is raised.
        """
        if self._comptype == 'G722':
            self._convert = self._lin2adpcm
            return
        try:
            import cl
        except ImportError:
            if self._comptype == 'ULAW':
                try:
                    import audioop
                    self._convert = self._lin2ulaw
                    return
                except ImportError:
                    pass
            raise Error, 'cannot write compressed AIFF-C files'
        if self._comptype == 'ULAW':
            scheme = cl.G711_ULAW
        elif self._comptype == 'ALAW':
            scheme = cl.G711_ALAW
        else:
            raise Error, 'unsupported compression type'
        self._comp = cl.OpenCompressor(scheme)
        # params[1] (the ORIGINAL_FORMAT value) is filled in below once the
        # channel layout is known
        params = [cl.ORIGINAL_FORMAT, 0,
              cl.BITS_PER_COMPONENT, self._sampwidth * 8,
              cl.FRAME_RATE, self._framerate,
              cl.FRAME_BUFFER_SIZE, 100,
              cl.COMPRESSED_BUFFER_SIZE, 100]
        if self._nchannels == 1:
            params[1] = cl.MONO
        elif self._nchannels == 2:
            params[1] = cl.STEREO_INTERLEAVED
        else:
            raise Error, 'cannot compress more than 2 channels'
        self._comp.SetParams(params)
        # the compressor produces a header which we ignore
        dummy = self._comp.Compress(0, '')
        self._convert = self._comp_data
+
    def _write_header(self, initlength):
        """Write the FORM (+FVER) / COMM / SSND header chunks.

        initlength is the byte length of the first data chunk, used to
        estimate the frame count when setnframes() was not called.  The
        file offsets of the patchable length fields are remembered for
        _patchheader().
        """
        if self._aifc and self._comptype != 'NONE':
            self._init_compression()
        self._file.write('FORM')
        if not self._nframes:
            self._nframes = initlength / (self._nchannels * self._sampwidth)
        self._datalength = self._nframes * self._nchannels * self._sampwidth
        if self._datalength & 1:
            # chunks are padded to an even byte count
            self._datalength = self._datalength + 1
        if self._aifc:
            # compressed data is smaller: ULAW/ALAW halve it, G.722 packs
            # about 4:1; keep the predicted length even in both cases
            if self._comptype in ('ULAW', 'ALAW'):
                self._datalength = self._datalength / 2
                if self._datalength & 1:
                    self._datalength = self._datalength + 1
            elif self._comptype == 'G722':
                self._datalength = (self._datalength + 3) / 4
                if self._datalength & 1:
                    self._datalength = self._datalength + 1
        # remember where the FORM length lives so _patchheader() can fix it
        self._form_length_pos = self._file.tell()
        commlength = self._write_form_length(self._datalength)
        if self._aifc:
            self._file.write('AIFC')
            self._file.write('FVER')
            _write_long(self._file, 4)
            _write_long(self._file, self._version)
        else:
            self._file.write('AIFF')
        self._file.write('COMM')
        _write_long(self._file, commlength)
        _write_short(self._file, self._nchannels)
        # frame count is patched later if the prediction was wrong
        self._nframes_pos = self._file.tell()
        _write_long(self._file, self._nframes)
        _write_short(self._file, self._sampwidth * 8)
        _write_float(self._file, self._framerate)
        if self._aifc:
            self._file.write(self._comptype)
            _write_string(self._file, self._compname)
        self._file.write('SSND')
        self._ssnd_length_pos = self._file.tell()
        _write_long(self._file, self._datalength + 8)
        _write_long(self._file, 0)
        _write_long(self._file, 0)
+
+    def _write_form_length(self, datalength):
+        if self._aifc:
+            commlength = 18 + 5 + len(self._compname)
+            if commlength & 1:
+                commlength = commlength + 1
+            verslength = 12
+        else:
+            commlength = 18
+            verslength = 0
+        _write_long(self._file, 4 + verslength + self._marklength + \
+                    8 + commlength + 16 + datalength)
+        return commlength
+
    def _patchheader(self):
        """Rewrite the FORM/COMM/SSND length fields to match what was
        actually written, then restore the original file position."""
        curpos = self._file.tell()
        if self._datawritten & 1:
            # pad the data chunk to an even byte count before measuring
            datalength = self._datawritten + 1
            self._file.write(chr(0))
        else:
            datalength = self._datawritten
        if datalength == self._datalength and \
              self._nframes == self._nframeswritten and \
              self._marklength == 0:
            # header already accurate; nothing to patch
            self._file.seek(curpos, 0)
            return
        self._file.seek(self._form_length_pos, 0)
        dummy = self._write_form_length(datalength)
        self._file.seek(self._nframes_pos, 0)
        _write_long(self._file, self._nframeswritten)
        self._file.seek(self._ssnd_length_pos, 0)
        _write_long(self._file, datalength + 8)
        self._file.seek(curpos, 0)
        # record the patched values so a later patch can detect "no change"
        self._nframes = self._nframeswritten
        self._datalength = datalength
+
+    def _writemarkers(self):
+        if len(self._markers) == 0:
+            return
+        self._file.write('MARK')
+        length = 2
+        for marker in self._markers:
+            id, pos, name = marker
+            length = length + len(name) + 1 + 6
+            if len(name) & 1 == 0:
+                length = length + 1
+        _write_long(self._file, length)
+        self._marklength = length + 8
+        _write_short(self._file, len(self._markers))
+        for marker in self._markers:
+            id, pos, name = marker
+            _write_short(self._file, id)
+            _write_long(self._file, pos)
+            _write_string(self._file, name)
+
def open(f, mode=None):
    """Open an AIFF/AIFF-C file and return a reader or writer object.

    f is a filename or an open file object.  mode is 'r'/'rb' to read or
    'w'/'wb' to write; when omitted it is taken from f.mode if present,
    defaulting to 'rb'.
    """
    if mode is None:
        mode = getattr(f, 'mode', 'rb')
    if mode == 'r' or mode == 'rb':
        return Aifc_read(f)
    if mode == 'w' or mode == 'wb':
        return Aifc_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")
+
# Alias retained for code that historically called aifc.openfp().
openfp = open # B/W compatibility
+
if __name__ == '__main__':
    # Ad-hoc smoke test: print the parameters of an AIFF(-C) file and,
    # when a second filename is given, copy its audio into that file.
    import sys
    if not sys.argv[1:]:
        # default demo file from the SGI demo data set
        sys.argv.append('/usr/demos/data/audio/bach.aiff')
    fn = sys.argv[1]
    f = open(fn, 'r')
    print "Reading", fn
    print "nchannels =", f.getnchannels()
    print "nframes   =", f.getnframes()
    print "sampwidth =", f.getsampwidth()
    print "framerate =", f.getframerate()
    print "comptype  =", f.getcomptype()
    print "compname  =", f.getcompname()
    if sys.argv[2:]:
        gn = sys.argv[2]
        print "Writing", gn
        g = open(gn, 'w')
        g.setparams(f.getparams())
        # copy in 1024-frame chunks until EOF
        while 1:
            data = f.readframes(1024)
            if not data:
                break
            g.writeframes(data)
        g.close()
        f.close()
        print "Done."
diff --git a/depot_tools/release/win/python_24/Lib/anydbm.py b/depot_tools/release/win/python_24/Lib/anydbm.py
new file mode 100644
index 0000000..8b01ef3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/anydbm.py
@@ -0,0 +1,83 @@
+"""Generic interface to all dbm clones.
+
+Instead of
+
+        import dbm
+        d = dbm.open(file, 'w', 0666)
+
+use
+
+        import anydbm
+        d = anydbm.open(file, 'w')
+
+The returned object is a dbhash, gdbm, dbm or dumbdbm object,
+dependent on the type of database being opened (determined by whichdb
+module) in the case of an existing dbm. If the dbm does not exist and
+the create or new flag ('c' or 'n') was specified, the dbm type will
+be determined by the availability of the modules (tested in the above
+order).
+
+It has the following interface (key and data are strings):
+
+        d[key] = data   # store data at key (may override data at
+                        # existing key)
+        data = d[key]   # retrieve data at key (raise KeyError if no
+                        # such key)
+        del d[key]      # delete data stored at key (raises KeyError
+                        # if no such key)
+        flag = key in d   # true if the key exists
+        list = d.keys() # return a list of all existing keys (slow!)
+
+Future versions may change the order in which implementations are
+tested for existence, add interfaces to other dbm-like
+implementations.
+
+The open function has an optional second argument.  This can be 'r',
+for read-only access, 'w', for read-write access of an existing
+database, 'c' for read-write access to a new or existing database, and
+'n' for read-write access to a new database.  The default is 'r'.
+
+Note: 'r' and 'w' fail if the database doesn't exist; 'c' creates it
+only if it doesn't exist; and 'n' always creates a new database.
+
+"""
+
class error(Exception):
    """Base exception for anydbm.

    Note: after the backend probe below runs, the module-level name
    'error' is rebound to a tuple containing this class plus each
    backend's error class, so "except anydbm.error:" catches them all.
    """
    pass
+
_names = ['dbhash', 'gdbm', 'dbm', 'dumbdbm']   # preference order
_errors = [error]
_defaultmod = None

for _name in _names:
    try:
        _mod = __import__(_name)
    except ImportError:
        continue
    if not _defaultmod:
        # first importable backend wins as the default for new databases
        _defaultmod = _mod
    _errors.append(_mod.error)

if not _defaultmod:
    raise ImportError, "no dbm clone found; tried %s" % _names

# Rebind 'error' to a tuple of every available backend's error class so
# callers can catch any backend failure with a single except clause.
error = tuple(_errors)
+
+def open(file, flag = 'r', mode = 0666):
+    # guess the type of an existing database
+    from whichdb import whichdb
+    result=whichdb(file)
+    if result is None:
+        # db doesn't exist
+        if 'c' in flag or 'n' in flag:
+            # file doesn't exist and the new
+            # flag was used so use default type
+            mod = _defaultmod
+        else:
+            raise error, "need 'c' or 'n' flag to open new db"
+    elif result == "":
+        # db type cannot be determined
+        raise error, "db type could not be determined"
+    else:
+        mod = __import__(result)
+    return mod.open(file, flag, mode)
diff --git a/depot_tools/release/win/python_24/Lib/asynchat.py b/depot_tools/release/win/python_24/Lib/asynchat.py
new file mode 100644
index 0000000..28b89a2b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/asynchat.py
@@ -0,0 +1,295 @@
+# -*- Mode: Python; tab-width: 4 -*-
+#       Id: asynchat.py,v 2.26 2000/09/07 22:29:26 rushing Exp
+#       Author: Sam Rushing <rushing@nightmare.com>
+
+# ======================================================================
+# Copyright 1996 by Sam Rushing
+#
+#                         All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Sam
+# Rushing not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+# ======================================================================
+
+r"""A class supporting chat-style (command/response) protocols.
+
+This class adds support for 'chat' style protocols - where one side
+sends a 'command', and the other sends a response (examples would be
+the common internet protocols - smtp, nntp, ftp, etc..).
+
+The handle_read() method looks at the input stream for the current
+'terminator' (usually '\r\n' for single-line responses, '\r\n.\r\n'
+for multi-line output), calling self.found_terminator() on its
+receipt.
+
+for example:
+Say you build an async nntp client using this class.  At the start
+of the connection, you'll have self.terminator set to '\r\n', in
+order to process the single-line greeting.  Just before issuing a
+'LIST' command you'll set it to '\r\n.\r\n'.  The output of the LIST
+command will be accumulated (using your own 'collect_incoming_data'
+method) up to the terminator, and then control will be returned to
+you - by calling your self.found_terminator() method.
+"""
+
+import socket
+import asyncore
+from collections import deque
+
class async_chat (asyncore.dispatcher):
    """This is an abstract class.  You must derive from this class, and add
    the two methods collect_incoming_data() and found_terminator()"""

    # these are overridable defaults

    ac_in_buffer_size       = 4096
    ac_out_buffer_size      = 4096

    def __init__ (self, conn=None):
        # ac_in_buffer holds bytes received but not yet delivered to the
        # subclass; ac_out_buffer holds bytes awaiting send(); the fifo
        # queues producer objects that refill ac_out_buffer.
        self.ac_in_buffer = ''
        self.ac_out_buffer = ''
        self.producer_fifo = fifo()
        asyncore.dispatcher.__init__ (self, conn)

    def collect_incoming_data(self, data):
        # Subclass hook: receives each chunk of input read up to (but not
        # including) the terminator.
        raise NotImplementedError, "must be implemented in subclass"

    def found_terminator(self):
        # Subclass hook: called each time the current terminator is seen.
        raise NotImplementedError, "must be implemented in subclass"

    def set_terminator (self, term):
        "Set the input delimiter.  Can be a fixed string of any length, an integer, or None"
        self.terminator = term

    def get_terminator (self):
        # NOTE(review): self.terminator is only assigned by set_terminator();
        # subclasses appear expected to call it before data arrives -- confirm.
        return self.terminator

    # grab some more data from the socket,
    # throw it to the collector method,
    # check for the terminator,
    # if found, transition to the next state.

    def handle_read (self):
        # Read one buffer's worth and carve it into data/terminator units,
        # dispatching to the subclass hooks above.

        try:
            data = self.recv (self.ac_in_buffer_size)
        except socket.error, why:
            self.handle_error()
            return

        self.ac_in_buffer = self.ac_in_buffer + data

        # Continue to search for self.terminator in self.ac_in_buffer,
        # while calling self.collect_incoming_data.  The while loop
        # is necessary because we might read several data+terminator
        # combos with a single recv(1024).

        while self.ac_in_buffer:
            lb = len(self.ac_in_buffer)
            terminator = self.get_terminator()
            if terminator is None or terminator == '':
                # no terminator, collect it all
                self.collect_incoming_data (self.ac_in_buffer)
                self.ac_in_buffer = ''
            elif isinstance(terminator, int):
                # numeric terminator: consume exactly that many bytes,
                # counting down across successive reads
                n = terminator
                if lb < n:
                    self.collect_incoming_data (self.ac_in_buffer)
                    self.ac_in_buffer = ''
                    self.terminator = self.terminator - lb
                else:
                    self.collect_incoming_data (self.ac_in_buffer[:n])
                    self.ac_in_buffer = self.ac_in_buffer[n:]
                    self.terminator = 0
                    self.found_terminator()
            else:
                # 3 cases:
                # 1) end of buffer matches terminator exactly:
                #    collect data, transition
                # 2) end of buffer matches some prefix:
                #    collect data to the prefix
                # 3) end of buffer does not match any prefix:
                #    collect data
                terminator_len = len(terminator)
                index = self.ac_in_buffer.find(terminator)
                if index != -1:
                    # we found the terminator
                    if index > 0:
                        # don't bother reporting the empty string (source of subtle bugs)
                        self.collect_incoming_data (self.ac_in_buffer[:index])
                    self.ac_in_buffer = self.ac_in_buffer[index+terminator_len:]
                    # This does the Right Thing if the terminator is changed here.
                    self.found_terminator()
                else:
                    # check for a prefix of the terminator
                    index = find_prefix_at_end (self.ac_in_buffer, terminator)
                    if index:
                        if index != lb:
                            # we found a prefix, collect up to the prefix
                            self.collect_incoming_data (self.ac_in_buffer[:-index])
                            self.ac_in_buffer = self.ac_in_buffer[-index:]
                        # wait for more data to disambiguate the prefix
                        break
                    else:
                        # no prefix, collect it all
                        self.collect_incoming_data (self.ac_in_buffer)
                        self.ac_in_buffer = ''

    def handle_write (self):
        # socket became writable: push queued output
        self.initiate_send ()

    def handle_close (self):
        self.close()

    def push (self, data):
        # queue a plain string of bytes for output
        self.producer_fifo.push (simple_producer (data))
        self.initiate_send()

    def push_with_producer (self, producer):
        # queue an arbitrary producer object (anything with a more() method)
        self.producer_fifo.push (producer)
        self.initiate_send()

    def readable (self):
        "predicate for inclusion in the readable for select()"
        return (len(self.ac_in_buffer) <= self.ac_in_buffer_size)

    def writable (self):
        "predicate for inclusion in the writable for select()"
        # return len(self.ac_out_buffer) or len(self.producer_fifo) or (not self.connected)
        # this is about twice as fast, though not as clear.
        return not (
                (self.ac_out_buffer == '') and
                self.producer_fifo.is_empty() and
                self.connected
                )

    def close_when_done (self):
        "automatically close this channel once the outgoing queue is empty"
        self.producer_fifo.push (None)

    # refill the outgoing buffer by calling the more() method
    # of the first producer in the queue
    def refill_buffer (self):
        while 1:
            if len(self.producer_fifo):
                p = self.producer_fifo.first()
                # a 'None' in the producer fifo is a sentinel,
                # telling us to close the channel.
                if p is None:
                    if not self.ac_out_buffer:
                        self.producer_fifo.pop()
                        self.close()
                    return
                elif isinstance(p, str):
                    # bare strings are moved into the buffer as-is
                    self.producer_fifo.pop()
                    self.ac_out_buffer = self.ac_out_buffer + p
                    return
                data = p.more()
                if data:
                    self.ac_out_buffer = self.ac_out_buffer + data
                    return
                else:
                    # producer exhausted: drop it and try the next one
                    self.producer_fifo.pop()
            else:
                return

    def initiate_send (self):
        # send at most one buffer's worth; socket errors are routed to
        # handle_error()
        obs = self.ac_out_buffer_size
        # try to refill the buffer
        if (len (self.ac_out_buffer) < obs):
            self.refill_buffer()

        if self.ac_out_buffer and self.connected:
            # try to send the buffer
            try:
                num_sent = self.send (self.ac_out_buffer[:obs])
                if num_sent:
                    self.ac_out_buffer = self.ac_out_buffer[num_sent:]

            except socket.error, why:
                self.handle_error()
                return

    def discard_buffers (self):
        # Emergencies only!
        self.ac_in_buffer = ''
        self.ac_out_buffer = ''
        while self.producer_fifo:
            self.producer_fifo.pop()
+
+
class simple_producer:
    """Serve a string to the channel in fixed-size chunks.

    more() returns up to buffer_size bytes per call, then '' once the
    data is exhausted.
    """

    def __init__(self, data, buffer_size=512):
        self.data = data
        self.buffer_size = buffer_size

    def more(self):
        # slice off one chunk; for short data this yields everything,
        # leaving '' behind so the next call signals exhaustion
        chunk = self.data[:self.buffer_size]
        self.data = self.data[self.buffer_size:]
        return chunk
+
class fifo:
    """A simple FIFO of producers, backed by collections.deque."""

    def __init__(self, list=None):
        # parameter is named 'list' for backward compatibility
        if list:
            self.list = deque(list)
        else:
            self.list = deque()

    def __len__(self):
        return len(self.list)

    def is_empty(self):
        return not self.list

    def first(self):
        # peek at the head without removing it
        return self.list[0]

    def push(self, data):
        self.list.append(data)

    def pop(self):
        # returns (1, item) on success, (0, None) when empty
        if not self.list:
            return (0, None)
        return (1, self.list.popleft())
+
+# Given 'haystack', see if any prefix of 'needle' is at its end.  This
+# assumes an exact match has already been checked.  Return the number of
+# characters matched.
+# for example:
+# f_p_a_e ("qwerty\r", "\r\n") => 1
+# f_p_a_e ("qwertydkjf", "\r\n") => 0
+# f_p_a_e ("qwerty\r\n", "\r\n") => <undefined>
+
+# this could maybe be made faster with a computed regex?
+# [answer: no; circa Python-2.0, Jan 2001]
+# new python:   28961/s
+# old python:   18307/s
+# re:        12820/s
+# regex:     14035/s
+
def find_prefix_at_end(haystack, needle):
    """Return how many characters of a proper prefix of needle sit at the
    end of haystack (0 when none; exact full matches are assumed to have
    been handled by the caller already)."""
    size = len(needle) - 1
    while size and not haystack.endswith(needle[:size]):
        size = size - 1
    return size
diff --git a/depot_tools/release/win/python_24/Lib/asyncore.py b/depot_tools/release/win/python_24/Lib/asyncore.py
new file mode 100644
index 0000000..a2387a1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/asyncore.py
@@ -0,0 +1,552 @@
+# -*- Mode: Python -*-
+#   Id: asyncore.py,v 2.51 2000/09/07 22:29:26 rushing Exp
+#   Author: Sam Rushing <rushing@nightmare.com>
+
+# ======================================================================
+# Copyright 1996 by Sam Rushing
+#
+#                         All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software and
+# its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of Sam
+# Rushing not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
+# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+# ======================================================================
+
+"""Basic infrastructure for asynchronous socket service clients and servers.
+
+There are only two ways to have a program on a single processor do "more
+than one thing at a time".  Multi-threaded programming is the simplest and
+most popular way to do it, but there is another very different technique,
+that lets you have nearly all the advantages of multi-threading, without
+actually using multiple threads. It's really only practical if your program
+is largely I/O bound. If your program is CPU bound, then pre-emptive
+scheduled threads are probably what you really need. Network servers are
+rarely CPU-bound, however.
+
+If your operating system supports the select() system call in its I/O
+library (and nearly all do), then you can use it to juggle multiple
+communication channels at once; doing other work while your I/O is taking
+place in the "background."  Although this strategy can seem strange and
+complex, especially at first, it is in many ways easier to understand and
+control than multi-threaded programming. The module documented here solves
+many of the difficult problems for you, making the task of building
+sophisticated high-performance network servers and clients a snap.
+"""
+
+import exceptions
+import select
+import socket
+import sys
+import time
+
+import os
+from errno import EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, \
+     ENOTCONN, ESHUTDOWN, EINTR, EISCONN, errorcode
+
+try:
+    socket_map
+except NameError:
+    socket_map = {}
+
+class ExitNow(exceptions.Exception):
+    # Raised by handlers to break out of the event loop; the dispatch
+    # helpers below re-raise it instead of routing it to handle_error().
+    pass
+
+def read(obj):
+    # Deliver a read event to obj, funnelling any failure (other than
+    # ExitNow) into obj.handle_error().
+    try:
+        obj.handle_read_event()
+    except ExitNow:
+        raise
+    except:
+        obj.handle_error()
+
+def write(obj):
+    # Deliver a write event to obj, funnelling any failure (other than
+    # ExitNow) into obj.handle_error().
+    try:
+        obj.handle_write_event()
+    except ExitNow:
+        raise
+    except:
+        obj.handle_error()
+
+def _exception (obj):
+    # Deliver an exceptional-condition event to obj, funnelling any
+    # failure (other than ExitNow) into obj.handle_error().
+    try:
+        obj.handle_expt_event()
+    except ExitNow:
+        raise
+    except:
+        obj.handle_error()
+
+def readwrite(obj, flags):
+    # Dispatch select.poll()-style event flags to obj's event handlers,
+    # funnelling any failure (other than ExitNow) into handle_error().
+    try:
+        if flags & (select.POLLIN | select.POLLPRI):
+            obj.handle_read_event()
+        if flags & select.POLLOUT:
+            obj.handle_write_event()
+        if flags & (select.POLLERR | select.POLLHUP | select.POLLNVAL):
+            obj.handle_expt_event()
+    except ExitNow:
+        raise
+    except:
+        obj.handle_error()
+
+def poll(timeout=0.0, map=None):
+    # One pass of the select()-based event loop over the given channel
+    # map (defaults to the module-global socket_map).
+    if map is None:
+        map = socket_map
+    if map:
+        r = []; w = []; e = []
+        for fd, obj in map.items():
+            is_r = obj.readable()
+            is_w = obj.writable()
+            if is_r:
+                r.append(fd)
+            if is_w:
+                w.append(fd)
+            if is_r or is_w:
+                e.append(fd)
+        if [] == r == w == e:
+            # Nothing to wait on; just honor the timeout.
+            time.sleep(timeout)
+        else:
+            try:
+                r, w, e = select.select(r, w, e, timeout)
+            except select.error, err:
+                if err[0] != EINTR:
+                    raise
+                else:
+                    # Interrupted by a signal: treat as a no-event pass.
+                    return
+
+        for fd in r:
+            obj = map.get(fd)
+            if obj is None:
+                # Channel may have been closed by an earlier handler.
+                continue
+            read(obj)
+
+        for fd in w:
+            obj = map.get(fd)
+            if obj is None:
+                continue
+            write(obj)
+
+        for fd in e:
+            obj = map.get(fd)
+            if obj is None:
+                continue
+            _exception(obj)
+
+def poll2(timeout=0.0, map=None):
+    # One pass of the event loop using select.poll() instead of select().
+    # Use the poll() support added to the select module in Python 2.0
+    if map is None:
+        map = socket_map
+    if timeout is not None:
+        # timeout is in milliseconds
+        timeout = int(timeout*1000)
+    pollster = select.poll()
+    if map:
+        for fd, obj in map.items():
+            flags = 0
+            if obj.readable():
+                flags |= select.POLLIN | select.POLLPRI
+            if obj.writable():
+                flags |= select.POLLOUT
+            if flags:
+                # Only check for exceptions if object was either readable
+                # or writable.
+                flags |= select.POLLERR | select.POLLHUP | select.POLLNVAL
+                pollster.register(fd, flags)
+        try:
+            r = pollster.poll(timeout)
+        except select.error, err:
+            if err[0] != EINTR:
+                raise
+            # Interrupted by a signal: report no events this pass.
+            r = []
+        for fd, flags in r:
+            obj = map.get(fd)
+            if obj is None:
+                # Channel may have been closed by an earlier handler.
+                continue
+            readwrite(obj, flags)
+
+# Keep the old poll3 name working for callers of earlier asyncore releases.
+poll3 = poll2                           # Alias for backward compatibility
+
+def loop(timeout=30.0, use_poll=False, map=None, count=None):
+    if map is None:
+        map = socket_map
+
+    if use_poll and hasattr(select, 'poll'):
+        poll_fun = poll2
+    else:
+        poll_fun = poll
+
+    if count is None:
+        while map:
+            poll_fun(timeout, map)
+
+    else:
+        while map and count > 0:
+            poll_fun(timeout, map)
+            count = count - 1
+
+class dispatcher:
+    # Core event-handler class: wraps a socket, registers it in a channel
+    # map keyed by file descriptor, and translates readiness events from
+    # poll()/select() into handle_*() callbacks meant to be overridden.
+
+    # Channel state flags (class-level defaults).
+    debug = False
+    connected = False
+    accepting = False
+    closing = False
+    addr = None
+
+    def __init__(self, sock=None, map=None):
+        # With no map given, channels register in the module-global
+        # socket_map.
+        if map is None:
+            self._map = socket_map
+        else:
+            self._map = map
+
+        if sock:
+            self.set_socket(sock, map)
+            # I think it should inherit this anyway
+            self.socket.setblocking(0)
+            self.connected = True
+            # XXX Does the constructor require that the socket passed
+            # be connected?
+            try:
+                self.addr = sock.getpeername()
+            except socket.error:
+                # The addr isn't crucial
+                pass
+        else:
+            self.socket = None
+
+    def __repr__(self):
+        status = [self.__class__.__module__+"."+self.__class__.__name__]
+        if self.accepting and self.addr:
+            status.append('listening')
+        elif self.connected:
+            status.append('connected')
+        if self.addr is not None:
+            try:
+                status.append('%s:%d' % self.addr)
+            except TypeError:
+                # addr need not be a (host, port) pair (e.g. AF_UNIX).
+                status.append(repr(self.addr))
+        return '<%s at %#x>' % (' '.join(status), id(self))
+
+    def add_channel(self, map=None):
+        # Register this channel under its fileno so the pollers see it.
+        #self.log_info('adding channel %s' % self)
+        if map is None:
+            map = self._map
+        map[self._fileno] = self
+
+    def del_channel(self, map=None):
+        # Remove this channel from the map (harmless if already gone).
+        fd = self._fileno
+        if map is None:
+            map = self._map
+        if map.has_key(fd):
+            #self.log_info('closing channel %d:%s' % (fd, self))
+            del map[fd]
+        self._fileno = None
+
+    def create_socket(self, family, type):
+        # Make a fresh non-blocking socket and register it.
+        self.family_and_type = family, type
+        self.socket = socket.socket(family, type)
+        self.socket.setblocking(0)
+        self._fileno = self.socket.fileno()
+        self.add_channel()
+
+    def set_socket(self, sock, map=None):
+        # Adopt an existing socket object and register it.
+        self.socket = sock
+##        self.__dict__['socket'] = sock
+        self._fileno = sock.fileno()
+        self.add_channel(map)
+
+    def set_reuse_addr(self):
+        # try to re-use a server port if possible
+        try:
+            self.socket.setsockopt(
+                socket.SOL_SOCKET, socket.SO_REUSEADDR,
+                self.socket.getsockopt(socket.SOL_SOCKET,
+                                       socket.SO_REUSEADDR) | 1
+                )
+        except socket.error:
+            pass
+
+    # ==================================================
+    # predicates for select()
+    # these are used as filters for the lists of sockets
+    # to pass to select().
+    # ==================================================
+
+    def readable(self):
+        return True
+
+    def writable(self):
+        return True
+
+    # ==================================================
+    # socket object methods.
+    # ==================================================
+
+    def listen(self, num):
+        self.accepting = True
+        if os.name == 'nt' and num > 5:
+            # NOTE(review): clamps Windows backlogs above 5 down to 1 --
+            # presumably working around an old Winsock listen() limit;
+            # confirm before relying on the backlog value here.
+            num = 1
+        return self.socket.listen(num)
+
+    def bind(self, addr):
+        self.addr = addr
+        return self.socket.bind(addr)
+
+    def connect(self, address):
+        # Non-blocking connect: connect_ex() makes "in progress" a normal
+        # return code rather than an exception.
+        self.connected = False
+        err = self.socket.connect_ex(address)
+        # XXX Should interpret Winsock return values
+        if err in (EINPROGRESS, EALREADY, EWOULDBLOCK):
+            return
+        if err in (0, EISCONN):
+            self.addr = address
+            self.connected = True
+            self.handle_connect()
+        else:
+            raise socket.error, (err, errorcode[err])
+
+    def accept(self):
+        # XXX can return either an address pair or None
+        try:
+            conn, addr = self.socket.accept()
+            return conn, addr
+        except socket.error, why:
+            if why[0] == EWOULDBLOCK:
+                # Spurious readiness; caller gets None.
+                pass
+            else:
+                raise
+
+    def send(self, data):
+        # Returns the number of bytes actually sent; 0 when the send
+        # would block.
+        try:
+            result = self.socket.send(data)
+            return result
+        except socket.error, why:
+            if why[0] == EWOULDBLOCK:
+                return 0
+            else:
+                raise
+            # NOTE(review): unreachable -- every path above returns or
+            # raises; candidate for removal.
+            return 0
+
+    def recv(self, buffer_size):
+        # Returns up to buffer_size bytes, or '' (after handle_close)
+        # when the peer has closed or reset the connection.
+        try:
+            data = self.socket.recv(buffer_size)
+            if not data:
+                # a closed connection is indicated by signaling
+                # a read condition, and having recv() return 0.
+                self.handle_close()
+                return ''
+            else:
+                return data
+        except socket.error, why:
+            # winsock sometimes throws ENOTCONN
+            if why[0] in [ECONNRESET, ENOTCONN, ESHUTDOWN]:
+                self.handle_close()
+                return ''
+            else:
+                raise
+
+    def close(self):
+        # Unregister from the map, then close the underlying socket.
+        self.del_channel()
+        self.socket.close()
+
+    # cheap inheritance, used to pass all other attribute
+    # references to the underlying socket object.
+    def __getattr__(self, attr):
+        return getattr(self.socket, attr)
+
+    # log and log_info may be overridden to provide more sophisticated
+    # logging and warning methods. In general, log is for 'hit' logging
+    # and 'log_info' is for informational, warning and error logging.
+
+    def log(self, message):
+        sys.stderr.write('log: %s\n' % str(message))
+
+    def log_info(self, message, type='info'):
+        # 'info' messages are suppressed only when running with -O.
+        if __debug__ or type != 'info':
+            print '%s: %s' % (type, message)
+
+    def handle_read_event(self):
+        if self.accepting:
+            # for an accepting socket, getting a read implies
+            # that we are connected
+            if not self.connected:
+                self.connected = True
+            self.handle_accept()
+        elif not self.connected:
+            # First readiness on an outgoing connection doubles as the
+            # connect notification.
+            self.handle_connect()
+            self.connected = True
+            self.handle_read()
+        else:
+            self.handle_read()
+
+    def handle_write_event(self):
+        # getting a write implies that we are connected
+        if not self.connected:
+            self.handle_connect()
+            self.connected = True
+        self.handle_write()
+
+    def handle_expt_event(self):
+        self.handle_expt()
+
+    def handle_error(self):
+        # Last-resort handler: log the traceback and close the channel.
+        nil, t, v, tbinfo = compact_traceback()
+
+        # sometimes a user repr method will crash.
+        try:
+            self_repr = repr(self)
+        except:
+            self_repr = '<__repr__(self) failed for object at %0x>' % id(self)
+
+        self.log_info(
+            'uncaptured python exception, closing channel %s (%s:%s %s)' % (
+                self_repr,
+                t,
+                v,
+                tbinfo
+                ),
+            'error'
+            )
+        self.close()
+
+    # Default handle_*() implementations: subclasses override the ones
+    # they care about; the defaults just log a warning.
+
+    def handle_expt(self):
+        self.log_info('unhandled exception', 'warning')
+
+    def handle_read(self):
+        self.log_info('unhandled read event', 'warning')
+
+    def handle_write(self):
+        self.log_info('unhandled write event', 'warning')
+
+    def handle_connect(self):
+        self.log_info('unhandled connect event', 'warning')
+
+    def handle_accept(self):
+        self.log_info('unhandled accept event', 'warning')
+
+    def handle_close(self):
+        self.log_info('unhandled close event', 'warning')
+        self.close()
+
+# ---------------------------------------------------------------------------
+# adds simple buffered output capability, useful for simple clients.
+# [for more sophisticated usage use asynchat.async_chat]
+# ---------------------------------------------------------------------------
+
+class dispatcher_with_send(dispatcher):
+
+    def __init__(self, sock=None, map=None):
+        dispatcher.__init__(self, sock, map)
+        self.out_buffer = ''
+
+    def initiate_send(self):
+        num_sent = 0
+        num_sent = dispatcher.send(self, self.out_buffer[:512])
+        self.out_buffer = self.out_buffer[num_sent:]
+
+    def handle_write(self):
+        self.initiate_send()
+
+    def writable(self):
+        return (not self.connected) or len(self.out_buffer)
+
+    def send(self, data):
+        if self.debug:
+            self.log_info('sending %s' % repr(data))
+        self.out_buffer = self.out_buffer + data
+        self.initiate_send()
+
+# ---------------------------------------------------------------------------
+# used for debugging.
+# ---------------------------------------------------------------------------
+
+def compact_traceback():
+    t, v, tb = sys.exc_info()
+    tbinfo = []
+    assert tb # Must have a traceback
+    while tb:
+        tbinfo.append((
+            tb.tb_frame.f_code.co_filename,
+            tb.tb_frame.f_code.co_name,
+            str(tb.tb_lineno)
+            ))
+        tb = tb.tb_next
+
+    # just to be safe
+    del tb
+
+    file, function, line = tbinfo[-1]
+    info = ' '.join(['[%s|%s|%s]' % x for x in tbinfo])
+    return (file, function, line), t, v, info
+
+def close_all(map=None):
+    if map is None:
+        map = socket_map
+    for x in map.values():
+        x.socket.close()
+    map.clear()
+
+# Asynchronous File I/O:
+#
+# After a little research (reading man pages on various unixen, and
+# digging through the linux kernel), I've determined that select()
+# isn't meant for doing asynchronous file i/o.
+# Heartening, though - reading linux/mm/filemap.c shows that linux
+# supports asynchronous read-ahead.  So _MOST_ of the time, the data
+# will be sitting in memory for us already when we go to read it.
+#
+# What other OS's (besides NT) support async file i/o?  [VMS?]
+#
+# Regardless, this is useful for pipes, and stdin/stdout...
+
+if os.name == 'posix':
+    import fcntl
+
+    class file_wrapper:
+        # here we override just enough to make a file
+        # look like a socket for the purposes of asyncore.
+
+        def __init__(self, fd):
+            self.fd = fd
+
+        def recv(self, *args):
+            return os.read(self.fd, *args)
+
+        def send(self, *args):
+            return os.write(self.fd, *args)
+
+        # socket-style and file-style method names are interchangeable
+        read = recv
+        write = send
+
+        def close(self):
+            os.close(self.fd)
+
+        def fileno(self):
+            return self.fd
+
+    class file_dispatcher(dispatcher):
+        # Dispatcher that drives a raw file descriptor (pipe, tty, ...)
+        # through file_wrapper, after switching it to non-blocking mode.
+
+        def __init__(self, fd, map=None):
+            dispatcher.__init__(self, None, map)
+            self.connected = True
+            self.set_file(fd)
+            # set it to non-blocking mode
+            flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0)
+            flags = flags | os.O_NONBLOCK
+            fcntl.fcntl(fd, fcntl.F_SETFL, flags)
+
+        def set_file(self, fd):
+            # Wrap the fd so it answers the socket-ish calls asyncore makes.
+            self._fileno = fd
+            self.socket = file_wrapper(fd)
+            self.add_channel()
diff --git a/depot_tools/release/win/python_24/Lib/atexit.py b/depot_tools/release/win/python_24/Lib/atexit.py
new file mode 100644
index 0000000..c9f4cc6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/atexit.py
@@ -0,0 +1,62 @@
+"""
+atexit.py - allow programmer to define multiple exit functions to be executed
+upon normal program termination.
+
+One public function, register, is defined.
+"""
+
+__all__ = ["register"]
+
+import sys
+
+# Registered handlers as (func, args, kwargs) tuples, run LIFO at exit.
+_exithandlers = []
+def _run_exitfuncs():
+    """run any registered exit functions
+
+    _exithandlers is traversed in reverse order so functions are executed
+    last in, first out.
+    """
+
+    exc_info = None
+    while _exithandlers:
+        func, targs, kargs = _exithandlers.pop()
+        try:
+            func(*targs, **kargs)
+        except SystemExit:
+            # Remember the SystemExit but keep running later handlers.
+            exc_info = sys.exc_info()
+        except:
+            import traceback
+            print >> sys.stderr, "Error in atexit._run_exitfuncs:"
+            traceback.print_exc()
+            exc_info = sys.exc_info()
+
+    if exc_info is not None:
+        # Re-raise the last captured exception so its effect (e.g. the
+        # SystemExit status) is preserved.
+        raise exc_info[0], exc_info[1], exc_info[2]
+
+
+def register(func, *targs, **kargs):
+    """register a function to be executed upon normal program termination
+
+    func - function to be called at exit
+    targs - optional arguments to pass to func
+    kargs - optional keyword arguments to pass to func
+    """
+    # Handlers are popped last-in-first-out by _run_exitfuncs.
+    _exithandlers.append((func, targs, kargs))
+
+if hasattr(sys, "exitfunc"):
+    # Assume it's another registered exit function - append it to our list
+    register(sys.exitfunc)
+# Install our runner as the interpreter's exit hook.
+sys.exitfunc = _run_exitfuncs
+
+if __name__ == "__main__":
+    # Manual demo: at interpreter exit the handlers below fire in
+    # reverse registration order (see _run_exitfuncs).
+    def x1():
+        print "running x1"
+    def x2(n):
+        print "running x2(%r)" % (n,)
+    def x3(n, kwd=None):
+        print "running x3(%r, kwd=%r)" % (n, kwd)
+
+    register(x1)
+    register(x2, 12)
+    register(x3, 5, "bar")
+    register(x3, "no kwd args")
diff --git a/depot_tools/release/win/python_24/Lib/audiodev.py b/depot_tools/release/win/python_24/Lib/audiodev.py
new file mode 100644
index 0000000..8945c98
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/audiodev.py
@@ -0,0 +1,257 @@
+"""Classes for manipulating audio devices (currently only for Sun and SGI)"""
+
+__all__ = ["error","AudioDev"]
+
+class error(Exception):
+    # Module-specific error: bad rate/width/channel parameters, or no
+    # usable audio device found.
+    pass
+
+class Play_Audio_sgi:
+    # Audio output on SGI hardware via the al/AL modules.
+    # Private instance variables
+##      if 0: access frameratelist, nchannelslist, sampwidthlist, oldparams, \
+##                params, config, inited_outrate, inited_width, \
+##                inited_nchannels, port, converter, classinited: private
+
+    classinited = 0
+    frameratelist = nchannelslist = sampwidthlist = None
+
+    def initclass(self):
+        # Lazily build the (raw value -> AL constant) lookup tables.
+        import AL
+        self.frameratelist = [
+                  (48000, AL.RATE_48000),
+                  (44100, AL.RATE_44100),
+                  (32000, AL.RATE_32000),
+                  (22050, AL.RATE_22050),
+                  (16000, AL.RATE_16000),
+                  (11025, AL.RATE_11025),
+                  ( 8000,  AL.RATE_8000),
+                  ]
+        self.nchannelslist = [
+                  (1, AL.MONO),
+                  (2, AL.STEREO),
+                  (4, AL.QUADRO),
+                  ]
+        self.sampwidthlist = [
+                  (1, AL.SAMPLE_8),
+                  (2, AL.SAMPLE_16),
+                  (3, AL.SAMPLE_24),
+                  ]
+        self.classinited = 1
+
+    def __init__(self):
+        import al, AL
+        if not self.classinited:
+            self.initclass()
+        self.oldparams = []
+        self.params = [AL.OUTPUT_RATE, 0]
+        self.config = al.newconfig()
+        self.inited_outrate = 0
+        self.inited_width = 0
+        self.inited_nchannels = 0
+        self.converter = None
+        self.port = None
+        return
+
+    def __del__(self):
+        # Stop playback and restore any device parameters we changed.
+        if self.port:
+            self.stop()
+        if self.oldparams:
+            import al, AL
+            al.setparams(AL.DEFAULT_DEVICE, self.oldparams)
+            self.oldparams = []
+
+    def wait(self):
+        # Block (polling) until all queued samples have played, then stop.
+        if not self.port:
+            return
+        import time
+        while self.port.getfilled() > 0:
+            time.sleep(0.1)
+        self.stop()
+
+    def stop(self):
+        # Close the port and restore the saved device parameters.
+        if self.port:
+            self.port.closeport()
+            self.port = None
+        if self.oldparams:
+            import al, AL
+            al.setparams(AL.DEFAULT_DEVICE, self.oldparams)
+            self.oldparams = []
+
+    def setoutrate(self, rate):
+        # Map the numeric rate onto its AL constant; raises error if the
+        # rate is not one of the supported values.
+        for (raw, cooked) in self.frameratelist:
+            if rate == raw:
+                self.params[1] = cooked
+                self.inited_outrate = 1
+                break
+        else:
+            raise error, 'bad output rate'
+
+    def setsampwidth(self, width):
+        # Width 0 selects u-law input: samples are converted to 16-bit
+        # linear through self.converter before being written.
+        for (raw, cooked) in self.sampwidthlist:
+            if width == raw:
+                self.config.setwidth(cooked)
+                self.inited_width = 1
+                break
+        else:
+            if width == 0:
+                import AL
+                self.inited_width = 0
+                self.config.setwidth(AL.SAMPLE_16)
+                self.converter = self.ulaw2lin
+            else:
+                raise error, 'bad sample width'
+
+    def setnchannels(self, nchannels):
+        for (raw, cooked) in self.nchannelslist:
+            if nchannels == raw:
+                self.config.setchannels(cooked)
+                self.inited_nchannels = 1
+                break
+        else:
+            raise error, 'bad # of channels'
+
+    def writeframes(self, data):
+        # Opens the port on first write, saving the previous device
+        # parameters so stop()/__del__ can restore them.
+        if not (self.inited_outrate and self.inited_nchannels):
+            raise error, 'params not specified'
+        if not self.port:
+            import al, AL
+            self.port = al.openport('Python', 'w', self.config)
+            self.oldparams = self.params[:]
+            al.getparams(AL.DEFAULT_DEVICE, self.oldparams)
+            al.setparams(AL.DEFAULT_DEVICE, self.params)
+        if self.converter:
+            data = self.converter(data)
+        self.port.writesamps(data)
+
+    def getfilled(self):
+        # Samples still queued for playback (0 when no port is open).
+        if self.port:
+            return self.port.getfilled()
+        else:
+            return 0
+
+    def getfillable(self):
+        if self.port:
+            return self.port.getfillable()
+        else:
+            return self.config.getqueuesize()
+
+    # private methods
+##      if 0: access *: private
+
+    def ulaw2lin(self, data):
+        # u-law -> 16-bit linear conversion used when sampwidth is 0.
+        import audioop
+        return audioop.ulaw2lin(data, 2)
+
+class Play_Audio_sun:
+    # Audio output on Sun hardware via the sunaudiodev module.
+##      if 0: access outrate, sampwidth, nchannels, inited_outrate, inited_width, \
+##                inited_nchannels, converter: private
+
+    def __init__(self):
+        self.outrate = 0
+        self.sampwidth = 0
+        self.nchannels = 0
+        self.inited_outrate = 0
+        self.inited_width = 0
+        self.inited_nchannels = 0
+        self.converter = None
+        self.port = None
+        return
+
+    def __del__(self):
+        self.stop()
+
+    def setoutrate(self, rate):
+        self.outrate = rate
+        self.inited_outrate = 1
+
+    def setsampwidth(self, width):
+        self.sampwidth = width
+        self.inited_width = 1
+
+    def setnchannels(self, nchannels):
+        self.nchannels = nchannels
+        self.inited_nchannels = 1
+
+    def writeframes(self, data):
+        # Opens and configures the device on first write.
+        if not (self.inited_outrate and self.inited_width and self.inited_nchannels):
+            raise error, 'params not specified'
+        if not self.port:
+            import sunaudiodev, SUNAUDIODEV
+            self.port = sunaudiodev.open('w')
+            info = self.port.getinfo()
+            info.o_sample_rate = self.outrate
+            info.o_channels = self.nchannels
+            if self.sampwidth == 0:
+                info.o_precision = 8
+                # NOTE(review): this sets an attribute on self, not on
+                # info -- looks like it was meant to be info.o_encoding.
+                # As written the device keeps its default encoding here;
+                # confirm against the "leave defaults" intent below.
+                self.o_encoding = SUNAUDIODEV.ENCODING_ULAW
+                # XXX Hack, hack -- leave defaults
+            else:
+                info.o_precision = 8 * self.sampwidth
+                info.o_encoding = SUNAUDIODEV.ENCODING_LINEAR
+                # NOTE(review): setinfo() is only reached on this linear
+                # path; the u-law branch never pushes info to the device.
+                # Verify that is intentional.
+                self.port.setinfo(info)
+        if self.converter:
+            data = self.converter(data)
+        self.port.write(data)
+
+    def wait(self):
+        # Block until queued output has drained, then stop.
+        if not self.port:
+            return
+        self.port.drain()
+        self.stop()
+
+    def stop(self):
+        if self.port:
+            self.port.flush()
+            self.port.close()
+            self.port = None
+
+    def getfilled(self):
+        # Bytes still buffered for output (0 when no port is open).
+        if self.port:
+            return self.port.obufcount()
+        else:
+            return 0
+
+##    # Nobody remembers what this method does, and it's broken. :-(
+##    def getfillable(self):
+##        return BUFFERSIZE - self.getfilled()
+
+def AudioDev():
+    # Factory: pick a platform-specific player class by probing for its
+    # support module (SGI al, then Sun sunaudiodev, then Mac Audio_mac).
+    # Dynamically try to import and use a platform specific module.
+    try:
+        import al
+    except ImportError:
+        try:
+            import sunaudiodev
+            return Play_Audio_sun()
+        except ImportError:
+            try:
+                import Audio_mac
+            except ImportError:
+                # No platform module available at all.
+                raise error, 'no audio device'
+            else:
+                return Audio_mac.Play_Audio_mac()
+    else:
+        return Play_Audio_sgi()
+
+def test(fn = None):
+    # Play an AIFF file (path from argv[1], else a baked-in Mac-style
+    # default path) through whatever device AudioDev() selects.
+    import sys
+    if sys.argv[1:]:
+        fn = sys.argv[1]
+    else:
+        fn = 'f:just samples:just.aif'
+    import aifc
+    af = aifc.open(fn, 'r')
+    print fn, af.getparams()
+    p = AudioDev()
+    p.setoutrate(af.getframerate())
+    p.setsampwidth(af.getsampwidth())
+    p.setnchannels(af.getnchannels())
+    BUFSIZ = af.getframerate()/af.getsampwidth()/af.getnchannels()
+    while 1:
+        data = af.readframes(BUFSIZ)
+        if not data: break
+        print len(data)
+        p.writeframes(data)
+    # Let the device drain before returning.
+    p.wait()
+
+if __name__ == '__main__':
+    # Run the demo player when invoked as a script.
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/base64.py b/depot_tools/release/win/python_24/Lib/base64.py
new file mode 100644
index 0000000..f90b91d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/base64.py
@@ -0,0 +1,355 @@
+#! /usr/bin/env python
+
+"""RFC 3548: Base16, Base32, Base64 Data Encodings"""
+
+# Modified 04-Oct-1995 by Jack Jansen to use binascii module
+# Modified 30-Dec-2003 by Barry Warsaw to add full RFC 3548 support
+
+import re
+import struct
+import binascii
+
+
+__all__ = [
+    # Legacy interface exports traditional RFC 1521 Base64 encodings
+    'encode', 'decode', 'encodestring', 'decodestring',
+    # Generalized interface for other encodings
+    'b64encode', 'b64decode', 'b32encode', 'b32decode',
+    'b16encode', 'b16decode',
+    # Standard Base64 encoding
+    'standard_b64encode', 'standard_b64decode',
+    # Some common Base64 alternatives.  As referenced by RFC 3458, see thread
+    # starting at:
+    #
+    # http://zgp.org/pipermail/p2p-hackers/2001-September/000316.html
+    'urlsafe_b64encode', 'urlsafe_b64decode',
+    ]
+
# Identity translation table (one single-character string per byte value);
# _translate() copies and patches this rather than rebuilding it each call.
_translation = [chr(_x) for _x in range(256)]
# Joiner used to assemble encoded/decoded chunks.
EMPTYSTRING = ''
+
+
+def _translate(s, altchars):
+    translation = _translation[:]
+    for k, v in altchars.items():
+        translation[ord(k)] = v
+    return s.translate(''.join(translation))
+
+
+
+# Base64 encoding/decoding uses binascii
+
def b64encode(s, altchars=None):
    """Encode a string using Base64.

    s is the string to encode.  Optional altchars must be a string of at
    least length 2 (additional characters are ignored); its first two
    characters replace '+' and '/' respectively in the output, e.g. to
    produce url- or filesystem-safe encodings.

    The encoded string is returned.
    """
    # b2a_base64 appends a trailing newline; strip it off.
    encoded = binascii.b2a_base64(s)[:-1]
    if altchars is None:
        return encoded
    return _translate(encoded, {'+': altchars[0], '/': altchars[1]})
+
+
+def b64decode(s, altchars=None):
+    """Decode a Base64 encoded string.
+
+    s is the string to decode.  Optional altchars must be a string of at least
+    length 2 (additional characters are ignored) which specifies the
+    alternative alphabet used instead of the '+' and '/' characters.
+
+    The decoded string is returned.  A TypeError is raised if s were
+    incorrectly padded or if there are non-alphabet characters present in the
+    string.
+    """
+    if altchars is not None:
+        s = _translate(s, {altchars[0]: '+', altchars[1]: '/'})
+    try:
+        return binascii.a2b_base64(s)
+    except binascii.Error, msg:
+        # Transform this exception for consistency
+        raise TypeError(msg)
+
+
def standard_b64encode(s):
    """Encode string s with the standard Base64 alphabet.

    Returns the encoded string.
    """
    return b64encode(s)
+
def standard_b64decode(s):
    """Decode string s encoded with the standard Base64 alphabet.

    Returns the decoded string.  Raises TypeError if s is incorrectly
    padded or contains characters outside the alphabet.
    """
    return b64decode(s)
+
def urlsafe_b64encode(s):
    """Encode string s using the url- and filesystem-safe Base64 alphabet.

    This alphabet substitutes '-' for '+' and '_' for '/'.  Returns the
    encoded string.
    """
    return b64encode(s, '-_')
+
def urlsafe_b64decode(s):
    """Decode string s encoded with the url- and filesystem-safe alphabet.

    This alphabet substitutes '-' for '+' and '_' for '/'.  Returns the
    decoded string.  Raises TypeError if s is incorrectly padded or
    contains characters outside the alphabet.
    """
    return b64decode(s, '-_')
+
+
+
# Base32 encoding/decoding must be done in Python
_b32alphabet = {
    0: 'A',  9: 'J', 18: 'S', 27: '3',
    1: 'B', 10: 'K', 19: 'T', 28: '4',
    2: 'C', 11: 'L', 20: 'U', 29: '5',
    3: 'D', 12: 'M', 21: 'V', 30: '6',
    4: 'E', 13: 'N', 22: 'W', 31: '7',
    5: 'F', 14: 'O', 23: 'X',
    6: 'G', 15: 'P', 24: 'Y',
    7: 'H', 16: 'Q', 25: 'Z',
    8: 'I', 17: 'R', 26: '2',
    }

# Forward table: 5-bit value -> alphabet character.  Built by explicit key
# lookup; the previous [v for v in _b32alphabet.values()] relied on dict
# iteration order, which is implementation-defined.
_b32tab = [_b32alphabet[_i] for _i in range(32)]
# Reverse table: character -> value.  long() so that the shifted accumulator
# arithmetic in b32decode always yields longs (hex() of a long carries the
# 'L' suffix that b32decode's slicing expects).
_b32rev = dict([(v, long(k)) for k, v in _b32alphabet.items()])
+
+
def b32encode(s):
    """Encode a string using Base32.

    s is the string to encode.  The encoded string is returned.
    Output length is always a multiple of 8 characters, '='-padded
    per RFC 3548.
    """
    parts = []
    quanta, leftover = divmod(len(s), 5)
    # Pad the last quantum with zero bits if necessary
    if leftover:
        s += ('\0' * (5 - leftover))
        quanta += 1
    for i in range(quanta):
        # c1 and c2 are 16 bits wide, c3 is 8 bits wide.  The intent of this
        # code is to process the 40 bits in units of 5 bits.  So we take the 1
        # leftover bit of c1 and tack it onto c2.  Then we take the 2 leftover
        # bits of c2 and tack them onto c3.  The shifts and masks are intended
        # to give us values of exactly 5 bits in width.
        c1, c2, c3 = struct.unpack('!HHB', s[i*5:(i+1)*5])
        c2 += (c1 & 1) << 16 # 17 bits wide
        c3 += (c2 & 3) << 8  # 10 bits wide
        parts.extend([_b32tab[c1 >> 11],         # bits 1 - 5
                      _b32tab[(c1 >> 6) & 0x1f], # bits 6 - 10
                      _b32tab[(c1 >> 1) & 0x1f], # bits 11 - 15
                      _b32tab[c2 >> 12],         # bits 16 - 20 (1 - 5)
                      _b32tab[(c2 >> 7) & 0x1f], # bits 21 - 25 (6 - 10)
                      _b32tab[(c2 >> 2) & 0x1f], # bits 26 - 30 (11 - 15)
                      _b32tab[c3 >> 5],          # bits 31 - 35 (1 - 5)
                      _b32tab[c3 & 0x1f],        # bits 36 - 40 (1 - 5)
                      ])
    encoded = EMPTYSTRING.join(parts)
    # Adjust for any leftover partial quanta: the characters that encode
    # only the zero padding bits are replaced by '=' pad characters.
    if leftover == 1:
        return encoded[:-6] + '======'
    elif leftover == 2:
        return encoded[:-4] + '===='
    elif leftover == 3:
        return encoded[:-3] + '==='
    elif leftover == 4:
        return encoded[:-1] + '='
    return encoded
+
+
def b32decode(s, casefold=False, map01=None):
    """Decode a Base32 encoded string.

    s is the string to decode.  Optional casefold is a flag specifying whether
    a lowercase alphabet is acceptable as input.  For security purposes, the
    default is False.

    RFC 3548 allows for optional mapping of the digit 0 (zero) to the letter O
    (oh), and for optional mapping of the digit 1 (one) to either the letter I
    (eye) or letter L (el).  The optional argument map01 when not None,
    specifies which letter the digit 1 should be mapped to (when map01 is not
    None, the digit 0 is always mapped to the letter O).  For security
    purposes the default is None, so that 0 and 1 are not allowed in the
    input.

    The decoded string is returned.  A TypeError is raised if s were
    incorrectly padded or if there are non-alphabet characters present in the
    string.
    """
    quanta, leftover = divmod(len(s), 8)
    if leftover:
        raise TypeError('Incorrect padding')
    # Handle section 2.4 zero and one mapping.  The flag map01 will be either
    # False, or the character to map the digit 1 (one) to.  It should be
    # either L (el) or I (eye).
    if map01:
        s = _translate(s, {'0': 'O', '1': map01})
    if casefold:
        s = s.upper()
    # Strip off pad characters from the right.  We need to count the pad
    # characters because this will tell us how many null bytes to remove from
    # the end of the decoded string.
    padchars = 0
    mo = re.search('(?P<pad>[=]*)$', s)
    if mo:
        padchars = len(mo.group('pad'))
        if padchars > 0:
            s = s[:-padchars]
    # Now decode the full quanta
    parts = []
    acc = 0
    shift = 35
    for c in s:
        val = _b32rev.get(c)
        if val is None:
            raise TypeError('Non-base32 digit found')
        # Reuse the value we already looked up instead of indexing again.
        acc += val << shift
        shift -= 5
        if shift < 0:
            # Format to exactly 10 hex digits (5 bytes).  The former
            # hex(acc)[2:-1] dropped leading zero digits (and assumed a
            # trailing 'L'), so quanta decoding to bytes with leading
            # zeros raised "Odd-length string" or came out short.
            parts.append(binascii.unhexlify('%010x' % acc))
            acc = 0
            shift = 35
    # Process the last, partial quantum: trim the null bytes implied by
    # the pad characters (padchars of 2, 5, 7 or more are impossible for
    # well-formed input).
    if padchars:
        last = binascii.unhexlify('%010x' % acc)
        if padchars == 1:
            last = last[:-1]
        elif padchars == 3:
            last = last[:-2]
        elif padchars == 4:
            last = last[:-3]
        elif padchars == 6:
            last = last[:-4]
        else:
            raise TypeError('Incorrect padding')
        parts.append(last)
    return EMPTYSTRING.join(parts)
+
+
+
+# RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns
+# lowercase.  The RFC also recommends against accepting input case
+# insensitively.
def b16encode(s):
    """Encode a string using Base16 (uppercase hexadecimal).

    s is the string to encode.  The encoded string is returned.
    """
    # hexlify() emits lowercase; RFC 3548 specifies uppercase.
    encoded = binascii.hexlify(s)
    return encoded.upper()
+
+
def b16decode(s, casefold=False):
    """Decode a Base16 encoded string.

    s is the string to decode.  Optional casefold specifies whether a
    lowercase alphabet is acceptable as input; for security purposes the
    default is False.

    Returns the decoded string.  Raises TypeError if s contains any
    character outside 0-9A-F (after optional casefolding).
    """
    if casefold:
        s = s.upper()
    # Reject non-alphabet characters up front; the RFC recommends against
    # accepting input case-insensitively by default.
    if re.search('[^0-9A-F]', s) is not None:
        raise TypeError('Non-base16 digit found')
    return binascii.unhexlify(s)
+
+
+
# Legacy interface.  This code could be cleaned up since I don't believe
# binascii has any line length limitations.  It just doesn't seem worth it
# though.

MAXLINESIZE = 76 # Excluding the CRLF
# 57 raw bytes encode to exactly one MAXLINESIZE-character base64 line.
MAXBINSIZE = (MAXLINESIZE//4)*3
+
def encode(input, output):
    """Encode the contents of file object input, writing to file object output.

    Reads in MAXBINSIZE-byte chunks so that every emitted line except
    possibly the last is MAXLINESIZE characters long.
    """
    while True:
        chunk = input.read(MAXBINSIZE)
        if not chunk:
            break
        # A short read need not mean EOF; keep topping up the chunk until
        # it is full or the input really is exhausted.
        while len(chunk) < MAXBINSIZE:
            more = input.read(MAXBINSIZE - len(chunk))
            if not more:
                break
            chunk += more
        output.write(binascii.b2a_base64(chunk))
+
+
def decode(input, output):
    """Decode base64 data from file object input, writing to file object output."""
    line = input.readline()
    while line:
        output.write(binascii.a2b_base64(line))
        line = input.readline()
+
+
def encodestring(s):
    """Return the base64 encoding of string s, with embedded newlines
    after every MAXLINESIZE characters of output."""
    pieces = [binascii.b2a_base64(s[i:i + MAXBINSIZE])
              for i in range(0, len(s), MAXBINSIZE)]
    return "".join(pieces)
+
+
def decodestring(s):
    """Return the decoded form of base64-encoded string s."""
    return binascii.a2b_base64(s)
+
+
+
+# Useable as a script...
def test():
    """Small test program: run encode/decode as a command-line filter."""
    import sys, getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], 'deut')
    except getopt.error, msg:
        # Print the usage message to stderr, then exit.
        sys.stdout = sys.stderr
        print msg
        print """usage: %s [-d|-e|-u|-t] [file|-]
        -d, -u: decode
        -e: encode (default)
        -t: encode and decode string 'Aladdin:open sesame'"""%sys.argv[0]
        sys.exit(2)
    func = encode
    for o, a in opts:
        if o == '-e': func = encode
        if o == '-d': func = decode
        if o == '-u': func = decode
        if o == '-t': test1(); return
    # Filter the named file (or stdin when absent or '-') to stdout.
    if args and args[0] != '-':
        func(open(args[0], 'rb'), sys.stdout)
    else:
        func(sys.stdin, sys.stdout)
+
+
def test1():
    """Round-trip a fixed string through encodestring/decodestring and print."""
    s0 = "Aladdin:open sesame"
    s1 = encodestring(s0)
    s2 = decodestring(s1)
    print s0, repr(s1), s2
+
+
+if __name__ == '__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/bdb.py b/depot_tools/release/win/python_24/Lib/bdb.py
new file mode 100644
index 0000000..8f808cc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bdb.py
@@ -0,0 +1,611 @@
+"""Debugger basics"""
+
+import sys
+import os
+import types
+
+__all__ = ["BdbQuit","Bdb","Breakpoint"]
+
class BdbQuit(Exception):
    """Raised by the dispatch methods to abandon debugging entirely."""
+
+
class Bdb:

    """Generic Python debugger base class.

    This class takes care of details of the trace facility;
    a derived class should implement user interaction.
    The standard debugger class (pdb.Pdb) is an example.
    """

    def __init__(self):
        # Maps canonical filename -> list of breakpointed line numbers.
        self.breaks = {}
        # Cache of canonic() results, keyed by the filename as given.
        self.fncache = {}

    def canonic(self, filename):
        """Return the canonical (absolute, case-normalized) form of filename.

        Pseudo-filenames of the form "<...>" (e.g. "<string>") are returned
        unchanged.  Results are cached in self.fncache.
        """
        if filename == "<" + filename[1:-1] + ">":
            return filename
        canonic = self.fncache.get(filename)
        if not canonic:
            canonic = os.path.abspath(filename)
            canonic = os.path.normcase(canonic)
            self.fncache[filename] = canonic
        return canonic

    def reset(self):
        """Refresh the line cache and clear all stop/return frame state."""
        import linecache
        linecache.checkcache()
        self.botframe = None
        self.stopframe = None
        self.returnframe = None
        self.quitting = 0

    def trace_dispatch(self, frame, event, arg):
        """Trace callback installed via sys.settrace().

        Routes each event to the matching dispatch_* method; returns a
        trace function (itself) to continue tracing, or None to stop
        tracing the current scope.
        """
        if self.quitting:
            return # None
        if event == 'line':
            return self.dispatch_line(frame)
        if event == 'call':
            return self.dispatch_call(frame, arg)
        if event == 'return':
            return self.dispatch_return(frame, arg)
        if event == 'exception':
            return self.dispatch_exception(frame, arg)
        # C-function events are ignored, but tracing stays enabled.
        if event == 'c_call':
            return self.trace_dispatch
        if event == 'c_exception':
            return self.trace_dispatch
        if event == 'c_return':
            return self.trace_dispatch
        print 'bdb.Bdb.dispatch: unknown debugging event:', repr(event)
        return self.trace_dispatch

    def dispatch_line(self, frame):
        """Handle a 'line' event: invoke user_line() when stopping or breaking here."""
        if self.stop_here(frame) or self.break_here(frame):
            self.user_line(frame)
            if self.quitting: raise BdbQuit
        return self.trace_dispatch

    def dispatch_call(self, frame, arg):
        """Handle a 'call' event: decide whether the new frame needs tracing."""
        # XXX 'arg' is no longer used
        if self.botframe is None:
            # First call of dispatch since reset()
            self.botframe = frame.f_back # (CT) Note that this may also be None!
            return self.trace_dispatch
        if not (self.stop_here(frame) or self.break_anywhere(frame)):
            # No need to trace this function
            return # None
        self.user_call(frame, arg)
        if self.quitting: raise BdbQuit
        return self.trace_dispatch

    def dispatch_return(self, frame, arg):
        """Handle a 'return' event: invoke user_return() when appropriate."""
        if self.stop_here(frame) or frame == self.returnframe:
            self.user_return(frame, arg)
            if self.quitting: raise BdbQuit
        return self.trace_dispatch

    def dispatch_exception(self, frame, arg):
        """Handle an 'exception' event: invoke user_exception() when stopping here."""
        if self.stop_here(frame):
            self.user_exception(frame, arg)
            if self.quitting: raise BdbQuit
        return self.trace_dispatch

    # Normally derived classes don't override the following
    # methods, but they may if they want to redefine the
    # definition of stopping and breakpoints.

    def stop_here(self, frame):
        """Return True if execution should stop in this frame."""
        # (CT) stopframe may now also be None, see dispatch_call.
        # (CT) the former test for None is therefore removed from here.
        if frame is self.stopframe:
            return True
        # Walk up the stack: stop anywhere at or below botframe unless
        # stopframe is found first.
        while frame is not None and frame is not self.stopframe:
            if frame is self.botframe:
                return True
            frame = frame.f_back
        return False

    def break_here(self, frame):
        """Return True if an effective breakpoint exists for this line.

        Also clears a temporary breakpoint once it has been hit.
        """
        filename = self.canonic(frame.f_code.co_filename)
        if not filename in self.breaks:
            return False
        lineno = frame.f_lineno
        if not lineno in self.breaks[filename]:
            # The line itself has no breakpoint, but maybe the line is the
            # first line of a function with breakpoint set by function name.
            lineno = frame.f_code.co_firstlineno
            if not lineno in self.breaks[filename]:
                return False

        # flag says ok to delete temp. bp
        (bp, flag) = effective(filename, lineno, frame)
        if bp:
            self.currentbp = bp.number
            if (flag and bp.temporary):
                self.do_clear(str(bp.number))
            return True
        else:
            return False

    def do_clear(self, arg):
        """Remove a breakpoint by number; must be implemented by subclasses."""
        raise NotImplementedError, "subclass of bdb must implement do_clear()"

    def break_anywhere(self, frame):
        """Return True if there is any breakpoint in this frame's file."""
        return self.breaks.has_key(
            self.canonic(frame.f_code.co_filename))

    # Derived classes should override the user_* methods
    # to gain control.

    def user_call(self, frame, argument_list):
        """This method is called when there is the remote possibility
        that we ever need to stop in this function."""
        pass

    def user_line(self, frame):
        """This method is called when we stop or break at this line."""
        pass

    def user_return(self, frame, return_value):
        """This method is called when a return trap is set here."""
        pass

    def user_exception(self, frame, (exc_type, exc_value, exc_traceback)):
        """This method is called if an exception occurs,
        but only if we are to stop at or just below this level."""
        pass

    # Derived classes and clients can call the following methods
    # to affect the stepping state.

    def set_step(self):
        """Stop after one line of code."""
        self.stopframe = None
        self.returnframe = None
        self.quitting = 0

    def set_next(self, frame):
        """Stop on the next line in or below the given frame."""
        self.stopframe = frame
        self.returnframe = None
        self.quitting = 0

    def set_return(self, frame):
        """Stop when returning from the given frame."""
        self.stopframe = frame.f_back
        self.returnframe = frame
        self.quitting = 0

    def set_trace(self, frame=None):
        """Start debugging from `frame`.

        If frame is not specified, debugging starts from caller's frame.
        """
        if frame is None:
            frame = sys._getframe().f_back
        self.reset()
        # Install the trace function on every frame already on the stack so
        # stepping works in code that is currently running.
        while frame:
            frame.f_trace = self.trace_dispatch
            self.botframe = frame
            frame = frame.f_back
        self.set_step()
        sys.settrace(self.trace_dispatch)

    def set_continue(self):
        """Resume execution, stopping only at breakpoints (if any)."""
        # Don't stop except at breakpoints or when finished
        self.stopframe = self.botframe
        self.returnframe = None
        self.quitting = 0
        if not self.breaks:
            # no breakpoints; run without debugger overhead
            sys.settrace(None)
            frame = sys._getframe().f_back
            while frame and frame is not self.botframe:
                del frame.f_trace
                frame = frame.f_back

    def set_quit(self):
        """Stop tracing; causes BdbQuit on the next dispatch."""
        self.stopframe = self.botframe
        self.returnframe = None
        self.quitting = 1
        sys.settrace(None)

    # Derived classes and clients can call the following methods
    # to manipulate breakpoints.  These methods return an
    # error message is something went wrong, None if all is well.
    # Set_break prints out the breakpoint line and file:lineno.
    # Call self.get_*break*() to see the breakpoints or better
    # for bp in Breakpoint.bpbynumber: if bp: bp.bpprint().

    def set_break(self, filename, lineno, temporary=0, cond = None,
                  funcname=None):
        """Set a breakpoint; returns an error message string on failure."""
        filename = self.canonic(filename)
        import linecache # Import as late as possible
        line = linecache.getline(filename, lineno)
        if not line:
            return 'Line %s:%d does not exist' % (filename,
                                   lineno)
        if not filename in self.breaks:
            self.breaks[filename] = []
        list = self.breaks[filename]
        if not lineno in list:
            list.append(lineno)
        bp = Breakpoint(filename, lineno, temporary, cond, funcname)

    def clear_break(self, filename, lineno):
        """Delete all breakpoints at filename:lineno; returns an error message on failure."""
        filename = self.canonic(filename)
        if not filename in self.breaks:
            return 'There are no breakpoints in %s' % filename
        if lineno not in self.breaks[filename]:
            return 'There is no breakpoint at %s:%d' % (filename,
                                    lineno)
        # If there's only one bp in the list for that file,line
        # pair, then remove the breaks entry
        for bp in Breakpoint.bplist[filename, lineno][:]:
            bp.deleteMe()
        if not Breakpoint.bplist.has_key((filename, lineno)):
            self.breaks[filename].remove(lineno)
        if not self.breaks[filename]:
            del self.breaks[filename]

    def clear_bpbynumber(self, arg):
        """Delete the breakpoint numbered arg; returns an error message on failure."""
        try:
            number = int(arg)
        except:
            return 'Non-numeric breakpoint number (%s)' % arg
        try:
            bp = Breakpoint.bpbynumber[number]
        except IndexError:
            return 'Breakpoint number (%d) out of range' % number
        if not bp:
            return 'Breakpoint (%d) already deleted' % number
        self.clear_break(bp.file, bp.line)

    def clear_all_file_breaks(self, filename):
        """Delete every breakpoint in filename; returns an error message on failure."""
        filename = self.canonic(filename)
        if not filename in self.breaks:
            return 'There are no breakpoints in %s' % filename
        for line in self.breaks[filename]:
            blist = Breakpoint.bplist[filename, line]
            for bp in blist:
                bp.deleteMe()
        del self.breaks[filename]

    def clear_all_breaks(self):
        """Delete every breakpoint; returns an error message if none exist."""
        if not self.breaks:
            return 'There are no breakpoints'
        for bp in Breakpoint.bpbynumber:
            if bp:
                bp.deleteMe()
        self.breaks = {}

    def get_break(self, filename, lineno):
        """Return whether there is a breakpoint at filename:lineno."""
        filename = self.canonic(filename)
        return filename in self.breaks and \
            lineno in self.breaks[filename]

    def get_breaks(self, filename, lineno):
        """Return the list of Breakpoint objects at filename:lineno, or []."""
        filename = self.canonic(filename)
        return filename in self.breaks and \
            lineno in self.breaks[filename] and \
            Breakpoint.bplist[filename, lineno] or []

    def get_file_breaks(self, filename):
        """Return the list of breakpointed line numbers in filename, or []."""
        filename = self.canonic(filename)
        if filename in self.breaks:
            return self.breaks[filename]
        else:
            return []

    def get_all_breaks(self):
        """Return the mapping of all breakpoints: canonical filename -> line list."""
        return self.breaks

    # Derived classes and clients can call the following method
    # to get a data structure representing a stack trace.

    def get_stack(self, f, t):
        """Return (stack, index) for frame f and traceback t.

        stack is a list of (frame, lineno) pairs from botframe down,
        followed by the traceback frames; index identifies the entry the
        debugger should treat as current.
        """
        stack = []
        if t and t.tb_frame is f:
            t = t.tb_next
        while f is not None:
            stack.append((f, f.f_lineno))
            if f is self.botframe:
                break
            f = f.f_back
        stack.reverse()
        i = max(0, len(stack) - 1)
        while t is not None:
            stack.append((t.tb_frame, t.tb_lineno))
            t = t.tb_next
        return stack, i

    #

    def format_stack_entry(self, frame_lineno, lprefix=': '):
        """Return a one-line description of a (frame, lineno) stack entry."""
        import linecache, repr
        frame, lineno = frame_lineno
        filename = self.canonic(frame.f_code.co_filename)
        s = '%s(%r)' % (filename, lineno)
        if frame.f_code.co_name:
            s = s + frame.f_code.co_name
        else:
            s = s + "<lambda>"
        # __args__ / __return__ are conventionally stashed in f_locals by
        # the debugger machinery -- presumably by pdb; absent here.
        if '__args__' in frame.f_locals:
            args = frame.f_locals['__args__']
        else:
            args = None
        if args:
            s = s + repr.repr(args)
        else:
            s = s + '()'
        if '__return__' in frame.f_locals:
            rv = frame.f_locals['__return__']
            s = s + '->'
            s = s + repr.repr(rv)
        line = linecache.getline(filename, lineno)
        if line: s = s + lprefix + line.strip()
        return s

    # The following two methods can be called by clients to use
    # a debugger to debug a statement, given as a string.

    def run(self, cmd, globals=None, locals=None):
        """Debug the statement cmd, executed in the given namespaces."""
        if globals is None:
            import __main__
            globals = __main__.__dict__
        if locals is None:
            locals = globals
        self.reset()
        sys.settrace(self.trace_dispatch)
        if not isinstance(cmd, types.CodeType):
            cmd = cmd+'\n'
        try:
            try:
                exec cmd in globals, locals
            except BdbQuit:
                pass
        finally:
            self.quitting = 1
            sys.settrace(None)

    def runeval(self, expr, globals=None, locals=None):
        """Debug the expression expr; return its value (None on quit)."""
        if globals is None:
            import __main__
            globals = __main__.__dict__
        if locals is None:
            locals = globals
        self.reset()
        sys.settrace(self.trace_dispatch)
        if not isinstance(expr, types.CodeType):
            expr = expr+'\n'
        try:
            try:
                return eval(expr, globals, locals)
            except BdbQuit:
                pass
        finally:
            self.quitting = 1
            sys.settrace(None)

    def runctx(self, cmd, globals, locals):
        """Backward-compatibility alias for run()."""
        # B/W compatibility
        self.run(cmd, globals, locals)

    # This method is more useful to debug a single function call.

    def runcall(self, func, *args, **kwds):
        """Debug a single function call; return the function's result."""
        self.reset()
        sys.settrace(self.trace_dispatch)
        res = None
        try:
            try:
                res = func(*args, **kwds)
            except BdbQuit:
                pass
        finally:
            self.quitting = 1
            sys.settrace(None)
        return res
+
+
def set_trace():
    """Start debugging from the caller's frame with a fresh, bare Bdb.

    A bare Bdb has no-op user_* callbacks, so this mostly exercises the
    trace machinery itself.
    """
    Bdb().set_trace()
+
+
class Breakpoint:

    """Breakpoint class

    Implements temporary breakpoints, ignore counts, disabling and
    (re)-enabling, and conditionals.

    Breakpoints are indexed by number through bpbynumber and by
    the file,line tuple using bplist.  The former points to a
    single instance of class Breakpoint.  The latter points to a
    list of such instances since there may be more than one
    breakpoint per line.

    """

    # XXX Keeping state in the class is a mistake -- this means
    # you cannot have more than one active Bdb instance.

    next = 1        # Next bp to be assigned
    bplist = {}     # indexed by (file, lineno) tuple
    bpbynumber = [None] # Each entry is None or an instance of Bpt
                # index 0 is unused, except for marking an
                # effective break .... see effective()

    def __init__(self, file, line, temporary=0, cond=None, funcname=None):
        """Create a breakpoint and register it in bplist and bpbynumber.

        file must already be in canonical form; cond, if given, is an
        expression string evaluated in the frame when the breakpoint is
        reached (see effective()).
        """
        self.funcname = funcname
        # Needed if funcname is not None.
        self.func_first_executable_line = None
        self.file = file    # This better be in canonical form!
        self.line = line
        self.temporary = temporary
        self.cond = cond
        self.enabled = 1
        self.ignore = 0
        self.hits = 0
        self.number = Breakpoint.next
        Breakpoint.next = Breakpoint.next + 1
        # Build the two lists
        self.bpbynumber.append(self)
        if self.bplist.has_key((file, line)):
            self.bplist[file, line].append(self)
        else:
            self.bplist[file, line] = [self]


    def deleteMe(self):
        """Unregister this breakpoint from both class-level indexes."""
        index = (self.file, self.line)
        self.bpbynumber[self.number] = None   # No longer in list
        self.bplist[index].remove(self)
        if not self.bplist[index]:
            # No more bp for this f:l combo
            del self.bplist[index]

    def enable(self):
        """Re-enable this breakpoint."""
        self.enabled = 1

    def disable(self):
        """Disable this breakpoint without deleting it."""
        self.enabled = 0

    def bpprint(self):
        """Print a one-entry summary of this breakpoint to stdout."""
        if self.temporary:
            disp = 'del  '
        else:
            disp = 'keep '
        if self.enabled:
            disp = disp + 'yes'
        else:
            disp = disp + 'no '
        print '%-4dbreakpoint    %s at %s:%d' % (self.number, disp,
                             self.file, self.line)
        if self.cond:
            print '\tstop only if %s' % (self.cond,)
        if self.ignore:
            print '\tignore next %d hits' % (self.ignore)
        if (self.hits):
            if (self.hits > 1): ss = 's'
            else: ss = ''
            print ('\tbreakpoint already hit %d time%s' %
                   (self.hits, ss))
+
+# -----------end of Breakpoint class----------
+
def checkfuncname(b, frame):
    """Return True if breakpoint b applies to the current frame.

    Line-number breakpoints fire only on their exact line; function-name
    breakpoints fire only on the function's first executable line (which
    is recorded on first entry).
    """
    if not b.funcname:
        # Line-based breakpoint.  A breakpoint set on a def statement's
        # line would otherwise fire when the function is *called*; only
        # break on the exact line.
        return b.line == frame.f_lineno

    # Name-based breakpoint.
    if frame.f_code.co_name != b.funcname:
        # Executing the def statement itself, not a call into the function.
        return False

    if not b.func_first_executable_line:
        # First entry into the function: remember where its body starts.
        b.func_first_executable_line = frame.f_lineno

    # Break only on the first executable line of the function body.
    return b.func_first_executable_line == frame.f_lineno
+
# Determines if there is an effective (active) breakpoint at this
# line of code.  Returns breakpoint number or 0 if none
def effective(file, line, frame):
    """Determine which breakpoint for this file:line is to be acted upon.

    Called only if we know there is a bpt at this
    location.  Returns breakpoint that was triggered and a flag
    that indicates if it is ok to delete a temporary bp.

    Returns (None, None) when no registered breakpoint fires.
    """
    possibles = Breakpoint.bplist[file,line]
    for i in range(0, len(possibles)):
        b = possibles[i]
        if b.enabled == 0:
            continue
        if not checkfuncname(b, frame):
            continue
        # Count every hit when bp is enabled
        b.hits = b.hits + 1
        if not b.cond:
            # If unconditional, and ignoring,
            # go on to next, else break
            if b.ignore > 0:
                b.ignore = b.ignore -1
                continue
            else:
                # breakpoint and marker that's ok
                # to delete if temporary
                return (b,1)
        else:
            # Conditional bp.
            # Ignore count applies only to those bpt hits where the
            # condition evaluates to true.
            try:
                # NOTE: the condition is evaluated in the target frame's
                # namespaces and may itself have side effects.
                val = eval(b.cond, frame.f_globals,
                       frame.f_locals)
                if val:
                    if b.ignore > 0:
                        b.ignore = b.ignore -1
                        # continue
                    else:
                        return (b,1)
                # else:
                #   continue
            except:
                # Deliberately bare except:
                # if eval fails, most conservative
                # thing is to stop on breakpoint
                # regardless of ignore count.
                # Don't delete temporary,
                # as another hint to user.
                return (b,0)
    return (None, None)
+
+# -------------------- testing --------------------
+
class Tdb(Bdb):
    """Minimal Bdb subclass for testing: prints every call, line,
    return and exception event to stdout."""
    def user_call(self, frame, args):
        name = frame.f_code.co_name
        if not name: name = '???'
        print '+++ call', name, args
    def user_line(self, frame):
        import linecache
        name = frame.f_code.co_name
        if not name: name = '???'
        fn = self.canonic(frame.f_code.co_filename)
        line = linecache.getline(fn, frame.f_lineno)
        print '+++', fn, frame.f_lineno, name, ':', line.strip()
    def user_return(self, frame, retval):
        print '+++ return', retval
    def user_exception(self, frame, exc_stuff):
        print '+++ exception', exc_stuff
        # Keep running after reporting the exception.
        self.set_continue()
+
def foo(n):
    """Demo function for the Tdb test: calls bar() so the tracer sees frames."""
    print 'foo(', n, ')'
    x = bar(n*10)
    print 'bar returned', x
+
def bar(a):
    """Demo helper: returns a/2 (floor division for int input in Python 2)."""
    print 'bar(', a, ')'
    return a/2
+
def test():
    """Run foo(10) under the Tdb tracing debugger."""
    t = Tdb()
    t.run('import bdb; bdb.foo(10)')
+
+# end
diff --git a/depot_tools/release/win/python_24/Lib/binhex.py b/depot_tools/release/win/python_24/Lib/binhex.py
new file mode 100644
index 0000000..16985fb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/binhex.py
@@ -0,0 +1,530 @@
+"""Macintosh binhex compression/decompression.
+
+easy interface:
+binhex(inputfilename, outputfilename)
+hexbin(inputfilename, outputfilename)
+"""
+
+#
+# Jack Jansen, CWI, August 1995.
+#
+# The module is supposed to be as compatible as possible. Especially the
+# easy interface should work "as expected" on any platform.
+# XXXX Note: currently, textfiles appear in mac-form on all platforms.
+# We seem to lack a simple character-translate in python.
+# (we should probably use ISO-Latin-1 on all but the mac platform).
+# XXXX The simple routines are too simple: they expect to hold the complete
+# files in-core. Should be fixed.
+# XXXX It would be nice to handle AppleDouble format on unix
+# (for servers serving macs).
+# XXXX I don't understand what happens when you get 0x90 times the same byte on
+# input. The resulting code (xx 90 90) would appear to be interpreted as an
+# escaped *value* of 0x90. All coders I've seen appear to ignore this nicety...
+#
+import sys
+import os
+import struct
+import binascii
+
+__all__ = ["binhex","hexbin","Error"]
+
class Error(Exception):
    """Raised for binhex-format encoding/decoding problems."""
    pass
+
# States (what have we written): header, then data fork, then resource fork.
[_DID_HEADER, _DID_DATA, _DID_RSRC] = range(3)

# Various constants
REASONABLY_LARGE=32768  # Minimal amount we pass the rle-coder
LINELEN=64              # length of an encoded output line
RUNCHAR=chr(0x90)   # run-length introducer
+
+#
+# This code is no longer byte-order dependent
+
+#
+# Workarounds for non-mac machines.
if os.name == 'mac':
    import macfs
    import MacOS
    try:
        openrf = MacOS.openrf
    except AttributeError:
        # Backward compatibility
        openrf = open

    def FInfo():
        return macfs.FInfo()

    def getfileinfo(name):
        """Return (basename, FInfo, data-fork-size, rsrc-fork-size)."""
        finfo = macfs.FSSpec(name).GetFInfo()
        dir, file = os.path.split(name)
        # XXXX Get resource/data sizes
        fp = open(name, 'rb')
        fp.seek(0, 2)
        dlen = fp.tell()
        fp.close()   # bug fix: the resource-fork open below used to leak this fp
        fp = openrf(name, '*rb')
        fp.seek(0, 2)
        rlen = fp.tell()
        fp.close()   # bug fix: this handle used to be leaked as well
        return file, finfo, dlen, rlen

    def openrsrc(name, *mode):
        if not mode:
            mode = '*rb'
        else:
            mode = '*' + mode[0]
        return openrf(name, mode)

else:
    #
    # Glue code for non-macintosh usage
    #

    class FInfo:
        """Minimal stand-in for macfs.FInfo on non-mac platforms."""
        def __init__(self):
            self.Type = '????'
            self.Creator = '????'
            self.Flags = 0

    def getfileinfo(name):
        """Return (basename, FInfo, data-fork-size, 0) for *name*."""
        finfo = FInfo()
        # Quick check for textfile: if the first 256 bytes contain only
        # whitespace and printable ASCII, tag the file as TEXT.
        fp = open(name)
        # Bug fix: read from the already-open fp; the original called
        # open(name) a second time and leaked that file object.
        data = fp.read(256)
        for c in data:
            if not c.isspace() and (c<' ' or ord(c) > 0x7f):
                break
        else:
            finfo.Type = 'TEXT'
        fp.seek(0, 2)
        dsize = fp.tell()
        fp.close()
        dir, file = os.path.split(name)
        file = file.replace(':', '-', 1)
        return file, finfo, dsize, 0

    class openrsrc:
        """Dummy resource-fork file: reads empty, ignores writes."""
        def __init__(self, *args):
            pass

        def read(self, *args):
            return ''

        def write(self, *args):
            pass

        def close(self):
            pass
+
class _Hqxcoderengine:
    """Write data to the coder in 3-byte chunks"""

    def __init__(self, ofp):
        self.ofp = ofp          # underlying output file object
        self.data = ''          # raw bytes not yet encoded (always < 3)
        self.hqxdata = ''       # encoded characters not yet written out
        # The first output line is one character short: the caller has
        # already written the leading ':' on that line.
        self.linelen = LINELEN-1

    def write(self, data):
        # Buffer input and encode only whole 3-byte groups; b2a_hqx turns
        # each 3 input bytes into 4 output characters, so a partial group
        # must wait for more data (or for close()).
        self.data = self.data + data
        datalen = len(self.data)
        todo = (datalen//3)*3
        data = self.data[:todo]
        self.data = self.data[todo:]
        if not data:
            return
        self.hqxdata = self.hqxdata + binascii.b2a_hqx(data)
        self._flush(0)

    def _flush(self, force):
        # Emit as many complete output lines as possible.  When force is
        # true (from close()), also emit the remainder followed by the
        # terminating ':'.
        first = 0
        while first <= len(self.hqxdata)-self.linelen:
            last = first + self.linelen
            self.ofp.write(self.hqxdata[first:last]+'\n')
            self.linelen = LINELEN
            first = last
        self.hqxdata = self.hqxdata[first:]
        if force:
            self.ofp.write(self.hqxdata + ':\n')

    def close(self):
        # Encode any trailing partial group, flush everything, and close
        # the underlying file.
        if self.data:
            self.hqxdata = \
                 self.hqxdata + binascii.b2a_hqx(self.data)
        self._flush(1)
        self.ofp.close()
        del self.ofp
+
class _Rlecoderengine:
    """Buffer writes and feed the RLE-coder in suitably large chunks."""

    def __init__(self, ofp):
        self.ofp = ofp
        self.data = ''

    def write(self, data):
        # Accumulate input until a reasonably large chunk is available;
        # run-length coding is only worthwhile on bigger buffers.
        self.data += data
        if len(self.data) >= REASONABLY_LARGE:
            self.ofp.write(binascii.rlecode_hqx(self.data))
            self.data = ''

    def close(self):
        # Encode whatever is left, then close the underlying coder.
        if self.data:
            self.ofp.write(binascii.rlecode_hqx(self.data))
        self.ofp.close()
        del self.ofp
+
class BinHex:
    """Encoder that writes one file's header, data fork and resource fork
    in binhex 4.0 format.

    The first argument is the (name, finfo, dlen, rlen) tuple produced by
    getfileinfo(); *ofp* is an output file object or a file name.
    """

    def __init__(self, name_finfo_dlen_rlen, ofp):
        # Tuple parameters in a 'def' are Python-2-only syntax; unpack
        # explicitly instead (same call signature for callers).
        name, finfo, dlen, rlen = name_finfo_dlen_rlen
        if type(ofp) == type(''):
            ofname = ofp
            ofp = open(ofname, 'w')
            if os.name == 'mac':
                fss = macfs.FSSpec(ofname)
                fss.SetCreatorType('BnHq', 'TEXT')
        ofp.write('(This file must be converted with BinHex 4.0)\n\n:')
        hqxer = _Hqxcoderengine(ofp)
        self.ofp = _Rlecoderengine(hqxer)
        self.crc = 0
        if finfo is None:
            finfo = FInfo()
        self.dlen = dlen
        self.rlen = rlen
        self._writeinfo(name, finfo)
        self.state = _DID_HEADER

    def _writeinfo(self, name, finfo):
        # Header: length-prefixed name, NUL, type, creator, flags, fork
        # sizes -- all big-endian -- followed by a CRC.
        nl = len(name)
        if nl > 63:
            raise Error('Filename too long')
        d = chr(nl) + name + '\0'
        d2 = finfo.Type + finfo.Creator

        # Force all structs to be packed with big-endian
        d3 = struct.pack('>h', finfo.Flags)
        d4 = struct.pack('>ii', self.dlen, self.rlen)
        info = d + d2 + d3 + d4
        self._write(info)
        self._writecrc()

    def _write(self, data):
        # Fold everything written into the running CRC.
        self.crc = binascii.crc_hqx(data, self.crc)
        self.ofp.write(data)

    def _writecrc(self):
        # XXXX Should this be here??
        # self.crc = binascii.crc_hqx('\0\0', self.crc)
        # crc_hqx() yields an unsigned 16-bit value; pick the struct
        # format accordingly so pack() never sees an out-of-range value.
        if self.crc < 0:
            fmt = '>h'
        else:
            fmt = '>H'
        self.ofp.write(struct.pack(fmt, self.crc))
        self.crc = 0

    def write(self, data):
        """Write data-fork bytes; only legal while in the header state."""
        if self.state != _DID_HEADER:
            raise Error('Writing data at the wrong time')
        self.dlen = self.dlen - len(data)
        self._write(data)

    def close_data(self):
        """Finish the data fork and write its CRC."""
        if self.dlen != 0:
            # Bug fix: report the data-fork discrepancy (was self.rlen).
            raise Error('Incorrect data size, diff=%r' % (self.dlen,))
        self._writecrc()
        self.state = _DID_DATA

    def write_rsrc(self, data):
        """Write resource-fork bytes, implicitly closing the data fork."""
        if self.state < _DID_DATA:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Writing resource data at the wrong time')
        self.rlen = self.rlen - len(data)
        self._write(data)

    def close(self):
        """Finish the resource fork, write its CRC and close the output."""
        if self.state < _DID_DATA:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Close at the wrong time')
        if self.rlen != 0:
            raise Error(
                  "Incorrect resource-datasize, diff=%r" % (self.rlen,))
        self._writecrc()
        self.ofp.close()
        self.state = None
        del self.ofp
+
def binhex(inp, out):
    """(infilename, outfilename) - Create binhex-encoded copy of a file"""
    finfo = getfileinfo(inp)
    ofp = BinHex(finfo, out)

    # Copy the data fork in large chunks.
    # XXXX Do textfile translation on non-mac systems
    ifp = open(inp, 'rb')
    while 1:
        chunk = ifp.read(128000)
        if not chunk:
            break
        ofp.write(chunk)
    ofp.close_data()
    ifp.close()

    # Copy the resource fork (always empty on non-mac platforms).
    ifp = openrsrc(inp, 'rb')
    while 1:
        chunk = ifp.read(128000)
        if not chunk:
            break
        ofp.write_rsrc(chunk)
    ofp.close()
    ifp.close()
+
class _Hqxdecoderengine:
    """Read data via the decoder in 4-byte chunks"""

    def __init__(self, ifp):
        self.ifp = ifp   # underlying (file-like) hqx input
        self.eof = 0     # set once a2b_hqx sees the terminating ':'

    def read(self, totalwtd):
        """Read at least wtd bytes (or until EOF)"""
        decdata = ''
        wtd = totalwtd
        #
        # The loop here is convoluted, since we don't really know how
        # much to decode: there may be newlines in the incoming data.
        while wtd > 0:
            if self.eof: return decdata
            # 4 encoded characters decode to 3 bytes; round up.
            wtd = ((wtd+2)//3)*4
            data = self.ifp.read(wtd)
            #
            # Next problem: there may not be a complete number of
            # bytes in what we pass to a2b. Solve by yet another
            # loop.
            #
            while 1:
                try:
                    decdatacur, self.eof = \
                            binascii.a2b_hqx(data)
                    break
                except binascii.Incomplete:
                    pass
                # Grow the buffer one byte at a time until a2b_hqx
                # accepts it; running dry here means the input file
                # was truncated mid-group.
                newdata = self.ifp.read(1)
                if not newdata:
                    raise Error, \
                          'Premature EOF on binhex file'
                data = data + newdata
            decdata = decdata + decdatacur
            wtd = totalwtd - len(decdata)
            # No progress and no EOF marker: the stream ended early.
            if not decdata and not self.eof:
                raise Error, 'Premature EOF on binhex file'
        return decdata

    def close(self):
        self.ifp.close()
+
class _Rledecoderengine:
    """Read data via the RLE-coder"""

    def __init__(self, ifp):
        self.ifp = ifp          # underlying hqx-decoder input
        self.pre_buffer = ''    # hqx-decoded bytes not yet RLE-expanded
        self.post_buffer = ''   # fully expanded bytes, ready to hand out
        self.eof = 0            # NOTE(review): never updated; _fill()
                                # consults self.ifp.eof instead -- confirm
                                # this attribute is intentionally unused.

    def read(self, wtd):
        # Serve from post_buffer, refilling it when it runs short.
        if wtd > len(self.post_buffer):
            self._fill(wtd-len(self.post_buffer))
        rv = self.post_buffer[:wtd]
        self.post_buffer = self.post_buffer[wtd:]
        return rv

    def _fill(self, wtd):
        self.pre_buffer = self.pre_buffer + self.ifp.read(wtd+4)
        if self.ifp.eof:
            # Input exhausted: everything buffered can be decoded now.
            self.post_buffer = self.post_buffer + \
                binascii.rledecode_hqx(self.pre_buffer)
            self.pre_buffer = ''
            return

        #
        # Obfuscated code ahead. We have to take care that we don't
        # end up with an orphaned RUNCHAR later on. So, we keep a couple
        # of bytes in the buffer, depending on what the end of
        # the buffer looks like:
        # '\220\0\220' - Keep 3 bytes: repeated \220 (escaped as \220\0)
        # '?\220' - Keep 2 bytes: repeated something-else
        # '\220\0' - Escaped \220: Keep 2 bytes.
        # '?\220?' - Complete repeat sequence: decode all
        # otherwise: keep 1 byte.
        #
        # NOTE(review): the [-2] lookup below assumes pre_buffer holds at
        # least 2 bytes at this point (read() asked for wtd+4 above); a
        # 1-byte buffer would raise IndexError -- TODO confirm this can't
        # happen with the hqx decoder upstream.
        mark = len(self.pre_buffer)
        if self.pre_buffer[-3:] == RUNCHAR + '\0' + RUNCHAR:
            mark = mark - 3
        elif self.pre_buffer[-1] == RUNCHAR:
            mark = mark - 2
        elif self.pre_buffer[-2:] == RUNCHAR + '\0':
            mark = mark - 2
        elif self.pre_buffer[-2] == RUNCHAR:
            pass # Decode all
        else:
            mark = mark - 1

        self.post_buffer = self.post_buffer + \
            binascii.rledecode_hqx(self.pre_buffer[:mark])
        self.pre_buffer = self.pre_buffer[mark:]

    def close(self):
        self.ifp.close()
+
class HexBin:
    """Decoder for a binhex-encoded file.

    *ifp* is an input file object or a file name.  After construction the
    header has been parsed and FName, FInfo, dlen and rlen are available.
    """

    def __init__(self, ifp):
        if type(ifp) == type(''):
            ifp = open(ifp)
        #
        # Find initial colon.
        #
        while 1:
            ch = ifp.read(1)
            if not ch:
                raise Error("No binhex data found")
            # Cater for \r\n terminated lines (which show up as \n\r, hence
            # all lines start with \r)
            if ch == '\r':
                continue
            if ch == ':':
                break
            if ch != '\n':
                dummy = ifp.readline()

        hqxifp = _Hqxdecoderengine(ifp)
        self.ifp = _Rledecoderengine(hqxifp)
        self.crc = 0
        self._readheader()

    def _read(self, count):
        # Read *count* bytes, folding them into the running CRC.
        # (Renamed from 'len', which shadowed the builtin.)
        data = self.ifp.read(count)
        self.crc = binascii.crc_hqx(data, self.crc)
        return data

    def _checkcrc(self):
        # Compare the running CRC against the one stored in the stream.
        filecrc = struct.unpack('>h', self.ifp.read(2))[0] & 0xffff
        #self.crc = binascii.crc_hqx('\0\0', self.crc)
        # XXXX Is this needed??
        self.crc = self.crc & 0xffff
        if filecrc != self.crc:
            raise Error('CRC error, computed %x, read %x'
                        %(self.crc, filecrc))
        self.crc = 0

    def _readheader(self):
        # Header layout: 1-byte name length, name, NUL, 4-byte type,
        # 4-byte creator, 2-byte flags, 4-byte dlen, 4-byte rlen, CRC.
        rawlen = self._read(1)
        fname = self._read(ord(rawlen))
        rest = self._read(1+4+4+2+4+4)
        self._checkcrc()

        # (Renamed from 'type', which shadowed the builtin.)
        ftype = rest[1:5]
        creator = rest[5:9]
        flags = struct.unpack('>h', rest[9:11])[0]
        self.dlen = struct.unpack('>l', rest[11:15])[0]
        self.rlen = struct.unpack('>l', rest[15:19])[0]

        self.FName = fname
        self.FInfo = FInfo()
        self.FInfo.Creator = creator
        self.FInfo.Type = ftype
        self.FInfo.Flags = flags

        self.state = _DID_HEADER

    def read(self, *n):
        """Read up to n bytes (default: the rest) of the data fork."""
        if self.state != _DID_HEADER:
            raise Error('Read data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.dlen)
        else:
            n = self.dlen
        rv = ''
        while len(rv) < n:
            rv = rv + self._read(n-len(rv))
        self.dlen = self.dlen - n
        return rv

    def close_data(self):
        """Skip the remainder of the data fork and verify its CRC."""
        if self.state != _DID_HEADER:
            raise Error('close_data at wrong time')
        if self.dlen:
            dummy = self._read(self.dlen)
        self._checkcrc()
        self.state = _DID_DATA

    def read_rsrc(self, *n):
        """Read up to n bytes (default: the rest) of the resource fork."""
        if self.state == _DID_HEADER:
            self.close_data()
        if self.state != _DID_DATA:
            raise Error('Read resource data at wrong time')
        if n:
            n = n[0]
            n = min(n, self.rlen)
        else:
            n = self.rlen
        self.rlen = self.rlen - n
        return self._read(n)

    def close(self):
        """Consume any remaining resource data, check CRC and close."""
        if self.rlen:
            dummy = self.read_rsrc(self.rlen)
        self._checkcrc()
        self.state = _DID_RSRC
        self.ifp.close()
+
def hexbin(inp, out):
    """(infilename, outfilename) - Decode binhexed file"""
    ifp = HexBin(inp)
    finfo = ifp.FInfo
    if not out:
        out = ifp.FName
    if os.name == 'mac':
        ofss = macfs.FSSpec(out)
        out = ofss.as_pathname()

    # Copy the data fork.
    # XXXX Do translation on non-mac systems
    ofp = open(out, 'wb')
    while 1:
        chunk = ifp.read(128000)
        if not chunk:
            break
        ofp.write(chunk)
    ofp.close()
    ifp.close_data()

    # Copy the resource fork; create the output only if it is non-empty.
    chunk = ifp.read_rsrc(128000)
    if chunk:
        ofp = openrsrc(out, 'wb')
        ofp.write(chunk)
        while 1:
            chunk = ifp.read_rsrc(128000)
            if not chunk:
                break
            ofp.write(chunk)
        ofp.close()

    if os.name == 'mac':
        nfinfo = ofss.GetFInfo()
        nfinfo.Creator = finfo.Creator
        nfinfo.Type = finfo.Type
        nfinfo.Flags = finfo.Flags
        ofss.SetFInfo(nfinfo)

    ifp.close()
+
def _test():
    # Round-trip the file named on the command line (or chosen via a
    # dialog on the mac) through binhex encoding and decoding.
    if os.name == 'mac':
        fss, ok = macfs.PromptGetFile('File to convert:')
        if not ok:
            sys.exit(0)
        fname = fss.as_pathname()
    else:
        fname = sys.argv[1]
    hqxname = fname + '.hqx'
    binhex(fname, hqxname)
    hexbin(hqxname, fname + '.viahqx')
    #hexbin(fname, fname+'.unpacked')
    sys.exit(1)
+
+if __name__ == '__main__':
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/bisect.py b/depot_tools/release/win/python_24/Lib/bisect.py
new file mode 100644
index 0000000..152f6c78
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bisect.py
@@ -0,0 +1,84 @@
+"""Bisection algorithms."""
+
def insort_right(a, x, lo=0, hi=None):
    """Insert item x in list a, and keep it sorted assuming a is sorted.

    If x is already in a, insert it to the right of the rightmost x.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if hi is None:
        hi = len(a)
    # Shrink the half-open interval [lo, hi) around the insertion point;
    # only __lt__ on x is required of the elements.
    while lo < hi:
        middle = (lo + hi) // 2
        if x < a[middle]:
            hi = middle
        else:
            lo = middle + 1
    a.insert(lo, x)

insort = insort_right   # backward compatibility
+
def bisect_right(a, x, lo=0, hi=None):
    """Return the index where to insert item x in list a, assuming a is sorted.

    The return value i is such that all e in a[:i] have e <= x, and all e in
    a[i:] have e > x.  So if x already appears in the list, i points just
    beyond the rightmost x already there.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if hi is None:
        hi = len(a)
    # Binary search; only __lt__ on x is required of the elements.
    while lo < hi:
        middle = (lo + hi) // 2
        if x < a[middle]:
            hi = middle
        else:
            lo = middle + 1
    return lo

bisect = bisect_right   # backward compatibility
+
def insort_left(a, x, lo=0, hi=None):
    """Insert item x in list a, and keep it sorted assuming a is sorted.

    If x is already in a, insert it to the left of the leftmost x.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if hi is None:
        hi = len(a)
    # Mirror image of insort_right: strictly-smaller elements stay left.
    while lo < hi:
        middle = (lo + hi) // 2
        if a[middle] < x:
            lo = middle + 1
        else:
            hi = middle
    a.insert(lo, x)
+
+
def bisect_left(a, x, lo=0, hi=None):
    """Return the index where to insert item x in list a, assuming a is sorted.

    The return value i is such that all e in a[:i] have e < x, and all e in
    a[i:] have e >= x.  So if x already appears in the list, i points just
    before the leftmost x already there.

    Optional args lo (default 0) and hi (default len(a)) bound the
    slice of a to be searched.
    """
    if hi is None:
        hi = len(a)
    # Binary search; strictly-smaller elements end up left of the result.
    while lo < hi:
        middle = (lo + hi) // 2
        if a[middle] < x:
            lo = middle + 1
        else:
            hi = middle
    return lo
+
# Overwrite above definitions with a fast C implementation
try:
    from _bisect import bisect_right, bisect_left, insort_left, insort_right, insort, bisect
except ImportError:
    # No compiled extension available; keep the pure-Python versions.
    pass
diff --git a/depot_tools/release/win/python_24/Lib/bsddb/__init__.py b/depot_tools/release/win/python_24/Lib/bsddb/__init__.py
new file mode 100644
index 0000000..0fc1892
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bsddb/__init__.py
@@ -0,0 +1,379 @@
+#----------------------------------------------------------------------
+#  Copyright (c) 1999-2001, Digital Creations, Fredericksburg, VA, USA
+#  and Andrew Kuchling. All rights reserved.
+#
+#  Redistribution and use in source and binary forms, with or without
+#  modification, are permitted provided that the following conditions are
+#  met:
+#
+#    o Redistributions of source code must retain the above copyright
+#      notice, this list of conditions, and the disclaimer that follows.
+#
+#    o Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions, and the following disclaimer in
+#      the documentation and/or other materials provided with the
+#      distribution.
+#
+#    o Neither the name of Digital Creations nor the names of its
+#      contributors may be used to endorse or promote products derived
+#      from this software without specific prior written permission.
+#
+#  THIS SOFTWARE IS PROVIDED BY DIGITAL CREATIONS AND CONTRIBUTORS *AS
+#  IS* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+#  TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+#  PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL DIGITAL
+#  CREATIONS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+#  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+#  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+#  OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+#  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+#  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+#  USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+#  DAMAGE.
+#----------------------------------------------------------------------
+
+
+"""Support for BerkeleyDB 3.2 through 4.2.
+"""
+
+try:
+    if __name__ == 'bsddb3':
+        # import _pybsddb binary as it should be the more recent version from
+        # a standalone pybsddb addon package than the version included with
+        # python as bsddb._bsddb.
+        import _pybsddb
+        _bsddb = _pybsddb
+    else:
+        import _bsddb
+except ImportError:
+    # Remove ourselves from sys.modules
+    import sys
+    del sys.modules[__name__]
+    raise
+
+# bsddb3 calls it db, but provide _db for backwards compatibility
+db = _db = _bsddb
+__version__ = db.__version__
+
+error = db.DBError  # So bsddb.error will mean something...
+
+#----------------------------------------------------------------------
+
+import sys, os
+
+# for backwards compatibility with python versions older than 2.3, the
+# iterator interface is dynamically defined and added using a mixin
+# class.  old python can't tokenize it due to the yield keyword.
# NOTE(review): this string comparison misorders two-digit minor versions
# (e.g. '2.10' < '2.3'); harmless for the interpreters this file targets,
# but worth confirming if it is ever reused.
if sys.version >= '2.3':
    import UserDict
    from weakref import ref
    # The mixin source is compiled from a string so that pre-2.3
    # interpreters, which cannot tokenize 'yield', never see it.
    exec """
class _iter_mixin(UserDict.DictMixin):
    def _make_iter_cursor(self):
        cur = self.db.cursor()
        key = id(cur)
        self._cursor_refs[key] = ref(cur, self._gen_cref_cleaner(key))
        return cur

    def _gen_cref_cleaner(self, key):
        # use generate the function for the weakref callback here
        # to ensure that we do not hold a strict reference to cur
        # in the callback.
        return lambda ref: self._cursor_refs.pop(key, None)

    def __iter__(self):
        try:
            cur = self._make_iter_cursor()

            # FIXME-20031102-greg: race condition.  cursor could
            # be closed by another thread before this call.

            # since we're only returning keys, we call the cursor
            # methods with flags=0, dlen=0, dofs=0
            key = cur.first(0,0,0)[0]
            yield key

            next = cur.next
            while 1:
                try:
                    key = next(0,0,0)[0]
                    yield key
                except _bsddb.DBCursorClosedError:
                    cur = self._make_iter_cursor()
                    # FIXME-20031101-greg: race condition.  cursor could
                    # be closed by another thread before this call.
                    cur.set(key,0,0,0)
                    next = cur.next
        except _bsddb.DBNotFoundError:
            return
        except _bsddb.DBCursorClosedError:
            # the database was modified during iteration.  abort.
            return

    def iteritems(self):
        try:
            cur = self._make_iter_cursor()

            # FIXME-20031102-greg: race condition.  cursor could
            # be closed by another thread before this call.

            kv = cur.first()
            key = kv[0]
            yield kv

            next = cur.next
            while 1:
                try:
                    kv = next()
                    key = kv[0]
                    yield kv
                except _bsddb.DBCursorClosedError:
                    cur = self._make_iter_cursor()
                    # FIXME-20031101-greg: race condition.  cursor could
                    # be closed by another thread before this call.
                    cur.set(key,0,0,0)
                    next = cur.next
        except _bsddb.DBNotFoundError:
            return
        except _bsddb.DBCursorClosedError:
            # the database was modified during iteration.  abort.
            return
"""
else:
    # Pre-2.3: no generators, so no iterator support at all.
    class _iter_mixin: pass
+
+
class _DBWithCursor(_iter_mixin):
    """
    A simple wrapper around DB that makes it look like the bsddbobject in
    the old module.  It uses a cursor as needed to provide DB traversal.
    """
    def __init__(self, db):
        self.db = db
        # Make absent-key lookups raise instead of returning None.
        self.db.set_get_returns_none(0)

        # FIXME-20031101-greg: I believe there is still the potential
        # for deadlocks in a multithreaded environment if someone
        # attempts to use the any of the cursor interfaces in one
        # thread while doing a put or delete in another thread.  The
        # reason is that _checkCursor and _closeCursors are not atomic
        # operations.  Doing our own locking around self.dbc,
        # self.saved_dbc_key and self._cursor_refs could prevent this.
        # TODO: A test case demonstrating the problem needs to be written.

        # self.dbc is a DBCursor object used to implement the
        # first/next/previous/last/set_location methods.
        self.dbc = None
        self.saved_dbc_key = None

        # a collection of all DBCursor objects currently allocated
        # by the _iter_mixin interface.
        self._cursor_refs = {}

    def __del__(self):
        self.close()

    def _checkCursor(self):
        # Lazily (re)create the traversal cursor, restoring the position
        # saved by a previous _closeCursors() if there is one.
        if self.dbc is None:
            self.dbc = self.db.cursor()
            if self.saved_dbc_key is not None:
                self.dbc.set(self.saved_dbc_key)
                self.saved_dbc_key = None

    # This method is needed for all non-cursor DB calls to avoid
    # BerkeleyDB deadlocks (due to being opened with DB_INIT_LOCK
    # and DB_THREAD to be thread safe) when intermixing database
    # operations that use the cursor internally with those that don't.
    def _closeCursors(self, save=1):
        if self.dbc:
            c = self.dbc
            self.dbc = None
            if save:
                self.saved_dbc_key = c.current(0,0,0)[0]
            c.close()
            del c
        for cref in self._cursor_refs.values():
            c = cref()
            if c is not None:
                c.close()

    def _checkOpen(self):
        if self.db is None:
            # Call form instead of the Python-2-only 'raise error, msg'
            # statement; behavior is identical.
            raise error("BSDDB object has already been closed")

    def isOpen(self):
        """Return true while the underlying DB has not been closed."""
        return self.db is not None

    def __len__(self):
        self._checkOpen()
        return len(self.db)

    def __getitem__(self, key):
        self._checkOpen()
        return self.db[key]

    def __setitem__(self, key, value):
        self._checkOpen()
        self._closeCursors()
        self.db[key] = value

    def __delitem__(self, key):
        self._checkOpen()
        self._closeCursors()
        del self.db[key]

    def close(self):
        """Close all cursors and the DB itself; safe to call repeatedly."""
        self._closeCursors(save=0)
        if self.dbc is not None:
            self.dbc.close()
        v = 0
        if self.db is not None:
            v = self.db.close()
        self.dbc = None
        self.db = None
        return v

    def keys(self):
        self._checkOpen()
        return self.db.keys()

    def has_key(self, key):
        self._checkOpen()
        return self.db.has_key(key)

    def set_location(self, key):
        """Position the traversal cursor at (or just past) *key*."""
        self._checkOpen()
        self._checkCursor()
        return self.dbc.set_range(key)

    def next(self):
        self._checkOpen()
        self._checkCursor()
        rv = self.dbc.next()
        return rv

    def previous(self):
        self._checkOpen()
        self._checkCursor()
        rv = self.dbc.prev()
        return rv

    def first(self):
        self._checkOpen()
        self._checkCursor()
        rv = self.dbc.first()
        return rv

    def last(self):
        self._checkOpen()
        self._checkCursor()
        rv = self.dbc.last()
        return rv

    def sync(self):
        self._checkOpen()
        return self.db.sync()
+
+
+#----------------------------------------------------------------------
+# Compatibility object factory functions
+
+def hashopen(file, flag='c', mode=0666, pgsize=None, ffactor=None, nelem=None,
+            cachesize=None, lorder=None, hflags=0):
+
+    flags = _checkflag(flag, file)
+    e = _openDBEnv()
+    d = db.DB(e)
+    d.set_flags(hflags)
+    if cachesize is not None: d.set_cachesize(0, cachesize)
+    if pgsize is not None:    d.set_pagesize(pgsize)
+    if lorder is not None:    d.set_lorder(lorder)
+    if ffactor is not None:   d.set_h_ffactor(ffactor)
+    if nelem is not None:     d.set_h_nelem(nelem)
+    d.open(file, db.DB_HASH, flags, mode)
+    return _DBWithCursor(d)
+
+#----------------------------------------------------------------------
+
+def btopen(file, flag='c', mode=0666,
+            btflags=0, cachesize=None, maxkeypage=None, minkeypage=None,
+            pgsize=None, lorder=None):
+
+    flags = _checkflag(flag, file)
+    e = _openDBEnv()
+    d = db.DB(e)
+    if cachesize is not None: d.set_cachesize(0, cachesize)
+    if pgsize is not None: d.set_pagesize(pgsize)
+    if lorder is not None: d.set_lorder(lorder)
+    d.set_flags(btflags)
+    if minkeypage is not None: d.set_bt_minkey(minkeypage)
+    if maxkeypage is not None: d.set_bt_maxkey(maxkeypage)
+    d.open(file, db.DB_BTREE, flags, mode)
+    return _DBWithCursor(d)
+
+#----------------------------------------------------------------------
+
+
+def rnopen(file, flag='c', mode=0666,
+            rnflags=0, cachesize=None, pgsize=None, lorder=None,
+            rlen=None, delim=None, source=None, pad=None):
+
+    flags = _checkflag(flag, file)
+    e = _openDBEnv()
+    d = db.DB(e)
+    if cachesize is not None: d.set_cachesize(0, cachesize)
+    if pgsize is not None: d.set_pagesize(pgsize)
+    if lorder is not None: d.set_lorder(lorder)
+    d.set_flags(rnflags)
+    if delim is not None: d.set_re_delim(delim)
+    if rlen is not None: d.set_re_len(rlen)
+    if source is not None: d.set_re_source(source)
+    if pad is not None: d.set_re_pad(pad)
+    d.open(file, db.DB_RECNO, flags, mode)
+    return _DBWithCursor(d)
+
+#----------------------------------------------------------------------
+
def _openDBEnv():
    """Create and open a private, thread-capable DB environment in '.'."""
    env = db.DBEnv()
    env.open('.', db.DB_PRIVATE | db.DB_CREATE | db.DB_THREAD | db.DB_INIT_LOCK | db.DB_INIT_MPOOL)
    return env
+
def _checkflag(flag, file):
    """Translate an old-style bsddb open flag into DB_* open flags."""
    if flag == 'r':
        flags = db.DB_RDONLY
    elif flag == 'rw':
        flags = 0
    elif flag in ('w', 'c'):
        flags = db.DB_CREATE
    elif flag == 'n':
        flags = db.DB_CREATE
        #flags = db.DB_CREATE | db.DB_TRUNCATE
        # we used db.DB_TRUNCATE flag for this before but BerkeleyDB
        # 4.2.52 changed to disallowed truncate with txn environments.
        if os.path.isfile(file):
            os.unlink(file)
    else:
        raise error("flags should be one of 'r', 'w', 'c' or 'n'")
    return flags | db.DB_THREAD
+
+#----------------------------------------------------------------------
+
+
+# This is a silly little hack that allows apps to continue to use the
+# DB_THREAD flag even on systems without threads without freaking out
+# BerkeleyDB.
+#
+# This assumes that if Python was built with thread support then
+# BerkeleyDB was too.
+
+try:
+    import thread
+    del thread
+except ImportError:
+    db.DB_THREAD = 0
+
+
+#----------------------------------------------------------------------
diff --git a/depot_tools/release/win/python_24/Lib/bsddb/db.py b/depot_tools/release/win/python_24/Lib/bsddb/db.py
new file mode 100644
index 0000000..b2ee14e5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bsddb/db.py
@@ -0,0 +1,51 @@
+#----------------------------------------------------------------------
+#  Copyright (c) 1999-2001, Digital Creations, Fredericksburg, VA, USA
+#  and Andrew Kuchling. All rights reserved.
+#
+#  Redistribution and use in source and binary forms, with or without
+#  modification, are permitted provided that the following conditions are
+#  met:
+#
+#    o Redistributions of source code must retain the above copyright
+#      notice, this list of conditions, and the disclaimer that follows.
+#
+#    o Redistributions in binary form must reproduce the above copyright
+#      notice, this list of conditions, and the following disclaimer in
+#      the documentation and/or other materials provided with the
+#      distribution.
+#
+#    o Neither the name of Digital Creations nor the names of its
+#      contributors may be used to endorse or promote products derived
+#      from this software without specific prior written permission.
+#
+#  THIS SOFTWARE IS PROVIDED BY DIGITAL CREATIONS AND CONTRIBUTORS *AS
+#  IS* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+#  TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+#  PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL DIGITAL
+#  CREATIONS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+#  INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+#  BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
+#  OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+#  ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
+#  TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+#  USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
+#  DAMAGE.
+#----------------------------------------------------------------------
+
+
+# This module is just a placeholder for possible future expansion, in
+# case we ever want to augment the stuff in _db in any way.  For now
+# it just simply imports everything from _db.
+
if __name__.startswith('bsddb3.'):
    # Imported as part of the standalone pybsddb package: prefer its
    # _pybsddb extension module, which should be more recent than the
    # _bsddb shipped with Python itself.
    from _pybsddb import *
    from _pybsddb import __version__
else:
    from _bsddb import *
    from _bsddb import __version__

# version() comes from the extension module just star-imported above.
if version() < (3, 2, 0):
    raise ImportError("correct BerkeleyDB symbols not found.  Perhaps python was statically linked with an older version?")
diff --git a/depot_tools/release/win/python_24/Lib/bsddb/dbobj.py b/depot_tools/release/win/python_24/Lib/bsddb/dbobj.py
new file mode 100644
index 0000000..3bafafa
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bsddb/dbobj.py
@@ -0,0 +1,206 @@
+#-------------------------------------------------------------------------
+#  This file contains real Python object wrappers for DB and DBEnv
+#  C "objects" that can be usefully subclassed.  The previous SWIG
+#  based interface allowed this thanks to SWIG's shadow classes.
+#   --  Gregory P. Smith
+#-------------------------------------------------------------------------
+#
+# (C) Copyright 2001  Autonomous Zone Industries
+#
+# License:  This is free software.  You may use this software for any
+#           purpose including modification/redistribution, so long as
+#           this header remains intact and that you do not claim any
+#           rights of ownership or authorship of this software.  This
+#           software has been tested, but no warranty is expressed or
+#           implied.
+#
+
+#
+# TODO it would be *really nice* to have an automatic shadow class populator
+# so that new methods don't need to be added  here manually after being
+# added to _bsddb.c.
+#
+
+import db
+
try:
    from UserDict import DictMixin
except ImportError:
    # DictMixin is new in Python 2.3
    # Empty stub so the DB class below can still be defined on older
    # Pythons (it just loses the mixin-derived dictionary methods).
    class DictMixin: pass
+
class DBEnv:
    """Subclassable pure-Python wrapper around the db.DBEnv C type.

    Every method simply forwards to the identically named method of the
    wrapped C object held in self._cobj.  Uses the f(*args, **kwargs)
    extended call syntax instead of the apply() builtin, which has been
    deprecated since Python 2.3.
    """
    def __init__(self, *args, **kwargs):
        self._cobj = db.DBEnv(*args, **kwargs)

    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def set_shm_key(self, *args, **kwargs):
        return self._cobj.set_shm_key(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_data_dir(self, *args, **kwargs):
        return self._cobj.set_data_dir(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_lg_bsize(self, *args, **kwargs):
        return self._cobj.set_lg_bsize(*args, **kwargs)
    def set_lg_dir(self, *args, **kwargs):
        return self._cobj.set_lg_dir(*args, **kwargs)
    def set_lg_max(self, *args, **kwargs):
        return self._cobj.set_lg_max(*args, **kwargs)
    def set_lk_detect(self, *args, **kwargs):
        return self._cobj.set_lk_detect(*args, **kwargs)
    def set_lk_max(self, *args, **kwargs):
        return self._cobj.set_lk_max(*args, **kwargs)
    def set_lk_max_locks(self, *args, **kwargs):
        return self._cobj.set_lk_max_locks(*args, **kwargs)
    def set_lk_max_lockers(self, *args, **kwargs):
        return self._cobj.set_lk_max_lockers(*args, **kwargs)
    def set_lk_max_objects(self, *args, **kwargs):
        return self._cobj.set_lk_max_objects(*args, **kwargs)
    def set_mp_mmapsize(self, *args, **kwargs):
        return self._cobj.set_mp_mmapsize(*args, **kwargs)
    def set_timeout(self, *args, **kwargs):
        return self._cobj.set_timeout(*args, **kwargs)
    def set_tmp_dir(self, *args, **kwargs):
        return self._cobj.set_tmp_dir(*args, **kwargs)
    def txn_begin(self, *args, **kwargs):
        return self._cobj.txn_begin(*args, **kwargs)
    def txn_checkpoint(self, *args, **kwargs):
        return self._cobj.txn_checkpoint(*args, **kwargs)
    def txn_stat(self, *args, **kwargs):
        return self._cobj.txn_stat(*args, **kwargs)
    def set_tx_max(self, *args, **kwargs):
        return self._cobj.set_tx_max(*args, **kwargs)
    def lock_detect(self, *args, **kwargs):
        return self._cobj.lock_detect(*args, **kwargs)
    def lock_get(self, *args, **kwargs):
        return self._cobj.lock_get(*args, **kwargs)
    def lock_id(self, *args, **kwargs):
        return self._cobj.lock_id(*args, **kwargs)
    def lock_put(self, *args, **kwargs):
        return self._cobj.lock_put(*args, **kwargs)
    def lock_stat(self, *args, **kwargs):
        return self._cobj.lock_stat(*args, **kwargs)
    def log_archive(self, *args, **kwargs):
        return self._cobj.log_archive(*args, **kwargs)
    def set_get_returns_none(self, *args, **kwargs):
        return self._cobj.set_get_returns_none(*args, **kwargs)

    # These wrappers only exist when built against BerkeleyDB >= 4.1.
    if db.version() >= (4,1):
        def dbremove(self, *args, **kwargs):
            return self._cobj.dbremove(*args, **kwargs)
        def dbrename(self, *args, **kwargs):
            return self._cobj.dbrename(*args, **kwargs)
        def set_encrypt(self, *args, **kwargs):
            return self._cobj.set_encrypt(*args, **kwargs)
+
+
class DB(DictMixin):
    """Subclassable pure-Python wrapper around the db.DB C type.

    DictMixin supplies the derived dictionary methods on top of the
    mapping primitives defined below.  Every other method forwards to
    the wrapped C object in self._cobj, using f(*args, **kwargs) instead
    of the apply() builtin (deprecated since Python 2.3).
    """
    def __init__(self, dbenv, *args, **kwargs):
        # give it the proper DBEnv C object that it's expecting
        self._cobj = db.DB(dbenv._cobj, *args, **kwargs)

    # TODO are there other dict methods that need to be overridden?
    def __len__(self):
        return len(self._cobj)
    def __getitem__(self, arg):
        return self._cobj[arg]
    def __setitem__(self, key, value):
        self._cobj[key] = value
    def __delitem__(self, arg):
        del self._cobj[arg]

    def append(self, *args, **kwargs):
        return self._cobj.append(*args, **kwargs)
    def associate(self, *args, **kwargs):
        return self._cobj.associate(*args, **kwargs)
    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def consume(self, *args, **kwargs):
        return self._cobj.consume(*args, **kwargs)
    def consume_wait(self, *args, **kwargs):
        return self._cobj.consume_wait(*args, **kwargs)
    def cursor(self, *args, **kwargs):
        return self._cobj.cursor(*args, **kwargs)
    def delete(self, *args, **kwargs):
        return self._cobj.delete(*args, **kwargs)
    def fd(self, *args, **kwargs):
        return self._cobj.fd(*args, **kwargs)
    def get(self, *args, **kwargs):
        return self._cobj.get(*args, **kwargs)
    def pget(self, *args, **kwargs):
        return self._cobj.pget(*args, **kwargs)
    def get_both(self, *args, **kwargs):
        return self._cobj.get_both(*args, **kwargs)
    def get_byteswapped(self, *args, **kwargs):
        return self._cobj.get_byteswapped(*args, **kwargs)
    def get_size(self, *args, **kwargs):
        return self._cobj.get_size(*args, **kwargs)
    def get_type(self, *args, **kwargs):
        return self._cobj.get_type(*args, **kwargs)
    def join(self, *args, **kwargs):
        return self._cobj.join(*args, **kwargs)
    def key_range(self, *args, **kwargs):
        return self._cobj.key_range(*args, **kwargs)
    def has_key(self, *args, **kwargs):
        return self._cobj.has_key(*args, **kwargs)
    def items(self, *args, **kwargs):
        return self._cobj.items(*args, **kwargs)
    def keys(self, *args, **kwargs):
        return self._cobj.keys(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def put(self, *args, **kwargs):
        return self._cobj.put(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def rename(self, *args, **kwargs):
        return self._cobj.rename(*args, **kwargs)
    def set_bt_minkey(self, *args, **kwargs):
        return self._cobj.set_bt_minkey(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_h_ffactor(self, *args, **kwargs):
        return self._cobj.set_h_ffactor(*args, **kwargs)
    def set_h_nelem(self, *args, **kwargs):
        return self._cobj.set_h_nelem(*args, **kwargs)
    def set_lorder(self, *args, **kwargs):
        return self._cobj.set_lorder(*args, **kwargs)
    def set_pagesize(self, *args, **kwargs):
        return self._cobj.set_pagesize(*args, **kwargs)
    def set_re_delim(self, *args, **kwargs):
        return self._cobj.set_re_delim(*args, **kwargs)
    def set_re_len(self, *args, **kwargs):
        return self._cobj.set_re_len(*args, **kwargs)
    def set_re_pad(self, *args, **kwargs):
        return self._cobj.set_re_pad(*args, **kwargs)
    def set_re_source(self, *args, **kwargs):
        return self._cobj.set_re_source(*args, **kwargs)
    def set_q_extentsize(self, *args, **kwargs):
        return self._cobj.set_q_extentsize(*args, **kwargs)
    def stat(self, *args, **kwargs):
        return self._cobj.stat(*args, **kwargs)
    def sync(self, *args, **kwargs):
        return self._cobj.sync(*args, **kwargs)
    def type(self, *args, **kwargs):
        return self._cobj.type(*args, **kwargs)
    def upgrade(self, *args, **kwargs):
        return self._cobj.upgrade(*args, **kwargs)
    def values(self, *args, **kwargs):
        return self._cobj.values(*args, **kwargs)
    def verify(self, *args, **kwargs):
        return self._cobj.verify(*args, **kwargs)
    def set_get_returns_none(self, *args, **kwargs):
        return self._cobj.set_get_returns_none(*args, **kwargs)

    # Only available when built against BerkeleyDB >= 4.1.
    if db.version() >= (4,1):
        def set_encrypt(self, *args, **kwargs):
            return self._cobj.set_encrypt(*args, **kwargs)
diff --git a/depot_tools/release/win/python_24/Lib/bsddb/dbrecio.py b/depot_tools/release/win/python_24/Lib/bsddb/dbrecio.py
new file mode 100644
index 0000000..22e382a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bsddb/dbrecio.py
@@ -0,0 +1,190 @@
+
+"""
+File-like objects that read from or write to a bsddb record.
+
+This implements (nearly) all stdio methods.
+
+f = DBRecIO(db, key, txn=None)
+f.close()           # explicitly release resources held
+flag = f.isatty()   # always false
+pos = f.tell()      # get current position
+f.seek(pos)         # set current position
+f.seek(pos, mode)   # mode 0: absolute; 1: relative; 2: relative to EOF
+buf = f.read()      # read until EOF
+buf = f.read(n)     # read up to n bytes
+f.truncate([size])  # truncate file to at most size (default: current pos)
+f.write(buf)        # write at current position
+f.writelines(list)  # for line in list: f.write(line)
+
+Notes:
+- fileno() is left unimplemented so that code which uses it triggers
+  an exception early.
+- There's a simple test set (see end of this file) - not yet updated
+  for DBRecIO.
+- readline() is not implemented yet.
+
+
+From:
+    Itamar Shtull-Trauring <itamar@maxnm.com>
+"""
+
+import errno
+import string
+
class DBRecIO:
    """File-like object reading from / writing to a single bsddb record.

    Implements most stdio-style methods on top of the partial get/put
    (dlen/doff) interface of the DB object, addressing the record stored
    under *key*.  readline()/readlines() are not implemented yet and
    fileno() is deliberately left out.
    """
    def __init__(self, db, key, txn=None):
        self.db = db
        self.key = key
        self.txn = txn
        # NOTE(review): self.len is never initialized from the record, so
        # read()/seek(..., 2)/truncate() misbehave until a caller sets
        # it -- confirm intended usage before relying on this class.
        self.len = None
        self.pos = 0
        self.closed = 0
        self.softspace = 0

    def close(self):
        """Release the DB handle and transaction; further I/O raises."""
        if not self.closed:
            self.closed = 1
            del self.db, self.txn

    def isatty(self):
        """Always false: a DB record is never a terminal."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
        return 0

    def seek(self, pos, mode = 0):
        """Set the position (mode 0: absolute; 1: relative; 2: from EOF)."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
        if mode == 1:
            pos = pos + self.pos
        elif mode == 2:
            pos = pos + self.len
        self.pos = max(0, pos)

    def tell(self):
        """Return the current position within the record."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
        return self.pos

    def read(self, n = -1):
        """Read up to n bytes (all remaining bytes if n < 0)."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
        if n < 0:
            newpos = self.len
        else:
            newpos = min(self.pos+n, self.len)

        dlen = newpos - self.pos

        # Bug fix: this used the undefined global name 'key'; it must be
        # the key this object was constructed with.
        r = self.db.get(self.key, txn=self.txn, dlen=dlen, doff=self.pos)
        self.pos = newpos
        return r

    __fixme = """
    def readline(self, length=None):
        if self.closed:
            raise ValueError, "I/O operation on closed file"
        if self.buflist:
            self.buf = self.buf + string.joinfields(self.buflist, '')
            self.buflist = []
        i = string.find(self.buf, '\n', self.pos)
        if i < 0:
            newpos = self.len
        else:
            newpos = i+1
        if length is not None:
            if self.pos + length < newpos:
                newpos = self.pos + length
        r = self.buf[self.pos:newpos]
        self.pos = newpos
        return r

    def readlines(self, sizehint = 0):
        total = 0
        lines = []
        line = self.readline()
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline()
        return lines
    """

    def truncate(self, size=None):
        """Truncate the record to at most *size* bytes (default: current
        position)."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
        if size is None:
            size = self.pos
        elif size < 0:
            raise IOError(errno.EINVAL,
                                      "Negative size not allowed")
        elif size < self.pos:
            self.pos = size
        # Bug fix: 'key' -> 'self.key' (was an undefined global name).
        # NOTE(review): dlen goes negative if size > self.len -- confirm
        # callers never try to extend the record via truncate().
        self.db.put(self.key, "", txn=self.txn, dlen=self.len-size, doff=size)
        # Bug fix: keep the cached length in sync with the shorter record.
        self.len = size

    def write(self, s):
        """Write string s at the current position, zero-filling any gap
        between the current end of the record and the write position."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
        if not s: return
        if self.pos > self.len:
            # Bug fix: the original appended to a nonexistent
            # self.buflist attribute here (AttributeError).  Zero-fill
            # the gap through the DB partial-put interface instead.
            padlen = self.pos - self.len
            self.db.put(self.key, '\0'*padlen, txn=self.txn,
                        dlen=padlen, doff=self.len)
            self.len = self.pos
        newpos = self.pos + len(s)
        # Bug fix: 'key' -> 'self.key' (was an undefined global name).
        self.db.put(self.key, s, txn=self.txn, dlen=len(s), doff=self.pos)
        # Bug fix: writing past the old end grows the record.
        if newpos > self.len:
            self.len = newpos
        self.pos = newpos

    def writelines(self, list):
        # ''.join is the direct equivalent of string.joinfields here.
        self.write(''.join(list))

    def flush(self):
        """No-op (writes go straight to the DB); kept for file compat."""
        if self.closed:
            raise ValueError("I/O operation on closed file")
+
+
+"""
+# A little test suite
+
+def _test():
+    import sys
+    if sys.argv[1:]:
+        file = sys.argv[1]
+    else:
+        file = '/etc/passwd'
+    lines = open(file, 'r').readlines()
+    text = open(file, 'r').read()
+    f = StringIO()
+    for line in lines[:-2]:
+        f.write(line)
+    f.writelines(lines[-2:])
+    if f.getvalue() != text:
+        raise RuntimeError, 'write failed'
+    length = f.tell()
+    print 'File length =', length
+    f.seek(len(lines[0]))
+    f.write(lines[1])
+    f.seek(0)
+    print 'First line =', repr(f.readline())
+    here = f.tell()
+    line = f.readline()
+    print 'Second line =', repr(line)
+    f.seek(-len(line), 1)
+    line2 = f.read(len(line))
+    if line != line2:
+        raise RuntimeError, 'bad result after seek back'
+    f.seek(len(line2), 1)
+    list = f.readlines()
+    line = list[-1]
+    f.seek(f.tell() - len(line))
+    line2 = f.read()
+    if line != line2:
+        raise RuntimeError, 'bad result after seek back from EOF'
+    print 'Read', len(list), 'more lines'
+    print 'File length =', f.tell()
+    if f.tell() != length:
+        raise RuntimeError, 'bad length'
+    f.close()
+
+if __name__ == '__main__':
+    _test()
+"""
diff --git a/depot_tools/release/win/python_24/Lib/bsddb/dbshelve.py b/depot_tools/release/win/python_24/Lib/bsddb/dbshelve.py
new file mode 100644
index 0000000..d341ab7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bsddb/dbshelve.py
@@ -0,0 +1,299 @@
+#!/bin/env python
+#------------------------------------------------------------------------
+#           Copyright (c) 1997-2001 by Total Control Software
+#                         All Rights Reserved
+#------------------------------------------------------------------------
+#
+# Module Name:  dbShelve.py
+#
+# Description:  A reimplementation of the standard shelve.py that
+#               forces the use of cPickle, and DB.
+#
+# Creation Date:    11/3/97 3:39:04PM
+#
+# License:      This is free software.  You may use this software for any
+#               purpose including modification/redistribution, so long as
+#               this header remains intact and that you do not claim any
+#               rights of ownership or authorship of this software.  This
+#               software has been tested, but no warranty is expressed or
+#               implied.
+#
+# 13-Dec-2000:  Updated to be used with the new bsddb3 package.
+#               Added DBShelfCursor class.
+#
+#------------------------------------------------------------------------
+
+"""Manage shelves of pickled objects using bsddb database files for the
+storage.
+"""
+
+#------------------------------------------------------------------------
+
+import cPickle
try:
    from UserDict import DictMixin
except ImportError:
    # DictMixin is new in Python 2.3
    # Empty stub so DBShelf below can still be defined on older Pythons
    # (it just loses the mixin-derived dictionary methods).
    class DictMixin: pass
+import db
+
+#------------------------------------------------------------------------
+
+
def open(filename, flags=db.DB_CREATE, mode=0660, filetype=db.DB_HASH,
         dbenv=None, dbname=None):
    """
    A simple factory function for compatibility with the standard
    shelve.py module.  It can be used like this, where key is a string
    and data is a pickleable object:

        from bsddb import dbshelve
        db = dbshelve.open(filename)

        db[key] = data

        db.close()

    flags may be one of the db.DB_* flag constants or one of the
    dbm-style strings 'r', 'rw', 'w', 'c' or 'n'.  mode is the file
    permission mode used if the database is created; filetype selects
    the underlying access method (hash by default); dbenv and dbname
    are passed through to DBShelf.open().
    """
    # Translate dbm-style string flags into BerkeleyDB open flags.
    if type(flags) == type(''):
        sflag = flags
        if sflag == 'r':
            flags = db.DB_RDONLY
        elif sflag == 'rw':
            flags = 0
        elif sflag == 'w':
            flags =  db.DB_CREATE
        elif sflag == 'c':
            flags =  db.DB_CREATE
        elif sflag == 'n':
            flags = db.DB_TRUNCATE | db.DB_CREATE
        else:
            raise db.DBError, "flags should be one of 'r', 'w', 'c' or 'n' or use the bsddb.db.DB_* flags"

    d = DBShelf(dbenv)
    d.open(filename, dbname, filetype, flags, mode)
    return d
+
+#---------------------------------------------------------------------------
+
class DBShelf(DictMixin):
    """A shelf to hold pickled objects, built upon a bsddb DB object.  It
    automatically pickles/unpickles data objects going to/from the DB.
    """
    def __init__(self, dbenv=None):
        self.db = db.DB(dbenv)
        # Pickle protocol passed as cPickle.dumps' second argument
        # (1 == binary pickle protocol).
        self.binary = 1


    def __del__(self):
        # Ensure the underlying DB handle gets closed on collection
        # ('close' resolves to self.db.close via __getattr__).
        self.close()


    def __getattr__(self, name):
        """Many methods we can just pass through to the DB object.
        (See below)
        """
        return getattr(self.db, name)


    #-----------------------------------
    # Dictionary access methods

    def __len__(self):
        return len(self.db)


    def __getitem__(self, key):
        data = self.db[key]
        return cPickle.loads(data)


    def __setitem__(self, key, value):
        data = cPickle.dumps(value, self.binary)
        self.db[key] = data


    def __delitem__(self, key):
        del self.db[key]


    def keys(self, txn=None):
        if txn != None:
            return self.db.keys(txn)
        else:
            return self.db.keys()


    def items(self, txn=None):
        """Return a list of (key, unpickled value) pairs."""
        if txn != None:
            items = self.db.items(txn)
        else:
            items = self.db.items()
        newitems = []

        for k, v in items:
            newitems.append( (k, cPickle.loads(v)) )
        return newitems

    def values(self, txn=None):
        """Return the unpickled values."""
        if txn != None:
            values = self.db.values(txn)
        else:
            values = self.db.values()

        return map(cPickle.loads, values)

    #-----------------------------------
    # Other methods

    def __append(self, value, txn=None):
        # Real implementation of append(); installed lazily by append().
        data = cPickle.dumps(value, self.binary)
        return self.db.append(data, txn)

    def append(self, value, txn=None):
        # Bug fix: the test was inverted ('!=' instead of '=='), so
        # append() raised for DB_RECNO shelves -- the only kind it is
        # meant to support, per the error message below.
        if self.get_type() == db.DB_RECNO:
            self.append = self.__append
            return self.append(value, txn=txn)
        raise db.DBError("append() only supported when dbshelve opened with filetype=dbshelve.db.DB_RECNO")


    def associate(self, secondaryDB, callback, flags=0):
        """Like DB.associate, but unpickles the primary value before
        handing it to the user's callback."""
        def _shelf_callback(priKey, priData, realCallback=callback):
            data = cPickle.loads(priData)
            return realCallback(priKey, data)
        return self.db.associate(secondaryDB, _shelf_callback, flags)


    #def get(self, key, default=None, txn=None, flags=0):
    def get(self, *args, **kw):
        # We do it with *args and **kw so if the default value wasn't
        # given nothing is passed to the extension module.  That way
        # an exception can be raised if set_get_returns_none is turned
        # off.
        data = self.db.get(*args, **kw)
        try:
            return cPickle.loads(data)
        except (TypeError, cPickle.UnpicklingError):
            return data  # we may be getting the default value, or None,
                         # so it doesn't need unpickled.

    def get_both(self, key, value, txn=None, flags=0):
        data = cPickle.dumps(value, self.binary)
        data = self.db.get(key, data, txn, flags)
        return cPickle.loads(data)


    def cursor(self, txn=None, flags=0):
        c = DBShelfCursor(self.db.cursor(txn, flags))
        c.binary = self.binary
        return c


    def put(self, key, value, txn=None, flags=0):
        data = cPickle.dumps(value, self.binary)
        return self.db.put(key, data, txn, flags)


    def join(self, cursorList, flags=0):
        raise NotImplementedError


    #----------------------------------------------
    # Methods allowed to pass-through to self.db
    #
    #    close,  delete, fd, get_byteswapped, get_type, has_key,
    #    key_range, open, remove, rename, stat, sync,
    #    upgrade, verify, and all set_* methods.
+
+
+#---------------------------------------------------------------------------
+
class DBShelfCursor:
    """Cursor over a DBShelf that pickles/unpickles values on the fly.

    self.binary (the pickle protocol) is assigned by DBShelf.cursor()
    immediately after construction.
    """
    def __init__(self, cursor):
        self.dbc = cursor

    def __del__(self):
        self.close()


    def __getattr__(self, name):
        """Some methods we can just pass through to the cursor object.  (See below)"""
        return getattr(self.dbc, name)


    #----------------------------------------------

    def dup(self, flags=0):
        return DBShelfCursor(self.dbc.dup(flags))


    def put(self, key, value, flags=0):
        data = cPickle.dumps(value, self.binary)
        return self.dbc.put(key, data, flags)


    def get(self, *args):
        count = len(args)  # a method overloading hack
        method = getattr(self, 'get_%d' % count)
        # Bug fix: the dispatched result was computed and then dropped,
        # so get() always returned None; return it to the caller.
        return method(*args)

    def get_1(self, flags):
        rec = self.dbc.get(flags)
        return self._extract(rec)

    def get_2(self, key, flags):
        rec = self.dbc.get(key, flags)
        return self._extract(rec)

    def get_3(self, key, value, flags):
        data = cPickle.dumps(value, self.binary)
        # Bug fix: the pickled value was computed but never passed down,
        # which made the three-argument form behave exactly like get_2.
        rec = self.dbc.get(key, data, flags)
        return self._extract(rec)


    def current(self, flags=0): return self.get_1(flags|db.DB_CURRENT)
    def first(self, flags=0): return self.get_1(flags|db.DB_FIRST)
    def last(self, flags=0): return self.get_1(flags|db.DB_LAST)
    def next(self, flags=0): return self.get_1(flags|db.DB_NEXT)
    def prev(self, flags=0): return self.get_1(flags|db.DB_PREV)
    def consume(self, flags=0): return self.get_1(flags|db.DB_CONSUME)
    def next_dup(self, flags=0): return self.get_1(flags|db.DB_NEXT_DUP)
    def next_nodup(self, flags=0): return self.get_1(flags|db.DB_NEXT_NODUP)
    def prev_nodup(self, flags=0): return self.get_1(flags|db.DB_PREV_NODUP)


    def get_both(self, key, value, flags=0):
        data = cPickle.dumps(value, self.binary)
        # Bug fix: pass the pickled value to the cursor so DB_GET_BOTH
        # can match on it (it was previously discarded).
        rec = self.dbc.get_both(key, data, flags)
        return self._extract(rec)


    def set(self, key, flags=0):
        rec = self.dbc.set(key, flags)
        return self._extract(rec)

    def set_range(self, key, flags=0):
        rec = self.dbc.set_range(key, flags)
        return self._extract(rec)

    def set_recno(self, recno, flags=0):
        rec = self.dbc.set_recno(recno, flags)
        return self._extract(rec)

    set_both = get_both

    def _extract(self, rec):
        """Return None for a miss, else a (key, unpickled value) pair."""
        if rec is None:
            return None
        else:
            key, data = rec
            return key, cPickle.loads(data)

    #----------------------------------------------
    # Methods allowed to pass-through to self.dbc
    #
    # close, count, delete, get_recno, join_item
+
+#---------------------------------------------------------------------------
diff --git a/depot_tools/release/win/python_24/Lib/bsddb/dbtables.py b/depot_tools/release/win/python_24/Lib/bsddb/dbtables.py
new file mode 100644
index 0000000..f735bda
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bsddb/dbtables.py
@@ -0,0 +1,693 @@
+#-----------------------------------------------------------------------
+#
+# Copyright (C) 2000, 2001 by Autonomous Zone Industries
+# Copyright (C) 2002 Gregory P. Smith
+#
+# License:      This is free software.  You may use this software for any
+#               purpose including modification/redistribution, so long as
+#               this header remains intact and that you do not claim any
+#               rights of ownership or authorship of this software.  This
+#               software has been tested, but no warranty is expressed or
+#               implied.
+#
+#   --  Gregory P. Smith <greg@electricrain.com>
+
+# This provides a simple database table interface built on top of
+# the Python BerkeleyDB 3 interface.
+#
+_cvsid = '$Id: dbtables.py,v 1.11 2004/08/08 00:54:20 tim_one Exp $'
+
+import re
+import sys
+import copy
+import xdrlib
+import random
+from types import ListType, StringType
+import cPickle as pickle
+
+try:
+    # For Pythons w/distutils pybsddb
+    from bsddb3.db import *
+except ImportError:
+    # For Python 2.3
+    from bsddb.db import *
+
+
class TableDBError(StandardError):
    """Base exception for errors raised by the dbtables layer."""
    pass
class TableAlreadyExists(TableDBError):
    """Raised by CreateTable() when the named table already exists."""
    pass
+
+
class Cond:
    """This condition matches everything"""
    def __call__(self, s):
        # Base class is a match-anything predicate; always truthy.
        return 1
+
class ExactCond(Cond):
    """Condition that is true only for strings equal to `strtomatch`."""
    def __init__(self, strtomatch):
        # attribute name is visible to callers; keep it stable
        self.strtomatch = strtomatch
    def __call__(self, s):
        return self.strtomatch == s
+
class PrefixCond(Cond):
    """Condition that is true for strings starting with `prefix`."""
    def __init__(self, prefix):
        # .prefix is read externally (see cmp_conditions in dbtables)
        self.prefix = prefix
    def __call__(self, s):
        # equivalent to the original slice comparison s[:len(prefix)] == prefix
        return s.startswith(self.prefix)
+
class PostfixCond(Cond):
    """Condition that is true for strings ending with `postfix`.

    NOTE: implemented via slicing; an empty postfix therefore only
    matches the empty string (s[-0:] is the whole string), which is
    deliberately NOT the same as str.endswith('').
    """
    def __init__(self, postfix):
        self.postfix = postfix
    def __call__(self, s):
        return self.postfix == s[-len(self.postfix):]
+
class LikeCond(Cond):
    """
    Match using an SQL 'LIKE' style pattern: case insensitive, with '%'
    acting as a wildcard.  Simple patterns only -- a handful of regex
    metacharacters are escaped before the pattern is compiled.
    """
    def __init__(self, likestr, re_flags=re.IGNORECASE):
        # Backslash-escape the regex metacharacters in a single pass,
        # then turn SQL '%' wildcards into '.*'.
        specials = '.*+()[]?'
        escaped = ''.join(
            (c in specials and '\\' + c or c) for c in likestr)
        self.likestr = escaped.replace('%', '.*')
        self.re = re.compile('^' + self.likestr + '$', re_flags)
    def __call__(self, s):
        # returns a match object (truthy) or None, like the original
        return self.re.match(s)
+
+#
+# keys used to store database metadata
+#
+_table_names_key = '__TABLE_NAMES__'  # list of the tables in this db
+_columns = '._COLUMNS__'  # table_name+this key contains a list of columns
+
def _columns_key(table):
    """Return the metadata key under which `table`'s column list is stored."""
    return table + _columns
+
+#
+# these keys are found within table sub databases
+#
+_data =  '._DATA_.'  # this+column+this+rowid key contains table data
+_rowid = '._ROWID_.' # this+rowid+this key contains a unique entry for each
+                     # row in the table.  (no data is stored)
+_rowid_str_len = 8   # length in bytes of the unique rowid strings
+
def _data_key(table, col, rowid):
    """Key storing the value of (`table`, `col`) for the row `rowid`."""
    return table + _data + col + _data + rowid
+
def _search_col_data_key(table, col):
    """Common prefix of every _data_key() for `table`+`col` (cursor scans)."""
    return table + _data + col + _data
+
def _search_all_data_key(table):
    """Common prefix of every data key belonging to `table`."""
    return table + _data
+
def _rowid_key(table, rowid):
    """Key marking `rowid` as allocated within `table` (no data stored)."""
    return table + _rowid + rowid + _rowid
+
def _search_rowid_key(table):
    """Common prefix of every rowid key for `table` (cursor scans)."""
    return table + _rowid
+
def contains_metastrings(s):
    """Verify that the given string does not contain any
    metadata strings that might interfere with dbtables database operation.

    Returns 1 when a reserved metastring is present, else 0.
    """
    for reserved in (_table_names_key, _columns, _data, _rowid):
        if s.find(reserved) >= 0:
            return 1
    return 0
+
+
+class bsdTableDB :
+    def __init__(self, filename, dbhome, create=0, truncate=0, mode=0600,
+                 recover=0, dbflags=0):
+        """bsdTableDB.open(filename, dbhome, create=0, truncate=0, mode=0600)
+        Open database name in the dbhome BerkeleyDB directory.
+        Use keyword arguments when calling this constructor.
+        """
+        self.db = None
+        myflags = DB_THREAD
+        if create:
+            myflags |= DB_CREATE
+        flagsforenv = (DB_INIT_MPOOL | DB_INIT_LOCK | DB_INIT_LOG |
+                       DB_INIT_TXN | dbflags)
+        # DB_AUTO_COMMIT isn't a valid flag for env.open()
+        try:
+            dbflags |= DB_AUTO_COMMIT
+        except AttributeError:
+            pass
+        if recover:
+            flagsforenv = flagsforenv | DB_RECOVER
+        self.env = DBEnv()
+        # enable auto deadlock avoidance
+        self.env.set_lk_detect(DB_LOCK_DEFAULT)
+        self.env.open(dbhome, myflags | flagsforenv)
+        if truncate:
+            myflags |= DB_TRUNCATE
+        self.db = DB(self.env)
+        # this code relies on DBCursor.set* methods to raise exceptions
+        # rather than returning None
+        self.db.set_get_returns_none(1)
+        # allow duplicate entries [warning: be careful w/ metadata]
+        self.db.set_flags(DB_DUP)
+        self.db.open(filename, DB_BTREE, dbflags | myflags, mode)
+        self.dbfilename = filename
+        # Initialize the table names list if this is a new database
+        txn = self.env.txn_begin()
+        try:
+            if not self.db.has_key(_table_names_key, txn):
+                self.db.put(_table_names_key, pickle.dumps([], 1), txn=txn)
+        # Yes, bare except
+        except:
+            txn.abort()
+            raise
+        else:
+            txn.commit()
+        # TODO verify more of the database's metadata?
+        self.__tablecolumns = {}
+
+    def __del__(self):
+        self.close()
+
+    def close(self):
+        if self.db is not None:
+            self.db.close()
+            self.db = None
+        if self.env is not None:
+            self.env.close()
+            self.env = None
+
+    def checkpoint(self, mins=0):
+        try:
+            self.env.txn_checkpoint(mins)
+        except DBIncompleteError:
+            pass
+
+    def sync(self):
+        try:
+            self.db.sync()
+        except DBIncompleteError:
+            pass
+
+    def _db_print(self) :
+        """Print the database to stdout for debugging"""
+        print "******** Printing raw database for debugging ********"
+        cur = self.db.cursor()
+        try:
+            key, data = cur.first()
+            while 1:
+                print repr({key: data})
+                next = cur.next()
+                if next:
+                    key, data = next
+                else:
+                    cur.close()
+                    return
+        except DBNotFoundError:
+            cur.close()
+
+
+    def CreateTable(self, table, columns):
+        """CreateTable(table, columns) - Create a new table in the database
+        raises TableDBError if it already exists or for other DB errors.
+        """
+        assert isinstance(columns, ListType)
+        txn = None
+        try:
+            # checking sanity of the table and column names here on
+            # table creation will prevent problems elsewhere.
+            if contains_metastrings(table):
+                raise ValueError(
+                    "bad table name: contains reserved metastrings")
+            for column in columns :
+                if contains_metastrings(column):
+                    raise ValueError(
+                        "bad column name: contains reserved metastrings")
+
+            columnlist_key = _columns_key(table)
+            if self.db.has_key(columnlist_key):
+                raise TableAlreadyExists, "table already exists"
+
+            txn = self.env.txn_begin()
+            # store the table's column info
+            self.db.put(columnlist_key, pickle.dumps(columns, 1), txn=txn)
+
+            # add the table name to the tablelist
+            tablelist = pickle.loads(self.db.get(_table_names_key, txn=txn,
+                                                 flags=DB_RMW))
+            tablelist.append(table)
+            # delete 1st, in case we opened with DB_DUP
+            self.db.delete(_table_names_key, txn)
+            self.db.put(_table_names_key, pickle.dumps(tablelist, 1), txn=txn)
+
+            txn.commit()
+            txn = None
+        except DBError, dberror:
+            if txn:
+                txn.abort()
+            raise TableDBError, dberror[1]
+
+
+    def ListTableColumns(self, table):
+        """Return a list of columns in the given table.
+        [] if the table doesn't exist.
+        """
+        assert isinstance(table, StringType)
+        if contains_metastrings(table):
+            raise ValueError, "bad table name: contains reserved metastrings"
+
+        columnlist_key = _columns_key(table)
+        if not self.db.has_key(columnlist_key):
+            return []
+        pickledcolumnlist = self.db.get(columnlist_key)
+        if pickledcolumnlist:
+            return pickle.loads(pickledcolumnlist)
+        else:
+            return []
+
+    def ListTables(self):
+        """Return a list of tables in this database."""
+        pickledtablelist = self.db.get(_table_names_key)
+        if pickledtablelist:
+            return pickle.loads(pickledtablelist)
+        else:
+            return []
+
+    def CreateOrExtendTable(self, table, columns):
+        """CreateOrExtendTable(table, columns)
+
+        - Create a new table in the database.
+        If a table of this name already exists, extend it to have any
+        additional columns present in the given list as well as
+        all of its current columns.
+        """
+        assert isinstance(columns, ListType)
+        try:
+            self.CreateTable(table, columns)
+        except TableAlreadyExists:
+            # the table already existed, add any new columns
+            txn = None
+            try:
+                columnlist_key = _columns_key(table)
+                txn = self.env.txn_begin()
+
+                # load the current column list
+                oldcolumnlist = pickle.loads(
+                    self.db.get(columnlist_key, txn=txn, flags=DB_RMW))
+                # create a hash table for fast lookups of column names in the
+                # loop below
+                oldcolumnhash = {}
+                for c in oldcolumnlist:
+                    oldcolumnhash[c] = c
+
+                # create a new column list containing both the old and new
+                # column names
+                newcolumnlist = copy.copy(oldcolumnlist)
+                for c in columns:
+                    if not oldcolumnhash.has_key(c):
+                        newcolumnlist.append(c)
+
+                # store the table's new extended column list
+                if newcolumnlist != oldcolumnlist :
+                    # delete the old one first since we opened with DB_DUP
+                    self.db.delete(columnlist_key, txn)
+                    self.db.put(columnlist_key,
+                                pickle.dumps(newcolumnlist, 1),
+                                txn=txn)
+
+                txn.commit()
+                txn = None
+
+                self.__load_column_info(table)
+            except DBError, dberror:
+                if txn:
+                    txn.abort()
+                raise TableDBError, dberror[1]
+
+
+    def __load_column_info(self, table) :
+        """initialize the self.__tablecolumns dict"""
+        # check the column names
+        try:
+            tcolpickles = self.db.get(_columns_key(table))
+        except DBNotFoundError:
+            raise TableDBError, "unknown table: %r" % (table,)
+        if not tcolpickles:
+            raise TableDBError, "unknown table: %r" % (table,)
+        self.__tablecolumns[table] = pickle.loads(tcolpickles)
+
+    def __new_rowid(self, table, txn) :
+        """Create a new unique row identifier"""
+        unique = 0
+        while not unique:
+            # Generate a random 64-bit row ID string
+            # (note: this code has <64 bits of randomness
+            # but it's plenty for our database id needs!)
+            p = xdrlib.Packer()
+            p.pack_int(int(random.random()*2147483647))
+            p.pack_int(int(random.random()*2147483647))
+            newid = p.get_buffer()
+
+            # Guarantee uniqueness by adding this key to the database
+            try:
+                self.db.put(_rowid_key(table, newid), None, txn=txn,
+                            flags=DB_NOOVERWRITE)
+            except DBKeyExistError:
+                pass
+            else:
+                unique = 1
+
+        return newid
+
+
+    def Insert(self, table, rowdict) :
+        """Insert(table, datadict) - Insert a new row into the table
+        using the keys+values from rowdict as the column values.
+        """
+        txn = None
+        try:
+            if not self.db.has_key(_columns_key(table)):
+                raise TableDBError, "unknown table"
+
+            # check the validity of each column name
+            if not self.__tablecolumns.has_key(table):
+                self.__load_column_info(table)
+            for column in rowdict.keys() :
+                if not self.__tablecolumns[table].count(column):
+                    raise TableDBError, "unknown column: %r" % (column,)
+
+            # get a unique row identifier for this row
+            txn = self.env.txn_begin()
+            rowid = self.__new_rowid(table, txn=txn)
+
+            # insert the row values into the table database
+            for column, dataitem in rowdict.items():
+                # store the value
+                self.db.put(_data_key(table, column, rowid), dataitem, txn=txn)
+
+            txn.commit()
+            txn = None
+
+        except DBError, dberror:
+            # WIBNI we could just abort the txn and re-raise the exception?
+            # But no, because TableDBError is not related to DBError via
+            # inheritance, so it would be backwards incompatible.  Do the next
+            # best thing.
+            info = sys.exc_info()
+            if txn:
+                txn.abort()
+                self.db.delete(_rowid_key(table, rowid))
+            raise TableDBError, dberror[1], info[2]
+
+
+    def Modify(self, table, conditions={}, mappings={}):
+        """Modify(table, conditions) - Modify in rows matching 'conditions'
+        using mapping functions in 'mappings'
+        * conditions is a dictionary keyed on column names
+        containing condition functions expecting the data string as an
+        argument and returning a boolean.
+        * mappings is a dictionary keyed on column names containint condition
+        functions expecting the data string as an argument and returning the
+        new string for that column.
+        """
+        try:
+            matching_rowids = self.__Select(table, [], conditions)
+
+            # modify only requested columns
+            columns = mappings.keys()
+            for rowid in matching_rowids.keys():
+                txn = None
+                try:
+                    for column in columns:
+                        txn = self.env.txn_begin()
+                        # modify the requested column
+                        try:
+                            dataitem = self.db.get(
+                                _data_key(table, column, rowid),
+                                txn)
+                            self.db.delete(
+                                _data_key(table, column, rowid),
+                                txn)
+                        except DBNotFoundError:
+                             # XXXXXXX row key somehow didn't exist, assume no
+                             # error
+                            dataitem = None
+                        dataitem = mappings[column](dataitem)
+                        if dataitem <> None:
+                            self.db.put(
+                                _data_key(table, column, rowid),
+                                dataitem, txn=txn)
+                        txn.commit()
+                        txn = None
+
+                except DBError, dberror:
+                    if txn:
+                        txn.abort()
+                    raise
+
+        except DBError, dberror:
+            raise TableDBError, dberror[1]
+
+    def Delete(self, table, conditions={}):
+        """Delete(table, conditions) - Delete items matching the given
+        conditions from the table.
+        * conditions is a dictionary keyed on column names
+        containing condition functions expecting the data string as an
+        argument and returning a boolean.
+        """
+        try:
+            matching_rowids = self.__Select(table, [], conditions)
+
+            # delete row data from all columns
+            columns = self.__tablecolumns[table]
+            for rowid in matching_rowids.keys():
+                txn = None
+                try:
+                    txn = self.env.txn_begin()
+                    for column in columns:
+                        # delete the data key
+                        try:
+                            self.db.delete(_data_key(table, column, rowid),
+                                           txn)
+                        except DBNotFoundError:
+                            # XXXXXXX column may not exist, assume no error
+                            pass
+
+                    try:
+                        self.db.delete(_rowid_key(table, rowid), txn)
+                    except DBNotFoundError:
+                        # XXXXXXX row key somehow didn't exist, assume no error
+                        pass
+                    txn.commit()
+                    txn = None
+                except DBError, dberror:
+                    if txn:
+                        txn.abort()
+                    raise
+        except DBError, dberror:
+            raise TableDBError, dberror[1]
+
+
+    def Select(self, table, columns, conditions={}):
+        """Select(table, conditions) - retrieve specific row data
+        Returns a list of row column->value mapping dictionaries.
+        * columns is a list of which column data to return.  If
+          columns is None, all columns will be returned.
+        * conditions is a dictionary keyed on column names
+          containing callable conditions expecting the data string as an
+          argument and returning a boolean.
+        """
+        try:
+            if not self.__tablecolumns.has_key(table):
+                self.__load_column_info(table)
+            if columns is None:
+                columns = self.__tablecolumns[table]
+            matching_rowids = self.__Select(table, columns, conditions)
+        except DBError, dberror:
+            raise TableDBError, dberror[1]
+        # return the matches as a list of dictionaries
+        return matching_rowids.values()
+
+
+    def __Select(self, table, columns, conditions):
+        """__Select() - Used to implement Select and Delete (above)
+        Returns a dictionary keyed on rowids containing dicts
+        holding the row data for columns listed in the columns param
+        that match the given conditions.
+        * conditions is a dictionary keyed on column names
+        containing callable conditions expecting the data string as an
+        argument and returning a boolean.
+        """
+        # check the validity of each column name
+        if not self.__tablecolumns.has_key(table):
+            self.__load_column_info(table)
+        if columns is None:
+            columns = self.tablecolumns[table]
+        for column in (columns + conditions.keys()):
+            if not self.__tablecolumns[table].count(column):
+                raise TableDBError, "unknown column: %r" % (column,)
+
+        # keyed on rows that match so far, containings dicts keyed on
+        # column names containing the data for that row and column.
+        matching_rowids = {}
+        # keys are rowids that do not match
+        rejected_rowids = {}
+
+        # attempt to sort the conditions in such a way as to minimize full
+        # column lookups
+        def cmp_conditions(atuple, btuple):
+            a = atuple[1]
+            b = btuple[1]
+            if type(a) is type(b):
+                if isinstance(a, PrefixCond) and isinstance(b, PrefixCond):
+                    # longest prefix first
+                    return cmp(len(b.prefix), len(a.prefix))
+                if isinstance(a, LikeCond) and isinstance(b, LikeCond):
+                    # longest likestr first
+                    return cmp(len(b.likestr), len(a.likestr))
+                return 0
+            if isinstance(a, ExactCond):
+                return -1
+            if isinstance(b, ExactCond):
+                return 1
+            if isinstance(a, PrefixCond):
+                return -1
+            if isinstance(b, PrefixCond):
+                return 1
+            # leave all unknown condition callables alone as equals
+            return 0
+
+        conditionlist = conditions.items()
+        conditionlist.sort(cmp_conditions)
+
+        # Apply conditions to column data to find what we want
+        cur = self.db.cursor()
+        column_num = -1
+        for column, condition in conditionlist:
+            column_num = column_num + 1
+            searchkey = _search_col_data_key(table, column)
+            # speedup: don't linear search columns within loop
+            if column in columns:
+                savethiscolumndata = 1  # save the data for return
+            else:
+                savethiscolumndata = 0  # data only used for selection
+
+            try:
+                key, data = cur.set_range(searchkey)
+                while key[:len(searchkey)] == searchkey:
+                    # extract the rowid from the key
+                    rowid = key[-_rowid_str_len:]
+
+                    if not rejected_rowids.has_key(rowid):
+                        # if no condition was specified or the condition
+                        # succeeds, add row to our match list.
+                        if not condition or condition(data):
+                            if not matching_rowids.has_key(rowid):
+                                matching_rowids[rowid] = {}
+                            if savethiscolumndata:
+                                matching_rowids[rowid][column] = data
+                        else:
+                            if matching_rowids.has_key(rowid):
+                                del matching_rowids[rowid]
+                            rejected_rowids[rowid] = rowid
+
+                    key, data = cur.next()
+
+            except DBError, dberror:
+                if dberror[0] != DB_NOTFOUND:
+                    raise
+                continue
+
+        cur.close()
+
+        # we're done selecting rows, garbage collect the reject list
+        del rejected_rowids
+
+        # extract any remaining desired column data from the
+        # database for the matching rows.
+        if len(columns) > 0:
+            for rowid, rowdata in matching_rowids.items():
+                for column in columns:
+                    if rowdata.has_key(column):
+                        continue
+                    try:
+                        rowdata[column] = self.db.get(
+                            _data_key(table, column, rowid))
+                    except DBError, dberror:
+                        if dberror[0] != DB_NOTFOUND:
+                            raise
+                        rowdata[column] = None
+
+        # return the matches
+        return matching_rowids
+
+
+    def Drop(self, table):
+        """Remove an entire table from the database"""
+        txn = None
+        try:
+            txn = self.env.txn_begin()
+
+            # delete the column list
+            self.db.delete(_columns_key(table), txn)
+
+            cur = self.db.cursor(txn)
+
+            # delete all keys containing this tables column and row info
+            table_key = _search_all_data_key(table)
+            while 1:
+                try:
+                    key, data = cur.set_range(table_key)
+                except DBNotFoundError:
+                    break
+                # only delete items in this table
+                if key[:len(table_key)] != table_key:
+                    break
+                cur.delete()
+
+            # delete all rowids used by this table
+            table_key = _search_rowid_key(table)
+            while 1:
+                try:
+                    key, data = cur.set_range(table_key)
+                except DBNotFoundError:
+                    break
+                # only delete items in this table
+                if key[:len(table_key)] != table_key:
+                    break
+                cur.delete()
+
+            cur.close()
+
+            # delete the tablename from the table name list
+            tablelist = pickle.loads(
+                self.db.get(_table_names_key, txn=txn, flags=DB_RMW))
+            try:
+                tablelist.remove(table)
+            except ValueError:
+                # hmm, it wasn't there, oh well, that's what we want.
+                pass
+            # delete 1st, incase we opened with DB_DUP
+            self.db.delete(_table_names_key, txn)
+            self.db.put(_table_names_key, pickle.dumps(tablelist, 1), txn=txn)
+
+            txn.commit()
+            txn = None
+
+            if self.__tablecolumns.has_key(table):
+                del self.__tablecolumns[table]
+
+        except DBError, dberror:
+            if txn:
+                txn.abort()
+            raise TableDBError, dberror[1]
diff --git a/depot_tools/release/win/python_24/Lib/bsddb/dbutils.py b/depot_tools/release/win/python_24/Lib/bsddb/dbutils.py
new file mode 100644
index 0000000..3f63842
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/bsddb/dbutils.py
@@ -0,0 +1,77 @@
+#------------------------------------------------------------------------
+#
+# Copyright (C) 2000 Autonomous Zone Industries
+#
+# License:      This is free software.  You may use this software for any
+#               purpose including modification/redistribution, so long as
+#               this header remains intact and that you do not claim any
+#               rights of ownership or authorship of this software.  This
+#               software has been tested, but no warranty is expressed or
+#               implied.
+#
+# Author: Gregory P. Smith <greg@electricrain.com>
+#
+# Note: I don't know how useful this is in reality since when a
+#       DBLockDeadlockError happens the current transaction is supposed to be
+#       aborted.  If it doesn't then when the operation is attempted again
+#       the deadlock is still happening...
+#       --Robin
+#
+#------------------------------------------------------------------------
+
+
+#
+# import the time.sleep function in a namespace safe way to allow
+# "from bsddb.db import *"
+#
+from time import sleep as _sleep
+
+import db
+
# always sleep at least N seconds between retries
_deadlock_MinSleepTime = 1.0/64
# never sleep more than N seconds between retries
_deadlock_MaxSleepTime = 3.14159

# Assign a file object to this for a "sleeping" message to be written to it
# each retry
_deadlock_VerboseFile = None
+
+
def DeadlockWrap(function, *_args, **_kwargs):
    """DeadlockWrap(function, *_args, **_kwargs) - automatically retries
    function in case of a database deadlock.

    Wraps a database call so that a DBLockDeadlockError triggers a retry,
    sleeping between attempts with exponential back-off bounded by the
    module-level _deadlock_MinSleepTime/_deadlock_MaxSleepTime.

    A 'max_retries' keyword argument may optionally be passed to prevent
    it from retrying forever (in which case the exception is reraised).

        d = DB(...)
        d.open(...)
        DeadlockWrap(d.put, "foo", data="bar")  # set key "foo" to "bar"
    """
    # consume our private keyword before forwarding the rest to function
    max_retries = _kwargs.pop('max_retries', -1)
    sleeptime = _deadlock_MinSleepTime
    while True:
        try:
            return function(*_args, **_kwargs)
        except db.DBLockDeadlockError:
            if _deadlock_VerboseFile:
                _deadlock_VerboseFile.write(
                    'dbutils.DeadlockWrap: sleeping %1.3f\n' % sleeptime)
            _sleep(sleeptime)
            # exponential back-off, capped at the module maximum
            sleeptime = min(sleeptime * 2, _deadlock_MaxSleepTime)
            max_retries -= 1
            if max_retries == -1:
                raise
+
+
+#------------------------------------------------------------------------
diff --git a/depot_tools/release/win/python_24/Lib/calendar.py b/depot_tools/release/win/python_24/Lib/calendar.py
new file mode 100644
index 0000000..70813896
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/calendar.py
@@ -0,0 +1,230 @@
+"""Calendar printing functions
+
+Note when comparing these calendars to the ones printed by cal(1): By
+default, these calendars have Monday as the first day of the week, and
+Sunday as the last (the European convention). Use setfirstweekday() to
+set the first day of the week (0=Monday, 6=Sunday)."""
+
+import datetime
+
+__all__ = ["error","setfirstweekday","firstweekday","isleap",
+           "leapdays","weekday","monthrange","monthcalendar",
+           "prmonth","month","prcal","calendar","timegm",
+           "month_name", "month_abbr", "day_name", "day_abbr"]
+
# Exception raised for bad input (with string parameter for details).
# Kept as an alias of ValueError for backward compatibility.
error = ValueError

# Constants for months referenced later
January = 1
February = 2

# Number of days per month (except for February in leap years); index 0 is
# a pad so the list can be indexed directly by 1-based month number.
mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
+
+# This module used to have hard-coded lists of day and month names, as
+# English strings.  The classes following emulate a read-only version of
+# that, but supply localized names.  Note that the values are computed
+# fresh on each call, in case the user changes locale between calls.
+
class _localized_month:

    # Bound strftime methods for the first day of every month of 2001;
    # a placeholder at index 0 makes the table 1-based like month numbers.
    _months = [datetime.date(2001, m, 1).strftime for m in range(1, 13)]
    _months.insert(0, lambda x: "")

    def __init__(self, format):
        # strftime format used to render each month name, e.g. '%B'.
        self.format = format

    def __getitem__(self, i):
        entry = self._months[i]
        if not isinstance(i, slice):
            return entry(self.format)
        # A slice of _months yields a list of strftime methods.
        return [render(self.format) for render in entry]

    def __len__(self):
        return 13
+
class _localized_day:

    # January 1, 2001, was a Monday, so day i+1 of that month has
    # weekday number i.
    _days = [datetime.date(2001, 1, d).strftime for d in range(1, 8)]

    def __init__(self, format):
        # strftime format used to render each day name, e.g. '%A'.
        self.format = format

    def __getitem__(self, i):
        entry = self._days[i]
        if not isinstance(i, slice):
            return entry(self.format)
        # A slice of _days yields a list of strftime methods.
        return [render(self.format) for render in entry]

    def __len__(self):
        return 7
+
# Full and abbreviated names of weekdays.  Values are recomputed on each
# access, so they track locale changes made after import.
day_name = _localized_day('%A')
day_abbr = _localized_day('%a')

# Full and abbreviated names of months (1-based arrays!!!)
month_name = _localized_month('%B')
month_abbr = _localized_month('%b')
+
# Constants for weekdays
(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7)

# Module-wide first day of the week: 0 = Monday, 6 = Sunday.
_firstweekday = 0

def firstweekday():
    """Return the current first weekday (0=Monday .. 6=Sunday)."""
    return _firstweekday

def setfirstweekday(weekday):
    """Set weekday (Monday=0, Sunday=6) to start each week."""
    global _firstweekday
    if not MONDAY <= weekday <= SUNDAY:
        raise ValueError(
            'bad weekday number; must be 0 (Monday) to 6 (Sunday)')
    _firstweekday = weekday
+
def isleap(year):
    """Return 1 for leap years, 0 for non-leap years."""
    # Gregorian rule: divisible by 4, except centuries not divisible by 400.
    if year % 4:
        return False
    return year % 100 != 0 or year % 400 == 0
+
def leapdays(y1, y2):
    """Return number of leap years in range [y1, y2).
       Assume y1 <= y2."""
    # Count complete leap cycles up to (but excluding) each endpoint.
    a = y1 - 1
    b = y2 - 1
    return (b // 4 - a // 4) - (b // 100 - a // 100) + (b // 400 - a // 400)
+
def weekday(year, month, day):
    """Return weekday (0-6 ~ Mon-Sun) for year (1970-...), month (1-12),
       day (1-31)."""
    date = datetime.date(year, month, day)
    return date.weekday()
+
def monthrange(year, month):
    """Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for
       year, month."""
    if not 1 <= month <= 12:
        raise ValueError('bad month number')
    first = weekday(year, month, 1)
    ndays = mdays[month]
    # February gains a day in leap years.
    if month == February and isleap(year):
        ndays = ndays + 1
    return first, ndays
+
def monthcalendar(year, month):
    """Return a matrix representing a month's calendar.
       Each row represents a week; days outside this month are zero."""
    firstday, ndays = monthrange(year, month)
    weeks = []
    # Day number of the first cell; values <= 0 pad the first week.
    current = (_firstweekday - firstday + 6) % 7 - 5
    while current <= ndays:
        row = [0] * 7
        for col in range(7):
            if 1 <= current <= ndays:
                row[col] = current
            current = current + 1
        weeks.append(row)
    return weeks
+
def prweek(theweek, width):
    """Print a single week (no newline)."""
    # Python 2 print statement; the trailing comma suppresses the newline.
    print week(theweek, width),
+
def week(theweek, width):
    """Returns a single week in a string (no newline)."""
    def fmt(day):
        # Zero marks a day that belongs to a neighboring month.
        if day == 0:
            return ''.center(width)
        return ('%2i' % day).center(width)   # right-align single-digit days
    return ' '.join([fmt(day) for day in theweek])
+
def weekheader(width):
    """Return a header for a week."""
    # Wide columns get full day names, narrow ones the abbreviations.
    if width >= 9:
        source = day_name
    else:
        source = day_abbr
    cols = []
    for offset in range(7):
        label = source[(_firstweekday + offset) % 7]
        cols.append(label[:width].center(width))
    return ' '.join(cols)
+
def prmonth(theyear, themonth, w=0, l=0):
    """Print a month's calendar."""
    # Python 2 print statement; the trailing comma suppresses the newline
    # (month() already ends with one).
    print month(theyear, themonth, w, l),
+
def month(theyear, themonth, w=0, l=0):
    """Return a month's calendar string (multi-line)."""
    w = max(2, w)
    l = max(1, l)
    # Title, weekday header, then one line per week; each line is
    # right-stripped and the lines are separated by l newlines.
    title = "%s %r" % (month_name[themonth], theyear)
    lines = [title.center(7 * (w + 1) - 1).rstrip()]
    lines.append(weekheader(w).rstrip())
    for aweek in monthcalendar(theyear, themonth):
        lines.append(week(aweek, w).rstrip())
    return ('\n' * l).join(lines) + '\n'
+
# Spacing of month columns for 3-column year calendar (defaults for the
# format3c*/calendar functions below).
_colwidth = 7*3 - 1         # Amount printed by prweek()
_spacing = 6                # Number of spaces between columns
+
def format3c(a, b, c, colwidth=_colwidth, spacing=_spacing):
    """Prints 3-column formatting for year calendars"""
    # Python 2 print statement; emits the formatted row plus a newline.
    print format3cstring(a, b, c, colwidth, spacing)
+
def format3cstring(a, b, c, colwidth=_colwidth, spacing=_spacing):
    """Returns a string formatted from 3 strings, centered within 3 columns."""
    gap = ' ' * spacing
    columns = [a.center(colwidth), b.center(colwidth), c.center(colwidth)]
    return gap.join(columns)
+
def prcal(year, w=0, l=0, c=_spacing):
    """Print a year's calendar."""
    # Python 2 print statement; the trailing comma suppresses the newline
    # (calendar() already ends with one).
    print calendar(year, w, l, c),
+
def calendar(year, w=0, l=0, c=_spacing):
    """Returns a year's calendar as a multi-line string."""
    w = max(2, w)
    l = max(1, l)
    c = max(2, c)
    colwidth = (w + 1) * 7 - 1
    # Centered year on top, then four rows of three months each.
    out = repr(year).center(colwidth * 3 + c * 2).rstrip() + '\n' * l
    days_header = weekheader(w)
    header = format3cstring(days_header, days_header, days_header,
                            colwidth, c).rstrip()
    for first in range(January, January + 12, 3):
        trio = range(first, first + 3)
        names = format3cstring(month_name[trio[0]], month_name[trio[1]],
                               month_name[trio[2]], colwidth, c).rstrip()
        out = out + '\n' * l + names + '\n' * l + header + '\n' * l
        cals = [monthcalendar(year, m) for m in trio]
        # Pad shorter months with empty columns up to the tallest one.
        nweeks = max([len(cal) for cal in cals])
        for row in range(nweeks):
            segs = []
            for cal in cals:
                if row < len(cal):
                    segs.append(week(cal[row], w))
                else:
                    segs.append('')
            out = out + format3cstring(segs[0], segs[1], segs[2],
                                       colwidth, c).rstrip() + '\n' * l
    # Drop the final run of separators and end with a single newline.
    return out[:-l] + '\n'
+
# Proleptic-Gregorian ordinal of the Unix epoch, 1970-01-01.
EPOCH = 1970
_EPOCH_ORD = datetime.date(EPOCH, 1, 1).toordinal()

def timegm(tuple):
    """Unrelated but handy function to calculate Unix timestamp from GMT."""
    year, month, day, hour, minute, second = tuple[:6]
    # Whole days since the epoch, then fold in the time of day.
    days = datetime.date(year, month, 1).toordinal() - _EPOCH_ORD + day - 1
    return ((days * 24 + hour) * 60 + minute) * 60 + second
diff --git a/depot_tools/release/win/python_24/Lib/cgi.py b/depot_tools/release/win/python_24/Lib/cgi.py
new file mode 100644
index 0000000..fb40ed3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/cgi.py
@@ -0,0 +1,1055 @@
+#! /usr/local/bin/python
+
+# NOTE: the above "/usr/local/bin/python" is NOT a mistake.  It is
+# intentionally NOT "/usr/bin/env python".  On many systems
+# (e.g. Solaris), /usr/local/bin is not in $PATH as passed to CGI
+# scripts, and /usr/local/bin is the default directory where Python is
+# installed, so /usr/bin/env would be unable to find python.  Granted,
+# binary installations by Linux vendors often install Python in
+# /usr/bin.  So let those vendors patch cgi.py to match their choice
+# of installation.
+
+"""Support module for CGI (Common Gateway Interface) scripts.
+
+This module defines a number of utilities for use by CGI scripts
+written in Python.
+"""
+
+# XXX Perhaps there should be a slimmed version that doesn't contain
+# all those backwards compatible and debugging classes and functions?
+
+# History
+# -------
+#
+# Michael McLay started this module.  Steve Majewski changed the
+# interface to SvFormContentDict and FormContentDict.  The multipart
+# parsing was inspired by code submitted by Andreas Paepcke.  Guido van
+# Rossum rewrote, reformatted and documented the module and is currently
+# responsible for its maintenance.
+#
+
+__version__ = "2.6"
+
+
+# Imports
+# =======
+
+import sys
+import os
+import urllib
+import mimetools
+import rfc822
+import UserDict
+from StringIO import StringIO
+
+__all__ = ["MiniFieldStorage", "FieldStorage", "FormContentDict",
+           "SvFormContentDict", "InterpFormContentDict", "FormContent",
+           "parse", "parse_qs", "parse_qsl", "parse_multipart",
+           "parse_header", "print_exception", "print_environ",
+           "print_form", "print_directory", "print_arguments",
+           "print_environ_usage", "escape"]
+
# Logging support
# ===============

# initlog() lazily opens 'logfile' into 'logfp' on the first log() call.
logfile = ""            # Filename to log to, if not empty
logfp = None            # File object to log to, if not None
+
def initlog(*allargs):
    """Write a log message, if there is a log file.

    Even though this function is called initlog(), you should always
    use log(); log is a variable that is set either to initlog
    (initially), to dolog (once the log file has been opened), or to
    nolog (when logging is disabled).

    The first argument is a format string; the remaining arguments (if
    any) are arguments to the % operator, so e.g.
        log("%s: %s", "a", "b")
    will write "a: b" to the log file, followed by a newline.

    If the global logfp is not None, it should be a file object to
    which log data is written.

    If the global logfp is None, the global logfile may be a string
    giving a filename to open, in append mode.  This file should be
    world writable!!!  If the file can't be opened, logging is
    silently disabled (since there is no safe place where we could
    send an error message).

    """
    global logfp, log
    # Lazily open the configured log file on first use; failure to open
    # silently disables logging.
    if logfile and not logfp:
        try:
            logfp = open(logfile, "a")
        except IOError:
            pass
    # Rebind the module-level 'log' so later calls skip this setup.
    if logfp:
        log = dolog
    else:
        log = nolog
    log(*allargs)
+
def dolog(fmt, *args):
    """Write a log message to the log file.  See initlog() for docs."""
    message = fmt % args
    logfp.write(message + "\n")
+
+def nolog(*allargs):
+    """Dummy function, assigned to log when logging is disabled."""
+    pass
+
# Current logging function; initlog() rebinds this to dolog or nolog.
log = initlog           # The current logging function


# Parsing functions
# =================

# Maximum input we will accept when REQUEST_METHOD is POST
# 0 ==> unlimited input
maxlen = 0
+
def parse(fp=None, environ=os.environ, keep_blank_values=0, strict_parsing=0):
    """Parse a query in the environment or from a file (default stdin)

        Arguments, all optional:

        fp              : file pointer; default: sys.stdin

        environ         : environment dictionary; default: os.environ

        keep_blank_values: flag indicating whether blank values in
            URL encoded forms should be treated as blank strings.
            A true value indicates that blanks should be retained as
            blank strings.  The default false value indicates that
            blank values are to be ignored and treated as if they were
            not included.

        strict_parsing: flag indicating what to do with parsing errors.
            If false (the default), errors are silently ignored.
            If true, errors raise a ValueError exception.
    """
    if fp is None:
        fp = sys.stdin
    if 'REQUEST_METHOD' not in environ:
        environ['REQUEST_METHOD'] = 'GET'       # For testing stand-alone
    if environ['REQUEST_METHOD'] == 'POST':
        ctype, pdict = parse_header(environ['CONTENT_TYPE'])
        if ctype == 'multipart/form-data':
            return parse_multipart(fp, pdict)
        if ctype == 'application/x-www-form-urlencoded':
            clength = int(environ['CONTENT_LENGTH'])
            if maxlen and clength > maxlen:
                raise ValueError('Maximum content length exceeded')
            qs = fp.read(clength)
        else:
            qs = ''                     # Unknown content-type
        # A POST may also carry a query string (or command-line argument
        # when testing); append it to the body data.
        if 'QUERY_STRING' in environ:
            if qs:
                qs = qs + '&'
            qs = qs + environ['QUERY_STRING']
        elif sys.argv[1:]:
            if qs:
                qs = qs + '&'
            qs = qs + sys.argv[1]
        environ['QUERY_STRING'] = qs    # XXX Shouldn't, really
    elif 'QUERY_STRING' in environ:
        qs = environ['QUERY_STRING']
    else:
        # Stand-alone testing: take the query from the command line.
        if sys.argv[1:]:
            qs = sys.argv[1]
        else:
            qs = ""
        environ['QUERY_STRING'] = qs    # XXX Shouldn't, really
    return parse_qs(qs, keep_blank_values, strict_parsing)
+
+
def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
    """Parse a query given as a string argument.

        Arguments:

        qs: URL-encoded query string to be parsed

        keep_blank_values: flag indicating whether blank values in
            URL encoded queries should be treated as blank strings.
            A true value indicates that blanks should be retained as
            blank strings.  The default false value indicates that
            blank values are to be ignored and treated as if they were
            not included.

        strict_parsing: flag indicating what to do with parsing errors.
            If false (the default), errors are silently ignored.
            If true, errors raise a ValueError exception.
    """
    # Group repeated field names into lists, preserving order of values.
    result = {}
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
        result.setdefault(name, []).append(value)
    return result
+
def parse_qsl(qs, keep_blank_values=0, strict_parsing=0):
    """Parse a query given as a string argument.

    Arguments:

    qs: URL-encoded query string to be parsed

    keep_blank_values: flag indicating whether blank values in
        URL encoded queries should be treated as blank strings.  A
        true value indicates that blanks should be retained as blank
        strings.  The default false value indicates that blank values
        are to be ignored and treated as if they were  not included.

    strict_parsing: flag indicating what to do with parsing errors. If
        false (the default), errors are silently ignored. If true,
        errors raise a ValueError exception.

    Returns a list of (name, value) pairs in input order.
    """
    # Both '&' and ';' are accepted as pair separators.
    fields = [piece for chunk in qs.split('&') for piece in chunk.split(';')]
    result = []
    for field in fields:
        if not field and not strict_parsing:
            continue
        parts = field.split('=', 1)
        if len(parts) != 2:
            if strict_parsing:
                raise ValueError("bad query field: %r" % (field,))
            # Handle case of a control-name with no equal sign
            if not keep_blank_values:
                continue
            parts.append('')
        if parts[1] or keep_blank_values:
            name = urllib.unquote(parts[0].replace('+', ' '))
            value = urllib.unquote(parts[1].replace('+', ' '))
            result.append((name, value))

    return result
+
+
def parse_multipart(fp, pdict):
    """Parse multipart input.

    Arguments:
    fp   : input file
    pdict: dictionary containing other parameters of content-type header

    Returns a dictionary just like parse_qs(): keys are the field names, each
    value is a list of values for that field.  This is easy to use but not
    much good if you are expecting megabytes to be uploaded -- in that case,
    use the FieldStorage class instead which is much more flexible.  Note
    that content-type is the raw, unparsed contents of the content-type
    header.

    XXX This does not parse nested multipart parts -- use FieldStorage for
    that.

    XXX This should really be subsumed by FieldStorage altogether -- no
    point in having two implementations of the same parsing algorithm.

    """
    boundary = ""
    if 'boundary' in pdict:
        boundary = pdict['boundary']
    if not valid_boundary(boundary):
        raise ValueError,  ('Invalid boundary in multipart form: %r'
                            % (boundary,))

    nextpart = "--" + boundary
    lastpart = "--" + boundary + "--"
    partdict = {}
    terminator = ""

    while terminator != lastpart:
        bytes = -1
        data = None
        # On the first iteration terminator is "" (the preamble before the
        # first boundary); no headers are read and data stays None, so the
        # 'continue' below skips it.
        if terminator:
            # At start of next part.  Read headers first.
            headers = mimetools.Message(fp)
            clength = headers.getheader('content-length')
            if clength:
                try:
                    bytes = int(clength)
                except ValueError:
                    pass
            if bytes > 0:
                if maxlen and bytes > maxlen:
                    raise ValueError, 'Maximum content length exceeded'
                data = fp.read(bytes)
            else:
                data = ""
        # Read lines until end of part.
        lines = []
        while 1:
            line = fp.readline()
            if not line:
                terminator = lastpart # End outer loop
                break
            if line[:2] == "--":
                terminator = line.strip()
                if terminator in (nextpart, lastpart):
                    break
            lines.append(line)
        # Done with part.
        if data is None:
            continue
        if bytes < 0:
            # No content-length header: the accumulated lines are the data.
            if lines:
                # Strip final line terminator
                line = lines[-1]
                if line[-2:] == "\r\n":
                    line = line[:-2]
                elif line[-1:] == "\n":
                    line = line[:-1]
                lines[-1] = line
                data = "".join(lines)
        # 'headers' is guaranteed bound here: data is not None only when
        # the terminator branch above ran.
        line = headers['content-disposition']
        if not line:
            continue
        key, params = parse_header(line)
        if key != 'form-data':
            continue
        if 'name' in params:
            name = params['name']
        else:
            continue
        if name in partdict:
            partdict[name].append(data)
        else:
            partdict[name] = [data]

    return partdict
+
+
def parse_header(line):
    """Parse a Content-type like header.

    Return the main content-type and a dictionary of options.

    """
    parts = [piece.strip() for piece in line.split(';')]
    key = parts[0].lower()
    pdict = {}
    for param in parts[1:]:
        eq = param.find('=')
        if eq < 0:
            # Parameters without '=' are silently dropped.
            continue
        name = param[:eq].strip().lower()
        value = param[eq + 1:].strip()
        # Strip surrounding quotes and undo backslash escapes.
        if len(value) >= 2 and value[0] == '"' and value[-1] == '"':
            value = value[1:-1].replace('\\\\', '\\').replace('\\"', '"')
        pdict[name] = value
    return key, pdict
+
+
+# Classes for field storage
+# =========================
+
class MiniFieldStorage:

    """Like FieldStorage, for use when no file uploads are possible."""

    # Dummy attributes mirroring FieldStorage's interface; a mini field
    # never carries an upload, so these stay at their class defaults.
    filename = None
    list = None
    type = None
    file = None
    type_options = {}
    disposition = None
    disposition_options = {}
    headers = {}

    def __init__(self, name, value):
        """Constructor from field name and value."""
        self.name = name
        self.value = value

    def __repr__(self):
        """Return printable representation."""
        return "MiniFieldStorage(%r, %r)" % (self.name, self.value)
+
+
+class FieldStorage:
+
+    """Store a sequence of fields, reading multipart/form-data.
+
+    This class provides naming, typing, files stored on disk, and
+    more.  At the top level, it is accessible like a dictionary, whose
+    keys are the field names.  (Note: None can occur as a field name.)
+    The items are either a Python list (if there's multiple values) or
+    another FieldStorage or MiniFieldStorage object.  If it's a single
+    object, it has the following attributes:
+
+    name: the field name, if specified; otherwise None
+
+    filename: the filename, if specified; otherwise None; this is the
+        client side filename, *not* the file name on which it is
+        stored (that's a temporary file you don't deal with)
+
+    value: the value as a *string*; for file uploads, this
+        transparently reads the file every time you request the value
+
+    file: the file(-like) object from which you can read the data;
+        None if the data is stored a simple string
+
+    type: the content-type, or None if not specified
+
+    type_options: dictionary of options specified on the content-type
+        line
+
+    disposition: content-disposition, or None if not specified
+
+    disposition_options: dictionary of corresponding options
+
+    headers: a dictionary(-like) object (sometimes rfc822.Message or a
+        subclass thereof) containing *all* headers
+
+    The class is subclassable, mostly for the purpose of overriding
+    the make_file() method, which is called internally to come up with
+    a file open for reading and writing.  This makes it possible to
+    override the default choice of storing all files in a temporary
+    directory and unlinking them as soon as they have been opened.
+
+    """
+
    def __init__(self, fp=None, headers=None, outerboundary="",
                 environ=os.environ, keep_blank_values=0, strict_parsing=0):
        """Constructor.  Read multipart/* until last part.

        Arguments, all optional:

        fp              : file pointer; default: sys.stdin
            (not used when the request method is GET)

        headers         : header dictionary-like object; default:
            taken from environ as per CGI spec

        outerboundary   : terminating multipart boundary
            (for internal use only)

        environ         : environment dictionary; default: os.environ

        keep_blank_values: flag indicating whether blank values in
            URL encoded forms should be treated as blank strings.
            A true value indicates that blanks should be retained as
            blank strings.  The default false value indicates that
            blank values are to be ignored and treated as if they were
            not included.

        strict_parsing: flag indicating what to do with parsing errors.
            If false (the default), errors are silently ignored.
            If true, errors raise a ValueError exception.

        """
        method = 'GET'
        self.keep_blank_values = keep_blank_values
        self.strict_parsing = strict_parsing
        if 'REQUEST_METHOD' in environ:
            method = environ['REQUEST_METHOD'].upper()
        if method == 'GET' or method == 'HEAD':
            # GET/HEAD carry the form data in the query string (or in
            # sys.argv[1] for stand-alone testing); wrap it in a file.
            if 'QUERY_STRING' in environ:
                qs = environ['QUERY_STRING']
            elif sys.argv[1:]:
                qs = sys.argv[1]
            else:
                qs = ""
            fp = StringIO(qs)
            if headers is None:
                headers = {'content-type':
                           "application/x-www-form-urlencoded"}
        if headers is None:
            # No headers supplied: synthesize them from the CGI
            # environment variables.
            headers = {}
            if method == 'POST':
                # Set default content-type for POST to what's traditional
                headers['content-type'] = "application/x-www-form-urlencoded"
            if 'CONTENT_TYPE' in environ:
                headers['content-type'] = environ['CONTENT_TYPE']
            if 'CONTENT_LENGTH' in environ:
                headers['content-length'] = environ['CONTENT_LENGTH']
        self.fp = fp or sys.stdin
        self.headers = headers
        self.outerboundary = outerboundary

        # Process content-disposition header
        cdisp, pdict = "", {}
        if 'content-disposition' in self.headers:
            cdisp, pdict = parse_header(self.headers['content-disposition'])
        self.disposition = cdisp
        self.disposition_options = pdict
        self.name = None
        if 'name' in pdict:
            self.name = pdict['name']
        self.filename = None
        if 'filename' in pdict:
            self.filename = pdict['filename']

        # Process content-type header
        #
        # Honor any existing content-type header.  But if there is no
        # content-type header, use some sensible defaults.  Assume
        # outerboundary is "" at the outer level, but something non-false
        # inside a multi-part.  The default for an inner part is text/plain,
        # but for an outer part it should be urlencoded.  This should catch
        # bogus clients which erroneously forget to include a content-type
        # header.
        #
        # See below for what we do if there does exist a content-type header,
        # but it happens to be something we don't understand.
        if 'content-type' in self.headers:
            ctype, pdict = parse_header(self.headers['content-type'])
        elif self.outerboundary or method != 'POST':
            ctype, pdict = "text/plain", {}
        else:
            ctype, pdict = 'application/x-www-form-urlencoded', {}
        self.type = ctype
        self.type_options = pdict
        self.innerboundary = ""
        if 'boundary' in pdict:
            self.innerboundary = pdict['boundary']
        clen = -1
        if 'content-length' in self.headers:
            try:
                clen = int(self.headers['content-length'])
            except ValueError:
                pass
            if maxlen and clen > maxlen:
                raise ValueError, 'Maximum content length exceeded'
        self.length = clen

        # Dispatch on the content type to one of the read_* methods,
        # which populate self.list (fields) and/or self.file (payload).
        self.list = self.file = None
        self.done = 0
        if ctype == 'application/x-www-form-urlencoded':
            self.read_urlencoded()
        elif ctype[:10] == 'multipart/':
            self.read_multi(environ, keep_blank_values, strict_parsing)
        else:
            self.read_single()
+
+    def __repr__(self):
+        """Return a printable representation."""
+        return "FieldStorage(%r, %r, %r)" % (
+                self.name, self.filename, self.value)
+
    def __iter__(self):
        """Iterate over the distinct field names, dictionary style."""
        return iter(self.keys())
+
+    def __getattr__(self, name):
+        if name != 'value':
+            raise AttributeError, name
+        if self.file:
+            self.file.seek(0)
+            value = self.file.read()
+            self.file.seek(0)
+        elif self.list is not None:
+            value = self.list
+        else:
+            value = None
+        return value
+
+    def __getitem__(self, key):
+        """Dictionary style indexing."""
+        if self.list is None:
+            raise TypeError, "not indexable"
+        found = []
+        for item in self.list:
+            if item.name == key: found.append(item)
+        if not found:
+            raise KeyError, key
+        if len(found) == 1:
+            return found[0]
+        else:
+            return found
+
+    def getvalue(self, key, default=None):
+        """Dictionary style get() method, including 'value' lookup."""
+        if key in self:
+            value = self[key]
+            if type(value) is type([]):
+                return map(lambda v: v.value, value)
+            else:
+                return value.value
+        else:
+            return default
+
+    def getfirst(self, key, default=None):
+        """ Return the first value received."""
+        if key in self:
+            value = self[key]
+            if type(value) is type([]):
+                return value[0].value
+            else:
+                return value.value
+        else:
+            return default
+
+    def getlist(self, key):
+        """ Return list of received values."""
+        if key in self:
+            value = self[key]
+            if type(value) is type([]):
+                return map(lambda v: v.value, value)
+            else:
+                return [value.value]
+        else:
+            return []
+
+    def keys(self):
+        """Dictionary style keys() method."""
+        if self.list is None:
+            raise TypeError, "not indexable"
+        keys = []
+        for item in self.list:
+            if item.name not in keys: keys.append(item.name)
+        return keys
+
+    def has_key(self, key):
+        """Dictionary style has_key() method."""
+        if self.list is None:
+            raise TypeError, "not indexable"
+        for item in self.list:
+            if item.name == key: return True
+        return False
+
+    def __contains__(self, key):
+        """Dictionary style __contains__ method."""
+        if self.list is None:
+            raise TypeError, "not indexable"
+        for item in self.list:
+            if item.name == key: return True
+        return False
+
    def __len__(self):
        """Dictionary style len(x) support."""
        # Counts distinct field names, not individual values.
        return len(self.keys())
+
+    def read_urlencoded(self):
+        """Internal: read data in query string format."""
+        qs = self.fp.read(self.length)
+        self.list = list = []
+        for key, value in parse_qsl(qs, self.keep_blank_values,
+                                    self.strict_parsing):
+            list.append(MiniFieldStorage(key, value))
+        self.skip_lines()
+
    # Hook for subclasses: class used for nested parts in read_multi();
    # None means "use this instance's own class".
    FieldStorageClass = None
+
+    def read_multi(self, environ, keep_blank_values, strict_parsing):
+        """Internal: read a part that is itself multipart."""
+        ib = self.innerboundary
+        if not valid_boundary(ib):
+            raise ValueError, 'Invalid boundary in multipart form: %r' % (ib,)
+        self.list = []
+        klass = self.FieldStorageClass or self.__class__
+        part = klass(self.fp, {}, ib,
+                     environ, keep_blank_values, strict_parsing)
+        # Throw first part away
+        while not part.done:
+            headers = rfc822.Message(self.fp)
+            part = klass(self.fp, headers, ib,
+                         environ, keep_blank_values, strict_parsing)
+            self.list.append(part)
+        self.skip_lines()
+
+    def read_single(self):
+        """Internal: read an atomic part."""
+        if self.length >= 0:
+            self.read_binary()
+            self.skip_lines()
+        else:
+            self.read_lines()
+        self.file.seek(0)
+
    # Chunk size used by read_binary() when copying the payload to file.
    bufsize = 8*1024            # I/O buffering size for copy to file
+
+    def read_binary(self):
+        """Internal: read binary data."""
+        self.file = self.make_file('b')
+        todo = self.length
+        if todo >= 0:
+            while todo > 0:
+                data = self.fp.read(min(todo, self.bufsize))
+                if not data:
+                    self.done = -1
+                    break
+                self.file.write(data)
+                todo = todo - len(data)
+
+    def read_lines(self):
+        """Internal: read lines until EOF or outerboundary."""
+        self.file = self.__file = StringIO()
+        if self.outerboundary:
+            self.read_lines_to_outerboundary()
+        else:
+            self.read_lines_to_eof()
+
+    def __write(self, line):
+        # Spill the in-memory buffer to a real temporary file once the
+        # accumulated data would exceed 1000 bytes; after the spill,
+        # __file is None and writes go straight to self.file.
+        if self.__file is not None:
+            if self.__file.tell() + len(line) > 1000:
+                self.file = self.make_file('')
+                self.file.write(self.__file.getvalue())
+                self.__file = None
+        self.file.write(line)
+
+    def read_lines_to_eof(self):
+        """Internal: read lines until EOF."""
+        while 1:
+            line = self.fp.readline()
+            if not line:
+                self.done = -1
+                break
+            self.__write(line)
+
+    def read_lines_to_outerboundary(self):
+        """Internal: read lines until outerboundary."""
+        next = "--" + self.outerboundary
+        last = next + "--"
+        delim = ""
+        while 1:
+            line = self.fp.readline()
+            if not line:
+                # Premature EOF: the terminating boundary never arrived.
+                self.done = -1
+                break
+            if line[:2] == "--":
+                strippedline = line.strip()
+                if strippedline == next:
+                    break
+                if strippedline == last:
+                    self.done = 1
+                    break
+            # The newline immediately before a boundary belongs to the
+            # boundary, not to the data.  So hold back each line's line
+            # ending (delim) and emit it only once the *next* line turns
+            # out not to be a boundary.
+            odelim = delim
+            if line[-2:] == "\r\n":
+                delim = "\r\n"
+                line = line[:-2]
+            elif line[-1] == "\n":
+                delim = "\n"
+                line = line[:-1]
+            else:
+                delim = ""
+            self.__write(odelim + line)
+
+    def skip_lines(self):
+        """Internal: skip lines until outer boundary if defined."""
+        if not self.outerboundary or self.done:
+            return
+        next = "--" + self.outerboundary
+        last = next + "--"
+        while 1:
+            line = self.fp.readline()
+            if not line:
+                self.done = -1
+                break
+            if line[:2] == "--":
+                strippedline = line.strip()
+                if strippedline == next:
+                    break
+                if strippedline == last:
+                    self.done = 1
+                    break
+
+    def make_file(self, binary=None):
+        """Overridable: return a readable & writable file.
+
+        The file will be used as follows:
+        - data is written to it
+        - seek(0)
+        - data is read from it
+
+        The 'binary' argument is unused -- the file is always opened
+        in binary mode.
+
+        This version opens a temporary file for reading and writing,
+        and immediately deletes (unlinks) it.  The trick (on Unix!) is
+        that the file can still be used, but it can't be opened by
+        another process, and it will automatically be deleted when it
+        is closed or when the current process terminates.
+
+        If you want a more permanent file, you derive a class which
+        overrides this method.  If you want a visible temporary file
+        that is nevertheless automatically deleted when the script
+        terminates, try defining a __del__ method in a derived class
+        which unlinks the temporary files you have created.
+
+        """
+        import tempfile
+        return tempfile.TemporaryFile("w+b")
+
+
+
+# Backwards Compatibility Classes
+# ===============================
+
+class FormContentDict(UserDict.UserDict):
+    """Form content as dictionary with a list of values per field.
+
+    form = FormContentDict()
+
+    form[key] -> [value, value, ...]
+    key in form -> Boolean
+    form.keys() -> [key, key, ...]
+    form.values() -> [[val, val, ...], [val, val, ...], ...]
+    form.items() ->  [(key, [val, val, ...]), (key, [val, val, ...]), ...]
+    form.dict == {key: [val, val, ...], ...}
+
+    """
+    def __init__(self, environ=os.environ):
+        self.dict = self.data = parse(environ=environ)
+        self.query_string = environ['QUERY_STRING']
+
+
+class SvFormContentDict(FormContentDict):
+    """Form content as dictionary expecting a single value per field.
+
+    If you only expect a single value for each field, then form[key]
+    will return that single value.  It will raise an IndexError if
+    that expectation is not true.  If you expect a field to have
+    possible multiple values, than you can use form.getlist(key) to
+    get all of the values.  values() and items() are a compromise:
+    they return single strings where there is a single value, and
+    lists of strings otherwise.
+
+    """
+    def __getitem__(self, key):
+        if len(self.dict[key]) > 1:
+            raise IndexError, 'expecting a single value'
+        return self.dict[key][0]
+    def getlist(self, key):
+        return self.dict[key]
+    def values(self):
+        result = []
+        for value in self.dict.values():
+            if len(value) == 1:
+                result.append(value[0])
+            else: result.append(value)
+        return result
+    def items(self):
+        result = []
+        for key, value in self.dict.items():
+            if len(value) == 1:
+                result.append((key, value[0]))
+            else: result.append((key, value))
+        return result
+
+
+class InterpFormContentDict(SvFormContentDict):
+    """This class is present for backwards compatibility only."""
+    def __getitem__(self, key):
+        v = SvFormContentDict.__getitem__(self, key)
+        if v[0] in '0123456789+-.':
+            try: return int(v)
+            except ValueError:
+                try: return float(v)
+                except ValueError: pass
+        return v.strip()
+    def values(self):
+        result = []
+        for key in self.keys():
+            try:
+                result.append(self[key])
+            except IndexError:
+                result.append(self.dict[key])
+        return result
+    def items(self):
+        result = []
+        for key in self.keys():
+            try:
+                result.append((key, self[key]))
+            except IndexError:
+                result.append((key, self.dict[key]))
+        return result
+
+
+class FormContent(FormContentDict):
+    """This class is present for backwards compatibility only."""
+    def values(self, key):
+        if key in self.dict :return self.dict[key]
+        else: return None
+    def indexed_value(self, key, location):
+        if key in self.dict:
+            if len(self.dict[key]) > location:
+                return self.dict[key][location]
+            else: return None
+        else: return None
+    def value(self, key):
+        if key in self.dict: return self.dict[key][0]
+        else: return None
+    def length(self, key):
+        return len(self.dict[key])
+    def stripped(self, key):
+        if key in self.dict: return self.dict[key][0].strip()
+        else: return None
+    def pars(self):
+        return self.dict
+
+
+# Test/debug code
+# ===============
+
+def test(environ=os.environ):
+    """Robust test CGI script, usable as main program.
+
+    Write minimal HTTP headers and dump all information provided to
+    the script in HTML form.
+
+    """
+    print "Content-type: text/html"
+    print
+    sys.stderr = sys.stdout
+    try:
+        form = FieldStorage()   # Replace with other classes to test those
+        print_directory()
+        print_arguments()
+        print_form(form)
+        print_environ(environ)
+        print_environ_usage()
+        def f():
+            # Deliberately exec invalid source so that a SyntaxError is
+            # raised, exercising print_exception() below.
+            exec "testing print_exception() -- <I>italics?</I>"
+        def g(f=f):
+            f()
+        print "<H3>What follows is a test, not an actual exception:</H3>"
+        g()
+    except:
+        print_exception()
+
+    print "<H1>Second try with a small maxlen...</H1>"
+
+    # NOTE(review): maxlen presumably caps the accepted request size
+    # elsewhere in this module -- confirm; shrinking it here forces the
+    # over-limit code path on the second parse.
+    global maxlen
+    maxlen = 50
+    try:
+        form = FieldStorage()   # Replace with other classes to test those
+        print_directory()
+        print_arguments()
+        print_form(form)
+        print_environ(environ)
+    except:
+        print_exception()
+
+def print_exception(type=None, value=None, tb=None, limit=None):
+    """Dump a traceback as HTML; defaults to the current exception."""
+    if type is None:
+        type, value, tb = sys.exc_info()
+    import traceback
+    print
+    print "<H3>Traceback (most recent call last):</H3>"
+    # The last formatted line is the "ExceptionType: message" summary;
+    # render it in bold, everything before it as plain preformatted text.
+    list = traceback.format_tb(tb, limit) + \
+           traceback.format_exception_only(type, value)
+    print "<PRE>%s<B>%s</B></PRE>" % (
+        escape("".join(list[:-1])),
+        escape(list[-1]),
+        )
+    # Drop the local reference to the traceback object when done.
+    del tb
+
+def print_environ(environ=os.environ):
+    """Dump the shell environment as HTML."""
+    keys = environ.keys()
+    keys.sort()
+    print
+    print "<H3>Shell Environment:</H3>"
+    print "<DL>"
+    # Escape both names and values so they render literally in HTML.
+    for key in keys:
+        print "<DT>", escape(key), "<DD>", escape(environ[key])
+    print "</DL>"
+    print
+
+def print_form(form):
+    """Dump the contents of a form as HTML."""
+    keys = form.keys()
+    keys.sort()
+    print
+    print "<H3>Form Contents:</H3>"
+    if not keys:
+        print "<P>No form fields."
+    print "<DL>"
+    for key in keys:
+        print "<DT>" + escape(key) + ":",
+        value = form[key]
+        # Show the value's type and repr, escaped for literal display.
+        print "<i>" + escape(repr(type(value))) + "</i>"
+        print "<DD>" + escape(repr(value))
+    print "</DL>"
+    print
+
+def print_directory():
+    """Dump the current directory as HTML."""
+    print
+    print "<H3>Current Working Directory:</H3>"
+    try:
+        pwd = os.getcwd()
+    except os.error, msg:
+        # getcwd() can fail (e.g. the directory was removed); report
+        # the error instead of aborting the page mid-render.
+        print "os.error:", escape(str(msg))
+    else:
+        print escape(pwd)
+    print
+
+def print_arguments():
+    """Dump the command line arguments (sys.argv) as HTML."""
+    print
+    print "<H3>Command Line Arguments:</H3>"
+    print
+    print sys.argv
+    print
+
+def print_environ_usage():
+    """Dump a list of environment variables used by CGI as HTML."""
+    # Static reference text; nothing below is escaped or interpolated.
+    print """
+<H3>These environment variables could have been set:</H3>
+<UL>
+<LI>AUTH_TYPE
+<LI>CONTENT_LENGTH
+<LI>CONTENT_TYPE
+<LI>DATE_GMT
+<LI>DATE_LOCAL
+<LI>DOCUMENT_NAME
+<LI>DOCUMENT_ROOT
+<LI>DOCUMENT_URI
+<LI>GATEWAY_INTERFACE
+<LI>LAST_MODIFIED
+<LI>PATH
+<LI>PATH_INFO
+<LI>PATH_TRANSLATED
+<LI>QUERY_STRING
+<LI>REMOTE_ADDR
+<LI>REMOTE_HOST
+<LI>REMOTE_IDENT
+<LI>REMOTE_USER
+<LI>REQUEST_METHOD
+<LI>SCRIPT_NAME
+<LI>SERVER_NAME
+<LI>SERVER_PORT
+<LI>SERVER_PROTOCOL
+<LI>SERVER_ROOT
+<LI>SERVER_SOFTWARE
+</UL>
+In addition, HTTP headers sent by the server may be passed in the
+environment as well.  Here are some common variable names:
+<UL>
+<LI>HTTP_ACCEPT
+<LI>HTTP_CONNECTION
+<LI>HTTP_HOST
+<LI>HTTP_PRAGMA
+<LI>HTTP_REFERER
+<LI>HTTP_USER_AGENT
+</UL>
+"""
+
+
+# Utilities
+# =========
+
+def escape(s, quote=None):
+    """Replace special characters '&', '<' and '>' by SGML entities."""
+    s = s.replace("&", "&amp;") # Must be done first!
+    s = s.replace("<", "&lt;")
+    s = s.replace(">", "&gt;")
+    if quote:
+        s = s.replace('"', "&quot;")
+    return s
+
+def valid_boundary(s, _vb_pattern="^[ -~]{0,200}[!-~]$"):
+    import re
+    return re.match(_vb_pattern, s)
+
+# Invoke mainline
+# ===============
+
+# Call test() when this file is run as a script (not imported as a module)
+# test() doubles as this module's demo/self-test; harmless on import.
+if __name__ == '__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/cgitb.py b/depot_tools/release/win/python_24/Lib/cgitb.py
new file mode 100644
index 0000000..b8e14e7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/cgitb.py
@@ -0,0 +1,313 @@
+"""More comprehensive traceback formatting for Python scripts.
+
+To enable this module, do:
+
+    import cgitb; cgitb.enable()
+
+at the top of your script.  The optional arguments to enable() are:
+
+    display     - if true, tracebacks are displayed in the web browser
+    logdir      - if set, tracebacks are written to files in this directory
+    context     - number of lines of source code to show for each stack frame
+    format      - 'text' or 'html' controls the output format
+
+By default, tracebacks are displayed but not saved, the context is 5 lines,
+and the output format is 'html' (for backwards compatibility with the
+original use of this module).
+
+Alternatively, if you have caught an exception and want cgitb to display it
+for you, call cgitb.handler().  The optional argument to handler() is a
+3-item tuple (etype, evalue, etb) just like the value of sys.exc_info().
+The default handler displays output as HTML.
+"""
+
+__author__ = 'Ka-Ping Yee'
+__version__ = '$Revision: 1.15 $'
+
+import sys
+
+def reset():
+    """Return a string that resets the CGI and browser to a known state."""
+    return '''<!--: spam
+Content-Type: text/html
+
+<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> -->
+<body bgcolor="#f0f0f8"><font color="#f0f0f8" size="-5"> --> -->
+</font> </font> </font> </script> </object> </blockquote> </pre>
+</table> </table> </table> </table> </table> </font> </font> </font>'''
+
+# Compared with "is" by callers; a fresh list guarantees an identity
+# that no real looked-up value can share.
+__UNDEF__ = []                          # a special sentinel object
+def small(text):
+    if text:
+        return '<small>' + text + '</small>'
+    else:
+        return ''
+
+def strong(text):
+    if text:
+        return '<strong>' + text + '</strong>'
+    else:
+        return ''
+
+def grey(text):
+    if text:
+        return '<font color="#909090">' + text + '</font>'
+    else:
+        return ''
+
+def lookup(name, frame, locals):
+    """Find the value for a given name in the given environment."""
+    if name in locals:
+        return 'local', locals[name]
+    if name in frame.f_globals:
+        return 'global', frame.f_globals[name]
+    if '__builtins__' in frame.f_globals:
+        builtins = frame.f_globals['__builtins__']
+        if type(builtins) is type({}):
+            if name in builtins:
+                return 'builtin', builtins[name]
+        else:
+            if hasattr(builtins, name):
+                return 'builtin', getattr(builtins, name)
+    return None, __UNDEF__
+
+def scanvars(reader, frame, locals):
+    """Scan one logical line of Python and look up values of variables used."""
+    import tokenize, keyword
+    # vars collects (name, where, value) triples; prefix accumulates a
+    # dotted path while walking attribute accesses like "a.b.c".
+    vars, lasttoken, parent, prefix, value = [], None, None, '', __UNDEF__
+    for ttype, token, start, end, line in tokenize.generate_tokens(reader):
+        if ttype == tokenize.NEWLINE: break
+        if ttype == tokenize.NAME and token not in keyword.kwlist:
+            if lasttoken == '.':
+                # Attribute access: resolve token against the object to
+                # the left of the dot, if that object was resolvable.
+                if parent is not __UNDEF__:
+                    value = getattr(parent, token, __UNDEF__)
+                    vars.append((prefix + token, prefix, value))
+            else:
+                where, value = lookup(token, frame, locals)
+                vars.append((token, where, value))
+        elif token == '.':
+            prefix += lasttoken + '.'
+            parent = value
+        else:
+            # Any other token resets the dotted-name state.
+            parent, prefix = None, ''
+        lasttoken = token
+    return vars
+
+def html((etype, evalue, etb), context=5):
+    """Return a nice HTML document describing a given traceback."""
+    import os, types, time, traceback, linecache, inspect, pydoc
+
+    if type(etype) is types.ClassType:
+        etype = etype.__name__
+    pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
+    date = time.ctime(time.time())
+    head = '<body bgcolor="#f0f0f8">' + pydoc.html.heading(
+        '<big><big>%s</big></big>' %
+        strong(pydoc.html.escape(str(etype))),
+        '#ffffff', '#6622aa', pyver + '<br>' + date) + '''
+<p>A problem occurred in a Python script.  Here is the sequence of
+function calls leading up to the error, in the order they occurred.</p>'''
+
+    indent = '<tt>' + small('&nbsp;' * 5) + '&nbsp;</tt>'
+    frames = []
+    records = inspect.getinnerframes(etb, context)
+    for frame, file, lnum, func, lines, index in records:
+        file = file and os.path.abspath(file) or '?'
+        link = '<a href="file://%s">%s</a>' % (file, pydoc.html.escape(file))
+        args, varargs, varkw, locals = inspect.getargvalues(frame)
+        call = ''
+        if func != '?':
+            call = 'in ' + strong(func) + \
+                inspect.formatargvalues(args, varargs, varkw, locals,
+                    formatvalue=lambda value: '=' + pydoc.html.repr(value))
+
+        highlight = {}
+        def reader(lnum=[lnum]):
+            highlight[lnum[0]] = 1
+            try: return linecache.getline(file, lnum[0])
+            finally: lnum[0] += 1
+        vars = scanvars(reader, frame, locals)
+
+        rows = ['<tr><td bgcolor="#d8bbff">%s%s %s</td></tr>' %
+                ('<big>&nbsp;</big>', link, call)]
+        if index is not None:
+            i = lnum - index
+            for line in lines:
+                num = small('&nbsp;' * (5-len(str(i))) + str(i)) + '&nbsp;'
+                line = '<tt>%s%s</tt>' % (num, pydoc.html.preformat(line))
+                if i in highlight:
+                    rows.append('<tr><td bgcolor="#ffccee">%s</td></tr>' % line)
+                else:
+                    rows.append('<tr><td>%s</td></tr>' % grey(line))
+                i += 1
+
+        done, dump = {}, []
+        for name, where, value in vars:
+            if name in done: continue
+            done[name] = 1
+            if value is not __UNDEF__:
+                if where in ['global', 'builtin']:
+                    name = ('<em>%s</em> ' % where) + strong(name)
+                elif where == 'local':
+                    name = strong(name)
+                else:
+                    name = where + strong(name.split('.')[-1])
+                dump.append('%s&nbsp;= %s' % (name, pydoc.html.repr(value)))
+            else:
+                dump.append(name + ' <em>undefined</em>')
+
+        rows.append('<tr><td>%s</td></tr>' % small(grey(', '.join(dump))))
+        frames.append('''
+<table width="100%%" cellspacing=0 cellpadding=0 border=0>
+%s</table>''' % '\n'.join(rows))
+
+    exception = ['<p>%s: %s' % (strong(pydoc.html.escape(str(etype))),
+                                pydoc.html.escape(str(evalue)))]
+    if type(evalue) is types.InstanceType:
+        for name in dir(evalue):
+            if name[:1] == '_': continue
+            value = pydoc.html.repr(getattr(evalue, name))
+            exception.append('\n<br>%s%s&nbsp;=\n%s' % (indent, name, value))
+
+    import traceback
+    return head + ''.join(frames) + ''.join(exception) + '''
+
+
+<!-- The above is a description of an error in a Python program, formatted
+     for a Web browser because the 'cgitb' module was enabled.  In case you
+     are not reading this in a Web browser, here is the original traceback:
+
+%s
+-->
+''' % ''.join(traceback.format_exception(etype, evalue, etb))
+
+def text((etype, evalue, etb), context=5):
+    """Return a plain text document describing a given traceback."""
+    import os, types, time, traceback, linecache, inspect, pydoc
+
+    if type(etype) is types.ClassType:
+        etype = etype.__name__
+    pyver = 'Python ' + sys.version.split()[0] + ': ' + sys.executable
+    date = time.ctime(time.time())
+    head = "%s\n%s\n%s\n" % (str(etype), pyver, date) + '''
+A problem occurred in a Python script.  Here is the sequence of
+function calls leading up to the error, in the order they occurred.
+'''
+
+    frames = []
+    records = inspect.getinnerframes(etb, context)
+    for frame, file, lnum, func, lines, index in records:
+        file = file and os.path.abspath(file) or '?'
+        args, varargs, varkw, locals = inspect.getargvalues(frame)
+        call = ''
+        if func != '?':
+            call = 'in ' + func + \
+                inspect.formatargvalues(args, varargs, varkw, locals,
+                    formatvalue=lambda value: '=' + pydoc.text.repr(value))
+
+        highlight = {}
+        def reader(lnum=[lnum]):
+            highlight[lnum[0]] = 1
+            try: return linecache.getline(file, lnum[0])
+            finally: lnum[0] += 1
+        vars = scanvars(reader, frame, locals)
+
+        rows = [' %s %s' % (file, call)]
+        if index is not None:
+            i = lnum - index
+            for line in lines:
+                num = '%5d ' % i
+                rows.append(num+line.rstrip())
+                i += 1
+
+        done, dump = {}, []
+        for name, where, value in vars:
+            if name in done: continue
+            done[name] = 1
+            if value is not __UNDEF__:
+                if where == 'global': name = 'global ' + name
+                elif where != 'local': name = where + name.split('.')[-1]
+                dump.append('%s = %s' % (name, pydoc.text.repr(value)))
+            else:
+                dump.append(name + ' undefined')
+
+        rows.append('\n'.join(dump))
+        frames.append('\n%s\n' % '\n'.join(rows))
+
+    exception = ['%s: %s' % (str(etype), str(evalue))]
+    if type(evalue) is types.InstanceType:
+        for name in dir(evalue):
+            value = pydoc.text.repr(getattr(evalue, name))
+            exception.append('\n%s%s = %s' % (" "*4, name, value))
+
+    import traceback
+    return head + ''.join(frames) + ''.join(exception) + '''
+
+The above is a description of an error in a Python program.  Here is
+the original traceback:
+
+%s
+''' % ''.join(traceback.format_exception(etype, evalue, etb))
+
+class Hook:
+    """A hook to replace sys.excepthook that shows tracebacks in HTML."""
+
+    def __init__(self, display=1, logdir=None, context=5, file=None,
+                 format="html"):
+        self.display = display          # send tracebacks to browser if true
+        self.logdir = logdir            # log tracebacks to files if not None
+        self.context = context          # number of source code lines per frame
+        self.file = file or sys.stdout  # place to send the output
+        self.format = format            # 'html' or anything else for text
+
+    def __call__(self, etype, evalue, etb):
+        # sys.excepthook-compatible signature; delegates to handle().
+        self.handle((etype, evalue, etb))
+
+    def handle(self, info=None):
+        """Format and emit the given (or current) exception."""
+        info = info or sys.exc_info()
+        if self.format == "html":
+            self.file.write(reset())
+
+        formatter = (self.format=="html") and html or text
+        plain = False
+        try:
+            doc = formatter(info, self.context)
+        except:                         # just in case something goes wrong
+            # Fall back to a plain traceback of the *original* error.
+            import traceback
+            doc = ''.join(traceback.format_exception(*info))
+            plain = True
+
+        if self.display:
+            if plain:
+                # Plain fallback text must be escaped before embedding.
+                doc = doc.replace('&', '&amp;').replace('<', '&lt;')
+                self.file.write('<pre>' + doc + '</pre>\n')
+            else:
+                self.file.write(doc + '\n')
+        else:
+            self.file.write('<p>A problem occurred in a Python script.\n')
+
+        if self.logdir is not None:
+            # Additionally save the full report to a unique file.
+            import os, tempfile
+            suffix = ['.txt', '.html'][self.format=="html"]
+            (fd, path) = tempfile.mkstemp(suffix=suffix, dir=self.logdir)
+            try:
+                file = os.fdopen(fd, 'w')
+                file.write(doc)
+                file.close()
+                msg = '<p> %s contains the description of this error.' % path
+            except:
+                msg = '<p> Tried to save traceback to %s, but failed.' % path
+            self.file.write(msg + '\n')
+        try:
+            self.file.flush()
+        except: pass
+
+# Module-level convenience: cgitb.handler(info) formats and prints the
+# current (or given) exception using a default HTML Hook.
+handler = Hook().handle
+def enable(display=1, logdir=None, context=5, format="html"):
+    """Install an exception handler that formats tracebacks as HTML.
+
+    The optional argument 'display' can be set to 0 to suppress sending the
+    traceback to the browser, and 'logdir' can be set to a directory to cause
+    tracebacks to be written to files there."""
+    sys.excepthook = Hook(display=display, logdir=logdir,
+                          context=context, format=format)
diff --git a/depot_tools/release/win/python_24/Lib/chunk.py b/depot_tools/release/win/python_24/Lib/chunk.py
new file mode 100644
index 0000000..bda965f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/chunk.py
@@ -0,0 +1,167 @@
+"""Simple class to read IFF chunks.
+
+An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File
+Format)) has the following structure:
+
++----------------+
+| ID (4 bytes)   |
++----------------+
+| size (4 bytes) |
++----------------+
+| data           |
+| ...            |
++----------------+
+
+The ID is a 4-byte string which identifies the type of chunk.
+
+The size field (a 32-bit value, encoded using big-endian byte order)
+gives the size of the whole chunk, including the 8-byte header.
+
+Usually an IFF-type file consists of one or more chunks.  The proposed
+usage of the Chunk class defined here is to instantiate an instance at
+the start of each chunk and read from the instance until it reaches
+the end, after which a new instance can be instantiated.  At the end
+of the file, creating a new instance will fail with an EOFError
+exception.
+
+Usage:
+while True:
+    try:
+        chunk = Chunk(file)
+    except EOFError:
+        break
+    chunktype = chunk.getname()
+    while True:
+        data = chunk.read(nbytes)
+        if not data:
+            pass
+        # do something with data
+
+The interface is file-like.  The implemented methods are:
+read, close, seek, tell, isatty.
+Extra methods are: skip() (called by close, skips to the end of the chunk),
+getname() (returns the name (ID) of the chunk)
+
+The __init__ method has one required argument, a file-like object
+(including a chunk instance), and one optional argument, a flag which
+specifies whether or not chunks are aligned on 2-byte boundaries.  The
+default is 1, i.e. aligned.
+"""
+
+class Chunk:
+    def __init__(self, file, align=True, bigendian=True, inclheader=False):
+        import struct
+        self.closed = False
+        self.align = align      # whether to align to word (2-byte) boundaries
+        # Byte order of the 4-byte size field: '>' big-endian (default).
+        if bigendian:
+            strflag = '>'
+        else:
+            strflag = '<'
+        self.file = file
+        self.chunkname = file.read(4)
+        if len(self.chunkname) < 4:
+            raise EOFError
+        try:
+            self.chunksize = struct.unpack(strflag+'l', file.read(4))[0]
+        except struct.error:
+            raise EOFError
+        if inclheader:
+            self.chunksize = self.chunksize - 8 # subtract header
+        self.size_read = 0
+        # Remember the data offset when the underlying file supports
+        # tell(), so seek() can work; otherwise mark as unseekable.
+        try:
+            self.offset = self.file.tell()
+        except (AttributeError, IOError):
+            self.seekable = False
+        else:
+            self.seekable = True
+
+    def getname(self):
+        """Return the name (ID) of the current chunk."""
+        return self.chunkname
+
+    def getsize(self):
+        """Return the size of the current chunk."""
+        return self.chunksize
+
+    def close(self):
+        # Skips to the end of the chunk; does NOT close the underlying
+        # file object.
+        if not self.closed:
+            self.skip()
+            self.closed = True
+
+    def isatty(self):
+        if self.closed:
+            raise ValueError, "I/O operation on closed file"
+        return False
+
+    def seek(self, pos, whence=0):
+        """Seek to specified position into the chunk.
+        Default position is 0 (start of chunk).
+        If the file is not seekable, this will result in an error.
+        """
+
+        if self.closed:
+            raise ValueError, "I/O operation on closed file"
+        if not self.seekable:
+            raise IOError, "cannot seek"
+        # whence: 0 = chunk start, 1 = current position, 2 = chunk end.
+        if whence == 1:
+            pos = pos + self.size_read
+        elif whence == 2:
+            pos = pos + self.chunksize
+        if pos < 0 or pos > self.chunksize:
+            raise RuntimeError
+        self.file.seek(self.offset + pos, 0)
+        self.size_read = pos
+
+    def tell(self):
+        if self.closed:
+            raise ValueError, "I/O operation on closed file"
+        return self.size_read
+
+    def read(self, size=-1):
+        """Read at most size bytes from the chunk.
+        If size is omitted or negative, read until the end
+        of the chunk.
+        """
+
+        if self.closed:
+            raise ValueError, "I/O operation on closed file"
+        if self.size_read >= self.chunksize:
+            return ''
+        # Clamp the request to the bytes remaining in this chunk.
+        if size < 0:
+            size = self.chunksize - self.size_read
+        if size > self.chunksize - self.size_read:
+            size = self.chunksize - self.size_read
+        data = self.file.read(size)
+        self.size_read = self.size_read + len(data)
+        # Odd-sized aligned chunks carry one pad byte; consume it once
+        # the chunk's data is exhausted.
+        if self.size_read == self.chunksize and \
+           self.align and \
+           (self.chunksize & 1):
+            dummy = self.file.read(1)
+            self.size_read = self.size_read + len(dummy)
+        return data
+
+    def skip(self):
+        """Skip the rest of the chunk.
+        If you are not interested in the contents of the chunk,
+        this method should be called so that the file points to
+        the start of the next chunk.
+        """
+
+        if self.closed:
+            raise ValueError, "I/O operation on closed file"
+        if self.seekable:
+            try:
+                n = self.chunksize - self.size_read
+                # maybe fix alignment
+                if self.align and (self.chunksize & 1):
+                    n = n + 1
+                self.file.seek(n, 1)
+                self.size_read = self.size_read + n
+                return
+            except IOError:
+                pass
+        # Unseekable file: fall back to reading and discarding.
+        while self.size_read < self.chunksize:
+            n = min(8192, self.chunksize - self.size_read)
+            dummy = self.read(n)
+            if not dummy:
+                raise EOFError
diff --git a/depot_tools/release/win/python_24/Lib/cmd.py b/depot_tools/release/win/python_24/Lib/cmd.py
new file mode 100644
index 0000000..3f82b48
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/cmd.py
@@ -0,0 +1,405 @@
+"""A generic class to build line-oriented command interpreters.
+
+Interpreters constructed with this class obey the following conventions:
+
+1. End of file on input is processed as the command 'EOF'.
+2. A command is parsed out of each line by collecting the prefix composed
+   of characters in the identchars member.
+3. A command `foo' is dispatched to a method 'do_foo()'; the do_ method
+   is passed a single argument consisting of the remainder of the line.
+4. Typing an empty line repeats the last command.  (Actually, it calls the
+   method `emptyline', which may be overridden in a subclass.)
+5. There is a predefined `help' method.  Given an argument `topic', it
+   calls the command `help_topic'.  With no arguments, it lists all topics
+   with defined help_ functions, broken into up to three topics; documented
+   commands, miscellaneous help topics, and undocumented commands.
+6. The command '?' is a synonym for `help'.  The command '!' is a synonym
+   for `shell', if a do_shell method exists.
+7. If completion is enabled, completing commands will be done automatically,
+   and completing of commands args is done by calling complete_foo() with
   arguments text, line, begidx, endidx.  text is the string we are matching
+   against, all returned matches must begin with it.  line is the current
+   input line (lstripped), begidx and endidx are the beginning and end
+   indexes of the text being matched, which could be used to provide
+   different completion depending upon which position the argument is in.
+
+The `default' method may be overridden to intercept commands for which there
+is no do_ method.
+
+The `completedefault' method may be overridden to intercept completions for
+commands that have no complete_ method.
+
+The data member `self.ruler' sets the character used to draw separator lines
+in the help messages.  If empty, no ruler line is drawn.  It defaults to "=".
+
+If the value of `self.intro' is nonempty when the cmdloop method is called,
+it is printed out on interpreter startup.  This value may be overridden
+via an optional argument to the cmdloop() method.
+
+The data members `self.doc_header', `self.misc_header', and
+`self.undoc_header' set the headers used for the help function's
+listings of documented functions, miscellaneous topics, and undocumented
+functions respectively.
+
+These interpreters use raw_input; thus, if the readline module is loaded,
+they automatically support Emacs-like command history and editing features.
+"""
+
import string

__all__ = ["Cmd"]

PROMPT = '(Cmd) '
IDENTCHARS = string.ascii_letters + string.digits + '_'

class Cmd:
    """A simple framework for writing line-oriented command interpreters.

    These are often useful for test harnesses, administrative tools, and
    prototypes that will later be wrapped in a more sophisticated interface.

    A Cmd instance or subclass instance is a line-oriented interpreter
    framework.  There is no good reason to instantiate Cmd itself; rather,
    it's useful as a superclass of an interpreter class you define yourself
    in order to inherit Cmd's methods and encapsulate action methods.

    """
    # Class-level defaults; subclasses typically override prompt and intro.
    prompt = PROMPT
    identchars = IDENTCHARS
    ruler = '='
    lastcmd = ''
    intro = None
    doc_leader = ""
    doc_header = "Documented commands (type help <topic>):"
    misc_header = "Miscellaneous help topics:"
    undoc_header = "Undocumented commands:"
    nohelp = "*** No help on %s"
    use_rawinput = 1

    def __init__(self, completekey='tab', stdin=None, stdout=None):
        """Instantiate a line-oriented interpreter framework.

        The optional argument 'completekey' is the readline name of a
        completion key; it defaults to the Tab key. If completekey is
        not None and the readline module is available, command completion
        is done automatically. The optional arguments stdin and stdout
        specify alternate input and output file objects; if not specified,
        sys.stdin and sys.stdout are used.

        """
        import sys
        if stdin is not None:
            self.stdin = stdin
        else:
            self.stdin = sys.stdin
        if stdout is not None:
            self.stdout = stdout
        else:
            self.stdout = sys.stdout
        self.cmdqueue = []
        self.completekey = completekey

    def cmdloop(self, intro=None):
        """Repeatedly issue a prompt, accept input, parse an initial prefix
        off the received input, and dispatch to action methods, passing them
        the remainder of the line as argument.

        """

        self.preloop()
        if self.use_rawinput and self.completekey:
            # Install our completer for the duration of the loop; the
            # previous completer is restored in the finally clause below.
            try:
                import readline
                self.old_completer = readline.get_completer()
                readline.set_completer(self.complete)
                readline.parse_and_bind(self.completekey+": complete")
            except ImportError:
                pass
        try:
            if intro is not None:
                self.intro = intro
            if self.intro:
                self.stdout.write(str(self.intro)+"\n")
            stop = None
            while not stop:
                if self.cmdqueue:
                    # Queued commands take precedence over interactive input.
                    line = self.cmdqueue.pop(0)
                else:
                    if self.use_rawinput:
                        try:
                            line = raw_input(self.prompt)
                        except EOFError:
                            line = 'EOF'
                    else:
                        self.stdout.write(self.prompt)
                        self.stdout.flush()
                        line = self.stdin.readline()
                        if not len(line):
                            line = 'EOF'
                        else:
                            line = line[:-1] # chop \n
                line = self.precmd(line)
                stop = self.onecmd(line)
                stop = self.postcmd(stop, line)
            self.postloop()
        finally:
            if self.use_rawinput and self.completekey:
                try:
                    import readline
                    readline.set_completer(self.old_completer)
                except ImportError:
                    pass

    def precmd(self, line):
        """Hook method executed just before the command line is
        interpreted, but after the input prompt is generated and issued.

        """
        return line

    def postcmd(self, stop, line):
        """Hook method executed just after a command dispatch is finished."""
        return stop

    def preloop(self):
        """Hook method executed once when the cmdloop() method is called."""
        pass

    def postloop(self):
        """Hook method executed once when the cmdloop() method is about to
        return.

        """
        pass

    def parseline(self, line):
        """Parse the line into a command name and a string containing
        the arguments.  Returns a tuple containing (command, args, line).
        'command' and 'args' may be None if the line couldn't be parsed.
        """
        line = line.strip()
        if not line:
            return None, None, line
        elif line[0] == '?':
            line = 'help ' + line[1:]
        elif line[0] == '!':
            if hasattr(self, 'do_shell'):
                line = 'shell ' + line[1:]
            else:
                return None, None, line
        i, n = 0, len(line)
        while i < n and line[i] in self.identchars: i = i+1
        cmd, arg = line[:i], line[i:].strip()
        return cmd, arg, line

    def onecmd(self, line):
        """Interpret the argument as though it had been typed in response
        to the prompt.

        This may be overridden, but should not normally need to be;
        see the precmd() and postcmd() methods for useful execution hooks.
        The return value is a flag indicating whether interpretation of
        commands by the interpreter should stop.

        """
        cmd, arg, line = self.parseline(line)
        if not line:
            return self.emptyline()
        if cmd is None:
            return self.default(line)
        self.lastcmd = line
        if cmd == '':
            return self.default(line)
        else:
            try:
                func = getattr(self, 'do_' + cmd)
            except AttributeError:
                return self.default(line)
            return func(arg)

    def emptyline(self):
        """Called when an empty line is entered in response to the prompt.

        If this method is not overridden, it repeats the last nonempty
        command entered.

        """
        if self.lastcmd:
            return self.onecmd(self.lastcmd)

    def default(self, line):
        """Called on an input line when the command prefix is not recognized.

        If this method is not overridden, it prints an error message and
        returns.

        """
        self.stdout.write('*** Unknown syntax: %s\n'%line)

    def completedefault(self, *ignored):
        """Method called to complete an input line when no command-specific
        complete_*() method is available.

        By default, it returns an empty list.

        """
        return []

    def completenames(self, text, *ignored):
        """Return the command names (do_* methods) that start with text."""
        dotext = 'do_'+text
        return [a[3:] for a in self.get_names() if a.startswith(dotext)]

    def complete(self, text, state):
        """Return the next possible completion for 'text'.

        If a command has not been entered, then complete against command list.
        Otherwise try to call complete_<command> to get list of completions.
        """
        if state == 0:
            import readline
            origline = readline.get_line_buffer()
            line = origline.lstrip()
            stripped = len(origline) - len(line)
            begidx = readline.get_begidx() - stripped
            endidx = readline.get_endidx() - stripped
            if begidx>0:
                cmd, args, foo = self.parseline(line)
                if cmd == '':
                    compfunc = self.completedefault
                else:
                    try:
                        compfunc = getattr(self, 'complete_' + cmd)
                    except AttributeError:
                        compfunc = self.completedefault
            else:
                compfunc = self.completenames
            self.completion_matches = compfunc(text, line, begidx, endidx)
        try:
            return self.completion_matches[state]
        except IndexError:
            return None

    def get_names(self):
        """Return attribute names gathered from this class and its bases."""
        # Inheritance says we have to look in class and
        # base classes; order is not important.
        names = []
        classes = [self.__class__]
        while classes:
            aclass = classes.pop(0)
            if aclass.__bases__:
                classes = classes + list(aclass.__bases__)
            names = names + dir(aclass)
        return names

    def complete_help(self, *args):
        """Complete help topics the same way command names are completed."""
        return self.completenames(*args)

    def do_help(self, arg):
        'List available commands with "help" or detailed help with "help cmd".'
        if arg:
            # XXX check arg syntax
            try:
                func = getattr(self, 'help_' + arg)
            except AttributeError:
                try:
                    doc=getattr(self, 'do_' + arg).__doc__
                    if doc:
                        self.stdout.write("%s\n"%str(doc))
                        return
                except AttributeError:
                    pass
                self.stdout.write("%s\n"%str(self.nohelp % (arg,)))
                return
            func()
        else:
            names = self.get_names()
            cmds_doc = []
            cmds_undoc = []
            # Topics with a dedicated help_* method (avoid shadowing the
            # builtin name 'help').
            help_dict = {}
            for name in names:
                if name[:5] == 'help_':
                    help_dict[name[5:]] = 1
            names.sort()
            # There can be duplicates if routines overridden
            prevname = ''
            for name in names:
                if name[:3] == 'do_':
                    if name == prevname:
                        continue
                    prevname = name
                    cmd = name[3:]
                    if cmd in help_dict:
                        cmds_doc.append(cmd)
                        del help_dict[cmd]
                    elif getattr(self, name).__doc__:
                        cmds_doc.append(cmd)
                    else:
                        cmds_undoc.append(cmd)
            self.stdout.write("%s\n"%str(self.doc_leader))
            self.print_topics(self.doc_header,   cmds_doc,   15,80)
            # Wrap keys() in list() so columnize() can index the result.
            self.print_topics(self.misc_header,  list(help_dict.keys()),15,80)
            self.print_topics(self.undoc_header, cmds_undoc, 15,80)

    def print_topics(self, header, cmds, cmdlen, maxcol):
        """Write one section of the help listing: header, ruler, columns."""
        if cmds:
            self.stdout.write("%s\n"%str(header))
            if self.ruler:
                self.stdout.write("%s\n"%str(self.ruler * len(header)))
            self.columnize(cmds, maxcol-1)
            self.stdout.write("\n")

    def columnize(self, list, displaywidth=80):
        """Display a list of strings as a compact set of columns.

        Each column is only as wide as necessary.
        Columns are separated by two spaces (one was not legible enough).
        """
        if not list:
            self.stdout.write("<empty>\n")
            return
        nonstrings = [i for i in range(len(list))
                        if not isinstance(list[i], str)]
        if nonstrings:
            raise TypeError("list[i] not a string for i in %s" %
                            ", ".join(map(str, nonstrings)))
        size = len(list)
        if size == 1:
            self.stdout.write('%s\n'%str(list[0]))
            return
        # Try every row count from 1 upwards
        for nrows in range(1, len(list)):
            ncols = (size+nrows-1) // nrows
            colwidths = []
            totwidth = -2
            for col in range(ncols):
                colwidth = 0
                for row in range(nrows):
                    i = row + nrows*col
                    if i >= size:
                        break
                    x = list[i]
                    colwidth = max(colwidth, len(x))
                colwidths.append(colwidth)
                totwidth += colwidth + 2
                if totwidth > displaywidth:
                    break
            if totwidth <= displaywidth:
                break
        else:
            nrows = len(list)
            ncols = 1
            colwidths = [0]
        for row in range(nrows):
            texts = []
            for col in range(ncols):
                i = row + nrows*col
                if i >= size:
                    x = ""
                else:
                    x = list[i]
                texts.append(x)
            while texts and not texts[-1]:
                del texts[-1]
            for col in range(len(texts)):
                texts[col] = texts[col].ljust(colwidths[col])
            self.stdout.write("%s\n"%str("  ".join(texts)))
diff --git a/depot_tools/release/win/python_24/Lib/code.py b/depot_tools/release/win/python_24/Lib/code.py
new file mode 100644
index 0000000..6bdc658
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/code.py
@@ -0,0 +1,307 @@
+"""Utilities needed to emulate Python's interactive interpreter.
+
+"""
+
+# Inspired by similar code by Jeff Epler and Fredrik Lundh.
+
+
+import sys
+import traceback
+from codeop import CommandCompiler, compile_command
+
# Public API exported by "from code import *".
__all__ = ["InteractiveInterpreter", "InteractiveConsole", "interact",
           "compile_command"]
+
def softspace(file, newvalue):
    """Set file.softspace to newvalue and return the previous value.

    Objects without a softspace attribute report 0 as the old value;
    objects that refuse attribute assignment are silently left alone.
    """
    try:
        old = file.softspace
    except AttributeError:
        # No softspace attribute yet: treat as 0.
        old = 0
    try:
        file.softspace = newvalue
    except (AttributeError, TypeError):
        # "attribute-less object" or "read-only attributes"
        pass
    return old
+
class InteractiveInterpreter:
    """Base class for InteractiveConsole.

    This class deals with parsing and interpreter state (the user's
    namespace); it doesn't deal with input buffering or prompting or
    input file naming (the filename is always passed in explicitly).

    """

    def __init__(self, locals=None):
        """Constructor.

        The optional 'locals' argument specifies the dictionary in
        which code will be executed; it defaults to a newly created
        dictionary with key "__name__" set to "__console__" and key
        "__doc__" set to None.

        """
        if locals is None:
            locals = {"__name__": "__console__", "__doc__": None}
        self.locals = locals
        self.compile = CommandCompiler()

    def runsource(self, source, filename="<input>", symbol="single"):
        """Compile and run some source in the interpreter.

        Arguments are as for compile_command().

        One of several things can happen:

        1) The input is incorrect; compile_command() raised an
        exception (SyntaxError or OverflowError).  A syntax traceback
        will be printed by calling the showsyntaxerror() method.

        2) The input is incomplete, and more input is required;
        compile_command() returned None.  Nothing happens.

        3) The input is complete; compile_command() returned a code
        object.  The code is executed by calling self.runcode() (which
        also handles run-time exceptions, except for SystemExit).

        The return value is True in case 2, False in the other cases (unless
        an exception is raised).  The return value can be used to
        decide whether to use sys.ps1 or sys.ps2 to prompt the next
        line.

        """
        try:
            code = self.compile(source, filename, symbol)
        except (OverflowError, SyntaxError, ValueError):
            # Case 1
            self.showsyntaxerror(filename)
            return False

        if code is None:
            # Case 2
            return True

        # Case 3
        self.runcode(code)
        return False

    def runcode(self, code):
        """Execute a code object.

        When an exception occurs, self.showtraceback() is called to
        display a traceback.  All exceptions are caught except
        SystemExit, which is reraised.

        A note about KeyboardInterrupt: this exception may occur
        elsewhere in this code, and may not always be caught.  The
        caller should be prepared to deal with it.

        """
        try:
            exec code in self.locals
        except SystemExit:
            raise
        except:
            self.showtraceback()
        else:
            # Mimic the real interpreter: terminate a dangling softspace
            # with a newline after successful execution.
            if softspace(sys.stdout, 0):
                print

    def showsyntaxerror(self, filename=None):
        """Display the syntax error that just occurred.

        This doesn't display a stack trace because there isn't one.

        If a filename is given, it is stuffed in the exception instead
        of what was there before (because Python's parser always uses
        "<string>" when reading from a string).

        The output is written by self.write(), below.

        """
        # Record the error in sys.last_* so postmortem tools can find it.
        type, value, sys.last_traceback = sys.exc_info()
        sys.last_type = type
        sys.last_value = value
        if filename and type is SyntaxError:
            # Work hard to stuff the correct filename in the exception
            try:
                msg, (dummy_filename, lineno, offset, line) = value
            except:
                # Not the format we expect; leave it alone
                pass
            else:
                # Stuff in the right filename
                value = SyntaxError(msg, (filename, lineno, offset, line))
                sys.last_value = value
        # map() is eager in Python 2, so this writes every formatted line.
        list = traceback.format_exception_only(type, value)
        map(self.write, list)

    def showtraceback(self):
        """Display the exception that just occurred.

        We remove the first stack item because it is our own code.

        The output is written by self.write(), below.

        """
        try:
            type, value, tb = sys.exc_info()
            sys.last_type = type
            sys.last_value = value
            sys.last_traceback = tb
            tblist = traceback.extract_tb(tb)
            # Drop the frame for runcode() itself.
            del tblist[:1]
            list = traceback.format_list(tblist)
            if list:
                list.insert(0, "Traceback (most recent call last):\n")
            list[len(list):] = traceback.format_exception_only(type, value)
        finally:
            # Clear local references to the traceback so its frames
            # (and their locals) can be garbage collected.
            tblist = tb = None
        map(self.write, list)

    def write(self, data):
        """Write a string.

        The base implementation writes to sys.stderr; a subclass may
        replace this with a different implementation.

        """
        sys.stderr.write(data)
+
+
class InteractiveConsole(InteractiveInterpreter):
    """Emulate the behavior of the interactive Python interpreter.

    Builds on InteractiveInterpreter, adding sys.ps1/sys.ps2 prompting
    and buffering of multi-line input.

    """

    def __init__(self, locals=None, filename="<console>"):
        """Create a console.

        locals is forwarded to the InteractiveInterpreter base class;
        filename names the input stream and shows up in tracebacks.

        """
        InteractiveInterpreter.__init__(self, locals)
        self.filename = filename
        self.resetbuffer()

    def resetbuffer(self):
        """Discard any buffered (incomplete) source lines."""
        self.buffer = []

    def interact(self, banner=None):
        """Run a read-eval-print loop that mimics the real interpreter.

        banner, if given, is printed before the first prompt; by
        default a banner much like the real interpreter's is printed,
        with this class's name appended in parentheses so the two
        cannot be confused.

        """
        # Make sure both prompt strings exist, as in the real interpreter.
        if not hasattr(sys, "ps1"):
            sys.ps1 = ">>> "
        if not hasattr(sys, "ps2"):
            sys.ps2 = "... "
        cprt = 'Type "help", "copyright", "credits" or "license" for more information.'
        if banner is None:
            self.write("Python %s on %s\n%s\n(%s)\n" %
                       (sys.version, sys.platform, cprt,
                        self.__class__.__name__))
        else:
            self.write("%s\n" % str(banner))
        needs_more = 0
        while 1:
            try:
                if needs_more:
                    prompt = sys.ps2
                else:
                    prompt = sys.ps1
                try:
                    line = self.raw_input(prompt)
                except EOFError:
                    self.write("\n")
                    break
                needs_more = self.push(line)
            except KeyboardInterrupt:
                # ^C cancels the current (possibly multi-line) input.
                self.write("\nKeyboardInterrupt\n")
                self.resetbuffer()
                needs_more = 0

    def push(self, line):
        """Append line to the buffer and try to execute the buffered source.

        line must not end in a newline (embedded newlines are fine).
        Returns 1 when more input is required to complete the command,
        0 when the buffered source was executed or rejected -- in which
        case the buffer has been reset.  This is the same contract as
        runsource().

        """
        self.buffer.append(line)
        pending = "\n".join(self.buffer)
        needs_more = self.runsource(pending, self.filename)
        if not needs_more:
            self.resetbuffer()
        return needs_more

    def raw_input(self, prompt=""):
        """Print a prompt and read one line, without its trailing newline.

        Raises EOFError when the user enters the EOF key sequence.
        Uses the raw_input() builtin; subclasses may substitute a
        different implementation.

        """
        return raw_input(prompt)
+
+
def interact(banner=None, readfunc=None, local=None):
    """Closely emulate the interactive Python interpreter.

    Backwards compatible wrapper around InteractiveConsole.  When no
    readfunc is supplied, GNU readline support is enabled if the
    readline module can be imported.

    Arguments (all optional, all default to None):

    banner -- passed to InteractiveConsole.interact()
    readfunc -- if not None, replaces InteractiveConsole.raw_input()
    local -- passed to InteractiveInterpreter.__init__()

    """
    repl = InteractiveConsole(local)
    if readfunc is None:
        # Best effort: enable line editing when readline is available.
        try:
            import readline
        except ImportError:
            pass
    else:
        repl.raw_input = readfunc
    repl.interact(banner)
+
+
if __name__ == '__main__':
    # Demo/debug entry point: run the emulated interpreter under pdb.
    import pdb
    pdb.run("interact()\n")
diff --git a/depot_tools/release/win/python_24/Lib/codecs.py b/depot_tools/release/win/python_24/Lib/codecs.py
new file mode 100644
index 0000000..022b51b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/codecs.py
@@ -0,0 +1,761 @@
+""" codecs -- Python Codec Registry, API and helpers.
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""#"
+
+import __builtin__, sys
+
### Registry and builtin stateless codec functions

# The actual codec machinery (register, lookup, encode, decode, ...) lives
# in the C module _codecs.  Without it no encoding/decoding is possible at
# all, so a missing module is escalated to a fatal SystemError rather than
# left as a plain ImportError.
try:
    from _codecs import *
except ImportError, why:
    raise SystemError,\
          'Failed to load the builtin codecs: %s' % why
+
# Public API exported by "from codecs import *".
__all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
           "BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
           "BOM_UTF8", "BOM_UTF16", "BOM_UTF16_LE", "BOM_UTF16_BE",
           "BOM_UTF32", "BOM_UTF32_LE", "BOM_UTF32_BE",
           "strict_errors", "ignore_errors", "replace_errors",
           "xmlcharrefreplace_errors",
           "register_error", "lookup_error"]

### Constants

#
# Byte Order Mark (BOM = ZERO WIDTH NO-BREAK SPACE = U+FEFF)
# and its possible byte string values
# for UTF8/UTF16/UTF32 output and little/big endian machines
#

# UTF-8
BOM_UTF8 = '\xef\xbb\xbf'

# UTF-16, little endian
BOM_LE = BOM_UTF16_LE = '\xff\xfe'

# UTF-16, big endian
BOM_BE = BOM_UTF16_BE = '\xfe\xff'

# UTF-32, little endian
BOM_UTF32_LE = '\xff\xfe\x00\x00'

# UTF-32, big endian
BOM_UTF32_BE = '\x00\x00\xfe\xff'

if sys.byteorder == 'little':

    # UTF-16, native endianness
    BOM = BOM_UTF16 = BOM_UTF16_LE

    # UTF-32, native endianness
    BOM_UTF32 = BOM_UTF32_LE

else:

    # UTF-16, native endianness
    BOM = BOM_UTF16 = BOM_UTF16_BE

    # UTF-32, native endianness
    BOM_UTF32 = BOM_UTF32_BE

# Old broken names (don't use in new code)
# (The "32"/"64" refer to bits, but these are really the UTF-16 and
# UTF-32 BOMs respectively; kept only for backward compatibility.)
BOM32_LE = BOM_UTF16_LE
BOM32_BE = BOM_UTF16_BE
BOM64_LE = BOM_UTF32_LE
BOM64_BE = BOM_UTF32_BE
+
+
+### Codec base classes (defining the API)
+
class Codec:

    """ Interface definition for stateless encoders/decoders.

        Both .encode() and .decode() accept an errors argument that
        selects an error-handling scheme.  The predefined string
        values are:

         'strict' - raise a ValueError error (or a subclass)
         'ignore' - skip the offending character and continue
         'replace' - substitute a suitable replacement character;
                    the builtin Unicode codecs use the official
                    U+FFFD REPLACEMENT CHARACTER when decoding and
                    '?' when encoding.
         'xmlcharrefreplace' - substitute the appropriate XML
                               character reference (encoding only).
         'backslashreplace'  - substitute backslashed escape sequences
                               (encoding only).

        Additional schemes may be made available via register_error.

    """
    def encode(self, input, errors='strict'):

        """ Encode the object input and return a tuple
            (output object, length consumed).

            errors selects the error handling scheme; it defaults to
            'strict'.

            Implementations must not keep state on the Codec instance;
            use StreamCodec subclasses for codecs that need state to
            encode/decode efficiently.

            Zero-length input must be handled and must produce an
            empty object of the output type.

        """
        raise NotImplementedError

    def decode(self, input, errors='strict'):

        """ Decode the object input and return a tuple
            (output object, length consumed).

            input must provide the bf_getreadbuf buffer slot; Python
            strings, buffer objects and memory mapped files all
            qualify.

            errors selects the error handling scheme; it defaults to
            'strict'.

            Implementations must not keep state on the Codec instance;
            use StreamCodec subclasses for codecs that need state to
            encode/decode efficiently.

            Zero-length input must be handled and must produce an
            empty object of the output type.

        """
        raise NotImplementedError
+
+#
# The StreamWriter and StreamReader classes provide generic working
+# interfaces which can be used to implement new encoding submodules
+# very easily. See encodings/utf_8.py for an example on how this is
+# done.
+#
+
class StreamWriter(Codec):

    def __init__(self, stream, errors='strict'):

        """ Create a StreamWriter wrapping stream.

            stream must be a file-like object opened for writing
            (binary) data.

            errors selects the error handling scheme applied while
            encoding; the predefined values are:

             'strict' - raise a ValueError (or a subclass)
             'ignore' - skip the offending character and continue
             'replace'- substitute a suitable replacement character
             'xmlcharrefreplace' - substitute the appropriate XML
                                   character reference.
             'backslashreplace'  - substitute backslashed escape
                                   sequences (encoding only).

            Additional schemes may be registered via register_error.
        """
        self.stream = stream
        self.errors = errors

    def write(self, object):

        """ Encode object and write the result to self.stream.
        """
        encoded, _consumed = self.encode(object, self.errors)
        self.stream.write(encoded)

    def writelines(self, list):

        """ Join the given list of strings and write it out via .write().
        """
        self.write(''.join(list))

    def reset(self):

        """ Flush and reset any codec buffers used for keeping state.

            After this call the output stream is in a clean state that
            allows fresh data to be appended without rescanning the
            whole stream to recover the codec state.

        """
        pass

    def __getattr__(self, name,
                    getattr=getattr):

        """ Delegate every other attribute to the underlying stream.
        """
        return getattr(self.stream, name)
+
+###
+
class StreamReader(Codec):

    def __init__(self, stream, errors='strict'):

        """ Creates a StreamReader instance.

            stream must be a file-like object open for reading
            (binary) data.

            The StreamReader may use different error handling
            schemes by providing the errors keyword argument. These
            parameters are predefined:

             'strict' - raise a ValueError (or a subclass)
             'ignore' - ignore the character and continue with the next
             'replace'- replace with a suitable replacement character;

            The set of allowed parameter values can be extended via
            register_error.
        """
        self.stream = stream
        self.errors = errors
        # Bytes read from the stream but not yet decoded (e.g. an
        # incomplete multi-byte sequence at a chunk boundary).
        self.bytebuffer = ""
        # Characters already decoded but not yet returned to the caller.
        self.charbuffer = u""
        # True when the last data returned ended with '\r'; lets
        # readline() join a '\r\n' pair that straddles two reads.
        self.atcr = False

    def decode(self, input, errors='strict'):
        # Must be overridden by the concrete codec subclass.
        raise NotImplementedError

    def read(self, size=-1, chars=-1):

        """ Decodes data from the stream self.stream and returns the
            resulting object.

            chars indicates the number of characters to read from the
            stream. read() will never return more than chars
            characters, but it might return less, if there are not enough
            characters available.

            size indicates the approximate maximum number of bytes to
            read from the stream for decoding purposes. The decoder
            can modify this setting as appropriate. The default value
            -1 indicates to read and decode as much as possible.  size
            is intended to prevent having to decode huge files in one
            step.

            The method should use a greedy read strategy meaning that
            it should read as much data as is allowed within the
            definition of the encoding and the given size, e.g.  if
            optional encoding endings or state markers are available
            on the stream, these should be read too.
        """
        # read until we get the required number of characters (if available)
        while True:
            # can the request be satisfied from the character buffer?
            if chars < 0:
                if self.charbuffer:
                    break
            else:
                if len(self.charbuffer) >= chars:
                    break
            # we need more data
            if size < 0:
                newdata = self.stream.read()
            else:
                newdata = self.stream.read(size)
            # decode bytes (those remaining from the last call included)
            data = self.bytebuffer + newdata
            newchars, decodedbytes = self.decode(data, self.errors)
            # keep undecoded bytes until the next call
            self.bytebuffer = data[decodedbytes:]
            # put new characters in the character buffer
            self.charbuffer += newchars
            # there was no data available
            if not newdata:
                break
        if chars < 0:
            # Return everything we've got
            result = self.charbuffer
            self.charbuffer = u""
        else:
            # Return the first chars characters
            result = self.charbuffer[:chars]
            self.charbuffer = self.charbuffer[chars:]
        return result

    def readline(self, size=None, keepends=True):

        """ Read one line from the input stream and return the
            decoded data.

            size, if given, is passed as size argument to the
            read() method.

        """
        # Start with a modest chunk size and grow it (up to 8000) so
        # short lines stay cheap while long lines still terminate.
        readsize = size or 72
        line = u""
        # If size is given, we call read() only once
        while True:
            data = self.read(readsize)
            # Drop a leading '\n' that completes a '\r\n' pair whose
            # '\r' arrived at the end of the previous chunk.
            if self.atcr and data.startswith(u"\n"):
                data = data[1:]
            if data:
                self.atcr = data.endswith(u"\r")
            line += data
            lines = line.splitlines(True)
            if lines:
                line0withend = lines[0]
                line0withoutend = lines[0].splitlines(False)[0]
                if line0withend != line0withoutend: # We really have a line end
                    # Put the rest back together and keep it until the next call
                    self.charbuffer = u"".join(lines[1:]) + self.charbuffer
                    if keepends:
                        line = line0withend
                    else:
                        line = line0withoutend
                    break
            # we didn't get anything or this was our only try
            if not data or size is not None:
                if line and not keepends:
                    line = line.splitlines(False)[0]
                break
            if readsize<8000:
                readsize *= 2
        return line

    def readlines(self, sizehint=None, keepends=True):

        """ Read all lines available on the input stream
            and return them as list of lines.

            Line breaks are implemented using the codec's decoder
            method and are included in the list entries.

            sizehint, if given, is ignored since there is no efficient
            way to finding the true end-of-line.

        """
        data = self.read()
        return data.splitlines(keepends)

    def reset(self):

        """ Resets the codec buffers used for keeping state.

            Note that no stream repositioning should take place.
            This method is primarily intended to be able to recover
            from decoding errors.

        """
        self.bytebuffer = ""
        self.charbuffer = u""
        self.atcr = False

    def seek(self, offset, whence=0):
        """ Set the input stream's current position.

            Resets the codec buffers used for keeping state.
        """
        # Buffered data belongs to the old position, so drop it first.
        self.reset()
        self.stream.seek(offset, whence)

    def next(self):

        """ Return the next decoded line from the input stream."""
        line = self.readline()
        if line:
            return line
        raise StopIteration

    def __iter__(self):
        return self

    def __getattr__(self, name,
                    getattr=getattr):

        """ Inherit all other methods from the underlying stream.
        """
        # The default argument captures the builtin getattr so lookup
        # still works even if the global name is rebound.
        return getattr(self.stream, name)
+
+###
+
class StreamReaderWriter:

    """ StreamReaderWriter instances wrap streams which work in both
        read and write modes, combining a StreamReader and a
        StreamWriter over the same underlying stream.

        The design is such that one can use the factory functions
        returned by the codec.lookup() function to construct the
        instance.

    """
    # Optional attributes set by the file wrappers below
    encoding = 'unknown'

    def __init__(self, stream, Reader, Writer, errors='strict'):

        """ Creates a StreamReaderWriter instance.

            stream must be a Stream-like object.

            Reader and Writer must be factory functions or classes
            providing the StreamReader resp. StreamWriter interface.

            Error handling is done in the same way as defined for the
            StreamWriter/Readers.

        """
        self.stream = stream
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):

        """ Delegates reading to the StreamReader. """
        return self.reader.read(size)

    def readline(self, size=None):

        """ Delegates to the StreamReader's readline(). """
        return self.reader.readline(size)

    def readlines(self, sizehint=None):

        """ Delegates to the StreamReader's readlines(). """
        return self.reader.readlines(sizehint)

    def next(self):

        """ Return the next decoded line from the input stream."""
        return self.reader.next()

    def __iter__(self):
        return self

    def write(self, data):

        """ Delegates writing to the StreamWriter. """
        return self.writer.write(data)

    def writelines(self, list):

        """ Delegates to the StreamWriter's writelines(). """
        return self.writer.writelines(list)

    def reset(self):

        """ Resets both the reader and the writer. """
        self.reader.reset()
        self.writer.reset()

    def __getattr__(self, name, getattr=getattr):

        """ Inherit all other methods from the underlying stream. """
        return getattr(self.stream, name)
+
+###
+
class StreamRecoder:

    """ StreamRecoder instances translate data transparently between
        a frontend encoding and a backend (file) encoding.

        They use the complete set of APIs returned by the
        codecs.lookup() function to implement their task.

        Data written to the stream is first decoded into an
        intermediate format (which is dependent on the given codec
        combination) and then written to the stream using an instance
        of the provided Writer class.

        In the other direction, data is read from the stream using a
        Reader instance, re-encoded, and handed back to the caller.

    """
    # Optional attributes set by the file wrappers below
    data_encoding = 'unknown'
    file_encoding = 'unknown'

    def __init__(self, stream, encode, decode, Reader, Writer,
                 errors='strict'):

        """ Creates a StreamRecoder instance which implements a two-way
            conversion: encode and decode work on the frontend (the
            input to .read() and output of .write()) while Reader and
            Writer work on the backend (reading and writing to the
            stream).

            You can use these objects to do transparent direct
            recodings from e.g. latin-1 to utf-8 and back.

            stream must be a file-like object.

            encode and decode must adhere to the Codec interface;
            Reader and Writer must be factory functions or classes
            providing the StreamReader resp. StreamWriter interface.

            encode and decode are needed for the frontend translation,
            Reader and Writer for the backend translation. Unicode is
            used as intermediate encoding.

            Error handling is done in the same way as defined for the
            StreamWriter/Readers.

        """
        self.stream = stream
        self.encode = encode
        self.decode = decode
        self.reader = Reader(stream, errors)
        self.writer = Writer(stream, errors)
        self.errors = errors

    def read(self, size=-1):

        """ Reads via the backend Reader and re-encodes for the caller. """
        decoded = self.reader.read(size)
        encoded, _consumed = self.encode(decoded, self.errors)
        return encoded

    def readline(self, size=None):

        """ Reads one line via the backend Reader and re-encodes it. """
        if size is None:
            decoded = self.reader.readline()
        else:
            decoded = self.reader.readline(size)
        encoded, _consumed = self.encode(decoded, self.errors)
        return encoded

    def readlines(self, sizehint=None):

        """ Reads everything and returns it as a list of lines
            (line endings kept).

            sizehint, if given, is ignored.
        """
        decoded = self.reader.read()
        encoded, _consumed = self.encode(decoded, self.errors)
        return encoded.splitlines(True)

    def next(self):

        """ Return the next decoded line from the input stream."""
        return self.reader.next()

    def __iter__(self):
        return self

    def write(self, data):

        """ Decodes data to the intermediate format and writes it. """
        decoded, _consumed = self.decode(data, self.errors)
        return self.writer.write(decoded)

    def writelines(self, list):

        """ Joins the list, decodes it and writes it in one go. """
        joined = ''.join(list)
        decoded, _consumed = self.decode(joined, self.errors)
        return self.writer.write(decoded)

    def reset(self):

        """ Resets both the reader and the writer. """
        self.reader.reset()
        self.writer.reset()

    def __getattr__(self, name, getattr=getattr):

        """ Inherit all other methods from the underlying stream. """
        return getattr(self.stream, name)
+
+### Shortcuts
+
def open(filename, mode='rb', encoding=None, errors='strict', buffering=1):

    """ Open an encoded file using the given mode and return
        a wrapped version providing transparent encoding/decoding.

        Note: The wrapped version will only accept the object format
        defined by the codecs, i.e. Unicode objects for most builtin
        codecs. Output is also codec dependent and will usually be
        Unicode as well.

        Files are always opened in binary mode, even if no binary mode
        was specified, to avoid data loss due to encodings using 8-bit
        values. The default file mode is 'rb' meaning to open the file
        in binary read mode.

        encoding specifies the encoding which is to be used for the
        file.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        buffering has the same meaning as for the builtin open() API.
        It defaults to line buffered.

        The returned wrapped file object provides an extra attribute
        .encoding which allows querying the used encoding. This
        attribute is only available if an encoding was specified as
        parameter.

    """
    if encoding is not None and 'b' not in mode:
        # Force binary mode; text-mode translation would corrupt
        # encodings that use 8-bit values.
        mode = mode + 'b'
    f = __builtin__.open(filename, mode, buffering)
    if encoding is None:
        # No encoding requested: hand back the plain file object.
        return f
    (enc, dec, reader, writer) = lookup(encoding)
    wrapped = StreamReaderWriter(f, reader, writer, errors)
    # Add attributes to simplify introspection
    wrapped.encoding = encoding
    return wrapped
+
def EncodedFile(file, data_encoding, file_encoding=None, errors='strict'):

    """ Return a wrapped version of file which provides transparent
        encoding translation.

        Strings written to the wrapped file are interpreted according
        to the given data_encoding and then written to the original
        file as string using file_encoding. The intermediate encoding
        will usually be Unicode but depends on the specified codecs.

        Strings are read from the file using file_encoding and then
        passed back to the caller as string using data_encoding.

        If file_encoding is not given, it defaults to data_encoding.

        errors may be given to define the error handling. It defaults
        to 'strict' which causes ValueErrors to be raised in case an
        encoding error occurs.

        The returned wrapped file object provides two extra attributes
        .data_encoding and .file_encoding which reflect the given
        parameters of the same name. The attributes can be used for
        introspection by Python programs.

    """
    if file_encoding is None:
        file_encoding = data_encoding
    # Frontend codec handles the caller-visible data_encoding ...
    data_codec = lookup(data_encoding)
    # ... while the backend reader/writer handle the file_encoding.
    file_codec = lookup(file_encoding)
    recoder = StreamRecoder(file,
                            data_codec[0], data_codec[1],
                            file_codec[2], file_codec[3],
                            errors)
    # Add attributes to simplify introspection
    recoder.data_encoding = data_encoding
    recoder.file_encoding = file_encoding
    return recoder
+
+### Helpers for codec lookup
+
def getencoder(encoding):

    """ Look up the codec for the given encoding and return
        its encoder function.

        Raises a LookupError in case the encoding cannot be found.

    """
    codec_entry = lookup(encoding)
    return codec_entry[0]
+
def getdecoder(encoding):

    """ Look up the codec for the given encoding and return
        its decoder function.

        Raises a LookupError in case the encoding cannot be found.

    """
    codec_entry = lookup(encoding)
    return codec_entry[1]
+
def getreader(encoding):

    """ Look up the codec for the given encoding and return
        its StreamReader class or factory function.

        Raises a LookupError in case the encoding cannot be found.

    """
    codec_entry = lookup(encoding)
    return codec_entry[2]
+
def getwriter(encoding):

    """ Look up the codec for the given encoding and return
        its StreamWriter class or factory function.

        Raises a LookupError in case the encoding cannot be found.

    """
    codec_entry = lookup(encoding)
    return codec_entry[3]
+
+### Helpers for charmap-based codecs
+
def make_identity_dict(rng):

    """ make_identity_dict(rng) -> dict

        Return a dictionary mapping each element of the rng sequence
        to itself.

    """
    # rng is iterated exactly once, so one-shot iterables work too.
    return dict((element, element) for element in rng)
+
def make_encoding_map(decoding_map):

    """ Creates an encoding map from a decoding map.

        If a target mapping in the decoding map occurs multiple
        times, then that target is mapped to None (undefined mapping),
        causing an exception when encountered by the charmap codec
        during translation.

        One example where this happens is cp875.py which decodes
        multiple characters to U+001A.

    """
    encoding_map = {}
    for byte, char in decoding_map.items():
        if char in encoding_map:
            # Duplicate target: mark the mapping as undefined.
            encoding_map[char] = None
        else:
            encoding_map[char] = byte
    return encoding_map
+
+### error handlers
+
try:
    # Look up the standard error handler callbacks once so they are
    # available as module attributes.
    strict_errors = lookup_error("strict")
    ignore_errors = lookup_error("ignore")
    replace_errors = lookup_error("replace")
    xmlcharrefreplace_errors = lookup_error("xmlcharrefreplace")
    backslashreplace_errors = lookup_error("backslashreplace")
except LookupError:
    # In --disable-unicode builds, these error handlers are missing
    strict_errors = None
    ignore_errors = None
    replace_errors = None
    xmlcharrefreplace_errors = None
    backslashreplace_errors = None

# Tell modulefinder that using codecs probably needs the encodings
# package
_false = 0
if _false:
    # Never executed at runtime; the import only makes the dependency
    # visible to static analysis tools such as modulefinder.
    import encodings
+
+### Tests
+
if __name__ == '__main__':

    # Simple demo: recode the standard streams between Latin-1 and
    # UTF-8 using the EncodedFile wrapper defined above.

    # Make stdout translate Latin-1 output into UTF-8 output
    sys.stdout = EncodedFile(sys.stdout, 'latin-1', 'utf-8')

    # Have stdin translate Latin-1 input into UTF-8 input
    sys.stdin = EncodedFile(sys.stdin, 'utf-8', 'latin-1')
diff --git a/depot_tools/release/win/python_24/Lib/codeop.py b/depot_tools/release/win/python_24/Lib/codeop.py
new file mode 100644
index 0000000..daa7eb8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/codeop.py
@@ -0,0 +1,176 @@
+r"""Utilities to compile possibly incomplete Python source code.
+
+This module provides two interfaces, broadly similar to the builtin
+function compile(), which take program text, a filename and a 'mode'
+and:
+
+- Return code object if the command is complete and valid
+- Return None if the command is incomplete
+- Raise SyntaxError, ValueError or OverflowError if the command is a
+  syntax error (OverflowError and ValueError can be produced by
+  malformed literals).
+
+Approach:
+
+First, check if the source consists entirely of blank lines and
+comments; if so, replace it with 'pass', because the built-in
+parser doesn't always do the right thing for these.
+
+Compile three times: as is, with \n, and with \n\n appended.  If it
+compiles as is, it's complete.  If it compiles with one \n appended,
+we expect more.  If it doesn't compile either way, we compare the
+error we get when compiling with \n or \n\n appended.  If the errors
+are the same, the code is broken.  But if the errors are different, we
+expect more.  Not intuitive; not even guaranteed to hold in future
+releases; but this matches the compiler's behavior from Python 1.4
+through 2.2, at least.
+
+Caveat:
+
+It is possible (but not likely) that the parser stops parsing with a
+successful outcome before reaching the end of the source; in this
+case, trailing symbols may be ignored instead of causing an error.
+For example, a backslash followed by two newlines may be followed by
+arbitrary garbage.  This will be fixed once the API for the parser is
+better.
+
+The two interfaces are:
+
+compile_command(source, filename, symbol):
+
+    Compiles a single command in the manner described above.
+
+CommandCompiler():
+
+    Instances of this class have __call__ methods identical in
+    signature to compile_command; the difference is that if the
+    instance compiles program text containing a __future__ statement,
+    the instance 'remembers' and compiles all subsequent program texts
+    with the statement in force.
+
+The module also provides another class:
+
+Compile():
+
+    Instances of this class act like the built-in function compile,
+    but with 'memory' in the sense described above.
+"""
+
+import __future__
+
+_features = [getattr(__future__, fname)
+             for fname in __future__.all_feature_names]
+
+__all__ = ["compile_command", "Compile", "CommandCompiler"]
+
+PyCF_DONT_IMPLY_DEDENT = 0x200          # Matches pythonrun.h
+
def _maybe_compile(compiler, source, filename, symbol):
    """Compile source with compiler and return a code object, return
    None if the input is incomplete, or raise SyntaxError if it is
    broken.

    Implements the compile / compile-with-"\n" / compile-with-"\n\n"
    protocol described in the module docstring.
    """
    # Check for source consisting of only blank lines and comments
    for line in source.split("\n"):
        line = line.strip()
        if line and line[0] != '#':
            break               # Leave it alone
    else:
        if symbol != "eval":
            source = "pass"     # Replace it with a 'pass' statement

    err = err1 = err2 = None
    code = code1 = code2 = None

    # Try the source as given, then with one and with two newlines
    # appended; comparing the failures distinguishes incomplete input
    # from genuinely broken input.
    try:
        code = compiler(source, filename, symbol)
    except SyntaxError, err:
        pass

    try:
        code1 = compiler(source + "\n", filename, symbol)
    except SyntaxError, err1:
        pass

    try:
        code2 = compiler(source + "\n\n", filename, symbol)
    except SyntaxError, err2:
        pass

    if code:
        return code
    # Compare via __dict__ so two distinct exception instances with the
    # same details compare equal.
    try:
        e1 = err1.__dict__
    except AttributeError:
        e1 = err1
    try:
        e2 = err2.__dict__
    except AttributeError:
        e2 = err2
    # Same error with and without the extra newline: really broken.
    # Different errors (or code1 compiled): more input may fix it, so
    # fall through and return None.
    if not code1 and e1 == e2:
        raise SyntaxError, err1
+
def _compile(source, filename, symbol):
    """Compile source with dedent-implication disabled, so that an
    unfinished indented block raises instead of compiling."""
    flags = PyCF_DONT_IMPLY_DEDENT
    return compile(source, filename, symbol, flags)
+
def compile_command(source, filename="<input>", symbol="single"):
    r"""Compile a command and determine whether it is incomplete.

    Arguments:

    source -- the source string; may contain \n characters
    filename -- optional filename from which source was read; default
                "<input>"
    symbol -- optional grammar start symbol; "single" (default) or "eval"

    Return value / exceptions raised:

    - Return a code object if the command is complete and valid
    - Return None if the command is incomplete
    - Raise SyntaxError, ValueError or OverflowError if the command is a
      syntax error (OverflowError and ValueError can be produced by
      malformed literals).
    """
    # Delegate to the shared helper, using the stateless compiler.
    return _maybe_compile(_compile, source, filename, symbol)
+
class Compile:
    """Wrapper around the built-in compile() with 'memory': once text
    containing a __future__ statement has been compiled, all later
    compilations keep that feature enabled."""

    def __init__(self):
        # Start with just the flag that makes incomplete blocks raise.
        self.flags = PyCF_DONT_IMPLY_DEDENT

    def __call__(self, source, filename, symbol):
        codeob = compile(source, filename, symbol, self.flags, 1)
        # Remember any future-statement flags the code object picked up.
        new_flags = self.flags
        for feature in _features:
            if codeob.co_flags & feature.compiler_flag:
                new_flags |= feature.compiler_flag
        self.flags = new_flags
        return codeob
+
class CommandCompiler:
    """Callable with the same signature as compile_command(); the
    difference is that a __future__ statement seen in one call stays
    in force for all subsequent calls."""

    def __init__(self):
        # A stateful Compile instance carries the accumulated flags.
        self.compiler = Compile()

    def __call__(self, source, filename="<input>", symbol="single"):
        r"""Compile a command and determine whether it is incomplete.

        Arguments:

        source -- the source string; may contain \n characters
        filename -- optional filename from which source was read;
                    default "<input>"
        symbol -- optional grammar start symbol; "single" (default) or
                  "eval"

        Return value / exceptions raised:

        - Return a code object if the command is complete and valid
        - Return None if the command is incomplete
        - Raise SyntaxError, ValueError or OverflowError if the command is a
          syntax error (OverflowError and ValueError can be produced by
          malformed literals).
        """
        return _maybe_compile(self.compiler, source, filename, symbol)
diff --git a/depot_tools/release/win/python_24/Lib/colorsys.py b/depot_tools/release/win/python_24/Lib/colorsys.py
new file mode 100644
index 0000000..c2cdf57
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/colorsys.py
@@ -0,0 +1,123 @@
+"""Conversion functions between RGB and other color systems.
+
+This module provides two functions for each color system ABC:
+
+  rgb_to_abc(r, g, b) --> a, b, c
+  abc_to_rgb(a, b, c) --> r, g, b
+
+All inputs and outputs are triples of floats in the range [0.0...1.0].
+Inputs outside this range may cause exceptions or invalid outputs.
+
+Supported color systems:
+RGB: Red, Green, Blue components
+YIQ: used by composite video signals
+HLS: Hue, Luminance, Saturation
+HSV: Hue, Saturation, Value
+"""
+# References:
+# XXX Where's the literature?
+
+__all__ = ["rgb_to_yiq","yiq_to_rgb","rgb_to_hls","hls_to_rgb",
+           "rgb_to_hsv","hsv_to_rgb"]
+
+# Some floating point constants
+
+ONE_THIRD = 1.0/3.0
+ONE_SIXTH = 1.0/6.0
+TWO_THIRD = 2.0/3.0
+
+
+# YIQ: used by composite video signals (linear combinations of RGB)
+# Y: perceived grey level (0.0 == black, 1.0 == white)
+# I, Q: color components
+
def rgb_to_yiq(r, g, b):
    """Convert RGB (all components in [0.0, 1.0]) to YIQ; returns (y, i, q)."""
    return (0.30*r + 0.59*g + 0.11*b,
            0.60*r - 0.28*g - 0.32*b,
            0.21*r - 0.52*g + 0.31*b)
+
def yiq_to_rgb(y, i, q):
    """Convert YIQ to RGB; each output channel is clamped to [0.0, 1.0]."""
    r = y + 0.948262*i + 0.624013*q
    g = y - 0.276066*i - 0.639810*q
    b = y - 1.105450*i + 1.729860*q
    # Clamp every channel into the valid range.
    r = min(1.0, max(0.0, r))
    g = min(1.0, max(0.0, g))
    b = min(1.0, max(0.0, b))
    return (r, g, b)
+
+
+# HLS: Hue, Luminance, Saturation
+# H: position in the spectrum
+# L: lightness, computed as (min+max)/2 of the RGB components
+# S: saturation
+
def rgb_to_hls(r, g, b):
    """Convert an RGB triple to HLS (Hue, Luminance, Saturation).

    All inputs and outputs are floats in [0.0, 1.0].  Returns (h, l, s).
    """
    maxc = max(r, g, b)
    minc = min(r, g, b)
    l = (minc+maxc)/2.0
    if minc == maxc:
        # Achromatic (grey): hue and saturation are zero.
        return 0.0, l, 0.0
    # Hoist the common subexpression; it was recomputed five times.
    # The arithmetic is unchanged, so results are bit-identical.
    delta = maxc - minc
    if l <= 0.5:
        s = delta / (maxc+minc)
    else:
        s = delta / (2.0-maxc-minc)
    rc = (maxc-r) / delta
    gc = (maxc-g) / delta
    bc = (maxc-b) / delta
    # Hue depends on which component is maximal.
    if r == maxc:
        h = bc-gc
    elif g == maxc:
        h = 2.0+rc-bc
    else:
        h = 4.0+gc-rc
    h = (h/6.0) % 1.0
    return h, l, s
+
def hls_to_rgb(h, l, s):
    """Convert HLS (all components in [0.0, 1.0]) to RGB; returns (r, g, b)."""
    if s == 0.0:
        # Achromatic: every channel equals the luminance.
        return l, l, l
    if l <= 0.5:
        m2 = l * (1.0+s)
    else:
        m2 = l+s-(l*s)
    m1 = 2.0*l - m2
    red = _v(m1, m2, h+ONE_THIRD)
    green = _v(m1, m2, h)
    blue = _v(m1, m2, h-ONE_THIRD)
    return (red, green, blue)
+
def _v(m1, m2, hue):
    """Helper for hls_to_rgb: piecewise interpolation between m1 and m2
    driven by the (wrapped) hue."""
    hue = hue % 1.0
    if hue < ONE_SIXTH:
        return m1 + (m2-m1)*hue*6.0
    elif hue < 0.5:
        return m2
    elif hue < TWO_THIRD:
        return m1 + (m2-m1)*(TWO_THIRD-hue)*6.0
    else:
        return m1
+
+
+# HSV: Hue, Saturation, Value
+# H: position in the spectrum
+# S: saturation
+# V: value (brightness), equal to max of the RGB components
+
def rgb_to_hsv(r, g, b):
    """Convert an RGB triple to HSV (Hue, Saturation, Value).

    All inputs and outputs are floats in [0.0, 1.0].  Returns (h, s, v).
    """
    maxc = max(r, g, b)
    minc = min(r, g, b)
    v = maxc
    if minc == maxc:
        # Achromatic (grey): hue and saturation are zero.
        return 0.0, 0.0, v
    # Hoist the common subexpression; it was recomputed four times.
    # The arithmetic is unchanged, so results are bit-identical.
    delta = maxc - minc
    s = delta / maxc
    rc = (maxc-r) / delta
    gc = (maxc-g) / delta
    bc = (maxc-b) / delta
    # Hue depends on which component is maximal.
    if r == maxc:
        h = bc-gc
    elif g == maxc:
        h = 2.0+rc-bc
    else:
        h = 4.0+gc-rc
    h = (h/6.0) % 1.0
    return h, s, v
+
def hsv_to_rgb(h, s, v):
    """Convert HSV (all components in [0.0, 1.0]) to RGB; returns (r, g, b)."""
    if s == 0.0:
        # Achromatic: every channel equals the value.
        return v, v, v
    i = int(h*6.0) # XXX assume int() truncates!
    f = (h*6.0) - i
    p = v*(1.0 - s)
    q = v*(1.0 - s*f)
    t = v*(1.0 - s*(1.0-f))
    # Select the sector of the color wheel (i == 6 wraps around to 0).
    if i%6 == 0:
        return v, t, p
    elif i == 1:
        return q, v, p
    elif i == 2:
        return p, v, t
    elif i == 3:
        return p, q, v
    elif i == 4:
        return t, p, v
    elif i == 5:
        return v, p, q
    # Cannot get here for h in [0.0, 1.0]
diff --git a/depot_tools/release/win/python_24/Lib/commands.py b/depot_tools/release/win/python_24/Lib/commands.py
new file mode 100644
index 0000000..cfbb541c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/commands.py
@@ -0,0 +1,84 @@
+"""Execute shell commands via os.popen() and return status, output.
+
+Interface summary:
+
+       import commands
+
+       outtext = commands.getoutput(cmd)
+       (exitstatus, outtext) = commands.getstatusoutput(cmd)
+       outtext = commands.getstatus(file)  # returns output of "ls -ld file"
+
+A trailing newline is removed from the output string.
+
+Encapsulates the basic operation:
+
+      pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
+      text = pipe.read()
+      sts = pipe.close()
+
+ [Note:  it would be nice to add functions to interpret the exit status.]
+"""
+
+__all__ = ["getstatusoutput","getoutput","getstatus"]
+
+# Module 'commands'
+#
+# Various tools for executing commands and looking at their output and status.
+#
+# NB This only works (and is only relevant) for UNIX.
+
+
+# Get 'ls -l' status for an object into a string
+#
def getstatus(file):
    """Return output of "ls -ld <file>" in a string."""
    # mkarg() supplies the leading space and shell quoting.
    return getoutput('ls -ld' + mkarg(file))
+
+
+# Get the output from a shell command into a string.
+# The exit status is ignored; a trailing newline is stripped.
+# Assume the command will work with '{ ... ; } 2>&1' around it.
+#
def getoutput(cmd):
    """Return output (stdout and stderr) of executing cmd in a shell.

    The exit status is discarded; use getstatusoutput() to keep it.
    """
    status_and_text = getstatusoutput(cmd)
    return status_and_text[1]
+
+
+# Ditto but preserving the exit status.
+# Returns a pair (sts, output)
+#
def getstatusoutput(cmd):
    """Return (status, output) of executing cmd in a shell.

    stdout and stderr are combined into one string; a single trailing
    newline is stripped.  A None close() status is reported as 0.
    """
    import os
    # Brace-group the command so stderr is folded into the same pipe.
    pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
    text = pipe.read()
    sts = pipe.close()
    if sts is None:
        sts = 0
    if text[-1:] == '\n':
        text = text[:-1]
    return sts, text
+
+
+# Make command argument from directory and pathname (prefix space, add quotes).
+#
def mk2arg(head, x):
    """Return mkarg() applied to os.path.join(head, x)."""
    import os
    joined = os.path.join(head, x)
    return mkarg(joined)
+
+
+# Make a shell command argument from a string.
+# Return a string beginning with a space followed by a shell-quoted
+# version of the argument.
+# Two strategies: enclose in single quotes if it contains none;
+# otherwise, enclose in double quotes and prefix quotable characters
+# with backslash.
+#
+def mkarg(x):
+    if '\'' not in x:
+        return ' \'' + x + '\''
+    s = ' "'
+    for c in x:
+        if c in '\\$"`':
+            s = s + '\\'
+        s = s + c
+    s = s + '"'
+    return s
diff --git a/depot_tools/release/win/python_24/Lib/compileall.py b/depot_tools/release/win/python_24/Lib/compileall.py
new file mode 100644
index 0000000..b21d95f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compileall.py
@@ -0,0 +1,157 @@
+"""Module/script to "compile" all .py files to .pyc (or .pyo) files.
+
+When called as a script with arguments, this compiles the directories
+given as arguments recursively; the -l option prevents it from
+recursing into directories.
+
+Without arguments, it compiles all modules on sys.path, without
+recursing into subdirectories.  (Even though it should do so for
+packages -- for now, you'll have to deal with packages separately.)
+
+See module py_compile for details of the actual byte-compilation.
+
+"""
+
+import os
+import sys
+import py_compile
+
+__all__ = ["compile_dir","compile_path"]
+
+def compile_dir(dir, maxlevels=10, ddir=None,
+                force=0, rx=None, quiet=0):
+    """Byte-compile all modules in the given directory tree.
+
+    Arguments (only dir is required):
+
+    dir:       the directory to byte-compile
+    maxlevels: maximum recursion level (default 10)
+    ddir:      if given, purported directory name (this is the
+               directory name that will show up in error messages)
+    force:     if 1, force compilation, even if timestamps are up-to-date
+    quiet:     if 1, be quiet during compilation
+
+    """
+    if not quiet:
+        print 'Listing', dir, '...'
+    try:
+        names = os.listdir(dir)
+    except os.error:
+        print "Can't list", dir
+        names = []
+    names.sort()
+    success = 1
+    for name in names:
+        fullname = os.path.join(dir, name)
+        if ddir is not None:
+            dfile = os.path.join(ddir, name)
+        else:
+            dfile = None
+        if rx is not None:
+            mo = rx.search(fullname)
+            if mo:
+                continue
+        if os.path.isfile(fullname):
+            head, tail = name[:-3], name[-3:]
+            if tail == '.py':
+                cfile = fullname + (__debug__ and 'c' or 'o')
+                ftime = os.stat(fullname).st_mtime
+                try: ctime = os.stat(cfile).st_mtime
+                except os.error: ctime = 0
+                if (ctime > ftime) and not force: continue
+                if not quiet:
+                    print 'Compiling', fullname, '...'
+                try:
+                    ok = py_compile.compile(fullname, None, dfile, True)
+                except KeyboardInterrupt:
+                    raise KeyboardInterrupt
+                except py_compile.PyCompileError,err:
+                    if quiet:
+                        print 'Compiling', fullname, '...'
+                    print err.msg
+                    success = 0
+                except IOError, e:
+                    print "Sorry", e
+                    success = 0
+                else:
+                    if ok == 0:
+                        success = 0
+        elif maxlevels > 0 and \
+             name != os.curdir and name != os.pardir and \
+             os.path.isdir(fullname) and \
+             not os.path.islink(fullname):
+            if not compile_dir(fullname, maxlevels - 1, dfile, force, rx, quiet):
+                success = 0
+    return success
+
+def compile_path(skip_curdir=1, maxlevels=0, force=0, quiet=0):
+    """Byte-compile all modules on sys.path.
+
+    Arguments (all optional):
+
+    skip_curdir: if true, skip current directory (default true)
+    maxlevels:   max recursion level (default 0)
+    force: as for compile_dir() (default 0)
+    quiet: as for compile_dir() (default 0)
+
+    """
+    success = 1
+    for dir in sys.path:
+        if (not dir or dir == os.curdir) and skip_curdir:
+            print 'Skipping current directory'
+        else:
+            success = success and compile_dir(dir, maxlevels, None,
+                                              force, quiet=quiet)
+    return success
+
+def main():
+    """Script main program."""
+    import getopt
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], 'lfqd:x:')
+    except getopt.error, msg:
+        print msg
+        print "usage: python compileall.py [-l] [-f] [-q] [-d destdir] " \
+              "[-x regexp] [directory ...]"
+        print "-l: don't recurse down"
+        print "-f: force rebuild even if timestamps are up-to-date"
+        print "-q: quiet operation"
+        print "-d destdir: purported directory name for error messages"
+        print "   if no directory arguments, -l sys.path is assumed"
+        print "-x regexp: skip files matching the regular expression regexp"
+        print "   the regexp is search for in the full path of the file"
+        sys.exit(2)
+    maxlevels = 10
+    ddir = None
+    force = 0
+    quiet = 0
+    rx = None
+    for o, a in opts:
+        if o == '-l': maxlevels = 0
+        if o == '-d': ddir = a
+        if o == '-f': force = 1
+        if o == '-q': quiet = 1
+        if o == '-x':
+            import re
+            rx = re.compile(a)
+    if ddir:
+        if len(args) != 1:
+            print "-d destdir require exactly one directory argument"
+            sys.exit(2)
+    success = 1
+    try:
+        if args:
+            for dir in args:
+                if not compile_dir(dir, maxlevels, ddir,
+                                   force, rx, quiet):
+                    success = 0
+        else:
+            success = compile_path()
+    except KeyboardInterrupt:
+        print "\n[interrupt]"
+        success = 0
+    return success
+
+if __name__ == '__main__':
+    exit_status = int(not main())
+    sys.exit(exit_status)
diff --git a/depot_tools/release/win/python_24/Lib/compiler/__init__.py b/depot_tools/release/win/python_24/Lib/compiler/__init__.py
new file mode 100644
index 0000000..13b05bf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/__init__.py
@@ -0,0 +1,26 @@
+"""Package for parsing and compiling Python source code
+
+There are several functions defined at the top level that are imported
+from modules contained in the package.
+
+parse(buf, mode="exec") -> AST
+    Converts a string containing Python source code to an abstract
+    syntax tree (AST).  The AST is defined in compiler.ast.
+
+parseFile(path) -> AST
+    The same as parse(open(path))
+
+walk(ast, visitor, verbose=None)
+    Does a pre-order walk over the ast using the visitor instance.
+    See compiler.visitor for details.
+
+compile(source, filename, mode, flags=None, dont_inherit=None)
+    Returns a code object.  A replacement for the builtin compile() function.
+
+compileFile(filename)
+    Generates a .pyc file by compiling filename.
+"""
+
+from transformer import parse, parseFile
+from visitor import walk
+from pycodegen import compile, compileFile
diff --git a/depot_tools/release/win/python_24/Lib/compiler/ast.py b/depot_tools/release/win/python_24/Lib/compiler/ast.py
new file mode 100644
index 0000000..6b78fdd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/ast.py
@@ -0,0 +1,1316 @@
+"""Python abstract syntax node definitions
+
+This file is automatically generated by Tools/compiler/astgen.py
+"""
+from consts import CO_VARARGS, CO_VARKEYWORDS
+
+def flatten(list):
+    l = []
+    for elt in list:
+        t = type(elt)
+        if t is tuple or t is list:
+            for elt2 in flatten(elt):
+                l.append(elt2)
+        else:
+            l.append(elt)
+    return l
+
+def flatten_nodes(list):
+    return [n for n in flatten(list) if isinstance(n, Node)]
+
+nodes = {}
+
+class Node:
+    """Abstract base class for ast nodes."""
+    def getChildren(self):
+        pass # implemented by subclasses
+    def __iter__(self):
+        for n in self.getChildren():
+            yield n
+    def asList(self): # for backwards compatibility
+        return self.getChildren()
+    def getChildNodes(self):
+        pass # implemented by subclasses
+
+class EmptyNode(Node):
+    pass
+
+class Expression(Node):
+    # Expression is an artificial node class to support "eval"
+    nodes["expression"] = "Expression"
+    def __init__(self, node):
+        self.node = node
+
+    def getChildren(self):
+        return self.node,
+
+    def getChildNodes(self):
+        return self.node,
+
+    def __repr__(self):
+        return "Expression(%s)" % (repr(self.node))
+
+class Add(Node):
+    def __init__(self, (left, right), lineno=None):
+        self.left = left
+        self.right = right
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.left, self.right
+
+    def getChildNodes(self):
+        return self.left, self.right
+
+    def __repr__(self):
+        return "Add((%s, %s))" % (repr(self.left), repr(self.right))
+
+class And(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "And(%s)" % (repr(self.nodes),)
+
+class AssAttr(Node):
+    def __init__(self, expr, attrname, flags, lineno=None):
+        self.expr = expr
+        self.attrname = attrname
+        self.flags = flags
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.expr, self.attrname, self.flags
+
+    def getChildNodes(self):
+        return self.expr,
+
+    def __repr__(self):
+        return "AssAttr(%s, %s, %s)" % (repr(self.expr), repr(self.attrname), repr(self.flags))
+
+class AssList(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "AssList(%s)" % (repr(self.nodes),)
+
+class AssName(Node):
+    def __init__(self, name, flags, lineno=None):
+        self.name = name
+        self.flags = flags
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.name, self.flags
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "AssName(%s, %s)" % (repr(self.name), repr(self.flags))
+
+class AssTuple(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "AssTuple(%s)" % (repr(self.nodes),)
+
+class Assert(Node):
+    def __init__(self, test, fail, lineno=None):
+        self.test = test
+        self.fail = fail
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.test)
+        children.append(self.fail)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.test)
+        if self.fail is not None:
+            nodelist.append(self.fail)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Assert(%s, %s)" % (repr(self.test), repr(self.fail))
+
+class Assign(Node):
+    def __init__(self, nodes, expr, lineno=None):
+        self.nodes = nodes
+        self.expr = expr
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.extend(flatten(self.nodes))
+        children.append(self.expr)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        nodelist.append(self.expr)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Assign(%s, %s)" % (repr(self.nodes), repr(self.expr))
+
+class AugAssign(Node):
+    def __init__(self, node, op, expr, lineno=None):
+        self.node = node
+        self.op = op
+        self.expr = expr
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.node, self.op, self.expr
+
+    def getChildNodes(self):
+        return self.node, self.expr
+
+    def __repr__(self):
+        return "AugAssign(%s, %s, %s)" % (repr(self.node), repr(self.op), repr(self.expr))
+
+class Backquote(Node):
+    def __init__(self, expr, lineno=None):
+        self.expr = expr
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.expr,
+
+    def getChildNodes(self):
+        return self.expr,
+
+    def __repr__(self):
+        return "Backquote(%s)" % (repr(self.expr),)
+
+class Bitand(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Bitand(%s)" % (repr(self.nodes),)
+
+class Bitor(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Bitor(%s)" % (repr(self.nodes),)
+
+class Bitxor(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Bitxor(%s)" % (repr(self.nodes),)
+
+class Break(Node):
+    def __init__(self, lineno=None):
+        self.lineno = lineno
+
+    def getChildren(self):
+        return ()
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "Break()"
+
+class CallFunc(Node):
+    def __init__(self, node, args, star_args = None, dstar_args = None, lineno=None):
+        self.node = node
+        self.args = args
+        self.star_args = star_args
+        self.dstar_args = dstar_args
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.node)
+        children.extend(flatten(self.args))
+        children.append(self.star_args)
+        children.append(self.dstar_args)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.node)
+        nodelist.extend(flatten_nodes(self.args))
+        if self.star_args is not None:
+            nodelist.append(self.star_args)
+        if self.dstar_args is not None:
+            nodelist.append(self.dstar_args)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "CallFunc(%s, %s, %s, %s)" % (repr(self.node), repr(self.args), repr(self.star_args), repr(self.dstar_args))
+
+class Class(Node):
+    def __init__(self, name, bases, doc, code, lineno=None):
+        self.name = name
+        self.bases = bases
+        self.doc = doc
+        self.code = code
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.name)
+        children.extend(flatten(self.bases))
+        children.append(self.doc)
+        children.append(self.code)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.bases))
+        nodelist.append(self.code)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Class(%s, %s, %s, %s)" % (repr(self.name), repr(self.bases), repr(self.doc), repr(self.code))
+
+class Compare(Node):
+    def __init__(self, expr, ops, lineno=None):
+        self.expr = expr
+        self.ops = ops
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.expr)
+        children.extend(flatten(self.ops))
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.expr)
+        nodelist.extend(flatten_nodes(self.ops))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Compare(%s, %s)" % (repr(self.expr), repr(self.ops))
+
+class Const(Node):
+    def __init__(self, value, lineno=None):
+        self.value = value
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.value,
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "Const(%s)" % (repr(self.value),)
+
+class Continue(Node):
+    def __init__(self, lineno=None):
+        self.lineno = lineno
+
+    def getChildren(self):
+        return ()
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "Continue()"
+
+class Decorators(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Decorators(%s)" % (repr(self.nodes),)
+
+class Dict(Node):
+    def __init__(self, items, lineno=None):
+        self.items = items
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.items))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.items))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Dict(%s)" % (repr(self.items),)
+
+class Discard(Node):
+    def __init__(self, expr, lineno=None):
+        self.expr = expr
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.expr,
+
+    def getChildNodes(self):
+        return self.expr,
+
+    def __repr__(self):
+        return "Discard(%s)" % (repr(self.expr),)
+
+class Div(Node):
+    def __init__(self, (left, right), lineno=None):
+        self.left = left
+        self.right = right
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.left, self.right
+
+    def getChildNodes(self):
+        return self.left, self.right
+
+    def __repr__(self):
+        return "Div((%s, %s))" % (repr(self.left), repr(self.right))
+
+class Ellipsis(Node):
+    def __init__(self, lineno=None):
+        self.lineno = lineno
+
+    def getChildren(self):
+        return ()
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "Ellipsis()"
+
+class Exec(Node):
+    def __init__(self, expr, locals, globals, lineno=None):
+        self.expr = expr
+        self.locals = locals
+        self.globals = globals
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.expr)
+        children.append(self.locals)
+        children.append(self.globals)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.expr)
+        if self.locals is not None:
+            nodelist.append(self.locals)
+        if self.globals is not None:
+            nodelist.append(self.globals)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Exec(%s, %s, %s)" % (repr(self.expr), repr(self.locals), repr(self.globals))
+
+class FloorDiv(Node):
+    def __init__(self, (left, right), lineno=None):
+        self.left = left
+        self.right = right
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.left, self.right
+
+    def getChildNodes(self):
+        return self.left, self.right
+
+    def __repr__(self):
+        return "FloorDiv((%s, %s))" % (repr(self.left), repr(self.right))
+
+class For(Node):
+    def __init__(self, assign, list, body, else_, lineno=None):
+        self.assign = assign
+        self.list = list
+        self.body = body
+        self.else_ = else_
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.assign)
+        children.append(self.list)
+        children.append(self.body)
+        children.append(self.else_)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.assign)
+        nodelist.append(self.list)
+        nodelist.append(self.body)
+        if self.else_ is not None:
+            nodelist.append(self.else_)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "For(%s, %s, %s, %s)" % (repr(self.assign), repr(self.list), repr(self.body), repr(self.else_))
+
+class From(Node):
+    def __init__(self, modname, names, lineno=None):
+        self.modname = modname
+        self.names = names
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.modname, self.names
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "From(%s, %s)" % (repr(self.modname), repr(self.names))
+
+class Function(Node):
+    def __init__(self, decorators, name, argnames, defaults, flags, doc, code, lineno=None):
+        self.decorators = decorators
+        self.name = name
+        self.argnames = argnames
+        self.defaults = defaults
+        self.flags = flags
+        self.doc = doc
+        self.code = code
+        self.lineno = lineno
+        self.varargs = self.kwargs = None
+        if flags & CO_VARARGS:
+            self.varargs = 1
+        if flags & CO_VARKEYWORDS:
+            self.kwargs = 1
+
+
+
+    def getChildren(self):
+        children = []
+        children.append(self.decorators)
+        children.append(self.name)
+        children.append(self.argnames)
+        children.extend(flatten(self.defaults))
+        children.append(self.flags)
+        children.append(self.doc)
+        children.append(self.code)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        if self.decorators is not None:
+            nodelist.append(self.decorators)
+        nodelist.extend(flatten_nodes(self.defaults))
+        nodelist.append(self.code)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Function(%s, %s, %s, %s, %s, %s, %s)" % (repr(self.decorators), repr(self.name), repr(self.argnames), repr(self.defaults), repr(self.flags), repr(self.doc), repr(self.code))
+
+class GenExpr(Node):
+    def __init__(self, code, lineno=None):
+        self.code = code
+        self.lineno = lineno
+        self.argnames = ['[outmost-iterable]']
+        self.varargs = self.kwargs = None
+
+
+
+    def getChildren(self):
+        return self.code,
+
+    def getChildNodes(self):
+        return self.code,
+
+    def __repr__(self):
+        return "GenExpr(%s)" % (repr(self.code),)
+
+class GenExprFor(Node):
+    def __init__(self, assign, iter, ifs, lineno=None):
+        self.assign = assign
+        self.iter = iter
+        self.ifs = ifs
+        self.lineno = lineno
+        self.is_outmost = False
+
+
+    def getChildren(self):
+        children = []
+        children.append(self.assign)
+        children.append(self.iter)
+        children.extend(flatten(self.ifs))
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.assign)
+        nodelist.append(self.iter)
+        nodelist.extend(flatten_nodes(self.ifs))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "GenExprFor(%s, %s, %s)" % (repr(self.assign), repr(self.iter), repr(self.ifs))
+
+class GenExprIf(Node):
+    def __init__(self, test, lineno=None):
+        self.test = test
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.test,
+
+    def getChildNodes(self):
+        return self.test,
+
+    def __repr__(self):
+        return "GenExprIf(%s)" % (repr(self.test),)
+
+class GenExprInner(Node):
+    def __init__(self, expr, quals, lineno=None):
+        self.expr = expr
+        self.quals = quals
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.expr)
+        children.extend(flatten(self.quals))
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.expr)
+        nodelist.extend(flatten_nodes(self.quals))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "GenExprInner(%s, %s)" % (repr(self.expr), repr(self.quals))
+
+class Getattr(Node):
+    def __init__(self, expr, attrname, lineno=None):
+        self.expr = expr
+        self.attrname = attrname
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.expr, self.attrname
+
+    def getChildNodes(self):
+        return self.expr,
+
+    def __repr__(self):
+        return "Getattr(%s, %s)" % (repr(self.expr), repr(self.attrname))
+
+class Global(Node):
+    def __init__(self, names, lineno=None):
+        self.names = names
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.names,
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "Global(%s)" % (repr(self.names),)
+
+class If(Node):
+    def __init__(self, tests, else_, lineno=None):
+        self.tests = tests
+        self.else_ = else_
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.extend(flatten(self.tests))
+        children.append(self.else_)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.tests))
+        if self.else_ is not None:
+            nodelist.append(self.else_)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "If(%s, %s)" % (repr(self.tests), repr(self.else_))
+
+class Import(Node):
+    def __init__(self, names, lineno=None):
+        self.names = names
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.names,
+
+    def getChildNodes(self):
+        return ()
+
+    def __repr__(self):
+        return "Import(%s)" % (repr(self.names),)
+
+class Invert(Node):
+    def __init__(self, expr, lineno=None):
+        self.expr = expr
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.expr,
+
+    def getChildNodes(self):
+        return self.expr,
+
+    def __repr__(self):
+        return "Invert(%s)" % (repr(self.expr),)
+
+class Keyword(Node):
+    def __init__(self, name, expr, lineno=None):
+        self.name = name
+        self.expr = expr
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.name, self.expr
+
+    def getChildNodes(self):
+        return self.expr,
+
+    def __repr__(self):
+        return "Keyword(%s, %s)" % (repr(self.name), repr(self.expr))
+
+class Lambda(Node):
+    def __init__(self, argnames, defaults, flags, code, lineno=None):
+        self.argnames = argnames
+        self.defaults = defaults
+        self.flags = flags
+        self.code = code
+        self.lineno = lineno
+        self.varargs = self.kwargs = None
+        if flags & CO_VARARGS:
+            self.varargs = 1
+        if flags & CO_VARKEYWORDS:
+            self.kwargs = 1
+
+
+
+    def getChildren(self):
+        children = []
+        children.append(self.argnames)
+        children.extend(flatten(self.defaults))
+        children.append(self.flags)
+        children.append(self.code)
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.defaults))
+        nodelist.append(self.code)
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "Lambda(%s, %s, %s, %s)" % (repr(self.argnames), repr(self.defaults), repr(self.flags), repr(self.code))
+
+class LeftShift(Node):
+    def __init__(self, (left, right), lineno=None):
+        self.left = left
+        self.right = right
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.left, self.right
+
+    def getChildNodes(self):
+        return self.left, self.right
+
+    def __repr__(self):
+        return "LeftShift((%s, %s))" % (repr(self.left), repr(self.right))
+
+class List(Node):
+    def __init__(self, nodes, lineno=None):
+        self.nodes = nodes
+        self.lineno = lineno
+
+    def getChildren(self):
+        return tuple(flatten(self.nodes))
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.extend(flatten_nodes(self.nodes))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "List(%s)" % (repr(self.nodes),)
+
+class ListComp(Node):
+    def __init__(self, expr, quals, lineno=None):
+        self.expr = expr
+        self.quals = quals
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.expr)
+        children.extend(flatten(self.quals))
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.expr)
+        nodelist.extend(flatten_nodes(self.quals))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "ListComp(%s, %s)" % (repr(self.expr), repr(self.quals))
+
+class ListCompFor(Node):
+    def __init__(self, assign, list, ifs, lineno=None):
+        self.assign = assign
+        self.list = list
+        self.ifs = ifs
+        self.lineno = lineno
+
+    def getChildren(self):
+        children = []
+        children.append(self.assign)
+        children.append(self.list)
+        children.extend(flatten(self.ifs))
+        return tuple(children)
+
+    def getChildNodes(self):
+        nodelist = []
+        nodelist.append(self.assign)
+        nodelist.append(self.list)
+        nodelist.extend(flatten_nodes(self.ifs))
+        return tuple(nodelist)
+
+    def __repr__(self):
+        return "ListCompFor(%s, %s, %s)" % (repr(self.assign), repr(self.list), repr(self.ifs))
+
+class ListCompIf(Node):
+    def __init__(self, test, lineno=None):
+        self.test = test
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.test,
+
+    def getChildNodes(self):
+        return self.test,
+
+    def __repr__(self):
+        return "ListCompIf(%s)" % (repr(self.test),)
+
+class Mod(Node):
+    def __init__(self, (left, right), lineno=None):
+        self.left = left
+        self.right = right
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.left, self.right
+
+    def getChildNodes(self):
+        return self.left, self.right
+
+    def __repr__(self):
+        return "Mod((%s, %s))" % (repr(self.left), repr(self.right))
+
+class Module(Node):
+    def __init__(self, doc, node, lineno=None):
+        self.doc = doc
+        self.node = node
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.doc, self.node
+
+    def getChildNodes(self):
+        return self.node,
+
+    def __repr__(self):
+        return "Module(%s, %s)" % (repr(self.doc), repr(self.node))
+
+class Mul(Node):
+    def __init__(self, (left, right), lineno=None):
+        self.left = left
+        self.right = right
+        self.lineno = lineno
+
+    def getChildren(self):
+        return self.left, self.right
+
+    def getChildNodes(self):
+        return self.left, self.right
+
+    def __repr__(self):
+        return "Mul((%s, %s))" % (repr(self.left), repr(self.right))
+
class Name(Node):
    """Reference to an identifier."""

    def __init__(self, name, lineno=None):
        self.name = name
        self.lineno = lineno

    def getChildren(self):
        return (self.name,)

    def getChildNodes(self):
        # The identifier string is a leaf value, not a node.
        return ()

    def __repr__(self):
        return "Name(%r)" % (self.name,)
+
class Not(Node):
    """Boolean 'not' operation."""

    def __init__(self, expr, lineno=None):
        self.expr = expr
        self.lineno = lineno

    def getChildren(self):
        return (self.expr,)

    def getChildNodes(self):
        return (self.expr,)

    def __repr__(self):
        return "Not(%r)" % (self.expr,)
+
class Or(Node):
    """Short-circuit 'or' over its operand list."""

    def __init__(self, nodes, lineno=None):
        self.nodes = nodes
        self.lineno = lineno

    def getChildren(self):
        return tuple(flatten(self.nodes))

    def getChildNodes(self):
        return tuple(flatten_nodes(self.nodes))

    def __repr__(self):
        return "Or(%r)" % (self.nodes,)
+
class Pass(Node):
    """The 'pass' statement; carries no children."""

    def __init__(self, lineno=None):
        self.lineno = lineno

    def getChildren(self):
        return ()

    def getChildNodes(self):
        return ()

    def __repr__(self):
        return "Pass()"
+
class Power(Node):
    """Binary '**' operation."""

    def __init__(self, leftright, lineno=None):
        # The two operands arrive packed in a single (left, right) tuple.
        self.left, self.right = leftright
        self.lineno = lineno

    def getChildren(self):
        return (self.left, self.right)

    def getChildNodes(self):
        return (self.left, self.right)

    def __repr__(self):
        return "Power((%r, %r))" % (self.left, self.right)
+
class Print(Node):
    """A 'print' statement; *dest* is the redirect target or None."""

    def __init__(self, nodes, dest, lineno=None):
        self.nodes = nodes
        self.dest = dest
        self.lineno = lineno

    def getChildren(self):
        # dest is reported even when None.
        return tuple(flatten(self.nodes)) + (self.dest,)

    def getChildNodes(self):
        children = list(flatten_nodes(self.nodes))
        if self.dest is not None:
            children.append(self.dest)
        return tuple(children)

    def __repr__(self):
        return "Print(%r, %r)" % (self.nodes, self.dest)
+
class Printnl(Node):
    """A newline-terminated 'print' statement; *dest* may be None."""

    def __init__(self, nodes, dest, lineno=None):
        self.nodes = nodes
        self.dest = dest
        self.lineno = lineno

    def getChildren(self):
        # dest is reported even when None.
        return tuple(flatten(self.nodes)) + (self.dest,)

    def getChildNodes(self):
        children = list(flatten_nodes(self.nodes))
        if self.dest is not None:
            children.append(self.dest)
        return tuple(children)

    def __repr__(self):
        return "Printnl(%r, %r)" % (self.nodes, self.dest)
+
class Raise(Node):
    """A 'raise' statement; each of the three expressions may be None."""

    def __init__(self, expr1, expr2, expr3, lineno=None):
        self.expr1 = expr1
        self.expr2 = expr2
        self.expr3 = expr3
        self.lineno = lineno

    def getChildren(self):
        return (self.expr1, self.expr2, self.expr3)

    def getChildNodes(self):
        # Only the expressions actually present are child nodes.
        return tuple(e for e in (self.expr1, self.expr2, self.expr3)
                     if e is not None)

    def __repr__(self):
        return "Raise(%r, %r, %r)" % (self.expr1, self.expr2, self.expr3)
+
class Return(Node):
    """A 'return' statement."""

    def __init__(self, value, lineno=None):
        self.value = value
        self.lineno = lineno

    def getChildren(self):
        return (self.value,)

    def getChildNodes(self):
        return (self.value,)

    def __repr__(self):
        return "Return(%r)" % (self.value,)
+
class RightShift(Node):
    """Binary '>>' operation."""

    def __init__(self, leftright, lineno=None):
        # The two operands arrive packed in a single (left, right) tuple.
        self.left, self.right = leftright
        self.lineno = lineno

    def getChildren(self):
        return (self.left, self.right)

    def getChildNodes(self):
        return (self.left, self.right)

    def __repr__(self):
        return "RightShift((%r, %r))" % (self.left, self.right)
+
class Slice(Node):
    """A simple slice expr[lower:upper].

    *lower* and *upper* may be None when omitted.  *flags* is an access
    mode tag (presumably one of the OP_* constants) and is not a node.
    """

    def __init__(self, expr, flags, lower, upper, lineno=None):
        self.expr = expr
        self.flags = flags
        self.lower = lower
        self.upper = upper
        self.lineno = lineno

    def getChildren(self):
        return (self.expr, self.flags, self.lower, self.upper)

    def getChildNodes(self):
        return (self.expr,) + tuple(
            n for n in (self.lower, self.upper) if n is not None)

    def __repr__(self):
        return "Slice(%r, %r, %r, %r)" % (self.expr, self.flags,
                                          self.lower, self.upper)
+
class Sliceobj(Node):
    """Slice-object node holding its component expressions."""

    def __init__(self, nodes, lineno=None):
        self.nodes = nodes
        self.lineno = lineno

    def getChildren(self):
        return tuple(flatten(self.nodes))

    def getChildNodes(self):
        return tuple(flatten_nodes(self.nodes))

    def __repr__(self):
        return "Sliceobj(%r)" % (self.nodes,)
+
class Stmt(Node):
    """A suite: an ordered list of statement nodes."""

    def __init__(self, nodes, lineno=None):
        self.nodes = nodes
        self.lineno = lineno

    def getChildren(self):
        return tuple(flatten(self.nodes))

    def getChildNodes(self):
        return tuple(flatten_nodes(self.nodes))

    def __repr__(self):
        return "Stmt(%r)" % (self.nodes,)
+
class Sub(Node):
    """Binary '-' operation."""

    def __init__(self, leftright, lineno=None):
        # The two operands arrive packed in a single (left, right) tuple.
        self.left, self.right = leftright
        self.lineno = lineno

    def getChildren(self):
        return (self.left, self.right)

    def getChildNodes(self):
        return (self.left, self.right)

    def __repr__(self):
        return "Sub((%r, %r))" % (self.left, self.right)
+
class Subscript(Node):
    """A subscript expression expr[subs]; *flags* is an access-mode tag."""

    def __init__(self, expr, flags, subs, lineno=None):
        self.expr = expr
        self.flags = flags
        self.subs = subs
        self.lineno = lineno

    def getChildren(self):
        return (self.expr, self.flags) + tuple(flatten(self.subs))

    def getChildNodes(self):
        # flags is data, not a node.
        return (self.expr,) + tuple(flatten_nodes(self.subs))

    def __repr__(self):
        return "Subscript(%r, %r, %r)" % (self.expr, self.flags, self.subs)
+
class TryExcept(Node):
    """try/except statement; *else_* is None when no else clause exists."""

    def __init__(self, body, handlers, else_, lineno=None):
        self.body = body
        self.handlers = handlers
        self.else_ = else_
        self.lineno = lineno

    def getChildren(self):
        return (self.body,) + tuple(flatten(self.handlers)) + (self.else_,)

    def getChildNodes(self):
        children = [self.body]
        children.extend(flatten_nodes(self.handlers))
        if self.else_ is not None:
            children.append(self.else_)
        return tuple(children)

    def __repr__(self):
        return "TryExcept(%r, %r, %r)" % (self.body, self.handlers,
                                          self.else_)
+
class TryFinally(Node):
    """try/finally statement."""

    def __init__(self, body, final, lineno=None):
        self.body = body
        self.final = final
        self.lineno = lineno

    def getChildren(self):
        return (self.body, self.final)

    def getChildNodes(self):
        return (self.body, self.final)

    def __repr__(self):
        return "TryFinally(%r, %r)" % (self.body, self.final)
+
class Tuple(Node):
    """A tuple display, e.g. (a, b, c)."""

    def __init__(self, nodes, lineno=None):
        self.nodes = nodes
        self.lineno = lineno

    def getChildren(self):
        return tuple(flatten(self.nodes))

    def getChildNodes(self):
        return tuple(flatten_nodes(self.nodes))

    def __repr__(self):
        return "Tuple(%r)" % (self.nodes,)
+
class UnaryAdd(Node):
    """Unary '+' operation."""

    def __init__(self, expr, lineno=None):
        self.expr = expr
        self.lineno = lineno

    def getChildren(self):
        return (self.expr,)

    def getChildNodes(self):
        return (self.expr,)

    def __repr__(self):
        return "UnaryAdd(%r)" % (self.expr,)
+
class UnarySub(Node):
    """Unary '-' operation."""

    def __init__(self, expr, lineno=None):
        self.expr = expr
        self.lineno = lineno

    def getChildren(self):
        return (self.expr,)

    def getChildNodes(self):
        return (self.expr,)

    def __repr__(self):
        return "UnarySub(%r)" % (self.expr,)
+
class While(Node):
    """while loop; *else_* is None when there is no else clause."""

    def __init__(self, test, body, else_, lineno=None):
        self.test = test
        self.body = body
        self.else_ = else_
        self.lineno = lineno

    def getChildren(self):
        return (self.test, self.body, self.else_)

    def getChildNodes(self):
        if self.else_ is None:
            return (self.test, self.body)
        return (self.test, self.body, self.else_)

    def __repr__(self):
        return "While(%r, %r, %r)" % (self.test, self.body, self.else_)
+
class Yield(Node):
    """A 'yield' expression/statement."""

    def __init__(self, value, lineno=None):
        self.value = value
        self.lineno = lineno

    def getChildren(self):
        return (self.value,)

    def getChildNodes(self):
        return (self.value,)

    def __repr__(self):
        return "Yield(%r)" % (self.value,)
+
# Build the name -> class lookup table: register every Node subclass
# defined in this module under its lowercased class name.  The 'nodes'
# dict is defined earlier in this module (outside this view).
for name, obj in globals().items():
    if isinstance(obj, type) and issubclass(obj, Node):
        nodes[name.lower()] = obj
diff --git a/depot_tools/release/win/python_24/Lib/compiler/consts.py b/depot_tools/release/win/python_24/Lib/compiler/consts.py
new file mode 100644
index 0000000..a6cf559
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/consts.py
@@ -0,0 +1,19 @@
# operation flags
# NOTE(review): consumers appear to use these to tag the access mode
# (bind / delete / plain reference) on name and subscript AST nodes --
# confirm at call sites.
OP_ASSIGN = 'OP_ASSIGN'
OP_DELETE = 'OP_DELETE'
OP_APPLY = 'OP_APPLY'

# Symbol-table scope classifications (SC_UNKNOWN = unresolved).
SC_LOCAL = 1
SC_GLOBAL = 2
SC_FREE = 3
SC_CELL = 4
SC_UNKNOWN = 5

# Code-object flag bits; these mirror CPython's CO_* constants, so the
# values must not be changed.
CO_OPTIMIZED = 0x0001
CO_NEWLOCALS = 0x0002
CO_VARARGS = 0x0004
CO_VARKEYWORDS = 0x0008
CO_NESTED = 0x0010
CO_GENERATOR = 0x0020
CO_GENERATOR_ALLOWED = 0x1000
CO_FUTURE_DIVISION = 0x2000
diff --git a/depot_tools/release/win/python_24/Lib/compiler/future.py b/depot_tools/release/win/python_24/Lib/compiler/future.py
new file mode 100644
index 0000000..868b7cb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/future.py
@@ -0,0 +1,72 @@
+"""Parser for future statements
+
+"""
+
+from compiler import ast, walk
+
def is_future(stmt):
    """Return true if statement is a well-formed future statement"""
    if isinstance(stmt, ast.From) and stmt.modname == "__future__":
        return 1
    return 0
+
class FutureParser:
    """Visitor that records __future__ features imported at module top.

    Only the leading statements of the module body are considered;
    scanning stops at the first non-future statement.
    """

    features = ("nested_scopes", "generators", "division")

    def __init__(self):
        self.found = {} # set

    def visitModule(self, node):
        stmt = node.node
        for s in stmt.nodes:
            if not self.check_stmt(s):
                break

    def check_stmt(self, stmt):
        """Record stmt's features; return 1 iff it was a future statement."""
        if is_future(stmt):
            for name, asname in stmt.names:
                if name in self.features:
                    self.found[name] = 1
                else:
                    # Call-form raise works on both Py2 and Py3
                    # (was the Py2-only 'raise SyntaxError, msg').
                    raise SyntaxError(
                        "future feature %s is not defined" % name)
            # Mark the statement so BadFutureParser accepts it later.
            stmt.valid_future = 1
            return 1
        return 0

    def get_features(self):
        """Return list of features enabled by future statements"""
        # list() keeps the Py2 return type while working on Py3 views.
        return list(self.found.keys())
+
class BadFutureParser:
    """Check for invalid future statements"""

    def visitFrom(self, node):
        # Statements already validated by FutureParser carry this marker.
        if hasattr(node, 'valid_future'):
            return
        if node.modname != "__future__":
            return
        # A __future__ import that did not appear at the top of the
        # module.  Call-form raise replaces the Py2-only
        # 'raise SyntaxError, msg' statement; behavior is identical.
        raise SyntaxError("invalid future statement")
+
def find_futures(node):
    """Walk *node* and return the list of enabled __future__ features."""
    parser = FutureParser()
    checker = BadFutureParser()
    # Order matters: the parser marks valid statements that the
    # checker then skips.
    walk(node, parser)
    walk(node, checker)
    return parser.get_features()
+
if __name__ == "__main__":
    import sys
    from compiler import parseFile, walk

    # Small driver: print the __future__ features found in each file
    # named on the command line.  The loop variable no longer shadows
    # the 'file' builtin, and print is written in call form, which
    # behaves identically under Python 2 for these single arguments.
    for path in sys.argv[1:]:
        print(path)
        tree = parseFile(path)
        v = FutureParser()
        walk(tree, v)
        print(v.found)
        print("")
diff --git a/depot_tools/release/win/python_24/Lib/compiler/misc.py b/depot_tools/release/win/python_24/Lib/compiler/misc.py
new file mode 100644
index 0000000..6d5eaa8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/misc.py
@@ -0,0 +1,74 @@
+import types
+
def flatten(tup):
    """Recursively flatten nested tuples in *tup* into a flat list.

    Only exact tuples are expanded (``type(elt) is tuple``), matching
    the original ``types.TupleType`` comparison, which excluded tuple
    subclasses; every other element is kept as-is.
    """
    elts = []
    for elt in tup:
        if type(elt) is tuple:
            # extend() avoids the quadratic 'elts = elts + ...' rebuild.
            elts.extend(flatten(elt))
        else:
            elts.append(elt)
    return elts
+
class Set:
    """Minimal set built on a dict (predates the builtin set type).

    Elements map to themselves in *self.elts*.  The Py2-only
    ``dict.has_key`` calls are replaced with ``in``, and ``elements()``
    materializes a list so the class also works under Python 3;
    behavior under Python 2 is unchanged.
    """

    def __init__(self):
        self.elts = {}

    def __len__(self):
        return len(self.elts)

    def __contains__(self, elt):
        return elt in self.elts

    def add(self, elt):
        self.elts[elt] = elt

    def elements(self):
        # Always a list, as Py2's dict.keys() was.
        return list(self.elts.keys())

    def has_elt(self, elt):
        return elt in self.elts

    def remove(self, elt):
        del self.elts[elt]

    def copy(self):
        """Return a shallow copy of the set."""
        c = Set()
        c.elts.update(self.elts)
        return c
+
class Stack:
    """Simple list-backed LIFO stack."""
    def __init__(self):
        self.stack = []
        # Alias the list's pop so callers can call stack.pop() directly.
        self.pop = self.stack.pop
    def __len__(self):
        return len(self.stack)
    def push(self, elt):
        self.stack.append(elt)
    def top(self):
        # Peek at the most recently pushed element without removing it.
        return self.stack[-1]
    def __getitem__(self, index): # needed by visitContinue()
        return self.stack[index]
+
MANGLE_LEN = 256 # magic constant from compile.c

def mangle(name, klass):
    """Return the private-name mangling of *name* inside class *klass*.

    Mirrors compile.c: '__spam' in class 'Ham' -> '_Ham__spam'.  Names
    that are not private, are dunders, or are too long come back
    unchanged, as do names in classes consisting only of underscores.
    """
    if not name.startswith('__'):
        return name
    if len(name) + 2 >= MANGLE_LEN:
        return name
    if name.endswith('__'):
        return name
    # Strip the class name's leading underscores; an empty result means
    # the class name was empty or all underscores -- no mangling then.
    stripped = klass.lstrip('_')
    if not stripped:
        return name
    tlen = len(stripped) + len(name)
    if tlen > MANGLE_LEN:
        # Truncate the class part so the result fits in MANGLE_LEN.
        stripped = stripped[:MANGLE_LEN - tlen]
    return "_%s%s" % (stripped, name)
+
def set_filename(filename, tree):
    """Set the filename attribute to filename on every node in tree"""
    # Breadth-first walk over the whole tree.
    pending = [tree]
    while pending:
        node = pending.pop(0)
        node.filename = filename
        pending.extend(node.getChildNodes())
diff --git a/depot_tools/release/win/python_24/Lib/compiler/pyassem.py b/depot_tools/release/win/python_24/Lib/compiler/pyassem.py
new file mode 100644
index 0000000..0547eeb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/pyassem.py
@@ -0,0 +1,818 @@
+"""A flow graph representation for Python bytecode"""
+
+import dis
+import new
+import sys
+import types
+
+from compiler import misc
+from compiler.consts \
+     import CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS
+
class FlowGraph:
    """Control-flow graph of basic Blocks, built as code is emitted.

    The graph starts with an entry block (initially also the current
    block) and a distinguished exit block; emit() appends to whichever
    block is current.
    """

    def __init__(self):
        self.current = self.entry = Block()
        self.exit = Block("exit")
        self.blocks = misc.Set()
        self.blocks.add(self.entry)
        self.blocks.add(self.exit)

    def startBlock(self, block):
        # Make *block* the target of subsequent emit() calls.
        if self._debug:
            if self.current:
                print "end", repr(self.current)
                print "    next", self.current.next
                print "   ", self.current.get_children()
            print repr(block)
        self.current = block

    def nextBlock(self, block=None):
        # XXX think we need to specify when there is implicit transfer
        # from one block to the next.  might be better to represent this
        # with explicit JUMP_ABSOLUTE instructions that are optimized
        # out when they are unnecessary.
        #
        # I think this strategy works: each block has a child
        # designated as "next" which is returned as the last of the
        # children.  because the nodes in a graph are emitted in
        # reverse post order, the "next" block will always be emitted
        # immediately after its parent.
        # Worry: maintaining this invariant could be tricky
        if block is None:
            block = self.newBlock()

        # Note: If the current block ends with an unconditional
        # control transfer, then it is incorrect to add an implicit
        # transfer to the block graph.  The current code requires
        # these edges to get the blocks emitted in the right order,
        # however. :-(  If a client needs to remove these edges, call
        # pruneEdges().

        self.current.addNext(block)
        self.startBlock(block)

    def newBlock(self):
        # Create a fresh block and register it with the graph.
        b = Block()
        self.blocks.add(b)
        return b

    def startExitBlock(self):
        self.startBlock(self.exit)

    # When non-zero, block transitions and emitted instructions are
    # traced to stdout.
    _debug = 0

    def _enable_debug(self):
        self._debug = 1

    def _disable_debug(self):
        self._debug = 0

    def emit(self, *inst):
        # Append one instruction tuple to the current block, recording
        # any control-flow edge the instruction implies.
        if self._debug:
            print "\t", inst
        if inst[0] in ['RETURN_VALUE', 'YIELD_VALUE']:
            self.current.addOutEdge(self.exit)
        if len(inst) == 2 and isinstance(inst[1], Block):
            self.current.addOutEdge(inst[1])
        self.current.emit(inst)

    def getBlocksInOrder(self):
        """Return the blocks in reverse postorder

        i.e. each node appears before all of its successors
        """
        # XXX make sure every node that doesn't have an explicit next
        # is set so that next points to exit
        for b in self.blocks.elements():
            if b is self.exit:
                continue
            if not b.next:
                b.addNext(self.exit)
        order = dfs_postorder(self.entry, {})
        order.reverse()
        self.fixupOrder(order, self.exit)
        # hack alert
        if not self.exit in order:
            order.append(self.exit)

        return order

    def fixupOrder(self, blocks, default_next):
        """Fixup bad order introduced by DFS."""

        # XXX This is a total mess.  There must be a better way to get
        # the code blocks in the right order.

        self.fixupOrderHonorNext(blocks, default_next)
        self.fixupOrderForward(blocks, default_next)

    def fixupOrderHonorNext(self, blocks, default_next):
        """Fix one problem with DFS.

        The DFS uses child block, but doesn't know about the special
        "next" block.  As a result, the DFS can order blocks so that a
        block isn't next to the right block for implicit control
        transfers.
        """
        index = {}
        for i in range(len(blocks)):
            index[blocks[i]] = i

        for i in range(0, len(blocks) - 1):
            b = blocks[i]
            n = blocks[i + 1]
            if not b.next or b.next[0] == default_next or b.next[0] == n:
                continue
            # The blocks are in the wrong order.  Find the chain of
            # blocks to insert where they belong.
            cur = b
            chain = []
            elt = cur
            while elt.next and elt.next[0] != default_next:
                chain.append(elt.next[0])
                elt = elt.next[0]
            # Now remove the blocks in the chain from the current
            # block list, so that they can be re-inserted.
            l = []
            for b in chain:
                assert index[b] > i
                l.append((index[b], b))
            l.sort()
            l.reverse()
            for j, b in l:
                del blocks[index[b]]
            # Insert the chain in the proper location
            blocks[i:i + 1] = [cur] + chain
            # Finally, re-compute the block indexes
            for i in range(len(blocks)):
                index[blocks[i]] = i

    def fixupOrderForward(self, blocks, default_next):
        """Make sure all JUMP_FORWARDs jump forward"""
        # Partition the blocks into chains that must stay contiguous
        # (each chain ends at a block falling through to default_next),
        # then move whole chains until no JUMP_FORWARD targets an
        # earlier chain.
        index = {}
        chains = []
        cur = []
        for b in blocks:
            index[b] = len(chains)
            cur.append(b)
            if b.next and b.next[0] == default_next:
                chains.append(cur)
                cur = []
        chains.append(cur)

        while 1:
            constraints = []

            for i in range(len(chains)):
                l = chains[i]
                for b in l:
                    for c in b.get_children():
                        if index[c] < i:
                            forward_p = 0
                            for inst in b.insts:
                                if inst[0] == 'JUMP_FORWARD':
                                    if inst[1] == c:
                                        forward_p = 1
                            if not forward_p:
                                continue
                            constraints.append((index[c], i))

            if not constraints:
                break

            # XXX just do one for now
            # do swaps to get things in the right order
            goes_before, a_chain = constraints[0]
            assert a_chain > goes_before
            c = chains[a_chain]
            chains.remove(c)
            chains.insert(goes_before, c)

        del blocks[:]
        for c in chains:
            for b in c:
                blocks.append(b)

    def getBlocks(self):
        return self.blocks.elements()

    def getRoot(self):
        """Return nodes appropriate for use with dominator"""
        return self.entry

    def getContainedGraphs(self):
        # Collect the flow graphs of nested code objects referenced by
        # instructions in any block.
        l = []
        for b in self.getBlocks():
            l.extend(b.getContainedGraphs())
        return l
+
def dfs_postorder(b, seen):
    """Depth-first search of tree rooted at b, return in postorder

    *seen* maps already-visited blocks to themselves and is shared
    across the recursion so each block appears exactly once.
    """
    order = []
    seen[b] = b
    for c in b.get_children():
        # 'c in seen' replaces the Py2-only seen.has_key(c).
        if c in seen:
            continue
        order = order + dfs_postorder(c, seen)
    order.append(b)
    return order
+
class Block:
    """A basic block: a straight-line run of instruction tuples."""

    # Monotonic id counter shared by all Blocks.
    _count = 0

    def __init__(self, label=''):
        self.insts = []
        self.inEdges = misc.Set()
        self.outEdges = misc.Set()
        self.label = label
        self.bid = Block._count
        self.next = []
        Block._count = Block._count + 1

    def __repr__(self):
        if self.label:
            return "<block %s id=%d>" % (self.label, self.bid)
        else:
            return "<block id=%d>" % (self.bid)

    def __str__(self):
        insts = map(str, self.insts)
        return "<block %s %d:\n%s>" % (self.label, self.bid,
                                       '\n'.join(insts))

    def emit(self, inst):
        op = inst[0]
        # Any JUMP* instruction names its target block in inst[1].
        if op[:4] == 'JUMP':
            self.outEdges.add(inst[1])
        self.insts.append(inst)

    def getInstructions(self):
        return self.insts

    def addInEdge(self, block):
        self.inEdges.add(block)

    def addOutEdge(self, block):
        self.outEdges.add(block)

    def addNext(self, block):
        # A block may have at most one implicit fall-through successor.
        self.next.append(block)
        assert len(self.next) == 1, map(str, self.next)

    _uncond_transfer = ('RETURN_VALUE', 'RAISE_VARARGS', 'YIELD_VALUE',
                        'JUMP_ABSOLUTE', 'JUMP_FORWARD', 'CONTINUE_LOOP')

    def pruneNext(self):
        """Remove bogus edge for unconditional transfers

        Each block has a next edge that accounts for implicit control
        transfers, e.g. from a JUMP_IF_FALSE to the block that will be
        executed if the test is true.

        These edges must remain for the current assembler code to
        work. If they are removed, the dfs_postorder gets things in
        weird orders.  However, they shouldn't be there for other
        purposes, e.g. conversion to SSA form.  This method will
        remove the next edge when it follows an unconditional control
        transfer.
        """
        try:
            op, arg = self.insts[-1]
        except (IndexError, ValueError):
            return
        if op in self._uncond_transfer:
            self.next = []

    def get_children(self):
        # The fall-through block is reported last; drop it from
        # outEdges first so it is not listed twice.
        if self.next and self.next[0] in self.outEdges:
            self.outEdges.remove(self.next[0])
        return self.outEdges.elements() + self.next

    def getContainedGraphs(self):
        """Return all graphs contained within this block.

        For example, a MAKE_FUNCTION block will contain a reference to
        the graph for the function body.
        """
        contained = []
        for inst in self.insts:
            if len(inst) == 1:
                continue
            op = inst[1]
            if hasattr(op, 'graph'):
                contained.append(op.graph)
        return contained
+
# flags for code objects

# the FlowGraph is transformed in place; it exists in one of these states
# (PyFlowGraph.getCode drives the RAW -> FLAT -> CONV -> DONE transitions)
RAW = "RAW"
FLAT = "FLAT"
CONV = "CONV"
DONE = "DONE"
+
+class PyFlowGraph(FlowGraph):
+    super_init = FlowGraph.__init__
+
    def __init__(self, name, filename, args=(), optimized=0, klass=None):
        """Set up an empty flow graph for code object *name* in *filename*.

        args holds the parameter names (TupleArg instances for Py2
        tuple parameters).  NOTE(review): klass appears to flag
        compilation of a class body (it changes name conversion in
        _convert_LOAD_NAME/_convert_NAME) -- confirm at call sites.
        """
        self.super_init()
        self.name = name
        self.filename = filename
        self.docstring = None
        self.args = args # XXX
        self.argcount = getArgCount(args)
        self.klass = klass
        if optimized:
            self.flags = CO_OPTIMIZED | CO_NEWLOCALS
        else:
            self.flags = 0
        self.consts = []
        self.names = []
        # Free variables found by the symbol table scan, including
        # variables used only in nested scopes, are included here.
        self.freevars = []
        self.cellvars = []
        # The closure list is used to track the order of cell
        # variables and free variables in the resulting code object.
        # The offsets used by LOAD_CLOSURE/LOAD_DEREF refer to both
        # kinds of variables.
        self.closure = []
        # ('or []' is redundant: list(args) already yields a list.)
        self.varnames = list(args) or []
        for i in range(len(self.varnames)):
            var = self.varnames[i]
            if isinstance(var, TupleArg):
                self.varnames[i] = var.getName()
        self.stage = RAW
+
    def setDocstring(self, doc):
        # The stored docstring becomes consts[0] in convertArgs().
        self.docstring = doc
+
    def setFlag(self, flag):
        """OR *flag* into the code object's CO_* flag word."""
        self.flags = self.flags | flag
        # The *args parameter is excluded from argcount once
        # CO_VARARGS is set.
        if flag == CO_VARARGS:
            self.argcount = self.argcount - 1
+
    def checkFlag(self, flag):
        # Returns 1 if the flag bit is set; falls through (None) otherwise.
        if self.flags & flag:
            return 1
+
    def setFreeVars(self, names):
        # Copy so later list mutation cannot affect the caller's sequence.
        self.freevars = list(names)
+
    def setCellVars(self, names):
        # Stored as-is; sort_cellvars() later rebuilds it in varname order.
        self.cellvars = names
+
+    def getCode(self):
+        """Get a Python code object"""
+        if self.stage == RAW:
+            self.computeStackDepth()
+            self.flattenGraph()
+        if self.stage == FLAT:
+            self.convertArgs()
+        if self.stage == CONV:
+            self.makeByteCode()
+        if self.stage == DONE:
+            return self.newCodeObject()
+        raise RuntimeError, "inconsistent PyFlowGraph state"
+
    def dump(self, io=None):
        """Print a disassembly-style listing of self.insts (debug aid).

        If *io* is given, output is redirected there by temporarily
        swapping sys.stdout.
        """
        if io:
            save = sys.stdout
            sys.stdout = io
        pc = 0
        for t in self.insts:
            opname = t[0]
            if opname == "SET_LINENO":
                print
            if len(t) == 1:
                print "\t", "%3d" % pc, opname
                pc = pc + 1
            else:
                print "\t", "%3d" % pc, opname, t[1]
                pc = pc + 3
        if io:
            sys.stdout = save
+
+    def computeStackDepth(self):
+        """Compute the max stack depth.
+
+        Approach is to compute the stack effect of each basic block.
+        Then find the path through the code with the largest total
+        effect.
+        """
+        depth = {}
+        exit = None
+        for b in self.getBlocks():
+            depth[b] = findDepth(b.getInstructions())
+
+        seen = {}
+
+        def max_depth(b, d):
+            if seen.has_key(b):
+                return d
+            seen[b] = 1
+            d = d + depth[b]
+            children = b.get_children()
+            if children:
+                return max([max_depth(c, d) for c in children])
+            else:
+                if not b.label == "exit":
+                    return max_depth(self.exit, d)
+                else:
+                    return d
+
+        self.stacksize = max_depth(self.entry, 0)
+
    def flattenGraph(self):
        """Arrange the blocks in order and resolve jumps"""
        assert self.stage == RAW
        self.insts = insts = []
        pc = 0
        begin = {}
        end = {}
        # Pass 1: lay out the blocks and record each block's start/end
        # byte offset.  SET_LINENO instructions contribute no bytes here.
        for b in self.getBlocksInOrder():
            begin[b] = pc
            for inst in b.getInstructions():
                insts.append(inst)
                if len(inst) == 1:
                    pc = pc + 1
                elif inst[0] != "SET_LINENO":
                    # arg takes 2 bytes
                    pc = pc + 3
            end[b] = pc
        pc = 0
        # Pass 2: rewrite jump arguments from Block objects to relative
        # or absolute byte offsets using the table from pass 1.
        for i in range(len(insts)):
            inst = insts[i]
            if len(inst) == 1:
                pc = pc + 1
            elif inst[0] != "SET_LINENO":
                pc = pc + 3
            opname = inst[0]
            if self.hasjrel.has_elt(opname):
                oparg = inst[1]
                offset = begin[oparg] - pc
                insts[i] = opname, offset
            elif self.hasjabs.has_elt(opname):
                insts[i] = opname, begin[inst[1]]
        self.stage = FLAT
+
    # Class-level tables of the opcode names that take relative or
    # absolute jump targets, built once from the dis module.
    hasjrel = misc.Set()
    for i in dis.hasjrel:
        hasjrel.add(dis.opname[i])
    hasjabs = misc.Set()
    for i in dis.hasjabs:
        hasjabs.add(dis.opname[i])
+
+    def convertArgs(self):
+        """Convert arguments from symbolic to concrete form"""
+        assert self.stage == FLAT
+        self.consts.insert(0, self.docstring)
+        self.sort_cellvars()
+        for i in range(len(self.insts)):
+            t = self.insts[i]
+            if len(t) == 2:
+                opname, oparg = t
+                conv = self._converters.get(opname, None)
+                if conv:
+                    self.insts[i] = opname, conv(self, oparg)
+        self.stage = CONV
+
+    def sort_cellvars(self):
+        """Sort cellvars in the order of varnames and prune from freevars.
+        """
+        cells = {}
+        for name in self.cellvars:
+            cells[name] = 1
+        self.cellvars = [name for name in self.varnames
+                         if cells.has_key(name)]
+        for name in self.cellvars:
+            del cells[name]
+        self.cellvars = self.cellvars + cells.keys()
+        self.closure = self.cellvars + self.freevars
+
+    def _lookupName(self, name, list):
+        """Return index of name in list, appending if necessary
+
+        This routine uses a list instead of a dictionary, because a
+        dictionary can't store two different keys if the keys have the
+        same value but different types, e.g. 2 and 2L.  The compiler
+        must treat these two separately, so it does an explicit type
+        comparison before comparing the values.
+        """
+        t = type(name)
+        for i in range(len(list)):
+            if t == type(list[i]) and list[i] == name:
+                return i
+        end = len(list)
+        list.append(name)
+        return end
+
    # Registry mapping opcode name -> converter method; populated by the
    # class-body loop below from the _convert_* methods defined here.
    _converters = {}
    def _convert_LOAD_CONST(self, arg):
        # A nested code object is referenced via its flow-graph/generator;
        # store the generated code object in the const table instead.
        if hasattr(arg, 'getCode'):
            arg = arg.getCode()
        return self._lookupName(arg, self.consts)

    def _convert_LOAD_FAST(self, arg):
        # Register the name in names as well; the oparg indexes varnames.
        self._lookupName(arg, self.names)
        return self._lookupName(arg, self.varnames)
    _convert_STORE_FAST = _convert_LOAD_FAST
    _convert_DELETE_FAST = _convert_LOAD_FAST

    def _convert_LOAD_NAME(self, arg):
        # NOTE(review): identical to _convert_NAME below; kept separate,
        # presumably for clarity -- consider consolidating.
        if self.klass is None:
            # when not generating a class body (klass unset?) also track
            # the name in varnames -- TODO confirm klass semantics
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)

    def _convert_NAME(self, arg):
        if self.klass is None:
            self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.names)
    _convert_STORE_NAME = _convert_NAME
    _convert_DELETE_NAME = _convert_NAME
    _convert_IMPORT_NAME = _convert_NAME
    _convert_IMPORT_FROM = _convert_NAME
    _convert_STORE_ATTR = _convert_NAME
    _convert_LOAD_ATTR = _convert_NAME
    _convert_DELETE_ATTR = _convert_NAME
    _convert_LOAD_GLOBAL = _convert_NAME
    _convert_STORE_GLOBAL = _convert_NAME
    _convert_DELETE_GLOBAL = _convert_NAME

    def _convert_DEREF(self, arg):
        # Free/cell variables index the combined closure list.
        self._lookupName(arg, self.names)
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)
    _convert_LOAD_DEREF = _convert_DEREF
    _convert_STORE_DEREF = _convert_DEREF

    def _convert_LOAD_CLOSURE(self, arg):
        self._lookupName(arg, self.varnames)
        return self._lookupName(arg, self.closure)

    # COMPARE_OP's oparg is an index into dis.cmp_op.
    _cmp = list(dis.cmp_op)
    def _convert_COMPARE_OP(self, arg):
        return self._cmp.index(arg)

    # similarly for other opcodes...

    # Harvest every _convert_XXX method defined above into the
    # _converters registry, keyed by the opcode name after the prefix.
    for name, obj in locals().items():
        if name[:9] == "_convert_":
            opname = name[9:]
            _converters[opname] = obj
    del name, obj, opname
+
    def makeByteCode(self):
        """Emit the final byte string, recording line numbers in lnotab."""
        assert self.stage == CONV
        self.lnotab = lnotab = LineAddrTable()
        for t in self.insts:
            opname = t[0]
            if len(t) == 1:
                # argument-less opcode: a single byte
                lnotab.addCode(self.opnum[opname])
            else:
                oparg = t[1]
                if opname == "SET_LINENO":
                    # pseudo-instruction: goes into lnotab, not the bytecode
                    lnotab.nextLine(oparg)
                    continue
                hi, lo = twobyte(oparg)
                try:
                    lnotab.addCode(self.opnum[opname], lo, hi)
                except ValueError:
                    # chr() in addCode raises for byte values out of range;
                    # dump the offending instruction before propagating.
                    print opname, oparg
                    print self.opnum[opname], lo, hi
                    raise
        self.stage = DONE
+
    # Reverse mapping: opcode name -> numeric opcode, built once from dis.
    opnum = {}
    for num in range(len(dis.opname)):
        opnum[dis.opname[num]] = num
    del num
+
    def newCodeObject(self):
        """Construct and return the real code object via new.code()."""
        assert self.stage == DONE
        if (self.flags & CO_NEWLOCALS) == 0:
            # without CO_NEWLOCALS there are no fast locals
            nlocals = 0
        else:
            nlocals = len(self.varnames)
        argcount = self.argcount
        if self.flags & CO_VARKEYWORDS:
            # the **kwargs slot is not counted in co_argcount
            argcount = argcount - 1
        return new.code(argcount, nlocals, self.stacksize, self.flags,
                        self.lnotab.getCode(), self.getConsts(),
                        tuple(self.names), tuple(self.varnames),
                        self.filename, self.name, self.lnotab.firstline,
                        self.lnotab.getTable(), tuple(self.freevars),
                        tuple(self.cellvars))
+
+    def getConsts(self):
+        """Return a tuple for the const slot of the code object
+
+        Must convert references to code (MAKE_FUNCTION) to code
+        objects recursively.
+        """
+        l = []
+        for elt in self.consts:
+            if isinstance(elt, PyFlowGraph):
+                elt = elt.getCode()
+            l.append(elt)
+        return tuple(l)
+
def isJump(opname):
    """Return 1 for jump opcodes (name starts with 'JUMP'), else None."""
    if opname.startswith('JUMP'):
        return 1
+
class TupleArg:
    """Placeholder for a nested-tuple parameter in a function arglist."""

    def __init__(self, count, names):
        self.count = count
        self.names = names

    def __repr__(self):
        return "TupleArg(%s, %s)" % (self.count, self.names)

    def getName(self):
        """Return the synthetic '.N' name of the implicit argument slot."""
        return ".%d" % self.count
+
def getArgCount(args):
    """Return the argument count with nested-tuple args counted once.

    len(args) includes every flattened name of a TupleArg; for each
    TupleArg the number of its flattened names is subtracted again.
    (The former ``if args:`` guard was redundant -- iterating an empty
    sequence is a no-op.)
    """
    argcount = len(args)
    for arg in args:
        if isinstance(arg, TupleArg):
            argcount = argcount - len(misc.flatten(arg.names))
    return argcount
+
def twobyte(val):
    """Split an integer opcode argument into (high, low) base-256 bytes.

    isinstance() replaces the non-idiomatic ``type(val) == types.IntType``
    check (and additionally accepts bool, an int subclass).
    """
    assert isinstance(val, int), val
    return divmod(val, 256)
+
class LineAddrTable:
    """lnotab

    This class builds the lnotab, which is documented in compile.c.
    Here's a brief recap:

    For each SET_LINENO instruction after the first one, two bytes are
    added to lnotab.  (In some cases, multiple two-byte entries are
    added.)  The first byte is the distance in bytes between the
    instruction for the last SET_LINENO and the current SET_LINENO.
    The second byte is offset in line numbers.  If either offset is
    greater than 255, multiple two-byte entries are added -- see
    compile.c for the delicate details.
    """

    def __init__(self):
        self.code = []          # emitted bytecode, one chr() byte per entry
        self.codeOffset = 0     # current bytecode address
        self.firstline = 0      # line number of the first SET_LINENO
        self.lastline = 0       # line of the last SET_LINENO recorded
        self.lastoff = 0        # bytecode address of the last lnotab entry
        self.lnotab = []        # flat list of alternating addr/line deltas

    def addCode(self, *args):
        # Append raw bytes; chr() raises ValueError for values > 255.
        for arg in args:
            self.code.append(chr(arg))
        self.codeOffset = self.codeOffset + len(args)

    def nextLine(self, lineno):
        if self.firstline == 0:
            self.firstline = lineno
            self.lastline = lineno
        else:
            # compute deltas
            addr = self.codeOffset - self.lastoff
            line = lineno - self.lastline
            # Python assumes that lineno always increases with
            # increasing bytecode address (lnotab is unsigned char).
            # Depending on when SET_LINENO instructions are emitted
            # this is not always true.  Consider the code:
            #     a = (1,
            #          b)
            # In the bytecode stream, the assignment to "a" occurs
            # after the loading of "b".  This works with the C Python
            # compiler because it only generates a SET_LINENO instruction
            # for the assignment.
            if line >= 0:
                push = self.lnotab.append
                # deltas larger than one byte are split across entries
                while addr > 255:
                    push(255); push(0)
                    addr -= 255
                while line > 255:
                    push(addr); push(255)
                    line -= 255
                    addr = 0
                if addr > 0 or line > 0:
                    push(addr); push(line)
                self.lastline = lineno
                self.lastoff = self.codeOffset

    def getCode(self):
        """Return the accumulated bytecode as a single string."""
        return ''.join(self.code)

    def getTable(self):
        """Return the lnotab as a byte string."""
        return ''.join(map(chr, self.lnotab))
+
class StackDepthTracker:
    # XXX 1. need to keep track of stack depth on jumps
    # XXX 2. at least partly as a result, this code is broken

    def findDepth(self, insts, debug=0):
        """Estimate the maximum stack depth over a flat instruction list.

        The per-opcode delta is looked up, in order, from: the `effect`
        table, the `patterns` prefix list, and finally a method on this
        class named after the opcode (for opcodes whose effect depends
        on their argument).  Jumps are not followed (see XXX above).
        """
        depth = 0
        maxDepth = 0
        for i in insts:
            opname = i[0]
            if debug:
                print i,
            delta = self.effect.get(opname, None)
            if delta is not None:
                depth = depth + delta
            else:
                # now check patterns
                for pat, pat_delta in self.patterns:
                    if opname[:len(pat)] == pat:
                        delta = pat_delta
                        depth = depth + delta
                        break
                # if we still haven't found a match
                if delta is None:
                    meth = getattr(self, opname, None)
                    if meth is not None:
                        depth = depth + meth(i[1])
            if depth > maxDepth:
                maxDepth = depth
            if debug:
                print depth, maxDepth
        return maxDepth

    # Fixed stack-effect deltas for opcodes with a constant effect.
    effect = {
        'POP_TOP': -1,
        'DUP_TOP': 1,
        'SLICE+1': -1,
        'SLICE+2': -1,
        'SLICE+3': -2,
        'STORE_SLICE+0': -1,
        'STORE_SLICE+1': -2,
        'STORE_SLICE+2': -2,
        'STORE_SLICE+3': -3,
        'DELETE_SLICE+0': -1,
        'DELETE_SLICE+1': -2,
        'DELETE_SLICE+2': -2,
        'DELETE_SLICE+3': -3,
        'STORE_SUBSCR': -3,
        'DELETE_SUBSCR': -2,
        # PRINT_EXPR?
        'PRINT_ITEM': -1,
        'RETURN_VALUE': -1,
        'YIELD_VALUE': -1,
        'EXEC_STMT': -3,
        'BUILD_CLASS': -2,
        'STORE_NAME': -1,
        'STORE_ATTR': -2,
        'DELETE_ATTR': -1,
        'STORE_GLOBAL': -1,
        'BUILD_MAP': 1,
        'COMPARE_OP': -1,
        'STORE_FAST': -1,
        'IMPORT_STAR': -1,
        'IMPORT_NAME': 0,
        'IMPORT_FROM': 1,
        'LOAD_ATTR': 0, # unlike other loads
        # close enough...
        'SETUP_EXCEPT': 3,
        'SETUP_FINALLY': 3,
        'FOR_ITER': 1,
        }
    # use pattern match
    patterns = [
        ('BINARY_', -1),
        ('LOAD_', 1),
        ]

    # The methods below compute deltas for opcodes whose stack effect
    # depends on the opcode argument; findDepth dispatches by name.
    def UNPACK_SEQUENCE(self, count):
        return count-1
    def BUILD_TUPLE(self, count):
        return -count+1
    def BUILD_LIST(self, count):
        return -count+1
    def CALL_FUNCTION(self, argc):
        # low byte: positional args; high byte: keyword (name+value) pairs
        hi, lo = divmod(argc, 256)
        return -(lo + hi * 2)
    def CALL_FUNCTION_VAR(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_KW(self, argc):
        return self.CALL_FUNCTION(argc)-1
    def CALL_FUNCTION_VAR_KW(self, argc):
        return self.CALL_FUNCTION(argc)-2
    def MAKE_FUNCTION(self, argc):
        return -argc
    def MAKE_CLOSURE(self, argc):
        # XXX need to account for free variables too!
        return -argc
    def BUILD_SLICE(self, argc):
        if argc == 2:
            return -1
        elif argc == 3:
            return -2
    def DUP_TOPX(self, argc):
        return argc
+
+findDepth = StackDepthTracker().findDepth
diff --git a/depot_tools/release/win/python_24/Lib/compiler/pycodegen.py b/depot_tools/release/win/python_24/Lib/compiler/pycodegen.py
new file mode 100644
index 0000000..87558b211
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/pycodegen.py
@@ -0,0 +1,1479 @@
+import imp
+import os
+import marshal
+import struct
+import sys
+import types
+from cStringIO import StringIO
+
+from compiler import ast, parse, walk, syntax
+from compiler import pyassem, misc, future, symbols
+from compiler.consts import SC_LOCAL, SC_GLOBAL, SC_FREE, SC_CELL
+from compiler.consts import CO_VARARGS, CO_VARKEYWORDS, CO_NEWLOCALS,\
+     CO_NESTED, CO_GENERATOR, CO_GENERATOR_ALLOWED, CO_FUTURE_DIVISION
+from compiler.pyassem import TupleArg
+
# XXX The version-specific code can go, since this code only works with 2.x.
# Do we have Python 1.x or Python 2.x?
try:
    VERSION = sys.version_info[0]
except AttributeError:
    # sys.version_info was added in Python 2.0; its absence means 1.x.
    VERSION = 1
+
# Map the (uses *args, uses **kwargs) flag pair to the CALL_* opcode
# variant that must be emitted for the call.
callfunc_opcode_info = {
    # (Have *args, Have **args) : opcode
    (0,0) : "CALL_FUNCTION",
    (1,0) : "CALL_FUNCTION_VAR",
    (0,1) : "CALL_FUNCTION_KW",
    (1,1) : "CALL_FUNCTION_VAR_KW",
}
+
# Kinds of active setup blocks tracked on CodeGenerator.setups; consulted
# by visitBreak/visitContinue to pick the right unwinding opcode.
LOOP = 1
EXCEPT = 2
TRY_FINALLY = 3
END_FINALLY = 4
+
def compileFile(filename, display=0):
    """Compile the source in *filename* and write bytecode to *filename*c.

    Fixes over the original: the input and output file handles are closed
    even when an error occurs (try/finally; this file predates ``with``),
    and the no-op ``except SyntaxError: raise`` clause is gone -- errors
    simply propagate, and the output file is only created after a
    successful compile, exactly as before.
    """
    f = open(filename, 'U')   # universal-newline mode
    try:
        buf = f.read()
    finally:
        f.close()
    mod = Module(buf, filename)
    mod.compile(display)
    f = open(filename + "c", "wb")
    try:
        mod.dump(f)
    finally:
        f.close()
+
def compile(source, filename, mode, flags=None, dont_inherit=None):
    """Replacement for builtin compile() function"""
    # flags/dont_inherit mirror the builtin's signature but are unsupported
    if flags is not None or dont_inherit is not None:
        raise RuntimeError, "not implemented yet"

    # dispatch on mode exactly like the builtin compile()
    if mode == "single":
        gen = Interactive(source, filename)
    elif mode == "exec":
        gen = Module(source, filename)
    elif mode == "eval":
        gen = Expression(source, filename)
    else:
        raise ValueError("compile() 3rd arg must be 'exec' or "
                         "'eval' or 'single'")
    gen.compile()
    return gen.code
+
class AbstractCompileMode:
    """Shared machinery for the three compile modes.

    Subclasses set the class attribute `mode` ('eval', 'exec' or
    'single') and implement compile() to fill in self.code.
    """

    mode = None # defined by subclass

    def __init__(self, source, filename):
        self.source = source
        self.filename = filename
        self.code = None

    def getCode(self):
        """Return the code object produced by compile(), or None."""
        return self.code

    def compile(self):
        pass # implemented by subclass

    def _get_tree(self):
        # parse, stamp the filename onto every node, then run the
        # pre-codegen syntax checks
        tree = parse(self.source, self.mode)
        misc.set_filename(self.filename, tree)
        syntax.check(tree)
        return tree
+
class Expression(AbstractCompileMode):
    """Compile mode for a single expression (builtin eval)."""

    mode = "eval"

    def compile(self):
        tree = self._get_tree()
        self.code = ExpressionCodeGenerator(tree).getCode()
+
class Interactive(AbstractCompileMode):
    """Compile mode for a single interactive statement."""

    mode = "single"

    def compile(self):
        tree = self._get_tree()
        self.code = InteractiveCodeGenerator(tree).getCode()
+
class Module(AbstractCompileMode):
    """Compile mode for a whole module (builtin exec)."""

    mode = "exec"

    def compile(self, display=0):
        """Compile the module; if display is true, pretty-print the AST."""
        tree = self._get_tree()
        gen = ModuleCodeGenerator(tree)
        if display:
            import pprint
            # pprint.pprint() writes to stdout itself and returns None;
            # the original "print pprint.pprint(tree)" therefore printed
            # a spurious trailing "None".
            pprint.pprint(tree)
        self.code = gen.getCode()

    def dump(self, f):
        """Write a .pyc-style image (magic/mtime header + code) to f."""
        f.write(self.getPycHeader())
        marshal.dump(self.code, f)

    MAGIC = imp.get_magic()

    def getPycHeader(self):
        # compile.c uses marshal to write a long directly, with
        # calling the interface that would also generate a 1-byte code
        # to indicate the type of the value.  simplest way to get the
        # same effect is to call marshal and then skip the code.
        mtime = os.path.getmtime(self.filename)
        mtime = struct.pack('<i', mtime)
        return self.MAGIC + mtime
+
class LocalNameFinder:
    """Find local names in scope.

    A visitor that records every name bound in a scope (assignments,
    defs, classes, imports), minus any name declared global.
    """

    def __init__(self, names=()):
        self.names = misc.Set()
        self.globals = misc.Set()
        for name in names:
            self.names.add(name)

    # XXX list comprehensions and for loops

    def getLocals(self):
        """Return the bound names with declared globals removed."""
        for name in self.globals.elements():
            if self.names.has_elt(name):
                self.names.remove(name)
        return self.names

    def visitDict(self, node):
        # a dict literal binds no names
        pass

    def visitGlobal(self, node):
        for name in node.names:
            self.globals.add(name)

    def visitFunction(self, node):
        # a def statement binds the function's name
        self.names.add(node.name)

    def visitLambda(self, node):
        # a lambda binds no name in the enclosing scope
        pass

    def visitImport(self, node):
        for name, alias in node.names:
            self.names.add(alias or name)

    # 'from x import y [as z]' binds names exactly like 'import'
    visitFrom = visitImport

    def visitClass(self, node):
        self.names.add(node.name)

    def visitAssName(self, node):
        self.names.add(node.name)
+
def is_constant_false(node):
    """Return 1 when node is a Const whose value is false, else 0."""
    if isinstance(node, ast.Const) and not node.value:
        return 1
    return 0
+
class CodeGenerator:
    """Defines basic code generator for Python bytecode

    This class is an abstract base class.  Concrete subclasses must
    define an __init__() that defines self.graph and then calls the
    __init__() defined in this class.

    The concrete class must also define the class attributes
    NameFinder, FunctionGen, and ClassGen.  These attributes can be
    defined in the initClass() method, which is a hook for
    initializing these methods after all the classes have been
    defined.
    """

    optimized = 0 # is namespace access optimized?
    __initialized = None  # set to 1 on the class after initClass() runs once
    class_name = None # provide default for instance variable
+
    def __init__(self):
        # run the per-class initialization hook exactly once
        if self.__initialized is None:
            self.initClass()
            self.__class__.__initialized = 1
        self.checkClass()
        self.locals = misc.Stack()   # stack of local-name sets
        self.setups = misc.Stack()   # active (LOOP/EXCEPT/...) setup blocks
        self.last_lineno = None      # last line emitted via SET_LINENO
        self._setupGraphDelegation()
        self._div_op = "BINARY_DIVIDE"

        # XXX set flags based on future features
        futures = self.get_module().futures
        for feature in futures:
            if feature == "division":
                self.graph.setFlag(CO_FUTURE_DIVISION)
                self._div_op = "BINARY_TRUE_DIVIDE"
            elif feature == "generators":
                self.graph.setFlag(CO_GENERATOR_ALLOWED)
+
    def initClass(self):
        """This method is called once for each class"""

    def checkClass(self):
        """Verify that class is constructed correctly"""
        try:
            assert hasattr(self, 'graph')
            assert getattr(self, 'NameFinder')
            assert getattr(self, 'FunctionGen')
            assert getattr(self, 'ClassGen')
        except AssertionError, msg:
            # NOTE(review): msg is unused, and asserts are stripped under
            # python -O, which silently disables this whole check.
            intro = "Bad class construction for %s" % self.__class__.__name__
            raise AssertionError, intro
+
+    def _setupGraphDelegation(self):
+        self.emit = self.graph.emit
+        self.newBlock = self.graph.newBlock
+        self.startBlock = self.graph.startBlock
+        self.nextBlock = self.graph.nextBlock
+        self.setDocstring = self.graph.setDocstring
+
    def getCode(self):
        """Return a code object"""
        return self.graph.getCode()

    def mangle(self, name):
        # delegate to misc.mangle when generating code inside a class body
        if self.class_name is not None:
            return misc.mangle(name, self.class_name)
        else:
            return name

    def parseSymbols(self, tree):
        # run the symbol-table pass and return its node -> scope mapping
        s = symbols.SymbolVisitor()
        walk(tree, s)
        return s.scopes

    def get_module(self):
        # subclasses must return the module-level code generator
        raise RuntimeError, "should be implemented by subclasses"
+
    # Next five methods handle name access

    def isLocalName(self, name):
        return self.locals.top().has_elt(name)

    def storeName(self, name):
        self._nameOp('STORE', name)

    def loadName(self, name):
        self._nameOp('LOAD', name)

    def delName(self, name):
        self._nameOp('DELETE', name)

    def _nameOp(self, prefix, name):
        """Emit the scope-appropriate variant of a name opcode.

        prefix is 'LOAD', 'STORE' or 'DELETE'; the suffix (_NAME, _FAST,
        _GLOBAL or _DEREF) is chosen from the symbol table's scope for
        the (mangled) name.
        """
        name = self.mangle(name)
        scope = self.scope.check_name(name)
        if scope == SC_LOCAL:
            if not self.optimized:
                # unoptimized namespaces fall back to the *_NAME opcodes
                self.emit(prefix + '_NAME', name)
            else:
                self.emit(prefix + '_FAST', name)
        elif scope == SC_GLOBAL:
            if not self.optimized:
                self.emit(prefix + '_NAME', name)
            else:
                self.emit(prefix + '_GLOBAL', name)
        elif scope == SC_FREE or scope == SC_CELL:
            self.emit(prefix + '_DEREF', name)
        else:
            raise RuntimeError, "unsupported scope for var %s: %d" % \
                  (name, scope)
+
+    def _implicitNameOp(self, prefix, name):
+        """Emit name ops for names generated implicitly by for loops
+
+        The interpreter generates names that start with a period or
+        dollar sign.  The symbol table ignores these names because
+        they aren't present in the program text.
+        """
+        if self.optimized:
+            self.emit(prefix + '_FAST', name)
+        else:
+            self.emit(prefix + '_NAME', name)
+
    # The set_lineno() function and the explicit emit() calls for
    # SET_LINENO below are only used to generate the line number table.
    # As of Python 2.3, the interpreter does not have a SET_LINENO
    # instruction.  pyassem treats SET_LINENO opcodes as a special case.

    def set_lineno(self, node, force=False):
        """Emit SET_LINENO if necessary.

        The instruction is considered necessary if the node has a
        lineno attribute and it is different than the last lineno
        emitted, or if `force` is true.

        Returns true if SET_LINENO was emitted.

        There are no rules for when an AST node should have a lineno
        attribute.  The transformer and AST code need to be reviewed
        and a consistent policy implemented and documented.  Until
        then, this method works around missing line numbers.
        """
        lineno = getattr(node, 'lineno', None)
        if lineno is not None and (lineno != self.last_lineno
                                   or force):
            self.emit('SET_LINENO', lineno)
            self.last_lineno = lineno
            return True
        return False
+
    # The first few visitor methods handle nodes that generate new
    # code objects.  They use class attributes to determine what
    # specialized code generators to use.

    NameFinder = LocalNameFinder   # collects names bound in a scope
    FunctionGen = None             # set by concrete subclasses (initClass)
    ClassGen = None                # set by concrete subclasses (initClass)
+
    def visitModule(self, node):
        self.scopes = self.parseSymbols(node)
        self.scope = self.scopes[node]
        self.emit('SET_LINENO', 0)
        if node.doc:
            # the module docstring is stored as the __doc__ global
            self.emit('LOAD_CONST', node.doc)
            self.storeName('__doc__')
        lnf = walk(node.node, self.NameFinder(), verbose=0)
        self.locals.push(lnf.getLocals())
        self.visit(node.node)
        # modules implicitly return None
        self.emit('LOAD_CONST', None)
        self.emit('RETURN_VALUE')

    def visitExpression(self, node):
        self.set_lineno(node)
        self.scopes = self.parseSymbols(node)
        self.scope = self.scopes[node]
        self.visit(node.node)
        self.emit('RETURN_VALUE')

    def visitFunction(self, node):
        self._visitFuncOrLambda(node, isLambda=0)
        if node.doc:
            self.setDocstring(node.doc)
        self.storeName(node.name)

    def visitLambda(self, node):
        # like a function, but anonymous: no name is stored
        self._visitFuncOrLambda(node, isLambda=1)
+
    def _visitFuncOrLambda(self, node, isLambda=0):
        """Emit code that leaves the new function object on the stack.

        Emission order matters: decorator expressions first, then default
        values, then the code object, which MAKE_FUNCTION / MAKE_CLOSURE
        consumes together with the defaults (and closure cells, if any);
        finally each decorator is applied with CALL_FUNCTION 1.
        """
        if not isLambda and node.decorators:
            for decorator in node.decorators.nodes:
                self.visit(decorator)
            ndecorators = len(node.decorators.nodes)
        else:
            ndecorators = 0

        gen = self.FunctionGen(node, self.scopes, isLambda,
                               self.class_name, self.get_module())
        walk(node.code, gen)
        gen.finish()
        self.set_lineno(node)
        for default in node.defaults:
            self.visit(default)
        frees = gen.scope.get_free_vars()
        if frees:
            # a function with free variables needs its cells on the stack
            for name in frees:
                self.emit('LOAD_CLOSURE', name)
            self.emit('LOAD_CONST', gen)
            self.emit('MAKE_CLOSURE', len(node.defaults))
        else:
            self.emit('LOAD_CONST', gen)
            self.emit('MAKE_FUNCTION', len(node.defaults))

        for i in range(ndecorators):
            self.emit('CALL_FUNCTION', 1)
+
    def visitClass(self, node):
        """Emit a class statement: push the name, the bases tuple and the
        body's function object, call it, then BUILD_CLASS and store."""
        gen = self.ClassGen(node, self.scopes,
                            self.get_module())
        walk(node.code, gen)
        gen.finish()
        self.set_lineno(node)
        self.emit('LOAD_CONST', node.name)
        for base in node.bases:
            self.visit(base)
        self.emit('BUILD_TUPLE', len(node.bases))
        frees = gen.scope.get_free_vars()
        for name in frees:
            self.emit('LOAD_CLOSURE', name)
        self.emit('LOAD_CONST', gen)
        if frees:
            self.emit('MAKE_CLOSURE', 0)
        else:
            self.emit('MAKE_FUNCTION', 0)
        self.emit('CALL_FUNCTION', 0)
        self.emit('BUILD_CLASS')
        self.storeName(node.name)
+
    # The rest are standard visitor methods

    # The next few implement control-flow statements

    def visitIf(self, node):
        end = self.newBlock()
        numtests = len(node.tests)
        for i in range(numtests):
            test, suite = node.tests[i]
            if is_constant_false(test):
                # XXX will need to check generator stuff here
                continue
            self.set_lineno(test)
            self.visit(test)
            nextTest = self.newBlock()
            self.emit('JUMP_IF_FALSE', nextTest)
            self.nextBlock()
            # JUMP_IF_* leaves the tested value on the stack; pop it
            self.emit('POP_TOP')
            self.visit(suite)
            self.emit('JUMP_FORWARD', end)
            self.startBlock(nextTest)
            self.emit('POP_TOP')
        if node.else_:
            self.visit(node.else_)
        self.nextBlock(end)

    def visitWhile(self, node):
        self.set_lineno(node)

        loop = self.newBlock()
        else_ = self.newBlock()

        after = self.newBlock()
        self.emit('SETUP_LOOP', after)

        self.nextBlock(loop)
        self.setups.push((LOOP, loop))

        self.set_lineno(node, force=True)
        self.visit(node.test)
        # NOTE(review): else_ is a freshly created block object, so
        # "else_ or after" presumably always yields else_ and the
        # "or after" is dead -- confirm Block truthiness in pyassem.
        self.emit('JUMP_IF_FALSE', else_ or after)

        self.nextBlock()
        self.emit('POP_TOP')
        self.visit(node.body)
        self.emit('JUMP_ABSOLUTE', loop)

        self.startBlock(else_) # or just the POPs if not else clause
        self.emit('POP_TOP')
        self.emit('POP_BLOCK')
        self.setups.pop()
        if node.else_:
            self.visit(node.else_)
        self.nextBlock(after)
+
    def visitFor(self, node):
        start = self.newBlock()
        anchor = self.newBlock()   # FOR_ITER jumps here when exhausted
        after = self.newBlock()
        self.setups.push((LOOP, start))

        self.set_lineno(node)
        self.emit('SETUP_LOOP', after)
        self.visit(node.list)
        self.emit('GET_ITER')

        self.nextBlock(start)
        self.set_lineno(node, force=1)
        self.emit('FOR_ITER', anchor)
        self.visit(node.assign)
        self.visit(node.body)
        self.emit('JUMP_ABSOLUTE', start)
        self.nextBlock(anchor)
        self.emit('POP_BLOCK')
        self.setups.pop()
        if node.else_:
            self.visit(node.else_)
        self.nextBlock(after)
+
    def visitBreak(self, node):
        if not self.setups:
            # no enclosing SETUP_LOOP / SETUP_EXCEPT block at all
            raise SyntaxError, "'break' outside loop (%s, %d)" % \
                  (node.filename, node.lineno)
        self.set_lineno(node)
        self.emit('BREAK_LOOP')

    def visitContinue(self, node):
        if not self.setups:
            raise SyntaxError, "'continue' outside loop (%s, %d)" % \
                  (node.filename, node.lineno)
        kind, block = self.setups.top()
        if kind == LOOP:
            # plain continue: jump straight back to the loop head
            self.set_lineno(node)
            self.emit('JUMP_ABSOLUTE', block)
            self.nextBlock()
        elif kind == EXCEPT or kind == TRY_FINALLY:
            self.set_lineno(node)
            # find the block that starts the loop
            top = len(self.setups)
            while top > 0:
                top = top - 1
                kind, loop_block = self.setups[top]
                if kind == LOOP:
                    break
            if kind != LOOP:
                raise SyntaxError, "'continue' outside loop (%s, %d)" % \
                      (node.filename, node.lineno)
            # inside a try block the interpreter must unwind first
            self.emit('CONTINUE_LOOP', loop_block)
            self.nextBlock()
        elif kind == END_FINALLY:
            msg = "'continue' not allowed inside 'finally' clause (%s, %d)"
            raise SyntaxError, msg % (node.filename, node.lineno)
+
    def visitTest(self, node, jump):
        """Shared emitter for 'and'/'or' with short-circuit opcode `jump`.

        Each operand but the last is evaluated and, if it decides the
        result, control jumps to `end` with that value still on the
        stack; otherwise the value is popped and the next operand runs.
        """
        end = self.newBlock()
        for child in node.nodes[:-1]:
            self.visit(child)
            self.emit(jump, end)
            self.nextBlock()
            self.emit('POP_TOP')
        self.visit(node.nodes[-1])
        self.nextBlock(end)

    def visitAnd(self, node):
        self.visitTest(node, 'JUMP_IF_FALSE')

    def visitOr(self, node):
        self.visitTest(node, 'JUMP_IF_TRUE')

    def visitCompare(self, node):
        """Emit a (possibly chained) comparison such as a < b < c.

        Each intermediate operand is duplicated (DUP_TOP/ROT_THREE) so it
        can be the left side of the next comparison; `cleanup` rotates and
        pops when an earlier link in the chain fails.
        """
        self.visit(node.expr)
        cleanup = self.newBlock()
        for op, code in node.ops[:-1]:
            self.visit(code)
            self.emit('DUP_TOP')
            self.emit('ROT_THREE')
            self.emit('COMPARE_OP', op)
            self.emit('JUMP_IF_FALSE', cleanup)
            self.nextBlock()
            self.emit('POP_TOP')
        # now do the last comparison
        if node.ops:
            op, code = node.ops[-1]
            self.visit(code)
            self.emit('COMPARE_OP', op)
        if len(node.ops) > 1:
            end = self.newBlock()
            self.emit('JUMP_FORWARD', end)
            self.startBlock(cleanup)
            self.emit('ROT_TWO')
            self.emit('POP_TOP')
            self.nextBlock(end)
+
    # list comprehensions
    __list_count = 0   # nesting counter; makes the $append names unique

    def visitListComp(self, node):
        """Emit a list comprehension.

        The result list and its (hidden) bound append method are built up
        front; each qualifier opens a loop, the expression is appended in
        the innermost one, and the loops are closed in reverse order.
        """
        self.set_lineno(node)
        # setup list
        append = "$append%d" % self.__list_count
        self.__list_count = self.__list_count + 1
        self.emit('BUILD_LIST', 0)
        self.emit('DUP_TOP')
        self.emit('LOAD_ATTR', 'append')
        self._implicitNameOp('STORE', append)

        stack = []
        for i, for_ in zip(range(len(node.quals)), node.quals):
            start, anchor = self.visit(for_)
            cont = None
            for if_ in for_.ifs:
                if cont is None:
                    cont = self.newBlock()
                self.visit(if_, cont)
            # innermost loop ends up first, so loops close inside-out
            stack.insert(0, (start, cont, anchor))

        self._implicitNameOp('LOAD', append)
        self.visit(node.expr)
        self.emit('CALL_FUNCTION', 1)
        self.emit('POP_TOP')

        for start, cont, anchor in stack:
            if cont:
                # failed 'if' tests jump here with the value still pushed
                skip_one = self.newBlock()
                self.emit('JUMP_FORWARD', skip_one)
                self.startBlock(cont)
                self.emit('POP_TOP')
                self.nextBlock(skip_one)
            self.emit('JUMP_ABSOLUTE', start)
            self.startBlock(anchor)
        self._implicitNameOp('DELETE', append)

        self.__list_count = self.__list_count - 1

    def visitListCompFor(self, node):
        # returns (loop head, loop exit) for visitListComp to close
        start = self.newBlock()
        anchor = self.newBlock()

        self.visit(node.list)
        self.emit('GET_ITER')
        self.nextBlock(start)
        self.set_lineno(node, force=True)
        self.emit('FOR_ITER', anchor)
        self.nextBlock()
        self.visit(node.assign)
        return start, anchor

    def visitListCompIf(self, node, branch):
        self.set_lineno(node, force=True)
        self.visit(node.test)
        self.emit('JUMP_IF_FALSE', branch)
        self.newBlock()
        self.emit('POP_TOP')
+
+    def visitGenExpr(self, node):
+        gen = GenExprCodeGenerator(node, self.scopes, self.class_name,
+                                   self.get_module())
+        walk(node.code, gen)
+        gen.finish()
+        self.set_lineno(node)
+        frees = gen.scope.get_free_vars()
+        if frees:
+            for name in frees:
+                self.emit('LOAD_CLOSURE', name)
+            self.emit('LOAD_CONST', gen)
+            self.emit('MAKE_CLOSURE', 0)
+        else:
+            self.emit('LOAD_CONST', gen)
+            self.emit('MAKE_FUNCTION', 0)
+
+        # precomputation of outmost iterable
+        self.visit(node.code.quals[0].iter)
+        self.emit('GET_ITER')
+        self.emit('CALL_FUNCTION', 1)
+
    def visitGenExprInner(self, node):
        """Emit the body of a generator expression's code object.

        Opens the nested for/if machinery for each qualifier, yields the
        expression value, then closes the loops innermost-first.
        """
        self.set_lineno(node)
        # setup list

        stack = []
        for i, for_ in zip(range(len(node.quals)), node.quals):
            start, anchor = self.visit(for_)
            cont = None
            for if_ in for_.ifs:
                if cont is None:
                    cont = self.newBlock()
                self.visit(if_, cont)
            # insert at the front so the loops close innermost-first below
            stack.insert(0, (start, cont, anchor))

        self.visit(node.expr)
        self.emit('YIELD_VALUE')

        for start, cont, anchor in stack:
            if cont:
                skip_one = self.newBlock()
                self.emit('JUMP_FORWARD', skip_one)
                self.startBlock(cont)
                self.emit('POP_TOP')  # discard the failed test value
                self.nextBlock(skip_one)
            self.emit('JUMP_ABSOLUTE', start)
            self.startBlock(anchor)
        self.emit('LOAD_CONST', None)
+
    def visitGenExprFor(self, node):
        """Emit the loop head for one genexp 'for' clause.

        The outermost iterable arrives as the hidden argument
        '[outmost-iterable]'; inner iterables are evaluated here.
        Returns (start, anchor) blocks for the caller to close the loop.
        """
        start = self.newBlock()
        anchor = self.newBlock()

        if node.is_outmost:
            self.loadName('[outmost-iterable]')
        else:
            self.visit(node.iter)
            self.emit('GET_ITER')

        self.nextBlock(start)
        self.set_lineno(node, force=True)
        self.emit('FOR_ITER', anchor)
        self.nextBlock()
        self.visit(node.assign)
        return start, anchor
+
    def visitGenExprIf(self, node, branch):
        """Emit the test for a genexp 'if' clause; jump to branch when false."""
        self.set_lineno(node, force=True)
        self.visit(node.test)
        self.emit('JUMP_IF_FALSE', branch)
        self.newBlock()
        self.emit('POP_TOP')
+
    # exception related

    def visitAssert(self, node):
        """Compile an assert statement.

        Note: __debug__ is tested when the *compiler itself* runs, not in
        the generated code, so compiling under -O drops asserts entirely.
        """
        # XXX would be interesting to implement this via a
        # transformation of the AST before this stage
        if __debug__:
            end = self.newBlock()
            self.set_lineno(node)
            # XXX AssertionError appears to be special case -- it is always
            # loaded as a global even if there is a local name.  I guess this
            # is a sort of renaming op.
            self.nextBlock()
            self.visit(node.test)
            self.emit('JUMP_IF_TRUE', end)
            self.nextBlock()
            self.emit('POP_TOP')  # discard the (false) test value
            self.emit('LOAD_GLOBAL', 'AssertionError')
            if node.fail:
                # assert test, msg  ->  raise AssertionError, msg
                self.visit(node.fail)
                self.emit('RAISE_VARARGS', 2)
            else:
                self.emit('RAISE_VARARGS', 1)
            self.nextBlock(end)
            self.emit('POP_TOP')  # discard the (true) test value
+
+    def visitRaise(self, node):
+        self.set_lineno(node)
+        n = 0
+        if node.expr1:
+            self.visit(node.expr1)
+            n = n + 1
+        if node.expr2:
+            self.visit(node.expr2)
+            n = n + 1
+        if node.expr3:
+            self.visit(node.expr3)
+            n = n + 1
+        self.emit('RAISE_VARARGS', n)
+
    def visitTryExcept(self, node):
        """Compile try/except[/else].

        Emits the body under SETUP_EXCEPT, then a chain of handler tests.
        On entry to the handler chain the stack holds the exception type,
        value and traceback (type on top); each handler compares against
        its expression and pops all three before running its body.
        """
        body = self.newBlock()
        handlers = self.newBlock()
        end = self.newBlock()
        if node.else_:
            lElse = self.newBlock()
        else:
            lElse = end
        self.set_lineno(node)
        self.emit('SETUP_EXCEPT', handlers)
        self.nextBlock(body)
        self.setups.push((EXCEPT, body))
        self.visit(node.body)
        self.emit('POP_BLOCK')
        self.setups.pop()
        self.emit('JUMP_FORWARD', lElse)
        self.startBlock(handlers)

        last = len(node.handlers) - 1
        for i in range(len(node.handlers)):
            expr, target, body = node.handlers[i]
            # NOTE(review): expr is None for a bare 'except:'; assumes
            # set_lineno tolerates None -- confirm against set_lineno
            self.set_lineno(expr)
            if expr:
                self.emit('DUP_TOP')  # keep the exception type for later handlers
                self.visit(expr)
                self.emit('COMPARE_OP', 'exception match')
                next = self.newBlock()
                self.emit('JUMP_IF_FALSE', next)
                self.nextBlock()
                self.emit('POP_TOP')  # discard the comparison result
            self.emit('POP_TOP')  # exception type
            if target:
                self.visit(target)  # bind the exception value to the target
            else:
                self.emit('POP_TOP')  # exception value (unused)
            self.emit('POP_TOP')  # traceback
            self.visit(body)
            self.emit('JUMP_FORWARD', end)
            if expr:
                self.nextBlock(next)
            else:
                self.nextBlock()
            if expr: # XXX
                self.emit('POP_TOP')  # comparison result on the no-match path
        self.emit('END_FINALLY')  # re-raise if no handler matched
        if node.else_:
            self.nextBlock(lElse)
            self.visit(node.else_)
        self.nextBlock(end)
+
    def visitTryFinally(self, node):
        """Compile try/finally.

        The LOAD_CONST None before the finally block marks normal
        completion; END_FINALLY re-raises if an exception is pending.
        """
        body = self.newBlock()
        final = self.newBlock()
        self.set_lineno(node)
        self.emit('SETUP_FINALLY', final)
        self.nextBlock(body)
        self.setups.push((TRY_FINALLY, body))
        self.visit(node.body)
        self.emit('POP_BLOCK')
        self.setups.pop()
        self.emit('LOAD_CONST', None)
        self.nextBlock(final)
        self.setups.push((END_FINALLY, final))
        self.visit(node.final)
        self.emit('END_FINALLY')
        self.setups.pop()
+
    # misc

    def visitDiscard(self, node):
        """Compile an expression statement: evaluate and discard the value."""
        self.set_lineno(node)
        self.visit(node.expr)
        self.emit('POP_TOP')
+
    def visitConst(self, node):
        # Literal constant: just push it.
        self.emit('LOAD_CONST', node.value)

    def visitKeyword(self, node):
        # Keyword argument: push the name (as a constant), then the value.
        self.emit('LOAD_CONST', node.name)
        self.visit(node.expr)

    def visitGlobal(self, node):
        # no code to generate -- 'global' only affects the symbol table
        pass

    def visitName(self, node):
        # Name reference; loadName picks the right opcode for the scope.
        self.set_lineno(node)
        self.loadName(node.name)

    def visitPass(self, node):
        # Only record the line number; 'pass' emits no instructions.
        self.set_lineno(node)
+
    def visitImport(self, node):
        """Compile 'import a.b [as c], ...'."""
        self.set_lineno(node)
        for name, alias in node.names:
            if VERSION > 1:
                # IMPORT_NAME expects a fromlist (None here) on the stack
                self.emit('LOAD_CONST', None)
            self.emit('IMPORT_NAME', name)
            mod = name.split(".")[0]
            if alias:
                # 'import a.b as c' binds c to a.b, so walk the dots
                self._resolveDots(name)
                self.storeName(alias)
            else:
                # plain 'import a.b' binds only the top-level package 'a'
                self.storeName(mod)
+
+    def visitFrom(self, node):
+        self.set_lineno(node)
+        fromlist = map(lambda (name, alias): name, node.names)
+        if VERSION > 1:
+            self.emit('LOAD_CONST', tuple(fromlist))
+        self.emit('IMPORT_NAME', node.modname)
+        for name, alias in node.names:
+            if VERSION > 1:
+                if name == '*':
+                    self.namespace = 0
+                    self.emit('IMPORT_STAR')
+                    # There can only be one name w/ from ... import *
+                    assert len(node.names) == 1
+                    return
+                else:
+                    self.emit('IMPORT_FROM', name)
+                    self._resolveDots(name)
+                    self.storeName(alias or name)
+            else:
+                self.emit('IMPORT_FROM', name)
+        self.emit('POP_TOP')
+
+    def _resolveDots(self, name):
+        elts = name.split(".")
+        if len(elts) == 1:
+            return
+        for elt in elts[1:]:
+            self.emit('LOAD_ATTR', elt)
+
    def visitGetattr(self, node):
        # Attribute access: evaluate the object, then LOAD_ATTR (with
        # private-name mangling applied inside class bodies).
        self.visit(node.expr)
        self.emit('LOAD_ATTR', self.mangle(node.attrname))
+
    # next five implement assignments

    def visitAssign(self, node):
        """Compile assignment, including chained 'a = b = expr'."""
        self.set_lineno(node)
        self.visit(node.expr)
        dups = len(node.nodes) - 1
        for i in range(len(node.nodes)):
            elt = node.nodes[i]
            if i < dups:
                # duplicate the value for every target but the last
                self.emit('DUP_TOP')
            if isinstance(elt, ast.Node):
                self.visit(elt)
+
    def visitAssName(self, node):
        """Store or delete a plain name, depending on node.flags."""
        if node.flags == 'OP_ASSIGN':
            self.storeName(node.name)
        elif node.flags == 'OP_DELETE':
            self.set_lineno(node)
            self.delName(node.name)
        else:
            # unexpected flag value -- should not happen for a valid AST
            print "oops", node.flags
+
    def visitAssAttr(self, node):
        """Store or delete an attribute (obj.attr = ... / del obj.attr)."""
        self.visit(node.expr)
        if node.flags == 'OP_ASSIGN':
            self.emit('STORE_ATTR', self.mangle(node.attrname))
        elif node.flags == 'OP_DELETE':
            self.emit('DELETE_ATTR', self.mangle(node.attrname))
        else:
            print "warning: unexpected flags:", node.flags
            print node
+
    def _visitAssSequence(self, node, op='UNPACK_SEQUENCE'):
        """Unpack TOS into the sequence of targets (tuple/list assignment)."""
        if findOp(node) != 'OP_DELETE':
            # deletion has no value on the stack, so nothing to unpack
            self.emit(op, len(node.nodes))
        for child in node.nodes:
            self.visit(child)

    if VERSION > 1:
        visitAssTuple = _visitAssSequence
        visitAssList = _visitAssSequence
    else:
        # pre-2.0 bytecode had distinct unpack opcodes per sequence type
        def visitAssTuple(self, node):
            self._visitAssSequence(node, 'UNPACK_TUPLE')

        def visitAssList(self, node):
            self._visitAssSequence(node, 'UNPACK_LIST')
+
    # augmented assignment

    def visitAugAssign(self, node):
        """Compile 'target op= expr' via the Aug* delegator wrappers.

        The target is visited twice: once in "load" mode to push its
        current value, once in "store" mode to write the result back.
        """
        self.set_lineno(node)
        aug_node = wrap_aug(node.node)
        self.visit(aug_node, "load")
        self.visit(node.expr)
        self.emit(self._augmented_opcode[node.op])
        self.visit(aug_node, "store")
+
    # maps augmented-assignment operator text -> in-place bytecode name
    _augmented_opcode = {
        '+=' : 'INPLACE_ADD',
        '-=' : 'INPLACE_SUBTRACT',
        '*=' : 'INPLACE_MULTIPLY',
        '/=' : 'INPLACE_DIVIDE',
        '//=': 'INPLACE_FLOOR_DIVIDE',
        '%=' : 'INPLACE_MODULO',
        '**=': 'INPLACE_POWER',
        '>>=': 'INPLACE_RSHIFT',
        '<<=': 'INPLACE_LSHIFT',
        '&=' : 'INPLACE_AND',
        '^=' : 'INPLACE_XOR',
        '|=' : 'INPLACE_OR',
        }
+
+    def visitAugName(self, node, mode):
+        if mode == "load":
+            self.loadName(node.name)
+        elif mode == "store":
+            self.storeName(node.name)
+
    def visitAugGetattr(self, node, mode):
        """obj.attr target in aug-assign.

        "load" keeps obj on the stack (DUP_TOP) so that "store" can
        rotate it back under the computed value and STORE_ATTR into it.
        """
        if mode == "load":
            self.visit(node.expr)
            self.emit('DUP_TOP')
            self.emit('LOAD_ATTR', self.mangle(node.attrname))
        elif mode == "store":
            self.emit('ROT_TWO')
            self.emit('STORE_ATTR', self.mangle(node.attrname))
+
    def visitAugSlice(self, node, mode):
        """Slice target in aug-assign.

        slice bits: 1 = lower bound present, 2 = upper bound present.
        The ROT_* depth matches how many operands visitSlice duplicated
        during the load phase.
        """
        if mode == "load":
            self.visitSlice(node, 1)
        elif mode == "store":
            slice = 0
            if node.lower:
                slice = slice | 1
            if node.upper:
                slice = slice | 2
            if slice == 0:
                self.emit('ROT_TWO')
            elif slice == 3:
                self.emit('ROT_FOUR')
            else:
                self.emit('ROT_THREE')
            self.emit('STORE_SLICE+%d' % slice)
+
+    def visitAugSubscript(self, node, mode):
+        if len(node.subs) > 1:
+            raise SyntaxError, "augmented assignment to tuple is not possible"
+        if mode == "load":
+            self.visitSubscript(node, 1)
+        elif mode == "store":
+            self.emit('ROT_THREE')
+            self.emit('STORE_SUBSCR')
+
    def visitExec(self, node):
        """Compile 'exec expr [in dict1 [, dict2]]'.

        EXEC_STMT consumes the code object and two mappings; when a
        mapping is omitted None is pushed, and DUP_TOP reuses the single
        supplied mapping for both roles.
        """
        self.visit(node.expr)
        if node.locals is None:
            self.emit('LOAD_CONST', None)
        else:
            self.visit(node.locals)
        if node.globals is None:
            self.emit('DUP_TOP')
        else:
            self.visit(node.globals)
        self.emit('EXEC_STMT')
+
    def visitCallFunc(self, node):
        """Compile a call: callee, positional args, keyword args, then
        *args/**kwargs if present.

        The opcode argument packs the counts as (kw << 8) | pos, and the
        exact CALL_FUNCTION variant is chosen by */** presence.
        """
        pos = 0
        kw = 0
        self.set_lineno(node)
        self.visit(node.node)
        for arg in node.args:
            self.visit(arg)
            if isinstance(arg, ast.Keyword):
                kw = kw + 1
            else:
                pos = pos + 1
        if node.star_args is not None:
            self.visit(node.star_args)
        if node.dstar_args is not None:
            self.visit(node.dstar_args)
        have_star = node.star_args is not None
        have_dstar = node.dstar_args is not None
        opcode = callfunc_opcode_info[have_star, have_dstar]
        self.emit(opcode, kw << 8 | pos)
+
    def visitPrint(self, node, newline=0):
        """Compile 'print [>>dest,] items...'.

        visitPrintnl calls this with newline=1 and appends the newline
        opcode itself.
        """
        self.set_lineno(node)
        if node.dest:
            self.visit(node.dest)
        for child in node.nodes:
            if node.dest:
                # keep the destination file on the stack for the next item
                self.emit('DUP_TOP')
            self.visit(child)
            if node.dest:
                self.emit('ROT_TWO')
                self.emit('PRINT_ITEM_TO')
            else:
                self.emit('PRINT_ITEM')
        if node.dest and not newline:
            # no PRINT_NEWLINE_TO coming, so drop the leftover destination
            self.emit('POP_TOP')
+
    def visitPrintnl(self, node):
        """Compile a print statement that ends with a newline."""
        self.visitPrint(node, newline=1)
        if node.dest:
            self.emit('PRINT_NEWLINE_TO')
        else:
            self.emit('PRINT_NEWLINE')
+
    def visitReturn(self, node):
        # node.value is always visited; a bare 'return' presumably arrives
        # with a Const(None) value from the transformer -- confirm there.
        self.set_lineno(node)
        self.visit(node.value)
        self.emit('RETURN_VALUE')

    def visitYield(self, node):
        # Push the yielded value and suspend the generator.
        self.set_lineno(node)
        self.visit(node.value)
        self.emit('YIELD_VALUE')
+
    # slice and subscript stuff

    def visitSlice(self, node, aug_flag=None):
        """Compile simple slicing obj[lower:upper] for load/store/delete.

        slice bits: 1 = lower present, 2 = upper present.  aug_flag (set
        by visitAugSlice) duplicates the operands so the store phase of
        an augmented assignment can reuse them.
        """
        # aug_flag is used by visitAugSlice
        self.visit(node.expr)
        slice = 0
        if node.lower:
            self.visit(node.lower)
            slice = slice | 1
        if node.upper:
            self.visit(node.upper)
            slice = slice | 2
        if aug_flag:
            if slice == 0:
                self.emit('DUP_TOP')
            elif slice == 3:
                self.emit('DUP_TOPX', 3)
            else:
                self.emit('DUP_TOPX', 2)
        if node.flags == 'OP_APPLY':
            self.emit('SLICE+%d' % slice)
        elif node.flags == 'OP_ASSIGN':
            self.emit('STORE_SLICE+%d' % slice)
        elif node.flags == 'OP_DELETE':
            self.emit('DELETE_SLICE+%d' % slice)
        else:
            print "weird slice", node.flags
            # NOTE(review): bare 'raise' with no active exception fails
            # itself (TypeError); an explicit exception would be clearer
            raise
+
    def visitSubscript(self, node, aug_flag=None):
        """Compile obj[sub] / obj[s1, s2, ...] for load, store, or delete."""
        self.visit(node.expr)
        for sub in node.subs:
            self.visit(sub)
        if aug_flag:
            # duplicate obj and sub for the store phase of aug-assign
            self.emit('DUP_TOPX', 2)
        if len(node.subs) > 1:
            # multiple subscripts collapse into a tuple key
            self.emit('BUILD_TUPLE', len(node.subs))
        if node.flags == 'OP_APPLY':
            self.emit('BINARY_SUBSCR')
        elif node.flags == 'OP_ASSIGN':
            self.emit('STORE_SUBSCR')
        elif node.flags == 'OP_DELETE':
            self.emit('DELETE_SUBSCR')
+
    # binary ops

    def binaryOp(self, node, op):
        # left operand, right operand, then the binary opcode
        self.visit(node.left)
        self.visit(node.right)
        self.emit(op)

    def visitAdd(self, node):
        return self.binaryOp(node, 'BINARY_ADD')

    def visitSub(self, node):
        return self.binaryOp(node, 'BINARY_SUBTRACT')

    def visitMul(self, node):
        return self.binaryOp(node, 'BINARY_MULTIPLY')

    def visitDiv(self, node):
        # _div_op reflects whether 'from __future__ import division' applies
        return self.binaryOp(node, self._div_op)

    def visitFloorDiv(self, node):
        return self.binaryOp(node, 'BINARY_FLOOR_DIVIDE')

    def visitMod(self, node):
        return self.binaryOp(node, 'BINARY_MODULO')

    def visitPower(self, node):
        return self.binaryOp(node, 'BINARY_POWER')

    def visitLeftShift(self, node):
        return self.binaryOp(node, 'BINARY_LSHIFT')

    def visitRightShift(self, node):
        return self.binaryOp(node, 'BINARY_RSHIFT')
+
    # unary ops

    def unaryOp(self, node, op):
        # operand, then the unary opcode
        self.visit(node.expr)
        self.emit(op)

    def visitInvert(self, node):
        return self.unaryOp(node, 'UNARY_INVERT')

    def visitUnarySub(self, node):
        return self.unaryOp(node, 'UNARY_NEGATIVE')

    def visitUnaryAdd(self, node):
        return self.unaryOp(node, 'UNARY_POSITIVE')

    def visitUnaryInvert(self, node):
        return self.unaryOp(node, 'UNARY_INVERT')

    def visitNot(self, node):
        return self.unaryOp(node, 'UNARY_NOT')

    def visitBackquote(self, node):
        # `expr` -> repr(expr)
        return self.unaryOp(node, 'UNARY_CONVERT')
+
    # bit ops

    def bitOp(self, nodes, op):
        # n-ary AST node: fold left-to-right, emitting op after each operand
        self.visit(nodes[0])
        for node in nodes[1:]:
            self.visit(node)
            self.emit(op)

    def visitBitand(self, node):
        return self.bitOp(node.nodes, 'BINARY_AND')

    def visitBitor(self, node):
        return self.bitOp(node.nodes, 'BINARY_OR')

    def visitBitxor(self, node):
        return self.bitOp(node.nodes, 'BINARY_XOR')
+
    # object constructors

    def visitEllipsis(self, node):
        self.emit('LOAD_CONST', Ellipsis)

    def visitTuple(self, node):
        """Build a tuple literal: push elements, then BUILD_TUPLE."""
        self.set_lineno(node)
        for elt in node.nodes:
            self.visit(elt)
        self.emit('BUILD_TUPLE', len(node.nodes))

    def visitList(self, node):
        """Build a list literal: push elements, then BUILD_LIST."""
        self.set_lineno(node)
        for elt in node.nodes:
            self.visit(elt)
        self.emit('BUILD_LIST', len(node.nodes))

    def visitSliceobj(self, node):
        # extended slice object for obj[a:b:c]
        for child in node.nodes:
            self.visit(child)
        self.emit('BUILD_SLICE', len(node.nodes))

    def visitDict(self, node):
        """Build a dict literal: BUILD_MAP, then one STORE_SUBSCR per item."""
        self.set_lineno(node)
        self.emit('BUILD_MAP', 0)
        for k, v in node.items:
            self.emit('DUP_TOP')  # keep the dict on the stack for the next item
            self.visit(k)
            self.visit(v)
            self.emit('ROT_THREE')
            self.emit('STORE_SUBSCR')
+
class NestedScopeMixin:
    """Defines initClass() for nested scoping (Python 2.2-compatible)"""
    def initClass(self):
        # install the nested-scope-aware collaborators on the concrete class
        self.__class__.NameFinder = LocalNameFinder
        self.__class__.FunctionGen = FunctionCodeGenerator
        self.__class__.ClassGen = ClassCodeGenerator
+
class ModuleCodeGenerator(NestedScopeMixin, CodeGenerator):
    """Generates the code object for a whole module."""
    __super_init = CodeGenerator.__init__

    scopes = None  # class-level default; presumably filled in elsewhere -- confirm

    def __init__(self, tree):
        self.graph = pyassem.PyFlowGraph("<module>", tree.filename)
        # __future__ statements must be known before the body is walked
        self.futures = future.find_futures(tree)
        self.__super_init()
        walk(tree, self)

    def get_module(self):
        # a module is its own module
        return self
+
class ExpressionCodeGenerator(NestedScopeMixin, CodeGenerator):
    """Generates the code object for a single expression (eval mode)."""
    __super_init = CodeGenerator.__init__

    scopes = None
    futures = ()  # eval-mode input cannot contain __future__ statements

    def __init__(self, tree):
        self.graph = pyassem.PyFlowGraph("<expression>", tree.filename)
        self.__super_init()
        walk(tree, self)

    def get_module(self):
        return self
+
class InteractiveCodeGenerator(NestedScopeMixin, CodeGenerator):
    """Generates code for a statement in interactive ("single") mode.

    Expression statements print their value (PRINT_EXPR) rather than
    discarding it, matching the interactive interpreter.
    """

    __super_init = CodeGenerator.__init__

    scopes = None
    futures = ()

    def __init__(self, tree):
        self.graph = pyassem.PyFlowGraph("<interactive>", tree.filename)
        self.__super_init()
        self.set_lineno(tree)
        walk(tree, self)
        self.emit('RETURN_VALUE')

    def get_module(self):
        return self

    def visitDiscard(self, node):
        # XXX Discard means it's an expression.  Perhaps this is a bad
        # name.
        self.visit(node.expr)
        self.emit('PRINT_EXPR')
+
class AbstractFunctionCode:
    """Common code-object setup for functions, lambdas and genexprs.

    Concrete subclasses mix this with a CodeGenerator and supply
    super_init and NameFinder.
    """
    optimized = 1
    lambdaCount = 0  # class-wide counter used to number lambdas

    def __init__(self, func, scopes, isLambda, class_name, mod):
        self.class_name = class_name
        self.module = mod
        if isLambda:
            # the counter always lives on FunctionCodeGenerator, so all
            # lambdas share one numbering sequence regardless of subclass
            klass = FunctionCodeGenerator
            name = "<lambda.%d>" % klass.lambdaCount
            klass.lambdaCount = klass.lambdaCount + 1
        else:
            name = func.name

        args, hasTupleArg = generateArgList(func.argnames)
        self.graph = pyassem.PyFlowGraph(name, func.filename, args,
                                         optimized=1)
        self.isLambda = isLambda
        self.super_init()

        if not isLambda and func.doc:
            self.setDocstring(func.doc)

        lnf = walk(func.code, self.NameFinder(args), verbose=0)
        self.locals.push(lnf.getLocals())
        if func.varargs:
            self.graph.setFlag(CO_VARARGS)
        if func.kwargs:
            self.graph.setFlag(CO_VARKEYWORDS)
        self.set_lineno(func)
        if hasTupleArg:
            self.generateArgUnpack(func.argnames)

    def get_module(self):
        return self.module

    def finish(self):
        """Terminate the code object; non-lambdas implicitly return None."""
        self.graph.startExitBlock()
        if not self.isLambda:
            self.emit('LOAD_CONST', None)
        self.emit('RETURN_VALUE')

    def generateArgUnpack(self, args):
        # Tuple parameters arrive in anonymous locals named '.N'
        # (N = 2 * position); unpack each into its component names.
        for i in range(len(args)):
            arg = args[i]
            if type(arg) == types.TupleType:
                self.emit('LOAD_FAST', '.%d' % (i * 2))
                self.unpackSequence(arg)

    def unpackSequence(self, tup):
        # Recursively unpack a (possibly nested) tuple parameter.
        if VERSION > 1:
            self.emit('UNPACK_SEQUENCE', len(tup))
        else:
            self.emit('UNPACK_TUPLE', len(tup))
        for elt in tup:
            if type(elt) == types.TupleType:
                self.unpackSequence(elt)
            else:
                self._nameOp('STORE', elt)

    unpackTuple = unpackSequence
+
class FunctionCodeGenerator(NestedScopeMixin, AbstractFunctionCode,
                            CodeGenerator):
    super_init = CodeGenerator.__init__ # called by AbstractFunctionCode.__init__
    scopes = None

    __super_init = AbstractFunctionCode.__init__

    def __init__(self, func, scopes, isLambda, class_name, mod):
        self.scopes = scopes
        self.scope = scopes[func]
        self.__super_init(func, scopes, isLambda, class_name, mod)
        self.graph.setFreeVars(self.scope.get_free_vars())
        self.graph.setCellVars(self.scope.get_cell_vars())
        if self.scope.generator is not None:
            # the symbol pass marked this function as containing 'yield'
            self.graph.setFlag(CO_GENERATOR)
+
class GenExprCodeGenerator(NestedScopeMixin, AbstractFunctionCode,
                           CodeGenerator):
    super_init = CodeGenerator.__init__ # called by AbstractFunctionCode.__init__
    scopes = None

    __super_init = AbstractFunctionCode.__init__

    def __init__(self, gexp, scopes, class_name, mod):
        self.scopes = scopes
        self.scope = scopes[gexp]
        # genexprs are compiled like lambdas (isLambda=1)
        self.__super_init(gexp, scopes, 1, class_name, mod)
        self.graph.setFreeVars(self.scope.get_free_vars())
        self.graph.setCellVars(self.scope.get_cell_vars())
        # a generator expression is always a generator
        self.graph.setFlag(CO_GENERATOR)
+
class AbstractClassCode:
    """Common code-object setup for class bodies.

    The body executes in its own unoptimized namespace; finish()
    returns that namespace (LOAD_LOCALS) to the class constructor.
    """

    def __init__(self, klass, scopes, module):
        self.class_name = klass.name
        self.module = module
        self.graph = pyassem.PyFlowGraph(klass.name, klass.filename,
                                           optimized=0, klass=1)
        self.super_init()
        lnf = walk(klass.code, self.NameFinder(), verbose=0)
        self.locals.push(lnf.getLocals())
        self.graph.setFlag(CO_NEWLOCALS)
        if klass.doc:
            self.setDocstring(klass.doc)

    def get_module(self):
        return self.module

    def finish(self):
        self.graph.startExitBlock()
        self.emit('LOAD_LOCALS')
        self.emit('RETURN_VALUE')
+
class ClassCodeGenerator(NestedScopeMixin, AbstractClassCode, CodeGenerator):
    """Generates the code object executed to build a class namespace."""
    super_init = CodeGenerator.__init__
    scopes = None

    __super_init = AbstractClassCode.__init__

    def __init__(self, klass, scopes, module):
        self.scopes = scopes
        self.scope = scopes[klass]
        self.__super_init(klass, scopes, module)
        self.graph.setFreeVars(self.scope.get_free_vars())
        self.graph.setCellVars(self.scope.get_cell_vars())
        self.set_lineno(klass)
        # every class namespace gets __module__ (and __doc__ if present)
        self.emit("LOAD_GLOBAL", "__name__")
        self.storeName("__module__")
        if klass.doc:
            self.emit("LOAD_CONST", klass.doc)
            self.storeName('__doc__')
+
def generateArgList(arglist):
    """Generate an arg list marking TupleArgs.

    Returns (names, count): names is the argument list with each tuple
    parameter replaced by a TupleArg marker and its component names
    appended at the end; count is the number of tuple parameters.

    Raises ValueError for an argument that is neither a string nor a
    tuple.  (The original used the three-expression raise form
    'raise ValueError, "...", elt', which passes elt as a *traceback*
    object and therefore failed with a TypeError instead of raising the
    intended ValueError; the message also had a typo, "unexpect".)
    """
    args = []
    extra = []
    count = 0
    for i in range(len(arglist)):
        elt = arglist[i]
        # isinstance checks are equivalent to the original
        # type(...) == types.StringType/TupleType comparisons
        if isinstance(elt, str):
            args.append(elt)
        elif isinstance(elt, tuple):
            args.append(TupleArg(i * 2, elt))
            extra.extend(misc.flatten(elt))
            count = count + 1
        else:
            raise ValueError("unexpected argument type: %r" % (elt,))
    return args + extra, count
+
def findOp(node):
    """Find the op (DELETE, LOAD, STORE) in an AssTuple tree"""
    # OpFinder raises ValueError if the tree mixes different ops
    v = OpFinder()
    walk(node, v, verbose=0)
    return v.op
+
class OpFinder:
    """Visitor that records the single assignment op used in a target tree.

    visitAssName/visitAssAttr/visitSubscript all record node.flags; a
    statement mixing different ops raises ValueError.
    """
    def __init__(self):
        self.op = None
    def visitAssName(self, node):
        if self.op is None:
            self.op = node.flags
        elif self.op != node.flags:
            # modernized from the 2.x-only 'raise ValueError, msg' form
            raise ValueError("mixed ops in stmt")
    visitAssAttr = visitAssName
    visitSubscript = visitAssName
+
class Delegator:
    """Base class to support delegation for augmented assignment nodes

    To generate code for augmented assignments, we use the following
    wrapper classes.  In visitAugAssign, the left-hand expression node
    is visited twice.  The first time the visit uses the normal method
    for that node.  The second time the visit uses a different method
    that generates the appropriate code to perform the assignment.
    These delegator classes wrap the original AST nodes in order to
    support the variant visit methods.
    """
    def __init__(self, obj):
        self.obj = obj

    def __getattr__(self, attr):
        # anything not found on the wrapper is forwarded to the node
        return getattr(self.obj, attr)
+
# Concrete delegators; the class name selects the visitAug* method used.

class AugGetattr(Delegator):
    pass

class AugName(Delegator):
    pass

class AugSlice(Delegator):
    pass

class AugSubscript(Delegator):
    pass
+
# maps AST node class -> its augmented-assignment delegator class
wrapper = {
    ast.Getattr: AugGetattr,
    ast.Name: AugName,
    ast.Slice: AugSlice,
    ast.Subscript: AugSubscript,
    }

def wrap_aug(node):
    """Wrap an assignment-target node in its Aug* delegator."""
    return wrapper[node.__class__](node)
+
if __name__ == "__main__":
    # Compile each file named on the command line.
    # (Loop variable renamed from 'file', which shadowed the builtin.)
    for filename in sys.argv[1:]:
        compileFile(filename)
diff --git a/depot_tools/release/win/python_24/Lib/compiler/symbols.py b/depot_tools/release/win/python_24/Lib/compiler/symbols.py
new file mode 100644
index 0000000..9d4605a8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/symbols.py
@@ -0,0 +1,468 @@
+"""Module symbol-table generator"""
+
+from compiler import ast
+from compiler.consts import SC_LOCAL, SC_GLOBAL, SC_FREE, SC_CELL, SC_UNKNOWN
+from compiler.misc import mangle
+import types
+
+
+import sys
+
+MANGLE_LEN = 256
+
class Scope:
    """Symbol-table entry for one lexical scope (module, class, function).

    Tracks which names are defined, used, declared global, parameters,
    free variables and cell variables, plus nested child scopes.
    """
    # XXX how much information do I need about each name?
    def __init__(self, name, module, klass=None):
        self.name = name
        self.module = module
        self.defs = {}     # names bound in this scope
        self.uses = {}     # names referenced in this scope
        self.globals = {}  # names declared global
        self.params = {}   # formal parameters (also recorded in defs)
        self.frees = {}    # free variables (bound in an enclosing scope)
        self.cells = {}    # names a nested scope closes over
        self.children = []
        # nested is true if the class could contain free variables,
        # i.e. if it is nested within another function.
        self.nested = None
        self.generator = None
        self.klass = None
        if klass is not None:
            # strip leading underscores from the class name for mangling
            for i in range(len(klass)):
                if klass[i] != '_':
                    self.klass = klass[i:]
                    break

    def __repr__(self):
        return "<%s: %s>" % (self.__class__.__name__, self.name)

    def mangle(self, name):
        """Apply private-name mangling when inside a class."""
        if self.klass is None:
            return name
        return mangle(name, self.klass)

    def add_def(self, name):
        self.defs[self.mangle(name)] = 1

    def add_use(self, name):
        self.uses[self.mangle(name)] = 1

    def add_global(self, name):
        name = self.mangle(name)
        if self.uses.has_key(name) or self.defs.has_key(name):
            pass # XXX warn about global following def/use
        if self.params.has_key(name):
            raise SyntaxError, "%s in %s is global and parameter" % \
                  (name, self.name)
        self.globals[name] = 1
        # a global declaration also defines the name at module level
        self.module.add_def(name)

    def add_param(self, name):
        name = self.mangle(name)
        self.defs[name] = 1
        self.params[name] = 1

    def get_names(self):
        # union of defs, uses and globals
        d = {}
        d.update(self.defs)
        d.update(self.uses)
        d.update(self.globals)
        return d.keys()

    def add_child(self, child):
        self.children.append(child)

    def get_children(self):
        return self.children

    def DEBUG(self):
        # dump the scope's name sets to stderr
        print >> sys.stderr, self.name, self.nested and "nested" or ""
        print >> sys.stderr, "\tglobals: ", self.globals
        print >> sys.stderr, "\tcells: ", self.cells
        print >> sys.stderr, "\tdefs: ", self.defs
        print >> sys.stderr, "\tuses: ", self.uses
        print >> sys.stderr, "\tfrees:", self.frees

    def check_name(self, name):
        """Return scope of name.

        The scope of a name could be LOCAL, GLOBAL, FREE, or CELL.
        """
        if self.globals.has_key(name):
            return SC_GLOBAL
        if self.cells.has_key(name):
            return SC_CELL
        if self.defs.has_key(name):
            return SC_LOCAL
        if self.nested and (self.frees.has_key(name) or
                            self.uses.has_key(name)):
            return SC_FREE
        if self.nested:
            return SC_UNKNOWN
        else:
            return SC_GLOBAL

    def get_free_vars(self):
        """Return the names that are free in this (nested) scope."""
        if not self.nested:
            return ()
        free = {}
        free.update(self.frees)
        for name in self.uses.keys():
            if not (self.defs.has_key(name) or
                    self.globals.has_key(name)):
                free[name] = 1
        return free.keys()

    def handle_children(self):
        # pull free variables up from children; push globals back down
        for child in self.children:
            frees = child.get_free_vars()
            globals = self.add_frees(frees)
            for name in globals:
                child.force_global(name)

    def force_global(self, name):
        """Force name to be global in scope.

        Some child of the current node had a free reference to name.
        When the child was processed, it was labelled a free
        variable.  Now that all its enclosing scope have been
        processed, the name is known to be a global or builtin.  So
        walk back down the child chain and set the name to be global
        rather than free.

        Be careful to stop if a child does not think the name is
        free.
        """
        self.globals[name] = 1
        if self.frees.has_key(name):
            del self.frees[name]
        for child in self.children:
            if child.check_name(name) == SC_FREE:
                child.force_global(name)

    def add_frees(self, names):
        """Process list of free vars from nested scope.

        Returns a list of names that are either 1) declared global in the
        parent or 2) undefined in a top-level parent.  In either case,
        the nested scope should treat them as globals.
        """
        child_globals = []
        for name in names:
            sc = self.check_name(name)
            if self.nested:
                if sc == SC_UNKNOWN or sc == SC_FREE \
                   or isinstance(self, ClassScope):
                    self.frees[name] = 1
                elif sc == SC_GLOBAL:
                    child_globals.append(name)
                elif isinstance(self, FunctionScope) and sc == SC_LOCAL:
                    # a nested scope closes over our local: make it a cell
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
            else:
                if sc == SC_LOCAL:
                    self.cells[name] = 1
                elif sc != SC_CELL:
                    child_globals.append(name)
        return child_globals

    def get_cell_vars(self):
        return self.cells.keys()
+
class ModuleScope(Scope):
    """Top-level (module/global) scope; it acts as its own module."""
    __super_init = Scope.__init__

    def __init__(self):
        self.__super_init("global", self)
+
class FunctionScope(Scope):
    """Scope for a function body; behavior is entirely inherited."""
    pass
+
class GenExprScope(Scope):
    """Scope for a generator expression.

    The outermost iterable is evaluated in the enclosing scope and
    passed in via the hidden parameter '[outmost-iterable]'.
    """
    __super_init = Scope.__init__

    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # Bump the class-wide counter.  The original's 'self.__counter += 1'
        # created an *instance* attribute and left the class counter at 1
        # forever, so every genexp scope was numbered <1>.
        GenExprScope.__counter += 1
        self.__super_init("generator expression<%d>"%i, module, klass)
        self.add_param('[outmost-iterable]')

    def get_names(self):
        # Bug fix: the original called Scope.get_names() without self,
        # which raised TypeError (unbound method called with no instance).
        keys = Scope.get_names(self)
        return keys
+
class LambdaScope(FunctionScope):
    """Scope for a lambda expression."""

    __super_init = Scope.__init__

    __counter = 1

    def __init__(self, module, klass=None):
        i = self.__counter
        # NOTE(review): this augmented assignment sets an instance
        # attribute rather than updating the class-level counter, so
        # every lambda scope is named "lambda.1" -- confirm whether
        # unique names are required before changing.
        self.__counter += 1
        self.__super_init("lambda.%d" % i, module, klass)
+
class ClassScope(Scope):
    """Scope for the body of a class statement."""

    def __init__(self, name, module):
        # The class name also serves as the 'klass' context of the scope.
        Scope.__init__(self, name, module, name)
+
class SymbolVisitor:
    """AST visitor that builds a Scope object for each namespace.

    The scopes are collected in self.scopes, keyed by the AST node that
    introduced them (Module, Function, Lambda, Class, GenExpr).
    """

    def __init__(self):
        # Maps AST node -> Scope for every namespace encountered.
        self.scopes = {}
        # Name of the enclosing class, if any, while visiting its body.
        self.klass = None

    # nodes that define new scopes

    def visitModule(self, node):
        scope = self.module = self.scopes[node] = ModuleScope()
        self.visit(node.node, scope)

    visitExpression = visitModule

    def visitFunction(self, node, parent):
        # Decorators and defaults are evaluated in the enclosing scope.
        if node.decorators:
            self.visit(node.decorators, parent)
        parent.add_def(node.name)
        for n in node.defaults:
            self.visit(n, parent)
        scope = FunctionScope(node.name, self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def visitGenExpr(self, node, parent):
        scope = GenExprScope(self.module, self.klass);
        if parent.nested or isinstance(parent, FunctionScope) \
                or isinstance(parent, GenExprScope):
            scope.nested = 1

        self.scopes[node] = scope
        self.visit(node.code, scope)

        self.handle_free_vars(scope, parent)

    def visitGenExprInner(self, node, scope):
        for genfor in node.quals:
            self.visit(genfor, scope)

        self.visit(node.expr, scope)

    def visitGenExprFor(self, node, scope):
        # The loop target is an assignment inside the genexp's own scope.
        self.visit(node.assign, scope, 1)
        self.visit(node.iter, scope)
        for if_ in node.ifs:
            self.visit(if_, scope)

    def visitGenExprIf(self, node, scope):
        self.visit(node.test, scope)

    def visitLambda(self, node, parent, assign=0):
        # Lambda is an expression, so it could appear in an expression
        # context where assign is passed.  The transformer should catch
        # any code that has a lambda on the left-hand side.
        assert not assign

        for n in node.defaults:
            self.visit(n, parent)
        scope = LambdaScope(self.module, self.klass)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        self.scopes[node] = scope
        self._do_args(scope, node.argnames)
        self.visit(node.code, scope)
        self.handle_free_vars(scope, parent)

    def _do_args(self, scope, args):
        # Tuple parameters (def f(a, (b, c))) are flattened recursively.
        for name in args:
            if type(name) == types.TupleType:
                self._do_args(scope, name)
            else:
                scope.add_param(name)

    def handle_free_vars(self, scope, parent):
        parent.add_child(scope)
        scope.handle_children()

    def visitClass(self, node, parent):
        # The class name binds in the parent; bases are evaluated there too.
        parent.add_def(node.name)
        for n in node.bases:
            self.visit(n, parent)
        scope = ClassScope(node.name, self.module)
        if parent.nested or isinstance(parent, FunctionScope):
            scope.nested = 1
        if node.doc is not None:
            scope.add_def('__doc__')
        scope.add_def('__module__')
        self.scopes[node] = scope
        prev = self.klass
        self.klass = node.name
        self.visit(node.code, scope)
        self.klass = prev
        self.handle_free_vars(scope, parent)

    # name can be a def or a use

    # XXX a few calls and nodes expect a third "assign" arg that is
    # true if the name is being used as an assignment.  only
    # expressions contained within statements may have the assign arg.

    def visitName(self, node, scope, assign=0):
        if assign:
            scope.add_def(node.name)
        else:
            scope.add_use(node.name)

    # operations that bind new names

    def visitFor(self, node, scope):
        self.visit(node.assign, scope, 1)
        self.visit(node.list, scope)
        self.visit(node.body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    def visitFrom(self, node, scope):
        for name, asname in node.names:
            # "from m import *" binds no specific names here.
            if name == "*":
                continue
            scope.add_def(asname or name)

    def visitImport(self, node, scope):
        for name, asname in node.names:
            # "import a.b" binds only the top-level package name "a".
            i = name.find(".")
            if i > -1:
                name = name[:i]
            scope.add_def(asname or name)

    def visitGlobal(self, node, scope):
        for name in node.names:
            scope.add_global(name)

    def visitAssign(self, node, scope):
        """Propagate assignment flag down to child nodes.

        The Assign node doesn't itself contains the variables being
        assigned to.  Instead, the children in node.nodes are visited
        with the assign flag set to true.  When the names occur in
        those nodes, they are marked as defs.

        Some names that occur in an assignment target are not bound by
        the assignment, e.g. a name occurring inside a slice.  The
        visitor handles these nodes specially; they do not propagate
        the assign flag to their children.
        """
        for n in node.nodes:
            self.visit(n, scope, 1)
        self.visit(node.expr, scope)

    def visitAssName(self, node, scope, assign=1):
        scope.add_def(node.name)

    def visitAssAttr(self, node, scope, assign=0):
        # "a.b = x" binds nothing; only the base expression is a use.
        self.visit(node.expr, scope, 0)

    def visitSubscript(self, node, scope, assign=0):
        # "a[i] = x" binds nothing; base and subscripts are uses.
        self.visit(node.expr, scope, 0)
        for n in node.subs:
            self.visit(n, scope, 0)

    def visitSlice(self, node, scope, assign=0):
        self.visit(node.expr, scope, 0)
        if node.lower:
            self.visit(node.lower, scope, 0)
        if node.upper:
            self.visit(node.upper, scope, 0)

    def visitAugAssign(self, node, scope):
        # If the LHS is a name, then this counts as assignment.
        # Otherwise, it's just use.
        self.visit(node.node, scope)
        if isinstance(node.node, ast.Name):
            self.visit(node.node, scope, 1) # XXX worry about this
        self.visit(node.expr, scope)

    # prune if statements if tests are false

    _const_types = types.StringType, types.IntType, types.FloatType

    def visitIf(self, node, scope):
        for test, body in node.tests:
            if isinstance(test, ast.Const):
                if type(test.value) in self._const_types:
                    # Skip branches guarded by a false constant.
                    if not test.value:
                        continue
            self.visit(test, scope)
            self.visit(body, scope)
        if node.else_:
            self.visit(node.else_, scope)

    # a yield statement signals a generator

    def visitYield(self, node, scope):
        scope.generator = 1
        self.visit(node.value, scope)
+
def sort(l):
    """Return a sorted copy of *l*, leaving the original untouched."""
    return sorted(l)
+
def list_eq(l1, l2):
    """True if *l1* and *l2* contain the same elements, ignoring order."""
    return sorted(l1) == sorted(l2)
+
if __name__ == "__main__":
    # Self-test: run this module's scope analysis over each file named
    # on the command line and compare the results against the
    # interpreter's own symtable module, exiting non-zero on mismatch.
    import sys
    from compiler import parseFile, walk
    import symtable

    def get_names(syms):
        # Symbol names, excluding compiler-generated ones ('_[1]', '.0').
        return [s for s in [s.get_name() for s in syms.get_symbols()]
                if not (s.startswith('_[') or s.startswith('.'))]

    for file in sys.argv[1:]:
        print file
        f = open(file)
        buf = f.read()
        f.close()
        syms = symtable.symtable(buf, file, "exec")
        mod_names = get_names(syms)
        tree = parseFile(file)
        s = SymbolVisitor()
        walk(tree, s)

        # compare module-level symbols
        names2 = s.scopes[tree].get_names()

        if not list_eq(mod_names, names2):
            print
            print "oops", file
            print sort(mod_names)
            print sort(names2)
            sys.exit(-1)

        # All non-module scopes, for comparison by name below.
        d = {}
        d.update(s.scopes)
        del d[tree]
        scopes = d.values()
        del d

        for s in syms.get_symbols():
            if s.is_namespace():
                l = [sc for sc in scopes
                     if sc.name == s.get_name()]
                if len(l) > 1:
                    # Ambiguous: several scopes share this name.
                    print "skipping", s.get_name()
                else:
                    if not list_eq(get_names(s.get_namespace()),
                                   l[0].get_names()):
                        print s.get_name()
                        print sort(get_names(s.get_namespace()))
                        print sort(l[0].get_names())
                        sys.exit(-1)
diff --git a/depot_tools/release/win/python_24/Lib/compiler/syntax.py b/depot_tools/release/win/python_24/Lib/compiler/syntax.py
new file mode 100644
index 0000000..88c1453a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/syntax.py
@@ -0,0 +1,46 @@
+"""Check for errs in the AST.
+
+The Python parser does not catch all syntax errors.  Others, like
+assignments with invalid targets, are caught in the code generation
+phase.
+
+The compiler package catches some errors in the transformer module.
+But it seems clearer to write checkers that use the AST to detect
+errors.
+"""
+
+from compiler import ast, walk
+
def check(tree, multi=None):
    """Walk *tree* with a SyntaxErrorChecker and return the error count.

    *multi* is passed through to the checker: if not None, errors are
    printed instead of raised.
    """
    checker = SyntaxErrorChecker(multi)
    walk(tree, checker)
    return checker.errors
+
class SyntaxErrorChecker:
    """A visitor to find syntax errors in the AST."""

    def __init__(self, multi=None):
        """Create new visitor object.

        If optional argument multi is not None, then print messages
        for each error rather than raising a SyntaxError for the
        first.
        """
        self.multi = multi
        # Running count of errors found; returned by check().
        self.errors = 0

    def error(self, node, msg):
        # Record one error: report-and-continue in multi mode, or abort
        # immediately with a SyntaxError otherwise.
        self.errors = self.errors + 1
        if self.multi is not None:
            print "%s:%s: %s" % (node.filename, node.lineno, msg)
        else:
            raise SyntaxError, "%s (%s:%s)" % (msg, node.filename, node.lineno)

    def visitAssign(self, node):
        # the transformer module handles many of these
        for target in node.nodes:
            pass
##            if isinstance(target, ast.AssList):
##                if target.lineno is None:
##                    target.lineno = node.lineno
##                self.error(target, "can't assign to list comprehension")
diff --git a/depot_tools/release/win/python_24/Lib/compiler/transformer.py b/depot_tools/release/win/python_24/Lib/compiler/transformer.py
new file mode 100644
index 0000000..0c6d148
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/transformer.py
@@ -0,0 +1,1434 @@
+"""Parse tree transformation module.
+
+Transforms Python source code into an abstract syntax tree (AST)
+defined in the ast module.
+
+The simplest ways to invoke this module are via parse and parseFile.
+parse(buf) -> AST
+parseFile(path) -> AST
+"""
+
+# Original version written by Greg Stein (gstein@lyra.org)
+#                         and Bill Tutt (rassilon@lima.mudlib.org)
+# February 1997.
+#
+# Modifications and improvements for Python 2.0 by Jeremy Hylton and
+# Mark Hammond
+#
+# Some fixes to try to have correct line number on almost all nodes
+# (except Module, Discard and Stmt) added by Sylvain Thenault
+#
+# Portions of this file are:
+# Copyright (C) 1997-1998 Greg Stein. All Rights Reserved.
+#
+# This module is provided under a BSD-ish license. See
+#   http://www.opensource.org/licenses/bsd-license.html
+# and replace OWNER, ORGANIZATION, and YEAR as appropriate.
+
+from compiler.ast import *
+import parser
+import symbol
+import token
+import sys
+
class WalkerError(StandardError):
    # Raised when the transformer encounters a parse-tree node it does
    # not know how to handle.
    pass
+
+from consts import CO_VARARGS, CO_VARKEYWORDS
+from consts import OP_ASSIGN, OP_DELETE, OP_APPLY
+
def parseFile(path):
    """Parse the Python source file at *path* and return its AST."""
    f = open(path, "U")
    # XXX The parser API tolerates files without a trailing newline,
    # but not strings without a trailing newline.  Always add an extra
    # newline to the file contents, since we're going through the string
    # version of the API.
    try:
        src = f.read() + "\n"
    finally:
        # Close even when read() raises, so the descriptor is not
        # leaked (no 'with' statement available in 2.4).
        f.close()
    return parse(src)
+
def parse(buf, mode="exec"):
    """Parse source text *buf* and return its AST.

    *mode* mirrors compile()'s third argument: "exec" and "single" are
    parsed as suites, "eval" as a single expression.
    """
    if mode in ("exec", "single"):
        return Transformer().parsesuite(buf)
    if mode == "eval":
        return Transformer().parseexpr(buf)
    raise ValueError("compile() arg 3 must be"
                     " 'exec' or 'eval' or 'single'")
+
def asList(nodes):
    """Recursively convert a sequence of nodes to nested lists/tuples.

    Items with an asList() method are converted via that method; plain
    tuples and lists are converted element-wise; anything else is kept
    as-is.
    """
    result = []
    for item in nodes:
        if hasattr(item, "asList"):
            result.append(item.asList())
        elif type(item) is tuple:
            result.append(tuple(asList(item)))
        elif type(item) is list:
            result.append(asList(item))
        else:
            result.append(item)
    return result
+
def extractLineNo(ast):
    """Return the line number of the first terminal node under *ast*.

    Returns None when no child carries a line number.
    """
    if isinstance(ast[1], tuple):
        # Non-terminal: search the children depth-first for a terminal.
        for child in ast[1:]:
            if isinstance(child, tuple):
                lineno = extractLineNo(child)
                if lineno is not None:
                    return lineno
        return None
    # Terminal nodes carry their line number in slot 2.
    return ast[2]
+
+def Node(*args):
+    kind = args[0]
+    if nodes.has_key(kind):
+        try:
+            return nodes[kind](*args[1:])
+        except TypeError:
+            print nodes[kind], len(args), args
+            raise
+    else:
+        raise WalkerEror, "Can't find appropriate Node type: %s" % str(args)
+        #return apply(ast.Node, args)
+
+class Transformer:
+    """Utility object for transforming Python parse trees.
+
+    Exposes the following methods:
+        tree = transform(ast_tree)
+        tree = parsesuite(text)
+        tree = parseexpr(text)
+        tree = parsefile(fileob | filename)
+    """
+
    def __init__(self):
        # Build the dispatch table: any method whose name matches a
        # grammar symbol handles parse-tree nodes of that symbol.
        self._dispatch = {}
        for value, name in symbol.sym_name.items():
            if hasattr(self, name):
                self._dispatch[value] = getattr(self, name)
        self._dispatch[token.NEWLINE] = self.com_NEWLINE
        # Atoms dispatch on their first token instead of a symbol.
        self._atom_dispatch = {token.LPAR: self.atom_lpar,
                               token.LSQB: self.atom_lsqb,
                               token.LBRACE: self.atom_lbrace,
                               token.BACKQUOTE: self.atom_backquote,
                               token.NUMBER: self.atom_number,
                               token.STRING: self.atom_string,
                               token.NAME: self.atom_name,
                               }
        # Source encoding, filled in when an encoding_decl node is seen.
        self.encoding = None

    def transform(self, tree):
        """Transform an AST into a modified parse tree."""
        if not (isinstance(tree, tuple) or isinstance(tree, list)):
            tree = parser.ast2tuple(tree, line_info=1)
        return self.compile_node(tree)

    def parsesuite(self, text):
        """Return a modified parse tree for the given suite text."""
        return self.transform(parser.suite(text))

    def parseexpr(self, text):
        """Return a modified parse tree for the given expression text."""
        return self.transform(parser.expr(text))

    def parsefile(self, file):
        """Return a modified parse tree for the contents of the given file."""
        if type(file) == type(''):
            file = open(file)
        return self.parsesuite(file.read())
+
+    # --------------------------------------------------------------
+    #
+    # PRIVATE METHODS
+    #
+
+    def compile_node(self, node):
+        ### emit a line-number node?
+        n = node[0]
+
+        if n == symbol.encoding_decl:
+            self.encoding = node[2]
+            node = node[1]
+            n = node[0]
+
+        if n == symbol.single_input:
+            return self.single_input(node[1:])
+        if n == symbol.file_input:
+            return self.file_input(node[1:])
+        if n == symbol.eval_input:
+            return self.eval_input(node[1:])
+        if n == symbol.lambdef:
+            return self.lambdef(node[1:])
+        if n == symbol.funcdef:
+            return self.funcdef(node[1:])
+        if n == symbol.classdef:
+            return self.classdef(node[1:])
+
+        raise WalkerEror, ('unexpected node type', n)
+
    def single_input(self, node):
        ### do we want to do anything about being "interactive" ?

        # NEWLINE | simple_stmt | compound_stmt NEWLINE
        n = node[0][0]
        if n != token.NEWLINE:
            return self.com_stmt(node[0])

        # A bare NEWLINE becomes an empty statement.
        return Pass()

    def file_input(self, nodelist):
        # Whole-module input; a leading docstring is lifted into Module.
        doc = self.get_docstring(nodelist, symbol.file_input)
        if doc is not None:
            i = 1
        else:
            i = 0
        stmts = []
        for node in nodelist[i:]:
            if node[0] != token.ENDMARKER and node[0] != token.NEWLINE:
                self.com_append_stmt(stmts, node)
        return Module(doc, Stmt(stmts))

    def eval_input(self, nodelist):
        # from the built-in function input()
        ### is this sufficient?
        return Expression(self.com_node(nodelist[0]))

    def decorator_name(self, nodelist):
        # Fold a dotted name "a.b.c" into nested Getattr nodes.
        listlen = len(nodelist)
        assert listlen >= 1 and listlen % 2 == 1

        item = self.atom_name(nodelist)
        i = 1
        while i < listlen:
            assert nodelist[i][0] == token.DOT
            assert nodelist[i + 1][0] == token.NAME
            item = Getattr(item, nodelist[i + 1][1])
            i += 2

        return item

    def decorator(self, nodelist):
        # '@' dotted_name [ '(' [arglist] ')' ]
        assert len(nodelist) in (3, 5, 6)
        assert nodelist[0][0] == token.AT
        assert nodelist[-1][0] == token.NEWLINE

        assert nodelist[1][0] == symbol.dotted_name
        funcname = self.decorator_name(nodelist[1][1:])

        if len(nodelist) > 3:
            # A parenthesized decorator is a call expression.
            assert nodelist[2][0] == token.LPAR
            expr = self.com_call_function(funcname, nodelist[3])
        else:
            expr = funcname

        return expr

    def decorators(self, nodelist):
        # decorators: decorator ([NEWLINE] decorator)* NEWLINE
        items = []
        for dec_nodelist in nodelist:
            assert dec_nodelist[0] == symbol.decorator
            items.append(self.decorator(dec_nodelist[1:]))
        return Decorators(items)
+
    def funcdef(self, nodelist):
        #                    -6   -5    -4         -3  -2    -1
        # funcdef: [decorators] 'def' NAME parameters ':' suite
        # parameters: '(' [varargslist] ')'

        if len(nodelist) == 6:
            assert nodelist[0][0] == symbol.decorators
            decorators = self.decorators(nodelist[0][1:])
        else:
            assert len(nodelist) == 5
            decorators = None

        # Negative indices make the code independent of decorators.
        lineno = nodelist[-4][2]
        name = nodelist[-4][1]
        args = nodelist[-3][2]

        if args[0] == symbol.varargslist:
            names, defaults, flags = self.com_arglist(args[1:])
        else:
            names = defaults = ()
            flags = 0
        doc = self.get_docstring(nodelist[-1])

        # code for function
        code = self.com_node(nodelist[-1])

        if doc is not None:
            # The docstring was lifted out, so drop its Discard stmt.
            assert isinstance(code, Stmt)
            assert isinstance(code.nodes[0], Discard)
            del code.nodes[0]
        return Function(decorators, name, names, defaults, flags, doc, code,
                     lineno=lineno)

    def lambdef(self, nodelist):
        # lambdef: 'lambda' [varargslist] ':' test
        if nodelist[2][0] == symbol.varargslist:
            names, defaults, flags = self.com_arglist(nodelist[2][1:])
        else:
            names = defaults = ()
            flags = 0

        # code for lambda
        code = self.com_node(nodelist[-1])

        return Lambda(names, defaults, flags, code, lineno=nodelist[1][2])

    def classdef(self, nodelist):
        # classdef: 'class' NAME ['(' testlist ')'] ':' suite

        name = nodelist[1][1]
        doc = self.get_docstring(nodelist[-1])
        if nodelist[2][0] == token.COLON:
            bases = []
        else:
            bases = self.com_bases(nodelist[3])

        # code for class
        code = self.com_node(nodelist[-1])

        if doc is not None:
            # The docstring was lifted out, so drop its Discard stmt.
            assert isinstance(code, Stmt)
            assert isinstance(code.nodes[0], Discard)
            del code.nodes[0]

        return Class(name, bases, doc, code, lineno=nodelist[1][2])

    def stmt(self, nodelist):
        return self.com_stmt(nodelist[0])

    small_stmt = stmt
    flow_stmt = stmt
    compound_stmt = stmt

    def simple_stmt(self, nodelist):
        # small_stmt (';' small_stmt)* [';'] NEWLINE
        stmts = []
        for i in range(0, len(nodelist), 2):
            self.com_append_stmt(stmts, nodelist[i])
        return Stmt(stmts)
+
+    def parameters(self, nodelist):
+        raise WalkerEror
+
+    def varargslist(self, nodelist):
+        raise WalkerEror
+
+    def fpdef(self, nodelist):
+        raise WalkerEror
+
+    def fplist(self, nodelist):
+        raise WalkerEror
+
+    def dotted_name(self, nodelist):
+        raise WalkerEror
+
+    def comp_op(self, nodelist):
+        raise WalkerEror
+
+    def trailer(self, nodelist):
+        raise WalkerEror
+
+    def sliceop(self, nodelist):
+        raise WalkerEror
+
+    def argument(self, nodelist):
+        raise WalkerEror
+
+    # --------------------------------------------------------------
+    #
+    # STATEMENT NODES  (invoked by com_node())
+    #
+
    def expr_stmt(self, nodelist):
        # augassign testlist | testlist ('=' testlist)*
        en = nodelist[-1]
        exprNode = self.lookup_node(en)(en[1:])
        if len(nodelist) == 1:
            # A bare expression statement.
            return Discard(exprNode, lineno=exprNode.lineno)
        if nodelist[1][0] == token.EQUAL:
            # Chained assignment: every testlist but the last is a target.
            nodesl = []
            for i in range(0, len(nodelist) - 2, 2):
                nodesl.append(self.com_assign(nodelist[i], OP_ASSIGN))
            return Assign(nodesl, exprNode, lineno=nodelist[1][2])
        else:
            lval = self.com_augassign(nodelist[0])
            op = self.com_augassign_op(nodelist[1])
            return AugAssign(lval, op[1], exprNode, lineno=op[2])
        raise WalkerError, "can't get here"

    def print_stmt(self, nodelist):
        # print ([ test (',' test)* [','] ] | '>>' test [ (',' test)+ [','] ])
        items = []
        if len(nodelist) == 1:
            start = 1
            dest = None
        elif nodelist[1][0] == token.RIGHTSHIFT:
            # Extended form: print >> dest, ...
            assert len(nodelist) == 3 \
                   or nodelist[3][0] == token.COMMA
            dest = self.com_node(nodelist[2])
            start = 4
        else:
            dest = None
            start = 1
        for i in range(start, len(nodelist), 2):
            items.append(self.com_node(nodelist[i]))
        if nodelist[-1][0] == token.COMMA:
            # Trailing comma suppresses the newline.
            return Print(items, dest, lineno=nodelist[0][2])
        return Printnl(items, dest, lineno=nodelist[0][2])

    def del_stmt(self, nodelist):
        return self.com_assign(nodelist[1], OP_DELETE)

    def pass_stmt(self, nodelist):
        return Pass(lineno=nodelist[0][2])

    def break_stmt(self, nodelist):
        return Break(lineno=nodelist[0][2])

    def continue_stmt(self, nodelist):
        return Continue(lineno=nodelist[0][2])

    def return_stmt(self, nodelist):
        # return: [testlist]
        if len(nodelist) < 2:
            # A bare 'return' returns None.
            return Return(Const(None), lineno=nodelist[0][2])
        return Return(self.com_node(nodelist[1]), lineno=nodelist[0][2])

    def yield_stmt(self, nodelist):
        return Yield(self.com_node(nodelist[1]), lineno=nodelist[0][2])

    def raise_stmt(self, nodelist):
        # raise: [test [',' test [',' test]]]
        if len(nodelist) > 5:
            expr3 = self.com_node(nodelist[5])
        else:
            expr3 = None
        if len(nodelist) > 3:
            expr2 = self.com_node(nodelist[3])
        else:
            expr2 = None
        if len(nodelist) > 1:
            expr1 = self.com_node(nodelist[1])
        else:
            expr1 = None
        return Raise(expr1, expr2, expr3, lineno=nodelist[0][2])
+
    def import_stmt(self, nodelist):
        # import_stmt: import_name | import_from
        assert len(nodelist) == 1
        return self.com_node(nodelist[0])

    def import_name(self, nodelist):
        # import_name: 'import' dotted_as_names
        return Import(self.com_dotted_as_names(nodelist[1]),
                      lineno=nodelist[0][2])

    def import_from(self, nodelist):
        # import_from: 'from' dotted_name 'import' ('*' |
        #    '(' import_as_names ')' | import_as_names)
        assert nodelist[0][1] == 'from'
        assert nodelist[1][0] == symbol.dotted_name
        assert nodelist[2][1] == 'import'
        fromname = self.com_dotted_name(nodelist[1])
        if nodelist[3][0] == token.STAR:
            return From(fromname, [('*', None)],
                        lineno=nodelist[0][2])
        else:
            # Skip over an optional parenthesis around import_as_names.
            node = nodelist[3 + (nodelist[3][0] == token.LPAR)]
            return From(fromname, self.com_import_as_names(node),
                        lineno=nodelist[0][2])

    def global_stmt(self, nodelist):
        # global: NAME (',' NAME)*
        names = []
        for i in range(1, len(nodelist), 2):
            names.append(nodelist[i][1])
        return Global(names, lineno=nodelist[0][2])

    def exec_stmt(self, nodelist):
        # exec_stmt: 'exec' expr ['in' expr [',' expr]]
        expr1 = self.com_node(nodelist[1])
        if len(nodelist) >= 4:
            expr2 = self.com_node(nodelist[3])
            if len(nodelist) >= 6:
                expr3 = self.com_node(nodelist[5])
            else:
                expr3 = None
        else:
            expr2 = expr3 = None

        return Exec(expr1, expr2, expr3, lineno=nodelist[0][2])

    def assert_stmt(self, nodelist):
        # 'assert': test, [',' test]
        expr1 = self.com_node(nodelist[1])
        if (len(nodelist) == 4):
            expr2 = self.com_node(nodelist[3])
        else:
            expr2 = None
        return Assert(expr1, expr2, lineno=nodelist[0][2])
+
    def if_stmt(self, nodelist):
        # if: test ':' suite ('elif' test ':' suite)* ['else' ':' suite]
        tests = []
        # Each (test, suite) pair occupies four slots in the node list.
        for i in range(0, len(nodelist) - 3, 4):
            testNode = self.com_node(nodelist[i + 1])
            suiteNode = self.com_node(nodelist[i + 3])
            tests.append((testNode, suiteNode))

        if len(nodelist) % 4 == 3:
            elseNode = self.com_node(nodelist[-1])
##      elseNode.lineno = nodelist[-1][1][2]
        else:
            elseNode = None
        return If(tests, elseNode, lineno=nodelist[0][2])

    def while_stmt(self, nodelist):
        # 'while' test ':' suite ['else' ':' suite]

        testNode = self.com_node(nodelist[1])
        bodyNode = self.com_node(nodelist[3])

        if len(nodelist) > 4:
            elseNode = self.com_node(nodelist[6])
        else:
            elseNode = None

        return While(testNode, bodyNode, elseNode, lineno=nodelist[0][2])

    def for_stmt(self, nodelist):
        # 'for' exprlist 'in' exprlist ':' suite ['else' ':' suite]

        assignNode = self.com_assign(nodelist[1], OP_ASSIGN)
        listNode = self.com_node(nodelist[3])
        bodyNode = self.com_node(nodelist[5])

        if len(nodelist) > 8:
            elseNode = self.com_node(nodelist[8])
        else:
            elseNode = None

        return For(assignNode, listNode, bodyNode, elseNode,
                   lineno=nodelist[0][2])

    def try_stmt(self, nodelist):
        # 'try' ':' suite (except_clause ':' suite)+ ['else' ':' suite]
        # | 'try' ':' suite 'finally' ':' suite
        if nodelist[3][0] != symbol.except_clause:
            return self.com_try_finally(nodelist)

        return self.com_try_except(nodelist)

    def suite(self, nodelist):
        # simple_stmt | NEWLINE INDENT NEWLINE* (stmt NEWLINE*)+ DEDENT
        if len(nodelist) == 1:
            return self.com_stmt(nodelist[0])

        stmts = []
        for node in nodelist:
            if node[0] == symbol.stmt:
                self.com_append_stmt(stmts, node)
        return Stmt(stmts)
+
+    # --------------------------------------------------------------
+    #
+    # EXPRESSION NODES  (invoked by com_node())
+    #
+
    def testlist(self, nodelist):
        # testlist: expr (',' expr)* [',']
        # testlist_safe: test [(',' test)+ [',']]
        # exprlist: expr (',' expr)* [',']
        return self.com_binary(Tuple, nodelist)

    testlist_safe = testlist # XXX
    testlist1 = testlist
    exprlist = testlist

    def testlist_gexp(self, nodelist):
        # A trailing gen_for makes this a generator expression.
        if len(nodelist) == 2 and nodelist[1][0] == symbol.gen_for:
            test = self.com_node(nodelist[0])
            return self.com_generator_expression(test, nodelist[1])
        return self.testlist(nodelist)

    def test(self, nodelist):
        # and_test ('or' and_test)* | lambdef
        if len(nodelist) == 1 and nodelist[0][0] == symbol.lambdef:
            return self.lambdef(nodelist[0])
        return self.com_binary(Or, nodelist)

    def and_test(self, nodelist):
        # not_test ('and' not_test)*
        return self.com_binary(And, nodelist)

    def not_test(self, nodelist):
        # 'not' not_test | comparison
        result = self.com_node(nodelist[-1])
        if len(nodelist) == 2:
            return Not(result, lineno=nodelist[0][2])
        return result
+
    def comparison(self, nodelist):
        # comparison: expr (comp_op expr)*
        node = self.com_node(nodelist[0])
        if len(nodelist) == 1:
            return node

        results = []
        for i in range(2, len(nodelist), 2):
            nl = nodelist[i-1]

            # comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
            #          | 'in' | 'not' 'in' | 'is' | 'is' 'not'
            n = nl[1]
            if n[0] == token.NAME:
                # NOTE: 'type' shadows the builtin for the rest of this
                # method; it holds the comparison operator as a string.
                type = n[1]
                if len(nl) == 3:
                    # Two-word operators: 'not in' and 'is not'.
                    if type == 'not':
                        type = 'not in'
                    else:
                        type = 'is not'
            else:
                type = _cmp_types[n[0]]

            # NOTE: only the last operator's line number survives the
            # loop; it becomes the Compare node's lineno below.
            lineno = nl[1][2]
            results.append((type, self.com_node(nodelist[i])))

        # we need a special "compare" node so that we can distinguish
        #   3 < x < 5   from    (3 < x) < 5
        # the two have very different semantics and results (note that the
        # latter form is always true)

        return Compare(node, results, lineno=lineno)
+
    def expr(self, nodelist):
        # expr: xor_expr ('|' xor_expr)*
        return self.com_binary(Bitor, nodelist)

    def xor_expr(self, nodelist):
        # xor_expr: and_expr ('^' and_expr)*
        return self.com_binary(Bitxor, nodelist)

    def and_expr(self, nodelist):
        # and_expr: shift_expr ('&' shift_expr)*
        return self.com_binary(Bitand, nodelist)

    def shift_expr(self, nodelist):
        # shift_expr: arith_expr (('<<'|'>>') arith_expr)*
        node = self.com_node(nodelist[0])
        for i in range(2, len(nodelist), 2):
            right = self.com_node(nodelist[i])
            # NOTE: every node in the chain gets the first operator's
            # line number.
            if nodelist[i-1][0] == token.LEFTSHIFT:
                node = LeftShift([node, right], lineno=nodelist[1][2])
            elif nodelist[i-1][0] == token.RIGHTSHIFT:
                node = RightShift([node, right], lineno=nodelist[1][2])
            else:
                raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
        return node
+
+    def arith_expr(self, nodelist):
+        node = self.com_node(nodelist[0])
+        for i in range(2, len(nodelist), 2):
+            right = self.com_node(nodelist[i])
+            if nodelist[i-1][0] == token.PLUS:
+                node = Add([node, right], lineno=nodelist[1][2])
+            elif nodelist[i-1][0] == token.MINUS:
+                node = Sub([node, right], lineno=nodelist[1][2])
+            else:
+                raise ValueError, "unexpected token: %s" % nodelist[i-1][0]
+        return node
+
+    def term(self, nodelist):
+        node = self.com_node(nodelist[0])
+        for i in range(2, len(nodelist), 2):
+            right = self.com_node(nodelist[i])
+            t = nodelist[i-1][0]
+            if t == token.STAR:
+                node = Mul([node, right])
+            elif t == token.SLASH:
+                node = Div([node, right])
+            elif t == token.PERCENT:
+                node = Mod([node, right])
+            elif t == token.DOUBLESLASH:
+                node = FloorDiv([node, right])
+            else:
+                raise ValueError, "unexpected token: %s" % t
+            node.lineno = nodelist[1][2]
+        return node
+
+    def factor(self, nodelist):
+        # factor: ('+'|'-'|'~') factor | power
+        elt = nodelist[0]
+        t = elt[0]
+        # Compile the operand via direct dispatch (cheaper than com_node).
+        node = self.lookup_node(nodelist[-1])(nodelist[-1][1:])
+        # need to handle (unary op)constant here...
+        if t == token.PLUS:
+            return UnaryAdd(node, lineno=elt[2])
+        elif t == token.MINUS:
+            return UnarySub(node, lineno=elt[2])
+        elif t == token.TILDE:
+            # NOTE(review): '~' falls through to the final return rather
+            # than returning directly like '+'/'-' -- same effect.
+            node = Invert(node, lineno=elt[2])
+        return node
+
+    def power(self, nodelist):
+        # power: atom trailer* ('**' factor)*
+        node = self.com_node(nodelist[0])
+        for i in range(1, len(nodelist)):
+            elt = nodelist[i]
+            if elt[0] == token.DOUBLESTAR:
+                # '**' binds everything accumulated so far as the base.
+                return Power([node, self.com_node(nodelist[i+1])],
+                             lineno=elt[2])
+
+            # Otherwise elt is a trailer: call, attribute, or subscript.
+            node = self.com_apply_trailer(node, elt)
+
+        return node
+
+    def atom(self, nodelist):
+        return self._atom_dispatch[nodelist[0][0]](nodelist)
+        n.lineno = nodelist[0][2]
+        return n
+
+    def atom_lpar(self, nodelist):
+        if nodelist[1][0] == token.RPAR:
+            return Tuple(())
+        return self.com_node(nodelist[1])
+
+    def atom_lsqb(self, nodelist):
+        if nodelist[1][0] == token.RSQB:
+            return List(())
+        return self.com_list_constructor(nodelist[1])
+
+    def atom_lbrace(self, nodelist):
+        if nodelist[1][0] == token.RBRACE:
+            return Dict(())
+        return self.com_dictmaker(nodelist[1])
+
+    def atom_backquote(self, nodelist):
+        return Backquote(self.com_node(nodelist[1]))
+
+    def atom_number(self, nodelist):
+        ### need to verify this matches compile.c
+        # eval() of the literal text yields the numeric value; this is
+        # only safe because the tokenizer produced the NUMBER token.
+        k = eval(nodelist[0][1])
+        return Const(k, lineno=nodelist[0][2])
+
+    def decode_literal(self, lit):
+        """Evaluate a string-literal token's text to its Python value,
+        honoring the module's declared source encoding."""
+        if self.encoding:
+            # this is particularly fragile & a bit of a
+            # hack... changes in compile.c:parsestr and
+            # tokenizer.c must be reflected here.
+            if self.encoding not in ['utf-8', 'iso-8859-1']:
+                # Re-encode the raw bytes into the declared encoding so
+                # the coding-cookie eval below decodes them correctly.
+                lit = unicode(lit, 'utf-8').encode(self.encoding)
+            return eval("# coding: %s\n%s" % (self.encoding, lit))
+        else:
+            return eval(lit)
+
+    def atom_string(self, nodelist):
+        k = ''
+        for node in nodelist:
+            k += self.decode_literal(node[1])
+        return Const(k, lineno=nodelist[0][2])
+
+    def atom_name(self, nodelist):
+        return Name(nodelist[0][1], lineno=nodelist[0][2])
+
+    # --------------------------------------------------------------
+    #
+    # INTERNAL PARSING UTILITIES
+    #
+
+    # The use of com_node() introduces a lot of extra stack frames,
+    # enough to cause a stack overflow compiling test.test_parser with
+    # the standard interpreter recursionlimit.  The com_node() is a
+    # convenience function that hides the dispatch details, but comes
+    # at a very high cost.  It is more efficient to dispatch directly
+    # in the callers.  In these cases, use lookup_node() and call the
+    # dispatched node directly.
+
+    def lookup_node(self, node):
+        return self._dispatch[node[0]]
+
+    _callers = {}
+
+    def com_node(self, node):
+        # Note: compile.c has handling in com_node for del_stmt, pass_stmt,
+        #       break_stmt, stmt, small_stmt, flow_stmt, simple_stmt,
+        #       and compound_stmt.
+        #       We'll just dispatch them.
+        return self._dispatch[node[0]](node[1:])
+
+    def com_NEWLINE(self, *args):
+        # A ';' at the end of a line can make a NEWLINE token appear
+        # here, Render it harmless. (genc discards ('discard',
+        # ('const', xxxx)) Nodes)
+        return Discard(Const(None))
+
+    def com_arglist(self, nodelist):
+        """Parse a varargslist into (names, defaults, flags).
+
+        flags gets CO_VARARGS / CO_VARKEYWORDS set when '*' / '**'
+        parameters are present.
+        """
+        # varargslist:
+        #     (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] | '**' NAME)
+        #   | fpdef ['=' test] (',' fpdef ['=' test])* [',']
+        # fpdef: NAME | '(' fplist ')'
+        # fplist: fpdef (',' fpdef)* [',']
+        names = []
+        defaults = []
+        flags = 0
+
+        i = 0
+        while i < len(nodelist):
+            node = nodelist[i]
+            if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
+                if node[0] == token.STAR:
+                    node = nodelist[i+1]
+                    if node[0] == token.NAME:
+                        # '*args': record the name, skip '*' NAME ','.
+                        names.append(node[1])
+                        flags = flags | CO_VARARGS
+                        i = i + 3
+
+                if i < len(nodelist):
+                    # should be DOUBLESTAR
+                    t = nodelist[i][0]
+                    if t == token.DOUBLESTAR:
+                        node = nodelist[i+1]
+                    else:
+                        raise ValueError, "unexpected token: %s" % t
+                    names.append(node[1])
+                    flags = flags | CO_VARKEYWORDS
+
+                # Star args are always last; nothing further to parse.
+                break
+
+            # fpdef: NAME | '(' fplist ')'
+            names.append(self.com_fpdef(node))
+
+            i = i + 1
+            if i >= len(nodelist):
+                break
+
+            if nodelist[i][0] == token.EQUAL:
+                # Default value follows '='; consume both.
+                defaults.append(self.com_node(nodelist[i + 1]))
+                i = i + 2
+            elif len(defaults):
+                # XXX This should be a syntax error.
+                # Treat "(a=1, b)" as "(a=1, b=None)"
+                defaults.append(Const(None))
+
+            # Step over the trailing ','.
+            i = i + 1
+
+        return names, defaults, flags
+
+    def com_fpdef(self, node):
+        # fpdef: NAME | '(' fplist ')'
+        if node[1][0] == token.LPAR:
+            return self.com_fplist(node[2])
+        return node[1][1]
+
+    def com_fplist(self, node):
+        # fplist: fpdef (',' fpdef)* [',']
+        if len(node) == 2:
+            return self.com_fpdef(node[1])
+        list = []
+        for i in range(1, len(node), 2):
+            list.append(self.com_fpdef(node[i]))
+        return tuple(list)
+
+    def com_dotted_name(self, node):
+        # String together the dotted names and return the string
+        name = ""
+        for n in node:
+            if type(n) == type(()) and n[0] == 1:
+                name = name + n[1] + '.'
+        return name[:-1]
+
+    def com_dotted_as_name(self, node):
+        assert node[0] == symbol.dotted_as_name
+        node = node[1:]
+        dot = self.com_dotted_name(node[0][1:])
+        if len(node) == 1:
+            return dot, None
+        assert node[1][1] == 'as'
+        assert node[2][0] == token.NAME
+        return dot, node[2][1]
+
+    def com_dotted_as_names(self, node):
+        assert node[0] == symbol.dotted_as_names
+        node = node[1:]
+        names = [self.com_dotted_as_name(node[0])]
+        for i in range(2, len(node), 2):
+            names.append(self.com_dotted_as_name(node[i]))
+        return names
+
+    def com_import_as_name(self, node):
+        assert node[0] == symbol.import_as_name
+        node = node[1:]
+        assert node[0][0] == token.NAME
+        if len(node) == 1:
+            return node[0][1], None
+        assert node[1][1] == 'as', node
+        assert node[2][0] == token.NAME
+        return node[0][1], node[2][1]
+
+    def com_import_as_names(self, node):
+        assert node[0] == symbol.import_as_names
+        node = node[1:]
+        names = [self.com_import_as_name(node[0])]
+        for i in range(2, len(node), 2):
+            names.append(self.com_import_as_name(node[i]))
+        return names
+
+    def com_bases(self, node):
+        bases = []
+        for i in range(1, len(node), 2):
+            bases.append(self.com_node(node[i]))
+        return bases
+
+    def com_try_finally(self, nodelist):
+        # try_fin_stmt: "try" ":" suite "finally" ":" suite
+        return TryFinally(self.com_node(nodelist[2]),
+                       self.com_node(nodelist[5]),
+                       lineno=nodelist[0][2])
+
+    def com_try_except(self, nodelist):
+        # try_except: 'try' ':' suite (except_clause ':' suite)* ['else' suite]
+        #tryexcept:  [TryNode, [except_clauses], elseNode)]
+        stmt = self.com_node(nodelist[2])
+        clauses = []
+        elseNode = None
+        for i in range(3, len(nodelist), 3):
+            node = nodelist[i]
+            if node[0] == symbol.except_clause:
+                # except_clause: 'except' [expr [',' expr]] */
+                if len(node) > 2:
+                    expr1 = self.com_node(node[2])
+                    if len(node) > 4:
+                        expr2 = self.com_assign(node[4], OP_ASSIGN)
+                    else:
+                        expr2 = None
+                else:
+                    expr1 = expr2 = None
+                clauses.append((expr1, expr2, self.com_node(nodelist[i+2])))
+
+            if node[0] == token.NAME:
+                elseNode = self.com_node(nodelist[i+2])
+        return TryExcept(self.com_node(nodelist[2]), clauses, elseNode,
+                         lineno=nodelist[0][2])
+
+    def com_augassign_op(self, node):
+        assert node[0] == symbol.augassign
+        return node[1]
+
+    def com_augassign(self, node):
+        """Return node suitable for lvalue of augmented assignment
+
+        Names, slices, and attributes are the only allowable nodes.
+        """
+        l = self.com_node(node)
+        if l.__class__ in (Name, Slice, Subscript, Getattr):
+            return l
+        raise SyntaxError, "can't assign to %s" % l.__class__.__name__
+
+    def com_assign(self, node, assigning):
+        """Return a node suitable for use as an "lvalue".
+
+        Walks down through wrapper productions until it reaches a
+        name/attribute/subscript/tuple/list target, raising SyntaxError
+        for anything that cannot be assigned to.
+        """
+        # loop to avoid trivial recursion
+        while 1:
+            t = node[0]
+            if t == symbol.exprlist or t == symbol.testlist or t == symbol.testlist_gexp:
+                if len(node) > 2:
+                    # Comma-separated targets: a tuple assignment.
+                    return self.com_assign_tuple(node, assigning)
+                node = node[1]
+            elif t in _assign_types:
+                if len(node) > 2:
+                    raise SyntaxError, "can't assign to operator"
+                node = node[1]
+            elif t == symbol.power:
+                if node[1][0] != symbol.atom:
+                    raise SyntaxError, "can't assign to operator"
+                if len(node) > 2:
+                    # Trailers present: compile all but the last, then
+                    # treat the final trailer as the assignment target.
+                    primary = self.com_node(node[1])
+                    for i in range(2, len(node)-1):
+                        ch = node[i]
+                        if ch[0] == token.DOUBLESTAR:
+                            raise SyntaxError, "can't assign to operator"
+                        primary = self.com_apply_trailer(primary, ch)
+                    return self.com_assign_trailer(primary, node[-1],
+                                                   assigning)
+                node = node[1]
+            elif t == symbol.atom:
+                t = node[1][0]
+                if t == token.LPAR:
+                    # Parenthesized target: unwrap and keep looping.
+                    node = node[2]
+                    if node[0] == token.RPAR:
+                        raise SyntaxError, "can't assign to ()"
+                elif t == token.LSQB:
+                    node = node[2]
+                    if node[0] == token.RSQB:
+                        raise SyntaxError, "can't assign to []"
+                    return self.com_assign_list(node, assigning)
+                elif t == token.NAME:
+                    return self.com_assign_name(node[1], assigning)
+                else:
+                    raise SyntaxError, "can't assign to literal"
+            else:
+                raise SyntaxError, "bad assignment"
+
+    def com_assign_tuple(self, node, assigning):
+        assigns = []
+        for i in range(1, len(node), 2):
+            assigns.append(self.com_assign(node[i], assigning))
+        return AssTuple(assigns, lineno=extractLineNo(node))
+
+    def com_assign_list(self, node, assigning):
+        assigns = []
+        for i in range(1, len(node), 2):
+            if i + 1 < len(node):
+                if node[i + 1][0] == symbol.list_for:
+                    raise SyntaxError, "can't assign to list comprehension"
+                assert node[i + 1][0] == token.COMMA, node[i + 1]
+            assigns.append(self.com_assign(node[i], assigning))
+        return AssList(assigns, lineno=extractLineNo(node))
+
+    def com_assign_name(self, node, assigning):
+        return AssName(node[1], assigning, lineno=node[2])
+
+    def com_assign_trailer(self, primary, node, assigning):
+        t = node[1][0]
+        if t == token.DOT:
+            return self.com_assign_attr(primary, node[2], assigning)
+        if t == token.LSQB:
+            return self.com_subscriptlist(primary, node[2], assigning)
+        if t == token.LPAR:
+            raise SyntaxError, "can't assign to function call"
+        raise SyntaxError, "unknown trailer type: %s" % t
+
+    def com_assign_attr(self, primary, node, assigning):
+        return AssAttr(primary, node[1], assigning, lineno=node[-1])
+
+    def com_binary(self, constructor, nodelist):
+        "Compile 'NODE (OP NODE)*' into (type, [ node1, ..., nodeN ])."
+        l = len(nodelist)
+        if l == 1:
+            n = nodelist[0]
+            return self.lookup_node(n)(n[1:])
+        items = []
+        for i in range(0, l, 2):
+            n = nodelist[i]
+            items.append(self.lookup_node(n)(n[1:]))
+        return constructor(items, lineno=extractLineNo(nodelist))
+
+    def com_stmt(self, node):
+        result = self.lookup_node(node)(node[1:])
+        assert result is not None
+        if isinstance(result, Stmt):
+            return result
+        return Stmt([result])
+
+    def com_append_stmt(self, stmts, node):
+        result = self.lookup_node(node)(node[1:])
+        assert result is not None
+        if isinstance(result, Stmt):
+            stmts.extend(result.nodes)
+        else:
+            stmts.append(result)
+
+    # List-comprehension support is compiled in only when the running
+    # grammar defines the 'list_for' symbol.
+    if hasattr(symbol, 'list_for'):
+        def com_list_constructor(self, nodelist):
+            # listmaker: test ( list_for | (',' test)* [','] )
+            values = []
+            for i in range(1, len(nodelist)):
+                if nodelist[i][0] == symbol.list_for:
+                    # A list_for must be the last child: comprehension.
+                    assert len(nodelist[i:]) == 1
+                    return self.com_list_comprehension(values[0],
+                                                       nodelist[i])
+                elif nodelist[i][0] == token.COMMA:
+                    continue
+                values.append(self.com_node(nodelist[i]))
+            return List(values, lineno=values[0].lineno)
+
+        def com_list_comprehension(self, expr, node):
+            # list_iter: list_for | list_if
+            # list_for: 'for' exprlist 'in' testlist [list_iter]
+            # list_if: 'if' test [list_iter]
+
+            # XXX should raise SyntaxError for assignment
+
+            lineno = node[1][2]
+            fors = []
+            while node:
+                t = node[1][1]
+                if t == 'for':
+                    assignNode = self.com_assign(node[2], OP_ASSIGN)
+                    listNode = self.com_node(node[4])
+                    newfor = ListCompFor(assignNode, listNode, [])
+                    newfor.lineno = node[1][2]
+                    fors.append(newfor)
+                    if len(node) == 5:
+                        node = None
+                    else:
+                        node = self.com_list_iter(node[5])
+                elif t == 'if':
+                    test = self.com_node(node[2])
+                    newif = ListCompIf(test, lineno=node[1][2])
+                    # NOTE(review): relies on the grammar guaranteeing a
+                    # 'for' precedes any 'if', so 'newfor' is bound here.
+                    newfor.ifs.append(newif)
+                    if len(node) == 3:
+                        node = None
+                    else:
+                        node = self.com_list_iter(node[3])
+                else:
+                    raise SyntaxError, \
+                          ("unexpected list comprehension element: %s %d"
+                           % (node, lineno))
+            return ListComp(expr, fors, lineno=lineno)
+
+        def com_list_iter(self, node):
+            # Unwrap the list_iter production to its single child.
+            assert node[0] == symbol.list_iter
+            return node[1]
+    else:
+        # Pre-list_for grammar: a listmaker is a plain comma list.
+        def com_list_constructor(self, nodelist):
+            values = []
+            for i in range(1, len(nodelist), 2):
+                values.append(self.com_node(nodelist[i]))
+            return List(values)
+
+    # Generator-expression support is compiled in only when the running
+    # grammar defines the 'gen_for' symbol.
+    if hasattr(symbol, 'gen_for'):
+        def com_generator_expression(self, expr, node):
+            # gen_iter: gen_for | gen_if
+            # gen_for: 'for' exprlist 'in' test [gen_iter]
+            # gen_if: 'if' test [gen_iter]
+
+            lineno = node[1][2]
+            fors = []
+            while node:
+                t = node[1][1]
+                if t == 'for':
+                    assignNode = self.com_assign(node[2], OP_ASSIGN)
+                    genNode = self.com_node(node[4])
+                    newfor = GenExprFor(assignNode, genNode, [],
+                                        lineno=node[1][2])
+                    fors.append(newfor)
+                    if (len(node)) == 5:
+                        node = None
+                    else:
+                        node = self.com_gen_iter(node[5])
+                elif t == 'if':
+                    test = self.com_node(node[2])
+                    newif = GenExprIf(test, lineno=node[1][2])
+                    # NOTE(review): relies on the grammar guaranteeing a
+                    # 'for' precedes any 'if', so 'newfor' is bound here.
+                    newfor.ifs.append(newif)
+                    if len(node) == 3:
+                        node = None
+                    else:
+                        node = self.com_gen_iter(node[3])
+                else:
+                    raise SyntaxError, \
+                            ("unexpected generator expression element: %s %d"
+                             % (node, lineno))
+            # The outermost 'for' iterates an already-evaluated iterable.
+            fors[0].is_outmost = True
+            return GenExpr(GenExprInner(expr, fors), lineno=lineno)
+
+        def com_gen_iter(self, node):
+            # Unwrap the gen_iter production to its single child.
+            assert node[0] == symbol.gen_iter
+            return node[1]
+
+    def com_dictmaker(self, nodelist):
+        # dictmaker: test ':' test (',' test ':' value)* [',']
+        items = []
+        for i in range(1, len(nodelist), 4):
+            items.append((self.com_node(nodelist[i]),
+                          self.com_node(nodelist[i+2])))
+        return Dict(items)
+
+    def com_apply_trailer(self, primaryNode, nodelist):
+        t = nodelist[1][0]
+        if t == token.LPAR:
+            return self.com_call_function(primaryNode, nodelist[2])
+        if t == token.DOT:
+            return self.com_select_member(primaryNode, nodelist[2])
+        if t == token.LSQB:
+            return self.com_subscriptlist(primaryNode, nodelist[2], OP_APPLY)
+
+        raise SyntaxError, 'unknown node type: %s' % t
+
+    def com_select_member(self, primaryNode, nodelist):
+        if nodelist[0] != token.NAME:
+            raise SyntaxError, "member must be a name"
+        return Getattr(primaryNode, nodelist[1], lineno=nodelist[2])
+
+    def com_call_function(self, primaryNode, nodelist):
+        if nodelist[0] == token.RPAR:
+            return CallFunc(primaryNode, [], lineno=extractLineNo(nodelist))
+        args = []
+        kw = 0
+        len_nodelist = len(nodelist)
+        for i in range(1, len_nodelist, 2):
+            node = nodelist[i]
+            if node[0] == token.STAR or node[0] == token.DOUBLESTAR:
+                break
+            kw, result = self.com_argument(node, kw)
+
+            if len_nodelist != 2 and isinstance(result, GenExpr) \
+               and len(node) == 3 and node[2][0] == symbol.gen_for:
+                # allow f(x for x in y), but reject f(x for x in y, 1)
+                # should use f((x for x in y), 1) instead of f(x for x in y, 1)
+                raise SyntaxError, 'generator expression needs parenthesis'
+
+            args.append(result)
+        else:
+            # No broken by star arg, so skip the last one we processed.
+            i = i + 1
+        if i < len_nodelist and nodelist[i][0] == token.COMMA:
+            # need to accept an application that looks like "f(a, b,)"
+            i = i + 1
+        star_node = dstar_node = None
+        while i < len_nodelist:
+            tok = nodelist[i]
+            ch = nodelist[i+1]
+            i = i + 3
+            if tok[0]==token.STAR:
+                if star_node is not None:
+                    raise SyntaxError, 'already have the varargs indentifier'
+                star_node = self.com_node(ch)
+            elif tok[0]==token.DOUBLESTAR:
+                if dstar_node is not None:
+                    raise SyntaxError, 'already have the kwargs indentifier'
+                dstar_node = self.com_node(ch)
+            else:
+                raise SyntaxError, 'unknown node type: %s' % tok
+        return CallFunc(primaryNode, args, star_node, dstar_node,
+                        lineno=extractLineNo(nodelist))
+
+    def com_argument(self, nodelist, kw):
+        if len(nodelist) == 3 and nodelist[2][0] == symbol.gen_for:
+            test = self.com_node(nodelist[1])
+            return 0, self.com_generator_expression(test, nodelist[2])
+        if len(nodelist) == 2:
+            if kw:
+                raise SyntaxError, "non-keyword arg after keyword arg"
+            return 0, self.com_node(nodelist[1])
+        result = self.com_node(nodelist[3])
+        n = nodelist[1]
+        while len(n) == 2 and n[0] != token.NAME:
+            n = n[1]
+        if n[0] != token.NAME:
+            raise SyntaxError, "keyword can't be an expression (%s)"%n[0]
+        node = Keyword(n[1], result, lineno=n[2])
+        return 1, node
+
+    def com_subscriptlist(self, primary, nodelist, assigning):
+        """Compile '[' ... ']' into a Slice or Subscript node."""
+        # slicing:      simple_slicing | extended_slicing
+        # simple_slicing:   primary "[" short_slice "]"
+        # extended_slicing: primary "[" slice_list "]"
+        # slice_list:   slice_item ("," slice_item)* [","]
+
+        # backwards compat slice for '[i:j]'
+        if len(nodelist) == 2:
+            sub = nodelist[1]
+            # A lone short_slice (colon present, no sliceop stride)
+            # compiles to the old-style Slice node.
+            if (sub[1][0] == token.COLON or \
+                            (len(sub) > 2 and sub[2][0] == token.COLON)) and \
+                            sub[-1][0] != symbol.sliceop:
+                return self.com_slice(primary, sub, assigning)
+
+        # Everything else becomes an extended Subscript.
+        subscripts = []
+        for i in range(1, len(nodelist), 2):
+            subscripts.append(self.com_subscript(nodelist[i]))
+        return Subscript(primary, assigning, subscripts,
+                         lineno=extractLineNo(nodelist))
+
+    def com_subscript(self, node):
+        # slice_item: expression | proper_slice | ellipsis
+        ch = node[1]
+        t = ch[0]
+        if t == token.DOT and node[2][0] == token.DOT:
+            return Ellipsis()
+        if t == token.COLON or len(node) > 2:
+            return self.com_sliceobj(node)
+        return self.com_node(ch)
+
+    def com_sliceobj(self, node):
+        """Compile a proper_slice into a Sliceobj node.
+
+        Missing bounds are represented as Const(None).
+        """
+        # proper_slice: short_slice | long_slice
+        # short_slice:  [lower_bound] ":" [upper_bound]
+        # long_slice:   short_slice ":" [stride]
+        # lower_bound:  expression
+        # upper_bound:  expression
+        # stride:       expression
+        #
+        # Note: a stride may be further slicing...
+
+        items = []
+
+        if node[1][0] == token.COLON:
+            # No lower bound.
+            items.append(Const(None))
+            i = 2
+        else:
+            items.append(self.com_node(node[1]))
+            # i == 2 is a COLON
+            i = 3
+
+        if i < len(node) and node[i][0] == symbol.test:
+            items.append(self.com_node(node[i]))
+            i = i + 1
+        else:
+            # No upper bound.
+            items.append(Const(None))
+
+        # a short_slice has been built. look for long_slice now by looking
+        # for strides...
+        for j in range(i, len(node)):
+            ch = node[j]
+            if len(ch) == 2:
+                # Bare ':' sliceop -- no stride expression.
+                items.append(Const(None))
+            else:
+                items.append(self.com_node(ch[2]))
+        return Sliceobj(items, lineno=extractLineNo(node))
+
+    def com_slice(self, primary, node, assigning):
+        # short_slice:  [lower_bound] ":" [upper_bound]
+        lower = upper = None
+        if len(node) == 3:
+            if node[1][0] == token.COLON:
+                upper = self.com_node(node[2])
+            else:
+                lower = self.com_node(node[1])
+        elif len(node) == 4:
+            lower = self.com_node(node[1])
+            upper = self.com_node(node[3])
+        return Slice(primary, assigning, lower, upper,
+                     lineno=extractLineNo(node))
+
+    def get_docstring(self, node, n=None):
+        """Return the docstring text of *node*, or None.
+
+        Recurses through wrapper productions; only a leading STRING atom
+        yields a docstring.
+        """
+        if n is None:
+            n = node[0]
+            node = node[1:]
+        if n == symbol.suite:
+            if len(node) == 1:
+                return self.get_docstring(node[0])
+            for sub in node:
+                if sub[0] == symbol.stmt:
+                    return self.get_docstring(sub)
+            return None
+        if n == symbol.file_input:
+            for sub in node:
+                if sub[0] == symbol.stmt:
+                    return self.get_docstring(sub)
+            return None
+        if n == symbol.atom:
+            if node[0][0] == token.STRING:
+                # Concatenate adjacent string literals.
+                s = ''
+                for t in node:
+                    s = s + eval(t[1])
+                return s
+            return None
+        if n == symbol.stmt or n == symbol.simple_stmt \
+           or n == symbol.small_stmt:
+            return self.get_docstring(node[0])
+        # Single-child expression wrappers are transparent.
+        if n in _doc_nodes and len(node) == 1:
+            return self.get_docstring(node[0])
+        return None
+
+
+# Single-child expression productions that get_docstring() looks through
+# when searching for a leading string literal.
+_doc_nodes = [
+    symbol.expr_stmt,
+    symbol.testlist,
+    symbol.testlist_safe,
+    symbol.test,
+    symbol.and_test,
+    symbol.not_test,
+    symbol.comparison,
+    symbol.expr,
+    symbol.xor_expr,
+    symbol.and_expr,
+    symbol.shift_expr,
+    symbol.arith_expr,
+    symbol.term,
+    symbol.factor,
+    symbol.power,
+    ]
+
+# comp_op: '<' | '>' | '=' | '>=' | '<=' | '<>' | '!=' | '=='
+#             | 'in' | 'not' 'in' | 'is' | 'is' 'not'
+# Maps comparison-operator token ids to the op strings stored in
+# Compare nodes ('in'/'is' variants are handled in comparison()).
+_cmp_types = {
+    token.LESS : '<',
+    token.GREATER : '>',
+    token.EQEQUAL : '==',
+    # NOTE(review): token.EQUAL ('=') also maps to '==', mirroring the
+    # '=' listed in the comp_op comment above -- confirm against Grammar.
+    token.EQUAL : '==',
+    token.LESSEQUAL : '<=',
+    token.GREATEREQUAL : '>=',
+    token.NOTEQUAL : '!=',
+    }
+
+# Productions that may legally appear where a generic node is dispatched.
+_legal_node_types = [
+    symbol.funcdef,
+    symbol.classdef,
+    symbol.stmt,
+    symbol.small_stmt,
+    symbol.flow_stmt,
+    symbol.simple_stmt,
+    symbol.compound_stmt,
+    symbol.expr_stmt,
+    symbol.print_stmt,
+    symbol.del_stmt,
+    symbol.pass_stmt,
+    symbol.break_stmt,
+    symbol.continue_stmt,
+    symbol.return_stmt,
+    symbol.raise_stmt,
+    symbol.import_stmt,
+    symbol.global_stmt,
+    symbol.exec_stmt,
+    symbol.assert_stmt,
+    symbol.if_stmt,
+    symbol.while_stmt,
+    symbol.for_stmt,
+    symbol.try_stmt,
+    symbol.suite,
+    symbol.testlist,
+    symbol.testlist_safe,
+    symbol.test,
+    symbol.and_test,
+    symbol.not_test,
+    symbol.comparison,
+    symbol.exprlist,
+    symbol.expr,
+    symbol.xor_expr,
+    symbol.and_expr,
+    symbol.shift_expr,
+    symbol.arith_expr,
+    symbol.term,
+    symbol.factor,
+    symbol.power,
+    symbol.atom,
+    ]
+
+# Older grammars lack yield_stmt; add it only when present.
+if hasattr(symbol, 'yield_stmt'):
+    _legal_node_types.append(symbol.yield_stmt)
+
+# Single-child wrapper productions that com_assign() unwraps while
+# searching for the real assignment target.
+_assign_types = [
+    symbol.test,
+    symbol.and_test,
+    symbol.not_test,
+    symbol.comparison,
+    symbol.expr,
+    symbol.xor_expr,
+    symbol.and_expr,
+    symbol.shift_expr,
+    symbol.arith_expr,
+    symbol.term,
+    symbol.factor,
+    ]
+
+import types
+_names = {}
+for k, v in symbol.sym_name.items():
+    _names[k] = v
+for k, v in token.tok_name.items():
+    _names[k] = v
+
+def debug_tree(tree):
+    l = []
+    for elt in tree:
+        if type(elt) == types.IntType:
+            l.append(_names.get(elt, elt))
+        elif type(elt) == types.StringType:
+            l.append(elt)
+        else:
+            l.append(debug_tree(elt))
+    return l
diff --git a/depot_tools/release/win/python_24/Lib/compiler/visitor.py b/depot_tools/release/win/python_24/Lib/compiler/visitor.py
new file mode 100644
index 0000000..9e39d36
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/compiler/visitor.py
@@ -0,0 +1,113 @@
+from compiler import ast
+
+# XXX should probably rename ASTVisitor to ASTWalker
+# XXX can it be made even more generic?
+
+class ASTVisitor:
+    """Performs a depth-first walk of the AST
+
+    The ASTVisitor will walk the AST, performing either a preorder or
+    postorder traversal depending on which method is called.
+
+    methods:
+    preorder(tree, visitor)
+    postorder(tree, visitor)
+        tree: an instance of ast.Node
+        visitor: an instance with visitXXX methods
+
+    The ASTVisitor is responsible for walking over the tree in the
+    correct order.  For each node, it checks the visitor argument for
+    a method named 'visitNodeType' where NodeType is the name of the
+    node's class, e.g. Class.  If the method exists, it is called
+    with the node as its sole argument.
+
+    The visitor method for a particular node type can control how
+    child nodes are visited during a preorder walk.  (It can't control
+    the order during a postorder walk, because it is called _after_
+    the walk has occurred.)  The ASTVisitor modifies the visitor
+    argument by adding a visit method to the visitor; this method can
+    be used to visit a child node of arbitrary type.
+    """
+
+    VERBOSE = 0
+
+    def __init__(self):
+        self.node = None      # node most recently passed to dispatch()
+        self._cache = {}      # node class -> visitor method (or self.default)
+
+    def default(self, node, *args):
+        """Fallback handler: simply dispatch on each child node."""
+        for child in node.getChildNodes():
+            self.dispatch(child, *args)
+
+    def dispatch(self, node, *args):
+        # self.visitor is bound by preorder() before any dispatch happens.
+        self.node = node
+        klass = node.__class__
+        # Cache the getattr lookup per node class so it runs only once.
+        meth = self._cache.get(klass, None)
+        if meth is None:
+            className = klass.__name__
+            meth = getattr(self.visitor, 'visit' + className, self.default)
+            self._cache[klass] = meth
+##        if self.VERBOSE > 0:
+##            className = klass.__name__
+##            if self.VERBOSE == 1:
+##                if meth == 0:
+##                    print "dispatch", className
+##            else:
+##                print "dispatch", className, (meth and meth.__name__ or '')
+        return meth(node, *args)
+
+    def preorder(self, tree, visitor, *args):
+        """Do preorder walk of tree using visitor"""
+        self.visitor = visitor
+        # Give the visitor a way to recurse into children of its choosing.
+        visitor.visit = self.dispatch
+        self.dispatch(tree, *args) # XXX *args make sense?
+
+class ExampleASTVisitor(ASTVisitor):
+    """Prints examples of the nodes that aren't visited
+
+    This visitor-driver is only useful for development, when it's
+    helpful to develop a visitor incrementally, and get feedback on what
+    you still have to do.
+    """
+    # Shared across instances: node classes already reported, so each
+    # unvisited class is printed only once.
+    examples = {}
+
+    def dispatch(self, node, *args):
+        self.node = node
+        meth = self._cache.get(node.__class__, None)
+        className = node.__class__.__name__
+        if meth is None:
+            # Note: falls back to 0 (not self.default) so the "unvisited"
+            # branch below can be taken.
+            meth = getattr(self.visitor, 'visit' + className, 0)
+            self._cache[node.__class__] = meth
+        if self.VERBOSE > 1:
+            print "dispatch", className, (meth and meth.__name__ or '')
+        if meth:
+            # NOTE(review): unlike ASTVisitor.dispatch, the visitor
+            # method's return value is discarded here.
+            meth(node, *args)
+        elif self.VERBOSE > 0:
+            klass = node.__class__
+            if not self.examples.has_key(klass):
+                self.examples[klass] = klass
+                print
+                print self.visitor
+                print klass
+                for attr in dir(node):
+                    if attr[0] != '_':
+                        print "\t", "%-12.12s" % attr, getattr(node, attr)
+                print
+            return self.default(node, *args)
+
+# XXX this is an API change
+
+# Default walker class, module-level so it can be swapped (e.g. for
+# ExampleASTVisitor during development) without changing callers.
+_walker = ASTVisitor
+def walk(tree, visitor, walker=None, verbose=None):
+    """Walk tree in preorder with visitor; return the visitor instance."""
+    if walker is None:
+        walker = _walker()
+    if verbose is not None:
+        walker.VERBOSE = verbose
+    walker.preorder(tree, visitor)
+    return walker.visitor
+
+def dumpNode(node):
+    """Print a node's class and all its public attributes (debug helper)."""
+    print node.__class__
+    for attr in dir(node):
+        if attr[0] != '_':
+            print "\t", "%-10.10s" % attr, getattr(node, attr)
diff --git a/depot_tools/release/win/python_24/Lib/cookielib.py b/depot_tools/release/win/python_24/Lib/cookielib.py
new file mode 100644
index 0000000..19973a8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/cookielib.py
@@ -0,0 +1,1751 @@
+"""HTTP cookie handling for web clients.
+
+This module has (now fairly distant) origins in Gisle Aas' Perl module
+HTTP::Cookies, from the libwww-perl library.
+
+Docstrings, comments and debug strings in this code refer to the
+attributes of the HTTP cookie system as cookie-attributes, to distinguish
+them clearly from Python attributes.
+
+Class diagram (note that the classes which do not derive from
+FileCookieJar are not distributed with the Python standard library, but
+are available from http://wwwsearch.sf.net/):
+
+                        CookieJar____
+                        /     \      \
+            FileCookieJar      \      \
+             /    |   \         \      \
+ MozillaCookieJar | LWPCookieJar \      \
+                  |               |      \
+                  |   ---MSIEBase |       \
+                  |  /      |     |        \
+                  | /   MSIEDBCookieJar BSDDBCookieJar
+                  |/
+               MSIECookieJar
+
+"""
+
+import sys, re, urlparse, copy, time, urllib, logging
+from types import StringTypes
+try:
+    import threading as _threading
+except ImportError:
+    import dummy_threading as _threading
+import httplib  # only for the default HTTP port
+from calendar import timegm
+
+debug = logging.getLogger("cookielib").debug
+
+DEFAULT_HTTP_PORT = str(httplib.HTTP_PORT)
+MISSING_FILENAME_TEXT = ("a filename was not supplied (nor was the CookieJar "
+                         "instance initialised with one)")
+
+def reraise_unmasked_exceptions(unmasked=()):
+    # There are a few catch-all except: statements in this module, for
+    # catching input that's bad in unexpected ways.
+    # This function re-raises some exceptions we don't want to trap.
+    # Must be called from inside an except block, since it inspects
+    # sys.exc_info() for the current exception.
+    unmasked = unmasked + (KeyboardInterrupt, SystemExit, MemoryError)
+    etype = sys.exc_info()[0]
+    if issubclass(etype, unmasked):
+        raise
+    # swallowed an exception: surface it as a warning (with traceback)
+    # rather than losing it silently.
+    import warnings, traceback, StringIO
+    f = StringIO.StringIO()
+    traceback.print_exc(None, f)
+    msg = f.getvalue()
+    warnings.warn("cookielib bug!\n%s" % msg, stacklevel=2)
+
+
+# Date/time conversion
+# -----------------------------------------------------------------------------
+
+EPOCH_YEAR = 1970
+def _timegm(tt):
+    """Range-checked calendar.timegm: return seconds since epoch, or None.
+
+    The loose bounds (hour <= 24, sec <= 61) presumably allow "24:00"
+    and leap-second timestamps -- TODO confirm.
+    """
+    year, month, mday, hour, min, sec = tt[:6]
+    if ((year >= EPOCH_YEAR) and (1 <= month <= 12) and (1 <= mday <= 31) and
+        (0 <= hour <= 24) and (0 <= min <= 59) and (0 <= sec <= 61)):
+        return timegm(tt)
+    else:
+        return None
+
+# Abbreviated English day/month names as used in HTTP/Netscape dates.
+DAYS = ["Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun"]
+MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
+          "Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
+# Lowercased copy, for case-insensitive month lookup in _str2time().
+MONTHS_LOWER = []
+for month in MONTHS: MONTHS_LOWER.append(month.lower())
+
+def time2isoz(t=None):
+    """Return an ISO-style string representing the UTC time t (seconds
+    since epoch).
+
+    If the function is called without an argument, it will use the current
+    time.
+
+    The format of the returned string is like "YYYY-MM-DD hh:mm:ssZ",
+    representing Universal Time (UTC, aka GMT).  An example of this format is:
+
+    1994-11-24 08:49:37Z
+
+    """
+    if t is None: t = time.time()
+    year, mon, mday, hour, min, sec = time.gmtime(t)[:6]
+    return "%04d-%02d-%02d %02d:%02d:%02dZ" % (
+        year, mon, mday, hour, min, sec)
+
+def time2netscape(t=None):
+    """Return a Netscape-cookie-style string for the UTC time t (seconds
+    since epoch).
+
+    If the function is called without an argument, it will use the current
+    time.
+
+    The format of the returned string is like this:
+
+    Wed, DD-Mon-YYYY HH:MM:SS GMT
+
+    """
+    if t is None: t = time.time()
+    year, mon, mday, hour, min, sec, wday = time.gmtime(t)[:7]
+    return "%s %02d-%s-%04d %02d:%02d:%02d GMT" % (
+        DAYS[wday], mday, MONTHS[mon-1], year, hour, min, sec)
+
+
+# Timezone names treated as equivalent to UTC (zero offset).
+UTC_ZONES = {"GMT": None, "UTC": None, "UT": None, "Z": None}
+
+# Numeric timezone: optional sign, 1-2 digit hours, optional ":" and minutes.
+TIMEZONE_RE = re.compile(r"^([-+])?(\d\d?):?(\d\d)?$")
+def offset_from_tz_string(tz):
+    """Return the offset of timezone string tz in seconds, or None if
+    the string is not a recognized UTC name or numeric offset."""
+    offset = None
+    if tz in UTC_ZONES:
+        offset = 0
+    else:
+        m = TIMEZONE_RE.search(tz)
+        if m:
+            offset = 3600 * int(m.group(2))
+            if m.group(3):
+                offset = offset + 60 * int(m.group(3))
+            if m.group(1) == '-':
+                offset = -offset
+    return offset
+
+def _str2time(day, mon, yr, hr, min, sec, tz):
+    """Build seconds-since-epoch from loosely parsed date/time strings.
+
+    All arguments are strings (or None for missing clock/timezone parts)
+    as captured by the date regexps below.  Returns None if any part is
+    invalid or the timezone string is unrecognized.
+    """
+    # translate month name to number
+    # month numbers start with 1 (January)
+    try:
+        mon = MONTHS_LOWER.index(mon.lower())+1
+    except ValueError:
+        # maybe it's already a number
+        try:
+            imon = int(mon)
+        except ValueError:
+            return None
+        if 1 <= imon <= 12:
+            mon = imon
+        else:
+            return None
+
+    # make sure clock elements are defined
+    if hr is None: hr = 0
+    if min is None: min = 0
+    if sec is None: sec = 0
+
+    yr = int(yr)
+    day = int(day)
+    hr = int(hr)
+    min = int(min)
+    sec = int(sec)
+
+    if yr < 1000:
+        # find "obvious" year: pick the century that puts the 2-digit
+        # year within 50 years of the current date.
+        cur_yr = time.localtime(time.time())[0]
+        m = cur_yr % 100
+        tmp = yr
+        yr = yr + cur_yr - m
+        m = m - tmp
+        if abs(m) > 50:
+            if m > 0: yr = yr + 100
+            else: yr = yr - 100
+
+    # convert UTC time tuple to seconds since epoch (not timezone-adjusted)
+    t = _timegm((yr, mon, day, hr, min, sec, tz))
+
+    if t is not None:
+        # adjust time using timezone string, to get absolute time since epoch
+        if tz is None:
+            tz = "UTC"
+        tz = tz.upper()
+        offset = offset_from_tz_string(tz)
+        if offset is None:
+            return None
+        t = t - offset
+
+    return t
+
+# Strictly RFC 1123-shaped date, e.g. "Wed, 09 Feb 1994 22:23:32 GMT".
+# NOTE(review): the continuation line is not a raw string; the \d escapes
+# still pass through unchanged in a regular string literal.
+STRICT_DATE_RE = re.compile(
+    r"^[SMTWF][a-z][a-z], (\d\d) ([JFMASOND][a-z][a-z]) "
+    "(\d\d\d\d) (\d\d):(\d\d):(\d\d) GMT$")
+# Optional leading weekday name (possibly abbreviated), stripped before
+# loose parsing.
+WEEKDAY_RE = re.compile(
+    r"^(?:Sun|Mon|Tue|Wed|Thu|Fri|Sat)[a-z]*,?\s*", re.I)
+LOOSE_HTTP_DATE_RE = re.compile(
+    r"""^
+    (\d\d?)            # day
+       (?:\s+|[-\/])
+    (\w+)              # month
+        (?:\s+|[-\/])
+    (\d+)              # year
+    (?:
+          (?:\s+|:)    # separator before clock
+       (\d\d?):(\d\d)  # hour:min
+       (?::(\d\d))?    # optional seconds
+    )?                 # optional clock
+       \s*
+    ([-+]?\d{2,4}|(?![APap][Mm]\b)[A-Za-z]+)? # timezone
+       \s*
+    (?:\(\w+\))?       # ASCII representation of timezone in parens.
+       \s*$""", re.X)
+def http2time(text):
+    """Returns time in seconds since epoch of time represented by a string.
+
+    Return value is an integer.
+
+    None is returned if the format of str is unrecognized, the time is outside
+    the representable range, or the timezone string is not recognized.  If the
+    string contains no timezone, UTC is assumed.
+
+    The timezone in the string may be numerical (like "-0800" or "+0100") or a
+    string timezone (like "UTC", "GMT", "BST" or "EST").  Currently, only the
+    timezone strings equivalent to UTC (zero offset) are known to the function.
+
+    The function loosely parses the following formats:
+
+    Wed, 09 Feb 1994 22:23:32 GMT       -- HTTP format
+    Tuesday, 08-Feb-94 14:15:29 GMT     -- old rfc850 HTTP format
+    Tuesday, 08-Feb-1994 14:15:29 GMT   -- broken rfc850 HTTP format
+    09 Feb 1994 22:23:32 GMT            -- HTTP format (no weekday)
+    08-Feb-94 14:15:29 GMT              -- rfc850 format (no weekday)
+    08-Feb-1994 14:15:29 GMT            -- broken rfc850 format (no weekday)
+
+    The parser ignores leading and trailing whitespace.  The time may be
+    absent.
+
+    If the year is given with only 2 digits, the function will select the
+    century that makes the year closest to the current date.
+
+    """
+    # fast exit for strictly conforming string
+    m = STRICT_DATE_RE.search(text)
+    if m:
+        g = m.groups()
+        mon = MONTHS_LOWER.index(g[1].lower()) + 1
+        tt = (int(g[2]), mon, int(g[0]),
+              int(g[3]), int(g[4]), float(g[5]))
+        return _timegm(tt)
+
+    # No, we need some messy parsing...
+
+    # clean up
+    text = text.lstrip()
+    text = WEEKDAY_RE.sub("", text, 1)  # Useless weekday
+
+    # tz is time zone specifier string
+    day, mon, yr, hr, min, sec, tz = [None]*7
+
+    # loose regexp parse
+    m = LOOSE_HTTP_DATE_RE.search(text)
+    if m is not None:
+        day, mon, yr, hr, min, sec, tz = m.groups()
+    else:
+        return None  # bad format
+
+    return _str2time(day, mon, yr, hr, min, sec, tz)
+
+# NOTE(review): not a raw string; the backslash escapes used here (\d,
+# \s, \. etc.) are passed through unchanged in a regular string literal.
+ISO_DATE_RE = re.compile(
+    """^
+    (\d{4})              # year
+       [-\/]?
+    (\d\d?)              # numerical month
+       [-\/]?
+    (\d\d?)              # day
+   (?:
+         (?:\s+|[-:Tt])  # separator before clock
+      (\d\d?):?(\d\d)    # hour:min
+      (?::?(\d\d(?:\.\d*)?))?  # optional seconds (and fractional)
+   )?                    # optional clock
+      \s*
+   ([-+]?\d\d?:?(:?\d\d)?
+    |Z|z)?               # timezone  (Z is "zero meridian", i.e. GMT)
+      \s*$""", re.X)
+def iso2time(text):
+    """
+    As for http2time, but parses the ISO 8601 formats:
+
+    1994-02-03 14:15:29 -0100    -- ISO 8601 format
+    1994-02-03 14:15:29          -- zone is optional
+    1994-02-03                   -- only date
+    1994-02-03T14:15:29          -- Use T as separator
+    19940203T141529Z             -- ISO 8601 compact format
+    19940203                     -- only date
+
+    """
+    # clean up
+    text = text.lstrip()
+
+    # tz is time zone specifier string
+    day, mon, yr, hr, min, sec, tz = [None]*7
+
+    # loose regexp parse
+    m = ISO_DATE_RE.search(text)
+    if m is not None:
+        # XXX there's an extra bit of the timezone I'm ignoring here: is
+        #   this the right thing to do?
+        # (The trailing "_" discards the nested minutes group of the
+        # timezone pattern.)
+        yr, mon, day, hr, min, sec, tz, _ = m.groups()
+    else:
+        return None  # bad format
+
+    return _str2time(day, mon, yr, hr, min, sec, tz)
+
+
+# Header parsing
+# -----------------------------------------------------------------------------
+
+def unmatched(match):
+    """Return unmatched part of re.Match object.
+
+    I.e. the text before and after the matched span, concatenated.
+    """
+    start, end = match.span(0)
+    return match.string[:start]+match.string[end:]
+
+# Piecewise lexer patterns for header values; each one anchors at the
+# start and the consumed text is removed via unmatched() above.
+HEADER_TOKEN_RE =        re.compile(r"^\s*([^=\s;,]+)")
+HEADER_QUOTED_VALUE_RE = re.compile(r"^\s*=\s*\"([^\"\\]*(?:\\.[^\"\\]*)*)\"")
+HEADER_VALUE_RE =        re.compile(r"^\s*=\s*([^\s;,]*)")
+HEADER_ESCAPE_RE = re.compile(r"\\(.)")
+def split_header_words(header_values):
+    r"""Parse header values into a list of lists containing key,value pairs.
+
+    The function knows how to deal with ",", ";" and "=" as well as quoted
+    values after "=".  A list of space separated tokens are parsed as if they
+    were separated by ";".
+
+    If the header_values passed as argument contains multiple values, then they
+    are treated as if they were a single value separated by comma ",".
+
+    This means that this function is useful for parsing header fields that
+    follow this syntax (BNF as from the HTTP/1.1 specification, but we relax
+    the requirement for tokens).
+
+      headers           = #header
+      header            = (token | parameter) *( [";"] (token | parameter))
+
+      token             = 1*<any CHAR except CTLs or separators>
+      separators        = "(" | ")" | "<" | ">" | "@"
+                        | "," | ";" | ":" | "\" | <">
+                        | "/" | "[" | "]" | "?" | "="
+                        | "{" | "}" | SP | HT
+
+      quoted-string     = ( <"> *(qdtext | quoted-pair ) <"> )
+      qdtext            = <any TEXT except <">>
+      quoted-pair       = "\" CHAR
+
+      parameter         = attribute "=" value
+      attribute         = token
+      value             = token | quoted-string
+
+    Each header is represented by a list of key/value pairs.  The value for a
+    simple token (not part of a parameter) is None.  Syntactically incorrect
+    headers will not necessarily be parsed as you would want.
+
+    This is easier to describe with some examples:
+
+    >>> split_header_words(['foo="bar"; port="80,81"; discard, bar=baz'])
+    [[('foo', 'bar'), ('port', '80,81'), ('discard', None)], [('bar', 'baz')]]
+    >>> split_header_words(['text/html; charset="iso-8859-1"'])
+    [[('text/html', None), ('charset', 'iso-8859-1')]]
+    >>> split_header_words([r'Basic realm="\"foo\bar\""'])
+    [[('Basic', None), ('realm', '"foobar"')]]
+
+    """
+    # Must be a sequence of header values, never a bare string.
+    assert type(header_values) not in StringTypes
+    result = []
+    for text in header_values:
+        orig_text = text
+        pairs = []
+        while text:
+            m = HEADER_TOKEN_RE.search(text)
+            if m:
+                text = unmatched(m)
+                name = m.group(1)
+                m = HEADER_QUOTED_VALUE_RE.search(text)
+                if m:  # quoted value
+                    text = unmatched(m)
+                    value = m.group(1)
+                    # undo backslash-escaping inside the quoted string
+                    value = HEADER_ESCAPE_RE.sub(r"\1", value)
+                else:
+                    m = HEADER_VALUE_RE.search(text)
+                    if m:  # unquoted value
+                        text = unmatched(m)
+                        value = m.group(1)
+                        value = value.rstrip()
+                    else:
+                        # no value, a lone token
+                        value = None
+                pairs.append((name, value))
+            elif text.lstrip().startswith(","):
+                # concatenated headers, as per RFC 2616 section 4.2
+                text = text.lstrip()[1:]
+                if pairs: result.append(pairs)
+                pairs = []
+            else:
+                # skip junk
+                non_junk, nr_junk_chars = re.subn("^[=\s;]*", "", text)
+                assert nr_junk_chars > 0, (
+                    "split_header_words bug: '%s', '%s', %s" %
+                    (orig_text, text, pairs))
+                text = non_junk
+        if pairs: result.append(pairs)
+    return result
+
+HEADER_JOIN_ESCAPE_RE = re.compile(r"([\"\\])")
+def join_header_words(lists):
+    """Do the inverse (almost) of the conversion done by split_header_words.
+
+    Takes a list of lists of (key, value) pairs and produces a single header
+    value.  Attribute values are quoted if needed.
+
+    >>> join_header_words([[("text/plain", None), ("charset", "iso-8859/1")]])
+    'text/plain; charset="iso-8859/1"'
+    >>> join_header_words([[("text/plain", None)], [("charset", "iso-8859/1")]])
+    'text/plain, charset="iso-8859/1"'
+
+    """
+    headers = []
+    for pairs in lists:
+        attr = []
+        for k, v in pairs:
+            if v is not None:
+                # quote any value that is not a simple \w+ token
+                if not re.search(r"^\w+$", v):
+                    v = HEADER_JOIN_ESCAPE_RE.sub(r"\\\1", v)  # escape " and \
+                    v = '"%s"' % v
+                k = "%s=%s" % (k, v)
+            attr.append(k)
+        if attr: headers.append("; ".join(attr))
+    return ", ".join(headers)
+
+def parse_ns_headers(ns_headers):
+    """Ad-hoc parser for Netscape protocol cookie-attributes.
+
+    The old Netscape cookie format for Set-Cookie can for instance contain
+    an unquoted "," in the expires field, so we have to use this ad-hoc
+    parser instead of split_header_words.
+
+    XXX This may not make the best possible effort to parse all the crap
+    that Netscape Cookie headers contain.  Ronald Tschalar's HTTPClient
+    parser is probably better, so could do worse than following that if
+    this ever gives any trouble.
+
+    Currently, this is also used for parsing RFC 2109 cookies.
+
+    """
+    known_attrs = ("expires", "domain", "path", "secure",
+                   # RFC 2109 attrs (may turn up in Netscape cookies, too)
+                   "port", "max-age")
+
+    result = []
+    for ns_header in ns_headers:
+        pairs = []
+        version_set = False
+        for ii, param in enumerate(re.split(r";\s*", ns_header)):
+            param = param.rstrip()
+            if param == "": continue
+            if "=" not in param:
+                k, v = param, None
+            else:
+                k, v = re.split(r"\s*=\s*", param, 1)
+                k = k.lstrip()
+            # ii == 0 is the cookie's own name=value pair; only the
+            # following params are cookie-attributes.
+            if ii != 0:
+                lc = k.lower()
+                # normalize known attribute names to lowercase
+                if lc in known_attrs:
+                    k = lc
+                if k == "version":
+                    # This is an RFC 2109 cookie.  Will be treated as RFC 2965
+                    # cookie in rest of code.
+                    # Probably it should be parsed with split_header_words, but
+                    # that's too much hassle.
+                    version_set = True
+                if k == "expires":
+                    # convert expires date to seconds since epoch
+                    if v.startswith('"'): v = v[1:]
+                    if v.endswith('"'): v = v[:-1]
+                    v = http2time(v)  # None if invalid
+            pairs.append((k, v))
+
+        if pairs:
+            # a cookie with no explicit version is a Netscape cookie
+            if not version_set:
+                pairs.append(("version", "0"))
+            result.append(pairs)
+
+    return result
+
+
+# Crude IPv4 detector: anything ending in ".<digits>".
+IPV4_RE = re.compile(r"\.\d+$")
+def is_HDN(text):
+    """Return True if text is a host domain name."""
+    # XXX
+    # This may well be wrong.  Which RFC is HDN defined in, if any (for
+    #  the purposes of RFC 2965)?
+    # For the current implementation, what about IPv6?  Remember to look
+    #  at other uses of IPV4_RE also, if change this.
+    if IPV4_RE.search(text):
+        return False
+    if text == "":
+        return False
+    # reject names with a leading or trailing dot
+    if text[0] == "." or text[-1] == ".":
+        return False
+    return True
+
+def domain_match(A, B):
+    """Return True if domain A domain-matches domain B, according to RFC 2965.
+
+    A and B may be host domain names or IP addresses.
+
+    RFC 2965, section 1:
+
+    Host names can be specified either as an IP address or a HDN string.
+    Sometimes we compare one host name with another.  (Such comparisons SHALL
+    be case-insensitive.)  Host A's name domain-matches host B's if
+
+         *  their host name strings string-compare equal; or
+
+         * A is a HDN string and has the form NB, where N is a non-empty
+            name string, B has the form .B', and B' is a HDN string.  (So,
+            x.y.com domain-matches .Y.com but not Y.com.)
+
+    Note that domain-match is not a commutative operation: a.b.c.com
+    domain-matches .c.com, but not the reverse.
+
+    """
+    # Note that, if A or B are IP addresses, the only relevant part of the
+    # definition of the domain-match algorithm is the direct string-compare.
+    A = A.lower()
+    B = B.lower()
+    if A == B:
+        return True
+    if not is_HDN(A):
+        return False
+    # A must have the form NB with a non-empty prefix N (rfind locates
+    # the last occurrence of B; 0 would mean N is empty).
+    i = A.rfind(B)
+    if i == -1 or i == 0:
+        # A does not have form NB, or N is the empty string
+        return False
+    if not B.startswith("."):
+        return False
+    if not is_HDN(B[1:]):
+        return False
+    return True
+
+def liberal_is_HDN(text):
+    """Return True if text is a sort-of-like a host domain name.
+
+    For accepting/blocking domains.  Anything that does not look like an
+    IPv4 address is accepted.
+
+    """
+    if IPV4_RE.search(text):
+        return False
+    return True
+
+def user_domain_match(A, B):
+    """For blocking/accepting domains.
+
+    A and B may be host domain names or IP addresses.  IP addresses must
+    compare exactly equal; a pattern B with a leading dot matches any A
+    that ends with it.
+
+    """
+    A = A.lower()
+    B = B.lower()
+    if not (liberal_is_HDN(A) and liberal_is_HDN(B)):
+        if A == B:
+            # equal IP addresses
+            return True
+        return False
+    initial_dot = B.startswith(".")
+    if initial_dot and A.endswith(B):
+        return True
+    if not initial_dot and A == B:
+        return True
+    return False
+
+# Trailing ":<port>" on a host string.
+cut_port_re = re.compile(r":\d+$")
+def request_host(request):
+    """Return request-host, as defined by RFC 2965.
+
+    Variation from RFC: returned value is lowercased, for convenient
+    comparison.
+
+    """
+    url = request.get_full_url()
+    # netloc component of the URL; fall back to the Host header when the
+    # URL itself carries no host part.
+    host = urlparse.urlparse(url)[1]
+    if host == "":
+        host = request.get_header("Host", "")
+
+    # remove port, if present
+    host = cut_port_re.sub("", host, 1)
+    return host.lower()
+
+def eff_request_host(request):
+    """Return a tuple (request-host, effective request-host name).
+
+    As defined by RFC 2965, except both are lowercased.  A dotless,
+    non-IP host gets ".local" appended for the effective name.
+
+    """
+    erhn = req_host = request_host(request)
+    if req_host.find(".") == -1 and not IPV4_RE.search(req_host):
+        erhn = req_host + ".local"
+    return req_host, erhn
+
+def request_path(request):
+    """request-URI, as defined by RFC 2965.
+
+    The escaped path (plus parameters, query and fragment) of the
+    request URL, always starting with "/".
+    """
+    url = request.get_full_url()
+    #scheme, netloc, path, parameters, query, frag = urlparse.urlparse(url)
+    #req_path = escape_path("".join(urlparse.urlparse(url)[2:]))
+    path, parameters, query, frag = urlparse.urlparse(url)[2:]
+    if parameters:
+        path = "%s;%s" % (path, parameters)
+    path = escape_path(path)
+    req_path = urlparse.urlunparse(("", "", path, "", query, frag))
+    if not req_path.startswith("/"):
+        # fix bad RFC 2396 absoluteURI
+        req_path = "/"+req_path
+    return req_path
+
+def request_port(request):
+    """Return the request's port as a string.
+
+    Defaults to the standard HTTP port when the host carries no explicit
+    port; returns None if the port part is non-numeric.
+    """
+    host = request.get_host()
+    i = host.find(':')
+    if i >= 0:
+        port = host[i+1:]
+        try:
+            int(port)
+        except ValueError:
+            debug("nonnumeric port: '%s'", port)
+            return None
+    else:
+        port = DEFAULT_HTTP_PORT
+    return port
+
+# Characters in addition to A-Z, a-z, 0-9, '_', '.', and '-' that don't
+# need to be escaped to form a valid HTTP URL (RFCs 2396 and 1738).
+HTTP_PATH_SAFE = "%/;:@&=+$,!~*'()"
+ESCAPED_CHAR_RE = re.compile(r"%([0-9a-fA-F][0-9a-fA-F])")
+def uppercase_escaped_char(match):
+    # normalize "%xx" escapes to "%XX"
+    return "%%%s" % match.group(1).upper()
+def escape_path(path):
+    """Escape any invalid characters in HTTP URL, and uppercase all escapes."""
+    # There's no knowing what character encoding was used to create URLs
+    # containing %-escapes, but since we have to pick one to escape invalid
+    # path characters, we pick UTF-8, as recommended in the HTML 4.0
+    # specification:
+    # http://www.w3.org/TR/REC-html40/appendix/notes.html#h-B.2.1
+    # And here, kind of: draft-fielding-uri-rfc2396bis-03
+    # (And in draft IRI specification: draft-duerst-iri-05)
+    # (And here, for new URI schemes: RFC 2718)
+    if isinstance(path, unicode):
+        path = path.encode("utf-8")
+    path = urllib.quote(path, HTTP_PATH_SAFE)
+    path = ESCAPED_CHAR_RE.sub(uppercase_escaped_char, path)
+    return path
+
+def reach(h):
+    """Return reach of host h, as defined by RFC 2965, section 1.
+
+    The reach R of a host name H is defined as follows:
+
+       *  If
+
+          -  H is the host domain name of a host; and,
+
+          -  H has the form A.B; and
+
+          -  A has no embedded (that is, interior) dots; and
+
+          -  B has at least one embedded dot, or B is the string "local".
+             then the reach of H is .B.
+
+       *  Otherwise, the reach of H is H.
+
+    >>> reach("www.acme.com")
+    '.acme.com'
+    >>> reach("acme.com")
+    'acme.com'
+    >>> reach("acme.local")
+    '.local'
+
+    """
+    i = h.find(".")
+    if i >= 0:
+        #a = h[:i]  # this line is only here to show what a is
+        b = h[i+1:]
+        i = b.find(".")
+        # B must itself contain a dot, or be "local"
+        if is_HDN(h) and (i >= 0 or b == "local"):
+            return "."+b
+    return h
+
+def is_third_party(request):
+    """Return True if request is to a third-party host.
+
+    RFC 2965, section 3.3.6:
+
+        An unverifiable transaction is to a third-party host if its request-
+        host U does not domain-match the reach R of the request-host O in the
+        origin transaction.
+
+    """
+    req_host = request_host(request)
+    if not domain_match(req_host, reach(request.get_origin_req_host())):
+        return True
+    else:
+        return False
+
+
+class Cookie:
+    """HTTP Cookie.
+
+    This class represents both Netscape and RFC 2965 cookies.
+
+    This is deliberately a very simple class.  It just holds attributes.  It's
+    possible to construct Cookie instances that don't comply with the cookie
+    standards.  CookieJar.make_cookies is the factory function for Cookie
+    objects -- it deals with cookie parsing, supplying defaults, and
+    normalising to the representation used in this class.  CookiePolicy is
+    responsible for checking them to see whether they should be accepted from
+    and returned to the server.
+
+    Note that the port may be present in the headers, but unspecified ("Port"
+    rather than"Port=80", for example); if this is the case, port is None.
+
+    """
+
+    def __init__(self, version, name, value,
+                 port, port_specified,
+                 domain, domain_specified, domain_initial_dot,
+                 path, path_specified,
+                 secure,
+                 expires,
+                 discard,
+                 comment,
+                 comment_url,
+                 rest):
+
+        # version/expires may arrive as strings; coerce to int up front
+        if version is not None: version = int(version)
+        if expires is not None: expires = int(expires)
+        if port is None and port_specified is True:
+            raise ValueError("if port is None, port_specified must be false")
+
+        self.version = version
+        self.name = name
+        self.value = value
+        self.port = port
+        self.port_specified = port_specified
+        # normalise case, as per RFC 2965 section 3.3.3
+        self.domain = domain.lower()
+        self.domain_specified = domain_specified
+        # Sigh.  We need to know whether the domain given in the
+        # cookie-attribute had an initial dot, in order to follow RFC 2965
+        # (as clarified in draft errata).  Needed for the returned $Domain
+        # value.
+        self.domain_initial_dot = domain_initial_dot
+        self.path = path
+        self.path_specified = path_specified
+        self.secure = secure
+        self.expires = expires
+        self.discard = discard
+        self.comment = comment
+        self.comment_url = comment_url
+
+        # dict of nonstandard cookie-attributes; shallow-copied so later
+        # mutation of the caller's dict doesn't affect this cookie
+        self._rest = copy.copy(rest)
+
+    def has_nonstandard_attr(self, name):
+        # True if a nonstandard cookie-attribute `name` was seen
+        return name in self._rest
+    def get_nonstandard_attr(self, name, default=None):
+        return self._rest.get(name, default)
+    def set_nonstandard_attr(self, name, value):
+        self._rest[name] = value
+
+    def is_expired(self, now=None):
+        """Return True if the cookie's expiry time is at or before `now`
+        (default: the current time)."""
+        if now is None: now = time.time()
+        if (self.expires is not None) and (self.expires <= now):
+            return True
+        return False
+
+    def __str__(self):
+        if self.port is None: p = ""
+        else: p = ":"+self.port
+        limit = self.domain + p + self.path
+        if self.value is not None:
+            namevalue = "%s=%s" % (self.name, self.value)
+        else:
+            namevalue = self.name
+        return "<Cookie %s for %s>" % (namevalue, limit)
+
+    def __repr__(self):
+        args = []
+        for name in ["version", "name", "value",
+                     "port", "port_specified",
+                     "domain", "domain_specified", "domain_initial_dot",
+                     "path", "path_specified",
+                     "secure", "expires", "discard", "comment", "comment_url",
+                     ]:
+            attr = getattr(self, name)
+            args.append("%s=%s" % (name, repr(attr)))
+        args.append("rest=%s" % repr(self._rest))
+        return "Cookie(%s)" % ", ".join(args)
+
+
+class CookiePolicy:
+    """Defines which cookies get accepted from and returned to server.
+
+    May also modify cookies, though this is probably a bad idea.
+
+    The subclass DefaultCookiePolicy defines the standard rules for Netscape
+    and RFC 2965 cookies -- override that if you want a customised policy.
+
+    """
+    def set_ok(self, cookie, request):
+        """Return true if (and only if) cookie should be accepted from server.
+
+        Currently, pre-expired cookies never get this far -- the CookieJar
+        class deletes such cookies itself.
+
+        """
+        raise NotImplementedError()
+
+    def return_ok(self, cookie, request):
+        """Return true if (and only if) cookie should be returned to server."""
+        raise NotImplementedError()
+
+    def domain_return_ok(self, domain, request):
+        """Return false if cookies should not be returned, given cookie domain.
+
+        Optional pre-filter hook; this base implementation accepts all
+        domains.
+        """
+        return True
+
+    def path_return_ok(self, path, request):
+        """Return false if cookies should not be returned, given cookie path.
+
+        Optional pre-filter hook; this base implementation accepts all
+        paths.
+        """
+        return True
+
+
class DefaultCookiePolicy(CookiePolicy):
    """Implements the standard rules for accepting and returning cookies.

    Both the Netscape protocol and RFC 2965 are understood (RFC 2965 is
    switched off by default).  Acceptance (.set_ok()) and returning
    (.return_ok()) each run a series of per-attribute checks, all of which
    must pass.  Extra strictness is controlled by the constructor switches.
    """

    # Bit-flags for the strict_ns_domain constructor argument / attribute:
    # reject Netscape cookies whose host prefix contains a dot;
    # require an unspecified domain to string-compare equal to the request
    # host when returning; apply full RFC 2965 domain-matching to Netscape
    # cookies too.
    DomainStrictNoDots = 1
    DomainStrictNonDomain = 2
    DomainRFC2965Match = 4

    DomainLiberal = 0
    DomainStrict = DomainStrictNoDots|DomainStrictNonDomain

    def __init__(self,
                 blocked_domains=None, allowed_domains=None,
                 netscape=True, rfc2965=False,
                 hide_cookie2=False,
                 strict_domain=False,
                 strict_rfc2965_unverifiable=True,
                 strict_ns_unverifiable=False,
                 strict_ns_domain=DomainLiberal,
                 strict_ns_set_initial_dollar=False,
                 strict_ns_set_path=False,
                 ):
        """Constructor arguments should be passed as keyword arguments only."""
        self.netscape = netscape
        self.rfc2965 = rfc2965
        self.hide_cookie2 = hide_cookie2
        self.strict_domain = strict_domain
        self.strict_rfc2965_unverifiable = strict_rfc2965_unverifiable
        self.strict_ns_unverifiable = strict_ns_unverifiable
        self.strict_ns_domain = strict_ns_domain
        self.strict_ns_set_initial_dollar = strict_ns_set_initial_dollar
        self.strict_ns_set_path = strict_ns_set_path

        # Blocked domains are stored as a tuple; () blocks nothing.
        if blocked_domains is not None:
            self._blocked_domains = tuple(blocked_domains)
        else:
            self._blocked_domains = ()

        # None means "allow every domain"; a tuple restricts to its members.
        if allowed_domains is not None:
            allowed_domains = tuple(allowed_domains)
        self._allowed_domains = allowed_domains

    def blocked_domains(self):
        """Return the sequence of blocked domains (as a tuple)."""
        return self._blocked_domains
    def set_blocked_domains(self, blocked_domains):
        """Set the sequence of blocked domains."""
        self._blocked_domains = tuple(blocked_domains)

    def is_blocked(self, domain):
        """Return whether domain matches any entry in the block-list."""
        for blocked_domain in self._blocked_domains:
            if user_domain_match(domain, blocked_domain):
                return True
        return False

    def allowed_domains(self):
        """Return None, or the sequence of allowed domains (as a tuple)."""
        return self._allowed_domains
    def set_allowed_domains(self, allowed_domains):
        """Set the sequence of allowed domains, or None."""
        if allowed_domains is not None:
            allowed_domains = tuple(allowed_domains)
        self._allowed_domains = allowed_domains

    def is_not_allowed(self, domain):
        """Return whether domain fails to match the allow-list (if one is set)."""
        if self._allowed_domains is None:
            return False
        for allowed_domain in self._allowed_domains:
            if user_domain_match(domain, allowed_domain):
                return False
        return True

    def set_ok(self, cookie, request):
        """
        If you override .set_ok(), be sure to call this method.  If it returns
        false, so should your subclass (assuming your subclass wants to be more
        strict about which cookies to accept).

        """
        debug(" - checking cookie %s=%s", cookie.name, cookie.value)

        assert cookie.name is not None

        # Dispatch to set_ok_version, set_ok_verifiability, etc. in turn;
        # every per-attribute check must pass for the cookie to be accepted.
        for n in "version", "verifiability", "name", "path", "domain", "port":
            fn_name = "set_ok_"+n
            fn = getattr(self, fn_name)
            if not fn(cookie, request):
                return False

        return True

    def set_ok_version(self, cookie, request):
        """Reject cookies whose version conflicts with the enabled protocols."""
        if cookie.version is None:
            # Version is always set to 0 by parse_ns_headers if it's a Netscape
            # cookie, so this must be an invalid RFC 2965 cookie.
            debug("   Set-Cookie2 without version attribute (%s=%s)",
                  cookie.name, cookie.value)
            return False
        if cookie.version > 0 and not self.rfc2965:
            debug("   RFC 2965 cookies are switched off")
            return False
        elif cookie.version == 0 and not self.netscape:
            debug("   Netscape cookies are switched off")
            return False
        return True

    def set_ok_verifiability(self, cookie, request):
        """Reject third-party cookies from unverifiable transactions, if strict."""
        if request.is_unverifiable() and is_third_party(request):
            if cookie.version > 0 and self.strict_rfc2965_unverifiable:
                debug("   third-party RFC 2965 cookie during "
                             "unverifiable transaction")
                return False
            elif cookie.version == 0 and self.strict_ns_unverifiable:
                debug("   third-party Netscape cookie during "
                             "unverifiable transaction")
                return False
        return True

    def set_ok_name(self, cookie, request):
        """Optionally reject V0 cookies whose name starts with '$'."""
        # Try and stop servers setting V0 cookies designed to hack other
        # servers that know both V0 and V1 protocols.
        if (cookie.version == 0 and self.strict_ns_set_initial_dollar and
            cookie.name.startswith("$")):
            debug("   illegal name (starts with '$'): '%s'", cookie.name)
            return False
        return True

    def set_ok_path(self, cookie, request):
        """Require the Path attribute (when given) to prefix the request path."""
        if cookie.path_specified:
            req_path = request_path(request)
            if ((cookie.version > 0 or
                 (cookie.version == 0 and self.strict_ns_set_path)) and
                not req_path.startswith(cookie.path)):
                debug("   path attribute %s is not a prefix of request "
                      "path %s", cookie.path, req_path)
                return False
        return True

    def set_ok_domain(self, cookie, request):
        """Apply block/allow lists plus the domain-matching rules to cookie."""
        if self.is_blocked(cookie.domain):
            debug("   domain %s is in user block-list", cookie.domain)
            return False
        if self.is_not_allowed(cookie.domain):
            debug("   domain %s is not in user allow-list", cookie.domain)
            return False
        if cookie.domain_specified:
            req_host, erhn = eff_request_host(request)
            domain = cookie.domain
            if self.strict_domain and (domain.count(".") >= 2):
                # Optionally reject two-label domains under a two-letter TLD
                # (e.g. ".co.uk"), which would cover a whole registry rather
                # than one site.
                i = domain.rfind(".")
                j = domain.rfind(".", 0, i)
                if j == 0:  # domain like .foo.bar
                    tld = domain[i+1:]
                    sld = domain[j+1:i]
                    if (sld.lower() in [
                        "co", "ac",
                        "com", "edu", "org", "net", "gov", "mil", "int"] and
                        len(tld) == 2):
                        # domain like .co.uk
                        debug("   country-code second level domain %s", domain)
                        return False
            if domain.startswith("."):
                undotted_domain = domain[1:]
            else:
                undotted_domain = domain
            embedded_dots = (undotted_domain.find(".") >= 0)
            if not embedded_dots and domain != ".local":
                debug("   non-local domain %s contains no embedded dot",
                      domain)
                return False
            if cookie.version == 0:
                # NOTE(review): the debug message below contains a duplicated
                # word ("end end"); left untouched here since this is a
                # documentation-only pass.
                if (not erhn.endswith(domain) and
                    (not erhn.startswith(".") and
                     not ("."+erhn).endswith(domain))):
                    debug("   effective request-host %s (even with added "
                          "initial dot) does not end end with %s",
                          erhn, domain)
                    return False
            if (cookie.version > 0 or
                (self.strict_ns_domain & self.DomainRFC2965Match)):
                if not domain_match(erhn, domain):
                    debug("   effective request-host %s does not domain-match "
                          "%s", erhn, domain)
                    return False
            if (cookie.version > 0 or
                (self.strict_ns_domain & self.DomainStrictNoDots)):
                host_prefix = req_host[:-len(domain)]
                if (host_prefix.find(".") >= 0 and
                    not IPV4_RE.search(req_host)):
                    debug("   host prefix %s for domain %s contains a dot",
                          host_prefix, domain)
                    return False
        return True

    def set_ok_port(self, cookie, request):
        """Require the request port to appear in the cookie's Port list."""
        if cookie.port_specified:
            req_port = request_port(request)
            if req_port is None:
                req_port = "80"
            else:
                req_port = str(req_port)
            for p in cookie.port.split(","):
                try:
                    int(p)
                except ValueError:
                    debug("   bad port %s (not numeric)", p)
                    return False
                if p == req_port:
                    break
            else:
                debug("   request port (%s) not found in %s",
                      req_port, cookie.port)
                return False
        return True

    def return_ok(self, cookie, request):
        """
        If you override .return_ok(), be sure to call this method.  If it
        returns false, so should your subclass (assuming your subclass wants to
        be more strict about which cookies to return).

        """
        # Path has already been checked by .path_return_ok(), and domain
        # blocking done by .domain_return_ok().
        debug(" - checking cookie %s=%s", cookie.name, cookie.value)

        # Every per-attribute check must pass for the cookie to be returned.
        for n in "version", "verifiability", "secure", "expires", "port", "domain":
            fn_name = "return_ok_"+n
            fn = getattr(self, fn_name)
            if not fn(cookie, request):
                return False
        return True

    def return_ok_version(self, cookie, request):
        """Reject cookies whose version conflicts with the enabled protocols."""
        if cookie.version > 0 and not self.rfc2965:
            debug("   RFC 2965 cookies are switched off")
            return False
        elif cookie.version == 0 and not self.netscape:
            debug("   Netscape cookies are switched off")
            return False
        return True

    def return_ok_verifiability(self, cookie, request):
        """Withhold third-party cookies from unverifiable transactions, if strict."""
        if request.is_unverifiable() and is_third_party(request):
            if cookie.version > 0 and self.strict_rfc2965_unverifiable:
                debug("   third-party RFC 2965 cookie during unverifiable "
                      "transaction")
                return False
            elif cookie.version == 0 and self.strict_ns_unverifiable:
                debug("   third-party Netscape cookie during unverifiable "
                      "transaction")
                return False
        return True

    def return_ok_secure(self, cookie, request):
        """Only return secure cookies over https requests."""
        if cookie.secure and request.get_type() != "https":
            debug("   secure cookie with non-secure request")
            return False
        return True

    def return_ok_expires(self, cookie, request):
        """Never return expired cookies.

        self._now is assigned by CookieJar before the checks run (see
        CookieJar.add_cookie_header).
        """
        if cookie.is_expired(self._now):
            debug("   cookie expired")
            return False
        return True

    def return_ok_port(self, cookie, request):
        """Require the request port to appear in the cookie's Port list."""
        if cookie.port:
            req_port = request_port(request)
            if req_port is None:
                req_port = "80"
            for p in cookie.port.split(","):
                if p == req_port:
                    break
            else:
                debug("   request port %s does not match cookie port %s",
                      req_port, cookie.port)
                return False
        return True

    def return_ok_domain(self, cookie, request):
        """Apply the version-appropriate domain-matching rule to cookie."""
        req_host, erhn = eff_request_host(request)
        domain = cookie.domain

        # strict check of non-domain cookies: Mozilla does this, MSIE5 doesn't
        if (cookie.version == 0 and
            (self.strict_ns_domain & self.DomainStrictNonDomain) and
            not cookie.domain_specified and domain != erhn):
            debug("   cookie with unspecified domain does not string-compare "
                  "equal to request domain")
            return False

        if cookie.version > 0 and not domain_match(erhn, domain):
            debug("   effective request-host name %s does not domain-match "
                  "RFC 2965 cookie domain %s", erhn, domain)
            return False
        if cookie.version == 0 and not ("."+erhn).endswith(domain):
            debug("   request-host %s does not match Netscape cookie domain "
                  "%s", req_host, domain)
            return False
        return True

    def domain_return_ok(self, domain, request):
        # Liberal check of domain.  This is here as an optimization to avoid
        # having to load lots of MSIE cookie files unless necessary.
        req_host, erhn = eff_request_host(request)
        if not req_host.startswith("."):
            req_host = "."+req_host
        if not erhn.startswith("."):
            erhn = "."+erhn
        if not (req_host.endswith(domain) or erhn.endswith(domain)):
            #debug("   request domain %s does not match cookie domain %s",
            #      req_host, domain)
            return False

        if self.is_blocked(domain):
            debug("   domain %s is in user block-list", domain)
            return False
        if self.is_not_allowed(domain):
            debug("   domain %s is not in user allow-list", domain)
            return False

        return True

    def path_return_ok(self, path, request):
        """Return whether the request path starts with the given cookie path."""
        debug("- checking cookie path=%s", path)
        req_path = request_path(request)
        if not req_path.startswith(path):
            debug("  %s does not path-match %s", req_path, path)
            return False
        return True
+
+
def vals_sorted_by_key(adict):
    """Return a list of adict's values, ordered by their keys.

    The previous implementation relied on dict.keys() returning a sortable
    list and on map() returning a list, both of which hold only on
    Python 2.  This version produces the identical result there and is
    also portable.
    """
    return [adict[key] for key in sorted(adict)]
+
def deepvalues(mapping):
    """Iterate over nested mappings, depth-first, in sorted order by key.

    Values that themselves look like mappings (anything exposing an .items
    attribute) are recursed into; all other values are yielded.  The sorted
    key iteration is done inline (rather than via vals_sorted_by_key) so the
    generator is self-contained and free of Python 2-only dict behaviour;
    the previous version also shadowed its own 'mapping' parameter with a
    boolean flag, which this rewrite avoids.
    """
    for key in sorted(mapping):
        obj = mapping[key]
        # Duck-typed mapping test, matching the original try/except on
        # obj.items: recurse into sub-mappings, yield everything else.
        if hasattr(obj, "items"):
            for subobj in deepvalues(obj):
                yield subobj
        else:
            yield obj
+
+
+# Used as second parameter to dict.get() method, to distinguish absent
+# dict key from one with a None value.
+class Absent: pass
+
+class CookieJar:
+    """Collection of HTTP cookies.
+
+    You may not need to know about this class: try
+    urllib2.build_opener(HTTPCookieProcessor).open(url).
+
+    """
+
    # Matches any "non-word" character; V1 cookie values containing one are
    # quoted before being sent back (see _cookie_attrs).
    non_word_re = re.compile(r"\W")
    # Characters needing a backslash escape inside a quoted cookie value.
    quote_re = re.compile(r"([\"\\])")
    # NOTE(review): the three patterns below are not referenced in this
    # portion of the file; presumably used by domain-handling or file-saving
    # code elsewhere -- confirm before removing.
    strict_domain_re = re.compile(r"\.?[^.]*")
    domain_re = re.compile(r"[^.]*")
    dots_re = re.compile(r"^\.+")

    # Signature line of LWP-format cookie save files (kept as a plain string,
    # not a compiled pattern).
    magic_re = r"^\#LWP-Cookies-(\d+\.\d+)"
+
+    def __init__(self, policy=None):
+        if policy is None:
+            policy = DefaultCookiePolicy()
+        self._policy = policy
+
+        self._cookies_lock = _threading.RLock()
+        self._cookies = {}
+
+    def set_policy(self, policy):
+        self._policy = policy
+
+    def _cookies_for_domain(self, domain, request):
+        cookies = []
+        if not self._policy.domain_return_ok(domain, request):
+            return []
+        debug("Checking %s for cookies to return", domain)
+        cookies_by_path = self._cookies[domain]
+        for path in cookies_by_path.keys():
+            if not self._policy.path_return_ok(path, request):
+                continue
+            cookies_by_name = cookies_by_path[path]
+            for cookie in cookies_by_name.values():
+                if not self._policy.return_ok(cookie, request):
+                    debug("   not returning cookie")
+                    continue
+                debug("   it's a match")
+                cookies.append(cookie)
+        return cookies
+
+    def _cookies_for_request(self, request):
+        """Return a list of cookies to be returned to server."""
+        cookies = []
+        for domain in self._cookies.keys():
+            cookies.extend(self._cookies_for_domain(domain, request))
+        return cookies
+
+    def _cookie_attrs(self, cookies):
+        """Return a list of cookie-attributes to be returned to server.
+
+        like ['foo="bar"; $Path="/"', ...]
+
+        The $Version attribute is also added when appropriate (currently only
+        once per request).
+
+        """
+        # add cookies in order of most specific (ie. longest) path first
+        def decreasing_size(a, b): return cmp(len(b.path), len(a.path))
+        cookies.sort(decreasing_size)
+
+        version_set = False
+
+        attrs = []
+        for cookie in cookies:
+            # set version of Cookie header
+            # XXX
+            # What should it be if multiple matching Set-Cookie headers have
+            #  different versions themselves?
+            # Answer: there is no answer; was supposed to be settled by
+            #  RFC 2965 errata, but that may never appear...
+            version = cookie.version
+            if not version_set:
+                version_set = True
+                if version > 0:
+                    attrs.append("$Version=%s" % version)
+
+            # quote cookie value if necessary
+            # (not for Netscape protocol, which already has any quotes
+            #  intact, due to the poorly-specified Netscape Cookie: syntax)
+            if ((cookie.value is not None) and
+                self.non_word_re.search(cookie.value) and version > 0):
+                value = self.quote_re.sub(r"\\\1", cookie.value)
+            else:
+                value = cookie.value
+
+            # add cookie-attributes to be returned in Cookie header
+            if cookie.value is None:
+                attrs.append(cookie.name)
+            else:
+                attrs.append("%s=%s" % (cookie.name, value))
+            if version > 0:
+                if cookie.path_specified:
+                    attrs.append('$Path="%s"' % cookie.path)
+                if cookie.domain.startswith("."):
+                    domain = cookie.domain
+                    if (not cookie.domain_initial_dot and
+                        domain.startswith(".")):
+                        domain = domain[1:]
+                    attrs.append('$Domain="%s"' % domain)
+                if cookie.port is not None:
+                    p = "$Port"
+                    if cookie.port_specified:
+                        p = p + ('="%s"' % cookie.port)
+                    attrs.append(p)
+
+        return attrs
+
+    def add_cookie_header(self, request):
+        """Add correct Cookie: header to request (urllib2.Request object).
+
+        The Cookie2 header is also added unless policy.hide_cookie2 is true.
+
+        """
+        debug("add_cookie_header")
+        self._cookies_lock.acquire()
+
+        self._policy._now = self._now = int(time.time())
+
+        req_host, erhn = eff_request_host(request)
+        strict_non_domain = (
+            self._policy.strict_ns_domain & self._policy.DomainStrictNonDomain)
+
+        cookies = self._cookies_for_request(request)
+
+        attrs = self._cookie_attrs(cookies)
+        if attrs:
+            if not request.has_header("Cookie"):
+                request.add_unredirected_header(
+                    "Cookie", "; ".join(attrs))
+
+        # if necessary, advertise that we know RFC 2965
+        if (self._policy.rfc2965 and not self._policy.hide_cookie2 and
+            not request.has_header("Cookie2")):
+            for cookie in cookies:
+                if cookie.version != 1:
+                    request.add_unredirected_header("Cookie2", '$Version="1"')
+                    break
+
+        self._cookies_lock.release()
+
+        self.clear_expired_cookies()
+
    def _normalized_cookie_tuples(self, attrs_set):
        """Return list of tuples containing normalised cookie information.

        attrs_set is the list of lists of key,value pairs extracted from
        the Set-Cookie or Set-Cookie2 headers.

        Tuples are name, value, standard, rest, where name and value are the
        cookie name and value, standard is a dictionary containing the standard
        cookie-attributes (discard, secure, version, expires or max-age,
        domain, path and port) and rest is a dictionary containing the rest of
        the cookie-attributes.

        Cookies missing a required attribute value are dropped entirely
        rather than partially parsed.
        """
        cookie_tuples = []

        # Known attribute names; compared case-insensitively below.
        boolean_attrs = "discard", "secure"
        value_attrs = ("version",
                       "expires", "max-age",
                       "domain", "path", "port",
                       "comment", "commenturl")

        for cookie_attrs in attrs_set:
            # The first pair is always the cookie's own name and value.
            name, value = cookie_attrs[0]

            # Build dictionary of standard cookie-attributes (standard) and
            # dictionary of other cookie-attributes (rest).

            # Note: expiry time is normalised to seconds since epoch.  V0
            # cookies should have the Expires cookie-attribute, and V1 cookies
            # should have Max-Age, but since V1 includes RFC 2109 cookies (and
            # since V0 cookies may be a mish-mash of Netscape and RFC 2109), we
            # accept either (but prefer Max-Age).
            max_age_set = False

            bad_cookie = False

            standard = {}
            rest = {}
            for k, v in cookie_attrs[1:]:
                lc = k.lower()
                # don't lose case distinction for unknown fields
                if lc in value_attrs or lc in boolean_attrs:
                    k = lc
                if k in boolean_attrs and v is None:
                    # boolean cookie-attribute is present, but has no value
                    # (like "discard", rather than "port=80")
                    v = True
                if k in standard:
                    # only first value is significant
                    continue
                if k == "domain":
                    if v is None:
                        debug("   missing value for domain attribute")
                        bad_cookie = True
                        break
                    # RFC 2965 section 3.3.3
                    v = v.lower()
                if k == "expires":
                    if max_age_set:
                        # Prefer max-age to expires (like Mozilla)
                        continue
                    if v is None:
                        debug("   missing or invalid value for expires "
                              "attribute: treating as session cookie")
                        continue
                if k == "max-age":
                    max_age_set = True
                    try:
                        v = int(v)
                    except ValueError:
                        debug("   missing or invalid (non-numeric) value for "
                              "max-age attribute")
                        bad_cookie = True
                        break
                    # convert RFC 2965 Max-Age to seconds since epoch
                    # XXX Strictly you're supposed to follow RFC 2616
                    #   age-calculation rules.  Remember that zero Max-Age is
                    #   a request to discard (old and new) cookie, though.
                    k = "expires"
                    v = self._now + v
                if (k in value_attrs) or (k in boolean_attrs):
                    # Known attributes (other than port/comment/commenturl)
                    # must carry a value.
                    if (v is None and
                        k not in ["port", "comment", "commenturl"]):
                        debug("   missing value for %s attribute" % k)
                        bad_cookie = True
                        break
                    standard[k] = v
                else:
                    rest[k] = v

            if bad_cookie:
                continue

            cookie_tuples.append((name, value, standard, rest))

        return cookie_tuples
+
    def _cookie_from_cookie_tuple(self, tup, request):
        """Build a Cookie from one (name, value, standard, rest) tuple.

        Returns None -- after clearing any existing matching cookie -- when
        the tuple carries an expiry time in the past, i.e. a deletion
        request from the server.
        """
        # standard is dict of standard cookie-attributes, rest is dict of the
        # rest of them
        name, value, standard, rest = tup

        # The Absent sentinel distinguishes "attribute not given at all"
        # from an attribute given with value None.
        domain = standard.get("domain", Absent)
        path = standard.get("path", Absent)
        port = standard.get("port", Absent)
        expires = standard.get("expires", Absent)

        # set the easy defaults
        version = standard.get("version", None)
        if version is not None: version = int(version)
        secure = standard.get("secure", False)
        # (discard is also set if expires is Absent)
        discard = standard.get("discard", False)
        comment = standard.get("comment", None)
        comment_url = standard.get("commenturl", None)

        # set default path
        if path is not Absent and path != "":
            path_specified = True
            path = escape_path(path)
        else:
            # No usable Path attribute: default from the request path.
            path_specified = False
            path = request_path(request)
            i = path.rfind("/")
            if i != -1:
                if version == 0:
                    # Netscape spec parts company from reality here
                    path = path[:i]
                else:
                    path = path[:i+1]
            if len(path) == 0: path = "/"

        # set default domain
        domain_specified = domain is not Absent
        # but first we have to remember whether it starts with a dot
        domain_initial_dot = False
        if domain_specified:
            domain_initial_dot = bool(domain.startswith("."))
        if domain is Absent:
            req_host, erhn = eff_request_host(request)
            domain = erhn
        elif not domain.startswith("."):
            domain = "."+domain

        # set default port
        port_specified = False
        if port is not Absent:
            if port is None:
                # Port attr present, but has no value: default to request port.
                # Cookie should then only be sent back on that port.
                port = request_port(request)
            else:
                port_specified = True
                port = re.sub(r"\s+", "", port)
        else:
            # No port attr present.  Cookie can be sent back on any port.
            port = None

        # set default expires and discard
        if expires is Absent:
            expires = None
            discard = True
        elif expires <= self._now:
            # Expiry date in past is request to delete cookie.  This can't be
            # in DefaultCookiePolicy, because can't delete cookies there.
            try:
                self.clear(domain, path, name)
            except KeyError:
                pass
            debug("Expiring cookie, domain='%s', path='%s', name='%s'",
                  domain, path, name)
            return None

        return Cookie(version,
                      name, value,
                      port, port_specified,
                      domain, domain_specified, domain_initial_dot,
                      path, path_specified,
                      secure,
                      expires,
                      discard,
                      comment,
                      comment_url,
                      rest)
+
+    def _cookies_from_attrs_set(self, attrs_set, request):
+        cookie_tuples = self._normalized_cookie_tuples(attrs_set)
+
+        cookies = []
+        for tup in cookie_tuples:
+            cookie = self._cookie_from_cookie_tuple(tup, request)
+            if cookie: cookies.append(cookie)
+        return cookies
+
+    def make_cookies(self, response, request):
+        """Return sequence of Cookie objects extracted from response object."""
+        # get cookie-attributes for RFC 2965 and Netscape protocols
+        headers = response.info()
+        rfc2965_hdrs = headers.getheaders("Set-Cookie2")
+        ns_hdrs = headers.getheaders("Set-Cookie")
+
+        rfc2965 = self._policy.rfc2965
+        netscape = self._policy.netscape
+
+        if ((not rfc2965_hdrs and not ns_hdrs) or
+            (not ns_hdrs and not rfc2965) or
+            (not rfc2965_hdrs and not netscape) or
+            (not netscape and not rfc2965)):
+            return []  # no relevant cookie headers: quick exit
+
+        try:
+            cookies = self._cookies_from_attrs_set(
+                split_header_words(rfc2965_hdrs), request)
+        except:
+            reraise_unmasked_exceptions()
+            cookies = []
+
+        if ns_hdrs and netscape:
+            try:
+                ns_cookies = self._cookies_from_attrs_set(
+                    parse_ns_headers(ns_hdrs), request)
+            except:
+                reraise_unmasked_exceptions()
+                ns_cookies = []
+
+            # Look for Netscape cookies (from Set-Cookie headers) that match
+            # corresponding RFC 2965 cookies (from Set-Cookie2 headers).
+            # For each match, keep the RFC 2965 cookie and ignore the Netscape
+            # cookie (RFC 2965 section 9.1).  Actually, RFC 2109 cookies are
+            # bundled in with the Netscape cookies for this purpose, which is
+            # reasonable behaviour.
+            if rfc2965:
+                lookup = {}
+                for cookie in cookies:
+                    lookup[(cookie.domain, cookie.path, cookie.name)] = None
+
+                def no_matching_rfc2965(ns_cookie, lookup=lookup):
+                    key = ns_cookie.domain, ns_cookie.path, ns_cookie.name
+                    return key not in lookup
+                ns_cookies = filter(no_matching_rfc2965, ns_cookies)
+
+            if ns_cookies:
+                cookies.extend(ns_cookies)
+
+        return cookies
+
+    def set_cookie_if_ok(self, cookie, request):
+        """Set a cookie if policy says it's OK to do so."""
+        self._cookies_lock.acquire()
+        self._policy._now = self._now = int(time.time())
+
+        if self._policy.set_ok(cookie, request):
+            self.set_cookie(cookie)
+
+        self._cookies_lock.release()
+
+    def set_cookie(self, cookie):
+        """Set a cookie, without checking whether or not it should be set."""
+        c = self._cookies
+        self._cookies_lock.acquire()
+        try:
+            if cookie.domain not in c: c[cookie.domain] = {}
+            c2 = c[cookie.domain]
+            if cookie.path not in c2: c2[cookie.path] = {}
+            c3 = c2[cookie.path]
+            c3[cookie.name] = cookie
+        finally:
+            self._cookies_lock.release()
+
+    def extract_cookies(self, response, request):
+        """Extract cookies from response, where allowable given the request."""
+        debug("extract_cookies: %s", response.info())
+        self._cookies_lock.acquire()
+        self._policy._now = self._now = int(time.time())
+
+        for cookie in self.make_cookies(response, request):
+            if self._policy.set_ok(cookie, request):
+                debug(" setting cookie: %s", cookie)
+                self.set_cookie(cookie)
+        self._cookies_lock.release()
+
+    def clear(self, domain=None, path=None, name=None):
+        """Clear some cookies.
+
+        Invoking this method without arguments will clear all cookies.  If
+        given a single argument, only cookies belonging to that domain will be
+        removed.  If given two arguments, cookies belonging to the specified
+        path within that domain are removed.  If given three arguments, then
+        the cookie with the specified name, path and domain is removed.
+
+        Raises KeyError if no matching cookie exists.
+
+        """
+        if name is not None:
+            if (domain is None) or (path is None):
+                raise ValueError(
+                    "domain and path must be given to remove a cookie by name")
+            del self._cookies[domain][path][name]
+        elif path is not None:
+            if domain is None:
+                raise ValueError(
+                    "domain must be given to remove cookies by path")
+            del self._cookies[domain][path]
+        elif domain is not None:
+            del self._cookies[domain]
+        else:
+            self._cookies = {}
+
+    def clear_session_cookies(self):
+        """Discard all session cookies.
+
+        Note that the .save() method won't save session cookies anyway, unless
+        you ask otherwise by passing a true ignore_discard argument.
+
+        """
+        self._cookies_lock.acquire()
+        for cookie in self:
+            if cookie.discard:
+                self.clear(cookie.domain, cookie.path, cookie.name)
+        self._cookies_lock.release()
+
+    def clear_expired_cookies(self):
+        """Discard all expired cookies.
+
+        You probably don't need to call this method: expired cookies are never
+        sent back to the server (provided you're using DefaultCookiePolicy),
+        this method is called by CookieJar itself every so often, and the
+        .save() method won't save expired cookies anyway (unless you ask
+        otherwise by passing a true ignore_expires argument).
+
+        """
+        self._cookies_lock.acquire()
+        now = time.time()
+        for cookie in self:
+            if cookie.is_expired(now):
+                self.clear(cookie.domain, cookie.path, cookie.name)
+        self._cookies_lock.release()
+
    def __iter__(self):
        """Iterate over every Cookie in the jar, flattening the nested
        domain -> path -> name mapping."""
        return deepvalues(self._cookies)
+
+    def __len__(self):
+        """Return number of contained cookies."""
+        i = 0
+        for cookie in self: i = i + 1
+        return i
+
+    def __repr__(self):
+        r = []
+        for cookie in self: r.append(repr(cookie))
+        return "<%s[%s]>" % (self.__class__, ", ".join(r))
+
+    def __str__(self):
+        r = []
+        for cookie in self: r.append(str(cookie))
+        return "<%s[%s]>" % (self.__class__, ", ".join(r))
+
+
+class LoadError(Exception): pass
+
class FileCookieJar(CookieJar):
    """CookieJar that can be loaded from and saved to a file."""

    def __init__(self, filename=None, delayload=False, policy=None):
        """
        Cookies are NOT loaded from the named file until either the .load() or
        .revert() method is called.

        filename: default file used by load()/save() when none is passed.
        delayload: lazy-loading hint for subclasses; coerced to bool.
        policy: CookiePolicy forwarded to CookieJar.
        """
        CookieJar.__init__(self, policy)
        if filename is not None:
            try:
                filename+""
            except TypeError:
                # Narrowed from a bare `except:` -- only a failed string
                # concatenation should be treated as "not string-like".
                raise ValueError("filename must be string-like")
        self.filename = filename
        self.delayload = bool(delayload)

    def save(self, filename=None, ignore_discard=False, ignore_expires=False):
        """Save cookies to a file.  Subclasses must override."""
        raise NotImplementedError()

    def load(self, filename=None, ignore_discard=False, ignore_expires=False):
        """Load cookies from a file, merging them into the jar."""
        if filename is None:
            if self.filename is not None:
                filename = self.filename
            else:
                raise ValueError(MISSING_FILENAME_TEXT)

        f = open(filename)
        try:
            self._really_load(f, filename, ignore_discard, ignore_expires)
        finally:
            f.close()

    def revert(self, filename=None,
               ignore_discard=False, ignore_expires=False):
        """Clear all cookies and reload cookies from a saved file.

        Raises LoadError (or IOError) if reversion is not successful; the
        object's state will not be altered if this happens.
        """
        if filename is None:
            if self.filename is not None:
                filename = self.filename
            else:
                raise ValueError(MISSING_FILENAME_TEXT)

        self._cookies_lock.acquire()
        try:
            old_state = copy.deepcopy(self._cookies)
            self._cookies = {}
            try:
                self.load(filename, ignore_discard, ignore_expires)
            except (LoadError, IOError):
                self._cookies = old_state
                raise
        finally:
            # Bug fix: the original released the lock only on full success,
            # so any exception from load() left the jar locked forever.
            self._cookies_lock.release()
+
+from _LWPCookieJar import LWPCookieJar, lwp_cookie_str
+from _MozillaCookieJar import MozillaCookieJar
diff --git a/depot_tools/release/win/python_24/Lib/copy.py b/depot_tools/release/win/python_24/Lib/copy.py
new file mode 100644
index 0000000..45fc32d2f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/copy.py
@@ -0,0 +1,428 @@
+"""Generic (shallow and deep) copying operations.
+
+Interface summary:
+
+        import copy
+
+        x = copy.copy(y)        # make a shallow copy of y
+        x = copy.deepcopy(y)    # make a deep copy of y
+
+For module specific errors, copy.Error is raised.
+
+The difference between shallow and deep copying is only relevant for
+compound objects (objects that contain other objects, like lists or
+class instances).
+
+- A shallow copy constructs a new compound object and then (to the
+  extent possible) inserts *the same objects* into it that the
+  original contains.
+
+- A deep copy constructs a new compound object and then, recursively,
+  inserts *copies* into it of the objects found in the original.
+
+Two problems often exist with deep copy operations that don't exist
+with shallow copy operations:
+
+ a) recursive objects (compound objects that, directly or indirectly,
+    contain a reference to themselves) may cause a recursive loop
+
+ b) because deep copy copies *everything* it may copy too much, e.g.
+    administrative data structures that should be shared even between
+    copies
+
+Python's deep copy operation avoids these problems by:
+
+ a) keeping a table of objects already copied during the current
+    copying pass
+
+ b) letting user-defined classes override the copying operation or the
+    set of components copied
+
+This version does not copy types like module, class, function, method,
+nor stack trace, stack frame, nor file, socket, window, nor array, nor
+any similar types.
+
+Classes can use the same interfaces to control copying that they use
+to control pickling: they can define methods called __getinitargs__(),
+__getstate__() and __setstate__().  See the documentation for module
+"pickle" for information on these methods.
+"""
+
+import types
+from copy_reg import dispatch_table
+
class Error(Exception):
    """Raised for module-specific copying failures."""
    pass
error = Error   # backward compatibility
+
+try:
+    from org.python.core import PyStringMap
+except ImportError:
+    PyStringMap = None
+
+__all__ = ["Error", "copy", "deepcopy"]
+
+import inspect
+def _getspecial(cls, name):
+    for basecls in inspect.getmro(cls):
+        try:
+            return basecls.__dict__[name]
+        except:
+            pass
+    else:
+        return None
+
def copy(x):
    """Shallow copy operation on arbitrary Python objects.

    See the module's __doc__ string for more info.
    """
    cls = type(x)

    # Fast path: types with a registered shallow-copy handler.
    handler = _copy_dispatch.get(cls)
    if handler:
        return handler(x)

    # A __copy__ defined on the class (found via the MRO) wins next.
    handler = _getspecial(cls, "__copy__")
    if handler:
        return handler(x)

    # Otherwise fall back to the pickle reduction protocol: copy_reg's
    # dispatch table, then __reduce_ex__, then __reduce__, and finally an
    # instance-level __copy__.
    reductor = dispatch_table.get(cls)
    if reductor:
        rv = reductor(x)
    else:
        reductor = getattr(x, "__reduce_ex__", None)
        if reductor:
            rv = reductor(2)
        else:
            reductor = getattr(x, "__reduce__", None)
            if reductor:
                rv = reductor()
            else:
                handler = getattr(x, "__copy__", None)
                if handler:
                    return handler()
                raise Error("un(shallow)copyable object of type %s" % cls)

    return _reconstruct(x, rv, 0)
+
+
# Table mapping built-in types to their shallow-copy implementation.
_copy_dispatch = d = {}

def _copy_immutable(x):
    # Immutable objects are their own shallow copy.
    return x
for t in (types.NoneType, int, long, float, bool, str, tuple,
          frozenset, type, xrange, types.ClassType,
          types.BuiltinFunctionType):
    d[t] = _copy_immutable
for name in ("ComplexType", "UnicodeType", "CodeType"):
    # These types may be absent from some builds, so look them up by name.
    t = getattr(types, name, None)
    if t is not None:
        d[t] = _copy_immutable

def _copy_with_constructor(x):
    # Containers whose constructor accepts a same-kind iterable/mapping:
    # copying is just re-construction.
    return type(x)(x)
for t in (list, dict, set):
    d[t] = _copy_with_constructor

def _copy_with_copy_method(x):
    # Types that provide their own .copy() method (e.g. Jython PyStringMap).
    return x.copy()
if PyStringMap is not None:
    d[PyStringMap] = _copy_with_copy_method

def _copy_inst(x):
    # Shallow-copy an old-style class instance, honouring the same hooks
    # pickle uses: __copy__, __getinitargs__, __getstate__/__setstate__.
    if hasattr(x, '__copy__'):
        return x.__copy__()
    if hasattr(x, '__getinitargs__'):
        args = x.__getinitargs__()
        y = x.__class__(*args)
    else:
        # Create the instance without running __init__.
        y = _EmptyClass()
        y.__class__ = x.__class__
    if hasattr(x, '__getstate__'):
        state = x.__getstate__()
    else:
        state = x.__dict__
    if hasattr(y, '__setstate__'):
        y.__setstate__(state)
    else:
        y.__dict__.update(state)
    return y
d[types.InstanceType] = _copy_inst

del d
+
def deepcopy(x, memo=None, _nil=[]):
    """Deep copy operation on arbitrary Python objects.

    See the module's __doc__ string for more info.
    """
    # _nil is a private sentinel distinct from any user value, used to
    # detect "not in memo"; it is shared across calls but never mutated.
    if memo is None:
        memo = {}

    d = id(x)
    # If x was already copied during this pass, reuse that copy -- this is
    # what makes recursive/self-referential structures terminate.
    y = memo.get(d, _nil)
    if y is not _nil:
        return y

    cls = type(x)

    copier = _deepcopy_dispatch.get(cls)
    if copier:
        y = copier(x, memo)
    else:
        try:
            issc = issubclass(cls, type)
        except TypeError: # cls is not a class (old Boost; see SF #502085)
            issc = 0
        if issc:
            # x is itself a (new-style) class; classes are treated as atomic.
            y = _deepcopy_atomic(x, memo)
        else:
            copier = _getspecial(cls, "__deepcopy__")
            if copier:
                y = copier(x, memo)
            else:
                # Fall back to the pickle reduction protocol, in the same
                # order copy() uses: dispatch table, __reduce_ex__,
                # __reduce__, then an instance-level __deepcopy__.
                reductor = dispatch_table.get(cls)
                if reductor:
                    rv = reductor(x)
                else:
                    reductor = getattr(x, "__reduce_ex__", None)
                    if reductor:
                        rv = reductor(2)
                    else:
                        reductor = getattr(x, "__reduce__", None)
                        if reductor:
                            rv = reductor()
                        else:
                            copier = getattr(x, "__deepcopy__", None)
                            if copier:
                                return copier(memo)
                            raise Error(
                                "un(deep)copyable object of type %s" % cls)
                y = _reconstruct(x, rv, 1, memo)

    memo[d] = y
    _keep_alive(x, memo) # Make sure x lives at least as long as d
    return y
+
# Table mapping built-in types to their deep-copy implementation.
_deepcopy_dispatch = d = {}

def _deepcopy_atomic(x, memo):
    # Atomic (immutable, acyclic) values are returned unchanged.
    return x
d[types.NoneType] = _deepcopy_atomic
d[types.IntType] = _deepcopy_atomic
d[types.LongType] = _deepcopy_atomic
d[types.FloatType] = _deepcopy_atomic
d[types.BooleanType] = _deepcopy_atomic
# complex/unicode/code types may be missing from minimal builds, hence the
# guarded registrations.
try:
    d[types.ComplexType] = _deepcopy_atomic
except AttributeError:
    pass
d[types.StringType] = _deepcopy_atomic
try:
    d[types.UnicodeType] = _deepcopy_atomic
except AttributeError:
    pass
try:
    d[types.CodeType] = _deepcopy_atomic
except AttributeError:
    pass
d[types.TypeType] = _deepcopy_atomic
d[types.XRangeType] = _deepcopy_atomic
d[types.ClassType] = _deepcopy_atomic
d[types.BuiltinFunctionType] = _deepcopy_atomic
+
def _deepcopy_list(x, memo):
    y = []
    # Register the (still empty) copy *before* recursing so that a list
    # containing itself maps back to y instead of recursing forever.
    memo[id(x)] = y
    for a in x:
        y.append(deepcopy(a, memo))
    return y
d[types.ListType] = _deepcopy_list
+
def _deepcopy_tuple(x, memo):
    # Tuples are immutable, so the copy cannot be memoized until complete:
    # copy the elements first, then check whether recursion through those
    # elements already produced a memoized copy of x.
    y = []
    for a in x:
        y.append(deepcopy(a, memo))
    d = id(x)
    try:
        return memo[d]
    except KeyError:
        pass
    # If every element copied to itself (all atomic), reuse x unchanged;
    # otherwise freeze the copied elements into a new tuple.
    for i in range(len(x)):
        if x[i] is not y[i]:
            y = tuple(y)
            break
    else:
        y = x
    memo[d] = y
    return y
d[types.TupleType] = _deepcopy_tuple
+
def _deepcopy_dict(x, memo):
    y = {}
    # Memoize before recursing so self-referential dicts terminate.
    memo[id(x)] = y
    for key, value in x.iteritems():
        y[deepcopy(key, memo)] = deepcopy(value, memo)
    return y
d[types.DictionaryType] = _deepcopy_dict
if PyStringMap is not None:
    d[PyStringMap] = _deepcopy_dict
+
+def _keep_alive(x, memo):
+    """Keeps a reference to the object x in the memo.
+
+    Because we remember objects by their id, we have
+    to assure that possibly temporary objects are kept
+    alive by referencing them.
+    We store a reference at the id of the memo, which should
+    normally not be used unless someone tries to deepcopy
+    the memo itself...
+    """
+    try:
+        memo[id(memo)].append(x)
+    except KeyError:
+        # aha, this is the first one :-)
+        memo[id(memo)]=[x]
+
def _deepcopy_inst(x, memo):
    # Deep-copy an old-style class instance, honouring __deepcopy__,
    # __getinitargs__ and the __getstate__/__setstate__ protocol.
    if hasattr(x, '__deepcopy__'):
        return x.__deepcopy__(memo)
    if hasattr(x, '__getinitargs__'):
        args = x.__getinitargs__()
        args = deepcopy(args, memo)
        y = x.__class__(*args)
    else:
        # Create the instance without running __init__.
        y = _EmptyClass()
        y.__class__ = x.__class__
    # Memoize before copying state so cycles through x resolve to y.
    memo[id(x)] = y
    if hasattr(x, '__getstate__'):
        state = x.__getstate__()
    else:
        state = x.__dict__
    state = deepcopy(state, memo)
    if hasattr(y, '__setstate__'):
        y.__setstate__(state)
    else:
        y.__dict__.update(state)
    return y
d[types.InstanceType] = _deepcopy_inst
+
def _reconstruct(x, info, deep, memo=None):
    # Rebuild an object from a pickle-style reduce tuple:
    #   (callable, args[, state[, listiter[, dictiter]]])
    # If info is a string, x is treated as atomic and returned unchanged.
    if isinstance(info, str):
        return x
    assert isinstance(info, tuple)
    if memo is None:
        memo = {}
    n = len(info)
    assert n in (2, 3, 4, 5)
    callable, args = info[:2]
    if n > 2:
        state = info[2]
    else:
        state = {}
    if n > 3:
        listiter = info[3]
    else:
        listiter = None
    if n > 4:
        dictiter = info[4]
    else:
        dictiter = None
    if deep:
        args = deepcopy(args, memo)
    y = callable(*args)
    # Memoize before filling in contents so cycles resolve to y.
    memo[id(x)] = y
    if listiter is not None:
        # Replay appended list items (reduce protocol list extension).
        for item in listiter:
            if deep:
                item = deepcopy(item, memo)
            y.append(item)
    if dictiter is not None:
        # Replay dict items.
        for key, value in dictiter:
            if deep:
                key = deepcopy(key, memo)
                value = deepcopy(value, memo)
            y[key] = value
    if state:
        if deep:
            state = deepcopy(state, memo)
        if hasattr(y, '__setstate__'):
            y.__setstate__(state)
        else:
            # A 2-tuple state is (instance __dict__, slot dict).
            if isinstance(state, tuple) and len(state) == 2:
                state, slotstate = state
            else:
                slotstate = None
            if state is not None:
                y.__dict__.update(state)
            if slotstate is not None:
                for key, value in slotstate.iteritems():
                    setattr(y, key, value)
    return y

del d

del types
+
# Helper for instance creation without calling __init__
class _EmptyClass:
    """Placeholder old-style class; instances get their real __class__
    assigned after creation, bypassing the real __init__."""
    pass
+
def _test():
    """Smoke-test copy()/deepcopy() on assorted builtins and on a class
    that customizes copying via __getstate__/__setstate__/__deepcopy__.
    Results are printed rather than asserted."""
    l = [None, 1, 2L, 3.14, 'xyzzy', (1, 2L), [3.14, 'abc'],
         {'abc': 'ABC'}, (), [], {}]
    l1 = copy(l)
    print l1==l
    l1 = map(copy, l)
    print l1==l
    l1 = deepcopy(l)
    print l1==l
    class C:
        def __init__(self, arg=None):
            self.a = 1
            self.arg = arg
            if __name__ == '__main__':
                import sys
                file = sys.argv[0]
            else:
                file = __file__
            self.fp = open(file)
            self.fp.close()
        def __getstate__(self):
            return {'a': self.a, 'arg': self.arg}
        def __setstate__(self, state):
            for key, value in state.iteritems():
                setattr(self, key, value)
        def __deepcopy__(self, memo=None):
            new = self.__class__(deepcopy(self.arg, memo))
            new.a = self.a
            return new
    c = C('argument sketch')
    l.append(c)
    l2 = copy(l)
    print l == l2
    print l
    print l2
    l2 = deepcopy(l)
    print l == l2
    print l
    print l2
    # Add a self-referential dict to exercise recursive structures.
    l.append({l[1]: l, 'xyz': l[2]})
    l3 = copy(l)
    import repr
    print map(repr.repr, l)
    print map(repr.repr, l1)
    print map(repr.repr, l2)
    print map(repr.repr, l3)
    l3 = deepcopy(l)
    import repr
    print map(repr.repr, l)
    print map(repr.repr, l1)
    print map(repr.repr, l2)
    print map(repr.repr, l3)

if __name__ == '__main__':
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/copy_reg.py b/depot_tools/release/win/python_24/Lib/copy_reg.py
new file mode 100644
index 0000000..f499013
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/copy_reg.py
@@ -0,0 +1,189 @@
+"""Helper to provide extensibility for pickle/cPickle.
+
+This is only useful to add pickle support for extension types defined in
+C, not for instances of user-defined classes.
+"""
+
+from types import ClassType as _ClassType
+
+__all__ = ["pickle", "constructor",
+           "add_extension", "remove_extension", "clear_extension_cache"]
+
+dispatch_table = {}
+
def pickle(ob_type, pickle_function, constructor_ob=None):
    """Register *pickle_function* as the reduction function for *ob_type*.

    Intended for extension types only; old-style classes are rejected.
    """
    if type(ob_type) is _ClassType:
        raise TypeError("copy_reg is not intended for use with classes")
    if not callable(pickle_function):
        raise TypeError("reduction functions must be callable")

    dispatch_table[ob_type] = pickle_function

    # The constructor_ob argument is a vestige of "safe for unpickling";
    # callers no longer need to pass it, but it is validated when they do.
    if constructor_ob is not None:
        constructor(constructor_ob)
+
def constructor(object):
    """Validate that *object* is usable as an unpickling constructor."""
    if callable(object):
        return
    raise TypeError("constructors must be callable")
+
# Example: provide pickling support for complex numbers.

try:
    complex
except NameError:
    # This build has no complex type; nothing to register.
    pass
else:

    def pickle_complex(c):
        # Reduce a complex number to (constructor, (real, imag)).
        return complex, (c.real, c.imag)

    pickle(complex, pickle_complex, complex)
+
+# Support for pickling new-style objects
+
+def _reconstructor(cls, base, state):
+    if base is object:
+        obj = object.__new__(cls)
+    else:
+        obj = base.__new__(cls, state)
+        base.__init__(obj, state)
+    return obj
+
+_HEAPTYPE = 1<<9
+
+# Python code for object.__reduce_ex__ for protocols 0 and 1
+
def _reduce_ex(self, proto):
    """__reduce_ex__ implementation for pickle protocols 0 and 1.

    Finds the closest built-in (non-heap) base class of self, derives the
    constructor state from it, and returns a reduce tuple that drives
    _reconstructor.
    """
    assert proto < 2
    # Find the most-derived base that is a built-in (non-heap) type.
    for base in self.__class__.__mro__:
        if hasattr(base, '__flags__') and not base.__flags__ & _HEAPTYPE:
            break
    else:
        base = object # not really reachable
    if base is object:
        state = None
    else:
        if base is self.__class__:
            # Modernized from the py2-only `raise TypeError, "..."` form;
            # exception type and message are unchanged.
            raise TypeError("can't pickle %s objects" % base.__name__)
        state = base(self)
    args = (self.__class__, base, state)
    try:
        getstate = self.__getstate__
    except AttributeError:
        if getattr(self, "__slots__", None):
            raise TypeError("a class that defines __slots__ without "
                            "defining __getstate__ cannot be pickled")
        try:
            # Renamed from `dict` to avoid shadowing the builtin.
            obj_dict = self.__dict__
        except AttributeError:
            obj_dict = None
    else:
        obj_dict = getstate()
    if obj_dict:
        return _reconstructor, args, obj_dict
    else:
        return _reconstructor, args
+
# Helper for __reduce_ex__ protocol 2

def __newobj__(cls, *args):
    # Recreate an object via cls.__new__ without calling __init__; this
    # exact name is what protocol-2 pickles look up on unpickling.
    return cls.__new__(cls, *args)
+
+def _slotnames(cls):
+    """Return a list of slot names for a given class.
+
+    This needs to find slots defined by the class and its bases, so we
+    can't simply return the __slots__ attribute.  We must walk down
+    the Method Resolution Order and concatenate the __slots__ of each
+    class found there.  (This assumes classes don't modify their
+    __slots__ attribute to misrepresent their slots after the class is
+    defined.)
+    """
+
+    # Get the value from a cache in the class if possible
+    names = cls.__dict__.get("__slotnames__")
+    if names is not None:
+        return names
+
+    # Not cached -- calculate the value
+    names = []
+    if not hasattr(cls, "__slots__"):
+        # This class has no slots
+        pass
+    else:
+        # Slots found -- gather slot names from all base classes
+        for c in cls.__mro__:
+            if "__slots__" in c.__dict__:
+                names += [name for name in c.__dict__["__slots__"]
+                               if name not in ("__dict__", "__weakref__")]
+
+    # Cache the outcome in the class if at all possible
+    try:
+        cls.__slotnames__ = names
+    except:
+        pass # But don't die if we can't
+
+    return names
+
+# A registry of extension codes.  This is an ad-hoc compression
+# mechanism.  Whenever a global reference to <module>, <name> is about
+# to be pickled, the (<module>, <name>) tuple is looked up here to see
+# if it is a registered extension code for it.  Extension codes are
+# universal, so that the meaning of a pickle does not depend on
+# context.  (There are also some codes reserved for local use that
+# don't have this restriction.)  Codes are positive ints; 0 is
+# reserved.
+
+_extension_registry = {}                # key -> code
+_inverted_registry = {}                 # code -> key
+_extension_cache = {}                   # code -> object
+# Don't ever rebind those names:  cPickle grabs a reference to them when
+# it's initialized, and won't see a rebinding.
+
def add_extension(module, name, code):
    """Register an extension code.

    module, name: the global the code stands for.
    code: positive int in [1, 0x7fffffff]; 0 is reserved.
    Raises ValueError on out-of-range or conflicting registrations.
    """
    code = int(code)
    if not 1 <= code <= 0x7fffffff:
        # Modernized from the py2-only `raise ValueError, "..."` form.
        raise ValueError("code out of range")
    key = (module, name)
    if (_extension_registry.get(key) == code and
        _inverted_registry.get(code) == key):
        return # Redundant registrations are benign
    if key in _extension_registry:
        raise ValueError("key %s is already registered with code %s" %
                         (key, _extension_registry[key]))
    if code in _inverted_registry:
        raise ValueError("code %s is already in use for key %s" %
                         (code, _inverted_registry[code]))
    # Keep the two maps in sync: key -> code and code -> key.
    _extension_registry[key] = code
    _inverted_registry[code] = key
+
def remove_extension(module, name, code):
    """Unregister an extension code.  For testing only."""
    key = (module, name)
    # Both directions of the mapping must agree before anything is removed.
    if (_extension_registry.get(key) != code or
        _inverted_registry.get(code) != key):
        raise ValueError("key %s is not registered with code %s" %
                         (key, code))
    del _extension_registry[key]
    del _inverted_registry[code]
    # Drop any memoized object for this code as well.
    _extension_cache.pop(code, None)
+
def clear_extension_cache():
    # Drop all memoized extension-code objects; the registry itself is kept.
    _extension_cache.clear()
+
+# Standard extension code assignments
+
+# Reserved ranges
+
+# First  Last Count  Purpose
+#     1   127   127  Reserved for Python standard library
+#   128   191    64  Reserved for Zope
+#   192   239    48  Reserved for 3rd parties
+#   240   255    16  Reserved for private use (will never be assigned)
+#   256   Inf   Inf  Reserved for future assignment
+
+# Extension codes are assigned by the Python Software Foundation.
diff --git a/depot_tools/release/win/python_24/Lib/csv.py b/depot_tools/release/win/python_24/Lib/csv.py
new file mode 100644
index 0000000..f2389fd3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/csv.py
@@ -0,0 +1,431 @@
+
+"""
+csv.py - read/write/investigate CSV files
+"""
+
+import re
+from _csv import Error, __version__, writer, reader, register_dialect, \
+                 unregister_dialect, get_dialect, list_dialects, \
+                 QUOTE_MINIMAL, QUOTE_ALL, QUOTE_NONNUMERIC, QUOTE_NONE, \
+                 __doc__
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+__all__ = [ "QUOTE_MINIMAL", "QUOTE_ALL", "QUOTE_NONNUMERIC", "QUOTE_NONE",
+            "Error", "Dialect", "excel", "excel_tab", "reader", "writer",
+            "register_dialect", "get_dialect", "list_dialects", "Sniffer",
+            "unregister_dialect", "__version__", "DictReader", "DictWriter" ]
+
class Dialect:
    """Describe the low-level layout parameters of a CSV variant.

    Subclasses set the class attributes below; instantiating one validates
    them and raises Error listing every problem found.
    """
    _name = ""
    _valid = False
    # placeholders
    delimiter = None
    quotechar = None
    escapechar = None
    doublequote = None
    skipinitialspace = None
    lineterminator = None
    quoting = None

    def __init__(self):
        # A bare Dialect never becomes valid, so direct instantiation
        # always fails validation below.
        if self.__class__ != Dialect:
            self._valid = True
        errors = self._validate()
        if errors != []:
            # Modernized from the py2-only `raise Error, "..."` form;
            # exception type and message are unchanged.
            raise Error("Dialect did not validate: %s" % ", ".join(errors))

    def _validate(self):
        """Return a list of human-readable problems with this dialect."""
        errors = []
        if not self._valid:
            errors.append("can't directly instantiate Dialect class")

        if self.delimiter is None:
            errors.append("delimiter character not set")
        elif (not isinstance(self.delimiter, str) or
              len(self.delimiter) > 1):
            errors.append("delimiter must be one-character string")

        if self.quotechar is None:
            # quotechar is only mandatory when quoting is enabled.
            if self.quoting != QUOTE_NONE:
                errors.append("quotechar not set")
        elif (not isinstance(self.quotechar, str) or
              len(self.quotechar) > 1):
            errors.append("quotechar must be one-character string")

        if self.lineterminator is None:
            errors.append("lineterminator not set")
        elif not isinstance(self.lineterminator, str):
            errors.append("lineterminator must be a string")

        if self.doublequote not in (True, False):
            errors.append("doublequote parameter must be True or False")

        if self.skipinitialspace not in (True, False):
            errors.append("skipinitialspace parameter must be True or False")

        if self.quoting is None:
            errors.append("quoting parameter not set")

        if self.quoting is QUOTE_NONE:
            # With quoting disabled an escapechar becomes mandatory.
            if (not isinstance(self.escapechar, (unicode, str)) or
                len(self.escapechar) > 1):
                errors.append("escapechar must be a one-character string or unicode object")

        return errors
+
class excel(Dialect):
    """Describe the usual properties of Excel-generated CSV files."""
    delimiter = ','
    quotechar = '"'
    doublequote = True
    skipinitialspace = False
    lineterminator = '\r\n'
    quoting = QUOTE_MINIMAL
register_dialect("excel", excel)
+
class excel_tab(excel):
    """Excel dialect variant using TAB as the field delimiter."""
    delimiter = '\t'
register_dialect("excel-tab", excel_tab)
+
+
class DictReader:
    """Read CSV rows as dicts keyed by a fixed field-name list.

    When fieldnames is not supplied, the first row read supplies the
    field names.
    """
    def __init__(self, f, fieldnames=None, restkey=None, restval=None,
                 dialect="excel", *args, **kwds):
        self.fieldnames = fieldnames    # list of keys for the dict
        self.restkey = restkey          # key to catch long rows
        self.restval = restval          # default value for short rows
        self.reader = reader(f, dialect, *args, **kwds)

    def __iter__(self):
        return self

    def next(self):
        # Fetch the next row; if field names weren't given, the first row
        # read becomes the field-name list and the next row is fetched.
        row = self.reader.next()
        if self.fieldnames is None:
            self.fieldnames = row
            row = self.reader.next()

        # unlike the basic reader, we prefer not to return blanks,
        # because we will typically wind up with a dict full of None
        # values
        while row == []:
            row = self.reader.next()
        d = dict(zip(self.fieldnames, row))
        lf = len(self.fieldnames)
        lr = len(row)
        if lf < lr:
            # Extra values beyond the field list are collected under restkey.
            d[self.restkey] = row[lf:]
        elif lf > lr:
            # Short rows: pad the missing fields with restval.
            for key in self.fieldnames[lr:]:
                d[key] = self.restval
        return d
+
+
class DictWriter:
    """Write dicts as CSV rows, mapping keys onto a fixed field list."""

    def __init__(self, f, fieldnames, restval="", extrasaction="raise",
                 dialect="excel", *args, **kwds):
        """
        f: underlying file-like object.
        fieldnames: ordered list of keys defining the output columns.
        restval: value written for keys missing from a row dict.
        extrasaction: 'raise' to error on unknown keys, 'ignore' to drop them.
        dialect, *args, **kwds: forwarded to the underlying writer().
        """
        self.fieldnames = fieldnames    # list of keys for the dict
        self.restval = restval          # for writing short dicts
        if extrasaction.lower() not in ("raise", "ignore"):
            # Modernized from the py2-only `raise ValueError, ...` form;
            # exception type and message are unchanged.
            raise ValueError(
                "extrasaction (%s) must be 'raise' or 'ignore'" %
                extrasaction)
        self.extrasaction = extrasaction
        self.writer = writer(f, dialect, *args, **kwds)

    def _dict_to_list(self, rowdict):
        # Optionally reject unknown keys, then order the values by
        # self.fieldnames, filling gaps with restval.
        if self.extrasaction == "raise":
            for k in rowdict.keys():
                if k not in self.fieldnames:
                    raise ValueError("dict contains fields not in fieldnames")
        return [rowdict.get(key, self.restval) for key in self.fieldnames]

    def writerow(self, rowdict):
        """Write a single dict as one CSV row."""
        return self.writer.writerow(self._dict_to_list(rowdict))

    def writerows(self, rowdicts):
        """Write a sequence of dicts as CSV rows."""
        rows = []
        for rowdict in rowdicts:
            rows.append(self._dict_to_list(rowdict))
        return self.writer.writerows(rows)
+
+# Guard Sniffer's type checking against builds that exclude complex()
+try:
+    complex
+except NameError:
+    complex = float
+
+class Sniffer:
+    '''
+    "Sniffs" the format of a CSV file (i.e. delimiter, quotechar)
+    Returns a Dialect object.
+    '''
    def __init__(self):
        """Initialize with the default delimiter preference order."""
        # in case there is more than one possible delimiter
        self.preferred = [',', '\t', ';', ' ', ':']
+
+
    def sniff(self, sample, delimiters=None):
        """
        Returns a dialect (or None) corresponding to the sample
        """

        # First try to deduce quote char and delimiter from quoted fields;
        # fall back to character-frequency analysis when nothing is quoted.
        quotechar, delimiter, skipinitialspace = \
                   self._guess_quote_and_delimiter(sample, delimiters)
        if delimiter is None:
            delimiter, skipinitialspace = self._guess_delimiter(sample,
                                                                delimiters)

        # Build a one-off Dialect subclass carrying the sniffed parameters;
        # note the class itself (not an instance) is returned.
        class dialect(Dialect):
            _name = "sniffed"
            lineterminator = '\r\n'
            quoting = QUOTE_MINIMAL
            # escapechar = ''
            doublequote = False

        dialect.delimiter = delimiter
        # _csv.reader won't accept a quotechar of ''
        dialect.quotechar = quotechar or '"'
        dialect.skipinitialspace = skipinitialspace

        return dialect
+
+
+    def _guess_quote_and_delimiter(self, data, delimiters):
+        """
+        Looks for text enclosed between two identical quotes
+        (the probable quotechar) which are preceded and followed
+        by the same character (the probable delimiter).
+        For example:
+                         ,'some text',
+        The quote with the most wins, same with the delimiter.
+        If there is no quotechar the delimiter can't be determined
+        this way.
+        """
+
+        matches = []
+        for restr in ('(?P<delim>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?P=delim)', # ,".*?",
+                      '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?P<delim>[^\w\n"\'])(?P<space> ?)',   #  ".*?",
+                      '(?P<delim>>[^\w\n"\'])(?P<space> ?)(?P<quote>["\']).*?(?P=quote)(?:$|\n)',  # ,".*?"
+                      '(?:^|\n)(?P<quote>["\']).*?(?P=quote)(?:$|\n)'):                            #  ".*?" (no delim, no space)
+            regexp = re.compile(restr, re.DOTALL | re.MULTILINE)
+            matches = regexp.findall(data)
+            if matches:
+                break
+
+        if not matches:
+            return ('', None, 0) # (quotechar, delimiter, skipinitialspace)
+
+        quotes = {}
+        delims = {}
+        spaces = 0
+        for m in matches:
+            n = regexp.groupindex['quote'] - 1
+            key = m[n]
+            if key:
+                quotes[key] = quotes.get(key, 0) + 1
+            try:
+                n = regexp.groupindex['delim'] - 1
+                key = m[n]
+            except KeyError:
+                continue
+            if key and (delimiters is None or key in delimiters):
+                delims[key] = delims.get(key, 0) + 1
+            try:
+                n = regexp.groupindex['space'] - 1
+            except KeyError:
+                continue
+            if m[n]:
+                spaces += 1
+
+        quotechar = reduce(lambda a, b, quotes = quotes:
+                           (quotes[a] > quotes[b]) and a or b, quotes.keys())
+
+        if delims:
+            delim = reduce(lambda a, b, delims = delims:
+                           (delims[a] > delims[b]) and a or b, delims.keys())
+            skipinitialspace = delims[delim] == spaces
+            if delim == '\n': # most likely a file with a single column
+                delim = ''
+        else:
+            # there is *no* delimiter, it's a single column of quoted data
+            delim = ''
+            skipinitialspace = 0
+
+        return (quotechar, delim, skipinitialspace)
+
+
+    def _guess_delimiter(self, data, delimiters):
+        """
+        The delimiter /should/ occur the same number of times on
+        each row. However, due to malformed data, it may not. We don't want
+        an all or nothing approach, so we allow for small variations in this
+        number.
+          1) build a table of the frequency of each character on every line.
+          2) build a table of freqencies of this frequency (meta-frequency?),
+             e.g.  'x occurred 5 times in 10 rows, 6 times in 1000 rows,
+             7 times in 2 rows'
+          3) use the mode of the meta-frequency to determine the /expected/
+             frequency for that character
+          4) find out how often the character actually meets that goal
+          5) the character that best meets its goal is the delimiter
+        For performance reasons, the data is evaluated in chunks, so it can
+        try and evaluate the smallest portion of the data possible, evaluating
+        additional chunks as necessary.
+        """
+
+        # Non-empty lines only (Python 2 filter() returns a list here).
+        data = filter(None, data.split('\n'))
+
+        ascii = [chr(c) for c in range(127)] # 7-bit ASCII
+
+        # build frequency tables
+        chunkLength = min(10, len(data))
+        iteration = 0
+        charFrequency = {}   # char -> {per-line count -> number of lines}
+        modes = {}           # char -> (most common count, adjusted weight)
+        delims = {}          # surviving delimiter candidates
+        start, end = 0, min(chunkLength, len(data))
+        while start < len(data):
+            iteration += 1
+            for line in data[start:end]:
+                for char in ascii:
+                    metaFrequency = charFrequency.get(char, {})
+                    # must count even if frequency is 0
+                    freq = line.strip().count(char)
+                    # value is the mode
+                    metaFrequency[freq] = metaFrequency.get(freq, 0) + 1
+                    charFrequency[char] = metaFrequency
+
+            for char in charFrequency.keys():
+                items = charFrequency[char].items()
+                # Chars that never appear on any line carry no signal.
+                if len(items) == 1 and items[0][0] == 0:
+                    continue
+                # get the mode of the frequencies
+                if len(items) > 1:
+                    modes[char] = reduce(lambda a, b: a[1] > b[1] and a or b,
+                                         items)
+                    # adjust the mode - subtract the sum of all
+                    # other frequencies
+                    items.remove(modes[char])
+                    modes[char] = (modes[char][0], modes[char][1]
+                                   - reduce(lambda a, b: (0, a[1] + b[1]),
+                                            items)[1])
+                else:
+                    modes[char] = items[0]
+
+            # build a list of possible delimiters
+            modeList = modes.items()
+            total = float(chunkLength * iteration)
+            # (rows of consistent data) / (number of rows) = 100%
+            consistency = 1.0
+            # minimum consistency threshold
+            threshold = 0.9
+            # Relax the required consistency in 1% steps until at least one
+            # candidate qualifies or we drop below the threshold.
+            while len(delims) == 0 and consistency >= threshold:
+                for k, v in modeList:
+                    if v[0] > 0 and v[1] > 0:
+                        if ((v[1]/total) >= consistency and
+                            (delimiters is None or k in delimiters)):
+                            delims[k] = v
+                consistency -= 0.01
+
+            if len(delims) == 1:
+                delim = delims.keys()[0]
+                # skipinitialspace if every delimiter in the first row is
+                # followed by a space.
+                skipinitialspace = (data[0].count(delim) ==
+                                    data[0].count("%c " % delim))
+                return (delim, skipinitialspace)
+
+            # analyze another chunkLength lines
+            start = end
+            end += chunkLength
+
+        if not delims:
+            return ('', 0)
+
+        # if there's more than one, fall back to a 'preferred' list
+        if len(delims) > 1:
+            for d in self.preferred:
+                if d in delims.keys():
+                    skipinitialspace = (data[0].count(d) ==
+                                        data[0].count("%c " % d))
+                    return (d, skipinitialspace)
+
+        # finally, just return the first damn character in the list
+        delim = delims.keys()[0]
+        skipinitialspace = (data[0].count(delim) ==
+                            data[0].count("%c " % delim))
+        return (delim, skipinitialspace)
+
+
+    def has_header(self, sample):
+        # Creates a dictionary of types of data in each column. If any
+        # column is of a single type (say, integers), *except* for the first
+        # row, then the first row is presumed to be labels. If the type
+        # can't be determined, it is assumed to be a string in which case
+        # the length of the string is the determining factor: if all of the
+        # rows except for the first are the same length, it's a header.
+        # Finally, a 'vote' is taken at the end for each column, adding or
+        # subtracting from the likelihood of the first row being a header.
+
+        # Parse the sample with the dialect sniffed from it.
+        rdr = reader(StringIO(sample), self.sniff(sample))
+
+        header = rdr.next() # assume first row is header
+
+        columns = len(header)
+        columnTypes = {}
+        for i in range(columns): columnTypes[i] = None
+
+        checked = 0
+        for row in rdr:
+            # arbitrary number of rows to check, to keep it sane
+            if checked > 20:
+                break
+            checked += 1
+
+            if len(row) != columns:
+                continue # skip rows that have irregular number of columns
+
+            for col in columnTypes.keys():
+
+                # Probe for the narrowest numeric type that parses the cell.
+                for thisType in [int, long, float, complex]:
+                    try:
+                        thisType(row[col])
+                        break
+                    except (ValueError, OverflowError):
+                        pass
+                else:
+                    # fallback to length of string
+                    thisType = len(row[col])
+
+                # treat longs as ints
+                if thisType == long:
+                    thisType = int
+
+                if thisType != columnTypes[col]:
+                    if columnTypes[col] is None: # add new column type
+                        columnTypes[col] = thisType
+                    else:
+                        # type is inconsistent, remove column from
+                        # consideration
+                        del columnTypes[col]
+
+        # finally, compare results against first row and "vote"
+        # on whether it's a header
+        hasHeader = 0
+        for col, colType in columnTypes.items():
+            if type(colType) == type(0): # it's a length
+                if len(header[col]) != colType:
+                    hasHeader += 1
+                else:
+                    hasHeader -= 1
+            else: # attempt typecast
+                try:
+                    colType(header[col])
+                except (ValueError, TypeError):
+                    hasHeader += 1
+                else:
+                    hasHeader -= 1
+
+        return hasHeader > 0
diff --git a/depot_tools/release/win/python_24/Lib/curses/__init__.py b/depot_tools/release/win/python_24/Lib/curses/__init__.py
new file mode 100644
index 0000000..c982ad3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/curses/__init__.py
@@ -0,0 +1,53 @@
+"""curses
+
+The main package for curses support for Python.  Normally used by importing
+the package, and perhaps a particular module inside it.
+
+   import curses
+   from curses import textpad
+   curses.initwin()
+   ...
+
+"""
+
+__revision__ = "$Id: __init__.py,v 1.5 2004/07/18 06:14:41 tim_one Exp $"
+
+from _curses import *
+from curses.wrapper import wrapper
+
+# Some constants, most notably the ACS_* ones, are only added to the C
+# _curses module's dictionary after initscr() is called.  (Some
+# versions of SGI's curses don't define values for those constants
+# until initscr() has been called.)  This wrapper function calls the
+# underlying C initscr(), and then copies the constants from the
+# _curses module to the curses package's dictionary.  Don't do 'from
+# curses import *' if you'll be needing the ACS_* constants.
+
+def initscr():
+    # Wrap _curses.initscr() so that constants (ACS_*, LINES, COLS) which
+    # only exist after initialization are copied onto the curses package.
+    import _curses, curses
+    stdscr = _curses.initscr()
+    for key, value in _curses.__dict__.items():
+        if key[0:4] == 'ACS_' or key in ('LINES', 'COLS'):
+            setattr(curses, key, value)
+
+    # Return the main window, as _curses.initscr() does.
+    return stdscr
+
+# This is a similar wrapper for start_color(), which adds the COLORS and
+# COLOR_PAIRS variables which are only available after start_color() is
+# called.
+
+def start_color():
+    # Wrap _curses.start_color() so that COLORS and COLOR_PAIRS, which are
+    # only defined after the call, get copied onto the curses package.
+    import _curses, curses
+    retval = _curses.start_color()
+    if hasattr(_curses, 'COLORS'):
+        curses.COLORS = _curses.COLORS
+    if hasattr(_curses, 'COLOR_PAIRS'):
+        curses.COLOR_PAIRS = _curses.COLOR_PAIRS
+    return retval
+
+# Import Python has_key() implementation if _curses doesn't contain has_key()
+
+try:
+    has_key
+except NameError:
+    from has_key import has_key
diff --git a/depot_tools/release/win/python_24/Lib/curses/ascii.py b/depot_tools/release/win/python_24/Lib/curses/ascii.py
new file mode 100644
index 0000000..800fd8b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/curses/ascii.py
@@ -0,0 +1,99 @@
+"""Constants and membership tests for ASCII characters"""
+
+# ASCII control-character code points; comments give the ^X keyboard form.
+NUL     = 0x00  # ^@
+SOH     = 0x01  # ^A
+STX     = 0x02  # ^B
+ETX     = 0x03  # ^C
+EOT     = 0x04  # ^D
+ENQ     = 0x05  # ^E
+ACK     = 0x06  # ^F
+BEL     = 0x07  # ^G
+BS      = 0x08  # ^H
+TAB     = 0x09  # ^I
+HT      = 0x09  # ^I
+LF      = 0x0a  # ^J
+NL      = 0x0a  # ^J
+VT      = 0x0b  # ^K
+FF      = 0x0c  # ^L
+CR      = 0x0d  # ^M
+SO      = 0x0e  # ^N
+SI      = 0x0f  # ^O
+DLE     = 0x10  # ^P
+DC1     = 0x11  # ^Q
+DC2     = 0x12  # ^R
+DC3     = 0x13  # ^S
+DC4     = 0x14  # ^T
+NAK     = 0x15  # ^U
+SYN     = 0x16  # ^V
+ETB     = 0x17  # ^W
+CAN     = 0x18  # ^X
+EM      = 0x19  # ^Y
+SUB     = 0x1a  # ^Z
+ESC     = 0x1b  # ^[
+FS      = 0x1c  # ^\
+GS      = 0x1d  # ^]
+RS      = 0x1e  # ^^
+US      = 0x1f  # ^_
+SP      = 0x20  # space
+DEL     = 0x7f  # delete
+
+# Names of the control characters, indexed by code point (0x00-0x20).
+controlnames = [
+"NUL", "SOH", "STX", "ETX", "EOT", "ENQ", "ACK", "BEL",
+"BS",  "HT",  "LF",  "VT",  "FF",  "CR",  "SO",  "SI",
+"DLE", "DC1", "DC2", "DC3", "DC4", "NAK", "SYN", "ETB",
+"CAN", "EM",  "SUB", "ESC", "FS",  "GS",  "RS",  "US",
+"SP"
+]
+
+def _ctoi(c):
+    # Normalize a one-character string or an int to its integer code point.
+    if type(c) == type(""):
+        return ord(c)
+    else:
+        return c
+
+# C-style <ctype.h> membership tests; each accepts a one-character string
+# or an integer code point (see _ctoi above).
+def isalnum(c): return isalpha(c) or isdigit(c)
+def isalpha(c): return isupper(c) or islower(c)
+def isascii(c): return _ctoi(c) <= 127          # ?
+def isblank(c): return _ctoi(c) in (8,32)
+def iscntrl(c): return _ctoi(c) <= 31
+def isdigit(c): return _ctoi(c) >= 48 and _ctoi(c) <= 57
+def isgraph(c): return _ctoi(c) >= 33 and _ctoi(c) <= 126
+def islower(c): return _ctoi(c) >= 97 and _ctoi(c) <= 122
+def isprint(c): return _ctoi(c) >= 32 and _ctoi(c) <= 126
+def ispunct(c): return _ctoi(c) != 32 and not isalnum(c)
+def isspace(c): return _ctoi(c) in (9, 10, 11, 12, 13, 32)
+def isupper(c): return _ctoi(c) >= 65 and _ctoi(c) <= 90
+def isxdigit(c): return isdigit(c) or \
+    (_ctoi(c) >= 65 and _ctoi(c) <= 70) or (_ctoi(c) >= 97 and _ctoi(c) <= 102)
+def isctrl(c): return _ctoi(c) < 32
+def ismeta(c): return _ctoi(c) > 127
+
+def ascii(c):
+    # Strip the meta (8th) bit, returning the same type that was passed in.
+    if type(c) == type(""):
+        return chr(_ctoi(c) & 0x7f)
+    else:
+        return _ctoi(c) & 0x7f
+
+def ctrl(c):
+    # Map a character to its control equivalent (mask to the low 5 bits),
+    # returning the same type that was passed in.
+    if type(c) == type(""):
+        return chr(_ctoi(c) & 0x1f)
+    else:
+        return _ctoi(c) & 0x1f
+
+def alt(c):
+    # Set the meta (8th) bit, returning the same type that was passed in.
+    if type(c) == type(""):
+        return chr(_ctoi(c) | 0x80)
+    else:
+        return _ctoi(c) | 0x80
+
+def unctrl(c):
+    # Return a printable representation of a character: control chars
+    # become "^X", DEL becomes "^?", and the meta bit adds a "!" prefix.
+    bits = _ctoi(c)
+    if bits == 0x7f:
+        rep = "^?"
+    elif isprint(bits & 0x7f):
+        rep = chr(bits & 0x7f)
+    else:
+        rep = "^" + chr(((bits & 0x7f) | 0x20) + 0x20)
+    if bits & 0x80:
+        return "!" + rep
+    return rep
diff --git a/depot_tools/release/win/python_24/Lib/curses/has_key.py b/depot_tools/release/win/python_24/Lib/curses/has_key.py
new file mode 100644
index 0000000..60b7be9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/curses/has_key.py
@@ -0,0 +1,192 @@
+
+#
+# Emulation of has_key() function for platforms that don't use ncurses
+#
+
+import _curses
+
+# Table mapping curses KEY_* keycodes to the terminfo capability name that
+# produces them; has_key() below checks the capability against the current
+# terminal description via tigetstr().
+
+_capability_names = {
+    _curses.KEY_A1: 'ka1',
+    _curses.KEY_A3: 'ka3',
+    _curses.KEY_B2: 'kb2',
+    _curses.KEY_BACKSPACE: 'kbs',
+    _curses.KEY_BEG: 'kbeg',
+    _curses.KEY_BTAB: 'kcbt',
+    _curses.KEY_C1: 'kc1',
+    _curses.KEY_C3: 'kc3',
+    _curses.KEY_CANCEL: 'kcan',
+    _curses.KEY_CATAB: 'ktbc',
+    _curses.KEY_CLEAR: 'kclr',
+    _curses.KEY_CLOSE: 'kclo',
+    _curses.KEY_COMMAND: 'kcmd',
+    _curses.KEY_COPY: 'kcpy',
+    _curses.KEY_CREATE: 'kcrt',
+    _curses.KEY_CTAB: 'kctab',
+    _curses.KEY_DC: 'kdch1',
+    _curses.KEY_DL: 'kdl1',
+    _curses.KEY_DOWN: 'kcud1',
+    _curses.KEY_EIC: 'krmir',
+    _curses.KEY_END: 'kend',
+    _curses.KEY_ENTER: 'kent',
+    _curses.KEY_EOL: 'kel',
+    _curses.KEY_EOS: 'ked',
+    _curses.KEY_EXIT: 'kext',
+    _curses.KEY_F0: 'kf0',
+    _curses.KEY_F1: 'kf1',
+    _curses.KEY_F10: 'kf10',
+    _curses.KEY_F11: 'kf11',
+    _curses.KEY_F12: 'kf12',
+    _curses.KEY_F13: 'kf13',
+    _curses.KEY_F14: 'kf14',
+    _curses.KEY_F15: 'kf15',
+    _curses.KEY_F16: 'kf16',
+    _curses.KEY_F17: 'kf17',
+    _curses.KEY_F18: 'kf18',
+    _curses.KEY_F19: 'kf19',
+    _curses.KEY_F2: 'kf2',
+    _curses.KEY_F20: 'kf20',
+    _curses.KEY_F21: 'kf21',
+    _curses.KEY_F22: 'kf22',
+    _curses.KEY_F23: 'kf23',
+    _curses.KEY_F24: 'kf24',
+    _curses.KEY_F25: 'kf25',
+    _curses.KEY_F26: 'kf26',
+    _curses.KEY_F27: 'kf27',
+    _curses.KEY_F28: 'kf28',
+    _curses.KEY_F29: 'kf29',
+    _curses.KEY_F3: 'kf3',
+    _curses.KEY_F30: 'kf30',
+    _curses.KEY_F31: 'kf31',
+    _curses.KEY_F32: 'kf32',
+    _curses.KEY_F33: 'kf33',
+    _curses.KEY_F34: 'kf34',
+    _curses.KEY_F35: 'kf35',
+    _curses.KEY_F36: 'kf36',
+    _curses.KEY_F37: 'kf37',
+    _curses.KEY_F38: 'kf38',
+    _curses.KEY_F39: 'kf39',
+    _curses.KEY_F4: 'kf4',
+    _curses.KEY_F40: 'kf40',
+    _curses.KEY_F41: 'kf41',
+    _curses.KEY_F42: 'kf42',
+    _curses.KEY_F43: 'kf43',
+    _curses.KEY_F44: 'kf44',
+    _curses.KEY_F45: 'kf45',
+    _curses.KEY_F46: 'kf46',
+    _curses.KEY_F47: 'kf47',
+    _curses.KEY_F48: 'kf48',
+    _curses.KEY_F49: 'kf49',
+    _curses.KEY_F5: 'kf5',
+    _curses.KEY_F50: 'kf50',
+    _curses.KEY_F51: 'kf51',
+    _curses.KEY_F52: 'kf52',
+    _curses.KEY_F53: 'kf53',
+    _curses.KEY_F54: 'kf54',
+    _curses.KEY_F55: 'kf55',
+    _curses.KEY_F56: 'kf56',
+    _curses.KEY_F57: 'kf57',
+    _curses.KEY_F58: 'kf58',
+    _curses.KEY_F59: 'kf59',
+    _curses.KEY_F6: 'kf6',
+    _curses.KEY_F60: 'kf60',
+    _curses.KEY_F61: 'kf61',
+    _curses.KEY_F62: 'kf62',
+    _curses.KEY_F63: 'kf63',
+    _curses.KEY_F7: 'kf7',
+    _curses.KEY_F8: 'kf8',
+    _curses.KEY_F9: 'kf9',
+    _curses.KEY_FIND: 'kfnd',
+    _curses.KEY_HELP: 'khlp',
+    _curses.KEY_HOME: 'khome',
+    _curses.KEY_IC: 'kich1',
+    _curses.KEY_IL: 'kil1',
+    _curses.KEY_LEFT: 'kcub1',
+    _curses.KEY_LL: 'kll',
+    _curses.KEY_MARK: 'kmrk',
+    _curses.KEY_MESSAGE: 'kmsg',
+    _curses.KEY_MOVE: 'kmov',
+    _curses.KEY_NEXT: 'knxt',
+    _curses.KEY_NPAGE: 'knp',
+    _curses.KEY_OPEN: 'kopn',
+    _curses.KEY_OPTIONS: 'kopt',
+    _curses.KEY_PPAGE: 'kpp',
+    _curses.KEY_PREVIOUS: 'kprv',
+    _curses.KEY_PRINT: 'kprt',
+    _curses.KEY_REDO: 'krdo',
+    _curses.KEY_REFERENCE: 'kref',
+    _curses.KEY_REFRESH: 'krfr',
+    _curses.KEY_REPLACE: 'krpl',
+    _curses.KEY_RESTART: 'krst',
+    _curses.KEY_RESUME: 'kres',
+    _curses.KEY_RIGHT: 'kcuf1',
+    _curses.KEY_SAVE: 'ksav',
+    _curses.KEY_SBEG: 'kBEG',
+    _curses.KEY_SCANCEL: 'kCAN',
+    _curses.KEY_SCOMMAND: 'kCMD',
+    _curses.KEY_SCOPY: 'kCPY',
+    _curses.KEY_SCREATE: 'kCRT',
+    _curses.KEY_SDC: 'kDC',
+    _curses.KEY_SDL: 'kDL',
+    _curses.KEY_SELECT: 'kslt',
+    _curses.KEY_SEND: 'kEND',
+    _curses.KEY_SEOL: 'kEOL',
+    _curses.KEY_SEXIT: 'kEXT',
+    _curses.KEY_SF: 'kind',
+    _curses.KEY_SFIND: 'kFND',
+    _curses.KEY_SHELP: 'kHLP',
+    _curses.KEY_SHOME: 'kHOM',
+    _curses.KEY_SIC: 'kIC',
+    _curses.KEY_SLEFT: 'kLFT',
+    _curses.KEY_SMESSAGE: 'kMSG',
+    _curses.KEY_SMOVE: 'kMOV',
+    _curses.KEY_SNEXT: 'kNXT',
+    _curses.KEY_SOPTIONS: 'kOPT',
+    _curses.KEY_SPREVIOUS: 'kPRV',
+    _curses.KEY_SPRINT: 'kPRT',
+    _curses.KEY_SR: 'kri',
+    _curses.KEY_SREDO: 'kRDO',
+    _curses.KEY_SREPLACE: 'kRPL',
+    _curses.KEY_SRIGHT: 'kRIT',
+    _curses.KEY_SRSUME: 'kRES',
+    _curses.KEY_SSAVE: 'kSAV',
+    _curses.KEY_SSUSPEND: 'kSPD',
+    _curses.KEY_STAB: 'khts',
+    _curses.KEY_SUNDO: 'kUND',
+    _curses.KEY_SUSPEND: 'kspd',
+    _curses.KEY_UNDO: 'kund',
+    _curses.KEY_UP: 'kcuu1'
+    }
+
+def has_key(ch):
+    # Accept either a one-character string or an integer keycode.
+    if isinstance(ch, str):
+        ch = ord(ch)
+
+    # Figure out the correct capability name for the keycode.
+    capability_name = _capability_names.get(ch)
+    if capability_name is None:
+        return False
+
+    #Check the current terminal description for that capability;
+    #if present, return true, else return false.
+    if _curses.tigetstr( capability_name ):
+        return True
+    else:
+        return False
+
+if __name__ == '__main__':
+    # Compare the output of this implementation and the ncurses has_key,
+    # on platforms where has_key is already available
+    # NOTE(review): if _curses.initscr() itself raises, L is still bound
+    # (assigned before the call) so the finally block is safe.
+    try:
+        L = []
+        _curses.initscr()
+        for key in _capability_names.keys():
+            system = _curses.has_key(key)
+            python = has_key(key)
+            if system != python:
+                L.append( 'Mismatch for key %s, system=%i, Python=%i'
+                          % (_curses.keyname( key ), system, python) )
+    finally:
+        # Always restore the terminal before reporting mismatches.
+        _curses.endwin()
+        for i in L: print i
diff --git a/depot_tools/release/win/python_24/Lib/curses/panel.py b/depot_tools/release/win/python_24/Lib/curses/panel.py
new file mode 100644
index 0000000..4913047
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/curses/panel.py
@@ -0,0 +1,8 @@
+"""curses.panel
+
+Module for using panels with curses.
+"""
+
+__revision__ = "$Id: panel.py,v 1.2 2004/07/18 06:14:41 tim_one Exp $"
+
+from _curses_panel import *
diff --git a/depot_tools/release/win/python_24/Lib/curses/textpad.py b/depot_tools/release/win/python_24/Lib/curses/textpad.py
new file mode 100644
index 0000000..28d78dd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/curses/textpad.py
@@ -0,0 +1,173 @@
+"""Simple textbox editing widget with Emacs-like keybindings."""
+
+import curses, ascii
+
+def rectangle(win, uly, ulx, lry, lrx):
+    """Draw a rectangle with corners at the provided upper-left
+    and lower-right coordinates.
+
+    (uly, ulx) is the upper-left corner and (lry, lrx) the lower-right,
+    in the coordinate system of *win*.
+    """
+    # Four sides first, then overwrite the four corner cells.
+    win.vline(uly+1, ulx, curses.ACS_VLINE, lry - uly - 1)
+    win.hline(uly, ulx+1, curses.ACS_HLINE, lrx - ulx - 1)
+    win.hline(lry, ulx+1, curses.ACS_HLINE, lrx - ulx - 1)
+    win.vline(uly+1, lrx, curses.ACS_VLINE, lry - uly - 1)
+    win.addch(uly, ulx, curses.ACS_ULCORNER)
+    win.addch(uly, lrx, curses.ACS_URCORNER)
+    win.addch(lry, lrx, curses.ACS_LRCORNER)
+    win.addch(lry, ulx, curses.ACS_LLCORNER)
+
+class Textbox:
+    """Editing widget using the interior of a window object.
+     Supports the following Emacs-like key bindings:
+
+    Ctrl-A      Go to left edge of window.
+    Ctrl-B      Cursor left, wrapping to previous line if appropriate.
+    Ctrl-D      Delete character under cursor.
+    Ctrl-E      Go to right edge (stripspaces off) or end of line (stripspaces on).
+    Ctrl-F      Cursor right, wrapping to next line when appropriate.
+    Ctrl-G      Terminate, returning the window contents.
+    Ctrl-H      Delete character backward.
+    Ctrl-J      Terminate if the window is 1 line, otherwise insert newline.
+    Ctrl-K      If line is blank, delete it, otherwise clear to end of line.
+    Ctrl-L      Refresh screen.
+    Ctrl-N      Cursor down; move down one line.
+    Ctrl-O      Insert a blank line at cursor location.
+    Ctrl-P      Cursor up; move up one line.
+
+    Move operations do nothing if the cursor is at an edge where the movement
+    is not possible.  The following synonyms are supported where possible:
+
+    KEY_LEFT = Ctrl-B, KEY_RIGHT = Ctrl-F, KEY_UP = Ctrl-P, KEY_DOWN = Ctrl-N
+    KEY_BACKSPACE = Ctrl-h
+    """
+    def __init__(self, win):
+        self.win = win
+        # maxy/maxx are the last valid (y, x) coordinates, not sizes.
+        (self.maxy, self.maxx) = win.getmaxyx()
+        self.maxy = self.maxy - 1
+        self.maxx = self.maxx - 1
+        # When true, trailing blanks are stripped from gathered lines.
+        self.stripspaces = 1
+        self.lastcmd = None
+        # Enable interpretation of escape sequences (KEY_LEFT etc.).
+        win.keypad(1)
+
+    def _end_of_line(self, y):
+        "Go to the location of the first blank on the given line."
+        # Scan right-to-left for the first non-blank character.
+        # NOTE(review): if column maxx itself is non-blank this returns
+        # maxx+1, one past the last valid column -- confirm callers tolerate
+        # that (upstream behaves the same way).
+        last = self.maxx
+        while 1:
+            if ascii.ascii(self.win.inch(y, last)) != ascii.SP:
+                last = last + 1
+                break
+            elif last == 0:
+                break
+            last = last - 1
+        return last
+
+    def do_command(self, ch):
+        "Process a single editing command."
+        # Returns 0 to signal termination (Ctrl-G, or Ctrl-J on a one-line
+        # window), 1 otherwise.
+        (y, x) = self.win.getyx()
+        self.lastcmd = ch
+        if ascii.isprint(ch):
+            if y < self.maxy or x < self.maxx:
+                # The try-catch ignores the error we trigger from some curses
+                # versions by trying to write into the lowest-rightmost spot
+                # in the window.
+                try:
+                    self.win.addch(ch)
+                except curses.error:
+                    pass
+        elif ch == ascii.SOH:                           # ^a
+            self.win.move(y, 0)
+        elif ch in (ascii.STX,curses.KEY_LEFT, ascii.BS,curses.KEY_BACKSPACE):
+            # Cursor left, wrapping to the end of the previous line.
+            if x > 0:
+                self.win.move(y, x-1)
+            elif y == 0:
+                pass
+            elif self.stripspaces:
+                self.win.move(y-1, self._end_of_line(y-1))
+            else:
+                self.win.move(y-1, self.maxx)
+            if ch in (ascii.BS, curses.KEY_BACKSPACE):
+                self.win.delch()
+        elif ch == ascii.EOT:                           # ^d
+            self.win.delch()
+        elif ch == ascii.ENQ:                           # ^e
+            if self.stripspaces:
+                self.win.move(y, self._end_of_line(y))
+            else:
+                self.win.move(y, self.maxx)
+        elif ch in (ascii.ACK, curses.KEY_RIGHT):       # ^f
+            # Cursor right, wrapping to the start of the next line.
+            if x < self.maxx:
+                self.win.move(y, x+1)
+            elif y == self.maxy:
+                pass
+            else:
+                self.win.move(y+1, 0)
+        elif ch == ascii.BEL:                           # ^g
+            return 0
+        elif ch == ascii.NL:                            # ^j
+            if self.maxy == 0:
+                return 0
+            elif y < self.maxy:
+                self.win.move(y+1, 0)
+        elif ch == ascii.VT:                            # ^k
+            if x == 0 and self._end_of_line(y) == 0:
+                self.win.deleteln()
+            else:
+                # first undo the effect of self._end_of_line
+                self.win.move(y, x)
+                self.win.clrtoeol()
+        elif ch == ascii.FF:                            # ^l
+            self.win.refresh()
+        elif ch in (ascii.SO, curses.KEY_DOWN):         # ^n
+            # Cursor down, clamping x to the end of the target line.
+            if y < self.maxy:
+                self.win.move(y+1, x)
+                if x > self._end_of_line(y+1):
+                    self.win.move(y+1, self._end_of_line(y+1))
+        elif ch == ascii.SI:                            # ^o
+            self.win.insertln()
+        elif ch in (ascii.DLE, curses.KEY_UP):          # ^p
+            # Cursor up, clamping x to the end of the target line.
+            if y > 0:
+                self.win.move(y-1, x)
+                if x > self._end_of_line(y-1):
+                    self.win.move(y-1, self._end_of_line(y-1))
+        return 1
+
+    def gather(self):
+        "Collect and return the contents of the window."
+        result = ""
+        for y in range(self.maxy+1):
+            self.win.move(y, 0)
+            stop = self._end_of_line(y)
+            # Skip entirely blank lines when stripping trailing spaces.
+            if stop == 0 and self.stripspaces:
+                continue
+            for x in range(self.maxx+1):
+                if self.stripspaces and x == stop:
+                    break
+                result = result + chr(ascii.ascii(self.win.inch(y, x)))
+            if self.maxy > 0:
+                result = result + "\n"
+        return result
+
+    def edit(self, validate=None):
+        "Edit in the widget window and collect the results."
+        # Read keys until do_command() signals termination (Ctrl-G); the
+        # optional validate callable may translate or suppress keystrokes.
+        while 1:
+            ch = self.win.getch()
+            if validate:
+                ch = validate(ch)
+            if not ch:
+                continue
+            if not self.do_command(ch):
+                break
+            self.win.refresh()
+        return self.gather()
+
+if __name__ == '__main__':
+    # Interactive demo: draw a framed edit box and return its contents.
+    def test_editbox(stdscr):
+        ncols, nlines = 9, 4
+        uly, ulx = 15, 20
+        stdscr.addstr(uly-2, ulx, "Use Ctrl-G to end editing.")
+        win = curses.newwin(nlines, ncols, uly, ulx)
+        rectangle(stdscr, uly-1, ulx-1, uly + nlines, ulx + ncols)
+        stdscr.refresh()
+        return Textbox(win).edit()
+
+    # NOTE(review): this shadows the builtin 'str' -- harmless here since
+    # the script exits immediately after.
+    str = curses.wrapper(test_editbox)
+    print 'Contents of text box:', repr(str)
diff --git a/depot_tools/release/win/python_24/Lib/curses/wrapper.py b/depot_tools/release/win/python_24/Lib/curses/wrapper.py
new file mode 100644
index 0000000..5335204
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/curses/wrapper.py
@@ -0,0 +1,50 @@
+"""curses.wrapper
+
+Contains one function, wrapper(), which runs another function which
+should be the rest of your curses-based application.  If the
+application raises an exception, wrapper() will restore the terminal
+to a sane state so you can read the resulting traceback.
+
+"""
+
+import sys, curses
+
+def wrapper(func, *args, **kwds):
+    """Wrapper function that initializes curses and calls another function,
+    restoring normal keyboard/screen behavior on error.
+    The callable object 'func' is then passed the main window 'stdscr'
+    as its first argument, followed by any other arguments passed to
+    wrapper().
+    """
+
+    res = None
+    try:
+        # Initialize curses
+        stdscr=curses.initscr()
+
+        # Turn off echoing of keys, and enter cbreak mode,
+        # where no buffering is performed on keyboard input
+        curses.noecho()
+        curses.cbreak()
+
+        # In keypad mode, escape sequences for special keys
+        # (like the cursor keys) will be interpreted and
+        # a special value like curses.KEY_LEFT will be returned
+        stdscr.keypad(1)
+
+        # Start color, too.  Harmless if the terminal doesn't have
+        # color; user can test with has_color() later on.  The try/catch
+        # works around a minor bit of over-conscientiousness in the curses
+        # module -- the error return from C start_color() is ignorable.
+        try:
+            curses.start_color()
+        except:
+            pass
+
+        return func(stdscr, *args, **kwds)
+    finally:
+        # Set everything back to normal
+        stdscr.keypad(0)
+        curses.echo()
+        curses.nocbreak()
+        curses.endwin()
diff --git a/depot_tools/release/win/python_24/Lib/dbhash.py b/depot_tools/release/win/python_24/Lib/dbhash.py
new file mode 100644
index 0000000..9f8a9c3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/dbhash.py
@@ -0,0 +1,16 @@
+"""Provide a (g)dbm-compatible interface to bsddb.hashopen."""
+
+import sys
+try:
+    import bsddb
+except ImportError:
+    # prevent a second import of this module from spuriously succeeding
+    del sys.modules[__name__]
+    raise
+
+__all__ = ["error","open"]
+
+error = bsddb.error                     # Exported for anydbm
+
+def open(file, flag = 'r', mode=0666):
+    # Open a bsddb hash database; 'mode' is the octal Unix permission mask
+    # applied when the file is created.
+    return bsddb.hashopen(file, flag, mode)
diff --git a/depot_tools/release/win/python_24/Lib/decimal.py b/depot_tools/release/win/python_24/Lib/decimal.py
new file mode 100644
index 0000000..18f1c90
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/decimal.py
@@ -0,0 +1,3078 @@
+# Copyright (c) 2004 Python Software Foundation.
+# All rights reserved.
+
+# Written by Eric Price <eprice at tjhsst.edu>
+#    and Facundo Batista <facundo at taniquetil.com.ar>
+#    and Raymond Hettinger <python at rcn.com>
+#    and Aahz <aahz at pobox.com>
+#    and Tim Peters
+
+# This module is currently Py2.3 compatible and should be kept that way
+# unless a major compelling advantage arises.  IOW, 2.3 compatibility is
+# strongly preferred, but not guaranteed.
+
+# Also, this module should be kept in sync with the latest updates of
+# the IBM specification as it evolves.  Those updates will be treated
+# as bug fixes (deviation from the spec is a compatibility, usability
+# bug) and will be backported.  At this point the spec is stabilizing
+# and the updates are becoming fewer, smaller, and less significant.
+
+"""
+This is a Py2.3 implementation of decimal floating point arithmetic based on
+the General Decimal Arithmetic Specification:
+
+    www2.hursley.ibm.com/decimal/decarith.html
+
+and IEEE standard 854-1987:
+
+    www.cs.berkeley.edu/~ejr/projects/754/private/drafts/854-1987/dir.html
+
+Decimal floating point has finite precision with arbitrarily large bounds.
+
+The purpose of the module is to support arithmetic using familiar
+"schoolhouse" rules and to avoid some of the tricky representation
+issues associated with binary floating point.  The package is especially
+useful for financial applications or for contexts where users have
+expectations that are at odds with binary floating point (for instance,
+in binary floating point, 1.00 % 0.1 gives 0.09999999999999995 instead
+of the expected Decimal("0.00") returned by decimal floating point).
+
+Here are some examples of using the decimal module:
+
+>>> from decimal import *
+>>> setcontext(ExtendedContext)
+>>> Decimal(0)
+Decimal("0")
+>>> Decimal("1")
+Decimal("1")
+>>> Decimal("-.0123")
+Decimal("-0.0123")
+>>> Decimal(123456)
+Decimal("123456")
+>>> Decimal("123.45e12345678901234567890")
+Decimal("1.2345E+12345678901234567892")
+>>> Decimal("1.33") + Decimal("1.27")
+Decimal("2.60")
+>>> Decimal("12.34") + Decimal("3.87") - Decimal("18.41")
+Decimal("-2.20")
+>>> dig = Decimal(1)
+>>> print dig / Decimal(3)
+0.333333333
+>>> getcontext().prec = 18
+>>> print dig / Decimal(3)
+0.333333333333333333
+>>> print dig.sqrt()
+1
+>>> print Decimal(3).sqrt()
+1.73205080756887729
+>>> print Decimal(3) ** 123
+4.85192780976896427E+58
+>>> inf = Decimal(1) / Decimal(0)
+>>> print inf
+Infinity
+>>> neginf = Decimal(-1) / Decimal(0)
+>>> print neginf
+-Infinity
+>>> print neginf + inf
+NaN
+>>> print neginf * inf
+-Infinity
+>>> print dig / 0
+Infinity
+>>> getcontext().traps[DivisionByZero] = 1
+>>> print dig / 0
+Traceback (most recent call last):
+  ...
+  ...
+  ...
+DivisionByZero: x / 0
+>>> c = Context()
+>>> c.traps[InvalidOperation] = 0
+>>> print c.flags[InvalidOperation]
+0
+>>> c.divide(Decimal(0), Decimal(0))
+Decimal("NaN")
+>>> c.traps[InvalidOperation] = 1
+>>> print c.flags[InvalidOperation]
+1
+>>> c.flags[InvalidOperation] = 0
+>>> print c.flags[InvalidOperation]
+0
+>>> print c.divide(Decimal(0), Decimal(0))
+Traceback (most recent call last):
+  ...
+  ...
+  ...
+InvalidOperation: 0 / 0
+>>> print c.flags[InvalidOperation]
+1
+>>> c.flags[InvalidOperation] = 0
+>>> c.traps[InvalidOperation] = 0
+>>> print c.divide(Decimal(0), Decimal(0))
+NaN
+>>> print c.flags[InvalidOperation]
+1
+>>>
+"""
+
+__all__ = [
+    # Two major classes
+    'Decimal', 'Context',
+
+    # Contexts
+    'DefaultContext', 'BasicContext', 'ExtendedContext',
+
+    # Exceptions
+    'DecimalException', 'Clamped', 'InvalidOperation', 'DivisionByZero',
+    'Inexact', 'Rounded', 'Subnormal', 'Overflow', 'Underflow',
+
+    # Constants for use in setting up contexts
+    'ROUND_DOWN', 'ROUND_HALF_UP', 'ROUND_HALF_EVEN', 'ROUND_CEILING',
+    'ROUND_FLOOR', 'ROUND_UP', 'ROUND_HALF_DOWN',
+
+    # Functions for manipulating contexts
+    'setcontext', 'getcontext'
+]
+
+import copy
+
+# Rounding modes (public constants; assign one to Context.rounding)
+ROUND_DOWN = 'ROUND_DOWN'
+ROUND_HALF_UP = 'ROUND_HALF_UP'
+ROUND_HALF_EVEN = 'ROUND_HALF_EVEN'
+ROUND_CEILING = 'ROUND_CEILING'
+ROUND_FLOOR = 'ROUND_FLOOR'
+ROUND_UP = 'ROUND_UP'
+ROUND_HALF_DOWN = 'ROUND_HALF_DOWN'
+
+# Rounding decision (not part of the public API)
+NEVER_ROUND = 'NEVER_ROUND'    # Round in division (non-divmod), sqrt ONLY
+ALWAYS_ROUND = 'ALWAYS_ROUND'  # Every operation rounds at end.
+
+#Errors
+
+class DecimalException(ArithmeticError):
+    """Base exception class.
+
+    Used exceptions derive from this.
+    If an exception derives from another exception besides this (such as
+    Underflow (Inexact, Rounded, Subnormal) that indicates that it is only
+    called if the others are present.  This isn't actually used for
+    anything, though.
+
+    handle  -- Called when context._raise_error is called and the
+               trap_enabler is set.  First argument is self, second is the
+               context.  More arguments can be given, those being after
+               the explanation in _raise_error (For example,
+               context._raise_error(NewError, '(-x)!', self._sign) would
+               call NewError().handle(context, self._sign).)
+
+    To define a new exception, it should be sufficient to have it derive
+    from DecimalException.
+    """
+    def handle(self, context, *args):
+        # Default: no special result value; subclasses override to supply one.
+        pass
+
+
+class Clamped(DecimalException):
+    """Exponent of a 0 changed to fit bounds.
+
+    This occurs and signals clamped if the exponent of a result has been
+    altered in order to fit the constraints of a specific concrete
+    representation. This may occur when the exponent of a zero result would
+    be outside the bounds of a representation, or when a large normal
+    number would have an encoded exponent that cannot be represented. In
+    this latter case, the exponent is reduced to fit and the corresponding
+    number of zero digits are appended to the coefficient ("fold-down").
+    """
+
+
+class InvalidOperation(DecimalException):
+    """An invalid operation was performed.
+
+    Various bad things cause this:
+
+    Something creates a signaling NaN
+    -INF + INF
+     0 * (+-)INF
+     (+-)INF / (+-)INF
+    x % 0
+    (+-)INF % x
+    x._rescale( non-integer )
+    sqrt(-x) , x > 0
+    0 ** 0
+    x ** (non-integer)
+    x ** (+-)INF
+    An operand is invalid
+    """
+    def handle(self, context, *args):
+        if args:
+            if args[0] == 1: #sNaN, must drop 's' but keep diagnostics
+                return Decimal( (args[1]._sign, args[1]._int, 'n') )
+        return NaN  # quiet NaN; module-level constant defined later in this file
+
+class ConversionSyntax(InvalidOperation):
+    """Trying to convert badly formed string.
+
+    This occurs and signals invalid-operation if a string is being
+    converted to a number and it does not conform to the numeric string
+    syntax. The result is [0,qNaN].
+    """
+
+    def handle(self, context, *args):
+        return (0, (0,), 'n') #Passed to something which uses a tuple.
+
+class DivisionByZero(DecimalException, ZeroDivisionError):
+    """Division by 0.
+
+    This occurs and signals division-by-zero if division of a finite number
+    by zero was attempted (during a divide-integer or divide operation, or a
+    power operation with negative right-hand operand), and the dividend was
+    not zero.
+
+    The result of the operation is [sign,inf], where sign is the exclusive
+    or of the signs of the operands for divide, or is 1 for an odd power of
+    -0, for power.
+    """
+
+    def handle(self, context, sign, double = None, *args):
+        # 'double' is set for divmod-style operations: return an (INF, INF) pair.
+        if double is not None:
+            return (Infsign[sign],)*2
+        return Infsign[sign]
+
+class DivisionImpossible(InvalidOperation):
+    """Cannot perform the division adequately.
+
+    This occurs and signals invalid-operation if the integer result of a
+    divide-integer or remainder operation had too many digits (would be
+    longer than precision). The result is [0,qNaN].
+    """
+
+    def handle(self, context, *args):
+        return (NaN, NaN)  # divmod-style pair: quotient and remainder both NaN
+
+class DivisionUndefined(InvalidOperation, ZeroDivisionError):
+    """Undefined result of division.
+
+    This occurs and signals invalid-operation if division by zero was
+    attempted (during a divide-integer, divide, or remainder operation), and
+    the dividend is also zero. The result is [0,qNaN].
+    """
+
+    def handle(self, context, tup=None, *args):
+        if tup is not None:
+            return (NaN, NaN) # divmod-style pair for 0 % 0 and 0 // 0
+        return NaN
+
+class Inexact(DecimalException):
+    """Had to round, losing information.
+
+    This occurs and signals inexact whenever the result of an operation is
+    not exact (that is, it needed to be rounded and any discarded digits
+    were non-zero), or if an overflow or underflow condition occurs. The
+    result in all cases is unchanged.
+
+    The inexact signal may be tested (or trapped) to determine if a given
+    operation (or sequence of operations) was inexact.
+    """
+    # Pure signal: no handle() override; the operation result is unchanged.
+    pass
+
+class InvalidContext(InvalidOperation):
+    """Invalid context.  Unknown rounding, for example.
+
+    This occurs and signals invalid-operation if an invalid context was
+    detected during an operation. This can occur if contexts are not checked
+    on creation and either the precision exceeds the capability of the
+    underlying concrete representation or an unknown or unsupported rounding
+    was specified. These aspects of the context need only be checked when
+    the values are required to be used. The result is [0,qNaN].
+    """
+
+    def handle(self, context, *args):
+        return NaN  # quiet NaN result, per the spec's [0,qNaN]
+
+class Rounded(DecimalException):
+    """Number got rounded (not necessarily changed during rounding).
+
+    This occurs and signals rounded whenever the result of an operation is
+    rounded (that is, some zero or non-zero digits were discarded from the
+    coefficient), or if an overflow or underflow condition occurs. The
+    result in all cases is unchanged.
+
+    The rounded signal may be tested (or trapped) to determine if a given
+    operation (or sequence of operations) caused a loss of precision.
+    """
+    pass
+
+class Subnormal(DecimalException):
+    """Exponent < Emin before rounding.
+
+    This occurs and signals subnormal whenever the result of a conversion or
+    operation is subnormal (that is, its adjusted exponent is less than
+    Emin, before any rounding). The result in all cases is unchanged.
+
+    The subnormal signal may be tested (or trapped) to determine if a given
+    operation (or sequence of operations) yielded a subnormal result.
+    """
+    pass
+
+class Overflow(Inexact, Rounded):
+    """Numerical overflow.
+
+    This occurs and signals overflow if the adjusted exponent of a result
+    (from a conversion or from an operation that is not an attempt to divide
+    by zero), after rounding, would be greater than the largest value that
+    can be handled by the implementation (the value Emax).
+
+    The result depends on the rounding mode:
+
+    For round-half-up and round-half-even (and for round-half-down and
+    round-up, if implemented), the result of the operation is [sign,inf],
+    where sign is the sign of the intermediate result. For round-down, the
+    result is the largest finite number that can be represented in the
+    current precision, with the sign of the intermediate result. For
+    round-ceiling, the result is the same as for round-down if the sign of
+    the intermediate result is 1, or is [0,inf] otherwise. For round-floor,
+    the result is the same as for round-down if the sign of the intermediate
+    result is 0, or is [1,inf] otherwise. In all cases, Inexact and Rounded
+    will also be raised.
+    """
+
+    def handle(self, context, sign, *args):
+        if context.rounding in (ROUND_HALF_UP, ROUND_HALF_EVEN,
+                                     ROUND_HALF_DOWN, ROUND_UP):
+            return Infsign[sign]
+        if sign == 0:
+            if context.rounding == ROUND_CEILING:
+                return Infsign[sign]
+            # Otherwise clamp to the largest representable finite magnitude.
+            return Decimal((sign, (9,)*context.prec,
+                            context.Emax-context.prec+1))
+        if sign == 1:
+            if context.rounding == ROUND_FLOOR:
+                return Infsign[sign]
+            # Otherwise clamp to the largest representable finite magnitude.
+            return Decimal( (sign, (9,)*context.prec,
+                             context.Emax-context.prec+1))
+
+
+class Underflow(Inexact, Rounded, Subnormal):
+    """Numerical underflow with result rounded to 0.
+
+    This occurs and signals underflow if a result is inexact and the
+    adjusted exponent of the result would be smaller (more negative) than
+    the smallest value that can be handled by the implementation (the value
+    Emin). That is, the result is both inexact and subnormal.
+
+    The result after an underflow will be a subnormal number rounded, if
+    necessary, so that its exponent is not less than Etiny. This may result
+    in 0 with the sign of the intermediate result and an exponent of Etiny.
+
+    In all cases, Inexact, Rounded, and Subnormal will also be raised.
+    """
+    # No handle(): the rounding machinery itself produces the result.
+
+# List of public traps and flags
+_signals = [Clamped, DivisionByZero, Inexact, Overflow, Rounded,
+           Underflow, InvalidOperation, Subnormal]
+
+# Map conditions (per the spec) to signals; the spec's internal
+# conditions all surface to users as InvalidOperation.
+_condition_map = {ConversionSyntax:InvalidOperation,
+                  DivisionImpossible:InvalidOperation,
+                  DivisionUndefined:InvalidOperation,
+                  InvalidContext:InvalidOperation}
+
+##### Context Functions #######################################
+
+# The getcontext() and setcontext() functions manage access to a thread-local
+# current context.  Py2.4 offers direct support for thread locals.  If that
+# is not available, use threading.currentThread() which is slower but will
+# work for older Pythons.  If threads are not part of the build, create a
+# mock threading object with threading.local() returning the module namespace.
+
+try:
+    import threading
+except ImportError:
+    # Python was compiled without threads; create a mock object instead
+    import sys
+    class MockThreading:
+        def local(self, sys=sys):
+            return sys.modules[__name__]
+    threading = MockThreading()
+    del sys, MockThreading
+
+try:
+    threading.local
+
+except AttributeError:
+
+    #To fix reloading, force it to create a new context
+    #Old contexts have different exceptions in their dicts, making problems.
+    if hasattr(threading.currentThread(), '__decimal_context__'):
+        del threading.currentThread().__decimal_context__
+
+    def setcontext(context):
+        """Set this thread's context to context."""
+        if context in (DefaultContext, BasicContext, ExtendedContext):
+            # The pre-made contexts are shared; install a fresh copy so one
+            # thread cannot mutate them for everybody else.
+            context = context.copy()
+            context.clear_flags()
+        threading.currentThread().__decimal_context__ = context
+
+    def getcontext():
+        """Returns this thread's context.
+
+        If this thread does not yet have a context, returns
+        a new context and sets this thread's context.
+        New contexts are copies of DefaultContext.
+        """
+        try:
+            return threading.currentThread().__decimal_context__
+        except AttributeError:
+            context = Context()
+            threading.currentThread().__decimal_context__ = context
+            return context
+
+else:
+
+    local = threading.local()
+    if hasattr(local, '__decimal_context__'):
+        del local.__decimal_context__
+
+    def getcontext(_local=local):
+        """Returns this thread's context.
+
+        If this thread does not yet have a context, returns
+        a new context and sets this thread's context.
+        New contexts are copies of DefaultContext.
+        """
+        try:
+            return _local.__decimal_context__
+        except AttributeError:
+            context = Context()
+            _local.__decimal_context__ = context
+            return context
+
+    def setcontext(context, _local=local):
+        """Set this thread's context to context."""
+        if context in (DefaultContext, BasicContext, ExtendedContext):
+            # The pre-made contexts are shared; install a fresh copy so one
+            # thread cannot mutate them for everybody else.
+            context = context.copy()
+            context.clear_flags()
+        _local.__decimal_context__ = context
+
+    del threading, local        # Don't contaminate the namespace
+
+
+##### Decimal class ###########################################
+
+class Decimal(object):
+    """Floating point class for decimal arithmetic."""
+
+    __slots__ = ('_exp','_int','_sign', '_is_special')
+    # Generally, the value of the Decimal instance is given by
+    #  (-1)**_sign * _int * 10**_exp
+    # Special values are signified by _is_special == True
+
+    # We're immutable, so use __new__ not __init__
+    def __new__(cls, value="0", context=None):
+        """Create a decimal point instance.
+
+        >>> Decimal('3.14')              # string input
+        Decimal("3.14")
+        >>> Decimal((0, (3, 1, 4), -2))  # tuple input (sign, digit_tuple, exponent)
+        Decimal("3.14")
+        >>> Decimal(314)                 # int or long
+        Decimal("314")
+        >>> Decimal(Decimal(314))        # another decimal instance
+        Decimal("314")
+        """
+
+        self = object.__new__(cls)
+        self._is_special = False
+
+        # From an internal working value
+        if isinstance(value, _WorkRep):
+            self._sign = value.sign
+            self._int = tuple(map(int, str(value.int)))
+            self._exp = int(value.exp)
+            return self
+
+        # From another decimal
+        if isinstance(value, Decimal):
+            self._exp  = value._exp
+            self._sign = value._sign
+            self._int  = value._int
+            self._is_special  = value._is_special
+            return self
+
+        # From an integer
+        if isinstance(value, (int,long)):
+            if value >= 0:
+                self._sign = 0
+            else:
+                self._sign = 1
+            self._exp = 0
+            self._int = tuple(map(int, str(abs(value))))
+            return self
+
+        # tuple/list conversion (possibly from as_tuple())
+        if isinstance(value, (list,tuple)):
+            if len(value) != 3:
+                raise ValueError, 'Invalid arguments'  # need exactly (sign, digits, exponent)
+            if value[0] not in [0,1]:
+                raise ValueError, 'Invalid sign'
+            for digit in value[1]:
+                if not isinstance(digit, (int,long)) or digit < 0:
+                    raise ValueError, "The second value in the tuple must be composed of non negative integer elements."
+
+            self._sign = value[0]
+            self._int  = tuple(value[1])
+            if value[2] in ('F','n','N'):
+                # special marker exponents: 'F' infinity, 'n' qNaN, 'N' sNaN
+                self._exp = value[2]
+                self._is_special = True
+            else:
+                self._exp  = int(value[2])
+            return self
+
+        if isinstance(value, float):
+            raise TypeError("Cannot convert float to Decimal.  " +
+                            "First convert the float to a string")
+
+        # Other argument types may require the context during interpretation
+        if context is None:
+            context = getcontext()
+
+        # From a string
+        # REs insist on real strings, so we can too.
+        if isinstance(value, basestring):
+            if _isinfinity(value):
+                self._exp = 'F'
+                self._int = (0,)
+                self._is_special = True
+                if _isinfinity(value) == 1:
+                    self._sign = 0
+                else:
+                    self._sign = 1
+                return self
+            if _isnan(value):
+                sig, sign, diag = _isnan(value)
+                self._is_special = True
+                if len(diag) > context.prec: #Diagnostic info too long
+                    self._sign, self._int, self._exp = \
+                                context._raise_error(ConversionSyntax)
+                    return self
+                if sig == 1:
+                    self._exp = 'n' #qNaN
+                else: #sig == 2
+                    self._exp = 'N' #sNaN
+                self._sign = sign
+                self._int = tuple(map(int, diag)) #Diagnostic info
+                return self
+            try:
+                self._sign, self._int, self._exp = _string2exact(value)
+            except ValueError:
+                self._is_special = True
+                self._sign, self._int, self._exp = context._raise_error(ConversionSyntax)
+            return self
+
+        raise TypeError("Cannot convert %r to Decimal" % value)
+
+    def _isnan(self):
+        """Returns whether the number is not actually one.
+
+        0 if a number
+        1 if NaN
+        2 if sNaN
+        """
+        if self._is_special:
+            exp = self._exp
+            # 'n' marks a quiet NaN, 'N' a signaling NaN (see __new__).
+            if exp == 'n':
+                return 1
+            elif exp == 'N':
+                return 2
+        return 0
+
+    def _isinfinity(self):
+        """Returns whether the number is infinite
+
+        0 if finite or not a number
+        1 if +INF
+        -1 if -INF
+        """
+        # 'F' exponent marks an infinity; the sign selects +INF vs -INF.
+        if self._exp == 'F':
+            if self._sign:
+                return -1
+            return 1
+        return 0
+
+    def _check_nans(self, other = None, context=None):
+        """Returns whether the number is not actually one.
+
+        if self, other are sNaN, signal
+        if self, other are NaN return nan
+        return 0
+
+        Done before operations.
+        """
+
+        self_is_nan = self._isnan()
+        if other is None:
+            other_is_nan = False
+        else:
+            other_is_nan = other._isnan()
+
+        if self_is_nan or other_is_nan:
+            if context is None:
+                context = getcontext()
+
+            # A signaling NaN raises/flags InvalidOperation before anything else.
+            if self_is_nan == 2:
+                return context._raise_error(InvalidOperation, 'sNaN',
+                                        1, self)
+            if other_is_nan == 2:
+                return context._raise_error(InvalidOperation, 'sNaN',
+                                        1, other)
+            if self_is_nan:
+                return self
+
+            return other
+        return 0
+
+    def __nonzero__(self):
+        """Is the number non-zero?
+
+        0 if self == 0
+        1 if self != 0
+        """
+        # NaNs and infinities are special and always count as nonzero.
+        if self._is_special:
+            return 1
+        return sum(self._int) != 0
+
+    def __cmp__(self, other, context=None):
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        if self._is_special or other._is_special:
+            ans = self._check_nans(other, context)
+            if ans:
+                return 1 # Comparison involving NaN's always reports self > other
+
+            # INF = INF
+            return cmp(self._isinfinity(), other._isinfinity())
+
+        if not self and not other:
+            return 0 #If both 0, sign comparison isn't certain.
+
+        #If different signs, neg one is less
+        if other._sign < self._sign:
+            return -1
+        if self._sign < other._sign:
+            return 1
+
+        self_adjusted = self.adjusted()
+        other_adjusted = other.adjusted()
+        if self_adjusted == other_adjusted and \
+           self._int + (0,)*(self._exp - other._exp) == \
+           other._int + (0,)*(other._exp - self._exp):
+            return 0 #equal, except in precision. ([0]*(-x) = [])
+        elif self_adjusted > other_adjusted and self._int[0] != 0:
+            return (-1)**self._sign
+        elif self_adjusted < other_adjusted and other._int[0] != 0:
+            return -((-1)**self._sign)
+
+        # Need to round, so make sure we have a valid context
+        if context is None:
+            context = getcontext()
+
+        # Fall back on subtraction: compute self - other with flags
+        # suppressed, then inspect the sign of the difference.
+        context = context._shallow_copy()
+        rounding = context._set_rounding(ROUND_UP) #round away from 0
+
+        flags = context._ignore_all_flags()
+        res = self.__sub__(other, context=context)
+
+        context._regard_flags(*flags)
+
+        context.rounding = rounding
+
+        if not res:
+            return 0
+        elif res._sign:
+            return -1
+        return 1
+
+    def __eq__(self, other):
+        # Only Decimal and integer operands compare; others defer to the peer.
+        if not isinstance(other, (Decimal, int, long)):
+            return NotImplemented
+        return self.__cmp__(other) == 0
+
+    def __ne__(self, other):
+        # Only Decimal and integer operands compare; others defer to the peer.
+        if not isinstance(other, (Decimal, int, long)):
+            return NotImplemented
+        return self.__cmp__(other) != 0
+
+    def compare(self, other, context=None):
+        """Compares one to another.
+
+        -1 => a < b
+        0  => a = b
+        1  => a > b
+        NaN => one is NaN
+        Like __cmp__, but returns Decimal instances.
+        """
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        #compare(NaN, NaN) = NaN
+        # NaN operands propagate (or signal, for sNaN) instead of comparing.
+        if (self._is_special or other and other._is_special):
+            ans = self._check_nans(other, context)
+            if ans:
+                return ans
+
+        return Decimal(self.__cmp__(other, context))
+
+    def __hash__(self):
+        """x.__hash__() <==> hash(x)"""
+        # Decimal integers must hash the same as the ints
+        # Non-integer decimals are normalized and hashed as strings
+        # Normalization assures that hash(100E-1) == hash(10)
+        if self._is_special:
+            if self._isnan():
+                raise TypeError('Cannot hash a NaN value.')
+            return hash(str(self))
+        i = int(self)
+        if self == Decimal(i):
+            return hash(i)
+        assert self.__nonzero__()   # '-0' handled by integer case
+        return hash(str(self.normalize()))
+
+    def as_tuple(self):
+        """Represents the number as a triple tuple.
+
+        To show the internals exactly as they are.
+        """
+        return (self._sign, self._int, self._exp)  # (sign, digit tuple, exponent)
+
+    def __repr__(self):
+        """Represents the number as an instance of Decimal."""
+        # Invariant:  eval(repr(d)) == d  (str captures the full representation)
+        return 'Decimal("%s")' % str(self)
+
+    def __str__(self, eng = 0, context=None):
+        """Return string representation of the number in scientific notation.
+
+        Captures all of the information in the underlying representation.
+        """
+
+        if self._isnan():
+            minus = '-'*self._sign
+            if self._int == (0,):
+                info = ''
+            else:
+                info = ''.join(map(str, self._int))
+            if self._isnan() == 2:
+                return minus + 'sNaN' + info
+            return minus + 'NaN' + info
+        if self._isinfinity():
+            minus = '-'*self._sign
+            return minus + 'Infinity'
+
+        if context is None:
+            context = getcontext()
+
+        tmp = map(str, self._int)
+        numdigits = len(self._int)
+        leftdigits = self._exp + numdigits
+        if eng and not self: #self = 0eX wants 0[.0[0]]eY, not [[0]0]0eY
+            if self._exp < 0 and self._exp >= -6: #short, no need for e/E
+                s = '-'*self._sign + '0.' + '0'*(abs(self._exp))
+                return s
+            #exp is closest mult. of 3 >= self._exp
+            exp = ((self._exp - 1)// 3 + 1) * 3
+            if exp != self._exp:
+                s = '0.'+'0'*(exp - self._exp)
+            else:
+                s = '0'
+            if exp != 0:
+                if context.capitals:
+                    s += 'E'
+                else:
+                    s += 'e'
+                if exp > 0:
+                    s += '+' #0.0e+3, not 0.0e3
+                s += str(exp)
+            s = '-'*self._sign + s
+            return s
+        if eng:
+            dotplace = (leftdigits-1)%3+1
+            adjexp = leftdigits -1 - (leftdigits-1)%3
+        else:
+            adjexp = leftdigits-1
+            dotplace = 1
+        if self._exp == 0:
+            pass
+        elif self._exp < 0 and adjexp >= 0:
+            tmp.insert(leftdigits, '.')
+        elif self._exp < 0 and adjexp >= -6:
+            tmp[0:0] = ['0'] * int(-leftdigits)
+            tmp.insert(0, '0.')
+        else:
+            if numdigits > dotplace:
+                tmp.insert(dotplace, '.')
+            elif numdigits < dotplace:
+                tmp.extend(['0']*(dotplace-numdigits))
+            if adjexp:
+                if not context.capitals:
+                    tmp.append('e')
+                else:
+                    tmp.append('E')
+                    if adjexp > 0:
+                        tmp.append('+')
+                tmp.append(str(adjexp))
+        if eng:
+            while tmp[0:1] == ['0']:
+                tmp[0:1] = []
+            if len(tmp) == 0 or tmp[0] == '.' or tmp[0].lower() == 'e':
+                tmp[0:0] = ['0']
+        if self._sign:
+            tmp.insert(0, '-')
+
+        return ''.join(tmp)
+
+    def to_eng_string(self, context=None):
+        """Convert to engineering-type string.
+
+        Engineering notation has an exponent which is a multiple of 3, so there
+        are up to 3 digits left of the decimal place.
+
+        Same rules for when in exponential and when as a value as in __str__.
+        """
+        return self.__str__(eng=1, context=context)
+
+    def __neg__(self, context=None):
+        """Returns a copy with the sign switched.
+
+        Rounds, if it has reason.
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        if not self:
+            # -Decimal('0') is Decimal('0'), not Decimal('-0')
+            sign = 0
+        elif self._sign:
+            sign = 0
+        else:
+            sign = 1
+
+        if context is None:
+            context = getcontext()
+        if context._rounding_decision == ALWAYS_ROUND:
+            # Round the negated result to the context's precision.
+            return Decimal((sign, self._int, self._exp))._fix(context)
+        return Decimal( (sign, self._int, self._exp))
+
+    def __pos__(self, context=None):
+        """Returns a copy, unless it is a sNaN.
+
+        Rounds the number (if more than precision digits)
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        sign = self._sign
+        if not self:
+            # + (-0) = 0
+            sign = 0
+
+        if context is None:
+            context = getcontext()
+
+        if context._rounding_decision == ALWAYS_ROUND:
+            ans = self._fix(context)
+        else:
+            ans = Decimal(self)
+        ans._sign = sign
+        return ans
+
+    def __abs__(self, round=1, context=None):
+        """Returns the absolute value of self.
+
+        If the second argument is 0, do not round.
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        if not round:
+            # Work on a copy so the caller's context keeps its rounding decision.
+            if context is None:
+                context = getcontext()
+            context = context._shallow_copy()
+            context._set_rounding_decision(NEVER_ROUND)
+
+        if self._sign:
+            ans = self.__neg__(context=context)
+        else:
+            ans = self.__pos__(context=context)
+
+        return ans
+
    def __add__(self, other, context=None):
        """Returns self + other.

        -INF + INF (or the reverse) cause InvalidOperation errors.
        NaN operands propagate (sNaN signals); the result is rounded to
        the context precision when the context requests rounding.
        """
        other = _convert_other(other)
        if other is NotImplemented:
            return other

        if context is None:
            context = getcontext()

        if self._is_special or other._is_special:
            ans = self._check_nans(other, context)
            if ans:
                return ans

            if self._isinfinity():
                #If both INF, same sign => same as both, opposite => error.
                if self._sign != other._sign and other._isinfinity():
                    return context._raise_error(InvalidOperation, '-INF + INF')
                return Decimal(self)
            if other._isinfinity():
                return Decimal(other)  #Can't both be infinity here

        shouldround = context._rounding_decision == ALWAYS_ROUND

        exp = min(self._exp, other._exp)
        negativezero = 0
        if context.rounding == ROUND_FLOOR and self._sign != other._sign:
            #If the answer is 0, the sign should be negative, in this case.
            negativezero = 1

        # Both operands zero: result is zero with the smaller exponent.
        if not self and not other:
            sign = min(self._sign, other._sign)
            if negativezero:
                sign = 1
            return Decimal( (sign, (0,), exp))
        # One operand zero: rescale the other to the result exponent,
        # bounded so the coefficient stays near the context precision.
        if not self:
            exp = max(exp, other._exp - context.prec-1)
            ans = other._rescale(exp, watchexp=0, context=context)
            if shouldround:
                ans = ans._fix(context)
            return ans
        if not other:
            exp = max(exp, self._exp - context.prec-1)
            ans = self._rescale(exp, watchexp=0, context=context)
            if shouldround:
                ans = ans._fix(context)
            return ans

        # General case: align both coefficients to a common exponent.
        op1 = _WorkRep(self)
        op2 = _WorkRep(other)
        op1, op2 = _normalize(op1, op2, shouldround, context.prec)

        result = _WorkRep()
        if op1.sign != op2.sign:
            # Equal and opposite
            if op1.int == op2.int:
                if exp < context.Etiny():
                    exp = context.Etiny()
                    context._raise_error(Clamped)
                return Decimal((negativezero, (0,), exp))
            # Order operands so op1 has the larger magnitude.
            if op1.int < op2.int:
                op1, op2 = op2, op1
                #OK, now abs(op1) > abs(op2)
            if op1.sign == 1:
                result.sign = 1
                op1.sign, op2.sign = op2.sign, op1.sign
            else:
                result.sign = 0
                #So we know the sign, and op1 > 0.
        elif op1.sign == 1:
            # Both negative: negate both, remember the sign.
            result.sign = 1
            op1.sign, op2.sign = (0, 0)
        else:
            result.sign = 0
        #Now, op1 > abs(op2) > 0

        # op2.sign == 0 means a genuine addition of magnitudes,
        # otherwise a subtraction (op1 dominates, so no borrow-out).
        if op2.sign == 0:
            result.int = op1.int + op2.int
        else:
            result.int = op1.int - op2.int

        result.exp = op1.exp
        ans = Decimal(result)
        if shouldround:
            ans = ans._fix(context)
        return ans

    __radd__ = __add__
+
+    def __sub__(self, other, context=None):
+        """Return self + (-other)"""
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        if self._is_special or other._is_special:
+            ans = self._check_nans(other, context=context)
+            if ans:
+                return ans
+
+        # -Decimal(0) = Decimal(0), which we don't want since
+        # (-0 - 0 = -0 + (-0) = -0, but -0 + 0 = 0.)
+        # so we change the sign directly to a copy
+        tmp = Decimal(other)
+        tmp._sign = 1-tmp._sign
+
+        return self.__add__(tmp, context=context)
+
+    def __rsub__(self, other, context=None):
+        """Return other + (-self)"""
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        tmp = Decimal(self)
+        tmp._sign = 1 - tmp._sign
+        return other.__add__(tmp, context=context)
+
+    def _increment(self, round=1, context=None):
+        """Special case of add, adding 1eExponent
+
+        Since it is common, (rounding, for example) this adds
+        (sign)*one E self._exp to the number more efficiently than add.
+
+        For example:
+        Decimal('5.624e10')._increment() == Decimal('5.625e10')
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+            return Decimal(self) # Must be infinite, and incrementing makes no difference
+
+        L = list(self._int)
+        L[-1] += 1
+        spot = len(L)-1
+        while L[spot] == 10:
+            L[spot] = 0
+            if spot == 0:
+                L[0:0] = [1]
+                break
+            L[spot-1] += 1
+            spot -= 1
+        ans = Decimal((self._sign, L, self._exp))
+
+        if context is None:
+            context = getcontext()
+        if round and context._rounding_decision == ALWAYS_ROUND:
+            ans = ans._fix(context)
+        return ans
+
    def __mul__(self, other, context=None):
        """Return self * other.

        (+-) INF * 0 (or its reverse) raise InvalidOperation.
        NaN operands propagate; the result is rounded to the context
        precision when the context requests rounding.
        """
        other = _convert_other(other)
        if other is NotImplemented:
            return other

        if context is None:
            context = getcontext()

        # Sign of the product: xor of the operand signs.
        resultsign = self._sign ^ other._sign

        if self._is_special or other._is_special:
            ans = self._check_nans(other, context)
            if ans:
                return ans

            if self._isinfinity():
                if not other:
                    return context._raise_error(InvalidOperation, '(+-)INF * 0')
                return Infsign[resultsign]

            if other._isinfinity():
                if not self:
                    return context._raise_error(InvalidOperation, '0 * (+-)INF')
                return Infsign[resultsign]

        resultexp = self._exp + other._exp
        shouldround = context._rounding_decision == ALWAYS_ROUND

        # Special case for multiplying by zero
        if not self or not other:
            ans = Decimal((resultsign, (0,), resultexp))
            if shouldround:
                #Fixing in case the exponent is out of bounds
                ans = ans._fix(context)
            return ans

        # Special case for multiplying by power of 10
        if self._int == (1,):
            ans = Decimal((resultsign, other._int, resultexp))
            if shouldround:
                ans = ans._fix(context)
            return ans
        if other._int == (1,):
            ans = Decimal((resultsign, self._int, resultexp))
            if shouldround:
                ans = ans._fix(context)
            return ans

        # General case: multiply the coefficients as plain integers and
        # rebuild the digit sequence from the product's decimal string.
        op1 = _WorkRep(self)
        op2 = _WorkRep(other)

        ans = Decimal( (resultsign, map(int, str(op1.int * op2.int)), resultexp))
        if shouldround:
            ans = ans._fix(context)

        return ans
    __rmul__ = __mul__
+
    def __div__(self, other, context=None):
        """Return self / other (true division), to context precision."""
        return self._divide(other, context=context)
    __truediv__ = __div__
+
    def _divide(self, other, divmod = 0, context=None):
        """Return a / b, to context.prec precision.

        divmod:
        0 => true division
        1 => (a //b, a%b)
        2 => a //b
        3 => a%b

        Actually, if divmod is 2 or 3 a tuple is returned, but errors for
        computing the other value are not raised.
        """
        other = _convert_other(other)
        if other is NotImplemented:
            # Reflect the NotImplemented shape of the requested result.
            if divmod in (0, 1):
                return NotImplemented
            return (NotImplemented, NotImplemented)

        if context is None:
            context = getcontext()

        # Sign of the quotient: xor of the operand signs.
        sign = self._sign ^ other._sign

        if self._is_special or other._is_special:
            ans = self._check_nans(other, context)
            if ans:
                if divmod:
                    return (ans, ans)
                return ans

            # INF/INF is undefined in every flavor of division.
            if self._isinfinity() and other._isinfinity():
                if divmod:
                    return (context._raise_error(InvalidOperation,
                                            '(+-)INF // (+-)INF'),
                            context._raise_error(InvalidOperation,
                                            '(+-)INF % (+-)INF'))
                return context._raise_error(InvalidOperation, '(+-)INF/(+-)INF')

            if self._isinfinity():
                if divmod == 1:
                    return (Infsign[sign],
                            context._raise_error(InvalidOperation, 'INF % x'))
                elif divmod == 2:
                    return (Infsign[sign], NaN)
                elif divmod == 3:
                    return (Infsign[sign],
                            context._raise_error(InvalidOperation, 'INF % x'))
                return Infsign[sign]

            if other._isinfinity():
                if divmod:
                    return (Decimal((sign, (0,), 0)), Decimal(self))
                # finite / INF underflows to a clamped zero.
                context._raise_error(Clamped, 'Division by infinity')
                return Decimal((sign, (0,), context.Etiny()))

        # Special cases for zeroes
        if not self and not other:
            if divmod:
                return context._raise_error(DivisionUndefined, '0 / 0', 1)
            return context._raise_error(DivisionUndefined, '0 / 0')

        if not self:
            if divmod:
                otherside = Decimal(self)
                otherside._exp = min(self._exp, other._exp)
                return (Decimal((sign, (0,), 0)),  otherside)
            # 0 / y is a zero whose exponent is kept within [Etiny, Emax].
            exp = self._exp - other._exp
            if exp < context.Etiny():
                exp = context.Etiny()
                context._raise_error(Clamped, '0e-x / y')
            if exp > context.Emax:
                exp = context.Emax
                context._raise_error(Clamped, '0e+x / y')
            return Decimal( (sign, (0,), exp) )

        if not other:
            if divmod:
                return context._raise_error(DivisionByZero, 'divmod(x,0)',
                                           sign, 1)
            return context._raise_error(DivisionByZero, 'x / 0', sign)

        #OK, so neither = 0, INF or NaN

        shouldround = context._rounding_decision == ALWAYS_ROUND

        #If we're dividing into ints, and self < other, stop.
        #self.__abs__(0) does not round.
        if divmod and (self.__abs__(0, context) < other.__abs__(0, context)):

            if divmod == 1 or divmod == 3:
                # Quotient is 0; the remainder is self at the common exp.
                exp = min(self._exp, other._exp)
                ans2 = self._rescale(exp, context=context, watchexp=0)
                if shouldround:
                    ans2 = ans2._fix(context)
                return (Decimal( (sign, (0,), 0) ),
                        ans2)

            elif divmod == 2:
                #Don't round the mod part, if we don't need it.
                return (Decimal( (sign, (0,), 0) ), Decimal(self))

        # Long division on integer work representations.
        op1 = _WorkRep(self)
        op2 = _WorkRep(other)
        op1, op2, adjust = _adjust_coefficients(op1, op2)
        res = _WorkRep( (sign, 0, (op1.exp - op2.exp)) )
        if divmod and res.exp > context.prec + 1:
            return context._raise_error(DivisionImpossible)

        prec_limit = 10 ** context.prec
        while 1:
            # Repeated subtraction produces the next quotient digit(s).
            while op2.int <= op1.int:
                res.int += 1
                op1.int -= op2.int
            if res.exp == 0 and divmod:
                if res.int >= prec_limit and shouldround:
                    return context._raise_error(DivisionImpossible)
                otherside = Decimal(op1)
                # Rescaling the remainder must not disturb caller flags.
                frozen = context._ignore_all_flags()

                exp = min(self._exp, other._exp)
                otherside = otherside._rescale(exp, context=context, watchexp=0)
                context._regard_flags(*frozen)
                if shouldround:
                    otherside = otherside._fix(context)
                return (Decimal(res), otherside)

            if op1.int == 0 and adjust >= 0 and not divmod:
                break
            if res.int >= prec_limit and shouldround:
                if divmod:
                    return context._raise_error(DivisionImpossible)
                shouldround=1
                # Really, the answer is a bit higher, so adding a one to
                # the end will make sure the rounding is right.
                if op1.int != 0:
                    res.int *= 10
                    res.int += 1
                    res.exp -= 1

                break
            # Shift in another decimal place and continue.
            res.int *= 10
            res.exp -= 1
            adjust += 1
            op1.int *= 10
            op1.exp -= 1

            if res.exp == 0 and divmod and op2.int > op1.int:
                #Solves an error in precision.  Same as a previous block.

                if res.int >= prec_limit and shouldround:
                    return context._raise_error(DivisionImpossible)
                otherside = Decimal(op1)
                frozen = context._ignore_all_flags()

                exp = min(self._exp, other._exp)
                otherside = otherside._rescale(exp, context=context)

                context._regard_flags(*frozen)

                return (Decimal(res), otherside)

        ans = Decimal(res)
        if shouldround:
            ans = ans._fix(context)
        return ans
+
+    def __rdiv__(self, other, context=None):
+        """Swaps self/other and returns __div__."""
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+        return other.__div__(self, context=context)
+    __rtruediv__ = __rdiv__
+
+    def __divmod__(self, other, context=None):
+        """
+        (self // other, self % other)
+        """
+        return self._divide(other, 1, context)
+
+    def __rdivmod__(self, other, context=None):
+        """Swaps self/other and returns __divmod__."""
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+        return other.__divmod__(self, context=context)
+
+    def __mod__(self, other, context=None):
+        """
+        self % other
+        """
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        if self._is_special or other._is_special:
+            ans = self._check_nans(other, context)
+            if ans:
+                return ans
+
+        if self and not other:
+            return context._raise_error(InvalidOperation, 'x % 0')
+
+        return self._divide(other, 3, context)[1]
+
+    def __rmod__(self, other, context=None):
+        """Swaps self/other and returns __mod__."""
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+        return other.__mod__(self, context=context)
+
+    def remainder_near(self, other, context=None):
+        """
+        Remainder nearest to 0-  abs(remainder-near) <= other/2
+        """
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        if self._is_special or other._is_special:
+            ans = self._check_nans(other, context)
+            if ans:
+                return ans
+        if self and not other:
+            return context._raise_error(InvalidOperation, 'x % 0')
+
+        if context is None:
+            context = getcontext()
+        # If DivisionImpossible causes an error, do not leave Rounded/Inexact
+        # ignored in the calling function.
+        context = context._shallow_copy()
+        flags = context._ignore_flags(Rounded, Inexact)
+        #keep DivisionImpossible flags
+        (side, r) = self.__divmod__(other, context=context)
+
+        if r._isnan():
+            context._regard_flags(*flags)
+            return r
+
+        context = context._shallow_copy()
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+
+        if other._sign:
+            comparison = other.__div__(Decimal(-2), context=context)
+        else:
+            comparison = other.__div__(Decimal(2), context=context)
+
+        context._set_rounding_decision(rounding)
+        context._regard_flags(*flags)
+
+        s1, s2 = r._sign, comparison._sign
+        r._sign, comparison._sign = 0, 0
+
+        if r < comparison:
+            r._sign, comparison._sign = s1, s2
+            #Get flags now
+            self.__divmod__(other, context=context)
+            return r._fix(context)
+        r._sign, comparison._sign = s1, s2
+
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+
+        (side, r) = self.__divmod__(other, context=context)
+        context._set_rounding_decision(rounding)
+        if r._isnan():
+            return r
+
+        decrease = not side._iseven()
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+        side = side.__abs__(context=context)
+        context._set_rounding_decision(rounding)
+
+        s1, s2 = r._sign, comparison._sign
+        r._sign, comparison._sign = 0, 0
+        if r > comparison or decrease and r == comparison:
+            r._sign, comparison._sign = s1, s2
+            context.prec += 1
+            if len(side.__add__(Decimal(1), context=context)._int) >= context.prec:
+                context.prec -= 1
+                return context._raise_error(DivisionImpossible)[1]
+            context.prec -= 1
+            if self._sign == other._sign:
+                r = r.__sub__(other, context=context)
+            else:
+                r = r.__add__(other, context=context)
+        else:
+            r._sign, comparison._sign = s1, s2
+
+        return r._fix(context)
+
+    def __floordiv__(self, other, context=None):
+        """self // other"""
+        return self._divide(other, 2, context)[0]
+
+    def __rfloordiv__(self, other, context=None):
+        """Swaps self/other and returns __floordiv__."""
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+        return other.__floordiv__(self, context=context)
+
+    def __float__(self):
+        """Float representation."""
+        return float(str(self))
+
    def __int__(self):
        """Converts self to an int, truncating if necessary.

        NaN raises InvalidContext via the current context; infinity
        raises OverflowError.
        """
        if self._is_special:
            if self._isnan():
                # int(NaN) is a condition on the context, not a value.
                context = getcontext()
                return context._raise_error(InvalidContext)
            elif self._isinfinity():
                raise OverflowError, "Cannot convert infinity to long"
        if self._exp >= 0:
            # Non-negative exponent: pad the digits with trailing zeros.
            s = ''.join(map(str, self._int)) + '0'*self._exp
        else:
            # Negative exponent: drop the fractional digits (truncate
            # toward zero).
            s = ''.join(map(str, self._int))[:self._exp]
        if s == '':
            # All digits were fractional; the integer part is zero.
            s = '0'
        sign = '-'*self._sign
        return int(sign + s)
+
    def __long__(self):
        """Converts to a long (Python 2), truncating if necessary.

        Equivalent to long(int(self)).
        """
        return long(self.__int__())
+
+    def _fix(self, context):
+        """Round if it is necessary to keep self within prec precision.
+
+        Rounds and fixes the exponent.  Does not raise on a sNaN.
+
+        Arguments:
+        self - Decimal instance
+        context - context used.
+        """
+        if self._is_special:
+            return self
+        if context is None:
+            context = getcontext()
+        prec = context.prec
+        ans = self._fixexponents(context)
+        if len(ans._int) > prec:
+            ans = ans._round(prec, context=context)
+            ans = ans._fixexponents(context)
+        return ans
+
    def _fixexponents(self, context):
        """Fix the exponents and return a copy with the exponent in bounds.
        Only call if known to not be a special value.

        Raises Clamped/Subnormal/Underflow/Overflow on the context as
        the exponent is forced into [Etiny, Emax] (or Etop with clamp).
        """
        folddown = context._clamp
        Emin = context.Emin
        ans = self
        ans_adjusted = ans.adjusted()
        if ans_adjusted < Emin:
            Etiny = context.Etiny()
            if ans._exp < Etiny:
                if not ans:
                    # A zero below Etiny is simply clamped up.
                    ans = Decimal(self)
                    ans._exp = Etiny
                    context._raise_error(Clamped)
                    return ans
                ans = ans._rescale(Etiny, context=context)
                #It isn't zero, and exp < Emin => subnormal
                context._raise_error(Subnormal)
                # Underflow fires only if the rescale lost digits.
                if context.flags[Inexact]:
                    context._raise_error(Underflow)
            else:
                if ans:
                    #Only raise subnormal if non-zero.
                    context._raise_error(Subnormal)
        else:
            Etop = context.Etop()
            if folddown and ans._exp > Etop:
                # Clamp mode: fold large exponents down to Etop.
                context._raise_error(Clamped)
                ans = ans._rescale(Etop, context=context)
            else:
                Emax = context.Emax
                if ans_adjusted > Emax:
                    if not ans:
                        # Oversized zero exponent clamps to Emax.
                        ans = Decimal(self)
                        ans._exp = Emax
                        context._raise_error(Clamped)
                        return ans
                    context._raise_error(Inexact)
                    context._raise_error(Rounded)
                    return context._raise_error(Overflow, 'above Emax', ans._sign)
        return ans
+
    def _round(self, prec=None, rounding=None, context=None):
        """Returns a rounded version of self.

        You can specify the precision or rounding method.  Otherwise, the
        context determines it.  Raises Rounded (and Inexact when digits
        are actually lost) on the context.
        """

        if self._is_special:
            ans = self._check_nans(context=context)
            if ans:
                return ans

            if self._isinfinity():
                return Decimal(self)

        if context is None:
            context = getcontext()

        if rounding is None:
            rounding = context.rounding
        if prec is None:
            prec = context.prec

        # Zero: only the exponent moves; always Rounded, never Inexact.
        if not self:
            if prec <= 0:
                dig = (0,)
                exp = len(self._int) - prec + self._exp
            else:
                dig = (0,) * prec
                exp = len(self._int) + self._exp - prec
            ans = Decimal((self._sign, dig, exp))
            context._raise_error(Rounded)
            return ans

        # prec <= 0 is handled by prefixing a synthetic digit so the
        # rounding functions always see at least one kept digit.
        if prec == 0:
            temp = Decimal(self)
            temp._int = (0,)+temp._int
            prec = 1
        elif prec < 0:
            exp = self._exp + len(self._int) - prec - 1
            temp = Decimal( (self._sign, (0, 1), exp))
            prec = 1
        else:
            temp = Decimal(self)

        numdigits = len(temp._int)
        if prec == numdigits:
            return temp

        # See if we need to extend precision
        expdiff = prec - numdigits
        if expdiff > 0:
            # More precision requested than digits: pad with zeros.
            tmp = list(temp._int)
            tmp.extend([0] * expdiff)
            ans =  Decimal( (temp._sign, tmp, temp._exp - expdiff))
            return ans

        #OK, but maybe all the lost digits are 0.
        lostdigits = self._int[expdiff:]
        if lostdigits == (0,) * len(lostdigits):
            ans = Decimal( (temp._sign, temp._int[:prec], temp._exp - expdiff))
            #Rounded, but not Inexact
            context._raise_error(Rounded)
            return ans

        # Okay, let's round and lose data

        # Dispatch on the rounding mode via the method-name table.
        this_function = getattr(temp, self._pick_rounding_function[rounding])
        #Now we've got the rounding function

        if prec != context.prec:
            context = context._shallow_copy()
            context.prec = prec
        ans = this_function(prec, expdiff, context)
        context._raise_error(Rounded)
        context._raise_error(Inexact, 'Changed in rounding')

        return ans
+
    # Maps each rounding-mode constant to the name of the Decimal method
    # that implements it; consulted by _round via getattr.  Populated
    # outside the class body (presumably after the class definition —
    # not visible in this chunk).
    _pick_rounding_function = {}
+
+    def _round_down(self, prec, expdiff, context):
+        """Also known as round-towards-0, truncate."""
+        return Decimal( (self._sign, self._int[:prec], self._exp - expdiff) )
+
+    def _round_half_up(self, prec, expdiff, context, tmp = None):
+        """Rounds 5 up (away from 0)"""
+
+        if tmp is None:
+            tmp = Decimal( (self._sign,self._int[:prec], self._exp - expdiff))
+        if self._int[prec] >= 5:
+            tmp = tmp._increment(round=0, context=context)
+            if len(tmp._int) > prec:
+                return Decimal( (tmp._sign, tmp._int[:-1], tmp._exp + 1))
+        return tmp
+
+    def _round_half_even(self, prec, expdiff, context):
+        """Round 5 to even, rest to nearest."""
+
+        tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff))
+        half = (self._int[prec] == 5)
+        if half:
+            for digit in self._int[prec+1:]:
+                if digit != 0:
+                    half = 0
+                    break
+        if half:
+            if self._int[prec-1] & 1 == 0:
+                return tmp
+        return self._round_half_up(prec, expdiff, context, tmp)
+
+    def _round_half_down(self, prec, expdiff, context):
+        """Round 5 down"""
+
+        tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff))
+        half = (self._int[prec] == 5)
+        if half:
+            for digit in self._int[prec+1:]:
+                if digit != 0:
+                    half = 0
+                    break
+        if half:
+            return tmp
+        return self._round_half_up(prec, expdiff, context, tmp)
+
+    def _round_up(self, prec, expdiff, context):
+        """Rounds away from 0."""
+        tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff) )
+        for digit in self._int[prec:]:
+            if digit != 0:
+                tmp = tmp._increment(round=1, context=context)
+                if len(tmp._int) > prec:
+                    return Decimal( (tmp._sign, tmp._int[:-1], tmp._exp + 1))
+                else:
+                    return tmp
+        return tmp
+
+    def _round_ceiling(self, prec, expdiff, context):
+        """Rounds up (not away from 0 if negative.)"""
+        if self._sign:
+            return self._round_down(prec, expdiff, context)
+        else:
+            return self._round_up(prec, expdiff, context)
+
+    def _round_floor(self, prec, expdiff, context):
+        """Rounds down (not towards 0 if negative)"""
+        if not self._sign:
+            return self._round_down(prec, expdiff, context)
+        else:
+            return self._round_up(prec, expdiff, context)
+
    def __pow__(self, n, modulo = None, context=None):
        """Return self ** n (mod modulo)

        If modulo is None (default), don't take it mod modulo.
        n must be integral; exponents with more than 9 digits (adjusted
        exponent > 8) signal InvalidOperation.
        """
        n = _convert_other(n)
        if n is NotImplemented:
            return n

        if context is None:
            context = getcontext()

        if self._is_special or n._is_special or n.adjusted() > 8:
            #Because the spot << doesn't work with really big exponents
            if n._isinfinity() or n.adjusted() > 8:
                return context._raise_error(InvalidOperation, 'x ** INF')

            ans = self._check_nans(n, context)
            if ans:
                return ans

        if not n._isinteger():
            return context._raise_error(InvalidOperation, 'x ** (non-integer)')

        if not self and not n:
            return context._raise_error(InvalidOperation, '0 ** 0')

        # x ** 0 == 1 for any other x.
        if not n:
            return Decimal(1)

        if self == Decimal(1):
            return Decimal(1)

        # Result is negative only for a negative base and odd exponent.
        sign = self._sign and not n._iseven()
        n = int(n)

        if self._isinfinity():
            if modulo:
                return context._raise_error(InvalidOperation, 'INF % x')
            if n > 0:
                return Infsign[sign]
            return Decimal( (sign, (0,), 0) )

        #with ludicrously large exponent, just raise an overflow and return inf.
        if not modulo and n > 0 and (self._exp + len(self._int) - 1) * n > context.Emax \
           and self:

            tmp = Decimal('inf')
            tmp._sign = sign
            context._raise_error(Rounded)
            context._raise_error(Inexact)
            context._raise_error(Overflow, 'Big power', sign)
            return tmp

        elength = len(str(abs(n)))
        firstprec = context.prec

        if not modulo and firstprec + elength + 1 > DefaultContext.Emax:
            return context._raise_error(Overflow, 'Too much precision.', sign)

        mul = Decimal(self)
        val = Decimal(1)
        # Work at extra precision so the final rounding is correct.
        context = context._shallow_copy()
        context.prec = firstprec + elength + 1
        if n < 0:
            #n is a long now, not Decimal instance
            n = -n
            # Negative exponent: invert the base and use the positive n.
            mul = Decimal(1).__div__(mul, context=context)

        spot = 1
        while spot <= n:
            spot <<= 1

        spot >>= 1
        #Spot is the highest power of 2 less than n
        # Square-and-multiply, scanning n's bits high to low.
        while spot:
            val = val.__mul__(val, context=context)
            if val._isinfinity():
                val = Infsign[sign]
                break
            if spot & n:
                val = val.__mul__(mul, context=context)
            if modulo is not None:
                val = val.__mod__(modulo, context=context)
            spot >>= 1
        context.prec = firstprec

        if context._rounding_decision == ALWAYS_ROUND:
            return val._fix(context)
        return val
+
+    def __rpow__(self, other, context=None):
+        """Swaps self/other and returns __pow__."""
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+        return other.__pow__(self, context=context)
+
+    def normalize(self, context=None):
+        """Normalize- strip trailing 0s, change anything equal to 0 to 0e0"""
+
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        dup = self._fix(context)
+        if dup._isinfinity():
+            return dup
+
+        if not dup:
+            return Decimal( (dup._sign, (0,), 0) )
+        end = len(dup._int)
+        exp = dup._exp
+        while dup._int[end-1] == 0:
+            exp += 1
+            end -= 1
+        return Decimal( (dup._sign, dup._int[:end], exp) )
+
+
+    def quantize(self, exp, rounding=None, context=None, watchexp=1):
+        """Quantize self so its exponent is the same as that of exp.
+
+        Similar to self._rescale(exp._exp) but with error checking.
+        """
+        if self._is_special or exp._is_special:
+            ans = self._check_nans(exp, context)
+            if ans:
+                return ans
+
+            if exp._isinfinity() or self._isinfinity():
+                if exp._isinfinity() and self._isinfinity():
+                    return self  #if both are inf, it is OK
+                if context is None:
+                    context = getcontext()
+                return context._raise_error(InvalidOperation,
+                                        'quantize with one INF')
+        return self._rescale(exp._exp, rounding, context, watchexp)
+
    def same_quantum(self, other):
        """Test whether self and other have the same exponent.

        same as self._exp == other._exp, except NaN == sNaN.  Any two
        NaNs match, as do any two infinities; a special value never
        matches a finite one.
        """
        if self._is_special or other._is_special:
            if self._isnan() or other._isnan():
                # Truthy only when both are NaNs (quiet or signaling);
                # the trailing 'and True' normalizes a truthy result.
                return self._isnan() and other._isnan() and True
            if self._isinfinity() or other._isinfinity():
                return self._isinfinity() and other._isinfinity() and True
        return self._exp == other._exp
+
    def _rescale(self, exp, rounding=None, context=None, watchexp=1):
        """Rescales so that the exponent is exp.

        exp = exp to scale to (an integer)
        rounding = rounding version
        watchexp: if set (default) an error is returned if exp is greater
        than Emax or less than Etiny.
        """
        if context is None:
            context = getcontext()

        if self._is_special:
            if self._isinfinity():
                return context._raise_error(InvalidOperation, 'rescale with an INF')

            ans = self._check_nans(context=context)
            if ans:
                return ans

        # Requested exponent outside [Etiny, Emax] is invalid when
        # watched.
        if watchexp and (context.Emax  < exp or context.Etiny() > exp):
            return context._raise_error(InvalidOperation, 'rescale(a, INF)')

        # A zero just takes the new exponent directly.
        if not self:
            ans = Decimal(self)
            ans._int = (0,)
            ans._exp = exp
            return ans

        diff = self._exp - exp
        digits = len(self._int) + diff

        if watchexp and digits > context.prec:
            return context._raise_error(InvalidOperation, 'Rescale > prec')

        # Prefix a guard digit so rounding always has a digit to carry
        # into.
        tmp = Decimal(self)
        tmp._int = (0,) + tmp._int
        digits += 1

        if digits < 0:
            # Everything is shifted out; leave a single guard pair so
            # rounding can still see the dropped magnitude.
            tmp._exp = -digits + tmp._exp
            tmp._int = (0,1)
            digits = 1
        tmp = tmp._round(digits, rounding, context=context)

        # Drop the guard digit if it stayed zero.
        if tmp._int[0] == 0 and len(tmp._int) > 1:
            tmp._int = tmp._int[1:]
        tmp._exp = exp

        tmp_adjusted = tmp.adjusted()
        if tmp and tmp_adjusted < context.Emin:
            context._raise_error(Subnormal)
        elif tmp and tmp_adjusted > context.Emax:
            return context._raise_error(InvalidOperation, 'rescale(a, INF)')
        return tmp
+
+    def to_integral(self, rounding=None, context=None):
+        """Rounds to the nearest integer, without raising inexact, rounded."""
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+        # A nonnegative exponent means self is already integral.
+        if self._exp >= 0:
+            return self
+        if context is None:
+            context = getcontext()
+        # Suppress Rounded/Inexact for the duration so the operation is silent.
+        flags = context._ignore_flags(Rounded, Inexact)
+        ans = self._rescale(0, rounding, context=context)
+        context._regard_flags(flags)
+        return ans
+
+    def sqrt(self, context=None):
+        """Return the square root of self.
+
+        Uses a converging algorithm (Xn+1 = 0.5*(Xn + self / Xn))
+        Should quadratically approach the right answer.
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+            # sqrt(+Infinity) is +Infinity.
+            if self._isinfinity() and self._sign == 0:
+                return Decimal(self)
+
+        if not self:
+            #exponent = self._exp / 2, using round_down.
+            #if self._exp < 0:
+            #    exp = (self._exp+1) // 2
+            #else:
+            exp = (self._exp) // 2
+            if self._sign == 1:
+                #sqrt(-0) = -0
+                return Decimal( (1, (0,), exp))
+            else:
+                return Decimal( (0, (0,), exp))
+
+        if context is None:
+            context = getcontext()
+
+        # Square root of a negative non-zero number is invalid.
+        if self._sign == 1:
+            return context._raise_error(InvalidOperation, 'sqrt(-x), x > 0')
+
+        tmp = Decimal(self)
+
+        # Factor out half the exponent (expadd) and work with exponent 0;
+        # an odd exponent gets one extra coefficient digit.
+        expadd = tmp._exp // 2
+        if tmp._exp & 1:
+            tmp._int += (0,)
+            tmp._exp = 0
+        else:
+            tmp._exp = 0
+
+        context = context._shallow_copy()
+        flags = context._ignore_all_flags()
+        firstprec = context.prec
+        context.prec = 3
+        # Seed with a 3-digit linear estimate; the branches handle even and
+        # odd adjusted exponents.  (The 0.259/0.819 constants appear to be
+        # linear-fit coefficients -- TODO confirm against upstream decimal.py.)
+        if tmp.adjusted() & 1 == 0:
+            ans = Decimal( (0, (8,1,9), tmp.adjusted()  - 2) )
+            ans = ans.__add__(tmp.__mul__(Decimal((0, (2,5,9), -2)),
+                                          context=context), context=context)
+            ans._exp -= 1 + tmp.adjusted() // 2
+        else:
+            ans = Decimal( (0, (2,5,9), tmp._exp + len(tmp._int)- 3) )
+            ans = ans.__add__(tmp.__mul__(Decimal((0, (8,1,9), -3)),
+                                          context=context), context=context)
+            ans._exp -= 1 + tmp.adjusted()  // 2
+
+        #ans is now a linear approximation.
+
+        # Widen the exponent range for the intermediate iterations.
+        Emax, Emin = context.Emax, context.Emin
+        context.Emax, context.Emin = DefaultContext.Emax, DefaultContext.Emin
+
+        half = Decimal('0.5')
+
+        # Newton's method: working precision roughly doubles each pass
+        # until it reaches maxp (two guard digits beyond firstprec).
+        maxp = firstprec + 2
+        rounding = context._set_rounding(ROUND_HALF_EVEN)
+        while 1:
+            context.prec = min(2*context.prec - 2, maxp)
+            ans = half.__mul__(ans.__add__(tmp.__div__(ans, context=context),
+                                           context=context), context=context)
+            if context.prec == maxp:
+                break
+
+        #round to the answer's precision-- the only error can be 1 ulp.
+        context.prec = firstprec
+        prevexp = ans.adjusted()
+        ans = ans._round(context=context)
+
+        #Now, check if the other last digits are better.
+        context.prec = firstprec + 1
+        # In case we rounded up another digit and we should actually go lower.
+        if prevexp != ans.adjusted():
+            ans._int += (0,)
+            ans._exp -= 1
+
+
+        # Square the half-ulp neighbours of ans to decide whether the
+        # rounded result is off by one ulp in either direction.
+        lower = ans.__sub__(Decimal((0, (5,), ans._exp-1)), context=context)
+        context._set_rounding(ROUND_UP)
+        if lower.__mul__(lower, context=context) > (tmp):
+            ans = ans.__sub__(Decimal((0, (1,), ans._exp)), context=context)
+
+        else:
+            upper = ans.__add__(Decimal((0, (5,), ans._exp-1)),context=context)
+            context._set_rounding(ROUND_DOWN)
+            if upper.__mul__(upper, context=context) < tmp:
+                ans = ans.__add__(Decimal((0, (1,), ans._exp)),context=context)
+
+        # Restore the exponent half that was factored out up front.
+        ans._exp += expadd
+
+        context.prec = firstprec
+        context.rounding = rounding
+        ans = ans._fix(context)
+
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+        if not ans.__mul__(ans, context=context) == self:
+            # Only rounded/inexact if here.
+            context._regard_flags(flags)
+            context._raise_error(Rounded)
+            context._raise_error(Inexact)
+        else:
+            #Exact answer, so let's set the exponent right.
+            #if self._exp < 0:
+            #    exp = (self._exp +1)// 2
+            #else:
+            exp = self._exp // 2
+            context.prec += ans._exp - exp
+            ans = ans._rescale(exp, context=context)
+            context.prec = firstprec
+            context._regard_flags(flags)
+        context.Emax, context.Emin = Emax, Emin
+
+        return ans._fix(context)
+
+    def max(self, other, context=None):
+        """Returns the larger value.
+
+        like max(self, other) except if one is not a number, returns
+        NaN (and signals if one is sNaN).  Also rounds.
+        """
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        if self._is_special or other._is_special:
+            # if one operand is a quiet NaN and the other is number, then the
+            # number is always returned
+            # (the comparisons below treat _isnan() as: 0 = not a NaN,
+            # 1 = quiet NaN, 2 = signaling NaN)
+            sn = self._isnan()
+            on = other._isnan()
+            if sn or on:
+                if on == 1 and sn != 2:
+                    return self
+                if sn == 1 and on != 2:
+                    return other
+                return self._check_nans(other, context)
+
+        ans = self
+        c = self.__cmp__(other)
+        if c == 0:
+            # if both operands are finite and equal in numerical value
+            # then an ordering is applied:
+            #
+            # if the signs differ then max returns the operand with the
+            # positive sign and min returns the operand with the negative sign
+            #
+            # if the signs are the same then the exponent is used to select
+            # the result.
+            if self._sign != other._sign:
+                if self._sign:
+                    ans = other
+            elif self._exp < other._exp and not self._sign:
+                ans = other
+            elif self._exp > other._exp and self._sign:
+                ans = other
+        elif c == -1:
+            ans = other
+
+        if context is None:
+            context = getcontext()
+        # Round the chosen operand only if the context says to always round.
+        if context._rounding_decision == ALWAYS_ROUND:
+            return ans._fix(context)
+        return ans
+
+    def min(self, other, context=None):
+        """Returns the smaller value.
+
+        like min(self, other) except if one is not a number, returns
+        NaN (and signals if one is sNaN).  Also rounds.
+        """
+        other = _convert_other(other)
+        if other is NotImplemented:
+            return other
+
+        if self._is_special or other._is_special:
+            # if one operand is a quiet NaN and the other is number, then the
+            # number is always returned
+            # (the comparisons below treat _isnan() as: 0 = not a NaN,
+            # 1 = quiet NaN, 2 = signaling NaN)
+            sn = self._isnan()
+            on = other._isnan()
+            if sn or on:
+                if on == 1 and sn != 2:
+                    return self
+                if sn == 1 and on != 2:
+                    return other
+                return self._check_nans(other, context)
+
+        ans = self
+        c = self.__cmp__(other)
+        if c == 0:
+            # if both operands are finite and equal in numerical value
+            # then an ordering is applied:
+            #
+            # if the signs differ then max returns the operand with the
+            # positive sign and min returns the operand with the negative sign
+            #
+            # if the signs are the same then the exponent is used to select
+            # the result.
+            if self._sign != other._sign:
+                if other._sign:
+                    ans = other
+            elif self._exp > other._exp and not self._sign:
+                ans = other
+            elif self._exp < other._exp and self._sign:
+                ans = other
+        elif c == 1:
+            ans = other
+
+        if context is None:
+            context = getcontext()
+        # Round the chosen operand only if the context says to always round.
+        if context._rounding_decision == ALWAYS_ROUND:
+            return ans._fix(context)
+        return ans
+
+    def _isinteger(self):
+        """Returns whether self is an integer"""
+        if self._exp >= 0:
+            return True
+        # Negative exponent: the last -_exp coefficient digits are the
+        # fractional part; self is integral iff they are all zero.
+        rest = self._int[self._exp:]
+        return rest == (0,)*len(rest)
+
+    def _iseven(self):
+        """Returns 1 if self is even.  Assumes self is an integer."""
+        # A positive exponent implies trailing zeros, hence even.
+        if self._exp > 0:
+            return 1
+        # Otherwise test the units digit (just left of the radix point).
+        return self._int[-1+self._exp] & 1 == 0
+
+    def adjusted(self):
+        """Return the adjusted exponent of self"""
+        # Adjusted exponent = exponent of the most significant digit.
+        try:
+            return self._exp + len(self._int) - 1
+        #If NaN or Infinity, self._exp is string
+        except TypeError:
+            return 0
+
+    # support for pickling, copy, and deepcopy
+    def __reduce__(self):
+        # Pickle via the string form; unpickling calls the constructor.
+        return (self.__class__, (str(self),))
+
+    def __copy__(self):
+        if type(self) == Decimal:
+            return self     # I'm immutable; therefore I am my own clone
+        # Subclass instances are rebuilt from their string form.
+        return self.__class__(str(self))
+
+    def __deepcopy__(self, memo):
+        if type(self) == Decimal:
+            return self     # My components are also immutable
+        # Subclass instances are rebuilt from their string form.
+        return self.__class__(str(self))
+
+##### Context class ###########################################
+
+
+# get rounding method function:
+# Populate Decimal._pick_rounding_function, mapping each ROUND_* constant
+# to the name of the Decimal._round_* method that implements it.
+rounding_functions = [name for name in Decimal.__dict__.keys() if name.startswith('_round_')]
+for name in rounding_functions:
+    #name is like _round_half_even, goes to the global ROUND_HALF_EVEN value.
+    globalname = name[1:].upper()
+    val = globals()[globalname]
+    Decimal._pick_rounding_function[val] = name
+
+# Remove the loop temporaries from the module namespace.
+del name, val, globalname, rounding_functions
+
+class Context(object):
+    """Contains the context for a Decimal instance.
+
+    Contains:
+    prec - precision (for use in rounding, division, square roots..)
+    rounding - rounding type. (how you round)
+    _rounding_decision - ALWAYS_ROUND, NEVER_ROUND -- do you round?
+    traps - If traps[exception] = 1, then the exception is
+                    raised when it is caused.  Otherwise, a value is
+                    substituted in.
+    flags  - When an exception is caused, flags[exception] is incremented.
+             (Whether or not the trap_enabler is set)
+             Should be reset by user of Decimal instance.
+    Emin -   Minimum exponent
+    Emax -   Maximum exponent
+    capitals -      If 1, 1*10^1 is printed as 1E+1.
+                    If 0, printed as 1e1
+    _clamp - If 1, change exponents if too high (Default 0)
+    """
+
+    def __init__(self, prec=None, rounding=None,
+                 traps=None, flags=None,
+                 _rounding_decision=None,
+                 Emin=None, Emax=None,
+                 capitals=None, _clamp=0,
+                 _ignored_flags=None):
+        if flags is None:
+            flags = []
+        if _ignored_flags is None:
+            _ignored_flags = []
+        # A sequence of signals is converted to a {signal: bool} dict.
+        if not isinstance(flags, dict):
+            flags = dict([(s,s in flags) for s in _signals])
+            del s
+        if traps is not None and not isinstance(traps, dict):
+            traps = dict([(s,s in traps) for s in _signals])
+            del s
+        # Any argument still None inherits a copy of DefaultContext's value.
+        for name, val in locals().items():
+            if val is None:
+                setattr(self, name, copy.copy(getattr(DefaultContext, name)))
+            else:
+                setattr(self, name, val)
+        # locals() included 'self'; drop the accidental self.self attribute.
+        del self.self
+
+    def __repr__(self):
+        """Show the current context."""
+        s = []
+        s.append('Context(prec=%(prec)d, rounding=%(rounding)s, Emin=%(Emin)d, Emax=%(Emax)d, capitals=%(capitals)d' % vars(self))
+        # Only signals whose flag/trap is currently set are listed.
+        s.append('flags=[' + ', '.join([f.__name__ for f, v in self.flags.items() if v]) + ']')
+        s.append('traps=[' + ', '.join([t.__name__ for t, v in self.traps.items() if v]) + ']')
+        return ', '.join(s) + ')'
+
+    def clear_flags(self):
+        """Reset all flags to zero"""
+        # Keys are kept; only the counts are zeroed.
+        for flag in self.flags:
+            self.flags[flag] = 0
+
+    def _shallow_copy(self):
+        """Returns a shallow copy from self."""
+        # Note: the traps and flags dicts are shared with self (cf. copy()).
+        nc = Context(self.prec, self.rounding, self.traps, self.flags,
+                         self._rounding_decision, self.Emin, self.Emax,
+                         self.capitals, self._clamp, self._ignored_flags)
+        return nc
+
+    def copy(self):
+        """Returns a deep copy from self."""
+        # traps and flags are copied so the new context is independent.
+        nc = Context(self.prec, self.rounding, self.traps.copy(), self.flags.copy(),
+                         self._rounding_decision, self.Emin, self.Emax,
+                         self.capitals, self._clamp, self._ignored_flags)
+        return nc
+    __copy__ = copy
+
+    def _raise_error(self, condition, explanation = None, *args):
+        """Handles an error
+
+        If the flag is in _ignored_flags, returns the default response.
+        Otherwise, it increments the flag, then, if the corresponding
+        trap_enabler is set, it raises the exception.  Otherwise, it returns
+        the default value after incrementing the flag.
+        """
+        # A condition may map to a parent signal via _condition_map.
+        error = _condition_map.get(condition, condition)
+        if error in self._ignored_flags:
+            #Don't touch the flag
+            return error().handle(self, *args)
+
+        self.flags[error] += 1
+        if not self.traps[error]:
+            #The errors define how to handle themselves.
+            return condition().handle(self, *args)
+
+        # Errors should only be risked on copies of the context
+        #self._ignored_flags = []
+        raise error, explanation
+
+    def _ignore_all_flags(self):
+        """Ignore all flags, if they are raised"""
+        # Convenience wrapper over _ignore_flags for every known signal.
+        return self._ignore_flags(*_signals)
+
+    def _ignore_flags(self, *flags):
+        """Ignore the flags, if they are raised
+
+        Returns the list of newly ignored flags, suitable for passing
+        back to _regard_flags.
+        """
+        # Do not mutate-- This way, copies of a context leave the original
+        # alone.
+        self._ignored_flags = (self._ignored_flags + list(flags))
+        return list(flags)
+
+    def _regard_flags(self, *flags):
+        """Stop ignoring the flags, if they are raised"""
+        # Accept either individual flags or a single list/tuple of them
+        # (the form returned by _ignore_flags).
+        if flags and isinstance(flags[0], (tuple,list)):
+            flags = flags[0]
+        for flag in flags:
+            self._ignored_flags.remove(flag)
+
+    def __hash__(self):
+        """A Context cannot be hashed."""
+        # We inherit object.__hash__, so we must deny this explicitly
+        raise TypeError, "Cannot hash a Context."
+
+    def Etiny(self):
+        """Returns Etiny (= Emin - prec + 1)"""
+        # Minimum exponent a subnormal result may take in this context.
+        return int(self.Emin - self.prec + 1)
+
+    def Etop(self):
+        """Returns maximum exponent (= Emax - prec + 1)"""
+        # Largest exponent of the least-significant digit at full precision.
+        return int(self.Emax - self.prec + 1)
+
+    def _set_rounding_decision(self, type):
+        """Sets the rounding decision.
+
+        Sets the rounding decision, and returns the current (previous)
+        rounding decision.  Often used like:
+
+        context = context._shallow_copy()
+        # That so you don't change the calling context
+        # if an error occurs in the middle (say DivisionImpossible is raised).
+
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+        instance = instance / Decimal(2)
+        context._set_rounding_decision(rounding)
+
+        This will make it not round for that operation.
+        """
+
+        # Return the previous value so the caller can restore it afterwards.
+        rounding = self._rounding_decision
+        self._rounding_decision = type
+        return rounding
+
+    def _set_rounding(self, type):
+        """Sets the rounding type.
+
+        Sets the rounding type, and returns the current (previous)
+        rounding type.  Often used like:
+
+        context = context.copy()
+        # so you don't change the calling context
+        # if an error occurs in the middle.
+        rounding = context._set_rounding(ROUND_UP)
+        val = self.__sub__(other, context=context)
+        context._set_rounding(rounding)
+
+        This will make it round up for that operation.
+        """
+        # Return the previous value so the caller can restore it afterwards.
+        rounding = self.rounding
+        self.rounding= type
+        return rounding
+
+    def create_decimal(self, num='0'):
+        """Creates a new Decimal instance but using self as context."""
+        d = Decimal(num, context=self)
+        # _fix applies this context's precision/rounding to the new value.
+        return d._fix(self)
+
+    #Methods
+    def abs(self, a):
+        """Returns the absolute value of the operand.
+
+        If the operand is negative, the result is the same as using the minus
+        operation on the operand. Otherwise, the result is the same as using
+        the plus operation on the operand.
+
+        >>> ExtendedContext.abs(Decimal('2.1'))
+        Decimal("2.1")
+        >>> ExtendedContext.abs(Decimal('-100'))
+        Decimal("100")
+        >>> ExtendedContext.abs(Decimal('101.5'))
+        Decimal("101.5")
+        >>> ExtendedContext.abs(Decimal('-101.5'))
+        Decimal("101.5")
+        """
+        # Delegate to Decimal.__abs__ with this context.
+        return a.__abs__(context=self)
+
+    def add(self, a, b):
+        """Return the sum of the two operands.
+
+        >>> ExtendedContext.add(Decimal('12'), Decimal('7.00'))
+        Decimal("19.00")
+        >>> ExtendedContext.add(Decimal('1E+2'), Decimal('1.01E+4'))
+        Decimal("1.02E+4")
+        """
+        # Delegate to Decimal.__add__ with this context.
+        return a.__add__(b, context=self)
+
+    def _apply(self, a):
+        """Apply this context to the operand and return the string form."""
+        return str(a._fix(self))
+
+    def compare(self, a, b):
+        """Compares values numerically.
+
+        If the signs of the operands differ, a value representing each operand
+        ('-1' if the operand is less than zero, '0' if the operand is zero or
+        negative zero, or '1' if the operand is greater than zero) is used in
+        place of that operand for the comparison instead of the actual
+        operand.
+
+        The comparison is then effected by subtracting the second operand from
+        the first and then returning a value according to the result of the
+        subtraction: '-1' if the result is less than zero, '0' if the result is
+        zero or negative zero, or '1' if the result is greater than zero.
+
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('3'))
+        Decimal("-1")
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.1'))
+        Decimal("0")
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.10'))
+        Decimal("0")
+        >>> ExtendedContext.compare(Decimal('3'), Decimal('2.1'))
+        Decimal("1")
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('-3'))
+        Decimal("1")
+        >>> ExtendedContext.compare(Decimal('-3'), Decimal('2.1'))
+        Decimal("-1")
+        """
+        # Delegate to Decimal.compare with this context.
+        return a.compare(b, context=self)
+
+    def divide(self, a, b):
+        """Decimal division in a specified context.
+
+        >>> ExtendedContext.divide(Decimal('1'), Decimal('3'))
+        Decimal("0.333333333")
+        >>> ExtendedContext.divide(Decimal('2'), Decimal('3'))
+        Decimal("0.666666667")
+        >>> ExtendedContext.divide(Decimal('5'), Decimal('2'))
+        Decimal("2.5")
+        >>> ExtendedContext.divide(Decimal('1'), Decimal('10'))
+        Decimal("0.1")
+        >>> ExtendedContext.divide(Decimal('12'), Decimal('12'))
+        Decimal("1")
+        >>> ExtendedContext.divide(Decimal('8.00'), Decimal('2'))
+        Decimal("4.00")
+        >>> ExtendedContext.divide(Decimal('2.400'), Decimal('2.0'))
+        Decimal("1.20")
+        >>> ExtendedContext.divide(Decimal('1000'), Decimal('100'))
+        Decimal("10")
+        >>> ExtendedContext.divide(Decimal('1000'), Decimal('1'))
+        Decimal("1000")
+        >>> ExtendedContext.divide(Decimal('2.40E+6'), Decimal('2'))
+        Decimal("1.20E+6")
+        """
+        # Delegate to Decimal.__div__ with this context.
+        return a.__div__(b, context=self)
+
+    def divide_int(self, a, b):
+        """Divides two numbers and returns the integer part of the result.
+
+        >>> ExtendedContext.divide_int(Decimal('2'), Decimal('3'))
+        Decimal("0")
+        >>> ExtendedContext.divide_int(Decimal('10'), Decimal('3'))
+        Decimal("3")
+        >>> ExtendedContext.divide_int(Decimal('1'), Decimal('0.3'))
+        Decimal("3")
+        """
+        # Delegate to Decimal.__floordiv__ with this context.
+        return a.__floordiv__(b, context=self)
+
+    def divmod(self, a, b):
+        """Return (a // b, a % b) computed under this context."""
+        return a.__divmod__(b, context=self)
+
+    def max(self, a,b):
+        """max compares two values numerically and returns the maximum.
+
+        If either operand is a NaN then the general rules apply.
+        Otherwise, the operands are compared as as though by the compare
+        operation. If they are numerically equal then the left-hand operand
+        is chosen as the result. Otherwise the maximum (closer to positive
+        infinity) of the two operands is chosen as the result.
+
+        >>> ExtendedContext.max(Decimal('3'), Decimal('2'))
+        Decimal("3")
+        >>> ExtendedContext.max(Decimal('-10'), Decimal('3'))
+        Decimal("3")
+        >>> ExtendedContext.max(Decimal('1.0'), Decimal('1'))
+        Decimal("1")
+        >>> ExtendedContext.max(Decimal('7'), Decimal('NaN'))
+        Decimal("7")
+        """
+        # Delegate to Decimal.max with this context.
+        return a.max(b, context=self)
+
+    def min(self, a,b):
+        """min compares two values numerically and returns the minimum.
+
+        If either operand is a NaN then the general rules apply.
+        Otherwise, the operands are compared as as though by the compare
+        operation. If they are numerically equal then the left-hand operand
+        is chosen as the result. Otherwise the minimum (closer to negative
+        infinity) of the two operands is chosen as the result.
+
+        >>> ExtendedContext.min(Decimal('3'), Decimal('2'))
+        Decimal("2")
+        >>> ExtendedContext.min(Decimal('-10'), Decimal('3'))
+        Decimal("-10")
+        >>> ExtendedContext.min(Decimal('1.0'), Decimal('1'))
+        Decimal("1.0")
+        >>> ExtendedContext.min(Decimal('7'), Decimal('NaN'))
+        Decimal("7")
+        """
+        # Delegate to Decimal.min with this context.
+        return a.min(b, context=self)
+
+    def minus(self, a):
+        """Minus corresponds to unary prefix minus in Python.
+
+        The operation is evaluated using the same rules as subtract; the
+        operation minus(a) is calculated as subtract('0', a) where the '0'
+        has the same exponent as the operand.
+
+        >>> ExtendedContext.minus(Decimal('1.3'))
+        Decimal("-1.3")
+        >>> ExtendedContext.minus(Decimal('-1.3'))
+        Decimal("1.3")
+        """
+        # Delegate to Decimal.__neg__ with this context.
+        return a.__neg__(context=self)
+
+    def multiply(self, a, b):
+        """multiply multiplies two operands.
+
+        If either operand is a special value then the general rules apply.
+        Otherwise, the operands are multiplied together ('long multiplication'),
+        resulting in a number which may be as long as the sum of the lengths
+        of the two operands.
+
+        >>> ExtendedContext.multiply(Decimal('1.20'), Decimal('3'))
+        Decimal("3.60")
+        >>> ExtendedContext.multiply(Decimal('7'), Decimal('3'))
+        Decimal("21")
+        >>> ExtendedContext.multiply(Decimal('0.9'), Decimal('0.8'))
+        Decimal("0.72")
+        >>> ExtendedContext.multiply(Decimal('0.9'), Decimal('-0'))
+        Decimal("-0.0")
+        >>> ExtendedContext.multiply(Decimal('654321'), Decimal('654321'))
+        Decimal("4.28135971E+11")
+        """
+        # Delegate to Decimal.__mul__ with this context.
+        return a.__mul__(b, context=self)
+
+    def normalize(self, a):
+        """normalize reduces an operand to its simplest form.
+
+        Essentially a plus operation with all trailing zeros removed from the
+        result.
+
+        >>> ExtendedContext.normalize(Decimal('2.1'))
+        Decimal("2.1")
+        >>> ExtendedContext.normalize(Decimal('-2.0'))
+        Decimal("-2")
+        >>> ExtendedContext.normalize(Decimal('1.200'))
+        Decimal("1.2")
+        >>> ExtendedContext.normalize(Decimal('-120'))
+        Decimal("-1.2E+2")
+        >>> ExtendedContext.normalize(Decimal('120.00'))
+        Decimal("1.2E+2")
+        >>> ExtendedContext.normalize(Decimal('0.00'))
+        Decimal("0")
+        """
+        # Delegate to Decimal.normalize with this context.
+        return a.normalize(context=self)
+
+    def plus(self, a):
+        """Plus corresponds to unary prefix plus in Python.
+
+        The operation is evaluated using the same rules as add; the
+        operation plus(a) is calculated as add('0', a) where the '0'
+        has the same exponent as the operand.
+
+        >>> ExtendedContext.plus(Decimal('1.3'))
+        Decimal("1.3")
+        >>> ExtendedContext.plus(Decimal('-1.3'))
+        Decimal("-1.3")
+        """
+        # Delegate to Decimal.__pos__ with this context.
+        return a.__pos__(context=self)
+
+    def power(self, a, b, modulo=None):
+        """Raises a to the power of b, to modulo if given.
+
+        The right-hand operand must be a whole number whose integer part (after
+        any exponent has been applied) has no more than 9 digits and whose
+        fractional part (if any) is all zeros before any rounding. The operand
+        may be positive, negative, or zero; if negative, the absolute value of
+        the power is used, and the left-hand operand is inverted (divided into
+        1) before use.
+
+        If the increased precision needed for the intermediate calculations
+        exceeds the capabilities of the implementation then an Invalid operation
+        condition is raised.
+
+        If, when raising to a negative power, an underflow occurs during the
+        division into 1, the operation is not halted at that point but
+        continues.
+
+        >>> ExtendedContext.power(Decimal('2'), Decimal('3'))
+        Decimal("8")
+        >>> ExtendedContext.power(Decimal('2'), Decimal('-3'))
+        Decimal("0.125")
+        >>> ExtendedContext.power(Decimal('1.7'), Decimal('8'))
+        Decimal("69.7575744")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('-2'))
+        Decimal("0")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('-1'))
+        Decimal("0")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('0'))
+        Decimal("1")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('1'))
+        Decimal("Infinity")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('2'))
+        Decimal("Infinity")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('-2'))
+        Decimal("0")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('-1'))
+        Decimal("-0")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('0'))
+        Decimal("1")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('1'))
+        Decimal("-Infinity")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('2'))
+        Decimal("Infinity")
+        >>> ExtendedContext.power(Decimal('0'), Decimal('0'))
+        Decimal("NaN")
+        """
+        # Delegate to Decimal.__pow__ with this context.
+        return a.__pow__(b, modulo, context=self)
+
+    def quantize(self, a, b):
+        """Returns a value equal to 'a' (rounded) and having the exponent of 'b'.
+
+        The coefficient of the result is derived from that of the left-hand
+        operand. It may be rounded using the current rounding setting (if the
+        exponent is being increased), multiplied by a positive power of ten (if
+        the exponent is being decreased), or is unchanged (if the exponent is
+        already equal to that of the right-hand operand).
+
+        Unlike other operations, if the length of the coefficient after the
+        quantize operation would be greater than precision then an Invalid
+        operation condition is raised. This guarantees that, unless there is an
+        error condition, the exponent of the result of a quantize is always
+        equal to that of the right-hand operand.
+
+        Also unlike other operations, quantize will never raise Underflow, even
+        if the result is subnormal and inexact.
+
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.001'))
+        Decimal("2.170")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.01'))
+        Decimal("2.17")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.1'))
+        Decimal("2.2")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('1e+0'))
+        Decimal("2")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('1e+1'))
+        Decimal("0E+1")
+        >>> ExtendedContext.quantize(Decimal('-Inf'), Decimal('Infinity'))
+        Decimal("-Infinity")
+        >>> ExtendedContext.quantize(Decimal('2'), Decimal('Infinity'))
+        Decimal("NaN")
+        >>> ExtendedContext.quantize(Decimal('-0.1'), Decimal('1'))
+        Decimal("-0")
+        >>> ExtendedContext.quantize(Decimal('-0'), Decimal('1e+5'))
+        Decimal("-0E+5")
+        >>> ExtendedContext.quantize(Decimal('+35236450.6'), Decimal('1e-2'))
+        Decimal("NaN")
+        >>> ExtendedContext.quantize(Decimal('-35236450.6'), Decimal('1e-2'))
+        Decimal("NaN")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e-1'))
+        Decimal("217.0")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e-0'))
+        Decimal("217")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e+1'))
+        Decimal("2.2E+2")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e+2'))
+        Decimal("2E+2")
+        """
+        # Delegate to Decimal.quantize with this context.
+        return a.quantize(b, context=self)
+
+    def remainder(self, a, b):
+        """Returns the remainder from integer division.
+
+        The result is the residue of the dividend after the operation of
+        calculating integer division as described for divide-integer, rounded to
+        precision digits if necessary. The sign of the result, if non-zero, is
+        the same as that of the original dividend.
+
+        This operation will fail under the same conditions as integer division
+        (that is, if integer division on the same two operands would fail, the
+        remainder cannot be calculated).
+
+        >>> ExtendedContext.remainder(Decimal('2.1'), Decimal('3'))
+        Decimal("2.1")
+        >>> ExtendedContext.remainder(Decimal('10'), Decimal('3'))
+        Decimal("1")
+        >>> ExtendedContext.remainder(Decimal('-10'), Decimal('3'))
+        Decimal("-1")
+        >>> ExtendedContext.remainder(Decimal('10.2'), Decimal('1'))
+        Decimal("0.2")
+        >>> ExtendedContext.remainder(Decimal('10'), Decimal('0.3'))
+        Decimal("0.1")
+        >>> ExtendedContext.remainder(Decimal('3.6'), Decimal('1.3'))
+        Decimal("1.0")
+        """
+        # Delegate to Decimal.__mod__ with this context.
+        return a.__mod__(b, context=self)
+
+    def remainder_near(self, a, b):
+        """Returns "a - b * n", where n is the integer nearest the exact
+        value of "a / b" (if two integers are equally near then the even one
+        is chosen). If the result is equal to 0 then its sign will be the
+        sign of a.
+
+        This operation will fail under the same conditions as integer division
+        (that is, if integer division on the same two operands would fail, the
+        remainder cannot be calculated).
+
+        >>> ExtendedContext.remainder_near(Decimal('2.1'), Decimal('3'))
+        Decimal("-0.9")
+        >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('6'))
+        Decimal("-2")
+        >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('3'))
+        Decimal("1")
+        >>> ExtendedContext.remainder_near(Decimal('-10'), Decimal('3'))
+        Decimal("-1")
+        >>> ExtendedContext.remainder_near(Decimal('10.2'), Decimal('1'))
+        Decimal("0.2")
+        >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('0.3'))
+        Decimal("0.1")
+        >>> ExtendedContext.remainder_near(Decimal('3.6'), Decimal('1.3'))
+        Decimal("-0.3")
+        """
+        return a.remainder_near(b, context=self)
+
+    def same_quantum(self, a, b):
+        """Returns True if the two operands have the same exponent.
+
+        The result is never affected by either the sign or the coefficient of
+        either operand.
+
+        >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('0.001'))
+        False
+        >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('0.01'))
+        True
+        >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('1'))
+        False
+        >>> ExtendedContext.same_quantum(Decimal('Inf'), Decimal('-Inf'))
+        True
+        """
+        # No context argument: this test depends only on the exponents.
+        return a.same_quantum(b)
+
+    def sqrt(self, a):
+        """Returns the square root of a non-negative number to context precision.
+
+        If the result must be inexact, it is rounded using the round-half-even
+        algorithm.
+
+        >>> ExtendedContext.sqrt(Decimal('0'))
+        Decimal("0")
+        >>> ExtendedContext.sqrt(Decimal('-0'))
+        Decimal("-0")
+        >>> ExtendedContext.sqrt(Decimal('0.39'))
+        Decimal("0.624499800")
+        >>> ExtendedContext.sqrt(Decimal('100'))
+        Decimal("10")
+        >>> ExtendedContext.sqrt(Decimal('1'))
+        Decimal("1")
+        >>> ExtendedContext.sqrt(Decimal('1.0'))
+        Decimal("1.0")
+        >>> ExtendedContext.sqrt(Decimal('1.00'))
+        Decimal("1.0")
+        >>> ExtendedContext.sqrt(Decimal('7'))
+        Decimal("2.64575131")
+        >>> ExtendedContext.sqrt(Decimal('10'))
+        Decimal("3.16227766")
+        >>> ExtendedContext.prec
+        9
+        """
+        # Delegate to Decimal.sqrt with this context's precision/rounding.
+        return a.sqrt(context=self)
+
+    def subtract(self, a, b):
+        """Return the difference of the two operands.
+
+        >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.07'))
+        Decimal("0.23")
+        >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.30'))
+        Decimal("0.00")
+        >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('2.07'))
+        Decimal("-0.77")
+        """
+        return a.__sub__(b, context=self)
+
+    def to_eng_string(self, a):
+        """Converts a number to a string, using engineering notation.
+
+        The operation is not affected by the context.
+        """
+        # NOTE(review): despite the docstring, the context is passed through;
+        # presumably it only affects the 'E'/'e' capitals setting -- confirm.
+        return a.to_eng_string(context=self)
+
+    def to_sci_string(self, a):
+        """Converts a number to a string, using scientific notation.
+
+        The operation is not affected by the context.
+        """
+        # Decimal.__str__ accepts a context; presumably used only for the
+        # capitals setting -- confirm.
+        return a.__str__(context=self)
+
+    def to_integral(self, a):
+        """Rounds to an integer.
+
+        When the operand has a negative exponent, the result is the same
+        as using the quantize() operation using the given operand as the
+        left-hand-operand, 1E+0 as the right-hand-operand, and the precision
+        of the operand as the precision setting, except that no flags will
+        be set. The rounding mode is taken from the context.
+
+        >>> ExtendedContext.to_integral(Decimal('2.1'))
+        Decimal("2")
+        >>> ExtendedContext.to_integral(Decimal('100'))
+        Decimal("100")
+        >>> ExtendedContext.to_integral(Decimal('100.0'))
+        Decimal("100")
+        >>> ExtendedContext.to_integral(Decimal('101.5'))
+        Decimal("102")
+        >>> ExtendedContext.to_integral(Decimal('-101.5'))
+        Decimal("-102")
+        >>> ExtendedContext.to_integral(Decimal('10E+5'))
+        Decimal("1.0E+6")
+        >>> ExtendedContext.to_integral(Decimal('7.89E+77'))
+        Decimal("7.89E+77")
+        >>> ExtendedContext.to_integral(Decimal('-Inf'))
+        Decimal("-Infinity")
+        """
+        return a.to_integral(context=self)
+
+class _WorkRep(object):
+    """Mutable scratch representation of a Decimal used by the arithmetic
+    helpers (_normalize, _adjust_coefficients): sign, coefficient as a
+    single integer, and exponent."""
+    __slots__ = ('sign','int','exp')
+    # sign: 0 or 1
+    # int:  int or long
+    # exp:  None, int, or string
+
+    def __init__(self, value=None):
+        if value is None:
+            # Empty representation; caller fills the fields in.
+            self.sign = None
+            self.int = 0
+            self.exp = None
+        elif isinstance(value, Decimal):
+            self.sign = value._sign
+            # Collapse the digit-tuple coefficient into one integer.
+            cum = 0
+            for digit  in value._int:
+                cum = cum * 10 + digit
+            self.int = cum
+            self.exp = value._exp
+        else:
+            # assert isinstance(value, tuple)
+            self.sign = value[0]
+            self.int = value[1]
+            self.exp = value[2]
+
+    def __repr__(self):
+        return "(%r, %r, %r)" % (self.sign, self.int, self.exp)
+
+    __str__ = __repr__
+
+
+
+def _normalize(op1, op2, shouldround = 0, prec = 0):
+    """Normalizes op1, op2 to have the same exp and length of coefficient.
+
+    Done during addition.
+
+    Both operands are _WorkRep instances; they are mutated in place and
+    also returned.
+    """
+    # Yes, the exponent is a long, but the difference between exponents
+    # must be an int-- otherwise you'd get a big memory problem.
+    numdigits = int(op1.exp - op2.exp)
+    if numdigits < 0:
+        # tmp is always the operand with the larger exponent.
+        numdigits = -numdigits
+        tmp = op2
+        other = op1
+    else:
+        tmp = op1
+        other = op2
+
+
+    if shouldround and numdigits > prec + 1:
+        # Big difference in exponents - check the adjusted exponents
+        tmp_len = len(str(tmp.int))
+        other_len = len(str(other.int))
+        if numdigits > (other_len + prec + 1 - tmp_len):
+            # If the difference in adjusted exps is > prec+1, we know
+            # other is insignificant, so might as well put a 1 after the precision.
+            # (since this is only for addition.)  Also stops use of massive longs.
+
+            extend = prec + 2 - tmp_len
+            if extend <= 0:
+                extend = 1
+            tmp.int *= 10 ** extend
+            tmp.exp -= extend
+            other.int = 1
+            other.exp = tmp.exp
+            return op1, op2
+
+    # Shift tmp's coefficient left so both operands share other's exponent.
+    tmp.int *= 10 ** numdigits
+    tmp.exp -= numdigits
+    return op1, op2
+
+def _adjust_coefficients(op1, op2):
+    """Adjust op1, op2 so that op2.int * 10 > op1.int >= op2.int.
+
+    Returns the adjusted op1, op2 as well as the change in op1.exp-op2.exp.
+
+    Used on _WorkRep instances during division.  Both operands are mutated
+    in place.
+    """
+    adjust = 0
+    #If op1 is smaller, make it larger
+    while op2.int > op1.int:
+        op1.int *= 10
+        op1.exp -= 1
+        adjust += 1
+
+    #If op2 is too small, make it larger
+    while op1.int >= (10 * op2.int):
+        op2.int *= 10
+        op2.exp -= 1
+        adjust -= 1
+
+    return op1, op2, adjust
+
+##### Helper Functions ########################################
+
+def _convert_other(other):
+    """Convert other to Decimal.
+
+    Verifies that it's ok to use in an implicit construction.
+
+    Returns NotImplemented (rather than raising) for unsupported types so
+    that binary operators can fall back to the reflected operation.
+    """
+    if isinstance(other, Decimal):
+        return other
+    if isinstance(other, (int, long)):
+        return Decimal(other)
+    return NotImplemented
+
+# Lowercase spellings of infinity mapped to their sign (+1 / -1);
+# consulted by _isinfinity() below.
+_infinity_map = {
+    'inf' : 1,
+    'infinity' : 1,
+    '+inf' : 1,
+    '+infinity' : 1,
+    '-inf' : -1,
+    '-infinity' : -1
+}
+
+def _isinfinity(num):
+    """Determines whether a string or float is infinity.
+
+    -1 for negative infinity; 0 for finite; +1 for positive infinity
+    """
+    num = str(num).lower()
+    return _infinity_map.get(num, 0)
+
+def _isnan(num):
+    """Determines whether a string or float is NaN
+
+    (1, sign, diagnostic info as string) => NaN
+    (2, sign, diagnostic info as string) => sNaN
+    0 => not a NaN
+    """
+    num = str(num).lower()
+    if not num:
+        return 0
+
+    #get the sign, get rid of trailing [+-]
+    sign = 0
+    if num[0] == '+':
+        num = num[1:]
+    elif num[0] == '-':  #elif avoids '+-nan'
+        num = num[1:]
+        sign = 1
+
+    if num.startswith('nan'):
+        if len(num) > 3 and not num[3:].isdigit(): #diagnostic info
+            return 0
+        # Leading zeros carry no information in the diagnostic payload.
+        return (1, sign, num[3:].lstrip('0'))
+    if num.startswith('snan'):
+        if len(num) > 4 and not num[4:].isdigit():
+            return 0
+        return (2, sign, num[4:].lstrip('0'))
+    return 0
+
+
+##### Setup Specific Contexts ################################
+
+# The default context prototype used by Context()
+# Is mutable, so that new contexts can have different default values
+
+# Signals listed in `traps` raise an exception when they occur; untrapped
+# signals only set the corresponding flag on the context.
+DefaultContext = Context(
+        prec=28, rounding=ROUND_HALF_EVEN,
+        traps=[DivisionByZero, Overflow, InvalidOperation],
+        flags=[],
+        _rounding_decision=ALWAYS_ROUND,
+        Emax=999999999,
+        Emin=-999999999,
+        capitals=1
+)
+
+# Pre-made alternate contexts offered by the specification
+# Don't change these; the user should be able to select these
+# contexts and be able to reproduce results from other implementations
+# of the spec.
+
+BasicContext = Context(
+        prec=9, rounding=ROUND_HALF_UP,
+        traps=[DivisionByZero, Overflow, InvalidOperation, Clamped, Underflow],
+        flags=[],
+)
+
+# ExtendedContext traps nothing, so exceptional conditions produce
+# NaN/Infinity results instead of raising; used by the doctests above.
+ExtendedContext = Context(
+        prec=9, rounding=ROUND_HALF_EVEN,
+        traps=[],
+        flags=[],
+)
+
+
+##### Useful Constants (internal use only) ####################
+
+#Reusable defaults
+Inf = Decimal('Inf')
+negInf = Decimal('-Inf')
+
+#Infsign[sign] is infinity w/ that sign
+Infsign = (Inf, negInf)
+
+NaN = Decimal('NaN')
+
+
+##### crud for parsing strings #################################
+import re
+
+# There's an optional sign at the start, and an optional exponent
+# at the end.  The exponent has an optional sign and at least one
+# digit.  In between, must have either at least one digit followed
+# by an optional fraction, or a decimal point followed by at least
+# one digit.  Yuck.
+
+# Bound match() method: _parser(s) returns a match object or None.
+_parser = re.compile(r"""
+#    \s*
+    (?P<sign>[-+])?
+    (
+        (?P<int>\d+) (\. (?P<frac>\d*))?
+    |
+        \. (?P<onlyfrac>\d+)
+    )
+    ([eE](?P<exp>[-+]? \d+))?
+#    \s*
+    $
+""", re.VERBOSE).match #Uncomment the \s* to allow leading or trailing spaces.
+
+# Keep the module namespace clean; re is only needed at import time.
+del re
+
+# return sign, n, p s.t. float string value == -1**sign * n * 10**p exactly
+
+def _string2exact(s):
+    """Parse a numeric literal into (sign, coefficient-digit-tuple, exponent).
+
+    Raises ValueError if s is not a valid Decimal literal.
+    """
+    m = _parser(s)
+    if m is None:
+        raise ValueError("invalid literal for Decimal: %r" % s)
+
+    if m.group('sign') == "-":
+        sign = 1
+    else:
+        sign = 0
+
+    exp = m.group('exp')
+    if exp is None:
+        exp = 0
+    else:
+        exp = int(exp)
+
+    intpart = m.group('int')
+    if intpart is None:
+        intpart = ""
+        fracpart = m.group('onlyfrac')
+    else:
+        fracpart = m.group('frac')
+        if fracpart is None:
+            fracpart = ""
+
+    # Fold the fractional digits into the coefficient by lowering the exp.
+    exp -= len(fracpart)
+
+    mantissa = intpart + fracpart
+    tmp = map(int, mantissa)
+    backup = tmp
+    # NOTE(review): `backup` aliases `tmp` (no copy), so deleting leading
+    # zeros below also empties `backup`; the `if backup:` branch can never
+    # return a non-empty tuple.  Harmless as written, but confirm intent.
+    while tmp and tmp[0] == 0:
+        del tmp[0]
+
+    # It's a zero
+    if not tmp:
+        if backup:
+            return (sign, tuple(backup), exp)
+        return (sign, (0,), exp)
+    mantissa = tuple(tmp)
+
+    return (sign, mantissa, exp)
+
+
+if __name__ == '__main__':
+    # Run the doctests embedded in the docstrings above.
+    import doctest, sys
+    doctest.testmod(sys.modules[__name__])
diff --git a/depot_tools/release/win/python_24/Lib/difflib.py b/depot_tools/release/win/python_24/Lib/difflib.py
new file mode 100644
index 0000000..590785f3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/difflib.py
@@ -0,0 +1,1990 @@
+#! /usr/bin/env python
+
+"""
+Module difflib -- helpers for computing deltas between objects.
+
+Function get_close_matches(word, possibilities, n=3, cutoff=0.6):
+    Use SequenceMatcher to return list of the best "good enough" matches.
+
+Function context_diff(a, b):
+    For two lists of strings, return a delta in context diff format.
+
+Function ndiff(a, b):
+    Return a delta: the difference between `a` and `b` (lists of strings).
+
+Function restore(delta, which):
+    Return one of the two sequences that generated an ndiff delta.
+
+Function unified_diff(a, b):
+    For two lists of strings, return a delta in unified diff format.
+
+Class SequenceMatcher:
+    A flexible class for comparing pairs of sequences of any type.
+
+Class Differ:
+    For producing human-readable deltas from sequences of lines of text.
+
+Class HtmlDiff:
+    For producing HTML side by side comparison with change highlights.
+"""
+
+__all__ = ['get_close_matches', 'ndiff', 'restore', 'SequenceMatcher',
+           'Differ','IS_CHARACTER_JUNK', 'IS_LINE_JUNK', 'context_diff',
+           'unified_diff', 'HtmlDiff']
+
+import heapq
+
+def _calculate_ratio(matches, length):
+    # Similarity ratio 2.0*M / T; defined as 1.0 when both sequences are
+    # empty (length == 0).
+    if length:
+        return 2.0 * matches / length
+    return 1.0
+
+class SequenceMatcher:
+
+    """
+    SequenceMatcher is a flexible class for comparing pairs of sequences of
+    any type, so long as the sequence elements are hashable.  The basic
+    algorithm predates, and is a little fancier than, an algorithm
+    published in the late 1980's by Ratcliff and Obershelp under the
+    hyperbolic name "gestalt pattern matching".  The basic idea is to find
+    the longest contiguous matching subsequence that contains no "junk"
+    elements (R-O doesn't address junk).  The same idea is then applied
+    recursively to the pieces of the sequences to the left and to the right
+    of the matching subsequence.  This does not yield minimal edit
+    sequences, but does tend to yield matches that "look right" to people.
+
+    SequenceMatcher tries to compute a "human-friendly diff" between two
+    sequences.  Unlike e.g. UNIX(tm) diff, the fundamental notion is the
+    longest *contiguous* & junk-free matching subsequence.  That's what
+    catches peoples' eyes.  The Windows(tm) windiff has another interesting
+    notion, pairing up elements that appear uniquely in each sequence.
+    That, and the method here, appear to yield more intuitive difference
+    reports than does diff.  This method appears to be the least vulnerable
+    to synching up on blocks of "junk lines", though (like blank lines in
+    ordinary text files, or maybe "<P>" lines in HTML files).  That may be
+    because this is the only method of the 3 that has a *concept* of
+    "junk" <wink>.
+
+    Example, comparing two strings, and considering blanks to be "junk":
+
+    >>> s = SequenceMatcher(lambda x: x == " ",
+    ...                     "private Thread currentThread;",
+    ...                     "private volatile Thread currentThread;")
+    >>>
+
+    .ratio() returns a float in [0, 1], measuring the "similarity" of the
+    sequences.  As a rule of thumb, a .ratio() value over 0.6 means the
+    sequences are close matches:
+
+    >>> print round(s.ratio(), 3)
+    0.866
+    >>>
+
+    If you're only interested in where the sequences match,
+    .get_matching_blocks() is handy:
+
+    >>> for block in s.get_matching_blocks():
+    ...     print "a[%d] and b[%d] match for %d elements" % block
+    a[0] and b[0] match for 8 elements
+    a[8] and b[17] match for 6 elements
+    a[14] and b[23] match for 15 elements
+    a[29] and b[38] match for 0 elements
+
+    Note that the last tuple returned by .get_matching_blocks() is always a
+    dummy, (len(a), len(b), 0), and this is the only case in which the last
+    tuple element (number of elements matched) is 0.
+
+    If you want to know how to change the first sequence into the second,
+    use .get_opcodes():
+
+    >>> for opcode in s.get_opcodes():
+    ...     print "%6s a[%d:%d] b[%d:%d]" % opcode
+     equal a[0:8] b[0:8]
+    insert a[8:8] b[8:17]
+     equal a[8:14] b[17:23]
+     equal a[14:29] b[23:38]
+
+    See the Differ class for a fancy human-friendly file differencer, which
+    uses SequenceMatcher both to compare sequences of lines, and to compare
+    sequences of characters within similar (near-matching) lines.
+
+    See also function get_close_matches() in this module, which shows how
+    simple code building on SequenceMatcher can be used to do useful work.
+
+    Timing:  Basic R-O is cubic time worst case and quadratic time expected
+    case.  SequenceMatcher is quadratic time for the worst case and has
+    expected-case behavior dependent in a complicated way on how many
+    elements the sequences have in common; best case time is linear.
+
+    Methods:
+
+    __init__(isjunk=None, a='', b='')
+        Construct a SequenceMatcher.
+
+    set_seqs(a, b)
+        Set the two sequences to be compared.
+
+    set_seq1(a)
+        Set the first sequence to be compared.
+
+    set_seq2(b)
+        Set the second sequence to be compared.
+
+    find_longest_match(alo, ahi, blo, bhi)
+        Find longest matching block in a[alo:ahi] and b[blo:bhi].
+
+    get_matching_blocks()
+        Return list of triples describing matching subsequences.
+
+    get_opcodes()
+        Return list of 5-tuples describing how to turn a into b.
+
+    ratio()
+        Return a measure of the sequences' similarity (float in [0,1]).
+
+    quick_ratio()
+        Return an upper bound on .ratio() relatively quickly.
+
+    real_quick_ratio()
+        Return an upper bound on ratio() very quickly.
+    """
+
+    def __init__(self, isjunk=None, a='', b=''):
+        """Construct a SequenceMatcher.
+
+        Optional arg isjunk is None (the default), or a one-argument
+        function that takes a sequence element and returns true iff the
+        element is junk.  None is equivalent to passing "lambda x: 0", i.e.
+        no elements are considered to be junk.  For example, pass
+            lambda x: x in " \\t"
+        if you're comparing lines as sequences of characters, and don't
+        want to synch up on blanks or hard tabs.
+
+        Optional arg a is the first of two sequences to be compared.  By
+        default, an empty string.  The elements of a must be hashable.  See
+        also .set_seqs() and .set_seq1().
+
+        Optional arg b is the second of two sequences to be compared.  By
+        default, an empty string.  The elements of b must be hashable. See
+        also .set_seqs() and .set_seq2().
+        """
+
+        # Members:
+        # a
+        #      first sequence
+        # b
+        #      second sequence; differences are computed as "what do
+        #      we need to do to 'a' to change it into 'b'?"
+        # b2j
+        #      for x in b, b2j[x] is a list of the indices (into b)
+        #      at which x appears; junk elements do not appear
+        # fullbcount
+        #      for x in b, fullbcount[x] == the number of times x
+        #      appears in b; only materialized if really needed (used
+        #      only for computing quick_ratio())
+        # matching_blocks
+        #      a list of (i, j, k) triples, where a[i:i+k] == b[j:j+k];
+        #      ascending & non-overlapping in i and in j; terminated by
+        #      a dummy (len(a), len(b), 0) sentinel
+        # opcodes
+        #      a list of (tag, i1, i2, j1, j2) tuples, where tag is
+        #      one of
+        #          'replace'   a[i1:i2] should be replaced by b[j1:j2]
+        #          'delete'    a[i1:i2] should be deleted
+        #          'insert'    b[j1:j2] should be inserted
+        #          'equal'     a[i1:i2] == b[j1:j2]
+        # isjunk
+        #      a user-supplied function taking a sequence element and
+        #      returning true iff the element is "junk" -- this has
+        #      subtle but helpful effects on the algorithm, which I'll
+        #      get around to writing up someday <0.9 wink>.
+        #      DON'T USE!  Only __chain_b uses this.  Use isbjunk.
+        # isbjunk
+        #      for x in b, isbjunk(x) == isjunk(x) but much faster;
+        #      it's really the has_key method of a hidden dict.
+        #      DOES NOT WORK for x in a!
+        # isbpopular
+        #      for x in b, isbpopular(x) is true iff b is reasonably long
+        #      (at least 200 elements) and x accounts for more than 1% of
+        #      its elements.  DOES NOT WORK for x in a!
+
+        self.isjunk = isjunk
+        # set_seqs() populates the derived members (b2j, etc.).
+        self.a = self.b = None
+        self.set_seqs(a, b)
+
+    def set_seqs(self, a, b):
+        """Set the two sequences to be compared.
+
+        >>> s = SequenceMatcher()
+        >>> s.set_seqs("abcd", "bcde")
+        >>> s.ratio()
+        0.75
+        """
+
+        # Order matters: set_seq2 does the expensive preprocessing.
+        self.set_seq1(a)
+        self.set_seq2(b)
+
+    def set_seq1(self, a):
+        """Set the first sequence to be compared.
+
+        The second sequence to be compared is not changed.
+
+        >>> s = SequenceMatcher(None, "abcd", "bcde")
+        >>> s.ratio()
+        0.75
+        >>> s.set_seq1("bcde")
+        >>> s.ratio()
+        1.0
+        >>>
+
+        SequenceMatcher computes and caches detailed information about the
+        second sequence, so if you want to compare one sequence S against
+        many sequences, use .set_seq2(S) once and call .set_seq1(x)
+        repeatedly for each of the other sequences.
+
+        See also set_seqs() and set_seq2().
+        """
+
+        # Identity (not equality) check: cheap fast-path for re-setting
+        # the same object.
+        if a is self.a:
+            return
+        self.a = a
+        # Invalidate cached results derived from the old pairing.
+        self.matching_blocks = self.opcodes = None
+
+    def set_seq2(self, b):
+        """Set the second sequence to be compared.
+
+        The first sequence to be compared is not changed.
+
+        >>> s = SequenceMatcher(None, "abcd", "bcde")
+        >>> s.ratio()
+        0.75
+        >>> s.set_seq2("abcd")
+        >>> s.ratio()
+        1.0
+        >>>
+
+        SequenceMatcher computes and caches detailed information about the
+        second sequence, so if you want to compare one sequence S against
+        many sequences, use .set_seq2(S) once and call .set_seq1(x)
+        repeatedly for each of the other sequences.
+
+        See also set_seqs() and set_seq1().
+        """
+
+        if b is self.b:
+            return
+        self.b = b
+        # Invalidate caches, then rebuild the b-derived index structures.
+        self.matching_blocks = self.opcodes = None
+        self.fullbcount = None
+        self.__chain_b()
+
+    # For each element x in b, set b2j[x] to a list of the indices in
+    # b where x appears; the indices are in increasing order; note that
+    # the number of times x appears in b is len(b2j[x]) ...
+    # when self.isjunk is defined, junk elements don't show up in this
+    # map at all, which stops the central find_longest_match method
+    # from starting any matching block at a junk element ...
+    # also creates the fast isbjunk function ...
+    # b2j also does not contain entries for "popular" elements, meaning
+    # elements that account for more than 1% of the total elements, and
+    # when the sequence is reasonably large (>= 200 elements); this can
+    # be viewed as an adaptive notion of semi-junk, and yields an enormous
+    # speedup when, e.g., comparing program files with hundreds of
+    # instances of "return NULL;" ...
+    # note that this is only called when b changes; so for cross-product
+    # kinds of matches, it's best to call set_seq2 once, then set_seq1
+    # repeatedly
+
+    def __chain_b(self):
+        # Because isjunk is a user-defined (not C) function, and we test
+        # for junk a LOT, it's important to minimize the number of calls.
+        # Before the tricks described here, __chain_b was by far the most
+        # time-consuming routine in the whole module!  If anyone sees
+        # Jim Roskind, thank him again for profile.py -- I never would
+        # have guessed that.
+        # The first trick is to build b2j ignoring the possibility
+        # of junk.  I.e., we don't call isjunk at all yet.  Throwing
+        # out the junk later is much cheaper than building b2j "right"
+        # from the start.
+        b = self.b
+        n = len(b)
+        self.b2j = b2j = {}
+        populardict = {}
+        for i, elt in enumerate(b):
+            if elt in b2j:
+                indices = b2j[elt]
+                # "Popular" = appears in more than 1% of a sequence of at
+                # least 200 elements; drop its indices to keep b2j small.
+                if n >= 200 and len(indices) * 100 > n:
+                    populardict[elt] = 1
+                    del indices[:]
+                else:
+                    indices.append(i)
+            else:
+                b2j[elt] = [i]
+
+        # Purge leftover indices for popular elements.
+        for elt in populardict:
+            del b2j[elt]
+
+        # Now b2j.keys() contains elements uniquely, and especially when
+        # the sequence is a string, that's usually a good deal smaller
+        # than len(string).  The difference is the number of isjunk calls
+        # saved.
+        isjunk = self.isjunk
+        junkdict = {}
+        if isjunk:
+            for d in populardict, b2j:
+                for elt in d.keys():
+                    if isjunk(elt):
+                        junkdict[elt] = 1
+                        del d[elt]
+
+        # Now for x in b, isjunk(x) == x in junkdict, but the
+        # latter is much faster.  Note too that while there may be a
+        # lot of junk in the sequence, the number of *unique* junk
+        # elements is probably small.  So the memory burden of keeping
+        # this dict alive is likely trivial compared to the size of b2j.
+        # Bound methods: isbjunk/isbpopular are the dicts' has_key methods.
+        self.isbjunk = junkdict.has_key
+        self.isbpopular = populardict.has_key
+
+    def find_longest_match(self, alo, ahi, blo, bhi):
+        """Find longest matching block in a[alo:ahi] and b[blo:bhi].
+
+        If isjunk is not defined:
+
+        Return (i,j,k) such that a[i:i+k] is equal to b[j:j+k], where
+            alo <= i <= i+k <= ahi
+            blo <= j <= j+k <= bhi
+        and for all (i',j',k') meeting those conditions,
+            k >= k'
+            i <= i'
+            and if i == i', j <= j'
+
+        In other words, of all maximal matching blocks, return one that
+        starts earliest in a, and of all those maximal matching blocks that
+        start earliest in a, return the one that starts earliest in b.
+
+        >>> s = SequenceMatcher(None, " abcd", "abcd abcd")
+        >>> s.find_longest_match(0, 5, 0, 9)
+        (0, 4, 5)
+
+        If isjunk is defined, first the longest matching block is
+        determined as above, but with the additional restriction that no
+        junk element appears in the block.  Then that block is extended as
+        far as possible by matching (only) junk elements on both sides.  So
+        the resulting block never matches on junk except as identical junk
+        happens to be adjacent to an "interesting" match.
+
+        Here's the same example as before, but considering blanks to be
+        junk.  That prevents " abcd" from matching the " abcd" at the tail
+        end of the second sequence directly.  Instead only the "abcd" can
+        match, and matches the leftmost "abcd" in the second sequence:
+
+        >>> s = SequenceMatcher(lambda x: x==" ", " abcd", "abcd abcd")
+        >>> s.find_longest_match(0, 5, 0, 9)
+        (1, 0, 4)
+
+        If no blocks match, return (alo, blo, 0).
+
+        >>> s = SequenceMatcher(None, "ab", "c")
+        >>> s.find_longest_match(0, 2, 0, 1)
+        (0, 0, 0)
+        """
+
+        # CAUTION:  stripping common prefix or suffix would be incorrect.
+        # E.g.,
+        #    ab
+        #    acab
+        # Longest matching block is "ab", but if common prefix is
+        # stripped, it's "a" (tied with "b").  UNIX(tm) diff does so
+        # strip, so ends up claiming that ab is changed to acab by
+        # inserting "ca" in the middle.  That's minimal but unintuitive:
+        # "it's obvious" that someone inserted "ac" at the front.
+        # Windiff ends up at the same place as diff, but by pairing up
+        # the unique 'b's and then matching the first two 'a's.
+
+        # Bind to locals: these are touched on every inner-loop iteration.
+        a, b, b2j, isbjunk = self.a, self.b, self.b2j, self.isbjunk
+        besti, bestj, bestsize = alo, blo, 0
+        # find longest junk-free match
+        # during an iteration of the loop, j2len[j] = length of longest
+        # junk-free match ending with a[i-1] and b[j]
+        j2len = {}
+        nothing = []
+        for i in xrange(alo, ahi):
+            # look at all instances of a[i] in b; note that because
+            # b2j has no junk keys, the loop is skipped if a[i] is junk
+            j2lenget = j2len.get
+            newj2len = {}
+            for j in b2j.get(a[i], nothing):
+                # a[i] matches b[j]
+                if j < blo:
+                    continue
+                if j >= bhi:
+                    break
+                # Extend the match ending at (i-1, j-1), or start a new one.
+                k = newj2len[j] = j2lenget(j-1, 0) + 1
+                if k > bestsize:
+                    besti, bestj, bestsize = i-k+1, j-k+1, k
+            j2len = newj2len
+
+        # Extend the best by non-junk elements on each end.  In particular,
+        # "popular" non-junk elements aren't in b2j, which greatly speeds
+        # the inner loop above, but also means "the best" match so far
+        # doesn't contain any junk *or* popular non-junk elements.
+        while besti > alo and bestj > blo and \
+              not isbjunk(b[bestj-1]) and \
+              a[besti-1] == b[bestj-1]:
+            besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+        while besti+bestsize < ahi and bestj+bestsize < bhi and \
+              not isbjunk(b[bestj+bestsize]) and \
+              a[besti+bestsize] == b[bestj+bestsize]:
+            bestsize += 1
+
+        # Now that we have a wholly interesting match (albeit possibly
+        # empty!), we may as well suck up the matching junk on each
+        # side of it too.  Can't think of a good reason not to, and it
+        # saves post-processing the (possibly considerable) expense of
+        # figuring out what to do with it.  In the case of an empty
+        # interesting match, this is clearly the right thing to do,
+        # because no other kind of match is possible in the regions.
+        while besti > alo and bestj > blo and \
+              isbjunk(b[bestj-1]) and \
+              a[besti-1] == b[bestj-1]:
+            besti, bestj, bestsize = besti-1, bestj-1, bestsize+1
+        while besti+bestsize < ahi and bestj+bestsize < bhi and \
+              isbjunk(b[bestj+bestsize]) and \
+              a[besti+bestsize] == b[bestj+bestsize]:
+            bestsize = bestsize + 1
+
+        return besti, bestj, bestsize
+
+    def get_matching_blocks(self):
+        """Return list of triples describing matching subsequences.
+
+        Each triple is of the form (i, j, n), and means that
+        a[i:i+n] == b[j:j+n].  The triples are monotonically increasing in
+        i and in j.
+
+        The last triple is a dummy, (len(a), len(b), 0), and is the only
+        triple with n==0.
+
+        >>> s = SequenceMatcher(None, "abxcd", "abcd")
+        >>> s.get_matching_blocks()
+        [(0, 0, 2), (3, 2, 2), (5, 4, 0)]
+        """
+
+        # Cached result; set_seq1/set_seq2 reset this to None.
+        if self.matching_blocks is not None:
+            return self.matching_blocks
+        self.matching_blocks = []
+        la, lb = len(self.a), len(self.b)
+        # Recursively collect blocks in increasing order, then append the
+        # (len(a), len(b), 0) sentinel.
+        self.__helper(0, la, 0, lb, self.matching_blocks)
+        self.matching_blocks.append( (la, lb, 0) )
+        return self.matching_blocks
+
+    # builds list of matching blocks covering a[alo:ahi] and
+    # b[blo:bhi], appending them in increasing order to answer
+
+    def __helper(self, alo, ahi, blo, bhi, answer):
+        i, j, k = x = self.find_longest_match(alo, ahi, blo, bhi)
+        # a[alo:i] vs b[blo:j] unknown
+        # a[i:i+k] same as b[j:j+k]
+        # a[i+k:ahi] vs b[j+k:bhi] unknown
+        if k:
+            if alo < i and blo < j:
+                self.__helper(alo, i, blo, j, answer)
+            answer.append(x)
+            if i+k < ahi and j+k < bhi:
+                self.__helper(i+k, ahi, j+k, bhi, answer)
+
+    def get_opcodes(self):
+        """Return list of 5-tuples describing how to turn a into b.
+
+        Each tuple is of the form (tag, i1, i2, j1, j2).  The first tuple
+        has i1 == j1 == 0, and remaining tuples have i1 == the i2 from the
+        tuple preceding it, and likewise for j1 == the previous j2.
+
+        The tags are strings, with these meanings:
+
+        'replace':  a[i1:i2] should be replaced by b[j1:j2]
+        'delete':   a[i1:i2] should be deleted.
+                    Note that j1==j2 in this case.
+        'insert':   b[j1:j2] should be inserted at a[i1:i1].
+                    Note that i1==i2 in this case.
+        'equal':    a[i1:i2] == b[j1:j2]
+
+        >>> a = "qabxcd"
+        >>> b = "abycdf"
+        >>> s = SequenceMatcher(None, a, b)
+        >>> for tag, i1, i2, j1, j2 in s.get_opcodes():
+        ...    print ("%7s a[%d:%d] (%s) b[%d:%d] (%s)" %
+        ...           (tag, i1, i2, a[i1:i2], j1, j2, b[j1:j2]))
+         delete a[0:1] (q) b[0:0] ()
+          equal a[1:3] (ab) b[0:2] (ab)
+        replace a[3:4] (x) b[2:3] (y)
+          equal a[4:6] (cd) b[3:5] (cd)
+         insert a[6:6] () b[5:6] (f)
+        """
+
+        if self.opcodes is not None:
+            return self.opcodes
+        i = j = 0
+        self.opcodes = answer = []
+        for ai, bj, size in self.get_matching_blocks():
+            # invariant:  we've pumped out correct diffs to change
+            # a[:i] into b[:j], and the next matching block is
+            # a[ai:ai+size] == b[bj:bj+size].  So we need to pump
+            # out a diff to change a[i:ai] into b[j:bj], pump out
+            # the matching block, and move (i,j) beyond the match
+            tag = ''
+            if i < ai and j < bj:
+                tag = 'replace'
+            elif i < ai:
+                tag = 'delete'
+            elif j < bj:
+                tag = 'insert'
+            if tag:
+                answer.append( (tag, i, ai, j, bj) )
+            i, j = ai+size, bj+size
+            # the list of matching blocks is terminated by a
+            # sentinel with size 0
+            if size:
+                answer.append( ('equal', ai, i, bj, j) )
+        return answer
+
+    def get_grouped_opcodes(self, n=3):
+        """ Isolate change clusters by eliminating ranges with no changes.
+
+        Return a generator of groups with upto n lines of context.
+        Each group is in the same format as returned by get_opcodes().
+
+        >>> from pprint import pprint
+        >>> a = map(str, range(1,40))
+        >>> b = a[:]
+        >>> b[8:8] = ['i']     # Make an insertion
+        >>> b[20] += 'x'       # Make a replacement
+        >>> b[23:28] = []      # Make a deletion
+        >>> b[30] += 'y'       # Make another replacement
+        >>> pprint(list(SequenceMatcher(None,a,b).get_grouped_opcodes()))
+        [[('equal', 5, 8, 5, 8), ('insert', 8, 8, 8, 9), ('equal', 8, 11, 9, 12)],
+         [('equal', 16, 19, 17, 20),
+          ('replace', 19, 20, 20, 21),
+          ('equal', 20, 22, 21, 23),
+          ('delete', 22, 27, 23, 23),
+          ('equal', 27, 30, 23, 26)],
+         [('equal', 31, 34, 27, 30),
+          ('replace', 34, 35, 30, 31),
+          ('equal', 35, 38, 31, 34)]]
+        """
+
+        codes = self.get_opcodes()
+        if not codes:
+            codes = [("equal", 0, 1, 0, 1)]
+        # Fixup leading and trailing groups if they show no changes.
+        if codes[0][0] == 'equal':
+            tag, i1, i2, j1, j2 = codes[0]
+            codes[0] = tag, max(i1, i2-n), i2, max(j1, j2-n), j2
+        if codes[-1][0] == 'equal':
+            tag, i1, i2, j1, j2 = codes[-1]
+            codes[-1] = tag, i1, min(i2, i1+n), j1, min(j2, j1+n)
+
+        nn = n + n
+        group = []
+        for tag, i1, i2, j1, j2 in codes:
+            # End the current group and start a new one whenever
+            # there is a large range with no changes.
+            if tag == 'equal' and i2-i1 > nn:
+                group.append((tag, i1, min(i2, i1+n), j1, min(j2, j1+n)))
+                yield group
+                group = []
+                i1, j1 = max(i1, i2-n), max(j1, j2-n)
+            group.append((tag, i1, i2, j1 ,j2))
+        if group and not (len(group)==1 and group[0][0] == 'equal'):
+            yield group
+
+    def ratio(self):
+        """Return a measure of the sequences' similarity (float in [0,1]).
+
+        Where T is the total number of elements in both sequences, and
+        M is the number of matches, this is 2.0*M / T.
+        Note that this is 1 if the sequences are identical, and 0 if
+        they have nothing in common.
+
+        .ratio() is expensive to compute if you haven't already computed
+        .get_matching_blocks() or .get_opcodes(), in which case you may
+        want to try .quick_ratio() or .real_quick_ratio() first to get an
+        upper bound.
+
+        >>> s = SequenceMatcher(None, "abcd", "bcde")
+        >>> s.ratio()
+        0.75
+        >>> s.quick_ratio()
+        0.75
+        >>> s.real_quick_ratio()
+        1.0
+        """
+
+        matches = reduce(lambda sum, triple: sum + triple[-1],
+                         self.get_matching_blocks(), 0)
+        return _calculate_ratio(matches, len(self.a) + len(self.b))
+
+    def quick_ratio(self):
+        """Return an upper bound on ratio() relatively quickly.
+
+        This isn't defined beyond that it is an upper bound on .ratio(), and
+        is faster to compute.
+        """
+
+        # viewing a and b as multisets, set matches to the cardinality
+        # of their intersection; this counts the number of matches
+        # without regard to order, so is clearly an upper bound
+        if self.fullbcount is None:
+            self.fullbcount = fullbcount = {}
+            for elt in self.b:
+                fullbcount[elt] = fullbcount.get(elt, 0) + 1
+        fullbcount = self.fullbcount
+        # avail[x] is the number of times x appears in 'b' less the
+        # number of times we've seen it in 'a' so far ... kinda
+        avail = {}
+        availhas, matches = avail.has_key, 0
+        for elt in self.a:
+            if availhas(elt):
+                numb = avail[elt]
+            else:
+                numb = fullbcount.get(elt, 0)
+            avail[elt] = numb - 1
+            if numb > 0:
+                matches = matches + 1
+        return _calculate_ratio(matches, len(self.a) + len(self.b))
+
+    def real_quick_ratio(self):
+        """Return an upper bound on ratio() very quickly.
+
+        This isn't defined beyond that it is an upper bound on .ratio(), and
+        is faster to compute than either .ratio() or .quick_ratio().
+        """
+
+        la, lb = len(self.a), len(self.b)
+        # can't have more matches than the number of elements in the
+        # shorter sequence
+        return _calculate_ratio(min(la, lb), la + lb)
+
def get_close_matches(word, possibilities, n=3, cutoff=0.6):
    """Use SequenceMatcher to return list of the best "good enough" matches.

    word is a sequence for which close matches are desired (typically a
    string).

    possibilities is a list of sequences against which to match word
    (typically a list of strings).

    Optional arg n (default 3) is the maximum number of close matches to
    return.  n must be > 0.

    Optional arg cutoff (default 0.6) is a float in [0, 1].  Possibilities
    that don't score at least that similar to word are ignored.

    The best (no more than n) matches among the possibilities are returned
    in a list, sorted by similarity score, most similar first.

    >>> get_close_matches("appel", ["ape", "apple", "peach", "puppy"])
    ['apple', 'ape']
    >>> import keyword as _keyword
    >>> get_close_matches("wheel", _keyword.kwlist)
    ['while']
    >>> get_close_matches("apple", _keyword.kwlist)
    []
    >>> get_close_matches("accept", _keyword.kwlist)
    ['except']
    """
    if n <= 0:
        raise ValueError("n must be > 0: %r" % (n,))
    if not 0.0 <= cutoff <= 1.0:
        raise ValueError("cutoff must be in [0.0, 1.0]: %r" % (cutoff,))
    matcher = SequenceMatcher()
    matcher.set_seq2(word)
    scored = []
    for candidate in possibilities:
        matcher.set_seq1(candidate)
        # Cheap upper bounds first; the expensive .ratio() only runs
        # when the candidate survives both quick tests.
        if matcher.real_quick_ratio() >= cutoff and \
           matcher.quick_ratio() >= cutoff and \
           matcher.ratio() >= cutoff:
            scored.append((matcher.ratio(), candidate))

    # Keep the best n scorers, most similar first, and drop the scores.
    best = heapq.nlargest(n, scored)
    return [candidate for score, candidate in best]
+
+def _count_leading(line, ch):
+    """
+    Return number of `ch` characters at the start of `line`.
+
+    Example:
+
+    >>> _count_leading('   abc', ' ')
+    3
+    """
+
+    i, n = 0, len(line)
+    while i < n and line[i] == ch:
+        i += 1
+    return i
+
class Differ:
    r"""
    Differ is a class for comparing sequences of lines of text, and
    producing human-readable differences or deltas.  Differ uses
    SequenceMatcher both to compare sequences of lines, and to compare
    sequences of characters within similar (near-matching) lines.

    Each line of a Differ delta begins with a two-letter code:

        '- '    line unique to sequence 1
        '+ '    line unique to sequence 2
        '  '    line common to both sequences
        '? '    line not present in either input sequence

    Lines beginning with '? ' attempt to guide the eye to intraline
    differences, and were not present in either input sequence.  These lines
    can be confusing if the sequences contain tab characters.

    Note that Differ makes no claim to produce a *minimal* diff.  To the
    contrary, minimal diffs are often counter-intuitive, because they synch
    up anywhere possible, sometimes accidental matches 100 pages apart.
    Restricting synch points to contiguous matches preserves some notion of
    locality, at the occasional cost of producing a longer diff.

    Example: Comparing two texts.

    First we set up the texts, sequences of individual single-line strings
    ending with newlines (such sequences can also be obtained from the
    `readlines()` method of file-like objects):

    >>> text1 = '''  1. Beautiful is better than ugly.
    ...   2. Explicit is better than implicit.
    ...   3. Simple is better than complex.
    ...   4. Complex is better than complicated.
    ... '''.splitlines(1)
    >>> len(text1)
    4
    >>> text1[0][-1]
    '\n'
    >>> text2 = '''  1. Beautiful is better than ugly.
    ...   3.   Simple is better than complex.
    ...   4. Complicated is better than complex.
    ...   5. Flat is better than nested.
    ... '''.splitlines(1)

    Next we instantiate a Differ object:

    >>> d = Differ()

    Note that when instantiating a Differ object we may pass functions to
    filter out line and character 'junk'.  See Differ.__init__ for details.

    Finally, we compare the two:

    >>> result = list(d.compare(text1, text2))

    'result' is a list of strings, so let's pretty-print it:

    >>> from pprint import pprint as _pprint
    >>> _pprint(result)
    ['    1. Beautiful is better than ugly.\n',
     '-   2. Explicit is better than implicit.\n',
     '-   3. Simple is better than complex.\n',
     '+   3.   Simple is better than complex.\n',
     '?     ++\n',
     '-   4. Complex is better than complicated.\n',
     '?            ^                     ---- ^\n',
     '+   4. Complicated is better than complex.\n',
     '?           ++++ ^                      ^\n',
     '+   5. Flat is better than nested.\n']

    As a single multi-line string it looks like this:

    >>> print ''.join(result),
        1. Beautiful is better than ugly.
    -   2. Explicit is better than implicit.
    -   3. Simple is better than complex.
    +   3.   Simple is better than complex.
    ?     ++
    -   4. Complex is better than complicated.
    ?            ^                     ---- ^
    +   4. Complicated is better than complex.
    ?           ++++ ^                      ^
    +   5. Flat is better than nested.

    Methods:

    __init__(linejunk=None, charjunk=None)
        Construct a text differencer, with optional filters.

    compare(a, b)
        Compare two sequences of lines; generate the resulting delta.
    """

    def __init__(self, linejunk=None, charjunk=None):
        """
        Construct a text differencer, with optional filters.

        The two optional keyword parameters are for filter functions:

        - `linejunk`: A function that should accept a single string argument,
          and return true iff the string is junk. The module-level function
          `IS_LINE_JUNK` may be used to filter out lines without visible
          characters, except for at most one splat ('#').  It is recommended
          to leave linejunk None; as of Python 2.3, the underlying
          SequenceMatcher class has grown an adaptive notion of "noise" lines
          that's better than any static definition the author has ever been
          able to craft.

        - `charjunk`: A function that should accept a string of length 1. The
          module-level function `IS_CHARACTER_JUNK` may be used to filter out
          whitespace characters (a blank or tab; **note**: bad idea to include
          newline in this!).  Use of IS_CHARACTER_JUNK is recommended.
        """

        self.linejunk = linejunk
        self.charjunk = charjunk

    def compare(self, a, b):
        r"""
        Compare two sequences of lines; generate the resulting delta.

        Each sequence must contain individual single-line strings ending with
        newlines. Such sequences can be obtained from the `readlines()` method
        of file-like objects.  The delta generated also consists of newline-
        terminated strings, ready to be printed as-is via the writeline()
        method of a file-like object.

        Example:

        >>> print ''.join(Differ().compare('one\ntwo\nthree\n'.splitlines(1),
        ...                                'ore\ntree\nemu\n'.splitlines(1))),
        - one
        ?  ^
        + ore
        ?  ^
        - two
        - three
        ?  -
        + tree
        + emu
        """

        # Line-level comparison; each opcode from SequenceMatcher is
        # expanded into one or more Differ delta lines below.
        cruncher = SequenceMatcher(self.linejunk, a, b)
        for tag, alo, ahi, blo, bhi in cruncher.get_opcodes():
            if tag == 'replace':
                g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
            elif tag == 'delete':
                g = self._dump('-', a, alo, ahi)
            elif tag == 'insert':
                g = self._dump('+', b, blo, bhi)
            elif tag == 'equal':
                g = self._dump(' ', a, alo, ahi)
            else:
                raise ValueError, 'unknown tag %r' % (tag,)

            for line in g:
                yield line

    def _dump(self, tag, x, lo, hi):
        """Generate comparison results for a same-tagged range."""
        for i in xrange(lo, hi):
            yield '%s %s' % (tag, x[i])

    def _plain_replace(self, a, alo, ahi, b, blo, bhi):
        """Generate a straight '-'/'+' dump for a replaced block, with
        no intraline '?' markup (shorter block emitted first)."""
        assert alo < ahi and blo < bhi
        # dump the shorter block first -- reduces the burden on short-term
        # memory if the blocks are of very different sizes
        if bhi - blo < ahi - alo:
            first  = self._dump('+', b, blo, bhi)
            second = self._dump('-', a, alo, ahi)
        else:
            first  = self._dump('-', a, alo, ahi)
            second = self._dump('+', b, blo, bhi)

        for g in first, second:
            for line in g:
                yield line

    def _fancy_replace(self, a, alo, ahi, b, blo, bhi):
        r"""
        When replacing one block of lines with another, search the blocks
        for *similar* lines; the best-matching pair (if any) is used as a
        synch point, and intraline difference marking is done on the
        similar pair. Lots of work, but often worth it.

        Example:

        >>> d = Differ()
        >>> results = d._fancy_replace(['abcDefghiJkl\n'], 0, 1,
        ...                            ['abcdefGhijkl\n'], 0, 1)
        >>> print ''.join(results),
        - abcDefghiJkl
        ?    ^  ^  ^
        + abcdefGhijkl
        ?    ^  ^  ^
        """

        # don't synch up unless the lines have a similarity score of at
        # least cutoff; best_ratio tracks the best score seen so far
        best_ratio, cutoff = 0.74, 0.75
        cruncher = SequenceMatcher(self.charjunk)
        eqi, eqj = None, None   # 1st indices of equal lines (if any)

        # search for the pair that matches best without being identical
        # (identical lines must be junk lines, & we don't want to synch up
        # on junk -- unless we have to)
        for j in xrange(blo, bhi):
            bj = b[j]
            cruncher.set_seq2(bj)
            for i in xrange(alo, ahi):
                ai = a[i]
                if ai == bj:
                    if eqi is None:
                        eqi, eqj = i, j
                    continue
                cruncher.set_seq1(ai)
                # computing similarity is expensive, so use the quick
                # upper bounds first -- have seen this speed up messy
                # compares by a factor of 3.
                # note that ratio() is only expensive to compute the first
                # time it's called on a sequence pair; the expensive part
                # of the computation is cached by cruncher
                if cruncher.real_quick_ratio() > best_ratio and \
                      cruncher.quick_ratio() > best_ratio and \
                      cruncher.ratio() > best_ratio:
                    best_ratio, best_i, best_j = cruncher.ratio(), i, j
        if best_ratio < cutoff:
            # no non-identical "pretty close" pair
            if eqi is None:
                # no identical pair either -- treat it as a straight replace
                for line in self._plain_replace(a, alo, ahi, b, blo, bhi):
                    yield line
                return
            # no close pair, but an identical pair -- synch up on that
            best_i, best_j, best_ratio = eqi, eqj, 1.0
        else:
            # there's a close pair, so forget the identical pair (if any)
            eqi = None

        # a[best_i] very similar to b[best_j]; eqi is None iff they're not
        # identical

        # pump out diffs from before the synch point
        for line in self._fancy_helper(a, alo, best_i, b, blo, best_j):
            yield line

        # do intraline marking on the synch pair
        aelt, belt = a[best_i], b[best_j]
        if eqi is None:
            # pump out a '-', '?', '+', '?' quad for the synched lines
            atags = btags = ""
            cruncher.set_seqs(aelt, belt)
            for tag, ai1, ai2, bj1, bj2 in cruncher.get_opcodes():
                la, lb = ai2 - ai1, bj2 - bj1
                if tag == 'replace':
                    atags += '^' * la
                    btags += '^' * lb
                elif tag == 'delete':
                    atags += '-' * la
                elif tag == 'insert':
                    btags += '+' * lb
                elif tag == 'equal':
                    atags += ' ' * la
                    btags += ' ' * lb
                else:
                    raise ValueError, 'unknown tag %r' % (tag,)
            for line in self._qformat(aelt, belt, atags, btags):
                yield line
        else:
            # the synch pair is identical
            yield '  ' + aelt

        # pump out diffs from after the synch point
        for line in self._fancy_helper(a, best_i+1, ahi, b, best_j+1, bhi):
            yield line

    def _fancy_helper(self, a, alo, ahi, b, blo, bhi):
        """Handle the leftover ranges around a synch point: a fancy
        replace when both ranges are non-empty, otherwise a plain '-'
        or '+' dump of whichever side remains (nothing if both empty)."""
        g = []
        if alo < ahi:
            if blo < bhi:
                g = self._fancy_replace(a, alo, ahi, b, blo, bhi)
            else:
                g = self._dump('-', a, alo, ahi)
        elif blo < bhi:
            g = self._dump('+', b, blo, bhi)

        for line in g:
            yield line

    def _qformat(self, aline, bline, atags, btags):
        r"""
        Format "?" output and deal with leading tabs.

        Example:

        >>> d = Differ()
        >>> results = d._qformat('\tabcDefghiJkl\n', '\t\tabcdefGhijkl\n',
        ...                      '  ^ ^  ^      ', '+  ^ ^  ^      ')
        >>> for line in results: print repr(line)
        ...
        '- \tabcDefghiJkl\n'
        '? \t ^ ^  ^\n'
        '+ \t\tabcdefGhijkl\n'
        '? \t  ^ ^  ^\n'
        """

        # Can hurt, but will probably help most of the time.
        common = min(_count_leading(aline, "\t"),
                     _count_leading(bline, "\t"))
        common = min(common, _count_leading(atags[:common], " "))
        atags = atags[common:].rstrip()
        btags = btags[common:].rstrip()

        yield "- " + aline
        if atags:
            yield "? %s%s\n" % ("\t" * common, atags)

        yield "+ " + bline
        if btags:
            yield "? %s%s\n" % ("\t" * common, btags)
+
+# With respect to junk, an earlier version of ndiff simply refused to
+# *start* a match with a junk element.  The result was cases like this:
+#     before: private Thread currentThread;
+#     after:  private volatile Thread currentThread;
+# If you consider whitespace to be junk, the longest contiguous match
+# not starting with junk is "e Thread currentThread".  So ndiff reported
+# that "e volatil" was inserted between the 't' and the 'e' in "private".
+# While an accurate view, to people that's absurd.  The current version
+# looks for matching blocks that are entirely junk-free, then extends the
+# longest one of those as far as possible but only with matching junk.
+# So now "currentThread" is matched, then extended to suck up the
+# preceding blank; then "private" is matched, and extended to suck up the
+# following blank; then "Thread" is matched; and finally ndiff reports
+# that "volatile " was inserted before "Thread".  The only quibble
+# remaining is that perhaps it was really the case that " volatile"
+# was inserted after "private".  I can live with that <wink>.
+
+import re
+
def IS_LINE_JUNK(line, pat=re.compile(r"\s*#?\s*$").match):
    r"""
    Return True iff `line` is ignorable: blank or nothing but one '#'.

    Examples:

    >>> IS_LINE_JUNK('\n')
    True
    >>> IS_LINE_JUNK('  #   \n')
    True
    >>> IS_LINE_JUNK('hello\n')
    False
    """
    match = pat(line)
    return match is not None
+
def IS_CHARACTER_JUNK(ch, ws=" \t"):
    r"""
    Return True iff `ch` is an ignorable character: a space or a tab.

    Examples:

    >>> IS_CHARACTER_JUNK(' ')
    True
    >>> IS_CHARACTER_JUNK('\t')
    True
    >>> IS_CHARACTER_JUNK('\n')
    False
    >>> IS_CHARACTER_JUNK('x')
    False
    """
    # Membership in the default whitespace string " \t" covers exactly
    # blanks and tabs; newline is deliberately excluded.
    return ch in ws
+
+
def unified_diff(a, b, fromfile='', tofile='', fromfiledate='',
                 tofiledate='', n=3, lineterm='\n'):
    r"""
    Compare two sequences of lines; generate the delta as a unified diff.

    Unified diffs are a compact way of showing line changes plus a few
    lines of context.  The number of context lines is set by 'n', which
    defaults to three.

    By default, the diff control lines (those with ---, +++, or @@) are
    created with a trailing newline.  This is helpful so that inputs
    created from file.readlines() result in diffs that are suitable for
    file.writelines() since both the inputs and outputs have trailing
    newlines.

    For inputs that do not have trailing newlines, set the lineterm
    argument to "" so that the output will be uniformly newline free.

    The unidiff format normally has a header for filenames and modification
    times.  Any or all of these may be specified using strings for
    'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.  The modification
    times are normally expressed in the format returned by time.ctime().

    Example:

    >>> for line in unified_diff('one two three four'.split(),
    ...             'zero one tree four'.split(), 'Original', 'Current',
    ...             'Sat Jan 26 23:30:50 1991', 'Fri Jun 06 10:20:52 2003',
    ...             lineterm=''):
    ...     print line
    --- Original Sat Jan 26 23:30:50 1991
    +++ Current Fri Jun 06 10:20:52 2003
    @@ -1,4 +1,4 @@
    +zero
     one
    -two
    -three
    +tree
     four
    """
    header_emitted = False
    for group in SequenceMatcher(None, a, b).get_grouped_opcodes(n):
        if not header_emitted:
            # The two-line file header appears once, before the first hunk.
            yield '--- %s %s%s' % (fromfile, fromfiledate, lineterm)
            yield '+++ %s %s%s' % (tofile, tofiledate, lineterm)
            header_emitted = True
        i1, i2 = group[0][1], group[-1][2]
        j1, j2 = group[0][3], group[-1][4]
        yield "@@ -%d,%d +%d,%d @@%s" % (i1+1, i2-i1, j1+1, j2-j1, lineterm)
        for tag, i1, i2, j1, j2 in group:
            if tag == 'equal':
                for line in a[i1:i2]:
                    yield ' ' + line
            else:
                if tag == 'replace' or tag == 'delete':
                    for line in a[i1:i2]:
                        yield '-' + line
                if tag == 'replace' or tag == 'insert':
                    for line in b[j1:j2]:
                        yield '+' + line
+
+# See http://www.unix.org/single_unix_specification/
def context_diff(a, b, fromfile='', tofile='',
                 fromfiledate='', tofiledate='', n=3, lineterm='\n'):
    r"""
    Compare two sequences of lines; generate the delta as a context diff.

    Context diffs are a compact way of showing line changes plus a few
    lines of context.  The number of context lines is set by 'n', which
    defaults to three.

    By default, the diff control lines (those with *** or ---) are
    created with a trailing newline.  This is helpful so that inputs
    created from file.readlines() result in diffs that are suitable for
    file.writelines() since both the inputs and outputs have trailing
    newlines.

    For inputs that do not have trailing newlines, set the lineterm
    argument to "" so that the output will be uniformly newline free.

    The context diff format normally has a header for filenames and
    modification times.  Any or all of these may be specified using
    strings for 'fromfile', 'tofile', 'fromfiledate', and 'tofiledate'.
    The modification times are normally expressed in the format returned
    by time.ctime().  If not specified, the strings default to blanks.

    Example:

    >>> print ''.join(context_diff('one\ntwo\nthree\nfour\n'.splitlines(1),
    ...       'zero\none\ntree\nfour\n'.splitlines(1), 'Original', 'Current',
    ...       'Sat Jan 26 23:30:50 1991', 'Fri Jun 06 10:22:46 2003')),
    *** Original Sat Jan 26 23:30:50 1991
    --- Current Fri Jun 06 10:22:46 2003
    ***************
    *** 1,4 ****
      one
    ! two
    ! three
      four
    --- 1,4 ----
    + zero
      one
    ! tree
      four
    """
    prefix = {'insert': '+ ', 'delete': '- ', 'replace': '! ', 'equal': '  '}
    header_emitted = False
    for group in SequenceMatcher(None, a, b).get_grouped_opcodes(n):
        if not header_emitted:
            # The two-line file header appears once, before the first hunk.
            yield '*** %s %s%s' % (fromfile, fromfiledate, lineterm)
            yield '--- %s %s%s' % (tofile, tofiledate, lineterm)
            header_emitted = True

        yield '***************%s' % (lineterm,)

        # "from" side: ranges of two or more lines are shown as
        # "start,end"; shorter ranges show just the end line number.
        if group[-1][2] - group[0][1] >= 2:
            yield '*** %d,%d ****%s' % (group[0][1]+1, group[-1][2], lineterm)
        else:
            yield '*** %d ****%s' % (group[-1][2], lineterm)
        # Only dump the "from" body if it contains an actual change.
        changed = [op for op in group if op[0] in ('replace', 'delete')]
        if changed:
            for tag, i1, i2, _, _ in group:
                if tag != 'insert':
                    for line in a[i1:i2]:
                        yield prefix[tag] + line

        # "to" side, with the same range formatting.
        if group[-1][4] - group[0][3] >= 2:
            yield '--- %d,%d ----%s' % (group[0][3]+1, group[-1][4], lineterm)
        else:
            yield '--- %d ----%s' % (group[-1][4], lineterm)
        changed = [op for op in group if op[0] in ('replace', 'insert')]
        if changed:
            for tag, _, _, j1, j2 in group:
                if tag != 'delete':
                    for line in b[j1:j2]:
                        yield prefix[tag] + line
+
def ndiff(a, b, linejunk=None, charjunk=IS_CHARACTER_JUNK):
    r"""
    Compare `a` and `b` (lists of strings); return a `Differ`-style delta.

    Optional keyword parameters `linejunk` and `charjunk` are filter
    functions (or None):

    - linejunk: A function that accepts a single string argument and
      returns true iff the string is junk.  The default, None, is
      recommended; as of Python 2.3, an adaptive notion of "noise" lines
      is used that does a good job on its own.

    - charjunk: A function that accepts a string of length 1.  The
      default is the module-level function IS_CHARACTER_JUNK, which
      filters out whitespace characters (a blank or tab; note: bad idea
      to include newline in this!).

    Tools/scripts/ndiff.py is a command-line front-end to this function.

    Example:

    >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(1),
    ...              'ore\ntree\nemu\n'.splitlines(1))
    >>> print ''.join(diff),
    - one
    ?  ^
    + ore
    ?  ^
    - two
    - three
    ?  -
    + tree
    + emu
    """
    differ = Differ(linejunk, charjunk)
    return differ.compare(a, b)
+
def _mdiff(fromlines, tolines, context=None, linejunk=None,
           charjunk=IS_CHARACTER_JUNK):
    """Returns generator yielding marked up from/to side by side differences.

    Arguments:
    fromlines -- list of text lines to be compared to tolines
    tolines -- list of text lines to be compared to fromlines
    context -- number of context lines to display on each side of difference,
               if None, all from/to text lines will be generated.
    linejunk -- passed on to ndiff (see ndiff documentation)
    charjunk -- passed on to ndiff (see ndiff documentation)

    This function returns an iterator which returns a tuple:
    (from line tuple, to line tuple, boolean flag)

    from/to line tuple -- (line num, line text)
        line num -- integer or None (to indicate a context separation)
        line text -- original line text with following markers inserted:
            '\0+' -- marks start of added text
            '\0-' -- marks start of deleted text
            '\0^' -- marks start of changed text
            '\1' -- marks end of added/deleted/changed text

    boolean flag -- None indicates context separation, True indicates
        either "from" or "to" line contains a change, otherwise False.

    This function/iterator was originally developed to generate side by side
    file differences for making HTML pages (see HtmlDiff class for example
    usage).

    Note, this function utilizes the ndiff function to generate the side by
    side difference markup.  Optional ndiff arguments may be passed to this
    function and they in turn will be passed to ndiff.
    """
    import re

    # regular expression for finding intraline change indices
    change_re = re.compile('(\++|\-+|\^+)')

    # create the difference iterator to generate the differences
    diff_lines_iterator = ndiff(fromlines,tolines,linejunk,charjunk)

    def _make_line(lines, format_key, side, num_lines=[0,0]):
        """Returns line of text with user's change markup and line formatting.

        lines -- list of lines from the ndiff generator to produce a line of
                 text from.  When producing the line of text to return, the
                 lines used are removed from this list.
        format_key -- '+' return first line in list with "add" markup around
                          the entire line.
                      '-' return first line in list with "delete" markup around
                          the entire line.
                      '?' return first line in list with add/delete/change
                          intraline markup (indices obtained from second line)
                      None return first line in list with no markup
        side -- index into the num_lines list (0=from,1=to)
        num_lines -- from/to current line number.  This is NOT intended to be a
                     passed parameter.  It is present as a keyword argument to
                     maintain memory of the current line numbers between calls
                     of this function.  (The mutable default is deliberate.)

        Note, this function is purposefully not defined at the module scope so
        that data it needs from its parent function (within whose context it
        is defined) does not need to be of module scope.
        """
        num_lines[side] += 1
        # Handle case where no user markup is to be added, just return line of
        # text with user's line format to allow for usage of the line number.
        if format_key is None:
            return (num_lines[side],lines.pop(0)[2:])
        # Handle case of intraline changes
        if format_key == '?':
            text, markers = lines.pop(0), lines.pop(0)
            # find intraline changes (store change type and indices in tuples)
            sub_info = []
            def record_sub_info(match_object,sub_info=sub_info):
                sub_info.append([match_object.group(1)[0],match_object.span()])
                return match_object.group(1)
            change_re.sub(record_sub_info,markers)
            # process each tuple inserting our special marks that won't be
            # noticed by an xml/html escaper.  Walk backwards so earlier
            # indices stay valid while text grows.
            for key,(begin,end) in sub_info[::-1]:
                text = text[0:begin]+'\0'+key+text[begin:end]+'\1'+text[end:]
            text = text[2:]
        # Handle case of add/delete entire line
        else:
            text = lines.pop(0)[2:]
            # if line of text is just a newline, insert a space so there is
            # something for the user to highlight and see.
            if not text:
                text = ' '
            # insert marks that won't be noticed by an xml/html escaper.
            text = '\0' + format_key + text + '\1'
        # Return line of text, first allow user's line formatter to do its
        # thing (such as adding the line number) then replace the special
        # marks with the user's change markup.
        return (num_lines[side],text)

    def _line_iterator():
        """Yields from/to lines of text with a change indication.

        This function is an iterator.  It itself pulls lines from a
        differencing iterator, processes them and yields them.  When it can
        it yields both a "from" and a "to" line, otherwise it will yield one
        or the other.  In addition to yielding the lines of from/to text, a
        boolean flag is yielded to indicate if the text line(s) have
        differences in them.

        Note, this function is purposefully not defined at the module scope so
        that data it needs from its parent function (within whose context it
        is defined) does not need to be of module scope.
        """
        lines = []
        num_blanks_pending, num_blanks_to_yield = 0, 0
        while True:
            # Load up next 4 lines so we can look ahead, create strings which
            # are a concatenation of the first character of each of the 4 lines
            # so we can do some very readable comparisons.
            while len(lines) < 4:
                try:
                    lines.append(diff_lines_iterator.next())
                except StopIteration:
                    # 'X' is a sentinel first character; no real ndiff line
                    # starts with it.
                    lines.append('X')
            s = ''.join([line[0] for line in lines])
            if s.startswith('X'):
                # When no more lines, pump out any remaining blank lines so the
                # corresponding add/delete lines get a matching blank line so
                # all line pairs get yielded at the next level.
                num_blanks_to_yield = num_blanks_pending
            elif s.startswith('-?+?'):
                # simple intraline change
                yield _make_line(lines,'?',0), _make_line(lines,'?',1), True
                continue
            elif s.startswith('--++'):
                # in delete block, add block coming: we do NOT want to get
                # caught up on blank lines yet, just process the delete line
                num_blanks_pending -= 1
                yield _make_line(lines,'-',0), None, True
                continue
            elif s.startswith('--?+') or s.startswith('--+') or \
                 s.startswith('- '):
                # in delete block and see an intraline change or unchanged line
                # coming: yield the delete line and then blanks
                from_line,to_line = _make_line(lines,'-',0), None
                num_blanks_to_yield,num_blanks_pending = num_blanks_pending-1,0
            elif s.startswith('-+?'):
                # intraline change
                yield _make_line(lines,None,0), _make_line(lines,'?',1), True
                continue
            elif s.startswith('-?+'):
                # intraline change
                yield _make_line(lines,'?',0), _make_line(lines,None,1), True
                continue
            elif s.startswith('-'):
                # delete FROM line
                num_blanks_pending -= 1
                yield _make_line(lines,'-',0), None, True
                continue
            elif s.startswith('+--'):
                # in add block, delete block coming: we do NOT want to get
                # caught up on blank lines yet, just process the add line
                num_blanks_pending += 1
                yield None, _make_line(lines,'+',1), True
                continue
            elif s.startswith('+ ') or s.startswith('+-'):
                # will be leaving an add block: yield blanks then add line
                from_line, to_line = None, _make_line(lines,'+',1)
                num_blanks_to_yield,num_blanks_pending = num_blanks_pending+1,0
            elif s.startswith('+'):
                # inside an add block, yield the add line
                num_blanks_pending += 1
                yield None, _make_line(lines,'+',1), True
                continue
            elif s.startswith(' '):
                # unchanged text, yield it to both sides; the first call gets
                # a copy of 'lines' so both sides consume the same line.
                yield _make_line(lines[:],None,0),_make_line(lines,None,1),False
                continue
            # Catch up on the blank lines so when we yield the next from/to
            # pair, they are lined up.
            while(num_blanks_to_yield < 0):
                num_blanks_to_yield += 1
                yield None,('','\n'),True
            while(num_blanks_to_yield > 0):
                num_blanks_to_yield -= 1
                yield ('','\n'),None,True
            if s.startswith('X'):
                # note: relies on pre-PEP 479 semantics, where raising
                # StopIteration inside a generator ends the generator.
                raise StopIteration
            else:
                yield from_line,to_line,True

    def _line_pair_iterator():
        """Yields from/to lines of text with a change indication.

        This function is an iterator.  It itself pulls lines from the line
        iterator.  Its difference from that iterator is that this function
        always yields a pair of from/to text lines (with the change
        indication).  If necessary it will collect single from/to lines
        until it has a matching pair from/to pair to yield.

        Note, this function is purposefully not defined at the module scope so
        that data it needs from its parent function (within whose context it
        is defined) does not need to be of module scope.
        """
        line_iterator = _line_iterator()
        fromlines,tolines=[],[]
        while True:
            # Collecting lines of text until we have a from/to pair
            while (len(fromlines)==0 or len(tolines)==0):
                from_line, to_line, found_diff =line_iterator.next()
                if from_line is not None:
                    fromlines.append((from_line,found_diff))
                if to_line is not None:
                    tolines.append((to_line,found_diff))
            # Once we have a pair, remove them from the collection and yield it
            from_line, fromDiff = fromlines.pop(0)
            to_line, to_diff = tolines.pop(0)
            yield (from_line,to_line,fromDiff or to_diff)

    # Handle case where user does not want context differencing, just yield
    # them up without doing anything else with them.
    line_pair_iterator = _line_pair_iterator()
    if context is None:
        while True:
            yield line_pair_iterator.next()
    # Handle case where user wants context differencing.  We must do some
    # storage of lines until we know for sure that they are to be yielded.
    else:
        context += 1
        lines_to_write = 0
        while True:
            # Store lines up until we find a difference, note use of a
            # circular queue because we only need to keep around what
            # we need for context.
            index, contextLines = 0, [None]*(context)
            found_diff = False
            while(found_diff is False):
                from_line, to_line, found_diff = line_pair_iterator.next()
                i = index % context
                contextLines[i] = (from_line, to_line, found_diff)
                index += 1
            # Yield lines that we have collected so far, but first yield
            # the user's separator.
            if index > context:
                yield None, None, None
                lines_to_write = context
            else:
                lines_to_write = index
                index = 0
            while(lines_to_write):
                i = index % context
                index += 1
                yield contextLines[i]
                lines_to_write -= 1
            # Now yield the context lines after the change
            lines_to_write = context-1
            while(lines_to_write):
                from_line, to_line, found_diff = line_pair_iterator.next()
                # If another change within the context, extend the context
                if found_diff:
                    lines_to_write = context-1
                else:
                    lines_to_write -= 1
                yield from_line, to_line, found_diff
+
+
# Template for the complete HTML file returned by HtmlDiff.make_file();
# expects 'styles', 'table' and 'legend' substitutions.
_file_template = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
          "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">

<html>

<head>
    <meta http-equiv="Content-Type"
          content="text/html; charset=ISO-8859-1" />
    <title></title>
    <style type="text/css">%(styles)s
    </style>
</head>

<body>
    %(table)s%(legend)s
</body>

</html>"""

# Default CSS rules for the diff table; the class names match those emitted
# by HtmlDiff (_format_line, _convert_flags and _table_template).
_styles = """
        table.diff {font-family:Courier; border:medium;}
        .diff_header {background-color:#e0e0e0}
        td.diff_header {text-align:right}
        .diff_next {background-color:#c0c0c0}
        .diff_add {background-color:#aaffaa}
        .diff_chg {background-color:#ffff77}
        .diff_sub {background-color:#ffaaaa}"""

# Template for the side by side diff table itself; expects 'prefix',
# 'header_row' and 'data_rows' substitutions.
_table_template = """
    <table class="diff" id="difflib_chg_%(prefix)s_top"
           cellspacing="0" cellpadding="0" rules="groups" >
        <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup>
        <colgroup></colgroup> <colgroup></colgroup> <colgroup></colgroup>
        %(header_row)s
        <tbody>
%(data_rows)s        </tbody>
    </table>"""

# Static legend table explaining the highlight colors and the (f)/(n)/(t)
# navigation links.
_legend = """
    <table class="diff" summary="Legends">
        <tr> <th colspan="2"> Legends </th> </tr>
        <tr> <td> <table border="" summary="Colors">
                      <tr><th> Colors </th> </tr>
                      <tr><td class="diff_add">&nbsp;Added&nbsp;</td></tr>
                      <tr><td class="diff_chg">Changed</td> </tr>
                      <tr><td class="diff_sub">Deleted</td> </tr>
                  </table></td>
             <td> <table border="" summary="Links">
                      <tr><th colspan="2"> Links </th> </tr>
                      <tr><td>(f)irst change</td> </tr>
                      <tr><td>(n)ext change</td> </tr>
                      <tr><td>(t)op</td> </tr>
                  </table></td> </tr>
    </table>"""
+
class HtmlDiff(object):
    """For producing HTML side by side comparison with change highlights.

    This class can be used to create an HTML table (or a complete HTML file
    containing the table) showing a side by side, line by line comparison
    of text with inter-line and intra-line change highlights.  The table can
    be generated in either full or contextual difference mode.

    The following methods are provided for HTML generation:

    make_table -- generates HTML for a single side by side table
    make_file -- generates complete HTML file with a single side by side table

    See tools/scripts/diff.py for an example usage of this class.
    """

    # Templates are class attributes so subclasses can override the markup.
    _file_template = _file_template
    _styles = _styles
    _table_template = _table_template
    _legend = _legend
    _default_prefix = 0  # class-wide counter; makes anchor prefixes unique

    def __init__(self,tabsize=8,wrapcolumn=None,linejunk=None,
                 charjunk=IS_CHARACTER_JUNK):
        """HtmlDiff instance initializer

        Arguments:
        tabsize -- tab stop spacing, defaults to 8.
        wrapcolumn -- column number where lines are broken and wrapped,
            defaults to None where lines are not wrapped.
        linejunk,charjunk -- keyword arguments passed into ndiff() (used by
            HtmlDiff() to generate the side by side HTML differences).  See
            ndiff() documentation for argument default values and descriptions.
        """
        self._tabsize = tabsize
        self._wrapcolumn = wrapcolumn
        self._linejunk = linejunk
        self._charjunk = charjunk

    def make_file(self,fromlines,tolines,fromdesc='',todesc='',context=False,
                  numlines=5):
        """Returns HTML file of side by side comparison with change highlights

        Arguments:
        fromlines -- list of "from" lines
        tolines -- list of "to" lines
        fromdesc -- "from" file column header string
        todesc -- "to" file column header string
        context -- set to True for contextual differences (defaults to False
            which shows full differences).
        numlines -- number of context lines.  When context is set True,
            controls number of lines displayed before and after the change.
            When context is False, controls the number of lines to place
            the "next" link anchors before the next change (so click of
            "next" link jumps to just before the change).
        """

        return self._file_template % dict(
            styles = self._styles,
            legend = self._legend,
            table = self.make_table(fromlines,tolines,fromdesc,todesc,
                                    context=context,numlines=numlines))

    def _tab_newline_replace(self,fromlines,tolines):
        """Returns from/to line lists with tabs expanded and newlines removed.

        Instead of tab characters being replaced by the number of spaces
        needed to fill in to the next tab stop, this function will fill
        the space with tab characters.  This is done so that the difference
        algorithms can identify changes in a file when tabs are replaced by
        spaces and vice versa.  At the end of the HTML generation, the tab
        characters will be replaced with a nonbreakable space.
        """
        def expand_tabs(line):
            # hide real spaces
            line = line.replace(' ','\0')
            # expand tabs into spaces
            line = line.expandtabs(self._tabsize)
            # replace spaces from expanded tabs back into tab characters
            # (we'll replace them with markup after we do differencing)
            line = line.replace(' ','\t')
            return line.replace('\0',' ').rstrip('\n')
        fromlines = [expand_tabs(line) for line in fromlines]
        tolines = [expand_tabs(line) for line in tolines]
        return fromlines,tolines

    def _split_line(self,data_list,line_num,text):
        """Builds list of text lines by splitting text lines at wrap point

        This function will determine if the input text line needs to be
        wrapped (split) into separate lines.  If so, the first wrap point
        will be determined and the first line appended to the output
        text line list.  This function is used recursively to handle
        the second part of the split line to further split it.
        """
        # if blank line or context separator, just add it to the output list
        if not line_num:
            data_list.append((line_num,text))
            return

        # if line text doesn't need wrapping, just add it to the output list.
        # Each intraline marker pair ('\0'+key ... '\1') adds 3 characters
        # that don't display, hence the count('\0')*3 adjustment.
        size = len(text)
        max = self._wrapcolumn
        if (size <= max) or ((size -(text.count('\0')*3)) <= max):
            data_list.append((line_num,text))
            return

        # scan text looking for the wrap point, keeping track if the wrap
        # point is inside markers
        i = 0
        n = 0
        mark = ''
        while n < max and i < size:
            if text[i] == '\0':
                i += 1
                mark = text[i]
                i += 1
            elif text[i] == '\1':
                i += 1
                mark = ''
            else:
                i += 1
                n += 1

        # wrap point is inside text, break it up into separate lines
        line1 = text[:i]
        line2 = text[i:]

        # if wrap point is inside markers, place end marker at end of first
        # line and start marker at beginning of second line because each
        # line will have its own table tag markup around it.
        if mark:
            line1 = line1 + '\1'
            line2 = '\0' + mark + line2

        # tack on first line onto the output list
        data_list.append((line_num,line1))

        # use this routine again to wrap the remaining text; '>' marks a
        # continuation line (no real line number).
        self._split_line(data_list,'>',line2)

    def _line_wrapper(self,diffs):
        """Returns iterator that splits (wraps) mdiff text lines"""

        # pull from/to data and flags from mdiff iterator
        for fromdata,todata,flag in diffs:
            # check for context separators and pass them through
            if flag is None:
                yield fromdata,todata,flag
                continue
            (fromline,fromtext),(toline,totext) = fromdata,todata
            # for each from/to line split it at the wrap column to form
            # list of text lines.
            fromlist,tolist = [],[]
            self._split_line(fromlist,fromline,fromtext)
            self._split_line(tolist,toline,totext)
            # yield from/to line in pairs inserting blank lines as
            # necessary when one side has more wrapped lines
            while fromlist or tolist:
                if fromlist:
                    fromdata = fromlist.pop(0)
                else:
                    fromdata = ('',' ')
                if tolist:
                    todata = tolist.pop(0)
                else:
                    todata = ('',' ')
                yield fromdata,todata,flag

    def _collect_lines(self,diffs):
        """Collects mdiff output into separate lists

        Before storing the mdiff from/to data into a list, it is converted
        into a single line of text with HTML markup.
        """

        fromlist,tolist,flaglist = [],[],[]
        # pull from/to data and flags from mdiff style iterator
        for fromdata,todata,flag in diffs:
            try:
                # store HTML markup of the lines into the lists
                fromlist.append(self._format_line(0,flag,*fromdata))
                tolist.append(self._format_line(1,flag,*todata))
            except TypeError:
                # exceptions occur for lines where context separators go
                # (mdiff yields None there, which can't be unpacked)
                fromlist.append(None)
                tolist.append(None)
            flaglist.append(flag)
        return fromlist,tolist,flaglist

    def _format_line(self,side,flag,linenum,text):
        """Returns HTML markup of "from" / "to" text lines

        side -- 0 or 1 indicating "from" or "to" text
        flag -- indicates if difference on line
        linenum -- line number (used for line number column)
        text -- line text to be marked up
        """
        try:
            linenum = '%d' % linenum
            # NOTE(review): 'id' shadows the builtin; harmless locally.
            id = ' id="%s%s"' % (self._prefix[side],linenum)
        except TypeError:
            # handle blank lines where linenum is '>' or ''
            id = ''
        # replace those things that would get confused with HTML symbols
        text=text.replace("&","&amp;").replace(">","&gt;").replace("<","&lt;")

        # make space non-breakable so they don't get compressed or line wrapped
        text = text.replace(' ','&nbsp;').rstrip()

        return '<td class="diff_header"%s>%s</td><td nowrap="nowrap">%s</td>' \
               % (id,linenum,text)

    def _make_prefix(self):
        """Create unique anchor prefixes"""

        # Generate a unique anchor prefix so multiple tables
        # can exist on the same HTML page without conflicts.
        fromprefix = "from%d_" % HtmlDiff._default_prefix
        toprefix = "to%d_" % HtmlDiff._default_prefix
        HtmlDiff._default_prefix += 1
        # store prefixes so line format method has access
        self._prefix = [fromprefix,toprefix]

    def _convert_flags(self,fromlist,tolist,flaglist,context,numlines):
        """Makes list of "next" links"""

        # all anchor names will be generated using the unique "to" prefix
        toprefix = self._prefix[1]

        # process change flags, generating middle column of next anchors/links
        next_id = ['']*len(flaglist)
        next_href = ['']*len(flaglist)
        num_chg, in_change = 0, False
        last = 0
        for i,flag in enumerate(flaglist):
            if flag:
                if not in_change:
                    in_change = True
                    last = i
                    # at the beginning of a change, drop an anchor a few lines
                    # (the context lines) before the change for the previous
                    # link
                    i = max([0,i-numlines])
                    next_id[i] = ' id="difflib_chg_%s_%d"' % (toprefix,num_chg)
                    # at the beginning of a change, drop a link to the next
                    # change
                    num_chg += 1
                    next_href[last] = '<a href="#difflib_chg_%s_%d">n</a>' % (
                         toprefix,num_chg)
            else:
                in_change = False
        # check for cases where there is no content to avoid exceptions
        if not flaglist:
            flaglist = [False]
            next_id = ['']
            next_href = ['']
            last = 0
            if context:
                fromlist = ['<td></td><td>&nbsp;No Differences Found&nbsp;</td>']
                tolist = fromlist
            else:
                fromlist = tolist = ['<td></td><td>&nbsp;Empty File&nbsp;</td>']
        # if not a change on first line, drop a link
        if not flaglist[0]:
            next_href[0] = '<a href="#difflib_chg_%s_0">f</a>' % toprefix
        # redo the last link to link to the top
        next_href[last] = '<a href="#difflib_chg_%s_top">t</a>' % (toprefix)

        return fromlist,tolist,flaglist,next_href,next_id

    def make_table(self,fromlines,tolines,fromdesc='',todesc='',context=False,
                   numlines=5):
        """Returns HTML table of side by side comparison with change highlights

        Arguments:
        fromlines -- list of "from" lines
        tolines -- list of "to" lines
        fromdesc -- "from" file column header string
        todesc -- "to" file column header string
        context -- set to True for contextual differences (defaults to False
            which shows full differences).
        numlines -- number of context lines.  When context is set True,
            controls number of lines displayed before and after the change.
            When context is False, controls the number of lines to place
            the "next" link anchors before the next change (so click of
            "next" link jumps to just before the change).
        """

        # make unique anchor prefixes so that multiple tables may exist
        # on the same page without conflict.
        self._make_prefix()

        # change tabs to spaces before it gets more difficult after we insert
        # markup
        fromlines,tolines = self._tab_newline_replace(fromlines,tolines)

        # create diffs iterator which generates side by side from/to data
        if context:
            context_lines = numlines
        else:
            context_lines = None
        diffs = _mdiff(fromlines,tolines,context_lines,linejunk=self._linejunk,
                      charjunk=self._charjunk)

        # set up iterator to wrap lines that exceed desired width
        if self._wrapcolumn:
            diffs = self._line_wrapper(diffs)

        # collect up from/to lines and flags into lists (also format the lines)
        fromlist,tolist,flaglist = self._collect_lines(diffs)

        # process change flags, generating middle column of next anchors/links
        fromlist,tolist,flaglist,next_href,next_id = self._convert_flags(
            fromlist,tolist,flaglist,context,numlines)

        # cStringIO is Python 2 only
        import cStringIO
        s = cStringIO.StringIO()
        fmt = '            <tr><td class="diff_next"%s>%s</td>%s' + \
              '<td class="diff_next">%s</td>%s</tr>\n'
        for i in range(len(flaglist)):
            if flaglist[i] is None:
                # mdiff yields None on separator lines skip the bogus ones
                # generated for the first line
                if i > 0:
                    s.write('        </tbody>        \n        <tbody>\n')
            else:
                s.write( fmt % (next_id[i],next_href[i],fromlist[i],
                                           next_href[i],tolist[i]))
        if fromdesc or todesc:
            header_row = '<thead><tr>%s%s%s%s</tr></thead>' % (
                '<th class="diff_next"><br /></th>',
                '<th colspan="2" class="diff_header">%s</th>' % fromdesc,
                '<th class="diff_next"><br /></th>',
                '<th colspan="2" class="diff_header">%s</th>' % todesc)
        else:
            header_row = ''

        table = self._table_template % dict(
            data_rows=s.getvalue(),
            header_row=header_row,
            prefix=self._prefix[1])

        # finally, replace the internal '\0'/'\1' marks with real HTML spans
        # and the hidden tabs with non-breaking spaces.
        return table.replace('\0+','<span class="diff_add">'). \
                     replace('\0-','<span class="diff_sub">'). \
                     replace('\0^','<span class="diff_chg">'). \
                     replace('\1','</span>'). \
                     replace('\t','&nbsp;')
+
# NOTE(review): 're' is presumably imported at module top for helpers defined
# above this view; it is removed from the module namespace here so it is not
# exported — confirm against the full file.
del re
+
def restore(delta, which):
    r"""
    Generate one of the two sequences that generated a delta.

    Given a `delta` produced by `Differ.compare()` or `ndiff()`, yield the
    lines originating from file 1 or file 2 (selected by `which`), with the
    two-character line prefixes stripped off.

    Examples:

    >>> diff = ndiff('one\ntwo\nthree\n'.splitlines(1),
    ...              'ore\ntree\nemu\n'.splitlines(1))
    >>> diff = list(diff)
    >>> print ''.join(restore(diff, 1)),
    one
    two
    three
    >>> print ''.join(restore(diff, 2)),
    ore
    tree
    emu
    """
    tags = {1: "- ", 2: "+ "}
    try:
        tag = tags[int(which)]
    except KeyError:
        raise ValueError('unknown delta choice (must be 1 or 2): %r'
                         % which)
    # Keep common lines plus the lines unique to the chosen side.
    wanted = ("  ", tag)
    for line in delta:
        if line[:2] in wanted:
            yield line[2:]
+
+def _test():
+    import doctest, difflib
+    return doctest.testmod(difflib)
+
if __name__ == "__main__":
    # When run as a script, exercise the module's doctests.
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/dircache.py b/depot_tools/release/win/python_24/Lib/dircache.py
new file mode 100644
index 0000000..78ec7fe0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/dircache.py
@@ -0,0 +1,38 @@
+"""Read and cache directory listings.
+
+The listdir() routine returns a sorted list of the files in a directory,
+using a cache to avoid reading the directory more often than necessary.
+The annotate() routine appends slashes to directories."""
+
+import os
+
+__all__ = ["listdir", "opendir", "annotate", "reset"]
+
cache = {}  # maps directory path -> (cached st_mtime, sorted entry list)
+
def reset():
    """Throw away all cached directory listings.

    The cache is rebound to a fresh dict (rather than cleared in place),
    matching the original behavior.
    """
    global cache
    cache = {}
+
def listdir(path):
    """Return a sorted listing of `path`, using the module-level cache.

    The cached (mtime, entries) pair for `path` is consulted first; the
    directory is re-read only when its modification time has changed.
    Raises OSError (via os.stat) if `path` does not exist.
    """
    try:
        cached_mtime, entries = cache[path]
        # Remove the entry while we work: if os.stat below raises, no
        # stale data is left behind in the cache.
        del cache[path]
    except KeyError:
        # Never seen this path: force a mismatch so we read the directory.
        cached_mtime, entries = -1, []
    mtime = os.stat(path).st_mtime
    if mtime != cached_mtime:
        entries = os.listdir(path)
        entries.sort()
    cache[path] = mtime, entries
    return entries
+
opendir = listdir # XXX backward compatibility alias for old callers
+
def annotate(head, list):
    """Append '/' in place to each name in `list` that is a directory.

    `head` is the directory containing the entries; `list` is mutated in
    place.  (The parameter is named `list`, shadowing the builtin, to
    preserve the existing keyword-argument interface.)
    """
    for i, name in enumerate(list):
        if os.path.isdir(os.path.join(head, name)):
            list[i] = name + '/'
diff --git a/depot_tools/release/win/python_24/Lib/dis.py b/depot_tools/release/win/python_24/Lib/dis.py
new file mode 100644
index 0000000..5a74b3ae
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/dis.py
@@ -0,0 +1,223 @@
+"""Disassembler of Python byte code into mnemonics."""
+
+import sys
+import types
+
+from opcode import *
+from opcode import __all__ as _opcodes_all
+
+__all__ = ["dis","disassemble","distb","disco"] + _opcodes_all
+del _opcodes_all
+
+def dis(x=None):
+    """Disassemble classes, methods, functions, or code.
+
+    With no argument, disassemble the last traceback.
+
+    Accepts a class, method, function, code object, raw byte-code
+    string, or (for classes and modules) anything with a __dict__,
+    in which case every code-like attribute is disassembled in turn.
+    Raises TypeError for objects it cannot handle.
+    """
+    if x is None:
+        distb()
+        return
+    # Unwrap, in order: old-style instance -> its class, bound/unbound
+    # method -> underlying function, function -> its code object.
+    if type(x) is types.InstanceType:
+        x = x.__class__
+    if hasattr(x, 'im_func'):
+        x = x.im_func
+    if hasattr(x, 'func_code'):
+        x = x.func_code
+    if hasattr(x, '__dict__'):
+        # A class or module: disassemble each code-like member, sorted
+        # by name for deterministic output.
+        items = x.__dict__.items()
+        items.sort()
+        for name, x1 in items:
+            if type(x1) in (types.MethodType,
+                            types.FunctionType,
+                            types.CodeType,
+                            types.ClassType):
+                print "Disassembly of %s:" % name
+                try:
+                    dis(x1)
+                except TypeError, msg:
+                    # Keep going; report the one member we couldn't handle.
+                    print "Sorry:", msg
+                print
+    elif hasattr(x, 'co_code'):
+        disassemble(x)
+    elif isinstance(x, str):
+        # Raw byte-code string: no code-object metadata available.
+        disassemble_string(x)
+    else:
+        raise TypeError, \
+              "don't know how to disassemble %s objects" % \
+              type(x).__name__
+
+def distb(tb=None):
+    """Disassemble a traceback (default: last traceback)."""
+    if tb is None:
+        try:
+            tb = sys.last_traceback
+        except AttributeError:
+            raise RuntimeError, "no last traceback to disassemble"
+        while tb.tb_next: tb = tb.tb_next
+    disassemble(tb.tb_frame.f_code, tb.tb_lasti)
+
+def disassemble(co, lasti=-1):
+    """Disassemble a code object.
+
+    Print one line per opcode to stdout.  Source line numbers are shown
+    at line starts, the instruction at offset 'lasti' (if >= 0) is
+    flagged with '-->', and jump targets are flagged with '>>'.
+    """
+    code = co.co_code
+    labels = findlabels(code)
+    linestarts = dict(findlinestarts(co))
+    n = len(code)
+    i = 0
+    extended_arg = 0
+    free = None   # cell + free variable names, resolved lazily below
+    while i < n:
+        c = code[i]
+        op = ord(c)
+        # Source-line column: printed only at the first opcode of a line.
+        if i in linestarts:
+            if i > 0:
+                print
+            print "%3d" % linestarts[i],
+        else:
+            print '   ',
+
+        if i == lasti: print '-->',
+        else: print '   ',
+        if i in labels: print '>>',
+        else: print '  ',
+        print repr(i).rjust(4),
+        print opname[op].ljust(20),
+        i = i+1
+        if op >= HAVE_ARGUMENT:
+            # Argument is two little-endian bytes, possibly extended by a
+            # preceding EXTENDED_ARG opcode (carried in extended_arg).
+            oparg = ord(code[i]) + ord(code[i+1])*256 + extended_arg
+            extended_arg = 0
+            i = i+2
+            if op == EXTENDED_ARG:
+                extended_arg = oparg*65536L
+            print repr(oparg).rjust(5),
+            # Decode the argument according to the opcode family.
+            if op in hasconst:
+                print '(' + repr(co.co_consts[oparg]) + ')',
+            elif op in hasname:
+                print '(' + co.co_names[oparg] + ')',
+            elif op in hasjrel:
+                print '(to ' + repr(i + oparg) + ')',
+            elif op in haslocal:
+                print '(' + co.co_varnames[oparg] + ')',
+            elif op in hascompare:
+                print '(' + cmp_op[oparg] + ')',
+            elif op in hasfree:
+                if free is None:
+                    free = co.co_cellvars + co.co_freevars
+                print '(' + free[oparg] + ')',
+        print
+
+def disassemble_string(code, lasti=-1, varnames=None, names=None,
+                       constants=None):
+    """Disassemble a raw byte-code string.
+
+    Like disassemble(), but takes a plain string of byte code rather
+    than a code object.  'varnames', 'names' and 'constants' are
+    optional sequences used to pretty-print operands; numeric indices
+    are printed when they are absent.  No source line numbers are
+    available.
+    """
+    labels = findlabels(code)
+    n = len(code)
+    i = 0
+    while i < n:
+        c = code[i]
+        op = ord(c)
+        if i == lasti: print '-->',
+        else: print '   ',
+        if i in labels: print '>>',
+        else: print '  ',
+        print repr(i).rjust(4),
+        print opname[op].ljust(15),
+        i = i+1
+        if op >= HAVE_ARGUMENT:
+            # NOTE(review): unlike disassemble(), EXTENDED_ARG is not
+            # accumulated here, so >16-bit arguments print raw.
+            oparg = ord(code[i]) + ord(code[i+1])*256
+            i = i+2
+            print repr(oparg).rjust(5),
+            if op in hasconst:
+                if constants:
+                    print '(' + repr(constants[oparg]) + ')',
+                else:
+                    print '(%d)'%oparg,
+            elif op in hasname:
+                if names is not None:
+                    print '(' + names[oparg] + ')',
+                else:
+                    print '(%d)'%oparg,
+            elif op in hasjrel:
+                print '(to ' + repr(i + oparg) + ')',
+            elif op in haslocal:
+                if varnames:
+                    print '(' + varnames[oparg] + ')',
+                else:
+                    print '(%d)' % oparg,
+            elif op in hascompare:
+                print '(' + cmp_op[oparg] + ')',
+        print
+
+disco = disassemble                     # Alias kept for backward compatibility.
+
+def findlabels(code):
+    """Detect all offsets in a byte code which are jump targets.
+
+    Return the list of offsets.
+
+    """
+    labels = []
+    n = len(code)
+    i = 0
+    while i < n:
+        c = code[i]
+        op = ord(c)
+        i = i+1
+        if op >= HAVE_ARGUMENT:
+            oparg = ord(code[i]) + ord(code[i+1])*256
+            i = i+2
+            label = -1
+            if op in hasjrel:
+                label = i+oparg
+            elif op in hasjabs:
+                label = oparg
+            if label >= 0:
+                if label not in labels:
+                    labels.append(label)
+    return labels
+
+def findlinestarts(code):
+    """Find the offsets in a byte code which are start of lines in the source.
+
+    Generate pairs (offset, lineno) as described in Python/compile.c.
+
+    """
+    # co_lnotab is a string of (bytecode delta, line delta) byte pairs;
+    # split it into the two interleaved streams.
+    byte_increments = [ord(c) for c in code.co_lnotab[0::2]]
+    line_increments = [ord(c) for c in code.co_lnotab[1::2]]
+
+    lastlineno = None
+    lineno = code.co_firstlineno
+    addr = 0
+    for byte_incr, line_incr in zip(byte_increments, line_increments):
+        if byte_incr:
+            # A nonzero byte delta closes out the current line: emit it,
+            # unless several lnotab entries collapsed onto the same line.
+            if lineno != lastlineno:
+                yield (addr, lineno)
+                lastlineno = lineno
+            addr += byte_incr
+        lineno += line_incr
+    # Emit the final pending line, if any.
+    if lineno != lastlineno:
+        yield (addr, lineno)
+
+def _test():
+    """Simple test program to disassemble a file."""
+    if sys.argv[1:]:
+        if sys.argv[2:]:
+            sys.stderr.write("usage: python dis.py [-|file]\n")
+            sys.exit(2)
+        fn = sys.argv[1]
+        if not fn or fn == "-":
+            fn = None
+    else:
+        fn = None
+    if fn is None:
+        f = sys.stdin
+    else:
+        f = open(fn)
+    source = f.read()
+    if fn is not None:
+        f.close()
+    else:
+        fn = "<stdin>"
+    code = compile(source, fn, "exec")
+    dis(code)
+
+if __name__ == "__main__":
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/README b/depot_tools/release/win/python_24/Lib/distutils/README
new file mode 100644
index 0000000..100a71a8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/README
@@ -0,0 +1,22 @@
+This directory contains only a subset of the Distutils, specifically
+the Python modules in the 'distutils' and 'distutils.command'
+packages.  This is all you need to distribute and install Python
+modules using the Distutils.  There is also a separately packaged
+standalone version of the Distutils available for people who want to
+upgrade the Distutils without upgrading Python, available from the
+Distutils web page:
+
+    http://www.python.org/sigs/distutils-sig/
+
+The standalone version includes all of the code in this directory,
+plus documentation, test scripts, examples, etc.
+
+The Distutils documentation is divided into two documents, "Installing
+Python Modules", which explains how to install Python packages, and
+"Distributing Python Modules", which explains how to write setup.py
+files.  Both documents are part of the standard Python documentation
+set, and are available from http://www.python.org/doc/current/ .
+
+        Greg Ward (gward@python.net)
+
+$Id: README,v 1.2 2002/11/13 13:26:59 akuchling Exp $
diff --git a/depot_tools/release/win/python_24/Lib/distutils/__init__.py b/depot_tools/release/win/python_24/Lib/distutils/__init__.py
new file mode 100644
index 0000000..3057021
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/__init__.py
@@ -0,0 +1,15 @@
+"""distutils
+
+The main package for the Python Module Distribution Utilities.  Normally
+used from a setup script as
+
+   from distutils.core import setup
+
+   setup (...)
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: __init__.py,v 1.26.2.1 2005/01/20 19:25:24 theller Exp $"
+
+__version__ = "2.4.1"
diff --git a/depot_tools/release/win/python_24/Lib/distutils/archive_util.py b/depot_tools/release/win/python_24/Lib/distutils/archive_util.py
new file mode 100644
index 0000000..2bcfde7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/archive_util.py
@@ -0,0 +1,173 @@
+"""distutils.archive_util
+
+Utility functions for creating archive files (tarballs, zip files,
+that sort of thing)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: archive_util.py,v 1.17 2004/11/10 22:23:13 loewis Exp $"
+
+import os
+from distutils.errors import DistutilsExecError
+from distutils.spawn import spawn
+from distutils.dir_util import mkpath
+from distutils import log
+
+def make_tarball (base_name, base_dir, compress="gzip",
+                  verbose=0, dry_run=0):
+    """Create a (possibly compressed) tar file from all the files under
+    'base_dir'.  'compress' must be "gzip" (the default), "compress",
+    "bzip2", or None.  Both "tar" and the compression utility named by
+    'compress' must be on the default program search path, so this is
+    probably Unix-specific.  The output tar file will be named 'base_dir' +
+    ".tar", possibly plus the appropriate compression extension (".gz",
+    ".bz2" or ".Z").  Return the output filename.
+    """
+    # XXX GNU tar 1.13 has a nifty option to add a prefix directory.
+    # It's pretty new, though, so we certainly can't require it --
+    # but it would be nice to take advantage of it to skip the
+    # "create a tree of hardlinks" step!  (Would also be nice to
+    # detect GNU tar to use its 'z' option and save a step.)
+
+    compress_ext = { 'gzip': ".gz",
+                     'bzip2': '.bz2',
+                     'compress': ".Z" }
+
+    # flags for compression program, each element of list will be an argument
+    compress_flags = {'gzip': ["-f9"],
+                      'compress': ["-f"],
+                      'bzip2': ['-f9']}
+
+    if compress is not None and compress not in compress_ext.keys():
+        raise ValueError, \
+              "bad value for 'compress': must be None, 'gzip', or 'compress'"
+
+    archive_name = base_name + ".tar"
+    mkpath(os.path.dirname(archive_name), dry_run=dry_run)
+    cmd = ["tar", "-cf", archive_name, base_dir]
+    spawn(cmd, dry_run=dry_run)
+
+    if compress:
+        spawn([compress] + compress_flags[compress] + [archive_name],
+              dry_run=dry_run)
+        return archive_name + compress_ext[compress]
+    else:
+        return archive_name
+
+# make_tarball ()
+
+
+def make_zipfile (base_name, base_dir, verbose=0, dry_run=0):
+    """Create a zip file from all the files under 'base_dir'.  The output
+    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
+    Python module (if available) or the InfoZIP "zip" utility (if installed
+    and found on the default search path).  If neither tool is available,
+    raises DistutilsExecError.  Returns the name of the output zip file.
+    """
+    try:
+        import zipfile
+    except ImportError:
+        zipfile = None
+
+    zip_filename = base_name + ".zip"
+    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+
+    # If zipfile module is not available, try spawning an external
+    # 'zip' command.
+    if zipfile is None:
+        # 'verbose' only affects this external-zip fallback; the zipfile
+        # branch below reports through distutils.log instead.
+        if verbose:
+            zipoptions = "-r"
+        else:
+            zipoptions = "-rq"
+
+        try:
+            spawn(["zip", zipoptions, zip_filename, base_dir],
+                  dry_run=dry_run)
+        except DistutilsExecError:
+            # XXX really should distinguish between "couldn't find
+            # external 'zip' command" and "zip failed".
+            raise DistutilsExecError, \
+                  ("unable to create zip file '%s': "
+                   "could neither import the 'zipfile' module nor "
+                   "find a standalone zip utility") % zip_filename
+
+    else:
+        log.info("creating '%s' and adding '%s' to it",
+                 zip_filename, base_dir)
+
+        # Callback for os.path.walk(): add every regular file under
+        # 'dirname' to the open ZipFile 'z' (archive name == path).
+        def visit (z, dirname, names):
+            for name in names:
+                path = os.path.normpath(os.path.join(dirname, name))
+                if os.path.isfile(path):
+                    z.write(path, path)
+                    log.info("adding '%s'" % path)
+
+        if not dry_run:
+            z = zipfile.ZipFile(zip_filename, "w",
+                                compression=zipfile.ZIP_DEFLATED)
+
+            os.path.walk(base_dir, visit, z)
+            z.close()
+
+    return zip_filename
+
+# make_zipfile ()
+
+
+# Maps archive format name -> (creation function, list of (kwarg, value)
+# pairs passed to that function, human-readable description).
+ARCHIVE_FORMATS = {
+    'gztar': (make_tarball, [('compress', 'gzip')], "gzip'ed tar-file"),
+    'bztar': (make_tarball, [('compress', 'bzip2')], "bzip2'ed tar-file"),
+    'ztar':  (make_tarball, [('compress', 'compress')], "compressed tar file"),
+    'tar':   (make_tarball, [('compress', None)], "uncompressed tar file"),
+    'zip':   (make_zipfile, [],"ZIP file")
+    }
+
+def check_archive_formats (formats):
+    for format in formats:
+        if not ARCHIVE_FORMATS.has_key(format):
+            return format
+    else:
+        return None
+
+def make_archive (base_name, format,
+                  root_dir=None, base_dir=None,
+                  verbose=0, dry_run=0):
+    """Create an archive file (eg. zip or tar).  'base_name' is the name
+    of the file to create, minus any format-specific extension; 'format'
+    is the archive format: one of "zip", "tar", "ztar", or "gztar".
+    'root_dir' is a directory that will be the root directory of the
+    archive; ie. we typically chdir into 'root_dir' before creating the
+    archive.  'base_dir' is the directory where we start archiving from;
+    ie. 'base_dir' will be the common prefix of all files and
+    directories in the archive.  'root_dir' and 'base_dir' both default
+    to the current directory.  Returns the name of the archive file.
+    """
+    save_cwd = os.getcwd()
+    if root_dir is not None:
+        log.debug("changing into '%s'", root_dir)
+        base_name = os.path.abspath(base_name)
+        if not dry_run:
+            os.chdir(root_dir)
+
+    if base_dir is None:
+        base_dir = os.curdir
+
+    kwargs = { 'dry_run': dry_run }
+
+    try:
+        format_info = ARCHIVE_FORMATS[format]
+    except KeyError:
+        raise ValueError, "unknown archive format '%s'" % format
+
+    func = format_info[0]
+    for (arg,val) in format_info[1]:
+        kwargs[arg] = val
+    filename = apply(func, (base_name, base_dir), kwargs)
+
+    if root_dir is not None:
+        log.debug("changing back to '%s'", save_cwd)
+        os.chdir(save_cwd)
+
+    return filename
+
+# make_archive ()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/bcppcompiler.py b/depot_tools/release/win/python_24/Lib/distutils/bcppcompiler.py
new file mode 100644
index 0000000..e3a303d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/bcppcompiler.py
@@ -0,0 +1,398 @@
+"""distutils.bcppcompiler
+
+Contains BorlandCCompiler, an implementation of the abstract CCompiler class
+for the Borland C++ compiler.
+"""
+
+# This implementation by Lyle Johnson, based on the original msvccompiler.py
+# module and using the directions originally published by Gordon Williams.
+
+# XXX looks like there's a LOT of overlap between these two classes:
+# someone should sit down and factor out the common code as
+# WindowsCCompiler!  --GPW
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: bcppcompiler.py,v 1.18 2004/11/10 22:23:13 loewis Exp $"
+
+
+import sys, os
+from distutils.errors import \
+     DistutilsExecError, DistutilsPlatformError, \
+     CompileError, LibError, LinkError, UnknownFileError
+from distutils.ccompiler import \
+     CCompiler, gen_preprocess_options, gen_lib_options
+from distutils.file_util import write_file
+from distutils.dep_util import newer
+from distutils import log
+
+class BCPPCompiler(CCompiler) :
+    """Concrete class that implements an interface to the Borland C/C++
+    compiler, as defined by the CCompiler abstract class.
+    """
+
+    compiler_type = 'bcpp'
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = _c_extensions + _cpp_extensions
+    obj_extension = '.obj'
+    static_lib_extension = '.lib'
+    shared_lib_extension = '.dll'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = '.exe'
+
+
+    def __init__ (self,
+                  verbose=0,
+                  dry_run=0,
+                  force=0):
+        """Set up Borland tool names and the default compile/link flags."""
+
+        CCompiler.__init__ (self, verbose, dry_run, force)
+
+        # These executables are assumed to all be in the path.
+        # Borland doesn't seem to use any special registry settings to
+        # indicate their installation locations.
+
+        self.cc = "bcc32.exe"
+        self.linker = "ilink32.exe"
+        self.lib = "tlib.exe"
+
+        self.preprocess_options = None
+        self.compile_options = ['/tWM', '/O2', '/q', '/g0']
+        self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
+
+        self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
+        self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
+        self.ldflags_static = []
+        self.ldflags_exe = ['/Gn', '/q', '/x']
+        self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']
+
+
+    # -- Worker methods ------------------------------------------------
+
+    def compile(self, sources,
+                output_dir=None, macros=None, include_dirs=None, debug=0,
+                extra_preargs=None, extra_postargs=None, depends=None):
+        """Compile each file in 'sources'; return the list of object
+        filenames.  See CCompiler.compile() for the full argument
+        contract.  .rc resources are compiled with brcc32; .res files
+        are passed through untouched."""
+
+        macros, objects, extra_postargs, pp_opts, build = \
+                self._setup_compile(output_dir, macros, include_dirs, sources,
+                                    depends, extra_postargs)
+        compile_opts = extra_preargs or []
+        compile_opts.append ('-c')
+        if debug:
+            compile_opts.extend (self.compile_options_debug)
+        else:
+            compile_opts.extend (self.compile_options)
+
+        for obj in objects:
+            try:
+                src, ext = build[obj]
+            except KeyError:
+                continue
+            # XXX why do the normpath here?
+            src = os.path.normpath(src)
+            obj = os.path.normpath(obj)
+            # XXX _setup_compile() did a mkpath() too but before the normpath.
+            # Is it possible to skip the normpath?
+            self.mkpath(os.path.dirname(obj))
+
+            if ext == '.res':
+                # This is already a binary file -- skip it.
+                continue # the 'for' loop
+            if ext == '.rc':
+                # This needs to be compiled to a .res file -- do it now.
+                try:
+                    self.spawn (["brcc32", "-fo", obj, src])
+                except DistutilsExecError, msg:
+                    raise CompileError, msg
+                continue # the 'for' loop
+
+            # The next two are both for the real compiler.
+            if ext in self._c_extensions:
+                input_opt = ""
+            elif ext in self._cpp_extensions:
+                input_opt = "-P"
+            else:
+                # Unknown file type -- no extra options.  The compiler
+                # will probably fail, but let it just in case this is a
+                # file the compiler recognizes even if we don't.
+                input_opt = ""
+
+            output_opt = "-o" + obj
+
+            # Compiler command line syntax is: "bcc32 [options] file(s)".
+            # Note that the source file names must appear at the end of
+            # the command line.
+            try:
+                self.spawn ([self.cc] + compile_opts + pp_opts +
+                            [input_opt, output_opt] +
+                            extra_postargs + [src])
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+
+        return objects
+
+    # compile ()
+
+
+    def create_static_lib (self,
+                           objects,
+                           output_libname,
+                           output_dir=None,
+                           debug=0,
+                           target_lang=None):
+        """Bundle 'objects' into a static library with tlib.  Skipped
+        when the library is already newer than all of the objects."""
+
+        (objects, output_dir) = self._fix_object_args (objects, output_dir)
+        output_filename = \
+            self.library_filename (output_libname, output_dir=output_dir)
+
+        if self._need_link (objects, output_filename):
+            # NOTE(review): '/u' presumably asks tlib to add/replace each
+            # module -- confirm against the Borland tlib documentation.
+            lib_args = [output_filename, '/u'] + objects
+            if debug:
+                pass                    # XXX what goes here?
+            try:
+                self.spawn ([self.lib] + lib_args)
+            except DistutilsExecError, msg:
+                raise LibError, msg
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+    # create_static_lib ()
+
+
+    def link (self,
+              target_desc,
+              objects,
+              output_filename,
+              output_dir=None,
+              libraries=None,
+              library_dirs=None,
+              runtime_library_dirs=None,
+              export_symbols=None,
+              debug=0,
+              extra_preargs=None,
+              extra_postargs=None,
+              build_temp=None,
+              target_lang=None):
+        """Link 'objects' into an executable or shared library with
+        ilink32.  See CCompiler.link() for the argument contract."""
+
+        # XXX this ignores 'build_temp'!  should follow the lead of
+        # msvccompiler.py
+
+        (objects, output_dir) = self._fix_object_args (objects, output_dir)
+        (libraries, library_dirs, runtime_library_dirs) = \
+            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
+
+        if runtime_library_dirs:
+            log.warn("I don't know what to do with 'runtime_library_dirs': %s",
+                     str(runtime_library_dirs))
+
+        if output_dir is not None:
+            output_filename = os.path.join (output_dir, output_filename)
+
+        if self._need_link (objects, output_filename):
+
+            # Figure out linker args based on type of target.
+            # NOTE(review): c0w32/c0d32 look like Borland's startup objects
+            # for executables vs. DLLs -- confirm against Borland docs.
+            if target_desc == CCompiler.EXECUTABLE:
+                startup_obj = 'c0w32'
+                if debug:
+                    ld_args = self.ldflags_exe_debug[:]
+                else:
+                    ld_args = self.ldflags_exe[:]
+            else:
+                startup_obj = 'c0d32'
+                if debug:
+                    ld_args = self.ldflags_shared_debug[:]
+                else:
+                    ld_args = self.ldflags_shared[:]
+
+
+            # Create a temporary exports file for use by the linker
+            if export_symbols is None:
+                def_file = ''
+            else:
+                head, tail = os.path.split (output_filename)
+                modname, ext = os.path.splitext (tail)
+                temp_dir = os.path.dirname(objects[0]) # preserve tree structure
+                def_file = os.path.join (temp_dir, '%s.def' % modname)
+                contents = ['EXPORTS']
+                for sym in (export_symbols or []):
+                    contents.append('  %s=_%s' % (sym, sym))
+                self.execute(write_file, (def_file, contents),
+                             "writing %s" % def_file)
+
+            # Borland C++ has problems with '/' in paths
+            objects2 = map(os.path.normpath, objects)
+            # split objects in .obj and .res files
+            # Borland C++ needs them at different positions in the command line
+            objects = [startup_obj]
+            resources = []
+            for file in objects2:
+                (base, ext) = os.path.splitext(os.path.normcase(file))
+                if ext == '.res':
+                    resources.append(file)
+                else:
+                    objects.append(file)
+
+
+            for l in library_dirs:
+                ld_args.append("/L%s" % os.path.normpath(l))
+            ld_args.append("/L.") # we sometimes use relative paths
+
+            # list of object files
+            ld_args.extend(objects)
+
+            # XXX the command-line syntax for Borland C++ is a bit wonky;
+            # certain filenames are jammed together in one big string, but
+            # comma-delimited.  This doesn't mesh too well with the
+            # Unix-centric attitude (with a DOS/Windows quoting hack) of
+            # 'spawn()', so constructing the argument list is a bit
+            # awkward.  Note that doing the obvious thing and jamming all
+            # the filenames and commas into one argument would be wrong,
+            # because 'spawn()' would quote any filenames with spaces in
+            # them.  Arghghh!.  Apparently it works fine as coded...
+
+            # name of dll/exe file
+            ld_args.extend([',',output_filename])
+            # no map file and start libraries
+            ld_args.append(',,')
+
+            for lib in libraries:
+                # see if we find it and if there is a bcpp specific lib
+                # (xxx_bcpp.lib)
+                libfile = self.find_library_file(library_dirs, lib, debug)
+                if libfile is None:
+                    ld_args.append(lib)
+                    # probably a BCPP internal library -- don't warn
+                else:
+                    # full name which prefers bcpp_xxx.lib over xxx.lib
+                    ld_args.append(libfile)
+
+            # some default libraries
+            ld_args.append ('import32')
+            ld_args.append ('cw32mt')
+
+            # def file for export symbols
+            ld_args.extend([',',def_file])
+            # add resource files
+            ld_args.append(',')
+            ld_args.extend(resources)
+
+
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+
+            self.mkpath (os.path.dirname (output_filename))
+            try:
+                self.spawn ([self.linker] + ld_args)
+            except DistutilsExecError, msg:
+                raise LinkError, msg
+
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+    # link ()
+
+    # -- Miscellaneous methods -----------------------------------------
+
+
+    def find_library_file (self, dirs, lib, debug=0):
+        """Search 'dirs' for the best match for library 'lib'; return
+        its path, or None if no candidate file exists."""
+        # List of effective library names to try, in order of preference:
+        # xxx_bcpp.lib is better than xxx.lib
+        # and xxx_d.lib is better than xxx.lib if debug is set
+        #
+        # The "_bcpp" suffix is to handle a Python installation for people
+        # with multiple compilers (primarily Distutils hackers, I suspect
+        # ;-).  The idea is they'd have one static library for each
+        # compiler they care about, since (almost?) every Windows compiler
+        # seems to have a different format for static libraries.
+        if debug:
+            dlib = (lib + "_d")
+            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
+        else:
+            try_names = (lib + "_bcpp", lib)
+
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename(name))
+                if os.path.exists(libfile):
+                    return libfile
+        else:
+            # Oops, didn't find it in *any* of 'dirs'
+            return None
+
+    # overwrite the one from CCompiler to support rc and res-files
+    def object_filenames (self,
+                          source_filenames,
+                          strip_dir=0,
+                          output_dir=''):
+        """Map source filenames to object filenames, additionally
+        accepting .rc (compiled to .res) and .res inputs.  Raises
+        UnknownFileError for anything else."""
+        if output_dir is None: output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+            (base, ext) = os.path.splitext (os.path.normcase(src_name))
+            if ext not in (self.src_extensions + ['.rc','.res']):
+                raise UnknownFileError, \
+                      "unknown file type '%s' (from '%s')" % \
+                      (ext, src_name)
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext == '.res':
+                # these can go unchanged
+                obj_names.append (os.path.join (output_dir, base + ext))
+            elif ext == '.rc':
+                # these need to be compiled to .res-files
+                obj_names.append (os.path.join (output_dir, base + '.res'))
+            else:
+                obj_names.append (os.path.join (output_dir,
+                                            base + self.obj_extension))
+        return obj_names
+
+    # object_filenames ()
+
+    def preprocess (self,
+                    source,
+                    output_file=None,
+                    macros=None,
+                    include_dirs=None,
+                    extra_preargs=None,
+                    extra_postargs=None):
+        """Run Borland's cpp32 preprocessor over 'source', writing the
+        result to 'output_file' (or stdout when it is None)."""
+
+        (_, macros, include_dirs) = \
+            self._fix_compile_args(None, macros, include_dirs)
+        pp_opts = gen_preprocess_options(macros, include_dirs)
+        pp_args = ['cpp32.exe'] + pp_opts
+        if output_file is not None:
+            pp_args.append('-o' + output_file)
+        if extra_preargs:
+            pp_args[:0] = extra_preargs
+        if extra_postargs:
+            pp_args.extend(extra_postargs)
+        pp_args.append(source)
+
+        # We need to preprocess: either we're being forced to, or the
+        # source file is newer than the target (or the target doesn't
+        # exist).
+        if self.force or output_file is None or newer(source, output_file):
+            if output_file:
+                self.mkpath(os.path.dirname(output_file))
+            try:
+                self.spawn(pp_args)
+            except DistutilsExecError, msg:
+                print msg
+                raise CompileError, msg
+
+    # preprocess()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/ccompiler.py b/depot_tools/release/win/python_24/Lib/distutils/ccompiler.py
new file mode 100644
index 0000000..2194501
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/ccompiler.py
@@ -0,0 +1,1270 @@
+"""distutils.ccompiler
+
+Contains CCompiler, an abstract base class that defines the interface
+for the Distutils compiler abstraction model."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: ccompiler.py,v 1.61 2004/11/10 22:23:13 loewis Exp $"
+
+import sys, os, re
+from types import *
+from copy import copy
+from distutils.errors import *
+from distutils.spawn import spawn
+from distutils.file_util import move_file
+from distutils.dir_util import mkpath
+from distutils.dep_util import newer_pairwise, newer_group
+from distutils.sysconfig import python_build
+from distutils.util import split_quoted, execute
+from distutils import log
+
+class CCompiler:
+    """Abstract base class to define the interface that must be implemented
+    by real compiler classes.  Also has some utility methods used by
+    several compiler classes.
+
+    The basic idea behind a compiler abstraction class is that each
+    instance can be used for all the compile/link steps in building a
+    single project.  Thus, attributes common to all of those compile and
+    link steps -- include directories, macros to define, libraries to link
+    against, etc. -- are attributes of the compiler instance.  To allow for
+    variability in how individual files are treated, most of those
+    attributes may be varied on a per-compilation or per-link basis.
+    """
+
+    # 'compiler_type' is a class attribute that identifies this class.  It
+    # keeps code that wants to know what kind of compiler it's dealing with
+    # from having to import all possible compiler classes just to do an
+    # 'isinstance'.  In concrete CCompiler subclasses, 'compiler_type'
+    # should really, really be one of the keys of the 'compiler_class'
+    # dictionary (see below -- used by the 'new_compiler()' factory
+    # function) -- authors of new compiler interface classes are
+    # responsible for updating 'compiler_class'!
+    compiler_type = None
+
+    # XXX things not handled by this compiler abstraction model:
+    #   * client can't provide additional options for a compiler,
+    #     e.g. warning, optimization, debugging flags.  Perhaps this
+    #     should be the domain of concrete compiler abstraction classes
+    #     (UnixCCompiler, MSVCCompiler, etc.) -- or perhaps the base
+    #     class should have methods for the common ones.
+    #   * can't completely override the include or library search
+    #     path, ie. no "cc -I -Idir1 -Idir2" or "cc -L -Ldir1 -Ldir2".
+    #     I'm not sure how widely supported this is even by Unix
+    #     compilers, much less on other platforms.  And I'm even less
+    #     sure how useful it is; maybe for cross-compiling, but
+    #     support for that is a ways off.  (And anyways, cross
+    #     compilers probably have a dedicated binary with the
+    #     right paths compiled in.  I hope.)
+    #   * can't do really freaky things with the library list/library
+    #     dirs, e.g. "-Ldir1 -lfoo -Ldir2 -lfoo" to link against
+    #     different versions of libfoo.a in different locations.  I
+    #     think this is useless without the ability to null out the
+    #     library search path anyways.
+
+
+    # Subclasses that rely on the standard filename generation methods
+    # implemented below should override these; see the comment near
+    # those methods ('object_filenames()' et. al.) for details:
+    src_extensions = None               # list of strings
+    obj_extension = None                # string
+    static_lib_extension = None
+    shared_lib_extension = None         # string
+    static_lib_format = None            # format string
+    shared_lib_format = None            # prob. same as static_lib_format
+    exe_extension = None                # string
+
+    # Default language settings. language_map is used to detect a source
+    # file or Extension target language, checking source filenames.
+    # language_order is used to detect the language precedence, when deciding
+    # what language to use when mixing source types. For example, if some
+    # extension has two files with ".c" extension, and one with ".cpp", it
+    # is still linked as c++.
+    language_map = {".c"   : "c",
+                    ".cc"  : "c++",
+                    ".cpp" : "c++",
+                    ".cxx" : "c++",
+                    ".m"   : "objc",
+                   }
+    language_order = ["c++", "objc", "c"]
+
+    def __init__ (self,
+                  verbose=0,
+                  dry_run=0,
+                  force=0):
+        """Initialize per-instance state.
+
+        'force' makes the recompile/relink checks rebuild everything;
+        'dry_run' is stored for use by command-running helpers (see
+        '_need_link()'); 'verbose' is stored but not otherwise
+        interpreted in this class.
+        """
+        self.dry_run = dry_run
+        self.force = force
+        self.verbose = verbose
+
+        # 'output_dir': a common output directory for object, library,
+        # shared object, and shared library files
+        self.output_dir = None
+
+        # 'macros': a list of macro definitions (or undefinitions).  A
+        # macro definition is a 2-tuple (name, value), where the value is
+        # either a string or None (no explicit value).  A macro
+        # undefinition is a 1-tuple (name,).
+        self.macros = []
+
+        # 'include_dirs': a list of directories to search for include files
+        self.include_dirs = []
+
+        # 'libraries': a list of libraries to include in any link
+        # (library names, not filenames: eg. "foo" not "libfoo.a")
+        self.libraries = []
+
+        # 'library_dirs': a list of directories to search for libraries
+        self.library_dirs = []
+
+        # 'runtime_library_dirs': a list of directories to search for
+        # shared libraries/objects at runtime
+        self.runtime_library_dirs = []
+
+        # 'objects': a list of object files (or similar, such as explicitly
+        # named library files) to include on any link
+        self.objects = []
+
+        # Turn every entry of the 'executables' dict (a class attribute
+        # supplied by concrete subclasses -- see 'set_executables()')
+        # into an instance attribute.
+        for key in self.executables.keys():
+            self.set_executable(key, self.executables[key])
+
+    # __init__ ()
+
+
+    def set_executables (self, **args):
+
+        """Define the executables (and options for them) that will be run
+        to perform the various stages of compilation.  The exact set of
+        executables that may be specified here depends on the compiler
+        class (via the 'executables' class attribute), but most will have:
+          compiler      the C/C++ compiler
+          linker_so     linker used to create shared objects and libraries
+          linker_exe    linker used to create binary executables
+          archiver      static library creator
+
+        On platforms with a command-line (Unix, DOS/Windows), each of these
+        is a string that will be split into executable name and (optional)
+        list of arguments.  (Splitting the string is done similarly to how
+        Unix shells operate: words are delimited by spaces, but quotes and
+        backslashes can override this.  See
+        'distutils.util.split_quoted()'.)
+
+        Raises ValueError for any keyword argument whose name is not a
+        key of the class's 'executables' dict.
+        """
+
+        # Note that some CCompiler implementation classes will define class
+        # attributes 'cpp', 'cc', etc. with hard-coded executable names;
+        # this is appropriate when a compiler class is for exactly one
+        # compiler/OS combination (eg. MSVCCompiler).  Other compiler
+        # classes (UnixCCompiler, in particular) are driven by information
+        # discovered at run-time, since there are many different ways to do
+        # basically the same things with Unix C compilers.
+
+        for key in args.keys():
+            if not self.executables.has_key(key):
+                raise ValueError, \
+                      "unknown executable '%s' for class %s" % \
+                      (key, self.__class__.__name__)
+            self.set_executable(key, args[key])
+
+    # set_executables ()
+
+    def set_executable(self, key, value):
+        """Set instance attribute 'key' to 'value'.  A string value is
+        split into an argv-style list with 'split_quoted()'; any other
+        value (eg. an already-split list) is stored unchanged.
+        """
+        if type(value) is StringType:
+            setattr(self, key, split_quoted(value))
+        else:
+            setattr(self, key, value)
+
+
+    def _find_macro (self, name):
+        """Return the index of the first entry for macro 'name' in
+        'self.macros', or None if 'name' is not defined/undefined there.
+        """
+        i = 0
+        for defn in self.macros:
+            if defn[0] == name:
+                return i
+            i = i + 1
+
+        return None
+
+
+    def _check_macro_definitions (self, definitions):
+        """Ensures that every element of 'definitions' is a valid macro
+        definition, ie. either (name,value) 2-tuple or a (name,) tuple.  Do
+        nothing if all definitions are OK, raise TypeError otherwise.
+        """
+        for defn in definitions:
+            # Accept (name,) or (name, value) where 'name' is a string
+            # and 'value' is a string or None; anything else is malformed.
+            if not (type (defn) is TupleType and
+                    (len (defn) == 1 or
+                     (len (defn) == 2 and
+                      (type (defn[1]) is StringType or defn[1] is None))) and
+                    type (defn[0]) is StringType):
+                raise TypeError, \
+                      ("invalid macro definition '%s': " % defn) + \
+                      "must be tuple (string,), (string, string), or " + \
+                      "(string, None)"
+
+
+    # -- Bookkeeping methods -------------------------------------------
+
+    def define_macro (self, name, value=None):
+        """Define a preprocessor macro for all compilations driven by this
+        compiler object.  The optional parameter 'value' should be a
+        string; if it is not supplied, then the macro will be defined
+        without an explicit value and the exact outcome depends on the
+        compiler used (XXX true? does ANSI say anything about this?)
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro (name)
+        if i is not None:
+            del self.macros[i]
+
+        # A 2-tuple marks a definition (vs. a 1-tuple for an undefinition).
+        defn = (name, value)
+        self.macros.append (defn)
+
+
+    def undefine_macro (self, name):
+        """Undefine a preprocessor macro for all compilations driven by
+        this compiler object.  If the same macro is defined by
+        'define_macro()' and undefined by 'undefine_macro()' the last call
+        takes precedence (including multiple redefinitions or
+        undefinitions).  If the macro is redefined/undefined on a
+        per-compilation basis (ie. in the call to 'compile()'), then that
+        takes precedence.
+        """
+        # Delete from the list of macro definitions/undefinitions if
+        # already there (so that this one will take precedence).
+        i = self._find_macro (name)
+        if i is not None:
+            del self.macros[i]
+
+        # A 1-tuple marks an undefinition (vs. a 2-tuple for a definition).
+        undefn = (name,)
+        self.macros.append (undefn)
+
+
+    def add_include_dir (self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        header files.  The compiler is instructed to search directories in
+        the order in which they are supplied by successive calls to
+        'add_include_dir()'.
+        """
+        # Appends unconditionally; duplicates are not filtered out.
+        self.include_dirs.append (dir)
+
+    def set_include_dirs (self, dirs):
+        """Set the list of directories that will be searched to 'dirs' (a
+        list of strings).  Overrides any preceding calls to
+        'add_include_dir()'; subsequent calls to 'add_include_dir()' add
+        to the list passed to 'set_include_dirs()'.  This does not affect
+        any list of standard include directories that the compiler may
+        search by default.
+        """
+        # Shallow-copy so later mutation of the caller's list has no effect.
+        self.include_dirs = copy (dirs)
+
+
+    def add_library (self, libname):
+        """Add 'libname' to the list of libraries that will be included in
+        all links driven by this compiler object.  Note that 'libname'
+        should *not* be the name of a file containing a library, but the
+        name of the library itself: the actual filename will be inferred by
+        the linker, the compiler, or the compiler class (depending on the
+        platform).
+
+        The linker will be instructed to link against libraries in the
+        order they were supplied to 'add_library()' and/or
+        'set_libraries()'.  It is perfectly valid to duplicate library
+        names; the linker will be instructed to link against libraries as
+        many times as they are mentioned.
+        """
+        self.libraries.append (libname)
+
+    def set_libraries (self, libnames):
+        """Set the list of libraries to be included in all links driven by
+        this compiler object to 'libnames' (a list of strings).  This does
+        not affect any standard system libraries that the linker may
+        include by default.
+        """
+        # Shallow-copy so later mutation of the caller's list has no effect.
+        self.libraries = copy (libnames)
+
+
+    def add_library_dir (self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        libraries specified to 'add_library()' and 'set_libraries()'.  The
+        linker will be instructed to search for libraries in the order they
+        are supplied to 'add_library_dir()' and/or 'set_library_dirs()'.
+        """
+        # Appends unconditionally; duplicates are not filtered out.
+        self.library_dirs.append (dir)
+
+    def set_library_dirs (self, dirs):
+        """Set the list of library search directories to 'dirs' (a list of
+        strings).  This does not affect any standard library search path
+        that the linker may search by default.
+        """
+        # Shallow-copy so later mutation of the caller's list has no effect.
+        self.library_dirs = copy (dirs)
+
+
+    def add_runtime_library_dir (self, dir):
+        """Add 'dir' to the list of directories that will be searched for
+        shared libraries at runtime.  Directories are searched in the
+        order they are added.
+        """
+        self.runtime_library_dirs.append (dir)
+
+    def set_runtime_library_dirs (self, dirs):
+        """Set the list of directories to search for shared libraries at
+        runtime to 'dirs' (a list of strings).  This does not affect any
+        standard search path that the runtime linker may search by
+        default.
+        """
+        # Shallow-copy so later mutation of the caller's list has no effect.
+        self.runtime_library_dirs = copy (dirs)
+
+
+    def add_link_object (self, object):
+        """Add 'object' to the list of object files (or analogues, such as
+        explicitly named library files or the output of "resource
+        compilers") to be included in every link driven by this compiler
+        object.
+        """
+        self.objects.append (object)
+
+    def set_link_objects (self, objects):
+        """Set the list of object files (or analogues) to be included in
+        every link to 'objects'.  This does not affect any standard object
+        files that the linker may include by default (such as system
+        libraries).
+        """
+        # Shallow-copy so later mutation of the caller's list has no effect.
+        self.objects = copy (objects)
+
+
+    # -- Private utility methods --------------------------------------
+    # (here for the convenience of subclasses)
+
+    # Helper method to prep compiler in subclass compile() methods
+
+    def _setup_compile(self, outdir, macros, incdirs, sources, depends,
+                       extra):
+        """Process arguments and decide which source files to compile.
+
+        Merges _fix_compile_args() and _prep_compile().  Returns a tuple
+        (macros, objects, extra, pp_opts, build): 'objects' lists every
+        expected object filename, one per source; 'pp_opts' is the
+        preprocessor option list; 'build' maps just the out-of-date
+        object filenames to their (source, ext) pairs.  Raises TypeError
+        for badly typed 'outdir', 'macros' or 'incdirs'.
+        """
+        if outdir is None:
+            outdir = self.output_dir
+        elif type(outdir) is not StringType:
+            raise TypeError, "'output_dir' must be a string or None"
+
+        if macros is None:
+            macros = self.macros
+        elif type(macros) is ListType:
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError, "'macros' (if supplied) must be a list of tuples"
+
+        if incdirs is None:
+            incdirs = self.include_dirs
+        elif type(incdirs) in (ListType, TupleType):
+            incdirs = list(incdirs) + (self.include_dirs or [])
+        else:
+            raise TypeError, \
+                  "'include_dirs' (if supplied) must be a list of strings"
+
+        if extra is None:
+            extra = []
+
+        # Get the list of expected output (object) files
+        objects = self.object_filenames(sources,
+                                        strip_dir=python_build,
+                                        output_dir=outdir)
+        assert len(objects) == len(sources)
+
+        # XXX should redo this code to eliminate skip_source entirely.
+        # XXX instead create build and issue skip messages inline
+
+        if self.force:
+            skip_source = {}            # rebuild everything
+            for source in sources:
+                skip_source[source] = 0
+        elif depends is None:
+            # If depends is None, figure out which source files we
+            # have to recompile according to a simplistic check. We
+            # just compare the source and object file, no deep
+            # dependency checking involving header files.
+            skip_source = {}            # rebuild everything
+            for source in sources:      # no wait, rebuild nothing
+                skip_source[source] = 1
+
+            # (newer_pairwise also returns the matching objects; n_objects
+            # is not needed here.)
+            n_sources, n_objects = newer_pairwise(sources, objects)
+            for source in n_sources:    # no really, only rebuild what's
+                skip_source[source] = 0 # out-of-date
+        else:
+            # If depends is a list of files, then do a different
+            # simplistic check.  Assume that each object depends on
+            # its source and all files in the depends list.
+            skip_source = {}
+            # L contains all the depends plus a spot at the end for a
+            # particular source file
+            L = depends[:] + [None]
+            for i in range(len(objects)):
+                source = sources[i]
+                L[-1] = source
+                if newer_group(L, objects[i]):
+                    skip_source[source] = 0
+                else:
+                    skip_source[source] = 1
+
+        pp_opts = gen_preprocess_options(macros, incdirs)
+
+        build = {}
+        for i in range(len(sources)):
+            src = sources[i]
+            obj = objects[i]
+            ext = os.path.splitext(src)[1]
+            self.mkpath(os.path.dirname(obj))
+            if skip_source[src]:
+                log.debug("skipping %s (%s up-to-date)", src, obj)
+            else:
+                build[obj] = src, ext
+
+        return macros, objects, extra, pp_opts, build
+
+    def _get_cc_args(self, pp_opts, debug, before):
+        """Assemble the common compiler argument list: preprocessor
+        options plus '-c', with '-g' prepended when 'debug' is true, and
+        any 'before' arguments placed first.
+        """
+        # works for unixccompiler, emxccompiler, cygwinccompiler
+        cc_args = pp_opts + ['-c']
+        if debug:
+            cc_args[:0] = ['-g']
+        if before:
+            cc_args[:0] = before
+        return cc_args
+
+    def _fix_compile_args (self, output_dir, macros, include_dirs):
+        """Typecheck and fix-up some of the arguments to the 'compile()'
+        method, and return fixed-up values.  Specifically: if 'output_dir'
+        is None, replaces it with 'self.output_dir'; ensures that 'macros'
+        is a list, and augments it with 'self.macros'; ensures that
+        'include_dirs' is a list, and augments it with 'self.include_dirs'.
+        Guarantees that the returned values are of the correct type,
+        i.e. for 'output_dir' either string or None, and for 'macros' and
+        'include_dirs' either list or None.  Raises TypeError for badly
+        typed arguments.
+        """
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif type (output_dir) is not StringType:
+            raise TypeError, "'output_dir' must be a string or None"
+
+        if macros is None:
+            macros = self.macros
+        elif type (macros) is ListType:
+            macros = macros + (self.macros or [])
+        else:
+            raise TypeError, "'macros' (if supplied) must be a list of tuples"
+
+        if include_dirs is None:
+            include_dirs = self.include_dirs
+        elif type (include_dirs) in (ListType, TupleType):
+            include_dirs = list (include_dirs) + (self.include_dirs or [])
+        else:
+            raise TypeError, \
+                  "'include_dirs' (if supplied) must be a list of strings"
+
+        return output_dir, macros, include_dirs
+
+    # _fix_compile_args ()
+
+
+    def _prep_compile(self, sources, output_dir, depends=None):
+        """Decide which source files must be recompiled.
+
+        Determine the list of object files corresponding to 'sources',
+        and figure out which ones really need to be recompiled.
+        Return a list of all object files and a dictionary telling
+        which source files can be skipped.
+
+        NOTE: the skip_source logic below duplicates '_setup_compile()';
+        kept in both places, apparently for backward compatibility.
+        """
+        # Get the list of expected output (object) files
+        objects = self.object_filenames(sources, strip_dir=python_build,
+                                        output_dir=output_dir)
+        assert len(objects) == len(sources)
+
+        if self.force:
+            skip_source = {}            # rebuild everything
+            for source in sources:
+                skip_source[source] = 0
+        elif depends is None:
+            # If depends is None, figure out which source files we
+            # have to recompile according to a simplistic check. We
+            # just compare the source and object file, no deep
+            # dependency checking involving header files.
+            skip_source = {}            # rebuild everything
+            for source in sources:      # no wait, rebuild nothing
+                skip_source[source] = 1
+
+            n_sources, n_objects = newer_pairwise(sources, objects)
+            for source in n_sources:    # no really, only rebuild what's
+                skip_source[source] = 0 # out-of-date
+        else:
+            # If depends is a list of files, then do a different
+            # simplistic check.  Assume that each object depends on
+            # its source and all files in the depends list.
+            skip_source = {}
+            # L contains all the depends plus a spot at the end for a
+            # particular source file
+            L = depends[:] + [None]
+            for i in range(len(objects)):
+                source = sources[i]
+                L[-1] = source
+                if newer_group(L, objects[i]):
+                    skip_source[source] = 0
+                else:
+                    skip_source[source] = 1
+
+        return objects, skip_source
+
+    # _prep_compile ()
+
+
+    def _fix_object_args (self, objects, output_dir):
+        """Typecheck and fix up some arguments supplied to various methods.
+        Specifically: ensure that 'objects' is a list; if output_dir is
+        None, replace with self.output_dir.  Return fixed versions of
+        'objects' and 'output_dir'.  Raises TypeError for badly typed
+        arguments.
+        """
+        if type (objects) not in (ListType, TupleType):
+            raise TypeError, \
+                  "'objects' must be a list or tuple of strings"
+        objects = list (objects)
+
+        if output_dir is None:
+            output_dir = self.output_dir
+        elif type (output_dir) is not StringType:
+            raise TypeError, "'output_dir' must be a string or None"
+
+        return (objects, output_dir)
+
+
+    def _fix_lib_args (self, libraries, library_dirs, runtime_library_dirs):
+        """Typecheck and fix up some of the arguments supplied to the
+        'link_*' methods.  Specifically: ensure that all arguments are
+        lists, and augment them with their permanent versions
+        (eg. 'self.libraries' augments 'libraries').  Return a tuple with
+        fixed versions of all arguments.  Raises TypeError for badly
+        typed arguments.
+        """
+        if libraries is None:
+            libraries = self.libraries
+        elif type (libraries) in (ListType, TupleType):
+            libraries = list (libraries) + (self.libraries or [])
+        else:
+            raise TypeError, \
+                  "'libraries' (if supplied) must be a list of strings"
+
+        if library_dirs is None:
+            library_dirs = self.library_dirs
+        elif type (library_dirs) in (ListType, TupleType):
+            library_dirs = list (library_dirs) + (self.library_dirs or [])
+        else:
+            raise TypeError, \
+                  "'library_dirs' (if supplied) must be a list of strings"
+
+        if runtime_library_dirs is None:
+            runtime_library_dirs = self.runtime_library_dirs
+        elif type (runtime_library_dirs) in (ListType, TupleType):
+            runtime_library_dirs = (list (runtime_library_dirs) +
+                                    (self.runtime_library_dirs or []))
+        else:
+            raise TypeError, \
+                  "'runtime_library_dirs' (if supplied) " + \
+                  "must be a list of strings"
+
+        return (libraries, library_dirs, runtime_library_dirs)
+
+    # _fix_lib_args ()
+
+
+    def _need_link (self, objects, output_file):
+        """Return true if we need to relink the files listed in 'objects'
+        to recreate 'output_file'.
+        """
+        if self.force:
+            return 1
+        else:
+            if self.dry_run:
+                # Under dry-run the objects may never have been written,
+                # so count missing files as out-of-date ('newer') rather
+                # than letting the timestamp check fail on them.
+                newer = newer_group (objects, output_file, missing='newer')
+            else:
+                newer = newer_group (objects, output_file)
+            return newer
+
+    # _need_link ()
+
+    def detect_language (self, sources):
+        """Detect the language of a given file, or list of files. Uses
+        language_map, and language_order to do the job.  Returns one of
+        the 'language_map' values ("c", "c++", "objc") or None when no
+        extension is recognized.
+        """
+        if type(sources) is not ListType:
+            sources = [sources]
+        lang = None
+        # A lower index in 'language_order' means higher precedence;
+        # start past the end so any recognized language wins initially.
+        index = len(self.language_order)
+        for source in sources:
+            base, ext = os.path.splitext(source)
+            extlang = self.language_map.get(ext)
+            try:
+                # ValueError from .index() means the extension's language
+                # (possibly None) is not ranked -- ignore that source.
+                extindex = self.language_order.index(extlang)
+                if extindex < index:
+                    lang = extlang
+                    index = extindex
+            except ValueError:
+                pass
+        return lang
+
+    # detect_language ()
+
+    # -- Worker methods ------------------------------------------------
+    # (must be implemented by subclasses)
+
+    def preprocess (self,
+                    source,
+                    output_file=None,
+                    macros=None,
+                    include_dirs=None,
+                    extra_preargs=None,
+                    extra_postargs=None):
+        """Preprocess a single C/C++ source file, named in 'source'.
+        Output will be written to file named 'output_file', or stdout if
+        'output_file' not supplied.  'macros' is a list of macro
+        definitions as for 'compile()', which will augment the macros set
+        with 'define_macro()' and 'undefine_macro()'.  'include_dirs' is a
+        list of directory names that will be added to the default list.
+
+        Raises PreprocessError on failure.
+        """
+        # Default implementation is a no-op; concrete compiler classes
+        # override this.
+        pass
+
+    def compile(self, sources, output_dir=None, macros=None,
+                include_dirs=None, debug=0, extra_preargs=None,
+                extra_postargs=None, depends=None):
+        """Compile one or more source files.
+
+        'sources' must be a list of filenames, most likely C/C++
+        files, but in reality anything that can be handled by a
+        particular compiler and compiler class (eg. MSVCCompiler can
+        handle resource files in 'sources').  Return a list of object
+        filenames, one per source filename in 'sources'.  Depending on
+        the implementation, not all source files will necessarily be
+        compiled, but all corresponding object filenames will be
+        returned.
+
+        If 'output_dir' is given, object files will be put under it, while
+        retaining their original path component.  That is, "foo/bar.c"
+        normally compiles to "foo/bar.o" (for a Unix implementation); if
+        'output_dir' is "build", then it would compile to
+        "build/foo/bar.o".
+
+        'macros', if given, must be a list of macro definitions.  A macro
+        definition is either a (name, value) 2-tuple or a (name,) 1-tuple.
+        The former defines a macro; if the value is None, the macro is
+        defined without an explicit value.  The 1-tuple case undefines a
+        macro.  Later definitions/redefinitions/undefinitions take
+        precedence.
+
+        'include_dirs', if given, must be a list of strings, the
+        directories to add to the default include file search path for this
+        compilation only.
+
+        'debug' is a boolean; if true, the compiler will be instructed to
+        output debug symbols in (or alongside) the object file(s).
+
+        'extra_preargs' and 'extra_postargs' are implementation-dependent.
+        On platforms that have the notion of a command-line (e.g. Unix,
+        DOS/Windows), they are most likely lists of strings: extra
+        command-line arguments to prepend/append to the compiler command
+        line.  On other platforms, consult the implementation class
+        documentation.  In any event, they are intended as an escape hatch
+        for those occasions when the abstract compiler framework doesn't
+        cut the mustard.
+
+        'depends', if given, is a list of filenames that all targets
+        depend on.  If a source file is older than any file in
+        depends, then the source file will be recompiled.  This
+        supports dependency tracking, but only at a coarse
+        granularity.
+
+        Raises CompileError on failure.
+        """
+
+        # A concrete compiler class can either override this method
+        # entirely or implement _compile().
+
+        macros, objects, extra_postargs, pp_opts, build = \
+                self._setup_compile(output_dir, macros, include_dirs, sources,
+                                    depends, extra_postargs)
+        cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)
+
+        for obj in objects:
+            try:
+                # 'build' only has entries for out-of-date objects; a
+                # KeyError means this object is up-to-date, so skip it.
+                src, ext = build[obj]
+            except KeyError:
+                continue
+            self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)
+
+        # Return *all* object filenames, not just the ones we just built.
+        return objects
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        """Compile 'src' to produce 'obj'."""
+
+        # A concrete compiler class that does not override compile()
+        # should implement _compile().  The default is a no-op.
+        pass
+
+    def create_static_lib (self,
+                           objects,
+                           output_libname,
+                           output_dir=None,
+                           debug=0,
+                           target_lang=None):
+        """Link a bunch of stuff together to create a static library file.
+        The "bunch of stuff" consists of the list of object files supplied
+        as 'objects', the extra object files supplied to
+        'add_link_object()' and/or 'set_link_objects()', the libraries
+        supplied to 'add_library()' and/or 'set_libraries()', and the
+        libraries supplied as 'libraries' (if any).
+
+        'output_libname' should be a library name, not a filename; the
+        filename will be inferred from the library name.  'output_dir' is
+        the directory where the library file will be put.
+
+        'debug' is a boolean; if true, debugging information will be
+        included in the library (note that on most platforms, it is the
+        compile step where this matters: the 'debug' flag is included here
+        just for consistency).
+
+        'target_lang' is the target language for which the given objects
+        are being compiled. This allows specific linkage time treatment of
+        certain languages.
+
+        Raises LibError on failure.
+        """
+        # Default implementation is a no-op; concrete compiler classes
+        # override this.
+        pass
+
+
+    # Symbolic constants for the 'target_desc' parameter of 'link()',
+    # selecting what kind of output the link step should produce.
+    SHARED_OBJECT = "shared_object"
+    SHARED_LIBRARY = "shared_library"
+    EXECUTABLE = "executable"
+
    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Link a bunch of stuff together to create an executable or
        shared library file.

        The "bunch of stuff" consists of the list of object files supplied
        as 'objects'.  'output_filename' should be a filename.  If
        'output_dir' is supplied, 'output_filename' is relative to it
        (i.e. 'output_filename' can provide directory components if
        needed).

        'libraries' is a list of libraries to link against.  These are
        library names, not filenames, since they're translated into
        filenames in a platform-specific way (eg. "foo" becomes "libfoo.a"
        on Unix and "foo.lib" on DOS/Windows).  However, they can include a
        directory component, which means the linker will look in that
        specific directory rather than searching all the normal locations.

        'library_dirs', if supplied, should be a list of directories to
        search for libraries that were specified as bare library names
        (ie. no directory component).  These are on top of the system
        default and those supplied to 'add_library_dir()' and/or
        'set_library_dirs()'.  'runtime_library_dirs' is a list of
        directories that will be embedded into the shared library and used
        to search for other shared libraries that *it* depends on at
        run-time.  (This may only be relevant on Unix.)

        'export_symbols' is a list of symbols that the shared library will
        export.  (This appears to be relevant only on Windows.)

        'debug' is as for 'compile()' and 'create_static_lib()', with the
        slight distinction that it actually matters on most platforms (as
        opposed to 'create_static_lib()', which includes a 'debug' flag
        mostly for form's sake).

        'extra_preargs' and 'extra_postargs' are as for 'compile()' (except
        of course that they supply command-line arguments for the
        particular linker being used).

        'target_lang' is the target language for which the given objects
        are being compiled. This allows specific linkage time treatment of
        certain languages.

        Raises LinkError on failure.
        """
        # Abstract method: concrete compiler classes must override this
        # with a real implementation.
        raise NotImplementedError
+
+
+    # Old 'link_*()' methods, rewritten to use the new 'link()' method.
+
+    def link_shared_lib (self,
+                         objects,
+                         output_libname,
+                         output_dir=None,
+                         libraries=None,
+                         library_dirs=None,
+                         runtime_library_dirs=None,
+                         export_symbols=None,
+                         debug=0,
+                         extra_preargs=None,
+                         extra_postargs=None,
+                         build_temp=None,
+                         target_lang=None):
+        self.link(CCompiler.SHARED_LIBRARY, objects,
+                  self.library_filename(output_libname, lib_type='shared'),
+                  output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+
+    def link_shared_object (self,
+                            objects,
+                            output_filename,
+                            output_dir=None,
+                            libraries=None,
+                            library_dirs=None,
+                            runtime_library_dirs=None,
+                            export_symbols=None,
+                            debug=0,
+                            extra_preargs=None,
+                            extra_postargs=None,
+                            build_temp=None,
+                            target_lang=None):
+        self.link(CCompiler.SHARED_OBJECT, objects,
+                  output_filename, output_dir,
+                  libraries, library_dirs, runtime_library_dirs,
+                  export_symbols, debug,
+                  extra_preargs, extra_postargs, build_temp, target_lang)
+
+
+    def link_executable (self,
+                         objects,
+                         output_progname,
+                         output_dir=None,
+                         libraries=None,
+                         library_dirs=None,
+                         runtime_library_dirs=None,
+                         debug=0,
+                         extra_preargs=None,
+                         extra_postargs=None,
+                         target_lang=None):
+        self.link(CCompiler.EXECUTABLE, objects,
+                  self.executable_filename(output_progname), output_dir,
+                  libraries, library_dirs, runtime_library_dirs, None,
+                  debug, extra_preargs, extra_postargs, None, target_lang)
+
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options()' function; there is
+    # no appropriate default implementation so subclasses should
+    # implement all of these.
+
    def library_dir_option (self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for libraries.

        Abstract: subclasses must override (used by gen_lib_options()).
        """
        raise NotImplementedError
+
    def runtime_library_dir_option (self, dir):
        """Return the compiler option to add 'dir' to the list of
        directories searched for runtime libraries.

        Abstract: subclasses must override.  Note that implementations
        may return either a single option string or a list of strings
        (gen_lib_options() handles both).
        """
        raise NotImplementedError
+
    def library_option (self, lib):
        """Return the compiler option to add 'lib' to the list of libraries
        linked into the shared library or executable.

        Abstract: subclasses must override (used by gen_lib_options()).
        """
        raise NotImplementedError
+
+    def has_function(self, funcname,
+                     includes=None,
+                     include_dirs=None,
+                     libraries=None,
+                     library_dirs=None):
+        """Return a boolean indicating whether funcname is supported on
+        the current platform.  The optional arguments can be used to
+        augment the compilation environment.
+        """
+
+        # this can't be included at module scope because it tries to
+        # import math which might not be available at that point - maybe
+        # the necessary logic should just be inlined?
+        import tempfile
+        if includes is None:
+            includes = []
+        if include_dirs is None:
+            include_dirs = []
+        if libraries is None:
+            libraries = []
+        if library_dirs is None:
+            library_dirs = []
+        fd, fname = tempfile.mkstemp(".c", funcname, text=True)
+        f = os.fdopen(fd, "w")
+        for incl in includes:
+            f.write("""#include "%s"\n""" % incl)
+        f.write("""\
+main (int argc, char **argv) {
+    %s();
+}
+""" % funcname)
+        f.close()
+        try:
+            objects = self.compile([fname], include_dirs=include_dirs)
+        except CompileError:
+            return False
+
+        try:
+            self.link_executable(objects, "a.out",
+                                 libraries=libraries,
+                                 library_dirs=library_dirs)
+        except (LinkError, TypeError):
+            return False
+        return True
+
    def find_library_file (self, dirs, lib, debug=0):
        """Search the specified list of directories for a static or shared
        library file 'lib' and return the full path to that file.  If
        'debug' true, look for a debugging version (if that makes sense on
        the current platform).  Return None if 'lib' wasn't found in any of
        the specified directories.

        Abstract: subclasses must override (used by gen_lib_options()).
        """
        raise NotImplementedError
+
+    # -- Filename generation methods -----------------------------------
+
+    # The default implementation of the filename generating methods are
+    # prejudiced towards the Unix/DOS/Windows view of the world:
+    #   * object files are named by replacing the source file extension
+    #     (eg. .c/.cpp -> .o/.obj)
+    #   * library files (shared or static) are named by plugging the
+    #     library name and extension into a format string, eg.
+    #     "lib%s.%s" % (lib_name, ".a") for Unix static libraries
+    #   * executables are named by appending an extension (possibly
+    #     empty) to the program name: eg. progname + ".exe" for
+    #     Windows
+    #
+    # To reduce redundant code, these methods expect to find
+    # several attributes in the current object (presumably defined
+    # as class attributes):
+    #   * src_extensions -
+    #     list of C/C++ source file extensions, eg. ['.c', '.cpp']
+    #   * obj_extension -
+    #     object file extension, eg. '.o' or '.obj'
+    #   * static_lib_extension -
+    #     extension for static library files, eg. '.a' or '.lib'
+    #   * shared_lib_extension -
+    #     extension for shared library/object files, eg. '.so', '.dll'
+    #   * static_lib_format -
+    #     format string for generating static library filenames,
+    #     eg. 'lib%s.%s' or '%s.%s'
+    #   * shared_lib_format
+    #     format string for generating shared library filenames
+    #     (probably same as static_lib_format, since the extension
+    #     is one of the intended parameters to the format string)
+    #   * exe_extension -
+    #     extension for executable files, eg. '' or '.exe'
+
+    def object_filenames(self, source_filenames, strip_dir=0, output_dir=''):
+        if output_dir is None:
+            output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            base, ext = os.path.splitext(src_name)
+            base = os.path.splitdrive(base)[1] # Chop off the drive
+            base = base[os.path.isabs(base):]  # If abs, chop off leading /
+            if ext not in self.src_extensions:
+                raise UnknownFileError, \
+                      "unknown file type '%s' (from '%s')" % (ext, src_name)
+            if strip_dir:
+                base = os.path.basename(base)
+            obj_names.append(os.path.join(output_dir,
+                                          base + self.obj_extension))
+        return obj_names
+
+    def shared_object_filename(self, basename, strip_dir=0, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename (basename)
+        return os.path.join(output_dir, basename + self.shared_lib_extension)
+
+    def executable_filename(self, basename, strip_dir=0, output_dir=''):
+        assert output_dir is not None
+        if strip_dir:
+            basename = os.path.basename (basename)
+        return os.path.join(output_dir, basename + (self.exe_extension or ''))
+
+    def library_filename(self, libname, lib_type='static',     # or 'shared'
+                         strip_dir=0, output_dir=''):
+        assert output_dir is not None
+        if lib_type not in ("static", "shared", "dylib"):
+            raise ValueError, "'lib_type' must be \"static\", \"shared\" or \"dylib\""
+        fmt = getattr(self, lib_type + "_lib_format")
+        ext = getattr(self, lib_type + "_lib_extension")
+
+        dir, base = os.path.split (libname)
+        filename = fmt % (base, ext)
+        if strip_dir:
+            dir = ''
+
+        return os.path.join(output_dir, dir, filename)
+
+
+    # -- Utility methods -----------------------------------------------
+
    def announce (self, msg, level=1):
        # 'level' is accepted for backward compatibility but ignored:
        # everything is logged at debug level.
        log.debug(msg)
+
    def debug_print (self, msg):
        """Print 'msg' to stdout, but only when the distutils DEBUG
        flag is set.
        """
        from distutils.debug import DEBUG
        if DEBUG:
            print msg
+
    def warn (self, msg):
        """Write a "warning: ..." message for 'msg' to stderr."""
        sys.stderr.write ("warning: %s\n" % msg)
+
    def execute (self, func, args, msg=None, level=1):
        # Thin wrapper around the module-level execute(), honouring this
        # compiler's dry-run flag.  'level' is accepted but unused here.
        execute(func, args, msg, self.dry_run)
+
    def spawn (self, cmd):
        # Run external command 'cmd' (an argv list), honouring dry-run.
        spawn (cmd, dry_run=self.dry_run)
+
    def move_file (self, src, dst):
        # Move 'src' to 'dst', honouring dry-run; returns the new path.
        return move_file (src, dst, dry_run=self.dry_run)
+
    def mkpath (self, name, mode=0777):
        # Create directory 'name' (and missing ancestors), honouring
        # dry-run.  0777 is the pre-umask default mode.
        mkpath (name, mode, self.dry_run)
+
+
+# class CCompiler
+
+
+# Map a sys.platform/os.name ('posix', 'nt') to the default compiler
+# type for that platform. Keys are interpreted as re match
+# patterns. Order is important; platform mappings are preferred over
+# OS names.
_default_compilers = (

    # Platform string mappings

    # on a cygwin built python we can use gcc like an ordinary UNIXish
    # compiler
    ('cygwin.*', 'unix'),
    ('os2emx', 'emx'),

    # OS name mappings
    ('posix', 'unix'),
    ('nt', 'msvc'),
    ('mac', 'mwerks'),

    # NB. get_default_compiler() tries each pattern against both
    # sys.platform and os.name, in this order -- first match wins.
    )
+
def get_default_compiler(osname=None, platform=None):
    """ Determine the default compiler to use for the given platform.

        osname should be one of the standard Python OS names (i.e. the
        ones returned by os.name) and platform the common value
        returned by sys.platform for the platform in question.

        The default values are os.name and sys.platform in case the
        parameters are not given.

    """
    if osname is None:
        osname = os.name
    if platform is None:
        platform = sys.platform
    # First table entry whose pattern matches either the platform string
    # or the OS name wins, so table order encodes precedence.
    for pattern, compiler in _default_compilers:
        matched = (re.match(pattern, platform) is not None
                   or re.match(pattern, osname) is not None)
        if matched:
            return compiler
    # Default to Unix compiler
    return 'unix'
+
+# Map compiler types to (module_name, class_name) pairs -- ie. where to
+# find the code that implements an interface to this compiler.  (The module
+# is assumed to be in the 'distutils' package.)
# Keys double as the values users may pass via the "--compiler" option
# (see show_compilers() / new_compiler() below).
compiler_class = { 'unix':    ('unixccompiler', 'UnixCCompiler',
                               "standard UNIX-style compiler"),
                   'msvc':    ('msvccompiler', 'MSVCCompiler',
                               "Microsoft Visual C++"),
                   'cygwin':  ('cygwinccompiler', 'CygwinCCompiler',
                               "Cygwin port of GNU C Compiler for Win32"),
                   'mingw32': ('cygwinccompiler', 'Mingw32CCompiler',
                               "Mingw32 port of GNU C Compiler for Win32"),
                   'bcpp':    ('bcppcompiler', 'BCPPCompiler',
                               "Borland C++ Compiler"),
                   'mwerks':  ('mwerkscompiler', 'MWerksCompiler',
                               "MetroWerks CodeWarrior"),
                   'emx':     ('emxccompiler', 'EMXCCompiler',
                               "EMX port of GNU C Compiler for OS/2"),
                 }
+
def show_compilers():
    """Print list of available compilers (used by the "--help-compiler"
    options to "build", "build_ext", "build_clib").
    """
    # XXX this "knows" that the compiler option it's describing is
    # "--compiler", which just happens to be the case for the three
    # commands that use it.
    from distutils.fancy_getopt import FancyGetopt
    # Build (long_option, short_option, help) triples, one per compiler,
    # sorted by option name for stable help output.
    compilers = [("compiler=" + name, None, compiler_class[name][2])
                 for name in compiler_class.keys()]
    compilers.sort()
    FancyGetopt(compilers).print_help("List of available compilers:")
+
+
def new_compiler (plat=None,
                  compiler=None,
                  verbose=0,
                  dry_run=0,
                  force=0):
    """Generate an instance of some CCompiler subclass for the supplied
    platform/compiler combination.  'plat' defaults to 'os.name'
    (eg. 'posix', 'nt'), and 'compiler' defaults to the default compiler
    for that platform.  Currently only 'posix' and 'nt' are supported, and
    the default compilers are "traditional Unix interface" (UnixCCompiler
    class) and Visual C++ (MSVCCompiler class).  Note that it's perfectly
    possible to ask for a Unix compiler object under Windows, and a
    Microsoft compiler object under Unix -- if you supply a value for
    'compiler', 'plat' is ignored.

    Raises DistutilsPlatformError for an unknown compiler name and
    DistutilsModuleError when the implementing module/class can't be
    loaded.
    """
    if plat is None:
        plat = os.name

    try:
        if compiler is None:
            compiler = get_default_compiler(plat)

        # KeyError here means 'compiler' is not a key of compiler_class,
        # i.e. an unknown compiler name.
        (module_name, class_name, long_description) = compiler_class[compiler]
    except KeyError:
        msg = "don't know how to compile C/C++ code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler" % compiler
        raise DistutilsPlatformError, msg

    try:
        module_name = "distutils." + module_name
        __import__ (module_name)
        # __import__ returns the top-level package, so fetch the actual
        # submodule from sys.modules instead.
        module = sys.modules[module_name]
        klass = vars(module)[class_name]
    except ImportError:
        raise DistutilsModuleError, \
              "can't compile C/C++ code: unable to load module '%s'" % \
              module_name
    except KeyError:
        raise DistutilsModuleError, \
              ("can't compile C/C++ code: unable to find class '%s' " +
               "in module '%s'") % (class_name, module_name)

    # XXX The None is necessary to preserve backwards compatibility
    # with classes that expect verbose to be the first positional
    # argument.
    return klass (None, dry_run, force)
+
+
+def gen_preprocess_options (macros, include_dirs):
+    """Generate C pre-processor options (-D, -U, -I) as used by at least
+    two types of compilers: the typical Unix compiler and Visual C++.
+    'macros' is the usual thing, a list of 1- or 2-tuples, where (name,)
+    means undefine (-U) macro 'name', and (name,value) means define (-D)
+    macro 'name' to 'value'.  'include_dirs' is just a list of directory
+    names to be added to the header file search path (-I).  Returns a list
+    of command-line options suitable for either Unix compilers or Visual
+    C++.
+    """
+    # XXX it would be nice (mainly aesthetic, and so we don't generate
+    # stupid-looking command lines) to go over 'macros' and eliminate
+    # redundant definitions/undefinitions (ie. ensure that only the
+    # latest mention of a particular macro winds up on the command
+    # line).  I don't think it's essential, though, since most (all?)
+    # Unix C compilers only pay attention to the latest -D or -U
+    # mention of a macro on their command line.  Similar situation for
+    # 'include_dirs'.  I'm punting on both for now.  Anyways, weeding out
+    # redundancies like this should probably be the province of
+    # CCompiler, since the data structures used are inherited from it
+    # and therefore common to all CCompiler classes.
+
+    pp_opts = []
+    for macro in macros:
+
+        if not (type (macro) is TupleType and
+                1 <= len (macro) <= 2):
+            raise TypeError, \
+                  ("bad macro definition '%s': " +
+                   "each element of 'macros' list must be a 1- or 2-tuple") % \
+                  macro
+
+        if len (macro) == 1:        # undefine this macro
+            pp_opts.append ("-U%s" % macro[0])
+        elif len (macro) == 2:
+            if macro[1] is None:    # define with no explicit value
+                pp_opts.append ("-D%s" % macro[0])
+            else:
+                # XXX *don't* need to be clever about quoting the
+                # macro value here, because we're going to avoid the
+                # shell at all costs when we spawn the command!
+                pp_opts.append ("-D%s=%s" % macro)
+
+    for dir in include_dirs:
+        pp_opts.append ("-I%s" % dir)
+
+    return pp_opts
+
+# gen_preprocess_options ()
+
+
def gen_lib_options (compiler, library_dirs, runtime_library_dirs, libraries):
    """Generate linker options for searching library directories and
    linking with specific libraries.  'libraries' and 'library_dirs' are,
    respectively, lists of library names (not filenames!) and search
    directories.  Returns a list of command-line options suitable for use
    with some compiler (depending on the two format strings passed in).
    """
    lib_opts = []

    for dir in library_dirs:
        lib_opts.append(compiler.library_dir_option(dir))

    for dir in runtime_library_dirs:
        opt = compiler.runtime_library_dir_option(dir)
        # A compiler may hand back several options for one directory.
        if type(opt) is ListType:
            lib_opts.extend(opt)
        else:
            lib_opts.append(opt)

    # XXX it's important that we *not* remove redundant library mentions!
    # sometimes you really do have to say "-lfoo -lbar -lfoo" in order to
    # resolve all symbols.  I just hope we never have to say "-lfoo obj.o
    # -lbar" to get things to work -- that's certainly a possibility, but a
    # pretty nasty way to arrange your C code.

    for lib in libraries:
        lib_dir, lib_name = os.path.split(lib)
        if not lib_dir:
            # Bare library name: let the compiler's search paths find it.
            lib_opts.append(compiler.library_option(lib))
            continue
        # Name with an explicit directory: resolve to an actual file in
        # that one directory, or warn and skip.
        lib_file = compiler.find_library_file([lib_dir], lib_name)
        if lib_file:
            lib_opts.append(lib_file)
        else:
            compiler.warn("no library file corresponding to "
                          "'%s' found (skipping)" % lib)

    return lib_opts
+
+# gen_lib_options ()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/cmd.py b/depot_tools/release/win/python_24/Lib/distutils/cmd.py
new file mode 100644
index 0000000..df09b91b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/cmd.py
@@ -0,0 +1,478 @@
+"""distutils.cmd
+
+Provides the Command class, the base class for the command classes
+in the distutils.command package.
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: cmd.py,v 1.39 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, os, string, re
+from types import *
+from distutils.errors import *
+from distutils import util, dir_util, file_util, archive_util, dep_util
+from distutils import log
+
+class Command:
+    """Abstract base class for defining command classes, the "worker bees"
+    of the Distutils.  A useful analogy for command classes is to think of
+    them as subroutines with local variables called "options".  The options
+    are "declared" in 'initialize_options()' and "defined" (given their
+    final values, aka "finalized") in 'finalize_options()', both of which
+    must be defined by every command class.  The distinction between the
+    two is necessary because option values might come from the outside
+    world (command line, config file, ...), and any options dependent on
+    other options must be computed *after* these outside influences have
+    been processed -- hence 'finalize_options()'.  The "body" of the
+    subroutine, where it does all its work based on the values of its
+    options, is the 'run()' method, which must also be implemented by every
+    command class.
+    """
+
    # 'sub_commands' formalizes the notion of a "family" of commands,
    # eg. "install" as the parent with sub-commands "install_lib",
    # "install_headers", etc.  The parent of a family of commands
    # defines 'sub_commands' as a class attribute; it's a list of
    #    (command_name : string, predicate : unbound_method | string | None)
    # tuples, where 'predicate' is a method of the parent command that
    # determines whether the corresponding command is applicable in the
    # current situation.  (Eg. "install_headers" is only applicable if
    # we have any C header files to install.)  If 'predicate' is None,
    # that command is always applicable.
    #
    # 'sub_commands' is usually defined at the *end* of a class, because
    # predicates can be unbound methods, so they must already have been
    # defined.  The canonical example is the "install" command.
    sub_commands = []
+
+
+    # -- Creation/initialization methods -------------------------------
+
    def __init__ (self, dist):
        """Create and initialize a new Command object.  Most importantly,
        invokes the 'initialize_options()' method, which is the real
        initializer and depends on the actual command being
        instantiated.

        'dist' must be a Distribution instance; Command itself is
        abstract and refuses direct instantiation.
        """
        # late import because of mutual dependence between these classes
        from distutils.dist import Distribution

        if not isinstance(dist, Distribution):
            raise TypeError, "dist must be a Distribution instance"
        if self.__class__ is Command:
            raise RuntimeError, "Command is an abstract class"

        self.distribution = dist
        self.initialize_options()

        # Per-command versions of the global flags, so that the user can
        # customize Distutils' behaviour command-by-command and let some
        # commands fall back on the Distribution's behaviour.  None means
        # "not defined, check self.distribution's copy", while 0 or 1 mean
        # false and true (duh).  Note that this means figuring out the real
        # value of each flag is a touch complicated -- hence "self._dry_run"
        # will be handled by __getattr__, below.
        # XXX This needs to be fixed.
        self._dry_run = None

        # verbose is largely ignored, but needs to be set for
        # backwards compatibility (I think)?
        self.verbose = dist.verbose

        # Some commands define a 'self.force' option to ignore file
        # timestamps, but methods defined *here* assume that
        # 'self.force' exists for all commands.  So define it here
        # just to be safe.
        self.force = None

        # The 'help' flag is just used for command-line parsing, so
        # none of that complicated bureaucracy is needed.
        self.help = 0

        # 'finalized' records whether or not 'finalize_options()' has been
        # called.  'finalize_options()' itself should not pay attention to
        # this flag: it is the business of 'ensure_finalized()', which
        # always calls 'finalize_options()', to respect/update it.
        self.finalized = 0

    # __init__ ()
+
+
+    # XXX A more explicit way to customize dry_run would be better.
+
+    def __getattr__ (self, attr):
+        if attr == 'dry_run':
+            myval = getattr(self, "_" + attr)
+            if myval is None:
+                return getattr(self.distribution, attr)
+            else:
+                return myval
+        else:
+            raise AttributeError, attr
+
+
    def ensure_finalized (self):
        """Call 'finalize_options()' at most once, recording that it has
        been done in 'self.finalized'.
        """
        if not self.finalized:
            self.finalize_options()
        self.finalized = 1
+
+
+    # Subclasses must define:
+    #   initialize_options()
+    #     provide default values for all options; may be customized by
+    #     setup script, by options from config file(s), or by command-line
+    #     options
+    #   finalize_options()
+    #     decide on the final values for all options; this is called
+    #     after all possible intervention from the outside world
+    #     (command-line, option file, etc.) has been processed
+    #   run()
+    #     run the command: do whatever it is we're here to do,
+    #     controlled by the command's various option values
+
    def initialize_options (self):
        """Set default values for all the options that this command
        supports.  Note that these defaults may be overridden by other
        commands, by the setup script, by config files, or by the
        command-line.  Thus, this is not the place to code dependencies
        between options; generally, 'initialize_options()' implementations
        are just a bunch of "self.foo = None" assignments.

        This method must be implemented by all command classes.
        """
        # Abstract: deliberately fails loudly rather than silently doing
        # nothing, so a missing override is caught immediately.
        raise RuntimeError, \
              "abstract method -- subclass %s must override" % self.__class__
+
    def finalize_options (self):
        """Set final values for all the options that this command supports.
        This is always called as late as possible, ie.  after any option
        assignments from the command-line or from other commands have been
        done.  Thus, this is the place to code option dependencies: if
        'foo' depends on 'bar', then it is safe to set 'foo' from 'bar' as
        long as 'foo' still has the same value it was assigned in
        'initialize_options()'.

        This method must be implemented by all command classes.
        """
        # Abstract: see note in initialize_options().
        raise RuntimeError, \
              "abstract method -- subclass %s must override" % self.__class__
+
+
    def dump_options (self, header=None, indent=""):
        """Print this command's option values, one "name = value" line
        per option, under 'header' (defaults to "command options for
        '<name>':").  Debugging aid; iterates 'self.user_options', which
        subclasses are expected to define as (long_option, short_option,
        help) tuples -- standard distutils convention, confirm per class.
        """
        from distutils.fancy_getopt import longopt_xlate
        if header is None:
            header = "command options for '%s':" % self.get_command_name()
        print indent + header
        indent = indent + "  "
        for (option, _, _) in self.user_options:
            # Long options use '-'; the corresponding attribute uses '_'.
            option = string.translate(option, longopt_xlate)
            if option[-1] == "=":
                option = option[:-1]
            value = getattr(self, option)
            print indent + "%s = %s" % (option, value)
+
+
    def run (self):
        """A command's raison d'etre: carry out the action it exists to
        perform, controlled by the options initialized in
        'initialize_options()', customized by other commands, the setup
        script, the command-line, and config files, and finalized in
        'finalize_options()'.  All terminal output and filesystem
        interaction should be done by 'run()'.

        This method must be implemented by all command classes.
        """

        # Abstract: see note in initialize_options().
        raise RuntimeError, \
              "abstract method -- subclass %s must override" % self.__class__
+
    def announce (self, msg, level=1):
        """If the current verbosity level is of greater than or equal to
        'level' print 'msg' to stdout.
        """
        # Delegates the level check to the distutils log machinery.
        log.log(level, msg)
+
    def debug_print (self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG
        if DEBUG:
            print msg
            # Flush so debug output interleaves predictably with other
            # writers to stdout.
            sys.stdout.flush()
+
+
+
+    # -- Option validation methods -------------------------------------
+    # (these are very handy in writing the 'finalize_options()' method)
+    #
+    # NB. the general philosophy here is to ensure that a particular option
+    # value meets certain type and value constraints.  If not, we try to
+    # force it into conformance (eg. if we expect a list but have a string,
+    # split the string on comma and/or whitespace).  If we can't force the
+    # option into conformance, raise DistutilsOptionError.  Thus, command
+    # classes need do nothing more than (eg.)
+    #   self.ensure_string_list('foo')
+    # and they can be guaranteed that thereafter, self.foo will be
+    # a list of strings.
+
+    def _ensure_stringlike (self, option, what, default=None):
+        val = getattr(self, option)
+        if val is None:
+            setattr(self, option, default)
+            return default
+        elif type(val) is not StringType:
+            raise DistutilsOptionError, \
+                  "'%s' must be a %s (got `%s`)" % (option, what, val)
+        return val
+
    def ensure_string (self, option, default=None):
        """Ensure that 'option' is a string; if not defined, set it to
        'default'.
        """
        # Pure delegation; the return value is intentionally dropped.
        self._ensure_stringlike(option, "string", default)
+
+    def ensure_string_list (self, option):
+        """Ensure that 'option' is a list of strings.  If 'option' is
+        currently a string, we split it either on /,\s*/ or /\s+/, so
+        "foo bar baz", "foo,bar,baz", and "foo,   bar baz" all become
+        ["foo", "bar", "baz"].
+        """
+        val = getattr(self, option)
+        if val is None:
+            return
+        elif type(val) is StringType:
+            setattr(self, option, re.split(r',\s*|\s+', val))
+        else:
+            if type(val) is ListType:
+                types = map(type, val)
+                ok = (types == [StringType] * len(val))
+            else:
+                ok = 0
+
+            if not ok:
+                raise DistutilsOptionError, \
+                      "'%s' must be a list of strings (got %r)" % \
+                      (option, val)
+
+    def _ensure_tested_string (self, option, tester,
+                               what, error_fmt, default=None):
+        val = self._ensure_stringlike(option, what, default)
+        if val is not None and not tester(val):
+            raise DistutilsOptionError, \
+                  ("error in '%s' option: " + error_fmt) % (option, val)
+
+    def ensure_filename (self, option):
+        """Ensure that 'option' is the name of an existing file."""
+        self._ensure_tested_string(option, os.path.isfile,
+                                   "filename",
+                                   "'%s' does not exist or is not a file")
+
+    def ensure_dirname (self, option):
+        self._ensure_tested_string(option, os.path.isdir,
+                                   "directory name",
+                                   "'%s' does not exist or is not a directory")
+
+
+    # -- Convenience methods for commands ------------------------------
+
+    def get_command_name (self):
+        if hasattr(self, 'command_name'):
+            return self.command_name
+        else:
+            return self.__class__.__name__
+
+
+    def set_undefined_options (self, src_cmd, *option_pairs):
+        """Set the values of any "undefined" options from corresponding
+        option values in some other command object.  "Undefined" here means
+        "is None", which is the convention used to indicate that an option
+        has not been changed between 'initialize_options()' and
+        'finalize_options()'.  Usually called from 'finalize_options()' for
+        options that depend on some other command rather than another
+        option of the same command.  'src_cmd' is the other command from
+        which option values will be taken (a command object will be created
+        for it if necessary); the remaining arguments are
+        '(src_option,dst_option)' tuples which mean "take the value of
+        'src_option' in the 'src_cmd' command object, and copy it to
+        'dst_option' in the current command object".
+        """
+
+        # Option_pairs: list of (src_option, dst_option) tuples
+
+        src_cmd_obj = self.distribution.get_command_obj(src_cmd)
+        src_cmd_obj.ensure_finalized()
+        for (src_option, dst_option) in option_pairs:
+            if getattr(self, dst_option) is None:
+                setattr(self, dst_option,
+                        getattr(src_cmd_obj, src_option))
+
+
+    def get_finalized_command (self, command, create=1):
+        """Wrapper around Distribution's 'get_command_obj()' method: find
+        (create if necessary and 'create' is true) the command object for
+        'command', call its 'ensure_finalized()' method, and return the
+        finalized command object.
+        """
+        cmd_obj = self.distribution.get_command_obj(command, create)
+        cmd_obj.ensure_finalized()
+        return cmd_obj
+
+    # XXX rename to 'get_reinitialized_command()'? (should do the
+    # same in dist.py, if so)
    def reinitialize_command (self, command, reinit_subcommands=0):
        """Delegate to Distribution's 'reinitialize_command()' and return
        the freshly re-initialized command object (so its options can be
        customized before running it again)."""
        return self.distribution.reinitialize_command(
            command, reinit_subcommands)
+
    def run_command (self, command):
        """Run some other command: uses the 'run_command()' method of
        Distribution, which creates and finalizes the command object if
        necessary and then invokes its 'run()' method.  Returns nothing.
        """
        self.distribution.run_command(command)
+
+
+    def get_sub_commands (self):
+        """Determine the sub-commands that are relevant in the current
+        distribution (ie., that need to be run).  This is based on the
+        'sub_commands' class attribute: each tuple in that list may include
+        a method that we call to determine if the subcommand needs to be
+        run for the current distribution.  Return a list of command names.
+        """
+        commands = []
+        for (cmd_name, method) in self.sub_commands:
+            if method is None or method(self):
+                commands.append(cmd_name)
+        return commands
+
+
+    # -- External world manipulation -----------------------------------
+
    def warn (self, msg):
        """Write a warning message, tagged with this command's name, to
        stderr."""
        sys.stderr.write("warning: %s: %s\n" %
                         (self.get_command_name(), msg))
+
+
    def execute (self, func, args, msg=None, level=1):
        """Perform 'func(*args)' via util.execute(), announcing 'msg' and
        honouring this command's dry-run flag.  'level' is accepted but
        not used here."""
        util.execute(func, args, msg, dry_run=self.dry_run)
+
+
    def mkpath (self, name, mode=0777):
        """Create directory 'name' (and any missing ancestor dirs) with
        the given mode, respecting the dry-run flag."""
        dir_util.mkpath(name, mode, dry_run=self.dry_run)
+
+
    def copy_file (self, infile, outfile,
                   preserve_mode=1, preserve_times=1, link=None, level=1):
        """Copy a file respecting verbose, dry-run and force flags.  (The
        former two default to whatever is in the Distribution object, and
        the latter defaults to false for commands that don't define it.)
        'level' is accepted but not used here.  Returns whatever
        file_util.copy_file() returns."""

        return file_util.copy_file(
            infile, outfile,
            preserve_mode, preserve_times,
            not self.force,  # only update (skip same-age files) when not forcing
            link,
            dry_run=self.dry_run)
+
+
+    def copy_tree (self, infile, outfile,
+                   preserve_mode=1, preserve_times=1, preserve_symlinks=0,
+                   level=1):
+        """Copy an entire directory tree respecting verbose, dry-run,
+        and force flags.
+        """
+        return dir_util.copy_tree(
+            infile, outfile,
+            preserve_mode,preserve_times,preserve_symlinks,
+            not self.force,
+            dry_run=self.dry_run)
+
    def move_file (self, src, dst, level=1):
        """Move a file from 'src' to 'dst', respecting the dry-run flag
        ('level' is accepted but not used here)."""
        return file_util.move_file(src, dst, dry_run = self.dry_run)
+
+    def spawn (self, cmd, search_path=1, level=1):
+        """Spawn an external command respecting dry-run flag."""
+        from distutils.spawn import spawn
+        spawn(cmd, search_path, dry_run= self.dry_run)
+
    def make_archive (self, base_name, format,
                      root_dir=None, base_dir=None):
        """Create an archive file named 'base_name' + a format-dependent
        extension, in the given 'format', respecting the dry-run flag.
        Returns whatever archive_util.make_archive() returns."""
        return archive_util.make_archive(
            base_name, format, root_dir, base_dir, dry_run=self.dry_run)
+
+
+    def make_file (self, infiles, outfile, func, args,
+                   exec_msg=None, skip_msg=None, level=1):
+        """Special case of 'execute()' for operations that process one or
+        more input files and generate one output file.  Works just like
+        'execute()', except the operation is skipped and a different
+        message printed if 'outfile' already exists and is newer than all
+        files listed in 'infiles'.  If the command defined 'self.force',
+        and it is true, then the command is unconditionally run -- does no
+        timestamp checks.
+        """
+        if exec_msg is None:
+            exec_msg = "generating %s from %s" % \
+                       (outfile, string.join(infiles, ', '))
+        if skip_msg is None:
+            skip_msg = "skipping %s (inputs unchanged)" % outfile
+
+
+        # Allow 'infiles' to be a single string
+        if type(infiles) is StringType:
+            infiles = (infiles,)
+        elif type(infiles) not in (ListType, TupleType):
+            raise TypeError, \
+                  "'infiles' must be a string, or a list or tuple of strings"
+
+        # If 'outfile' must be regenerated (either because it doesn't
+        # exist, is out-of-date, or the 'force' flag is true) then
+        # perform the action that presumably regenerates it
+        if self.force or dep_util.newer_group (infiles, outfile):
+            self.execute(func, args, exec_msg, level)
+
+        # Otherwise, print the "skip" message
+        else:
+            log.debug(skip_msg)
+
+    # make_file ()
+
+# class Command
+
+
+# XXX 'install_misc' class not currently used -- it was the base class for
+# both 'install_scripts' and 'install_data', but they outgrew it.  It might
+# still be useful for 'install_headers', though, so I'm keeping it around
+# for the time being.
+
class install_misc (Command):
    """Common base class for installing some files in a subdirectory.
    Currently used by install_data and install_scripts.
    """

    user_options = [('install-dir=', 'd', "directory to install the files to")]

    def initialize_options (self):
        # No destination chosen yet, nothing installed yet.
        self.install_dir = None
        self.outfiles = []

    def _install_dir_from (self, dirname):
        # Borrow 'install_dir' from the named option of the 'install'
        # command when it has not been set explicitly.
        self.set_undefined_options('install', (dirname, 'install_dir'))

    def _copy_files (self, filelist):
        """Copy every file in 'filelist' into 'install_dir', recording
        the resulting paths in 'outfiles'."""
        self.outfiles = []
        if not filelist:
            return
        self.mkpath(self.install_dir)
        for name in filelist:
            self.copy_file(name, self.install_dir)
            self.outfiles.append(os.path.join(self.install_dir, name))

    def get_outputs (self):
        """Return the list of files installed by '_copy_files()'."""
        return self.outfiles
+
+
# Smoke test: running this module directly just confirms it imports.
if __name__ == "__main__":
    print "ok"
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/__init__.py b/depot_tools/release/win/python_24/Lib/distutils/command/__init__.py
new file mode 100644
index 0000000..533db16a6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/__init__.py
@@ -0,0 +1,33 @@
"""distutils.command

Package containing implementation of all the standard Distutils
commands."""

# This module should be kept compatible with Python 2.1.

__revision__ = "$Id: __init__.py,v 1.21 2004/11/10 22:23:14 loewis Exp $"

# Names exported by 'from distutils.command import *'; doubles as the
# canonical list of the standard command modules in this package.
__all__ = ['build',
           'build_py',
           'build_ext',
           'build_clib',
           'build_scripts',
           'clean',
           'install',
           'install_lib',
           'install_headers',
           'install_scripts',
           'install_data',
           'sdist',
           'register',
           'bdist',
           'bdist_dumb',
           'bdist_rpm',
           'bdist_wininst',
           # These two are reserved for future use:
           #'bdist_sdux',
           #'bdist_pkgtool',
           # Note:
           # bdist_packager is not included because it only provides
           # an abstract base class
          ]
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/bdist.py b/depot_tools/release/win/python_24/Lib/distutils/command/bdist.py
new file mode 100644
index 0000000..302d289
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/bdist.py
@@ -0,0 +1,150 @@
+"""distutils.command.bdist
+
+Implements the Distutils 'bdist' command (create a built [binary]
+distribution)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: bdist.py,v 1.30 2004/11/10 22:23:14 loewis Exp $"
+
+import os, string
+from types import *
+from distutils.core import Command
+from distutils.errors import *
+from distutils.util import get_platform
+
+
def show_formats ():
    """Print the list of available built-distribution formats (the
    values accepted by the "--format" option)."""
    from distutils.fancy_getopt import FancyGetopt
    rows = []
    for name in bdist.format_commands:
        rows.append(("formats=" + name, None,
                     bdist.format_command[name][1]))
    FancyGetopt(rows).print_help(
        "List of available distribution formats:")
+
+
+class bdist (Command):
+
+    description = "create a built (binary) distribution"
+
+    user_options = [('bdist-base=', 'b',
+                     "temporary directory for creating built distributions"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('formats=', None,
+                     "formats for distribution (comma-separated list)"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in "
+                     "[default: dist]"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                   ]
+
+    boolean_options = ['skip-build']
+
+    help_options = [
+        ('help-formats', None,
+         "lists available distribution formats", show_formats),
+        ]
+
+    # The following commands do not take a format option from bdist
+    no_format_option = ('bdist_rpm',
+                        #'bdist_sdux', 'bdist_pkgtool'
+                        )
+
+    # This won't do in reality: will need to distinguish RPM-ish Linux,
+    # Debian-ish Linux, Solaris, FreeBSD, ..., Windows, Mac OS.
+    default_format = { 'posix': 'gztar',
+                       'nt': 'zip',
+                       'os2': 'zip', }
+
+    # Establish the preferred order (for the --help-formats option).
+    format_commands = ['rpm', 'gztar', 'bztar', 'ztar', 'tar',
+                       'wininst', 'zip',
+                       #'pkgtool', 'sdux'
+                       ]
+
+    # And the real information.
+    format_command = { 'rpm':   ('bdist_rpm',  "RPM distribution"),
+                       'zip':   ('bdist_dumb', "ZIP file"),
+                       'gztar': ('bdist_dumb', "gzip'ed tar file"),
+                       'bztar': ('bdist_dumb', "bzip2'ed tar file"),
+                       'ztar':  ('bdist_dumb', "compressed tar file"),
+                       'tar':   ('bdist_dumb', "tar file"),
+                       'wininst': ('bdist_wininst',
+                                   "Windows executable installer"),
+                       'zip':   ('bdist_dumb', "ZIP file"),
+                       #'pkgtool': ('bdist_pkgtool',
+                       #            "Solaris pkgtool distribution"),
+                       #'sdux':  ('bdist_sdux', "HP-UX swinstall depot"),
+                      }
+
+
+    def initialize_options (self):
+        self.bdist_base = None
+        self.plat_name = None
+        self.formats = None
+        self.dist_dir = None
+        self.skip_build = 0
+
+    # initialize_options()
+
+
+    def finalize_options (self):
+        # have to finalize 'plat_name' before 'bdist_base'
+        if self.plat_name is None:
+            self.plat_name = get_platform()
+
+        # 'bdist_base' -- parent of per-built-distribution-format
+        # temporary directories (eg. we'll probably have
+        # "build/bdist.<plat>/dumb", "build/bdist.<plat>/rpm", etc.)
+        if self.bdist_base is None:
+            build_base = self.get_finalized_command('build').build_base
+            self.bdist_base = os.path.join(build_base,
+                                           'bdist.' + self.plat_name)
+
+        self.ensure_string_list('formats')
+        if self.formats is None:
+            try:
+                self.formats = [self.default_format[os.name]]
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      "don't know how to create built distributions " + \
+                      "on platform %s" % os.name
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+    # finalize_options()
+
+
+    def run (self):
+
+        # Figure out which sub-commands we need to run.
+        commands = []
+        for format in self.formats:
+            try:
+                commands.append(self.format_command[format][0])
+            except KeyError:
+                raise DistutilsOptionError, "invalid format '%s'" % format
+
+        # Reinitialize and run each command.
+        for i in range(len(self.formats)):
+            cmd_name = commands[i]
+            sub_cmd = self.reinitialize_command(cmd_name)
+            if cmd_name not in self.no_format_option:
+                sub_cmd.format = self.formats[i]
+
+            # If we're going to need to run this command again, tell it to
+            # keep its temporary files around so subsequent runs go faster.
+            if cmd_name in commands[i+1:]:
+                sub_cmd.keep_temp = 1
+            self.run_command(cmd_name)
+
+    # run()
+
+# class bdist
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/bdist_dumb.py b/depot_tools/release/win/python_24/Lib/distutils/command/bdist_dumb.py
new file mode 100644
index 0000000..f12de289
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/bdist_dumb.py
@@ -0,0 +1,128 @@
+"""distutils.command.bdist_dumb
+
+Implements the Distutils 'bdist_dumb' command (create a "dumb" built
+distribution -- i.e., just an archive to be unpacked under $prefix or
+$exec_prefix)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: bdist_dumb.py,v 1.25 2004/11/10 22:23:14 loewis Exp $"
+
+import os
+from distutils.core import Command
+from distutils.util import get_platform
+from distutils.dir_util import create_tree, remove_tree, ensure_relative
+from distutils.errors import *
+from distutils import log
+
+class bdist_dumb (Command):
+
+    description = "create a \"dumb\" built distribution"
+
+    user_options = [('bdist-dir=', 'd',
+                     "temporary directory for creating the distribution"),
+                    ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_platform()),
+                    ('format=', 'f',
+                     "archive format to create (tar, ztar, gztar, zip)"),
+                    ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+                    ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+                    ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+                    ('relative', None,
+                     "build the archive using relative paths"
+                     "(default: false)"),
+                   ]
+
+    boolean_options = ['keep-temp', 'skip-build', 'relative']
+
+    default_format = { 'posix': 'gztar',
+                       'nt': 'zip',
+                       'os2': 'zip' }
+
+
+    def initialize_options (self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.format = None
+        self.keep_temp = 0
+        self.dist_dir = None
+        self.skip_build = 0
+        self.relative = 0
+
+    # initialize_options()
+
+
+    def finalize_options (self):
+
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'dumb')
+
+        if self.format is None:
+            try:
+                self.format = self.default_format[os.name]
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      ("don't know how to create dumb built distributions " +
+                       "on platform %s") % os.name
+
+        self.set_undefined_options('bdist',
+                                   ('dist_dir', 'dist_dir'),
+                                   ('plat_name', 'plat_name'))
+
+    # finalize_options()
+
+
+    def run (self):
+
+        if not self.skip_build:
+            self.run_command('build')
+
+        install = self.reinitialize_command('install', reinit_subcommands=1)
+        install.root = self.bdist_dir
+        install.skip_build = self.skip_build
+        install.warn_dir = 0
+
+        log.info("installing to %s" % self.bdist_dir)
+        self.run_command('install')
+
+        # And make an archive relative to the root of the
+        # pseudo-installation tree.
+        archive_basename = "%s.%s" % (self.distribution.get_fullname(),
+                                      self.plat_name)
+
+        # OS/2 objects to any ":" characters in a filename (such as when
+        # a timestamp is used in a version) so change them to hyphens.
+        if os.name == "os2":
+            archive_basename = archive_basename.replace(":", "-")
+
+        pseudoinstall_root = os.path.join(self.dist_dir, archive_basename)
+        if not self.relative:
+            archive_root = self.bdist_dir
+        else:
+            if (self.distribution.has_ext_modules() and
+                (install.install_base != install.install_platbase)):
+                raise DistutilsPlatformError, \
+                      ("can't make a dumb built distribution where "
+                       "base and platbase are different (%s, %s)"
+                       % (repr(install.install_base),
+                          repr(install.install_platbase)))
+            else:
+                archive_root = os.path.join(self.bdist_dir,
+                                   ensure_relative(install.install_base))
+
+        # Make the archive
+        self.make_archive(pseudoinstall_root,
+                          self.format, root_dir=archive_root)
+
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+    # run()
+
+# class bdist_dumb
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/bdist_rpm.py b/depot_tools/release/win/python_24/Lib/distutils/command/bdist_rpm.py
new file mode 100644
index 0000000..a354512
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/bdist_rpm.py
@@ -0,0 +1,539 @@
+"""distutils.command.bdist_rpm
+
+Implements the Distutils 'bdist_rpm' command (create RPM source and binary
+distributions)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: bdist_rpm.py,v 1.46 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, os, string
+import glob
+from types import *
+from distutils.core import Command
+from distutils.debug import DEBUG
+from distutils.util import get_platform
+from distutils.file_util import write_file
+from distutils.errors import *
+from distutils import log
+
+class bdist_rpm (Command):
+
+    description = "create an RPM distribution"
+
+    user_options = [
+        ('bdist-base=', None,
+         "base directory for creating built distributions"),
+        ('rpm-base=', None,
+         "base directory for creating RPMs (defaults to \"rpm\" under "
+         "--bdist-base; must be specified for RPM 2)"),
+        ('dist-dir=', 'd',
+         "directory to put final RPM files in "
+         "(and .spec files if --spec-only)"),
+        ('python=', None,
+         "path to Python interpreter to hard-code in the .spec file "
+         "(default: \"python\")"),
+        ('fix-python', None,
+         "hard-code the exact path to the current Python interpreter in "
+         "the .spec file"),
+        ('spec-only', None,
+         "only regenerate spec file"),
+        ('source-only', None,
+         "only generate source RPM"),
+        ('binary-only', None,
+         "only generate binary RPM"),
+        ('use-bzip2', None,
+         "use bzip2 instead of gzip to create source distribution"),
+
+        # More meta-data: too RPM-specific to put in the setup script,
+        # but needs to go in the .spec file -- so we make these options
+        # to "bdist_rpm".  The idea is that packagers would put this
+        # info in setup.cfg, although they are of course free to
+        # supply it on the command line.
+        ('distribution-name=', None,
+         "name of the (Linux) distribution to which this "
+         "RPM applies (*not* the name of the module distribution!)"),
+        ('group=', None,
+         "package classification [default: \"Development/Libraries\"]"),
+        ('release=', None,
+         "RPM release number"),
+        ('serial=', None,
+         "RPM serial number"),
+        ('vendor=', None,
+         "RPM \"vendor\" (eg. \"Joe Blow <joe@example.com>\") "
+         "[default: maintainer or author from setup script]"),
+        ('packager=', None,
+         "RPM packager (eg. \"Jane Doe <jane@example.net>\")"
+         "[default: vendor]"),
+        ('doc-files=', None,
+         "list of documentation files (space or comma-separated)"),
+        ('changelog=', None,
+         "RPM changelog"),
+        ('icon=', None,
+         "name of icon file"),
+        ('provides=', None,
+         "capabilities provided by this package"),
+        ('requires=', None,
+         "capabilities required by this package"),
+        ('conflicts=', None,
+         "capabilities which conflict with this package"),
+        ('build-requires=', None,
+         "capabilities required to build this package"),
+        ('obsoletes=', None,
+         "capabilities made obsolete by this package"),
+        ('no-autoreq', None,
+         "do not automatically calculate dependencies"),
+
+        # Actions to take when building RPM
+        ('keep-temp', 'k',
+         "don't clean up RPM build directory"),
+        ('no-keep-temp', None,
+         "clean up RPM build directory [default]"),
+        ('use-rpm-opt-flags', None,
+         "compile with RPM_OPT_FLAGS when building from source RPM"),
+        ('no-rpm-opt-flags', None,
+         "do not pass any RPM CFLAGS to compiler"),
+        ('rpm3-mode', None,
+         "RPM 3 compatibility mode (default)"),
+        ('rpm2-mode', None,
+         "RPM 2 compatibility mode"),
+
+        # Add the hooks necessary for specifying custom scripts
+        ('prep-script=', None,
+         "Specify a script for the PREP phase of RPM building"),
+        ('build-script=', None,
+         "Specify a script for the BUILD phase of RPM building"),
+
+        ('pre-install=', None,
+         "Specify a script for the pre-INSTALL phase of RPM building"),
+        ('install-script=', None,
+         "Specify a script for the INSTALL phase of RPM building"),
+        ('post-install=', None,
+         "Specify a script for the post-INSTALL phase of RPM building"),
+
+        ('pre-uninstall=', None,
+         "Specify a script for the pre-UNINSTALL phase of RPM building"),
+        ('post-uninstall=', None,
+         "Specify a script for the post-UNINSTALL phase of RPM building"),
+
+        ('clean-script=', None,
+         "Specify a script for the CLEAN phase of RPM building"),
+
+        ('verify-script=', None,
+         "Specify a script for the VERIFY phase of the RPM build"),
+
+        # Allow a packager to explicitly force an architecture
+        ('force-arch=', None,
+         "Force an architecture onto the RPM build process"),
+       ]
+
+    boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
+                       'no-autoreq']
+
+    negative_opt = {'no-keep-temp': 'keep-temp',
+                    'no-rpm-opt-flags': 'use-rpm-opt-flags',
+                    'rpm2-mode': 'rpm3-mode'}
+
+
+    def initialize_options (self):
+        self.bdist_base = None
+        self.rpm_base = None
+        self.dist_dir = None
+        self.python = None
+        self.fix_python = None
+        self.spec_only = None
+        self.binary_only = None
+        self.source_only = None
+        self.use_bzip2 = None
+
+        self.distribution_name = None
+        self.group = None
+        self.release = None
+        self.serial = None
+        self.vendor = None
+        self.packager = None
+        self.doc_files = None
+        self.changelog = None
+        self.icon = None
+
+        self.prep_script = None
+        self.build_script = None
+        self.install_script = None
+        self.clean_script = None
+        self.verify_script = None
+        self.pre_install = None
+        self.post_install = None
+        self.pre_uninstall = None
+        self.post_uninstall = None
+        self.prep = None
+        self.provides = None
+        self.requires = None
+        self.conflicts = None
+        self.build_requires = None
+        self.obsoletes = None
+
+        self.keep_temp = 0
+        self.use_rpm_opt_flags = 1
+        self.rpm3_mode = 1
+        self.no_autoreq = 0
+
+        self.force_arch = None
+
+    # initialize_options()
+
+
+    def finalize_options (self):
+        self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
+        if self.rpm_base is None:
+            if not self.rpm3_mode:
+                raise DistutilsOptionError, \
+                      "you must specify --rpm-base in RPM 2 mode"
+            self.rpm_base = os.path.join(self.bdist_base, "rpm")
+
+        if self.python is None:
+            if self.fix_python:
+                self.python = sys.executable
+            else:
+                self.python = "python"
+        elif self.fix_python:
+            raise DistutilsOptionError, \
+                  "--python and --fix-python are mutually exclusive options"
+
+        if os.name != 'posix':
+            raise DistutilsPlatformError, \
+                  ("don't know how to create RPM "
+                   "distributions on platform %s" % os.name)
+        if self.binary_only and self.source_only:
+            raise DistutilsOptionError, \
+                  "cannot supply both '--source-only' and '--binary-only'"
+
+        # don't pass CFLAGS to pure python distributions
+        if not self.distribution.has_ext_modules():
+            self.use_rpm_opt_flags = 0
+
+        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
+        self.finalize_package_data()
+
+    # finalize_options()
+
+    def finalize_package_data (self):
+        self.ensure_string('group', "Development/Libraries")
+        self.ensure_string('vendor',
+                           "%s <%s>" % (self.distribution.get_contact(),
+                                        self.distribution.get_contact_email()))
+        self.ensure_string('packager')
+        self.ensure_string_list('doc_files')
+        if type(self.doc_files) is ListType:
+            for readme in ('README', 'README.txt'):
+                if os.path.exists(readme) and readme not in self.doc_files:
+                    self.doc_files.append(readme)
+
+        self.ensure_string('release', "1")
+        self.ensure_string('serial')   # should it be an int?
+
+        self.ensure_string('distribution_name')
+
+        self.ensure_string('changelog')
+          # Format changelog correctly
+        self.changelog = self._format_changelog(self.changelog)
+
+        self.ensure_filename('icon')
+
+        self.ensure_filename('prep_script')
+        self.ensure_filename('build_script')
+        self.ensure_filename('install_script')
+        self.ensure_filename('clean_script')
+        self.ensure_filename('verify_script')
+        self.ensure_filename('pre_install')
+        self.ensure_filename('post_install')
+        self.ensure_filename('pre_uninstall')
+        self.ensure_filename('post_uninstall')
+
+        # XXX don't forget we punted on summaries and descriptions -- they
+        # should be handled here eventually!
+
+        # Now *this* is some meta-data that belongs in the setup script...
+        self.ensure_string_list('provides')
+        self.ensure_string_list('requires')
+        self.ensure_string_list('conflicts')
+        self.ensure_string_list('build_requires')
+        self.ensure_string_list('obsoletes')
+
+        self.ensure_string('force_arch')
+    # finalize_package_data ()
+
+
    def run (self):
        """Build the RPM(s).

        Writes the generated spec file, optionally stopping there
        (--spec-only); otherwise builds a source distribution, copies it
        (plus any icon) into the RPM SOURCES tree, invokes rpm/rpmbuild,
        and moves the resulting .rpm file(s) into 'dist_dir'.
        """
        if DEBUG:
            # Dump the package metadata before building, for debugging.
            print "before _get_package_data():"
            print "vendor =", self.vendor
            print "packager =", self.packager
            print "doc_files =", self.doc_files
            print "changelog =", self.changelog

        # make directories
        if self.spec_only:
            spec_dir = self.dist_dir
            self.mkpath(spec_dir)
        else:
            # Standard RPM build-tree layout under 'rpm_base'.
            rpm_dir = {}
            for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
                rpm_dir[d] = os.path.join(self.rpm_base, d)
                self.mkpath(rpm_dir[d])
            spec_dir = rpm_dir['SPECS']

        # Spec file goes into 'dist_dir' if '--spec-only specified',
        # build/rpm.<plat> otherwise.
        spec_path = os.path.join(spec_dir,
                                 "%s.spec" % self.distribution.get_name())
        self.execute(write_file,
                     (spec_path,
                      self._make_spec_file()),
                     "writing '%s'" % spec_path)

        if self.spec_only: # stop if requested
            return

        # Make a source distribution and copy to SOURCES directory with
        # optional icon.
        sdist = self.reinitialize_command('sdist')
        if self.use_bzip2:
            sdist.formats = ['bztar']
        else:
            sdist.formats = ['gztar']
        self.run_command('sdist')

        # The spec file's Source0 expects exactly this tarball name.
        source = sdist.get_archive_files()[0]
        source_dir = rpm_dir['SOURCES']
        self.copy_file(source, source_dir)

        if self.icon:
            if os.path.exists(self.icon):
                self.copy_file(self.icon, source_dir)
            else:
                raise DistutilsFileError, \
                      "icon file '%s' does not exist" % self.icon


        # build package
        log.info("building RPMs")
        # Newer RPM releases split building out of 'rpm' into 'rpmbuild';
        # prefer the dedicated binary when it exists.
        rpm_cmd = ['rpm']
        if os.path.exists('/usr/bin/rpmbuild') or \
           os.path.exists('/bin/rpmbuild'):
            rpm_cmd = ['rpmbuild']
        if self.source_only: # what kind of RPMs?
            rpm_cmd.append('-bs')
        elif self.binary_only:
            rpm_cmd.append('-bb')
        else:
            rpm_cmd.append('-ba')
        if self.rpm3_mode:
            # Redirect the whole build tree under 'rpm_base' instead of
            # the system-wide RPM directories.
            rpm_cmd.extend(['--define',
                             '_topdir %s' % os.path.abspath(self.rpm_base)])
        if not self.keep_temp:
            rpm_cmd.append('--clean')
        rpm_cmd.append(spec_path)
        self.spawn(rpm_cmd)

        # XXX this is a nasty hack -- we really should have a proper way to
        # find out the names of the RPM files created; also, this assumes
        # that RPM creates exactly one source and one binary RPM.
        if not self.dry_run:
            if not self.binary_only:
                srpms = glob.glob(os.path.join(rpm_dir['SRPMS'], "*.rpm"))
                assert len(srpms) == 1, \
                       "unexpected number of SRPM files found: %s" % srpms
                self.move_file(srpms[0], self.dist_dir)

            if not self.source_only:
                # Any *debuginfo* package is moved separately so the main
                # binary RPM count stays at exactly one.
                rpms = glob.glob(os.path.join(rpm_dir['RPMS'], "*/*.rpm"))
                debuginfo = glob.glob(os.path.join(rpm_dir['RPMS'], \
                                                   "*/*debuginfo*.rpm"))
                if debuginfo:
                    rpms.remove(debuginfo[0])
                assert len(rpms) == 1, \
                       "unexpected number of RPM files found: %s" % rpms
                self.move_file(rpms[0], self.dist_dir)
                if debuginfo:
                    self.move_file(debuginfo[0], self.dist_dir)
    # run()
+
+
    def _make_spec_file(self):
        """Generate the text of an RPM spec file and return it as a
        list of strings (one per line).

        The spec is assembled in RPM's expected order: %define macros and
        the preamble (Summary, Name, Version, ..., tags), then %description,
        the build/install scriptlets, %files and finally %changelog.
        """
        # definitions and headers
        # '-' is RPM's version/release separator, so it may not appear
        # inside the version or release values themselves.
        spec_file = [
            '%define name ' + self.distribution.get_name(),
            '%define version ' + self.distribution.get_version().replace('-','_'),
            '%define release ' + self.release.replace('-','_'),
            '',
            'Summary: ' + self.distribution.get_description(),
            ]

        # put locale summaries into spec file
        # XXX not supported for now (hard to put a dictionary
        # in a config file -- arg!)
        #for locale in self.summaries.keys():
        #    spec_file.append('Summary(%s): %s' % (locale,
        #                                          self.summaries[locale]))

        spec_file.extend([
            'Name: %{name}',
            'Version: %{version}',
            'Release: %{release}',])

        # XXX yuck! this filename is available from the "sdist" command,
        # but only after it has run: and we create the spec file before
        # running "sdist", in case of --spec-only.
        if self.use_bzip2:
            spec_file.append('Source0: %{name}-%{version}.tar.bz2')
        else:
            spec_file.append('Source0: %{name}-%{version}.tar.gz')

        spec_file.extend([
            'License: ' + self.distribution.get_license(),
            'Group: ' + self.group,
            'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
            'Prefix: %{_prefix}', ])

        if not self.force_arch:
            # noarch if no extension modules
            if not self.distribution.has_ext_modules():
                spec_file.append('BuildArch: noarch')
        else:
            spec_file.append( 'BuildArch: %s' % self.force_arch )

        # Optional preamble tags: emitted only when the corresponding
        # option was supplied; list-valued options become space-joined.
        for field in ('Vendor',
                      'Packager',
                      'Provides',
                      'Requires',
                      'Conflicts',
                      'Obsoletes',
                      ):
            val = getattr(self, string.lower(field))
            if type(val) is ListType:
                spec_file.append('%s: %s' % (field, string.join(val)))
            elif val is not None:
                spec_file.append('%s: %s' % (field, val))


        if self.distribution.get_url() != 'UNKNOWN':
            spec_file.append('Url: ' + self.distribution.get_url())

        if self.distribution_name:
            spec_file.append('Distribution: ' + self.distribution_name)

        if self.build_requires:
            spec_file.append('BuildRequires: ' +
                             string.join(self.build_requires))

        if self.icon:
            spec_file.append('Icon: ' + os.path.basename(self.icon))

        if self.no_autoreq:
            # Disable RPM's automatic dependency discovery.
            spec_file.append('AutoReq: 0')

        spec_file.extend([
            '',
            '%description',
            self.distribution.get_long_description()
            ])

        # put locale descriptions into spec file
        # XXX again, suppressed because config file syntax doesn't
        # easily support this ;-(
        #for locale in self.descriptions.keys():
        #    spec_file.extend([
        #        '',
        #        '%description -l ' + locale,
        #        self.descriptions[locale],
        #        ])

        # rpm scripts
        # figure out default build script
        def_build = "%s setup.py build" % self.python
        if self.use_rpm_opt_flags:
            def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build

        # insert contents of files

        # XXX this is kind of misleading: user-supplied options are files
        # that we open and interpolate into the spec file, but the defaults
        # are just text that we drop in as-is.  Hmmm.

        # (rpm scriptlet tag, command attribute holding a filename,
        #  default scriptlet body used when no file was given)
        script_options = [
            ('prep', 'prep_script', "%setup"),
            ('build', 'build_script', def_build),
            ('install', 'install_script',
             ("%s setup.py install "
              "--root=$RPM_BUILD_ROOT "
              "--record=INSTALLED_FILES") % self.python),
            ('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
            ('verifyscript', 'verify_script', None),
            ('pre', 'pre_install', None),
            ('post', 'post_install', None),
            ('preun', 'pre_uninstall', None),
            ('postun', 'post_uninstall', None),
        ]

        for (rpm_opt, attr, default) in script_options:
            # Insert contents of file referred to, if no file is referred to
            # use 'default' as contents of script
            val = getattr(self, attr)
            if val or default:
                spec_file.extend([
                    '',
                    '%' + rpm_opt,])
                if val:
                    spec_file.extend(string.split(open(val, 'r').read(), '\n'))
                else:
                    spec_file.append(default)


        # files section
        # INSTALLED_FILES is produced by the %install scriptlet's
        # "--record" option above.
        spec_file.extend([
            '',
            '%files -f INSTALLED_FILES',
            '%defattr(-,root,root)',
            ])

        if self.doc_files:
            spec_file.append('%doc ' + string.join(self.doc_files))

        if self.changelog:
            # self.changelog was already formatted into a list of lines
            # by _format_changelog() in finalize_package_data().
            spec_file.extend([
                '',
                '%changelog',])
            spec_file.extend(self.changelog)

        return spec_file

    # _make_spec_file ()
+
+    def _format_changelog(self, changelog):
+        """Format the changelog correctly and convert it to a list of strings
+        """
+        if not changelog:
+            return changelog
+        new_changelog = []
+        for line in string.split(string.strip(changelog), '\n'):
+            line = string.strip(line)
+            if line[0] == '*':
+                new_changelog.extend(['', line])
+            elif line[0] == '-':
+                new_changelog.append(line)
+            else:
+                new_changelog.append('  ' + line)
+
+        # strip trailing newline inserted by first changelog entry
+        if not new_changelog[0]:
+            del new_changelog[0]
+
+        return new_changelog
+
+    # _format_changelog()
+
+# class bdist_rpm
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/bdist_wininst.py b/depot_tools/release/win/python_24/Lib/distutils/command/bdist_wininst.py
new file mode 100644
index 0000000..b4bf455
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/bdist_wininst.py
@@ -0,0 +1,322 @@
+"""distutils.command.bdist_wininst
+
+Implements the Distutils 'bdist_wininst' command: create a windows installer
+exe-program."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: bdist_wininst.py,v 1.56 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, os, string
+from distutils.core import Command
+from distutils.util import get_platform
+from distutils.dir_util import create_tree, remove_tree
+from distutils.errors import *
+from distutils.sysconfig import get_python_version
+from distutils import log
+
class bdist_wininst (Command):
    """Create a self-extracting Windows installer .exe for a distribution.

    The installer is a prebuilt wininst-x.y.exe stub with an optional
    bitmap, an INI-style configuration block, optional pre-install
    script, a binary header and a zip archive of the pseudo-installed
    tree appended to it.
    """

    description = "create an executable installer for MS Windows"

    user_options = [('bdist-dir=', None,
                     "temporary directory for creating the distribution"),
                    ('keep-temp', 'k',
                     "keep the pseudo-installation tree around after " +
                     "creating the distribution archive"),
                    ('target-version=', None,
                     "require a specific python version" +
                     " on the target system"),
                    ('no-target-compile', 'c',
                     "do not compile .py to .pyc on the target system"),
                    ('no-target-optimize', 'o',
                     "do not compile .py to .pyo (optimized)"
                     "on the target system"),
                    ('dist-dir=', 'd',
                     "directory to put final built distributions in"),
                    ('bitmap=', 'b',
                     "bitmap to use for the installer instead of python-powered logo"),
                    ('title=', 't',
                     "title to display on the installer background instead of default"),
                    ('skip-build', None,
                     "skip rebuilding everything (for testing/debugging)"),
                    ('install-script=', None,
                     "basename of installation script to be run after"
                     "installation or before deinstallation"),
                    ('pre-install-script=', None,
                     "Fully qualified filename of a script to be run before "
                     "any files are installed.  This script need not be in the "
                     "distribution"),
                   ]

    boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
                       'skip-build']

    def initialize_options (self):
        """Set all options to their pre-parse defaults."""
        self.bdist_dir = None
        self.keep_temp = 0
        self.no_target_compile = 0
        self.no_target_optimize = 0
        self.target_version = None
        self.dist_dir = None
        self.bitmap = None
        self.title = None
        self.skip_build = 0
        self.install_script = None
        self.pre_install_script = None

    # initialize_options()


    def finalize_options (self):
        """Fill in defaults and validate option combinations.

        For distributions with extension modules the target Python
        version must match the running interpreter unless --skip-build
        was given; an --install-script must name one of the
        distribution's declared scripts.
        """
        if self.bdist_dir is None:
            bdist_base = self.get_finalized_command('bdist').bdist_base
            self.bdist_dir = os.path.join(bdist_base, 'wininst')
        if not self.target_version:
            self.target_version = ""
        if not self.skip_build and self.distribution.has_ext_modules():
            # Extensions are compiled by *this* interpreter, so the
            # installer can only target the same Python version.
            short_version = get_python_version()
            if self.target_version and self.target_version != short_version:
                raise DistutilsOptionError, \
                      "target version can only be %s, or the '--skip_build'" \
                      " option must be specified" % (short_version,)
            self.target_version = short_version

        self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))

        if self.install_script:
            for script in self.distribution.scripts:
                if self.install_script == os.path.basename(script):
                    break
            else:
                raise DistutilsOptionError, \
                      "install_script '%s' not found in scripts" % \
                      self.install_script
    # finalize_options()


    def run (self):
        """Build the project, pseudo-install it under 'bdist_dir',
        zip the result and wrap the zip in a self-extracting .exe.
        """
        if (sys.platform != "win32" and
            (self.distribution.has_ext_modules() or
             self.distribution.has_c_libraries())):
            raise DistutilsPlatformError \
                  ("distribution contains extensions and/or C libraries; "
                   "must be compiled on a Windows 32 platform")

        if not self.skip_build:
            self.run_command('build')

        install = self.reinitialize_command('install', reinit_subcommands=1)
        install.root = self.bdist_dir
        install.skip_build = self.skip_build
        install.warn_dir = 0

        install_lib = self.reinitialize_command('install_lib')
        # we do not want to include pyc or pyo files
        install_lib.compile = 0
        install_lib.optimize = 0

        if self.distribution.has_ext_modules():
            # If we are building an installer for a Python version other
            # than the one we are currently running, then we need to ensure
            # our build_lib reflects the other Python version rather than ours.
            # Note that for target_version!=sys.version, we must have skipped the
            # build step, so there is no issue with enforcing the build of this
            # version.
            target_version = self.target_version
            if not target_version:
                assert self.skip_build, "Should have already checked this"
                target_version = sys.version[0:3]
            plat_specifier = ".%s-%s" % (get_platform(), target_version)
            build = self.get_finalized_command('build')
            build.build_lib = os.path.join(build.build_base,
                                           'lib' + plat_specifier)

        # Use a custom scheme for the zip-file, because we have to decide
        # at installation time which scheme to use.
        for key in ('purelib', 'platlib', 'headers', 'scripts', 'data'):
            value = string.upper(key)
            if key == 'headers':
                value = value + '/Include/$dist_name'
            setattr(install,
                    'install_' + key,
                    value)

        log.info("installing to %s", self.bdist_dir)
        install.ensure_finalized()

        # avoid warning of 'install_lib' about installing
        # into a directory not in sys.path
        sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))

        install.run()

        del sys.path[0]

        # And make an archive relative to the root of the
        # pseudo-installation tree.
        from tempfile import mktemp
        archive_basename = mktemp()
        fullname = self.distribution.get_fullname()
        arcname = self.make_archive(archive_basename, "zip",
                                    root_dir=self.bdist_dir)
        # create an exe containing the zip-file
        self.create_exe(arcname, fullname, self.bitmap)
        # remove the zip-file again
        log.debug("removing temporary file '%s'", arcname)
        os.remove(arcname)

        if not self.keep_temp:
            remove_tree(self.bdist_dir, dry_run=self.dry_run)

    # run()

    def get_inidata (self):
        # Return data describing the installation, as a single
        # INI-format string with [metadata] and [Setup] sections,
        # consumed by the wininst.exe stub at install time.

        lines = []
        metadata = self.distribution.metadata

        # Write the [metadata] section.
        lines.append("[metadata]")

        # 'info' will be displayed in the installer's dialog box,
        # describing the items to be installed.
        info = (metadata.long_description or '') + '\n'

        # Escape newline characters
        def escape(s):
            return string.replace(s, "\n", "\\n")

        for name in ["author", "author_email", "description", "maintainer",
                     "maintainer_email", "name", "url", "version"]:
            data = getattr(metadata, name, "")
            if data:
                info = info + ("\n    %s: %s" % \
                               (string.capitalize(name), escape(data)))
                lines.append("%s=%s" % (name, escape(data)))

        # The [setup] section contains entries controlling
        # the installer runtime.
        lines.append("\n[Setup]")
        if self.install_script:
            lines.append("install_script=%s" % self.install_script)
        lines.append("info=%s" % escape(info))
        lines.append("target_compile=%d" % (not self.no_target_compile))
        lines.append("target_optimize=%d" % (not self.no_target_optimize))
        if self.target_version:
            lines.append("target_version=%s" % self.target_version)

        title = self.title or self.distribution.get_fullname()
        lines.append("title=%s" % escape(title))
        import time
        import distutils
        build_info = "Built %s with distutils-%s" % \
                     (time.ctime(time.time()), distutils.__version__)
        lines.append("build_info=%s" % build_info)
        return string.join(lines, "\n")

    # get_inidata()

    def create_exe (self, arcname, fullname, bitmap=None):
        """Write the installer .exe into 'dist_dir'.

        Layout: wininst stub, optional bitmap, NUL-terminated config
        data (inidata + pre-install script), a packed binary header
        (magic, config length, bitmap length), then the zip archive
        'arcname'.
        """
        import struct

        self.mkpath(self.dist_dir)

        cfgdata = self.get_inidata()

        installer_name = self.get_installer_filename(fullname)
        self.announce("creating %s" % installer_name)

        if bitmap:
            bitmapdata = open(bitmap, "rb").read()
            bitmaplen = len(bitmapdata)
        else:
            bitmaplen = 0

        file = open(installer_name, "wb")
        file.write(self.get_exe_bytes())
        if bitmap:
            file.write(bitmapdata)

        # Convert cfgdata from unicode to ascii, mbcs encoded
        try:
            unicode
        except NameError:
            pass
        else:
            if isinstance(cfgdata, unicode):
                cfgdata = cfgdata.encode("mbcs")

        # Append the pre-install script
        cfgdata = cfgdata + "\0"
        if self.pre_install_script:
            script_data = open(self.pre_install_script, "r").read()
            cfgdata = cfgdata + script_data + "\n\0"
        else:
            # empty pre-install script
            cfgdata = cfgdata + "\0"
        file.write(cfgdata)

        # The 'magic number' 0x1234567B is used to make sure that the
        # binary layout of 'cfgdata' is what the wininst.exe binary
        # expects.  If the layout changes, increment that number, make
        # the corresponding changes to the wininst.exe sources, and
        # recompile them.
        header = struct.pack("<iii",
                             0x1234567B,       # tag
                             len(cfgdata),     # length
                             bitmaplen,        # number of bytes in bitmap
                             )
        file.write(header)
        file.write(open(arcname, "rb").read())

    # create_exe()

    def get_installer_filename(self, fullname):
        # Factored out to allow overriding in subclasses
        if self.target_version:
            # if we create an installer for a specific python version,
            # it's better to include this in the name
            installer_name = os.path.join(self.dist_dir,
                                          "%s.win32-py%s.exe" %
                                           (fullname, self.target_version))
        else:
            installer_name = os.path.join(self.dist_dir,
                                          "%s.win32.exe" % fullname)
        return installer_name
    # get_installer_filename()

    def get_exe_bytes (self):
        """Return the raw bytes of the wininst stub executable matching
        the MSVC runtime of the targeted Python version.
        """
        from distutils.msvccompiler import get_build_version
        # If a target-version other than the current version has been
        # specified, then using the MSVC version from *this* build is no good.
        # Without actually finding and executing the target version and parsing
        # its sys.version, we just hard-code our knowledge of old versions.
        # NOTE: Possible alternative is to allow "--target-version" to
        # specify a Python executable rather than a simple version string.
        # We can then execute this program to obtain any info we need, such
        # as the real sys.version string for the build.
        cur_version = get_python_version()
        if self.target_version and self.target_version != cur_version:
            # If the target version is *later* than us, then we assume they
            # use what we use
            # string compares seem wrong, but are what sysconfig.py itself uses
            if self.target_version > cur_version:
                bv = get_build_version()
            else:
                if self.target_version < "2.4":
                    bv = "6"
                else:
                    bv = "7.1"
        else:
            # for current version - use authoritative check.
            bv = get_build_version()

        # wininst-x.y.exe is in the same directory as this file
        directory = os.path.dirname(__file__)
        # we must use a wininst-x.y.exe built with the same C compiler
        # used for python.  XXX What about mingw, borland, and so on?
        filename = os.path.join(directory, "wininst-%s.exe" % bv)
        return open(filename, "rb").read()
# class bdist_wininst
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/build.py b/depot_tools/release/win/python_24/Lib/distutils/command/build.py
new file mode 100644
index 0000000..30f2634f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/build.py
@@ -0,0 +1,136 @@
+"""distutils.command.build
+
+Implements the Distutils 'build' command."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: build.py,v 1.36 2004/11/10 22:23:15 loewis Exp $"
+
+import sys, os
+from distutils.core import Command
+from distutils.util import get_platform
+
+
def show_compilers ():
    """Print the list of available compiler types.

    Module-level wrapper used by the '--help-compiler' help option;
    the import is local to avoid a circular import at module load.
    """
    from distutils.ccompiler import show_compilers as _show_compilers
    _show_compilers()
+
+
class build (Command):
    """The 'build' command: compute build directories and dispatch to the
    build_py/build_clib/build_ext/build_scripts sub-commands as needed.
    """

    description = "build everything needed to install"

    user_options = [
        ('build-base=', 'b',
         "base directory for build library"),
        ('build-purelib=', None,
         "build directory for platform-neutral distributions"),
        ('build-platlib=', None,
         "build directory for platform-specific distributions"),
        ('build-lib=', None,
         "build directory for all distribution (defaults to either " +
         "build-purelib or build-platlib"),
        ('build-scripts=', None,
         "build directory for scripts"),
        ('build-temp=', 't',
         "temporary build directory"),
        ('compiler=', 'c',
         "specify the compiler type"),
        ('debug', 'g',
         "compile extensions and libraries with debugging information"),
        ('force', 'f',
         "forcibly build everything (ignore file timestamps)"),
        ('executable=', 'e',
         "specify final destination interpreter path (build.py)"),
        ]

    boolean_options = ['debug', 'force']

    help_options = [
        ('help-compiler', None,
         "list available compilers", show_compilers),
        ]

    def initialize_options (self):
        """Set all options to their pre-parse defaults."""
        self.build_base = 'build'
        # these are decided only after 'build_base' has its final value
        # (unless overridden by the user or client)
        self.build_purelib = None
        self.build_platlib = None
        self.build_lib = None
        self.build_temp = None
        self.build_scripts = None
        self.compiler = None
        self.debug = None
        self.force = 0
        self.executable = None

    def finalize_options (self):
        """Derive all unset directories from 'build_base' and the
        platform/Python-version specifier (e.g. '.linux-i686-2.4').
        """
        plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])

        # 'build_purelib' and 'build_platlib' just default to 'lib' and
        # 'lib.<plat>' under the base build directory.  We only use one of
        # them for a given distribution, though --
        if self.build_purelib is None:
            self.build_purelib = os.path.join(self.build_base, 'lib')
        if self.build_platlib is None:
            self.build_platlib = os.path.join(self.build_base,
                                              'lib' + plat_specifier)

        # 'build_lib' is the actual directory that we will use for this
        # particular module distribution -- if user didn't supply it, pick
        # one of 'build_purelib' or 'build_platlib'.
        if self.build_lib is None:
            if self.distribution.ext_modules:
                self.build_lib = self.build_platlib
            else:
                self.build_lib = self.build_purelib

        # 'build_temp' -- temporary directory for compiler turds,
        # "build/temp.<plat>"
        if self.build_temp is None:
            self.build_temp = os.path.join(self.build_base,
                                           'temp' + plat_specifier)
        if self.build_scripts is None:
            self.build_scripts = os.path.join(self.build_base,
                                              'scripts-' + sys.version[0:3])

        if self.executable is None:
            self.executable = os.path.normpath(sys.executable)
    # finalize_options ()


    def run (self):
        """Run every applicable build sub-command, in declared order."""
        # Run all relevant sub-commands.  This will be some subset of:
        #  - build_py      - pure Python modules
        #  - build_clib    - standalone C libraries
        #  - build_ext     - Python extensions
        #  - build_scripts - (Python) scripts
        for cmd_name in self.get_sub_commands():
            self.run_command(cmd_name)


    # -- Predicates for the sub-command list ---------------------------

    def has_pure_modules (self):
        return self.distribution.has_pure_modules()

    def has_c_libraries (self):
        return self.distribution.has_c_libraries()

    def has_ext_modules (self):
        return self.distribution.has_ext_modules()

    def has_scripts (self):
        return self.distribution.has_scripts()


    # (command name, predicate) pairs consulted by get_sub_commands():
    # a sub-command runs only when its predicate returns true.
    sub_commands = [('build_py',      has_pure_modules),
                    ('build_clib',    has_c_libraries),
                    ('build_ext',     has_ext_modules),
                    ('build_scripts', has_scripts),
                   ]

# class build
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/build_clib.py b/depot_tools/release/win/python_24/Lib/distutils/command/build_clib.py
new file mode 100644
index 0000000..3eed1b3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/build_clib.py
@@ -0,0 +1,238 @@
+"""distutils.command.build_clib
+
+Implements the Distutils 'build_clib' command, to build a C/C++ library
+that is included in the module distribution and needed by an extension
+module."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: build_clib.py,v 1.28 2004/11/10 22:23:15 loewis Exp $"
+
+
+# XXX this module has *lots* of code ripped-off quite transparently from
+# build_ext.py -- not surprisingly really, as the work required to build
+# a static library from a collection of C source files is not really all
+# that different from what's required to build a shared object file from
+# a collection of C source files.  Nevertheless, I haven't done the
+# necessary refactoring to account for the overlap in code between the
+# two modules, mainly because a number of subtle details changed in the
+# cut 'n paste.  Sigh.
+
+import os, string
+from types import *
+from distutils.core import Command
+from distutils.errors import *
+from distutils.sysconfig import customize_compiler
+from distutils import log
+
+def show_compilers ():
+    from distutils.ccompiler import show_compilers
+    show_compilers()
+
+
+class build_clib (Command):
+
+    description = "build C/C++ libraries used by Python extensions"
+
+    user_options = [
+        ('build-clib', 'b',
+         "directory to build C/C++ libraries to"),
+        ('build-temp', 't',
+         "directory to put temporary build by-products"),
+        ('debug', 'g',
+         "compile with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ]
+
+    boolean_options = ['debug', 'force']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options (self):
+        self.build_clib = None
+        self.build_temp = None
+
+        # List of libraries to build
+        self.libraries = None
+
+        # Compilation options for all libraries
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.debug = None
+        self.force = 0
+        self.compiler = None
+
+    # initialize_options()
+
+
+    def finalize_options (self):
+
+        # This might be confusing: both build-clib and build-temp default
+        # to build-temp as defined by the "build" command.  This is because
+        # I think that C libraries are really just temporary build
+        # by-products, at least from the point of view of building Python
+        # extensions -- but I want to keep my options open.
+        self.set_undefined_options('build',
+                                   ('build_temp', 'build_clib'),
+                                   ('build_temp', 'build_temp'),
+                                   ('compiler', 'compiler'),
+                                   ('debug', 'debug'),
+                                   ('force', 'force'))
+
+        self.libraries = self.distribution.libraries
+        if self.libraries:
+            self.check_library_list(self.libraries)
+
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if type(self.include_dirs) is StringType:
+            self.include_dirs = string.split(self.include_dirs,
+                                             os.pathsep)
+
+        # XXX same as for build_ext -- what about 'self.define' and
+        # 'self.undef' ?
+
+    # finalize_options()
+
+
+    def run (self):
+
+        if not self.libraries:
+            return
+
+        # Yech -- this is cut 'n pasted from build_ext.py!
+        from distutils.ccompiler import new_compiler
+        self.compiler = new_compiler(compiler=self.compiler,
+                                     dry_run=self.dry_run,
+                                     force=self.force)
+        customize_compiler(self.compiler)
+
+        if self.include_dirs is not None:
+            self.compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for (name,value) in self.define:
+                self.compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler.undefine_macro(macro)
+
+        self.build_libraries(self.libraries)
+
+    # run()
+
+
+    def check_library_list (self, libraries):
+        """Ensure that the list of libraries (presumably provided as a
+           command option 'libraries') is valid, i.e. it is a list of
+           2-tuples, where the tuples are (library_name, build_info_dict).
+           Raise DistutilsSetupError if the structure is invalid anywhere;
+           just returns otherwise."""
+
+        # Yechh, blecch, ackk: this is ripped straight out of build_ext.py,
+        # with only names changed to protect the innocent!
+
+        if type(libraries) is not ListType:
+            raise DistutilsSetupError, \
+                  "'libraries' option must be a list of tuples"
+
+        for lib in libraries:
+            if type(lib) is not TupleType and len(lib) != 2:
+                raise DistutilsSetupError, \
+                      "each element of 'libraries' must a 2-tuple"
+
+            if type(lib[0]) is not StringType:
+                raise DistutilsSetupError, \
+                      "first element of each tuple in 'libraries' " + \
+                      "must be a string (the library name)"
+            if '/' in lib[0] or (os.sep != '/' and os.sep in lib[0]):
+                raise DistutilsSetupError, \
+                      ("bad library name '%s': " +
+                       "may not contain directory separators") % \
+                      lib[0]
+
+            if type(lib[1]) is not DictionaryType:
+                raise DistutilsSetupError, \
+                      "second element of each tuple in 'libraries' " + \
+                      "must be a dictionary (build info)"
+        # for lib
+
+    # check_library_list ()
+
+
+    def get_library_names (self):
+        # Assume the library list is valid -- 'check_library_list()' is
+        # called from 'finalize_options()', so it should be!
+
+        if not self.libraries:
+            return None
+
+        lib_names = []
+        for (lib_name, build_info) in self.libraries:
+            lib_names.append(lib_name)
+        return lib_names
+
+    # get_library_names ()
+
+
+    def get_source_files (self):
+        self.check_library_list(self.libraries)
+        filenames = []
+        for (lib_name, build_info) in self.libraries:
+            sources = build_info.get('sources')
+            if (sources is None or
+                type(sources) not in (ListType, TupleType) ):
+                raise DistutilsSetupError, \
+                      ("in 'libraries' option (library '%s'), "
+                       "'sources' must be present and must be "
+                       "a list of source filenames") % lib_name
+
+            filenames.extend(sources)
+
+        return filenames
+    # get_source_files ()
+
+
+    def build_libraries (self, libraries):
+
+        for (lib_name, build_info) in libraries:
+            sources = build_info.get('sources')
+            if sources is None or type(sources) not in (ListType, TupleType):
+                raise DistutilsSetupError, \
+                      ("in 'libraries' option (library '%s'), " +
+                       "'sources' must be present and must be " +
+                       "a list of source filenames") % lib_name
+            sources = list(sources)
+
+            log.info("building '%s' library", lib_name)
+
+            # First, compile the source code to object files in the library
+            # directory.  (This should probably change to putting object
+            # files in a temporary build directory.)
+            macros = build_info.get('macros')
+            include_dirs = build_info.get('include_dirs')
+            objects = self.compiler.compile(sources,
+                                            output_dir=self.build_temp,
+                                            macros=macros,
+                                            include_dirs=include_dirs,
+                                            debug=self.debug)
+
+            # Now "link" the object files together into a static library.
+            # (On Unix at least, this isn't really linking -- it just
+            # builds an archive.  Whatever.)
+            self.compiler.create_static_lib(objects, lib_name,
+                                            output_dir=self.build_clib,
+                                            debug=self.debug)
+
+        # for libraries
+
+    # build_libraries ()
+
+# class build_lib
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/build_ext.py b/depot_tools/release/win/python_24/Lib/distutils/command/build_ext.py
new file mode 100644
index 0000000..0c1741a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/build_ext.py
@@ -0,0 +1,693 @@
+"""distutils.command.build_ext
+
+Implements the Distutils 'build_ext' command, for building extension
+modules (currently limited to C extensions, should accommodate C++
+extensions ASAP)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: build_ext.py,v 1.98 2004/11/10 22:23:15 loewis Exp $"
+
+import sys, os, string, re
+from types import *
+from distutils.core import Command
+from distutils.errors import *
+from distutils.sysconfig import customize_compiler, get_python_version
+from distutils.dep_util import newer_group
+from distutils.extension import Extension
+from distutils import log
+
+# An extension name is just a dot-separated list of Python NAMEs (ie.
+# the same as a fully-qualified module name).  Used below by
+# check_extensions_list() to validate names supplied in old-style
+# (ext_name, build_info) tuples.
+extension_name_re = re.compile \
+    (r'^[a-zA-Z_][a-zA-Z_0-9]*(\.[a-zA-Z_][a-zA-Z_0-9]*)*$')
+
+
+def show_compilers ():
+    from distutils.ccompiler import show_compilers
+    show_compilers()
+
+
+class build_ext (Command):
+
+    description = "build C/C++ extensions (compile/link to build directory)"
+
+    # XXX thoughts on how to deal with complex command-line options like
+    # these, i.e. how to make it so fancy_getopt can suck them off the
+    # command line and make it look like setup.py defined the appropriate
+    # lists of tuples of what-have-you.
+    #   - each command needs a callback to process its command-line options
+    #   - Command.__init__() needs access to its share of the whole
+    #     command line (must ultimately come from
+    #     Distribution.parse_command_line())
+    #   - it then calls the current command class' option-parsing
+    #     callback to deal with weird options like -D, which have to
+    #     parse the option text and churn out some custom data
+    #     structure
+    #   - that data structure (in this case, a list of 2-tuples)
+    #     will then be present in the command object by the time
+    #     we get to finalize_options() (i.e. the constructor
+    #     takes care of both command-line and client options
+    #     in between initialize_options() and finalize_options())
+
+    # Help-string suffix for options whose value is an os.pathsep-separated
+    # list of directories (':' on Unix, ';' on Windows).
+    sep_by = " (separated by '%s')" % os.pathsep
+    # Option table: a trailing '=' means the option takes an argument;
+    # entries without '=' are boolean flags (see 'boolean_options' below).
+    user_options = [
+        ('build-lib=', 'b',
+         "directory for compiled extension modules"),
+        ('build-temp=', 't',
+         "directory for temporary files (build by-products)"),
+        ('inplace', 'i',
+         "ignore build-lib and put compiled extensions into the source " +
+         "directory alongside your pure Python modules"),
+        ('include-dirs=', 'I',
+         "list of directories to search for header files" + sep_by),
+        ('define=', 'D',
+         "C preprocessor macros to define"),
+        ('undef=', 'U',
+         "C preprocessor macros to undefine"),
+        ('libraries=', 'l',
+         "external C libraries to link with"),
+        ('library-dirs=', 'L',
+         "directories to search for external C libraries" + sep_by),
+        ('rpath=', 'R',
+         "directories to search for shared C libraries at runtime"),
+        ('link-objects=', 'O',
+         "extra explicit link objects to include in the link"),
+        ('debug', 'g',
+         "compile/link with debugging information"),
+        ('force', 'f',
+         "forcibly build everything (ignore file timestamps)"),
+        ('compiler=', 'c',
+         "specify the compiler type"),
+        ('swig-cpp', None,
+         "make SWIG create C++ files (default is C)"),
+        ('swig-opts=', None,
+         "list of SWIG command line options"),
+        ('swig=', None,
+         "path to the SWIG executable"),
+        ]
+
+    boolean_options = ['inplace', 'debug', 'force', 'swig-cpp']
+
+    help_options = [
+        ('help-compiler', None,
+         "list available compilers", show_compilers),
+        ]
+
+    def initialize_options (self):
+        """Set every option to its pre-parse default.
+
+        Values left as None are filled in by finalize_options(), either
+        from the 'build' command or from the Distribution object.
+        """
+        # What to build and where to put it.
+        self.extensions = None
+        self.build_lib = None
+        self.build_temp = None
+        self.inplace = 0
+        self.package = None
+
+        # Compile/link options shared by all extensions.
+        self.include_dirs = None
+        self.define = None
+        self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+        self.rpath = None
+        self.link_objects = None
+        self.debug = None
+        self.force = None
+        self.compiler = None
+        # SWIG support.
+        self.swig = None
+        self.swig_cpp = None
+        self.swig_opts = None
+
+    def finalize_options (self):
+        """Resolve unset options and normalize parsed option values.
+
+        Inherits build_lib/build_temp/compiler/debug/force from the
+        'build' command; splits os.pathsep- and comma-separated option
+        strings into lists; and appends the platform-specific include
+        and library directories needed to compile against this Python
+        (Windows, OS/2, Cygwin/AtheOS each get special handling).
+        """
+        from distutils import sysconfig
+
+        self.set_undefined_options('build',
+                                   ('build_lib', 'build_lib'),
+                                   ('build_temp', 'build_temp'),
+                                   ('compiler', 'compiler'),
+                                   ('debug', 'debug'),
+                                   ('force', 'force'))
+
+        if self.package is None:
+            self.package = self.distribution.ext_package
+
+        self.extensions = self.distribution.ext_modules
+
+
+        # Make sure Python's include directories (for Python.h, pyconfig.h,
+        # etc.) are in the include search path.
+        py_include = sysconfig.get_python_inc()
+        plat_py_include = sysconfig.get_python_inc(plat_specific=1)
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        if type(self.include_dirs) is StringType:
+            self.include_dirs = string.split(self.include_dirs, os.pathsep)
+
+        # Put the Python "system" include dir at the end, so that
+        # any local include dirs take precedence.
+        self.include_dirs.append(py_include)
+        if plat_py_include != py_include:
+            self.include_dirs.append(plat_py_include)
+
+        if type(self.libraries) is StringType:
+            self.libraries = [self.libraries]
+
+        # Life is easier if we're not forever checking for None, so
+        # simplify these options to empty lists if unset
+        if self.libraries is None:
+            self.libraries = []
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif type(self.library_dirs) is StringType:
+            self.library_dirs = string.split(self.library_dirs, os.pathsep)
+
+        if self.rpath is None:
+            self.rpath = []
+        elif type(self.rpath) is StringType:
+            self.rpath = string.split(self.rpath, os.pathsep)
+
+        # for extensions under windows use different directories
+        # for Release and Debug builds.
+        # also Python's library directory must be appended to library_dirs
+        if os.name == 'nt':
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs'))
+            if self.debug:
+                self.build_temp = os.path.join(self.build_temp, "Debug")
+            else:
+                self.build_temp = os.path.join(self.build_temp, "Release")
+
+            # Append the source distribution include and library directories,
+            # this allows distutils on windows to work in the source tree
+            self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC'))
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'PCBuild'))
+
+        # OS/2 (EMX) doesn't support Debug vs Release builds, but has the
+        # import libraries in its "Config" subdirectory
+        if os.name == 'os2':
+            self.library_dirs.append(os.path.join(sys.exec_prefix, 'Config'))
+
+        # for extensions under Cygwin and AtheOS Python's library directory must be
+        # appended to library_dirs
+        if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
+            if string.find(sys.executable, sys.exec_prefix) != -1:
+                # building third party extensions
+                self.library_dirs.append(os.path.join(sys.prefix, "lib",
+                                                      "python" + get_python_version(),
+                                                      "config"))
+            else:
+                # building python standard extensions
+                self.library_dirs.append('.')
+
+        # The argument parsing will result in self.define being a string, but
+        # it has to be a list of 2-tuples.  All the preprocessor symbols
+        # specified by the 'define' option will be set to '1'.  Multiple
+        # symbols can be separated with commas.
+
+        if self.define:
+            defines = string.split(self.define, ',')
+            self.define = map(lambda symbol: (symbol, '1'), defines)
+
+        # The option for macros to undefine is also a string from the
+        # option parsing, but has to be a list.  Multiple symbols can also
+        # be separated with commas here.
+        if self.undef:
+            self.undef = string.split(self.undef, ',')
+
+        if self.swig_opts is None:
+            self.swig_opts = []
+        else:
+            # NOTE(review): a naive space split -- swig options containing
+            # spaces cannot be expressed on the command line.
+            self.swig_opts = self.swig_opts.split(' ')
+
+    # finalize_options ()
+
+
+    def run (self):
+        """Compile and link every extension in 'self.extensions'.
+
+        Sets up the CCompiler object, pushes all compile/link options
+        into it (so they apply to every compile/link done here), and
+        delegates the real work to build_extensions().  Returns
+        immediately when there is nothing to build.
+        """
+
+        from distutils.ccompiler import new_compiler
+
+        # 'self.extensions', as supplied by setup.py, is a list of
+        # Extension instances.  See the documentation for Extension (in
+        # distutils.extension) for details.
+        #
+        # For backwards compatibility with Distutils 0.8.2 and earlier, we
+        # also allow the 'extensions' list to be a list of tuples:
+        #    (ext_name, build_info)
+        # where build_info is a dictionary containing everything that
+        # Extension instances do except the name, with a few things being
+        # differently named.  We convert these 2-tuples to Extension
+        # instances as needed.
+
+        if not self.extensions:
+            return
+
+        # If we were asked to build any C/C++ libraries, make sure that the
+        # directory where we put them is in the library search path for
+        # linking extensions.
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.libraries.extend(build_clib.get_library_names() or [])
+            self.library_dirs.append(build_clib.build_clib)
+
+        # Setup the CCompiler object that we'll use to do all the
+        # compiling and linking
+        self.compiler = new_compiler(compiler=self.compiler,
+                                     verbose=self.verbose,
+                                     dry_run=self.dry_run,
+                                     force=self.force)
+        customize_compiler(self.compiler)
+
+        # And make sure that any compile/link-related options (which might
+        # come from the command-line or from the setup script) are set in
+        # that CCompiler object -- that way, they automatically apply to
+        # all compiling and linking done here.
+        if self.include_dirs is not None:
+            self.compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for (name,value) in self.define:
+                self.compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                self.compiler.undefine_macro(macro)
+        if self.libraries is not None:
+            self.compiler.set_libraries(self.libraries)
+        if self.library_dirs is not None:
+            self.compiler.set_library_dirs(self.library_dirs)
+        if self.rpath is not None:
+            self.compiler.set_runtime_library_dirs(self.rpath)
+        if self.link_objects is not None:
+            self.compiler.set_link_objects(self.link_objects)
+
+        # Now actually compile and link everything.
+        self.build_extensions()
+
+    # run ()
+
+
+    def check_extensions_list (self, extensions):
+        """Ensure that the list of extensions (presumably provided as a
+        command option 'extensions') is valid, i.e. it is a list of
+        Extension objects.  We also support the old-style list of 2-tuples,
+        where the tuples are (ext_name, build_info), which are converted to
+        Extension instances here.
+
+        Raise DistutilsSetupError if the structure is invalid anywhere;
+        just returns otherwise.
+        """
+        if type(extensions) is not ListType:
+            raise DistutilsSetupError, \
+                  "'ext_modules' option must be a list of Extension instances"
+
+        for i in range(len(extensions)):
+            ext = extensions[i]
+            if isinstance(ext, Extension):
+                continue                # OK! (assume type-checking done
+                                        # by Extension constructor)
+
+            (ext_name, build_info) = ext
+            log.warn(("old-style (ext_name, build_info) tuple found in "
+                      "ext_modules for extension '%s'"
+                      "-- please convert to Extension instance" % ext_name))
+            if type(ext) is not TupleType and len(ext) != 2:
+                raise DistutilsSetupError, \
+                      ("each element of 'ext_modules' option must be an "
+                       "Extension instance or 2-tuple")
+
+            if not (type(ext_name) is StringType and
+                    extension_name_re.match(ext_name)):
+                raise DistutilsSetupError, \
+                      ("first element of each tuple in 'ext_modules' "
+                       "must be the extension name (a string)")
+
+            if type(build_info) is not DictionaryType:
+                raise DistutilsSetupError, \
+                      ("second element of each tuple in 'ext_modules' "
+                       "must be a dictionary (build info)")
+
+            # OK, the (ext_name, build_info) dict is type-safe: convert it
+            # to an Extension instance.
+            ext = Extension(ext_name, build_info['sources'])
+
+            # Easy stuff: one-to-one mapping from dict elements to
+            # instance attributes.
+            for key in ('include_dirs',
+                        'library_dirs',
+                        'libraries',
+                        'extra_objects',
+                        'extra_compile_args',
+                        'extra_link_args'):
+                val = build_info.get(key)
+                if val is not None:
+                    setattr(ext, key, val)
+
+            # Medium-easy stuff: same syntax/semantics, different names.
+            ext.runtime_library_dirs = build_info.get('rpath')
+            if build_info.has_key('def_file'):
+                log.warn("'def_file' element of build info dict "
+                         "no longer supported")
+
+            # Non-trivial stuff: 'macros' split into 'define_macros'
+            # and 'undef_macros'.
+            macros = build_info.get('macros')
+            if macros:
+                ext.define_macros = []
+                ext.undef_macros = []
+                for macro in macros:
+                    if not (type(macro) is TupleType and
+                            1 <= len(macro) <= 2):
+                        raise DistutilsSetupError, \
+                              ("'macros' element of build info dict "
+                               "must be 1- or 2-tuple")
+                    if len(macro) == 1:
+                        ext.undef_macros.append(macro[0])
+                    elif len(macro) == 2:
+                        ext.define_macros.append(macro)
+
+            extensions[i] = ext
+
+        # for extensions
+
+    # check_extensions_list ()
+
+
+    def get_source_files (self):
+        self.check_extensions_list(self.extensions)
+        filenames = []
+
+        # Wouldn't it be neat if we knew the names of header files too...
+        for ext in self.extensions:
+            filenames.extend(ext.sources)
+
+        return filenames
+
+
+    def get_outputs (self):
+
+        # Sanity check the 'extensions' list -- can't assume this is being
+        # done in the same run as a 'build_extensions()' call (in fact, we
+        # can probably assume that it *isn't*!).
+        self.check_extensions_list(self.extensions)
+
+        # And build the list of output (built) filenames.  Note that this
+        # ignores the 'inplace' flag, and assumes everything goes in the
+        # "build" tree.
+        outputs = []
+        for ext in self.extensions:
+            fullname = self.get_ext_fullname(ext.name)
+            outputs.append(os.path.join(self.build_lib,
+                                        self.get_ext_filename(fullname)))
+        return outputs
+
+    # get_outputs ()
+
+    def build_extensions(self):
+        """Validate 'self.extensions' and build each extension in turn."""
+        # First, sanity-check the 'extensions' list
+        self.check_extensions_list(self.extensions)
+
+        for ext in self.extensions:
+            self.build_extension(ext)
+
+    def build_extension(self, ext):
+        """Compile and link one Extension instance 'ext'.
+
+        Skips the build when the output file is newer than every source
+        and dependency (unless --force).  Honours 'inplace' by writing
+        the result next to the package's pure Python modules instead of
+        under 'build_lib'.  Raises DistutilsSetupError if 'ext.sources'
+        is missing or malformed.
+        """
+        sources = ext.sources
+        if sources is None or type(sources) not in (ListType, TupleType):
+            raise DistutilsSetupError, \
+                  ("in 'ext_modules' option (extension '%s'), " +
+                   "'sources' must be present and must be " +
+                   "a list of source filenames") % ext.name
+        sources = list(sources)
+
+        fullname = self.get_ext_fullname(ext.name)
+        if self.inplace:
+            # ignore build-lib -- put the compiled extension into
+            # the source tree along with pure Python modules
+
+            modpath = string.split(fullname, '.')
+            package = string.join(modpath[0:-1], '.')
+            base = modpath[-1]
+
+            build_py = self.get_finalized_command('build_py')
+            package_dir = build_py.get_package_dir(package)
+            ext_filename = os.path.join(package_dir,
+                                        self.get_ext_filename(base))
+        else:
+            ext_filename = os.path.join(self.build_lib,
+                                        self.get_ext_filename(fullname))
+        depends = sources + ext.depends
+        if not (self.force or newer_group(depends, ext_filename, 'newer')):
+            log.debug("skipping '%s' extension (up-to-date)", ext.name)
+            return
+        else:
+            log.info("building '%s' extension", ext.name)
+
+        # First, scan the sources for SWIG definition files (.i), run
+        # SWIG on 'em to create .c files, and modify the sources list
+        # accordingly.
+        sources = self.swig_sources(sources, ext)
+
+        # Next, compile the source code to object files.
+
+        # XXX not honouring 'define_macros' or 'undef_macros' -- the
+        # CCompiler API needs to change to accommodate this, and I
+        # want to do one thing at a time!
+
+        # Two possible sources for extra compiler arguments:
+        #   - 'extra_compile_args' in Extension object
+        #   - CFLAGS environment variable (not particularly
+        #     elegant, but people seem to expect it and I
+        #     guess it's useful)
+        # The environment variable should take precedence, and
+        # any sensible compiler will give precedence to later
+        # command line args.  Hence we combine them in order:
+        extra_args = ext.extra_compile_args or []
+
+        # A 1-tuple in 'macros' means "undefine this name".
+        macros = ext.define_macros[:]
+        for undef in ext.undef_macros:
+            macros.append((undef,))
+
+        objects = self.compiler.compile(sources,
+                                        output_dir=self.build_temp,
+                                        macros=macros,
+                                        include_dirs=ext.include_dirs,
+                                        debug=self.debug,
+                                        extra_postargs=extra_args,
+                                        depends=ext.depends)
+
+        # XXX -- this is a Vile HACK!
+        #
+        # The setup.py script for Python on Unix needs to be able to
+        # get this list so it can perform all the clean up needed to
+        # avoid keeping object files around when cleaning out a failed
+        # build of an extension module.  Since Distutils does not
+        # track dependencies, we have to get rid of intermediates to
+        # ensure all the intermediates will be properly re-built.
+        #
+        self._built_objects = objects[:]
+
+        # Now link the object files together into a "shared object" --
+        # of course, first we have to figure out all the other things
+        # that go into the mix.
+        if ext.extra_objects:
+            objects.extend(ext.extra_objects)
+        extra_args = ext.extra_link_args or []
+
+        # Detect target language, if not provided
+        language = ext.language or self.compiler.detect_language(sources)
+
+        self.compiler.link_shared_object(
+            objects, ext_filename,
+            libraries=self.get_libraries(ext),
+            library_dirs=ext.library_dirs,
+            runtime_library_dirs=ext.runtime_library_dirs,
+            extra_postargs=extra_args,
+            export_symbols=self.get_export_symbols(ext),
+            debug=self.debug,
+            build_temp=self.build_temp,
+            target_lang=language)
+
+
+    def swig_sources (self, sources, extension):
+
+        """Walk the list of source files in 'sources', looking for SWIG
+        interface (.i) files.  Run SWIG on all that are found, and
+        return a modified 'sources' list with SWIG source files replaced
+        by the generated C (or C++) files.
+
+        'extension' supplies per-extension SWIG options; they are used
+        only when no --swig-opts were given on the command line.
+        """
+
+        new_sources = []
+        swig_sources = []
+        # Maps each .i file to the name of the wrapper file SWIG will
+        # generate for it (foo.i -> foo_wrap.c or foo_wrap.cpp).
+        swig_targets = {}
+
+        # XXX this drops generated C/C++ files into the source tree, which
+        # is fine for developers who want to distribute the generated
+        # source -- but there should be an option to put SWIG output in
+        # the temp dir.
+
+        if self.swig_cpp:
+            log.warn("--swig-cpp is deprecated - use --swig-opts=-c++")
+
+        if self.swig_cpp or ('-c++' in self.swig_opts):
+            target_ext = '.cpp'
+        else:
+            target_ext = '.c'
+
+        for source in sources:
+            (base, ext) = os.path.splitext(source)
+            if ext == ".i":             # SWIG interface file
+                new_sources.append(base + '_wrap' + target_ext)
+                swig_sources.append(source)
+                swig_targets[source] = new_sources[-1]
+            else:
+                new_sources.append(source)
+
+        if not swig_sources:
+            return new_sources
+
+        swig = self.swig or self.find_swig()
+        swig_cmd = [swig, "-python"]
+        swig_cmd.extend(self.swig_opts)
+        if self.swig_cpp:
+            swig_cmd.append("-c++")
+
+        # Do not override commandline arguments
+        if not self.swig_opts:
+            for o in extension.swig_opts:
+                swig_cmd.append(o)
+
+        for source in swig_sources:
+            target = swig_targets[source]
+            log.info("swigging %s to %s", source, target)
+            self.spawn(swig_cmd + ["-o", target, source])
+
+        return new_sources
+
+    # swig_sources ()
+
+    def find_swig (self):
+        """Return the name of the SWIG executable.  On Unix, this is
+        just "swig" -- it should be in the PATH.  Tries a bit harder on
+        Windows.
+
+        Raises DistutilsPlatformError on platforms where we have no
+        idea how to locate SWIG.
+        """
+
+        if os.name == "posix":
+            return "swig"
+        elif os.name == "nt":
+
+            # Look for SWIG in its standard installation directory on
+            # Windows (or so I presume!).  If we find it there, great;
+            # if not, act like Unix and assume it's in the PATH.
+            for vers in ("1.3", "1.2", "1.1"):
+                fn = os.path.join("c:\\swig%s" % vers, "swig.exe")
+                if os.path.isfile(fn):
+                    return fn
+            else:
+                # Reached only when no installed copy was found above
+                # (the loop never breaks, so the 'else' always runs).
+                return "swig.exe"
+
+        elif os.name == "os2":
+            # assume swig available in the PATH.
+            return "swig.exe"
+
+        else:
+            raise DistutilsPlatformError, \
+                  ("I don't know how to find (much less run) SWIG "
+                   "on platform '%s'") % os.name
+
+    # find_swig ()
+
+    # -- Name generators -----------------------------------------------
+    # (extension names, filenames, whatever)
+
+    def get_ext_fullname (self, ext_name):
+        """Return the fully-qualified extension name: 'ext_name'
+        prefixed with 'self.package' (the package being built), if
+        any."""
+        if self.package is None:
+            return ext_name
+        else:
+            return self.package + '.' + ext_name
+
+    def get_ext_filename (self, ext_name):
+        r"""Convert the name of an extension (eg. "foo.bar") into the name
+        of the file from which it will be loaded (eg. "foo/bar.so", or
+        "foo\bar.pyd").
+        """
+
+        from distutils.sysconfig import get_config_var
+        ext_path = string.split(ext_name, '.')
+        # OS/2 has an 8 character module (extension) limit :-(
+        if os.name == "os2":
+            ext_path[len(ext_path) - 1] = ext_path[len(ext_path) - 1][:8]
+        # extensions in debug_mode are named 'module_d.pyd' under windows
+        so_ext = get_config_var('SO')   # platform's shared-library suffix
+        if os.name == 'nt' and self.debug:
+            return apply(os.path.join, ext_path) + '_d' + so_ext
+        return apply(os.path.join, ext_path) + so_ext
+
+    def get_export_symbols (self, ext):
+        """Return the list of symbols that a shared extension has to
+        export.  This either uses 'ext.export_symbols' or, if it's not
+        provided, "init" + module_name.  Only relevant on Windows, where
+        the .pyd file (DLL) must export the module "init" function.
+        """
+
+        # Only the last component of a dotted name matters: extension
+        # "foo.bar" must export "initbar".
+        initfunc_name = "init" + string.split(ext.name,'.')[-1]
+        if initfunc_name not in ext.export_symbols:
+            ext.export_symbols.append(initfunc_name)
+        return ext.export_symbols
+
+    def get_libraries (self, ext):
+        """Return the list of libraries to link against when building a
+        shared extension.  On most platforms, this is just 'ext.libraries';
+        on Windows and OS/2, we add the Python library (eg. python20.dll).
+        """
+        # The python library is always needed on Windows.  For MSVC, this
+        # is redundant, since the library is mentioned in a pragma in
+        # pyconfig.h that MSVC groks.  The other Windows compilers all seem
+        # to need it mentioned explicitly, though, so that's what we do.
+        # Append '_d' to the python import library on debug builds.
+        if sys.platform == "win32":
+            from distutils.msvccompiler import MSVCCompiler
+            if not isinstance(self.compiler, MSVCCompiler):
+                template = "python%d%d"
+                if self.debug:
+                    template = template + '_d'
+                # sys.hexversion >> 24 is the major version number,
+                # (sys.hexversion >> 16) & 0xff is the minor.
+                pythonlib = (template %
+                       (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+                # don't extend ext.libraries, it may be shared with other
+                # extensions, it is a reference to the original list
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
+        elif sys.platform == "os2emx":
+            # EMX/GCC requires the python library explicitly, and I
+            # believe VACPP does as well (though not confirmed) - AIM Apr01
+            template = "python%d%d"
+            # debug versions of the main DLL aren't supported, at least
+            # not at this time - AIM Apr01
+            #if self.debug:
+            #    template = template + '_d'
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "cygwin":
+            # Cygwin names the import library with a dot ("python2.4").
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib]
+        elif sys.platform[:6] == "atheos":
+            from distutils import sysconfig
+
+            template = "python%d.%d"
+            pythonlib = (template %
+                   (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+            # Get SHLIBS from Makefile
+            extra = []
+            for lib in sysconfig.get_config_var('SHLIBS').split():
+                if lib.startswith('-l'):
+                    extra.append(lib[2:])
+                else:
+                    extra.append(lib)
+            # don't extend ext.libraries, it may be shared with other
+            # extensions, it is a reference to the original list
+            return ext.libraries + [pythonlib, "m"] + extra
+        else:
+            # Unix-like platforms: the interpreter's symbols are already
+            # available to the extension, no extra library needed.
+            return ext.libraries
+
+# class build_ext
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/build_py.py b/depot_tools/release/win/python_24/Lib/distutils/command/build_py.py
new file mode 100644
index 0000000..a5b75b8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/build_py.py
@@ -0,0 +1,435 @@
+"""distutils.command.build_py
+
+Implements the Distutils 'build_py' command."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: build_py.py,v 1.46 2004/11/10 22:23:15 loewis Exp $"
+
+import sys, string, os
+from types import *
+from glob import glob
+
+from distutils.core import Command
+from distutils.errors import *
+from distutils.util import convert_path
+from distutils import log
+
+class build_py (Command):
+
+    """Build pure Python modules: copy .py files (and any declared
+    package data) into the build directory, optionally byte-compiling
+    the copied modules."""
+
+    description = "\"build\" pure Python modules (copy to build directory)"
+
+    user_options = [
+        ('build-lib=', 'd', "directory to \"build\" (copy) to"),
+        ('compile', 'c', "compile .py to .pyc"),
+        ('no-compile', None, "don't compile .py files [default]"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps)"),
+        ]
+
+    boolean_options = ['compile', 'force']
+    negative_opt = {'no-compile' : 'compile'}
+
+
+    def initialize_options (self):
+        self.build_lib = None
+        self.py_modules = None
+        self.package = None
+        self.package_data = None
+        self.package_dir = None
+        self.compile = 0
+        self.optimize = 0
+        self.force = None
+
+    def finalize_options (self):
+        self.set_undefined_options('build',
+                                   ('build_lib', 'build_lib'),
+                                   ('force', 'force'))
+
+        # Get the distribution options that are aliases for build_py
+        # options -- list of packages and list of modules.
+        self.packages = self.distribution.packages
+        self.py_modules = self.distribution.py_modules
+        self.package_data = self.distribution.package_data
+        self.package_dir = {}
+        if self.distribution.package_dir:
+            for name, path in self.distribution.package_dir.items():
+                self.package_dir[name] = convert_path(path)
+        self.data_files = self.get_data_files()
+
+        # Ick, copied straight from install_lib.py (fancy_getopt needs a
+        # type system!  Hell, *everything* needs a type system!!!)
+        if type(self.optimize) is not IntType:
+            try:
+                self.optimize = int(self.optimize)
+                assert 0 <= self.optimize <= 2
+            except (ValueError, AssertionError):
+                raise DistutilsOptionError, "optimize must be 0, 1, or 2"
+
+    def run (self):
+
+        # XXX copy_file by default preserves atime and mtime.  IMHO this is
+        # the right thing to do, but perhaps it should be an option -- in
+        # particular, a site administrator might want installed files to
+        # reflect the time of installation rather than the last
+        # modification time before the installed release.
+
+        # XXX copy_file by default preserves mode, which appears to be the
+        # wrong thing to do: if a file is read-only in the working
+        # directory, we want it to be installed read/write so that the next
+        # installation of the same module distribution can overwrite it
+        # without problems.  (This might be a Unix-specific issue.)  Thus
+        # we turn off 'preserve_mode' when copying to the build directory,
+        # since the build directory is supposed to be exactly what the
+        # installation will look like (ie. we preserve mode when
+        # installing).
+
+        # Two options control which modules will be installed: 'packages'
+        # and 'py_modules'.  The former lets us work with whole packages, not
+        # specifying individual modules at all; the latter is for
+        # specifying modules one-at-a-time.
+
+        if self.py_modules:
+            self.build_modules()
+        if self.packages:
+            self.build_packages()
+            self.build_package_data()
+
+        self.byte_compile(self.get_outputs(include_bytecode=0))
+
+    # run ()
+
+    def get_data_files (self):
+        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+        data = []
+        if not self.packages:
+            return data
+        for package in self.packages:
+            # Locate package source directory
+            src_dir = self.get_package_dir(package)
+
+            # Compute package build directory
+            build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+            # Length of path to strip from found files
+            plen = len(src_dir)+1
+
+            # Strip directory from globbed filenames
+            filenames = [
+                file[plen:] for file in self.find_data_files(package, src_dir)
+                ]
+            data.append((package, src_dir, build_dir, filenames))
+        return data
+
+    def find_data_files (self, package, src_dir):
+        """Return filenames for package's data files in 'src_dir'"""
+        # Patterns under the '' key apply to every package; package-
+        # specific patterns are added after them.
+        globs = (self.package_data.get('', [])
+                 + self.package_data.get(package, []))
+        files = []
+        for pattern in globs:
+            # Each pattern has to be converted to a platform-specific path
+            filelist = glob(os.path.join(src_dir, convert_path(pattern)))
+            # Files that match more than one pattern are only added once
+            files.extend([fn for fn in filelist if fn not in files])
+        return files
+
+    def build_package_data (self):
+        """Copy data files into build directory"""
+        # NOTE(review): 'lastdir' is never used below; kept as-is for
+        # parity with the upstream distutils source.
+        lastdir = None
+        for package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                self.mkpath(os.path.dirname(target))
+                self.copy_file(os.path.join(src_dir, filename), target,
+                               preserve_mode=False)
+
+    def get_package_dir (self, package):
+        """Return the directory, relative to the top of the source
+           distribution, where package 'package' should be found
+           (at least according to the 'package_dir' option, if any)."""
+
+        path = string.split(package, '.')
+
+        if not self.package_dir:
+            if path:
+                return apply(os.path.join, path)
+            else:
+                return ''
+        else:
+            tail = []
+            # Walk from the most- to the least-qualified name, looking
+            # for the longest prefix that has an explicit package_dir
+            # entry; 'tail' accumulates the unmatched trailing parts.
+            while path:
+                try:
+                    pdir = self.package_dir[string.join(path, '.')]
+                except KeyError:
+                    tail.insert(0, path[-1])
+                    del path[-1]
+                else:
+                    tail.insert(0, pdir)
+                    return apply(os.path.join, tail)
+            else:
+                # Oops, got all the way through 'path' without finding a
+                # match in package_dir.  If package_dir defines a directory
+                # for the root (nameless) package, then fallback on it;
+                # otherwise, we might as well have not consulted
+                # package_dir at all, as we just use the directory implied
+                # by 'tail' (which should be the same as the original value
+                # of 'path' at this point).
+                pdir = self.package_dir.get('')
+                if pdir is not None:
+                    tail.insert(0, pdir)
+
+                if tail:
+                    return apply(os.path.join, tail)
+                else:
+                    return ''
+
+    # get_package_dir ()
+
+
+    def check_package (self, package, package_dir):
+        """Sanity-check 'package_dir' and return the path of its
+        __init__.py if one exists (None otherwise)."""
+
+        # Empty dir name means current directory, which we can probably
+        # assume exists.  Also, os.path.exists and isdir don't know about
+        # my "empty string means current dir" convention, so we have to
+        # circumvent them.
+        if package_dir != "":
+            if not os.path.exists(package_dir):
+                raise DistutilsFileError, \
+                      "package directory '%s' does not exist" % package_dir
+            if not os.path.isdir(package_dir):
+                raise DistutilsFileError, \
+                      ("supposed package directory '%s' exists, " +
+                       "but is not a directory") % package_dir
+
+        # Require __init__.py for all but the "root package"
+        if package:
+            init_py = os.path.join(package_dir, "__init__.py")
+            if os.path.isfile(init_py):
+                return init_py
+            else:
+                log.warn(("package init file '%s' not found " +
+                          "(or not a regular file)"), init_py)
+
+        # Either not in a package at all (__init__.py not expected), or
+        # __init__.py doesn't exist -- so don't return the filename.
+        return None
+
+    # check_package ()
+
+
+    def check_module (self, module, module_file):
+        """Return true if 'module_file' exists, warning (not raising)
+        when it does not."""
+        if not os.path.isfile(module_file):
+            log.warn("file %s (for module %s) not found", module_file, module)
+            return 0
+        else:
+            return 1
+
+    # check_module ()
+
+
+    def find_package_modules (self, package, package_dir):
+        # Return (package, module, filename) tuples for every .py file
+        # in 'package_dir', excluding the setup script itself.
+        self.check_package(package, package_dir)
+        module_files = glob(os.path.join(package_dir, "*.py"))
+        modules = []
+        setup_script = os.path.abspath(self.distribution.script_name)
+
+        for f in module_files:
+            abs_f = os.path.abspath(f)
+            if abs_f != setup_script:
+                module = os.path.splitext(os.path.basename(f))[0]
+                modules.append((package, module, f))
+            else:
+                self.debug_print("excluding %s" % setup_script)
+        return modules
+
+
+    def find_modules (self):
+        """Finds individually-specified Python modules, ie. those listed by
+        module name in 'self.py_modules'.  Returns a list of tuples (package,
+        module_base, filename): 'package' is a tuple of the path through
+        package-space to the module; 'module_base' is the bare (no
+        packages, no dots) module name, and 'filename' is the path to the
+        ".py" file (relative to the distribution root) that implements the
+        module.
+        """
+
+        # Map package names to tuples of useful info about the package:
+        #    (package_dir, checked)
+        # package_dir - the directory where we'll find source files for
+        #   this package
+        # checked - true if we have checked that the package directory
+        #   is valid (exists, contains __init__.py, ... ?)
+        packages = {}
+
+        # List of (package, module, filename) tuples to return
+        modules = []
+
+        # We treat modules-in-packages almost the same as toplevel modules,
+        # just the "package" for a toplevel is empty (either an empty
+        # string or empty list, depending on context).  Differences:
+        #   - don't check for __init__.py in directory for empty package
+
+        for module in self.py_modules:
+            path = string.split(module, '.')
+            package = string.join(path[0:-1], '.')
+            module_base = path[-1]
+
+            try:
+                (package_dir, checked) = packages[package]
+            except KeyError:
+                package_dir = self.get_package_dir(package)
+                checked = 0
+
+            if not checked:
+                init_py = self.check_package(package, package_dir)
+                packages[package] = (package_dir, 1)
+                if init_py:
+                    modules.append((package, "__init__", init_py))
+
+            # XXX perhaps we should also check for just .pyc files
+            # (so greedy closed-source bastards can distribute Python
+            # modules too)
+            module_file = os.path.join(package_dir, module_base + ".py")
+            if not self.check_module(module, module_file):
+                continue
+
+            modules.append((package, module_base, module_file))
+
+        return modules
+
+    # find_modules ()
+
+
+    def find_all_modules (self):
+        """Compute the list of all modules that will be built, whether
+        they are specified one-module-at-a-time ('self.py_modules') or
+        by whole packages ('self.packages').  Return a list of tuples
+        (package, module, module_file), just like 'find_modules()' and
+        'find_package_modules()' do."""
+
+        modules = []
+        if self.py_modules:
+            modules.extend(self.find_modules())
+        if self.packages:
+            for package in self.packages:
+                package_dir = self.get_package_dir(package)
+                m = self.find_package_modules(package, package_dir)
+                modules.extend(m)
+
+        return modules
+
+    # find_all_modules ()
+
+
+    def get_source_files (self):
+        """Return the filenames of all modules that will be built."""
+
+        modules = self.find_all_modules()
+        filenames = []
+        for module in modules:
+            # The filename is the last element of each tuple.
+            filenames.append(module[-1])
+
+        return filenames
+
+
+    def get_module_outfile (self, build_dir, package, module):
+        # 'package' is a sequence of path components here, not a dotted
+        # name.
+        outfile_path = [build_dir] + list(package) + [module + ".py"]
+        return apply(os.path.join, outfile_path)
+
+
+    def get_outputs (self, include_bytecode=1):
+        """Return the list of files this command creates under
+        'build_lib', including .pyc/.pyo names when 'include_bytecode'
+        is true and compilation is enabled."""
+        modules = self.find_all_modules()
+        outputs = []
+        for (package, module, module_file) in modules:
+            package = string.split(package, '.')
+            filename = self.get_module_outfile(self.build_lib, package, module)
+            outputs.append(filename)
+            if include_bytecode:
+                if self.compile:
+                    outputs.append(filename + "c")
+                if self.optimize > 0:
+                    outputs.append(filename + "o")
+
+        # Package data files are copied into the build tree as well.
+        outputs += [
+            os.path.join(build_dir, filename)
+            for package, src_dir, build_dir, filenames in self.data_files
+            for filename in filenames
+            ]
+
+        return outputs
+
+
+    def build_module (self, module, module_file, package):
+        """Copy one module's source file into the build area; 'package'
+        may be a dotted string or a sequence of path components."""
+        if type(package) is StringType:
+            package = string.split(package, '.')
+        elif type(package) not in (ListType, TupleType):
+            raise TypeError, \
+                  "'package' must be a string (dot-separated), list, or tuple"
+
+        # Now put the module source file into the "build" area -- this is
+        # easy, we just copy it somewhere under self.build_lib (the build
+        # directory for Python source).
+        outfile = self.get_module_outfile(self.build_lib, package, module)
+        dir = os.path.dirname(outfile)
+        self.mkpath(dir)
+        return self.copy_file(module_file, outfile, preserve_mode=0)
+
+
+    def build_modules (self):
+
+        modules = self.find_modules()
+        for (package, module, module_file) in modules:
+
+            # Now "build" the module -- ie. copy the source file to
+            # self.build_lib (the build directory for Python source).
+            # (Actually, it gets copied to the directory for this package
+            # under self.build_lib.)
+            self.build_module(module, module_file, package)
+
+    # build_modules ()
+
+
+    def build_packages (self):
+
+        for package in self.packages:
+
+            # Get list of (package, module, module_file) tuples based on
+            # scanning the package directory.  'package' is only included
+            # in the tuple so that 'find_modules()' and
+            # 'find_package_tuples()' have a consistent interface; it's
+            # ignored here (apart from a sanity check).  Also, 'module' is
+            # the *unqualified* module name (ie. no dots, no package -- we
+            # already know its package!), and 'module_file' is the path to
+            # the .py file, relative to the current directory
+            # (ie. including 'package_dir').
+            package_dir = self.get_package_dir(package)
+            modules = self.find_package_modules(package, package_dir)
+
+            # Now loop over the modules we found, "building" each one (just
+            # copy it to self.build_lib).
+            for (package_, module, module_file) in modules:
+                assert package == package_
+                self.build_module(module, module_file, package)
+
+    # build_packages ()
+
+
+    def byte_compile (self, files):
+        """Byte-compile 'files' according to the --compile/--optimize
+        options, stripping 'build_lib' from the names recorded in the
+        compiled files."""
+        from distutils.util import byte_compile
+        prefix = self.build_lib
+        if prefix[-1] != os.sep:
+            prefix = prefix + os.sep
+
+        # XXX this code is essentially the same as the 'byte_compile()
+        # method of the "install_lib" command, except for the determination
+        # of the 'prefix' string.  Hmmm.
+
+        if self.compile:
+            byte_compile(files, optimize=0,
+                         force=self.force, prefix=prefix, dry_run=self.dry_run)
+        if self.optimize > 0:
+            byte_compile(files, optimize=self.optimize,
+                         force=self.force, prefix=prefix, dry_run=self.dry_run)
+
+# class build_py
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/build_scripts.py b/depot_tools/release/win/python_24/Lib/distutils/command/build_scripts.py
new file mode 100644
index 0000000..16a6483
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/build_scripts.py
@@ -0,0 +1,131 @@
+"""distutils.command.build_scripts
+
+Implements the Distutils 'build_scripts' command."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: build_scripts.py,v 1.25 2004/11/10 22:23:15 loewis Exp $"
+
+import sys, os, re
+from stat import ST_MODE
+from distutils import sysconfig
+from distutils.core import Command
+from distutils.dep_util import newer
+from distutils.util import convert_path
+from distutils import log
+
+# check if Python is called on the first line with this expression
+first_line_re = re.compile('^#!.*python[0-9.]*([ \t].*)?$')
+
+class build_scripts (Command):
+
+    description = "\"build\" scripts (copy and fixup #! line)"
+
+    user_options = [
+        ('build-dir=', 'd', "directory to \"build\" (copy) to"),
+        ('force', 'f', "forcibly build everything (ignore file timestamps"),
+        ('executable=', 'e', "specify final destination interpreter path"),
+        ]
+
+    boolean_options = ['force']
+
+
+    def initialize_options (self):
+        self.build_dir = None
+        self.scripts = None
+        self.force = None
+        self.executable = None
+        self.outfiles = None
+
+    def finalize_options (self):
+        self.set_undefined_options('build',
+                                   ('build_scripts', 'build_dir'),
+                                   ('force', 'force'),
+                                   ('executable', 'executable'))
+        self.scripts = self.distribution.scripts
+
+    def get_source_files(self):
+        return self.scripts
+
+    def run (self):
+        if not self.scripts:
+            return
+        self.copy_scripts()
+
+
+    def copy_scripts (self):
+        """Copy each script listed in 'self.scripts'; if it's marked as a
+        Python script in the Unix way (first line matches 'first_line_re',
+        ie. starts with "\#!" and contains "python"), then adjust the first
+        line to refer to the current Python interpreter as we copy.
+        """
+        self.mkpath(self.build_dir)
+        outfiles = []
+        for script in self.scripts:
+            adjust = 0
+            script = convert_path(script)
+            outfile = os.path.join(self.build_dir, os.path.basename(script))
+            outfiles.append(outfile)
+
+            if not self.force and not newer(script, outfile):
+                log.debug("not copying %s (up-to-date)", script)
+                continue
+
+            # Always open the file, but ignore failures in dry-run mode --
+            # that way, we'll get accurate feedback if we can read the
+            # script.
+            try:
+                f = open(script, "r")
+            except IOError:
+                if not self.dry_run:
+                    raise
+                f = None
+            else:
+                first_line = f.readline()
+                if not first_line:
+                    self.warn("%s is an empty file (skipping)" % script)
+                    continue
+
+                match = first_line_re.match(first_line)
+                if match:
+                    adjust = 1
+                    post_interp = match.group(1) or ''
+
+            if adjust:
+                log.info("copying and adjusting %s -> %s", script,
+                         self.build_dir)
+                if not self.dry_run:
+                    outf = open(outfile, "w")
+                    if not sysconfig.python_build:
+                        outf.write("#!%s%s\n" %
+                                   (self.executable,
+                                    post_interp))
+                    else:
+                        outf.write("#!%s%s\n" %
+                                   (os.path.join(
+                            sysconfig.get_config_var("BINDIR"),
+                            "python" + sysconfig.get_config_var("EXE")),
+                                    post_interp))
+                    outf.writelines(f.readlines())
+                    outf.close()
+                if f:
+                    f.close()
+            else:
+                f.close()
+                self.copy_file(script, outfile)
+
+        if os.name == 'posix':
+            for file in outfiles:
+                if self.dry_run:
+                    log.info("changing mode of %s", file)
+                else:
+                    oldmode = os.stat(file)[ST_MODE] & 07777
+                    newmode = (oldmode | 0555) & 07777
+                    if newmode != oldmode:
+                        log.info("changing mode of %s from %o to %o",
+                                 file, oldmode, newmode)
+                        os.chmod(file, newmode)
+
+    # copy_scripts ()
+
+# class build_scripts
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/clean.py b/depot_tools/release/win/python_24/Lib/distutils/command/clean.py
new file mode 100644
index 0000000..4053962
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/clean.py
@@ -0,0 +1,82 @@
+"""distutils.command.clean
+
+Implements the Distutils 'clean' command."""
+
+# contributed by Bastian Kleineidam <calvin@cs.uni-sb.de>, added 2000-03-18
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: clean.py,v 1.16 2004/11/10 22:23:15 loewis Exp $"
+
+import os
+from distutils.core import Command
+from distutils.dir_util import remove_tree
+from distutils import log
+
+class clean (Command):
+
+    """Remove the byproducts of the 'build' command (temporary files
+    always; full build output only with --all)."""
+
+    description = "clean up output of 'build' command"
+    user_options = [
+        ('build-base=', 'b',
+         "base build directory (default: 'build.build-base')"),
+        ('build-lib=', None,
+         "build directory for all modules (default: 'build.build-lib')"),
+        ('build-temp=', 't',
+         "temporary build directory (default: 'build.build-temp')"),
+        ('build-scripts=', None,
+         "build directory for scripts (default: 'build.build-scripts')"),
+        ('bdist-base=', None,
+         "temporary directory for built distributions"),
+        ('all', 'a',
+         "remove all build output, not just temporary by-products")
+    ]
+
+    boolean_options = ['all']
+
+    def initialize_options(self):
+        self.build_base = None
+        self.build_lib = None
+        self.build_temp = None
+        self.build_scripts = None
+        self.bdist_base = None
+        self.all = None
+
+    def finalize_options(self):
+        # Inherit directory locations from 'build' and 'bdist' so the
+        # same directories those commands wrote are the ones removed.
+        self.set_undefined_options('build',
+                                   ('build_base', 'build_base'),
+                                   ('build_lib', 'build_lib'),
+                                   ('build_scripts', 'build_scripts'),
+                                   ('build_temp', 'build_temp'))
+        self.set_undefined_options('bdist',
+                                   ('bdist_base', 'bdist_base'))
+
+    def run(self):
+        # remove the build/temp.<plat> directory (unless it's already
+        # gone)
+        if os.path.exists(self.build_temp):
+            remove_tree(self.build_temp, dry_run=self.dry_run)
+        else:
+            log.debug("'%s' does not exist -- can't clean it",
+                      self.build_temp)
+
+        if self.all:
+            # remove build directories
+            for directory in (self.build_lib,
+                              self.bdist_base,
+                              self.build_scripts):
+                if os.path.exists(directory):
+                    remove_tree(directory, dry_run=self.dry_run)
+                else:
+                    log.warn("'%s' does not exist -- can't clean it",
+                             directory)
+
+        # just for the heck of it, try to remove the base build directory:
+        # we might have emptied it right now, but if not we don't care
+        if not self.dry_run:
+            try:
+                os.rmdir(self.build_base)
+                log.info("removing '%s'", self.build_base)
+            except OSError:
+                # Directory non-empty or missing -- deliberately ignored.
+                pass
+
+# class clean
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/command_template b/depot_tools/release/win/python_24/Lib/distutils/command/command_template
new file mode 100644
index 0000000..50bbab7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/command_template
@@ -0,0 +1,45 @@
+"""distutils.command.x
+
+Implements the Distutils 'x' command.
+"""
+
+# NOTE: this file is a skeleton for writing new Distutils commands, not
+# runnable Python -- replace 'x' throughout and fill in the blank
+# attribute names, option strings, and method bodies before use.
+
+# created 2000/mm/dd, John Doe
+
+__revision__ = "$Id$"
+
+from distutils.core import Command
+
+
+class x (Command):
+
+    # Brief (40-50 characters) description of the command
+    description = ""
+
+    # List of option tuples: long name, short name (None if no short
+    # name), and help string.
+    user_options = [('', '',
+                     ""),
+                   ]
+
+
+    def initialize_options (self):
+        self. = None
+        self. = None
+        self. = None
+
+    # initialize_options()
+
+
+    def finalize_options (self):
+        if self.x is None:
+            self.x = 
+
+    # finalize_options()
+
+
+    def run (self):
+
+
+    # run()
+
+# class x
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/config.py b/depot_tools/release/win/python_24/Lib/distutils/command/config.py
new file mode 100644
index 0000000..cf6d97df
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/config.py
@@ -0,0 +1,368 @@
+"""distutils.command.config
+
+Implements the Distutils 'config' command, a (mostly) empty command class
+that exists mainly to be sub-classed by specific module distributions and
+applications.  The idea is that while every "config" command is different,
+at least they're all named the same, and users always see "config" in the
+list of standard commands.  Also, this is a good place to put common
+configure-like tasks: "try to compile this C code", or "figure out where
+this header file lives".
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: config.py,v 1.18 2004/11/10 22:23:15 loewis Exp $"
+
+import sys, os, string, re
+from types import *
+from distutils.core import Command
+from distutils.errors import DistutilsExecError
+from distutils.sysconfig import customize_compiler
+from distutils import log
+
+# Maps a language name to the source-file extension used for the
+# generated "_configtest" files below.
+LANG_EXT = {'c': '.c',
+            'c++': '.cxx'}
+
+class config (Command):
+
+    description = "prepare to build"
+
+    user_options = [
+        ('compiler=', None,
+         "specify the compiler type"),
+        ('cc=', None,
+         "specify the compiler executable"),
+        ('include-dirs=', 'I',
+         "list of directories to search for header files"),
+        ('define=', 'D',
+         "C preprocessor macros to define"),
+        ('undef=', 'U',
+         "C preprocessor macros to undefine"),
+        ('libraries=', 'l',
+         "external C libraries to link with"),
+        ('library-dirs=', 'L',
+         "directories to search for external C libraries"),
+
+        ('noisy', None,
+         "show every action (compile, link, run, ...) taken"),
+        ('dump-source', None,
+         "dump generated source files before attempting to compile them"),
+        ]
+
+
+    # The three standard command methods: since the "config" command
+    # does nothing by default, these are empty.
+
+    def initialize_options (self):
+        """Set every user option to None/defaults; real values are
+        filled in by 'finalize_options()'."""
+        self.compiler = None
+        self.cc = None
+        self.include_dirs = None
+        #self.define = None
+        #self.undef = None
+        self.libraries = None
+        self.library_dirs = None
+
+        # maximal output for now
+        self.noisy = 1
+        self.dump_source = 1
+
+        # list of temporary files generated along-the-way that we have
+        # to clean at some point
+        self.temp_files = []
+
+    def finalize_options (self):
+        """Normalize the list-valued options: a lone string becomes a
+        list (directory options are split on os.pathsep)."""
+        if self.include_dirs is None:
+            self.include_dirs = self.distribution.include_dirs or []
+        elif type(self.include_dirs) is StringType:
+            self.include_dirs = string.split(self.include_dirs, os.pathsep)
+
+        if self.libraries is None:
+            self.libraries = []
+        elif type(self.libraries) is StringType:
+            self.libraries = [self.libraries]
+
+        if self.library_dirs is None:
+            self.library_dirs = []
+        elif type(self.library_dirs) is StringType:
+            self.library_dirs = string.split(self.library_dirs, os.pathsep)
+
+
+    def run (self):
+        # 'config' does nothing by default; subclasses override this.
+        pass
+
+
+    # Utility methods for actual "config" commands.  The interfaces are
+    # loosely based on Autoconf macros of similar names.  Sub-classes
+    # may use these freely.
+
+    def _check_compiler (self):
+        """Check that 'self.compiler' really is a CCompiler object;
+        if not, make it one.
+        """
+        # We do this late, and only on-demand, because this is an expensive
+        # import.
+        from distutils.ccompiler import CCompiler, new_compiler
+        if not isinstance(self.compiler, CCompiler):
+            self.compiler = new_compiler(compiler=self.compiler,
+                                         dry_run=self.dry_run, force=1)
+            customize_compiler(self.compiler)
+            if self.include_dirs:
+                self.compiler.set_include_dirs(self.include_dirs)
+            if self.libraries:
+                self.compiler.set_libraries(self.libraries)
+            if self.library_dirs:
+                self.compiler.set_library_dirs(self.library_dirs)
+
+
+    def _gen_temp_sourcefile (self, body, headers, lang):
+        """Write '#include' lines for 'headers' followed by 'body' to a
+        temporary source file in the current directory; return its name.
+        """
+        filename = "_configtest" + LANG_EXT[lang]
+        file = open(filename, "w")
+        if headers:
+            for header in headers:
+                file.write("#include <%s>\n" % header)
+            file.write("\n")
+        file.write(body)
+        if body[-1] != "\n":
+            file.write("\n")
+        file.close()
+        return filename
+
+    def _preprocess (self, body, headers, include_dirs, lang):
+        """Generate a source file and run it through the preprocessor;
+        return the (source, output) filenames, both registered for
+        later cleanup."""
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        out = "_configtest.i"
+        self.temp_files.extend([src, out])
+        self.compiler.preprocess(src, out, include_dirs=include_dirs)
+        return (src, out)
+
+    def _compile (self, body, headers, include_dirs, lang):
+        """Generate a source file and compile it; return the (source,
+        object) filenames, both registered for later cleanup."""
+        src = self._gen_temp_sourcefile(body, headers, lang)
+        if self.dump_source:
+            dump_file(src, "compiling '%s':" % src)
+        (obj,) = self.compiler.object_filenames([src])
+        self.temp_files.extend([src, obj])
+        self.compiler.compile([src], include_dirs=include_dirs)
+        return (src, obj)
+
+    def _link (self, body,
+               headers, include_dirs,
+               libraries, library_dirs, lang):
+        """Compile and link a generated source file into an executable;
+        return the (source, object, program) filenames, all registered
+        for later cleanup."""
+        (src, obj) = self._compile(body, headers, include_dirs, lang)
+        prog = os.path.splitext(os.path.basename(src))[0]
+        self.compiler.link_executable([obj], prog,
+                                      libraries=libraries,
+                                      library_dirs=library_dirs,
+                                      target_lang=lang)
+
+        if self.compiler.exe_extension is not None:
+            prog = prog + self.compiler.exe_extension
+        self.temp_files.append(prog)
+
+        return (src, obj, prog)
+
+    def _clean (self, *filenames):
+        """Remove 'filenames' (default: every registered temp file),
+        silently ignoring files that cannot be removed."""
+        if not filenames:
+            filenames = self.temp_files
+            self.temp_files = []
+        log.info("removing: %s", string.join(filenames))
+        for filename in filenames:
+            try:
+                os.remove(filename)
+            except OSError:
+                pass
+
+
+    # XXX these ignore the dry-run flag: what to do, what to do? even if
+    # you want a dry-run build, you still need some sort of configuration
+    # info.  My inclination is to make it up to the real config command to
+    # consult 'dry_run', and assume a default (minimal) configuration if
+    # true.  The problem with trying to do it here is that you'd have to
+    # return either true or false from all the 'try' methods, neither of
+    # which is correct.
+
+    # XXX need access to the header search path and maybe default macros.
+
+    def try_cpp (self, body=None, headers=None, include_dirs=None, lang="c"):
+        """Construct a source file from 'body' (a string containing lines
+        of C/C++ code) and 'headers' (a list of header files to include)
+        and run it through the preprocessor.  Return true if the
+        preprocessor succeeded, false if there were any errors.
+        ('body' probably isn't of much use, but what the heck.)
+        """
+        from distutils.ccompiler import CompileError
+        self._check_compiler()
+        ok = 1
+        try:
+            self._preprocess(body, headers, include_dirs, lang)
+        except CompileError:
+            ok = 0
+
+        self._clean()
+        return ok
+
+    def search_cpp (self, pattern, body=None,
+                    headers=None, include_dirs=None, lang="c"):
+        """Construct a source file (just like 'try_cpp()'), run it through
+        the preprocessor, and return true if any line of the output matches
+        'pattern'.  'pattern' should either be a compiled regex object or a
+        string containing a regex.  If both 'body' and 'headers' are None,
+        preprocesses an empty file -- which can be useful to determine the
+        symbols the preprocessor and compiler set by default.
+        """
+
+        self._check_compiler()
+        (src, out) = self._preprocess(body, headers, include_dirs, lang)
+
+        if type(pattern) is StringType:
+            pattern = re.compile(pattern)
+
+        file = open(out)
+        match = 0
+        while 1:
+            line = file.readline()
+            if line == '':
+                break
+            if pattern.search(line):
+                match = 1
+                break
+
+        file.close()
+        self._clean()
+        return match
+
+    def try_compile (self, body, headers=None, include_dirs=None, lang="c"):
+        """Try to compile a source file built from 'body' and 'headers'.
+        Return true on success, false otherwise.
+        """
+        from distutils.ccompiler import CompileError
+        self._check_compiler()
+        try:
+            self._compile(body, headers, include_dirs, lang)
+            ok = 1
+        except CompileError:
+            ok = 0
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    def try_link (self, body,
+                  headers=None, include_dirs=None,
+                  libraries=None, library_dirs=None,
+                  lang="c"):
+        """Try to compile and link a source file, built from 'body' and
+        'headers', to executable form.  Return true on success, false
+        otherwise.
+        """
+        from distutils.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            self._link(body, headers, include_dirs,
+                       libraries, library_dirs, lang)
+            ok = 1
+        except (CompileError, LinkError):
+            ok = 0
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+    def try_run (self, body,
+                 headers=None, include_dirs=None,
+                 libraries=None, library_dirs=None,
+                 lang="c"):
+        """Try to compile, link to an executable, and run a program
+        built from 'body' and 'headers'.  Return true on success, false
+        otherwise.
+        """
+        from distutils.ccompiler import CompileError, LinkError
+        self._check_compiler()
+        try:
+            src, obj, exe = self._link(body, headers, include_dirs,
+                                       libraries, library_dirs, lang)
+            self.spawn([exe])
+            ok = 1
+        except (CompileError, LinkError, DistutilsExecError):
+            ok = 0
+
+        log.info(ok and "success!" or "failure.")
+        self._clean()
+        return ok
+
+
+    # -- High-level methods --------------------------------------------
+    # (these are the ones that are actually likely to be useful
+    # when implementing a real-world config command!)
+
+    def check_func (self, func,
+                    headers=None, include_dirs=None,
+                    libraries=None, library_dirs=None,
+                    decl=0, call=0):
+
+        """Determine if function 'func' is available by constructing a
+        source file that refers to 'func', and compiles and links it.
+        If everything succeeds, returns true; otherwise returns false.
+
+        The constructed source file starts out by including the header
+        files listed in 'headers'.  If 'decl' is true, it then declares
+        'func' (as "int func()"); you probably shouldn't supply 'headers'
+        and set 'decl' true in the same call, or you might get errors about
+        a conflicting declarations for 'func'.  Finally, the constructed
+        'main()' function either references 'func' or (if 'call' is true)
+        calls it.  'libraries' and 'library_dirs' are used when
+        linking.
+        """
+
+        self._check_compiler()
+        body = []
+        if decl:
+            body.append("int %s ();" % func)
+        body.append("int main () {")
+        if call:
+            body.append("  %s();" % func)
+        else:
+            body.append("  %s;" % func)
+        body.append("}")
+        body = string.join(body, "\n") + "\n"
+
+        return self.try_link(body, headers, include_dirs,
+                             libraries, library_dirs)
+
+    # check_func ()
+
+    def check_lib (self, library, library_dirs=None,
+                   headers=None, include_dirs=None, other_libraries=[]):
+        """Determine if 'library' is available to be linked against,
+        without actually checking that any particular symbols are provided
+        by it.  'headers' will be used in constructing the source file to
+        be compiled, but the only effect of this is to check if all the
+        header files listed are available.  Any libraries listed in
+        'other_libraries' will be included in the link, in case 'library'
+        has symbols that depend on other libraries.
+        """
+        self._check_compiler()
+        return self.try_link("int main (void) { }",
+                             headers, include_dirs,
+                             [library]+other_libraries, library_dirs)
+
+    def check_header (self, header, include_dirs=None,
+                      library_dirs=None, lang="c"):
+        """Determine if the system header file named by 'header_file'
+        exists and can be found by the preprocessor; return true if so,
+        false otherwise.
+        """
+        return self.try_cpp(body="/* No body */", headers=[header],
+                            include_dirs=include_dirs)
+
+
+# class config
+
+
+def dump_file (filename, head=None):
+    if head is None:
+        print filename + ":"
+    else:
+        print head
+
+    file = open(filename)
+    sys.stdout.write(file.read())
+    file.close()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/install.py b/depot_tools/release/win/python_24/Lib/distutils/command/install.py
new file mode 100644
index 0000000..8f7a24a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/install.py
@@ -0,0 +1,606 @@
+"""distutils.command.install
+
+Implements the Distutils 'install' command."""
+
+from distutils import log
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: install.py,v 1.72.2.1 2005/01/20 19:15:39 theller Exp $"
+
+import sys, os, string
+from types import *
+from distutils.core import Command
+from distutils.debug import DEBUG
+from distutils.sysconfig import get_config_vars
+from distutils.errors import DistutilsPlatformError
+from distutils.file_util import write_file
+from distutils.util import convert_path, subst_vars, change_root
+from distutils.errors import DistutilsOptionError
+from glob import glob
+
+# Pick the Windows scheme by interpreter version: the two variants differ
+# only in whether modules go under Lib/site-packages or straight under
+# the prefix.
+if sys.version < "2.2":
+    WINDOWS_SCHEME = {
+        'purelib': '$base',
+        'platlib': '$base',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+    }
+else:
+    WINDOWS_SCHEME = {
+        'purelib': '$base/Lib/site-packages',
+        'platlib': '$base/Lib/site-packages',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+    }
+
+# Per-platform installation layouts.  The '$'-variables are expanded
+# later against the command's 'config_vars' dictionary.
+INSTALL_SCHEMES = {
+    'unix_prefix': {
+        'purelib': '$base/lib/python$py_version_short/site-packages',
+        'platlib': '$platbase/lib/python$py_version_short/site-packages',
+        'headers': '$base/include/python$py_version_short/$dist_name',
+        'scripts': '$base/bin',
+        'data'   : '$base',
+        },
+    'unix_home': {
+        'purelib': '$base/lib/python',
+        'platlib': '$base/lib/python',
+        'headers': '$base/include/python/$dist_name',
+        'scripts': '$base/bin',
+        'data'   : '$base',
+        },
+    'nt': WINDOWS_SCHEME,
+    'mac': {
+        'purelib': '$base/Lib/site-packages',
+        'platlib': '$base/Lib/site-packages',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+        },
+    'os2': {
+        'purelib': '$base/Lib/site-packages',
+        'platlib': '$base/Lib/site-packages',
+        'headers': '$base/Include/$dist_name',
+        'scripts': '$base/Scripts',
+        'data'   : '$base',
+        }
+    }
+
+# The keys to an installation scheme; if any new types of files are to be
+# installed, be sure to add an entry to every installation scheme above,
+# and to SCHEME_KEYS here.
+SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
+
+
+class install (Command):
+
+    description = "install everything from build directory"
+
+    # Command-line options: (long name, short name or None, help text).
+    user_options = [
+        # Select installation scheme and set base director(y|ies)
+        ('prefix=', None,
+         "installation prefix"),
+        ('exec-prefix=', None,
+         "(Unix only) prefix for platform-specific files"),
+        ('home=', None,
+         "(Unix only) home directory to install under"),
+
+        # Or, just set the base director(y|ies)
+        ('install-base=', None,
+         "base installation directory (instead of --prefix or --home)"),
+        ('install-platbase=', None,
+         "base installation directory for platform-specific files " +
+         "(instead of --exec-prefix or --home)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+
+        # Or, explicitly set the installation scheme
+        ('install-purelib=', None,
+         "installation directory for pure Python module distributions"),
+        ('install-platlib=', None,
+         "installation directory for non-pure module distributions"),
+        ('install-lib=', None,
+         "installation directory for all module distributions " +
+         "(overrides --install-purelib and --install-platlib)"),
+
+        ('install-headers=', None,
+         "installation directory for C/C++ headers"),
+        ('install-scripts=', None,
+         "installation directory for Python scripts"),
+        ('install-data=', None,
+         "installation directory for data files"),
+
+        # Byte-compilation options -- see install_lib.py for details, as
+        # these are duplicated from there (but only install_lib does
+        # anything with them).
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+
+        # Miscellaneous control options
+        ('force', 'f',
+         "force installation (overwrite any existing files)"),
+        ('skip-build', None,
+         "skip rebuilding everything (for testing/debugging)"),
+
+        # Where to install documentation (eventually!)
+        #('doc-format=', None, "format of documentation to generate"),
+        #('install-man=', None, "directory for Unix man pages"),
+        #('install-html=', None, "directory for HTML documentation"),
+        #('install-info=', None, "directory for GNU info files"),
+
+        ('record=', None,
+         "filename in which to record list of installed files"),
+        ]
+
+    boolean_options = ['compile', 'force', 'skip-build']
+    # '--no-compile' is the negation of '--compile'.
+    negative_opt = {'no-compile' : 'compile'}
+
+
+    def initialize_options (self):
+        """Set every user option and internal attribute to its default
+        (mostly None); the real values are computed in
+        'finalize_options()'."""
+
+        # High-level options: these select both an installation base
+        # and scheme.
+        self.prefix = None
+        self.exec_prefix = None
+        self.home = None
+
+        # These select only the installation base; it's up to the user to
+        # specify the installation scheme (currently, that means supplying
+        # the --install-{platlib,purelib,scripts,data} options).
+        self.install_base = None
+        self.install_platbase = None
+        self.root = None
+
+        # These options are the actual installation directories; if not
+        # supplied by the user, they are filled in using the installation
+        # scheme implied by prefix/exec-prefix/home and the contents of
+        # that installation scheme.
+        self.install_purelib = None     # for pure module distributions
+        self.install_platlib = None     # non-pure (dists w/ extensions)
+        self.install_headers = None     # for C/C++ headers
+        self.install_lib = None         # set to either purelib or platlib
+        self.install_scripts = None
+        self.install_data = None
+
+        self.compile = None
+        self.optimize = None
+
+        # These two are for putting non-packagized distributions into their
+        # own directory and creating a .pth file if it makes sense.
+        # 'extra_path' comes from the setup file; 'install_path_file' can
+        # be turned off if it makes no sense to install a .pth file.  (But
+        # better to install it uselessly than to guess wrong and not
+        # install it when it's necessary and would be used!)  Currently,
+        # 'install_path_file' is always true unless some outsider meddles
+        # with it.
+        self.extra_path = None
+        self.install_path_file = 1
+
+        # 'force' forces installation, even if target files are not
+        # out-of-date.  'skip_build' skips running the "build" command,
+        # handy if you know it's not necessary.  'warn_dir' (which is *not*
+        # a user option, it's just there so the bdist_* commands can turn
+        # it off) determines whether we warn about installing to a
+        # directory not in sys.path.
+        self.force = 0
+        self.skip_build = 0
+        self.warn_dir = 1
+
+        # These are only here as a conduit from the 'build' command to the
+        # 'install_*' commands that do the real work.  ('build_base' isn't
+        # actually used anywhere, but it might be useful in future.)  They
+        # are not user options, because if the user told the install
+        # command where the build directory is, that wouldn't affect the
+        # build command.
+        self.build_base = None
+        self.build_lib = None
+
+        # Not defined yet because we don't know anything about
+        # documentation yet.
+        #self.install_man = None
+        #self.install_html = None
+        #self.install_info = None
+
+        self.record = None
+
+
+    # -- Option finalizing methods -------------------------------------
+    # (This is rather more involved than for most commands,
+    # because this is where the policy for installing third-
+    # party Python modules on various platforms given a wide
+    # array of user input is decided.  Yes, it's quite complex!)
+
+    def finalize_options (self):
+        """Resolve every 'install_*' directory option: validate the
+        option combination, pick a platform scheme, expand config
+        variables, and inherit build directories from 'build'."""
+
+        # This method (and its pliant slaves, like 'finalize_unix()',
+        # 'finalize_other()', and 'select_scheme()') is where the default
+        # installation directories for modules, extension modules, and
+        # anything else we care to install from a Python module
+        # distribution.  Thus, this code makes a pretty important policy
+        # statement about how third-party stuff is added to a Python
+        # installation!  Note that the actual work of installation is done
+        # by the relatively simple 'install_*' commands; they just take
+        # their orders from the installation directory options determined
+        # here.
+
+        # Check for errors/inconsistencies in the options; first, stuff
+        # that's wrong on any platform.
+
+        if ((self.prefix or self.exec_prefix or self.home) and
+            (self.install_base or self.install_platbase)):
+            raise DistutilsOptionError, \
+                  ("must supply either prefix/exec-prefix/home or " +
+                   "install-base/install-platbase -- not both")
+
+        if self.home and (self.prefix or self.exec_prefix):
+            raise DistutilsOptionError, \
+                  "must supply either home or prefix/exec-prefix -- not both"
+
+        # Next, stuff that's wrong (or dubious) only on certain platforms.
+        if os.name != "posix":
+            if self.exec_prefix:
+                self.warn("exec-prefix option ignored on this platform")
+                self.exec_prefix = None
+
+        # Now the interesting logic -- so interesting that we farm it out
+        # to other methods.  The goal of these methods is to set the final
+        # values for the install_{lib,scripts,data,...}  options, using as
+        # input a heady brew of prefix, exec_prefix, home, install_base,
+        # install_platbase, user-supplied versions of
+        # install_{purelib,platlib,lib,scripts,data,...}, and the
+        # INSTALL_SCHEME dictionary above.  Phew!
+
+        self.dump_dirs("pre-finalize_{unix,other}")
+
+        if os.name == 'posix':
+            self.finalize_unix()
+        else:
+            self.finalize_other()
+
+        self.dump_dirs("post-finalize_{unix,other}()")
+
+        # Expand configuration variables, tilde, etc. in self.install_base
+        # and self.install_platbase -- that way, we can use $base or
+        # $platbase in the other installation directories and not worry
+        # about needing recursive variable expansion (shudder).
+
+        py_version = (string.split(sys.version))[0]
+        (prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
+        self.config_vars = {'dist_name': self.distribution.get_name(),
+                            'dist_version': self.distribution.get_version(),
+                            'dist_fullname': self.distribution.get_fullname(),
+                            'py_version': py_version,
+                            'py_version_short': py_version[0:3],
+                            'sys_prefix': prefix,
+                            'prefix': prefix,
+                            'sys_exec_prefix': exec_prefix,
+                            'exec_prefix': exec_prefix,
+                           }
+        self.expand_basedirs()
+
+        self.dump_dirs("post-expand_basedirs()")
+
+        # Now define config vars for the base directories so we can expand
+        # everything else.
+        self.config_vars['base'] = self.install_base
+        self.config_vars['platbase'] = self.install_platbase
+
+        if DEBUG:
+            from pprint import pprint
+            print "config vars:"
+            pprint(self.config_vars)
+
+        # Expand "~" and configuration variables in the installation
+        # directories.
+        self.expand_dirs()
+
+        self.dump_dirs("post-expand_dirs()")
+
+        # Pick the actual directory to install all modules to: either
+        # install_purelib or install_platlib, depending on whether this
+        # module distribution is pure or not.  Of course, if the user
+        # already specified install_lib, use their selection.
+        if self.install_lib is None:
+            if self.distribution.ext_modules: # has extensions: non-pure
+                self.install_lib = self.install_platlib
+            else:
+                self.install_lib = self.install_purelib
+
+
+        # Convert directories from Unix /-separated syntax to the local
+        # convention.
+        self.convert_paths('lib', 'purelib', 'platlib',
+                           'scripts', 'data', 'headers')
+
+        # Well, we're not actually fully completely finalized yet: we still
+        # have to deal with 'extra_path', which is the hack for allowing
+        # non-packagized module distributions (hello, Numerical Python!) to
+        # get their own directories.
+        self.handle_extra_path()
+        self.install_libbase = self.install_lib # needed for .pth file
+        self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
+
+        # If a new root directory was supplied, make all the installation
+        # dirs relative to it.
+        if self.root is not None:
+            self.change_roots('libbase', 'lib', 'purelib', 'platlib',
+                              'scripts', 'data', 'headers')
+
+        self.dump_dirs("after prepending root")
+
+        # Find out the build directories, ie. where to install from.
+        self.set_undefined_options('build',
+                                   ('build_base', 'build_base'),
+                                   ('build_lib', 'build_lib'))
+
+        # Punt on doc directories for now -- after all, we're punting on
+        # documentation completely!
+
+    # finalize_options ()
+
+
+    def dump_dirs (self, msg):
+        """If DEBUG is set, print (labelled by 'msg') the current value
+        of every user option -- handy for tracing how the directory
+        options evolve through finalization."""
+        if DEBUG:
+            from distutils.fancy_getopt import longopt_xlate
+            print msg + ":"
+            for opt in self.user_options:
+                opt_name = opt[0]
+                if opt_name[-1] == "=":
+                    opt_name = opt_name[0:-1]
+                if self.negative_opt.has_key(opt_name):
+                    # Negated option: show the value of the option it
+                    # negates, inverted.
+                    opt_name = string.translate(self.negative_opt[opt_name],
+                                                longopt_xlate)
+                    val = not getattr(self, opt_name)
+                else:
+                    opt_name = string.translate(opt_name, longopt_xlate)
+                    val = getattr(self, opt_name)
+                print "  %s: %s" % (opt_name, val)
+
+
+    def finalize_unix (self):
+        """Finalize base/scheme options for posix platforms: honour an
+        explicit install-base/platbase, else derive them from --home or
+        from prefix/exec-prefix (defaulting to sys.prefix)."""
+
+        if self.install_base is not None or self.install_platbase is not None:
+            if ((self.install_lib is None and
+                 self.install_purelib is None and
+                 self.install_platlib is None) or
+                self.install_headers is None or
+                self.install_scripts is None or
+                self.install_data is None):
+                raise DistutilsOptionError, \
+                      ("install-base or install-platbase supplied, but "
+                      "installation scheme is incomplete")
+            return
+
+        if self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                if self.exec_prefix is not None:
+                    raise DistutilsOptionError, \
+                          "must not supply exec-prefix without prefix"
+
+                self.prefix = os.path.normpath(sys.prefix)
+                self.exec_prefix = os.path.normpath(sys.exec_prefix)
+
+            else:
+                if self.exec_prefix is None:
+                    self.exec_prefix = self.prefix
+
+            self.install_base = self.prefix
+            self.install_platbase = self.exec_prefix
+            self.select_scheme("unix_prefix")
+
+    # finalize_unix ()
+
+
+    def finalize_other (self):          # Windows and Mac OS for now
+        """Finalize base/scheme options for non-posix platforms, keying
+        the scheme off os.name (or "unix_home" when --home is given)."""
+
+        if self.home is not None:
+            self.install_base = self.install_platbase = self.home
+            self.select_scheme("unix_home")
+        else:
+            if self.prefix is None:
+                self.prefix = os.path.normpath(sys.prefix)
+
+            self.install_base = self.install_platbase = self.prefix
+            try:
+                self.select_scheme(os.name)
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      "I don't know how to install stuff on '%s'" % os.name
+
+    # finalize_other ()
+
+
+    def select_scheme (self, name):
+        # it's the caller's problem if they supply a bad name!
+        scheme = INSTALL_SCHEMES[name]
+        for key in SCHEME_KEYS:
+            attrname = 'install_' + key
+            if getattr(self, attrname) is None:
+                setattr(self, attrname, scheme[key])
+
+
+    def _expand_attrs (self, attrs):
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                if os.name == 'posix':
+                    val = os.path.expanduser(val)
+                val = subst_vars(val, self.config_vars)
+                setattr(self, attr, val)
+
+
+    def expand_basedirs (self):
+        self._expand_attrs(['install_base',
+                            'install_platbase',
+                            'root'])
+
+    def expand_dirs (self):
+        self._expand_attrs(['install_purelib',
+                            'install_platlib',
+                            'install_lib',
+                            'install_headers',
+                            'install_scripts',
+                            'install_data',])
+
+
+    def convert_paths (self, *names):
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, convert_path(getattr(self, attr)))
+
+
+    def handle_extra_path (self):
+        """Compute 'self.path_file' and 'self.extra_dirs' from the
+        'extra_path' option (falling back to the distribution's own
+        'extra_path' setting).
+
+        'extra_path' may be a comma-separated string, or a list/tuple of
+        one element (one name serving as both .pth file and extra
+        directory) or two elements (.pth file name, extra directory).
+        Raises DistutilsOptionError for any other shape.
+        """
+
+        if self.extra_path is None:
+            self.extra_path = self.distribution.extra_path
+
+        if self.extra_path is not None:
+            if type(self.extra_path) is StringType:
+                self.extra_path = string.split(self.extra_path, ',')
+
+            if len(self.extra_path) == 1:
+                # one element: it names both the .pth file and the
+                # subdirectory it points at
+                path_file = extra_dirs = self.extra_path[0]
+            elif len(self.extra_path) == 2:
+                (path_file, extra_dirs) = self.extra_path
+            else:
+                raise DistutilsOptionError, \
+                      ("'extra_path' option must be a list, tuple, or "
+                      "comma-separated string with 1 or 2 elements")
+
+            # convert to local form in case Unix notation used (as it
+            # should be in setup scripts)
+            extra_dirs = convert_path(extra_dirs)
+
+        else:
+            path_file = None
+            extra_dirs = ''
+
+        # XXX should we warn if path_file and not extra_dirs? (in which
+        # case the path file would be harmless but pointless)
+        self.path_file = path_file
+        self.extra_dirs = extra_dirs
+
+    # handle_extra_path ()
+
+
+    def change_roots (self, *names):
+        for name in names:
+            attr = "install_" + name
+            setattr(self, attr, change_root(self.root, getattr(self, attr)))
+
+
+    # -- Command execution methods -------------------------------------
+
+    def run (self):
+        """Run the installation: build first (unless skipped), run every
+        needed sub-command, optionally create the .pth file and the record
+        of installed files, and warn if modules land off sys.path.
+        """
+
+        # Obviously have to build before we can install
+        if not self.skip_build:
+            self.run_command('build')
+
+        # Run all sub-commands (at least those that need to be run)
+        for cmd_name in self.get_sub_commands():
+            self.run_command(cmd_name)
+
+        if self.path_file:
+            self.create_path_file()
+
+        # write list of installed files, if requested.
+        if self.record:
+            outputs = self.get_outputs()
+            if self.root:               # strip any package prefix
+                root_len = len(self.root)
+                for counter in xrange(len(outputs)):
+                    outputs[counter] = outputs[counter][root_len:]
+            self.execute(write_file,
+                         (self.record, outputs),
+                         "writing list of installed files to '%s'" %
+                         self.record)
+
+        # Warn when the install directory is not on sys.path and no .pth
+        # file will extend the path; normalize both sides so the
+        # membership test is not defeated by case or trailing slashes.
+        sys_path = map(os.path.normpath, sys.path)
+        sys_path = map(os.path.normcase, sys_path)
+        install_lib = os.path.normcase(os.path.normpath(self.install_lib))
+        if (self.warn_dir and
+            not (self.path_file and self.install_path_file) and
+            install_lib not in sys_path):
+            log.debug(("modules installed to '%s', which is not in "
+                       "Python's module search path (sys.path) -- "
+                       "you'll have to change the search path yourself"),
+                       self.install_lib)
+
+    # run ()
+
+    def create_path_file (self):
+        filename = os.path.join(self.install_libbase,
+                                self.path_file + ".pth")
+        if self.install_path_file:
+            self.execute(write_file,
+                         (filename, [self.extra_dirs]),
+                         "creating %s" % filename)
+        else:
+            self.warn("path file '%s' not created" % filename)
+
+
+    # -- Reporting methods ---------------------------------------------
+
+    def get_outputs (self):
+        # Assemble the outputs of all the sub-commands.
+        outputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            # Add the contents of cmd.get_outputs(), ensuring
+            # that outputs doesn't contain duplicate entries
+            for filename in cmd.get_outputs():
+                if filename not in outputs:
+                    outputs.append(filename)
+
+        if self.path_file and self.install_path_file:
+            outputs.append(os.path.join(self.install_libbase,
+                                        self.path_file + ".pth"))
+
+        return outputs
+
+    def get_inputs (self):
+        # XXX gee, this looks familiar ;-(
+        inputs = []
+        for cmd_name in self.get_sub_commands():
+            cmd = self.get_finalized_command(cmd_name)
+            inputs.extend(cmd.get_inputs())
+
+        return inputs
+
+
+    # -- Predicates for sub-command list -------------------------------
+
+    def has_lib (self):
+        """Return true if the current distribution has any Python
+        modules to install."""
+        return (self.distribution.has_pure_modules() or
+                self.distribution.has_ext_modules())
+
+    def has_headers (self):
+        return self.distribution.has_headers()
+
+    def has_scripts (self):
+        return self.distribution.has_scripts()
+
+    def has_data (self):
+        return self.distribution.has_data_files()
+
+
+    # 'sub_commands': (command name, predicate) pairs this command might
+    # have to run to get its work done; each predicate is called with the
+    # 'install' instance to decide whether that sub-command is needed.
+    # See cmd.py for more info.
+    sub_commands = [('install_lib',     has_lib),
+                    ('install_headers', has_headers),
+                    ('install_scripts', has_scripts),
+                    ('install_data',    has_data),
+                   ]
+
+# class install
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/install_data.py b/depot_tools/release/win/python_24/Lib/distutils/command/install_data.py
new file mode 100644
index 0000000..c72061d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/install_data.py
@@ -0,0 +1,85 @@
+"""distutils.command.install_data
+
+Implements the Distutils 'install_data' command, for installing
+platform-independent data files."""
+
+# contributed by Bastian Kleineidam
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: install_data.py,v 1.22 2004/11/10 22:23:15 loewis Exp $"
+
+import os
+from types import StringType
+from distutils.core import Command
+from distutils.util import change_root, convert_path
+
+class install_data (Command):
+
+    description = "install data files"
+
+    user_options = [
+        ('install-dir=', 'd',
+         "base directory for installing data files "
+         "(default: installation base dir)"),
+        ('root=', None,
+         "install everything relative to this alternate root directory"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ]
+
+    boolean_options = ['force']
+
+    def initialize_options (self):
+        self.install_dir = None
+        self.outfiles = []
+        self.root = None
+        self.force = 0
+
+        self.data_files = self.distribution.data_files
+        self.warn_dir = 1
+
+    def finalize_options (self):
+        self.set_undefined_options('install',
+                                   ('install_data', 'install_dir'),
+                                   ('root', 'root'),
+                                   ('force', 'force'),
+                                  )
+
+    def run (self):
+        self.mkpath(self.install_dir)
+        for f in self.data_files:
+            if type(f) is StringType:
+                # it's a simple file, so copy it
+                f = convert_path(f)
+                if self.warn_dir:
+                    self.warn("setup script did not provide a directory for "
+                              "'%s' -- installing right in '%s'" %
+                              (f, self.install_dir))
+                (out, _) = self.copy_file(f, self.install_dir)
+                self.outfiles.append(out)
+            else:
+                # it's a tuple with path to install to and a list of files
+                dir = convert_path(f[0])
+                if not os.path.isabs(dir):
+                    dir = os.path.join(self.install_dir, dir)
+                elif self.root:
+                    dir = change_root(self.root, dir)
+                self.mkpath(dir)
+
+                if f[1] == []:
+                    # If there are no files listed, the user must be
+                    # trying to create an empty directory, so add the
+                    # directory to the list of output files.
+                    self.outfiles.append(dir)
+                else:
+                    # Copy files, adding them to the list of output files.
+                    for data in f[1]:
+                        data = convert_path(data)
+                        (out, _) = self.copy_file(data, dir)
+                        self.outfiles.append(out)
+
+    def get_inputs (self):
+        return self.data_files or []
+
+    def get_outputs (self):
+        return self.outfiles
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/install_headers.py b/depot_tools/release/win/python_24/Lib/distutils/command/install_headers.py
new file mode 100644
index 0000000..ab9fe11
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/install_headers.py
@@ -0,0 +1,53 @@
+"""distutils.command.install_headers
+
+Implements the Distutils 'install_headers' command, to install C/C++ header
+files to the Python include directory."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: install_headers.py,v 1.11 2004/11/10 22:23:15 loewis Exp $"
+
+import os
+from distutils.core import Command
+
+
+class install_headers (Command):
+
+    description = "install C/C++ header files"
+
+    user_options = [('install-dir=', 'd',
+                     "directory to install header files to"),
+                    ('force', 'f',
+                     "force installation (overwrite existing files)"),
+                   ]
+
+    boolean_options = ['force']
+
+    def initialize_options (self):
+        self.install_dir = None
+        self.force = 0
+        self.outfiles = []
+
+    def finalize_options (self):
+        self.set_undefined_options('install',
+                                   ('install_headers', 'install_dir'),
+                                   ('force', 'force'))
+
+
+    def run (self):
+        headers = self.distribution.headers
+        if not headers:
+            return
+
+        self.mkpath(self.install_dir)
+        for header in headers:
+            (out, _) = self.copy_file(header, self.install_dir)
+            self.outfiles.append(out)
+
+    def get_inputs (self):
+        return self.distribution.headers or []
+
+    def get_outputs (self):
+        return self.outfiles
+
+# class install_headers
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/install_lib.py b/depot_tools/release/win/python_24/Lib/distutils/command/install_lib.py
new file mode 100644
index 0000000..36447d3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/install_lib.py
@@ -0,0 +1,221 @@
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: install_lib.py,v 1.44 2004/11/10 22:23:15 loewis Exp $"
+
+import sys, os, string
+from types import IntType
+from distutils.core import Command
+from distutils.errors import DistutilsOptionError
+
+
+# Extension for Python source files.
+PYTHON_SOURCE_EXTENSION = os.extsep + "py"
+
+
+class install_lib (Command):
+    """Install all Python modules (pure and extension) from the build
+    directory to the installation directory, optionally byte-compiling
+    the pure-Python sources afterwards."""
+
+    description = "install all Python modules (extensions and pure Python)"
+
+    # The byte-compilation options are a tad confusing.  Here are the
+    # possible scenarios:
+    #   1) no compilation at all (--no-compile --no-optimize)
+    #   2) compile .pyc only (--compile --no-optimize; default)
+    #   3) compile .pyc and "level 1" .pyo (--compile --optimize)
+    #   4) compile "level 1" .pyo only (--no-compile --optimize)
+    #   5) compile .pyc and "level 2" .pyo (--compile --optimize-more)
+    #   6) compile "level 2" .pyo only (--no-compile --optimize-more)
+    #
+    # The UI for this is two options, 'compile' and 'optimize'.
+    # 'compile' is strictly boolean, and only decides whether to
+    # generate .pyc files.  'optimize' is three-way (0, 1, or 2), and
+    # decides both whether to generate .pyo files and what level of
+    # optimization to use.
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+        ('build-dir=','b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('compile', 'c', "compile .py to .pyc [default]"),
+        ('no-compile', None, "don't compile .py files"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+        ('skip-build', None, "skip the build steps"),
+        ]
+
+    boolean_options = ['force', 'compile', 'skip-build']
+    negative_opt = {'no-compile' : 'compile'}
+
+
+    def initialize_options (self):
+        # let the 'install' command dictate our installation directory
+        self.install_dir = None
+        self.build_dir = None
+        self.force = 0
+        self.compile = None
+        self.optimize = None
+        self.skip_build = None
+
+    def finalize_options (self):
+
+        # Get all the information we need to install pure Python modules
+        # from the umbrella 'install' command -- build (source) directory,
+        # install (target) directory, and whether to compile .py files.
+        self.set_undefined_options('install',
+                                   ('build_lib', 'build_dir'),
+                                   ('install_lib', 'install_dir'),
+                                   ('force', 'force'),
+                                   ('compile', 'compile'),
+                                   ('optimize', 'optimize'),
+                                   ('skip_build', 'skip_build'),
+                                  )
+
+        # Defaults: scenario 2 above (compile .pyc, no .pyo).
+        if self.compile is None:
+            self.compile = 1
+        if self.optimize is None:
+            self.optimize = 0
+
+        # 'optimize' may arrive as a string from the command line;
+        # coerce it to an int and validate the range.
+        if type(self.optimize) is not IntType:
+            try:
+                self.optimize = int(self.optimize)
+                assert 0 <= self.optimize <= 2
+            except (ValueError, AssertionError):
+                raise DistutilsOptionError, "optimize must be 0, 1, or 2"
+
+    def run (self):
+
+        # Make sure we have built everything we need first
+        self.build()
+
+        # Install everything: simply dump the entire contents of the build
+        # directory to the installation directory (that's the beauty of
+        # having a build directory!)
+        outfiles = self.install()
+
+        # (Optionally) compile .py to .pyc
+        if outfiles is not None and self.distribution.has_pure_modules():
+            self.byte_compile(outfiles)
+
+    # run ()
+
+
+    # -- Top-level worker functions ------------------------------------
+    # (called from 'run()')
+
+    def build (self):
+        """Run the build sub-commands needed for this install (no-op when
+        --skip-build was given)."""
+        if not self.skip_build:
+            if self.distribution.has_pure_modules():
+                self.run_command('build_py')
+            if self.distribution.has_ext_modules():
+                self.run_command('build_ext')
+
+    def install (self):
+        """Copy the build tree to the install directory; return the list
+        of copied files, or None (with a warning) if there is no build
+        directory to copy from."""
+        if os.path.isdir(self.build_dir):
+            outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        else:
+            self.warn("'%s' does not exist -- no Python modules to install" %
+                      self.build_dir)
+            return
+        return outfiles
+
+    def byte_compile (self, files):
+        """Byte-compile 'files' to .pyc and/or .pyo according to the
+        'compile' and 'optimize' settings."""
+        from distutils.util import byte_compile
+
+        # Get the "--root" directory supplied to the "install" command,
+        # and use it as a prefix to strip off the purported filename
+        # encoded in bytecode files.  This is far from complete, but it
+        # should at least generate usable bytecode in RPM distributions.
+        install_root = self.get_finalized_command('install').root
+
+        if self.compile:
+            byte_compile(files, optimize=0,
+                         force=self.force, prefix=install_root,
+                         dry_run=self.dry_run)
+        if self.optimize > 0:
+            byte_compile(files, optimize=self.optimize,
+                         force=self.force, prefix=install_root,
+                         verbose=self.verbose, dry_run=self.dry_run)
+
+
+    # -- Utility methods -----------------------------------------------
+
+    def _mutate_outputs (self, has_any, build_cmd, cmd_option, output_dir):
+        """Map the outputs of 'build_cmd' from its build directory (the
+        command attribute named by 'cmd_option') into 'output_dir';
+        return [] when 'has_any' is false."""
+
+        if not has_any:
+            return []
+
+        build_cmd = self.get_finalized_command(build_cmd)
+        build_files = build_cmd.get_outputs()
+        build_dir = getattr(build_cmd, cmd_option)
+
+        # Strip "<build_dir>/" off the front of each build file name.
+        prefix_len = len(build_dir) + len(os.sep)
+        outputs = []
+        for file in build_files:
+            outputs.append(os.path.join(output_dir, file[prefix_len:]))
+
+        return outputs
+
+    # _mutate_outputs ()
+
+    def _bytecode_filenames (self, py_filenames):
+        """Return the .pyc/.pyo names that byte-compiling 'py_filenames'
+        would produce, honoring the 'compile' and 'optimize' settings."""
+        bytecode_files = []
+        for py_file in py_filenames:
+            # Since build_py handles package data installation, the
+            # list of outputs can contain more than just .py files.
+            # Make sure we only report bytecode for the .py files.
+            ext = os.path.splitext(os.path.normcase(py_file))[1]
+            if ext != PYTHON_SOURCE_EXTENSION:
+                continue
+            if self.compile:
+                bytecode_files.append(py_file + "c")
+            if self.optimize > 0:
+                bytecode_files.append(py_file + "o")
+
+        return bytecode_files
+
+
+    # -- External interface --------------------------------------------
+    # (called by outsiders)
+
+    def get_outputs (self):
+        """Return the list of files that would be installed if this command
+        were actually run.  Not affected by the "dry-run" flag or whether
+        modules have actually been built yet.
+        """
+        pure_outputs = \
+            self._mutate_outputs(self.distribution.has_pure_modules(),
+                                 'build_py', 'build_lib',
+                                 self.install_dir)
+        if self.compile:
+            bytecode_outputs = self._bytecode_filenames(pure_outputs)
+        else:
+            bytecode_outputs = []
+
+        ext_outputs = \
+            self._mutate_outputs(self.distribution.has_ext_modules(),
+                                 'build_ext', 'build_lib',
+                                 self.install_dir)
+
+        return pure_outputs + bytecode_outputs + ext_outputs
+
+    # get_outputs ()
+
+    def get_inputs (self):
+        """Get the list of files that are input to this command, ie. the
+        files that get installed as they are named in the build tree.
+        The files in this list correspond one-to-one to the output
+        filenames returned by 'get_outputs()'.
+        """
+        inputs = []
+
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            inputs.extend(build_py.get_outputs())
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            inputs.extend(build_ext.get_outputs())
+
+        return inputs
+
+# class install_lib
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/install_scripts.py b/depot_tools/release/win/python_24/Lib/distutils/command/install_scripts.py
new file mode 100644
index 0000000..8d6739ce
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/install_scripts.py
@@ -0,0 +1,66 @@
+"""distutils.command.install_scripts
+
+Implements the Distutils 'install_scripts' command, for installing
+Python scripts."""
+
+# contributed by Bastian Kleineidam
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: install_scripts.py,v 1.16 2004/11/10 22:23:15 loewis Exp $"
+
+import os
+from distutils.core import Command
+from distutils import log
+from stat import ST_MODE
+
+class install_scripts (Command):
+
+    description = "install scripts (Python or otherwise)"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install scripts to"),
+        ('build-dir=','b', "build directory (where to install from)"),
+        ('force', 'f', "force installation (overwrite existing files)"),
+        ('skip-build', None, "skip the build steps"),
+    ]
+
+    boolean_options = ['force', 'skip-build']
+
+
+    def initialize_options (self):
+        self.install_dir = None
+        self.force = 0
+        self.build_dir = None
+        self.skip_build = None
+
+    def finalize_options (self):
+        self.set_undefined_options('build', ('build_scripts', 'build_dir'))
+        self.set_undefined_options('install',
+                                   ('install_scripts', 'install_dir'),
+                                   ('force', 'force'),
+                                   ('skip_build', 'skip_build'),
+                                  )
+
+    def run (self):
+        if not self.skip_build:
+            self.run_command('build_scripts')
+        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
+        if os.name == 'posix':
+            # Set the executable bits (owner, group, and world) on
+            # all the scripts we just installed.
+            for file in self.get_outputs():
+                if self.dry_run:
+                    log.info("changing mode of %s", file)
+                else:
+                    mode = ((os.stat(file)[ST_MODE]) | 0555) & 07777
+                    log.info("changing mode of %s to %o", file, mode)
+                    os.chmod(file, mode)
+
+    def get_inputs (self):
+        return self.distribution.scripts or []
+
+    def get_outputs(self):
+        return self.outfiles or []
+
+# class install_scripts
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/register.py b/depot_tools/release/win/python_24/Lib/distutils/command/register.py
new file mode 100644
index 0000000..c11bedd4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/register.py
@@ -0,0 +1,288 @@
+"""distutils.command.register
+
+Implements the Distutils 'register' command (register with the repository).
+"""
+
+# created 2002/10/21, Richard Jones
+
+__revision__ = "$Id: register.py,v 1.7 2004/07/18 06:14:43 tim_one Exp $"
+
+import sys, os, string, urllib2, getpass, urlparse
+import StringIO, ConfigParser
+
+from distutils.core import Command
+from distutils.errors import *
+
+class register(Command):
+    """Register the distribution's metadata with the Python package index
+    (or another repository), interactively prompting for credentials."""
+
+    description = ("register the distribution with the Python package index")
+
+    DEFAULT_REPOSITORY = 'http://www.python.org/pypi'
+
+    user_options = [
+        ('repository=', 'r',
+         "url of repository [default: %s]"%DEFAULT_REPOSITORY),
+        ('list-classifiers', None,
+         'list the valid Trove classifiers'),
+        ('show-response', None,
+         'display full response text from server'),
+        ]
+    # NOTE(review): 'verify' is listed here but has no matching entry in
+    # 'user_options' above -- confirm whether that option was dropped.
+    boolean_options = ['verify', 'show-response', 'list-classifiers']
+
+    def initialize_options(self):
+        self.repository = None
+        self.show_response = 0
+        self.list_classifiers = 0
+
+    def finalize_options(self):
+        if self.repository is None:
+            self.repository = self.DEFAULT_REPOSITORY
+
+    def run(self):
+        # Always sanity-check the metadata first; then either dry-run
+        # verify, list classifiers, or actually submit.
+        self.check_metadata()
+        if self.dry_run:
+            self.verify_metadata()
+        elif self.list_classifiers:
+            self.classifiers()
+        else:
+            self.send_metadata()
+
+    def check_metadata(self):
+        """Ensure that all required elements of meta-data (name, version,
+           URL, (author and author_email) or (maintainer and
+           maintainer_email)) are supplied by the Distribution object; warn if
+           any are missing.
+        """
+        metadata = self.distribution.metadata
+
+        missing = []
+        for attr in ('name', 'version', 'url'):
+            if not (hasattr(metadata, attr) and getattr(metadata, attr)):
+                missing.append(attr)
+
+        if missing:
+            self.warn("missing required meta-data: " +
+                      string.join(missing, ", "))
+
+        # Contact info: author and maintainer each need a matching email.
+        if metadata.author:
+            if not metadata.author_email:
+                self.warn("missing meta-data: if 'author' supplied, " +
+                          "'author_email' must be supplied too")
+        elif metadata.maintainer:
+            if not metadata.maintainer_email:
+                self.warn("missing meta-data: if 'maintainer' supplied, " +
+                          "'maintainer_email' must be supplied too")
+        else:
+            self.warn("missing meta-data: either (author and author_email) " +
+                      "or (maintainer and maintainer_email) " +
+                      "must be supplied")
+
+    def classifiers(self):
+        ''' Fetch the list of classifiers from the server.
+        '''
+        response = urllib2.urlopen(self.repository+'?:action=list_classifiers')
+        print response.read()
+
+    def verify_metadata(self):
+        ''' Send the metadata to the package index server to be checked.
+        '''
+        # send the info to the server and report the result
+        (code, result) = self.post_to_server(self.build_post_data('verify'))
+        print 'Server response (%s): %s'%(code, result)
+
+    def send_metadata(self):
+        ''' Send the metadata to the package index server.
+
+            Well, do the following:
+            1. figure who the user is, and then
+            2. send the data as a Basic auth'ed POST.
+
+            First we try to read the username/password from $HOME/.pypirc,
+            which is a ConfigParser-formatted file with a section
+            [server-login] containing username and password entries (both
+            in clear text). Eg:
+
+                [server-login]
+                username: fred
+                password: sekrit
+
+            Otherwise, to figure who the user is, we offer the user three
+            choices:
+
+             1. use existing login,
+             2. register as a new user, or
+             3. set the password to a random string and email the user.
+
+        '''
+        choice = 'x'
+        username = password = ''
+
+        # see if we can short-cut and get the username/password from the
+        # config
+        config = None
+        if os.environ.has_key('HOME'):
+            rc = os.path.join(os.environ['HOME'], '.pypirc')
+            if os.path.exists(rc):
+                print 'Using PyPI login from %s'%rc
+                config = ConfigParser.ConfigParser()
+                config.read(rc)
+                username = config.get('server-login', 'username')
+                password = config.get('server-login', 'password')
+                # a stored login implies choice 1 (existing login)
+                choice = '1'
+
+        # get the user's login info
+        choices = '1 2 3 4'.split()
+        while choice not in choices:
+            print '''We need to know who you are, so please choose either:
+ 1. use your existing login,
+ 2. register as a new user,
+ 3. have the server generate a new password for you (and email it to you), or
+ 4. quit
+Your selection [default 1]: ''',
+            choice = raw_input()
+            if not choice:
+                choice = '1'
+            elif choice not in choices:
+                print 'Please choose one of the four options!'
+
+        if choice == '1':
+            # get the username and password
+            while not username:
+                username = raw_input('Username: ')
+            while not password:
+                password = getpass.getpass('Password: ')
+
+            # set up the authentication
+            auth = urllib2.HTTPPasswordMgr()
+            host = urlparse.urlparse(self.repository)[1]
+            auth.add_password('pypi', host, username, password)
+
+            # send the info to the server and report the result
+            code, result = self.post_to_server(self.build_post_data('submit'),
+                auth)
+            print 'Server response (%s): %s'%(code, result)
+
+            # possibly save the login
+            if os.environ.has_key('HOME') and config is None and code == 200:
+                rc = os.path.join(os.environ['HOME'], '.pypirc')
+                print 'I can store your PyPI login so future submissions will be faster.'
+                print '(the login will be stored in %s)'%rc
+                choice = 'X'
+                while choice.lower() not in 'yn':
+                    choice = raw_input('Save your login (y/N)?')
+                    if not choice:
+                        choice = 'n'
+                if choice.lower() == 'y':
+                    # NOTE: credentials are stored in clear text, so at
+                    # least restrict the file to the owner below.
+                    f = open(rc, 'w')
+                    f.write('[server-login]\nusername:%s\npassword:%s\n'%(
+                        username, password))
+                    f.close()
+                    try:
+                        os.chmod(rc, 0600)
+                    except:
+                        pass
+        elif choice == '2':
+            # register as a new user: prompt until name, matching
+            # password/confirm pair, and email are all supplied
+            data = {':action': 'user'}
+            data['name'] = data['password'] = data['email'] = ''
+            data['confirm'] = None
+            while not data['name']:
+                data['name'] = raw_input('Username: ')
+            while data['password'] != data['confirm']:
+                while not data['password']:
+                    data['password'] = getpass.getpass('Password: ')
+                while not data['confirm']:
+                    data['confirm'] = getpass.getpass(' Confirm: ')
+                if data['password'] != data['confirm']:
+                    data['password'] = ''
+                    data['confirm'] = None
+                    print "Password and confirm don't match!"
+            while not data['email']:
+                data['email'] = raw_input('   EMail: ')
+            code, result = self.post_to_server(data)
+            if code != 200:
+                print 'Server response (%s): %s'%(code, result)
+            else:
+                print 'You will receive an email shortly.'
+                print 'Follow the instructions in it to complete registration.'
+        elif choice == '3':
+            # have the server mail the user a password reset
+            data = {':action': 'password_reset'}
+            data['email'] = ''
+            while not data['email']:
+                data['email'] = raw_input('Your email address: ')
+            code, result = self.post_to_server(data)
+            print 'Server response (%s): %s'%(code, result)
+
+    def build_post_data(self, action):
+        # figure the data to send - the metadata plus some additional
+        # information used by the package server
+        meta = self.distribution.metadata
+        data = {
+            ':action': action,
+            'metadata_version' : '1.0',
+            'name': meta.get_name(),
+            'version': meta.get_version(),
+            'summary': meta.get_description(),
+            'home_page': meta.get_url(),
+            'author': meta.get_contact(),
+            'author_email': meta.get_contact_email(),
+            'license': meta.get_licence(),
+            'description': meta.get_long_description(),
+            'keywords': meta.get_keywords(),
+            'platform': meta.get_platforms(),
+            'classifiers': meta.get_classifiers(),
+            'download_url': meta.get_download_url(),
+        }
+        return data
+
+    def post_to_server(self, data, auth=None):
+        ''' Post a query to the server, and return a string response.
+        '''
+
+        # Build up the MIME payload for the urllib2 POST data
+        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = '\n--' + boundary
+        end_boundary = sep_boundary + '--'
+        body = StringIO.StringIO()
+        for key, value in data.items():
+            # handle multiple entries for the same name
+            if type(value) != type([]):
+                value = [value]
+            # (note: the loop variable deliberately rebinds 'value')
+            for value in value:
+                value = str(value)
+                body.write(sep_boundary)
+                body.write('\nContent-Disposition: form-data; name="%s"'%key)
+                body.write("\n\n")
+                body.write(value)
+                if value and value[-1] == '\r':
+                    body.write('\n')  # write an extra newline (lurve Macs)
+        body.write(end_boundary)
+        body.write("\n")
+        body = body.getvalue()
+
+        # build the Request
+        headers = {
+            'Content-type': 'multipart/form-data; boundary=%s'%boundary,
+            'Content-length': str(len(body))
+        }
+        req = urllib2.Request(self.repository, body, headers)
+
+        # handle HTTP and include the Basic Auth handler
+        opener = urllib2.build_opener(
+            urllib2.HTTPBasicAuthHandler(password_mgr=auth)
+        )
+        data = ''
+        try:
+            result = opener.open(req)
+        except urllib2.HTTPError, e:
+            if self.show_response:
+                data = e.fp.read()
+            result = e.code, e.msg
+        except urllib2.URLError, e:
+            result = 500, str(e)
+        else:
+            if self.show_response:
+                data = result.read()
+            result = 200, 'OK'
+        if self.show_response:
+            print '-'*75, data, '-'*75
+        return result
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/sdist.py b/depot_tools/release/win/python_24/Lib/distutils/command/sdist.py
new file mode 100644
index 0000000..cb27d05
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/sdist.py
@@ -0,0 +1,464 @@
+"""distutils.command.sdist
+
+Implements the Distutils 'sdist' command (create a source distribution)."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: sdist.py,v 1.59 2004/11/10 22:23:15 loewis Exp $"
+
+import sys, os, string
+from types import *
+from glob import glob
+from distutils.core import Command
+from distutils import dir_util, dep_util, file_util, archive_util
+from distutils.text_file import TextFile
+from distutils.errors import *
+from distutils.filelist import FileList
+from distutils import log
+
+
+def show_formats ():
+    """Print all possible values for the 'formats' option (used by
+    the "--help-formats" command-line option).
+    """
+    from distutils.fancy_getopt import FancyGetopt
+    from distutils.archive_util import ARCHIVE_FORMATS
+    formats=[]
+    for format in ARCHIVE_FORMATS.keys():
+        formats.append(("formats=" + format, None,
+                        ARCHIVE_FORMATS[format][2]))
+    formats.sort()
+    pretty_printer = FancyGetopt(formats)
+    pretty_printer.print_help(
+        "List of available source distribution formats:")
+
+class sdist (Command):
+
+    """Create a source distribution archive.
+
+    Collects the files to distribute (default file set, MANIFEST.in
+    template, or an existing MANIFEST), writes or reads the manifest,
+    copies the files into a release tree, and packs that tree into each
+    requested archive format.
+    """
+
+    description = "create a source distribution (tarball, zip file, etc.)"
+
+    user_options = [
+        ('template=', 't',
+         "name of manifest template file [default: MANIFEST.in]"),
+        ('manifest=', 'm',
+         "name of manifest file [default: MANIFEST]"),
+        ('use-defaults', None,
+         "include the default file set in the manifest "
+         "[default; disable with --no-defaults]"),
+        ('no-defaults', None,
+         "don't include the default file set"),
+        ('prune', None,
+         "specifically exclude files/directories that should not be "
+         "distributed (build tree, RCS/CVS dirs, etc.) "
+         "[default; disable with --no-prune]"),
+        ('no-prune', None,
+         "don't automatically exclude anything"),
+        ('manifest-only', 'o',
+         "just regenerate the manifest and then stop "
+         "(implies --force-manifest)"),
+        ('force-manifest', 'f',
+         "forcibly regenerate the manifest and carry on as usual"),
+        ('formats=', None,
+         "formats for source distribution (comma-separated list)"),
+        ('keep-temp', 'k',
+         "keep the distribution tree around after creating " +
+         "archive file(s)"),
+        ('dist-dir=', 'd',
+         "directory to put the source distribution archive(s) in "
+         "[default: dist]"),
+        ]
+
+    boolean_options = ['use-defaults', 'prune',
+                       'manifest-only', 'force-manifest',
+                       'keep-temp']
+
+    help_options = [
+        ('help-formats', None,
+         "list available distribution formats", show_formats),
+        ]
+
+    negative_opt = {'no-defaults': 'use-defaults',
+                    'no-prune': 'prune' }
+
+    # Archive format used when --formats is not given, keyed by os.name.
+    default_format = { 'posix': 'gztar',
+                       'nt': 'zip' }
+
+    def initialize_options (self):
+        # 'template' and 'manifest' are, respectively, the names of
+        # the manifest template and manifest file.
+        self.template = None
+        self.manifest = None
+
+        # 'use_defaults': if true, we will include the default file set
+        # in the manifest
+        self.use_defaults = 1
+        self.prune = 1
+
+        self.manifest_only = 0
+        self.force_manifest = 0
+
+        self.formats = None
+        self.keep_temp = 0
+        self.dist_dir = None
+
+        self.archive_files = None
+
+
+    def finalize_options (self):
+        if self.manifest is None:
+            self.manifest = "MANIFEST"
+        if self.template is None:
+            self.template = "MANIFEST.in"
+
+        self.ensure_string_list('formats')
+        if self.formats is None:
+            try:
+                self.formats = [self.default_format[os.name]]
+            except KeyError:
+                raise DistutilsPlatformError, \
+                      "don't know how to create source distributions " + \
+                      "on platform %s" % os.name
+
+        bad_format = archive_util.check_archive_formats(self.formats)
+        if bad_format:
+            raise DistutilsOptionError, \
+                  "unknown archive format '%s'" % bad_format
+
+        if self.dist_dir is None:
+            self.dist_dir = "dist"
+
+
+    def run (self):
+
+        # 'filelist' contains the list of files that will make up the
+        # manifest
+        self.filelist = FileList()
+
+        # Ensure that all required meta-data is given; warn if not (but
+        # don't die, it's not *that* serious!)
+        self.check_metadata()
+
+        # Do whatever it takes to get the list of files to process
+        # (process the manifest template, read an existing manifest,
+        # whatever).  File list is accumulated in 'self.filelist'.
+        self.get_file_list()
+
+        # If user just wanted us to regenerate the manifest, stop now.
+        if self.manifest_only:
+            return
+
+        # Otherwise, go ahead and create the source distribution tarball,
+        # or zipfile, or whatever.
+        self.make_distribution()
+
+
+    def check_metadata (self):
+        """Ensure that all required elements of meta-data (name, version,
+        URL, (author and author_email) or (maintainer and
+        maintainer_email)) are supplied by the Distribution object; warn if
+        any are missing.
+        """
+        metadata = self.distribution.metadata
+
+        missing = []
+        for attr in ('name', 'version', 'url'):
+            if not (hasattr(metadata, attr) and getattr(metadata, attr)):
+                missing.append(attr)
+
+        if missing:
+            self.warn("missing required meta-data: " +
+                      string.join(missing, ", "))
+
+        if metadata.author:
+            if not metadata.author_email:
+                self.warn("missing meta-data: if 'author' supplied, " +
+                          "'author_email' must be supplied too")
+        elif metadata.maintainer:
+            if not metadata.maintainer_email:
+                self.warn("missing meta-data: if 'maintainer' supplied, " +
+                          "'maintainer_email' must be supplied too")
+        else:
+            self.warn("missing meta-data: either (author and author_email) " +
+                      "or (maintainer and maintainer_email) " +
+                      "must be supplied")
+
+    # check_metadata ()
+
+
+    def get_file_list (self):
+        """Figure out the list of files to include in the source
+        distribution, and put it in 'self.filelist'.  This might involve
+        reading the manifest template (and writing the manifest), or just
+        reading the manifest, or just using the default file set -- it all
+        depends on the user's options and the state of the filesystem.
+        """
+
+        # If we have a manifest template, see if it's newer than the
+        # manifest; if so, we'll regenerate the manifest.
+        template_exists = os.path.isfile(self.template)
+        if template_exists:
+            template_newer = dep_util.newer(self.template, self.manifest)
+
+        # The contents of the manifest file almost certainly depend on the
+        # setup script as well as the manifest template -- so if the setup
+        # script is newer than the manifest, we'll regenerate the manifest
+        # from the template.  (Well, not quite: if we already have a
+        # manifest, but there's no template -- which will happen if the
+        # developer elects to generate a manifest some other way -- then we
+        # can't regenerate the manifest, so we don't.)
+        self.debug_print("checking if %s newer than %s" %
+                         (self.distribution.script_name, self.manifest))
+        setup_newer = dep_util.newer(self.distribution.script_name,
+                                     self.manifest)
+
+        # cases:
+        #   1) no manifest, template exists: generate manifest
+        #      (covered by 2a: no manifest == template newer)
+        #   2) manifest & template exist:
+        #      2a) template or setup script newer than manifest:
+        #          regenerate manifest
+        #      2b) manifest newer than both:
+        #          do nothing (unless --force or --manifest-only)
+        #   3) manifest exists, no template:
+        #      do nothing (unless --force or --manifest-only)
+        #   4) no manifest, no template: generate w/ warning ("defaults only")
+
+        # NB. 'template_newer' is only bound when the template exists;
+        # the short-circuiting 'and' below is what keeps that safe.
+        manifest_outofdate = (template_exists and
+                              (template_newer or setup_newer))
+        force_regen = self.force_manifest or self.manifest_only
+        manifest_exists = os.path.isfile(self.manifest)
+        neither_exists = (not template_exists and not manifest_exists)
+
+        # Regenerate the manifest if necessary (or if explicitly told to)
+        if manifest_outofdate or neither_exists or force_regen:
+            if not template_exists:
+                self.warn(("manifest template '%s' does not exist " +
+                           "(using default file list)") %
+                          self.template)
+            self.filelist.findall()
+
+            if self.use_defaults:
+                self.add_defaults()
+            if template_exists:
+                self.read_template()
+            if self.prune:
+                self.prune_file_list()
+
+            self.filelist.sort()
+            self.filelist.remove_duplicates()
+            self.write_manifest()
+
+        # Don't regenerate the manifest, just read it in.
+        else:
+            self.read_manifest()
+
+    # get_file_list ()
+
+
+    def add_defaults (self):
+        """Add all the default files to self.filelist:
+          - README or README.txt
+          - setup.py
+          - test/test*.py
+          - all pure Python modules mentioned in setup script
+          - all C sources listed as part of extensions or C libraries
+            in the setup script (doesn't catch C headers!)
+        Warns if (README or README.txt) or setup.py are missing; everything
+        else is optional.
+        """
+
+        # A tuple entry lists acceptable alternative filenames; the first
+        # one that exists is the one included.
+        standards = [('README', 'README.txt'), self.distribution.script_name]
+        for fn in standards:
+            if type(fn) is TupleType:
+                alts = fn
+                got_it = 0
+                for fn in alts:
+                    if os.path.exists(fn):
+                        got_it = 1
+                        self.filelist.append(fn)
+                        break
+
+                if not got_it:
+                    self.warn("standard file not found: should have one of " +
+                              string.join(alts, ', '))
+            else:
+                if os.path.exists(fn):
+                    self.filelist.append(fn)
+                else:
+                    self.warn("standard file '%s' not found" % fn)
+
+        optional = ['test/test*.py', 'setup.cfg']
+        for pattern in optional:
+            files = filter(os.path.isfile, glob(pattern))
+            if files:
+                self.filelist.extend(files)
+
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            self.filelist.extend(build_py.get_source_files())
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            self.filelist.extend(build_ext.get_source_files())
+
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.filelist.extend(build_clib.get_source_files())
+
+        if self.distribution.has_scripts():
+            build_scripts = self.get_finalized_command('build_scripts')
+            self.filelist.extend(build_scripts.get_source_files())
+
+    # add_defaults ()
+
+
+    def read_template (self):
+        """Read and parse manifest template file named by self.template.
+
+        (usually "MANIFEST.in") The parsing and processing is done by
+        'self.filelist', which updates itself accordingly.
+        """
+        log.info("reading manifest template '%s'", self.template)
+        template = TextFile(self.template,
+                            strip_comments=1,
+                            skip_blanks=1,
+                            join_lines=1,
+                            lstrip_ws=1,
+                            rstrip_ws=1,
+                            collapse_join=1)
+
+        while 1:
+            line = template.readline()
+            if line is None:            # end of file
+                break
+
+            try:
+                self.filelist.process_template_line(line)
+            except DistutilsTemplateError, msg:
+                # A bad template line is a warning, not fatal: keep going.
+                self.warn("%s, line %d: %s" % (template.filename,
+                                               template.current_line,
+                                               msg))
+
+    # read_template ()
+
+
+    def prune_file_list (self):
+        """Prune off branches that might slip into the file list as created
+        by 'read_template()', but really don't belong there:
+          * the build tree (typically "build")
+          * the release tree itself (only an issue if we ran "sdist"
+            previously with --keep-temp, or it aborted)
+          * any RCS, CVS and .svn directories
+        """
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+
+        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+        # weed out version-control bookkeeping directories at any depth
+        self.filelist.exclude_pattern(r'/(RCS|CVS|\.svn)/.*', is_regex=1)
+
+
+    def write_manifest (self):
+        """Write the file list in 'self.filelist' (presumably as filled in
+        by 'add_defaults()' and 'read_template()') to the manifest file
+        named by 'self.manifest'.
+        """
+        self.execute(file_util.write_file,
+                     (self.manifest, self.filelist.files),
+                     "writing manifest file '%s'" % self.manifest)
+
+    # write_manifest ()
+
+
+    def read_manifest (self):
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        log.info("reading manifest file '%s'", self.manifest)
+        manifest = open(self.manifest)
+        while 1:
+            line = manifest.readline()
+            if line == '':              # end of file
+                break
+            # chop the trailing newline before recording the filename
+            if line[-1] == '\n':
+                line = line[0:-1]
+            self.filelist.append(line)
+
+    # read_manifest ()
+
+
+    def make_release_tree (self, base_dir, files):
+        """Create the directory tree that will become the source
+        distribution archive.  All directories implied by the filenames in
+        'files' are created under 'base_dir', and then we hard link or copy
+        (if hard linking is unavailable) those files into place.
+        Essentially, this duplicates the developer's source tree, but in a
+        directory named after the distribution, containing only the files
+        to be distributed.
+        """
+        # Create all the directories under 'base_dir' necessary to
+        # put 'files' there; the 'mkpath()' is just so we don't die
+        # if the manifest happens to be empty.
+        self.mkpath(base_dir)
+        dir_util.create_tree(base_dir, files, dry_run=self.dry_run)
+
+        # And walk over the list of files, either making a hard link (if
+        # os.link exists) to each one that doesn't already exist in its
+        # corresponding location under 'base_dir', or copying each file
+        # that's out-of-date in 'base_dir'.  (Usually, all files will be
+        # out-of-date, because by default we blow away 'base_dir' when
+        # we're done making the distribution archives.)
+
+        if hasattr(os, 'link'):        # can make hard links on this system
+            link = 'hard'
+            msg = "making hard links in %s..." % base_dir
+        else:                           # nope, have to copy
+            link = None
+            msg = "copying files to %s..." % base_dir
+
+        if not files:
+            log.warn("no files to distribute -- empty manifest?")
+        else:
+            log.info(msg)
+        for file in files:
+            if not os.path.isfile(file):
+                log.warn("'%s' not a regular file -- skipping" % file)
+            else:
+                dest = os.path.join(base_dir, file)
+                self.copy_file(file, dest, link=link)
+
+        self.distribution.metadata.write_pkg_info(base_dir)
+
+    # make_release_tree ()
+
+    def make_distribution (self):
+        """Create the source distribution(s).  First, we create the release
+        tree with 'make_release_tree()'; then, we create all required
+        archive files (according to 'self.formats') from the release tree.
+        Finally, we clean up by blowing away the release tree (unless
+        'self.keep_temp' is true).  The list of archive files created is
+        stored so it can be retrieved later by 'get_archive_files()'.
+        """
+        # Don't warn about missing meta-data here -- should be (and is!)
+        # done elsewhere.
+        base_dir = self.distribution.get_fullname()
+        base_name = os.path.join(self.dist_dir, base_dir)
+
+        self.make_release_tree(base_dir, self.filelist.files)
+        archive_files = []              # remember names of files we create
+        for fmt in self.formats:
+            file = self.make_archive(base_name, fmt, base_dir=base_dir)
+            archive_files.append(file)
+
+        self.archive_files = archive_files
+
+        if not self.keep_temp:
+            dir_util.remove_tree(base_dir, dry_run=self.dry_run)
+
+    def get_archive_files (self):
+        """Return the list of archive files created when the command
+        was run, or None if the command hasn't run yet.
+        """
+        return self.archive_files
+
+# class sdist
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/wininst-6.exe b/depot_tools/release/win/python_24/Lib/distutils/command/wininst-6.exe
new file mode 100644
index 0000000..100fc2e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/wininst-6.exe
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/distutils/command/wininst-7.1.exe b/depot_tools/release/win/python_24/Lib/distutils/command/wininst-7.1.exe
new file mode 100644
index 0000000..a8ee788
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/command/wininst-7.1.exe
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/distutils/core.py b/depot_tools/release/win/python_24/Lib/distutils/core.py
new file mode 100644
index 0000000..963b33f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/core.py
@@ -0,0 +1,240 @@
+"""distutils.core
+
+The only module that needs to be imported to use the Distutils; provides
+the 'setup' function (which is to be called from the setup script).  Also
+indirectly provides the Distribution and Command classes, although they are
+really defined in distutils.dist and distutils.cmd.
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: core.py,v 1.64 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, os
+from types import *
+
+from distutils.debug import DEBUG
+from distutils.errors import *
+from distutils.util import grok_environment_error
+
+# Mainly import these so setup scripts can "from distutils.core import" them.
+from distutils.dist import Distribution
+from distutils.cmd import Command
+from distutils.extension import Extension
+
+# This is a barebones help message generated displayed when the user
+# runs the setup script with no arguments at all.  More useful help
+# is generated with various --help options: global help, list commands,
+# and per-command help.
+USAGE = """\
+usage: %(script)s [global_opts] cmd1 [cmd1_opts] [cmd2 [cmd2_opts] ...]
+   or: %(script)s --help [cmd1 cmd2 ...]
+   or: %(script)s --help-commands
+   or: %(script)s cmd --help
+"""
+
+def gen_usage (script_name):
+    script = os.path.basename(script_name)
+    return USAGE % vars()
+
+
+# Some mild magic to control the behaviour of 'setup()' from 'run_setup()'.
+_setup_stop_after = None
+_setup_distribution = None
+
+# Legal keyword arguments for the setup() function
+setup_keywords = ('distclass', 'script_name', 'script_args', 'options',
+                  'name', 'version', 'author', 'author_email',
+                  'maintainer', 'maintainer_email', 'url', 'license',
+                  'description', 'long_description', 'keywords',
+                  'platforms', 'classifiers', 'download_url',)
+
+# Legal keyword arguments for the Extension constructor
+extension_keywords = ('name', 'sources', 'include_dirs',
+                      'define_macros', 'undef_macros',
+                      'library_dirs', 'libraries', 'runtime_library_dirs',
+                      'extra_objects', 'extra_compile_args', 'extra_link_args',
+                      'swig_opts', 'export_symbols', 'depends', 'language')
+
+def setup (**attrs):
+    """The gateway to the Distutils: do everything your setup script needs
+    to do, in a highly flexible and user-driven way.  Briefly: create a
+    Distribution instance; find and parse config files; parse the command
+    line; run each Distutils command found there, customized by the options
+    supplied to 'setup()' (as keyword arguments), in config files, and on
+    the command line.
+
+    The Distribution instance might be an instance of a class supplied via
+    the 'distclass' keyword argument to 'setup'; if no such class is
+    supplied, then the Distribution class (in dist.py) is instantiated.
+    All other arguments to 'setup' (except for 'cmdclass') are used to set
+    attributes of the Distribution instance.
+
+    The 'cmdclass' argument, if supplied, is a dictionary mapping command
+    names to command classes.  Each command encountered on the command line
+    will be turned into a command class, which is in turn instantiated; any
+    class found in 'cmdclass' is used in place of the default, which is
+    (for command 'foo_bar') class 'foo_bar' in module
+    'distutils.command.foo_bar'.  The command class must provide a
+    'user_options' attribute which is a list of option specifiers for
+    'distutils.fancy_getopt'.  Any command-line options between the current
+    and the next command are used to set attributes of the current command
+    object.
+
+    When the entire command-line has been successfully parsed, calls the
+    'run()' method on each command object in turn.  This method will be
+    driven entirely by the Distribution object (which each command object
+    has a reference to, thanks to its constructor), and the
+    command-specific options that became attributes of each command
+    object.
+    """
+
+    global _setup_stop_after, _setup_distribution
+
+    # Determine the distribution class -- either caller-supplied or
+    # our Distribution (see below).
+    klass = attrs.get('distclass')
+    if klass:
+        del attrs['distclass']
+    else:
+        klass = Distribution
+
+    # Default the script name/args from sys.argv, so running "setup.py"
+    # directly and driving it through 'run_setup()' behave the same.
+    if not attrs.has_key('script_name'):
+        attrs['script_name'] = os.path.basename(sys.argv[0])
+    if not attrs.has_key('script_args'):
+        attrs['script_args'] = sys.argv[1:]
+
+    # Create the Distribution instance, using the remaining arguments
+    # (ie. everything except distclass) to initialize it
+    try:
+        _setup_distribution = dist = klass(attrs)
+    except DistutilsSetupError, msg:
+        if attrs.has_key('name'):
+            raise SystemExit, "error in %s setup command: %s" % \
+                  (attrs['name'], msg)
+        else:
+            raise SystemExit, "error in setup command: %s" % msg
+
+    # '_setup_stop_after' is set by 'run_setup()' to make us bail out
+    # at a well-defined point instead of running the commands.
+    if _setup_stop_after == "init":
+        return dist
+
+    # Find and parse the config file(s): they will override options from
+    # the setup script, but be overridden by the command line.
+    dist.parse_config_files()
+
+    if DEBUG:
+        print "options (after parsing config files):"
+        dist.dump_option_dicts()
+
+    if _setup_stop_after == "config":
+        return dist
+
+    # Parse the command line; any command-line errors are the end user's
+    # fault, so turn them into SystemExit to suppress tracebacks.
+    try:
+        ok = dist.parse_command_line()
+    except DistutilsArgError, msg:
+        raise SystemExit, gen_usage(dist.script_name) + "\nerror: %s" % msg
+
+    if DEBUG:
+        print "options (after parsing command line):"
+        dist.dump_option_dicts()
+
+    if _setup_stop_after == "commandline":
+        return dist
+
+    # And finally, run all the commands found on the command line.
+    if ok:
+        try:
+            dist.run_commands()
+        except KeyboardInterrupt:
+            # Ctrl-C: exit with a short message instead of a traceback.
+            raise SystemExit, "interrupted"
+        except (IOError, os.error), exc:
+            # Environment errors get a friendlier one-line rendering.
+            error = grok_environment_error(exc)
+
+            if DEBUG:
+                sys.stderr.write(error + "\n")
+                raise
+            else:
+                raise SystemExit, error
+
+        except (DistutilsError,
+                CCompilerError), msg:
+            if DEBUG:
+                raise
+            else:
+                raise SystemExit, "error: " + str(msg)
+
+    return dist
+
+# setup ()
+
+
+def run_setup (script_name, script_args=None, stop_after="run"):
+    """Run a setup script in a somewhat controlled environment, and
+    return the Distribution instance that drives things.  This is useful
+    if you need to find out the distribution meta-data (passed as
+    keyword args from 'script' to 'setup()', or the contents of the
+    config files or command-line.
+
+    'script_name' is a file that will be run with 'execfile()';
+    'sys.argv[0]' will be replaced with 'script' for the duration of the
+    call.  'script_args' is a list of strings; if supplied,
+    'sys.argv[1:]' will be replaced by 'script_args' for the duration of
+    the call.
+
+    'stop_after' tells 'setup()' when to stop processing; possible
+    values:
+      init
+        stop after the Distribution instance has been created and
+        populated with the keyword arguments to 'setup()'
+      config
+        stop after config files have been parsed (and their data
+        stored in the Distribution instance)
+      commandline
+        stop after the command-line ('sys.argv[1:]' or 'script_args')
+        have been parsed (and the data stored in the Distribution)
+      run [default]
+        stop after all commands have been run (the same as if 'setup()'
+        had been called in the usual way
+
+    Returns the Distribution instance, which provides all information
+    used to drive the Distutils.
+    """
+    if stop_after not in ('init', 'config', 'commandline', 'run'):
+        raise ValueError, "invalid value for 'stop_after': %r" % (stop_after,)
+
+    global _setup_stop_after, _setup_distribution
+    _setup_stop_after = stop_after
+
+    save_argv = sys.argv
+    g = {}
+    l = {}
+    try:
+        try:
+            sys.argv[0] = script_name
+            if script_args is not None:
+                sys.argv[1:] = script_args
+            execfile(script_name, g, l)
+        finally:
+            sys.argv = save_argv
+            _setup_stop_after = None
+    except SystemExit:
+        # Hmm, should we do something if exiting with a non-zero code
+        # (ie. error)?
+        pass
+    except:
+        raise
+
+    if _setup_distribution is None:
+        raise RuntimeError, \
+              ("'distutils.core.setup()' was never called -- "
+               "perhaps '%s' is not a Distutils setup script?") % \
+              script_name
+
+    # I wonder if the setup script's namespace -- g and l -- would be of
+    # any interest to callers?
+    #print "_setup_distribution:", _setup_distribution
+    return _setup_distribution
+
+# run_setup ()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/cygwinccompiler.py b/depot_tools/release/win/python_24/Lib/distutils/cygwinccompiler.py
new file mode 100644
index 0000000..f1b12b46
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/cygwinccompiler.py
@@ -0,0 +1,441 @@
+"""distutils.cygwinccompiler
+
+Provides the CygwinCCompiler class, a subclass of UnixCCompiler that
+handles the Cygwin port of the GNU C compiler to Windows.  It also contains
+the Mingw32CCompiler class which handles the mingw32 port of GCC (same as
+cygwin in no-cygwin mode).
+"""
+
+# problems:
+#
+# * if you use a msvc compiled python version (1.5.2)
+#   1. you have to insert a __GNUC__ section in its config.h
+#   2. you have to generate an import library for its dll
+#      - create a def-file for python??.dll
+#      - create an import library using
+#             dlltool --dllname python15.dll --def python15.def \
+#                       --output-lib libpython15.a
+#
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+#
+# * We put export_symbols in a def-file, and don't use
+#   --export-all-symbols because it didn't work reliably in some
+#   tested configurations. And because other windows compilers also
+#   need their symbols specified, this is no serious problem.
+#
+# tested configurations:
+#
+# * cygwin gcc 2.91.57/ld 2.9.4/dllwrap 0.2.4 works
+#   (after patching python's config.h and for C++ some other include files)
+#   see also http://starship.python.net/crew/kernr/mingw32/Notes.html
+# * mingw32 gcc 2.95.2/ld 2.9.4/dllwrap 0.2.4 works
+#   (ld doesn't support -shared, so we use dllwrap)
+# * cygwin gcc 2.95.2/ld 2.10.90/dllwrap 2.10.90 works now
+#   - its dllwrap doesn't work, there is a bug in binutils 2.10.90
+#     see also http://sources.redhat.com/ml/cygwin/2000-06/msg01274.html
+#   - using gcc -mdll instead dllwrap doesn't work without -static because
+#     it tries to link against dlls instead their import libraries. (If
+#     it finds the dll first.)
+#     By specifying -static we force ld to link against the import libraries,
+#     this is windows standard and there are normally not the necessary symbols
+#     in the dlls.
+#   *** only the version of June 2000 shows these problems
+# * cygwin gcc 3.2/ld 2.13.90 works
+#   (ld supports -shared)
+# * mingw gcc 3.2/ld 2.13 works
+#   (ld supports -shared)
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: cygwinccompiler.py,v 1.29 2004/11/10 22:23:14 loewis Exp $"
+
+import os,sys,copy
+from distutils.ccompiler import gen_preprocess_options, gen_lib_options
+from distutils.unixccompiler import UnixCCompiler
+from distutils.file_util import write_file
+from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
+from distutils import log
+
+class CygwinCCompiler (UnixCCompiler):
+    """Compiler driver for the Cygwin port of GCC on Windows.
+
+    Subclasses UnixCCompiler, overriding the executables used, the link
+    step (export symbols are written to a generated .def file instead of
+    being passed on the command line) and object-file naming (.rc/.res
+    resource files are compiled to objects with windres).
+    """
+
+    compiler_type = 'cygwin'
+    obj_extension = ".o"
+    static_lib_extension = ".a"
+    shared_lib_extension = ".dll"
+    static_lib_format = "lib%s%s"
+    shared_lib_format = "%s%s"
+    exe_extension = ".exe"
+
+    def __init__ (self, verbose=0, dry_run=0, force=0):
+
+        UnixCCompiler.__init__ (self, verbose, dry_run, force)
+
+        # Warn (but continue) if pyconfig.h doesn't look GCC-compatible;
+        # compilation may still fail later with undefined macros.
+        (status, details) = check_config_h()
+        self.debug_print("Python's GCC status: %s (details: %s)" %
+                         (status, details))
+        if status is not CONFIG_H_OK:
+            self.warn(
+                "Python's pyconfig.h doesn't seem to support your compiler. "
+                "Reason: %s. "
+                "Compiling may fail because of undefined preprocessor macros."
+                % details)
+
+        # Each of these is a StrictVersion instance, or None if the tool
+        # (or a parseable version number) was not found -- see get_versions().
+        self.gcc_version, self.ld_version, self.dllwrap_version = \
+            get_versions()
+        self.debug_print(self.compiler_type + ": gcc %s, ld %s, dllwrap %s\n" %
+                         (self.gcc_version,
+                          self.ld_version,
+                          self.dllwrap_version) )
+
+        # ld_version >= "2.10.90" and < "2.13" should also be able to use
+        # gcc -mdll instead of dllwrap
+        # Older dllwraps had own version numbers, newer ones use the
+        # same as the rest of binutils ( also ld )
+        # dllwrap 2.10.90 is buggy
+        # NOTE(review): comparing a StrictVersion (or None) against a str
+        # relies on Python 2 mixed-type comparison rules; None compares
+        # less than any string, so a missing ld selects dllwrap.
+        if self.ld_version >= "2.10.90":
+            self.linker_dll = "gcc"
+        else:
+            self.linker_dll = "dllwrap"
+
+        # ld_version >= "2.13" support -shared so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # Hard-code GCC because that's what this is all about.
+        # XXX optimization, warnings etc. should be customizable.
+        self.set_executables(compiler='gcc -mcygwin -O -Wall',
+                             compiler_so='gcc -mcygwin -mdll -O -Wall',
+                             compiler_cxx='g++ -mcygwin -O -Wall',
+                             linker_exe='gcc -mcygwin',
+                             linker_so=('%s -mcygwin %s' %
+                                        (self.linker_dll, shared_option)))
+
+        # cygwin and mingw32 need different sets of libraries
+        if self.gcc_version == "2.91.57":
+            # cygwin shouldn't need msvcrt, but without the dlls will crash
+            # (gcc version 2.91.57) -- perhaps something about initialization
+            self.dll_libraries=["msvcrt"]
+            self.warn(
+                "Consider upgrading to a newer version of gcc")
+        else:
+            self.dll_libraries=[]
+            # Include the appropriate MSVC runtime library if Python was built
+            # with MSVC 7.0 or 7.1.
+            # (sys.version embeds the compiler banner, e.g. "[MSC v.1310 ...]";
+            # the four digits after "MSC v." identify the MSVC version.)
+            msc_pos = sys.version.find('MSC v.')
+            if msc_pos != -1:
+                msc_ver = sys.version[msc_pos+6:msc_pos+10]
+                if msc_ver == '1300':
+                    # MSVC 7.0
+                    self.dll_libraries = ['msvcr70']
+                elif msc_ver == '1310':
+                    # MSVC 7.1
+                    self.dll_libraries = ['msvcr71']
+
+    # __init__ ()
+
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        """Compile a single source file, routing .rc/.res resource files
+        through windres (gcc cannot consume them directly)."""
+        if ext == '.rc' or ext == '.res':
+            # gcc needs '.res' and '.rc' compiled to object files !!!
+            try:
+                self.spawn(["windres", "-i", src, "-o", obj])
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+        else: # for other files use the C-compiler
+            try:
+                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
+                           extra_postargs)
+            except DistutilsExecError, msg:
+                raise CompileError, msg
+
+    def link (self,
+              target_desc,
+              objects,
+              output_filename,
+              output_dir=None,
+              libraries=None,
+              library_dirs=None,
+              runtime_library_dirs=None,
+              export_symbols=None,
+              debug=0,
+              extra_preargs=None,
+              extra_postargs=None,
+              build_temp=None,
+              target_lang=None):
+        """Link object files, writing export_symbols to a .def file
+        rather than passing them to the base class (see the module
+        comments for why --export-all-symbols is avoided)."""
+
+        # use separate copies, so we can modify the lists
+        extra_preargs = copy.copy(extra_preargs or [])
+        libraries = copy.copy(libraries or [])
+        objects = copy.copy(objects or [])
+
+        # Additional libraries
+        libraries.extend(self.dll_libraries)
+
+        # handle export symbols by creating a def-file
+        # with executables this only works with gcc/ld as linker
+        if ((export_symbols is not None) and
+            (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+            # (The linker doesn't do anything if output is up-to-date.
+            # So it would probably better to check if we really need this,
+            # but for this we had to insert some unchanged parts of
+            # UnixCCompiler, and this is not what we want.)
+
+            # we want to put some files in the same directory as the
+            # object files are, build_temp doesn't help much
+            # where are the object files
+            temp_dir = os.path.dirname(objects[0])
+            # name of dll to give the helper files the same base name
+            (dll_name, dll_extension) = os.path.splitext(
+                os.path.basename(output_filename))
+
+            # generate the filenames for these files
+            def_file = os.path.join(temp_dir, dll_name + ".def")
+            lib_file = os.path.join(temp_dir, 'lib' + dll_name + ".a")
+
+            # Generate .def file
+            contents = [
+                "LIBRARY %s" % os.path.basename(output_filename),
+                "EXPORTS"]
+            for sym in export_symbols:
+                contents.append(sym)
+            self.execute(write_file, (def_file, contents),
+                         "writing %s" % def_file)
+
+            # next add options for def-file and to creating import libraries
+
+            # dllwrap uses different options than gcc/ld
+            if self.linker_dll == "dllwrap":
+                extra_preargs.extend(["--output-lib", lib_file])
+                # for dllwrap we have to use a special option
+                extra_preargs.extend(["--def", def_file])
+            # we use gcc/ld here and can be sure ld is >= 2.9.10
+            else:
+                # doesn't work: bfd_close build\...\libfoo.a: Invalid operation
+                #extra_preargs.extend(["-Wl,--out-implib,%s" % lib_file])
+                # for gcc/ld the def-file is specified as any object files
+                objects.append(def_file)
+
+        #end: if ((export_symbols is not None) and
+        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):
+
+        # who wants symbols and a many times larger output file
+        # should explicitly switch the debug mode on
+        # otherwise we let dllwrap/ld strip the output file
+        # (On my machine: 10KB < stripped_file < ??100KB
+        #   unstripped_file = stripped_file + XXX KB
+        #  ( XXX=254 for a typical python extension))
+        if not debug:
+            extra_preargs.append("-s")
+
+        # Delegate the actual link; export_symbols is passed as None
+        # because they were already handled via the .def file above.
+        UnixCCompiler.link(self,
+                           target_desc,
+                           objects,
+                           output_filename,
+                           output_dir,
+                           libraries,
+                           library_dirs,
+                           runtime_library_dirs,
+                           None, # export_symbols, we do this in our def-file
+                           debug,
+                           extra_preargs,
+                           extra_postargs,
+                           build_temp,
+                           target_lang)
+
+    # link ()
+
+    # -- Miscellaneous methods -----------------------------------------
+
+    # overwrite the one from CCompiler to support rc and res-files
+    def object_filenames (self,
+                          source_filenames,
+                          strip_dir=0,
+                          output_dir=''):
+        """Map source filenames to object filenames; resource files keep
+        their extension (foo.rc -> foo.rc.o) since they are compiled to
+        object files too."""
+        if output_dir is None: output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
+            (base, ext) = os.path.splitext (os.path.normcase(src_name))
+            if ext not in (self.src_extensions + ['.rc','.res']):
+                raise UnknownFileError, \
+                      "unknown file type '%s' (from '%s')" % \
+                      (ext, src_name)
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext == '.res' or ext == '.rc':
+                # these need to be compiled to object files
+                obj_names.append (os.path.join (output_dir,
+                                            base + ext + self.obj_extension))
+            else:
+                obj_names.append (os.path.join (output_dir,
+                                            base + self.obj_extension))
+        return obj_names
+
+    # object_filenames ()
+
+# class CygwinCCompiler
+
+
+# the same as cygwin plus some additional parameters
+class Mingw32CCompiler (CygwinCCompiler):
+    """Same as CygwinCCompiler, but drives gcc in no-cygwin (mingw32)
+    mode and never links against the cygwin runtime DLL."""
+
+    compiler_type = 'mingw32'
+
+    def __init__ (self,
+                  verbose=0,
+                  dry_run=0,
+                  force=0):
+
+        CygwinCCompiler.__init__ (self, verbose, dry_run, force)
+
+        # ld_version >= "2.13" support -shared so use it instead of
+        # -mdll -static
+        if self.ld_version >= "2.13":
+            shared_option = "-shared"
+        else:
+            shared_option = "-mdll -static"
+
+        # A real mingw32 doesn't need to specify a different entry point,
+        # but cygwin 2.91.57 in no-cygwin-mode needs it.
+        if self.gcc_version <= "2.91.57":
+            entry_point = '--entry _DllMain@12'
+        else:
+            entry_point = ''
+
+        # Re-set the executables chosen by CygwinCCompiler.__init__,
+        # replacing -mcygwin with -mno-cygwin throughout.
+        self.set_executables(compiler='gcc -mno-cygwin -O -Wall',
+                             compiler_so='gcc -mno-cygwin -mdll -O -Wall',
+                             compiler_cxx='g++ -mno-cygwin -O -Wall',
+                             linker_exe='gcc -mno-cygwin',
+                             linker_so='%s -mno-cygwin %s %s'
+                                        % (self.linker_dll, shared_option,
+                                           entry_point))
+        # Maybe we should also append -mthreads, but then the finished
+        # dlls need another dll (mingwm10.dll see Mingw32 docs)
+        # (-mthreads: Support thread-safe exception handling on `Mingw32')
+
+        # no additional libraries needed
+        # (resets whatever dll_libraries CygwinCCompiler.__init__ chose)
+        self.dll_libraries=[]
+
+        # Include the appropriate MSVC runtime library if Python was built
+        # with MSVC 7.0 or 7.1.
+        msc_pos = sys.version.find('MSC v.')
+        if msc_pos != -1:
+            msc_ver = sys.version[msc_pos+6:msc_pos+10]
+            if msc_ver == '1300':
+                # MSVC 7.0
+                self.dll_libraries = ['msvcr70']
+            elif msc_ver == '1310':
+                # MSVC 7.1
+                self.dll_libraries = ['msvcr71']
+
+    # __init__ ()
+
+# class Mingw32CCompiler
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if he is using a unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+
+    """Check if the current Python installation (specifically, pyconfig.h)
+    appears amenable to building extensions with GCC.  Returns a tuple
+    (status, details), where 'status' is one of the following constants:
+      CONFIG_H_OK
+        all is well, go ahead and compile
+      CONFIG_H_NOTOK
+        doesn't look good
+      CONFIG_H_UNCERTAIN
+        not sure -- unable to read pyconfig.h
+    'details' is a human-readable string explaining the situation.
+
+    Note there are two ways to conclude "OK": either 'sys.version' contains
+    the string "GCC" (implying that this Python was built with GCC), or the
+    installed "pyconfig.h" contains the string "__GNUC__".
+    """
+
+    # XXX since this function also checks sys.version, it's not strictly a
+    # "pyconfig.h" check -- should probably be renamed...
+
+    from distutils import sysconfig
+    import string
+    # if sys.version contains GCC then python was compiled with
+    # GCC, and the pyconfig.h file should be OK
+    if string.find(sys.version,"GCC") >= 0:
+        return (CONFIG_H_OK, "sys.version mentions 'GCC'")
+
+    # Otherwise fall back to scanning pyconfig.h itself.
+    fn = sysconfig.get_config_h_filename()
+    try:
+        # It would probably better to read single lines to search.
+        # But we do this only once, and it is fast enough
+        f = open(fn)
+        s = f.read()
+        f.close()
+
+    except IOError, exc:
+        # if we can't read this file, we cannot say it is wrong
+        # the compiler will complain later about this file as missing
+        return (CONFIG_H_UNCERTAIN,
+                "couldn't read '%s': %s" % (fn, exc.strerror))
+
+    else:
+        # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
+        if string.find(s,"__GNUC__") >= 0:
+            return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
+        else:
+            return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
+
+
+
+def get_versions():
+    """ Try to find out the versions of gcc, ld and dllwrap.
+        If not possible it returns None for it.
+
+        Returns a 3-tuple (gcc_version, ld_version, dllwrap_version);
+        each element is a StrictVersion, or None when the executable
+        is not on PATH or its output contains no dotted version number.
+    """
+    from distutils.version import StrictVersion
+    from distutils.spawn import find_executable
+    import re
+
+    # 'gcc -dumpversion' prints just the version, e.g. "3.2.3".
+    gcc_exe = find_executable('gcc')
+    if gcc_exe:
+        out = os.popen(gcc_exe + ' -dumpversion','r')
+        out_string = out.read()
+        out.close()
+        result = re.search('(\d+\.\d+(\.\d+)*)',out_string)
+        if result:
+            gcc_version = StrictVersion(result.group(1))
+        else:
+            gcc_version = None
+    else:
+        gcc_version = None
+    # 'ld -v' prints a banner; grab the first dotted number from it.
+    ld_exe = find_executable('ld')
+    if ld_exe:
+        out = os.popen(ld_exe + ' -v','r')
+        out_string = out.read()
+        out.close()
+        result = re.search('(\d+\.\d+(\.\d+)*)',out_string)
+        if result:
+            ld_version = StrictVersion(result.group(1))
+        else:
+            ld_version = None
+    else:
+        ld_version = None
+    # dllwrap's banner puts a space before the version, hence the
+    # leading space in this pattern.
+    dllwrap_exe = find_executable('dllwrap')
+    if dllwrap_exe:
+        out = os.popen(dllwrap_exe + ' --version','r')
+        out_string = out.read()
+        out.close()
+        result = re.search(' (\d+\.\d+(\.\d+)*)',out_string)
+        if result:
+            dllwrap_version = StrictVersion(result.group(1))
+        else:
+            dllwrap_version = None
+    else:
+        dllwrap_version = None
+    return (gcc_version, ld_version, dllwrap_version)
diff --git a/depot_tools/release/win/python_24/Lib/distutils/debug.py b/depot_tools/release/win/python_24/Lib/distutils/debug.py
new file mode 100644
index 0000000..6ccd3f6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/debug.py
@@ -0,0 +1,9 @@
+import os
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: debug.py,v 1.4 2004/11/10 22:23:14 loewis Exp $"
+
+# If DISTUTILS_DEBUG is anything other than the empty string, we run in
+# debug mode.
+DEBUG = os.environ.get('DISTUTILS_DEBUG')
diff --git a/depot_tools/release/win/python_24/Lib/distutils/dep_util.py b/depot_tools/release/win/python_24/Lib/distutils/dep_util.py
new file mode 100644
index 0000000..01df2bf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/dep_util.py
@@ -0,0 +1,95 @@
+"""distutils.dep_util
+
+Utility functions for simple, timestamp-based dependency of files
+and groups of files; also, function based entirely on such
+timestamp dependency analysis."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: dep_util.py,v 1.7 2004/11/10 22:23:14 loewis Exp $"
+
+import os
+from distutils.errors import DistutilsFileError
+
+
+def newer (source, target):
+    """Return true if 'source' exists and is more recently modified than
+    'target', or if 'source' exists and 'target' doesn't.  Return false if
+    both exist and 'target' is the same age or younger than 'source'.
+    Raise DistutilsFileError if 'source' does not exist.
+    """
+    if not os.path.exists(source):
+        raise DistutilsFileError, "file '%s' does not exist" % source
+    # A missing target is always out-of-date.
+    if not os.path.exists(target):
+        return 1
+
+    # Compare modification timestamps (seconds since epoch).
+    from stat import ST_MTIME
+    mtime1 = os.stat(source)[ST_MTIME]
+    mtime2 = os.stat(target)[ST_MTIME]
+
+    return mtime1 > mtime2
+
+# newer ()
+
+
+def newer_pairwise (sources, targets):
+    """Walk two filename lists in parallel, testing if each source is newer
+    than its corresponding target.  Return a pair of lists (sources,
+    targets) where source is newer than target, according to the semantics
+    of 'newer()'.
+
+    Raise ValueError if the two lists differ in length.
+    """
+    if len(sources) != len(targets):
+        raise ValueError, "'sources' and 'targets' must be same length"
+
+    # build a pair of lists (sources, targets) where  source is newer
+    n_sources = []
+    n_targets = []
+    for i in range(len(sources)):
+        if newer(sources[i], targets[i]):
+            n_sources.append(sources[i])
+            n_targets.append(targets[i])
+
+    return (n_sources, n_targets)
+
+# newer_pairwise ()
+
+
+def newer_group (sources, target, missing='error'):
+    """Return true if 'target' is out-of-date with respect to any file
+    listed in 'sources'.  In other words, if 'target' exists and is newer
+    than every file in 'sources', return false; otherwise return true.
+    'missing' controls what we do when a source file is missing; the
+    default ("error") is to blow up with an OSError from inside 'stat()';
+    if it is "ignore", we silently drop any missing source files; if it is
+    "newer", any missing source files make us assume that 'target' is
+    out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
+    carry out commands that wouldn't work because inputs are missing, but
+    that doesn't matter because you're not actually going to run the
+    commands).
+    """
+    # If the target doesn't even exist, then it's definitely out-of-date.
+    if not os.path.exists(target):
+        return 1
+
+    # Otherwise we have to find out the hard way: if *any* source file
+    # is more recent than 'target', then 'target' is out-of-date and
+    # we can immediately return true.  If we fall through to the end
+    # of the loop, then 'target' is up-to-date and we return false.
+    from stat import ST_MTIME
+    target_mtime = os.stat(target)[ST_MTIME]
+    for source in sources:
+        if not os.path.exists(source):
+            if missing == 'error':      # blow up when we stat() the file
+                pass                    # (the os.stat() below raises OSError)
+            elif missing == 'ignore':   # missing source dropped from
+                continue                #  target's dependency list
+            elif missing == 'newer':    # missing source means target is
+                return 1                #  out-of-date
+
+        source_mtime = os.stat(source)[ST_MTIME]
+        if source_mtime > target_mtime:
+            return 1
+    else:
+        # for/else: loop ran to completion (no 'break' exists), so no
+        # source was newer -- target is up-to-date.
+        return 0
+
+# newer_group ()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/dir_util.py b/depot_tools/release/win/python_24/Lib/distutils/dir_util.py
new file mode 100644
index 0000000..d5c6ea2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/dir_util.py
@@ -0,0 +1,227 @@
+"""distutils.dir_util
+
+Utility functions for manipulating directories and directory trees."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: dir_util.py,v 1.15 2004/11/10 22:23:14 loewis Exp $"
+
+import os, sys
+from types import *
+from distutils.errors import DistutilsFileError, DistutilsInternalError
+from distutils import log
+
+# cache of directories created by mkpath() -- in addition to cheapening
+# redundant calls, eliminates redundant "creating /foo/bar/baz" messages in dry-run mode
+_path_created = {}
+
+# I don't use os.makedirs because a) it's new to Python 1.5.2, and
+# b) it blows up if the directory already exists (I want to silently
+# succeed in that case).
+def mkpath (name, mode=0777, verbose=0, dry_run=0):
+    """Create a directory and any missing ancestor directories.  If the
+       directory already exists (or if 'name' is the empty string, which
+       means the current directory, which of course exists), then do
+       nothing.  Raise DistutilsFileError if unable to create some
+       directory along the way (eg. some sub-path exists, but is a file
+       rather than a directory).  If 'verbose' is true, print a one-line
+       summary of each mkdir to stdout.  Return the list of directories
+       actually created."""
+
+    global _path_created
+
+    # Detect a common bug -- name is None
+    if type(name) is not StringType:
+        raise DistutilsInternalError, \
+              "mkpath: 'name' must be a string (got %r)" % (name,)
+
+    # XXX what's the better way to handle verbosity? print as we create
+    # each directory in the path (the current behaviour), or only announce
+    # the creation of the whole path? (quite easy to do the latter since
+    # we're not using a recursive algorithm)
+
+    name = os.path.normpath(name)
+    created_dirs = []
+    # Nothing to do if the directory exists or was created earlier in
+    # this process (the module-level _path_created cache).
+    if os.path.isdir(name) or name == '':
+        return created_dirs
+    if _path_created.get(os.path.abspath(name)):
+        return created_dirs
+
+    (head, tail) = os.path.split(name)
+    tails = [tail]                      # stack of lone dirs to create
+
+    while head and tail and not os.path.isdir(head):
+        #print "splitting '%s': " % head,
+        (head, tail) = os.path.split(head)
+        #print "to ('%s','%s')" % (head, tail)
+        tails.insert(0, tail)          # push next higher dir onto stack
+
+    #print "stack of tails:", tails
+
+    # now 'head' contains the deepest directory that already exists
+    # (that is, the child of 'head' in 'name' is the highest directory
+    # that does *not* exist)
+    for d in tails:
+        #print "head = %s, d = %s: " % (head, d),
+        head = os.path.join(head, d)
+        abs_head = os.path.abspath(head)
+
+        if _path_created.get(abs_head):
+            continue
+
+        log.info("creating %s", head)
+
+        if not dry_run:
+            try:
+                os.mkdir(head)
+                created_dirs.append(head)
+            except OSError, exc:
+                # exc[-1] is the last element of the exception args --
+                # presumably the strerror text; TODO confirm for all
+                # OSError shapes.
+                raise DistutilsFileError, \
+                      "could not create '%s': %s" % (head, exc[-1])
+
+        _path_created[abs_head] = 1
+    return created_dirs
+
+# mkpath ()
+
+
+def create_tree (base_dir, files, mode=0777, verbose=0, dry_run=0):
+
+    """Create all the empty directories under 'base_dir' needed to
+       put 'files' there.  'base_dir' is just the a name of a directory
+       which doesn't necessarily exist yet; 'files' is a list of filenames
+       to be interpreted relative to 'base_dir'.  'base_dir' + the
+       directory portion of every file in 'files' will be created if it
+       doesn't already exist.  'mode', 'verbose' and 'dry_run' flags are as
+       for 'mkpath()'."""
+
+    # First get the list of directories to create
+    # (a dict is used as a set to de-duplicate -- Python 2.1 compatible)
+    need_dir = {}
+    for file in files:
+        need_dir[os.path.join(base_dir, os.path.dirname(file))] = 1
+    need_dirs = need_dir.keys()
+    need_dirs.sort()
+
+    # Now create them
+    for dir in need_dirs:
+        mkpath(dir, mode, dry_run=dry_run)
+
+# create_tree ()
+
+
+def copy_tree (src, dst,
+               preserve_mode=1,
+               preserve_times=1,
+               preserve_symlinks=0,
+               update=0,
+               verbose=0,
+               dry_run=0):
+
+    """Copy an entire directory tree 'src' to a new location 'dst'.  Both
+       'src' and 'dst' must be directory names.  If 'src' is not a
+       directory, raise DistutilsFileError.  If 'dst' does not exist, it is
+       created with 'mkpath()'.  The end result of the copy is that every
+       file in 'src' is copied to 'dst', and directories under 'src' are
+       recursively copied to 'dst'.  Return the list of files that were
+       copied or might have been copied, using their output name.  The
+       return value is unaffected by 'update' or 'dry_run': it is simply
+       the list of all files under 'src', with the names changed to be
+       under 'dst'.
+
+       'preserve_mode' and 'preserve_times' are the same as for
+       'copy_file'; note that they only apply to regular files, not to
+       directories.  If 'preserve_symlinks' is true, symlinks will be
+       copied as symlinks (on platforms that support them!); otherwise
+       (the default), the destination of the symlink will be copied.
+       'update' and 'verbose' are the same as for 'copy_file'."""
+
+    from distutils.file_util import copy_file
+
+    if not dry_run and not os.path.isdir(src):
+        raise DistutilsFileError, \
+              "cannot copy tree '%s': not a directory" % src
+    try:
+        names = os.listdir(src)
+    # Python 2 tuple unpacking in the except clause; only errstr is used.
+    except os.error, (errno, errstr):
+        if dry_run:
+            # in dry-run mode the source may legitimately not exist yet
+            names = []
+        else:
+            raise DistutilsFileError, \
+                  "error listing files in '%s': %s" % (src, errstr)
+
+    if not dry_run:
+        mkpath(dst)
+
+    outputs = []
+
+    for n in names:
+        src_name = os.path.join(src, n)
+        dst_name = os.path.join(dst, n)
+
+        if preserve_symlinks and os.path.islink(src_name):
+            # re-create the symlink at the destination instead of copying
+            # what it points at
+            link_dest = os.readlink(src_name)
+            log.info("linking %s -> %s", dst_name, link_dest)
+            if not dry_run:
+                os.symlink(link_dest, dst_name)
+            outputs.append(dst_name)
+
+        elif os.path.isdir(src_name):
+            # recurse into subdirectories
+            outputs.extend(
+                copy_tree(src_name, dst_name, preserve_mode,
+                          preserve_times, preserve_symlinks, update,
+                          dry_run=dry_run))
+        else:
+            copy_file(src_name, dst_name, preserve_mode,
+                      preserve_times, update, dry_run=dry_run)
+            outputs.append(dst_name)
+
+    return outputs
+
+# copy_tree ()
+
+# Helper for remove_tree()
+def _build_cmdtuple(path, cmdtuples):
+    """Append (function, argument) pairs to 'cmdtuples' that, executed in
+    order, delete everything under 'path' and finally 'path' itself
+    (depth-first, so files/subdirs are removed before their parent)."""
+    for f in os.listdir(path):
+        real_f = os.path.join(path,f)
+        # recurse into real directories only; symlinked dirs are removed
+        # as plain entries (os.remove), never followed
+        if os.path.isdir(real_f) and not os.path.islink(real_f):
+            _build_cmdtuple(real_f, cmdtuples)
+        else:
+            cmdtuples.append((os.remove, real_f))
+    cmdtuples.append((os.rmdir, path))
+
+
+def remove_tree (directory, verbose=0, dry_run=0):
+    """Recursively remove an entire directory tree.  Any errors are ignored
+    (apart from being reported to stdout if 'verbose' is true).
+    """
+    from distutils.util import grok_environment_error
+    global _path_created
+
+    log.info("removing '%s' (and everything under it)", directory)
+    if dry_run:
+        return
+    # Build the full depth-first deletion plan up front, then execute it.
+    cmdtuples = []
+    _build_cmdtuple(directory, cmdtuples)
+    for cmd in cmdtuples:
+        try:
+            # legacy Python 2 idiom, equivalent to cmd[0](cmd[1])
+            apply(cmd[0], (cmd[1],))
+            # remove dir from cache if it's already there
+            abspath = os.path.abspath(cmd[1])
+            if _path_created.has_key(abspath):
+                del _path_created[abspath]
+        except (IOError, OSError), exc:
+            # errors are reported but deliberately not propagated
+            log.warn(grok_environment_error(
+                    exc, "error removing %s: " % directory))
+
+
+def ensure_relative (path):
+    """Take the full path 'path', and make it a relative path so
+    it can be the second argument to os.path.join().
+    """
+    drive, path = os.path.splitdrive(path)
+    if sys.platform == 'mac':
+        # classic MacOS: a leading separator means relative
+        return os.sep + path
+    else:
+        # strip a leading separator, re-attaching any drive letter
+        if path[0:1] == os.sep:
+            path = drive + path[1:]
+        return path
diff --git a/depot_tools/release/win/python_24/Lib/distutils/dist.py b/depot_tools/release/win/python_24/Lib/distutils/dist.py
new file mode 100644
index 0000000..571a348
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/dist.py
@@ -0,0 +1,1143 @@
+"""distutils.dist
+
+Provides the Distribution class, which represents the module distribution
+being built/installed/distributed.
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: dist.py,v 1.72 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, os, string, re
+from types import *
+from copy import copy
+
+try:
+    import warnings
+except ImportError:
+    warnings = None
+
+from distutils.errors import *
+from distutils.fancy_getopt import FancyGetopt, translate_longopt
+from distutils.util import check_environ, strtobool, rfc822_escape
+from distutils import log
+from distutils.debug import DEBUG
+
+# Regex to define acceptable Distutils command names.  This is not *quite*
+# the same as a Python NAME -- I don't allow leading underscores.  The fact
+# that they're very similar is no coincidence; the default naming scheme is
+# to look for a Python module named after the command.
+command_re = re.compile (r'^[a-zA-Z]([a-zA-Z0-9_]*)$')
+
+
+class Distribution:
+    """The core of the Distutils.  Most of the work hiding behind 'setup'
+    is really done within a Distribution instance, which farms the work out
+    to the Distutils commands specified on the command line.
+
+    Setup scripts will almost never instantiate Distribution directly,
+    unless the 'setup()' function is totally inadequate to their needs.
+    However, it is conceivable that a setup script might wish to subclass
+    Distribution for some specialized purpose, and then pass the subclass
+    to 'setup()' as the 'distclass' keyword argument.  If so, it is
+    necessary to respect the expectations that 'setup' has of Distribution.
+    See the code for 'setup()', in core.py, for details.
+    """
+
+
    # 'global_options' describes the command-line options that may be
    # supplied to the setup script prior to any actual commands.
    # Eg. "./setup.py -n" or "./setup.py --quiet" both take advantage of
    # these global options.  This list should be kept to a bare minimum,
    # since every global option is also valid as a command option -- and we
    # don't want to pollute the commands with too many options that they
    # have minimal control over.
    # The fourth entry for verbose means that it can be repeated.
    global_options = [('verbose', 'v', "run verbosely (default)", 1),
                      ('quiet', 'q', "run quietly (turns verbosity off)"),
                      ('dry-run', 'n', "don't actually do anything"),
                      ('help', 'h', "show detailed help message"),
                     ]

    # options that are not propagated to the commands
    display_options = [
        ('help-commands', None,
         "list all available commands"),
        ('name', None,
         "print package name"),
        ('version', 'V',
         "print package version"),
        ('fullname', None,
         "print <package name>-<version>"),
        ('author', None,
         "print the author's name"),
        ('author-email', None,
         "print the author's email address"),
        ('maintainer', None,
         "print the maintainer's name"),
        ('maintainer-email', None,
         "print the maintainer's email address"),
        ('contact', None,
         "print the maintainer's name if known, else the author's"),
        ('contact-email', None,
         "print the maintainer's email address if known, else the author's"),
        ('url', None,
         "print the URL for this package"),
        ('license', None,
         "print the license of the package"),
        ('licence', None,
         "alias for --license"),
        ('description', None,
         "print the package description"),
        ('long-description', None,
         "print the long package description"),
        ('platforms', None,
         "print the list of platforms"),
        ('classifiers', None,
         "print the list of classifiers"),
        ('keywords', None,
         "print the list of keywords"),
        ]

    # Attribute names for the display options above, with '-' mapped to
    # '_' by translate_longopt(); __init__() creates one instance
    # attribute (default 0) per name.
    display_option_names = map(lambda x: translate_longopt(x[0]),
                               display_options)

    # negative options are options that exclude other options
    negative_opt = {'quiet': 'verbose'}
+
+
+    # -- Creation/initialization methods -------------------------------
+
    def __init__ (self, attrs=None):
        """Construct a new Distribution instance: initialize all the
        attributes of a Distribution, and then use 'attrs' (a dictionary
        mapping attribute names to values) to assign some of those
        attributes their "real" values.  (Any attributes not mentioned in
        'attrs' will be assigned to some null value: 0, None, an empty list
        or dictionary, etc.)  Most importantly, initialize the
        'command_obj' attribute to the empty dictionary; this will be
        filled in with real command objects by 'parse_command_line()'.
        """

        # Default values for our command-line options
        self.verbose = 1
        self.dry_run = 0
        self.help = 0
        # One flag attribute per display option (--name, --version, ...).
        for attr in self.display_option_names:
            setattr(self, attr, 0)

        # Store the distribution meta-data (name, version, author, and so
        # forth) in a separate object -- we're getting to have enough
        # information here (and enough command-line options) that it's
        # worth it.  Also delegate 'get_XXX()' methods to the 'metadata'
        # object in a sneaky and underhanded (but efficient!) way.
        self.metadata = DistributionMetadata()
        for basename in self.metadata._METHOD_BASENAMES:
            method_name = "get_" + basename
            setattr(self, method_name, getattr(self.metadata, method_name))

        # 'cmdclass' maps command names to class objects, so we
        # can 1) quickly figure out which class to instantiate when
        # we need to create a new command object, and 2) have a way
        # for the setup script to override command classes
        self.cmdclass = {}

        # 'command_packages' is a list of packages in which commands
        # are searched for.  The factory for command 'foo' is expected
        # to be named 'foo' in the module 'foo' in one of the packages
        # named here.  This list is searched from the left; an error
        # is raised if no named package provides the command being
        # searched for.  (Always access using get_command_packages().)
        self.command_packages = None

        # 'script_name' and 'script_args' are usually set to sys.argv[0]
        # and sys.argv[1:], but they can be overridden when the caller is
        # not necessarily a setup script run from the command-line.
        self.script_name = None
        self.script_args = None

        # 'command_options' is where we store command options between
        # parsing them (from config files, the command-line, etc.) and when
        # they are actually needed -- ie. when the command in question is
        # instantiated.  It is a dictionary of dictionaries of 2-tuples:
        #   command_options = { command_name : { option : (source, value) } }
        self.command_options = {}

        # These options are really the business of various commands, rather
        # than of the Distribution itself.  We provide aliases for them in
        # Distribution as a convenience to the developer.
        self.packages = None
        self.package_data = {}
        self.package_dir = None
        self.py_modules = None
        self.libraries = None
        self.headers = None
        self.ext_modules = None
        self.ext_package = None
        self.include_dirs = None
        self.extra_path = None
        self.scripts = None
        self.data_files = None

        # And now initialize bookkeeping stuff that can't be supplied by
        # the caller at all.  'command_obj' maps command names to
        # Command instances -- that's how we enforce that every command
        # class is a singleton.
        self.command_obj = {}

        # 'have_run' maps command names to boolean values; it keeps track
        # of whether we have actually run a particular command, to make it
        # cheap to "run" a command whenever we think we might need to -- if
        # it's already been done, no need for expensive filesystem
        # operations, we just check the 'have_run' dictionary and carry on.
        # It's only safe to query 'have_run' for a command class that has
        # been instantiated -- a false value will be inserted when the
        # command object is created, and replaced with a true value when
        # the command is successfully run.  Thus it's probably best to use
        # '.get()' rather than a straight lookup.
        self.have_run = {}

        # Now we'll use the attrs dictionary (ultimately, keyword args from
        # the setup script) to possibly override any or all of these
        # distribution options.

        if attrs:

            # Pull out the set of command options and work on them
            # specifically.  Note that this order guarantees that aliased
            # command options will override any supplied redundantly
            # through the general options dictionary.
            options = attrs.get('options')
            if options:
                del attrs['options']
                for (command, cmd_options) in options.items():
                    opt_dict = self.get_option_dict(command)
                    for (opt, val) in cmd_options.items():
                        opt_dict[opt] = ("setup script", val)

            # Accept the British spelling 'licence' as a deprecated alias
            # for 'license'.
            if attrs.has_key('licence'):
                attrs['license'] = attrs['licence']
                del attrs['licence']
                msg = "'licence' distribution option is deprecated; use 'license'"
                if warnings is not None:
                    warnings.warn(msg)
                else:
                    # 'warnings' may be unavailable (see the guarded
                    # import at the top of the module).
                    sys.stderr.write(msg + "\n")

            # Now work on the rest of the attributes.  Any attribute that's
            # not already defined is invalid!
            for (key,val) in attrs.items():
                if hasattr(self.metadata, key):
                    setattr(self.metadata, key, val)
                elif hasattr(self, key):
                    setattr(self, key, val)
                else:
                    # Unknown options are reported but are not fatal.
                    msg = "Unknown distribution option: %s" % repr(key)
                    if warnings is not None:
                        warnings.warn(msg)
                    else:
                        sys.stderr.write(msg + "\n")

        self.finalize_options()

    # __init__ ()
+
+
+    def get_option_dict (self, command):
+        """Get the option dictionary for a given command.  If that
+        command's option dictionary hasn't been created yet, then create it
+        and return the new dictionary; otherwise, return the existing
+        option dictionary.
+        """
+
+        dict = self.command_options.get(command)
+        if dict is None:
+            dict = self.command_options[command] = {}
+        return dict
+
+
    def dump_option_dicts (self, header=None, commands=None, indent=""):
        """Print (for debugging) the option dictionaries stored in
        'self.command_options'.  'commands' restricts the dump to the
        named commands (default: every known command, sorted); 'header'
        is an optional title line; 'indent' is prepended to every line
        of output.
        """
        from pprint import pformat

        if commands is None:             # dump all command option dicts
            commands = self.command_options.keys()
            commands.sort()

        if header is not None:
            print indent + header
            indent = indent + "  "

        if not commands:
            print indent + "no commands known yet"
            return

        for cmd_name in commands:
            opt_dict = self.command_options.get(cmd_name)
            if opt_dict is None:
                print indent + "no option dict for '%s' command" % cmd_name
            else:
                print indent + "option dict for '%s' command:" % cmd_name
                # pformat() yields a multi-line repr; indent each line.
                out = pformat(opt_dict)
                for line in string.split(out, "\n"):
                    print indent + "  " + line

    # dump_option_dicts ()
+
+
+
+    # -- Config file finding/parsing methods ---------------------------
+
    def find_config_files (self):
        """Find as many configuration files as should be processed for this
        platform, and return a list of filenames in the order in which they
        should be parsed.  The filenames returned are guaranteed to exist
        (modulo nasty race conditions).

        There are three possible config files: distutils.cfg in the
        Distutils installation directory (ie. where the top-level
        Distutils __inst__.py file lives), a file in the user's home
        directory named .pydistutils.cfg on Unix and pydistutils.cfg
        on Windows/Mac, and setup.cfg in the current directory.
        """
        files = []
        check_environ()

        # Where to look for the system-wide Distutils config file
        sys_dir = os.path.dirname(sys.modules['distutils'].__file__)

        # Look for the system config file
        sys_file = os.path.join(sys_dir, "distutils.cfg")
        if os.path.isfile(sys_file):
            files.append(sys_file)

        # What to call the per-user config file
        if os.name == 'posix':
            user_filename = ".pydistutils.cfg"
        else:
            user_filename = "pydistutils.cfg"

        # And look for the user config file -- only when HOME is set in
        # the environment
        if os.environ.has_key('HOME'):
            user_file = os.path.join(os.environ.get('HOME'), user_filename)
            if os.path.isfile(user_file):
                files.append(user_file)

        # All platforms support local setup.cfg
        local_file = "setup.cfg"
        if os.path.isfile(local_file):
            files.append(local_file)

        return files

    # find_config_files ()
+
+
    def parse_config_files (self, filenames=None):
        """Parse each of 'filenames' (default: the result of
        'find_config_files()') with ConfigParser, recording every option
        as a (filename, value) pair in the per-section dictionary
        returned by 'get_option_dict()'.  Options from a [global]
        section are additionally applied to this Distribution instance.
        """

        from ConfigParser import ConfigParser

        if filenames is None:
            filenames = self.find_config_files()

        if DEBUG: print "Distribution.parse_config_files():"

        parser = ConfigParser()
        for filename in filenames:
            if DEBUG: print "  reading", filename
            parser.read(filename)
            for section in parser.sections():
                options = parser.options(section)
                opt_dict = self.get_option_dict(section)

                for opt in options:
                    if opt != '__name__':
                        val = parser.get(section,opt)
                        # Long option names use '-'; attribute/dict keys
                        # use '_'.
                        opt = string.replace(opt, '-', '_')
                        opt_dict[opt] = (filename, val)

            # Make the ConfigParser forget everything (so we retain
            # the original filenames that options come from)
            parser.__init__()

        # If there was a "global" section in the config file, use it
        # to set Distribution options.

        if self.command_options.has_key('global'):
            for (opt, (src, val)) in self.command_options['global'].items():
                alias = self.negative_opt.get(opt)
                try:
                    if alias:
                        # Negative option: set the aliased attribute to
                        # the opposite boolean value.
                        setattr(self, alias, not strtobool(val))
                    elif opt in ('verbose', 'dry_run'): # ugh!
                        setattr(self, opt, strtobool(val))
                    else:
                        setattr(self, opt, val)
                except ValueError, msg:
                    raise DistutilsOptionError, msg

    # parse_config_files ()
+
+
+    # -- Command-line parsing methods ----------------------------------
+
    def parse_command_line (self):
        """Parse the setup script's command line, taken from the
        'script_args' instance attribute (which defaults to 'sys.argv[1:]'
        -- see 'setup()' in core.py).  This list is first processed for
        "global options" -- options that set attributes of the Distribution
        instance.  Then, it is alternately scanned for Distutils commands
        and options for that command.  Each new command terminates the
        options for the previous command.  The allowed options for a
        command are determined by the 'user_options' attribute of the
        command class -- thus, we have to be able to load command classes
        in order to parse the command line.  Any error in that 'options'
        attribute raises DistutilsGetoptError; any error on the
        command-line raises DistutilsArgError.  If no Distutils commands
        were found on the command line, raises DistutilsArgError.  Return
        true if command-line was successfully parsed and we should carry
        on with executing commands; false if no errors but we shouldn't
        execute commands (currently, this only happens if user asks for
        help).
        """
        #
        # We now have enough information to show the Macintosh dialog
        # that allows the user to interactively specify the "command line".
        #
        toplevel_options = self._get_toplevel_options()
        if sys.platform == 'mac':
            import EasyDialogs
            cmdlist = self.get_command_list()
            self.script_args = EasyDialogs.GetArgv(
                toplevel_options + self.display_options, cmdlist)

        # We have to parse the command line a bit at a time -- global
        # options, then the first command, then its options, and so on --
        # because each command will be handled by a different class, and
        # the options that are valid for a particular class aren't known
        # until we have loaded the command class, which doesn't happen
        # until we know what the command is.

        self.commands = []
        parser = FancyGetopt(toplevel_options + self.display_options)
        parser.set_negative_aliases(self.negative_opt)
        parser.set_aliases({'licence': 'license'})
        # getopt() stores option values on 'self' and returns the
        # leftover arguments (commands and their options).
        args = parser.getopt(args=self.script_args, object=self)
        option_order = parser.get_option_order()
        log.set_verbosity(self.verbose)

        # for display options we return immediately
        if self.handle_display_options(option_order):
            return

        # Consume the remaining arguments one command (plus its
        # options) at a time.
        while args:
            args = self._parse_command_opts(parser, args)
            if args is None:            # user asked for help (and got it)
                return

        # Handle the cases of --help as a "global" option, ie.
        # "setup.py --help" and "setup.py --help command ...".  For the
        # former, we show global options (--verbose, --dry-run, etc.)
        # and display-only options (--name, --version, etc.); for the
        # latter, we omit the display-only options and show help for
        # each command listed on the command line.
        if self.help:
            self._show_help(parser,
                            display_options=len(self.commands) == 0,
                            commands=self.commands)
            return

        # Oops, no commands found -- an end-user error
        if not self.commands:
            raise DistutilsArgError, "no commands supplied"

        # All is well: return true
        return 1

    # parse_command_line()
+
+    def _get_toplevel_options (self):
+        """Return the non-display options recognized at the top level.
+
+        This includes options that are recognized *only* at the top
+        level as well as options recognized for commands.
+        """
+        return self.global_options + [
+            ("command-packages=", None,
+             "list of packages that provide distutils commands"),
+            ]
+
    def _parse_command_opts (self, parser, args):
        """Parse the command-line options for a single command.
        'parser' must be a FancyGetopt instance; 'args' must be the list
        of arguments, starting with the current command (whose options
        we are about to parse).  Returns a new version of 'args' with
        the next command at the front of the list; will be the empty
        list if there are no more commands on the command line.  Returns
        None if the user asked for help on this command.
        """
        # late import because of mutual dependence between these modules
        from distutils.cmd import Command

        # Pull the current command from the head of the command line
        command = args[0]
        if not command_re.match(command):
            raise SystemExit, "invalid command name '%s'" % command
        self.commands.append(command)

        # Dig up the command class that implements this command, so we
        # 1) know that it's a valid command, and 2) know which options
        # it takes.
        try:
            cmd_class = self.get_command_class(command)
        except DistutilsModuleError, msg:
            raise DistutilsArgError, msg

        # Require that the command class be derived from Command -- want
        # to be sure that the basic "command" interface is implemented.
        if not issubclass(cmd_class, Command):
            raise DistutilsClassError, \
                  "command class %s must subclass Command" % cmd_class

        # Also make sure that the command object provides a list of its
        # known options.
        if not (hasattr(cmd_class, 'user_options') and
                type(cmd_class.user_options) is ListType):
            raise DistutilsClassError, \
                  ("command class %s must provide " +
                   "'user_options' attribute (a list of tuples)") % \
                  cmd_class

        # If the command class has a list of negative alias options,
        # merge it in with the global negative aliases.
        negative_opt = self.negative_opt
        if hasattr(cmd_class, 'negative_opt'):
            # Copy first so 'self.negative_opt' isn't mutated.
            negative_opt = copy(negative_opt)
            negative_opt.update(cmd_class.negative_opt)

        # Check for help_options in command class.  They have a different
        # format (tuple of four) so we need to preprocess them here.
        if (hasattr(cmd_class, 'help_options') and
            type(cmd_class.help_options) is ListType):
            help_options = fix_help_options(cmd_class.help_options)
        else:
            help_options = []


        # All commands support the global options too, just by adding
        # in 'global_options'.
        parser.set_option_table(self.global_options +
                                cmd_class.user_options +
                                help_options)
        parser.set_negative_aliases(negative_opt)
        # getopt() returns the leftover arguments; option values become
        # attributes of 'opts'.
        (args, opts) = parser.getopt(args[1:])
        if hasattr(opts, 'help') and opts.help:
            self._show_help(parser, display_options=0, commands=[cmd_class])
            return

        if (hasattr(cmd_class, 'help_options') and
            type(cmd_class.help_options) is ListType):
            help_option_found=0
            for (help_option, short, desc, func) in cmd_class.help_options:
                if hasattr(opts, parser.get_attr_name(help_option)):
                    help_option_found=1
                    #print "showing help for option %s of command %s" % \
                    #      (help_option[0],cmd_class)

                    if callable(func):
                        func()
                    else:
                        raise DistutilsClassError(
                            "invalid help function %r for help option '%s': "
                            "must be a callable object (function, etc.)"
                            % (func, help_option))

            # Implicit None return tells the caller that help was shown.
            if help_option_found:
                return

        # Put the options from the command-line into their official
        # holding pen, the 'command_options' dictionary.
        opt_dict = self.get_option_dict(command)
        for (name, value) in vars(opts).items():
            opt_dict[name] = ("command line", value)

        return args

    # _parse_command_opts ()
+
+    def finalize_options (self):
+        """Set final values for all the options on the Distribution
+        instance, analogous to the .finalize_options() method of Command
+        objects.
+        """
+
+        keywords = self.metadata.keywords
+        if keywords is not None:
+            if type(keywords) is StringType:
+                keywordlist = string.split(keywords, ',')
+                self.metadata.keywords = map(string.strip, keywordlist)
+
+        platforms = self.metadata.platforms
+        if platforms is not None:
+            if type(platforms) is StringType:
+                platformlist = string.split(platforms, ',')
+                self.metadata.platforms = map(string.strip, platformlist)
+
    def _show_help (self,
                    parser,
                    global_options=1,
                    display_options=1,
                    commands=[]):
        """Show help for the setup script command-line in the form of
        several lists of command-line options.  'parser' should be a
        FancyGetopt instance; do not expect it to be returned in the
        same state, as its option table will be reset to make it
        generate the correct help text.

        If 'global_options' is true, lists the global options:
        --verbose, --dry-run, etc.  If 'display_options' is true, lists
        the "display-only" options: --name, --version, etc.  Finally,
        lists per-command help for every command name or command class
        in 'commands'.
        """
        # late import because of mutual dependence between these modules
        from distutils.core import gen_usage
        from distutils.cmd import Command

        if global_options:
            if display_options:
                options = self._get_toplevel_options()
            else:
                options = self.global_options
            parser.set_option_table(options)
            parser.print_help("Global options:")
            print

        if display_options:
            parser.set_option_table(self.display_options)
            parser.print_help(
                "Information display options (just display " +
                "information, ignore any commands)")
            print

        # NOTE(review): this loop walks self.commands, not the
        # 'commands' argument documented above -- confirm intended.
        for command in self.commands:
            if type(command) is ClassType and issubclass(command, Command):
                klass = command
            else:
                klass = self.get_command_class(command)
            if (hasattr(klass, 'help_options') and
                type(klass.help_options) is ListType):
                parser.set_option_table(klass.user_options +
                                        fix_help_options(klass.help_options))
            else:
                parser.set_option_table(klass.user_options)
            parser.print_help("Options for '%s' command:" % klass.__name__)
            print

        print gen_usage(self.script_name)
        return

    # _show_help ()
+
+
    def handle_display_options (self, option_order):
        """If there were any non-global "display-only" options
        (--help-commands or the metadata display options) on the command
        line, display the requested info and return true; else return
        false.
        """
        from distutils.core import gen_usage

        # User just wants a list of commands -- we'll print it out and stop
        # processing now (ie. if they ran "setup --help-commands foo bar",
        # we ignore "foo bar").
        if self.help_commands:
            self.print_commands()
            print
            print gen_usage(self.script_name)
            return 1

        # If user supplied any of the "display metadata" options, then
        # display that metadata in the order in which the user supplied the
        # metadata options.
        any_display_options = 0
        is_display_option = {}
        for option in self.display_options:
            is_display_option[option[0]] = 1

        for (opt, val) in option_order:
            if val and is_display_option.get(opt):
                # Map the long option name to its metadata accessor,
                # e.g. 'author-email' -> get_author_email().
                opt = translate_longopt(opt)
                value = getattr(self.metadata, "get_"+opt)()
                # List-valued metadata is joined for display.
                if opt in ['keywords', 'platforms']:
                    print string.join(value, ',')
                elif opt == 'classifiers':
                    print string.join(value, '\n')
                else:
                    print value
                any_display_options = 1

        return any_display_options

    # handle_display_options()
+
    def print_command_list (self, commands, header, max_length):
        """Print a subset of the list of all commands -- used by
        'print_commands()'.  'max_length' is the width of the command
        name column.
        """

        print header + ":"

        for cmd in commands:
            # Prefer a class registered in 'cmdclass'; otherwise load it.
            klass = self.cmdclass.get(cmd)
            if not klass:
                klass = self.get_command_class(cmd)
            try:
                description = klass.description
            except AttributeError:
                description = "(no description available)"

            print "  %-*s  %s" % (max_length, cmd, description)

    # print_command_list ()
+
+
    def print_commands (self):
        """Print out a help message listing all available commands with a
        description of each.  The list is divided into "standard commands"
        (listed in distutils.command.__all__) and "extra commands"
        (mentioned in self.cmdclass, but not a standard command).  The
        descriptions come from the command class attribute
        'description'.
        """

        import distutils.command
        std_commands = distutils.command.__all__
        is_std = {}
        for cmd in std_commands:
            is_std[cmd] = 1

        # Extras: registered in 'cmdclass' but not a standard command.
        extra_commands = []
        for cmd in self.cmdclass.keys():
            if not is_std.get(cmd):
                extra_commands.append(cmd)

        # Column width: the longest command name across both groups.
        max_length = 0
        for cmd in (std_commands + extra_commands):
            if len(cmd) > max_length:
                max_length = len(cmd)

        self.print_command_list(std_commands,
                                "Standard commands",
                                max_length)
        if extra_commands:
            print
            self.print_command_list(extra_commands,
                                    "Extra commands",
                                    max_length)

    # print_commands ()
+
+    def get_command_list (self):
+        """Get a list of (command, description) tuples.
+        The list is divided into "standard commands" (listed in
+        distutils.command.__all__) and "extra commands" (mentioned in
+        self.cmdclass, but not a standard command).  The descriptions come
+        from the command class attribute 'description'.
+        """
+        # Currently this is only used on Mac OS, for the Mac-only GUI
+        # Distutils interface (by Jack Jansen)
+
+        import distutils.command
+        std_commands = distutils.command.__all__
+        is_std = {}
+        for cmd in std_commands:
+            is_std[cmd] = 1
+
+        extra_commands = []
+        for cmd in self.cmdclass.keys():
+            if not is_std.get(cmd):
+                extra_commands.append(cmd)
+
+        rv = []
+        for cmd in (std_commands + extra_commands):
+            klass = self.cmdclass.get(cmd)
+            if not klass:
+                klass = self.get_command_class(cmd)
+            try:
+                description = klass.description
+            except AttributeError:
+                description = "(no description available)"
+            rv.append((cmd, description))
+        return rv
+
+    # -- Command class/object methods ----------------------------------
+
+    def get_command_packages (self):
+        """Return a list of packages from which commands are loaded."""
+        pkgs = self.command_packages
+        if not isinstance(pkgs, type([])):
+            pkgs = string.split(pkgs or "", ",")
+            for i in range(len(pkgs)):
+                pkgs[i] = string.strip(pkgs[i])
+            pkgs = filter(None, pkgs)
+            if "distutils.command" not in pkgs:
+                pkgs.insert(0, "distutils.command")
+            self.command_packages = pkgs
+        return pkgs
+
+    def get_command_class (self, command):
+        """Return the class that implements the Distutils command named by
+        'command'.  First we check the 'cmdclass' dictionary; if the
+        command is mentioned there, we fetch the class object from the
+        dictionary and return it.  Otherwise we load the command module
+        ("distutils.command." + command) and fetch the command class from
+        the module.  The loaded class is also stored in 'cmdclass'
+        to speed future calls to 'get_command_class()'.
+
+        Raises DistutilsModuleError if the expected module could not be
+        found, or if that module does not define the expected class.
+        """
+        klass = self.cmdclass.get(command)
+        if klass:
+            return klass
+
+        for pkgname in self.get_command_packages():
+            module_name = "%s.%s" % (pkgname, command)
+            klass_name = command
+
+            try:
+                __import__ (module_name)
+                module = sys.modules[module_name]
+            except ImportError:
+                continue
+
+            try:
+                klass = getattr(module, klass_name)
+            except AttributeError:
+                raise DistutilsModuleError, \
+                      "invalid command '%s' (no class '%s' in module '%s')" \
+                      % (command, klass_name, module_name)
+
+            self.cmdclass[command] = klass
+            return klass
+
+        raise DistutilsModuleError("invalid command '%s'" % command)
+
+
+    # get_command_class ()
+
+    def get_command_obj (self, command, create=1):
+        """Return the command object for 'command'.  Normally this object
+        is cached on a previous call to 'get_command_obj()'; if no command
+        object for 'command' is in the cache, then we either create and
+        return it (if 'create' is true) or return None.
+        """
+        cmd_obj = self.command_obj.get(command)
+        if not cmd_obj and create:
+            if DEBUG:
+                print "Distribution.get_command_obj(): " \
+                      "creating '%s' command object" % command
+
+            klass = self.get_command_class(command)
+            cmd_obj = self.command_obj[command] = klass(self)
+            self.have_run[command] = 0
+
+            # Set any options that were supplied in config files
+            # or on the command line.  (NB. support for error
+            # reporting is lame here: any errors aren't reported
+            # until 'finalize_options()' is called, which means
+            # we won't report the source of the error.)
+            options = self.command_options.get(command)
+            if options:
+                self._set_command_options(cmd_obj, options)
+
+        return cmd_obj
+
    def _set_command_options (self, command_obj, option_dict=None):
        """Set the options for 'command_obj' from 'option_dict'.  Basically
        this means copying elements of a dictionary ('option_dict') to
        attributes of an instance ('command').

        'command_obj' must be a Command instance.  If 'option_dict' is not
        supplied, uses the standard option dictionary for this command
        (from 'self.command_options').

        Each value in 'option_dict' is a (source, value) pair, where
        'source' names where the option came from (config file or command
        line) and is used only in error messages.  Raises
        DistutilsOptionError if the command has no such option, or if a
        boolean option's string value is unrecognized by 'strtobool()'.
        """
        command_name = command_obj.get_command_name()
        if option_dict is None:
            option_dict = self.get_option_dict(command_name)

        if DEBUG: print "  setting options for '%s' command:" % command_name
        for (option, (source, value)) in option_dict.items():
            if DEBUG: print "    %s = %s (from %s)" % (option, value, source)
            # The command class may or may not declare boolean options and
            # negative aliases; treat missing attributes as "none declared".
            try:
                bool_opts = map(translate_longopt, command_obj.boolean_options)
            except AttributeError:
                bool_opts = []
            try:
                neg_opt = command_obj.negative_opt
            except AttributeError:
                neg_opt = {}

            try:
                # String values for boolean options (eg. read from a config
                # file) are converted with 'strtobool()'; a negative alias
                # stores the inverted value under its target option name.
                is_string = type(value) is StringType
                if neg_opt.has_key(option) and is_string:
                    setattr(command_obj, neg_opt[option], not strtobool(value))
                elif option in bool_opts and is_string:
                    setattr(command_obj, option, strtobool(value))
                elif hasattr(command_obj, option):
                    setattr(command_obj, option, value)
                else:
                    raise DistutilsOptionError, \
                          ("error in %s: command '%s' has no such option '%s'"
                           % (source, command_name, option))
            except ValueError, msg:
                raise DistutilsOptionError, msg
+
+    def reinitialize_command (self, command, reinit_subcommands=0):
+        """Reinitializes a command to the state it was in when first
+        returned by 'get_command_obj()': ie., initialized but not yet
+        finalized.  This provides the opportunity to sneak option
+        values in programmatically, overriding or supplementing
+        user-supplied values from the config files and command line.
+        You'll have to re-finalize the command object (by calling
+        'finalize_options()' or 'ensure_finalized()') before using it for
+        real.
+
+        'command' should be a command name (string) or command object.  If
+        'reinit_subcommands' is true, also reinitializes the command's
+        sub-commands, as declared by the 'sub_commands' class attribute (if
+        it has one).  See the "install" command for an example.  Only
+        reinitializes the sub-commands that actually matter, ie. those
+        whose test predicates return true.
+
+        Returns the reinitialized command object.
+        """
+        from distutils.cmd import Command
+        if not isinstance(command, Command):
+            command_name = command
+            command = self.get_command_obj(command_name)
+        else:
+            command_name = command.get_command_name()
+
+        if not command.finalized:
+            return command
+        command.initialize_options()
+        command.finalized = 0
+        self.have_run[command_name] = 0
+        self._set_command_options(command)
+
+        if reinit_subcommands:
+            for sub in command.get_sub_commands():
+                self.reinitialize_command(sub, reinit_subcommands)
+
+        return command
+
+
+    # -- Methods that operate on the Distribution ----------------------
+
    def announce (self, msg, level=1):
        # 'level' is accepted for interface compatibility but ignored;
        # 'msg' is always forwarded to the distutils logger at debug level.
        log.debug(msg)
+
+    def run_commands (self):
+        """Run each command that was seen on the setup script command line.
+        Uses the list of commands found and cache of command objects
+        created by 'get_command_obj()'.
+        """
+        for cmd in self.commands:
+            self.run_command(cmd)
+
+
+    # -- Methods that operate on its Commands --------------------------
+
+    def run_command (self, command):
+        """Do whatever it takes to run a command (including nothing at all,
+        if the command has already been run).  Specifically: if we have
+        already created and run the command named by 'command', return
+        silently without doing anything.  If the command named by 'command'
+        doesn't even have a command object yet, create one.  Then invoke
+        'run()' on that command object (or an existing one).
+        """
+        # Already been here, done that? then return silently.
+        if self.have_run.get(command):
+            return
+
+        log.info("running %s", command)
+        cmd_obj = self.get_command_obj(command)
+        cmd_obj.ensure_finalized()
+        cmd_obj.run()
+        self.have_run[command] = 1
+
+
+    # -- Distribution query methods ------------------------------------
+
+    def has_pure_modules (self):
+        return len(self.packages or self.py_modules or []) > 0
+
+    def has_ext_modules (self):
+        return self.ext_modules and len(self.ext_modules) > 0
+
+    def has_c_libraries (self):
+        return self.libraries and len(self.libraries) > 0
+
    def has_modules (self):
        # True if there is any Python code at all -- pure or extension.
        return self.has_pure_modules() or self.has_ext_modules()
+
+    def has_headers (self):
+        return self.headers and len(self.headers) > 0
+
+    def has_scripts (self):
+        return self.scripts and len(self.scripts) > 0
+
+    def has_data_files (self):
+        return self.data_files and len(self.data_files) > 0
+
+    def is_pure (self):
+        return (self.has_pure_modules() and
+                not self.has_ext_modules() and
+                not self.has_c_libraries())
+
+    # -- Metadata query methods ----------------------------------------
+
+    # If you're looking for 'get_name()', 'get_version()', and so forth,
+    # they are defined in a sneaky way: the constructor binds self.get_XXX
+    # to self.metadata.get_XXX.  The actual code is in the
+    # DistributionMetadata class, below.
+
+# class Distribution
+
+
class DistributionMetadata:
    """Dummy class to hold the distribution meta-data: name, version,
    author, and so forth.

    Every attribute has a 'get_XXX()' accessor that substitutes a
    default ("UNKNOWN", "0.0.0", or an empty list) when the attribute
    was never supplied.
    """

    # Basenames of the accessor methods; Distribution's constructor uses
    # this list to re-export each 'get_XXX' method on the distribution
    # object itself.  (The old list carried "license" twice.)
    _METHOD_BASENAMES = ("name", "version", "author", "author_email",
                         "maintainer", "maintainer_email", "url",
                         "license", "description", "long_description",
                         "keywords", "platforms", "fullname", "contact",
                         "contact_email", "classifiers",
                         "download_url")

    def __init__ (self):
        # All fields default to None; the accessors map that to
        # "UNKNOWN" (or another suitable default) on the way out.
        self.name = None
        self.version = None
        self.author = None
        self.author_email = None
        self.maintainer = None
        self.maintainer_email = None
        self.url = None
        self.license = None
        self.description = None
        self.long_description = None
        self.keywords = None
        self.platforms = None
        self.classifiers = None
        self.download_url = None

    def write_pkg_info (self, base_dir):
        """Write the PKG-INFO file (Metadata-Version 1.0) into the
        release tree rooted at 'base_dir'.
        """
        pkg_info = open(os.path.join(base_dir, 'PKG-INFO'), 'w')

        pkg_info.write('Metadata-Version: 1.0\n')
        pkg_info.write('Name: %s\n' % self.get_name())
        pkg_info.write('Version: %s\n' % self.get_version())
        pkg_info.write('Summary: %s\n' % self.get_description())
        pkg_info.write('Home-page: %s\n' % self.get_url())
        pkg_info.write('Author: %s\n' % self.get_contact())
        pkg_info.write('Author-email: %s\n' % self.get_contact_email())
        pkg_info.write('License: %s\n' % self.get_license())
        if self.download_url:
            pkg_info.write('Download-URL: %s\n' % self.download_url)

        # The long description may span lines; RFC 822 continuation
        # escaping keeps it one header field.
        long_desc = rfc822_escape(self.get_long_description())
        pkg_info.write('Description: %s\n' % long_desc)

        # str.join instead of the deprecated string.join().
        keywords = ','.join(self.get_keywords())
        if keywords:
            pkg_info.write('Keywords: %s\n' % keywords)

        for platform in self.get_platforms():
            pkg_info.write('Platform: %s\n' % platform)

        for classifier in self.get_classifiers():
            pkg_info.write('Classifier: %s\n' % classifier)

        pkg_info.close()

    # write_pkg_info ()

    # -- Metadata query methods ----------------------------------------
    # Each accessor substitutes a default when the attribute is unset.

    def get_name (self):
        return self.name or "UNKNOWN"

    def get_version(self):
        return self.version or "0.0.0"

    def get_fullname (self):
        return "%s-%s" % (self.get_name(), self.get_version())

    def get_author(self):
        return self.author or "UNKNOWN"

    def get_author_email(self):
        return self.author_email or "UNKNOWN"

    def get_maintainer(self):
        return self.maintainer or "UNKNOWN"

    def get_maintainer_email(self):
        return self.maintainer_email or "UNKNOWN"

    def get_contact(self):
        # The maintainer, if set, takes precedence over the author.
        return (self.maintainer or
                self.author or
                "UNKNOWN")

    def get_contact_email(self):
        return (self.maintainer_email or
                self.author_email or
                "UNKNOWN")

    def get_url(self):
        return self.url or "UNKNOWN"

    def get_license(self):
        return self.license or "UNKNOWN"
    # Alternate spelling kept for backward compatibility.
    get_licence = get_license

    def get_description(self):
        return self.description or "UNKNOWN"

    def get_long_description(self):
        return self.long_description or "UNKNOWN"

    def get_keywords(self):
        return self.keywords or []

    def get_platforms(self):
        return self.platforms or ["UNKNOWN"]

    def get_classifiers(self):
        return self.classifiers or []

    def get_download_url(self):
        return self.download_url or "UNKNOWN"
+
+# class DistributionMetadata
+
+
def fix_help_options (options):
    """Convert a 4-tuple 'help_options' list as found in various command
    classes to the 3-tuple form required by FancyGetopt.
    """
    # Simply drop the fourth element of every tuple.
    return [help_tuple[0:3] for help_tuple in options]
+
+
if __name__ == "__main__":
    # Smoke test: constructing a bare Distribution exercises the default
    # option setup; print "ok" if nothing blows up.
    dist = Distribution()
    print "ok"
diff --git a/depot_tools/release/win/python_24/Lib/distutils/emxccompiler.py b/depot_tools/release/win/python_24/Lib/distutils/emxccompiler.py
new file mode 100644
index 0000000..1efac42f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/emxccompiler.py
@@ -0,0 +1,315 @@
+"""distutils.emxccompiler
+
+Provides the EMXCCompiler class, a subclass of UnixCCompiler that
+handles the EMX port of the GNU C compiler to OS/2.
+"""
+
+# issues:
+#
+# * OS/2 insists that DLLs can have names no longer than 8 characters
+#   We put export_symbols in a def-file, as though the DLL can have
+#   an arbitrary length name, but truncate the output filename.
+#
+# * only use OMF objects and use LINK386 as the linker (-Zomf)
+#
+# * always build for multithreading (-Zmt) as the accompanying OS/2 port
+#   of Python is only distributed with threads enabled.
+#
+# tested configurations:
+#
+# * EMX gcc 2.81/EMX 0.9d fix03
+
+__revision__ = "$Id: emxccompiler.py,v 1.11 2003/12/02 12:17:59 aimacintyre Exp $"
+
+import os,sys,copy
+from distutils.ccompiler import gen_preprocess_options, gen_lib_options
+from distutils.unixccompiler import UnixCCompiler
+from distutils.file_util import write_file
+from distutils.errors import DistutilsExecError, CompileError, UnknownFileError
+from distutils import log
+
class EMXCCompiler (UnixCCompiler):
    """CCompiler implementation for the EMX port of GCC on OS/2.

    Objects are compiled in OMF format (-Zomf) and always multithreaded
    (-Zmt) to match the accompanying OS/2 Python build; DLL exports are
    handled through a generated '.def' file.  See the module header for
    known issues and tested configurations.
    """

    compiler_type = 'emx'
    obj_extension = ".obj"
    static_lib_extension = ".lib"
    shared_lib_extension = ".dll"
    static_lib_format = "%s%s"
    shared_lib_format = "%s%s"
    res_extension = ".res"      # compiled resource file
    exe_extension = ".exe"

    def __init__ (self,
                  verbose=0,
                  dry_run=0,
                  force=0):

        UnixCCompiler.__init__ (self, verbose, dry_run, force)

        # Warn (but carry on) if pyconfig.h doesn't look GCC-compatible.
        (status, details) = check_config_h()
        self.debug_print("Python's GCC status: %s (details: %s)" %
                         (status, details))
        if status is not CONFIG_H_OK:
            self.warn(
                "Python's pyconfig.h doesn't seem to support your compiler.  " +
                ("Reason: %s." % details) +
                "Compiling may fail because of undefined preprocessor macros.")

        (self.gcc_version, self.ld_version) = \
            get_versions()
        self.debug_print(self.compiler_type + ": gcc %s, ld %s\n" %
                         (self.gcc_version,
                          self.ld_version) )

        # Hard-code GCC because that's what this is all about.
        # XXX optimization, warnings etc. should be customizable.
        self.set_executables(compiler='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             compiler_so='gcc -Zomf -Zmt -O3 -fomit-frame-pointer -mprobe -Wall',
                             linker_exe='gcc -Zomf -Zmt -Zcrtdll',
                             linker_so='gcc -Zomf -Zmt -Zcrtdll -Zdll')

        # want the gcc library statically linked (so that we don't have
        # to distribute a version dependent on the compiler we have)
        self.dll_libraries=["gcc"]

    # __init__ ()

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile one source file to an object file.

        '.rc' resource scripts are handed to the external 'rc' tool;
        every other source goes through the configured C compiler.
        """
        if ext == '.rc':
            # gcc requires '.rc' compiled to binary ('.res') files !!!
            try:
                self.spawn(["rc", "-r", src])
            except DistutilsExecError, msg:
                raise CompileError, msg
        else: # for other files use the C-compiler
            try:
                self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                           extra_postargs)
            except DistutilsExecError, msg:
                raise CompileError, msg

    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Link object files into an executable or shared library.

        When 'export_symbols' is given and the target is not an
        executable, a LINK386 '.def' file is generated next to the
        object files and appended to the object list; the actual link
        is then delegated to 'UnixCCompiler.link()'.
        """
        # use separate copies, so we can modify the lists
        extra_preargs = copy.copy(extra_preargs or [])
        libraries = copy.copy(libraries or [])
        objects = copy.copy(objects or [])

        # Additional libraries
        libraries.extend(self.dll_libraries)

        # handle export symbols by creating a def-file
        # with executables this only works with gcc/ld as linker
        if ((export_symbols is not None) and
            (target_desc != self.EXECUTABLE)):
            # (The linker doesn't do anything if output is up-to-date.
            # So it would probably better to check if we really need this,
            # but for this we had to insert some unchanged parts of
            # UnixCCompiler, and this is not what we want.)

            # we want to put some files in the same directory as the
            # object files are, build_temp doesn't help much
            # where are the object files
            temp_dir = os.path.dirname(objects[0])
            # name of dll to give the helper files the same base name
            (dll_name, dll_extension) = os.path.splitext(
                os.path.basename(output_filename))

            # generate the filenames for these files
            def_file = os.path.join(temp_dir, dll_name + ".def")

            # Generate .def file
            contents = [
                "LIBRARY %s INITINSTANCE TERMINSTANCE" % \
                os.path.splitext(os.path.basename(output_filename))[0],
                "DATA MULTIPLE NONSHARED",
                "EXPORTS"]
            for sym in export_symbols:
                contents.append('  "%s"' % sym)
            self.execute(write_file, (def_file, contents),
                         "writing %s" % def_file)

            # next add options for def-file and to creating import libraries
            # for gcc/ld the def-file is specified as any other object files
            objects.append(def_file)

        #end: if ((export_symbols is not None) and
        #        (target_desc != self.EXECUTABLE or self.linker_dll == "gcc")):

        # who wants symbols and a many times larger output file
        # should explicitly switch the debug mode on
        # otherwise we let dllwrap/ld strip the output file
        # (On my machine: 10KB < stripped_file < ??100KB
        #   unstripped_file = stripped_file + XXX KB
        #  ( XXX=254 for a typical python extension))
        if not debug:
            extra_preargs.append("-s")

        UnixCCompiler.link(self,
                           target_desc,
                           objects,
                           output_filename,
                           output_dir,
                           libraries,
                           library_dirs,
                           runtime_library_dirs,
                           None, # export_symbols, we do this in our def-file
                           debug,
                           extra_preargs,
                           extra_postargs,
                           build_temp,
                           target_lang)

    # link ()

    # -- Miscellaneous methods -----------------------------------------

    # override the object_filenames method from CCompiler to
    # support rc and res-files
    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source filenames to the object filenames they produce.

        Like the CCompiler version, but '.rc' resource scripts map to
        '.res' files rather than ordinary object files.  Raises
        UnknownFileError for unrecognized source extensions.
        """
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc']):
                raise UnknownFileError, \
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.rc':
                # these need to be compiled to object files
                obj_names.append (os.path.join (output_dir,
                                            base + self.res_extension))
            else:
                obj_names.append (os.path.join (output_dir,
                                            base + self.obj_extension))
        return obj_names

    # object_filenames ()

    # override the find_library_file method from UnixCCompiler
    # to deal with file naming/searching differences
    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs', then EMX's LIBRARY_PATH, for library 'lib'.

        In each directory the short form '<lib>.lib' is checked before
        the rare long form 'lib<lib>.lib'.  Returns the full path of the
        first match, or None if the library is nowhere to be found.
        """
        shortlib = '%s.lib' % lib
        longlib = 'lib%s.lib' % lib    # this form very rare

        # get EMX's default library directory search path
        try:
            emx_dirs = os.environ['LIBRARY_PATH'].split(';')
        except KeyError:
            emx_dirs = []

        for dir in dirs + emx_dirs:
            shortlibp = os.path.join(dir, shortlib)
            longlibp = os.path.join(dir, longlib)
            if os.path.exists(shortlibp):
                return shortlibp
            elif os.path.exists(longlibp):
                return longlibp

        # Oops, didn't find it in *any* of 'dirs'
        return None
+
+# class EMXCCompiler
+
+
+# Because these compilers aren't configured in Python's pyconfig.h file by
+# default, we should at least warn the user if he is using a unmodified
+# version.
+
+CONFIG_H_OK = "ok"
+CONFIG_H_NOTOK = "not ok"
+CONFIG_H_UNCERTAIN = "uncertain"
+
+def check_config_h():
+
+    """Check if the current Python installation (specifically, pyconfig.h)
+    appears amenable to building extensions with GCC.  Returns a tuple
+    (status, details), where 'status' is one of the following constants:
+      CONFIG_H_OK
+        all is well, go ahead and compile
+      CONFIG_H_NOTOK
+        doesn't look good
+      CONFIG_H_UNCERTAIN
+        not sure -- unable to read pyconfig.h
+    'details' is a human-readable string explaining the situation.
+
+    Note there are two ways to conclude "OK": either 'sys.version' contains
+    the string "GCC" (implying that this Python was built with GCC), or the
+    installed "pyconfig.h" contains the string "__GNUC__".
+    """
+
+    # XXX since this function also checks sys.version, it's not strictly a
+    # "pyconfig.h" check -- should probably be renamed...
+
+    from distutils import sysconfig
+    import string
+    # if sys.version contains GCC then python was compiled with
+    # GCC, and the pyconfig.h file should be OK
+    if string.find(sys.version,"GCC") >= 0:
+        return (CONFIG_H_OK, "sys.version mentions 'GCC'")
+
+    fn = sysconfig.get_config_h_filename()
+    try:
+        # It would probably better to read single lines to search.
+        # But we do this only once, and it is fast enough
+        f = open(fn)
+        s = f.read()
+        f.close()
+
+    except IOError, exc:
+        # if we can't read this file, we cannot say it is wrong
+        # the compiler will complain later about this file as missing
+        return (CONFIG_H_UNCERTAIN,
+                "couldn't read '%s': %s" % (fn, exc.strerror))
+
+    else:
+        # "pyconfig.h" contains an "#ifdef __GNUC__" or something similar
+        if string.find(s,"__GNUC__") >= 0:
+            return (CONFIG_H_OK, "'%s' mentions '__GNUC__'" % fn)
+        else:
+            return (CONFIG_H_NOTOK, "'%s' does not mention '__GNUC__'" % fn)
+
+
def get_versions():
    """ Try to find out the versions of gcc and ld.
        If not possible it returns None for it.
    """
    from distutils.version import StrictVersion
    from distutils.spawn import find_executable
    import re

    gcc_version = None
    gcc_exe = find_executable('gcc')
    if gcc_exe:
        # 'gcc -dumpversion' prints just the version string.
        pipe = os.popen(gcc_exe + ' -dumpversion', 'r')
        output = pipe.read()
        pipe.close()
        match = re.search(r'(\d+\.\d+\.\d+)', output)
        if match:
            gcc_version = StrictVersion(match.group(1))

    # EMX ld has no way of reporting version number, and we use GCC
    # anyway - so we can link OMF DLLs
    ld_version = None
    return (gcc_version, ld_version)
diff --git a/depot_tools/release/win/python_24/Lib/distutils/errors.py b/depot_tools/release/win/python_24/Lib/distutils/errors.py
new file mode 100644
index 0000000..84ceb77
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/errors.py
@@ -0,0 +1,99 @@
+"""distutils.errors
+
+Provides exceptions used by the Distutils modules.  Note that Distutils
+modules may raise standard exceptions; in particular, SystemExit is
+usually raised for errors that are obviously the end-user's fault
+(eg. bad command-line arguments).
+
+This module is safe to use in "from ... import *" mode; it only exports
+symbols whose names start with "Distutils" and end with "Error"."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: errors.py,v 1.13 2004/11/10 22:23:14 loewis Exp $"
+
class DistutilsError (Exception):
    """Root of the Distutils exception hierarchy; every error the
    Distutils raise deliberately derives from this class."""
+
class DistutilsModuleError (DistutilsError):
    """Raised when an expected module cannot be loaded, or an expected
    class cannot be found within a module (in particular, command
    modules and command classes)."""
+
class DistutilsClassError (DistutilsError):
    """Raised when a command class (or, should anyone feel the need to
    subclass Distribution, a distribution class) fails to hold up its
    end of the bargain, ie. to implement some part of the "command"
    interface."""
+
class DistutilsGetoptError (DistutilsError):
    """Raised when the option table handed to 'fancy_getopt()' is
    malformed."""
+
class DistutilsArgError (DistutilsError):
    """Raised by fancy_getopt in response to getopt.error -- ie. the
    command line was used incorrectly."""
+
class DistutilsFileError (DistutilsError):
    """Raised for filesystem problems -- an expected file is missing,
    and so on.  Typically used for problems we detect before IOError or
    OSError would be raised."""
+
class DistutilsOptionError (DistutilsError):
    """Raised for syntactic/semantic problems with command options:
    mutually conflicting or inconsistent options, badly-spelled values,
    and so forth.  No distinction is made between option values that
    originate in the setup script, on the command line, or in config
    files -- but when a problem is *known* to come from the setup
    script, DistutilsSetupError is raised instead."""
+
class DistutilsSetupError (DistutilsError):
    """Raised for errors that are definitely the setup script's fault,
    eg. invalid keyword arguments passed to 'setup()'."""
+
class DistutilsPlatformError (DistutilsError):
    """Raised when we don't know how to do something on the current
    platform, although we do know how on some other platform -- eg.
    trying to compile C files on a platform no CCompiler subclass
    supports."""
+
class DistutilsExecError (DistutilsError):
    """Raised when executing an external program (such as the C
    compiler, when compiling C files) fails."""
+
class DistutilsInternalError (DistutilsError):
    """Raised for internal inconsistencies or impossibilities --
    obviously, this should never be seen if the code is working."""
+
class DistutilsTemplateError (DistutilsError):
    """Raised for syntax errors in a file list template."""
+
+
+# Exception classes used by the CCompiler implementation classes
class CCompilerError (Exception):
    """Base class for failures of compile/link operations, used by the
    CCompiler implementation classes."""
+
class PreprocessError (CCompilerError):
    """Raised when preprocessing one or more C/C++ files fails."""
+
class CompileError (CCompilerError):
    """Raised when compiling one or more C/C++ source files fails."""
+
class LibError (CCompilerError):
    """Raised when creating a static library from one or more C/C++
    object files fails."""
+
class LinkError (CCompilerError):
    """Raised when linking one or more C/C++ object files into an
    executable or shared library fails."""
+
class UnknownFileError (CCompilerError):
    """Raised on an attempt to process a file of unknown type."""
diff --git a/depot_tools/release/win/python_24/Lib/distutils/extension.py b/depot_tools/release/win/python_24/Lib/distutils/extension.py
new file mode 100644
index 0000000..02e266c1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/extension.py
@@ -0,0 +1,246 @@
+"""distutils.extension
+
+Provides the Extension class, used to describe C/C++ extension
+modules in setup scripts."""
+
+__revision__ = "$Id: extension.py,v 1.19 2004/10/14 10:02:08 anthonybaxter Exp $"
+
+import os, string, sys
+from types import *
+
+# 'warnings' may be absent in very old Pythons; Extension.__init__ falls
+# back to writing directly to stderr when it is None.
+try:
+    import warnings
+except ImportError:
+    warnings = None
+
+# This class is really only used by the "build_ext" command, so it might
+# make sense to put it in distutils.command.build_ext.  However, that
+# module is already big enough, and I want to make this class a bit more
+# complex to simplify some common cases ("foo" module in "foo.c") and do
+# better error-checking ("foo.c" actually exists).
+#
+# Also, putting this in build_ext.py means every setup script would have to
+# import that large-ish module (indirectly, through distutils.core) in
+# order to do anything.
+
+class Extension:
+    """Just a collection of attributes that describes an extension
+    module and everything needed to build it (hopefully in a portable
+    way, but there are hooks that let you be as unportable as you need).
+
+    Instance attributes:
+      name : string
+        the full name of the extension, including any packages -- ie.
+        *not* a filename or pathname, but Python dotted name
+      sources : [string]
+        list of source filenames, relative to the distribution root
+        (where the setup script lives), in Unix form (slash-separated)
+        for portability.  Source files may be C, C++, SWIG (.i),
+        platform-specific resource files, or whatever else is recognized
+        by the "build_ext" command as source for a Python extension.
+      include_dirs : [string]
+        list of directories to search for C/C++ header files (in Unix
+        form for portability)
+      define_macros : [(name : string, value : string|None)]
+        list of macros to define; each macro is defined using a 2-tuple,
+        where 'value' is either the string to define it to or None to
+        define it without a particular value (equivalent of "#define
+        FOO" in source or -DFOO on Unix C compiler command line)
+      undef_macros : [string]
+        list of macros to undefine explicitly
+      library_dirs : [string]
+        list of directories to search for C/C++ libraries at link time
+      libraries : [string]
+        list of library names (not filenames or paths) to link against
+      runtime_library_dirs : [string]
+        list of directories to search for C/C++ libraries at run time
+        (for shared extensions, this is when the extension is loaded)
+      extra_objects : [string]
+        list of extra files to link with (eg. object files not implied
+        by 'sources', static library that must be explicitly specified,
+        binary resource files, etc.)
+      extra_compile_args : [string]
+        any extra platform- and compiler-specific information to use
+        when compiling the source files in 'sources'.  For platforms and
+        compilers where "command line" makes sense, this is typically a
+        list of command-line arguments, but for other platforms it could
+        be anything.
+      extra_link_args : [string]
+        any extra platform- and compiler-specific information to use
+        when linking object files together to create the extension (or
+        to create a new static Python interpreter).  Similar
+        interpretation as for 'extra_compile_args'.
+      export_symbols : [string]
+        list of symbols to be exported from a shared extension.  Not
+        used on all platforms, and not generally necessary for Python
+        extensions, which typically export exactly one symbol: "init" +
+        extension_name.
+      swig_opts : [string]
+        any extra options to pass to SWIG if a source file has the .i
+        extension.
+      depends : [string]
+        list of files that the extension depends on
+      language : string
+        extension language (i.e. "c", "c++", "objc"). Will be detected
+        from the source extensions if not provided.
+    """
+
+    # When adding arguments to this constructor, be sure to update
+    # setup_keywords in core.py.
+    def __init__ (self, name, sources,
+                  include_dirs=None,
+                  define_macros=None,
+                  undef_macros=None,
+                  library_dirs=None,
+                  libraries=None,
+                  runtime_library_dirs=None,
+                  extra_objects=None,
+                  extra_compile_args=None,
+                  extra_link_args=None,
+                  export_symbols=None,
+                  swig_opts = None,
+                  depends=None,
+                  language=None,
+                  **kw                      # To catch unknown keywords
+                 ):
+        # Python 2-style shape checks: 'name' must be a (byte) string and
+        # 'sources' a list containing only strings.
+        assert type(name) is StringType, "'name' must be a string"
+        assert (type(sources) is ListType and
+                map(type, sources) == [StringType]*len(sources)), \
+                "'sources' must be a list of strings"
+
+        # All list-valued attributes default to a fresh empty list so
+        # instances never share mutable state.
+        self.name = name
+        self.sources = sources
+        self.include_dirs = include_dirs or []
+        self.define_macros = define_macros or []
+        self.undef_macros = undef_macros or []
+        self.library_dirs = library_dirs or []
+        self.libraries = libraries or []
+        self.runtime_library_dirs = runtime_library_dirs or []
+        self.extra_objects = extra_objects or []
+        self.extra_compile_args = extra_compile_args or []
+        self.extra_link_args = extra_link_args or []
+        self.export_symbols = export_symbols or []
+        self.swig_opts = swig_opts or []
+        self.depends = depends or []
+        self.language = language
+
+        # If there are unknown keyword options, warn about them
+        # (non-fatal: they are reported and otherwise ignored).
+        if len(kw):
+            L = kw.keys() ; L.sort()
+            L = map(repr, L)
+            msg = "Unknown Extension options: " + string.join(L, ', ')
+            if warnings is not None:
+                warnings.warn(msg)
+            else:
+                sys.stderr.write(msg + '\n')
+# class Extension
+
+
+def read_setup_file (filename):
+    from distutils.sysconfig import \
+         parse_makefile, expand_makefile_vars, _variable_rx
+    from distutils.text_file import TextFile
+    from distutils.util import split_quoted
+
+    # First pass over the file to gather "VAR = VALUE" assignments.
+    vars = parse_makefile(filename)
+
+    # Second pass to gobble up the real content: lines of the form
+    #   <module> ... [<sourcefile> ...] [<cpparg> ...] [<library> ...]
+    file = TextFile(filename,
+                    strip_comments=1, skip_blanks=1, join_lines=1,
+                    lstrip_ws=1, rstrip_ws=1)
+    extensions = []
+
+    while 1:
+        line = file.readline()
+        if line is None:                # eof
+            break
+        if _variable_rx.match(line):    # VAR=VALUE, handled in first pass
+            continue
+
+        if line[0] == line[-1] == "*":
+            file.warn("'%s' lines not handled yet" % line)
+            continue
+
+        #print "original line: " + line
+        line = expand_makefile_vars(line, vars)
+        words = split_quoted(line)
+        #print "expanded line: " + line
+
+        # NB. this parses a slightly different syntax than the old
+        # makesetup script: here, there must be exactly one extension per
+        # line, and it must be the first word of the line.  I have no idea
+        # why the old syntax supported multiple extensions per line, as
+        # they all wind up being the same.
+
+        module = words[0]
+        ext = Extension(module, [])
+        append_next_word = None
+
+        for word in words[1:]:
+            if append_next_word is not None:
+                append_next_word.append(word)
+                append_next_word = None
+                continue
+
+            suffix = os.path.splitext(word)[1]
+            switch = word[0:2] ; value = word[2:]
+
+            if suffix in (".c", ".cc", ".cpp", ".cxx", ".c++", ".m", ".mm"):
+                # hmm, should we do something about C vs. C++ sources?
+                # or leave it up to the CCompiler implementation to
+                # worry about?
+                ext.sources.append(word)
+            elif switch == "-I":
+                ext.include_dirs.append(value)
+            elif switch == "-D":
+                equals = string.find(value, "=")
+                if equals == -1:        # bare "-DFOO" -- no value
+                    ext.define_macros.append((value, None))
+                else:                   # "-DFOO=blah"
+                    ext.define_macros.append((value[0:equals],
+                                              value[equals+2:]))
+            elif switch == "-U":
+                ext.undef_macros.append(value)
+            elif switch == "-C":        # only here 'cause makesetup has it!
+                ext.extra_compile_args.append(word)
+            elif switch == "-l":
+                ext.libraries.append(value)
+            elif switch == "-L":
+                ext.library_dirs.append(value)
+            elif switch == "-R":
+                ext.runtime_library_dirs.append(value)
+            elif word == "-rpath":
+                append_next_word = ext.runtime_library_dirs
+            elif word == "-Xlinker":
+                append_next_word = ext.extra_link_args
+            elif word == "-Xcompiler":
+                append_next_word = ext.extra_compile_args
+            elif switch == "-u":
+                ext.extra_link_args.append(word)
+                if not value:
+                    append_next_word = ext.extra_link_args
+            elif suffix in (".a", ".so", ".sl", ".o", ".dylib"):
+                # NB. a really faithful emulation of makesetup would
+                # append a .o file to extra_objects only if it
+                # had a slash in it; otherwise, it would s/.o/.c/
+                # and append it to sources.  Hmmmm.
+                ext.extra_objects.append(word)
+            else:
+                file.warn("unrecognized argument '%s'" % word)
+
+        extensions.append(ext)
+
+        #print "module:", module
+        #print "source files:", source_files
+        #print "cpp args:", cpp_args
+        #print "lib args:", library_args
+
+        #extensions[module] = { 'sources': source_files,
+        #                       'cpp_args': cpp_args,
+        #                       'lib_args': library_args }
+
+    return extensions
+
+# read_setup_file ()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/fancy_getopt.py b/depot_tools/release/win/python_24/Lib/distutils/fancy_getopt.py
new file mode 100644
index 0000000..b60fca8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/fancy_getopt.py
@@ -0,0 +1,502 @@
+"""distutils.fancy_getopt
+
+Wrapper around the standard getopt module that provides the following
+additional features:
+  * short and long options are tied together
+  * options have help strings, so fancy_getopt could potentially
+    create a complete usage summary
+  * options set attributes of a passed-in object
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: fancy_getopt.py,v 1.30 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, string, re
+from types import *
+import getopt
+from distutils.errors import *
+
+# Much like command_re in distutils.core, this is close to but not quite
+# the same as a Python NAME -- except, in the spirit of most GNU
+# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
+# The similarities to NAME are again not a coincidence...
+# A long option name: a letter followed by letters, digits or hyphens.
+longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
+longopt_re = re.compile(r'^%s$' % longopt_pat)
+
+# For recognizing "negative alias" options, eg. "quiet=!verbose"
+neg_alias_re = re.compile("^(%s)=!(%s)$" % (longopt_pat, longopt_pat))
+
+# This is used to translate long options to legitimate Python identifiers
+# (for use as attributes of some object).
+# (string.maketrans here is the Python 2 byte-string variant: a 256-char
+# table mapping '-' to '_' and leaving everything else alone.)
+longopt_xlate = string.maketrans('-', '_')
+
+class FancyGetopt:
+    """Wrapper around the standard 'getopt()' module that provides some
+    handy extra functionality:
+      * short and long options are tied together
+      * options have help strings, and help text can be assembled
+        from them
+      * options set attributes of a passed-in object
+      * boolean options can have "negative aliases" -- eg. if
+        --quiet is the "negative alias" of --verbose, then "--quiet"
+        on the command line sets 'verbose' to false
+    """
+
+    def __init__ (self, option_table=None):
+
+        # The option table is (currently) a list of tuples.  The
+        # tuples may have 3 or four values:
+        #   (long_option, short_option, help_string [, repeatable])
+        # if an option takes an argument, its long_option should have '='
+        # appended; short_option should just be a single character, no ':'
+        # in any case.  If a long_option doesn't have a corresponding
+        # short_option, short_option should be None.  All option tuples
+        # must have long options.
+        self.option_table = option_table
+
+        # 'option_index' maps long option names to entries in the option
+        # table (ie. those 3-tuples).
+        self.option_index = {}
+        if self.option_table:
+            self._build_index()
+
+        # 'alias' records (duh) alias options; {'foo': 'bar'} means
+        # --foo is an alias for --bar
+        self.alias = {}
+
+        # 'negative_alias' keeps track of options that are the boolean
+        # opposite of some other option
+        self.negative_alias = {}
+
+        # These keep track of the information in the option table.  We
+        # don't actually populate these structures until we're ready to
+        # parse the command-line, since the 'option_table' passed in here
+        # isn't necessarily the final word.
+        self.short_opts = []
+        self.long_opts = []
+        self.short2long = {}
+        self.attr_name = {}
+        self.takes_arg = {}
+
+        # And 'option_order' is filled up in 'getopt()'; it records the
+        # original order of options (and their values) on the command-line,
+        # but expands short options, converts aliases, etc.
+        self.option_order = []
+
+    # __init__ ()
+
+
+    def _build_index (self):
+        # Rebuild 'option_index' from scratch so stale entries never
+        # survive a call to 'set_option_table()'.
+        self.option_index.clear()
+        for option in self.option_table:
+            self.option_index[option[0]] = option
+
+    def set_option_table (self, option_table):
+        """Replace the option table wholesale and reindex it."""
+        self.option_table = option_table
+        self._build_index()
+
+    def add_option (self, long_option, short_option=None, help_string=None):
+        """Append one option tuple; raises DistutilsGetoptError on a
+        duplicate long option name."""
+        if self.option_index.has_key(long_option):
+            raise DistutilsGetoptError, \
+                  "option conflict: already an option '%s'" % long_option
+        else:
+            option = (long_option, short_option, help_string)
+            self.option_table.append(option)
+            self.option_index[long_option] = option
+
+
+    def has_option (self, long_option):
+        """Return true if the option table for this parser has an
+        option with long name 'long_option'."""
+        return self.option_index.has_key(long_option)
+
+    def get_attr_name (self, long_option):
+        """Translate long option name 'long_option' to the form it
+        has as an attribute of some object: ie., translate hyphens
+        to underscores."""
+        return string.translate(long_option, longopt_xlate)
+
+
+    def _check_alias_dict (self, aliases, what):
+        """Verify every alias and its target name an existing option;
+        raises DistutilsGetoptError otherwise."""
+        assert type(aliases) is DictionaryType
+        for (alias, opt) in aliases.items():
+            if not self.option_index.has_key(alias):
+                raise DistutilsGetoptError, \
+                      ("invalid %s '%s': "
+                       "option '%s' not defined") % (what, alias, alias)
+            if not self.option_index.has_key(opt):
+                raise DistutilsGetoptError, \
+                      ("invalid %s '%s': "
+                       "aliased option '%s' not defined") % (what, alias, opt)
+
+    def set_aliases (self, alias):
+        """Set the aliases for this option parser."""
+        self._check_alias_dict(alias, "alias")
+        self.alias = alias
+
+    def set_negative_aliases (self, negative_alias):
+        """Set the negative aliases for this option parser.
+        'negative_alias' should be a dictionary mapping option names to
+        option names, both the key and value must already be defined
+        in the option table."""
+        self._check_alias_dict(negative_alias, "negative alias")
+        self.negative_alias = negative_alias
+
+
+    def _grok_option_table (self):
+        """Populate the various data structures that keep tabs on the
+        option table.  Called by 'getopt()' before it can do anything
+        worthwhile.
+        """
+        self.long_opts = []
+        self.short_opts = []
+        self.short2long.clear()
+        self.repeat = {}
+
+        for option in self.option_table:
+            if len(option) == 3:
+                long, short, help = option
+                repeat = 0
+            elif len(option) == 4:
+                long, short, help, repeat = option
+            else:
+                # the option table is part of the code, so simply
+                # assert that it is correct
+                raise ValueError, "invalid option tuple: %r" % (option,)
+
+            # Type- and value-check the option names
+            if type(long) is not StringType or len(long) < 2:
+                raise DistutilsGetoptError, \
+                      ("invalid long option '%s': "
+                       "must be a string of length >= 2") % long
+
+            if (not ((short is None) or
+                     (type(short) is StringType and len(short) == 1))):
+                # NOTE(review): message below is missing the word "be".
+                raise DistutilsGetoptError, \
+                      ("invalid short option '%s': "
+                       "must a single character or None") % short
+
+            # NOTE(review): 'long' may still carry its trailing '=' here,
+            # so for argument-taking options the 'repeat' key never matches
+            # the stripped name looked up in getopt(); only bare boolean
+            # options (like 'verbose') line up -- confirm this is intended.
+            self.repeat[long] = repeat
+            self.long_opts.append(long)
+
+            if long[-1] == '=':             # option takes an argument?
+                if short: short = short + ':'
+                long = long[0:-1]
+                self.takes_arg[long] = 1
+            else:
+
+                # Is option is a "negative alias" for some other option (eg.
+                # "quiet" == "!verbose")?
+                alias_to = self.negative_alias.get(long)
+                if alias_to is not None:
+                    if self.takes_arg[alias_to]:
+                        raise DistutilsGetoptError, \
+                              ("invalid negative alias '%s': "
+                               "aliased option '%s' takes a value") % \
+                               (long, alias_to)
+
+                    self.long_opts[-1] = long # XXX redundant?!
+                    self.takes_arg[long] = 0
+
+                else:
+                    self.takes_arg[long] = 0
+
+            # If this is an alias option, make sure its "takes arg" flag is
+            # the same as the option it's aliased to.
+            alias_to = self.alias.get(long)
+            if alias_to is not None:
+                if self.takes_arg[long] != self.takes_arg[alias_to]:
+                    raise DistutilsGetoptError, \
+                          ("invalid alias '%s': inconsistent with "
+                           "aliased option '%s' (one of them takes a value, "
+                           "the other doesn't") % (long, alias_to)
+
+
+            # Now enforce some bondage on the long option name, so we can
+            # later translate it to an attribute name on some object.  Have
+            # to do this a bit late to make sure we've removed any trailing
+            # '='.
+            if not longopt_re.match(long):
+                # NOTE(review): message below is missing its closing ')'.
+                raise DistutilsGetoptError, \
+                      ("invalid long option name '%s' " +
+                       "(must be letters, numbers, hyphens only") % long
+
+            self.attr_name[long] = self.get_attr_name(long)
+            if short:
+                self.short_opts.append(short)
+                self.short2long[short[0]] = long
+
+        # for option_table
+
+    # _grok_option_table()
+
+
+    def getopt (self, args=None, object=None):
+        """Parse command-line options in args. Store as attributes on object.
+
+        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
+        'object' is None or not supplied, creates a new OptionDummy
+        object, stores option values there, and returns a tuple (args,
+        object).  If 'object' is supplied, it is modified in place and
+        'getopt()' just returns 'args'; in both cases, the returned
+        'args' is a modified copy of the passed-in 'args' list, which
+        is left untouched.
+        """
+        if args is None:
+            args = sys.argv[1:]
+        if object is None:
+            object = OptionDummy()
+            created_object = 1
+        else:
+            created_object = 0
+
+        self._grok_option_table()
+
+        # NOTE(review): string.join defaults to a space separator, so the
+        # shortopts string handed to getopt contains embedded spaces;
+        # getopt appears to tolerate them since ' ' never matches an
+        # option character -- confirm.
+        short_opts = string.join(self.short_opts)
+        try:
+            opts, args = getopt.getopt(args, short_opts, self.long_opts)
+        except getopt.error, msg:
+            raise DistutilsArgError, msg
+
+        for opt, val in opts:
+            if len(opt) == 2 and opt[0] == '-': # it's a short option
+                opt = self.short2long[opt[1]]
+            else:
+                assert len(opt) > 2 and opt[:2] == '--'
+                opt = opt[2:]
+
+            alias = self.alias.get(opt)
+            if alias:
+                opt = alias
+
+            if not self.takes_arg[opt]:     # boolean option?
+                assert val == '', "boolean option can't have value"
+                alias = self.negative_alias.get(opt)
+                if alias:
+                    opt = alias
+                    val = 0
+                else:
+                    val = 1
+
+            attr = self.attr_name[opt]
+            # The only repeating option at the moment is 'verbose'.
+            # It has a negative option -q quiet, which should set verbose = 0.
+            if val and self.repeat.get(attr) is not None:
+                val = getattr(object, attr, 0) + 1
+            setattr(object, attr, val)
+            self.option_order.append((opt, val))
+
+        # for opts
+        if created_object:
+            return args, object
+        else:
+            return args
+
+    # getopt()
+
+
+    def get_option_order (self):
+        """Returns the list of (option, value) tuples processed by the
+        previous run of 'getopt()'.  Raises RuntimeError if
+        'getopt()' hasn't been called yet.
+        """
+        if self.option_order is None:
+            raise RuntimeError, "'getopt()' hasn't been called yet"
+        else:
+            return self.option_order
+
+
+    def generate_help (self, header=None):
+        """Generate help text (a list of strings, one per suggested line of
+        output) from the option table for this FancyGetopt object.
+        """
+        # Blithely assume the option table is good: probably wouldn't call
+        # 'generate_help()' unless you've already called 'getopt()'.
+
+        # First pass: determine maximum length of long option names
+        max_opt = 0
+        for option in self.option_table:
+            long = option[0]
+            short = option[1]
+            l = len(long)
+            if long[-1] == '=':
+                l = l - 1
+            if short is not None:
+                l = l + 5                   # " (-x)" where short == 'x'
+            if l > max_opt:
+                max_opt = l
+
+        opt_width = max_opt + 2 + 2 + 2     # room for indent + dashes + gutter
+
+        # Typical help block looks like this:
+        #   --foo       controls foonabulation
+        # Help block for longest option looks like this:
+        #   --flimflam  set the flim-flam level
+        # and with wrapped text:
+        #   --flimflam  set the flim-flam level (must be between
+        #               0 and 100, except on Tuesdays)
+        # Options with short names will have the short name shown (but
+        # it doesn't contribute to max_opt):
+        #   --foo (-f)  controls foonabulation
+        # If adding the short option would make the left column too wide,
+        # we push the explanation off to the next line
+        #   --flimflam (-l)
+        #               set the flim-flam level
+        # Important parameters:
+        #   - 2 spaces before option block start lines
+        #   - 2 dashes for each long option name
+        #   - min. 2 spaces between option and explanation (gutter)
+        #   - 5 characters (incl. space) for short option name
+
+        # Now generate lines of help text.  (If 80 columns were good enough
+        # for Jesus, then 78 columns are good enough for me!)
+        line_width = 78
+        text_width = line_width - opt_width
+        big_indent = ' ' * opt_width
+        if header:
+            lines = [header]
+        else:
+            lines = ['Option summary:']
+
+        for option in self.option_table:
+            long, short, help = option[:3]
+            text = wrap_text(help, text_width)
+            if long[-1] == '=':
+                long = long[0:-1]
+
+            # Case 1: no short option at all (makes life easy)
+            if short is None:
+                if text:
+                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
+                else:
+                    lines.append("  --%-*s  " % (max_opt, long))
+
+            # Case 2: we have a short option, so we have to include it
+            # just after the long option
+            else:
+                opt_names = "%s (-%s)" % (long, short)
+                if text:
+                    lines.append("  --%-*s  %s" %
+                                 (max_opt, opt_names, text[0]))
+                else:
+                    lines.append("  --%-*s" % opt_names)
+
+            for l in text[1:]:
+                lines.append(big_indent + l)
+
+        # for self.option_table
+
+        return lines
+
+    # generate_help ()
+
+    def print_help (self, header=None, file=None):
+        """Write 'generate_help(header)' to 'file' (default sys.stdout),
+        one line at a time."""
+        if file is None:
+            file = sys.stdout
+        for line in self.generate_help(header):
+            file.write(line + "\n")
+
+# class FancyGetopt
+
+
+def fancy_getopt (options, negative_opt, object, args):
+    parser = FancyGetopt(options)
+    parser.set_negative_aliases(negative_opt)
+    return parser.getopt(args, object)
+
+
+# Translation table mapping every whitespace character to a plain space;
+# used by wrap_text() to normalize whitespace before splitting.
+WS_TRANS = string.maketrans(string.whitespace, ' ' * len(string.whitespace))
+
+def wrap_text (text, width):
+    """wrap_text(text : string, width : int) -> [string]
+
+    Split 'text' into multiple lines of no more than 'width' characters
+    each, and return the list of strings that results.
+    """
+
+    # None wraps to nothing; short text passes through unchanged.
+    if text is None:
+        return []
+    if len(text) <= width:
+        return [text]
+
+    text = string.expandtabs(text)
+    text = string.translate(text, WS_TRANS)
+    # Split into runs of spaces, runs of hyphens, and everything else;
+    # the capturing group keeps the separators, so lines can break after
+    # a hyphen run as well as at spaces.
+    chunks = re.split(r'( +|-+)', text)
+    chunks = filter(None, chunks)      # ' - ' results in empty strings
+    lines = []
+
+    while chunks:
+
+        cur_line = []                   # list of chunks (to-be-joined)
+        cur_len = 0                     # length of current line
+
+        while chunks:
+            l = len(chunks[0])
+            if cur_len + l <= width:    # can squeeze (at least) this chunk in
+                cur_line.append(chunks[0])
+                del chunks[0]
+                cur_len = cur_len + l
+            else:                       # this line is full
+                # drop last chunk if all space
+                if cur_line and cur_line[-1][0] == ' ':
+                    del cur_line[-1]
+                break
+
+        if chunks:                      # any chunks left to process?
+
+            # if the current line is still empty, then we had a single
+            # chunk that's too big too fit on a line -- so we break
+            # down and break it up at the line width
+            if cur_len == 0:
+                cur_line.append(chunks[0][0:width])
+                chunks[0] = chunks[0][width:]
+
+            # all-whitespace chunks at the end of a line can be discarded
+            # (and we know from the re.split above that if a chunk has
+            # *any* whitespace, it is *all* whitespace)
+            if chunks[0][0] == ' ':
+                del chunks[0]
+
+        # and store this line in the list-of-all-lines -- as a single
+        # string, of course!
+        lines.append(string.join(cur_line, ''))
+
+    # while chunks
+
+    return lines
+
+# wrap_text ()
+
+
+def translate_longopt (opt):
+    """Convert a long option name to a valid Python identifier by
+    changing "-" to "_".
+    """
+    # Uses the module-level 'longopt_xlate' table built with
+    # string.maketrans above.
+    return string.translate(opt, longopt_xlate)
+
+
+class OptionDummy:
+    """Dummy class just used as a place to hold command-line option
+    values as instance attributes."""
+
+    def __init__ (self, options=[]):
+        """Create a new OptionDummy instance.  The attributes listed in
+        'options' will be initialized to None."""
+        for opt in options:
+            setattr(self, opt, None)
+
+# class OptionDummy
+
+
+if __name__ == "__main__":
+    # Quick manual demo of wrap_text() at several widths (Python 2
+    # 'print' statements -- run with a 2.x interpreter).
+    text = """\
+Tra-la-la, supercalifragilisticexpialidocious.
+How *do* you spell that odd word, anyways?
+(Someone ask Mary -- she'll know [or she'll
+say, "How should I know?"].)"""
+
+    for w in (10, 20, 30, 40):
+        print "width: %d" % w
+        print string.join(wrap_text(text, w), "\n")
+        print
diff --git a/depot_tools/release/win/python_24/Lib/distutils/file_util.py b/depot_tools/release/win/python_24/Lib/distutils/file_util.py
new file mode 100644
index 0000000..c182ac58
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/file_util.py
@@ -0,0 +1,253 @@
+"""distutils.file_util
+
+Utility functions for operating on single files.
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: file_util.py,v 1.17 2004/11/10 22:23:14 loewis Exp $"
+
+import os
+from distutils.errors import DistutilsFileError
+from distutils import log
+
+# for generating verbose output in 'copy_file()'
+_copy_action = { None:   'copying',
+                 'hard': 'hard linking',
+                 'sym':  'symbolically linking' }
+
+
+def _copy_file_contents (src, dst, buffer_size=16*1024):
+    """Copy the file 'src' to 'dst'; both must be filenames.  Any error
+    opening either file, reading from 'src', or writing to 'dst', raises
+    DistutilsFileError.  Data is read/written in chunks of 'buffer_size'
+    bytes (default 16k).  No attempt is made to handle anything apart from
+    regular files.
+    """
+    # Stolen from shutil module in the standard library, but with
+    # custom error-handling added.
+
+    fsrc = None
+    fdst = None
+    try:
+        try:
+            fsrc = open(src, 'rb')
+        except os.error, (errno, errstr):
+            raise DistutilsFileError, \
+                  "could not open '%s': %s" % (src, errstr)
+
+        if os.path.exists(dst):
+            try:
+                os.unlink(dst)
+            except os.error, (errno, errstr):
+                raise DistutilsFileError, \
+                      "could not delete '%s': %s" % (dst, errstr)
+
+        try:
+            fdst = open(dst, 'wb')
+        except os.error, (errno, errstr):
+            raise DistutilsFileError, \
+                  "could not create '%s': %s" % (dst, errstr)
+
+        while 1:
+            try:
+                buf = fsrc.read(buffer_size)
+            except os.error, (errno, errstr):
+                raise DistutilsFileError, \
+                      "could not read from '%s': %s" % (src, errstr)
+
+            if not buf:
+                break
+
+            try:
+                fdst.write(buf)
+            except os.error, (errno, errstr):
+                raise DistutilsFileError, \
+                      "could not write to '%s': %s" % (dst, errstr)
+
+    finally:
+        if fdst:
+            fdst.close()
+        if fsrc:
+            fsrc.close()
+
+# _copy_file_contents()
+
def copy_file (src, dst,
               preserve_mode=1,
               preserve_times=1,
               update=0,
               link=None,
               verbose=0,
               dry_run=0):

    """Copy a file 'src' to 'dst'.  If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename.  (If
    the file exists, it will be ruthlessly clobbered.)  If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied.  If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well.  If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied.  Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).

    NOTE(review): 'verbose' is accepted for interface compatibility but is
    not used in this body -- logging goes through the 'log' module.
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking.  Hmmm.  And I don't know what
    # macostools.copyfile() does.  Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError, \
              "can't copy '%s': doesn't exist or not a regular file" % src

    # If 'dst' names a directory, the real destination is dst/basename(src).
    # Remember the directory separately for the log message below.
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    # 'update' mode: skip the copy when the destination is already fresh.
    if update and not newer(src, dst):
        log.debug("not copying %s (output up-to-date)", src)
        return dst, 0

    # Validate 'link' by looking up its log verb in _copy_action.
    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError, \
              "invalid value '%s' for 'link' argument" % link
    # When the filename is unchanged, log just the target directory;
    # otherwise log the full destination path.
    if os.path.basename(dst) == os.path.basename(src):
        log.info("%s %s -> %s", action, src, dir)
    else:
        log.info("%s %s -> %s", action, src, dst)

    # Dry run: report success without touching the filesystem.
    if dry_run:
        return (dst, 1)

    # On Mac OS, use the native file copy routine
    if os.name == 'mac':
        import macostools
        try:
            macostools.copy(src, dst, 0, preserve_times)
        except os.error, exc:
            raise DistutilsFileError, \
                  "could not copy '%s' to '%s': %s" % (src, dst, exc[-1])

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.link(src, dst)
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    else:
        _copy_file_contents(src, dst)
        if preserve_mode or preserve_times:
            st = os.stat(src)

            # According to David Ascher <da@ski.org>, utime() should be done
            # before chmod() (at least under NT).
            if preserve_times:
                os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
            if preserve_mode:
                os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)
+
+# copy_file ()
+
+
+# XXX I suspect this is Unix-specific -- need porting help!
# XXX I suspect this is Unix-specific -- need porting help!
def move_file (src, dst,
               verbose=0,
               dry_run=0):

    """Move a file 'src' to 'dst'.  If 'dst' is a directory, the file will
    be moved into it with the same name; otherwise, 'src' is just renamed
    to 'dst'.  Return the new full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'.  What about
    other systems???

    Raises DistutilsFileError if 'src' is not a regular file, if 'dst'
    already exists (as a non-directory), or if the rename/copy fails.

    NOTE(review): 'verbose' is accepted for interface compatibility but is
    not used in this body -- logging goes through the 'log' module.
    """
    from os.path import exists, isfile, isdir, basename, dirname
    import errno

    log.info("moving %s -> %s", src, dst)

    # Dry run: report the would-be destination without touching anything.
    if dry_run:
        return dst

    if not isfile(src):
        raise DistutilsFileError, \
              "can't move '%s': not a regular file" % src

    # Moving into a directory keeps the original filename; moving onto
    # an existing file is an error (unlike copy_file, which clobbers).
    if isdir(dst):
        dst = os.path.join(dst, basename(src))
    elif exists(dst):
        raise DistutilsFileError, \
              "can't move '%s': destination '%s' already exists" % \
              (src, dst)

    if not isdir(dirname(dst)):
        raise DistutilsFileError, \
              "can't move '%s': destination '%s' not a valid path" % \
              (src, dst)

    # First try a plain rename; fall back to copy+delete only when the
    # rename fails because src and dst are on different filesystems.
    copy_it = 0
    try:
        os.rename(src, dst)
    except os.error, (num, msg):
        if num == errno.EXDEV:
            copy_it = 1
        else:
            raise DistutilsFileError, \
                  "couldn't move '%s' to '%s': %s" % (src, dst, msg)

    if copy_it:
        copy_file(src, dst)
        try:
            os.unlink(src)
        except os.error, (num, msg):
            # Deleting the source failed: try to roll back the copy so we
            # don't leave two live copies of the file lying around.
            try:
                os.unlink(dst)
            except os.error:
                pass
            raise DistutilsFileError, \
                  ("couldn't move '%s' to '%s' by copy/delete: " +
                   "delete '%s' failed: %s") % \
                  (src, dst, src, msg)

    return dst
+
+# move_file ()
+
+
def write_file (filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it, appending a
    newline after each.  The file is closed even if a write fails
    (the original leaked the handle on error).
    """
    f = open(filename, "w")
    try:
        for line in contents:
            f.write(line + "\n")
    finally:
        # Always release the file handle, even when a write raises.
        f.close()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/filelist.py b/depot_tools/release/win/python_24/Lib/distutils/filelist.py
new file mode 100644
index 0000000..6a1a37b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/filelist.py
@@ -0,0 +1,355 @@
+"""distutils.filelist
+
+Provides the FileList class, used for poking about the filesystem
+and building lists of files.
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: filelist.py,v 1.18 2004/11/10 22:23:14 loewis Exp $"
+
+import os, string, re
+import fnmatch
+from types import *
+from glob import glob
+from distutils.util import convert_path
+from distutils.errors import DistutilsTemplateError, DistutilsInternalError
+from distutils import log
+
class FileList:

    """A list of files built by exploring the filesystem and filtered by
    applying various patterns to what we find there.

    Instance attributes:
      dir
        directory from which files will be taken -- only used if
        'allfiles' not supplied to constructor
      files
        list of filenames currently being built/filtered/manipulated
      allfiles
        complete list of files under consideration (ie. without any
        filtering applied)
    """

    def __init__(self,
                 warn=None,
                 debug_print=None):
        # 'warn' and 'debug_print' are ignored, but kept for backwards
        # compatibility with callers written against older FileList
        # versions that took those hooks.

        self.allfiles = None
        self.files = []

    def set_allfiles (self, allfiles):
        """Supply the universe of candidate files directly, instead of
        letting 'findall()' walk the filesystem."""
        self.allfiles = allfiles

    def findall (self, dir=os.curdir):
        """Populate 'allfiles' by walking 'dir' (delegates to the
        module-level 'findall()' function)."""
        self.allfiles = findall(dir)

    def debug_print (self, msg):
        """Print 'msg' to stdout if the global DEBUG (taken from the
        DISTUTILS_DEBUG environment variable) flag is true.
        """
        from distutils.debug import DEBUG
        if DEBUG:
            print msg

    # -- List-like methods ---------------------------------------------

    def append (self, item):
        """Add a single filename to 'files'."""
        self.files.append(item)

    def extend (self, items):
        """Add a sequence of filenames to 'files'."""
        self.files.extend(items)

    def sort (self):
        # Not a strict lexical sort!
        # Sorting on (dirname, basename) pairs groups files by directory,
        # which a plain string sort would not guarantee across platforms.
        sortable_files = map(os.path.split, self.files)
        sortable_files.sort()
        self.files = []
        for sort_tuple in sortable_files:
            self.files.append(apply(os.path.join, sort_tuple))


    # -- Other miscellaneous utility methods ---------------------------

    def remove_duplicates (self):
        # Assumes list has been sorted!
        # Walk backwards so deletions don't disturb the indices still
        # to be visited.
        for i in range(len(self.files) - 1, 0, -1):
            if self.files[i] == self.files[i - 1]:
                del self.files[i]


    # -- "File template" methods ---------------------------------------

    def _parse_template_line (self, line):
        """Split one MANIFEST.in template line into its parts.

        Returns a tuple (action, patterns, dir, dir_pattern); 'action' is
        always set, and exactly one of the other three is non-None
        depending on which action was given.  Raises
        DistutilsTemplateError on a malformed line.
        """
        words = string.split(line)
        action = words[0]

        patterns = dir = dir_pattern = None

        if action in ('include', 'exclude',
                      'global-include', 'global-exclude'):
            if len(words) < 2:
                raise DistutilsTemplateError, \
                      "'%s' expects <pattern1> <pattern2> ..." % action

            patterns = map(convert_path, words[1:])

        elif action in ('recursive-include', 'recursive-exclude'):
            if len(words) < 3:
                raise DistutilsTemplateError, \
                      "'%s' expects <dir> <pattern1> <pattern2> ..." % action

            dir = convert_path(words[1])
            patterns = map(convert_path, words[2:])

        elif action in ('graft', 'prune'):
            if len(words) != 2:
                raise DistutilsTemplateError, \
                     "'%s' expects a single <dir_pattern>" % action

            dir_pattern = convert_path(words[1])

        else:
            raise DistutilsTemplateError, "unknown action '%s'" % action

        return (action, patterns, dir, dir_pattern)

    # _parse_template_line ()


    def process_template_line (self, line):
        """Execute one MANIFEST.in template line against 'files',
        warning (via the log module) when a pattern matches nothing."""

        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words.  'action' is always
        # defined: it's the first word of the line.  Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.
        if action == 'include':
            self.debug_print("include " + string.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=1):
                    log.warn("warning: no files found matching '%s'",
                             pattern)

        elif action == 'exclude':
            self.debug_print("exclude " + string.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=1):
                    log.warn(("warning: no previously-included files "
                              "found matching '%s'"), pattern)

        elif action == 'global-include':
            self.debug_print("global-include " + string.join(patterns))
            for pattern in patterns:
                if not self.include_pattern(pattern, anchor=0):
                    log.warn(("warning: no files found matching '%s' " +
                              "anywhere in distribution"), pattern)

        elif action == 'global-exclude':
            self.debug_print("global-exclude " + string.join(patterns))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, anchor=0):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found anywhere in distribution"),
                             pattern)

        elif action == 'recursive-include':
            self.debug_print("recursive-include %s %s" %
                             (dir, string.join(patterns)))
            for pattern in patterns:
                if not self.include_pattern(pattern, prefix=dir):
                    log.warn(("warning: no files found matching '%s' " +
                                "under directory '%s'"),
                             pattern, dir)

        elif action == 'recursive-exclude':
            self.debug_print("recursive-exclude %s %s" %
                             (dir, string.join(patterns)))
            for pattern in patterns:
                if not self.exclude_pattern(pattern, prefix=dir):
                    log.warn(("warning: no previously-included files matching "
                              "'%s' found under directory '%s'"),
                             pattern, dir)

        elif action == 'graft':
            self.debug_print("graft " + dir_pattern)
            if not self.include_pattern(None, prefix=dir_pattern):
                log.warn("warning: no directories found matching '%s'",
                         dir_pattern)

        elif action == 'prune':
            self.debug_print("prune " + dir_pattern)
            if not self.exclude_pattern(None, prefix=dir_pattern):
                log.warn(("no previously-included directories found " +
                          "matching '%s'"), dir_pattern)
        else:
            # _parse_template_line() already validated 'action', so this
            # branch is unreachable unless the two fall out of sync.
            raise DistutilsInternalError, \
                  "this cannot happen: invalid action '%s'" % action

    # process_template_line ()


    # -- Filtering/selection methods -----------------------------------

    def include_pattern (self, pattern,
                         anchor=1, prefix=None, is_regex=0):
        """Select strings (presumably filenames) from 'self.files' that
        match 'pattern', a Unix-style wildcard (glob) pattern.  Patterns
        are not quite the same as implemented by the 'fnmatch' module: '*'
        and '?'  match non-special characters, where "special" is platform-
        dependent: slash on Unix; colon, slash, and backslash on
        DOS/Windows; and colon on Mac OS.

        If 'anchor' is true (the default), then the pattern match is more
        stringent: "*.py" will match "foo.py" but not "foo/bar.py".  If
        'anchor' is false, both of these will match.

        If 'prefix' is supplied, then only filenames starting with 'prefix'
        (itself a pattern) and ending with 'pattern', with anything in between
        them, will match.  'anchor' is ignored in this case.

        If 'is_regex' is true, 'anchor' and 'prefix' are ignored, and
        'pattern' is assumed to be either a string containing a regex or a
        regex object -- no translation is done, the regex is just compiled
        and used as-is.

        Selected strings will be added to self.files.

        Return 1 if files are found.
        """
        files_found = 0
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("include_pattern: applying regex r'%s'" %
                         pattern_re.pattern)

        # delayed loading of allfiles list
        if self.allfiles is None:
            self.findall()

        for name in self.allfiles:
            if pattern_re.search(name):
                self.debug_print(" adding " + name)
                self.files.append(name)
                files_found = 1

        return files_found

    # include_pattern ()


    def exclude_pattern (self, pattern,
                         anchor=1, prefix=None, is_regex=0):
        """Remove strings (presumably filenames) from 'files' that match
        'pattern'.  Other parameters are the same as for
        'include_pattern()', above.
        The list 'self.files' is modified in place.
        Return 1 if files are found.
        """
        files_found = 0
        pattern_re = translate_pattern(pattern, anchor, prefix, is_regex)
        self.debug_print("exclude_pattern: applying regex r'%s'" %
                         pattern_re.pattern)
        # Iterate backwards so in-place deletion doesn't skip elements.
        for i in range(len(self.files)-1, -1, -1):
            if pattern_re.search(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                files_found = 1

        return files_found

    # exclude_pattern ()
+
+# class FileList
+
+
+# ----------------------------------------------------------------------
+# Utility functions
+
def findall (dir = os.curdir):
    """Find all files under 'dir' and return the list of full filenames
    (relative to 'dir').
    """
    from stat import ST_MODE, S_ISREG, S_ISDIR, S_ISLNK

    found = []
    pending = [dir]

    # Depth-first walk using an explicit LIFO stack.
    while pending:
        current = pending.pop()
        for entry in os.listdir(current):
            # Avoid the dreaded "./" prefix on names under the start dir.
            if current != os.curdir:
                fullname = os.path.join(current, entry)
            else:
                fullname = entry

            # A single stat call tells us everything we need.
            mode = os.stat(fullname)[ST_MODE]
            if S_ISREG(mode):
                found.append(fullname)
            elif S_ISDIR(mode) and not S_ISLNK(mode):
                pending.append(fullname)

    return found
+
+
def glob_to_re (pattern):
    """Translate a shell-like glob pattern to a regular expression; return
    a string containing the regex.  Differs from 'fnmatch.translate()' in
    that '*' does not match "special characters" (which are
    platform-specific).
    """
    # fnmatch turns '?' and '*' into '.' and '.*', which would let them
    # match path separators.  Rewrite every non-escaped '.' in the regex
    # so it matches any character EXCEPT the special ones.
    # XXX currently the "special characters" are just slash -- i.e. this
    # is Unix-only.
    translated = fnmatch.translate(pattern)
    return re.sub(r'(^|[^\\])\.', r'\1[^/]', translated)

# glob_to_re ()
+
+# glob_to_re ()
+
+
def translate_pattern (pattern, anchor=1, prefix=None, is_regex=0):
    """Translate a shell-like wildcard pattern to a compiled regular
    expression.  Return the compiled regex.  If 'is_regex' true,
    then 'pattern' is directly compiled to a regex (if it's a string)
    or just returned as-is (assumes it's a regex object).
    """
    # Raw regexes bypass glob translation entirely.
    if is_regex:
        if type(pattern) is StringType:
            return re.compile(pattern)
        return pattern

    if pattern:
        regex_body = glob_to_re(pattern)
    else:
        regex_body = ''

    if prefix is not None:
        # Strip the trailing end-anchor from the prefix regex, then glue
        # prefix and pattern together with "anything in between"; 'anchor'
        # is ignored in this mode.
        prefix_body = glob_to_re(prefix)[0:-1]
        return re.compile("^" + os.path.join(prefix_body, ".*" + regex_body))

    # No prefix -- respect the anchor flag.
    if anchor:
        regex_body = "^" + regex_body
    return re.compile(regex_body)

# translate_pattern ()
+
+# translate_pattern ()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/log.py b/depot_tools/release/win/python_24/Lib/distutils/log.py
new file mode 100644
index 0000000..cf3ee13
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/log.py
@@ -0,0 +1,64 @@
+"""A simple log mechanism styled after PEP 282."""
+
+# This module should be kept compatible with Python 2.1.
+
+# The class here is styled after PEP 282 so that it could later be
+# replaced with a standard Python logging implementation.
+
# Severity levels in increasing order; Log discards messages whose level
# is below its threshold (compare the logging module's DEBUG..CRITICAL).
DEBUG = 1
INFO = 2
WARN = 3
ERROR = 4
FATAL = 5

import sys
+
class Log:
    """Minimal PEP 282-style logger: a message at or above 'threshold'
    is %-formatted with its args and written to stdout, one per line."""

    def __init__(self, threshold=WARN):
        self.threshold = threshold

    def _log(self, level, msg, args):
        # Discard messages below the threshold; flush so output
        # interleaves sensibly with anything else writing to stdout.
        if level >= self.threshold:
            sys.stdout.write((msg % args) + "\n")
            sys.stdout.flush()

    def log(self, level, msg, *args):
        self._log(level, msg, args)

    def debug(self, msg, *args):
        self._log(DEBUG, msg, args)

    def info(self, msg, *args):
        self._log(INFO, msg, args)

    def warn(self, msg, *args):
        self._log(WARN, msg, args)

    def error(self, msg, *args):
        self._log(ERROR, msg, args)

    def fatal(self, msg, *args):
        self._log(FATAL, msg, args)
+
# Shared module-level logger; the names below are bound methods of this
# singleton, so "from distutils import log; log.info(...)" works.
_global_log = Log()
log = _global_log.log
debug = _global_log.debug
info = _global_log.info
warn = _global_log.warn
error = _global_log.error
fatal = _global_log.fatal
+
def set_threshold(level):
    """Set the global log threshold to 'level' and return the previous
    threshold (handy for tests that need to restore it)."""
    previous = _global_log.threshold
    _global_log.threshold = level
    return previous
+
def set_verbosity(v):
    """Map a -v count to a log threshold: 0 or less -> WARN, exactly 1
    -> INFO, 2 or more -> DEBUG.  (Non-integer values between 0 and 2
    that hit none of the tests leave the threshold untouched, matching
    the historical behavior.)"""
    if v <= 0:
        level = WARN
    elif v == 1:
        level = INFO
    elif v >= 2:
        level = DEBUG
    else:
        return
    set_threshold(level)
diff --git a/depot_tools/release/win/python_24/Lib/distutils/msvccompiler.py b/depot_tools/release/win/python_24/Lib/distutils/msvccompiler.py
new file mode 100644
index 0000000..d3ad8660
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/msvccompiler.py
@@ -0,0 +1,596 @@
+"""distutils.msvccompiler
+
+Contains MSVCCompiler, an implementation of the abstract CCompiler class
+for the Microsoft Visual Studio.
+"""
+
+# Written by Perry Stoll
+# hacked by Robin Becker and Thomas Heller to do a better job of
+#   finding DevStudio (through the registry)
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: msvccompiler.py,v 1.64.2.3 2005/03/11 17:20:41 tim_one Exp $"
+
+import sys, os, string
+from distutils.errors import \
+     DistutilsExecError, DistutilsPlatformError, \
+     CompileError, LibError, LinkError
+from distutils.ccompiler import \
+     CCompiler, gen_preprocess_options, gen_lib_options
+from distutils import log
+
# Pick exactly one registry back-end, preferring the standard-library
# _winreg module over the pywin32 (win32api/win32con) modules.  On
# success, _can_read_reg is set and the Reg* names below alias the
# chosen implementation's functions.
_can_read_reg = 0
try:
    import _winreg

    _can_read_reg = 1
    hkey_mod = _winreg

    RegOpenKeyEx = _winreg.OpenKeyEx
    RegEnumKey = _winreg.EnumKey
    RegEnumValue = _winreg.EnumValue
    RegError = _winreg.error

except ImportError:
    try:
        import win32api
        import win32con
        _can_read_reg = 1
        hkey_mod = win32con

        RegOpenKeyEx = win32api.RegOpenKeyEx
        RegEnumKey = win32api.RegEnumKey
        RegEnumValue = win32api.RegEnumValue
        RegError = win32api.error

    except ImportError:
        # Neither back-end is available (e.g. non-Windows Python);
        # registry lookups are disabled and compiler paths can't be found.
        log.info("Warning: Can't read registry to find the "
                 "necessary compiler setting\n"
                 "Make sure that Python modules _winreg, "
                 "win32api or win32con are installed.")
        pass

if _can_read_reg:
    # Hives searched (in order) by read_keys()/read_values() callers.
    HKEYS = (hkey_mod.HKEY_USERS,
             hkey_mod.HKEY_CURRENT_USER,
             hkey_mod.HKEY_LOCAL_MACHINE,
             hkey_mod.HKEY_CLASSES_ROOT)
+
def read_keys(base, key):
    """Return the list of subkey names of 'key' under the hive 'base',
    or None if the key cannot be opened."""
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    names = []
    index = 0
    # RegEnumKey raises RegError when the index runs off the end.
    while 1:
        try:
            names.append(RegEnumKey(handle, index))
        except RegError:
            break
        index = index + 1
    return names
+
def read_values(base, key):
    """Return dict of registry keys and values.

    All names are converted to lowercase.

    Returns None if the key cannot be opened.
    """
    try:
        handle = RegOpenKeyEx(base, key)
    except RegError:
        return None
    values = {}
    index = 0
    # RegEnumValue raises RegError once all values are exhausted.
    while 1:
        try:
            name, value, value_type = RegEnumValue(handle, index)
        except RegError:
            break
        # Registry names are case-insensitive; normalize to lowercase.
        values[convert_mbcs(name.lower())] = convert_mbcs(value)
        index = index + 1
    return values
+
def convert_mbcs(s):
    """Best-effort re-encoding of 's' to the Windows "mbcs" codec.
    Objects without an 'encode' method, and values that fail to encode,
    are returned unchanged."""
    encoder = getattr(s, "encode", None)
    if encoder is None:
        return s
    try:
        return encoder("mbcs")
    except UnicodeError:
        return s
+
class MacroExpander:
    """Expands $(...) macros (VCInstallDir, FrameworkDir, etc.) in paths
    read from the registry, using the install locations recorded by a
    given Visual Studio version."""

    def __init__(self, version):
        # Maps "$(Name)" -> replacement string.
        self.macros = {}
        self.load_macros(version)

    def set_macro(self, macro, path, key):
        # Search the hives in HKEYS order and take the first hit.
        for base in HKEYS:
            d = read_values(base, path)
            if d:
                self.macros["$(%s)" % macro] = d[key]
                break

    def load_macros(self, version):
        # Registry keys use the two-digit form, e.g. "7.1".
        vsbase = r"Software\Microsoft\VisualStudio\%0.1f" % version
        self.set_macro("VCInstallDir", vsbase + r"\Setup\VC", "productdir")
        self.set_macro("VSInstallDir", vsbase + r"\Setup\VS", "productdir")
        net = r"Software\Microsoft\.NETFramework"
        self.set_macro("FrameworkDir", net, "installroot")
        try:
            # The SDK install-root value name differs between VS 7.0 and 7.1+.
            if version > 7.0:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallrootv1.1")
            else:
                self.set_macro("FrameworkSDKDir", net, "sdkinstallroot")
        except KeyError, exc: # the registry value is simply absent
            raise DistutilsPlatformError, \
                  ("The .NET Framework SDK needs to be installed before "
                   "building extensions for Python.")

        p = r"Software\Microsoft\NET Framework Setup\Product"
        for base in HKEYS:
            try:
                h = RegOpenKeyEx(base, p)
            except RegError:
                continue
            key = RegEnumKey(h, 0)
            d = read_values(base, r"%s\%s" % (p, key))
            self.macros["$(FrameworkVersion)"] = d["version"]

    def sub(self, s):
        """Return 's' with every known $(...) macro replaced by its value."""
        for k, v in self.macros.items():
            s = string.replace(s, k, v)
        return s
+
def get_build_version():
    """Return the version of MSVC that was used to build Python.

    For Python 2.3 and up, the version number is included in
    sys.version.  For earlier versions, assume the compiler is MSVC 6.
    Returns None when the compiler version cannot be determined.
    """
    marker = "MSC v."
    pos = sys.version.find(marker)
    if pos == -1:
        # No "MSC v." tag: a pre-2.3 Python, assume MSVC 6.
        return 6
    pos = pos + len(marker)
    msc_ver, rest = sys.version[pos:].split(" ", 1)
    # e.g. "1310" -> major 7, minor 0.1 -> 7.1
    major = int(msc_ver[:-2]) - 6
    minor = int(msc_ver[2:3]) / 10.0
    # I don't think paths are affected by minor version in version 6
    if major == 6:
        minor = 0
    if major >= 6:
        return major + minor
    # else we don't know what version of the compiler this is
    return None
+
+
class MSVCCompiler (CCompiler) :
    """Concrete class that implements an interface to Microsoft Visual C++,
       as defined by the CCompiler abstract class."""

    compiler_type = 'msvc'

    # Just set this so CCompiler's constructor doesn't barf.  We currently
    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
    # as it really isn't necessary for this sort of single-compiler class.
    # Would be nice to have a consistent interface with UnixCCompiler,
    # though, so it's worth thinking about.
    executables = {}

    # Private class data (need to distinguish C from C++ source for compiler)
    _c_extensions = ['.c']
    _cpp_extensions = ['.cc', '.cpp', '.cxx']
    _rc_extensions = ['.rc']        # resource scripts
    _mc_extensions = ['.mc']        # message catalogs

    # Needed for the filename generation methods provided by the
    # base class, CCompiler.
    src_extensions = (_c_extensions + _cpp_extensions +
                      _rc_extensions + _mc_extensions)
    res_extension = '.res'
    obj_extension = '.obj'
    static_lib_extension = '.lib'
    shared_lib_extension = '.dll'
    # Windows import/static libs and DLLs take no "lib" prefix.
    static_lib_format = shared_lib_format = '%s%s'
    exe_extension = '.exe'
+
    def __init__ (self, verbose=0, dry_run=0, force=0):
        # 'verbose', 'dry_run' and 'force' are handled by the CCompiler
        # base class; all MSVC-specific setup is deferred to initialize()
        # so construction stays cheap and registry-free.
        CCompiler.__init__ (self, verbose, dry_run, force)
        self.__version = get_build_version()
        if self.__version >= 7:
            # VS 2002 (7.x) and later keep settings under "VisualStudio"
            # and express paths with $(...) macros.
            self.__root = r"Software\Microsoft\VisualStudio"
            self.__macros = MacroExpander(self.__version)
        else:
            # VC6 and earlier kept settings under "Devstudio".
            self.__root = r"Software\Microsoft\Devstudio"
        self.initialized = False
+
    def initialize(self):
        """Locate the compiler toolchain via the registry and set up
        executable paths, environment variables and default flag sets.
        Raises DistutilsPlatformError when the matching Visual Studio
        version is not installed."""
        self.__paths = self.get_msvc_paths("path")

        if len (self.__paths) == 0:
            raise DistutilsPlatformError, \
                  ("Python was built with version %s of Visual Studio, "
                   "and extensions need to be built with the same "
                   "version of the compiler, but it isn't installed." % self.__version)

        self.cc = self.find_exe("cl.exe")
        self.linker = self.find_exe("link.exe")
        self.lib = self.find_exe("lib.exe")
        self.rc = self.find_exe("rc.exe")   # resource compiler
        self.mc = self.find_exe("mc.exe")   # message compiler
        # Export the compiler's LIB/INCLUDE search paths to the environment.
        self.set_path_env_var('lib')
        self.set_path_env_var('include')

        # extend the MSVC path with the current path
        try:
            for p in string.split(os.environ['path'], ';'):
                self.__paths.append(p)
        except KeyError:
            pass
        os.environ['path'] = string.join(self.__paths, ';')

        self.preprocess_options = None
        # Release vs. debug flag sets ('/GX' enables C++ exceptions).
        self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GX' ,
                                 '/DNDEBUG']
        self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GX',
                                      '/Z7', '/D_DEBUG']

        self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
        if self.__version >= 7:
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
                ]
        else:
            # '/pdb:None' is only understood by the pre-VS7 linker.
            self.ldflags_shared_debug = [
                '/DLL', '/nologo', '/INCREMENTAL:no', '/pdb:None', '/DEBUG'
                ]
        self.ldflags_static = [ '/nologo']

        # Guard so callers can invoke initialize() lazily exactly once.
        self.initialized = True
+
+
+    # -- Worker methods ------------------------------------------------
+
+    def object_filenames (self,
+                          source_filenames,
+                          strip_dir=0,
+                          output_dir=''):
+        # Copied from ccompiler.py, extended to return .res as 'object'-file
+        # for .rc input file
+        if output_dir is None: output_dir = ''
+        obj_names = []
+        for src_name in source_filenames:
+            (base, ext) = os.path.splitext (src_name)
+            if ext not in self.src_extensions:
+                # Better to raise an exception instead of silently continuing
+                # and later complain about sources and targets having
+                # different lengths
+                raise CompileError ("Don't know how to compile %s" % src_name)
+            if strip_dir:
+                base = os.path.basename (base)
+            if ext in self._rc_extensions:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.res_extension))
+            elif ext in self._mc_extensions:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.res_extension))
+            else:
+                obj_names.append (os.path.join (output_dir,
+                                                base + self.obj_extension))
+        return obj_names
+
+    # object_filenames ()
+
+
    def compile(self, sources,
                output_dir=None, macros=None, include_dirs=None, debug=0,
                extra_preargs=None, extra_postargs=None, depends=None):
        """Compile the given sources, returning the object filenames.

        C sources go to cl.exe via /Tc, C++ via /Tp; .rc files are fed to
        rc.exe, and .mc files are run through mc.exe and the generated .rc
        is then compiled to a .res.  Raises CompileError when a tool fails
        or a source extension is unrecognized.
        """

        if not self.initialized: self.initialize()
        macros, objects, extra_postargs, pp_opts, build = \
                self._setup_compile(output_dir, macros, include_dirs, sources,
                                    depends, extra_postargs)

        compile_opts = extra_preargs or []
        compile_opts.append ('/c')
        if debug:
            compile_opts.extend(self.compile_options_debug)
        else:
            compile_opts.extend(self.compile_options)

        for obj in objects:
            try:
                src, ext = build[obj]
            except KeyError:
                # No source recorded for this object -- nothing to build.
                continue
            if debug:
                # pass the full pathname to MSVC in debug mode,
                # this allows the debugger to find the source file
                # without asking the user to browse for it
                src = os.path.abspath(src)

            if ext in self._c_extensions:
                input_opt = "/Tc" + src
            elif ext in self._cpp_extensions:
                input_opt = "/Tp" + src
            elif ext in self._rc_extensions:
                # compile .RC to .RES file
                input_opt = src
                output_opt = "/fo" + obj
                try:
                    self.spawn ([self.rc] + pp_opts +
                                [output_opt] + [input_opt])
                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue
            elif ext in self._mc_extensions:

                # Compile .MC to .RC file to .RES file.
                #   * '-h dir' specifies the directory for the
                #     generated include file
                #   * '-r dir' specifies the target directory of the
                #     generated RC file and the binary message resource
                #     it includes
                #
                # For now (since there are no options to change this),
                # we use the source-directory for the include file and
                # the build directory for the RC file and message
                # resources. This works at least for win32all.

                h_dir = os.path.dirname (src)
                rc_dir = os.path.dirname (obj)
                try:
                    # first compile .MC to .RC and .H file
                    self.spawn ([self.mc] +
                                ['-h', h_dir, '-r', rc_dir] + [src])
                    base, _ = os.path.splitext (os.path.basename (src))
                    rc_file = os.path.join (rc_dir, base + '.rc')
                    # then compile .RC to .RES file
                    self.spawn ([self.rc] +
                                ["/fo" + obj] + [rc_file])

                except DistutilsExecError, msg:
                    raise CompileError, msg
                continue
            else:
                # how to handle this file?
                raise CompileError (
                    "Don't know how to compile %s to %s" % \
                    (src, obj))

            # Common path for C/C++ sources: one cl.exe run per file.
            output_opt = "/Fo" + obj
            try:
                self.spawn ([self.cc] + compile_opts + pp_opts +
                            [input_opt, output_opt] +
                            extra_postargs)
            except DistutilsExecError, msg:
                raise CompileError, msg

        return objects

    # compile ()
+
+
    def create_static_lib (self,
                           objects,
                           output_libname,
                           output_dir=None,
                           debug=0,
                           target_lang=None):
        """Bundle 'objects' into a static library using lib.exe.

        The step is skipped when _need_link() reports the output file is
        already up to date.  Raises LibError if lib.exe fails.
        """

        if not self.initialized: self.initialize()
        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        output_filename = \
            self.library_filename (output_libname, output_dir=output_dir)

        if self._need_link (objects, output_filename):
            lib_args = objects + ['/OUT:' + output_filename]
            if debug:
                pass                    # XXX what goes here?
            try:
                self.spawn ([self.lib] + lib_args)
            except DistutilsExecError, msg:
                raise LibError, msg

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # create_static_lib ()
+
    def link (self,
              target_desc,
              objects,
              output_filename,
              output_dir=None,
              libraries=None,
              library_dirs=None,
              runtime_library_dirs=None,
              export_symbols=None,
              debug=0,
              extra_preargs=None,
              extra_postargs=None,
              build_temp=None,
              target_lang=None):
        """Link objects into an executable or DLL with link.exe.

        Export symbols become /EXPORT: options; when exports are given the
        import library is redirected into the build (temp) directory via
        /IMPLIB:.  The link is skipped when _need_link() reports the
        output up to date.  Raises LinkError if the linker fails.
        """

        if not self.initialized: self.initialize()
        (objects, output_dir) = self._fix_object_args (objects, output_dir)
        (libraries, library_dirs, runtime_library_dirs) = \
            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)

        if runtime_library_dirs:
            self.warn ("I don't know what to do with 'runtime_library_dirs': "
                       + str (runtime_library_dirs))

        lib_opts = gen_lib_options (self,
                                    library_dirs, runtime_library_dirs,
                                    libraries)
        if output_dir is not None:
            output_filename = os.path.join (output_dir, output_filename)

        if self._need_link (objects, output_filename):

            if target_desc == CCompiler.EXECUTABLE:
                # [1:] drops the leading '/DLL' flag, which only applies
                # to shared-library targets.
                if debug:
                    ldflags = self.ldflags_shared_debug[1:]
                else:
                    ldflags = self.ldflags_shared[1:]
            else:
                if debug:
                    ldflags = self.ldflags_shared_debug
                else:
                    ldflags = self.ldflags_shared

            export_opts = []
            for sym in (export_symbols or []):
                export_opts.append("/EXPORT:" + sym)

            ld_args = (ldflags + lib_opts + export_opts +
                       objects + ['/OUT:' + output_filename])

            # The MSVC linker generates .lib and .exp files, which cannot be
            # suppressed by any linker switches. The .lib files may even be
            # needed! Make sure they are generated in the temporary build
            # directory. Since they have different names for debug and release
            # builds, they can go into the same directory.
            if export_symbols is not None:
                (dll_name, dll_ext) = os.path.splitext(
                    os.path.basename(output_filename))
                implib_file = os.path.join(
                    os.path.dirname(objects[0]),
                    self.library_filename(dll_name))
                ld_args.append ('/IMPLIB:' + implib_file)

            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)

            self.mkpath (os.path.dirname (output_filename))
            try:
                self.spawn ([self.linker] + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg

        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # link ()
+
+
+    # -- Miscellaneous methods -----------------------------------------
+    # These are all used by the 'gen_lib_options() function, in
+    # ccompiler.py.
+
+    def library_dir_option (self, dir):
+        return "/LIBPATH:" + dir
+
    def runtime_library_dir_option (self, dir):
        """Unsupported for MSVC: there is no runtime library search path
        option (DLLs are located via PATH at run time), so always raise.
        """
        raise DistutilsPlatformError, \
              "don't know how to set runtime library search path for MSVC++"
+
    def library_option (self, lib):
        """Return the linker argument naming library 'lib' -- for MSVC
        just the library filename (e.g. 'foo.lib'), no '-l'-style flag.
        """
        return self.library_filename (lib)
+
+
+    def find_library_file (self, dirs, lib, debug=0):
+        # Prefer a debugging library if found (and requested), but deal
+        # with it if we don't have one.
+        if debug:
+            try_names = [lib + "_d", lib]
+        else:
+            try_names = [lib]
+        for dir in dirs:
+            for name in try_names:
+                libfile = os.path.join(dir, self.library_filename (name))
+                if os.path.exists(libfile):
+                    return libfile
+        else:
+            # Oops, didn't find it in *any* of 'dirs'
+            return None
+
+    # find_library_file ()
+
+    # Helper methods for using the MSVC registry settings
+
    def find_exe(self, exe):
        """Return path to an MSVC executable program.

        Tries to find the program in several places: first, one of the
        MSVC program search paths from the registry; next, the directories
        in the PATH environment variable.  If any of those work, return an
        absolute path that is known to exist.  If none of them work, just
        return the original program name, 'exe'.
        """

        # Registry-derived MSVC directories take precedence over PATH.
        for p in self.__paths:
            fn = os.path.join(os.path.abspath(p), exe)
            if os.path.isfile(fn):
                return fn

        # didn't find it; try existing path
        # NOTE(review): relies on os.environ being case-insensitive for
        # the 'Path' key, which holds on Windows only.
        for p in string.split(os.environ['Path'],';'):
            fn = os.path.join(os.path.abspath(p),exe)
            if os.path.isfile(fn):
                return fn

        return exe
+
+    def get_msvc_paths(self, path, platform='x86'):
+        """Get a list of devstudio directories (include, lib or path).
+
+        Return a list of strings.  The list will be empty if unable to
+        access the registry or appropriate registry keys not found.
+        """
+
+        if not _can_read_reg:
+            return []
+
+        path = path + " dirs"
+        if self.__version >= 7:
+            key = (r"%s\%0.1f\VC\VC_OBJECTS_PLATFORM_INFO\Win32\Directories"
+                   % (self.__root, self.__version))
+        else:
+            key = (r"%s\6.0\Build System\Components\Platforms"
+                   r"\Win32 (%s)\Directories" % (self.__root, platform))
+
+        for base in HKEYS:
+            d = read_values(base, key)
+            if d:
+                if self.__version >= 7:
+                    return string.split(self.__macros.sub(d[path]), ";")
+                else:
+                    return string.split(d[path], ";")
+        # MSVC 6 seems to create the registry entries we need only when
+        # the GUI is run.
+        if self.__version == 6:
+            for base in HKEYS:
+                if read_values(base, r"%s\6.0" % self.__root) is not None:
+                    self.warn("It seems you have Visual Studio 6 installed, "
+                        "but the expected registry settings are not present.\n"
+                        "You must at least run the Visual Studio GUI once "
+                        "so that these entries are created.")
+                break
+        return []
+
    def set_path_env_var(self, name):
        """Set environment variable 'name' to an MSVC path type value.

        This is equivalent to a SET command prior to execution of spawned
        commands.
        """

        # The registry stores the linker search path under "library dirs",
        # while the environment variable the tools read is called "lib".
        if name == "lib":
            p = self.get_msvc_paths("library")
        else:
            p = self.get_msvc_paths(name)
        if p:
            os.environ[name] = string.join(p, ';')
diff --git a/depot_tools/release/win/python_24/Lib/distutils/mwerkscompiler.py b/depot_tools/release/win/python_24/Lib/distutils/mwerkscompiler.py
new file mode 100644
index 0000000..eaa2ccb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/mwerkscompiler.py
@@ -0,0 +1,248 @@
+"""distutils.mwerkscompiler
+
+Contains MWerksCompiler, an implementation of the abstract CCompiler class
+for MetroWerks CodeWarrior on the Macintosh. Needs work to support CW on
+Windows."""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: mwerkscompiler.py,v 1.13 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, os, string
+from types import *
+from distutils.errors import \
+     DistutilsExecError, DistutilsPlatformError, \
+     CompileError, LibError, LinkError
+from distutils.ccompiler import \
+     CCompiler, gen_preprocess_options, gen_lib_options
+import distutils.util
+import distutils.dir_util
+from distutils import log
+import mkcwproject
+
+class MWerksCompiler (CCompiler) :
+    """Concrete class that implements an interface to MetroWerks CodeWarrior,
+       as defined by the CCompiler abstract class."""
+
+    compiler_type = 'mwerks'
+
+    # Just set this so CCompiler's constructor doesn't barf.  We currently
+    # don't use the 'set_executables()' bureaucracy provided by CCompiler,
+    # as it really isn't necessary for this sort of single-compiler class.
+    # Would be nice to have a consistent interface with UnixCCompiler,
+    # though, so it's worth thinking about.
+    executables = {}
+
+    # Private class data (need to distinguish C from C++ source for compiler)
+    _c_extensions = ['.c']
+    _cpp_extensions = ['.cc', '.cpp', '.cxx']
+    _rc_extensions = ['.r']
+    _exp_extension = '.exp'
+
+    # Needed for the filename generation methods provided by the
+    # base class, CCompiler.
+    src_extensions = (_c_extensions + _cpp_extensions +
+                      _rc_extensions)
+    res_extension = '.rsrc'
+    obj_extension = '.obj' # Not used, really
+    static_lib_extension = '.lib'
+    shared_lib_extension = '.slb'
+    static_lib_format = shared_lib_format = '%s%s'
+    exe_extension = ''
+
+
+    def __init__ (self,
+                  verbose=0,
+                  dry_run=0,
+                  force=0):
+
+        CCompiler.__init__ (self, verbose, dry_run, force)
+
+
+    def compile (self,
+                 sources,
+                 output_dir=None,
+                 macros=None,
+                 include_dirs=None,
+                 debug=0,
+                 extra_preargs=None,
+                 extra_postargs=None,
+                 depends=None):
+        (output_dir, macros, include_dirs) = \
+           self._fix_compile_args (output_dir, macros, include_dirs)
+        self.__sources = sources
+        self.__macros = macros
+        self.__include_dirs = include_dirs
+        # Don't need extra_preargs and extra_postargs for CW
+        return []
+
+    def link (self,
+              target_desc,
+              objects,
+              output_filename,
+              output_dir=None,
+              libraries=None,
+              library_dirs=None,
+              runtime_library_dirs=None,
+              export_symbols=None,
+              debug=0,
+              extra_preargs=None,
+              extra_postargs=None,
+              build_temp=None,
+              target_lang=None):
+        # First fixup.
+        (objects, output_dir) = self._fix_object_args (objects, output_dir)
+        (libraries, library_dirs, runtime_library_dirs) = \
+            self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
+
+        # First examine a couple of options for things that aren't implemented yet
+        if not target_desc in (self.SHARED_LIBRARY, self.SHARED_OBJECT):
+            raise DistutilsPlatformError, 'Can only make SHARED_LIBRARY or SHARED_OBJECT targets on the Mac'
+        if runtime_library_dirs:
+            raise DistutilsPlatformError, 'Runtime library dirs not implemented yet'
+        if extra_preargs or extra_postargs:
+            raise DistutilsPlatformError, 'Runtime library dirs not implemented yet'
+        if len(export_symbols) != 1:
+            raise DistutilsPlatformError, 'Need exactly one export symbol'
+        # Next there are various things for which we need absolute pathnames.
+        # This is because we (usually) create the project in a subdirectory of
+        # where we are now, and keeping the paths relative is too much work right
+        # now.
+        sources = map(self._filename_to_abs, self.__sources)
+        include_dirs = map(self._filename_to_abs, self.__include_dirs)
+        if objects:
+            objects = map(self._filename_to_abs, objects)
+        else:
+            objects = []
+        if build_temp:
+            build_temp = self._filename_to_abs(build_temp)
+        else:
+            build_temp = os.curdir()
+        if output_dir:
+            output_filename = os.path.join(output_dir, output_filename)
+        # The output filename needs special handling: splitting it into dir and
+        # filename part. Actually I'm not sure this is really needed, but it
+        # can't hurt.
+        output_filename = self._filename_to_abs(output_filename)
+        output_dir, output_filename = os.path.split(output_filename)
+        # Now we need the short names of a couple of things for putting them
+        # into the project.
+        if output_filename[-8:] == '.ppc.slb':
+            basename = output_filename[:-8]
+        elif output_filename[-11:] == '.carbon.slb':
+            basename = output_filename[:-11]
+        else:
+            basename = os.path.strip(output_filename)[0]
+        projectname = basename + '.mcp'
+        targetname = basename
+        xmlname = basename + '.xml'
+        exportname = basename + '.mcp.exp'
+        prefixname = 'mwerks_%s_config.h'%basename
+        # Create the directories we need
+        distutils.dir_util.mkpath(build_temp, dry_run=self.dry_run)
+        distutils.dir_util.mkpath(output_dir, dry_run=self.dry_run)
+        # And on to filling in the parameters for the project builder
+        settings = {}
+        settings['mac_exportname'] = exportname
+        settings['mac_outputdir'] = output_dir
+        settings['mac_dllname'] = output_filename
+        settings['mac_targetname'] = targetname
+        settings['sysprefix'] = sys.prefix
+        settings['mac_sysprefixtype'] = 'Absolute'
+        sourcefilenames = []
+        sourcefiledirs = []
+        for filename in sources + objects:
+            dirname, filename = os.path.split(filename)
+            sourcefilenames.append(filename)
+            if not dirname in sourcefiledirs:
+                sourcefiledirs.append(dirname)
+        settings['sources'] = sourcefilenames
+        settings['libraries'] = libraries
+        settings['extrasearchdirs'] = sourcefiledirs + include_dirs + library_dirs
+        if self.dry_run:
+            print 'CALLING LINKER IN', os.getcwd()
+            for key, value in settings.items():
+                print '%20.20s %s'%(key, value)
+            return
+        # Build the export file
+        exportfilename = os.path.join(build_temp, exportname)
+        log.debug("\tCreate export file %s", exportfilename)
+        fp = open(exportfilename, 'w')
+        fp.write('%s\n'%export_symbols[0])
+        fp.close()
+        # Generate the prefix file, if needed, and put it in the settings
+        if self.__macros:
+            prefixfilename = os.path.join(os.getcwd(), os.path.join(build_temp, prefixname))
+            fp = open(prefixfilename, 'w')
+            fp.write('#include "mwerks_shcarbon_config.h"\n')
+            for name, value in self.__macros:
+                if value is None:
+                    fp.write('#define %s\n'%name)
+                else:
+                    fp.write('#define %s %s\n'%(name, value))
+            fp.close()
+            settings['prefixname'] = prefixname
+
+        # Build the XML file. We need the full pathname (only lateron, really)
+        # because we pass this pathname to CodeWarrior in an AppleEvent, and CW
+        # doesn't have a clue about our working directory.
+        xmlfilename = os.path.join(os.getcwd(), os.path.join(build_temp, xmlname))
+        log.debug("\tCreate XML file %s", xmlfilename)
+        xmlbuilder = mkcwproject.cwxmlgen.ProjectBuilder(settings)
+        xmlbuilder.generate()
+        xmldata = settings['tmp_projectxmldata']
+        fp = open(xmlfilename, 'w')
+        fp.write(xmldata)
+        fp.close()
+        # Generate the project. Again a full pathname.
+        projectfilename = os.path.join(os.getcwd(), os.path.join(build_temp, projectname))
+        log.debug('\tCreate project file %s', projectfilename)
+        mkcwproject.makeproject(xmlfilename, projectfilename)
+        # And build it
+        log.debug('\tBuild project')
+        mkcwproject.buildproject(projectfilename)
+
+    def _filename_to_abs(self, filename):
+        # Some filenames seem to be unix-like. Convert to Mac names.
+##        if '/' in filename and ':' in filename:
+##           raise DistutilsPlatformError, 'Filename may be Unix or Mac style: %s'%filename
+##        if '/' in filename:
+##           filename = macurl2path(filename)
+        filename = distutils.util.convert_path(filename)
+        if not os.path.isabs(filename):
+            curdir = os.getcwd()
+            filename = os.path.join(curdir, filename)
+        # Finally remove .. components
+        components = string.split(filename, ':')
+        for i in range(1, len(components)):
+            if components[i] == '..':
+                components[i] = ''
+        return string.join(components, ':')
+
+    def library_dir_option (self, dir):
+        """Return the compiler option to add 'dir' to the list of
+        directories searched for libraries.
+        """
+        return # XXXX Not correct...
+
+    def runtime_library_dir_option (self, dir):
+        """Return the compiler option to add 'dir' to the list of
+        directories searched for runtime libraries.
+        """
+        # Nothing needed or Mwerks/Mac.
+        return
+
+    def library_option (self, lib):
+        """Return the compiler option to add 'dir' to the list of libraries
+        linked into the shared library or executable.
+        """
+        return
+
+    def find_library_file (self, dirs, lib, debug=0):
+        """Search the specified list of directories for a static or shared
+        library file 'lib' and return the full path to that file.  If
+        'debug' true, look for a debugging version (if that makes sense on
+        the current platform).  Return None if 'lib' wasn't found in any of
+        the specified directories.
+        """
+        return 0
diff --git a/depot_tools/release/win/python_24/Lib/distutils/spawn.py b/depot_tools/release/win/python_24/Lib/distutils/spawn.py
new file mode 100644
index 0000000..c2d593bd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/spawn.py
@@ -0,0 +1,201 @@
+"""distutils.spawn
+
+Provides the 'spawn()' function, a front-end to various platform-
+specific functions for launching another program in a sub-process.
+Also provides the 'find_executable()' to search the path for a given
+executable name.
+"""
+
+# This module should be kept compatible with Python 2.1.
+
+__revision__ = "$Id: spawn.py,v 1.19 2004/11/10 22:23:14 loewis Exp $"
+
+import sys, os, string
+from distutils.errors import *
+from distutils import log
+
def spawn (cmd,
           search_path=1,
           verbose=0,
           dry_run=0):

    """Run another program, specified as a command list 'cmd', in a new
    process.  'cmd' is just the argument list for the new process, ie.
    cmd[0] is the program to run and cmd[1:] are the rest of its arguments.
    There is no way to run a program with a name different from that of its
    executable.

    If 'search_path' is true (the default), the system's executable
    search path will be used to find the program; otherwise, cmd[0]
    must be the exact path to the executable.  If 'dry_run' is true,
    the command will not actually be run.

    'verbose' is accepted for backward compatibility; output goes
    through distutils.log instead.

    Raise DistutilsExecError if running the program fails in any way; just
    return on success.
    """
    # Dispatch to the platform-specific implementation.
    if os.name == 'posix':
        _spawn_posix(cmd, search_path, dry_run=dry_run)
    elif os.name == 'nt':
        _spawn_nt(cmd, search_path, dry_run=dry_run)
    elif os.name == 'os2':
        _spawn_os2(cmd, search_path, dry_run=dry_run)
    else:
        raise DistutilsPlatformError, \
              "don't know how to spawn programs on platform '%s'" % os.name

# spawn ()
+
+
+def _nt_quote_args (args):
+    """Quote command-line arguments for DOS/Windows conventions: just
+    wraps every argument which contains blanks in double quotes, and
+    returns a new argument list.
+    """
+
+    # XXX this doesn't seem very robust to me -- but if the Windows guys
+    # say it'll work, I guess I'll have to accept it.  (What if an arg
+    # contains quotes?  What other magic characters, other than spaces,
+    # have to be escaped?  Is there an escaping mechanism other than
+    # quoting?)
+
+    for i in range(len(args)):
+        if string.find(args[i], ' ') != -1:
+            args[i] = '"%s"' % args[i]
+    return args
+
def _spawn_nt (cmd,
               search_path=1,
               verbose=0,
               dry_run=0):

    """Run 'cmd' on Windows via os.spawnv, resolving cmd[0] against the
    PATH when 'search_path' is true.  Raises DistutilsExecError if the
    program cannot be started or exits with a non-zero status.
    """
    executable = cmd[0]
    cmd = _nt_quote_args(cmd)
    if search_path:
        # either we find one or it stays the same
        executable = find_executable(executable) or executable
    log.info(string.join([executable] + cmd[1:], ' '))
    if not dry_run:
        # spawn for NT requires a full path to the .exe
        try:
            rc = os.spawnv(os.P_WAIT, executable, cmd)
        except OSError, exc:
            # this seems to happen when the command isn't found
            raise DistutilsExecError, \
                  "command '%s' failed: %s" % (cmd[0], exc[-1])
        if rc != 0:
            # and this reflects the command running but failing
            raise DistutilsExecError, \
                  "command '%s' failed with exit status %d" % (cmd[0], rc)
+
+
def _spawn_os2 (cmd,
                search_path=1,
                verbose=0,
                dry_run=0):

    """Run 'cmd' on OS/2 (EMX) via os.spawnv, resolving cmd[0] against
    the PATH when 'search_path' is true.  Raises DistutilsExecError on
    launch failure or non-zero exit status.
    """
    executable = cmd[0]
    #cmd = _nt_quote_args(cmd)
    if search_path:
        # either we find one or it stays the same
        executable = find_executable(executable) or executable
    log.info(string.join([executable] + cmd[1:], ' '))
    if not dry_run:
        # spawnv for OS/2 EMX requires a full path to the .exe
        try:
            rc = os.spawnv(os.P_WAIT, executable, cmd)
        except OSError, exc:
            # this seems to happen when the command isn't found
            raise DistutilsExecError, \
                  "command '%s' failed: %s" % (cmd[0], exc[-1])
        if rc != 0:
            # and this reflects the command running but failing
            # NOTE(review): this print duplicates the exception message --
            # looks like a debugging leftover; confirm before removing.
            print "command '%s' failed with exit status %d" % (cmd[0], rc)
            raise DistutilsExecError, \
                  "command '%s' failed with exit status %d" % (cmd[0], rc)
+
+
def _spawn_posix (cmd,
                  search_path=1,
                  verbose=0,
                  dry_run=0):

    """Run 'cmd' on POSIX via fork/exec and wait for it to finish.

    Uses os.execvp when 'search_path' is true, os.execv otherwise.
    The wait loop retries on EINTR and keeps waiting while the child is
    merely stopped.  Raises DistutilsExecError if the child cannot be
    executed, is killed by a signal, or exits with non-zero status.
    """
    log.info(string.join(cmd, ' '))
    if dry_run:
        return
    exec_fn = search_path and os.execvp or os.execv

    pid = os.fork()

    if pid == 0:                        # in the child
        try:
            #print "cmd[0] =", cmd[0]
            #print "cmd =", cmd
            exec_fn(cmd[0], cmd)
        except OSError, e:
            sys.stderr.write("unable to execute %s: %s\n" %
                             (cmd[0], e.strerror))
            os._exit(1)

        # exec only returns on failure; make sure the child never falls
        # through into the parent's code path.
        sys.stderr.write("unable to execute %s for unknown reasons" % cmd[0])
        os._exit(1)


    else:                               # in the parent
        # Loop until the child either exits or is terminated by a signal
        # (ie. keep waiting if it's merely stopped)
        while 1:
            try:
                (pid, status) = os.waitpid(pid, 0)
            except OSError, exc:
                import errno
                if exc.errno == errno.EINTR:
                    continue
                raise DistutilsExecError, \
                      "command '%s' failed: %s" % (cmd[0], exc[-1])
            if os.WIFSIGNALED(status):
                raise DistutilsExecError, \
                      "command '%s' terminated by signal %d" % \
                      (cmd[0], os.WTERMSIG(status))

            elif os.WIFEXITED(status):
                exit_status = os.WEXITSTATUS(status)
                if exit_status == 0:
                    return              # hey, it succeeded!
                else:
                    raise DistutilsExecError, \
                          "command '%s' failed with exit status %d" % \
                          (cmd[0], exit_status)

            elif os.WIFSTOPPED(status):
                continue

            else:
                raise DistutilsExecError, \
                      "unknown error executing '%s': termination status %d" % \
                      (cmd[0], status)
# _spawn_posix ()
+
+
def find_executable(executable, path=None):
    """Try to find 'executable' in the directories listed in 'path' (a
    string listing directories separated by 'os.pathsep'; defaults to
    os.environ['PATH']).  Returns the complete filename or None if not
    found.
    """
    if path is None:
        path = os.environ['PATH']
    base, ext = os.path.splitext(executable)
    # On Windows/OS2 an executable name without '.exe' gets it appended.
    if (sys.platform == 'win32' or os.name == 'os2') and ext != '.exe':
        executable = executable + '.exe'
    if os.path.isfile(executable):
        # Already a usable (relative or absolute) filename.
        return executable
    for dirname in path.split(os.pathsep):
        candidate = os.path.join(dirname, executable)
        if os.path.isfile(candidate):
            # the file exists, we have a shot at spawn working
            return candidate
    return None

# find_executable()
diff --git a/depot_tools/release/win/python_24/Lib/distutils/sysconfig.py b/depot_tools/release/win/python_24/Lib/distutils/sysconfig.py
new file mode 100644
index 0000000..d895095
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/sysconfig.py
@@ -0,0 +1,511 @@
+"""Provide access to Python's configuration information.  The specific
+configuration variables available depend heavily on the platform and
+configuration.  The values may be retrieved using
+get_config_var(name), and the list of variables is available via
+get_config_vars().keys().  Additional convenience functions are also
+available.
+
+Written by:   Fred L. Drake, Jr.
+Email:        <fdrake@acm.org>
+"""
+
+__revision__ = "$Id: sysconfig.py,v 1.61.2.1 2005/01/06 23:16:03 jackjansen Exp $"
+
+import os
+import re
+import string
+import sys
+
+from errors import DistutilsPlatformError
+
# These are needed in a couple of spots, so just compute them once.
PREFIX = os.path.normpath(sys.prefix)
EXEC_PREFIX = os.path.normpath(sys.exec_prefix)

# python_build: (Boolean) if true, we're either building Python or
# building an extension with an un-installed Python, so we use
# different (hard-wired) directories.
# The "Modules/Setup" file next to the interpreter exists only in a
# source/build tree, so its presence is used as the landmark.

argv0_path = os.path.dirname(os.path.abspath(sys.executable))
landmark = os.path.join(argv0_path, "Modules", "Setup")

python_build = os.path.isfile(landmark)

# Only needed to compute python_build; don't leak them as module globals.
del argv0_path, landmark
+
+
def get_python_version ():
    """Return the major.minor Python version as a string, without the
    patchlevel -- e.g. '1.5' or '2.2'.
    """
    version = sys.version
    return version[:3]
+
+
def get_python_inc(plat_specific=0, prefix=None):
    """Return the directory containing installed Python header files.

    If 'plat_specific' is false (the default), this is the path to the
    non-platform-specific header files, i.e. Python.h and so on;
    otherwise, this is the path to platform-specific header files
    (namely pyconfig.h).

    If 'prefix' is supplied, use it instead of sys.prefix or
    sys.exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        prefix = plat_specific and EXEC_PREFIX or PREFIX
    if os.name == "posix":
        if python_build:
            # Running from a build tree: headers live alongside (or one
            # level above) the interpreter itself.
            base = os.path.dirname(os.path.abspath(sys.executable))
            if plat_specific:
                return base
            inc_dir = os.path.join(base, "Include")
            if not os.path.exists(inc_dir):
                inc_dir = os.path.join(os.path.dirname(base), "Include")
            return inc_dir
        return os.path.join(prefix, "include", "python" + sys.version[:3])
    if os.name == "nt":
        return os.path.join(prefix, "include")
    if os.name == "mac":
        if plat_specific:
            return os.path.join(prefix, "Mac", "Include")
        return os.path.join(prefix, "Include")
    if os.name == "os2":
        return os.path.join(prefix, "Include")
    raise DistutilsPlatformError(
        "I don't know where Python installs its C header files "
        "on platform '%s'" % os.name)
+
+
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
    """Return the directory containing the Python library (standard or
    site additions).

    If 'plat_specific' is true, return the directory containing
    platform-specific modules, i.e. any module from a non-pure-Python
    module distribution; otherwise, return the platform-shared library
    directory.  If 'standard_lib' is true, return the directory
    containing standard Python library modules; otherwise, return the
    directory for site-specific modules.

    If 'prefix' is supplied, use it instead of sys.prefix or
    sys.exec_prefix -- i.e., ignore 'plat_specific'.
    """
    if prefix is None:
        prefix = plat_specific and EXEC_PREFIX or PREFIX

    if os.name == "posix":
        libpython = os.path.join(prefix,
                                 "lib", "python" + get_python_version())
        if standard_lib:
            return libpython
        else:
            return os.path.join(libpython, "site-packages")

    elif os.name == "nt":
        if standard_lib:
            return os.path.join(prefix, "Lib")
        else:
            if sys.version < "2.2":
                return prefix
            else:
                # Use 'prefix' (not the global PREFIX) so an explicitly
                # supplied prefix is honored, as the docstring promises
                # and as the posix branch already does.
                return os.path.join(prefix, "Lib", "site-packages")

    elif os.name == "mac":
        if plat_specific:
            if standard_lib:
                return os.path.join(prefix, "Lib", "lib-dynload")
            else:
                return os.path.join(prefix, "Lib", "site-packages")
        else:
            if standard_lib:
                return os.path.join(prefix, "Lib")
            else:
                return os.path.join(prefix, "Lib", "site-packages")

    elif os.name == "os2":
        # Same fix as the nt branch: honor the 'prefix' argument.
        if standard_lib:
            return os.path.join(prefix, "Lib")
        else:
            return os.path.join(prefix, "Lib", "site-packages")

    else:
        raise DistutilsPlatformError(
            "I don't know where Python installs its library "
            "on platform '%s'" % os.name)
+
+
def customize_compiler(compiler):
    """Do any platform-specific customization of a CCompiler instance.

    Mainly needed on Unix, so we can plug in the information that
    varies across Unices and is stored in Python's Makefile.
    """
    if compiler.compiler_type == "unix":
        # Seed the tool command lines from the Makefile Python was built
        # with; the environment variables below override or extend them.
        (cc, cxx, opt, basecflags, ccshared, ldshared, so_ext) = \
            get_config_vars('CC', 'CXX', 'OPT', 'BASECFLAGS', 'CCSHARED', 'LDSHARED', 'SO')

        if os.environ.has_key('CC'):
            cc = os.environ['CC']
        if os.environ.has_key('CXX'):
            cxx = os.environ['CXX']
        if os.environ.has_key('LDSHARED'):
            ldshared = os.environ['LDSHARED']
        if os.environ.has_key('CPP'):
            cpp = os.environ['CPP']
        else:
            # Derive the preprocessor from the compiler.
            cpp = cc + " -E"           # not always
        if os.environ.has_key('LDFLAGS'):
            ldshared = ldshared + ' ' + os.environ['LDFLAGS']
        if basecflags:
            opt = basecflags + ' ' + opt
        if os.environ.has_key('CFLAGS'):
            # CFLAGS affects both compiling and shared-object linking.
            opt = opt + ' ' + os.environ['CFLAGS']
            ldshared = ldshared + ' ' + os.environ['CFLAGS']
        if os.environ.has_key('CPPFLAGS'):
            # CPPFLAGS feeds preprocessing, compiling, and linking alike.
            cpp = cpp + ' ' + os.environ['CPPFLAGS']
            opt = opt + ' ' + os.environ['CPPFLAGS']
            ldshared = ldshared + ' ' + os.environ['CPPFLAGS']

        cc_cmd = cc + ' ' + opt
        compiler.set_executables(
            preprocessor=cpp,
            compiler=cc_cmd,
            compiler_so=cc_cmd + ' ' + ccshared,
            compiler_cxx=cxx,
            linker_so=ldshared,
            linker_exe=cc)

        compiler.shared_lib_extension = so_ext
+
+
def get_config_h_filename():
    """Return full pathname of installed pyconfig.h file."""
    if python_build:
        # Running from a source/build tree: the header sits in the
        # current directory.
        inc_dir = os.curdir
    else:
        inc_dir = get_python_inc(plat_specific=1)
    # The file was renamed from config.h to pyconfig.h in Python 2.2.
    header = (sys.version < '2.2') and 'config.h' or 'pyconfig.h'
    return os.path.join(inc_dir, header)
+
+
def get_makefile_filename():
    """Return full pathname of installed Makefile from the Python build."""
    if python_build:
        # Build tree: the Makefile sits next to the interpreter.
        build_dir = os.path.dirname(sys.executable)
        return os.path.join(build_dir, "Makefile")
    return os.path.join(get_python_lib(plat_specific=1, standard_lib=1),
                        "config", "Makefile")
+
+
def parse_config_h(fp, g=None):
    """Parse a config.h-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    if g is None:
        g = {}
    # "#define NAME value" and "/* #undef NAME */" are the only two
    # line shapes we care about; everything else is ignored.
    define_rx = re.compile("#define ([A-Z][A-Z0-9_]+) (.*)\n")
    undef_rx = re.compile("/[*] #undef ([A-Z][A-Z0-9_]+) [*]/\n")
    while 1:
        line = fp.readline()
        if not line:
            break
        match = define_rx.match(line)
        if match:
            name, value = match.group(1, 2)
            # Store integer values as ints, everything else as strings.
            try:
                value = int(value)
            except ValueError:
                pass
            g[name] = value
            continue
        match = undef_rx.match(line)
        if match:
            # An undefined symbol is recorded as 0.
            g[match.group(1)] = 0
    return g
+
+
+# Regexes needed for parsing Makefile (and similar syntaxes,
+# like old-style Setup files).
+_variable_rx = re.compile("([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
+_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
+_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
+
def parse_makefile(fn, g=None):
    """Parse a Makefile-style file.

    A dictionary containing name/value pairs is returned.  If an
    optional dictionary is passed in as the second argument, it is
    used instead of a new dictionary.
    """
    from distutils.text_file import TextFile
    # TextFile strips comments, skips blank lines, and joins
    # backslash-continued lines for us.
    fp = TextFile(fn, strip_comments=1, skip_blanks=1, join_lines=1)

    if g is None:
        g = {}
    done = {}      # fully-resolved variables: name -> value
    notdone = {}   # variables whose values still contain $-references

    # Pass 1: collect raw assignments, deferring any value that still
    # references another variable.
    while 1:
        line = fp.readline()
        if line is None:                # eof
            break
        m = _variable_rx.match(line)
        if m:
            n, v = m.group(1, 2)
            v = string.strip(v)
            if "$" in v:
                notdone[n] = v
            else:
                try: v = int(v)
                except ValueError: pass
                done[n] = v

    # do variable interpolation here
    # Pass 2: repeatedly substitute resolved values until nothing is
    # left in 'notdone' (unresolvable entries are dropped).
    while notdone:
        for name in notdone.keys():
            value = notdone[name]
            m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
            if m:
                n = m.group(1)
                if done.has_key(n):
                    # Substitute the resolved value, then decide whether
                    # this variable is itself now fully resolved.
                    after = value[m.end():]
                    value = value[:m.start()] + str(done[n]) + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try: value = int(value)
                        except ValueError:
                            done[name] = string.strip(value)
                        else:
                            done[name] = value
                        del notdone[name]
                elif notdone.has_key(n):
                    # get it on a subsequent round
                    pass
                else:
                    # Reference to an undefined variable expands to "".
                    done[n] = ""
                    after = value[m.end():]
                    value = value[:m.start()] + after
                    if "$" in after:
                        notdone[name] = value
                    else:
                        try: value = int(value)
                        except ValueError:
                            done[name] = string.strip(value)
                        else:
                            done[name] = value
                        del notdone[name]
            else:
                # bogus variable reference; just drop it since we can't deal
                del notdone[name]

    fp.close()

    # save the results in the global dictionary
    g.update(done)
    return g
+
+
def expand_makefile_vars(s, vars):
    """Expand Makefile-style variables -- "${foo}" or "$(foo)" -- in
    'string' according to 'vars' (a dictionary mapping variable names to
    values).  Variables not present in 'vars' are silently expanded to the
    empty string.  The variable values in 'vars' should not contain further
    variable expansions; if 'vars' is the output of 'parse_makefile()',
    you're fine.  Returns a variable-expanded version of 's'.
    """

    # This algorithm does multiple expansion, so if vars['foo'] contains
    # "${bar}", it will expand ${foo} to ${bar}, and then expand
    # ${bar}... and so forth.  This is fine as long as 'vars' comes from
    # 'parse_makefile()', which takes care of such expansions eagerly,
    # according to make's variable expansion semantics.

    while 1:
        m = _findvar1_rx.search(s) or _findvar2_rx.search(s)
        if m:
            (beg, end) = m.span()
            # Default missing variables to '' (as documented) instead of
            # letting vars.get() return None and blow up the string
            # concatenation; str() copes with int values produced by
            # parse_makefile().
            s = s[0:beg] + str(vars.get(m.group(1), '')) + s[end:]
        else:
            break
    return s
+
+
+_config_vars = None
+
def _init_posix():
    """Initialize the module as appropriate for POSIX systems."""
    g = {}
    # load the installed Makefile:
    try:
        filename = get_makefile_filename()
        parse_makefile(filename, g)
    except IOError, msg:
        my_msg = "invalid Python installation: unable to open %s" % filename
        if hasattr(msg, "strerror"):
            my_msg = my_msg + " (%s)" % msg.strerror

        raise DistutilsPlatformError(my_msg)

    # On MacOSX we need to check the setting of the environment variable
    # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
    # it needs to be compatible.
    # If it isn't set we set it to the configure-time value
    if sys.platform == 'darwin' and g.has_key('CONFIGURE_MACOSX_DEPLOYMENT_TARGET'):
        cfg_target = g['CONFIGURE_MACOSX_DEPLOYMENT_TARGET']
        cur_target = os.getenv('MACOSX_DEPLOYMENT_TARGET', '')
        if cur_target == '':
            cur_target = cfg_target
            # NOTE(review): os.putenv() does not update os.environ, so
            # the value is visible to child processes only -- confirm
            # that is the intent here.
            os.putenv('MACOSX_DEPLOYMENT_TARGET', cfg_target)
        if cfg_target != cur_target:
            my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
                % (cur_target, cfg_target))
            raise DistutilsPlatformError(my_msg)

    # On AIX, there are wrong paths to the linker scripts in the Makefile
    # -- these paths are relative to the Python source, but when installed
    # the scripts are in another directory.
    if python_build:
        # Running from a build tree: link extensions the same way Python
        # itself was linked.
        g['LDSHARED'] = g['BLDSHARED']

    elif sys.version < '2.1':
        # The following two branches are for 1.5.2 compatibility.
        if sys.platform == 'aix4':          # what about AIX 3.x ?
            # Linker script is in the config directory, not in Modules as the
            # Makefile says.
            python_lib = get_python_lib(standard_lib=1)
            ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
            python_exp = os.path.join(python_lib, 'config', 'python.exp')

            g['LDSHARED'] = "%s %s -bI:%s" % (ld_so_aix, g['CC'], python_exp)

        elif sys.platform == 'beos':
            # Linker script is in the config directory.  In the Makefile it is
            # relative to the srcdir, which after installation no longer makes
            # sense.
            python_lib = get_python_lib(standard_lib=1)
            linkerscript_path = string.split(g['LDSHARED'])[0]
            linkerscript_name = os.path.basename(linkerscript_path)
            linkerscript = os.path.join(python_lib, 'config',
                                        linkerscript_name)

            # XXX this isn't the right place to do this: adding the Python
            # library to the link, if needed, should be in the "build_ext"
            # command.  (It's also needed for non-MS compilers on Windows, and
            # it's taken care of for them by the 'build_ext.get_libraries()'
            # method.)
            g['LDSHARED'] = ("%s -L%s/lib -lpython%s" %
                             (linkerscript, PREFIX, sys.version[0:3]))

    # Publish the parsed variables as the module-wide cache.
    global _config_vars
    _config_vars = g
+
+
def _init_nt():
    """Initialize the module as appropriate for NT"""
    global _config_vars
    cfg = {}
    # basic install directories
    cfg['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
    cfg['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
    # XXX hmmm.. a normal install puts include files here
    cfg['INCLUDEPY'] = get_python_inc(plat_specific=0)
    # extension-module and executable suffixes on Windows
    cfg['SO'] = '.pyd'
    cfg['EXE'] = ".exe"
    _config_vars = cfg
+
+
def _init_mac():
    """Initialize the module as appropriate for Macintosh systems"""
    global _config_vars
    cfg = {}
    # basic install directories
    cfg['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
    cfg['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)

    # XXX hmmm.. a normal install puts include files here
    cfg['INCLUDEPY'] = get_python_inc(plat_specific=0)

    # Extension suffix depends on the MacOS runtime model, when known.
    import MacOS
    if hasattr(MacOS, 'runtimemodel'):
        cfg['SO'] = '.%s.slb' % MacOS.runtimemodel
    else:
        cfg['SO'] = '.ppc.slb'

    # XXX are these used anywhere?
    cfg['install_lib'] = os.path.join(EXEC_PREFIX, "Lib")
    cfg['install_platlib'] = os.path.join(EXEC_PREFIX, "Mac", "Lib")

    # These are used by the extension module build
    cfg['srcdir'] = ':'
    _config_vars = cfg
+
+
def _init_os2():
    """Initialize the module as appropriate for OS/2"""
    global _config_vars
    cfg = {}
    # basic install directories
    cfg['LIBDEST'] = get_python_lib(plat_specific=0, standard_lib=1)
    cfg['BINLIBDEST'] = get_python_lib(plat_specific=1, standard_lib=1)
    # XXX hmmm.. a normal install puts include files here
    cfg['INCLUDEPY'] = get_python_inc(plat_specific=0)
    # extension-module and executable suffixes on OS/2
    cfg['SO'] = '.pyd'
    cfg['EXE'] = ".exe"
    _config_vars = cfg
+
+
def get_config_vars(*args):
    """With no arguments, return a dictionary of all configuration
    variables relevant for the current platform.  Generally this includes
    everything needed to build extensions and install both pure modules and
    extensions.  On Unix, this means every variable defined in Python's
    installed Makefile; on Windows and Mac OS it's a much smaller set.

    With arguments, return a list of values that result from looking up
    each argument in the configuration variable dictionary.
    """
    global _config_vars
    if _config_vars is None:
        # Lazily populate the cache via the platform-specific helper
        # (_init_posix, _init_nt, ...); each helper sets _config_vars.
        func = globals().get("_init_" + os.name)
        if func:
            func()
        else:
            # Unknown platform: fall back to an empty variable set.
            _config_vars = {}

        # Normalized versions of prefix and exec_prefix are handy to have;
        # in fact, these are the standard versions used most places in the
        # Distutils.
        _config_vars['prefix'] = PREFIX
        _config_vars['exec_prefix'] = EXEC_PREFIX

    if args:
        # Missing names yield None rather than raising KeyError.
        vals = []
        for name in args:
            vals.append(_config_vars.get(name))
        return vals
    else:
        return _config_vars
+
def get_config_var(name):
    """Return the value of a single configuration variable.

    Equivalent to get_config_vars().get(name); returns None if 'name'
    is unknown.
    """
    all_vars = get_config_vars()
    return all_vars.get(name)
diff --git a/depot_tools/release/win/python_24/Lib/distutils/text_file.py b/depot_tools/release/win/python_24/Lib/distutils/text_file.py
new file mode 100644
index 0000000..47c78d6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/text_file.py
@@ -0,0 +1,382 @@
+"""text_file
+
+provides the TextFile class, which gives an interface to text files
+that (optionally) takes care of stripping comments, ignoring blank
+lines, and joining lines with backslashes."""
+
+__revision__ = "$Id: text_file.py,v 1.15 2002/11/14 02:25:41 akuchling Exp $"
+
+from types import *
+import sys, os, string
+
+
class TextFile:

    """Provides a file-like object that takes care of all the things you
       commonly want to do when processing a text file that has some
       line-by-line syntax: strip comments (as long as "#" is your
       comment character), skip blank lines, join adjacent lines by
       escaping the newline (ie. backslash at end of line), strip
       leading and/or trailing whitespace.  All of these are optional
       and independently controllable.

       Provides a 'warn()' method so you can generate warning messages that
       report physical line number, even if the logical line in question
       spans multiple physical lines.  Also provides 'unreadline()' for
       implementing line-at-a-time lookahead.

       Constructor is called as:

           TextFile (filename=None, file=None, **options)

       It bombs (RuntimeError) if both 'filename' and 'file' are None;
       'filename' should be a string, and 'file' a file object (or
       something that provides 'readline()' and 'close()' methods).  It is
       recommended that you supply at least 'filename', so that TextFile
       can include it in warning messages.  If 'file' is not supplied,
       TextFile creates its own using the 'open()' builtin.

       The options are all boolean, and affect the value returned by
       'readline()':
         strip_comments [default: true]
           strip from "#" to end-of-line, as well as any whitespace
           leading up to the "#" -- unless it is escaped by a backslash
         lstrip_ws [default: false]
           strip leading whitespace from each line before returning it
         rstrip_ws [default: true]
           strip trailing whitespace (including line terminator!) from
           each line before returning it
         skip_blanks [default: true]
           skip lines that are empty *after* stripping comments and
           whitespace.  (If both lstrip_ws and rstrip_ws are false,
           then some lines may consist of solely whitespace: these will
           *not* be skipped, even if 'skip_blanks' is true.)
         join_lines [default: false]
           if a backslash is the last non-newline character on a line
           after stripping comments and whitespace, join the following line
           to it to form one "logical line"; if N consecutive lines end
           with a backslash, then N+1 physical lines will be joined to
           form one logical line.
         collapse_join [default: false]
           strip leading whitespace from lines that are joined to their
           predecessor; only matters if (join_lines and not lstrip_ws)

       Note that since 'rstrip_ws' can strip the trailing newline, the
       semantics of 'readline()' must differ from those of the builtin file
       object's 'readline()' method!  In particular, 'readline()' returns
       None for end-of-file: an empty string might just be a blank line (or
       an all-whitespace line), if 'rstrip_ws' is true but 'skip_blanks' is
       not."""

    # Fallback values for the constructor's keyword options (see the
    # class docstring for what each option means).
    default_options = { 'strip_comments': 1,
                        'skip_blanks':    1,
                        'lstrip_ws':      0,
                        'rstrip_ws':      1,
                        'join_lines':     0,
                        'collapse_join':  0,
                      }

    def __init__ (self, filename=None, file=None, **options):
        """Construct a new TextFile object.  At least one of 'filename'
           (a string) and 'file' (a file-like object) must be supplied.
           The keyword argument options are described above and affect
           the values returned by 'readline()'."""

        if filename is None and file is None:
            raise RuntimeError, \
                  "you must supply either or both of 'filename' and 'file'"

        # set values for all options -- either from client option hash
        # or fallback to default_options
        for opt in self.default_options.keys():
            if options.has_key (opt):
                setattr (self, opt, options[opt])

            else:
                setattr (self, opt, self.default_options[opt])

        # sanity check client option hash
        for opt in options.keys():
            if not self.default_options.has_key (opt):
                raise KeyError, "invalid TextFile option '%s'" % opt

        if file is None:
            self.open (filename)
        else:
            self.filename = filename
            self.file = file
            self.current_line = 0       # assuming that file is at BOF!

        # 'linebuf' is a stack of lines that will be emptied before we
        # actually read from the file; it's only populated by an
        # 'unreadline()' operation
        self.linebuf = []


    def open (self, filename):
        """Open a new file named 'filename'.  This overrides both the
           'filename' and 'file' arguments to the constructor."""

        self.filename = filename
        self.file = open (self.filename, 'r')
        self.current_line = 0


    def close (self):
        """Close the current file and forget everything we know about it
           (filename, current line number)."""

        self.file.close ()
        self.file = None
        self.filename = None
        self.current_line = None


    def gen_error (self, msg, line=None):
        # Build a "filename, line N: msg" string for error()/warn().
        # 'line' may be a single line number or a (start, end) pair for a
        # logical line that spans several physical lines.
        outmsg = []
        if line is None:
            line = self.current_line
        outmsg.append(self.filename + ", ")
        if type (line) in (ListType, TupleType):
            outmsg.append("lines %d-%d: " % tuple (line))
        else:
            outmsg.append("line %d: " % line)
        outmsg.append(str(msg))
        return string.join(outmsg, "")


    def error (self, msg, line=None):
        """Raise ValueError with a message tied to the current (or
           supplied) line of the current file."""
        raise ValueError, "error: " + self.gen_error(msg, line)

    def warn (self, msg, line=None):
        """Print (to stderr) a warning message tied to the current logical
           line in the current file.  If the current logical line in the
           file spans multiple physical lines, the warning refers to the
           whole range, eg. "lines 3-5".  If 'line' supplied, it overrides
           the current line number; it may be a list or tuple to indicate a
           range of physical lines, or an integer for a single physical
           line."""
        sys.stderr.write("warning: " + self.gen_error(msg, line) + "\n")


    def readline (self):
        """Read and return a single logical line from the current file (or
           from an internal buffer if lines have previously been "unread"
           with 'unreadline()').  If the 'join_lines' option is true, this
           may involve reading multiple physical lines concatenated into a
           single string.  Updates the current line number, so calling
           'warn()' after 'readline()' emits a warning about the physical
           line(s) just read.  Returns None on end-of-file, since the empty
           string can occur if 'rstrip_ws' is true but 'strip_blanks' is
           not."""

        # If any "unread" lines waiting in 'linebuf', return the top
        # one.  (We don't actually buffer read-ahead data -- lines only
        # get put in 'linebuf' if the client explicitly does an
        # 'unreadline()'.)
        if self.linebuf:
            line = self.linebuf[-1]
            del self.linebuf[-1]
            return line

        # Accumulates backslash-continued physical lines when the
        # 'join_lines' option is on.
        buildup_line = ''

        while 1:
            # read the line, make it None if EOF
            line = self.file.readline()
            if line == '': line = None

            if self.strip_comments and line:

                # Look for the first "#" in the line.  If none, never
                # mind.  If we find one and it's the first character, or
                # is not preceded by "\", then it starts a comment --
                # strip the comment, strip whitespace before it, and
                # carry on.  Otherwise, it's just an escaped "#", so
                # unescape it (and any other escaped "#"'s that might be
                # lurking in there) and otherwise leave the line alone.

                pos = string.find (line, "#")
                if pos == -1:           # no "#" -- no comments
                    pass

                # It's definitely a comment -- either "#" is the first
                # character, or it's elsewhere and unescaped.
                elif pos == 0 or line[pos-1] != "\\":
                    # Have to preserve the trailing newline, because it's
                    # the job of a later step (rstrip_ws) to remove it --
                    # and if rstrip_ws is false, we'd better preserve it!
                    # (NB. this means that if the final line is all comment
                    # and has no trailing newline, we will think that it's
                    # EOF; I think that's OK.)
                    eol = (line[-1] == '\n') and '\n' or ''
                    line = line[0:pos] + eol

                    # If all that's left is whitespace, then skip line
                    # *now*, before we try to join it to 'buildup_line' --
                    # that way constructs like
                    #   hello \\
                    #   # comment that should be ignored
                    #   there
                    # result in "hello there".
                    if string.strip(line) == "":
                        continue

                else:                   # it's an escaped "#"
                    line = string.replace (line, "\\#", "#")


            # did previous line end with a backslash? then accumulate
            if self.join_lines and buildup_line:
                # oops: end of file
                if line is None:
                    self.warn ("continuation line immediately precedes "
                               "end-of-file")
                    return buildup_line

                if self.collapse_join:
                    line = string.lstrip (line)
                line = buildup_line + line

                # careful: pay attention to line number when incrementing it
                if type (self.current_line) is ListType:
                    self.current_line[1] = self.current_line[1] + 1
                else:
                    self.current_line = [self.current_line,
                                         self.current_line+1]
            # just an ordinary line, read it as usual
            else:
                if line is None:        # eof
                    return None

                # still have to be careful about incrementing the line number!
                if type (self.current_line) is ListType:
                    self.current_line = self.current_line[1] + 1
                else:
                    self.current_line = self.current_line + 1


            # strip whitespace however the client wants (leading and
            # trailing, or one or the other, or neither)
            if self.lstrip_ws and self.rstrip_ws:
                line = string.strip (line)
            elif self.lstrip_ws:
                line = string.lstrip (line)
            elif self.rstrip_ws:
                line = string.rstrip (line)

            # blank line (whether we rstrip'ed or not)? skip to next line
            # if appropriate
            if (line == '' or line == '\n') and self.skip_blanks:
                continue

            if self.join_lines:
                # Trailing backslash (newline already stripped): remember
                # this fragment and fetch the next physical line.
                if line[-1] == '\\':
                    buildup_line = line[:-1]
                    continue

                # Backslash-newline still intact (rstrip_ws off): keep the
                # newline but drop the backslash before continuing.
                if line[-2:] == '\\\n':
                    buildup_line = line[0:-2] + '\n'
                    continue

            # well, I guess there's some actual content there: return it
            return line

    # readline ()


    def readlines (self):
        """Read and return the list of all logical lines remaining in the
           current file."""

        lines = []
        while 1:
            line = self.readline()
            if line is None:
                return lines
            lines.append (line)


    def unreadline (self, line):
        """Push 'line' (a string) onto an internal buffer that will be
           checked by future 'readline()' calls.  Handy for implementing
           a parser with line-at-a-time lookahead."""

        self.linebuf.append (line)
+
+
if __name__ == "__main__":
    # Smoke test: run the same input through TextFile with different
    # option combinations and compare against hand-computed results.
    test_data = """# test file

line 3 \\
# intervening comment
  continues on next line
"""
    # result 1: no fancy options
    result1 = map (lambda x: x + "\n", string.split (test_data, "\n")[0:-1])

    # result 2: just strip comments
    result2 = ["\n",
               "line 3 \\\n",
               "  continues on next line\n"]

    # result 3: just strip blank lines
    result3 = ["# test file\n",
               "line 3 \\\n",
               "# intervening comment\n",
               "  continues on next line\n"]

    # result 4: default, strip comments, blank lines, and trailing whitespace
    result4 = ["line 3 \\",
               "  continues on next line"]

    # result 5: strip comments and blanks, plus join lines (but don't
    # "collapse" joined lines)
    result5 = ["line 3   continues on next line"]

    # result 6: strip comments and blanks, plus join lines (and
    # "collapse" joined lines)
    result6 = ["line 3 continues on next line"]

    def test_input (count, description, file, expected_result):
        # Compare 'file.readlines()' against 'expected_result' and print
        # a TAP-style ok/not-ok line.
        result = file.readlines ()
        # result = string.join (result, '')
        if result == expected_result:
            print "ok %d (%s)" % (count, description)
        else:
            print "not ok %d (%s):" % (count, description)
            print "** expected:"
            print expected_result
            print "** received:"
            print result


    filename = "test.txt"
    out_file = open (filename, "w")
    out_file.write (test_data)
    out_file.close ()

    in_file = TextFile (filename, strip_comments=0, skip_blanks=0,
                        lstrip_ws=0, rstrip_ws=0)
    test_input (1, "no processing", in_file, result1)

    in_file = TextFile (filename, strip_comments=1, skip_blanks=0,
                        lstrip_ws=0, rstrip_ws=0)
    test_input (2, "strip comments", in_file, result2)

    in_file = TextFile (filename, strip_comments=0, skip_blanks=1,
                        lstrip_ws=0, rstrip_ws=0)
    test_input (3, "strip blanks", in_file, result3)

    in_file = TextFile (filename)
    test_input (4, "default processing", in_file, result4)

    in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
                        join_lines=1, rstrip_ws=1)
    test_input (5, "join lines without collapsing", in_file, result5)

    in_file = TextFile (filename, strip_comments=1, skip_blanks=1,
                        join_lines=1, rstrip_ws=1, collapse_join=1)
    test_input (6, "join lines with collapsing", in_file, result6)

    os.remove (filename)
diff --git a/depot_tools/release/win/python_24/Lib/distutils/unixccompiler.py b/depot_tools/release/win/python_24/Lib/distutils/unixccompiler.py
new file mode 100644
index 0000000..9c47550
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/unixccompiler.py
@@ -0,0 +1,239 @@
+"""distutils.unixccompiler
+
+Contains the UnixCCompiler class, a subclass of CCompiler that handles
+the "typical" Unix-style command-line C compiler:
+  * macros defined with -Dname[=value]
+  * macros undefined with -Uname
+  * include search directories specified with -Idir
+  * libraries specified with -lllib
+  * library search directories specified with -Ldir
+  * compile handled by 'cc' (or similar) executable with -c option:
+    compiles .c to .o
+  * link static library handled by 'ar' command (possibly with 'ranlib')
+  * link shared library handled by 'cc -shared'
+"""
+
+__revision__ = "$Id: unixccompiler.py,v 1.56 2004/08/29 16:40:55 loewis Exp $"
+
+import os, sys
+from types import StringType, NoneType
+from copy import copy
+
+from distutils import sysconfig
+from distutils.dep_util import newer
+from distutils.ccompiler import \
+     CCompiler, gen_preprocess_options, gen_lib_options
+from distutils.errors import \
+     DistutilsExecError, CompileError, LibError, LinkError
+from distutils import log
+
+# XXX Things not currently handled:
+#   * optimization/debug/warning flags; we just use whatever's in Python's
+#     Makefile and live with it.  Is this adequate?  If not, we might
+#     have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
+#     SunCCompiler, and I suspect down that road lies madness.
+#   * even if we don't know a warning flag from an optimization flag,
+#     we need some way for outsiders to feed preprocessor/compiler/linker
+#     flags in to us -- eg. a sysadmin might want to mandate certain flags
+#     via a site config file, or a user might want to set something for
+#     compiling this module distribution only via the setup.py command
+#     line, whatever.  As long as these options come from something on the
+#     current system, they can be as system-dependent as they like, and we
+#     should just happily stuff them into the preprocessor/compiler/linker
+#     options and carry on.
+
class UnixCCompiler(CCompiler):
    """CCompiler subclass driving a typical Unix-style command-line
    toolchain: a 'cc'-alike for preprocessing/compiling/linking, 'ar'
    for static libraries, and (where configured) 'ranlib'.  See the
    module docstring for the option conventions handled.
    """

    compiler_type = 'unix'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set.  The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {'preprocessor' : None,
                   'compiler'     : ["cc"],
                   'compiler_so'  : ["cc"],
                   'compiler_cxx' : ["cc"],
                   'linker_so'    : ["cc", "-shared"],
                   'linker_exe'   : ["cc"],
                   'archiver'     : ["ar", "-cr"],
                   'ranlib'       : None,
                  }

    # On Mac OS X ('darwin') an explicit 'ranlib' pass is configured, so
    # 'create_static_lib()' will run it after archiving.
    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler.  NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!

    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    if sys.platform == "cygwin":
        exe_extension = ".exe"

    def preprocess(self, source,
                   output_file=None, macros=None, include_dirs=None,
                   extra_preargs=None, extra_postargs=None):
        """Preprocess 'source' with the configured 'preprocessor' command.

        Output goes to 'output_file' if given (its directory is created
        as needed); otherwise wherever the preprocessor writes by default
        (stdout).  Raises CompileError if the preprocessor fails.
        """
        ignore, macros, include_dirs = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)

        # We need to preprocess: either we're being forced to, or we're
        # generating output to stdout, or there's a target output file and
        # the source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError, msg:
                raise CompileError, msg

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile one source file 'src' to the object file 'obj' by
        invoking 'compiler_so'.  'ext' and 'pp_opts' are unused here.
        Raises CompileError if the compiler fails.
        """
        try:
            self.spawn(self.compiler_so + cc_args + [src, '-o', obj] +
                       extra_postargs)
        except DistutilsExecError, msg:
            raise CompileError, msg

    def create_static_lib(self, objects, output_libname,
                          output_dir=None, debug=0, target_lang=None):
        """Archive 'objects' into the static library 'output_libname'
        using 'archiver', then run 'ranlib' on the result if one is
        configured.  Skips the work when the library is already newer
        than its objects.  Raises LibError if ranlib fails.
        """
        objects, output_dir = self._fix_object_args(objects, output_dir)

        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)

        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver +
                       [output_filename] +
                       objects + self.objects)

            # Not many Unices require ranlib any more -- SunOS 4.x is,
            # I think, the only major Unix that does.  Maybe we need
            # some platform intelligence here to skip ranlib if it's
            # not needed; for now we just run it whenever a 'ranlib'
            # executable is configured (see 'executables' above).
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError, msg:
                    raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects,
             output_filename, output_dir=None, libraries=None,
             library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link 'objects' into 'output_filename' -- an executable or a
        shared object depending on 'target_desc' -- using 'linker_exe'
        or 'linker_so' respectively.  For 'target_lang' == "c++" the
        first element of the linker command is replaced by the C++
        driver.  Skips the work when the output is up to date.  Raises
        LinkError if the linker fails.
        """
        objects, output_dir = self._fix_object_args(objects, output_dir)
        libraries, library_dirs, runtime_library_dirs = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)

        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                                   libraries)
        if type(output_dir) not in (StringType, NoneType):
            raise TypeError, "'output_dir' must be a string or None"
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)

        if self._need_link(objects, output_filename):
            ld_args = (objects + self.objects +
                       lib_opts + ['-o', output_filename])
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                if target_desc == CCompiler.EXECUTABLE:
                    linker = self.linker_exe[:]
                else:
                    linker = self.linker_so[:]
                if target_lang == "c++" and self.compiler_cxx:
                    linker[0] = self.compiler_cxx[0]
                self.spawn(linker + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options()' function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        """Return the linker option that adds 'dir' to the library
        search path."""
        return "-L" + dir

    def runtime_library_dir_option(self, dir):
        # XXX Hackish, at the very least.  See Python bug #445902:
        # http://sourceforge.net/tracker/index.php
        #   ?func=detail&aid=445902&group_id=5470&atid=105470
        # Linkers on different platforms need different options to
        # specify that directories need to be added to the list of
        # directories searched for dependencies when a dynamic library
        # is sought.  GCC has to be told to pass the -R option through
        # to the linker, whereas other compilers just know this.
        # Other compilers may need something slightly different.  At
        # this time, there's no way to determine this information from
        # the configuration data stored in the Python installation, so
        # we use this hack.
        compiler = os.path.basename(sysconfig.get_config_var("CC"))
        if sys.platform[:6] == "darwin":
            # MacOSX's linker doesn't understand the -R flag at all
            return "-L" + dir
        elif sys.platform[:5] == "hp-ux":
            return "+s -L" + dir
        elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
            return ["-rpath", dir]
        elif compiler[:3] == "gcc" or compiler[:3] == "g++":
            return "-Wl,-R" + dir
        else:
            return "-R" + dir

    def library_option(self, lib):
        """Return the linker option that links in library 'lib'."""
        return "-l" + lib

    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs' for library 'lib', preferring the dylib, then
        the shared, then the static form; return the full path of the
        first that exists, or None if it is found nowhere.  'debug' is
        accepted for interface compatibility and ignored here.
        """
        shared_f = self.library_filename(lib, lib_type='shared')
        dylib_f = self.library_filename(lib, lib_type='dylib')
        static_f = self.library_filename(lib, lib_type='static')

        for dir in dirs:
            shared = os.path.join(dir, shared_f)
            dylib = os.path.join(dir, dylib_f)
            static = os.path.join(dir, static_f)
            # We're second-guessing the linker here, with not much hard
            # data to go on: GCC seems to prefer the shared library, so I'm
            # assuming that *all* Unix C compilers do.  And of course I'm
            # ignoring even GCC's "-static" option.  So sue me.
            if os.path.exists(dylib):
                return dylib
            elif os.path.exists(shared):
                return shared
            elif os.path.exists(static):
                return static

        # Oops, didn't find it in *any* of 'dirs'
        return None
diff --git a/depot_tools/release/win/python_24/Lib/distutils/util.py b/depot_tools/release/win/python_24/Lib/distutils/util.py
new file mode 100644
index 0000000..809e693
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/util.py
@@ -0,0 +1,464 @@
+"""distutils.util
+
+Miscellaneous utility functions -- anything that doesn't fit into
+one of the other *util.py modules.
+"""
+
+__revision__ = "$Id: util.py,v 1.76 2004/07/18 06:14:42 tim_one Exp $"
+
+import sys, os, string, re
+from distutils.errors import DistutilsPlatformError
+from distutils.dep_util import newer
+from distutils.spawn import spawn
+from distutils import log
+
def get_platform ():
    """Return a string that identifies the current platform.  This is used
    mainly to distinguish platform-specific build directories and
    platform-specific built distributions.  Typically includes the OS name
    and version and the architecture (as supplied by 'os.uname()'),
    although the exact information included depends on the OS; eg. for IRIX
    the architecture isn't particularly important (IRIX only runs on SGI
    hardware), but for Linux the kernel version isn't particularly
    important.

    Examples of returned values:
       linux-i586
       linux-alpha (?)
       solaris-2.6-sun4u
       irix-5.3
       irix64-6.2

    For non-POSIX platforms, currently just returns 'sys.platform'.
    """
    if os.name != "posix" or not hasattr(os, 'uname'):
        # XXX what about the architecture? NT is Intel or Alpha,
        # Mac OS is M68k or PPC, etc.
        return sys.platform

    # Try to distinguish various flavours of Unix

    (osname, host, release, version, machine) = os.uname()

    # Convert the OS name to lowercase, remove '/' characters
    # (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
    osname = string.lower(osname)
    osname = string.replace(osname, '/', '')
    machine = string.replace(machine, ' ', '_')

    if osname[:5] == "linux":
        # At least on Linux/Intel, 'machine' is the processor --
        # i386, etc.
        # XXX what about Alpha, SPARC, etc?
        return  "%s-%s" % (osname, machine)
    elif osname[:5] == "sunos":
        if release[0] >= "5":           # SunOS 5 == Solaris 2
            osname = "solaris"
            # Map the SunOS 5.x release number to the Solaris 2.x one.
            release = "%d.%s" % (int(release[0]) - 3, release[2:])
        # fall through to standard osname-release-machine representation
    elif osname[:4] == "irix":              # could be "irix64"!
        return "%s-%s" % (osname, release)
    elif osname[:3] == "aix":
        return "%s-%s.%s" % (osname, version, release)
    elif osname[:6] == "cygwin":
        osname = "cygwin"
        # Keep only the leading run of digits and dots from the release
        # string.
        rel_re = re.compile (r'[\d.]+')
        m = rel_re.match(release)
        if m:
            release = m.group()

    return "%s-%s-%s" % (osname, release, machine)
+
+# get_platform ()
+
+
def convert_path (pathname):
    """Return 'pathname' as a name that will work on the native filesystem,
    i.e. split it on '/' and put it back together again using the current
    directory separator.  Needed because filenames in the setup script are
    always supplied in Unix style, and have to be converted to the local
    convention before we can actually use them in the filesystem.  Raises
    ValueError on non-Unix-ish systems if 'pathname' either starts or
    ends with a slash.
    """
    # Native separator is already '/': nothing to convert.
    if os.sep == '/':
        return pathname
    if not pathname:
        return pathname
    if pathname[0] == '/':
        raise ValueError, "path '%s' cannot be absolute" % pathname
    if pathname[-1] == '/':
        raise ValueError, "path '%s' cannot end with '/'" % pathname

    paths = string.split(pathname, '/')
    # Drop no-op '.' components before rejoining.
    while '.' in paths:
        paths.remove('.')
    if not paths:
        # The path consisted of nothing but '.' components.
        return os.curdir
    return apply(os.path.join, paths)
+
+# convert_path ()
+
+
def change_root (new_root, pathname):
    """Return 'pathname' with 'new_root' prepended.  If 'pathname' is
    relative, this is equivalent to "os.path.join(new_root,pathname)".
    Otherwise, it requires making 'pathname' relative and then joining the
    two, which is tricky on DOS/Windows and Mac OS.

    Raises DistutilsPlatformError for an unrecognized 'os.name'.
    """
    if os.name == 'posix':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            # Absolute: strip the leading '/' so the join lands under
            # 'new_root'.
            return os.path.join(new_root, pathname[1:])

    elif os.name == 'nt':
        # Drop the drive letter, then strip any leading backslash.
        # NOTE(review): assumes 'path' is non-empty after splitdrive --
        # an empty 'pathname' would raise IndexError here.
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == '\\':
            path = path[1:]
        return os.path.join(new_root, path)

    elif os.name == 'os2':
        (drive, path) = os.path.splitdrive(pathname)
        if path[0] == os.sep:
            path = path[1:]
        return os.path.join(new_root, path)

    elif os.name == 'mac':
        if not os.path.isabs(pathname):
            return os.path.join(new_root, pathname)
        else:
            # Chop off volume name from start of path
            elements = string.split(pathname, ":", 1)
            pathname = ":" + elements[1]
            return os.path.join(new_root, pathname)

    else:
        raise DistutilsPlatformError, \
              "nothing known about platform '%s'" % os.name
+
+
# Flag so the environment fixup in 'check_environ()' only runs once.
_environ_checked = 0
def check_environ ():
    """Ensure that 'os.environ' has all the environment variables we
    guarantee that users can use in config files, command-line options,
    etc.  Currently this includes:
      HOME - user's home directory (Unix only)
      PLAT - description of the current platform, including hardware
             and OS (see 'get_platform()')
    """
    global _environ_checked
    if _environ_checked:
        return

    # On Unix, fall back to the password database for HOME.  'pwd' is
    # POSIX-only, hence the local import; field 5 of the passwd entry is
    # the home directory (pw_dir).
    if os.name == 'posix' and not os.environ.has_key('HOME'):
        import pwd
        os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]

    if not os.environ.has_key('PLAT'):
        os.environ['PLAT'] = get_platform()

    _environ_checked = 1
+
+
def subst_vars (s, local_vars):
    """Perform shell/Perl-style variable substitution on 's'.  Every
    occurrence of '$' followed by a name is considered a variable, and
    variable is substituted by the value found in the 'local_vars'
    dictionary, or in 'os.environ' if it's not in 'local_vars'.
    'os.environ' is first checked/augmented to guarantee that it contains
    certain values: see 'check_environ()'.  Raise ValueError for any
    variables not found in either 'local_vars' or 'os.environ'.
    """
    check_environ()
    def _subst (match, local_vars=local_vars):
        # 'local_vars' takes precedence; a name missing from both places
        # raises KeyError here, converted to ValueError below.
        var_name = match.group(1)
        if local_vars.has_key(var_name):
            return str(local_vars[var_name])
        else:
            return os.environ[var_name]

    try:
        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
    except KeyError, var:
        raise ValueError, "invalid variable '$%s'" % var
+
+# subst_vars ()
+
+
def grok_environment_error (exc, prefix="error: "):
    """Generate a useful error message from an EnvironmentError (IOError or
    OSError) exception object.  Handles Python 1.5.1 and 1.5.2 styles, and
    does what it can to deal with exception objects that don't have a
    filename (which happens when the error is due to a two-file operation,
    such as 'rename()' or 'link()'.  Returns the error message as a string
    prefixed with 'prefix'.
    """
    # check for Python 1.5.2-style {IO,OS}Error exception objects
    if hasattr(exc, 'filename') and hasattr(exc, 'strerror'):
        if exc.filename:
            error = prefix + "%s: %s" % (exc.filename, exc.strerror)
        else:
            # two-argument functions in posix module don't
            # include the filename in the exception object!
            error = prefix + "%s" % exc.strerror
    else:
        # 1.5.1-style exception: assume the last element of the args
        # tuple is the error message.
        error = prefix + str(exc[-1])

    return error
+
+
# Needed by 'split_quoted()' -- compiled lazily on first use.
_wordchars_re = _squote_re = _dquote_re = None
def _init_regex():
    """Compile the word/quote regexes used by 'split_quoted()'."""
    global _wordchars_re, _squote_re, _dquote_re
    _wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
    _squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
    _dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
+
def split_quoted (s):
    """Split a string up according to Unix shell-like rules for quotes and
    backslashes.  In short: words are delimited by spaces, as long as those
    spaces are not escaped by a backslash, or inside a quoted string.
    Single and double quotes are equivalent, and the quote characters can
    be backslash-escaped.  The backslash is stripped from any two-character
    escape sequence, leaving only the escaped character.  The quote
    characters are stripped from any quoted string.  Returns a list of
    words.
    """

    # This is a nice algorithm for splitting up a single string, since it
    # doesn't require character-by-character examination.  It was a little
    # bit of a brain-bender to get it working right, though...
    # Note: 's' is repeatedly rewritten in place below (escapes and quote
    # characters are spliced out), with 'pos' tracking how far has been
    # consumed of the current word.
    if _wordchars_re is None: _init_regex()

    s = string.strip(s)
    words = []
    pos = 0

    while s:
        m = _wordchars_re.match(s, pos)
        end = m.end()
        if end == len(s):
            words.append(s[:end])
            break

        if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
            words.append(s[:end])       # we definitely have a word delimiter
            s = string.lstrip(s[end:])
            pos = 0

        elif s[end] == '\\':            # preserve whatever is being escaped;
                                        # will become part of the current word
            s = s[:end] + s[end+1:]
            pos = end+1

        else:
            if s[end] == "'":           # slurp singly-quoted string
                m = _squote_re.match(s, end)
            elif s[end] == '"':         # slurp doubly-quoted string
                m = _dquote_re.match(s, end)
            else:
                raise RuntimeError, \
                      "this can't happen (bad char '%c')" % s[end]

            if m is None:
                raise ValueError, \
                      "bad string (mismatched %s quotes?)" % s[end]

            # Splice out the two quote characters, keeping the quoted text
            # as part of the current word; 'pos' is adjusted by the two
            # removed characters.
            (beg, end) = m.span()
            s = s[:beg] + s[beg+1:end-1] + s[end:]
            pos = m.end() - 2

        if pos >= len(s):
            words.append(s)
            break

    return words
+
+# split_quoted ()
+
+
def execute (func, args, msg=None, verbose=0, dry_run=0):
    """Perform some action that affects the outside world (eg.  by
    writing to the filesystem).  Such actions are special because they
    are disabled by the 'dry_run' flag.  This function takes care of all
    that bureaucracy for you; all you have to do is supply the
    function to call and an argument tuple for it (to embody the
    "external action" being performed), and an optional message to
    print.  'verbose' is accepted for interface compatibility and is
    unused here.
    """
    if msg is None:
        # Default message: the call spelled out, eg. "remove('foo')".
        msg = "%s%r" % (func.__name__, args)
        if msg[-2:] == ',)':        # correct for singleton tuple
            msg = msg[0:-2] + ')'

    log.info(msg)
    if not dry_run:
        apply(func, args)
+
+
def strtobool (val):
    """Convert a string representation of truth to true (1) or false (0).

    True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
    are 'n', 'no', 'f', 'false', 'off', and '0'.  Raises ValueError if
    'val' is anything else.
    """
    # Normalize case so the comparisons below are case-insensitive.
    val = string.lower(val)
    if val in ('y', 'yes', 't', 'true', 'on', '1'):
        return 1
    elif val in ('n', 'no', 'f', 'false', 'off', '0'):
        return 0
    else:
        raise ValueError, "invalid truth value %r" % (val,)
+
+
def byte_compile (py_files,
                  optimize=0, force=0,
                  prefix=None, base_dir=None,
                  verbose=1, dry_run=0,
                  direct=None):
    """Byte-compile a collection of Python source files to either .pyc
    or .pyo files in the same directory.  'py_files' is a list of files
    to compile; any files that don't end in ".py" are silently skipped.
    'optimize' must be one of the following:
      0 - don't optimize (generate .pyc)
      1 - normal optimization (like "python -O")
      2 - extra optimization (like "python -OO")
    If 'force' is true, all files are recompiled regardless of
    timestamps.

    The source filename encoded in each bytecode file defaults to the
    filenames listed in 'py_files'; you can modify these with 'prefix' and
    'basedir'.  'prefix' is a string that will be stripped off of each
    source filename, and 'base_dir' is a directory name that will be
    prepended (after 'prefix' is stripped).  You can supply either or both
    (or neither) of 'prefix' and 'base_dir', as you wish.

    If 'dry_run' is true, doesn't actually do anything that would
    affect the filesystem.

    Byte-compilation is either done directly in this interpreter process
    with the standard py_compile module, or indirectly by writing a
    temporary script and executing it.  Normally, you should let
    'byte_compile()' figure out to use direct compilation or not (see
    the source for details).  The 'direct' flag is used by the script
    generated in indirect mode; unless you know what you're doing, leave
    it set to None.
    """

    # First, if the caller didn't force us into direct or indirect mode,
    # figure out which mode we should be in.  We take a conservative
    # approach: choose direct mode *only* if the current interpreter is
    # in debug mode and optimize is 0.  If we're not in debug mode (-O
    # or -OO), we don't know which level of optimization this
    # interpreter is running with, so we can't do direct
    # byte-compilation and be certain that it's the right thing.  Thus,
    # always compile indirectly if the current interpreter is in either
    # optimize mode, or if either optimization level was requested by
    # the caller.
    if direct is None:
        direct = (__debug__ and optimize == 0)

    # "Indirect" byte-compilation: write a temporary script and then
    # run it with the appropriate flags.
    if not direct:
        try:
            from tempfile import mkstemp
            (script_fd, script_name) = mkstemp(".py")
        except ImportError:
            # NOTE(review): mktemp() is racy (name chosen before the file
            # is created); only used when mkstemp is unavailable.
            from tempfile import mktemp
            (script_fd, script_name) = None, mktemp(".py")
        log.info("writing byte-compilation script '%s'", script_name)
        if not dry_run:
            if script_fd is not None:
                script = os.fdopen(script_fd, "w")
            else:
                script = open(script_name, "w")

            script.write("""\
from distutils.util import byte_compile
files = [
""")

            # XXX would be nice to write absolute filenames, just for
            # safety's sake (script should be more robust in the face of
            # chdir'ing before running it).  But this requires abspath'ing
            # 'prefix' as well, and that breaks the hack in build_lib's
            # 'byte_compile()' method that carefully tacks on a trailing
            # slash (os.sep really) to make sure the prefix here is "just
            # right".  This whole prefix business is rather delicate -- the
            # problem is that it's really a directory, but I'm treating it
            # as a dumb string, so trailing slashes and so forth matter.

            #py_files = map(os.path.abspath, py_files)
            #if prefix:
            #    prefix = os.path.abspath(prefix)

            script.write(string.join(map(repr, py_files), ",\n") + "]\n")
            script.write("""
byte_compile(files, optimize=%r, force=%r,
             prefix=%r, base_dir=%r,
             verbose=%r, dry_run=0,
             direct=1)
""" % (optimize, force, prefix, base_dir, verbose))

            script.close()

        # Re-run ourselves in a child interpreter with the right -O level.
        cmd = [sys.executable, script_name]
        if optimize == 1:
            cmd.insert(1, "-O")
        elif optimize == 2:
            cmd.insert(1, "-OO")
        spawn(cmd, dry_run=dry_run)
        execute(os.remove, (script_name,), "removing %s" % script_name,
                dry_run=dry_run)

    # "Direct" byte-compilation: use the py_compile module to compile
    # right here, right now.  Note that the script generated in indirect
    # mode simply calls 'byte_compile()' in direct mode, a weird sort of
    # cross-process recursion.  Hey, it works!
    else:
        from py_compile import compile

        for file in py_files:
            if file[-3:] != ".py":
                # This lets us be lazy and not filter filenames in
                # the "install_lib" command.
                continue

            # Terminology from the py_compile module:
            #   cfile - byte-compiled file
            #   dfile - purported source filename (same as 'file' by default)
            cfile = file + (__debug__ and "c" or "o")
            dfile = file
            if prefix:
                if file[:len(prefix)] != prefix:
                    raise ValueError, \
                          ("invalid prefix: filename %r doesn't start with %r"
                           % (file, prefix))
                dfile = dfile[len(prefix):]
            if base_dir:
                dfile = os.path.join(base_dir, dfile)

            cfile_base = os.path.basename(cfile)
            # 'direct' is always true on this branch (we're in the else of
            # "if not direct"), so this inner test is redundant but harmless.
            if direct:
                if force or newer(file, cfile):
                    log.info("byte-compiling %s to %s", file, cfile_base)
                    if not dry_run:
                        compile(file, cfile, dfile)
                else:
                    log.debug("skipping byte-compilation of %s to %s",
                              file, cfile_base)
+
+# byte_compile ()
+
def rfc822_escape (header):
    """Return a version of the string escaped for inclusion in an
    RFC-822 header, by stripping each line and ensuring there are
    8 spaces after each newline.
    """
    lines = string.split(header, '\n')
    lines = map(string.strip, lines)
    header = string.join(lines, '\n' + 8*' ')
    return header
diff --git a/depot_tools/release/win/python_24/Lib/distutils/version.py b/depot_tools/release/win/python_24/Lib/distutils/version.py
new file mode 100644
index 0000000..4d64215e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/distutils/version.py
@@ -0,0 +1,299 @@
+#
+# distutils/version.py
+#
+# Implements multiple version numbering conventions for the
+# Python Module Distribution Utilities.
+#
+# $Id: version.py,v 1.7 2002/11/14 02:25:42 akuchling Exp $
+#
+
+"""Provides classes to represent module version numbers (one class for
+each style of version numbering).  There are currently two such classes
+implemented: StrictVersion and LooseVersion.
+
+Every version number class implements the following interface:
+  * the 'parse' method takes a string and parses it to some internal
+    representation; if the string is an invalid version number,
+    'parse' raises a ValueError exception
+  * the class constructor takes an optional string argument which,
+    if supplied, is passed to 'parse'
+  * __str__ reconstructs the string that was passed to 'parse' (or
+    an equivalent string -- ie. one that will generate an equivalent
+    version number instance)
+  * __repr__ generates Python code to recreate the version number instance
+  * __cmp__ compares the current instance with either another instance
+    of the same class or a string (which will be parsed to an instance
+    of the same class, thus must follow the same rules)
+"""
+
+import string, re
+from types import StringType
+
class Version:
    """Abstract base class for version numbering classes.

    Concrete subclasses must supply 'parse', '__str__' and '__cmp__';
    this base class only provides the constructor (which hands an
    optional version string to 'parse') and a generic reproducer.
    """

    def __init__ (self, vstring=None):
        # Only parse when a string was actually supplied; a bare
        # instance stays unparsed.
        if vstring:
            self.parse(vstring)

    def __repr__ (self):
        # e.g. "StrictVersion ('1.0')" -- uses the concrete class name.
        return "%s ('%s')" % (self.__class__.__name__, self)
+
+
+# Interface for version-number classes -- must be implemented
+# by the following classes (the concrete ones -- Version should
+# be treated as an abstract class).
+#    __init__ (string) - create and take same action as 'parse'
+#                        (string parameter is optional)
+#    parse (string)    - convert a string representation to whatever
+#                        internal representation is appropriate for
+#                        this style of version numbering
+#    __str__ (self)    - convert back to a string; should be very similar
+#                        (if not identical to) the string supplied to parse
+#    __repr__ (self)   - generate Python code to recreate
+#                        the instance
+#    __cmp__ (self, other) - compare two version numbers ('other' may
+#                        be an unparsed version string, or another
+#                        instance of your version class)
+
+
class StrictVersion (Version):

    """Version numbering for anal retentives and software idealists.

    A version number consists of two or three dot-separated numeric
    components, with an optional pre-release tag on the end: the
    letter 'a' or 'b' followed by a number.  When the numeric
    components of two version numbers are equal, a version with a
    pre-release tag always sorts earlier (lesser) than one without.

    Valid, in sorted order:

        0.4       0.4.0  (these two are equivalent)
        0.4.1
        0.5a1
        0.5b3
        0.5
        0.9.6
        1.0
        1.0.4a3
        1.0.4b1
        1.0.4

    Invalid: 1, 2.7.2.2, 1.3.a4, 1.3pl1, 1.3c4 (wrong component count
    or unrecognized pre-release tag).
    """

    version_re = re.compile(r'^(\d+) \. (\d+) (\. (\d+))? ([ab](\d+))?$',
                            re.VERBOSE)


    def parse (self, vstring):
        """Parse 'vstring' into self.version (3-tuple of ints) and
        self.prerelease (('a'|'b', int) or None).  Raises ValueError
        on malformed input.
        """
        match = self.version_re.match(vstring)
        if not match:
            # NOTE: raise-as-call form replaces the py2-only
            # "raise E, msg" statement; same exception, same message.
            raise ValueError("invalid version number '%s'" % vstring)

        (major, minor, patch, prerelease, prerelease_num) = \
            match.group(1, 2, 4, 5, 6)

        # A missing patch level is normalized to 0 so that "0.4" and
        # "0.4.0" compare equal.  int() replaces deprecated string.atoi.
        if patch:
            self.version = (int(major), int(minor), int(patch))
        else:
            self.version = (int(major), int(minor), 0)

        if prerelease:
            self.prerelease = (prerelease[0], int(prerelease_num))
        else:
            self.prerelease = None


    def __str__ (self):
        """Reconstruct a canonical version string (a zero patch level
        is omitted: (1, 2, 0) prints as "1.2").
        """
        if self.version[2] == 0:
            vstring = '.'.join(map(str, self.version[0:2]))
        else:
            vstring = '.'.join(map(str, self.version))

        if self.prerelease:
            vstring = vstring + self.prerelease[0] + str(self.prerelease[1])

        return vstring


    def __cmp__ (self, other):
        """Compare with another StrictVersion (or a version string,
        which is parsed first).  Returns -1, 0 or 1.
        """
        if isinstance(other, str):
            other = StrictVersion(other)

        compare = cmp(self.version, other.version)
        if compare != 0:
            # Numeric parts differ; the prerelease tag is irrelevant.
            return compare

        # Numeric parts are equal -- the prerelease tag decides:
        # a prerelease sorts before the corresponding final release.
        if self.prerelease:
            if other.prerelease:
                return cmp(self.prerelease, other.prerelease)
            return -1
        else:
            if other.prerelease:
                return 1
            return 0


# end class StrictVersion
+
+
+# The rules according to Greg Stein:
+# 1) a version number has 1 or more numbers separate by a period or by
+#    sequences of letters. If only periods, then these are compared
+#    left-to-right to determine an ordering.
+# 2) sequences of letters are part of the tuple for comparison and are
+#    compared lexicographically
+# 3) recognize the numeric components may have leading zeroes
+#
+# The LooseVersion class below implements these rules: a version number
+# string is split up into a tuple of integer and string components, and
+# comparison is a simple tuple comparison.  This means that version
+# numbers behave in a predictable and obvious way, but a way that might
+# not necessarily be how people *want* version numbers to behave.  There
+# wouldn't be a problem if people could stick to purely numeric version
+# numbers: just split on period and compare the numbers as tuples.
+# However, people insist on putting letters into their version numbers;
+# the most common purpose seems to be:
+#   - indicating a "pre-release" version
+#     ('alpha', 'beta', 'a', 'b', 'pre', 'p')
+#   - indicating a post-release patch ('p', 'pl', 'patch')
+# but of course this can't cover all version number schemes, and there's
+# no way to know what a programmer means without asking him.
+#
+# The problem is what to do with letters (and other non-numeric
+# characters) in a version number.  The current implementation does the
+# obvious and predictable thing: keep them as strings and compare
+# lexically within a tuple comparison.  This has the desired effect if
+# an appended letter sequence implies something "post-release":
+# eg. "0.99" < "0.99pl14" < "1.0", and "5.001" < "5.001m" < "5.002".
+#
+# However, if letters in a version number imply a pre-release version,
+# the "obvious" thing isn't correct.  Eg. you would expect that
+# "1.5.1" < "1.5.2a2" < "1.5.2", but under the tuple/lexical comparison
+# implemented here, this just isn't so.
+#
+# Two possible solutions come to mind.  The first is to tie the
+# comparison algorithm to a particular set of semantic rules, as has
+# been done in the StrictVersion class above.  This works great as long
+# as everyone can go along with bondage and discipline.  Hopefully a
+# (large) subset of Python module programmers will agree that the
+# particular flavour of bondage and discipline provided by StrictVersion
+# provides enough benefit to be worth using, and will submit their
+# version numbering scheme to its domination.  The free-thinking
+# anarchists in the lot will never give in, though, and something needs
+# to be done to accommodate them.
+#
+# Perhaps a "moderately strict" version class could be implemented that
+# lets almost anything slide (syntactically), and makes some heuristic
+# assumptions about non-digits in version number strings.  This could
+# sink into special-case-hell, though; if I was as talented and
+# idiosyncratic as Larry Wall, I'd go ahead and implement a class that
+# somehow knows that "1.2.1" < "1.2.2a2" < "1.2.2" < "1.2.2pl3", and is
+# just as happy dealing with things like "2g6" and "1.13++".  I don't
+# think I'm smart enough to do it right though.
+#
+# In any case, I've coded the test suite for this module (see
+# ../test/test_version.py) specifically to fail on things like comparing
+# "1.2a2" and "1.2".  That's not because the *code* is doing anything
+# wrong, it's because the simple, obvious design doesn't match my
+# complicated, hairy expectations for real-world version numbers.  It
+# would be a snap to fix the test suite to say, "Yep, LooseVersion does
+# the Right Thing" (ie. the code matches the conception).  But I'd rather
+# have a conception that matches common notions about version numbers.
+
class LooseVersion (Version):

    """Version numbering for anarchists and software realists.

    A version number is any series of numbers separated by periods or
    runs of letters.  Numeric components compare numerically, letter
    components lexically.  Examples of accepted input:

        1.5.1   1.5.2b2   161   3.10a   8.02   3.4j   1996.07.12
        3.2.pl0   3.1.1.6   2g6   11g   0.960923   2.2beta29
        1.13++   5.5.kw   2.0b1pl0

    There is no such thing as an invalid version number under this
    scheme; comparison is simple and predictable, though not always
    what people *want* (e.g. "1.5.2a2" sorts after "1.5.2").
    """

    # Splits a version string into digit runs, letter runs and dots;
    # the dots are discarded by parse().
    component_re = re.compile(r'(\d+ | [a-z]+ | \.)', re.VERBOSE)

    def __init__ (self, vstring=None):
        if vstring:
            self.parse(vstring)


    def parse (self, vstring):
        """Split 'vstring' into self.version, a list of ints (numeric
        runs) and strings (letter runs).  The raw string is kept for
        __str__, since the parsed form is lossy.
        """
        self.vstring = vstring
        # List comprehension replaces filter() -- same result, and
        # avoids the deprecated-functional style.
        components = [c for c in self.component_re.split(vstring)
                      if c and c != '.']
        for i, component in enumerate(components):
            try:
                components[i] = int(component)
            except ValueError:
                # Letter runs stay as strings.
                pass

        self.version = components


    def __str__ (self):
        return self.vstring


    def __repr__ (self):
        return "LooseVersion ('%s')" % str(self)


    def __cmp__ (self, other):
        """Compare with another LooseVersion (or a version string,
        which is parsed first) by plain list comparison.
        """
        if isinstance(other, str):
            other = LooseVersion(other)

        return cmp(self.version, other.version)


# end class LooseVersion
diff --git a/depot_tools/release/win/python_24/Lib/doctest.py b/depot_tools/release/win/python_24/Lib/doctest.py
new file mode 100644
index 0000000..f621b62f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/doctest.py
@@ -0,0 +1,2665 @@
+# Module doctest.
+# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
+# Major enhancements and refactoring by:
+#     Jim Fulton
+#     Edward Loper
+
+# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
+
+r"""Module doctest -- a framework for running examples in docstrings.
+
+In simplest use, end each module M to be tested with:
+
+def _test():
+    import doctest
+    doctest.testmod()
+
+if __name__ == "__main__":
+    _test()
+
+Then running the module as a script will cause the examples in the
+docstrings to get executed and verified:
+
+python M.py
+
+This won't display anything unless an example fails, in which case the
+failing example(s) and the cause(s) of the failure(s) are printed to stdout
+(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
+line of output is "Test failed.".
+
+Run it with the -v switch instead:
+
+python M.py -v
+
+and a detailed report of all examples tried is printed to stdout, along
+with assorted summaries at the end.
+
+You can force verbose mode by passing "verbose=True" to testmod, or prohibit
+it by passing "verbose=False".  In either of those cases, sys.argv is not
+examined by testmod.
+
+There are a variety of other ways to run doctests, including integration
+with the unittest framework, and support for running non-Python text
+files containing doctests.  There are also many ways to override parts
+of doctest's default behaviors.  See the Library Reference Manual for
+details.
+"""
+
+__docformat__ = 'reStructuredText en'
+
+__all__ = [
+    # 0, Option Flags
+    'register_optionflag',
+    'DONT_ACCEPT_TRUE_FOR_1',
+    'DONT_ACCEPT_BLANKLINE',
+    'NORMALIZE_WHITESPACE',
+    'ELLIPSIS',
+    'IGNORE_EXCEPTION_DETAIL',
+    'COMPARISON_FLAGS',
+    'REPORT_UDIFF',
+    'REPORT_CDIFF',
+    'REPORT_NDIFF',
+    'REPORT_ONLY_FIRST_FAILURE',
+    'REPORTING_FLAGS',
+    # 1. Utility Functions
+    'is_private',
+    # 2. Example & DocTest
+    'Example',
+    'DocTest',
+    # 3. Doctest Parser
+    'DocTestParser',
+    # 4. Doctest Finder
+    'DocTestFinder',
+    # 5. Doctest Runner
+    'DocTestRunner',
+    'OutputChecker',
+    'DocTestFailure',
+    'UnexpectedException',
+    'DebugRunner',
+    # 6. Test Functions
+    'testmod',
+    'testfile',
+    'run_docstring_examples',
+    # 7. Tester
+    'Tester',
+    # 8. Unittest Support
+    'DocTestSuite',
+    'DocFileSuite',
+    'set_unittest_reportflags',
+    # 9. Debugging Support
+    'script_from_examples',
+    'testsource',
+    'debug_src',
+    'debug',
+]
+
+import __future__
+
+import sys, traceback, inspect, linecache, os, re, types
+import unittest, difflib, pdb, tempfile
+import warnings
+from StringIO import StringIO
+
+# Don't whine about the deprecated is_private function in this
+# module's tests.
+warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
+                        __name__, 0)
+
+# There are 4 basic classes:
+#  - Example: a <source, want> pair, plus an intra-docstring line number.
+#  - DocTest: a collection of examples, parsed from a docstring, plus
+#    info about where the docstring came from (name, filename, lineno).
+#  - DocTestFinder: extracts DocTests from a given object's docstring and
+#    its contained objects' docstrings.
+#  - DocTestRunner: runs DocTest cases, and accumulates statistics.
+#
+# So the basic picture is:
+#
+#                             list of:
+# +------+                   +---------+                   +-------+
+# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
+# +------+                   +---------+                   +-------+
+#                            | Example |
+#                            |   ...   |
+#                            | Example |
+#                            +---------+
+
# Option constants.

# Registry of all known option flags, mapping name -> bit value.
# Bits are allocated in registration order, so each call to
# register_optionflag() hands out the next free power of two.
OPTIONFLAGS_BY_NAME = {}
def register_optionflag(name):
    """Allocate a new option-flag bit, record it under `name` in
    OPTIONFLAGS_BY_NAME, and return it."""
    flag = 1 << len(OPTIONFLAGS_BY_NAME)
    OPTIONFLAGS_BY_NAME[name] = flag
    return flag

# Flags that affect how an example's actual output is compared
# against its expected output.
DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
ELLIPSIS = register_optionflag('ELLIPSIS')
IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')

# Mask covering every comparison-related flag.
COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
                    DONT_ACCEPT_BLANKLINE |
                    NORMALIZE_WHITESPACE |
                    ELLIPSIS |
                    IGNORE_EXCEPTION_DETAIL)

# Flags that affect how failures are reported.
REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')

# Mask covering every reporting-related flag.
REPORTING_FLAGS = (REPORT_UDIFF |
                   REPORT_CDIFF |
                   REPORT_NDIFF |
                   REPORT_ONLY_FIRST_FAILURE)

# Special string markers for use in `want` strings:
BLANKLINE_MARKER = '<BLANKLINE>'
ELLIPSIS_MARKER = '...'
+
+######################################################################
+## Table of Contents
+######################################################################
+#  1. Utility Functions
+#  2. Example & DocTest -- store test cases
+#  3. DocTest Parser -- extracts examples from strings
+#  4. DocTest Finder -- extracts test cases from objects
+#  5. DocTest Runner -- runs test cases
+#  6. Test Functions -- convenient wrappers for testing
+#  7. Tester Class -- for backwards compatibility
+#  8. Unittest Support
+#  9. Debugging Support
+# 10. Example Usage
+
+######################################################################
+## 1. Utility Functions
+######################################################################
+
def is_private(prefix, base):
    """prefix, base -> true iff name prefix + "." + base is "private".

    DEPRECATED.  `prefix` may be an empty string and is ignored
    (although conforming replacements may use it); `base` contains no
    period.  A name counts as private when `base` begins with at least
    one underscore but is not a dunder name (does not both begin and
    end with two underscores).

    >>> is_private("a.b", "my_func")
    False
    >>> is_private("____", "_my_func")
    True
    >>> is_private("someclass", "__init__")
    False
    >>> is_private("sometypo", "__init_")
    True
    >>> is_private("x.y.z", "_")
    True
    >>> is_private("_x.y.z", "__")
    False
    >>> is_private("", "")  # senseless but consistent
    False
    """
    warnings.warn("is_private is deprecated; it wasn't useful; "
                  "examine DocTestFinder.find() lists instead",
                  DeprecationWarning, stacklevel=2)
    if not base.startswith("_"):
        return False
    # Dunder names like "__init__" are public by convention.
    return not (base.startswith("__") and base.endswith("__"))
+
+def _extract_future_flags(globs):
+    """
+    Return the compiler-flags associated with the future features that
+    have been imported into the given namespace (globs).
+    """
+    flags = 0
+    for fname in __future__.all_feature_names:
+        feature = globs.get(fname, None)
+        if feature is getattr(__future__, fname):
+            flags |= feature.compiler_flag
+    return flags
+
+def _normalize_module(module, depth=2):
+    """
+    Return the module specified by `module`.  In particular:
+      - If `module` is a module, then return module.
+      - If `module` is a string, then import and return the
+        module with that name.
+      - If `module` is None, then return the calling module.
+        The calling module is assumed to be the module of
+        the stack frame at the given depth in the call stack.
+    """
+    if inspect.ismodule(module):
+        return module
+    elif isinstance(module, (str, unicode)):
+        return __import__(module, globals(), locals(), ["*"])
+    elif module is None:
+        return sys.modules[sys._getframe(depth).f_globals['__name__']]
+    else:
+        raise TypeError("Expected a module, string, or None")
+
+def _indent(s, indent=4):
+    """
+    Add the given number of space characters to the beginning every
+    non-blank line in `s`, and return the result.
+    """
+    # This regexp matches the start of non-blank lines:
+    return re.sub('(?m)^(?!$)', indent*' ', s)
+
+def _exception_traceback(exc_info):
+    """
+    Return a string containing a traceback message for the given
+    exc_info tuple (as returned by sys.exc_info()).
+    """
+    # Get a traceback message.
+    excout = StringIO()
+    exc_type, exc_val, exc_tb = exc_info
+    traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
+    return excout.getvalue()
+
+# Override some StringIO methods.
+class _SpoofOut(StringIO):
+    def getvalue(self):
+        result = StringIO.getvalue(self)
+        # If anything at all was written, make sure there's a trailing
+        # newline.  There's no way for the expected output to indicate
+        # that a trailing newline is missing.
+        if result and not result.endswith("\n"):
+            result += "\n"
+        # Prevent softspace from screwing up the next test case, in
+        # case they used print with a trailing comma in an example.
+        if hasattr(self, "softspace"):
+            del self.softspace
+        return result
+
+    def truncate(self,   size=None):
+        StringIO.truncate(self, size)
+        if hasattr(self, "softspace"):
+            del self.softspace
+
# Worst-case linear-time ellipsis matching.
def _ellipsis_match(want, got):
    """
    Return True iff `got` matches `want`, where each occurrence of
    ELLIPSIS_MARKER ('...') in `want` may stand for any substring of
    `got` (including the empty string).

    Runs in linear time by anchoring the first and last literal
    fragments at the ends of `got`, then greedily taking the leftmost
    match for each remaining fragment.  Essentially the only subtle
    case:
    >>> _ellipsis_match('aa...aa', 'aaa')
    False
    """
    if ELLIPSIS_MARKER not in want:
        return want == got

    # Find "the real" strings.
    ws = want.split(ELLIPSIS_MARKER)
    assert len(ws) >= 2

    # Deal with exact matches possibly needed at one or both ends.
    startpos, endpos = 0, len(got)
    w = ws[0]
    if w:   # starts with exact match
        if got.startswith(w):
            startpos = len(w)
            del ws[0]
        else:
            return False
    w = ws[-1]
    if w:   # ends with exact match
        if got.endswith(w):
            endpos -= len(w)
            del ws[-1]
        else:
            return False

    if startpos > endpos:
        # Exact end matches required more characters than we have, as in
        # _ellipsis_match('aa...aa', 'aaa')
        return False

    # For the rest, we only need to find the leftmost non-overlapping
    # match for each piece.  If there's no overall match that way alone,
    # there's no overall match period.
    for w in ws:
        # w may be '' at times, if there are consecutive ellipses, or
        # due to an ellipsis at the start or end of `want`.  That's OK.
        # Search for an empty string succeeds, and doesn't change startpos.
        startpos = got.find(w, startpos, endpos)
        if startpos < 0:
            return False
        startpos += len(w)

    return True
+
+def _comment_line(line):
+    "Return a commented form of the given line"
+    line = line.rstrip()
+    if line:
+        return '# '+line
+    else:
+        return '#'
+
+class _OutputRedirectingPdb(pdb.Pdb):
+    """
+    A specialized version of the python debugger that redirects stdout
+    to a given stream when interacting with the user.  Stdout is *not*
+    redirected when traced code is executed.
+    """
+    def __init__(self, out):
+        self.__out = out
+        pdb.Pdb.__init__(self)
+
+    def trace_dispatch(self, *args):
+        # Redirect stdout to the given stream.
+        save_stdout = sys.stdout
+        sys.stdout = self.__out
+        # Call Pdb's trace dispatch method.
+        try:
+            return pdb.Pdb.trace_dispatch(self, *args)
+        finally:
+            sys.stdout = save_stdout
+
+# [XX] Normalize with respect to os.path.pardir?
+def _module_relative_path(module, path):
+    if not inspect.ismodule(module):
+        raise TypeError, 'Expected a module: %r' % module
+    if path.startswith('/'):
+        raise ValueError, 'Module-relative files may not have absolute paths'
+
+    # Find the base directory for the path.
+    if hasattr(module, '__file__'):
+        # A normal module/package
+        basedir = os.path.split(module.__file__)[0]
+    elif module.__name__ == '__main__':
+        # An interactive session.
+        if len(sys.argv)>0 and sys.argv[0] != '':
+            basedir = os.path.split(sys.argv[0])[0]
+        else:
+            basedir = os.curdir
+    else:
+        # A module w/o __file__ (this includes builtins)
+        raise ValueError("Can't resolve paths relative to the module " +
+                         module + " (it has no __file__)")
+
+    # Combine the base directory and the path.
+    return os.path.join(basedir, *(path.split('/')))
+
+######################################################################
+## 2. Example & DocTest
+######################################################################
+## - An "example" is a <source, want> pair, where "source" is a
+##   fragment of source code, and "want" is the expected output for
+##   "source."  The Example class also includes information about
+##   where the example was extracted from.
+##
+## - A "doctest" is a collection of examples, typically extracted from
+##   a string (such as an object's docstring).  The DocTest class also
+##   includes information about where the string was extracted from.
+
class Example:
    """
    A single doctest example: a piece of source code plus its expected
    output.  Attributes:

      - source: the example's Python source, always newline-terminated
        (the constructor appends one if missing).

      - want: the expected stdout/traceback text; newline-terminated
        unless it is the empty string (again normalized here).

      - exc_msg: the expected exception message, compared against the
        result of `traceback.format_exception_only()`; it is
        newline-terminated, or None when no exception is expected.

      - lineno: zero-based line offset of this example within the
        DocTest string it came from.

      - indent: number of space characters before the example's first
        prompt in the DocTest string.

      - options: per-example overrides of the runner's option flags,
        mapping flag -> True/False.  Flags absent from the dictionary
        keep their default value; by default no options are set.
    """
    def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
                 options=None):
        # Normalize: source and any non-empty want/exc_msg must end in
        # a newline.
        if not source.endswith('\n'):
            source = source + '\n'
        if want and not want.endswith('\n'):
            want = want + '\n'
        if exc_msg is not None and not exc_msg.endswith('\n'):
            exc_msg = exc_msg + '\n'
        # Store properties.
        self.source = source
        self.want = want
        self.exc_msg = exc_msg
        self.lineno = lineno
        self.indent = indent
        if options is None:
            options = {}
        self.options = options
+
class DocTest:
    """
    A group of doctest examples that should run in one shared
    namespace.  Attributes:

      - examples: the list of Example objects.

      - globs: the globals dict the examples run in (a copy of the
        dict handed to the constructor).

      - name: identifies the DocTest (usually the dotted name of the
        object whose docstring it came from).

      - filename: the file the docstring was extracted from, or None
        when unknown.

      - lineno: zero-based line number of the docstring within
        filename, or None when unavailable.

      - docstring: the original string the examples were parsed from,
        or None when unavailable.
    """
    def __init__(self, examples, globs, name, filename, lineno, docstring):
        """
        Build a DocTest from an already-parsed list of examples; the
        namespace used at run time is a copy of `globs`.
        """
        assert not isinstance(examples, basestring), \
               "DocTest no longer accepts str; use DocTestParser instead"
        self.examples = examples
        self.docstring = docstring
        self.globs = globs.copy()
        self.name = name
        self.filename = filename
        self.lineno = lineno

    def __repr__(self):
        count = len(self.examples)
        if count == 0:
            examples = 'no examples'
        elif count == 1:
            examples = '1 example'
        else:
            examples = '%d examples' % count
        return ('<DocTest %s from %s:%s (%s)>' %
                (self.name, self.filename, self.lineno, examples))


    # This lets us sort tests by name (id() disambiguates duplicates):
    def __cmp__(self, other):
        if not isinstance(other, DocTest):
            return -1
        self_key = (self.name, self.filename, self.lineno, id(self))
        other_key = (other.name, other.filename, other.lineno, id(other))
        return cmp(self_key, other_key)
+
+######################################################################
+## 3. DocTestParser
+######################################################################
+
+class DocTestParser:
+    """
+    A class used to parse strings containing doctest examples.
+    """
+    # This regular expression is used to find doctest examples in a
+    # string.  It defines three groups: `source` is the source code
+    # (including leading indentation and prompts); `indent` is the
+    # indentation of the first (PS1) line of the source code; and
+    # `want` is the expected output (including leading indentation).
+    _EXAMPLE_RE = re.compile(r'''
+        # Source consists of a PS1 line followed by zero or more PS2 lines.
+        (?P<source>
+            (?:^(?P<indent> [ ]*) >>>    .*)    # PS1 line
+            (?:\n           [ ]*  \.\.\. .*)*)  # PS2 lines
+        \n?
+        # Want consists of any non-blank lines that do not start with PS1.
+        (?P<want> (?:(?![ ]*$)    # Not a blank line
+                     (?![ ]*>>>)  # Not a line starting with PS1
+                     .*$\n?       # But any other line
+                  )*)
+        ''', re.MULTILINE | re.VERBOSE)
+
+    # A regular expression for handling `want` strings that contain
+    # expected exceptions.  It divides `want` into three pieces:
+    #    - the traceback header line (`hdr`)
+    #    - the traceback stack (`stack`)
+    #    - the exception message (`msg`), as generated by
+    #      traceback.format_exception_only()
+    # `msg` may have multiple lines.  We assume/require that the
+    # exception message is the first non-indented line starting with a word
+    # character following the traceback header line.
+    _EXCEPTION_RE = re.compile(r"""
+        # Grab the traceback header.  Different versions of Python have
+        # said different things on the first traceback line.
+        ^(?P<hdr> Traceback\ \(
+            (?: most\ recent\ call\ last
+            |   innermost\ last
+            ) \) :
+        )
+        \s* $                # toss trailing whitespace on the header.
+        (?P<stack> .*?)      # don't blink: absorb stuff until...
+        ^ (?P<msg> \w+ .*)   #     a line *starts* with alphanum.
+        """, re.VERBOSE | re.MULTILINE | re.DOTALL)
+
+    # A callable returning a true value iff its argument is a blank line
+    # or contains a single comment.
+    _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
+
+    def parse(self, string, name='<string>'):
+        """
+        Divide the given string into examples and intervening text,
+        and return them as a list of alternating Examples and strings.
+        Line numbers for the Examples are 0-based.  The optional
+        argument `name` is a name identifying this string, and is only
+        used for error messages.
+        """
+        string = string.expandtabs()
+        # If all lines begin with the same indentation, then strip it.
+        min_indent = self._min_indent(string)
+        if min_indent > 0:
+            string = '\n'.join([l[min_indent:] for l in string.split('\n')])
+
+        output = []
+        charno, lineno = 0, 0
+        # Find all doctest examples in the string:
+        for m in self._EXAMPLE_RE.finditer(string):
+            # Add the pre-example text to `output`.
+            output.append(string[charno:m.start()])
+            # Update lineno (lines before this example)
+            lineno += string.count('\n', charno, m.start())
+            # Extract info from the regexp match.
+            (source, options, want, exc_msg) = \
+                     self._parse_example(m, name, lineno)
+            # Create an Example, and add it to the list.
+            # (A source consisting only of a blank line or a comment
+            # produces nothing to check, so it is dropped here.)
+            if not self._IS_BLANK_OR_COMMENT(source):
+                output.append( Example(source, want, exc_msg,
+                                    lineno=lineno,
+                                    indent=min_indent+len(m.group('indent')),
+                                    options=options) )
+            # Update lineno (lines inside this example)
+            lineno += string.count('\n', m.start(), m.end())
+            # Update charno.
+            charno = m.end()
+        # Add any remaining post-example text to `output`.
+        output.append(string[charno:])
+        return output
+
+    def get_doctest(self, string, globs, name, filename, lineno):
+        """
+        Extract all doctest examples from the given string, and
+        collect them into a `DocTest` object.
+
+        `globs`, `name`, `filename`, and `lineno` are attributes for
+        the new `DocTest` object.  See the documentation for `DocTest`
+        for more information.
+        """
+        return DocTest(self.get_examples(string, name), globs,
+                       name, filename, lineno, string)
+
+    def get_examples(self, string, name='<string>'):
+        """
+        Extract all doctest examples from the given string, and return
+        them as a list of `Example` objects.  Line numbers are
+        0-based, because it's most common in doctests that nothing
+        interesting appears on the same line as opening triple-quote,
+        and so the first interesting line is called \"line 1\" then.
+
+        The optional argument `name` is a name identifying this
+        string, and is only used for error messages.
+        """
+        return [x for x in self.parse(string, name)
+                if isinstance(x, Example)]
+
+    def _parse_example(self, m, name, lineno):
+        """
+        Given a regular expression match from `_EXAMPLE_RE` (`m`),
+        return a tuple `(source, options, want, exc_msg)`, where
+        `source` is the matched example's source code (with prompts
+        and indentation stripped); `options` is a dictionary of option
+        flag overrides extracted from the source (see `_find_options`);
+        `want` is the example's expected output (with indentation
+        stripped); and `exc_msg` is the expected exception message, or
+        None if `want` does not contain a traceback.
+
+        `name` is the string's name, and `lineno` is the line number
+        where the example starts; both are used for error messages.
+        """
+        # Get the example's indentation level.
+        indent = len(m.group('indent'))
+
+        # Divide source into lines; check that they're properly
+        # indented; and then strip their indentation & prompts.
+        source_lines = m.group('source').split('\n')
+        self._check_prompt_blank(source_lines, indent, name, lineno)
+        self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
+        # indent+4 skips the indentation plus the 4-character prompt
+        # ('>>> ' or '... ').
+        source = '\n'.join([sl[indent+4:] for sl in source_lines])
+
+        # Divide want into lines; check that it's properly indented; and
+        # then strip the indentation.  Spaces before the last newline should
+        # be preserved, so plain rstrip() isn't good enough.
+        want = m.group('want')
+        want_lines = want.split('\n')
+        if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
+            del want_lines[-1]  # forget final newline & spaces after it
+        self._check_prefix(want_lines, ' '*indent, name,
+                           lineno + len(source_lines))
+        want = '\n'.join([wl[indent:] for wl in want_lines])
+
+        # If `want` contains a traceback message, then extract it.
+        m = self._EXCEPTION_RE.match(want)
+        if m:
+            exc_msg = m.group('msg')
+        else:
+            exc_msg = None
+
+        # Extract options from the source.
+        options = self._find_options(source, name, lineno)
+
+        return source, options, want, exc_msg
+
+    # This regular expression looks for option directives in the
+    # source code of an example.  Option directives are comments
+    # starting with "doctest:".  Warning: this may give false
+    # positives for string-literals that contain the string
+    # "#doctest:".  Eliminating these false positives would require
+    # actually parsing the string; but we limit them by ignoring any
+    # line containing "#doctest:" that is *followed* by a quote mark.
+    _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
+                                      re.MULTILINE)
+
+    def _find_options(self, source, name, lineno):
+        """
+        Return a dictionary containing option overrides extracted from
+        option directives in the given source string.  Keys are option
+        flags; a value of True means the flag was enabled ('+'), False
+        that it was disabled ('-').
+
+        `name` is the string's name, and `lineno` is the line number
+        where the example starts; both are used for error messages.
+        """
+        options = {}
+        # (note: with the current regexp, this will match at most once:)
+        for m in self._OPTION_DIRECTIVE_RE.finditer(source):
+            option_strings = m.group(1).replace(',', ' ').split()
+            for option in option_strings:
+                if (option[0] not in '+-' or
+                    option[1:] not in OPTIONFLAGS_BY_NAME):
+                    raise ValueError('line %r of the doctest for %s '
+                                     'has an invalid option: %r' %
+                                     (lineno+1, name, option))
+                flag = OPTIONFLAGS_BY_NAME[option[1:]]
+                options[flag] = (option[0] == '+')
+        if options and self._IS_BLANK_OR_COMMENT(source):
+            raise ValueError('line %r of the doctest for %s has an option '
+                             'directive on a line with no example: %r' %
+                             (lineno, name, source))
+        return options
+
+    # This regular expression finds the indentation of every non-blank
+    # line in a string.
+    # NOTE(review): the pattern below is not a raw string; '\S' survives
+    # only because it is not a recognized string escape.  An r'' literal
+    # would be cleaner.
+    _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)
+
+    def _min_indent(self, s):
+        "Return the minimum indentation of any non-blank line in `s`"
+        indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
+        if len(indents) > 0:
+            return min(indents)
+        else:
+            return 0
+
+    def _check_prompt_blank(self, lines, indent, name, lineno):
+        """
+        Given the lines of a source string (including prompts and
+        leading indentation), check to make sure that every prompt is
+        followed by a space character.  If any line is not followed by
+        a space character, then raise ValueError.
+        """
+        for i, line in enumerate(lines):
+            # The 3-character prompt ('>>>' or '...') occupies columns
+            # indent..indent+2; column indent+3 must be a blank.
+            if len(line) >= indent+4 and line[indent+3] != ' ':
+                raise ValueError('line %r of the docstring for %s '
+                                 'lacks blank after %s: %r' %
+                                 (lineno+i+1, name,
+                                  line[indent:indent+3], line))
+
+    def _check_prefix(self, lines, prefix, name, lineno):
+        """
+        Check that every line in the given list starts with the given
+        prefix; if any line does not, then raise a ValueError.
+        """
+        for i, line in enumerate(lines):
+            if line and not line.startswith(prefix):
+                raise ValueError('line %r of the docstring for %s has '
+                                 'inconsistent leading whitespace: %r' %
+                                 (lineno+i+1, name, line))
+
+
+######################################################################
+## 4. DocTest Finder
+######################################################################
+
+class DocTestFinder:
+    """
+    A class used to extract the DocTests that are relevant to a given
+    object, from its docstring and the docstrings of its contained
+    objects.  Doctests can currently be extracted from the following
+    object types: modules, functions, classes, methods, staticmethods,
+    classmethods, and properties.
+    """
+
+    # NOTE(review): the shared default `parser` instance appears
+    # deliberate -- DocTestParser keeps no per-instance state -- but it
+    # means all default-constructed finders share one parser object.
+    def __init__(self, verbose=False, parser=DocTestParser(),
+                 recurse=True, _namefilter=None, exclude_empty=True):
+        """
+        Create a new doctest finder.
+
+        The optional argument `parser` specifies a class or
+        function that should be used to create new DocTest objects (or
+        objects that implement the same interface as DocTest).  The
+        signature for this factory function should match the signature
+        of the DocTest constructor.
+
+        If the optional argument `recurse` is false, then `find` will
+        only examine the given object, and not any contained objects.
+
+        If the optional argument `exclude_empty` is false, then `find`
+        will include tests for objects with empty docstrings.
+        """
+        self._parser = parser
+        self._verbose = verbose
+        self._recurse = recurse
+        self._exclude_empty = exclude_empty
+        # _namefilter is undocumented, and exists only for temporary backward-
+        # compatibility support of testmod's deprecated isprivate mess.
+        self._namefilter = _namefilter
+
+    def find(self, obj, name=None, module=None, globs=None,
+             extraglobs=None):
+        """
+        Return a list of the DocTests that are defined by the given
+        object's docstring, or by any of its contained objects'
+        docstrings.
+
+        The optional parameter `module` is the module that contains
+        the given object.  If the module is not specified or is None, then
+        the test finder will attempt to automatically determine the
+        correct module.  The object's module is used:
+
+            - As a default namespace, if `globs` is not specified.
+            - To prevent the DocTestFinder from extracting DocTests
+              from objects that are imported from other modules.
+            - To find the name of the file containing the object.
+            - To help find the line number of the object within its
+              file.
+
+        Contained objects whose module does not match `module` are ignored.
+
+        If `module` is False, no attempt to find the module will be made.
+        This is obscure, of use mostly in tests:  if `module` is False, or
+        is None but cannot be found automatically, then all objects are
+        considered to belong to the (non-existent) module, so all contained
+        objects will (recursively) be searched for doctests.
+
+        The globals for each DocTest is formed by combining `globs`
+        and `extraglobs` (bindings in `extraglobs` override bindings
+        in `globs`).  A new copy of the globals dictionary is created
+        for each DocTest.  If `globs` is not specified, then it
+        defaults to the module's `__dict__`, if specified, or {}
+        otherwise.  If `extraglobs` is not specified, then it defaults
+        to {}.
+
+        """
+        # If name was not specified, then extract it from the object.
+        if name is None:
+            name = getattr(obj, '__name__', None)
+            if name is None:
+                raise ValueError("DocTestFinder.find: name must be given "
+                        "when obj.__name__ doesn't exist: %r" %
+                                 (type(obj),))
+
+        # Find the module that contains the given object (if obj is
+        # a module, then module=obj.).  Note: this may fail, in which
+        # case module will be None.
+        if module is False:
+            module = None
+        elif module is None:
+            module = inspect.getmodule(obj)
+
+        # Read the module's source code.  This is used by
+        # DocTestFinder._find_lineno to find the line number for a
+        # given object's docstring.
+        try:
+            file = inspect.getsourcefile(obj) or inspect.getfile(obj)
+            source_lines = linecache.getlines(file)
+            if not source_lines:
+                source_lines = None
+        except TypeError:
+            # inspect.getfile raises TypeError for built-in objects.
+            source_lines = None
+
+        # Initialize globals, and merge in extraglobs.
+        if globs is None:
+            if module is None:
+                globs = {}
+            else:
+                globs = module.__dict__.copy()
+        else:
+            globs = globs.copy()
+        if extraglobs is not None:
+            globs.update(extraglobs)
+
+        # Recursively explore `obj`, extracting DocTests.
+        tests = []
+        self._find(tests, obj, name, module, source_lines, globs, {})
+        return tests
+
+    def _filter(self, obj, prefix, base):
+        """
+        Return true if the given object should not be examined.
+        """
+        return (self._namefilter is not None and
+                self._namefilter(prefix, base))
+
+    def _from_module(self, module, object):
+        """
+        Return true if the given object is defined in the given
+        module.
+        """
+        if module is None:
+            return True
+        elif inspect.isfunction(object):
+            # func_globals is the Python 2 spelling of __globals__.
+            return module.__dict__ is object.func_globals
+        elif inspect.isclass(object):
+            return module.__name__ == object.__module__
+        elif inspect.getmodule(object) is not None:
+            return module is inspect.getmodule(object)
+        elif hasattr(object, '__module__'):
+            return module.__name__ == object.__module__
+        elif isinstance(object, property):
+            return True # [XX] no way to be sure.
+        else:
+            raise ValueError("object must be a class or function")
+
+    def _find(self, tests, obj, name, module, source_lines, globs, seen):
+        """
+        Find tests for the given object and any contained objects, and
+        add them to `tests`.  `seen` maps id(obj) -> 1 for objects that
+        have already been processed, to avoid duplicates and cycles.
+        """
+        if self._verbose:
+            print 'Finding tests in %s' % name
+
+        # If we've already processed this object, then ignore it.
+        if id(obj) in seen:
+            return
+        seen[id(obj)] = 1
+
+        # Find a test for this object, and add it to the list of tests.
+        test = self._get_test(obj, name, module, globs, source_lines)
+        if test is not None:
+            tests.append(test)
+
+        # Look for tests in a module's contained objects.
+        if inspect.ismodule(obj) and self._recurse:
+            for valname, val in obj.__dict__.items():
+                # Check if this contained object should be ignored.
+                if self._filter(val, name, valname):
+                    continue
+                valname = '%s.%s' % (name, valname)
+                # Recurse to functions & classes.
+                if ((inspect.isfunction(val) or inspect.isclass(val)) and
+                    self._from_module(module, val)):
+                    self._find(tests, val, valname, module, source_lines,
+                               globs, seen)
+
+        # Look for tests in a module's __test__ dictionary.
+        if inspect.ismodule(obj) and self._recurse:
+            for valname, val in getattr(obj, '__test__', {}).items():
+                if not isinstance(valname, basestring):
+                    raise ValueError("DocTestFinder.find: __test__ keys "
+                                     "must be strings: %r" %
+                                     (type(valname),))
+                if not (inspect.isfunction(val) or inspect.isclass(val) or
+                        inspect.ismethod(val) or inspect.ismodule(val) or
+                        isinstance(val, basestring)):
+                    raise ValueError("DocTestFinder.find: __test__ values "
+                                     "must be strings, functions, methods, "
+                                     "classes, or modules: %r" %
+                                     (type(val),))
+                valname = '%s.__test__.%s' % (name, valname)
+                self._find(tests, val, valname, module, source_lines,
+                           globs, seen)
+
+        # Look for tests in a class's contained objects.
+        if inspect.isclass(obj) and self._recurse:
+            for valname, val in obj.__dict__.items():
+                # Check if this contained object should be ignored.
+                if self._filter(val, name, valname):
+                    continue
+                # Special handling for staticmethod/classmethod: the raw
+                # __dict__ entries are descriptor objects, so fetch the
+                # underlying function via attribute access instead.
+                if isinstance(val, staticmethod):
+                    val = getattr(obj, valname)
+                if isinstance(val, classmethod):
+                    # im_func is the Python 2 spelling of __func__.
+                    val = getattr(obj, valname).im_func
+
+                # Recurse to methods, properties, and nested classes.
+                if ((inspect.isfunction(val) or inspect.isclass(val) or
+                      isinstance(val, property)) and
+                      self._from_module(module, val)):
+                    valname = '%s.%s' % (name, valname)
+                    self._find(tests, val, valname, module, source_lines,
+                               globs, seen)
+
+    def _get_test(self, obj, name, module, globs, source_lines):
+        """
+        Return a DocTest for the given object, if it defines a docstring;
+        otherwise, return None.
+        """
+        # Extract the object's docstring.  If it doesn't have one,
+        # then return None (no test for this object).
+        if isinstance(obj, basestring):
+            docstring = obj
+        else:
+            try:
+                if obj.__doc__ is None:
+                    docstring = ''
+                else:
+                    docstring = obj.__doc__
+                    if not isinstance(docstring, basestring):
+                        docstring = str(docstring)
+            except (TypeError, AttributeError):
+                docstring = ''
+
+        # Find the docstring's location in the file.
+        lineno = self._find_lineno(obj, source_lines)
+
+        # Don't bother if the docstring is empty.
+        if self._exclude_empty and not docstring:
+            return None
+
+        # Return a DocTest for this object.
+        if module is None:
+            filename = None
+        else:
+            filename = getattr(module, '__file__', module.__name__)
+            # Point at the .py source rather than the compiled file.
+            if filename[-4:] in (".pyc", ".pyo"):
+                filename = filename[:-1]
+        return self._parser.get_doctest(docstring, globs, name,
+                                        filename, lineno)
+
+    def _find_lineno(self, obj, source_lines):
+        """
+        Return a line number of the given object's docstring.  Note:
+        this method assumes that the object has a docstring.
+        """
+        lineno = None
+
+        # Find the line number for modules.
+        if inspect.ismodule(obj):
+            lineno = 0
+
+        # Find the line number for classes.
+        # Note: this could be fooled if a class is defined multiple
+        # times in a single file.
+        if inspect.isclass(obj):
+            if source_lines is None:
+                return None
+            pat = re.compile(r'^\s*class\s*%s\b' %
+                             getattr(obj, '__name__', '-'))
+            for i, line in enumerate(source_lines):
+                if pat.match(line):
+                    lineno = i
+                    break
+
+        # Find the line number for functions & methods.
+        if inspect.ismethod(obj): obj = obj.im_func
+        if inspect.isfunction(obj): obj = obj.func_code
+        if inspect.istraceback(obj): obj = obj.tb_frame
+        if inspect.isframe(obj): obj = obj.f_code
+        if inspect.iscode(obj):
+            # co_firstlineno is 1-based; the rest of this module uses
+            # 0-based line numbers, hence the -1.
+            lineno = getattr(obj, 'co_firstlineno', None)-1
+
+        # Find the line number where the docstring starts.  Assume
+        # that it's the first line that begins with a quote mark.
+        # Note: this could be fooled by a multiline function
+        # signature, where a continuation line begins with a quote
+        # mark.
+        if lineno is not None:
+            if source_lines is None:
+                return lineno+1
+            # NOTE(review): non-raw pattern string; \s, \w and \' happen
+            # to pass through unchanged, but r'' would be cleaner.
+            pat = re.compile('(^|.*:)\s*\w*("|\')')
+            for lineno in range(lineno, len(source_lines)):
+                if pat.match(source_lines[lineno]):
+                    return lineno
+
+        # We couldn't find the line number.
+        return None
+
+######################################################################
+## 5. DocTest Runner
+######################################################################
+
+class DocTestRunner:
+    """
+    A class used to run DocTest test cases, and accumulate statistics.
+    The `run` method is used to process a single DocTest case.  It
+    returns a tuple `(f, t)`, where `t` is the number of test cases
+    tried, and `f` is the number of test cases that failed.
+
+        >>> tests = DocTestFinder().find(_TestClass)
+        >>> runner = DocTestRunner(verbose=False)
+        >>> for test in tests:
+        ...     print runner.run(test)
+        (0, 2)
+        (0, 1)
+        (0, 2)
+        (0, 2)
+
+    The `summarize` method prints a summary of all the test cases that
+    have been run by the runner, and returns an aggregated `(f, t)`
+    tuple:
+
+        >>> runner.summarize(verbose=1)
+        4 items passed all tests:
+           2 tests in _TestClass
+           2 tests in _TestClass.__init__
+           2 tests in _TestClass.get
+           1 tests in _TestClass.square
+        7 tests in 4 items.
+        7 passed and 0 failed.
+        Test passed.
+        (0, 7)
+
+    The aggregated number of tried examples and failed examples is
+    also available via the `tries` and `failures` attributes:
+
+        >>> runner.tries
+        7
+        >>> runner.failures
+        0
+
+    The comparison between expected outputs and actual outputs is done
+    by an `OutputChecker`.  This comparison may be customized with a
+    number of option flags; see the documentation for `testmod` for
+    more information.  If the option flags are insufficient, then the
+    comparison may also be customized by passing a subclass of
+    `OutputChecker` to the constructor.
+
+    The test runner's display output can be controlled in two ways.
+    First, an output function (`out`) can be passed to
+    `TestRunner.run`; this function will be called with strings that
+    should be displayed.  It defaults to `sys.stdout.write`.  If
+    capturing the output is not sufficient, then the display output
+    can be also customized by subclassing DocTestRunner, and
+    overriding the methods `report_start`, `report_success`,
+    `report_unexpected_exception`, and `report_failure`.
+    """
+    # This divider string is used to separate failure messages, and to
+    # separate sections of the summary.
+    DIVIDER = "*" * 70
+
+    def __init__(self, checker=None, verbose=None, optionflags=0):
+        """
+        Create a new test runner.
+
+        Optional keyword arg `checker` is the `OutputChecker` that
+        should be used to compare the expected outputs and actual
+        outputs of doctest examples.
+
+        Optional keyword arg 'verbose' prints lots of stuff if true,
+        only failures if false; by default, it's true iff '-v' is in
+        sys.argv.
+
+        Optional argument `optionflags` can be used to control how the
+        test runner compares expected output to actual output, and how
+        it displays failures.  See the documentation for `testmod` for
+        more information.
+        """
+        self._checker = checker or OutputChecker()
+        if verbose is None:
+            verbose = '-v' in sys.argv
+        self._verbose = verbose
+        # `optionflags` is temporarily modified by per-example option
+        # directives while running; `original_optionflags` preserves the
+        # constructor value.
+        self.optionflags = optionflags
+        self.original_optionflags = optionflags
+
+        # Keep track of the examples we've run.
+        self.tries = 0
+        self.failures = 0
+        # Maps test name -> (#failures, #tries), aggregated across runs.
+        self._name2ft = {}
+
+        # Create a fake output target for capturing doctest output.
+        self._fakeout = _SpoofOut()
+
+    #/////////////////////////////////////////////////////////////////
+    # Reporting methods
+    #/////////////////////////////////////////////////////////////////
+
+    def report_start(self, out, test, example):
+        """
+        Report that the test runner is about to process the given
+        example.  (Only displays a message if verbose=True)
+        `out` is the writer function supplied by the caller.
+        """
+        if self._verbose:
+            if example.want:
+                out('Trying:\n' + _indent(example.source) +
+                    'Expecting:\n' + _indent(example.want))
+            else:
+                out('Trying:\n' + _indent(example.source) +
+                    'Expecting nothing\n')
+
+    def report_success(self, out, test, example, got):
+        """
+        Report that the given example ran successfully.  (Only
+        displays a message if verbose=True)
+        """
+        if self._verbose:
+            out("ok\n")
+
+    def report_failure(self, out, test, example, got):
+        """
+        Report that the given example failed.
+        """
+        # Emit the standard failure header followed by the checker's
+        # rendering of the expected/actual output difference.
+        out(self._failure_header(test, example) +
+            self._checker.output_difference(example, got, self.optionflags))
+
+    def report_unexpected_exception(self, out, test, example, exc_info):
+        """
+        Report that the given example raised an unexpected exception.
+        `exc_info` is a (type, value, traceback) triple as returned by
+        sys.exc_info().
+        """
+        out(self._failure_header(test, example) +
+            'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
+
+    def _failure_header(self, test, example):
+        """
+        Return a header string identifying the failing example: a
+        divider line, the file/line location (when known), and the
+        example's indented source.
+        """
+        out = [self.DIVIDER]
+        if test.filename:
+            if test.lineno is not None and example.lineno is not None:
+                # Example line numbers are 0-based and relative to the
+                # start of the doctest; convert to a 1-based file line.
+                lineno = test.lineno + example.lineno + 1
+            else:
+                lineno = '?'
+            out.append('File "%s", line %s, in %s' %
+                       (test.filename, lineno, test.name))
+        else:
+            out.append('Line %s, in %s' % (example.lineno+1, test.name))
+        out.append('Failed example:')
+        source = example.source
+        out.append(_indent(source))
+        return '\n'.join(out)
+
+    #/////////////////////////////////////////////////////////////////
+    # DocTest Running
+    #/////////////////////////////////////////////////////////////////
+
+    def __run(self, test, compileflags, out):
+        """
+        Run the examples in `test`.  Write the outcome of each example
+        with one of the `DocTestRunner.report_*` methods, using the
+        writer function `out`.  `compileflags` is the set of compiler
+        flags that should be used to execute examples.  Return a tuple
+        `(f, t)`, where `t` is the number of examples tried, and `f`
+        is the number of examples that failed.  The examples are run
+        in the namespace `test.globs`.
+        """
+        # Keep track of the number of failures and tries.
+        failures = tries = 0
+
+        # Save the option flags (since option directives can be used
+        # to modify them).
+        original_optionflags = self.optionflags
+
+        SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
+
+        # Bind the checker method to a local for use in the loop below.
+        check = self._checker.check_output
+
+        # Process each example.
+        for examplenum, example in enumerate(test.examples):
+
+            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
+            # reporting after the first failure.
+            quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
+                     failures > 0)
+
+            # Merge in the example's options.
+            self.optionflags = original_optionflags
+            if example.options:
+                for (optionflag, val) in example.options.items():
+                    if val:
+                        self.optionflags |= optionflag
+                    else:
+                        self.optionflags &= ~optionflag
+
+            # Record that we started this example.
+            tries += 1
+            if not quiet:
+                self.report_start(out, test, example)
+
+            # Use a special filename for compile(), so we can retrieve
+            # the source code during interactive debugging (see
+            # __patched_linecache_getlines).
+            filename = '<doctest %s[%d]>' % (test.name, examplenum)
+
+            # Run the example in the given context (globs), and record
+            # any exception that gets raised.  (But don't intercept
+            # keyboard interrupts.)
+            # NOTE(review): self.debugger is expected to be installed by
+            # run() before this method is invoked -- not visible in this
+            # chunk; confirm.
+            try:
+                # Don't blink!  This is where the user's code gets run.
+                exec compile(example.source, filename, "single",
+                             compileflags, 1) in test.globs
+                self.debugger.set_continue() # ==== Example Finished ====
+                exception = None
+            except KeyboardInterrupt:
+                raise
+            except:
+                exception = sys.exc_info()
+                self.debugger.set_continue() # ==== Example Finished ====
+
+            got = self._fakeout.getvalue()  # the actual output
+            self._fakeout.truncate(0)
+            outcome = FAILURE   # guilty until proved innocent or insane
+
+            # If the example executed without raising any exceptions,
+            # verify its output.
+            if exception is None:
+                if check(example.want, got, self.optionflags):
+                    outcome = SUCCESS
+
+            # The example raised an exception:  check if it was expected.
+            else:
+                # (exc_info is also used below when reporting BOOM.)
+                exc_info = sys.exc_info()
+                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
+                if not quiet:
+                    got += _exception_traceback(exc_info)
+
+                # If `example.exc_msg` is None, then we weren't expecting
+                # an exception.
+                if example.exc_msg is None:
+                    outcome = BOOM
+
+                # We expected an exception:  see whether it matches.
+                elif check(example.exc_msg, exc_msg, self.optionflags):
+                    outcome = SUCCESS
+
+                # Another chance if they didn't care about the detail.
+                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
+                    # Compare just the 'ExceptionType:' prefixes.
+                    m1 = re.match(r'[^:]*:', example.exc_msg)
+                    m2 = re.match(r'[^:]*:', exc_msg)
+                    if m1 and m2 and check(m1.group(0), m2.group(0),
+                                           self.optionflags):
+                        outcome = SUCCESS
+
+            # Report the outcome.
+            if outcome is SUCCESS:
+                if not quiet:
+                    self.report_success(out, test, example, got)
+            elif outcome is FAILURE:
+                if not quiet:
+                    self.report_failure(out, test, example, got)
+                failures += 1
+            elif outcome is BOOM:
+                if not quiet:
+                    self.report_unexpected_exception(out, test, example,
+                                                     exc_info)
+                failures += 1
+            else:
+                assert False, ("unknown outcome", outcome)
+
+        # Restore the option flags (in case they were modified)
+        self.optionflags = original_optionflags
+
+        # Record and return the number of failures and tries.
+        self.__record_outcome(test, failures, tries)
+        return failures, tries
+
+    def __record_outcome(self, test, f, t):
+        """
+        Record the fact that the given DocTest (`test`) generated `f`
+        failures out of `t` tried examples.
+        """
+        f2, t2 = self._name2ft.get(test.name, (0,0))
+        self._name2ft[test.name] = (f+f2, t+t2)
+        self.failures += f
+        self.tries += t
+
+    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
+                                         r'(?P<name>[\w\.]+)'
+                                         r'\[(?P<examplenum>\d+)\]>$')
+    def __patched_linecache_getlines(self, filename):
+        m = self.__LINECACHE_FILENAME_RE.match(filename)
+        if m and m.group('name') == self.test.name:
+            example = self.test.examples[int(m.group('examplenum'))]
+            return example.source.splitlines(True)
+        else:
+            return self.save_linecache_getlines(filename)
+
    def run(self, test, compileflags=None, out=None, clear_globs=True):
        """
        Run the examples in `test`, and display the results using the
        writer function `out`.

        The examples are run in the namespace `test.globs`.  If
        `clear_globs` is true (the default), then this namespace will
        be cleared after the test runs, to help with garbage
        collection.  If you would like to examine the namespace after
        the test completes, then use `clear_globs=False`.

        `compileflags` gives the set of flags that should be used by
        the Python compiler when running the examples.  If not
        specified, then it will default to the set of future-import
        flags that apply to `globs`.

        The output of each example is checked using
        `DocTestRunner.check_output`, and the results are formatted by
        the `DocTestRunner.report_*` methods.

        Returns the `(failures, tries)` pair produced by `__run`.
        """
        # Remember the test so __patched_linecache_getlines can look up
        # example source by test name while we are inside the debugger.
        self.test = test

        if compileflags is None:
            compileflags = _extract_future_flags(test.globs)

        # Redirect stdout into self._fakeout so each example's actual
        # output can be captured and compared with its expected output.
        save_stdout = sys.stdout
        if out is None:
            out = save_stdout.write
        sys.stdout = self._fakeout

        # Patch pdb.set_trace to restore sys.stdout during interactive
        # debugging (so it's not still redirected to self._fakeout).
        # Note that the interactive output will go to *our*
        # save_stdout, even if that's not the real sys.stdout; this
        # allows us to write test cases for the set_trace behavior.
        save_set_trace = pdb.set_trace
        self.debugger = _OutputRedirectingPdb(save_stdout)
        self.debugger.reset()
        pdb.set_trace = self.debugger.set_trace

        # Patch linecache.getlines, so we can see the example's source
        # when we're inside the debugger.
        self.save_linecache_getlines = linecache.getlines
        linecache.getlines = self.__patched_linecache_getlines

        try:
            return self.__run(test, compileflags, out)
        finally:
            # Undo all three monkey-patches even if __run raises, so a
            # failing doctest never leaves the interpreter redirected.
            sys.stdout = save_stdout
            pdb.set_trace = save_set_trace
            linecache.getlines = self.save_linecache_getlines
            if clear_globs:
                test.globs.clear()
+
+    #/////////////////////////////////////////////////////////////////
+    # Summarization
+    #/////////////////////////////////////////////////////////////////
    def summarize(self, verbose=None):
        """
        Print a summary of all the test cases that have been run by
        this DocTestRunner, and return a tuple `(f, t)`, where `f` is
        the total number of failed examples, and `t` is the total
        number of tried examples.

        The optional `verbose` argument controls how detailed the
        summary is.  If the verbosity is not specified, then the
        DocTestRunner's verbosity is used.
        """
        if verbose is None:
            verbose = self._verbose
        # Partition the recorded (failures, tries) tallies into tests
        # with no examples, tests that fully passed, and tests with at
        # least one failure, accumulating grand totals along the way.
        notests = []
        passed = []
        failed = []
        totalt = totalf = 0
        for x in self._name2ft.items():
            name, (f, t) = x
            assert f <= t
            totalt += t
            totalf += f
            if t == 0:
                notests.append(name)
            elif f == 0:
                passed.append( (name, t) )
            else:
                failed.append(x)
        # Verbose mode also reports the uninteresting buckets.
        if verbose:
            if notests:
                print len(notests), "items had no tests:"
                notests.sort()
                for thing in notests:
                    print "   ", thing
            if passed:
                print len(passed), "items passed all tests:"
                passed.sort()
                for thing, count in passed:
                    print " %3d tests in %s" % (count, thing)
        # Failures are always reported, regardless of verbosity.
        if failed:
            print self.DIVIDER
            print len(failed), "items had failures:"
            failed.sort()
            for thing, (f, t) in failed:
                print " %3d of %3d in %s" % (f, t, thing)
        if verbose:
            print totalt, "tests in", len(self._name2ft), "items."
            print totalt - totalf, "passed and", totalf, "failed."
        if totalf:
            print "***Test Failed***", totalf, "failures."
        elif verbose:
            print "Test passed."
        return totalf, totalt
+
+    #/////////////////////////////////////////////////////////////////
+    # Backward compatibility cruft to maintain doctest.master.
+    #/////////////////////////////////////////////////////////////////
    def merge(self, other):
        """
        Fold `other`'s recorded (failures, tries) tallies into this
        runner's, summing the counts for any test name present in both.
        Used to keep the backward-compatibility `doctest.master`
        instance up to date.
        """
        d = self._name2ft
        for name, (f, t) in other._name2ft.items():
            if name in d:
                print "*** DocTestRunner.merge: '" + name + "' in both" \
                    " testers; summing outcomes."
                f2, t2 = d[name]
                f = f + f2
                t = t + t2
            d[name] = f, t
+
class OutputChecker:
    """
    A class used to check whether the actual output from a doctest
    example matches the expected output.  `OutputChecker` defines two
    methods: `check_output`, which compares a given pair of outputs,
    and returns true if they match; and `output_difference`, which
    returns a string describing the differences between two outputs.
    """
    def check_output(self, want, got, optionflags):
        """
        Return True iff the actual output from an example (`got`)
        matches the expected output (`want`).  These strings are
        always considered to match if they are identical; but
        depending on what option flags the test runner is using,
        several non-exact match types are also possible.  See the
        documentation for `DocTestRunner` for more information about
        option flags.
        """
        # Handle the common case first, for efficiency:
        # if they're string-identical, always return true.
        if got == want:
            return True

        # The values True and False replaced 1 and 0 as the return
        # value for boolean comparisons in Python 2.3.
        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
            if (got,want) == ("True\n", "1\n"):
                return True
            if (got,want) == ("False\n", "0\n"):
                return True

        # <BLANKLINE> can be used as a special sequence to signify a
        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
        # (The patterns are raw strings so that "\s" reaches the regex
        # engine intact rather than being an invalid string escape.)
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            # Replace <BLANKLINE> in want with a blank line.
            want = re.sub(r'(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
                          '', want)
            # If a line in got contains only spaces, then remove the
            # spaces.
            got = re.sub(r'(?m)^\s*?$', '', got)
            if got == want:
                return True

        # This flag causes doctest to ignore any differences in the
        # contents of whitespace strings.  Note that this can be used
        # in conjunction with the ELLIPSIS flag.
        if optionflags & NORMALIZE_WHITESPACE:
            got = ' '.join(got.split())
            want = ' '.join(want.split())
            if got == want:
                return True

        # The ELLIPSIS flag says to let the sequence "..." in `want`
        # match any substring in `got`.
        if optionflags & ELLIPSIS:
            if _ellipsis_match(want, got):
                return True

        # We didn't find any match; return false.
        return False

    # Should we do a fancy diff?
    def _do_a_fancy_diff(self, want, got, optionflags):
        # Not unless they asked for a fancy diff.
        if not optionflags & (REPORT_UDIFF |
                              REPORT_CDIFF |
                              REPORT_NDIFF):
            return False

        # If expected output uses ellipsis, a meaningful fancy diff is
        # too hard ... or maybe not.  In two real-life failures Tim saw,
        # a diff was a major help anyway, so this is commented out.
        # [todo] _ellipsis_match() knows which pieces do and don't match,
        # and could be the basis for a kick-ass diff in this case.
        ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
        ##    return False

        # ndiff does intraline difference marking, so can be useful even
        # for 1-line differences.
        if optionflags & REPORT_NDIFF:
            return True

        # The other diff types need at least a few lines to be helpful.
        return want.count('\n') > 2 and got.count('\n') > 2

    def output_difference(self, example, got, optionflags):
        """
        Return a string describing the differences between the
        expected output for a given example (`example`) and the actual
        output (`got`).  `optionflags` is the set of option flags used
        to compare `want` and `got`.
        """
        want = example.want
        # If <BLANKLINE>s are being used, then replace blank lines
        # with <BLANKLINE> in the actual output string.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            got = re.sub(r'(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)

        # Check if we should use diff.
        if self._do_a_fancy_diff(want, got, optionflags):
            # Split want & got into lines.
            want_lines = want.splitlines(True)  # True == keep line ends
            got_lines = got.splitlines(True)
            # Use difflib to find their differences.
            if optionflags & REPORT_UDIFF:
                diff = difflib.unified_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'unified diff with -expected +actual'
            elif optionflags & REPORT_CDIFF:
                diff = difflib.context_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'context diff with expected followed by actual'
            elif optionflags & REPORT_NDIFF:
                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
                diff = list(engine.compare(want_lines, got_lines))
                kind = 'ndiff with -expected +actual'
            else:
                assert 0, 'Bad diff option'
            # Remove trailing whitespace on diff output.
            diff = [line.rstrip() + '\n' for line in diff]
            return 'Differences (%s):\n' % kind + _indent(''.join(diff))

        # If we're not using diff, then simply list the expected
        # output followed by the actual output.
        if want and got:
            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
        elif want:
            return 'Expected:\n%sGot nothing\n' % _indent(want)
        elif got:
            return 'Expected nothing\nGot:\n%s' % _indent(got)
        else:
            return 'Expected nothing\nGot nothing\n'
+
class DocTestFailure(Exception):
    """A DocTest example has failed in debugging mode.

    The exception instance has variables:

    - test: the DocTest object being run

    - example: the Example object that failed

    - got: the actual output
    """
    def __init__(self, test, example, got):
        self.test = test
        self.example = example
        self.got = got

    def __str__(self):
        # Identify the failing DocTest; the example and output are
        # available on the instance for programmatic inspection.
        return str(self.test)
+
class UnexpectedException(Exception):
    """A DocTest example has encountered an unexpected exception

    The exception instance has variables:

    - test: the DocTest object being run

    - example: the Example object that failed

    - exc_info: the exception info
    """
    def __init__(self, test, example, exc_info):
        self.test = test
        self.example = example
        self.exc_info = exc_info

    def __str__(self):
        # Identify the failing DocTest; the wrapped exc_info triple is
        # available on the instance for re-raising.
        return str(self.test)
+
class DebugRunner(DocTestRunner):
    r"""Run doc tests but raise an exception as soon as there is a failure.

       If an unexpected exception occurs, an UnexpectedException is raised.
       It contains the test, the example, and the original exception:

         >>> runner = DebugRunner(verbose=False)
         >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
         ...                                    {}, 'foo', 'foo.py', 0)
         >>> try:
         ...     runner.run(test)
         ... except UnexpectedException, failure:
         ...     pass

         >>> failure.test is test
         True

         >>> failure.example.want
         '42\n'

         >>> exc_info = failure.exc_info
         >>> raise exc_info[0], exc_info[1], exc_info[2]
         Traceback (most recent call last):
         ...
         KeyError

       We wrap the original exception to give the calling application
       access to the test and example information.

       If the output doesn't match, then a DocTestFailure is raised:

         >>> test = DocTestParser().get_doctest('''
         ...      >>> x = 1
         ...      >>> x
         ...      2
         ...      ''', {}, 'foo', 'foo.py', 0)

         >>> try:
         ...    runner.run(test)
         ... except DocTestFailure, failure:
         ...    pass

       DocTestFailure objects provide access to the test:

         >>> failure.test is test
         True

       As well as to the example:

         >>> failure.example.want
         '2\n'

       and the actual output:

         >>> failure.got
         '1\n'

       If a failure or error occurs, the globals are left intact:

         >>> del test.globs['__builtins__']
         >>> test.globs
         {'x': 1}

         >>> test = DocTestParser().get_doctest('''
         ...      >>> x = 2
         ...      >>> raise KeyError
         ...      ''', {}, 'foo', 'foo.py', 0)

         >>> runner.run(test)
         Traceback (most recent call last):
         ...
         UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>

         >>> del test.globs['__builtins__']
         >>> test.globs
         {'x': 2}

       But the globals are cleared if there is no error:

         >>> test = DocTestParser().get_doctest('''
         ...      >>> x = 2
         ...      ''', {}, 'foo', 'foo.py', 0)

         >>> runner.run(test)
         (0, 1)

         >>> test.globs
         {}

       """

    def run(self, test, compileflags=None, out=None, clear_globs=True):
        # Delegate with clear_globs=False: if an example fails, the
        # report_* overrides below raise out of the base run() before
        # we reach the clear here, so the globals survive for
        # post-mortem inspection; only a fully clean run clears them.
        r = DocTestRunner.run(self, test, compileflags, out, False)
        if clear_globs:
            test.globs.clear()
        return r

    def report_unexpected_exception(self, out, test, example, exc_info):
        # Abort the run immediately, wrapping the original exception so
        # the caller can reach the test and example that triggered it.
        raise UnexpectedException(test, example, exc_info)

    def report_failure(self, out, test, example, got):
        # Abort the run immediately on the first output mismatch.
        raise DocTestFailure(test, example, got)
+
+######################################################################
+## 6. Test Functions
+######################################################################
+# These should be backwards compatible.
+
# For backward compatibility, a global instance of a DocTestRunner
# class, updated by testmod: each call merges its runner's results
# into `master`, so doctest.master.summarize() covers every call.
master = None
+
def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
            report=True, optionflags=0, extraglobs=None,
            raise_on_error=False, exclude_empty=False):
    """m=None, name=None, globs=None, verbose=None, isprivate=None,
       report=True, optionflags=0, extraglobs=None, raise_on_error=False,
       exclude_empty=False

    Run the examples embedded in docstrings reachable from module m
    (or from the current __main__ module when m is omitted), starting
    with m.__doc__.  Unless isprivate is specified, private names are
    not skipped.

    Entries of the dict m.__test__, when it exists and is not None,
    are tested as well: it maps names to functions, classes and
    strings; function and class docstrings are tested even if the name
    is private, and strings are treated directly as docstrings.

    Returns a (#failures, #tests) pair.  See doctest.__doc__ for an
    overview.

    Optional keyword arg "name" gives the module name used in
    reports; it defaults to m.__name__.

    Optional keyword arg "globs" supplies the globals dict used when
    executing examples (default m.__dict__).  Each docstring runs
    against its own copy, so every docstring starts with a clean
    slate.

    Optional keyword arg "extraglobs" (new in 2.4) is a dict merged
    on top of those globals; by default no extra globals are used.

    Optional keyword arg "verbose" prints everything when true and
    only failures when false; the default is true iff "-v" is in
    sys.argv.

    Optional keyword arg "report" prints a summary at the end when
    true (detailed in verbose mode, very brief otherwise — in fact,
    empty when all tests pass), else prints nothing at the end.

    Optional keyword arg "optionflags" (new in 2.3) or's together
    module constants, defaulting to 0.  Possible values (see the docs
    for details):

        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE

    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure, allowing failures to be
    post-mortem debugged.

    Deprecated in Python 2.4:
    Optional keyword arg "isprivate" names a function used to decide
    whether a name is private; by default all names are treated as
    public.  It can be set to doctest.is_private to skip names that
    follow the underscore naming convention.

    Advanced tomfoolery: results are merged into (or create) the
    global runner doctest.master, whose methods — for example
    doctest.master.summarize(verbose) — may be called directly; pass
    report=0 here to delay displaying a summary until you're done
    fiddling.
    """
    global master

    # `isprivate` survives only for backward compatibility.
    if isprivate is not None:
        warnings.warn("the isprivate argument is deprecated; "
                      "examine DocTestFinder.find() lists instead",
                      DeprecationWarning)

    # Default to the interactively-running module.
    if m is None:
        # DWA - m will still be None if this wasn't invoked from the command
        # line, in which case the following TypeError is about as good an error
        # as we should expect
        m = sys.modules.get('__main__')

    if not inspect.ismodule(m):
        raise TypeError("testmod: module required; %r" % (m,))

    if name is None:
        name = m.__name__

    # Collect every doctest in the module and run them all through a
    # single runner (a DebugRunner when failures should raise).
    finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)
    if raise_on_error:
        runner_class = DebugRunner
    else:
        runner_class = DocTestRunner
    runner = runner_class(verbose=verbose, optionflags=optionflags)

    for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
        runner.run(test)

    if report:
        runner.summarize()

    # Keep the module-global `master` runner current for old callers.
    if master is None:
        master = runner
    else:
        master.merge(runner)

    return runner.failures, runner.tries
+
def testfile(filename, module_relative=True, name=None, package=None,
             globs=None, verbose=None, report=True, optionflags=0,
             extraglobs=None, raise_on_error=False, parser=DocTestParser()):
    """
    Test examples in the given file.  Return (#failures, #tests).

    Optional keyword arg "module_relative" specifies how filenames
    should be interpreted:

      - If "module_relative" is True (the default), then "filename"
         specifies a module-relative path.  By default, this path is
         relative to the calling module's directory; but if the
         "package" argument is specified, then it is relative to that
         package.  To ensure os-independence, "filename" should use
         "/" characters to separate path segments, and should not
         be an absolute path (i.e., it may not begin with "/").

      - If "module_relative" is False, then "filename" specifies an
        os-specific path.  The path may be absolute or relative (to
        the current working directory).

    Optional keyword arg "name" gives the name of the test; by default
    use the file's basename.

    Optional keyword argument "package" is a Python package or the
    name of a Python package whose directory should be used as the
    base directory for a module relative filename.  If no package is
    specified, then the calling module's directory is used as the base
    directory for module relative filenames.  It is an error to
    specify "package" if "module_relative" is False.

    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use {}.  A copy of this dict
    is actually used for each docstring, so that each docstring's
    examples start with a clean slate.

    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples.  By
    default, no extra globals are used.

    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.

    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end.  In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).

    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0.  Possible values (see the docs for details):

        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE

    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure. This allows failures to be
    post-mortem debugged.

    Optional keyword arg "parser" specifies a DocTestParser (or
    subclass) that should be used to extract tests from the files.

    Advanced tomfoolery:  testfile runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master.  Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testfile is especially useful then, to delay
    displaying a summary.  Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master

    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")

    # Relativize the path
    if module_relative:
        package = _normalize_module(package)
        filename = _module_relative_path(package, filename)

    # If no name was given, then use the file's name.
    if name is None:
        name = os.path.basename(filename)

    # Assemble the globals.
    if globs is None:
        globs = {}
    else:
        globs = globs.copy()
    if extraglobs is not None:
        globs.update(extraglobs)

    if raise_on_error:
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)

    # Read the file, convert it to a test, and run it.  Close the file
    # handle promptly instead of leaking it to garbage collection.
    f = open(filename)
    try:
        s = f.read()
    finally:
        f.close()
    test = parser.get_doctest(s, globs, name, filename, 0)
    runner.run(test)

    if report:
        runner.summarize()

    if master is None:
        master = runner
    else:
        master.merge(runner)

    return runner.failures, runner.tries
+
def run_docstring_examples(f, globs, verbose=False, name="NoName",
                           compileflags=None, optionflags=0):
    """
    Run any doctest examples found in the docstring of the object `f`,
    executing them with `globs` as the global namespace.  The optional
    argument `name` is used in failure messages; when `verbose` is
    true, output is generated even if there are no failures.

    `compileflags` gives the set of flags used by the Python compiler
    when running the examples; it defaults to the future-import flags
    that apply to `globs`.

    Optional keyword arg `optionflags` specifies options for the
    testing and output.  See the documentation for `testmod` for more
    information.
    """
    # Gather the tests (without recursing into contained objects) and
    # run each one through a single runner.
    test_finder = DocTestFinder(verbose=verbose, recurse=False)
    test_runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    for test in test_finder.find(f, name, globs=globs):
        test_runner.run(test, compileflags=compileflags)
+
+######################################################################
+## 7. Tester
+######################################################################
+# This is provided only for backwards compatibility.  It's not
+# actually used in any way.
+
class Tester:
    """
    Deprecated pre-2.4 doctest interface, retained only for backward
    compatibility; it delegates to a DocTestFinder/DocTestRunner pair.
    """
    def __init__(self, mod=None, globs=None, verbose=None,
                 isprivate=None, optionflags=0):

        warnings.warn("class Tester is deprecated; "
                      "use class doctest.DocTestRunner instead",
                      DeprecationWarning, stacklevel=2)
        if mod is None and globs is None:
            raise TypeError("Tester.__init__: must specify mod or globs")
        if mod is not None and not inspect.ismodule(mod):
            raise TypeError("Tester.__init__: mod must be a module; %r" %
                            (mod,))
        # Default the execution globals to the module's own namespace.
        if globs is None:
            globs = mod.__dict__
        self.globs = globs

        self.verbose = verbose
        self.isprivate = isprivate
        self.optionflags = optionflags
        self.testfinder = DocTestFinder(_namefilter=isprivate)
        self.testrunner = DocTestRunner(verbose=verbose,
                                        optionflags=optionflags)

    def runstring(self, s, name):
        # Parse `s` as a doctest and run it against self.globs,
        # returning the (failures, tries) pair.
        test = DocTestParser().get_doctest(s, self.globs, name, None, None)
        if self.verbose:
            print "Running string", name
        (f,t) = self.testrunner.run(test)
        if self.verbose:
            print f, "of", t, "examples failed in string", name
        return (f,t)

    def rundoc(self, object, name=None, module=None):
        # Find and run every doctest reachable from `object`, summing
        # the (failures, tries) totals over all of them.
        f = t = 0
        tests = self.testfinder.find(object, name, module=module,
                                     globs=self.globs)
        for test in tests:
            (f2, t2) = self.testrunner.run(test)
            (f,t) = (f+f2, t+t2)
        return (f,t)

    def rundict(self, d, name, module=None):
        # `new.module` (Python 2 only) fabricates a fresh module object
        # to hold the dict's entries so rundoc can walk it.
        import new
        m = new.module(name)
        m.__dict__.update(d)
        if module is None:
            # NOTE(review): passing False (rather than None) appears to
            # suppress the finder's module-based fallback — confirm
            # against DocTestFinder.find.
            module = False
        return self.rundoc(m, name, module)

    def run__test__(self, d, name):
        # Wrap `d` as the __test__ dict of a synthetic module and run it.
        import new
        m = new.module(name)
        m.__test__ = d
        return self.rundoc(m, name)

    def summarize(self, verbose=None):
        return self.testrunner.summarize(verbose)

    def merge(self, other):
        self.testrunner.merge(other.testrunner)
+
+######################################################################
+## 8. Unittest Support
+######################################################################
+
# Default reporting flags for the unittest integration:
# DocTestCase.runTest folds these in when a test's own optionflags
# contain no reporting bits; modified via set_unittest_reportflags().
_unittest_reportflags = 0
+
def set_unittest_reportflags(flags):
    """Sets the unittest option flags.

    The old flag is returned so that a runner could restore the old
    value if it wished to:

      >>> import doctest
      >>> old = doctest._unittest_reportflags
      >>> doctest.set_unittest_reportflags(REPORT_NDIFF |
      ...                          REPORT_ONLY_FIRST_FAILURE) == old
      True

      >>> doctest._unittest_reportflags == (REPORT_NDIFF |
      ...                                   REPORT_ONLY_FIRST_FAILURE)
      True

    Only reporting flags can be set:

      >>> doctest.set_unittest_reportflags(ELLIPSIS)
      Traceback (most recent call last):
      ...
      ValueError: ('Only reporting flags allowed', 8)

      >>> doctest.set_unittest_reportflags(old) == (REPORT_NDIFF |
      ...                                   REPORT_ONLY_FIRST_FAILURE)
      True
    """
    global _unittest_reportflags

    # Reject any bit that falls outside the REPORT_* reporting mask.
    if flags & ~REPORTING_FLAGS:
        raise ValueError("Only reporting flags allowed", flags)
    previous = _unittest_reportflags
    _unittest_reportflags = flags
    return previous
+
+
class DocTestCase(unittest.TestCase):
    """A unittest.TestCase that runs one doctest DocTest.

    runTest() executes the doctest with a DocTestRunner and turns any
    failing example into a unittest failure.  Optional setUp/tearDown
    callables are invoked with the DocTest object so callers can seed or
    clean up the test's globals.
    """

    def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
                 checker=None):
        # test: the doctest.DocTest to run.
        # optionflags: doctest option flags; reporting flags are defaulted
        #   at run time from _unittest_reportflags (see runTest).
        # setUp/tearDown: optional callables, each called with `test`.
        # checker: optional output checker handed to the runner.

        unittest.TestCase.__init__(self)
        self._dt_optionflags = optionflags
        self._dt_checker = checker
        self._dt_test = test
        self._dt_setUp = setUp
        self._dt_tearDown = tearDown

    def setUp(self):
        # Run the user-supplied setUp callable (if any) with the DocTest.
        test = self._dt_test

        if self._dt_setUp is not None:
            self._dt_setUp(test)

    def tearDown(self):
        # Run the user-supplied tearDown callable (if any), then drop
        # everything the examples bound so the test leaves no garbage.
        test = self._dt_test

        if self._dt_tearDown is not None:
            self._dt_tearDown(test)

        test.globs.clear()

    def runTest(self):
        # Execute the doctest; raise failureException if any example fails.
        test = self._dt_test
        old = sys.stdout
        new = StringIO()
        optionflags = self._dt_optionflags

        if not (optionflags & REPORTING_FLAGS):
            # The option flags don't include any reporting flags,
            # so add the default reporting flags
            optionflags |= _unittest_reportflags

        runner = DocTestRunner(optionflags=optionflags,
                               checker=self._dt_checker, verbose=False)

        try:
            # Shorten the divider used in failure reports.
            runner.DIVIDER = "-"*70
            failures, tries = runner.run(
                test, out=new.write, clear_globs=False)
        finally:
            # NOTE(review): runner.run() presumably redirects sys.stdout
            # internally; restore defensively in case it raised mid-run.
            sys.stdout = old

        if failures:
            raise self.failureException(self.format_failure(new.getvalue()))

    def format_failure(self, err):
        # Build a unittest-style failure message for doctest report `err`.
        test = self._dt_test
        if test.lineno is None:
            lineno = 'unknown line number'
        else:
            lineno = '%s' % test.lineno
        lname = '.'.join(test.name.split('.')[-1:])
        return ('Failed doctest test for %s\n'
                '  File "%s", line %s, in %s\n\n%s'
                % (test.name, test.filename, lineno, lname, err)
                )

    def debug(self):
        r"""Run the test case without results and without catching exceptions

           The unit test framework includes a debug method on test cases
           and test suites to support post-mortem debugging.  The test code
           is run in such a way that errors are not caught.  This way a
           caller can catch the errors and initiate post-mortem debugging.

           The DocTestCase provides a debug method that raises
           UnexpectedException errors if there is an unexpected
           exception:

             >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
             ...                {}, 'foo', 'foo.py', 0)
             >>> case = DocTestCase(test)
             >>> try:
             ...     case.debug()
             ... except UnexpectedException, failure:
             ...     pass

           The UnexpectedException contains the test, the example, and
           the original exception:

             >>> failure.test is test
             True

             >>> failure.example.want
             '42\n'

             >>> exc_info = failure.exc_info
             >>> raise exc_info[0], exc_info[1], exc_info[2]
             Traceback (most recent call last):
             ...
             KeyError

           If the output doesn't match, then a DocTestFailure is raised:

             >>> test = DocTestParser().get_doctest('''
             ...      >>> x = 1
             ...      >>> x
             ...      2
             ...      ''', {}, 'foo', 'foo.py', 0)
             >>> case = DocTestCase(test)

             >>> try:
             ...    case.debug()
             ... except DocTestFailure, failure:
             ...    pass

           DocTestFailure objects provide access to the test:

             >>> failure.test is test
             True

           As well as to the example:

             >>> failure.example.want
             '2\n'

           and the actual output:

             >>> failure.got
             '1\n'

           """

        self.setUp()
        # DebugRunner raises on the first failure instead of recording it.
        runner = DebugRunner(optionflags=self._dt_optionflags,
                             checker=self._dt_checker, verbose=False)
        runner.run(self._dt_test)
        self.tearDown()

    def id(self):
        # The dotted doctest name doubles as the unittest id.
        return self._dt_test.name

    def __repr__(self):
        name = self._dt_test.name.split('.')
        return "%s (%s)" % (name[-1], '.'.join(name[:-1]))

    __str__ = __repr__

    def shortDescription(self):
        return "Doctest: " + self._dt_test.name
+
def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
                 **options):
    """
    Convert doctest tests for a module to a unittest test suite.

    Every docstring in `module` that contains doctest examples becomes one
    unittest test case.  A failing example makes that case fail, with a
    message naming the file and the (sometimes approximate) line number.

    `module` may be a module object or a module name; if omitted, the
    calling module is used.

    Keyword options:

    setUp / tearDown
      Callables run before/after the tests in each docstring.  Each is
      passed the DocTest object; the test globals are available as its
      `globs` attribute.

    globs
      A dictionary containing initial global variables for the tests.

    optionflags
      A set of doctest option flags expressed as an integer.
    """

    if test_finder is None:
        test_finder = DocTestFinder()

    module = _normalize_module(module)
    tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
    if globs is None:
        # NOTE(review): this rebinding is not read again below; kept for
        # fidelity with the surrounding code.
        globs = module.__dict__
    if not tests:
        # Raising (rather than returning an empty suite) surfaces modules
        # that unexpectedly contain no doctests at all.
        raise ValueError(module, "has no tests")

    tests.sort()
    suite = unittest.TestSuite()
    for test in tests:
        if not test.examples:
            # Docstring parsed but held no examples -- nothing to run.
            continue
        if not test.filename:
            filename = module.__file__
            if filename[-4:] in (".pyc", ".pyo"):
                # Report against the .py source, not the compiled file.
                filename = filename[:-1]
            test.filename = filename
        suite.addTest(DocTestCase(test, **options))

    return suite
+
class DocFileCase(DocTestCase):
    """DocTestCase for doctests loaded from a plain text file."""

    def id(self):
        # Dots would make the id look like a module path; use underscores.
        return self._dt_test.name.replace('.', '_')

    def __repr__(self):
        return self._dt_test.filename
    __str__ = __repr__

    def format_failure(self, err):
        # File-based tests have no meaningful in-module line, hence line 0.
        template = 'Failed doctest test for %s\n  File "%s", line 0\n\n%s'
        return template % (self._dt_test.name, self._dt_test.filename, err)
+
def DocFileTest(path, module_relative=True, package=None,
                globs=None, parser=DocTestParser(), **options):
    """Create a DocFileCase for the doctests in the text file at `path`.

    If `module_relative` is true (the default), `path` is interpreted
    relative to `package` (or to the calling module's package when
    `package` is omitted); otherwise it is an os-specific path.  `globs`
    provides the initial globals for the test, and remaining keyword
    `options` are forwarded to DocFileCase.

    Raises ValueError if `package` is given with a non-module-relative
    path.

    NOTE: the shared default `parser` instance is created once at
    definition time; callers wanting isolation should pass their own.
    """
    if globs is None:
        globs = {}

    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")

    # Relativize the path.
    if module_relative:
        package = _normalize_module(package)
        path = _module_relative_path(package, path)

    # Find the file and read it.  Close the handle explicitly instead of
    # relying on garbage collection (matters on Windows and on
    # non-refcounting runtimes).
    name = os.path.basename(path)
    f = open(path)
    try:
        doc = f.read()
    finally:
        f.close()

    # Convert it to a test, and wrap it in a DocFileCase.
    test = parser.get_doctest(doc, globs, name, path, 0)
    return DocFileCase(test, **options)
+
def DocFileSuite(*paths, **kw):
    """A unittest suite for one or more doctest files.

    Each positional argument is a path to a doctest file; how it is
    interpreted depends on the keyword arguments below.

    module_relative
      True (the default) means paths are os-independent, use "/" as the
      separator, may not be absolute, and are resolved relative to the
      calling module's directory -- or to `package`, if given.  False
      means paths are ordinary os-specific paths (absolute or relative to
      the current working directory); `package` may not be combined with
      module_relative=False.

    package
      A Python package, or package name, serving as the base directory
      for module-relative paths.

    setUp / tearDown
      Callables run before/after the tests in each file.  Each receives
      the DocTest object; its `globs` attribute holds the test globals.

    globs
      A dictionary containing initial global variables for the tests.

    optionflags
      A set of doctest option flags expressed as an integer.

    parser
      A DocTestParser (or subclass) used to extract tests from the files.
    """
    suite = unittest.TestSuite()

    # Resolve the package here, not inside DocFileTest, so that the
    # caller-detection in _normalize_module sees the true calling module.
    if kw.get('module_relative', True):
        kw['package'] = _normalize_module(kw.get('package'))

    suite.addTests(DocFileTest(path, **kw) for path in paths)

    return suite
+
+######################################################################
+## 9. Debugging Support
+######################################################################
+
def script_from_examples(s):
    r"""Extract script from text with examples.

       Converts text with examples to a Python script.  Example input is
       converted to regular code.  Example output and all other words
       are converted to comments:

       >>> text = '''
       ...       Here are examples of simple math.
       ...
       ...           Python has super accurate integer addition
       ...
       ...           >>> 2 + 2
       ...           5
       ...
       ...           And very friendly error messages:
       ...
       ...           >>> 1/0
       ...           To Infinity
       ...           And
       ...           Beyond
       ...
       ...           You can use logic if you want:
       ...
       ...           >>> if 0:
       ...           ...    blah
       ...           ...    blah
       ...           ...
       ...
       ...           Ho hum
       ...           '''

       >>> print script_from_examples(text)
       # Here are examples of simple math.
       #
       #     Python has super accurate integer addition
       #
       2 + 2
       # Expected:
       ## 5
       #
       #     And very friendly error messages:
       #
       1/0
       # Expected:
       ## To Infinity
       ## And
       ## Beyond
       #
       #     You can use logic if you want:
       #
       if 0:
          blah
          blah
       #
       #     Ho hum
       """
    output = []
    for piece in DocTestParser().parse(s):
        if isinstance(piece, Example):
            # Example source becomes live code (minus its trailing newline).
            output.append(piece.source[:-1])
            want = piece.want
            if want:
                # Expected output becomes doubly-commented lines.
                output.append('# Expected:')
                output.extend(['## ' + line for line in want.split('\n')[:-1]])
        else:
            # Everything that is not an example becomes ordinary comments.
            output.extend([_comment_line(line)
                           for line in piece.split('\n')[:-1]])

    # Strip empty comment lines from both ends.
    while output and output[-1] == '#':
        output.pop()
    while output and output[0] == '#':
        del output[0]
    # Combine the output, and return it.
    return '\n'.join(output)
+
def testsource(module, name):
    """Extract the test sources from a doctest docstring as a script.

    Provide the module (or dotted name of the module) containing the
    test to be debugged and the name (within the module) of the object
    with the doc string with tests to be debugged.

    Raises ValueError if no test with that name exists in the module.
    """
    module = _normalize_module(module)
    for candidate in DocTestFinder().find(module):
        if candidate.name == name:
            # First matching test wins.
            return script_from_examples(candidate.docstring)
    raise ValueError(name, "not found in tests")
+
def debug_src(src, pm=False, globs=None):
    """Debug a single doctest docstring, given as the argument `src`."""
    # Convert the docstring to a runnable script, then hand off to
    # debug_script for the actual pdb session.
    testsrc = script_from_examples(src)
    debug_script(testsrc, pm, globs)
+
def debug_script(src, pm=False, globs=None):
    """Debug a test script.  `src` is the script, as a string.

    If `pm` is true the script is run to completion and pdb is entered
    post-mortem only if it raises; otherwise the whole script is run
    under pdb from the start.  `globs`, if given, seeds the globals (a
    copy is used so the caller's dict is never mutated).
    """
    import pdb

    # Note that tempfile.NamedTemporaryFile() cannot be used.  As the
    # docs say, a file so created cannot be opened by name a second time
    # on modern Windows boxes, and execfile() needs to open it.
    srcfilename = tempfile.mktemp(".py", "doctestdebug")
    f = open(srcfilename, 'w')
    f.write(src)
    f.close()

    try:
        if globs:
            globs = globs.copy()
        else:
            globs = {}

        if pm:
            # Post-mortem: run to the exception, then debug at the frame
            # where it was raised.
            try:
                execfile(srcfilename, globs, globs)
            except:
                print sys.exc_info()[1]
                pdb.post_mortem(sys.exc_info()[2])
        else:
            # Note that %r is vital here.  '%s' instead can, e.g., cause
            # backslashes to get treated as metacharacters on Windows.
            pdb.run("execfile(%r)" % srcfilename, globs, globs)

    finally:
        # Always remove the temp file, even if the debugged code raised.
        os.remove(srcfilename)
+
def debug(module, name, pm=False):
    """Debug a single doctest docstring.

    Provide the module (or dotted name of the module) containing the
    test to be debugged and the name (within the module) of the object
    with the docstring with tests to be debugged.
    """
    module = _normalize_module(module)
    src = testsource(module, name)
    debug_script(src, pm, module.__dict__)
+
+######################################################################
+## 10. Example Usage
+######################################################################
+class _TestClass:
+    """
+    A pointless class, for sanity-checking of docstring testing.
+
+    Methods:
+        square()
+        get()
+
+    >>> _TestClass(13).get() + _TestClass(-12).get()
+    1
+    >>> hex(_TestClass(13).square().get())
+    '0xa9'
+    """
+
+    def __init__(self, val):
+        """val -> _TestClass object with associated value val.
+
+        >>> t = _TestClass(123)
+        >>> print t.get()
+        123
+        """
+
+        self.val = val
+
+    def square(self):
+        """square() -> square TestClass's associated value
+
+        >>> _TestClass(13).square().get()
+        169
+        """
+
+        self.val = self.val ** 2
+        return self
+
+    def get(self):
+        """get() -> return TestClass's associated value.
+
+        >>> x = _TestClass(-42)
+        >>> print x.get()
+        -42
+        """
+
+        return self.val
+
# Extra doctest corpus: when this module tests itself (see _test), the
# finder collects these entries in addition to docstrings in the module.
__test__ = {"_TestClass": _TestClass,
            "string": r"""
                      Example of a string object, searched as-is.
                      >>> x = 1; y = 2
                      >>> x + y, x * y
                      (3, 2)
                      """,

            "bool-int equivalence": r"""
                                    In 2.2, boolean expressions displayed
                                    0 or 1.  By default, we still accept
                                    them.  This can be disabled by passing
                                    DONT_ACCEPT_TRUE_FOR_1 to the new
                                    optionflags argument.
                                    >>> 4 == 4
                                    1
                                    >>> 4 == 4
                                    True
                                    >>> 4 > 4
                                    0
                                    >>> 4 > 4
                                    False
                                    """,

            "blank lines": r"""
                Blank lines can be marked with <BLANKLINE>:
                    >>> print 'foo\n\nbar\n'
                    foo
                    <BLANKLINE>
                    bar
                    <BLANKLINE>
            """,

            "ellipsis": r"""
                If the ellipsis flag is used, then '...' can be used to
                elide substrings in the desired output:
                    >>> print range(1000) #doctest: +ELLIPSIS
                    [0, 1, 2, ..., 999]
            """,

            "whitespace normalization": r"""
                If the whitespace normalization flag is used, then
                differences in whitespace are ignored.
                    >>> print range(30) #doctest: +NORMALIZE_WHITESPACE
                    [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
                     15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
                     27, 28, 29]
            """,
           }
+
def _test():
    # Run this module's own doctests through the unittest integration.
    unittest.TextTestRunner().run(DocTestSuite())

if __name__ == "__main__":
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/dumbdbm.py b/depot_tools/release/win/python_24/Lib/dumbdbm.py
new file mode 100644
index 0000000..b85844d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/dumbdbm.py
@@ -0,0 +1,232 @@
+"""A dumb and slow but simple dbm clone.
+
+For database spam, spam.dir contains the index (a text file),
+spam.bak *may* contain a backup of the index (also a text file),
+while spam.dat contains the data (a binary file).
+
+XXX TO DO:
+
+- seems to contain a bug when updating...
+
+- reclaim free space (currently, space once occupied by deleted or expanded
+items is never reused)
+
+- support concurrent access (currently, if two processes take turns making
+updates, they can mess up the index)
+
+- support efficient access to large databases (currently, the whole index
+is read when the database is opened, and some updates rewrite the whole index)
+
+- support opening for read-only (flag = 'm')
+
+"""
+
+import os as _os
+import __builtin__
+import UserDict
+
+_open = __builtin__.open
+
+_BLOCKSIZE = 512
+
+error = IOError                         # For anydbm
+
class _Database(UserDict.DictMixin):
    """Dict-like store backed by a directory file plus a data file.

    The '.dir' file maps each key to a (pos, siz) pair locating its value
    in the '.dat' file; '.bak' may hold a backup of the directory file.
    DictMixin supplies the rest of the mapping protocol on top of the
    primitives defined here.
    """

    # The on-disk directory and data files can remain in mutually
    # inconsistent states for an arbitrarily long time (see comments
    # at the end of __setitem__).  This is only repaired when _commit()
    # gets called.  One place _commit() gets called is from __del__(),
    # and if that occurs at program shutdown time, module globals may
    # already have gotten rebound to None.  Since it's crucial that
    # _commit() finish successfully, we can't ignore shutdown races
    # here, and _commit() must not reference any globals.
    _os = _os       # for _commit()
    _open = _open   # for _commit()

    def __init__(self, filebasename, mode):
        # mode: UNIX permission bits applied when files are created.
        self._mode = mode

        # The directory file is a text file.  Each line looks like
        #    "%r, (%d, %d)\n" % (key, pos, siz)
        # where key is the string key, pos is the offset into the dat
        # file of the associated value's first byte, and siz is the number
        # of bytes in the associated value.
        self._dirfile = filebasename + _os.extsep + 'dir'

        # The data file is a binary file pointed into by the directory
        # file, and holds the values associated with keys.  Each value
        # begins at a _BLOCKSIZE-aligned byte offset, and is a raw
        # binary 8-bit string value.
        self._datfile = filebasename + _os.extsep + 'dat'
        self._bakfile = filebasename + _os.extsep + 'bak'

        # The index is an in-memory dict, mirroring the directory file.
        self._index = None  # maps keys to (pos, siz) pairs

        # Mod by Jack: create data file if needed
        try:
            f = _open(self._datfile, 'r')
        except IOError:
            f = _open(self._datfile, 'w', self._mode)
        f.close()
        self._update()

    # Read directory file into the in-memory index dict.
    def _update(self):
        self._index = {}
        try:
            f = _open(self._dirfile)
        except IOError:
            # No directory file yet: treat as an empty database.
            pass
        else:
            for line in f:
                # Each line is the repr of (key, (pos, siz)); eval()
                # reverses that repr.
                key, pos_and_siz_pair = eval(line)
                self._index[key] = pos_and_siz_pair
            f.close()

    # Write the index dict to the directory file.  The original directory
    # file (if any) is renamed with a .bak extension first.  If a .bak
    # file currently exists, it's deleted.
    def _commit(self):
        # CAUTION:  It's vital that _commit() succeed, and _commit() can
        # be called from __del__().  Therefore we must never reference a
        # global in this routine.
        if self._index is None:
            return  # nothing to do

        try:
            self._os.unlink(self._bakfile)
        except self._os.error:
            pass

        try:
            self._os.rename(self._dirfile, self._bakfile)
        except self._os.error:
            pass

        f = self._open(self._dirfile, 'w', self._mode)
        for key, pos_and_siz_pair in self._index.iteritems():
            f.write("%r, %r\n" % (key, pos_and_siz_pair))
        f.close()

    sync = _commit

    def __getitem__(self, key):
        """Return the value for key, read directly from the data file."""
        pos, siz = self._index[key]     # may raise KeyError
        f = _open(self._datfile, 'rb')
        f.seek(pos)
        dat = f.read(siz)
        f.close()
        return dat

    # Append val to the data file, starting at a _BLOCKSIZE-aligned
    # offset.  The data file is first padded with NUL bytes (if needed)
    # to get to an aligned offset.  Return pair
    #     (starting offset of val, len(val))
    def _addval(self, val):
        f = _open(self._datfile, 'rb+')
        f.seek(0, 2)
        pos = int(f.tell())
        # Round up to the next block boundary before writing.
        npos = ((pos + _BLOCKSIZE - 1) // _BLOCKSIZE) * _BLOCKSIZE
        f.write('\0'*(npos-pos))
        pos = npos
        f.write(val)
        f.close()
        return (pos, len(val))

    # Write val to the data file, starting at offset pos.  The caller
    # is responsible for ensuring that there's enough room starting at
    # pos to hold val, without overwriting some other value.  Return
    # pair (pos, len(val)).
    def _setval(self, pos, val):
        f = _open(self._datfile, 'rb+')
        f.seek(pos)
        f.write(val)
        f.close()
        return (pos, len(val))

    # key is a new key whose associated value starts in the data file
    # at offset pos and with length siz.  Add an index record to
    # the in-memory index dict, and append one to the directory file.
    def _addkey(self, key, pos_and_siz_pair):
        self._index[key] = pos_and_siz_pair
        f = _open(self._dirfile, 'a', self._mode)
        f.write("%r, %r\n" % (key, pos_and_siz_pair))
        f.close()

    def __setitem__(self, key, val):
        """Store string val under string key (strings only)."""
        if not type(key) == type('') == type(val):
            raise TypeError, "keys and values must be strings"
        if key not in self._index:
            self._addkey(key, self._addval(val))
        else:
            # See whether the new value is small enough to fit in the
            # (padded) space currently occupied by the old value.
            pos, siz = self._index[key]
            oldblocks = (siz + _BLOCKSIZE - 1) // _BLOCKSIZE
            newblocks = (len(val) + _BLOCKSIZE - 1) // _BLOCKSIZE
            if newblocks <= oldblocks:
                self._index[key] = self._setval(pos, val)
            else:
                # The new value doesn't fit in the (padded) space used
                # by the old value.  The blocks used by the old value are
                # forever lost.
                self._index[key] = self._addval(val)

            # Note that _index may be out of synch with the directory
            # file now:  _setval() and _addval() don't update the directory
            # file.  This also means that the on-disk directory and data
            # files are in a mutually inconsistent state, and they'll
            # remain that way until _commit() is called.  Note that this
            # is a disaster (for the database) if the program crashes
            # (so that _commit() never gets called).

    def __delitem__(self, key):
        """Remove key from the index (the value's blocks are not reclaimed)."""
        # The blocks used by the associated value are lost.
        del self._index[key]
        # XXX It's unclear why we do a _commit() here (the code always
        # XXX has, so I'm not changing it).  _setitem__ doesn't try to
        # XXX keep the directory file in synch.  Why should we?  Or
        # XXX why shouldn't __setitem__?
        self._commit()

    def keys(self):
        """Return a list of all keys currently in the index."""
        return self._index.keys()

    def has_key(self, key):
        """Return True if key is present (old-style spelling)."""
        return key in self._index

    def __contains__(self, key):
        return key in self._index

    def iterkeys(self):
        return self._index.iterkeys()
    __iter__ = iterkeys

    def __len__(self):
        return len(self._index)

    def close(self):
        """Flush the index to disk and drop all file references."""
        self._commit()
        self._index = self._datfile = self._dirfile = self._bakfile = None

    __del__ = close
+
+
+
def open(file, flag=None, mode=0666):
    """Open the database file, filename, and return corresponding object.

    The flag argument, used to control how the database is opened in the
    other DBM implementations, is ignored in the dumbdbm module; the
    database is always opened for update, and will be created if it does
    not exist.

    The optional mode argument is the UNIX mode of the file, used only when
    the database has to be created.  It defaults to octal code 0666 (and
    will be modified by the prevailing umask).

    """
    # Deliberately shadows the builtin open() in this module's namespace;
    # the module keeps the builtin available as _open.
    # flag argument is currently ignored
    return _Database(file, mode)
diff --git a/depot_tools/release/win/python_24/Lib/dummy_thread.py b/depot_tools/release/win/python_24/Lib/dummy_thread.py
new file mode 100644
index 0000000..fb3abbf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/dummy_thread.py
@@ -0,0 +1,140 @@
+"""Drop-in replacement for the thread module.
+
+Meant to be used as a brain-dead substitute so that threaded code does
+not need to be rewritten for when the thread module is not present.
+
+Suggested usage is::
+
+    try:
+        import thread
+    except ImportError:
+        import dummy_thread as thread
+
+"""
+__author__ = "Brett Cannon"
+__email__ = "brett@python.org"
+
+# Exports only things specified by thread documentation
+# (skipping obsolete synonyms allocate(), start_new(), exit_thread())
+__all__ = ['error', 'start_new_thread', 'exit', 'get_ident', 'allocate_lock',
+           'interrupt_main', 'LockType']
+
+import traceback as _traceback
+
class error(Exception):
    """Dummy implementation of thread.error."""

    def __init__(self, *args):
        # Mirror thread.error: just record the positional arguments.
        self.args = args
+
def start_new_thread(function, args, kwargs={}):
    """Dummy implementation of thread.start_new_thread().

    Compatibility is maintained by making sure that ``args`` is a
    tuple and ``kwargs`` is a dictionary.  If an exception is raised
    and it is SystemExit (which can be done by thread.exit()) it is
    caught and nothing is done; all other exceptions are printed out
    by using traceback.print_exc().

    If the executed function calls interrupt_main the KeyboardInterrupt will be
    raised when the function returns.

    """
    # (The shared default kwargs dict is safe here: it is never mutated.)
    # Exact type checks (not isinstance) match the real thread module's
    # strictness about the argument types.
    if type(args) != type(tuple()):
        raise TypeError("2nd arg must be a tuple")
    if type(kwargs) != type(dict()):
        raise TypeError("3rd arg must be a dict")
    global _main
    # Mark that we are inside a "thread" so interrupt_main() defers its
    # KeyboardInterrupt instead of raising immediately.
    _main = False
    try:
        function(*args, **kwargs)
    except SystemExit:
        # thread.exit() raises SystemExit; swallow it as a real thread would.
        pass
    except:
        _traceback.print_exc()
    _main = True
    global _interrupt
    # Deliver a deferred interrupt_main() request, clearing the flag first.
    if _interrupt:
        _interrupt = False
        raise KeyboardInterrupt
+
def exit():
    """Dummy implementation of thread.exit().

    Leaves the "thread" by raising SystemExit, exactly as thread.exit()
    does; start_new_thread() swallows it.
    """
    raise SystemExit
+
def get_ident():
    """Dummy implementation of thread.get_ident().

    This module is only used when the real thread module is unavailable,
    so the current process is the one and only "thread"; any fixed value
    therefore works as its identifier.
    """
    return -1
+
def allocate_lock():
    """Dummy implementation of thread.allocate_lock()."""
    # Each call hands out a fresh, independent dummy lock object.
    return LockType()
+
class LockType(object):
    """Class implementing dummy implementation of thread.LockType.

    Compatibility is maintained through self.locked_status, a boolean
    mirroring the lock's state.  Do not pickle one of these and hand it
    to code using the real thread module: this class has no atomic
    methods, so an unpickled instance would not behave like a real lock.
    """

    def __init__(self):
        # Starts out unlocked, like a freshly allocated real lock.
        self.locked_status = False

    def acquire(self, waitflag=None):
        """Dummy implementation of acquire().

        A blocking call (no waitflag) always "succeeds" and returns None.
        A truthy waitflag also always succeeds, returning True.  A falsy
        waitflag is non-blocking: it only takes the lock (and returns
        True) when the lock is currently free, else returns False.  This
        keeps threading.Condition's assert statements happy.
        """
        if waitflag is None:
            # Blocking form: thread.LockType.acquire() returns None here.
            self.locked_status = True
            return None
        if waitflag:
            self.locked_status = True
            return True
        # Non-blocking: succeed only if not already held.
        if self.locked_status:
            return False
        self.locked_status = True
        return True

    def release(self):
        """Release the dummy lock."""
        # XXX Perhaps shouldn't actually bother to test?  Could lead
        #     to problems for complex, threaded code.
        if not self.locked_status:
            raise error
        self.locked_status = False
        return True

    def locked(self):
        """Return whether the dummy lock is currently held."""
        return self.locked_status
+
# Set by interrupt_main() while a "thread" is running; checked (and
# cleared) by start_new_thread() when the thread function returns.
_interrupt = False
# True when not executing in a "thread"; start_new_thread() toggles it.
_main = True
+
def interrupt_main():
    """Request a KeyboardInterrupt.

    Called outside any "thread", this raises KeyboardInterrupt at once.
    Called from a function run by start_new_thread(), it only sets the
    _interrupt flag so start_new_thread() raises KeyboardInterrupt when
    the function returns.
    """
    global _interrupt
    if not _main:
        _interrupt = True
    else:
        raise KeyboardInterrupt
diff --git a/depot_tools/release/win/python_24/Lib/dummy_threading.py b/depot_tools/release/win/python_24/Lib/dummy_threading.py
new file mode 100644
index 0000000..48f7c4c7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/dummy_threading.py
@@ -0,0 +1,83 @@
+"""Faux ``threading`` version using ``dummy_thread`` instead of ``thread``.
+
+The module ``_dummy_threading`` is added to ``sys.modules`` in order
+to not have ``threading`` considered imported.  Had ``threading`` been
+directly imported it would have made all subsequent imports succeed
+regardless of whether ``thread`` was available which is not desired.
+
+:Author: Brett Cannon
+:Contact: brett@python.org
+
+XXX: Try to get rid of ``_dummy_threading``.
+
+"""
+from sys import modules as sys_modules
+
+import dummy_thread
+
# Declaring now so as to not have to nest ``try``s to get proper clean-up.
holding_thread = False
holding_threading = False
holding__threading_local = False

try:
    # Could have checked if ``thread`` was not in sys.modules and gone
    # a different route, but decided to mirror technique used with
    # ``threading`` below.
    if 'thread' in sys_modules:
        held_thread = sys_modules['thread']
        holding_thread = True
    # Must have some module named ``thread`` that implements its API
    # in order to initially import ``threading``.
    sys_modules['thread'] = sys_modules['dummy_thread']

    if 'threading' in sys_modules:
        # If ``threading`` is already imported, might as well prevent
        # trying to import it more than needed by saving it if it is
        # already imported before deleting it.
        held_threading = sys_modules['threading']
        holding_threading = True
        del sys_modules['threading']

    if '_threading_local' in sys_modules:
        # If ``_threading_local`` is already imported, might as well prevent
        # trying to import it more than needed by saving it if it is
        # already imported before deleting it.
        held__threading_local = sys_modules['_threading_local']
        holding__threading_local = True
        del sys_modules['_threading_local']

    # With ``thread`` faked by ``dummy_thread`` above, this import binds
    # a thread-free implementation of the ``threading`` API.
    import threading
    # Need a copy of the code kept somewhere...
    sys_modules['_dummy_threading'] = sys_modules['threading']
    del sys_modules['threading']
    sys_modules['_dummy__threading_local'] = sys_modules['_threading_local']
    del sys_modules['_threading_local']
    from _dummy_threading import *
    from _dummy_threading import __all__

finally:
    # Put back ``threading`` if we overwrote earlier

    if holding_threading:
        sys_modules['threading'] = held_threading
        del held_threading
    del holding_threading

    # Put back ``_threading_local`` if we overwrote earlier

    if holding__threading_local:
        sys_modules['_threading_local'] = held__threading_local
        del held__threading_local
    del holding__threading_local

    # Put back ``thread`` if we overwrote, else del the entry we made
    if holding_thread:
        sys_modules['thread'] = held_thread
        del held_thread
    else:
        del sys_modules['thread']
    del holding_thread

    # Drop bookkeeping names so they do not pollute the module namespace.
    del dummy_thread
    del sys_modules
diff --git a/depot_tools/release/win/python_24/Lib/email/Charset.py b/depot_tools/release/win/python_24/Lib/email/Charset.py
new file mode 100644
index 0000000..df860c5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Charset.py
@@ -0,0 +1,370 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Ben Gertzfield, Barry Warsaw
+# Contact: email-sig@python.org
+
+import email.base64MIME
+import email.quopriMIME
+from email.Encoders import encode_7or8bit
+
+
+
# Flags for types of header encodings
QP          = 1 # Quoted-Printable
BASE64      = 2 # Base64
SHORTEST    = 3 # the shorter of QP and base64, but only for headers

# In "=?charset?q?hello_world?=", the =?, ?q?, and ?= add up to 7
MISC_LEN = 7

DEFAULT_CHARSET = 'us-ascii'



# Defaults
# Maps a canonical charset name to a 3-tuple of
# (header encoding, body encoding, output conversion charset).
CHARSETS = {
    # input        header enc  body enc output conv
    'iso-8859-1':  (QP,        QP,      None),
    'iso-8859-2':  (QP,        QP,      None),
    'iso-8859-3':  (QP,        QP,      None),
    'iso-8859-4':  (QP,        QP,      None),
    # iso-8859-5 is Cyrillic, and not especially used
    # iso-8859-6 is Arabic, also not particularly used
    # iso-8859-7 is Greek, QP will not make it readable
    # iso-8859-8 is Hebrew, QP will not make it readable
    'iso-8859-9':  (QP,        QP,      None),
    'iso-8859-10': (QP,        QP,      None),
    # iso-8859-11 is Thai, QP will not make it readable
    'iso-8859-13': (QP,        QP,      None),
    'iso-8859-14': (QP,        QP,      None),
    'iso-8859-15': (QP,        QP,      None),
    'windows-1252':(QP,        QP,      None),
    'viscii':      (QP,        QP,      None),
    'us-ascii':    (None,      None,    None),
    'big5':        (BASE64,    BASE64,  None),
    'gb2312':      (BASE64,    BASE64,  None),
    'euc-jp':      (BASE64,    None,    'iso-2022-jp'),
    'shift_jis':   (BASE64,    None,    'iso-2022-jp'),
    'iso-2022-jp': (BASE64,    None,    None),
    'koi8-r':      (BASE64,    BASE64,  None),
    'utf-8':       (SHORTEST,  BASE64, 'utf-8'),
    # We're making this one up to represent raw unencoded 8-bit
    '8bit':        (None,      BASE64, 'utf-8'),
    }

# Aliases for other commonly-used names for character sets.  Map
# them to the real ones used in email.
ALIASES = {
    'latin_1': 'iso-8859-1',
    'latin-1': 'iso-8859-1',
    'latin_2': 'iso-8859-2',
    'latin-2': 'iso-8859-2',
    'latin_3': 'iso-8859-3',
    'latin-3': 'iso-8859-3',
    'latin_4': 'iso-8859-4',
    'latin-4': 'iso-8859-4',
    'latin_5': 'iso-8859-9',
    'latin-5': 'iso-8859-9',
    'latin_6': 'iso-8859-10',
    'latin-6': 'iso-8859-10',
    'latin_7': 'iso-8859-13',
    'latin-7': 'iso-8859-13',
    'latin_8': 'iso-8859-14',
    'latin-8': 'iso-8859-14',
    'latin_9': 'iso-8859-15',
    'latin-9': 'iso-8859-15',
    'cp949':   'ks_c_5601-1987',
    'euc_jp':  'euc-jp',
    'euc_kr':  'euc-kr',
    'ascii':   'us-ascii',
    }


# Map charsets to their Unicode codec strings.
CODEC_MAP = {
    'gb2312':      'eucgb2312_cn',
    'big5':        'big5_tw',
    # Hack: We don't want *any* conversion for stuff marked us-ascii, as all
    # sorts of garbage might be sent to us in the guise of 7-bit us-ascii.
    # Let that stuff pass through without conversion to/from Unicode.
    'us-ascii':    None,
    }
+
+
+
+# Convenience functions for extending the above mappings
def add_charset(charset, header_enc=None, body_enc=None, output_charset=None):
    """Register the email properties of a charset in the global registry.

    charset must be the canonical name of a character set.

    header_enc and body_enc describe how headers and bodies written in
    this charset are to be encoded: Charset.QP for quoted-printable,
    Charset.BASE64 for base64, Charset.SHORTEST (headers only) for
    whichever of QP/base64 is shorter, or None for no encoding (the
    default).

    output_charset, when given, names the character set the text is
    converted to (input charset -> Unicode -> output charset) when
    Charset.convert() is called.  By default output stays in the input
    character set.

    Both the input and output charsets must have Unicode codec entries
    in this module's charset-to-codec mapping; use add_codec(charset,
    codecname) to teach the module about codecs it does not know.  See
    the codecs module's documentation for more information.
    """
    # SHORTEST only makes sense when comparing header encodings.
    if body_enc == SHORTEST:
        raise ValueError('SHORTEST not allowed for body_enc')
    CHARSETS[charset] = (header_enc, body_enc, output_charset)
+
+
def add_alias(alias, canonical):
    """Add a character set alias.

    alias is the alias name, e.g. latin-1
    canonical is the character set's canonical name, e.g. iso-8859-1
    """
    # Charset.__init__() resolves names through ALIASES before any lookup.
    ALIASES[alias] = canonical
+
+
def add_codec(charset, codecname):
    """Add a codec that map characters in the given charset to/from Unicode.

    charset is the canonical name of a character set.  codecname is the name
    of a Python codec, as appropriate for the second argument to the unicode()
    built-in, or to the encode() method of a Unicode string.
    """
    # Consulted by Charset to turn charset names into codec names.
    CODEC_MAP[charset] = codecname
+
+
+
+class Charset:
+    """Map character sets to their email properties.
+
+    This class provides information about the requirements imposed on email
+    for a specific character set.  It also provides convenience routines for
+    converting between character sets, given the availability of the
+    applicable codecs.  Given a character set, it will do its best to provide
+    information on how to use that character set in an email in an
+    RFC-compliant way.
+
+    Certain character sets must be encoded with quoted-printable or base64
+    when used in email headers or bodies.  Certain character sets must be
+    converted outright, and are not allowed in email.  Instances of this
+    module expose the following information about a character set:
+
+    input_charset: The initial character set specified.  Common aliases
+                   are converted to their `official' email names (e.g. latin_1
+                   is converted to iso-8859-1).  Defaults to 7-bit us-ascii.
+
+    header_encoding: If the character set must be encoded before it can be
+                     used in an email header, this attribute will be set to
+                     Charset.QP (for quoted-printable), Charset.BASE64 (for
+                     base64 encoding), or Charset.SHORTEST for the shortest of
+                     QP or BASE64 encoding.  Otherwise, it will be None.
+
+    body_encoding: Same as header_encoding, but describes the encoding for the
+                   mail message's body, which indeed may be different than the
+                   header encoding.  Charset.SHORTEST is not allowed for
+                   body_encoding.
+
+    output_charset: Some character sets must be converted before the can be
+                    used in email headers or bodies.  If the input_charset is
+                    one of them, this attribute will contain the name of the
+                    charset output will be converted to.  Otherwise, it will
+                    be None.
+
+    input_codec: The name of the Python codec used to convert the
+                 input_charset to Unicode.  If no conversion codec is
+                 necessary, this attribute will be None.
+
+    output_codec: The name of the Python codec used to convert Unicode
+                  to the output_charset.  If no conversion codec is necessary,
+                  this attribute will have the same value as the input_codec.
+    """
+    def __init__(self, input_charset=DEFAULT_CHARSET):
+        # RFC 2046, $4.1.2 says charsets are not case sensitive.  We coerce to
+        # unicode because its .lower() is locale insensitive.
+        input_charset = unicode(input_charset, 'ascii').lower()
+        # Set the input charset after filtering through the aliases
+        self.input_charset = ALIASES.get(input_charset, input_charset)
+        # We can try to guess which encoding and conversion to use by the
+        # charset_map dictionary.  Try that first, but let the user override
+        # it.
+        henc, benc, conv = CHARSETS.get(self.input_charset,
+                                        (SHORTEST, BASE64, None))
+        if not conv:
+            conv = self.input_charset
+        # Set the attributes, allowing the arguments to override the default.
+        self.header_encoding = henc
+        self.body_encoding = benc
+        self.output_charset = ALIASES.get(conv, conv)
+        # Now set the codecs.  If one isn't defined for input_charset,
+        # guess and try a Unicode codec with the same name as input_codec.
+        self.input_codec = CODEC_MAP.get(self.input_charset,
+                                         self.input_charset)
+        self.output_codec = CODEC_MAP.get(self.output_charset,
+                                            self.output_charset)
+
+    def __str__(self):
+        return self.input_charset.lower()
+
+    __repr__ = __str__
+
+    def __eq__(self, other):
+        return str(self) == str(other).lower()
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def get_body_encoding(self):
+        """Return the content-transfer-encoding used for body encoding.
+
+        This is either the string `quoted-printable' or `base64' depending on
+        the encoding used, or it is a function in which case you should call
+        the function with a single argument, the Message object being
+        encoded.  The function should then set the Content-Transfer-Encoding
+        header itself to whatever is appropriate.
+
+        Returns "quoted-printable" if self.body_encoding is QP.
+        Returns "base64" if self.body_encoding is BASE64.
+        Returns "7bit" otherwise.
+        """
+        assert self.body_encoding <> SHORTEST
+        if self.body_encoding == QP:
+            return 'quoted-printable'
+        elif self.body_encoding == BASE64:
+            return 'base64'
+        else:
+            return encode_7or8bit
+
+    def convert(self, s):
+        """Convert a string from the input_codec to the output_codec."""
+        if self.input_codec <> self.output_codec:
+            return unicode(s, self.input_codec).encode(self.output_codec)
+        else:
+            return s
+
+    def to_splittable(self, s):
+        """Convert a possibly multibyte string to a safely splittable format.
+
+        Uses the input_codec to try and convert the string to Unicode, so it
+        can be safely split on character boundaries (even for multibyte
+        characters).
+
+        Returns the string as-is if it isn't known how to convert it to
+        Unicode with the input_charset.
+
+        Characters that could not be converted to Unicode will be replaced
+        with the Unicode replacement character U+FFFD.
+        """
+        if isinstance(s, unicode) or self.input_codec is None:
+            return s
+        try:
+            return unicode(s, self.input_codec, 'replace')
+        except LookupError:
+            # Input codec not installed on system, so return the original
+            # string unchanged.
+            return s
+
+    def from_splittable(self, ustr, to_output=True):
+        """Convert a splittable string back into an encoded string.
+
+        Uses the proper codec to try and convert the string from Unicode back
+        into an encoded format.  Return the string as-is if it is not Unicode,
+        or if it could not be converted from Unicode.
+
+        Characters that could not be converted from Unicode will be replaced
+        with an appropriate character (usually '?').
+
+        If to_output is True (the default), uses output_codec to convert to an
+        encoded format.  If to_output is False, uses input_codec.
+        """
+        if to_output:
+            codec = self.output_codec
+        else:
+            codec = self.input_codec
+        if not isinstance(ustr, unicode) or codec is None:
+            return ustr
+        try:
+            return ustr.encode(codec, 'replace')
+        except LookupError:
+            # Output codec not installed
+            return ustr
+
+    def get_output_charset(self):
+        """Return the output character set.
+
+        This is self.output_charset if that is not None, otherwise it is
+        self.input_charset.
+        """
+        return self.output_charset or self.input_charset
+
+    def encoded_header_len(self, s):
+        """Return the length of the encoded header string."""
+        cset = self.get_output_charset()
+        # The len(s) of a 7bit encoding is len(s)
+        if self.header_encoding == BASE64:
+            return email.base64MIME.base64_len(s) + len(cset) + MISC_LEN
+        elif self.header_encoding == QP:
+            return email.quopriMIME.header_quopri_len(s) + len(cset) + MISC_LEN
+        elif self.header_encoding == SHORTEST:
+            lenb64 = email.base64MIME.base64_len(s)
+            lenqp = email.quopriMIME.header_quopri_len(s)
+            return min(lenb64, lenqp) + len(cset) + MISC_LEN
+        else:
+            return len(s)
+
+    def header_encode(self, s, convert=False):
+        """Header-encode a string, optionally converting it to output_charset.
+
+        If convert is True, the string will be converted from the input
+        charset to the output charset automatically.  This is not useful for
+        multibyte character sets, which have line length issues (multibyte
+        characters must be split on a character, not a byte boundary); use the
+        high-level Header class to deal with these issues.  convert defaults
+        to False.
+
+        The type of encoding (base64 or quoted-printable) will be based on
+        self.header_encoding.
+        """
+        cset = self.get_output_charset()
+        if convert:
+            s = self.convert(s)
+        # 7bit/8bit encodings return the string unchanged (modulo conversions)
+        if self.header_encoding == BASE64:
+            return email.base64MIME.header_encode(s, cset)
+        elif self.header_encoding == QP:
+            return email.quopriMIME.header_encode(s, cset, maxlinelen=None)
+        elif self.header_encoding == SHORTEST:
+            lenb64 = email.base64MIME.base64_len(s)
+            lenqp = email.quopriMIME.header_quopri_len(s)
+            if lenb64 < lenqp:
+                return email.base64MIME.header_encode(s, cset)
+            else:
+                return email.quopriMIME.header_encode(s, cset, maxlinelen=None)
+        else:
+            return s
+
+    def body_encode(self, s, convert=True):
+        """Body-encode a string and convert it to output_charset.
+
+        If convert is True (the default), the string will be converted from
+        the input charset to output charset automatically.  Unlike
+        header_encode(), there are no issues with byte boundaries and
+        multibyte charsets in email bodies, so this is usually pretty safe.
+
+        The type of encoding (base64 or quoted-printable) will be based on
+        self.body_encoding.
+        """
+        if convert:
+            s = self.convert(s)
+        # 7bit/8bit encodings return the string unchanged (module conversions)
+        if self.body_encoding is BASE64:
+            return email.base64MIME.body_encode(s)
+        elif self.body_encoding is QP:
+            return email.quopriMIME.body_encode(s)
+        else:
+            return s
diff --git a/depot_tools/release/win/python_24/Lib/email/Encoders.py b/depot_tools/release/win/python_24/Lib/email/Encoders.py
new file mode 100644
index 0000000..baac2a3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Encoders.py
@@ -0,0 +1,78 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Encodings and related functions."""
+
+import base64
+from quopri import encodestring as _encodestring
+
def _qencode(s):
    """Quoted-printable-encode s, additionally encoding spaces as =20."""
    # quopri.encodestring() leaves spaces alone, but encoded output
    # must not contain raw spaces, so quote them by hand.
    return _encodestring(s, quotetabs=True).replace(' ', '=20')
+
+
+def _bencode(s):
+    # We can't quite use base64.encodestring() since it tacks on a "courtesy
+    # newline".  Blech!
+    if not s:
+        return s
+    hasnewline = (s[-1] == '\n')
+    value = base64.encodestring(s)
+    if not hasnewline and value[-1] == '\n':
+        return value[:-1]
+    return value
+
+
+
def encode_base64(msg):
    """Encode the message's payload in Base64.

    Also, add an appropriate Content-Transfer-Encoding header.
    """
    msg.set_payload(_bencode(msg.get_payload()))
    msg['Content-Transfer-Encoding'] = 'base64'
+
+
+
def encode_quopri(msg):
    """Encode the message's payload in quoted-printable.

    Also, add an appropriate Content-Transfer-Encoding header.
    """
    msg.set_payload(_qencode(msg.get_payload()))
    msg['Content-Transfer-Encoding'] = 'quoted-printable'
+
+
+
def encode_7or8bit(msg):
    """Set the Content-Transfer-Encoding header to 7bit or 8bit.

    The payload itself is left untouched; only the header is added,
    based on whether the payload fits in 7-bit ASCII.
    """
    orig = msg.get_payload()
    if orig is None:
        # There's no payload.  For backwards compatibility we use 7bit
        msg['Content-Transfer-Encoding'] = '7bit'
        return
    # We play a trick to make this go fast.  If encoding to ASCII succeeds, we
    # know the data must be 7bit, otherwise treat it as 8bit.
    try:
        orig.encode('ascii')
    except UnicodeError:
        # iso-2022-* charsets are non-ASCII but still 7-bit clean.
        charset = msg.get_charset()
        output_cset = charset and charset.output_charset
        # Fixed typo: this previously tested for 'iso-2202-', which never
        # matches any real charset name, so iso-2022-* payloads were
        # mislabeled as 8bit.
        if output_cset and output_cset.lower().startswith('iso-2022-'):
            msg['Content-Transfer-Encoding'] = '7bit'
        else:
            msg['Content-Transfer-Encoding'] = '8bit'
    else:
        msg['Content-Transfer-Encoding'] = '7bit'
+
+
+
def encode_noop(msg):
    """Do nothing.

    Useful as an explicit "leave the payload alone" encoder argument.
    """
diff --git a/depot_tools/release/win/python_24/Lib/email/Errors.py b/depot_tools/release/win/python_24/Lib/email/Errors.py
new file mode 100644
index 0000000..e13a2c7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Errors.py
@@ -0,0 +1,53 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""email package exception classes."""
+
+
+
# Exception hierarchy: MessageError is the root; problems discovered
# while parsing derive from MessageParseError.
class MessageError(Exception):
    """Base class for errors in the email package."""


class MessageParseError(MessageError):
    """Base class for message parsing errors."""


class HeaderParseError(MessageParseError):
    """Error while parsing headers."""


class BoundaryError(MessageParseError):
    """Couldn't find terminating boundary."""


class MultipartConversionError(MessageError, TypeError):
    """Conversion to a multipart is prohibited."""
+
+
+
# These are parsing defects which the parser was able to work around.
# They are recorded on a message's .defects attribute rather than raised.
class MessageDefect:
    """Base class for a message defect."""

    def __init__(self, line=None):
        # The offending input line, when one can be pinpointed.
        self.line = line

class NoBoundaryInMultipartDefect(MessageDefect):
    """A message claimed to be a multipart but had no boundary parameter."""

class StartBoundaryNotFoundDefect(MessageDefect):
    """The claimed start boundary was never found."""

class FirstHeaderLineIsContinuationDefect(MessageDefect):
    """A message had a continuation line as its first header line."""

class MisplacedEnvelopeHeaderDefect(MessageDefect):
    """A 'Unix-from' header was found in the middle of a header block."""

class MalformedHeaderDefect(MessageDefect):
    """Found a header that was missing a colon, or was otherwise malformed."""

class MultipartInvariantViolationDefect(MessageDefect):
    """A message claimed to be a multipart but no subparts were found."""
diff --git a/depot_tools/release/win/python_24/Lib/email/FeedParser.py b/depot_tools/release/win/python_24/Lib/email/FeedParser.py
new file mode 100644
index 0000000..5aad15d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/FeedParser.py
@@ -0,0 +1,477 @@
+# Copyright (C) 2004 Python Software Foundation
+# Authors: Baxter, Wouters and Warsaw
+# Contact: email-sig@python.org
+
+"""FeedParser - An email feed parser.
+
+The feed parser implements an interface for incrementally parsing an email
+message, line by line.  This has advantages for certain applications, such as
+those reading email messages off a socket.
+
+FeedParser.feed() is the primary interface for pushing new data into the
+parser.  It returns when there's nothing more it can do with the available
+data.  When you have no more data to push into the parser, call .close().
+This completes the parsing and returns the root message object.
+
+The other advantage of this parser is that it will never throw a parsing
+exception.  Instead, when it finds something unexpected, it adds a 'defect' to
+the current message.  Defects are just instances that live on the message
+object's .defects attribute.
+"""
+
+import re
+from email import Errors
+from email import Message
+
# Matches any of the three standard line endings (CRLF, CR, LF).
NLCRE = re.compile('\r\n|\r|\n')
# Grouping variants: re.split() keeps the captured terminator.
NLCRE_bol = re.compile('(\r\n|\r|\n)')
NLCRE_eol = re.compile('(\r\n|\r|\n)$')
NLCRE_crack = re.compile('(\r\n|\r|\n)')
# RFC 2822 $3.6.8 Optional fields.  ftext is %d33-57 / %d59-126, Any character
# except controls, SP, and ":".
headerRE = re.compile(r'^(From |[\041-\071\073-\176]{2,}:|[\t ])')
EMPTYSTRING = ''
NL = '\n'

# Sentinel returned by BufferedSubFile.readline() when it runs out of
# buffered lines before the input has been closed.
NeedMoreData = object()
+
+
+
class BufferedSubFile(object):
    """A file-ish object that can have new data loaded into it.

    You can also push and pop line-matching predicates onto a stack.  When the
    current predicate matches the current line, a false EOF response
    (i.e. empty string) is returned instead.  This lets the parser adhere to a
    simple abstraction -- it parses until EOF closes the current message.
    """
    def __init__(self):
        # Trailing data from the last push that did not end in a newline.
        self._partial = ''
        # Complete lines, stored in reverse order so readline() can pop().
        self._lines = []
        # Stack of predicates used to fake EOF at section boundaries.
        self._eofstack = []
        # True once close() has been called.
        self._closed = False

    def push_eof_matcher(self, pred):
        self._eofstack.append(pred)

    def pop_eof_matcher(self):
        return self._eofstack.pop()

    def close(self):
        # Flush any trailing partial line so it is not lost.
        self._lines.append(self._partial)
        self._partial = ''
        self._closed = True

    def readline(self):
        if not self._lines:
            # Out of buffered data: real EOF once closed, otherwise
            # signal the caller to feed us more.
            if self._closed:
                return ''
            return NeedMoreData
        line = self._lines.pop()
        # RFC 2046, section 5.1.2 requires outer boundaries to be
        # recognized at any level of inner nesting, so consult the
        # predicates from most deeply nested outwards.
        for matches_eof in self._eofstack[::-1]:
            if matches_eof(line):
                # False EOF: put the line back for the enclosing parser.
                self._lines.append(line)
                return ''
        return line

    def unreadline(self, line):
        # Let the consumer push a line back into the buffer.
        assert line is not NeedMoreData
        self._lines.append(line)

    def push(self, data):
        """Push some new data into this object."""
        # Prepend whatever partial line the previous push left behind.
        data = self._partial + data
        self._partial = ''
        # Split into lines, keeping the terminators: re.split() with a
        # grouping pattern alternates content and terminator, and its
        # final element is whatever trailed the last terminator (the
        # empty string for newline-terminated data).
        pieces = NLCRE_crack.split(data)
        self._partial = pieces.pop()
        # Re-attach each terminator to its line content.
        lines = []
        for idx in range(0, len(pieces) - 1, 2):
            lines.append(pieces[idx] + pieces[idx + 1])
        self.pushlines(lines)

    def pushlines(self, lines):
        # Reverse and splice onto the front of the buffer.
        self._lines[:0] = list(reversed(lines))

    def is_closed(self):
        return self._closed

    def __iter__(self):
        return self

    def next(self):
        line = self.readline()
        if line == '':
            raise StopIteration
        return line
+
+
+
+class FeedParser:
+    """A feed-style parser of email."""
+
    def __init__(self, _factory=Message.Message):
        """_factory is called with no arguments to create a new message obj"""
        self._factory = _factory
        # Incremental line source fed by feed().
        self._input = BufferedSubFile()
        # Stack of nested Message objects under construction.
        self._msgstack = []
        # Bound .next of the parsing generator; each call resumes parsing.
        self._parse = self._parsegen().next
        # Current and most recently created message objects.
        self._cur = None
        self._last = None
        self._headersonly = False
+
+    # Non-public interface for supporting Parser's headersonly flag
    def _set_headersonly(self):
        # Parser uses this to request header-only parsing: the remaining
        # input is stored as the payload verbatim.
        self._headersonly = True
+
    def feed(self, data):
        """Push more data into the parser."""
        self._input.push(data)
        # Resume the parsing generator; it yields when it needs more data.
        self._call_parse()
+
    def _call_parse(self):
        try:
            self._parse()
        except StopIteration:
            # The generator is exhausted: parsing is complete.
            pass
+
+    def close(self):
+        """Parse all remaining data and return the root message object."""
+        self._input.close()
+        self._call_parse()
+        root = self._pop_message()
+        assert not self._msgstack
+        # Look for final set of defects
+        if root.get_content_maintype() == 'multipart' \
+               and not root.is_multipart():
+            root.defects.append(Errors.MultipartInvariantViolationDefect())
+        return root
+
+    def _new_message(self):
+        msg = self._factory()
+        if self._cur and self._cur.get_content_type() == 'multipart/digest':
+            msg.set_default_type('message/rfc822')
+        if self._msgstack:
+            self._msgstack[-1].attach(msg)
+        self._msgstack.append(msg)
+        self._cur = msg
+        self._last = msg
+
+    def _pop_message(self):
+        retval = self._msgstack.pop()
+        if self._msgstack:
+            self._cur = self._msgstack[-1]
+        else:
+            self._cur = None
+        return retval
+
+    def _parsegen(self):
+        """Incrementally parse self._input into the current message tree.
+
+        This is a generator: whenever self._input produces the NeedMoreData
+        sentinel, it is re-yielded to the driver so parsing can resume once
+        more data has been pushed into the input.  Structural problems are
+        recorded on the current message's ``defects`` list rather than
+        raised as exceptions.
+        """
+        # Create a new message and start by parsing headers.
+        self._new_message()
+        headers = []
+        # Collect the headers, searching for a line that doesn't match the RFC
+        # 2822 header or continuation pattern (including an empty line).
+        for line in self._input:
+            if line is NeedMoreData:
+                yield NeedMoreData
+                continue
+            if not headerRE.match(line):
+                # If we saw the RFC defined header/body separator
+                # (i.e. newline), just throw it away. Otherwise the line is
+                # part of the body so push it back.
+                if not NLCRE.match(line):
+                    self._input.unreadline(line)
+                break
+            headers.append(line)
+        # Done with the headers, so parse them and figure out what we're
+        # supposed to see in the body of the message.
+        self._parse_headers(headers)
+        # Headers-only parsing is a backwards compatibility hack, which was
+        # necessary in the older parser, which could throw errors.  All
+        # remaining lines in the input are thrown into the message body.
+        if self._headersonly:
+            lines = []
+            while True:
+                line = self._input.readline()
+                if line is NeedMoreData:
+                    yield NeedMoreData
+                    continue
+                if line == '':
+                    break
+                lines.append(line)
+            self._cur.set_payload(EMPTYSTRING.join(lines))
+            return
+        if self._cur.get_content_type() == 'message/delivery-status':
+            # message/delivery-status contains blocks of headers separated by
+            # a blank line.  We'll represent each header block as a separate
+            # nested message object, but the processing is a bit different
+            # than standard message/* types because there is no body for the
+            # nested messages.  A blank line separates the subparts.
+            while True:
+                self._input.push_eof_matcher(NLCRE.match)
+                for retval in self._parsegen():
+                    if retval is NeedMoreData:
+                        yield NeedMoreData
+                        continue
+                    break
+                msg = self._pop_message()
+                # We need to pop the EOF matcher in order to tell if we're at
+                # the end of the current file, not the end of the last block
+                # of message headers.
+                self._input.pop_eof_matcher()
+                # The input stream must be sitting at the newline or at the
+                # EOF.  We want to see if we're at the end of this subpart, so
+                # first consume the blank line, then test the next line to see
+                # if we're at this subpart's EOF.
+                # Consume the blank separator line.
+                while True:
+                    line = self._input.readline()
+                    if line is NeedMoreData:
+                        yield NeedMoreData
+                        continue
+                    break
+                # Read the next line to test for this subpart's EOF.
+                while True:
+                    line = self._input.readline()
+                    if line is NeedMoreData:
+                        yield NeedMoreData
+                        continue
+                    break
+                if line == '':
+                    break
+                # Not at EOF so this is a line we're going to need.
+                self._input.unreadline(line)
+            return
+        if self._cur.get_content_maintype() == 'message':
+            # The message claims to be a message/* type, then what follows is
+            # another RFC 2822 message.
+            for retval in self._parsegen():
+                if retval is NeedMoreData:
+                    yield NeedMoreData
+                    continue
+                break
+            self._pop_message()
+            return
+        if self._cur.get_content_maintype() == 'multipart':
+            boundary = self._cur.get_boundary()
+            if boundary is None:
+                # The message /claims/ to be a multipart but it has not
+                # defined a boundary.  That's a problem which we'll handle by
+                # reading everything until the EOF and marking the message as
+                # defective.
+                self._cur.defects.append(Errors.NoBoundaryInMultipartDefect())
+                lines = []
+                for line in self._input:
+                    if line is NeedMoreData:
+                        yield NeedMoreData
+                        continue
+                    lines.append(line)
+                self._cur.set_payload(EMPTYSTRING.join(lines))
+                return
+            # Create a line match predicate which matches the inter-part
+            # boundary as well as the end-of-multipart boundary.  Don't push
+            # this onto the input stream until we've scanned past the
+            # preamble.
+            separator = '--' + boundary
+            boundaryre = re.compile(
+                '(?P<sep>' + re.escape(separator) +
+                r')(?P<end>--)?(?P<ws>[ \t]*)(?P<linesep>\r\n|\r|\n)?$')
+            capturing_preamble = True
+            preamble = []
+            linesep = False
+            while True:
+                line = self._input.readline()
+                if line is NeedMoreData:
+                    yield NeedMoreData
+                    continue
+                if line == '':
+                    break
+                mo = boundaryre.match(line)
+                if mo:
+                    # If we're looking at the end boundary, we're done with
+                    # this multipart.  If there was a newline at the end of
+                    # the closing boundary, then we need to initialize the
+                    # epilogue with the empty string (see below).
+                    if mo.group('end'):
+                        linesep = mo.group('linesep')
+                        break
+                    # We saw an inter-part boundary.  Were we in the preamble?
+                    if capturing_preamble:
+                        if preamble:
+                            # According to RFC 2046, the last newline belongs
+                            # to the boundary.
+                            lastline = preamble[-1]
+                            eolmo = NLCRE_eol.search(lastline)
+                            if eolmo:
+                                preamble[-1] = lastline[:-len(eolmo.group(0))]
+                            self._cur.preamble = EMPTYSTRING.join(preamble)
+                        capturing_preamble = False
+                        self._input.unreadline(line)
+                        continue
+                    # We saw a boundary separating two parts.  Consume any
+                    # multiple boundary lines that may be following.  Our
+                    # interpretation of RFC 2046 BNF grammar does not produce
+                    # body parts within such double boundaries.
+                    while True:
+                        line = self._input.readline()
+                        if line is NeedMoreData:
+                            yield NeedMoreData
+                            continue
+                        mo = boundaryre.match(line)
+                        if not mo:
+                            self._input.unreadline(line)
+                            break
+                    # Recurse to parse this subpart; the input stream points
+                    # at the subpart's first line.
+                    self._input.push_eof_matcher(boundaryre.match)
+                    for retval in self._parsegen():
+                        if retval is NeedMoreData:
+                            yield NeedMoreData
+                            continue
+                        break
+                    # Because of RFC 2046, the newline preceding the boundary
+                    # separator actually belongs to the boundary, not the
+                    # previous subpart's payload (or epilogue if the previous
+                    # part is a multipart).
+                    if self._last.get_content_maintype() == 'multipart':
+                        epilogue = self._last.epilogue
+                        if epilogue == '':
+                            self._last.epilogue = None
+                        elif epilogue is not None:
+                            mo = NLCRE_eol.search(epilogue)
+                            if mo:
+                                end = len(mo.group(0))
+                                self._last.epilogue = epilogue[:-end]
+                    else:
+                        payload = self._last.get_payload()
+                        if isinstance(payload, basestring):
+                            mo = NLCRE_eol.search(payload)
+                            if mo:
+                                payload = payload[:-len(mo.group(0))]
+                                self._last.set_payload(payload)
+                    self._input.pop_eof_matcher()
+                    self._pop_message()
+                    # Set the multipart up for newline cleansing, which will
+                    # happen if we're in a nested multipart.
+                    self._last = self._cur
+                else:
+                    # I think we must be in the preamble
+                    assert capturing_preamble
+                    preamble.append(line)
+            # We've seen either the EOF or the end boundary.  If we're still
+            # capturing the preamble, we never saw the start boundary.  Note
+            # that as a defect and store the captured text as the payload.
+            # Everything from here to the EOF is epilogue.
+            if capturing_preamble:
+                self._cur.defects.append(Errors.StartBoundaryNotFoundDefect())
+                self._cur.set_payload(EMPTYSTRING.join(preamble))
+                epilogue = []
+                # NOTE(review): the remaining lines are consumed here but
+                # never appended to `epilogue`, so the join below always
+                # produces ''.  The upstream email package behaves the same
+                # way; confirm intent before "fixing".
+                for line in self._input:
+                    if line is NeedMoreData:
+                        yield NeedMoreData
+                        continue
+                self._cur.epilogue = EMPTYSTRING.join(epilogue)
+                return
+            # If the end boundary ended in a newline, we'll need to make sure
+            # the epilogue isn't None
+            if linesep:
+                epilogue = ['']
+            else:
+                epilogue = []
+            for line in self._input:
+                if line is NeedMoreData:
+                    yield NeedMoreData
+                    continue
+                epilogue.append(line)
+            # Any CRLF at the front of the epilogue is not technically part of
+            # the epilogue.  Also, watch out for an empty string epilogue,
+            # which means a single newline.
+            if epilogue:
+                firstline = epilogue[0]
+                bolmo = NLCRE_bol.match(firstline)
+                if bolmo:
+                    epilogue[0] = firstline[len(bolmo.group(0)):]
+            self._cur.epilogue = EMPTYSTRING.join(epilogue)
+            return
+        # Otherwise, it's some non-multipart type, so the entire rest of the
+        # file contents becomes the payload.
+        lines = []
+        for line in self._input:
+            if line is NeedMoreData:
+                yield NeedMoreData
+                continue
+            lines.append(line)
+        self._cur.set_payload(EMPTYSTRING.join(lines))
+
+    def _parse_headers(self, lines):
+        # Passed a list of lines that make up the headers for the current msg
+        lastheader = ''
+        lastvalue = []
+        for lineno, line in enumerate(lines):
+            # Check for continuation
+            if line[0] in ' \t':
+                if not lastheader:
+                    # The first line of the headers was a continuation.  This
+                    # is illegal, so let's note the defect, store the illegal
+                    # line, and ignore it for purposes of headers.
+                    defect = Errors.FirstHeaderLineIsContinuationDefect(line)
+                    self._cur.defects.append(defect)
+                    continue
+                lastvalue.append(line)
+                continue
+            if lastheader:
+                # XXX reconsider the joining of folded lines
+                lhdr = EMPTYSTRING.join(lastvalue)[:-1].rstrip('\r\n')
+                self._cur[lastheader] = lhdr
+                lastheader, lastvalue = '', []
+            # Check for envelope header, i.e. unix-from
+            if line.startswith('From '):
+                if lineno == 0:
+                    # Strip off the trailing newline
+                    mo = NLCRE_eol.search(line)
+                    if mo:
+                        line = line[:-len(mo.group(0))]
+                    self._cur.set_unixfrom(line)
+                    continue
+                elif lineno == len(lines) - 1:
+                    # Something looking like a unix-from at the end - it's
+                    # probably the first line of the body, so push back the
+                    # line and stop.
+                    self._input.unreadline(line)
+                    return
+                else:
+                    # Weirdly placed unix-from line.  Note this as a defect
+                    # and ignore it.
+                    defect = Errors.MisplacedEnvelopeHeaderDefect(line)
+                    self._cur.defects.append(defect)
+                    continue
+            # Split the line on the colon separating field name from value.
+            i = line.find(':')
+            if i < 0:
+                defect = Errors.MalformedHeaderDefect(line)
+                self._cur.defects.append(defect)
+                continue
+            lastheader = line[:i]
+            lastvalue = [line[i+1:].lstrip()]
+        # Done with all the lines, so handle the last header.
+        if lastheader:
+            # XXX reconsider the joining of folded lines
+            self._cur[lastheader] = EMPTYSTRING.join(lastvalue).rstrip('\r\n')
diff --git a/depot_tools/release/win/python_24/Lib/email/Generator.py b/depot_tools/release/win/python_24/Lib/email/Generator.py
new file mode 100644
index 0000000..9411a9e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Generator.py
@@ -0,0 +1,355 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Classes to generate plain text from a message object tree."""
+
+import re
+import sys
+import time
+import random
+import warnings
+from cStringIO import StringIO
+
+from email.Header import Header
+
+UNDERSCORE = '_'
+NL = '\n'
+
+fcre = re.compile(r'^From ', re.MULTILINE)
+
+def _is8bitstring(s):
+    if isinstance(s, str):
+        try:
+            unicode(s, 'us-ascii')
+        except UnicodeError:
+            return True
+    return False
+
+
+
+class Generator:
+    """Generates output from a Message object tree.
+
+    This basic generator writes the message to the given file object as plain
+    text.
+    """
+    #
+    # Public interface
+    #
+
+    def __init__(self, outfp, mangle_from_=True, maxheaderlen=78):
+        """Create the generator for message flattening.
+
+        outfp is the output file-like object for writing the message to.  It
+        must have a write() method.
+
+        Optional mangle_from_ is a flag that, when True (the default), escapes
+        From_ lines in the body of the message by putting a `>' in front of
+        them.
+
+        Optional maxheaderlen specifies the longest length for a non-continued
+        header.  When a header line is longer (in characters, with tabs
+        expanded to 8 spaces) than maxheaderlen, the header will split as
+        defined in the Header class.  Set maxheaderlen to zero to disable
+        header wrapping.  The default is 78, as recommended (but not required)
+        by RFC 2822.
+        """
+        self._fp = outfp
+        self._mangle_from_ = mangle_from_
+        self._maxheaderlen = maxheaderlen
+
+    def write(self, s):
+        # Just delegate to the file object
+        self._fp.write(s)
+
+    def flatten(self, msg, unixfrom=False):
+        """Print the message object tree rooted at msg to the output file
+        specified when the Generator instance was created.
+
+        unixfrom is a flag that forces the printing of a Unix From_ delimiter
+        before the first object in the message tree.  If the original message
+        has no From_ delimiter, a `standard' one is crafted.  By default, this
+        is False to inhibit the printing of any From_ delimiter.
+
+        Note that for subobjects, no From_ line is printed.
+        """
+        if unixfrom:
+            ufrom = msg.get_unixfrom()
+            if not ufrom:
+                ufrom = 'From nobody ' + time.ctime(time.time())
+            print >> self._fp, ufrom
+        self._write(msg)
+
+    # For backwards compatibility, but this is slower
+    def __call__(self, msg, unixfrom=False):
+        warnings.warn('__call__() deprecated; use flatten()',
+                      DeprecationWarning, 2)
+        self.flatten(msg, unixfrom)
+
+    def clone(self, fp):
+        """Clone this generator with the exact same options."""
+        return self.__class__(fp, self._mangle_from_, self._maxheaderlen)
+
+    #
+    # Protected interface - undocumented ;/
+    #
+
+    def _write(self, msg):
+        # We can't write the headers yet because of the following scenario:
+        # say a multipart message includes the boundary string somewhere in
+        # its body.  We'd have to calculate the new boundary /before/ we write
+        # the headers so that we can write the correct Content-Type:
+        # parameter.
+        #
+        # The way we do this, so as to make the _handle_*() methods simpler,
+        # is to cache any subpart writes into a StringIO.  The we write the
+        # headers and the StringIO contents.  That way, subpart handlers can
+        # Do The Right Thing, and can still modify the Content-Type: header if
+        # necessary.
+        oldfp = self._fp
+        try:
+            self._fp = sfp = StringIO()
+            self._dispatch(msg)
+        finally:
+            self._fp = oldfp
+        # Write the headers.  First we see if the message object wants to
+        # handle that itself.  If not, we'll do it generically.
+        meth = getattr(msg, '_write_headers', None)
+        if meth is None:
+            self._write_headers(msg)
+        else:
+            meth(self)
+        self._fp.write(sfp.getvalue())
+
+    def _dispatch(self, msg):
+        # Get the Content-Type: for the message, then try to dispatch to
+        # self._handle_<maintype>_<subtype>().  If there's no handler for the
+        # full MIME type, then dispatch to self._handle_<maintype>().  If
+        # that's missing too, then dispatch to self._writeBody().
+        main = msg.get_content_maintype()
+        sub = msg.get_content_subtype()
+        specific = UNDERSCORE.join((main, sub)).replace('-', '_')
+        meth = getattr(self, '_handle_' + specific, None)
+        if meth is None:
+            generic = main.replace('-', '_')
+            meth = getattr(self, '_handle_' + generic, None)
+            if meth is None:
+                meth = self._writeBody
+        meth(msg)
+
+    #
+    # Default handlers
+    #
+
+    def _write_headers(self, msg):
+        for h, v in msg.items():
+            print >> self._fp, '%s:' % h,
+            if self._maxheaderlen == 0:
+                # Explicit no-wrapping
+                print >> self._fp, v
+            elif isinstance(v, Header):
+                # Header instances know what to do
+                print >> self._fp, v.encode()
+            elif _is8bitstring(v):
+                # If we have raw 8bit data in a byte string, we have no idea
+                # what the encoding is.  There is no safe way to split this
+                # string.  If it's ascii-subset, then we could do a normal
+                # ascii split, but if it's multibyte then we could break the
+                # string.  There's no way to know so the least harm seems to
+                # be to not split the string and risk it being too long.
+                print >> self._fp, v
+            else:
+                # Header's got lots of smarts, so use it.
+                print >> self._fp, Header(
+                    v, maxlinelen=self._maxheaderlen,
+                    header_name=h, continuation_ws='\t').encode()
+        # A blank line always separates headers from body
+        print >> self._fp
+
+    #
+    # Handlers for writing types and subtypes
+    #
+
+    def _handle_text(self, msg):
+        payload = msg.get_payload()
+        if payload is None:
+            return
+        cset = msg.get_charset()
+        if cset is not None:
+            payload = cset.body_encode(payload)
+        if not isinstance(payload, basestring):
+            raise TypeError('string payload expected: %s' % type(payload))
+        if self._mangle_from_:
+            payload = fcre.sub('>From ', payload)
+        self._fp.write(payload)
+
+    # Default body handler
+    _writeBody = _handle_text
+
+    def _handle_multipart(self, msg):
+        # The trick here is to write out each part separately, merge them all
+        # together, and then make sure that the boundary we've chosen isn't
+        # present in the payload.
+        msgtexts = []
+        subparts = msg.get_payload()
+        if subparts is None:
+            subparts = []
+        elif isinstance(subparts, basestring):
+            # e.g. a non-strict parse of a message with no starting boundary.
+            self._fp.write(subparts)
+            return
+        elif not isinstance(subparts, list):
+            # Scalar payload
+            subparts = [subparts]
+        for part in subparts:
+            s = StringIO()
+            g = self.clone(s)
+            g.flatten(part, unixfrom=False)
+            msgtexts.append(s.getvalue())
+        # Now make sure the boundary we've selected doesn't appear in any of
+        # the message texts.
+        alltext = NL.join(msgtexts)
+        # BAW: What about boundaries that are wrapped in double-quotes?
+        boundary = msg.get_boundary(failobj=_make_boundary(alltext))
+        # If we had to calculate a new boundary because the body text
+        # contained that string, set the new boundary.  We don't do it
+        # unconditionally because, while set_boundary() preserves order, it
+        # doesn't preserve newlines/continuations in headers.  This is no big
+        # deal in practice, but turns out to be inconvenient for the unittest
+        # suite.
+        if msg.get_boundary() <> boundary:
+            msg.set_boundary(boundary)
+        # If there's a preamble, write it out, with a trailing CRLF
+        if msg.preamble is not None:
+            print >> self._fp, msg.preamble
+        # dash-boundary transport-padding CRLF
+        print >> self._fp, '--' + boundary
+        # body-part
+        if msgtexts:
+            self._fp.write(msgtexts.pop(0))
+        # *encapsulation
+        # --> delimiter transport-padding
+        # --> CRLF body-part
+        for body_part in msgtexts:
+            # delimiter transport-padding CRLF
+            print >> self._fp, '\n--' + boundary
+            # body-part
+            self._fp.write(body_part)
+        # close-delimiter transport-padding
+        self._fp.write('\n--' + boundary + '--')
+        if msg.epilogue is not None:
+            print >> self._fp
+            self._fp.write(msg.epilogue)
+
+    def _handle_message_delivery_status(self, msg):
+        # We can't just write the headers directly to self's file object
+        # because this will leave an extra newline between the last header
+        # block and the boundary.  Sigh.
+        blocks = []
+        for part in msg.get_payload():
+            s = StringIO()
+            g = self.clone(s)
+            g.flatten(part, unixfrom=False)
+            text = s.getvalue()
+            lines = text.split('\n')
+            # Strip off the unnecessary trailing empty line
+            if lines and lines[-1] == '':
+                blocks.append(NL.join(lines[:-1]))
+            else:
+                blocks.append(text)
+        # Now join all the blocks with an empty line.  This has the lovely
+        # effect of separating each block with an empty line, but not adding
+        # an extra one after the last one.
+        self._fp.write(NL.join(blocks))
+
+    def _handle_message(self, msg):
+        s = StringIO()
+        g = self.clone(s)
+        # The payload of a message/rfc822 part should be a multipart sequence
+        # of length 1.  The zeroth element of the list should be the Message
+        # object for the subpart.  Extract that object, stringify it, and
+        # write it out.
+        g.flatten(msg.get_payload(0), unixfrom=False)
+        self._fp.write(s.getvalue())
+
+
+
+_FMT = '[Non-text (%(type)s) part of message omitted, filename %(filename)s]'
+
+class DecodedGenerator(Generator):
+    """Generator a text representation of a message.
+
+    Like the Generator base class, except that non-text parts are substituted
+    with a format string representing the part.
+    """
+    def __init__(self, outfp, mangle_from_=True, maxheaderlen=78, fmt=None):
+        """Like Generator.__init__() except that an additional optional
+        argument is allowed.
+
+        Walks through all subparts of a message.  If the subpart is of main
+        type `text', then it prints the decoded payload of the subpart.
+
+        Otherwise, fmt is a format string that is used instead of the message
+        payload.  fmt is expanded with the following keywords (in
+        %(keyword)s format):
+
+        type       : Full MIME type of the non-text part
+        maintype   : Main MIME type of the non-text part
+        subtype    : Sub-MIME type of the non-text part
+        filename   : Filename of the non-text part
+        description: Description associated with the non-text part
+        encoding   : Content transfer encoding of the non-text part
+
+        The default value for fmt is None, meaning
+
+        [Non-text (%(type)s) part of message omitted, filename %(filename)s]
+        """
+        Generator.__init__(self, outfp, mangle_from_, maxheaderlen)
+        if fmt is None:
+            self._fmt = _FMT
+        else:
+            self._fmt = fmt
+
+    def _dispatch(self, msg):
+        for part in msg.walk():
+            maintype = part.get_content_maintype()
+            if maintype == 'text':
+                print >> self, part.get_payload(decode=True)
+            elif maintype == 'multipart':
+                # Just skip this
+                pass
+            else:
+                print >> self, self._fmt % {
+                    'type'       : part.get_content_type(),
+                    'maintype'   : part.get_content_maintype(),
+                    'subtype'    : part.get_content_subtype(),
+                    'filename'   : part.get_filename('[no filename]'),
+                    'description': part.get('Content-Description',
+                                            '[no description]'),
+                    'encoding'   : part.get('Content-Transfer-Encoding',
+                                            '[no encoding]'),
+                    }
+
+
+
+# Helper
+_width = len(repr(sys.maxint-1))
+_fmt = '%%0%dd' % _width
+
+def _make_boundary(text=None):
+    # Craft a random boundary.  If text is given, ensure that the chosen
+    # boundary doesn't appear in the text.
+    token = random.randrange(sys.maxint)
+    boundary = ('=' * 15) + (_fmt % token) + '=='
+    if text is None:
+        return boundary
+    b = boundary
+    counter = 0
+    while True:
+        cre = re.compile('^--' + re.escape(b) + '(--)?$', re.MULTILINE)
+        if not cre.search(text):
+            break
+        b = boundary + '.' + str(counter)
+        counter += 1
+    return b
diff --git a/depot_tools/release/win/python_24/Lib/email/Header.py b/depot_tools/release/win/python_24/Lib/email/Header.py
new file mode 100644
index 0000000..5e24afe
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Header.py
@@ -0,0 +1,495 @@
+# Copyright (C) 2002-2004 Python Software Foundation
+# Author: Ben Gertzfield, Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Header encoding and decoding functionality."""
+
+import re
+import binascii
+
+import email.quopriMIME
+import email.base64MIME
+from email.Errors import HeaderParseError
+from email.Charset import Charset
+
+NL = '\n'
+SPACE = ' '
+USPACE = u' '
+SPACE8 = ' ' * 8          # a tab counts as 8 columns for length computations
+UEMPTYSTRING = u''
+
+# Default maximum header line length (RFC 2047 limits encoded lines to 76).
+MAXLINELEN = 76
+
+# Fallback charsets tried by Header.append() when encoding unicode input.
+USASCII = Charset('us-ascii')
+UTF8 = Charset('utf-8')
+
+# Match encoded-word strings in the form =?charset?q?Hello_World?=
+ecre = re.compile(r'''
+  =\?                   # literal =?
+  (?P<charset>[^?]*?)   # non-greedy up to the next ? is the charset
+  \?                    # literal ?
+  (?P<encoding>[qb])    # either a "q" or a "b", case insensitive
+  \?                    # literal ?
+  (?P<encoded>.*?)      # non-greedy up to the next ?= is the encoded string
+  \?=                   # literal ?=
+  ''', re.VERBOSE | re.IGNORECASE)
+
+# Field name regexp, including trailing colon, but not separating whitespace,
+# according to RFC 2822.  Character range is from tilde to exclamation mark.
+# For use with .match()
+fcre = re.compile(r'[\041-\176]+:$')
+
+
+
+# Helpers
+# Borrow quopriMIME's chunk-appending helper for line-length bookkeeping.
+_max_append = email.quopriMIME._max_append
+
+
+
+def decode_header(header):
+    """Decode a message header value without converting charset.
+
+    Returns a list of (decoded_string, charset) pairs containing each of the
+    decoded parts of the header.  Charset is None for non-encoded parts of the
+    header, otherwise a lower-case string containing the name of the character
+    set specified in the encoded string.
+
+    An email.Errors.HeaderParseError may be raised when certain decoding error
+    occurs (e.g. a base64 decoding exception).
+    """
+    # If no encoding, just return the header
+    header = str(header)
+    if not ecre.search(header):
+        return [(header, None)]
+    decoded = []
+    dec = ''
+    for line in header.splitlines():
+        # This line might not have an encoding in it
+        if not ecre.search(line):
+            decoded.append((line, None))
+            continue
+        # ecre has three groups, so split() yields
+        # [text, charset, encoding, encoded, text, charset, ...]:
+        # plain text interleaved with encoded-word triples.
+        parts = ecre.split(line)
+        while parts:
+            unenc = parts.pop(0).strip()
+            if unenc:
+                # Should we continue a long line?
+                if decoded and decoded[-1][1] is None:
+                    decoded[-1] = (decoded[-1][0] + SPACE + unenc, None)
+                else:
+                    decoded.append((unenc, None))
+            if parts:
+                charset, encoding = [s.lower() for s in parts[0:2]]
+                encoded = parts[2]
+                dec = None
+                if encoding == 'q':
+                    dec = email.quopriMIME.header_decode(encoded)
+                elif encoding == 'b':
+                    try:
+                        dec = email.base64MIME.decode(encoded)
+                    except binascii.Error:
+                        # Turn this into a higher level exception.  BAW: Right
+                        # now we throw the lower level exception away but
+                        # when/if we get exception chaining, we'll preserve it.
+                        raise HeaderParseError
+                # Unknown encoding letter: fall back to the raw encoded text.
+                if dec is None:
+                    dec = encoded
+
+                # Merge adjacent chunks that share a charset.
+                if decoded and decoded[-1][1] == charset:
+                    decoded[-1] = (decoded[-1][0] + dec, decoded[-1][1])
+                else:
+                    decoded.append((dec, charset))
+            # Consume the (charset, encoding, encoded) triple just handled.
+            del parts[0:3]
+    return decoded
+
+
+
+def make_header(decoded_seq, maxlinelen=None, header_name=None,
+                continuation_ws=' '):
+    """Create a Header from a sequence of pairs as returned by decode_header()
+
+    decode_header() takes a header value string and returns a sequence of
+    pairs of the format (decoded_string, charset) where charset is the string
+    name of the character set.
+
+    This function takes one of those sequence of pairs and returns a Header
+    instance.  Optional maxlinelen, header_name, and continuation_ws are as in
+    the Header constructor.
+    """
+    h = Header(maxlinelen=maxlinelen, header_name=header_name,
+               continuation_ws=continuation_ws)
+    for s, charset in decoded_seq:
+        # None means us-ascii but we can simply pass it on to h.append()
+        # Coerce charset names (strings) into Charset instances.
+        if charset is not None and not isinstance(charset, Charset):
+            charset = Charset(charset)
+        h.append(s, charset)
+    return h
+
+
+
+class Header:
+    # Internal state:
+    #   _chunks  - list of (string, Charset) pairs appended so far
+    #   _charset - default Charset used when append() is called without one
+    def __init__(self, s=None, charset=None,
+                 maxlinelen=None, header_name=None,
+                 continuation_ws=' ', errors='strict'):
+        """Create a MIME-compliant header that can contain many character sets.
+
+        Optional s is the initial header value.  If None, the initial header
+        value is not set.  You can later append to the header with .append()
+        method calls.  s may be a byte string or a Unicode string, but see the
+        .append() documentation for semantics.
+
+        Optional charset serves two purposes: it has the same meaning as the
+        charset argument to the .append() method.  It also sets the default
+        character set for all subsequent .append() calls that omit the charset
+        argument.  If charset is not provided in the constructor, the us-ascii
+        charset is used both as s's initial charset and as the default for
+        subsequent .append() calls.
+
+        The maximum line length can be specified explicit via maxlinelen.  For
+        splitting the first line to a shorter value (to account for the field
+        header which isn't included in s, e.g. `Subject') pass in the name of
+        the field in header_name.  The default maxlinelen is 76.
+
+        continuation_ws must be RFC 2822 compliant folding whitespace (usually
+        either a space or a hard tab) which will be prepended to continuation
+        lines.
+
+        errors is passed through to the .append() call.
+        """
+        if charset is None:
+            charset = USASCII
+        if not isinstance(charset, Charset):
+            charset = Charset(charset)
+        self._charset = charset
+        self._continuation_ws = continuation_ws
+        # Tabs in the continuation whitespace count as 8 columns.
+        cws_expanded_len = len(continuation_ws.replace('\t', SPACE8))
+        # BAW: I believe `chunks' and `maxlinelen' should be non-public.
+        self._chunks = []
+        if s is not None:
+            self.append(s, charset, errors)
+        if maxlinelen is None:
+            maxlinelen = MAXLINELEN
+        if header_name is None:
+            # We don't know anything about the field header so the first line
+            # is the same length as subsequent lines.
+            self._firstlinelen = maxlinelen
+        else:
+            # The first line should be shorter to take into account the field
+            # header.  Also subtract off 2 extra for the colon and space.
+            self._firstlinelen = maxlinelen - len(header_name) - 2
+        # Second and subsequent lines should subtract off the length in
+        # columns of the continuation whitespace prefix.
+        self._maxlinelen = maxlinelen - cws_expanded_len
+
+    def __str__(self):
+        """A synonym for self.encode()."""
+        return self.encode()
+
+    def __unicode__(self):
+        """Helper for the built-in unicode function."""
+        uchunks = []
+        lastcs = None
+        for s, charset in self._chunks:
+            # We must preserve spaces between encoded and non-encoded word
+            # boundaries, which means for us we need to add a space when we go
+            # from a charset to None/us-ascii, or from None/us-ascii to a
+            # charset.  Only do this for the second and subsequent chunks.
+            nextcs = charset
+            if uchunks:
+                if lastcs not in (None, 'us-ascii'):
+                    if nextcs in (None, 'us-ascii'):
+                        uchunks.append(USPACE)
+                        nextcs = None
+                elif nextcs not in (None, 'us-ascii'):
+                    uchunks.append(USPACE)
+            lastcs = nextcs
+            uchunks.append(unicode(s, str(charset)))
+        return UEMPTYSTRING.join(uchunks)
+
+    # Rich comparison operators for equality only.  BAW: does it make sense to
+    # have or explicitly disable <, <=, >, >= operators?
+    def __eq__(self, other):
+        # other may be a Header or a string.  Both are fine so coerce
+        # ourselves to a string, swap the args and do another comparison.
+        return other == self.encode()
+
+    def __ne__(self, other):
+        return not self == other
+
+    def append(self, s, charset=None, errors='strict'):
+        """Append a string to the MIME header.
+
+        Optional charset, if given, should be a Charset instance or the name
+        of a character set (which will be converted to a Charset instance).  A
+        value of None (the default) means that the charset given in the
+        constructor is used.
+
+        s may be a byte string or a Unicode string.  If it is a byte string
+        (i.e. isinstance(s, str) is true), then charset is the encoding of
+        that byte string, and a UnicodeError will be raised if the string
+        cannot be decoded with that charset.  If s is a Unicode string, then
+        charset is a hint specifying the character set of the characters in
+        the string.  In this case, when producing an RFC 2822 compliant header
+        using RFC 2047 rules, the Unicode string will be encoded using the
+        following charsets in order: us-ascii, the charset hint, utf-8.  The
+        first character set not to provoke a UnicodeError is used.
+
+        Optional `errors' is passed as the third argument to any unicode() or
+        ustr.encode() call.
+        """
+        if charset is None:
+            charset = self._charset
+        elif not isinstance(charset, Charset):
+            charset = Charset(charset)
+        # If the charset is our faux 8bit charset, leave the string unchanged
+        # (`<>' is the Python 2 inequality operator, i.e. `!=').
+        if charset <> '8bit':
+            # We need to test that the string can be converted to unicode and
+            # back to a byte string, given the input and output codecs of the
+            # charset.
+            if isinstance(s, str):
+                # Possibly raise UnicodeError if the byte string can't be
+                # converted to a unicode with the input codec of the charset.
+                incodec = charset.input_codec or 'us-ascii'
+                ustr = unicode(s, incodec, errors)
+                # Now make sure that the unicode could be converted back to a
+                # byte string with the output codec, which may be different
+                # than the input codec.  Still, use the original byte string.
+                outcodec = charset.output_codec or 'us-ascii'
+                ustr.encode(outcodec, errors)
+            elif isinstance(s, unicode):
+                # Now we have to be sure the unicode string can be converted
+                # to a byte string with a reasonable output codec.  We want to
+                # use the byte string in the chunk.
+                for charset in USASCII, charset, UTF8:
+                    try:
+                        outcodec = charset.output_codec or 'us-ascii'
+                        s = s.encode(outcodec, errors)
+                        break
+                    except UnicodeError:
+                        pass
+                else:
+                    # utf-8 can encode any unicode string, so reaching here
+                    # would be a bug.
+                    assert False, 'utf-8 conversion failed'
+        self._chunks.append((s, charset))
+
+    def _split(self, s, charset, maxlinelen, splitchars):
+        # Split up a header safely for use with encode_chunks.
+        splittable = charset.to_splittable(s)
+        encoded = charset.from_splittable(splittable, True)
+        elen = charset.encoded_header_len(encoded)
+        # If the line's encoded length fits, just return it
+        if elen <= maxlinelen:
+            return [(encoded, charset)]
+        # If we have undetermined raw 8bit characters sitting in a byte
+        # string, we really don't know what the right thing to do is.  We
+        # can't really split it because it might be multibyte data which we
+        # could break if we split it between pairs.  The least harm seems to
+        # be to not split the header at all, but that means they could go out
+        # longer than maxlinelen.
+        if charset == '8bit':
+            return [(s, charset)]
+        # BAW: I'm not sure what the right test here is.  What we're trying to
+        # do is be faithful to RFC 2822's recommendation that ($2.2.3):
+        #
+        # "Note: Though structured field bodies are defined in such a way that
+        #  folding can take place between many of the lexical tokens (and even
+        #  within some of the lexical tokens), folding SHOULD be limited to
+        #  placing the CRLF at higher-level syntactic breaks."
+        #
+        # For now, I can only imagine doing this when the charset is us-ascii,
+        # although it's possible that other charsets may also benefit from the
+        # higher-level syntactic breaks.
+        elif charset == 'us-ascii':
+            return self._split_ascii(s, charset, maxlinelen, splitchars)
+        # BAW: should we use encoded?
+        elif elen == len(s):
+            # We can split on _maxlinelen boundaries because we know that the
+            # encoding won't change the size of the string
+            splitpnt = maxlinelen
+            first = charset.from_splittable(splittable[:splitpnt], False)
+            last = charset.from_splittable(splittable[splitpnt:], False)
+        else:
+            # Binary search for split point
+            first, last = _binsplit(splittable, charset, maxlinelen)
+        # first is of the proper length so just wrap it in the appropriate
+        # chrome.  last must be recursively split.
+        fsplittable = charset.to_splittable(first)
+        fencoded = charset.from_splittable(fsplittable, True)
+        chunk = [(fencoded, charset)]
+        return chunk + self._split(last, charset, self._maxlinelen, splitchars)
+
+    def _split_ascii(self, s, charset, firstlen, splitchars):
+        # Delegate to the module-level helper, then pair every resulting
+        # line with this chunk's charset.
+        chunks = _split_ascii(s, firstlen, self._maxlinelen,
+                              self._continuation_ws, splitchars)
+        return zip(chunks, [charset]*len(chunks))
+
+    def _encode_chunks(self, newchunks, maxlinelen):
+        # MIME-encode a header with many different charsets and/or encodings.
+        #
+        # Given a list of pairs (string, charset), return a MIME-encoded
+        # string suitable for use in a header field.  Each pair may have
+        # different charsets and/or encodings, and the resulting header will
+        # accurately reflect each setting.
+        #
+        # Each encoding can be email.Utils.QP (quoted-printable, for
+        # ASCII-like character sets like iso-8859-1), email.Utils.BASE64
+        # (Base64, for non-ASCII like character sets like KOI8-R and
+        # iso-2022-jp), or None (no encoding).
+        #
+        # Each pair will be represented on a separate line; the resulting
+        # string will be in the format:
+        #
+        # =?charset1?q?Mar=EDa_Gonz=E1lez_Alonso?=\n
+        #  =?charset2?b?SvxyZ2VuIEL2aW5n?=
+        chunks = []
+        for header, charset in newchunks:
+            if not header:
+                continue
+            if charset is None or charset.header_encoding is None:
+                s = header
+            else:
+                s = charset.header_encode(header)
+            # Don't add more folding whitespace than necessary
+            if chunks and chunks[-1].endswith(' '):
+                extra = ''
+            else:
+                extra = ' '
+            _max_append(chunks, s, maxlinelen, extra)
+        joiner = NL + self._continuation_ws
+        return joiner.join(chunks)
+
+    def encode(self, splitchars=';, '):
+        """Encode a message header into an RFC-compliant format.
+
+        There are many issues involved in converting a given string for use in
+        an email header.  Only certain character sets are readable in most
+        email clients, and as header strings can only contain a subset of
+        7-bit ASCII, care must be taken to properly convert and encode (with
+        Base64 or quoted-printable) header strings.  In addition, there is a
+        75-character length limit on any given encoded header field, so
+        line-wrapping must be performed, even with double-byte character sets.
+
+        This method will do its best to convert the string to the correct
+        character set used in email, and encode and line wrap it safely with
+        the appropriate scheme for that character set.
+
+        If the given charset is not known or an error occurs during
+        conversion, this function will return the header untouched.
+
+        Optional splitchars is a string containing characters to split long
+        ASCII lines on, in rough support of RFC 2822's `highest level
+        syntactic breaks'.  This doesn't affect RFC 2047 encoded lines.
+        """
+        newchunks = []
+        maxlinelen = self._firstlinelen
+        lastlen = 0
+        for s, charset in self._chunks:
+            # The first bit of the next chunk should be just long enough to
+            # fill the next line.  Don't forget the space separating the
+            # encoded words.
+            targetlen = maxlinelen - lastlen - 1
+            if targetlen < charset.encoded_header_len(''):
+                # Stick it on the next line
+                targetlen = maxlinelen
+            newchunks += self._split(s, charset, targetlen, splitchars)
+            lastchunk, lastcharset = newchunks[-1]
+            lastlen = lastcharset.encoded_header_len(lastchunk)
+        return self._encode_chunks(newchunks, maxlinelen)
+
+
+
+def _split_ascii(s, firstlen, restlen, continuation_ws, splitchars):
+    """Split ASCII header text `s' into folded lines.
+
+    firstlen is the length budget for the first output line, restlen the
+    budget for every subsequent line.  splitchars lists split candidates in
+    priority order (e.g. ';, ').  Returns a list of line strings without
+    the continuation whitespace prefix (the caller joins with it).
+    """
+    lines = []
+    maxlen = firstlen
+    for line in s.splitlines():
+        # Ignore any leading whitespace (i.e. continuation whitespace) already
+        # on the line, since we'll be adding our own.
+        line = line.lstrip()
+        if len(line) < maxlen:
+            lines.append(line)
+            maxlen = restlen
+            continue
+        # Attempt to split the line at the highest-level syntactic break
+        # possible.  Note that we don't have a lot of smarts about field
+        # syntax; we just try to break on semi-colons, then commas, then
+        # whitespace.
+        for ch in splitchars:
+            if ch in line:
+                break
+        else:
+            # There's nothing useful to split the line on, not even spaces, so
+            # just append this line unchanged
+            lines.append(line)
+            maxlen = restlen
+            continue
+        # Now split the line on the character plus trailing whitespace
+        cre = re.compile(r'%s\s*' % ch)
+        # Semicolons and commas are kept at the end of each piece; a space
+        # separator is simply dropped.
+        if ch in ';,':
+            eol = ch
+        else:
+            eol = ''
+        joiner = eol + ' '
+        joinlen = len(joiner)
+        wslen = len(continuation_ws.replace('\t', SPACE8))
+        this = []
+        linelen = 0
+        for part in cre.split(line):
+            curlen = linelen + max(0, len(this)-1) * joinlen
+            partlen = len(part)
+            onfirstline = not lines
+            # We don't want to split after the field name, if we're on the
+            # first line and the field name is present in the header string.
+            if ch == ' ' and onfirstline and \
+                   len(this) == 1 and fcre.match(this[0]):
+                this.append(part)
+                linelen += partlen
+            elif curlen + partlen > maxlen:
+                if this:
+                    lines.append(joiner.join(this) + eol)
+                # If this part is longer than maxlen and we aren't already
+                # splitting on whitespace, try to recursively split this line
+                # on whitespace.  (`<>' is the Python 2 inequality operator.)
+                if partlen > maxlen and ch <> ' ':
+                    subl = _split_ascii(part, maxlen, restlen,
+                                        continuation_ws, ' ')
+                    lines.extend(subl[:-1])
+                    this = [subl[-1]]
+                else:
+                    this = [part]
+                linelen = wslen + len(this[-1])
+                maxlen = restlen
+            else:
+                this.append(part)
+                linelen += partlen
+        # Put any left over parts on a line by themselves
+        if this:
+            lines.append(joiner.join(this))
+    return lines
+
+
+
+def _binsplit(splittable, charset, maxlinelen):
+    """Binary-search the largest prefix whose encoded form fits maxlinelen.
+
+    Returns (first, last): the fitting prefix and the remainder, both
+    converted back from splittable form without encoding.
+    """
+    i = 0
+    j = len(splittable)
+    while i < j:
+        # Invariants:
+        # 1. splittable[:k] fits for all k <= i (note that we *assume*,
+        #    at the start, that splittable[:0] fits).
+        # 2. splittable[:k] does not fit for any k > j (at the start,
+        #    this means we shouldn't look at any k > len(splittable)).
+        # 3. We don't know about splittable[:k] for k in i+1..j.
+        # 4. We want to set i to the largest k that fits, with i <= k <= j.
+        #
+        m = (i+j+1) >> 1  # ceiling((i+j)/2); i < m <= j
+        chunk = charset.from_splittable(splittable[:m], True)
+        chunklen = charset.encoded_header_len(chunk)
+        if chunklen <= maxlinelen:
+            # m is acceptable, so is a new lower bound.
+            i = m
+        else:
+            # m is not acceptable, so final i must be < m.
+            j = m - 1
+    # i == j.  Invariant #1 implies that splittable[:i] fits, and
+    # invariant #2 implies that splittable[:i+1] does not fit, so i
+    # is what we're looking for.
+    first = charset.from_splittable(splittable[:i], False)
+    last  = charset.from_splittable(splittable[i:], False)
+    return first, last
diff --git a/depot_tools/release/win/python_24/Lib/email/Iterators.py b/depot_tools/release/win/python_24/Lib/email/Iterators.py
new file mode 100644
index 0000000..74a93c7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Iterators.py
@@ -0,0 +1,67 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Various types of useful iterators and generators."""
+
+import sys
+from cStringIO import StringIO
+
+
+
+# This function will become a method of the Message class
+def walk(self):
+    """Walk over the message tree, yielding each subpart.
+
+    The walk is performed in depth-first order.  This method is a
+    generator.
+    """
+    # Pre-order traversal: yield this part before descending into children.
+    yield self
+    if self.is_multipart():
+        for subpart in self.get_payload():
+            for subsubpart in subpart.walk():
+                yield subsubpart
+
+
+
+# These two functions are imported into the Iterators.py interface module.
+# The Python 2.2 version uses generators for efficiency.
+def body_line_iterator(msg, decode=False):
+    """Iterate over the parts, returning string payloads line-by-line.
+
+    Optional decode (default False) is passed through to .get_payload().
+    """
+    for subpart in msg.walk():
+        payload = subpart.get_payload(decode=decode)
+        # Non-string payloads (e.g. multipart containers) are skipped.
+        if isinstance(payload, basestring):
+            # Iterating a StringIO yields lines with endings preserved.
+            for line in StringIO(payload):
+                yield line
+
+
+def typed_subpart_iterator(msg, maintype='text', subtype=None):
+    """Iterate over the subparts with a given MIME type.
+
+    Use `maintype' as the main MIME type to match against; this defaults to
+    "text".  Optional `subtype' is the MIME subtype to match against; if
+    omitted, only the main type is matched.
+    """
+    for subpart in msg.walk():
+        if subpart.get_content_maintype() == maintype:
+            if subtype is None or subpart.get_content_subtype() == subtype:
+                yield subpart
+
+
+
+def _structure(msg, fp=None, level=0, include_default=False):
+    """A handy debugging aid: print the content-type tree of `msg' to fp."""
+    if fp is None:
+        fp = sys.stdout
+    # Indent four spaces per nesting level.
+    tab = ' ' * (level * 4)
+    # Trailing comma suppresses the newline so the default type (or just
+    # the newline) can follow on the same output line.
+    print >> fp, tab + msg.get_content_type(),
+    if include_default:
+        print >> fp, '[%s]' % msg.get_default_type()
+    else:
+        print >> fp
+    if msg.is_multipart():
+        for subpart in msg.get_payload():
+            _structure(subpart, fp, level+1, include_default)
diff --git a/depot_tools/release/win/python_24/Lib/email/MIMEAudio.py b/depot_tools/release/win/python_24/Lib/email/MIMEAudio.py
new file mode 100644
index 0000000..266ec4c4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/MIMEAudio.py
@@ -0,0 +1,72 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Anthony Baxter
+# Contact: email-sig@python.org
+
+"""Class representing audio/* type MIME documents."""
+
+import sndhdr
+from cStringIO import StringIO
+
+from email import Errors
+from email import Encoders
+from email.MIMENonMultipart import MIMENonMultipart
+
+
+
+# Map sndhdr's format names to the corresponding audio/* MIME subtype.
+_sndhdr_MIMEmap = {'au'  : 'basic',
+                   'wav' :'x-wav',
+                   'aiff':'x-aiff',
+                   'aifc':'x-aiff',
+                   }
+
+# There are others in sndhdr that don't have MIME types. :(
+# Additional ones to be added to sndhdr? midi, mp3, realaudio, wma??
+def _whatsnd(data):
+    """Try to identify a sound file type.
+
+    sndhdr.what() has a pretty cruddy interface, unfortunately.  This is why
+    we re-do it here.  It would be easier to reverse engineer the Unix 'file'
+    command and use the standard 'magic' file, as shipped with a modern Unix.
+    """
+    # Only the first 512 bytes are needed for format sniffing.
+    hdr = data[:512]
+    fakefile = StringIO(hdr)
+    for testfn in sndhdr.tests:
+        res = testfn(hdr, fakefile)
+        if res is not None:
+            # res[0] is sndhdr's format name; None if we have no MIME mapping.
+            return _sndhdr_MIMEmap.get(res[0])
+    return None
+
+
+
+class MIMEAudio(MIMENonMultipart):
+    """Class for generating audio/* MIME documents."""
+
+    def __init__(self, _audiodata, _subtype=None,
+                 _encoder=Encoders.encode_base64, **_params):
+        """Create an audio/* type MIME document.
+
+        _audiodata is a string containing the raw audio data.  If this data
+        can be decoded by the standard Python `sndhdr' module, then the
+        subtype will be automatically included in the Content-Type header.
+        Otherwise, you can specify the specific audio subtype via the
+        _subtype parameter.  If _subtype is not given, and no subtype can be
+        guessed, a TypeError is raised.
+
+        _encoder is a function which will perform the actual encoding for
+        transport of the image data.  It takes one argument, which is this
+        Image instance.  It should use get_payload() and set_payload() to
+        change the payload to the encoded form.  It should also add any
+        Content-Transfer-Encoding or other headers to the message as
+        necessary.  The default encoding is Base64.
+
+        Any additional keyword arguments are passed to the base class
+        constructor, which turns them into parameters on the Content-Type
+        header.
+        """
+        if _subtype is None:
+            # Sniff the subtype from the raw bytes via sndhdr.
+            _subtype = _whatsnd(_audiodata)
+        if _subtype is None:
+            raise TypeError('Could not find audio MIME subtype')
+        MIMENonMultipart.__init__(self, 'audio', _subtype, **_params)
+        self.set_payload(_audiodata)
+        # Encode in place (default: Base64) and set transfer-encoding headers.
+        _encoder(self)
diff --git a/depot_tools/release/win/python_24/Lib/email/MIMEBase.py b/depot_tools/release/win/python_24/Lib/email/MIMEBase.py
new file mode 100644
index 0000000..88691f8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/MIMEBase.py
@@ -0,0 +1,24 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Base class for MIME specializations."""
+
+from email import Message
+
+
+
+class MIMEBase(Message.Message):
+    """Base class for MIME specializations."""
+
+    def __init__(self, _maintype, _subtype, **_params):
+        """This constructor adds a Content-Type: and a MIME-Version: header.
+
+        The Content-Type: header is taken from the _maintype and _subtype
+        arguments.  Additional parameters for this header are taken from the
+        keyword arguments.
+        """
+        Message.Message.__init__(self)
+        # e.g. 'text/plain' from _maintype='text', _subtype='plain'.
+        ctype = '%s/%s' % (_maintype, _subtype)
+        self.add_header('Content-Type', ctype, **_params)
+        self['MIME-Version'] = '1.0'
diff --git a/depot_tools/release/win/python_24/Lib/email/MIMEImage.py b/depot_tools/release/win/python_24/Lib/email/MIMEImage.py
new file mode 100644
index 0000000..a658067
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/MIMEImage.py
@@ -0,0 +1,45 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Class representing image/* type MIME documents."""
+
+import imghdr
+
+from email import Errors
+from email import Encoders
+from email.MIMENonMultipart import MIMENonMultipart
+
+
+
+class MIMEImage(MIMENonMultipart):
+    """Class for generating image/* type MIME documents."""
+
+    def __init__(self, _imagedata, _subtype=None,
+                 _encoder=Encoders.encode_base64, **_params):
+        """Create an image/* type MIME document.
+
+        _imagedata is a string containing the raw image data.  If this data
+        can be decoded by the standard Python `imghdr' module, then the
+        subtype will be automatically included in the Content-Type header.
+        Otherwise, you can specify the specific image subtype via the _subtype
+        parameter.
+
+        _encoder is a function which will perform the actual encoding for
+        transport of the image data.  It takes one argument, which is this
+        Image instance.  It should use get_payload() and set_payload() to
+        change the payload to the encoded form.  It should also add any
+        Content-Transfer-Encoding or other headers to the message as
+        necessary.  The default encoding is Base64.
+
+        Any additional keyword arguments are passed to the base class
+        constructor, which turns them into parameters on the Content-Type
+        header.
+        """
+        if _subtype is None:
+            # Sniff the subtype from the raw bytes (filename arg is None).
+            _subtype = imghdr.what(None, _imagedata)
+        if _subtype is None:
+            raise TypeError('Could not guess image MIME subtype')
+        MIMENonMultipart.__init__(self, 'image', _subtype, **_params)
+        self.set_payload(_imagedata)
+        # Encode in place (default: Base64) and set transfer-encoding headers.
+        _encoder(self)
diff --git a/depot_tools/release/win/python_24/Lib/email/MIMEMessage.py b/depot_tools/release/win/python_24/Lib/email/MIMEMessage.py
new file mode 100644
index 0000000..3021934
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/MIMEMessage.py
@@ -0,0 +1,32 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Class representing message/* MIME documents."""
+
+from email import Message
+from email.MIMENonMultipart import MIMENonMultipart
+
+
+
+class MIMEMessage(MIMENonMultipart):
+    """Class representing message/* MIME documents."""
+
+    def __init__(self, _msg, _subtype='rfc822'):
+        """Create a message/* type MIME document.
+
+        _msg is a message object and must be an instance of Message, or a
+        derived class of Message, otherwise a TypeError is raised.
+
+        Optional _subtype defines the subtype of the contained message.  The
+        default is "rfc822" (this is defined by the MIME standard, even though
+        the term "rfc822" is technically outdated by RFC 2822).
+        """
+        # NOTE(review): the type check happens after the base __init__, so a
+        # TypeError leaves a partially initialized instance behind; harmless
+        # since the exception propagates out of the constructor.
+        MIMENonMultipart.__init__(self, 'message', _subtype)
+        if not isinstance(_msg, Message.Message):
+            raise TypeError('Argument is not an instance of Message')
+        # It's convenient to use this base class method.  We need to do it
+        # this way or we'll get an exception
+        Message.Message.attach(self, _msg)
+        # And be sure our default type is set correctly
+        self.set_default_type('message/rfc822')
diff --git a/depot_tools/release/win/python_24/Lib/email/MIMEMultipart.py b/depot_tools/release/win/python_24/Lib/email/MIMEMultipart.py
new file mode 100644
index 0000000..9072a64
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/MIMEMultipart.py
@@ -0,0 +1,39 @@
+# Copyright (C) 2002-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Base class for MIME multipart/* type messages."""
+
+from email import MIMEBase
+
+
+
+class MIMEMultipart(MIMEBase.MIMEBase):
+    """Base class for MIME multipart/* type messages."""
+
+    def __init__(self, _subtype='mixed', boundary=None, _subparts=None,
+                 **_params):
+        """Creates a multipart/* type message.
+
+        By default, creates a multipart/mixed message, with proper
+        Content-Type and MIME-Version headers.
+
+        _subtype is the subtype of the multipart content type, defaulting to
+        `mixed'.
+
+        boundary is the multipart boundary string.  By default it is
+        calculated as needed.
+
+        _subparts is a sequence of initial subparts for the payload.  It
+        must be an iterable object, such as a list.  You can always
+        attach new subparts to the message by using the attach() method.
+
+        Additional parameters for the Content-Type header are taken from the
+        keyword arguments (or passed into the _params argument).
+        """
+        MIMEBase.MIMEBase.__init__(self, 'multipart', _subtype, **_params)
+        if _subparts:
+            # Attach each initial subpart; the payload becomes a list.
+            for p in _subparts:
+                self.attach(p)
+        if boundary:
+            # Record the explicit boundary in the Content-Type header now;
+            # otherwise the Generator computes one at flatten time.
+            self.set_boundary(boundary)
diff --git a/depot_tools/release/win/python_24/Lib/email/MIMENonMultipart.py b/depot_tools/release/win/python_24/Lib/email/MIMENonMultipart.py
new file mode 100644
index 0000000..4195d2a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/MIMENonMultipart.py
@@ -0,0 +1,24 @@
+# Copyright (C) 2002-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Base class for MIME type messages that are not multipart."""
+
+from email import Errors
+from email import MIMEBase
+
+
+
+class MIMENonMultipart(MIMEBase.MIMEBase):
+    """Base class for MIME non-multipart type messages."""
+
+    # Tell pychecker not to complain about the intentionally unused
+    # `payload' argument below.
+    __pychecker__ = 'unusednames=payload'
+
+    def attach(self, payload):
+        # The public API prohibits attaching multiple subparts to MIMEBase
+        # derived subtypes since none of them are, by definition, of content
+        # type multipart/*
+        raise Errors.MultipartConversionError(
+            'Cannot attach additional subparts to non-multipart/*')
+
+    del __pychecker__
diff --git a/depot_tools/release/win/python_24/Lib/email/MIMEText.py b/depot_tools/release/win/python_24/Lib/email/MIMEText.py
new file mode 100644
index 0000000..5ef1876
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/MIMEText.py
@@ -0,0 +1,28 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Class representing text/* type MIME documents."""
+
+from email.MIMENonMultipart import MIMENonMultipart
+from email.Encoders import encode_7or8bit
+
+
+
+class MIMEText(MIMENonMultipart):
+    """Class for generating text/* type MIME documents."""
+
+    def __init__(self, _text, _subtype='plain', _charset='us-ascii'):
+        """Create a text/* type MIME document.
+
+        _text is the string for this message object.
+
+        _subtype is the MIME sub content type, defaulting to "plain".
+
+        _charset is the character set parameter added to the Content-Type
+        header.  This defaults to "us-ascii".  Note that as a side-effect, the
+        Content-Transfer-Encoding header will also be set.
+        """
+        MIMENonMultipart.__init__(self, 'text', _subtype,
+                                  **{'charset': _charset})
+        # Passing the charset through to set_payload() triggers
+        # Message.set_charset(), which adds the Content-Transfer-Encoding
+        # header mentioned in the docstring above.
+        self.set_payload(_text, _charset)
diff --git a/depot_tools/release/win/python_24/Lib/email/Message.py b/depot_tools/release/win/python_24/Lib/email/Message.py
new file mode 100644
index 0000000..b466f396
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Message.py
@@ -0,0 +1,807 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Basic message object for the email package object model."""
+
+import re
+import uu
+import binascii
+import warnings
+from cStringIO import StringIO
+
+# Intrapackage imports
+from email import Utils
+from email import Errors
+from email import Charset
+
+SEMISPACE = '; '
+
+# Regular expression used to split header parameters.  BAW: this may be too
+# simple.  It isn't strictly RFC 2045 (section 5.1) compliant, but it catches
+# most headers found in the wild.  We may eventually need a full-fledged
+# parser.
+paramre = re.compile(r'\s*;\s*')
+# Regular expression that matches `special' characters in parameters, the
+# existence of which forces quoting of the parameter value.
+tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
+
+
+
+# Helper functions
+def _formatparam(param, value=None, quote=True):
+    """Convenience function to format and return a key=value pair.
+
+    This will quote the value if needed or if quote is true.  If value is a
+    three-tuple (charset, language, value) it is encoded per RFC 2231 and
+    the parameter name gets a trailing `*'.  If value is None or empty only
+    the bare parameter name is returned.
+    """
+    if value is not None and len(value) > 0:
+        # A tuple is used for RFC 2231 encoded parameter values where items
+        # are (charset, language, value).  charset is a string, not a Charset
+        # instance.
+        if isinstance(value, tuple):
+            # Encode as per RFC 2231
+            param += '*'
+            value = Utils.encode_rfc2231(value[2], value[0], value[1])
+        # BAW: Please check this.  I think that if quote is set it should
+        # force quoting even if not necessary.
+        if quote or tspecials.search(value):
+            return '%s="%s"' % (param, Utils.quote(value))
+        else:
+            return '%s=%s' % (param, value)
+    else:
+        return param
+
+def _parseparam(s):
+    """Split a `;'-prefixed parameter string into a list of `key=value'
+    strings, ignoring semicolons that appear inside double-quoted values.
+    """
+    plist = []
+    while s[:1] == ';':
+        s = s[1:]
+        end = s.find(';')
+        # A `;' inside an unclosed double-quote (odd number of `"' before
+        # it) does not terminate the parameter; keep scanning.
+        while end > 0 and s.count('"', 0, end) % 2:
+            end = s.find(';', end + 1)
+        if end < 0:
+            end = len(s)
+        f = s[:end]
+        if '=' in f:
+            # Lowercase only the parameter name, never the value.
+            i = f.index('=')
+            f = f[:i].strip().lower() + '=' + f[i+1:].strip()
+        plist.append(f.strip())
+        s = s[end:]
+    return plist
+
+
+def _unquotevalue(value):
+    # This is different than Utils.collapse_rfc2231_value() because it doesn't
+    # try to convert the value to a unicode.  Message.get_param() and
+    # Message.get_params() are both currently defined to return the tuple in
+    # the face of RFC 2231 parameters.
+    if isinstance(value, tuple):
+        # RFC 2231 value: (charset, language, value) -- unquote only the
+        # value portion.
+        return value[0], value[1], Utils.unquote(value[2])
+    else:
+        return Utils.unquote(value)
+
+
+
+class Message:
+    """Basic message object.
+
+    A message object is defined as something that has a bunch of RFC 2822
+    headers and a payload.  It may optionally have an envelope header
+    (a.k.a. Unix-From or From_ header).  If the message is a container (i.e. a
+    multipart or a message/rfc822), then the payload is a list of Message
+    objects, otherwise it is a string.
+
+    Message objects implement part of the `mapping' interface, which assumes
+    there is exactly one occurrence of the header per message.  Some headers
+    do in fact appear multiple times (e.g. Received) and for those headers,
+    you must use the explicit API to set or get all the headers.  Not all of
+    the mapping methods are implemented.
+    """
+    def __init__(self):
+        self._headers = []
+        self._unixfrom = None
+        self._payload = None
+        self._charset = None
+        # Defaults for multipart messages
+        self.preamble = self.epilogue = None
+        self.defects = []
+        # Default content type
+        self._default_type = 'text/plain'
+
+    def __str__(self):
+        """Return the entire formatted message as a string.
+        This includes the headers, body, and envelope header.
+        """
+        return self.as_string(unixfrom=True)
+
+    def as_string(self, unixfrom=False):
+        """Return the entire formatted message as a string.
+        Optional `unixfrom' when True, means include the Unix From_ envelope
+        header.
+
+        This is a convenience method and may not generate the message exactly
+        as you intend because by default it mangles lines that begin with
+        "From ".  For more flexibility, use the flatten() method of a
+        Generator instance.
+        """
+        # Imported here, not at module level, to avoid a circular import.
+        from email.Generator import Generator
+        fp = StringIO()
+        g = Generator(fp)
+        g.flatten(self, unixfrom=unixfrom)
+        return fp.getvalue()
+
+    def is_multipart(self):
+        """Return True if the message consists of multiple parts."""
+        return isinstance(self._payload, list)
+
+    #
+    # Unix From_ line
+    #
+    def set_unixfrom(self, unixfrom):
+        self._unixfrom = unixfrom
+
+    def get_unixfrom(self):
+        return self._unixfrom
+
+    #
+    # Payload manipulation.
+    #
+    def attach(self, payload):
+        """Add the given payload to the current payload.
+
+        The current payload will always be a list of objects after this method
+        is called.  If you want to set the payload to a scalar object, use
+        set_payload() instead.
+        """
+        if self._payload is None:
+            self._payload = [payload]
+        else:
+            self._payload.append(payload)
+
+    def get_payload(self, i=None, decode=False):
+        """Return a reference to the payload.
+
+        The payload will either be a list object or a string.  If you mutate
+        the list object, you modify the message's payload in place.  Optional
+        i returns that index into the payload.
+
+        Optional decode is a flag indicating whether the payload should be
+        decoded or not, according to the Content-Transfer-Encoding header
+        (default is False).
+
+        When True and the message is not a multipart, the payload will be
+        decoded if this header's value is `quoted-printable' or `base64'.  If
+        some other encoding is used, or the header is missing, or if the
+        payload has bogus data (i.e. bogus base64 or uuencoded data), the
+        payload is returned as-is.
+
+        If the message is a multipart and the decode flag is True, then None
+        is returned.
+        """
+        if i is None:
+            payload = self._payload
+        elif not isinstance(self._payload, list):
+            raise TypeError('Expected list, got %s' % type(self._payload))
+        else:
+            payload = self._payload[i]
+        if decode:
+            if self.is_multipart():
+                return None
+            cte = self.get('content-transfer-encoding', '').lower()
+            if cte == 'quoted-printable':
+                return Utils._qdecode(payload)
+            elif cte == 'base64':
+                try:
+                    return Utils._bdecode(payload)
+                except binascii.Error:
+                    # Incorrect padding
+                    return payload
+            elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
+                sfp = StringIO()
+                try:
+                    uu.decode(StringIO(payload+'\n'), sfp)
+                    payload = sfp.getvalue()
+                except uu.Error:
+                    # Some decoding problem
+                    return payload
+        # Everything else, including encodings with 8bit or 7bit are returned
+        # unchanged.
+        return payload
+
+    def set_payload(self, payload, charset=None):
+        """Set the payload to the given value.
+
+        Optional charset sets the message's default character set.  See
+        set_charset() for details.
+        """
+        self._payload = payload
+        if charset is not None:
+            self.set_charset(charset)
+
+    def set_charset(self, charset):
+        """Set the charset of the payload to a given character set.
+
+        charset can be a Charset instance, a string naming a character set, or
+        None.  If it is a string it will be converted to a Charset instance.
+        If charset is None, the charset parameter will be removed from the
+        Content-Type field.  Anything else will generate a TypeError.
+
+        The message will be assumed to be of type text/* encoded with
+        charset.input_charset.  It will be converted to charset.output_charset
+        and encoded properly, if needed, when generating the plain text
+        representation of the message.  MIME headers (MIME-Version,
+        Content-Type, Content-Transfer-Encoding) will be added as needed.
+
+        """
+        if charset is None:
+            self.del_param('charset')
+            self._charset = None
+            return
+        if isinstance(charset, str):
+            charset = Charset.Charset(charset)
+        if not isinstance(charset, Charset.Charset):
+            raise TypeError(charset)
+        # BAW: should we accept strings that can serve as arguments to the
+        # Charset constructor?
+        self._charset = charset
+        if not self.has_key('MIME-Version'):
+            self.add_header('MIME-Version', '1.0')
+        if not self.has_key('Content-Type'):
+            self.add_header('Content-Type', 'text/plain',
+                            charset=charset.get_output_charset())
+        else:
+            self.set_param('charset', charset.get_output_charset())
+        if not self.has_key('Content-Transfer-Encoding'):
+            # get_body_encoding() returns either a callable (which encodes
+            # the payload and sets the header itself) or a header value
+            # string; handle both cases.
+            cte = charset.get_body_encoding()
+            try:
+                cte(self)
+            except TypeError:
+                self.add_header('Content-Transfer-Encoding', cte)
+
+    def get_charset(self):
+        """Return the Charset instance associated with the message's payload.
+        """
+        return self._charset
+
+    #
+    # MAPPING INTERFACE (partial)
+    #
+    def __len__(self):
+        """Return the total number of headers, including duplicates."""
+        return len(self._headers)
+
+    def __getitem__(self, name):
+        """Get a header value.
+
+        Return None if the header is missing instead of raising an exception.
+
+        Note that if the header appeared multiple times, exactly which
+        occurrence gets returned is undefined.  Use get_all() to get all
+        the values matching a header field name.
+        """
+        return self.get(name)
+
+    def __setitem__(self, name, val):
+        """Set the value of a header.
+
+        Note: this does not overwrite an existing header with the same field
+        name.  Use __delitem__() first to delete any existing headers.
+        """
+        self._headers.append((name, val))
+
+    def __delitem__(self, name):
+        """Delete all occurrences of a header, if present.
+
+        Does not raise an exception if the header is missing.
+        """
+        name = name.lower()
+        newheaders = []
+        for k, v in self._headers:
+            if k.lower() <> name:
+                newheaders.append((k, v))
+        self._headers = newheaders
+
+    def __contains__(self, name):
+        """Return True if the named header is present (case-insensitive)."""
+        return name.lower() in [k.lower() for k, v in self._headers]
+
+    def has_key(self, name):
+        """Return true if the message contains the header."""
+        missing = object()
+        return self.get(name, missing) is not missing
+
+    def keys(self):
+        """Return a list of all the message's header field names.
+
+        These will be sorted in the order they appeared in the original
+        message, or were added to the message, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return [k for k, v in self._headers]
+
+    def values(self):
+        """Return a list of all the message's header values.
+
+        These will be sorted in the order they appeared in the original
+        message, or were added to the message, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return [v for k, v in self._headers]
+
+    def items(self):
+        """Get all the message's header fields and values.
+
+        These will be sorted in the order they appeared in the original
+        message, or were added to the message, and may contain duplicates.
+        Any fields deleted and re-inserted are always appended to the header
+        list.
+        """
+        return self._headers[:]
+
+    def get(self, name, failobj=None):
+        """Get a header value.
+
+        Like __getitem__() but return failobj instead of None when the field
+        is missing.
+        """
+        name = name.lower()
+        for k, v in self._headers:
+            if k.lower() == name:
+                return v
+        return failobj
+
+    #
+    # Additional useful stuff
+    #
+
+    def get_all(self, name, failobj=None):
+        """Return a list of all the values for the named field.
+
+        These will be sorted in the order they appeared in the original
+        message, and may contain duplicates.  Any fields deleted and
+        re-inserted are always appended to the header list.
+
+        If no such fields exist, failobj is returned (defaults to None).
+        """
+        values = []
+        name = name.lower()
+        for k, v in self._headers:
+            if k.lower() == name:
+                values.append(v)
+        if not values:
+            return failobj
+        return values
+
+    def add_header(self, _name, _value, **_params):
+        """Extended header setting.
+
+        name is the header field to add.  keyword arguments can be used to set
+        additional parameters for the header field, with underscores converted
+        to dashes.  Normally the parameter will be added as key="value" unless
+        value is None, in which case only the key will be added.
+
+        Example:
+
+        msg.add_header('content-disposition', 'attachment', filename='bud.gif')
+        """
+        parts = []
+        for k, v in _params.items():
+            if v is None:
+                parts.append(k.replace('_', '-'))
+            else:
+                parts.append(_formatparam(k.replace('_', '-'), v))
+        if _value is not None:
+            parts.insert(0, _value)
+        self._headers.append((_name, SEMISPACE.join(parts)))
+
+    def replace_header(self, _name, _value):
+        """Replace a header.
+
+        Replace the first matching header found in the message, retaining
+        header order and case.  If no matching header was found, a KeyError is
+        raised.
+        """
+        _name = _name.lower()
+        for i, (k, v) in zip(range(len(self._headers)), self._headers):
+            if k.lower() == _name:
+                self._headers[i] = (k, _value)
+                break
+        else:
+            raise KeyError(_name)
+
+    #
+    # Deprecated methods.  These will be removed in email 3.1.
+    #
+
+    def get_type(self, failobj=None):
+        """Returns the message's content type.
+
+        The returned string is coerced to lowercase and returned as a single
+        string of the form `maintype/subtype'.  If there was no Content-Type
+        header in the message, failobj is returned (defaults to None).
+        """
+        warnings.warn('get_type() deprecated; use get_content_type()',
+                      DeprecationWarning, 2)
+        missing = object()
+        value = self.get('content-type', missing)
+        if value is missing:
+            return failobj
+        return paramre.split(value)[0].lower().strip()
+
+    def get_main_type(self, failobj=None):
+        """Return the message's main content type if present."""
+        warnings.warn('get_main_type() deprecated; use get_content_maintype()',
+                      DeprecationWarning, 2)
+        missing = object()
+        ctype = self.get_type(missing)
+        if ctype is missing:
+            return failobj
+        if ctype.count('/') <> 1:
+            return failobj
+        return ctype.split('/')[0]
+
+    def get_subtype(self, failobj=None):
+        """Return the message's content subtype if present."""
+        warnings.warn('get_subtype() deprecated; use get_content_subtype()',
+                      DeprecationWarning, 2)
+        missing = object()
+        ctype = self.get_type(missing)
+        if ctype is missing:
+            return failobj
+        if ctype.count('/') <> 1:
+            return failobj
+        return ctype.split('/')[1]
+
+    #
+    # Use these three methods instead of the three above.
+    #
+
+    def get_content_type(self):
+        """Return the message's content type.
+
+        The returned string is coerced to lower case of the form
+        `maintype/subtype'.  If there was no Content-Type header in the
+        message, the default type as given by get_default_type() will be
+        returned.  Since according to RFC 2045, messages always have a default
+        type this will always return a value.
+
+        RFC 2045 defines a message's default type to be text/plain unless it
+        appears inside a multipart/digest container, in which case it would be
+        message/rfc822.
+        """
+        missing = object()
+        value = self.get('content-type', missing)
+        if value is missing:
+            # This should have no parameters
+            return self.get_default_type()
+        ctype = paramre.split(value)[0].lower().strip()
+        # RFC 2045, section 5.2 says if it's invalid, use text/plain
+        if ctype.count('/') <> 1:
+            return 'text/plain'
+        return ctype
+
+    def get_content_maintype(self):
+        """Return the message's main content type.
+
+        This is the `maintype' part of the string returned by
+        get_content_type().
+        """
+        ctype = self.get_content_type()
+        return ctype.split('/')[0]
+
+    def get_content_subtype(self):
+        """Returns the message's sub-content type.
+
+        This is the `subtype' part of the string returned by
+        get_content_type().
+        """
+        ctype = self.get_content_type()
+        return ctype.split('/')[1]
+
+    def get_default_type(self):
+        """Return the `default' content type.
+
+        Most messages have a default content type of text/plain, except for
+        messages that are subparts of multipart/digest containers.  Such
+        subparts have a default content type of message/rfc822.
+        """
+        return self._default_type
+
+    def set_default_type(self, ctype):
+        """Set the `default' content type.
+
+        ctype should be either "text/plain" or "message/rfc822", although this
+        is not enforced.  The default content type is not stored in the
+        Content-Type header.
+        """
+        self._default_type = ctype
+
+    def _get_params_preserve(self, failobj, header):
+        # Like get_params() but preserves the quoting of values.  BAW:
+        # should this be part of the public interface?
+        missing = object()
+        value = self.get(header, missing)
+        if value is missing:
+            return failobj
+        params = []
+        for p in _parseparam(';' + value):
+            try:
+                name, val = p.split('=', 1)
+                name = name.strip()
+                val = val.strip()
+            except ValueError:
+                # Must have been a bare attribute
+                name = p.strip()
+                val = ''
+            params.append((name, val))
+        params = Utils.decode_params(params)
+        return params
+
+    def get_params(self, failobj=None, header='content-type', unquote=True):
+        """Return the message's Content-Type parameters, as a list.
+
+        The elements of the returned list are 2-tuples of key/value pairs, as
+        split on the `=' sign.  The left hand side of the `=' is the key,
+        while the right hand side is the value.  If there is no `=' sign in
+        the parameter the value is the empty string.  The value is as
+        described in the get_param() method.
+
+        Optional failobj is the object to return if there is no Content-Type
+        header.  Optional header is the header to search instead of
+        Content-Type.  If unquote is True, the value is unquoted.
+        """
+        missing = object()
+        params = self._get_params_preserve(missing, header)
+        if params is missing:
+            return failobj
+        if unquote:
+            return [(k, _unquotevalue(v)) for k, v in params]
+        else:
+            return params
+
+    def get_param(self, param, failobj=None, header='content-type',
+                  unquote=True):
+        """Return the parameter value if found in the Content-Type header.
+
+        Optional failobj is the object to return if there is no Content-Type
+        header, or the Content-Type header has no such parameter.  Optional
+        header is the header to search instead of Content-Type.
+
+        Parameter keys are always compared case insensitively.  The return
+        value can either be a string, or a 3-tuple if the parameter was RFC
+        2231 encoded.  When it's a 3-tuple, the elements of the value are of
+        the form (CHARSET, LANGUAGE, VALUE).  Note that both CHARSET and
+        LANGUAGE can be None, in which case you should consider VALUE to be
+        encoded in the us-ascii charset.  You can usually ignore LANGUAGE.
+
+        Your application should be prepared to deal with 3-tuple return
+        values, and can convert the parameter to a Unicode string like so:
+
+            param = msg.get_param('foo')
+            if isinstance(param, tuple):
+                param = unicode(param[2], param[0] or 'us-ascii')
+
+        In any case, the parameter value (either the returned string, or the
+        VALUE item in the 3-tuple) is always unquoted, unless unquote is set
+        to False.
+        """
+        if not self.has_key(header):
+            return failobj
+        for k, v in self._get_params_preserve(failobj, header):
+            if k.lower() == param.lower():
+                if unquote:
+                    return _unquotevalue(v)
+                else:
+                    return v
+        return failobj
+
+    def set_param(self, param, value, header='Content-Type', requote=True,
+                  charset=None, language=''):
+        """Set a parameter in the Content-Type header.
+
+        If the parameter already exists in the header, its value will be
+        replaced with the new value.
+
+        If header is Content-Type and has not yet been defined for this
+        message, it will be set to "text/plain" and the new parameter and
+        value will be appended as per RFC 2045.
+
+        An alternate header can specified in the header argument, and all
+        parameters will be quoted as necessary unless requote is False.
+
+        If charset is specified, the parameter will be encoded according to RFC
+        2231.  Optional language specifies the RFC 2231 language, defaulting
+        to the empty string.  Both charset and language should be strings.
+        """
+        if not isinstance(value, tuple) and charset:
+            value = (charset, language, value)
+
+        if not self.has_key(header) and header.lower() == 'content-type':
+            ctype = 'text/plain'
+        else:
+            ctype = self.get(header)
+        if not self.get_param(param, header=header):
+            # Parameter not present yet: append it to the header value.
+            if not ctype:
+                ctype = _formatparam(param, value, requote)
+            else:
+                ctype = SEMISPACE.join(
+                    [ctype, _formatparam(param, value, requote)])
+        else:
+            # Parameter already present: rebuild the header value,
+            # substituting the new value in place.
+            ctype = ''
+            for old_param, old_value in self.get_params(header=header,
+                                                        unquote=requote):
+                append_param = ''
+                if old_param.lower() == param.lower():
+                    append_param = _formatparam(param, value, requote)
+                else:
+                    append_param = _formatparam(old_param, old_value, requote)
+                if not ctype:
+                    ctype = append_param
+                else:
+                    ctype = SEMISPACE.join([ctype, append_param])
+        if ctype <> self.get(header):
+            del self[header]
+            self[header] = ctype
+
+    def del_param(self, param, header='content-type', requote=True):
+        """Remove the given parameter completely from the Content-Type header.
+
+        The header will be re-written in place without the parameter or its
+        value. All values will be quoted as necessary unless requote is
+        False.  Optional header specifies an alternative to the Content-Type
+        header.
+        """
+        if not self.has_key(header):
+            return
+        new_ctype = ''
+        for p, v in self.get_params(header=header, unquote=requote):
+            if p.lower() <> param.lower():
+                if not new_ctype:
+                    new_ctype = _formatparam(p, v, requote)
+                else:
+                    new_ctype = SEMISPACE.join([new_ctype,
+                                                _formatparam(p, v, requote)])
+        if new_ctype <> self.get(header):
+            del self[header]
+            self[header] = new_ctype
+
+    def set_type(self, type, header='Content-Type', requote=True):
+        """Set the main type and subtype for the Content-Type header.
+
+        type must be a string in the form "maintype/subtype", otherwise a
+        ValueError is raised.
+
+        This method replaces the Content-Type header, keeping all the
+        parameters in place.  If requote is False, this leaves the existing
+        header's quoting as is.  Otherwise, the parameters will be quoted (the
+        default).
+
+        An alternative header can be specified in the header argument.  When
+        the Content-Type header is set, we'll always also add a MIME-Version
+        header.
+        """
+        # BAW: should we be strict?
+        if not type.count('/') == 1:
+            raise ValueError
+        # Set the Content-Type, you get a MIME-Version
+        if header.lower() == 'content-type':
+            del self['mime-version']
+            self['MIME-Version'] = '1.0'
+        if not self.has_key(header):
+            self[header] = type
+            return
+        params = self.get_params(header=header, unquote=requote)
+        del self[header]
+        self[header] = type
+        # Skip the first param; it's the old type.
+        for p, v in params[1:]:
+            self.set_param(p, v, header, requote)
+
+    def get_filename(self, failobj=None):
+        """Return the filename associated with the payload if present.
+
+        The filename is extracted from the Content-Disposition header's
+        `filename' parameter, and it is unquoted.
+        """
+        missing = object()
+        filename = self.get_param('filename', missing, 'content-disposition')
+        if filename is missing:
+            return failobj
+        return Utils.collapse_rfc2231_value(filename).strip()
+
+    def get_boundary(self, failobj=None):
+        """Return the boundary associated with the payload if present.
+
+        The boundary is extracted from the Content-Type header's `boundary'
+        parameter, and it is unquoted.
+        """
+        missing = object()
+        boundary = self.get_param('boundary', missing)
+        if boundary is missing:
+            return failobj
+        # RFC 2046 says that boundaries may begin but not end in w/s
+        return Utils.collapse_rfc2231_value(boundary).rstrip()
+
+    def set_boundary(self, boundary):
+        """Set the boundary parameter in Content-Type to 'boundary'.
+
+        This is subtly different than deleting the Content-Type header and
+        adding a new one with a new boundary parameter via add_header().  The
+        main difference is that using the set_boundary() method preserves the
+        order of the Content-Type header in the original message.
+
+        HeaderParseError is raised if the message has no Content-Type header.
+        """
+        missing = object()
+        params = self._get_params_preserve(missing, 'content-type')
+        if params is missing:
+            # There was no Content-Type header, and we don't know what type
+            # to set it to, so raise an exception.
+            raise Errors.HeaderParseError, 'No Content-Type header found'
+        newparams = []
+        foundp = False
+        for pk, pv in params:
+            if pk.lower() == 'boundary':
+                newparams.append(('boundary', '"%s"' % boundary))
+                foundp = True
+            else:
+                newparams.append((pk, pv))
+        if not foundp:
+            # The original Content-Type header had no boundary attribute.
+            # Tack one on the end.  BAW: should we raise an exception
+            # instead???
+            newparams.append(('boundary', '"%s"' % boundary))
+        # Replace the existing Content-Type header with the new value
+        newheaders = []
+        for h, v in self._headers:
+            if h.lower() == 'content-type':
+                parts = []
+                for k, v in newparams:
+                    if v == '':
+                        parts.append(k)
+                    else:
+                        parts.append('%s=%s' % (k, v))
+                newheaders.append((h, SEMISPACE.join(parts)))
+
+            else:
+                newheaders.append((h, v))
+        self._headers = newheaders
+
+    def get_content_charset(self, failobj=None):
+        """Return the charset parameter of the Content-Type header.
+
+        The returned string is always coerced to lower case.  If there is no
+        Content-Type header, or if that header has no charset parameter,
+        failobj is returned.
+        """
+        missing = object()
+        charset = self.get_param('charset', missing)
+        if charset is missing:
+            return failobj
+        if isinstance(charset, tuple):
+            # RFC 2231 encoded, so decode it, and it better end up as ascii.
+            pcharset = charset[0] or 'us-ascii'
+            charset = unicode(charset[2], pcharset).encode('us-ascii')
+        # RFC 2046, $4.1.2 says charsets are not case sensitive
+        return charset.lower()
+
+    def get_charsets(self, failobj=None):
+        """Return a list containing the charset(s) used in this message.
+
+        The returned list of items describes the Content-Type headers'
+        charset parameter for this message and all the subparts in its
+        payload.
+
+        Each item will either be a string (the value of the charset parameter
+        in the Content-Type header of that part) or the value of the
+        'failobj' parameter (defaults to None), if the part does not have a
+        main MIME type of "text", or the charset is not defined.
+
+        The list will contain one string for each part of the message, plus
+        one for the container message (i.e. self), so that a non-multipart
+        message will still return a list of length 1.
+        """
+        return [part.get_content_charset(failobj) for part in self.walk()]
+
+    # I.e. def walk(self): ...
+    from email.Iterators import walk
diff --git a/depot_tools/release/win/python_24/Lib/email/Parser.py b/depot_tools/release/win/python_24/Lib/email/Parser.py
new file mode 100644
index 0000000..0c052244
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Parser.py
@@ -0,0 +1,88 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw, Thomas Wouters, Anthony Baxter
+# Contact: email-sig@python.org
+
+"""A parser of RFC 2822 and MIME email messages."""
+
+import warnings
+from cStringIO import StringIO
+from email.FeedParser import FeedParser
+from email.Message import Message
+
+
+
class Parser:
    def __init__(self, *args, **kws):
        """Parser of RFC 2822 and MIME email messages.

        Creates an in-memory object tree representing the email message,
        which can then be manipulated and turned over to a Generator to
        return the textual representation of the message.

        The string must be formatted as a block of RFC 2822 headers and
        header continuation lines, optionally preceded by a `Unix-from'
        header.  The header block is terminated either by the end of the
        string or by a blank line.

        _class is the class to instantiate for new message objects when they
        must be created.  This class must have a constructor that can take
        zero arguments.  Default is Message.Message.

        The deprecated `strict' argument is accepted (positionally or by
        keyword) but ignored.
        """
        # Accept the historical positional forms (_class[, strict]) while
        # rejecting positional/keyword conflicts.
        if len(args) >= 1:
            if '_class' in kws:
                raise TypeError("Multiple values for keyword arg '_class'")
            kws['_class'] = args[0]
        if len(args) == 2:
            if 'strict' in kws:
                raise TypeError("Multiple values for keyword arg 'strict'")
            kws['strict'] = args[1]
        if len(args) > 2:
            raise TypeError('Too many arguments')
        # Factory used for new message objects; defaults to Message.
        self._class = kws.pop('_class', Message)
        if 'strict' in kws:
            # stacklevel 2 points the warning at the caller, not here.
            warnings.warn("'strict' argument is deprecated (and ignored)",
                          DeprecationWarning, 2)
            del kws['strict']
        if kws:
            raise TypeError('Unexpected keyword arguments')

    def parse(self, fp, headersonly=False):
        """Create a message structure from the data in a file.

        Reads all the data from the file and returns the root of the message
        structure.  Optional headersonly is a flag specifying whether to stop
        parsing after reading the headers or not.  The default is False,
        meaning it parses the entire contents of the file.
        """
        feedparser = FeedParser(self._class)
        if headersonly:
            feedparser._set_headersonly()
        # Feed the file to the incremental parser in 8K chunks.
        while True:
            chunk = fp.read(8192)
            if not chunk:
                break
            feedparser.feed(chunk)
        return feedparser.close()

    def parsestr(self, text, headersonly=False):
        """Create a message structure from a string.

        Returns the root of the message structure.  Optional headersonly is a
        flag specifying whether to stop parsing after reading the headers or
        not.  The default is False, meaning it parses the entire contents of
        the file.
        """
        return self.parse(StringIO(text), headersonly=headersonly)
+
+
+
class HeaderParser(Parser):
    """A Parser that always stops parsing after the header block.

    The headersonly arguments are accepted for interface compatibility but
    ignored: True is always forwarded to the base class.
    """

    def parse(self, fp, headersonly=True):
        return Parser.parse(self, fp, True)

    def parsestr(self, text, headersonly=True):
        return Parser.parsestr(self, text, True)
diff --git a/depot_tools/release/win/python_24/Lib/email/Utils.py b/depot_tools/release/win/python_24/Lib/email/Utils.py
new file mode 100644
index 0000000..9ba7601
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/Utils.py
@@ -0,0 +1,291 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""Miscellaneous utilities."""
+
+import os
+import re
+import time
+import base64
+import random
+import socket
+import warnings
+from cStringIO import StringIO
+
+from email._parseaddr import quote
+from email._parseaddr import AddressList as _AddressList
+from email._parseaddr import mktime_tz
+
+# We need wormarounds for bugs in these methods in older Pythons (see below)
+from email._parseaddr import parsedate as _parsedate
+from email._parseaddr import parsedate_tz as _parsedate_tz
+
+from quopri import decodestring as _qdecode
+
+# Intrapackage imports
+from email.Encoders import _bencode, _qencode
+
# String constants reused throughout this module.
COMMASPACE = ', '
EMPTYSTRING = ''
UEMPTYSTRING = u''
CRLF = '\r\n'

# specialsre matches characters that are special in an RFC 2822 phrase;
# escapesre matches the subset that must be backslash-escaped when quoted.
specialsre = re.compile(r'[][\\()<>@,:;".]')
escapesre = re.compile(r'[][\\()"]')
+
+
+
+# Helpers
+
+def _identity(s):
+    return s
+
+
def _bdecode(s):
    # base64.decodestring() can leave a trailing newline on the decoded
    # value; strip it when the *input* did not itself end with a newline.
    # NOTE(review): the original comment here mentioned encodestring(); the
    # code below only adjusts the decoded result.
    if not s:
        return s
    decoded = base64.decodestring(s)
    if decoded.endswith('\n') and not s.endswith('\n'):
        return decoded[:-1]
    return decoded
+
+
+
def fix_eols(s):
    """Replace all line-ending characters with CRLF."""
    # Normalize every bare CR, bare LF, or existing CRLF pair to CRLF in a
    # single pass.  The CRLF alternative is listed first so that existing
    # pairs match whole and are left intact.
    return re.sub(r'\r\n|\r|\n', '\r\n', s)
+
+
+
def formataddr(pair):
    """The inverse of parseaddr(), this takes a 2-tuple of the form
    (realname, email_address) and returns the string value suitable
    for an RFC 2822 From, To or Cc header.

    If the first element of pair is false, then the second element is
    returned unmodified.
    """
    realname, address = pair
    if not realname:
        return address
    quotes = ''
    # Quote the realname if it contains any phrase-special characters...
    if re.search(r'[][\\()<>@,:;".]', realname):
        quotes = '"'
    # ...and backslash-escape the characters that need it inside quotes.
    realname = re.sub(r'([][\\()"])', r'\\\1', realname)
    return '%s%s%s <%s>' % (quotes, realname, quotes, address)
+
+
+
def getaddresses(fieldvalues):
    """Return a list of (REALNAME, EMAIL) for each fieldvalue."""
    # Join all the field values into one comma-separated string and let
    # the address parser split out the individual addresses.
    joined = COMMASPACE.join(fieldvalues)
    return _AddressList(joined).addresslist
+
+
+
# Matches an RFC 2047 encoded-word, e.g. "=?iso-8859-1?q?text?=", capturing
# the charset, the encoding letter (q or b), and the encoded atom.
ecre = re.compile(r'''
  =\?                   # literal =?
  (?P<charset>[^?]*?)   # non-greedy up to the next ? is the charset
  \?                    # literal ?
  (?P<encoding>[qb])    # either a "q" or a "b", case insensitive
  \?                    # literal ?
  (?P<atom>.*?)         # non-greedy up to the next ?= is the atom
  \?=                   # literal ?=
  ''', re.VERBOSE | re.IGNORECASE)
+
+
+
def formatdate(timeval=None, localtime=False, usegmt=False):
    """Returns a date string as specified by RFC 2822, e.g.:

    Fri, 09 Nov 2001 01:08:47 -0000

    Optional timeval if given is a floating point time value as accepted by
    gmtime() and localtime(), otherwise the current time is used.

    Optional localtime is a flag that when True, interprets timeval, and
    returns a date relative to the local timezone instead of UTC, properly
    taking daylight savings time into account.

    Optional argument usegmt means that the timezone is written out as
    an ascii string, not numeric one (so "GMT" instead of "+0000"). This
    is needed for HTTP, and is only used when localtime==False.
    """
    # Note: we cannot use strftime() because that honors the locale and RFC
    # 2822 requires that day and month names be the English abbreviations.
    if timeval is None:
        timeval = time.time()
    if localtime:
        stamp = time.localtime(timeval)
        # Pick the DST offset when the zone observes DST and it is in
        # effect for this timestamp (stamp[-1] is tm_isdst).
        if time.daylight and stamp[-1]:
            offset = time.altzone
        else:
            offset = time.timezone
        hours, leftover = divmod(abs(offset), 3600)
        # offset is seconds *west* of UTC but the printed zone is east of
        # UTC, hence the sign flip.
        if offset > 0:
            sign = '-'
        else:
            sign = '+'
        zone = '%s%02d%02d' % (sign, hours, leftover // 60)
    else:
        stamp = time.gmtime(timeval)
        # Timezone offset is always -0000 for UTC output.
        if usegmt:
            zone = 'GMT'
        else:
            zone = '-0000'
    days = ('Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun')
    months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
              'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')
    return '%s, %02d %s %04d %02d:%02d:%02d %s' % (
        days[stamp[6]], stamp[2], months[stamp[1] - 1],
        stamp[0], stamp[3], stamp[4], stamp[5], zone)
+
+
+
def make_msgid(idstring=None):
    """Returns a string suitable for RFC 2822 compliant Message-ID, e.g:

    <20020201195627.33539.96671@nightshade.la.mastaler.com>

    Optional idstring if given is a string used to strengthen the
    uniqueness of the message id.
    """
    # Uniqueness comes from the current UTC time, the process id, a random
    # number, and the local fully-qualified host name.
    utcdate = time.strftime('%Y%m%d%H%M%S', time.gmtime(time.time()))
    if idstring is None:
        idstring = ''
    else:
        idstring = '.' + idstring
    return '<%s.%s.%s%s@%s>' % (utcdate, os.getpid(),
                                random.randrange(100000), idstring,
                                socket.getfqdn())
+
+
+
+# These functions are in the standalone mimelib version only because they've
+# subsequently been fixed in the latest Python versions.  We use this to worm
+# around broken older Pythons.
def parsedate(data):
    """Parse a date string, returning None for empty/false input.

    Delegates to email._parseaddr.parsedate; the guard worms around older
    Pythons that mishandled empty input.
    """
    if data:
        return _parsedate(data)
    return None
+
+
def parsedate_tz(data):
    """Parse a date string with timezone, returning None for empty input.

    Delegates to email._parseaddr.parsedate_tz; the guard worms around
    older Pythons that mishandled empty input.
    """
    if data:
        return _parsedate_tz(data)
    return None
+
+
def parseaddr(addr):
    """Parse addr into its first (realname, email_address) 2-tuple.

    Returns ('', '') when nothing could be parsed.
    """
    parsed = _AddressList(addr).addresslist
    if parsed:
        return parsed[0]
    return '', ''
+
+
+# rfc822.unquote() doesn't properly de-backslash-ify in Python pre-2.3.
def unquote(str):
    """Remove quotes from a string."""
    # Strings of length 0 or 1 cannot carry a quote pair.
    if len(str) <= 1:
        return str
    if str.startswith('"') and str.endswith('"'):
        # Strip the quotes and undo backslash escaping.
        return str[1:-1].replace('\\\\', '\\').replace('\\"', '"')
    if str.startswith('<') and str.endswith('>'):
        # Angle-bracketed address: just strip the brackets.
        return str[1:-1]
    return str
+
+
+
+# RFC2231-related functions - parameter encoding and decoding
def decode_rfc2231(s):
    """Decode string according to RFC 2231.

    Returns (charset, language, value); charset and language are None when
    the string carries no charset'language' prefix.
    """
    import urllib
    parts = s.split("'", 2)
    if len(parts) == 1:
        return None, None, urllib.unquote(s)
    # NOTE(review): exactly one quote (two parts) raises ValueError here,
    # matching the historical behavior.
    charset, language, coded = parts
    return charset, language, urllib.unquote(coded)
+
+
def encode_rfc2231(s, charset=None, language=None):
    """Encode string according to RFC 2231.

    If neither charset nor language is given, then s is returned as-is.  If
    charset is given but not language, the string is encoded using the empty
    string for language.
    """
    import urllib
    quoted = urllib.quote(s, safe='')
    if charset is None and language is None:
        return quoted
    if language is None:
        language = ''
    return "%s'%s'%s" % (charset, language, quoted)
+
+
# Matches RFC 2231 continuation-style parameter names such as "title*0" or
# "title*1*": a base name, a '*', an optional decimal section number, and an
# optional trailing '*' marking an extended (charset-encoded) section.
rfc2231_continuation = re.compile(r'^(?P<name>\w+)\*((?P<num>[0-9]+)\*?)?$')

def decode_params(params):
    """Decode parameters list according to RFC 2231.

    params is a sequence of 2-tuples containing (content type, string value).
    The first tuple is passed through untouched; continuation sections are
    collected, reassembled in section order, and appended as
    (name, (charset, language, quoted-value)) tuples.
    """
    new_params = []
    # maps parameter's name to a list of continuations
    rfc2231_params = {}
    # params is a sequence of 2-tuples containing (content_type, string value)
    name, value = params[0]
    new_params.append((name, value))
    # Cycle through each of the rest of the parameters.
    for name, value in params[1:]:
        value = unquote(value)
        mo = rfc2231_continuation.match(name)
        if mo:
            # A continuation section: remember it under its base name,
            # keyed by section number (None when no number was given).
            name, num = mo.group('name', 'num')
            if num is not None:
                num = int(num)
            rfc2231_param1 = rfc2231_params.setdefault(name, [])
            rfc2231_param1.append((num, value))
        else:
            new_params.append((name, '"%s"' % quote(value)))
    if rfc2231_params:
        for name, continuations in rfc2231_params.items():
            value = []
            # Sort by number
            continuations.sort()
            # And now append all values in num order
            for num, continuation in continuations:
                value.append(continuation)
            # Split any leading charset'language' prefix off the
            # reassembled value per RFC 2231.
            charset, language, value = decode_rfc2231(EMPTYSTRING.join(value))
            new_params.append(
                (name, (charset, language, '"%s"' % quote(value))))
    return new_params
+
def collapse_rfc2231_value(value, errors='replace',
                           fallback_charset='us-ascii'):
    """Collapse an RFC 2231 parameter value into a single string.

    A tuple value is (charset, language, raw): the raw part is unquoted and
    decoded to unicode using its charset (us-ascii when empty), falling back
    to fallback_charset when the charset is unknown to Python.  A plain
    string value is simply unquoted.
    """
    if not isinstance(value, tuple):
        return unquote(value)
    rawval = unquote(value[2])
    codec = value[0] or 'us-ascii'
    try:
        return unicode(rawval, codec, errors)
    except LookupError:
        # Charset unknown to Python -- decode with the fallback instead.
        return unicode(rawval, fallback_charset, errors)
diff --git a/depot_tools/release/win/python_24/Lib/email/__init__.py b/depot_tools/release/win/python_24/Lib/email/__init__.py
new file mode 100644
index 0000000..e622b3f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/__init__.py
@@ -0,0 +1,52 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Barry Warsaw
+# Contact: email-sig@python.org
+
+"""A package for parsing, handling, and generating email messages."""
+
# Version of this email package.
__version__ = '3.0+'

# Names exported by `from email import *`: the package's submodules plus the
# two convenience parsing functions defined below.
__all__ = [
    'base64MIME',
    'Charset',
    'Encoders',
    'Errors',
    'Generator',
    'Header',
    'Iterators',
    'Message',
    'MIMEAudio',
    'MIMEBase',
    'MIMEImage',
    'MIMEMessage',
    'MIMEMultipart',
    'MIMENonMultipart',
    'MIMEText',
    'Parser',
    'quopriMIME',
    'Utils',
    'message_from_string',
    'message_from_file',
    ]
+
+
+
+# Some convenience routines.  Don't import Parser and Message as side-effects
+# of importing email since those cascadingly import most of the rest of the
+# email package.
def message_from_string(s, *args, **kws):
    """Parse a string into a Message object model.

    Optional _class and strict are passed to the Parser constructor.
    """
    # Imported here so that importing email does not cascade into the rest
    # of the package.
    from email.Parser import Parser
    parser = Parser(*args, **kws)
    return parser.parsestr(s)
+
+
def message_from_file(fp, *args, **kws):
    """Read a file and parse its contents into a Message object model.

    Optional _class and strict are passed to the Parser constructor.
    """
    # Imported here so that importing email does not cascade into the rest
    # of the package.
    from email.Parser import Parser
    parser = Parser(*args, **kws)
    return parser.parse(fp)
diff --git a/depot_tools/release/win/python_24/Lib/email/_parseaddr.py b/depot_tools/release/win/python_24/Lib/email/_parseaddr.py
new file mode 100644
index 0000000..f6efcd5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/_parseaddr.py
@@ -0,0 +1,471 @@
+# Copyright (C) 2002-2004 Python Software Foundation
+# Contact: email-sig@python.org
+
+"""Email address parsing code.
+
+Lifted directly from rfc822.py.  This should eventually be rewritten.
+"""
+
+import time
+
SPACE = ' '
EMPTYSTRING = ''
COMMASPACE = ', '

# Tables used when parsing RFC 2822 date fields.  Month names include both
# the three-letter abbreviations and the full names (all lower case).
_monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
               'aug', 'sep', 'oct', 'nov', 'dec',
               'january', 'february', 'march', 'april', 'may', 'june', 'july',
               'august', 'september', 'october', 'november', 'december']

_daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']

# The timezone table does not include the military time zones defined
# in RFC822, other than Z.  According to RFC1123, the description in
# RFC822 gets the signs wrong, so we can't rely on any such time
# zones.  RFC1123 recommends that numeric timezone indicators be used
# instead of timezone names.

_timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0,
              'AST': -400, 'ADT': -300,  # Atlantic (used in Canada)
              'EST': -500, 'EDT': -400,  # Eastern
              'CST': -600, 'CDT': -500,  # Central
              'MST': -700, 'MDT': -600,  # Mountain
              'PST': -800, 'PDT': -700   # Pacific
              }


def parsedate_tz(data):
    """Convert a date string to a time tuple.

    Returns a 10-tuple (year, month, day, hour, minute, second, weekday,
    yearday, isdst, tz-offset-in-seconds) suitable for mktime_tz(), or None
    when the string cannot be parsed.  The weekday/yearday/isdst slots are
    filled with fixed dummy values (0, 1, 0); the offset slot is None when
    no recognizable timezone is present.  Accounts for military timezones.
    """
    data = data.split()
    # The FWS after the comma after the day-of-week is optional, so search and
    # adjust for this.
    if data[0].endswith(',') or data[0].lower() in _daynames:
        # There's a dayname here. Skip it
        del data[0]
    else:
        i = data[0].rfind(',')
        if i >= 0:
            data[0] = data[0][i+1:]
    if len(data) == 3: # RFC 850 date, deprecated
        stuff = data[0].split('-')
        if len(stuff) == 3:
            data = stuff + data[1:]
    if len(data) == 4:
        s = data[3]
        i = s.find('+')
        if i > 0:
            data[3:] = [s[:i], s[i+1:]]
        else:
            data.append('') # Dummy tz
    if len(data) < 5:
        return None
    data = data[:5]
    [dd, mm, yy, tm, tz] = data
    mm = mm.lower()
    if mm not in _monthnames:
        # Day and month may be swapped (obsolete syntax); try the other way.
        dd, mm = mm, dd.lower()
        if mm not in _monthnames:
            return None
    mm = _monthnames.index(mm) + 1
    if mm > 12:
        # A full month name matched; fold its index back into 1..12.
        mm -= 12
    if dd[-1] == ',':
        dd = dd[:-1]
    i = yy.find(':')
    if i > 0:
        # Year and time fields are swapped (obsolete syntax).
        yy, tm = tm, yy
    if yy[-1] == ',':
        yy = yy[:-1]
    if not yy[0].isdigit():
        # Year and timezone fields are swapped (obsolete syntax).
        yy, tz = tz, yy
    if tm[-1] == ',':
        tm = tm[:-1]
    tm = tm.split(':')
    if len(tm) == 2:
        [thh, tmm] = tm
        tss = '0'
    elif len(tm) == 3:
        [thh, tmm, tss] = tm
    else:
        return None
    try:
        yy = int(yy)
        dd = int(dd)
        thh = int(thh)
        tmm = int(tmm)
        tss = int(tss)
    except ValueError:
        return None
    tzoffset = None
    tz = tz.upper()
    # Membership test instead of the deprecated dict.has_key() -- same
    # semantics, valid on Python 2.4 and required on Python 3.
    if tz in _timezones:
        tzoffset = _timezones[tz]
    else:
        try:
            tzoffset = int(tz)
        except ValueError:
            pass
    # Convert a timezone offset into seconds ; -0500 -> -18000
    if tzoffset:
        if tzoffset < 0:
            tzsign = -1
            tzoffset = -tzoffset
        else:
            tzsign = 1
        tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
    tuple = (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset)
    return tuple
+
+
def parsedate(data):
    """Convert a time string to a time tuple."""
    parsed = parsedate_tz(data)
    if not isinstance(parsed, tuple):
        # Pass the failure value (None) straight through.
        return parsed
    # Drop the timezone offset, leaving a standard 9-element time tuple.
    return parsed[:9]
+
+
def mktime_tz(data):
    """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
    tzoffset = data[9]
    if tzoffset is None:
        # No zone info, so localtime is better assumption than GMT
        return time.mktime(data[:8] + (-1,))
    # Interpret the tuple as local *standard* time (isdst=0), then shift by
    # the tuple's own offset and the local standard offset to reach UTC.
    stamp = time.mktime(data[:8] + (0,))
    return stamp - tzoffset - time.timezone
+
+
def quote(str):
    """Backslash-escape backslash and double-quote characters in a string."""
    escaped = str.replace('\\', '\\\\')
    return escaped.replace('"', '\\"')
+
+
class AddrlistClass:
    """Address parser class by Ben Escoto.

    To understand what this class does, it helps to have a copy of RFC 2822 in
    front of you.

    Note: this class interface is deprecated and may be removed in the future.
    Use rfc822.AddressList instead.
    """

    def __init__(self, field):
        """Initialize a new instance.

        `field' is an unparsed address header field, containing
        one or more addresses.
        """
        # self.pos is the parse cursor into self.field; the various
        # character classes below delimit atoms, phrases and specials.
        self.specials = '()<>@,:;.\"[]'
        self.pos = 0
        self.LWS = ' \t'
        self.CR = '\r\n'
        self.atomends = self.specials + self.LWS + self.CR
        # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it
        # is obsolete syntax.  RFC 2822 requires that we recognize obsolete
        # syntax, so allow dots in phrases.
        self.phraseends = self.atomends.replace('.', '')
        self.field = field
        self.commentlist = []

    def gotonext(self):
        """Parse up to the start of the next address."""
        # Skip whitespace and line breaks, accumulating any comments seen
        # along the way into self.commentlist.
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS + '\n\r':
                self.pos += 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            else:
                break

    def getaddrlist(self):
        """Parse all addresses.

        Returns a list containing all of the addresses.
        """
        result = []
        while self.pos < len(self.field):
            ad = self.getaddress()
            if ad:
                result += ad
            else:
                # Nothing parsable here; record an empty address and go on.
                result.append(('', ''))
        return result

    def getaddress(self):
        """Parse the next address."""
        self.commentlist = []
        self.gotonext()

        # Remember where we are in case this turns out to be a bare
        # addrspec and we need to re-parse from the start.
        oldpos = self.pos
        oldcl = self.commentlist
        plist = self.getphraselist()

        self.gotonext()
        returnlist = []

        if self.pos >= len(self.field):
            # Bad email address technically, no domain.
            if plist:
                returnlist = [(SPACE.join(self.commentlist), plist[0])]

        elif self.field[self.pos] in '.@':
            # email address is just an addrspec
            # this isn't very efficient since we start over
            self.pos = oldpos
            self.commentlist = oldcl
            addrspec = self.getaddrspec()
            returnlist = [(SPACE.join(self.commentlist), addrspec)]

        elif self.field[self.pos] == ':':
            # address is a group
            returnlist = []

            fieldlen = len(self.field)
            self.pos += 1
            while self.pos < len(self.field):
                self.gotonext()
                if self.pos < fieldlen and self.field[self.pos] == ';':
                    self.pos += 1
                    break
                returnlist = returnlist + self.getaddress()

        elif self.field[self.pos] == '<':
            # Address is a phrase then a route addr
            routeaddr = self.getrouteaddr()

            if self.commentlist:
                returnlist = [(SPACE.join(plist) + ' (' +
                               ' '.join(self.commentlist) + ')', routeaddr)]
            else:
                returnlist = [(SPACE.join(plist), routeaddr)]

        else:
            if plist:
                returnlist = [(SPACE.join(self.commentlist), plist[0])]
            elif self.field[self.pos] in self.specials:
                self.pos += 1

        self.gotonext()
        if self.pos < len(self.field) and self.field[self.pos] == ',':
            self.pos += 1
        return returnlist

    def getrouteaddr(self):
        """Parse a route address (Return-path value).

        This method just skips all the route stuff and returns the addrspec.
        """
        if self.field[self.pos] != '<':
            return

        expectroute = False
        self.pos += 1
        self.gotonext()
        adlist = ''
        while self.pos < len(self.field):
            if expectroute:
                # An '@' was just seen: consume (and discard) a route domain.
                self.getdomain()
                expectroute = False
            elif self.field[self.pos] == '>':
                self.pos += 1
                break
            elif self.field[self.pos] == '@':
                self.pos += 1
                expectroute = True
            elif self.field[self.pos] == ':':
                self.pos += 1
            else:
                adlist = self.getaddrspec()
                self.pos += 1
                break
            self.gotonext()

        return adlist

    def getaddrspec(self):
        """Parse an RFC 2822 addr-spec."""
        aslist = []

        self.gotonext()
        while self.pos < len(self.field):
            if self.field[self.pos] == '.':
                aslist.append('.')
                self.pos += 1
            elif self.field[self.pos] == '"':
                aslist.append('"%s"' % self.getquote())
            elif self.field[self.pos] in self.atomends:
                break
            else:
                aslist.append(self.getatom())
            self.gotonext()

        # Without an '@' there is no domain; return just the local part.
        if self.pos >= len(self.field) or self.field[self.pos] != '@':
            return EMPTYSTRING.join(aslist)

        aslist.append('@')
        self.pos += 1
        self.gotonext()
        return EMPTYSTRING.join(aslist) + self.getdomain()

    def getdomain(self):
        """Get the complete domain name from an address."""
        sdlist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] == '[':
                sdlist.append(self.getdomainliteral())
            elif self.field[self.pos] == '.':
                self.pos += 1
                sdlist.append('.')
            elif self.field[self.pos] in self.atomends:
                break
            else:
                sdlist.append(self.getatom())
        return EMPTYSTRING.join(sdlist)

    def getdelimited(self, beginchar, endchars, allowcomments=True):
        """Parse a header fragment delimited by special characters.

        `beginchar' is the start character for the fragment.
        If self is not looking at an instance of `beginchar' then
        getdelimited returns the empty string.

        `endchars' is a sequence of allowable end-delimiting characters.
        Parsing stops when one of these is encountered.

        If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
        within the parsed fragment.
        """
        if self.field[self.pos] != beginchar:
            return ''

        slist = ['']
        quote = False
        self.pos += 1
        while self.pos < len(self.field):
            if quote:
                # Previous character was a backslash: take this one verbatim.
                slist.append(self.field[self.pos])
                quote = False
            elif self.field[self.pos] in endchars:
                self.pos += 1
                break
            elif allowcomments and self.field[self.pos] == '(':
                slist.append(self.getcomment())
            elif self.field[self.pos] == '\\':
                quote = True
            else:
                slist.append(self.field[self.pos])
            self.pos += 1

        return EMPTYSTRING.join(slist)

    def getquote(self):
        """Get a quote-delimited fragment from self's field."""
        return self.getdelimited('"', '"\r', False)

    def getcomment(self):
        """Get a parenthesis-delimited fragment from self's field."""
        return self.getdelimited('(', ')\r', True)

    def getdomainliteral(self):
        """Parse an RFC 2822 domain-literal."""
        return '[%s]' % self.getdelimited('[', ']\r', False)

    def getatom(self, atomends=None):
        """Parse an RFC 2822 atom.

        Optional atomends specifies a different set of end token delimiters
        (the default is to use self.atomends).  This is used e.g. in
        getphraselist() since phrase endings must not include the `.' (which
        is legal in phrases)."""
        atomlist = ['']
        if atomends is None:
            atomends = self.atomends

        while self.pos < len(self.field):
            if self.field[self.pos] in atomends:
                break
            else:
                atomlist.append(self.field[self.pos])
            self.pos += 1

        return EMPTYSTRING.join(atomlist)

    def getphraselist(self):
        """Parse a sequence of RFC 2822 phrases.

        A phrase is a sequence of words, which are in turn either RFC 2822
        atoms or quoted-strings.  Phrases are canonicalized by squeezing all
        runs of continuous whitespace into one space.
        """
        plist = []

        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '"':
                plist.append(self.getquote())
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] in self.phraseends:
                break
            else:
                plist.append(self.getatom(self.phraseends))

        return plist
+
class AddressList(AddrlistClass):
    """An AddressList encapsulates a list of parsed RFC 2822 addresses."""

    def __init__(self, field):
        AddrlistClass.__init__(self, field)
        if field:
            self.addresslist = self.getaddrlist()
        else:
            self.addresslist = []

    def __len__(self):
        return len(self.addresslist)

    def __add__(self, other):
        # Set union: everything in self, plus novel items from other.
        combined = AddressList(None)
        combined.addresslist = self.addresslist[:]
        for addr in other.addresslist:
            if addr not in self.addresslist:
                combined.addresslist.append(addr)
        return combined

    def __iadd__(self, other):
        # Set union, in-place.
        for addr in other.addresslist:
            if addr not in self.addresslist:
                self.addresslist.append(addr)
        return self

    def __sub__(self, other):
        # Set difference: items of self not present in other.
        difference = AddressList(None)
        for addr in self.addresslist:
            if addr not in other.addresslist:
                difference.addresslist.append(addr)
        return difference

    def __isub__(self, other):
        # Set difference, in-place.
        for addr in other.addresslist:
            if addr in self.addresslist:
                self.addresslist.remove(addr)
        return self

    def __getitem__(self, index):
        # Make indexing, slices, and 'in' work
        return self.addresslist[index]
diff --git a/depot_tools/release/win/python_24/Lib/email/base64MIME.py b/depot_tools/release/win/python_24/Lib/email/base64MIME.py
new file mode 100644
index 0000000..6ed1d53
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/base64MIME.py
@@ -0,0 +1,172 @@
+# Copyright (C) 2002-2004 Python Software Foundation
+# Author: Ben Gertzfield
+# Contact: email-sig@python.org
+
+"""Base64 content transfer encoding per RFCs 2045-2047.
+
+This module handles the content transfer encoding method defined in RFC 2045
+to encode arbitrary 8-bit data using the three 8-bit bytes in four 7-bit
+characters encoding known as Base64.
+
+It is used in the MIME standards for email to attach images, audio, and text
+using some 8-bit character sets to messages.
+
+This module provides an interface to encode and decode both headers and bodies
+with Base64 encoding.
+
+RFC 2045 defines a method for including character set information in an
+`encoded-word' in a header.  This method is commonly used for 8-bit real names
+in To:, From:, Cc:, etc. fields, as well as Subject: lines.
+
+This module does not do the line wrapping or end-of-line character conversion
+necessary for proper internationalized headers; it only does dumb encoding and
+decoding.  To deal with the various line wrapping issues, use the email.Header
+module.
+"""
+
+import re
+from binascii import b2a_base64, a2b_base64
+from email.Utils import fix_eols
+
CRLF = '\r\n'    # Canonical email line separator (used by fix_eols/decode).
NL = '\n'        # Default line separator for generated output.
EMPTYSTRING = ''

# See also Charset.py
# Length of the encoded-word chrome around the payload: '=?' + '?b?' + '?='.
MISC_LEN = 7
+
+
+
+# Helpers
def base64_len(s):
    """Return the length of s when it is encoded with base64."""
    # Base64 emits one 4-character group per 3 input bytes, rounding any
    # trailing partial group up to a full one.
    return 4 * ((len(s) + 2) // 3)
+
+
+
def header_encode(header, charset='iso-8859-1', keep_eols=False,
                  maxlinelen=76, eol=NL):
    """Encode a single header line with Base64 encoding in a given charset.

    Defined in RFC 2045, this Base64 encoding is identical to normal Base64
    encoding, except that each line must be intelligently wrapped (respecting
    the Base64 encoding), and subsequent lines must start with a space.

    charset names the character set used to encode the header; it defaults
    to iso-8859-1.

    End-of-line characters (\\r, \\n, \\r\\n) will be converted to the
    canonical email line separator \\r\\n unless keep_eols is True (the
    default is False).

    Each line of the result is terminated by eol (default "\\n"; use
    "\\r\\n" if the result goes directly into an email), and each line is
    wrapped at, at most, maxlinelen characters (default 76), producing
    output of the form:

    "=?charset?b?WW/5ciBtYXp66XLrIHf8eiBhIGhhbXBzdGHuciBBIFlv+XIgbWF6euly?=\\n
      =?charset?b?6yB3/HogYSBoYW1wc3Rh7nIgQkMgWW/5ciBtYXp66XLrIHf8eiBhIGhh?="
    """
    # Empty headers pass through untouched.
    if not header:
        return header

    if not keep_eols:
        header = fix_eols(header)

    # How many raw bytes fit on one line once the '=?charset?b?...?='
    # chrome is accounted for: base64 emits 4 output chars per 3 input
    # bytes.
    max_encoded = maxlinelen - len(charset) - MISC_LEN
    max_unencoded = max_encoded * 3 // 4

    chunks = []
    for start in range(0, len(header), max_unencoded):
        encoded = b2a_base64(header[start:start + max_unencoded])
        # b2a_base64() tacks a newline onto each chunk; drop it before
        # wrapping the chunk in the RFC 2047 chrome.
        if encoded.endswith(NL):
            encoded = encoded[:-1]
        chunks.append('=?%s?b?%s?=' % (charset, encoded))

    # Continuation lines begin with a single space.  BAW: should we be able
    # to specify the leading whitespace in the joiner?
    return (eol + ' ').join(chunks)
+
+
+
def encode(s, binary=True, maxlinelen=76, eol=NL):
    """Encode a string with base64.

    Each line will be wrapped at, at most, maxlinelen characters (defaults to
    76 characters).

    If binary is False, end-of-line characters will be converted to the
    canonical email end-of-line sequence \\r\\n.  Otherwise they will be left
    verbatim (this is the default).

    Each line of encoded text will end with eol, which defaults to "\\n".  Set
    this to "\\r\\n" if you will be using the result of this function directly
    in an email.
    """
    if not s:
        return s

    if not binary:
        s = fix_eols(s)

    encvec = []
    # 3 input bytes produce 4 output characters, so this many raw bytes
    # fill one maxlinelen-wide output line.
    max_unencoded = maxlinelen * 3 // 4
    for i in range(0, len(s), max_unencoded):
        # BAW: should encode() inherit b2a_base64()'s dubious behavior in
        # adding a newline to the encoded string?
        enc = b2a_base64(s[i:i + max_unencoded])
        # Substitute the caller's line terminator for the newline that
        # b2a_base64() appends.  (`!=' replaces the deprecated `<>'.)
        if enc.endswith(NL) and eol != NL:
            enc = enc[:-1] + eol
        encvec.append(enc)
    return EMPTYSTRING.join(encvec)
+
+
+# For convenience and backwards compatibility w/ standard base64 module
+body_encode = encode
+encodestring = encode
+
+
+
def decode(s, convert_eols=None):
    """Decode a raw base64 string.

    If convert_eols is set to a string value, all canonical email linefeeds,
    e.g. "\\r\\n", in the decoded text will be converted to the value of
    convert_eols.  os.linesep is a good choice for convert_eols if you are
    decoding a text attachment.

    This function does not parse a full MIME header value encoded with
    base64 (like =?iso-8859-1?b?bmloISBuaWgh?=) -- please use the high
    level email.Header class for that functionality.
    """
    # Empty input decodes to itself.
    if not s:
        return s

    decoded = a2b_base64(s)
    if not convert_eols:
        return decoded
    return decoded.replace(CRLF, convert_eols)
+
+
+# For convenience and backwards compatibility w/ standard base64 module
+body_decode = decode
+decodestring = decode
diff --git a/depot_tools/release/win/python_24/Lib/email/quopriMIME.py b/depot_tools/release/win/python_24/Lib/email/quopriMIME.py
new file mode 100644
index 0000000..a9b5d490a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/email/quopriMIME.py
@@ -0,0 +1,318 @@
+# Copyright (C) 2001-2004 Python Software Foundation
+# Author: Ben Gertzfield
+# Contact: email-sig@python.org
+
+"""Quoted-printable content transfer encoding per RFCs 2045-2047.
+
+This module handles the content transfer encoding method defined in RFC 2045
+to encode US ASCII-like 8-bit data called `quoted-printable'.  It is used to
+safely encode text that is in a character set similar to the 7-bit US ASCII
+character set, but that includes some 8-bit characters that are normally not
+allowed in email bodies or headers.
+
+Quoted-printable is very space-inefficient for encoding binary files; use the
+email.base64MIME module for that instead.
+
+This module provides an interface to encode and decode both headers and bodies
+with quoted-printable encoding.
+
+RFC 2045 defines a method for including character set information in an
+`encoded-word' in a header.  This method is commonly used for 8-bit real names
+in To:/From:/Cc: etc. fields, as well as Subject: lines.
+
+This module does not do the line wrapping or end-of-line character
+conversion necessary for proper internationalized headers; it only
+does dumb encoding and decoding.  To deal with the various line
+wrapping issues, use the email.Header module.
+"""
+
+import re
+from string import hexdigits
+from email.Utils import fix_eols
+
CRLF = '\r\n'    # Canonical email line separator.
NL = '\n'        # Default line separator for generated output.

# See also Charset.py
# Length of the encoded-word chrome around the payload: '=?' + '?q?' + '?='.
MISC_LEN = 7

# Characters needing =XX escape in a `Q'-encoded header: anything outside
# the safe set of ASCII alphanumerics, '-', '!', '*', '+', '/', and space.
hqre = re.compile(r'[^-a-zA-Z0-9!*+/ ]')
# Characters needing escape in a quoted-printable body: anything outside
# printable ASCII minus '=', plus space and tab.
bqre = re.compile(r'[^ !-<>-~\t]')
+
+
+
+# Helpers
def header_quopri_check(c):
    """Return True if the character should be escaped with header quopri."""
    return hqre.match(c) is not None
+
+
def body_quopri_check(c):
    """Return True if the character should be escaped with body quopri."""
    return bqre.match(c) is not None
+
+
def header_quopri_len(s):
    """Return the length of s when it is encoded with header quopri."""
    # Every character costs 1 byte; each escaped one costs 2 extra (=XX).
    escaped = len([ch for ch in s if hqre.match(ch)])
    return len(s) + 2 * escaped
+
+
def body_quopri_len(str):
    """Return the length of str when it is encoded with body quopri."""
    # Every character costs 1 byte; each escaped one costs 2 extra (=XX).
    escaped = len([ch for ch in str if bqre.match(ch)])
    return len(str) + 2 * escaped
+
+
+def _max_append(L, s, maxlen, extra=''):
+    if not L:
+        L.append(s.lstrip())
+    elif len(L[-1]) + len(s) <= maxlen:
+        L[-1] += extra + s
+    else:
+        L.append(s.lstrip())
+
+
def unquote(s):
    """Turn a string in the form =AB to the ASCII character with value 0xab"""
    # Skip the leading '=' and read the next two characters as hex.
    code = int(s[1:3], 16)
    return chr(code)
+
+
def quote(c):
    # Hex-escape a single character, e.g. '=' -> '=3D'.
    return '=' + '%02X' % ord(c)
+
+
+
def header_encode(header, charset="iso-8859-1", keep_eols=False,
                  maxlinelen=76, eol=NL):
    """Encode a single header line with quoted-printable (like) encoding.

    Defined in RFC 2045, this `Q' encoding is similar to quoted-printable, but
    used specifically for email header fields to allow charsets with mostly 7
    bit characters (and some 8 bit) to remain more or less readable in non-RFC
    2045 aware mail clients.

    charset names the character set used to encode the header; it defaults
    to iso-8859-1.

    End-of-line characters (\\r, \\n, \\r\\n) will be converted to the
    canonical email line separator \\r\\n unless keep_eols is True (the
    default is False).

    Each line of the result is terminated by eol (default "\\n"; use
    "\\r\\n" if the result goes directly into an email) and wrapped safely
    at, at most, maxlinelen characters (default 76).  If maxlinelen is
    None, the entire string is encoded as one chunk with no splitting.
    The output has the form:

    "=?charset?q?I_f=E2rt_in_your_g=E8n=E8ral_dire=E7tion?\\n
      =?charset?q?Silly_=C8nglish_Kn=EEghts?="
    """
    # Empty headers pass through untouched.
    if not header:
        return header

    if not keep_eols:
        header = fix_eols(header)

    # Room left on each line once the '=?charset?q?...?=' chrome is added.
    if maxlinelen is None:
        # An obnoxiously large number that's good enough
        max_encoded = 100000
    else:
        max_encoded = maxlinelen - len(charset) - MISC_LEN - 1

    quoted = []
    for ch in header:
        if ch == ' ':
            # RFC 2047 allows '_' for space, which reads better than =20.
            _max_append(quoted, '_', max_encoded)
        elif not hqre.match(ch):
            # Safe character: goes in verbatim.
            _max_append(quoted, ch, max_encoded)
        else:
            # Everything else is escaped as a hex pair like =E2.
            _max_append(quoted, '=%02X' % ord(ch), max_encoded)

    # Wrap each chunk in the encoded-word chrome; continuation lines are
    # joined with eol plus a single leading space.  BAW: should we be able
    # to specify the leading whitespace in the joiner?
    joiner = eol + ' '
    return joiner.join(['=?%s?q?%s?=' % (charset, chunk) for chunk in quoted])
+
+
+
def encode(body, binary=False, maxlinelen=76, eol=NL):
    """Encode with quoted-printable, wrapping at maxlinelen characters.

    If binary is False (the default), end-of-line characters will be converted
    to the canonical email end-of-line sequence \\r\\n.  Otherwise they will
    be left verbatim.

    Each line of encoded text will end with eol, which defaults to "\\n".  Set
    this to "\\r\\n" if you will be using the result of this function directly
    in an email.

    Each line will be wrapped at, at most, maxlinelen characters (defaults to
    76 characters).  Long lines will have the `soft linefeed' quoted-printable
    character "=" appended to them, so the decoded text will be identical to
    the original text.
    """
    if not body:
        return body

    if not binary:
        body = fix_eols(body)

    # BAW: We're accumulating the body text by string concatenation.  That
    # can't be very efficient, but I don't have time now to rewrite it.  It
    # just feels like this algorithm could be more efficient.
    encoded_body = ''
    lineno = -1
    # Preserve line endings here so we can check later to see an eol needs to
    # be added to the output later.  splitlines(1) keeps the terminators, so
    # every element of `lines' is non-empty.
    lines = body.splitlines(1)
    for line in lines:
        # But strip off line-endings for processing this line.
        if line.endswith(CRLF):
            line = line[:-2]
        elif line[-1] in CRLF:
            # Safe: splitlines(1) never yields an empty string.
            line = line[:-1]

        lineno += 1
        encoded_line = ''
        # prev remembers the last raw (unquoted) character seen, so trailing
        # whitespace can be special-cased after the loop.
        prev = None
        linelen = len(line)
        # Now we need to examine every character to see if it needs to be
        # quopri encoded.  BAW: again, string concatenation is inefficient.
        for j in range(linelen):
            c = line[j]
            prev = c
            if bqre.match(c):
                c = quote(c)
            elif j+1 == linelen:
                # Check for whitespace at end of line; special case.
                # Trailing space/tab is deferred to the post-loop handling
                # below (it must be quoted or protected by a soft break).
                if c not in ' \t':
                    encoded_line += c
                prev = c
                continue
            # Check to see to see if the line has reached its maximum length
            if len(encoded_line) + len(c) >= maxlinelen:
                encoded_body += encoded_line + '=' + eol
                encoded_line = ''
            encoded_line += c
        # Now at end of line..
        if prev and prev in ' \t':
            # Special case for whitespace at end of file
            if lineno + 1 == len(lines):
                # Last line: quote the trailing whitespace outright, with a
                # soft break first if that would overflow the line.
                prev = quote(prev)
                if len(encoded_line) + len(prev) > maxlinelen:
                    encoded_body += encoded_line + '=' + eol + prev
                else:
                    encoded_body += encoded_line + prev
            # Just normal whitespace at end of line
            else:
                # Protect it with a soft line break instead of quoting.
                encoded_body += encoded_line + prev + '=' + eol
            encoded_line = ''
        # Now look at the line we just finished and it has a line ending, we
        # need to add eol to the end of the line.
        if lines[lineno].endswith(CRLF) or lines[lineno][-1] in CRLF:
            encoded_body += encoded_line + eol
        else:
            encoded_body += encoded_line
        encoded_line = ''
    return encoded_body
+
+
+# For convenience and backwards compatibility w/ the standard quopri module
+body_encode = encode
+encodestring = encode
+
+
+
+# BAW: I'm not sure if the intent was for the signature of this function to be
+# the same as base64MIME.decode() or not...
# BAW: I'm not sure if the intent was for the signature of this function to be
# the same as base64MIME.decode() or not...
def decode(encoded, eol=NL):
    """Decode a quoted-printable string.

    Lines are separated with eol, which defaults to \\n.
    """
    if not encoded:
        return encoded
    # BAW: see comment in encode() above.  Again, we're building up the
    # decoded string with string concatenation, which could be done much more
    # efficiently.
    decoded = ''

    for line in encoded.splitlines():
        line = line.rstrip()
        if not line:
            # A blank input line decodes to a bare line separator.
            decoded += eol
            continue

        i = 0
        n = len(line)
        while i < n:
            c = line[i]
            # Ordinary character: copy it through.  (`!=' replaces the
            # deprecated `<>' operator.)
            if c != '=':
                decoded += c
                i += 1
            # Otherwise, c == "=".  Are we at the end of the line?  If so, add
            # a soft line break.  The continue also skips the eol append
            # below, which is what makes the break "soft".
            elif i+1 == n:
                i += 1
                continue
            # Decode if in form =AB
            elif i+2 < n and line[i+1] in hexdigits and line[i+2] in hexdigits:
                decoded += unquote(line[i:i+3])
                i += 3
            # Otherwise, not in form =AB, pass literally
            else:
                decoded += c
                i += 1

            if i == n:
                decoded += eol
    # Special case if original string did not end with eol
    if not encoded.endswith(eol) and decoded.endswith(eol):
        decoded = decoded[:-1]
    return decoded
+
+
+# For convenience and backwards compatibility w/ the standard quopri module
+body_decode = decode
+decodestring = decode
+
+
+
def _unquote_match(match):
    """Turn a match in the form =AB to the ASCII character with value 0xab"""
    return unquote(match.group(0))
+
+
+# Header decoding is done a bit differently
+def header_decode(s):
+    """Decode a string encoded with RFC 2045 MIME header `Q' encoding.
+
+    This function does not parse a full MIME header value encoded with
+    quoted-printable (like =?iso-8895-1?q?Hello_World?=) -- please use
+    the high level email.Header class for that functionality.
+    """
+    s = s.replace('_', ' ')
+    return re.sub(r'=\w{2}', _unquote_match, s)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/__init__.py b/depot_tools/release/win/python_24/Lib/encodings/__init__.py
new file mode 100644
index 0000000..d2e9523
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/__init__.py
@@ -0,0 +1,145 @@
+""" Standard "encodings" Package
+
+    Standard Python encoding modules are stored in this package
+    directory.
+
+    Codec modules must have names corresponding to normalized encoding
+    names as defined in the normalize_encoding() function below, e.g.
+    'utf-8' must be implemented by the module 'utf_8.py'.
+
+    Each codec module must export the following interface:
+
+    * getregentry() -> (encoder, decoder, stream_reader, stream_writer)
+    The getregentry() API must return callable objects which adhere to
+    the Python Codec Interface Standard.
+
+    In addition, a module may optionally also define the following
+    APIs which are then used by the package's codec search function:
+
+    * getaliases() -> sequence of encoding name strings to use as aliases
+
+    Alias names returned by getaliases() must be normalized encoding
+    names as defined by normalize_encoding().
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""#"
+
+import codecs, exceptions, types, aliases
+
# Cache of previously resolved codec entries, keyed by encoding name.
# A cached None records a lookup miss.
_cache = {}
# Sentinel distinguishing "never looked up" from a cached None.
_unknown = '--unknown--'
# fromlist argument for __import__ so the submodule itself is returned.
_import_tail = ['*']
# Translation table for str.translate() in normalize_encoding(): keeps
# ASCII alphanumerics and '.', maps every other byte to a space (runs of
# which are later collapsed to a single underscore).
_norm_encoding_map = ('                                              . '
                      '0123456789       ABCDEFGHIJKLMNOPQRSTUVWXYZ     '
                      ' abcdefghijklmnopqrstuvwxyz                     '
                      '                                                '
                      '                                                '
                      '                ')
# Alias table shared with (and extended from) the encodings.aliases module.
_aliases = aliases.aliases

class CodecRegistryError(exceptions.LookupError,
                         exceptions.SystemError):
    """Raised when a codec module registers a malformed codec entry."""
    pass
+
def normalize_encoding(encoding):

    """ Normalize an encoding name.

        Normalization works as follows: all non-alphanumeric
        characters except the dot used for Python package names are
        collapsed and replaced with a single underscore, e.g. '  -;#'
        becomes '_'. Leading and trailing underscores are removed.

        Note that encoding names should be ASCII only; if they do use
        non-ASCII characters, these must be Latin-1 compatible.

    """
    # Make sure we have an 8-bit string, because .translate() works
    # differently for Unicode strings.
    if type(encoding) is types.UnicodeType:
        # Note that .encode('latin-1') does *not* use the codec
        # registry, so this call doesn't recurse. (See unicodeobject.c
        # PyUnicode_AsEncodedString() for details)
        encoding = encoding.encode('latin-1')
    # Map non-alphanumerics (except '.') to spaces via the table, then
    # split/join to collapse runs into single underscores and trim ends.
    return '_'.join(encoding.translate(_norm_encoding_map).split())
+
def search_function(encoding):

    """ Codec search function registered with the codecs module.

        Returns the 4-tuple (encoder, decoder, stream_reader,
        stream_writer) for `encoding', or None when no matching codec
        module can be found.  Both hits and misses are cached.
    """
    # Cache lookup
    entry = _cache.get(encoding, _unknown)
    if entry is not _unknown:
        return entry

    # Import the module:
    #
    # First try to find an alias for the normalized encoding
    # name and lookup the module using the aliased name, then try to
    # lookup the module using the standard import scheme, i.e. first
    # try in the encodings package, then at top-level.
    #
    norm_encoding = normalize_encoding(encoding)
    aliased_encoding = _aliases.get(norm_encoding) or \
                       _aliases.get(norm_encoding.replace('.', '_'))
    if aliased_encoding is not None:
        modnames = [aliased_encoding,
                    norm_encoding]
    else:
        modnames = [norm_encoding]
    for modname in modnames:
        if not modname:
            continue
        try:
            mod = __import__(modname,
                             globals(), locals(), _import_tail)
        except ImportError:
            pass
        else:
            # Import succeeded; stop trying candidates.
            break
    else:
        # Every candidate failed to import.
        mod = None

    try:
        getregentry = mod.getregentry
    except AttributeError:
        # Not a codec module (this also catches mod being None from the
        # failed import above).
        mod = None

    if mod is None:
        # Cache misses
        _cache[encoding] = None
        return None

    # Now ask the module for the registry entry
    entry = tuple(getregentry())
    if len(entry) != 4:
        raise CodecRegistryError,\
              'module "%s" (%s) failed to register' % \
              (mod.__name__, mod.__file__)
    for obj in entry:
        if not callable(obj):
            raise CodecRegistryError,\
                  'incompatible codecs in module "%s" (%s)' % \
                  (mod.__name__, mod.__file__)

    # Cache the codec registry entry
    _cache[encoding] = entry

    # Register its aliases (without overwriting previously registered
    # aliases)
    try:
        codecaliases = mod.getaliases()
    except AttributeError:
        pass
    else:
        for alias in codecaliases:
            if not _aliases.has_key(alias):
                _aliases[alias] = modname

    # Return the registry entry
    return entry
+
+# Register the search_function in the Python codec registry
+codecs.register(search_function)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/aliases.py b/depot_tools/release/win/python_24/Lib/encodings/aliases.py
new file mode 100644
index 0000000..80652dc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/aliases.py
@@ -0,0 +1,500 @@
+""" Encoding Aliases Support
+
+    This module is used by the encodings package search function to
+    map encodings names to module names.
+
+    Note that the search function normalizes the encoding names before
+    doing the lookup, so the mapping will have to map normalized
+    encoding names to module names.
+
+    Contents:
+
+        The following aliases dictionary contains mappings of all IANA
+        character set names for which the Python core library provides
+        codecs. In addition to these, a few Python specific codec
+        aliases have also been added.
+
+"""
+aliases = {
+
+    # Please keep this list sorted alphabetically by value !
+
+    # ascii codec
+    '646'                : 'ascii',
+    'ansi_x3.4_1968'     : 'ascii',
+    'ansi_x3_4_1968'     : 'ascii', # some email headers use this non-standard name
+    'ansi_x3.4_1986'     : 'ascii',
+    'cp367'              : 'ascii',
+    'csascii'            : 'ascii',
+    'ibm367'             : 'ascii',
+    'iso646_us'          : 'ascii',
+    'iso_646.irv_1991'   : 'ascii',
+    'iso_ir_6'           : 'ascii',
+    'us'                 : 'ascii',
+    'us_ascii'           : 'ascii',
+
+    # base64_codec codec
+    'base64'             : 'base64_codec',
+    'base_64'            : 'base64_codec',
+
+    # big5 codec
+    'big5_tw'            : 'big5',
+    'csbig5'             : 'big5',
+
+    # big5hkscs codec
+    'big5_hkscs'         : 'big5hkscs',
+    'hkscs'              : 'big5hkscs',
+
+    # bz2_codec codec
+    'bz2'                : 'bz2_codec',
+
+    # cp037 codec
+    '037'                : 'cp037',
+    'csibm037'           : 'cp037',
+    'ebcdic_cp_ca'       : 'cp037',
+    'ebcdic_cp_nl'       : 'cp037',
+    'ebcdic_cp_us'       : 'cp037',
+    'ebcdic_cp_wt'       : 'cp037',
+    'ibm037'             : 'cp037',
+    'ibm039'             : 'cp037',
+
+    # cp1026 codec
+    '1026'               : 'cp1026',
+    'csibm1026'          : 'cp1026',
+    'ibm1026'            : 'cp1026',
+
+    # cp1140 codec
+    '1140'               : 'cp1140',
+    'ibm1140'            : 'cp1140',
+
+    # cp1250 codec
+    '1250'               : 'cp1250',
+    'windows_1250'       : 'cp1250',
+
+    # cp1251 codec
+    '1251'               : 'cp1251',
+    'windows_1251'       : 'cp1251',
+
+    # cp1252 codec
+    '1252'               : 'cp1252',
+    'windows_1252'       : 'cp1252',
+
+    # cp1253 codec
+    '1253'               : 'cp1253',
+    'windows_1253'       : 'cp1253',
+
+    # cp1254 codec
+    '1254'               : 'cp1254',
+    'windows_1254'       : 'cp1254',
+
+    # cp1255 codec
+    '1255'               : 'cp1255',
+    'windows_1255'       : 'cp1255',
+
+    # cp1256 codec
+    '1256'               : 'cp1256',
+    'windows_1256'       : 'cp1256',
+
+    # cp1257 codec
+    '1257'               : 'cp1257',
+    'windows_1257'       : 'cp1257',
+
+    # cp1258 codec
+    '1258'               : 'cp1258',
+    'windows_1258'       : 'cp1258',
+
+    # cp424 codec
+    '424'                : 'cp424',
+    'csibm424'           : 'cp424',
+    'ebcdic_cp_he'       : 'cp424',
+    'ibm424'             : 'cp424',
+
+    # cp437 codec
+    '437'                : 'cp437',
+    'cspc8codepage437'   : 'cp437',
+    'ibm437'             : 'cp437',
+
+    # cp500 codec
+    '500'                : 'cp500',
+    'csibm500'           : 'cp500',
+    'ebcdic_cp_be'       : 'cp500',
+    'ebcdic_cp_ch'       : 'cp500',
+    'ibm500'             : 'cp500',
+
+    # cp775 codec
+    '775'              : 'cp775',
+    'cspc775baltic'      : 'cp775',
+    'ibm775'             : 'cp775',
+
+    # cp850 codec
+    '850'                : 'cp850',
+    'cspc850multilingual' : 'cp850',
+    'ibm850'             : 'cp850',
+
+    # cp852 codec
+    '852'                : 'cp852',
+    'cspcp852'           : 'cp852',
+    'ibm852'             : 'cp852',
+
+    # cp855 codec
+    '855'                : 'cp855',
+    'csibm855'           : 'cp855',
+    'ibm855'             : 'cp855',
+
+    # cp857 codec
+    '857'                : 'cp857',
+    'csibm857'           : 'cp857',
+    'ibm857'             : 'cp857',
+
+    # cp860 codec
+    '860'                : 'cp860',
+    'csibm860'           : 'cp860',
+    'ibm860'             : 'cp860',
+
+    # cp861 codec
+    '861'                : 'cp861',
+    'cp_is'              : 'cp861',
+    'csibm861'           : 'cp861',
+    'ibm861'             : 'cp861',
+
+    # cp862 codec
+    '862'                : 'cp862',
+    'cspc862latinhebrew' : 'cp862',
+    'ibm862'             : 'cp862',
+
+    # cp863 codec
+    '863'                : 'cp863',
+    'csibm863'           : 'cp863',
+    'ibm863'             : 'cp863',
+
+    # cp864 codec
+    '864'                : 'cp864',
+    'csibm864'           : 'cp864',
+    'ibm864'             : 'cp864',
+
+    # cp865 codec
+    '865'                : 'cp865',
+    'csibm865'           : 'cp865',
+    'ibm865'             : 'cp865',
+
+    # cp866 codec
+    '866'                : 'cp866',
+    'csibm866'           : 'cp866',
+    'ibm866'             : 'cp866',
+
+    # cp869 codec
+    '869'                : 'cp869',
+    'cp_gr'              : 'cp869',
+    'csibm869'           : 'cp869',
+    'ibm869'             : 'cp869',
+
+    # cp932 codec
+    '932'                : 'cp932',
+    'ms932'              : 'cp932',
+    'mskanji'            : 'cp932',
+    'ms_kanji'           : 'cp932',
+
+    # cp949 codec
+    '949'                : 'cp949',
+    'ms949'              : 'cp949',
+    'uhc'                : 'cp949',
+
+    # cp950 codec
+    '950'                : 'cp950',
+    'ms950'              : 'cp950',
+
+    # euc_jis_2004 codec
+    'jisx0213'           : 'euc_jis_2004',
+    'eucjis2004'         : 'euc_jis_2004',
+    'euc_jis2004'        : 'euc_jis_2004',
+
+    # euc_jisx0213 codec
+    'eucjisx0213'        : 'euc_jisx0213',
+
+    # euc_jp codec
+    'eucjp'              : 'euc_jp',
+    'ujis'               : 'euc_jp',
+    'u_jis'              : 'euc_jp',
+
+    # euc_kr codec
+    'euckr'              : 'euc_kr',
+    'korean'             : 'euc_kr',
+    'ksc5601'            : 'euc_kr',
+    'ks_c_5601'          : 'euc_kr',
+    'ks_c_5601_1987'     : 'euc_kr',
+    'ksx1001'            : 'euc_kr',
+    'ks_x_1001'          : 'euc_kr',
+
+    # gb18030 codec
+    'gb18030_2000'       : 'gb18030',
+
+    # gb2312 codec
+    'chinese'            : 'gb2312',
+    'csiso58gb231280'    : 'gb2312',
+    'euc_cn'             : 'gb2312',
+    'euccn'              : 'gb2312',
+    'eucgb2312_cn'       : 'gb2312',
+    'gb2312_1980'        : 'gb2312',
+    'gb2312_80'          : 'gb2312',
+    'iso_ir_58'          : 'gb2312',
+
+    # gbk codec
+    '936'                : 'gbk',
+    'cp936'              : 'gbk',
+    'ms936'              : 'gbk',
+
+    # hex_codec codec
+    'hex'                : 'hex_codec',
+
+    # hp_roman8 codec
+    'roman8'             : 'hp_roman8',
+    'r8'                 : 'hp_roman8',
+    'csHPRoman8'         : 'hp_roman8',
+
+    # hz codec
+    'hzgb'               : 'hz',
+    'hz_gb'              : 'hz',
+    'hz_gb_2312'         : 'hz',
+
+    # iso2022_jp codec
+    'csiso2022jp'        : 'iso2022_jp',
+    'iso2022jp'          : 'iso2022_jp',
+    'iso_2022_jp'        : 'iso2022_jp',
+
+    # iso2022_jp_1 codec
+    'iso2022jp_1'        : 'iso2022_jp_1',
+    'iso_2022_jp_1'      : 'iso2022_jp_1',
+
+    # iso2022_jp_2 codec
+    'iso2022jp_2'        : 'iso2022_jp_2',
+    'iso_2022_jp_2'      : 'iso2022_jp_2',
+
+    # iso2022_jp_2004 codec
+    'iso_2022_jp_2004'   : 'iso2022_jp_2004',
+    'iso2022jp_2004'     : 'iso2022_jp_2004',
+
+    # iso2022_jp_3 codec
+    'iso2022jp_3'        : 'iso2022_jp_3',
+    'iso_2022_jp_3'      : 'iso2022_jp_3',
+
+    # iso2022_jp_ext codec
+    'iso2022jp_ext'      : 'iso2022_jp_ext',
+    'iso_2022_jp_ext'    : 'iso2022_jp_ext',
+
+    # iso2022_kr codec
+    'csiso2022kr'        : 'iso2022_kr',
+    'iso2022kr'          : 'iso2022_kr',
+    'iso_2022_kr'        : 'iso2022_kr',
+
+    # iso8859_10 codec
+    'csisolatin6'        : 'iso8859_10',
+    'iso_8859_10'        : 'iso8859_10',
+    'iso_8859_10_1992'   : 'iso8859_10',
+    'iso_ir_157'         : 'iso8859_10',
+    'l6'                 : 'iso8859_10',
+    'latin6'             : 'iso8859_10',
+
+    # iso8859_13 codec
+    'iso_8859_13'        : 'iso8859_13',
+
+    # iso8859_14 codec
+    'iso_8859_14'        : 'iso8859_14',
+    'iso_8859_14_1998'   : 'iso8859_14',
+    'iso_celtic'         : 'iso8859_14',
+    'iso_ir_199'         : 'iso8859_14',
+    'l8'                 : 'iso8859_14',
+    'latin8'             : 'iso8859_14',
+
+    # iso8859_15 codec
+    'iso_8859_15'        : 'iso8859_15',
+
+    # iso8859_2 codec
+    'csisolatin2'        : 'iso8859_2',
+    'iso_8859_2'         : 'iso8859_2',
+    'iso_8859_2_1987'    : 'iso8859_2',
+    'iso_ir_101'         : 'iso8859_2',
+    'l2'                 : 'iso8859_2',
+    'latin2'             : 'iso8859_2',
+
+    # iso8859_3 codec
+    'csisolatin3'        : 'iso8859_3',
+    'iso_8859_3'         : 'iso8859_3',
+    'iso_8859_3_1988'    : 'iso8859_3',
+    'iso_ir_109'         : 'iso8859_3',
+    'l3'                 : 'iso8859_3',
+    'latin3'             : 'iso8859_3',
+
+    # iso8859_4 codec
+    'csisolatin4'        : 'iso8859_4',
+    'iso_8859_4'         : 'iso8859_4',
+    'iso_8859_4_1988'    : 'iso8859_4',
+    'iso_ir_110'         : 'iso8859_4',
+    'l4'                 : 'iso8859_4',
+    'latin4'             : 'iso8859_4',
+
+    # iso8859_5 codec
+    'csisolatincyrillic' : 'iso8859_5',
+    'cyrillic'           : 'iso8859_5',
+    'iso_8859_5'         : 'iso8859_5',
+    'iso_8859_5_1988'    : 'iso8859_5',
+    'iso_ir_144'         : 'iso8859_5',
+
+    # iso8859_6 codec
+    'arabic'             : 'iso8859_6',
+    'asmo_708'           : 'iso8859_6',
+    'csisolatinarabic'   : 'iso8859_6',
+    'ecma_114'           : 'iso8859_6',
+    'iso_8859_6'         : 'iso8859_6',
+    'iso_8859_6_1987'    : 'iso8859_6',
+    'iso_ir_127'         : 'iso8859_6',
+
+    # iso8859_7 codec
+    'csisolatingreek'    : 'iso8859_7',
+    'ecma_118'           : 'iso8859_7',
+    'elot_928'           : 'iso8859_7',
+    'greek'              : 'iso8859_7',
+    'greek8'             : 'iso8859_7',
+    'iso_8859_7'         : 'iso8859_7',
+    'iso_8859_7_1987'    : 'iso8859_7',
+    'iso_ir_126'         : 'iso8859_7',
+
+    # iso8859_8 codec
+    'csisolatinhebrew'   : 'iso8859_8',
+    'hebrew'             : 'iso8859_8',
+    'iso_8859_8'         : 'iso8859_8',
+    'iso_8859_8_1988'    : 'iso8859_8',
+    'iso_ir_138'         : 'iso8859_8',
+
+    # iso8859_9 codec
+    'csisolatin5'        : 'iso8859_9',
+    'iso_8859_9'         : 'iso8859_9',
+    'iso_8859_9_1989'    : 'iso8859_9',
+    'iso_ir_148'         : 'iso8859_9',
+    'l5'                 : 'iso8859_9',
+    'latin5'             : 'iso8859_9',
+
+    # iso8859_11 codec
+    'thai'               : 'iso8859_11',
+    'iso_8859_11'        : 'iso8859_11',
+    'iso_8859_11_2001'   : 'iso8859_11',
+
+    # iso8859_16 codec
+    'iso_8859_16'        : 'iso8859_16',
+    'iso_8859_16_2001'   : 'iso8859_16',
+    'iso_ir_226'         : 'iso8859_16',
+    'l10'                : 'iso8859_16',
+    'latin10'            : 'iso8859_16',
+
+    # johab codec
+    'cp1361'             : 'johab',
+    'ms1361'             : 'johab',
+
+    # koi8_r codec
+    'cskoi8r'            : 'koi8_r',
+
+    # latin_1 codec
+    '8859'               : 'latin_1',
+    'cp819'              : 'latin_1',
+    'csisolatin1'        : 'latin_1',
+    'ibm819'             : 'latin_1',
+    'iso8859'            : 'latin_1',
+    'iso_8859_1'         : 'latin_1',
+    'iso_8859_1_1987'    : 'latin_1',
+    'iso_ir_100'         : 'latin_1',
+    'l1'                 : 'latin_1',
+    'latin'              : 'latin_1',
+    'latin1'             : 'latin_1',
+
+    # mac_cyrillic codec
+    'maccyrillic'        : 'mac_cyrillic',
+
+    # mac_greek codec
+    'macgreek'           : 'mac_greek',
+
+    # mac_iceland codec
+    'maciceland'         : 'mac_iceland',
+
+    # mac_latin2 codec
+    'maccentraleurope'   : 'mac_latin2',
+    'maclatin2'          : 'mac_latin2',
+
+    # mac_roman codec
+    'macroman'           : 'mac_roman',
+
+    # mac_turkish codec
+    'macturkish'         : 'mac_turkish',
+
+    # mbcs codec
+    'dbcs'               : 'mbcs',
+
+    # ptcp154 codec
+    'csptcp154'          : 'ptcp154',
+    'pt154'              : 'ptcp154',
+    'cp154'              : 'ptcp154',
+    'cyrillic-asian'     : 'ptcp154',
+
+    # quopri_codec codec
+    'quopri'             : 'quopri_codec',
+    'quoted_printable'   : 'quopri_codec',
+    'quotedprintable'    : 'quopri_codec',
+
+    # rot_13 codec
+    'rot13'              : 'rot_13',
+
+    # shift_jis codec
+    'csshiftjis'         : 'shift_jis',
+    'shiftjis'           : 'shift_jis',
+    'sjis'               : 'shift_jis',
+    's_jis'              : 'shift_jis',
+
+    # shift_jis_2004 codec
+    'shiftjis2004'       : 'shift_jis_2004',
+    'sjis_2004'          : 'shift_jis_2004',
+    's_jis_2004'         : 'shift_jis_2004',
+
+    # shift_jisx0213 codec
+    'shiftjisx0213'      : 'shift_jisx0213',
+    'sjisx0213'          : 'shift_jisx0213',
+    's_jisx0213'         : 'shift_jisx0213',
+
+    # tactis codec
+    'tis260'             : 'tactis',
+
+    # tis_620 codec
+    'tis620'             : 'tis_620',
+    'tis_620_0'          : 'tis_620',
+    'tis_620_2529_0'     : 'tis_620',
+    'tis_620_2529_1'     : 'tis_620',
+    'iso_ir_166'         : 'tis_620',
+
+    # utf_16 codec
+    'u16'                : 'utf_16',
+    'utf16'              : 'utf_16',
+
+    # utf_16_be codec
+    'unicodebigunmarked' : 'utf_16_be',
+    'utf_16be'           : 'utf_16_be',
+
+    # utf_16_le codec
+    'unicodelittleunmarked' : 'utf_16_le',
+    'utf_16le'           : 'utf_16_le',
+
+    # utf_7 codec
+    'u7'                 : 'utf_7',
+    'utf7'               : 'utf_7',
+
+    # utf_8 codec
+    'u8'                 : 'utf_8',
+    'utf'                : 'utf_8',
+    'utf8'               : 'utf_8',
+    'utf8_ucs2'          : 'utf_8',
+    'utf8_ucs4'          : 'utf_8',
+
+    # uu_codec codec
+    'uu'                 : 'uu_codec',
+
+    # zlib_codec codec
+    'zip'                : 'zlib_codec',
+    'zlib'               : 'zlib_codec',
+
+}
diff --git a/depot_tools/release/win/python_24/Lib/encodings/ascii.py b/depot_tools/release/win/python_24/Lib/encodings/ascii.py
new file mode 100644
index 0000000..05fc36a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/ascii.py
@@ -0,0 +1,35 @@
+""" Python 'ascii' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.ascii_encode
+    decode = codecs.ascii_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+class StreamConverter(StreamWriter,StreamReader):
+
+    encode = codecs.ascii_decode
+    decode = codecs.ascii_encode
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/base64_codec.py b/depot_tools/release/win/python_24/Lib/encodings/base64_codec.py
new file mode 100644
index 0000000..085ab14
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/base64_codec.py
@@ -0,0 +1,62 @@
+""" Python 'base64_codec' Codec - base64 content transfer encoding
+
+    Unlike most of the other codecs which target Unicode, this codec
+    will return Python string objects for both encode and decode.
+
+    Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+"""
+import codecs, base64
+
+### Codec APIs
+
+def base64_encode(input,errors='strict'):
+
+    """ Encodes the object input and returns a tuple (output
+        object, length consumed).
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = base64.encodestring(input)
+    return (output, len(input))
+
+def base64_decode(input,errors='strict'):
+
+    """ Decodes the object input and returns a tuple (output
+        object, length consumed).
+
+        input must be an object which provides the bf_getreadbuf
+        buffer slot. Python strings, buffer objects and memory
+        mapped files are examples of objects providing this slot.
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = base64.decodestring(input)
+    return (output, len(input))
+
+class Codec(codecs.Codec):
+
+    def encode(self, input,errors='strict'):
+        return base64_encode(input,errors)
+    def decode(self, input,errors='strict'):
+        return base64_decode(input,errors)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (base64_encode,base64_decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/big5.py b/depot_tools/release/win/python_24/Lib/encodings/big5.py
new file mode 100644
index 0000000..d56aa1be
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/big5.py
@@ -0,0 +1,34 @@
+#
+# big5.py: Python Unicode Codec for BIG5
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: big5.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_tw, codecs
+
+codec = _codecs_tw.getcodec('big5')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/big5hkscs.py b/depot_tools/release/win/python_24/Lib/encodings/big5hkscs.py
new file mode 100644
index 0000000..443997f4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/big5hkscs.py
@@ -0,0 +1,34 @@
+#
+# big5hkscs.py: Python Unicode Codec for BIG5HKSCS
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: big5hkscs.py,v 1.1 2004/06/29 05:14:27 perky Exp $
+#
+
+import _codecs_hk, codecs
+
+codec = _codecs_hk.getcodec('big5hkscs')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/bz2_codec.py b/depot_tools/release/win/python_24/Lib/encodings/bz2_codec.py
new file mode 100644
index 0000000..870474c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/bz2_codec.py
@@ -0,0 +1,64 @@
+""" Python 'bz2_codec' Codec - bz2 compression encoding
+
+    Unlike most of the other codecs which target Unicode, this codec
+    will return Python string objects for both encode and decode.
+
+    Adapted by Raymond Hettinger from zlib_codec.py which was written
+    by Marc-Andre Lemburg (mal@lemburg.com).
+
+"""
+import codecs
+import bz2 # this codec needs the optional bz2 module !
+
+### Codec APIs
+
+def bz2_encode(input,errors='strict'):
+
+    """ Encodes the object input and returns a tuple (output
+        object, length consumed).
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = bz2.compress(input)
+    return (output, len(input))
+
+def bz2_decode(input,errors='strict'):
+
+    """ Decodes the object input and returns a tuple (output
+        object, length consumed).
+
+        input must be an object which provides the bf_getreadbuf
+        buffer slot. Python strings, buffer objects and memory
+        mapped files are examples of objects providing this slot.
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = bz2.decompress(input)
+    return (output, len(input))
+
+class Codec(codecs.Codec):
+
+    def encode(self, input, errors='strict'):
+        return bz2_encode(input, errors)
+    def decode(self, input, errors='strict'):
+        return bz2_decode(input, errors)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (bz2_encode,bz2_decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/charmap.py b/depot_tools/release/win/python_24/Lib/encodings/charmap.py
new file mode 100644
index 0000000..9bd93ec
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/charmap.py
@@ -0,0 +1,50 @@
+""" Generic Python Character Mapping Codec.
+
+    Use this codec directly rather than through the automatic
+    conversion mechanisms supplied by unicode() and .encode().
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.charmap_encode
+    decode = codecs.charmap_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+
+    def __init__(self,stream,errors='strict',mapping=None):
+
+        codecs.StreamWriter.__init__(self,stream,errors)
+        self.mapping = mapping
+
+    def encode(self,input,errors='strict'):
+
+        return Codec.encode(input,errors,self.mapping)
+
+class StreamReader(Codec,codecs.StreamReader):
+
+    def __init__(self,stream,errors='strict',mapping=None):
+
+        codecs.StreamReader.__init__(self,stream,errors)
+        self.mapping = mapping
+
+    def decode(self,input,errors='strict'):
+
+        return Codec.decode(input,errors,self.mapping)
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp037.py b/depot_tools/release/win/python_24/Lib/encodings/cp037.py
new file mode 100644
index 0000000..42c5b1f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp037.py
@@ -0,0 +1,280 @@
+""" Python Character Mapping Codec generated from 'CP037.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0004: 0x009c, # CONTROL
+        0x0005: 0x0009, # HORIZONTAL TABULATION
+        0x0006: 0x0086, # CONTROL
+        0x0007: 0x007f, # DELETE
+        0x0008: 0x0097, # CONTROL
+        0x0009: 0x008d, # CONTROL
+        0x000a: 0x008e, # CONTROL
+        0x0014: 0x009d, # CONTROL
+        0x0015: 0x0085, # CONTROL
+        0x0016: 0x0008, # BACKSPACE
+        0x0017: 0x0087, # CONTROL
+        0x001a: 0x0092, # CONTROL
+        0x001b: 0x008f, # CONTROL
+        0x0020: 0x0080, # CONTROL
+        0x0021: 0x0081, # CONTROL
+        0x0022: 0x0082, # CONTROL
+        0x0023: 0x0083, # CONTROL
+        0x0024: 0x0084, # CONTROL
+        0x0025: 0x000a, # LINE FEED
+        0x0026: 0x0017, # END OF TRANSMISSION BLOCK
+        0x0027: 0x001b, # ESCAPE
+        0x0028: 0x0088, # CONTROL
+        0x0029: 0x0089, # CONTROL
+        0x002a: 0x008a, # CONTROL
+        0x002b: 0x008b, # CONTROL
+        0x002c: 0x008c, # CONTROL
+        0x002d: 0x0005, # ENQUIRY
+        0x002e: 0x0006, # ACKNOWLEDGE
+        0x002f: 0x0007, # BELL
+        0x0030: 0x0090, # CONTROL
+        0x0031: 0x0091, # CONTROL
+        0x0032: 0x0016, # SYNCHRONOUS IDLE
+        0x0033: 0x0093, # CONTROL
+        0x0034: 0x0094, # CONTROL
+        0x0035: 0x0095, # CONTROL
+        0x0036: 0x0096, # CONTROL
+        0x0037: 0x0004, # END OF TRANSMISSION
+        0x0038: 0x0098, # CONTROL
+        0x0039: 0x0099, # CONTROL
+        0x003a: 0x009a, # CONTROL
+        0x003b: 0x009b, # CONTROL
+        0x003c: 0x0014, # DEVICE CONTROL FOUR
+        0x003d: 0x0015, # NEGATIVE ACKNOWLEDGE
+        0x003e: 0x009e, # CONTROL
+        0x003f: 0x001a, # SUBSTITUTE
+        0x0040: 0x0020, # SPACE
+        0x0041: 0x00a0, # NO-BREAK SPACE
+        0x0042: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0043: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0044: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0045: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x0046: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x0047: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0048: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0049: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x004a: 0x00a2, # CENT SIGN
+        0x004b: 0x002e, # FULL STOP
+        0x004c: 0x003c, # LESS-THAN SIGN
+        0x004d: 0x0028, # LEFT PARENTHESIS
+        0x004e: 0x002b, # PLUS SIGN
+        0x004f: 0x007c, # VERTICAL LINE
+        0x0050: 0x0026, # AMPERSAND
+        0x0051: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0052: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0053: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x0054: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x0055: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x0056: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x0057: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x0058: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x0059: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
+        0x005a: 0x0021, # EXCLAMATION MARK
+        0x005b: 0x0024, # DOLLAR SIGN
+        0x005c: 0x002a, # ASTERISK
+        0x005d: 0x0029, # RIGHT PARENTHESIS
+        0x005e: 0x003b, # SEMICOLON
+        0x005f: 0x00ac, # NOT SIGN
+        0x0060: 0x002d, # HYPHEN-MINUS
+        0x0061: 0x002f, # SOLIDUS
+        0x0062: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x0063: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0064: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x0065: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x0066: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x0067: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0068: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0069: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x006a: 0x00a6, # BROKEN BAR
+        0x006b: 0x002c, # COMMA
+        0x006c: 0x0025, # PERCENT SIGN
+        0x006d: 0x005f, # LOW LINE
+        0x006e: 0x003e, # GREATER-THAN SIGN
+        0x006f: 0x003f, # QUESTION MARK
+        0x0070: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x0071: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0072: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x0073: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x0074: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x0075: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x0076: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x0077: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x0078: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x0079: 0x0060, # GRAVE ACCENT
+        0x007a: 0x003a, # COLON
+        0x007b: 0x0023, # NUMBER SIGN
+        0x007c: 0x0040, # COMMERCIAL AT
+        0x007d: 0x0027, # APOSTROPHE
+        0x007e: 0x003d, # EQUALS SIGN
+        0x007f: 0x0022, # QUOTATION MARK
+        0x0080: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x0081: 0x0061, # LATIN SMALL LETTER A
+        0x0082: 0x0062, # LATIN SMALL LETTER B
+        0x0083: 0x0063, # LATIN SMALL LETTER C
+        0x0084: 0x0064, # LATIN SMALL LETTER D
+        0x0085: 0x0065, # LATIN SMALL LETTER E
+        0x0086: 0x0066, # LATIN SMALL LETTER F
+        0x0087: 0x0067, # LATIN SMALL LETTER G
+        0x0088: 0x0068, # LATIN SMALL LETTER H
+        0x0089: 0x0069, # LATIN SMALL LETTER I
+        0x008a: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008b: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008c: 0x00f0, # LATIN SMALL LETTER ETH (ICELANDIC)
+        0x008d: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
+        0x008e: 0x00fe, # LATIN SMALL LETTER THORN (ICELANDIC)
+        0x008f: 0x00b1, # PLUS-MINUS SIGN
+        0x0090: 0x00b0, # DEGREE SIGN
+        0x0091: 0x006a, # LATIN SMALL LETTER J
+        0x0092: 0x006b, # LATIN SMALL LETTER K
+        0x0093: 0x006c, # LATIN SMALL LETTER L
+        0x0094: 0x006d, # LATIN SMALL LETTER M
+        0x0095: 0x006e, # LATIN SMALL LETTER N
+        0x0096: 0x006f, # LATIN SMALL LETTER O
+        0x0097: 0x0070, # LATIN SMALL LETTER P
+        0x0098: 0x0071, # LATIN SMALL LETTER Q
+        0x0099: 0x0072, # LATIN SMALL LETTER R
+        0x009a: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x009b: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x009c: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x009d: 0x00b8, # CEDILLA
+        0x009e: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x009f: 0x00a4, # CURRENCY SIGN
+        0x00a0: 0x00b5, # MICRO SIGN
+        0x00a1: 0x007e, # TILDE
+        0x00a2: 0x0073, # LATIN SMALL LETTER S
+        0x00a3: 0x0074, # LATIN SMALL LETTER T
+        0x00a4: 0x0075, # LATIN SMALL LETTER U
+        0x00a5: 0x0076, # LATIN SMALL LETTER V
+        0x00a6: 0x0077, # LATIN SMALL LETTER W
+        0x00a7: 0x0078, # LATIN SMALL LETTER X
+        0x00a8: 0x0079, # LATIN SMALL LETTER Y
+        0x00a9: 0x007a, # LATIN SMALL LETTER Z
+        0x00aa: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ab: 0x00bf, # INVERTED QUESTION MARK
+        0x00ac: 0x00d0, # LATIN CAPITAL LETTER ETH (ICELANDIC)
+        0x00ad: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
+        0x00ae: 0x00de, # LATIN CAPITAL LETTER THORN (ICELANDIC)
+        0x00af: 0x00ae, # REGISTERED SIGN
+        0x00b0: 0x005e, # CIRCUMFLEX ACCENT
+        0x00b1: 0x00a3, # POUND SIGN
+        0x00b2: 0x00a5, # YEN SIGN
+        0x00b3: 0x00b7, # MIDDLE DOT
+        0x00b4: 0x00a9, # COPYRIGHT SIGN
+        0x00b5: 0x00a7, # SECTION SIGN
+        0x00b7: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00b8: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00b9: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00ba: 0x005b, # LEFT SQUARE BRACKET
+        0x00bb: 0x005d, # RIGHT SQUARE BRACKET
+        0x00bc: 0x00af, # MACRON
+        0x00bd: 0x00a8, # DIAERESIS
+        0x00be: 0x00b4, # ACUTE ACCENT
+        0x00bf: 0x00d7, # MULTIPLICATION SIGN
+        0x00c0: 0x007b, # LEFT CURLY BRACKET
+        0x00c1: 0x0041, # LATIN CAPITAL LETTER A
+        0x00c2: 0x0042, # LATIN CAPITAL LETTER B
+        0x00c3: 0x0043, # LATIN CAPITAL LETTER C
+        0x00c4: 0x0044, # LATIN CAPITAL LETTER D
+        0x00c5: 0x0045, # LATIN CAPITAL LETTER E
+        0x00c6: 0x0046, # LATIN CAPITAL LETTER F
+        0x00c7: 0x0047, # LATIN CAPITAL LETTER G
+        0x00c8: 0x0048, # LATIN CAPITAL LETTER H
+        0x00c9: 0x0049, # LATIN CAPITAL LETTER I
+        0x00ca: 0x00ad, # SOFT HYPHEN
+        0x00cb: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x00cc: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x00cd: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x00ce: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00cf: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x00d0: 0x007d, # RIGHT CURLY BRACKET
+        0x00d1: 0x004a, # LATIN CAPITAL LETTER J
+        0x00d2: 0x004b, # LATIN CAPITAL LETTER K
+        0x00d3: 0x004c, # LATIN CAPITAL LETTER L
+        0x00d4: 0x004d, # LATIN CAPITAL LETTER M
+        0x00d5: 0x004e, # LATIN CAPITAL LETTER N
+        0x00d6: 0x004f, # LATIN CAPITAL LETTER O
+        0x00d7: 0x0050, # LATIN CAPITAL LETTER P
+        0x00d8: 0x0051, # LATIN CAPITAL LETTER Q
+        0x00d9: 0x0052, # LATIN CAPITAL LETTER R
+        0x00da: 0x00b9, # SUPERSCRIPT ONE
+        0x00db: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x00dc: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00dd: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x00de: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00df: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00e0: 0x005c, # REVERSE SOLIDUS
+        0x00e1: 0x00f7, # DIVISION SIGN
+        0x00e2: 0x0053, # LATIN CAPITAL LETTER S
+        0x00e3: 0x0054, # LATIN CAPITAL LETTER T
+        0x00e4: 0x0055, # LATIN CAPITAL LETTER U
+        0x00e5: 0x0056, # LATIN CAPITAL LETTER V
+        0x00e6: 0x0057, # LATIN CAPITAL LETTER W
+        0x00e7: 0x0058, # LATIN CAPITAL LETTER X
+        0x00e8: 0x0059, # LATIN CAPITAL LETTER Y
+        0x00e9: 0x005a, # LATIN CAPITAL LETTER Z
+        0x00ea: 0x00b2, # SUPERSCRIPT TWO
+        0x00eb: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00ec: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x00ed: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00ef: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00f0: 0x0030, # DIGIT ZERO
+        0x00f1: 0x0031, # DIGIT ONE
+        0x00f2: 0x0032, # DIGIT TWO
+        0x00f3: 0x0033, # DIGIT THREE
+        0x00f4: 0x0034, # DIGIT FOUR
+        0x00f5: 0x0035, # DIGIT FIVE
+        0x00f6: 0x0036, # DIGIT SIX
+        0x00f7: 0x0037, # DIGIT SEVEN
+        0x00f8: 0x0038, # DIGIT EIGHT
+        0x00f9: 0x0039, # DIGIT NINE
+        0x00fa: 0x00b3, # SUPERSCRIPT THREE
+        0x00fb: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00fc: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x00fd: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00fe: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00ff: 0x009f, # CONTROL
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1006.py b/depot_tools/release/win/python_24/Lib/encodings/cp1006.py
new file mode 100644
index 0000000..bbd3d87a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1006.py
@@ -0,0 +1,138 @@
+""" Python Character Mapping Codec generated from 'CP1006.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x06f0, #       EXTENDED ARABIC-INDIC DIGIT ZERO
+        0x00a2: 0x06f1, #       EXTENDED ARABIC-INDIC DIGIT ONE
+        0x00a3: 0x06f2, #       EXTENDED ARABIC-INDIC DIGIT TWO
+        0x00a4: 0x06f3, #       EXTENDED ARABIC-INDIC DIGIT THREE
+        0x00a5: 0x06f4, #       EXTENDED ARABIC-INDIC DIGIT FOUR
+        0x00a6: 0x06f5, #       EXTENDED ARABIC-INDIC DIGIT FIVE
+        0x00a7: 0x06f6, #       EXTENDED ARABIC-INDIC DIGIT SIX
+        0x00a8: 0x06f7, #       EXTENDED ARABIC-INDIC DIGIT SEVEN
+        0x00a9: 0x06f8, #       EXTENDED ARABIC-INDIC DIGIT EIGHT
+        0x00aa: 0x06f9, #       EXTENDED ARABIC-INDIC DIGIT NINE
+        0x00ab: 0x060c, #       ARABIC COMMA
+        0x00ac: 0x061b, #       ARABIC SEMICOLON
+        0x00ae: 0x061f, #       ARABIC QUESTION MARK
+        0x00af: 0xfe81, #       ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM
+        0x00b0: 0xfe8d, #       ARABIC LETTER ALEF ISOLATED FORM
+        0x00b1: 0xfe8e, #       ARABIC LETTER ALEF FINAL FORM
+        0x00b2: 0xfe8e, #       ARABIC LETTER ALEF FINAL FORM
+        0x00b3: 0xfe8f, #       ARABIC LETTER BEH ISOLATED FORM
+        0x00b4: 0xfe91, #       ARABIC LETTER BEH INITIAL FORM
+        0x00b5: 0xfb56, #       ARABIC LETTER PEH ISOLATED FORM
+        0x00b6: 0xfb58, #       ARABIC LETTER PEH INITIAL FORM
+        0x00b7: 0xfe93, #       ARABIC LETTER TEH MARBUTA ISOLATED FORM
+        0x00b8: 0xfe95, #       ARABIC LETTER TEH ISOLATED FORM
+        0x00b9: 0xfe97, #       ARABIC LETTER TEH INITIAL FORM
+        0x00ba: 0xfb66, #       ARABIC LETTER TTEH ISOLATED FORM
+        0x00bb: 0xfb68, #       ARABIC LETTER TTEH INITIAL FORM
+        0x00bc: 0xfe99, #       ARABIC LETTER THEH ISOLATED FORM
+        0x00bd: 0xfe9b, #       ARABIC LETTER THEH INITIAL FORM
+        0x00be: 0xfe9d, #       ARABIC LETTER JEEM ISOLATED FORM
+        0x00bf: 0xfe9f, #       ARABIC LETTER JEEM INITIAL FORM
+        0x00c0: 0xfb7a, #       ARABIC LETTER TCHEH ISOLATED FORM
+        0x00c1: 0xfb7c, #       ARABIC LETTER TCHEH INITIAL FORM
+        0x00c2: 0xfea1, #       ARABIC LETTER HAH ISOLATED FORM
+        0x00c3: 0xfea3, #       ARABIC LETTER HAH INITIAL FORM
+        0x00c4: 0xfea5, #       ARABIC LETTER KHAH ISOLATED FORM
+        0x00c5: 0xfea7, #       ARABIC LETTER KHAH INITIAL FORM
+        0x00c6: 0xfea9, #       ARABIC LETTER DAL ISOLATED FORM
+        0x00c7: 0xfb84, #       ARABIC LETTER DAHAL ISOLATED FORM
+        0x00c8: 0xfeab, #       ARABIC LETTER THAL ISOLATED FORM
+        0x00c9: 0xfead, #       ARABIC LETTER REH ISOLATED FORM
+        0x00ca: 0xfb8c, #       ARABIC LETTER RREH ISOLATED FORM
+        0x00cb: 0xfeaf, #       ARABIC LETTER ZAIN ISOLATED FORM
+        0x00cc: 0xfb8a, #       ARABIC LETTER JEH ISOLATED FORM
+        0x00cd: 0xfeb1, #       ARABIC LETTER SEEN ISOLATED FORM
+        0x00ce: 0xfeb3, #       ARABIC LETTER SEEN INITIAL FORM
+        0x00cf: 0xfeb5, #       ARABIC LETTER SHEEN ISOLATED FORM
+        0x00d0: 0xfeb7, #       ARABIC LETTER SHEEN INITIAL FORM
+        0x00d1: 0xfeb9, #       ARABIC LETTER SAD ISOLATED FORM
+        0x00d2: 0xfebb, #       ARABIC LETTER SAD INITIAL FORM
+        0x00d3: 0xfebd, #       ARABIC LETTER DAD ISOLATED FORM
+        0x00d4: 0xfebf, #       ARABIC LETTER DAD INITIAL FORM
+        0x00d5: 0xfec1, #       ARABIC LETTER TAH ISOLATED FORM
+        0x00d6: 0xfec5, #       ARABIC LETTER ZAH ISOLATED FORM
+        0x00d7: 0xfec9, #       ARABIC LETTER AIN ISOLATED FORM
+        0x00d8: 0xfeca, #       ARABIC LETTER AIN FINAL FORM
+        0x00d9: 0xfecb, #       ARABIC LETTER AIN INITIAL FORM
+        0x00da: 0xfecc, #       ARABIC LETTER AIN MEDIAL FORM
+        0x00db: 0xfecd, #       ARABIC LETTER GHAIN ISOLATED FORM
+        0x00dc: 0xfece, #       ARABIC LETTER GHAIN FINAL FORM
+        0x00dd: 0xfecf, #       ARABIC LETTER GHAIN INITIAL FORM
+        0x00de: 0xfed0, #       ARABIC LETTER GHAIN MEDIAL FORM
+        0x00df: 0xfed1, #       ARABIC LETTER FEH ISOLATED FORM
+        0x00e0: 0xfed3, #       ARABIC LETTER FEH INITIAL FORM
+        0x00e1: 0xfed5, #       ARABIC LETTER QAF ISOLATED FORM
+        0x00e2: 0xfed7, #       ARABIC LETTER QAF INITIAL FORM
+        0x00e3: 0xfed9, #       ARABIC LETTER KAF ISOLATED FORM
+        0x00e4: 0xfedb, #       ARABIC LETTER KAF INITIAL FORM
+        0x00e5: 0xfb92, #       ARABIC LETTER GAF ISOLATED FORM
+        0x00e6: 0xfb94, #       ARABIC LETTER GAF INITIAL FORM
+        0x00e7: 0xfedd, #       ARABIC LETTER LAM ISOLATED FORM
+        0x00e8: 0xfedf, #       ARABIC LETTER LAM INITIAL FORM
+        0x00e9: 0xfee0, #       ARABIC LETTER LAM MEDIAL FORM
+        0x00ea: 0xfee1, #       ARABIC LETTER MEEM ISOLATED FORM
+        0x00eb: 0xfee3, #       ARABIC LETTER MEEM INITIAL FORM
+        0x00ec: 0xfb9e, #       ARABIC LETTER NOON GHUNNA ISOLATED FORM
+        0x00ed: 0xfee5, #       ARABIC LETTER NOON ISOLATED FORM
+        0x00ee: 0xfee7, #       ARABIC LETTER NOON INITIAL FORM
+        0x00ef: 0xfe85, #       ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM
+        0x00f0: 0xfeed, #       ARABIC LETTER WAW ISOLATED FORM
+        0x00f1: 0xfba6, #       ARABIC LETTER HEH GOAL ISOLATED FORM
+        0x00f2: 0xfba8, #       ARABIC LETTER HEH GOAL INITIAL FORM
+        0x00f3: 0xfba9, #       ARABIC LETTER HEH GOAL MEDIAL FORM
+        0x00f4: 0xfbaa, #       ARABIC LETTER HEH DOACHASHMEE ISOLATED FORM
+        0x00f5: 0xfe80, #       ARABIC LETTER HAMZA ISOLATED FORM
+        0x00f6: 0xfe89, #       ARABIC LETTER YEH WITH HAMZA ABOVE ISOLATED FORM
+        0x00f7: 0xfe8a, #       ARABIC LETTER YEH WITH HAMZA ABOVE FINAL FORM
+        0x00f8: 0xfe8b, #       ARABIC LETTER YEH WITH HAMZA ABOVE INITIAL FORM
+        0x00f9: 0xfef1, #       ARABIC LETTER YEH ISOLATED FORM
+        0x00fa: 0xfef2, #       ARABIC LETTER YEH FINAL FORM
+        0x00fb: 0xfef3, #       ARABIC LETTER YEH INITIAL FORM
+        0x00fc: 0xfbb0, #       ARABIC LETTER YEH BARREE WITH HAMZA ABOVE ISOLATED FORM
+        0x00fd: 0xfbae, #       ARABIC LETTER YEH BARREE ISOLATED FORM
+        0x00fe: 0xfe7c, #       ARABIC SHADDA ISOLATED FORM
+        0x00ff: 0xfe7d, #       ARABIC SHADDA MEDIAL FORM
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1026.py b/depot_tools/release/win/python_24/Lib/encodings/cp1026.py
new file mode 100644
index 0000000..3aec91b4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1026.py
@@ -0,0 +1,280 @@
+""" Python Character Mapping Codec for CP1026, generated from 'CP1026.TXT' with gencodec.py (auto-generated; do not edit by hand).
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers around the C charmap codec, driven by the maps below
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # registry tuple: (encode, decode, streamreader, streamwriter)
+
+### Decoding Map -- byte value -> Unicode ordinal
+
+decoding_map = codecs.make_identity_dict(range(256))  # start from a 1:1 byte -> ordinal map, then override below
+decoding_map.update({
+        0x0004: 0x009c, # CONTROL
+        0x0005: 0x0009, # HORIZONTAL TABULATION
+        0x0006: 0x0086, # CONTROL
+        0x0007: 0x007f, # DELETE
+        0x0008: 0x0097, # CONTROL
+        0x0009: 0x008d, # CONTROL
+        0x000a: 0x008e, # CONTROL
+        0x0014: 0x009d, # CONTROL
+        0x0015: 0x0085, # CONTROL
+        0x0016: 0x0008, # BACKSPACE
+        0x0017: 0x0087, # CONTROL
+        0x001a: 0x0092, # CONTROL
+        0x001b: 0x008f, # CONTROL
+        0x0020: 0x0080, # CONTROL
+        0x0021: 0x0081, # CONTROL
+        0x0022: 0x0082, # CONTROL
+        0x0023: 0x0083, # CONTROL
+        0x0024: 0x0084, # CONTROL
+        0x0025: 0x000a, # LINE FEED
+        0x0026: 0x0017, # END OF TRANSMISSION BLOCK
+        0x0027: 0x001b, # ESCAPE
+        0x0028: 0x0088, # CONTROL
+        0x0029: 0x0089, # CONTROL
+        0x002a: 0x008a, # CONTROL
+        0x002b: 0x008b, # CONTROL
+        0x002c: 0x008c, # CONTROL
+        0x002d: 0x0005, # ENQUIRY
+        0x002e: 0x0006, # ACKNOWLEDGE
+        0x002f: 0x0007, # BELL
+        0x0030: 0x0090, # CONTROL
+        0x0031: 0x0091, # CONTROL
+        0x0032: 0x0016, # SYNCHRONOUS IDLE
+        0x0033: 0x0093, # CONTROL
+        0x0034: 0x0094, # CONTROL
+        0x0035: 0x0095, # CONTROL
+        0x0036: 0x0096, # CONTROL
+        0x0037: 0x0004, # END OF TRANSMISSION
+        0x0038: 0x0098, # CONTROL
+        0x0039: 0x0099, # CONTROL
+        0x003a: 0x009a, # CONTROL
+        0x003b: 0x009b, # CONTROL
+        0x003c: 0x0014, # DEVICE CONTROL FOUR
+        0x003d: 0x0015, # NEGATIVE ACKNOWLEDGE
+        0x003e: 0x009e, # CONTROL
+        0x003f: 0x001a, # SUBSTITUTE
+        0x0040: 0x0020, # SPACE
+        0x0041: 0x00a0, # NO-BREAK SPACE
+        0x0042: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0043: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0044: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0045: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x0046: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x0047: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0048: 0x007b, # LEFT CURLY BRACKET
+        0x0049: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x004a: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x004b: 0x002e, # FULL STOP
+        0x004c: 0x003c, # LESS-THAN SIGN
+        0x004d: 0x0028, # LEFT PARENTHESIS
+        0x004e: 0x002b, # PLUS SIGN
+        0x004f: 0x0021, # EXCLAMATION MARK
+        0x0050: 0x0026, # AMPERSAND
+        0x0051: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0052: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0053: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x0054: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x0055: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x0056: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x0057: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x0058: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x0059: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
+        0x005a: 0x011e, # LATIN CAPITAL LETTER G WITH BREVE
+        0x005b: 0x0130, # LATIN CAPITAL LETTER I WITH DOT ABOVE
+        0x005c: 0x002a, # ASTERISK
+        0x005d: 0x0029, # RIGHT PARENTHESIS
+        0x005e: 0x003b, # SEMICOLON
+        0x005f: 0x005e, # CIRCUMFLEX ACCENT
+        0x0060: 0x002d, # HYPHEN-MINUS
+        0x0061: 0x002f, # SOLIDUS
+        0x0062: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x0063: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0064: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x0065: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x0066: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x0067: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0068: 0x005b, # LEFT SQUARE BRACKET
+        0x0069: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x006a: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
+        0x006b: 0x002c, # COMMA
+        0x006c: 0x0025, # PERCENT SIGN
+        0x006d: 0x005f, # LOW LINE
+        0x006e: 0x003e, # GREATER-THAN SIGN
+        0x006f: 0x003f, # QUESTION MARK
+        0x0070: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x0071: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0072: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x0073: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x0074: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x0075: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x0076: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x0077: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x0078: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x0079: 0x0131, # LATIN SMALL LETTER DOTLESS I
+        0x007a: 0x003a, # COLON
+        0x007b: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x007c: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
+        0x007d: 0x0027, # APOSTROPHE
+        0x007e: 0x003d, # EQUALS SIGN
+        0x007f: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x0080: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x0081: 0x0061, # LATIN SMALL LETTER A
+        0x0082: 0x0062, # LATIN SMALL LETTER B
+        0x0083: 0x0063, # LATIN SMALL LETTER C
+        0x0084: 0x0064, # LATIN SMALL LETTER D
+        0x0085: 0x0065, # LATIN SMALL LETTER E
+        0x0086: 0x0066, # LATIN SMALL LETTER F
+        0x0087: 0x0067, # LATIN SMALL LETTER G
+        0x0088: 0x0068, # LATIN SMALL LETTER H
+        0x0089: 0x0069, # LATIN SMALL LETTER I
+        0x008a: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008b: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008c: 0x007d, # RIGHT CURLY BRACKET
+        0x008d: 0x0060, # GRAVE ACCENT
+        0x008e: 0x00a6, # BROKEN BAR
+        0x008f: 0x00b1, # PLUS-MINUS SIGN
+        0x0090: 0x00b0, # DEGREE SIGN
+        0x0091: 0x006a, # LATIN SMALL LETTER J
+        0x0092: 0x006b, # LATIN SMALL LETTER K
+        0x0093: 0x006c, # LATIN SMALL LETTER L
+        0x0094: 0x006d, # LATIN SMALL LETTER M
+        0x0095: 0x006e, # LATIN SMALL LETTER N
+        0x0096: 0x006f, # LATIN SMALL LETTER O
+        0x0097: 0x0070, # LATIN SMALL LETTER P
+        0x0098: 0x0071, # LATIN SMALL LETTER Q
+        0x0099: 0x0072, # LATIN SMALL LETTER R
+        0x009a: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x009b: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x009c: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x009d: 0x00b8, # CEDILLA
+        0x009e: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x009f: 0x00a4, # CURRENCY SIGN
+        0x00a0: 0x00b5, # MICRO SIGN
+        0x00a1: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x00a2: 0x0073, # LATIN SMALL LETTER S
+        0x00a3: 0x0074, # LATIN SMALL LETTER T
+        0x00a4: 0x0075, # LATIN SMALL LETTER U
+        0x00a5: 0x0076, # LATIN SMALL LETTER V
+        0x00a6: 0x0077, # LATIN SMALL LETTER W
+        0x00a7: 0x0078, # LATIN SMALL LETTER X
+        0x00a8: 0x0079, # LATIN SMALL LETTER Y
+        0x00a9: 0x007a, # LATIN SMALL LETTER Z
+        0x00aa: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ab: 0x00bf, # INVERTED QUESTION MARK
+        0x00ac: 0x005d, # RIGHT SQUARE BRACKET
+        0x00ad: 0x0024, # DOLLAR SIGN
+        0x00ae: 0x0040, # COMMERCIAL AT
+        0x00af: 0x00ae, # REGISTERED SIGN
+        0x00b0: 0x00a2, # CENT SIGN
+        0x00b1: 0x00a3, # POUND SIGN
+        0x00b2: 0x00a5, # YEN SIGN
+        0x00b3: 0x00b7, # MIDDLE DOT
+        0x00b4: 0x00a9, # COPYRIGHT SIGN
+        0x00b5: 0x00a7, # SECTION SIGN
+        0x00b7: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00b8: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00b9: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00ba: 0x00ac, # NOT SIGN
+        0x00bb: 0x007c, # VERTICAL LINE
+        0x00bc: 0x00af, # MACRON
+        0x00bd: 0x00a8, # DIAERESIS
+        0x00be: 0x00b4, # ACUTE ACCENT
+        0x00bf: 0x00d7, # MULTIPLICATION SIGN
+        0x00c0: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x00c1: 0x0041, # LATIN CAPITAL LETTER A
+        0x00c2: 0x0042, # LATIN CAPITAL LETTER B
+        0x00c3: 0x0043, # LATIN CAPITAL LETTER C
+        0x00c4: 0x0044, # LATIN CAPITAL LETTER D
+        0x00c5: 0x0045, # LATIN CAPITAL LETTER E
+        0x00c6: 0x0046, # LATIN CAPITAL LETTER F
+        0x00c7: 0x0047, # LATIN CAPITAL LETTER G
+        0x00c8: 0x0048, # LATIN CAPITAL LETTER H
+        0x00c9: 0x0049, # LATIN CAPITAL LETTER I
+        0x00ca: 0x00ad, # SOFT HYPHEN
+        0x00cb: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x00cc: 0x007e, # TILDE
+        0x00cd: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x00ce: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00cf: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x00d0: 0x011f, # LATIN SMALL LETTER G WITH BREVE
+        0x00d1: 0x004a, # LATIN CAPITAL LETTER J
+        0x00d2: 0x004b, # LATIN CAPITAL LETTER K
+        0x00d3: 0x004c, # LATIN CAPITAL LETTER L
+        0x00d4: 0x004d, # LATIN CAPITAL LETTER M
+        0x00d5: 0x004e, # LATIN CAPITAL LETTER N
+        0x00d6: 0x004f, # LATIN CAPITAL LETTER O
+        0x00d7: 0x0050, # LATIN CAPITAL LETTER P
+        0x00d8: 0x0051, # LATIN CAPITAL LETTER Q
+        0x00d9: 0x0052, # LATIN CAPITAL LETTER R
+        0x00da: 0x00b9, # SUPERSCRIPT ONE
+        0x00db: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x00dc: 0x005c, # REVERSE SOLIDUS
+        0x00dd: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x00de: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00df: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00e0: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00e1: 0x00f7, # DIVISION SIGN
+        0x00e2: 0x0053, # LATIN CAPITAL LETTER S
+        0x00e3: 0x0054, # LATIN CAPITAL LETTER T
+        0x00e4: 0x0055, # LATIN CAPITAL LETTER U
+        0x00e5: 0x0056, # LATIN CAPITAL LETTER V
+        0x00e6: 0x0057, # LATIN CAPITAL LETTER W
+        0x00e7: 0x0058, # LATIN CAPITAL LETTER X
+        0x00e8: 0x0059, # LATIN CAPITAL LETTER Y
+        0x00e9: 0x005a, # LATIN CAPITAL LETTER Z
+        0x00ea: 0x00b2, # SUPERSCRIPT TWO
+        0x00eb: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00ec: 0x0023, # NUMBER SIGN
+        0x00ed: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00ef: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00f0: 0x0030, # DIGIT ZERO
+        0x00f1: 0x0031, # DIGIT ONE
+        0x00f2: 0x0032, # DIGIT TWO
+        0x00f3: 0x0033, # DIGIT THREE
+        0x00f4: 0x0034, # DIGIT FOUR
+        0x00f5: 0x0035, # DIGIT FIVE
+        0x00f6: 0x0036, # DIGIT SIX
+        0x00f7: 0x0037, # DIGIT SEVEN
+        0x00f8: 0x0038, # DIGIT EIGHT
+        0x00f9: 0x0039, # DIGIT NINE
+        0x00fa: 0x00b3, # SUPERSCRIPT THREE
+        0x00fb: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00fc: 0x0022, # QUOTATION MARK
+        0x00fd: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00fe: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00ff: 0x009f, # CONTROL
+})
+
+### Encoding Map -- inverse table (Unicode ordinal -> byte value)
+
+encoding_map = codecs.make_encoding_map(decoding_map)  # built by reversing the decoding map
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1140.py b/depot_tools/release/win/python_24/Lib/encodings/cp1140.py
new file mode 100644
index 0000000..5fd31a6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1140.py
@@ -0,0 +1,44 @@
+""" Python Character Mapping Codec for cp1140 (the cp037 mapping plus the euro sign).
+
+Written by Brian Quinlan(brian@sweetapp.com). NO WARRANTY.
+"""
+
+import codecs
+import copy
+import cp037
+
+### Codec APIs -- thin wrappers around the C charmap codec, driven by the maps below
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # registry tuple: (encode, decode, streamreader, streamwriter)
+
+### Decoding Map -- cp037's table with byte 0x9f remapped to the euro sign
+
+decoding_map = copy.copy(cp037.decoding_map)  # copy so cp037's own table is not mutated
+
+decoding_map.update({
+        0x009f: 0x20ac # EURO SIGN
+})
+
+### Encoding Map -- inverse table (Unicode ordinal -> byte value)
+
+encoding_map = codecs.make_encoding_map(decoding_map)  # built by reversing the decoding map
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1250.py b/depot_tools/release/win/python_24/Lib/encodings/cp1250.py
new file mode 100644
index 0000000..85774ed
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1250.py
@@ -0,0 +1,123 @@
+""" Python Character Mapping Codec for CP1250, generated from 'CP1250.TXT' with gencodec.py (auto-generated; do not edit by hand).
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers around the C charmap codec, driven by the maps below
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # registry tuple: (encode, decode, streamreader, streamwriter)
+
+### Decoding Map -- byte value -> Unicode ordinal; None marks undefined bytes
+
+decoding_map = codecs.make_identity_dict(range(256))  # start from a 1:1 byte -> ordinal map, then override below
+decoding_map.update({
+        0x0080: 0x20ac, # EURO SIGN
+        0x0081: None,   # UNDEFINED
+        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
+        0x0083: None,   # UNDEFINED
+        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
+        0x0086: 0x2020, # DAGGER
+        0x0087: 0x2021, # DOUBLE DAGGER
+        0x0088: None,   # UNDEFINED
+        0x0089: 0x2030, # PER MILLE SIGN
+        0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
+        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+        0x008c: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
+        0x008d: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
+        0x008e: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
+        0x008f: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
+        0x0090: None,   # UNDEFINED
+        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x0095: 0x2022, # BULLET
+        0x0096: 0x2013, # EN DASH
+        0x0097: 0x2014, # EM DASH
+        0x0098: None,   # UNDEFINED
+        0x0099: 0x2122, # TRADE MARK SIGN
+        0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON
+        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+        0x009c: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
+        0x009d: 0x0165, # LATIN SMALL LETTER T WITH CARON
+        0x009e: 0x017e, # LATIN SMALL LETTER Z WITH CARON
+        0x009f: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
+        0x00a1: 0x02c7, # CARON
+        0x00a2: 0x02d8, # BREVE
+        0x00a3: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
+        0x00a5: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
+        0x00aa: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
+        0x00af: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00b2: 0x02db, # OGONEK
+        0x00b3: 0x0142, # LATIN SMALL LETTER L WITH STROKE
+        0x00b9: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
+        0x00ba: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
+        0x00bc: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
+        0x00bd: 0x02dd, # DOUBLE ACUTE ACCENT
+        0x00be: 0x013e, # LATIN SMALL LETTER L WITH CARON
+        0x00bf: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00c0: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
+        0x00c3: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
+        0x00c5: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
+        0x00c6: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
+        0x00c8: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
+        0x00ca: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
+        0x00cc: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
+        0x00cf: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
+        0x00d0: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
+        0x00d1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
+        0x00d2: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
+        0x00d5: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
+        0x00d8: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
+        0x00d9: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
+        0x00db: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
+        0x00de: 0x0162, # LATIN CAPITAL LETTER T WITH CEDILLA
+        0x00e0: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
+        0x00e3: 0x0103, # LATIN SMALL LETTER A WITH BREVE
+        0x00e5: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
+        0x00e6: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
+        0x00e8: 0x010d, # LATIN SMALL LETTER C WITH CARON
+        0x00ea: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
+        0x00ec: 0x011b, # LATIN SMALL LETTER E WITH CARON
+        0x00ef: 0x010f, # LATIN SMALL LETTER D WITH CARON
+        0x00f0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
+        0x00f1: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
+        0x00f2: 0x0148, # LATIN SMALL LETTER N WITH CARON
+        0x00f5: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
+        0x00f8: 0x0159, # LATIN SMALL LETTER R WITH CARON
+        0x00f9: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
+        0x00fb: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
+        0x00fe: 0x0163, # LATIN SMALL LETTER T WITH CEDILLA
+        0x00ff: 0x02d9, # DOT ABOVE
+})
+
+### Encoding Map -- inverse table (Unicode ordinal -> byte value)
+
+encoding_map = codecs.make_encoding_map(decoding_map)  # built by reversing the decoding map
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1251.py b/depot_tools/release/win/python_24/Lib/encodings/cp1251.py
new file mode 100644
index 0000000..f191b06
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1251.py
@@ -0,0 +1,157 @@
+""" Python Character Mapping Codec for CP1251, generated from 'CP1251.TXT' with gencodec.py (auto-generated; do not edit by hand).
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers around the C charmap codec, driven by the maps below
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # registry tuple: (encode, decode, streamreader, streamwriter)
+
+### Decoding Map -- byte value -> Unicode ordinal; None marks undefined bytes
+
+decoding_map = codecs.make_identity_dict(range(256))  # start from a 1:1 byte -> ordinal map, then override below
+decoding_map.update({
+        0x0080: 0x0402, # CYRILLIC CAPITAL LETTER DJE
+        0x0081: 0x0403, # CYRILLIC CAPITAL LETTER GJE
+        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
+        0x0083: 0x0453, # CYRILLIC SMALL LETTER GJE
+        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
+        0x0086: 0x2020, # DAGGER
+        0x0087: 0x2021, # DOUBLE DAGGER
+        0x0088: 0x20ac, # EURO SIGN
+        0x0089: 0x2030, # PER MILLE SIGN
+        0x008a: 0x0409, # CYRILLIC CAPITAL LETTER LJE
+        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+        0x008c: 0x040a, # CYRILLIC CAPITAL LETTER NJE
+        0x008d: 0x040c, # CYRILLIC CAPITAL LETTER KJE
+        0x008e: 0x040b, # CYRILLIC CAPITAL LETTER TSHE
+        0x008f: 0x040f, # CYRILLIC CAPITAL LETTER DZHE
+        0x0090: 0x0452, # CYRILLIC SMALL LETTER DJE
+        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x0095: 0x2022, # BULLET
+        0x0096: 0x2013, # EN DASH
+        0x0097: 0x2014, # EM DASH
+        0x0098: None,   # UNDEFINED
+        0x0099: 0x2122, # TRADE MARK SIGN
+        0x009a: 0x0459, # CYRILLIC SMALL LETTER LJE
+        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+        0x009c: 0x045a, # CYRILLIC SMALL LETTER NJE
+        0x009d: 0x045c, # CYRILLIC SMALL LETTER KJE
+        0x009e: 0x045b, # CYRILLIC SMALL LETTER TSHE
+        0x009f: 0x045f, # CYRILLIC SMALL LETTER DZHE
+        0x00a1: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
+        0x00a2: 0x045e, # CYRILLIC SMALL LETTER SHORT U
+        0x00a3: 0x0408, # CYRILLIC CAPITAL LETTER JE
+        0x00a5: 0x0490, # CYRILLIC CAPITAL LETTER GHE WITH UPTURN
+        0x00a8: 0x0401, # CYRILLIC CAPITAL LETTER IO
+        0x00aa: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
+        0x00af: 0x0407, # CYRILLIC CAPITAL LETTER YI
+        0x00b2: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00b3: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00b4: 0x0491, # CYRILLIC SMALL LETTER GHE WITH UPTURN
+        0x00b8: 0x0451, # CYRILLIC SMALL LETTER IO
+        0x00b9: 0x2116, # NUMERO SIGN
+        0x00ba: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
+        0x00bc: 0x0458, # CYRILLIC SMALL LETTER JE
+        0x00bd: 0x0405, # CYRILLIC CAPITAL LETTER DZE
+        0x00be: 0x0455, # CYRILLIC SMALL LETTER DZE
+        0x00bf: 0x0457, # CYRILLIC SMALL LETTER YI
+        0x00c0: 0x0410, # CYRILLIC CAPITAL LETTER A
+        0x00c1: 0x0411, # CYRILLIC CAPITAL LETTER BE
+        0x00c2: 0x0412, # CYRILLIC CAPITAL LETTER VE
+        0x00c3: 0x0413, # CYRILLIC CAPITAL LETTER GHE
+        0x00c4: 0x0414, # CYRILLIC CAPITAL LETTER DE
+        0x00c5: 0x0415, # CYRILLIC CAPITAL LETTER IE
+        0x00c6: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
+        0x00c7: 0x0417, # CYRILLIC CAPITAL LETTER ZE
+        0x00c8: 0x0418, # CYRILLIC CAPITAL LETTER I
+        0x00c9: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
+        0x00ca: 0x041a, # CYRILLIC CAPITAL LETTER KA
+        0x00cb: 0x041b, # CYRILLIC CAPITAL LETTER EL
+        0x00cc: 0x041c, # CYRILLIC CAPITAL LETTER EM
+        0x00cd: 0x041d, # CYRILLIC CAPITAL LETTER EN
+        0x00ce: 0x041e, # CYRILLIC CAPITAL LETTER O
+        0x00cf: 0x041f, # CYRILLIC CAPITAL LETTER PE
+        0x00d0: 0x0420, # CYRILLIC CAPITAL LETTER ER
+        0x00d1: 0x0421, # CYRILLIC CAPITAL LETTER ES
+        0x00d2: 0x0422, # CYRILLIC CAPITAL LETTER TE
+        0x00d3: 0x0423, # CYRILLIC CAPITAL LETTER U
+        0x00d4: 0x0424, # CYRILLIC CAPITAL LETTER EF
+        0x00d5: 0x0425, # CYRILLIC CAPITAL LETTER HA
+        0x00d6: 0x0426, # CYRILLIC CAPITAL LETTER TSE
+        0x00d7: 0x0427, # CYRILLIC CAPITAL LETTER CHE
+        0x00d8: 0x0428, # CYRILLIC CAPITAL LETTER SHA
+        0x00d9: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
+        0x00da: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
+        0x00db: 0x042b, # CYRILLIC CAPITAL LETTER YERU
+        0x00dc: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
+        0x00dd: 0x042d, # CYRILLIC CAPITAL LETTER E
+        0x00de: 0x042e, # CYRILLIC CAPITAL LETTER YU
+        0x00df: 0x042f, # CYRILLIC CAPITAL LETTER YA
+        0x00e0: 0x0430, # CYRILLIC SMALL LETTER A
+        0x00e1: 0x0431, # CYRILLIC SMALL LETTER BE
+        0x00e2: 0x0432, # CYRILLIC SMALL LETTER VE
+        0x00e3: 0x0433, # CYRILLIC SMALL LETTER GHE
+        0x00e4: 0x0434, # CYRILLIC SMALL LETTER DE
+        0x00e5: 0x0435, # CYRILLIC SMALL LETTER IE
+        0x00e6: 0x0436, # CYRILLIC SMALL LETTER ZHE
+        0x00e7: 0x0437, # CYRILLIC SMALL LETTER ZE
+        0x00e8: 0x0438, # CYRILLIC SMALL LETTER I
+        0x00e9: 0x0439, # CYRILLIC SMALL LETTER SHORT I
+        0x00ea: 0x043a, # CYRILLIC SMALL LETTER KA
+        0x00eb: 0x043b, # CYRILLIC SMALL LETTER EL
+        0x00ec: 0x043c, # CYRILLIC SMALL LETTER EM
+        0x00ed: 0x043d, # CYRILLIC SMALL LETTER EN
+        0x00ee: 0x043e, # CYRILLIC SMALL LETTER O
+        0x00ef: 0x043f, # CYRILLIC SMALL LETTER PE
+        0x00f0: 0x0440, # CYRILLIC SMALL LETTER ER
+        0x00f1: 0x0441, # CYRILLIC SMALL LETTER ES
+        0x00f2: 0x0442, # CYRILLIC SMALL LETTER TE
+        0x00f3: 0x0443, # CYRILLIC SMALL LETTER U
+        0x00f4: 0x0444, # CYRILLIC SMALL LETTER EF
+        0x00f5: 0x0445, # CYRILLIC SMALL LETTER HA
+        0x00f6: 0x0446, # CYRILLIC SMALL LETTER TSE
+        0x00f7: 0x0447, # CYRILLIC SMALL LETTER CHE
+        0x00f8: 0x0448, # CYRILLIC SMALL LETTER SHA
+        0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
+        0x00fa: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
+        0x00fb: 0x044b, # CYRILLIC SMALL LETTER YERU
+        0x00fc: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
+        0x00fd: 0x044d, # CYRILLIC SMALL LETTER E
+        0x00fe: 0x044e, # CYRILLIC SMALL LETTER YU
+        0x00ff: 0x044f, # CYRILLIC SMALL LETTER YA
+})
+
+### Encoding Map -- inverse table (Unicode ordinal -> byte value)
+
+encoding_map = codecs.make_encoding_map(decoding_map)  # built by reversing the decoding map
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1252.py b/depot_tools/release/win/python_24/Lib/encodings/cp1252.py
new file mode 100644
index 0000000..d1ecaba8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1252.py
@@ -0,0 +1,76 @@
+""" Python Character Mapping Codec for CP1252, generated from 'CP1252.TXT' with gencodec.py (auto-generated; do not edit by hand).
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers around the C charmap codec, driven by the maps below
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # registry tuple: (encode, decode, streamreader, streamwriter)
+
+### Decoding Map -- identity except the 0x80-0x9f range; None marks undefined bytes
+
+decoding_map = codecs.make_identity_dict(range(256))  # start from a 1:1 byte -> ordinal map, then override below
+decoding_map.update({
+        0x0080: 0x20ac, # EURO SIGN
+        0x0081: None,   # UNDEFINED
+        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
+        0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
+        0x0086: 0x2020, # DAGGER
+        0x0087: 0x2021, # DOUBLE DAGGER
+        0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
+        0x0089: 0x2030, # PER MILLE SIGN
+        0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
+        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+        0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE
+        0x008d: None,   # UNDEFINED
+        0x008e: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
+        0x008f: None,   # UNDEFINED
+        0x0090: None,   # UNDEFINED
+        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x0095: 0x2022, # BULLET
+        0x0096: 0x2013, # EN DASH
+        0x0097: 0x2014, # EM DASH
+        0x0098: 0x02dc, # SMALL TILDE
+        0x0099: 0x2122, # TRADE MARK SIGN
+        0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON
+        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+        0x009c: 0x0153, # LATIN SMALL LIGATURE OE
+        0x009d: None,   # UNDEFINED
+        0x009e: 0x017e, # LATIN SMALL LETTER Z WITH CARON
+        0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
+})
+
+### Encoding Map -- inverse table (Unicode ordinal -> byte value)
+
+encoding_map = codecs.make_encoding_map(decoding_map)  # built by reversing the decoding map
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1253.py b/depot_tools/release/win/python_24/Lib/encodings/cp1253.py
new file mode 100644
index 0000000..22c70df
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1253.py
@@ -0,0 +1,151 @@
+""" Python Character Mapping Codec generated from 'CP1253.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
# Byte -> code point table: start from the 0x00-0xFF identity mapping and
# patch in the positions where this (Greek Windows) code page differs from
# Latin-1.  None marks byte values that are undefined in this code page.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
        0x0080: 0x20ac, # EURO SIGN
        0x0081: None,   # UNDEFINED
        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
        0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
        0x0086: 0x2020, # DAGGER
        0x0087: 0x2021, # DOUBLE DAGGER
        0x0088: None,   # UNDEFINED
        0x0089: 0x2030, # PER MILLE SIGN
        0x008a: None,   # UNDEFINED
        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
        0x008c: None,   # UNDEFINED
        0x008d: None,   # UNDEFINED
        0x008e: None,   # UNDEFINED
        0x008f: None,   # UNDEFINED
        0x0090: None,   # UNDEFINED
        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
        0x0095: 0x2022, # BULLET
        0x0096: 0x2013, # EN DASH
        0x0097: 0x2014, # EM DASH
        0x0098: None,   # UNDEFINED
        0x0099: 0x2122, # TRADE MARK SIGN
        0x009a: None,   # UNDEFINED
        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
        0x009c: None,   # UNDEFINED
        0x009d: None,   # UNDEFINED
        0x009e: None,   # UNDEFINED
        0x009f: None,   # UNDEFINED
        0x00a1: 0x0385, # GREEK DIALYTIKA TONOS
        0x00a2: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
        0x00aa: None,   # UNDEFINED
        0x00af: 0x2015, # HORIZONTAL BAR
        0x00b4: 0x0384, # GREEK TONOS
        0x00b8: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
        0x00b9: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
        0x00ba: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
        0x00bc: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
        0x00be: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
        0x00bf: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
        0x00c0: 0x0390, # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
        0x00c1: 0x0391, # GREEK CAPITAL LETTER ALPHA
        0x00c2: 0x0392, # GREEK CAPITAL LETTER BETA
        0x00c3: 0x0393, # GREEK CAPITAL LETTER GAMMA
        0x00c4: 0x0394, # GREEK CAPITAL LETTER DELTA
        0x00c5: 0x0395, # GREEK CAPITAL LETTER EPSILON
        0x00c6: 0x0396, # GREEK CAPITAL LETTER ZETA
        0x00c7: 0x0397, # GREEK CAPITAL LETTER ETA
        0x00c8: 0x0398, # GREEK CAPITAL LETTER THETA
        0x00c9: 0x0399, # GREEK CAPITAL LETTER IOTA
        0x00ca: 0x039a, # GREEK CAPITAL LETTER KAPPA
        0x00cb: 0x039b, # GREEK CAPITAL LETTER LAMDA
        0x00cc: 0x039c, # GREEK CAPITAL LETTER MU
        0x00cd: 0x039d, # GREEK CAPITAL LETTER NU
        0x00ce: 0x039e, # GREEK CAPITAL LETTER XI
        0x00cf: 0x039f, # GREEK CAPITAL LETTER OMICRON
        0x00d0: 0x03a0, # GREEK CAPITAL LETTER PI
        0x00d1: 0x03a1, # GREEK CAPITAL LETTER RHO
        0x00d2: None,   # UNDEFINED
        0x00d3: 0x03a3, # GREEK CAPITAL LETTER SIGMA
        0x00d4: 0x03a4, # GREEK CAPITAL LETTER TAU
        0x00d5: 0x03a5, # GREEK CAPITAL LETTER UPSILON
        0x00d6: 0x03a6, # GREEK CAPITAL LETTER PHI
        0x00d7: 0x03a7, # GREEK CAPITAL LETTER CHI
        0x00d8: 0x03a8, # GREEK CAPITAL LETTER PSI
        0x00d9: 0x03a9, # GREEK CAPITAL LETTER OMEGA
        0x00da: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
        0x00db: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
        0x00dc: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
        0x00dd: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
        0x00de: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
        0x00df: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
        0x00e0: 0x03b0, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
        0x00e1: 0x03b1, # GREEK SMALL LETTER ALPHA
        0x00e2: 0x03b2, # GREEK SMALL LETTER BETA
        0x00e3: 0x03b3, # GREEK SMALL LETTER GAMMA
        0x00e4: 0x03b4, # GREEK SMALL LETTER DELTA
        0x00e5: 0x03b5, # GREEK SMALL LETTER EPSILON
        0x00e6: 0x03b6, # GREEK SMALL LETTER ZETA
        0x00e7: 0x03b7, # GREEK SMALL LETTER ETA
        0x00e8: 0x03b8, # GREEK SMALL LETTER THETA
        0x00e9: 0x03b9, # GREEK SMALL LETTER IOTA
        0x00ea: 0x03ba, # GREEK SMALL LETTER KAPPA
        0x00eb: 0x03bb, # GREEK SMALL LETTER LAMDA
        0x00ec: 0x03bc, # GREEK SMALL LETTER MU
        0x00ed: 0x03bd, # GREEK SMALL LETTER NU
        0x00ee: 0x03be, # GREEK SMALL LETTER XI
        0x00ef: 0x03bf, # GREEK SMALL LETTER OMICRON
        0x00f0: 0x03c0, # GREEK SMALL LETTER PI
        0x00f1: 0x03c1, # GREEK SMALL LETTER RHO
        0x00f2: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
        0x00f3: 0x03c3, # GREEK SMALL LETTER SIGMA
        0x00f4: 0x03c4, # GREEK SMALL LETTER TAU
        0x00f5: 0x03c5, # GREEK SMALL LETTER UPSILON
        0x00f6: 0x03c6, # GREEK SMALL LETTER PHI
        0x00f7: 0x03c7, # GREEK SMALL LETTER CHI
        0x00f8: 0x03c8, # GREEK SMALL LETTER PSI
        0x00f9: 0x03c9, # GREEK SMALL LETTER OMEGA
        0x00fa: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
        0x00fb: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
        0x00fc: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
        0x00fd: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
        0x00fe: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
        0x00ff: None,   # UNDEFINED
})

### Encoding Map

# Reverse (code point -> byte) table derived from decoding_map by the
# codecs helper; used by Codec.encode above.
encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1254.py b/depot_tools/release/win/python_24/Lib/encodings/cp1254.py
new file mode 100644
index 0000000..718ed27
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1254.py
@@ -0,0 +1,82 @@
+""" Python Character Mapping Codec generated from 'CP1254.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Charmap codec for code page 1254, driven by the generated
    module-level encoding_map / decoding_map tables below."""

    def encode(self, input, errors='strict'):
        """Encode a unicode string to cp1254 bytes via encoding_map."""
        encoded = codecs.charmap_encode(input, errors, encoding_map)
        return encoded

    def decode(self, input, errors='strict'):
        """Decode cp1254 bytes to a unicode string via decoding_map."""
        decoded = codecs.charmap_decode(input, errors, decoding_map)
        return decoded
+
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer: inherits encode() from the charmap Codec above."""
+
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader: inherits decode() from the charmap Codec above."""
+
+### encodings module API
+
def getregentry():
    """Return the codec-registry 4-tuple:
    (encoder, decoder, stream reader, stream writer)."""
    encoder = Codec().encode
    decoder = Codec().decode
    return (encoder, decoder, StreamReader, StreamWriter)
+
+### Decoding Map
+
# Byte -> code point table for cp1254: start from the 0x00-0xFF identity
# mapping and patch in the positions that differ from Latin-1.  None marks
# byte values that are undefined in this code page.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
        0x0080: 0x20ac, # EURO SIGN
        0x0081: None,   # UNDEFINED
        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
        0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
        0x0086: 0x2020, # DAGGER
        0x0087: 0x2021, # DOUBLE DAGGER
        0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
        0x0089: 0x2030, # PER MILLE SIGN
        0x008a: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
        0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE
        0x008d: None,   # UNDEFINED
        0x008e: None,   # UNDEFINED
        0x008f: None,   # UNDEFINED
        0x0090: None,   # UNDEFINED
        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
        0x0095: 0x2022, # BULLET
        0x0096: 0x2013, # EN DASH
        0x0097: 0x2014, # EM DASH
        0x0098: 0x02dc, # SMALL TILDE
        0x0099: 0x2122, # TRADE MARK SIGN
        0x009a: 0x0161, # LATIN SMALL LETTER S WITH CARON
        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
        0x009c: 0x0153, # LATIN SMALL LIGATURE OE
        0x009d: None,   # UNDEFINED
        0x009e: None,   # UNDEFINED
        0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
        0x00d0: 0x011e, # LATIN CAPITAL LETTER G WITH BREVE
        0x00dd: 0x0130, # LATIN CAPITAL LETTER I WITH DOT ABOVE
        0x00de: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
        0x00f0: 0x011f, # LATIN SMALL LETTER G WITH BREVE
        0x00fd: 0x0131, # LATIN SMALL LETTER DOTLESS I
        0x00fe: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
})

### Encoding Map

# Reverse (code point -> byte) table derived from decoding_map by the
# codecs helper; used by Codec.encode above.
encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1255.py b/depot_tools/release/win/python_24/Lib/encodings/cp1255.py
new file mode 100644
index 0000000..b20f5da
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1255.py
@@ -0,0 +1,143 @@
+""" Python Character Mapping Codec generated from 'CP1255.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Charmap codec for code page 1255, driven by the generated
    module-level encoding_map / decoding_map tables below."""

    def encode(self, input, errors='strict'):
        """Encode a unicode string to cp1255 bytes via encoding_map."""
        encoded = codecs.charmap_encode(input, errors, encoding_map)
        return encoded

    def decode(self, input, errors='strict'):
        """Decode cp1255 bytes to a unicode string via decoding_map."""
        decoded = codecs.charmap_decode(input, errors, decoding_map)
        return decoded
+
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer: inherits encode() from the charmap Codec above."""
+
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader: inherits decode() from the charmap Codec above."""
+
+### encodings module API
+
def getregentry():
    """Return the codec-registry 4-tuple:
    (encoder, decoder, stream reader, stream writer)."""
    encoder = Codec().encode
    decoder = Codec().decode
    return (encoder, decoder, StreamReader, StreamWriter)
+
+### Decoding Map
+
# Byte -> code point table for cp1255: start from the 0x00-0xFF identity
# mapping and patch in the positions that differ from Latin-1.  None marks
# byte values that are undefined in this code page.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
        0x0080: 0x20ac, # EURO SIGN
        0x0081: None,   # UNDEFINED
        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
        0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
        0x0086: 0x2020, # DAGGER
        0x0087: 0x2021, # DOUBLE DAGGER
        0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
        0x0089: 0x2030, # PER MILLE SIGN
        0x008a: None,   # UNDEFINED
        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
        0x008c: None,   # UNDEFINED
        0x008d: None,   # UNDEFINED
        0x008e: None,   # UNDEFINED
        0x008f: None,   # UNDEFINED
        0x0090: None,   # UNDEFINED
        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
        0x0095: 0x2022, # BULLET
        0x0096: 0x2013, # EN DASH
        0x0097: 0x2014, # EM DASH
        0x0098: 0x02dc, # SMALL TILDE
        0x0099: 0x2122, # TRADE MARK SIGN
        0x009a: None,   # UNDEFINED
        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
        0x009c: None,   # UNDEFINED
        0x009d: None,   # UNDEFINED
        0x009e: None,   # UNDEFINED
        0x009f: None,   # UNDEFINED
        0x00a4: 0x20aa, # NEW SHEQEL SIGN
        0x00aa: 0x00d7, # MULTIPLICATION SIGN
        0x00ba: 0x00f7, # DIVISION SIGN
        0x00c0: 0x05b0, # HEBREW POINT SHEVA
        0x00c1: 0x05b1, # HEBREW POINT HATAF SEGOL
        0x00c2: 0x05b2, # HEBREW POINT HATAF PATAH
        0x00c3: 0x05b3, # HEBREW POINT HATAF QAMATS
        0x00c4: 0x05b4, # HEBREW POINT HIRIQ
        0x00c5: 0x05b5, # HEBREW POINT TSERE
        0x00c6: 0x05b6, # HEBREW POINT SEGOL
        0x00c7: 0x05b7, # HEBREW POINT PATAH
        0x00c8: 0x05b8, # HEBREW POINT QAMATS
        0x00c9: 0x05b9, # HEBREW POINT HOLAM
        0x00ca: None,   # UNDEFINED
        0x00cb: 0x05bb, # HEBREW POINT QUBUTS
        0x00cc: 0x05bc, # HEBREW POINT DAGESH OR MAPIQ
        0x00cd: 0x05bd, # HEBREW POINT METEG
        0x00ce: 0x05be, # HEBREW PUNCTUATION MAQAF
        0x00cf: 0x05bf, # HEBREW POINT RAFE
        0x00d0: 0x05c0, # HEBREW PUNCTUATION PASEQ
        0x00d1: 0x05c1, # HEBREW POINT SHIN DOT
        0x00d2: 0x05c2, # HEBREW POINT SIN DOT
        0x00d3: 0x05c3, # HEBREW PUNCTUATION SOF PASUQ
        0x00d4: 0x05f0, # HEBREW LIGATURE YIDDISH DOUBLE VAV
        0x00d5: 0x05f1, # HEBREW LIGATURE YIDDISH VAV YOD
        0x00d6: 0x05f2, # HEBREW LIGATURE YIDDISH DOUBLE YOD
        0x00d7: 0x05f3, # HEBREW PUNCTUATION GERESH
        0x00d8: 0x05f4, # HEBREW PUNCTUATION GERSHAYIM
        0x00d9: None,   # UNDEFINED
        0x00da: None,   # UNDEFINED
        0x00db: None,   # UNDEFINED
        0x00dc: None,   # UNDEFINED
        0x00dd: None,   # UNDEFINED
        0x00de: None,   # UNDEFINED
        0x00df: None,   # UNDEFINED
        0x00e0: 0x05d0, # HEBREW LETTER ALEF
        0x00e1: 0x05d1, # HEBREW LETTER BET
        0x00e2: 0x05d2, # HEBREW LETTER GIMEL
        0x00e3: 0x05d3, # HEBREW LETTER DALET
        0x00e4: 0x05d4, # HEBREW LETTER HE
        0x00e5: 0x05d5, # HEBREW LETTER VAV
        0x00e6: 0x05d6, # HEBREW LETTER ZAYIN
        0x00e7: 0x05d7, # HEBREW LETTER HET
        0x00e8: 0x05d8, # HEBREW LETTER TET
        0x00e9: 0x05d9, # HEBREW LETTER YOD
        0x00ea: 0x05da, # HEBREW LETTER FINAL KAF
        0x00eb: 0x05db, # HEBREW LETTER KAF
        0x00ec: 0x05dc, # HEBREW LETTER LAMED
        0x00ed: 0x05dd, # HEBREW LETTER FINAL MEM
        0x00ee: 0x05de, # HEBREW LETTER MEM
        0x00ef: 0x05df, # HEBREW LETTER FINAL NUN
        0x00f0: 0x05e0, # HEBREW LETTER NUN
        0x00f1: 0x05e1, # HEBREW LETTER SAMEKH
        0x00f2: 0x05e2, # HEBREW LETTER AYIN
        0x00f3: 0x05e3, # HEBREW LETTER FINAL PE
        0x00f4: 0x05e4, # HEBREW LETTER PE
        0x00f5: 0x05e5, # HEBREW LETTER FINAL TSADI
        0x00f6: 0x05e6, # HEBREW LETTER TSADI
        0x00f7: 0x05e7, # HEBREW LETTER QOF
        0x00f8: 0x05e8, # HEBREW LETTER RESH
        0x00f9: 0x05e9, # HEBREW LETTER SHIN
        0x00fa: 0x05ea, # HEBREW LETTER TAV
        0x00fb: None,   # UNDEFINED
        0x00fc: None,   # UNDEFINED
        0x00fd: 0x200e, # LEFT-TO-RIGHT MARK
        0x00fe: 0x200f, # RIGHT-TO-LEFT MARK
        0x00ff: None,   # UNDEFINED
})

### Encoding Map

# Reverse (code point -> byte) table derived from decoding_map by the
# codecs helper; used by Codec.encode above.
encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1256.py b/depot_tools/release/win/python_24/Lib/encodings/cp1256.py
new file mode 100644
index 0000000..f3e694c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1256.py
@@ -0,0 +1,129 @@
+""" Python Character Mapping Codec generated from 'CP1256.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Charmap codec for code page 1256, driven by the generated
    module-level encoding_map / decoding_map tables below."""

    def encode(self, input, errors='strict'):
        """Encode a unicode string to cp1256 bytes via encoding_map."""
        encoded = codecs.charmap_encode(input, errors, encoding_map)
        return encoded

    def decode(self, input, errors='strict'):
        """Decode cp1256 bytes to a unicode string via decoding_map."""
        decoded = codecs.charmap_decode(input, errors, decoding_map)
        return decoded
+
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer: inherits encode() from the charmap Codec above."""
+
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader: inherits decode() from the charmap Codec above."""
+
+### encodings module API
+
def getregentry():
    """Return the codec-registry 4-tuple:
    (encoder, decoder, stream reader, stream writer)."""
    encoder = Codec().encode
    decoder = Codec().decode
    return (encoder, decoder, StreamReader, StreamWriter)
+
+### Decoding Map
+
# Byte -> code point table for cp1256: start from the 0x00-0xFF identity
# mapping and patch in the positions that differ from Latin-1.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
        0x0080: 0x20ac, # EURO SIGN
        0x0081: 0x067e, # ARABIC LETTER PEH
        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
        0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
        0x0086: 0x2020, # DAGGER
        0x0087: 0x2021, # DOUBLE DAGGER
        0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
        0x0089: 0x2030, # PER MILLE SIGN
        0x008a: 0x0679, # ARABIC LETTER TTEH
        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
        0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE
        0x008d: 0x0686, # ARABIC LETTER TCHEH
        0x008e: 0x0698, # ARABIC LETTER JEH
        0x008f: 0x0688, # ARABIC LETTER DDAL
        0x0090: 0x06af, # ARABIC LETTER GAF
        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
        0x0095: 0x2022, # BULLET
        0x0096: 0x2013, # EN DASH
        0x0097: 0x2014, # EM DASH
        0x0098: 0x06a9, # ARABIC LETTER KEHEH
        0x0099: 0x2122, # TRADE MARK SIGN
        0x009a: 0x0691, # ARABIC LETTER RREH
        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
        0x009c: 0x0153, # LATIN SMALL LIGATURE OE
        0x009d: 0x200c, # ZERO WIDTH NON-JOINER
        0x009e: 0x200d, # ZERO WIDTH JOINER
        0x009f: 0x06ba, # ARABIC LETTER NOON GHUNNA
        0x00a1: 0x060c, # ARABIC COMMA
        0x00aa: 0x06be, # ARABIC LETTER HEH DOACHASHMEE
        0x00ba: 0x061b, # ARABIC SEMICOLON
        0x00bf: 0x061f, # ARABIC QUESTION MARK
        0x00c0: 0x06c1, # ARABIC LETTER HEH GOAL
        0x00c1: 0x0621, # ARABIC LETTER HAMZA
        0x00c2: 0x0622, # ARABIC LETTER ALEF WITH MADDA ABOVE
        0x00c3: 0x0623, # ARABIC LETTER ALEF WITH HAMZA ABOVE
        0x00c4: 0x0624, # ARABIC LETTER WAW WITH HAMZA ABOVE
        0x00c5: 0x0625, # ARABIC LETTER ALEF WITH HAMZA BELOW
        0x00c6: 0x0626, # ARABIC LETTER YEH WITH HAMZA ABOVE
        0x00c7: 0x0627, # ARABIC LETTER ALEF
        0x00c8: 0x0628, # ARABIC LETTER BEH
        0x00c9: 0x0629, # ARABIC LETTER TEH MARBUTA
        0x00ca: 0x062a, # ARABIC LETTER TEH
        0x00cb: 0x062b, # ARABIC LETTER THEH
        0x00cc: 0x062c, # ARABIC LETTER JEEM
        0x00cd: 0x062d, # ARABIC LETTER HAH
        0x00ce: 0x062e, # ARABIC LETTER KHAH
        0x00cf: 0x062f, # ARABIC LETTER DAL
        0x00d0: 0x0630, # ARABIC LETTER THAL
        0x00d1: 0x0631, # ARABIC LETTER REH
        0x00d2: 0x0632, # ARABIC LETTER ZAIN
        0x00d3: 0x0633, # ARABIC LETTER SEEN
        0x00d4: 0x0634, # ARABIC LETTER SHEEN
        0x00d5: 0x0635, # ARABIC LETTER SAD
        0x00d6: 0x0636, # ARABIC LETTER DAD
        0x00d8: 0x0637, # ARABIC LETTER TAH
        0x00d9: 0x0638, # ARABIC LETTER ZAH
        0x00da: 0x0639, # ARABIC LETTER AIN
        0x00db: 0x063a, # ARABIC LETTER GHAIN
        0x00dc: 0x0640, # ARABIC TATWEEL
        0x00dd: 0x0641, # ARABIC LETTER FEH
        0x00de: 0x0642, # ARABIC LETTER QAF
        0x00df: 0x0643, # ARABIC LETTER KAF
        0x00e1: 0x0644, # ARABIC LETTER LAM
        0x00e3: 0x0645, # ARABIC LETTER MEEM
        0x00e4: 0x0646, # ARABIC LETTER NOON
        0x00e5: 0x0647, # ARABIC LETTER HEH
        0x00e6: 0x0648, # ARABIC LETTER WAW
        0x00ec: 0x0649, # ARABIC LETTER ALEF MAKSURA
        0x00ed: 0x064a, # ARABIC LETTER YEH
        0x00f0: 0x064b, # ARABIC FATHATAN
        0x00f1: 0x064c, # ARABIC DAMMATAN
        0x00f2: 0x064d, # ARABIC KASRATAN
        0x00f3: 0x064e, # ARABIC FATHA
        0x00f5: 0x064f, # ARABIC DAMMA
        0x00f6: 0x0650, # ARABIC KASRA
        0x00f8: 0x0651, # ARABIC SHADDA
        0x00fa: 0x0652, # ARABIC SUKUN
        0x00fd: 0x200e, # LEFT-TO-RIGHT MARK
        0x00fe: 0x200f, # RIGHT-TO-LEFT MARK
        0x00ff: 0x06d2, # ARABIC LETTER YEH BARREE
})

### Encoding Map

# Reverse (code point -> byte) table derived from decoding_map by the
# codecs helper; used by Codec.encode above.
encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1257.py b/depot_tools/release/win/python_24/Lib/encodings/cp1257.py
new file mode 100644
index 0000000..1a21850
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1257.py
@@ -0,0 +1,131 @@
+""" Python Character Mapping Codec generated from 'CP1257.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Charmap codec for code page 1257, driven by the generated
    module-level encoding_map / decoding_map tables below."""

    def encode(self, input, errors='strict'):
        """Encode a unicode string to cp1257 bytes via encoding_map."""
        encoded = codecs.charmap_encode(input, errors, encoding_map)
        return encoded

    def decode(self, input, errors='strict'):
        """Decode cp1257 bytes to a unicode string via decoding_map."""
        decoded = codecs.charmap_decode(input, errors, decoding_map)
        return decoded
+
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer: inherits encode() from the charmap Codec above."""
+
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader: inherits decode() from the charmap Codec above."""
+
+### encodings module API
+
def getregentry():
    """Return the codec-registry 4-tuple:
    (encoder, decoder, stream reader, stream writer)."""
    encoder = Codec().encode
    decoder = Codec().decode
    return (encoder, decoder, StreamReader, StreamWriter)
+
+### Decoding Map
+
# Byte -> code point table for cp1257: start from the 0x00-0xFF identity
# mapping and patch in the positions that differ from Latin-1.  None marks
# byte values that are undefined in this code page.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
        0x0080: 0x20ac, # EURO SIGN
        0x0081: None,   # UNDEFINED
        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
        0x0083: None,   # UNDEFINED
        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
        0x0086: 0x2020, # DAGGER
        0x0087: 0x2021, # DOUBLE DAGGER
        0x0088: None,   # UNDEFINED
        0x0089: 0x2030, # PER MILLE SIGN
        0x008a: None,   # UNDEFINED
        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
        0x008c: None,   # UNDEFINED
        0x008d: 0x00a8, # DIAERESIS
        0x008e: 0x02c7, # CARON
        0x008f: 0x00b8, # CEDILLA
        0x0090: None,   # UNDEFINED
        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
        0x0095: 0x2022, # BULLET
        0x0096: 0x2013, # EN DASH
        0x0097: 0x2014, # EM DASH
        0x0098: None,   # UNDEFINED
        0x0099: 0x2122, # TRADE MARK SIGN
        0x009a: None,   # UNDEFINED
        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
        0x009c: None,   # UNDEFINED
        0x009d: 0x00af, # MACRON
        0x009e: 0x02db, # OGONEK
        0x009f: None,   # UNDEFINED
        0x00a1: None,   # UNDEFINED
        0x00a5: None,   # UNDEFINED
        0x00a8: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
        0x00aa: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
        0x00af: 0x00c6, # LATIN CAPITAL LETTER AE
        0x00b8: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
        0x00ba: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
        0x00bf: 0x00e6, # LATIN SMALL LETTER AE
        0x00c0: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
        0x00c1: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
        0x00c2: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
        0x00c3: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
        0x00c6: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
        0x00c7: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
        0x00c8: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
        0x00ca: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
        0x00cb: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
        0x00cc: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
        0x00cd: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
        0x00ce: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
        0x00cf: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
        0x00d0: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
        0x00d1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
        0x00d2: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
        0x00d4: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
        0x00d8: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
        0x00d9: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
        0x00da: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
        0x00db: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
        0x00dd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
        0x00de: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
        0x00e0: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
        0x00e1: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
        0x00e2: 0x0101, # LATIN SMALL LETTER A WITH MACRON
        0x00e3: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
        0x00e6: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
        0x00e7: 0x0113, # LATIN SMALL LETTER E WITH MACRON
        0x00e8: 0x010d, # LATIN SMALL LETTER C WITH CARON
        0x00ea: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
        0x00eb: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
        0x00ec: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
        0x00ed: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
        0x00ee: 0x012b, # LATIN SMALL LETTER I WITH MACRON
        0x00ef: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
        0x00f0: 0x0161, # LATIN SMALL LETTER S WITH CARON
        0x00f1: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
        0x00f2: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
        0x00f4: 0x014d, # LATIN SMALL LETTER O WITH MACRON
        0x00f8: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
        0x00f9: 0x0142, # LATIN SMALL LETTER L WITH STROKE
        0x00fa: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
        0x00fb: 0x016b, # LATIN SMALL LETTER U WITH MACRON
        0x00fd: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
        0x00fe: 0x017e, # LATIN SMALL LETTER Z WITH CARON
        0x00ff: 0x02d9, # DOT ABOVE
})

### Encoding Map

# Reverse (code point -> byte) table derived from decoding_map by the
# codecs helper; used by Codec.encode above.
encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp1258.py b/depot_tools/release/win/python_24/Lib/encodings/cp1258.py
new file mode 100644
index 0000000..03a6d3b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp1258.py
@@ -0,0 +1,90 @@
+""" Python Character Mapping Codec generated from 'CP1258.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Charmap codec for code page 1258, driven by the generated
    module-level encoding_map / decoding_map tables below."""

    def encode(self, input, errors='strict'):
        """Encode a unicode string to cp1258 bytes via encoding_map."""
        encoded = codecs.charmap_encode(input, errors, encoding_map)
        return encoded

    def decode(self, input, errors='strict'):
        """Decode cp1258 bytes to a unicode string via decoding_map."""
        decoded = codecs.charmap_decode(input, errors, decoding_map)
        return decoded
+
class StreamWriter(Codec, codecs.StreamWriter):
    """Stream writer: inherits encode() from the charmap Codec above."""
+
class StreamReader(Codec, codecs.StreamReader):
    """Stream reader: inherits decode() from the charmap Codec above."""
+
+### encodings module API
+
def getregentry():
    """Return the codec-registry 4-tuple:
    (encoder, decoder, stream reader, stream writer)."""
    encoder = Codec().encode
    decoder = Codec().decode
    return (encoder, decoder, StreamReader, StreamWriter)
+
+### Decoding Map
+
# Byte -> code point table for cp1258: start from the 0x00-0xFF identity
# mapping and patch in the positions that differ from Latin-1.  None marks
# byte values that are undefined in this code page.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
        0x0080: 0x20ac, # EURO SIGN
        0x0081: None,   # UNDEFINED
        0x0082: 0x201a, # SINGLE LOW-9 QUOTATION MARK
        0x0083: 0x0192, # LATIN SMALL LETTER F WITH HOOK
        0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
        0x0086: 0x2020, # DAGGER
        0x0087: 0x2021, # DOUBLE DAGGER
        0x0088: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
        0x0089: 0x2030, # PER MILLE SIGN
        0x008a: None,   # UNDEFINED
        0x008b: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
        0x008c: 0x0152, # LATIN CAPITAL LIGATURE OE
        0x008d: None,   # UNDEFINED
        0x008e: None,   # UNDEFINED
        0x008f: None,   # UNDEFINED
        0x0090: None,   # UNDEFINED
        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
        0x0095: 0x2022, # BULLET
        0x0096: 0x2013, # EN DASH
        0x0097: 0x2014, # EM DASH
        0x0098: 0x02dc, # SMALL TILDE
        0x0099: 0x2122, # TRADE MARK SIGN
        0x009a: None,   # UNDEFINED
        0x009b: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
        0x009c: 0x0153, # LATIN SMALL LIGATURE OE
        0x009d: None,   # UNDEFINED
        0x009e: None,   # UNDEFINED
        0x009f: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
        0x00c3: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
        0x00cc: 0x0300, # COMBINING GRAVE ACCENT
        0x00d0: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
        0x00d2: 0x0309, # COMBINING HOOK ABOVE
        0x00d5: 0x01a0, # LATIN CAPITAL LETTER O WITH HORN
        0x00dd: 0x01af, # LATIN CAPITAL LETTER U WITH HORN
        0x00de: 0x0303, # COMBINING TILDE
        0x00e3: 0x0103, # LATIN SMALL LETTER A WITH BREVE
        0x00ec: 0x0301, # COMBINING ACUTE ACCENT
        0x00f0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
        0x00f2: 0x0323, # COMBINING DOT BELOW
        0x00f5: 0x01a1, # LATIN SMALL LETTER O WITH HORN
        0x00fd: 0x01b0, # LATIN SMALL LETTER U WITH HORN
        0x00fe: 0x20ab, # DONG SIGN
})

### Encoding Map

# Reverse (code point -> byte) table derived from decoding_map by the
# codecs helper; used by Codec.encode above.
encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp424.py b/depot_tools/release/win/python_24/Lib/encodings/cp424.py
new file mode 100644
index 0000000..6a532333
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp424.py
@@ -0,0 +1,280 @@
+""" Python Character Mapping Codec generated from 'CP424.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0004: 0x009c, # SELECT
+        0x0005: 0x0009, # HORIZONTAL TABULATION
+        0x0006: 0x0086, # REQUIRED NEW LINE
+        0x0007: 0x007f, # DELETE
+        0x0008: 0x0097, # GRAPHIC ESCAPE
+        0x0009: 0x008d, # SUPERSCRIPT
+        0x000a: 0x008e, # REPEAT
+        0x0014: 0x009d, # RESTORE/ENABLE PRESENTATION
+        0x0015: 0x0085, # NEW LINE
+        0x0016: 0x0008, # BACKSPACE
+        0x0017: 0x0087, # PROGRAM OPERATOR COMMUNICATION
+        0x001a: 0x0092, # UNIT BACK SPACE
+        0x001b: 0x008f, # CUSTOMER USE ONE
+        0x0020: 0x0080, # DIGIT SELECT
+        0x0021: 0x0081, # START OF SIGNIFICANCE
+        0x0022: 0x0082, # FIELD SEPARATOR
+        0x0023: 0x0083, # WORD UNDERSCORE
+        0x0024: 0x0084, # BYPASS OR INHIBIT PRESENTATION
+        0x0025: 0x000a, # LINE FEED
+        0x0026: 0x0017, # END OF TRANSMISSION BLOCK
+        0x0027: 0x001b, # ESCAPE
+        0x0028: 0x0088, # SET ATTRIBUTE
+        0x0029: 0x0089, # START FIELD EXTENDED
+        0x002a: 0x008a, # SET MODE OR SWITCH
+        0x002b: 0x008b, # CONTROL SEQUENCE PREFIX
+        0x002c: 0x008c, # MODIFY FIELD ATTRIBUTE
+        0x002d: 0x0005, # ENQUIRY
+        0x002e: 0x0006, # ACKNOWLEDGE
+        0x002f: 0x0007, # BELL
+        0x0030: 0x0090, # <reserved>
+        0x0031: 0x0091, # <reserved>
+        0x0032: 0x0016, # SYNCHRONOUS IDLE
+        0x0033: 0x0093, # INDEX RETURN
+        0x0034: 0x0094, # PRESENTATION POSITION
+        0x0035: 0x0095, # TRANSPARENT
+        0x0036: 0x0096, # NUMERIC BACKSPACE
+        0x0037: 0x0004, # END OF TRANSMISSION
+        0x0038: 0x0098, # SUBSCRIPT
+        0x0039: 0x0099, # INDENT TABULATION
+        0x003a: 0x009a, # REVERSE FORM FEED
+        0x003b: 0x009b, # CUSTOMER USE THREE
+        0x003c: 0x0014, # DEVICE CONTROL FOUR
+        0x003d: 0x0015, # NEGATIVE ACKNOWLEDGE
+        0x003e: 0x009e, # <reserved>
+        0x003f: 0x001a, # SUBSTITUTE
+        0x0040: 0x0020, # SPACE
+        0x0041: 0x05d0, # HEBREW LETTER ALEF
+        0x0042: 0x05d1, # HEBREW LETTER BET
+        0x0043: 0x05d2, # HEBREW LETTER GIMEL
+        0x0044: 0x05d3, # HEBREW LETTER DALET
+        0x0045: 0x05d4, # HEBREW LETTER HE
+        0x0046: 0x05d5, # HEBREW LETTER VAV
+        0x0047: 0x05d6, # HEBREW LETTER ZAYIN
+        0x0048: 0x05d7, # HEBREW LETTER HET
+        0x0049: 0x05d8, # HEBREW LETTER TET
+        0x004a: 0x00a2, # CENT SIGN
+        0x004b: 0x002e, # FULL STOP
+        0x004c: 0x003c, # LESS-THAN SIGN
+        0x004d: 0x0028, # LEFT PARENTHESIS
+        0x004e: 0x002b, # PLUS SIGN
+        0x004f: 0x007c, # VERTICAL LINE
+        0x0050: 0x0026, # AMPERSAND
+        0x0051: 0x05d9, # HEBREW LETTER YOD
+        0x0052: 0x05da, # HEBREW LETTER FINAL KAF
+        0x0053: 0x05db, # HEBREW LETTER KAF
+        0x0054: 0x05dc, # HEBREW LETTER LAMED
+        0x0055: 0x05dd, # HEBREW LETTER FINAL MEM
+        0x0056: 0x05de, # HEBREW LETTER MEM
+        0x0057: 0x05df, # HEBREW LETTER FINAL NUN
+        0x0058: 0x05e0, # HEBREW LETTER NUN
+        0x0059: 0x05e1, # HEBREW LETTER SAMEKH
+        0x005a: 0x0021, # EXCLAMATION MARK
+        0x005b: 0x0024, # DOLLAR SIGN
+        0x005c: 0x002a, # ASTERISK
+        0x005d: 0x0029, # RIGHT PARENTHESIS
+        0x005e: 0x003b, # SEMICOLON
+        0x005f: 0x00ac, # NOT SIGN
+        0x0060: 0x002d, # HYPHEN-MINUS
+        0x0061: 0x002f, # SOLIDUS
+        0x0062: 0x05e2, # HEBREW LETTER AYIN
+        0x0063: 0x05e3, # HEBREW LETTER FINAL PE
+        0x0064: 0x05e4, # HEBREW LETTER PE
+        0x0065: 0x05e5, # HEBREW LETTER FINAL TSADI
+        0x0066: 0x05e6, # HEBREW LETTER TSADI
+        0x0067: 0x05e7, # HEBREW LETTER QOF
+        0x0068: 0x05e8, # HEBREW LETTER RESH
+        0x0069: 0x05e9, # HEBREW LETTER SHIN
+        0x006a: 0x00a6, # BROKEN BAR
+        0x006b: 0x002c, # COMMA
+        0x006c: 0x0025, # PERCENT SIGN
+        0x006d: 0x005f, # LOW LINE
+        0x006e: 0x003e, # GREATER-THAN SIGN
+        0x006f: 0x003f, # QUESTION MARK
+        0x0070: None,   # UNDEFINED
+        0x0071: 0x05ea, # HEBREW LETTER TAV
+        0x0072: None,   # UNDEFINED
+        0x0073: None,   # UNDEFINED
+        0x0074: 0x00a0, # NO-BREAK SPACE
+        0x0075: None,   # UNDEFINED
+        0x0076: None,   # UNDEFINED
+        0x0077: None,   # UNDEFINED
+        0x0078: 0x2017, # DOUBLE LOW LINE
+        0x0079: 0x0060, # GRAVE ACCENT
+        0x007a: 0x003a, # COLON
+        0x007b: 0x0023, # NUMBER SIGN
+        0x007c: 0x0040, # COMMERCIAL AT
+        0x007d: 0x0027, # APOSTROPHE
+        0x007e: 0x003d, # EQUALS SIGN
+        0x007f: 0x0022, # QUOTATION MARK
+        0x0080: None,   # UNDEFINED
+        0x0081: 0x0061, # LATIN SMALL LETTER A
+        0x0082: 0x0062, # LATIN SMALL LETTER B
+        0x0083: 0x0063, # LATIN SMALL LETTER C
+        0x0084: 0x0064, # LATIN SMALL LETTER D
+        0x0085: 0x0065, # LATIN SMALL LETTER E
+        0x0086: 0x0066, # LATIN SMALL LETTER F
+        0x0087: 0x0067, # LATIN SMALL LETTER G
+        0x0088: 0x0068, # LATIN SMALL LETTER H
+        0x0089: 0x0069, # LATIN SMALL LETTER I
+        0x008a: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008b: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008c: None,   # UNDEFINED
+        0x008d: None,   # UNDEFINED
+        0x008e: None,   # UNDEFINED
+        0x008f: 0x00b1, # PLUS-MINUS SIGN
+        0x0090: 0x00b0, # DEGREE SIGN
+        0x0091: 0x006a, # LATIN SMALL LETTER J
+        0x0092: 0x006b, # LATIN SMALL LETTER K
+        0x0093: 0x006c, # LATIN SMALL LETTER L
+        0x0094: 0x006d, # LATIN SMALL LETTER M
+        0x0095: 0x006e, # LATIN SMALL LETTER N
+        0x0096: 0x006f, # LATIN SMALL LETTER O
+        0x0097: 0x0070, # LATIN SMALL LETTER P
+        0x0098: 0x0071, # LATIN SMALL LETTER Q
+        0x0099: 0x0072, # LATIN SMALL LETTER R
+        0x009a: None,   # UNDEFINED
+        0x009b: None,   # UNDEFINED
+        0x009c: None,   # UNDEFINED
+        0x009d: 0x00b8, # CEDILLA
+        0x009e: None,   # UNDEFINED
+        0x009f: 0x00a4, # CURRENCY SIGN
+        0x00a0: 0x00b5, # MICRO SIGN
+        0x00a1: 0x007e, # TILDE
+        0x00a2: 0x0073, # LATIN SMALL LETTER S
+        0x00a3: 0x0074, # LATIN SMALL LETTER T
+        0x00a4: 0x0075, # LATIN SMALL LETTER U
+        0x00a5: 0x0076, # LATIN SMALL LETTER V
+        0x00a6: 0x0077, # LATIN SMALL LETTER W
+        0x00a7: 0x0078, # LATIN SMALL LETTER X
+        0x00a8: 0x0079, # LATIN SMALL LETTER Y
+        0x00a9: 0x007a, # LATIN SMALL LETTER Z
+        0x00aa: None,   # UNDEFINED
+        0x00ab: None,   # UNDEFINED
+        0x00ac: None,   # UNDEFINED
+        0x00ad: None,   # UNDEFINED
+        0x00ae: None,   # UNDEFINED
+        0x00af: 0x00ae, # REGISTERED SIGN
+        0x00b0: 0x005e, # CIRCUMFLEX ACCENT
+        0x00b1: 0x00a3, # POUND SIGN
+        0x00b2: 0x00a5, # YEN SIGN
+        0x00b3: 0x00b7, # MIDDLE DOT
+        0x00b4: 0x00a9, # COPYRIGHT SIGN
+        0x00b5: 0x00a7, # SECTION SIGN
+        0x00b7: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00b8: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00b9: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00ba: 0x005b, # LEFT SQUARE BRACKET
+        0x00bb: 0x005d, # RIGHT SQUARE BRACKET
+        0x00bc: 0x00af, # MACRON
+        0x00bd: 0x00a8, # DIAERESIS
+        0x00be: 0x00b4, # ACUTE ACCENT
+        0x00bf: 0x00d7, # MULTIPLICATION SIGN
+        0x00c0: 0x007b, # LEFT CURLY BRACKET
+        0x00c1: 0x0041, # LATIN CAPITAL LETTER A
+        0x00c2: 0x0042, # LATIN CAPITAL LETTER B
+        0x00c3: 0x0043, # LATIN CAPITAL LETTER C
+        0x00c4: 0x0044, # LATIN CAPITAL LETTER D
+        0x00c5: 0x0045, # LATIN CAPITAL LETTER E
+        0x00c6: 0x0046, # LATIN CAPITAL LETTER F
+        0x00c7: 0x0047, # LATIN CAPITAL LETTER G
+        0x00c8: 0x0048, # LATIN CAPITAL LETTER H
+        0x00c9: 0x0049, # LATIN CAPITAL LETTER I
+        0x00ca: 0x00ad, # SOFT HYPHEN
+        0x00cb: None,   # UNDEFINED
+        0x00cc: None,   # UNDEFINED
+        0x00cd: None,   # UNDEFINED
+        0x00ce: None,   # UNDEFINED
+        0x00cf: None,   # UNDEFINED
+        0x00d0: 0x007d, # RIGHT CURLY BRACKET
+        0x00d1: 0x004a, # LATIN CAPITAL LETTER J
+        0x00d2: 0x004b, # LATIN CAPITAL LETTER K
+        0x00d3: 0x004c, # LATIN CAPITAL LETTER L
+        0x00d4: 0x004d, # LATIN CAPITAL LETTER M
+        0x00d5: 0x004e, # LATIN CAPITAL LETTER N
+        0x00d6: 0x004f, # LATIN CAPITAL LETTER O
+        0x00d7: 0x0050, # LATIN CAPITAL LETTER P
+        0x00d8: 0x0051, # LATIN CAPITAL LETTER Q
+        0x00d9: 0x0052, # LATIN CAPITAL LETTER R
+        0x00da: 0x00b9, # SUPERSCRIPT ONE
+        0x00db: None,   # UNDEFINED
+        0x00dc: None,   # UNDEFINED
+        0x00dd: None,   # UNDEFINED
+        0x00de: None,   # UNDEFINED
+        0x00df: None,   # UNDEFINED
+        0x00e0: 0x005c, # REVERSE SOLIDUS
+        0x00e1: 0x00f7, # DIVISION SIGN
+        0x00e2: 0x0053, # LATIN CAPITAL LETTER S
+        0x00e3: 0x0054, # LATIN CAPITAL LETTER T
+        0x00e4: 0x0055, # LATIN CAPITAL LETTER U
+        0x00e5: 0x0056, # LATIN CAPITAL LETTER V
+        0x00e6: 0x0057, # LATIN CAPITAL LETTER W
+        0x00e7: 0x0058, # LATIN CAPITAL LETTER X
+        0x00e8: 0x0059, # LATIN CAPITAL LETTER Y
+        0x00e9: 0x005a, # LATIN CAPITAL LETTER Z
+        0x00ea: 0x00b2, # SUPERSCRIPT TWO
+        0x00eb: None,   # UNDEFINED
+        0x00ec: None,   # UNDEFINED
+        0x00ed: None,   # UNDEFINED
+        0x00ee: None,   # UNDEFINED
+        0x00ef: None,   # UNDEFINED
+        0x00f0: 0x0030, # DIGIT ZERO
+        0x00f1: 0x0031, # DIGIT ONE
+        0x00f2: 0x0032, # DIGIT TWO
+        0x00f3: 0x0033, # DIGIT THREE
+        0x00f4: 0x0034, # DIGIT FOUR
+        0x00f5: 0x0035, # DIGIT FIVE
+        0x00f6: 0x0036, # DIGIT SIX
+        0x00f7: 0x0037, # DIGIT SEVEN
+        0x00f8: 0x0038, # DIGIT EIGHT
+        0x00f9: 0x0039, # DIGIT NINE
+        0x00fa: 0x00b3, # SUPERSCRIPT THREE
+        0x00fb: None,   # UNDEFINED
+        0x00fc: None,   # UNDEFINED
+        0x00fd: None,   # UNDEFINED
+        0x00fe: None,   # UNDEFINED
+        0x00ff: 0x009f, # EIGHT ONES
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp437.py b/depot_tools/release/win/python_24/Lib/encodings/cp437.py
new file mode 100644
index 0000000..bfe218f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp437.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP437.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00a2, # CENT SIGN
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00a5, # YEN SIGN
+        0x009e: 0x20a7, # PESETA SIGN
+        0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00a8: 0x00bf, # INVERTED QUESTION MARK
+        0x00a9: 0x2310, # REVERSED NOT SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00e3: 0x03c0, # GREEK SMALL LETTER PI
+        0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00ec: 0x221e, # INFINITY
+        0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00ef: 0x2229, # INTERSECTION
+        0x00f0: 0x2261, # IDENTICAL TO
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00f4: 0x2320, # TOP HALF INTEGRAL
+        0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x2248, # ALMOST EQUAL TO
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp500.py b/depot_tools/release/win/python_24/Lib/encodings/cp500.py
new file mode 100644
index 0000000..bc3474f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp500.py
@@ -0,0 +1,280 @@
+""" Python Character Mapping Codec generated from 'CP500.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0004: 0x009c, # CONTROL
+        0x0005: 0x0009, # HORIZONTAL TABULATION
+        0x0006: 0x0086, # CONTROL
+        0x0007: 0x007f, # DELETE
+        0x0008: 0x0097, # CONTROL
+        0x0009: 0x008d, # CONTROL
+        0x000a: 0x008e, # CONTROL
+        0x0014: 0x009d, # CONTROL
+        0x0015: 0x0085, # CONTROL
+        0x0016: 0x0008, # BACKSPACE
+        0x0017: 0x0087, # CONTROL
+        0x001a: 0x0092, # CONTROL
+        0x001b: 0x008f, # CONTROL
+        0x0020: 0x0080, # CONTROL
+        0x0021: 0x0081, # CONTROL
+        0x0022: 0x0082, # CONTROL
+        0x0023: 0x0083, # CONTROL
+        0x0024: 0x0084, # CONTROL
+        0x0025: 0x000a, # LINE FEED
+        0x0026: 0x0017, # END OF TRANSMISSION BLOCK
+        0x0027: 0x001b, # ESCAPE
+        0x0028: 0x0088, # CONTROL
+        0x0029: 0x0089, # CONTROL
+        0x002a: 0x008a, # CONTROL
+        0x002b: 0x008b, # CONTROL
+        0x002c: 0x008c, # CONTROL
+        0x002d: 0x0005, # ENQUIRY
+        0x002e: 0x0006, # ACKNOWLEDGE
+        0x002f: 0x0007, # BELL
+        0x0030: 0x0090, # CONTROL
+        0x0031: 0x0091, # CONTROL
+        0x0032: 0x0016, # SYNCHRONOUS IDLE
+        0x0033: 0x0093, # CONTROL
+        0x0034: 0x0094, # CONTROL
+        0x0035: 0x0095, # CONTROL
+        0x0036: 0x0096, # CONTROL
+        0x0037: 0x0004, # END OF TRANSMISSION
+        0x0038: 0x0098, # CONTROL
+        0x0039: 0x0099, # CONTROL
+        0x003a: 0x009a, # CONTROL
+        0x003b: 0x009b, # CONTROL
+        0x003c: 0x0014, # DEVICE CONTROL FOUR
+        0x003d: 0x0015, # NEGATIVE ACKNOWLEDGE
+        0x003e: 0x009e, # CONTROL
+        0x003f: 0x001a, # SUBSTITUTE
+        0x0040: 0x0020, # SPACE
+        0x0041: 0x00a0, # NO-BREAK SPACE
+        0x0042: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0043: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0044: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0045: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x0046: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x0047: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0048: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0049: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x004a: 0x005b, # LEFT SQUARE BRACKET
+        0x004b: 0x002e, # FULL STOP
+        0x004c: 0x003c, # LESS-THAN SIGN
+        0x004d: 0x0028, # LEFT PARENTHESIS
+        0x004e: 0x002b, # PLUS SIGN
+        0x004f: 0x0021, # EXCLAMATION MARK
+        0x0050: 0x0026, # AMPERSAND
+        0x0051: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0052: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0053: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x0054: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x0055: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x0056: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x0057: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x0058: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x0059: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
+        0x005a: 0x005d, # RIGHT SQUARE BRACKET
+        0x005b: 0x0024, # DOLLAR SIGN
+        0x005c: 0x002a, # ASTERISK
+        0x005d: 0x0029, # RIGHT PARENTHESIS
+        0x005e: 0x003b, # SEMICOLON
+        0x005f: 0x005e, # CIRCUMFLEX ACCENT
+        0x0060: 0x002d, # HYPHEN-MINUS
+        0x0061: 0x002f, # SOLIDUS
+        0x0062: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x0063: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0064: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x0065: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x0066: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x0067: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0068: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0069: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x006a: 0x00a6, # BROKEN BAR
+        0x006b: 0x002c, # COMMA
+        0x006c: 0x0025, # PERCENT SIGN
+        0x006d: 0x005f, # LOW LINE
+        0x006e: 0x003e, # GREATER-THAN SIGN
+        0x006f: 0x003f, # QUESTION MARK
+        0x0070: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x0071: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0072: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x0073: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x0074: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x0075: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x0076: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x0077: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x0078: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x0079: 0x0060, # GRAVE ACCENT
+        0x007a: 0x003a, # COLON
+        0x007b: 0x0023, # NUMBER SIGN
+        0x007c: 0x0040, # COMMERCIAL AT
+        0x007d: 0x0027, # APOSTROPHE
+        0x007e: 0x003d, # EQUALS SIGN
+        0x007f: 0x0022, # QUOTATION MARK
+        0x0080: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x0081: 0x0061, # LATIN SMALL LETTER A
+        0x0082: 0x0062, # LATIN SMALL LETTER B
+        0x0083: 0x0063, # LATIN SMALL LETTER C
+        0x0084: 0x0064, # LATIN SMALL LETTER D
+        0x0085: 0x0065, # LATIN SMALL LETTER E
+        0x0086: 0x0066, # LATIN SMALL LETTER F
+        0x0087: 0x0067, # LATIN SMALL LETTER G
+        0x0088: 0x0068, # LATIN SMALL LETTER H
+        0x0089: 0x0069, # LATIN SMALL LETTER I
+        0x008a: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008b: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x008c: 0x00f0, # LATIN SMALL LETTER ETH (ICELANDIC)
+        0x008d: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
+        0x008e: 0x00fe, # LATIN SMALL LETTER THORN (ICELANDIC)
+        0x008f: 0x00b1, # PLUS-MINUS SIGN
+        0x0090: 0x00b0, # DEGREE SIGN
+        0x0091: 0x006a, # LATIN SMALL LETTER J
+        0x0092: 0x006b, # LATIN SMALL LETTER K
+        0x0093: 0x006c, # LATIN SMALL LETTER L
+        0x0094: 0x006d, # LATIN SMALL LETTER M
+        0x0095: 0x006e, # LATIN SMALL LETTER N
+        0x0096: 0x006f, # LATIN SMALL LETTER O
+        0x0097: 0x0070, # LATIN SMALL LETTER P
+        0x0098: 0x0071, # LATIN SMALL LETTER Q
+        0x0099: 0x0072, # LATIN SMALL LETTER R
+        0x009a: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x009b: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x009c: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x009d: 0x00b8, # CEDILLA
+        0x009e: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x009f: 0x00a4, # CURRENCY SIGN
+        0x00a0: 0x00b5, # MICRO SIGN
+        0x00a1: 0x007e, # TILDE
+        0x00a2: 0x0073, # LATIN SMALL LETTER S
+        0x00a3: 0x0074, # LATIN SMALL LETTER T
+        0x00a4: 0x0075, # LATIN SMALL LETTER U
+        0x00a5: 0x0076, # LATIN SMALL LETTER V
+        0x00a6: 0x0077, # LATIN SMALL LETTER W
+        0x00a7: 0x0078, # LATIN SMALL LETTER X
+        0x00a8: 0x0079, # LATIN SMALL LETTER Y
+        0x00a9: 0x007a, # LATIN SMALL LETTER Z
+        0x00aa: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ab: 0x00bf, # INVERTED QUESTION MARK
+        0x00ac: 0x00d0, # LATIN CAPITAL LETTER ETH (ICELANDIC)
+        0x00ad: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
+        0x00ae: 0x00de, # LATIN CAPITAL LETTER THORN (ICELANDIC)
+        0x00af: 0x00ae, # REGISTERED SIGN
+        0x00b0: 0x00a2, # CENT SIGN
+        0x00b1: 0x00a3, # POUND SIGN
+        0x00b2: 0x00a5, # YEN SIGN
+        0x00b3: 0x00b7, # MIDDLE DOT
+        0x00b4: 0x00a9, # COPYRIGHT SIGN
+        0x00b5: 0x00a7, # SECTION SIGN
+        0x00b7: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00b8: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00b9: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00ba: 0x00ac, # NOT SIGN
+        0x00bb: 0x007c, # VERTICAL LINE
+        0x00bc: 0x00af, # MACRON
+        0x00bd: 0x00a8, # DIAERESIS
+        0x00be: 0x00b4, # ACUTE ACCENT
+        0x00bf: 0x00d7, # MULTIPLICATION SIGN
+        0x00c0: 0x007b, # LEFT CURLY BRACKET
+        0x00c1: 0x0041, # LATIN CAPITAL LETTER A
+        0x00c2: 0x0042, # LATIN CAPITAL LETTER B
+        0x00c3: 0x0043, # LATIN CAPITAL LETTER C
+        0x00c4: 0x0044, # LATIN CAPITAL LETTER D
+        0x00c5: 0x0045, # LATIN CAPITAL LETTER E
+        0x00c6: 0x0046, # LATIN CAPITAL LETTER F
+        0x00c7: 0x0047, # LATIN CAPITAL LETTER G
+        0x00c8: 0x0048, # LATIN CAPITAL LETTER H
+        0x00c9: 0x0049, # LATIN CAPITAL LETTER I
+        0x00ca: 0x00ad, # SOFT HYPHEN
+        0x00cb: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x00cc: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x00cd: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x00ce: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00cf: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x00d0: 0x007d, # RIGHT CURLY BRACKET
+        0x00d1: 0x004a, # LATIN CAPITAL LETTER J
+        0x00d2: 0x004b, # LATIN CAPITAL LETTER K
+        0x00d3: 0x004c, # LATIN CAPITAL LETTER L
+        0x00d4: 0x004d, # LATIN CAPITAL LETTER M
+        0x00d5: 0x004e, # LATIN CAPITAL LETTER N
+        0x00d6: 0x004f, # LATIN CAPITAL LETTER O
+        0x00d7: 0x0050, # LATIN CAPITAL LETTER P
+        0x00d8: 0x0051, # LATIN CAPITAL LETTER Q
+        0x00d9: 0x0052, # LATIN CAPITAL LETTER R
+        0x00da: 0x00b9, # SUPERSCRIPT ONE
+        0x00db: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x00dc: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00dd: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x00de: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00df: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00e0: 0x005c, # REVERSE SOLIDUS
+        0x00e1: 0x00f7, # DIVISION SIGN
+        0x00e2: 0x0053, # LATIN CAPITAL LETTER S
+        0x00e3: 0x0054, # LATIN CAPITAL LETTER T
+        0x00e4: 0x0055, # LATIN CAPITAL LETTER U
+        0x00e5: 0x0056, # LATIN CAPITAL LETTER V
+        0x00e6: 0x0057, # LATIN CAPITAL LETTER W
+        0x00e7: 0x0058, # LATIN CAPITAL LETTER X
+        0x00e8: 0x0059, # LATIN CAPITAL LETTER Y
+        0x00e9: 0x005a, # LATIN CAPITAL LETTER Z
+        0x00ea: 0x00b2, # SUPERSCRIPT TWO
+        0x00eb: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00ec: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x00ed: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00ef: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00f0: 0x0030, # DIGIT ZERO
+        0x00f1: 0x0031, # DIGIT ONE
+        0x00f2: 0x0032, # DIGIT TWO
+        0x00f3: 0x0033, # DIGIT THREE
+        0x00f4: 0x0034, # DIGIT FOUR
+        0x00f5: 0x0035, # DIGIT FIVE
+        0x00f6: 0x0036, # DIGIT SIX
+        0x00f7: 0x0037, # DIGIT SEVEN
+        0x00f8: 0x0038, # DIGIT EIGHT
+        0x00f9: 0x0039, # DIGIT NINE
+        0x00fa: 0x00b3, # SUPERSCRIPT THREE
+        0x00fb: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00fc: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x00fd: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00fe: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00ff: 0x009f, # CONTROL
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp737.py b/depot_tools/release/win/python_24/Lib/encodings/cp737.py
new file mode 100644
index 0000000..a4729b1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp737.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP737.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x0391, # GREEK CAPITAL LETTER ALPHA
+        0x0081: 0x0392, # GREEK CAPITAL LETTER BETA
+        0x0082: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x0083: 0x0394, # GREEK CAPITAL LETTER DELTA
+        0x0084: 0x0395, # GREEK CAPITAL LETTER EPSILON
+        0x0085: 0x0396, # GREEK CAPITAL LETTER ZETA
+        0x0086: 0x0397, # GREEK CAPITAL LETTER ETA
+        0x0087: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x0088: 0x0399, # GREEK CAPITAL LETTER IOTA
+        0x0089: 0x039a, # GREEK CAPITAL LETTER KAPPA
+        0x008a: 0x039b, # GREEK CAPITAL LETTER LAMDA
+        0x008b: 0x039c, # GREEK CAPITAL LETTER MU
+        0x008c: 0x039d, # GREEK CAPITAL LETTER NU
+        0x008d: 0x039e, # GREEK CAPITAL LETTER XI
+        0x008e: 0x039f, # GREEK CAPITAL LETTER OMICRON
+        0x008f: 0x03a0, # GREEK CAPITAL LETTER PI
+        0x0090: 0x03a1, # GREEK CAPITAL LETTER RHO
+        0x0091: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x0092: 0x03a4, # GREEK CAPITAL LETTER TAU
+        0x0093: 0x03a5, # GREEK CAPITAL LETTER UPSILON
+        0x0094: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x0095: 0x03a7, # GREEK CAPITAL LETTER CHI
+        0x0096: 0x03a8, # GREEK CAPITAL LETTER PSI
+        0x0097: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x0098: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x0099: 0x03b2, # GREEK SMALL LETTER BETA
+        0x009a: 0x03b3, # GREEK SMALL LETTER GAMMA
+        0x009b: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x009c: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x009d: 0x03b6, # GREEK SMALL LETTER ZETA
+        0x009e: 0x03b7, # GREEK SMALL LETTER ETA
+        0x009f: 0x03b8, # GREEK SMALL LETTER THETA
+        0x00a0: 0x03b9, # GREEK SMALL LETTER IOTA
+        0x00a1: 0x03ba, # GREEK SMALL LETTER KAPPA
+        0x00a2: 0x03bb, # GREEK SMALL LETTER LAMDA
+        0x00a3: 0x03bc, # GREEK SMALL LETTER MU
+        0x00a4: 0x03bd, # GREEK SMALL LETTER NU
+        0x00a5: 0x03be, # GREEK SMALL LETTER XI
+        0x00a6: 0x03bf, # GREEK SMALL LETTER OMICRON
+        0x00a7: 0x03c0, # GREEK SMALL LETTER PI
+        0x00a8: 0x03c1, # GREEK SMALL LETTER RHO
+        0x00a9: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00aa: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
+        0x00ab: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00ac: 0x03c5, # GREEK SMALL LETTER UPSILON
+        0x00ad: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00ae: 0x03c7, # GREEK SMALL LETTER CHI
+        0x00af: 0x03c8, # GREEK SMALL LETTER PSI
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03c9, # GREEK SMALL LETTER OMEGA
+        0x00e1: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
+        0x00e2: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
+        0x00e3: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
+        0x00e4: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
+        0x00e5: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
+        0x00e6: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
+        0x00e7: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
+        0x00e8: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
+        0x00e9: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
+        0x00ea: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
+        0x00eb: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
+        0x00ec: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
+        0x00ed: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
+        0x00ee: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
+        0x00ef: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
+        0x00f0: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00f4: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
+        0x00f5: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x2248, # ALMOST EQUAL TO
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp775.py b/depot_tools/release/win/python_24/Lib/encodings/cp775.py
new file mode 100644
index 0000000..d8cda72
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp775.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP775.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x0101, # LATIN SMALL LETTER A WITH MACRON
+        0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0085: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
+        0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0087: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
+        0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
+        0x0089: 0x0113, # LATIN SMALL LETTER E WITH MACRON
+        0x008a: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
+        0x008b: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
+        0x008c: 0x012b, # LATIN SMALL LETTER I WITH MACRON
+        0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
+        0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x0093: 0x014d, # LATIN SMALL LETTER O WITH MACRON
+        0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x0095: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
+        0x0096: 0x00a2, # CENT SIGN
+        0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
+        0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
+        0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x009e: 0x00d7, # MULTIPLICATION SIGN
+        0x009f: 0x00a4, # CURRENCY SIGN
+        0x00a0: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
+        0x00a1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00a4: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00a5: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
+        0x00a6: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x00a7: 0x00a6, # BROKEN BAR
+        0x00a8: 0x00a9, # COPYRIGHT SIGN
+        0x00a9: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
+        0x00b6: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
+        0x00b7: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
+        0x00b8: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
+        0x00be: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
+        0x00c7: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
+        0x00d0: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
+        0x00d1: 0x010d, # LATIN SMALL LETTER C WITH CARON
+        0x00d2: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
+        0x00d3: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
+        0x00d4: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
+        0x00d5: 0x0161, # LATIN SMALL LETTER S WITH CARON
+        0x00d6: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
+        0x00d7: 0x016b, # LATIN SMALL LETTER U WITH MACRON
+        0x00d8: 0x017e, # LATIN SMALL LETTER Z WITH CARON
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
+        0x00e2: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
+        0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
+        0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
+        0x00e8: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
+        0x00e9: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
+        0x00ea: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
+        0x00eb: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
+        0x00ec: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
+        0x00ed: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
+        0x00ee: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
+        0x00ef: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00f0: 0x00ad, # SOFT HYPHEN
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00f4: 0x00b6, # PILCROW SIGN
+        0x00f5: 0x00a7, # SECTION SIGN
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x00b9, # SUPERSCRIPT ONE
+        0x00fc: 0x00b3, # SUPERSCRIPT THREE
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp850.py b/depot_tools/release/win/python_24/Lib/encodings/cp850.py
new file mode 100644
index 0000000..ae098397
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp850.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP850.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x009e: 0x00d7, # MULTIPLICATION SIGN
+        0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00a8: 0x00bf, # INVERTED QUESTION MARK
+        0x00a9: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x00b7: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x00b8: 0x00a9, # COPYRIGHT SIGN
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x00a2, # CENT SIGN
+        0x00be: 0x00a5, # YEN SIGN
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x00c7: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x00a4, # CURRENCY SIGN
+        0x00d0: 0x00f0, # LATIN SMALL LETTER ETH
+        0x00d1: 0x00d0, # LATIN CAPITAL LETTER ETH
+        0x00d2: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x00d4: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x00d5: 0x0131, # LATIN SMALL LETTER DOTLESS I
+        0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00d8: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x00a6, # BROKEN BAR
+        0x00de: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00e3: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x00fe, # LATIN SMALL LETTER THORN
+        0x00e8: 0x00de, # LATIN CAPITAL LETTER THORN
+        0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00ea: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00eb: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
+        0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
+        0x00ee: 0x00af, # MACRON
+        0x00ef: 0x00b4, # ACUTE ACCENT
+        0x00f0: 0x00ad, # SOFT HYPHEN
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2017, # DOUBLE LOW LINE
+        0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00f4: 0x00b6, # PILCROW SIGN
+        0x00f5: 0x00a7, # SECTION SIGN
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x00b8, # CEDILLA
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x00a8, # DIAERESIS
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x00b9, # SUPERSCRIPT ONE
+        0x00fc: 0x00b3, # SUPERSCRIPT THREE
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp852.py b/depot_tools/release/win/python_24/Lib/encodings/cp852.py
new file mode 100644
index 0000000..dad5d29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp852.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP852.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0085: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
+        0x0086: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x0142, # LATIN SMALL LETTER L WITH STROKE
+        0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x008a: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
+        0x008b: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
+        0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x008d: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
+        0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x008f: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
+        0x0092: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x0095: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
+        0x0096: 0x013e, # LATIN SMALL LETTER L WITH CARON
+        0x0097: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
+        0x0098: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
+        0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
+        0x009c: 0x0165, # LATIN SMALL LETTER T WITH CARON
+        0x009d: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
+        0x009e: 0x00d7, # MULTIPLICATION SIGN
+        0x009f: 0x010d, # LATIN SMALL LETTER C WITH CARON
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
+        0x00a5: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
+        0x00a6: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
+        0x00a7: 0x017e, # LATIN SMALL LETTER Z WITH CARON
+        0x00a8: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
+        0x00a9: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
+        0x00ac: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
+        0x00ad: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x00b7: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
+        0x00b8: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00be: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x0102, # LATIN CAPITAL LETTER A WITH BREVE
+        0x00c7: 0x0103, # LATIN SMALL LETTER A WITH BREVE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x00a4, # CURRENCY SIGN
+        0x00d0: 0x0111, # LATIN SMALL LETTER D WITH STROKE
+        0x00d1: 0x0110, # LATIN CAPITAL LETTER D WITH STROKE
+        0x00d2: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
+        0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x00d4: 0x010f, # LATIN SMALL LETTER D WITH CARON
+        0x00d5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
+        0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00d8: 0x011b, # LATIN SMALL LETTER E WITH CARON
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x0162, # LATIN CAPITAL LETTER T WITH CEDILLA
+        0x00de: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00e3: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
+        0x00e4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
+        0x00e5: 0x0148, # LATIN SMALL LETTER N WITH CARON
+        0x00e6: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
+        0x00e7: 0x0161, # LATIN SMALL LETTER S WITH CARON
+        0x00e8: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
+        0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00ea: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
+        0x00eb: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
+        0x00ec: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
+        0x00ed: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
+        0x00ee: 0x0163, # LATIN SMALL LETTER T WITH CEDILLA
+        0x00ef: 0x00b4, # ACUTE ACCENT
+        0x00f0: 0x00ad, # SOFT HYPHEN
+        0x00f1: 0x02dd, # DOUBLE ACUTE ACCENT
+        0x00f2: 0x02db, # OGONEK
+        0x00f3: 0x02c7, # CARON
+        0x00f4: 0x02d8, # BREVE
+        0x00f5: 0x00a7, # SECTION SIGN
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x00b8, # CEDILLA
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x00a8, # DIAERESIS
+        0x00fa: 0x02d9, # DOT ABOVE
+        0x00fb: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
+        0x00fc: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
+        0x00fd: 0x0159, # LATIN SMALL LETTER R WITH CARON
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp855.py b/depot_tools/release/win/python_24/Lib/encodings/cp855.py
new file mode 100644
index 0000000..d93c5e8e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp855.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP855.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x0452, # CYRILLIC SMALL LETTER DJE
+        0x0081: 0x0402, # CYRILLIC CAPITAL LETTER DJE
+        0x0082: 0x0453, # CYRILLIC SMALL LETTER GJE
+        0x0083: 0x0403, # CYRILLIC CAPITAL LETTER GJE
+        0x0084: 0x0451, # CYRILLIC SMALL LETTER IO
+        0x0085: 0x0401, # CYRILLIC CAPITAL LETTER IO
+        0x0086: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
+        0x0087: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
+        0x0088: 0x0455, # CYRILLIC SMALL LETTER DZE
+        0x0089: 0x0405, # CYRILLIC CAPITAL LETTER DZE
+        0x008a: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x008b: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x008c: 0x0457, # CYRILLIC SMALL LETTER YI
+        0x008d: 0x0407, # CYRILLIC CAPITAL LETTER YI
+        0x008e: 0x0458, # CYRILLIC SMALL LETTER JE
+        0x008f: 0x0408, # CYRILLIC CAPITAL LETTER JE
+        0x0090: 0x0459, # CYRILLIC SMALL LETTER LJE
+        0x0091: 0x0409, # CYRILLIC CAPITAL LETTER LJE
+        0x0092: 0x045a, # CYRILLIC SMALL LETTER NJE
+        0x0093: 0x040a, # CYRILLIC CAPITAL LETTER NJE
+        0x0094: 0x045b, # CYRILLIC SMALL LETTER TSHE
+        0x0095: 0x040b, # CYRILLIC CAPITAL LETTER TSHE
+        0x0096: 0x045c, # CYRILLIC SMALL LETTER KJE
+        0x0097: 0x040c, # CYRILLIC CAPITAL LETTER KJE
+        0x0098: 0x045e, # CYRILLIC SMALL LETTER SHORT U
+        0x0099: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
+        0x009a: 0x045f, # CYRILLIC SMALL LETTER DZHE
+        0x009b: 0x040f, # CYRILLIC CAPITAL LETTER DZHE
+        0x009c: 0x044e, # CYRILLIC SMALL LETTER YU
+        0x009d: 0x042e, # CYRILLIC CAPITAL LETTER YU
+        0x009e: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
+        0x009f: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
+        0x00a0: 0x0430, # CYRILLIC SMALL LETTER A
+        0x00a1: 0x0410, # CYRILLIC CAPITAL LETTER A
+        0x00a2: 0x0431, # CYRILLIC SMALL LETTER BE
+        0x00a3: 0x0411, # CYRILLIC CAPITAL LETTER BE
+        0x00a4: 0x0446, # CYRILLIC SMALL LETTER TSE
+        0x00a5: 0x0426, # CYRILLIC CAPITAL LETTER TSE
+        0x00a6: 0x0434, # CYRILLIC SMALL LETTER DE
+        0x00a7: 0x0414, # CYRILLIC CAPITAL LETTER DE
+        0x00a8: 0x0435, # CYRILLIC SMALL LETTER IE
+        0x00a9: 0x0415, # CYRILLIC CAPITAL LETTER IE
+        0x00aa: 0x0444, # CYRILLIC SMALL LETTER EF
+        0x00ab: 0x0424, # CYRILLIC CAPITAL LETTER EF
+        0x00ac: 0x0433, # CYRILLIC SMALL LETTER GHE
+        0x00ad: 0x0413, # CYRILLIC CAPITAL LETTER GHE
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x0445, # CYRILLIC SMALL LETTER HA
+        0x00b6: 0x0425, # CYRILLIC CAPITAL LETTER HA
+        0x00b7: 0x0438, # CYRILLIC SMALL LETTER I
+        0x00b8: 0x0418, # CYRILLIC CAPITAL LETTER I
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x0439, # CYRILLIC SMALL LETTER SHORT I
+        0x00be: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x043a, # CYRILLIC SMALL LETTER KA
+        0x00c7: 0x041a, # CYRILLIC CAPITAL LETTER KA
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x00a4, # CURRENCY SIGN
+        0x00d0: 0x043b, # CYRILLIC SMALL LETTER EL
+        0x00d1: 0x041b, # CYRILLIC CAPITAL LETTER EL
+        0x00d2: 0x043c, # CYRILLIC SMALL LETTER EM
+        0x00d3: 0x041c, # CYRILLIC CAPITAL LETTER EM
+        0x00d4: 0x043d, # CYRILLIC SMALL LETTER EN
+        0x00d5: 0x041d, # CYRILLIC CAPITAL LETTER EN
+        0x00d6: 0x043e, # CYRILLIC SMALL LETTER O
+        0x00d7: 0x041e, # CYRILLIC CAPITAL LETTER O
+        0x00d8: 0x043f, # CYRILLIC SMALL LETTER PE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x041f, # CYRILLIC CAPITAL LETTER PE
+        0x00de: 0x044f, # CYRILLIC SMALL LETTER YA
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x042f, # CYRILLIC CAPITAL LETTER YA
+        0x00e1: 0x0440, # CYRILLIC SMALL LETTER ER
+        0x00e2: 0x0420, # CYRILLIC CAPITAL LETTER ER
+        0x00e3: 0x0441, # CYRILLIC SMALL LETTER ES
+        0x00e4: 0x0421, # CYRILLIC CAPITAL LETTER ES
+        0x00e5: 0x0442, # CYRILLIC SMALL LETTER TE
+        0x00e6: 0x0422, # CYRILLIC CAPITAL LETTER TE
+        0x00e7: 0x0443, # CYRILLIC SMALL LETTER U
+        0x00e8: 0x0423, # CYRILLIC CAPITAL LETTER U
+        0x00e9: 0x0436, # CYRILLIC SMALL LETTER ZHE
+        0x00ea: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
+        0x00eb: 0x0432, # CYRILLIC SMALL LETTER VE
+        0x00ec: 0x0412, # CYRILLIC CAPITAL LETTER VE
+        0x00ed: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
+        0x00ee: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
+        0x00ef: 0x2116, # NUMERO SIGN
+        0x00f0: 0x00ad, # SOFT HYPHEN
+        0x00f1: 0x044b, # CYRILLIC SMALL LETTER YERU
+        0x00f2: 0x042b, # CYRILLIC CAPITAL LETTER YERU
+        0x00f3: 0x0437, # CYRILLIC SMALL LETTER ZE
+        0x00f4: 0x0417, # CYRILLIC CAPITAL LETTER ZE
+        0x00f5: 0x0448, # CYRILLIC SMALL LETTER SHA
+        0x00f6: 0x0428, # CYRILLIC CAPITAL LETTER SHA
+        0x00f7: 0x044d, # CYRILLIC SMALL LETTER E
+        0x00f8: 0x042d, # CYRILLIC CAPITAL LETTER E
+        0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
+        0x00fa: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
+        0x00fb: 0x0447, # CYRILLIC SMALL LETTER CHE
+        0x00fc: 0x0427, # CYRILLIC CAPITAL LETTER CHE
+        0x00fd: 0x00a7, # SECTION SIGN
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp856.py b/depot_tools/release/win/python_24/Lib/encodings/cp856.py
new file mode 100644
index 0000000..1bf67f0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp856.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP856.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x05d0, # HEBREW LETTER ALEF
+        0x0081: 0x05d1, # HEBREW LETTER BET
+        0x0082: 0x05d2, # HEBREW LETTER GIMEL
+        0x0083: 0x05d3, # HEBREW LETTER DALET
+        0x0084: 0x05d4, # HEBREW LETTER HE
+        0x0085: 0x05d5, # HEBREW LETTER VAV
+        0x0086: 0x05d6, # HEBREW LETTER ZAYIN
+        0x0087: 0x05d7, # HEBREW LETTER HET
+        0x0088: 0x05d8, # HEBREW LETTER TET
+        0x0089: 0x05d9, # HEBREW LETTER YOD
+        0x008a: 0x05da, # HEBREW LETTER FINAL KAF
+        0x008b: 0x05db, # HEBREW LETTER KAF
+        0x008c: 0x05dc, # HEBREW LETTER LAMED
+        0x008d: 0x05dd, # HEBREW LETTER FINAL MEM
+        0x008e: 0x05de, # HEBREW LETTER MEM
+        0x008f: 0x05df, # HEBREW LETTER FINAL NUN
+        0x0090: 0x05e0, # HEBREW LETTER NUN
+        0x0091: 0x05e1, # HEBREW LETTER SAMEKH
+        0x0092: 0x05e2, # HEBREW LETTER AYIN
+        0x0093: 0x05e3, # HEBREW LETTER FINAL PE
+        0x0094: 0x05e4, # HEBREW LETTER PE
+        0x0095: 0x05e5, # HEBREW LETTER FINAL TSADI
+        0x0096: 0x05e6, # HEBREW LETTER TSADI
+        0x0097: 0x05e7, # HEBREW LETTER QOF
+        0x0098: 0x05e8, # HEBREW LETTER RESH
+        0x0099: 0x05e9, # HEBREW LETTER SHIN
+        0x009a: 0x05ea, # HEBREW LETTER TAV
+        0x009b: None,   # UNDEFINED
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: None,   # UNDEFINED
+        0x009e: 0x00d7, # MULTIPLICATION SIGN
+        0x009f: None,   # UNDEFINED
+        0x00a0: None,   # UNDEFINED
+        0x00a1: None,   # UNDEFINED
+        0x00a2: None,   # UNDEFINED
+        0x00a3: None,   # UNDEFINED
+        0x00a4: None,   # UNDEFINED
+        0x00a5: None,   # UNDEFINED
+        0x00a6: None,   # UNDEFINED
+        0x00a7: None,   # UNDEFINED
+        0x00a8: None,   # UNDEFINED
+        0x00a9: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: None,   # UNDEFINED
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: None,   # UNDEFINED
+        0x00b6: None,   # UNDEFINED
+        0x00b7: None,   # UNDEFINED
+        0x00b8: 0x00a9, # COPYRIGHT SIGN
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x00a2, # CENT SIGN
+        0x00be: 0x00a5, # YEN SIGN
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: None,   # UNDEFINED
+        0x00c7: None,   # UNDEFINED
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x00a4, # CURRENCY SIGN
+        0x00d0: None,   # UNDEFINED
+        0x00d1: None,   # UNDEFINED
+        0x00d2: None,   # UNDEFINED
+        0x00d3: None,   # UNDEFINED
+        0x00d4: None,   # UNDEFINED
+        0x00d5: None,   # UNDEFINED
+        0x00d6: None,   # UNDEFINED
+        0x00d7: None,   # UNDEFINED
+        0x00d8: None,   # UNDEFINED
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x00a6, # BROKEN BAR
+        0x00de: None,   # UNDEFINED
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: None,   # UNDEFINED
+        0x00e1: None,   # UNDEFINED
+        0x00e2: None,   # UNDEFINED
+        0x00e3: None,   # UNDEFINED
+        0x00e4: None,   # UNDEFINED
+        0x00e5: None,   # UNDEFINED
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: None,   # UNDEFINED
+        0x00e8: None,   # UNDEFINED
+        0x00e9: None,   # UNDEFINED
+        0x00ea: None,   # UNDEFINED
+        0x00eb: None,   # UNDEFINED
+        0x00ec: None,   # UNDEFINED
+        0x00ed: None,   # UNDEFINED
+        0x00ee: 0x00af, # MACRON
+        0x00ef: 0x00b4, # ACUTE ACCENT
+        0x00f0: 0x00ad, # SOFT HYPHEN
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2017, # DOUBLE LOW LINE
+        0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00f4: 0x00b6, # PILCROW SIGN
+        0x00f5: 0x00a7, # SECTION SIGN
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x00b8, # CEDILLA
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x00a8, # DIAERESIS
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x00b9, # SUPERSCRIPT ONE
+        0x00fc: 0x00b3, # SUPERSCRIPT THREE
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp857.py b/depot_tools/release/win/python_24/Lib/encodings/cp857.py
new file mode 100644
index 0000000..bc262410
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp857.py
@@ -0,0 +1,171 @@
+""" Python Character Mapping Codec generated from 'CP857.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x008d: 0x0131, # LATIN SMALL LETTER DOTLESS I
+        0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x0098: 0x0130, # LATIN CAPITAL LETTER I WITH DOT ABOVE
+        0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x009e: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
+        0x009f: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x00a6: 0x011e, # LATIN CAPITAL LETTER G WITH BREVE
+        0x00a7: 0x011f, # LATIN SMALL LETTER G WITH BREVE
+        0x00a8: 0x00bf, # INVERTED QUESTION MARK
+        0x00a9: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00b6: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x00b7: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x00b8: 0x00a9, # COPYRIGHT SIGN
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x00a2, # CENT SIGN
+        0x00be: 0x00a5, # YEN SIGN
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x00c7: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x00a4, # CURRENCY SIGN
+        0x00d0: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00d1: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00d2: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x00d3: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x00d4: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x00d5: None,   # UNDEFINED
+        0x00d6: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00d7: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00d8: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x00a6, # BROKEN BAR
+        0x00de: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00e3: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00e4: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x00e5: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: None,   # UNDEFINED
+        0x00e8: 0x00d7, # MULTIPLICATION SIGN
+        0x00e9: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00ea: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00eb: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00ed: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00ee: 0x00af, # MACRON
+        0x00ef: 0x00b4, # ACUTE ACCENT
+        0x00f0: 0x00ad, # SOFT HYPHEN
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: None,   # UNDEFINED
+        0x00f3: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00f4: 0x00b6, # PILCROW SIGN
+        0x00f5: 0x00a7, # SECTION SIGN
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x00b8, # CEDILLA
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x00a8, # DIAERESIS
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x00b9, # SUPERSCRIPT ONE
+        0x00fc: 0x00b3, # SUPERSCRIPT THREE
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp860.py b/depot_tools/release/win/python_24/Lib/encodings/cp860.py
new file mode 100644
index 0000000..ae0fcd8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp860.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP860.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0086: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0089: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x008b: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x008c: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x008e: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x008f: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x0092: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0096: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x0098: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x0099: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00a2, # CENT SIGN
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x009e: 0x20a7, # PESETA SIGN
+        0x009f: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00a8: 0x00bf, # INVERTED QUESTION MARK
+        0x00a9: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00e3: 0x03c0, # GREEK SMALL LETTER PI
+        0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00ec: 0x221e, # INFINITY
+        0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00ef: 0x2229, # INTERSECTION
+        0x00f0: 0x2261, # IDENTICAL TO
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00f4: 0x2320, # TOP HALF INTEGRAL
+        0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x2248, # ALMOST EQUAL TO
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp861.py b/depot_tools/release/win/python_24/Lib/encodings/cp861.py
new file mode 100644
index 0000000..4d15b81
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp861.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP861.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Stateless charmap codec for code page 861, delegating to the
    module-level ``encoding_map``/``decoding_map`` tables."""

    def encode(self, input, errors='strict'):
        # Hand the work to the built-in charmap encoder.
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        # Hand the work to the built-in charmap decoder.
        return codecs.charmap_decode(input, errors, decoding_map)
+
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer encoding through Codec.encode (cp861 charmap).
    pass
+
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader decoding through Codec.decode (cp861 charmap).
    pass
+
+### encodings module API
+
def getregentry():
    """Return the (encode, decode, StreamReader, StreamWriter) tuple used
    by the encodings-package codec registry."""
    codec = Codec()
    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x008b: 0x00d0, # LATIN CAPITAL LETTER ETH
+        0x008c: 0x00f0, # LATIN SMALL LETTER ETH
+        0x008d: 0x00de, # LATIN CAPITAL LETTER THORN
+        0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x0095: 0x00fe, # LATIN SMALL LETTER THORN
+        0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x0097: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
+        0x0098: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
+        0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x009e: 0x20a7, # PESETA SIGN
+        0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00a5: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00a6: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00a7: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00a8: 0x00bf, # INVERTED QUESTION MARK
+        0x00a9: 0x2310, # REVERSED NOT SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00e3: 0x03c0, # GREEK SMALL LETTER PI
+        0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00ec: 0x221e, # INFINITY
+        0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00ef: 0x2229, # INTERSECTION
+        0x00f0: 0x2261, # IDENTICAL TO
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00f4: 0x2320, # TOP HALF INTEGRAL
+        0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x2248, # ALMOST EQUAL TO
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp862.py b/depot_tools/release/win/python_24/Lib/encodings/cp862.py
new file mode 100644
index 0000000..f892002
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp862.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP862.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Stateless charmap codec for code page 862, delegating to the
    module-level ``encoding_map``/``decoding_map`` tables."""

    def encode(self, input, errors='strict'):
        # Hand the work to the built-in charmap encoder.
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        # Hand the work to the built-in charmap decoder.
        return codecs.charmap_decode(input, errors, decoding_map)
+
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer encoding through Codec.encode (cp862 charmap).
    pass
+
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader decoding through Codec.decode (cp862 charmap).
    pass
+
+### encodings module API
+
def getregentry():
    """Return the (encode, decode, StreamReader, StreamWriter) tuple used
    by the encodings-package codec registry."""
    codec = Codec()
    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x05d0, # HEBREW LETTER ALEF
+        0x0081: 0x05d1, # HEBREW LETTER BET
+        0x0082: 0x05d2, # HEBREW LETTER GIMEL
+        0x0083: 0x05d3, # HEBREW LETTER DALET
+        0x0084: 0x05d4, # HEBREW LETTER HE
+        0x0085: 0x05d5, # HEBREW LETTER VAV
+        0x0086: 0x05d6, # HEBREW LETTER ZAYIN
+        0x0087: 0x05d7, # HEBREW LETTER HET
+        0x0088: 0x05d8, # HEBREW LETTER TET
+        0x0089: 0x05d9, # HEBREW LETTER YOD
+        0x008a: 0x05da, # HEBREW LETTER FINAL KAF
+        0x008b: 0x05db, # HEBREW LETTER KAF
+        0x008c: 0x05dc, # HEBREW LETTER LAMED
+        0x008d: 0x05dd, # HEBREW LETTER FINAL MEM
+        0x008e: 0x05de, # HEBREW LETTER MEM
+        0x008f: 0x05df, # HEBREW LETTER FINAL NUN
+        0x0090: 0x05e0, # HEBREW LETTER NUN
+        0x0091: 0x05e1, # HEBREW LETTER SAMEKH
+        0x0092: 0x05e2, # HEBREW LETTER AYIN
+        0x0093: 0x05e3, # HEBREW LETTER FINAL PE
+        0x0094: 0x05e4, # HEBREW LETTER PE
+        0x0095: 0x05e5, # HEBREW LETTER FINAL TSADI
+        0x0096: 0x05e6, # HEBREW LETTER TSADI
+        0x0097: 0x05e7, # HEBREW LETTER QOF
+        0x0098: 0x05e8, # HEBREW LETTER RESH
+        0x0099: 0x05e9, # HEBREW LETTER SHIN
+        0x009a: 0x05ea, # HEBREW LETTER TAV
+        0x009b: 0x00a2, # CENT SIGN
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00a5, # YEN SIGN
+        0x009e: 0x20a7, # PESETA SIGN
+        0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00a8: 0x00bf, # INVERTED QUESTION MARK
+        0x00a9: 0x2310, # REVERSED NOT SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S (GERMAN)
+        0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00e3: 0x03c0, # GREEK SMALL LETTER PI
+        0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00ec: 0x221e, # INFINITY
+        0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00ef: 0x2229, # INTERSECTION
+        0x00f0: 0x2261, # IDENTICAL TO
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00f4: 0x2320, # TOP HALF INTEGRAL
+        0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x2248, # ALMOST EQUAL TO
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp863.py b/depot_tools/release/win/python_24/Lib/encodings/cp863.py
new file mode 100644
index 0000000..5f823d10
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp863.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP863.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Stateless charmap codec for code page 863, delegating to the
    module-level ``encoding_map``/``decoding_map`` tables."""

    def encode(self, input, errors='strict'):
        # Hand the work to the built-in charmap encoder.
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        # Hand the work to the built-in charmap decoder.
        return codecs.charmap_decode(input, errors, decoding_map)
+
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer encoding through Codec.encode (cp863 charmap).
    pass
+
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader decoding through Codec.decode (cp863 charmap).
    pass
+
+### encodings module API
+
def getregentry():
    """Return the (encode, decode, StreamReader, StreamWriter) tuple used
    by the encodings-package codec registry."""
    codec = Codec()
    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0086: 0x00b6, # PILCROW SIGN
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x008d: 0x2017, # DOUBLE LOW LINE
+        0x008e: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x008f: 0x00a7, # SECTION SIGN
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x0092: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x0095: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x0098: 0x00a4, # CURRENCY SIGN
+        0x0099: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00a2, # CENT SIGN
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x009e: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00a0: 0x00a6, # BROKEN BAR
+        0x00a1: 0x00b4, # ACUTE ACCENT
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00a8, # DIAERESIS
+        0x00a5: 0x00b8, # CEDILLA
+        0x00a6: 0x00b3, # SUPERSCRIPT THREE
+        0x00a7: 0x00af, # MACRON
+        0x00a8: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00a9: 0x2310, # REVERSED NOT SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00be, # VULGAR FRACTION THREE QUARTERS
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00e3: 0x03c0, # GREEK SMALL LETTER PI
+        0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00ec: 0x221e, # INFINITY
+        0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00ef: 0x2229, # INTERSECTION
+        0x00f0: 0x2261, # IDENTICAL TO
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00f4: 0x2320, # TOP HALF INTEGRAL
+        0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x2248, # ALMOST EQUAL TO
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp864.py b/depot_tools/release/win/python_24/Lib/encodings/cp864.py
new file mode 100644
index 0000000..ac59217
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp864.py
@@ -0,0 +1,170 @@
+""" Python Character Mapping Codec generated from 'CP864.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
class Codec(codecs.Codec):
    """Stateless charmap codec for code page 864, delegating to the
    module-level ``encoding_map``/``decoding_map`` tables."""

    def encode(self, input, errors='strict'):
        # Hand the work to the built-in charmap encoder.
        return codecs.charmap_encode(input, errors, encoding_map)

    def decode(self, input, errors='strict'):
        # Hand the work to the built-in charmap decoder.
        return codecs.charmap_decode(input, errors, decoding_map)
+
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream writer encoding through Codec.encode (cp864 charmap).
    pass
+
class StreamReader(Codec,codecs.StreamReader):
    # Stream reader decoding through Codec.decode (cp864 charmap).
    pass
+
+### encodings module API
+
def getregentry():
    """Return the (encode, decode, StreamReader, StreamWriter) tuple used
    by the encodings-package codec registry."""
    codec = Codec()
    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0025: 0x066a, # ARABIC PERCENT SIGN
+        0x0080: 0x00b0, # DEGREE SIGN
+        0x0081: 0x00b7, # MIDDLE DOT
+        0x0082: 0x2219, # BULLET OPERATOR
+        0x0083: 0x221a, # SQUARE ROOT
+        0x0084: 0x2592, # MEDIUM SHADE
+        0x0085: 0x2500, # FORMS LIGHT HORIZONTAL
+        0x0086: 0x2502, # FORMS LIGHT VERTICAL
+        0x0087: 0x253c, # FORMS LIGHT VERTICAL AND HORIZONTAL
+        0x0088: 0x2524, # FORMS LIGHT VERTICAL AND LEFT
+        0x0089: 0x252c, # FORMS LIGHT DOWN AND HORIZONTAL
+        0x008a: 0x251c, # FORMS LIGHT VERTICAL AND RIGHT
+        0x008b: 0x2534, # FORMS LIGHT UP AND HORIZONTAL
+        0x008c: 0x2510, # FORMS LIGHT DOWN AND LEFT
+        0x008d: 0x250c, # FORMS LIGHT DOWN AND RIGHT
+        0x008e: 0x2514, # FORMS LIGHT UP AND RIGHT
+        0x008f: 0x2518, # FORMS LIGHT UP AND LEFT
+        0x0090: 0x03b2, # GREEK SMALL BETA
+        0x0091: 0x221e, # INFINITY
+        0x0092: 0x03c6, # GREEK SMALL PHI
+        0x0093: 0x00b1, # PLUS-OR-MINUS SIGN
+        0x0094: 0x00bd, # FRACTION 1/2
+        0x0095: 0x00bc, # FRACTION 1/4
+        0x0096: 0x2248, # ALMOST EQUAL TO
+        0x0097: 0x00ab, # LEFT POINTING GUILLEMET
+        0x0098: 0x00bb, # RIGHT POINTING GUILLEMET
+        0x0099: 0xfef7, # ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE ISOLATED FORM
+        0x009a: 0xfef8, # ARABIC LIGATURE LAM WITH ALEF WITH HAMZA ABOVE FINAL FORM
+        0x009b: None,   # UNDEFINED
+        0x009c: None,   # UNDEFINED
+        0x009d: 0xfefb, # ARABIC LIGATURE LAM WITH ALEF ISOLATED FORM
+        0x009e: 0xfefc, # ARABIC LIGATURE LAM WITH ALEF FINAL FORM
+        0x009f: None,   # UNDEFINED
+        0x00a1: 0x00ad, # SOFT HYPHEN
+        0x00a2: 0xfe82, # ARABIC LETTER ALEF WITH MADDA ABOVE FINAL FORM
+        0x00a5: 0xfe84, # ARABIC LETTER ALEF WITH HAMZA ABOVE FINAL FORM
+        0x00a6: None,   # UNDEFINED
+        0x00a7: None,   # UNDEFINED
+        0x00a8: 0xfe8e, # ARABIC LETTER ALEF FINAL FORM
+        0x00a9: 0xfe8f, # ARABIC LETTER BEH ISOLATED FORM
+        0x00aa: 0xfe95, # ARABIC LETTER TEH ISOLATED FORM
+        0x00ab: 0xfe99, # ARABIC LETTER THEH ISOLATED FORM
+        0x00ac: 0x060c, # ARABIC COMMA
+        0x00ad: 0xfe9d, # ARABIC LETTER JEEM ISOLATED FORM
+        0x00ae: 0xfea1, # ARABIC LETTER HAH ISOLATED FORM
+        0x00af: 0xfea5, # ARABIC LETTER KHAH ISOLATED FORM
+        0x00b0: 0x0660, # ARABIC-INDIC DIGIT ZERO
+        0x00b1: 0x0661, # ARABIC-INDIC DIGIT ONE
+        0x00b2: 0x0662, # ARABIC-INDIC DIGIT TWO
+        0x00b3: 0x0663, # ARABIC-INDIC DIGIT THREE
+        0x00b4: 0x0664, # ARABIC-INDIC DIGIT FOUR
+        0x00b5: 0x0665, # ARABIC-INDIC DIGIT FIVE
+        0x00b6: 0x0666, # ARABIC-INDIC DIGIT SIX
+        0x00b7: 0x0667, # ARABIC-INDIC DIGIT SEVEN
+        0x00b8: 0x0668, # ARABIC-INDIC DIGIT EIGHT
+        0x00b9: 0x0669, # ARABIC-INDIC DIGIT NINE
+        0x00ba: 0xfed1, # ARABIC LETTER FEH ISOLATED FORM
+        0x00bb: 0x061b, # ARABIC SEMICOLON
+        0x00bc: 0xfeb1, # ARABIC LETTER SEEN ISOLATED FORM
+        0x00bd: 0xfeb5, # ARABIC LETTER SHEEN ISOLATED FORM
+        0x00be: 0xfeb9, # ARABIC LETTER SAD ISOLATED FORM
+        0x00bf: 0x061f, # ARABIC QUESTION MARK
+        0x00c0: 0x00a2, # CENT SIGN
+        0x00c1: 0xfe80, # ARABIC LETTER HAMZA ISOLATED FORM
+        0x00c2: 0xfe81, # ARABIC LETTER ALEF WITH MADDA ABOVE ISOLATED FORM
+        0x00c3: 0xfe83, # ARABIC LETTER ALEF WITH HAMZA ABOVE ISOLATED FORM
+        0x00c4: 0xfe85, # ARABIC LETTER WAW WITH HAMZA ABOVE ISOLATED FORM
+        0x00c5: 0xfeca, # ARABIC LETTER AIN FINAL FORM
+        0x00c6: 0xfe8b, # ARABIC LETTER YEH WITH HAMZA ABOVE INITIAL FORM
+        0x00c7: 0xfe8d, # ARABIC LETTER ALEF ISOLATED FORM
+        0x00c8: 0xfe91, # ARABIC LETTER BEH INITIAL FORM
+        0x00c9: 0xfe93, # ARABIC LETTER TEH MARBUTA ISOLATED FORM
+        0x00ca: 0xfe97, # ARABIC LETTER TEH INITIAL FORM
+        0x00cb: 0xfe9b, # ARABIC LETTER THEH INITIAL FORM
+        0x00cc: 0xfe9f, # ARABIC LETTER JEEM INITIAL FORM
+        0x00cd: 0xfea3, # ARABIC LETTER HAH INITIAL FORM
+        0x00ce: 0xfea7, # ARABIC LETTER KHAH INITIAL FORM
+        0x00cf: 0xfea9, # ARABIC LETTER DAL ISOLATED FORM
+        0x00d0: 0xfeab, # ARABIC LETTER THAL ISOLATED FORM
+        0x00d1: 0xfead, # ARABIC LETTER REH ISOLATED FORM
+        0x00d2: 0xfeaf, # ARABIC LETTER ZAIN ISOLATED FORM
+        0x00d3: 0xfeb3, # ARABIC LETTER SEEN INITIAL FORM
+        0x00d4: 0xfeb7, # ARABIC LETTER SHEEN INITIAL FORM
+        0x00d5: 0xfebb, # ARABIC LETTER SAD INITIAL FORM
+        0x00d6: 0xfebf, # ARABIC LETTER DAD INITIAL FORM
+        0x00d7: 0xfec1, # ARABIC LETTER TAH ISOLATED FORM
+        0x00d8: 0xfec5, # ARABIC LETTER ZAH ISOLATED FORM
+        0x00d9: 0xfecb, # ARABIC LETTER AIN INITIAL FORM
+        0x00da: 0xfecf, # ARABIC LETTER GHAIN INITIAL FORM
+        0x00db: 0x00a6, # BROKEN VERTICAL BAR
+        0x00dc: 0x00ac, # NOT SIGN
+        0x00dd: 0x00f7, # DIVISION SIGN
+        0x00de: 0x00d7, # MULTIPLICATION SIGN
+        0x00df: 0xfec9, # ARABIC LETTER AIN ISOLATED FORM
+        0x00e0: 0x0640, # ARABIC TATWEEL
+        0x00e1: 0xfed3, # ARABIC LETTER FEH INITIAL FORM
+        0x00e2: 0xfed7, # ARABIC LETTER QAF INITIAL FORM
+        0x00e3: 0xfedb, # ARABIC LETTER KAF INITIAL FORM
+        0x00e4: 0xfedf, # ARABIC LETTER LAM INITIAL FORM
+        0x00e5: 0xfee3, # ARABIC LETTER MEEM INITIAL FORM
+        0x00e6: 0xfee7, # ARABIC LETTER NOON INITIAL FORM
+        0x00e7: 0xfeeb, # ARABIC LETTER HEH INITIAL FORM
+        0x00e8: 0xfeed, # ARABIC LETTER WAW ISOLATED FORM
+        0x00e9: 0xfeef, # ARABIC LETTER ALEF MAKSURA ISOLATED FORM
+        0x00ea: 0xfef3, # ARABIC LETTER YEH INITIAL FORM
+        0x00eb: 0xfebd, # ARABIC LETTER DAD ISOLATED FORM
+        0x00ec: 0xfecc, # ARABIC LETTER AIN MEDIAL FORM
+        0x00ed: 0xfece, # ARABIC LETTER GHAIN FINAL FORM
+        0x00ee: 0xfecd, # ARABIC LETTER GHAIN ISOLATED FORM
+        0x00ef: 0xfee1, # ARABIC LETTER MEEM ISOLATED FORM
+        0x00f0: 0xfe7d, # ARABIC SHADDA MEDIAL FORM
+        0x00f1: 0x0651, # ARABIC SHADDAH
+        0x00f2: 0xfee5, # ARABIC LETTER NOON ISOLATED FORM
+        0x00f3: 0xfee9, # ARABIC LETTER HEH ISOLATED FORM
+        0x00f4: 0xfeec, # ARABIC LETTER HEH MEDIAL FORM
+        0x00f5: 0xfef0, # ARABIC LETTER ALEF MAKSURA FINAL FORM
+        0x00f6: 0xfef2, # ARABIC LETTER YEH FINAL FORM
+        0x00f7: 0xfed0, # ARABIC LETTER GHAIN MEDIAL FORM
+        0x00f8: 0xfed5, # ARABIC LETTER QAF ISOLATED FORM
+        0x00f9: 0xfef5, # ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE ISOLATED FORM
+        0x00fa: 0xfef6, # ARABIC LIGATURE LAM WITH ALEF WITH MADDA ABOVE FINAL FORM
+        0x00fb: 0xfedd, # ARABIC LETTER LAM ISOLATED FORM
+        0x00fc: 0xfed9, # ARABIC LETTER KAF ISOLATED FORM
+        0x00fd: 0xfef1, # ARABIC LETTER YEH ISOLATED FORM
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: None,   # UNDEFINED
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp865.py b/depot_tools/release/win/python_24/Lib/encodings/cp865.py
new file mode 100644
index 0000000..b4f88f68
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp865.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP865.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers over the C-level charmap codec, driven by the tables below.
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)  # unicode -> CP865 bytes
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)  # CP865 bytes -> unicode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # tuple consumed by the encodings package search function
+
+### Decoding Map: byte value -> unicode code point; bytes not listed decode to themselves.
+
+decoding_map = codecs.make_identity_dict(range(256))  # identity for 0x00-0xFF, then patched below
+decoding_map.update({
+        0x0080: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0081: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x0082: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x0083: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x0084: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x0085: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0086: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x0087: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x0088: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0089: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x008a: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x008b: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x008c: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x008d: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x008e: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x008f: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0090: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0091: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x0092: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x0093: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x0094: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x0095: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0096: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x0097: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x0098: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x0099: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x009a: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x009b: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x009e: 0x20a7, # PESETA SIGN
+        0x009f: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00a0: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x00a1: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x00a2: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x00a3: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x00a4: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x00a5: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x00a6: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00a7: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00a8: 0x00bf, # INVERTED QUESTION MARK
+        0x00a9: 0x2310, # REVERSED NOT SIGN
+        0x00aa: 0x00ac, # NOT SIGN
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x00bc, # VULGAR FRACTION ONE QUARTER
+        0x00ad: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00a4, # CURRENCY SIGN
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00e1: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00e2: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00e3: 0x03c0, # GREEK SMALL LETTER PI
+        0x00e4: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00e5: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00e6: 0x00b5, # MICRO SIGN
+        0x00e7: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00e8: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00e9: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00ea: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00eb: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00ec: 0x221e, # INFINITY
+        0x00ed: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00ee: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00ef: 0x2229, # INTERSECTION
+        0x00f0: 0x2261, # IDENTICAL TO
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00f3: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00f4: 0x2320, # TOP HALF INTEGRAL
+        0x00f5: 0x2321, # BOTTOM HALF INTEGRAL
+        0x00f6: 0x00f7, # DIVISION SIGN
+        0x00f7: 0x2248, # ALMOST EQUAL TO
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x207f, # SUPERSCRIPT LATIN SMALL LETTER N
+        0x00fd: 0x00b2, # SUPERSCRIPT TWO
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map: inverse of the decoding map (unicode code point -> byte), built automatically.
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp866.py b/depot_tools/release/win/python_24/Lib/encodings/cp866.py
new file mode 100644
index 0000000..a6a26e54f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp866.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP866.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers over the C-level charmap codec, driven by the tables below.
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)  # unicode -> CP866 bytes
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)  # CP866 bytes -> unicode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # tuple consumed by the encodings package search function
+
+### Decoding Map: byte value -> unicode code point; bytes not listed decode to themselves.
+
+decoding_map = codecs.make_identity_dict(range(256))  # identity for 0x00-0xFF, then patched below
+decoding_map.update({
+        0x0080: 0x0410, # CYRILLIC CAPITAL LETTER A
+        0x0081: 0x0411, # CYRILLIC CAPITAL LETTER BE
+        0x0082: 0x0412, # CYRILLIC CAPITAL LETTER VE
+        0x0083: 0x0413, # CYRILLIC CAPITAL LETTER GHE
+        0x0084: 0x0414, # CYRILLIC CAPITAL LETTER DE
+        0x0085: 0x0415, # CYRILLIC CAPITAL LETTER IE
+        0x0086: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
+        0x0087: 0x0417, # CYRILLIC CAPITAL LETTER ZE
+        0x0088: 0x0418, # CYRILLIC CAPITAL LETTER I
+        0x0089: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
+        0x008a: 0x041a, # CYRILLIC CAPITAL LETTER KA
+        0x008b: 0x041b, # CYRILLIC CAPITAL LETTER EL
+        0x008c: 0x041c, # CYRILLIC CAPITAL LETTER EM
+        0x008d: 0x041d, # CYRILLIC CAPITAL LETTER EN
+        0x008e: 0x041e, # CYRILLIC CAPITAL LETTER O
+        0x008f: 0x041f, # CYRILLIC CAPITAL LETTER PE
+        0x0090: 0x0420, # CYRILLIC CAPITAL LETTER ER
+        0x0091: 0x0421, # CYRILLIC CAPITAL LETTER ES
+        0x0092: 0x0422, # CYRILLIC CAPITAL LETTER TE
+        0x0093: 0x0423, # CYRILLIC CAPITAL LETTER U
+        0x0094: 0x0424, # CYRILLIC CAPITAL LETTER EF
+        0x0095: 0x0425, # CYRILLIC CAPITAL LETTER HA
+        0x0096: 0x0426, # CYRILLIC CAPITAL LETTER TSE
+        0x0097: 0x0427, # CYRILLIC CAPITAL LETTER CHE
+        0x0098: 0x0428, # CYRILLIC CAPITAL LETTER SHA
+        0x0099: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
+        0x009a: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
+        0x009b: 0x042b, # CYRILLIC CAPITAL LETTER YERU
+        0x009c: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
+        0x009d: 0x042d, # CYRILLIC CAPITAL LETTER E
+        0x009e: 0x042e, # CYRILLIC CAPITAL LETTER YU
+        0x009f: 0x042f, # CYRILLIC CAPITAL LETTER YA
+        0x00a0: 0x0430, # CYRILLIC SMALL LETTER A
+        0x00a1: 0x0431, # CYRILLIC SMALL LETTER BE
+        0x00a2: 0x0432, # CYRILLIC SMALL LETTER VE
+        0x00a3: 0x0433, # CYRILLIC SMALL LETTER GHE
+        0x00a4: 0x0434, # CYRILLIC SMALL LETTER DE
+        0x00a5: 0x0435, # CYRILLIC SMALL LETTER IE
+        0x00a6: 0x0436, # CYRILLIC SMALL LETTER ZHE
+        0x00a7: 0x0437, # CYRILLIC SMALL LETTER ZE
+        0x00a8: 0x0438, # CYRILLIC SMALL LETTER I
+        0x00a9: 0x0439, # CYRILLIC SMALL LETTER SHORT I
+        0x00aa: 0x043a, # CYRILLIC SMALL LETTER KA
+        0x00ab: 0x043b, # CYRILLIC SMALL LETTER EL
+        0x00ac: 0x043c, # CYRILLIC SMALL LETTER EM
+        0x00ad: 0x043d, # CYRILLIC SMALL LETTER EN
+        0x00ae: 0x043e, # CYRILLIC SMALL LETTER O
+        0x00af: 0x043f, # CYRILLIC SMALL LETTER PE
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x2561, # BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b6: 0x2562, # BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b7: 0x2556, # BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00b8: 0x2555, # BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x255c, # BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00be: 0x255b, # BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x255e, # BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00c7: 0x255f, # BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x2567, # BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00d0: 0x2568, # BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00d1: 0x2564, # BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00d2: 0x2565, # BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00d3: 0x2559, # BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00d4: 0x2558, # BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00d5: 0x2552, # BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00d6: 0x2553, # BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00d7: 0x256b, # BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00d8: 0x256a, # BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x258c, # LEFT HALF BLOCK
+        0x00de: 0x2590, # RIGHT HALF BLOCK
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x0440, # CYRILLIC SMALL LETTER ER
+        0x00e1: 0x0441, # CYRILLIC SMALL LETTER ES
+        0x00e2: 0x0442, # CYRILLIC SMALL LETTER TE
+        0x00e3: 0x0443, # CYRILLIC SMALL LETTER U
+        0x00e4: 0x0444, # CYRILLIC SMALL LETTER EF
+        0x00e5: 0x0445, # CYRILLIC SMALL LETTER HA
+        0x00e6: 0x0446, # CYRILLIC SMALL LETTER TSE
+        0x00e7: 0x0447, # CYRILLIC SMALL LETTER CHE
+        0x00e8: 0x0448, # CYRILLIC SMALL LETTER SHA
+        0x00e9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
+        0x00ea: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
+        0x00eb: 0x044b, # CYRILLIC SMALL LETTER YERU
+        0x00ec: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
+        0x00ed: 0x044d, # CYRILLIC SMALL LETTER E
+        0x00ee: 0x044e, # CYRILLIC SMALL LETTER YU
+        0x00ef: 0x044f, # CYRILLIC SMALL LETTER YA
+        0x00f0: 0x0401, # CYRILLIC CAPITAL LETTER IO
+        0x00f1: 0x0451, # CYRILLIC SMALL LETTER IO
+        0x00f2: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
+        0x00f3: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
+        0x00f4: 0x0407, # CYRILLIC CAPITAL LETTER YI
+        0x00f5: 0x0457, # CYRILLIC SMALL LETTER YI
+        0x00f6: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
+        0x00f7: 0x045e, # CYRILLIC SMALL LETTER SHORT U
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x2219, # BULLET OPERATOR
+        0x00fa: 0x00b7, # MIDDLE DOT
+        0x00fb: 0x221a, # SQUARE ROOT
+        0x00fc: 0x2116, # NUMERO SIGN
+        0x00fd: 0x00a4, # CURRENCY SIGN
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map: inverse of the decoding map (unicode code point -> byte), built automatically.
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp869.py b/depot_tools/release/win/python_24/Lib/encodings/cp869.py
new file mode 100644
index 0000000..21b0ab8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp869.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'CP869.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers over the C-level charmap codec, driven by the tables below.
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)  # unicode -> CP869 bytes
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)  # CP869 bytes -> unicode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # tuple consumed by the encodings package search function
+
+### Decoding Map: byte value -> unicode code point; None marks bytes undefined in CP869.
+
+decoding_map = codecs.make_identity_dict(range(256))  # identity for 0x00-0xFF, then patched below
+decoding_map.update({
+        0x0080: None,   # UNDEFINED
+        0x0081: None,   # UNDEFINED
+        0x0082: None,   # UNDEFINED
+        0x0083: None,   # UNDEFINED
+        0x0084: None,   # UNDEFINED
+        0x0085: None,   # UNDEFINED
+        0x0086: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
+        0x0087: None,   # UNDEFINED
+        0x0088: 0x00b7, # MIDDLE DOT
+        0x0089: 0x00ac, # NOT SIGN
+        0x008a: 0x00a6, # BROKEN BAR
+        0x008b: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x008c: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x008d: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
+        0x008e: 0x2015, # HORIZONTAL BAR
+        0x008f: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
+        0x0090: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
+        0x0091: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
+        0x0092: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
+        0x0093: None,   # UNDEFINED
+        0x0094: None,   # UNDEFINED
+        0x0095: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
+        0x0096: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
+        0x0097: 0x00a9, # COPYRIGHT SIGN
+        0x0098: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
+        0x0099: 0x00b2, # SUPERSCRIPT TWO
+        0x009a: 0x00b3, # SUPERSCRIPT THREE
+        0x009b: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
+        0x009c: 0x00a3, # POUND SIGN
+        0x009d: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
+        0x009e: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
+        0x009f: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
+        0x00a0: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
+        0x00a1: 0x0390, # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
+        0x00a2: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
+        0x00a3: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
+        0x00a4: 0x0391, # GREEK CAPITAL LETTER ALPHA
+        0x00a5: 0x0392, # GREEK CAPITAL LETTER BETA
+        0x00a6: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00a7: 0x0394, # GREEK CAPITAL LETTER DELTA
+        0x00a8: 0x0395, # GREEK CAPITAL LETTER EPSILON
+        0x00a9: 0x0396, # GREEK CAPITAL LETTER ZETA
+        0x00aa: 0x0397, # GREEK CAPITAL LETTER ETA
+        0x00ab: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00ac: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00ad: 0x0399, # GREEK CAPITAL LETTER IOTA
+        0x00ae: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00af: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00b0: 0x2591, # LIGHT SHADE
+        0x00b1: 0x2592, # MEDIUM SHADE
+        0x00b2: 0x2593, # DARK SHADE
+        0x00b3: 0x2502, # BOX DRAWINGS LIGHT VERTICAL
+        0x00b4: 0x2524, # BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x00b5: 0x039a, # GREEK CAPITAL LETTER KAPPA
+        0x00b6: 0x039b, # GREEK CAPITAL LETTER LAMDA
+        0x00b7: 0x039c, # GREEK CAPITAL LETTER MU
+        0x00b8: 0x039d, # GREEK CAPITAL LETTER NU
+        0x00b9: 0x2563, # BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00ba: 0x2551, # BOX DRAWINGS DOUBLE VERTICAL
+        0x00bb: 0x2557, # BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00bc: 0x255d, # BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00bd: 0x039e, # GREEK CAPITAL LETTER XI
+        0x00be: 0x039f, # GREEK CAPITAL LETTER OMICRON
+        0x00bf: 0x2510, # BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x00c0: 0x2514, # BOX DRAWINGS LIGHT UP AND RIGHT
+        0x00c1: 0x2534, # BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x00c2: 0x252c, # BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x00c3: 0x251c, # BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x00c4: 0x2500, # BOX DRAWINGS LIGHT HORIZONTAL
+        0x00c5: 0x253c, # BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x00c6: 0x03a0, # GREEK CAPITAL LETTER PI
+        0x00c7: 0x03a1, # GREEK CAPITAL LETTER RHO
+        0x00c8: 0x255a, # BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00c9: 0x2554, # BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00ca: 0x2569, # BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00cb: 0x2566, # BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00cc: 0x2560, # BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00cd: 0x2550, # BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00ce: 0x256c, # BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00cf: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00d0: 0x03a4, # GREEK CAPITAL LETTER TAU
+        0x00d1: 0x03a5, # GREEK CAPITAL LETTER UPSILON
+        0x00d2: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00d3: 0x03a7, # GREEK CAPITAL LETTER CHI
+        0x00d4: 0x03a8, # GREEK CAPITAL LETTER PSI
+        0x00d5: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00d6: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00d7: 0x03b2, # GREEK SMALL LETTER BETA
+        0x00d8: 0x03b3, # GREEK SMALL LETTER GAMMA
+        0x00d9: 0x2518, # BOX DRAWINGS LIGHT UP AND LEFT
+        0x00da: 0x250c, # BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x00db: 0x2588, # FULL BLOCK
+        0x00dc: 0x2584, # LOWER HALF BLOCK
+        0x00dd: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00de: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00df: 0x2580, # UPPER HALF BLOCK
+        0x00e0: 0x03b6, # GREEK SMALL LETTER ZETA
+        0x00e1: 0x03b7, # GREEK SMALL LETTER ETA
+        0x00e2: 0x03b8, # GREEK SMALL LETTER THETA
+        0x00e3: 0x03b9, # GREEK SMALL LETTER IOTA
+        0x00e4: 0x03ba, # GREEK SMALL LETTER KAPPA
+        0x00e5: 0x03bb, # GREEK SMALL LETTER LAMDA
+        0x00e6: 0x03bc, # GREEK SMALL LETTER MU
+        0x00e7: 0x03bd, # GREEK SMALL LETTER NU
+        0x00e8: 0x03be, # GREEK SMALL LETTER XI
+        0x00e9: 0x03bf, # GREEK SMALL LETTER OMICRON
+        0x00ea: 0x03c0, # GREEK SMALL LETTER PI
+        0x00eb: 0x03c1, # GREEK SMALL LETTER RHO
+        0x00ec: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00ed: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
+        0x00ee: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00ef: 0x0384, # GREEK TONOS
+        0x00f0: 0x00ad, # SOFT HYPHEN
+        0x00f1: 0x00b1, # PLUS-MINUS SIGN
+        0x00f2: 0x03c5, # GREEK SMALL LETTER UPSILON
+        0x00f3: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00f4: 0x03c7, # GREEK SMALL LETTER CHI
+        0x00f5: 0x00a7, # SECTION SIGN
+        0x00f6: 0x03c8, # GREEK SMALL LETTER PSI
+        0x00f7: 0x0385, # GREEK DIALYTIKA TONOS
+        0x00f8: 0x00b0, # DEGREE SIGN
+        0x00f9: 0x00a8, # DIAERESIS
+        0x00fa: 0x03c9, # GREEK SMALL LETTER OMEGA
+        0x00fb: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
+        0x00fc: 0x03b0, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
+        0x00fd: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
+        0x00fe: 0x25a0, # BLACK SQUARE
+        0x00ff: 0x00a0, # NO-BREAK SPACE
+})
+
+### Encoding Map: inverse of the decoding map (unicode code point -> byte), built automatically.
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp874.py b/depot_tools/release/win/python_24/Lib/encodings/cp874.py
new file mode 100644
index 0000000..01c5eb6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp874.py
@@ -0,0 +1,171 @@
+""" Python Character Mapping Codec generated from 'CP874.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs -- thin wrappers over the C-level charmap codec, driven by the tables below.
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)  # unicode -> CP874 bytes
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)  # CP874 bytes -> unicode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)  # tuple consumed by the encodings package search function
+
+### Decoding Map: byte value -> unicode code point; None marks bytes undefined in CP874.
+
+decoding_map = codecs.make_identity_dict(range(256))  # identity for 0x00-0xFF, then patched below
+decoding_map.update({
+        0x0080: 0x20ac, # EURO SIGN
+        0x0081: None,   # UNDEFINED
+        0x0082: None,   # UNDEFINED
+        0x0083: None,   # UNDEFINED
+        0x0084: None,   # UNDEFINED
+        0x0085: 0x2026, # HORIZONTAL ELLIPSIS
+        0x0086: None,   # UNDEFINED
+        0x0087: None,   # UNDEFINED
+        0x0088: None,   # UNDEFINED
+        0x0089: None,   # UNDEFINED
+        0x008a: None,   # UNDEFINED
+        0x008b: None,   # UNDEFINED
+        0x008c: None,   # UNDEFINED
+        0x008d: None,   # UNDEFINED
+        0x008e: None,   # UNDEFINED
+        0x008f: None,   # UNDEFINED
+        0x0090: None,   # UNDEFINED
+        0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x0095: 0x2022, # BULLET
+        0x0096: 0x2013, # EN DASH
+        0x0097: 0x2014, # EM DASH
+        0x0098: None,   # UNDEFINED
+        0x0099: None,   # UNDEFINED
+        0x009a: None,   # UNDEFINED
+        0x009b: None,   # UNDEFINED
+        0x009c: None,   # UNDEFINED
+        0x009d: None,   # UNDEFINED
+        0x009e: None,   # UNDEFINED
+        0x009f: None,   # UNDEFINED
+        0x00a1: 0x0e01, # THAI CHARACTER KO KAI
+        0x00a2: 0x0e02, # THAI CHARACTER KHO KHAI
+        0x00a3: 0x0e03, # THAI CHARACTER KHO KHUAT
+        0x00a4: 0x0e04, # THAI CHARACTER KHO KHWAI
+        0x00a5: 0x0e05, # THAI CHARACTER KHO KHON
+        0x00a6: 0x0e06, # THAI CHARACTER KHO RAKHANG
+        0x00a7: 0x0e07, # THAI CHARACTER NGO NGU
+        0x00a8: 0x0e08, # THAI CHARACTER CHO CHAN
+        0x00a9: 0x0e09, # THAI CHARACTER CHO CHING
+        0x00aa: 0x0e0a, # THAI CHARACTER CHO CHANG
+        0x00ab: 0x0e0b, # THAI CHARACTER SO SO
+        0x00ac: 0x0e0c, # THAI CHARACTER CHO CHOE
+        0x00ad: 0x0e0d, # THAI CHARACTER YO YING
+        0x00ae: 0x0e0e, # THAI CHARACTER DO CHADA
+        0x00af: 0x0e0f, # THAI CHARACTER TO PATAK
+        0x00b0: 0x0e10, # THAI CHARACTER THO THAN
+        0x00b1: 0x0e11, # THAI CHARACTER THO NANGMONTHO
+        0x00b2: 0x0e12, # THAI CHARACTER THO PHUTHAO
+        0x00b3: 0x0e13, # THAI CHARACTER NO NEN
+        0x00b4: 0x0e14, # THAI CHARACTER DO DEK
+        0x00b5: 0x0e15, # THAI CHARACTER TO TAO
+        0x00b6: 0x0e16, # THAI CHARACTER THO THUNG
+        0x00b7: 0x0e17, # THAI CHARACTER THO THAHAN
+        0x00b8: 0x0e18, # THAI CHARACTER THO THONG
+        0x00b9: 0x0e19, # THAI CHARACTER NO NU
+        0x00ba: 0x0e1a, # THAI CHARACTER BO BAIMAI
+        0x00bb: 0x0e1b, # THAI CHARACTER PO PLA
+        0x00bc: 0x0e1c, # THAI CHARACTER PHO PHUNG
+        0x00bd: 0x0e1d, # THAI CHARACTER FO FA
+        0x00be: 0x0e1e, # THAI CHARACTER PHO PHAN
+        0x00bf: 0x0e1f, # THAI CHARACTER FO FAN
+        0x00c0: 0x0e20, # THAI CHARACTER PHO SAMPHAO
+        0x00c1: 0x0e21, # THAI CHARACTER MO MA
+        0x00c2: 0x0e22, # THAI CHARACTER YO YAK
+        0x00c3: 0x0e23, # THAI CHARACTER RO RUA
+        0x00c4: 0x0e24, # THAI CHARACTER RU
+        0x00c5: 0x0e25, # THAI CHARACTER LO LING
+        0x00c6: 0x0e26, # THAI CHARACTER LU
+        0x00c7: 0x0e27, # THAI CHARACTER WO WAEN
+        0x00c8: 0x0e28, # THAI CHARACTER SO SALA
+        0x00c9: 0x0e29, # THAI CHARACTER SO RUSI
+        0x00ca: 0x0e2a, # THAI CHARACTER SO SUA
+        0x00cb: 0x0e2b, # THAI CHARACTER HO HIP
+        0x00cc: 0x0e2c, # THAI CHARACTER LO CHULA
+        0x00cd: 0x0e2d, # THAI CHARACTER O ANG
+        0x00ce: 0x0e2e, # THAI CHARACTER HO NOKHUK
+        0x00cf: 0x0e2f, # THAI CHARACTER PAIYANNOI
+        0x00d0: 0x0e30, # THAI CHARACTER SARA A
+        0x00d1: 0x0e31, # THAI CHARACTER MAI HAN-AKAT
+        0x00d2: 0x0e32, # THAI CHARACTER SARA AA
+        0x00d3: 0x0e33, # THAI CHARACTER SARA AM
+        0x00d4: 0x0e34, # THAI CHARACTER SARA I
+        0x00d5: 0x0e35, # THAI CHARACTER SARA II
+        0x00d6: 0x0e36, # THAI CHARACTER SARA UE
+        0x00d7: 0x0e37, # THAI CHARACTER SARA UEE
+        0x00d8: 0x0e38, # THAI CHARACTER SARA U
+        0x00d9: 0x0e39, # THAI CHARACTER SARA UU
+        0x00da: 0x0e3a, # THAI CHARACTER PHINTHU
+        0x00db: None,   # UNDEFINED
+        0x00dc: None,   # UNDEFINED
+        0x00dd: None,   # UNDEFINED
+        0x00de: None,   # UNDEFINED
+        0x00df: 0x0e3f, # THAI CURRENCY SYMBOL BAHT
+        0x00e0: 0x0e40, # THAI CHARACTER SARA E
+        0x00e1: 0x0e41, # THAI CHARACTER SARA AE
+        0x00e2: 0x0e42, # THAI CHARACTER SARA O
+        0x00e3: 0x0e43, # THAI CHARACTER SARA AI MAIMUAN
+        0x00e4: 0x0e44, # THAI CHARACTER SARA AI MAIMALAI
+        0x00e5: 0x0e45, # THAI CHARACTER LAKKHANGYAO
+        0x00e6: 0x0e46, # THAI CHARACTER MAIYAMOK
+        0x00e7: 0x0e47, # THAI CHARACTER MAITAIKHU
+        0x00e8: 0x0e48, # THAI CHARACTER MAI EK
+        0x00e9: 0x0e49, # THAI CHARACTER MAI THO
+        0x00ea: 0x0e4a, # THAI CHARACTER MAI TRI
+        0x00eb: 0x0e4b, # THAI CHARACTER MAI CHATTAWA
+        0x00ec: 0x0e4c, # THAI CHARACTER THANTHAKHAT
+        0x00ed: 0x0e4d, # THAI CHARACTER NIKHAHIT
+        0x00ee: 0x0e4e, # THAI CHARACTER YAMAKKAN
+        0x00ef: 0x0e4f, # THAI CHARACTER FONGMAN
+        0x00f0: 0x0e50, # THAI DIGIT ZERO
+        0x00f1: 0x0e51, # THAI DIGIT ONE
+        0x00f2: 0x0e52, # THAI DIGIT TWO
+        0x00f3: 0x0e53, # THAI DIGIT THREE
+        0x00f4: 0x0e54, # THAI DIGIT FOUR
+        0x00f5: 0x0e55, # THAI DIGIT FIVE
+        0x00f6: 0x0e56, # THAI DIGIT SIX
+        0x00f7: 0x0e57, # THAI DIGIT SEVEN
+        0x00f8: 0x0e58, # THAI DIGIT EIGHT
+        0x00f9: 0x0e59, # THAI DIGIT NINE
+        0x00fa: 0x0e5a, # THAI CHARACTER ANGKHANKHU
+        0x00fb: 0x0e5b, # THAI CHARACTER KHOMUT
+        0x00fc: None,   # UNDEFINED
+        0x00fd: None,   # UNDEFINED
+        0x00fe: None,   # UNDEFINED
+        0x00ff: None,   # UNDEFINED
+})
+
+### Encoding Map: inverse of the decoding map (unicode code point -> byte), built automatically.
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp875.py b/depot_tools/release/win/python_24/Lib/encodings/cp875.py
new file mode 100644
index 0000000..95bb7772
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp875.py
@@ -0,0 +1,281 @@
+""" Python Character Mapping Codec generated from 'CP875.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0004: 0x009c, # CONTROL
+        0x0005: 0x0009, # HORIZONTAL TABULATION
+        0x0006: 0x0086, # CONTROL
+        0x0007: 0x007f, # DELETE
+        0x0008: 0x0097, # CONTROL
+        0x0009: 0x008d, # CONTROL
+        0x000a: 0x008e, # CONTROL
+        0x0014: 0x009d, # CONTROL
+        0x0015: 0x0085, # CONTROL
+        0x0016: 0x0008, # BACKSPACE
+        0x0017: 0x0087, # CONTROL
+        0x001a: 0x0092, # CONTROL
+        0x001b: 0x008f, # CONTROL
+        0x0020: 0x0080, # CONTROL
+        0x0021: 0x0081, # CONTROL
+        0x0022: 0x0082, # CONTROL
+        0x0023: 0x0083, # CONTROL
+        0x0024: 0x0084, # CONTROL
+        0x0025: 0x000a, # LINE FEED
+        0x0026: 0x0017, # END OF TRANSMISSION BLOCK
+        0x0027: 0x001b, # ESCAPE
+        0x0028: 0x0088, # CONTROL
+        0x0029: 0x0089, # CONTROL
+        0x002a: 0x008a, # CONTROL
+        0x002b: 0x008b, # CONTROL
+        0x002c: 0x008c, # CONTROL
+        0x002d: 0x0005, # ENQUIRY
+        0x002e: 0x0006, # ACKNOWLEDGE
+        0x002f: 0x0007, # BELL
+        0x0030: 0x0090, # CONTROL
+        0x0031: 0x0091, # CONTROL
+        0x0032: 0x0016, # SYNCHRONOUS IDLE
+        0x0033: 0x0093, # CONTROL
+        0x0034: 0x0094, # CONTROL
+        0x0035: 0x0095, # CONTROL
+        0x0036: 0x0096, # CONTROL
+        0x0037: 0x0004, # END OF TRANSMISSION
+        0x0038: 0x0098, # CONTROL
+        0x0039: 0x0099, # CONTROL
+        0x003a: 0x009a, # CONTROL
+        0x003b: 0x009b, # CONTROL
+        0x003c: 0x0014, # DEVICE CONTROL FOUR
+        0x003d: 0x0015, # NEGATIVE ACKNOWLEDGE
+        0x003e: 0x009e, # CONTROL
+        0x003f: 0x001a, # SUBSTITUTE
+        0x0040: 0x0020, # SPACE
+        0x0041: 0x0391, # GREEK CAPITAL LETTER ALPHA
+        0x0042: 0x0392, # GREEK CAPITAL LETTER BETA
+        0x0043: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x0044: 0x0394, # GREEK CAPITAL LETTER DELTA
+        0x0045: 0x0395, # GREEK CAPITAL LETTER EPSILON
+        0x0046: 0x0396, # GREEK CAPITAL LETTER ZETA
+        0x0047: 0x0397, # GREEK CAPITAL LETTER ETA
+        0x0048: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x0049: 0x0399, # GREEK CAPITAL LETTER IOTA
+        0x004a: 0x005b, # LEFT SQUARE BRACKET
+        0x004b: 0x002e, # FULL STOP
+        0x004c: 0x003c, # LESS-THAN SIGN
+        0x004d: 0x0028, # LEFT PARENTHESIS
+        0x004e: 0x002b, # PLUS SIGN
+        0x004f: 0x0021, # EXCLAMATION MARK
+        0x0050: 0x0026, # AMPERSAND
+        0x0051: 0x039a, # GREEK CAPITAL LETTER KAPPA
+        0x0052: 0x039b, # GREEK CAPITAL LETTER LAMDA
+        0x0053: 0x039c, # GREEK CAPITAL LETTER MU
+        0x0054: 0x039d, # GREEK CAPITAL LETTER NU
+        0x0055: 0x039e, # GREEK CAPITAL LETTER XI
+        0x0056: 0x039f, # GREEK CAPITAL LETTER OMICRON
+        0x0057: 0x03a0, # GREEK CAPITAL LETTER PI
+        0x0058: 0x03a1, # GREEK CAPITAL LETTER RHO
+        0x0059: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x005a: 0x005d, # RIGHT SQUARE BRACKET
+        0x005b: 0x0024, # DOLLAR SIGN
+        0x005c: 0x002a, # ASTERISK
+        0x005d: 0x0029, # RIGHT PARENTHESIS
+        0x005e: 0x003b, # SEMICOLON
+        0x005f: 0x005e, # CIRCUMFLEX ACCENT
+        0x0060: 0x002d, # HYPHEN-MINUS
+        0x0061: 0x002f, # SOLIDUS
+        0x0062: 0x03a4, # GREEK CAPITAL LETTER TAU
+        0x0063: 0x03a5, # GREEK CAPITAL LETTER UPSILON
+        0x0064: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x0065: 0x03a7, # GREEK CAPITAL LETTER CHI
+        0x0066: 0x03a8, # GREEK CAPITAL LETTER PSI
+        0x0067: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x0068: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
+        0x0069: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
+        0x006a: 0x007c, # VERTICAL LINE
+        0x006b: 0x002c, # COMMA
+        0x006c: 0x0025, # PERCENT SIGN
+        0x006d: 0x005f, # LOW LINE
+        0x006e: 0x003e, # GREATER-THAN SIGN
+        0x006f: 0x003f, # QUESTION MARK
+        0x0070: 0x00a8, # DIAERESIS
+        0x0071: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
+        0x0072: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
+        0x0073: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
+        0x0074: 0x00a0, # NO-BREAK SPACE
+        0x0075: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
+        0x0076: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
+        0x0077: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
+        0x0078: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
+        0x0079: 0x0060, # GRAVE ACCENT
+        0x007a: 0x003a, # COLON
+        0x007b: 0x0023, # NUMBER SIGN
+        0x007c: 0x0040, # COMMERCIAL AT
+        0x007d: 0x0027, # APOSTROPHE
+        0x007e: 0x003d, # EQUALS SIGN
+        0x007f: 0x0022, # QUOTATION MARK
+        0x0080: 0x0385, # GREEK DIALYTIKA TONOS
+        0x0081: 0x0061, # LATIN SMALL LETTER A
+        0x0082: 0x0062, # LATIN SMALL LETTER B
+        0x0083: 0x0063, # LATIN SMALL LETTER C
+        0x0084: 0x0064, # LATIN SMALL LETTER D
+        0x0085: 0x0065, # LATIN SMALL LETTER E
+        0x0086: 0x0066, # LATIN SMALL LETTER F
+        0x0087: 0x0067, # LATIN SMALL LETTER G
+        0x0088: 0x0068, # LATIN SMALL LETTER H
+        0x0089: 0x0069, # LATIN SMALL LETTER I
+        0x008a: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x008b: 0x03b2, # GREEK SMALL LETTER BETA
+        0x008c: 0x03b3, # GREEK SMALL LETTER GAMMA
+        0x008d: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x008e: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x008f: 0x03b6, # GREEK SMALL LETTER ZETA
+        0x0090: 0x00b0, # DEGREE SIGN
+        0x0091: 0x006a, # LATIN SMALL LETTER J
+        0x0092: 0x006b, # LATIN SMALL LETTER K
+        0x0093: 0x006c, # LATIN SMALL LETTER L
+        0x0094: 0x006d, # LATIN SMALL LETTER M
+        0x0095: 0x006e, # LATIN SMALL LETTER N
+        0x0096: 0x006f, # LATIN SMALL LETTER O
+        0x0097: 0x0070, # LATIN SMALL LETTER P
+        0x0098: 0x0071, # LATIN SMALL LETTER Q
+        0x0099: 0x0072, # LATIN SMALL LETTER R
+        0x009a: 0x03b7, # GREEK SMALL LETTER ETA
+        0x009b: 0x03b8, # GREEK SMALL LETTER THETA
+        0x009c: 0x03b9, # GREEK SMALL LETTER IOTA
+        0x009d: 0x03ba, # GREEK SMALL LETTER KAPPA
+        0x009e: 0x03bb, # GREEK SMALL LETTER LAMDA
+        0x009f: 0x03bc, # GREEK SMALL LETTER MU
+        0x00a0: 0x00b4, # ACUTE ACCENT
+        0x00a1: 0x007e, # TILDE
+        0x00a2: 0x0073, # LATIN SMALL LETTER S
+        0x00a3: 0x0074, # LATIN SMALL LETTER T
+        0x00a4: 0x0075, # LATIN SMALL LETTER U
+        0x00a5: 0x0076, # LATIN SMALL LETTER V
+        0x00a6: 0x0077, # LATIN SMALL LETTER W
+        0x00a7: 0x0078, # LATIN SMALL LETTER X
+        0x00a8: 0x0079, # LATIN SMALL LETTER Y
+        0x00a9: 0x007a, # LATIN SMALL LETTER Z
+        0x00aa: 0x03bd, # GREEK SMALL LETTER NU
+        0x00ab: 0x03be, # GREEK SMALL LETTER XI
+        0x00ac: 0x03bf, # GREEK SMALL LETTER OMICRON
+        0x00ad: 0x03c0, # GREEK SMALL LETTER PI
+        0x00ae: 0x03c1, # GREEK SMALL LETTER RHO
+        0x00af: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00b0: 0x00a3, # POUND SIGN
+        0x00b1: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
+        0x00b2: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
+        0x00b3: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
+        0x00b4: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
+        0x00b5: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
+        0x00b6: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
+        0x00b7: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
+        0x00b8: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
+        0x00b9: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
+        0x00ba: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
+        0x00bb: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00bc: 0x03c5, # GREEK SMALL LETTER UPSILON
+        0x00bd: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00be: 0x03c7, # GREEK SMALL LETTER CHI
+        0x00bf: 0x03c8, # GREEK SMALL LETTER PSI
+        0x00c0: 0x007b, # LEFT CURLY BRACKET
+        0x00c1: 0x0041, # LATIN CAPITAL LETTER A
+        0x00c2: 0x0042, # LATIN CAPITAL LETTER B
+        0x00c3: 0x0043, # LATIN CAPITAL LETTER C
+        0x00c4: 0x0044, # LATIN CAPITAL LETTER D
+        0x00c5: 0x0045, # LATIN CAPITAL LETTER E
+        0x00c6: 0x0046, # LATIN CAPITAL LETTER F
+        0x00c7: 0x0047, # LATIN CAPITAL LETTER G
+        0x00c8: 0x0048, # LATIN CAPITAL LETTER H
+        0x00c9: 0x0049, # LATIN CAPITAL LETTER I
+        0x00ca: 0x00ad, # SOFT HYPHEN
+        0x00cb: 0x03c9, # GREEK SMALL LETTER OMEGA
+        0x00cc: 0x0390, # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
+        0x00cd: 0x03b0, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
+        0x00ce: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x00cf: 0x2015, # HORIZONTAL BAR
+        0x00d0: 0x007d, # RIGHT CURLY BRACKET
+        0x00d1: 0x004a, # LATIN CAPITAL LETTER J
+        0x00d2: 0x004b, # LATIN CAPITAL LETTER K
+        0x00d3: 0x004c, # LATIN CAPITAL LETTER L
+        0x00d4: 0x004d, # LATIN CAPITAL LETTER M
+        0x00d5: 0x004e, # LATIN CAPITAL LETTER N
+        0x00d6: 0x004f, # LATIN CAPITAL LETTER O
+        0x00d7: 0x0050, # LATIN CAPITAL LETTER P
+        0x00d8: 0x0051, # LATIN CAPITAL LETTER Q
+        0x00d9: 0x0052, # LATIN CAPITAL LETTER R
+        0x00da: 0x00b1, # PLUS-MINUS SIGN
+        0x00db: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x00dc: 0x001a, # SUBSTITUTE
+        0x00dd: 0x0387, # GREEK ANO TELEIA
+        0x00de: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00df: 0x00a6, # BROKEN BAR
+        0x00e0: 0x005c, # REVERSE SOLIDUS
+        0x00e1: 0x001a, # SUBSTITUTE
+        0x00e2: 0x0053, # LATIN CAPITAL LETTER S
+        0x00e3: 0x0054, # LATIN CAPITAL LETTER T
+        0x00e4: 0x0055, # LATIN CAPITAL LETTER U
+        0x00e5: 0x0056, # LATIN CAPITAL LETTER V
+        0x00e6: 0x0057, # LATIN CAPITAL LETTER W
+        0x00e7: 0x0058, # LATIN CAPITAL LETTER X
+        0x00e8: 0x0059, # LATIN CAPITAL LETTER Y
+        0x00e9: 0x005a, # LATIN CAPITAL LETTER Z
+        0x00ea: 0x00b2, # SUPERSCRIPT TWO
+        0x00eb: 0x00a7, # SECTION SIGN
+        0x00ec: 0x001a, # SUBSTITUTE
+        0x00ed: 0x001a, # SUBSTITUTE
+        0x00ee: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00ef: 0x00ac, # NOT SIGN
+        0x00f0: 0x0030, # DIGIT ZERO
+        0x00f1: 0x0031, # DIGIT ONE
+        0x00f2: 0x0032, # DIGIT TWO
+        0x00f3: 0x0033, # DIGIT THREE
+        0x00f4: 0x0034, # DIGIT FOUR
+        0x00f5: 0x0035, # DIGIT FIVE
+        0x00f6: 0x0036, # DIGIT SIX
+        0x00f7: 0x0037, # DIGIT SEVEN
+        0x00f8: 0x0038, # DIGIT EIGHT
+        0x00f9: 0x0039, # DIGIT NINE
+        0x00fa: 0x00b3, # SUPERSCRIPT THREE
+        0x00fb: 0x00a9, # COPYRIGHT SIGN
+        0x00fc: 0x001a, # SUBSTITUTE
+        0x00fd: 0x001a, # SUBSTITUTE
+        0x00fe: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00ff: 0x009f, # CONTROL
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp932.py b/depot_tools/release/win/python_24/Lib/encodings/cp932.py
new file mode 100644
index 0000000..38937f5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp932.py
@@ -0,0 +1,34 @@
+#
+# cp932.py: Python Unicode Codec for CP932
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: cp932.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_jp, codecs
+
+codec = _codecs_jp.getcodec('cp932')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp949.py b/depot_tools/release/win/python_24/Lib/encodings/cp949.py
new file mode 100644
index 0000000..0f3c847
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp949.py
@@ -0,0 +1,34 @@
+#
+# cp949.py: Python Unicode Codec for CP949
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: cp949.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_kr, codecs
+
+codec = _codecs_kr.getcodec('cp949')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/cp950.py b/depot_tools/release/win/python_24/Lib/encodings/cp950.py
new file mode 100644
index 0000000..dab3e289
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/cp950.py
@@ -0,0 +1,34 @@
+#
+# cp950.py: Python Unicode Codec for CP950
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: cp950.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_tw, codecs
+
+codec = _codecs_tw.getcodec('cp950')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/euc_jis_2004.py b/depot_tools/release/win/python_24/Lib/encodings/euc_jis_2004.py
new file mode 100644
index 0000000..02d55ca0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/euc_jis_2004.py
@@ -0,0 +1,34 @@
+#
+# euc_jis_2004.py: Python Unicode Codec for EUC_JIS_2004
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: euc_jis_2004.py,v 1.1 2004/07/07 16:18:25 perky Exp $
+#
+
+import _codecs_jp, codecs
+
+codec = _codecs_jp.getcodec('euc_jis_2004')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/euc_jisx0213.py b/depot_tools/release/win/python_24/Lib/encodings/euc_jisx0213.py
new file mode 100644
index 0000000..30f173e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/euc_jisx0213.py
@@ -0,0 +1,34 @@
+#
+# euc_jisx0213.py: Python Unicode Codec for EUC_JISX0213
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: euc_jisx0213.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_jp, codecs
+
+codec = _codecs_jp.getcodec('euc_jisx0213')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/euc_jp.py b/depot_tools/release/win/python_24/Lib/encodings/euc_jp.py
new file mode 100644
index 0000000..a3947a3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/euc_jp.py
@@ -0,0 +1,34 @@
+#
+# euc_jp.py: Python Unicode Codec for EUC_JP
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: euc_jp.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_jp, codecs
+
+codec = _codecs_jp.getcodec('euc_jp')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/euc_kr.py b/depot_tools/release/win/python_24/Lib/encodings/euc_kr.py
new file mode 100644
index 0000000..bbebee8c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/euc_kr.py
@@ -0,0 +1,34 @@
+#
+# euc_kr.py: Python Unicode Codec for EUC_KR
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: euc_kr.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_kr, codecs
+
+codec = _codecs_kr.getcodec('euc_kr')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/gb18030.py b/depot_tools/release/win/python_24/Lib/encodings/gb18030.py
new file mode 100644
index 0000000..7eca319
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/gb18030.py
@@ -0,0 +1,34 @@
+#
+# gb18030.py: Python Unicode Codec for GB18030
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: gb18030.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_cn, codecs
+
+codec = _codecs_cn.getcodec('gb18030')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/gb2312.py b/depot_tools/release/win/python_24/Lib/encodings/gb2312.py
new file mode 100644
index 0000000..5130efa
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/gb2312.py
@@ -0,0 +1,34 @@
+#
+# gb2312.py: Python Unicode Codec for GB2312
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: gb2312.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_cn, codecs
+
+codec = _codecs_cn.getcodec('gb2312')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/gbk.py b/depot_tools/release/win/python_24/Lib/encodings/gbk.py
new file mode 100644
index 0000000..67854bc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/gbk.py
@@ -0,0 +1,34 @@
+#
+# gbk.py: Python Unicode Codec for GBK
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: gbk.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_cn, codecs
+
+codec = _codecs_cn.getcodec('gbk')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/hex_codec.py b/depot_tools/release/win/python_24/Lib/encodings/hex_codec.py
new file mode 100644
index 0000000..5c6e4a4e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/hex_codec.py
@@ -0,0 +1,62 @@
+""" Python 'hex_codec' Codec - 2-digit hex content transfer encoding
+
+    Unlike most of the other codecs which target Unicode, this codec
+    will return Python string objects for both encode and decode.
+
+    Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+"""
+import codecs, binascii
+
+### Codec APIs
+
+def hex_encode(input,errors='strict'):
+
+    """ Encodes the object input and returns a tuple (output
+        object, length consumed).
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = binascii.b2a_hex(input)
+    return (output, len(input))
+
+def hex_decode(input,errors='strict'):
+
+    """ Decodes the object input and returns a tuple (output
+        object, length consumed).
+
+        input must be an object which provides the bf_getreadbuf
+        buffer slot. Python strings, buffer objects and memory
+        mapped files are examples of objects providing this slot.
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = binascii.a2b_hex(input)
+    return (output, len(input))
+
+class Codec(codecs.Codec):
+
+    def encode(self, input,errors='strict'):
+        return hex_encode(input,errors)
+    def decode(self, input,errors='strict'):
+        return hex_decode(input,errors)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (hex_encode,hex_decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/hp_roman8.py b/depot_tools/release/win/python_24/Lib/encodings/hp_roman8.py
new file mode 100644
index 0000000..4f201b8b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/hp_roman8.py
@@ -0,0 +1,139 @@
+""" Python Character Mapping Codec generated from 'hp_roman8.txt' with gencodec.py.
+
+    Based on data from ftp://dkuug.dk/i18n/charmaps/HP-ROMAN8 (Keld Simonsen)
+
+    Original source: LaserJet IIP Printer User's Manual HP part no
+    33471-90901, Hewlett-Packard, June 1989.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x00c0, #       LATIN CAPITAL LETTER A WITH GRAVE
+        0x00a2: 0x00c2, #       LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x00a3: 0x00c8, #       LATIN CAPITAL LETTER E WITH GRAVE
+        0x00a4: 0x00ca, #       LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x00a5: 0x00cb, #       LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x00a6: 0x00ce, #       LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00a7: 0x00cf, #       LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x00a8: 0x00b4, #       ACUTE ACCENT
+        0x00a9: 0x02cb, #       MODIFIER LETTER GRAVE ACCENT (Mandarin Chinese fourth tone)
+        0x00aa: 0x02c6, #       MODIFIER LETTER CIRCUMFLEX ACCENT
+        0x00ab: 0x00a8, #       DIAERESIS
+        0x00ac: 0x02dc, #       SMALL TILDE
+        0x00ad: 0x00d9, #       LATIN CAPITAL LETTER U WITH GRAVE
+        0x00ae: 0x00db, #       LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00af: 0x20a4, #       LIRA SIGN
+        0x00b0: 0x00af, #       MACRON
+        0x00b1: 0x00dd, #       LATIN CAPITAL LETTER Y WITH ACUTE
+        0x00b2: 0x00fd, #       LATIN SMALL LETTER Y WITH ACUTE
+        0x00b3: 0x00b0, #       DEGREE SIGN
+        0x00b4: 0x00c7, #       LATIN CAPITAL LETTER C WITH CEDILLA
+        0x00b5: 0x00e7, #       LATIN SMALL LETTER C WITH CEDILLA
+        0x00b6: 0x00d1, #       LATIN CAPITAL LETTER N WITH TILDE
+        0x00b7: 0x00f1, #       LATIN SMALL LETTER N WITH TILDE
+        0x00b8: 0x00a1, #       INVERTED EXCLAMATION MARK
+        0x00b9: 0x00bf, #       INVERTED QUESTION MARK
+        0x00ba: 0x00a4, #       CURRENCY SIGN
+        0x00bb: 0x00a3, #       POUND SIGN
+        0x00bc: 0x00a5, #       YEN SIGN
+        0x00bd: 0x00a7, #       SECTION SIGN
+        0x00be: 0x0192, #       LATIN SMALL LETTER F WITH HOOK
+        0x00bf: 0x00a2, #       CENT SIGN
+        0x00c0: 0x00e2, #       LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x00c1: 0x00ea, #       LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x00c2: 0x00f4, #       LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x00c3: 0x00fb, #       LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x00c4: 0x00e1, #       LATIN SMALL LETTER A WITH ACUTE
+        0x00c5: 0x00e9, #       LATIN SMALL LETTER E WITH ACUTE
+        0x00c6: 0x00f3, #       LATIN SMALL LETTER O WITH ACUTE
+        0x00c7: 0x00fa, #       LATIN SMALL LETTER U WITH ACUTE
+        0x00c8: 0x00e0, #       LATIN SMALL LETTER A WITH GRAVE
+        0x00c9: 0x00e8, #       LATIN SMALL LETTER E WITH GRAVE
+        0x00ca: 0x00f2, #       LATIN SMALL LETTER O WITH GRAVE
+        0x00cb: 0x00f9, #       LATIN SMALL LETTER U WITH GRAVE
+        0x00cc: 0x00e4, #       LATIN SMALL LETTER A WITH DIAERESIS
+        0x00cd: 0x00eb, #       LATIN SMALL LETTER E WITH DIAERESIS
+        0x00ce: 0x00f6, #       LATIN SMALL LETTER O WITH DIAERESIS
+        0x00cf: 0x00fc, #       LATIN SMALL LETTER U WITH DIAERESIS
+        0x00d0: 0x00c5, #       LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x00d1: 0x00ee, #       LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x00d2: 0x00d8, #       LATIN CAPITAL LETTER O WITH STROKE
+        0x00d3: 0x00c6, #       LATIN CAPITAL LETTER AE
+        0x00d4: 0x00e5, #       LATIN SMALL LETTER A WITH RING ABOVE
+        0x00d5: 0x00ed, #       LATIN SMALL LETTER I WITH ACUTE
+        0x00d6: 0x00f8, #       LATIN SMALL LETTER O WITH STROKE
+        0x00d7: 0x00e6, #       LATIN SMALL LETTER AE
+        0x00d8: 0x00c4, #       LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x00d9: 0x00ec, #       LATIN SMALL LETTER I WITH GRAVE
+        0x00da: 0x00d6, #       LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x00db: 0x00dc, #       LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x00dc: 0x00c9, #       LATIN CAPITAL LETTER E WITH ACUTE
+        0x00dd: 0x00ef, #       LATIN SMALL LETTER I WITH DIAERESIS
+        0x00de: 0x00df, #       LATIN SMALL LETTER SHARP S (German)
+        0x00df: 0x00d4, #       LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00e0: 0x00c1, #       LATIN CAPITAL LETTER A WITH ACUTE
+        0x00e1: 0x00c3, #       LATIN CAPITAL LETTER A WITH TILDE
+        0x00e2: 0x00e3, #       LATIN SMALL LETTER A WITH TILDE
+        0x00e3: 0x00d0, #       LATIN CAPITAL LETTER ETH (Icelandic)
+        0x00e4: 0x00f0, #       LATIN SMALL LETTER ETH (Icelandic)
+        0x00e5: 0x00cd, #       LATIN CAPITAL LETTER I WITH ACUTE
+        0x00e6: 0x00cc, #       LATIN CAPITAL LETTER I WITH GRAVE
+        0x00e7: 0x00d3, #       LATIN CAPITAL LETTER O WITH ACUTE
+        0x00e8: 0x00d2, #       LATIN CAPITAL LETTER O WITH GRAVE
+        0x00e9: 0x00d5, #       LATIN CAPITAL LETTER O WITH TILDE
+        0x00ea: 0x00f5, #       LATIN SMALL LETTER O WITH TILDE
+        0x00eb: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x00ec: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x00ed: 0x00da, #       LATIN CAPITAL LETTER U WITH ACUTE
+        0x00ee: 0x0178, #       LATIN CAPITAL LETTER Y WITH DIAERESIS
+        0x00ef: 0x00ff, #       LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00f0: 0x00de, #       LATIN CAPITAL LETTER THORN (Icelandic)
+        0x00f1: 0x00fe, #       LATIN SMALL LETTER THORN (Icelandic)
+        0x00f2: 0x00b7, #       MIDDLE DOT
+        0x00f3: 0x00b5, #       MICRO SIGN
+        0x00f4: 0x00b6, #       PILCROW SIGN
+        0x00f5: 0x00be, #       VULGAR FRACTION THREE QUARTERS
+        0x00f6: 0x2014, #       EM DASH
+        0x00f7: 0x00bc, #       VULGAR FRACTION ONE QUARTER
+        0x00f8: 0x00bd, #       VULGAR FRACTION ONE HALF
+        0x00f9: 0x00aa, #       FEMININE ORDINAL INDICATOR
+        0x00fa: 0x00ba, #       MASCULINE ORDINAL INDICATOR
+        0x00fb: 0x00ab, #       LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00fc: 0x25a0, #       BLACK SQUARE
+        0x00fd: 0x00bb, #       RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00fe: 0x00b1, #       PLUS-MINUS SIGN
+        0x00ff: None,
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/hz.py b/depot_tools/release/win/python_24/Lib/encodings/hz.py
new file mode 100644
index 0000000..3940894
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/hz.py
@@ -0,0 +1,34 @@
+#
+# hz.py: Python Unicode Codec for HZ
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: hz.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_cn, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_cn.
+codec = _codecs_cn.getcodec('hz')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/idna.py b/depot_tools/release/win/python_24/Lib/encodings/idna.py
new file mode 100644
index 0000000..4814215
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/idna.py
@@ -0,0 +1,200 @@
+# This module implements the RFCs 3490 (IDNA) and 3491 (Nameprep)
+
+import stringprep, unicodedata, re, codecs
+
+# IDNA section 3.1
+dots = re.compile(u"[\u002E\u3002\uFF0E\uFF61]")
+
+# IDNA section 5
+ace_prefix = "xn--"
+uace_prefix = unicode(ace_prefix, "ascii")
+
+# This assumes query strings, so AllowUnassigned is true
+def nameprep(label):
+    # RFC 3491 nameprep of a single label: map, normalize, prohibit,
+    # then check the bidi constraints.  Raises UnicodeError on violation.
+    # Map
+    newlabel = []
+    for c in label:
+        if stringprep.in_table_b1(c):
+            # Map to nothing
+            continue
+        newlabel.append(stringprep.map_table_b2(c))
+    label = u"".join(newlabel)
+
+    # Normalize
+    label = unicodedata.normalize("NFKC", label)
+
+    # Prohibit
+    # Reject any character in the RFC 3454 prohibition tables C.1.2-C.9.
+    for c in label:
+        if stringprep.in_table_c12(c) or \
+           stringprep.in_table_c22(c) or \
+           stringprep.in_table_c3(c) or \
+           stringprep.in_table_c4(c) or \
+           stringprep.in_table_c5(c) or \
+           stringprep.in_table_c6(c) or \
+           stringprep.in_table_c7(c) or \
+           stringprep.in_table_c8(c) or \
+           stringprep.in_table_c9(c):
+            raise UnicodeError, "Invalid character %s" % repr(c)
+
+    # Check bidi
+    # RandAL[i] is true iff label[i] is an R or AL (right-to-left) character.
+    RandAL = map(stringprep.in_table_d1, label)
+    for c in RandAL:
+        if c:
+            # There is a RandAL char in the string. Must perform further
+            # tests:
+            # 1) The characters in section 5.8 MUST be prohibited.
+            # This is table C.8, which was already checked
+            # 2) If a string contains any RandALCat character, the string
+            # MUST NOT contain any LCat character.
+            if filter(stringprep.in_table_d2, label):
+                raise UnicodeError, "Violation of BIDI requirement 2"
+
+            # 3) If a string contains any RandALCat character, a
+            # RandALCat character MUST be the first character of the
+            # string, and a RandALCat character MUST be the last
+            # character of the string.
+            if not RandAL[0] or not RandAL[-1]:
+                raise UnicodeError, "Violation of BIDI requirement 3"
+
+    return label
+
+def ToASCII(label):
+    # RFC 3490 ToASCII for one label: returns an ASCII byte string of
+    # length 1-63, or raises UnicodeError.  Step numbers follow the RFC.
+    try:
+        # Step 1: try ASCII
+        label = label.encode("ascii")
+    except UnicodeError:
+        pass
+    else:
+        # Skip to step 3: UseSTD3ASCIIRules is false, so
+        # Skip to step 8.
+        if 0 < len(label) < 64:
+            return label
+        raise UnicodeError, "label too long"
+
+    # Step 2: nameprep
+    label = nameprep(label)
+
+    # Step 3: UseSTD3ASCIIRules is false
+    # Step 4: try ASCII
+    try:
+        label = label.encode("ascii")
+    except UnicodeError:
+        pass
+    else:
+        # Skip to step 8.
+        if 0 < len(label) < 64:
+            return label
+        raise UnicodeError, "label too long"
+
+    # Step 5: Check ACE prefix
+    # label is still unicode here, so compare against the unicode prefix.
+    if label.startswith(uace_prefix):
+        raise UnicodeError, "Label starts with ACE prefix"
+
+    # Step 6: Encode with PUNYCODE
+    label = label.encode("punycode")
+
+    # Step 7: Prepend ACE prefix
+    label = ace_prefix + label
+
+    # Step 8: Check size
+    # The empty label and labels of 64+ octets are both rejected here.
+    if 0 < len(label) < 64:
+        return label
+    raise UnicodeError, "label too long"
+
+def ToUnicode(label):
+    # RFC 3490 ToUnicode for one label; step numbers follow the RFC.
+    # Step 1: Check for ASCII
+    if isinstance(label, str):
+        pure_ascii = True
+    else:
+        try:
+            label = label.encode("ascii")
+            pure_ascii = True
+        except UnicodeError:
+            pure_ascii = False
+    if not pure_ascii:
+        # Step 2: Perform nameprep
+        label = nameprep(label)
+        # It doesn't say this, but apparently, it should be ASCII now
+        try:
+            label = label.encode("ascii")
+        except UnicodeError:
+            raise UnicodeError, "Invalid character in IDN label"
+    # Step 3: Check for ACE prefix
+    # Labels without the "xn--" prefix are returned unchanged (as unicode).
+    if not label.startswith(ace_prefix):
+        return unicode(label, "ascii")
+
+    # Step 4: Remove ACE prefix
+    label1 = label[len(ace_prefix):]
+
+    # Step 5: Decode using PUNYCODE
+    result = label1.decode("punycode")
+
+    # Step 6: Apply ToASCII
+    # Re-encoding verifies the decoded label round-trips to the same ACE form.
+    label2 = ToASCII(result)
+
+    # Step 7: Compare the result of step 6 with the one of step 3
+    # label2 will already be in lower case.
+    if label.lower() != label2:
+        raise UnicodeError, ("IDNA does not round-trip", label, label2)
+
+    # Step 8: return the result of step 5
+    return result
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+    # Whole-domain codec: splits the input on the IDNA dot separators,
+    # applies ToASCII/ToUnicode per label, and rejoins with U+002E.
+    def encode(self,input,errors='strict'):
+
+        if errors != 'strict':
+            # IDNA is quite clear that implementations must be strict
+            raise UnicodeError, "unsupported error handling "+errors
+
+        result = []
+        labels = dots.split(input)
+        # A trailing empty label means the input ended with a dot; remember
+        # it so the dot can be re-appended after the per-label conversion.
+        if labels and len(labels[-1])==0:
+            trailing_dot = '.'
+            del labels[-1]
+        else:
+            trailing_dot = ''
+        # NOTE(review): empty input produces one empty label here, which makes
+        # ToASCII raise "label too long"; later CPython versions return
+        # ('', 0) early.  Left as-is to match the vendored 2.4 sources.
+        for label in labels:
+            result.append(ToASCII(label))
+        # Join with U+002E
+        return ".".join(result)+trailing_dot, len(input)
+
+    def decode(self,input,errors='strict'):
+
+        if errors != 'strict':
+            raise UnicodeError, "Unsupported error handling "+errors
+
+        # IDNA allows decoding to operate on Unicode strings, too.
+        if isinstance(input, unicode):
+            labels = dots.split(input)
+        else:
+            # Must be ASCII string
+            input = str(input)
+            unicode(input, "ascii")
+            labels = input.split(".")
+
+        if labels and len(labels[-1]) == 0:
+            trailing_dot = u'.'
+            del labels[-1]
+        else:
+            trailing_dot = u''
+
+        result = []
+        for label in labels:
+            result.append(ToUnicode(label))
+
+        return u".".join(result)+trailing_dot, len(input)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    # Registration hook used by the encodings package codec search function.
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp.py b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp.py
new file mode 100644
index 0000000..109658b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp.py
@@ -0,0 +1,34 @@
+#
+# iso2022_jp.py: Python Unicode Codec for ISO2022_JP
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: iso2022_jp.py,v 1.2 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_iso2022, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_iso2022.
+codec = _codecs_iso2022.getcodec('iso2022_jp')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_1.py b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_1.py
new file mode 100644
index 0000000..201bd28
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_1.py
@@ -0,0 +1,34 @@
+#
+# iso2022_jp_1.py: Python Unicode Codec for ISO2022_JP_1
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: iso2022_jp_1.py,v 1.2 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_iso2022, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_iso2022.
+codec = _codecs_iso2022.getcodec('iso2022_jp_1')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_2.py b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_2.py
new file mode 100644
index 0000000..7a61018
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_2.py
@@ -0,0 +1,34 @@
+#
+# iso2022_jp_2.py: Python Unicode Codec for ISO2022_JP_2
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: iso2022_jp_2.py,v 1.2 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_iso2022, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_iso2022.
+codec = _codecs_iso2022.getcodec('iso2022_jp_2')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_2004.py b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_2004.py
new file mode 100644
index 0000000..2497124
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_2004.py
@@ -0,0 +1,34 @@
+#
+# iso2022_jp_2004.py: Python Unicode Codec for ISO2022_JP_2004
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: iso2022_jp_2004.py,v 1.1 2004/07/07 16:18:25 perky Exp $
+#
+
+import _codecs_iso2022, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_iso2022.
+codec = _codecs_iso2022.getcodec('iso2022_jp_2004')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_3.py b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_3.py
new file mode 100644
index 0000000..8b2ed00280
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_3.py
@@ -0,0 +1,34 @@
+#
+# iso2022_jp_3.py: Python Unicode Codec for ISO2022_JP_3
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: iso2022_jp_3.py,v 1.2 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_iso2022, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_iso2022.
+codec = _codecs_iso2022.getcodec('iso2022_jp_3')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_ext.py b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_ext.py
new file mode 100644
index 0000000..97cb4e7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso2022_jp_ext.py
@@ -0,0 +1,34 @@
+#
+# iso2022_jp_ext.py: Python Unicode Codec for ISO2022_JP_EXT
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: iso2022_jp_ext.py,v 1.2 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_iso2022, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_iso2022.
+codec = _codecs_iso2022.getcodec('iso2022_jp_ext')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso2022_kr.py b/depot_tools/release/win/python_24/Lib/encodings/iso2022_kr.py
new file mode 100644
index 0000000..f5549ca
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso2022_kr.py
@@ -0,0 +1,34 @@
+#
+# iso2022_kr.py: Python Unicode Codec for ISO2022_KR
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: iso2022_kr.py,v 1.2 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_iso2022, codecs
+
+# The actual conversion work is done by the C implementation in _codecs_iso2022.
+codec = _codecs_iso2022.getcodec('iso2022_kr')
+
+class Codec(codecs.Codec):
+    # Stateless encode/decode entry points taken directly from the C codec.
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        # Delegate to the C codec's stateful StreamReader by re-exporting
+        # its bound methods on this instance.
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        # Same delegation pattern as StreamReader, for the writing side.
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registration hook used by the encodings package codec search function.
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_1.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_1.py
new file mode 100644
index 0000000..9fa2fcc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_1.py
@@ -0,0 +1,44 @@
+""" Python Character Mapping Codec generated from '8859-1.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+    # Single-byte charmap codec: both directions are table lookups performed
+    # by the C helpers in codecs, driven by the maps at the bottom of the file.
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    # Registration hook used by the encodings package codec search function.
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+# ISO 8859-1 maps bytes 0x00-0xFF directly to U+0000-U+00FF, so the identity
+# table needs no overrides (the update below is deliberately empty).
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_10.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_10.py
new file mode 100644
index 0000000..df2565af
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_10.py
@@ -0,0 +1,90 @@
+""" Python Character Mapping Codec generated from '8859-10.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+    # Single-byte charmap codec: both directions are table lookups performed
+    # by the C helpers in codecs, driven by the maps at the bottom of the file.
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    # Registration hook used by the encodings package codec search function.
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x0104, #       LATIN CAPITAL LETTER A WITH OGONEK
+        0x00a2: 0x0112, #       LATIN CAPITAL LETTER E WITH MACRON
+        0x00a3: 0x0122, #       LATIN CAPITAL LETTER G WITH CEDILLA
+        0x00a4: 0x012a, #       LATIN CAPITAL LETTER I WITH MACRON
+        0x00a5: 0x0128, #       LATIN CAPITAL LETTER I WITH TILDE
+        0x00a6: 0x0136, #       LATIN CAPITAL LETTER K WITH CEDILLA
+        0x00a8: 0x013b, #       LATIN CAPITAL LETTER L WITH CEDILLA
+        0x00a9: 0x0110, #       LATIN CAPITAL LETTER D WITH STROKE
+        0x00aa: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x00ab: 0x0166, #       LATIN CAPITAL LETTER T WITH STROKE
+        0x00ac: 0x017d, #       LATIN CAPITAL LETTER Z WITH CARON
+        0x00ae: 0x016a, #       LATIN CAPITAL LETTER U WITH MACRON
+        0x00af: 0x014a, #       LATIN CAPITAL LETTER ENG
+        0x00b1: 0x0105, #       LATIN SMALL LETTER A WITH OGONEK
+        0x00b2: 0x0113, #       LATIN SMALL LETTER E WITH MACRON
+        0x00b3: 0x0123, #       LATIN SMALL LETTER G WITH CEDILLA
+        0x00b4: 0x012b, #       LATIN SMALL LETTER I WITH MACRON
+        0x00b5: 0x0129, #       LATIN SMALL LETTER I WITH TILDE
+        0x00b6: 0x0137, #       LATIN SMALL LETTER K WITH CEDILLA
+        0x00b8: 0x013c, #       LATIN SMALL LETTER L WITH CEDILLA
+        0x00b9: 0x0111, #       LATIN SMALL LETTER D WITH STROKE
+        0x00ba: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x00bb: 0x0167, #       LATIN SMALL LETTER T WITH STROKE
+        0x00bc: 0x017e, #       LATIN SMALL LETTER Z WITH CARON
+        0x00bd: 0x2015, #       HORIZONTAL BAR
+        0x00be: 0x016b, #       LATIN SMALL LETTER U WITH MACRON
+        0x00bf: 0x014b, #       LATIN SMALL LETTER ENG
+        0x00c0: 0x0100, #       LATIN CAPITAL LETTER A WITH MACRON
+        0x00c7: 0x012e, #       LATIN CAPITAL LETTER I WITH OGONEK
+        0x00c8: 0x010c, #       LATIN CAPITAL LETTER C WITH CARON
+        0x00ca: 0x0118, #       LATIN CAPITAL LETTER E WITH OGONEK
+        0x00cc: 0x0116, #       LATIN CAPITAL LETTER E WITH DOT ABOVE
+        0x00d1: 0x0145, #       LATIN CAPITAL LETTER N WITH CEDILLA
+        0x00d2: 0x014c, #       LATIN CAPITAL LETTER O WITH MACRON
+        0x00d7: 0x0168, #       LATIN CAPITAL LETTER U WITH TILDE
+        0x00d9: 0x0172, #       LATIN CAPITAL LETTER U WITH OGONEK
+        0x00e0: 0x0101, #       LATIN SMALL LETTER A WITH MACRON
+        0x00e7: 0x012f, #       LATIN SMALL LETTER I WITH OGONEK
+        0x00e8: 0x010d, #       LATIN SMALL LETTER C WITH CARON
+        0x00ea: 0x0119, #       LATIN SMALL LETTER E WITH OGONEK
+        0x00ec: 0x0117, #       LATIN SMALL LETTER E WITH DOT ABOVE
+        0x00f1: 0x0146, #       LATIN SMALL LETTER N WITH CEDILLA
+        0x00f2: 0x014d, #       LATIN SMALL LETTER O WITH MACRON
+        0x00f7: 0x0169, #       LATIN SMALL LETTER U WITH TILDE
+        0x00f9: 0x0173, #       LATIN SMALL LETTER U WITH OGONEK
+        0x00ff: 0x0138, #       LATIN SMALL LETTER KRA
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_11.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_11.py
new file mode 100644
index 0000000..dfe46b1bb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_11.py
@@ -0,0 +1,137 @@
+""" Python Character Mapping Codec generated from '8859-11.TXT' with gencodec.py.
+
+    Generated from mapping found in
+    ftp://ftp.unicode.org/Public/MAPPINGS/ISO8859/8859-11.TXT
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+    # Single-byte charmap codec: both directions are table lookups performed
+    # by the C helpers in codecs, driven by the maps at the bottom of the file.
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    # Registration hook used by the encodings package codec search function.
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x0e01, #       THAI CHARACTER KO KAI
+        0x00a2: 0x0e02, #       THAI CHARACTER KHO KHAI
+        0x00a3: 0x0e03, #       THAI CHARACTER KHO KHUAT
+        0x00a4: 0x0e04, #       THAI CHARACTER KHO KHWAI
+        0x00a5: 0x0e05, #       THAI CHARACTER KHO KHON
+        0x00a6: 0x0e06, #       THAI CHARACTER KHO RAKHANG
+        0x00a7: 0x0e07, #       THAI CHARACTER NGO NGU
+        0x00a8: 0x0e08, #       THAI CHARACTER CHO CHAN
+        0x00a9: 0x0e09, #       THAI CHARACTER CHO CHING
+        0x00aa: 0x0e0a, #       THAI CHARACTER CHO CHANG
+        0x00ab: 0x0e0b, #       THAI CHARACTER SO SO
+        0x00ac: 0x0e0c, #       THAI CHARACTER CHO CHOE
+        0x00ad: 0x0e0d, #       THAI CHARACTER YO YING
+        0x00ae: 0x0e0e, #       THAI CHARACTER DO CHADA
+        0x00af: 0x0e0f, #       THAI CHARACTER TO PATAK
+        0x00b0: 0x0e10, #       THAI CHARACTER THO THAN
+        0x00b1: 0x0e11, #       THAI CHARACTER THO NANGMONTHO
+        0x00b2: 0x0e12, #       THAI CHARACTER THO PHUTHAO
+        0x00b3: 0x0e13, #       THAI CHARACTER NO NEN
+        0x00b4: 0x0e14, #       THAI CHARACTER DO DEK
+        0x00b5: 0x0e15, #       THAI CHARACTER TO TAO
+        0x00b6: 0x0e16, #       THAI CHARACTER THO THUNG
+        0x00b7: 0x0e17, #       THAI CHARACTER THO THAHAN
+        0x00b8: 0x0e18, #       THAI CHARACTER THO THONG
+        0x00b9: 0x0e19, #       THAI CHARACTER NO NU
+        0x00ba: 0x0e1a, #       THAI CHARACTER BO BAIMAI
+        0x00bb: 0x0e1b, #       THAI CHARACTER PO PLA
+        0x00bc: 0x0e1c, #       THAI CHARACTER PHO PHUNG
+        0x00bd: 0x0e1d, #       THAI CHARACTER FO FA
+        0x00be: 0x0e1e, #       THAI CHARACTER PHO PHAN
+        0x00bf: 0x0e1f, #       THAI CHARACTER FO FAN
+        0x00c0: 0x0e20, #       THAI CHARACTER PHO SAMPHAO
+        0x00c1: 0x0e21, #       THAI CHARACTER MO MA
+        0x00c2: 0x0e22, #       THAI CHARACTER YO YAK
+        0x00c3: 0x0e23, #       THAI CHARACTER RO RUA
+        0x00c4: 0x0e24, #       THAI CHARACTER RU
+        0x00c5: 0x0e25, #       THAI CHARACTER LO LING
+        0x00c6: 0x0e26, #       THAI CHARACTER LU
+        0x00c7: 0x0e27, #       THAI CHARACTER WO WAEN
+        0x00c8: 0x0e28, #       THAI CHARACTER SO SALA
+        0x00c9: 0x0e29, #       THAI CHARACTER SO RUSI
+        0x00ca: 0x0e2a, #       THAI CHARACTER SO SUA
+        0x00cb: 0x0e2b, #       THAI CHARACTER HO HIP
+        0x00cc: 0x0e2c, #       THAI CHARACTER LO CHULA
+        0x00cd: 0x0e2d, #       THAI CHARACTER O ANG
+        0x00ce: 0x0e2e, #       THAI CHARACTER HO NOKHUK
+        0x00cf: 0x0e2f, #       THAI CHARACTER PAIYANNOI
+        0x00d0: 0x0e30, #       THAI CHARACTER SARA A
+        0x00d1: 0x0e31, #       THAI CHARACTER MAI HAN-AKAT
+        0x00d2: 0x0e32, #       THAI CHARACTER SARA AA
+        0x00d3: 0x0e33, #       THAI CHARACTER SARA AM
+        0x00d4: 0x0e34, #       THAI CHARACTER SARA I
+        0x00d5: 0x0e35, #       THAI CHARACTER SARA II
+        0x00d6: 0x0e36, #       THAI CHARACTER SARA UE
+        0x00d7: 0x0e37, #       THAI CHARACTER SARA UEE
+        0x00d8: 0x0e38, #       THAI CHARACTER SARA U
+        0x00d9: 0x0e39, #       THAI CHARACTER SARA UU
+        0x00da: 0x0e3a, #       THAI CHARACTER PHINTHU
+        0x00db: None,
+        0x00dc: None,
+        0x00dd: None,
+        0x00de: None,
+        0x00df: 0x0e3f, #       THAI CURRENCY SYMBOL BAHT
+        0x00e0: 0x0e40, #       THAI CHARACTER SARA E
+        0x00e1: 0x0e41, #       THAI CHARACTER SARA AE
+        0x00e2: 0x0e42, #       THAI CHARACTER SARA O
+        0x00e3: 0x0e43, #       THAI CHARACTER SARA AI MAIMUAN
+        0x00e4: 0x0e44, #       THAI CHARACTER SARA AI MAIMALAI
+        0x00e5: 0x0e45, #       THAI CHARACTER LAKKHANGYAO
+        0x00e6: 0x0e46, #       THAI CHARACTER MAIYAMOK
+        0x00e7: 0x0e47, #       THAI CHARACTER MAITAIKHU
+        0x00e8: 0x0e48, #       THAI CHARACTER MAI EK
+        0x00e9: 0x0e49, #       THAI CHARACTER MAI THO
+        0x00ea: 0x0e4a, #       THAI CHARACTER MAI TRI
+        0x00eb: 0x0e4b, #       THAI CHARACTER MAI CHATTAWA
+        0x00ec: 0x0e4c, #       THAI CHARACTER THANTHAKHAT
+        0x00ed: 0x0e4d, #       THAI CHARACTER NIKHAHIT
+        0x00ee: 0x0e4e, #       THAI CHARACTER YAMAKKAN
+        0x00ef: 0x0e4f, #       THAI CHARACTER FONGMAN
+        0x00f0: 0x0e50, #       THAI DIGIT ZERO
+        0x00f1: 0x0e51, #       THAI DIGIT ONE
+        0x00f2: 0x0e52, #       THAI DIGIT TWO
+        0x00f3: 0x0e53, #       THAI DIGIT THREE
+        0x00f4: 0x0e54, #       THAI DIGIT FOUR
+        0x00f5: 0x0e55, #       THAI DIGIT FIVE
+        0x00f6: 0x0e56, #       THAI DIGIT SIX
+        0x00f7: 0x0e57, #       THAI DIGIT SEVEN
+        0x00f8: 0x0e58, #       THAI DIGIT EIGHT
+        0x00f9: 0x0e59, #       THAI DIGIT NINE
+        0x00fa: 0x0e5a, #       THAI CHARACTER ANGKHANKHU
+        0x00fb: 0x0e5b, #       THAI CHARACTER KHOMUT
+        0x00fc: None,
+        0x00fd: None,
+        0x00fe: None,
+        0x00ff: None,
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_13.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_13.py
new file mode 100644
index 0000000..57b6c70
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_13.py
@@ -0,0 +1,100 @@
+""" Python Character Mapping Codec generated from '8859-13.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+    # Single-byte charmap codec: both directions are table lookups performed
+    # by the C helpers in codecs, driven by the maps at the bottom of the file.
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    # Registration hook used by the encodings package codec search function.
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x201d, #       RIGHT DOUBLE QUOTATION MARK
+        0x00a5: 0x201e, #       DOUBLE LOW-9 QUOTATION MARK
+        0x00a8: 0x00d8, #       LATIN CAPITAL LETTER O WITH STROKE
+        0x00aa: 0x0156, #       LATIN CAPITAL LETTER R WITH CEDILLA
+        0x00af: 0x00c6, #       LATIN CAPITAL LETTER AE
+        0x00b4: 0x201c, #       LEFT DOUBLE QUOTATION MARK
+        0x00b8: 0x00f8, #       LATIN SMALL LETTER O WITH STROKE
+        0x00ba: 0x0157, #       LATIN SMALL LETTER R WITH CEDILLA
+        0x00bf: 0x00e6, #       LATIN SMALL LETTER AE
+        0x00c0: 0x0104, #       LATIN CAPITAL LETTER A WITH OGONEK
+        0x00c1: 0x012e, #       LATIN CAPITAL LETTER I WITH OGONEK
+        0x00c2: 0x0100, #       LATIN CAPITAL LETTER A WITH MACRON
+        0x00c3: 0x0106, #       LATIN CAPITAL LETTER C WITH ACUTE
+        0x00c6: 0x0118, #       LATIN CAPITAL LETTER E WITH OGONEK
+        0x00c7: 0x0112, #       LATIN CAPITAL LETTER E WITH MACRON
+        0x00c8: 0x010c, #       LATIN CAPITAL LETTER C WITH CARON
+        0x00ca: 0x0179, #       LATIN CAPITAL LETTER Z WITH ACUTE
+        0x00cb: 0x0116, #       LATIN CAPITAL LETTER E WITH DOT ABOVE
+        0x00cc: 0x0122, #       LATIN CAPITAL LETTER G WITH CEDILLA
+        0x00cd: 0x0136, #       LATIN CAPITAL LETTER K WITH CEDILLA
+        0x00ce: 0x012a, #       LATIN CAPITAL LETTER I WITH MACRON
+        0x00cf: 0x013b, #       LATIN CAPITAL LETTER L WITH CEDILLA
+        0x00d0: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x00d1: 0x0143, #       LATIN CAPITAL LETTER N WITH ACUTE
+        0x00d2: 0x0145, #       LATIN CAPITAL LETTER N WITH CEDILLA
+        0x00d4: 0x014c, #       LATIN CAPITAL LETTER O WITH MACRON
+        0x00d8: 0x0172, #       LATIN CAPITAL LETTER U WITH OGONEK
+        0x00d9: 0x0141, #       LATIN CAPITAL LETTER L WITH STROKE
+        0x00da: 0x015a, #       LATIN CAPITAL LETTER S WITH ACUTE
+        0x00db: 0x016a, #       LATIN CAPITAL LETTER U WITH MACRON
+        0x00dd: 0x017b, #       LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00de: 0x017d, #       LATIN CAPITAL LETTER Z WITH CARON
+        0x00e0: 0x0105, #       LATIN SMALL LETTER A WITH OGONEK
+        0x00e1: 0x012f, #       LATIN SMALL LETTER I WITH OGONEK
+        0x00e2: 0x0101, #       LATIN SMALL LETTER A WITH MACRON
+        0x00e3: 0x0107, #       LATIN SMALL LETTER C WITH ACUTE
+        0x00e6: 0x0119, #       LATIN SMALL LETTER E WITH OGONEK
+        0x00e7: 0x0113, #       LATIN SMALL LETTER E WITH MACRON
+        0x00e8: 0x010d, #       LATIN SMALL LETTER C WITH CARON
+        0x00ea: 0x017a, #       LATIN SMALL LETTER Z WITH ACUTE
+        0x00eb: 0x0117, #       LATIN SMALL LETTER E WITH DOT ABOVE
+        0x00ec: 0x0123, #       LATIN SMALL LETTER G WITH CEDILLA
+        0x00ed: 0x0137, #       LATIN SMALL LETTER K WITH CEDILLA
+        0x00ee: 0x012b, #       LATIN SMALL LETTER I WITH MACRON
+        0x00ef: 0x013c, #       LATIN SMALL LETTER L WITH CEDILLA
+        0x00f0: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x00f1: 0x0144, #       LATIN SMALL LETTER N WITH ACUTE
+        0x00f2: 0x0146, #       LATIN SMALL LETTER N WITH CEDILLA
+        0x00f4: 0x014d, #       LATIN SMALL LETTER O WITH MACRON
+        0x00f8: 0x0173, #       LATIN SMALL LETTER U WITH OGONEK
+        0x00f9: 0x0142, #       LATIN SMALL LETTER L WITH STROKE
+        0x00fa: 0x015b, #       LATIN SMALL LETTER S WITH ACUTE
+        0x00fb: 0x016b, #       LATIN SMALL LETTER U WITH MACRON
+        0x00fd: 0x017c, #       LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00fe: 0x017e, #       LATIN SMALL LETTER Z WITH CARON
+        0x00ff: 0x2019, #       RIGHT SINGLE QUOTATION MARK
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_14.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_14.py
new file mode 100644
index 0000000..9f3d6fe
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_14.py
@@ -0,0 +1,75 @@
+""" Python Character Mapping Codec generated from '8859-14.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x1e02, #       LATIN CAPITAL LETTER B WITH DOT ABOVE
+        0x00a2: 0x1e03, #       LATIN SMALL LETTER B WITH DOT ABOVE
+        0x00a4: 0x010a, #       LATIN CAPITAL LETTER C WITH DOT ABOVE
+        0x00a5: 0x010b, #       LATIN SMALL LETTER C WITH DOT ABOVE
+        0x00a6: 0x1e0a, #       LATIN CAPITAL LETTER D WITH DOT ABOVE
+        0x00a8: 0x1e80, #       LATIN CAPITAL LETTER W WITH GRAVE
+        0x00aa: 0x1e82, #       LATIN CAPITAL LETTER W WITH ACUTE
+        0x00ab: 0x1e0b, #       LATIN SMALL LETTER D WITH DOT ABOVE
+        0x00ac: 0x1ef2, #       LATIN CAPITAL LETTER Y WITH GRAVE
+        0x00af: 0x0178, #       LATIN CAPITAL LETTER Y WITH DIAERESIS
+        0x00b0: 0x1e1e, #       LATIN CAPITAL LETTER F WITH DOT ABOVE
+        0x00b1: 0x1e1f, #       LATIN SMALL LETTER F WITH DOT ABOVE
+        0x00b2: 0x0120, #       LATIN CAPITAL LETTER G WITH DOT ABOVE
+        0x00b3: 0x0121, #       LATIN SMALL LETTER G WITH DOT ABOVE
+        0x00b4: 0x1e40, #       LATIN CAPITAL LETTER M WITH DOT ABOVE
+        0x00b5: 0x1e41, #       LATIN SMALL LETTER M WITH DOT ABOVE
+        0x00b7: 0x1e56, #       LATIN CAPITAL LETTER P WITH DOT ABOVE
+        0x00b8: 0x1e81, #       LATIN SMALL LETTER W WITH GRAVE
+        0x00b9: 0x1e57, #       LATIN SMALL LETTER P WITH DOT ABOVE
+        0x00ba: 0x1e83, #       LATIN SMALL LETTER W WITH ACUTE
+        0x00bb: 0x1e60, #       LATIN CAPITAL LETTER S WITH DOT ABOVE
+        0x00bc: 0x1ef3, #       LATIN SMALL LETTER Y WITH GRAVE
+        0x00bd: 0x1e84, #       LATIN CAPITAL LETTER W WITH DIAERESIS
+        0x00be: 0x1e85, #       LATIN SMALL LETTER W WITH DIAERESIS
+        0x00bf: 0x1e61, #       LATIN SMALL LETTER S WITH DOT ABOVE
+        0x00d0: 0x0174, #       LATIN CAPITAL LETTER W WITH CIRCUMFLEX
+        0x00d7: 0x1e6a, #       LATIN CAPITAL LETTER T WITH DOT ABOVE
+        0x00de: 0x0176, #       LATIN CAPITAL LETTER Y WITH CIRCUMFLEX
+        0x00f0: 0x0175, #       LATIN SMALL LETTER W WITH CIRCUMFLEX
+        0x00f7: 0x1e6b, #       LATIN SMALL LETTER T WITH DOT ABOVE
+        0x00fe: 0x0177, #       LATIN SMALL LETTER Y WITH CIRCUMFLEX
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_15.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_15.py
new file mode 100644
index 0000000..5708df0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_15.py
@@ -0,0 +1,52 @@
+""" Python Character Mapping Codec generated from '8859-15.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a4: 0x20ac, #       EURO SIGN
+        0x00a6: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x00a8: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x00b4: 0x017d, #       LATIN CAPITAL LETTER Z WITH CARON
+        0x00b8: 0x017e, #       LATIN SMALL LETTER Z WITH CARON
+        0x00bc: 0x0152, #       LATIN CAPITAL LIGATURE OE
+        0x00bd: 0x0153, #       LATIN SMALL LIGATURE OE
+        0x00be: 0x0178, #       LATIN CAPITAL LETTER Y WITH DIAERESIS
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_16.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_16.py
new file mode 100644
index 0000000..80c07409
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_16.py
@@ -0,0 +1,82 @@
+""" Python Character Mapping Codec generated from '8859-16.TXT' with gencodec.py.
+
+    Generated from mapping found in
+    ftp://ftp.unicode.org/Public/MAPPINGS/ISO8859/8859-16.TXT
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x0104, #       LATIN CAPITAL LETTER A WITH OGONEK
+        0x00a2: 0x0105, #       LATIN SMALL LETTER A WITH OGONEK
+        0x00a3: 0x0141, #       LATIN CAPITAL LETTER L WITH STROKE
+        0x00a4: 0x20ac, #       EURO SIGN
+        0x00a5: 0x201e, #       DOUBLE LOW-9 QUOTATION MARK
+        0x00a6: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x00a8: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x00aa: 0x0218, #       LATIN CAPITAL LETTER S WITH COMMA BELOW
+        0x00ac: 0x0179, #       LATIN CAPITAL LETTER Z WITH ACUTE
+        0x00ae: 0x017a, #       LATIN SMALL LETTER Z WITH ACUTE
+        0x00af: 0x017b, #       LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00b2: 0x010c, #       LATIN CAPITAL LETTER C WITH CARON
+        0x00b3: 0x0142, #       LATIN SMALL LETTER L WITH STROKE
+        0x00b4: 0x017d, #       LATIN CAPITAL LETTER Z WITH CARON
+        0x00b5: 0x201d, #       RIGHT DOUBLE QUOTATION MARK
+        0x00b8: 0x017e, #       LATIN SMALL LETTER Z WITH CARON
+        0x00b9: 0x010d, #       LATIN SMALL LETTER C WITH CARON
+        0x00ba: 0x0219, #       LATIN SMALL LETTER S WITH COMMA BELOW
+        0x00bc: 0x0152, #       LATIN CAPITAL LIGATURE OE
+        0x00bd: 0x0153, #       LATIN SMALL LIGATURE OE
+        0x00be: 0x0178, #       LATIN CAPITAL LETTER Y WITH DIAERESIS
+        0x00bf: 0x017c, #       LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00c3: 0x0102, #       LATIN CAPITAL LETTER A WITH BREVE
+        0x00c5: 0x0106, #       LATIN CAPITAL LETTER C WITH ACUTE
+        0x00d0: 0x0110, #       LATIN CAPITAL LETTER D WITH STROKE
+        0x00d1: 0x0143, #       LATIN CAPITAL LETTER N WITH ACUTE
+        0x00d5: 0x0150, #       LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
+        0x00d7: 0x015a, #       LATIN CAPITAL LETTER S WITH ACUTE
+        0x00d8: 0x0170, #       LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
+        0x00dd: 0x0118, #       LATIN CAPITAL LETTER E WITH OGONEK
+        0x00de: 0x021a, #       LATIN CAPITAL LETTER T WITH COMMA BELOW
+        0x00e3: 0x0103, #       LATIN SMALL LETTER A WITH BREVE
+        0x00e5: 0x0107, #       LATIN SMALL LETTER C WITH ACUTE
+        0x00f0: 0x0111, #       LATIN SMALL LETTER D WITH STROKE
+        0x00f1: 0x0144, #       LATIN SMALL LETTER N WITH ACUTE
+        0x00f5: 0x0151, #       LATIN SMALL LETTER O WITH DOUBLE ACUTE
+        0x00f7: 0x015b, #       LATIN SMALL LETTER S WITH ACUTE
+        0x00f8: 0x0171, #       LATIN SMALL LETTER U WITH DOUBLE ACUTE
+        0x00fd: 0x0119, #       LATIN SMALL LETTER E WITH OGONEK
+        0x00fe: 0x021b, #       LATIN SMALL LETTER T WITH COMMA BELOW
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_2.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_2.py
new file mode 100644
index 0000000..c9ac2c2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_2.py
@@ -0,0 +1,101 @@
+""" Python Character Mapping Codec generated from '8859-2.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x0104, #       LATIN CAPITAL LETTER A WITH OGONEK
+        0x00a2: 0x02d8, #       BREVE
+        0x00a3: 0x0141, #       LATIN CAPITAL LETTER L WITH STROKE
+        0x00a5: 0x013d, #       LATIN CAPITAL LETTER L WITH CARON
+        0x00a6: 0x015a, #       LATIN CAPITAL LETTER S WITH ACUTE
+        0x00a9: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x00aa: 0x015e, #       LATIN CAPITAL LETTER S WITH CEDILLA
+        0x00ab: 0x0164, #       LATIN CAPITAL LETTER T WITH CARON
+        0x00ac: 0x0179, #       LATIN CAPITAL LETTER Z WITH ACUTE
+        0x00ae: 0x017d, #       LATIN CAPITAL LETTER Z WITH CARON
+        0x00af: 0x017b, #       LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00b1: 0x0105, #       LATIN SMALL LETTER A WITH OGONEK
+        0x00b2: 0x02db, #       OGONEK
+        0x00b3: 0x0142, #       LATIN SMALL LETTER L WITH STROKE
+        0x00b5: 0x013e, #       LATIN SMALL LETTER L WITH CARON
+        0x00b6: 0x015b, #       LATIN SMALL LETTER S WITH ACUTE
+        0x00b7: 0x02c7, #       CARON
+        0x00b9: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x00ba: 0x015f, #       LATIN SMALL LETTER S WITH CEDILLA
+        0x00bb: 0x0165, #       LATIN SMALL LETTER T WITH CARON
+        0x00bc: 0x017a, #       LATIN SMALL LETTER Z WITH ACUTE
+        0x00bd: 0x02dd, #       DOUBLE ACUTE ACCENT
+        0x00be: 0x017e, #       LATIN SMALL LETTER Z WITH CARON
+        0x00bf: 0x017c, #       LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00c0: 0x0154, #       LATIN CAPITAL LETTER R WITH ACUTE
+        0x00c3: 0x0102, #       LATIN CAPITAL LETTER A WITH BREVE
+        0x00c5: 0x0139, #       LATIN CAPITAL LETTER L WITH ACUTE
+        0x00c6: 0x0106, #       LATIN CAPITAL LETTER C WITH ACUTE
+        0x00c8: 0x010c, #       LATIN CAPITAL LETTER C WITH CARON
+        0x00ca: 0x0118, #       LATIN CAPITAL LETTER E WITH OGONEK
+        0x00cc: 0x011a, #       LATIN CAPITAL LETTER E WITH CARON
+        0x00cf: 0x010e, #       LATIN CAPITAL LETTER D WITH CARON
+        0x00d0: 0x0110, #       LATIN CAPITAL LETTER D WITH STROKE
+        0x00d1: 0x0143, #       LATIN CAPITAL LETTER N WITH ACUTE
+        0x00d2: 0x0147, #       LATIN CAPITAL LETTER N WITH CARON
+        0x00d5: 0x0150, #       LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
+        0x00d8: 0x0158, #       LATIN CAPITAL LETTER R WITH CARON
+        0x00d9: 0x016e, #       LATIN CAPITAL LETTER U WITH RING ABOVE
+        0x00db: 0x0170, #       LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
+        0x00de: 0x0162, #       LATIN CAPITAL LETTER T WITH CEDILLA
+        0x00e0: 0x0155, #       LATIN SMALL LETTER R WITH ACUTE
+        0x00e3: 0x0103, #       LATIN SMALL LETTER A WITH BREVE
+        0x00e5: 0x013a, #       LATIN SMALL LETTER L WITH ACUTE
+        0x00e6: 0x0107, #       LATIN SMALL LETTER C WITH ACUTE
+        0x00e8: 0x010d, #       LATIN SMALL LETTER C WITH CARON
+        0x00ea: 0x0119, #       LATIN SMALL LETTER E WITH OGONEK
+        0x00ec: 0x011b, #       LATIN SMALL LETTER E WITH CARON
+        0x00ef: 0x010f, #       LATIN SMALL LETTER D WITH CARON
+        0x00f0: 0x0111, #       LATIN SMALL LETTER D WITH STROKE
+        0x00f1: 0x0144, #       LATIN SMALL LETTER N WITH ACUTE
+        0x00f2: 0x0148, #       LATIN SMALL LETTER N WITH CARON
+        0x00f5: 0x0151, #       LATIN SMALL LETTER O WITH DOUBLE ACUTE
+        0x00f8: 0x0159, #       LATIN SMALL LETTER R WITH CARON
+        0x00f9: 0x016f, #       LATIN SMALL LETTER U WITH RING ABOVE
+        0x00fb: 0x0171, #       LATIN SMALL LETTER U WITH DOUBLE ACUTE
+        0x00fe: 0x0163, #       LATIN SMALL LETTER T WITH CEDILLA
+        0x00ff: 0x02d9, #       DOT ABOVE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_3.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_3.py
new file mode 100644
index 0000000..776423e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_3.py
@@ -0,0 +1,79 @@
+""" Python Character Mapping Codec generated from '8859-3.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x0126, #       LATIN CAPITAL LETTER H WITH STROKE
+        0x00a2: 0x02d8, #       BREVE
+        0x00a5: None,
+        0x00a6: 0x0124, #       LATIN CAPITAL LETTER H WITH CIRCUMFLEX
+        0x00a9: 0x0130, #       LATIN CAPITAL LETTER I WITH DOT ABOVE
+        0x00aa: 0x015e, #       LATIN CAPITAL LETTER S WITH CEDILLA
+        0x00ab: 0x011e, #       LATIN CAPITAL LETTER G WITH BREVE
+        0x00ac: 0x0134, #       LATIN CAPITAL LETTER J WITH CIRCUMFLEX
+        0x00ae: None,
+        0x00af: 0x017b, #       LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00b1: 0x0127, #       LATIN SMALL LETTER H WITH STROKE
+        0x00b6: 0x0125, #       LATIN SMALL LETTER H WITH CIRCUMFLEX
+        0x00b9: 0x0131, #       LATIN SMALL LETTER DOTLESS I
+        0x00ba: 0x015f, #       LATIN SMALL LETTER S WITH CEDILLA
+        0x00bb: 0x011f, #       LATIN SMALL LETTER G WITH BREVE
+        0x00bc: 0x0135, #       LATIN SMALL LETTER J WITH CIRCUMFLEX
+        0x00be: None,
+        0x00bf: 0x017c, #       LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00c3: None,
+        0x00c5: 0x010a, #       LATIN CAPITAL LETTER C WITH DOT ABOVE
+        0x00c6: 0x0108, #       LATIN CAPITAL LETTER C WITH CIRCUMFLEX
+        0x00d0: None,
+        0x00d5: 0x0120, #       LATIN CAPITAL LETTER G WITH DOT ABOVE
+        0x00d8: 0x011c, #       LATIN CAPITAL LETTER G WITH CIRCUMFLEX
+        0x00dd: 0x016c, #       LATIN CAPITAL LETTER U WITH BREVE
+        0x00de: 0x015c, #       LATIN CAPITAL LETTER S WITH CIRCUMFLEX
+        0x00e3: None,
+        0x00e5: 0x010b, #       LATIN SMALL LETTER C WITH DOT ABOVE
+        0x00e6: 0x0109, #       LATIN SMALL LETTER C WITH CIRCUMFLEX
+        0x00f0: None,
+        0x00f5: 0x0121, #       LATIN SMALL LETTER G WITH DOT ABOVE
+        0x00f8: 0x011d, #       LATIN SMALL LETTER G WITH CIRCUMFLEX
+        0x00fd: 0x016d, #       LATIN SMALL LETTER U WITH BREVE
+        0x00fe: 0x015d, #       LATIN SMALL LETTER S WITH CIRCUMFLEX
+        0x00ff: 0x02d9, #       DOT ABOVE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_4.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_4.py
new file mode 100644
index 0000000..3f84965
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_4.py
@@ -0,0 +1,94 @@
+""" Python Character Mapping Codec generated from '8859-4.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x0104, #       LATIN CAPITAL LETTER A WITH OGONEK
+        0x00a2: 0x0138, #       LATIN SMALL LETTER KRA
+        0x00a3: 0x0156, #       LATIN CAPITAL LETTER R WITH CEDILLA
+        0x00a5: 0x0128, #       LATIN CAPITAL LETTER I WITH TILDE
+        0x00a6: 0x013b, #       LATIN CAPITAL LETTER L WITH CEDILLA
+        0x00a9: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x00aa: 0x0112, #       LATIN CAPITAL LETTER E WITH MACRON
+        0x00ab: 0x0122, #       LATIN CAPITAL LETTER G WITH CEDILLA
+        0x00ac: 0x0166, #       LATIN CAPITAL LETTER T WITH STROKE
+        0x00ae: 0x017d, #       LATIN CAPITAL LETTER Z WITH CARON
+        0x00b1: 0x0105, #       LATIN SMALL LETTER A WITH OGONEK
+        0x00b2: 0x02db, #       OGONEK
+        0x00b3: 0x0157, #       LATIN SMALL LETTER R WITH CEDILLA
+        0x00b5: 0x0129, #       LATIN SMALL LETTER I WITH TILDE
+        0x00b6: 0x013c, #       LATIN SMALL LETTER L WITH CEDILLA
+        0x00b7: 0x02c7, #       CARON
+        0x00b9: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x00ba: 0x0113, #       LATIN SMALL LETTER E WITH MACRON
+        0x00bb: 0x0123, #       LATIN SMALL LETTER G WITH CEDILLA
+        0x00bc: 0x0167, #       LATIN SMALL LETTER T WITH STROKE
+        0x00bd: 0x014a, #       LATIN CAPITAL LETTER ENG
+        0x00be: 0x017e, #       LATIN SMALL LETTER Z WITH CARON
+        0x00bf: 0x014b, #       LATIN SMALL LETTER ENG
+        0x00c0: 0x0100, #       LATIN CAPITAL LETTER A WITH MACRON
+        0x00c7: 0x012e, #       LATIN CAPITAL LETTER I WITH OGONEK
+        0x00c8: 0x010c, #       LATIN CAPITAL LETTER C WITH CARON
+        0x00ca: 0x0118, #       LATIN CAPITAL LETTER E WITH OGONEK
+        0x00cc: 0x0116, #       LATIN CAPITAL LETTER E WITH DOT ABOVE
+        0x00cf: 0x012a, #       LATIN CAPITAL LETTER I WITH MACRON
+        0x00d0: 0x0110, #       LATIN CAPITAL LETTER D WITH STROKE
+        0x00d1: 0x0145, #       LATIN CAPITAL LETTER N WITH CEDILLA
+        0x00d2: 0x014c, #       LATIN CAPITAL LETTER O WITH MACRON
+        0x00d3: 0x0136, #       LATIN CAPITAL LETTER K WITH CEDILLA
+        0x00d9: 0x0172, #       LATIN CAPITAL LETTER U WITH OGONEK
+        0x00dd: 0x0168, #       LATIN CAPITAL LETTER U WITH TILDE
+        0x00de: 0x016a, #       LATIN CAPITAL LETTER U WITH MACRON
+        0x00e0: 0x0101, #       LATIN SMALL LETTER A WITH MACRON
+        0x00e7: 0x012f, #       LATIN SMALL LETTER I WITH OGONEK
+        0x00e8: 0x010d, #       LATIN SMALL LETTER C WITH CARON
+        0x00ea: 0x0119, #       LATIN SMALL LETTER E WITH OGONEK
+        0x00ec: 0x0117, #       LATIN SMALL LETTER E WITH DOT ABOVE
+        0x00ef: 0x012b, #       LATIN SMALL LETTER I WITH MACRON
+        0x00f0: 0x0111, #       LATIN SMALL LETTER D WITH STROKE
+        0x00f1: 0x0146, #       LATIN SMALL LETTER N WITH CEDILLA
+        0x00f2: 0x014d, #       LATIN SMALL LETTER O WITH MACRON
+        0x00f3: 0x0137, #       LATIN SMALL LETTER K WITH CEDILLA
+        0x00f9: 0x0173, #       LATIN SMALL LETTER U WITH OGONEK
+        0x00fd: 0x0169, #       LATIN SMALL LETTER U WITH TILDE
+        0x00fe: 0x016b, #       LATIN SMALL LETTER U WITH MACRON
+        0x00ff: 0x02d9, #       DOT ABOVE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_5.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_5.py
new file mode 100644
index 0000000..ac9a842
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_5.py
@@ -0,0 +1,138 @@
+""" Python Character Mapping Codec generated from '8859-5.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x0401, #       CYRILLIC CAPITAL LETTER IO
+        0x00a2: 0x0402, #       CYRILLIC CAPITAL LETTER DJE
+        0x00a3: 0x0403, #       CYRILLIC CAPITAL LETTER GJE
+        0x00a4: 0x0404, #       CYRILLIC CAPITAL LETTER UKRAINIAN IE
+        0x00a5: 0x0405, #       CYRILLIC CAPITAL LETTER DZE
+        0x00a6: 0x0406, #       CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00a7: 0x0407, #       CYRILLIC CAPITAL LETTER YI
+        0x00a8: 0x0408, #       CYRILLIC CAPITAL LETTER JE
+        0x00a9: 0x0409, #       CYRILLIC CAPITAL LETTER LJE
+        0x00aa: 0x040a, #       CYRILLIC CAPITAL LETTER NJE
+        0x00ab: 0x040b, #       CYRILLIC CAPITAL LETTER TSHE
+        0x00ac: 0x040c, #       CYRILLIC CAPITAL LETTER KJE
+        0x00ae: 0x040e, #       CYRILLIC CAPITAL LETTER SHORT U
+        0x00af: 0x040f, #       CYRILLIC CAPITAL LETTER DZHE
+        0x00b0: 0x0410, #       CYRILLIC CAPITAL LETTER A
+        0x00b1: 0x0411, #       CYRILLIC CAPITAL LETTER BE
+        0x00b2: 0x0412, #       CYRILLIC CAPITAL LETTER VE
+        0x00b3: 0x0413, #       CYRILLIC CAPITAL LETTER GHE
+        0x00b4: 0x0414, #       CYRILLIC CAPITAL LETTER DE
+        0x00b5: 0x0415, #       CYRILLIC CAPITAL LETTER IE
+        0x00b6: 0x0416, #       CYRILLIC CAPITAL LETTER ZHE
+        0x00b7: 0x0417, #       CYRILLIC CAPITAL LETTER ZE
+        0x00b8: 0x0418, #       CYRILLIC CAPITAL LETTER I
+        0x00b9: 0x0419, #       CYRILLIC CAPITAL LETTER SHORT I
+        0x00ba: 0x041a, #       CYRILLIC CAPITAL LETTER KA
+        0x00bb: 0x041b, #       CYRILLIC CAPITAL LETTER EL
+        0x00bc: 0x041c, #       CYRILLIC CAPITAL LETTER EM
+        0x00bd: 0x041d, #       CYRILLIC CAPITAL LETTER EN
+        0x00be: 0x041e, #       CYRILLIC CAPITAL LETTER O
+        0x00bf: 0x041f, #       CYRILLIC CAPITAL LETTER PE
+        0x00c0: 0x0420, #       CYRILLIC CAPITAL LETTER ER
+        0x00c1: 0x0421, #       CYRILLIC CAPITAL LETTER ES
+        0x00c2: 0x0422, #       CYRILLIC CAPITAL LETTER TE
+        0x00c3: 0x0423, #       CYRILLIC CAPITAL LETTER U
+        0x00c4: 0x0424, #       CYRILLIC CAPITAL LETTER EF
+        0x00c5: 0x0425, #       CYRILLIC CAPITAL LETTER HA
+        0x00c6: 0x0426, #       CYRILLIC CAPITAL LETTER TSE
+        0x00c7: 0x0427, #       CYRILLIC CAPITAL LETTER CHE
+        0x00c8: 0x0428, #       CYRILLIC CAPITAL LETTER SHA
+        0x00c9: 0x0429, #       CYRILLIC CAPITAL LETTER SHCHA
+        0x00ca: 0x042a, #       CYRILLIC CAPITAL LETTER HARD SIGN
+        0x00cb: 0x042b, #       CYRILLIC CAPITAL LETTER YERU
+        0x00cc: 0x042c, #       CYRILLIC CAPITAL LETTER SOFT SIGN
+        0x00cd: 0x042d, #       CYRILLIC CAPITAL LETTER E
+        0x00ce: 0x042e, #       CYRILLIC CAPITAL LETTER YU
+        0x00cf: 0x042f, #       CYRILLIC CAPITAL LETTER YA
+        0x00d0: 0x0430, #       CYRILLIC SMALL LETTER A
+        0x00d1: 0x0431, #       CYRILLIC SMALL LETTER BE
+        0x00d2: 0x0432, #       CYRILLIC SMALL LETTER VE
+        0x00d3: 0x0433, #       CYRILLIC SMALL LETTER GHE
+        0x00d4: 0x0434, #       CYRILLIC SMALL LETTER DE
+        0x00d5: 0x0435, #       CYRILLIC SMALL LETTER IE
+        0x00d6: 0x0436, #       CYRILLIC SMALL LETTER ZHE
+        0x00d7: 0x0437, #       CYRILLIC SMALL LETTER ZE
+        0x00d8: 0x0438, #       CYRILLIC SMALL LETTER I
+        0x00d9: 0x0439, #       CYRILLIC SMALL LETTER SHORT I
+        0x00da: 0x043a, #       CYRILLIC SMALL LETTER KA
+        0x00db: 0x043b, #       CYRILLIC SMALL LETTER EL
+        0x00dc: 0x043c, #       CYRILLIC SMALL LETTER EM
+        0x00dd: 0x043d, #       CYRILLIC SMALL LETTER EN
+        0x00de: 0x043e, #       CYRILLIC SMALL LETTER O
+        0x00df: 0x043f, #       CYRILLIC SMALL LETTER PE
+        0x00e0: 0x0440, #       CYRILLIC SMALL LETTER ER
+        0x00e1: 0x0441, #       CYRILLIC SMALL LETTER ES
+        0x00e2: 0x0442, #       CYRILLIC SMALL LETTER TE
+        0x00e3: 0x0443, #       CYRILLIC SMALL LETTER U
+        0x00e4: 0x0444, #       CYRILLIC SMALL LETTER EF
+        0x00e5: 0x0445, #       CYRILLIC SMALL LETTER HA
+        0x00e6: 0x0446, #       CYRILLIC SMALL LETTER TSE
+        0x00e7: 0x0447, #       CYRILLIC SMALL LETTER CHE
+        0x00e8: 0x0448, #       CYRILLIC SMALL LETTER SHA
+        0x00e9: 0x0449, #       CYRILLIC SMALL LETTER SHCHA
+        0x00ea: 0x044a, #       CYRILLIC SMALL LETTER HARD SIGN
+        0x00eb: 0x044b, #       CYRILLIC SMALL LETTER YERU
+        0x00ec: 0x044c, #       CYRILLIC SMALL LETTER SOFT SIGN
+        0x00ed: 0x044d, #       CYRILLIC SMALL LETTER E
+        0x00ee: 0x044e, #       CYRILLIC SMALL LETTER YU
+        0x00ef: 0x044f, #       CYRILLIC SMALL LETTER YA
+        0x00f0: 0x2116, #       NUMERO SIGN
+        0x00f1: 0x0451, #       CYRILLIC SMALL LETTER IO
+        0x00f2: 0x0452, #       CYRILLIC SMALL LETTER DJE
+        0x00f3: 0x0453, #       CYRILLIC SMALL LETTER GJE
+        0x00f4: 0x0454, #       CYRILLIC SMALL LETTER UKRAINIAN IE
+        0x00f5: 0x0455, #       CYRILLIC SMALL LETTER DZE
+        0x00f6: 0x0456, #       CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00f7: 0x0457, #       CYRILLIC SMALL LETTER YI
+        0x00f8: 0x0458, #       CYRILLIC SMALL LETTER JE
+        0x00f9: 0x0459, #       CYRILLIC SMALL LETTER LJE
+        0x00fa: 0x045a, #       CYRILLIC SMALL LETTER NJE
+        0x00fb: 0x045b, #       CYRILLIC SMALL LETTER TSHE
+        0x00fc: 0x045c, #       CYRILLIC SMALL LETTER KJE
+        0x00fd: 0x00a7, #       SECTION SIGN
+        0x00fe: 0x045e, #       CYRILLIC SMALL LETTER SHORT U
+        0x00ff: 0x045f, #       CYRILLIC SMALL LETTER DZHE
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_6.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_6.py
new file mode 100644
index 0000000..3f5ab56a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_6.py
@@ -0,0 +1,137 @@
+""" Python Character Mapping Codec generated from '8859-6.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: None,
+        0x00a2: None,
+        0x00a3: None,
+        0x00a5: None,
+        0x00a6: None,
+        0x00a7: None,
+        0x00a8: None,
+        0x00a9: None,
+        0x00aa: None,
+        0x00ab: None,
+        0x00ac: 0x060c, #       ARABIC COMMA
+        0x00ae: None,
+        0x00af: None,
+        0x00b0: None,
+        0x00b1: None,
+        0x00b2: None,
+        0x00b3: None,
+        0x00b4: None,
+        0x00b5: None,
+        0x00b6: None,
+        0x00b7: None,
+        0x00b8: None,
+        0x00b9: None,
+        0x00ba: None,
+        0x00bb: 0x061b, #       ARABIC SEMICOLON
+        0x00bc: None,
+        0x00bd: None,
+        0x00be: None,
+        0x00bf: 0x061f, #       ARABIC QUESTION MARK
+        0x00c0: None,
+        0x00c1: 0x0621, #       ARABIC LETTER HAMZA
+        0x00c2: 0x0622, #       ARABIC LETTER ALEF WITH MADDA ABOVE
+        0x00c3: 0x0623, #       ARABIC LETTER ALEF WITH HAMZA ABOVE
+        0x00c4: 0x0624, #       ARABIC LETTER WAW WITH HAMZA ABOVE
+        0x00c5: 0x0625, #       ARABIC LETTER ALEF WITH HAMZA BELOW
+        0x00c6: 0x0626, #       ARABIC LETTER YEH WITH HAMZA ABOVE
+        0x00c7: 0x0627, #       ARABIC LETTER ALEF
+        0x00c8: 0x0628, #       ARABIC LETTER BEH
+        0x00c9: 0x0629, #       ARABIC LETTER TEH MARBUTA
+        0x00ca: 0x062a, #       ARABIC LETTER TEH
+        0x00cb: 0x062b, #       ARABIC LETTER THEH
+        0x00cc: 0x062c, #       ARABIC LETTER JEEM
+        0x00cd: 0x062d, #       ARABIC LETTER HAH
+        0x00ce: 0x062e, #       ARABIC LETTER KHAH
+        0x00cf: 0x062f, #       ARABIC LETTER DAL
+        0x00d0: 0x0630, #       ARABIC LETTER THAL
+        0x00d1: 0x0631, #       ARABIC LETTER REH
+        0x00d2: 0x0632, #       ARABIC LETTER ZAIN
+        0x00d3: 0x0633, #       ARABIC LETTER SEEN
+        0x00d4: 0x0634, #       ARABIC LETTER SHEEN
+        0x00d5: 0x0635, #       ARABIC LETTER SAD
+        0x00d6: 0x0636, #       ARABIC LETTER DAD
+        0x00d7: 0x0637, #       ARABIC LETTER TAH
+        0x00d8: 0x0638, #       ARABIC LETTER ZAH
+        0x00d9: 0x0639, #       ARABIC LETTER AIN
+        0x00da: 0x063a, #       ARABIC LETTER GHAIN
+        0x00db: None,
+        0x00dc: None,
+        0x00dd: None,
+        0x00de: None,
+        0x00df: None,
+        0x00e0: 0x0640, #       ARABIC TATWEEL
+        0x00e1: 0x0641, #       ARABIC LETTER FEH
+        0x00e2: 0x0642, #       ARABIC LETTER QAF
+        0x00e3: 0x0643, #       ARABIC LETTER KAF
+        0x00e4: 0x0644, #       ARABIC LETTER LAM
+        0x00e5: 0x0645, #       ARABIC LETTER MEEM
+        0x00e6: 0x0646, #       ARABIC LETTER NOON
+        0x00e7: 0x0647, #       ARABIC LETTER HEH
+        0x00e8: 0x0648, #       ARABIC LETTER WAW
+        0x00e9: 0x0649, #       ARABIC LETTER ALEF MAKSURA
+        0x00ea: 0x064a, #       ARABIC LETTER YEH
+        0x00eb: 0x064b, #       ARABIC FATHATAN
+        0x00ec: 0x064c, #       ARABIC DAMMATAN
+        0x00ed: 0x064d, #       ARABIC KASRATAN
+        0x00ee: 0x064e, #       ARABIC FATHA
+        0x00ef: 0x064f, #       ARABIC DAMMA
+        0x00f0: 0x0650, #       ARABIC KASRA
+        0x00f1: 0x0651, #       ARABIC SHADDA
+        0x00f2: 0x0652, #       ARABIC SUKUN
+        0x00f3: None,
+        0x00f4: None,
+        0x00f5: None,
+        0x00f6: None,
+        0x00f7: None,
+        0x00f8: None,
+        0x00f9: None,
+        0x00fa: None,
+        0x00fb: None,
+        0x00fc: None,
+        0x00fd: None,
+        0x00fe: None,
+        0x00ff: None,
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_7.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_7.py
new file mode 100644
index 0000000..2530c685
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_7.py
@@ -0,0 +1,124 @@
+""" Python Character Mapping Codec generated from '8859-7.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: 0x2018, #       LEFT SINGLE QUOTATION MARK
+        0x00a2: 0x2019, #       RIGHT SINGLE QUOTATION MARK
+        0x00a4: None,
+        0x00a5: None,
+        0x00aa: None,
+        0x00ae: None,
+        0x00af: 0x2015, #       HORIZONTAL BAR
+        0x00b4: 0x0384, #       GREEK TONOS
+        0x00b5: 0x0385, #       GREEK DIALYTIKA TONOS
+        0x00b6: 0x0386, #       GREEK CAPITAL LETTER ALPHA WITH TONOS
+        0x00b8: 0x0388, #       GREEK CAPITAL LETTER EPSILON WITH TONOS
+        0x00b9: 0x0389, #       GREEK CAPITAL LETTER ETA WITH TONOS
+        0x00ba: 0x038a, #       GREEK CAPITAL LETTER IOTA WITH TONOS
+        0x00bc: 0x038c, #       GREEK CAPITAL LETTER OMICRON WITH TONOS
+        0x00be: 0x038e, #       GREEK CAPITAL LETTER UPSILON WITH TONOS
+        0x00bf: 0x038f, #       GREEK CAPITAL LETTER OMEGA WITH TONOS
+        0x00c0: 0x0390, #       GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
+        0x00c1: 0x0391, #       GREEK CAPITAL LETTER ALPHA
+        0x00c2: 0x0392, #       GREEK CAPITAL LETTER BETA
+        0x00c3: 0x0393, #       GREEK CAPITAL LETTER GAMMA
+        0x00c4: 0x0394, #       GREEK CAPITAL LETTER DELTA
+        0x00c5: 0x0395, #       GREEK CAPITAL LETTER EPSILON
+        0x00c6: 0x0396, #       GREEK CAPITAL LETTER ZETA
+        0x00c7: 0x0397, #       GREEK CAPITAL LETTER ETA
+        0x00c8: 0x0398, #       GREEK CAPITAL LETTER THETA
+        0x00c9: 0x0399, #       GREEK CAPITAL LETTER IOTA
+        0x00ca: 0x039a, #       GREEK CAPITAL LETTER KAPPA
+        0x00cb: 0x039b, #       GREEK CAPITAL LETTER LAMDA
+        0x00cc: 0x039c, #       GREEK CAPITAL LETTER MU
+        0x00cd: 0x039d, #       GREEK CAPITAL LETTER NU
+        0x00ce: 0x039e, #       GREEK CAPITAL LETTER XI
+        0x00cf: 0x039f, #       GREEK CAPITAL LETTER OMICRON
+        0x00d0: 0x03a0, #       GREEK CAPITAL LETTER PI
+        0x00d1: 0x03a1, #       GREEK CAPITAL LETTER RHO
+        0x00d2: None,
+        0x00d3: 0x03a3, #       GREEK CAPITAL LETTER SIGMA
+        0x00d4: 0x03a4, #       GREEK CAPITAL LETTER TAU
+        0x00d5: 0x03a5, #       GREEK CAPITAL LETTER UPSILON
+        0x00d6: 0x03a6, #       GREEK CAPITAL LETTER PHI
+        0x00d7: 0x03a7, #       GREEK CAPITAL LETTER CHI
+        0x00d8: 0x03a8, #       GREEK CAPITAL LETTER PSI
+        0x00d9: 0x03a9, #       GREEK CAPITAL LETTER OMEGA
+        0x00da: 0x03aa, #       GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
+        0x00db: 0x03ab, #       GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
+        0x00dc: 0x03ac, #       GREEK SMALL LETTER ALPHA WITH TONOS
+        0x00dd: 0x03ad, #       GREEK SMALL LETTER EPSILON WITH TONOS
+        0x00de: 0x03ae, #       GREEK SMALL LETTER ETA WITH TONOS
+        0x00df: 0x03af, #       GREEK SMALL LETTER IOTA WITH TONOS
+        0x00e0: 0x03b0, #       GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
+        0x00e1: 0x03b1, #       GREEK SMALL LETTER ALPHA
+        0x00e2: 0x03b2, #       GREEK SMALL LETTER BETA
+        0x00e3: 0x03b3, #       GREEK SMALL LETTER GAMMA
+        0x00e4: 0x03b4, #       GREEK SMALL LETTER DELTA
+        0x00e5: 0x03b5, #       GREEK SMALL LETTER EPSILON
+        0x00e6: 0x03b6, #       GREEK SMALL LETTER ZETA
+        0x00e7: 0x03b7, #       GREEK SMALL LETTER ETA
+        0x00e8: 0x03b8, #       GREEK SMALL LETTER THETA
+        0x00e9: 0x03b9, #       GREEK SMALL LETTER IOTA
+        0x00ea: 0x03ba, #       GREEK SMALL LETTER KAPPA
+        0x00eb: 0x03bb, #       GREEK SMALL LETTER LAMDA
+        0x00ec: 0x03bc, #       GREEK SMALL LETTER MU
+        0x00ed: 0x03bd, #       GREEK SMALL LETTER NU
+        0x00ee: 0x03be, #       GREEK SMALL LETTER XI
+        0x00ef: 0x03bf, #       GREEK SMALL LETTER OMICRON
+        0x00f0: 0x03c0, #       GREEK SMALL LETTER PI
+        0x00f1: 0x03c1, #       GREEK SMALL LETTER RHO
+        0x00f2: 0x03c2, #       GREEK SMALL LETTER FINAL SIGMA
+        0x00f3: 0x03c3, #       GREEK SMALL LETTER SIGMA
+        0x00f4: 0x03c4, #       GREEK SMALL LETTER TAU
+        0x00f5: 0x03c5, #       GREEK SMALL LETTER UPSILON
+        0x00f6: 0x03c6, #       GREEK SMALL LETTER PHI
+        0x00f7: 0x03c7, #       GREEK SMALL LETTER CHI
+        0x00f8: 0x03c8, #       GREEK SMALL LETTER PSI
+        0x00f9: 0x03c9, #       GREEK SMALL LETTER OMEGA
+        0x00fa: 0x03ca, #       GREEK SMALL LETTER IOTA WITH DIALYTIKA
+        0x00fb: 0x03cb, #       GREEK SMALL LETTER UPSILON WITH DIALYTIKA
+        0x00fc: 0x03cc, #       GREEK SMALL LETTER OMICRON WITH TONOS
+        0x00fd: 0x03cd, #       GREEK SMALL LETTER UPSILON WITH TONOS
+        0x00fe: 0x03ce, #       GREEK SMALL LETTER OMEGA WITH TONOS
+        0x00ff: None,
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_8.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_8.py
new file mode 100644
index 0000000..d0176ee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_8.py
@@ -0,0 +1,112 @@
+""" Python Character Mapping Codec generated from '8859-8.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00a1: None,
+        0x00aa: 0x00d7, #       MULTIPLICATION SIGN
+        0x00ba: 0x00f7, #       DIVISION SIGN
+        0x00bf: None,
+        0x00c0: None,
+        0x00c1: None,
+        0x00c2: None,
+        0x00c3: None,
+        0x00c4: None,
+        0x00c5: None,
+        0x00c6: None,
+        0x00c7: None,
+        0x00c8: None,
+        0x00c9: None,
+        0x00ca: None,
+        0x00cb: None,
+        0x00cc: None,
+        0x00cd: None,
+        0x00ce: None,
+        0x00cf: None,
+        0x00d0: None,
+        0x00d1: None,
+        0x00d2: None,
+        0x00d3: None,
+        0x00d4: None,
+        0x00d5: None,
+        0x00d6: None,
+        0x00d7: None,
+        0x00d8: None,
+        0x00d9: None,
+        0x00da: None,
+        0x00db: None,
+        0x00dc: None,
+        0x00dd: None,
+        0x00de: None,
+        0x00df: 0x2017, #       DOUBLE LOW LINE
+        0x00e0: 0x05d0, #       HEBREW LETTER ALEF
+        0x00e1: 0x05d1, #       HEBREW LETTER BET
+        0x00e2: 0x05d2, #       HEBREW LETTER GIMEL
+        0x00e3: 0x05d3, #       HEBREW LETTER DALET
+        0x00e4: 0x05d4, #       HEBREW LETTER HE
+        0x00e5: 0x05d5, #       HEBREW LETTER VAV
+        0x00e6: 0x05d6, #       HEBREW LETTER ZAYIN
+        0x00e7: 0x05d7, #       HEBREW LETTER HET
+        0x00e8: 0x05d8, #       HEBREW LETTER TET
+        0x00e9: 0x05d9, #       HEBREW LETTER YOD
+        0x00ea: 0x05da, #       HEBREW LETTER FINAL KAF
+        0x00eb: 0x05db, #       HEBREW LETTER KAF
+        0x00ec: 0x05dc, #       HEBREW LETTER LAMED
+        0x00ed: 0x05dd, #       HEBREW LETTER FINAL MEM
+        0x00ee: 0x05de, #       HEBREW LETTER MEM
+        0x00ef: 0x05df, #       HEBREW LETTER FINAL NUN
+        0x00f0: 0x05e0, #       HEBREW LETTER NUN
+        0x00f1: 0x05e1, #       HEBREW LETTER SAMEKH
+        0x00f2: 0x05e2, #       HEBREW LETTER AYIN
+        0x00f3: 0x05e3, #       HEBREW LETTER FINAL PE
+        0x00f4: 0x05e4, #       HEBREW LETTER PE
+        0x00f5: 0x05e5, #       HEBREW LETTER FINAL TSADI
+        0x00f6: 0x05e6, #       HEBREW LETTER TSADI
+        0x00f7: 0x05e7, #       HEBREW LETTER QOF
+        0x00f8: 0x05e8, #       HEBREW LETTER RESH
+        0x00f9: 0x05e9, #       HEBREW LETTER SHIN
+        0x00fa: 0x05ea, #       HEBREW LETTER TAV
+        0x00fb: None,
+        0x00fc: None,
+        0x00fd: 0x200e, #       LEFT-TO-RIGHT MARK
+        0x00fe: 0x200f, #       RIGHT-TO-LEFT MARK
+        0x00ff: None,
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/iso8859_9.py b/depot_tools/release/win/python_24/Lib/encodings/iso8859_9.py
new file mode 100644
index 0000000..28a603f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/iso8859_9.py
@@ -0,0 +1,50 @@
+""" Python Character Mapping Codec generated from '8859-9.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x00d0: 0x011e, #       LATIN CAPITAL LETTER G WITH BREVE
+        0x00dd: 0x0130, #       LATIN CAPITAL LETTER I WITH DOT ABOVE
+        0x00de: 0x015e, #       LATIN CAPITAL LETTER S WITH CEDILLA
+        0x00f0: 0x011f, #       LATIN SMALL LETTER G WITH BREVE
+        0x00fd: 0x0131, #       LATIN SMALL LETTER DOTLESS I
+        0x00fe: 0x015f, #       LATIN SMALL LETTER S WITH CEDILLA
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/johab.py b/depot_tools/release/win/python_24/Lib/encodings/johab.py
new file mode 100644
index 0000000..b6a87d7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/johab.py
@@ -0,0 +1,34 @@
+#
+# johab.py: Python Unicode Codec for JOHAB
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: johab.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+
+import _codecs_kr, codecs
+
+codec = _codecs_kr.getcodec('johab')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/koi8_r.py b/depot_tools/release/win/python_24/Lib/encodings/koi8_r.py
new file mode 100644
index 0000000..7494ca6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/koi8_r.py
@@ -0,0 +1,172 @@
+""" Python Character Mapping Codec generated from 'KOI8-R.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x2500, #       BOX DRAWINGS LIGHT HORIZONTAL
+        0x0081: 0x2502, #       BOX DRAWINGS LIGHT VERTICAL
+        0x0082: 0x250c, #       BOX DRAWINGS LIGHT DOWN AND RIGHT
+        0x0083: 0x2510, #       BOX DRAWINGS LIGHT DOWN AND LEFT
+        0x0084: 0x2514, #       BOX DRAWINGS LIGHT UP AND RIGHT
+        0x0085: 0x2518, #       BOX DRAWINGS LIGHT UP AND LEFT
+        0x0086: 0x251c, #       BOX DRAWINGS LIGHT VERTICAL AND RIGHT
+        0x0087: 0x2524, #       BOX DRAWINGS LIGHT VERTICAL AND LEFT
+        0x0088: 0x252c, #       BOX DRAWINGS LIGHT DOWN AND HORIZONTAL
+        0x0089: 0x2534, #       BOX DRAWINGS LIGHT UP AND HORIZONTAL
+        0x008a: 0x253c, #       BOX DRAWINGS LIGHT VERTICAL AND HORIZONTAL
+        0x008b: 0x2580, #       UPPER HALF BLOCK
+        0x008c: 0x2584, #       LOWER HALF BLOCK
+        0x008d: 0x2588, #       FULL BLOCK
+        0x008e: 0x258c, #       LEFT HALF BLOCK
+        0x008f: 0x2590, #       RIGHT HALF BLOCK
+        0x0090: 0x2591, #       LIGHT SHADE
+        0x0091: 0x2592, #       MEDIUM SHADE
+        0x0092: 0x2593, #       DARK SHADE
+        0x0093: 0x2320, #       TOP HALF INTEGRAL
+        0x0094: 0x25a0, #       BLACK SQUARE
+        0x0095: 0x2219, #       BULLET OPERATOR
+        0x0096: 0x221a, #       SQUARE ROOT
+        0x0097: 0x2248, #       ALMOST EQUAL TO
+        0x0098: 0x2264, #       LESS-THAN OR EQUAL TO
+        0x0099: 0x2265, #       GREATER-THAN OR EQUAL TO
+        0x009a: 0x00a0, #       NO-BREAK SPACE
+        0x009b: 0x2321, #       BOTTOM HALF INTEGRAL
+        0x009c: 0x00b0, #       DEGREE SIGN
+        0x009d: 0x00b2, #       SUPERSCRIPT TWO
+        0x009e: 0x00b7, #       MIDDLE DOT
+        0x009f: 0x00f7, #       DIVISION SIGN
+        0x00a0: 0x2550, #       BOX DRAWINGS DOUBLE HORIZONTAL
+        0x00a1: 0x2551, #       BOX DRAWINGS DOUBLE VERTICAL
+        0x00a2: 0x2552, #       BOX DRAWINGS DOWN SINGLE AND RIGHT DOUBLE
+        0x00a3: 0x0451, #       CYRILLIC SMALL LETTER IO
+        0x00a4: 0x2553, #       BOX DRAWINGS DOWN DOUBLE AND RIGHT SINGLE
+        0x00a5: 0x2554, #       BOX DRAWINGS DOUBLE DOWN AND RIGHT
+        0x00a6: 0x2555, #       BOX DRAWINGS DOWN SINGLE AND LEFT DOUBLE
+        0x00a7: 0x2556, #       BOX DRAWINGS DOWN DOUBLE AND LEFT SINGLE
+        0x00a8: 0x2557, #       BOX DRAWINGS DOUBLE DOWN AND LEFT
+        0x00a9: 0x2558, #       BOX DRAWINGS UP SINGLE AND RIGHT DOUBLE
+        0x00aa: 0x2559, #       BOX DRAWINGS UP DOUBLE AND RIGHT SINGLE
+        0x00ab: 0x255a, #       BOX DRAWINGS DOUBLE UP AND RIGHT
+        0x00ac: 0x255b, #       BOX DRAWINGS UP SINGLE AND LEFT DOUBLE
+        0x00ad: 0x255c, #       BOX DRAWINGS UP DOUBLE AND LEFT SINGLE
+        0x00ae: 0x255d, #       BOX DRAWINGS DOUBLE UP AND LEFT
+        0x00af: 0x255e, #       BOX DRAWINGS VERTICAL SINGLE AND RIGHT DOUBLE
+        0x00b0: 0x255f, #       BOX DRAWINGS VERTICAL DOUBLE AND RIGHT SINGLE
+        0x00b1: 0x2560, #       BOX DRAWINGS DOUBLE VERTICAL AND RIGHT
+        0x00b2: 0x2561, #       BOX DRAWINGS VERTICAL SINGLE AND LEFT DOUBLE
+        0x00b3: 0x0401, #       CYRILLIC CAPITAL LETTER IO
+        0x00b4: 0x2562, #       BOX DRAWINGS VERTICAL DOUBLE AND LEFT SINGLE
+        0x00b5: 0x2563, #       BOX DRAWINGS DOUBLE VERTICAL AND LEFT
+        0x00b6: 0x2564, #       BOX DRAWINGS DOWN SINGLE AND HORIZONTAL DOUBLE
+        0x00b7: 0x2565, #       BOX DRAWINGS DOWN DOUBLE AND HORIZONTAL SINGLE
+        0x00b8: 0x2566, #       BOX DRAWINGS DOUBLE DOWN AND HORIZONTAL
+        0x00b9: 0x2567, #       BOX DRAWINGS UP SINGLE AND HORIZONTAL DOUBLE
+        0x00ba: 0x2568, #       BOX DRAWINGS UP DOUBLE AND HORIZONTAL SINGLE
+        0x00bb: 0x2569, #       BOX DRAWINGS DOUBLE UP AND HORIZONTAL
+        0x00bc: 0x256a, #       BOX DRAWINGS VERTICAL SINGLE AND HORIZONTAL DOUBLE
+        0x00bd: 0x256b, #       BOX DRAWINGS VERTICAL DOUBLE AND HORIZONTAL SINGLE
+        0x00be: 0x256c, #       BOX DRAWINGS DOUBLE VERTICAL AND HORIZONTAL
+        0x00bf: 0x00a9, #       COPYRIGHT SIGN
+        0x00c0: 0x044e, #       CYRILLIC SMALL LETTER YU
+        0x00c1: 0x0430, #       CYRILLIC SMALL LETTER A
+        0x00c2: 0x0431, #       CYRILLIC SMALL LETTER BE
+        0x00c3: 0x0446, #       CYRILLIC SMALL LETTER TSE
+        0x00c4: 0x0434, #       CYRILLIC SMALL LETTER DE
+        0x00c5: 0x0435, #       CYRILLIC SMALL LETTER IE
+        0x00c6: 0x0444, #       CYRILLIC SMALL LETTER EF
+        0x00c7: 0x0433, #       CYRILLIC SMALL LETTER GHE
+        0x00c8: 0x0445, #       CYRILLIC SMALL LETTER HA
+        0x00c9: 0x0438, #       CYRILLIC SMALL LETTER I
+        0x00ca: 0x0439, #       CYRILLIC SMALL LETTER SHORT I
+        0x00cb: 0x043a, #       CYRILLIC SMALL LETTER KA
+        0x00cc: 0x043b, #       CYRILLIC SMALL LETTER EL
+        0x00cd: 0x043c, #       CYRILLIC SMALL LETTER EM
+        0x00ce: 0x043d, #       CYRILLIC SMALL LETTER EN
+        0x00cf: 0x043e, #       CYRILLIC SMALL LETTER O
+        0x00d0: 0x043f, #       CYRILLIC SMALL LETTER PE
+        0x00d1: 0x044f, #       CYRILLIC SMALL LETTER YA
+        0x00d2: 0x0440, #       CYRILLIC SMALL LETTER ER
+        0x00d3: 0x0441, #       CYRILLIC SMALL LETTER ES
+        0x00d4: 0x0442, #       CYRILLIC SMALL LETTER TE
+        0x00d5: 0x0443, #       CYRILLIC SMALL LETTER U
+        0x00d6: 0x0436, #       CYRILLIC SMALL LETTER ZHE
+        0x00d7: 0x0432, #       CYRILLIC SMALL LETTER VE
+        0x00d8: 0x044c, #       CYRILLIC SMALL LETTER SOFT SIGN
+        0x00d9: 0x044b, #       CYRILLIC SMALL LETTER YERU
+        0x00da: 0x0437, #       CYRILLIC SMALL LETTER ZE
+        0x00db: 0x0448, #       CYRILLIC SMALL LETTER SHA
+        0x00dc: 0x044d, #       CYRILLIC SMALL LETTER E
+        0x00dd: 0x0449, #       CYRILLIC SMALL LETTER SHCHA
+        0x00de: 0x0447, #       CYRILLIC SMALL LETTER CHE
+        0x00df: 0x044a, #       CYRILLIC SMALL LETTER HARD SIGN
+        0x00e0: 0x042e, #       CYRILLIC CAPITAL LETTER YU
+        0x00e1: 0x0410, #       CYRILLIC CAPITAL LETTER A
+        0x00e2: 0x0411, #       CYRILLIC CAPITAL LETTER BE
+        0x00e3: 0x0426, #       CYRILLIC CAPITAL LETTER TSE
+        0x00e4: 0x0414, #       CYRILLIC CAPITAL LETTER DE
+        0x00e5: 0x0415, #       CYRILLIC CAPITAL LETTER IE
+        0x00e6: 0x0424, #       CYRILLIC CAPITAL LETTER EF
+        0x00e7: 0x0413, #       CYRILLIC CAPITAL LETTER GHE
+        0x00e8: 0x0425, #       CYRILLIC CAPITAL LETTER HA
+        0x00e9: 0x0418, #       CYRILLIC CAPITAL LETTER I
+        0x00ea: 0x0419, #       CYRILLIC CAPITAL LETTER SHORT I
+        0x00eb: 0x041a, #       CYRILLIC CAPITAL LETTER KA
+        0x00ec: 0x041b, #       CYRILLIC CAPITAL LETTER EL
+        0x00ed: 0x041c, #       CYRILLIC CAPITAL LETTER EM
+        0x00ee: 0x041d, #       CYRILLIC CAPITAL LETTER EN
+        0x00ef: 0x041e, #       CYRILLIC CAPITAL LETTER O
+        0x00f0: 0x041f, #       CYRILLIC CAPITAL LETTER PE
+        0x00f1: 0x042f, #       CYRILLIC CAPITAL LETTER YA
+        0x00f2: 0x0420, #       CYRILLIC CAPITAL LETTER ER
+        0x00f3: 0x0421, #       CYRILLIC CAPITAL LETTER ES
+        0x00f4: 0x0422, #       CYRILLIC CAPITAL LETTER TE
+        0x00f5: 0x0423, #       CYRILLIC CAPITAL LETTER U
+        0x00f6: 0x0416, #       CYRILLIC CAPITAL LETTER ZHE
+        0x00f7: 0x0412, #       CYRILLIC CAPITAL LETTER VE
+        0x00f8: 0x042c, #       CYRILLIC CAPITAL LETTER SOFT SIGN
+        0x00f9: 0x042b, #       CYRILLIC CAPITAL LETTER YERU
+        0x00fa: 0x0417, #       CYRILLIC CAPITAL LETTER ZE
+        0x00fb: 0x0428, #       CYRILLIC CAPITAL LETTER SHA
+        0x00fc: 0x042d, #       CYRILLIC CAPITAL LETTER E
+        0x00fd: 0x0429, #       CYRILLIC CAPITAL LETTER SHCHA
+        0x00fe: 0x0427, #       CYRILLIC CAPITAL LETTER CHE
+        0x00ff: 0x042a, #       CYRILLIC CAPITAL LETTER HARD SIGN
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/koi8_u.py b/depot_tools/release/win/python_24/Lib/encodings/koi8_u.py
new file mode 100644
index 0000000..59020e7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/koi8_u.py
@@ -0,0 +1,54 @@
+""" Python Character Mapping Codec for KOI8U.
+
+    This character scheme is compliant to RFC2319
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+Modified by Maxim Dzumanenko <mvd@mylinux.com.ua>.
+
+(c) Copyright 2002, Python Software Foundation.
+
+"""#"
+
+import codecs, koi8_r
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = koi8_r.decoding_map.copy()
+decoding_map.update({
+        0x00a4: 0x0454, #       CYRILLIC SMALL LETTER UKRAINIAN IE
+        0x00a6: 0x0456, #       CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00a7: 0x0457, #       CYRILLIC SMALL LETTER YI (UKRAINIAN)
+        0x00ad: 0x0491, #       CYRILLIC SMALL LETTER UKRAINIAN GHE WITH UPTURN
+        0x00b4: 0x0404, #       CYRILLIC CAPITAL LETTER UKRAINIAN IE
+        0x00b6: 0x0406, #       CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00b7: 0x0407, #       CYRILLIC CAPITAL LETTER YI (UKRAINIAN)
+        0x00bd: 0x0490, #       CYRILLIC CAPITAL LETTER UKRAINIAN GHE WITH UPTURN
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/latin_1.py b/depot_tools/release/win/python_24/Lib/encodings/latin_1.py
new file mode 100644
index 0000000..0e55917b3d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/latin_1.py
@@ -0,0 +1,35 @@
+""" Python 'latin-1' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.latin_1_encode
+    decode = codecs.latin_1_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+class StreamConverter(StreamWriter,StreamReader):
+
+    encode = codecs.latin_1_decode
+    decode = codecs.latin_1_encode
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/mac_cyrillic.py b/depot_tools/release/win/python_24/Lib/encodings/mac_cyrillic.py
new file mode 100644
index 0000000..922523bb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/mac_cyrillic.py
@@ -0,0 +1,167 @@
+""" Python Character Mapping Codec generated from 'CYRILLIC.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x0410, # CYRILLIC CAPITAL LETTER A
+        0x0081: 0x0411, # CYRILLIC CAPITAL LETTER BE
+        0x0082: 0x0412, # CYRILLIC CAPITAL LETTER VE
+        0x0083: 0x0413, # CYRILLIC CAPITAL LETTER GHE
+        0x0084: 0x0414, # CYRILLIC CAPITAL LETTER DE
+        0x0085: 0x0415, # CYRILLIC CAPITAL LETTER IE
+        0x0086: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
+        0x0087: 0x0417, # CYRILLIC CAPITAL LETTER ZE
+        0x0088: 0x0418, # CYRILLIC CAPITAL LETTER I
+        0x0089: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
+        0x008a: 0x041a, # CYRILLIC CAPITAL LETTER KA
+        0x008b: 0x041b, # CYRILLIC CAPITAL LETTER EL
+        0x008c: 0x041c, # CYRILLIC CAPITAL LETTER EM
+        0x008d: 0x041d, # CYRILLIC CAPITAL LETTER EN
+        0x008e: 0x041e, # CYRILLIC CAPITAL LETTER O
+        0x008f: 0x041f, # CYRILLIC CAPITAL LETTER PE
+        0x0090: 0x0420, # CYRILLIC CAPITAL LETTER ER
+        0x0091: 0x0421, # CYRILLIC CAPITAL LETTER ES
+        0x0092: 0x0422, # CYRILLIC CAPITAL LETTER TE
+        0x0093: 0x0423, # CYRILLIC CAPITAL LETTER U
+        0x0094: 0x0424, # CYRILLIC CAPITAL LETTER EF
+        0x0095: 0x0425, # CYRILLIC CAPITAL LETTER HA
+        0x0096: 0x0426, # CYRILLIC CAPITAL LETTER TSE
+        0x0097: 0x0427, # CYRILLIC CAPITAL LETTER CHE
+        0x0098: 0x0428, # CYRILLIC CAPITAL LETTER SHA
+        0x0099: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
+        0x009a: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
+        0x009b: 0x042b, # CYRILLIC CAPITAL LETTER YERU
+        0x009c: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
+        0x009d: 0x042d, # CYRILLIC CAPITAL LETTER E
+        0x009e: 0x042e, # CYRILLIC CAPITAL LETTER YU
+        0x009f: 0x042f, # CYRILLIC CAPITAL LETTER YA
+        0x00a0: 0x2020, # DAGGER
+        0x00a1: 0x00b0, # DEGREE SIGN
+        0x00a4: 0x00a7, # SECTION SIGN
+        0x00a5: 0x2022, # BULLET
+        0x00a6: 0x00b6, # PILCROW SIGN
+        0x00a7: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00a8: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x2122, # TRADE MARK SIGN
+        0x00ab: 0x0402, # CYRILLIC CAPITAL LETTER DJE
+        0x00ac: 0x0452, # CYRILLIC SMALL LETTER DJE
+        0x00ad: 0x2260, # NOT EQUAL TO
+        0x00ae: 0x0403, # CYRILLIC CAPITAL LETTER GJE
+        0x00af: 0x0453, # CYRILLIC SMALL LETTER GJE
+        0x00b0: 0x221e, # INFINITY
+        0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00b4: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
+        0x00b7: 0x0408, # CYRILLIC CAPITAL LETTER JE
+        0x00b8: 0x0404, # CYRILLIC CAPITAL LETTER UKRAINIAN IE
+        0x00b9: 0x0454, # CYRILLIC SMALL LETTER UKRAINIAN IE
+        0x00ba: 0x0407, # CYRILLIC CAPITAL LETTER YI
+        0x00bb: 0x0457, # CYRILLIC SMALL LETTER YI
+        0x00bc: 0x0409, # CYRILLIC CAPITAL LETTER LJE
+        0x00bd: 0x0459, # CYRILLIC SMALL LETTER LJE
+        0x00be: 0x040a, # CYRILLIC CAPITAL LETTER NJE
+        0x00bf: 0x045a, # CYRILLIC SMALL LETTER NJE
+        0x00c0: 0x0458, # CYRILLIC SMALL LETTER JE
+        0x00c1: 0x0405, # CYRILLIC CAPITAL LETTER DZE
+        0x00c2: 0x00ac, # NOT SIGN
+        0x00c3: 0x221a, # SQUARE ROOT
+        0x00c4: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00c5: 0x2248, # ALMOST EQUAL TO
+        0x00c6: 0x2206, # INCREMENT
+        0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
+        0x00ca: 0x00a0, # NO-BREAK SPACE
+        0x00cb: 0x040b, # CYRILLIC CAPITAL LETTER TSHE
+        0x00cc: 0x045b, # CYRILLIC SMALL LETTER TSHE
+        0x00cd: 0x040c, # CYRILLIC CAPITAL LETTER KJE
+        0x00ce: 0x045c, # CYRILLIC SMALL LETTER KJE
+        0x00cf: 0x0455, # CYRILLIC SMALL LETTER DZE
+        0x00d0: 0x2013, # EN DASH
+        0x00d1: 0x2014, # EM DASH
+        0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00d6: 0x00f7, # DIVISION SIGN
+        0x00d7: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x00d8: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U
+        0x00d9: 0x045e, # CYRILLIC SMALL LETTER SHORT U
+        0x00da: 0x040f, # CYRILLIC CAPITAL LETTER DZHE
+        0x00db: 0x045f, # CYRILLIC SMALL LETTER DZHE
+        0x00dc: 0x2116, # NUMERO SIGN
+        0x00dd: 0x0401, # CYRILLIC CAPITAL LETTER IO
+        0x00de: 0x0451, # CYRILLIC SMALL LETTER IO
+        0x00df: 0x044f, # CYRILLIC SMALL LETTER YA
+        0x00e0: 0x0430, # CYRILLIC SMALL LETTER A
+        0x00e1: 0x0431, # CYRILLIC SMALL LETTER BE
+        0x00e2: 0x0432, # CYRILLIC SMALL LETTER VE
+        0x00e3: 0x0433, # CYRILLIC SMALL LETTER GHE
+        0x00e4: 0x0434, # CYRILLIC SMALL LETTER DE
+        0x00e5: 0x0435, # CYRILLIC SMALL LETTER IE
+        0x00e6: 0x0436, # CYRILLIC SMALL LETTER ZHE
+        0x00e7: 0x0437, # CYRILLIC SMALL LETTER ZE
+        0x00e8: 0x0438, # CYRILLIC SMALL LETTER I
+        0x00e9: 0x0439, # CYRILLIC SMALL LETTER SHORT I
+        0x00ea: 0x043a, # CYRILLIC SMALL LETTER KA
+        0x00eb: 0x043b, # CYRILLIC SMALL LETTER EL
+        0x00ec: 0x043c, # CYRILLIC SMALL LETTER EM
+        0x00ed: 0x043d, # CYRILLIC SMALL LETTER EN
+        0x00ee: 0x043e, # CYRILLIC SMALL LETTER O
+        0x00ef: 0x043f, # CYRILLIC SMALL LETTER PE
+        0x00f0: 0x0440, # CYRILLIC SMALL LETTER ER
+        0x00f1: 0x0441, # CYRILLIC SMALL LETTER ES
+        0x00f2: 0x0442, # CYRILLIC SMALL LETTER TE
+        0x00f3: 0x0443, # CYRILLIC SMALL LETTER U
+        0x00f4: 0x0444, # CYRILLIC SMALL LETTER EF
+        0x00f5: 0x0445, # CYRILLIC SMALL LETTER HA
+        0x00f6: 0x0446, # CYRILLIC SMALL LETTER TSE
+        0x00f7: 0x0447, # CYRILLIC SMALL LETTER CHE
+        0x00f8: 0x0448, # CYRILLIC SMALL LETTER SHA
+        0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
+        0x00fa: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
+        0x00fb: 0x044b, # CYRILLIC SMALL LETTER YERU
+        0x00fc: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
+        0x00fd: 0x044d, # CYRILLIC SMALL LETTER E
+        0x00fe: 0x044e, # CYRILLIC SMALL LETTER YU
+        0x00ff: 0x00a4, # CURRENCY SIGN
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/mac_greek.py b/depot_tools/release/win/python_24/Lib/encodings/mac_greek.py
new file mode 100644
index 0000000..473a1575
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/mac_greek.py
@@ -0,0 +1,170 @@
+""" Python Character Mapping Codec generated from 'GREEK.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+# Map each Mac OS Greek byte value to its Unicode code point.  Bytes not
+# listed below decode to themselves (identity); None marks a byte that is
+# undefined in this encoding.  Table generated from Apple's GREEK.TXT.
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0081: 0x00b9, # SUPERSCRIPT ONE
+        0x0082: 0x00b2, # SUPERSCRIPT TWO
+        0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0084: 0x00b3, # SUPERSCRIPT THREE
+        0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x0087: 0x0385, # GREEK DIALYTIKA TONOS
+        0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x008b: 0x0384, # GREEK TONOS
+        0x008c: 0x00a8, # DIAERESIS
+        0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x0092: 0x00a3, # POUND SIGN
+        0x0093: 0x2122, # TRADE MARK SIGN
+        0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x0096: 0x2022, # BULLET
+        0x0097: 0x00bd, # VULGAR FRACTION ONE HALF
+        0x0098: 0x2030, # PER MILLE SIGN
+        0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x009b: 0x00a6, # BROKEN BAR
+        0x009c: 0x00ad, # SOFT HYPHEN
+        0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00a0: 0x2020, # DAGGER
+        0x00a1: 0x0393, # GREEK CAPITAL LETTER GAMMA
+        0x00a2: 0x0394, # GREEK CAPITAL LETTER DELTA
+        0x00a3: 0x0398, # GREEK CAPITAL LETTER THETA
+        0x00a4: 0x039b, # GREEK CAPITAL LETTER LAMBDA
+        0x00a5: 0x039e, # GREEK CAPITAL LETTER XI
+        0x00a6: 0x03a0, # GREEK CAPITAL LETTER PI
+        0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00a8: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x03a3, # GREEK CAPITAL LETTER SIGMA
+        0x00ab: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
+        0x00ac: 0x00a7, # SECTION SIGN
+        0x00ad: 0x2260, # NOT EQUAL TO
+        0x00ae: 0x00b0, # DEGREE SIGN
+        0x00af: 0x0387, # GREEK ANO TELEIA
+        0x00b0: 0x0391, # GREEK CAPITAL LETTER ALPHA
+        0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00b4: 0x00a5, # YEN SIGN
+        0x00b5: 0x0392, # GREEK CAPITAL LETTER BETA
+        0x00b6: 0x0395, # GREEK CAPITAL LETTER EPSILON
+        0x00b7: 0x0396, # GREEK CAPITAL LETTER ZETA
+        0x00b8: 0x0397, # GREEK CAPITAL LETTER ETA
+        0x00b9: 0x0399, # GREEK CAPITAL LETTER IOTA
+        0x00ba: 0x039a, # GREEK CAPITAL LETTER KAPPA
+        0x00bb: 0x039c, # GREEK CAPITAL LETTER MU
+        0x00bc: 0x03a6, # GREEK CAPITAL LETTER PHI
+        0x00bd: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
+        0x00be: 0x03a8, # GREEK CAPITAL LETTER PSI
+        0x00bf: 0x03a9, # GREEK CAPITAL LETTER OMEGA
+        0x00c0: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
+        0x00c1: 0x039d, # GREEK CAPITAL LETTER NU
+        0x00c2: 0x00ac, # NOT SIGN
+        0x00c3: 0x039f, # GREEK CAPITAL LETTER OMICRON
+        0x00c4: 0x03a1, # GREEK CAPITAL LETTER RHO
+        0x00c5: 0x2248, # ALMOST EQUAL TO
+        0x00c6: 0x03a4, # GREEK CAPITAL LETTER TAU
+        0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
+        0x00ca: 0x00a0, # NO-BREAK SPACE
+        0x00cb: 0x03a5, # GREEK CAPITAL LETTER UPSILON
+        0x00cc: 0x03a7, # GREEK CAPITAL LETTER CHI
+        0x00cd: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
+        0x00ce: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
+        0x00cf: 0x0153, # LATIN SMALL LIGATURE OE
+        0x00d0: 0x2013, # EN DASH
+        0x00d1: 0x2015, # HORIZONTAL BAR
+        0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00d6: 0x00f7, # DIVISION SIGN
+        0x00d7: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
+        0x00d8: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
+        0x00d9: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
+        0x00da: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
+        0x00db: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
+        0x00dc: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
+        0x00dd: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
+        0x00de: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
+        0x00df: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
+        0x00e0: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
+        0x00e1: 0x03b1, # GREEK SMALL LETTER ALPHA
+        0x00e2: 0x03b2, # GREEK SMALL LETTER BETA
+        0x00e3: 0x03c8, # GREEK SMALL LETTER PSI
+        0x00e4: 0x03b4, # GREEK SMALL LETTER DELTA
+        0x00e5: 0x03b5, # GREEK SMALL LETTER EPSILON
+        0x00e6: 0x03c6, # GREEK SMALL LETTER PHI
+        0x00e7: 0x03b3, # GREEK SMALL LETTER GAMMA
+        0x00e8: 0x03b7, # GREEK SMALL LETTER ETA
+        0x00e9: 0x03b9, # GREEK SMALL LETTER IOTA
+        0x00ea: 0x03be, # GREEK SMALL LETTER XI
+        0x00eb: 0x03ba, # GREEK SMALL LETTER KAPPA
+        0x00ec: 0x03bb, # GREEK SMALL LETTER LAMBDA
+        0x00ed: 0x03bc, # GREEK SMALL LETTER MU
+        0x00ee: 0x03bd, # GREEK SMALL LETTER NU
+        0x00ef: 0x03bf, # GREEK SMALL LETTER OMICRON
+        0x00f0: 0x03c0, # GREEK SMALL LETTER PI
+        0x00f1: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
+        0x00f2: 0x03c1, # GREEK SMALL LETTER RHO
+        0x00f3: 0x03c3, # GREEK SMALL LETTER SIGMA
+        0x00f4: 0x03c4, # GREEK SMALL LETTER TAU
+        0x00f5: 0x03b8, # GREEK SMALL LETTER THETA
+        0x00f6: 0x03c9, # GREEK SMALL LETTER OMEGA
+        0x00f7: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
+        0x00f8: 0x03c7, # GREEK SMALL LETTER CHI
+        0x00f9: 0x03c5, # GREEK SMALL LETTER UPSILON
+        0x00fa: 0x03b6, # GREEK SMALL LETTER ZETA
+        0x00fb: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
+        0x00fc: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
+        0x00fd: 0x0390, # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
+        0x00fe: 0x03b0, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
+        0x00ff: None,   # UNDEFINED
+})
+
+### Encoding Map
+
+# Inverse of decoding_map: Unicode code point -> Mac OS Greek byte value.
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/mac_iceland.py b/depot_tools/release/win/python_24/Lib/encodings/mac_iceland.py
new file mode 100644
index 0000000..00bddf9f8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/mac_iceland.py
@@ -0,0 +1,166 @@
+""" Python Character Mapping Codec generated from 'ICELAND.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+# Map each Mac OS Icelandic byte value to its Unicode code point.  Bytes
+# not listed below decode to themselves (identity); None marks a byte that
+# is undefined in this encoding.  Table generated from Apple's ICELAND.TXT.
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0081: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0082: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0084: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x008b: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x008c: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x0093: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x0096: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x0098: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00a0: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
+        0x00a1: 0x00b0, # DEGREE SIGN
+        0x00a4: 0x00a7, # SECTION SIGN
+        0x00a5: 0x2022, # BULLET
+        0x00a6: 0x00b6, # PILCROW SIGN
+        0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00a8: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x2122, # TRADE MARK SIGN
+        0x00ab: 0x00b4, # ACUTE ACCENT
+        0x00ac: 0x00a8, # DIAERESIS
+        0x00ad: 0x2260, # NOT EQUAL TO
+        0x00ae: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x00af: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x00b0: 0x221e, # INFINITY
+        0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00b4: 0x00a5, # YEN SIGN
+        0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
+        0x00b7: 0x2211, # N-ARY SUMMATION
+        0x00b8: 0x220f, # N-ARY PRODUCT
+        0x00b9: 0x03c0, # GREEK SMALL LETTER PI
+        0x00ba: 0x222b, # INTEGRAL
+        0x00bb: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00bc: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00bd: 0x2126, # OHM SIGN
+        0x00be: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x00bf: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x00c0: 0x00bf, # INVERTED QUESTION MARK
+        0x00c1: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00c2: 0x00ac, # NOT SIGN
+        0x00c3: 0x221a, # SQUARE ROOT
+        0x00c4: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00c5: 0x2248, # ALMOST EQUAL TO
+        0x00c6: 0x2206, # INCREMENT
+        0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
+        0x00ca: 0x00a0, # NO-BREAK SPACE
+        0x00cb: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x00cc: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00ce: 0x0152, # LATIN CAPITAL LIGATURE OE
+        0x00cf: 0x0153, # LATIN SMALL LIGATURE OE
+        0x00d0: 0x2013, # EN DASH
+        0x00d1: 0x2014, # EM DASH
+        0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00d6: 0x00f7, # DIVISION SIGN
+        0x00d7: 0x25ca, # LOZENGE
+        0x00d8: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00d9: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
+        0x00da: 0x2044, # FRACTION SLASH
+        0x00db: 0x00a4, # CURRENCY SIGN
+        0x00dc: 0x00d0, # LATIN CAPITAL LETTER ETH
+        0x00dd: 0x00f0, # LATIN SMALL LETTER ETH
+        0x00df: 0x00fe, # LATIN SMALL LETTER THORN
+        0x00e0: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
+        0x00e1: 0x00b7, # MIDDLE DOT
+        0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK
+        0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x00e4: 0x2030, # PER MILLE SIGN
+        0x00e5: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x00e6: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00e8: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x00e9: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00eb: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00ec: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x00ed: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00f0: None,   # UNDEFINED
+        0x00f1: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00f3: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00f4: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00f5: 0x0131, # LATIN SMALL LETTER DOTLESS I
+        0x00f6: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
+        0x00f7: 0x02dc, # SMALL TILDE
+        0x00f8: 0x00af, # MACRON
+        0x00f9: 0x02d8, # BREVE
+        0x00fa: 0x02d9, # DOT ABOVE
+        0x00fb: 0x02da, # RING ABOVE
+        0x00fc: 0x00b8, # CEDILLA
+        0x00fd: 0x02dd, # DOUBLE ACUTE ACCENT
+        0x00fe: 0x02db, # OGONEK
+        0x00ff: 0x02c7, # CARON
+})
+
+### Encoding Map
+
+# Inverse of decoding_map: Unicode code point -> Mac OS Icelandic byte value.
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/mac_latin2.py b/depot_tools/release/win/python_24/Lib/encodings/mac_latin2.py
new file mode 100644
index 0000000..f5d5225
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/mac_latin2.py
@@ -0,0 +1,170 @@
+""" Python Character Mapping Codec generated from 'LATIN2.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+# Map each Mac OS Central European (Latin-2) byte value to its Unicode
+# code point.  Bytes not listed below decode to themselves (identity).
+# Table generated from Apple's LATIN2.TXT.
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0081: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
+        0x0082: 0x0101, # LATIN SMALL LETTER A WITH MACRON
+        0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0084: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
+        0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x0088: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
+        0x0089: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
+        0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x008b: 0x010d, # LATIN SMALL LETTER C WITH CARON
+        0x008c: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
+        0x008d: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
+        0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x008f: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
+        0x0090: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
+        0x0091: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
+        0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x0093: 0x010f, # LATIN SMALL LETTER D WITH CARON
+        0x0094: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
+        0x0095: 0x0113, # LATIN SMALL LETTER E WITH MACRON
+        0x0096: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
+        0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x0098: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
+        0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x009d: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
+        0x009e: 0x011b, # LATIN SMALL LETTER E WITH CARON
+        0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00a0: 0x2020, # DAGGER
+        0x00a1: 0x00b0, # DEGREE SIGN
+        0x00a2: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
+        0x00a4: 0x00a7, # SECTION SIGN
+        0x00a5: 0x2022, # BULLET
+        0x00a6: 0x00b6, # PILCROW SIGN
+        0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00a8: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x2122, # TRADE MARK SIGN
+        0x00ab: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
+        0x00ac: 0x00a8, # DIAERESIS
+        0x00ad: 0x2260, # NOT EQUAL TO
+        0x00ae: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
+        0x00af: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
+        0x00b0: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
+        0x00b1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
+        0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00b4: 0x012b, # LATIN SMALL LETTER I WITH MACRON
+        0x00b5: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
+        0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
+        0x00b7: 0x2211, # N-ARY SUMMATION
+        0x00b8: 0x0142, # LATIN SMALL LETTER L WITH STROKE
+        0x00b9: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
+        0x00ba: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
+        0x00bb: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
+        0x00bc: 0x013e, # LATIN SMALL LETTER L WITH CARON
+        0x00bd: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
+        0x00be: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
+        0x00bf: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
+        0x00c0: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
+        0x00c1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
+        0x00c2: 0x00ac, # NOT SIGN
+        0x00c3: 0x221a, # SQUARE ROOT
+        0x00c4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
+        0x00c5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
+        0x00c6: 0x2206, # INCREMENT
+        0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
+        0x00ca: 0x00a0, # NO-BREAK SPACE
+        0x00cb: 0x0148, # LATIN SMALL LETTER N WITH CARON
+        0x00cc: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
+        0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00ce: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
+        0x00cf: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
+        0x00d0: 0x2013, # EN DASH
+        0x00d1: 0x2014, # EM DASH
+        0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00d6: 0x00f7, # DIVISION SIGN
+        0x00d7: 0x25ca, # LOZENGE
+        0x00d8: 0x014d, # LATIN SMALL LETTER O WITH MACRON
+        0x00d9: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
+        0x00da: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
+        0x00db: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
+        0x00dc: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+        0x00dd: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+        0x00de: 0x0159, # LATIN SMALL LETTER R WITH CARON
+        0x00df: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
+        0x00e0: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
+        0x00e1: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
+        0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK
+        0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x00e4: 0x0161, # LATIN SMALL LETTER S WITH CARON
+        0x00e5: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
+        0x00e6: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
+        0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00e8: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
+        0x00e9: 0x0165, # LATIN SMALL LETTER T WITH CARON
+        0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00eb: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
+        0x00ec: 0x017e, # LATIN SMALL LETTER Z WITH CARON
+        0x00ed: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
+        0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00f0: 0x016b, # LATIN SMALL LETTER U WITH MACRON
+        0x00f1: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
+        0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00f3: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
+        0x00f4: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
+        0x00f5: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
+        0x00f6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
+        0x00f7: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
+        0x00f8: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
+        0x00f9: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
+        0x00fa: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
+        0x00fb: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
+        0x00fc: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
+        0x00fd: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
+        0x00fe: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
+        0x00ff: 0x02c7, # CARON
+})
+
+### Encoding Map
+
+# Inverse of decoding_map: Unicode code point -> Mac OS Latin-2 byte value.
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/mac_roman.py b/depot_tools/release/win/python_24/Lib/encodings/mac_roman.py
new file mode 100644
index 0000000..3f02ab11
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/mac_roman.py
@@ -0,0 +1,167 @@
+""" Python Character Mapping Codec generated from 'ROMAN.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+# Map each Mac OS Roman byte value to its Unicode code point.  Bytes not
+# listed below decode to themselves (identity); None marks a byte that is
+# undefined in this encoding.  Table generated from Apple's ROMAN.TXT.
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0081: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0082: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0084: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x008b: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x008c: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x0093: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x0096: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x0098: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00a0: 0x2020, # DAGGER
+        0x00a1: 0x00b0, # DEGREE SIGN
+        0x00a4: 0x00a7, # SECTION SIGN
+        0x00a5: 0x2022, # BULLET
+        0x00a6: 0x00b6, # PILCROW SIGN
+        0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00a8: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x2122, # TRADE MARK SIGN
+        0x00ab: 0x00b4, # ACUTE ACCENT
+        0x00ac: 0x00a8, # DIAERESIS
+        0x00ad: 0x2260, # NOT EQUAL TO
+        0x00ae: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x00af: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x00b0: 0x221e, # INFINITY
+        0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00b4: 0x00a5, # YEN SIGN
+        0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
+        0x00b7: 0x2211, # N-ARY SUMMATION
+        0x00b8: 0x220f, # N-ARY PRODUCT
+        0x00b9: 0x03c0, # GREEK SMALL LETTER PI
+        0x00ba: 0x222b, # INTEGRAL
+        0x00bb: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00bc: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00bd: 0x2126, # OHM SIGN
+        0x00be: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x00bf: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x00c0: 0x00bf, # INVERTED QUESTION MARK
+        0x00c1: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00c2: 0x00ac, # NOT SIGN
+        0x00c3: 0x221a, # SQUARE ROOT
+        0x00c4: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00c5: 0x2248, # ALMOST EQUAL TO
+        0x00c6: 0x2206, # INCREMENT
+        0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
+        0x00ca: 0x00a0, # NO-BREAK SPACE
+        0x00cb: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x00cc: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00ce: 0x0152, # LATIN CAPITAL LIGATURE OE
+        0x00cf: 0x0153, # LATIN SMALL LIGATURE OE
+        0x00d0: 0x2013, # EN DASH
+        0x00d1: 0x2014, # EM DASH
+        0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00d6: 0x00f7, # DIVISION SIGN
+        0x00d7: 0x25ca, # LOZENGE
+        0x00d8: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00d9: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
+        0x00da: 0x2044, # FRACTION SLASH
+        0x00db: 0x00a4, # CURRENCY SIGN
+        0x00dc: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+        0x00dd: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
+        0x00de: 0xfb01, # LATIN SMALL LIGATURE FI
+        0x00df: 0xfb02, # LATIN SMALL LIGATURE FL
+        0x00e0: 0x2021, # DOUBLE DAGGER
+        0x00e1: 0x00b7, # MIDDLE DOT
+        0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK
+        0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x00e4: 0x2030, # PER MILLE SIGN
+        0x00e5: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x00e6: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00e8: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x00e9: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00eb: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00ec: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x00ed: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00f0: None,   # UNDEFINED
+        0x00f1: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00f3: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00f4: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00f5: 0x0131, # LATIN SMALL LETTER DOTLESS I
+        0x00f6: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
+        0x00f7: 0x02dc, # SMALL TILDE
+        0x00f8: 0x00af, # MACRON
+        0x00f9: 0x02d8, # BREVE
+        0x00fa: 0x02d9, # DOT ABOVE
+        0x00fb: 0x02da, # RING ABOVE
+        0x00fc: 0x00b8, # CEDILLA
+        0x00fd: 0x02dd, # DOUBLE ACUTE ACCENT
+        0x00fe: 0x02db, # OGONEK
+        0x00ff: 0x02c7, # CARON
+})
+
+### Encoding Map
+
+# Inverse of decoding_map: Unicode code point -> Mac OS Roman byte value.
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/mac_turkish.py b/depot_tools/release/win/python_24/Lib/encodings/mac_turkish.py
new file mode 100644
index 0000000..7f66f50
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/mac_turkish.py
@@ -0,0 +1,167 @@
+""" Python Character Mapping Codec generated from 'TURKISH.TXT' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
+        0x0081: 0x00c5, # LATIN CAPITAL LETTER A WITH RING ABOVE
+        0x0082: 0x00c7, # LATIN CAPITAL LETTER C WITH CEDILLA
+        0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
+        0x0084: 0x00d1, # LATIN CAPITAL LETTER N WITH TILDE
+        0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
+        0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
+        0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
+        0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
+        0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
+        0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
+        0x008b: 0x00e3, # LATIN SMALL LETTER A WITH TILDE
+        0x008c: 0x00e5, # LATIN SMALL LETTER A WITH RING ABOVE
+        0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
+        0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
+        0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
+        0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
+        0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
+        0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
+        0x0093: 0x00ec, # LATIN SMALL LETTER I WITH GRAVE
+        0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
+        0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
+        0x0096: 0x00f1, # LATIN SMALL LETTER N WITH TILDE
+        0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
+        0x0098: 0x00f2, # LATIN SMALL LETTER O WITH GRAVE
+        0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
+        0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
+        0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
+        0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
+        0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
+        0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
+        0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
+        0x00a0: 0x2020, # DAGGER
+        0x00a1: 0x00b0, # DEGREE SIGN
+        0x00a4: 0x00a7, # SECTION SIGN
+        0x00a5: 0x2022, # BULLET
+        0x00a6: 0x00b6, # PILCROW SIGN
+        0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
+        0x00a8: 0x00ae, # REGISTERED SIGN
+        0x00aa: 0x2122, # TRADE MARK SIGN
+        0x00ab: 0x00b4, # ACUTE ACCENT
+        0x00ac: 0x00a8, # DIAERESIS
+        0x00ad: 0x2260, # NOT EQUAL TO
+        0x00ae: 0x00c6, # LATIN CAPITAL LIGATURE AE
+        0x00af: 0x00d8, # LATIN CAPITAL LETTER O WITH STROKE
+        0x00b0: 0x221e, # INFINITY
+        0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
+        0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
+        0x00b4: 0x00a5, # YEN SIGN
+        0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
+        0x00b7: 0x2211, # N-ARY SUMMATION
+        0x00b8: 0x220f, # N-ARY PRODUCT
+        0x00b9: 0x03c0, # GREEK SMALL LETTER PI
+        0x00ba: 0x222b, # INTEGRAL
+        0x00bb: 0x00aa, # FEMININE ORDINAL INDICATOR
+        0x00bc: 0x00ba, # MASCULINE ORDINAL INDICATOR
+        0x00bd: 0x2126, # OHM SIGN
+        0x00be: 0x00e6, # LATIN SMALL LIGATURE AE
+        0x00bf: 0x00f8, # LATIN SMALL LETTER O WITH STROKE
+        0x00c0: 0x00bf, # INVERTED QUESTION MARK
+        0x00c1: 0x00a1, # INVERTED EXCLAMATION MARK
+        0x00c2: 0x00ac, # NOT SIGN
+        0x00c3: 0x221a, # SQUARE ROOT
+        0x00c4: 0x0192, # LATIN SMALL LETTER F WITH HOOK
+        0x00c5: 0x2248, # ALMOST EQUAL TO
+        0x00c6: 0x2206, # INCREMENT
+        0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
+        0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
+        0x00ca: 0x00a0, # NO-BREAK SPACE
+        0x00cb: 0x00c0, # LATIN CAPITAL LETTER A WITH GRAVE
+        0x00cc: 0x00c3, # LATIN CAPITAL LETTER A WITH TILDE
+        0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
+        0x00ce: 0x0152, # LATIN CAPITAL LIGATURE OE
+        0x00cf: 0x0153, # LATIN SMALL LIGATURE OE
+        0x00d0: 0x2013, # EN DASH
+        0x00d1: 0x2014, # EM DASH
+        0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
+        0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
+        0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
+        0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
+        0x00d6: 0x00f7, # DIVISION SIGN
+        0x00d7: 0x25ca, # LOZENGE
+        0x00d8: 0x00ff, # LATIN SMALL LETTER Y WITH DIAERESIS
+        0x00d9: 0x0178, # LATIN CAPITAL LETTER Y WITH DIAERESIS
+        0x00da: 0x011e, # LATIN CAPITAL LETTER G WITH BREVE
+        0x00db: 0x011f, # LATIN SMALL LETTER G WITH BREVE
+        0x00dc: 0x0130, # LATIN CAPITAL LETTER I WITH DOT ABOVE
+        0x00dd: 0x0131, # LATIN SMALL LETTER DOTLESS I
+        0x00de: 0x015e, # LATIN CAPITAL LETTER S WITH CEDILLA
+        0x00df: 0x015f, # LATIN SMALL LETTER S WITH CEDILLA
+        0x00e0: 0x2021, # DOUBLE DAGGER
+        0x00e1: 0x00b7, # MIDDLE DOT
+        0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK
+        0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
+        0x00e4: 0x2030, # PER MILLE SIGN
+        0x00e5: 0x00c2, # LATIN CAPITAL LETTER A WITH CIRCUMFLEX
+        0x00e6: 0x00ca, # LATIN CAPITAL LETTER E WITH CIRCUMFLEX
+        0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
+        0x00e8: 0x00cb, # LATIN CAPITAL LETTER E WITH DIAERESIS
+        0x00e9: 0x00c8, # LATIN CAPITAL LETTER E WITH GRAVE
+        0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
+        0x00eb: 0x00ce, # LATIN CAPITAL LETTER I WITH CIRCUMFLEX
+        0x00ec: 0x00cf, # LATIN CAPITAL LETTER I WITH DIAERESIS
+        0x00ed: 0x00cc, # LATIN CAPITAL LETTER I WITH GRAVE
+        0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
+        0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
+        0x00f0: None,   # UNDEFINED
+        0x00f1: 0x00d2, # LATIN CAPITAL LETTER O WITH GRAVE
+        0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
+        0x00f3: 0x00db, # LATIN CAPITAL LETTER U WITH CIRCUMFLEX
+        0x00f4: 0x00d9, # LATIN CAPITAL LETTER U WITH GRAVE
+        0x00f5: None,   # UNDEFINED
+        0x00f6: 0x02c6, # MODIFIER LETTER CIRCUMFLEX ACCENT
+        0x00f7: 0x02dc, # SMALL TILDE
+        0x00f8: 0x00af, # MACRON
+        0x00f9: 0x02d8, # BREVE
+        0x00fa: 0x02d9, # DOT ABOVE
+        0x00fb: 0x02da, # RING ABOVE
+        0x00fc: 0x00b8, # CEDILLA
+        0x00fd: 0x02dd, # DOUBLE ACUTE ACCENT
+        0x00fe: 0x02db, # OGONEK
+        0x00ff: 0x02c7, # CARON
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/mbcs.py b/depot_tools/release/win/python_24/Lib/encodings/mbcs.py
new file mode 100644
index 0000000..c79f47c3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/mbcs.py
@@ -0,0 +1,36 @@
+""" Python 'mbcs' Codec for Windows
+
+
+Cloned by Mark Hammond (mhammond@skippinet.com.au) from ascii.py,
+which was written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.mbcs_encode
+    decode = codecs.mbcs_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+class StreamConverter(StreamWriter,StreamReader):
+
+    encode = codecs.mbcs_decode
+    decode = codecs.mbcs_encode
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/palmos.py b/depot_tools/release/win/python_24/Lib/encodings/palmos.py
new file mode 100644
index 0000000..c0f0606
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/palmos.py
@@ -0,0 +1,67 @@
+""" Python Character Mapping Codec for PalmOS 3.5.
+
+Written by Sjoerd Mullender (sjoerd@acm.org); based on iso8859_15.py.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+    def encode(self,input,errors='strict'):
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+
+# The PalmOS character set is mostly iso-8859-1 with some differences.
+decoding_map.update({
+        0x0080: 0x20ac, #       EURO SIGN
+        0x0082: 0x201a, #       SINGLE LOW-9 QUOTATION MARK
+        0x0083: 0x0192, #       LATIN SMALL LETTER F WITH HOOK
+        0x0084: 0x201e, #       DOUBLE LOW-9 QUOTATION MARK
+        0x0085: 0x2026, #       HORIZONTAL ELLIPSIS
+        0x0086: 0x2020, #       DAGGER
+        0x0087: 0x2021, #       DOUBLE DAGGER
+        0x0088: 0x02c6, #       MODIFIER LETTER CIRCUMFLEX ACCENT
+        0x0089: 0x2030, #       PER MILLE SIGN
+        0x008a: 0x0160, #       LATIN CAPITAL LETTER S WITH CARON
+        0x008b: 0x2039, #       SINGLE LEFT-POINTING ANGLE QUOTATION MARK
+        0x008c: 0x0152, #       LATIN CAPITAL LIGATURE OE
+        0x008d: 0x2666, #       BLACK DIAMOND SUIT
+        0x008e: 0x2663, #       BLACK CLUB SUIT
+        0x008f: 0x2665, #       BLACK HEART SUIT
+        0x0090: 0x2660, #       BLACK SPADE SUIT
+        0x0091: 0x2018, #       LEFT SINGLE QUOTATION MARK
+        0x0092: 0x2019, #       RIGHT SINGLE QUOTATION MARK
+        0x0093: 0x201c, #       LEFT DOUBLE QUOTATION MARK
+        0x0094: 0x201d, #       RIGHT DOUBLE QUOTATION MARK
+        0x0095: 0x2022, #       BULLET
+        0x0096: 0x2013, #       EN DASH
+        0x0097: 0x2014, #       EM DASH
+        0x0098: 0x02dc, #       SMALL TILDE
+        0x0099: 0x2122, #       TRADE MARK SIGN
+        0x009a: 0x0161, #       LATIN SMALL LETTER S WITH CARON
+        0x009c: 0x0153, #       LATIN SMALL LIGATURE OE
+        0x009f: 0x0178, #       LATIN CAPITAL LETTER Y WITH DIAERESIS
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/ptcp154.py b/depot_tools/release/win/python_24/Lib/encodings/ptcp154.py
new file mode 100644
index 0000000..5cdd98c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/ptcp154.py
@@ -0,0 +1,162 @@
+""" Python Character Mapping Codec generated from 'PTCP154.txt' with gencodec.py.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+(c) Copyright 2000 Guido van Rossum.
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+        0x0080: 0x0496, #        CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER
+        0x0081: 0x0492, #        CYRILLIC CAPITAL LETTER GHE WITH STROKE
+        0x0082: 0x04ee, #        CYRILLIC CAPITAL LETTER U WITH MACRON
+        0x0083: 0x0493, #        CYRILLIC SMALL LETTER GHE WITH STROKE
+        0x0084: 0x201e, #        DOUBLE LOW-9 QUOTATION MARK
+        0x0085: 0x2026, #        HORIZONTAL ELLIPSIS
+        0x0086: 0x04b6, #        CYRILLIC CAPITAL LETTER CHE WITH DESCENDER
+        0x0087: 0x04ae, #        CYRILLIC CAPITAL LETTER STRAIGHT U
+        0x0088: 0x04b2, #        CYRILLIC CAPITAL LETTER HA WITH DESCENDER
+        0x0089: 0x04af, #        CYRILLIC SMALL LETTER STRAIGHT U
+        0x008a: 0x04a0, #        CYRILLIC CAPITAL LETTER BASHKIR KA
+        0x008b: 0x04e2, #        CYRILLIC CAPITAL LETTER I WITH MACRON
+        0x008c: 0x04a2, #        CYRILLIC CAPITAL LETTER EN WITH DESCENDER
+        0x008d: 0x049a, #        CYRILLIC CAPITAL LETTER KA WITH DESCENDER
+        0x008e: 0x04ba, #        CYRILLIC CAPITAL LETTER SHHA
+        0x008f: 0x04b8, #        CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE
+        0x0090: 0x0497, #        CYRILLIC SMALL LETTER ZHE WITH DESCENDER
+        0x0091: 0x2018, #        LEFT SINGLE QUOTATION MARK
+        0x0092: 0x2019, #        RIGHT SINGLE QUOTATION MARK
+        0x0093: 0x201c, #        LEFT DOUBLE QUOTATION MARK
+        0x0094: 0x201d, #        RIGHT DOUBLE QUOTATION MARK
+        0x0095: 0x2022, #        BULLET
+        0x0096: 0x2013, #        EN DASH
+        0x0097: 0x2014, #        EM DASH
+        0x0098: 0x04b3, #        CYRILLIC SMALL LETTER HA WITH DESCENDER
+        0x0099: 0x04b7, #        CYRILLIC SMALL LETTER CHE WITH DESCENDER
+        0x009a: 0x04a1, #        CYRILLIC SMALL LETTER BASHKIR KA
+        0x009b: 0x04e3, #        CYRILLIC SMALL LETTER I WITH MACRON
+        0x009c: 0x04a3, #        CYRILLIC SMALL LETTER EN WITH DESCENDER
+        0x009d: 0x049b, #        CYRILLIC SMALL LETTER KA WITH DESCENDER
+        0x009e: 0x04bb, #        CYRILLIC SMALL LETTER SHHA
+        0x009f: 0x04b9, #        CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE
+        0x00a1: 0x040e, #        CYRILLIC CAPITAL LETTER SHORT U (Byelorussian)
+        0x00a2: 0x045e, #        CYRILLIC SMALL LETTER SHORT U (Byelorussian)
+        0x00a3: 0x0408, #        CYRILLIC CAPITAL LETTER JE
+        0x00a4: 0x04e8, #        CYRILLIC CAPITAL LETTER BARRED O
+        0x00a5: 0x0498, #        CYRILLIC CAPITAL LETTER ZE WITH DESCENDER
+        0x00a6: 0x04b0, #        CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE
+        0x00a8: 0x0401, #        CYRILLIC CAPITAL LETTER IO
+        0x00aa: 0x04d8, #        CYRILLIC CAPITAL LETTER SCHWA
+        0x00ad: 0x04ef, #        CYRILLIC SMALL LETTER U WITH MACRON
+        0x00af: 0x049c, #        CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE
+        0x00b1: 0x04b1, #        CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE
+        0x00b2: 0x0406, #        CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00b3: 0x0456, #        CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
+        0x00b4: 0x0499, #        CYRILLIC SMALL LETTER ZE WITH DESCENDER
+        0x00b5: 0x04e9, #        CYRILLIC SMALL LETTER BARRED O
+        0x00b8: 0x0451, #        CYRILLIC SMALL LETTER IO
+        0x00b9: 0x2116, #        NUMERO SIGN
+        0x00ba: 0x04d9, #        CYRILLIC SMALL LETTER SCHWA
+        0x00bc: 0x0458, #        CYRILLIC SMALL LETTER JE
+        0x00bd: 0x04aa, #        CYRILLIC CAPITAL LETTER ES WITH DESCENDER
+        0x00be: 0x04ab, #        CYRILLIC SMALL LETTER ES WITH DESCENDER
+        0x00bf: 0x049d, #        CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE
+        0x00c0: 0x0410, #        CYRILLIC CAPITAL LETTER A
+        0x00c1: 0x0411, #        CYRILLIC CAPITAL LETTER BE
+        0x00c2: 0x0412, #        CYRILLIC CAPITAL LETTER VE
+        0x00c3: 0x0413, #        CYRILLIC CAPITAL LETTER GHE
+        0x00c4: 0x0414, #        CYRILLIC CAPITAL LETTER DE
+        0x00c5: 0x0415, #        CYRILLIC CAPITAL LETTER IE
+        0x00c6: 0x0416, #        CYRILLIC CAPITAL LETTER ZHE
+        0x00c7: 0x0417, #        CYRILLIC CAPITAL LETTER ZE
+        0x00c8: 0x0418, #        CYRILLIC CAPITAL LETTER I
+        0x00c9: 0x0419, #        CYRILLIC CAPITAL LETTER SHORT I
+        0x00ca: 0x041a, #        CYRILLIC CAPITAL LETTER KA
+        0x00cb: 0x041b, #        CYRILLIC CAPITAL LETTER EL
+        0x00cc: 0x041c, #        CYRILLIC CAPITAL LETTER EM
+        0x00cd: 0x041d, #        CYRILLIC CAPITAL LETTER EN
+        0x00ce: 0x041e, #        CYRILLIC CAPITAL LETTER O
+        0x00cf: 0x041f, #        CYRILLIC CAPITAL LETTER PE
+        0x00d0: 0x0420, #        CYRILLIC CAPITAL LETTER ER
+        0x00d1: 0x0421, #        CYRILLIC CAPITAL LETTER ES
+        0x00d2: 0x0422, #        CYRILLIC CAPITAL LETTER TE
+        0x00d3: 0x0423, #        CYRILLIC CAPITAL LETTER U
+        0x00d4: 0x0424, #        CYRILLIC CAPITAL LETTER EF
+        0x00d5: 0x0425, #        CYRILLIC CAPITAL LETTER HA
+        0x00d6: 0x0426, #        CYRILLIC CAPITAL LETTER TSE
+        0x00d7: 0x0427, #        CYRILLIC CAPITAL LETTER CHE
+        0x00d8: 0x0428, #        CYRILLIC CAPITAL LETTER SHA
+        0x00d9: 0x0429, #        CYRILLIC CAPITAL LETTER SHCHA
+        0x00da: 0x042a, #        CYRILLIC CAPITAL LETTER HARD SIGN
+        0x00db: 0x042b, #        CYRILLIC CAPITAL LETTER YERU
+        0x00dc: 0x042c, #        CYRILLIC CAPITAL LETTER SOFT SIGN
+        0x00dd: 0x042d, #        CYRILLIC CAPITAL LETTER E
+        0x00de: 0x042e, #        CYRILLIC CAPITAL LETTER YU
+        0x00df: 0x042f, #        CYRILLIC CAPITAL LETTER YA
+        0x00e0: 0x0430, #        CYRILLIC SMALL LETTER A
+        0x00e1: 0x0431, #        CYRILLIC SMALL LETTER BE
+        0x00e2: 0x0432, #        CYRILLIC SMALL LETTER VE
+        0x00e3: 0x0433, #        CYRILLIC SMALL LETTER GHE
+        0x00e4: 0x0434, #        CYRILLIC SMALL LETTER DE
+        0x00e5: 0x0435, #        CYRILLIC SMALL LETTER IE
+        0x00e6: 0x0436, #        CYRILLIC SMALL LETTER ZHE
+        0x00e7: 0x0437, #        CYRILLIC SMALL LETTER ZE
+        0x00e8: 0x0438, #        CYRILLIC SMALL LETTER I
+        0x00e9: 0x0439, #        CYRILLIC SMALL LETTER SHORT I
+        0x00ea: 0x043a, #        CYRILLIC SMALL LETTER KA
+        0x00eb: 0x043b, #        CYRILLIC SMALL LETTER EL
+        0x00ec: 0x043c, #        CYRILLIC SMALL LETTER EM
+        0x00ed: 0x043d, #        CYRILLIC SMALL LETTER EN
+        0x00ee: 0x043e, #        CYRILLIC SMALL LETTER O
+        0x00ef: 0x043f, #        CYRILLIC SMALL LETTER PE
+        0x00f0: 0x0440, #        CYRILLIC SMALL LETTER ER
+        0x00f1: 0x0441, #        CYRILLIC SMALL LETTER ES
+        0x00f2: 0x0442, #        CYRILLIC SMALL LETTER TE
+        0x00f3: 0x0443, #        CYRILLIC SMALL LETTER U
+        0x00f4: 0x0444, #        CYRILLIC SMALL LETTER EF
+        0x00f5: 0x0445, #        CYRILLIC SMALL LETTER HA
+        0x00f6: 0x0446, #        CYRILLIC SMALL LETTER TSE
+        0x00f7: 0x0447, #        CYRILLIC SMALL LETTER CHE
+        0x00f8: 0x0448, #        CYRILLIC SMALL LETTER SHA
+        0x00f9: 0x0449, #        CYRILLIC SMALL LETTER SHCHA
+        0x00fa: 0x044a, #        CYRILLIC SMALL LETTER HARD SIGN
+        0x00fb: 0x044b, #        CYRILLIC SMALL LETTER YERU
+        0x00fc: 0x044c, #        CYRILLIC SMALL LETTER SOFT SIGN
+        0x00fd: 0x044d, #        CYRILLIC SMALL LETTER E
+        0x00fe: 0x044e, #        CYRILLIC SMALL LETTER YU
+        0x00ff: 0x044f, #        CYRILLIC SMALL LETTER YA
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/punycode.py b/depot_tools/release/win/python_24/Lib/encodings/punycode.py
new file mode 100644
index 0000000..82fd458
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/punycode.py
@@ -0,0 +1,222 @@
+# -*- coding: iso-8859-1 -*-
+""" Codec for the Punycode encoding, as specified in RFC 3492
+
+Written by Martin v. Löwis.
+"""
+
+import codecs
+
+##################### Encoding #####################################
+
+def segregate(str):
+    """3.1 Basic code point segregation"""
+    base = []
+    extended = {}
+    for c in str:
+        if ord(c) < 128:
+            base.append(c)
+        else:
+            extended[c] = 1
+    extended = extended.keys()
+    extended.sort()
+    return "".join(base).encode("ascii"),extended
+
+def selective_len(str, max):
+    """Return the length of str, considering only characters below max."""
+    res = 0
+    for c in str:
+        if ord(c) < max:
+            res += 1
+    return res
+
+def selective_find(str, char, index, pos):
+    """Return a pair (index, pos), indicating the next occurrence of
+    char in str. index is the position of the character considering
+    only ordinals up to and including char, and pos is the position in
+    the full string. index/pos is the starting position in the full
+    string."""
+
+    l = len(str)
+    while 1:
+        pos += 1
+        if pos == l:
+            return (-1, -1)
+        c = str[pos]
+        if c == char:
+            return index+1, pos
+        elif c < char:
+            index += 1
+
+def insertion_unsort(str, extended):
+    """3.2 Insertion unsort coding"""
+    oldchar = 0x80
+    result = []
+    oldindex = -1
+    for c in extended:
+        index = pos = -1
+        char = ord(c)
+        curlen = selective_len(str, char)
+        delta = (curlen+1) * (char - oldchar)
+        while 1:
+            index,pos = selective_find(str,c,index,pos)
+            if index == -1:
+                break
+            delta += index - oldindex
+            result.append(delta-1)
+            oldindex = index
+            delta = 0
+        oldchar = char
+
+    return result
+
+def T(j, bias):
+    # Punycode parameters: tmin = 1, tmax = 26, base = 36
+    res = 36 * (j + 1) - bias
+    if res < 1: return 1
+    if res > 26: return 26
+    return res
+
+digits = "abcdefghijklmnopqrstuvwxyz0123456789"
+def generate_generalized_integer(N, bias):
+    """3.3 Generalized variable-length integers"""
+    result = []
+    j = 0
+    while 1:
+        t = T(j, bias)
+        if N < t:
+            result.append(digits[N])
+            return result
+        result.append(digits[t + ((N - t) % (36 - t))])
+        N = (N - t) // (36 - t)
+        j += 1
+
+def adapt(delta, first, numchars):
+    if first:
+        delta //= 700
+    else:
+        delta //= 2
+    delta += delta // numchars
+    # ((base - tmin) * tmax) // 2 == 455
+    divisions = 0
+    while delta > 455:
+        delta = delta // 35 # base - tmin
+        divisions += 36
+    bias = divisions + (36 * delta // (delta + 38))
+    return bias
+
+
+def generate_integers(baselen, deltas):
+    """3.4 Bias adaptation"""
+    # Punycode parameters: initial bias = 72, damp = 700, skew = 38
+    result = []
+    bias = 72
+    for points, delta in enumerate(deltas):
+        s = generate_generalized_integer(delta, bias)
+        result.extend(s)
+        bias = adapt(delta, points==0, baselen+points+1)
+    return "".join(result)
+
+def punycode_encode(text):
+    base, extended = segregate(text)
+    base = base.encode("ascii")
+    deltas = insertion_unsort(text, extended)
+    extended = generate_integers(len(base), deltas)
+    if base:
+        return base + "-" + extended
+    return extended
+
+##################### Decoding #####################################
+
+def decode_generalized_number(extended, extpos, bias, errors):
+    """3.3 Generalized variable-length integers"""
+    result = 0
+    w = 1
+    j = 0
+    while 1:
+        try:
+            char = ord(extended[extpos])
+        except IndexError:
+            if errors == "strict":
+                raise UnicodeError, "incomplete punicode string"
+            return extpos + 1, None
+        extpos += 1
+        if 0x41 <= char <= 0x5A: # A-Z
+            digit = char - 0x41
+        elif 0x30 <= char <= 0x39:
+            digit = char - 22 # 0x30-26
+        elif errors == "strict":
+            raise UnicodeError("Invalid extended code point '%s'"
+                               % extended[extpos])
+        else:
+            return extpos, None
+        t = T(j, bias)
+        result += digit * w
+        if digit < t:
+            return extpos, result
+        w = w * (36 - t)
+        j += 1
+
+
+def insertion_sort(base, extended, errors):
+    """3.2 Insertion unsort coding"""
+    char = 0x80
+    pos = -1
+    bias = 72
+    extpos = 0
+    while extpos < len(extended):
+        newpos, delta = decode_generalized_number(extended, extpos,
+                                                  bias, errors)
+        if delta is None:
+            # There was an error in decoding. We can't continue because
+            # synchronization is lost.
+            return base
+        pos += delta+1
+        char += pos // (len(base) + 1)
+        if char > 0x10FFFF:
+            if errors == "strict":
+                raise UnicodeError, ("Invalid character U+%x" % char)
+            char = ord('?')
+        pos = pos % (len(base) + 1)
+        base = base[:pos] + unichr(char) + base[pos:]
+        bias = adapt(delta, (extpos == 0), len(base))
+        extpos = newpos
+    return base
+
+def punycode_decode(text, errors):
+    pos = text.rfind("-")
+    if pos == -1:
+        base = ""
+        extended = text
+    else:
+        base = text[:pos]
+        extended = text[pos+1:]
+    base = unicode(base, "ascii", errors)
+    extended = extended.upper()
+    return insertion_sort(base, extended, errors)
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+    def encode(self,input,errors='strict'):
+
+        res = punycode_encode(input)
+        return res, len(input)
+
+    def decode(self,input,errors='strict'):
+
+        if errors not in ('strict', 'replace', 'ignore'):
+            raise UnicodeError, "Unsupported error handling "+errors
+        res = punycode_decode(input, errors)
+        return res, len(input)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/quopri_codec.py b/depot_tools/release/win/python_24/Lib/encodings/quopri_codec.py
new file mode 100644
index 0000000..d98b5ed0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/quopri_codec.py
@@ -0,0 +1,58 @@
+"""Codec for quoted-printable encoding.
+
+Like base64 and rot13, this returns Python strings, not Unicode.
+"""
+
+import codecs, quopri
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+def quopri_encode(input, errors='strict'):
+    """Encode the input, returning a tuple (output object, length consumed).
+
+    errors defines the error handling to apply. It defaults to
+    'strict' handling which is the only currently supported
+    error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    f = StringIO(input)
+    g = StringIO()
+    quopri.encode(f, g, 1)
+    output = g.getvalue()
+    return (output, len(input))
+
+def quopri_decode(input, errors='strict'):
+    """Decode the input, returning a tuple (output object, length consumed).
+
+    errors defines the error handling to apply. It defaults to
+    'strict' handling which is the only currently supported
+    error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    f = StringIO(input)
+    g = StringIO()
+    quopri.decode(f, g)
+    output = g.getvalue()
+    return (output, len(input))
+
+class Codec(codecs.Codec):
+
+    def encode(self, input,errors='strict'):
+        return quopri_encode(input,errors)
+    def decode(self, input,errors='strict'):
+        return quopri_decode(input,errors)
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+# encodings module API
+
+def getregentry():
+    return (quopri_encode, quopri_decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/raw_unicode_escape.py b/depot_tools/release/win/python_24/Lib/encodings/raw_unicode_escape.py
new file mode 100644
index 0000000..a2f3fffa
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/raw_unicode_escape.py
@@ -0,0 +1,30 @@
+""" Python 'raw-unicode-escape' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.raw_unicode_escape_encode
+    decode = codecs.raw_unicode_escape_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/rot_13.py b/depot_tools/release/win/python_24/Lib/encodings/rot_13.py
new file mode 100644
index 0000000..c628181
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/rot_13.py
@@ -0,0 +1,105 @@
+#!/usr/bin/env python
+""" Python Character Mapping Codec for ROT13.
+
+    See http://ucsub.colorado.edu/~kominek/rot13/ for details.
+
+    Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+"""#"
+
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = codecs.make_identity_dict(range(256))
+decoding_map.update({
+   0x0041: 0x004e,
+   0x0042: 0x004f,
+   0x0043: 0x0050,
+   0x0044: 0x0051,
+   0x0045: 0x0052,
+   0x0046: 0x0053,
+   0x0047: 0x0054,
+   0x0048: 0x0055,
+   0x0049: 0x0056,
+   0x004a: 0x0057,
+   0x004b: 0x0058,
+   0x004c: 0x0059,
+   0x004d: 0x005a,
+   0x004e: 0x0041,
+   0x004f: 0x0042,
+   0x0050: 0x0043,
+   0x0051: 0x0044,
+   0x0052: 0x0045,
+   0x0053: 0x0046,
+   0x0054: 0x0047,
+   0x0055: 0x0048,
+   0x0056: 0x0049,
+   0x0057: 0x004a,
+   0x0058: 0x004b,
+   0x0059: 0x004c,
+   0x005a: 0x004d,
+   0x0061: 0x006e,
+   0x0062: 0x006f,
+   0x0063: 0x0070,
+   0x0064: 0x0071,
+   0x0065: 0x0072,
+   0x0066: 0x0073,
+   0x0067: 0x0074,
+   0x0068: 0x0075,
+   0x0069: 0x0076,
+   0x006a: 0x0077,
+   0x006b: 0x0078,
+   0x006c: 0x0079,
+   0x006d: 0x007a,
+   0x006e: 0x0061,
+   0x006f: 0x0062,
+   0x0070: 0x0063,
+   0x0071: 0x0064,
+   0x0072: 0x0065,
+   0x0073: 0x0066,
+   0x0074: 0x0067,
+   0x0075: 0x0068,
+   0x0076: 0x0069,
+   0x0077: 0x006a,
+   0x0078: 0x006b,
+   0x0079: 0x006c,
+   0x007a: 0x006d,
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
+
+### Filter API
+
+def rot13(infile, outfile):
+    outfile.write(infile.read().encode('rot-13'))
+
+if __name__ == '__main__':
+    import sys
+    rot13(sys.stdin, sys.stdout)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/shift_jis.py b/depot_tools/release/win/python_24/Lib/encodings/shift_jis.py
new file mode 100644
index 0000000..ec5e517
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/shift_jis.py
@@ -0,0 +1,34 @@
+#
+# shift_jis.py: Python Unicode Codec for SHIFT_JIS
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: shift_jis.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+# NOTE(review): vendored Python 2.4 stdlib module — keep byte-identical
+# to upstream.
+
+import _codecs_jp, codecs
+
+# C codec object that implements the actual SHIFT_JIS conversion.
+codec = _codecs_jp.getcodec('shift_jis')
+
+class Codec(codecs.Codec):
+    # Bound C functions from the codec object (not Python methods).
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        # Delegate all stream reading to a per-instance C stream reader
+        # by rebinding its bound methods onto this instance.
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        # Same delegation pattern as StreamReader, for writing.
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registry entry: (encoder, decoder, stream reader, stream writer).
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/shift_jis_2004.py b/depot_tools/release/win/python_24/Lib/encodings/shift_jis_2004.py
new file mode 100644
index 0000000..446cd7cce
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/shift_jis_2004.py
@@ -0,0 +1,34 @@
+#
+# shift_jis_2004.py: Python Unicode Codec for SHIFT_JIS_2004
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: shift_jis_2004.py,v 1.1 2004/07/07 16:18:25 perky Exp $
+#
+# NOTE(review): vendored Python 2.4 stdlib module — keep byte-identical
+# to upstream.
+
+import _codecs_jp, codecs
+
+# C codec object that implements the actual SHIFT_JIS_2004 conversion.
+codec = _codecs_jp.getcodec('shift_jis_2004')
+
+class Codec(codecs.Codec):
+    # Bound C functions from the codec object (not Python methods).
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        # Delegate all stream reading to a per-instance C stream reader
+        # by rebinding its bound methods onto this instance.
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        # Same delegation pattern as StreamReader, for writing.
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registry entry: (encoder, decoder, stream reader, stream writer).
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/shift_jisx0213.py b/depot_tools/release/win/python_24/Lib/encodings/shift_jisx0213.py
new file mode 100644
index 0000000..495468b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/shift_jisx0213.py
@@ -0,0 +1,34 @@
+#
+# shift_jisx0213.py: Python Unicode Codec for SHIFT_JISX0213
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+# $CJKCodecs: shift_jisx0213.py,v 1.8 2004/06/28 18:16:03 perky Exp $
+#
+# NOTE(review): vendored Python 2.4 stdlib module — keep byte-identical
+# to upstream.
+
+import _codecs_jp, codecs
+
+# C codec object that implements the actual SHIFT_JISX0213 conversion.
+codec = _codecs_jp.getcodec('shift_jisx0213')
+
+class Codec(codecs.Codec):
+    # Bound C functions from the codec object (not Python methods).
+    encode = codec.encode
+    decode = codec.decode
+
+class StreamReader(Codec, codecs.StreamReader):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamReader.__init__(self, stream, errors)
+        # Delegate all stream reading to a per-instance C stream reader
+        # by rebinding its bound methods onto this instance.
+        __codec = codec.StreamReader(stream, errors)
+        self.read = __codec.read
+        self.readline = __codec.readline
+        self.readlines = __codec.readlines
+        self.reset = __codec.reset
+
+class StreamWriter(Codec, codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        codecs.StreamWriter.__init__(self, stream, errors)
+        # Same delegation pattern as StreamReader, for writing.
+        __codec = codec.StreamWriter(stream, errors)
+        self.write = __codec.write
+        self.writelines = __codec.writelines
+        self.reset = __codec.reset
+
+def getregentry():
+    # Registry entry: (encoder, decoder, stream reader, stream writer).
+    return (codec.encode, codec.decode, StreamReader, StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/string_escape.py b/depot_tools/release/win/python_24/Lib/encodings/string_escape.py
new file mode 100644
index 0000000..c02bfee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/string_escape.py
@@ -0,0 +1,23 @@
+# -*- coding: iso-8859-1 -*-
+""" Python 'escape' Codec
+
+
+Written by Martin v. Löwis (martin@v.loewis.de).
+
+"""
+import codecs
+
+class Codec(codecs.Codec):
+
+    # Bound C functions; they stay plain functions rather than being
+    # converted to methods.
+    encode = codecs.escape_encode
+    decode = codecs.escape_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+def getregentry():
+
+    # Registry entry: (encoder, decoder, stream reader, stream writer).
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/tis_620.py b/depot_tools/release/win/python_24/Lib/encodings/tis_620.py
new file mode 100644
index 0000000..6e1229e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/tis_620.py
@@ -0,0 +1,46 @@
+""" Python Character Mapping Codec for TIS-620.
+
+    According to
+    ftp://ftp.unicode.org/Public/MAPPINGS/ISO8859/8859-11.TXT the
+    TIS-620 is the identical to ISO_8859-11 with the 0xA0 (no-break
+    space) mapping removed.
+
+"""#"
+
+import codecs
+from encodings.iso8859_11 import decoding_map
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+
+        return codecs.charmap_encode(input,errors,encoding_map)
+
+    def decode(self,input,errors='strict'):
+
+        return codecs.charmap_decode(input,errors,decoding_map)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
+
+### Decoding Map
+
+decoding_map = decoding_map.copy()
+decoding_map.update({
+        0x00a0: None,
+})
+
+### Encoding Map
+
+encoding_map = codecs.make_encoding_map(decoding_map)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/undefined.py b/depot_tools/release/win/python_24/Lib/encodings/undefined.py
new file mode 100644
index 0000000..d2277ac
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/undefined.py
@@ -0,0 +1,34 @@
+""" Python 'undefined' Codec
+
+    This codec will always raise a ValueError exception when being
+    used. It is intended for use by the site.py file to switch off
+    automatic string to Unicode coercion.
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+        raise UnicodeError, "undefined encoding"
+
+    def decode(self,input,errors='strict'):
+        raise UnicodeError, "undefined encoding"
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec().encode,Codec().decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/unicode_escape.py b/depot_tools/release/win/python_24/Lib/encodings/unicode_escape.py
new file mode 100644
index 0000000..8fb6293
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/unicode_escape.py
@@ -0,0 +1,30 @@
+""" Python 'unicode-escape' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.unicode_escape_encode
+    decode = codecs.unicode_escape_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/unicode_internal.py b/depot_tools/release/win/python_24/Lib/encodings/unicode_internal.py
new file mode 100644
index 0000000..3bd2fa09
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/unicode_internal.py
@@ -0,0 +1,30 @@
+""" Python 'unicode-internal' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.unicode_internal_encode
+    decode = codecs.unicode_internal_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/utf_16.py b/depot_tools/release/win/python_24/Lib/encodings/utf_16.py
new file mode 100644
index 0000000..95abb05
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/utf_16.py
@@ -0,0 +1,56 @@
+""" Python 'utf-16' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs, sys
+
+### Codec APIs
+
+encode = codecs.utf_16_encode
+
+def decode(input, errors='strict'):
+    return codecs.utf_16_decode(input, errors, True)
+
+class StreamWriter(codecs.StreamWriter):
+    def __init__(self, stream, errors='strict'):
+        self.bom_written = False
+        codecs.StreamWriter.__init__(self, stream, errors)
+
+    def encode(self, input, errors='strict'):
+        self.bom_written = True
+        result = codecs.utf_16_encode(input, errors)
+        if sys.byteorder == 'little':
+            self.encode = codecs.utf_16_le_encode
+        else:
+            self.encode = codecs.utf_16_be_encode
+        return result
+
+class StreamReader(codecs.StreamReader):
+
+    def reset(self):
+        codecs.StreamReader.reset(self)
+        try:
+            del self.decode
+        except AttributeError:
+            pass
+
+    def decode(self, input, errors='strict'):
+        (object, consumed, byteorder) = \
+            codecs.utf_16_ex_decode(input, errors, 0, False)
+        if byteorder == -1:
+            self.decode = codecs.utf_16_le_decode
+        elif byteorder == 1:
+            self.decode = codecs.utf_16_be_decode
+        elif consumed>=2:
+            raise UnicodeError,"UTF-16 stream does not start with BOM"
+        return (object, consumed)
+
+### encodings module API
+
+def getregentry():
+
+    return (encode,decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/utf_16_be.py b/depot_tools/release/win/python_24/Lib/encodings/utf_16_be.py
new file mode 100644
index 0000000..9a51f8c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/utf_16_be.py
@@ -0,0 +1,28 @@
+""" Python 'utf-16-be' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+encode = codecs.utf_16_be_encode
+
+def decode(input, errors='strict'):
+    return codecs.utf_16_be_decode(input, errors, True)
+
+class StreamWriter(codecs.StreamWriter):
+    encode = codecs.utf_16_be_encode
+
+class StreamReader(codecs.StreamReader):
+    decode = codecs.utf_16_be_decode
+
+### encodings module API
+
+def getregentry():
+
+    return (encode,decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/utf_16_le.py b/depot_tools/release/win/python_24/Lib/encodings/utf_16_le.py
new file mode 100644
index 0000000..95ca8304
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/utf_16_le.py
@@ -0,0 +1,29 @@
+""" Python 'utf-16-le' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+encode = codecs.utf_16_le_encode
+
+def decode(input, errors='strict'):
+    return codecs.utf_16_le_decode(input, errors, True)
+
+class StreamWriter(codecs.StreamWriter):
+    encode = codecs.utf_16_le_encode
+
+class StreamReader(codecs.StreamReader):
+    decode = codecs.utf_16_le_decode
+
+
+### encodings module API
+
+def getregentry():
+
+    return (encode,decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/utf_7.py b/depot_tools/release/win/python_24/Lib/encodings/utf_7.py
new file mode 100644
index 0000000..ee78d09e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/utf_7.py
@@ -0,0 +1,26 @@
+""" Python 'utf-7' Codec
+
+Written by Brian Quinlan (brian@sweetapp.com).
+"""
+import codecs
+
+### Codec APIs
+
+class Codec(codecs.Codec):
+
+    # Note: Binding these as C functions will result in the class not
+    # converting them to methods. This is intended.
+    encode = codecs.utf_7_encode
+    decode = codecs.utf_7_decode
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (Codec.encode,Codec.decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/utf_8.py b/depot_tools/release/win/python_24/Lib/encodings/utf_8.py
new file mode 100644
index 0000000..9cb0b4b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/utf_8.py
@@ -0,0 +1,28 @@
+""" Python 'utf-8' Codec
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
+
+"""
+import codecs
+
+### Codec APIs
+
+encode = codecs.utf_8_encode
+
+def decode(input, errors='strict'):
+    return codecs.utf_8_decode(input, errors, True)
+
+class StreamWriter(codecs.StreamWriter):
+    encode = codecs.utf_8_encode
+
+class StreamReader(codecs.StreamReader):
+    decode = codecs.utf_8_decode
+
+### encodings module API
+
+def getregentry():
+
+    return (encode,decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/uu_codec.py b/depot_tools/release/win/python_24/Lib/encodings/uu_codec.py
new file mode 100644
index 0000000..a70ff9e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/uu_codec.py
@@ -0,0 +1,112 @@
+""" Python 'uu_codec' Codec - UU content transfer encoding
+
+    Unlike most of the other codecs which target Unicode, this codec
+    will return Python string objects for both encode and decode.
+
+    Written by Marc-Andre Lemburg (mal@lemburg.com). Some details were
+    adapted from uu.py which was written by Lance Ellinghouse and
+    modified by Jack Jansen and Fredrik Lundh.
+
+"""
+import codecs, binascii
+
+### Codec APIs
+
+def uu_encode(input,errors='strict',filename='<data>',mode=0666):
+
+    """ Encodes the object input and returns a tuple (output
+        object, length consumed).
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    from cStringIO import StringIO
+    from binascii import b2a_uu
+    infile = StringIO(input)
+    outfile = StringIO()
+    read = infile.read
+    write = outfile.write
+
+    # Encode
+    write('begin %o %s\n' % (mode & 0777, filename))
+    chunk = read(45)
+    while chunk:
+        write(b2a_uu(chunk))
+        chunk = read(45)
+    write(' \nend\n')
+
+    return (outfile.getvalue(), len(input))
+
+def uu_decode(input,errors='strict'):
+
+    """ Decodes the object input and returns a tuple (output
+        object, length consumed).
+
+        input must be an object which provides the bf_getreadbuf
+        buffer slot. Python strings, buffer objects and memory
+        mapped files are examples of objects providing this slot.
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+        Note: filename and file mode information in the input data is
+        ignored.
+
+    """
+    assert errors == 'strict'
+    from cStringIO import StringIO
+    from binascii import a2b_uu
+    infile = StringIO(input)
+    outfile = StringIO()
+    readline = infile.readline
+    write = outfile.write
+
+    # Find start of encoded data
+    while 1:
+        s = readline()
+        if not s:
+            raise ValueError, 'Missing "begin" line in input data'
+        if s[:5] == 'begin':
+            break
+
+    # Decode
+    while 1:
+        s = readline()
+        if not s or \
+           s == 'end\n':
+            break
+        try:
+            data = a2b_uu(s)
+        except binascii.Error, v:
+            # Workaround for broken uuencoders by /Fredrik Lundh
+            nbytes = (((ord(s[0])-32) & 63) * 4 + 5) / 3
+            data = a2b_uu(s[:nbytes])
+            #sys.stderr.write("Warning: %s\n" % str(v))
+        write(data)
+    if not s:
+        raise ValueError, 'Truncated input data'
+
+    return (outfile.getvalue(), len(input))
+
+class Codec(codecs.Codec):
+
+    def encode(self,input,errors='strict'):
+        return uu_encode(input,errors)
+    def decode(self,input,errors='strict'):
+        return uu_decode(input,errors)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (uu_encode,uu_decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/encodings/zlib_codec.py b/depot_tools/release/win/python_24/Lib/encodings/zlib_codec.py
new file mode 100644
index 0000000..9b6e4d1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/encodings/zlib_codec.py
@@ -0,0 +1,63 @@
+""" Python 'zlib_codec' Codec - zlib compression encoding
+
+    Unlike most of the other codecs which target Unicode, this codec
+    will return Python string objects for both encode and decode.
+
+    Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+"""
+import codecs
+import zlib # this codec needs the optional zlib module !
+
+### Codec APIs
+
+def zlib_encode(input,errors='strict'):
+
+    """ Encodes the object input and returns a tuple (output
+        object, length consumed).
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = zlib.compress(input)
+    return (output, len(input))
+
+def zlib_decode(input,errors='strict'):
+
+    """ Decodes the object input and returns a tuple (output
+        object, length consumed).
+
+        input must be an object which provides the bf_getreadbuf
+        buffer slot. Python strings, buffer objects and memory
+        mapped files are examples of objects providing this slot.
+
+        errors defines the error handling to apply. It defaults to
+        'strict' handling which is the only currently supported
+        error handling for this codec.
+
+    """
+    assert errors == 'strict'
+    output = zlib.decompress(input)
+    return (output, len(input))
+
+class Codec(codecs.Codec):
+
+    def encode(self, input, errors='strict'):
+        return zlib_encode(input, errors)
+    def decode(self, input, errors='strict'):
+        return zlib_decode(input, errors)
+
+class StreamWriter(Codec,codecs.StreamWriter):
+    pass
+
+class StreamReader(Codec,codecs.StreamReader):
+    pass
+
+### encodings module API
+
+def getregentry():
+
+    return (zlib_encode,zlib_decode,StreamReader,StreamWriter)
diff --git a/depot_tools/release/win/python_24/Lib/filecmp.py b/depot_tools/release/win/python_24/Lib/filecmp.py
new file mode 100644
index 0000000..089c6674
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/filecmp.py
@@ -0,0 +1,306 @@
+"""Utilities for comparing files and directories.
+
+Classes:
+    dircmp
+
+Functions:
+    cmp(f1, f2, shallow=1) -> int
+    cmpfiles(a, b, common) -> ([], [], [])
+
+"""
+
+import os
+import stat
+import warnings
+from itertools import ifilter, ifilterfalse, imap, izip
+
+__all__ = ["cmp","dircmp","cmpfiles"]
+
+_cache = {}
+BUFSIZE=8*1024
+
+def cmp(f1, f2, shallow=1, use_statcache=None):
+    """Compare two files.
+
+    Arguments:
+
+    f1 -- First file name
+
+    f2 -- Second file name
+
+    shallow -- Just check stat signature (do not read the files).
+               defaults to 1.
+
+    use_statcache -- obsolete argument.
+
+    Return value:
+
+    True if the files are the same, False otherwise.
+
+    This function uses a cache for past comparisons and the results,
+    with a cache invalidation mechanism relying on stale signatures.
+
+    """
+    if use_statcache is not None:
+        warnings.warn("use_statcache argument is deprecated",
+                      DeprecationWarning)
+
+    s1 = _sig(os.stat(f1))
+    s2 = _sig(os.stat(f2))
+    if s1[0] != stat.S_IFREG or s2[0] != stat.S_IFREG:
+        return False
+    if shallow and s1 == s2:
+        return True
+    if s1[1] != s2[1]:
+        return False
+
+    result = _cache.get((f1, f2))
+    if result and (s1, s2) == result[:2]:
+        return result[2]
+    outcome = _do_cmp(f1, f2)
+    _cache[f1, f2] = s1, s2, outcome
+    return outcome
+
+def _sig(st):
+    return (stat.S_IFMT(st.st_mode),
+            st.st_size,
+            st.st_mtime)
+
+def _do_cmp(f1, f2):
+    bufsize = BUFSIZE
+    fp1 = open(f1, 'rb')
+    fp2 = open(f2, 'rb')
+    while True:
+        b1 = fp1.read(bufsize)
+        b2 = fp2.read(bufsize)
+        if b1 != b2:
+            return False
+        if not b1:
+            return True
+
+# Directory comparison class.
+#
+class dircmp:
+    """A class that manages the comparison of 2 directories.
+
+    dircmp(a,b,ignore=None,hide=None)
+      A and B are directories.
+      IGNORE is a list of names to ignore,
+        defaults to ['RCS', 'CVS', 'tags'].
+      HIDE is a list of names to hide,
+        defaults to [os.curdir, os.pardir].
+
+    High level usage:
+      x = dircmp(dir1, dir2)
+      x.report() -> prints a report on the differences between dir1 and dir2
+       or
+      x.report_partial_closure() -> prints report on differences between dir1
+            and dir2, and reports on common immediate subdirectories.
+      x.report_full_closure() -> like report_partial_closure,
+            but fully recursive.
+
+    Attributes:
+     left_list, right_list: The files in dir1 and dir2,
+        filtered by hide and ignore.
+     common: a list of names in both dir1 and dir2.
+     left_only, right_only: names only in dir1, dir2.
+     common_dirs: subdirectories in both dir1 and dir2.
+     common_files: files in both dir1 and dir2.
+     common_funny: names in both dir1 and dir2 where the type differs between
+        dir1 and dir2, or the name is not stat-able.
+     same_files: list of identical files.
+     diff_files: list of filenames which differ.
+     funny_files: list of files which could not be compared.
+     subdirs: a dictionary of dircmp objects, keyed by names in common_dirs.
+     """
+
+    def __init__(self, a, b, ignore=None, hide=None): # Initialize
+        self.left = a
+        self.right = b
+        if hide is None:
+            self.hide = [os.curdir, os.pardir] # Names never to be shown
+        else:
+            self.hide = hide
+        if ignore is None:
+            self.ignore = ['RCS', 'CVS', 'tags'] # Names ignored in comparison
+        else:
+            self.ignore = ignore
+
+    def phase0(self): # Compare everything except common subdirectories
+        self.left_list = _filter(os.listdir(self.left),
+                                 self.hide+self.ignore)
+        self.right_list = _filter(os.listdir(self.right),
+                                  self.hide+self.ignore)
+        self.left_list.sort()
+        self.right_list.sort()
+
+    def phase1(self): # Compute common names
+        a = dict(izip(imap(os.path.normcase, self.left_list), self.left_list))
+        b = dict(izip(imap(os.path.normcase, self.right_list), self.right_list))
+        self.common = map(a.__getitem__, ifilter(b.has_key, a))
+        self.left_only = map(a.__getitem__, ifilterfalse(b.has_key, a))
+        self.right_only = map(b.__getitem__, ifilterfalse(a.has_key, b))
+
+    def phase2(self): # Distinguish files, directories, funnies
+        self.common_dirs = []
+        self.common_files = []
+        self.common_funny = []
+
+        for x in self.common:
+            a_path = os.path.join(self.left, x)
+            b_path = os.path.join(self.right, x)
+
+            ok = 1
+            try:
+                a_stat = os.stat(a_path)
+            except os.error, why:
+                # print 'Can\'t stat', a_path, ':', why[1]
+                ok = 0
+            try:
+                b_stat = os.stat(b_path)
+            except os.error, why:
+                # print 'Can\'t stat', b_path, ':', why[1]
+                ok = 0
+
+            if ok:
+                a_type = stat.S_IFMT(a_stat.st_mode)
+                b_type = stat.S_IFMT(b_stat.st_mode)
+                if a_type != b_type:
+                    self.common_funny.append(x)
+                elif stat.S_ISDIR(a_type):
+                    self.common_dirs.append(x)
+                elif stat.S_ISREG(a_type):
+                    self.common_files.append(x)
+                else:
+                    self.common_funny.append(x)
+            else:
+                self.common_funny.append(x)
+
+    def phase3(self): # Find out differences between common files
+        xx = cmpfiles(self.left, self.right, self.common_files)
+        self.same_files, self.diff_files, self.funny_files = xx
+
+    def phase4(self): # Find out differences between common subdirectories
+        # A new dircmp object is created for each common subdirectory,
+        # these are stored in a dictionary indexed by filename.
+        # The hide and ignore properties are inherited from the parent
+        self.subdirs = {}
+        for x in self.common_dirs:
+            a_x = os.path.join(self.left, x)
+            b_x = os.path.join(self.right, x)
+            self.subdirs[x]  = dircmp(a_x, b_x, self.ignore, self.hide)
+
+    def phase4_closure(self): # Recursively call phase4() on subdirectories
+        self.phase4()
+        for sd in self.subdirs.itervalues():
+            sd.phase4_closure()
+
+    def report(self): # Print a report on the differences between a and b
+        # Output format is purposely lousy
+        print 'diff', self.left, self.right
+        if self.left_only:
+            self.left_only.sort()
+            print 'Only in', self.left, ':', self.left_only
+        if self.right_only:
+            self.right_only.sort()
+            print 'Only in', self.right, ':', self.right_only
+        if self.same_files:
+            self.same_files.sort()
+            print 'Identical files :', self.same_files
+        if self.diff_files:
+            self.diff_files.sort()
+            print 'Differing files :', self.diff_files
+        if self.funny_files:
+            self.funny_files.sort()
+            print 'Trouble with common files :', self.funny_files
+        if self.common_dirs:
+            self.common_dirs.sort()
+            print 'Common subdirectories :', self.common_dirs
+        if self.common_funny:
+            self.common_funny.sort()
+            print 'Common funny cases :', self.common_funny
+
+    def report_partial_closure(self): # Print reports on self and on subdirs
+        self.report()
+        for sd in self.subdirs.itervalues():
+            print
+            sd.report()
+
+    def report_full_closure(self): # Report on self and subdirs recursively
+        self.report()
+        for sd in self.subdirs.itervalues():
+            print
+            sd.report_full_closure()
+
+    methodmap = dict(subdirs=phase4,
+                     same_files=phase3, diff_files=phase3, funny_files=phase3,
+                     common_dirs = phase2, common_files=phase2, common_funny=phase2,
+                     common=phase1, left_only=phase1, right_only=phase1,
+                     left_list=phase0, right_list=phase0)
+
+    def __getattr__(self, attr):
+        if attr not in self.methodmap:
+            raise AttributeError, attr
+        self.methodmap[attr](self)
+        return getattr(self, attr)
+
+def cmpfiles(a, b, common, shallow=1, use_statcache=None):
+    """Compare common files in two directories.
+
+    a, b -- directory names
+    common -- list of file names found in both directories
+    shallow -- if true, do comparison based solely on stat() information
+    use_statcache -- obsolete argument
+
+    Returns a tuple of three lists:
+      files that compare equal
+      files that are different
+      filenames that aren't regular files.
+
+    """
+    if use_statcache is not None:
+        warnings.warn("use_statcache argument is deprecated",
+                      DeprecationWarning)
+    res = ([], [], [])
+    for x in common:
+        ax = os.path.join(a, x)
+        bx = os.path.join(b, x)
+        res[_cmp(ax, bx, shallow)].append(x)
+    return res
+
+
+# Compare two files.
+# Return:
+#       0 for equal
+#       1 for different
+#       2 for funny cases (can't stat, etc.)
+#
+def _cmp(a, b, sh, abs=abs, cmp=cmp):
+    try:
+        return not abs(cmp(a, b, sh))
+    except os.error:
+        return 2
+
+
+# Return a copy with items that occur in skip removed.
+#
+def _filter(flist, skip):
+    return list(ifilterfalse(skip.__contains__, flist))
+
+
+# Demonstration and testing.
+#
+def demo():
+    import sys
+    import getopt
+    options, args = getopt.getopt(sys.argv[1:], 'r')
+    if len(args) != 2:
+        raise getopt.GetoptError('need exactly two args', None)
+    dd = dircmp(args[0], args[1])
+    if ('-r', '') in options:
+        dd.report_full_closure()
+    else:
+        dd.report()
+
+if __name__ == '__main__':
+    demo()
diff --git a/depot_tools/release/win/python_24/Lib/fileinput.py b/depot_tools/release/win/python_24/Lib/fileinput.py
new file mode 100644
index 0000000..27ccc3b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/fileinput.py
@@ -0,0 +1,359 @@
+"""Helper class to quickly write a loop over all standard input files.
+
+Typical use is:
+
+    import fileinput
+    for line in fileinput.input():
+        process(line)
+
+This iterates over the lines of all files listed in sys.argv[1:],
+defaulting to sys.stdin if the list is empty.  If a filename is '-' it
+is also replaced by sys.stdin.  To specify an alternative list of
+filenames, pass it as the argument to input().  A single file name is
+also allowed.
+
+Functions filename(), lineno() return the filename and cumulative line
+number of the line that has just been read; filelineno() returns its
+line number in the current file; isfirstline() returns true iff the
+line just read is the first line of its file; isstdin() returns true
+iff the line was read from sys.stdin.  Function nextfile() closes the
+current file so that the next iteration will read the first line from
+the next file (if any); lines not read from the file will not count
+towards the cumulative line count; the filename is not changed until
+after the first line of the next file has been read.  Function close()
+closes the sequence.
+
+Before any lines have been read, filename() returns None and both line
+numbers are zero; nextfile() has no effect.  After all lines have been
+read, filename() and the line number functions return the values
+pertaining to the last line read; nextfile() has no effect.
+
+All files are opened in text mode.  If an I/O error occurs during
+opening or reading a file, the IOError exception is raised.
+
+If sys.stdin is used more than once, the second and further use will
+return no lines, except perhaps for interactive use, or if it has been
+explicitly reset (e.g. using sys.stdin.seek(0)).
+
+Empty files are opened and immediately closed; the only time their
+presence in the list of filenames is noticeable at all is when the
+last file opened is empty.
+
+It is possible that the last line of a file doesn't end in a newline
+character; otherwise lines are returned including the trailing
+newline.
+
+Class FileInput is the implementation; its methods filename(),
+lineno(), filelineno(), isfirstline(), isstdin(), nextfile() and close()
+correspond to the functions in the module.  In addition it has a
+readline() method which returns the next input line, and a
+__getitem__() method which implements the sequence behavior.  The
+sequence must be accessed in strictly sequential order; sequence
+access and readline() cannot be mixed.
+
+Optional in-place filtering: if the keyword argument inplace=1 is
+passed to input() or to the FileInput constructor, the file is moved
+to a backup file and standard output is directed to the input file.
+This makes it possible to write a filter that rewrites its input file
+in place.  If the keyword argument backup=".<some extension>" is also
+given, it specifies the extension for the backup file, and the backup
+file remains around; by default, the extension is ".bak" and it is
+deleted when the output file is closed.  In-place filtering is
+disabled when standard input is read.  XXX The current implementation
+does not work for MS-DOS 8+3 filesystems.
+
+Performance: this module is unfortunately one of the slower ways of
+processing large numbers of input lines.  Nevertheless, a significant
+speed-up has been obtained by using readlines(bufsize) instead of
+readline().  A new keyword argument, bufsize=N, is present on the
+input() function and the FileInput() class to override the default
+buffer size.
+
+XXX Possible additions:
+
+- optional getopt argument processing
+- specify open mode ('r' or 'rb')
+- fileno()
+- isatty()
+- read(), read(size), even readlines()
+
+"""
+
+import sys, os
+
__all__ = ["input","close","nextfile","filename","lineno","filelineno",
           "isfirstline","isstdin","FileInput"]

# Module-wide FileInput instance shared by the convenience functions below.
_state = None

# Default hint passed to readlines(); overridable via the bufsize arguments.
DEFAULT_BUFSIZE = 8*1024
+
def input(files=None, inplace=0, backup="", bufsize=0):
    """input([files[, inplace[, backup]]])

    Create an instance of the FileInput class. The instance will be used
    as global state for the functions of this module, and is also returned
    to use during iteration. The parameters to this function will be passed
    along to the constructor of the FileInput class.
    """
    global _state
    # Refuse to clobber a sequence that is still reading; close() or
    # exhaust it first.
    if _state and _state._file:
        raise RuntimeError, "input() already active"
    _state = FileInput(files, inplace, backup, bufsize)
    return _state
+
def close():
    """Close the sequence."""
    global _state
    # Drop the module-level reference before closing so a failure in
    # close() still leaves the module ready for a fresh input().
    previous, _state = _state, None
    if previous:
        previous.close()
+
def nextfile():
    """
    Close the current file so that the next iteration will read the first
    line from the next file (if any); lines not read from the file will
    not count towards the cumulative line count. The filename is not
    changed until after the first line of the next file has been read.
    Before the first line has been read, this function has no effect;
    it cannot be used to skip the first file. After the last line of the
    last file has been read, this function has no effect.
    """
    # Delegates to the module-wide FileInput created by input().
    if not _state:
        raise RuntimeError, "no active input()"
    return _state.nextfile()
+
def filename():
    """
    Return the name of the file currently being read.
    Before the first line has been read, returns None.
    """
    # Delegates to the module-wide FileInput created by input().
    if not _state:
        raise RuntimeError, "no active input()"
    return _state.filename()
+
def lineno():
    """
    Return the cumulative line number of the line that has just been read.
    Before the first line has been read, returns 0. After the last line
    of the last file has been read, returns the line number of that line.
    """
    # Delegates to the module-wide FileInput created by input().
    if not _state:
        raise RuntimeError, "no active input()"
    return _state.lineno()
+
def filelineno():
    """
    Return the line number in the current file. Before the first line
    has been read, returns 0. After the last line of the last file has
    been read, returns the line number of that line within the file.
    """
    # Delegates to the module-wide FileInput created by input().
    if not _state:
        raise RuntimeError, "no active input()"
    return _state.filelineno()
+
def isfirstline():
    """
    Returns true the line just read is the first line of its file,
    otherwise returns false.
    """
    # Delegates to the module-wide FileInput created by input().
    if not _state:
        raise RuntimeError, "no active input()"
    return _state.isfirstline()
+
def isstdin():
    """
    Returns true if the last line was read from sys.stdin,
    otherwise returns false.
    """
    # Delegates to the module-wide FileInput created by input().
    if not _state:
        raise RuntimeError, "no active input()"
    return _state.isstdin()
+
class FileInput:
    """class FileInput([files[, inplace[, backup]]])

    Class FileInput is the implementation of the module; its methods
    filename(), lineno(), filelineno(), isfirstline(), isstdin(), nextfile()
    and close() correspond to the functions of the same name in the module.
    In addition it has a readline() method which returns the next
    input line, and a __getitem__() method which implements the
    sequence behavior. The sequence must be accessed in strictly
    sequential order; random access and readline() cannot be mixed.
    """

    def __init__(self, files=None, inplace=0, backup="", bufsize=0):
        # Accept a single filename, a sequence of names, or None
        # (meaning sys.argv[1:], falling back to stdin via '-').
        if type(files) == type(''):
            files = (files,)
        else:
            if files is None:
                files = sys.argv[1:]
            if not files:
                files = ('-',)
            else:
                files = tuple(files)
        self._files = files
        self._inplace = inplace
        self._backup = backup
        self._bufsize = bufsize or DEFAULT_BUFSIZE
        self._savestdout = None        # real sys.stdout while in-place filtering
        self._output = None            # replacement stdout in in-place mode
        self._filename = None          # name of the file currently being read
        self._lineno = 0               # cumulative line number
        self._filelineno = 0           # line number within the current file
        self._file = None              # currently open input file object
        self._isstdin = False          # true while reading sys.stdin
        self._backupfilename = None    # backup name used by in-place mode
        self._buffer = []              # readlines() buffer
        self._bufindex = 0             # index of the next line in _buffer

    def __del__(self):
        self.close()

    def close(self):
        # Finish the current file and forget any remaining ones.
        self.nextfile()
        self._files = ()

    def __iter__(self):
        return self

    def next(self):
        # Fast path: serve the next line straight from the buffer.
        try:
            line = self._buffer[self._bufindex]
        except IndexError:
            pass
        else:
            self._bufindex += 1
            self._lineno += 1
            self._filelineno += 1
            return line
        # Buffer exhausted; readline() refills it (and may switch files).
        line = self.readline()
        if not line:
            raise StopIteration
        return line

    def __getitem__(self, i):
        # Legacy sequence protocol; only strictly sequential access works.
        if i != self._lineno:
            raise RuntimeError, "accessing lines out of order"
        try:
            return self.next()
        except StopIteration:
            raise IndexError, "end of input reached"

    def nextfile(self):
        # Restore stdout if it was redirected for in-place filtering.
        savestdout = self._savestdout
        self._savestdout = 0
        if savestdout:
            sys.stdout = savestdout

        output = self._output
        self._output = 0
        if output:
            output.close()

        # Never close sys.stdin; it may be reused by a later '-' entry.
        file = self._file
        self._file = 0
        if file and not self._isstdin:
            file.close()

        # Remove the backup file unless the caller asked to keep it.
        backupfilename = self._backupfilename
        self._backupfilename = 0
        if backupfilename and not self._backup:
            try: os.unlink(backupfilename)
            except OSError: pass

        self._isstdin = False
        self._buffer = []
        self._bufindex = 0

    def readline(self):
        # Serve from the buffer when possible.
        try:
            line = self._buffer[self._bufindex]
        except IndexError:
            pass
        else:
            self._bufindex += 1
            self._lineno += 1
            self._filelineno += 1
            return line
        if not self._file:
            if not self._files:
                # All files exhausted; "" signals end of input.
                return ""
            self._filename = self._files[0]
            self._files = self._files[1:]
            self._filelineno = 0
            self._file = None
            self._isstdin = False
            self._backupfilename = 0
            if self._filename == '-':
                self._filename = '<stdin>'
                self._file = sys.stdin
                self._isstdin = True
            else:
                if self._inplace:
                    # Move the input aside and write the new contents to
                    # the original name via redirected stdout.
                    self._backupfilename = (
                        self._filename + (self._backup or os.extsep+"bak"))
                    try: os.unlink(self._backupfilename)
                    except os.error: pass
                    # The next few lines may raise IOError
                    os.rename(self._filename, self._backupfilename)
                    self._file = open(self._backupfilename, "r")
                    try:
                        perm = os.fstat(self._file.fileno()).st_mode
                    except OSError:
                        self._output = open(self._filename, "w")
                    else:
                        # Recreate the output file with the original's
                        # permission bits where the platform allows it.
                        fd = os.open(self._filename,
                                     os.O_CREAT | os.O_WRONLY | os.O_TRUNC,
                                     perm)
                        self._output = os.fdopen(fd, "w")
                        try:
                            if hasattr(os, 'chmod'):
                                os.chmod(self._filename, perm)
                        except OSError:
                            pass
                    self._savestdout = sys.stdout
                    sys.stdout = self._output
                else:
                    # This may raise IOError
                    self._file = open(self._filename, "r")
        self._buffer = self._file.readlines(self._bufsize)
        self._bufindex = 0
        if not self._buffer:
            # Current file is exhausted; close it and try the next one.
            self.nextfile()
        # Recursive call
        return self.readline()

    def filename(self):
        # Name of the file currently being read (None before first line).
        return self._filename

    def lineno(self):
        # Cumulative line number of the last line read.
        return self._lineno

    def filelineno(self):
        # Line number of the last line read, within the current file.
        return self._filelineno

    def isfirstline(self):
        # True iff the last line read was the first of its file.
        return self._filelineno == 1

    def isstdin(self):
        # True iff the last line was read from sys.stdin.
        return self._isstdin
+
def _test():
    """Exercise the module from the command line.

    Options: -i      filter the named files in place;
             -b EXT  keep backup files with suffix EXT.
    Prints each line prefixed with cumulative/per-file line numbers.
    """
    import getopt
    inplace = 0
    backup = 0
    opts, args = getopt.getopt(sys.argv[1:], "ib:")
    for o, a in opts:
        if o == '-i': inplace = 1
        if o == '-b': backup = a
    for line in input(args, inplace=inplace, backup=backup):
        # Strip a trailing \n and/or \r before echoing.
        if line[-1:] == '\n': line = line[:-1]
        if line[-1:] == '\r': line = line[:-1]
        print "%d: %s[%d]%s %s" % (lineno(), filename(), filelineno(),
                                   isfirstline() and "*" or "", line)
    print "%d: %s[%d]" % (lineno(), filename(), filelineno())
+
# Run the self-test when executed as a script.
if __name__ == '__main__':
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/fnmatch.py b/depot_tools/release/win/python_24/Lib/fnmatch.py
new file mode 100644
index 0000000..3bf2463
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/fnmatch.py
@@ -0,0 +1,107 @@
+"""Filename matching with shell patterns.
+
+fnmatch(FILENAME, PATTERN) matches according to the local convention.
+fnmatchcase(FILENAME, PATTERN) always takes case into account.
+
+The functions operate by translating the pattern into a regular
+expression.  They cache the compiled regular expressions for speed.
+
+The function translate(PATTERN) returns a regular expression
+corresponding to PATTERN.  (It does not compile it.)
+"""
+
+import re
+
__all__ = ["filter", "fnmatch","fnmatchcase","translate"]

# Cache of compiled patterns, shared by fnmatchcase() and filter().
_cache = {}
+
def fnmatch(name, pat):
    """Test whether FILENAME matches PATTERN.

    Patterns are Unix shell style:

    *       matches everything
    ?       matches any single character
    [seq]   matches any character in seq
    [!seq]  matches any char not in seq

    An initial period in FILENAME is not special.
    Both FILENAME and PATTERN are first case-normalized
    if the operating system requires it.
    If you don't want this, use fnmatchcase(FILENAME, PATTERN).
    """

    # os is imported lazily to keep module import light.
    import os
    return fnmatchcase(os.path.normcase(name), os.path.normcase(pat))
+
def filter(names, pat):
    """Return the subset of the list NAMES that match PAT"""
    import os, posixpath
    pat = os.path.normcase(pat)
    if pat not in _cache:
        _cache[pat] = re.compile(translate(pat))
    match = _cache[pat].match
    if os.path is posixpath:
        # normcase on posix is a no-op, so skip it inside the loop.
        return [name for name in names if match(name)]
    return [name for name in names if match(os.path.normcase(name))]
+
def fnmatchcase(name, pat):
    """Test whether FILENAME matches PATTERN, including case.

    This is a version of fnmatch() which doesn't case-normalize
    its arguments.
    """

    # Compile on first use; subsequent calls hit the module cache.
    try:
        regex = _cache[pat]
    except KeyError:
        regex = _cache[pat] = re.compile(translate(pat))
    return regex.match(name) is not None
+
def translate(pat):
    """Translate a shell PATTERN to a regular expression.

    There is no way to quote meta-characters.
    """

    parts = []
    i, n = 0, len(pat)
    while i < n:
        c = pat[i]
        i = i + 1
        if c == '*':
            parts.append('.*')
        elif c == '?':
            parts.append('.')
        elif c == '[':
            # Find the closing bracket, skipping a leading '!' and a
            # ']' that appears as the first (literal) set member.
            j = i
            if j < n and pat[j] == '!':
                j = j + 1
            if j < n and pat[j] == ']':
                j = j + 1
            while j < n and pat[j] != ']':
                j = j + 1
            if j >= n:
                # Unterminated set: treat the '[' as a literal.
                parts.append('\\[')
            else:
                stuff = pat[i:j].replace('\\', '\\\\')
                i = j + 1
                if stuff[0] == '!':
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                parts.append('[%s]' % stuff)
        else:
            parts.append(re.escape(c))
    return ''.join(parts) + '$'
diff --git a/depot_tools/release/win/python_24/Lib/formatter.py b/depot_tools/release/win/python_24/Lib/formatter.py
new file mode 100644
index 0000000..109d66c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/formatter.py
@@ -0,0 +1,449 @@
+"""Generic output formatting.
+
+Formatter objects transform an abstract flow of formatting events into
+specific output events on writer objects. Formatters manage several stack
+structures to allow various properties of a writer object to be changed and
+restored; writers need not be able to handle relative changes nor any sort
+of ``change back'' operation. Specific writer properties which may be
+controlled via formatter objects are horizontal alignment, font, and left
+margin indentations. A mechanism is provided which supports providing
+arbitrary, non-exclusive style settings to a writer as well. Additional
+interfaces facilitate formatting events which are not reversible, such as
+paragraph separation.
+
+Writer objects encapsulate device interfaces. Abstract devices, such as
+file formats, are supported as well as physical devices. The provided
+implementations all work with abstract devices. The interface makes
+available mechanisms for setting the properties which formatter objects
+manage and inserting data into the output.
+"""
+
+import sys
+
+
# Sentinel meaning "leave this property unchanged" in push_font() components.
AS_IS = None
+
+
class NullFormatter:
    """A formatter which does nothing.

    If the writer parameter is omitted, a NullWriter instance is created.
    No methods of the writer are called by NullFormatter instances.

    Implementations should inherit from this class if implementing a writer
    interface but don't need to inherit any implementation.

    """

    def __init__(self, writer=None):
        if writer is None:
            writer = NullWriter()
        self.writer = writer

    def end_paragraph(self, blankline):
        pass

    def add_line_break(self):
        pass

    def add_hor_rule(self, *args, **kw):
        pass

    def add_label_data(self, format, counter, blankline=None):
        pass

    def add_flowing_data(self, data):
        pass

    def add_literal_data(self, data):
        pass

    def flush_softspace(self):
        pass

    def push_alignment(self, align):
        pass

    def pop_alignment(self):
        pass

    def push_font(self, x):
        pass

    def pop_font(self):
        pass

    def push_margin(self, margin):
        pass

    def pop_margin(self):
        pass

    def set_spacing(self, spacing):
        pass

    def push_style(self, *styles):
        pass

    def pop_style(self, n=1):
        pass

    def assert_line_data(self, flag=1):
        pass
+
+
class AbstractFormatter:
    """The standard formatter.

    This implementation has demonstrated wide applicability to many writers,
    and may be used directly in most circumstances.  It has been used to
    implement a full-featured World Wide Web browser.

    """

    #  Space handling policy:  blank spaces at the boundary between elements
    #  are handled by the outermost context.  "Literal" data is not checked
    #  to determine context, so spaces in literal data are handled directly
    #  in all circumstances.

    def __init__(self, writer):
        self.writer = writer            # Output device
        self.align = None               # Current alignment
        self.align_stack = []           # Alignment stack
        self.font_stack = []            # Font state
        self.margin_stack = []          # Margin state
        self.spacing = None             # Vertical spacing state
        self.style_stack = []           # Other state, e.g. color
        self.nospace = 1                # Should leading space be suppressed
        self.softspace = 0              # Should a space be inserted
        self.para_end = 1               # Just ended a paragraph
        self.parskip = 0                # Skipped space between paragraphs?
        self.hard_break = 1             # Have a hard break
        self.have_label = 0

    def end_paragraph(self, blankline):
        # Emit at most `blankline` blank lines, counting paragraph space
        # already skipped (self.parskip) so repeats do not accumulate.
        if not self.hard_break:
            self.writer.send_line_break()
            self.have_label = 0
        if self.parskip < blankline and not self.have_label:
            self.writer.send_paragraph(blankline - self.parskip)
            self.parskip = blankline
            self.have_label = 0
        self.hard_break = self.nospace = self.para_end = 1
        self.softspace = 0

    def add_line_break(self):
        # Collapses consecutive breaks: no-op right after a break/paragraph.
        if not (self.hard_break or self.para_end):
            self.writer.send_line_break()
            self.have_label = self.parskip = 0
        self.hard_break = self.nospace = 1
        self.softspace = 0

    def add_hor_rule(self, *args, **kw):
        # Extra arguments are passed through to the writer untouched.
        if not self.hard_break:
            self.writer.send_line_break()
        self.writer.send_hor_rule(*args, **kw)
        self.hard_break = self.nospace = 1
        self.have_label = self.para_end = self.softspace = self.parskip = 0

    def add_label_data(self, format, counter, blankline = None):
        if self.have_label or not self.hard_break:
            self.writer.send_line_break()
        if not self.para_end:
            self.writer.send_paragraph((blankline and 1) or 0)
        if isinstance(format, str):
            # A string format is expanded against the counter value.
            self.writer.send_label_data(self.format_counter(format, counter))
        else:
            self.writer.send_label_data(format)
        self.nospace = self.have_label = self.hard_break = self.para_end = 1
        self.softspace = self.parskip = 0

    def format_counter(self, format, counter):
        # '1' -> decimal, 'a'/'A' -> letters, 'i'/'I' -> roman numerals;
        # any other character in the format is copied through literally.
        label = ''
        for c in format:
            if c == '1':
                label = label + ('%d' % counter)
            elif c in 'aA':
                if counter > 0:
                    label = label + self.format_letter(c, counter)
            elif c in 'iI':
                if counter > 0:
                    label = label + self.format_roman(c, counter)
            else:
                label = label + c
        return label

    def format_letter(self, case, counter):
        # Spreadsheet-style base-26: 1->a, 26->z, 27->aa, ...
        label = ''
        while counter > 0:
            counter, x = divmod(counter-1, 26)
            # This makes a strong assumption that lowercase letters
            # and uppercase letters form two contiguous blocks, with
            # letters in order!
            s = chr(ord(case) + x)
            label = s + label
        return label

    def format_roman(self, case, counter):
        ones = ['i', 'x', 'c', 'm']
        fives = ['v', 'l', 'd']
        label, index = '', 0
        # This will die of IndexError when counter is too big
        while counter > 0:
            counter, x = divmod(counter, 10)
            if x == 9:
                label = ones[index] + ones[index+1] + label
            elif x == 4:
                label = ones[index] + fives[index] + label
            else:
                if x >= 5:
                    s = fives[index]
                    x = x-5
                else:
                    s = ''
                s = s + ones[index]*x
                label = s + label
            index = index + 1
        if case == 'I':
            return label.upper()
        return label

    def add_flowing_data(self, data):
        if not data: return
        # The following looks a bit convoluted but is a great improvement over
        # data = regsub.gsub('[' + string.whitespace + ']+', ' ', data)
        prespace = data[:1].isspace()
        postspace = data[-1:].isspace()
        data = " ".join(data.split())
        if self.nospace and not data:
            return
        elif prespace or self.softspace:
            if not data:
                if not self.nospace:
                    self.softspace = 1
                    self.parskip = 0
                return
            if not self.nospace:
                data = ' ' + data
        self.hard_break = self.nospace = self.para_end = \
                          self.parskip = self.have_label = 0
        # Trailing whitespace becomes a pending soft space.
        self.softspace = postspace
        self.writer.send_flowing_data(data)

    def add_literal_data(self, data):
        if not data: return
        if self.softspace:
            self.writer.send_flowing_data(" ")
        self.hard_break = data[-1:] == '\n'
        self.nospace = self.para_end = self.softspace = \
                       self.parskip = self.have_label = 0
        self.writer.send_literal_data(data)

    def flush_softspace(self):
        # Emit any pending word-separating space immediately.
        if self.softspace:
            self.hard_break = self.para_end = self.parskip = \
                              self.have_label = self.softspace = 0
            self.nospace = 1
            self.writer.send_flowing_data(' ')

    def push_alignment(self, align):
        if align and align != self.align:
            self.writer.new_alignment(align)
            self.align = align
            self.align_stack.append(align)
        else:
            self.align_stack.append(self.align)

    def pop_alignment(self):
        if self.align_stack:
            del self.align_stack[-1]
        if self.align_stack:
            self.align = align = self.align_stack[-1]
            self.writer.new_alignment(align)
        else:
            self.align = None
            self.writer.new_alignment(None)

    # NOTE: Python 2 tuple-parameter unpacking in the signature below.
    # AS_IS components inherit their value from the current font.
    def push_font(self, (size, i, b, tt)):
        if self.softspace:
            self.hard_break = self.para_end = self.softspace = 0
            self.nospace = 1
            self.writer.send_flowing_data(' ')
        if self.font_stack:
            csize, ci, cb, ctt = self.font_stack[-1]
            if size is AS_IS: size = csize
            if i is AS_IS: i = ci
            if b is AS_IS: b = cb
            if tt is AS_IS: tt = ctt
        font = (size, i, b, tt)
        self.font_stack.append(font)
        self.writer.new_font(font)

    def pop_font(self):
        if self.font_stack:
            del self.font_stack[-1]
        if self.font_stack:
            font = self.font_stack[-1]
        else:
            font = None
        self.writer.new_font(font)

    def push_margin(self, margin):
        self.margin_stack.append(margin)
        # The level reported to the writer counts only true margins.
        fstack = filter(None, self.margin_stack)
        if not margin and fstack:
            margin = fstack[-1]
        self.writer.new_margin(margin, len(fstack))

    def pop_margin(self):
        if self.margin_stack:
            del self.margin_stack[-1]
        fstack = filter(None, self.margin_stack)
        if fstack:
            margin = fstack[-1]
        else:
            margin = None
        self.writer.new_margin(margin, len(fstack))

    def set_spacing(self, spacing):
        self.spacing = spacing
        self.writer.new_spacing(spacing)

    def push_style(self, *styles):
        if self.softspace:
            self.hard_break = self.para_end = self.softspace = 0
            self.nospace = 1
            self.writer.send_flowing_data(' ')
        for style in styles:
            self.style_stack.append(style)
        self.writer.new_styles(tuple(self.style_stack))

    def pop_style(self, n=1):
        del self.style_stack[-n:]
        self.writer.new_styles(tuple(self.style_stack))

    def assert_line_data(self, flag=1):
        # Caller asserts data was written directly to the writer.
        self.nospace = self.hard_break = not flag
        self.para_end = self.parskip = self.have_label = 0
+
+
class NullWriter:
    """Minimal writer interface to use in testing & inheritance.

    A writer which only provides the interface definition; no actions are
    taken on any methods.  This should be the base class for all writers
    which do not need to inherit any implementation methods.

    """

    def __init__(self):
        pass

    def flush(self):
        pass

    def new_alignment(self, align):
        pass

    def new_font(self, font):
        pass

    def new_margin(self, margin, level):
        pass

    def new_spacing(self, spacing):
        pass

    def new_styles(self, styles):
        pass

    def send_paragraph(self, blankline):
        pass

    def send_line_break(self):
        pass

    def send_hor_rule(self, *args, **kw):
        pass

    def send_label_data(self, data):
        pass

    def send_flowing_data(self, data):
        pass

    def send_literal_data(self, data):
        pass
+
+
class AbstractWriter(NullWriter):
    """A writer which can be used in debugging formatters, but not much else.

    Each method simply announces itself by printing its name and
    arguments on standard output.

    """

    # Every override just echoes its call with %r-formatted arguments;
    # useful for tracing the event stream a formatter produces.

    def new_alignment(self, align):
        print "new_alignment(%r)" % (align,)

    def new_font(self, font):
        print "new_font(%r)" % (font,)

    def new_margin(self, margin, level):
        print "new_margin(%r, %d)" % (margin, level)

    def new_spacing(self, spacing):
        print "new_spacing(%r)" % (spacing,)

    def new_styles(self, styles):
        print "new_styles(%r)" % (styles,)

    def send_paragraph(self, blankline):
        print "send_paragraph(%r)" % (blankline,)

    def send_line_break(self):
        print "send_line_break()"

    def send_hor_rule(self, *args, **kw):
        print "send_hor_rule()"

    def send_label_data(self, data):
        print "send_label_data(%r)" % (data,)

    def send_flowing_data(self, data):
        print "send_flowing_data(%r)" % (data,)

    def send_literal_data(self, data):
        print "send_literal_data(%r)" % (data,)
+
+
class DumbWriter(NullWriter):
    """Simple writer class which writes output on the file object passed in
    as the file parameter or, if file is omitted, on standard output.  The
    output is simply word-wrapped to the number of columns specified by
    the maxcol parameter.  This class is suitable for reflowing a sequence
    of paragraphs.

    """

    def __init__(self, file=None, maxcol=72):
        self.file = file or sys.stdout
        self.maxcol = maxcol
        NullWriter.__init__(self)
        self.reset()

    def reset(self):
        # col: current output column; atbreak: whether a separating space
        # is pending before the next word of flowing data.
        self.col = 0
        self.atbreak = 0

    def send_paragraph(self, blankline):
        self.file.write('\n'*blankline)
        self.col = 0
        self.atbreak = 0

    def send_line_break(self):
        self.file.write('\n')
        self.col = 0
        self.atbreak = 0

    def send_hor_rule(self, *args, **kw):
        # Extra arguments are accepted but ignored.
        self.file.write('\n')
        self.file.write('-'*self.maxcol)
        self.file.write('\n')
        self.col = 0
        self.atbreak = 0

    def send_literal_data(self, data):
        self.file.write(data)
        # Track the column after the last newline, expanding tabs so
        # wrapping decisions stay accurate.
        i = data.rfind('\n')
        if i >= 0:
            self.col = 0
            data = data[i+1:]
        data = data.expandtabs()
        self.col = self.col + len(data)
        self.atbreak = 0

    def send_flowing_data(self, data):
        if not data: return
        atbreak = self.atbreak or data[0].isspace()
        # Bind hot attributes to locals for the word loop.
        col = self.col
        maxcol = self.maxcol
        write = self.file.write
        for word in data.split():
            if atbreak:
                # Wrap if the next word would pass the right margin,
                # otherwise emit the single separating space.
                if col + len(word) >= maxcol:
                    write('\n')
                    col = 0
                else:
                    write(' ')
                    col = col + 1
            write(word)
            col = col + len(word)
            atbreak = 1
        self.col = col
        self.atbreak = data[-1].isspace()
+
+
def test(file=None):
    """Reflow a text file (argument, sys.argv[1], or stdin) to stdout."""
    formatter = AbstractFormatter(DumbWriter())
    if file is not None:
        fp = open(file)
    elif sys.argv[1:]:
        fp = open(sys.argv[1])
    else:
        fp = sys.stdin
    # readline() returns '' at EOF, which ends the iteration.
    for line in iter(fp.readline, ''):
        if line == '\n':
            formatter.end_paragraph(1)
        else:
            formatter.add_flowing_data(line)
    formatter.end_paragraph(0)
+
+
# Run the reflow demo when executed as a script.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/fpformat.py b/depot_tools/release/win/python_24/Lib/fpformat.py
new file mode 100644
index 0000000..0ae86a91
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/fpformat.py
@@ -0,0 +1,142 @@
+"""General floating point formatting functions.
+
+Functions:
+fix(x, digits_behind)
+sci(x, digits_behind)
+
+Each takes a number or a string and a number of digits as arguments.
+
+Parameters:
+x:             number to be formatted; or a string resembling a number
+digits_behind: number of digits behind the decimal point
+"""
+
+import re
+
+__all__ = ["fix","sci","NotANumber"]
+
+# Compiled regular expression to "decode" a number
+decoder = re.compile(r'^([-+]?)0*(\d*)((?:\.\d*)?)(([eE][-+]?\d+)?)$')
+# \0 the whole thing
+# \1 leading sign or empty
+# \2 digits left of decimal point
+# \3 fraction (empty or begins with point)
+# \4 exponent part (empty or begins with 'e' or 'E')
+
+try:
+    class NotANumber(ValueError):
+        pass
+except TypeError:
+    NotANumber = 'fpformat.NotANumber'
+
+def extract(s):
+    """Return (sign, intpart, fraction, expo) or raise an exception:
+    sign is '+' or '-'
+    intpart is 0 or more digits beginning with a nonzero
+    fraction is 0 or more digits
+    expo is an integer"""
+    res = decoder.match(s)
+    if res is None: raise NotANumber, s
+    sign, intpart, fraction, exppart = res.group(1,2,3,4)
+    if sign == '+': sign = ''
+    if fraction: fraction = fraction[1:]
+    if exppart: expo = int(exppart[1:])
+    else: expo = 0
+    return sign, intpart, fraction, expo
+
def unexpo(intpart, fraction, expo):
    """Remove the exponent by changing intpart and fraction."""
    if expo > 0:
        # Positive exponent: pull digits from the fraction into the
        # integer part, padding with zeroes if the fraction runs out.
        avail = len(fraction)
        intpart = intpart + fraction[:expo]
        fraction = fraction[expo:]
        if expo > avail:
            intpart = intpart + '0' * (expo - avail)
    elif expo < 0:
        # Negative exponent: push digits from the integer part into the
        # fraction, padding with zeroes if the integer part runs out.
        avail = len(intpart)
        fraction = intpart[expo:] + fraction
        intpart = intpart[:expo]
        if expo < -avail:
            fraction = '0' * (-expo - avail) + fraction
    return intpart, fraction
+
def roundfrac(intpart, fraction, digs):
    """Round or extend the fraction to size digs.

    intpart and fraction are digit strings.  digs may be negative, in
    which case digits of intpart itself are rounded away and replaced
    by zeroes.  Returns the new (intpart, fraction) pair.
    """
    f = len(fraction)
    if f <= digs:
        # Nothing to round off; just right-pad the fraction with zeroes.
        return intpart, fraction + '0'*(digs-f)
    i = len(intpart)
    if i+digs < 0:
        # The rounding position lies left of every digit we have:
        # the result is all zeroes.
        return '0'*-digs, ''
    total = intpart + fraction
    # First digit being dropped decides whether we round up.
    nextdigit = total[i+digs]
    if nextdigit >= '5': # Hard case: increment last digit, may have carry!
        n = i + digs - 1
        while n >= 0:
            if total[n] != '9': break
            n = n-1
        else:
            # Every kept digit was '9': prepend a '0' to absorb the carry.
            total = '0' + total
            i = i+1
            n = 0
        # Bump digit n and zero out everything to its right.
        total = total[:n] + chr(ord(total[n]) + 1) + '0'*(len(total)-n-1)
        intpart, fraction = total[:i], total[i:]
    if digs >= 0:
        return intpart, fraction[:digs]
    else:
        return intpart[:digs] + '0'*-digs, ''
+
def fix(x, digs):
    """Format x as [-]ddd.ddd with 'digs' digits after the point
    and at least one digit before.
    If digs <= 0, the point is suppressed."""
    if type(x) != type(''):
        x = repr(x)
    try:
        sign, intpart, fraction, expo = extract(x)
    except NotANumber:
        # Unparsable input is handed back unchanged.
        return x
    intpart, fraction = unexpo(intpart, fraction, expo)
    intpart, fraction = roundfrac(intpart, fraction, digs)
    # Strip leading zeroes but keep a single '0' before the point.
    intpart = intpart.lstrip('0') or '0'
    if digs > 0:
        return sign + intpart + '.' + fraction
    return sign + intpart
+
def sci(x, digs):
    """Format x as [-]d.dddE[+-]ddd with 'digs' digits after the point
    and exactly one digit before.
    If digs is <= 0, one digit is kept and the point is suppressed."""
    if type(x) != type(''): x = repr(x)
    sign, intpart, fraction, expo = extract(x)
    if not intpart:
        # No digits before the point: shift the fraction's leading
        # zeroes into the (negative) exponent.
        while fraction and fraction[0] == '0':
            fraction = fraction[1:]
            expo = expo - 1
        if fraction:
            intpart, fraction = fraction[0], fraction[1:]
            expo = expo - 1
        else:
            intpart = '0'
    else:
        # Normalize to a single leading digit, adjusting the exponent.
        expo = expo + len(intpart) - 1
        intpart, fraction = intpart[0], intpart[1:] + fraction
    digs = max(0, digs)
    intpart, fraction = roundfrac(intpart, fraction, digs)
    if len(intpart) > 1:
        # Rounding carried into a second integer digit; renormalize.
        intpart, fraction, expo = \
            intpart[0], intpart[1:] + fraction[:-1], \
            expo + len(intpart) - 1
    s = sign + intpart
    if digs > 0: s = s + '.' + fraction
    # The exponent is always signed and at least three digits wide.
    e = repr(abs(expo))
    e = '0'*(3-len(e)) + e
    if expo < 0: e = '-' + e
    else: e = '+' + e
    return s + 'e' + e
+
+def test():
+    """Interactive test run."""
+    try:
+        while 1:
+            x, digs = input('Enter (x, digs): ')
+            print x, fix(x, digs), sci(x, digs)
+    except (EOFError, KeyboardInterrupt):
+        pass
diff --git a/depot_tools/release/win/python_24/Lib/ftplib.py b/depot_tools/release/win/python_24/Lib/ftplib.py
new file mode 100644
index 0000000..9486918f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/ftplib.py
@@ -0,0 +1,803 @@
+"""An FTP client class and some helper functions.
+
+Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds
+
+Example:
+
+>>> from ftplib import FTP
+>>> ftp = FTP('ftp.python.org') # connect to host, default port
+>>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@
+'230 Guest login ok, access restrictions apply.'
+>>> ftp.retrlines('LIST') # list directory contents
+total 9
+drwxr-xr-x   8 root     wheel        1024 Jan  3  1994 .
+drwxr-xr-x   8 root     wheel        1024 Jan  3  1994 ..
+drwxr-xr-x   2 root     wheel        1024 Jan  3  1994 bin
+drwxr-xr-x   2 root     wheel        1024 Jan  3  1994 etc
+d-wxrwxr-x   2 ftp      wheel        1024 Sep  5 13:43 incoming
+drwxr-xr-x   2 root     wheel        1024 Nov 17  1993 lib
+drwxr-xr-x   6 1094     wheel        1024 Sep 13 19:07 pub
+drwxr-xr-x   3 root     wheel        1024 Jan  3  1994 usr
+-rw-r--r--   1 root     root          312 Aug  1  1994 welcome.msg
+'226 Transfer complete.'
+>>> ftp.quit()
+'221 Goodbye.'
+>>>
+
+A nice test that reveals some of the network dialogue would be:
+python ftplib.py -d localhost -l -p -l
+"""
+
+#
+# Changes and improvements suggested by Steve Majewski.
+# Modified by Jack to work on the mac.
+# Modified by Siebren to support docstrings and PASV.
+#
+
+import os
+import sys
+
+# Import SOCKS module if it exists, else standard socket module socket
+try:
+    import SOCKS; socket = SOCKS; del SOCKS # import SOCKS as socket
+    from socket import getfqdn; socket.getfqdn = getfqdn; del getfqdn
+except ImportError:
+    import socket
+
__all__ = ["FTP","Netrc"]

# Magic number from <socket.h>
MSG_OOB = 0x1                           # Process data out of band


# The standard FTP server control port
FTP_PORT = 21


# Exception raised when an error or invalid response is received.
# Error is the root of the module's exception hierarchy; callers can
# catch it to handle any protocol-level failure.
class Error(Exception): pass
class error_reply(Error): pass          # unexpected [123]xx reply
class error_temp(Error): pass           # 4xx errors
class error_perm(Error): pass           # 5xx errors
class error_proto(Error): pass          # response does not begin with [1-5]


# All exceptions (hopefully) that may be raised here and that aren't
# (always) programming errors on our side
all_errors = (Error, socket.error, IOError, EOFError)


# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'
+
+
+# The class itself
class FTP:

    '''An FTP client class.

    To create a connection, call the class using these arguments:
            host, user, passwd, acct
    These are all strings, and have default value ''.
    Then use self.connect() with optional host and port argument.

    To download a file, use ftp.retrlines('RETR ' + filename),
    or ftp.retrbinary() with slightly different arguments.
    To upload a file, use ftp.storlines() or ftp.storbinary(),
    which have an open file as argument (see their definitions
    below for details).
    The download/upload functions first issue appropriate TYPE
    and PORT or PASV commands.
'''

    # Class-level defaults; connect()/login() overwrite them per instance.
    debugging = 0
    host = ''
    port = FTP_PORT
    sock = None
    file = None
    welcome = None
    passiveserver = 1

    # Initialization method (called by class instantiation).
    # Initialize host to localhost, port to standard ftp port
    # Optional arguments are host (for connect()),
    # and user, passwd, acct (for login())
    def __init__(self, host='', user='', passwd='', acct=''):
        if host:
            self.connect(host)
            if user: self.login(user, passwd, acct)

    def connect(self, host = '', port = 0):
        '''Connect to host.  Arguments are:
        - host: hostname to connect to (string, default previous host)
        - port: port to connect to (integer, default previous port)'''
        if host: self.host = host
        if port: self.port = port
        msg = "getaddrinfo returns an empty list"
        # Try each resolved address (IPv4/IPv6) until one connects.
        for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                self.sock = socket.socket(af, socktype, proto)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            # msg holds the last socket.error (or the getaddrinfo note).
            raise socket.error, msg
        self.af = af
        self.file = self.sock.makefile('rb')
        self.welcome = self.getresp()
        return self.welcome

    def getwelcome(self):
        '''Get the welcome message from the server.
        (this is read and squirreled away by connect())'''
        if self.debugging:
            print '*welcome*', self.sanitize(self.welcome)
        return self.welcome

    def set_debuglevel(self, level):
        '''Set the debugging level.
        The required argument level means:
        0: no debugging output (default)
        1: print commands and responses but not body text etc.
        2: also print raw lines read and sent before stripping CR/LF'''
        self.debugging = level
    debug = set_debuglevel

    def set_pasv(self, val):
        '''Use passive or active mode for data transfers.
        With a false argument, use the normal PORT mode,
        With a true argument, use the PASV command.'''
        self.passiveserver = val

    # Internal: "sanitize" a string for printing, masking the password
    # in a PASS command so it never appears in debug output.
    def sanitize(self, s):
        if s[:5] == 'pass ' or s[:5] == 'PASS ':
            i = len(s)
            while i > 5 and s[i-1] in '\r\n':
                i = i-1
            s = s[:5] + '*'*(i-5) + s[i:]
        return repr(s)

    # Internal: send one line to the server, appending CRLF
    def putline(self, line):
        line = line + CRLF
        if self.debugging > 1: print '*put*', self.sanitize(line)
        self.sock.sendall(line)

    # Internal: send one command to the server (through putline())
    def putcmd(self, line):
        if self.debugging: print '*cmd*', self.sanitize(line)
        self.putline(line)

    # Internal: return one line from the server, stripping CRLF.
    # Raise EOFError if the connection is closed
    def getline(self):
        line = self.file.readline()
        if self.debugging > 1:
            print '*get*', self.sanitize(line)
        if not line: raise EOFError
        if line[-2:] == CRLF: line = line[:-2]
        elif line[-1:] in CRLF: line = line[:-1]
        return line

    # Internal: get a response from the server, which may possibly
    # consist of multiple lines.  Return a single string with no
    # trailing CRLF.  If the response consists of multiple lines,
    # these are separated by '\n' characters in the string
    def getmultiline(self):
        line = self.getline()
        if line[3:4] == '-':
            # A reply starting 'NNN-' continues until a line that starts
            # with the same code followed by a space ('NNN ').
            code = line[:3]
            while 1:
                nextline = self.getline()
                line = line + ('\n' + nextline)
                if nextline[:3] == code and \
                        nextline[3:4] != '-':
                    break
        return line

    # Internal: get a response from the server.
    # Raise various errors if the response indicates an error
    def getresp(self):
        resp = self.getmultiline()
        if self.debugging: print '*resp*', self.sanitize(resp)
        self.lastresp = resp[:3]
        c = resp[:1]
        if c == '4':
            raise error_temp, resp
        if c == '5':
            raise error_perm, resp
        if c not in '123':
            raise error_proto, resp
        return resp

    def voidresp(self):
        """Expect a response beginning with '2'."""
        resp = self.getresp()
        if resp[0] != '2':
            raise error_reply, resp
        return resp

    def abort(self):
        '''Abort a file transfer.  Uses out-of-band data.
        This does not follow the procedure from the RFC to send Telnet
        IP and Synch; that doesn't seem to work with the servers I've
        tried.  Instead, just send the ABOR command as OOB data.'''
        line = 'ABOR' + CRLF
        if self.debugging > 1: print '*put urgent*', self.sanitize(line)
        self.sock.sendall(line, MSG_OOB)
        resp = self.getmultiline()
        if resp[:3] not in ('426', '226'):
            raise error_proto, resp

    def sendcmd(self, cmd):
        '''Send a command and return the response.'''
        self.putcmd(cmd)
        return self.getresp()

    def voidcmd(self, cmd):
        """Send a command and expect a response beginning with '2'."""
        self.putcmd(cmd)
        return self.voidresp()

    def sendport(self, host, port):
        '''Send a PORT command with the current host and the given
        port number.
        '''
        # PORT arguments are six comma-separated decimal bytes:
        # four for the host address, two for the port.
        hbytes = host.split('.')
        pbytes = [repr(port/256), repr(port%256)]
        bytes = hbytes + pbytes
        cmd = 'PORT ' + ','.join(bytes)
        return self.voidcmd(cmd)

    def sendeprt(self, host, port):
        '''Send a EPRT command with the current host and the given port number.'''
        af = 0
        if self.af == socket.AF_INET:
            af = 1
        if self.af == socket.AF_INET6:
            af = 2
        if af == 0:
            raise error_proto, 'unsupported address family'
        fields = ['', repr(af), host, repr(port), '']
        cmd = 'EPRT ' + '|'.join(fields)
        return self.voidcmd(cmd)

    def makeport(self):
        '''Create a new socket and send a PORT command for it.'''
        msg = "getaddrinfo returns an empty list"
        sock = None
        for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
            af, socktype, proto, canonname, sa = res
            try:
                sock = socket.socket(af, socktype, proto)
                sock.bind(sa)
            except socket.error, msg:
                if sock:
                    sock.close()
                sock = None
                continue
            break
        if not sock:
            raise socket.error, msg
        sock.listen(1)
        port = sock.getsockname()[1] # Get proper port
        host = self.sock.getsockname()[0] # Get proper host
        # Announce our listening address; the reply is already validated
        # by voidcmd() inside sendport()/sendeprt().
        if self.af == socket.AF_INET:
            resp = self.sendport(host, port)
        else:
            resp = self.sendeprt(host, port)
        return sock

    def makepasv(self):
        # Ask the server for a data-connection address: PASV for IPv4,
        # EPSV otherwise (the EPSV reply only carries the port number).
        if self.af == socket.AF_INET:
            host, port = parse227(self.sendcmd('PASV'))
        else:
            host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
        return host, port

    def ntransfercmd(self, cmd, rest=None):
        """Initiate a transfer over the data connection.

        If the transfer is active, send a port command and the
        transfer command, and accept the connection.  If the server is
        passive, send a pasv command, connect to it, and start the
        transfer command.  Either way, return the socket for the
        connection and the expected size of the transfer.  The
        expected size may be None if it could not be determined.

        Optional `rest' argument can be a string that is sent as the
        argument to a RESTART command.  This is essentially a server
        marker used to tell the server to skip over any data up to the
        given marker.
        """
        size = None
        if self.passiveserver:
            host, port = self.makepasv()
            af, socktype, proto, canon, sa = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)[0]
            conn = socket.socket(af, socktype, proto)
            conn.connect(sa)
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            if resp[0] != '1':
                raise error_reply, resp
        else:
            sock = self.makeport()
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            if resp[0] != '1':
                raise error_reply, resp
            conn, sockaddr = sock.accept()
        if resp[:3] == '150':
            # this is conditional in case we received a 125
            size = parse150(resp)
        return conn, size

    def transfercmd(self, cmd, rest=None):
        """Like ntransfercmd() but returns only the socket."""
        return self.ntransfercmd(cmd, rest)[0]

    def login(self, user = '', passwd = '', acct = ''):
        '''Login, default anonymous.'''
        if not user: user = 'anonymous'
        if not passwd: passwd = ''
        if not acct: acct = ''
        if user == 'anonymous' and passwd in ('', '-'):
            # If there is no anonymous ftp password specified
            # then we'll just use anonymous@
            # We don't send any other thing because:
            # - We want to remain anonymous
            # - We want to stop SPAM
            # - We don't want to let ftp sites to discriminate by the user,
            #   host or country.
            passwd = passwd + 'anonymous@'
        resp = self.sendcmd('USER ' + user)
        # A 3xx reply means the server wants the next credential.
        if resp[0] == '3': resp = self.sendcmd('PASS ' + passwd)
        if resp[0] == '3': resp = self.sendcmd('ACCT ' + acct)
        if resp[0] != '2':
            raise error_reply, resp
        return resp

    def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
        """Retrieve data in binary mode.

        `cmd' is a RETR command.  `callback' is a callback function is
        called for each block.  No more than `blocksize' number of
        bytes will be read from the socket.  Optional `rest' is passed
        to transfercmd().

        A new port is created for you.  Return the response code.
        """
        self.voidcmd('TYPE I')
        conn = self.transfercmd(cmd, rest)
        while 1:
            data = conn.recv(blocksize)
            if not data:
                break
            callback(data)
        conn.close()
        return self.voidresp()

    def retrlines(self, cmd, callback = None):
        '''Retrieve data in line mode.
        The argument is a RETR or LIST command.
        The callback function (2nd argument) is called for each line,
        with trailing CRLF stripped.  This creates a new port for you.
        print_line() is the default callback.'''
        if callback is None: callback = print_line
        resp = self.sendcmd('TYPE A')
        conn = self.transfercmd(cmd)
        fp = conn.makefile('rb')
        while 1:
            line = fp.readline()
            if self.debugging > 2: print '*retr*', repr(line)
            if not line:
                break
            if line[-2:] == CRLF:
                line = line[:-2]
            elif line[-1:] == '\n':
                line = line[:-1]
            callback(line)
        fp.close()
        conn.close()
        return self.voidresp()

    def storbinary(self, cmd, fp, blocksize=8192):
        '''Store a file in binary mode.'''
        self.voidcmd('TYPE I')
        conn = self.transfercmd(cmd)
        while 1:
            buf = fp.read(blocksize)
            if not buf: break
            conn.sendall(buf)
        conn.close()
        return self.voidresp()

    def storlines(self, cmd, fp):
        '''Store a file in line mode.'''
        self.voidcmd('TYPE A')
        conn = self.transfercmd(cmd)
        while 1:
            buf = fp.readline()
            if not buf: break
            # Normalize every line ending to CRLF on the wire.
            if buf[-2:] != CRLF:
                if buf[-1] in CRLF: buf = buf[:-1]
                buf = buf + CRLF
            conn.sendall(buf)
        conn.close()
        return self.voidresp()

    def acct(self, password):
        '''Send new account name.'''
        cmd = 'ACCT ' + password
        return self.voidcmd(cmd)

    def nlst(self, *args):
        '''Return a list of files in a given directory (default the current).'''
        cmd = 'NLST'
        for arg in args:
            cmd = cmd + (' ' + arg)
        files = []
        self.retrlines(cmd, files.append)
        return files

    def dir(self, *args):
        '''List a directory in long form.
        By default list current directory to stdout.
        Optional last argument is callback function; all
        non-empty arguments before it are concatenated to the
        LIST command.  (This *should* only be used for a pathname.)'''
        cmd = 'LIST'
        func = None
        if args[-1:] and type(args[-1]) != type(''):
            args, func = args[:-1], args[-1]
        for arg in args:
            if arg:
                cmd = cmd + (' ' + arg)
        self.retrlines(cmd, func)

    def rename(self, fromname, toname):
        '''Rename a file.'''
        resp = self.sendcmd('RNFR ' + fromname)
        if resp[0] != '3':
            raise error_reply, resp
        return self.voidcmd('RNTO ' + toname)

    def delete(self, filename):
        '''Delete a file.'''
        resp = self.sendcmd('DELE ' + filename)
        if resp[:3] in ('250', '200'):
            return resp
        elif resp[:1] == '5':
            raise error_perm, resp
        else:
            raise error_reply, resp

    def cwd(self, dirname):
        '''Change to a directory.'''
        if dirname == '..':
            try:
                return self.voidcmd('CDUP')
            except error_perm, msg:
                # Servers lacking CDUP answer 500; fall through and try
                # a plain 'CWD ..' instead.
                if msg.args[0][:3] != '500':
                    raise
        elif dirname == '':
            dirname = '.'  # does nothing, but could return error
        cmd = 'CWD ' + dirname
        return self.voidcmd(cmd)

    def size(self, filename):
        '''Retrieve the size of a file.'''
        # Note that the RFC doesn't say anything about 'SIZE'
        resp = self.sendcmd('SIZE ' + filename)
        if resp[:3] == '213':
            s = resp[3:].strip()
            try:
                return int(s)
            except (OverflowError, ValueError):
                return long(s)

    def mkd(self, dirname):
        '''Make a directory, return its full pathname.'''
        resp = self.sendcmd('MKD ' + dirname)
        return parse257(resp)

    def rmd(self, dirname):
        '''Remove a directory.'''
        return self.voidcmd('RMD ' + dirname)

    def pwd(self):
        '''Return current working directory.'''
        resp = self.sendcmd('PWD')
        return parse257(resp)

    def quit(self):
        '''Quit, and close the connection.'''
        resp = self.voidcmd('QUIT')
        self.close()
        return resp

    def close(self):
        '''Close the connection without assuming anything about it.'''
        if self.file:
            self.file.close()
            self.sock.close()
            self.file = self.sock = None
+
+
# Lazily-compiled pattern for the size hint in a 150 reply.
_150_re = None

def parse150(resp):
    '''Parse the '150' response for a RETR request.
    Returns the expected transfer size or None; size is not guaranteed to
    be present in the 150 message.
    '''
    if resp[:3] != '150':
        raise error_reply(resp)
    global _150_re
    if _150_re is None:
        # Compile lazily and cache: most sessions never need this.
        import re
        _150_re = re.compile(r"150 .* \((\d+) bytes\)", re.IGNORECASE)
    m = _150_re.match(resp)
    if not m:
        return None
    s = m.group(1)
    try:
        return int(s)
    except (OverflowError, ValueError):
        # Very large sizes overflow int() on old interpreters.
        return long(s)
+
+
# Lazily-compiled pattern for the host/port numbers in a 227 reply.
_227_re = None

def parse227(resp):
    '''Parse the '227' response for a PASV request.
    Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)'
    Return ('host.addr.as.numbers', port#) tuple.'''

    if resp[:3] != '227':
        raise error_reply(resp)
    global _227_re
    if _227_re is None:
        # Compile lazily and cache: only passive IPv4 transfers need it.
        import re
        _227_re = re.compile(r'(\d+),(\d+),(\d+),(\d+),(\d+),(\d+)')
    m = _227_re.search(resp)
    if not m:
        raise error_proto(resp)
    numbers = m.groups()
    host = '.'.join(numbers[:4])
    # The port is transmitted as two bytes: high, then low.
    port = (int(numbers[4]) << 8) + int(numbers[5])
    return host, port
+
+
def parse229(resp, peer):
    '''Parse the '229' response for a EPSV request.
    Raises error_proto if it does not contain '(|||port|)'
    Return ('host.addr.as.numbers', port#) tuple.

    peer is the control connection's peer address; EPSV replies carry
    only the port, so the host is taken from peer[0].'''

    if resp[:3] != '229':
        raise error_reply(resp)
    left = resp.find('(')
    if left < 0: raise error_proto(resp)
    right = resp.find(')', left + 1)
    if right < 0:
        raise error_proto(resp) # should contain '(|||port|)'
    # The delimiter character must open and close the field list.
    if resp[left + 1] != resp[right - 1]:
        raise error_proto(resp)
    parts = resp[left + 1:right].split(resp[left+1])
    if len(parts) != 5:
        raise error_proto(resp)
    host = peer[0]
    port = int(parts[3])
    return host, port
+
+
def parse257(resp):
    '''Parse the '257' response for a MKD or PWD request.
    This is a response to a MKD or PWD request: a directory name.
    Returns the directoryname in the 257 reply.'''

    if resp[:3] != '257':
        raise error_reply(resp)
    if resp[3:5] != ' "':
        return '' # Not compliant to RFC 959, but UNIX ftpd does this
    dirname = ''
    i = 5
    n = len(resp)
    while i < n:
        c = resp[i]
        i = i+1
        if c == '"':
            # Per RFC 959, a doubled quote inside the name stands for a
            # literal quote; a lone quote terminates the name.
            if i >= n or resp[i] != '"':
                break
            i = i+1
        dirname = dirname + c
    return dirname
+
+
def print_line(line):
    '''Default retrlines callback to print a line.'''
    # Explicit write instead of the Python-2-only print statement;
    # %s matches print's str() conversion of arbitrary objects.
    sys.stdout.write('%s\n' % line)
+
+
def ftpcp(source, sourcename, target, targetname = '', type = 'I'):
    '''Copy file from one FTP-instance to another.'''
    # The target name defaults to the source name.
    if not targetname:
        targetname = sourcename
    type = 'TYPE ' + type
    source.voidcmd(type)
    target.voidcmd(type)
    # Put the source into passive mode and point the target's data
    # connection at it.
    sourcehost, sourceport = parse227(source.sendcmd('PASV'))
    target.sendport(sourcehost, sourceport)
    # RFC 959: the user must "listen" [...] BEFORE sending the
    # transfer request.
    # So: STOR before RETR, because here the target is a "user".
    store_reply = target.sendcmd('STOR ' + targetname)
    if store_reply[:3] not in ('125', '150'):
        raise error_proto  # RFC 959
    retr_reply = source.sendcmd('RETR ' + sourcename)
    if retr_reply[:3] not in ('125', '150'):
        raise error_proto  # RFC 959
    source.voidresp()
    target.voidresp()
+
+
class Netrc:
    """Class to parse & provide access to 'netrc' format files.

    See the netrc(4) man page for information on the file format.

    WARNING: This class is obsolete -- use module netrc instead.

    """
    # Fall-back credentials from a 'default' entry, if any.
    __defuser = None
    __defpasswd = None
    __defacct = None

    def __init__(self, filename=None):
        # Default to ~/.netrc; without $HOME there is no sensible default.
        if filename is None:
            if "HOME" in os.environ:
                filename = os.path.join(os.environ["HOME"],
                                        ".netrc")
            else:
                raise IOError, \
                      "specify file to load or set $HOME"
        self.__hosts = {}
        self.__macros = {}
        fp = open(filename, "r")
        in_macro = 0
        while 1:
            line = fp.readline()
            if not line: break
            # A macro body ('macdef') extends until the first blank line.
            if in_macro and line.strip():
                macro_lines.append(line)
                continue
            elif in_macro:
                self.__macros[macro_name] = tuple(macro_lines)
                in_macro = 0
            # Parse keyword/value tokens; a line may hold several pairs.
            words = line.split()
            host = user = passwd = acct = None
            default = 0
            i = 0
            while i < len(words):
                w1 = words[i]
                if i+1 < len(words):
                    w2 = words[i + 1]
                else:
                    w2 = None
                if w1 == 'default':
                    default = 1
                elif w1 == 'machine' and w2:
                    host = w2.lower()
                    i = i + 1
                elif w1 == 'login' and w2:
                    user = w2
                    i = i + 1
                elif w1 == 'password' and w2:
                    passwd = w2
                    i = i + 1
                elif w1 == 'account' and w2:
                    acct = w2
                    i = i + 1
                elif w1 == 'macdef' and w2:
                    macro_name = w2
                    macro_lines = []
                    in_macro = 1
                    break
                i = i + 1
            if default:
                # 'default' entry: remembered as get_account() fall-backs.
                self.__defuser = user or self.__defuser
                self.__defpasswd = passwd or self.__defpasswd
                self.__defacct = acct or self.__defacct
            if host:
                # Merge with any earlier entry for the same machine.
                if host in self.__hosts:
                    ouser, opasswd, oacct = \
                           self.__hosts[host]
                    user = user or ouser
                    passwd = passwd or opasswd
                    acct = acct or oacct
                self.__hosts[host] = user, passwd, acct
        fp.close()

    def get_hosts(self):
        """Return a list of hosts mentioned in the .netrc file."""
        return self.__hosts.keys()

    def get_account(self, host):
        """Returns login information for the named host.

        The return value is a triple containing userid,
        password, and the accounting field.

        """
        host = host.lower()
        user = passwd = acct = None
        if host in self.__hosts:
            user, passwd, acct = self.__hosts[host]
        # Fill any gaps from the 'default' entry, if one was present.
        user = user or self.__defuser
        passwd = passwd or self.__defpasswd
        acct = acct or self.__defacct
        return user, passwd, acct

    def get_macros(self):
        """Return a list of all defined macro names."""
        return self.__macros.keys()

    def get_macro(self, macro):
        """Return a sequence of lines which define a named macro."""
        return self.__macros[macro]
+
+
+
def test():
    '''Test program.
    Usage: ftp [-d] [-r[file]] host [-l[dir]] [-d[dir]] [-p] [file] ...'''

    debugging = 0
    rcfile = None
    # -d may be repeated to raise the debug level.
    while sys.argv[1] == '-d':
        debugging = debugging+1
        del sys.argv[1]
    if sys.argv[1][:2] == '-r':
        # get name of alternate ~/.netrc file:
        rcfile = sys.argv[1][2:]
        del sys.argv[1]
    host = sys.argv[1]
    ftp = FTP(host)
    ftp.set_debuglevel(debugging)
    userid = passwd = acct = ''
    try:
        netrc = Netrc(rcfile)
    except IOError:
        if rcfile is not None:
            sys.stderr.write("Could not open account file"
                             " -- using anonymous login.")
    else:
        try:
            userid, passwd, acct = netrc.get_account(host)
        except KeyError:
            # no account for host
            sys.stderr.write(
                    "No account -- using anonymous login.")
    ftp.login(userid, passwd, acct)
    # Remaining arguments: -l[dir] lists, -d[dir] changes directory,
    # -p toggles passive mode, anything else is retrieved in binary.
    # ('filename' renamed from 'file', which shadowed the builtin.)
    for filename in sys.argv[2:]:
        if filename[:2] == '-l':
            ftp.dir(filename[2:])
        elif filename[:2] == '-d':
            cmd = 'CWD'
            if filename[2:]: cmd = cmd + ' ' + filename[2:]
            resp = ftp.sendcmd(cmd)
        elif filename == '-p':
            ftp.set_pasv(not ftp.passiveserver)
        else:
            ftp.retrbinary('RETR ' + filename,
                           sys.stdout.write, 1024)
    ftp.quit()


if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/getopt.py b/depot_tools/release/win/python_24/Lib/getopt.py
new file mode 100644
index 0000000..04e881e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/getopt.py
@@ -0,0 +1,211 @@
+# -*- coding: iso-8859-1 -*-
+"""Parser for command line options.
+
+This module helps scripts to parse the command line arguments in
+sys.argv.  It supports the same conventions as the Unix getopt()
+function (including the special meanings of arguments of the form `-'
+and `--').  Long options similar to those supported by GNU software
+may be used as well via an optional third argument.  This module
+provides two functions and an exception:
+
+getopt() -- Parse command line options
+gnu_getopt() -- Like getopt(), but allow option and non-option arguments
+to be intermixed.
+GetoptError -- exception (class) raised with 'opt' attribute, which is the
+option involved with the exception.
+"""
+
+# Long option support added by Lars Wirzenius <liw@iki.fi>.
+#
+# Gerrit Holl <gerrit@nl.linux.org> moved the string-based exceptions
+# to class-based exceptions.
+#
+# Peter Åstrand <astrand@lysator.liu.se> added gnu_getopt().
+#
+# TODO for gnu_getopt():
+#
+# - GNU getopt_long_only mechanism
+# - allow the caller to specify ordering
+# - RETURN_IN_ORDER option
+# - GNU extension with '-' as first character of option string
+# - optional arguments, specified by double colons
+# - a option string with a W followed by semicolon should
+#   treat "-W foo" as "--foo"
+
+__all__ = ["GetoptError","error","getopt","gnu_getopt"]
+
+import os
+
class GetoptError(Exception):
    """Raised when an error is found in the argument list.

    Attributes:
        msg -- explanation of the error
        opt -- the option (without leading dashes) that caused the error,
               or '' when no single option is to blame
    """

    # Class-level defaults keep attribute access safe even before
    # __init__ has run.
    opt = ''
    msg = ''

    def __init__(self, msg, opt=''):
        """Record the message and the offending option name."""
        self.msg = msg
        self.opt = opt
        Exception.__init__(self, msg, opt)

    def __str__(self):
        """Show only the explanatory message, not the raw (msg, opt) args."""
        return self.msg

# Old name kept so pre-existing ``except getopt.error`` code still works.
error = GetoptError
+
def getopt(args, shortopts, longopts = []):
    """getopt(args, options[, long_options]) -> opts, args

    Parse command line options and parameters.  args is the argument
    list with the program name already stripped (i.e. sys.argv[1:]).
    shortopts is the string of recognized option letters, a letter
    followed by ':' takes an argument (the Unix getopt() convention).
    longopts, if given, lists long option names without the leading
    '--'; a trailing '=' marks a long option that takes an argument.

    Returns (opts, remaining_args): opts is a list of (option, value)
    pairs -- the option including its '-'/'--' prefix, the value ''
    when the option takes no argument.  Options appear in the order
    found, so repeats are preserved.  Parsing stops at the first
    non-option argument, at a bare '-', or after a lone '--'.
    """

    opts = []
    if isinstance(longopts, str):
        longopts = [longopts]
    else:
        longopts = list(longopts)
    while args:
        arg = args[0]
        # Stop at the first non-option argument or at a bare '-'.
        if not arg.startswith('-') or arg == '-':
            break
        # A lone '--' terminates option processing and is consumed.
        if arg == '--':
            args = args[1:]
            break
        if arg.startswith('--'):
            opts, args = do_longs(opts, arg[2:], longopts, args[1:])
        else:
            opts, args = do_shorts(opts, arg[1:], shortopts, args[1:])

    return opts, args
+
def gnu_getopt(args, shortopts, longopts = []):
    """getopt(args, options[, long_options]) -> opts, args

    Like getopt(), but GNU-style by default: option and non-option
    arguments may be intermixed, with non-options collected separately.

    POSIX mode (stop at the first non-option argument) is selected by a
    leading '+' in the option string or by the POSIXLY_CORRECT
    environment variable.
    """

    opts = []
    prog_args = []
    if isinstance(longopts, str):
        longopts = [longopts]
    else:
        longopts = list(longopts)

    # Decide whether options may follow non-option arguments.
    all_options_first = False
    if shortopts.startswith('+'):
        shortopts = shortopts[1:]
        all_options_first = True
    elif os.environ.get("POSIXLY_CORRECT"):
        all_options_first = True

    while args:
        arg = args[0]
        # A lone '--' ends option scanning; everything after is plain args.
        if arg == '--':
            prog_args += args[1:]
            break

        if arg[:2] == '--':
            opts, args = do_longs(opts, arg[2:], longopts, args[1:])
        elif arg[:1] == '-':
            # NOTE: a bare '-' lands here too and is silently consumed
            # (do_shorts with an empty cluster), matching historical behavior.
            opts, args = do_shorts(opts, arg[1:], shortopts, args[1:])
        elif all_options_first:
            prog_args += args
            break
        else:
            prog_args.append(arg)
            args = args[1:]

    return opts, prog_args
+
def do_longs(opts, opt, longopts, args):
    """Consume one long option (the text after '--') plus its argument.

    Appends ('--name', value) to opts and returns the updated
    (opts, args) pair.  Raises GetoptError when a required argument is
    missing or a forbidden one was supplied.
    """
    # Split "name=value" at the first '=' if present.
    if '=' in opt:
        opt, optarg = opt.split('=', 1)
    else:
        optarg = None

    has_arg, opt = long_has_args(opt, longopts)
    if has_arg:
        if optarg is None:
            # Argument not attached with '='; take the next word.
            if not args:
                raise GetoptError('option --%s requires argument' % opt, opt)
            optarg, args = args[0], args[1:]
    elif optarg:
        raise GetoptError('option --%s must not have an argument' % opt, opt)
    opts.append(('--' + opt, optarg or ''))
    return opts, args
+
def long_has_args(opt, longopts):
    """Resolve *opt* (possibly abbreviated) against *longopts*.

    Returns (has_arg, full_option_name); a trailing '=' in a longopts
    entry marks an option that takes an argument.  Raises GetoptError
    when the option is unknown or the abbreviation is ambiguous.
    """
    matches = []
    for candidate in longopts:
        if candidate.startswith(opt):
            matches.append(candidate)
    if not matches:
        raise GetoptError('option --%s not recognized' % opt, opt)
    # An exact match always wins over longer continuations.
    if opt in matches:
        return False, opt
    elif opt + '=' in matches:
        return True, opt
    # No exact match, so the abbreviation must be unique.
    if len(matches) > 1:
        # XXX since matches contains all valid continuations, might be
        # nice to work them into the error msg
        raise GetoptError('option --%s not a unique prefix' % opt, opt)
    assert len(matches) == 1
    full = matches[0]
    has_arg = full.endswith('=')
    if has_arg:
        full = full[:-1]
    return has_arg, full
+
def do_shorts(opts, optstring, shortopts, args):
    """Consume a cluster of short options ('-abc' arrives as 'abc').

    An option taking a value consumes the rest of the cluster or, if
    the cluster is exhausted, the next argument.  Returns the updated
    (opts, args) pair; raises GetoptError for a missing argument.
    """
    while optstring != '':
        opt = optstring[0]
        optstring = optstring[1:]
        optarg = ''
        if short_has_arg(opt, shortopts):
            if optstring == '':
                # Value was not attached; take it from the next word.
                if not args:
                    raise GetoptError('option -%s requires argument' % opt,
                                      opt)
                optstring, args = args[0], args[1:]
            optarg = optstring
            optstring = ''
        opts.append(('-' + opt, optarg))
    return opts, args
+
def short_has_arg(opt, shortopts):
    """Return True if short option *opt* takes an argument per *shortopts*.

    Raises GetoptError if *opt* does not appear in *shortopts*.
    """
    for idx, ch in enumerate(shortopts):
        # ':' itself is never a valid option character.
        if ch == opt and ch != ':':
            return shortopts.startswith(':', idx + 1)
    raise GetoptError('option -%s not recognized' % opt, opt)
+
# Quick manual check (Python 2 print statement), e.g.:
#   python getopt.py -a1 --alpha=2 arg
if __name__ == '__main__':
    import sys
    print getopt(sys.argv[1:], "a:b", ["alpha=", "beta"])
diff --git a/depot_tools/release/win/python_24/Lib/getpass.py b/depot_tools/release/win/python_24/Lib/getpass.py
new file mode 100644
index 0000000..e96491f9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/getpass.py
@@ -0,0 +1,123 @@
+"""Utilities to get a password and/or the current user name.
+
+getpass(prompt) - prompt for a password, with echo turned off
+getuser() - get the user name from the environment or password database
+
+On Windows, the msvcrt module will be used.
+On the Mac EasyDialogs.AskPassword is used, if available.
+
+"""
+
+# Authors: Piers Lauder (original)
+#          Guido van Rossum (Windows support and cleanup)
+
+import sys
+
+__all__ = ["getpass","getuser"]
+
def unix_getpass(prompt='Password: '):
    """Prompt for a password, with echo turned off.

    Restore terminal settings at end.  Falls back to default_getpass()
    (echoing) when stdin has no usable file descriptor.
    """

    try:
        fd = sys.stdin.fileno()
    except (AttributeError, ValueError):
        # stdin was replaced by an object without a real file descriptor
        # (e.g. a StringIO).  Was a bare ``except:``, which also swallowed
        # SystemExit and KeyboardInterrupt -- narrowed to the exceptions
        # fileno() actually raises.
        return default_getpass(prompt)

    old = termios.tcgetattr(fd)     # a copy to save
    new = old[:]

    new[3] = new[3] & ~termios.ECHO # 3 == 'lflags'
    try:
        termios.tcsetattr(fd, termios.TCSADRAIN, new)
        passwd = _raw_input(prompt)
    finally:
        # Always restore echo, even if reading the password failed.
        termios.tcsetattr(fd, termios.TCSADRAIN, old)

    sys.stdout.write('\n')
    return passwd
+
+
def win_getpass(prompt='Password: '):
    """Prompt for password with echo off, using Windows getch()."""
    if sys.stdin is not sys.__stdin__:
        # stdin was redirected; raw console reads would bypass it.
        return default_getpass(prompt)
    import msvcrt
    for ch in prompt:
        msvcrt.putch(ch)
    chars = []
    while True:
        ch = msvcrt.getch()
        if ch in ('\r', '\n'):
            break
        if ch == '\003':
            # Ctrl-C
            raise KeyboardInterrupt
        if ch == '\b':
            # Backspace: drop the last character typed, if any.
            if chars:
                del chars[-1]
        else:
            chars.append(ch)
    msvcrt.putch('\r')
    msvcrt.putch('\n')
    return ''.join(chars)
+
+
def default_getpass(prompt='Password: '):
    # Last-resort fallback: warn that echo cannot be disabled, then read
    # normally (Python 2 ``print`` statement).
    print "Warning: Problem with getpass. Passwords may be echoed."
    return _raw_input(prompt)
+
+
+def _raw_input(prompt=""):
+    # A raw_input() replacement that doesn't save the string in the
+    # GNU readline history.
+    prompt = str(prompt)
+    if prompt:
+        sys.stdout.write(prompt)
+    line = sys.stdin.readline()
+    if not line:
+        raise EOFError
+    if line[-1] == '\n':
+        line = line[:-1]
+    return line
+
+
def getuser():
    """Get the username from the environment or password database.

    First try various environment variables, then the password
    database.  This works on Windows as long as USERNAME is set.

    """

    import os

    # Environment wins; checked in historical precedence order.
    for name in ('LOGNAME', 'USER', 'LNAME', 'USERNAME'):
        value = os.environ.get(name)
        if value:
            return value

    # Nothing in the environment: consult the Unix password database.
    # (The ImportError/KeyError raised here "explains" the failure.)
    import pwd
    return pwd.getpwuid(os.getuid())[0]
+
# Bind the name getpass to the appropriate function
try:
    import termios
    # it's possible there is an incompatible termios from the
    # McMillan Installer, make sure we have a UNIX-compatible termios
    termios.tcgetattr, termios.tcsetattr
except (ImportError, AttributeError):
    # No usable termios: try the Windows console, then Mac dialogs,
    # then fall back to reading with echo enabled.
    try:
        import msvcrt
    except ImportError:
        try:
            from EasyDialogs import AskPassword
        except ImportError:
            getpass = default_getpass
        else:
            getpass = AskPassword
    else:
        getpass = win_getpass
else:
    getpass = unix_getpass
diff --git a/depot_tools/release/win/python_24/Lib/gettext.py b/depot_tools/release/win/python_24/Lib/gettext.py
new file mode 100644
index 0000000..6e29176
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/gettext.py
@@ -0,0 +1,578 @@
+"""Internationalization and localization support.
+
+This module provides internationalization (I18N) and localization (L10N)
+support for your Python programs by providing an interface to the GNU gettext
+message catalog library.
+
+I18N refers to the operation by which a program is made aware of multiple
+languages.  L10N refers to the adaptation of your program, once
+internationalized, to the local language and cultural habits.
+
+"""
+
+# This module represents the integration of work, contributions, feedback, and
+# suggestions from the following people:
+#
+# Martin von Loewis, who wrote the initial implementation of the underlying
+# C-based libintlmodule (later renamed _gettext), along with a skeletal
+# gettext.py implementation.
+#
+# Peter Funk, who wrote fintl.py, a fairly complete wrapper around intlmodule,
+# which also included a pure-Python implementation to read .mo files if
+# intlmodule wasn't available.
+#
+# James Henstridge, who also wrote a gettext.py module, which has some
+# interesting, but currently unsupported experimental features: the notion of
+# a Catalog class and instances, and the ability to add to a catalog file via
+# a Python API.
+#
+# Barry Warsaw integrated these modules, wrote the .install() API and code,
+# and conformed all C and Python code to Python's coding standards.
+#
+# Francois Pinard and Marc-Andre Lemburg also contributed valuably to this
+# module.
+#
+# J. David Ibanez implemented plural forms. Bruno Haible fixed some bugs.
+#
+# TODO:
+# - Lazy loading of .mo files.  Currently the entire catalog is loaded into
+#   memory, but that's probably bad for large translated programs.  Instead,
+#   the lexical sort of original strings in GNU .mo files should be exploited
+#   to do binary searches and lazy initializations.  Or you might want to use
+#   the undocumented double-hash algorithm for .mo files with hash tables, but
+#   you'll need to study the GNU gettext code to do this.
+#
+# - Support Solaris .mo file formats.  Unfortunately, we've been unable to
+#   find this format documented anywhere.
+
+
+import locale, copy, os, re, struct, sys
+from errno import ENOENT
+
+
+__all__ = ['NullTranslations', 'GNUTranslations', 'Catalog',
+           'find', 'translation', 'install', 'textdomain', 'bindtextdomain',
+           'dgettext', 'dngettext', 'gettext', 'ngettext',
+           ]
+
+_default_localedir = os.path.join(sys.prefix, 'share', 'locale')
+
+
+def test(condition, true, false):
+    """
+    Implements the C expression:
+
+      condition ? true : false
+
+    Required to correctly interpret plural forms.
+    """
+    if condition:
+        return true
+    else:
+        return false
+
+
def c2py(plural):
    """Gets a C expression as used in PO files for plural forms and returns a
    Python lambda function that implements an equivalent expression.
    """
    # Security check, allow only the "n" identifier
    from StringIO import StringIO
    import token, tokenize
    tokens = tokenize.generate_tokens(StringIO(plural).readline)
    try:
        danger = [x for x in tokens if x[0] == token.NAME and x[1] != 'n']
    except tokenize.TokenError:
        raise ValueError, \
              'plural forms expression error, maybe unbalanced parenthesis'
    else:
        if danger:
            raise ValueError, 'plural forms expression could be dangerous'

    # Replace some C operators by their Python equivalents
    plural = plural.replace('&&', ' and ')
    plural = plural.replace('||', ' or ')

    # "!x" -> " not x", but leave "!=" alone.
    expr = re.compile(r'\!([^=])')
    plural = expr.sub(' not \\1', plural)

    # Regular expression and replacement function used to transform
    # "a?b:c" to "test(a,b,c)".
    expr = re.compile(r'(.*?)\?(.*?):(.*)')
    def repl(x):
        # Recurse into the "else" part, which may hold further "?:"s.
        return "test(%s, %s, %s)" % (x.group(1), x.group(2),
                                     expr.sub(repl, x.group(3)))

    # Code to transform the plural expression, taking care of parentheses
    stack = ['']
    for c in plural:
        if c == '(':
            stack.append('')
        elif c == ')':
            if len(stack) == 1:
                # Actually, we never reach this code, because unbalanced
                # parentheses get caught in the security check at the
                # beginning.
                raise ValueError, 'unbalanced parenthesis in plural form'
            s = expr.sub(repl, stack.pop())
            stack[-1] += '(%s)' % s
        else:
            stack[-1] += c
    plural = expr.sub(repl, stack.pop())

    # NOTE: eval is safe only because the token check above restricts
    # identifiers in the expression to "n".
    return eval('lambda n: int(%s)' % plural)
+
+
+
+def _expand_lang(locale):
+    from locale import normalize
+    locale = normalize(locale)
+    COMPONENT_CODESET   = 1 << 0
+    COMPONENT_TERRITORY = 1 << 1
+    COMPONENT_MODIFIER  = 1 << 2
+    # split up the locale into its base components
+    mask = 0
+    pos = locale.find('@')
+    if pos >= 0:
+        modifier = locale[pos:]
+        locale = locale[:pos]
+        mask |= COMPONENT_MODIFIER
+    else:
+        modifier = ''
+    pos = locale.find('.')
+    if pos >= 0:
+        codeset = locale[pos:]
+        locale = locale[:pos]
+        mask |= COMPONENT_CODESET
+    else:
+        codeset = ''
+    pos = locale.find('_')
+    if pos >= 0:
+        territory = locale[pos:]
+        locale = locale[:pos]
+        mask |= COMPONENT_TERRITORY
+    else:
+        territory = ''
+    language = locale
+    ret = []
+    for i in range(mask+1):
+        if not (i & ~mask):  # if all components for this combo exist ...
+            val = language
+            if i & COMPONENT_TERRITORY: val += territory
+            if i & COMPONENT_CODESET:   val += codeset
+            if i & COMPONENT_MODIFIER:  val += modifier
+            ret.append(val)
+    ret.reverse()
+    return ret
+
+
+
class NullTranslations:
    """Base translation class: passes every message through unchanged.

    Also implements the fallback chain and the catalog-metadata
    accessors shared by concrete catalog classes, which override
    _parse() to actually read a catalog file.
    """

    def __init__(self, fp=None):
        self._info = {}
        self._charset = None
        self._output_charset = None
        self._fallback = None
        if fp is not None:
            self._parse(fp)

    def _parse(self, fp):
        # The null catalog is empty; subclasses read *fp* here.
        pass

    def add_fallback(self, fallback):
        """Append *fallback* to the end of the fallback chain."""
        if self._fallback:
            self._fallback.add_fallback(fallback)
        else:
            self._fallback = fallback

    def gettext(self, message):
        fallback = self._fallback
        if fallback:
            return fallback.gettext(message)
        return message

    def lgettext(self, message):
        fallback = self._fallback
        if fallback:
            return fallback.lgettext(message)
        return message

    def ngettext(self, msgid1, msgid2, n):
        fallback = self._fallback
        if fallback:
            return fallback.ngettext(msgid1, msgid2, n)
        if n == 1:
            return msgid1
        return msgid2

    def lngettext(self, msgid1, msgid2, n):
        fallback = self._fallback
        if fallback:
            return fallback.lngettext(msgid1, msgid2, n)
        if n == 1:
            return msgid1
        return msgid2

    def ugettext(self, message):
        fallback = self._fallback
        if fallback:
            return fallback.ugettext(message)
        return unicode(message)

    def ungettext(self, msgid1, msgid2, n):
        fallback = self._fallback
        if fallback:
            return fallback.ungettext(msgid1, msgid2, n)
        if n == 1:
            return unicode(msgid1)
        return unicode(msgid2)

    def info(self):
        """Return the catalog metadata dictionary."""
        return self._info

    def charset(self):
        """Return the charset of the message catalog, if known."""
        return self._charset

    def output_charset(self):
        """Return the charset used to encode translated strings."""
        return self._output_charset

    def set_output_charset(self, charset):
        """Set the charset used to encode translated strings."""
        self._output_charset = charset

    def install(self, unicode=False):
        """Bind this catalog's gettext (or ugettext) into builtins as _()."""
        import __builtin__
        if unicode:
            __builtin__.__dict__['_'] = self.ugettext
        else:
            __builtin__.__dict__['_'] = self.gettext
+
+
class GNUTranslations(NullTranslations):
    # Magic number of .mo files
    LE_MAGIC = 0x950412deL
    BE_MAGIC = 0xde120495L

    def _parse(self, fp):
        """Override this method to support alternative .mo formats."""
        unpack = struct.unpack
        filename = getattr(fp, 'name', '')
        # Parse the .mo file header, which consists of 5 little endian 32
        # bit words.
        self._catalog = catalog = {}
        self.plural = lambda n: int(n != 1) # germanic plural by default
        buf = fp.read()
        buflen = len(buf)
        # Are we big endian or little endian?
        magic = unpack('<I', buf[:4])[0]
        if magic == self.LE_MAGIC:
            version, msgcount, masteridx, transidx = unpack('<4I', buf[4:20])
            ii = '<II'
        elif magic == self.BE_MAGIC:
            version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20])
            ii = '>II'
        else:
            raise IOError(0, 'Bad magic number', filename)
        # Now put all messages from the .mo file buffer into the catalog
        # dictionary.
        for i in xrange(0, msgcount):
            # masteridx/transidx walk the original/translated string
            # tables; each table entry is a (length, offset) pair.
            mlen, moff = unpack(ii, buf[masteridx:masteridx+8])
            mend = moff + mlen
            tlen, toff = unpack(ii, buf[transidx:transidx+8])
            tend = toff + tlen
            if mend < buflen and tend < buflen:
                msg = buf[moff:mend]
                tmsg = buf[toff:tend]
            else:
                raise IOError(0, 'File is corrupt', filename)
            # See if we're looking at GNU .mo conventions for metadata
            if mlen == 0:
                # Catalog description
                lastk = k = None
                for item in tmsg.splitlines():
                    item = item.strip()
                    if not item:
                        continue
                    if ':' in item:
                        k, v = item.split(':', 1)
                        k = k.strip().lower()
                        v = v.strip()
                        self._info[k] = v
                        lastk = k
                    elif lastk:
                        # Continuation line of the previous header.
                        self._info[lastk] += '\n' + item
                    if k == 'content-type':
                        self._charset = v.split('charset=')[1]
                    elif k == 'plural-forms':
                        v = v.split(';')
                        plural = v[1].split('plural=')[1]
                        self.plural = c2py(plural)
            # Note: we unconditionally convert both msgids and msgstrs to
            # Unicode using the character encoding specified in the charset
            # parameter of the Content-Type header.  The gettext documentation
            # strongly encourages msgids to be us-ascii, but some applications
            # require alternative encodings (e.g. Zope's ZCML and ZPT).  For
            # traditional gettext applications, the msgid conversion will
            # cause no problems since us-ascii should always be a subset of
            # the charset encoding.  We may want to fall back to 8-bit msgids
            # if the Unicode conversion fails.
            if '\x00' in msg:
                # Plural forms
                msgid1, msgid2 = msg.split('\x00')
                tmsg = tmsg.split('\x00')
                if self._charset:
                    msgid1 = unicode(msgid1, self._charset)
                    tmsg = [unicode(x, self._charset) for x in tmsg]
                for i in range(len(tmsg)):
                    catalog[(msgid1, i)] = tmsg[i]
            else:
                if self._charset:
                    msg = unicode(msg, self._charset)
                    tmsg = unicode(tmsg, self._charset)
                catalog[msg] = tmsg
            # advance to next entry in the seek tables
            masteridx += 8
            transidx += 8

    def gettext(self, message):
        """Return the translation of *message*, delegating on a miss."""
        # Sentinel distinguishes a missing key from a catalog entry of None.
        missing = object()
        tmsg = self._catalog.get(message, missing)
        if tmsg is missing:
            if self._fallback:
                return self._fallback.gettext(message)
            return message
        # Encode the Unicode tmsg back to an 8-bit string, if possible
        if self._output_charset:
            return tmsg.encode(self._output_charset)
        elif self._charset:
            return tmsg.encode(self._charset)
        return tmsg

    def lgettext(self, message):
        """Like gettext(), but encode with the locale's preferred encoding."""
        missing = object()
        tmsg = self._catalog.get(message, missing)
        if tmsg is missing:
            if self._fallback:
                return self._fallback.lgettext(message)
            return message
        if self._output_charset:
            return tmsg.encode(self._output_charset)
        return tmsg.encode(locale.getpreferredencoding())

    def ngettext(self, msgid1, msgid2, n):
        """Plural-aware gettext(): pick the form selected by self.plural(n)."""
        try:
            tmsg = self._catalog[(msgid1, self.plural(n))]
            if self._output_charset:
                return tmsg.encode(self._output_charset)
            elif self._charset:
                return tmsg.encode(self._charset)
            return tmsg
        except KeyError:
            if self._fallback:
                return self._fallback.ngettext(msgid1, msgid2, n)
            if n == 1:
                return msgid1
            else:
                return msgid2

    def lngettext(self, msgid1, msgid2, n):
        """Plural-aware lgettext()."""
        try:
            tmsg = self._catalog[(msgid1, self.plural(n))]
            if self._output_charset:
                return tmsg.encode(self._output_charset)
            return tmsg.encode(locale.getpreferredencoding())
        except KeyError:
            if self._fallback:
                return self._fallback.lngettext(msgid1, msgid2, n)
            if n == 1:
                return msgid1
            else:
                return msgid2

    def ugettext(self, message):
        """Return the translation of *message* as a Unicode string."""
        missing = object()
        tmsg = self._catalog.get(message, missing)
        if tmsg is missing:
            if self._fallback:
                return self._fallback.ugettext(message)
            return unicode(message)
        return tmsg

    def ungettext(self, msgid1, msgid2, n):
        """Plural-aware ugettext()."""
        try:
            tmsg = self._catalog[(msgid1, self.plural(n))]
        except KeyError:
            if self._fallback:
                return self._fallback.ungettext(msgid1, msgid2, n)
            if n == 1:
                tmsg = unicode(msgid1)
            else:
                tmsg = unicode(msgid2)
        return tmsg
+
+
def find(domain, localedir=None, languages=None, all=0):
    """Locate a .mo file for *domain* using the GNU gettext search rules.

    Returns the first matching path (or None), or a list of every match
    when *all* is true.
    """
    if localedir is None:
        localedir = _default_localedir
    if languages is None:
        # Take the language list from the environment, GNU-style.
        languages = []
        for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
            val = os.environ.get(envar)
            if val:
                languages = val.split(':')
                break
        if 'C' not in languages:
            languages.append('C')
    # Normalize and expand each language, keeping first-seen order.
    nelangs = []
    for lang in languages:
        for nelang in _expand_lang(lang):
            if nelang not in nelangs:
                nelangs.append(nelang)
    if all:
        result = []
    else:
        result = None
    for lang in nelangs:
        # 'C' means "no translation": stop searching there.
        if lang == 'C':
            break
        mofile = os.path.join(localedir, lang, 'LC_MESSAGES', '%s.mo' % domain)
        if os.path.exists(mofile):
            if not all:
                return mofile
            result.append(mofile)
    return result
+
+
+
# Cache of Translation objects, keyed by absolute .mo file path.
_translations = {}

def translation(domain, localedir=None, languages=None,
                class_=None, fallback=False, codeset=None):
    """Return a *class_* (default GNUTranslations) instance for *domain*.

    The first catalog found becomes the primary translation; any further
    matches are chained in as fallbacks.  With no match at all, raises
    IOError unless *fallback* is true, in which case a NullTranslations
    instance is returned.
    """
    if class_ is None:
        class_ = GNUTranslations
    mofiles = find(domain, localedir, languages, all=1)
    if not mofiles:
        if fallback:
            return NullTranslations()
        raise IOError(ENOENT, 'No translation file found for domain', domain)
    # TBD: do we need to worry about the file pointer getting collected?
    result = None
    for mofile in mofiles:
        key = os.path.abspath(mofile)
        # Open, read and parse each .mo file only once per process.
        t = _translations.get(key)
        if t is None:
            t = _translations.setdefault(key, class_(open(mofile, 'rb')))
        # Shallow-copy the cached object so callers can set their own
        # fallback chain and output charset without affecting the cache.
        t = copy.copy(t)
        if codeset:
            t.set_output_charset(codeset)
        if result is None:
            result = t
        else:
            result.add_fallback(t)
    return result
+
+
def install(domain, localedir=None, unicode=False, codeset=None):
    """Install this domain's _() into builtins; never raises IOError."""
    translation(domain, localedir,
                fallback=True, codeset=codeset).install(unicode)
+
+
+
# Maps domain name -> locale directory (set via bindtextdomain()).
_localedirs = {}
# Maps domain name -> output codeset (set via bind_textdomain_codeset()).
_localecodesets = {}
# Current global domain; `messages' for compatibility w/ GNU gettext.
_current_domain = 'messages'
+
+
def textdomain(domain=None):
    """Set the global domain to *domain* (unless None) and return it."""
    global _current_domain
    if domain is None:
        return _current_domain
    _current_domain = domain
    return _current_domain
+
+
def bindtextdomain(domain, localedir=None):
    """Bind *domain* to *localedir* (unless None); return its directory."""
    global _localedirs
    if localedir is not None:
        _localedirs[domain] = localedir
    try:
        return _localedirs[domain]
    except KeyError:
        return _default_localedir
+
+
def bind_textdomain_codeset(domain, codeset=None):
    """Bind *domain* to *codeset* (unless None); return its codeset or None."""
    global _localecodesets
    if codeset is not None:
        _localecodesets[domain] = codeset
    try:
        return _localecodesets[domain]
    except KeyError:
        return None
+
+
def _domain_translation(domain):
    # Build a translation for *domain* from its registered locale
    # directory and codeset; raises IOError when no catalog is found.
    return translation(domain, _localedirs.get(domain, None),
                       codeset=_localecodesets.get(domain))

def dgettext(domain, message):
    """Like gettext(), but look the message up in the given *domain*."""
    try:
        t = _domain_translation(domain)
    except IOError:
        return message
    return t.gettext(message)

def ldgettext(domain, message):
    """Like dgettext(), but encode with the locale's preferred encoding."""
    try:
        t = _domain_translation(domain)
    except IOError:
        return message
    return t.lgettext(message)

def dngettext(domain, msgid1, msgid2, n):
    """Plural-aware dgettext(); falls back to msgid1/msgid2 chosen by *n*."""
    try:
        t = _domain_translation(domain)
    except IOError:
        if n == 1:
            return msgid1
        return msgid2
    return t.ngettext(msgid1, msgid2, n)

def ldngettext(domain, msgid1, msgid2, n):
    """Plural-aware ldgettext(); falls back to msgid1/msgid2 chosen by *n*."""
    try:
        t = _domain_translation(domain)
    except IOError:
        if n == 1:
            return msgid1
        return msgid2
    return t.lngettext(msgid1, msgid2, n)
+
def gettext(message):
    """Translate *message* in the current global domain."""
    return dgettext(_current_domain, message)

def lgettext(message):
    """Locale-encoded gettext() in the current global domain."""
    return ldgettext(_current_domain, message)

def ngettext(msgid1, msgid2, n):
    """Plural-aware gettext() in the current global domain."""
    return dngettext(_current_domain, msgid1, msgid2, n)

def lngettext(msgid1, msgid2, n):
    """Locale-encoded ngettext() in the current global domain."""
    return ldngettext(_current_domain, msgid1, msgid2, n)
+
# dcgettext() has been deemed unnecessary and is not implemented.

# James Henstridge's Catalog constructor from GNOME gettext.  Documented usage
# was:
#
#    import gettext
#    cat = gettext.Catalog(PACKAGE, localedir=LOCALEDIR)
#    _ = cat.gettext
#    print _('Hello World')

# The resulting catalog object currently doesn't support access through a
# dictionary API, which was supported (but apparently unused) in GNOME
# gettext.

# GNOME-compatible alias for translation().
Catalog = translation
diff --git a/depot_tools/release/win/python_24/Lib/glob.py b/depot_tools/release/win/python_24/Lib/glob.py
new file mode 100644
index 0000000..4ba4138
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/glob.py
@@ -0,0 +1,56 @@
+"""Filename globbing utility."""
+
+import os
+import fnmatch
+import re
+
+__all__ = ["glob"]
+
def glob(pathname):
    """Return a list of paths matching a pathname pattern.

    The pattern may contain simple shell-style wildcards a la fnmatch.
    Matching is based on os.path.lexists(), so broken symlinks are
    included in the result.
    """
    if not has_magic(pathname):
        # No wildcards: the pattern names at most one path.
        if os.path.lexists(pathname):
            return [pathname]
        else:
            return []
    dirname, basename = os.path.split(pathname)
    if not dirname:
        # Pure-basename pattern: match inside the current directory.
        return glob1(os.curdir, basename)
    elif has_magic(dirname):
        # Wildcards in the directory part too: expand it recursively.
        # (Renamed from "list", which shadowed the builtin.)
        dirnames = glob(dirname)
    else:
        dirnames = [dirname]
    result = []
    if not has_magic(basename):
        for dirname in dirnames:
            # An empty basename means the pattern ended in a separator,
            # so it can only match a directory.
            if basename or os.path.isdir(dirname):
                name = os.path.join(dirname, basename)
                if os.path.lexists(name):
                    result.append(name)
    else:
        for dirname in dirnames:
            sublist = glob1(dirname, basename)
            for name in sublist:
                result.append(os.path.join(dirname, name))
    return result
+
def glob1(dirname, pattern):
    """Return the entries of *dirname* that match *pattern*.

    Hidden entries (leading '.') are skipped unless the pattern itself
    starts with '.'.  An unreadable directory yields an empty list.
    """
    directory = dirname
    if not directory:
        directory = os.curdir
    try:
        entries = os.listdir(directory)
    except os.error:
        return []
    if pattern[0] != '.':
        entries = [entry for entry in entries if entry[0] != '.']
    return fnmatch.filter(entries, pattern)
+
+
# Any of these characters makes a pattern "magic" (i.e. a wildcard).
magic_check = re.compile('[*?[]')

def has_magic(s):
    """Return True if *s* contains a shell wildcard (*, ? or [...])."""
    return bool(magic_check.search(s))
diff --git a/depot_tools/release/win/python_24/Lib/gopherlib.py b/depot_tools/release/win/python_24/Lib/gopherlib.py
new file mode 100644
index 0000000..01eab0a3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/gopherlib.py
@@ -0,0 +1,205 @@
+"""Gopher protocol client interface."""
+
+__all__ = ["send_selector","send_query"]
+
+# Default selector, host and port
+DEF_SELECTOR = '1/'
+DEF_HOST     = 'gopher.micro.umn.edu'
+DEF_PORT     = 70
+
+# Recognized file types
+A_TEXT       = '0'
+A_MENU       = '1'
+A_CSO        = '2'
+A_ERROR      = '3'
+A_MACBINHEX  = '4'
+A_PCBINHEX   = '5'
+A_UUENCODED  = '6'
+A_INDEX      = '7'
+A_TELNET     = '8'
+A_BINARY     = '9'
+A_DUPLICATE  = '+'
+A_SOUND      = 's'
+A_EVENT      = 'e'
+A_CALENDAR   = 'c'
+A_HTML       = 'h'
+A_TN3270     = 'T'
+A_MIME       = 'M'
+A_IMAGE      = 'I'
+A_WHOIS      = 'w'
+A_QUERY      = 'q'
+A_GIF        = 'g'
+A_HTML       = 'h'          # HTML file
+A_WWW        = 'w'          # WWW address
+A_PLUS_IMAGE = ':'
+A_PLUS_MOVIE = ';'
+A_PLUS_SOUND = '<'
+
+
# Snapshot of the module's names so the A_* constants can be mapped lazily.
_names = dir()
_type_to_name_map = {}
def type_to_name(gtype):
    """Map all file types to strings; unknown types become TYPE='x'."""
    global _type_to_name_map
    if _type_to_name_map == {}:
        # Build the map lazily from the module's A_* constants.  Look the
        # names up via globals() instead of eval() -- same result, no
        # dynamic code evaluation.
        for name in _names:
            if name[:2] == 'A_':
                _type_to_name_map[globals()[name]] = name[2:]
    if gtype in _type_to_name_map:
        return _type_to_name_map[gtype]
    return 'TYPE=%r' % (gtype,)
+
# Names for characters and strings
CRLF = '\r\n'
TAB = '\t'

def send_selector(selector, host, port = 0):
    """Send a selector to a given host and port, return a file with the reply."""
    import socket
    if not port:
        # Accept "host:port" in the host argument.
        colon = host.find(':')
        if colon >= 0:
            host, port = host[:colon], int(host[colon+1:])
    if not port:
        port = DEF_PORT
    elif type(port) == type(''):
        port = int(port)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((host, port))
    sock.sendall(selector + CRLF)
    # Half-close the write side so the server sees end-of-request.
    sock.shutdown(1)
    return sock.makefile('rb')
+
def send_query(selector, query, host, port = 0):
    """Send a selector and a query string."""
    # A full-text search request is a selector plus a tab-separated query.
    full_selector = selector + '\t' + query
    return send_selector(full_selector, host, port)
+
def path_to_selector(path):
    """Takes a path as returned by urlparse and returns the appropriate selector."""
    if path == "/":
        return "/"
    # Drop the leading slash and the one-character data type identifier.
    return path[2:]
+
def path_to_datatype_name(path):
    """Takes a path as returned by urlparse and maps it to a string.
    See section 3.4 of RFC 1738 for details."""
    if path == "/":
        # No way to tell, although "INDEX" is likely
        return "TYPE='unknown'"
    # The character right after the leading slash is the gopher type code.
    return type_to_name(path[1])
+
+# The following functions interpret the data returned by the gopher
+# server according to the expected type, e.g. textfile or directory
+
def get_directory(f):
    """Get a directory in the form of a list of entries.

    Each returned entry is the type character followed by the
    tab-separated fields of the menu line (per RFC 1436 these are
    presumably display string, selector, host, port -- confirm).
    """
    entries = []
    while 1:
        line = f.readline()
        if not line:
            print '(Unexpected EOF from server)'
            break
        # Strip the line terminator (CRLF, or a lone CR or LF).
        if line[-2:] == CRLF:
            line = line[:-2]
        elif line[-1:] in CRLF:
            line = line[:-1]
        # A line consisting of a single "." terminates the listing.
        if line == '.':
            break
        if not line:
            print '(Empty line from server)'
            continue
        # First character is the gopher type; the rest is tab-separated.
        gtype = line[0]
        parts = line[1:].split(TAB)
        if len(parts) < 4:
            print '(Bad line from server: %r)' % (line,)
            continue
        if len(parts) > 4:
            # A lone trailing "+" marks a Gopher+ entry; anything else
            # is unexpected extra data.
            if parts[4:] != ['+']:
                print '(Extra info from server:',
                print parts[4:], ')'
        else:
            # Pad so every entry has the same number of fields.
            parts.append('')
        parts.insert(0, gtype)
        entries.append(parts)
    return entries
+
def get_textfile(f):
    """Get a text file as a list of lines, with trailing CRLF stripped."""
    collected = []
    get_alt_textfile(f, collected.append)
    return collected
+
def get_alt_textfile(f, func):
    """Get a text file and pass each line to a function, with trailing CRLF stripped."""
    while 1:
        line = f.readline()
        if not line:
            print '(Unexpected EOF from server)'
            break
        # Strip the line terminator (CRLF, or a lone CR or LF).
        if line[-2:] == CRLF:
            line = line[:-2]
        elif line[-1:] in CRLF:
            line = line[:-1]
        # A line consisting of a single "." terminates the text body.
        if line == '.':
            break
        # Undo the protocol's dot-stuffing: a line starting with ".."
        # stands for a data line starting with ".".
        if line[:2] == '..':
            line = line[1:]
        func(line)
+
def get_binary(f):
    """Get a binary file as one solid data block."""
    return f.read()
+
def get_alt_binary(f, func, blocksize):
    """Get a binary file and pass each block of *blocksize* bytes to *func*."""
    while True:
        block = f.read(blocksize)
        if not block:
            return
        func(block)
+
def test():
    """Trivial test program.

    Usage: gopherlib.py [host [type-or-selector [selector [query]]]]
    """
    import sys
    import getopt
    opts, args = getopt.getopt(sys.argv[1:], '')
    # Defaults: the default selector; its first character is its type.
    selector = DEF_SELECTOR
    type = selector[0]
    host = DEF_HOST
    if args:
        host = args[0]
        args = args[1:]
    if args:
        type = args[0]
        args = args[1:]
        if len(type) > 1:
            # A multi-character "type" argument is really a full selector
            # whose first character is the type.
            type, selector = type[0], type
        else:
            selector = ''
            if args:
                selector = args[0]
                args = args[1:]
        query = ''
        if args:
            query = args[0]
            args = args[1:]
    # NOTE(review): 'query' is only bound inside the "if args" block above,
    # but type can only equal A_INDEX when that block ran, so the reference
    # below is safe.
    if type == A_INDEX:
        f = send_query(selector, query, host)
    else:
        f = send_selector(selector, host)
    if type == A_TEXT:
        lines = get_textfile(f)
        for item in lines: print item
    elif type in (A_MENU, A_INDEX):
        entries = get_directory(f)
        for item in entries: print item
    else:
        data = get_binary(f)
        print 'binary data:', len(data), 'bytes:', repr(data[:100])[:40]
+
# Run the trivial test program when executed as a script.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/gzip.py b/depot_tools/release/win/python_24/Lib/gzip.py
new file mode 100644
index 0000000..5f638c4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/gzip.py
@@ -0,0 +1,475 @@
+"""Functions that read and write gzipped files.
+
+The user of the file doesn't have to worry about the compression,
+but random access is not allowed."""
+
+# based on Andrew Kuchling's minigzip.py distributed with the zlib module
+
+import struct, sys, time
+import zlib
+import __builtin__
+
+__all__ = ["GzipFile","open"]
+
# Gzip header flag bits (RFC 1952): text hint, header CRC, extra field,
# original file name, and comment, respectively.
FTEXT, FHCRC, FEXTRA, FNAME, FCOMMENT = 1, 2, 4, 8, 16

# Internal values for GzipFile.mode.
READ, WRITE = 1, 2
+
def U32(i):
    """Return i as an unsigned integer, assuming it fits in 32 bits.

    If it's >= 2GB when viewed as a 32-bit unsigned int, return a long.
    """
    if i < 0:
        # Reinterpret a negative 32-bit value as its unsigned equivalent
        # (the addition promotes to a Python long where necessary).
        i += 1L << 32
    return i
+
def LOWU32(i):
    """Return the low-order 32 bits of an int, as a non-negative int."""
    # The long literal keeps the mask value unsigned on 32-bit platforms.
    return i & 0xFFFFFFFFL
+
def write32(output, value):
    """Write *value* to *output* as a little-endian signed 32-bit integer."""
    packed = struct.pack("<l", value)
    output.write(packed)
+
def write32u(output, value):
    """Write *value* to *output* as a little-endian 32-bit integer.

    The L format writes the bit pattern correctly whether the Python
    value is signed or unsigned.
    """
    packed = struct.pack("<L", value)
    output.write(packed)
+
def read32(input):
    """Read a little-endian signed 32-bit integer from *input*."""
    raw = input.read(4)
    return struct.unpack("<l", raw)[0]
+
def open(filename, mode="rb", compresslevel=9):
    """Open a gzip-compressed file.

    Shorthand for GzipFile(filename, mode, compresslevel).  The filename
    argument is required; mode defaults to 'rb' and compresslevel to 9.
    """
    gz = GzipFile(filename, mode, compresslevel)
    return gz
+
class GzipFile:
    """The GzipFile class simulates most of the methods of a file object with
    the exception of the readinto() and truncate() methods.

    """

    # Set only when GzipFile opened the underlying file itself (a filename
    # was given without a fileobj); close() must then close that file too.
    myfileobj = None

    def __init__(self, filename=None, mode=None,
                 compresslevel=9, fileobj=None):
        """Constructor for the GzipFile class.

        At least one of fileobj and filename must be given a
        non-trivial value.

        The new class instance is based on fileobj, which can be a regular
        file, a StringIO object, or any other object which simulates a file.
        It defaults to None, in which case filename is opened to provide
        a file object.

        When fileobj is not None, the filename argument is only used to be
        included in the gzip file header, which may include the original
        filename of the uncompressed file.  It defaults to the filename of
        fileobj, if discernible; otherwise, it defaults to the empty string,
        and in this case the original filename is not included in the header.

        The mode argument can be any of 'r', 'rb', 'a', 'ab', 'w', or 'wb',
        depending on whether the file will be read or written.  The default
        is the mode of fileobj if discernible; otherwise, the default is 'rb'.
        Be aware that only the 'rb', 'ab', and 'wb' values should be used
        for cross-platform portability.

        The compresslevel argument is an integer from 1 to 9 controlling the
        level of compression; 1 is fastest and produces the least compression,
        and 9 is slowest and produces the most compression.  The default is 9.

        """

        # guarantee the file is opened in binary mode on platforms
        # that care about that sort of thing
        if mode and 'b' not in mode:
            mode += 'b'
        if fileobj is None:
            fileobj = self.myfileobj = __builtin__.open(filename, mode or 'rb')
        if filename is None:
            if hasattr(fileobj, 'name'): filename = fileobj.name
            else: filename = ''
        if mode is None:
            if hasattr(fileobj, 'mode'): mode = fileobj.mode
            else: mode = 'rb'

        if mode[0:1] == 'r':
            self.mode = READ
            # Set flag indicating start of a new member
            self._new_member = True
            # Decompressed-but-not-yet-returned data sits in extrabuf.
            self.extrabuf = ""
            self.extrasize = 0
            self.filename = filename

        elif mode[0:1] == 'w' or mode[0:1] == 'a':
            self.mode = WRITE
            self._init_write(filename)
            # Raw deflate stream (negative wbits): the gzip header and
            # trailer are produced explicitly by this class.
            self.compress = zlib.compressobj(compresslevel,
                                             zlib.DEFLATED,
                                             -zlib.MAX_WBITS,
                                             zlib.DEF_MEM_LEVEL,
                                             0)
        else:
            raise IOError, "Mode " + mode + " not supported"

        self.fileobj = fileobj
        # Position in the uncompressed stream; this is what tell() reports.
        self.offset = 0

        if self.mode == WRITE:
            self._write_gzip_header()

    def __repr__(self):
        """Return '<gzip <repr of underlying file> 0x...>'."""
        s = repr(self.fileobj)
        return '<gzip ' + s[1:-1] + ' ' + hex(id(self)) + '>'

    def _init_write(self, filename):
        """Reset CRC/size bookkeeping; ensure the stored name ends in '.gz'."""
        if filename[-3:] != '.gz':
            filename = filename + '.gz'
        self.filename = filename
        self.crc = zlib.crc32("")
        self.size = 0
        self.writebuf = []
        self.bufsize = 0

    def _write_gzip_header(self):
        """Emit an RFC 1952 gzip member header on the underlying file."""
        self.fileobj.write('\037\213')             # magic header
        self.fileobj.write('\010')                 # compression method
        # Strip the '.gz' that _init_write() appended before storing the name.
        fname = self.filename[:-3]
        flags = 0
        if fname:
            flags = FNAME
        self.fileobj.write(chr(flags))
        write32u(self.fileobj, long(time.time()))  # modification time
        self.fileobj.write('\002')                 # extra flags (XFL)
        self.fileobj.write('\377')                 # OS byte: 255 = unknown
        if fname:
            self.fileobj.write(fname + '\000')     # NUL-terminated name

    def _init_read(self):
        """Reset the running CRC and size before reading a new member."""
        self.crc = zlib.crc32("")
        self.size = 0

    def _read_gzip_header(self):
        """Parse and discard one gzip member header; raise IOError if bad."""
        magic = self.fileobj.read(2)
        if magic != '\037\213':
            raise IOError, 'Not a gzipped file'
        method = ord( self.fileobj.read(1) )
        if method != 8:
            raise IOError, 'Unknown compression method'
        flag = ord( self.fileobj.read(1) )
        # modtime = self.fileobj.read(4)
        # extraflag = self.fileobj.read(1)
        # os = self.fileobj.read(1)
        self.fileobj.read(6)

        if flag & FEXTRA:
            # Read & discard the extra field, if present
            xlen = ord(self.fileobj.read(1))
            xlen = xlen + 256*ord(self.fileobj.read(1))
            self.fileobj.read(xlen)
        if flag & FNAME:
            # Read and discard a null-terminated string containing the filename
            while True:
                s = self.fileobj.read(1)
                if not s or s=='\000':
                    break
        if flag & FCOMMENT:
            # Read and discard a null-terminated string containing a comment
            while True:
                s = self.fileobj.read(1)
                if not s or s=='\000':
                    break
        if flag & FHCRC:
            self.fileobj.read(2)     # Read & discard the 16-bit header CRC


    def write(self,data):
        """Compress *data* and write it; only valid in write mode."""
        if self.mode != WRITE:
            import errno
            raise IOError(errno.EBADF, "write() on read-only GzipFile object")

        if self.fileobj is None:
            raise ValueError, "write() on closed GzipFile object"
        if len(data) > 0:
            self.size = self.size + len(data)
            self.crc = zlib.crc32(data, self.crc)
            self.fileobj.write( self.compress.compress(data) )
            self.offset += len(data)

    def read(self, size=-1):
        """Return up to *size* bytes of uncompressed data (all remaining
        data when size < 0); only valid in read mode."""
        if self.mode != READ:
            import errno
            raise IOError(errno.EBADF, "read() on write-only GzipFile object")

        if self.extrasize <= 0 and self.fileobj is None:
            return ''

        readsize = 1024
        if size < 0:        # get the whole thing
            try:
                # Keep doubling the chunk size until EOF is reached.
                while True:
                    self._read(readsize)
                    readsize = readsize * 2
            except EOFError:
                size = self.extrasize
        else:               # just get some more of it
            try:
                while size > self.extrasize:
                    self._read(readsize)
                    readsize = readsize * 2
            except EOFError:
                if size > self.extrasize:
                    size = self.extrasize

        chunk = self.extrabuf[:size]
        self.extrabuf = self.extrabuf[size:]
        self.extrasize = self.extrasize - size

        self.offset += size
        return chunk

    def _unread(self, buf):
        """Push *buf* back so the next read() returns it first."""
        self.extrabuf = buf + self.extrabuf
        self.extrasize = len(buf) + self.extrasize
        self.offset -= len(buf)

    def _read(self, size=1024):
        """Read one compressed chunk, appending the decompressed bytes to
        the internal buffer; raise EOFError at the end of the last member."""
        if self.fileobj is None:
            raise EOFError, "Reached EOF"

        if self._new_member:
            # If the _new_member flag is set, we have to
            # jump to the next member, if there is one.
            #
            # First, check if we're at the end of the file;
            # if so, it's time to stop; no more members to read.
            pos = self.fileobj.tell()   # Save current position
            self.fileobj.seek(0, 2)     # Seek to end of file
            if pos == self.fileobj.tell():
                raise EOFError, "Reached EOF"
            else:
                self.fileobj.seek( pos ) # Return to original position

            self._init_read()
            self._read_gzip_header()
            self.decompress = zlib.decompressobj(-zlib.MAX_WBITS)
            self._new_member = False

        # Read a chunk of data from the file
        buf = self.fileobj.read(size)

        # If the EOF has been reached, flush the decompression object
        # and mark this object as finished.

        if buf == "":
            uncompress = self.decompress.flush()
            self._read_eof()
            self._add_read_data( uncompress )
            raise EOFError, 'Reached EOF'

        uncompress = self.decompress.decompress(buf)
        self._add_read_data( uncompress )

        if self.decompress.unused_data != "":
            # Ending case: we've come to the end of a member in the file,
            # so seek back to the start of the unused data, finish up
            # this member, and read a new gzip header.
            # (The number of bytes to seek back is the length of the unused
            # data, minus 8 because _read_eof() will rewind a further 8 bytes)
            self.fileobj.seek( -len(self.decompress.unused_data)+8, 1)

            # Check the CRC and file size, and set the flag so we read
            # a new member on the next call
            self._read_eof()
            self._new_member = True

    def _add_read_data(self, data):
        """Fold decompressed *data* into the CRC/size and the read buffer."""
        self.crc = zlib.crc32(data, self.crc)
        self.extrabuf = self.extrabuf + data
        self.extrasize = self.extrasize + len(data)
        self.size = self.size + len(data)

    def _read_eof(self):
        """Verify the member trailer (stored CRC and uncompressed size)."""
        # We've read to the end of the file, so we have to rewind in order
        # to reread the 8 bytes containing the CRC and the file size.
        # We check that the computed CRC and size of the
        # uncompressed data matches the stored values.  Note that the size
        # stored is the true file size mod 2**32.
        self.fileobj.seek(-8, 1)
        crc32 = read32(self.fileobj)
        isize = U32(read32(self.fileobj))   # may exceed 2GB
        if U32(crc32) != U32(self.crc):
            raise IOError, "CRC check failed"
        elif isize != LOWU32(self.size):
            raise IOError, "Incorrect length of data produced"

    def close(self):
        """Finish the stream and detach from the underlying file.

        In write mode the remaining compressed data and the gzip trailer
        (CRC and size) are written first.  The underlying file object is
        closed only if this GzipFile opened it itself.
        """
        if self.mode == WRITE:
            self.fileobj.write(self.compress.flush())
            write32(self.fileobj, self.crc)
            # self.size may exceed 2GB, or even 4GB
            write32u(self.fileobj, LOWU32(self.size))
            self.fileobj = None
        elif self.mode == READ:
            self.fileobj = None
        if self.myfileobj:
            self.myfileobj.close()
            self.myfileobj = None

    def __del__(self):
        # Close on garbage collection unless already closed.  The
        # AttributeError guard covers instances whose __init__ raised
        # before the attributes were assigned.
        try:
            if (self.myfileobj is None and
                self.fileobj is None):
                return
        except AttributeError:
            return
        self.close()

    def flush(self):
        # NOTE(review): in write mode this flushes only the underlying
        # file object; data still buffered inside the compressor is not
        # emitted until close().
        self.fileobj.flush()

    def fileno(self):
        """Invoke the underlying file object's fileno() method.

        This will raise AttributeError if the underlying file object
        doesn't support fileno().
        """
        return self.fileobj.fileno()

    def isatty(self):
        """A GzipFile is never a tty."""
        return False

    def tell(self):
        """Return the current position in the *uncompressed* stream."""
        return self.offset

    def rewind(self):
        '''Return the uncompressed stream file position indicator to the
        beginning of the file'''
        if self.mode != READ:
            raise IOError("Can't rewind in write mode")
        self.fileobj.seek(0)
        self._new_member = True
        self.extrabuf = ""
        self.extrasize = 0
        self.offset = 0

    def seek(self, offset):
        """Seek to *offset* in the uncompressed stream.

        Write mode allows only forward seeks (the gap is filled with zero
        bytes); read mode emulates a backwards seek by rewinding and
        reading forward.
        """
        if self.mode == WRITE:
            if offset < self.offset:
                raise IOError('Negative seek in write mode')
            count = offset - self.offset
            for i in range(count // 1024):
                self.write(1024 * '\0')
            self.write((count % 1024) * '\0')
        elif self.mode == READ:
            if offset < self.offset:
                # for negative seek, rewind and do positive seek
                self.rewind()
            count = offset - self.offset
            for i in range(count // 1024):
                self.read(1024)
            self.read(count % 1024)

    def readline(self, size=-1):
        """Read one line; at most *size* bytes when size >= 0."""
        if size < 0: size = sys.maxint
        bufs = []
        readsize = min(100, size)    # Read from the file in small chunks
        while True:
            if size == 0:
                return "".join(bufs) # Return resulting line

            c = self.read(readsize)
            i = c.find('\n')
            # NOTE(review): size is always an int by this point (negative
            # values were replaced by sys.maxint above), so this guard can
            # never fail; kept as-is for byte-compatibility.
            if size is not None:
                # We set i=size to break out of the loop under two
                # conditions: 1) there's no newline, and the chunk is
                # larger than size, or 2) there is a newline, but the
                # resulting line would be longer than 'size'.
                if i==-1 and len(c) > size: i=size-1
                elif size <= i: i = size -1

            if i >= 0 or c == '':
                bufs.append(c[:i+1])    # Add portion of last chunk
                self._unread(c[i+1:])   # Push back rest of chunk
                return ''.join(bufs)    # Return resulting line

            # Append chunk to list, decrease 'size',
            bufs.append(c)
            size = size - len(c)
            readsize = min(size, readsize * 2)

    def readlines(self, sizehint=0):
        """Return a list of lines; stop once about *sizehint* bytes are read."""
        # Negative numbers result in reading all the lines
        if sizehint <= 0:
            sizehint = sys.maxint
        L = []
        while sizehint > 0:
            line = self.readline()
            if line == "":
                break
            L.append(line)
            sizehint = sizehint - len(line)

        return L

    def writelines(self, L):
        """Write every string in the sequence *L* (no newlines are added)."""
        for line in L:
            self.write(line)

    def __iter__(self):
        """Iterating a GzipFile yields its lines."""
        return self

    def next(self):
        """Return the next line, or raise StopIteration at EOF."""
        line = self.readline()
        if line:
            return line
        else:
            raise StopIteration
+
+
def _test():
    """Command-line driver: compress (default) or decompress (-d) files."""
    # Act like gzip; with -d, act like gunzip.
    # The input file is not deleted, however, nor are any other gzip
    # options or features supported.
    args = sys.argv[1:]
    decompress = args and args[0] == "-d"
    if decompress:
        args = args[1:]
    if not args:
        # No file arguments: filter stdin to stdout.
        args = ["-"]
    for arg in args:
        if decompress:
            if arg == "-":
                f = GzipFile(filename="", mode="rb", fileobj=sys.stdin)
                g = sys.stdout
            else:
                if arg[-3:] != ".gz":
                    print "filename doesn't end in .gz:", repr(arg)
                    continue
                f = open(arg, "rb")
                g = __builtin__.open(arg[:-3], "wb")
        else:
            if arg == "-":
                f = sys.stdin
                g = GzipFile(filename="", mode="wb", fileobj=sys.stdout)
            else:
                f = __builtin__.open(arg, "rb")
                g = open(arg + ".gz", "wb")
        # Copy the stream in 1 KB chunks.
        while True:
            chunk = f.read(1024)
            if not chunk:
                break
            g.write(chunk)
        if g is not sys.stdout:
            g.close()
        if f is not sys.stdin:
            f.close()

if __name__ == '__main__':
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/heapq.py b/depot_tools/release/win/python_24/Lib/heapq.py
new file mode 100644
index 0000000..b4ebb91
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/heapq.py
@@ -0,0 +1,319 @@
+# -*- coding: Latin-1 -*-
+
+"""Heap queue algorithm (a.k.a. priority queue).
+
+Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
+all k, counting elements from 0.  For the sake of comparison,
+non-existing elements are considered to be infinite.  The interesting
+property of a heap is that a[0] is always its smallest element.
+
+Usage:
+
+heap = []            # creates an empty heap
+heappush(heap, item) # pushes a new item on the heap
+item = heappop(heap) # pops the smallest item from the heap
+item = heap[0]       # smallest item on the heap without popping it
+heapify(x)           # transforms list into a heap, in-place, in linear time
+item = heapreplace(heap, item) # pops and returns smallest item, and adds
+                               # new item; the heap size is unchanged
+
+Our API differs from textbook heap algorithms as follows:
+
+- We use 0-based indexing.  This makes the relationship between the
+  index for a node and the indexes for its children slightly less
+  obvious, but is more suitable since Python uses 0-based indexing.
+
+- Our heappop() method returns the smallest item, not the largest.
+
+These two make it possible to view the heap as a regular Python list
+without surprises: heap[0] is the smallest item, and heap.sort()
+maintains the heap invariant!
+"""
+
+# Original code by Kevin O'Connor, augmented by Tim Peters and Raymond Hettinger
+
+__about__ = """Heap queues
+
+[explanation by François Pinard]
+
+Heaps are arrays for which a[k] <= a[2*k+1] and a[k] <= a[2*k+2] for
+all k, counting elements from 0.  For the sake of comparison,
+non-existing elements are considered to be infinite.  The interesting
+property of a heap is that a[0] is always its smallest element.
+
+The strange invariant above is meant to be an efficient memory
+representation for a tournament.  The numbers below are `k', not a[k]:
+
+                                   0
+
+                  1                                 2
+
+          3               4                5               6
+
+      7       8       9       10      11      12      13      14
+
+    15 16   17 18   19 20   21 22   23 24   25 26   27 28   29 30
+
+
+In the tree above, each cell `k' is topping `2*k+1' and `2*k+2'.  In
+an usual binary tournament we see in sports, each cell is the winner
+over the two cells it tops, and we can trace the winner down the tree
+to see all opponents s/he had.  However, in many computer applications
+of such tournaments, we do not need to trace the history of a winner.
+To be more memory efficient, when a winner is promoted, we try to
+replace it by something else at a lower level, and the rule becomes
+that a cell and the two cells it tops contain three different items,
+but the top cell "wins" over the two topped cells.
+
+If this heap invariant is protected at all time, index 0 is clearly
+the overall winner.  The simplest algorithmic way to remove it and
+find the "next" winner is to move some loser (let's say cell 30 in the
+diagram above) into the 0 position, and then percolate this new 0 down
+the tree, exchanging values, until the invariant is re-established.
+This is clearly logarithmic on the total number of items in the tree.
+By iterating over all items, you get an O(n ln n) sort.
+
+A nice feature of this sort is that you can efficiently insert new
+items while the sort is going on, provided that the inserted items are
+not "better" than the last 0'th element you extracted.  This is
+especially useful in simulation contexts, where the tree holds all
+incoming events, and the "win" condition means the smallest scheduled
+time.  When an event schedule other events for execution, they are
+scheduled into the future, so they can easily go into the heap.  So, a
+heap is a good structure for implementing schedulers (this is what I
+used for my MIDI sequencer :-).
+
+Various structures for implementing schedulers have been extensively
+studied, and heaps are good for this, as they are reasonably speedy,
+the speed is almost constant, and the worst case is not much different
+than the average case.  However, there are other representations which
+are more efficient overall, yet the worst cases might be terrible.
+
+Heaps are also very useful in big disk sorts.  You most probably all
+know that a big sort implies producing "runs" (which are pre-sorted
+sequences, which size is usually related to the amount of CPU memory),
+followed by a merging passes for these runs, which merging is often
+very cleverly organised[1].  It is very important that the initial
+sort produces the longest runs possible.  Tournaments are a good way
+to that.  If, using all the memory available to hold a tournament, you
+replace and percolate items that happen to fit the current run, you'll
+produce runs which are twice the size of the memory for random input,
+and much better for input fuzzily ordered.
+
+Moreover, if you output the 0'th item on disk and get an input which
+may not fit in the current tournament (because the value "wins" over
+the last output value), it cannot fit in the heap, so the size of the
+heap decreases.  The freed memory could be cleverly reused immediately
+for progressively building a second heap, which grows at exactly the
+same rate the first heap is melting.  When the first heap completely
+vanishes, you switch heaps and start a new run.  Clever and quite
+effective!
+
+In a word, heaps are useful memory structures to know.  I use them in
+a few applications, and I think it is good to keep a `heap' module
+around. :-)
+
+--------------------
+[1] The disk balancing algorithms which are current, nowadays, are
+more annoying than clever, and this is a consequence of the seeking
+capabilities of the disks.  On devices which cannot seek, like big
+tape drives, the story was quite different, and one had to be very
+clever to ensure (far in advance) that each tape movement will be the
+most effective possible (that is, will best participate at
+"progressing" the merge).  Some tapes were even able to read
+backwards, and this was also used to avoid the rewinding time.
+Believe me, real good tape sorts were quite spectacular to watch!
+From all times, sorting has always been a Great Art! :-)
+"""
+
+__all__ = ['heappush', 'heappop', 'heapify', 'heapreplace', 'nlargest',
+           'nsmallest']
+
+from itertools import islice, repeat
+import bisect
+
def heappush(heap, item):
    """Add *item* to *heap*, keeping the heap invariant intact."""
    heap.append(item)
    # The new entry starts as the last leaf; sift it toward the root.
    _siftdown(heap, 0, len(heap) - 1)
+
def heappop(heap):
    """Remove and return the smallest item, keeping the heap invariant."""
    # list.pop() raises IndexError on an empty heap, as documented.
    tail = heap.pop()
    if not heap:
        return tail
    smallest = heap[0]
    # Plant the old tail at the root and let it sink to its place.
    heap[0] = tail
    _siftup(heap, 0)
    return smallest
+
def heapreplace(heap, item):
    """Pop the smallest value and push *item* in one combined operation.

    Faster than a heappop() followed by a heappush() and more suitable
    for fixed-size heaps.  Note that the value returned may be larger
    than *item*; guard the call when that matters:

        if item > heap[0]:
            item = heapreplace(heap, item)
    """
    smallest = heap[0]      # IndexError on an empty heap, as documented
    heap[0] = item
    _siftup(heap, 0)
    return smallest
+
def heapify(x):
    """Transform list into a heap, in-place, in O(len(heap)) time."""
    n = len(x)
    # Transform bottom-up.  Only nodes with at least one child can break
    # the invariant, and the last such node is at index n//2 - 1 (its left
    # child 2*i + 1 must satisfy 2*i + 1 < n).  Sifting them down deepest
    # first means every subtree is already a heap when its root is fixed.
    # range() replaces xrange(): behaviorally identical in Python 2 (the
    # list is tiny relative to x), and it removes the only construct that
    # prevented this algorithm from running on later Python versions.
    for i in reversed(range(n//2)):
        _siftup(x, i)
+
def nlargest(n, iterable):
    """Find the n largest elements in a dataset.

    Equivalent to:  sorted(iterable, reverse=True)[:n]
    """
    stream = iter(iterable)
    keep = list(islice(stream, n))      # current best n candidates
    if not keep:
        return keep
    heapify(keep)
    floor = keep[0]     # smallest kept value: the bar a newcomer must beat
    for candidate in stream:
        if candidate > floor:
            heapreplace(keep, candidate)
            floor = keep[0]
    keep.sort(reverse=True)
    return keep
+
def nsmallest(n, iterable):
    """Find the n smallest elements in a dataset.

    Equivalent to:  sorted(iterable)[:n]
    """
    if hasattr(iterable, '__len__') and n * 10 <= len(iterable):
        # n is small relative to the data: keep a sorted window of the n
        # smallest values seen so far, maintained with bisect.  Only n
        # elements are ever held in memory.
        stream = iter(iterable)
        window = sorted(islice(stream, 0, n))
        if not window:
            return window
        biggest = window[-1]    # largest value still inside the window
        for candidate in stream:
            if biggest <= candidate:
                continue
            bisect.insort(window, candidate)
            window.pop()
            biggest = window[-1]
        return window
    # Otherwise materialize everything and heapify all at once, which
    # saves comparisons over repeated insort (O(n) data movement each).
    # Finding the n smallest of an m length iterable costs
    # O(m) + O(n log m) comparisons this way.
    h = list(iterable)
    heapify(h)
    return map(heappop, repeat(h, min(n, len(h))))
+
+# 'heap' is a heap at all indices >= startpos, except possibly for pos.  pos
+# is the index of a leaf with a possibly out-of-order value.  Restore the
+# heap invariant.
+def _siftdown(heap, startpos, pos):
+    newitem = heap[pos]
+    # Follow the path to the root, moving parents down until finding a place
+    # newitem fits.
+    while pos > startpos:
+        parentpos = (pos - 1) >> 1
+        parent = heap[parentpos]
+        if parent <= newitem:
+            break
+        heap[pos] = parent
+        pos = parentpos
+    heap[pos] = newitem
+
+# The child indices of heap index pos are already heaps, and we want to make
+# a heap at index pos too.  We do this by bubbling the smaller child of
+# pos up (and so on with that child's children, etc) until hitting a leaf,
+# then using _siftdown to move the oddball originally at index pos into place.
+#
+# We *could* break out of the loop as soon as we find a pos where newitem <=
+# both its children, but turns out that's not a good idea, and despite that
+# many books write the algorithm that way.  During a heap pop, the last array
+# element is sifted in, and that tends to be large, so that comparing it
+# against values starting from the root usually doesn't pay (= usually doesn't
+# get us out of the loop early).  See Knuth, Volume 3, where this is
+# explained and quantified in an exercise.
+#
+# Cutting the # of comparisons is important, since these routines have no
+# way to extract "the priority" from an array element, so that intelligence
+# is likely to be hiding in custom __cmp__ methods, or in array elements
+# storing (priority, record) tuples.  Comparisons are thus potentially
+# expensive.
+#
+# On random arrays of length 1000, making this change cut the number of
+# comparisons made by heapify() a little, and those made by exhaustive
+# heappop() a lot, in accord with theory.  Here are typical results from 3
+# runs (3 just to demonstrate how small the variance is):
+#
+# Compares needed by heapify     Compares needed by 1000 heappops
+# --------------------------     --------------------------------
+# 1837 cut to 1663               14996 cut to 8680
+# 1855 cut to 1659               14966 cut to 8678
+# 1847 cut to 1660               15024 cut to 8703
+#
+# Building the heap by using heappush() 1000 times instead required
+# 2198, 2148, and 2219 compares:  heapify() is more efficient, when
+# you can use it.
+#
+# The total compares needed by list.sort() on the same lists were 8627,
+# 8627, and 8632 (this should be compared to the sum of heapify() and
+# heappop() compares):  list.sort() is (unsurprisingly!) more efficient
+# for sorting.
+
+def _siftup(heap, pos):
+    endpos = len(heap)
+    startpos = pos
+    newitem = heap[pos]
+    # Bubble up the smaller child until hitting a leaf.
+    childpos = 2*pos + 1    # leftmost child position
+    while childpos < endpos:
+        # Set childpos to index of smaller child.
+        rightpos = childpos + 1
+        if rightpos < endpos and heap[rightpos] <= heap[childpos]:
+            childpos = rightpos
+        # Move the smaller child up.
+        heap[pos] = heap[childpos]
+        pos = childpos
+        childpos = 2*pos + 1
+    # The leaf at pos is empty now.  Put newitem there, and bubble it up
+    # to its final resting place (by sifting its parents down).
+    heap[pos] = newitem
+    _siftdown(heap, startpos, pos)
+
# If available, use C implementation.  The _heapq accelerator transparently
# replaces the pure-Python definitions above; the API is identical.
try:
    from _heapq import heappush, heappop, heapify, heapreplace, nlargest, nsmallest
except ImportError:
    # No accelerator on this platform -- keep the Python versions.
    pass
+
if __name__ == "__main__":
    # Simple sanity test: push unsorted data, pop it all back, and the
    # result printed should be the data in ascending order.
    heap = []
    data = [1, 3, 5, 7, 9, 2, 4, 6, 8, 0]
    for item in data:
        heappush(heap, item)
    sort = []
    while heap:
        sort.append(heappop(heap))
    print sort
diff --git a/depot_tools/release/win/python_24/Lib/hmac.py b/depot_tools/release/win/python_24/Lib/hmac.py
new file mode 100644
index 0000000..11b0fb3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/hmac.py
@@ -0,0 +1,107 @@
+"""HMAC (Keyed-Hashing for Message Authentication) Python module.
+
+Implements the HMAC algorithm as described by RFC 2104.
+"""
+
+def _strxor(s1, s2):
+    """Utility method. XOR the two strings s1 and s2 (must have same length).
+    """
+    return "".join(map(lambda x, y: chr(ord(x) ^ ord(y)), s1, s2))
+
# The size of the digests returned by HMAC depends on the underlying
# hashing module used.
digest_size = None

# A unique object passed by HMAC.copy() to the HMAC constructor, in order
# that the latter return very quickly.  HMAC("") in contrast is quite
# expensive.  Identity comparison ("is") makes this token unforgeable by
# callers outside this module.
_secret_backdoor_key = []
+
class HMAC:
    """RFC2104 HMAC class.

    This supports the API for Cryptographic Hash Functions (PEP 247).
    """

    def __init__(self, key, msg = None, digestmod = None):
        """Create a new HMAC object.

        key:       key for the keyed hash object.
        msg:       Initial input for the hash, if provided.
        digestmod: A module supporting PEP 247. Defaults to the md5 module.
        """

        # copy() passes this sentinel so it can skip the costly pad setup
        # below and fill in the attributes itself.
        if key is _secret_backdoor_key: # cheap
            return

        if digestmod is None:
            import md5
            digestmod = md5

        self.digestmod = digestmod
        # The two hash states of the HMAC construction:
        #   H(key ^ opad || H(key ^ ipad || msg))
        self.outer = digestmod.new()
        self.inner = digestmod.new()
        self.digest_size = digestmod.digest_size

        # RFC 2104 input block size; correct for the md5/sha1 family.
        blocksize = 64
        ipad = "\x36" * blocksize
        opad = "\x5C" * blocksize

        # Keys longer than one block are first hashed down, per the RFC.
        if len(key) > blocksize:
            key = digestmod.new(key).digest()

        # Zero-pad the key to exactly one block.
        key = key + chr(0) * (blocksize - len(key))
        self.outer.update(_strxor(key, opad))
        self.inner.update(_strxor(key, ipad))
        if msg is not None:
            self.update(msg)

##    def clear(self):
##        raise NotImplementedError, "clear() method not available in HMAC."

    def update(self, msg):
        """Update this hashing object with the string msg.
        """
        self.inner.update(msg)

    def copy(self):
        """Return a separate copy of this hashing object.

        An update to this copy won't affect the original object.
        """
        # Bypass __init__'s expensive setup via the sentinel key, then
        # clone the internal hash states directly.
        other = HMAC(_secret_backdoor_key)
        other.digestmod = self.digestmod
        other.digest_size = self.digest_size
        other.inner = self.inner.copy()
        other.outer = self.outer.copy()
        return other

    def digest(self):
        """Return the hash value of this hashing object.

        This returns a string containing 8-bit data.  The object is
        not altered in any way by this function; you can continue
        updating the object after calling this function.
        """
        # Work on a copy of the outer state so this object stays usable.
        h = self.outer.copy()
        h.update(self.inner.digest())
        return h.digest()

    def hexdigest(self):
        """Like digest(), but returns a string of hexadecimal digits instead.
        """
        # zfill(2) restores the leading zero that hex() drops for
        # byte values below 0x10.
        return "".join([hex(ord(x))[2:].zfill(2)
                        for x in tuple(self.digest())])
+
def new(key, msg = None, digestmod = None):
    """Create a new hashing object and return it.

    key: The starting key for the hash.
    msg: if available, will immediately be hashed into the object's starting
    state.
    digestmod: PEP 247 hashing module to use; defaults to md5 (see HMAC).

    You can now feed arbitrary strings into the object using its update()
    method, and can ask for the hash value at any time by calling its digest()
    method.
    """
    return HMAC(key, msg, digestmod)
diff --git a/depot_tools/release/win/python_24/Lib/hotshot/__init__.py b/depot_tools/release/win/python_24/Lib/hotshot/__init__.py
new file mode 100644
index 0000000..b9f7866e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/hotshot/__init__.py
@@ -0,0 +1,76 @@
+"""High-performance logging profiler, mostly written in C."""
+
+import _hotshot
+
+from _hotshot import ProfilerError
+
+
class Profile:
    """Thin wrapper around the _hotshot C profiler, logging to a file."""

    def __init__(self, logfn, lineevents=0, linetimings=1):
        # Normalize the flags to 0/1; line timings are only meaningful
        # when line events are being recorded, so force them off otherwise.
        self.lineevents = lineevents and 1 or 0
        self.linetimings = (linetimings and lineevents) and 1 or 0
        self._prof = p = _hotshot.profiler(
            logfn, self.lineevents, self.linetimings)

        # Attempt to avoid confusing results caused by the presence of
        # Python wrappers around these functions, but only if we can
        # be sure the methods have not been overridden or extended.
        if self.__class__ is Profile:
            self.close = p.close
            self.start = p.start
            self.stop = p.stop
            self.addinfo = p.addinfo

    def close(self):
        """Close the logfile and terminate the profiler."""
        self._prof.close()

    def fileno(self):
        """Return the file descriptor of the profiler's log file."""
        return self._prof.fileno()

    def start(self):
        """Start the profiler."""
        self._prof.start()

    def stop(self):
        """Stop the profiler."""
        self._prof.stop()

    def addinfo(self, key, value):
        """Add an arbitrary labelled value to the profile log."""
        self._prof.addinfo(key, value)

    # These methods offer the same interface as the profile.Profile class,
    # but delegate most of the work to the C implementation underneath.

    def run(self, cmd):
        """Profile an exec-compatible string in the script
        environment.

        The globals from the __main__ module are used as both the
        globals and locals for the script.
        """
        import __main__
        dict = __main__.__dict__
        return self.runctx(cmd, dict, dict)

    def runctx(self, cmd, globals, locals):
        """Evaluate an exec-compatible string in a specific
        environment.

        The string is compiled before profiling begins.
        """
        code = compile(cmd, "<string>", "exec")
        self._prof.runcode(code, globals, locals)
        return self

    def runcall(self, func, *args, **kw):
        """Profile a single call of a callable.

        Additional positional and keyword arguments may be passed
        along; the result of the call is returned, and exceptions are
        allowed to propagate cleanly, while ensuring that profiling is
        disabled on the way out.
        """
        return self._prof.runcall(func, args, kw)
diff --git a/depot_tools/release/win/python_24/Lib/hotshot/log.py b/depot_tools/release/win/python_24/Lib/hotshot/log.py
new file mode 100644
index 0000000..7d6d91d4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/hotshot/log.py
@@ -0,0 +1,192 @@
+import _hotshot
+import os.path
+import parser
+import symbol
+import sys
+
+from _hotshot import \
+     WHAT_ENTER, \
+     WHAT_EXIT, \
+     WHAT_LINENO, \
+     WHAT_DEFINE_FILE, \
+     WHAT_DEFINE_FUNC, \
+     WHAT_ADD_INFO
+
+
__all__ = ["LogReader", "ENTER", "EXIT", "LINE"]


# Friendlier public aliases for the low-level _hotshot event codes.
ENTER = WHAT_ENTER
EXIT  = WHAT_EXIT
LINE  = WHAT_LINENO
+
+
class LogReader:
    """Iterate over the events recorded in a _hotshot profile log.

    Each iteration yields a (what, (filename, lineno, funcname), tdelta)
    tuple; file/function definition records are consumed internally to
    build the tables needed to decode the event locations.
    """

    def __init__(self, logfn):
        # fileno -> filename
        self._filemap = {}
        # (fileno, lineno) -> filename, funcname
        self._funcmap = {}

        self._reader = _hotshot.logreader(logfn)
        self._nextitem = self._reader.next
        self._info = self._reader.info
        if self._info.has_key('current-directory'):
            self.cwd = self._info['current-directory']
        else:
            self.cwd = None

        # This mirrors the call stack of the profiled code as the log
        # is read back in.  It contains tuples of the form:
        #
        #   (file name, line number of function def, function name)
        #
        self._stack = []
        # Bound methods cached as attributes to avoid per-event lookups.
        self._append = self._stack.append
        self._pop = self._stack.pop

    def close(self):
        """Close the underlying log file."""
        self._reader.close()

    def fileno(self):
        """Return the file descriptor of the log reader's log file."""
        return self._reader.fileno()

    def addinfo(self, key, value):
        """This method is called for each additional ADD_INFO record.

        This can be overridden by applications that want to receive
        these events.  The default implementation does not need to be
        called by alternate implementations.

        The initial set of ADD_INFO records do not pass through this
        mechanism; this is only needed to receive notification when
        new values are added.  Subclasses can inspect self._info after
        calling LogReader.__init__().
        """
        pass

    def get_filename(self, fileno):
        """Map a log-internal file number to its filename."""
        try:
            return self._filemap[fileno]
        except KeyError:
            raise ValueError, "unknown fileno"

    def get_filenames(self):
        """Return the filenames seen so far in the log."""
        return self._filemap.values()

    def get_fileno(self, filename):
        """Reverse lookup: the file number for *filename* (linear scan)."""
        filename = os.path.normcase(os.path.normpath(filename))
        for fileno, name in self._filemap.items():
            if name == filename:
                return fileno
        raise ValueError, "unknown filename"

    def get_funcname(self, fileno, lineno):
        """Return (filename, funcname) for a function definition site."""
        try:
            return self._funcmap[(fileno, lineno)]
        except KeyError:
            raise ValueError, "unknown function location"

    # Iteration support:
    # This adds an optional (& ignored) parameter to next() so that the
    # same bound method can be used as the __getitem__() method -- this
    # avoids using an additional method call which kills the performance.

    def next(self, index=0):
        """Return the next profile event as (what, location, tdelta)."""
        while 1:
            # This call may raise StopIteration:
            what, tdelta, fileno, lineno = self._nextitem()

            # handle the most common cases first

            if what == WHAT_ENTER:
                filename, funcname = self._decode_location(fileno, lineno)
                t = (filename, lineno, funcname)
                self._append(t)
                return what, t, tdelta

            if what == WHAT_EXIT:
                return what, self._pop(), tdelta

            if what == WHAT_LINENO:
                # Report the line against the function currently on top
                # of the reconstructed call stack.
                filename, firstlineno, funcname = self._stack[-1]
                return what, (filename, lineno, funcname), tdelta

            # The remaining record types update internal tables and loop
            # around instead of being returned to the caller.
            if what == WHAT_DEFINE_FILE:
                filename = os.path.normcase(os.path.normpath(tdelta))
                self._filemap[fileno] = filename
            elif what == WHAT_DEFINE_FUNC:
                filename = self._filemap[fileno]
                self._funcmap[(fileno, lineno)] = (filename, tdelta)
            elif what == WHAT_ADD_INFO:
                # value already loaded into self.info; call the
                # overridable addinfo() handler so higher-level code
                # can pick up the new value
                if tdelta == 'current-directory':
                    self.cwd = lineno
                self.addinfo(tdelta, lineno)
            else:
                raise ValueError, "unknown event type"

    def __iter__(self):
        return self

    #
    #  helpers
    #

    def _decode_location(self, fileno, lineno):
        # Resolve (fileno, lineno) to (filename, funcname), falling back
        # to parsing the source file when DEFINE_FUNC data is missing.
        try:
            return self._funcmap[(fileno, lineno)]
        except KeyError:
            #
            # This should only be needed when the log file does not
            # contain all the DEFINE_FUNC records needed to allow the
            # function name to be retrieved from the log file.
            #
            if self._loadfile(fileno):
                filename = funcname = None
            try:
                filename, funcname = self._funcmap[(fileno, lineno)]
            except KeyError:
                # Still unknown: cache a best-effort answer so this slow
                # path is only taken once per location.
                filename = self._filemap.get(fileno)
                funcname = None
                self._funcmap[(fileno, lineno)] = (filename, funcname)
        return filename, funcname

    def _loadfile(self, fileno):
        # Parse the named source file, recording every def/lambda site in
        # self._funcmap.  Returns a true value when the file cannot be
        # identified (callers then treat the location as unknown).
        try:
            filename = self._filemap[fileno]
        except KeyError:
            print "Could not identify fileId", fileno
            return 1
        if filename is None:
            return 1
        absname = os.path.normcase(os.path.join(self.cwd, filename))

        try:
            fp = open(absname)
        except IOError:
            return
        st = parser.suite(fp.read())
        fp.close()

        # Scan the tree looking for def and lambda nodes, filling in
        # self._funcmap with all the available information.
        funcdef = symbol.funcdef
        lambdef = symbol.lambdef

        stack = [st.totuple(1)]

        while stack:
            tree = stack.pop()
            try:
                sym = tree[0]
            except (IndexError, TypeError):
                continue
            if sym == funcdef:
                # tree[2] should be the NAME token as (type, string,
                # lineno), since totuple(1) includes line numbers.
                self._funcmap[(fileno, tree[2][2])] = filename, tree[2][1]
            elif sym == lambdef:
                self._funcmap[(fileno, tree[1][2])] = filename, "<lambda>"
            stack.extend(list(tree[1:]))
diff --git a/depot_tools/release/win/python_24/Lib/hotshot/stats.py b/depot_tools/release/win/python_24/Lib/hotshot/stats.py
new file mode 100644
index 0000000..7ff2277
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/hotshot/stats.py
@@ -0,0 +1,93 @@
+"""Statistics analyzer for HotShot."""
+
+import profile
+import pstats
+
+import hotshot.log
+
+from hotshot.log import ENTER, EXIT
+
+
def load(filename):
    """Parse the hotshot log *filename* and return a pstats.Stats."""
    return StatsLoader(filename).load()
+
+
class StatsLoader:
    """Replay a hotshot log through a profile.Profile to build statistics."""

    def __init__(self, logfn):
        self._logfn = logfn
        self._code = {}     # (filename, firstlineno, funcname) -> FakeCode
        self._stack = []    # simulated call stack of FakeFrame objects
        self.pop_frame = self._stack.pop

    def load(self):
        """Replay the log and return a pstats.Stats for it."""
        # The timer selected by the profiler should never be used, so make
        # sure it doesn't work:
        p = Profile()
        p.get_time = _brokentimer
        log = hotshot.log.LogReader(self._logfn)
        taccum = 0
        for event in log:
            what, (filename, lineno, funcname), tdelta = event
            if tdelta > 0:
                taccum += tdelta

            # We multiply taccum to convert from the microseconds we
            # have to the seconds that the profile/pstats module work
            # with; this allows the numbers to have some basis in
            # reality (ignoring calibration issues for now).

            if what == ENTER:
                frame = self.new_frame(filename, lineno, funcname)
                p.trace_dispatch_call(frame, taccum * .000001)
                taccum = 0

            elif what == EXIT:
                frame = self.pop_frame()
                p.trace_dispatch_return(frame, taccum * .000001)
                taccum = 0

            # no further work for line events

        # Every ENTER should have been matched by an EXIT.
        assert not self._stack
        return pstats.Stats(p)

    def new_frame(self, *args):
        # args must be filename, firstlineno, funcname
        # our code objects are cached since we don't need to create
        # new ones every time
        try:
            code = self._code[args]
        except KeyError:
            code = FakeCode(*args)
            self._code[args] = code
        # frame objects are created fresh, since the back pointer will
        # vary considerably
        if self._stack:
            back = self._stack[-1]
        else:
            back = None
        frame = FakeFrame(code, back)
        self._stack.append(frame)
        return frame
+
+
class Profile(profile.Profile):
    # NOTE(review): the base-class hook is deliberately made a no-op here --
    # presumably because the replayed log data supplies all timing itself;
    # confirm against profile.Profile.simulate_cmd_complete().
    def simulate_cmd_complete(self):
        pass
+
+
class FakeCode:
    """Minimal stand-in for a code object.

    Carries only the co_* attributes that profile/pstats inspect.
    """

    def __init__(self, filename, firstlineno, funcname):
        self.co_filename = filename
        self.co_firstlineno = firstlineno
        self.__name__ = funcname
        self.co_name = self.__name__
+
+
class FakeFrame:
    """Minimal stand-in for a frame: a code object plus a back link."""

    def __init__(self, code, back):
        self.f_code = code
        self.f_back = back
+
+
def _brokentimer():
    # Installed as Profile.get_time so the replay can never consult a real
    # clock: all timing must come from the log's recorded deltas.
    raise RuntimeError, "this timer should not be called"
diff --git a/depot_tools/release/win/python_24/Lib/hotshot/stones.py b/depot_tools/release/win/python_24/Lib/hotshot/stones.py
new file mode 100644
index 0000000..cd4c51d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/hotshot/stones.py
@@ -0,0 +1,31 @@
+import errno
+import hotshot
+import hotshot.stats
+import os
+import sys
+import test.pystone
+
def main(logfile):
    """Profile a pystone run into *logfile* and print the top 20 stats."""
    p = hotshot.Profile(logfile)
    benchtime, stones = p.runcall(test.pystone.pystones)
    p.close()

    print "Pystone(%s) time for %d passes = %g" % \
          (test.pystone.__version__, test.pystone.LOOPS, benchtime)
    print "This machine benchmarks at %g pystones/second" % stones

    stats = hotshot.stats.load(logfile)
    stats.strip_dirs()
    stats.sort_stats('time', 'calls')
    try:
        stats.print_stats(20)
    except IOError, e:
        # A closed pipe (e.g. output piped to "head") is expected;
        # re-raise anything else.
        if e.errno != errno.EPIPE:
            raise
+
if __name__ == '__main__':
    if sys.argv[1:]:
        main(sys.argv[1])
    else:
        # No log file given on the command line: profile into a
        # throwaway temporary file.
        import tempfile
        main(tempfile.NamedTemporaryFile().name)
diff --git a/depot_tools/release/win/python_24/Lib/htmlentitydefs.py b/depot_tools/release/win/python_24/Lib/htmlentitydefs.py
new file mode 100644
index 0000000..3dd14a79
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/htmlentitydefs.py
@@ -0,0 +1,273 @@
+"""HTML character entity references."""
+
# maps the HTML entity name to the Unicode codepoint
# Entries cover the HTML 4.01 character entity set; the ISO* tag at the end
# of each comment names the SGML entity set the character comes from.
name2codepoint = {
    'AElig':    0x00c6, # latin capital letter AE = latin capital ligature AE, U+00C6 ISOlat1
    'Aacute':   0x00c1, # latin capital letter A with acute, U+00C1 ISOlat1
    'Acirc':    0x00c2, # latin capital letter A with circumflex, U+00C2 ISOlat1
    'Agrave':   0x00c0, # latin capital letter A with grave = latin capital letter A grave, U+00C0 ISOlat1
    'Alpha':    0x0391, # greek capital letter alpha, U+0391
    'Aring':    0x00c5, # latin capital letter A with ring above = latin capital letter A ring, U+00C5 ISOlat1
    'Atilde':   0x00c3, # latin capital letter A with tilde, U+00C3 ISOlat1
    'Auml':     0x00c4, # latin capital letter A with diaeresis, U+00C4 ISOlat1
    'Beta':     0x0392, # greek capital letter beta, U+0392
    'Ccedil':   0x00c7, # latin capital letter C with cedilla, U+00C7 ISOlat1
    'Chi':      0x03a7, # greek capital letter chi, U+03A7
    'Dagger':   0x2021, # double dagger, U+2021 ISOpub
    'Delta':    0x0394, # greek capital letter delta, U+0394 ISOgrk3
    'ETH':      0x00d0, # latin capital letter ETH, U+00D0 ISOlat1
    'Eacute':   0x00c9, # latin capital letter E with acute, U+00C9 ISOlat1
    'Ecirc':    0x00ca, # latin capital letter E with circumflex, U+00CA ISOlat1
    'Egrave':   0x00c8, # latin capital letter E with grave, U+00C8 ISOlat1
    'Epsilon':  0x0395, # greek capital letter epsilon, U+0395
    'Eta':      0x0397, # greek capital letter eta, U+0397
    'Euml':     0x00cb, # latin capital letter E with diaeresis, U+00CB ISOlat1
    'Gamma':    0x0393, # greek capital letter gamma, U+0393 ISOgrk3
    'Iacute':   0x00cd, # latin capital letter I with acute, U+00CD ISOlat1
    'Icirc':    0x00ce, # latin capital letter I with circumflex, U+00CE ISOlat1
    'Igrave':   0x00cc, # latin capital letter I with grave, U+00CC ISOlat1
    'Iota':     0x0399, # greek capital letter iota, U+0399
    'Iuml':     0x00cf, # latin capital letter I with diaeresis, U+00CF ISOlat1
    'Kappa':    0x039a, # greek capital letter kappa, U+039A
    'Lambda':   0x039b, # greek capital letter lambda, U+039B ISOgrk3
    'Mu':       0x039c, # greek capital letter mu, U+039C
    'Ntilde':   0x00d1, # latin capital letter N with tilde, U+00D1 ISOlat1
    'Nu':       0x039d, # greek capital letter nu, U+039D
    'OElig':    0x0152, # latin capital ligature OE, U+0152 ISOlat2
    'Oacute':   0x00d3, # latin capital letter O with acute, U+00D3 ISOlat1
    'Ocirc':    0x00d4, # latin capital letter O with circumflex, U+00D4 ISOlat1
    'Ograve':   0x00d2, # latin capital letter O with grave, U+00D2 ISOlat1
    'Omega':    0x03a9, # greek capital letter omega, U+03A9 ISOgrk3
    'Omicron':  0x039f, # greek capital letter omicron, U+039F
    'Oslash':   0x00d8, # latin capital letter O with stroke = latin capital letter O slash, U+00D8 ISOlat1
    'Otilde':   0x00d5, # latin capital letter O with tilde, U+00D5 ISOlat1
    'Ouml':     0x00d6, # latin capital letter O with diaeresis, U+00D6 ISOlat1
    'Phi':      0x03a6, # greek capital letter phi, U+03A6 ISOgrk3
    'Pi':       0x03a0, # greek capital letter pi, U+03A0 ISOgrk3
    'Prime':    0x2033, # double prime = seconds = inches, U+2033 ISOtech
    'Psi':      0x03a8, # greek capital letter psi, U+03A8 ISOgrk3
    'Rho':      0x03a1, # greek capital letter rho, U+03A1
    'Scaron':   0x0160, # latin capital letter S with caron, U+0160 ISOlat2
    'Sigma':    0x03a3, # greek capital letter sigma, U+03A3 ISOgrk3
    'THORN':    0x00de, # latin capital letter THORN, U+00DE ISOlat1
    'Tau':      0x03a4, # greek capital letter tau, U+03A4
    'Theta':    0x0398, # greek capital letter theta, U+0398 ISOgrk3
    'Uacute':   0x00da, # latin capital letter U with acute, U+00DA ISOlat1
    'Ucirc':    0x00db, # latin capital letter U with circumflex, U+00DB ISOlat1
    'Ugrave':   0x00d9, # latin capital letter U with grave, U+00D9 ISOlat1
    'Upsilon':  0x03a5, # greek capital letter upsilon, U+03A5 ISOgrk3
    'Uuml':     0x00dc, # latin capital letter U with diaeresis, U+00DC ISOlat1
    'Xi':       0x039e, # greek capital letter xi, U+039E ISOgrk3
    'Yacute':   0x00dd, # latin capital letter Y with acute, U+00DD ISOlat1
    'Yuml':     0x0178, # latin capital letter Y with diaeresis, U+0178 ISOlat2
    'Zeta':     0x0396, # greek capital letter zeta, U+0396
    'aacute':   0x00e1, # latin small letter a with acute, U+00E1 ISOlat1
    'acirc':    0x00e2, # latin small letter a with circumflex, U+00E2 ISOlat1
    'acute':    0x00b4, # acute accent = spacing acute, U+00B4 ISOdia
    'aelig':    0x00e6, # latin small letter ae = latin small ligature ae, U+00E6 ISOlat1
    'agrave':   0x00e0, # latin small letter a with grave = latin small letter a grave, U+00E0 ISOlat1
    'alefsym':  0x2135, # alef symbol = first transfinite cardinal, U+2135 NEW
    'alpha':    0x03b1, # greek small letter alpha, U+03B1 ISOgrk3
    'amp':      0x0026, # ampersand, U+0026 ISOnum
    'and':      0x2227, # logical and = wedge, U+2227 ISOtech
    'ang':      0x2220, # angle, U+2220 ISOamso
    'aring':    0x00e5, # latin small letter a with ring above = latin small letter a ring, U+00E5 ISOlat1
    'asymp':    0x2248, # almost equal to = asymptotic to, U+2248 ISOamsr
    'atilde':   0x00e3, # latin small letter a with tilde, U+00E3 ISOlat1
    'auml':     0x00e4, # latin small letter a with diaeresis, U+00E4 ISOlat1
    'bdquo':    0x201e, # double low-9 quotation mark, U+201E NEW
    'beta':     0x03b2, # greek small letter beta, U+03B2 ISOgrk3
    'brvbar':   0x00a6, # broken bar = broken vertical bar, U+00A6 ISOnum
    'bull':     0x2022, # bullet = black small circle, U+2022 ISOpub
    'cap':      0x2229, # intersection = cap, U+2229 ISOtech
    'ccedil':   0x00e7, # latin small letter c with cedilla, U+00E7 ISOlat1
    'cedil':    0x00b8, # cedilla = spacing cedilla, U+00B8 ISOdia
    'cent':     0x00a2, # cent sign, U+00A2 ISOnum
    'chi':      0x03c7, # greek small letter chi, U+03C7 ISOgrk3
    'circ':     0x02c6, # modifier letter circumflex accent, U+02C6 ISOpub
    'clubs':    0x2663, # black club suit = shamrock, U+2663 ISOpub
    'cong':     0x2245, # approximately equal to, U+2245 ISOtech
    'copy':     0x00a9, # copyright sign, U+00A9 ISOnum
    'crarr':    0x21b5, # downwards arrow with corner leftwards = carriage return, U+21B5 NEW
    'cup':      0x222a, # union = cup, U+222A ISOtech
    'curren':   0x00a4, # currency sign, U+00A4 ISOnum
    'dArr':     0x21d3, # downwards double arrow, U+21D3 ISOamsa
    'dagger':   0x2020, # dagger, U+2020 ISOpub
    'darr':     0x2193, # downwards arrow, U+2193 ISOnum
    'deg':      0x00b0, # degree sign, U+00B0 ISOnum
    'delta':    0x03b4, # greek small letter delta, U+03B4 ISOgrk3
    'diams':    0x2666, # black diamond suit, U+2666 ISOpub
    'divide':   0x00f7, # division sign, U+00F7 ISOnum
    'eacute':   0x00e9, # latin small letter e with acute, U+00E9 ISOlat1
    'ecirc':    0x00ea, # latin small letter e with circumflex, U+00EA ISOlat1
    'egrave':   0x00e8, # latin small letter e with grave, U+00E8 ISOlat1
    'empty':    0x2205, # empty set = null set = diameter, U+2205 ISOamso
    'emsp':     0x2003, # em space, U+2003 ISOpub
    'ensp':     0x2002, # en space, U+2002 ISOpub
    'epsilon':  0x03b5, # greek small letter epsilon, U+03B5 ISOgrk3
    'equiv':    0x2261, # identical to, U+2261 ISOtech
    'eta':      0x03b7, # greek small letter eta, U+03B7 ISOgrk3
    'eth':      0x00f0, # latin small letter eth, U+00F0 ISOlat1
    'euml':     0x00eb, # latin small letter e with diaeresis, U+00EB ISOlat1
    'euro':     0x20ac, # euro sign, U+20AC NEW
    'exist':    0x2203, # there exists, U+2203 ISOtech
    'fnof':     0x0192, # latin small f with hook = function = florin, U+0192 ISOtech
    'forall':   0x2200, # for all, U+2200 ISOtech
    'frac12':   0x00bd, # vulgar fraction one half = fraction one half, U+00BD ISOnum
    'frac14':   0x00bc, # vulgar fraction one quarter = fraction one quarter, U+00BC ISOnum
    'frac34':   0x00be, # vulgar fraction three quarters = fraction three quarters, U+00BE ISOnum
    'frasl':    0x2044, # fraction slash, U+2044 NEW
    'gamma':    0x03b3, # greek small letter gamma, U+03B3 ISOgrk3
    'ge':       0x2265, # greater-than or equal to, U+2265 ISOtech
    'gt':       0x003e, # greater-than sign, U+003E ISOnum
    'hArr':     0x21d4, # left right double arrow, U+21D4 ISOamsa
    'harr':     0x2194, # left right arrow, U+2194 ISOamsa
    'hearts':   0x2665, # black heart suit = valentine, U+2665 ISOpub
    'hellip':   0x2026, # horizontal ellipsis = three dot leader, U+2026 ISOpub
    'iacute':   0x00ed, # latin small letter i with acute, U+00ED ISOlat1
    'icirc':    0x00ee, # latin small letter i with circumflex, U+00EE ISOlat1
    'iexcl':    0x00a1, # inverted exclamation mark, U+00A1 ISOnum
    'igrave':   0x00ec, # latin small letter i with grave, U+00EC ISOlat1
    'image':    0x2111, # blackletter capital I = imaginary part, U+2111 ISOamso
    'infin':    0x221e, # infinity, U+221E ISOtech
    'int':      0x222b, # integral, U+222B ISOtech
    'iota':     0x03b9, # greek small letter iota, U+03B9 ISOgrk3
    'iquest':   0x00bf, # inverted question mark = turned question mark, U+00BF ISOnum
    'isin':     0x2208, # element of, U+2208 ISOtech
    'iuml':     0x00ef, # latin small letter i with diaeresis, U+00EF ISOlat1
    'kappa':    0x03ba, # greek small letter kappa, U+03BA ISOgrk3
    'lArr':     0x21d0, # leftwards double arrow, U+21D0 ISOtech
    'lambda':   0x03bb, # greek small letter lambda, U+03BB ISOgrk3
    'lang':     0x2329, # left-pointing angle bracket = bra, U+2329 ISOtech
    'laquo':    0x00ab, # left-pointing double angle quotation mark = left pointing guillemet, U+00AB ISOnum
    'larr':     0x2190, # leftwards arrow, U+2190 ISOnum
    'lceil':    0x2308, # left ceiling = apl upstile, U+2308 ISOamsc
    'ldquo':    0x201c, # left double quotation mark, U+201C ISOnum
    'le':       0x2264, # less-than or equal to, U+2264 ISOtech
    'lfloor':   0x230a, # left floor = apl downstile, U+230A ISOamsc
    'lowast':   0x2217, # asterisk operator, U+2217 ISOtech
    'loz':      0x25ca, # lozenge, U+25CA ISOpub
    'lrm':      0x200e, # left-to-right mark, U+200E NEW RFC 2070
    'lsaquo':   0x2039, # single left-pointing angle quotation mark, U+2039 ISO proposed
    'lsquo':    0x2018, # left single quotation mark, U+2018 ISOnum
    'lt':       0x003c, # less-than sign, U+003C ISOnum
    'macr':     0x00af, # macron = spacing macron = overline = APL overbar, U+00AF ISOdia
    'mdash':    0x2014, # em dash, U+2014 ISOpub
    'micro':    0x00b5, # micro sign, U+00B5 ISOnum
    'middot':   0x00b7, # middle dot = Georgian comma = Greek middle dot, U+00B7 ISOnum
    'minus':    0x2212, # minus sign, U+2212 ISOtech
    'mu':       0x03bc, # greek small letter mu, U+03BC ISOgrk3
    'nabla':    0x2207, # nabla = backward difference, U+2207 ISOtech
    'nbsp':     0x00a0, # no-break space = non-breaking space, U+00A0 ISOnum
    'ndash':    0x2013, # en dash, U+2013 ISOpub
    'ne':       0x2260, # not equal to, U+2260 ISOtech
    'ni':       0x220b, # contains as member, U+220B ISOtech
    'not':      0x00ac, # not sign, U+00AC ISOnum
    'notin':    0x2209, # not an element of, U+2209 ISOtech
    'nsub':     0x2284, # not a subset of, U+2284 ISOamsn
    'ntilde':   0x00f1, # latin small letter n with tilde, U+00F1 ISOlat1
    'nu':       0x03bd, # greek small letter nu, U+03BD ISOgrk3
    'oacute':   0x00f3, # latin small letter o with acute, U+00F3 ISOlat1
    'ocirc':    0x00f4, # latin small letter o with circumflex, U+00F4 ISOlat1
    'oelig':    0x0153, # latin small ligature oe, U+0153 ISOlat2
    'ograve':   0x00f2, # latin small letter o with grave, U+00F2 ISOlat1
    'oline':    0x203e, # overline = spacing overscore, U+203E NEW
    'omega':    0x03c9, # greek small letter omega, U+03C9 ISOgrk3
    'omicron':  0x03bf, # greek small letter omicron, U+03BF NEW
    'oplus':    0x2295, # circled plus = direct sum, U+2295 ISOamsb
    'or':       0x2228, # logical or = vee, U+2228 ISOtech
    'ordf':     0x00aa, # feminine ordinal indicator, U+00AA ISOnum
    'ordm':     0x00ba, # masculine ordinal indicator, U+00BA ISOnum
    'oslash':   0x00f8, # latin small letter o with stroke, = latin small letter o slash, U+00F8 ISOlat1
    'otilde':   0x00f5, # latin small letter o with tilde, U+00F5 ISOlat1
    'otimes':   0x2297, # circled times = vector product, U+2297 ISOamsb
    'ouml':     0x00f6, # latin small letter o with diaeresis, U+00F6 ISOlat1
    'para':     0x00b6, # pilcrow sign = paragraph sign, U+00B6 ISOnum
    'part':     0x2202, # partial differential, U+2202 ISOtech
    'permil':   0x2030, # per mille sign, U+2030 ISOtech
    'perp':     0x22a5, # up tack = orthogonal to = perpendicular, U+22A5 ISOtech
    'phi':      0x03c6, # greek small letter phi, U+03C6 ISOgrk3
    'pi':       0x03c0, # greek small letter pi, U+03C0 ISOgrk3
    'piv':      0x03d6, # greek pi symbol, U+03D6 ISOgrk3
    'plusmn':   0x00b1, # plus-minus sign = plus-or-minus sign, U+00B1 ISOnum
    'pound':    0x00a3, # pound sign, U+00A3 ISOnum
    'prime':    0x2032, # prime = minutes = feet, U+2032 ISOtech
    'prod':     0x220f, # n-ary product = product sign, U+220F ISOamsb
    'prop':     0x221d, # proportional to, U+221D ISOtech
    'psi':      0x03c8, # greek small letter psi, U+03C8 ISOgrk3
    'quot':     0x0022, # quotation mark = APL quote, U+0022 ISOnum
    'rArr':     0x21d2, # rightwards double arrow, U+21D2 ISOtech
    'radic':    0x221a, # square root = radical sign, U+221A ISOtech
    'rang':     0x232a, # right-pointing angle bracket = ket, U+232A ISOtech
    'raquo':    0x00bb, # right-pointing double angle quotation mark = right pointing guillemet, U+00BB ISOnum
    'rarr':     0x2192, # rightwards arrow, U+2192 ISOnum
    'rceil':    0x2309, # right ceiling, U+2309 ISOamsc
    'rdquo':    0x201d, # right double quotation mark, U+201D ISOnum
    'real':     0x211c, # blackletter capital R = real part symbol, U+211C ISOamso
    'reg':      0x00ae, # registered sign = registered trade mark sign, U+00AE ISOnum
    'rfloor':   0x230b, # right floor, U+230B ISOamsc
    'rho':      0x03c1, # greek small letter rho, U+03C1 ISOgrk3
    'rlm':      0x200f, # right-to-left mark, U+200F NEW RFC 2070
    'rsaquo':   0x203a, # single right-pointing angle quotation mark, U+203A ISO proposed
    'rsquo':    0x2019, # right single quotation mark, U+2019 ISOnum
    'sbquo':    0x201a, # single low-9 quotation mark, U+201A NEW
    'scaron':   0x0161, # latin small letter s with caron, U+0161 ISOlat2
    'sdot':     0x22c5, # dot operator, U+22C5 ISOamsb
    'sect':     0x00a7, # section sign, U+00A7 ISOnum
    'shy':      0x00ad, # soft hyphen = discretionary hyphen, U+00AD ISOnum
    'sigma':    0x03c3, # greek small letter sigma, U+03C3 ISOgrk3
    'sigmaf':   0x03c2, # greek small letter final sigma, U+03C2 ISOgrk3
    'sim':      0x223c, # tilde operator = varies with = similar to, U+223C ISOtech
    'spades':   0x2660, # black spade suit, U+2660 ISOpub
    'sub':      0x2282, # subset of, U+2282 ISOtech
    'sube':     0x2286, # subset of or equal to, U+2286 ISOtech
    'sum':      0x2211, # n-ary summation, U+2211 ISOamsb
    'sup':      0x2283, # superset of, U+2283 ISOtech
    'sup1':     0x00b9, # superscript one = superscript digit one, U+00B9 ISOnum
    'sup2':     0x00b2, # superscript two = superscript digit two = squared, U+00B2 ISOnum
    'sup3':     0x00b3, # superscript three = superscript digit three = cubed, U+00B3 ISOnum
    'supe':     0x2287, # superset of or equal to, U+2287 ISOtech
    'szlig':    0x00df, # latin small letter sharp s = ess-zed, U+00DF ISOlat1
    'tau':      0x03c4, # greek small letter tau, U+03C4 ISOgrk3
    'there4':   0x2234, # therefore, U+2234 ISOtech
    'theta':    0x03b8, # greek small letter theta, U+03B8 ISOgrk3
    'thetasym': 0x03d1, # greek small letter theta symbol, U+03D1 NEW
    'thinsp':   0x2009, # thin space, U+2009 ISOpub
    'thorn':    0x00fe, # latin small letter thorn, U+00FE ISOlat1
    'tilde':    0x02dc, # small tilde, U+02DC ISOdia
    'times':    0x00d7, # multiplication sign, U+00D7 ISOnum
    'trade':    0x2122, # trade mark sign, U+2122 ISOnum
    'uArr':     0x21d1, # upwards double arrow, U+21D1 ISOamsa
    'uacute':   0x00fa, # latin small letter u with acute, U+00FA ISOlat1
    'uarr':     0x2191, # upwards arrow, U+2191 ISOnum
    'ucirc':    0x00fb, # latin small letter u with circumflex, U+00FB ISOlat1
    'ugrave':   0x00f9, # latin small letter u with grave, U+00F9 ISOlat1
    'uml':      0x00a8, # diaeresis = spacing diaeresis, U+00A8 ISOdia
    'upsih':    0x03d2, # greek upsilon with hook symbol, U+03D2 NEW
    'upsilon':  0x03c5, # greek small letter upsilon, U+03C5 ISOgrk3
    'uuml':     0x00fc, # latin small letter u with diaeresis, U+00FC ISOlat1
    'weierp':   0x2118, # script capital P = power set = Weierstrass p, U+2118 ISOamso
    'xi':       0x03be, # greek small letter xi, U+03BE ISOgrk3
    'yacute':   0x00fd, # latin small letter y with acute, U+00FD ISOlat1
    'yen':      0x00a5, # yen sign = yuan sign, U+00A5 ISOnum
    'yuml':     0x00ff, # latin small letter y with diaeresis, U+00FF ISOnum
    'zeta':     0x03b6, # greek small letter zeta, U+03B6 ISOgrk3
    'zwj':      0x200d, # zero width joiner, U+200D NEW RFC 2070
    'zwnj':     0x200c, # zero width non-joiner, U+200C NEW RFC 2070
}
+
# maps the Unicode codepoint to the HTML entity name
codepoint2name = {}

# maps the HTML entity name to the character
# (or a character reference if the character is outside the Latin-1 range)
entitydefs = {}

# Derive both reverse tables from name2codepoint in a single pass.
for (_entity, _cp) in name2codepoint.iteritems():
    codepoint2name[_cp] = _entity
    if _cp > 0xff:
        # Outside Latin-1: fall back to a numeric character reference.
        entitydefs[_entity] = '&#%d;' % _cp
    else:
        entitydefs[_entity] = chr(_cp)

# Remove the loop variables so they do not leak into the module namespace.
del _entity, _cp
diff --git a/depot_tools/release/win/python_24/Lib/htmllib.py b/depot_tools/release/win/python_24/Lib/htmllib.py
new file mode 100644
index 0000000..24a2e2f3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/htmllib.py
@@ -0,0 +1,486 @@
+"""HTML 2.0 parser.
+
+See the HTML 2.0 specification:
+http://www.w3.org/hypertext/WWW/MarkUp/html-spec/html-spec_toc.html
+"""
+
+import sgmllib
+
+from formatter import AS_IS
+
+__all__ = ["HTMLParser", "HTMLParseError"]
+
+
# Subclasses the SGML parser's error so code catching SGMLParseError still works.
class HTMLParseError(sgmllib.SGMLParseError):
    """Error raised when an HTML document can't be parsed."""
+
+
class HTMLParser(sgmllib.SGMLParser):
    """This is the basic HTML parser class.

    It supports all entity names required by the XHTML 1.0 Recommendation.
    It also defines handlers for all HTML 2.0 and many HTML 3.0 and 3.2
    elements.

    """

    # Entity table consulted by sgmllib when expanding &name; references.
    from htmlentitydefs import entitydefs

    def __init__(self, formatter, verbose=0):
        """Creates an instance of the HTMLParser class.

        The formatter parameter is the formatter instance associated with
        the parser.

        """
        sgmllib.SGMLParser.__init__(self, verbose)
        self.formatter = formatter

    def error(self, message):
        # Report parse problems as HTMLParseError instead of the sgmllib base.
        raise HTMLParseError(message)

    def reset(self):
        """Reset parser state; also invoked by SGMLParser.__init__()."""
        sgmllib.SGMLParser.reset(self)
        self.savedata = None    # save_bgn()/save_end() buffer; None => pass data through
        self.isindex = 0        # set to 1 once an <ISINDEX> tag is seen
        self.title = None       # text of the <TITLE> element, once parsed
        self.base = None        # href of the <BASE> tag, if any
        self.anchor = None      # href of the currently open <A>, if any
        self.anchorlist = []    # hrefs of all anchors seen, in document order
        self.nofill = 0         # >0 while inside <PRE>-style literal elements
        self.list_stack = []    # open lists; each entry is [tag, label, item-count]

    # ------ Methods used internally; some may be overridden

    # --- Formatter interface, taking care of 'savedata' mode;
    # shouldn't need to be overridden

    def handle_data(self, data):
        # While save_bgn() is active, accumulate text instead of formatting it.
        if self.savedata is not None:
            self.savedata = self.savedata + data
        else:
            if self.nofill:
                self.formatter.add_literal_data(data)
            else:
                self.formatter.add_flowing_data(data)

    # --- Hooks to save data; shouldn't need to be overridden

    def save_bgn(self):
        """Begins saving character data in a buffer instead of sending it
        to the formatter object.

        Retrieve the stored data via the save_end() method.  Use of the
        save_bgn() / save_end() pair may not be nested.

        """
        self.savedata = ''

    def save_end(self):
        """Ends buffering character data and returns all data saved since
        the preceding call to the save_bgn() method.

        If the nofill flag is false, whitespace is collapsed to single
        spaces.  A call to this method without a preceding call to the
        save_bgn() method will raise a TypeError exception.

        """
        data = self.savedata
        self.savedata = None
        if not self.nofill:
            # Collapse runs of whitespace to single spaces.
            data = ' '.join(data.split())
        return data

    # --- Hooks for anchors; should probably be overridden

    def anchor_bgn(self, href, name, type):
        """This method is called at the start of an anchor region.

        The arguments correspond to the attributes of the <A> tag with
        the same names.  The default implementation maintains a list of
        hyperlinks (defined by the HREF attribute for <A> tags) within
        the document.  The list of hyperlinks is available as the data
        attribute anchorlist.

        """
        self.anchor = href
        if self.anchor:
            self.anchorlist.append(href)

    def anchor_end(self):
        """This method is called at the end of an anchor region.

        The default implementation adds a textual footnote marker using an
        index into the list of hyperlinks created by the anchor_bgn() method.

        """
        if self.anchor:
            self.handle_data("[%d]" % len(self.anchorlist))
            self.anchor = None

    # --- Hook for images; should probably be overridden

    def handle_image(self, src, alt, *args):
        """This method is called to handle images.

        The default implementation simply passes the alt value to the
        handle_data() method.

        """
        self.handle_data(alt)

    # --------- Top level elements

    def start_html(self, attrs): pass
    def end_html(self): pass

    def start_head(self, attrs): pass
    def end_head(self): pass

    def start_body(self, attrs): pass
    def end_body(self): pass

    # ------ Head elements

    def start_title(self, attrs):
        # Buffer the title text; end_title() retrieves and stores it.
        self.save_bgn()

    def end_title(self):
        self.title = self.save_end()

    def do_base(self, attrs):
        # Remember the document base URL for relative-link resolution.
        for a, v in attrs:
            if a == 'href':
                self.base = v

    def do_isindex(self, attrs):
        self.isindex = 1

    def do_link(self, attrs):
        pass

    def do_meta(self, attrs):
        pass

    def do_nextid(self, attrs): # Deprecated
        pass

    # ------ Body elements

    # --- Headings
    # Each heading ends the current paragraph and pushes a named font
    # ('h1'..'h6'); the matching end handler pops it.

    def start_h1(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h1', 0, 1, 0))

    def end_h1(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h2(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h2', 0, 1, 0))

    def end_h2(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h3(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h3', 0, 1, 0))

    def end_h3(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h4(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h4', 0, 1, 0))

    def end_h4(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h5(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h5', 0, 1, 0))

    def end_h5(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    def start_h6(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_font(('h6', 0, 1, 0))

    def end_h6(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()

    # --- Block Structuring Elements

    def do_p(self, attrs):
        self.formatter.end_paragraph(1)

    def start_pre(self, attrs):
        # Fixed-width font, and literal whitespace while nofill is non-zero.
        self.formatter.end_paragraph(1)
        self.formatter.push_font((AS_IS, AS_IS, AS_IS, 1))
        self.nofill = self.nofill + 1

    def end_pre(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_font()
        self.nofill = max(0, self.nofill - 1)

    def start_xmp(self, attrs):
        self.start_pre(attrs)
        self.setliteral('xmp') # Tell SGML parser

    def end_xmp(self):
        self.end_pre()

    def start_listing(self, attrs):
        self.start_pre(attrs)
        self.setliteral('listing') # Tell SGML parser

    def end_listing(self):
        self.end_pre()

    def start_address(self, attrs):
        self.formatter.end_paragraph(0)
        self.formatter.push_font((AS_IS, 1, AS_IS, AS_IS))

    def end_address(self):
        self.formatter.end_paragraph(0)
        self.formatter.pop_font()

    def start_blockquote(self, attrs):
        self.formatter.end_paragraph(1)
        self.formatter.push_margin('blockquote')

    def end_blockquote(self):
        self.formatter.end_paragraph(1)
        self.formatter.pop_margin()

    # --- List Elements

    def start_ul(self, attrs):
        # Blank line only around the outermost list (list_stack empty).
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.push_margin('ul')
        self.list_stack.append(['ul', '*', 0])

    def end_ul(self):
        if self.list_stack: del self.list_stack[-1]
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.pop_margin()

    def do_li(self, attrs):
        self.formatter.end_paragraph(0)
        if self.list_stack:
            # Bump the item counter of the innermost open list in place.
            [dummy, label, counter] = top = self.list_stack[-1]
            top[2] = counter = counter+1
        else:
            # <LI> outside any list: fall back to a bullet label.
            label, counter = '*', 0
        self.formatter.add_label_data(label, counter)

    def start_ol(self, attrs):
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.push_margin('ol')
        label = '1.'
        # The TYPE attribute selects the numbering style (e.g. 'a', 'I').
        for a, v in attrs:
            if a == 'type':
                if len(v) == 1: v = v + '.'
                label = v
        self.list_stack.append(['ol', label, 0])

    def end_ol(self):
        if self.list_stack: del self.list_stack[-1]
        self.formatter.end_paragraph(not self.list_stack)
        self.formatter.pop_margin()

    def start_menu(self, attrs):
        self.start_ul(attrs)

    def end_menu(self):
        self.end_ul()

    def start_dir(self, attrs):
        self.start_ul(attrs)

    def end_dir(self):
        self.end_ul()

    def start_dl(self, attrs):
        self.formatter.end_paragraph(1)
        self.list_stack.append(['dl', '', 0])

    def end_dl(self):
        self.ddpop(1)
        if self.list_stack: del self.list_stack[-1]

    def do_dt(self, attrs):
        self.ddpop()

    def do_dd(self, attrs):
        self.ddpop()
        self.formatter.push_margin('dd')
        self.list_stack.append(['dd', '', 0])

    def ddpop(self, bl=0):
        """Close a pending <DD> margin, if one is open.

        bl -- blank-line flag passed through to end_paragraph().
        """
        self.formatter.end_paragraph(bl)
        if self.list_stack:
            if self.list_stack[-1][0] == 'dd':
                del self.list_stack[-1]
                self.formatter.pop_margin()

    # --- Phrase Markup

    # Idiomatic Elements
    # Rendered by delegating to the typographic handlers below.

    def start_cite(self, attrs): self.start_i(attrs)
    def end_cite(self): self.end_i()

    def start_code(self, attrs): self.start_tt(attrs)
    def end_code(self): self.end_tt()

    def start_em(self, attrs): self.start_i(attrs)
    def end_em(self): self.end_i()

    def start_kbd(self, attrs): self.start_tt(attrs)
    def end_kbd(self): self.end_tt()

    def start_samp(self, attrs): self.start_tt(attrs)
    def end_samp(self): self.end_tt()

    def start_strong(self, attrs): self.start_b(attrs)
    def end_strong(self): self.end_b()

    def start_var(self, attrs): self.start_i(attrs)
    def end_var(self): self.end_i()

    # Typographic Elements
    # Font tuples are (size, italic, bold, teletype); AS_IS leaves a
    # component unchanged.

    def start_i(self, attrs):
        self.formatter.push_font((AS_IS, 1, AS_IS, AS_IS))
    def end_i(self):
        self.formatter.pop_font()

    def start_b(self, attrs):
        self.formatter.push_font((AS_IS, AS_IS, 1, AS_IS))
    def end_b(self):
        self.formatter.pop_font()

    def start_tt(self, attrs):
        self.formatter.push_font((AS_IS, AS_IS, AS_IS, 1))
    def end_tt(self):
        self.formatter.pop_font()

    def start_a(self, attrs):
        # Collect HREF/NAME/TYPE attributes and hand them to anchor_bgn().
        href = ''
        name = ''
        type = ''
        for attrname, value in attrs:
            value = value.strip()
            if attrname == 'href':
                href = value
            if attrname == 'name':
                name = value
            if attrname == 'type':
                type = value.lower()
        self.anchor_bgn(href, name, type)

    def end_a(self):
        self.anchor_end()

    # --- Line Break

    def do_br(self, attrs):
        self.formatter.add_line_break()

    # --- Horizontal Rule

    def do_hr(self, attrs):
        self.formatter.add_hor_rule()

    # --- Image

    def do_img(self, attrs):
        # Collect the recognized attributes and delegate to handle_image().
        align = ''
        alt = '(image)'
        ismap = ''
        src = ''
        width = 0
        height = 0
        for attrname, value in attrs:
            if attrname == 'align':
                align = value
            if attrname == 'alt':
                alt = value
            if attrname == 'ismap':
                ismap = value
            if attrname == 'src':
                src = value
            if attrname == 'width':
                # Non-numeric WIDTH/HEIGHT values are silently ignored.
                try: width = int(value)
                except ValueError: pass
            if attrname == 'height':
                try: height = int(value)
                except ValueError: pass
        self.handle_image(src, alt, ismap, align, width, height)

    # --- Really Old Unofficial Deprecated Stuff

    def do_plaintext(self, attrs):
        self.start_pre(attrs)
        self.setnomoretags() # Tell SGML parser

    # --- Unhandled tags

    def unknown_starttag(self, tag, attrs):
        pass

    def unknown_endtag(self, tag):
        pass

+
+
+def test(args = None):
+    import sys, formatter
+
+    if not args:
+        args = sys.argv[1:]
+
+    silent = args and args[0] == '-s'
+    if silent:
+        del args[0]
+
+    if args:
+        file = args[0]
+    else:
+        file = 'test.html'
+
+    if file == '-':
+        f = sys.stdin
+    else:
+        try:
+            f = open(file, 'r')
+        except IOError, msg:
+            print file, ":", msg
+            sys.exit(1)
+
+    data = f.read()
+
+    if f is not sys.stdin:
+        f.close()
+
+    if silent:
+        f = formatter.NullFormatter()
+    else:
+        f = formatter.AbstractFormatter(formatter.DumbWriter())
+
+    p = HTMLParser(f)
+    p.feed(data)
+    p.close()
+
+
# Allow running this module directly: htmllib.py [-s] [file|-]
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/httplib.py b/depot_tools/release/win/python_24/Lib/httplib.py
new file mode 100644
index 0000000..01fc1ee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/httplib.py
@@ -0,0 +1,1366 @@
+"""HTTP/1.1 client library
+
+<intro stuff goes here>
+<other stuff, too>
+
+HTTPConnection objects go through a number of "states", which define when a
+client may legally make another request or fetch the response for a particular
+request. This diagram details these state transitions:
+
+    (null)
+      |
+      | HTTPConnection()
+      v
+    Idle
+      |
+      | putrequest()
+      v
+    Request-started
+      |
+      | ( putheader() )*  endheaders()
+      v
+    Request-sent
+      |
+      | response = getresponse()
+      v
+    Unread-response   [Response-headers-read]
+      |\____________________
+      |                     |
+      | response.read()     | putrequest()
+      v                     v
+    Idle                  Req-started-unread-response
+                     ______/|
+                   /        |
+   response.read() |        | ( putheader() )*  endheaders()
+                   v        v
+       Request-started    Req-sent-unread-response
+                            |
+                            | response.read()
+                            v
+                          Request-sent
+
+This diagram presents the following rules:
+  -- a second request may not be started until {response-headers-read}
+  -- a response [object] cannot be retrieved until {request-sent}
+  -- there is no differentiation between an unread response body and a
+     partially read response body
+
+Note: this enforcement is applied by the HTTPConnection class. The
+      HTTPResponse class does not enforce this state machine, which
+      implies sophisticated clients may accelerate the request/response
+      pipeline. Caution should be taken, though: accelerating the states
+      beyond the above pattern may imply knowledge of the server's
+      connection-close behavior for certain requests. For example, it
+      is impossible to tell whether the server will close the connection
+      UNTIL the response headers have been read; this means that further
+      requests cannot be placed into the pipeline until it is known that
+      the server will NOT be closing the connection.
+
+Logical State                  __state            __response
+-------------                  -------            ----------
+Idle                           _CS_IDLE           None
+Request-started                _CS_REQ_STARTED    None
+Request-sent                   _CS_REQ_SENT       None
+Unread-response                _CS_IDLE           <response_class>
+Req-started-unread-response    _CS_REQ_STARTED    <response_class>
+Req-sent-unread-response       _CS_REQ_SENT       <response_class>
+"""
+
+import errno
+import mimetools
+import socket
+from urlparse import urlsplit
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+__all__ = ["HTTP", "HTTPResponse", "HTTPConnection", "HTTPSConnection",
+           "HTTPException", "NotConnected", "UnknownProtocol",
+           "UnknownTransferEncoding", "UnimplementedFileMode",
+           "IncompleteRead", "InvalidURL", "ImproperConnectionState",
+           "CannotSendRequest", "CannotSendHeader", "ResponseNotReady",
+           "BadStatusLine", "error"]
+
# Default TCP ports for the http and https URL schemes.
HTTP_PORT = 80
HTTPS_PORT = 443

# Sentinel for HTTPResponse attributes that have not been determined yet.
_UNKNOWN = 'UNKNOWN'

# connection states
_CS_IDLE = 'Idle'
_CS_REQ_STARTED = 'Request-started'
_CS_REQ_SENT = 'Request-sent'

# status codes
# informational
CONTINUE = 100
SWITCHING_PROTOCOLS = 101
PROCESSING = 102

# successful
OK = 200
CREATED = 201
ACCEPTED = 202
NON_AUTHORITATIVE_INFORMATION = 203
NO_CONTENT = 204
RESET_CONTENT = 205
PARTIAL_CONTENT = 206
MULTI_STATUS = 207
IM_USED = 226

# redirection
MULTIPLE_CHOICES = 300
MOVED_PERMANENTLY = 301
FOUND = 302
SEE_OTHER = 303
NOT_MODIFIED = 304
USE_PROXY = 305
TEMPORARY_REDIRECT = 307

# client error
BAD_REQUEST = 400
UNAUTHORIZED = 401
PAYMENT_REQUIRED = 402
FORBIDDEN = 403
NOT_FOUND = 404
METHOD_NOT_ALLOWED = 405
NOT_ACCEPTABLE = 406
PROXY_AUTHENTICATION_REQUIRED = 407
REQUEST_TIMEOUT = 408
CONFLICT = 409
GONE = 410
LENGTH_REQUIRED = 411
PRECONDITION_FAILED = 412
REQUEST_ENTITY_TOO_LARGE = 413
REQUEST_URI_TOO_LONG = 414
UNSUPPORTED_MEDIA_TYPE = 415
REQUESTED_RANGE_NOT_SATISFIABLE = 416
EXPECTATION_FAILED = 417
UNPROCESSABLE_ENTITY = 422
LOCKED = 423
FAILED_DEPENDENCY = 424
UPGRADE_REQUIRED = 426

# server error
INTERNAL_SERVER_ERROR = 500
NOT_IMPLEMENTED = 501
BAD_GATEWAY = 502
SERVICE_UNAVAILABLE = 503
GATEWAY_TIMEOUT = 504
HTTP_VERSION_NOT_SUPPORTED = 505
INSUFFICIENT_STORAGE = 507
NOT_EXTENDED = 510
+
class HTTPMessage(mimetools.Message):
    """RFC 822 message specialized for HTTP headers.

    Differs from the mimetools/rfc822 base class in how repeated header
    fields are handled: values for the same field name are joined with
    ", " per RFC 2616 sec 4.2 instead of being kept as separate entries.
    """

    def addheader(self, key, value):
        """Add header for field key handling repeats."""
        prev = self.dict.get(key)
        if prev is None:
            self.dict[key] = value
        else:
            combined = ", ".join((prev, value))
            self.dict[key] = combined

    def addcontinue(self, key, more):
        """Add more field data from a continuation line."""
        prev = self.dict[key]
        self.dict[key] = prev + "\n " + more

    def readheaders(self):
        """Read header lines.

        Read header lines up to the entirely blank line that terminates them.
        The (normally blank) line that ends the headers is skipped, but not
        included in the returned list.  If a non-header line ends the headers,
        (which is an error), an attempt is made to backspace over it; it is
        never included in the returned list.

        The variable self.status is set to the empty string if all went well,
        otherwise it is an error message.  The variable self.headers is a
        completely uninterpreted list of lines contained in the header (so
        printing them will reproduce the header exactly as it appears in the
        file).

        If multiple header fields with the same name occur, they are combined
        according to the rules in RFC 2616 sec 4.2:

        Appending each subsequent field-value to the first, each separated
        by a comma. The order in which header fields with the same field-name
        are received is significant to the interpretation of the combined
        field value.
        """
        # XXX The implementation overrides the readheaders() method of
        # rfc822.Message.  The base class design isn't amenable to
        # customized behavior here so the method here is a copy of the
        # base class code with a few small changes.

        self.dict = {}
        self.unixfrom = ''
        self.headers = hlist = []
        self.status = ''
        headerseen = ""
        firstline = 1
        # Pushback strategy: prefer an unread() hook on fp if present,
        # otherwise remember tell() positions so we can seek() back over a
        # non-header line.
        startofline = unread = tell = None
        if hasattr(self.fp, 'unread'):
            unread = self.fp.unread
        elif self.seekable:
            tell = self.fp.tell
        while True:
            if tell:
                try:
                    startofline = tell()
                except IOError:
                    # fp claimed to be seekable but isn't; stop trying.
                    startofline = tell = None
                    self.seekable = 0
            line = self.fp.readline()
            if not line:
                self.status = 'EOF in headers'
                break
            # Skip unix From name time lines
            if firstline and line.startswith('From '):
                self.unixfrom = self.unixfrom + line
                continue
            firstline = 0
            if headerseen and line[0] in ' \t':
                # XXX Not sure if continuation lines are handled properly
                # for http and/or for repeating headers
                # It's a continuation line.
                hlist.append(line)
                self.addcontinue(headerseen, line.strip())
                continue
            elif self.iscomment(line):
                # It's a comment.  Ignore it.
                continue
            elif self.islast(line):
                # Note! No pushback here!  The delimiter line gets eaten.
                break
            headerseen = self.isheader(line)
            if headerseen:
                # It's a legal header line, save it.
                hlist.append(line)
                self.addheader(headerseen, line[len(headerseen)+1:].strip())
                continue
            else:
                # It's not a header line; throw it back and stop here.
                if not self.dict:
                    self.status = 'No headers'
                else:
                    self.status = 'Non-header line where header expected'
                # Try to undo the read.
                if unread:
                    unread(line)
                elif tell:
                    self.fp.seek(startofline)
                else:
                    self.status = self.status + '; bad seek'
                break
+
class HTTPResponse:
    """One HTTP response read from a socket.

    Wraps the socket in an unbuffered file object, parses the status
    line and headers in begin(), and serves the body via read().
    Understands chunked transfer coding, Content-Length framing, and
    the HTTP/0.9 Simple-Response fallback.
    """

    # strict: If true, raise BadStatusLine if the status line can't be
    # parsed as a valid HTTP/1.0 or 1.1 status line.  By default it is
    # false because it prevents clients from talking to HTTP/0.9
    # servers.  Note that a response with a sufficiently corrupted
    # status line will look like an HTTP/0.9 response.

    # See RFC 2616 sec 19.6 and RFC 1945 sec 6 for details.

    def __init__(self, sock, debuglevel=0, strict=0, method=None):
        # Unbuffered so that no body bytes are left stranded in a
        # buffer when the connection is handed back for reuse.
        self.fp = sock.makefile('rb', 0)
        self.debuglevel = debuglevel
        self.strict = strict
        self._method = method       # request method; needed for HEAD framing

        self.msg = None

        # from the Status-Line of the response
        self.version = _UNKNOWN # HTTP-Version
        self.status = _UNKNOWN  # Status-Code
        self.reason = _UNKNOWN  # Reason-Phrase

        self.chunked = _UNKNOWN         # is "chunked" being used?
        self.chunk_left = _UNKNOWN      # bytes left to read in current chunk
        self.length = _UNKNOWN          # number of bytes left in response
        self.will_close = _UNKNOWN      # conn will close at end of response

    def _read_status(self):
        """Read one Status-Line; return (version, status, reason).

        Raises BadStatusLine on EOF or (in strict mode) on a malformed
        line.  In non-strict mode a malformed line is treated as the
        first data of an HTTP/0.9 Simple-Response and pushed back.
        """
        # Initialize with Simple-Response defaults
        line = self.fp.readline()
        if self.debuglevel > 0:
            print "reply:", repr(line)
        if not line:
            # Presumably, the server closed the connection before
            # sending a valid response.
            raise BadStatusLine(line)
        try:
            [version, status, reason] = line.split(None, 2)
        except ValueError:
            try:
                [version, status] = line.split(None, 1)
                reason = ""
            except ValueError:
                # empty version will cause next test to fail and status
                # will be treated as 0.9 response.
                version = ""
        if not version.startswith('HTTP/'):
            if self.strict:
                self.close()
                raise BadStatusLine(line)
            else:
                # assume it's a Simple-Response from an 0.9 server
                self.fp = LineAndFileWrapper(line, self.fp)
                return "HTTP/0.9", 200, ""

        # The status code is a three-digit number
        try:
            status = int(status)
            if status < 100 or status > 999:
                raise BadStatusLine(line)
        except ValueError:
            raise BadStatusLine(line)
        return version, status, reason

    def begin(self):
        """Parse the status line and headers and set up body framing.

        Safe to call more than once; subsequent calls are no-ops.
        """
        if self.msg is not None:
            # we've already started reading the response
            return

        # read until we get a non-100 response
        while True:
            version, status, reason = self._read_status()
            if status != CONTINUE:
                break
            # skip the header from the 100 response
            while True:
                skip = self.fp.readline().strip()
                if not skip:
                    break
                if self.debuglevel > 0:
                    print "header:", skip

        self.status = status
        self.reason = reason.strip()
        if version == 'HTTP/1.0':
            self.version = 10
        elif version.startswith('HTTP/1.'):
            self.version = 11   # use HTTP/1.1 code for HTTP/1.x where x>=1
        elif version == 'HTTP/0.9':
            self.version = 9
        else:
            raise UnknownProtocol(version)

        if self.version == 9:
            # HTTP/0.9 has no headers; everything remaining is body.
            self.chunked = 0
            self.will_close = 1
            self.msg = HTTPMessage(StringIO())
            return

        self.msg = HTTPMessage(self.fp, 0)
        if self.debuglevel > 0:
            for hdr in self.msg.headers:
                print "header:", hdr,

        # don't let the msg keep an fp
        self.msg.fp = None

        # are we using the chunked-style of transfer encoding?
        tr_enc = self.msg.getheader('transfer-encoding')
        if tr_enc and tr_enc.lower() == "chunked":
            self.chunked = 1
            self.chunk_left = None
        else:
            self.chunked = 0

        # will the connection close at the end of the response?
        self.will_close = self._check_close()

        # do we have a Content-Length?
        # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
        length = self.msg.getheader('content-length')
        if length and not self.chunked:
            try:
                self.length = int(length)
            except ValueError:
                self.length = None
        else:
            self.length = None

        # does the body have a fixed length? (of zero)
        if (status == NO_CONTENT or status == NOT_MODIFIED or
            100 <= status < 200 or      # 1xx codes
            self._method == 'HEAD'):
            self.length = 0

        # if the connection remains open, and we aren't using chunked, and
        # a content-length was not provided, then assume that the connection
        # WILL close.
        if not self.will_close and \
           not self.chunked and \
           self.length is None:
            self.will_close = 1

    def _check_close(self):
        """Return true if the server will close the connection after
        this response, judging from version and connection headers."""
        conn = self.msg.getheader('connection')
        if self.version == 11:
            # An HTTP/1.1 proxy is assumed to stay open unless
            # explicitly closed.
            conn = self.msg.getheader('connection')
            if conn and "close" in conn.lower():
                return True
            return False

        # Some HTTP/1.0 implementations have support for persistent
        # connections, using rules different than HTTP/1.1.

        # For older HTTP, Keep-Alive indicates persistent connection.
        if self.msg.getheader('keep-alive'):
            return False

        # At least Akamai returns a "Connection: Keep-Alive" header,
        # which was supposed to be sent by the client.
        if conn and "keep-alive" in conn.lower():
            return False

        # Proxy-Connection is a netscape hack.
        pconn = self.msg.getheader('proxy-connection')
        if pconn and "keep-alive" in pconn.lower():
            return False

        # otherwise, assume it will close
        return True

    def close(self):
        """Close the file object wrapping the socket, if still open."""
        if self.fp:
            self.fp.close()
            self.fp = None

    def isclosed(self):
        """Return true once the response's file object has been closed."""
        # NOTE: it is possible that we will not ever call self.close(). This
        #       case occurs when will_close is TRUE, length is None, and we
        #       read up to the last byte, but NOT past it.
        #
        # IMPLIES: if will_close is FALSE, then self.close() will ALWAYS be
        #          called, meaning self.isclosed() is meaningful.
        return self.fp is None

    # XXX It would be nice to have readline and __iter__ for this, too.

    def read(self, amt=None):
        """Read and return the body, or at most *amt* bytes of it.

        Returns '' once the response has been fully consumed/closed.
        """
        if self.fp is None:
            return ''

        if self.chunked:
            return self._read_chunked(amt)

        if amt is None:
            # unbounded read
            if self.length is None:
                s = self.fp.read()
            else:
                s = self._safe_read(self.length)
                self.length = 0
            self.close()        # we read everything
            return s

        if self.length is not None:
            if amt > self.length:
                # clip the read to the "end of response"
                amt = self.length

        # we do not use _safe_read() here because this may be a .will_close
        # connection, and the user is reading more bytes than will be provided
        # (for example, reading in 1k chunks)
        s = self.fp.read(amt)
        if self.length is not None:
            self.length -= len(s)

        return s

    def _read_chunked(self, amt):
        """Read a chunked-encoded body; *amt* limits bytes returned
        (None means read everything including the trailer)."""
        assert self.chunked != _UNKNOWN
        chunk_left = self.chunk_left
        value = ''

        # XXX This accumulates chunks by repeated string concatenation,
        # which is not efficient as the number or size of chunks gets big.
        while True:
            if chunk_left is None:
                line = self.fp.readline()
                i = line.find(';')
                if i >= 0:
                    line = line[:i] # strip chunk-extensions
                chunk_left = int(line, 16)
                if chunk_left == 0:
                    break
            if amt is None:
                value += self._safe_read(chunk_left)
            elif amt < chunk_left:
                value += self._safe_read(amt)
                self.chunk_left = chunk_left - amt
                return value
            elif amt == chunk_left:
                value += self._safe_read(amt)
                self._safe_read(2)  # toss the CRLF at the end of the chunk
                self.chunk_left = None
                return value
            else:
                value += self._safe_read(chunk_left)
                amt -= chunk_left

            # we read the whole chunk, get another
            self._safe_read(2)      # toss the CRLF at the end of the chunk
            chunk_left = None

        # read and discard trailer up to the CRLF terminator
        ### note: we shouldn't have any trailers!
        while True:
            line = self.fp.readline()
            if line == '\r\n':
                break

        # we read everything; close the "file"
        self.close()

        return value

    def _safe_read(self, amt):
        """Read the number of bytes requested, compensating for partial reads.

        Normally, we have a blocking socket, but a read() can be interrupted
        by a signal (resulting in a partial read).

        Note that we cannot distinguish between EOF and an interrupt when zero
        bytes have been read. IncompleteRead() will be raised in this
        situation.

        This function should be used when <amt> bytes "should" be present for
        reading. If the bytes are truly not available (due to EOF), then the
        IncompleteRead exception can be used to detect the problem.
        """
        s = ''
        while amt > 0:
            chunk = self.fp.read(amt)
            if not chunk:
                raise IncompleteRead(s)
            s += chunk
            amt -= len(chunk)
        return s

    def getheader(self, name, default=None):
        """Return the value of header *name*, or *default* if absent.

        Raises ResponseNotReady if called before begin()."""
        if self.msg is None:
            raise ResponseNotReady()
        return self.msg.getheader(name, default)

    def getheaders(self):
        """Return list of (header, value) tuples."""
        if self.msg is None:
            raise ResponseNotReady()
        return self.msg.items()
+
+
class HTTPConnection:
    """One client connection to an HTTP server.

    Implements the request-side state machine described in the module
    docstring (_CS_IDLE -> _CS_REQ_STARTED -> _CS_REQ_SENT) and manages
    the underlying socket; responses are instances of response_class.
    """

    # Class-level defaults, overridable per instance.
    _http_vsn = 11
    _http_vsn_str = 'HTTP/1.1'

    response_class = HTTPResponse
    default_port = HTTP_PORT
    auto_open = 1       # reconnect automatically on send after close
    debuglevel = 0
    strict = 0

    def __init__(self, host, port=None, strict=None):
        self.sock = None
        self._buffer = []               # request lines pending _send_output()
        self.__response = None          # last response, while still unread
        self.__state = _CS_IDLE
        self._method = None

        self._set_hostport(host, port)
        if strict is not None:
            self.strict = strict

    def _set_hostport(self, host, port):
        # Split an optional ":port" suffix off host (honoring IPv6
        # "[...]" literals) and store self.host/self.port.
        if port is None:
            i = host.rfind(':')
            j = host.rfind(']')         # ipv6 addresses have [...]
            if i > j:
                try:
                    port = int(host[i+1:])
                except ValueError:
                    raise InvalidURL("nonnumeric port: '%s'" % host[i+1:])
                host = host[:i]
            else:
                port = self.default_port
            if host and host[0] == '[' and host[-1] == ']':
                host = host[1:-1]
        self.host = host
        self.port = port

    def set_debuglevel(self, level):
        self.debuglevel = level

    def connect(self):
        """Connect to the host and port specified in __init__."""
        msg = "getaddrinfo returns an empty list"
        # Try each address returned by getaddrinfo until one connects.
        for res in socket.getaddrinfo(self.host, self.port, 0,
                                      socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                self.sock = socket.socket(af, socktype, proto)
                if self.debuglevel > 0:
                    print "connect: (%s, %s)" % (self.host, self.port)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.debuglevel > 0:
                    print 'connect fail:', (self.host, self.port)
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            raise socket.error, msg

    def close(self):
        """Close the connection to the HTTP server."""
        if self.sock:
            self.sock.close()   # close it manually... there may be other refs
            self.sock = None
        if self.__response:
            self.__response.close()
            self.__response = None
        self.__state = _CS_IDLE

    def send(self, str):
        """Send `str' to the server."""
        if self.sock is None:
            if self.auto_open:
                self.connect()
            else:
                raise NotConnected()

        # send the data to the server. if we get a broken pipe, then close
        # the socket. we want to reconnect when somebody tries to send again.
        #
        # NOTE: we DO propagate the error, though, because we cannot simply
        #       ignore the error... the caller will know if they can retry.
        if self.debuglevel > 0:
            print "send:", repr(str)
        try:
            self.sock.sendall(str)
        except socket.error, v:
            if v[0] == 32:      # Broken pipe
                self.close()
            raise

    def _output(self, s):
        """Add a line of output to the current request buffer.

        Assumes that the line does *not* end with \\r\\n.
        """
        self._buffer.append(s)

    def _send_output(self):
        """Send the currently buffered request and clear the buffer.

        Appends an extra \\r\\n to the buffer.
        """
        self._buffer.extend(("", ""))
        msg = "\r\n".join(self._buffer)
        del self._buffer[:]
        self.send(msg)

    def putrequest(self, method, url, skip_host=0, skip_accept_encoding=0):
        """Send a request to the server.

        `method' specifies an HTTP request method, e.g. 'GET'.
        `url' specifies the object being requested, e.g. '/index.html'.
        `skip_host' if True does not add automatically a 'Host:' header
        `skip_accept_encoding' if True does not add automatically an
           'Accept-Encoding:' header

        Raises CannotSendRequest unless the connection is idle.
        """

        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None


        # in certain cases, we cannot issue another request on this connection.
        # this occurs when:
        #   1) we are in the process of sending a request.   (_CS_REQ_STARTED)
        #   2) a response to a previous request has signalled that it is going
        #      to close the connection upon completion.
        #   3) the headers for the previous response have not been read, thus
        #      we cannot determine whether point (2) is true.   (_CS_REQ_SENT)
        #
        # if there is no prior response, then we can request at will.
        #
        # if point (2) is true, then we will have passed the socket to the
        # response (effectively meaning, "there is no prior response"), and
        # will open a new one when a new request is made.
        #
        # Note: if a prior response exists, then we *can* start a new request.
        #       We are not allowed to begin fetching the response to this new
        #       request, however, until that prior response is complete.
        #
        if self.__state == _CS_IDLE:
            self.__state = _CS_REQ_STARTED
        else:
            raise CannotSendRequest()

        # Save the method we use, we need it later in the response phase
        self._method = method
        if not url:
            url = '/'
        str = '%s %s %s' % (method, url, self._http_vsn_str)

        self._output(str)

        if self._http_vsn == 11:
            # Issue some standard headers for better HTTP/1.1 compliance

            if not skip_host:
                # this header is issued *only* for HTTP/1.1
                # connections. more specifically, this means it is
                # only issued when the client uses the new
                # HTTPConnection() class. backwards-compat clients
                # will be using HTTP/1.0 and those clients may be
                # issuing this header themselves. we should NOT issue
                # it twice; some web servers (such as Apache) barf
                # when they see two Host: headers

                # If we need a non-standard port, include it in the
                # header.  If the request is going through a proxy,
                # use the host of the actual URL, not the host of the
                # proxy.

                netloc = ''
                if url.startswith('http'):
                    nil, netloc, nil, nil, nil = urlsplit(url)

                if netloc:
                    self.putheader('Host', netloc.encode("idna"))
                elif self.port == HTTP_PORT:
                    self.putheader('Host', self.host.encode("idna"))
                else:
                    self.putheader('Host', "%s:%s" % (self.host.encode("idna"), self.port))

            # note: we are assuming that clients will not attempt to set these
            #       headers since *this* library must deal with the
            #       consequences. this also means that when the supporting
            #       libraries are updated to recognize other forms, then this
            #       code should be changed (removed or updated).

            # we only want a Content-Encoding of "identity" since we don't
            # support encodings such as x-gzip or x-deflate.
            if not skip_accept_encoding:
                self.putheader('Accept-Encoding', 'identity')

            # we can accept "chunked" Transfer-Encodings, but no others
            # NOTE: no TE header implies *only* "chunked"
            #self.putheader('TE', 'chunked')

            # if TE is supplied in the header, then it must appear in a
            # Connection header.
            #self.putheader('Connection', 'TE')

        else:
            # For HTTP/1.0, the server will assume "not chunked"
            pass

    def putheader(self, header, value):
        """Send a request header line to the server.

        For example: h.putheader('Accept', 'text/html')

        Raises CannotSendHeader unless putrequest() was called first.
        """
        if self.__state != _CS_REQ_STARTED:
            raise CannotSendHeader()

        str = '%s: %s' % (header, value)
        self._output(str)

    def endheaders(self):
        """Indicate that the last header line has been sent to the server."""

        if self.__state == _CS_REQ_STARTED:
            self.__state = _CS_REQ_SENT
        else:
            raise CannotSendHeader()

        self._send_output()

    def request(self, method, url, body=None, headers={}):
        """Send a complete request to the server."""

        try:
            self._send_request(method, url, body, headers)
        except socket.error, v:
            # trap 'Broken pipe' if we're allowed to automatically reconnect
            if v[0] != 32 or not self.auto_open:
                raise
            # try one more time
            self._send_request(method, url, body, headers)

    def _send_request(self, method, url, body, headers):
        """Emit request line, headers, and optional body in one go."""
        # honour explicitly requested Host: and Accept-Encoding headers
        header_names = dict.fromkeys([k.lower() for k in headers])
        skips = {}
        if 'host' in header_names:
            skips['skip_host'] = 1
        if 'accept-encoding' in header_names:
            skips['skip_accept_encoding'] = 1

        self.putrequest(method, url, **skips)

        if body and ('content-length' not in header_names):
            self.putheader('Content-Length', str(len(body)))
        for hdr, value in headers.iteritems():
            self.putheader(hdr, value)
        self.endheaders()

        if body:
            self.send(body)

    def getresponse(self):
        "Get the response from the server."

        # if a prior response has been completed, then forget about it.
        if self.__response and self.__response.isclosed():
            self.__response = None

        #
        # if a prior response exists, then it must be completed (otherwise, we
        # cannot read this response's header to determine the connection-close
        # behavior)
        #
        # note: if a prior response existed, but was connection-close, then the
        # socket and response were made independent of this HTTPConnection
        # object since a new request requires that we open a whole new
        # connection
        #
        # this means the prior response had one of two states:
        #   1) will_close: this connection was reset and the prior socket and
        #                  response operate independently
        #   2) persistent: the response was retained and we await its
        #                  isclosed() status to become true.
        #
        if self.__state != _CS_REQ_SENT or self.__response:
            raise ResponseNotReady()

        if self.debuglevel > 0:
            response = self.response_class(self.sock, self.debuglevel,
                                           strict=self.strict,
                                           method=self._method)
        else:
            response = self.response_class(self.sock, strict=self.strict,
                                           method=self._method)

        response.begin()
        assert response.will_close != _UNKNOWN
        self.__state = _CS_IDLE

        if response.will_close:
            # this effectively passes the connection to the response
            self.close()
        else:
            # remember this, so we can tell when it is complete
            self.__response = response

        return response
+
+# The next several classes are used to define FakeSocket, a socket-like
+# interface to an SSL connection.
+
+# The primary complexity comes from faking a makefile() method.  The
+# standard socket makefile() implementation calls dup() on the socket
+# file descriptor.  As a consequence, clients can call close() on the
+# parent socket and its makefile children in any order.  The underlying
+# socket isn't closed until they are all closed.
+
+# The implementation uses reference counting to keep the socket open
+# until the last client calls close().  SharedSocket keeps track of
+# the reference counting and SharedSocketClient provides a constructor
+# and close() method that call incref() and decref() correctly.
+
class SharedSocket:
    """Reference-counted wrapper around a raw socket.

    Clients call incref()/decref(); the wrapped socket is closed as
    soon as the count drops back to zero, or at garbage collection.
    """

    def __init__(self, sock):
        self.sock = sock
        self._refcnt = 0

    def incref(self):
        # One more client is sharing the socket.
        self._refcnt = self._refcnt + 1

    def decref(self):
        # One client is done; close the socket once nobody is left.
        self._refcnt = self._refcnt - 1
        assert self._refcnt >= 0
        if not self._refcnt:
            self.sock.close()

    def __del__(self):
        # Last-ditch cleanup if clients failed to balance their refs.
        self.sock.close()
+
class SharedSocketClient:
    """Base class for objects holding a reference to a SharedSocket.

    The constructor takes one reference on the shared socket; close()
    releases it exactly once, no matter how often it is called.
    """

    def __init__(self, shared):
        self._closed = 0
        self._shared = shared
        self._shared.incref()
        self._sock = shared.sock

    def close(self):
        # Idempotent: only the first call releases our reference.
        if self._closed:
            return
        self._shared.decref()
        self._closed = 1
        self._shared = None
+
class SSLFile(SharedSocketClient):
    """File-like object wrapping an SSL socket."""

    # Default number of bytes requested from the SSL layer per read.
    BUFSIZE = 8192

    def __init__(self, sock, ssl, bufsize=None):
        # sock is a SharedSocket; ssl is the SSL object layered over it.
        SharedSocketClient.__init__(self, sock)
        self._ssl = ssl
        self._buf = ''  # bytes read from SSL but not yet handed out
        self._bufsize = bufsize or self.__class__.BUFSIZE

    def _read(self):
        # Read one chunk (at most _bufsize bytes) from the SSL layer,
        # returning '' on a clean EOF.  Transient conditions (SSL
        # want-read/want-write, EINTR) are retried; real errors raise.
        buf = ''
        # put in a loop so that we retry on transient errors
        while True:
            try:
                buf = self._ssl.read(self._bufsize)
            except socket.sslerror, err:
                if (err[0] == socket.SSL_ERROR_WANT_READ
                    or err[0] == socket.SSL_ERROR_WANT_WRITE):
                    # SSL layer needs another round trip; retry the read.
                    continue
                if (err[0] == socket.SSL_ERROR_ZERO_RETURN
                    or err[0] == socket.SSL_ERROR_EOF):
                    # Connection was shut down cleanly: report EOF.
                    break
                raise
            except socket.error, err:
                if err[0] == errno.EINTR:
                    # Interrupted system call; retry.
                    continue
                if err[0] == errno.EBADF:
                    # XXX socket was closed?
                    break
                raise
            else:
                break
        return buf

    def read(self, size=None):
        # Return size bytes, or everything up to EOF when size is None.
        L = [self._buf]
        avail = len(self._buf)
        # Keep pulling chunks until we have enough or hit EOF.
        while size is None or avail < size:
            s = self._read()
            if s == '':
                break
            L.append(s)
            avail += len(s)
        all = "".join(L)
        if size is None:
            self._buf = ''
            return all
        else:
            # Save any excess bytes for the next call.
            self._buf = all[size:]
            return all[:size]

    def readline(self):
        # Return up to and including the next newline, or the rest of
        # the stream if no newline arrives before EOF.
        L = [self._buf]
        self._buf = ''
        while 1:
            # A newline is a single character, so only the most recent
            # chunk needs scanning; earlier chunks were already checked.
            i = L[-1].find("\n")
            if i >= 0:
                break
            s = self._read()
            if s == '':
                break
            L.append(s)
        if i == -1:
            # loop exited because there is no more data
            return "".join(L)
        else:
            all = "".join(L)
            # XXX could do enough bookkeeping not to do a 2nd search
            i = all.find("\n") + 1
            line = all[:i]
            self._buf = all[i:]
            return line

    def readlines(self, sizehint=0):
        # Read lines until EOF; stop early once sizehint bytes (if any)
        # have been accumulated, mirroring the file object protocol.
        total = 0
        list = []
        while True:
            line = self.readline()
            if not line:
                break
            list.append(line)
            total += len(line)
            if sizehint and total >= sizehint:
                break
        return list

    def fileno(self):
        # Expose the underlying socket's descriptor (e.g. for select()).
        return self._sock.fileno()

    def __iter__(self):
        return self

    def next(self):
        # Iterator protocol: yield successive lines, like a real file.
        line = self.readline()
        if not line:
            raise StopIteration
        return line
+
class FakeSocket(SharedSocketClient):
    """Socket-like shim that routes I/O through an SSL object."""

    class _closedsocket:
        # Stand-in installed after close(); any attribute access fails
        # the way a closed file descriptor would.
        def __getattr__(self, name):
            raise error(9, 'Bad file descriptor')

    def __init__(self, sock, ssl):
        # Wrap the raw socket so this object and any SSLFile returned by
        # makefile() can share it via reference counting.
        sock = SharedSocket(sock)
        SharedSocketClient.__init__(self, sock)
        self._ssl = ssl

    def close(self):
        SharedSocketClient.close(self)
        # Make further use of this object raise, like a closed socket.
        self._sock = self.__class__._closedsocket()

    def makefile(self, mode, bufsize=None):
        # Only read modes are supported; the response parser needs no more.
        if mode != 'r' and mode != 'rb':
            raise UnimplementedFileMode()
        return SSLFile(self._shared, self._ssl, bufsize)

    def send(self, stuff, flags = 0):
        # Writes bypass the shared socket and go straight to SSL.
        return self._ssl.write(stuff)

    sendall = send

    def recv(self, len = 1024, flags = 0):
        return self._ssl.read(len)

    def __getattr__(self, attr):
        # Everything else is delegated to the underlying socket.
        return getattr(self._sock, attr)
+
+
class HTTPSConnection(HTTPConnection):
    "This class allows communication via SSL."

    default_port = HTTPS_PORT

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None):
        # key_file/cert_file are the client-side X.509 private key and
        # certificate (PEM files), forwarded to socket.ssl() on connect.
        HTTPConnection.__init__(self, host, port, strict)
        self.key_file = key_file
        self.cert_file = cert_file

    def connect(self):
        "Connect to a host on a given (SSL) port."

        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((self.host, self.port))
        # Layer SSL over the TCP socket and present it through the
        # socket-like FakeSocket wrapper expected by the base class.
        ssl = socket.ssl(sock, self.key_file, self.cert_file)
        self.sock = FakeSocket(sock, ssl)
+
+
class HTTP:
    "Compatibility class with httplib.py from 1.5."

    # Speak HTTP/1.0, matching the behavior of the 1.5-era interface.
    _http_vsn = 10
    _http_vsn_str = 'HTTP/1.0'

    debuglevel = 0

    _connection_class = HTTPConnection

    def __init__(self, host='', port=None, strict=None):
        "Provide a default host, since the superclass requires one."

        # some joker passed 0 explicitly, meaning default port
        if port == 0:
            port = None

        # Note that we may pass an empty string as the host; this will throw
        # an error when we attempt to connect. Presumably, the client code
        # will call connect before then, with a proper host.
        self._setup(self._connection_class(host, port, strict))

    def _setup(self, conn):
        # Wrap the modern connection object and forward the old-style
        # entry points directly to it.
        self._conn = conn

        # set up delegation to flesh out interface
        self.send = conn.send
        self.putrequest = conn.putrequest
        self.endheaders = conn.endheaders
        self.set_debuglevel = conn.set_debuglevel

        conn._http_vsn = self._http_vsn
        conn._http_vsn_str = self._http_vsn_str

        self.file = None

    def connect(self, host=None, port=None):
        "Accept arguments to set the host/port, since the superclass doesn't."

        if host is not None:
            self._conn._set_hostport(host, port)
        self._conn.connect()

    def getfile(self):
        "Provide a getfile, since the superclass' does not use this concept."
        return self.file

    def putheader(self, header, *values):
        "The superclass allows only one value argument."
        # The 1.5 interface accepted several values; fold them into one
        # header using RFC 822 continuation lines.
        self._conn.putheader(header, '\r\n\t'.join(values))

    def getreply(self):
        """Compat definition since superclass does not define it.

        Returns a tuple consisting of:
        - server status code (e.g. '200' if all goes well)
        - server "reason" corresponding to status code
        - any RFC822 headers in the response from the server
        """
        try:
            response = self._conn.getresponse()
        except BadStatusLine, e:
            ### hmm. if getresponse() ever closes the socket on a bad request,
            ### then we are going to have problems with self.sock

            ### should we keep this behavior? do people use it?
            # keep the socket open (as a file), and return it
            self.file = self._conn.sock.makefile('rb', 0)

            # close our socket -- we want to restart after any protocol error
            self.close()

            self.headers = None
            # -1 signals "could not parse a status line" to 1.5 callers.
            return -1, e.line, None

        self.headers = response.msg
        self.file = response.fp
        return response.status, response.reason, response.msg

    def close(self):
        self._conn.close()

        # note that self.file == response.fp, which gets closed by the
        # superclass. just clear the object ref here.
        ### hmm. messy. if status==-1, then self.file is owned by us.
        ### well... we aren't explicitly closing, but losing this ref will
        ### do it
        self.file = None
+
# Only offer HTTPS when the interpreter was built with SSL support.
if hasattr(socket, 'ssl'):
    class HTTPS(HTTP):
        """Compatibility with 1.5 httplib interface

        Python 1.5.2 did not have an HTTPS class, but it defined an
        interface for sending http requests that is also useful for
        https.
        """

        _connection_class = HTTPSConnection

        def __init__(self, host='', port=None, key_file=None, cert_file=None,
                     strict=None):
            # provide a default host, pass the X509 cert info

            # urf. compensate for bad input.
            if port == 0:
                port = None
            self._setup(self._connection_class(host, port, key_file,
                                               cert_file, strict))

            # we never actually use these for anything, but we keep them
            # here for compatibility with post-1.5.2 CVS.
            self.key_file = key_file
            self.cert_file = cert_file
+
+
class HTTPException(Exception):
    """Base class of all exceptions raised by this module.

    Subclasses that define an __init__ must call Exception.__init__
    or set self.args themselves; otherwise str() on the instance fails.
    """

class NotConnected(HTTPException):
    """A request was attempted before the connection was established."""

class InvalidURL(HTTPException):
    """A URL was syntactically unusable."""

class UnknownProtocol(HTTPException):
    """The server replied with an HTTP version we do not understand."""

    def __init__(self, version):
        self.args = (version,)
        self.version = version

class UnknownTransferEncoding(HTTPException):
    """The response used a Transfer-Encoding we cannot decode."""

class UnimplementedFileMode(HTTPException):
    """makefile() was asked for a mode other than 'r' or 'rb'."""

class IncompleteRead(HTTPException):
    """The connection dropped before the full body arrived."""

    def __init__(self, partial):
        self.args = (partial,)
        self.partial = partial

class ImproperConnectionState(HTTPException):
    """Base for errors caused by calling methods out of sequence."""

class CannotSendRequest(ImproperConnectionState):
    """putrequest() was called while another request is in flight."""

class CannotSendHeader(ImproperConnectionState):
    """putheader() was called outside the request/headers phase."""

class ResponseNotReady(ImproperConnectionState):
    """getresponse() was called before a request was fully sent."""

class BadStatusLine(HTTPException):
    """The server's status line could not be parsed."""

    def __init__(self, line):
        self.args = (line,)
        self.line = line

# for backwards compatibility
error = HTTPException
+
class LineAndFileWrapper:
    """A limited file-like object for HTTP/0.9 responses."""

    # The status-line parsing code calls readline(), which normally
    # get the HTTP status line.  For a 0.9 response, however, this is
    # actually the first line of the body!  Clients need to get a
    # readable file object that contains that line.

    def __init__(self, line, file):
        # line: the already-consumed first line of the body.
        # file: file object supplying the remainder of the response.
        self._line = line
        self._file = file
        self._line_consumed = 0
        self._line_offset = 0          # how far into _line we have read
        self._line_left = len(line)    # bytes of _line still unread

    def __getattr__(self, attr):
        # Anything not defined here is delegated to the wrapped file.
        return getattr(self._file, attr)

    def _done(self):
        # called when the last byte is read from the line.  After the
        # call, all read methods are delegated to the underlying file
        # object.
        self._line_consumed = 1
        self.read = self._file.read
        self.readline = self._file.readline
        self.readlines = self._file.readlines

    def read(self, amt=None):
        # Serve from the saved line first; afterwards _done() has
        # rebound self.read so this body is only reached once more via
        # the _line_consumed guard.
        if self._line_consumed:
            return self._file.read(amt)
        assert self._line_left
        if amt is None or amt > self._line_left:
            # Request spans past the saved line: hand over its unread
            # tail plus whatever the file can supply.
            s = self._line[self._line_offset:]
            self._done()
            if amt is None:
                return s + self._file.read()
            else:
                return s + self._file.read(amt - len(s))
        else:
            assert amt <= self._line_left
            i = self._line_offset
            j = i + amt
            s = self._line[i:j]
            self._line_offset = j
            self._line_left -= amt
            if self._line_left == 0:
                self._done()
            return s

    def readline(self):
        if self._line_consumed:
            return self._file.readline()
        assert self._line_left
        # The saved line always terminates the current "line": return
        # its unread tail and switch to pure delegation.
        s = self._line[self._line_offset:]
        self._done()
        return s

    def readlines(self, size=None):
        if self._line_consumed:
            return self._file.readlines(size)
        assert self._line_left
        # First "line" is the saved tail; the rest comes from the file.
        L = [self._line[self._line_offset:]]
        self._done()
        if size is None:
            return L + self._file.readlines()
        else:
            return L + self._file.readlines(size)
+
def test():
    """Test this module.

    A hodge podge of tests collected here, because they have too many
    external dependencies for the regular test suite.
    """

    import sys
    import getopt
    # -d is repeatable; each occurrence bumps the debug level.
    opts, args = getopt.getopt(sys.argv[1:], 'd')
    dl = 0
    for o, a in opts:
        if o == '-d': dl = dl + 1
    host = 'www.python.org'
    selector = '/'
    # Optional positional arguments override host and selector.
    if args[0:]: host = args[0]
    if args[1:]: selector = args[1]
    h = HTTP()
    h.set_debuglevel(dl)
    h.connect(host)
    h.putrequest('GET', selector)
    h.endheaders()
    status, reason, headers = h.getreply()
    print 'status =', status
    print 'reason =', reason
    print "read", len(h.getfile().read())
    print
    if headers:
        for header in headers.headers: print header.strip()
    print

    # minimal test that code to extract host from url works
    class HTTP11(HTTP):
        _http_vsn = 11
        _http_vsn_str = 'HTTP/1.1'

    h = HTTP11('www.python.org')
    h.putrequest('GET', 'http://www.python.org/~jeremy/')
    h.endheaders()
    h.getreply()
    h.close()

    # Exercise HTTPS only when SSL support was compiled in.
    if hasattr(socket, 'ssl'):

        for host, selector in (('sourceforge.net', '/projects/python'),
                               ):
            print "https://%s%s" % (host, selector)
            hs = HTTPS()
            hs.set_debuglevel(dl)
            hs.connect(host)
            hs.putrequest('GET', selector)
            hs.endheaders()
            status, reason, headers = hs.getreply()
            print 'status =', status
            print 'reason =', reason
            print "read", len(hs.getfile().read())
            print
            if headers:
                for header in headers.headers: print header.strip()
            print

if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/AutoExpand.py b/depot_tools/release/win/python_24/Lib/idlelib/AutoExpand.py
new file mode 100644
index 0000000..9e93d57
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/AutoExpand.py
@@ -0,0 +1,83 @@
+import string
+import re
+
+###$ event <<expand-word>>
+###$ win <Alt-slash>
+###$ unix <Alt-slash>
+
class AutoExpand:
    """IDLE extension: expand the word before the cursor from words
    found elsewhere in the buffer, cycling through candidates on
    repeated invocations of <<expand-word>>.
    """

    menudefs = [
        ('edit', [
            ('E_xpand Word', '<<expand-word>>'),
         ]),
    ]

    # Characters considered part of a word for expansion purposes.
    wordchars = string.ascii_letters + string.digits + "_"

    def __init__(self, editwin):
        self.text = editwin.text
        # state is None, or (words, index, insert-index, line-text) from
        # the previous expansion; used to detect repeated invocations.
        self.state = None

    def expand_word_event(self, event):
        # Replace the word before the cursor with the next candidate.
        curinsert = self.text.index("insert")
        curline = self.text.get("insert linestart", "insert lineend")
        if not self.state:
            words = self.getwords()
            index = 0
        else:
            words, index, insert, line = self.state
            # If the cursor moved or the line changed since the last
            # expansion, start a fresh cycle.
            if insert != curinsert or line != curline:
                words = self.getwords()
                index = 0
        if not words:
            self.text.bell()
            return "break"
        word = self.getprevword()
        self.text.delete("insert - %d chars" % len(word), "insert")
        newword = words[index]
        index = (index + 1) % len(words)
        if index == 0:
            self.text.bell()            # Warn we cycled around
        self.text.insert("insert", newword)
        # Record where we left the cursor so the next invocation can
        # tell whether it continues this cycle.
        curinsert = self.text.index("insert")
        curline = self.text.get("insert linestart", "insert lineend")
        self.state = words, index, curinsert, curline
        return "break"

    def getwords(self):
        # Collect candidate expansions: words before the cursor first
        # (nearest first), then words after; the typed prefix itself is
        # appended last so that cycling can restore the original text.
        word = self.getprevword()
        if not word:
            return []
        before = self.text.get("1.0", "insert wordstart")
        wbefore = re.findall(r"\b" + word + r"\w+\b", before)
        del before
        after = self.text.get("insert wordend", "end")
        wafter = re.findall(r"\b" + word + r"\w+\b", after)
        del after
        if not wbefore and not wafter:
            return []
        words = []
        dict = {}
        # search backwards through words before
        wbefore.reverse()
        for w in wbefore:
            if dict.get(w):
                continue
            words.append(w)
            dict[w] = w
        # search onwards through words after
        for w in wafter:
            if dict.get(w):
                continue
            words.append(w)
            dict[w] = w
        words.append(word)
        return words

    def getprevword(self):
        # Return the run of word characters immediately before the cursor.
        line = self.text.get("insert linestart", "insert")
        i = len(line)
        while i > 0 and line[i-1] in self.wordchars:
            i = i-1
        return line[i:]
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Bindings.py b/depot_tools/release/win/python_24/Lib/idlelib/Bindings.py
new file mode 100644
index 0000000..b5e90b0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Bindings.py
@@ -0,0 +1,85 @@
+"""Define the menu contents, hotkeys, and event bindings.
+
+There is additional configuration information in the EditorWindow class (and
+subclasses): the menus are created there based on the menu_specs (class)
+variable, and menus not created are silently skipped in the code here.  This
+makes it possible, for example, to define a Debug menu which is only present in
+the PythonShell window, and a Format menu which is only present in the Editor
+windows.
+
+"""
+import sys
+from configHandler import idleConf
+
# Menu layout shared by EditorWindow and its subclasses.  Each entry is
# (menu-name, item-list); an item is None for a separator, otherwise a
# (label, virtual-event) pair.  In labels, '_' marks the character used
# as the keyboard mnemonic, and a leading '!' requests a checkbutton.
menudefs = [
    ('file', [
        ('_New Window', '<<open-new-window>>'),
        ('_Open...', '<<open-window-from-file>>'),
        ('Open _Module...', '<<open-module>>'),
        ('Class _Browser', '<<open-class-browser>>'),
        ('_Path Browser', '<<open-path-browser>>'),
        None,
        ('_Save', '<<save-window>>'),
        ('Save _As...', '<<save-window-as-file>>'),
        ('Save Co_py As...', '<<save-copy-of-window-as-file>>'),
        None,
        ('_Print Window', '<<print-window>>'),
        None,
        ('_Close', '<<close-window>>'),
        ('E_xit', '<<close-all-windows>>'),
    ]),
    ('edit', [
        ('_Undo', '<<undo>>'),
        ('_Redo', '<<redo>>'),
        None,
        ('Cu_t', '<<cut>>'),
        ('_Copy', '<<copy>>'),
        ('_Paste', '<<paste>>'),
        ('Select _All', '<<select-all>>'),
        None,
        ('_Find...', '<<find>>'),
        ('Find A_gain', '<<find-again>>'),
        ('Find _Selection', '<<find-selection>>'),
        ('Find in Files...', '<<find-in-files>>'),
        ('R_eplace...', '<<replace>>'),
        ('Go to _Line', '<<goto-line>>'),
    ]),
    ('format', [
        ('_Indent Region', '<<indent-region>>'),
        ('_Dedent Region', '<<dedent-region>>'),
        ('Comment _Out Region', '<<comment-region>>'),
        ('U_ncomment Region', '<<uncomment-region>>'),
        ('Tabify Region', '<<tabify-region>>'),
        ('Untabify Region', '<<untabify-region>>'),
        ('Toggle Tabs', '<<toggle-tabs>>'),
        ('New Indent Width', '<<change-indentwidth>>'),
    ]),
    ('run', [
        ('Python Shell', '<<open-python-shell>>'),
    ]),
    ('shell', [
        ('_View Last Restart', '<<view-restart>>'),
        ('_Restart Shell', '<<restart-shell>>'),
    ]),
    ('debug', [
        ('_Go to File/Line', '<<goto-file-line>>'),
        ('!_Debugger', '<<toggle-debugger>>'),
        ('_Stack Viewer', '<<open-stack-viewer>>'),
        ('!_Auto-open Stack Viewer', '<<toggle-jit-stack-viewer>>'),
    ]),
    ('options', [
        ('_Configure IDLE...', '<<open-config-dialog>>'),
        None,
    ]),
    ('help', [
        ('_About IDLE', '<<about-idle>>'),
        None,
        ('_IDLE Help', '<<help>>'),
        ('Python _Docs', '<<python-docs>>'),
    ]),
]
+
+default_keydefs = idleConf.GetCurrentKeySet()
+
+del sys
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/CREDITS.txt b/depot_tools/release/win/python_24/Lib/idlelib/CREDITS.txt
new file mode 100644
index 0000000..ef9c450
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/CREDITS.txt
@@ -0,0 +1,35 @@
+Guido van Rossum, as well as being the creator of the Python language, is the
+original creator of IDLE.  Other contributors prior to Version 0.8 include
+Mark Hammond, Jeremy Hylton, Tim Peters, and Moshe Zadka.
+
+IDLE's recent development has been carried out in the IDLEfork project.
+The objective was to develop a version of IDLE which had an execution
+environment which could be initialized prior to each run of user code.
+
+The IDLEfork project was initiated by David Scherer, with some help from Peter
+Schneider-Kamp and Nicholas Riley.  David wrote the first version of the RPC
+code and designed a fast turn-around environment for VPython.  Guido developed
+the RPC code and Remote Debugger currently integrated in IDLE.  Bruce Sherwood
+contributed considerable time testing and suggesting improvements.
+
+Besides David and Guido, the main developers who have been active on IDLEfork
+are Stephen M. Gava, who implemented the configuration GUI, the new
+configuration system, and the About dialog, and Kurt B. Kaiser, who completed
+the integration of the RPC and remote debugger, implemented the threaded
+subprocess, and made a number of usability enhancements.
+
+Other contributors include Raymond Hettinger, Tony Lownds (Mac integration),
+Neal Norwitz (code check and clean-up), and Chui Tey (RPC integration, debugger
+integration and persistent breakpoints).
+
+Scott David Daniels, Hernan Foffani, Christos Georgiou, Martin v. Löwis, 
+Jason Orendorff, Noam Raphael, Josh Robb, Nigel Rowe, and Bruce Sherwood have
+submitted useful patches.  Thanks, guys!
+
+For additional details refer to NEWS.txt and Changelog.
+
+Please contact the IDLE maintainer to have yourself included here if you
+are one of those we missed! 
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/CallTipWindow.py b/depot_tools/release/win/python_24/Lib/idlelib/CallTipWindow.py
new file mode 100644
index 0000000..990d96e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/CallTipWindow.py
@@ -0,0 +1,88 @@
+"""A CallTip window class for Tkinter/IDLE.
+
+After ToolTip.py, which uses ideas gleaned from PySol
+Used by the CallTips IDLE extension.
+
+"""
+from Tkinter import *
+
class CallTip:
    """A small undecorated Toplevel used to display call-signature text
    next to the insertion point of a Text widget."""

    def __init__(self, widget):
        # widget is the Text widget the tip is anchored to.
        self.widget = widget
        self.tipwindow = None
        self.id = None
        self.x = self.y = 0

    def showtip(self, text):
        " Display text in calltip window"
        # truncate overly long calltip
        if len(text) >= 79:
            text = text[:75] + ' ...'
        self.text = text
        if self.tipwindow or not self.text:
            return
        # Position the tip just below and right of the insert cursor.
        self.widget.see("insert")
        x, y, cx, cy = self.widget.bbox("insert")
        x = x + self.widget.winfo_rootx() + 2
        y = y + cy + self.widget.winfo_rooty()
        self.tipwindow = tw = Toplevel(self.widget)
        # XXX 12 Dec 2002 KBK The following command has two effects: It removes
        #     the calltip window border (good) but also causes (at least on
        #     Linux) the calltip to show as a top level window, burning through
        #     any other window dragged over it.  Also, shows on all viewports!
        tw.wm_overrideredirect(1)
        tw.wm_geometry("+%d+%d" % (x, y))
        try:
            # This command is only needed and available on Tk >= 8.4.0 for OSX
            # Without it, call tips intrude on the typing process by grabbing
            # the focus.
            tw.tk.call("::tk::unsupported::MacWindowStyle", "style", tw._w,
                       "help", "noActivates")
        except TclError:
            pass
        label = Label(tw, text=self.text, justify=LEFT,
                      background="#ffffe0", relief=SOLID, borderwidth=1,
                      font = self.widget['font'])
        label.pack()

    def hidetip(self):
        # Destroy the tip window, if one is currently displayed.
        tw = self.tipwindow
        self.tipwindow = None
        if tw:
            tw.destroy()
+
+
+###############################
+#
+# Test Code
+#
class container: # Conceptually an editor_window
    """Minimal stand-in for an editor window, used for manual testing."""

    def __init__(self):
        # Build a bare Tk text widget and attach a CallTip to it.
        root = Tk()
        text = self.text = Text(root)
        text.pack(side=LEFT, fill=BOTH, expand=1)
        text.insert("insert", "string.split")
        root.update()
        self.calltip = CallTip(text)

        # Show the tip when '(' is typed and hide it on ')'.
        text.event_add("<<calltip-show>>", "(")
        text.event_add("<<calltip-hide>>", ")")
        text.bind("<<calltip-show>>", self.calltip_show)
        text.bind("<<calltip-hide>>", self.calltip_hide)

        text.focus_set()
        # Blocks until the window is closed.
        root.mainloop()

    def calltip_show(self, event):
        self.calltip.showtip("Hello world")

    def calltip_hide(self, event):
        self.calltip.hidetip()
+
def main():
    """Manual smoke test: open the demo window wired to a CallTip."""
    container()

if __name__=='__main__':
    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/CallTips.py b/depot_tools/release/win/python_24/Lib/idlelib/CallTips.py
new file mode 100644
index 0000000..97d9746
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/CallTips.py
@@ -0,0 +1,216 @@
+"""CallTips.py - An IDLE Extension to Jog Your Memory
+
+Call Tips are floating windows which display function, class, and method
+parameter and docstring information when you type an opening parenthesis, and
+which disappear when you type a closing parenthesis.
+
+Future plans include extending the functionality to include class attributes.
+
+"""
+import sys
+import string
+import types
+
+import CallTipWindow
+
+import __main__
+
class CallTips:
    """IDLE extension that pops up argument/docstring tips on '('."""

    menudefs = [
    ]

    def __init__(self, editwin=None):
        if editwin is None:  # subprocess and test
            self.editwin = None
            return
        self.editwin = editwin
        self.text = editwin.text
        self.calltip = None
        self._make_calltip_window = self._make_tk_calltip_window

    def close(self):
        # Drop the factory reference so the editor window can be freed.
        self._make_calltip_window = None

    def _make_tk_calltip_window(self):
        # See __init__ for usage
        return CallTipWindow.CallTip(self.text)

    def _remove_calltip_window(self):
        if self.calltip:
            self.calltip.hidetip()
            self.calltip = None

    def paren_open_event(self, event):
        # On '(', look up the name before the cursor and show its tip.
        self._remove_calltip_window()
        name = self.get_name_at_cursor()
        arg_text = self.fetch_tip(name)
        if arg_text:
            # Remember where the tip started so cancel checks can tell
            # when the cursor leaves the region it refers to.
            self.calltip_start = self.text.index("insert")
            self.calltip = self._make_calltip_window()
            self.calltip.showtip(arg_text)
        return "" #so the event is handled normally.

    def paren_close_event(self, event):
        # Now just hides, but later we should check if other
        # paren'd expressions remain open.
        self._remove_calltip_window()
        return "" #so the event is handled normally.

    def check_calltip_cancel_event(self, event):
        if self.calltip:
            # If we have moved before the start of the calltip,
            # or off the calltip line, then cancel the tip.
            # (Later need to be smarter about multi-line, etc)
            if self.text.compare("insert", "<=", self.calltip_start) or \
               self.text.compare("insert", ">", self.calltip_start
                                 + " lineend"):
                self._remove_calltip_window()
        return "" #so the event is handled normally.

    def calltip_cancel_event(self, event):
        self._remove_calltip_window()
        return "" #so the event is handled normally.

    # Characters that may appear in a dotted identifier.
    __IDCHARS = "._" + string.ascii_letters + string.digits

    def get_name_at_cursor(self):
        # Return the (possibly dotted) identifier ending at the cursor.
        idchars = self.__IDCHARS
        str = self.text.get("insert linestart", "insert")
        i = len(str)
        while i and str[i-1] in idchars:
            i -= 1
        return str[i:]

    def fetch_tip(self, name):
        """Return the argument list and docstring of a function or class

        If there is a Python subprocess, get the calltip there.  Otherwise,
        either fetch_tip() is running in the subprocess itself or it was called
        in an IDLE EditorWindow before any script had been run.

        The subprocess environment is that of the most recently run script.  If
        two unrelated modules are being edited some calltips in the current
        module may be inoperative if the module was not the last to run.

        """
        try:
            rpcclt = self.editwin.flist.pyshell.interp.rpcclt
        except:
            # No subprocess (or no shell yet): fall back to local lookup.
            rpcclt = None
        if rpcclt:
            return rpcclt.remotecall("exec", "get_the_calltip",
                                     (name,), {})
        else:
            entity = self.get_entity(name)
            return get_arg_text(entity)

    def get_entity(self, name):
        "Lookup name in a namespace spanning sys.modules and __main__.__dict__"
        if name:
            namespace = sys.modules.copy()
            namespace.update(__main__.__dict__)
            try:
                # NOTE(review): eval of editor text; tolerated here since
                # IDLE already runs the user's own code anyway.
                return eval(name, namespace)
            except:
                return None
+
+def _find_constructor(class_ob):
+    # Given a class object, return a function object used for the
+    # constructor (ie, __init__() ) or None if we can't find one.
+    try:
+        return class_ob.__init__.im_func
+    except AttributeError:
+        for base in class_ob.__bases__:
+            rc = _find_constructor(base)
+            if rc is not None: return rc
+    return None
+
def get_arg_text(ob):
    "Get a string describing the arguments for the given object"
    # Result is "(argspec)\nfirst-docstring-line" when both parts are
    # available, either part alone otherwise, or "" for None/unknown.
    argText = ""
    if ob is not None:
        argOffset = 0
        if type(ob)==types.ClassType:
            # Look for the highest __init__ in the class chain.
            fob = _find_constructor(ob)
            if fob is None:
                # No constructor anywhere: show an empty signature.
                fob = lambda: None
            else:
                argOffset = 1
        elif type(ob)==types.MethodType:
            # bit of a hack for methods - turn it into a function
            # but we drop the "self" param.
            fob = ob.im_func
            argOffset = 1
        else:
            fob = ob
        # Try and build one for Python defined functions
        if type(fob) in [types.FunctionType, types.LambdaType]:
            try:
                realArgs = fob.func_code.co_varnames[argOffset:fob.func_code.co_argcount]
                defaults = fob.func_defaults or []
                defaults = list(map(lambda name: "=%s" % name, defaults))
                # Left-pad so defaults align with their trailing args.
                defaults = [""] * (len(realArgs)-len(defaults)) + defaults
                items = map(lambda arg, dflt: arg+dflt, realArgs, defaults)
                if fob.func_code.co_flags & 0x4:
                    # CO_VARARGS: function accepts *args.
                    items.append("...")
                if fob.func_code.co_flags & 0x8:
                    # CO_VARKEYWORDS: function accepts **kwargs.
                    items.append("***")
                argText = ", ".join(items)
                argText = "(%s)" % argText
            except:
                pass
        # See if we can use the docstring
        doc = getattr(ob, "__doc__", "")
        if doc:
            # Append the first line (at most 70 chars) of the docstring.
            doc = doc.lstrip()
            pos = doc.find("\n")
            if pos < 0 or pos > 70:
                pos = 70
            if argText:
                argText += "\n"
            argText += doc[:pos]
    return argText
+
+#################################################
+#
+# Test code
+#
+if __name__=='__main__':
+
+    def t1(): "()"
+    def t2(a, b=None): "(a, b=None)"
+    def t3(a, *args): "(a, ...)"
+    def t4(*args): "(...)"
+    def t5(a, *args): "(a, ...)"
+    def t6(a, b=None, *args, **kw): "(a, b=None, ..., ***)"
+
+    class TC:
+        "(a=None, ...)"
+        def __init__(self, a=None, *b): "(a=None, ...)"
+        def t1(self): "()"
+        def t2(self, a, b=None): "(a, b=None)"
+        def t3(self, a, *args): "(a, ...)"
+        def t4(self, *args): "(...)"
+        def t5(self, a, *args): "(a, ...)"
+        def t6(self, a, b=None, *args, **kw): "(a, b=None, ..., ***)"
+
+    def test(tests):
+        ct = CallTips()
+        failed=[]
+        for t in tests:
+            expected = t.__doc__ + "\n" + t.__doc__
+            name = t.__name__
+            arg_text = ct.fetch_tip(name)
+            if arg_text != expected:
+                failed.append(t)
+                print "%s - expected %s, but got %s" % (t, expected,
+                                                        get_arg_text(entity))
+        print "%d of %d tests failed" % (len(failed), len(tests))
+
+    tc = TC()
+    tests = (t1, t2, t3, t4, t5, t6,
+             TC, tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6)
+
+    test(tests)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ChangeLog b/depot_tools/release/win/python_24/Lib/idlelib/ChangeLog
new file mode 100644
index 0000000..a409c608
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ChangeLog
@@ -0,0 +1,1591 @@
+Please refer to the IDLEfork and IDLE CVS repositories for
+change details subsequent to the 0.8.1 release.
+
+
+IDLEfork ChangeLog
+==================
+
+2001-07-20 11:35  elguavas
+
+	* README.txt, NEWS.txt: bring up to date for 0.8.1 release
+
+2001-07-19 16:40  elguavas
+
+	* IDLEFORK.html: replaced by IDLEFORK-index.html
+
+2001-07-19 16:39  elguavas
+
+	* IDLEFORK-index.html: updated placeholder idlefork homepage
+
+2001-07-19 14:49  elguavas
+
+	* ChangeLog, EditorWindow.py, INSTALLATION, NEWS.txt, README.txt,
+	TODO.txt, idlever.py: 
+	minor tidy-ups ready for 0.8.1 alpha tarball release
+
+2001-07-17 15:12  kbk
+
+	* INSTALLATION, setup.py: INSTALLATION: Remove the coexist.patch
+	instructions
+	
+	**************** setup.py:
+	
+	Remove the idles script, add some words on IDLE Fork to the
+	long_description, and clean up some line spacing.
+
+2001-07-17 15:01  kbk
+
+	* coexist.patch: Put this in the attic, at least for now...
+
+2001-07-17 14:59  kbk
+
+	* PyShell.py, idle, idles: Implement idle command interface as
+	suggested by GvR [idle-dev] 16 July **************** PyShell: Added
+	functionality:
+	
+	usage: idle.py [-c command] [-d] [-i] [-r script] [-s] [-t title]
+	[arg] ...
+	
+	idle file(s)	(without options) edit the file(s)
+	
+	-c cmd	   run the command in a shell -d	 enable the
+	debugger -i	    open an interactive shell -i file(s) open a
+	shell and also an editor window for each file -r script  run a file
+	as a script in a shell -s	  run $IDLESTARTUP or
+	$PYTHONSTARTUP before anything else -t title   set title of shell
+	window
+	
+	Remaining arguments are applied to the command (-c) or script (-r).
+	
+	****************** idles: Removed the idles script, not needed
+	
+	****************** idle:  Removed the IdleConf references, not
+	required anymore
+
+2001-07-16 17:08  kbk
+
+	* INSTALLATION, coexist.patch: Added installation instructions.
+	
+	Added a patch which modifies idlefork so that it can co-exist with
+	"official" IDLE in the site-packages directory. This patch is not
+	necessary if only idlefork IDLE is installed. See INSTALLATION for
+	further details.
+
+2001-07-16 15:50  kbk
+
+	* idles: Add a script "idles" which opens a Python Shell window.
+	
+	The default behaviour of idlefork idle is to open an editor window
+	instead of a shell. Complex expressions may be run in a fresh
+	environment by selecting "run".  There are times, however, when a
+	shell is desired.  Though one can be started by "idle -t 'foo'",
+	this script is more convenient.  In addition, a shell and an editor
+	window can be started in parallel by "idles -e foo.py".
+
+2001-07-16 15:25  kbk
+
+	* PyShell.py: Call out IDLE Fork in startup message.
+
+2001-07-16 14:00  kbk
+
+	* PyShell.py, setup.py: Add a script "idles" which opens a Python
+	Shell window.
+	
+	The default behaviour of idlefork idle is to open an editor window
+	instead of a shell. Complex expressions may be run in a fresh
+	environment by selecting "run".  There are times, however, when a
+	shell is desired.  Though one can be started by "idle -t 'foo'",
+	this script is more convenient.  In addition, a shell and an editor
+	window can be started in parallel by "idles -e foo.py".
+
+2001-07-15 03:06  kbk
+
+	* pyclbr.py, tabnanny.py: tabnanny and pyclbr are now found in /Lib
+
+2001-07-15 02:29  kbk
+
+	* BrowserControl.py: Remove, was retained for 1.5.2 support
+
+2001-07-14 15:48  kbk
+
+	* setup.py: Installing Idle to site-packages via Distutils does not
+	copy the Idle help.txt file.
+	
+	Ref SF Python Patch 422471
+
+2001-07-14 15:26  kbk
+
+	* keydefs.py: py-cvs-2001_07_13 (Rev 1.3) merge
+	
+	"Make copy, cut and paste events case insensitive.  Reported by
+	Patrick K. O'Brien on idle-dev. (Should other bindings follow
+	suit?)" --GvR
+
+2001-07-14 15:21  kbk
+
+	* idle.py: py-cvs-2001_07_13 (Rev 1.4) merge
+	
+	"Move the action of loading the configuration to the IdleConf
+	module rather than the idle.py script.	This has advantages and
+	disadvantages; the biggest advantage being that we can more easily
+	have an alternative main program."  --GvR
+
+2001-07-14 15:18  kbk
+
+	* extend.txt: py-cvs-2001_07_13 (Rev 1.4) merge
+	
+	"Quick update to the extension mechanism (extend.py is gone, long
+	live config.txt)" --GvR
+
+2001-07-14 15:15  kbk
+
+	* StackViewer.py: py-cvs-2001_07_13 (Rev 1.16) merge
+	
+	"Refactored, with some future plans in mind. This now uses the new
+	gotofileline() method defined in FileList.py"  --GvR
+
+2001-07-14 15:10  kbk
+
+	* PyShell.py: py-cvs-2001_07_13 (Rev 1.34) merge
+	
+	"Amazing.  A very subtle change in policy in descr-branch actually
+	found a bug here.  Here's the deal: Class PyShell derives from
+	class OutputWindow.  Method PyShell.close() wants to invoke its
+	parent method, but because PyShell long ago was inherited from
+	class PyShellEditorWindow, it invokes
+	PyShelEditorWindow.close(self).  Now, class PyShellEditorWindow
+	itself derives from class OutputWindow, and inherits the close()
+	method from there without overriding it.  Under the old rules,
+	PyShellEditorWindow.close would return an unbound method restricted
+	to the class that defined the implementation of close(), which was
+	OutputWindow.close.  Under the new rules, the unbound method is
+	restricted to the class whose method was requested, that is
+	PyShellEditorWindow, and this was correctly trapped as an error."
+	--GvR
+
+2001-07-14 14:59  kbk
+
+	* PyParse.py: py-cvs-2001_07_13 (Rel 1.9) merge
+	
+	"Taught IDLE's autoident parser that "yield" is a keyword that
+	begins a stmt.	Along w/ the preceding change to keyword.py, making
+	all this work w/ a future-stmt just looks harder and harder."
+	--tim_one
+	
+	(From Rel 1.8: "Hack to make this still work with Python 1.5.2. 
+	;-( " --fdrake)
+
+2001-07-14 14:51  kbk
+
+	* IdleConf.py: py-cvs-2001_07_13 (Rel 1.7) merge
+	
+	"Move the action of loading the configuration to the IdleConf
+	module rather than the idle.py script.	This has advantages and
+	disadvantages; the biggest advantage being that we can more easily
+	have an alternative main program." --GvR
+
+2001-07-14 14:45  kbk
+
+	* FileList.py: py-cvs-2000_07_13 (Rev 1.9) merge
+	
+	"Delete goodname() method, which is unused. Add gotofileline(), a
+	convenience method which I intend to use in a variant. Rename
+	test() to _test()."  --GvR
+	
+	This was an interesting merge. The join completely missed removing
+	goodname(), which was adjacent, but outside of, a small conflict. 
+	I only caught it by comparing the 1.1.3.2/1.1.3.3 diff.  CVS ain't
+	infallible.
+
+2001-07-14 13:58  kbk
+
+	* EditorWindow.py: py-cvs-2000_07_13 (Rev 1.38) merge "Remove
+	legacy support for the BrowserControl module; the webbrowser module
+	has been included since Python 2.0, and that is the preferred
+	interface." --fdrake
+
+2001-07-14 13:32  kbk
+
+	* EditorWindow.py, FileList.py, IdleConf.py, PyParse.py,
+	PyShell.py, StackViewer.py, extend.txt, idle.py, keydefs.py: Import
+	the 2001 July 13 23:59 GMT version of Python CVS IDLE on the
+	existing 1.1.3 vendor branch named py-cvs-vendor-branch. Release
+	tag is py-cvs-2001_07_13.
+
+2001-07-14 12:02  kbk
+
+	* Icons/python.gif: py-cvs-rel2_1 (Rev 1.2) merge Copied py-cvs rev
+	1.2 changed file to idlefork MAIN
+
+2001-07-14 11:58  kbk
+
+	* Icons/minusnode.gif: py-cvs-rel2_1 (Rev 1.2) merge Copied py-cvs
+	1.2 changed file to idlefork MAIN
+
+2001-07-14 11:23  kbk
+
+	* ScrolledList.py: py-cvs-rel2_1 (rev 1.5) merge - whitespace
+	normalization
+
+2001-07-14 11:20  kbk
+
+	* Separator.py: py-cvs-rel2_1 (Rev 1.3) merge - whitespace
+	normalization
+
+2001-07-14 11:16  kbk
+
+	* StackViewer.py: py-cvs-rel2_1 (Rev 1.15) merge - whitespace
+	normalization
+
+2001-07-14 11:14  kbk
+
+	* ToolTip.py: py-cvs-rel2_1 (Rev 1.2) merge - whitespace
+	normalization
+
+2001-07-14 10:13  kbk
+
+	* PyShell.py: cvs-py-rel2_1 (Rev 1.29 - 1.33) merge
+	
+	Merged the following py-cvs revs without conflict: 1.29 Reduce
+	copyright text output at startup 1.30 Delay setting sys.args until
+	Tkinter is fully initialized 1.31 Whitespace normalization 1.32
+	Turn syntax warning into error when interactive 1.33 Fix warning
+	initialization bug
+	
+	Note that module is extensively modified wrt py-cvs
+
+2001-07-14 06:33  kbk
+
+	* PyParse.py: py-cvs-rel2_1 (Rev 1.6 - 1.8) merge Fix autoindent
+	bug and deflect Unicode from text.get()
+
+2001-07-14 06:00  kbk
+
+	* Percolator.py: py-cvs-rel2_1 (Rev 1.3) "move "from Tkinter import
+	*" to module level" --jhylton
+
+2001-07-14 05:57  kbk
+
+	* PathBrowser.py: py-cvs-rel2_1 (Rev 1.6) merge - whitespace
+	normalization
+
+2001-07-14 05:49  kbk
+
+	* ParenMatch.py: cvs-py-rel2_1 (Rev 1.5) merge - whitespace
+	normalization
+
+2001-07-14 03:57  kbk
+
+	* ObjectBrowser.py: py-cvs-rel2_1 (Rev 1.3) merge "Make the test
+	program work outside IDLE."  -- GvR
+
+2001-07-14 03:52  kbk
+
+	* MultiStatusBar.py: py-cvs-rel2_1 (Rev 1.2) merge - whitespace
+	normalization
+
+2001-07-14 03:44  kbk
+
+	* MultiScrolledLists.py: py-cvs-rel2_1 (Rev 1.2) merge - whitespace
+	normalization
+
+2001-07-14 03:40  kbk
+
+	* IdleHistory.py: py-cvs-rel2_1 (Rev 1.4) merge - whitespace
+	normalization
+
+2001-07-14 03:38  kbk
+
+	* IdleConf.py: py-cvs-rel2_1 (Rev 1.6) merge - whitespace
+	normalization
+
+2001-07-13 14:18  kbk
+
+	* IOBinding.py: py-cvs-rel2_1 (Rev 1.4) merge - move "import *" to
+	module level
+
+2001-07-13 14:12  kbk
+
+	* FormatParagraph.py: py-cvs-rel2_1 (Rev 1.9) merge - whitespace
+	normalization
+
+2001-07-13 14:07  kbk
+
+	* FileList.py: py-cvs-rel2_1 (Rev 1.8) merge - whitespace
+	normalization
+
+2001-07-13 13:35  kbk
+
+	* EditorWindow.py: py-cvs-rel2_1 (Rev 1.33 - 1.37) merge
+	
+	VP IDLE version depended on VP's ExecBinding.py and spawn.py to get
+	the path to the Windows Doc directory (relative to python.exe).
+	Removed this conflicting code in favor of py-cvs updates which on
+	Windows use a hard coded path relative to the location of this
+	module. py-cvs updates include support for webbrowser.py.  Module
+	still has BrowserControl.py for 1.5.2 support.
+	
+	At this point, the differences wrt py-cvs relate to menu
+	functionality.
+
+2001-07-13 11:30  kbk
+
+	* ConfigParser.py: py-cvs-rel2_1 merge - Remove, lives in /Lib
+
+2001-07-13 10:10  kbk
+
+	* Delegator.py: py-cvs-rel2_1 (Rev 1.3) merge - whitespace
+	normalization
+
+2001-07-13 10:07  kbk
+
+	* Debugger.py: py-cvs-rel2_1 (Rev 1.15) merge - whitespace
+	normalization
+
+2001-07-13 10:04  kbk
+
+	* ColorDelegator.py: py-cvs-rel2_1 (Rev 1.11 and 1.12) merge
+	Colorize "as" after "import" / use DEBUG instead of __debug__
+
+2001-07-13 09:54  kbk
+
+	* ClassBrowser.py: py-cvs-rel2_1 (Rev 1.12) merge - whitespace
+	normalization
+
+2001-07-13 09:41  kbk
+
+	* BrowserControl.py: py-cvs-rel2_1 (Rev 1.1) merge - New File -
+	Force HEAD to trunk with -f Note: browser.py was renamed
+	BrowserControl.py 10 May 2000. It provides a collection of classes
+	and convenience functions to control external browsers "for 1.5.2
+	support". It was removed from py-cvs 18 April 2001.
+
+2001-07-13 09:10  kbk
+
+	* CallTips.py: py-cvs-rel2_1 (Rev 1.8) merge - whitespace
+	normalization
+
+2001-07-13 08:26  kbk
+
+	* CallTipWindow.py: py-cvs-rel2_1 (Rev 1.3) merge - whitespace
+	normalization
+
+2001-07-13 08:13  kbk
+
+	* AutoExpand.py: py-cvs-rel1_2 (Rev 1.4) merge, "Add Alt-slash to
+	Unix keydefs (I somehow need it on RH 6.2).  Get rid of assignment
+	to unused self.text.wordlist."	--GvR
+
+2001-07-12 16:54  elguavas
+
+	* ReplaceDialog.py: py-cvs merge, python 1.5.2 compatibility
+
+2001-07-12 16:46  elguavas
+
+	* ScriptBinding.py: py-cvs merge, better error dialog
+
+2001-07-12 16:38  elguavas
+
+	* TODO.txt: py-cvs merge, additions
+
+2001-07-12 15:35  elguavas
+
+	* WindowList.py: py-cvs merge, correct indentation
+
+2001-07-12 15:24  elguavas
+
+	* config.txt: py-cvs merge, correct typo
+
+2001-07-12 15:21  elguavas
+
+	* help.txt: py-cvs merge, update colour changing info
+
+2001-07-12 14:51  elguavas
+
+	* idle.py: py-cvs merge, idle_dir loading changed
+
+2001-07-12 14:44  elguavas
+
+	* idlever.py: py-cvs merge, version update
+
+2001-07-11 12:53  kbk
+
+	* BrowserControl.py: Initial revision
+
+2001-07-11 12:53  kbk
+
+	* AutoExpand.py, BrowserControl.py, CallTipWindow.py, CallTips.py,
+	ClassBrowser.py, ColorDelegator.py, Debugger.py, Delegator.py,
+	EditorWindow.py, FileList.py, FormatParagraph.py, IOBinding.py,
+	IdleConf.py, IdleHistory.py, MultiScrolledLists.py,
+	MultiStatusBar.py, ObjectBrowser.py, OutputWindow.py,
+	ParenMatch.py, PathBrowser.py, Percolator.py, PyParse.py,
+	PyShell.py, RemoteInterp.py, ReplaceDialog.py, ScriptBinding.py,
+	ScrolledList.py, Separator.py, StackViewer.py, TODO.txt,
+	ToolTip.py, WindowList.py, config.txt, help.txt, idle, idle.bat,
+	idle.py, idlever.py, setup.py, Icons/minusnode.gif,
+	Icons/python.gif: Import the release 2.1 version of Python CVS IDLE
+	on the existing 1.1.3 vendor branch named py-cvs-vendor-branch,
+	with release tag py-cvs-rel2_1.
+
+2001-07-11 12:34  kbk
+
+	* AutoExpand.py, AutoIndent.py, Bindings.py, CallTipWindow.py,
+	CallTips.py, ChangeLog, ClassBrowser.py, ColorDelegator.py,
+	Debugger.py, Delegator.py, EditorWindow.py, FileList.py,
+	FormatParagraph.py, FrameViewer.py, GrepDialog.py, IOBinding.py,
+	IdleConf.py, IdleHistory.py, MultiScrolledLists.py,
+	MultiStatusBar.py, NEWS.txt, ObjectBrowser.py, OldStackViewer.py,
+	OutputWindow.py, ParenMatch.py, PathBrowser.py, Percolator.py,
+	PyParse.py, PyShell.py, README.txt, RemoteInterp.py,
+	ReplaceDialog.py, ScriptBinding.py, ScrolledList.py,
+	SearchBinding.py, SearchDialog.py, SearchDialogBase.py,
+	SearchEngine.py, Separator.py, StackViewer.py, TODO.txt,
+	ToolTip.py, TreeWidget.py, UndoDelegator.py, WidgetRedirector.py,
+	WindowList.py, ZoomHeight.py, __init__.py, config-unix.txt,
+	config-win.txt, config.txt, eventparse.py, extend.txt, help.txt,
+	idle.bat, idle.py, idle.pyw, idlever.py, keydefs.py, pyclbr.py,
+	tabnanny.py, testcode.py, Icons/folder.gif, Icons/minusnode.gif,
+	Icons/openfolder.gif, Icons/plusnode.gif, Icons/python.gif,
+	Icons/tk.gif: Import the 9 March 2000 version of Python CVS IDLE as
+	1.1.3 vendor branch named py-cvs-vendor-branch.
+
+2001-07-04 13:43  kbk
+
+	* Icons/: folder.gif, minusnode.gif, openfolder.gif, plusnode.gif,
+	python.gif, tk.gif: Null commit with -f option to force an uprev
+	and put HEADs firmly on the trunk.
+
+2001-07-04 13:15  kbk
+
+	* AutoExpand.py, AutoIndent.py, Bindings.py, CallTipWindow.py,
+	CallTips.py, ChangeLog, ClassBrowser.py, ColorDelegator.py,
+	ConfigParser.py, Debugger.py, Delegator.py, EditorWindow.py,
+	ExecBinding.py, FileList.py, FormatParagraph.py, FrameViewer.py,
+	GrepDialog.py, IDLEFORK.html, IOBinding.py, IdleConf.py,
+	IdleHistory.py, MultiScrolledLists.py, MultiStatusBar.py, NEWS.txt,
+	ObjectBrowser.py, OldStackViewer.py, OutputWindow.py,
+	ParenMatch.py, PathBrowser.py, Percolator.py, PyParse.py,
+	PyShell.py, README.txt, Remote.py, RemoteInterp.py,
+	ReplaceDialog.py, ScriptBinding.py, ScrolledList.py,
+	SearchBinding.py, SearchDialog.py, SearchDialogBase.py,
+	SearchEngine.py, Separator.py, StackViewer.py, TODO.txt,
+	ToolTip.py, TreeWidget.py, UndoDelegator.py, WidgetRedirector.py,
+	WindowList.py, ZoomHeight.py, __init__.py, config-unix.txt,
+	config-win.txt, config.txt, eventparse.py, extend.txt, help.txt,
+	idle, idle.bat, idle.py, idle.pyw, idlever.py, keydefs.py,
+	loader.py, protocol.py, pyclbr.py, setup.py, spawn.py, tabnanny.py,
+	testcode.py: Null commit with -f option to force an uprev and put
+	HEADs firmly on the trunk.
+
+2001-06-27 10:24  elguavas
+
+	* IDLEFORK.html: updated contact details
+
+2001-06-25 17:23  elguavas
+
+	* idle, RemoteInterp.py, setup.py: Initial revision
+
+2001-06-25 17:23  elguavas
+
+	* idle, RemoteInterp.py, setup.py: import current python cvs idle
+	as a vendor branch
+
+2001-06-24 15:10  elguavas
+
+	* IDLEFORK.html: tiny change to test new syncmail setup
+
+2001-06-24 14:41  elguavas
+
+	* IDLEFORK.html: change to new developer contact, also a test
+	commit for new syncmail setup
+
+2001-06-23 18:15  elguavas
+
+	* IDLEFORK.html: tiny test update for revitalised idle-fork
+
+2000-09-24 17:29  nriley
+
+	* protocol.py: Fixes for Python 1.6 compatibility - socket bind and
+	connect get a tuple instead two arguments.
+
+2000-09-24 17:28  nriley
+
+	* spawn.py: Change for Python 1.6 compatibility - UNIX's 'os'
+	module defines 'spawnv' now, so we check for 'fork' first.
+
+2000-08-15 22:51  nowonder
+
+	* IDLEFORK.html: 
+	corrected email address
+
+2000-08-15 22:47  nowonder
+
+	* IDLEFORK.html: 
+	added .html file for http://idlefork.sourceforge.net
+
+2000-08-15 11:13  dscherer
+
+	* AutoExpand.py, AutoIndent.py, Bindings.py, CallTipWindow.py,
+	CallTips.py, __init__.py, ChangeLog, ClassBrowser.py,
+	ColorDelegator.py, ConfigParser.py, Debugger.py, Delegator.py,
+	FileList.py, FormatParagraph.py, FrameViewer.py, GrepDialog.py,
+	IOBinding.py, IdleConf.py, IdleHistory.py, MultiScrolledLists.py,
+	MultiStatusBar.py, NEWS.txt, ObjectBrowser.py, OldStackViewer.py,
+	OutputWindow.py, ParenMatch.py, PathBrowser.py, Percolator.py,
+	PyParse.py, PyShell.py, README.txt, ReplaceDialog.py,
+	ScriptBinding.py, ScrolledList.py, SearchBinding.py,
+	SearchDialog.py, SearchDialogBase.py, SearchEngine.py,
+	Separator.py, StackViewer.py, TODO.txt, ToolTip.py, TreeWidget.py,
+	UndoDelegator.py, WidgetRedirector.py, WindowList.py, help.txt,
+	ZoomHeight.py, config-unix.txt, config-win.txt, config.txt,
+	eventparse.py, extend.txt, idle.bat, idle.py, idle.pyw, idlever.py,
+	keydefs.py, loader.py, pyclbr.py, tabnanny.py, testcode.py,
+	EditorWindow.py, ExecBinding.py, Remote.py, protocol.py, spawn.py,
+	Icons/folder.gif, Icons/minusnode.gif, Icons/openfolder.gif,
+	Icons/plusnode.gif, Icons/python.gif, Icons/tk.gif: Initial
+	revision
+
+2000-08-15 11:13  dscherer
+
+	* AutoExpand.py, AutoIndent.py, Bindings.py, CallTipWindow.py,
+	CallTips.py, __init__.py, ChangeLog, ClassBrowser.py,
+	ColorDelegator.py, ConfigParser.py, Debugger.py, Delegator.py,
+	FileList.py, FormatParagraph.py, FrameViewer.py, GrepDialog.py,
+	IOBinding.py, IdleConf.py, IdleHistory.py, MultiScrolledLists.py,
+	MultiStatusBar.py, NEWS.txt, ObjectBrowser.py, OldStackViewer.py,
+	OutputWindow.py, ParenMatch.py, PathBrowser.py, Percolator.py,
+	PyParse.py, PyShell.py, README.txt, ReplaceDialog.py,
+	ScriptBinding.py, ScrolledList.py, SearchBinding.py,
+	SearchDialog.py, SearchDialogBase.py, SearchEngine.py,
+	Separator.py, StackViewer.py, TODO.txt, ToolTip.py, TreeWidget.py,
+	UndoDelegator.py, WidgetRedirector.py, WindowList.py, help.txt,
+	ZoomHeight.py, config-unix.txt, config-win.txt, config.txt,
+	eventparse.py, extend.txt, idle.bat, idle.py, idle.pyw, idlever.py,
+	keydefs.py, loader.py, pyclbr.py, tabnanny.py, testcode.py,
+	EditorWindow.py, ExecBinding.py, Remote.py, protocol.py, spawn.py,
+	Icons/folder.gif, Icons/minusnode.gif, Icons/openfolder.gif,
+	Icons/plusnode.gif, Icons/python.gif, Icons/tk.gif: Modified IDLE
+	from VPython 0.2
+
+
+original IDLE ChangeLog:
+========================
+
+Tue Feb 15 18:08:19 2000  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* NEWS.txt: Notice status bar and stack viewer.
+
+	* EditorWindow.py: Support for Moshe's status bar.
+
+	* MultiStatusBar.py: Status bar code -- by Moshe Zadka.
+
+	* OldStackViewer.py:
+	Adding the old stack viewer implementation back, for the debugger.
+
+	* StackViewer.py: New stack viewer, uses a tree widget.
+	(XXX: the debugger doesn't yet use this.)
+
+	* WindowList.py:
+	Correct a typo and remove an unqualified except that was hiding the error.
+
+	* ClassBrowser.py: Add an XXX comment about the ClassBrowser API.
+
+	* ChangeLog: Updated change log.
+
+	* NEWS.txt: News update.  Probably incomplete; what else is new?
+
+	* README.txt:
+	Updated for pending IDLE 0.5 release (still very rough -- just getting
+	it out in a more convenient format than CVS).
+
+	* TODO.txt: Tiny addition.
+
+Thu Sep  9 14:16:02 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* TODO.txt: A few new TODO entries.
+
+Thu Aug 26 23:06:22 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Bindings.py: Add Python Documentation entry to Help menu.
+
+	* EditorWindow.py:
+	Find the help.txt file relative to __file__ or ".", not in sys.path.
+	(Suggested by Moshe Zadka, but implemented differently.)
+
+	Add <<python-docs>> event which, on Unix, brings up Netscape pointing
+	to http://www.python.doc/current/ (a local copy would be nice but its
+	location can't be predicted).  Windows solution TBD.
+
+Wed Aug 11 14:55:43 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* TreeWidget.py:
+	Moshe noticed an inconsistency in his comment, so I'm rephrasing it to
+	be clearer.
+
+	* TreeWidget.py:
+	Patch inspired by Moshe Zadka to search for the Icons directory in the
+	same directory as __file__, rather than searching for it along sys.path.
+	This works better when idle is a package.
+
+Thu Jul 15 13:11:02 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* TODO.txt: New wishes.
+
+Sat Jul 10 13:17:35 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* IdlePrefs.py:
+	Make the color for stderr red (i.e. the standard warning/danger/stop
+	color) rather than green.  Suggested by Sam Schulenburg.
+
+Fri Jun 25 17:26:34 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* PyShell.py: Close debugger when closing.  This may break a cycle.
+
+	* Debugger.py: Break cycle on close.
+
+	* ClassBrowser.py: Destroy the tree when closing.
+
+	* TreeWidget.py: Add destroy() method to recursively destroy a tree.
+
+	* PyShell.py: Extend _close() to break cycles.
+	Break some other cycles too (and destroy the root when done).
+
+	* EditorWindow.py:
+	Add _close() method that does the actual cleanup (close() asks the
+	user what they want first if there's unsaved stuff, and may cancel).
+	It closes more than before.
+
+	Add unload_extensions() method to unload all extensions; called from
+	_close().  It calls an extension's close() method if it has one.
+
+	* Percolator.py: Add close() method that breaks cycles.
+
+	* WidgetRedirector.py: Add unregister() method.
+	Unregister everything at closing.
+	Don't call close() in __del__, rely on explicit call to close().
+
+	* IOBinding.py, FormatParagraph.py, CallTips.py:
+	Add close() method that breaks a cycle.
+
+Fri Jun 11 15:03:00 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* AutoIndent.py, EditorWindow.py, FormatParagraph.py:
+	Tim Peters smart.patch:
+
+	EditorWindow.py:
+
+	+ Added get_tabwidth & set_tabwidth "virtual text" methods, that get/set the
+	widget's view of what a tab means.
+
+	+ Moved TK_TABWIDTH_DEFAULT here from AutoIndent.
+
+	+ Renamed Mark's get_selection_index to get_selection_indices (sorry, Mark,
+	but the name was plain wrong <wink>).
+
+	FormatParagraph.py:  renamed use of get_selection_index.
+
+	AutoIndent.py:
+
+	+ Moved TK_TABWIDTH_DEFAULT to EditorWindow.
+
+	+ Rewrote set_indentation_params to use new VTW get/set_tabwidth methods.
+
+	+ Changed smart_backspace_event to delete whitespace back to closest
+	preceding virtual tab stop or real character (note that this may require
+	inserting characters if backspacing over a tab!).
+
+	+ Nuked almost all references to the selection tag, in favor of using
+	get_selection_indices.  The sole exception is in set_region, for which no
+	"set_selection" abstraction has yet been agreed upon.
+
+	+ Had too much fun using the spiffy new features of the format-paragraph
+	cmd.
+
+Thu Jun 10 17:48:02 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* FormatParagraph.py:
+	Code by Mark Hammond to format paragraphs embedded in comments.
+	Read the comments (which I reformatted using the new feature :-)
+	for some limitations.
+
+	* EditorWindow.py:
+	Added abstraction get_selection_index() (Mark Hammond).  Also
+	reformatted some comment blocks to show off a cool feature I'm about
+	to check in next.
+
+	* ClassBrowser.py:
+	Adapt to the new pyclbr's support of listing top-level functions.  If
+	this functionality is not present (e.g. when used with a vintage
+	Python 1.5.2 installation) top-level functions are not listed.
+
+	(Hmm...  Any distribution of IDLE 0.5 should probably include a copy
+	of the new pyclbr.py!)
+
+	* AutoIndent.py:
+	Fix off-by-one error in Tim's recent change to comment_region(): the
+	list of lines returned by get_region() contains an empty line at the
+	end representing the start of the next line, and this shouldn't be
+	commented out!
+
+	* CallTips.py:
+	Mark Hammond writes: Here is another change that allows it to work for
+	class creation - tries to locate an __init__ function.  Also updated
+	the test code to reflect your new "***" change.
+
+	* CallTipWindow.py:
+	Mark Hammond writes: Tim's suggestion of copying the font for the
+	CallTipWindow from the text control makes sense, and actually makes
+	the control look better IMO.
+
+Wed Jun  9 20:34:57 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* CallTips.py:
+	Append "..." if the appropriate flag (for varargs) in co_flags is set.
+	Ditto "***" for kwargs.
+
+Tue Jun  8 13:06:07 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* ReplaceDialog.py:
+	Hmm...  Tim didn't turn "replace all" into a single undo block.
+	I think I like it better if it is, so here.
+
+	* ReplaceDialog.py: Tim Peters: made replacement atomic for undo/redo.
+
+	* AutoIndent.py: Tim Peters:
+
+	+ Set usetabs=1.  Editing pyclbr.py was driving me nuts <0.6 wink>.
+	usetabs=1 is the Emacs pymode default too, and thanks to indentwidth !=
+	tabwidth magical usetabs disabling, new files are still created with tabs
+	turned off.  The only implication is that if you open a file whose first
+	indent is a single tab, IDLE will now magically use tabs for that file (and
+	set indentwidth to 8).  Note that the whole scheme doesn't work right for
+	PythonWin, though, since Windows users typically set tabwidth to 4; Mark
+	probably has to hide the IDLE algorithm from them (which he already knows).
+
+	+ Changed comment_region_event to stick "##" in front of every line.  The
+	"holes" previously left on blank lines were visually confusing (made it
+	needlessly hard to figure out what to uncomment later).
+
+Mon Jun  7 15:38:40 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* TreeWidget.py, ObjectBrowser.py:
+	Remove unnecessary reference to pyclbr from test() code.
+
+	* PyParse.py: Tim Peters:
+
+	Smarter logic for finding a parse synch point.
+
+	Does a half to a fifth the work in normal cases; don't notice the speedup,
+	but makes  more breathing room for other extensions.
+
+	Speeds terrible cases by at least a factor of 10. "Terrible" == e.g. you put
+	""" at the start of Tkinter.py, undo it, zoom to the bottom, and start
+	typing in code.  Used to take about 8 seconds for ENTER to respond, now some
+	large fraction of a second.  The new code gets indented correctly, despite
+	that it all remains "string colored" until the colorizer catches up (after
+	which, ENTER appears instantaneous again).
+
+Fri Jun  4 19:21:19 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* extend.py: Might as well enable CallTips by default.
+	If there are too many complaints I'll remove it again or fix it.
+
+Thu Jun  3 14:32:16 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* AutoIndent.py, EditorWindow.py, PyParse.py:
+	New offerings by Tim Peters; he writes:
+
+	IDLE is now the first Python editor in the Universe not confused by my
+	doctest.py <wink>.
+
+	As threatened, this defines IDLE's is_char_in_string function as a
+	method of EditorWindow.  You just need to define one similarly in
+	whatever it is you pass as editwin to AutoIndent; looking at the
+	EditorWindow.py part of the patch should make this clear.
+
+	* GrepDialog.py: Enclose pattern in quotes in status message.
+
+	* CallTips.py:
+	Mark Hammond fixed some comments and improved the way the tip text is
+	constructed.
+
+Wed Jun  2 18:18:57 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* CallTips.py:
+	My fix to Mark's code: restore the universal check on <KeyRelease>.
+	Always cancel on <Key-Escape> or <ButtonPress>.
+
+	* CallTips.py:
+	A version that Mark Hammond posted to the newsgroup.  Has some newer
+	stuff for getting the tip.  Had to fix the Key-( and Key-) events
+	for Unix.  Will have to re-apply my patch for catching KeyRelease and
+	ButtonRelease events.
+
+	* CallTipWindow.py, CallTips.py:
+	Call tips by Mark Hammond (plus tiny fix by me.)
+
+	* IdleHistory.py:
+	Changes by Mark Hammond: (1) support optional output_sep argument to
+	the constructor so he can eliminate the sys.ps2 that PythonWin leaves
+	in the source; (2) remove duplicate history items.
+
+	* AutoIndent.py:
+	Changes by Mark Hammond to allow using IDLE extensions in PythonWin as
+	well: make three dialog routines instance variables.
+
+	* EditorWindow.py:
+	Change by Mark Hammond to allow using IDLE extensions in PythonWin as
+	well: make three dialog routines instance variables.
+
+Tue Jun  1 20:06:44 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* AutoIndent.py: Hah!  A fix of my own to Tim's code!
+	Unix bindings for <<toggle-tabs>> and <<change-indentwidth>> were
+	missing, and somehow that meant the events were never generated,
+	even though they were in the menu.  The new Unix bindings are now
+	the same as the Windows bindings (M-t and M-u).
+
+	* AutoIndent.py, PyParse.py, PyShell.py: Tim Peters again:
+
+	The new version (attached) is fast enough all the time in every real module
+	I have <whew!>.  You can make it slow by, e.g., creating an open list with
+	5,000 90-character identifiers (+ trailing comma) each on its own line, then
+	adding an item to the end -- but that still consumes less than a second on
+	my P5-166.  Response time in real code appears instantaneous.
+
+	Fixed some bugs.
+
+	New feature:  when hitting ENTER and the cursor is beyond the line's leading
+	indentation, whitespace is removed on both sides of the cursor; before
+	whitespace was removed only on the left; e.g., assuming the cursor is
+	between the comma and the space:
+
+	def something(arg1, arg2):
+	                   ^ cursor to the left of here, and hit ENTER
+	               arg2):   # new line used to end up here
+	              arg2):    # but now lines up the way you expect
+
+	New hack:  AutoIndent has grown a context_use_ps1 Boolean config option,
+	defaulting to 0 (false) and set to 1 (only) by PyShell.  Reason:  handling
+	the fancy stuff requires looking backward for a parsing synch point; ps1
+	lines are the only sensible thing to look for in a shell window, but are a
+	bad thing to look for in a file window (ps1 lines show up in my module
+	docstrings often).  PythonWin's shell should set this true too.
+
+	Persistent problem:  strings containing def/class can still screw things up
+	completely.  No improvement.  Simplest workaround is on the user's head, and
+	consists of inserting e.g.
+
+	def _(): pass
+
+	(or any other def/class) after the end of the multiline string that's
+	screwing them up.  This is especially irksome because IDLE's syntax coloring
+	is *not* confused, so when this happens the colors don't match the
+	indentation behavior they see.
+
+	* AutoIndent.py: Tim Peters again:
+
+	[Tim, after adding some bracket smarts to AutoIndent.py]
+	> ...
+	> What it can't possibly do without reparsing large gobs of text is
+	> suggest a reasonable indent level after you've *closed* a bracket
+	> left open on some previous line.
+	> ...
+
+	The attached can, and actually fast enough to use -- most of the time.  The
+	code is tricky beyond belief to achieve that, but it works so far; e.g.,
+
+	        return len(string.expandtabs(str[self.stmt_start :
+	                                         ^ indents to caret
+	                                         i],
+	                                     ^ indents to caret
+	                                     self.tabwidth)) + 1
+	    ^ indents to caret
+
+	It's about as smart as pymode now, wrt both bracket and backslash
+	continuation rules.  It does require reparsing large gobs of text, and if it
+	happens to find something that looks like a "def" or "class" or sys.ps1
+	buried in a multiline string, but didn't suck up enough preceding text to
+	see the start of the string, it's completely hosed.  I can't repair that --
+	it's just too slow to reparse from the start of the file all the time.
+
+	AutoIndent has grown a new num_context_lines tuple attribute that controls
+	how far to look back, and-- like other params --this could/should be made
+	user-overridable at startup and per-file on the fly.
+
+	* PyParse.py: New file by Tim Peters:
+
+	One new file in the attached, PyParse.py.  The LineStudier (whatever it was
+	called <wink>) class was removed from AutoIndent; PyParse subsumes its
+	functionality.
+
+	* AutoIndent.py: Tim Peters keeps revising this module (more to come):
+
+	Removed "New tabwidth" menu binding.
+
+	Added "a tab means how many spaces?" dialog to block tabify and untabify.  I
+	think prompting for this is good now:  they're usually at-most-once-per-file
+	commands, and IDLE can't let them change tabwidth from the Tk default
+	anymore, so IDLE can no longer presume to have any idea what a tab means.
+
+	Irony:  for the purpose of keeping comments aligned via tabs, Tk's
+	non-default approach is much nicer than the Emacs/Notepad/Codewright/vi/etc
+	approach.
+
+	* EditorWindow.py:
+	1. Catch NameError on import (could be raised by case mismatch on Windows).
+	2. No longer need to reset pyclbr cache and show watch cursor when calling
+	   ClassBrowser -- the ClassBrowser takes care of pyclbr and the TreeWidget
+	   takes care of the watch cursor.
+	3. Reset the focus to the current window after error message about class
+	   browser on buffer without filename.
+
+	* Icons/minusnode.gif, Icons/plusnode.gif: Missed a few.
+
+	* ClassBrowser.py, PathBrowser.py: Rewritten based on TreeWidget.py
+
+	* ObjectBrowser.py: Object browser, based on TreeWidget.py.
+
+	* TreeWidget.py: Tree widget done right.
+
+	* ToolTip.py: As yet unused code for tool tips.
+
+	* ScriptBinding.py:
+	Ensure sys.argv[0] is the script name on Run Script.
+
+	* ZoomHeight.py: Move zoom height functionality to separate function.
+
+	* Icons/folder.gif, Icons/openfolder.gif, Icons/python.gif, Icons/tk.gif:
+	A few icons used by ../TreeWidget.py and its callers.
+
+	* AutoIndent.py: New version by Tim Peters improves block opening test.
+
+Fri May 21 04:46:17 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Attic/History.py, PyShell.py: Rename History to IdleHistory.
+	Add isatty() to pseudo files.
+
+	* StackViewer.py: Make initial stack viewer wider
+
+	* TODO.txt: New wishes
+
+	* AutoIndent.py, EditorWindow.py, PyShell.py:
+	Much improved autoindent and handling of tabs,
+	by Tim Peters.
+
+Mon May  3 15:49:52 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* AutoIndent.py, EditorWindow.py, FormatParagraph.py, UndoDelegator.py:
+	Tim Peters writes:
+
+	I'm still unsure, but couldn't stand the virtual event trickery so tried a
+	different sin (adding undo_block_start/stop methods to the Text instance in
+	EditorWindow.py).  Like it or not, it's efficient and works <wink>.  Better
+	idea?
+
+	Give the attached a whirl.  Even if you hate the implementation, I think
+	you'll like the results.  Think I caught all the "block edit" cmds,
+	including Format Paragraph, plus subtler ones involving smart indents and
+	backspacing.
+
+	* WidgetRedirector.py: Tim Peters writes:
+
+	[W]hile trying to dope out how redirection works, stumbled into two
+	possible glitches.  In the first, it doesn't appear to make sense to try to
+	rename a command that's already been destroyed; in the second, the name
+	"previous" doesn't really bring to mind "ignore the previous value" <wink>.
+
+Fri Apr 30 19:39:25 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* __init__.py: Support for using idle as a package.
+
+	* PathBrowser.py:
+	Avoid listing files more than once (e.g. foomodule.so has two hits:
+	once for foo + module.so, once for foomodule + .so).
+
+Mon Apr 26 22:20:38 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* ChangeLog, ColorDelegator.py, PyShell.py: Tim Peters strikes again:
+
+	Ho ho ho -- that's trickier than it sounded!  The colorizer is working with
+	"line.col" strings instead of Text marks, and the absolute coordinates of
+	the point of interest can change across the self.update call (voice of
+	baffled experience, when two quick backspaces no longer fooled it, but a
+	backspace followed by a quick ENTER did <wink>).
+
+	Anyway, the attached appears to do the trick.  CPU usage goes way up when
+	typing quickly into a long triple-quoted string, but the latency is fine for
+	me (a relatively fast typist on a relatively slow machine).  Most of the
+	changes here are left over from reducing the # of vrbl names to help me
+	reason about the logic better; I hope the code is a *little* easier to
+
+Fri Apr 23 14:01:25 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* EditorWindow.py:
+	Provide full arguments to __import__ so it works in packagized IDLE.
+
+Thu Apr 22 23:20:17 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+        * help.txt:
+        Bunch of updates necessary due to recent changes; added docs for File
+        menu, command line and color preferences.
+
+        * Bindings.py: Remove obsolete 'script' menu.
+
+	* TODO.txt: Several wishes fulfilled.
+
+	* OutputWindow.py:
+	Moved classes OnDemandOutputWindow and PseudoFile here,
+	from ScriptBinding.py where they are no longer needed.
+
+	* ScriptBinding.py:
+	Mostly rewritten.  Instead of the old Run module and Debug module,
+	there are two new commands:
+
+	Import module (F5) imports or reloads the module and also adds its
+	name to the __main__ namespace.  This gets executed in the PyShell
+	window under control of its debug settings.
+
+	Run script (Control-F5) is similar but executes the contents of the
+	file directly in the __main__ namespace.
+
+	* PyShell.py: Nits: document use of $IDLESTARTUP; display idle version
+
+	* idlever.py: New version to celebrate new command line
+
+	* OutputWindow.py: Added flush(), for completeness.
+
+	* PyShell.py:
+	A lot of changes to make the command line more useful.  You can now do:
+	  idle.py -e file ...    -- to edit files
+	  idle.py script arg ... -- to run a script
+	  idle.py -c cmd arg ... -- to run a command
+	Other options, see also the usage message (also new!) for more details:
+	  -d       -- enable debugger
+	  -s       -- run $IDLESTARTUP or $PYTHONSTARTUP
+	  -t title -- set Python Shell window's title
+	sys.argv is set accordingly, unless -e is used.
+	sys.path is absolutized, and all relevant paths are inserted into it.
+
+	Other changes:
+	- the environment in which commands are executed is now the
+	  __main__ module
+	- explicitly save sys.stdout etc., don't restore from sys.__stdout__
+	- new interpreter methods execsource(), execfile(), stuffsource()
+	- a few small nits
+
+	* TODO.txt:
+	Some more TODO items.  Made up my mind about command line args,
+	Run/Import, __main__.
+
+	* ColorDelegator.py:
+	Super-elegant patch by Tim Peters that speeds up colorization
+	dramatically (up to 15 times he claims).  Works by reading more than
+	one line at a time, up to 100-line chunks (starting with one line and
+	then doubling up to the limit).  On a typical machine (e.g. Tim's
+	P5-166) this doesn't reduce interactive responsiveness in a noticeable
+	way.
+
+Wed Apr 21 15:49:34 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* ColorDelegator.py:
+	Patch by Tim Peters to speed up colorizing of big multiline strings.
+
+Tue Apr 20 17:32:52 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* extend.txt:
+	For an event 'foo-bar', the corresponding method must be called
+	foo_bar_event().  Therefore, fix the references to zoom_height() in
+	the example.
+
+	* IdlePrefs.py: Restored the original IDLE color scheme.
+
+	* PyShell.py, IdlePrefs.py, ColorDelegator.py, EditorWindow.py:
+	Color preferences code by Loren Luke (massaged by me somewhat)
+
+	* SearchEngine.py:
+	Patch by Mark Favas: it fixes the search engine behaviour where an
+	unsuccessful search wraps around and re-searches that part of the file
+	between the start of the search and the end of the file - only really
+	an issue for very large files, but... (also removes a redundant
+	m.span() call).
+
+Mon Apr 19 16:26:02 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* TODO.txt: A few wishes are now fulfilled.
+
+	* AutoIndent.py: Tim Peters implements some of my wishes:
+
+	o Makes the tab key intelligently insert spaces when appropriate
+	(see Help list banter twixt David Ascher and me; idea stolen from
+	every other editor on earth <wink>).
+
+	o newline_and_indent_event trims trailing whitespace on the old
+	line (pymode and Codewright).
+
+	o newline_and_indent_event no longer fooled by trailing whitespace or
+	comment after ":" (pymode, PTUI).
+
+	o newline_and_indent_event now reduces the new line's indentation after
+	return, break, continue, raise and pass stmts (pymode).
+
+	The last two are easy to fool in the presence of strings &
+	continuations, but pymode requires Emacs's high-powered C parsing
+	functions to avoid that in finite time.
+
+======================================================================
+	Python release 1.5.2c1, IDLE version 0.4
+======================================================================
+
+Wed Apr  7 18:41:59 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* README.txt, NEWS.txt: New version.
+
+	* idlever.py: Version bump awaiting impending new release.
+	(Not much has changed :-( )
+
+Mon Mar 29 14:52:28 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* ScriptBinding.py, PyShell.py:
+	At Tim Peters' recommendation, add a dummy flush() method to
+	PseudoFile.
+
+Thu Mar 11 23:21:23 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* PathBrowser.py: Don't crash when sys.path contains an empty string.
+
+	* Attic/Outline.py: This file was never supposed to be part of IDLE.
+
+	* PathBrowser.py:
+	- Don't crash in the case where a superclass is a string instead of a
+	pyclbr.Class object; this can happen when the superclass is
+	unrecognizable (to pyclbr), e.g. when module renaming is used.
+
+	- Show a watch cursor when calling pyclbr (since it may take a while
+	recursively parsing imported modules!).
+
+Wed Mar 10 05:18:02 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* EditorWindow.py, Bindings.py: Add PathBrowser to File module
+
+	* PathBrowser.py: "Path browser" - 4 scrolled lists displaying:
+	    directories on sys.path
+	    modules in selected directory
+	    classes in selected module
+	    methods of selected class
+
+	Single clicking in a directory, module or class item updates the next
+	column with info about the selected item.  Double clicking in a
+	module, class or method item opens the file (and selects the clicked
+	item if it is a class or method).
+
+	I guess eventually I should be using a tree widget for this, but the
+	ones I've seen don't work well enough, so for now I use the old
+	Smalltalk or NeXT style multi-column hierarchical browser.
+
+	* MultiScrolledLists.py:
+	New utility: multiple scrolled lists in parallel
+
+	* ScrolledList.py: - White background.
+	- Display "(None)" (or text of your choosing) when empty.
+	- Don't set the focus.
+
+======================================================================
+	Python release 1.5.2b2, IDLE version 0.3
+======================================================================
+	
+Wed Feb 17 22:47:41 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* NEWS.txt: News in 0.3.
+
+	* README.txt, idlever.py: Bump version to 0.3.
+
+	* EditorWindow.py:
+	After all, we don't need to call the callbacks ourselves!
+
+	* WindowList.py:
+	When deleting, call the callbacks *after* deleting the window from our list!
+
+	* EditorWindow.py:
+	Fix up the Windows menu via the new callback mechanism instead of
+	depending on menu post commands (which don't work when the menu is
+	torn off).
+
+	* WindowList.py:
+	Support callbacks to patch up Windows menus everywhere.
+
+	* ChangeLog: Oh, why not.  Checking in the Emacs-generated change log.
+
+Tue Feb 16 22:34:17 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* ScriptBinding.py:
+	Only pop up the stack viewer when requested in the Debug menu.
+
+Mon Feb  8 22:27:49 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* WindowList.py: Don't crash if a window no longer exists.
+
+	* TODO.txt: Restructured a bit.
+
+Mon Feb  1 23:06:17 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* PyShell.py: Add current dir or paths of file args to sys.path.
+
+	* Debugger.py: Add canonic() function -- for brand new bdb.py feature.
+
+	* StackViewer.py: Protect against accessing an empty stack.
+
+Fri Jan 29 20:44:45 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* ZoomHeight.py:
+	Use only the height to decide whether to zoom in or out.
+
+Thu Jan 28 22:24:30 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* EditorWindow.py, FileList.py:
+	Make sure the Tcl variables are shared between windows.
+
+	* PyShell.py, EditorWindow.py, Bindings.py:
+	Move menu/key binding code from Bindings.py to EditorWindow.py,
+	with changed APIs -- it makes much more sense there.
+	Also add a new feature: if the first character of a menu label is
+	a '!', it gets a checkbox.  Checkboxes are bound to Boolean Tcl variables
+	that can be accessed through the new getvar/setvar/getrawvar API;
+	the variable is named after the event to which the menu is bound.
+
+	* Debugger.py: Add Quit button to the debugger window.
+
+	* SearchDialog.py:
+	When find_again() finds exactly the current selection, it's a failure.
+
+	* idle.py, Attic/idle: Rename idle -> idle.py
+
+Mon Jan 18 15:18:57 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* EditorWindow.py, WindowList.py: Only deiconify when iconic.
+
+	* TODO.txt: Misc
+
+Tue Jan 12 22:14:34 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* testcode.py, Attic/test.py:
+	Renamed test.py to testcode.py so one can import Python's
+	test package from inside IDLE.  (Suggested by Jack Jansen.)
+
+	* EditorWindow.py, ColorDelegator.py:
+	Hack to close a window that is colorizing.
+
+	* Separator.py: Vladimir Marangozov's patch:
+	The separator dances too much and seems to jump by arbitrary amounts
+	in arbitrary directions when I try to move it for resizing the frames.
+	This patch makes it more quiet.
+
+Mon Jan 11 14:52:40 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* TODO.txt: Some requests have been fulfilled.
+
+	* EditorWindow.py:
+	Set the cursor to a watch when opening the class browser (which may
+	take quite a while, browsing multiple files).
+
+	Newer, better center() -- but assumes no wrapping.
+
+	* SearchBinding.py:
+	Got rid of debug print statement in goto_line_event().
+
+	* ScriptBinding.py:
+	I think I like it better if it prints the traceback even when it displays
+	the stack viewer.
+
+	* Debugger.py: Bind ESC to close-window.
+
+	* ClassBrowser.py: Use a HSeparator between the classes and the items.
+	Make the list of classes wider by default (40 chars).
+	Bind ESC to close-window.
+
+	* Separator.py:
+	Separator classes (draggable divider between two panes).
+
+Sat Jan  9 22:01:33 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* WindowList.py:
+	Don't traceback when wakeup() is called when the window has been destroyed.
+	This can happen when a torn-off Windows menu references closed windows.
+	And Tim Peters claims that the Windows menu is his favorite to tear off...
+
+	* EditorWindow.py: Allow tearing off of the Windows menu.
+
+	* StackViewer.py: Close on ESC.
+
+	* help.txt: Updated a bunch of things (it was mostly still 0.1!)
+
+	* extend.py: Added ScriptBinding to standard bindings.
+
+	* ScriptBinding.py:
+	This now actually works.  See doc string.  It can run a module (i.e.
+	import or reload) or debug it (same with debugger control).  Output
+	goes to a fresh output window, only created when needed.
+
+======================================================================
+	Python release 1.5.2b1, IDLE version 0.2
+======================================================================
+	
+Fri Jan  8 17:26:02 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* README.txt, NEWS.txt: What's new in this release.
+
+	* Bindings.py, PyShell.py:
+	Paul Prescod's patches to allow the stack viewer to pop up when a
+	traceback is printed.
+
+Thu Jan  7 00:12:15 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* FormatParagraph.py:
+	Change paragraph width limit to 70 (like Emacs M-Q).
+
+	* README.txt:
+	Separating TODO from README.  Slight reformulation of features.  No
+	exact release date.
+
+	* TODO.txt: Separating TODO from README.
+
+Mon Jan  4 21:19:09 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* FormatParagraph.py:
+	Hm.  There was a boundary condition error at the end of the file too.
+
+	* SearchBinding.py: Hm.  Add Unix binding for replace, too.
+
+	* keydefs.py: Ran eventparse.py again.
+
+	* FormatParagraph.py: Added Unix Meta-q key binding;
+	fix find_paragraph when at start of file.
+
+	* AutoExpand.py: Added Meta-/ binding for Unix as alt for Alt-/.
+
+	* SearchBinding.py:
+	Add unix binding for grep (otherwise the menu entry doesn't work!)
+
+	* ZoomHeight.py: Adjusted Unix height to work with fvwm96. :=(
+
+	* GrepDialog.py: Need to import sys!
+
+	* help.txt, extend.txt, README.txt: Formatted some paragraphs
+
+	* extend.py, FormatParagraph.py:
+	Add new extension to reformat a (text) paragraph.
+
+	* ZoomHeight.py: Typo in Win specific height setting.
+
+Sun Jan  3 00:47:35 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* AutoIndent.py: Added something like Tim Peters' backspace patch.
+
+	* ZoomHeight.py: Adapted to Unix (i.e., more hardcoded constants).
+
+Sat Jan  2 21:28:54 1999  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* keydefs.py, idlever.py, idle.pyw, idle.bat, help.txt, extend.txt, extend.py, eventparse.py, ZoomHeight.py, WindowList.py, UndoDelegator.py, StackViewer.py, SearchEngine.py, SearchDialogBase.py, SearchDialog.py, ScrolledList.py, SearchBinding.py, ScriptBinding.py, ReplaceDialog.py, Attic/README, README.txt, PyShell.py, Attic/PopupMenu.py, OutputWindow.py, IOBinding.py, Attic/HelpWindow.py, History.py, GrepDialog.py, FileList.py, FrameViewer.py, EditorWindow.py, Debugger.py, Delegator.py, ColorDelegator.py, Bindings.py, ClassBrowser.py, AutoExpand.py, AutoIndent.py:
+	Checking in IDLE 0.2.
+
+	Much has changed -- too much, in fact, to write down.
+	The big news is that there's a standard way to write IDLE extensions;
+	see extend.txt.  Some sample extensions have been provided, and
+	some existing code has been converted to extensions.  Probably the
+	biggest new user feature is a new search dialog with more options,
+	search and replace, and even search in files (grep).
+
+	This is exactly as downloaded from my laptop after returning
+	from the holidays -- it hasn't even been tested on Unix yet.
+
+Fri Dec 18 15:52:54 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* FileList.py, ClassBrowser.py:
+	Fix the class browser to work even when the file is not on sys.path.
+
+Tue Dec  8 20:39:36 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Attic/turtle.py: Moved to Python 1.5.2/Lib
+
+Fri Nov 27 03:19:20 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* help.txt: Typo
+
+	* EditorWindow.py, FileList.py: Support underlining of menu labels
+
+	* Bindings.py:
+	New approach, separate tables for menus (platform-independent) and key
+	definitions (platform-specific), and generating accelerator strings
+	automatically from the key definitions.
+
+Mon Nov 16 18:37:42 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Attic/README: Clarify portability and main program.
+
+	* Attic/README: Added intro for 0.1 release and append Grail notes.
+
+Mon Oct 26 18:49:00 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Attic/turtle.py: root is now a global called _root
+
+Sat Oct 24 16:38:38 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Attic/turtle.py: Raise the root window on reset().
+	Different action on WM_DELETE_WINDOW is more likely to do the right thing,
+	allowing us to destroy old windows.
+
+	* Attic/turtle.py:
+	Split the goto() function in two: _goto() is the internal one,
+	using Canvas coordinates, and goto() uses turtle coordinates
+	and accepts variable argument lists.
+
+	* Attic/turtle.py: Cope with destruction of the window
+
+	* Attic/turtle.py: Turtle graphics
+
+	* Debugger.py: Use of Breakpoint class should be bdb.Breakpoint.
+
+Mon Oct 19 03:33:40 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* SearchBinding.py:
+	Speed up the search a bit -- don't drag a mark around...
+
+	* PyShell.py:
+	Change our special entries from <console#N> to <pyshell#N>.
+	Patch linecache.checkcache() to keep our special entries alive.
+	Add popup menu to all editor windows to set a breakpoint.
+
+	* Debugger.py:
+	Use and pass through the 'force' flag to set_dict() where appropriate.
+	Default source and globals checkboxes to false.
+	Don't interact in user_return().
+	Add primitive set_breakpoint() method.
+
+	* ColorDelegator.py:
+	Raise priority of 'sel' tag so its foreground (on Windows) will take
+	priority over text colorization (which on Windows is almost the
+	same color as the selection background).
+
+	Define a tag and color for breakpoints ("BREAK").
+
+	* Attic/PopupMenu.py: Disable "Open stack viewer" and "help" commands.
+
+	* StackViewer.py:
+	Add optional 'force' argument (default 0) to load_dict().
+	If set, redo the display even if it's the same dict.
+
+Fri Oct 16 21:10:12 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* StackViewer.py: Do nothing when loading the same dict as before.
+
+	* PyShell.py: Details for debugger interface.
+
+	* Debugger.py:
+	Restructured and more consistent.  Save checkboxes across instantiations.
+
+	* EditorWindow.py, Attic/README, Bindings.py:
+	Get rid of conflicting ^X binding.  Use ^W.
+
+	* Debugger.py, StackViewer.py:
+	Debugger can now show local and global variables.
+
+	* Debugger.py: Oops
+
+	* Debugger.py, PyShell.py: Better debugger support (show stack etc).
+
+	* Attic/PopupMenu.py: Follow renames in StackViewer module
+
+	* StackViewer.py:
+	Rename classes to StackViewer (the widget) and StackBrowser (the toplevel).
+
+	* ScrolledList.py: Add close() method
+
+	* EditorWindow.py: Clarify 'Open Module' dialog text
+
+	* StackViewer.py: Restructured into a browser and a widget.
+
+Thu Oct 15 23:27:08 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* ClassBrowser.py, ScrolledList.py:
+	Generalized the scrolled list which is the base for the class and
+	method browser into a separate class in its own module.
+
+	* Attic/test.py: Cosmetic change
+
+	* Debugger.py: Don't show function name if there is none
+
+Wed Oct 14 03:43:05 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Debugger.py, PyShell.py: Polish the Debugger GUI a bit.
+	Closing it now also does the right thing.
+
+Tue Oct 13 23:51:13 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* Debugger.py, PyShell.py, Bindings.py:
+	Add primitive debugger interface (so far it will step and show you the
+	source, but it doesn't yet show the stack).
+
+	* Attic/README: Misc
+
+	* StackViewer.py: Whoops -- referenced self.top before it was set.
+
+	* help.txt: Added history and completion commands.
+
+	* help.txt: Updated
+
+	* FileList.py: Add class browser functionality.
+
+	* StackViewer.py:
+	Add a close() method and bind to WM_DELETE_WINDOW protocol
+
+	* PyShell.py: Clear the linecache before printing a traceback
+
+	* Bindings.py: Added class browser binding.
+
+	* ClassBrowser.py: Much improved, much left to do.
+
+	* PyShell.py: Make the return key do what I mean more often.
+
+	* ClassBrowser.py:
+	Adding the beginnings of a Class browser.  Incomplete, yet.
+
+	* EditorWindow.py, Bindings.py:
+	Add new command, "Open module".  You select or type a module name,
+	and it opens the source.
+
+Mon Oct 12 23:59:27 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* PyShell.py: Subsume functionality from Popup menu in Debug menu.
+	Other stuff so the PyShell window can be resurrected from the Windows menu.
+
+	* FileList.py: Get rid of PopUp menu.
+	Create a simple Windows menu.  (Imperfect when Untitled windows exist.)
+	Add wakeup() method: deiconify, raise, focus.
+
+	* EditorWindow.py: Generalize menu creation.
+
+	* Bindings.py: Add Debug and Help menu items.
+
+	* EditorWindow.py: Added a menu bar to every window.
+
+	* Bindings.py: Add menu configuration to the event configuration.
+
+	* Attic/PopupMenu.py: Pass a root to the help window.
+
+	* SearchBinding.py:
+	Add parent argument to 'go to line number' dialog box.
+
+Sat Oct 10 19:15:32 1998  Guido van Rossum  <guido@cnri.reston.va.us>
+
+	* StackViewer.py:
+	Add a label at the top showing (very basic) help for the stack viewer.
+	Add a label at the bottom showing the exception info.
+
+	* Attic/test.py, Attic/idle: Add Unix main script and test program.
+
+	* idle.pyw, help.txt, WidgetRedirector.py, UndoDelegator.py, StackViewer.py, SearchBinding.py, Attic/README, PyShell.py, Attic/PopupMenu.py, Percolator.py, Outline.py, IOBinding.py, History.py, Attic/HelpWindow.py, FrameViewer.py, FileList.py, EditorWindow.py, Delegator.py, ColorDelegator.py, Bindings.py, AutoIndent.py, AutoExpand.py:
+	Initial checkin of Tk-based Python IDE.
+	Features: text editor with syntax coloring and undo;
+	subclassed into interactive Python shell which adds history.
+
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ClassBrowser.py b/depot_tools/release/win/python_24/Lib/idlelib/ClassBrowser.py
new file mode 100644
index 0000000..e5a60a5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ClassBrowser.py
@@ -0,0 +1,221 @@
+"""Class browser.
+
+XXX TO DO:
+
+- reparse when source changed (maybe just a button would be OK?)
+    (or recheck on window popup)
+- add popup menu with more options (e.g. doc strings, base classes, imports)
+- show function argument list? (have to do pattern matching on source)
+- should the classes and methods lists also be in the module's menu bar?
+- add base classes to class browser tree
+"""
+
+import os
+import sys
+import pyclbr
+
+import PyShell
+from WindowList import ListedToplevel
+from TreeWidget import TreeNode, TreeItem, ScrolledCanvas
+from configHandler import idleConf
+
+class ClassBrowser:
+    """Toplevel window showing a module's classes/methods as a tree.
+
+    Constructed from a file list, a module name and a search path; the
+    module is assumed to live at path[0]/<name>.py (see __init__'s XXX).
+    """
+
+    def __init__(self, flist, name, path):
+        # XXX This API should change, if the file doesn't end in ".py"
+        # XXX the code here is bogus!
+        self.name = name
+        self.file = os.path.join(path[0], self.name + ".py")
+        self.init(flist)
+
+    def close(self, event=None):
+        # Tear down window and tree.  The event arg lets this serve both
+        # as a WM_DELETE_WINDOW handler and as a key-binding callback.
+        self.top.destroy()
+        self.node.destroy()
+
+    def init(self, flist):
+        """Build the browser UI: toplevel, scrolled canvas, expanded tree."""
+        self.flist = flist
+        # reset pyclbr
+        # NOTE(review): clears pyclbr's private module cache so reopened
+        # browsers reparse changed sources -- relies on a private attribute.
+        pyclbr._modules.clear()
+        # create top
+        self.top = top = ListedToplevel(flist.root)
+        top.protocol("WM_DELETE_WINDOW", self.close)
+        top.bind("<Escape>", self.close)
+        self.settitle()
+        top.focus_set()
+        # create scrolled canvas
+        theme = idleConf.GetOption('main','Theme','name')
+        background = idleConf.GetHighlight(theme, 'normal')['background']
+        sc = ScrolledCanvas(top, bg=background, highlightthickness=0, takefocus=1)
+        sc.frame.pack(expand=1, fill="both")
+        item = self.rootnode()
+        self.node = node = TreeNode(sc.canvas, None, item)
+        node.update()
+        node.expand()
+
+    def settitle(self):
+        # Window title shows the browsed module's name.
+        self.top.wm_title("Class Browser - " + self.name)
+        self.top.wm_iconname("Class Browser")
+
+    def rootnode(self):
+        # Hook for subclasses (e.g. PathBrowser) to supply a different root.
+        return ModuleBrowserTreeItem(self.file)
+
+class ModuleBrowserTreeItem(TreeItem):
+
+    def __init__(self, file):
+        self.file = file
+
+    def GetText(self):
+        return os.path.basename(self.file)
+
+    def GetIconName(self):
+        return "python"
+
+    def GetSubList(self):
+        sublist = []
+        for name in self.listclasses():
+            item = ClassBrowserTreeItem(name, self.classes, self.file)
+            sublist.append(item)
+        return sublist
+
+    def OnDoubleClick(self):
+        if os.path.normcase(self.file[-3:]) != ".py":
+            return
+        if not os.path.exists(self.file):
+            return
+        PyShell.flist.open(self.file)
+
+    def IsExpandable(self):
+        return os.path.normcase(self.file[-3:]) == ".py"
+
+    def listclasses(self):
+        dir, file = os.path.split(self.file)
+        name, ext = os.path.splitext(file)
+        if os.path.normcase(ext) != ".py":
+            return []
+        try:
+            dict = pyclbr.readmodule_ex(name, [dir] + sys.path)
+        except ImportError, msg:
+            return []
+        items = []
+        self.classes = {}
+        for key, cl in dict.items():
+            if cl.module == name:
+                s = key
+                if hasattr(cl, 'super') and cl.super:
+                    supers = []
+                    for sup in cl.super:
+                        if type(sup) is type(''):
+                            sname = sup
+                        else:
+                            sname = sup.name
+                            if sup.module != cl.module:
+                                sname = "%s.%s" % (sup.module, sname)
+                        supers.append(sname)
+                    s = s + "(%s)" % ", ".join(supers)
+                items.append((cl.lineno, s))
+                self.classes[s] = cl
+        items.sort()
+        list = []
+        for item, s in items:
+            list.append(s)
+        return list
+
+class ClassBrowserTreeItem(TreeItem):
+    """Tree item for one class or top-level function within a module."""
+
+    def __init__(self, name, classes, file):
+        # name: display text as produced by listclasses() (may carry a
+        # "(bases)" suffix); classes: that same name -> pyclbr object map.
+        self.name = name
+        self.classes = classes
+        self.file = file
+        try:
+            self.cl = self.classes[self.name]
+        except (IndexError, KeyError):
+            self.cl = None
+        self.isfunction = isinstance(self.cl, pyclbr.Function)
+
+    def GetText(self):
+        # Render like a def or class statement, per the pyclbr type.
+        if self.isfunction:
+            return "def " + self.name + "(...)"
+        else:
+            return "class " + self.name
+
+    def GetIconName(self):
+        if self.isfunction:
+            return "python"
+        else:
+            return "folder"
+
+    def IsExpandable(self):
+        # NOTE(review): implicitly returns None (falsy) when self.cl is
+        # unset; unresolved bases are plain strings with no .methods.
+        if self.cl:
+            try:
+                return not not self.cl.methods
+            except AttributeError:
+                return False
+
+    def GetSubList(self):
+        # Children are the class's methods (functions have none).
+        if not self.cl:
+            return []
+        sublist = []
+        for name in self.listmethods():
+            item = MethodBrowserTreeItem(name, self.cl, self.file)
+            sublist.append(item)
+        return sublist
+
+    def OnDoubleClick(self):
+        # Open the source file and jump to the definition's line.
+        if not os.path.exists(self.file):
+            return
+        edit = PyShell.flist.open(self.file)
+        if hasattr(self.cl, 'lineno'):
+            lineno = self.cl.lineno
+            edit.gotoline(lineno)
+
+    def listmethods(self):
+        # Method names sorted by their line number in the source.
+        if not self.cl:
+            return []
+        items = []
+        for name, lineno in self.cl.methods.items():
+            items.append((lineno, name))
+        items.sort()
+        list = []
+        for item, name in items:
+            list.append(name)
+        return list
+
+class MethodBrowserTreeItem(TreeItem):
+
+    def __init__(self, name, cl, file):
+        self.name = name
+        self.cl = cl
+        self.file = file
+
+    def GetText(self):
+        return "def " + self.name + "(...)"
+
+    def GetIconName(self):
+        return "python" # XXX
+
+    def IsExpandable(self):
+        return 0
+
+    def OnDoubleClick(self):
+        if not os.path.exists(self.file):
+            return
+        edit = PyShell.flist.open(self.file)
+        edit.gotoline(self.cl.methods[self.name])
+
+def main():
+    try:
+        file = __file__
+    except NameError:
+        file = sys.argv[0]
+        if sys.argv[1:]:
+            file = sys.argv[1]
+        else:
+            file = sys.argv[0]
+    dir, file = os.path.split(file)
+    name = os.path.splitext(file)[0]
+    ClassBrowser(PyShell.flist, name, [dir])
+    if sys.stdin is sys.__stdin__:
+        mainloop()
+
+if __name__ == "__main__":
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/CodeContext.py b/depot_tools/release/win/python_24/Lib/idlelib/CodeContext.py
new file mode 100644
index 0000000..c791f98846
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/CodeContext.py
@@ -0,0 +1,139 @@
+"""CodeContext - Display the block context of code at top of edit window
+
+Once code has scrolled off the top of the screen, it can be difficult
+to determine which block you are in.  This extension implements a pane
+at the top of each IDLE edit window which provides block structure
+hints.  These hints are the lines which contain the block opening
+keywords, e.g. 'if', for the enclosing block.  The number of hint lines
+is determined by the numlines variable in the CodeContext section of
+config-extensions.def. Lines which do not open blocks are not shown in
+the context hints pane.
+
+"""
+import Tkinter
+from configHandler import idleConf
+from sets import Set
+import re
+
+# Keywords that open a block; only lines starting with one of these are
+# shown as context hints.
+BLOCKOPENERS = Set(["class", "def", "elif", "else", "except", "finally", "for",
+                    "if", "try", "while"])
+INFINITY = 1 << 30  # pseudo-indentation assigned to blank/comment lines
+UPDATEINTERVAL = 100 # millisec
+FONTUPDATEINTERVAL = 1000 # millisec
+
+# Split a line into (leading whitespace, first word).
+getspacesfirstword = lambda s, c=re.compile(r"^(\s*)(\w*)"): c.match(s).groups()
+
+class CodeContext:
+    """IDLE extension: maintain a pane above the edit window showing the
+    block-opener lines that have scrolled off the top of the screen."""
+    menudefs = [('options', [('!Code Conte_xt', '<<toggle-code-context>>')])]
+
+    # Configuration, read once when the class is created.
+    numlines = idleConf.GetOption("extensions", "CodeContext",
+                                  "numlines", type="int", default=3)
+    bgcolor = idleConf.GetOption("extensions", "CodeContext",
+                                 "bgcolor", type="str", default="LightGray")
+    fgcolor = idleConf.GetOption("extensions", "CodeContext",
+                                 "fgcolor", type="str", default="Black")
+    def __init__(self, editwin):
+        self.editwin = editwin
+        self.text = editwin.text
+        self.textfont = self.text["font"]
+        self.label = None
+        # Dummy line, which starts the "block" of the whole document:
+        self.info = list(self.interesting_lines(1))
+        self.lastfirstline = 1
+        visible = idleConf.GetOption("extensions", "CodeContext",
+                                     "visible", type="bool", default=False)
+        if visible:
+            self.toggle_code_context_event()
+            self.editwin.setvar('<<toggle-code-context>>', True)
+        # Start two update cycles, one for context lines, one for font changes.
+        self.text.after(UPDATEINTERVAL, self.timer_event)
+        self.text.after(FONTUPDATEINTERVAL, self.font_timer_event)
+
+    def toggle_code_context_event(self, event=None):
+        """Show or hide the context pane, and persist the new visibility."""
+        if not self.label:
+            self.label = Tkinter.Label(self.editwin.top,
+                                      text="\n" * (self.numlines - 1),
+                                      anchor="w", justify="left",
+                                      font=self.textfont,
+                                      bg=self.bgcolor, fg=self.fgcolor,
+                                      relief="sunken",
+                                      width=1, # Don't request more than we get
+                                      )
+            self.label.pack(side="top", fill="x", expand=0,
+                            after=self.editwin.status_bar)
+        else:
+            self.label.destroy()
+            self.label = None
+        idleConf.SetOption("extensions", "CodeContext", "visible",
+                           str(self.label is not None))
+        idleConf.SaveUserCfgFiles()
+
+    def get_line_info(self, linenum):
+        """Get the line indent value, text, and any block start keyword
+
+        If the line does not start a block, the keyword value is False.
+        The indentation of empty lines (or comment lines) is INFINITY.
+        There is a dummy block start, with indentation -1 and text "".
+
+        Return the indent level, text (including leading whitespace),
+        and the block opening keyword.
+
+        """
+        if linenum == 0:
+            return -1, "", True
+        text = self.text.get("%d.0" % linenum, "%d.end" % linenum)
+        spaces, firstword = getspacesfirstword(text)
+        opener = firstword in BLOCKOPENERS and firstword
+        if len(text) == len(spaces) or text[len(spaces)] == '#':
+            indent = INFINITY
+        else:
+            indent = len(spaces)
+        return indent, text, opener
+
+    def interesting_lines(self, firstline):
+        """Generator which yields context lines, starting at firstline."""
+        # The indentation level we are currently in:
+        lastindent = INFINITY
+        # For a line to be interesting, it must begin with a block opening
+        # keyword, and have less indentation than lastindent.
+        for line_index in xrange(firstline, -1, -1):
+            indent, text, opener = self.get_line_info(line_index)
+            if indent < lastindent:
+                lastindent = indent
+                if opener in ("else", "elif"):
+                    # We also show the if statement
+                    lastindent += 1
+                if opener and line_index < firstline:
+                    yield line_index, text
+
+    def update_label(self):
+        """Refresh the pane text when the first visible line has changed."""
+        firstline = int(self.text.index("@0,0").split('.')[0])
+        if self.lastfirstline == firstline:
+            return
+        self.lastfirstline = firstline
+        tmpstack = []
+        for line_index, text in self.interesting_lines(firstline):
+            # Remove irrelevant self.info items, and when we reach a relevant
+            # item (which must happen because of the dummy element), break.
+            while self.info[-1][0] > line_index:
+                del self.info[-1]
+            if self.info[-1][0] == line_index:
+                break
+            tmpstack.append((line_index, text))
+        while tmpstack:
+            self.info.append(tmpstack.pop())
+        # Pad with blank lines so the pane always shows numlines rows.
+        lines = [""] * max(0, self.numlines - len(self.info)) + \
+                [x[1] for x in self.info[-self.numlines:]]
+        self.label["text"] = '\n'.join(lines)
+
+    def timer_event(self):
+        # Periodic poll; only refresh while the pane is visible.
+        if self.label:
+            self.update_label()
+        self.text.after(UPDATEINTERVAL, self.timer_event)
+
+    def font_timer_event(self):
+        # Track editor font changes so the pane font stays in sync.
+        newtextfont = self.text["font"]
+        if self.label and newtextfont != self.textfont:
+            self.textfont = newtextfont
+            self.label["font"] = self.textfont
+        self.text.after(FONTUPDATEINTERVAL, self.font_timer_event)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ColorDelegator.py b/depot_tools/release/win/python_24/Lib/idlelib/ColorDelegator.py
new file mode 100644
index 0000000..019e5d41
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ColorDelegator.py
@@ -0,0 +1,254 @@
+import time
+import re
+import keyword
+import __builtin__
+from Tkinter import *
+from Delegator import Delegator
+from configHandler import idleConf
+
+DEBUG = False
+
+def any(name, list):
+    """Return a named regex alternation '(?P<name>alt1|alt2|...)'.
+
+    NOTE: the name shadows the any() builtin introduced in Python 2.5;
+    harmless here since this module targets Python 2.4.
+    """
+    return "(?P<%s>" % name + "|".join(list) + ")"
+
+def make_pat():
+    """Build the master colorizing regex.
+
+    Produces one alternation with the named groups KEYWORD, BUILTIN,
+    COMMENT, STRING and SYNC (newline); group names double as tag names.
+    """
+    kw = r"\b" + any("KEYWORD", keyword.kwlist) + r"\b"
+    builtinlist = [str(name) for name in dir(__builtin__)
+                                        if not name.startswith('_')]
+    # self.file = file("file") :
+    # 1st 'file' colorized normal, 2nd as builtin, 3rd as comment
+    builtin = r"([^.'\"\\]\b|^)" + any("BUILTIN", builtinlist) + r"\b"
+    comment = any("COMMENT", [r"#[^\n]*"])
+    sqstring = r"(\b[rR])?'[^'\\\n]*(\\.[^'\\\n]*)*'?"
+    dqstring = r'(\b[rR])?"[^"\\\n]*(\\.[^"\\\n]*)*"?'
+    sq3string = r"(\b[rR])?'''[^'\\]*((\\.|'(?!''))[^'\\]*)*(''')?"
+    dq3string = r'(\b[rR])?"""[^"\\]*((\\.|"(?!""))[^"\\]*)*(""")?'
+    string = any("STRING", [sq3string, dq3string, sqstring, dqstring])
+    return kw + "|" + builtin + "|" + comment + "|" + string +\
+           "|" + any("SYNC", [r"\n"])
+
+# Compiled module-level patterns: prog matches any colorizable token,
+# idprog picks out the identifier following 'def'/'class', and asprog
+# locates 'as' keywords on import lines.
+prog = re.compile(make_pat(), re.S)
+idprog = re.compile(r"\s+(\w+)", re.S)
+asprog = re.compile(r".*?\b(as)\b", re.S)
+
+class ColorDelegator(Delegator):
+    """Delegator that syntax-colors the contents of a Text widget.
+
+    insert()/delete() mark the affected range with the "TODO" tag; a
+    recolorize pass scheduled via after() then applies highlight tags
+    (KEYWORD, BUILTIN, COMMENT, STRING, ...) named after the regex groups.
+    """
+
+    def __init__(self):
+        Delegator.__init__(self)
+        self.prog = prog
+        self.idprog = idprog
+        self.asprog = asprog
+        self.LoadTagDefs()
+
+    def setdelegate(self, delegate):
+        """Attach to (or detach from) the underlying widget chain."""
+        if self.delegate is not None:
+            self.unbind("<<toggle-auto-coloring>>")
+        Delegator.setdelegate(self, delegate)
+        if delegate is not None:
+            self.config_colors()
+            self.bind("<<toggle-auto-coloring>>", self.toggle_colorize_event)
+            # Recolorize everything on attach.
+            self.notify_range("1.0", "end")
+
+    def config_colors(self):
+        """Apply the loaded tag definitions to the widget."""
+        for tag, cnf in self.tagdefs.items():
+            if cnf:
+                self.tag_configure(tag, **cnf)
+        # Keep the selection visible on top of the syntax tags.
+        self.tag_raise('sel')
+
+    def LoadTagDefs(self):
+        """Load tag -> color mappings for the current theme."""
+        theme = idleConf.GetOption('main','Theme','name')
+        self.tagdefs = {
+            "COMMENT": idleConf.GetHighlight(theme, "comment"),
+            "KEYWORD": idleConf.GetHighlight(theme, "keyword"),
+            "BUILTIN": idleConf.GetHighlight(theme, "builtin"),
+            "STRING": idleConf.GetHighlight(theme, "string"),
+            "DEFINITION": idleConf.GetHighlight(theme, "definition"),
+            "SYNC": {'background':None,'foreground':None},
+            "TODO": {'background':None,'foreground':None},
+            "BREAK": idleConf.GetHighlight(theme, "break"),
+            "ERROR": idleConf.GetHighlight(theme, "error"),
+            # The following is used by ReplaceDialog:
+            "hit": idleConf.GetHighlight(theme, "hit"),
+            }
+
+        if DEBUG: print 'tagdefs',self.tagdefs
+
+    def insert(self, index, chars, tags=None):
+        """Forward the insert, then schedule recoloring of the new text."""
+        index = self.index(index)
+        self.delegate.insert(index, chars, tags)
+        self.notify_range(index, index + "+%dc" % len(chars))
+
+    def delete(self, index1, index2=None):
+        """Forward the delete, then schedule recoloring around the cut."""
+        index1 = self.index(index1)
+        self.delegate.delete(index1, index2)
+        self.notify_range(index1)
+
+    # Colorizer state shared by the scheduling methods below.
+    after_id = None           # pending after() callback id, if any
+    allow_colorizing = True   # user toggle for auto-coloring
+    colorizing = False        # True while recolorize_main is running
+
+    def notify_range(self, index1, index2=None):
+        """Mark [index1, index2) dirty and schedule a recolorize pass."""
+        self.tag_add("TODO", index1, index2)
+        if self.after_id:
+            if DEBUG: print "colorizing already scheduled"
+            return
+        if self.colorizing:
+            # Ask the running pass to stop so the reschedule takes over.
+            self.stop_colorizing = True
+            if DEBUG: print "stop colorizing"
+        if self.allow_colorizing:
+            if DEBUG: print "schedule colorizing"
+            self.after_id = self.after(1, self.recolorize)
+
+    close_when_done = None # Window to be closed when done colorizing
+
+    def close(self, close_when_done=None):
+        """Cancel pending work; optionally destroy a window when idle."""
+        if self.after_id:
+            after_id = self.after_id
+            self.after_id = None
+            if DEBUG: print "cancel scheduled recolorizer"
+            self.after_cancel(after_id)
+        self.allow_colorizing = False
+        self.stop_colorizing = True
+        if close_when_done:
+            if not self.colorizing:
+                close_when_done.destroy()
+            else:
+                # Defer destruction until recolorize() finishes.
+                self.close_when_done = close_when_done
+
+    def toggle_colorize_event(self, event):
+        """Handle <<toggle-auto-coloring>>: flip colorizing on or off."""
+        if self.after_id:
+            after_id = self.after_id
+            self.after_id = None
+            if DEBUG: print "cancel scheduled recolorizer"
+            self.after_cancel(after_id)
+        if self.allow_colorizing and self.colorizing:
+            if DEBUG: print "stop colorizing"
+            self.stop_colorizing = True
+        self.allow_colorizing = not self.allow_colorizing
+        if self.allow_colorizing and not self.colorizing:
+            self.after_id = self.after(1, self.recolorize)
+        if DEBUG:
+            print "auto colorizing turned",\
+                  self.allow_colorizing and "on" or "off"
+        return "break"
+
+    def recolorize(self):
+        """after() callback: run one colorizing pass, rescheduling if
+        dirty ("TODO"-tagged) text remains afterwards."""
+        self.after_id = None
+        if not self.delegate:
+            if DEBUG: print "no delegate"
+            return
+        if not self.allow_colorizing:
+            if DEBUG: print "auto colorizing is off"
+            return
+        if self.colorizing:
+            if DEBUG: print "already colorizing"
+            return
+        try:
+            self.stop_colorizing = False
+            self.colorizing = True
+            if DEBUG: print "colorizing..."
+            t0 = time.clock()
+            self.recolorize_main()
+            t1 = time.clock()
+            if DEBUG: print "%.3f seconds" % (t1-t0)
+        finally:
+            # Always clear the flag, even if recolorize_main raised.
+            self.colorizing = False
+        if self.allow_colorizing and self.tag_nextrange("TODO", "1.0"):
+            if DEBUG: print "reschedule colorizing"
+            self.after_id = self.after(1, self.recolorize)
+        if self.close_when_done:
+            top = self.close_when_done
+            self.close_when_done = None
+            top.destroy()
+
+    def recolorize_main(self):
+        """Core pass: scan "TODO" ranges, re-tag them, and resynchronize
+        at "SYNC" (newline) marks; abort early if stop_colorizing is set."""
+        next = "1.0"
+        while True:
+            item = self.tag_nextrange("TODO", next)
+            if not item:
+                break
+            head, tail = item
+            self.tag_remove("SYNC", head, tail)
+            # Back up to the last known-good sync point before the range.
+            item = self.tag_prevrange("SYNC", head)
+            if item:
+                head = item[1]
+            else:
+                head = "1.0"
+
+            chars = ""
+            next = head
+            lines_to_get = 1
+            ok = False
+            while not ok:
+                mark = next
+                # Fetch geometrically more lines per iteration (capped at 100).
+                next = self.index(mark + "+%d lines linestart" %
+                                         lines_to_get)
+                lines_to_get = min(lines_to_get * 2, 100)
+                ok = "SYNC" in self.tag_names(next + "-1c")
+                line = self.get(mark, next)
+                ##print head, "get", mark, next, "->", repr(line)
+                if not line:
+                    return
+                for tag in self.tagdefs.keys():
+                    self.tag_remove(tag, mark, next)
+                chars = chars + line
+                m = self.prog.search(chars)
+                while m:
+                    for key, value in m.groupdict().items():
+                        if value:
+                            a, b = m.span(key)
+                            self.tag_add(key,
+                                         head + "+%dc" % a,
+                                         head + "+%dc" % b)
+                            if value in ("def", "class"):
+                                # Tag the defined name after the keyword.
+                                m1 = self.idprog.match(chars, b)
+                                if m1:
+                                    a, b = m1.span(1)
+                                    self.tag_add("DEFINITION",
+                                                 head + "+%dc" % a,
+                                                 head + "+%dc" % b)
+                            elif value == "import":
+                                # color all the "as" words on same line;
+                                # cheap approximation to the truth
+                                while True:
+                                    m1 = self.asprog.match(chars, b)
+                                    if not m1:
+                                        break
+                                    a, b = m1.span(1)
+                                    self.tag_add("KEYWORD",
+                                                 head + "+%dc" % a,
+                                                 head + "+%dc" % b)
+                    m = self.prog.search(chars, m.end())
+                if "SYNC" in self.tag_names(next + "-1c"):
+                    head = next
+                    chars = ""
+                else:
+                    ok = False
+                if not ok:
+                    # We're in an inconsistent state, and the call to
+                    # update may tell us to stop.  It may also change
+                    # the correct value for "next" (since this is a
+                    # line.col string, not a true mark).  So leave a
+                    # crumb telling the next invocation to resume here
+                    # in case update tells us to leave.
+                    self.tag_add("TODO", next)
+                self.update()
+                if self.stop_colorizing:
+                    if DEBUG: print "colorizing stopped"
+                    return
+
+
+def main():
+    """Stand-alone smoke test: colorize a bare Text widget in a window."""
+    from Percolator import Percolator
+    root = Tk()
+    root.wm_protocol("WM_DELETE_WINDOW", root.quit)
+    text = Text(background="white")
+    text.pack(expand=1, fill="both")
+    text.focus_set()
+    # Route the Text widget's inserts/deletes through the colorizer.
+    p = Percolator(text)
+    d = ColorDelegator()
+    p.insertfilter(d)
+    root.mainloop()
+
+if __name__ == "__main__":
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Debugger.py b/depot_tools/release/win/python_24/Lib/idlelib/Debugger.py
new file mode 100644
index 0000000..7a9d02f6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Debugger.py
@@ -0,0 +1,474 @@
+import os
+import bdb
+import types
+from Tkinter import *
+from WindowList import ListedToplevel
+from ScrolledList import ScrolledList
+
+
+class Idb(bdb.Bdb):
+    """bdb.Bdb subclass that forwards debugger stop events to the GUI."""
+
+    def __init__(self, gui):
+        self.gui = gui  # the Debugger instance that owns this Idb
+        bdb.Bdb.__init__(self)
+
+    def user_line(self, frame):
+        """bdb callback: the debugger has stopped at a source line."""
+        if self.in_rpc_code(frame):
+            # Never stop inside IDLE's own RPC machinery; keep stepping.
+            self.set_step()
+            return
+        message = self.__frame2message(frame)
+        self.gui.interaction(message, frame)
+
+    def user_exception(self, frame, info):
+        """bdb callback: an exception occurred under the debugger."""
+        if self.in_rpc_code(frame):
+            self.set_step()
+            return
+        message = self.__frame2message(frame)
+        self.gui.interaction(message, frame, info)
+
+    def in_rpc_code(self, frame):
+        """Return true if *frame* is executing inside IDLE's rpc.py."""
+        if frame.f_code.co_filename.count('rpc.py'):
+            return True
+        else:
+            # Recurse up the call stack until Debugger.py is reached.
+            prev_frame = frame.f_back
+            if prev_frame.f_code.co_filename.count('Debugger.py'):
+                # (that test will catch both Debugger.py and RemoteDebugger.py)
+                return False
+            return self.in_rpc_code(prev_frame)
+
+    def __frame2message(self, frame):
+        """Format *frame* as 'basename:lineno[: funcname()]' for display."""
+        code = frame.f_code
+        filename = code.co_filename
+        lineno = frame.f_lineno
+        basename = os.path.basename(filename)
+        message = "%s:%s" % (basename, lineno)
+        if code.co_name != "?":
+            message = "%s: %s()" % (message, code.co_name)
+        return message
+
+
+class Debugger:
+    """Tk "Debug Control" window driving an Idb instance.
+
+    interaction() blocks in a nested mainloop while the debuggee is
+    stopped; the Go/Step/Over/Out/Quit buttons end that loop.
+    """
+
+    # Checkbutton variables are class attributes so the pane visibility
+    # choices persist across debugger windows.
+    vstack = vsource = vlocals = vglobals = None
+
+    def __init__(self, pyshell, idb=None):
+        if idb is None:
+            idb = Idb(self)
+        self.pyshell = pyshell
+        self.idb = idb
+        self.frame = None
+        self.make_gui()
+        self.interacting = 0
+
+    def run(self, *args):
+        """Run code under the debugger, tracking the interacting flag."""
+        try:
+            self.interacting = 1
+            return self.idb.run(*args)
+        finally:
+            self.interacting = 0
+
+    def close(self, event=None):
+        """Close the debugger window (refused while code is running)."""
+        if self.interacting:
+            self.top.bell()
+            return
+        if self.stackviewer:
+            self.stackviewer.close(); self.stackviewer = None
+        # Clean up pyshell if user clicked debugger control close widget.
+        # (Causes a harmless extra cycle through close_debugger() if user
+        # toggled debugger from pyshell Debug menu)
+        self.pyshell.close_debugger()
+        # Now close the debugger control window....
+        self.top.destroy()
+
+    def make_gui(self):
+        """Build the Debug Control window: buttons, checkboxes, panes."""
+        pyshell = self.pyshell
+        self.flist = pyshell.flist
+        self.root = root = pyshell.root
+        self.top = top = ListedToplevel(root)
+        self.top.wm_title("Debug Control")
+        self.top.wm_iconname("Debug")
+        top.wm_protocol("WM_DELETE_WINDOW", self.close)
+        self.top.bind("<Escape>", self.close)
+        #
+        self.bframe = bframe = Frame(top)
+        self.bframe.pack(anchor="w")
+        self.buttons = bl = []
+        #
+        self.bcont = b = Button(bframe, text="Go", command=self.cont)
+        bl.append(b)
+        self.bstep = b = Button(bframe, text="Step", command=self.step)
+        bl.append(b)
+        self.bnext = b = Button(bframe, text="Over", command=self.next)
+        bl.append(b)
+        self.bret = b = Button(bframe, text="Out", command=self.ret)
+        bl.append(b)
+        # NOTE(review): self.bret is assigned twice -- the "Out" button
+        # reference is overwritten by the "Quit" button.  Benign, since
+        # both live in self.buttons, but looks like a copy/paste slip.
+        self.bret = b = Button(bframe, text="Quit", command=self.quit)
+        bl.append(b)
+        #
+        # Buttons stay disabled until the debuggee stops (interaction()).
+        for b in bl:
+            b.configure(state="disabled")
+            b.pack(side="left")
+        #
+        self.cframe = cframe = Frame(bframe)
+        self.cframe.pack(side="left")
+        #
+        if not self.vstack:
+            self.__class__.vstack = BooleanVar(top)
+            self.vstack.set(1)
+        self.bstack = Checkbutton(cframe,
+            text="Stack", command=self.show_stack, variable=self.vstack)
+        self.bstack.grid(row=0, column=0)
+        if not self.vsource:
+            self.__class__.vsource = BooleanVar(top)
+        self.bsource = Checkbutton(cframe,
+            text="Source", command=self.show_source, variable=self.vsource)
+        self.bsource.grid(row=0, column=1)
+        if not self.vlocals:
+            self.__class__.vlocals = BooleanVar(top)
+            self.vlocals.set(1)
+        self.blocals = Checkbutton(cframe,
+            text="Locals", command=self.show_locals, variable=self.vlocals)
+        self.blocals.grid(row=1, column=0)
+        if not self.vglobals:
+            self.__class__.vglobals = BooleanVar(top)
+        self.bglobals = Checkbutton(cframe,
+            text="Globals", command=self.show_globals, variable=self.vglobals)
+        self.bglobals.grid(row=1, column=1)
+        #
+        self.status = Label(top, anchor="w")
+        self.status.pack(anchor="w")
+        self.error = Label(top, anchor="w")
+        self.error.pack(anchor="w", fill="x")
+        self.errorbg = self.error.cget("background")
+        #
+        self.fstack = Frame(top, height=1)
+        self.fstack.pack(expand=1, fill="both")
+        self.flocals = Frame(top)
+        self.flocals.pack(expand=1, fill="both")
+        self.fglobals = Frame(top, height=1)
+        self.fglobals.pack(expand=1, fill="both")
+        #
+        if self.vstack.get():
+            self.show_stack()
+        if self.vlocals.get():
+            self.show_locals()
+        if self.vglobals.get():
+            self.show_globals()
+
+    def interaction(self, message, frame, info=None):
+        """Handle a debugger stop: update the UI, then block in a nested
+        mainloop until a control button resumes execution.
+
+        info, if given, is an (exc_type, exc_value, traceback) triple.
+        """
+        self.frame = frame
+        self.status.configure(text=message)
+        #
+        if info:
+            type, value, tb = info
+            try:
+                m1 = type.__name__
+            except AttributeError:
+                m1 = "%s" % str(type)
+            if value is not None:
+                try:
+                    m1 = "%s: %s" % (m1, str(value))
+                except:
+                    pass
+            bg = "yellow"
+        else:
+            m1 = ""
+            tb = None
+            bg = self.errorbg
+        self.error.configure(text=m1, background=bg)
+        #
+        sv = self.stackviewer
+        if sv:
+            stack, i = self.idb.get_stack(self.frame, tb)
+            sv.load_stack(stack, i)
+        #
+        self.show_variables(1)
+        #
+        if self.vsource.get():
+            self.sync_source_line()
+        #
+        for b in self.buttons:
+            b.configure(state="normal")
+        #
+        self.top.wakeup()
+        # Nested mainloop: returns when a control button calls root.quit().
+        self.root.mainloop()
+        #
+        for b in self.buttons:
+            b.configure(state="disabled")
+        self.status.configure(text="")
+        self.error.configure(text="", background=self.errorbg)
+        self.frame = None
+
+    def sync_source_line(self):
+        """Show the current frame's source line in an editor window."""
+        frame = self.frame
+        if not frame:
+            return
+        filename, lineno = self.__frame2fileline(frame)
+        # Skip pseudo-files like "<string>" and files that no longer exist.
+        if filename[:1] + filename[-1:] != "<>" and os.path.exists(filename):
+            self.flist.gotofileline(filename, lineno)
+
+    def __frame2fileline(self, frame):
+        """Return (filename, lineno) for *frame*."""
+        code = frame.f_code
+        filename = code.co_filename
+        lineno = frame.f_lineno
+        return filename, lineno
+
+    # Each button handler sets the bdb resume mode, then ends the nested
+    # mainloop started in interaction().
+    def cont(self):
+        self.idb.set_continue()
+        self.root.quit()
+
+    def step(self):
+        self.idb.set_step()
+        self.root.quit()
+
+    def next(self):
+        self.idb.set_next(self.frame)
+        self.root.quit()
+
+    def ret(self):
+        self.idb.set_return(self.frame)
+        self.root.quit()
+
+    def quit(self):
+        self.idb.set_quit()
+        self.root.quit()
+
+    stackviewer = None
+
+    def show_stack(self):
+        """Create or tear down the stack pane per the Stack checkbox."""
+        if not self.stackviewer and self.vstack.get():
+            self.stackviewer = sv = StackViewer(self.fstack, self.flist, self)
+            if self.frame:
+                stack, i = self.idb.get_stack(self.frame, None)
+                sv.load_stack(stack, i)
+        else:
+            sv = self.stackviewer
+            if sv and not self.vstack.get():
+                self.stackviewer = None
+                sv.close()
+            self.fstack['height'] = 1
+
+    def show_source(self):
+        if self.vsource.get():
+            self.sync_source_line()
+
+    def show_frame(self, (frame, lineno)):
+        """Select *frame* as current and refresh the variable panes."""
+        self.frame = frame
+        self.show_variables()
+
+    localsviewer = None
+    globalsviewer = None
+
+    def show_locals(self):
+        """Create or tear down the Locals pane per its checkbox."""
+        lv = self.localsviewer
+        if self.vlocals.get():
+            if not lv:
+                self.localsviewer = NamespaceViewer(self.flocals, "Locals")
+        else:
+            if lv:
+                self.localsviewer = None
+                lv.close()
+                self.flocals['height'] = 1
+        self.show_variables()
+
+    def show_globals(self):
+        """Create or tear down the Globals pane per its checkbox."""
+        gv = self.globalsviewer
+        if self.vglobals.get():
+            if not gv:
+                self.globalsviewer = NamespaceViewer(self.fglobals, "Globals")
+        else:
+            if gv:
+                self.globalsviewer = None
+                gv.close()
+                self.fglobals['height'] = 1
+        self.show_variables()
+
+    def show_variables(self, force=0):
+        """Reload the locals/globals panes from the current frame."""
+        lv = self.localsviewer
+        gv = self.globalsviewer
+        frame = self.frame
+        if not frame:
+            ldict = gdict = None
+        else:
+            ldict = frame.f_locals
+            gdict = frame.f_globals
+            # At module level locals IS globals; don't show it twice.
+            if lv and gv and ldict is gdict:
+                ldict = None
+        if lv:
+            lv.load_dict(ldict, force, self.pyshell.interp.rpcclt)
+        if gv:
+            gv.load_dict(gdict, force, self.pyshell.interp.rpcclt)
+
+    def set_breakpoint_here(self, filename, lineno):
+        self.idb.set_break(filename, lineno)
+
+    def clear_breakpoint_here(self, filename, lineno):
+        self.idb.clear_break(filename, lineno)
+
+    def clear_file_breaks(self, filename):
+        self.idb.clear_all_file_breaks(filename)
+
+    def load_breakpoints(self):
+        "Load PyShellEditorWindow breakpoints into subprocess debugger"
+        pyshell_edit_windows = self.pyshell.flist.inversedict.keys()
+        for editwin in pyshell_edit_windows:
+            filename = editwin.io.filename
+            try:
+                for lineno in editwin.breakpoints:
+                    self.set_breakpoint_here(filename, lineno)
+            except AttributeError:
+                # Window has no breakpoints (or no file); skip it.
+                continue
+
+class StackViewer(ScrolledList):
+    """Scrolled list showing the debuggee's call stack, one frame per row."""
+
+    def __init__(self, master, flist, gui):
+        ScrolledList.__init__(self, master, width=80)
+        self.flist = flist  # file list used to open source windows
+        self.gui = gui      # owning Debugger
+        self.stack = []     # list of (frame, lineno) pairs
+
+    def load_stack(self, stack, index=None):
+        """Fill the list from *stack*; mark entry *index* as current."""
+        self.stack = stack
+        self.clear()
+        for i in range(len(stack)):
+            frame, lineno = stack[i]
+            try:
+                modname = frame.f_globals["__name__"]
+            except:
+                modname = "?"
+            code = frame.f_code
+            filename = code.co_filename
+            funcname = code.co_name
+            import linecache
+            sourceline = linecache.getline(filename, lineno)
+            import string
+            sourceline = string.strip(sourceline)
+            if funcname in ("?", "", None):
+                item = "%s, line %d: %s" % (modname, lineno, sourceline)
+            else:
+                item = "%s.%s(), line %d: %s" % (modname, funcname,
+                                                 lineno, sourceline)
+            # "> " marks the currently-selected frame.
+            if i == index:
+                item = "> " + item
+            self.append(item)
+        if index is not None:
+            self.select(index)
+
+    def popup_event(self, event):
+        "override base method"
+        # Only offer the context menu when a stack is loaded.
+        if self.stack:
+            return ScrolledList.popup_event(self, event)
+
+    def fill_menu(self):
+        "override base method"
+        menu = self.menu
+        menu.add_command(label="Go to source line",
+                         command=self.goto_source_line)
+        menu.add_command(label="Show stack frame",
+                         command=self.show_stack_frame)
+
+    def on_select(self, index):
+        "override base method"
+        if 0 <= index < len(self.stack):
+            self.gui.show_frame(self.stack[index])
+
+    def on_double(self, index):
+        "override base method"
+        self.show_source(index)
+
+    def goto_source_line(self):
+        """Menu action: open the source for the active list entry."""
+        index = self.listbox.index("active")
+        self.show_source(index)
+
+    def show_stack_frame(self):
+        """Menu action: make the active list entry the current frame."""
+        index = self.listbox.index("active")
+        if 0 <= index < len(self.stack):
+            self.gui.show_frame(self.stack[index])
+
+    def show_source(self, index):
+        """Open an editor window on the source of stack entry *index*."""
+        if not (0 <= index < len(self.stack)):
+            return
+        frame, lineno = self.stack[index]
+        code = frame.f_code
+        filename = code.co_filename
+        if os.path.isfile(filename):
+            edit = self.flist.open(filename)
+            if edit:
+                edit.gotoline(lineno)
+
+
+class NamespaceViewer:
+    """Scrollable name/value table for a namespace dict (locals/globals)."""
+
+    def __init__(self, master, title, dict=None):
+        width = 0
+        height = 40
+        if dict:
+            height = 20*len(dict) # XXX 20 == observed height of Entry widget
+        self.master = master
+        self.title = title
+        # repr module (Python 2): bounded repr to keep rows short.
+        import repr
+        self.repr = repr.Repr()
+        self.repr.maxstring = 60
+        self.repr.maxother = 60
+        self.frame = frame = Frame(master)
+        self.frame.pack(expand=1, fill="both")
+        self.label = Label(frame, text=title, borderwidth=2, relief="groove")
+        self.label.pack(fill="x")
+        self.vbar = vbar = Scrollbar(frame, name="vbar")
+        vbar.pack(side="right", fill="y")
+        self.canvas = canvas = Canvas(frame,
+                                      height=min(300, max(40, height)),
+                                      scrollregion=(0, 0, width, height))
+        canvas.pack(side="left", fill="both", expand=1)
+        vbar["command"] = canvas.yview
+        canvas["yscrollcommand"] = vbar.set
+        self.subframe = subframe = Frame(canvas)
+        self.sfid = canvas.create_window(0, 0, window=subframe, anchor="nw")
+        self.load_dict(dict)
+
+    # Sentinel distinct from any real dict (and from None) so the first
+    # load_dict() call never takes the is-identical early exit.
+    dict = -1
+
+    def load_dict(self, dict, force=0, rpc_client=None):
+        """Rebuild the table from *dict*; no-op if unchanged and not force."""
+        if dict is self.dict and not force:
+            return
+        subframe = self.subframe
+        frame = self.frame
+        # Discard the previous rows.
+        for c in subframe.children.values():
+            c.destroy()
+        self.dict = None
+        if not dict:
+            l = Label(subframe, text="None")
+            l.grid(row=0, column=0)
+        else:
+            names = dict.keys()
+            names.sort()
+            row = 0
+            for name in names:
+                value = dict[name]
+                svalue = self.repr.repr(value) # repr(value)
+                # Strip extra quotes caused by calling repr on the (already)
+                # repr'd value sent across the RPC interface:
+                if rpc_client:
+                    svalue = svalue[1:-1]
+                l = Label(subframe, text=name)
+                l.grid(row=row, column=0, sticky="nw")
+                # Read-only-looking Entry so long values can be scrolled.
+                l = Entry(subframe, width=0, borderwidth=0)
+                l.insert(0, svalue)
+                l.grid(row=row, column=1, sticky="nw")
+                row = row+1
+        self.dict = dict
+        # XXX Could we use a <Configure> callback for the following?
+        subframe.update_idletasks() # Alas!
+        width = subframe.winfo_reqwidth()
+        height = subframe.winfo_reqheight()
+        canvas = self.canvas
+        self.canvas["scrollregion"] = (0, 0, width, height)
+        if height > 300:
+            canvas["height"] = 300
+            frame.pack(expand=1)
+        else:
+            canvas["height"] = height
+            frame.pack(expand=0)
+
+    def close(self):
+        self.frame.destroy()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Delegator.py b/depot_tools/release/win/python_24/Lib/idlelib/Delegator.py
new file mode 100644
index 0000000..6125591f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Delegator.py
@@ -0,0 +1,33 @@
class Delegator:
    """Forward attribute lookups to a wrapped delegate object.

    Attributes fetched from the delegate are memoized directly on the
    instance, so later lookups bypass __getattr__.  The cache exists only
    so those memoized names can be flushed when the delegate is swapped.
    """

    def __init__(self, delegate=None):
        self.delegate = delegate
        self.__cache = {}

    def __getattr__(self, name):
        # Only called when `name` is not already on the instance: fetch it
        # from the delegate (AttributeError propagates), memoize it on
        # self, and record it so resetcache() can undo the memoization.
        attr = getattr(self.delegate, name)
        setattr(self, name, attr)
        self.__cache[name] = attr
        return attr

    def resetcache(self):
        "Drop every memoized attribute so lookups hit the delegate again."
        for name in list(self.__cache.keys()):
            try:
                delattr(self, name)
            except AttributeError:
                pass
        self.__cache.clear()

    def cachereport(self):
        "Print the sorted list of currently cached attribute names."
        # sorted() keeps the original sort-then-print behavior without
        # mutating a keys list in place (py2-only idiom).
        print(sorted(self.__cache.keys()))

    def setdelegate(self, delegate):
        "Swap in a new delegate, flushing attributes cached from the old one."
        self.resetcache()
        self.delegate = delegate

    def getdelegate(self):
        return self.delegate
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/EditorWindow.py b/depot_tools/release/win/python_24/Lib/idlelib/EditorWindow.py
new file mode 100644
index 0000000..c0b66a3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/EditorWindow.py
@@ -0,0 +1,1409 @@
+import sys
+import os
+import re
+import imp
+from itertools import count
+from Tkinter import *
+import tkSimpleDialog
+import tkMessageBox
+
+import webbrowser
+import idlever
+import WindowList
+import SearchDialog
+import GrepDialog
+import ReplaceDialog
+import PyParse
+from configHandler import idleConf
+import aboutDialog, textView, configDialog
+
+# The default tab setting for a Text widget, in average-width characters.
+TK_TABWIDTH_DEFAULT = 8
+
+def _find_module(fullname, path=None):
+    """Version of imp.find_module() that handles hierarchical module names"""
+
+    file = None
+    for tgt in fullname.split('.'):
+        if file is not None:
+            file.close()            # close intermediate files
+        (file, filename, descr) = imp.find_module(tgt, path)
+        if descr[2] == imp.PY_SOURCE:
+            break                   # find but not load the source file
+        module = imp.load_module(tgt, file, filename, descr)
+        try:
+            path = module.__path__
+        except AttributeError:
+            raise ImportError, 'No source for module ' + module.__name__
+    return file, filename, descr
+
+class EditorWindow:
+    from Percolator import Percolator
+    from ColorDelegator import ColorDelegator
+    from UndoDelegator import UndoDelegator
+    from IOBinding import IOBinding
+    import Bindings
+    from Tkinter import Toplevel
+    from MultiStatusBar import MultiStatusBar
+
+    help_url = None
+
    def __init__(self, flist=None, filename=None, key=None, root=None):
        """Build a complete editor window: widgets, menus, bindings,
        percolator filter chain, I/O binding and extensions.

        flist    -- owning FileList; shares Tkinter vars across windows
        filename -- file to load (or name for a new buffer), may be None
        key      -- key identifying this window in flist.dict
        root     -- Tk root; defaults to flist.root
        """
        # Resolve the Help-menu documentation target once, cached on the
        # class, preferring local docs and falling back to python.org.
        if EditorWindow.help_url is None:
            dochome =  os.path.join(sys.prefix, 'Doc', 'index.html')
            if sys.platform.count('linux'):
                # look for html docs in a couple of standard places
                pyver = 'python-docs-' + '%s.%s.%s' % sys.version_info[:3]
                if os.path.isdir('/var/www/html/python/'):  # "python2" rpm
                    dochome = '/var/www/html/python/index.html'
                else:
                    basepath = '/usr/share/doc/'  # standard location
                    dochome = os.path.join(basepath, pyver,
                                           'Doc', 'index.html')
            elif sys.platform[:3] == 'win':
                chmfile = os.path.join(sys.prefix, 'Doc',
                                       'Python%d%d.chm' % sys.version_info[:2])
                if os.path.isfile(chmfile):
                    dochome = chmfile
            dochome = os.path.normpath(dochome)
            if os.path.isfile(dochome):
                EditorWindow.help_url = dochome
            else:
                EditorWindow.help_url = "http://www.python.org/doc/current"
        currentTheme=idleConf.CurrentTheme()
        self.flist = flist
        root = root or flist.root
        self.root = root
        # Toplevel window with menubar, scrollbar and the themed Text.
        self.menubar = Menu(root)
        self.top = top = WindowList.ListedToplevel(root, menu=self.menubar)
        if flist:
            self.tkinter_vars = flist.vars
            #self.top.instance_dict makes flist.inversedict available to
            #configDialog.py so it can access all EditorWindow instances
            self.top.instance_dict=flist.inversedict
        else:
            self.tkinter_vars = {}  # keys: Tkinter event names
                                    # values: Tkinter variable instances
        self.recent_files_path=os.path.join(idleConf.GetUserCfgDir(),
                'recent-files.lst')
        self.vbar = vbar = Scrollbar(top, name='vbar')
        self.text_frame = text_frame = Frame(top)
        self.width = idleConf.GetOption('main','EditorWindow','width')
        self.text = text = Text(text_frame, name='text', padx=5, wrap='none',
                foreground=idleConf.GetHighlight(currentTheme,
                        'normal',fgBg='fg'),
                background=idleConf.GetHighlight(currentTheme,
                        'normal',fgBg='bg'),
                highlightcolor=idleConf.GetHighlight(currentTheme,
                        'hilite',fgBg='fg'),
                highlightbackground=idleConf.GetHighlight(currentTheme,
                        'hilite',fgBg='bg'),
                insertbackground=idleConf.GetHighlight(currentTheme,
                        'cursor',fgBg='fg'),
                width=self.width,
                height=idleConf.GetOption('main','EditorWindow','height') )
        self.top.focused_widget = self.text

        self.createmenubar()
        self.apply_bindings()

        # Wire window-manager and virtual events to handler methods.
        self.top.protocol("WM_DELETE_WINDOW", self.close)
        self.top.bind("<<close-window>>", self.close_event)
        text.bind("<<cut>>", self.cut)
        text.bind("<<copy>>", self.copy)
        text.bind("<<paste>>", self.paste)
        text.bind("<<center-insert>>", self.center_insert_event)
        text.bind("<<help>>", self.help_dialog)
        text.bind("<<python-docs>>", self.python_docs)
        text.bind("<<about-idle>>", self.about_dialog)
        text.bind("<<open-config-dialog>>", self.config_dialog)
        text.bind("<<open-module>>", self.open_module)
        text.bind("<<do-nothing>>", lambda event: "break")
        text.bind("<<select-all>>", self.select_all)
        text.bind("<<remove-selection>>", self.remove_selection)
        text.bind("<<find>>", self.find_event)
        text.bind("<<find-again>>", self.find_again_event)
        text.bind("<<find-in-files>>", self.find_in_files_event)
        text.bind("<<find-selection>>", self.find_selection_event)
        text.bind("<<replace>>", self.replace_event)
        text.bind("<<goto-line>>", self.goto_line_event)
        text.bind("<3>", self.right_menu_event)
        # Editing/indentation handlers below are provided by extensions
        # (e.g. AutoIndent) -- presumably bound before those load so the
        # virtual events resolve once the extension attaches.
        text.bind("<<smart-backspace>>",self.smart_backspace_event)
        text.bind("<<newline-and-indent>>",self.newline_and_indent_event)
        text.bind("<<smart-indent>>",self.smart_indent_event)
        text.bind("<<indent-region>>",self.indent_region_event)
        text.bind("<<dedent-region>>",self.dedent_region_event)
        text.bind("<<comment-region>>",self.comment_region_event)
        text.bind("<<uncomment-region>>",self.uncomment_region_event)
        text.bind("<<tabify-region>>",self.tabify_region_event)
        text.bind("<<untabify-region>>",self.untabify_region_event)
        text.bind("<<toggle-tabs>>",self.toggle_tabs_event)
        text.bind("<<change-indentwidth>>",self.change_indentwidth_event)
        text.bind("<Left>", self.move_at_edge_if_selection(0))
        text.bind("<Right>", self.move_at_edge_if_selection(1))

        if flist:
            flist.inversedict[self] = key
            if key:
                flist.dict[key] = self
            text.bind("<<open-new-window>>", self.new_callback)
            text.bind("<<close-all-windows>>", self.flist.close_all_callback)
            text.bind("<<open-class-browser>>", self.open_class_browser)
            text.bind("<<open-path-browser>>", self.open_path_browser)

        self.set_status_bar()
        vbar['command'] = text.yview
        vbar.pack(side=RIGHT, fill=Y)
        text['yscrollcommand'] = vbar.set
        fontWeight='normal'
        if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'):
            fontWeight='bold'
        text.config(font=(idleConf.GetOption('main','EditorWindow','font'),
                idleConf.GetOption('main','EditorWindow','font-size'),
                fontWeight))
        text_frame.pack(side=LEFT, fill=BOTH, expand=1)
        text.pack(side=TOP, fill=BOTH, expand=1)
        text.focus_set()

        # Percolator filter chain between the Text widget and Tk:
        # undo sits on top, colorizer (for Python source) underneath.
        self.per = per = self.Percolator(text)
        if self.ispythonsource(filename):
            self.color = color = self.ColorDelegator()
            per.insertfilter(color)
        else:
            self.color = None

        self.undo = undo = self.UndoDelegator()
        per.insertfilter(undo)
        text.undo_block_start = undo.undo_block_start
        text.undo_block_stop = undo.undo_block_stop
        undo.set_saved_change_hook(self.saved_change_hook)

        # IOBinding implements file I/O and printing functionality
        self.io = io = self.IOBinding(self)
        io.set_filename_change_hook(self.filename_change_hook)

        # Create the recent files submenu
        self.recent_files_menu = Menu(self.menubar)
        self.menudict['file'].insert_cascade(3, label='Recent Files',
                                             underline=0,
                                             menu=self.recent_files_menu)
        self.update_recent_files_list()

        if filename:
            if os.path.exists(filename) and not os.path.isdir(filename):
                io.loadfile(filename)
            else:
                # Nonexistent path: open an empty buffer under that name.
                io.set_filename(filename)
        self.saved_change_hook()

        self.load_extensions()

        menu = self.menudict.get('windows')
        if menu:
            end = menu.index("end")
            if end is None:
                end = -1
            if end >= 0:
                menu.add_separator()
                end = end + 1
            # Index of the last static entry; postwindowsmenu() trims
            # everything after it before re-adding the window list.
            self.wmenu_end = end
            WindowList.register_callback(self.postwindowsmenu)

        # Some abstractions so IDLE extensions are cross-IDE
        self.askyesno = tkMessageBox.askyesno
        self.askinteger = tkSimpleDialog.askinteger
        self.showerror = tkMessageBox.showerror

        if self.extensions.has_key('AutoIndent'):
            self.extensions['AutoIndent'].set_indentation_params(
                self.ispythonsource(filename))
+
+    def new_callback(self, event):
+        dirname, basename = self.io.defaultfilename()
+        self.flist.new(dirname)
+        return "break"
+
+    def set_status_bar(self):
+        self.status_bar = self.MultiStatusBar(self.top)
+        self.status_bar.set_label('column', 'Col: ?', side=RIGHT)
+        self.status_bar.set_label('line', 'Ln: ?', side=RIGHT)
+        self.status_bar.pack(side=BOTTOM, fill=X)
+        self.text.bind('<KeyRelease>', self.set_line_and_column)
+        self.text.bind('<ButtonRelease>', self.set_line_and_column)
+        self.text.after_idle(self.set_line_and_column)
+
+    def set_line_and_column(self, event=None):
+        line, column = self.text.index(INSERT).split('.')
+        self.status_bar.set_label('column', 'Col: %s' % column)
+        self.status_bar.set_label('line', 'Ln: %s' % line)
+
    # (internal name, menu label) pairs in menubar order; the underscore
    # in each label marks the mnemonic character (decoded by prepstr()).
    menu_specs = [
        ("file", "_File"),
        ("edit", "_Edit"),
        ("format", "F_ormat"),
        ("run", "_Run"),
        ("options", "_Options"),
        ("windows", "_Windows"),
        ("help", "_Help"),
    ]
+
    def createmenubar(self):
        """Build the top-level menus named in menu_specs and populate them.

        Fills self.menudict (name -> Menu widget) and records the base
        length of the Help menu so user-configured extra help entries can
        be rebuilt later by reset_help_menu_entries().
        """
        mbar = self.menubar
        self.menudict = menudict = {}
        for name, label in self.menu_specs:
            underline, label = prepstr(label)
            menudict[name] = menu = Menu(mbar, name=name)
            mbar.add_cascade(label=label, menu=menu, underline=underline)
        self.fill_menus()
        self.base_helpmenu_length = self.menudict['help'].index(END)
        self.reset_help_menu_entries()
+
+    def postwindowsmenu(self):
+        # Only called when Windows menu exists
+        menu = self.menudict['windows']
+        end = menu.index("end")
+        if end is None:
+            end = -1
+        if end > self.wmenu_end:
+            menu.delete(self.wmenu_end+1, end)
+        WindowList.add_windows_to_menu(menu)
+
    # Right-click context menu widget; built lazily by make_rmenu().
    rmenu = None
+
    def right_menu_event(self, event):
        """Post the context menu at the mouse position (bound to <3>).

        Clears the selection and moves the insert mark to the click point
        first; the menu itself is built on first use.
        """
        self.text.tag_remove("sel", "1.0", "end")
        self.text.mark_set("insert", "@%d,%d" % (event.x, event.y))
        if not self.rmenu:
            self.make_rmenu()
        rmenu = self.rmenu
        self.event = event
        iswin = sys.platform[:3] == 'win'
        if iswin:
            # On Windows the ibeam cursor lingers while the popup is up;
            # show an arrow for the duration instead.
            self.text.config(cursor="arrow")
        rmenu.tk_popup(event.x_root, event.y_root)
        if iswin:
            self.text.config(cursor="ibeam")
+
    # (label, virtual event) pairs for the right-click menu; subclasses
    # extend/override this list to add entries.
    rmenu_specs = [
        # ("Label", "<<virtual-event>>"), ...
        ("Close", "<<close-window>>"), # Example
    ]
+
    def make_rmenu(self):
        "Build the right-click menu from rmenu_specs (label, virtual event)."
        rmenu = Menu(self.text, tearoff=0)
        for label, eventname in self.rmenu_specs:
            # Default arguments freeze text/eventname per iteration (the
            # late-binding closure pitfall is avoided deliberately here).
            def command(text=self.text, eventname=eventname):
                text.event_generate(eventname)
            rmenu.add_command(label=label, command=command)
        self.rmenu = rmenu
+
+    def about_dialog(self, event=None):
+        aboutDialog.AboutDialog(self.top,'About IDLE')
+
+    def config_dialog(self, event=None):
+        configDialog.ConfigDialog(self.top,'Settings')
+
+    def help_dialog(self, event=None):
+        fn=os.path.join(os.path.abspath(os.path.dirname(__file__)),'help.txt')
+        textView.TextViewer(self.top,'Help',fn)
+
+    def python_docs(self, event=None):
+        if sys.platform[:3] == 'win':
+            os.startfile(self.help_url)
+        else:
+            webbrowser.open(self.help_url)
+        return "break"
+
+    def cut(self,event):
+        self.text.event_generate("<<Cut>>")
+        return "break"
+
+    def copy(self,event):
+        self.text.event_generate("<<Copy>>")
+        return "break"
+
+    def paste(self,event):
+        self.text.event_generate("<<Paste>>")
+        return "break"
+
+    def select_all(self, event=None):
+        self.text.tag_add("sel", "1.0", "end-1c")
+        self.text.mark_set("insert", "1.0")
+        self.text.see("insert")
+        return "break"
+
+    def remove_selection(self, event=None):
+        self.text.tag_remove("sel", "1.0", "end")
+        self.text.see("insert")
+
+    def move_at_edge_if_selection(self, edge_index):
+        """Cursor move begins at start or end of selection
+
+        When a left/right cursor key is pressed create and return to Tkinter a
+        function which causes a cursor move from the associated edge of the
+        selection.
+
+        """
+        self_text_index = self.text.index
+        self_text_mark_set = self.text.mark_set
+        edges_table = ("sel.first+1c", "sel.last-1c")
+        def move_at_edge(event):
+            if (event.state & 5) == 0: # no shift(==1) or control(==4) pressed
+                try:
+                    self_text_index("sel.first")
+                    self_text_mark_set("insert", edges_table[edge_index])
+                except TclError:
+                    pass
+        return move_at_edge
+
+    def find_event(self, event):
+        SearchDialog.find(self.text)
+        return "break"
+
+    def find_again_event(self, event):
+        SearchDialog.find_again(self.text)
+        return "break"
+
+    def find_selection_event(self, event):
+        SearchDialog.find_selection(self.text)
+        return "break"
+
+    def find_in_files_event(self, event):
+        GrepDialog.grep(self.text, self.io, self.flist)
+        return "break"
+
+    def replace_event(self, event):
+        ReplaceDialog.replace(self.text)
+        return "break"
+
+    def goto_line_event(self, event):
+        text = self.text
+        lineno = tkSimpleDialog.askinteger("Goto",
+                "Go to line number:",parent=text)
+        if lineno is None:
+            return "break"
+        if lineno <= 0:
+            text.bell()
+            return "break"
+        text.mark_set("insert", "%d.0" % lineno)
+        text.see("insert")
+
    def open_module(self, event=None):
        """Prompt for a dotted module name, locate its source on sys.path,
        and open it in an editor window.

        The dialog is pre-filled with the current selection.  Only pure
        Python source modules are supported; failures are reported via
        message boxes rather than exceptions.
        """
        # XXX Shouldn't this be in IOBinding or in FileList?
        try:
            name = self.text.get("sel.first", "sel.last")
        except TclError:
            name = ""
        else:
            name = name.strip()
        name = tkSimpleDialog.askstring("Module",
                 "Enter the name of a Python module\n"
                 "to search on sys.path and open:",
                 parent=self.text, initialvalue=name)
        if name:
            name = name.strip()
        if not name:
            return
        # XXX Ought to insert current file's directory in front of path
        try:
            (f, file, (suffix, mode, type)) = _find_module(name)
        except (NameError, ImportError), msg:
            tkMessageBox.showerror("Import error", str(msg), parent=self.text)
            return
        if type != imp.PY_SOURCE:
            tkMessageBox.showerror("Unsupported type",
                "%s is not a source module" % name, parent=self.text)
            return
        if f:
            f.close()
        if self.flist:
            self.flist.open(file)
        else:
            self.io.loadfile(file)
+
    def open_class_browser(self, event=None):
        """Open a class browser on the current file.

        Requires the buffer to have an associated filename; otherwise an
        error box is shown and None is returned.
        """
        filename = self.io.filename
        if not filename:
            tkMessageBox.showerror(
                "No filename",
                "This buffer has no associated filename",
                master=self.text)
            self.text.focus_set()
            return None
        head, tail = os.path.split(filename)
        base, ext = os.path.splitext(tail)
        import ClassBrowser
        ClassBrowser.ClassBrowser(self.flist, base, [head])
+
+    def open_path_browser(self, event=None):
+        import PathBrowser
+        PathBrowser.PathBrowser(self.flist)
+
+    def gotoline(self, lineno):
+        if lineno is not None and lineno > 0:
+            self.text.mark_set("insert", "%d.0" % lineno)
+            self.text.tag_remove("sel", "1.0", "end")
+            self.text.tag_add("sel", "insert", "insert +1l")
+            self.center()
+
+    def ispythonsource(self, filename):
+        if not filename:
+            return True
+        base, ext = os.path.splitext(os.path.basename(filename))
+        if os.path.normcase(ext) in (".py", ".pyw"):
+            return True
+        try:
+            f = open(filename)
+            line = f.readline()
+            f.close()
+        except IOError:
+            return False
+        return line.startswith('#!') and line.find('python') >= 0
+
+    def close_hook(self):
+        if self.flist:
+            self.flist.close_edit(self)
+
+    def set_close_hook(self, close_hook):
+        self.close_hook = close_hook
+
    def filename_change_hook(self):
        """React to a filename change: notify the file list, refresh the
        title bar and window registry, and (de)activate colorizing to
        match the new name."""
        if self.flist:
            self.flist.filename_changed_edit(self)
        self.saved_change_hook()
        self.top.update_windowlist_registry(self)
        if self.ispythonsource(self.io.filename):
            self.addcolorizer()
        else:
            self.rmcolorizer()
+
    def addcolorizer(self):
        "Insert a ColorDelegator into the percolator (no-op if present)."
        if self.color:
            return
        # Pull the undo filter off, splice the colorizer in underneath,
        # then put undo back so the chain stays undo -> color -> text.
        self.per.removefilter(self.undo)
        self.color = self.ColorDelegator()
        self.per.insertfilter(self.color)
        self.per.insertfilter(self.undo)
+
    def rmcolorizer(self):
        "Remove the ColorDelegator from the percolator (no-op if absent)."
        if not self.color:
            return
        # Temporarily detach undo so the colorizer beneath it can be
        # removed, then restore undo on top.
        self.per.removefilter(self.undo)
        self.per.removefilter(self.color)
        self.color = None
        self.per.insertfilter(self.undo)
+
    def ResetColorizer(self):
        "Update the colour theme if it is changed"
        # Called from configDialog.py
        if self.color:
            # NOTE(review): the previous ColorDelegator is not removed
            # from the percolator before the new one is inserted --
            # confirm whether stale filters can accumulate here.
            self.color = self.ColorDelegator()
            self.per.insertfilter(self.color)
        theme = idleConf.GetOption('main','Theme','name')
        self.text.config(idleConf.GetHighlight(theme, "normal"))
+
+    def ResetFont(self):
+        "Update the text widgets' font if it is changed"
+        # Called from configDialog.py
+        fontWeight='normal'
+        if idleConf.GetOption('main','EditorWindow','font-bold',type='bool'):
+            fontWeight='bold'
+        self.text.config(font=(idleConf.GetOption('main','EditorWindow','font'),
+                idleConf.GetOption('main','EditorWindow','font-size'),
+                fontWeight))
+
    def ResetKeybindings(self):
        "Update the keybindings if they are changed"
        # Called from configDialog.py
        self.Bindings.default_keydefs=idleConf.GetCurrentKeySet()
        keydefs = self.Bindings.default_keydefs
        # Drop every old binding, then re-apply the fresh key set.
        for event, keylist in keydefs.items():
            self.text.event_delete(event)
        self.apply_bindings()
        #update menu accelerators
        # Build {menu name: {item label: virtual event}} from the stock
        # menu definitions so each entry's accelerator can be refreshed.
        menuEventDict={}
        for menu in self.Bindings.menudefs:
            menuEventDict[menu[0]]={}
            for item in menu[1]:
                if item:
                    menuEventDict[menu[0]][prepstr(item[0])[1]]=item[1]
        for menubarItem in self.menudict.keys():
            menu=self.menudict[menubarItem]
            end=menu.index(END)+1
            for index in range(0,end):
                if menu.type(index)=='command':
                    accel=menu.entrycget(index,'accelerator')
                    if accel:
                        itemName=menu.entrycget(index,'label')
                        event=''
                        if menuEventDict.has_key(menubarItem):
                            if menuEventDict[menubarItem].has_key(itemName):
                                event=menuEventDict[menubarItem][itemName]
                        if event:
                            # Show the first key sequence now bound to the
                            # entry's virtual event.
                            accel=get_accelerator(keydefs, event)
                            menu.entryconfig(index,accelerator=accel)
+
    def reset_help_menu_entries(self):
        "Update the additional help entries on the Help menu"
        help_list = idleConf.GetAllExtraHelpSourcesList()
        helpmenu = self.menudict['help']
        # first delete the extra help entries, if any
        helpmenu_length = helpmenu.index(END)
        if helpmenu_length > self.base_helpmenu_length:
            helpmenu.delete((self.base_helpmenu_length + 1), helpmenu_length)
        # then rebuild them
        if help_list:
            helpmenu.add_separator()
            for entry in help_list:
                # entry is (menu label, helpfile path or URL)
                cmd = self.__extra_help_callback(entry[1])
                helpmenu.add_command(label=entry[0], command=cmd)
        # and update the menu dictionary
        self.menudict['help'] = helpmenu
+
+    def __extra_help_callback(self, helpfile):
+        "Create a callback with the helpfile value frozen at definition time"
+        def display_extra_help(helpfile=helpfile):
+            if not (helpfile.startswith('www') or helpfile.startswith('http')):
+                url = os.path.normpath(helpfile)
+            if sys.platform[:3] == 'win':
+                os.startfile(helpfile)
+            else:
+                webbrowser.open(helpfile)
+        return display_extra_help
+
    def update_recent_files_list(self, new_file=None):
        """Load and update the recent files list and menus.

        The on-disk list keeps one absolute path per line, most recent
        first, capped at len(ulchars) entries; every open editor window's
        Recent Files submenu is rebuilt from it.
        """
        rf_list = []
        if os.path.exists(self.recent_files_path):
            rf_list_file = open(self.recent_files_path,'r')
            try:
                rf_list = rf_list_file.readlines()
            finally:
                rf_list_file.close()
        if new_file:
            new_file = os.path.abspath(new_file) + '\n'
            if new_file in rf_list:
                rf_list.remove(new_file)  # move to top
            rf_list.insert(0, new_file)
        # clean and save the recent files list
        # (drop entries with NUL bytes or whose file no longer exists)
        bad_paths = []
        for path in rf_list:
            if '\0' in path or not os.path.exists(path[0:-1]):
                bad_paths.append(path)
        rf_list = [path for path in rf_list if path not in bad_paths]
        ulchars = "1234567890ABCDEFGHIJK"
        rf_list = rf_list[0:len(ulchars)]
        rf_file = open(self.recent_files_path, 'w')
        try:
            rf_file.writelines(rf_list)
        finally:
            rf_file.close()
        # for each edit window instance, construct the recent files menu
        for instance in self.top.instance_dict.keys():
            menu = instance.recent_files_menu
            menu.delete(1, END)  # clear, and rebuild:
            for i, file in zip(count(), rf_list):
                file_name = file[0:-1]  # zap \n
                callback = instance.__recent_file_callback(file_name)
                menu.add_command(label=ulchars[i] + " " + file_name,
                                 command=callback,
                                 underline=0)
+
+    def __recent_file_callback(self, file_name):
+        def open_recent_file(fn_closure=file_name):
+            self.io.open(editFile=fn_closure)
+        return open_recent_file
+
+    def saved_change_hook(self):
+        short = self.short_title()
+        long = self.long_title()
+        if short and long:
+            title = short + " - " + long
+        elif short:
+            title = short
+        elif long:
+            title = long
+        else:
+            title = "Untitled"
+        icon = short or long or title
+        if not self.get_saved():
+            title = "*%s*" % title
+            icon = "*%s" % icon
+        self.top.wm_title(title)
+        self.top.wm_iconname(icon)
+
+    def get_saved(self):
+        return self.undo.get_saved()
+
+    def set_saved(self, flag):
+        self.undo.set_saved(flag)
+
+    def reset_undo(self):
+        self.undo.reset_undo()
+
+    def short_title(self):
+        filename = self.io.filename
+        if filename:
+            filename = os.path.basename(filename)
+        return filename
+
+    def long_title(self):
+        return self.io.filename or ""
+
+    def center_insert_event(self, event):
+        self.center()
+
+    def center(self, mark="insert"):
+        text = self.text
+        top, bot = self.getwindowlines()
+        lineno = self.getlineno(mark)
+        height = bot - top
+        newtop = max(1, lineno - height//2)
+        text.yview(float(newtop))
+
+    def getwindowlines(self):
+        text = self.text
+        top = self.getlineno("@0,0")
+        bot = self.getlineno("@0,65535")
+        if top == bot and text.winfo_height() == 1:
+            # Geometry manager hasn't run yet
+            height = int(text['height'])
+            bot = top + height - 1
+        return top, bot
+
+    def getlineno(self, mark="insert"):
+        text = self.text
+        return int(float(text.index(mark)))
+
+    def get_geometry(self):
+        "Return (width, height, x, y)"
+        geom = self.top.wm_geometry()
+        m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
+        tuple = (map(int, m.groups()))
+        return tuple
+
+    def close_event(self, event):
+        self.close()
+
+    def maybesave(self):
+        if self.io:
+            if not self.get_saved():
+                if self.top.state()!='normal':
+                    self.top.deiconify()
+                self.top.lower()
+                self.top.lift()
+            return self.io.maybesave()
+
+    def close(self):
+        reply = self.maybesave()
+        if reply != "cancel":
+            self._close()
+        return reply
+
    def _close(self):
        """Unconditionally tear the window down and release its resources.

        Order matters here: the recent-files list is updated while io is
        still alive, hooks fire before state is nulled out, and widget
        references are dropped so Tk objects can be collected.
        """
        if self.io.filename:
            self.update_recent_files_list(new_file=self.io.filename)
        WindowList.unregister_callback(self.postwindowsmenu)
        if self.close_hook:
            self.close_hook()
        self.flist = None
        colorizing = 0
        self.unload_extensions()
        self.io.close(); self.io = None
        self.undo = None # XXX
        if self.color:
            colorizing = self.color.colorizing
            doh = colorizing and self.top
            self.color.close(doh) # Cancel colorization
        self.text = None
        self.tkinter_vars = None
        self.per.close(); self.per = None
        if not colorizing:
            # A mid-run colorizer owns destroying the toplevel once it
            # finishes; otherwise destroy it right now.
            self.top.destroy()
+
+    def load_extensions(self):
+        self.extensions = {}
+        self.load_standard_extensions()
+
+    def unload_extensions(self):
+        for ins in self.extensions.values():
+            if hasattr(ins, "close"):
+                ins.close()
+        self.extensions = {}
+
    def load_standard_extensions(self):
        "Load every configured extension, logging (not raising) failures."
        for name in self.get_standard_extension_names():
            try:
                self.load_extension(name)
            except:
                # Deliberate catch-all: one broken extension must not
                # prevent the editor window from opening.
                print "Failed to load extension", repr(name)
                import traceback
                traceback.print_exc()
+
+    def get_standard_extension_names(self):
+        return idleConf.GetExtensions(editor_only=True)
+
    def load_extension(self, name):
        """Import extension module `name`, instantiate its same-named class
        with this window, and hook up its menus and key bindings.

        Returns the extension instance, or None when the import fails.
        """
        try:
            mod = __import__(name, globals(), locals(), [])
        except ImportError:
            print "\nFailed to import extension: ", name
            return None
        # By convention the module defines a class of the same name.
        cls = getattr(mod, name)
        keydefs = idleConf.GetExtensionBindings(name)
        if hasattr(cls, "menudefs"):
            self.fill_menus(cls.menudefs, keydefs)
        ins = cls(self)
        self.extensions[name] = ins
        if keydefs:
            self.apply_bindings(keydefs)
            for vevent in keydefs.keys():
                # Map "<<event-name>>" to a handler method "event_name_event"
                # on the extension instance, if it defines one.
                methodname = vevent.replace("-", "_")
                while methodname[:1] == '<':
                    methodname = methodname[1:]
                while methodname[-1:] == '>':
                    methodname = methodname[:-1]
                methodname = methodname + "_event"
                if hasattr(ins, methodname):
                    self.text.bind(vevent, getattr(ins, methodname))
        return ins
+
+    def apply_bindings(self, keydefs=None):
+        if keydefs is None:
+            keydefs = self.Bindings.default_keydefs
+        text = self.text
+        text.keydefs = keydefs
+        for event, keylist in keydefs.items():
+            if keylist:
+                text.event_add(event, *keylist)
+
    def fill_menus(self, menudefs=None, keydefs=None):
        """Add appropriate entries to the menus and submenus

        Menus that are absent or None in self.menudict are ignored.
        Each entry is (label, virtual event); a leading '!' on the label
        marks a checkbutton entry, and a None entry means a separator.
        """
        if menudefs is None:
            menudefs = self.Bindings.menudefs
        if keydefs is None:
            keydefs = self.Bindings.default_keydefs
        menudict = self.menudict
        text = self.text
        for mname, entrylist in menudefs:
            menu = menudict.get(mname)
            if not menu:
                continue
            for entry in entrylist:
                if not entry:
                    menu.add_separator()
                else:
                    label, eventname = entry
                    checkbutton = (label[:1] == '!')
                    if checkbutton:
                        label = label[1:]
                    underline, label = prepstr(label)
                    accelerator = get_accelerator(keydefs, eventname)
                    # Default args freeze text/eventname per iteration
                    # (avoids the late-binding closure pitfall).
                    def command(text=text, eventname=eventname):
                        text.event_generate(eventname)
                    if checkbutton:
                        var = self.get_var_obj(eventname, BooleanVar)
                        menu.add_checkbutton(label=label, underline=underline,
                            command=command, accelerator=accelerator,
                            variable=var)
                    else:
                        menu.add_command(label=label, underline=underline,
                                         command=command,
                                         accelerator=accelerator)
+
+    def getvar(self, name):
+        """Return the value of the Tkinter variable registered as 'name'.
+
+        Raises NameError if the variable has never been created.
+        """
+        var = self.get_var_obj(name)
+        if var:
+            value = var.get()
+            return value
+        else:
+            raise NameError, name
+
+    def setvar(self, name, value, vartype=None):
+        """Set the Tkinter variable 'name' to value, creating it first
+        if vartype is given; otherwise raise NameError if it is absent."""
+        var = self.get_var_obj(name, vartype)
+        if var:
+            var.set(value)
+        else:
+            raise NameError, name
+
+    def get_var_obj(self, name, vartype=None):
+        """Return the Tkinter variable object for 'name', or None.
+
+        If it does not exist and vartype is given, create and cache one.
+        """
+        var = self.tkinter_vars.get(name)
+        if not var and vartype:
+            # create a Tkinter variable object with self.text as master:
+            self.tkinter_vars[name] = var = vartype(self.text)
+        return var
+
+    # Tk implementations of "virtual text methods" -- each platform
+    # reusing IDLE's support code needs to define these for its GUI's
+    # flavor of widget.
+
+    # Is character at text_index in a Python string?  Return 0 for
+    # "guaranteed no", true for anything else.  This info is expensive
+    # to compute ab initio, but is probably already known by the
+    # platform's colorizer.
+
+    def is_char_in_string(self, text_index):
+        """Return 0 only if the char at text_index is known not to be in
+        a Python string; any true value means "maybe" or "yes"."""
+        if self.color:
+            # Return true iff colorizer hasn't (re)gotten this far
+            # yet, or the character is tagged as being in a string
+            return self.text.tag_prevrange("TODO", text_index) or \
+                   "STRING" in self.text.tag_names(text_index)
+        else:
+            # The colorizer is missing: assume the worst
+            return 1
+
+    # If a selection is defined in the text widget, return (start,
+    # end) as Tkinter text indices, otherwise return (None, None)
+    def get_selection_indices(self):
+        """Return (first, last) Tk indices of the selection, or
+        (None, None) when there is no selection."""
+        try:
+            first = self.text.index("sel.first")
+            last = self.text.index("sel.last")
+            return first, last
+        except TclError:
+            # "sel" tag not set -> no selection
+            return None, None
+
+    # Return the text widget's current view of what a tab stop means
+    # (equivalent width in spaces).
+
+    def get_tabwidth(self):
+        """Return the text widget's tab width as an int (spaces per tab)."""
+        current = self.text['tabs'] or TK_TABWIDTH_DEFAULT
+        return int(current)
+
+    # Set the text widget's current view of what a tab stop means.
+
+    def set_tabwidth(self, newtabwidth):
+        """Set the widget's tab stops to newtabwidth character widths,
+        measured in pixels as the width of newtabwidth 'n' characters."""
+        text = self.text
+        if self.get_tabwidth() != newtabwidth:
+            pixels = text.tk.call("font", "measure", text["font"],
+                                  "-displayof", text.master,
+                                  "n" * newtabwidth)
+            text.configure(tabs=pixels)
+
+### begin autoindent code ###
+
+    # usetabs true  -> literal tab characters are used by indent and
+    #                  dedent cmds, possibly mixed with spaces if
+    #                  indentwidth is not a multiple of tabwidth
+    #         false -> tab characters are converted to spaces by indent
+    #                  and dedent cmds, and ditto TAB keystrokes
+    # indentwidth is the number of characters per logical indent level.
+    # tabwidth is the display width of a literal tab character.
+    # CAUTION:  telling Tk to use anything other than its default
+    # tab setting causes it to use an entirely different tabbing algorithm,
+    # treating tab stops as fixed distances from the left margin.
+    # Nobody expects this, so for now tabwidth should never be changed.
+    # Class-level defaults; config() may override them per instance.
+    usetabs = 0
+    indentwidth = 4
+    tabwidth = 8    # for IDLE use, must remain 8 until Tk is fixed
+
+    # If context_use_ps1 is true, parsing searches back for a ps1 line;
+    # else searches for a popular (if, def, ...) Python stmt.
+    context_use_ps1 = 0
+
+    # When searching backwards for a reliable place to begin parsing,
+    # first start num_context_lines[0] lines back, then
+    # num_context_lines[1] lines back if that didn't work, and so on.
+    # The last value should be huge (larger than the # of lines in a
+    # conceivable file).
+    # Making the initial values larger slows things down more often.
+    num_context_lines = 50, 500, 5000000
+
+    def config(self, **options):
+        """Set indentation options by keyword: usetabs, indentwidth,
+        tabwidth, context_use_ps1.  Raises KeyError on any other name."""
+        for key, value in options.items():
+            if key == 'usetabs':
+                self.usetabs = value
+            elif key == 'indentwidth':
+                self.indentwidth = value
+            elif key == 'tabwidth':
+                self.tabwidth = value
+            elif key == 'context_use_ps1':
+                self.context_use_ps1 = value
+            else:
+                raise KeyError, "bad option name: %r" % (key,)
+
+    # If ispythonsource and guess are true, guess a good value for
+    # indentwidth based on file content (if possible), and if
+    # indentwidth != tabwidth set usetabs false.
+    # In any case, adjust the Text widget's view of what a tab
+    # character means.
+
+    def set_indentation_params(self, ispythonsource, guess=1):
+        """Adjust indent settings; optionally guess indentwidth from a
+        Python source buffer, disabling tabs if it differs from tabwidth."""
+        if guess and ispythonsource:
+            i = self.guess_indent()
+            if 2 <= i <= 8:
+                self.indentwidth = i
+            if self.indentwidth != self.tabwidth:
+                self.usetabs = 0
+
+        # Always sync the Text widget's view of a tab stop.
+        self.set_tabwidth(self.tabwidth)
+
+    def smart_backspace_event(self, event):
+        """Handle Backspace: delete the selection if any, else delete one
+        char, or reduce leading whitespace to the previous virtual tab stop.
+
+        Returns "break" to stop Tk's default binding.
+        """
+        text = self.text
+        first, last = self.get_selection_indices()
+        if first and last:
+            text.delete(first, last)
+            text.mark_set("insert", first)
+            return "break"
+        # Delete whitespace left, until hitting a real char or closest
+        # preceding virtual tab stop.
+        chars = text.get("insert linestart", "insert")
+        if chars == '':
+            if text.compare("insert", ">", "1.0"):
+                # easy: delete preceding newline
+                text.delete("insert-1c")
+            else:
+                text.bell()     # at start of buffer
+            return "break"
+        if  chars[-1] not in " \t":
+            # easy: delete preceding real char
+            text.delete("insert-1c")
+            return "break"
+        # Ick.  It may require *inserting* spaces if we back up over a
+        # tab character!  This is written to be clear, not fast.
+        tabwidth = self.tabwidth
+        have = len(chars.expandtabs(tabwidth))
+        assert have > 0
+        want = ((have - 1) // self.indentwidth) * self.indentwidth
+        # Debug prompt is multilined....
+        # NOTE(review): assumes sys.ps1 exists (interactive/shell usage);
+        # a plain editor process may not define it -- confirm.
+        last_line_of_prompt = sys.ps1.split('\n')[-1]
+        ncharsdeleted = 0
+        while 1:
+            if chars == last_line_of_prompt:
+                break
+            chars = chars[:-1]
+            ncharsdeleted = ncharsdeleted + 1
+            have = len(chars.expandtabs(tabwidth))
+            if have <= want or chars[-1] not in " \t":
+                break
+        text.undo_block_start()
+        text.delete("insert-%dc" % ncharsdeleted, "insert")
+        if have < want:
+            # Backed over a tab past the target stop: pad back with spaces.
+            text.insert("insert", ' ' * (want - have))
+        text.undo_block_stop()
+        return "break"
+
+    def smart_indent_event(self, event):
+        """Handle Tab: indent the region for a multiline selection,
+        else replace an intraline selection and indent one level."""
+        # if intraline selection:
+        #     delete it
+        # elif multiline selection:
+        #     do indent-region & return
+        # indent one level
+        text = self.text
+        first, last = self.get_selection_indices()
+        text.undo_block_start()
+        try:
+            if first and last:
+                if index2line(first) != index2line(last):
+                    return self.indent_region_event(event)
+                text.delete(first, last)
+                text.mark_set("insert", first)
+            prefix = text.get("insert linestart", "insert")
+            raw, effective = classifyws(prefix, self.tabwidth)
+            if raw == len(prefix):
+                # only whitespace to the left
+                self.reindent_to(effective + self.indentwidth)
+            else:
+                if self.usetabs:
+                    pad = '\t'
+                else:
+                    effective = len(prefix.expandtabs(self.tabwidth))
+                    n = self.indentwidth
+                    pad = ' ' * (n - effective % n)
+                text.insert("insert", pad)
+            text.see("insert")
+            return "break"
+        finally:
+            text.undo_block_stop()
+
+    def newline_and_indent_event(self, event):
+        """Handle Return: insert a newline and auto-indent the new line.
+
+        Strips trailing whitespace before and after the insert point,
+        then uses PyParse to pick an indent appropriate to continuation
+        lines, open brackets, backslash continuations, or a new stmt.
+        Returns "break" to suppress Tk's default newline handling.
+        """
+        text = self.text
+        first, last = self.get_selection_indices()
+        text.undo_block_start()
+        try:
+            if first and last:
+                text.delete(first, last)
+                text.mark_set("insert", first)
+            line = text.get("insert linestart", "insert")
+            i, n = 0, len(line)
+            while i < n and line[i] in " \t":
+                i = i+1
+            if i == n:
+                # the cursor is in or at leading indentation in a continuation
+                # line; just inject an empty line at the start
+                text.insert("insert linestart", '\n')
+                return "break"
+            indent = line[:i]
+            # strip whitespace before insert point unless it's in the prompt
+            i = 0
+            # NOTE(review): assumes sys.ps1 exists -- see smart_backspace_event.
+            last_line_of_prompt = sys.ps1.split('\n')[-1]
+            while line and line[-1] in " \t" and line != last_line_of_prompt:
+                line = line[:-1]
+                i = i+1
+            if i:
+                text.delete("insert - %d chars" % i, "insert")
+            # strip whitespace after insert point
+            while text.get("insert") in " \t":
+                text.delete("insert")
+            # start new line
+            text.insert("insert", '\n')
+
+            # adjust indentation for continuations and block
+            # open/close first need to find the last stmt
+            lno = index2line(text.index('insert'))
+            y = PyParse.Parser(self.indentwidth, self.tabwidth)
+            # Widen the context window until a good parse start is found.
+            for context in self.num_context_lines:
+                startat = max(lno - context, 1)
+                startatindex = repr(startat) + ".0"
+                rawtext = text.get(startatindex, "insert")
+                y.set_str(rawtext)
+                bod = y.find_good_parse_start(
+                          self.context_use_ps1,
+                          self._build_char_in_string_func(startatindex))
+                if bod is not None or startat == 1:
+                    break
+            y.set_lo(bod or 0)
+            c = y.get_continuation_type()
+            if c != PyParse.C_NONE:
+                # The current stmt hasn't ended yet.
+                if c == PyParse.C_STRING:
+                    # inside a string; just mimic the current indent
+                    text.insert("insert", indent)
+                elif c == PyParse.C_BRACKET:
+                    # line up with the first (if any) element of the
+                    # last open bracket structure; else indent one
+                    # level beyond the indent of the line with the
+                    # last open bracket
+                    self.reindent_to(y.compute_bracket_indent())
+                elif c == PyParse.C_BACKSLASH:
+                    # if more than one line in this stmt already, just
+                    # mimic the current indent; else if initial line
+                    # has a start on an assignment stmt, indent to
+                    # beyond leftmost =; else to beyond first chunk of
+                    # non-whitespace on initial line
+                    if y.get_num_lines_in_stmt() > 1:
+                        text.insert("insert", indent)
+                    else:
+                        self.reindent_to(y.compute_backslash_indent())
+                else:
+                    assert 0, "bogus continuation type %r" % (c,)
+                return "break"
+
+            # This line starts a brand new stmt; indent relative to
+            # indentation of initial line of closest preceding
+            # interesting stmt.
+            indent = y.get_base_indent_string()
+            text.insert("insert", indent)
+            if y.is_block_opener():
+                self.smart_indent_event(event)
+            elif indent and y.is_block_closer():
+                self.smart_backspace_event(event)
+            return "break"
+        finally:
+            text.see("insert")
+            text.undo_block_stop()
+
+    # Our editwin provides a is_char_in_string function that works
+    # with a Tk text index, but PyParse only knows about offsets into
+    # a string. This builds a function for PyParse that accepts an
+    # offset.
+
+    def _build_char_in_string_func(self, startindex):
+        """Return a function mapping a string offset (relative to
+        startindex) to is_char_in_string's answer, for use by PyParse."""
+        def inner(offset, _startindex=startindex,
+                  _icis=self.is_char_in_string):
+            # Translate offset -> Tk index "startindex +<offset>c".
+            return _icis(_startindex + "+%dc" % offset)
+        return inner
+
+    def indent_region_event(self, event):
+        """Indent every non-empty line of the region by one indent level."""
+        head, tail, chars, lines = self.get_region()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if line:
+                raw, effective = classifyws(line, self.tabwidth)
+                effective = effective + self.indentwidth
+                lines[pos] = self._make_blanks(effective) + line[raw:]
+        self.set_region(head, tail, chars, lines)
+        return "break"
+
+    def dedent_region_event(self, event):
+        """Dedent every non-empty line of the region by one indent level,
+        never past column 0."""
+        head, tail, chars, lines = self.get_region()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if line:
+                raw, effective = classifyws(line, self.tabwidth)
+                effective = max(effective - self.indentwidth, 0)
+                lines[pos] = self._make_blanks(effective) + line[raw:]
+        self.set_region(head, tail, chars, lines)
+        return "break"
+
+    def comment_region_event(self, event):
+        """Prefix each line of the region with '##'."""
+        head, tail, chars, lines = self.get_region()
+        # len(lines)-1: chars ends with a newline, so the final element of
+        # lines is the empty string after it -- leave that one alone.
+        for pos in range(len(lines) - 1):
+            line = lines[pos]
+            lines[pos] = '##' + line
+        self.set_region(head, tail, chars, lines)
+
+    def uncomment_region_event(self, event):
+        """Strip a leading '##' (or single '#') from each region line."""
+        head, tail, chars, lines = self.get_region()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if not line:
+                continue
+            if line[:2] == '##':
+                line = line[2:]
+            elif line[:1] == '#':
+                line = line[1:]
+            lines[pos] = line
+        self.set_region(head, tail, chars, lines)
+
+    def tabify_region_event(self, event):
+        """Convert leading whitespace of region lines to tabs (plus spaces
+        for the remainder), using a tab width asked from the user."""
+        head, tail, chars, lines = self.get_region()
+        tabwidth = self._asktabwidth()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if line:
+                raw, effective = classifyws(line, tabwidth)
+                ntabs, nspaces = divmod(effective, tabwidth)
+                lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:]
+        self.set_region(head, tail, chars, lines)
+
+    def untabify_region_event(self, event):
+        """Expand all tabs in the region to spaces, using a tab width
+        asked from the user."""
+        head, tail, chars, lines = self.get_region()
+        tabwidth = self._asktabwidth()
+        for pos in range(len(lines)):
+            lines[pos] = lines[pos].expandtabs(tabwidth)
+        self.set_region(head, tail, chars, lines)
+
+    def toggle_tabs_event(self, event):
+        """Flip self.usetabs after asking the user for confirmation."""
+        if self.askyesno(
+              "Toggle tabs",
+              "Turn tabs " + ("on", "off")[self.usetabs] + "?",
+              parent=self.text):
+            self.usetabs = not self.usetabs
+        return "break"
+
+    # XXX this isn't bound to anything -- see class tabwidth comments
+    def change_tabwidth_event(self, event):
+        """Ask the user for a new tab width and apply it (no guessing)."""
+        new = self._asktabwidth()
+        if new != self.tabwidth:
+            self.tabwidth = new
+            self.set_indentation_params(0, guess=0)
+        return "break"
+
+    def change_indentwidth_event(self, event):
+        """Ask the user for a new indent width (2-16) and apply it."""
+        new = self.askinteger(
+                  "Indent width",
+                  "New indent width (2-16)",
+                  parent=self.text,
+                  initialvalue=self.indentwidth,
+                  minvalue=2,
+                  maxvalue=16)
+        if new and new != self.indentwidth:
+            self.indentwidth = new
+        return "break"
+
+    def get_region(self):
+        """Return (head, tail, chars, lines) covering whole lines of the
+        selection, or the current line when there is no selection.
+
+        chars is the raw text from head to tail (ends with a newline),
+        so lines always has a trailing '' element.
+        """
+        text = self.text
+        first, last = self.get_selection_indices()
+        if first and last:
+            head = text.index(first + " linestart")
+            tail = text.index(last + "-1c lineend +1c")
+        else:
+            head = text.index("insert linestart")
+            tail = text.index("insert lineend +1c")
+        chars = text.get(head, tail)
+        lines = chars.split("\n")
+        return head, tail, chars, lines
+
+    def set_region(self, head, tail, chars, lines):
+        """Replace the text between head and tail with lines (joined by
+        newlines), select the result, and bell if nothing changed."""
+        text = self.text
+        newchars = "\n".join(lines)
+        if newchars == chars:
+            text.bell()
+            return
+        text.tag_remove("sel", "1.0", "end")
+        text.mark_set("insert", head)
+        text.undo_block_start()
+        text.delete(head, tail)
+        text.insert(head, newchars)
+        text.undo_block_stop()
+        text.tag_add("sel", head, "insert")
+
+    # Make string that displays as n leading blanks.
+
+    def _make_blanks(self, n):
+        """Return whitespace displaying as n leading blanks: tabs+spaces
+        if self.usetabs, else spaces only."""
+        if self.usetabs:
+            ntabs, nspaces = divmod(n, self.tabwidth)
+            return '\t' * ntabs + ' ' * nspaces
+        else:
+            return ' ' * n
+
+    # Delete from beginning of line to insert point, then reinsert
+    # column logical (meaning use tabs if appropriate) spaces.
+
+    def reindent_to(self, column):
+        """Delete from line start to the insert point, then insert
+        whitespace displaying as 'column' blanks (one undo block)."""
+        text = self.text
+        text.undo_block_start()
+        if text.compare("insert linestart", "!=", "insert"):
+            text.delete("insert linestart", "insert")
+        if column:
+            text.insert("insert", self._make_blanks(column))
+        text.undo_block_stop()
+
+    def _asktabwidth(self):
+        """Ask the user for a tab width (2-16); fall back to
+        self.tabwidth if the dialog is cancelled."""
+        return self.askinteger(
+            "Tab width",
+            "Spaces per tab? (2-16)",
+            parent=self.text,
+            initialvalue=self.indentwidth,
+            minvalue=2,
+            maxvalue=16) or self.tabwidth
+
+    # Guess indentwidth from text content.
+    # Return guessed indentwidth.  This should not be believed unless
+    # it's in a reasonable range (e.g., it will be 0 if no indented
+    # blocks are found).
+
+    def guess_indent(self):
+        """Guess indentwidth from the buffer: the indent difference between
+        the first block opener and its first indented stmt (0 if none)."""
+        opener, indented = IndentSearcher(self.text, self.tabwidth).run()
+        if opener and indented:
+            raw, indentsmall = classifyws(opener, self.tabwidth)
+            raw, indentlarge = classifyws(indented, self.tabwidth)
+        else:
+            indentsmall = indentlarge = 0
+        return indentlarge - indentsmall
+
+# "line.col" -> line, as an int
+def index2line(index):
+    """Convert a Tk "line.col" index string to its integer line number."""
+    return int(float(index))
+
+# Look at the leading whitespace in s.
+# Return pair (# of leading ws characters,
+#              effective # of leading blanks after expanding
+#              tabs to width tabwidth)
+
+def classifyws(s, tabwidth):
+    """Return (raw, effective) for the leading whitespace of s:
+    raw is the character count, effective the displayed blank count
+    after expanding tabs to multiples of tabwidth."""
+    raw = effective = 0
+    for ch in s:
+        if ch == ' ':
+            raw = raw + 1
+            effective = effective + 1
+        elif ch == '\t':
+            raw = raw + 1
+            # advance to the next multiple of tabwidth
+            effective = (effective // tabwidth + 1) * tabwidth
+        else:
+            break
+    return raw, effective
+
+# Keep a private alias and delete the public name so 'tokenize' does not
+# appear in this module's namespace.
+import tokenize
+_tokenize = tokenize
+del tokenize
+
+class IndentSearcher:
+    """Scan a Text widget for the first block opener and the first
+    indented statement following it, via the tokenize module."""
+
+    # .run() chews over the Text widget, looking for a block opener
+    # and the stmt following it.  Returns a pair,
+    #     (line containing block opener, line containing stmt)
+    # Either or both may be None.
+
+    def __init__(self, text, tabwidth):
+        self.text = text
+        self.tabwidth = tabwidth
+        self.i = self.finished = 0
+        self.blkopenline = self.indentedline = None
+
+    def readline(self):
+        """Feed the tokenizer one widget line at a time ('' at end)."""
+        if self.finished:
+            return ""
+        i = self.i = self.i + 1
+        mark = repr(i) + ".0"
+        if self.text.compare(mark, ">=", "end"):
+            return ""
+        return self.text.get(mark, mark + " lineend+1c")
+
+    def tokeneater(self, type, token, start, end, line,
+                   INDENT=_tokenize.INDENT,
+                   NAME=_tokenize.NAME,
+                   OPENERS=('class', 'def', 'for', 'if', 'try', 'while')):
+        """Record the opener line, then the first INDENT line after it."""
+        if self.finished:
+            pass
+        elif type == NAME and token in OPENERS:
+            self.blkopenline = line
+        elif type == INDENT and self.blkopenline:
+            self.indentedline = line
+            self.finished = 1
+
+    def run(self):
+        """Tokenize the widget text; return (opener_line, indented_line),
+        either of which may be None."""
+        # Temporarily impose our tab width on the tokenize module.
+        save_tabsize = _tokenize.tabsize
+        _tokenize.tabsize = self.tabwidth
+        try:
+            try:
+                _tokenize.tokenize(self.readline, self.tokeneater)
+            except _tokenize.TokenError:
+                # since we cut off the tokenizer early, we can trigger
+                # spurious errors
+                pass
+        finally:
+            _tokenize.tabsize = save_tabsize
+        return self.blkopenline, self.indentedline
+
+### end autoindent code ###
+
+def prepstr(s):
+    # Helper to extract the underscore from a string, e.g.
+    # prepstr("Co_py") returns (2, "Copy").
+    # Returns (-1, s) unchanged when there is no underscore.
+    i = s.find('_')
+    if i >= 0:
+        s = s[:i] + s[i+1:]
+    return i, s
+
+
+# Map Tk keysym names to the characters shown in menu accelerator text.
+keynames = {
+ 'bracketleft': '[',
+ 'bracketright': ']',
+ 'slash': '/',
+}
+
+def get_accelerator(keydefs, eventname):
+    """Return display text (e.g. "Ctrl+C") for the first key sequence
+    bound to eventname in keydefs, or "" if none."""
+    keylist = keydefs.get(eventname)
+    if not keylist:
+        return ""
+    s = keylist[0]
+    # Rewrite the Tk sequence "<Control-Key-c>" into menu-style text.
+    s = re.sub(r"-[a-z]\b", lambda m: m.group().upper(), s)
+    s = re.sub(r"\b\w+\b", lambda m: keynames.get(m.group(), m.group()), s)
+    s = re.sub("Key-", "", s)
+    s = re.sub("Cancel","Ctrl-Break",s)   # dscherer@cmu.edu
+    s = re.sub("Control-", "Ctrl-", s)
+    s = re.sub("-", "+", s)
+    s = re.sub("><", " ", s)
+    s = re.sub("<", "", s)
+    s = re.sub(">", "", s)
+    return s
+
+
+def fixwordbreaks(root):
+    """Make Tk's word operations treat a word as a Python identifier."""
+    # Make sure that Tk's double-click and next/previous word
+    # operations use our definition of a word (i.e. an identifier)
+    tk = root.tk
+    tk.call('tcl_wordBreakAfter', 'a b', 0) # make sure word.tcl is loaded
+    tk.call('set', 'tcl_wordchars', '[a-zA-Z0-9_]')
+    tk.call('set', 'tcl_nonwordchars', '[^a-zA-Z0-9_]')
+
+
+def test():
+    """Open one EditorWindow (on sys.argv[1] if given) for manual testing."""
+    root = Tk()
+    fixwordbreaks(root)
+    root.withdraw()
+    if sys.argv[1:]:
+        filename = sys.argv[1]
+    else:
+        filename = None
+    edit = EditorWindow(root=root, filename=filename)
+    edit.set_close_hook(root.quit)
+    root.mainloop()
+    root.destroy()
+    root.destroy()
+
+# Run standalone for a quick manual test.
+if __name__ == '__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/FileList.py b/depot_tools/release/win/python_24/Lib/idlelib/FileList.py
new file mode 100644
index 0000000..4b579010
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/FileList.py
@@ -0,0 +1,124 @@
+import os
+from Tkinter import *
+import tkMessageBox
+
+
+class FileList:
+    """Track the set of open EditorWindows, keyed by canonical filename.
+
+    self.dict maps os.path.normcase'd filenames to edit windows;
+    self.inversedict maps windows back to their key (or None for
+    unsaved/untitled windows).
+    """
+
+    from EditorWindow import EditorWindow  # class variable, may be overridden
+                                           # e.g. by PyShellFileList
+
+    def __init__(self, root):
+        self.root = root
+        self.dict = {}
+        self.inversedict = {}
+        self.vars = {} # For EditorWindow.getrawvar (shared Tcl variables)
+
+    def open(self, filename, action=None):
+        """Open filename in an editor window and return the window.
+
+        Reuses (and raises) an existing window for the same file.  If
+        'action' is given, call action(filename) instead of creating a
+        window.  Returns None (after an error dialog) for directories.
+        """
+        assert filename
+        filename = self.canonize(filename)
+        if os.path.isdir(filename):
+            # This can happen when bad filename is passed on command line:
+            tkMessageBox.showerror(
+                "File Error",
+                "%r is a directory." % (filename,),
+                master=self.root)
+            return None
+        key = os.path.normcase(filename)
+        if self.dict.has_key(key):
+            edit = self.dict[key]
+            edit.top.wakeup()
+            return edit
+        if action:
+            # Don't create window, perform 'action', e.g. open in same window
+            return action(filename)
+        else:
+            return self.EditorWindow(self, filename, key)
+
+    def gotofileline(self, filename, lineno=None):
+        """Open filename and jump to lineno if both succeed."""
+        edit = self.open(filename)
+        if edit is not None and lineno is not None:
+            edit.gotoline(lineno)
+
+    def new(self, filename=None):
+        """Create and return a new editor window (optionally on filename)."""
+        return self.EditorWindow(self, filename)
+
+    def close_all_callback(self, event):
+        """Close every window; stop if any close is cancelled."""
+        for edit in self.inversedict.keys():
+            reply = edit.close()
+            if reply == "cancel":
+                break
+        return "break"
+
+    def close_edit(self, edit):
+        """Forget a closed window; quit the app when none remain."""
+        try:
+            key = self.inversedict[edit]
+        except KeyError:
+            print "Don't know this EditorWindow object.  (close)"
+            return
+        if key:
+            del self.dict[key]
+        del self.inversedict[edit]
+        if not self.inversedict:
+            self.root.quit()
+
+    def filename_changed_edit(self, edit):
+        """Re-key a window after its filename changed (e.g. Save As),
+        warning when two windows end up on the same file."""
+        edit.saved_change_hook()
+        try:
+            key = self.inversedict[edit]
+        except KeyError:
+            print "Don't know this EditorWindow object.  (rename)"
+            return
+        filename = edit.io.filename
+        if not filename:
+            # Window became untitled: drop its old key.
+            if key:
+                del self.dict[key]
+            self.inversedict[edit] = None
+            return
+        filename = self.canonize(filename)
+        newkey = os.path.normcase(filename)
+        if newkey == key:
+            return
+        if self.dict.has_key(newkey):
+            conflict = self.dict[newkey]
+            self.inversedict[conflict] = None
+            tkMessageBox.showerror(
+                "Name Conflict",
+                "You now have multiple edit windows open for %r" % (filename,),
+                master=self.root)
+        self.dict[newkey] = edit
+        self.inversedict[edit] = newkey
+        if key:
+            try:
+                del self.dict[key]
+            except KeyError:
+                pass
+
+    def canonize(self, filename):
+        """Return filename as a normalized absolute path (best effort:
+        left relative if the cwd cannot be determined)."""
+        if not os.path.isabs(filename):
+            try:
+                pwd = os.getcwd()
+            except os.error:
+                pass
+            else:
+                filename = os.path.join(pwd, filename)
+        return os.path.normpath(filename)
+
+
+def _test():
+    """Open the files named on the command line (or one new window)
+    in a FileList, for manual testing."""
+    from EditorWindow import fixwordbreaks
+    import sys
+    root = Tk()
+    fixwordbreaks(root)
+    root.withdraw()
+    flist = FileList(root)
+    if sys.argv[1:]:
+        for filename in sys.argv[1:]:
+            flist.open(filename)
+    else:
+        flist.new()
+    if flist.inversedict:
+        root.mainloop()
+
+# Run standalone for a quick manual test.
+if __name__ == '__main__':
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/FormatParagraph.py b/depot_tools/release/win/python_24/Lib/idlelib/FormatParagraph.py
new file mode 100644
index 0000000..ed9f28d1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/FormatParagraph.py
@@ -0,0 +1,148 @@
+# Extension to format a paragraph
+
+# Does basic, standard text formatting, and also understands Python
+# comment blocks.  Thus, for editing Python source code, this
+# extension is really only suitable for reformatting these comment
+# blocks or triple-quoted strings.
+
+# Known problems with comment reformatting:
+# * If there is a selection marked, and the first line of the
+#   selection is not complete, the block will probably not be detected
+#   as comments, and will have the normal "text formatting" rules
+#   applied.
+# * If a comment block has leading whitespace that mixes tabs and
+#   spaces, they will not be considered part of the same block.
+# * Fancy comments, like this bulleted list, aren't handled :-)
+
+import re
+from configHandler import idleConf
+
+class FormatParagraph:
+    """IDLE extension: reflow the selection or surrounding paragraph
+    to the configured width, preserving a '#' comment prefix."""
+
+    menudefs = [
+        ('format', [   # /s/edit/format   dscherer@cmu.edu
+            ('Format Paragraph', '<<format-paragraph>>'),
+         ])
+    ]
+
+    def __init__(self, editwin):
+        self.editwin = editwin
+
+    def close(self):
+        # Drop the editor reference so the window can be collected.
+        self.editwin = None
+
+    def format_paragraph_event(self, event):
+        """Reformat the selection (or enclosing paragraph) in place."""
+        maxformatwidth = int(idleConf.GetOption('main','FormatParagraph','paragraph'))
+        text = self.editwin.text
+        first, last = self.editwin.get_selection_indices()
+        if first and last:
+            data = text.get(first, last)
+            comment_header = ''
+        else:
+            first, last, comment_header, data = \
+                    find_paragraph(text, text.index("insert"))
+        if comment_header:
+            # Reformat the comment lines - convert to text sans header.
+            lines = data.split("\n")
+            lines = map(lambda st, l=len(comment_header): st[l:], lines)
+            data = "\n".join(lines)
+            # Reformat to maxformatwidth chars or a 20 char width, whichever is greater.
+            format_width = max(maxformatwidth - len(comment_header), 20)
+            newdata = reformat_paragraph(data, format_width)
+            # re-split and re-insert the comment header.
+            newdata = newdata.split("\n")
+            # If the block ends in a \n, we don't want the comment
+            # prefix inserted after it. (I'm not sure it makes sense to
+            # reformat a comment block that isn't made of complete
+            # lines, but whatever!)  Can't think of a clean solution,
+            # so we hack away
+            block_suffix = ""
+            if not newdata[-1]:
+                block_suffix = "\n"
+                newdata = newdata[:-1]
+            builder = lambda item, prefix=comment_header: prefix+item
+            newdata = '\n'.join(map(builder, newdata)) + block_suffix
+        else:
+            # Just a normal text format
+            newdata = reformat_paragraph(data, maxformatwidth)
+        text.tag_remove("sel", "1.0", "end")
+        if newdata != data:
+            text.mark_set("insert", first)
+            text.undo_block_start()
+            text.delete(first, last)
+            text.insert(first, newdata)
+            text.undo_block_stop()
+        else:
+            text.mark_set("insert", last)
+        text.see("insert")
+
+def find_paragraph(text, mark):
+    """Locate the paragraph containing Tk index 'mark'.
+
+    Returns (first, last, comment_header, data): line-start indices of
+    the paragraph bounds, the shared leading '#' prefix (may be ''),
+    and the paragraph's text.
+    """
+    lineno, col = map(int, mark.split("."))
+    line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    # Skip blank lines forward to the paragraph's first line.
+    while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line):
+        lineno = lineno + 1
+        line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    first_lineno = lineno
+    comment_header = get_comment_header(line)
+    comment_header_len = len(comment_header)
+    # Scan forward while lines share the header and are non-blank.
+    while get_comment_header(line)==comment_header and \
+              not is_all_white(line[comment_header_len:]):
+        lineno = lineno + 1
+        line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    last = "%d.0" % lineno
+    # Search back to beginning of paragraph
+    lineno = first_lineno - 1
+    line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    while lineno > 0 and \
+              get_comment_header(line)==comment_header and \
+              not is_all_white(line[comment_header_len:]):
+        lineno = lineno - 1
+        line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    first = "%d.0" % (lineno+1)
+    return first, last, comment_header, text.get(first, last)
+
+def reformat_paragraph(data, limit):
+    """Rewrap the first paragraph of 'data' to at most 'limit' display
+    columns, keeping the first line's indent for line 1 and the second
+    line's indent for continuation lines.  Later paragraphs pass through.
+    """
+    lines = data.split("\n")
+    i = 0
+    n = len(lines)
+    while i < n and is_all_white(lines[i]):
+        i = i+1
+    if i >= n:
+        # Nothing but blank lines: return unchanged.
+        return data
+    indent1 = get_indent(lines[i])
+    if i+1 < n and not is_all_white(lines[i+1]):
+        indent2 = get_indent(lines[i+1])
+    else:
+        indent2 = indent1
+    new = lines[:i]
+    partial = indent1
+    while i < n and not is_all_white(lines[i]):
+        # XXX Should take double space after period (etc.) into account
+        words = re.split("(\s+)", lines[i])
+        # Even indices are words, odd indices the whitespace between them.
+        for j in range(0, len(words), 2):
+            word = words[j]
+            if not word:
+                continue # Can happen when line ends in whitespace
+            if len((partial + word).expandtabs()) > limit and \
+               partial != indent1:
+                new.append(partial.rstrip())
+                partial = indent2
+            partial = partial + word + " "
+            if j+1 < len(words) and words[j+1] != " ":
+                partial = partial + " "
+        i = i+1
+    new.append(partial.rstrip())
+    # XXX Should reformat remaining paragraphs as well
+    new.extend(lines[i:])
+    return "\n".join(new)
+
+def is_all_white(line):
+    """Return True if line is empty or whitespace only."""
+    return re.match(r"^\s*$", line) is not None
+
+def get_indent(line):
+    """Return the leading whitespace of line."""
+    return re.match(r"^(\s*)", line).group()
+
+def get_comment_header(line):
+    """Return line's leading whitespace-plus-'#' prefix ('' if none)."""
+    m = re.match(r"^(\s*#*)", line)
+    if m is None: return ""
+    return m.group(1)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/GrepDialog.py b/depot_tools/release/win/python_24/Lib/idlelib/GrepDialog.py
new file mode 100644
index 0000000..ab136bc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/GrepDialog.py
@@ -0,0 +1,133 @@
+import os
+import fnmatch
+import sys
+from Tkinter import *
+import SearchEngine
+from SearchDialogBase import SearchDialogBase
+
def grep(text, io=None, flist=None):
    """Open the find-in-files dialog, seeded with the current selection."""
    root = text._root()
    engine = SearchEngine.get(root)
    # A single dialog instance is cached on the per-root search engine.
    dialog = getattr(engine, "_grepdialog", None)
    if dialog is None:
        dialog = GrepDialog(root, engine, flist)
        engine._grepdialog = dialog
    selection = text.get("sel.first", "sel.last")
    dialog.open(text, selection, io)
+
class GrepDialog(SearchDialogBase):
    """Dialog for searching a regular expression across multiple files.

    Hits are printed to an OutputWindow as "filename: lineno: line" so
    the user can right-click a hit to open that location.
    """

    title = "Find in Files Dialog"
    icon = "Grep"
    needwrapbutton = 0  # no "wrap around" option for a file search

    def __init__(self, root, engine, flist):
        """root: Tk root; engine: shared SearchEngine; flist: file list
        used later to create the results OutputWindow."""
        SearchDialogBase.__init__(self, root, engine)
        self.flist = flist
        self.globvar = StringVar(root)  # glob pattern of files to search
        self.recvar = BooleanVar(root)  # recurse into subdirectories?

    def open(self, text, searchphrase, io=None):
        """Show the dialog, seeding the file glob from io's filename.

        Defaults to "*.py" (in the file's directory) when the file has
        no extension or there is no file.
        """
        SearchDialogBase.open(self, text, searchphrase)
        if io:
            path = io.filename or ""
        else:
            path = ""
        dir, base = os.path.split(path)
        head, tail = os.path.splitext(base)
        if not tail:
            tail = ".py"
        self.globvar.set(os.path.join(dir, "*" + tail))

    def create_entries(self):
        """Add the "In files:" glob entry below the base dialog's entries."""
        SearchDialogBase.create_entries(self)
        self.globent = self.make_entry("In files:", self.globvar)

    def create_other_buttons(self):
        """Add the recurse-into-subdirectories checkbox, on by default."""
        f = self.make_frame()

        btn = Checkbutton(f, anchor="w",
                variable=self.recvar,
                text="Recurse down subdirectories")
        btn.pack(side="top", fill="both")
        btn.select()

    def create_command_buttons(self):
        """Add the "Search Files" button as the default command."""
        SearchDialogBase.create_command_buttons(self)
        self.make_button("Search Files", self.default_command, 1)

    def default_command(self, event=None):
        """Validate inputs, then run the search with stdout redirected
        to a fresh OutputWindow; the real stdout is always restored."""
        prog = self.engine.getprog()
        if not prog:
            return
        path = self.globvar.get()
        if not path:
            self.top.bell()
            return
        from OutputWindow import OutputWindow
        save = sys.stdout
        try:
            sys.stdout = OutputWindow(self.flist)
            self.grep_it(prog, path)
        finally:
            sys.stdout = save

    def grep_it(self, prog, path):
        """Search every file matching path's glob for the compiled regex.

        Prints one line per hit plus a summary.  Files are read with
        readlines(100000) to bound memory use on large files.
        """
        dir, base = os.path.split(path)
        list = self.findfiles(dir, base, self.recvar.get())
        list.sort()
        # Hide the dialog before the (possibly slow) search starts.
        self.close()
        pat = self.engine.getpat()
        print "Searching %r in %s ..." % (pat, path)
        hits = 0
        for fn in list:
            try:
                f = open(fn)
            except IOError, msg:
                print msg
                continue
            # NOTE(review): f is never closed explicitly; this relies
            # on CPython reference counting.
            lineno = 0
            while 1:
                block = f.readlines(100000)
                if not block:
                    break
                for line in block:
                    lineno = lineno + 1
                    if line[-1:] == '\n':
                        line = line[:-1]
                    if prog.search(line):
                        sys.stdout.write("%s: %s: %s\n" % (fn, lineno, line))
                        hits = hits + 1
        if hits:
            if hits == 1:
                s = ""
            else:
                s = "s"
            print "Found", hits, "hit%s." % s
            print "(Hint: right-click to open locations.)"
        else:
            print "No hits."

    def findfiles(self, dir, base, rec):
        """Return the files under dir whose names match the glob base,
        recursing into subdirectories when rec is true."""
        try:
            names = os.listdir(dir or os.curdir)
        except os.error, msg:
            print msg
            return []
        list = []
        subdirs = []
        for name in names:
            fn = os.path.join(dir, name)
            if os.path.isdir(fn):
                subdirs.append(fn)
            else:
                if fnmatch.fnmatch(name, base):
                    list.append(fn)
        if rec:
            for subdir in subdirs:
                list.extend(self.findfiles(subdir, base, rec))
        return list

    def close(self, event=None):
        """Hide the dialog; it is cached and reused, not destroyed."""
        if self.top:
            self.top.grab_release()
            self.top.withdraw()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/HISTORY.txt b/depot_tools/release/win/python_24/Lib/idlelib/HISTORY.txt
new file mode 100644
index 0000000..c0faaad
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/HISTORY.txt
@@ -0,0 +1,296 @@
+IDLE History
+============
+
+This file contains the release messages for previous IDLE releases.
+As you read on you go back to the dark ages of IDLE's history.
+
+
+What's New in IDLEfork 0.8.1?
+=============================
+
+*Release date: 22-Jul-2001*
+
+- New tarball released as a result of the 'revitalisation' of the IDLEfork
+  project. 
+
+- This release requires python 2.1 or better. Compatibility with earlier
+  versions of python (especially ancient ones like 1.5x) is no longer a
+  priority in IDLEfork development.
+
+- This release is based on a merging of the earlier IDLE fork work with current
+  cvs IDLE (post IDLE version 0.8), with some minor additional coding by Kurt
+  B. Kaiser and Stephen M. Gava.
+
+- This release is basically functional but also contains some known breakages,
+  particularly with running things from the shell window. Also the debugger is
+  not working, but I believe this was the case with the previous IDLE fork
+  release (0.7.1) as well.
+
+- This release is being made now to mark the point at which IDLEfork is 
+  launching into a new stage of development. 
+
+- IDLEfork CVS will now be branched to enable further development and
+  exploration of the two "execution in a remote process" patches submitted by
+  David Scherer (David's is currently in IDLEfork) and GvR, while stabilisation
+  and development of less heavyweight improvements (like user customisation)
+  can continue on the trunk.
+
+
+What's New in IDLEfork 0.7.1?
+==============================
+
+*Release date: 15-Aug-2000*
+
+- First project tarball released.
+
+- This was the first release of IDLE fork, which at this stage was a
+  combination of IDLE 0.5 and the VPython idle fork, with additional changes
+  coded by David Scherer, Peter Schneider-Kamp and Nicholas Riley.
+
+
+
+IDLEfork 0.7.1 - 29 May 2000
+-----------------------------
+
+   David Scherer  <dscherer@cmu.edu>
+
+- This is a modification of the CVS version of IDLE 0.5, updated as of
+  2000-03-09.  It is alpha software and might be unstable.  If it breaks, you
+  get to keep both pieces.
+
+- If you have problems or suggestions, you should either contact me or post to
+  the list at http://www.python.org/mailman/listinfo/idle-dev (making it clear
+  that you are using this modified version of IDLE).
+
+- Changes:
+
+  - The ExecBinding module, a replacement for ScriptBinding, executes programs
+    in a separate process, piping standard I/O through an RPC mechanism to an
+    OnDemandOutputWindow in IDLE.  It supports executing unnamed programs
+    (through a temporary file).  It does not yet support debugging.
+
+  - When running programs with ExecBinding, tracebacks will be clipped to
+    exclude system modules.  If, however, a system module calls back into the
+    user program, that part of the traceback will be shown.
+
+  - The OnDemandOutputWindow class has been improved.  In particular, it now
+    supports a readline() function used to implement user input, and a
+    scroll_clear() operation which is used to hide the output of a previous run
+    by scrolling it out of the window.
+
+  - Startup behavior has been changed.  By default IDLE starts up with just a
+    blank editor window, rather than an interactive window.  Opening a file in
+    such a blank window replaces the (nonexistent) contents of that window
+    instead of creating another window.  Because of the need to have a
+    well-known port for the ExecBinding protocol, only one copy of IDLE can be
+    running.  Additional invocations use the RPC mechanism to report their
+    command line arguments to the copy already running.
+
+  - The menus have been reorganized.  In particular, the excessively large
+    'edit' menu has been split up into 'edit', 'format', and 'run'.
+
+  - 'Python Documentation' now works on Windows, if the win32api module is
+    present.
+
+  - A few key bindings have been changed: F1 now loads Python Documentation
+    instead of the IDLE help; shift-TAB is now a synonym for unindent.
+
+- New modules:
+  
+  ExecBinding.py         Executes program through loader
+  loader.py              Bootstraps user program
+  protocol.py            RPC protocol
+  Remote.py              User-process interpreter
+  spawn.py               OS-specific code to start programs
+
+- Files modified:
+
+  autoindent.py          ( bindings tweaked )
+  bindings.py            ( menus reorganized )
+  config.txt             ( execbinding enabled )
+  editorwindow.py        ( new menus, fixed 'Python Documentation' )
+  filelist.py            ( hook for "open in same window" )
+  formatparagraph.py     ( bindings tweaked )
+  idle.bat               ( removed absolute pathname )
+  idle.pyw               ( weird bug due to import with same name? )
+  iobinding.py           ( open in same window, EOL convention )
+  keydefs.py             ( bindings tweaked )
+  outputwindow.py        ( readline, scroll_clear, etc )
+  pyshell.py             ( changed startup behavior )
+  readme.txt             ( <Recursion on file with id=1234567> )
+
+
+
+IDLE 0.5 - February 2000 - Release Notes
+----------------------------------------
+
+This is an early release of IDLE, my own attempt at a Tkinter-based
+IDE for Python.
+
+(For a more detailed change log, see the file ChangeLog.)
+
+FEATURES
+
+IDLE has the following features:
+
+- coded in 100% pure Python, using the Tkinter GUI toolkit (i.e. Tcl/Tk)
+
+- cross-platform: works on Windows and Unix (on the Mac, there are
+currently problems with Tcl/Tk)
+
+- multi-window text editor with multiple undo, Python colorizing
+and many other features, e.g. smart indent and call tips
+
+- Python shell window (a.k.a. interactive interpreter)
+
+- debugger (not complete, but you can set breakpoints, view  and step)
+
+USAGE
+
+The main program is in the file "idle.py"; on Unix, you should be able
+to run it by typing "./idle.py" to your shell.  On Windows, you can
+run it by double-clicking it; you can use idle.pyw to avoid popping up
+a DOS console.  If you want to pass command line arguments on Windows,
+use the batch file idle.bat.
+
+Command line arguments: files passed on the command line are executed,
+not opened for editing, unless you give the -e command line option.
+Try "./idle.py -h" to see other command line options.
+
+IDLE requires Python 1.5.2, so it is currently only usable with a
+Python 1.5.2 distribution.  (An older version of IDLE is distributed
+with Python 1.5.2; you can drop this version on top of it.)
+
+COPYRIGHT
+
+IDLE is covered by the standard Python copyright notice
+(http://www.python.org/doc/Copyright.html).
+
+
+New in IDLE 0.5 (2/15/2000)
+---------------------------
+
+Tons of stuff, much of it contributed by Tim Peters and Mark Hammond:
+
+- Status bar, displaying current line/column (Moshe Zadka).
+
+- Better stack viewer, using tree widget.  (XXX Only used by Stack
+Viewer menu, not by the debugger.)
+
+- Format paragraph now recognizes Python block comments and reformats
+them correctly (MH)
+
+- New version of pyclbr.py parses top-level functions and understands
+much more of Python's syntax; this is reflected in the class and path
+browsers (TP)
+
+- Much better auto-indent; knows how to indent the insides of
+multi-line statements (TP)
+
+- Call tip window pops up when you type the name of a known function
+followed by an open parenthesis.  Hit ESC or click elsewhere in the
+window to close the tip window (MH)
+
+- Comment out region now inserts ## to make it stand out more (TP)
+
+- New path and class browsers based on a tree widget that looks
+familiar to Windows users
+
+- Reworked script running commands to be more intuitive: I/O now
+always goes to the *Python Shell* window, and raw_input() works
+correctly.  You use F5 to import/reload a module: this adds the module
+name to the __main__ namespace.  You use Control-F5 to run a script:
+this runs the script *in* the __main__ namespace.  The latter also
+sets sys.argv[] to the script name
+
+
+New in IDLE 0.4 (4/7/99)
+------------------------
+
+Most important change: a new menu entry "File -> Path browser", shows
+a 4-column hierarchical browser which lets you browse sys.path,
+directories, modules, and classes.  Yes, it's a superset of the Class
+browser menu entry.  There's also a new internal module,
+MultiScrolledLists.py, which provides the framework for this dialog.
+
+
+New in IDLE 0.3 (2/17/99)
+-------------------------
+
+Most important changes:
+
+- Enabled support for running a module, with or without the debugger.
+Output goes to a new window.  Pressing F5 in a module is effectively a
+reload of that module; Control-F5 loads it under the debugger.
+
+- Re-enable tearing off the Windows menu, and make a torn-off Windows
+menu update itself whenever a window is opened or closed.
+
+- Menu items can now have a checkbox (when the menu label starts
+with "!"); use this for the Debugger and "Auto-open stack viewer"
+(was: JIT stack viewer) menu items.
+
+- Added a Quit button to the Debugger API.
+
+- The current directory is explicitly inserted into sys.path.
+
+- Fix the debugger (when using Python 1.5.2b2) to use canonical
+filenames for breakpoints, so these actually work.  (There's still a
+lot of work to be done to the management of breakpoints in the
+debugger though.)
+
+- Closing a window that is still colorizing now actually works.
+
+- Allow dragging of the separator between the two list boxes in the
+class browser.
+
+- Bind ESC to "close window" of the debugger, stack viewer and class
+browser.  It removes the selection highlighting in regular text
+windows.  (These are standard Windows conventions.)
+
+
+New in IDLE 0.2 (1/8/99)
+------------------------
+
+Lots of changes; here are the highlights:
+
+General:
+
+- You can now write and configure your own IDLE extension modules; see
+extend.txt.
+
+
+File menu:
+
+The command to open the Python shell window is now in the File menu.
+
+
+Edit menu:
+
+New Find dialog with more options; replace dialog; find in files dialog.
+
+Commands to tabify or untabify a region.
+
+Command to format a paragraph.
+
+
+Debug menu:
+
+JIT (Just-In-Time) stack viewer toggle -- if set, the stack viewer
+automatically pops up when you get a traceback.
+
+Windows menu:
+
+Zoom height -- make the window full height.
+
+
+Help menu:
+
+The help text now shows up in a regular window so you can search and
+even edit it if you like.
+
+
+
+IDLE 0.1 was distributed with the Python 1.5.2b1 release on 12/22/98.
+
+======================================================================
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/IOBinding.py b/depot_tools/release/win/python_24/Lib/idlelib/IOBinding.py
new file mode 100644
index 0000000..4d67292
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/IOBinding.py
@@ -0,0 +1,574 @@
+# changes by dscherer@cmu.edu
+#   - IOBinding.open() replaces the current window with the opened file,
+#     if the current window is both unmodified and unnamed
+#   - IOBinding.loadfile() interprets Windows, UNIX, and Macintosh
+#     end-of-line conventions, instead of relying on the standard library,
+#     which will only understand the local convention.
+
+import os
+import types
+import sys
+import codecs
+import tempfile
+import tkFileDialog
+import tkMessageBox
+import re
+from Tkinter import *
+from SimpleDialog import SimpleDialog
+
+from configHandler import idleConf
+
+try:
+    from codecs import BOM_UTF8
+except ImportError:
+    # only available since Python 2.3
+    BOM_UTF8 = '\xef\xbb\xbf'
+
# Try setting the locale, so that we can find out
# what encoding to use
try:
    import locale
    locale.setlocale(locale.LC_CTYPE, "")
except (ImportError, locale.Error):
    pass

# Determine the IO encoding to offer the user, defaulting to ASCII.
encoding = "ascii"
if sys.platform == 'win32':
    # On Windows, we could use "mbcs". However, to give the user
    # a portable encoding name, we need to find the code page
    try:
        encoding = locale.getdefaultlocale()[1]
        codecs.lookup(encoding)
    except LookupError:
        pass
else:
    try:
        # Different things can fail here: the locale module may not be
        # loaded, it may not offer nl_langinfo, or CODESET, or the
        # resulting codeset may be unknown to Python. We ignore all
        # these problems, falling back to ASCII
        encoding = locale.nl_langinfo(locale.CODESET)
        # Equality, not identity: the former "encoding is ''" test
        # compared against a string literal by identity, which is not
        # guaranteed to be true even for equal strings.
        if not encoding:
            # situation occurs on Mac OS X
            encoding = 'ascii'
        codecs.lookup(encoding)
    except (NameError, AttributeError, LookupError):
        # Try getdefaultlocale well: it parses environment variables,
        # which may give a clue. Unfortunately, getdefaultlocale has
        # bugs that can cause ValueError.
        try:
            encoding = locale.getdefaultlocale()[1]
            if not encoding:
                # situation occurs on Mac OS X
                encoding = 'ascii'
            codecs.lookup(encoding)
        except (ValueError, LookupError):
            pass

# Normalize so later comparisons (e.g. with 'utf-8') are reliable.
encoding = encoding.lower()
+
+coding_re = re.compile("coding[:=]\s*([-\w_.]+)")
+
class EncodingMessage(SimpleDialog):
    "Inform user that an encoding declaration is needed."
    def __init__(self, master, enc):
        """Build the modal dialog suggesting a coding line for enc."""
        self.should_edit = False

        self.root = top = Toplevel(master)
        top.bind("<Return>", self.return_event)
        top.bind("<Escape>", self.do_ok)
        top.protocol("WM_DELETE_WINDOW", self.wm_delete_window)
        top.wm_title("I/O Warning")
        top.wm_iconname("I/O Warning")
        self.top = top

        l1 = Label(top,
            text="Non-ASCII found, yet no encoding declared. Add a line like")
        l1.pack(side=TOP, anchor=W)
        # An Entry (not a Label) so the suggested line can be selected
        # and copied by the user.
        l2 = Entry(top, font="courier")
        l2.insert(0, "# -*- coding: %s -*-" % enc)
        # For some reason, the text is not selectable anymore if the
        # widget is disabled.
        # l2['state'] = DISABLED
        l2.pack(side=TOP, anchor = W, fill=X)
        l3 = Label(top, text="to your file\n"
                   "Choose OK to save this file as %s\n"
                   "Edit your general options to silence this warning" % enc)
        l3.pack(side=TOP, anchor = W)

        buttons = Frame(top)
        buttons.pack(side=TOP, fill=X)
        # Both return and cancel mean the same thing: do nothing
        self.default = self.cancel = 0
        b1 = Button(buttons, text="Ok", default="active",
                    command=self.do_ok)
        b1.pack(side=LEFT, fill=BOTH, expand=1)
        b2 = Button(buttons, text="Edit my file",
                    command=self.do_edit)
        b2.pack(side=LEFT, fill=BOTH, expand=1)

        self._set_transient(master)

    def do_ok(self):
        """Finish with result 0: save using the suggested encoding."""
        self.done(0)

    def do_edit(self):
        """Finish with result 1: the user will edit the file instead."""
        self.done(1)
+
def coding_spec(str):
    """Return the encoding declaration according to PEP 263.

    Returns None when no declaration is present in the first two lines.
    Raise LookupError if the encoding is declared but unknown.
    """
    # Only consider the first two lines, per PEP 263.
    str = str.split("\n")[:2]
    str = "\n".join(str)

    match = coding_re.search(str)
    if not match:
        return None
    name = match.group(1)
    # Check whether the encoding is known (codecs is imported at module
    # level; the former redundant local import was dropped).
    try:
        codecs.lookup(name)
    except LookupError:
        # The standard encoding error does not indicate the encoding;
        # call-form raise replaces the Python-2-only "raise E, msg".
        raise LookupError("Unknown encoding " + name)
    return name
+
+
+class IOBinding:
+
    def __init__(self, editwin):
        """Attach open/save/print virtual-event bindings to editwin's
        Text widget.  editwin is an editor window (or PyShell)."""
        self.editwin = editwin
        self.text = editwin.text
        # Binding ids are remembered so close() can unbind them again.
        self.__id_open = self.text.bind("<<open-window-from-file>>", self.open)
        self.__id_save = self.text.bind("<<save-window>>", self.save)
        self.__id_saveas = self.text.bind("<<save-window-as-file>>",
                                          self.save_as)
        self.__id_savecopy = self.text.bind("<<save-copy-of-window-as-file>>",
                                            self.save_a_copy)
        # Encoding discovered when the current file was loaded: BOM_UTF8,
        # a codec name, or None if undetermined (see decode()).
        self.fileencoding = None
        self.__id_print = self.text.bind("<<print-window>>", self.print_window)
+
    def close(self):
        """Remove the event bindings and break reference cycles so the
        editor window can be garbage collected."""
        # Undo command bindings
        self.text.unbind("<<open-window-from-file>>", self.__id_open)
        self.text.unbind("<<save-window>>", self.__id_save)
        self.text.unbind("<<save-window-as-file>>",self.__id_saveas)
        self.text.unbind("<<save-copy-of-window-as-file>>", self.__id_savecopy)
        self.text.unbind("<<print-window>>", self.__id_print)
        # Break cycles
        self.editwin = None
        self.text = None
        self.filename_change_hook = None
+
+    def get_saved(self):
+        return self.editwin.get_saved()
+
+    def set_saved(self, flag):
+        self.editwin.set_saved(flag)
+
+    def reset_undo(self):
+        self.editwin.reset_undo()
+
    # Optional callable invoked after set_filename() adopts a new file
    # name (used by clients e.g. to refresh the window title).
    filename_change_hook = None

    def set_filename_change_hook(self, hook):
        """Register a callable to run whenever the filename changes."""
        self.filename_change_hook = hook
+
    # filename: the file currently bound to this window (None if the
    # buffer is unnamed).  dirname: default directory remembered when a
    # directory rather than a file was passed to set_filename().
    filename = None
    dirname = None

    def set_filename(self, filename):
        """Adopt filename as this window's file.

        A directory argument only records a default directory; it does
        not mark the buffer saved and does not fire the change hook.
        """
        if filename and os.path.isdir(filename):
            self.filename = None
            self.dirname = filename
        else:
            self.filename = filename
            self.dirname = None
            self.set_saved(1)
            if self.filename_change_hook:
                self.filename_change_hook()
+
    def open(self, event=None, editFile=None):
        """Handle the open-file event, or open editFile directly.

        Inside IDLE (self.editwin.flist is set) the file is opened via
        the file list, reusing the current window when it is unnamed,
        unmodified, and not the shell.  Outside IDLE the file is loaded
        into this window.  Always returns "break" so the Tk event is
        not propagated further.
        """
        if self.editwin.flist:
            if not editFile:
                filename = self.askopenfile()
            else:
                filename=editFile
            if filename:
                # If the current window has no filename and hasn't been
                # modified, we replace its contents (no loss).  Otherwise
                # we open a new window.  But we won't replace the
                # shell window (which has an interp(reter) attribute), which
                # gets set to "not modified" at every new prompt.
                try:
                    interp = self.editwin.interp
                except:
                    interp = None
                if not self.filename and self.get_saved() and not interp:
                    self.editwin.flist.open(filename, self.loadfile)
                else:
                    self.editwin.flist.open(filename)
            else:
                self.text.focus_set()
            return "break"
        #
        # Code for use outside IDLE:
        # NOTE(review): maybesave() returns "yes" immediately when the
        # buffer is already saved, so guarding the call with get_saved()
        # looks inverted -- verify whether this should be
        # "if not self.get_saved():".
        if self.get_saved():
            reply = self.maybesave()
            if reply == "cancel":
                self.text.focus_set()
                return "break"
        if not editFile:
            filename = self.askopenfile()
        else:
            filename=editFile
        if filename:
            self.loadfile(filename)
        else:
            self.text.focus_set()
        return "break"
+
    eol = r"(\r\n)|\n|\r"  # \r\n (Windows), \n (UNIX), or \r (Mac)
    eol_re = re.compile(eol)
    # End-of-line convention detected by loadfile() and restored by
    # writefile(); defaults to the platform convention.
    eol_convention = os.linesep # Default
+
    def loadfile(self, filename):
        """Load filename into the text widget; return True on success.

        The file is read in binary mode so Windows, UNIX, and Mac line
        endings can all be recognized; the convention found is stored
        in self.eol_convention and all line endings are normalized to
        '\\n' inside the widget.
        """
        try:
            # open the file in binary mode so that we can handle
            #   end-of-line convention ourselves.
            f = open(filename,'rb')
            chars = f.read()
            f.close()
        except IOError, msg:
            tkMessageBox.showerror("I/O Error", str(msg), master=self.text)
            return False

        chars = self.decode(chars)
        # We now convert all end-of-lines to '\n's
        firsteol = self.eol_re.search(chars)
        if firsteol:
            self.eol_convention = firsteol.group(0)
            if isinstance(self.eol_convention, unicode):
                # Make sure it is an ASCII string
                self.eol_convention = self.eol_convention.encode("ascii")
            chars = self.eol_re.sub(r"\n", chars)

        self.text.delete("1.0", "end")
        # The filename is cleared while the widget contents are rebuilt
        # and re-adopted once the new text is in place.
        self.set_filename(None)
        self.text.insert("1.0", chars)
        self.reset_undo()
        self.set_filename(filename)
        self.text.mark_set("insert", "1.0")
        self.text.see("insert")
        self.updaterecentfileslist(filename)
        return True
+
    def decode(self, chars):
        """Return chars decoded to Unicode, guessing the encoding.

        Tries, in order: a UTF-8 BOM, a PEP 263 coding declaration,
        ASCII, then the locale's encoding.  If everything fails, the
        original byte string is returned and Tcl gets to try its best.
        Records the successful non-ASCII encoding in self.fileencoding
        for use when the file is saved again.
        """
        # Check presence of a UTF-8 signature first
        if chars.startswith(BOM_UTF8):
            try:
                chars = chars[3:].decode("utf-8")
            except UnicodeError:
                # has UTF-8 signature, but fails to decode...
                return chars
            else:
                # Indicates that this file originally had a BOM
                self.fileencoding = BOM_UTF8
                return chars
        # Next look for coding specification
        try:
            enc = coding_spec(chars)
        except LookupError, name:
            tkMessageBox.showerror(
                title="Error loading the file",
                message="The encoding '%s' is not known to this Python "\
                "installation. The file may not display correctly" % name,
                master = self.text)
            enc = None
        if enc:
            try:
                return unicode(chars, enc)
            except UnicodeError:
                pass
        # If it is ASCII, we need not to record anything
        try:
            return unicode(chars, 'ascii')
        except UnicodeError:
            pass
        # Finally, try the locale's encoding. This is deprecated;
        # the user should declare a non-ASCII encoding
        try:
            chars = unicode(chars, encoding)
            self.fileencoding = encoding
        except UnicodeError:
            pass
        return chars
+
    def maybesave(self):
        """Offer to save the buffer if it has unsaved changes.

        Returns "yes", "no", or "cancel".  A "yes" where the save then
        fails (or is cancelled) is downgraded to "cancel".
        """
        if self.get_saved():
            return "yes"
        message = "Do you want to save %s before closing?" % (
            self.filename or "this untitled document")
        m = tkMessageBox.Message(
            title="Save On Close",
            message=message,
            icon=tkMessageBox.QUESTION,
            type=tkMessageBox.YESNOCANCEL,
            master=self.text)
        reply = m.show()
        if reply == "yes":
            self.save(None)
            if not self.get_saved():
                reply = "cancel"
        self.text.focus_set()
        return reply
+
+    def save(self, event):
+        if not self.filename:
+            self.save_as(event)
+        else:
+            if self.writefile(self.filename):
+                self.set_saved(1)
+                try:
+                    self.editwin.store_file_breaks()
+                except AttributeError:  # may be a PyShell
+                    pass
+        self.text.focus_set()
+        return "break"
+
    def save_as(self, event):
        """Prompt for a file name and save the buffer under it.

        On success the window adopts the new name.  Returns "break".
        """
        filename = self.asksavefile()
        if filename:
            if self.writefile(filename):
                self.set_filename(filename)
                self.set_saved(1)
                try:
                    self.editwin.store_file_breaks()
                except AttributeError:
                    pass
        self.text.focus_set()
        self.updaterecentfileslist(filename)
        return "break"
+
+    def save_a_copy(self, event):
+        filename = self.asksavefile()
+        if filename:
+            self.writefile(filename)
+        self.text.focus_set()
+        self.updaterecentfileslist(filename)
+        return "break"
+
    def writefile(self, filename):
        """Write the buffer to filename; return True on success.

        Ensures a trailing newline, encodes the text (see encode()),
        and converts newlines back to the end-of-line convention that
        was detected when the file was loaded.
        """
        self.fixlastline()
        chars = self.encode(self.text.get("1.0", "end-1c"))
        if self.eol_convention != "\n":
            chars = chars.replace("\n", self.eol_convention)
        try:
            f = open(filename, "wb")
            f.write(chars)
            f.close()
            return True
        except IOError, msg:
            tkMessageBox.showerror("I/O Error", str(msg),
                                   master=self.text)
            return False
+
    def encode(self, chars):
        """Encode the buffer text for writing, picking an encoding.

        Preference order: plain ASCII; a PEP 263 declaration in the
        text; the file's original encoding (BOM or codec recorded by
        decode()); the configured/locale encoding; finally UTF-8 with a
        BOM.  May pop up dialogs and, if the user chooses to edit,
        insert a coding line into the buffer and re-encode.
        """
        if isinstance(chars, types.StringType):
            # This is either plain ASCII, or Tk was returning mixed-encoding
            # text to us. Don't try to guess further.
            return chars
        # See whether there is anything non-ASCII in it.
        # If not, no need to figure out the encoding.
        try:
            return chars.encode('ascii')
        except UnicodeError:
            pass
        # If there is an encoding declared, try this first.
        try:
            enc = coding_spec(chars)
            failed = None
        except LookupError, msg:
            failed = msg
            enc = None
        if enc:
            try:
                return chars.encode(enc)
            except UnicodeError:
                failed = "Invalid encoding '%s'" % enc
        if failed:
            tkMessageBox.showerror(
                "I/O Error",
                "%s. Saving as UTF-8" % failed,
                master = self.text)
        # If there was a UTF-8 signature, use that. This should not fail
        if self.fileencoding == BOM_UTF8 or failed:
            return BOM_UTF8 + chars.encode("utf-8")
        # Try the original file encoding next, if any
        if self.fileencoding:
            try:
                return chars.encode(self.fileencoding)
            except UnicodeError:
                tkMessageBox.showerror(
                    "I/O Error",
                    "Cannot save this as '%s' anymore. Saving as UTF-8" \
                    % self.fileencoding,
                    master = self.text)
                return BOM_UTF8 + chars.encode("utf-8")
        # Nothing was declared, and we had not determined an encoding
        # on loading. Recommend an encoding line.
        config_encoding = idleConf.GetOption("main","EditorWindow",
                                             "encoding")
        if config_encoding == 'utf-8':
            # User has requested that we save files as UTF-8
            return BOM_UTF8 + chars.encode("utf-8")
        ask_user = True
        try:
            chars = chars.encode(encoding)
            enc = encoding
            if config_encoding == 'locale':
                ask_user = False
        except UnicodeError:
            chars = BOM_UTF8 + chars.encode("utf-8")
            enc = "utf-8"
        if not ask_user:
            return chars
        # Suggest an explicit coding line to the user (see
        # EncodingMessage); dialog.num is 1 when they chose to edit.
        dialog = EncodingMessage(self.editwin.top, enc)
        dialog.go()
        if dialog.num == 1:
            # User asked us to edit the file
            encline = "# -*- coding: %s -*-\n" % enc
            firstline = self.text.get("1.0", "2.0")
            if firstline.startswith("#!"):
                # Insert encoding after #! line
                self.text.insert("2.0", encline)
            else:
                self.text.insert("1.0", encline)
            return self.encode(self.text.get("1.0", "end-1c"))
        return chars
+
+    def fixlastline(self):
+        c = self.text.get("end-2c")
+        if c != '\n':
+            self.text.insert("end-1c", "\n")
+
    def print_window(self, event):
        """Print the window's contents via the platform print command.

        Unsaved or unnamed buffers are first written to a temporary
        file, which is removed afterwards.  Only posix and nt have a
        configured print command; other platforms get an info dialog.
        Returns "break".
        """
        tempfilename = None
        saved = self.get_saved()
        if saved:
            filename = self.filename
        # shell undo is reset after every prompt, looks saved, probably isn't
        if not saved or filename is None:
            # XXX KBK 08Jun03 Wouldn't it be better to ask the user to save?
            (tfd, tempfilename) = tempfile.mkstemp(prefix='IDLE_tmp_')
            filename = tempfilename
            os.close(tfd)
            if not self.writefile(tempfilename):
                os.unlink(tempfilename)
                return "break"
        platform=os.name
        printPlatform=1
        if platform == 'posix': #posix platform
            command = idleConf.GetOption('main','General',
                                         'print-command-posix')
            # Capture stderr too, so failures show in the dialog below.
            command = command + " 2>&1"
        elif platform == 'nt': #win32 platform
            command = idleConf.GetOption('main','General','print-command-win')
        else: #no printing for this platform
            printPlatform=0
        if printPlatform:  #we can try to print for this platform
            command = command % filename
            pipe = os.popen(command, "r")
            # things can get ugly on NT if there is no printer available.
            output = pipe.read().strip()
            status = pipe.close()
            if status:
                output = "Printing failed (exit status 0x%x)\n" % \
                         status + output
            if output:
                output = "Printing command: %s\n" % repr(command) + output
                tkMessageBox.showerror("Print status", output, master=self.text)
        else:  #no printing for this platform
            message="Printing is not enabled for this platform: %s" % platform
            tkMessageBox.showinfo("Print status", message, master=self.text)
        if tempfilename:
            os.unlink(tempfilename)
        return "break"
+
+    # File dialogs are created lazily on first use and then cached on the
+    # instance (see askopenfile/asksavefile).
+    opendialog = None
+    savedialog = None
+
+    # (description, glob pattern[, Mac file type]) triples for the dialogs.
+    filetypes = [
+        ("Python and text files", "*.py *.pyw *.txt", "TEXT"),
+        ("All text files", "*", "TEXT"),
+        ("All files", "*"),
+        ]
+
+    def askopenfile(self):
+        """Pop up an Open dialog seeded from the current file/directory
+        and return the selected path (empty string if cancelled)."""
+        dir, base = self.defaultfilename("open")
+        if not self.opendialog:
+            self.opendialog = tkFileDialog.Open(master=self.text,
+                                                filetypes=self.filetypes)
+        return self.opendialog.show(initialdir=dir, initialfile=base)
+
+    def defaultfilename(self, mode="open"):
+        """Return (directory, basename) used to seed a file dialog.
+
+        Prefers the current file's location, then the remembered directory,
+        then the process cwd.  The mode argument is currently unused.
+        """
+        if self.filename:
+            return os.path.split(self.filename)
+        elif self.dirname:
+            return self.dirname, ""
+        else:
+            try:
+                pwd = os.getcwd()
+            except os.error:
+                # cwd may be gone (e.g. deleted); fall back to no directory.
+                pwd = ""
+            return pwd, ""
+
+    def asksavefile(self):
+        """Pop up a Save As dialog seeded from the current file/directory
+        and return the selected path (empty string if cancelled)."""
+        dir, base = self.defaultfilename("save")
+        if not self.savedialog:
+            self.savedialog = tkFileDialog.SaveAs(master=self.text,
+                                                  filetypes=self.filetypes)
+        return self.savedialog.show(initialdir=dir, initialfile=base)
+
+    def updaterecentfileslist(self,filename):
+        """Add filename to the recent-files list on all editor windows."""
+        self.editwin.update_recent_files_list(filename)
+
+def test():
+    """Manual smoke test: a bare Text window whose open/save key bindings
+    drive an IOBinding through virtual events."""
+    root = Tk()
+    # Minimal stand-in for EditorWindow: just enough attributes/methods
+    # for IOBinding to operate.
+    class MyEditWin:
+        def __init__(self, text):
+            self.text = text
+            self.flist = None
+            self.text.bind("<Control-o>", self.open)
+            self.text.bind("<Control-s>", self.save)
+            self.text.bind("<Alt-s>", self.save_as)
+            self.text.bind("<Alt-z>", self.save_a_copy)
+        def get_saved(self): return 0
+        def set_saved(self, flag): pass
+        def reset_undo(self): pass
+        def open(self, event):
+            self.text.event_generate("<<open-window-from-file>>")
+        def save(self, event):
+            self.text.event_generate("<<save-window>>")
+        def save_as(self, event):
+            self.text.event_generate("<<save-window-as-file>>")
+        def save_a_copy(self, event):
+            self.text.event_generate("<<save-copy-of-window-as-file>>")
+    text = Text(root)
+    text.pack()
+    text.focus_set()
+    editwin = MyEditWin(text)
+    io = IOBinding(editwin)
+    root.mainloop()
+
+if __name__ == "__main__":
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Icons/folder.gif b/depot_tools/release/win/python_24/Lib/idlelib/Icons/folder.gif
new file mode 100644
index 0000000..effe8dc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Icons/folder.gif
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Icons/idle.icns b/depot_tools/release/win/python_24/Lib/idlelib/Icons/idle.icns
new file mode 100644
index 0000000..f65e313
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Icons/idle.icns
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Icons/minusnode.gif b/depot_tools/release/win/python_24/Lib/idlelib/Icons/minusnode.gif
new file mode 100644
index 0000000..c72e46f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Icons/minusnode.gif
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Icons/openfolder.gif b/depot_tools/release/win/python_24/Lib/idlelib/Icons/openfolder.gif
new file mode 100644
index 0000000..24aea1be
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Icons/openfolder.gif
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Icons/plusnode.gif b/depot_tools/release/win/python_24/Lib/idlelib/Icons/plusnode.gif
new file mode 100644
index 0000000..13ace90e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Icons/plusnode.gif
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Icons/python.gif b/depot_tools/release/win/python_24/Lib/idlelib/Icons/python.gif
new file mode 100644
index 0000000..58271ed
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Icons/python.gif
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Icons/tk.gif b/depot_tools/release/win/python_24/Lib/idlelib/Icons/tk.gif
new file mode 100644
index 0000000..a603f5ec
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Icons/tk.gif
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/IdleHistory.py b/depot_tools/release/win/python_24/Lib/idlelib/IdleHistory.py
new file mode 100644
index 0000000..46e70e1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/IdleHistory.py
@@ -0,0 +1,86 @@
+class History:
+    """Implement shell command recall via the <<history-previous>> and
+    <<history-next>> virtual events on a Tk Text widget.
+
+    The input region is the text between the "iomark" mark and "end-1c".
+    A partially typed prefix filters which history items are recalled.
+    """
+
+    def __init__(self, text, output_sep = "\n"):
+        # text: Tk Text widget; output_sep: line separator used inside the
+        # widget (e.g. sys.ps2), translated to/from plain "\n".
+        self.text = text
+        self.history = []
+        self.history_prefix = None
+        self.history_pointer = None
+        self.output_sep = output_sep
+        text.bind("<<history-previous>>", self.history_prev)
+        text.bind("<<history-next>>", self.history_next)
+
+    def history_next(self, event):
+        """Event handler: walk toward newer history items."""
+        self.history_do(0)
+        return "break"
+
+    def history_prev(self, event):
+        """Event handler: walk toward older history items."""
+        self.history_do(1)
+        return "break"
+
+    def _get_source(self, start, end):
+        # Get source code from start index to end index.  Lines in the
+        # text control may be separated by sys.ps2 .
+        lines = self.text.get(start, end).split(self.output_sep)
+        return "\n".join(lines)
+
+    def _put_source(self, where, source):
+        # Inverse of _get_source: insert text using the widget's separator.
+        output = self.output_sep.join(source.split("\n"))
+        self.text.insert(where, output)
+
+    def history_do(self, reverse):
+        """Replace the input region with the nearest history item that
+        extends the typed prefix; reverse=1 walks toward index 0 (older),
+        reverse=0 toward the end of the list (newer)."""
+        nhist = len(self.history)
+        pointer = self.history_pointer
+        prefix = self.history_prefix
+        # If the cursor moved or the input was edited since the last
+        # recall, restart the walk from the current input as the prefix.
+        if pointer is not None and prefix is not None:
+            if self.text.compare("insert", "!=", "end-1c") or \
+               self._get_source("iomark", "end-1c") != self.history[pointer]:
+                pointer = prefix = None
+        if pointer is None or prefix is None:
+            prefix = self._get_source("iomark", "end-1c")
+            if reverse:
+                pointer = nhist
+            else:
+                pointer = -1
+        nprefix = len(prefix)
+        while 1:
+            if reverse:
+                pointer = pointer - 1
+            else:
+                pointer = pointer + 1
+            if pointer < 0 or pointer >= nhist:
+                # Ran off either end: beep and restore the typed prefix.
+                self.text.bell()
+                if self._get_source("iomark", "end-1c") != prefix:
+                    self.text.delete("iomark", "end-1c")
+                    self._put_source("iomark", prefix)
+                pointer = prefix = None
+                break
+            item = self.history[pointer]
+            # Only items that strictly extend the typed prefix qualify.
+            if item[:nprefix] == prefix and len(item) > nprefix:
+                self.text.delete("iomark", "end-1c")
+                self._put_source("iomark", item)
+                break
+        self.text.mark_set("insert", "end-1c")
+        self.text.see("insert")
+        self.text.tag_remove("sel", "1.0", "end")
+        self.history_pointer = pointer
+        self.history_prefix = prefix
+
+    def history_store(self, source):
+        """Append source to the history (dropping any earlier duplicate)
+        and reset the recall state."""
+        source = source.strip()
+        if len(source) > 2:
+            # avoid duplicates
+            try:
+                self.history.remove(source)
+            except ValueError:
+                pass
+            self.history.append(source)
+        self.history_pointer = None
+        self.history_prefix = None
+
+    def recall(self, s):
+        """Replace the current input region with s and move the cursor
+        to the end."""
+        s = s.strip()
+        self.text.tag_remove("sel", "1.0", "end")
+        self.text.delete("iomark", "end-1c")
+        self.text.mark_set("insert", "end-1c")
+        self.text.insert("insert", s)
+        self.text.see("insert")
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/MultiStatusBar.py b/depot_tools/release/win/python_24/Lib/idlelib/MultiStatusBar.py
new file mode 100644
index 0000000..2d4c547
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/MultiStatusBar.py
@@ -0,0 +1,32 @@
+from Tkinter import *
+
+class MultiStatusBar(Frame):
+    """Status bar: a Frame holding named Label fields packed side by side."""
+
+    def __init__(self, master=None, **kw):
+        if master is None:
+            master = Tk()
+        Frame.__init__(self, master, **kw)
+        # Maps field name -> Label widget so set_label can update in place.
+        self.labels = {}
+
+    def set_label(self, name, text='', side=LEFT):
+        """Set the text of field `name`, creating its Label on first use."""
+        if not self.labels.has_key(name):
+            label = Label(self, bd=1, relief=SUNKEN, anchor=W)
+            label.pack(side=side)
+            self.labels[name] = label
+        else:
+            label = self.labels[name]
+        label.config(text=text)
+
+def _test():
+    """Manual smoke test: show a two-field status bar under a Text widget."""
+    b = Frame()
+    c = Text(b)
+    c.pack(side=TOP)
+    a = MultiStatusBar(b)
+    a.set_label("one", "hello")
+    a.set_label("two", "world")
+    a.pack(side=BOTTOM, fill=X)
+    b.pack()
+    b.mainloop()
+
+if __name__ == '__main__':
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/NEWS.txt b/depot_tools/release/win/python_24/Lib/idlelib/NEWS.txt
new file mode 100644
index 0000000..17cb2d85
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/NEWS.txt
@@ -0,0 +1,476 @@
+What's New in IDLE 1.1.1?
+=========================
+
+*Release date: 30-MAR-2005*
+
+
+What's New in IDLE 1.1.1c2?
+===========================
+
+*Release date: 17-MAR-2005*
+
+
+What's New in IDLE 1.1.1c1?
+===========================
+
+*Release date: 10-MAR-2005*
+
+- If an extension can't be loaded, print warning and skip it instead of
+  erroring out.
+
+- Improve error handling when .idlerc can't be created (warn and exit)
+
+- The GUI was hanging if the shell window was closed while a raw_input() 
+  was pending.  Restored the quit() of the readline() mainloop().
+  http://mail.python.org/pipermail/idle-dev/2004-December/002307.html
+
+What's New in IDLE 1.1?
+=======================
+
+*Release date: 30-NOV-2004*
+
+- On OpenBSD, terminating IDLE with ctrl-c from the command line caused a
+  stuck subprocess MainThread because only the SocketThread was exiting.
+
+What's New in IDLE 1.1b3/rc1?
+=============================
+
+*Release date: 18-NOV-2004*
+
+- Saving a Keyset w/o making changes (by using the "Save as New Custom Key Set"
+  button) caused IDLE to fail on restart (no new keyset was created in
+  config-keys.cfg).  Also true for Theme/highlights.  Python Bug 1064535.
+
+- A change to the linecache.py API caused IDLE to exit when an exception was
+  raised while running without the subprocess (-n switch).  Python Bug 1063840.
+
+What's New in IDLE 1.1b2?
+=========================
+
+*Release date: 03-NOV-2004*
+
+- When paragraph reformat width was made configurable, a bug was
+  introduced that caused reformatting of comment blocks to ignore how
+  far the block was indented, effectively adding the indentation width
+  to the reformat width.  This has been repaired, and the reformat
+  width is again a bound on the total width of reformatted lines.
+
+What's New in IDLE 1.1b1?
+=========================
+
+*Release date: 15-OCT-2004*
+
+
+What's New in IDLE 1.1a3?
+=========================
+
+*Release date: 02-SEP-2004*
+
+- Improve keyboard focus binding, especially in Windows menu.  Improve
+  window raising, especially in the Windows menu and in the debugger.
+  IDLEfork 763524.
+
+- If user passes a non-existent filename on the commandline, just
+  open a new file, don't raise a dialog.  IDLEfork 854928.
+
+
+What's New in IDLE 1.1a2?
+=========================
+
+*Release date: 05-AUG-2004*
+
+- EditorWindow.py was not finding the .chm help file on Windows.  Typo
+  at Rev 1.54.  Python Bug 990954
+
+- checking sys.platform for substring 'win' was breaking IDLE docs on Mac
+  (darwin).  Also, Mac Safari browser requires full file:// URIs.  SF 900580.
+
+
+What's New in IDLE 1.1a1?
+=========================
+
+*Release date: 08-JUL-2004*
+
+- Redirect the warning stream to the shell during the ScriptBinding check of
+  user code and format the warning similarly to an exception for both that
+  check and for runtime warnings raised in the subprocess.
+
+- CodeContext hint pane visibility state is now persistent across sessions.
+  The pane no longer appears in the shell window.  Added capability to limit
+  extensions to shell window or editor windows.  Noam Raphael addition
+  to Patch 936169.
+
+- Paragraph reformat width is now a configurable parameter in the
+  Options GUI.
+
+- New Extension: CodeContext.  Provides block structuring hints for code
+  which has scrolled above an edit window. Patch 936169 Noam Raphael.
+
+- If nulls somehow got into the strings in recent-files.lst
+  EditorWindow.update_recent_files_list() was failing.  Python Bug 931336.
+
+- If the normal background is changed via Configure/Highlighting, it will update
+  immediately, thanks to the previously mentioned patch by Nigel Rowe.
+
+- Add a highlight theme for builtin keywords.  Python Patch 805830 Nigel Rowe
+  This also fixed IDLEfork bug [ 693418 ] Normal text background color not refreshed
+  and Python bug [897872 ] Unknown color name on HP-UX
+
+- rpc.py:SocketIO - Large modules were generating large pickles when downloaded
+  to the execution server.  The return of the OK response from the subprocess
+  initialization was interfering and causing the sending socket to be not
+  ready.  Add an IO ready test to fix this.  Moved the polling IO ready test
+  into pollpacket().
+
+- Fix typo in rpc.py, s/b "pickle.PicklingError" not "pickle.UnpicklingError".
+
+- Added a Tk error dialog to run.py inform the user if the subprocess can't
+  connect to the user GUI process.  Added a timeout to the GUI's listening
+  socket.  Added Tk error dialogs to PyShell.py to announce a failure to bind
+  the port or connect to the subprocess.  Clean up error handling during
+  connection initiation phase.  This is an update of Python Patch 778323.
+
+- Print correct exception even if source file changed since shell was
+  restarted.  IDLEfork Patch 869012 Noam Raphael
+
+- Keybindings with the Shift modifier now work correctly.  So do bindings which
+  use the Space key.  Limit unmodified user keybindings to the function keys.
+  Python Bug 775353, IDLEfork Bugs 755647, 761557
+
+- After an exception, run.py was not setting the exception vector. Noam
+  Raphael suggested correcting this so pdb's postmortem pm() would work.
+  IDLEfork Patch 844675
+
+- IDLE now does not fail to save the file anymore if the Tk buffer is not a
+  Unicode string, yet eol_convention is.  Python Bugs 774680, 788378
+
+- IDLE didn't start correctly when Python was installed in "Program Files" on
+  W2K and XP.  Python Bugs 780451, 784183
+
+- config-main.def documentation incorrectly referred to idle- instead of
+  config-  filenames.  SF 782759  Also added note about .idlerc location.
+
+
+What's New in IDLE 1.0?
+=======================
+
+*Release date: 29-Jul-2003*
+
+- Added a banner to the shell discussing warnings possibly raised by personal
+  firewall software.  Added same comment to README.txt.
+
+
+What's New in IDLE 1.0 release candidate 2?
+===========================================
+
+*Release date: 24-Jul-2003*
+
+- Calltip error when docstring was None  Python Bug 775541
+
+
+What's New in IDLE 1.0 release candidate 1?
+===========================================
+
+*Release date: 18-Jul-2003*
+
+- Updated extend.txt, help.txt, and config-extensions.def to correctly
+  reflect the current status of the configuration system.  Python Bug 768469
+
+- Fixed: Call Tip Trimming May Loop Forever. Python Patch 769142 (Daniels)
+
+- Replaced apply(f, args, kwds) with f(*args, **kwargs) to improve performance
+  Python Patch 768187
+
+- Break or continue statements outside a loop were causing IDLE crash
+  Python Bug 767794
+
+- Convert Unicode strings from readline to IOBinding.encoding.  Also set
+  sys.std{in|out|err}.encoding, for both the local and the subprocess case.
+  SF IDLEfork patch 682347.
+
+
+What's New in IDLE 1.0b2?
+=========================
+
+*Release date: 29-Jun-2003*
+
+- Extend AboutDialog.ViewFile() to support file encodings.  Make the CREDITS
+  file Latin-1.
+
+- Updated the About dialog to reflect re-integration into Python.  Provide
+  buttons to display Python's NEWS, License, and Credits, plus additional
+  buttons for IDLE's README and NEWS.
+
+- TextViewer() now has a third parameter which allows inserting text into the
+  viewer instead of reading from a file.
+
+- (Created the .../Lib/idlelib directory in the Python CVS, which is a clone of
+  IDLEfork modified to install in the Python environment.  The code in the
+  interrupt module has been moved to thread.interrupt_main(). )
+
+- Printing the Shell window was failing if it was not saved first SF 748975
+
+- When using the Search in Files dialog, if the user had a selection
+  highlighted in his Editor window, insert it into the dialog search field.
+
+- The Python Shell entry was disappearing from the Windows menu.
+
+- Update the Windows file list when a file name change occurs
+
+- Change to File / Open Module: always pop up the dialog, using the current
+  selection as the default value.  This is easier to use habitually.
+
+- Avoided a problem with starting the subprocess when 'localhost' doesn't
+  resolve to the user's loopback interface.  SF 747772
+
+- Fixed an issue with highlighted errors never de-colorizing.  SF 747677.  Also
+  improved notification of Tabnanny Token Error.
+
+- File / New will by default save in the directory of the Edit window from
+  which it was initiated.  SF 748973 Guido van Rossum patch.
+
+
+What's New in IDLEfork 0.9b1?
+=============================
+
+*Release date: 02-Jun-2003*
+
+- The current working directory of the execution environment (and shell
+  following completion of execution) is now that of the module being run.
+
+- Added the delete-exitfunc option to config-main.def.  (This option is not
+  included in the Options dialog.)  Setting this to True (the default) will
+  cause IDLE to not run sys.exitfunc/atexit when the subprocess exits.
+
+- IDLE now preserves the line ending codes when editing a file produced on
+  a different platform. SF 661759,  SF 538584
+
+- Reduced default editor font size to 10 point and increased window height
+  to provide a better initial impression on Windows.
+
+- Options / Fonts/Tabs / Set Base Editor Font: List box was not highlighting
+  the default font when first installed on Windows.  SF 661676
+
+- Added Autosave feature: when user runs code from edit window, if the file
+  has been modified IDLE will silently save it if Autosave is enabled.  The
+  option is set in the Options dialog, and the default is to prompt the
+  user to save the file.   SF 661318 Bruce Sherwood patch.
+
+- Improved the RESTART annotation in the shell window when the user restarts
+  the shell while it is generating output.  Also improved annotation when user
+  repeatedly hammers the Ctrl-F6 restart.
+
+- Allow IDLE to run when not installed and cwd is not the IDLE directory
+  SF Patch 686254 "Run IDLEfork from any directory without set-up" - Raphael
+
+- When a module is run from an EditorWindow: if its directory is not in
+  sys.path, prepend it.  This allows the module to import other modules in
+  the same directory.  Do the same for a script run from the command line.
+
+- Correctly restart the subprocess if it is running user code and the user
+  attempts to run some other module or restarts the shell.  Do the same if
+  the link is broken and it is possible to restart the subprocess and re-
+  connect to the GUI.   SF RFE 661321.
+
+- Improved exception reporting when running commands or scripts from the
+  command line.
+
+- Added a -n command line switch to start IDLE without the subprocess.
+  Removed the Shell menu when running in that mode.  Updated help messages.
+
+- Added a comment to the shell startup header to indicate when IDLE is not
+  using the subprocess.
+
+- Restore the ability to run without the subprocess.  This can be important for
+  some platforms or configurations.  (Running without the subprocess allows the
+  debugger to trace through parts of IDLE itself, which may or may not be
+  desirable, depending on your point of view.  In addition, the traditional
+  reload/import tricks must be use if user source code is changed.)  This is
+  helpful for developing IDLE using IDLE, because one instance can be used to
+  edit the code and a separate instance run to test changes.  (Multiple
+  concurrent IDLE instances with subprocesses is a future feature)
+
+- Improve the error message a user gets when saving a file with non-ASCII
+  characters and no source encoding is specified.  Done by adding a dialog
+  'EncodingMessage', which contains the line to add in a fixed-font entry
+  widget, and which has a button to add that line to the file automatically.
+  Also, add a configuration option 'EditorWindow/encoding', which has three
+  possible values: none, utf-8, and locale. None is the default: IDLE will show
+  this dialog when non-ASCII characters are encountered. utf-8 means that files
+  with non-ASCII characters are saved as utf-8-with-bom. locale means that
+  files are saved in the locale's encoding; the dialog is only displayed if the
+  source contains characters outside the locale's charset.  SF 710733 - Loewis
+
+- Improved I/O response by tweaking the wait parameter in various
+  calls to signal.signal().
+
+- Implemented a threaded subprocess which allows interrupting a pass
+  loop in user code using the 'interrupt' extension.  User code runs
+  in MainThread, while the RPCServer is handled by SockThread.  This is
+  necessary because Windows doesn't support signals.
+
+- Implemented the 'interrupt' extension module, which allows a subthread
+  to raise a KeyboardInterrupt in the main thread.
+
+- Attempting to save the shell raised an error related to saving
+  breakpoints, which are not implemented in the shell
+
+- Provide a correct message when 'exit' or 'quit' are entered at the
+  IDLE command prompt  SF 695861
+
+- Eliminate extra blank line in shell output caused by not flushing
+  stdout when user code ends with an unterminated print. SF 695861
+
+- Moved responsibility for exception formatting (i.e. pruning IDLE internal
+  calls) out of rpc.py into the client and server.
+
+- Exit IDLE cleanly even when doing subprocess I/O
+
+- Handle subprocess interrupt with an RPC message.
+
+- Restart the subprocess if it terminates itself. (VPython programs do that)
+
+- Support subclassing of exceptions, including in the shell, by moving the
+  exception formatting to the subprocess.
+
+
+
+What's New in IDLEfork 0.9 Alpha 2?
+===================================
+
+*Release date: 27-Jan-2003*
+
+- Updated INSTALL.txt to clarify use of the python2 rpm.
+
+- Improved formatting in IDLE Help.
+
+- Run menu: Replace "Run Script" with "Run Module".
+
+- Code encountering an unhandled exception under the debugger now shows
+  the correct traceback, with IDLE internal levels pruned out.
+
+- If an exception occurs entirely in IDLE, don't prune the IDLE internal
+  modules from the traceback displayed.
+
+- Class Browser and Path Browser now use Alt-Key-2 for vertical zoom.
+
+- IDLE icons will now install correctly even when setup.py is run from the
+  build directory
+
+- Class Browser now compatible with Python2.3 version of pyclbr.py
+
+- Left cursor move in presence of selected text now moves from left end
+  of the selection.
+
+- Add Meta keybindings to "IDLE Classic Windows" to handle reversed
+  Alt/Meta on some Linux distros.
+
+- Change default: IDLE now starts with Python Shell.
+
+- Removed the File Path from the Additional Help Sources scrolled list.
+
+- Add capability to access Additional Help Sources on the web if the
+  Help File Path begins with //http or www.  (Otherwise local path is
+  validated, as before.)
+
+- Additional Help Sources were not being posted on the Help menu in the
+  order entered.  Implement sorting the list by [HelpFiles] 'option'
+  number.
+
+- Add Browse button to New Help Source dialog.  Arrange to start in
+  Python/Doc if platform is Windows, otherwise start in current directory.
+
+- Put the Additional Help Sources directly on the Help menu instead of in
+  an Extra Help cascade menu.  Rearrange the Help menu so the Additional
+  Help Sources come last.  Update help.txt appropriately.
+
+- Fix Tk root pop-ups in configSectionNameDialog.py  and configDialog.py
+
+- Uniform capitalization in General tab of ConfigDialog, update the doc string.
+
+- Fix bug in ConfigDialog where SaveAllChangedConfig() was unexpectedly
+  deleting Additional Help Sources from the user's config file.
+
+- Make configHelpSourceEdit OK button the default and bind <Return>
+
+- Fix Tk root pop-ups in configHelpSourceEdit: error dialogs not attached
+  to parents.
+
+- Use os.startfile() to open both Additional Help and Python Help on the
+  Windows platform.  The application associated with the file type will act as
+  the viewer.  Windows help files (.chm) are now supported via the
+  Settings/General/Additional Help facility.
+
+- If Python Help files are installed locally on Linux, use them instead of
+  accessing python.org.
+
+- Make the methods for finding the Python help docs more robust, and make
+  them work in the installed configuration, also.
+
+- On the Save Before Run dialog, make the OK button the default.  One
+  less mouse action!
+
+- Add a method: EditorWindow.get_geometry() for future use in implementing
+  window location persistence.
+
+- Removed the "Help/Advice" menu entry.  Thanks, David!  We'll remember!
+
+- Change the "Classic Windows" theme's paste key to be <ctrl-v>.
+
+- Rearrange the Shell menu to put Stack Viewer entries adjacent.
+
+- Add the ability to restart the subprocess interpreter from the shell window;
+  add an associated menu entry "Shell/Restart" with binding Control-F6.  Update
+  IDLE help.
+
+- Upon a restart, annotate the shell window with a "restart boundary".  Add a
+  shell window menu "Shell/View Restart" with binding F6 to jump to the most
+  recent restart boundary.
+
+- Add Shell menu to Python Shell; change "Settings" to "Options".
+
+- Remove incorrect comment in setup.py: IDLEfork is now installed as a package.
+
+- Add INSTALL.txt, HISTORY.txt, NEWS.txt to installed configuration.
+
+- In installer text, fix reference to Visual Python, should be VPython.
+  Properly credit David Scherer.
+
+- Modified idle, idle.py, idle.pyw to improve exception handling.
+
+
+What's New in IDLEfork 0.9 Alpha 1?
+===================================
+
+*Release date: 31-Dec-2002*
+
+- First release of major new functionality.  For further details refer to
+  Idle-dev and/or the Sourceforge CVS.
+
+- Adapted to the Mac platform.
+
+- Overhauled the IDLE startup options and revised the idle -h help message,
+  which provides details of command line usage.
+
+- Multiple bug fixes and usability enhancements.
+
+- Introduced the new RPC implementation, which includes a debugger.  The output
+  of user code is to the shell, and the shell may be used to inspect the
+  environment after the run has finished.  (In version 0.8.1 the shell
+  environment was separate from the environment of the user code.)
+
+- Introduced the configuration GUI and a new About dialog.
+
+- Removed David Scherer's Remote Procedure Call code and replaced with Guido
+  van Rossum's.  GvR code has support for the IDLE debugger and uses the shell
+  to inspect the environment of code Run from an Edit window.  Files removed:
+  ExecBinding.py, loader.py, protocol.py, Remote.py, spawn.py
+
+--------------------------------------------------------------------
+Refer to HISTORY.txt for additional information on earlier releases.
+--------------------------------------------------------------------
+
+
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ObjectBrowser.py b/depot_tools/release/win/python_24/Lib/idlelib/ObjectBrowser.py
new file mode 100644
index 0000000..a2a6cee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ObjectBrowser.py
@@ -0,0 +1,151 @@
+# XXX TO DO:
+# - popup menu
+# - support partial or total redisplay
+# - more doc strings
+# - tooltips
+
+# object browser
+
+# XXX TO DO:
+# - for classes/modules, add "open source" to object browser
+
+from TreeWidget import TreeItem, TreeNode, ScrolledCanvas
+
+from repr import Repr
+
# Shared size-limited Repr used when rendering values in the tree;
# the limits keep huge strings/objects from flooding the display.
myrepr = Repr()
myrepr.maxstring = 100
myrepr.maxother = 100
+
class ObjectTreeItem(TreeItem):
    """Generic tree item wrapping an arbitrary Python object.

    labeltext is shown before the value; setfunction, if supplied, is
    called with the new value when the user edits the item in place.
    """
    def __init__(self, labeltext, object, setfunction=None):
        self.labeltext = labeltext
        self.object = object
        self.setfunction = setfunction
    def GetLabelText(self):
        return self.labeltext
    def GetText(self):
        # Size-limited repr so arbitrarily large objects stay readable.
        return myrepr.repr(self.object)
    def GetIconName(self):
        if not self.IsExpandable():
            return "python"
    def IsEditable(self):
        # Editable only when the creator supplied a way to store a value.
        return self.setfunction is not None
    def SetText(self, text):
        # Evaluate the edited text; on any failure keep the old value.
        # NOTE: eval of user-typed text is intentional here (interactive
        # object browser), not untrusted external input.
        try:
            value = eval(text)
            self.setfunction(value)
        except Exception:
            # Narrowed from a bare "except:" so KeyboardInterrupt and
            # SystemExit are not silently swallowed.
            pass
        else:
            self.object = value
    def IsExpandable(self):
        # Expandable iff the object exposes any attributes at all.
        return bool(dir(self.object))
    def GetSubList(self):
        """Return one child item per readable attribute."""
        keys = dir(self.object)
        sublist = []
        for key in keys:
            try:
                value = getattr(self.object, key)
            except AttributeError:
                continue
            item = make_objecttreeitem(
                str(key) + " =",
                value,
                # Bind key/object now; 'value' is supplied at call time.
                lambda value, key=key, object=self.object:
                    setattr(object, key, value))
            sublist.append(item)
        return sublist
+
class InstanceTreeItem(ObjectTreeItem):
    """Tree item for class instances; lists __class__ first."""
    def IsExpandable(self):
        # An instance always has at least __class__ to show.
        return True
    def GetSubList(self):
        items = ObjectTreeItem.GetSubList(self)
        class_item = make_objecttreeitem("__class__ =", self.object.__class__)
        items.insert(0, class_item)
        return items
+
class ClassTreeItem(ObjectTreeItem):
    """Tree item for classes; lists the base class(es) first."""
    def IsExpandable(self):
        return True
    def GetSubList(self):
        items = ObjectTreeItem.GetSubList(self)
        bases = self.object.__bases__
        if len(bases) == 1:
            head = make_objecttreeitem("__bases__[0] =", bases[0])
        else:
            head = make_objecttreeitem("__bases__ =", bases)
        items.insert(0, head)
        return items
+
class AtomicObjectTreeItem(ObjectTreeItem):
    # Leaf item for atomic values (numbers, strings): never expandable.
    def IsExpandable(self):
        return 0
+
class SequenceTreeItem(ObjectTreeItem):
    """Tree item for indexable sequences (lists, tuples)."""
    def IsExpandable(self):
        # Expandable only when there is at least one element.
        return len(self.object) > 0
    def keys(self):
        # A sequence's "keys" are its indices.
        return range(len(self.object))
    def GetSubList(self):
        items = []
        for key in self.keys():
            try:
                value = self.object[key]
            except KeyError:
                continue
            def setter(value, key=key, object=self.object):
                # Bound via defaults so each item edits its own slot.
                object[key] = value
            items.append(make_objecttreeitem("%r:" % (key,), value, setter))
        return items
+
class DictTreeItem(SequenceTreeItem):
    """Tree item for dictionaries: keys sorted when sortable."""
    def keys(self):
        result = self.object.keys()
        try:
            result.sort()
        except:
            # Unorderable keys: fall back to dictionary order.
            pass
        return result
+
# Explicit names instead of "from types import *" keep this module's
# namespace predictable; only the types dispatched on below are needed.
from types import (IntType, LongType, FloatType, StringType, TupleType,
                   ListType, DictType, InstanceType, ClassType)

# Map a concrete type to the tree item class that displays it best;
# make_objecttreeitem falls back to ObjectTreeItem for anything else.
dispatch = {
    IntType: AtomicObjectTreeItem,
    LongType: AtomicObjectTreeItem,
    FloatType: AtomicObjectTreeItem,
    StringType: AtomicObjectTreeItem,
    TupleType: SequenceTreeItem,
    ListType: SequenceTreeItem,
    DictType: DictTreeItem,
    InstanceType: InstanceTreeItem,
    ClassType: ClassTreeItem,
}
+
def make_objecttreeitem(labeltext, object, setfunction=None):
    """Return the most specific tree item for object's type.

    Falls back to the generic ObjectTreeItem for unlisted types.
    """
    # dict.get with a default replaces the deprecated has_key() idiom.
    c = dispatch.get(type(object), ObjectTreeItem)
    return c(labeltext, object, setfunction)
+
+# Test script
+
def _test():
    """Manual smoke test: browse the sys module in a Tk window."""
    import sys
    from Tkinter import Tk
    root = Tk()
    root.configure(bd=0, bg="yellow")
    root.focus_set()
    canvas = ScrolledCanvas(root, bg="white", highlightthickness=0,
                            takefocus=1)
    canvas.frame.pack(expand=1, fill="both")
    root_item = make_objecttreeitem("sys", sys)
    node = TreeNode(canvas.canvas, None, root_item)
    node.update()
    root.mainloop()

if __name__ == '__main__':
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/OutputWindow.py b/depot_tools/release/win/python_24/Lib/idlelib/OutputWindow.py
new file mode 100644
index 0000000..787e9b0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/OutputWindow.py
@@ -0,0 +1,157 @@
+from Tkinter import *
+from EditorWindow import EditorWindow
+import re
+import tkMessageBox
+import IOBinding
+
class OutputWindow(EditorWindow):

    """An editor window that can serve as an output file.

    Also the future base class for the Python shell window.
    This class has no input facilities.
    """

    def __init__(self, *args):
        EditorWindow.__init__(self, *args)
        # Fired by the right-click menu entry declared in rmenu_specs.
        self.text.bind("<<goto-file-line>>", self.goto_file_line)

    # Customize EditorWindow

    def ispythonsource(self, filename):
        # No colorization needed
        return 0

    def short_title(self):
        return "Output"

    def maybesave(self):
        # Override base class method -- don't ask any questions
        if self.get_saved():
            return "yes"
        else:
            return "no"

    # Act as output file

    def write(self, s, tags=(), mark="insert"):
        """Insert s at mark with the given tags, file-object style.

        Byte strings are decoded from the locale's encoding first;
        undecodable strings are passed through for Tcl to handle.
        """
        # Tk assumes that byte strings are Latin-1;
        # we assume that they are in the locale's encoding
        if isinstance(s, str):
            try:
                s = unicode(s, IOBinding.encoding)
            except UnicodeError:
                # some other encoding; let Tcl deal with it
                pass
        self.text.insert(mark, s, tags)
        self.text.see(mark)
        self.text.update()

    def writelines(self, l):
        # File-object protocol: write each line in turn.
        map(self.write, l)

    def flush(self):
        # File-object protocol: nothing is buffered here.
        pass

    # Our own right-button menu

    rmenu_specs = [
        ("Go to file/line", "<<goto-file-line>>"),
    ]

    # Patterns recognizing "file, line" references in output: traceback
    # lines, "file(lineno)" and grep-style "file: lineno:" forms.
    file_line_pats = [
        r'file "([^"]*)", line (\d+)',
        r'([^\s]+)\((\d+)\)',
        r'([^\s]+):\s*(\d+):',
    ]

    # Compiled lazily, per instance, on first use (see goto_file_line).
    file_line_progs = None

    def goto_file_line(self, event=None):
        """Open the file referenced on the clicked line at its line number.

        Checks the line under the insert mark, then the one above it;
        pops an error dialog when neither contains a file reference.
        """
        if self.file_line_progs is None:
            l = []
            for pat in self.file_line_pats:
                l.append(re.compile(pat, re.IGNORECASE))
            self.file_line_progs = l
        # x, y = self.event.x, self.event.y
        # self.text.mark_set("insert", "@%d,%d" % (x, y))
        line = self.text.get("insert linestart", "insert lineend")
        result = self._file_line_helper(line)
        if not result:
            # Try the previous line.  This is handy e.g. in tracebacks,
            # where you tend to right-click on the displayed source line
            line = self.text.get("insert -1line linestart",
                                 "insert -1line lineend")
            result = self._file_line_helper(line)
            if not result:
                tkMessageBox.showerror(
                    "No special line",
                    "The line you point at doesn't look like "
                    "a valid file name followed by a line number.",
                    master=self.text)
                return
        filename, lineno = result
        edit = self.flist.open(filename)
        edit.gotoline(lineno)

    def _file_line_helper(self, line):
        """Return (filename, lineno) parsed from line, or None.

        Only reports a match when the named file actually exists.
        """
        for prog in self.file_line_progs:
            m = prog.search(line)
            if m:
                break
        else:
            return None
        filename, lineno = m.group(1, 2)
        try:
            f = open(filename, "r")
            f.close()
        except IOError:
            return None
        try:
            # NOTE(review): int() of a bad string raises ValueError, not
            # TypeError, so this handler looks unreachable -- the
            # patterns above only capture digits anyway.  Confirm before
            # relying on it.
            return filename, int(lineno)
        except TypeError:
            return None
+
+# These classes are currently not used but might come in handy
+
class OnDemandOutputWindow:
    """Proxy that creates its OutputWindow lazily, on the first write."""

    tagdefs = {
        # XXX Should use IdlePrefs.ColorPrefs
        "stdout":  {"foreground": "blue"},
        "stderr":  {"foreground": "#007700"},
    }

    def __init__(self, flist):
        self.flist = flist
        self.owin = None

    def write(self, s, tags, mark):
        # The first write creates the window; setup() then rebinds
        # self.write so later calls go straight to the real window.
        if not self.owin:
            self.setup()
        self.owin.write(s, tags, mark)

    def setup(self):
        self.owin = owin = OutputWindow(self.flist)
        widget = owin.text
        for tag, cnf in self.tagdefs.items():
            if cnf:
                widget.tag_configure(tag, **cnf)
        widget.tag_raise('sel')
        # Shortcut future writes directly to the window's write method.
        self.write = self.owin.write
+
+#class PseudoFile:
+#
+#      def __init__(self, owin, tags, mark="end"):
+#          self.owin = owin
+#          self.tags = tags
+#          self.mark = mark
+
+#      def write(self, s):
+#          self.owin.write(s, self.tags, self.mark)
+
+#      def writelines(self, l):
+#          map(self.write, l)
+
+#      def flush(self):
+#          pass
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ParenMatch.py b/depot_tools/release/win/python_24/Lib/idlelib/ParenMatch.py
new file mode 100644
index 0000000..407f468
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ParenMatch.py
@@ -0,0 +1,178 @@
+"""ParenMatch -- An IDLE extension for parenthesis matching.
+
+When you hit a right paren, the cursor should move briefly to the left
+paren.  Paren here is used generically; the matching applies to
+parentheses, square brackets, and curly braces.
+
+WARNING: This extension will fight with the CallTips extension,
+because they both are interested in the KeyRelease-parenright event.
+We'll have to fix IDLE to do something reasonable when two or more
+extensions want to capture the same event.
+"""
+
+import PyParse
+from EditorWindow import EditorWindow, index2line
+from configHandler import idleConf
+
class ParenMatch:
    """Highlight matching parentheses

    There are two supported styles of paren matching, based loosely
    on the Emacs options.  The style is selected based on the
    STYLE attribute; it can be changed using the set_style method.

    The supported styles are:

    default -- When a right paren is typed, highlight the matching
        left paren for 1/2 sec.

    expression -- When a right paren is typed, highlight the entire
        expression from the left paren to the right paren.

    TODO:
        - fix interaction with CallTips
        - extend IDLE with configuration dialog to change options
        - implement rest of Emacs highlight styles (see below)
        - print mismatch warning in IDLE status window

    Note: In Emacs, there are several styles of highlight where the
    matching paren is highlighted whenever the cursor is immediately
    to the right of a right paren.  I don't know how to do that in Tk,
    so I haven't bothered.
    """
    menudefs = []
    # User preferences, read from the IDLE config at class-definition time.
    STYLE = idleConf.GetOption('extensions','ParenMatch','style',
            default='expression')
    FLASH_DELAY = idleConf.GetOption('extensions','ParenMatch','flash-delay',
            type='int',default=500)
    HILITE_CONFIG = idleConf.GetHighlight(idleConf.CurrentTheme(),'hilite')
    BELL = idleConf.GetOption('extensions','ParenMatch','bell',
            type='bool',default=1)

    def __init__(self, editwin):
        self.editwin = editwin
        self.text = editwin.text
        self.finder = LastOpenBracketFinder(editwin)
        # counter ties each scheduled flash timeout to the highlight it
        # should clear; stale timeouts are ignored (handle_restore_timer).
        self.counter = 0
        self._restore = None
        self.set_style(self.STYLE)

    def set_style(self, style):
        """Install the create_tag/set_timeout pair for the chosen style."""
        self.STYLE = style
        if style == "default":
            self.create_tag = self.create_tag_default
            self.set_timeout = self.set_timeout_last
        elif style == "expression":
            self.create_tag = self.create_tag_expression
            self.set_timeout = self.set_timeout_none

    def flash_open_paren_event(self, event):
        """Highlight the bracket matching the one just typed, if any."""
        index = self.finder.find(keysym_type(event.keysym))
        if index is None:
            # No matching open bracket (or mismatched kind): just warn.
            self.warn_mismatched()
            return
        self._restore = 1
        self.create_tag(index)
        self.set_timeout()

    def check_restore_event(self, event=None):
        # Remove an active highlight; bound to user input events.
        if self._restore:
            self.text.tag_delete("paren")
            self._restore = None

    def handle_restore_timer(self, timer_count):
        # Only the most recently scheduled timeout may clear the tag.
        if timer_count + 1 == self.counter:
            self.check_restore_event()

    def warn_mismatched(self):
        if self.BELL:
            self.text.bell()

    # any one of the create_tag_XXX methods can be used depending on
    # the style

    def create_tag_default(self, index):
        """Highlight the single paren that matches"""
        self.text.tag_add("paren", index)
        self.text.tag_config("paren", self.HILITE_CONFIG)

    def create_tag_expression(self, index):
        """Highlight the entire expression"""
        self.text.tag_add("paren", index, "insert")
        self.text.tag_config("paren", self.HILITE_CONFIG)

    # any one of the set_timeout_XXX methods can be used depending on
    # the style

    def set_timeout_none(self):
        """Highlight will remain until user input turns it off"""
        pass

    def set_timeout_last(self):
        """The last highlight created will be removed after .5 sec"""
        # associate a counter with an event; only disable the "paren"
        # tag if the event is for the most recent timer.
        self.editwin.text_frame.after(self.FLASH_DELAY,
                                      lambda self=self, c=self.counter: \
                                      self.handle_restore_timer(c))
        self.counter = self.counter + 1
+
def keysym_type(ks):
    """Classify a closing keysym (or an opening bracket char) by kind.

    Only the keysyms and characters that can actually reach this
    function are handled; anything else falls through to None.
    """
    if ks in ("parenright", "("):
        return "paren"
    if ks in ("bracketright", "["):
        return "bracket"
    if ks in ("braceright", "{"):
        return "brace"
+
class LastOpenBracketFinder:
    """Locate the unmatched open bracket preceding the insert point."""

    # Parsing parameters borrowed from the editor window class defaults.
    num_context_lines = EditorWindow.num_context_lines
    indentwidth = EditorWindow.indentwidth
    tabwidth = EditorWindow.tabwidth
    context_use_ps1 = EditorWindow.context_use_ps1

    def __init__(self, editwin):
        self.editwin = editwin
        self.text = editwin.text

    def _find_offset_in_buf(self, lno):
        """Parse backwards from line lno; return (bracket offset, text).

        Tries progressively larger amounts of context until a good
        parse start is found or the whole buffer has been consumed.
        """
        y = PyParse.Parser(self.indentwidth, self.tabwidth)
        for context in self.num_context_lines:
            startat = max(lno - context, 1)
            startatindex = repr(startat) + ".0"
            # rawtext needs to contain everything up to the last
            # character, which was the close paren.  the parser also
            # requires that the last line ends with "\n"
            rawtext = self.text.get(startatindex, "insert")[:-1] + "\n"
            y.set_str(rawtext)
            bod = y.find_good_parse_start(
                        self.context_use_ps1,
                        self._build_char_in_string_func(startatindex))
            if bod is not None or startat == 1:
                break
        y.set_lo(bod or 0)
        i = y.get_last_open_bracket_pos()
        return i, y.str

    def find(self, right_keysym_type):
        """Return the location of the last open paren"""
        lno = index2line(self.text.index("insert"))
        i, buf = self._find_offset_in_buf(lno)
        if i is None \
           or keysym_type(buf[i]) != right_keysym_type:
            # No open bracket, or the wrong kind (e.g. "(" closed by
            # "]"): report failure so the caller can warn the user.
            return None
        lines_back = buf[i:].count("\n") - 1
        # subtract one for the "\n" added to please the parser
        upto_open = buf[:i]
        j = upto_open.rfind("\n") + 1 # offset of column 0 of line
        offset = i - j
        return "%d.%d" % (lno - lines_back, offset)

    def _build_char_in_string_func(self, startindex):
        # Return a callable reporting whether the char at a given offset
        # from startindex lies inside a string, per the editor window.
        def inner(offset, startindex=startindex,
                  icis=self.editwin.is_char_in_string):
            return icis(startindex + "%dc" % offset)
        return inner
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/PathBrowser.py b/depot_tools/release/win/python_24/Lib/idlelib/PathBrowser.py
new file mode 100644
index 0000000..86cd270
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/PathBrowser.py
@@ -0,0 +1,95 @@
+import os
+import sys
+import imp
+
+from TreeWidget import TreeItem
+from ClassBrowser import ClassBrowser, ModuleBrowserTreeItem
+
class PathBrowser(ClassBrowser):
    """Class browser rooted at sys.path instead of a single module."""

    def __init__(self, flist):
        self.init(flist)

    def settitle(self):
        title = "Path Browser"
        self.top.wm_title(title)
        self.top.wm_iconname(title)

    def rootnode(self):
        return PathBrowserTreeItem()
+
class PathBrowserTreeItem(TreeItem):
    """Root item: one child directory item per sys.path entry."""

    def GetText(self):
        return "sys.path"

    def GetSubList(self):
        return [DirBrowserTreeItem(entry) for entry in sys.path]
+
class DirBrowserTreeItem(TreeItem):
    """Tree item for one directory: sub-packages first, then modules."""

    def __init__(self, dir, packages=None):
        # A fresh list per instance instead of a shared mutable default.
        if packages is None:
            packages = []
        self.dir = dir
        self.packages = packages

    def GetText(self):
        if not self.packages:
            return self.dir
        else:
            return self.packages[-1] + ": package"

    def GetSubList(self):
        """Return sub-package items followed by module items."""
        try:
            names = os.listdir(self.dir or os.curdir)
        except os.error:
            # Unreadable or vanished directory: show nothing.
            return []
        packages = []
        for name in names:
            path = os.path.join(self.dir, name)
            if self.ispackagedir(path):
                nn = os.path.normcase(name)
                packages.append((nn, name, path))
        packages.sort()
        sublist = []
        for nn, name, path in packages:
            sublist.append(DirBrowserTreeItem(path, self.packages + [name]))
        for nn, name in self.listmodules(names):
            sublist.append(ModuleBrowserTreeItem(os.path.join(self.dir, name)))
        return sublist

    def ispackagedir(self, file):
        # A package is a directory containing an __init__.py.
        if not os.path.isdir(file):
            return 0
        init = os.path.join(file, "__init__.py")
        return os.path.exists(init)

    def listmodules(self, allnames):
        """Return sorted (normcased, original) names of module files.

        Entries recognized as modules are removed from allnames in
        place so the caller does not list them twice.
        """
        modules = {}
        suffixes = imp.get_suffixes()
        found = []   # renamed from 'sorted': don't shadow the builtin
        for suff, mode, flag in suffixes:
            i = -len(suff)
            for name in allnames[:]:
                normed_name = os.path.normcase(name)
                if normed_name[i:] == suff:
                    mod_name = name[:i]
                    # "in" test replaces the deprecated has_key() idiom.
                    if mod_name not in modules:
                        modules[mod_name] = None
                        found.append((normed_name, name))
                        allnames.remove(name)
        found.sort()
        return found
+
def main():
    """Open a path browser on the running PyShell's file list."""
    import PyShell
    PathBrowser(PyShell.flist)
    if sys.stdin is sys.__stdin__:
        # Fix: 'mainloop' is not imported at module level, so the
        # original bare mainloop() call raised NameError.
        from Tkinter import mainloop
        mainloop()

if __name__ == "__main__":
    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/Percolator.py b/depot_tools/release/win/python_24/Lib/idlelib/Percolator.py
new file mode 100644
index 0000000..ebbcba9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/Percolator.py
@@ -0,0 +1,85 @@
+from WidgetRedirector import WidgetRedirector
+from Delegator import Delegator
+
class Percolator:
    """Route Text insert/delete calls through a stack of Delegator filters.

    The bottom of the stack performs the real widget operation (via a
    WidgetRedirector); filters pushed on top may observe or rewrite
    each insert/delete before passing it down the chain.
    """

    def __init__(self, text):
        # XXX would be nice to inherit from Delegator
        self.text = text
        self.redir = WidgetRedirector(text)
        self.top = self.bottom = Delegator(text)
        # The bottom delegator's insert/delete perform the actual
        # widget operations that the redirector intercepted.
        self.bottom.insert = self.redir.register("insert", self.insert)
        self.bottom.delete = self.redir.register("delete", self.delete)
        self.filters = []

    def close(self):
        """Pop every filter, then detach from the widget entirely."""
        while self.top is not self.bottom:
            self.removefilter(self.top)
        self.top = None
        self.bottom.setdelegate(None); self.bottom = None
        self.redir.close(); self.redir = None
        self.text = None

    def insert(self, index, chars, tags=None):
        # Could go away if inheriting from Delegator
        self.top.insert(index, chars, tags)

    def delete(self, index1, index2=None):
        # Could go away if inheriting from Delegator
        self.top.delete(index1, index2)

    def insertfilter(self, filter):
        # Perhaps rename to pushfilter()?
        assert isinstance(filter, Delegator)
        assert filter.delegate is None
        filter.setdelegate(self.top)
        self.top = filter

    def removefilter(self, filter):
        # XXX Perhaps should only support popfilter()?
        assert isinstance(filter, Delegator)
        assert filter.delegate is not None
        f = self.top
        if f is filter:
            # Removing the top of the stack.
            self.top = filter.delegate
            filter.setdelegate(None)
        else:
            # Walk down to the filter just above the one being removed,
            # invalidating cached lookups on the way, then splice it out.
            while f.delegate is not filter:
                assert f is not self.bottom
                f.resetcache()
                f = f.delegate
            f.setdelegate(filter.delegate)
            filter.setdelegate(None)
+
+
+def main():
+    class Tracer(Delegator):
+        def __init__(self, name):
+            self.name = name
+            Delegator.__init__(self, None)
+        def insert(self, *args):
+            print self.name, ": insert", args
+            self.delegate.insert(*args)
+        def delete(self, *args):
+            print self.name, ": delete", args
+            self.delegate.delete(*args)
+    root = Tk()
+    root.wm_protocol("WM_DELETE_WINDOW", root.quit)
+    text = Text()
+    text.pack()
+    text.focus_set()
+    p = Percolator(text)
+    t1 = Tracer("t1")
+    t2 = Tracer("t2")
+    p.insertfilter(t1)
+    p.insertfilter(t2)
+    root.mainloop()
+    p.removefilter(t2)
+    root.mainloop()
+    p.insertfilter(t2)
+    p.removefilter(t1)
+    root.mainloop()
+
+if __name__ == "__main__":
+    from Tkinter import *
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/PyParse.py b/depot_tools/release/win/python_24/Lib/idlelib/PyParse.py
new file mode 100644
index 0000000..422a86cc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/PyParse.py
@@ -0,0 +1,584 @@
+import re
+import sys
+
# Reason last stmt is continued (or C_NONE if it's not).
C_NONE, C_BACKSLASH, C_STRING, C_BRACKET = range(4)

# Flip "if 0" to "if 1" to enable the debug dump during development.
if 0:   # for throwaway debugging output
    def dump(*stuff):
        sys.__stdout__.write(" ".join(map(str, stuff)) + "\n")

# Find what looks like the start of a popular stmt.

_synchre = re.compile(r"""
    ^
    [ \t]*
    (?: if
    |   for
    |   while
    |   else
    |   def
    |   return
    |   assert
    |   break
    |   class
    |   continue
    |   elif
    |   try
    |   except
    |   raise
    |   import
    |   yield
    )
    \b
""", re.VERBOSE | re.MULTILINE).search

# Match blank line or non-indenting comment line.

_junkre = re.compile(r"""
    [ \t]*
    (?: \# \S .* )?
    \n
""", re.VERBOSE).match

# Match any flavor of string; the terminating quote is optional
# so that we're robust in the face of incomplete program text.

_match_stringre = re.compile(r"""
    \""" [^"\\]* (?:
                     (?: \\. | "(?!"") )
                     [^"\\]*
                 )*
    (?: \""" )?

|   " [^"\\\n]* (?: \\. [^"\\\n]* )* "?

|   ''' [^'\\]* (?:
                   (?: \\. | '(?!'') )
                   [^'\\]*
                )*
    (?: ''' )?

|   ' [^'\\\n]* (?: \\. [^'\\\n]* )* '?
""", re.VERBOSE | re.DOTALL).match

# Match a line that starts with something interesting;
# used to find the first item of a bracket structure.

_itemre = re.compile(r"""
    [ \t]*
    [^\s#\\]    # if we match, m.end()-1 is the interesting char
""", re.VERBOSE).match

# Match start of stmts that should be followed by a dedent.

_closere = re.compile(r"""
    \s*
    (?: return
    |   break
    |   continue
    |   raise
    |   pass
    )
    \b
""", re.VERBOSE).match

# Chew up non-special chars as quickly as possible.  If match is
# successful, m.end() less 1 is the index of the last boring char
# matched.  If match is unsuccessful, the string starts with an
# interesting char.

_chew_ordinaryre = re.compile(r"""
    [^[\](){}#'"\\]+
""", re.VERBOSE).match

# Build translation table to map uninteresting chars to "x", open
# brackets to "(", and close brackets to ")".
# Quotes, backslash, newline and "#" are preserved so string,
# continuation and comment structure survives the squashing.

_tran = ['x'] * 256
for ch in "({[":
    _tran[ord(ch)] = '('
for ch in ")}]":
    _tran[ord(ch)] = ')'
for ch in "\"'\\\n#":
    _tran[ord(ch)] = ch
_tran = ''.join(_tran)
del ch

# Python may be built without Unicode support; degrade gracefully.
try:
    UnicodeType = type(unicode(""))
except NameError:
    UnicodeType = None
+
+class Parser:
+
    def __init__(self, indentwidth, tabwidth):
        # Tab/indent widths are needed to interpret leading whitespace.
        self.indentwidth = indentwidth
        self.tabwidth = tabwidth
+
    def set_str(self, str):
        """Set the program text; must be empty or end with a newline."""
        assert len(str) == 0 or str[-1] == '\n'
        if type(str) is UnicodeType:
            # The parse functions have no idea what to do with Unicode, so
            # replace all Unicode characters with "x".  This is "safe"
            # so long as the only characters germane to parsing the structure
            # of Python are 7-bit ASCII.  It's *necessary* because Unicode
            # strings don't have a .translate() method that supports
            # deletechars.
            uniphooey = str
            str = []
            push = str.append
            for raw in map(ord, uniphooey):
                push(raw < 127 and chr(raw) or "x")
            str = "".join(str)
        self.str = str
        # Invalidate any previous analysis of the old text.
        self.study_level = 0
+
+    # Return index of a good place to begin parsing, as close to the
+    # end of the string as possible.  This will be the start of some
+    # popular stmt like "if" or "def".  Return None if none found:
+    # the caller should pass more prior context then, if possible, or
+    # if not (the entire program text up until the point of interest
+    # has already been tried) pass 0 to set_lo.
+    #
+    # This will be reliable iff given a reliable is_char_in_string
+    # function, meaning that when it says "no", it's absolutely
+    # guaranteed that the char is not in a string.
+    #
+    # Ack, hack: in the shell window this kills us, because there's
+    # no way to tell the differences between output, >>> etc and
+    # user input.  Indeed, IDLE's first output line makes the rest
+    # look like it's in an unclosed paren!:
+    # Python 1.5.2 (#0, Apr 13 1999, ...
+
    def find_good_parse_start(self, use_ps1, is_char_in_string=None,
                              _synchre=_synchre):
        """Return an index at which parsing can safely begin, or None.

        See the comment block above this method for the full contract.
        """
        str, pos = self.str, None
        if use_ps1:
            # shell window
            ps1 = '\n' + sys.ps1
            i = str.rfind(ps1)
            if i >= 0:
                pos = i + len(ps1)
                # make it look like there's a newline instead
                # of ps1 at the start -- hacking here once avoids
                # repeated hackery later
                self.str = str[:pos-1] + '\n' + str[pos:]
            return pos

        # File window -- real work.
        if not is_char_in_string:
            # no clue -- make the caller pass everything
            return None

        # Peek back from the end for a good place to start,
        # but don't try too often; pos will be left None, or
        # bumped to a legitimate synch point.
        limit = len(str)
        for tries in range(5):
            i = str.rfind(":\n", 0, limit)
            if i < 0:
                break
            i = str.rfind('\n', 0, i) + 1  # start of colon line
            m = _synchre(str, i, limit)
            if m and not is_char_in_string(m.start()):
                pos = m.start()
                break
            limit = i
        if pos is None:
            # Nothing looks like a block-opener, or stuff does
            # but is_char_in_string keeps returning true; most likely
            # we're in or near a giant string, the colorizer hasn't
            # caught up enough to be helpful, or there simply *aren't*
            # any interesting stmts.  In any of these cases we're
            # going to have to parse the whole thing to be sure, so
            # give it one last try from the start, but stop wasting
            # time here regardless of the outcome.
            m = _synchre(str)
            if m and not is_char_in_string(m.start()):
                pos = m.start()
            return pos

        # Peeking back worked; look forward until _synchre no longer
        # matches.
        i = pos + 1
        while 1:
            m = _synchre(str, i)
            if m:
                s, i = m.span()
                if not is_char_in_string(s):
                    pos = s
            else:
                break
        return pos
+
+    # Throw away the start of the string.  Intended to be called with
+    # find_good_parse_start's result.
+
    def set_lo(self, lo):
        # lo must be 0 or the index just past a newline; discard the
        # prefix so subsequent passes have less text to study.
        assert lo == 0 or self.str[lo-1] == '\n'
        if lo > 0:
            self.str = self.str[lo:]
+
+    # As quickly as humanly possible <wink>, find the line numbers (0-
+    # based) of the non-continuation lines.
+    # Creates self.{goodlines, continuation}.
+
    def _study1(self):
        """Cheap first pass: locate the non-continuation lines.

        Sets self.goodlines (0-based numbers of lines that start a new
        stmt, plus a final sentinel entry) and self.continuation (the
        C_* reason the last stmt is continued, or C_NONE).  Idempotent
        via self.study_level.
        """
        if self.study_level >= 1:
            return
        self.study_level = 1

        # Map all uninteresting characters to "x", all open brackets
        # to "(", all close brackets to ")", then collapse runs of
        # uninteresting characters.  This can cut the number of chars
        # by a factor of 10-40, and so greatly speed the following loop.
        str = self.str
        str = str.translate(_tran)
        str = str.replace('xxxxxxxx', 'x')
        str = str.replace('xxxx', 'x')
        str = str.replace('xx', 'x')
        str = str.replace('xx', 'x')
        str = str.replace('\nx', '\n')
        # note that replacing x\n with \n would be incorrect, because
        # x may be preceded by a backslash

        # March over the squashed version of the program, accumulating
        # the line numbers of non-continued stmts, and determining
        # whether & why the last stmt is a continuation.
        continuation = C_NONE
        level = lno = 0     # level is nesting level; lno is line number
        self.goodlines = goodlines = [0]
        push_good = goodlines.append
        i, n = 0, len(str)
        while i < n:
            ch = str[i]
            i = i+1

            # cases are checked in decreasing order of frequency
            if ch == 'x':
                continue

            if ch == '\n':
                lno = lno + 1
                if level == 0:
                    push_good(lno)
                    # else we're in an unclosed bracket structure
                continue

            if ch == '(':
                level = level + 1
                continue

            if ch == ')':
                if level:
                    level = level - 1
                    # else the program is invalid, but we can't complain
                continue

            if ch == '"' or ch == "'":
                # consume the string
                quote = ch
                if str[i-1:i+2] == quote * 3:
                    quote = quote * 3
                w = len(quote) - 1
                i = i+w
                while i < n:
                    ch = str[i]
                    i = i+1

                    if ch == 'x':
                        continue

                    if str[i-1:i+w] == quote:
                        i = i+w
                        break

                    if ch == '\n':
                        lno = lno + 1
                        if w == 0:
                            # unterminated single-quoted string
                            if level == 0:
                                push_good(lno)
                            break
                        continue

                    if ch == '\\':
                        assert i < n
                        if str[i] == '\n':
                            lno = lno + 1
                        i = i+1
                        continue

                    # else comment char or paren inside string

                else:
                    # didn't break out of the loop, so we're still
                    # inside a string
                    continuation = C_STRING
                continue    # with outer loop

            if ch == '#':
                # consume the comment
                i = str.find('\n', i)
                assert i >= 0
                continue

            assert ch == '\\'
            assert i < n
            if str[i] == '\n':
                lno = lno + 1
                if i+1 == n:
                    continuation = C_BACKSLASH
            i = i+1

        # The last stmt may be continued for all 3 reasons.
        # String continuation takes precedence over bracket
        # continuation, which beats backslash continuation.
        if continuation != C_STRING and level > 0:
            continuation = C_BRACKET
        self.continuation = continuation

        # Push the final line number as a sentinel value, regardless of
        # whether it's continued.
        assert (continuation == C_NONE) == (goodlines[-1] == lno)
        if goodlines[-1] != lno:
            push_good(lno)
+
+    def get_continuation_type(self):
+        self._study1()
+        return self.continuation
+
+    # study1 was sufficient to determine the continuation status,
+    # but doing more requires looking at every character.  study2
+    # does this for the last interesting statement in the block.
+    # Creates:
+    #     self.stmt_start, stmt_end
+    #         slice indices of last interesting stmt
+    #     self.lastch
+    #         last non-whitespace character before optional trailing
+    #         comment
+    #     self.lastopenbracketpos
+    #         if continuation is C_BRACKET, index of last open bracket
+
+    def _study2(self):
+        """Locate and analyze the last interesting statement in the block.
+
+        Builds on _study1() and sets:
+            self.stmt_start, self.stmt_end -- slice indices of that stmt
+            self.lastch -- last non-whitespace char before trailing comment
+            self.lastopenbracketpos -- index of last open bracket, when any
+                bracket is left open at the end of the stmt
+        Idempotent: returns immediately once study_level >= 2.
+        """
+        if self.study_level >= 2:
+            return
+        self._study1()
+        self.study_level = 2
+
+        # Set p and q to slice indices of last interesting stmt.
+        str, goodlines = self.str, self.goodlines
+        i = len(goodlines) - 1
+        p = len(str)    # index of newest line
+        while i:
+            assert p
+            # p is the index of the stmt at line number goodlines[i].
+            # Move p back to the stmt at line number goodlines[i-1].
+            q = p
+            for nothing in range(goodlines[i-1], goodlines[i]):
+                # tricky: sets p to 0 if no preceding newline
+                p = str.rfind('\n', 0, p-1) + 1
+            # The stmt str[p:q] isn't a continuation, but may be blank
+            # or a non-indenting comment line.
+            if  _junkre(str, p):
+                i = i-1
+            else:
+                break
+        if i == 0:
+            # nothing but junk!
+            assert p == 0
+            q = p
+        self.stmt_start, self.stmt_end = p, q
+
+        # Analyze this stmt, to find the last open bracket (if any)
+        # and last interesting character (if any).
+        lastch = ""
+        stack = []  # stack of open bracket indices
+        push_stack = stack.append
+        while p < q:
+            # suck up all except ()[]{}'"#\\
+            m = _chew_ordinaryre(str, p, q)
+            if m:
+                # we skipped at least one boring char
+                newp = m.end()
+                # back up over totally boring whitespace
+                i = newp - 1    # index of last boring char
+                while i >= p and str[i] in " \t\n":
+                    i = i-1
+                if i >= p:
+                    lastch = str[i]
+                p = newp
+                if p >= q:
+                    break
+
+            ch = str[p]
+
+            if ch in "([{":
+                push_stack(p)
+                lastch = ch
+                p = p+1
+                continue
+
+            if ch in ")]}":
+                # NOTE: closers are popped without checking that they match
+                # the opener's kind; invalid programs are tolerated silently.
+                if stack:
+                    del stack[-1]
+                lastch = ch
+                p = p+1
+                continue
+
+            if ch == '"' or ch == "'":
+                # consume string
+                # Note that study1 did this with a Python loop, but
+                # we use a regexp here; the reason is speed in both
+                # cases; the string may be huge, but study1 pre-squashed
+                # strings to a couple of characters per line.  study1
+                # also needed to keep track of newlines, and we don't
+                # have to.
+                lastch = ch
+                p = _match_stringre(str, p, q).end()
+                continue
+
+            if ch == '#':
+                # consume comment and trailing newline
+                p = str.find('\n', p, q) + 1
+                assert p > 0
+                continue
+
+            assert ch == '\\'
+            p = p+1     # beyond backslash
+            assert p < q
+            if str[p] != '\n':
+                # the program is invalid, but can't complain
+                lastch = ch + str[p]
+            p = p+1     # beyond escaped char
+
+        # end while p < q:
+
+        self.lastch = lastch
+        if stack:
+            self.lastopenbracketpos = stack[-1]
+
+    # Assuming continuation is C_BRACKET, return the number
+    # of spaces the next line should be indented.
+
+    def compute_bracket_indent(self):
+        """Return the number of spaces the next line should be indented,
+        assuming the continuation kind is C_BRACKET.
+
+        If something interesting follows the last open bracket on its own
+        line, line the next line up under it; otherwise indent one level
+        (self.indentwidth) past the bracket line's indentation.
+        """
+        self._study2()
+        assert self.continuation == C_BRACKET
+        j = self.lastopenbracketpos
+        str = self.str
+        n = len(str)
+        origi = i = str.rfind('\n', 0, j) + 1
+        j = j+1     # one beyond open bracket
+        # find first list item; set i to start of its line
+        while j < n:
+            m = _itemre(str, j)
+            if m:
+                j = m.end() - 1     # index of first interesting char
+                extra = 0
+                break
+            else:
+                # this line is junk; advance to next line
+                i = j = str.find('\n', j) + 1
+        else:
+            # nothing interesting follows the bracket;
+            # reproduce the bracket line's indentation + a level
+            j = i = origi
+            while str[j] in " \t":
+                j = j+1
+            extra = self.indentwidth
+        # expandtabs so tabs in the prefix count as tabwidth columns
+        return len(str[i:j].expandtabs(self.tabwidth)) + extra
+
+    # Return number of physical lines in last stmt (whether or not
+    # it's an interesting stmt!  this is intended to be called when
+    # continuation is C_BACKSLASH).
+
+    def get_num_lines_in_stmt(self):
+        self._study1()
+        goodlines = self.goodlines
+        return goodlines[-1] - goodlines[-2]
+
+    # Assuming continuation is C_BACKSLASH, return the number of spaces
+    # the next line should be indented.  Also assuming the new line is
+    # the first one following the initial line of the stmt.
+
+    def compute_backslash_indent(self):
+        """Return the number of spaces the next line should be indented,
+        assuming the continuation kind is C_BACKSLASH and the new line is
+        the first one following the initial line of the stmt.
+
+        If the initial line contains a top-level assignment '=', indent one
+        column past it; otherwise indent just past the first chunk of
+        non-whitespace characters.
+        """
+        self._study2()
+        assert self.continuation == C_BACKSLASH
+        str = self.str
+        i = self.stmt_start
+        while str[i] in " \t":
+            i = i+1
+        startpos = i
+
+        # See whether the initial line starts an assignment stmt; i.e.,
+        # look for an = operator
+        endpos = str.find('\n', startpos) + 1
+        found = level = 0
+        while i < endpos:
+            ch = str[i]
+            if ch in "([{":
+                level = level + 1
+                i = i+1
+            elif ch in ")]}":
+                if level:
+                    level = level - 1
+                i = i+1
+            elif ch == '"' or ch == "'":
+                i = _match_stringre(str, i, endpos).end()
+            elif ch == '#':
+                break
+            # A lone '=' outside brackets, not part of ==, <=, >=, != or
+            # an '==' starting at i.
+            # NOTE(review): str[i+1] looks safe only because the line ends
+            # in backslash+newline, so '=' is never the last char -- confirm.
+            elif level == 0 and ch == '=' and \
+                   (i == 0 or str[i-1] not in "=<>!") and \
+                   str[i+1] != '=':
+                found = 1
+                break
+            else:
+                i = i+1
+
+        if found:
+            # found a legit =, but it may be the last interesting
+            # thing on the line
+            i = i+1     # move beyond the =
+            found = re.match(r"\s*\\", str[i:endpos]) is None
+
+        if not found:
+            # oh well ... settle for moving beyond the first chunk
+            # of non-whitespace chars
+            i = startpos
+            while str[i] not in " \t\n":
+                i = i+1
+
+        return len(str[self.stmt_start:i].expandtabs(\
+                                     self.tabwidth)) + 1
+
+    # Return the leading whitespace on the initial line of the last
+    # interesting stmt.
+
+    def get_base_indent_string(self):
+        self._study2()
+        i, n = self.stmt_start, self.stmt_end
+        j = i
+        str = self.str
+        while j < n and str[j] in " \t":
+            j = j + 1
+        return str[i:j]
+
+    # Did the last interesting stmt open a block?
+
+    def is_block_opener(self):
+        self._study2()
+        return self.lastch == ':'
+
+    # Did the last interesting stmt close a block?
+
+    def is_block_closer(self):
+        """Return true if the last interesting stmt closes a block (it
+        matches the _closere pattern, e.g. return/break/continue/pass/raise
+        -- whatever _closere is compiled to match)."""
+        self._study2()
+        return _closere(self.str, self.stmt_start) is not None
+
+    # Index of the last open bracket ( { [ in the last interesting stmt,
+    # or None if none is open; class default overridden by _study2().
+    lastopenbracketpos = None
+
+    def get_last_open_bracket_pos(self):
+        """Return the index of the last open bracket, or None."""
+        self._study2()
+        return self.lastopenbracketpos
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/PyShell.py b/depot_tools/release/win/python_24/Lib/idlelib/PyShell.py
new file mode 100644
index 0000000..387b5375
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/PyShell.py
@@ -0,0 +1,1388 @@
+#! /usr/bin/env python
+
+import os
+import os.path
+import sys
+import string
+import getopt
+import re
+import socket
+import time
+import threading
+import traceback
+import types
+import exceptions
+
+import linecache
+from code import InteractiveInterpreter
+
+try:
+    from Tkinter import *
+except ImportError:
+    print>>sys.__stderr__, "** IDLE can't import Tkinter.  " \
+                           "Your Python may not be configured for Tk. **"
+    sys.exit(1)
+import tkMessageBox
+
+from EditorWindow import EditorWindow, fixwordbreaks
+from FileList import FileList
+from ColorDelegator import ColorDelegator
+from UndoDelegator import UndoDelegator
+from OutputWindow import OutputWindow
+from configHandler import idleConf
+import idlever
+
+import rpc
+import Debugger
+import RemoteDebugger
+
+IDENTCHARS = string.ascii_letters + string.digits + "_"
+LOCALHOST = '127.0.0.1'
+
+try:
+    from signal import SIGTERM
+except ImportError:
+    SIGTERM = 15
+
+# Override warnings module to write to warning_stream.  Initialize to send IDLE
+# internal warnings to the console.  ScriptBinding.check_syntax() will
+# temporarily redirect the stream to the shell window to display warnings when
+# checking user's code.
+# NOTE: 'global' at module scope is a no-op in Python; the statement is kept
+# only to document that other code rebinds warning_stream.
+global warning_stream
+warning_stream = sys.__stderr__
+try:
+    import warnings
+except ImportError:
+    pass
+else:
+    def idle_showwarning(message, category, filename, lineno):
+        """Replacement for warnings.showwarning: write to warning_stream."""
+        file = warning_stream
+        try:
+            file.write(warnings.formatwarning(message, category, filename, lineno))
+        except IOError:
+            pass  ## file (probably __stderr__) is invalid, warning dropped.
+    warnings.showwarning = idle_showwarning
+    def idle_formatwarning(message, category, filename, lineno):
+        """Format warnings the IDLE way"""
+        s = "\nWarning (from warnings module):\n"
+        s += '  File \"%s\", line %s\n' % (filename, lineno)
+        # Show the offending source line when linecache can find it.
+        line = linecache.getline(filename, lineno).strip()
+        if line:
+            s += "    %s\n" % line
+        s += "%s: %s\n>>> " % (category.__name__, message)
+        return s
+    warnings.formatwarning = idle_formatwarning
+
+def extended_linecache_checkcache(filename=None,
+                                  orig_checkcache=linecache.checkcache):
+    """Extend linecache.checkcache to preserve the <pyshell#...> entries
+
+    Rather than repeating the linecache code, patch it to save the
+    <pyshell#...> entries, call the original linecache.checkcache()
+    (which destroys them), and then restore the saved entries.
+
+    orig_checkcache is bound at definition time to the original
+    method, allowing it to be patched.
+
+    NOTE(review): the 'filename' parameter is accepted only for signature
+    compatibility with linecache.checkcache; it is immediately shadowed by
+    the loop variable below and never forwarded to orig_checkcache().
+    """
+    cache = linecache.cache
+    save = {}
+    # Entries whose pseudo-filename is wrapped in <> are shell inputs.
+    for filename in cache.keys():
+        if filename[:1] + filename[-1:] == '<>':
+            save[filename] = cache[filename]
+    orig_checkcache()
+    cache.update(save)
+
+# Patch linecache.checkcache():
+linecache.checkcache = extended_linecache_checkcache
+
+
+class PyShellEditorWindow(EditorWindow):
+    "Regular text edit window in IDLE, supports breakpoints"
+
+    def __init__(self, *args):
+        # List of breakpoint line numbers for the file in this window.
+        self.breakpoints = []
+        EditorWindow.__init__(self, *args)
+        self.text.bind("<<set-breakpoint-here>>", self.set_breakpoint_here)
+        self.text.bind("<<clear-breakpoint-here>>", self.clear_breakpoint_here)
+        self.text.bind("<<open-python-shell>>", self.flist.open_shell)
+
+        # Persistent store of breakpoints, shared across sessions.
+        self.breakpointPath = os.path.join(idleConf.GetUserCfgDir(),
+                                           'breakpoints.lst')
+        # whenever a file is changed, restore breakpoints
+        if self.io.filename: self.restore_file_breaks()
+        def filename_changed_hook(old_hook=self.io.filename_change_hook,
+                                  self=self):
+            self.restore_file_breaks()
+            old_hook()
+        self.io.set_filename_change_hook(filename_changed_hook)
+
+    # Right-click menu entries added by this subclass.
+    rmenu_specs = [("Set Breakpoint", "<<set-breakpoint-here>>"),
+                   ("Clear Breakpoint", "<<clear-breakpoint-here>>")]
+
+    def set_breakpoint(self, lineno):
+        """Tag line lineno with "BREAK" and record it in self.breakpoints;
+        best-effort notify the subprocess debugger."""
+        text = self.text
+        filename = self.io.filename
+        text.tag_add("BREAK", "%d.0" % lineno, "%d.0" % (lineno+1))
+        try:
+            i = self.breakpoints.index(lineno)
+        except ValueError:  # only add if missing, i.e. do once
+            self.breakpoints.append(lineno)
+        try:    # update the subprocess debugger
+            debug = self.flist.pyshell.interp.debugger
+            debug.set_breakpoint_here(filename, lineno)
+        except: # but debugger may not be active right now....
+            pass
+
+    def set_breakpoint_here(self, event=None):
+        """Event handler: set a breakpoint on the line holding the cursor."""
+        text = self.text
+        filename = self.io.filename
+        if not filename:
+            text.bell()
+            return
+        lineno = int(float(text.index("insert")))
+        self.set_breakpoint(lineno)
+
+    def clear_breakpoint_here(self, event=None):
+        """Event handler: clear the breakpoint on the cursor's line."""
+        text = self.text
+        filename = self.io.filename
+        if not filename:
+            text.bell()
+            return
+        lineno = int(float(text.index("insert")))
+        try:
+            self.breakpoints.remove(lineno)
+        except:
+            pass
+        text.tag_remove("BREAK", "insert linestart",\
+                        "insert lineend +1char")
+        try:
+            debug = self.flist.pyshell.interp.debugger
+            debug.clear_breakpoint_here(filename, lineno)
+        except:
+            pass
+
+    def clear_file_breaks(self):
+        """Remove every breakpoint in this window, locally and (best-effort)
+        in the subprocess debugger."""
+        if self.breakpoints:
+            text = self.text
+            filename = self.io.filename
+            if not filename:
+                text.bell()
+                return
+            self.breakpoints = []
+            text.tag_remove("BREAK", "1.0", END)
+            try:
+                debug = self.flist.pyshell.interp.debugger
+                debug.clear_file_breaks(filename)
+            except:
+                pass
+
+    def store_file_breaks(self):
+        "Save breakpoints when file is saved"
+        # XXX 13 Dec 2002 KBK Currently the file must be saved before it can
+        #     be run.  The breaks are saved at that time.  If we introduce
+        #     a temporary file save feature the save breaks functionality
+        #     needs to be re-verified, since the breaks at the time the
+        #     temp file is created may differ from the breaks at the last
+        #     permanent save of the file.  Currently, a break introduced
+        #     after a save will be effective, but not persistent.
+        #     This is necessary to keep the saved breaks synched with the
+        #     saved file.
+        #
+        #     Breakpoints are set as tagged ranges in the text.  Certain
+        #     kinds of edits cause these ranges to be deleted: Inserting
+        #     or deleting a line just before a breakpoint, and certain
+        #     deletions prior to a breakpoint.  These issues need to be
+        #     investigated and understood.  It's not clear if they are
+        #     Tk issues or IDLE issues, or whether they can actually
+        #     be fixed.  Since a modified file has to be saved before it is
+        #     run, and since self.breakpoints (from which the subprocess
+        #     debugger is loaded) is updated during the save, the visible
+        #     breaks stay synched with the subprocess even if one of these
+        #     unexpected breakpoint deletions occurs.
+        breaks = self.breakpoints
+        filename = self.io.filename
+        try:
+            lines = open(self.breakpointPath,"r").readlines()
+        except IOError:
+            lines = []
+        # Rewrite the store, dropping this file's old entry.
+        new_file = open(self.breakpointPath,"w")
+        for line in lines:
+            if not line.startswith(filename + '='):
+                new_file.write(line)
+        self.update_breakpoints()
+        breaks = self.breakpoints
+        if breaks:
+            new_file.write(filename + '=' + str(breaks) + '\n')
+        new_file.close()
+
+    def restore_file_breaks(self):
+        """Re-apply this file's saved breakpoints from breakpointPath."""
+        self.text.update()   # this enables setting "BREAK" tags to be visible
+        filename = self.io.filename
+        if filename is None:
+            return
+        if os.path.isfile(self.breakpointPath):
+            lines = open(self.breakpointPath,"r").readlines()
+            for line in lines:
+                if line.startswith(filename + '='):
+                    # NOTE(review): eval() on the stored line; the file lives
+                    # in the user's config dir and is written by IDLE itself.
+                    breakpoint_linenumbers = eval(line[len(filename)+1:])
+                    for breakpoint_linenumber in breakpoint_linenumbers:
+                        self.set_breakpoint(breakpoint_linenumber)
+
+    def update_breakpoints(self):
+        "Retrieves all the breakpoints in the current window"
+        text = self.text
+        ranges = text.tag_ranges("BREAK")
+        linenumber_list = self.ranges_to_linenumbers(ranges)
+        self.breakpoints = linenumber_list
+
+    def ranges_to_linenumbers(self, ranges):
+        """Convert a flat (start, end, start, end, ...) Tk tag-range sequence
+        into a list of the line numbers it covers."""
+        lines = []
+        for index in range(0, len(ranges), 2):
+            lineno = int(float(ranges[index]))
+            end = int(float(ranges[index+1]))
+            while lineno < end:
+                lines.append(lineno)
+                lineno += 1
+        return lines
+
+# XXX 13 Dec 2002 KBK Not used currently
+#    def saved_change_hook(self):
+#        "Extend base method - clear breaks if module is modified"
+#        if not self.get_saved():
+#            self.clear_file_breaks()
+#        EditorWindow.saved_change_hook(self)
+
+    def _close(self):
+        "Extend base method - clear breaks when module is closed"
+        self.clear_file_breaks()
+        EditorWindow._close(self)
+
+
+class PyShellFileList(FileList):
+    "Extend base class: IDLE supports a shell and breakpoints"
+
+    # override FileList's class variable, instances return PyShellEditorWindow
+    # instead of EditorWindow when new edit windows are created.
+    EditorWindow = PyShellEditorWindow
+
+    # The single shared shell window, created lazily by open_shell().
+    pyshell = None
+
+    def open_shell(self, event=None):
+        """Raise the existing shell window, or create and start one.
+        Returns the PyShell instance, or None if begin() failed."""
+        if self.pyshell:
+            self.pyshell.top.wakeup()
+        else:
+            self.pyshell = PyShell(self)
+            if self.pyshell:
+                if not self.pyshell.begin():
+                    return None
+        return self.pyshell
+
+
+class ModifiedColorDelegator(ColorDelegator):
+    "Extend base class: colorizer for the shell window itself"
+
+    def __init__(self):
+        ColorDelegator.__init__(self)
+        self.LoadTagDefs()
+
+    def recolorize_main(self):
+        """Recolorize only text past the I/O mark; everything before it is
+        already-final shell history."""
+        self.tag_remove("TODO", "1.0", "iomark")
+        self.tag_add("SYNC", "1.0", "iomark")
+        ColorDelegator.recolorize_main(self)
+
+    def LoadTagDefs(self):
+        """Extend base tag definitions with shell-specific stream tags
+        (stdin/stdout/stderr/console) from the current theme."""
+        ColorDelegator.LoadTagDefs(self)
+        theme = idleConf.GetOption('main','Theme','name')
+        self.tagdefs.update({
+            "stdin": {'background':None,'foreground':None},
+            "stdout": idleConf.GetHighlight(theme, "stdout"),
+            "stderr": idleConf.GetHighlight(theme, "stderr"),
+            "console": idleConf.GetHighlight(theme, "console"),
+            None: idleConf.GetHighlight(theme, "normal"),
+        })
+
+class ModifiedUndoDelegator(UndoDelegator):
+    "Extend base class: forbid insert/delete before the I/O mark"
+
+    def insert(self, index, chars, tags=None):
+        # Bell and refuse if the insert point is before "iomark".
+        try:
+            if self.delegate.compare(index, "<", "iomark"):
+                self.delegate.bell()
+                return
+        except TclError:
+            pass
+        UndoDelegator.insert(self, index, chars, tags)
+
+    def delete(self, index1, index2=None):
+        # Bell and refuse if the deletion starts before "iomark".
+        try:
+            if self.delegate.compare(index1, "<", "iomark"):
+                self.delegate.bell()
+                return
+        except TclError:
+            pass
+        UndoDelegator.delete(self, index1, index2)
+
+
+class MyRPCClient(rpc.RPCClient):
+
+    def handle_EOF(self):
+        "Override the base class - just re-raise EOFError"
+        # The caller (poll_subprocess) catches EOFError and restarts
+        # the subprocess.
+        raise EOFError
+
+
+class ModifiedInterpreter(InteractiveInterpreter):
+
+    def __init__(self, tkconsole):
+        """Create an interpreter bound to the given PyShell console.
+        User code executes in __main__'s namespace."""
+        self.tkconsole = tkconsole
+        locals = sys.modules['__main__'].__dict__
+        InteractiveInterpreter.__init__(self, locals=locals)
+        self.save_warnings_filters = None
+        self.restarting = False
+        self.subprocess_arglist = self.build_subprocess_arglist()
+
+    # TCP port the execution server listens on (localhost only).
+    port = 8833
+    # RPC client connected to the subprocess; None until started.
+    rpcclt = None
+    # PID of the spawned execution subprocess; None until spawned.
+    rpcpid = None
+
+    def spawn_subprocess(self):
+        """Launch the execution server subprocess without waiting for it."""
+        args = self.subprocess_arglist
+        self.rpcpid = os.spawnv(os.P_NOWAIT, sys.executable, args)
+
+    def build_subprocess_arglist(self):
+        """Build the argv used to spawn the execution server: the current
+        interpreter, inherited -W options, and a -c command that starts
+        run.main() listening on self.port."""
+        w = ['-W' + s for s in sys.warnoptions]
+        # Maybe IDLE is installed and is being accessed via sys.path,
+        # or maybe it's not installed and the idle.py script is being
+        # run from the IDLE source directory.
+        del_exitf = idleConf.GetOption('main', 'General', 'delete-exitfunc',
+                                       default=False, type='bool')
+        if __name__ == 'idlelib.PyShell':
+            command = "__import__('idlelib.run').run.main(%r)" % (del_exitf,)
+        else:
+            command = "__import__('run').main(%r)" % (del_exitf,)
+        if sys.platform[:3] == 'win' and ' ' in sys.executable:
+            # handle embedded space in path by quoting the argument
+            decorated_exec = '"%s"' % sys.executable
+        else:
+            decorated_exec = sys.executable
+        return [decorated_exec] + w + ["-c", command, str(self.port)]
+
+    def start_subprocess(self):
+        """Spawn the execution server, connect to it over RPC, register the
+        local objects it may call back into, and start polling.
+
+        Returns the RPC client, or None if the connection or the accept
+        handshake fails (an error dialog is shown in that case)."""
+        # spawning first avoids passing a listening socket to the subprocess
+        self.spawn_subprocess()
+        #time.sleep(20) # test to simulate GUI not accepting connection
+        addr = (LOCALHOST, self.port)
+        # Idle starts listening for connection on localhost
+        # Retry with increasing back-off (0s, 1s, 2s) in case the port
+        # is momentarily unavailable.
+        for i in range(3):
+            time.sleep(i)
+            try:
+                self.rpcclt = MyRPCClient(addr)
+                break
+            except socket.error, err:
+                pass
+        else:
+            self.display_port_binding_error()
+            return None
+        # Accept the connection from the Python execution server
+        self.rpcclt.listening_sock.settimeout(10)
+        try:
+            self.rpcclt.accept()
+        except socket.timeout, err:
+            self.display_no_subprocess_error()
+            return None
+        self.rpcclt.register("stdin", self.tkconsole)
+        self.rpcclt.register("stdout", self.tkconsole.stdout)
+        self.rpcclt.register("stderr", self.tkconsole.stderr)
+        self.rpcclt.register("flist", self.tkconsole.flist)
+        self.rpcclt.register("linecache", linecache)
+        self.rpcclt.register("interp", self)
+        self.transfer_path()
+        self.poll_subprocess()
+        return self.rpcclt
+
+    def restart_subprocess(self):
+        """Kill the execution server and start a fresh one, reusing the
+        existing RPC client's listening socket.  Re-entrant calls return
+        the current client immediately (guarded by self.restarting).
+        Annotates the shell window with a RESTART banner and reloads
+        debugger breakpoints if a debugger was active."""
+        if self.restarting:
+            return self.rpcclt
+        self.restarting = True
+        # close only the subprocess debugger
+        debug = self.getdebugger()
+        if debug:
+            try:
+                # Only close subprocess debugger, don't unregister gui_adap!
+                RemoteDebugger.close_subprocess_debugger(self.rpcclt)
+            except:
+                pass
+        # Kill subprocess, spawn a new one, accept connection.
+        self.rpcclt.close()
+        self.unix_terminate()
+        console = self.tkconsole
+        was_executing = console.executing
+        console.executing = False
+        self.spawn_subprocess()
+        try:
+            self.rpcclt.accept()
+        except socket.timeout, err:
+            self.display_no_subprocess_error()
+            return None
+        self.transfer_path()
+        # annotate restart in shell window and mark it
+        console.text.delete("iomark", "end-1c")
+        if was_executing:
+            console.write('\n')
+            console.showprompt()
+        halfbar = ((int(console.width) - 16) // 2) * '='
+        console.write(halfbar + ' RESTART ' + halfbar)
+        console.text.mark_set("restart", "end-1c")
+        console.text.mark_gravity("restart", "left")
+        console.showprompt()
+        # restart subprocess debugger
+        if debug:
+            # Restarted debugger connects to current instance of debug GUI
+            gui = RemoteDebugger.restart_subprocess_debugger(self.rpcclt)
+            # reload remote debugger breakpoints for all PyShellEditWindows
+            debug.load_breakpoints()
+        self.restarting = False
+        return self.rpcclt
+
+    def __request_interrupt(self):
+        # Blocking remote call; always run from the worker thread below.
+        self.rpcclt.remotecall("exec", "interrupt_the_server", (), {})
+
+    def interrupt_subprocess(self):
+        """Ask the execution server to interrupt the running code, without
+        blocking the GUI thread."""
+        threading.Thread(target=self.__request_interrupt).start()
+
+    def kill_subprocess(self):
+        """Close the RPC connection (if any), terminate the subprocess and
+        reset execution state; leaves self.rpcclt None."""
+        try:
+            self.rpcclt.close()
+        except AttributeError:  # no socket
+            pass
+        self.unix_terminate()
+        self.tkconsole.executing = False
+        self.rpcclt = None
+
+    def unix_terminate(self):
+        "UNIX: make sure subprocess is terminated and collect status"
+        # os.kill/os.waitpid are absent on some platforms (e.g. Windows
+        # in this Python version); silently skip there.
+        if hasattr(os, 'kill'):
+            try:
+                os.kill(self.rpcpid, SIGTERM)
+            except OSError:
+                # process already terminated:
+                return
+            else:
+                # reap the child so it does not linger as a zombie
+                try:
+                    os.waitpid(self.rpcpid, 0)
+                except OSError:
+                    return
+
+    def transfer_path(self):
+        """Copy the GUI's sys.path into the subprocess and replace quit/exit
+        there with a hint message (they would kill the subprocess)."""
+        self.runcommand("""if 1:
+        import sys as _sys
+        _sys.path = %r
+        del _sys
+        _msg = 'Use File/Exit or your end-of-file key to quit IDLE'
+        __builtins__.quit = __builtins__.exit = _msg
+        del _msg
+        \n""" % (sys.path,))
+
+    # Sequence number of the outstanding remote call, or None when idle.
+    active_seq = None
+
+    def poll_subprocess(self):
+        """Poll the RPC connection for a response to the active request.
+
+        Dispatches OK/EXCEPTION/ERROR responses to the console, restarts
+        the subprocess on lost connections, and reschedules itself on the
+        Tk event loop every self.tkconsole.pollinterval ms."""
+        clt = self.rpcclt
+        if clt is None:
+            return
+        try:
+            response = clt.pollresponse(self.active_seq, wait=0.05)
+        except (EOFError, IOError, KeyboardInterrupt):
+            # lost connection or subprocess terminated itself, restart
+            # [the KBI is from rpc.SocketIO.handle_EOF()]
+            if self.tkconsole.closing:
+                return
+            response = None
+            self.restart_subprocess()
+        if response:
+            self.tkconsole.resetoutput()
+            self.active_seq = None
+            how, what = response
+            console = self.tkconsole.console
+            if how == "OK":
+                if what is not None:
+                    print >>console, repr(what)
+            elif how == "EXCEPTION":
+                if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
+                    self.remote_stack_viewer()
+            elif how == "ERROR":
+                errmsg = "PyShell.ModifiedInterpreter: Subprocess ERROR:\n"
+                print >>sys.__stderr__, errmsg, what
+                print >>console, errmsg, what
+            # we received a response to the currently active seq number:
+            self.tkconsole.endexecuting()
+        # Reschedule myself
+        if not self.tkconsole.closing:
+            self.tkconsole.text.after(self.tkconsole.pollinterval,
+                                      self.poll_subprocess)
+
+    # Active debugger adapter, or None when no debug session is running.
+    debugger = None
+
+    def setdebugger(self, debugger):
+        """Install (or clear, with None) the debugger adapter."""
+        self.debugger = debugger
+
+    def getdebugger(self):
+        """Return the active debugger adapter, or None."""
+        return self.debugger
+
+    def open_remote_stack_viewer(self):
+        """Initiate the remote stack viewer from a separate thread.
+
+        This method is called from the subprocess, and by returning from this
+        method we allow the subprocess to unblock.  After a bit the shell
+        requests the subprocess to open the remote stack viewer which returns a
+        static object looking at the last exception.  It is queried through
+        the RPC mechanism.
+
+        """
+        # 300 ms delay gives the subprocess time to return from this call.
+        self.tkconsole.text.after(300, self.remote_stack_viewer)
+        return
+
+    def remote_stack_viewer(self):
+        """Open a tree-widget window browsing the subprocess's last
+        exception stack via remote object stubs; bell if no stack."""
+        import RemoteObjectBrowser
+        oid = self.rpcclt.remotequeue("exec", "stackviewer", ("flist",), {})
+        if oid is None:
+            self.tkconsole.root.bell()
+            return
+        item = RemoteObjectBrowser.StubObjectTreeItem(self.rpcclt, oid)
+        from TreeWidget import ScrolledCanvas, TreeNode
+        top = Toplevel(self.tkconsole.root)
+        theme = idleConf.GetOption('main','Theme','name')
+        background = idleConf.GetHighlight(theme, 'normal')['background']
+        sc = ScrolledCanvas(top, bg=background, highlightthickness=0)
+        sc.frame.pack(expand=1, fill="both")
+        node = TreeNode(sc.canvas, None, item)
+        node.expand()
+        # XXX Should GC the remote tree when closing the window
+
+    # Monotonic counter used to name <pyshell#N> linecache entries.
+    gid = 0
+
+    def execsource(self, source):
+        "Like runsource() but assumes complete exec source"
+        filename = self.stuffsource(source)
+        self.execfile(filename, source)
+
+    def execfile(self, filename, source=None):
+        "Execute an existing file"
+        # When no source is given, read it from disk.
+        if source is None:
+            source = open(filename, "r").read()
+        try:
+            code = compile(source, filename, "exec")
+        except (OverflowError, SyntaxError):
+            # Report compile errors in the shell instead of raising.
+            self.tkconsole.resetoutput()
+            tkerr = self.tkconsole.stderr
+            print>>tkerr, '*** Error in script or command!\n'
+            print>>tkerr, 'Traceback (most recent call last):'
+            InteractiveInterpreter.showsyntaxerror(self, filename)
+            self.tkconsole.showprompt()
+        else:
+            self.runcode(code)
+
+    def runsource(self, source):
+        "Extend base class method: Stuff the source in the line cache first"
+        filename = self.stuffsource(source)
+        self.more = 0
+        # Turn SyntaxWarnings into errors for user code; restore the
+        # previous warning filters afterwards.
+        self.save_warnings_filters = warnings.filters[:]
+        warnings.filterwarnings(action="error", category=SyntaxWarning)
+        if isinstance(source, types.UnicodeType):
+            import IOBinding
+            try:
+                source = source.encode(IOBinding.encoding)
+            except UnicodeError:
+                self.tkconsole.resetoutput()
+                self.write("Unsupported characters in input")
+                return
+        try:
+            return InteractiveInterpreter.runsource(self, source, filename)
+        finally:
+            if self.save_warnings_filters is not None:
+                warnings.filters[:] = self.save_warnings_filters
+                self.save_warnings_filters = None
+
+    def stuffsource(self, source):
+        "Stuff source in the filename cache"
+        filename = "<pyshell#%d>" % self.gid
+        self.gid = self.gid + 1
+        lines = source.split("\n")
+        linecache.cache[filename] = len(source)+1, 0, lines, filename
+        return filename
+
    def prepend_syspath(self, filename):
        "Prepend sys.path with file's directory if not already included"
        # The code runs via runcommand() in the target namespace (possibly in
        # the subprocess), so helper names are underscore-prefixed and deleted
        # afterwards to avoid polluting the user's namespace.
        self.runcommand("""if 1:
            _filename = %r
            import sys as _sys
            from os.path import dirname as _dirname
            _dir = _dirname(_filename)
            if not _dir in _sys.path:
                _sys.path.insert(0, _dir)
            del _filename, _sys, _dirname, _dir
            \n""" % (filename,))
+
    def showsyntaxerror(self, filename=None):
        """Extend base class method: Add Colorizing

        Color the offending position instead of printing it and pointing at it
        with a caret.

        """
        text = self.tkconsole.text
        stuff = self.unpackerror()
        if stuff:
            msg, lineno, offset, line = stuff
            # Translate (lineno, offset) into a Tk text index relative to the
            # start of the current input area (the "iomark" mark).
            if lineno == 1:
                pos = "iomark + %d chars" % (offset-1)
            else:
                pos = "iomark linestart + %d lines + %d chars" % \
                      (lineno-1, offset-1)
            text.tag_add("ERROR", pos)
            text.see(pos)
            char = text.get(pos)
            if char and char in IDENTCHARS:
                # Extend the highlight back over the identifier at the error.
                text.tag_add("ERROR", pos + " wordstart", pos)
            self.tkconsole.resetoutput()
            self.write("SyntaxError: %s\n" % str(msg))
        else:
            # Couldn't unpack the error; fall back to the default report.
            self.tkconsole.resetoutput()
            InteractiveInterpreter.showsyntaxerror(self, filename)
        self.tkconsole.showprompt()
+
+    def unpackerror(self):
+        type, value, tb = sys.exc_info()
+        ok = type is SyntaxError
+        if ok:
+            try:
+                msg, (dummy_filename, lineno, offset, line) = value
+                if not offset:
+                    offset = 0
+            except:
+                ok = 0
+        if ok:
+            return msg, lineno, offset, line
+        else:
+            return None
+
    def showtraceback(self):
        "Extend base class method to reset output properly"
        self.tkconsole.resetoutput()
        self.checklinecache()
        InteractiveInterpreter.showtraceback(self)
        # Pop up the stack viewer "just in time" if the user enabled the
        # corresponding Debug-menu toggle variable.
        if self.tkconsole.getvar("<<toggle-jit-stack-viewer>>"):
            self.tkconsole.open_stack_viewer()
+
+    def checklinecache(self):
+        c = linecache.cache
+        for key in c.keys():
+            if key[:1] + key[-1:] != "<>":
+                del c[key]
+
    def runcommand(self, code):
        "Run the code without invoking the debugger"
        # The code better not raise an exception!
        # Returns 1 on success, 0 if the shell was busy executing user code.
        if self.tkconsole.executing:
            self.display_executing_dialog()
            return 0
        if self.rpcclt:
            # Subprocess mode: queue the code for remote execution.
            self.rpcclt.remotequeue("exec", "runcode", (code,), {})
        else:
            exec code in self.locals
        return 1
+
    def runcode(self, code):
        "Override base class method"
        if self.tkconsole.executing:
            # NOTE(review): ModifiedInterpreter does not appear to define
            # self.interp -- possibly meant self.restart_subprocess(); confirm.
            self.interp.restart_subprocess()
        self.checklinecache()
        # runsource() saved the warnings filters; restore them before running.
        if self.save_warnings_filters is not None:
            warnings.filters[:] = self.save_warnings_filters
            self.save_warnings_filters = None
        debugger = self.debugger
        try:
            self.tkconsole.beginexecuting()
            try:
                if not debugger and self.rpcclt is not None:
                    # Subprocess mode: run remotely; completion is async.
                    self.active_seq = self.rpcclt.asyncqueue("exec", "runcode",
                                                            (code,), {})
                elif debugger:
                    debugger.run(code, self.locals)
                else:
                    exec code in self.locals
            except SystemExit:
                if tkMessageBox.askyesno(
                    "Exit?",
                    "Do you want to exit altogether?",
                    default="yes",
                    master=self.tkconsole.text):
                    raise
                else:
                    self.showtraceback()
            except:
                self.showtraceback()
        finally:
            if not use_subprocess:
                # In subprocess mode endexecuting() is presumably triggered
                # elsewhere once the async result arrives -- not here.
                self.tkconsole.endexecuting()
+
+    def write(self, s):
+        "Override base class method"
+        self.tkconsole.stderr.write(s)
+
    def display_port_binding_error(self):
        """Pop up an error dialog: the RPC socket port could not be bound."""
        tkMessageBox.showerror(
            "Port Binding Error",
            "IDLE can't bind TCP/IP port 8833, which is necessary to "
            "communicate with its Python execution server.  Either "
            "no networking is installed on this computer or another "
            "process (another IDLE?) is using the port.  Run IDLE with the -n "
            "command line switch to start without a subprocess and refer to "
            "Help/IDLE Help 'Running without a subprocess' for further "
            "details.",
            master=self.tkconsole.text)
+
    def display_no_subprocess_error(self):
        """Pop up an error dialog: the execution subprocess never connected."""
        tkMessageBox.showerror(
            "Subprocess Startup Error",
            "IDLE's subprocess didn't make connection.  Either IDLE can't "
            "start a subprocess or personal firewall software is blocking "
            "the connection.",
            master=self.tkconsole.text)
+
    def display_executing_dialog(self):
        """Pop up an error dialog: a command is already running in the shell."""
        tkMessageBox.showerror(
            "Already executing",
            "The Python Shell window is already executing a command; "
            "please wait until it is finished.",
            master=self.tkconsole.text)
+
+
class PyShell(OutputWindow):
    """The interactive Python Shell window.

    Extends OutputWindow with an editable input area after the "iomark"
    text mark, prompt/history handling, and hooks into ModifiedInterpreter
    for executing input (directly, in a subprocess, or under the debugger).
    """

    shell_title = "Python Shell"

    # Override classes
    ColorDelegator = ModifiedColorDelegator
    UndoDelegator = ModifiedUndoDelegator

    # Override menus
    menu_specs = [
        ("file", "_File"),
        ("edit", "_Edit"),
        ("debug", "_Debug"),
        ("options", "_Options"),
        ("windows", "_Windows"),
        ("help", "_Help"),
    ]

    # New classes
    from IdleHistory import History

    def __init__(self, flist=None):
        """Build the shell window, creating a root Tk/file list if needed."""
        # Add a Shell menu only when running with an execution subprocess.
        if use_subprocess:
            ms = self.menu_specs
            if ms[2][0] != "shell":
                ms.insert(2, ("shell", "_Shell"))
        self.interp = ModifiedInterpreter(self)
        if flist is None:
            root = Tk()
            fixwordbreaks(root)
            root.withdraw()
            flist = PyShellFileList(root)
        #
        OutputWindow.__init__(self, flist, None, None)
        #
        # Neutralize quit/exit so typing them prints a hint instead of
        # killing the whole GUI.
        import __builtin__
        __builtin__.quit = __builtin__.exit = "To exit, type Ctrl-D."
        #
        self.config(usetabs=1, indentwidth=8, context_use_ps1=1)
        #
        text = self.text
        text.configure(wrap="char")
        text.bind("<<newline-and-indent>>", self.enter_callback)
        text.bind("<<plain-newline-and-indent>>", self.linefeed_callback)
        text.bind("<<interrupt-execution>>", self.cancel_callback)
        text.bind("<<beginning-of-line>>", self.home_callback)
        text.bind("<<end-of-file>>", self.eof_callback)
        text.bind("<<open-stack-viewer>>", self.open_stack_viewer)
        text.bind("<<toggle-debugger>>", self.toggle_debugger)
        text.bind("<<toggle-jit-stack-viewer>>", self.toggle_jit_stack_viewer)
        if use_subprocess:
            text.bind("<<view-restart>>", self.view_restart_mark)
            text.bind("<<restart-shell>>", self.restart_shell)
        #
        # Save the real std streams so _close() can restore them.
        self.save_stdout = sys.stdout
        self.save_stderr = sys.stderr
        self.save_stdin = sys.stdin
        import IOBinding
        self.stdout = PseudoFile(self, "stdout", IOBinding.encoding)
        self.stderr = PseudoFile(self, "stderr", IOBinding.encoding)
        self.console = PseudoFile(self, "console", IOBinding.encoding)
        if not use_subprocess:
            # Without a subprocess, user code shares this process's streams.
            sys.stdout = self.stdout
            sys.stderr = self.stderr
            sys.stdin = self
        #
        self.history = self.History(self.text)
        #
        self.pollinterval = 50  # millisec

    def get_standard_extension_names(self):
        """Return only the extension names enabled for shell windows."""
        return idleConf.GetExtensions(shell_only=True)

    # State flags for the input/execution loop.
    reading = False      # inside readline()'s nested mainloop
    executing = False    # user code currently running
    canceled = False     # interrupt (Ctrl-C) requested
    endoffile = False    # end-of-file (Ctrl-D) requested
    closing = False      # window shutdown in progress

    def set_warning_stream(self, stream):
        """Redirect the module-level warning stream."""
        global warning_stream
        warning_stream = stream

    def get_warning_stream(self):
        """Return the module-level warning stream."""
        return warning_stream

    def toggle_debugger(self, event=None):
        """Open or close the debugger; refuse while code is executing."""
        if self.executing:
            tkMessageBox.showerror("Don't debug now",
                "You can only toggle the debugger when idle",
                master=self.text)
            self.set_debugger_indicator()
            return "break"
        else:
            db = self.interp.getdebugger()
            if db:
                self.close_debugger()
            else:
                self.open_debugger()

    def set_debugger_indicator(self):
        """Reflect debugger presence in the <<toggle-debugger>> variable."""
        db = self.interp.getdebugger()
        self.setvar("<<toggle-debugger>>", not not db)

    def toggle_jit_stack_viewer(self, event=None):
        pass # All we need is the variable

    def close_debugger(self):
        """Detach and close the active debugger (local or remote)."""
        db = self.interp.getdebugger()
        if db:
            self.interp.setdebugger(None)
            db.close()
            if self.interp.rpcclt:
                RemoteDebugger.close_remote_debugger(self.interp.rpcclt)
            self.resetoutput()
            self.console.write("[DEBUG OFF]\n")
            sys.ps1 = ">>> "
            self.showprompt()
        self.set_debugger_indicator()

    def open_debugger(self):
        """Start a debugger GUI (remote when a subprocess is connected)."""
        if self.interp.rpcclt:
            dbg_gui = RemoteDebugger.start_remote_debugger(self.interp.rpcclt,
                                                           self)
        else:
            dbg_gui = Debugger.Debugger(self)
        self.interp.setdebugger(dbg_gui)
        dbg_gui.load_breakpoints()
        sys.ps1 = "[DEBUG ON]\n>>> "
        self.showprompt()
        self.set_debugger_indicator()

    def beginexecuting(self):
        "Helper for ModifiedInterpreter"
        self.resetoutput()
        self.executing = 1

    def endexecuting(self):
        "Helper for ModifiedInterpreter"
        self.executing = 0
        self.canceled = 0
        self.showprompt()

    def close(self):
        "Extend EditorWindow.close()"
        if self.executing:
            response = tkMessageBox.askokcancel(
                "Kill?",
                "The program is still running!\n Do you want to kill it?",
                default="ok",
                parent=self.text)
            if response == False:
                return "cancel"
        if self.reading:
            # Unblock the nested mainloop in readline() so it can return.
            self.top.quit()
        self.canceled = True
        self.closing = True
        # Wait for poll_subprocess() rescheduling to stop
        self.text.after(2 * self.pollinterval, self.close2)

    def close2(self):
        """Deferred second stage of close(); runs the base class close."""
        return EditorWindow.close(self)

    def _close(self):
        "Extend EditorWindow._close(), shut down debugger and execution server"
        self.close_debugger()
        if use_subprocess:
            self.interp.kill_subprocess()
        # Restore std streams
        sys.stdout = self.save_stdout
        sys.stderr = self.save_stderr
        sys.stdin = self.save_stdin
        # Break cycles
        self.interp = None
        self.console = None
        self.flist.pyshell = None
        self.history = None
        EditorWindow._close(self)

    def ispythonsource(self, filename):
        "Override EditorWindow method: never remove the colorizer"
        return True

    def short_title(self):
        """Return the window title shown in the Windows menu."""
        return self.shell_title

    COPYRIGHT = \
          'Type "copyright", "credits" or "license()" for more information.'

    firewallmessage = """
    ****************************************************************
    Personal firewall software may warn about the connection IDLE
    makes to its subprocess using this computer's internal loopback
    interface.  This connection is not visible on any external
    interface and no data is sent to or received from the Internet.
    ****************************************************************
    """

    def begin(self):
        """Print the startup banner and show the first prompt.

        Starts the execution subprocess when enabled; returns False if the
        subprocess could not be started (the window is closed in that case).
        """
        self.resetoutput()
        if use_subprocess:
            nosub = ''
            client = self.interp.start_subprocess()
            if not client:
                self.close()
                return False
        else:
            nosub = "==== No Subprocess ===="
        self.write("Python %s on %s\n%s\n%s\nIDLE %s      %s\n" %
                   (sys.version, sys.platform, self.COPYRIGHT,
                    self.firewallmessage, idlever.IDLE_VERSION, nosub))
        self.showprompt()
        import Tkinter
        Tkinter._default_root = None # 03Jan04 KBK What's this?
        return True

    def readline(self):
        """Block in a nested mainloop until input is entered; return it."""
        save = self.reading
        try:
            self.reading = 1
            self.top.mainloop()  # nested mainloop()
        finally:
            self.reading = save
        line = self.text.get("iomark", "end-1c")
        if len(line) == 0:  # may be EOF if we quit our mainloop with Ctrl-C
            line = "\n"
        if isinstance(line, unicode):
            import IOBinding
            try:
                line = line.encode(IOBinding.encoding)
            except UnicodeError:
                pass
        self.resetoutput()
        if self.canceled:
            self.canceled = 0
            if not use_subprocess:
                raise KeyboardInterrupt
        if self.endoffile:
            self.endoffile = 0
            line = ""
        return line

    def isatty(self):
        """Report True: the shell stands in for an interactive terminal."""
        return True

    def cancel_callback(self, event=None):
        """Handle interrupt-execution: stop running code or cancel input."""
        try:
            if self.text.compare("sel.first", "!=", "sel.last"):
                return # Active selection -- always use default binding
        except:
            pass
        if not (self.executing or self.reading):
            # Idle shell: just print KeyboardInterrupt and re-prompt.
            self.resetoutput()
            self.interp.write("KeyboardInterrupt\n")
            self.showprompt()
            return "break"
        self.endoffile = 0
        self.canceled = 1
        if (self.executing and self.interp.rpcclt):
            if self.interp.getdebugger():
                self.interp.restart_subprocess()
            else:
                self.interp.interrupt_subprocess()
        if self.reading:
            self.top.quit()  # exit the nested mainloop() in readline()
        return "break"

    def eof_callback(self, event):
        """Handle end-of-file (Ctrl-D): close the shell or end input."""
        if self.executing and not self.reading:
            return # Let the default binding (delete next char) take over
        if not (self.text.compare("iomark", "==", "insert") and
                self.text.compare("insert", "==", "end-1c")):
            return # Let the default binding (delete next char) take over
        if not self.executing:
            self.resetoutput()
            self.close()
        else:
            self.canceled = 0
            self.endoffile = 1
            self.top.quit()
        return "break"

    def home_callback(self, event):
        """Smart Home key: jump to the I/O mark when inside the input line."""
        if event.state != 0 and event.keysym == "Home":
            return # <Modifier-Home>; fall back to class binding
        if self.text.compare("iomark", "<=", "insert") and \
           self.text.compare("insert linestart", "<=", "iomark"):
            self.text.mark_set("insert", "iomark")
            self.text.tag_remove("sel", "1.0", "end")
            self.text.see("insert")
            return "break"

    def linefeed_callback(self, event):
        # Insert a linefeed without entering anything (still autoindented)
        if self.reading:
            self.text.insert("insert", "\n")
            self.text.see("insert")
        else:
            self.newline_and_indent_event(event)
        return "break"

    def enter_callback(self, event):
        """Handle Return: recall earlier input or submit the current line."""
        if self.executing and not self.reading:
            return # Let the default binding (insert '\n') take over
        # If some text is selected, recall the selection
        # (but only if this before the I/O mark)
        try:
            sel = self.text.get("sel.first", "sel.last")
            if sel:
                if self.text.compare("sel.last", "<=", "iomark"):
                    self.recall(sel)
                    return "break"
        except:
            pass
        # If we're strictly before the line containing iomark, recall
        # the current line, less a leading prompt, less leading or
        # trailing whitespace
        if self.text.compare("insert", "<", "iomark linestart"):
            # Check if there's a relevant stdin range -- if so, use it
            prev = self.text.tag_prevrange("stdin", "insert")
            if prev and self.text.compare("insert", "<", prev[1]):
                self.recall(self.text.get(prev[0], prev[1]))
                return "break"
            next = self.text.tag_nextrange("stdin", "insert")
            if next and self.text.compare("insert lineend", ">=", next[0]):
                self.recall(self.text.get(next[0], next[1]))
                return "break"
            # No stdin mark -- just get the current line, less any prompt
            line = self.text.get("insert linestart", "insert lineend")
            last_line_of_prompt = sys.ps1.split('\n')[-1]
            if line.startswith(last_line_of_prompt):
                line = line[len(last_line_of_prompt):]
            self.recall(line)
            return "break"
        # If we're between the beginning of the line and the iomark, i.e.
        # in the prompt area, move to the end of the prompt
        if self.text.compare("insert", "<", "iomark"):
            self.text.mark_set("insert", "iomark")
        # If we're in the current input and there's only whitespace
        # beyond the cursor, erase that whitespace first
        s = self.text.get("insert", "end-1c")
        if s and not s.strip():
            self.text.delete("insert", "end-1c")
        # If we're in the current input before its last line,
        # insert a newline right at the insert point
        if self.text.compare("insert", "<", "end-1c linestart"):
            self.newline_and_indent_event(event)
            return "break"
        # We're in the last line; append a newline and submit it
        self.text.mark_set("insert", "end-1c")
        if self.reading:
            self.text.insert("insert", "\n")
            self.text.see("insert")
        else:
            self.newline_and_indent_event(event)
        self.text.tag_add("stdin", "iomark", "end-1c")
        self.text.update_idletasks()
        if self.reading:
            self.top.quit() # Break out of recursive mainloop() in raw_input()
        else:
            self.runit()
        return "break"

    def recall(self, s):
        """Put *s* back into the input area via the history mechanism."""
        if self.history:
            self.history.recall(s)

    def runit(self):
        """Strip trailing whitespace/newline from the input and run it."""
        line = self.text.get("iomark", "end-1c")
        # Strip off last newline and surrounding whitespace.
        # (To allow you to hit return twice to end a statement.)
        i = len(line)
        while i > 0 and line[i-1] in " \t":
            i = i-1
        if i > 0 and line[i-1] == "\n":
            i = i-1
        while i > 0 and line[i-1] in " \t":
            i = i-1
        line = line[:i]
        more = self.interp.runsource(line)

    def open_stack_viewer(self, event=None):
        """Show a stack browser for the last traceback (local or remote)."""
        if self.interp.rpcclt:
            return self.interp.remote_stack_viewer()
        try:
            sys.last_traceback
        except:
            tkMessageBox.showerror("No stack trace",
                "There is no stack trace yet.\n"
                "(sys.last_traceback is not defined)",
                master=self.text)
            return
        from StackViewer import StackBrowser
        sv = StackBrowser(self.root, self.flist)

    def view_restart_mark(self, event=None):
        """Scroll the shell to the most recent restart boundary."""
        self.text.see("iomark")
        self.text.see("restart")

    def restart_shell(self, event=None):
        """Restart the execution subprocess on user request."""
        self.interp.restart_subprocess()

    def showprompt(self):
        """Write sys.ps1 and position the insert cursor after it."""
        self.resetoutput()
        try:
            s = str(sys.ps1)
        except:
            s = ""
        self.console.write(s)
        self.text.mark_set("insert", "end-1c")
        self.set_line_and_column()
        self.io.reset_undo()

    def resetoutput(self):
        """Store the pending input in history and move iomark to the end."""
        source = self.text.get("iomark", "end-1c")
        if self.history:
            self.history.history_store(source)
        if self.text.get("end-2c") != "\n":
            self.text.insert("end-1c", "\n")
        self.text.mark_set("iomark", "end-1c")
        self.set_line_and_column()
        # Reset the py2 print statement's pending-space flag on stdout.
        sys.stdout.softspace = 0

    def write(self, s, tags=()):
        """Append *s* after the output mark, keeping iomark at the end."""
        try:
            self.text.mark_gravity("iomark", "right")
            OutputWindow.write(self, s, tags, "iomark")
            self.text.mark_gravity("iomark", "left")
        except:
            # NOTE(review): widget errors are silently ignored here.
            pass
        if self.canceled:
            self.canceled = 0
            if not use_subprocess:
                raise KeyboardInterrupt
+
class PseudoFile:
    """File-like object that forwards writes to the shell window.

    Instances stand in for sys.stdout/stderr (and the console stream);
    every write is tagged so the shell's text widget can colorize it.
    """

    def __init__(self, shell, tags, encoding=None):
        # shell: object exposing write(s, tags) -- the PyShell window.
        # tags: text-widget tag name applied to everything written.
        self.shell = shell
        self.tags = tags
        self.softspace = 0  # required by the py2 print statement protocol
        self.encoding = encoding

    def write(self, s):
        self.shell.write(s, self.tags)

    def writelines(self, l):
        # Explicit loop instead of map(): map() built a throwaway list in
        # Python 2 and would be a lazy no-op under Python 3.
        for s in l:
            self.write(s)

    def flush(self):
        # Nothing is buffered locally; writes go straight to the widget.
        pass

    def isatty(self):
        return True
+
+
+usage_msg = """\
+
+USAGE: idle  [-deins] [-t title] [file]*
+       idle  [-dns] [-t title] (-c cmd | -r file) [arg]*
+       idle  [-dns] [-t title] - [arg]*
+
+  -h         print this help message and exit
+  -n         run IDLE without a subprocess (see Help/IDLE Help for details)
+
+The following options will override the IDLE 'settings' configuration:
+
+  -e         open an edit window
+  -i         open a shell window
+
+The following options imply -i and will open a shell:
+
+  -c cmd     run the command in a shell, or
+  -r file    run script from file
+
+  -d         enable the debugger
+  -s         run $IDLESTARTUP or $PYTHONSTARTUP before anything else
+  -t title   set title of shell window
+
+A default edit window will be bypassed when -c, -r, or - are used.
+
+[arg]* are passed to the command (-c) or script (-r) in sys.argv[1:].
+
+Examples:
+
+idle
+        Open an edit window or shell depending on IDLE's configuration.
+
+idle foo.py foobar.py
+        Edit the files, also open a shell if configured to start with shell.
+
+idle -est "Baz" foo.py
+        Run $IDLESTARTUP or $PYTHONSTARTUP, edit foo.py, and open a shell
+        window with the title "Baz".
+
+idle -c "import sys; print sys.argv" "foo"
+        Open a shell window and run the command, passing "-c" in sys.argv[0]
+        and "foo" in sys.argv[1].
+
+idle -d -s -r foo.py "Hello World"
+        Open a shell window, run a startup script, enable the debugger, and
+        run foo.py, passing "foo.py" in sys.argv[0] and "Hello World" in
+        sys.argv[1].
+
+echo "import sys; print sys.argv" | idle - "foobar"
+        Open a shell window, run the script piped in, passing '' in sys.argv[0]
+        and "foobar" in sys.argv[1].
+"""
+
+def main():
+    global flist, root, use_subprocess
+
+    use_subprocess = True
+    enable_shell = False
+    enable_edit = False
+    debug = False
+    cmd = None
+    script = None
+    startup = False
+    try:
+        sys.ps1
+    except AttributeError:
+        sys.ps1 = '>>> '
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], "c:deihnr:st:")
+    except getopt.error, msg:
+        sys.stderr.write("Error: %s\n" % str(msg))
+        sys.stderr.write(usage_msg)
+        sys.exit(2)
+    for o, a in opts:
+        if o == '-c':
+            cmd = a
+            enable_shell = True
+        if o == '-d':
+            debug = True
+            enable_shell = True
+        if o == '-e':
+            enable_edit = True
+        if o == '-h':
+            sys.stdout.write(usage_msg)
+            sys.exit()
+        if o == '-i':
+            enable_shell = True
+        if o == '-n':
+            use_subprocess = False
+        if o == '-r':
+            script = a
+            if os.path.isfile(script):
+                pass
+            else:
+                print "No script file: ", script
+                sys.exit()
+            enable_shell = True
+        if o == '-s':
+            startup = True
+            enable_shell = True
+        if o == '-t':
+            PyShell.shell_title = a
+            enable_shell = True
+    if args and args[0] == '-':
+        cmd = sys.stdin.read()
+        enable_shell = True
+    # process sys.argv and sys.path:
+    for i in range(len(sys.path)):
+        sys.path[i] = os.path.abspath(sys.path[i])
+    if args and args[0] == '-':
+        sys.argv = [''] + args[1:]
+    elif cmd:
+        sys.argv = ['-c'] + args
+    elif script:
+        sys.argv = [script] + args
+    elif args:
+        enable_edit = True
+        pathx = []
+        for filename in args:
+            pathx.append(os.path.dirname(filename))
+        for dir in pathx:
+            dir = os.path.abspath(dir)
+            if not dir in sys.path:
+                sys.path.insert(0, dir)
+    else:
+        dir = os.getcwd()
+        if not dir in sys.path:
+            sys.path.insert(0, dir)
+    # check the IDLE settings configuration (but command line overrides)
+    edit_start = idleConf.GetOption('main', 'General',
+                                    'editor-on-startup', type='bool')
+    enable_edit = enable_edit or edit_start
+    enable_shell = enable_shell or not edit_start
+    # start editor and/or shell windows:
+    root = Tk(className="Idle")
+    fixwordbreaks(root)
+    root.withdraw()
+    flist = PyShellFileList(root)
+    if enable_edit:
+        if not (cmd or script):
+            for filename in args:
+                flist.open(filename)
+            if not args:
+                flist.new()
+    if enable_shell:
+        if not flist.open_shell():
+            return # couldn't open shell
+    shell = flist.pyshell
+    # handle remaining options:
+    if debug:
+        shell.open_debugger()
+    if startup:
+        filename = os.environ.get("IDLESTARTUP") or \
+                   os.environ.get("PYTHONSTARTUP")
+        if filename and os.path.isfile(filename):
+            shell.interp.execfile(filename)
+    if shell and cmd or script:
+        shell.interp.runcommand("""if 1:
+            import sys as _sys
+            _sys.argv = %r
+            del _sys
+            \n""" % (sys.argv,))
+        if cmd:
+            shell.interp.execsource(cmd)
+        elif script:
+            shell.interp.prepend_syspath(script)
+            shell.interp.execfile(script)
+    root.mainloop()
+    root.destroy()
+
if __name__ == "__main__":
    # Alias the running module as 'PyShell' so code that does
    # 'import PyShell' gets this very module instance, not a second copy.
    sys.modules['PyShell'] = sys.modules['__main__']
    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/README.txt b/depot_tools/release/win/python_24/Lib/idlelib/README.txt
new file mode 100644
index 0000000..101f7eb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/README.txt
@@ -0,0 +1,63 @@
+IDLE is Python's Tkinter-based Integrated DeveLopment Environment.
+
+IDLE emphasizes a lightweight, clean design with a simple user interface.
+Although it is suitable for beginners, even advanced users will find that
+IDLE has everything they really need to develop pure Python code.
+
+IDLE features a multi-window text editor with multiple undo, Python colorizing,
+and many other capabilities, e.g. smart indent, call tips, and autocompletion.
+
+The editor has comprehensive search functions, including searching through
+multiple files.  Class browsers and path browsers provide fast access to
+code objects from a top level viewpoint without dealing with code folding.
+
+There is a Python Shell window which features colorizing and command recall.
+
+IDLE executes Python code in a separate process, which is restarted for each
+Run (F5) initiated from an editor window.  The environment can also be 
+restarted from the Shell window without restarting IDLE.
+
+This enhancement has often been requested, and is now finally available.  The
+magic "reload/import *" incantations are no longer required when editing and
+testing a module two or three steps down the import chain.
+
+(Personal firewall software may warn about the connection IDLE makes to its
+subprocess using this computer's internal loopback interface.  This connection
+is not visible on any external interface and no data is sent to or received
+from the Internet.)
+
+It is possible to interrupt tightly looping user code, even on Windows.
+
+Applications which cannot support subprocesses and/or sockets can still run
+IDLE in a single process.
+
+IDLE has an integrated debugger with stepping, persistent breakpoints, and call
+stack visibility.
+
+There is a GUI configuration manager which makes it easy to select fonts,
+colors, keybindings, and startup options.  This facility includes a feature
+which allows the user to specify additional help sources, either locally or on
+the web.
+
+IDLE is coded in 100% pure Python, using the Tkinter GUI toolkit (Tk/Tcl)
+and is cross-platform, working on Unix, Mac, and Windows.
+
+IDLE accepts command line arguments.  Try idle -h to see the options.
+
+
+If you find bugs or have suggestions, let us know about them by using the
+Python Bug Tracker:
+
+http://sourceforge.net/projects/python
+
+Patches are always appreciated at the Python Patch Tracker, and change
+requests should be posted to the RFE Tracker.
+
+For further details and links, read the Help files and check the IDLE home
+page at
+
+http://www.python.org/idle/
+
+There is a mail list for IDLE: idle-dev@python.org.  You can join at
+
+http://mail.python.org/mailman/listinfo/idle-dev
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/RemoteDebugger.py b/depot_tools/release/win/python_24/Lib/idlelib/RemoteDebugger.py
new file mode 100644
index 0000000..74085c36
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/RemoteDebugger.py
@@ -0,0 +1,381 @@
+"""Support for remote Python debugging.
+
+Some ASCII art to describe the structure:
+
+       IN PYTHON SUBPROCESS          #             IN IDLE PROCESS
+                                     #
+                                     #        oid='gui_adapter'
+                 +----------+        #       +------------+          +-----+
+                 | GUIProxy |--remote#call-->| GUIAdapter |--calls-->| GUI |
++-----+--calls-->+----------+        #       +------------+          +-----+
+| Idb |                               #                             /
++-----+<-calls--+------------+         #      +----------+<--calls-/
+                | IdbAdapter |<--remote#call--| IdbProxy |
+                +------------+         #      +----------+
+                oid='idb_adapter'      #
+
+The purpose of the Proxy and Adapter classes is to translate certain
+arguments and return values that cannot be transported through the RPC
+barrier, in particular frame and traceback objects.
+
+"""
+
+import sys
+import types
+import rpc
+import Debugger
+
+debugging = 0
+
+idb_adap_oid = "idb_adapter"
+gui_adap_oid = "gui_adapter"
+
+#=======================================
+#
+# In the PYTHON subprocess:
+
+frametable = {}
+dicttable = {}
+codetable = {}
+tracebacktable = {}
+
+def wrap_frame(frame):  # register frame under its id(); only the int key crosses the RPC link
+    fid = id(frame)
+    frametable[fid] = frame  # keeps the frame alive until the table is cleared
+    return fid
+
+def wrap_info(info):
+    "replace info[2], a traceback instance, by its ID"
+    if info is None:
+        return None
+    else:
+        traceback = info[2]
+        assert isinstance(traceback, types.TracebackType)  # expects a sys.exc_info()-style triple
+        traceback_id = id(traceback)
+        tracebacktable[traceback_id] = traceback  # keep traceback alive; only its id is sent over RPC
+        modified_info = (info[0], info[1], traceback_id)
+        return modified_info
+
+class GUIProxy:  # subprocess-side stand-in for the debugger GUI living in the IDLE process
+
+    def __init__(self, conn, gui_adap_oid):
+        self.conn = conn  # RPC connection to the IDLE process
+        self.oid = gui_adap_oid  # object id of the remote GUIAdapter
+
+    def interaction(self, message, frame, info=None):
+        # calls rpc.SocketIO.remotecall() via run.MyHandler instance
+        # pass frame and traceback object IDs instead of the objects themselves
+        self.conn.remotecall(self.oid, "interaction",
+                             (message, wrap_frame(frame), wrap_info(info)),
+                             {})
+
+class IdbAdapter:  # subprocess-side: translates RPC requests (integer ids) into calls on the real Idb
+
+    def __init__(self, idb):
+        self.idb = idb  # the Debugger.Idb (bdb-based) instance being driven remotely
+
+    #----------called by an IdbProxy----------
+
+    def set_step(self):
+        self.idb.set_step()
+
+    def set_quit(self):
+        self.idb.set_quit()
+
+    def set_continue(self):
+        self.idb.set_continue()
+
+    def set_next(self, fid):
+        frame = frametable[fid]  # translate frame id back into the real frame object
+        self.idb.set_next(frame)
+
+    def set_return(self, fid):
+        frame = frametable[fid]
+        self.idb.set_return(frame)
+
+    def get_stack(self, fid, tbid):
+        ##print >>sys.__stderr__, "get_stack(%r, %r)" % (fid, tbid)
+        frame = frametable[fid]
+        if tbid is None:
+            tb = None
+        else:
+            tb = tracebacktable[tbid]
+        stack, i = self.idb.get_stack(frame, tb)
+        ##print >>sys.__stderr__, "get_stack() ->", stack
+        stack = [(wrap_frame(frame), k) for frame, k in stack]  # frames -> ids for transport
+        ##print >>sys.__stderr__, "get_stack() ->", stack
+        return stack, i
+
+    def run(self, cmd):
+        import __main__
+        self.idb.run(cmd, __main__.__dict__)  # execute user code in the subprocess __main__ namespace
+
+    def set_break(self, filename, lineno):
+        msg = self.idb.set_break(filename, lineno)  # msg is an error string or None (bdb convention)
+        return msg
+
+    def clear_break(self, filename, lineno):
+        msg = self.idb.clear_break(filename, lineno)
+        return msg
+
+    def clear_all_file_breaks(self, filename):
+        msg = self.idb.clear_all_file_breaks(filename)
+        return msg
+
+    #----------called by a FrameProxy----------
+
+    def frame_attr(self, fid, name):
+        frame = frametable[fid]
+        return getattr(frame, name)
+
+    def frame_globals(self, fid):
+        frame = frametable[fid]
+        dict = frame.f_globals
+        did = id(dict)
+        dicttable[did] = dict  # cache so later DictProxy requests can resolve the id
+        return did
+
+    def frame_locals(self, fid):
+        frame = frametable[fid]
+        dict = frame.f_locals
+        did = id(dict)
+        dicttable[did] = dict
+        return did
+
+    def frame_code(self, fid):
+        frame = frametable[fid]
+        code = frame.f_code
+        cid = id(code)
+        codetable[cid] = code
+        return cid
+
+    #----------called by a CodeProxy----------
+
+    def code_name(self, cid):
+        code = codetable[cid]
+        return code.co_name
+
+    def code_filename(self, cid):
+        code = codetable[cid]
+        return code.co_filename
+
+    #----------called by a DictProxy----------
+
+    def dict_keys(self, did):
+        dict = dicttable[did]
+        return dict.keys()
+
+    def dict_item(self, did, key):
+        dict = dicttable[did]
+        value = dict[key]
+        value = repr(value)  # only the repr crosses the link; values may not be transportable
+        return value
+
+#----------end class IdbAdapter----------
+
+
+def start_debugger(rpchandler, gui_adap_oid):
+    """Start the debugger and its RPC link in the Python subprocess
+
+    Start the subprocess side of the split debugger and set up that side of the
+    RPC link by instantiating the GUIProxy, Idb debugger, and IdbAdapter
+    objects and linking them together.  Register the IdbAdapter with the
+    RPCServer to handle RPC requests from the split debugger GUI via the
+    IdbProxy.
+
+    """
+    gui_proxy = GUIProxy(rpchandler, gui_adap_oid)
+    idb = Debugger.Idb(gui_proxy)  # Idb reports events to the GUI through the proxy
+    idb_adap = IdbAdapter(idb)
+    rpchandler.register(idb_adap_oid, idb_adap)  # make the adapter callable from the IDLE process
+    return idb_adap_oid
+
+
+#=======================================
+#
+# In the IDLE process:
+
+
+class FrameProxy:  # IDLE-process stand-in for a subprocess frame, addressed by frame id
+
+    def __init__(self, conn, fid):
+        self._conn = conn  # RPC connection to the subprocess
+        self._fid = fid  # id of the real frame in the subprocess frametable
+        self._oid = "idb_adapter"  # all requests are served by the remote IdbAdapter
+        self._dictcache = {}  # one DictProxy per remote dict id, reused across accesses
+
+    def __getattr__(self, name):
+        if name[:1] == "_":
+            raise AttributeError, name  # never proxy private/dunder names
+        if name == "f_code":
+            return self._get_f_code()
+        if name == "f_globals":
+            return self._get_f_globals()
+        if name == "f_locals":
+            return self._get_f_locals()
+        return self._conn.remotecall(self._oid, "frame_attr",
+                                     (self._fid, name), {})
+
+    def _get_f_code(self):
+        cid = self._conn.remotecall(self._oid, "frame_code", (self._fid,), {})
+        return CodeProxy(self._conn, self._oid, cid)
+
+    def _get_f_globals(self):
+        did = self._conn.remotecall(self._oid, "frame_globals",
+                                    (self._fid,), {})
+        return self._get_dict_proxy(did)
+
+    def _get_f_locals(self):
+        did = self._conn.remotecall(self._oid, "frame_locals",
+                                    (self._fid,), {})
+        return self._get_dict_proxy(did)
+
+    def _get_dict_proxy(self, did):
+        if self._dictcache.has_key(did):
+            return self._dictcache[did]
+        dp = DictProxy(self._conn, self._oid, did)
+        self._dictcache[did] = dp
+        return dp
+
+
+class CodeProxy:  # IDLE-process stand-in for a subprocess code object (co_name/co_filename only)
+
+    def __init__(self, conn, oid, cid):
+        self._conn = conn
+        self._oid = oid
+        self._cid = cid  # id of the real code object in the subprocess codetable
+
+    def __getattr__(self, name):
+        if name == "co_name":
+            return self._conn.remotecall(self._oid, "code_name",
+                                         (self._cid,), {})
+        if name == "co_filename":
+            return self._conn.remotecall(self._oid, "code_filename",
+                                         (self._cid,), {})  # NOTE(review): any other attr falls through and returns None instead of raising AttributeError
+
+
+class DictProxy:  # IDLE-process read-only view of a subprocess dict (values come back as reprs)
+
+    def __init__(self, conn, oid, did):
+        self._conn = conn
+        self._oid = oid
+        self._did = did  # id of the real dict in the subprocess dicttable
+
+    def keys(self):
+        return self._conn.remotecall(self._oid, "dict_keys", (self._did,), {})
+
+    def __getitem__(self, key):
+        return self._conn.remotecall(self._oid, "dict_item",
+                                     (self._did, key), {})  # returns repr(value), not the value itself
+
+    def __getattr__(self, name):
+        ##print >>sys.__stderr__, "failed DictProxy.__getattr__:", name
+        raise AttributeError, name
+
+
+class GUIAdapter:  # IDLE-process side: receives GUIProxy calls and forwards them to the real GUI
+
+    def __init__(self, conn, gui):
+        self.conn = conn
+        self.gui = gui  # the Debugger.Debugger GUI instance
+
+    def interaction(self, message, fid, modified_info):
+        ##print "interaction: (%s, %s, %s)" % (message, fid, modified_info)
+        frame = FrameProxy(self.conn, fid)  # rewrap the transported frame id into a proxy
+        self.gui.interaction(message, frame, modified_info)
+
+
+class IdbProxy:  # IDLE-process stand-in for the subprocess Idb; forwards commands via remotecall
+
+    def __init__(self, conn, shell, oid):
+        self.oid = oid  # object id of the remote IdbAdapter
+        self.conn = conn
+        self.shell = shell  # PyShell instance, used to track async command sequence numbers
+
+    def call(self, methodname, *args, **kwargs):
+        ##print "**IdbProxy.call %s %s %s" % (methodname, args, kwargs)
+        value = self.conn.remotecall(self.oid, methodname, args, kwargs)  # synchronous RPC
+        ##print "**IdbProxy.call %s returns %r" % (methodname, value)
+        return value
+
+    def run(self, cmd, locals):
+        # Ignores locals on purpose!
+        seq = self.conn.asyncqueue(self.oid, "run", (cmd,), {})  # async: user code may run long
+        self.shell.interp.active_seq = seq
+
+    def get_stack(self, frame, tbid):
+        # passing frame and traceback IDs, not the objects themselves
+        stack, i = self.call("get_stack", frame._fid, tbid)
+        stack = [(FrameProxy(self.conn, fid), k) for fid, k in stack]  # ids -> proxies for the GUI
+        return stack, i
+
+    def set_continue(self):
+        self.call("set_continue")
+
+    def set_step(self):
+        self.call("set_step")
+
+    def set_next(self, frame):
+        self.call("set_next", frame._fid)
+
+    def set_return(self, frame):
+        self.call("set_return", frame._fid)
+
+    def set_quit(self):
+        self.call("set_quit")
+
+    def set_break(self, filename, lineno):
+        msg = self.call("set_break", filename, lineno)  # msg is an error string or None
+        return msg
+
+    def clear_break(self, filename, lineno):
+        msg = self.call("clear_break", filename, lineno)
+        return msg
+
+    def clear_all_file_breaks(self, filename):
+        msg = self.call("clear_all_file_breaks", filename)
+        return msg
+
+def start_remote_debugger(rpcclt, pyshell):
+    """Start the subprocess debugger, initialize the debugger GUI and RPC link
+
+    Request the RPCServer start the Python subprocess debugger and link.  Set
+    up the Idle side of the split debugger by instantiating the IdbProxy,
+    debugger GUI, and debugger GUIAdapter objects and linking them together.
+
+    Register the GUIAdapter with the RPCClient to handle debugger GUI
+    interaction requests coming from the subprocess debugger via the GUIProxy.
+
+    The IdbAdapter will pass execution and environment requests coming from the
+    Idle debugger GUI to the subprocess debugger via the IdbProxy.
+
+    """
+    global idb_adap_oid
+
+    idb_adap_oid = rpcclt.remotecall("exec", "start_the_debugger",\
+                                   (gui_adap_oid,), {})
+    idb_proxy = IdbProxy(rpcclt, pyshell, idb_adap_oid)
+    gui = Debugger.Debugger(pyshell, idb_proxy)  # GUI drives the subprocess Idb through the proxy
+    gui_adap = GUIAdapter(rpcclt, gui)
+    rpcclt.register(gui_adap_oid, gui_adap)  # subprocess GUIProxy calls land here
+    return gui
+
+def close_remote_debugger(rpcclt):
+    """Shut down subprocess debugger and Idle side of debugger RPC link
+
+    Request that the RPCServer shut down the subprocess debugger and link.
+    Unregister the GUIAdapter, which will cause a GC on the Idle process
+    debugger and RPC link objects.  (The second reference to the debugger GUI
+    is deleted in PyShell.close_remote_debugger().)
+
+    """
+    close_subprocess_debugger(rpcclt)  # subprocess side first, then release our adapter
+    rpcclt.unregister(gui_adap_oid)
+
+def close_subprocess_debugger(rpcclt):  # ask the subprocess to tear down its Idb/IdbAdapter
+    rpcclt.remotecall("exec", "stop_the_debugger", (idb_adap_oid,), {})
+
+def restart_subprocess_debugger(rpcclt):  # re-start the subprocess debugger after a shell restart
+    idb_adap_oid_ret = rpcclt.remotecall("exec", "start_the_debugger",\
+                                         (gui_adap_oid,), {})
+    assert idb_adap_oid_ret == idb_adap_oid, 'Idb restarted with different oid'
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/RemoteObjectBrowser.py b/depot_tools/release/win/python_24/Lib/idlelib/RemoteObjectBrowser.py
new file mode 100644
index 0000000..6ba3391
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/RemoteObjectBrowser.py
@@ -0,0 +1,36 @@
+import rpc
+
+def remote_object_tree_item(item):  # wrap a tree item and return an oid usable across the RPC link
+    wrapper = WrappedObjectTreeItem(item)
+    oid = id(wrapper)
+    rpc.objecttable[oid] = wrapper  # keeps the wrapper alive; remote side refers to it by oid
+    return oid
+
+class WrappedObjectTreeItem:
+    # Lives in PYTHON subprocess
+
+    def __init__(self, item):
+        self.__item = item  # the real TreeItem being exposed over RPC
+
+    def __getattr__(self, name):
+        value = getattr(self.__item, name)  # delegate everything else to the wrapped item
+        return value
+
+    def _GetSubList(self):
+        list = self.__item._GetSubList()
+        return map(remote_object_tree_item, list)  # children are returned as oids, not objects
+
+class StubObjectTreeItem:
+    # Lives in IDLE process
+
+    def __init__(self, sockio, oid):
+        self.sockio = sockio  # RPC connection to the subprocess
+        self.oid = oid  # oid of the WrappedObjectTreeItem on the far side
+
+    def __getattr__(self, name):
+        value = rpc.MethodProxy(self.sockio, self.oid, name)  # attribute access becomes a remote method
+        return value
+
+    def _GetSubList(self):
+        list = self.sockio.remotecall(self.oid, "_GetSubList", (), {})
+        return [StubObjectTreeItem(self.sockio, oid) for oid in list]  # rewrap child oids as stubs
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ReplaceDialog.py b/depot_tools/release/win/python_24/Lib/idlelib/ReplaceDialog.py
new file mode 100644
index 0000000..c8eb1c8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ReplaceDialog.py
@@ -0,0 +1,167 @@
+from Tkinter import *
+import SearchEngine
+from SearchDialogBase import SearchDialogBase
+
+def replace(text):  # open the (per-root, lazily created) Replace dialog for this Text widget
+    root = text._root()
+    engine = SearchEngine.get(root)
+    if not hasattr(engine, "_replacedialog"):
+        engine._replacedialog = ReplaceDialog(root, engine)  # one shared dialog per root window
+    dialog = engine._replacedialog
+    dialog.open(text)
+
+class ReplaceDialog(SearchDialogBase):
+
+    title = "Replace Dialog"
+    icon = "Replace"
+
+    def __init__(self, root, engine):
+        SearchDialogBase.__init__(self, root, engine)
+        self.replvar = StringVar(root)
+
+    def open(self, text):
+        SearchDialogBase.open(self, text)
+        try:
+            first = text.index("sel.first")
+        except TclError:
+            first = None
+        try:
+            last = text.index("sel.last")
+        except TclError:
+            last = None
+        first = first or text.index("insert")
+        last = last or first
+        self.show_hit(first, last)
+        self.ok = 1
+
+    def create_entries(self):
+        SearchDialogBase.create_entries(self)
+        self.replent = self.make_entry("Replace with:", self.replvar)
+
+    def create_command_buttons(self):
+        SearchDialogBase.create_command_buttons(self)
+        self.make_button("Find", self.find_it)
+        self.make_button("Replace", self.replace_it)
+        self.make_button("Replace+Find", self.default_command, 1)
+        self.make_button("Replace All", self.replace_all)
+
+    def find_it(self, event=None):
+        self.do_find(0)
+
+    def replace_it(self, event=None):
+        if self.do_find(self.ok):
+            self.do_replace()
+
+    def default_command(self, event=None):
+        if self.do_find(self.ok):
+            self.do_replace()
+            self.do_find(0)
+
+    def replace_all(self, event=None):
+        prog = self.engine.getprog()
+        if not prog:
+            return
+        repl = self.replvar.get()
+        text = self.text
+        res = self.engine.search_text(text, prog)
+        if not res:
+            text.bell()
+            return
+        text.tag_remove("sel", "1.0", "end")
+        text.tag_remove("hit", "1.0", "end")
+        line = res[0]
+        col = res[1].start()
+        if self.engine.iswrap():
+            line = 1
+            col = 0
+        ok = 1
+        first = last = None
+        # XXX ought to replace circular instead of top-to-bottom when wrapping
+        text.undo_block_start()
+        while 1:
+            res = self.engine.search_forward(text, prog, line, col, 0, ok)
+            if not res:
+                break
+            line, m = res
+            chars = text.get("%d.0" % line, "%d.0" % (line+1))
+            orig = m.group()
+            new = m.expand(repl)
+            i, j = m.span()
+            first = "%d.%d" % (line, i)
+            last = "%d.%d" % (line, j)
+            if new == orig:
+                text.mark_set("insert", last)
+            else:
+                text.mark_set("insert", first)
+                if first != last:
+                    text.delete(first, last)
+                if new:
+                    text.insert(first, new)
+            col = i + len(new)
+            ok = 0
+        text.undo_block_stop()
+        if first and last:
+            self.show_hit(first, last)
+        self.close()
+
+    def do_find(self, ok=0):
+        if not self.engine.getprog():
+            return False
+        text = self.text
+        res = self.engine.search_text(text, None, ok)
+        if not res:
+            text.bell()
+            return False
+        line, m = res
+        i, j = m.span()
+        first = "%d.%d" % (line, i)
+        last = "%d.%d" % (line, j)
+        self.show_hit(first, last)
+        self.ok = 1
+        return True
+
+    def do_replace(self):
+        prog = self.engine.getprog()
+        if not prog:
+            return False
+        text = self.text
+        try:
+            first = pos = text.index("sel.first")
+            last = text.index("sel.last")
+        except TclError:
+            pos = None
+        if not pos:
+            first = last = pos = text.index("insert")
+        line, col = SearchEngine.get_line_col(pos)
+        chars = text.get("%d.0" % line, "%d.0" % (line+1))
+        m = prog.match(chars, col)
+        if not prog:
+            return False
+        new = m.expand(self.replvar.get())
+        text.mark_set("insert", first)
+        text.undo_block_start()
+        if m.group():
+            text.delete(first, last)
+        if new:
+            text.insert(first, new)
+        text.undo_block_stop()
+        self.show_hit(first, text.index("insert"))
+        self.ok = 0
+        return True
+
+    def show_hit(self, first, last):
+        text = self.text
+        text.mark_set("insert", first)
+        text.tag_remove("sel", "1.0", "end")
+        text.tag_add("sel", first, last)
+        text.tag_remove("hit", "1.0", "end")
+        if first == last:
+            text.tag_add("hit", first)
+        else:
+            text.tag_add("hit", first, last)
+        text.see("insert")
+        text.update_idletasks()
+
+    def close(self, event=None):
+        SearchDialogBase.close(self, event)
+        self.text.tag_remove("hit", "1.0", "end")
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ScriptBinding.py b/depot_tools/release/win/python_24/Lib/idlelib/ScriptBinding.py
new file mode 100644
index 0000000..8ad02a4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ScriptBinding.py
@@ -0,0 +1,207 @@
+"""Extension to execute code outside the Python shell window.
+
+This adds the following commands:
+
+- Check module does a full syntax check of the current module.
+  It also runs the tabnanny to catch any inconsistent tabs.
+
+- Run module executes the module's code in the __main__ namespace.  The window
+  must have been saved previously. The module is added to sys.modules, and is
+  also added to the __main__ namespace.
+
+XXX GvR Redesign this interface (yet again) as follows:
+
+- Present a dialog box for ``Run Module''
+
+- Allow specify command line arguments in the dialog box
+
+"""
+
+import os
+import re
+import string
+import tabnanny
+import tokenize
+import tkMessageBox
+import PyShell
+
+from configHandler import idleConf
+
+IDENTCHARS = string.ascii_letters + string.digits + "_"
+
+indent_message = """Error: Inconsistent indentation detected!
+
+This means that either:
+
+1) your indentation is outright incorrect (easy to fix), or
+
+2) your indentation mixes tabs and spaces in a way that depends on \
+how many spaces a tab is worth.
+
+To fix case 2, change all tabs to spaces by using Select All followed \
+by Untabify Region (both in the Edit menu)."""
+
+
+class ScriptBinding:  # IDLE extension providing the Check Module / Run Module commands
+
+    menudefs = [
+        ('run', [None,
+                 ('Check Module', '<<check-module>>'),
+                 ('Run Module', '<<run-module>>'), ]), ]
+
+    def __init__(self, editwin):
+        self.editwin = editwin
+        # Provide instance variables referenced by Debugger
+        # XXX This should be done differently
+        self.flist = self.editwin.flist
+        self.root = self.flist.root
+
+    def check_module_event(self, event):
+        filename = self.getfilename()  # may prompt the user to save first
+        if not filename:
+            return
+        if not self.tabnanny(filename):
+            return
+        self.checksyntax(filename)
+
+    def tabnanny(self, filename):
+        f = open(filename, 'r')  # NOTE(review): f is never closed; relies on GC — confirm intent
+        try:
+            tabnanny.process_tokens(tokenize.generate_tokens(f.readline))
+        except tokenize.TokenError, msg:
+            msgtxt, (lineno, start) = msg
+            self.editwin.gotoline(lineno)  # jump the editor to the offending line
+            self.errorbox("Tabnanny Tokenizing Error",
+                          "Token Error: %s" % msgtxt)
+            return False
+        except tabnanny.NannyNag, nag:
+            # The error messages from tabnanny are too confusing...
+            self.editwin.gotoline(nag.get_lineno())
+            self.errorbox("Tab/space error", indent_message)
+            return False
+        return True
+
+    def checksyntax(self, filename):
+        self.shell = shell = self.flist.open_shell()
+        saved_stream = shell.get_warning_stream()
+        shell.set_warning_stream(shell.stderr)  # route compile warnings to the shell window
+        f = open(filename, 'r')
+        source = f.read()
+        f.close()
+        if '\r' in source:
+            source = re.sub(r"\r\n", "\n", source)  # normalize Windows line endings for compile()
+        if source and source[-1] != '\n':
+            source = source + '\n'
+        text = self.editwin.text
+        text.tag_remove("ERROR", "1.0", "end")
+        try:
+            try:
+                # If successful, return the compiled code
+                return compile(source, filename, "exec")
+            except (SyntaxError, OverflowError), err:
+                try:
+                    msg, (errorfilename, lineno, offset, line) = err
+                    if not errorfilename:
+                        err.args = msg, (filename, lineno, offset, line)
+                        err.filename = filename
+                    self.colorize_syntax_error(msg, lineno, offset)
+                except:
+                    msg = "*** " + str(err)  # error didn't unpack as expected; show it raw
+                self.errorbox("Syntax error",
+                              "There's an error in your program:\n" + msg)
+                return False
+        finally:
+            shell.set_warning_stream(saved_stream)  # always restore the original warning stream
+
+    def colorize_syntax_error(self, msg, lineno, offset):
+        text = self.editwin.text
+        pos = "0.0 + %d lines + %d chars" % (lineno-1, offset-1)  # Tk index of the error location
+        text.tag_add("ERROR", pos)
+        char = text.get(pos)
+        if char and char in IDENTCHARS:
+            text.tag_add("ERROR", pos + " wordstart", pos)  # highlight the whole identifier
+        if '\n' == text.get(pos):   # error at line end
+            text.mark_set("insert", pos)
+        else:
+            text.mark_set("insert", pos + "+1c")
+        text.see(pos)
+
+    def run_module_event(self, event):
+        """Run the module after setting up the environment.
+
+        First check the syntax.  If OK, make sure the shell is active and
+        then transfer the arguments, set the run environment's working
+        directory to the directory of the module being executed and also
+        add that directory to its sys.path if not already included.
+
+        """
+        filename = self.getfilename()
+        if not filename:
+            return
+        code = self.checksyntax(filename)  # also sets self.shell as a side effect
+        if not code:
+            return
+        shell = self.shell
+        interp = shell.interp
+        if PyShell.use_subprocess:
+            shell.restart_shell()  # fresh subprocess so the run starts from a clean state
+        dirname = os.path.dirname(filename)
+        # XXX Too often this discards arguments the user just set...
+        interp.runcommand("""if 1:
+            _filename = %r
+            import sys as _sys
+            from os.path import basename as _basename
+            if (not _sys.argv or
+                _basename(_sys.argv[0]) != _basename(_filename)):
+                _sys.argv = [_filename]
+            import os as _os
+            _os.chdir(%r)
+            del _filename, _sys, _basename, _os
+            \n""" % (filename, dirname))
+        interp.prepend_syspath(filename)
+        # XXX KBK 03Jul04 When run w/o subprocess, runtime warnings still
+        #         go to __stderr__.  With subprocess, they go to the shell.
+        #         Need to change streams in PyShell.ModifiedInterpreter.
+        interp.runcode(code)
+
+    def getfilename(self):
+        """Get source filename.  If not saved, offer to save (or create) file
+
+        The debugger requires a source file.  Make sure there is one, and that
+        the current version of the source buffer has been saved.  If the user
+        declines to save or cancels the Save As dialog, return None.
+
+        If the user has configured IDLE for Autosave, the file will be
+        silently saved if it already exists and is dirty.
+
+        """
+        filename = self.editwin.io.filename
+        if not self.editwin.get_saved():
+            autosave = idleConf.GetOption('main', 'General',
+                                          'autosave', type='bool')
+            if autosave and filename:
+                self.editwin.io.save(None)  # silent save of an existing, dirty file
+            else:
+                reply = self.ask_save_dialog()
+                self.editwin.text.focus_set()
+                if reply == "ok":
+                    self.editwin.io.save(None)
+                    filename = self.editwin.io.filename  # may have changed via Save As
+                else:
+                    filename = None  # user declined; caller must abort
+        return filename
+
+    def ask_save_dialog(self):
+        msg = "Source Must Be Saved\n" + 5*' ' + "OK to Save?"
+        mb = tkMessageBox.Message(title="Save Before Run or Check",
+                                  message=msg,
+                                  icon=tkMessageBox.QUESTION,
+                                  type=tkMessageBox.OKCANCEL,
+                                  default=tkMessageBox.OK,
+                                  master=self.editwin.text)
+        return mb.show()
+
+    def errorbox(self, title, message):
+        # XXX This should really be a function of EditorWindow...
+        tkMessageBox.showerror(title, message, master=self.editwin.text)
+        self.editwin.text.focus_set()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ScrolledList.py b/depot_tools/release/win/python_24/Lib/idlelib/ScrolledList.py
new file mode 100644
index 0000000..9211936
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ScrolledList.py
@@ -0,0 +1,139 @@
+from Tkinter import *
+
+class ScrolledList:  # Listbox + vertical Scrollbar widget with click/keyboard selection hooks
+
+    default = "(None)"  # placeholder entry shown while the list is empty
+
+    def __init__(self, master, **options):
+        # Create top frame, with scrollbar and listbox
+        self.master = master
+        self.frame = frame = Frame(master)
+        self.frame.pack(fill="both", expand=1)
+        self.vbar = vbar = Scrollbar(frame, name="vbar")
+        self.vbar.pack(side="right", fill="y")
+        self.listbox = listbox = Listbox(frame, exportselection=0,
+            background="white")
+        if options:
+            listbox.configure(options)
+        listbox.pack(expand=1, fill="both")
+        # Tie listbox and scrollbar together
+        vbar["command"] = listbox.yview
+        listbox["yscrollcommand"] = vbar.set
+        # Bind events to the list box
+        listbox.bind("<ButtonRelease-1>", self.click_event)
+        listbox.bind("<Double-ButtonRelease-1>", self.double_click_event)
+        listbox.bind("<ButtonPress-3>", self.popup_event)
+        listbox.bind("<Key-Up>", self.up_event)
+        listbox.bind("<Key-Down>", self.down_event)
+        # Mark as empty
+        self.clear()
+
+    def close(self):
+        self.frame.destroy()
+
+    def clear(self):
+        self.listbox.delete(0, "end")
+        self.empty = 1
+        self.listbox.insert("end", self.default)  # show the placeholder entry
+
+    def append(self, item):
+        if self.empty:
+            self.listbox.delete(0, "end")  # first real item replaces the placeholder
+            self.empty = 0
+        self.listbox.insert("end", str(item))
+
+    def get(self, index):
+        return self.listbox.get(index)
+
+    def click_event(self, event):
+        self.listbox.activate("@%d,%d" % (event.x, event.y))  # item under the mouse
+        index = self.listbox.index("active")
+        self.select(index)
+        self.on_select(index)
+        return "break"  # stop Tk's default handling of the event
+
+    def double_click_event(self, event):
+        index = self.listbox.index("active")
+        self.select(index)
+        self.on_double(index)
+        return "break"
+
+    menu = None  # context menu, built lazily by popup_event
+
+    def popup_event(self, event):
+        if not self.menu:
+            self.make_menu()
+        menu = self.menu
+        self.listbox.activate("@%d,%d" % (event.x, event.y))
+        index = self.listbox.index("active")
+        self.select(index)
+        menu.tk_popup(event.x_root, event.y_root)
+
+    def make_menu(self):
+        menu = Menu(self.listbox, tearoff=0)
+        self.menu = menu
+        self.fill_menu()  # subclass hook adds the actual entries
+
+    def up_event(self, event):
+        index = self.listbox.index("active")
+        if self.listbox.selection_includes(index):
+            index = index - 1
+        else:
+            index = self.listbox.size() - 1  # nothing selected: wrap to the bottom
+        if index < 0:
+            self.listbox.bell()  # already at the top
+        else:
+            self.select(index)
+            self.on_select(index)
+        return "break"
+
+    def down_event(self, event):
+        index = self.listbox.index("active")
+        if self.listbox.selection_includes(index):
+            index = index + 1
+        else:
+            index = 0  # nothing selected: start at the top
+        if index >= self.listbox.size():
+            self.listbox.bell()  # already at the bottom
+        else:
+            self.select(index)
+            self.on_select(index)
+        return "break"
+
+    def select(self, index):
+        self.listbox.focus_set()
+        self.listbox.activate(index)
+        self.listbox.selection_clear(0, "end")
+        self.listbox.selection_set(index)
+        self.listbox.see(index)  # scroll the selection into view
+
+    # Methods to override for specific actions
+
+    def fill_menu(self):
+        pass
+
+    def on_select(self, index):
+        pass
+
+    def on_double(self, index):
+        pass
+
+
+def test():  # manual smoke test: build a window with a 30-item subclassed ScrolledList
+    root = Tk()
+    root.protocol("WM_DELETE_WINDOW", root.destroy)
+    class MyScrolledList(ScrolledList):
+        def fill_menu(self): self.menu.add_command(label="pass")
+        def on_select(self, index): print "select", self.get(index)
+        def on_double(self, index): print "double", self.get(index)
+    s = MyScrolledList(root)
+    for i in range(30):
+        s.append("item %02d" % i)
+    return root
+
+def main():  # run the smoke test with a live event loop
+    root = test()
+    root.mainloop()
+
+if __name__ == '__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/SearchDialog.py b/depot_tools/release/win/python_24/Lib/idlelib/SearchDialog.py
new file mode 100644
index 0000000..d7124d6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/SearchDialog.py
@@ -0,0 +1,68 @@
+from Tkinter import *
+import SearchEngine
+from SearchDialogBase import SearchDialogBase
+
+
def _setup(text):
    """Return the SearchDialog shared by every widget under text's root.

    The dialog is created on first use and cached on the root's
    SearchEngine instance.
    """
    root = text._root()
    engine = SearchEngine.get(root)
    dialog = getattr(engine, "_searchdialog", None)
    if dialog is None:
        dialog = engine._searchdialog = SearchDialog(root, engine)
    return dialog
+
def find(text):
    """Open the search dialog preloaded with the current selection."""
    selection = text.get("sel.first", "sel.last")
    return _setup(text).open(text, selection)
+
def find_again(text):
    """Repeat the previous search in *text*."""
    dialog = _setup(text)
    return dialog.find_again(text)
+
def find_selection(text):
    """Search for the next occurrence of the current selection."""
    dialog = _setup(text)
    return dialog.find_selection(text)
+
class SearchDialog(SearchDialogBase):
    """The plain "Find" dialog built on SearchDialogBase."""

    def create_widgets(self):
        # Base class builds the entry/option/direction widgets;
        # add the default "Find" command button.
        f = SearchDialogBase.create_widgets(self)
        self.make_button("Find", self.default_command, 1)

    def default_command(self, event=None):
        # Bound to <Return> and the "Find" button: search, and close the
        # dialog on success.
        if not self.engine.getprog():
            return
        if self.find_again(self.text):
            self.close()

    def find_again(self, text):
        """Search for the next match and select it.

        Returns True when a new match was found and selected, False
        otherwise.  Rings the bell if the only match is the current
        selection or if there is no match at all.
        """
        if not self.engine.getpat():
            # No pattern yet: prompt the user instead of searching.
            self.open(text)
            return False
        if not self.engine.getprog():
            return False
        res = self.engine.search_text(text)
        if res:
            line, m = res
            i, j = m.span()
            first = "%d.%d" % (line, i)
            last = "%d.%d" % (line, j)
            try:
                selfirst = text.index("sel.first")
                sellast = text.index("sel.last")
                # Found the existing selection again -- no progress made.
                if selfirst == first and sellast == last:
                    text.bell()
                    return False
            except TclError:
                pass
            text.tag_remove("sel", "1.0", "end")
            text.tag_add("sel", first, last)
            # Leave the insert mark at the far end for the search direction.
            text.mark_set("insert", self.engine.isback() and first or last)
            text.see("insert")
            return True
        else:
            text.bell()
            return False

    def find_selection(self, text):
        """Use the current selection as the pattern, then find again."""
        pat = text.get("sel.first", "sel.last")
        if pat:
            self.engine.setcookedpat(pat)
        return self.find_again(text)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/SearchDialogBase.py b/depot_tools/release/win/python_24/Lib/idlelib/SearchDialogBase.py
new file mode 100644
index 0000000..f63e7ae
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/SearchDialogBase.py
@@ -0,0 +1,140 @@
+from Tkinter import *
+
class SearchDialogBase:
    """Common base for IDLE's search dialogs.

    Manages a Toplevel holding the pattern entry, the option
    checkbuttons, the direction radiobuttons, and a column of command
    buttons.  Subclasses add buttons via make_button() and must define
    default_command().
    """

    title = "Search Dialog"     # window title; subclasses override
    icon = "Search"             # window icon name
    needwrapbutton = 1          # whether to show the "Wrap around" option

    def __init__(self, root, engine):
        self.root = root
        self.engine = engine
        self.top = None         # Toplevel; created lazily by open()

    def open(self, text, searchphrase=None):
        """Show the dialog, optionally pre-loading a search phrase."""
        self.text = text
        if not self.top:
            self.create_widgets()
        else:
            self.top.deiconify()
            self.top.tkraise()
        if searchphrase:
            self.ent.delete(0,"end")
            self.ent.insert("end",searchphrase)
        self.ent.focus_set()
        self.ent.selection_range(0, "end")
        self.ent.icursor(0)
        self.top.grab_set()

    def close(self, event=None):
        # Hide rather than destroy, so widget state survives to the
        # next open().
        if self.top:
            self.top.grab_release()
            self.top.withdraw()

    def create_widgets(self):
        """Build the Toplevel and all widgets.

        Returns whatever create_command_buttons() returns.
        """
        top = Toplevel(self.root)
        top.bind("<Return>", self.default_command)
        top.bind("<Escape>", self.close)
        top.protocol("WM_DELETE_WINDOW", self.close)
        top.wm_title(self.title)
        top.wm_iconname(self.icon)
        self.top = top

        # self.row is the next grid row handed out by make_entry/make_frame.
        self.row = 0
        self.top.grid_columnconfigure(0, pad=2, weight=0)
        self.top.grid_columnconfigure(1, pad=2, minsize=100, weight=100)

        self.create_entries()
        self.create_option_buttons()
        self.create_other_buttons()
        return self.create_command_buttons()

    def make_entry(self, label, var):
        """Add a labeled Entry bound to *var* on the next grid row."""
        l = Label(self.top, text=label)
        l.grid(row=self.row, column=0, sticky="nw")
        e = Entry(self.top, textvariable=var, exportselection=0)
        e.grid(row=self.row, column=1, sticky="nwe")
        self.row = self.row + 1
        return e

    def make_frame(self,labeltext=None):
        """Add an (optionally labeled) Frame on the next grid row."""
        if labeltext:
            l = Label(self.top, text=labeltext)
            l.grid(row=self.row, column=0, sticky="nw")
        f = Frame(self.top)
        f.grid(row=self.row, column=1, columnspan=1, sticky="nwe")
        self.row = self.row + 1
        return f

    def make_button(self, label, command, isdef=0):
        """Append a command button; *isdef* marks it as the default."""
        b = Button(self.buttonframe,
                   text=label, command=command,
                   default=isdef and "active" or "normal")
        cols,rows=self.buttonframe.grid_size()
        b.grid(pady=1,row=rows,column=0,sticky="ew")
        self.buttonframe.grid(rowspan=rows+1)
        return b

    def create_entries(self):
        # Subclasses may override to add further entries.
        self.ent = self.make_entry("Find:", self.engine.patvar)

    def create_option_buttons(self):
        """Checkbuttons mirroring the engine's re/case/word/wrap options."""
        f = self.make_frame("Options")

        btn = Checkbutton(f, anchor="w",
                variable=self.engine.revar,
                text="Regular expression")
        btn.pack(side="left", fill="both")
        if self.engine.isre():
            btn.select()

        btn = Checkbutton(f, anchor="w",
                variable=self.engine.casevar,
                text="Match case")
        btn.pack(side="left", fill="both")
        if self.engine.iscase():
            btn.select()

        btn = Checkbutton(f, anchor="w",
                variable=self.engine.wordvar,
                text="Whole word")
        btn.pack(side="left", fill="both")
        if self.engine.isword():
            btn.select()

        if self.needwrapbutton:
            btn = Checkbutton(f, anchor="w",
                    variable=self.engine.wrapvar,
                    text="Wrap around")
            btn.pack(side="left", fill="both")
            if self.engine.iswrap():
                btn.select()

    def create_other_buttons(self):
        """Radiobuttons selecting the search direction (Up/Down)."""
        f = self.make_frame("Direction")

        #lbl = Label(f, text="Direction: ")
        #lbl.pack(side="left")

        btn = Radiobutton(f, anchor="w",
                variable=self.engine.backvar, value=1,
                text="Up")
        btn.pack(side="left", fill="both")
        if self.engine.isback():
            btn.select()

        btn = Radiobutton(f, anchor="w",
                variable=self.engine.backvar, value=0,
                text="Down")
        btn.pack(side="left", fill="both")
        if not self.engine.isback():
            btn.select()

    def create_command_buttons(self):
        #
        # place button frame on the right
        f = self.buttonframe = Frame(self.top)
        f.grid(row=0,column=2,padx=2,pady=2,ipadx=2,ipady=2)

        b = self.make_button("close", self.close)
        b.lower()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/SearchEngine.py b/depot_tools/release/win/python_24/Lib/idlelib/SearchEngine.py
new file mode 100644
index 0000000..cc40a00
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/SearchEngine.py
@@ -0,0 +1,220 @@
+import re
+from Tkinter import *
+import tkMessageBox
+
def get(root):
    """Return the SearchEngine shared by all dialogs under *root*.

    A single engine is created lazily and cached on the root widget so
    that search state persists across dialogs.
    """
    engine = getattr(root, "_searchengine", None)
    if engine is None:
        # XXX This will never garbage-collect -- who cares
        engine = root._searchengine = SearchEngine(root)
    return engine
+
+class SearchEngine:
+
+    def __init__(self, root):
+        self.root = root
+        # State shared by search, replace, and grep;
+        # the search dialogs bind these to UI elements.
+        self.patvar = StringVar(root)           # search pattern
+        self.revar = BooleanVar(root)           # regular expression?
+        self.casevar = BooleanVar(root)         # match case?
+        self.wordvar = BooleanVar(root)         # match whole word?
+        self.wrapvar = BooleanVar(root)         # wrap around buffer?
+        self.wrapvar.set(1)                     # (on by default)
+        self.backvar = BooleanVar(root)         # search backwards?
+
+    # Access methods
+
+    def getpat(self):
+        return self.patvar.get()
+
+    def setpat(self, pat):
+        self.patvar.set(pat)
+
+    def isre(self):
+        return self.revar.get()
+
+    def iscase(self):
+        return self.casevar.get()
+
+    def isword(self):
+        return self.wordvar.get()
+
+    def iswrap(self):
+        return self.wrapvar.get()
+
+    def isback(self):
+        return self.backvar.get()
+
+    # Higher level access methods
+
+    def getcookedpat(self):
+        pat = self.getpat()
+        if not self.isre():
+            pat = re.escape(pat)
+        if self.isword():
+            pat = r"\b%s\b" % pat
+        return pat
+
+    def getprog(self):
+        pat = self.getpat()
+        if not pat:
+            self.report_error(pat, "Empty regular expression")
+            return None
+        pat = self.getcookedpat()
+        flags = 0
+        if not self.iscase():
+            flags = flags | re.IGNORECASE
+        try:
+            prog = re.compile(pat, flags)
+        except re.error, what:
+            try:
+                msg, col = what
+            except:
+                msg = str(what)
+                col = -1
+            self.report_error(pat, msg, col)
+            return None
+        return prog
+
+    def report_error(self, pat, msg, col=-1):
+        # Derived class could overrid this with something fancier
+        msg = "Error: " + str(msg)
+        if pat:
+            msg = msg + "\np\Pattern: " + str(pat)
+        if col >= 0:
+            msg = msg + "\nOffset: " + str(col)
+        tkMessageBox.showerror("Regular expression error",
+                               msg, master=self.root)
+
+    def setcookedpat(self, pat):
+        if self.isre():
+            pat = re.escape(pat)
+        self.setpat(pat)
+
+    def search_text(self, text, prog=None, ok=0):
+        """Search a text widget for the pattern.
+
+        If prog is given, it should be the precompiled pattern.
+        Return a tuple (lineno, matchobj); None if not found.
+
+        This obeys the wrap and direction (back) settings.
+
+        The search starts at the selection (if there is one) or
+        at the insert mark (otherwise).  If the search is forward,
+        it starts at the right of the selection; for a backward
+        search, it starts at the left end.  An empty match exactly
+        at either end of the selection (or at the insert mark if
+        there is no selection) is ignored  unless the ok flag is true
+        -- this is done to guarantee progress.
+
+        If the search is allowed to wrap around, it will return the
+        original selection if (and only if) it is the only match.
+
+        """
+        if not prog:
+            prog = self.getprog()
+            if not prog:
+                return None # Compilation failed -- stop
+        wrap = self.wrapvar.get()
+        first, last = get_selection(text)
+        if self.isback():
+            if ok:
+                start = last
+            else:
+                start = first
+            line, col = get_line_col(start)
+            res = self.search_backward(text, prog, line, col, wrap, ok)
+        else:
+            if ok:
+                start = first
+            else:
+                start = last
+            line, col = get_line_col(start)
+            res = self.search_forward(text, prog, line, col, wrap, ok)
+        return res
+
+    def search_forward(self, text, prog, line, col, wrap, ok=0):
+        wrapped = 0
+        startline = line
+        chars = text.get("%d.0" % line, "%d.0" % (line+1))
+        while chars:
+            m = prog.search(chars[:-1], col)
+            if m:
+                if ok or m.end() > col:
+                    return line, m
+            line = line + 1
+            if wrapped and line > startline:
+                break
+            col = 0
+            ok = 1
+            chars = text.get("%d.0" % line, "%d.0" % (line+1))
+            if not chars and wrap:
+                wrapped = 1
+                wrap = 0
+                line = 1
+                chars = text.get("1.0", "2.0")
+        return None
+
+    def search_backward(self, text, prog, line, col, wrap, ok=0):
+        wrapped = 0
+        startline = line
+        chars = text.get("%d.0" % line, "%d.0" % (line+1))
+        while 1:
+            m = search_reverse(prog, chars[:-1], col)
+            if m:
+                if ok or m.start() < col:
+                    return line, m
+            line = line - 1
+            if wrapped and line < startline:
+                break
+            ok = 1
+            if line <= 0:
+                if not wrap:
+                    break
+                wrapped = 1
+                wrap = 0
+                pos = text.index("end-1c")
+                line, col = map(int, pos.split("."))
+            chars = text.get("%d.0" % line, "%d.0" % (line+1))
+            col = len(chars) - 1
+        return None
+
+# Helper to search backwards in a string.
+# (Optimized for the case where the pattern isn't found.)
+
def search_reverse(prog, chars, col):
    """Return the last match of *prog* in *chars* that lies before *col*.

    A match qualifies only if it starts before col and ends at or before
    col.  Scans forward, remembering the most recent qualifying match;
    optimized for the common case where the pattern isn't found at all.
    """
    best = None
    m = prog.search(chars)
    while m:
        i, j = m.span()
        if i >= col or j > col:
            break
        best = m
        if i == j:
            # Empty match: bump the start position to guarantee progress.
            j = j + 1
        m = prog.search(chars, j)
    return best
+
+# Helper to get selection end points, defaulting to insert mark.
+# Return a tuple of indices ("line.col" strings).
+
def get_selection(text):
    """Return the selection end points as a ("line.col", "line.col") tuple.

    Falls back to the insert mark when there is no selection.
    """
    try:
        bounds = (text.index("sel.first"), text.index("sel.last"))
    except TclError:
        bounds = (None, None)
    first, last = bounds
    if not first:
        first = text.index("insert")
    return first, last or first
+
+# Helper to parse a text index into a (line, col) tuple.
+
def get_line_col(index):
    """Parse a Tk text index "line.col" into an (int, int) tuple."""
    line, col = index.split(".")  # raises on a malformed index
    return int(line), int(col)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/StackViewer.py b/depot_tools/release/win/python_24/Lib/idlelib/StackViewer.py
new file mode 100644
index 0000000..6b7730bc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/StackViewer.py
@@ -0,0 +1,137 @@
+import os
+import sys
+import linecache
+
+from TreeWidget import TreeNode, TreeItem, ScrolledCanvas
+from ObjectBrowser import ObjectTreeItem, make_objecttreeitem
+
def StackBrowser(root, flist=None, tb=None, top=None):
    """Open a tree-widget browser for a traceback.

    With tb=None the traceback defaults to sys.last_traceback (resolved
    inside StackTreeItem).  A new Toplevel is created unless *top* is given.
    """
    if top is None:
        from Tkinter import Toplevel
        top = Toplevel(root)
    canvas = ScrolledCanvas(top, bg="white", highlightthickness=0)
    canvas.frame.pack(expand=1, fill="both")
    root_item = StackTreeItem(flist, tb)
    TreeNode(canvas.canvas, None, root_item).expand()
+
class StackTreeItem(TreeItem):
    """Root tree item: the exception summary, one child per stack frame."""

    def __init__(self, flist=None, tb=None):
        self.flist = flist
        self.stack = self.get_stack(tb)
        self.text = self.get_exception()

    def get_stack(self, tb):
        """Return [(frame, lineno), ...] for *tb* (default: last traceback)."""
        if tb is None:
            tb = sys.last_traceback
        if tb and tb.tb_frame is None:
            # Skip a leading entry with no frame.
            tb = tb.tb_next
        frames = []
        while tb is not None:
            frames.append((tb.tb_frame, tb.tb_lineno))
            tb = tb.tb_next
        return frames

    def get_exception(self):
        """Format sys.last_type/sys.last_value as "Type: value"."""
        exc_type = sys.last_type
        value = sys.last_value
        if hasattr(exc_type, "__name__"):
            exc_type = exc_type.__name__
        text = str(exc_type)
        if value is not None:
            text = "%s: %s" % (text, str(value))
        return text

    def GetText(self):
        return self.text

    def GetSubList(self):
        return [FrameTreeItem(info, self.flist) for info in self.stack]
+
class FrameTreeItem(TreeItem):
    """Tree item for one stack frame; children are its locals/globals."""

    def __init__(self, info, flist):
        self.info = info    # (frame, lineno) pair
        self.flist = flist

    def GetText(self):
        frame, lineno = self.info
        try:
            modname = frame.f_globals["__name__"]
        except:
            modname = "?"
        code = frame.f_code
        funcname = code.co_name
        sourceline = linecache.getline(code.co_filename, lineno).strip()
        if funcname in ("?", "", None):
            # Module-level code has no useful function name.
            return "%s, line %d: %s" % (modname, lineno, sourceline)
        return "%s.%s(...), line %d: %s" % (modname, funcname,
                                            lineno, sourceline)

    def GetSubList(self):
        frame, lineno = self.info
        sublist = []
        if frame.f_globals is not frame.f_locals:
            sublist.append(
                VariablesTreeItem("<locals>", frame.f_locals, self.flist))
        sublist.append(
            VariablesTreeItem("<globals>", frame.f_globals, self.flist))
        return sublist

    def OnDoubleClick(self):
        # Jump to the frame's source line, if the file exists on disk.
        if self.flist:
            frame, lineno = self.info
            filename = frame.f_code.co_filename
            if os.path.isfile(filename):
                self.flist.gotofileline(filename, lineno)
+
class VariablesTreeItem(ObjectTreeItem):
    """Tree item for a namespace dict (<locals> or <globals>)."""

    def GetText(self):
        return self.labeltext

    def GetLabelText(self):
        return None

    def IsExpandable(self):
        return len(self.object) > 0

    def keys(self):
        return self.object.keys()

    def GetSubList(self):
        items = []
        for key in self.keys():
            try:
                value = self.object[key]
            except KeyError:
                # Key vanished between keys() and the lookup; skip it.
                continue
            def setfunction(value, key=key, object=self.object):
                object[key] = value
            items.append(make_objecttreeitem(key + " =", value, setfunction))
        return items
+
+
def _test():
    # Provoke an exception by (re)importing a scratch module, then
    # browse the resulting traceback in a Tk window.
    try:
        import testcode
        reload(testcode)
    except:
        sys.last_type, sys.last_value, sys.last_traceback = sys.exc_info()
    from Tkinter import Tk
    root = Tk()
    StackBrowser(None, top=root)
    root.mainloop()

if __name__ == "__main__":
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/TODO.txt b/depot_tools/release/win/python_24/Lib/idlelib/TODO.txt
new file mode 100644
index 0000000..e2f1ac0f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/TODO.txt
@@ -0,0 +1,210 @@
+Original IDLE todo, much of it now outdated:
+============================================
+TO DO:
+
+- improve debugger:
+    - manage breakpoints globally, allow bp deletion, tbreak, cbreak etc.
+    - real object browser
+    - help on how to use it (a simple help button will do wonders)
+    - performance?  (updates of large sets of locals are slow)
+    - better integration of "debug module"
+    - debugger should be global resource (attached to flist, not to shell)
+    - fix the stupid bug where you need to step twice
+    - display class name in stack viewer entries for methods
+    - suppress tracing through IDLE internals (e.g. print) DONE
+    - add a button to suppress through a specific module or class or method
+    - more object inspection to stack viewer, e.g. to view all array items
+- insert the initial current directory into sys.path DONE
+- default directory attribute for each window instead of only for windows
+  that have an associated filename
+- command expansion from keywords, module contents, other buffers, etc.
+- "Recent documents" menu item DONE
+- Filter region command
+- Optional horizontal scroll bar
+- more Emacsisms:
+    - ^K should cut to buffer
+    - M-[, M-] to move by paragraphs
+    - incremental search?
+- search should indicate wrap-around in some way
+- restructure state sensitive code to avoid testing flags all the time
+- persistent user state (e.g. window and cursor positions, bindings)
+- make backups when saving
+- check file mtimes at various points
+- Pluggable interface with RCS/CVS/Perforce/Clearcase
+- better help?
+- don't open second class browser on same module (nor second path browser)
+- unify class and path browsers
+- Need to define a standard way whereby one can determine one is running
+  inside IDLE (needed for Tk mainloop, also handy for $PYTHONSTARTUP)
+- Add more utility methods for use by extensions (a la get_selection)
+- Way to run command in totally separate interpreter (fork+os.system?) DONE
+- Way to find definition of fully-qualified name:
+  In other words, select "UserDict.UserDict", hit some magic key and
+  it loads up UserDict.py and finds the first def or class for UserDict.
+- need a way to force colorization on/off
+- need a way to force auto-indent on/off
+
+Details:
+
+- ^O (on Unix -- open-line) should honor autoindent
+- after paste, show end of pasted text
+- on Windows, should turn short filename to long filename (not only in argv!)
+  (shouldn't this be done -- or undone -- by ntpath.normpath?)
+- new autoindent after colon even indents when the colon is in a comment!
+- sometimes forward slashes in pathname remain
+- sometimes star in window name remains in Windows menu
+- With unix bindings, ESC by itself is ignored
+- Sometimes for no apparent reason a selection from the cursor to the
+  end of the command buffer appears, which is hard to get rid of
+  because it stays when you are typing!
+- The Line/Col in the status bar can be wrong initially in PyShell DONE
+
+Structural problems:
+
+- too much knowledge in FileList about EditorWindow (for example)
+- should add some primitives for accessing the selection etc.
+  to repeat cumbersome code over and over
+
+======================================================================
+
+Jeff Bauer suggests:
+
+- Open Module doesn't appear to handle hierarchical packages.
+- Class browser should also allow hierarchical packages.
+- Open and Open Module could benefit from a history, DONE
+  either command line style, or Microsoft recent-file
+  style.
+- Add a Smalltalk-style inspector  (i.e. Tkinspect)
+
+The last suggestion is already a reality, but not yet
+integrated into IDLE.  I use a module called inspector.py,
+that used to be available from python.org(?)  It no longer
+appears to be in the contributed section, and the source
+has no author attribution.
+
+In any case, the code is useful for visually navigating
+an object's attributes, including its container hierarchy.
+
+    >>> from inspector import Tkinspect
+    >>> Tkinspect(None, myObject)
+
+Tkinspect could probably be extended and refined to
+integrate better into IDLE.
+
+======================================================================
+
+Comparison to PTUI
+------------------
+
++ PTUI's help is better (HTML!)
+
++ PTUI can attach a shell to any module
+
++ PTUI has some more I/O commands:
+  open multiple
+  append
+  examine (what's that?)
+
+======================================================================
+
+Notes after trying to run Grail
+-------------------------------
+
+- Grail does stuff to sys.path based on sys.argv[0]; you must set
+sys.argv[0] to something decent first (it is normally set to the path of
+the idle script).
+
+- Grail must be exec'ed in __main__ because that's imported by some
+other parts of Grail.
+
+- Grail uses a module called History and so does idle :-(
+
+======================================================================
+
+Robin Friedrich's items:
+
+Things I'd like to see:
+    - I'd like support for shift-click extending the selection. There's a
+      bug now that it doesn't work the first time you try it.
+    - Printing is needed. How hard can that be on Windows? FIRST CUT DONE
+    - The python-mode trick of autoindenting a line with <tab> is neat and
+      very handy.
+    - (someday) a spellchecker for docstrings and comments.
+    - a pagedown/up command key which moves to next class/def statement (top
+      level)
+    - split window capability
+    - DnD text relocation/copying
+
+Things I don't want to see.
+    - line numbers...  will probably slow things down way too much.
+    - Please use another icon for the tree browser leaf. The small snake
+      isn't cutting it.
+
+----------------------------------------------------------------------
+
+- Customizable views (multi-window or multi-pane).  (Markus Gritsch)
+
+- Being able to double click (maybe double right click) on a callable
+object in the editor which shows the source of the object, if
+possible.  (Gerrit Holl)
+
+- Hooks into the guts, like in Emacs.  (Mike Romberg)
+
+- Sharing the editor with a remote tutor.  (Martijn Faassen)
+
+- Multiple views on the same file.  (Tony J Ibbs)
+
+- Store breakpoints in a global (per-project) database (GvR); Dirk
+Heise adds: save some space-trimmed context and search around when
+reopening a file that might have been edited by someone else.
+
+- Capture menu events in extensions without changing the IDLE source.
+(Matthias Barmeier)
+
+- Use overlapping panels (a "notebook" in MFC terms I think) for info
+that doesn't need to be accessible simultaneously (e.g. HTML source
+and output).  Use multi-pane windows for info that does need to be
+shown together (e.g. class browser and source).  (Albert Brandl)
+
+- A project should invisibly track all symbols, for instant search,
+replace and cross-ref.  Projects should be allowed to span multiple
+directories, hosts, etc.  Project management files are placed in a
+directory you specify.  A global mapping between project names and
+project directories should exist [not so sure --GvR].  (Tim Peters)
+
+- Merge attr-tips and auto-expand.  (Mark Hammond, Tim Peters)
+
+- Python Shell should behave more like a "shell window" as users know
+it -- i.e. you can only edit the current command, and the cursor can't
+escape from the command area.  (Albert Brandl)
+
+- Set X11 class to "idle/Idle", set icon and title to something
beginning with "idle" -- for window managers.  (Randall Hopper)
+
+- Config files editable through a preferences dialog.  (me) DONE
+
+- Config files still editable outside the preferences dialog.
+(Randall Hopper) DONE
+
+- When you're editing a command in PyShell, and there are only blank
+lines below the cursor, hitting Return should ignore or delete those
+blank lines rather than deciding you're not on the last line.  (me)
+
+- Run command (F5 c.s.) should be more like Pythonwin's Run -- a
+dialog with options to give command line arguments, run the debugger,
+etc.  (me)
+
+- Shouldn't be able to delete part of the prompt (or any text before
+it) in the PyShell.  (Martijn Faassen)   DONE
+
+- Emacs style auto-fill (also smart about comments and strings).
+(Jeremy Hylton)
+
+- Output of Run Script should go to a separate output window, not to
+the shell window.  Output of separate runs should all go to the same
+window but clearly delimited.  (David Scherer) REJECT FIRST, LATTER DONE
+
+- GUI form designer to kick VB's butt.  (Robert Geiger) THAT'S NOT IDLE
+
+- Printing!  Possibly via generation of PDF files which the user must
+then send to the printer separately.  (Dinu Gherman)  FIRST CUT
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ToolTip.py b/depot_tools/release/win/python_24/Lib/idlelib/ToolTip.py
new file mode 100644
index 0000000..a403804a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ToolTip.py
@@ -0,0 +1,89 @@
+# general purpose 'tooltip' routines - currently unused in idlefork
+# (although the 'calltips' extension is partly based on this code)
+# may be useful for some purposes in (or almost in ;) the current project scope
+# Ideas gleaned from PySol
+
+from Tkinter import *
+
class ToolTipBase:
    """Attach hover-tooltip behavior to a widget.

    The tip appears 1.5s after the pointer enters the widget and is
    removed on leave or button press.  Subclasses override
    showcontents() to supply the tip's contents.
    """

    def __init__(self, button):
        self.button = button        # widget the tip is attached to
        self.tipwindow = None       # Toplevel while the tip is showing
        self.id = None              # pending after() timer id, if any
        self.x = self.y = 0
        self._id1 = self.button.bind("<Enter>", self.enter)
        self._id2 = self.button.bind("<Leave>", self.leave)
        self._id3 = self.button.bind("<ButtonPress>", self.leave)

    def enter(self, event=None):
        self.schedule()

    def leave(self, event=None):
        self.unschedule()
        self.hidetip()

    def schedule(self):
        # (Re)start the 1.5 second delay before the tip appears.
        self.unschedule()
        self.id = self.button.after(1500, self.showtip)

    def unschedule(self):
        # Cancel a pending timer, if any.
        id = self.id
        self.id = None
        if id:
            self.button.after_cancel(id)

    def showtip(self):
        if self.tipwindow:
            return
        # The tip window must be completely outside the button;
        # otherwise when the mouse enters the tip window we get
        # a leave event and it disappears, and then we get an enter
        # event and it reappears, and so on forever :-(
        x = self.button.winfo_rootx() + 20
        y = self.button.winfo_rooty() + self.button.winfo_height() + 1
        self.tipwindow = tw = Toplevel(self.button)
        tw.wm_overrideredirect(1)
        tw.wm_geometry("+%d+%d" % (x, y))
        self.showcontents()

    def showcontents(self, text="Your text here"):
        # Override this in derived class
        label = Label(self.tipwindow, text=text, justify=LEFT,
                      background="#ffffe0", relief=SOLID, borderwidth=1)
        label.pack()

    def hidetip(self):
        tw = self.tipwindow
        self.tipwindow = None
        if tw:
            tw.destroy()
+
class ToolTip(ToolTipBase):
    """Tooltip showing a fixed text string."""

    def __init__(self, button, text):
        ToolTipBase.__init__(self, button)
        self.text = text

    def showcontents(self):
        ToolTipBase.showcontents(self, self.text)
+
class ListboxToolTip(ToolTipBase):
    """Tooltip showing a Listbox populated with the given items."""

    def __init__(self, button, items):
        ToolTipBase.__init__(self, button)
        self.items = items

    def showcontents(self):
        listbox = Listbox(self.tipwindow, background="#ffffe0")
        listbox.pack()
        for entry in self.items:
            listbox.insert(END, entry)
+
def main():
    # Test code
    root = Tk()
    b = Button(root, text="Hello", command=root.destroy)
    b.pack()
    root.update()
    tip = ListboxToolTip(b, ["Hello", "world"])

    # root.mainloop() # not in idle

# NOTE(review): main() runs at import time, creating a Tk window as a
# side effect -- presumably intentional for this demo module; confirm.
main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/TreeWidget.py b/depot_tools/release/win/python_24/Lib/idlelib/TreeWidget.py
new file mode 100644
index 0000000..c5c171f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/TreeWidget.py
@@ -0,0 +1,478 @@
+# XXX TO DO:
+# - popup menu
+# - support partial or total redisplay
+# - key bindings (instead of quick-n-dirty bindings on Canvas):
+#   - up/down arrow keys to move focus around
+#   - ditto for page up/down, home/end
+#   - left/right arrows to expand/collapse & move out/in
+# - more doc strings
+# - add icons for "file", "module", "class", "method"; better "python" icon
+# - callback for selection???
+# - multiple-item selection
+# - tooltips
+# - redo geometry without magic numbers
+# - keep track of object ids to allow more careful cleaning
+# - optimize tree redraw after expand of subnode
+
+import os
+import sys
+from Tkinter import *
+import imp
+
+import ZoomHeight
+from configHandler import idleConf
+
ICONDIR = "Icons"

# Look for Icons subdirectory in the same directory as this module
try:
    # __file__ is undefined when this module is the main script in
    # Python 2, hence the NameError fallback to the bare relative name.
    _icondir = os.path.join(os.path.dirname(__file__), ICONDIR)
except NameError:
    _icondir = ICONDIR
if os.path.isdir(_icondir):
    ICONDIR = _icondir
elif not os.path.isdir(ICONDIR):
    # Python 2 raise syntax; this fails at import time when no icon
    # directory can be located.
    raise RuntimeError, "can't find icon directory (%r)" % (ICONDIR,)
+
def listicons(icondir=ICONDIR):
    """Utility to display the available icons.

    Shows every *.gif in icondir in a Tk grid, ten per row, each icon
    above its base name.  The PhotoImage objects are stashed on the
    root widget (root.images) so Tk does not garbage-collect them.
    """
    root = Tk()
    import glob
    # Renamed locals: the originals shadowed the builtins 'list' and 'file'.
    files = glob.glob(os.path.join(icondir, "*.gif"))
    files.sort()
    images = []
    row = column = 0
    for path in files:
        name = os.path.splitext(os.path.basename(path))[0]
        image = PhotoImage(file=path, master=root)
        images.append(image)
        label = Label(root, image=image, bd=1, relief="raised")
        label.grid(row=row, column=column)
        label = Label(root, text=name)
        label.grid(row=row+1, column=column)
        column = column + 1
        if column >= 10:
            row = row+2
            column = 0
    root.images = images
+
+
class TreeNode:
    """One visible node of the tree, drawn onto a shared Canvas.

    Wraps a TreeItem (the data) and manages its icon, label,
    expand/collapse state and lazily-built children.  The root node
    (parent is None) owns whole-tree operations such as the full
    redraw in update() and deselectall().
    """

    def __init__(self, canvas, parent, item):
        self.canvas = canvas
        self.parent = parent        # parent TreeNode, or None for the root
        self.item = item            # the TreeItem this node displays
        self.state = 'collapsed'    # 'collapsed' or 'expanded'
        self.selected = False
        self.children = []          # child TreeNodes; built lazily in draw()
        self.x = self.y = None      # canvas position; assigned by draw()
        self.iconimages = {} # cache of PhotoImage instances for icons

    def destroy(self):
        """Recursively detach this subtree (breaks parent references)."""
        for c in self.children[:]:
            self.children.remove(c)
            c.destroy()
        self.parent = None

    def geticonimage(self, name):
        """Return a cached PhotoImage for icon 'name' (".gif" assumed)."""
        try:
            return self.iconimages[name]
        except KeyError:
            pass
        file, ext = os.path.splitext(name)
        ext = ext or ".gif"
        fullname = os.path.join(ICONDIR, file + ext)
        image = PhotoImage(master=self.canvas, file=fullname)
        self.iconimages[name] = image
        return image

    def select(self, event=None):
        """Make this the single selected node in the tree and redraw it."""
        if self.selected:
            return
        self.deselectall()
        self.selected = True
        self.canvas.delete(self.image_id)
        self.drawicon()
        self.drawtext()

    def deselect(self, event=None):
        """Clear this node's selection and redraw it."""
        if not self.selected:
            return
        self.selected = False
        self.canvas.delete(self.image_id)
        self.drawicon()
        self.drawtext()

    def deselectall(self):
        """Deselect every node in the tree (delegates to the root)."""
        if self.parent:
            self.parent.deselectall()
        else:
            self.deselecttree()

    def deselecttree(self):
        """Deselect this node and, recursively, all its children."""
        if self.selected:
            self.deselect()
        for child in self.children:
            child.deselecttree()

    def flip(self, event=None):
        """Toggle expanded/collapsed, then notify the item (double-click)."""
        if self.state == 'expanded':
            self.collapse()
        else:
            self.expand()
        self.item.OnDoubleClick()
        return "break"

    def expand(self, event=None):
        """Expand this node (if its item is expandable) and scroll it into view."""
        if not self.item._IsExpandable():
            return
        if self.state != 'expanded':
            self.state = 'expanded'
            self.update()
            self.view()

    def collapse(self, event=None):
        """Collapse this node and redraw the tree."""
        if self.state != 'collapsed':
            self.state = 'collapsed'
            self.update()

    def view(self):
        """Scroll the canvas vertically so this node's subtree is visible."""
        # 2 / 17 are the same hard-coded row geometry constants as draw().
        top = self.y - 2
        bottom = self.lastvisiblechild().y + 17
        height = bottom - top
        visible_top = self.canvas.canvasy(0)
        visible_height = self.canvas.winfo_height()
        visible_bottom = self.canvas.canvasy(visible_height)
        if visible_top <= top and bottom <= visible_bottom:
            # Already fully visible; nothing to do.
            return
        x0, y0, x1, y1 = self.canvas._getints(self.canvas['scrollregion'])
        if top >= visible_top and height <= visible_height:
            # Subtree fits in the viewport: scroll just far enough to
            # bring its bottom edge in.
            fraction = top + height - visible_height
        else:
            # Subtree too tall (or above the viewport): align its top.
            fraction = top
        # yview_moveto takes a fraction of the scrollregion height (y1).
        fraction = float(fraction) / y1
        self.canvas.yview_moveto(fraction)

    def lastvisiblechild(self):
        """Return the bottom-most displayed descendant, or self."""
        if self.children and self.state == 'expanded':
            return self.children[-1].lastvisiblechild()
        else:
            return self

    def update(self):
        """Redraw the entire tree (the actual work happens at the root)."""
        if self.parent:
            self.parent.update()
        else:
            oldcursor = self.canvas['cursor']
            self.canvas['cursor'] = "watch"
            self.canvas.update()
            self.canvas.delete(ALL)     # XXX could be more subtle
            self.draw(7, 2)
            x0, y0, x1, y1 = self.canvas.bbox(ALL)
            self.canvas.configure(scrollregion=(0, 0, x1, y1))
            self.canvas['cursor'] = oldcursor

    def draw(self, x, y):
        """Draw this node at (x, y); return the y just below its subtree."""
        # XXX This hard-codes too many geometry constants!
        self.x, self.y = x, y
        self.drawicon()
        self.drawtext()
        if self.state != 'expanded':
            return y+17
        # draw children
        if not self.children:
            # First expansion: materialize child nodes from the item.
            sublist = self.item._GetSubList()
            if not sublist:
                # _IsExpandable() was mistaken; that's allowed
                return y+17
            for item in sublist:
                child = self.__class__(self.canvas, self, item)
                self.children.append(child)
        cx = x+20
        cy = y+17
        cylast = 0
        for child in self.children:
            cylast = cy
            self.canvas.create_line(x+9, cy+7, cx, cy+7, fill="gray50")
            cy = child.draw(cx, cy)
            if child.item._IsExpandable():
                if child.state == 'expanded':
                    iconname = "minusnode"
                    callback = child.collapse
                else:
                    iconname = "plusnode"
                    callback = child.expand
                image = self.geticonimage(iconname)
                id = self.canvas.create_image(x+9, cylast+7, image=image)
                # XXX This leaks bindings until canvas is deleted:
                self.canvas.tag_bind(id, "<1>", callback)
                self.canvas.tag_bind(id, "<Double-1>", lambda x: None)
        # Vertical connector from this node down to its last child.
        id = self.canvas.create_line(x+9, y+10, x+9, cylast+7,
            ##stipple="gray50",     # XXX Seems broken in Tk 8.0.x
            fill="gray50")
        self.canvas.tag_lower(id) # XXX .lower(id) before Python 1.5.2
        return cy

    def drawicon(self):
        """(Re)draw this node's icon and bind click handlers to it."""
        if self.selected:
            imagename = (self.item.GetSelectedIconName() or
                         self.item.GetIconName() or
                         "openfolder")
        else:
            imagename = self.item.GetIconName() or "folder"
        image = self.geticonimage(imagename)
        id = self.canvas.create_image(self.x, self.y, anchor="nw", image=image)
        self.image_id = id
        self.canvas.tag_bind(id, "<1>", self.select)
        self.canvas.tag_bind(id, "<Double-1>", self.flip)

    def drawtext(self):
        """(Re)draw the optional label text plus the (editable) text Label."""
        textx = self.x+20-1
        texty = self.y-1
        labeltext = self.item.GetLabelText()
        if labeltext:
            id = self.canvas.create_text(textx, texty, anchor="nw",
                                         text=labeltext)
            self.canvas.tag_bind(id, "<1>", self.select)
            self.canvas.tag_bind(id, "<Double-1>", self.flip)
            x0, y0, x1, y1 = self.canvas.bbox(id)
            # Push the main text to the right of the label (min column 200).
            textx = max(x1, 200) + 10
        text = self.item.GetText() or "<no text>"
        try:
            self.entry
        except AttributeError:
            pass
        else:
            # An in-place edit is still open; commit it before redrawing.
            self.edit_finish()
        try:
            label = self.label
        except AttributeError:
            # padding carefully selected (on Windows) to match Entry widget:
            self.label = Label(self.canvas, text=text, bd=0, padx=2, pady=2)
        theme = idleConf.GetOption('main','Theme','name')
        if self.selected:
            self.label.configure(idleConf.GetHighlight(theme, 'hilite'))
        else:
            self.label.configure(idleConf.GetHighlight(theme, 'normal'))
        id = self.canvas.create_window(textx, texty,
                                       anchor="nw", window=self.label)
        self.label.bind("<1>", self.select_or_edit)
        self.label.bind("<Double-1>", self.flip)
        self.text_id = id

    def select_or_edit(self, event=None):
        """A click on an already-selected, editable item starts editing."""
        if self.selected and self.item.IsEditable():
            self.edit(event)
        else:
            self.select(event)

    def edit(self, event=None):
        """Open an Entry over the label for in-place renaming."""
        self.entry = Entry(self.label, bd=0, highlightthickness=1, width=0)
        self.entry.insert(0, self.label['text'])
        self.entry.selection_range(0, END)
        self.entry.pack(ipadx=5)
        self.entry.focus_set()
        self.entry.bind("<Return>", self.edit_finish)
        self.entry.bind("<Escape>", self.edit_cancel)

    def edit_finish(self, event=None):
        """Commit the in-place edit, pushing the new text to the item."""
        try:
            entry = self.entry
            del self.entry
        except AttributeError:
            # No edit in progress.
            return
        text = entry.get()
        entry.destroy()
        if text and text != self.item.GetText():
            self.item.SetText(text)
        # Re-read: the item may have rejected or normalized the new text.
        text = self.item.GetText()
        self.label['text'] = text
        self.drawtext()
        self.canvas.focus_set()

    def edit_cancel(self, event=None):
        """Abandon the in-place edit and restore the original label."""
        try:
            entry = self.entry
            del self.entry
        except AttributeError:
            # No edit in progress.
            return
        entry.destroy()
        self.drawtext()
        self.canvas.focus_set()
+
+
class TreeItem:

    """Abstract class representing tree items.

    Methods should typically be overridden, otherwise a default action
    is used.

    """

    def __init__(self):
        """Constructor.  Do whatever you need to do."""

    def GetText(self):
        """Return text string to display."""

    def GetLabelText(self):
        """Return label text string to display in front of text (if any)."""

    # Cached expandability: None = not yet queried; set by _IsExpandable
    # and cleared to 0 by _GetSubList when the sublist turns out empty.
    expandable = None

    def _IsExpandable(self):
        """Do not override!  Called by TreeNode."""
        if self.expandable is None:
            self.expandable = self.IsExpandable()
        return self.expandable

    def IsExpandable(self):
        """Return whether there are subitems."""
        return 1

    def _GetSubList(self):
        """Do not override!  Called by TreeNode."""
        if not self.IsExpandable():
            return []
        sublist = self.GetSubList()
        if not sublist:
            # The item claimed to be expandable but produced nothing;
            # remember that so the expand control goes away.
            self.expandable = 0
        return sublist

    def IsEditable(self):
        """Return whether the item's text may be edited."""

    def SetText(self, text):
        """Change the item's text (if it is editable)."""

    def GetIconName(self):
        """Return name of icon to be displayed normally."""

    def GetSelectedIconName(self):
        """Return name of icon to be displayed when selected."""

    def GetSubList(self):
        """Return list of items forming sublist."""

    def OnDoubleClick(self):
        """Called on a double-click on the item."""
+
+
+# Example application
+
class FileTreeItem(TreeItem):

    """Example TreeItem subclass -- browse the file system."""

    def __init__(self, path):
        self.path = path

    def GetText(self):
        # basename is "" for roots like "C:/" -- fall back to the path.
        return os.path.basename(self.path) or self.path

    def IsEditable(self):
        # Root/drive entries (empty basename) cannot be renamed.
        return os.path.basename(self.path) != ""

    def SetText(self, text):
        """Rename the file to 'text' within its current directory."""
        newpath = os.path.dirname(self.path)
        newpath = os.path.join(newpath, text)
        if os.path.dirname(newpath) != os.path.dirname(self.path):
            # Reject names that would move the file elsewhere (e.g. "a/b").
            return
        try:
            os.rename(self.path, newpath)
            self.path = newpath
        except os.error:
            # Best effort: keep the old name if the rename fails.
            pass

    def GetIconName(self):
        if not self.IsExpandable():
            return "python" # XXX wish there was a "file" icon

    def IsExpandable(self):
        return os.path.isdir(self.path)

    def GetSubList(self):
        """Return a FileTreeItem per directory entry, sorted case-insensitively."""
        try:
            names = os.listdir(self.path)
        except os.error:
            # Unreadable directory: present it as empty.
            return []
        # Python 2 comparison-function sort; normcase gives a
        # case-insensitive order on Windows.
        names.sort(lambda a, b: cmp(os.path.normcase(a), os.path.normcase(b)))
        sublist = []
        for name in names:
            item = FileTreeItem(os.path.join(self.path, name))
            sublist.append(item)
        return sublist
+
+
+# A canvas widget with scroll bars and some useful bindings
+
class ScrolledCanvas:
    """A Canvas in a Frame with both scrollbars and paging key bindings.

    Extra keyword options are passed through to the Canvas; Alt-2 zooms
    the window to full screen height via the ZoomHeight module.
    """
    def __init__(self, master, **opts):
        if not opts.has_key('yscrollincrement'):
            # Default scroll unit; matches the 17-pixel row height used
            # by TreeNode.draw().
            opts['yscrollincrement'] = 17
        self.master = master
        self.frame = Frame(master)
        self.frame.rowconfigure(0, weight=1)
        self.frame.columnconfigure(0, weight=1)
        self.canvas = Canvas(self.frame, **opts)
        self.canvas.grid(row=0, column=0, sticky="nsew")
        self.vbar = Scrollbar(self.frame, name="vbar")
        self.vbar.grid(row=0, column=1, sticky="nse")
        self.hbar = Scrollbar(self.frame, name="hbar", orient="horizontal")
        self.hbar.grid(row=1, column=0, sticky="ews")
        # Cross-wire canvas and scrollbars.
        self.canvas['yscrollcommand'] = self.vbar.set
        self.vbar['command'] = self.canvas.yview
        self.canvas['xscrollcommand'] = self.hbar.set
        self.hbar['command'] = self.canvas.xview
        self.canvas.bind("<Key-Prior>", self.page_up)
        self.canvas.bind("<Key-Next>", self.page_down)
        self.canvas.bind("<Key-Up>", self.unit_up)
        self.canvas.bind("<Key-Down>", self.unit_down)
        #if isinstance(master, Toplevel) or isinstance(master, Tk):
        self.canvas.bind("<Alt-Key-2>", self.zoom_height)
        self.canvas.focus_set()
    def page_up(self, event):
        self.canvas.yview_scroll(-1, "page")
        return "break"
    def page_down(self, event):
        self.canvas.yview_scroll(1, "page")
        return "break"
    def unit_up(self, event):
        self.canvas.yview_scroll(-1, "unit")
        return "break"
    def unit_down(self, event):
        self.canvas.yview_scroll(1, "unit")
        return "break"
    def zoom_height(self, event):
        ZoomHeight.zoom_height(self.master)
        return "break"
+
+
+# Testing functions
+
def test():
    """Manual test: browse a directory tree in a new Toplevel.

    NOTE(review): requires a running IDLE (attaches to PyShell.root)
    and a Windows-style path -- confirm before running elsewhere.
    """
    import PyShell
    root = Toplevel(PyShell.root)
    root.configure(bd=0, bg="yellow")
    root.focus_set()
    sc = ScrolledCanvas(root, bg="white", highlightthickness=0, takefocus=1)
    sc.frame.pack(expand=1, fill="both")
    item = FileTreeItem("C:/windows/desktop")
    node = TreeNode(sc.canvas, None, item)
    node.expand()
+
def test2():
    """Manual test of the tree on a bare Canvas, browsing os.curdir."""
    # test w/o scrolling canvas
    root = Tk()
    root.configure(bd=0)
    canvas = Canvas(root, bg="white", highlightthickness=0)
    canvas.pack(expand=1, fill="both")
    item = FileTreeItem(os.curdir)
    node = TreeNode(canvas, None, item)
    node.update()
    canvas.focus_set()

# Note: test() needs a live IDLE session; test2() stands alone.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/UndoDelegator.py b/depot_tools/release/win/python_24/Lib/idlelib/UndoDelegator.py
new file mode 100644
index 0000000..182a117
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/UndoDelegator.py
@@ -0,0 +1,352 @@
+import sys
+import string
+from Tkinter import *
+from Delegator import Delegator
+
+#$ event <<redo>>
+#$ win <Control-y>
+#$ unix <Alt-z>
+
+#$ event <<undo>>
+#$ win <Control-z>
+#$ unix <Control-z>
+
+#$ event <<dump-undo-state>>
+#$ win <Control-backslash>
+#$ unix <Control-backslash>
+
+
class UndoDelegator(Delegator):
    """Delegator that records insert/delete commands for undo/redo.

    State: undolist is the command history, pointer the current position
    in it, saved the pointer value at the last save (-1 when no position
    corresponds to a saved buffer), and undoblock groups commands via
    undo_block_start()/undo_block_stop().
    """

    # History length before the oldest command is dropped.
    max_undo = 1000

    def __init__(self):
        Delegator.__init__(self)
        self.reset_undo()

    def setdelegate(self, delegate):
        """Rebind the undo/redo/dump virtual events to the new delegate."""
        if self.delegate is not None:
            self.unbind("<<undo>>")
            self.unbind("<<redo>>")
            self.unbind("<<dump-undo-state>>")
        Delegator.setdelegate(self, delegate)
        if delegate is not None:
            self.bind("<<undo>>", self.undo_event)
            self.bind("<<redo>>", self.redo_event)
            self.bind("<<dump-undo-state>>", self.dump_event)

    def dump_event(self, event):
        """Debug helper: print the undo list around the pointer."""
        from pprint import pprint
        pprint(self.undolist[:self.pointer])
        print "pointer:", self.pointer,
        print "saved:", self.saved,
        print "can_merge:", self.can_merge,
        print "get_saved():", self.get_saved()
        pprint(self.undolist[self.pointer:])
        return "break"

    def reset_undo(self):
        """Discard all history and mark the buffer as saved."""
        self.was_saved = -1
        self.pointer = 0
        self.undolist = []
        self.undoblock = 0  # or a CommandSequence instance
        self.set_saved(1)

    def set_saved(self, flag):
        """Record (flag true) or invalidate the save position."""
        if flag:
            self.saved = self.pointer
        else:
            self.saved = -1
        self.can_merge = False
        self.check_saved()

    def get_saved(self):
        return self.saved == self.pointer

    saved_change_hook = None

    def set_saved_change_hook(self, hook):
        self.saved_change_hook = hook

    was_saved = -1

    def check_saved(self):
        # Fire the hook only when the saved/unsaved state actually flips.
        is_saved = self.get_saved()
        if is_saved != self.was_saved:
            self.was_saved = is_saved
            if self.saved_change_hook:
                self.saved_change_hook()

    def insert(self, index, chars, tags=None):
        self.addcmd(InsertCommand(index, chars, tags))

    def delete(self, index1, index2=None):
        self.addcmd(DeleteCommand(index1, index2))

    # Clients should call undo_block_start() and undo_block_stop()
    # around a sequence of editing cmds to be treated as a unit by
    # undo & redo.  Nested matching calls are OK, and the inner calls
    # then act like nops.  OK too if no editing cmds, or only one
    # editing cmd, is issued in between:  if no cmds, the whole
    # sequence has no effect; and if only one cmd, that cmd is entered
    # directly into the undo list, as if undo_block_xxx hadn't been
    # called.  The intent of all that is to make this scheme easy
    # to use:  all the client has to worry about is making sure each
    # _start() call is matched by a _stop() call.

    def undo_block_start(self):
        if self.undoblock == 0:
            self.undoblock = CommandSequence()
        self.undoblock.bump_depth()

    def undo_block_stop(self):
        if self.undoblock.bump_depth(-1) == 0:
            cmd = self.undoblock
            self.undoblock = 0
            if len(cmd) > 0:
                if len(cmd) == 1:
                    # no need to wrap a single cmd
                    cmd = cmd.getcmd(0)
                # this blk of cmds, or single cmd, has already
                # been done, so don't execute it again
                self.addcmd(cmd, 0)

    def addcmd(self, cmd, execute=True):
        """Optionally execute cmd, then record it (truncating any redo tail)."""
        if execute:
            cmd.do(self.delegate)
        if self.undoblock != 0:
            # Inside an undo block: just collect; the block is added later.
            self.undoblock.append(cmd)
            return
        if self.can_merge and self.pointer > 0:
            # Try to coalesce with the previous command (e.g. typing runs).
            lastcmd = self.undolist[self.pointer-1]
            if lastcmd.merge(cmd):
                return
        self.undolist[self.pointer:] = [cmd]
        if self.saved > self.pointer:
            # The save position was in the discarded redo tail.
            self.saved = -1
        self.pointer = self.pointer + 1
        if len(self.undolist) > self.max_undo:
            ##print "truncating undo list"
            del self.undolist[0]
            self.pointer = self.pointer - 1
            if self.saved >= 0:
                self.saved = self.saved - 1
        self.can_merge = True
        self.check_saved()

    def undo_event(self, event):
        """Undo one command (or block); bell when nothing to undo."""
        if self.pointer == 0:
            self.bell()
            return "break"
        cmd = self.undolist[self.pointer - 1]
        cmd.undo(self.delegate)
        self.pointer = self.pointer - 1
        self.can_merge = False
        self.check_saved()
        return "break"

    def redo_event(self, event):
        """Redo one command (or block); bell when nothing to redo."""
        if self.pointer >= len(self.undolist):
            self.bell()
            return "break"
        cmd = self.undolist[self.pointer]
        cmd.redo(self.delegate)
        self.pointer = self.pointer + 1
        self.can_merge = False
        self.check_saved()
        return "break"
+
+
+class Command:
+
+    # Base class for Undoable commands
+
+    tags = None
+
+    def __init__(self, index1, index2, chars, tags=None):
+        self.marks_before = {}
+        self.marks_after = {}
+        self.index1 = index1
+        self.index2 = index2
+        self.chars = chars
+        if tags:
+            self.tags = tags
+
+    def __repr__(self):
+        s = self.__class__.__name__
+        t = (self.index1, self.index2, self.chars, self.tags)
+        if self.tags is None:
+            t = t[:-1]
+        return s + repr(t)
+
+    def do(self, text):
+        pass
+
+    def redo(self, text):
+        pass
+
+    def undo(self, text):
+        pass
+
+    def merge(self, cmd):
+        return 0
+
+    def save_marks(self, text):
+        marks = {}
+        for name in text.mark_names():
+            if name != "insert" and name != "current":
+                marks[name] = text.index(name)
+        return marks
+
+    def set_marks(self, text, marks):
+        for name, index in marks.items():
+            text.mark_set(name, index)
+
+
class InsertCommand(Command):
    """Undoable insertion of characters at a single index."""

    def __init__(self, index1, chars, tags=None):
        Command.__init__(self, index1, None, chars, tags)

    def do(self, text):
        self.marks_before = self.save_marks(text)
        self.index1 = text.index(self.index1)
        if text.compare(self.index1, ">", "end-1c"):
            # Never insert after the final newline Tk keeps at "end".
            self.index1 = text.index("end-1c")
        text.insert(self.index1, self.chars, self.tags)
        self.index2 = text.index("%s+%dc" % (self.index1, len(self.chars)))
        self.marks_after = self.save_marks(text)

    def redo(self, text):
        text.mark_set('insert', self.index1)
        text.insert(self.index1, self.chars, self.tags)
        self.set_marks(text, self.marks_after)
        text.see('insert')

    def undo(self, text):
        text.mark_set('insert', self.index1)
        text.delete(self.index1, self.index2)
        self.set_marks(text, self.marks_before)
        text.see('insert')

    def merge(self, cmd):
        """Absorb cmd when it is a single-character continuation of self."""
        mergeable = (
            self.__class__ is cmd.__class__
            and self.index2 == cmd.index1
            and self.tags == cmd.tags
            and len(cmd.chars) == 1
            and (not self.chars
                 or self.classify(self.chars[-1]) == self.classify(cmd.chars))
        )
        if not mergeable:
            return False
        self.index2 = cmd.index2
        self.chars = self.chars + cmd.chars
        return True

    alphanumeric = string.ascii_letters + string.digits + "_"

    def classify(self, c):
        """Bucket a character: alphanumeric, newline, or punctuation."""
        if c in self.alphanumeric:
            return "alphanumeric"
        if c == "\n":
            return "newline"
        return "punctuation"
+
+
class DeleteCommand(Command):

    # Undoable delete command

    def __init__(self, index1, index2=None):
        # index2=None means: delete the single character at index1.
        Command.__init__(self, index1, index2, None, None)

    def do(self, text):
        """Perform the deletion, capturing the removed chars for undo."""
        self.marks_before = self.save_marks(text)
        self.index1 = text.index(self.index1)
        if self.index2:
            self.index2 = text.index(self.index2)
        else:
            # No explicit end: delete exactly one character.
            self.index2 = text.index(self.index1 + " +1c")
        if text.compare(self.index2, ">", "end-1c"):
            # Don't delete the final newline
            self.index2 = text.index("end-1c")
        self.chars = text.get(self.index1, self.index2)
        text.delete(self.index1, self.index2)
        self.marks_after = self.save_marks(text)
        ##sys.__stderr__.write("do: %s\n" % self)

    def redo(self, text):
        """Repeat the deletion and restore the post-delete marks."""
        text.mark_set('insert', self.index1)
        text.delete(self.index1, self.index2)
        self.set_marks(text, self.marks_after)
        text.see('insert')
        ##sys.__stderr__.write("redo: %s\n" % self)

    def undo(self, text):
        """Re-insert the deleted characters and restore the prior marks."""
        text.mark_set('insert', self.index1)
        text.insert(self.index1, self.chars)
        self.set_marks(text, self.marks_before)
        text.see('insert')
        ##sys.__stderr__.write("undo: %s\n" % self)
+
class CommandSequence(Command):
    """Groups several undoable commands so they undo/redo as one unit."""

    def __init__(self):
        self.cmds = []
        self.depth = 0  # nesting level of undo_block_start/stop calls

    def __repr__(self):
        body = ",\n".join("    %r" % (c,) for c in self.cmds)
        return "%s(\n%s\n)" % (self.__class__.__name__, body)

    def __len__(self):
        return len(self.cmds)

    def append(self, cmd):
        self.cmds.append(cmd)

    def getcmd(self, i):
        return self.cmds[i]

    def redo(self, text):
        # Replay in original order.
        for c in self.cmds:
            c.redo(text)

    def undo(self, text):
        # Unwind in reverse order.
        for c in reversed(self.cmds):
            c.undo(text)

    def bump_depth(self, incr=1):
        self.depth += incr
        return self.depth
+
def main():
    """Manual test: a Text widget with undo/redo percolated through."""
    from Percolator import Percolator
    root = Tk()
    root.wm_protocol("WM_DELETE_WINDOW", root.quit)
    text = Text()
    text.pack()
    text.focus_set()
    p = Percolator(text)
    d = UndoDelegator()
    p.insertfilter(d)
    root.mainloop()

if __name__ == "__main__":
    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/WidgetRedirector.py b/depot_tools/release/win/python_24/Lib/idlelib/WidgetRedirector.py
new file mode 100644
index 0000000..df60cea
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/WidgetRedirector.py
@@ -0,0 +1,92 @@
+from Tkinter import *
+
+
class WidgetRedirector:

    """Support for redirecting arbitrary widget subcommands.

    Renames the widget's Tcl command and installs dispatch() under the
    original name, so individual subcommands (e.g. "insert") can be
    intercepted via register()/unregister().
    """

    def __init__(self, widget):
        self.dict = {}              # subcommand name -> override function
        self.widget = widget
        self.tk = tk = widget.tk
        w = widget._w
        self.orig = w + "_orig"
        # Move the widget's Tcl command aside and put our dispatcher
        # in its place.
        tk.call("rename", w, self.orig)
        tk.createcommand(w, self.dispatch)

    def __repr__(self):
        return "WidgetRedirector(%s<%s>)" % (self.widget.__class__.__name__,
                                             self.widget._w)

    def close(self):
        """Undo every registration and restore the original Tcl command."""
        for name in self.dict.keys():
            self.unregister(name)
        widget = self.widget; del self.widget
        orig = self.orig; del self.orig
        tk = widget.tk
        w = widget._w
        tk.deletecommand(w)
        tk.call("rename", orig, w)

    def register(self, name, function):
        """Intercept subcommand 'name'; return the previous handler.

        Returns either the previously registered function, or an
        OriginalCommand proxy that invokes the widget's built-in
        behavior.
        """
        if name in self.dict:
            # BUG FIX: this used to read 'dict[name]' -- the builtin
            # dict type, not self.dict -- so re-registering a name
            # raised instead of returning the earlier registration.
            previous = self.dict[name]
        else:
            previous = OriginalCommand(self, name)
        self.dict[name] = function
        setattr(self.widget, name, function)
        return previous

    def unregister(self, name):
        """Remove the interception for 'name'; return its function or None."""
        if name in self.dict:
            function = self.dict[name]
            del self.dict[name]
            if hasattr(self.widget, name):
                delattr(self.widget, name)
            return function
        else:
            return None

    def dispatch(self, cmd, *args):
        # Route to the registered override if any, otherwise to the
        # renamed original command; Tcl errors are reported as "".
        m = self.dict.get(cmd)
        try:
            if m:
                return m(*args)
            else:
                return self.tk.call((self.orig, cmd) + args)
        except TclError:
            return ""
+
+
class OriginalCommand:
    """Callable proxy for a widget's original (renamed) Tcl subcommand."""

    def __init__(self, redir, name):
        self.redir = redir
        self.name = name
        # Cache everything needed per call so __call__ stays cheap.
        self.tk = redir.tk
        self.orig = redir.orig
        self.tk_call = redir.tk.call
        self.orig_and_name = (redir.orig, name)

    def __repr__(self):
        return "OriginalCommand(%r, %r)" % (self.redir, self.name)

    def __call__(self, *args):
        return self.tk_call(self.orig_and_name + args)
+
+
def main():
    """Manual test: echo every Text.insert call to stdout."""
    root = Tk()
    text = Text()
    text.pack()
    text.focus_set()
    redir = WidgetRedirector(text)
    global orig_insert
    def my_insert(*args):
        print "insert", args
        orig_insert(*args)
    orig_insert = redir.register("insert", my_insert)
    root.mainloop()

if __name__ == "__main__":
    main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/WindowList.py b/depot_tools/release/win/python_24/Lib/idlelib/WindowList.py
new file mode 100644
index 0000000..658502b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/WindowList.py
@@ -0,0 +1,90 @@
+from Tkinter import *
+
+class WindowList:
+
+    def __init__(self):
+        self.dict = {}
+        self.callbacks = []
+
+    def add(self, window):
+        window.after_idle(self.call_callbacks)
+        self.dict[str(window)] = window
+
+    def delete(self, window):
+        try:
+            del self.dict[str(window)]
+        except KeyError:
+            # Sometimes, destroy() is called twice
+            pass
+        self.call_callbacks()
+
+    def add_windows_to_menu(self,  menu):
+        list = []
+        for key in self.dict.keys():
+            window = self.dict[key]
+            try:
+                title = window.get_title()
+            except TclError:
+                continue
+            list.append((title, window))
+        list.sort()
+        for title, window in list:
+            menu.add_command(label=title, command=window.wakeup)
+
+    def register_callback(self, callback):
+        self.callbacks.append(callback)
+
+    def unregister_callback(self, callback):
+        try:
+            self.callbacks.remove(callback)
+        except ValueError:
+            pass
+
+    def call_callbacks(self):
+        for callback in self.callbacks:
+            try:
+                callback()
+            except:
+                print "warning: callback failed in WindowList", \
+                      sys.exc_type, ":", sys.exc_value
+
# Module-level singleton registry plus convenience aliases bound to it.
registry = WindowList()

add_windows_to_menu = registry.add_windows_to_menu
register_callback = registry.register_callback
unregister_callback = registry.unregister_callback
+
+
class ListedToplevel(Toplevel):
    """Toplevel that registers itself in the module-level window registry."""

    def __init__(self, master, **kw):
        Toplevel.__init__(self, master, kw)
        registry.add(self)
        # Widget to refocus when this window is raised via wakeup().
        self.focused_widget = self

    def destroy(self):
        registry.delete(self)
        Toplevel.destroy(self)
        # If this is Idle's last window then quit the mainloop
        # (Needed for clean exit on Windows 98)
        if not registry.dict:
            self.quit()

    def update_windowlist_registry(self, window):
        registry.call_callbacks()

    def get_title(self):
        # Subclass can override
        return self.wm_title()

    def wakeup(self):
        """Deiconify, raise, and refocus this window."""
        try:
            if self.wm_state() == "iconic":
                self.wm_withdraw()
                self.wm_deiconify()
            self.tkraise()
            self.focused_widget.focus_set()
        except TclError:
            # This can happen when the window menu was torn off.
            # Simply ignore it.
            pass
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/ZoomHeight.py b/depot_tools/release/win/python_24/Lib/idlelib/ZoomHeight.py
new file mode 100644
index 0000000..2ab4656
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/ZoomHeight.py
@@ -0,0 +1,41 @@
+# Sample extension: zoom a window to maximum height
+
+import re
+import sys
+
+class ZoomHeight:
+
+    menudefs = [
+        ('windows', [
+            ('_Zoom Height', '<<zoom-height>>'),
+         ])
+    ]
+
+    def __init__(self, editwin):
+        self.editwin = editwin
+
+    def zoom_height_event(self, event):
+        top = self.editwin.top
+        zoom_height(top)
+
+def zoom_height(top):
+    geom = top.wm_geometry()
+    m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom)
+    if not m:
+        top.bell()
+        return
+    width, height, x, y = map(int, m.groups())
+    newheight = top.winfo_screenheight()
+    if sys.platform == 'win32':
+        newy = 0
+        newheight = newheight - 72
+    else:
+        #newy = 24
+        newy = 0
+        #newheight = newheight - 96
+        newheight = newheight - 88
+    if height >= newheight:
+        newgeom = ""
+    else:
+        newgeom = "%dx%d+%d+%d" % (width, newheight, x, newy)
+    top.wm_geometry(newgeom)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/__init__.py b/depot_tools/release/win/python_24/Lib/idlelib/__init__.py
new file mode 100644
index 0000000..7a83dde
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/__init__.py
@@ -0,0 +1 @@
+# Dummy file to make this a package.
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/aboutDialog.py b/depot_tools/release/win/python_24/Lib/idlelib/aboutDialog.py
new file mode 100644
index 0000000..c1210612
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/aboutDialog.py
@@ -0,0 +1,163 @@
+"""About Dialog for IDLE
+
+"""
+
+from Tkinter import *
+import string, os
+import textView
+import idlever
+
+class AboutDialog(Toplevel):
+    """Modal about dialog for idle
+
+    """
+    def __init__(self,parent,title):
+        Toplevel.__init__(self, parent)
+        self.configure(borderwidth=5)
+        self.geometry("+%d+%d" % (parent.winfo_rootx()+30,
+                                  parent.winfo_rooty()+30))
+        self.bg = "#707070"
+        self.fg = "#ffffff"
+        self.CreateWidgets()
+        self.resizable(height=FALSE, width=FALSE)
+        self.title(title)
+        self.transient(parent)
+        self.grab_set()
+        self.protocol("WM_DELETE_WINDOW", self.Ok)
+        self.parent = parent
+        self.buttonOk.focus_set()
+        self.bind('<Return>',self.Ok) #dismiss dialog
+        self.bind('<Escape>',self.Ok) #dismiss dialog
+        self.wait_window()
+
+    def CreateWidgets(self):
+        frameMain = Frame(self, borderwidth=2, relief=SUNKEN)
+        frameButtons = Frame(self)
+        frameButtons.pack(side=BOTTOM, fill=X)
+        frameMain.pack(side=TOP, expand=TRUE, fill=BOTH)
+        self.buttonOk = Button(frameButtons, text='Close',
+                               command=self.Ok)
+        self.buttonOk.pack(padx=5, pady=5)
+        #self.picture = Image('photo', data=self.pictureData)
+        frameBg = Frame(frameMain, bg=self.bg)
+        frameBg.pack(expand=TRUE, fill=BOTH)
+        labelTitle = Label(frameBg, text='IDLE', fg=self.fg, bg=self.bg,
+                           font=('courier', 24, 'bold'))
+        labelTitle.grid(row=0, column=0, sticky=W, padx=10, pady=10)
+        #labelPicture = Label(frameBg, text='[picture]')
+        #image=self.picture, bg=self.bg)
+        #labelPicture.grid(row=1, column=1, sticky=W, rowspan=2,
+        #                  padx=0, pady=3)
+        byline = "Python's Integrated DeveLopment Environment" + 5*'\n'
+        labelDesc = Label(frameBg, text=byline, justify=LEFT,
+                          fg=self.fg, bg=self.bg)
+        labelDesc.grid(row=2, column=0, sticky=W, columnspan=3, padx=10, pady=5)
+        labelEmail = Label(frameBg, text='email:  idle-dev@python.org',
+                           justify=LEFT, fg=self.fg, bg=self.bg)
+        labelEmail.grid(row=6, column=0, columnspan=2,
+                        sticky=W, padx=10, pady=0)
+        labelWWW = Label(frameBg, text='www:  http://www.python.org/idle/',
+                         justify=LEFT, fg=self.fg, bg=self.bg)
+        labelWWW.grid(row=7, column=0, columnspan=2, sticky=W, padx=10, pady=0)
+        Frame(frameBg, borderwidth=1, relief=SUNKEN,
+              height=2, bg=self.bg).grid(row=8, column=0, sticky=EW,
+                                         columnspan=3, padx=5, pady=5)
+        labelPythonVer = Label(frameBg, text='Python version:  ' + \
+                               sys.version.split()[0], fg=self.fg, bg=self.bg)
+        labelPythonVer.grid(row=9, column=0, sticky=W, padx=10, pady=0)
+        # handle weird tk version num in windoze python >= 1.6 (?!?)
+        tkVer = repr(TkVersion).split('.')
+        tkVer[len(tkVer)-1] = str('%.3g' % (float('.'+tkVer[len(tkVer)-1])))[2:]
+        if tkVer[len(tkVer)-1] == '':
+            tkVer[len(tkVer)-1] = '0'
+        tkVer = string.join(tkVer,'.')
+        labelTkVer = Label(frameBg, text='Tk version:  '+
+                           tkVer, fg=self.fg, bg=self.bg)
+        labelTkVer.grid(row=9, column=1, sticky=W, padx=2, pady=0)
+        py_button_f = Frame(frameBg, bg=self.bg)
+        py_button_f.grid(row=10, column=0, columnspan=2, sticky=NSEW)
+        buttonLicense = Button(py_button_f, text='License', width=8,
+                               highlightbackground=self.bg,
+                               command=self.ShowLicense)
+        buttonLicense.pack(side=LEFT, padx=10, pady=10)
+        buttonCopyright = Button(py_button_f, text='Copyright', width=8,
+                                 highlightbackground=self.bg,
+                                 command=self.ShowCopyright)
+        buttonCopyright.pack(side=LEFT, padx=10, pady=10)
+        buttonCredits = Button(py_button_f, text='Credits', width=8,
+                               highlightbackground=self.bg,
+                               command=self.ShowPythonCredits)
+        buttonCredits.pack(side=LEFT, padx=10, pady=10)
+        Frame(frameBg, borderwidth=1, relief=SUNKEN,
+              height=2, bg=self.bg).grid(row=11, column=0, sticky=EW,
+                                         columnspan=3, padx=5, pady=5)
+        idle_v = Label(frameBg, text='IDLE version:   ' + idlever.IDLE_VERSION,
+                       fg=self.fg, bg=self.bg)
+        idle_v.grid(row=12, column=0, sticky=W, padx=10, pady=0)
+        idle_button_f = Frame(frameBg, bg=self.bg)
+        idle_button_f.grid(row=13, column=0, columnspan=3, sticky=NSEW)
+        idle_about_b = Button(idle_button_f, text='README', width=8,
+                                highlightbackground=self.bg,
+                                command=self.ShowIDLEAbout)
+        idle_about_b.pack(side=LEFT, padx=10, pady=10)
+        idle_news_b = Button(idle_button_f, text='NEWS', width=8,
+                                highlightbackground=self.bg,
+                                command=self.ShowIDLENEWS)
+        idle_news_b.pack(side=LEFT, padx=10, pady=10)
+        idle_credits_b = Button(idle_button_f, text='Credits', width=8,
+                                highlightbackground=self.bg,
+                                command=self.ShowIDLECredits)
+        idle_credits_b.pack(side=LEFT, padx=10, pady=10)
+
+    def ShowLicense(self):
+        self.display_printer_text(license, 'About - License')
+
+    def ShowCopyright(self):
+        self.display_printer_text(copyright, 'About - Copyright')
+
+    def ShowPythonCredits(self):
+        self.display_printer_text(credits, 'About - Python Credits')
+
+    def ShowIDLECredits(self):
+        self.ViewFile('About - Credits','CREDITS.txt', 'iso-8859-1')
+
+    def ShowIDLEAbout(self):
+        self.ViewFile('About - Readme', 'README.txt')
+
+    def ShowIDLENEWS(self):
+        self.ViewFile('About - NEWS', 'NEWS.txt')
+
+    def display_printer_text(self, printer, title):
+        printer._Printer__setup()
+        data = '\n'.join(printer._Printer__lines)
+        textView.TextViewer(self, title, None, data)
+
+    def ViewFile(self, viewTitle, viewFile, encoding=None):
+        fn = os.path.join(os.path.abspath(os.path.dirname(__file__)), viewFile)
+        if encoding:
+            import codecs
+            try:
+                textFile = codecs.open(fn, 'r')
+            except IOError:
+                import tkMessageBox
+                tkMessageBox.showerror(title='File Load Error',
+                                       message='Unable to load file %r .' % (fn,),
+                                       parent=self)
+                return
+            else:
+                data = textFile.read()
+        else:
+            data = None
+        textView.TextViewer(self, viewTitle, fn, data=data)
+
+    def Ok(self, event=None):
+        self.destroy()
+
+if __name__ == '__main__':
+    # test the dialog
+    # Manual smoke test: clicking the button opens the modal About dialog.
+    root = Tk()
+    def run():
+        import aboutDialog
+        aboutDialog.AboutDialog(root, 'About')
+    Button(root, text='Dialog', command=run).pack()
+    root.mainloop()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/buildapp.py b/depot_tools/release/win/python_24/Lib/idlelib/buildapp.py
new file mode 100644
index 0000000..672eb1e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/buildapp.py
@@ -0,0 +1,17 @@
+#
+# After running python setup.py install, run this program from the command
+# line like so:
+#
+# % python2.3 buildapp.py build
+#
+# A double-clickable IDLE application will be created in the build/ directory.
+#
+
+from bundlebuilder import buildapp
+
+# Build a double-clickable Mac OS X IDLE.app via bundlebuilder.
+buildapp(
+        name="IDLE",
+        mainprogram="idle.py",
+        argv_emulation=1,  # synthesize sys.argv from dropped-file Apple events
+        iconfile="Icons/idle.icns",
+)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/config-extensions.def b/depot_tools/release/win/python_24/Lib/idlelib/config-extensions.def
new file mode 100644
index 0000000..4a4055f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/config-extensions.def
@@ -0,0 +1,80 @@
+# config-extensions.def
+#
+# IDLE reads several config files to determine user preferences.  This
+# file is the default configuration file for IDLE extensions settings.
+#
+# Each extension must have at least one section, named after the extension
+# module. This section must contain an 'enable' item (=1 to enable the
+# extension, =0 to disable it), it may contain 'enable_editor' or 'enable_shell'
+# items, to apply it only to editor/shell windows, and may also contain any
+# other general configuration items for the extension.
+#
+# Each extension must define at least one section named ExtensionName_bindings
+# or ExtensionName_cfgBindings. If present, ExtensionName_bindings defines
+# virtual event bindings for the extension that are not user re-configurable.
+# If present, ExtensionName_cfgBindings defines virtual event bindings for the
+# extension that may be sensibly re-configured.
+#
+# If there are no keybindings for a menu's virtual events, include lines like
+# <<toggle-code-context>>=   (See [CodeContext], below.)
+#
+# Currently it is necessary to manually modify this file to change extension
+# key bindings and default values. To customize, create
+# ~/.idlerc/config-extensions.cfg and append the appropriate customized
+# section(s).  Those sections will override the defaults in this file.
+#
+# Note: If a keybinding is already in use when the extension is
+# loaded, the extension's virtual event's keybinding will be set to ''.
+#
+# See config-keys.def for notes on specifying keys and extend.txt for
+# information on creating IDLE extensions.
+
+[FormatParagraph]
+enable=1
+[FormatParagraph_cfgBindings]
+format-paragraph=<Alt-Key-q>
+
+[AutoExpand]
+enable=1
+[AutoExpand_cfgBindings]
+expand-word=<Alt-Key-slash>
+
+[ZoomHeight]
+enable=1
+[ZoomHeight_cfgBindings]
+zoom-height=<Alt-Key-2>
+
+[ScriptBinding]
+enable=1
+[ScriptBinding_cfgBindings]
+run-module=<Key-F5>
+check-module=<Alt-Key-x>
+
+[CallTips]
+enable=1
+[CallTips_bindings]
+paren-open=<Key-parenleft>
+paren-close=<Key-parenright>
+check-calltip-cancel=<KeyRelease>
+calltip-cancel=<ButtonPress> <Key-Escape>
+
+[ParenMatch]
+enable=0
+style= expression
+flash-delay= 500
+bell= 1
+hilite-foreground= black
+hilite-background= #43cd80
+[ParenMatch_bindings]
+flash-open-paren=<KeyRelease-parenright> <KeyRelease-bracketright> <KeyRelease-braceright>
+check-restore=<KeyPress>
+
+[CodeContext]
+enable=1
+enable_shell=0
+numlines=3
+visible=0
+bgcolor=LightGray
+fgcolor=Black
+[CodeContext_bindings]
+toggle-code-context=
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/config-highlight.def b/depot_tools/release/win/python_24/Lib/idlelib/config-highlight.def
new file mode 100644
index 0000000..7d20f78
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/config-highlight.def
@@ -0,0 +1,64 @@
+# IDLE reads several config files to determine user preferences.  This
+# file is the default config file for idle highlight theme settings.
+
+[IDLE Classic]
+normal-foreground= #000000
+normal-background= #ffffff
+keyword-foreground= #ff7700
+keyword-background= #ffffff
+builtin-foreground= #900090
+builtin-background= #ffffff
+comment-foreground= #dd0000
+comment-background= #ffffff
+string-foreground= #00aa00
+string-background= #ffffff
+definition-foreground= #0000ff
+definition-background= #ffffff
+hilite-foreground= #000000
+hilite-background= gray
+break-foreground= black
+break-background= #ffff55
+hit-foreground= #ffffff
+hit-background= #000000
+error-foreground= #000000
+error-background= #ff7777
+#cursor (only foreground can be set, restart IDLE)
+cursor-foreground= black
+#shell window
+stdout-foreground= blue
+stdout-background= #ffffff
+stderr-foreground= red
+stderr-background= #ffffff
+console-foreground= #770000
+console-background= #ffffff
+
+[IDLE New]
+normal-foreground= #000000
+normal-background= #ffffff
+keyword-foreground= #ff7700
+keyword-background= #ffffff
+builtin-foreground= #900090
+builtin-background= #ffffff
+comment-foreground= #dd0000
+comment-background= #ffffff
+string-foreground= #00aa00
+string-background= #ffffff
+definition-foreground= #0000ff
+definition-background= #ffffff
+hilite-foreground= #000000
+hilite-background= gray
+break-foreground= black
+break-background= #ffff55
+hit-foreground= #ffffff
+hit-background= #000000
+error-foreground= #000000
+error-background= #ff7777
+#cursor (only foreground can be set, restart IDLE)
+cursor-foreground= black
+#shell window
+stdout-foreground= blue
+stdout-background= #ffffff
+stderr-foreground= red
+stderr-background= #ffffff
+console-foreground= #770000
+console-background= #ffffff
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/config-keys.def b/depot_tools/release/win/python_24/Lib/idlelib/config-keys.def
new file mode 100644
index 0000000..05b0cf4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/config-keys.def
@@ -0,0 +1,155 @@
+# IDLE reads several config files to determine user preferences.  This
+# file is the default config file for idle key binding settings.
+# Where multiple keys are specified for an action: if they are separated
+# by a space (eg. action=<key1> <key2>) then the keys are alternatives, if
+# there is no space (eg. action=<key1><key2>) then the keys comprise a
+# single 'emacs style' multi-keystroke binding. The tk event specifier 'Key'
+# is used in all cases, for consistency in auto key conflict checking in the
+# configuration gui.
+
+[IDLE Classic Windows]
+copy=<Control-Key-c> <Control-Key-C>
+cut=<Control-Key-x> <Control-Key-X>
+paste=<Control-Key-v> <Control-Key-V>
+beginning-of-line= <Key-Home>
+center-insert=<Control-Key-l> <Control-Key-L>
+close-all-windows=<Control-Key-q>
+close-window=<Alt-Key-F4> <Meta-Key-F4>
+do-nothing=<Control-Key-F12>
+end-of-file=<Control-Key-d> <Control-Key-D>
+python-docs=<Key-F1>
+python-context-help=<Shift-Key-F1>
+history-next=<Alt-Key-n> <Meta-Key-n>
+history-previous=<Alt-Key-p> <Meta-Key-p>
+interrupt-execution=<Control-Key-c> <Control-Key-C>
+view-restart=<Key-F6>
+restart-shell=<Control-Key-F6>
+open-class-browser=<Alt-Key-c> <Meta-Key-c> <Alt-Key-C>
+open-module=<Alt-Key-m> <Meta-Key-m> <Alt-Key-M>
+open-new-window=<Control-Key-n> <Control-Key-N>
+open-window-from-file=<Control-Key-o> <Control-Key-O>
+plain-newline-and-indent=<Control-Key-j> <Control-Key-J>
+print-window=<Control-Key-p> <Control-Key-P>
+redo=<Control-Shift-Key-Z>
+remove-selection=<Key-Escape>
+save-copy-of-window-as-file=<Alt-Shift-Key-S>
+save-window-as-file=<Control-Shift-Key-S>
+save-window=<Control-Key-s>
+select-all=<Control-Key-a>
+toggle-auto-coloring=<Control-Key-slash>
+undo=<Control-Key-z> <Control-Key-Z>
+find=<Control-Key-f> <Control-Key-F>
+find-again=<Control-Key-g> <Key-F3>
+find-in-files=<Alt-Key-F3> <Meta-Key-F3>
+find-selection=<Control-Key-F3>
+replace=<Control-Key-h> <Control-Key-H>
+goto-line=<Alt-Key-g> <Meta-Key-g>
+smart-backspace=<Key-BackSpace>
+newline-and-indent=<Key-Return> <Key-KP_Enter>
+smart-indent=<Key-Tab>
+indent-region=<Control-Key-bracketright>
+dedent-region=<Control-Key-bracketleft>
+comment-region=<Alt-Key-3> <Meta-Key-3>
+uncomment-region=<Alt-Key-4> <Meta-Key-4>
+tabify-region=<Alt-Key-5> <Meta-Key-5>
+untabify-region=<Alt-Key-6> <Meta-Key-6>
+toggle-tabs=<Alt-Key-t> <Meta-Key-t> <Alt-Key-T>
+change-indentwidth=<Alt-Key-u> <Meta-Key-u> <Alt-Key-U>
+
+[IDLE Classic Unix]
+copy=<Alt-Key-w> <Meta-Key-w>
+cut=<Control-Key-w>
+paste=<Control-Key-y>
+beginning-of-line=<Control-Key-a> <Key-Home>
+center-insert=<Control-Key-l>
+close-all-windows=<Control-Key-x><Control-Key-c>
+close-window=<Control-Key-x><Control-Key-0>
+do-nothing=<Control-Key-x>
+end-of-file=<Control-Key-d>
+history-next=<Alt-Key-n> <Meta-Key-n>
+history-previous=<Alt-Key-p> <Meta-Key-p>
+interrupt-execution=<Control-Key-c>
+view-restart=<Key-F6>
+restart-shell=<Control-Key-F6>
+open-class-browser=<Control-Key-x><Control-Key-b>
+open-module=<Control-Key-x><Control-Key-m>
+open-new-window=<Control-Key-x><Control-Key-n>
+open-window-from-file=<Control-Key-x><Control-Key-f>
+plain-newline-and-indent=<Control-Key-j>
+print-window=<Control-Key-x><Control-Key-p>
+python-docs=<Control-Key-h>
+python-context-help=<Control-Shift-Key-H>
+redo=<Alt-Key-z> <Meta-Key-z>
+remove-selection=<Key-Escape>
+save-copy-of-window-as-file=<Control-Key-x><Control-Key-y>
+save-window-as-file=<Control-Key-x><Control-Key-w>
+save-window=<Control-Key-x><Control-Key-s>
+select-all=<Alt-Key-a> <Meta-Key-a>
+toggle-auto-coloring=<Control-Key-slash>
+undo=<Control-Key-z>
+find=<Control-Key-u><Control-Key-u><Control-Key-s>
+find-again=<Control-Key-u><Control-Key-s>
+find-in-files=<Alt-Key-s> <Meta-Key-s>
+find-selection=<Control-Key-s>
+replace=<Control-Key-r>
+goto-line=<Alt-Key-g> <Meta-Key-g>
+smart-backspace=<Key-BackSpace>
+newline-and-indent=<Key-Return> <Key-KP_Enter>
+smart-indent=<Key-Tab>
+indent-region=<Control-Key-bracketright>
+dedent-region=<Control-Key-bracketleft>
+comment-region=<Alt-Key-3>
+uncomment-region=<Alt-Key-4>
+tabify-region=<Alt-Key-5>
+untabify-region=<Alt-Key-6>
+toggle-tabs=<Alt-Key-t>
+change-indentwidth=<Alt-Key-u>
+
+[IDLE Classic Mac]
+copy=<Command-Key-c>
+cut=<Command-Key-x>
+paste=<Command-Key-v>
+beginning-of-line= <Key-Home>
+center-insert=<Control-Key-l>
+close-all-windows=<Command-Key-q>
+close-window=<Command-Key-w>
+do-nothing=<Control-Key-F12>
+end-of-file=<Control-Key-d>
+python-docs=<Key-F1>
+python-context-help=<Shift-Key-F1>
+history-next=<Control-Key-n>
+history-previous=<Control-Key-p>
+interrupt-execution=<Control-Key-c>
+view-restart=<Key-F6>
+restart-shell=<Control-Key-F6>
+open-class-browser=<Command-Key-b>
+open-module=<Command-Key-m>
+open-new-window=<Command-Key-n>
+open-window-from-file=<Command-Key-o>
+plain-newline-and-indent=<Control-Key-j>
+print-window=<Command-Key-p>
+redo=<Shift-Command-Key-Z>
+remove-selection=<Key-Escape>
+save-window-as-file=<Shift-Command-Key-S>
+save-window=<Command-Key-s>
+save-copy-of-window-as-file=<Option-Command-Key-s>
+select-all=<Command-Key-a>
+toggle-auto-coloring=<Control-Key-slash>
+undo=<Command-Key-z>
+find=<Command-Key-f>
+find-again=<Command-Key-g> <Key-F3>
+find-in-files=<Command-Key-F3>
+find-selection=<Shift-Command-Key-F3>
+replace=<Command-Key-r>
+goto-line=<Command-Key-j>
+smart-backspace=<Key-BackSpace>
+newline-and-indent=<Key-Return> <Key-KP_Enter>
+smart-indent=<Key-Tab>
+indent-region=<Command-Key-bracketright>
+dedent-region=<Command-Key-bracketleft>
+comment-region=<Control-Key-3>
+uncomment-region=<Control-Key-4>
+tabify-region=<Control-Key-5>
+untabify-region=<Control-Key-6>
+toggle-tabs=<Control-Key-t>
+change-indentwidth=<Control-Key-u>
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/config-main.def b/depot_tools/release/win/python_24/Lib/idlelib/config-main.def
new file mode 100644
index 0000000..b8667b8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/config-main.def
@@ -0,0 +1,76 @@
+# IDLE reads several config files to determine user preferences.  This
+# file is the default config file for general idle settings.
+#
+# When IDLE starts, it will look in
+# the following two sets of files, in order:
+#
+#     default configuration
+#     ---------------------
+#     config-main.def         the default general config file
+#     config-extensions.def   the default extension config file
+#     config-highlight.def    the default highlighting config file
+#     config-keys.def         the default keybinding config file
+#
+#     user configuration
+#     -------------------
+#     ~/.idlerc/config-main.cfg            the user general config file
+#     ~/.idlerc/config-extensions.cfg      the user extension config file
+#     ~/.idlerc/config-highlight.cfg       the user highlighting config file
+#     ~/.idlerc/config-keys.cfg            the user keybinding config file
+#
+# On Windows2000 and Windows XP the .idlerc directory is at 
+#	Documents and Settings\<username>\.idlerc	
+#
+# On Windows98 it is at c:\.idlerc
+#
+# Any options the user saves through the config dialog will be saved to
+# the relevant user config file. Reverting any general setting to the
+# default causes that entry to be wiped from the user file and re-read
+# from the default file. User highlighting themes or keybinding sets are
+# retained unless specifically deleted within the config dialog. Choosing
+# one of the default themes or keysets just applies the relevant settings
+# from the default file.
+#
+# Additional help sources are listed in the [HelpFiles] section and must be
+# viewable by a web browser (or the Windows Help viewer in the case of .chm
+# files). These sources will be listed on the Help menu.  The pattern is
+# <sequence_number = menu item;/path/to/help/source>
+# You can't use a semi-colon in a menu item or path.  The path will be platform
+# specific because of path separators, drive specs etc.
+#
+# It is best to use the Configuration GUI to set up additional help sources!
+# Example:
+#1 = My Extra Help Source;/usr/share/doc/foo/index.html
+#2 = Another Help Source;/path/to/another.pdf
+
+[General]
+editor-on-startup= 0
+autosave= 0
+print-command-posix=lpr %s
+print-command-win=start /min notepad /p %s
+delete-exitfunc= 1
+
+[EditorWindow]
+width= 80
+height= 40
+font= courier
+font-size= 10
+font-bold= 0
+encoding= none
+
+[FormatParagraph]
+paragraph=70
+
+[Indent]
+use-spaces= 1
+num-spaces= 4
+
+[Theme]
+default= 1
+name= IDLE Classic
+
+[Keys]
+default= 1
+name= IDLE Classic Windows
+
+[HelpFiles]
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/configDialog.py b/depot_tools/release/win/python_24/Lib/idlelib/configDialog.py
new file mode 100644
index 0000000..48f22b8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/configDialog.py
@@ -0,0 +1,1208 @@
+"""IDLE Configuration Dialog: support user customization of IDLE by GUI
+
+Customize font faces, sizes, and colorization attributes.  Set indentation
+defaults.  Customize keybindings.  Colorization and keybindings can be
+saved as user defined sets.  Select startup options including shell/editor
+and default window size.  Define additional help sources.
+
+Note that tab width in IDLE is currently fixed at eight due to Tk issues.
+Refer to comment in EditorWindow autoindent code for details.
+
+"""
+from Tkinter import *
+import tkMessageBox, tkColorChooser, tkFont
+import string, copy
+
+from configHandler import idleConf
+from dynOptionMenuWidget import DynOptionMenu
+from tabpage import TabPageSet
+from keybindingDialog import GetKeysDialog
+from configSectionNameDialog import GetCfgSectionNameDialog
+from configHelpSourceEdit import GetHelpSourceDialog
+
+class ConfigDialog(Toplevel):
+    """Modal configuration dialog for IDLE.
+
+    Presents four tab pages (Fonts/Tabs, Highlighting, Keys, General)
+    whose widgets edit the user's IDLE configuration.  Pending changes
+    are accumulated in self.changedItems (see AddChangedItem) and acted
+    on by the Ok/Apply/Cancel button handlers defined later in the
+    class.
+    """
+    def __init__(self,parent,title):
+        """Build the dialog over parent and block until it is closed.
+
+        parent is the window this dialog is transient for.  title is
+        currently unused in this body -- TODO confirm it should be
+        passed to self.title().
+        """
+        Toplevel.__init__(self, parent)
+        self.configure(borderwidth=5)
+        #place the dialog slightly below/right of the parent's corner
+        self.geometry("+%d+%d" % (parent.winfo_rootx()+20,
+                parent.winfo_rooty()+30))
+        #Theme Elements. Each theme element key is its display name.
+        #The first value of the tuple is the sample area tag name.
+        #The second value is the display name list sort index.
+        self.themeElements={'Normal Text':('normal','00'),
+            'Python Keywords':('keyword','01'),
+            'Python Definitions':('definition','02'),
+            'Python Builtins':('builtin', '03'),
+            'Python Comments':('comment','04'),
+            'Python Strings':('string','05'),
+            'Selected Text':('hilite','06'),
+            'Found Text':('hit','07'),
+            'Cursor':('cursor','08'),
+            'Error Text':('error','09'),
+            'Shell Normal Text':('console','10'),
+            'Shell Stdout Text':('stdout','11'),
+            'Shell Stderr Text':('stderr','12'),
+            }
+        self.ResetChangedItems() #load initial values in changed items dict
+        self.CreateWidgets()
+        self.resizable(height=FALSE,width=FALSE)
+        self.transient(parent)
+        self.grab_set() #modal: route all input events to this dialog
+        self.protocol("WM_DELETE_WINDOW", self.Cancel)
+        self.parent = parent
+        self.tabPages.focus_set()
+        #key bindings for this dialog
+        #self.bind('<Escape>',self.Cancel) #dismiss dialog, no save
+        #self.bind('<Alt-a>',self.Apply) #apply changes, save
+        #self.bind('<F1>',self.Help) #context help
+        self.LoadConfigs()
+        self.AttachVarCallbacks() #avoid callbacks during LoadConfigs
+        self.wait_window() #block the caller until the dialog is destroyed
+
+    def CreateWidgets(self):
+        self.tabPages = TabPageSet(self,
+                pageNames=['Fonts/Tabs','Highlighting','Keys','General'])
+        self.tabPages.ChangePage()#activates default (first) page
+        frameActionButtons = Frame(self)
+        #action buttons
+        self.buttonHelp = Button(frameActionButtons,text='Help',
+                command=self.Help,takefocus=FALSE)
+        self.buttonOk = Button(frameActionButtons,text='Ok',
+                command=self.Ok,takefocus=FALSE)
+        self.buttonApply = Button(frameActionButtons,text='Apply',
+                command=self.Apply,takefocus=FALSE)
+        self.buttonCancel = Button(frameActionButtons,text='Cancel',
+                command=self.Cancel,takefocus=FALSE)
+        self.CreatePageFontTab()
+        self.CreatePageHighlight()
+        self.CreatePageKeys()
+        self.CreatePageGeneral()
+        self.buttonHelp.pack(side=RIGHT,padx=5,pady=5)
+        self.buttonOk.pack(side=LEFT,padx=5,pady=5)
+        self.buttonApply.pack(side=LEFT,padx=5,pady=5)
+        self.buttonCancel.pack(side=LEFT,padx=5,pady=5)
+        frameActionButtons.pack(side=BOTTOM)
+        self.tabPages.pack(side=TOP,expand=TRUE,fill=BOTH)
+
+    def CreatePageFontTab(self):
+        """Build the Fonts/Tabs page: base editor font selection
+        (face/size/bold with a live sample) and indentation defaults."""
+        #tkVars
+        self.fontSize=StringVar(self)
+        self.fontBold=BooleanVar(self)
+        self.fontName=StringVar(self)
+        self.spaceNum=IntVar(self)
+        #self.tabCols=IntVar(self)
+        self.indentBySpaces=BooleanVar(self)
+        #the sample label shares this Font object, so reconfiguring it
+        #updates the sample in place
+        self.editFont=tkFont.Font(self,('courier',10,'normal'))
+        ##widget creation
+        #body frame
+        frame=self.tabPages.pages['Fonts/Tabs']['page']
+        #body section frames
+        frameFont=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameIndent=Frame(frame,borderwidth=2,relief=GROOVE)
+        #frameFont
+        labelFontTitle=Label(frameFont,text='Set Base Editor Font')
+        frameFontName=Frame(frameFont)
+        frameFontParam=Frame(frameFont)
+        labelFontNameTitle=Label(frameFontName,justify=LEFT,
+                text='Font :')
+        self.listFontName=Listbox(frameFontName,height=5,takefocus=FALSE,
+                exportselection=FALSE)
+        self.listFontName.bind('<ButtonRelease-1>',self.OnListFontButtonRelease)
+        scrollFont=Scrollbar(frameFontName)
+        scrollFont.config(command=self.listFontName.yview)
+        self.listFontName.config(yscrollcommand=scrollFont.set)
+        labelFontSizeTitle=Label(frameFontParam,text='Size :')
+        self.optMenuFontSize=DynOptionMenu(frameFontParam,self.fontSize,None,
+            command=self.SetFontSample)
+        checkFontBold=Checkbutton(frameFontParam,variable=self.fontBold,
+            onvalue=1,offvalue=0,text='Bold',command=self.SetFontSample)
+        frameFontSample=Frame(frameFont,relief=SOLID,borderwidth=1)
+        self.labelFontSample=Label(frameFontSample,
+                text='AaBbCcDdEe\nFfGgHhIiJjK\n1234567890\n#:+=(){}[]',
+                justify=LEFT,font=self.editFont)
+        #frameIndent
+        labelIndentTitle=Label(frameIndent,text='Set Indentation Defaults')
+        frameIndentType=Frame(frameIndent)
+        frameIndentSize=Frame(frameIndent)
+        labelIndentTypeTitle=Label(frameIndentType,
+                text='Choose indentation type :')
+        radioUseSpaces=Radiobutton(frameIndentType,variable=self.indentBySpaces,
+            value=1,text='Tab key inserts spaces')
+        radioUseTabs=Radiobutton(frameIndentType,variable=self.indentBySpaces,
+            value=0,text='Tab key inserts tabs')
+        labelIndentSizeTitle=Label(frameIndentSize,
+                text='Choose indentation size :')
+        labelSpaceNumTitle=Label(frameIndentSize,justify=LEFT,
+                text='indent width')
+        #indent width is constrained to the 2..16 range by the Scale widget
+        self.scaleSpaceNum=Scale(frameIndentSize,variable=self.spaceNum,
+                orient='horizontal',tickinterval=2,from_=2,to=16)
+        #labeltabColsTitle=Label(frameIndentSize,justify=LEFT,
+        #        text='when tab key inserts tabs,\ncolumns per tab')
+        #self.scaleTabCols=Scale(frameIndentSize,variable=self.tabCols,
+        #        orient='horizontal',tickinterval=2,from_=2,to=8)
+        #widget packing
+        #body
+        frameFont.pack(side=LEFT,padx=5,pady=10,expand=TRUE,fill=BOTH)
+        frameIndent.pack(side=LEFT,padx=5,pady=10,fill=Y)
+        #frameFont
+        labelFontTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        frameFontName.pack(side=TOP,padx=5,pady=5,fill=X)
+        frameFontParam.pack(side=TOP,padx=5,pady=5,fill=X)
+        labelFontNameTitle.pack(side=TOP,anchor=W)
+        self.listFontName.pack(side=LEFT,expand=TRUE,fill=X)
+        scrollFont.pack(side=LEFT,fill=Y)
+        labelFontSizeTitle.pack(side=LEFT,anchor=W)
+        self.optMenuFontSize.pack(side=LEFT,anchor=W)
+        checkFontBold.pack(side=LEFT,anchor=W,padx=20)
+        frameFontSample.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=BOTH)
+        self.labelFontSample.pack(expand=TRUE,fill=BOTH)
+        #frameIndent
+        labelIndentTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        frameIndentType.pack(side=TOP,padx=5,fill=X)
+        frameIndentSize.pack(side=TOP,padx=5,pady=5,fill=BOTH)
+        labelIndentTypeTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        radioUseSpaces.pack(side=TOP,anchor=W,padx=5)
+        radioUseTabs.pack(side=TOP,anchor=W,padx=5)
+        labelIndentSizeTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        labelSpaceNumTitle.pack(side=TOP,anchor=W,padx=5)
+        self.scaleSpaceNum.pack(side=TOP,padx=5,fill=X)
+        #labeltabColsTitle.pack(side=TOP,anchor=W,padx=5)
+        #self.scaleTabCols.pack(side=TOP,padx=5,fill=X)
+        return frame
+
+    def CreatePageHighlight(self):
+        """Build the Highlighting page: a clickable sample text for
+        choosing per-element colours, and the built-in/custom theme
+        selector."""
+        self.builtinTheme=StringVar(self)
+        self.customTheme=StringVar(self)
+        self.fgHilite=BooleanVar(self)
+        self.colour=StringVar(self)
+        self.fontName=StringVar(self)
+        self.themeIsBuiltin=BooleanVar(self)
+        self.highlightTarget=StringVar(self)
+        ##widget creation
+        #body frame
+        frame=self.tabPages.pages['Highlighting']['page']
+        #body section frames
+        frameCustom=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameTheme=Frame(frame,borderwidth=2,relief=GROOVE)
+        #frameCustom
+        self.textHighlightSample=Text(frameCustom,relief=SOLID,borderwidth=1,
+            font=('courier',12,''),cursor='hand2',width=21,height=10,
+            takefocus=FALSE,highlightthickness=0,wrap=NONE)
+        text=self.textHighlightSample
+        #suppress selection gestures; the sample is click-to-choose only
+        text.bind('<Double-Button-1>',lambda e: 'break')
+        text.bind('<B1-Motion>',lambda e: 'break')
+        textAndTags=(('#you can click here','comment'),('\n','normal'),
+            ('#to choose items','comment'),('\n','normal'),('def','keyword'),
+            (' ','normal'),('func','definition'),('(param):','normal'),
+            ('\n  ','normal'),('"""string"""','string'),('\n  var0 = ','normal'),
+            ("'string'",'string'),('\n  var1 = ','normal'),("'selected'",'hilite'),
+            ('\n  var2 = ','normal'),("'found'",'hit'),
+            ('\n  var3 = ','normal'),('list', 'builtin'), ('(','normal'),
+            ('None', 'builtin'),(')\n\n','normal'),
+            (' error ','error'),(' ','normal'),('cursor |','cursor'),
+            ('\n ','normal'),('shell','console'),(' ','normal'),('stdout','stdout'),
+            (' ','normal'),('stderr','stderr'),('\n','normal'))
+        for txTa in textAndTags:
+            text.insert(END,txTa[0],txTa[1])
+        #clicking a tagged span selects that element as the highlight
+        #target; elem is bound as a lambda default so each tag keeps its
+        #own element name
+        for element in self.themeElements.keys():
+            text.tag_bind(self.themeElements[element][0],'<ButtonPress-1>',
+                lambda event,elem=element: event.widget.winfo_toplevel()
+                .highlightTarget.set(elem))
+        text.config(state=DISABLED)
+        self.frameColourSet=Frame(frameCustom,relief=SOLID,borderwidth=1)
+        frameFgBg=Frame(frameCustom)
+        labelCustomTitle=Label(frameCustom,text='Set Custom Highlighting')
+        buttonSetColour=Button(self.frameColourSet,text='Choose Colour for :',
+            command=self.GetColour,highlightthickness=0)
+        self.optMenuHighlightTarget=DynOptionMenu(self.frameColourSet,
+            self.highlightTarget,None,highlightthickness=0)#,command=self.SetHighlightTargetBinding
+        self.radioFg=Radiobutton(frameFgBg,variable=self.fgHilite,
+            value=1,text='Foreground',command=self.SetColourSampleBinding)
+        self.radioBg=Radiobutton(frameFgBg,variable=self.fgHilite,
+            value=0,text='Background',command=self.SetColourSampleBinding)
+        self.fgHilite.set(1)
+        buttonSaveCustomTheme=Button(frameCustom,
+            text='Save as New Custom Theme',command=self.SaveAsNewTheme)
+        #frameTheme
+        labelThemeTitle=Label(frameTheme,text='Select a Highlighting Theme')
+        labelTypeTitle=Label(frameTheme,text='Select : ')
+        self.radioThemeBuiltin=Radiobutton(frameTheme,variable=self.themeIsBuiltin,
+            value=1,command=self.SetThemeType,text='a Built-in Theme')
+        self.radioThemeCustom=Radiobutton(frameTheme,variable=self.themeIsBuiltin,
+            value=0,command=self.SetThemeType,text='a Custom Theme')
+        self.optMenuThemeBuiltin=DynOptionMenu(frameTheme,
+            self.builtinTheme,None,command=None)
+        self.optMenuThemeCustom=DynOptionMenu(frameTheme,
+            self.customTheme,None,command=None)
+        self.buttonDeleteCustomTheme=Button(frameTheme,text='Delete Custom Theme',
+                command=self.DeleteCustomTheme)
+        ##widget packing
+        #body
+        frameCustom.pack(side=LEFT,padx=5,pady=10,expand=TRUE,fill=BOTH)
+        frameTheme.pack(side=LEFT,padx=5,pady=10,fill=Y)
+        #frameCustom
+        labelCustomTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        self.frameColourSet.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=X)
+        frameFgBg.pack(side=TOP,padx=5,pady=0)
+        self.textHighlightSample.pack(side=TOP,padx=5,pady=5,expand=TRUE,
+            fill=BOTH)
+        buttonSetColour.pack(side=TOP,expand=TRUE,fill=X,padx=8,pady=4)
+        self.optMenuHighlightTarget.pack(side=TOP,expand=TRUE,fill=X,padx=8,pady=3)
+        self.radioFg.pack(side=LEFT,anchor=E)
+        self.radioBg.pack(side=RIGHT,anchor=W)
+        buttonSaveCustomTheme.pack(side=BOTTOM,fill=X,padx=5,pady=5)
+        #frameTheme
+        labelThemeTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        labelTypeTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        self.radioThemeBuiltin.pack(side=TOP,anchor=W,padx=5)
+        self.radioThemeCustom.pack(side=TOP,anchor=W,padx=5,pady=2)
+        self.optMenuThemeBuiltin.pack(side=TOP,fill=X,padx=5,pady=5)
+        self.optMenuThemeCustom.pack(side=TOP,fill=X,anchor=W,padx=5,pady=5)
+        self.buttonDeleteCustomTheme.pack(side=TOP,fill=X,padx=5,pady=5)
+        return frame
+
+    def CreatePageKeys(self):
+        """Build the Keys page: the action/binding listbox with its
+        'Get New Keys' editor, and the built-in/custom key set
+        selector."""
+        #tkVars
+        self.bindingTarget=StringVar(self)
+        self.builtinKeys=StringVar(self)
+        self.customKeys=StringVar(self)
+        self.keysAreBuiltin=BooleanVar(self)
+        self.keyBinding=StringVar(self)
+        ##widget creation
+        #body frame
+        frame=self.tabPages.pages['Keys']['page']
+        #body section frames
+        frameCustom=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameKeySets=Frame(frame,borderwidth=2,relief=GROOVE)
+        #frameCustom
+        frameTarget=Frame(frameCustom)
+        labelCustomTitle=Label(frameCustom,text='Set Custom Key Bindings')
+        labelTargetTitle=Label(frameTarget,text='Action - Key(s)')
+        scrollTargetY=Scrollbar(frameTarget)
+        scrollTargetX=Scrollbar(frameTarget,orient=HORIZONTAL)
+        self.listBindings=Listbox(frameTarget,takefocus=FALSE,
+                exportselection=FALSE)
+        self.listBindings.bind('<ButtonRelease-1>',self.KeyBindingSelected)
+        scrollTargetY.config(command=self.listBindings.yview)
+        scrollTargetX.config(command=self.listBindings.xview)
+        self.listBindings.config(yscrollcommand=scrollTargetY.set)
+        self.listBindings.config(xscrollcommand=scrollTargetX.set)
+        #disabled until a binding is selected (see KeyBindingSelected)
+        self.buttonNewKeys=Button(frameCustom,text='Get New Keys for Selection',
+            command=self.GetNewKeys,state=DISABLED)
+        buttonSaveCustomKeys=Button(frameCustom,
+                text='Save as New Custom Key Set',command=self.SaveAsNewKeySet)
+        #frameKeySets
+        labelKeysTitle=Label(frameKeySets,text='Select a Key Set')
+        labelTypeTitle=Label(frameKeySets,text='Select : ')
+        self.radioKeysBuiltin=Radiobutton(frameKeySets,variable=self.keysAreBuiltin,
+            value=1,command=self.SetKeysType,text='a Built-in Key Set')
+        self.radioKeysCustom=Radiobutton(frameKeySets,variable=self.keysAreBuiltin,
+            value=0,command=self.SetKeysType,text='a Custom Key Set')
+        self.optMenuKeysBuiltin=DynOptionMenu(frameKeySets,
+            self.builtinKeys,None,command=None)
+        self.optMenuKeysCustom=DynOptionMenu(frameKeySets,
+            self.customKeys,None,command=None)
+        self.buttonDeleteCustomKeys=Button(frameKeySets,text='Delete Custom Key Set',
+                command=self.DeleteCustomKeys)
+        ##widget packing
+        #body
+        frameCustom.pack(side=LEFT,padx=5,pady=5,expand=TRUE,fill=BOTH)
+        frameKeySets.pack(side=LEFT,padx=5,pady=5,fill=Y)
+        #frameCustom
+        labelCustomTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        buttonSaveCustomKeys.pack(side=BOTTOM,fill=X,padx=5,pady=5)
+        self.buttonNewKeys.pack(side=BOTTOM,fill=X,padx=5,pady=5)
+        frameTarget.pack(side=LEFT,padx=5,pady=5,expand=TRUE,fill=BOTH)
+        #frame target
+        frameTarget.columnconfigure(0,weight=1)
+        frameTarget.rowconfigure(1,weight=1)
+        labelTargetTitle.grid(row=0,column=0,columnspan=2,sticky=W)
+        self.listBindings.grid(row=1,column=0,sticky=NSEW)
+        scrollTargetY.grid(row=1,column=1,sticky=NS)
+        scrollTargetX.grid(row=2,column=0,sticky=EW)
+        #frameKeySets
+        labelKeysTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        labelTypeTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        self.radioKeysBuiltin.pack(side=TOP,anchor=W,padx=5)
+        self.radioKeysCustom.pack(side=TOP,anchor=W,padx=5,pady=2)
+        self.optMenuKeysBuiltin.pack(side=TOP,fill=X,padx=5,pady=5)
+        self.optMenuKeysCustom.pack(side=TOP,fill=X,anchor=W,padx=5,pady=5)
+        self.buttonDeleteCustomKeys.pack(side=TOP,fill=X,padx=5,pady=5)
+        return frame
+
+    def CreatePageGeneral(self):
+        """Build the General page: startup window choice, autosave
+        preference, initial window size, paragraph reformat width,
+        default source encoding, and the additional-help-sources
+        list."""
+        #tkVars
+        self.winWidth=StringVar(self)
+        self.winHeight=StringVar(self)
+        self.paraWidth=StringVar(self)
+        self.startupEdit=IntVar(self)
+        self.autoSave=IntVar(self)
+        self.encoding=StringVar(self)
+        self.userHelpBrowser=BooleanVar(self)
+        self.helpBrowser=StringVar(self)
+        #widget creation
+        #body
+        frame=self.tabPages.pages['General']['page']
+        #body section frames
+        frameRun=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameSave=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameWinSize=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameParaSize=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameEncoding=Frame(frame,borderwidth=2,relief=GROOVE)
+        frameHelp=Frame(frame,borderwidth=2,relief=GROOVE)
+        #frameRun
+        labelRunTitle=Label(frameRun,text='Startup Preferences')
+        labelRunChoiceTitle=Label(frameRun,text='At Startup')
+        radioStartupEdit=Radiobutton(frameRun,variable=self.startupEdit,
+            value=1,command=self.SetKeysType,text="Open Edit Window")
+        radioStartupShell=Radiobutton(frameRun,variable=self.startupEdit,
+            value=0,command=self.SetKeysType,text='Open Shell Window')
+        #frameSave
+        labelSaveTitle=Label(frameSave,text='Autosave Preference')
+        labelRunSaveTitle=Label(frameSave,text='At Start of Run (F5)  ')
+        radioSaveAsk=Radiobutton(frameSave,variable=self.autoSave,
+            value=0,command=self.SetKeysType,text="Prompt to Save")
+        radioSaveAuto=Radiobutton(frameSave,variable=self.autoSave,
+            value=1,command=self.SetKeysType,text='No Prompt')
+        #frameWinSize
+        labelWinSizeTitle=Label(frameWinSize,text='Initial Window Size'+
+                '  (in characters)')
+        labelWinWidthTitle=Label(frameWinSize,text='Width')
+        entryWinWidth=Entry(frameWinSize,textvariable=self.winWidth,
+                width=3)
+        labelWinHeightTitle=Label(frameWinSize,text='Height')
+        entryWinHeight=Entry(frameWinSize,textvariable=self.winHeight,
+                width=3)
+        #paragraphFormatWidth
+        labelParaWidthTitle=Label(frameParaSize,text='Paragraph reformat'+
+                ' width (in characters)')
+        entryParaWidth=Entry(frameParaSize,textvariable=self.paraWidth,
+                width=3)
+        #frameEncoding
+        labelEncodingTitle=Label(frameEncoding,text="Default Source Encoding")
+        radioEncLocale=Radiobutton(frameEncoding,variable=self.encoding,
+            value="locale",text="Locale-defined")
+        radioEncUTF8=Radiobutton(frameEncoding,variable=self.encoding,
+            value="utf-8",text="UTF-8")
+        radioEncNone=Radiobutton(frameEncoding,variable=self.encoding,
+            value="none",text="None")
+        #frameHelp
+        ##labelHelpTitle=Label(frameHelp,text='Help Options')
+        frameHelpList=Frame(frameHelp)
+        frameHelpListButtons=Frame(frameHelpList)
+        labelHelpListTitle=Label(frameHelpList,text='Additional Help Sources:')
+        scrollHelpList=Scrollbar(frameHelpList)
+        self.listHelp=Listbox(frameHelpList,height=5,takefocus=FALSE,
+                exportselection=FALSE)
+        scrollHelpList.config(command=self.listHelp.yview)
+        self.listHelp.config(yscrollcommand=scrollHelpList.set)
+        self.listHelp.bind('<ButtonRelease-1>',self.HelpSourceSelected)
+        #Edit/Remove start disabled until a help source is selected
+        self.buttonHelpListEdit=Button(frameHelpListButtons,text='Edit',
+                state=DISABLED,width=8,command=self.HelpListItemEdit)
+        self.buttonHelpListAdd=Button(frameHelpListButtons,text='Add',
+                width=8,command=self.HelpListItemAdd)
+        self.buttonHelpListRemove=Button(frameHelpListButtons,text='Remove',
+                state=DISABLED,width=8,command=self.HelpListItemRemove)
+        # the following is better handled by the BROWSER environment
+        # variable under unix/linux
+        #checkHelpBrowser=Checkbutton(frameHelp,variable=self.userHelpBrowser,
+        #    onvalue=1,offvalue=0,text='user specified (html) help browser:',
+        #    command=self.OnCheckUserHelpBrowser)
+        #self.entryHelpBrowser=Entry(frameHelp,textvariable=self.helpBrowser,
+        #        width=40)
+        #widget packing
+        #body
+        frameRun.pack(side=TOP,padx=5,pady=5,fill=X)
+        frameSave.pack(side=TOP,padx=5,pady=5,fill=X)
+        frameWinSize.pack(side=TOP,padx=5,pady=5,fill=X)
+        frameParaSize.pack(side=TOP,padx=5,pady=5,fill=X)
+        frameEncoding.pack(side=TOP,padx=5,pady=5,fill=X)
+        frameHelp.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=BOTH)
+        #frameRun
+        labelRunTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        labelRunChoiceTitle.pack(side=LEFT,anchor=W,padx=5,pady=5)
+        radioStartupShell.pack(side=RIGHT,anchor=W,padx=5,pady=5)
+        radioStartupEdit.pack(side=RIGHT,anchor=W,padx=5,pady=5)
+        #frameSave
+        labelSaveTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        labelRunSaveTitle.pack(side=LEFT,anchor=W,padx=5,pady=5)
+        radioSaveAuto.pack(side=RIGHT,anchor=W,padx=5,pady=5)
+        radioSaveAsk.pack(side=RIGHT,anchor=W,padx=5,pady=5)
+        #frameWinSize
+        labelWinSizeTitle.pack(side=LEFT,anchor=W,padx=5,pady=5)
+        entryWinHeight.pack(side=RIGHT,anchor=E,padx=10,pady=5)
+        labelWinHeightTitle.pack(side=RIGHT,anchor=E,pady=5)
+        entryWinWidth.pack(side=RIGHT,anchor=E,padx=10,pady=5)
+        labelWinWidthTitle.pack(side=RIGHT,anchor=E,pady=5)
+        #paragraphFormatWidth
+        labelParaWidthTitle.pack(side=LEFT,anchor=W,padx=5,pady=5)
+        entryParaWidth.pack(side=RIGHT,anchor=E,padx=10,pady=5)
+        #frameEncoding
+        labelEncodingTitle.pack(side=LEFT,anchor=W,padx=5,pady=5)
+        radioEncNone.pack(side=RIGHT,anchor=E,pady=5)
+        radioEncUTF8.pack(side=RIGHT,anchor=E,pady=5)
+        radioEncLocale.pack(side=RIGHT,anchor=E,pady=5)
+        #frameHelp
+        ##labelHelpTitle.pack(side=TOP,anchor=W,padx=5,pady=5)
+        frameHelpListButtons.pack(side=RIGHT,padx=5,pady=5,fill=Y)
+        frameHelpList.pack(side=TOP,padx=5,pady=5,expand=TRUE,fill=BOTH)
+        labelHelpListTitle.pack(side=TOP,anchor=W)
+        scrollHelpList.pack(side=RIGHT,anchor=W,fill=Y)
+        self.listHelp.pack(side=LEFT,anchor=E,expand=TRUE,fill=BOTH)
+        self.buttonHelpListEdit.pack(side=TOP,anchor=W,pady=5)
+        self.buttonHelpListAdd.pack(side=TOP,anchor=W)
+        self.buttonHelpListRemove.pack(side=TOP,anchor=W,pady=5)
+        #checkHelpBrowser.pack(side=TOP,anchor=W,padx=5)
+        #self.entryHelpBrowser.pack(side=TOP,anchor=W,padx=5,pady=5)
+        return frame
+
+    def AttachVarCallbacks(self):
+        self.fontSize.trace_variable('w',self.VarChanged_fontSize)
+        self.fontName.trace_variable('w',self.VarChanged_fontName)
+        self.fontBold.trace_variable('w',self.VarChanged_fontBold)
+        self.spaceNum.trace_variable('w',self.VarChanged_spaceNum)
+        #self.tabCols.trace_variable('w',self.VarChanged_tabCols)
+        self.indentBySpaces.trace_variable('w',self.VarChanged_indentBySpaces)
+        self.colour.trace_variable('w',self.VarChanged_colour)
+        self.builtinTheme.trace_variable('w',self.VarChanged_builtinTheme)
+        self.customTheme.trace_variable('w',self.VarChanged_customTheme)
+        self.themeIsBuiltin.trace_variable('w',self.VarChanged_themeIsBuiltin)
+        self.highlightTarget.trace_variable('w',self.VarChanged_highlightTarget)
+        self.keyBinding.trace_variable('w',self.VarChanged_keyBinding)
+        self.builtinKeys.trace_variable('w',self.VarChanged_builtinKeys)
+        self.customKeys.trace_variable('w',self.VarChanged_customKeys)
+        self.keysAreBuiltin.trace_variable('w',self.VarChanged_keysAreBuiltin)
+        self.winWidth.trace_variable('w',self.VarChanged_winWidth)
+        self.winHeight.trace_variable('w',self.VarChanged_winHeight)
+        self.paraWidth.trace_variable('w',self.VarChanged_paraWidth)
+        self.startupEdit.trace_variable('w',self.VarChanged_startupEdit)
+        self.autoSave.trace_variable('w',self.VarChanged_autoSave)
+        self.encoding.trace_variable('w',self.VarChanged_encoding)
+
+    def VarChanged_fontSize(self,*params):
+        value=self.fontSize.get()
+        self.AddChangedItem('main','EditorWindow','font-size',value)
+
+    def VarChanged_fontName(self,*params):
+        value=self.fontName.get()
+        self.AddChangedItem('main','EditorWindow','font',value)
+
+    def VarChanged_fontBold(self,*params):
+        value=self.fontBold.get()
+        self.AddChangedItem('main','EditorWindow','font-bold',value)
+
+    def VarChanged_indentBySpaces(self,*params):
+        value=self.indentBySpaces.get()
+        self.AddChangedItem('main','Indent','use-spaces',value)
+
+    def VarChanged_spaceNum(self,*params):
+        value=self.spaceNum.get()
+        self.AddChangedItem('main','Indent','num-spaces',value)
+
+    #def VarChanged_tabCols(self,*params):
+    #    value=self.tabCols.get()
+    #    self.AddChangedItem('main','Indent','tab-cols',value)
+
+    def VarChanged_colour(self,*params):
+        self.OnNewColourSet()
+
+    def VarChanged_builtinTheme(self,*params):
+        value=self.builtinTheme.get()
+        self.AddChangedItem('main','Theme','name',value)
+        self.PaintThemeSample()
+
+    def VarChanged_customTheme(self,*params):
+        value=self.customTheme.get()
+        if value != '- no custom themes -':
+            self.AddChangedItem('main','Theme','name',value)
+            self.PaintThemeSample()
+
+    def VarChanged_themeIsBuiltin(self,*params):
+        value=self.themeIsBuiltin.get()
+        self.AddChangedItem('main','Theme','default',value)
+        if value:
+            self.VarChanged_builtinTheme()
+        else:
+            self.VarChanged_customTheme()
+
+    def VarChanged_highlightTarget(self,*params):
+        self.SetHighlightTarget()
+
+    def VarChanged_keyBinding(self,*params):
+        value=self.keyBinding.get()
+        keySet=self.customKeys.get()
+        event=self.listBindings.get(ANCHOR).split()[0]
+        if idleConf.IsCoreBinding(event):
+            #this is a core keybinding
+            self.AddChangedItem('keys',keySet,event,value)
+        else: #this is an extension key binding
+            extName=idleConf.GetExtnNameForEvent(event)
+            extKeybindSection=extName+'_cfgBindings'
+            self.AddChangedItem('extensions',extKeybindSection,event,value)
+
+    def VarChanged_builtinKeys(self,*params):
+        value=self.builtinKeys.get()
+        self.AddChangedItem('main','Keys','name',value)
+        self.LoadKeysList(value)
+
+    def VarChanged_customKeys(self,*params):
+        value=self.customKeys.get()
+        if value != '- no custom keys -':
+            self.AddChangedItem('main','Keys','name',value)
+            self.LoadKeysList(value)
+
+    def VarChanged_keysAreBuiltin(self,*params):
+        value=self.keysAreBuiltin.get()
+        self.AddChangedItem('main','Keys','default',value)
+        if value:
+            self.VarChanged_builtinKeys()
+        else:
+            self.VarChanged_customKeys()
+
+    def VarChanged_winWidth(self,*params):
+        value=self.winWidth.get()
+        self.AddChangedItem('main','EditorWindow','width',value)
+
+    def VarChanged_winHeight(self,*params):
+        value=self.winHeight.get()
+        self.AddChangedItem('main','EditorWindow','height',value)
+
+    def VarChanged_paraWidth(self,*params):
+        value=self.paraWidth.get()
+        self.AddChangedItem('main','FormatParagraph','paragraph',value)
+
+    def VarChanged_startupEdit(self,*params):
+        value=self.startupEdit.get()
+        self.AddChangedItem('main','General','editor-on-startup',value)
+
+    def VarChanged_autoSave(self,*params):
+        value=self.autoSave.get()
+        self.AddChangedItem('main','General','autosave',value)
+
+    def VarChanged_encoding(self,*params):
+        value=self.encoding.get()
+        self.AddChangedItem('main','EditorWindow','encoding',value)
+
+    def ResetChangedItems(self):
+        #When any config item is changed in this dialog, an entry
+        #should be made in the relevant section (config type) of this
+        #dictionary. The key should be the config file section name and the
+        #value a dictionary, whose key:value pairs are item=value pairs for
+        #that config file section.
+        self.changedItems={'main':{},'highlight':{},'keys':{},'extensions':{}}
+
+    def AddChangedItem(self,type,section,item,value):
+        value=str(value) #make sure we use a string
+        if not self.changedItems[type].has_key(section):
+            self.changedItems[type][section]={}
+        self.changedItems[type][section][item]=value
+
+    def GetDefaultItems(self):
+        dItems={'main':{},'highlight':{},'keys':{},'extensions':{}}
+        for configType in dItems.keys():
+            sections=idleConf.GetSectionList('default',configType)
+            for section in sections:
+                dItems[configType][section]={}
+                options=idleConf.defaultCfg[configType].GetOptionList(section)
+                for option in options:
+                    dItems[configType][section][option]=(
+                            idleConf.defaultCfg[configType].Get(section,option))
+        return dItems
+
+    def SetThemeType(self):
+        if self.themeIsBuiltin.get():
+            self.optMenuThemeBuiltin.config(state=NORMAL)
+            self.optMenuThemeCustom.config(state=DISABLED)
+            self.buttonDeleteCustomTheme.config(state=DISABLED)
+        else:
+            self.optMenuThemeBuiltin.config(state=DISABLED)
+            self.radioThemeCustom.config(state=NORMAL)
+            self.optMenuThemeCustom.config(state=NORMAL)
+            self.buttonDeleteCustomTheme.config(state=NORMAL)
+
+    def SetKeysType(self):
+        if self.keysAreBuiltin.get():
+            self.optMenuKeysBuiltin.config(state=NORMAL)
+            self.optMenuKeysCustom.config(state=DISABLED)
+            self.buttonDeleteCustomKeys.config(state=DISABLED)
+        else:
+            self.optMenuKeysBuiltin.config(state=DISABLED)
+            self.radioKeysCustom.config(state=NORMAL)
+            self.optMenuKeysCustom.config(state=NORMAL)
+            self.buttonDeleteCustomKeys.config(state=NORMAL)
+
+    def GetNewKeys(self):
+        """Prompt for a new key sequence for the selected binding.
+
+        If the active key set is a built-in one, the change forces
+        creation of a named custom key set first (the user is asked for
+        a name; cancelling leaves everything unchanged).  On success the
+        listbox entry and self.keyBinding are updated, which triggers
+        VarChanged_keyBinding via the variable trace.
+        """
+        listIndex=self.listBindings.index(ANCHOR)
+        binding=self.listBindings.get(listIndex)
+        bindName=binding.split()[0] #first part, up to first space
+        if self.keysAreBuiltin.get():
+            currentKeySetName=self.builtinKeys.get()
+        else:
+            currentKeySetName=self.customKeys.get()
+        currentBindings=idleConf.GetCurrentKeySet()
+        if currentKeySetName in self.changedItems['keys'].keys(): #unsaved changes
+            keySetChanges=self.changedItems['keys'][currentKeySetName]
+            for event in keySetChanges.keys():
+                currentBindings[event]=keySetChanges[event].split()
+        #pass the in-use sequences so the dialog can reject conflicts
+        currentKeySequences=currentBindings.values()
+        newKeys=GetKeysDialog(self,'Get New Keys',bindName,
+                currentKeySequences).result
+        if newKeys: #new keys were specified
+            if self.keysAreBuiltin.get(): #current key set is a built-in
+                message=('Your changes will be saved as a new Custom Key Set. '+
+                        'Enter a name for your new Custom Key Set below.')
+                newKeySet=self.GetNewKeysName(message)
+                if not newKeySet: #user cancelled custom key set creation
+                    #restore the selection so the list looks unchanged
+                    self.listBindings.select_set(listIndex)
+                    self.listBindings.select_anchor(listIndex)
+                    return
+                else: #create new custom key set based on previously active key set
+                    self.CreateNewKeySet(newKeySet)
+            self.listBindings.delete(listIndex)
+            self.listBindings.insert(listIndex,bindName+' - '+newKeys)
+            self.listBindings.select_set(listIndex)
+            self.listBindings.select_anchor(listIndex)
+            self.keyBinding.set(newKeys)
+        else:
+            self.listBindings.select_set(listIndex)
+            self.listBindings.select_anchor(listIndex)
+
+    def GetNewKeysName(self,message):
+        usedNames=(idleConf.GetSectionList('user','keys')+
+                idleConf.GetSectionList('default','keys'))
+        newKeySet=GetCfgSectionNameDialog(self,'New Custom Key Set',
+                message,usedNames).result
+        return newKeySet
+
+    def SaveAsNewKeySet(self):
+        newKeysName=self.GetNewKeysName('New Key Set Name:')
+        if newKeysName:
+            self.CreateNewKeySet(newKeysName)
+
+    def KeyBindingSelected(self,event):
+        self.buttonNewKeys.config(state=NORMAL)
+
+    def CreateNewKeySet(self,newKeySetName):
+        #creates new custom key set based on the previously active key set,
+        #and makes the new key set active
+        if self.keysAreBuiltin.get():
+            prevKeySetName=self.builtinKeys.get()
+        else:
+            prevKeySetName=self.customKeys.get()
+        prevKeys=idleConf.GetCoreKeys(prevKeySetName)
+        newKeys={}
+        for event in prevKeys.keys(): #add key set to changed items
+            eventName=event[2:-2] #trim off the angle brackets
+            binding=string.join(prevKeys[event])
+            newKeys[eventName]=binding
+        #handle any unsaved changes to prev key set
+        if prevKeySetName in self.changedItems['keys'].keys():
+            keySetChanges=self.changedItems['keys'][prevKeySetName]
+            for event in keySetChanges.keys():
+                newKeys[event]=keySetChanges[event]
+        #save the new theme
+        self.SaveNewKeySet(newKeySetName,newKeys)
+        #change gui over to the new key set
+        customKeyList=idleConf.GetSectionList('user','keys')
+        customKeyList.sort()
+        self.optMenuKeysCustom.SetMenu(customKeyList,newKeySetName)
+        self.keysAreBuiltin.set(0)
+        self.SetKeysType()
+
+    def LoadKeysList(self,keySetName):
+        reselect=0
+        newKeySet=0
+        if self.listBindings.curselection():
+            reselect=1
+            listIndex=self.listBindings.index(ANCHOR)
+        keySet=idleConf.GetKeySet(keySetName)
+        bindNames=keySet.keys()
+        bindNames.sort()
+        self.listBindings.delete(0,END)
+        for bindName in bindNames:
+            key=string.join(keySet[bindName]) #make key(s) into a string
+            bindName=bindName[2:-2] #trim off the angle brackets
+            if keySetName in self.changedItems['keys'].keys():
+                #handle any unsaved changes to this key set
+                if bindName in self.changedItems['keys'][keySetName].keys():
+                    key=self.changedItems['keys'][keySetName][bindName]
+            self.listBindings.insert(END, bindName+' - '+key)
+        if reselect:
+            self.listBindings.see(listIndex)
+            self.listBindings.select_set(listIndex)
+            self.listBindings.select_anchor(listIndex)
+
+    def DeleteCustomKeys(self):
+        keySetName=self.customKeys.get()
+        if not tkMessageBox.askyesno('Delete Key Set','Are you sure you wish '+
+                                     'to delete the key set %r ?' % (keySetName),
+                                     parent=self):
+            return
+        #remove key set from config
+        idleConf.userCfg['keys'].remove_section(keySetName)
+        if self.changedItems['keys'].has_key(keySetName):
+            del(self.changedItems['keys'][keySetName])
+        #write changes
+        idleConf.userCfg['keys'].Save()
+        #reload user key set list
+        itemList=idleConf.GetSectionList('user','keys')
+        itemList.sort()
+        if not itemList:
+            self.radioKeysCustom.config(state=DISABLED)
+            self.optMenuKeysCustom.SetMenu(itemList,'- no custom keys -')
+        else:
+            self.optMenuKeysCustom.SetMenu(itemList,itemList[0])
+        #revert to default key set
+        self.keysAreBuiltin.set(idleConf.defaultCfg['main'].Get('Keys','default'))
+        self.builtinKeys.set(idleConf.defaultCfg['main'].Get('Keys','name'))
+        #user can't back out of these changes, they must be applied now
+        self.Apply()
+        self.SetKeysType()
+
+    def DeleteCustomTheme(self):
+        themeName=self.customTheme.get()
+        if not tkMessageBox.askyesno('Delete Theme','Are you sure you wish '+
+                                     'to delete the theme %r ?' % (themeName,),
+                                     parent=self):
+            return
+        #remove theme from config
+        idleConf.userCfg['highlight'].remove_section(themeName)
+        if self.changedItems['highlight'].has_key(themeName):
+            del(self.changedItems['highlight'][themeName])
+        #write changes
+        idleConf.userCfg['highlight'].Save()
+        #reload user theme list
+        itemList=idleConf.GetSectionList('user','highlight')
+        itemList.sort()
+        if not itemList:
+            self.radioThemeCustom.config(state=DISABLED)
+            self.optMenuThemeCustom.SetMenu(itemList,'- no custom themes -')
+        else:
+            self.optMenuThemeCustom.SetMenu(itemList,itemList[0])
+        #revert to default theme
+        self.themeIsBuiltin.set(idleConf.defaultCfg['main'].Get('Theme','default'))
+        self.builtinTheme.set(idleConf.defaultCfg['main'].Get('Theme','name'))
+        #user can't back out of these changes, they must be applied now
+        self.Apply()
+        self.SetThemeType()
+
    def GetColour(self):
        """Let the user pick a new colour for the current highlight target.

        Opens the colour chooser seeded with the swatch's current colour.
        If the user picks a different colour and the active theme is a
        built-in one, a custom theme is created first; cancelling that
        creation abandons the colour change.  NOTE(review): setting
        self.colour presumably fires OnNewColourSet via a variable
        trace registered elsewhere — confirm against widget setup.
        """
        target=self.highlightTarget.get()
        prevColour=self.frameColourSet.cget('bg')
        rgbTuplet, colourString = tkColorChooser.askcolor(parent=self,
            title='Pick new colour for : '+target,initialcolor=prevColour)
        if colourString and (colourString!=prevColour):
            #user didn't cancel, and they chose a new colour
            if self.themeIsBuiltin.get(): #current theme is a built-in
                message=('Your changes will be saved as a new Custom Theme. '+
                        'Enter a name for your new Custom Theme below.')
                newTheme=self.GetNewThemeName(message)
                if not newTheme: #user cancelled custom theme creation
                    return
                else: #create new custom theme based on previously active theme
                    self.CreateNewTheme(newTheme)
                    self.colour.set(colourString)
            else: #current theme is user defined
                self.colour.set(colourString)
+
+    def OnNewColourSet(self):
+        newColour=self.colour.get()
+        self.frameColourSet.config(bg=newColour)#set sample
+        if self.fgHilite.get(): plane='foreground'
+        else: plane='background'
+        sampleElement=self.themeElements[self.highlightTarget.get()][0]
+        self.textHighlightSample.tag_config(sampleElement, **{plane:newColour})
+        theme=self.customTheme.get()
+        themeElement=sampleElement+'-'+plane
+        self.AddChangedItem('highlight',theme,themeElement,newColour)
+
+    def GetNewThemeName(self,message):
+        usedNames=(idleConf.GetSectionList('user','highlight')+
+                idleConf.GetSectionList('default','highlight'))
+        newTheme=GetCfgSectionNameDialog(self,'New Custom Theme',
+                message,usedNames).result
+        return newTheme
+
+    def SaveAsNewTheme(self):
+        newThemeName=self.GetNewThemeName('New Theme Name:')
+        if newThemeName:
+            self.CreateNewTheme(newThemeName)
+
+    def CreateNewTheme(self,newThemeName):
+        #creates new custom theme based on the previously active theme,
+        #and makes the new theme active
+        if self.themeIsBuiltin.get():
+            themeType='default'
+            themeName=self.builtinTheme.get()
+        else:
+            themeType='user'
+            themeName=self.customTheme.get()
+        newTheme=idleConf.GetThemeDict(themeType,themeName)
+        #apply any of the old theme's unsaved changes to the new theme
+        if themeName in self.changedItems['highlight'].keys():
+            themeChanges=self.changedItems['highlight'][themeName]
+            for element in themeChanges.keys():
+                newTheme[element]=themeChanges[element]
+        #save the new theme
+        self.SaveNewTheme(newThemeName,newTheme)
+        #change gui over to the new theme
+        customThemeList=idleConf.GetSectionList('user','highlight')
+        customThemeList.sort()
+        self.optMenuThemeCustom.SetMenu(customThemeList,newThemeName)
+        self.themeIsBuiltin.set(0)
+        self.SetThemeType()
+
+    def OnListFontButtonRelease(self,event):
+        font = self.listFontName.get(ANCHOR)
+        self.fontName.set(font.lower())
+        self.SetFontSample()
+
+    def SetFontSample(self,event=None):
+        fontName=self.fontName.get()
+        if self.fontBold.get():
+            fontWeight=tkFont.BOLD
+        else:
+            fontWeight=tkFont.NORMAL
+        self.editFont.config(size=self.fontSize.get(),
+                weight=fontWeight,family=fontName)
+
+    def SetHighlightTarget(self):
+        if self.highlightTarget.get()=='Cursor': #bg not possible
+            self.radioFg.config(state=DISABLED)
+            self.radioBg.config(state=DISABLED)
+            self.fgHilite.set(1)
+        else: #both fg and bg can be set
+            self.radioFg.config(state=NORMAL)
+            self.radioBg.config(state=NORMAL)
+            self.fgHilite.set(1)
+        self.SetColourSample()
+
+    def SetColourSampleBinding(self,*args):
+        self.SetColourSample()
+
    def SetColourSample(self):
        """Show the current target element's colour in the swatch frame."""
        #set the colour sample area (fixed typo: was 'smaple')
        tag=self.themeElements[self.highlightTarget.get()][0]
        if self.fgHilite.get(): plane='foreground'
        else: plane='background'
        colour=self.textHighlightSample.tag_cget(tag,plane)
        self.frameColourSet.config(bg=colour)
+
+    def PaintThemeSample(self):
+        if self.themeIsBuiltin.get(): #a default theme
+            theme=self.builtinTheme.get()
+        else: #a user theme
+            theme=self.customTheme.get()
+        for elementTitle in self.themeElements.keys():
+            element=self.themeElements[elementTitle][0]
+            colours=idleConf.GetHighlight(theme,element)
+            if element=='cursor': #cursor sample needs special painting
+                colours['background']=idleConf.GetHighlight(theme,
+                        'normal', fgBg='bg')
+            #handle any unsaved changes to this theme
+            if theme in self.changedItems['highlight'].keys():
+                themeDict=self.changedItems['highlight'][theme]
+                if themeDict.has_key(element+'-foreground'):
+                    colours['foreground']=themeDict[element+'-foreground']
+                if themeDict.has_key(element+'-background'):
+                    colours['background']=themeDict[element+'-background']
+            self.textHighlightSample.tag_config(element, **colours)
+        self.SetColourSample()
+
+##     def OnCheckUserHelpBrowser(self):
+##         if self.userHelpBrowser.get():
+##             self.entryHelpBrowser.config(state=NORMAL)
+##         else:
+##             self.entryHelpBrowser.config(state=DISABLED)
+
+    def HelpSourceSelected(self,event):
+        self.SetHelpListButtonStates()
+
+    def SetHelpListButtonStates(self):
+        if self.listHelp.size()<1: #no entries in list
+            self.buttonHelpListEdit.config(state=DISABLED)
+            self.buttonHelpListRemove.config(state=DISABLED)
+        else: #there are some entries
+            if self.listHelp.curselection(): #there currently is a selection
+                self.buttonHelpListEdit.config(state=NORMAL)
+                self.buttonHelpListRemove.config(state=NORMAL)
+            else:  #there currently is not a selection
+                self.buttonHelpListEdit.config(state=DISABLED)
+                self.buttonHelpListRemove.config(state=DISABLED)
+
+    def HelpListItemAdd(self):
+        helpSource=GetHelpSourceDialog(self,'New Help Source').result
+        if helpSource:
+            self.userHelpList.append( (helpSource[0],helpSource[1]) )
+            self.listHelp.insert(END,helpSource[0])
+            self.UpdateUserHelpChangedItems()
+        self.SetHelpListButtonStates()
+
+    def HelpListItemEdit(self):
+        itemIndex=self.listHelp.index(ANCHOR)
+        helpSource=self.userHelpList[itemIndex]
+        newHelpSource=GetHelpSourceDialog(self,'Edit Help Source',
+                menuItem=helpSource[0],filePath=helpSource[1]).result
+        if (not newHelpSource) or (newHelpSource==helpSource):
+            return #no changes
+        self.userHelpList[itemIndex]=newHelpSource
+        self.listHelp.delete(itemIndex)
+        self.listHelp.insert(itemIndex,newHelpSource[0])
+        self.UpdateUserHelpChangedItems()
+        self.SetHelpListButtonStates()
+
+    def HelpListItemRemove(self):
+        itemIndex=self.listHelp.index(ANCHOR)
+        del(self.userHelpList[itemIndex])
+        self.listHelp.delete(itemIndex)
+        self.UpdateUserHelpChangedItems()
+        self.SetHelpListButtonStates()
+
+    def UpdateUserHelpChangedItems(self):
+        "Clear and rebuild the HelpFiles section in self.changedItems"
+        self.changedItems['main']['HelpFiles'] = {}
+        for num in range(1,len(self.userHelpList)+1):
+            self.AddChangedItem('main','HelpFiles',str(num),
+                    string.join(self.userHelpList[num-1][:2],';'))
+
+    def LoadFontCfg(self):
+        ##base editor font selection list
+        fonts=list(tkFont.families(self))
+        fonts.sort()
+        for font in fonts:
+            self.listFontName.insert(END,font)
+        configuredFont=idleConf.GetOption('main','EditorWindow','font',
+                default='courier')
+        lc_configuredFont = configuredFont.lower()
+        self.fontName.set(lc_configuredFont)
+        lc_fonts = [s.lower() for s in fonts]
+        if lc_configuredFont in lc_fonts:
+            currentFontIndex = lc_fonts.index(lc_configuredFont)
+            self.listFontName.see(currentFontIndex)
+            self.listFontName.select_set(currentFontIndex)
+            self.listFontName.select_anchor(currentFontIndex)
+        ##font size dropdown
+        fontSize=idleConf.GetOption('main','EditorWindow','font-size',
+                default='10')
+        self.optMenuFontSize.SetMenu(('7','8','9','10','11','12','13','14',
+                '16','18','20','22'),fontSize )
+        ##fontWeight
+        self.fontBold.set(idleConf.GetOption('main','EditorWindow',
+                'font-bold',default=0,type='bool'))
+        ##font sample
+        self.SetFontSample()
+
+    def LoadTabCfg(self):
+        ##indent type radiobuttons
+        spaceIndent=idleConf.GetOption('main','Indent','use-spaces',
+                default=1,type='bool')
+        self.indentBySpaces.set(spaceIndent)
+        ##indent sizes
+        spaceNum=idleConf.GetOption('main','Indent','num-spaces',
+                default=4,type='int')
+        #tabCols=idleConf.GetOption('main','Indent','tab-cols',
+        #        default=4,type='int')
+        self.spaceNum.set(spaceNum)
+        #self.tabCols.set(tabCols)
+
+    def LoadThemeCfg(self):
+        ##current theme type radiobutton
+        self.themeIsBuiltin.set(idleConf.GetOption('main','Theme','default',
+            type='bool',default=1))
+        ##currently set theme
+        currentOption=idleConf.CurrentTheme()
+        ##load available theme option menus
+        if self.themeIsBuiltin.get(): #default theme selected
+            itemList=idleConf.GetSectionList('default','highlight')
+            itemList.sort()
+            self.optMenuThemeBuiltin.SetMenu(itemList,currentOption)
+            itemList=idleConf.GetSectionList('user','highlight')
+            itemList.sort()
+            if not itemList:
+                self.radioThemeCustom.config(state=DISABLED)
+                self.customTheme.set('- no custom themes -')
+            else:
+                self.optMenuThemeCustom.SetMenu(itemList,itemList[0])
+        else: #user theme selected
+            itemList=idleConf.GetSectionList('user','highlight')
+            itemList.sort()
+            self.optMenuThemeCustom.SetMenu(itemList,currentOption)
+            itemList=idleConf.GetSectionList('default','highlight')
+            itemList.sort()
+            self.optMenuThemeBuiltin.SetMenu(itemList,itemList[0])
+        self.SetThemeType()
+        ##load theme element option menu
+        themeNames=self.themeElements.keys()
+        themeNames.sort(self.__ThemeNameIndexCompare)
+        self.optMenuHighlightTarget.SetMenu(themeNames,themeNames[0])
+        self.PaintThemeSample()
+        self.SetHighlightTarget()
+
    def __ThemeNameIndexCompare(self,a,b):
        """cmp-style comparator: order theme element names a and b by the
        display index stored at themeElements[name][1]; returns -1/0/1."""
        if self.themeElements[a][1]<self.themeElements[b][1]: return -1
        elif self.themeElements[a][1]==self.themeElements[b][1]: return 0
        else: return 1
+
+    def LoadKeyCfg(self):
+        ##current keys type radiobutton
+        self.keysAreBuiltin.set(idleConf.GetOption('main','Keys','default',
+            type='bool',default=1))
+        ##currently set keys
+        currentOption=idleConf.CurrentKeys()
+        ##load available keyset option menus
+        if self.keysAreBuiltin.get(): #default theme selected
+            itemList=idleConf.GetSectionList('default','keys')
+            itemList.sort()
+            self.optMenuKeysBuiltin.SetMenu(itemList,currentOption)
+            itemList=idleConf.GetSectionList('user','keys')
+            itemList.sort()
+            if not itemList:
+                self.radioKeysCustom.config(state=DISABLED)
+                self.customKeys.set('- no custom keys -')
+            else:
+                self.optMenuKeysCustom.SetMenu(itemList,itemList[0])
+        else: #user key set selected
+            itemList=idleConf.GetSectionList('user','keys')
+            itemList.sort()
+            self.optMenuKeysCustom.SetMenu(itemList,currentOption)
+            itemList=idleConf.GetSectionList('default','keys')
+            itemList.sort()
+            self.optMenuKeysBuiltin.SetMenu(itemList,itemList[0])
+        self.SetKeysType()
+        ##load keyset element list
+        keySetName=idleConf.CurrentKeys()
+        self.LoadKeysList(keySetName)
+
    def LoadGeneralCfg(self):
        """Populate the general page from the current configuration."""
        #startup state
        self.startupEdit.set(idleConf.GetOption('main','General',
                'editor-on-startup',default=1,type='bool'))
        #autosave state
        self.autoSave.set(idleConf.GetOption('main', 'General', 'autosave',
                                             default=0, type='bool'))
        #initial window size
        self.winWidth.set(idleConf.GetOption('main','EditorWindow','width'))
        self.winHeight.set(idleConf.GetOption('main','EditorWindow','height'))
        #initial paragraph reformat size
        self.paraWidth.set(idleConf.GetOption('main','FormatParagraph','paragraph'))
        # default source encoding
        self.encoding.set(idleConf.GetOption('main', 'EditorWindow',
                                             'encoding', default='none'))
        # additional help sources
        self.userHelpList = idleConf.GetAllExtraHelpSourcesList()
        for helpItem in self.userHelpList:
            self.listHelp.insert(END,helpItem[0])
        self.SetHelpListButtonStates()
        # user-help-browser support is not implemented yet:
        #self.userHelpBrowser.set(idleConf.GetOption('main','General',
        #        'user-help-browser',default=0,type='bool'))
        #self.helpBrowser.set(idleConf.GetOption('main','General',
        #        'user-help-browser-command',default=''))
        #self.OnCheckUserHelpBrowser()
+
+    def LoadConfigs(self):
+        """
+        load configuration from default and user config files and populate
+        the widgets on the config dialog pages.
+        """
+        ### fonts / tabs page
+        self.LoadFontCfg()
+        self.LoadTabCfg()
+        ### highlighting page
+        self.LoadThemeCfg()
+        ### keys page
+        self.LoadKeyCfg()
+        ### general page
+        self.LoadGeneralCfg()
+
+    def SaveNewKeySet(self,keySetName,keySet):
+        """
+        save a newly created core key set.
+        keySetName - string, the name of the new key set
+        keySet - dictionary containing the new key set
+        """
+        if not idleConf.userCfg['keys'].has_section(keySetName):
+            idleConf.userCfg['keys'].add_section(keySetName)
+        for event in keySet.keys():
+            value=keySet[event]
+            idleConf.userCfg['keys'].SetOption(keySetName,event,value)
+
+    def SaveNewTheme(self,themeName,theme):
+        """
+        save a newly created theme.
+        themeName - string, the name of the new theme
+        theme - dictionary containing the new theme
+        """
+        if not idleConf.userCfg['highlight'].has_section(themeName):
+            idleConf.userCfg['highlight'].add_section(themeName)
+        for element in theme.keys():
+            value=theme[element]
+            idleConf.userCfg['highlight'].SetOption(themeName,element,value)
+
+    def SetUserValue(self,configType,section,item,value):
+        if idleConf.defaultCfg[configType].has_option(section,item):
+            if idleConf.defaultCfg[configType].Get(section,item)==value:
+                #the setting equals a default setting, remove it from user cfg
+                return idleConf.userCfg[configType].RemoveOption(section,item)
+        #if we got here set the option
+        return idleConf.userCfg[configType].SetOption(section,item,value)
+
    def SaveAllChangedConfigs(self):
        """Save configuration changes to the user config files.

        The 'HelpFiles' section is wholesale-replaced rather than merged,
        so its old section is removed first.  The 'keys' and 'highlight'
        configs are saved unconditionally so that an empty config file is
        deleted from disk even when nothing changed this session.
        Clears self.changedItems when done.
        """
        idleConf.userCfg['main'].Save()
        for configType in self.changedItems.keys():
            cfgTypeHasChanges = False
            for section in self.changedItems[configType].keys():
                if section == 'HelpFiles':
                    #this section gets completely replaced
                    idleConf.userCfg['main'].remove_section('HelpFiles')
                    cfgTypeHasChanges = True
                for item in self.changedItems[configType][section].keys():
                    value = self.changedItems[configType][section][item]
                    if self.SetUserValue(configType,section,item,value):
                        cfgTypeHasChanges = True
            if cfgTypeHasChanges:
                idleConf.userCfg[configType].Save()
        for configType in ['keys', 'highlight']:
            # save these even if unchanged!
            idleConf.userCfg[configType].Save()
        self.ResetChangedItems() #clear the changed items dict
+
+    def ActivateConfigChanges(self):
+        #things that need to be done to make
+        #applied config changes dynamic:
+        #update editor/shell font and repaint
+        #dynamically update indentation setttings
+        #update theme and repaint
+        #update keybindings and re-bind
+        #update user help sources menu
+        winInstances=self.parent.instance_dict.keys()
+        for instance in winInstances:
+            instance.ResetColorizer()
+            instance.ResetFont()
+            instance.ResetKeybindings()
+            instance.reset_help_menu_entries()
+
    def Cancel(self):
        """Discard unsaved changes and close the dialog."""
        self.destroy()

    def Ok(self):
        """Apply all changes, then close the dialog."""
        self.Apply()
        self.destroy()

    def Apply(self):
        """Save all changed configs and activate them in open windows."""
        self.SaveAllChangedConfigs()
        self.ActivateConfigChanges()

    def Help(self):
        """Help button callback; not implemented yet."""
        pass
+
if __name__ == '__main__':
    #test the dialog: a button on a bare Tk root that opens ConfigDialog
    root=Tk()
    Button(root,text='Dialog',
            command=lambda:ConfigDialog(root,'Settings')).pack()
    root.instance_dict={} #ActivateConfigChanges expects this on the parent
    root.mainloop()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/configHandler.py b/depot_tools/release/win/python_24/Lib/idlelib/configHandler.py
new file mode 100644
index 0000000..d13f1e4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/configHandler.py
@@ -0,0 +1,682 @@
+"""Provides access to stored IDLE configuration information.
+
+Refer to the comments at the beginning of config-main.def for a description of
+the available configuration files and the design implemented to update user
+configuration information.  In particular, user configuration choices which
+duplicate the defaults will be removed from the user's configuration files,
+and if a file becomes empty, it will be deleted.
+
+The contents of the user files may be altered using the Options/Configure IDLE
+menu to access the configuration GUI (configDialog.py), or manually.
+
+Throughout this module there is an emphasis on returning usable defaults
+when a problem occurs in returning a requested configuration value back to
+idle. This is to allow IDLE to continue to function in spite of errors in
+the retrieval of config information. When a default is returned instead of
+a requested config value, a message is printed to stderr to aid in
+configuration problem notification and resolution.
+
+"""
+import os
+import sys
+import string
+from ConfigParser import ConfigParser, NoOptionError, NoSectionError
+
# Exceptions raised when a caller asks for an unknown config type or
# config set, an invalid fg/bg specifier, or a nonexistent theme.
class InvalidConfigType(Exception): pass
class InvalidConfigSet(Exception): pass
class InvalidFgBg(Exception): pass
class InvalidTheme(Exception): pass
+
class IdleConfParser(ConfigParser):
    """
    A ConfigParser specialised for idle configuration file handling
    """
    def __init__(self, cfgFile, cfgDefaults=None):
        """
        cfgFile - string, fully specified configuration file name
        """
        self.file=cfgFile
        ConfigParser.__init__(self,defaults=cfgDefaults)

    def Get(self, section, option, type=None, default=None):
        """
        Get an option value for given section/option or return default.
        If type is 'bool' or 'int', coerce the value accordingly.
        """
        if not self.has_option(section,option):
            return default
        if type=='bool':
            return self.getboolean(section,option)
        elif type=='int':
            return self.getint(section,option)
        return self.get(section,option)

    def GetOptionList(self,section):
        """
        Return the list of options in the given section, or [] when the
        section does not exist.
        """
        if self.has_section(section):
            return self.options(section)
        return []

    def Load(self):
        """
        Load the configuration file from disk
        """
        self.read(self.file)
+
class IdleUserConfParser(IdleConfParser):
    """
    IdleConfigParser specialised for user configuration handling.
    """

    def AddSection(self,section):
        """
        If section doesn't exist, add it.
        """
        if not self.has_section(section):
            self.add_section(section)

    def RemoveEmptySections(self):
        """
        Remove any sections that have no options.
        """
        for section in self.sections():
            if not self.GetOptionList(section):
                self.remove_section(section)

    def IsEmpty(self):
        """
        Remove empty sections and then return 1 if parser has no sections
        left, else return 0.
        """
        self.RemoveEmptySections()
        if self.sections():
            return 0
        else:
            return 1

    def RemoveOption(self,section,option):
        """
        If section/option exists, remove it.
        Returns 1 if option was removed, 0 otherwise.
        """
        if self.has_section(section):
            return self.remove_option(section,option)
        #bug fix: previously fell through and returned None when the
        #section was missing, despite the documented 0 return value
        return 0

    def SetOption(self,section,option,value):
        """
        Sets option to value, adding section if required.
        Returns 1 if option was added or changed, otherwise 0.
        """
        if self.has_option(section,option):
            if self.get(section,option)==value:
                return 0
            else:
                self.set(section,option,value)
                return 1
        else:
            if not self.has_section(section):
                self.add_section(section)
            self.set(section,option,value)
            return 1

    def RemoveFile(self):
        """
        Removes the user config file from disk if it exists.
        """
        if os.path.exists(self.file):
            os.remove(self.file)

    def Save(self):
        """Update user configuration file.

        Remove empty sections. If resulting config isn't empty, write the file
        to disk. If config is empty, remove the file from disk if it exists.

        """
        if not self.IsEmpty():
            cfgFile=open(self.file,'w')
            #bug fix: the file handle was never closed, leaking it (and on
            #some platforms delaying the flush of the written config)
            try:
                self.write(cfgFile)
            finally:
                cfgFile.close()
        else:
            self.RemoveFile()
+
+class IdleConf:
+    """
+    holds config parsers for all idle config files:
+    default config files
+        (idle install dir)/config-main.def
+        (idle install dir)/config-extensions.def
+        (idle install dir)/config-highlight.def
+        (idle install dir)/config-keys.def
+    user config  files
+        (user home dir)/.idlerc/config-main.cfg
+        (user home dir)/.idlerc/config-extensions.cfg
+        (user home dir)/.idlerc/config-highlight.cfg
+        (user home dir)/.idlerc/config-keys.cfg
+    """
    def __init__(self):
        #config parsers for the shipped default files, keyed by config type
        self.defaultCfg={}
        #config parsers for the per-user files, keyed by config type
        self.userCfg={}
        self.cfg={}
        self.CreateConfigHandlers()
        self.LoadCfgFiles()
        #self.LoadCfg()
+
+    def CreateConfigHandlers(self):
+        """
+        set up a dictionary of config parsers for default and user
+        configurations respectively
+        """
+        #build idle install path
+        if __name__ != '__main__': # we were imported
+            idleDir=os.path.dirname(__file__)
+        else: # we were exec'ed (for testing only)
+            idleDir=os.path.abspath(sys.path[0])
+        userDir=self.GetUserCfgDir()
+        configTypes=('main','extensions','highlight','keys')
+        defCfgFiles={}
+        usrCfgFiles={}
+        for cfgType in configTypes: #build config file names
+            defCfgFiles[cfgType]=os.path.join(idleDir,'config-'+cfgType+'.def')
+            usrCfgFiles[cfgType]=os.path.join(userDir,'config-'+cfgType+'.cfg')
+        for cfgType in configTypes: #create config parsers
+            self.defaultCfg[cfgType]=IdleConfParser(defCfgFiles[cfgType])
+            self.userCfg[cfgType]=IdleUserConfParser(usrCfgFiles[cfgType])
+
+    def GetUserCfgDir(self):
+        """
+        Creates (if required) and returns a filesystem directory for storing
+        user config files.
+        
+        """
+        cfgDir = '.idlerc'
+        userDir = os.path.expanduser('~')
+        if userDir != '~': # expanduser() found user home dir
+            if not os.path.exists(userDir):
+                warn = ('\n Warning: os.path.expanduser("~") points to\n '+
+                        userDir+',\n but the path does not exist.\n')
+                sys.stderr.write(warn)
+                userDir = '~'
+        if userDir == "~": # still no path to home!
+            # traditionally IDLE has defaulted to os.getcwd(), is this adequate?
+            userDir = os.getcwd()
+        userDir = os.path.join(userDir, cfgDir)
+        if not os.path.exists(userDir):
+            try:
+                os.mkdir(userDir)
+            except (OSError, IOError):
+                warn = ('\n Warning: unable to create user config directory\n'+
+                        userDir+'\n Check path and permissions.\n Exiting!\n\n')
+                sys.stderr.write(warn)
+                raise SystemExit
+        return userDir
+
+    def GetOption(self, configType, section, option, default=None, type=None,
+                  warn_on_default=True):
+        """
+        Get an option value for given config type and given general
+        configuration section/option or return a default. If type is specified,
+        return as type. Firstly the user configuration is checked, with a
+        fallback to the default configuration, and a final 'catch all'
+        fallback to a useable passed-in default if the option isn't present in
+        either the user or the default configuration.
+        configType must be one of ('main','extensions','highlight','keys')
+        If a default is returned, and warn_on_default is True, a warning is
+        printed to stderr.
+
+        """
+        if self.userCfg[configType].has_option(section,option):
+            return self.userCfg[configType].Get(section, option, type=type)
+        elif self.defaultCfg[configType].has_option(section,option):
+            return self.defaultCfg[configType].Get(section, option, type=type)
+        else: #returning default, print warning
+            if warn_on_default:
+                warning = ('\n Warning: configHandler.py - IdleConf.GetOption -\n'
+                           ' problem retrieving configration option %r\n'
+                           ' from section %r.\n'
+                           ' returning default value: %r\n' %
+                           (option, section, default))
+                sys.stderr.write(warning)
+            return default
+
+    def SetOption(self, configType, section, option, value):
+        """In user's config file, set section's option to value.
+
+        """
+        self.userCfg[configType].SetOption(section, option, value)
+
+    def GetSectionList(self, configSet, configType):
+        """
+        Get a list of sections from either the user or default config for
+        the given config type.
+        configSet must be either 'user' or 'default'
+        configType must be one of ('main','extensions','highlight','keys')
+        """
+        if not (configType in ('main','extensions','highlight','keys')):
+            raise InvalidConfigType, 'Invalid configType specified'
+        if configSet == 'user':
+            cfgParser=self.userCfg[configType]
+        elif configSet == 'default':
+            cfgParser=self.defaultCfg[configType]
+        else:
+            raise InvalidConfigSet, 'Invalid configSet specified'
+        return cfgParser.sections()
+
+    def GetHighlight(self, theme, element, fgBg=None):
+        """
+        return individual highlighting theme elements.
+        fgBg - string ('fg'or'bg') or None, if None return a dictionary
+        containing fg and bg colours (appropriate for passing to Tkinter in,
+        e.g., a tag_config call), otherwise fg or bg colour only as specified.
+        """
+        if self.defaultCfg['highlight'].has_section(theme):
+            themeDict=self.GetThemeDict('default',theme)
+        else:
+            themeDict=self.GetThemeDict('user',theme)
+        fore=themeDict[element+'-foreground']
+        if element=='cursor': #there is no config value for cursor bg
+            back=themeDict['normal-background']
+        else:
+            back=themeDict[element+'-background']
+        highlight={"foreground": fore,"background": back}
+        if not fgBg: #return dict of both colours
+            return highlight
+        else: #return specified colour only
+            if fgBg == 'fg':
+                return highlight["foreground"]
+            if fgBg == 'bg':
+                return highlight["background"]
+            else:
+                raise InvalidFgBg, 'Invalid fgBg specified'
+
+    def GetThemeDict(self,type,themeName):
+        """
+        type - string, 'default' or 'user' theme type
+        themeName - string, theme name
+        Returns a dictionary which holds {option:value} for each element
+        in the specified theme. Values are loaded over a set of ultimate last
+        fallback defaults to guarantee that all theme elements are present in
+        a newly created theme.
+        """
+        if type == 'user':
+            cfgParser=self.userCfg['highlight']
+        elif type == 'default':
+            cfgParser=self.defaultCfg['highlight']
+        else:
+            raise InvalidTheme, 'Invalid theme type specified'
+        #foreground and background values are provded for each theme element
+        #(apart from cursor) even though all these values are not yet used
+        #by idle, to allow for their use in the future. Default values are
+        #generally black and white.
+        theme={ 'normal-foreground':'#000000',
+                'normal-background':'#ffffff',
+                'keyword-foreground':'#000000',
+                'keyword-background':'#ffffff',
+                'builtin-foreground':'#000000',
+                'builtin-background':'#ffffff',
+                'comment-foreground':'#000000',
+                'comment-background':'#ffffff',
+                'string-foreground':'#000000',
+                'string-background':'#ffffff',
+                'definition-foreground':'#000000',
+                'definition-background':'#ffffff',
+                'hilite-foreground':'#000000',
+                'hilite-background':'gray',
+                'break-foreground':'#ffffff',
+                'break-background':'#000000',
+                'hit-foreground':'#ffffff',
+                'hit-background':'#000000',
+                'error-foreground':'#ffffff',
+                'error-background':'#000000',
+                #cursor (only foreground can be set)
+                'cursor-foreground':'#000000',
+                #shell window
+                'stdout-foreground':'#000000',
+                'stdout-background':'#ffffff',
+                'stderr-foreground':'#000000',
+                'stderr-background':'#ffffff',
+                'console-foreground':'#000000',
+                'console-background':'#ffffff' }
+        for element in theme.keys():
+            if not cfgParser.has_option(themeName,element):
+                #we are going to return a default, print warning
+                warning=('\n Warning: configHandler.py - IdleConf.GetThemeDict'
+                           ' -\n problem retrieving theme element %r'
+                           '\n from theme %r.\n'
+                           ' returning default value: %r\n' %
+                           (element, themeName, theme[element]))
+                sys.stderr.write(warning)
+            colour=cfgParser.Get(themeName,element,default=theme[element])
+            theme[element]=colour
+        return theme
+
+    def CurrentTheme(self):
+        """
+        Returns the name of the currently active theme
+        """
+        return self.GetOption('main','Theme','name',default='')
+
+    def CurrentKeys(self):
+        """
+        Returns the name of the currently active key set
+        """
+        return self.GetOption('main','Keys','name',default='')
+
+    def GetExtensions(self, active_only=True, editor_only=False, shell_only=False):
+        """
+        Gets a list of all idle extensions declared in the config files.
+        active_only - boolean, if true only return active (enabled) extensions
+        """
+        extns=self.RemoveKeyBindNames(
+                self.GetSectionList('default','extensions'))
+        userExtns=self.RemoveKeyBindNames(
+                self.GetSectionList('user','extensions'))
+        for extn in userExtns:
+            if extn not in extns: #user has added own extension
+                extns.append(extn)
+        if active_only:
+            activeExtns=[]
+            for extn in extns:
+                if self.GetOption('extensions', extn, 'enable', default=True,
+                                  type='bool'):
+                    #the extension is enabled
+                    if editor_only or shell_only:
+                        if editor_only:
+                            option = "enable_editor"
+                        else:
+                            option = "enable_shell"
+                        if self.GetOption('extensions', extn,option,
+                                          default=True, type='bool',
+                                          warn_on_default=False):
+                            activeExtns.append(extn)
+                    else:
+                        activeExtns.append(extn)
+            return activeExtns
+        else:
+            return extns
+
+    def RemoveKeyBindNames(self,extnNameList):
+        #get rid of keybinding section names
+        names=extnNameList
+        kbNameIndicies=[]
+        for name in names:
+            if name.endswith('_bindings') or name.endswith('_cfgBindings'):
+                kbNameIndicies.append(names.index(name))
+        kbNameIndicies.sort()
+        kbNameIndicies.reverse()
+        for index in kbNameIndicies: #delete each keybinding section name
+            del(names[index])
+        return names
+
+    def GetExtnNameForEvent(self,virtualEvent):
+        """
+        Returns the name of the extension that virtualEvent is bound in, or
+        None if not bound in any extension.
+        virtualEvent - string, name of the virtual event to test for, without
+                       the enclosing '<< >>'
+        """
+        extName=None
+        vEvent='<<'+virtualEvent+'>>'
+        for extn in self.GetExtensions(active_only=0):
+            for event in self.GetExtensionKeys(extn).keys():
+                if event == vEvent:
+                    extName=extn
+        return extName
+
+    def GetExtensionKeys(self,extensionName):
+        """
+        returns a dictionary of the configurable keybindings for a particular
+        extension,as they exist in the dictionary returned by GetCurrentKeySet;
+        that is, where previously used bindings are disabled.
+        """
+        keysName=extensionName+'_cfgBindings'
+        activeKeys=self.GetCurrentKeySet()
+        extKeys={}
+        if self.defaultCfg['extensions'].has_section(keysName):
+            eventNames=self.defaultCfg['extensions'].GetOptionList(keysName)
+            for eventName in eventNames:
+                event='<<'+eventName+'>>'
+                binding=activeKeys[event]
+                extKeys[event]=binding
+        return extKeys
+
+    def __GetRawExtensionKeys(self,extensionName):
+        """
+        returns a dictionary of the configurable keybindings for a particular
+        extension, as defined in the configuration files, or an empty dictionary
+        if no bindings are found
+        """
+        keysName=extensionName+'_cfgBindings'
+        extKeys={}
+        if self.defaultCfg['extensions'].has_section(keysName):
+            eventNames=self.defaultCfg['extensions'].GetOptionList(keysName)
+            for eventName in eventNames:
+                binding=self.GetOption('extensions',keysName,
+                        eventName,default='').split()
+                event='<<'+eventName+'>>'
+                extKeys[event]=binding
+        return extKeys
+
+    def GetExtensionBindings(self,extensionName):
+        """
+        Returns a dictionary of all the event bindings for a particular
+        extension. The configurable keybindings are returned as they exist in
+        the dictionary returned by GetCurrentKeySet; that is, where re-used
+        keybindings are disabled.
+        """
+        bindsName=extensionName+'_bindings'
+        extBinds=self.GetExtensionKeys(extensionName)
+        #add the non-configurable bindings
+        if self.defaultCfg['extensions'].has_section(bindsName):
+            eventNames=self.defaultCfg['extensions'].GetOptionList(bindsName)
+            for eventName in eventNames:
+                binding=self.GetOption('extensions',bindsName,
+                        eventName,default='').split()
+                event='<<'+eventName+'>>'
+                extBinds[event]=binding
+
+        return extBinds
+
+    def GetKeyBinding(self, keySetName, eventStr):
+        """
+        returns the keybinding for a specific event.
+        keySetName - string, name of key binding set
+        eventStr - string, the virtual event we want the binding for,
+                   represented as a string, eg. '<<event>>'
+        """
+        eventName=eventStr[2:-2] #trim off the angle brackets
+        binding=self.GetOption('keys',keySetName,eventName,default='').split()
+        return binding
+
+    def GetCurrentKeySet(self):
+        return self.GetKeySet(self.CurrentKeys())
+
+    def GetKeySet(self,keySetName):
+        """
+        Returns a dictionary of: all requested core keybindings, plus the
+        keybindings for all currently active extensions. If a binding defined
+        in an extension is already in use, that binding is disabled.
+        """
+        keySet=self.GetCoreKeys(keySetName)
+        activeExtns=self.GetExtensions(active_only=1)
+        for extn in activeExtns:
+            extKeys=self.__GetRawExtensionKeys(extn)
+            if extKeys: #the extension defines keybindings
+                for event in extKeys.keys():
+                    if extKeys[event] in keySet.values():
+                        #the binding is already in use
+                        extKeys[event]='' #disable this binding
+                    keySet[event]=extKeys[event] #add binding
+        return keySet
+
+    def IsCoreBinding(self,virtualEvent):
+        """
+        returns true if the virtual event is bound in the core idle keybindings.
+        virtualEvent - string, name of the virtual event to test for, without
+                       the enclosing '<< >>'
+        """
+        return ('<<'+virtualEvent+'>>') in self.GetCoreKeys().keys()
+
+    def GetCoreKeys(self, keySetName=None):
+        """
+        returns the requested set of core keybindings, with fallbacks if
+        required.
+        Keybindings loaded from the config file(s) are loaded _over_ these
+        defaults, so if there is a problem getting any core binding there will
+        be an 'ultimate last resort fallback' to the CUA-ish bindings
+        defined here.
+        """
+        keyBindings={
+            '<<copy>>': ['<Control-c>', '<Control-C>'],
+            '<<cut>>': ['<Control-x>', '<Control-X>'],
+            '<<paste>>': ['<Control-v>', '<Control-V>'],
+            '<<beginning-of-line>>': ['<Control-a>', '<Home>'],
+            '<<center-insert>>': ['<Control-l>'],
+            '<<close-all-windows>>': ['<Control-q>'],
+            '<<close-window>>': ['<Alt-F4>'],
+            '<<do-nothing>>': ['<Control-x>'],
+            '<<end-of-file>>': ['<Control-d>'],
+            '<<python-docs>>': ['<F1>'],
+            '<<python-context-help>>': ['<Shift-F1>'],
+            '<<history-next>>': ['<Alt-n>'],
+            '<<history-previous>>': ['<Alt-p>'],
+            '<<interrupt-execution>>': ['<Control-c>'],
+            '<<view-restart>>': ['<F6>'],
+            '<<restart-shell>>': ['<Control-F6>'],
+            '<<open-class-browser>>': ['<Alt-c>'],
+            '<<open-module>>': ['<Alt-m>'],
+            '<<open-new-window>>': ['<Control-n>'],
+            '<<open-window-from-file>>': ['<Control-o>'],
+            '<<plain-newline-and-indent>>': ['<Control-j>'],
+            '<<print-window>>': ['<Control-p>'],
+            '<<redo>>': ['<Control-y>'],
+            '<<remove-selection>>': ['<Escape>'],
+            '<<save-copy-of-window-as-file>>': ['<Alt-Shift-S>'],
+            '<<save-window-as-file>>': ['<Alt-s>'],
+            '<<save-window>>': ['<Control-s>'],
+            '<<select-all>>': ['<Alt-a>'],
+            '<<toggle-auto-coloring>>': ['<Control-slash>'],
+            '<<undo>>': ['<Control-z>'],
+            '<<find-again>>': ['<Control-g>', '<F3>'],
+            '<<find-in-files>>': ['<Alt-F3>'],
+            '<<find-selection>>': ['<Control-F3>'],
+            '<<find>>': ['<Control-f>'],
+            '<<replace>>': ['<Control-h>'],
+            '<<goto-line>>': ['<Alt-g>'],
+            '<<smart-backspace>>': ['<Key-BackSpace>'],
+            '<<newline-and-indent>>': ['<Key-Return> <Key-KP_Enter>'],
+            '<<smart-indent>>': ['<Key-Tab>'],
+            '<<indent-region>>': ['<Control-Key-bracketright>'],
+            '<<dedent-region>>': ['<Control-Key-bracketleft>'],
+            '<<comment-region>>': ['<Alt-Key-3>'],
+            '<<uncomment-region>>': ['<Alt-Key-4>'],
+            '<<tabify-region>>': ['<Alt-Key-5>'],
+            '<<untabify-region>>': ['<Alt-Key-6>'],
+            '<<toggle-tabs>>': ['<Alt-Key-t>'],
+            '<<change-indentwidth>>': ['<Alt-Key-u>']
+            }
+        if keySetName:
+            for event in keyBindings.keys():
+                binding=self.GetKeyBinding(keySetName,event)
+                if binding:
+                    keyBindings[event]=binding
+                else: #we are going to return a default, print warning
+                    warning=('\n Warning: configHandler.py - IdleConf.GetCoreKeys'
+                               ' -\n problem retrieving key binding for event %r'
+                               '\n from key set %r.\n'
+                               ' returning default value: %r\n' %
+                               (event, keySetName, keyBindings[event]))
+                    sys.stderr.write(warning)
+        return keyBindings
+
+    def GetExtraHelpSourceList(self,configSet):
+        """Fetch list of extra help sources from a given configSet.
+
+        Valid configSets are 'user' or 'default'.  Return a list of tuples of
+        the form (menu_item , path_to_help_file , option), or return the empty
+        list.  'option' is the sequence number of the help resource.  'option'
+        values determine the position of the menu items on the Help menu,
+        therefore the returned list must be sorted by 'option'.
+
+        """
+        helpSources=[]
+        if configSet=='user':
+            cfgParser=self.userCfg['main']
+        elif configSet=='default':
+            cfgParser=self.defaultCfg['main']
+        else:
+            raise InvalidConfigSet, 'Invalid configSet specified'
+        options=cfgParser.GetOptionList('HelpFiles')
+        for option in options:
+            value=cfgParser.Get('HelpFiles',option,default=';')
+            if value.find(';')==-1: #malformed config entry with no ';'
+                menuItem='' #make these empty
+                helpPath='' #so value won't be added to list
+            else: #config entry contains ';' as expected
+                value=string.split(value,';')
+                menuItem=value[0].strip()
+                helpPath=value[1].strip()
+            if menuItem and helpPath: #neither are empty strings
+                helpSources.append( (menuItem,helpPath,option) )
+        helpSources.sort(self.__helpsort)
+        return helpSources
+
+    def __helpsort(self, h1, h2):
+        if int(h1[2]) < int(h2[2]):
+            return -1
+        elif int(h1[2]) > int(h2[2]):
+            return 1
+        else:
+            return 0
+
+    def GetAllExtraHelpSourcesList(self):
+        """
+        Returns a list of tuples containing the details of all additional help
+        sources configured, or an empty list if there are none. Tuples are of
+        the format returned by GetExtraHelpSourceList.
+        """
+        allHelpSources=( self.GetExtraHelpSourceList('default')+
+                self.GetExtraHelpSourceList('user') )
+        return allHelpSources
+
+    def LoadCfgFiles(self):
+        """
+        load all configuration files.
+        """
+        for key in self.defaultCfg.keys():
+            self.defaultCfg[key].Load()
+            self.userCfg[key].Load() #same keys
+
+    def SaveUserCfgFiles(self):
+        """
+        write all loaded user configuration files back to disk
+        """
+        for key in self.userCfg.keys():
+            self.userCfg[key].Save()
+
# Module-level singleton shared by all idle modules; created at import time
# (reads the config files from disk via IdleConf.__init__).
idleConf=IdleConf()
+
+### module test
+if __name__ == '__main__':
+    def dumpCfg(cfg):
+        print '\n',cfg,'\n'
+        for key in cfg.keys():
+            sections=cfg[key].sections()
+            print key
+            print sections
+            for section in sections:
+                options=cfg[key].options(section)
+                print section
+                print options
+                for option in options:
+                    print option, '=', cfg[key].Get(section,option)
+    dumpCfg(idleConf.defaultCfg)
+    dumpCfg(idleConf.userCfg)
+    print idleConf.userCfg['main'].Get('Theme','name')
+    #print idleConf.userCfg['highlight'].GetDefHighlight('Foo','normal')
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/configHelpSourceEdit.py b/depot_tools/release/win/python_24/Lib/idlelib/configHelpSourceEdit.py
new file mode 100644
index 0000000..3db1e0a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/configHelpSourceEdit.py
@@ -0,0 +1,169 @@
+"Dialog to specify or edit the parameters for a user configured help source."
+
+import os
+import sys
+
+from Tkinter import *
+import tkMessageBox
+import tkFileDialog
+
class GetHelpSourceDialog(Toplevel):
    """Modal dialog for adding or editing an Additional Help source.

    After the dialog closes, self.result is None if it was cancelled,
    otherwise a tuple (menu_item, path) of stripped strings.
    """
    def __init__(self, parent, title, menuItem='', filePath=''):
        """Get menu entry and url/ local file location for Additional Help

        User selects a name for the Help resource and provides a web url
        or a local file as its source.  The user can enter a url or browse
        for the file.

        menuItem -- string, initial value for the menu entry field
        filePath -- string, initial value for the path/URL field
        """
        Toplevel.__init__(self, parent)
        self.configure(borderwidth=5)
        self.resizable(height=FALSE, width=FALSE)
        self.title(title)
        # Modal setup: tie to parent, grab all events, and route the window
        # close button through Cancel so self.result stays None.
        self.transient(parent)
        self.grab_set()
        self.protocol("WM_DELETE_WINDOW", self.Cancel)
        self.parent = parent
        self.result = None
        self.CreateWidgets()
        self.menu.set(menuItem)
        self.path.set(filePath)
        self.withdraw() #hide while setting geometry
        #needs to be done here so that the winfo_reqwidth is valid
        self.update_idletasks()
        #centre dialog over parent:
        self.geometry("+%d+%d" %
                      ((parent.winfo_rootx() + ((parent.winfo_width()/2)
                                                -(self.winfo_reqwidth()/2)),
                        parent.winfo_rooty() + ((parent.winfo_height()/2)
                                                -(self.winfo_reqheight()/2)))))
        self.deiconify() #geometry set, unhide
        self.bind('<Return>', self.Ok)
        self.wait_window()

    def CreateWidgets(self):
        """Build the entry fields, browse button and OK/Cancel buttons."""
        self.menu = StringVar(self)
        self.path = StringVar(self)
        self.fontSize = StringVar(self)
        self.frameMain = Frame(self, borderwidth=2, relief=GROOVE)
        self.frameMain.pack(side=TOP, expand=TRUE, fill=BOTH)
        labelMenu = Label(self.frameMain, anchor=W, justify=LEFT,
                          text='Menu Item:')
        self.entryMenu = Entry(self.frameMain, textvariable=self.menu,
                               width=30)
        self.entryMenu.focus_set()
        labelPath = Label(self.frameMain, anchor=W, justify=LEFT,
                          text='Help File Path: Enter URL or browse for file')
        self.entryPath = Entry(self.frameMain, textvariable=self.path,
                               width=40)
        # (A duplicate entryMenu.focus_set() call here in the original was a
        # no-op -- nothing takes focus in between -- and has been removed.)
        labelMenu.pack(anchor=W, padx=5, pady=3)
        self.entryMenu.pack(anchor=W, padx=5, pady=3)
        labelPath.pack(anchor=W, padx=5, pady=3)
        self.entryPath.pack(anchor=W, padx=5, pady=3)
        browseButton = Button(self.frameMain, text='Browse', width=8,
                              command=self.browseFile)
        browseButton.pack(pady=3)
        frameButtons = Frame(self)
        frameButtons.pack(side=BOTTOM, fill=X)
        self.buttonOk = Button(frameButtons, text='OK',
                               width=8, default=ACTIVE, command=self.Ok)
        self.buttonOk.grid(row=0, column=0, padx=5, pady=5)
        self.buttonCancel = Button(frameButtons, text='Cancel',
                                   width=8, command=self.Cancel)
        self.buttonCancel.grid(row=0, column=1, padx=5, pady=5)

    def browseFile(self):
        """Open a file dialog and store the selected path in self.path."""
        filetypes = [
            ("HTML Files", "*.htm *.html", "TEXT"),
            ("PDF Files", "*.pdf", "TEXT"),
            ("Windows Help Files", "*.chm"),
            ("Text Files", "*.txt", "TEXT"),
            ("All Files", "*")]
        # Locals renamed from 'dir'/'file' to avoid shadowing builtins.
        path = self.path.get()
        if path:
            # Start browsing from the directory of the current entry.
            startDir, base = os.path.split(path)
        else:
            base = None
            if sys.platform[:3] == 'win':
                # Default to the installed Python's Doc directory on Windows.
                startDir = os.path.join(os.path.dirname(sys.executable),
                                        'Doc')
                if not os.path.isdir(startDir):
                    startDir = os.getcwd()
            else:
                startDir = os.getcwd()
        opendialog = tkFileDialog.Open(parent=self, filetypes=filetypes)
        chosen = opendialog.show(initialdir=startDir, initialfile=base)
        if chosen:
            self.path.set(chosen)

    def MenuOk(self):
        "Simple validity check for a sensible menu item name"
        menuOk = True
        # Bug fix: the original called menu.strip() and discarded the result
        # (strings are immutable), so an all-whitespace entry passed
        # validation.  Validate the stripped value instead, matching what
        # Ok() ultimately stores.
        menu = self.menu.get().strip()
        if not menu:
            tkMessageBox.showerror(title='Menu Item Error',
                                   message='No menu item specified',
                                   parent=self)
            self.entryMenu.focus_set()
            menuOk = False
        elif len(menu) > 30:
            tkMessageBox.showerror(title='Menu Item Error',
                                   message='Menu item too long:'
                                           '\nLimit 30 characters.',
                                   parent=self)
            self.entryMenu.focus_set()
            menuOk = False
        return menuOk

    def PathOk(self):
        "Simple validity check for menu file path"
        pathOk = True
        # Bug fix: same discarded-strip() bug as MenuOk.
        path = self.path.get().strip()
        if not path: #no path specified
            tkMessageBox.showerror(title='File Path Error',
                                   message='No help file path specified.',
                                   parent=self)
            self.entryPath.focus_set()
            pathOk = False
        elif path.startswith('www.') or path.startswith('http'):
            # Web addresses are accepted without an existence check.
            pass
        else:
            if path[:5] == 'file:':
                path = path[5:]
            if not os.path.exists(path):
                tkMessageBox.showerror(title='File Path Error',
                                       message='Help file path does not exist.',
                                       parent=self)
                self.entryPath.focus_set()
                pathOk = False
        return pathOk

    def Ok(self, event=None):
        """Validate both fields; on success store (menu, path) and close."""
        if self.MenuOk() and self.PathOk():
            menuItem = self.menu.get().strip()
            path = self.path.get().strip()
            if sys.platform == 'darwin':
                if (path.startswith('www') or path.startswith('file:')
                    or path.startswith('http:')):
                    pass
                else:
                    # Mac Safari insists on using the URI form for local
                    # files.  Bug fix: the original assigned to
                    # self.result[1] on a tuple, which raises TypeError;
                    # adjust the path before building the result tuple.
                    path = "file://" + path
            self.result = (menuItem, path)
            self.destroy()

    def Cancel(self, event=None):
        """Close the dialog, leaving self.result as None."""
        self.result = None
        self.destroy()
+
+if __name__ == '__main__':
+    #test the dialog
+    root = Tk()
+    def run():
+        keySeq = ''
+        dlg = GetHelpSourceDialog(root, 'Get Help Source')
+        print dlg.result
+    Button(root,text='Dialog', command=run).pack()
+    root.mainloop()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/configSectionNameDialog.py b/depot_tools/release/win/python_24/Lib/idlelib/configSectionNameDialog.py
new file mode 100644
index 0000000..4f1b002
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/configSectionNameDialog.py
@@ -0,0 +1,97 @@
+"""
+Dialog that allows user to specify a new config file section name.
+Used to get new highlight theme and keybinding set names.
+"""
+from Tkinter import *
+import tkMessageBox
+
+class GetCfgSectionNameDialog(Toplevel):
+    def __init__(self,parent,title,message,usedNames):
+        """
+        message - string, informational message to display
+        usedNames - list, list of names already in use for validity check
+        """
+        Toplevel.__init__(self, parent)
+        self.configure(borderwidth=5)
+        self.resizable(height=FALSE,width=FALSE)
+        self.title(title)
+        self.transient(parent)
+        self.grab_set()
+        self.protocol("WM_DELETE_WINDOW", self.Cancel)
+        self.parent = parent
+        self.message=message
+        self.usedNames=usedNames
+        self.result=''
+        self.CreateWidgets()
+        self.withdraw() #hide while setting geometry
+        self.update_idletasks()
+        #needs to be done here so that the winfo_reqwidth is valid
+        self.messageInfo.config(width=self.frameMain.winfo_reqwidth())
+        self.geometry("+%d+%d" %
+            ((parent.winfo_rootx()+((parent.winfo_width()/2)
+                -(self.winfo_reqwidth()/2)),
+              parent.winfo_rooty()+((parent.winfo_height()/2)
+                -(self.winfo_reqheight()/2)) )) ) #centre dialog over parent
+        self.deiconify() #geometry set, unhide
+        self.wait_window()
+
+    def CreateWidgets(self):
+        self.name=StringVar(self)
+        self.fontSize=StringVar(self)
+        self.frameMain = Frame(self,borderwidth=2,relief=SUNKEN)
+        self.frameMain.pack(side=TOP,expand=TRUE,fill=BOTH)
+        self.messageInfo=Message(self.frameMain,anchor=W,justify=LEFT,padx=5,pady=5,
+                text=self.message)#,aspect=200)
+        entryName=Entry(self.frameMain,textvariable=self.name,width=30)
+        entryName.focus_set()
+        self.messageInfo.pack(padx=5,pady=5)#,expand=TRUE,fill=BOTH)
+        entryName.pack(padx=5,pady=5)
+        frameButtons=Frame(self)
+        frameButtons.pack(side=BOTTOM,fill=X)
+        self.buttonOk = Button(frameButtons,text='Ok',
+                width=8,command=self.Ok)
+        self.buttonOk.grid(row=0,column=0,padx=5,pady=5)
+        self.buttonCancel = Button(frameButtons,text='Cancel',
+                width=8,command=self.Cancel)
+        self.buttonCancel.grid(row=0,column=1,padx=5,pady=5)
+
+    def NameOk(self):
+        #simple validity check for a sensible
+        #ConfigParser file section name
+        nameOk=1
+        name=self.name.get()
+        name.strip()
+        if not name: #no name specified
+            tkMessageBox.showerror(title='Name Error',
+                    message='No name specified.', parent=self)
+            nameOk=0
+        elif len(name)>30: #name too long
+            tkMessageBox.showerror(title='Name Error',
+                    message='Name too long. It should be no more than '+
+                    '30 characters.', parent=self)
+            nameOk=0
+        elif name in self.usedNames:
+            tkMessageBox.showerror(title='Name Error',
+                    message='This name is already in use.', parent=self)
+            nameOk=0
+        return nameOk
+
+    def Ok(self, event=None):
+        if self.NameOk():
+            self.result=self.name.get().strip()
+            self.destroy()
+
+    def Cancel(self, event=None):
+        self.result=''
+        self.destroy()
+
+if __name__ == '__main__':
+    #test the dialog
+    root=Tk()
+    def run():
+        keySeq=''
+        dlg=GetCfgSectionNameDialog(root,'Get Name',
+                'The information here should need to be word wrapped. Test.',[])
+        print dlg.result
+    Button(root,text='Dialog',command=run).pack()
+    root.mainloop()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/dynOptionMenuWidget.py b/depot_tools/release/win/python_24/Lib/idlelib/dynOptionMenuWidget.py
new file mode 100644
index 0000000..e81f7ba
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/dynOptionMenuWidget.py
@@ -0,0 +1,35 @@
+"""
+OptionMenu widget modified to allow dynamic menu reconfiguration
+and setting of highlightthickness
+"""
+from Tkinter import OptionMenu
+from Tkinter import _setit
+import copy
+
+class DynOptionMenu(OptionMenu):
+    """
+    unlike OptionMenu, our kwargs can include highlightthickness
+    """
+    def __init__(self, master, variable, value, *values, **kwargs):
+        #get a copy of kwargs before OptionMenu.__init__ munges them
+        kwargsCopy=copy.copy(kwargs)
+        if 'highlightthickness' in kwargs.keys():
+            del(kwargs['highlightthickness'])
+        OptionMenu.__init__(self, master, variable, value, *values, **kwargs)
+        self.config(highlightthickness=kwargsCopy.get('highlightthickness'))
+        #self.menu=self['menu']
+        self.variable=variable
+        self.command=kwargs.get('command')
+
+    def SetMenu(self,valueList,value=None):
+        """
+        clear and reload the menu with a new set of options.
+        valueList - list of new options
+        value - initial value to set the optionmenu's menubutton to
+        """
+        self['menu'].delete(0,'end')
+        for item in valueList:
+            self['menu'].add_command(label=item,
+                    command=_setit(self.variable,item,self.command))
+        if value:
+            self.variable.set(value)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/extend.txt b/depot_tools/release/win/python_24/Lib/idlelib/extend.txt
new file mode 100644
index 0000000..f5fb3e0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/extend.txt
@@ -0,0 +1,83 @@
+Writing an IDLE extension
+=========================
+
+An IDLE extension can define new key bindings and menu entries for IDLE
+edit windows.  There is a simple mechanism to load extensions when IDLE
+starts up and to attach them to each edit window. (It is also possible
+to make other changes to IDLE, but this must be done by editing the IDLE
+source code.)
+
+The list of extensions loaded at startup time is configured by editing
+the file config-extensions.def.  See below for details.
+
+An IDLE extension is defined by a class.  Methods of the class define
+actions that are invoked by event bindings or menu entries. Class (or
+instance) variables define the bindings and menu additions; these are
+automatically applied by IDLE when the extension is linked to an edit
+window.
+
+An IDLE extension class is instantiated with a single argument,
+`editwin', an EditorWindow instance. The extension cannot assume much
+about this argument, but it is guaranteed to have the following instance
+variables:
+
+    text	a Text instance (a widget)
+    io		an IOBinding instance (more about this later)
+    flist	the FileList instance (shared by all edit windows)
+
+(There are a few more, but they are rarely useful.)
+
+The extension class must not directly bind Window Manager (e.g. X) events.
+Rather, it must define one or more virtual events, e.g. <<zoom-height>>, and
+corresponding methods, e.g. zoom_height_event().  The virtual events will be
+bound to the corresponding methods, and Window Manager events can then be bound
+to the virtual events. (This indirection is done so that the key bindings can
+easily be changed, and so that other sources of virtual events can exist, such
+as menu entries.)
+
+An extension can define menu entries.  This is done with a class or instance
+variable named menudefs; it should be a list of pairs, where each pair is a
+menu name (lowercase) and a list of menu entries. Each menu entry is either
+None (to insert a separator entry) or a pair of strings (menu_label,
+virtual_event).  Here, menu_label is the label of the menu entry, and
+virtual_event is the virtual event to be generated when the entry is selected.
+An underscore in the menu label is removed; the character following the
+underscore is displayed underlined, to indicate the shortcut character (for
+Windows).
+
+At the moment, extensions cannot define whole new menus; they must define
+entries in existing menus.  Some menus are not present on some windows; such
+entry definitions are then ignored, but key bindings are still applied.  (This
+should probably be refined in the future.)
+
+Extensions are not required to define menu entries for all the events they
+implement.  (They are also not required to create keybindings, but in that
+case there must be empty bindings in config-extensions.def)
+
+Here is a complete example:
+
+class ZoomHeight:
+
+    menudefs = [
+        ('edit', [
+            None, # Separator
+            ('_Zoom Height', '<<zoom-height>>'),
+         ])
+    ]
+
+    def __init__(self, editwin):
+        self.editwin = editwin
+
+    def zoom_height_event(self, event):
+        "...Do what you want here..."
+
+The final piece of the puzzle is the file "config-extensions.def", which is
+used to configure the loading of extensions and to establish key (or, more
+generally, event) bindings to the virtual events defined in the extensions.
+
+See the comments at the top of config-extensions.def for information.  It's
+currently necessary to manually modify that file to change IDLE's extension
+loading or extension key bindings.
+
+For further information on binding refer to the Tkinter Resources web page at
+python.org and to the Tk Command "bind" man page.
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/help.txt b/depot_tools/release/win/python_24/Lib/idlelib/help.txt
new file mode 100644
index 0000000..e91e8c6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/help.txt
@@ -0,0 +1,212 @@
+[See the end of this file for ** TIPS ** on using IDLE !!]
+
+Click on the dotted line at the top of a menu to "tear it off": a
+separate window containing the menu is created.
+
+File Menu:
+
+	New Window       -- Create a new editing window
+	Open...          -- Open an existing file
+	Recent Files...  -- Open a list of recent files
+	Open Module...   -- Open an existing module (searches sys.path)
+	Class Browser    -- Show classes and methods in current file
+	Path Browser     -- Show sys.path directories, modules, classes
+                            and methods
+	---
+	Save             -- Save current window to the associated file (unsaved
+		            windows have a * before and after the window title)
+
+	Save As...       -- Save current window to new file, which becomes
+		            the associated file
+	Save Copy As...  -- Save current window to different file
+		            without changing the associated file
+	---
+	Print Window     -- Print the current window
+	---
+	Close            -- Close current window (asks to save if unsaved)
+	Exit             -- Close all windows, quit (asks to save if unsaved)
+
+Edit Menu:
+
+	Undo             -- Undo last change to current window
+                            (A maximum of 1000 changes may be undone)
+	Redo             -- Redo last undone change to current window
+	---
+	Cut              -- Copy a selection into system-wide clipboard,
+                            then delete the selection
+	Copy             -- Copy selection into system-wide clipboard
+	Paste            -- Insert system-wide clipboard into window
+	Select All       -- Select the entire contents of the edit buffer
+	---
+	Find...          -- Open a search dialog box with many options
+	Find Again       -- Repeat last search
+	Find Selection   -- Search for the string in the selection
+	Find in Files... -- Open a search dialog box for searching files
+	Replace...       -- Open a search-and-replace dialog box
+	Go to Line       -- Ask for a line number and show that line
+	Expand Word      -- Expand the word you have typed to match another
+		            word in the same buffer; repeat to get a
+                            different expansion
+
+Format Menu (only in Edit window):
+
+	Indent Region       -- Shift selected lines right 4 spaces
+	Dedent Region       -- Shift selected lines left 4 spaces
+	Comment Out Region  -- Insert ## in front of selected lines
+	Uncomment Region    -- Remove leading # or ## from selected lines
+	Tabify Region       -- Turns *leading* stretches of spaces into tabs
+		(Note: We recommend using 4 space blocks to indent Python code.)
+	Untabify Region     -- Turn *all* tabs into the right number of spaces
+	New Indent Width... -- Open dialog to change indent width
+	Format Paragraph    -- Reformat the current blank-line-separated
+                               paragraph
+
+Run Menu (only in Edit window):
+
+	Python Shell -- Open or wake up the Python shell window
+	---
+	Check Module -- Run a syntax check on the module
+	Run Module   -- Execute the current file in the __main__ namespace
+
+Shell Menu (only in Shell window):
+
+	View Last Restart -- Scroll the shell window to the last restart
+	Restart Shell     -- Restart the interpreter with a fresh environment
+
+Debug Menu (only in Shell window):
+
+	Go to File/Line   -- look around the insert point for a filename
+		             and linenumber, open the file, and show the line
+	Debugger (toggle) -- Run commands in the shell under the debugger
+	Stack Viewer      -- Show the stack traceback of the last exception
+	Auto-open Stack Viewer (toggle) -- Open stack viewer on traceback
+
+Options Menu:
+
+	Configure IDLE -- Open a configuration dialog.  Fonts, indentation,
+                          keybindings, and color themes may be altered.
+                          Startup Preferences may be set, and Additional Help
+                          Sources can be specified.
+	---
+	Code Context --	  Open a pane at the top of the edit window which
+			  shows the block context of the section of code
+			  which is scrolling off the top of the window.
+
+Windows Menu:
+
+	Zoom Height -- toggles the window between configured size
+	and maximum height.
+	---
+	The rest of this menu lists the names of all open windows;
+	select one to bring it to the foreground (deiconifying it if
+	necessary).
+
+Help Menu:
+
+	About IDLE  -- Version, copyright, license, credits
+	IDLE Readme -- Background discussion and change details
+	---
+	IDLE Help   -- Display this file
+	Python Docs -- Access local Python documentation, if
+		       installed.  Otherwise, access www.python.org.
+	---
+	(Additional Help Sources may be added here)
+
+
+** TIPS **
+==========
+
+Additional Help Sources:
+
+	Windows users can Google on zopeshelf.chm to access Zope help files in
+	the Windows help format.  The Additional Help Sources feature of the
+	configuration GUI supports .chm, along with any other filetypes
+	supported by your browser.  Supply a Menu Item title, and enter the
+	location in the Help File Path slot of the New Help Source dialog.  Use
+	http:// and/or www. to identify external URLs, or download the file and
+	browse for its path on your machine using the Browse button.
+
+	All users can access the extensive sources of help, including
+	tutorials, available at www.python.org/doc.  Selected URLs can be added
+	or removed from the Help menu at any time using Configure IDLE.
+
+Basic editing and navigation:
+
+	Backspace deletes to the left; DEL deletes to the right.
+	Arrow keys and Page Up/Down move around.
+	Control-left/right Arrow moves by words in a strange but useful way.
+	Home/End go to begin/end of line.
+	Control-Home/End go to begin/end of file.
+	Some useful Emacs bindings (Control-a, Control-e, Control-k, etc.)
+		are inherited from Tcl/Tk.
+	Standard Windows bindings may work on that platform.
+	Keybindings are selected in the Settings Dialog, look there.
+
+Automatic indentation:
+
+	After a block-opening statement, the next line is indented by 4 spaces
+	(in the Python Shell window by one tab).  After certain keywords
+	(break, return etc.) the next line is dedented.  In leading
+	indentation, Backspace deletes up to 4 spaces if they are there.  Tab
+	inserts spaces (in the Python Shell window one tab), number depends on
+	Indent Width.  (N.B. Currently tabs are restricted to four spaces due
+	to Tcl/Tk issues.)
+
+        See also the indent/dedent region commands in the edit menu.
+
+Python Shell window:
+
+	Control-c interrupts executing command.
+	Control-d sends end-of-file; closes window if typed at >>> prompt
+		(this is Control-z on Windows).
+
+    Command history:
+
+	Alt-p retrieves previous command matching what you have typed.
+	Alt-n retrieves next.
+	      (These are Control-p, Control-n on the Mac)      
+	Return while cursor is on a previous command retrieves that command.
+	Expand word is also useful to reduce typing.
+
+    Syntax colors:
+
+	The coloring is applied in a background "thread", so you may
+	occasionally see uncolorized text.  To change the color
+	scheme, use the Configure IDLE / Highlighting dialog.
+
+    Python default syntax colors:
+
+	Keywords	orange
+	Builtins	royal purple
+	Strings		green
+	Comments	red
+	Definitions	blue
+
+    Shell default colors:
+
+	Console output	brown
+	stdout		blue
+	stderr		red
+	stdin		black
+
+Other preferences:
+
+	The font preferences, keybinding, and startup preferences can
+	be changed using the Settings dialog.
+
+Command line usage:
+	
+	Enter idle -h at the command prompt to get a usage message.
+
+Running without a subprocess:
+
+	If IDLE is started with the -n command line switch it will run in a
+	single process and will not create the subprocess which runs the RPC
+	Python execution server.  This can be useful if Python cannot create
+	the subprocess or the RPC socket interface on your platform.  However,
+	in this mode user code is not isolated from IDLE itself.  Also, the
+	environment is not restarted when Run/Run Module (F5) is selected.  If
+	your code has been modified, you must reload() the affected modules and
+	re-import any specific items (e.g. from foo import baz) if the changes
+	are to take effect.  For these reasons, it is preferable to run IDLE
+	with the default subprocess if at all possible.
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/idle.bat b/depot_tools/release/win/python_24/Lib/idlelib/idle.bat
new file mode 100644
index 0000000..c1b5fd2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/idle.bat
@@ -0,0 +1,3 @@
+@echo off
+rem Working IDLE bat for Windows - uses start instead of absolute pathname
+start idle.pyw %1 %2 %3 %4 %5 %6 %7 %8 %9
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/idle.py b/depot_tools/release/win/python_24/Lib/idlelib/idle.py
new file mode 100644
index 0000000..537dd5a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/idle.py
@@ -0,0 +1,21 @@
+try:
+    import idlelib.PyShell
+except ImportError:
+    # IDLE is not installed, but maybe PyShell is on sys.path:
+    try:
+        import PyShell
+    except ImportError:
+        raise
+    else:
+        import os
+        idledir = os.path.dirname(os.path.abspath(PyShell.__file__))
+        if idledir != os.getcwd():
+            # We're not in the IDLE directory, help the subprocess find run.py
+            pypath = os.environ.get('PYTHONPATH', '')
+            if pypath:
+                os.environ['PYTHONPATH'] = pypath + ':' + idledir
+            else:
+                os.environ['PYTHONPATH'] = idledir
+        PyShell.main()
+else:
+    idlelib.PyShell.main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/idle.pyw b/depot_tools/release/win/python_24/Lib/idlelib/idle.pyw
new file mode 100644
index 0000000..537dd5a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/idle.pyw
@@ -0,0 +1,21 @@
+try:
+    import idlelib.PyShell
+except ImportError:
+    # IDLE is not installed, but maybe PyShell is on sys.path:
+    try:
+        import PyShell
+    except ImportError:
+        raise
+    else:
+        import os
+        idledir = os.path.dirname(os.path.abspath(PyShell.__file__))
+        if idledir != os.getcwd():
+            # We're not in the IDLE directory, help the subprocess find run.py
+            pypath = os.environ.get('PYTHONPATH', '')
+            if pypath:
+                os.environ['PYTHONPATH'] = pypath + ':' + idledir
+            else:
+                os.environ['PYTHONPATH'] = idledir
+        PyShell.main()
+else:
+    idlelib.PyShell.main()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/idlever.py b/depot_tools/release/win/python_24/Lib/idlelib/idlever.py
new file mode 100644
index 0000000..46b58aa4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/idlever.py
@@ -0,0 +1 @@
+IDLE_VERSION = "1.1.1"
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/keybindingDialog.py b/depot_tools/release/win/python_24/Lib/idlelib/keybindingDialog.py
new file mode 100644
index 0000000..200936a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/keybindingDialog.py
@@ -0,0 +1,261 @@
+"""
+Dialog for building Tkinter accelerator key bindings
+"""
+from Tkinter import *
+import tkMessageBox
+import string, os
+
+class GetKeysDialog(Toplevel):
+    def __init__(self,parent,title,action,currentKeySequences):
+        """
+        action - string, the name of the virtual event these keys will be
+                 mapped to
+        currentKeys - list, a list of all key sequence lists currently mapped
+                 to virtual events, for overlap checking
+        """
+        Toplevel.__init__(self, parent)
+        self.configure(borderwidth=5)
+        self.resizable(height=FALSE,width=FALSE)
+        self.title(title)
+        self.transient(parent)
+        self.grab_set()
+        self.protocol("WM_DELETE_WINDOW", self.Cancel)
+        self.parent = parent
+        self.action=action
+        self.currentKeySequences=currentKeySequences
+        self.result=''
+        self.keyString=StringVar(self)
+        self.keyString.set('')
+        self.SetModifiersForPlatform()
+        self.modifier_vars = []
+        for modifier in self.modifiers:
+            variable = StringVar(self)
+            variable.set('')
+            self.modifier_vars.append(variable)
+        self.CreateWidgets()
+        self.LoadFinalKeyList()
+        self.withdraw() #hide while setting geometry
+        self.update_idletasks()
+        self.geometry("+%d+%d" %
+            ((parent.winfo_rootx()+((parent.winfo_width()/2)
+                -(self.winfo_reqwidth()/2)),
+              parent.winfo_rooty()+((parent.winfo_height()/2)
+                -(self.winfo_reqheight()/2)) )) ) #centre dialog over parent
+        self.deiconify() #geometry set, unhide
+        self.wait_window()
+
+    def CreateWidgets(self):
+        frameMain = Frame(self,borderwidth=2,relief=SUNKEN)
+        frameMain.pack(side=TOP,expand=TRUE,fill=BOTH)
+        frameButtons=Frame(self)
+        frameButtons.pack(side=BOTTOM,fill=X)
+        self.buttonOK = Button(frameButtons,text='OK',
+                width=8,command=self.OK)
+        self.buttonOK.grid(row=0,column=0,padx=5,pady=5)
+        self.buttonCancel = Button(frameButtons,text='Cancel',
+                width=8,command=self.Cancel)
+        self.buttonCancel.grid(row=0,column=1,padx=5,pady=5)
+        self.frameKeySeqBasic = Frame(frameMain)
+        self.frameKeySeqAdvanced = Frame(frameMain)
+        self.frameControlsBasic = Frame(frameMain)
+        self.frameHelpAdvanced = Frame(frameMain)
+        self.frameKeySeqAdvanced.grid(row=0,column=0,sticky=NSEW,padx=5,pady=5)
+        self.frameKeySeqBasic.grid(row=0,column=0,sticky=NSEW,padx=5,pady=5)
+        self.frameKeySeqBasic.lift()
+        self.frameHelpAdvanced.grid(row=1,column=0,sticky=NSEW,padx=5)
+        self.frameControlsBasic.grid(row=1,column=0,sticky=NSEW,padx=5)
+        self.frameControlsBasic.lift()
+        self.buttonLevel = Button(frameMain,command=self.ToggleLevel,
+                text='Advanced Key Binding Entry >>')
+        self.buttonLevel.grid(row=2,column=0,stick=EW,padx=5,pady=5)
+        labelTitleBasic = Label(self.frameKeySeqBasic,
+                text="New keys for  '"+self.action+"' :")
+        labelTitleBasic.pack(anchor=W)
+        labelKeysBasic = Label(self.frameKeySeqBasic,justify=LEFT,
+                textvariable=self.keyString,relief=GROOVE,borderwidth=2)
+        labelKeysBasic.pack(ipadx=5,ipady=5,fill=X)
+        self.modifier_checkbuttons = {}
+        column = 0
+        for modifier, variable in zip(self.modifiers, self.modifier_vars):
+            label = self.modifier_label.get(modifier, modifier)
+            check=Checkbutton(self.frameControlsBasic,
+                command=self.BuildKeyString,
+                text=label,variable=variable,onvalue=modifier,offvalue='')
+            check.grid(row=0,column=column,padx=2,sticky=W)
+            self.modifier_checkbuttons[modifier] = check
+            column += 1
+        labelFnAdvice=Label(self.frameControlsBasic,justify=LEFT,
+                            text=\
+                            "Select the desired modifier keys\n"+
+                            "above, and the final key from the\n"+
+                            "list on the right.\n\n" +
+                            "Use upper case Symbols when using\n" +
+                            "the Shift modifier.  (Letters will be\n" +
+                            "converted automatically.)")
+        labelFnAdvice.grid(row=1,column=0,columnspan=4,padx=2,sticky=W)
+        self.listKeysFinal=Listbox(self.frameControlsBasic,width=15,height=10,
+                selectmode=SINGLE)
+        self.listKeysFinal.bind('<ButtonRelease-1>',self.FinalKeySelected)
+        self.listKeysFinal.grid(row=0,column=4,rowspan=4,sticky=NS)
+        scrollKeysFinal=Scrollbar(self.frameControlsBasic,orient=VERTICAL,
+                command=self.listKeysFinal.yview)
+        self.listKeysFinal.config(yscrollcommand=scrollKeysFinal.set)
+        scrollKeysFinal.grid(row=0,column=5,rowspan=4,sticky=NS)
+        self.buttonClear=Button(self.frameControlsBasic,
+                text='Clear Keys',command=self.ClearKeySeq)
+        self.buttonClear.grid(row=2,column=0,columnspan=4)
+        labelTitleAdvanced = Label(self.frameKeySeqAdvanced,justify=LEFT,
+                text="Enter new binding(s) for  '"+self.action+"' :\n"+
+                "(These bindings will not be checked for validity!)")
+        labelTitleAdvanced.pack(anchor=W)
+        self.entryKeysAdvanced=Entry(self.frameKeySeqAdvanced,
+                textvariable=self.keyString)
+        self.entryKeysAdvanced.pack(fill=X)
+        labelHelpAdvanced=Label(self.frameHelpAdvanced,justify=LEFT,
+            text="Key bindings are specified using Tkinter keysyms as\n"+
+                 "in these samples: <Control-f>, <Shift-F2>, <F12>,\n"
+                 "<Control-space>, <Meta-less>, <Control-Alt-Shift-X>.\n"
+                 "Upper case is used when the Shift modifier is present!\n\n" +
+                 "'Emacs style' multi-keystroke bindings are specified as\n" +
+                 "follows: <Control-x><Control-y>, where the first key\n" +
+                 "is the 'do-nothing' keybinding.\n\n" +
+                 "Multiple separate bindings for one action should be\n"+
+                 "separated by a space, eg., <Alt-v> <Meta-v>." )
+        labelHelpAdvanced.grid(row=0,column=0,sticky=NSEW)
+
+    def SetModifiersForPlatform(self):
+        """Determine list of names of key modifiers for this platform.
+
+        The names are used to build Tk bindings -- it doesn't matter if the
+        keyboard has these keys, it matters if Tk understands them. The
+        order is also important: key binding equality depends on it, so
+        config-keys.def must use the same ordering.
+        """
+        import sys
+        if sys.platform == 'darwin' and sys.executable.count('.app'):
+            self.modifiers = ['Shift', 'Control', 'Option', 'Command']
+        else:
+            self.modifiers = ['Control', 'Alt', 'Shift']
+        self.modifier_label = {'Control': 'Ctrl'}
+
+    def ToggleLevel(self):
+        if  self.buttonLevel.cget('text')[:8]=='Advanced':
+            self.ClearKeySeq()
+            self.buttonLevel.config(text='<< Basic Key Binding Entry')
+            self.frameKeySeqAdvanced.lift()
+            self.frameHelpAdvanced.lift()
+            self.entryKeysAdvanced.focus_set()
+        else:
+            self.ClearKeySeq()
+            self.buttonLevel.config(text='Advanced Key Binding Entry >>')
+            self.frameKeySeqBasic.lift()
+            self.frameControlsBasic.lift()
+
+    def FinalKeySelected(self,event):
+        self.BuildKeyString()
+
+    def BuildKeyString(self):
+        keyList = modifiers = self.GetModifiers()
+        finalKey = self.listKeysFinal.get(ANCHOR)
+        if finalKey:
+            finalKey = self.TranslateKey(finalKey, modifiers)
+            keyList.append(finalKey)
+        self.keyString.set('<' + string.join(keyList,'-') + '>')
+
+    def GetModifiers(self):
+        modList = [variable.get() for variable in self.modifier_vars]
+        return filter(None, modList)
+
+    def ClearKeySeq(self):
+        self.listKeysFinal.select_clear(0,END)
+        self.listKeysFinal.yview(MOVETO, '0.0')
+        for variable in self.modifier_vars:
+            variable.set('')
+        self.keyString.set('')
+
+    def LoadFinalKeyList(self):
+        #these tuples are also available for use in validity checks
+        self.functionKeys=('F1','F2','F3','F4','F5','F6','F7','F8','F9',
+                'F10','F11','F12')
+        self.alphanumKeys=tuple(string.ascii_lowercase+string.digits)
+        self.punctuationKeys=tuple('~!@#%^&*()_-+={}[]|;:,.<>/?')
+        self.whitespaceKeys=('Tab','Space','Return')
+        self.editKeys=('BackSpace','Delete','Insert')
+        self.moveKeys=('Home','End','Page Up','Page Down','Left Arrow',
+                'Right Arrow','Up Arrow','Down Arrow')
+        #make a tuple of most of the useful common 'final' keys
+        keys=(self.alphanumKeys+self.punctuationKeys+self.functionKeys+
+                self.whitespaceKeys+self.editKeys+self.moveKeys)
+        self.listKeysFinal.insert(END, *keys)
+
+    def TranslateKey(self, key, modifiers):
+        "Translate from keycap symbol to the Tkinter keysym"
+        translateDict = {'Space':'space',
+                '~':'asciitilde','!':'exclam','@':'at','#':'numbersign',
+                '%':'percent','^':'asciicircum','&':'ampersand','*':'asterisk',
+                '(':'parenleft',')':'parenright','_':'underscore','-':'minus',
+                '+':'plus','=':'equal','{':'braceleft','}':'braceright',
+                '[':'bracketleft',']':'bracketright','|':'bar',';':'semicolon',
+                ':':'colon',',':'comma','.':'period','<':'less','>':'greater',
+                '/':'slash','?':'question','Page Up':'Prior','Page Down':'Next',
+                'Left Arrow':'Left','Right Arrow':'Right','Up Arrow':'Up',
+                'Down Arrow': 'Down', 'Tab':'tab'}
+        if key in translateDict.keys():
+            key = translateDict[key]
+        if 'Shift' in modifiers and key in string.ascii_lowercase:
+            key = key.upper()
+        key = 'Key-' + key
+        return key
+
+    def OK(self, event=None):
+        if self.KeysOK():
+            self.result=self.keyString.get()
+            self.destroy()
+
+    def Cancel(self, event=None):
+        self.result=''
+        self.destroy()
+
+    def KeysOK(self):
+        "Validity check on user's keybinding selection"
+        keys = self.keyString.get()
+        keys.strip()
+        finalKey = self.listKeysFinal.get(ANCHOR)
+        modifiers = self.GetModifiers()
+        # create a key sequence list for overlap check:
+        keySequence = keys.split()
+        keysOK = False
+        title = 'Key Sequence Error'
+        if not keys:
+            tkMessageBox.showerror(title=title, parent=self,
+                                   message='No keys specified.')
+        elif not keys.endswith('>'):
+            tkMessageBox.showerror(title=title, parent=self,
+                                   message='Missing the final Key')
+        elif not modifiers and finalKey not in self.functionKeys:
+            tkMessageBox.showerror(title=title, parent=self,
+                                   message='No modifier key(s) specified.')
+        elif (modifiers == ['Shift']) \
+                 and (finalKey not in
+                      self.functionKeys + ('Tab', 'Space')):
+            msg = 'The shift modifier by itself may not be used with' \
+                  ' this key symbol; only with F1-F12, Tab, or Space'
+            tkMessageBox.showerror(title=title, parent=self,
+                                   message=msg)
+        elif keySequence in self.currentKeySequences:
+            msg = 'This key combination is already in use.'
+            tkMessageBox.showerror(title=title, parent=self,
+                                   message=msg)
+        else:
+            keysOK = True
+        return keysOK
+
+if __name__ == '__main__':
+    #test the dialog
+    root=Tk()
+    def run():
+        keySeq=''
+        dlg=GetKeysDialog(root,'Get Keys','find-again',[])
+        print dlg.result
+    Button(root,text='Dialog',command=run).pack()
+    root.mainloop()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/rpc.py b/depot_tools/release/win/python_24/Lib/idlelib/rpc.py
new file mode 100644
index 0000000..d097f9b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/rpc.py
@@ -0,0 +1,595 @@
+"""RPC Implemention, originally written for the Python Idle IDE
+
+For security reasons, GvR requested that Idle's Python execution server process
+connect to the Idle process, which listens for the connection.  Since Idle
+has only one client per server, this was not a limitation.
+
+   +---------------------------------+ +-------------+
+   | SocketServer.BaseRequestHandler | | SocketIO    |
+   +---------------------------------+ +-------------+
+                   ^                   | register()  |
+                   |                   | unregister()|
+                   |                   +-------------+
+                   |                      ^  ^
+                   |                      |  |
+                   | + -------------------+  |
+                   | |                       |
+   +-------------------------+        +-----------------+
+   | RPCHandler              |        | RPCClient       |
+   | [attribute of RPCServer]|        |                 |
+   +-------------------------+        +-----------------+
+
+The RPCServer handler class is expected to provide register/unregister methods.
+RPCHandler inherits the mix-in class SocketIO, which provides these methods.
+
+See the Idle run.main() docstring for further information on how this was
+accomplished in Idle.
+
+"""
+
+import sys
+import os
+import socket
+import select
+import SocketServer
+import struct
+import cPickle as pickle
+import threading
+import Queue
+import traceback
+import copy_reg
+import types
+import marshal
+
+
+def unpickle_code(ms):
+    co = marshal.loads(ms)
+    assert isinstance(co, types.CodeType)
+    return co
+
+def pickle_code(co):
+    assert isinstance(co, types.CodeType)
+    ms = marshal.dumps(co)
+    return unpickle_code, (ms,)
+
+# XXX KBK 24Aug02 function pickling capability not used in Idle
+#  def unpickle_function(ms):
+#      return ms
+
+#  def pickle_function(fn):
+#      assert isinstance(fn, types.FunctionType)
+#      return repr(fn)
+
+copy_reg.pickle(types.CodeType, pickle_code, unpickle_code)
+# copy_reg.pickle(types.FunctionType, pickle_function, unpickle_function)
+
+BUFSIZE = 8*1024
+LOCALHOST = '127.0.0.1'
+
+class RPCServer(SocketServer.TCPServer):
+
+    def __init__(self, addr, handlerclass=None):
+        if handlerclass is None:
+            handlerclass = RPCHandler
+        SocketServer.TCPServer.__init__(self, addr, handlerclass)
+
+    def server_bind(self):
+        "Override TCPServer method, no bind() phase for connecting entity"
+        pass
+
+    def server_activate(self):
+        """Override TCPServer method, connect() instead of listen()
+
+        Due to the reversed connection, self.server_address is actually the
+        address of the Idle Client to which we are connecting.
+
+        """
+        self.socket.connect(self.server_address)
+
+    def get_request(self):
+        "Override TCPServer method, return already connected socket"
+        return self.socket, self.server_address
+
+    def handle_error(self, request, client_address):
+        """Override TCPServer method
+
+        Error message goes to __stderr__.  No error message if exiting
+        normally or socket raised EOF.  Other exceptions not handled in
+        server code will cause os._exit.
+
+        """
+        try:
+            raise
+        except SystemExit:
+            raise
+        except:
+            erf = sys.__stderr__
+            print>>erf, '\n' + '-'*40
+            print>>erf, 'Unhandled server exception!'
+            print>>erf, 'Thread: %s' % threading.currentThread().getName()
+            print>>erf, 'Client Address: ', client_address
+            print>>erf, 'Request: ', repr(request)
+            traceback.print_exc(file=erf)
+            print>>erf, '\n*** Unrecoverable, server exiting!'
+            print>>erf, '-'*40
+            os._exit(0)
+
+#----------------- end class RPCServer --------------------
+
+objecttable = {}
+request_queue = Queue.Queue(0)
+response_queue = Queue.Queue(0)
+
+
+class SocketIO:
+
+    nextseq = 0
+
+    def __init__(self, sock, objtable=None, debugging=None):
+        self.sockthread = threading.currentThread()
+        if debugging is not None:
+            self.debugging = debugging
+        self.sock = sock
+        if objtable is None:
+            objtable = objecttable
+        self.objtable = objtable
+        self.responses = {}
+        self.cvars = {}
+
+    def close(self):
+        sock = self.sock
+        self.sock = None
+        if sock is not None:
+            sock.close()
+
+    def exithook(self):
+        "override for specific exit action"
+        os._exit()
+
+    def debug(self, *args):
+        if not self.debugging:
+            return
+        s = self.location + " " + str(threading.currentThread().getName())
+        for a in args:
+            s = s + " " + str(a)
+        print>>sys.__stderr__, s
+
+    def register(self, oid, object):
+        self.objtable[oid] = object
+
+    def unregister(self, oid):
+        try:
+            del self.objtable[oid]
+        except KeyError:
+            pass
+
+    def localcall(self, seq, request):
+        self.debug("localcall:", request)
+        try:
+            how, (oid, methodname, args, kwargs) = request
+        except TypeError:
+            return ("ERROR", "Bad request format")
+        if not self.objtable.has_key(oid):
+            return ("ERROR", "Unknown object id: %r" % (oid,))
+        obj = self.objtable[oid]
+        if methodname == "__methods__":
+            methods = {}
+            _getmethods(obj, methods)
+            return ("OK", methods)
+        if methodname == "__attributes__":
+            attributes = {}
+            _getattributes(obj, attributes)
+            return ("OK", attributes)
+        if not hasattr(obj, methodname):
+            return ("ERROR", "Unsupported method name: %r" % (methodname,))
+        method = getattr(obj, methodname)
+        try:
+            if how == 'CALL':
+                ret = method(*args, **kwargs)
+                if isinstance(ret, RemoteObject):
+                    ret = remoteref(ret)
+                return ("OK", ret)
+            elif how == 'QUEUE':
+                request_queue.put((seq, (method, args, kwargs)))
+                return("QUEUED", None)
+            else:
+                return ("ERROR", "Unsupported message type: %s" % how)
+        except SystemExit:
+            raise
+        except socket.error:
+            raise
+        except:
+            self.debug("localcall:EXCEPTION")
+            traceback.print_exc(file=sys.__stderr__)
+            return ("EXCEPTION", None)
+
+    def remotecall(self, oid, methodname, args, kwargs):
+        self.debug("remotecall:asynccall: ", oid, methodname)
+        seq = self.asynccall(oid, methodname, args, kwargs)
+        return self.asyncreturn(seq)
+
+    def remotequeue(self, oid, methodname, args, kwargs):
+        self.debug("remotequeue:asyncqueue: ", oid, methodname)
+        seq = self.asyncqueue(oid, methodname, args, kwargs)
+        return self.asyncreturn(seq)
+
+    def asynccall(self, oid, methodname, args, kwargs):
+        request = ("CALL", (oid, methodname, args, kwargs))
+        seq = self.newseq()
+        if threading.currentThread() != self.sockthread:
+            cvar = threading.Condition()
+            self.cvars[seq] = cvar
+        self.debug(("asynccall:%d:" % seq), oid, methodname, args, kwargs)
+        self.putmessage((seq, request))
+        return seq
+
+    def asyncqueue(self, oid, methodname, args, kwargs):
+        request = ("QUEUE", (oid, methodname, args, kwargs))
+        seq = self.newseq()
+        if threading.currentThread() != self.sockthread:
+            cvar = threading.Condition()
+            self.cvars[seq] = cvar
+        self.debug(("asyncqueue:%d:" % seq), oid, methodname, args, kwargs)
+        self.putmessage((seq, request))
+        return seq
+
+    def asyncreturn(self, seq):
+        self.debug("asyncreturn:%d:call getresponse(): " % seq)
+        response = self.getresponse(seq, wait=0.05)
+        self.debug(("asyncreturn:%d:response: " % seq), response)
+        return self.decoderesponse(response)
+
+    def decoderesponse(self, response):
+        how, what = response
+        if how == "OK":
+            return what
+        if how == "QUEUED":
+            return None
+        if how == "EXCEPTION":
+            self.debug("decoderesponse: EXCEPTION")
+            return None
+        if how == "EOF":
+            self.debug("decoderesponse: EOF")
+            self.decode_interrupthook()
+            return None
+        if how == "ERROR":
+            self.debug("decoderesponse: Internal ERROR:", what)
+            raise RuntimeError, what
+        raise SystemError, (how, what)
+
+    def decode_interrupthook(self):
+        ""
+        raise EOFError
+
+    def mainloop(self):
+        """Listen on socket until I/O not ready or EOF
+
+        pollresponse() will loop looking for seq number None, which
+        never comes, and exit on EOFError.
+
+        """
+        try:
+            self.getresponse(myseq=None, wait=0.05)
+        except EOFError:
+            self.debug("mainloop:return")
+            return
+
+    def getresponse(self, myseq, wait):
+        response = self._getresponse(myseq, wait)
+        if response is not None:
+            how, what = response
+            if how == "OK":
+                response = how, self._proxify(what)
+        return response
+
+    def _proxify(self, obj):
+        if isinstance(obj, RemoteProxy):
+            return RPCProxy(self, obj.oid)
+        if isinstance(obj, types.ListType):
+            return map(self._proxify, obj)
+        # XXX Check for other types -- not currently needed
+        return obj
+
+    def _getresponse(self, myseq, wait):
+        self.debug("_getresponse:myseq:", myseq)
+        if threading.currentThread() is self.sockthread:
+            # this thread does all reading of requests or responses
+            while 1:
+                response = self.pollresponse(myseq, wait)
+                if response is not None:
+                    return response
+        else:
+            # wait for notification from socket handling thread
+            cvar = self.cvars[myseq]
+            cvar.acquire()
+            while not self.responses.has_key(myseq):
+                cvar.wait()
+            response = self.responses[myseq]
+            self.debug("_getresponse:%s: thread woke up: response: %s" %
+                       (myseq, response))
+            del self.responses[myseq]
+            del self.cvars[myseq]
+            cvar.release()
+            return response
+
+    def newseq(self):
+        self.nextseq = seq = self.nextseq + 2
+        return seq
+
+    def putmessage(self, message):
+        self.debug("putmessage:%d:" % message[0])
+        try:
+            s = pickle.dumps(message)
+        except pickle.PicklingError:
+            print >>sys.__stderr__, "Cannot pickle:", repr(message)
+            raise
+        s = struct.pack("<i", len(s)) + s
+        while len(s) > 0:
+            try:
+                r, w, x = select.select([], [self.sock], [])
+                n = self.sock.send(s[:BUFSIZE])
+            except (AttributeError, socket.error):
+                # socket was closed
+                raise IOError
+            else:
+                s = s[n:]
+
+    buffer = ""
+    bufneed = 4
+    bufstate = 0 # meaning: 0 => reading count; 1 => reading data
+
+    def pollpacket(self, wait):
+        self._stage0()
+        if len(self.buffer) < self.bufneed:
+            r, w, x = select.select([self.sock.fileno()], [], [], wait)
+            if len(r) == 0:
+                return None
+            try:
+                s = self.sock.recv(BUFSIZE)
+            except socket.error:
+                raise EOFError
+            if len(s) == 0:
+                raise EOFError
+            self.buffer += s
+            self._stage0()
+        return self._stage1()
+
+    def _stage0(self):
+        if self.bufstate == 0 and len(self.buffer) >= 4:
+            s = self.buffer[:4]
+            self.buffer = self.buffer[4:]
+            self.bufneed = struct.unpack("<i", s)[0]
+            self.bufstate = 1
+
+    def _stage1(self):
+        if self.bufstate == 1 and len(self.buffer) >= self.bufneed:
+            packet = self.buffer[:self.bufneed]
+            self.buffer = self.buffer[self.bufneed:]
+            self.bufneed = 4
+            self.bufstate = 0
+            return packet
+
+    def pollmessage(self, wait):
+        packet = self.pollpacket(wait)
+        if packet is None:
+            return None
+        try:
+            message = pickle.loads(packet)
+        except pickle.UnpicklingError:
+            print >>sys.__stderr__, "-----------------------"
+            print >>sys.__stderr__, "cannot unpickle packet:", repr(packet)
+            traceback.print_stack(file=sys.__stderr__)
+            print >>sys.__stderr__, "-----------------------"
+            raise
+        return message
+
+    def pollresponse(self, myseq, wait):
+        """Handle messages received on the socket.
+
+        Some messages received may be asynchronous 'call' or 'queue' requests,
+        and some may be responses for other threads.
+
+        'call' requests are passed to self.localcall() with the expectation of
+        immediate execution, during which time the socket is not serviced.
+
+        'queue' requests are used for tasks (which may block or hang) to be
+        processed in a different thread.  These requests are fed into
+        request_queue by self.localcall().  Responses to queued requests are
+        taken from response_queue and sent across the link with the associated
+        sequence numbers.  Messages in the queues are (sequence_number,
+        request/response) tuples, and code using this module that removes
+        from the request_queue is responsible for returning the correct
+        sequence number in the response_queue.
+
+        pollresponse() will loop until a response message with the myseq
+        sequence number is received, and will save other responses in
+        self.responses and notify the owning thread.
+
+        """
+        while 1:
+            # send queued response if there is one available
+            try:
+                qmsg = response_queue.get(0)
+            except Queue.Empty:
+                pass
+            else:
+                seq, response = qmsg
+                message = (seq, ('OK', response))
+                self.putmessage(message)
+            # poll for message on link
+            try:
+                message = self.pollmessage(wait)
+                if message is None:  # socket not ready
+                    return None
+            except EOFError:
+                self.handle_EOF()
+                return None
+            except AttributeError:
+                return None
+            seq, resq = message
+            how = resq[0]
+            self.debug("pollresponse:%d:myseq:%s" % (seq, myseq))
+            # process or queue a request
+            if how in ("CALL", "QUEUE"):
+                self.debug("pollresponse:%d:localcall:call:" % seq)
+                response = self.localcall(seq, resq)
+                self.debug("pollresponse:%d:localcall:response:%s"
+                           % (seq, response))
+                if how == "CALL":
+                    self.putmessage((seq, response))
+                elif how == "QUEUE":
+                    # don't acknowledge the 'queue' request!
+                    pass
+                continue
+            # return if completed message transaction
+            elif seq == myseq:
+                return resq
+            # must be a response for a different thread:
+            else:
+                cv = self.cvars.get(seq, None)
+                # response involving unknown sequence number is discarded,
+                # probably intended for prior incarnation of server
+                if cv is not None:
+                    cv.acquire()
+                    self.responses[seq] = resq
+                    cv.notify()
+                    cv.release()
+                continue
+
+    def handle_EOF(self):
+        "action taken upon link being closed by peer"
+        self.EOFhook()
+        self.debug("handle_EOF")
+        for key in self.cvars:
+            cv = self.cvars[key]
+            cv.acquire()
+            self.responses[key] = ('EOF', None)
+            cv.notify()
+            cv.release()
+        # call our (possibly overridden) exit function
+        self.exithook()
+
+    def EOFhook(self):
+        "Classes using rpc client/server can override to augment EOF action"
+        pass
+
+#----------------- end class SocketIO --------------------
+
+class RemoteObject:
+    # Token mix-in class
+    pass
+
+def remoteref(obj):
+    oid = id(obj)
+    objecttable[oid] = obj
+    return RemoteProxy(oid)
+
+class RemoteProxy:
+
+    def __init__(self, oid):
+        self.oid = oid
+
+class RPCHandler(SocketServer.BaseRequestHandler, SocketIO):
+
+    debugging = False
+    location = "#S"  # Server
+
+    def __init__(self, sock, addr, svr):
+        svr.current_handler = self ## cgt xxx
+        SocketIO.__init__(self, sock)
+        SocketServer.BaseRequestHandler.__init__(self, sock, addr, svr)
+
+    def handle(self):
+        "handle() method required by SocketServer"
+        self.mainloop()
+
+    def get_remote_proxy(self, oid):
+        return RPCProxy(self, oid)
+
+class RPCClient(SocketIO):
+
+    debugging = False
+    location = "#C"  # Client
+
+    nextseq = 1 # Requests coming from the client are odd numbered
+
+    def __init__(self, address, family=socket.AF_INET, type=socket.SOCK_STREAM):
+        self.listening_sock = socket.socket(family, type)
+        self.listening_sock.setsockopt(socket.SOL_SOCKET,
+                                       socket.SO_REUSEADDR, 1)
+        self.listening_sock.bind(address)
+        self.listening_sock.listen(1)
+
+    def accept(self):
+        working_sock, address = self.listening_sock.accept()
+        if self.debugging:
+            print>>sys.__stderr__, "****** Connection request from ", address
+        if address[0] == LOCALHOST:
+            SocketIO.__init__(self, working_sock)
+        else:
+            print>>sys.__stderr__, "** Invalid host: ", address
+            raise socket.error
+
+    def get_remote_proxy(self, oid):
+        return RPCProxy(self, oid)
+
+class RPCProxy:
+
+    __methods = None
+    __attributes = None
+
+    def __init__(self, sockio, oid):
+        self.sockio = sockio
+        self.oid = oid
+
+    def __getattr__(self, name):
+        if self.__methods is None:
+            self.__getmethods()
+        if self.__methods.get(name):
+            return MethodProxy(self.sockio, self.oid, name)
+        if self.__attributes is None:
+            self.__getattributes()
+        if not self.__attributes.has_key(name):
+            raise AttributeError, name
+
+    def __getattributes(self):
+        self.__attributes = self.sockio.remotecall(self.oid,
+                                                "__attributes__", (), {})
+
+    def __getmethods(self):
+        self.__methods = self.sockio.remotecall(self.oid,
+                                                "__methods__", (), {})
+
+def _getmethods(obj, methods):
+    # Helper to get a list of methods from an object
+    # Adds names to dictionary argument 'methods'
+    for name in dir(obj):
+        attr = getattr(obj, name)
+        if callable(attr):
+            methods[name] = 1
+    if type(obj) == types.InstanceType:
+        _getmethods(obj.__class__, methods)
+    if type(obj) == types.ClassType:
+        for super in obj.__bases__:
+            _getmethods(super, methods)
+
+def _getattributes(obj, attributes):
+    for name in dir(obj):
+        attr = getattr(obj, name)
+        if not callable(attr):
+            attributes[name] = 1
+
+class MethodProxy:
+
+    def __init__(self, sockio, oid, name):
+        self.sockio = sockio
+        self.oid = oid
+        self.name = name
+
+    def __call__(self, *args, **kwargs):
+        value = self.sockio.remotecall(self.oid, self.name, args, kwargs)
+        return value
+
+
+# XXX KBK 09Sep03  We need a proper unit test for this module.  Previously
+#                  existing test code was removed at Rev 1.27.
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/run.py b/depot_tools/release/win/python_24/Lib/idlelib/run.py
new file mode 100644
index 0000000..90a4692
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/run.py
@@ -0,0 +1,322 @@
+import sys
+import os
+import linecache
+import time
+import socket
+import traceback
+import thread
+import threading
+import Queue
+
+import CallTips
+import RemoteDebugger
+import RemoteObjectBrowser
+import StackViewer
+import rpc
+
+import __main__
+
+LOCALHOST = '127.0.0.1'
+
+try:
+    import warnings
+except ImportError:
+    pass
+else:
+    def idle_formatwarning_subproc(message, category, filename, lineno):
+        """Format warnings the IDLE way"""
+        s = "\nWarning (from warnings module):\n"
+        s += '  File \"%s\", line %s\n' % (filename, lineno)
+        line = linecache.getline(filename, lineno).strip()
+        if line:
+            s += "    %s\n" % line
+        s += "%s: %s\n" % (category.__name__, message)
+        return s
+    warnings.formatwarning = idle_formatwarning_subproc
+
+# Thread shared globals: Establish a queue between a subthread (which handles
+# the socket) and the main thread (which runs user code), plus global
+# completion and exit flags:
+
+exit_now = False
+quitting = False
+
+def main(del_exitfunc=False):
+    """Start the Python execution server in a subprocess
+
+    In the Python subprocess, RPCServer is instantiated with handlerclass
+    MyHandler, which inherits register/unregister methods from RPCHandler via
+    the mix-in class SocketIO.
+
+    When the RPCServer 'server' is instantiated, the TCPServer initialization
+    creates an instance of run.MyHandler and calls its handle() method.
+    handle() instantiates a run.Executive object, passing it a reference to the
+    MyHandler object.  That reference is saved as attribute rpchandler of the
+    Executive instance.  The Executive methods have access to the reference and
+    can pass it on to entities that they command
+    (e.g. RemoteDebugger.Debugger.start_debugger()).  The latter, in turn, can
+    call MyHandler(SocketIO) register/unregister methods via the reference to
+    register and unregister themselves.
+
+    """
+    global exit_now
+    global quitting
+    global no_exitfunc
+    no_exitfunc = del_exitfunc
+    port = 8833
+    #time.sleep(15) # test subprocess not responding
+    if sys.argv[1:]:
+        port = int(sys.argv[1])
+    sys.argv[:] = [""]
+    sockthread = threading.Thread(target=manage_socket,
+                                  name='SockThread',
+                                  args=((LOCALHOST, port),))
+    sockthread.setDaemon(True)
+    sockthread.start()
+    while 1:
+        try:
+            if exit_now:
+                try:
+                    exit()
+                except KeyboardInterrupt:
+                    # exiting but got an extra KBI? Try again!
+                    continue
+            try:
+                seq, request = rpc.request_queue.get(0)
+            except Queue.Empty:
+                time.sleep(0.05)
+                continue
+            method, args, kwargs = request
+            ret = method(*args, **kwargs)
+            rpc.response_queue.put((seq, ret))
+        except KeyboardInterrupt:
+            if quitting:
+                exit_now = True
+            continue
+        except SystemExit:
+            raise
+        except:
+            type, value, tb = sys.exc_info()
+            try:
+                print_exception()
+                rpc.response_queue.put((seq, None))
+            except:
+                # Link didn't work, print same exception to __stderr__
+                traceback.print_exception(type, value, tb, file=sys.__stderr__)
+                exit()
+            else:
+                continue
+
+def manage_socket(address):
+    for i in range(3):
+        time.sleep(i)
+        try:
+            server = MyRPCServer(address, MyHandler)
+            break
+        except socket.error, err:
+            print>>sys.__stderr__,"IDLE Subprocess: socket error: "\
+                                        + err[1] + ", retrying...."
+    else:
+        print>>sys.__stderr__, "IDLE Subprocess: Connection to "\
+                               "IDLE GUI failed, exiting."
+        show_socket_error(err, address)
+        global exit_now
+        exit_now = True
+        return
+    server.handle_request() # A single request only
+
+def show_socket_error(err, address):
+    import Tkinter
+    import tkMessageBox
+    root = Tkinter.Tk()
+    root.withdraw()
+    if err[0] == 61: # connection refused
+        msg = "IDLE's subprocess can't connect to %s:%d.  This may be due "\
+              "to your personal firewall configuration.  It is safe to "\
+              "allow this internal connection because no data is visible on "\
+              "external ports." % address
+        tkMessageBox.showerror("IDLE Subprocess Error", msg, parent=root)
+    else:
+        tkMessageBox.showerror("IDLE Subprocess Error", "Socket Error: %s" % err[1])
+    root.destroy()
+
+def print_exception():
+    import linecache
+    linecache.checkcache()
+    flush_stdout()
+    efile = sys.stderr
+    typ, val, tb = excinfo = sys.exc_info()
+    sys.last_type, sys.last_value, sys.last_traceback = excinfo
+    tbe = traceback.extract_tb(tb)
+    print>>efile, '\nTraceback (most recent call last):'
+    exclude = ("run.py", "rpc.py", "threading.py", "Queue.py",
+               "RemoteDebugger.py", "bdb.py")
+    cleanup_traceback(tbe, exclude)
+    traceback.print_list(tbe, file=efile)
+    lines = traceback.format_exception_only(typ, val)
+    for line in lines:
+        print>>efile, line,
+
+def cleanup_traceback(tb, exclude):
+    "Remove excluded traces from beginning/end of tb; get cached lines"
+    orig_tb = tb[:]
+    while tb:
+        for rpcfile in exclude:
+            if tb[0][0].count(rpcfile):
+                break    # found an exclude, break for: and delete tb[0]
+        else:
+            break        # no excludes, have left RPC code, break while:
+        del tb[0]
+    while tb:
+        for rpcfile in exclude:
+            if tb[-1][0].count(rpcfile):
+                break
+        else:
+            break
+        del tb[-1]
+    if len(tb) == 0:
+        # exception was in IDLE internals, don't prune!
+        tb[:] = orig_tb[:]
+        print>>sys.stderr, "** IDLE Internal Exception: "
+    rpchandler = rpc.objecttable['exec'].rpchandler
+    for i in range(len(tb)):
+        fn, ln, nm, line = tb[i]
+        if nm == '?':
+            nm = "-toplevel-"
+        if not line and fn.startswith("<pyshell#"):
+            line = rpchandler.remotecall('linecache', 'getline',
+                                              (fn, ln), {})
+        tb[i] = fn, ln, nm, line
+
+def flush_stdout():
+    try:
+        if sys.stdout.softspace:
+            sys.stdout.softspace = 0
+            sys.stdout.write("\n")
+    except (AttributeError, EOFError):
+        pass
+
+def exit():
+    """Exit subprocess, possibly after first deleting sys.exitfunc
+
+    If config-main.cfg/.def 'General' 'delete-exitfunc' is True, then any
+    sys.exitfunc will be removed before exiting.  (VPython support)
+
+    """
+    if no_exitfunc:
+        del sys.exitfunc
+    sys.exit(0)
+
+class MyRPCServer(rpc.RPCServer):
+
+    def handle_error(self, request, client_address):
+        """Override RPCServer method for IDLE
+
+        Interrupt the MainThread and exit server if link is dropped.
+
+        """
+        global quitting
+        try:
+            raise
+        except SystemExit:
+            raise
+        except EOFError:
+            global exit_now
+            exit_now = True
+            thread.interrupt_main()
+        except:
+            erf = sys.__stderr__
+            print>>erf, '\n' + '-'*40
+            print>>erf, 'Unhandled server exception!'
+            print>>erf, 'Thread: %s' % threading.currentThread().getName()
+            print>>erf, 'Client Address: ', client_address
+            print>>erf, 'Request: ', repr(request)
+            traceback.print_exc(file=erf)
+            print>>erf, '\n*** Unrecoverable, server exiting!'
+            print>>erf, '-'*40
+            quitting = True
+            thread.interrupt_main()
+
+
+class MyHandler(rpc.RPCHandler):
+
+    def handle(self):
+        """Override base method"""
+        executive = Executive(self)
+        self.register("exec", executive)
+        sys.stdin = self.console = self.get_remote_proxy("stdin")
+        sys.stdout = self.get_remote_proxy("stdout")
+        sys.stderr = self.get_remote_proxy("stderr")
+        import IOBinding
+        sys.stdin.encoding = sys.stdout.encoding = \
+                             sys.stderr.encoding = IOBinding.encoding
+        self.interp = self.get_remote_proxy("interp")
+        rpc.RPCHandler.getresponse(self, myseq=None, wait=0.05)
+
+    def exithook(self):
+        "override SocketIO method - wait for MainThread to shut us down"
+        time.sleep(10)
+
+    def EOFhook(self):
+        "Override SocketIO method - terminate wait on callback and exit thread"
+        global quitting
+        quitting = True
+        thread.interrupt_main()
+
+    def decode_interrupthook(self):
+        "interrupt awakened thread"
+        global quitting
+        quitting = True
+        thread.interrupt_main()
+
+
+class Executive:
+
+    def __init__(self, rpchandler):
+        self.rpchandler = rpchandler
+        self.locals = __main__.__dict__
+        self.calltip = CallTips.CallTips()
+
+    def runcode(self, code):
+        try:
+            self.usr_exc_info = None
+            exec code in self.locals
+        except:
+            self.usr_exc_info = sys.exc_info()
+            if quitting:
+                exit()
+            # even print a user code SystemExit exception, continue
+            print_exception()
+            jit = self.rpchandler.console.getvar("<<toggle-jit-stack-viewer>>")
+            if jit:
+                self.rpchandler.interp.open_remote_stack_viewer()
+        else:
+            flush_stdout()
+
+    def interrupt_the_server(self):
+        thread.interrupt_main()
+
+    def start_the_debugger(self, gui_adap_oid):
+        return RemoteDebugger.start_debugger(self.rpchandler, gui_adap_oid)
+
+    def stop_the_debugger(self, idb_adap_oid):
+        "Unregister the Idb Adapter.  Link objects and Idb then subject to GC"
+        self.rpchandler.unregister(idb_adap_oid)
+
+    def get_the_calltip(self, name):
+        return self.calltip.fetch_tip(name)
+
+    def stackviewer(self, flist_oid=None):
+        if self.usr_exc_info:
+            typ, val, tb = self.usr_exc_info
+        else:
+            return None
+        flist = None
+        if flist_oid is not None:
+            flist = self.rpchandler.get_remote_proxy(flist_oid)
+        while tb and tb.tb_frame.f_globals["__name__"] in ["rpc", "run"]:
+            tb = tb.tb_next
+        sys.last_type = typ
+        sys.last_value = val
+        item = StackViewer.StackTreeItem(flist, tb)
+        return RemoteObjectBrowser.remote_object_tree_item(item)
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/tabpage.py b/depot_tools/release/win/python_24/Lib/idlelib/tabpage.py
new file mode 100644
index 0000000..12f8929
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/tabpage.py
@@ -0,0 +1,113 @@
+"""
+a couple of classes for implementing partial tabbed-page like behaviour
+"""
+
+from Tkinter import *
+
+# Exceptions raised by TabPageSet when given a bad page name.
+class InvalidTabPage(Exception): pass
+class AlreadyExists(Exception): pass
+
+class PageTab(Frame):
+    """
+    a 'page tab' like framed button
+    """
+    def __init__(self,parent):
+        # A ridge-bordered frame around a single Radiobutton; with
+        # indicatoron off and selectcolor matching the background, the
+        # radiobutton renders as a flat push-button style tab.
+        Frame.__init__(self, parent,borderwidth=2,relief=RIDGE)
+        self.button=Radiobutton(self,padx=5,pady=5,takefocus=FALSE,
+                indicatoron=FALSE,highlightthickness=0,
+                borderwidth=0,selectcolor=self.cget('bg'))
+        self.button.pack()
+
+class TabPageSet(Frame):
+    """
+    a set of 'pages' with TabButtons for controlling their display
+    """
+    def __init__(self,parent,pageNames=[],**kw):
+        """
+        pageNames - a list of strings, each string will be the dictionary key
+        to a page's data, and the name displayed on the page's tab. Should be
+        specified in desired page order. The first page will be the default
+        and first active page.
+        """
+        Frame.__init__(self, parent, kw)
+        self.grid_location(0,0)
+        self.columnconfigure(0,weight=1)
+        self.rowconfigure(1,weight=1)
+        # Row 0 holds the tab bar; row 1 holds the stacked page frames.
+        self.tabBar=Frame(self)
+        self.tabBar.grid(row=0,column=0,sticky=EW)
+        # activePage is also the shared variable of all the tab buttons.
+        self.activePage=StringVar(self)
+        self.defaultPage=''
+        # Maps page name -> {'tab': PageTab, 'page': Frame}.
+        self.pages={}
+        for name in pageNames:
+            self.AddPage(name)
+
+    def ChangePage(self,pageName=None):
+        # Switch to pageName, or refresh the current page when omitted.
+        # Raises InvalidTabPage for an unknown name.
+        if pageName:
+            if pageName in self.pages.keys():
+                self.activePage.set(pageName)
+            else:
+                raise InvalidTabPage, 'Invalid TabPage Name'
+        ## pop up the active 'tab' only
+        for page in self.pages.keys():
+            self.pages[page]['tab'].config(relief=RIDGE)
+        self.pages[self.GetActivePage()]['tab'].config(relief=RAISED)
+        ## switch page
+        self.pages[self.GetActivePage()]['page'].lift()
+
+    def GetActivePage(self):
+        # Name of the currently displayed page.
+        return self.activePage.get()
+
+    def AddPage(self,pageName):
+        # Create a tab and a page frame for pageName; the first page added
+        # becomes the default and active page.  Raises AlreadyExists for a
+        # duplicate name.
+        if pageName in self.pages.keys():
+            raise AlreadyExists, 'TabPage Name Already Exists'
+        self.pages[pageName]={'tab':PageTab(self.tabBar),
+                'page':Frame(self,borderwidth=2,relief=RAISED)}
+        self.pages[pageName]['tab'].button.config(text=pageName,
+                command=self.ChangePage,variable=self.activePage,
+                value=pageName)
+        self.pages[pageName]['tab'].pack(side=LEFT)
+        self.pages[pageName]['page'].grid(row=1,column=0,sticky=NSEW)
+        if len(self.pages)==1: # adding first page
+            self.defaultPage=pageName
+            self.activePage.set(self.defaultPage)
+            self.ChangePage()
+
+    def RemovePage(self,pageName):
+        # Destroy pageName's tab and page, then repair the default/active
+        # page bookkeeping.  Raises InvalidTabPage for an unknown name.
+        if not pageName in self.pages.keys():
+            raise InvalidTabPage, 'Invalid TabPage Name'
+        self.pages[pageName]['tab'].pack_forget()
+        self.pages[pageName]['page'].grid_forget()
+        self.pages[pageName]['tab'].destroy()
+        self.pages[pageName]['page'].destroy()
+        del(self.pages[pageName])
+        # handle removing last remaining, or default, or active page
+        if not self.pages: # removed last remaining page
+            self.defaultPage=''
+            return
+        if pageName==self.defaultPage: # set a new default page
+            self.defaultPage=\
+                self.tabBar.winfo_children()[0].button.cget('text')
+        if pageName==self.GetActivePage(): # set a new active page
+            self.activePage.set(self.defaultPage)
+        self.ChangePage()
+
+if __name__ == '__main__':
+    #test dialog
+    # Interactive smoke test: a two-page TabPageSet plus an entry and
+    # buttons for adding/removing pages by name.
+    root=Tk()
+    tabPage=TabPageSet(root,pageNames=['Foobar','Baz'])
+    tabPage.pack(expand=TRUE,fill=BOTH)
+    Label(tabPage.pages['Foobar']['page'],text='Foo',pady=20).pack()
+    Label(tabPage.pages['Foobar']['page'],text='Bar',pady=20).pack()
+    Label(tabPage.pages['Baz']['page'],text='Baz').pack()
+    entryPgName=Entry(root)
+    buttonAdd=Button(root,text='Add Page',
+            command=lambda:tabPage.AddPage(entryPgName.get()))
+    buttonRemove=Button(root,text='Remove Page',
+            command=lambda:tabPage.RemovePage(entryPgName.get()))
+    labelPgName=Label(root,text='name of page to add/remove:')
+    buttonAdd.pack(padx=5,pady=5)
+    buttonRemove.pack(padx=5,pady=5)
+    labelPgName.pack(padx=5)
+    entryPgName.pack(padx=5)
+    tabPage.ChangePage()
+    root.mainloop()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/testcode.py b/depot_tools/release/win/python_24/Lib/idlelib/testcode.py
new file mode 100644
index 0000000..05eaa56
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/testcode.py
@@ -0,0 +1,31 @@
+import string
+
+# Apparently a manual test fixture for IDLE's debugger/stack viewer:
+# f() descends through a deep call chain (g -> h -> i -> j -> k -> l ->
+# test) and test() then raises by passing an int where string.capwords()
+# expects a string, yielding a many-frame traceback to display.
+
+def f():
+    a = 0
+    b = 1
+    c = 2
+    d = 3
+    e = 4
+    g()
+
+def g():
+    h()
+
+def h():
+    i()
+
+def i():
+    j()
+
+def j():
+    k()
+
+def k():
+    l()
+
+# A lambda link in the chain, so one traceback frame is a <lambda>.
+l = lambda: test()
+
+def test():
+    string.capwords(1)  # raises: capwords requires a string argument
+
+f()
diff --git a/depot_tools/release/win/python_24/Lib/idlelib/textView.py b/depot_tools/release/win/python_24/Lib/idlelib/textView.py
new file mode 100644
index 0000000..917a6cc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/idlelib/textView.py
@@ -0,0 +1,78 @@
+"""Simple text browser for IDLE
+
+"""
+
+from Tkinter import *
+import tkMessageBox
+
+class TextViewer(Toplevel):
+    """
+    simple text viewer dialog for idle
+    """
+    def __init__(self, parent, title, fileName, data=None):
+        """If data exists, load it into viewer, otherwise try to load file.
+
+        fileName - string, should be an absolute filename
+        """
+        Toplevel.__init__(self, parent)
+        self.configure(borderwidth=5)
+        # Fixed 625x500 window, offset slightly from the parent's corner.
+        self.geometry("=%dx%d+%d+%d" % (625, 500,
+                                        parent.winfo_rootx() + 10,
+                                        parent.winfo_rooty() + 10))
+        #elguavas - config placeholders til config stuff completed
+        self.bg = '#ffffff'
+        self.fg = '#000000'
+
+        self.CreateWidgets()
+        self.title(title)
+        # Modal behaviour: tied to parent, grabbing events, window close
+        # routed through Ok().
+        self.transient(parent)
+        self.grab_set()
+        self.protocol("WM_DELETE_WINDOW", self.Ok)
+        self.parent = parent
+        self.textView.focus_set()
+        #key bindings for this dialog
+        self.bind('<Return>',self.Ok) #dismiss dialog
+        self.bind('<Escape>',self.Ok) #dismiss dialog
+        if data:
+            self.textView.insert(0.0, data)
+        else:
+            self.LoadTextFile(fileName)
+        # Text widget is read-only from here on.
+        self.textView.config(state=DISABLED)
+        self.wait_window()
+
+    def LoadTextFile(self, fileName):
+        # Read fileName into the text widget; shows an error dialog (and
+        # leaves the viewer empty) if the file cannot be opened.
+        textFile = None
+        try:
+            textFile = open(fileName, 'r')
+        except IOError:
+            tkMessageBox.showerror(title='File Load Error',
+                    message='Unable to load file %r .' % (fileName,))
+        else:
+            self.textView.insert(0.0,textFile.read())
+
+    def CreateWidgets(self):
+        # Build a scrollable Text area above a Close button.
+        frameText = Frame(self, relief=SUNKEN, height=700)
+        frameButtons = Frame(self)
+        self.buttonOk = Button(frameButtons, text='Close',
+                               command=self.Ok, takefocus=FALSE)
+        self.scrollbarView = Scrollbar(frameText, orient=VERTICAL,
+                                       takefocus=FALSE, highlightthickness=0)
+        self.textView = Text(frameText, wrap=WORD, highlightthickness=0,
+                             fg=self.fg, bg=self.bg)
+        self.scrollbarView.config(command=self.textView.yview)
+        self.textView.config(yscrollcommand=self.scrollbarView.set)
+        self.buttonOk.pack()
+        self.scrollbarView.pack(side=RIGHT,fill=Y)
+        self.textView.pack(side=LEFT,expand=TRUE,fill=BOTH)
+        frameButtons.pack(side=BOTTOM,fill=X)
+        frameText.pack(side=TOP,expand=TRUE,fill=BOTH)
+
+    def Ok(self, event=None):
+        # Dismiss the dialog (bound to Return/Escape and window close).
+        self.destroy()
+
+if __name__ == '__main__':
+    #test the dialog
+    # Opens this source file itself in a TextViewer when 'View' is pressed.
+    root=Tk()
+    Button(root,text='View',
+            command=lambda:TextViewer(root,'Text','./textView.py')).pack()
+    root.mainloop()
diff --git a/depot_tools/release/win/python_24/Lib/ihooks.py b/depot_tools/release/win/python_24/Lib/ihooks.py
new file mode 100644
index 0000000..f5b93ab
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/ihooks.py
@@ -0,0 +1,520 @@
+"""Import hook support.
+
+Consistent use of this module will make it possible to change the
+different mechanisms involved in loading modules independently.
+
+While the built-in module imp exports interfaces to the built-in
+module searching and loading algorithm, and it is possible to replace
+the built-in function __import__ in order to change the semantics of
+the import statement, until now it has been difficult to combine the
+effect of different __import__ hacks, like loading modules from URLs
+by rimport.py, or restricted execution by rexec.py.
+
+This module defines three new concepts:
+
+1) A "file system hooks" class provides an interface to a filesystem.
+
+One hooks class is defined (Hooks), which uses the interface provided
+by standard modules os and os.path.  It should be used as the base
+class for other hooks classes.
+
+2) A "module loader" class provides an interface to search for a
+module in a search path and to load it.  It defines a method which
+searches for a module in a single directory; by overriding this method
+one can redefine the details of the search.  If the directory is None,
+built-in and frozen modules are searched instead.
+
+Two module loader class are defined, both implementing the search
+strategy used by the built-in __import__ function: ModuleLoader uses
+the imp module's find_module interface, while HookableModuleLoader
+uses a file system hooks class to interact with the file system.  Both
+use the imp module's load_* interfaces to actually load the module.
+
+3) A "module importer" class provides an interface to import a
+module, as well as interfaces to reload and unload a module.  It also
+provides interfaces to install and uninstall itself instead of the
+default __import__ and reload (and unload) functions.
+
+One module importer class is defined (ModuleImporter), which uses a
+module loader instance passed in (by default HookableModuleLoader is
+instantiated).
+
+The classes defined here should be used as base classes for extended
+functionality along those lines.
+
+If a module importer class supports dotted names, its import_module()
+must return a different value depending on whether it is called on
+behalf of a "from ... import ..." statement or not.  (This is caused
+by the way the __import__ hook is used by the Python interpreter.)  It
+would also do wise to install a different version of reload().
+
+"""
+
+
+import __builtin__
+import imp
+import os
+import sys
+
+__all__ = ["BasicModuleLoader","Hooks","ModuleLoader","FancyModuleLoader",
+           "BasicModuleImporter","ModuleImporter","install","uninstall"]
+
+VERBOSE = 0
+
+
+from imp import C_EXTENSION, PY_SOURCE, PY_COMPILED
+from imp import C_BUILTIN, PY_FROZEN, PKG_DIRECTORY
+BUILTIN_MODULE = C_BUILTIN
+FROZEN_MODULE = PY_FROZEN
+
+
+class _Verbose:
+    # Small mixin giving the loader/importer classes below an optional
+    # diagnostic channel: note() prints via message() only when the
+    # verbose flag is set.
+
+    def __init__(self, verbose = VERBOSE):
+        self.verbose = verbose
+
+    def get_verbose(self):
+        return self.verbose
+
+    def set_verbose(self, verbose):
+        self.verbose = verbose
+
+    # XXX The following is an experimental interface
+
+    def note(self, *args):
+        if self.verbose:
+            self.message(*args)
+
+    def message(self, format, *args):
+        # %-format only when args are given, so callers may pass a plain
+        # string that contains '%' without it being interpreted.
+        if args:
+            print format%args
+        else:
+            print format
+
+
+class BasicModuleLoader(_Verbose):
+
+    """Basic module loader.
+
+    This provides the same functionality as built-in import.  It
+    doesn't deal with checking sys.modules -- all it provides is
+    find_module() and a load_module(), as well as find_module_in_dir()
+    which searches just one directory, and can be overridden by a
+    derived class to change the module search algorithm when the basic
+    dependency on sys.path is unchanged.
+
+    The interface is a little more convenient than imp's:
+    find_module(name, [path]) returns None or 'stuff', and
+    load_module(name, stuff) loads the module.
+
+    """
+
+    def find_module(self, name, path = None):
+        # The None entry at the head of the path stands for the
+        # built-in/frozen module namespace.
+        if path is None:
+            path = [None] + self.default_path()
+        for dir in path:
+            stuff = self.find_module_in_dir(name, dir)
+            if stuff: return stuff
+        return None
+
+    def default_path(self):
+        return sys.path
+
+    def find_module_in_dir(self, name, dir):
+        if dir is None:
+            return self.find_builtin_module(name)
+        else:
+            try:
+                return imp.find_module(name, [dir])
+            except ImportError:
+                return None
+
+    def find_builtin_module(self, name):
+        # XXX frozen packages?
+        # Result mimics imp.find_module()'s (file, filename, info) triple,
+        # with a dummy file and filename.
+        if imp.is_builtin(name):
+            return None, '', ('', '', BUILTIN_MODULE)
+        if imp.is_frozen(name):
+            return None, '', ('', '', FROZEN_MODULE)
+        return None
+
+    def load_module(self, name, stuff):
+        # Load from a find_module() result, always closing the file.
+        file, filename, info = stuff
+        try:
+            return imp.load_module(name, file, filename, info)
+        finally:
+            if file: file.close()
+
+
+class Hooks(_Verbose):
+
+    """Hooks into the filesystem and interpreter.
+
+    By deriving a subclass you can redefine your filesystem interface,
+    e.g. to merge it with the URL space.
+
+    This base class behaves just like the native filesystem.
+
+    """
+
+    # imp interface
+    def get_suffixes(self): return imp.get_suffixes()
+    def new_module(self, name): return imp.new_module(name)
+    def is_builtin(self, name): return imp.is_builtin(name)
+    def init_builtin(self, name): return imp.init_builtin(name)
+    def is_frozen(self, name): return imp.is_frozen(name)
+    def init_frozen(self, name): return imp.init_frozen(name)
+    def get_frozen_object(self, name): return imp.get_frozen_object(name)
+    def load_source(self, name, filename, file=None):
+        return imp.load_source(name, filename, file)
+    def load_compiled(self, name, filename, file=None):
+        return imp.load_compiled(name, filename, file)
+    def load_dynamic(self, name, filename, file=None):
+        return imp.load_dynamic(name, filename, file)
+    def load_package(self, name, filename, file=None):
+        # For a package, 'filename' is the package directory; imp handles
+        # the rest given a PKG_DIRECTORY info triple.
+        return imp.load_module(name, file, filename, ("", "", PKG_DIRECTORY))
+
+    def add_module(self, name):
+        # Return the registered module of that name, creating and
+        # registering an empty one if necessary.
+        d = self.modules_dict()
+        if name in d: return d[name]
+        d[name] = m = self.new_module(name)
+        return m
+
+    # sys interface
+    def modules_dict(self): return sys.modules
+    def default_path(self): return sys.path
+
+    def path_split(self, x): return os.path.split(x)
+    def path_join(self, x, y): return os.path.join(x, y)
+    def path_isabs(self, x): return os.path.isabs(x)
+    # etc.
+
+    def path_exists(self, x): return os.path.exists(x)
+    def path_isdir(self, x): return os.path.isdir(x)
+    def path_isfile(self, x): return os.path.isfile(x)
+    def path_islink(self, x): return os.path.islink(x)
+    # etc.
+
+    def openfile(self, *x): return open(*x)
+    openfile_error = IOError
+    def listdir(self, x): return os.listdir(x)
+    listdir_error = os.error
+    # etc.
+    # etc.
+
+
+class ModuleLoader(BasicModuleLoader):
+
+    """Default module loader; uses file system hooks.
+
+    By defining suitable hooks, you might be able to load modules from
+    other sources than the file system, e.g. from compressed or
+    encrypted files, tar files or (if you're brave!) URLs.
+
+    """
+
+    def __init__(self, hooks = None, verbose = VERBOSE):
+        BasicModuleLoader.__init__(self, verbose)
+        self.hooks = hooks or Hooks(verbose)
+
+    def default_path(self):
+        return self.hooks.default_path()
+
+    def modules_dict(self):
+        return self.hooks.modules_dict()
+
+    def get_hooks(self):
+        return self.hooks
+
+    def set_hooks(self, hooks):
+        self.hooks = hooks
+
+    def find_builtin_module(self, name):
+        # XXX frozen packages?
+        if self.hooks.is_builtin(name):
+            return None, '', ('', '', BUILTIN_MODULE)
+        if self.hooks.is_frozen(name):
+            return None, '', ('', '', FROZEN_MODULE)
+        return None
+
+    def find_module_in_dir(self, name, dir, allow_packages=1):
+        if dir is None:
+            return self.find_builtin_module(name)
+        if allow_packages:
+            # A subdirectory containing an __init__ module is a package.
+            # The recursive probe passes allow_packages=0 so a directory
+            # named __init__ is not itself treated as a package.
+            fullname = self.hooks.path_join(dir, name)
+            if self.hooks.path_isdir(fullname):
+                stuff = self.find_module_in_dir("__init__", fullname, 0)
+                if stuff:
+                    file = stuff[0]
+                    if file: file.close()
+                    return None, fullname, ('', '', PKG_DIRECTORY)
+        # Try each registered suffix (source, compiled, extension) by
+        # simply attempting to open the candidate file via the hooks.
+        for info in self.hooks.get_suffixes():
+            suff, mode, type = info
+            fullname = self.hooks.path_join(dir, name+suff)
+            try:
+                fp = self.hooks.openfile(fullname, mode)
+                return fp, fullname, info
+            except self.hooks.openfile_error:
+                pass
+        return None
+
+    def load_module(self, name, stuff):
+        # Dispatch on the module type found by find_module(); the file is
+        # closed in all cases, including on a load failure.
+        file, filename, info = stuff
+        (suff, mode, type) = info
+        try:
+            if type == BUILTIN_MODULE:
+                return self.hooks.init_builtin(name)
+            if type == FROZEN_MODULE:
+                return self.hooks.init_frozen(name)
+            if type == C_EXTENSION:
+                m = self.hooks.load_dynamic(name, filename, file)
+            elif type == PY_SOURCE:
+                m = self.hooks.load_source(name, filename, file)
+            elif type == PY_COMPILED:
+                m = self.hooks.load_compiled(name, filename, file)
+            elif type == PKG_DIRECTORY:
+                m = self.hooks.load_package(name, filename, file)
+            else:
+                raise ImportError, "Unrecognized module type (%r) for %s" % \
+                      (type, name)
+        finally:
+            if file: file.close()
+        m.__file__ = filename
+        return m
+
+
+class FancyModuleLoader(ModuleLoader):
+
+    """Fancy module loader -- parses and execs the code itself."""
+
+    def load_module(self, name, stuff):
+        file, filename, (suff, mode, type) = stuff
+        realfilename = filename
+        path = None
+
+        if type == PKG_DIRECTORY:
+            # For a package, locate its __init__ module and fall through
+            # to load that as the module body, with __path__ set to the
+            # package directory.
+            initstuff = self.find_module_in_dir("__init__", filename, 0)
+            if not initstuff:
+                raise ImportError, "No __init__ module in package %s" % name
+            initfile, initfilename, initinfo = initstuff
+            initsuff, initmode, inittype = initinfo
+            if inittype not in (PY_COMPILED, PY_SOURCE):
+                if initfile: initfile.close()
+                raise ImportError, \
+                    "Bad type (%r) for __init__ module in package %s" % (
+                    inittype, name)
+            path = [filename]
+            file = initfile
+            realfilename = initfilename
+            type = inittype
+
+        if type == FROZEN_MODULE:
+            code = self.hooks.get_frozen_object(name)
+        elif type == PY_COMPILED:
+            import marshal
+            # Skip the 8-byte .pyc header (magic number plus timestamp in
+            # this Python's format) before unmarshalling the code object.
+            file.seek(8)
+            code = marshal.load(file)
+        elif type == PY_SOURCE:
+            data = file.read()
+            code = compile(data, realfilename, 'exec')
+        else:
+            # Builtins/extensions can't be exec'ed here; defer to the
+            # base class loader.
+            return ModuleLoader.load_module(self, name, stuff)
+
+        m = self.hooks.add_module(name)
+        if path:
+            m.__path__ = path
+        m.__file__ = filename
+        try:
+            exec code in m.__dict__
+        except:
+            # Don't leave a half-initialized module in sys.modules.
+            d = self.hooks.modules_dict()
+            if name in d:
+                del d[name]
+            raise
+        return m
+
+
+class BasicModuleImporter(_Verbose):
+
+    """Basic module importer; uses module loader.
+
+    This provides basic import facilities but no package imports.
+
+    """
+
+    def __init__(self, loader = None, verbose = VERBOSE):
+        _Verbose.__init__(self, verbose)
+        self.loader = loader or ModuleLoader(None, verbose)
+        # self.modules aliases the loader's module registry (sys.modules
+        # for the default hooks).
+        self.modules = self.loader.modules_dict()
+
+    def get_loader(self):
+        return self.loader
+
+    def set_loader(self, loader):
+        self.loader = loader
+
+    def get_hooks(self):
+        return self.loader.get_hooks()
+
+    def set_hooks(self, hooks):
+        return self.loader.set_hooks(hooks)
+
+    def import_module(self, name, globals={}, locals={}, fromlist=[]):
+        # globals/locals/fromlist are accepted for __import__ hook
+        # signature compatibility but are not used here.
+        name = str(name)
+        if name in self.modules:
+            return self.modules[name] # Fast path
+        stuff = self.loader.find_module(name)
+        if not stuff:
+            raise ImportError, "No module named %s" % name
+        return self.loader.load_module(name, stuff)
+
+    def reload(self, module, path = None):
+        # Unconditionally re-find and re-load (no sys.modules fast path).
+        name = str(module.__name__)
+        stuff = self.loader.find_module(name, path)
+        if not stuff:
+            raise ImportError, "Module %s not found for reload" % name
+        return self.loader.load_module(name, stuff)
+
+    def unload(self, module):
+        del self.modules[str(module.__name__)]
+        # XXX Should this try to clear the module's namespace?
+
+    def install(self):
+        # Replace the __import__/reload/unload builtins with our methods,
+        # saving the previous values for uninstall().  'unload' normally
+        # doesn't exist, so a None placeholder is created first.
+        self.save_import_module = __builtin__.__import__
+        self.save_reload = __builtin__.reload
+        if not hasattr(__builtin__, 'unload'):
+            __builtin__.unload = None
+        self.save_unload = __builtin__.unload
+        __builtin__.__import__ = self.import_module
+        __builtin__.reload = self.reload
+        __builtin__.unload = self.unload
+
+    def uninstall(self):
+        __builtin__.__import__ = self.save_import_module
+        __builtin__.reload = self.save_reload
+        __builtin__.unload = self.save_unload
+        # If 'unload' was only our None placeholder, remove it entirely.
+        if not __builtin__.unload:
+            del __builtin__.unload
+
+
+class ModuleImporter(BasicModuleImporter):
+
+    """A module importer that supports packages."""
+
+    def import_module(self, name, globals=None, locals=None, fromlist=None):
+        # Mirrors the built-in __import__ contract: return the head
+        # package for a plain import, the tail module when a fromlist is
+        # given.
+        parent = self.determine_parent(globals)
+        q, tail = self.find_head_package(parent, str(name))
+        m = self.load_tail(q, tail)
+        if not fromlist:
+            return q
+        if hasattr(m, "__path__"):
+            self.ensure_fromlist(m, fromlist)
+        return m
+
+    def determine_parent(self, globals):
+        # Determine the package a relative import should be resolved in,
+        # based on the importing module's __name__/__path__ globals.
+        if not globals or not "__name__" in globals:
+            return None
+        pname = globals['__name__']
+        if "__path__" in globals:
+            # The importing module is itself a package: it is the parent.
+            parent = self.modules[pname]
+            assert globals is parent.__dict__
+            return parent
+        if '.' in pname:
+            # A submodule: its containing package is the parent.
+            i = pname.rfind('.')
+            pname = pname[:i]
+            parent = self.modules[pname]
+            assert parent.__name__ == pname
+            return parent
+        return None
+
+    def find_head_package(self, parent, name):
+        # Import the first dotted component of 'name': first qualified
+        # under 'parent', then as a top-level module.
+        if '.' in name:
+            i = name.find('.')
+            head = name[:i]
+            tail = name[i+1:]
+        else:
+            head = name
+            tail = ""
+        if parent:
+            qname = "%s.%s" % (parent.__name__, head)
+        else:
+            qname = head
+        q = self.import_it(head, qname, parent)
+        if q: return q, tail
+        if parent:
+            qname = head
+            parent = None
+            q = self.import_it(head, qname, parent)
+            if q: return q, tail
+        raise ImportError, "No module named " + qname
+
+    def load_tail(self, q, tail):
+        # Import the remaining dotted components under head package q,
+        # returning the innermost module.
+        m = q
+        while tail:
+            i = tail.find('.')
+            if i < 0: i = len(tail)
+            head, tail = tail[:i], tail[i+1:]
+            mname = "%s.%s" % (m.__name__, head)
+            m = self.import_it(head, mname, m)
+            if not m:
+                raise ImportError, "No module named " + mname
+        return m
+
+    def ensure_fromlist(self, m, fromlist, recursive=0):
+        # Import submodules named in a 'from pkg import ...' list; "*"
+        # expands to the package's __all__, once (non-recursively).
+        for sub in fromlist:
+            if sub == "*":
+                if not recursive:
+                    try:
+                        all = m.__all__
+                    except AttributeError:
+                        pass
+                    else:
+                        self.ensure_fromlist(m, all, 1)
+                continue
+            if sub != "*" and not hasattr(m, sub):
+                subname = "%s.%s" % (m.__name__, sub)
+                submod = self.import_it(sub, subname, m)
+                if not submod:
+                    raise ImportError, "No module named " + subname
+
+    def import_it(self, partname, fqname, parent, force_load=0):
+        # Core single-component import: module registry first (unless
+        # force_load), then find/load via the loader, then bind the new
+        # module as an attribute of its parent package.
+        if not partname:
+            raise ValueError, "Empty module name"
+        if not force_load:
+            try:
+                return self.modules[fqname]
+            except KeyError:
+                pass
+        try:
+            path = parent and parent.__path__
+        except AttributeError:
+            # Parent isn't a package, so it can't contain submodules.
+            return None
+        partname = str(partname)
+        stuff = self.loader.find_module(partname, path)
+        if not stuff:
+            return None
+        fqname = str(fqname)
+        m = self.loader.load_module(fqname, stuff)
+        if parent:
+            setattr(parent, partname, m)
+        return m
+
+    def reload(self, module):
+        # Force a fresh load; for a submodule, rebind it in its parent
+        # package as well.
+        name = str(module.__name__)
+        if '.' not in name:
+            return self.import_it(name, name, None, force_load=1)
+        i = name.rfind('.')
+        pname = name[:i]
+        parent = self.modules[pname]
+        return self.import_it(name[i+1:], name, parent, force_load=1)
+
+
+# Module-level registry: default_importer may be preset by the embedding
+# application; current_importer tracks whatever install() last activated.
+default_importer = None
+current_importer = None
+
+def install(importer = None):
+    # Install 'importer' (falling back to default_importer, then a fresh
+    # ModuleImporter) as the process-wide import hook.
+    global current_importer
+    current_importer = importer or default_importer or ModuleImporter()
+    current_importer.install()
+
+def uninstall():
+    # Restore the import hooks saved by the current importer's install().
+    global current_importer
+    current_importer.uninstall()
diff --git a/depot_tools/release/win/python_24/Lib/imaplib.py b/depot_tools/release/win/python_24/Lib/imaplib.py
new file mode 100644
index 0000000..f353015
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/imaplib.py
@@ -0,0 +1,1472 @@
+"""IMAP4 client.
+
+Based on RFC 2060.
+
+Public class:           IMAP4
+Public variable:        Debug
+Public functions:       Internaldate2tuple
+                        Int2AP
+                        ParseFlags
+                        Time2Internaldate
+"""
+
+# Author: Piers Lauder <piers@cs.su.oz.au> December 1997.
+#
+# Authentication code contributed by Donn Cave <donn@u.washington.edu> June 1998.
+# String method conversion by ESR, February 2001.
+# GET/SETACL contributed by Anthony Baxter <anthony@interlink.com.au> April 2001.
+# IMAP4_SSL contributed by Tino Lange <Tino.Lange@isg.de> March 2002.
+# GET/SETQUOTA contributed by Andreas Zeidler <az@kreativkombinat.de> June 2002.
+# PROXYAUTH contributed by Rick Holbert <holbert.13@osu.edu> November 2002.
+
+__version__ = "2.55"
+
+import binascii, os, random, re, socket, sys, time
+
+__all__ = ["IMAP4", "IMAP4_SSL", "IMAP4_stream", "Internaldate2tuple",
+           "Int2AP", "ParseFlags", "Time2Internaldate"]
+
+#       Globals
+
+CRLF = '\r\n'           # Protocol line terminator mandated by the IMAP4 RFC.
+Debug = 0               # Module-wide default debug level (copied to IMAP4.debug).
+IMAP4_PORT = 143        # Standard cleartext IMAP4 port.
+IMAP4_SSL_PORT = 993    # Standard IMAP4-over-SSL port.
+AllowedVersions = ('IMAP4REV1', 'IMAP4')        # Most recent first
+
+#       Commands
+
+# Maps each IMAP4 command name to the tuple of connection states
+# ('NONAUTH', 'AUTH', 'SELECTED', 'LOGOUT') in which it may legally be
+# issued; _command() consults this table before sending anything.
+# xatom() may add entries at runtime for extension commands.
+Commands = {
+        # name            valid states
+        'APPEND':       ('AUTH', 'SELECTED'),
+        'AUTHENTICATE': ('NONAUTH',),
+        'CAPABILITY':   ('NONAUTH', 'AUTH', 'SELECTED', 'LOGOUT'),
+        'CHECK':        ('SELECTED',),
+        'CLOSE':        ('SELECTED',),
+        'COPY':         ('SELECTED',),
+        'CREATE':       ('AUTH', 'SELECTED'),
+        'DELETE':       ('AUTH', 'SELECTED'),
+        'DELETEACL':    ('AUTH', 'SELECTED'),
+        'EXAMINE':      ('AUTH', 'SELECTED'),
+        'EXPUNGE':      ('SELECTED',),
+        'FETCH':        ('SELECTED',),
+        'GETACL':       ('AUTH', 'SELECTED'),
+        'GETQUOTA':     ('AUTH', 'SELECTED'),
+        'GETQUOTAROOT': ('AUTH', 'SELECTED'),
+        'MYRIGHTS':     ('AUTH', 'SELECTED'),
+        'LIST':         ('AUTH', 'SELECTED'),
+        'LOGIN':        ('NONAUTH',),
+        'LOGOUT':       ('NONAUTH', 'AUTH', 'SELECTED', 'LOGOUT'),
+        'LSUB':         ('AUTH', 'SELECTED'),
+        'NAMESPACE':    ('AUTH', 'SELECTED'),
+        'NOOP':         ('NONAUTH', 'AUTH', 'SELECTED', 'LOGOUT'),
+        'PARTIAL':      ('SELECTED',),                                  # NB: obsolete
+        'PROXYAUTH':    ('AUTH',),
+        'RENAME':       ('AUTH', 'SELECTED'),
+        'SEARCH':       ('SELECTED',),
+        'SELECT':       ('AUTH', 'SELECTED'),
+        'SETACL':       ('AUTH', 'SELECTED'),
+        'SETQUOTA':     ('AUTH', 'SELECTED'),
+        'SORT':         ('SELECTED',),
+        'STATUS':       ('AUTH', 'SELECTED'),
+        'STORE':        ('SELECTED',),
+        'SUBSCRIBE':    ('AUTH', 'SELECTED'),
+        'THREAD':       ('SELECTED',),
+        'UID':          ('SELECTED',),
+        'UNSUBSCRIBE':  ('AUTH', 'SELECTED'),
+        }
+
+#       Patterns to match server responses
+
+# '+ data' continuation request from the server.
+Continuation = re.compile(r'\+( (?P<data>.*))?')
+# Parenthesised FLAGS list embedded in a response line.
+Flags = re.compile(r'.*FLAGS \((?P<flags>[^\)]*)\)')
+# INTERNALDATE "dd-Mon-yyyy hh:mm:ss +zzzz" timestamp.
+InternalDate = re.compile(r'.*INTERNALDATE "'
+        r'(?P<day>[ 123][0-9])-(?P<mon>[A-Z][a-z][a-z])-(?P<year>[0-9][0-9][0-9][0-9])'
+        r' (?P<hour>[0-9][0-9]):(?P<min>[0-9][0-9]):(?P<sec>[0-9][0-9])'
+        r' (?P<zonen>[-+])(?P<zoneh>[0-9][0-9])(?P<zonem>[0-9][0-9])'
+        r'"')
+# Trailing '{size}' announcing a literal of `size' octets to follow.
+Literal = re.compile(r'.*{(?P<size>\d+)}$')
+# Any line-ending convention; used to normalise message text to CRLF.
+MapCRLF = re.compile(r'\r\n|\r|\n')
+# '[TYPE data]' response code embedded in an OK/NO/BAD response.
+Response_code = re.compile(r'\[(?P<type>[A-Z-]+)( (?P<data>[^\]]*))?\]')
+# '* TYPE data' untagged response.
+Untagged_response = re.compile(r'\* (?P<type>[A-Z-]+)( (?P<data>.*))?')
+# '* num TYPE data' numeric untagged status; here the 'data' group holds
+# the message count/number (EXISTS, RECENT, FETCH, ...).
+Untagged_status = re.compile(r'\* (?P<data>\d+) (?P<type>[A-Z-]+)( (?P<data2>.*))?')
+
+
+
+class IMAP4:
+
+    """IMAP4 client class.
+
+    Instantiate with: IMAP4([host[, port]])
+
+            host - host's name (default: localhost);
+            port - port number (default: standard IMAP4 port).
+
+    All IMAP4rev1 commands are supported by methods of the same
+    name (in lower-case).
+
+    All arguments to commands are converted to strings, except for
+    AUTHENTICATE, and the last argument to APPEND which is passed as
+    an IMAP4 literal.  If necessary (the string contains any
+    non-printing characters or white-space and isn't enclosed with
+    either parentheses or double quotes) each string is quoted.
+    However, the 'password' argument to the LOGIN command is always
+    quoted.  If you want to avoid having an argument string quoted
+    (eg: the 'flags' argument to STORE) then enclose the string in
+    parentheses (eg: "(\Deleted)").
+
+    Each command returns a tuple: (type, [data, ...]) where 'type'
+    is usually 'OK' or 'NO', and 'data' is either the text from the
+    tagged response, or untagged results from command. Each 'data'
+    is either a string, or a tuple. If a tuple, then the first part
+    is the header of the response, and the second part contains
+    the data (ie: 'literal' value).
+
+    Errors raise the exception class <instance>.error("<reason>").
+    IMAP4 server errors raise <instance>.abort("<reason>"),
+    which is a sub-class of 'error'. Mailbox status changes
+    from READ-WRITE to READ-ONLY raise the exception class
+    <instance>.readonly("<reason>"), which is a sub-class of 'abort'.
+
+    "error" exceptions imply a program error.
+    "abort" exceptions imply the connection should be reset, and
+            the command re-tried.
+    "readonly" exceptions imply the command should be re-tried.
+
+    Note: to use this module, you must read the RFCs pertaining
+    to the IMAP4 protocol, as the semantics of the arguments to
+    each IMAP4 command are left to the invoker, not to mention
+    the results.
+    """
+
+    class error(Exception): pass    # Logical errors - debug required
+    class abort(error): pass        # Service errors - close and retry
+    class readonly(abort): pass     # Mailbox status changed to READ-ONLY
+
+    # Any character matched here forces the argument to be quoted before
+    # transmission (see _checkquote).
+    mustquote = re.compile(r"[^\w!#$%&'*+,.:;<=>?^`|~-]")
+
+    def __init__(self, host = '', port = IMAP4_PORT):
+        """Connect to host:port, read the server greeting and CAPABILITY
+        response, and leave the connection in NONAUTH state (or AUTH,
+        after a PREAUTH greeting).  Raises self.error if the greeting is
+        unrecognised or the server advertises no supported IMAP4 version.
+        """
+        self.debug = Debug
+        self.state = 'LOGOUT'
+        self.literal = None             # A literal argument to a command
+        self.tagged_commands = {}       # Tagged commands awaiting response
+        self.untagged_responses = {}    # {typ: [data, ...], ...}
+        self.continuation_response = '' # Last continuation response
+        self.is_readonly = None         # READ-ONLY desired state
+        self.tagnum = 0
+
+        # Open socket to server.
+
+        self.open(host, port)
+
+        # Create unique tag for this session,
+        # and compile tagged response matcher.
+
+        self.tagpre = Int2AP(random.randint(0, 31999))
+        self.tagre = re.compile(r'(?P<tag>'
+                        + self.tagpre
+                        + r'\d+) (?P<type>[A-Z]+) (?P<data>.*)')
+
+        # Get server welcome message,
+        # request and store CAPABILITY response.
+
+        if __debug__:
+            self._cmd_log_len = 10
+            self._cmd_log_idx = 0
+            self._cmd_log = {}           # Last `_cmd_log_len' interactions
+            if self.debug >= 1:
+                self._mesg('imaplib version %s' % __version__)
+                self._mesg('new IMAP4 connection, tag=%s' % self.tagpre)
+
+        self.welcome = self._get_response()
+        if 'PREAUTH' in self.untagged_responses:
+            self.state = 'AUTH'
+        elif 'OK' in self.untagged_responses:
+            self.state = 'NONAUTH'
+        else:
+            raise self.error(self.welcome)
+
+        cap = 'CAPABILITY'
+        self._simple_command(cap)
+        if not cap in self.untagged_responses:
+            raise self.error('no CAPABILITY response from server')
+        self.capabilities = tuple(self.untagged_responses[cap][-1].upper().split())
+
+        if __debug__:
+            if self.debug >= 3:
+                self._mesg('CAPABILITIES: %r' % (self.capabilities,))
+
+        # Pick the most recent protocol version both sides support.
+        for version in AllowedVersions:
+            if not version in self.capabilities:
+                continue
+            self.PROTOCOL_VERSION = version
+            return
+
+        raise self.error('server not IMAP4 compliant')
+
+
+    def __getattr__(self, attr):
+        #       Allow UPPERCASE variants of IMAP4 command methods.
+        if attr in Commands:
+            return getattr(self, attr.lower())
+        raise AttributeError("Unknown IMAP4 command: '%s'" % attr)
+
+
+
+    #       Overridable methods
+
+
+    def open(self, host = '', port = IMAP4_PORT):
+        """Setup connection to remote server on "host:port"
+            (default: localhost:standard IMAP4 port).
+        This connection will be used by the routines:
+            read, readline, send, shutdown.
+        """
+        self.host = host
+        self.port = port
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.connect((host, port))
+        # Buffered binary file object used by read()/readline().
+        self.file = self.sock.makefile('rb')
+
+
+    def read(self, size):
+        """Read 'size' bytes from remote (via the buffered file object)."""
+        return self.file.read(size)
+
+
+    def readline(self):
+        """Read one line from remote, including its line terminator."""
+        return self.file.readline()
+
+
+    def send(self, data):
+        """Send all of 'data' to remote (blocks until fully sent)."""
+        self.sock.sendall(data)
+
+
+    def shutdown(self):
+        """Close I/O established in "open": file object first, then socket."""
+        self.file.close()
+        self.sock.close()
+
+
+    def socket(self):
+        """Return socket instance used to connect to IMAP4 server.
+
+        socket = <instance>.socket()
+        """
+        return self.sock
+
+
+
+    #       Utility methods
+
+
+    def recent(self):
+        """Return most recent 'RECENT' responses if any exist,
+        else prompt server for an update using the 'NOOP' command.
+
+        (typ, [data]) = <instance>.recent()
+
+        'data' is None if no new messages,
+        else list of RECENT responses, most recent last.
+        """
+        name = 'RECENT'
+        # First consume any RECENT data already cached from earlier traffic.
+        typ, dat = self._untagged_response('OK', [None], name)
+        if dat[-1]:
+            return typ, dat
+        typ, dat = self.noop()  # Prod server for response
+        return self._untagged_response(typ, dat, name)
+
+
+    def response(self, code):
+        """Return data for response 'code' if received, or None.
+
+        Old value for response 'code' is cleared.
+
+        (code, [data]) = <instance>.response(code)
+        """
+        # 'code' is upper-cased to match the keys _append_untagged stores.
+        return self._untagged_response(code, [None], code.upper())
+
+
+
+    #       IMAP4 commands
+
+
+    def append(self, mailbox, flags, date_time, message):
+        """Append message to named mailbox.
+
+        (typ, [data]) = <instance>.append(mailbox, flags, date_time, message)
+
+                All args except `message' can be None.
+        """
+        name = 'APPEND'
+        if not mailbox:
+            mailbox = 'INBOX'
+        if flags:
+            # Parenthesise the flag list unless the caller already did.
+            if (flags[0],flags[-1]) != ('(',')'):
+                flags = '(%s)' % flags
+        else:
+            flags = None
+        if date_time:
+            date_time = Time2Internaldate(date_time)
+        else:
+            date_time = None
+        # The message body travels as an IMAP4 literal; normalise its line
+        # endings to CRLF first.  _command() picks self.literal up and
+        # sends it after the server's continuation response.
+        self.literal = MapCRLF.sub(CRLF, message)
+        return self._simple_command(name, mailbox, flags, date_time)
+
+
+    def authenticate(self, mechanism, authobject):
+        """Authenticate command - requires response processing.
+
+        'mechanism' specifies which authentication mechanism is to
+        be used - it must appear in <instance>.capabilities in the
+        form AUTH=<mechanism>.
+
+        'authobject' must be a callable object:
+
+                data = authobject(response)
+
+        It will be called to process server continuation responses.
+        It should return data that will be encoded and sent to server.
+        It should return None if the client abort response '*' should
+        be sent instead.
+        """
+        mech = mechanism.upper()
+        # XXX: shouldn't this code be removed, not commented out?
+        #cap = 'AUTH=%s' % mech
+        #if not cap in self.capabilities:       # Let the server decide!
+        #    raise self.error("Server doesn't allow %s authentication." % mech)
+        # The exchange is driven through the literal mechanism: _command()
+        # invokes this processor once per server continuation request.
+        self.literal = _Authenticator(authobject).process
+        typ, dat = self._simple_command('AUTHENTICATE', mech)
+        if typ != 'OK':
+            raise self.error(dat[-1])
+        self.state = 'AUTH'
+        return typ, dat
+
+
+    def check(self):
+        """Checkpoint mailbox on server.
+
+        (typ, [data]) = <instance>.check()
+        """
+        # Only legal in SELECTED state (see the Commands table).
+        return self._simple_command('CHECK')
+
+
+    def close(self):
+        """Close currently selected mailbox.
+
+        Deleted messages are removed from writable mailbox.
+        This is the recommended command before 'LOGOUT'.
+
+        (typ, [data]) = <instance>.close()
+        """
+        # State drops back to AUTH even if the CLOSE command raises.
+        try:
+            typ, dat = self._simple_command('CLOSE')
+        finally:
+            self.state = 'AUTH'
+        return typ, dat
+
+
+    def copy(self, message_set, new_mailbox):
+        """Copy 'message_set' messages onto end of 'new_mailbox'.
+
+        (typ, [data]) = <instance>.copy(message_set, new_mailbox)
+        """
+        # 'message_set' is passed through verbatim, e.g. '1:5,7'.
+        return self._simple_command('COPY', message_set, new_mailbox)
+
+
+    def create(self, mailbox):
+        """Create new mailbox named 'mailbox' on the server.
+
+        (typ, [data]) = <instance>.create(mailbox)
+        """
+        return self._simple_command('CREATE', mailbox)
+
+
+    def delete(self, mailbox):
+        """Delete the mailbox named 'mailbox' from the server.
+
+        (typ, [data]) = <instance>.delete(mailbox)
+        """
+        return self._simple_command('DELETE', mailbox)
+
+    def deleteacl(self, mailbox, who):
+        """Delete the ACLs (remove any rights) set for who on mailbox.
+
+        (typ, [data]) = <instance>.deleteacl(mailbox, who)
+        """
+        # Part of the IMAP4 ACL extension.
+        return self._simple_command('DELETEACL', mailbox, who)
+
+    def expunge(self):
+        """Permanently remove deleted items from selected mailbox.
+
+        Generates 'EXPUNGE' response for each deleted message.
+
+        (typ, [data]) = <instance>.expunge()
+
+        'data' is list of 'EXPUNGE'd message numbers in order received.
+        """
+        name = 'EXPUNGE'
+        typ, dat = self._simple_command(name)
+        # Replace the command result data with the cached EXPUNGE responses.
+        return self._untagged_response(typ, dat, name)
+
+
+    def fetch(self, message_set, message_parts):
+        """Fetch (parts of) messages.
+
+        (typ, [data, ...]) = <instance>.fetch(message_set, message_parts)
+
+        'message_parts' should be a string of selected parts
+        enclosed in parentheses, eg: "(UID BODY[TEXT])".
+
+        'data' are tuples of message part envelope and data.
+        """
+        name = 'FETCH'
+        typ, dat = self._simple_command(name, message_set, message_parts)
+        return self._untagged_response(typ, dat, name)
+
+
+    def getacl(self, mailbox):
+        """Get the ACLs for a mailbox.
+
+        (typ, [data]) = <instance>.getacl(mailbox)
+        """
+        typ, dat = self._simple_command('GETACL', mailbox)
+        # The server answers a GETACL command with untagged ACL responses.
+        return self._untagged_response(typ, dat, 'ACL')
+
+
+    def getquota(self, root):
+        """Get the quota root's resource usage and limits.
+
+        Part of the IMAP4 QUOTA extension defined in rfc2087.
+
+        (typ, [data]) = <instance>.getquota(root)
+        """
+        typ, dat = self._simple_command('GETQUOTA', root)
+        return self._untagged_response(typ, dat, 'QUOTA')
+
+
+    def getquotaroot(self, mailbox):
+        """Get the list of quota roots for the named mailbox.
+
+        (typ, [[QUOTAROOT responses...], [QUOTA responses]]) = <instance>.getquotaroot(mailbox)
+        """
+        typ, dat = self._simple_command('GETQUOTAROOT', mailbox)
+        # The command yields two distinct untagged response types.
+        typ, quota = self._untagged_response(typ, dat, 'QUOTA')
+        typ, quotaroot = self._untagged_response(typ, dat, 'QUOTAROOT')
+        return typ, [quotaroot, quota]
+
+
+    def list(self, directory='""', pattern='*'):
+        """List mailbox names in directory matching pattern.
+
+        (typ, [data]) = <instance>.list(directory='""', pattern='*')
+
+        'data' is list of LIST responses.
+        """
+        # NB: shadows the builtin list() within this class; name is fixed
+        # by the IMAP4 command it mirrors.
+        name = 'LIST'
+        typ, dat = self._simple_command(name, directory, pattern)
+        return self._untagged_response(typ, dat, name)
+
+
+    def login(self, user, password):
+        """Identify client using plaintext password.
+
+        (typ, [data]) = <instance>.login(user, password)
+
+        NB: 'password' will be quoted.
+        """
+        # Password is always quoted so arbitrary characters survive transit.
+        typ, dat = self._simple_command('LOGIN', user, self._quote(password))
+        if typ != 'OK':
+            raise self.error(dat[-1])
+        self.state = 'AUTH'
+        return typ, dat
+
+
+    def login_cram_md5(self, user, password):
+        """ Force use of CRAM-MD5 authentication.
+
+        (typ, [data]) = <instance>.login_cram_md5(user, password)
+        """
+        # Stash credentials where the _CRAM_MD5_AUTH callback can see them.
+        self.user, self.password = user, password
+        return self.authenticate('CRAM-MD5', self._CRAM_MD5_AUTH)
+
+
+    def _CRAM_MD5_AUTH(self, challenge):
+        """ Authobject to use with CRAM-MD5 authentication.
+
+        Returns 'user <hexdigest>' where the digest is an HMAC of the
+        server challenge keyed with the password (hmac's default digest).
+        """
+        import hmac
+        return self.user + " " + hmac.HMAC(self.password, challenge).hexdigest()
+
+
+    def logout(self):
+        """Shutdown connection to server.
+
+        (typ, [data]) = <instance>.logout()
+
+        Returns server 'BYE' response.
+        """
+        self.state = 'LOGOUT'
+        # Best effort: any failure while sending LOGOUT is reported as a
+        # 'NO' result rather than raised, so shutdown() always runs.
+        try: typ, dat = self._simple_command('LOGOUT')
+        except: typ, dat = 'NO', ['%s: %s' % sys.exc_info()[:2]]
+        self.shutdown()
+        if 'BYE' in self.untagged_responses:
+            return 'BYE', self.untagged_responses['BYE']
+        return typ, dat
+
+
+    def lsub(self, directory='""', pattern='*'):
+        """List 'subscribed' mailbox names in directory matching pattern.
+
+        (typ, [data, ...]) = <instance>.lsub(directory='""', pattern='*')
+
+        'data' is a list of LSUB responses.
+        """
+        name = 'LSUB'
+        typ, dat = self._simple_command(name, directory, pattern)
+        return self._untagged_response(typ, dat, name)
+
+    def myrights(self, mailbox):
+        """Show my ACLs for a mailbox (i.e. the rights that I have on mailbox).
+
+        (typ, [data]) = <instance>.myrights(mailbox)
+        """
+        typ,dat = self._simple_command('MYRIGHTS', mailbox)
+        # The server answers with untagged MYRIGHTS responses.
+        return self._untagged_response(typ, dat, 'MYRIGHTS')
+
+    def namespace(self):
+        """ Returns IMAP namespaces ala rfc2342
+
+        (typ, [data, ...]) = <instance>.namespace()
+        """
+        name = 'NAMESPACE'
+        typ, dat = self._simple_command(name)
+        return self._untagged_response(typ, dat, name)
+
+
+    def noop(self):
+        """Send NOOP command.
+
+        (typ, [data]) = <instance>.noop()
+
+        Useful as a periodic poll: any pending untagged responses
+        (EXISTS, RECENT, ...) arrive as a side effect.
+        """
+        if __debug__:
+            if self.debug >= 3:
+                self._dump_ur(self.untagged_responses)
+        return self._simple_command('NOOP')
+
+
+    def partial(self, message_num, message_part, start, length):
+        """Fetch truncated part of a message.
+
+        (typ, [data, ...]) = <instance>.partial(message_num, message_part, start, length)
+
+        'data' is tuple of message part envelope and data.
+
+        NB: PARTIAL is obsolete (see the Commands table).
+        """
+        name = 'PARTIAL'
+        typ, dat = self._simple_command(name, message_num, message_part, start, length)
+        # The server replies with FETCH-style untagged data.
+        return self._untagged_response(typ, dat, 'FETCH')
+
+
+    def proxyauth(self, user):
+        """Assume authentication as "user".
+
+        Allows an authorised administrator to proxy into any user's
+        mailbox.
+
+        (typ, [data]) = <instance>.proxyauth(user)
+        """
+
+        name = 'PROXYAUTH'
+        return self._simple_command('PROXYAUTH', user)
+
+
+    def rename(self, oldmailbox, newmailbox):
+        """Rename old mailbox name to new.
+
+        (typ, [data]) = <instance>.rename(oldmailbox, newmailbox)
+        """
+        return self._simple_command('RENAME', oldmailbox, newmailbox)
+
+
+    def search(self, charset, *criteria):
+        """Search mailbox for matching messages.
+
+        (typ, [data]) = <instance>.search(charset, criterion, ...)
+
+        'data' is space separated list of matching message numbers.
+        """
+        name = 'SEARCH'
+        # CHARSET is an optional leading argument of the SEARCH command;
+        # omit it entirely when the caller passes a false value.
+        if charset:
+            typ, dat = self._simple_command(name, 'CHARSET', charset, *criteria)
+        else:
+            typ, dat = self._simple_command(name, *criteria)
+        return self._untagged_response(typ, dat, name)
+
+
+    def select(self, mailbox='INBOX', readonly=None):
+        """Select a mailbox.
+
+        Flush all untagged responses.
+
+        (typ, [data]) = <instance>.select(mailbox='INBOX', readonly=None)
+
+        'data' is count of messages in mailbox ('EXISTS' response).
+
+        Mandated responses are ('FLAGS', 'EXISTS', 'RECENT', 'UIDVALIDITY'), so
+        other responses should be obtained via <instance>.response('FLAGS') etc.
+        """
+        self.untagged_responses = {}    # Flush old responses.
+        self.is_readonly = readonly
+        # A non-None 'readonly' requests EXAMINE, the read-only variant.
+        if readonly is not None:
+            name = 'EXAMINE'
+        else:
+            name = 'SELECT'
+        typ, dat = self._simple_command(name, mailbox)
+        if typ != 'OK':
+            self.state = 'AUTH'     # Might have been 'SELECTED'
+            return typ, dat
+        self.state = 'SELECTED'
+        # Server demoted us to READ-ONLY although we asked for read-write.
+        if 'READ-ONLY' in self.untagged_responses \
+                and not readonly:
+            if __debug__:
+                if self.debug >= 1:
+                    self._dump_ur(self.untagged_responses)
+            raise self.readonly('%s is not writable' % mailbox)
+        return typ, self.untagged_responses.get('EXISTS', [None])
+
+
+    def setacl(self, mailbox, who, what):
+        """Set a mailbox acl ('what' rights for identifier 'who').
+
+        (typ, [data]) = <instance>.setacl(mailbox, who, what)
+        """
+        return self._simple_command('SETACL', mailbox, who, what)
+
+
+    def setquota(self, root, limits):
+        """Set the quota root's resource limits.
+
+        (typ, [data]) = <instance>.setquota(root, limits)
+        """
+        typ, dat = self._simple_command('SETQUOTA', root, limits)
+        return self._untagged_response(typ, dat, 'QUOTA')
+
+
+    def sort(self, sort_criteria, charset, *search_criteria):
+        """IMAP4rev1 extension SORT command.
+
+        (typ, [data]) = <instance>.sort(sort_criteria, charset, search_criteria, ...)
+        """
+        name = 'SORT'
+        #if not name in self.capabilities:      # Let the server decide!
+        #       raise self.error('unimplemented extension command: %s' % name)
+        # Parenthesise the sort criteria unless the caller already did.
+        if (sort_criteria[0],sort_criteria[-1]) != ('(',')'):
+            sort_criteria = '(%s)' % sort_criteria
+        typ, dat = self._simple_command(name, sort_criteria, charset, *search_criteria)
+        return self._untagged_response(typ, dat, name)
+
+
+    def status(self, mailbox, names):
+        """Request named status conditions for mailbox.
+
+        (typ, [data]) = <instance>.status(mailbox, names)
+        """
+        name = 'STATUS'
+        #if self.PROTOCOL_VERSION == 'IMAP4':   # Let the server decide!
+        #    raise self.error('%s unimplemented in IMAP4 (obtain IMAP4rev1 server, or re-code)' % name)
+        typ, dat = self._simple_command(name, mailbox, names)
+        return self._untagged_response(typ, dat, name)
+
+
+    def store(self, message_set, command, flags):
+        """Alters flag dispositions for messages in mailbox.
+
+        (typ, [data]) = <instance>.store(message_set, command, flags)
+        """
+        if (flags[0],flags[-1]) != ('(',')'):
+            flags = '(%s)' % flags  # Avoid quoting the flags
+        typ, dat = self._simple_command('STORE', message_set, command, flags)
+        # Flag changes come back as untagged FETCH responses.
+        return self._untagged_response(typ, dat, 'FETCH')
+
+
+    def subscribe(self, mailbox):
+        """Subscribe to new mailbox (it then appears in LSUB results).
+
+        (typ, [data]) = <instance>.subscribe(mailbox)
+        """
+        return self._simple_command('SUBSCRIBE', mailbox)
+
+
+    def thread(self, threading_algorithm, charset, *search_criteria):
+        """IMAP4rev1 extension THREAD command.
+
+        (type, [data]) = <instance>.thread(threading_algorithm, charset, search_criteria, ...)
+        """
+        name = 'THREAD'
+        typ, dat = self._simple_command(name, threading_algorithm, charset, *search_criteria)
+        return self._untagged_response(typ, dat, name)
+
+
+    def uid(self, command, *args):
+        """Execute "command arg ..." with messages identified by UID,
+                rather than message number.
+
+        (typ, [data]) = <instance>.uid(command, arg1, arg2, ...)
+
+        Returns response appropriate to 'command'.
+        """
+        command = command.upper()
+        if not command in Commands:
+            raise self.error("Unknown IMAP4 UID command: %s" % command)
+        # State is checked against the wrapped command, not against UID.
+        if self.state not in Commands[command]:
+            raise self.error('command %s illegal in state %s'
+                                    % (command, self.state))
+        name = 'UID'
+        typ, dat = self._simple_command(name, command, *args)
+        # SEARCH/SORT answer under their own name; everything else
+        # produces FETCH-style untagged responses.
+        if command in ('SEARCH', 'SORT'):
+            name = command
+        else:
+            name = 'FETCH'
+        return self._untagged_response(typ, dat, name)
+
+
+    def unsubscribe(self, mailbox):
+        """Unsubscribe from old mailbox (reverses subscribe()).
+
+        (typ, [data]) = <instance>.unsubscribe(mailbox)
+        """
+        return self._simple_command('UNSUBSCRIBE', mailbox)
+
+
+    def xatom(self, name, *args):
+        """Allow simple extension commands
+                notified by server in CAPABILITY response.
+
+        Assumes command is legal in current state.
+
+        (typ, [data]) = <instance>.xatom(name, arg, ...)
+
+        Returns response appropriate to extension command `name'.
+        """
+        name = name.upper()
+        #if not name in self.capabilities:      # Let the server decide!
+        #    raise self.error('unknown extension command: %s' % name)
+        # Register the extension command in the module-level Commands table
+        # (legal only in the current state) so _command() accepts it.
+        if not name in Commands:
+            Commands[name] = (self.state,)
+        return self._simple_command(name, *args)
+
+
+
+    #       Private methods
+
+
+    def _append_untagged(self, typ, dat):
+        # Record one untagged response: append 'dat' to the list kept in
+        # self.untagged_responses under key 'typ', creating it on demand.
+
+        if dat is None: dat = ''
+        ur = self.untagged_responses
+        if __debug__:
+            if self.debug >= 5:
+                self._mesg('untagged_responses[%s] %s += ["%s"]' %
+                        (typ, len(ur.get(typ,'')), dat))
+        if typ in ur:
+            ur[typ].append(dat)
+        else:
+            ur[typ] = [dat]
+
+
+    def _check_bye(self):
+        # Raise self.abort if the server has announced a BYE: the
+        # connection is going away and the command should be retried
+        # on a fresh connection.
+        bye = self.untagged_responses.get('BYE')
+        if bye:
+            raise self.abort(bye[-1])
+
+
+    def _command(self, name, *args):
+        # Build and transmit one tagged command line, including any
+        # pending literal (self.literal).  Returns the tag whose response
+        # _command_complete()/_get_tagged_response() will wait for.
+
+        if self.state not in Commands[name]:
+            self.literal = None
+            raise self.error(
+            'command %s illegal in state %s' % (name, self.state))
+
+        # Drop stale result-status responses from earlier commands.
+        for typ in ('OK', 'NO', 'BAD'):
+            if typ in self.untagged_responses:
+                del self.untagged_responses[typ]
+
+        if 'READ-ONLY' in self.untagged_responses \
+        and not self.is_readonly:
+            raise self.readonly('mailbox status changed to READ-ONLY')
+
+        tag = self._new_tag()
+        data = '%s %s' % (tag, name)
+        for arg in args:
+            if arg is None: continue
+            data = '%s %s' % (data, self._checkquote(arg))
+
+        literal = self.literal
+        if literal is not None:
+            self.literal = None
+            # A bound method stored in self.literal (cf. authenticate())
+            # generates literal data on demand, one chunk per server
+            # continuation; otherwise the literal is a plain string and
+            # its size is announced on the command line.
+            if type(literal) is type(self._command):
+                literator = literal
+            else:
+                literator = None
+                data = '%s {%s}' % (data, len(literal))
+
+        if __debug__:
+            if self.debug >= 4:
+                self._mesg('> %s' % data)
+            else:
+                self._log('> %s' % data)
+
+        try:
+            self.send('%s%s' % (data, CRLF))
+        except (socket.error, OSError), val:
+            raise self.abort('socket error: %s' % val)
+
+        if literal is None:
+            return tag
+
+        while 1:
+            # Wait for continuation response
+
+            while self._get_response():
+                if self.tagged_commands[tag]:   # BAD/NO?
+                    return tag
+
+            # Send literal
+
+            if literator:
+                literal = literator(self.continuation_response)
+
+            if __debug__:
+                if self.debug >= 4:
+                    self._mesg('write literal size %s' % len(literal))
+
+            try:
+                self.send(literal)
+                self.send(CRLF)
+            except (socket.error, OSError), val:
+                raise self.abort('socket error: %s' % val)
+
+            # A plain-string literal is sent exactly once; a literator
+            # loops until the tagged response arrives.
+            if not literator:
+                break
+
+        return tag
+
+
+    def _command_complete(self, name, tag):
+        # Wait for the tagged response to 'tag', re-raising abort/error
+        # with the command name prepended for context.  A 'BAD' status
+        # means the server rejected the command outright.
+        self._check_bye()
+        try:
+            typ, data = self._get_tagged_response(tag)
+        except self.abort, val:
+            raise self.abort('command: %s => %s' % (name, val))
+        except self.error, val:
+            raise self.error('command: %s => %s' % (name, val))
+        self._check_bye()
+        if typ == 'BAD':
+            raise self.error('%s command error: %s %s' % (name, typ, data))
+        return typ, data
+
+
+    def _get_response(self):
+
+        # Read response and store.
+        #
+        # Returns None for continuation responses,
+        # otherwise first response line received.
+
+        resp = self._get_line()
+
+        # Command completion response?
+
+        if self._match(self.tagre, resp):
+            tag = self.mo.group('tag')
+            if not tag in self.tagged_commands:
+                raise self.abort('unexpected tagged response: %s' % resp)
+
+            typ = self.mo.group('type')
+            dat = self.mo.group('data')
+            self.tagged_commands[tag] = (typ, [dat])
+        else:
+            dat2 = None
+
+            # '*' (untagged) responses?
+
+            if not self._match(Untagged_response, resp):
+                if self._match(Untagged_status, resp):
+                    # Numeric status: 'data' holds the message number,
+                    # 'data2' any trailing text.
+                    dat2 = self.mo.group('data2')
+
+            if self.mo is None:
+                # Only other possibility is '+' (continuation) response...
+
+                if self._match(Continuation, resp):
+                    self.continuation_response = self.mo.group('data')
+                    return None     # NB: indicates continuation
+
+                raise self.abort("unexpected response: '%s'" % resp)
+
+            typ = self.mo.group('type')
+            dat = self.mo.group('data')
+            if dat is None: dat = ''        # Null untagged response
+            if dat2: dat = dat + ' ' + dat2
+
+            # Is there a literal to come?
+
+            while self._match(Literal, dat):
+
+                # Read literal direct from connection.
+
+                size = int(self.mo.group('size'))
+                if __debug__:
+                    if self.debug >= 4:
+                        self._mesg('read literal size %s' % size)
+                data = self.read(size)
+
+                # Store response with literal as tuple
+
+                self._append_untagged(typ, (dat, data))
+
+                # Read trailer - possibly containing another literal
+
+                dat = self._get_line()
+
+            self._append_untagged(typ, dat)
+
+        # Bracketed response information?
+
+        if typ in ('OK', 'NO', 'BAD') and self._match(Response_code, dat):
+            self._append_untagged(self.mo.group('type'), self.mo.group('data'))
+
+        if __debug__:
+            if self.debug >= 1 and typ in ('NO', 'BAD', 'BYE'):
+                self._mesg('%s response: %s' % (typ, dat))
+
+        return resp
+
+
+    def _get_tagged_response(self, tag):
+        # Keep reading responses until the one matching 'tag' arrives,
+        # then remove and return it.
+
+        while 1:
+            result = self.tagged_commands[tag]
+            if result is not None:
+                del self.tagged_commands[tag]
+                return result
+
+            # Some have reported "unexpected response" exceptions.
+            # Note that ignoring them here causes loops.
+            # Instead, send me details of the unexpected response and
+            # I'll update the code in `_get_response()'.
+
+            try:
+                self._get_response()
+            except self.abort, val:
+                if __debug__:
+                    if self.debug >= 1:
+                        self.print_log()
+                raise
+
+
+    def _get_line(self):
+
+        line = self.readline()
+        if not line:
+            raise self.abort('socket error: EOF')
+
+        # Protocol mandates all lines terminated by CRLF
+
+        line = line[:-2]
+        if __debug__:
+            if self.debug >= 4:
+                self._mesg('< %s' % line)
+            else:
+                self._log('< %s' % line)
+        return line
+
+
+    def _match(self, cre, s):
+
+        # Run compiled regular expression match method on 's'.
+        # Save result, return success.
+
+        self.mo = cre.match(s)
+        if __debug__:
+            if self.mo is not None and self.debug >= 5:
+                self._mesg("\tmatched r'%s' => %r" % (cre.pattern, self.mo.groups()))
+        return self.mo is not None
+
+
+    def _new_tag(self):
+
+        tag = '%s%s' % (self.tagpre, self.tagnum)
+        self.tagnum = self.tagnum + 1
+        self.tagged_commands[tag] = None
+        return tag
+
+
+    def _checkquote(self, arg):
+
+        # Must quote command args if non-alphanumeric chars present,
+        # and not already quoted.
+
+        if type(arg) is not type(''):
+            return arg
+        if len(arg) >= 2 and (arg[0],arg[-1]) in (('(',')'),('"','"')):
+            return arg
+        if arg and self.mustquote.search(arg) is None:
+            return arg
+        return self._quote(arg)
+
+
+    def _quote(self, arg):
+
+        arg = arg.replace('\\', '\\\\')
+        arg = arg.replace('"', '\\"')
+
+        return '"%s"' % arg
+
+
+    def _simple_command(self, name, *args):
+
+        return self._command_complete(name, self._command(name, *args))
+
+
+    def _untagged_response(self, typ, dat, name):
+
+        if typ == 'NO':
+            return typ, dat
+        if not name in self.untagged_responses:
+            return typ, [None]
+        data = self.untagged_responses.pop(name)
+        if __debug__:
+            if self.debug >= 5:
+                self._mesg('untagged_responses[%s] => %s' % (name, data))
+        return typ, data
+
+
    if __debug__:

        # Debug-build-only helpers: timestamped stderr messages plus a
        # fixed-size ring buffer of recent protocol interactions.

        def _mesg(self, s, secs=None):
            # Write 's' to stderr prefixed with an MM:SS.cc timestamp
            # (defaulting to "now").
            if secs is None:
                secs = time.time()
            tm = time.strftime('%M:%S', time.localtime(secs))
            sys.stderr.write('  %s.%02d %s\n' % (tm, (secs*100)%100, s))
            sys.stderr.flush()

        def _dump_ur(self, dict):
            # Dump untagged responses (in `dict').
            l = dict.items()
            if not l: return
            t = '\n\t\t'
            l = map(lambda x:'%s: "%s"' % (x[0], x[1][0] and '" "'.join(x[1]) or ''), l)
            self._mesg('untagged responses dump:%s%s' % (t, t.join(l)))

        def _log(self, line):
            # Keep log of last `_cmd_log_len' interactions for debugging.
            # `_cmd_log' is used as a ring buffer indexed by `_cmd_log_idx'.
            self._cmd_log[self._cmd_log_idx] = (line, time.time())
            self._cmd_log_idx += 1
            if self._cmd_log_idx >= self._cmd_log_len:
                self._cmd_log_idx = 0

        def print_log(self):
            # Replay the ring buffer starting at the oldest entry.
            self._mesg('last %d IMAP4 interactions:' % len(self._cmd_log))
            i, n = self._cmd_log_idx, self._cmd_log_len
            while n:
                try:
                    self._mesg(*self._cmd_log[i])
                except:
                    # Slot may not be populated yet; logging is best-effort.
                    pass
                i += 1
                if i >= self._cmd_log_len:
                    i = 0
                n -= 1
+
+
+
class IMAP4_SSL(IMAP4):

    """IMAP4 client class over SSL connection

    Instantiate with: IMAP4_SSL([host[, port[, keyfile[, certfile]]]])

            host - host's name (default: localhost);
            port - port number (default: standard IMAP4 SSL port).
            keyfile - PEM formatted file that contains your private key (default: None);
            certfile - PEM formatted certificate chain file (default: None);

    for more documentation see the docstring of the parent class IMAP4.
    """


    def __init__(self, host = '', port = IMAP4_SSL_PORT, keyfile = None, certfile = None):
        # Stash the SSL credentials before the parent constructor calls
        # open() on our behalf.
        self.keyfile = keyfile
        self.certfile = certfile
        IMAP4.__init__(self, host, port)


    def open(self, host = '', port = IMAP4_SSL_PORT):
        """Setup connection to remote server on "host:port".
            (default: localhost:standard IMAP4 SSL port).
        This connection will be used by the routines:
            read, readline, send, shutdown.
        """
        self.host = host
        self.port = port
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.sock.connect((host, port))
        # Wrap the plain socket; all I/O below goes through the SSL object.
        self.sslobj = socket.ssl(self.sock, self.keyfile, self.certfile)


    def read(self, size):
        """Read 'size' bytes from remote."""
        # sslobj.read() sometimes returns fewer than 'size' bytes, so
        # accumulate chunks until the request is satisfied.
        pieces = []
        remaining = size
        while remaining > 0:
            data = self.sslobj.read(remaining)
            remaining -= len(data)
            pieces.append(data)
        return ''.join(pieces)


    def readline(self):
        """Read line from remote."""
        # NB: socket.ssl needs a "readline" method, or perhaps a "makefile"
        # method; lacking one, read a character at a time until LF.
        chars = []
        ch = ''
        while ch != "\n":
            ch = self.sslobj.read(1)
            chars.append(ch)
        return ''.join(chars)


    def send(self, data):
        """Send data to remote."""
        # NB: socket.ssl needs a "sendall" method to match socket objects,
        # so loop over partial writes ourselves.
        remaining = len(data)
        while remaining > 0:
            sent = self.sslobj.write(data)
            if sent == remaining:
                break    # avoid copy
            data = data[sent:]
            remaining = remaining - sent


    def shutdown(self):
        """Close I/O established in "open"."""
        self.sock.close()


    def socket(self):
        """Return socket instance used to connect to IMAP4 server.

        socket = <instance>.socket()
        """
        return self.sock


    def ssl(self):
        """Return SSLObject instance used to communicate with the IMAP4 server.

        ssl = <instance>.socket.ssl()
        """
        return self.sslobj
+
+
+
class IMAP4_stream(IMAP4):

    """IMAP4 client class over a stream

    Instantiate with: IMAP4_stream(command)

            where "command" is a string that can be passed to os.popen2()

    for more documentation see the docstring of the parent class IMAP4.
    """


    def __init__(self, command):
        # Remember the subprocess command line; the parent constructor
        # calls open() which actually launches it.
        self.command = command
        IMAP4.__init__(self)


    def open(self, host = None, port = None):
        """Setup a stream connection.
        This connection will be used by the routines:
            read, readline, send, shutdown.
        """
        self.host = None        # For compatibility with parent class
        self.port = None
        self.sock = None
        self.file = None
        # Talk IMAP over the subprocess's stdin/stdout pipe pair.
        self.writefile, self.readfile = os.popen2(self.command)


    def read(self, size):
        """Read 'size' bytes from remote."""
        return self.readfile.read(size)


    def readline(self):
        """Read line from remote."""
        return self.readfile.readline()


    def send(self, data):
        """Send data to remote."""
        # Flush immediately: the server subprocess must see the complete
        # command before we block waiting for its response.
        self.writefile.write(data)
        self.writefile.flush()


    def shutdown(self):
        """Close I/O established in "open"."""
        self.readfile.close()
        self.writefile.close()
+
+
+
+class _Authenticator:
+
+    """Private class to provide en/decoding
+            for base64-based authentication conversation.
+    """
+
+    def __init__(self, mechinst):
+        self.mech = mechinst    # Callable object to provide/process data
+
+    def process(self, data):
+        ret = self.mech(self.decode(data))
+        if ret is None:
+            return '*'      # Abort conversation
+        return self.encode(ret)
+
+    def encode(self, inp):
+        #
+        #  Invoke binascii.b2a_base64 iteratively with
+        #  short even length buffers, strip the trailing
+        #  line feed from the result and append.  "Even"
+        #  means a number that factors to both 6 and 8,
+        #  so when it gets to the end of the 8-bit input
+        #  there's no partial 6-bit output.
+        #
+        oup = ''
+        while inp:
+            if len(inp) > 48:
+                t = inp[:48]
+                inp = inp[48:]
+            else:
+                t = inp
+                inp = ''
+            e = binascii.b2a_base64(t)
+            if e:
+                oup = oup + e[:-1]
+        return oup
+
+    def decode(self, inp):
+        if not inp:
+            return ''
+        return binascii.a2b_base64(inp)
+
+
+
# Month abbreviation -> month number, as used in INTERNALDATE strings.
_months = ('Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
           'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec')
Mon2num = dict(zip(_months, range(1, 13)))

def Internaldate2tuple(resp):
    """Convert IMAP4 INTERNALDATE to UT.

    Returns Python time module tuple.
    """

    mo = InternalDate.match(resp)
    if not mo:
        return None

    g = mo.group
    month = Mon2num[g('mon')]
    day, year = int(g('day')), int(g('year'))
    hour, minute, second = int(g('hour')), int(g('min')), int(g('sec'))

    # INTERNALDATE timezone must be subtracted to get UT
    zone = (int(g('zoneh'))*60 + int(g('zonem')))*60
    if g('zonen') == '-':
        zone = -zone

    tt = (year, month, day, hour, minute, second, -1, -1, -1)
    utc = time.mktime(tt)

    # The time module has no 'mkgmtime'; 'mktime' assumes its argument is
    # in the local timezone, so compensate with timezone/altzone.
    lt = time.localtime(utc)
    if time.daylight and lt[-1]:
        zone = zone + time.altzone
    else:
        zone = zone + time.timezone

    return time.localtime(utc - zone)
+
+
+
def Int2AP(num):
    """Convert integer to A-P string representation (base-16 digits
    spelled with the letters 'A'..'P')."""
    digits = 'ABCDEFGHIJKLMNOP'
    num = int(abs(num))
    out = []
    while num:
        num, rem = divmod(num, 16)
        out.append(digits[rem])
    out.reverse()
    return ''.join(out)
+
+
+
def ParseFlags(resp):
    """Convert IMAP4 flags response to python tuple."""
    mo = Flags.match(resp)
    if mo is None:
        return ()
    return tuple(mo.group('flags').split())
+
+
def Time2Internaldate(date_time):

    """Convert 'date_time' to IMAP4 INTERNALDATE representation.

    Return string in form: '"DD-Mmm-YYYY HH:MM:SS +HHMM"'

    'date_time' may be a seconds-since-epoch number, a time tuple or
    struct_time, or a string already in INTERNALDATE form (returned
    unchanged).  Anything else raises ValueError.
    """

    if isinstance(date_time, (int, float)):
        tt = time.localtime(date_time)
    elif isinstance(date_time, (tuple, time.struct_time)):
        tt = date_time
    elif isinstance(date_time, str) and \
         date_time[:1] == '"' and date_time[-1:] == '"':
        # Already quoted: assume the caller formatted it correctly.
        # (Slicing instead of indexing avoids the IndexError the previous
        # code raised for an empty string; '' now falls through to the
        # documented ValueError below.)
        return date_time
    else:
        raise ValueError("date_time not of a known type")

    dt = time.strftime("%d-%b-%Y %H:%M:%S", tt)
    if dt[0] == '0':
        # INTERNALDATE uses a space-padded, not zero-padded, day of month.
        dt = ' ' + dt[1:]
    if time.daylight and tt[-1]:
        zone = -time.altzone
    else:
        zone = -time.timezone
    return '"' + dt + " %+03d%02d" % divmod(zone//60, 60) + '"'
+
+
+
if __name__ == '__main__':

    # To test: invoke either as 'python imaplib.py [IMAP4_server_hostname]'
    # or 'python imaplib.py -s "rsh IMAP4_server_hostname exec /etc/rimapd"'
    # to test the IMAP4_stream class

    import getopt, getpass

    try:
        optlist, args = getopt.getopt(sys.argv[1:], 'd:s:')
    except getopt.error, val:
        # Unparsable options: fall back to all defaults.
        optlist, args = (), ()

    stream_command = None
    for opt,val in optlist:
        if opt == '-d':
            # -d N sets the module-level debug verbosity.
            Debug = int(val)
        elif opt == '-s':
            stream_command = val
            if not args: args = (stream_command,)

    if not args: args = ('',)

    host = args[0]

    USER = getpass.getuser()
    PASSWD = getpass.getpass("IMAP password for %s on %s: " % (USER, host or "localhost"))

    # A minimal RFC822-style message used by the 'append' commands below.
    test_mesg = 'From: %(user)s@localhost%(lf)sSubject: IMAP4 test%(lf)s%(lf)sdata...%(lf)s' % {'user':USER, 'lf':'\n'}
    # First pass: (method name, args) pairs exercising mailbox
    # creation/manipulation under /tmp.
    test_seq1 = (
    ('login', (USER, PASSWD)),
    ('create', ('/tmp/xxx 1',)),
    ('rename', ('/tmp/xxx 1', '/tmp/yyy')),
    ('CREATE', ('/tmp/yyz 2',)),
    ('append', ('/tmp/yyz 2', None, None, test_mesg)),
    ('list', ('/tmp', 'yy*')),
    ('select', ('/tmp/yyz 2',)),
    ('search', (None, 'SUBJECT', 'test')),
    ('fetch', ('1', '(FLAGS INTERNALDATE RFC822)')),
    ('store', ('1', 'FLAGS', '(\Deleted)')),
    ('namespace', ()),
    ('expunge', ()),
    ('recent', ()),
    ('close', ()),
    )

    # Second pass: UID-based commands against the default mailbox.
    test_seq2 = (
    ('select', ()),
    ('response',('UIDVALIDITY',)),
    ('uid', ('SEARCH', 'ALL')),
    ('response', ('EXISTS',)),
    ('append', (None, None, None, test_mesg)),
    ('recent', ()),
    ('logout', ()),
    )

    def run(cmd, args):
        # Invoke the named IMAP4 method, echoing command and result;
        # a 'NO' status aborts the test run.
        M._mesg('%s %s' % (cmd, args))
        typ, dat = getattr(M, cmd)(*args)
        M._mesg('%s => %s %s' % (cmd, typ, dat))
        if typ == 'NO': raise dat[0]
        return dat

    try:
        if stream_command:
            M = IMAP4_stream(stream_command)
        else:
            M = IMAP4(host)
        if M.state == 'AUTH':
            test_seq1 = test_seq1[1:]   # Login not needed
        M._mesg('PROTOCOL_VERSION = %s' % M.PROTOCOL_VERSION)
        M._mesg('CAPABILITIES = %r' % (M.capabilities,))

        for cmd,args in test_seq1:
            run(cmd, args)

        # Delete the scratch mailboxes created by test_seq1.
        for ml in run('list', ('/tmp/', 'yy%')):
            mo = re.match(r'.*"([^"]+)"$', ml)
            if mo: path = mo.group(1)
            else: path = ml.split()[-1]
            run('delete', (path,))

        for cmd,args in test_seq2:
            dat = run(cmd, args)

            if (cmd,args) != ('uid', ('SEARCH', 'ALL')):
                continue

            # Fetch the most recent UID reported by the search.
            uid = dat[-1].split()
            if not uid: continue
            run('uid', ('FETCH', '%s' % uid[-1],
                    '(FLAGS INTERNALDATE RFC822.SIZE RFC822.HEADER RFC822.TEXT)'))

        print '\nAll tests OK.'

    except:
        print '\nTests failed.'

        if not Debug:
            print '''
If you would like to see debugging output,
try: %s -d5
''' % sys.argv[0]

        raise
diff --git a/depot_tools/release/win/python_24/Lib/imghdr.py b/depot_tools/release/win/python_24/Lib/imghdr.py
new file mode 100644
index 0000000..dc5fb22
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/imghdr.py
@@ -0,0 +1,154 @@
+"""Recognize image file formats based on their first few bytes."""
+
+__all__ = ["what"]
+
+#-------------------------#
+# Recognize image headers #
+#-------------------------#
+
def what(file, h=None):
    """Return the image type of 'file' (a filename or a seekable
    file-like object), or None if the format is not recognized.

    'h' may be a pre-read header (the first 32 bytes); when given, no
    I/O is performed.  A file object passed in has its position restored.
    """
    f = None
    try:
        if h is None:
            if isinstance(file, str):
                # A path: open it ourselves; the finally clause closes it
                # even if the read below raises.
                f = open(file, 'rb')
                h = f.read(32)
            else:
                # A file-like object: peek at the header, then seek back.
                location = file.tell()
                h = file.read(32)
                file.seek(location)
        for tf in tests:
            res = tf(h, f)
            if res:
                return res
    finally:
        if f: f.close()
    return None
+
+
+#---------------------------------#
+# Subroutines per image file type #
+#---------------------------------#
+
+tests = []
+
+def test_rgb(h, f):
+    """SGI image library"""
+    if h[:2] == '\001\332':
+        return 'rgb'
+
+tests.append(test_rgb)
+
+def test_gif(h, f):
+    """GIF ('87 and '89 variants)"""
+    if h[:6] in ('GIF87a', 'GIF89a'):
+        return 'gif'
+
+tests.append(test_gif)
+
+def test_pbm(h, f):
+    """PBM (portable bitmap)"""
+    if len(h) >= 3 and \
+        h[0] == 'P' and h[1] in '14' and h[2] in ' \t\n\r':
+        return 'pbm'
+
+tests.append(test_pbm)
+
+def test_pgm(h, f):
+    """PGM (portable graymap)"""
+    if len(h) >= 3 and \
+        h[0] == 'P' and h[1] in '25' and h[2] in ' \t\n\r':
+        return 'pgm'
+
+tests.append(test_pgm)
+
+def test_ppm(h, f):
+    """PPM (portable pixmap)"""
+    if len(h) >= 3 and \
+        h[0] == 'P' and h[1] in '36' and h[2] in ' \t\n\r':
+        return 'ppm'
+
+tests.append(test_ppm)
+
+def test_tiff(h, f):
+    """TIFF (can be in Motorola or Intel byte order)"""
+    if h[:2] in ('MM', 'II'):
+        return 'tiff'
+
+tests.append(test_tiff)
+
+def test_rast(h, f):
+    """Sun raster file"""
+    if h[:4] == '\x59\xA6\x6A\x95':
+        return 'rast'
+
+tests.append(test_rast)
+
+def test_xbm(h, f):
+    """X bitmap (X10 or X11)"""
+    s = '#define '
+    if h[:len(s)] == s:
+        return 'xbm'
+
+tests.append(test_xbm)
+
+def test_jpeg(h, f):
+    """JPEG data in JFIF format"""
+    if h[6:10] == 'JFIF':
+        return 'jpeg'
+
+tests.append(test_jpeg)
+
+def test_bmp(h, f):
+    if h[:2] == 'BM':
+        return 'bmp'
+
+tests.append(test_bmp)
+
+def test_png(h, f):
+    if h[:8] == "\211PNG\r\n\032\n":
+        return 'png'
+
+tests.append(test_png)
+
+#--------------------#
+# Small test program #
+#--------------------#
+
def test():
    """Command-line driver: classify the files/directories named in argv.

    A leading '-r' flag requests recursion into subdirectories.
    """
    import sys
    recursive = 0
    if sys.argv[1:] and sys.argv[1] == '-r':
        del sys.argv[1:2]
        recursive = 1
    targets = sys.argv[1:]
    try:
        testall(targets or ['.'], recursive, 1)
    except KeyboardInterrupt:
        sys.stderr.write('\n[Interrupted]\n')
        sys.exit(1)
+
def testall(list, recursive, toplevel):
    """Run what() over every file in `list', printing one result per line.

    Directories are descended into when `recursive' is true, and always
    at the top level; otherwise they are reported and skipped.
    """
    import sys
    import os
    for filename in list:
        if os.path.isdir(filename):
            print filename + '/:',
            if recursive or toplevel:
                print 'recursing down:'
                import glob
                names = glob.glob(os.path.join(filename, '*'))
                testall(names, recursive, 0)
            else:
                print '*** directory (use -r) ***'
        else:
            print filename + ':',
            # Flush so the filename is visible before a potentially slow
            # (or failing) what() call.
            sys.stdout.flush()
            try:
                print what(filename)
            except IOError:
                print '*** not found ***'
diff --git a/depot_tools/release/win/python_24/Lib/imputil.py b/depot_tools/release/win/python_24/Lib/imputil.py
new file mode 100644
index 0000000..e6ad7eca
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/imputil.py
@@ -0,0 +1,724 @@
+"""
+Import utilities
+
+Exported classes:
+    ImportManager   Manage the import process
+
+    Importer        Base class for replacing standard import functions
+    BuiltinImporter Emulate the import mechanism for builtin and frozen modules
+
+    DynLoadSuffixImporter
+"""
+
+# note: avoid importing non-builtin modules
+import imp                      ### not available in JPython?
+import sys
+import __builtin__
+
+# for the DirectoryImporter
+import struct
+import marshal
+
+__all__ = ["ImportManager","Importer","BuiltinImporter"]
+
# Cached type objects used for cheap type checks throughout this module.
_StringType = type('')
_ModuleType = type(sys)         ### doesn't work in JPython...
+
class ImportManager:
    "Manage the import process."

    def install(self, namespace=vars(__builtin__)):
        "Install this ImportManager into the specified namespace."
        # By default we hook __builtin__ itself, replacing the global
        # __import__ for the whole interpreter.

        if isinstance(namespace, _ModuleType):
            namespace = vars(namespace)

        # Note: we have no notion of "chaining"

        # Record the previous import hook, then install our own.
        self.previous_importer = namespace['__import__']
        self.namespace = namespace
        namespace['__import__'] = self._import_hook

        ### fix this
        #namespace['reload'] = self._reload_hook

    def uninstall(self):
        "Restore the previous import mechanism."
        self.namespace['__import__'] = self.previous_importer

    def add_suffix(self, suffix, importFunc):
        "Register importFunc to handle files ending in 'suffix'."
        assert callable(importFunc)
        self.fs_imp.add_suffix(suffix, importFunc)

    ######################################################################
    #
    # PRIVATE METHODS
    #

    # Subclasses may override this to substitute their own filesystem
    # importer class; None means "use _FilesystemImporter".
    clsFilesystemImporter = None

    def __init__(self, fs_imp=None):
        # we're definitely going to be importing something in the future,
        # so let's just load the OS-related facilities.
        if not _os_stat:
            _os_bootstrap()

        # This is the Importer that we use for grabbing stuff from the
        # filesystem. It defines one more method (import_from_dir) for our use.
        if fs_imp is None:
            cls = self.clsFilesystemImporter or _FilesystemImporter
            fs_imp = cls()
        self.fs_imp = fs_imp

        # Initialize the set of suffixes that we recognize and import.
        # The default will import dynamic-load modules first, followed by
        # .py files (or a .py file's cached bytecode)
        for desc in imp.get_suffixes():
            if desc[2] == imp.C_EXTENSION:
                self.add_suffix(desc[0],
                                DynLoadSuffixImporter(desc).import_file)
        self.add_suffix('.py', py_suffix_importer)

    def _import_hook(self, fqname, globals=None, locals=None, fromlist=None):
        """Python calls this hook to locate and import a module."""

        parts = fqname.split('.')

        # determine the context of this import
        parent = self._determine_import_context(globals)

        # if there is a parent, then its importer should manage this import
        if parent:
            module = parent.__importer__._do_import(parent, parts, fromlist)
            if module:
                return module

        # has the top module already been imported?
        try:
            top_module = sys.modules[parts[0]]
        except KeyError:

            # look for the topmost module
            top_module = self._import_top_module(parts[0])
            if not top_module:
                # the topmost module wasn't found at all.
                raise ImportError, 'No module named ' + fqname

        # fast-path simple imports
        if len(parts) == 1:
            if not fromlist:
                return top_module

            if not top_module.__dict__.get('__ispkg__'):
                # __ispkg__ isn't defined (the module was not imported by us),
                # or it is zero.
                #
                # In the former case, there is no way that we could import
                # sub-modules that occur in the fromlist (but we can't raise an
                # error because it may just be names) because we don't know how
                # to deal with packages that were imported by other systems.
                #
                # In the latter case (__ispkg__ == 0), there can't be any sub-
                # modules present, so we can just return.
                #
                # In both cases, since len(parts) == 1, the top_module is also
                # the "bottom" which is the defined return when a fromlist
                # exists.
                return top_module

        importer = top_module.__dict__.get('__importer__')
        if importer:
            return importer._finish_import(top_module, parts[1:], fromlist)

        # Grrr, some people "import os.path"
        if len(parts) == 2 and hasattr(top_module, parts[1]):
            return top_module

        # If the importer does not exist, then we have to bail. A missing
        # importer means that something else imported the module, and we have
        # no knowledge of how to get sub-modules out of the thing.
        raise ImportError, 'No module named ' + fqname

    def _determine_import_context(self, globals):
        """Returns the context in which a module should be imported.

        The context could be a loaded (package) module and the imported module
        will be looked for within that package. The context could also be None,
        meaning there is no context -- the module should be looked for as a
        "top-level" module.
        """

        if not globals or not globals.get('__importer__'):
            # globals does not refer to one of our modules or packages. That
            # implies there is no relative import context (as far as we are
            # concerned), and it should just pick it off the standard path.
            return None

        # The globals refer to a module or package of ours. It will define
        # the context of the new import. Get the module/package fqname.
        parent_fqname = globals['__name__']

        # if a package is performing the import, then return itself (imports
        # refer to pkg contents)
        if globals['__ispkg__']:
            parent = sys.modules[parent_fqname]
            assert globals is parent.__dict__
            return parent

        i = parent_fqname.rfind('.')

        # a module outside of a package has no particular import context
        if i == -1:
            return None

        # if a module in a package is performing the import, then return the
        # package (imports refer to siblings)
        parent_fqname = parent_fqname[:i]
        parent = sys.modules[parent_fqname]
        assert parent.__name__ == parent_fqname
        return parent

    def _import_top_module(self, name):
        """Locate and import top-level module 'name', or return None."""
        # scan sys.path looking for a location in the filesystem that contains
        # the module, or an Importer object that can import the module.
        for item in sys.path:
            if isinstance(item, _StringType):
                module = self.fs_imp.import_from_dir(item, name)
            else:
                module = item.import_top(name)
            if module:
                return module
        return None

    def _reload_hook(self, module):
        "Python calls this hook to reload a module."

        # reloading of a module may or may not be possible (depending on the
        # importer), but at least we can validate that it's ours to reload
        importer = module.__dict__.get('__importer__')
        if not importer:
            ### oops. now what...
            pass

        # okay. it is using the imputil system, and we must delegate it, but
        # we don't know what to do (yet)
        ### we should blast the module dict and do another get_code(). need to
        ### flesh this out and add proper docco...
        raise SystemError, "reload not yet implemented"
+
+
+class Importer:
+    "Base class for replacing standard import functions."
+
    def import_top(self, name):
        "Import a top-level module."
        # No parent context: for a top-level module, modname == fqname.
        return self._import_one(None, name, name)
+
+    ######################################################################
+    #
+    # PRIVATE METHODS
+    #
    def _finish_import(self, top, parts, fromlist):
        """Load the tail of a dotted module path and honor `fromlist'.

        Returns `top' for a plain "import a.b.c", or the bottom (last)
        module when a fromlist was given.
        """
        # if "a.b.c" was provided, then load the ".b.c" portion down from
        # below the top-level module.
        bottom = self._load_tail(top, parts)

        # if the form is "import a.b.c", then return "a"
        if not fromlist:
            # no fromlist: return the top of the import tree
            return top

        # the top module was imported by self.
        #
        # this means that the bottom module was also imported by self (just
        # now, or in the past and we fetched it from sys.modules).
        #
        # since we imported/handled the bottom module, this means that we can
        # also handle its fromlist (and reliably use __ispkg__).

        # if the bottom node is a package, then (potentially) import some
        # modules.
        #
        # note: if it is not a package, then "fromlist" refers to names in
        #       the bottom module rather than modules.
        # note: for a mix of names and modules in the fromlist, we will
        #       import all modules and insert those into the namespace of
        #       the package module. Python will pick up all fromlist names
        #       from the bottom (package) module; some will be modules that
        #       we imported and stored in the namespace, others are expected
        #       to be present already.
        if bottom.__ispkg__:
            self._import_fromlist(bottom, fromlist)

        # if the form is "from a.b import c, d" then return "b"
        return bottom
+
    def _import_one(self, parent, modname, fqname):
        "Import a single module."

        # has the module already been imported?
        # (EAFP is deliberate: this returns whatever is cached under
        # fqname, even a None marker, which a .get() would conflate with
        # "not present".)
        try:
            return sys.modules[fqname]
        except KeyError:
            pass

        # load the module's code, or fetch the module itself
        result = self.get_code(parent, modname, fqname)
        if result is None:
            return None

        module = self._process_result(result, fqname)

        # insert the module into its parent
        if parent:
            setattr(parent, modname, module)
        return module
+
    def _process_result(self, (ispkg, code, values), fqname):
        """Turn a get_code() result tuple into a registered module object."""
        # did get_code() return an actual module? (rather than a code object)
        is_module = isinstance(code, _ModuleType)

        # use the returned module, or create a new one to exec code into
        if is_module:
            module = code
        else:
            module = imp.new_module(fqname)

        ### record packages a bit differently??
        module.__importer__ = self
        module.__ispkg__ = ispkg

        # insert additional values into the module (before executing the code)
        module.__dict__.update(values)

        # the module is almost ready... make it visible
        sys.modules[fqname] = module

        # execute the code within the module's namespace
        if not is_module:
            try:
                exec code in module.__dict__
            except:
                # On failure, undo the sys.modules registration made above
                # so a later retry starts clean.
                if fqname in sys.modules:
                    del sys.modules[fqname]
                raise

        # fetch from sys.modules instead of returning module directly.
        # also make module's __name__ agree with fqname, in case
        # the "exec code in module.__dict__" played games on us.
        module = sys.modules[fqname]
        module.__name__ = fqname
        return module
+
+    def _load_tail(self, m, parts):
+        """Import the rest of the modules, down from the top-level module.
+
+        Returns the last module in the dotted list of modules.
+        """
+        for part in parts:
+            fqname = "%s.%s" % (m.__name__, part)
+            m = self._import_one(m, part, fqname)
+            if not m:
+                raise ImportError, "No module named " + fqname
+        return m
+
    def _import_fromlist(self, package, fromlist):
        """Import any sub-modules in the "from" list.

        package is the already-imported package; fromlist holds the names
        requested by a "from package import a, b" statement.  Names already
        present on the package are left alone (they may be plain values).
        Raises ImportError when a sub-module cannot be imported.
        """

        # if '*' is present in the fromlist, then look for the '__all__'
        # variable to find additional items (modules) to import.
        if '*' in fromlist:
            fromlist = list(fromlist) + \
                       list(package.__dict__.get('__all__', []))

        for sub in fromlist:
            # if the name is already present, then don't try to import it (it
            # might not be a module!).
            if sub != '*' and not hasattr(package, sub):
                subname = "%s.%s" % (package.__name__, sub)
                submod = self._import_one(package, sub, subname)
                if not submod:
                    raise ImportError, "cannot import name " + subname
+
+    def _do_import(self, parent, parts, fromlist):
+        """Attempt to import the module relative to parent.
+
+        This method is used when the import context specifies that <self>
+        imported the parent module.
+        """
+        top_name = parts[0]
+        top_fqname = parent.__name__ + '.' + top_name
+        top_module = self._import_one(parent, top_name, top_fqname)
+        if not top_module:
+            # this importer and parent could not find the module (relatively)
+            return None
+
+        return self._finish_import(top_module, parts[1:], fromlist)
+
+    ######################################################################
+    #
+    # METHODS TO OVERRIDE
+    #
    def get_code(self, parent, modname, fqname):
        """Find and retrieve the code for the given module.

        parent specifies a parent module to define a context for importing. It
        may be None, indicating no particular context for the search.

        modname specifies a single module (not dotted) within the parent.

        fqname specifies the fully-qualified module name. This is a
        (potentially) dotted name from the "root" of the module namespace
        down to the modname.
        If there is no parent, then modname==fqname.

        This method should return None, or a 3-tuple.

        * If the module was not found, then None should be returned.

        * The first item of the 2- or 3-tuple should be the integer 0 or 1,
            specifying whether the module that was found is a package or not.

        * The second item is the code object for the module (it will be
            executed within the new module's namespace). This item can also
            be a fully-loaded module object (e.g. loaded from a shared lib).

        * The third item is a dictionary of name/value pairs that will be
            inserted into new module before the code object is executed. This
            is provided in case the module's code expects certain values (such
            as where the module was found). When the second item is a module
            object, then these names/values will be inserted *after* the module
            has been loaded/initialized.
        """
        # subclasses must override this method; the base class provides no
        # search behavior of its own
        raise RuntimeError, "get_code not implemented"
+
+
+######################################################################
+#
+# Some handy stuff for the Importers
+#
+
+# byte-compiled file suffix character
+_suffix_char = __debug__ and 'c' or 'o'
+
+# byte-compiled file suffix
+_suffix = '.py' + _suffix_char
+
def _compile(pathname, timestamp):
    """Compile (and cache) a Python source file.

    The file specified by <pathname> is compiled to a code object and
    returned.

    Presuming the appropriate privileges exist, the bytecodes will be
    saved back to the filesystem for future imports. The source file's
    modification timestamp must be provided as a Long value.
    """
    codestring = open(pathname, 'rU').read()
    # compile() requires the source to end with a newline
    if codestring and codestring[-1] != '\n':
        codestring = codestring + '\n'
    code = __builtin__.compile(codestring, pathname, 'exec')

    # try to cache the compiled code
    try:
        f = open(pathname + _suffix_char, 'wb')
    except IOError:
        # cache file is not writable; the compile itself still succeeded
        pass
    else:
        # write a zero magic number first so a partially-written file can
        # never be mistaken for valid bytecode; the real magic number is
        # seeked back and written only after the marshal dump completes
        f.write('\0\0\0\0')
        f.write(struct.pack('<I', timestamp))
        marshal.dump(code, f)
        f.flush()
        f.seek(0, 0)
        f.write(imp.get_magic())
        f.close()

    return code
+
# local stand-ins for os.stat and os.path.join, filled in by _os_bootstrap()
_os_stat = _os_path_join = None
def _os_bootstrap():
    "Set up 'os' module replacement functions for use during import bootstrap."
    # Chooses stat and a path-join implementation directly from the
    # platform's builtin module, so this code does not need to import 'os'.

    names = sys.builtin_module_names

    join = None
    if 'posix' in names:
        sep = '/'
        from posix import stat
    elif 'nt' in names:
        sep = '\\'
        from nt import stat
    elif 'dos' in names:
        sep = '\\'
        from dos import stat
    elif 'os2' in names:
        sep = '\\'
        from os2 import stat
    elif 'mac' in names:
        from mac import stat
        # classic MacOS paths use ':' separators with their own join rules
        def join(a, b):
            if a == '':
                return b
            if ':' not in a:
                a = ':' + a
            if a[-1:] != ':':
                a = a + ':'
            return a + b
    else:
        raise ImportError, 'no os specific module found'

    if join is None:
        # generic join for the separator chosen above ('/' is always accepted)
        def join(a, b, sep=sep):
            if a == '':
                return b
            lastchar = a[-1:]
            if lastchar == '/' or lastchar == sep:
                return a + b
            return a + sep + b

    global _os_stat
    _os_stat = stat

    global _os_path_join
    _os_path_join = join
+
def _os_path_isdir(pathname):
    "Local replacement for os.path.isdir()."
    try:
        s = _os_stat(pathname)
    except OSError:
        # nonexistent or inaccessible path
        return None
    # mask off the file-type bits (S_IFMT) and compare against S_IFDIR
    return (s.st_mode & 0170000) == 0040000
+
def _timestamp(pathname):
    """Return the file modification time as a Long, or None if unavailable."""
    try:
        info = _os_stat(pathname)
    except OSError:
        # missing or unreadable file
        return None
    return long(info.st_mtime)
+
+
+######################################################################
+#
+# Emulate the import mechanism for builtin and frozen modules
+#
class BuiltinImporter(Importer):
    """Importer emulating the standard mechanism for builtin/frozen modules."""

    def get_code(self, parent, modname, fqname):
        # builtin and frozen modules never occur within a package context
        if parent:
            return None

        # classify the module, if we know it at all
        if imp.is_builtin(modname):
            modtype = imp.C_BUILTIN
        elif imp.is_frozen(modname):
            modtype = imp.PY_FROZEN
        else:
            return None         # not a builtin, not frozen

        # got it. now load and return it.
        loaded = imp.load_module(modname, None, modname, ('', '', modtype))
        return 0, loaded, { }
+
+
+######################################################################
+#
+# Internal importer used for importing from the filesystem
+#
class _FilesystemImporter(Importer):
    """Internal importer for loading modules and packages from disk."""

    def __init__(self):
        # (suffix, import-function) pairs, tried in registration order
        self.suffixes = [ ]

    def add_suffix(self, suffix, importFunc):
        """Register importFunc to handle filenames ending in suffix."""
        assert callable(importFunc)
        self.suffixes.append((suffix, importFunc))

    def import_from_dir(self, dir, fqname):
        """Import a top-level module or package located in dir."""
        found = self._import_pathname(_os_path_join(dir, fqname), fqname)
        if found:
            return self._process_result(found, fqname)
        return None

    def get_code(self, parent, modname, fqname):
        # This importer is never used with an empty parent. Its existence is
        # private to the ImportManager. The ImportManager uses the
        # import_from_dir() method to import top-level modules/packages.
        # This method is only used when we look for a module within a package.
        assert parent

        pathname = _os_path_join(parent.__pkgdir__, modname)
        return self._import_pathname(pathname, fqname)

    def _import_pathname(self, pathname, fqname):
        # a directory is a package: import its __init__ and record the
        # package-specific attributes alongside it
        if _os_path_isdir(pathname):
            initresult = self._import_pathname(
                _os_path_join(pathname, '__init__'), fqname)
            if not initresult:
                return None
            values = initresult[2]
            values['__pkgdir__'] = pathname
            values['__path__'] = [ pathname ]
            return 1, initresult[1], values

        # otherwise try each registered suffix until a matching file exists
        for suffix, importFunc in self.suffixes:
            filename = pathname + suffix
            try:
                finfo = _os_stat(filename)
            except OSError:
                pass
            else:
                return importFunc(filename, finfo, fqname)
        return None
+
+######################################################################
+#
+# SUFFIX-BASED IMPORTERS
+#
+
def py_suffix_importer(filename, finfo, fqname):
    """Import a .py file, preferring its cached bytecode when up to date.

    Returns the (ispkg, code, values) triple expected by Importer, with
    __file__ set to whichever file (source or bytecode) was actually used.
    """
    compiled = filename[:-3] + _suffix
    t_py = long(finfo[8])       # source mtime from the stat tuple
    t_pyc = _timestamp(compiled)

    code = None
    # only trust the bytecode when it is at least as new as the source and
    # its magic number and embedded timestamp both check out
    if t_pyc is not None and t_pyc >= t_py:
        f = open(compiled, 'rb')
        if f.read(4) == imp.get_magic():
            stamp = struct.unpack('<I', f.read(4))[0]
            if stamp == t_py:
                code = marshal.load(f)
        f.close()

    if code is not None:
        used = compiled
    else:
        used = filename
        code = _compile(used, t_py)

    return 0, code, { '__file__' : used }
+
class DynLoadSuffixImporter:
    """Import function for dynamically-loaded (shared library) modules."""

    def __init__(self, desc):
        # desc is passed straight to imp.load_module; presumably a
        # (suffix, mode, type) triple as produced by imp.get_suffixes() --
        # desc[1] is used as the open() mode below
        self.desc = desc

    def import_file(self, filename, finfo, fqname):
        """Load filename via imp and return an Importer result triple."""
        fp = open(filename, self.desc[1])
        loaded = imp.load_module(fqname, fp, filename, self.desc)
        loaded.__file__ = filename
        return 0, loaded, { }
+
+
+######################################################################
+
def _print_importers():
    """Debugging aid: print each sys.modules entry with its __importer__."""
    items = sys.modules.items()
    items.sort()
    for name, module in items:
        if module:
            print name, module.__dict__.get('__importer__', '-- no importer')
        else:
            # sys.modules entries may be None (negative-cache markers)
            print name, '-- non-existent module'
+
def _test_revamp():
    """Install a fresh ImportManager and put a BuiltinImporter on sys.path."""
    manager = ImportManager()
    manager.install()
    sys.path.insert(0, BuiltinImporter())
+
+######################################################################
+
+#
+# TODO
+#
+# from Finn Bock:
+#   type(sys) is not a module in JPython. what to use instead?
+#   imp.C_EXTENSION is not in JPython. same for get_suffixes and new_module
+#
+#   given foo.py of:
+#      import sys
+#      sys.modules['foo'] = sys
+#
+#   ---- standard import mechanism
+#   >>> import foo
+#   >>> foo
+#   <module 'sys' (built-in)>
+#
+#   ---- revamped import mechanism
+#   >>> import imputil
+#   >>> imputil._test_revamp()
+#   >>> import foo
+#   >>> foo
+#   <module 'foo' from 'foo.py'>
+#
+#
+# from MAL:
+#   should BuiltinImporter exist in sys.path or hard-wired in ImportManager?
+#   need __path__ processing
+#   performance
+#   move chaining to a subclass [gjs: it's been nuked]
+#   deinstall should be possible
+#   query mechanism needed: is a specific Importer installed?
+#   py/pyc/pyo piping hooks to filter/process these files
+#   wish list:
+#     distutils importer hooked to list of standard Internet repositories
+#     module->file location mapper to speed FS-based imports
+#     relative imports
+#     keep chaining so that it can play nice with other import hooks
+#
+# from Gordon:
+#   push MAL's mapper into sys.path[0] as a cache (hard-coded for apps)
+#
+# from Guido:
+#   need to change sys.* references for rexec environs
+#   need hook for MAL's walk-me-up import strategy, or Tim's absolute strategy
+#   watch out for sys.modules[...] is None
+#   flag to force absolute imports? (speeds _determine_import_context and
+#       checking for a relative module)
+#   insert names of archives into sys.path  (see quote below)
+#   note: reload does NOT blast module dict
+#   shift import mechanisms and policies around; provide for hooks, overrides
+#       (see quote below)
+#   add get_source stuff
+#   get_topcode and get_subcode
+#   CRLF handling in _compile
+#   race condition in _compile
+#   refactoring of os.py to deal with _os_bootstrap problem
+#   any special handling to do for importing a module with a SyntaxError?
+#       (e.g. clean up the traceback)
+#   implement "domain" for path-type functionality using pkg namespace
+#       (rather than FS-names like __path__)
+#   don't use the word "private"... maybe "internal"
+#
+#
+# Guido's comments on sys.path caching:
+#
+# We could cache this in a dictionary: the ImportManager can have a
+# cache dict mapping pathnames to importer objects, and a separate
+# method for coming up with an importer given a pathname that's not yet
+# in the cache.  The method should do a stat and/or look at the
+# extension to decide which importer class to use; you can register new
+# importer classes by registering a suffix or a Boolean function, plus a
+# class.  If you register a new importer class, the cache is zapped.
+# The cache is independent from sys.path (but maintained per
+# ImportManager instance) so that rearrangements of sys.path do the
+# right thing.  If a path is dropped from sys.path the corresponding
+# cache entry is simply no longer used.
+#
+# My/Guido's comments on factoring ImportManager and Importer:
+#
+# > However, we still have a tension occurring here:
+# >
+# > 1) implementing policy in ImportManager assists in single-point policy
+# >    changes for app/rexec situations
+# > 2) implementing policy in Importer assists in package-private policy
+# >    changes for normal, operating conditions
+# >
+# > I'll see if I can sort out a way to do this. Maybe the Importer class will
+# > implement the methods (which can be overridden to change policy) by
+# > delegating to ImportManager.
+#
+# Maybe also think about what kind of policies an Importer would be
+# likely to want to change.  I have a feeling that a lot of the code
+# there is actually not so much policy but a *necessity* to get things
+# working given the calling conventions for the __import__ hook: whether
+# to return the head or tail of a dotted name, or when to do the "finish
+# fromlist" stuff.
+#
diff --git a/depot_tools/release/win/python_24/Lib/inspect.py b/depot_tools/release/win/python_24/Lib/inspect.py
new file mode 100644
index 0000000..11c96e6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/inspect.py
@@ -0,0 +1,823 @@
+# -*- coding: iso-8859-1 -*-
+"""Get useful information from live Python objects.
+
+This module encapsulates the interface provided by the internal special
+attributes (func_*, co_*, im_*, tb_*, etc.) in a friendlier fashion.
+It also provides some help for examining source code and class layout.
+
+Here are some of the useful functions provided by this module:
+
+    ismodule(), isclass(), ismethod(), isfunction(), istraceback(),
+        isframe(), iscode(), isbuiltin(), isroutine() - check object types
+    getmembers() - get members of an object that satisfy a given condition
+
+    getfile(), getsourcefile(), getsource() - find an object's source code
+    getdoc(), getcomments() - get documentation on an object
+    getmodule() - determine the module that an object came from
+    getclasstree() - arrange classes so as to represent their hierarchy
+
+    getargspec(), getargvalues() - get info about function arguments
+    formatargspec(), formatargvalues() - format an argument spec
+    getouterframes(), getinnerframes() - get info about frames
+    currentframe() - get the current stack frame
+    stack(), trace() - get info about frames on the stack or in a traceback
+"""
+
+# This module is in the public domain.  No warranties.
+
+__author__ = 'Ka-Ping Yee <ping@lfw.org>'
+__date__ = '1 Jan 2001'
+
+import sys, os, types, string, re, dis, imp, tokenize, linecache
+
+# ----------------------------------------------------------- type-checking
def ismodule(object):
    """Return true if the object is a module.

    Module objects provide these attributes:
        __doc__         documentation string
        __file__        filename (missing for built-in modules)
    """
    return isinstance(object, types.ModuleType)
+
def isclass(object):
    """Return true if the object is a class (classic or new-style).

    Class objects provide these attributes:
        __doc__         documentation string
        __module__      name of module in which this class was defined
    """
    if isinstance(object, types.ClassType):
        return True
    # new-style classes are recognized by their __bases__ attribute
    return hasattr(object, '__bases__')
+
def ismethod(object):
    """Return true if the object is an instance method.

    Instance method objects provide these attributes:
        __doc__         documentation string
        __name__        name with which this method was defined
        im_class        class object in which this method belongs
        im_func         function object containing implementation of method
        im_self         instance to which this method is bound, or None
    """
    return isinstance(object, types.MethodType)
+
def ismethoddescriptor(object):
    """Return true if the object is a method descriptor.

    But not if ismethod() or isclass() or isfunction() are true.

    This is new in Python 2.2, and, for example, is true of int.__add__.
    An object passing this test has a __get__ attribute but not a __set__
    attribute, but beyond that the set of attributes varies.  __name__ is
    usually sensible, and __doc__ often is.

    Methods implemented via descriptors that also pass one of the other
    tests return false from the ismethoddescriptor() test, simply because
    the other tests promise more -- you can, e.g., count on having the
    im_func attribute (etc) when an object passes ismethod().
    """
    # mutual exclusion: the more specific tests win
    if ismethod(object) or isfunction(object) or isclass(object):
        return False
    # a method descriptor has __get__ but no __set__ (a __set__ would make
    # it a data descriptor instead)
    return hasattr(object, "__get__") and not hasattr(object, "__set__")
+
def isdatadescriptor(object):
    """Return true if the object is a data descriptor.

    Data descriptors have both a __get__ and a __set__ attribute.  Examples
    are properties (defined in Python) and getsets and members (defined in
    C).  Typically, data descriptors will also have __name__ and __doc__
    attributes (properties, getsets, and members have both of these
    attributes), but this is not guaranteed.
    """
    return hasattr(object, "__get__") and hasattr(object, "__set__")
+
def isfunction(object):
    """Return true if the object is a user-defined function.

    Function objects provide these attributes:
        __doc__         documentation string
        __name__        name with which this function was defined
        func_code       code object containing compiled function bytecode
        func_defaults   tuple of any default values for arguments
        func_doc        (same as __doc__)
        func_globals    global namespace in which this function was defined
        func_name       (same as __name__)
    """
    return isinstance(object, types.FunctionType)
+
def istraceback(object):
    """Return true if the object is a traceback.

    Traceback objects provide these attributes:
        tb_frame        frame object at this level
        tb_lasti        index of last attempted instruction in bytecode
        tb_lineno       current line number in Python source code
        tb_next         next inner traceback object (called by this level)
    """
    return isinstance(object, types.TracebackType)
+
def isframe(object):
    """Return true if the object is a frame object.

    Frame objects provide these attributes:
        f_back          next outer frame object (this frame's caller)
        f_builtins      built-in namespace seen by this frame
        f_code          code object being executed in this frame
        f_exc_traceback traceback if raised in this frame, or None
        f_exc_type      exception type if raised in this frame, or None
        f_exc_value     exception value if raised in this frame, or None
        f_globals       global namespace seen by this frame
        f_lasti         index of last attempted instruction in bytecode
        f_lineno        current line number in Python source code
        f_locals        local namespace seen by this frame
        f_restricted    0 or 1 if frame is in restricted execution mode
        f_trace         tracing function for this frame, or None
    """
    return isinstance(object, types.FrameType)
+
def iscode(object):
    """Return true if the object is a code object.

    Code objects provide these attributes:
        co_argcount     number of arguments (not including * or ** args)
        co_code         string of raw compiled bytecode
        co_consts       tuple of constants used in the bytecode
        co_filename     name of file in which this code object was created
        co_firstlineno  number of first line in Python source code
        co_flags        bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg
        co_lnotab       encoded mapping of line numbers to bytecode indices
        co_name         name with which this code object was defined
        co_names        tuple of names of local variables
        co_nlocals      number of local variables
        co_stacksize    virtual machine stack space required
        co_varnames     tuple of names of arguments and local variables
    """
    return isinstance(object, types.CodeType)
+
def isbuiltin(object):
    """Return true if the object is a built-in function or method.

    Built-in functions and methods provide these attributes:
        __doc__         documentation string
        __name__        original name of this function or method
        __self__        instance to which a method is bound, or None
    """
    return isinstance(object, types.BuiltinFunctionType)
+
def isroutine(object):
    """Return true if the object is any kind of function or method."""
    # same order of checks as the individual predicates would be or-ed in
    for check in (isbuiltin, isfunction, ismethod, ismethoddescriptor):
        if check(object):
            return True
    return False
+
def getmembers(object, predicate=None):
    """Return all members of an object as (name, value) pairs sorted by name.

    Optionally, only return members that satisfy a given predicate.
    """
    pairs = []
    for name in dir(object):
        member = getattr(object, name)
        if not predicate or predicate(member):
            pairs.append((name, member))
    return sorted(pairs)
+
def classify_class_attrs(cls):
    """Return list of attribute-descriptor tuples.

    For each name in dir(cls), the return list contains a 4-tuple
    with these elements:

        0. The name (a string).

        1. The kind of attribute this is, one of these strings:
               'class method'    created via classmethod()
               'static method'   created via staticmethod()
               'property'        created via property()
               'method'          any other flavor of method
               'data'            not a method

        2. The class which defined this attribute (a class).

        3. The object as obtained directly from the defining class's
           __dict__, not via getattr.  This is especially important for
           data attributes:  C.data is just a data object, but
           C.__dict__['data'] may be a data descriptor with additional
           info, like a __doc__ string.
    """

    mro = getmro(cls)
    names = dir(cls)
    result = []
    for name in names:
        # Get the object associated with the name.
        # Getting an obj from the __dict__ sometimes reveals more than
        # using getattr.  Static and class methods are dramatic examples.
        if name in cls.__dict__:
            obj = cls.__dict__[name]
        else:
            obj = getattr(cls, name)

        # Figure out where it was defined.
        homecls = getattr(obj, "__objclass__", None)
        if homecls is None:
            # search the dicts, walking the MRO so the first class that
            # defines the name wins
            for base in mro:
                if name in base.__dict__:
                    homecls = base
                    break

        # Get the object again, in order to get it from the defining
        # __dict__ instead of via getattr (if possible).
        if homecls is not None and name in homecls.__dict__:
            obj = homecls.__dict__[name]

        # Also get the object via getattr.
        obj_via_getattr = getattr(cls, name)

        # Classify the object.  The getattr view is used only for the
        # method tests, since descriptors present differently there.
        if isinstance(obj, staticmethod):
            kind = "static method"
        elif isinstance(obj, classmethod):
            kind = "class method"
        elif isinstance(obj, property):
            kind = "property"
        elif (ismethod(obj_via_getattr) or
              ismethoddescriptor(obj_via_getattr)):
            kind = "method"
        else:
            kind = "data"

        result.append((name, kind, homecls, obj))

    return result
+
+# ----------------------------------------------------------- class helpers
+def _searchbases(cls, accum):
+    # Simulate the "classic class" search order.
+    if cls in accum:
+        return
+    accum.append(cls)
+    for base in cls.__bases__:
+        _searchbases(base, accum)
+
def getmro(cls):
    "Return tuple of base classes (including cls) in method resolution order."
    # new-style classes expose their linearization directly
    mro = getattr(cls, "__mro__", None)
    if mro is not None:
        return mro
    # classic classes: simulate the depth-first, left-to-right search
    collected = []
    _searchbases(cls, collected)
    return tuple(collected)
+
+# -------------------------------------------------- source code extraction
def indentsize(line):
    """Return the indent size, in spaces, at the start of a line of text."""
    expanded = line.expandtabs()
    return len(expanded) - len(expanded.lstrip())
+
def getdoc(object):
    """Get the documentation string for an object.

    All tabs are expanded to spaces.  To clean up docstrings that are
    indented to line up with blocks of code, any whitespace that can be
    uniformly removed from the second line onwards is removed."""
    try:
        doc = object.__doc__
    except AttributeError:
        return None
    if not isinstance(doc, types.StringTypes):
        return None
    try:
        lines = doc.expandtabs().split('\n')
    except UnicodeError:
        return None
    # Find the minimum indentation of any non-blank line after the first.
    margin = sys.maxint
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            margin = min(margin, len(line) - len(stripped))
    # Remove indentation.  The first line is special: it follows the opening
    # quotes, so only its leading whitespace is stripped.
    if lines:
        lines[0] = lines[0].lstrip()
    if margin < sys.maxint:
        for i in range(1, len(lines)):
            lines[i] = lines[i][margin:]
    # Remove any trailing or leading blank lines.
    while lines and not lines[-1]:
        lines.pop()
    while lines and not lines[0]:
        lines.pop(0)
    return '\n'.join(lines)
+
def getfile(object):
    """Work out which source or compiled file an object was defined in.

    Raises TypeError for built-in modules/classes and for arguments that
    are not a module, class, method, function, traceback, frame, or code
    object.
    """
    if ismodule(object):
        if hasattr(object, '__file__'):
            return object.__file__
        raise TypeError('arg is a built-in module')
    if isclass(object):
        # classes carry no file info themselves; consult their module
        object = sys.modules.get(object.__module__)
        if hasattr(object, '__file__'):
            return object.__file__
        raise TypeError('arg is a built-in class')
    # successively unwrap until we reach the underlying code object
    if ismethod(object):
        object = object.im_func
    if isfunction(object):
        object = object.func_code
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        return object.co_filename
    raise TypeError('arg is not a module, class, method, '
                    'function, traceback, frame, or code object')
+
def getmoduleinfo(path):
    """Get the module name, suffix, mode, and module type for a given file.

    Returns a (name, suffix, mode, mtype) tuple, or None when no import
    suffix matches the filename.
    """
    filename = os.path.basename(path)
    # sort by negated suffix length so the longest suffixes are tried
    # first, in case they overlap
    suffixes = [(-len(suffix), suffix, mode, mtype)
                for suffix, mode, mtype in imp.get_suffixes()]
    suffixes.sort()
    for neglen, suffix, mode, mtype in suffixes:
        if filename[neglen:] == suffix:
            return filename[:neglen], suffix, mode, mtype
+
def getmodulename(path):
    """Return the module name for a given file, or None."""
    info = getmoduleinfo(path)
    if info is not None:
        return info[0]
    return None
+
def getsourcefile(object):
    """Return the Python source file an object was defined in, if it exists."""
    filename = getfile(object)
    # map compiled files back to their source name
    if filename[-4:].lower() in ['.pyc', '.pyo']:
        filename = filename[:-4] + '.py'
    for suffix, mode, kind in imp.get_suffixes():
        if 'b' in mode and filename[-len(suffix):].lower() == suffix:
            # Looks like a binary file.  We want to only return a text file.
            return None
    if os.path.exists(filename):
        return filename
+
def getabsfile(object):
    """Return an absolute path to the source or compiled file for an object.

    The idea is for each object to have a unique origin, so this routine
    normalizes the result as much as possible.
    """
    filename = getsourcefile(object) or getfile(object)
    return os.path.normcase(os.path.abspath(filename))
+
# cache mapping normalized file paths to module names, filled lazily by
# getmodule()
modulesbyfile = {}

def getmodule(object):
    """Return the module an object was defined in, or None if not found."""
    if ismodule(object):
        return object
    if hasattr(object, '__module__'):
        return sys.modules.get(object.__module__)
    try:
        file = getabsfile(object)
    except TypeError:
        return None
    if file in modulesbyfile:
        return sys.modules.get(modulesbyfile[file])
    for module in sys.modules.values():
        if hasattr(module, '__file__'):
            f = getabsfile(module)
            # BUG FIX: the cache used to be keyed only by realpath(f),
            # while lookups (above and below) use the plain getabsfile()
            # key -- on symlinked paths the cached entry was unreachable
            # and the whole sys.modules scan repeated on every call.
            # Index under both names so either form of the path hits.
            modulesbyfile[f] = module.__name__
            modulesbyfile[os.path.realpath(f)] = module.__name__
    if file in modulesbyfile:
        return sys.modules.get(modulesbyfile[file])
    # fall back to identity checks against __main__ and __builtin__
    main = sys.modules['__main__']
    if not hasattr(object, '__name__'):
        return None
    if hasattr(main, object.__name__):
        mainobject = getattr(main, object.__name__)
        if mainobject is object:
            return main
    builtin = sys.modules['__builtin__']
    if hasattr(builtin, object.__name__):
        builtinobject = getattr(builtin, object.__name__)
        if builtinobject is object:
            return builtin
+
def findsource(object):
    """Return the entire source file and starting line number for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a list of all the lines
    in the file and the line number indexes a line in that list.  An IOError
    is raised if the source code cannot be retrieved."""
    file = getsourcefile(object) or getfile(object)
    lines = linecache.getlines(file)
    if not lines:
        raise IOError('could not get source code')

    if ismodule(object):
        # a module's source "starts" at the first line of the file
        return lines, 0

    if isclass(object):
        name = object.__name__
        pat = re.compile(r'^\s*class\s*' + name + r'\b')
        for i in range(len(lines)):
            if pat.match(lines[i]): return lines, i
        else:
            # the loop has no break, so this for-else clause runs exactly
            # when no matching class line was returned above
            raise IOError('could not find class definition')

    # reduce methods/functions/tracebacks/frames to their code object
    if ismethod(object):
        object = object.im_func
    if isfunction(object):
        object = object.func_code
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        if not hasattr(object, 'co_firstlineno'):
            raise IOError('could not find function definition')
        lnum = object.co_firstlineno - 1
        # matches a 'def' line, a lambda, or a decorator line
        pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        # walk upwards from co_firstlineno until the definition line matches
        while lnum > 0:
            if pat.match(lines[lnum]): break
            lnum = lnum - 1
        return lines, lnum
    raise IOError('could not find code object')
+
def getcomments(object):
    """Get lines of comments immediately preceding an object's source code.

    Returns None when source can't be found.
    """
    try:
        lines, lnum = findsource(object)
    except (IOError, TypeError):
        return None

    if ismodule(object):
        # Look for a comment block at the top of the file.
        start = 0
        # skip over a '#!' interpreter line
        if lines and lines[0][:2] == '#!': start = 1
        # skip blank lines and bare '#' lines before the comment block
        while start < len(lines) and string.strip(lines[start]) in ['', '#']:
            start = start + 1
        if start < len(lines) and lines[start][:1] == '#':
            comments = []
            end = start
            while end < len(lines) and lines[end][:1] == '#':
                comments.append(string.expandtabs(lines[end]))
                end = end + 1
            return string.join(comments, '')

    # Look for a preceding block of comments at the same indentation.
    elif lnum > 0:
        indent = indentsize(lines[lnum])
        end = lnum - 1
        if end >= 0 and string.lstrip(lines[end])[:1] == '#' and \
            indentsize(lines[end]) == indent:
            comments = [string.lstrip(string.expandtabs(lines[end]))]
            if end > 0:
                # walk upwards, collecting comment lines at the same indent
                end = end - 1
                comment = string.lstrip(string.expandtabs(lines[end]))
                while comment[:1] == '#' and indentsize(lines[end]) == indent:
                    comments[:0] = [comment]
                    end = end - 1
                    if end < 0: break
                    comment = string.lstrip(string.expandtabs(lines[end]))
            # drop leading and trailing lines that are just '#'
            while comments and string.strip(comments[0]) == '#':
                comments[:1] = []
            while comments and string.strip(comments[-1]) == '#':
                comments[-1:] = []
            return string.join(comments, '')
+
class ListReader:
    """Wrap a list of strings in a minimal file-like object.

    Only readline() is provided, which is all tokenize.tokenize() needs;
    once the list is exhausted it keeps returning '' like a real file.
    """
    def __init__(self, lines):
        self.lines = lines
        self.index = 0

    def readline(self):
        """Return the next stored line, or '' when there are no more."""
        try:
            line = self.lines[self.index]
        except IndexError:
            return ''
        self.index += 1
        return line
+
# Raised by BlockFinder.tokeneater to stop tokenization once the block has
# ended; its argument is the number of source lines in the block (getblock
# slices lines[:eob.args[0]]).
class EndOfBlock(Exception): pass
+
class BlockFinder:
    """Provide a tokeneater() method to detect the end of a code block."""
    def __init__(self):
        self.indent = 0        # indentation depth inside the block
        self.islambda = False  # block is a lambda (a single expression)
        self.started = False   # the def/class/lambda keyword has been seen
        self.passline = False  # skip remaining tokens of the current line
        self.last = 0          # start row of the last NEWLINE token seen

    # NOTE: Python 2 tuple-unpacking parameters -- (srow, scol) and
    # (erow, ecol) are the token's start/end positions from tokenize.
    def tokeneater(self, type, token, (srow, scol), (erow, ecol), line):
        if not self.started:
            # Wait for the keyword that opens the block, then skip the
            # rest of its header line.
            if token in ("def", "class", "lambda"):
                if token == "lambda":
                    self.islambda = True
                self.started = True
            self.passline = True
        elif type == tokenize.NEWLINE:
            # End of a logical line: remember where it started.
            self.passline = False
            self.last = srow
        elif self.passline:
            pass
        elif self.islambda:
            # A lambda is a single expression; it ends with its first
            # complete logical line.
            raise EndOfBlock, self.last
        elif type == tokenize.INDENT:
            self.indent = self.indent + 1
            self.passline = True
        elif type == tokenize.DEDENT:
            self.indent = self.indent - 1
            # Back at the block's own indentation: the body is over.
            if self.indent == 0:
                raise EndOfBlock, self.last
        elif type == tokenize.NAME and scol == 0:
            # A new statement at column 0 also terminates the block.
            raise EndOfBlock, self.last
+
def getblock(lines):
    """Extract the block of code at the top of the given list of lines."""
    try:
        # Feed the lines through the tokenizer; BlockFinder raises
        # EndOfBlock carrying the block's line count when it ends.
        tokenize.tokenize(ListReader(lines).readline, BlockFinder().tokeneater)
    except EndOfBlock, eob:
        return lines[:eob.args[0]]
    # Fooling the indent/dedent logic implies a one-line definition
    return lines[:1]
+
def getsourcelines(object):
    """Return a list of source lines and starting line number for an object.

    The argument may be a module, class, method, function, traceback,
    frame, or code object.  The result is a pair (lines, lnum): the lines
    making up the object's source and the 1-based line number of the first
    one in its file.  Raises IOError when the source cannot be retrieved."""
    all_lines, lnum = findsource(object)
    if ismodule(object):
        # A module's source is simply the whole file.
        return all_lines, 0
    return getblock(all_lines[lnum:]), lnum + 1
+
def getsource(object):
    """Return the text of the source code for an object.

    Accepts a module, class, method, function, traceback, frame, or code
    object and returns its source as a single string.  Raises IOError when
    the source cannot be retrieved."""
    source_lines, _lnum = getsourcelines(object)
    return string.join(source_lines, '')
+
+# --------------------------------------------------- class tree extraction
def walktree(classes, children, parent):
    """Recursive helper function for getclasstree().

    Sorts 'classes' in place by (module, name), then emits each class as a
    (class, bases) pair followed, when it has known subclasses, by a nested
    list produced by recursing into 'children'."""
    classes.sort(key=lambda klass: (klass.__module__, klass.__name__))
    tree = []
    for klass in classes:
        tree.append((klass, klass.__bases__))
        if klass in children:
            tree.append(walktree(children[klass], children, klass))
    return tree
+
def getclasstree(classes, unique=0):
    """Arrange the given list of classes into a hierarchy of nested lists.

    Where a nested list appears, it contains classes derived from the class
    whose entry immediately precedes the list.  Each entry is a 2-tuple
    containing a class and a tuple of its base classes.  If the 'unique'
    argument is true, exactly one entry appears in the returned structure
    for each class in the given list.  Otherwise, classes using multiple
    inheritance and their descendants will appear multiple times."""
    children = {}
    roots = []
    for klass in classes:
        bases = klass.__bases__
        if not bases:
            # A class with no bases is a root of the tree.
            if klass not in roots:
                roots.append(klass)
            continue
        for base in bases:
            children.setdefault(base, []).append(klass)
            # With 'unique' set, record the class under only one base
            # that is itself part of the input list.
            if unique and base in classes:
                break
    # Bases that are not in the input list also act as roots.
    for base in children:
        if base not in classes:
            roots.append(base)
    return walktree(roots, children, None)
+
# ------------------------------------------------ argument list extraction
# These constants are from Python's compile.h.
# They are bit masks tested against a code object's co_flags (see getargs):
# CO_VARARGS is set when the code accepts *args, CO_VARKEYWORDS when it
# accepts **kwargs.
CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS = 1, 2, 4, 8
+
def getargs(co):
    """Get information about the arguments accepted by a code object.

    Three things are returned: (args, varargs, varkw), where 'args' is
    a list of argument names (possibly containing nested lists), and
    'varargs' and 'varkw' are the names of the * and ** arguments or None."""

    if not iscode(co):
        raise TypeError('arg is not a code object')

    # Python 2: co_code is a byte *string*, hence the ord() calls below.
    code = co.co_code
    nargs = co.co_argcount
    names = co.co_varnames
    args = list(names[:nargs])
    step = 0

    # The following acrobatics are for anonymous (tuple) arguments.
    # Tuple arguments get compiler-generated placeholder names that are
    # empty or start with '.'; the real names are recovered by scanning
    # the unpacking bytecode at the start of the function.
    for i in range(nargs):
        if args[i][:1] in ['', '.']:
            stack, remain, count = [], [], []
            while step < len(code):
                op = ord(code[step])
                step = step + 1
                if op >= dis.HAVE_ARGUMENT:
                    opname = dis.opname[op]
                    # Two-byte little-endian operand.
                    value = ord(code[step]) + ord(code[step+1])*256
                    step = step + 2
                    if opname in ['UNPACK_TUPLE', 'UNPACK_SEQUENCE']:
                        # Open a nested tuple of 'value' elements.
                        remain.append(value)
                        count.append(value)
                    elif opname == 'STORE_FAST':
                        stack.append(names[value])

                        # Special case for sublists of length 1: def foo((bar))
                        # doesn't generate the UNPACK_TUPLE bytecode, so if
                        # `remain` is empty here, we have such a sublist.
                        if not remain:
                            stack[0] = [stack[0]]
                            break
                        else:
                            # Close any tuples that are now complete,
                            # collapsing their names into a nested list.
                            remain[-1] = remain[-1] - 1
                            while remain[-1] == 0:
                                remain.pop()
                                size = count.pop()
                                stack[-size:] = [stack[-size:]]
                                if not remain: break
                                remain[-1] = remain[-1] - 1
                            if not remain: break
            args[i] = stack[0]

    # The * argument (if any) is stored right after the positional args,
    # and the ** argument after that.
    varargs = None
    if co.co_flags & CO_VARARGS:
        varargs = co.co_varnames[nargs]
        nargs = nargs + 1
    varkw = None
    if co.co_flags & CO_VARKEYWORDS:
        varkw = co.co_varnames[nargs]
    return args, varargs, varkw
+
def getargspec(func):
    """Get the names and default values of a function's arguments.

    A tuple of four things is returned: (args, varargs, varkw, defaults).
    'args' is a list of the argument names (it may contain nested lists).
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'defaults' is an n-tuple of the default values of the last n arguments.
    Raises TypeError if 'func' is not a Python function or method."""
    if ismethod(func):
        # Unwrap a method to the underlying function object.
        func = func.im_func
    if not isfunction(func):
        raise TypeError('arg is not a Python function')
    spec = getargs(func.func_code)
    return spec + (func.func_defaults,)
+
def getargvalues(frame):
    """Get information about arguments passed into a particular frame.

    A tuple of four things is returned: (args, varargs, varkw, locals).
    'args' is a list of the argument names (it may contain nested lists).
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'locals' is the locals dictionary of the given frame."""
    arg_info = getargs(frame.f_code)
    return arg_info + (frame.f_locals,)
+
def joinseq(seq):
    """Render a sequence of already-formatted strings as a tuple display."""
    if len(seq) != 1:
        return '(' + string.join(seq, ', ') + ')'
    # A one-element tuple needs its trailing comma.
    return '(' + seq[0] + ',)'
+
def strseq(object, convert, join=joinseq):
    """Recursively walk a sequence, stringifying each element.

    Lists and tuples are walked element by element (recursively) and the
    rendered pieces are combined with 'join'; anything else is a leaf and
    is rendered with 'convert'."""
    if type(object) not in (types.ListType, types.TupleType):
        return convert(object)
    pieces = [strseq(element, convert, join) for element in object]
    return join(pieces)
+
def formatargspec(args, varargs=None, varkw=None, defaults=None,
                  formatarg=str,
                  formatvarargs=lambda name: '*' + name,
                  formatvarkw=lambda name: '**' + name,
                  formatvalue=lambda value: '=' + repr(value),
                  join=joinseq):
    """Format an argument spec from the 4 values returned by getargspec.

    The first four arguments are (args, varargs, varkw, defaults).  The
    other four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings.  The ninth
    argument is an optional function to format the sequence of arguments."""
    # Index of the first argument that has a default value; when there are
    # no defaults, park it past the end so no argument matches.
    if defaults:
        firstdefault = len(args) - len(defaults)
    else:
        firstdefault = len(args)
    specs = []
    for i, arg in enumerate(args):
        piece = strseq(arg, formatarg, join)
        if i >= firstdefault:
            piece = piece + formatvalue(defaults[i - firstdefault])
        specs.append(piece)
    if varargs is not None:
        specs.append(formatvarargs(varargs))
    if varkw is not None:
        specs.append(formatvarkw(varkw))
    return '(' + string.join(specs, ', ') + ')'
+
def formatargvalues(args, varargs, varkw, locals,
                    formatarg=str,
                    formatvarargs=lambda name: '*' + name,
                    formatvarkw=lambda name: '**' + name,
                    formatvalue=lambda value: '=' + repr(value),
                    join=joinseq):
    """Format an argument spec from the 4 values returned by getargvalues.

    The first four arguments are (args, varargs, varkw, locals).  The
    next four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings.  The ninth
    argument is an optional function to format the sequence of arguments."""
    # Default-argument bindings freeze the current formatting functions
    # and locals dict for use as a strseq() leaf converter.
    def convert(name, locals=locals,
                formatarg=formatarg, formatvalue=formatvalue):
        return formatarg(name) + formatvalue(locals[name])
    specs = [strseq(arg, convert, join) for arg in args]
    if varargs:
        specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
    if varkw:
        specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
    return '(' + string.join(specs, ', ') + ')'
+
+# -------------------------------------------------- stack frame extraction
def getframeinfo(frame, context=1):
    """Get information about a frame or traceback object.

    A tuple of five things is returned: the filename, the line number of
    the current line, the function name, a list of lines of context from
    the source code, and the index of the current line within that list.
    The optional second argument specifies the number of lines of context
    to return, which are centered around the current line.  When no source
    is available, the last two items are None."""
    if istraceback(frame):
        lineno = frame.tb_lineno
        frame = frame.tb_frame
    else:
        lineno = frame.f_lineno
    if not isframe(frame):
        raise TypeError('arg is not a frame or traceback object')

    filename = getsourcefile(frame) or getfile(frame)
    name = frame.f_code.co_name

    if context <= 0:
        return (filename, lineno, name, None, None)
    try:
        all_lines, _ = findsource(frame)
    except IOError:
        return (filename, lineno, name, None, None)
    # Center the context window on the current line, then clamp it into
    # the file (preserving the original order of the two clamps).
    first = lineno - 1 - context//2
    first = max(first, 1)
    first = max(0, min(first, len(all_lines) - context))
    context_lines = all_lines[first:first+context]
    index = lineno - 1 - first
    return (filename, lineno, name, context_lines, index)
+
def getlineno(frame):
    """Get the line number from a frame object, allowing for optimization.

    frame.f_lineno is nowadays a descriptor that computes the line number
    from co_lnotab on demand, so this is just an attribute read kept for
    backward compatibility."""
    current_line = frame.f_lineno
    return current_line
+
def getouterframes(frame, context=1):
    """Get a list of records for a frame and all higher (calling) frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context."""
    records = []
    current = frame
    while current is not None:
        info = getframeinfo(current, context)
        records.append((current,) + info)
        current = current.f_back
    return records
+
def getinnerframes(tb, context=1):
    """Get a list of records for a traceback's frame and all lower frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context."""
    records = []
    current = tb
    while current is not None:
        info = getframeinfo(current, context)
        records.append((current.tb_frame,) + info)
        current = current.tb_next
    return records
+
# currentframe() returns the caller's stack frame; sys._getframe does
# exactly that, so it is exposed directly.
currentframe = sys._getframe
+
def stack(context=1):
    """Return a list of records for the stack above the caller's frame."""
    # Depth 1 skips this function's own frame.
    caller = sys._getframe(1)
    return getouterframes(caller, context)
+
def trace(context=1):
    """Return a list of records for the stack below the current exception."""
    current_tb = sys.exc_info()[2]
    return getinnerframes(current_tb, context)
diff --git a/depot_tools/release/win/python_24/Lib/keyword.py b/depot_tools/release/win/python_24/Lib/keyword.py
new file mode 100644
index 0000000..223a88a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/keyword.py
@@ -0,0 +1,93 @@
+#! /usr/bin/env python
+
+"""Keywords (from "graminit.c")
+
+This file is automatically generated; please don't muck it up!
+
+To update the symbols in this file, 'cd' to the top directory of
+the python source tree after building the interpreter and run:
+
+    python Lib/keyword.py
+"""
+
+__all__ = ["iskeyword", "kwlist"]
+
# The canonical list of Python keywords.  Everything between the
# "#--start keywords--" and "#--end keywords--" marker lines is rewritten
# in place by main() below, so those marker lines must be kept intact.
kwlist = [
#--start keywords--
        'and',
        'assert',
        'break',
        'class',
        'continue',
        'def',
        'del',
        'elif',
        'else',
        'except',
        'exec',
        'finally',
        'for',
        'from',
        'global',
        'if',
        'import',
        'in',
        'is',
        'lambda',
        'not',
        'or',
        'pass',
        'print',
        'raise',
        'return',
        'try',
        'while',
        'yield',
#--end keywords--
        ]

# Bound frozenset membership test: iskeyword(s) is true iff s is one of
# the keywords listed above.
iskeyword = frozenset(kwlist).__contains__
+
def main():
    """Regenerate the keyword list in this file from Python/graminit.c.

    Usage: python Lib/keyword.py [graminit.c [keyword.py]]
    Scans the grammar source for keyword strings and splices them, sorted,
    between the '#--start keywords--' / '#--end keywords--' markers of the
    target file, rewriting it in place.  Exits with status 1 when the
    target lacks the markers."""
    import sys, re

    argv = sys.argv[1:]
    # An empty first argument still falls back to the default path.
    iptfile = argv and argv[0] or "Python/graminit.c"
    if len(argv) > 1:
        optfile = argv[1]
    else:
        optfile = "Lib/keyword.py"

    # Scan the grammar source for quoted keyword strings.
    strprog = re.compile('"([^"]+)"')
    keyword_lines = []
    fp = open(iptfile)
    for line in fp:
        if '{1, "' in line:
            match = strprog.search(line)
            if match:
                keyword_lines.append("        '" + match.group(1) + "',\n")
    fp.close()
    keyword_lines.sort()

    # Load the current target file as the output skeleton.
    fp = open(optfile)
    format = fp.readlines()
    fp.close()

    # Splice the keyword lines between the format markers.
    try:
        start = format.index("#--start keywords--\n") + 1
        end = format.index("#--end keywords--\n")
    except ValueError:
        sys.stderr.write("target does not contain format markers\n")
        sys.exit(1)
    format[start:end] = keyword_lines

    # Rewrite the target in place.
    fp = open(optfile, 'w')
    fp.write(''.join(format))
    fp.close()

if __name__ == "__main__":
    main()
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/Para.py b/depot_tools/release/win/python_24/Lib/lib-old/Para.py
new file mode 100644
index 0000000..2fd8dc6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/Para.py
@@ -0,0 +1,343 @@
+# Text formatting abstractions
+# Note -- this module is obsolete, it's too slow anyway
+
+
# Oft-used type object: the integer type, used throughout Para to tell
# anchor-id markers (plain ints) apart from word tuples in self.words.
Int = type(0)
+
+
+# Represent a paragraph.  This is a list of words with associated
+# font and size information, plus indents and justification for the
+# entire paragraph.
+# Once the words have been added to a paragraph, it can be laid out
+# for different line widths.  Once laid out, it can be rendered at
+# different screen locations.  Once rendered, it can be queried
+# for mouse hits, and parts of the text can be highlighted
class Para:
    """A paragraph: a list of styled words plus indents and justification.

    Life cycle: add words (addword/bgn_anchor/end_anchor), call layout()
    for a given line width, then render() at a screen position; afterwards
    the text can be hit-tested (hitcheck/whereis) and highlighted (invert).

    The drawing object 'd' passed to several methods is assumed to provide
    setfont, textwidth, baseline, lineheight, text, line and invert --
    inferred from the calls made here; confirm against the actual drawing
    class when modifying.
    """
    #
    def __init__(self):
        self.words = [] # The words
        self.just = 'l' # Justification: 'l', 'r', 'lr' or 'c'
        self.indent_left = self.indent_right = self.indent_hang = 0
        # Final lay-out parameters, may change
        self.left = self.top = self.right = self.bottom = \
                self.width = self.height = self.lines = None
    #
    # Add a word, computing size information for it.
    # Words may also be added manually by appending to self.words
    # Each word should be a 7-tuple:
    # (font, text, width, space, stretch, ascent, descent)
    def addword(self, d, font, text, space, stretch):
        """Measure 'text' with drawing object 'd' and append it as a word."""
        if font is not None:
            d.setfont(font)
        width = d.textwidth(text)
        ascent = d.baseline()
        descent = d.lineheight() - ascent
        # Space and stretch are given in units of one space character.
        spw = d.textwidth(' ')
        space = space * spw
        stretch = stretch * spw
        tuple = (font, text, width, space, stretch, ascent, descent)
        self.words.append(tuple)
    #
    # Hooks to begin and end anchors -- insert numbers in the word list!
    def bgn_anchor(self, id):
        self.words.append(id)
    #
    def end_anchor(self, id):
        self.words.append(0)
    #
    # Return the total length (width) of the text added so far, in pixels
    def getlength(self):
        total = 0
        for word in self.words:
            if type(word) is not Int:
                total = total + word[2] + word[3]
        return total
    #
    # Tab to a given position (relative to the current left indent):
    # remove all stretch, add fixed space up to the new indent.
    # If the current position is already at the tab stop,
    # don't add any new space (but still remove the stretch)
    def tabto(self, tab):
        total = 0
        as, de = 1, 0
        for i in range(len(self.words)):
            word = self.words[i]
            if type(word) is Int: continue
            (fo, te, wi, sp, st, as, de) = word
            self.words[i] = (fo, te, wi, sp, 0, as, de)
            total = total + wi + sp
        if total < tab:
            self.words.append((None, '', 0, tab-total, 0, as, de))
    #
    # Make a hanging tag: tab to hang, increment indent_left by hang,
    # and reset indent_hang to -hang
    def makehangingtag(self, hang):
        self.tabto(hang)
        self.indent_left = self.indent_left + hang
        self.indent_hang = -hang
    #
    # Decide where the line breaks will be given some screen width
    def layout(self, linewidth):
        """Break the words into lines for the given width; sets self.lines
        (one summary tuple per line) and self.height."""
        self.width = linewidth
        height = 0
        self.lines = lines = []
        avail1 = self.width - self.indent_left - self.indent_right
        avail = avail1 - self.indent_hang
        words = self.words
        i = 0
        n = len(words)
        lastfont = None
        while i < n:
            firstfont = lastfont
            charcount = 0
            width = 0
            stretch = 0
            ascent = 0
            descent = 0
            lsp = 0
            j = i
            # Greedily take words until the line is full.
            while i < n:
                word = words[i]
                if type(word) is Int:
                    if word > 0 and width >= avail:
                        break
                    i = i+1
                    continue
                fo, te, wi, sp, st, as, de = word
                if width + wi > avail and width > 0 and wi > 0:
                    break
                if fo is not None:
                    lastfont = fo
                    if width == 0:
                        firstfont = fo
                charcount = charcount + len(te) + (sp > 0)
                width = width + wi + sp
                lsp = sp
                stretch = stretch + st
                lst = st
                ascent = max(ascent, as)
                descent = max(descent, de)
                i = i+1
            # Don't start the next line with an anchor-begin marker.
            while i > j and type(words[i-1]) is Int and \
                    words[i-1] > 0: i = i-1
            width = width - lsp
            if i < n:
                stretch = stretch - lst
            else:
                stretch = 0
            tuple = i-j, firstfont, charcount, width, stretch, \
                    ascent, descent
            lines.append(tuple)
            height = height + ascent + descent
            avail = avail1
        self.height = height
    #
    # Call a function for all words in a line
    def visit(self, wordfunc, anchorfunc):
        """Walk the laid-out words, computing each word's screen box and
        calling wordfunc/anchorfunc; stops early when either returns a
        value other than None and returns that value."""
        avail1 = self.width - self.indent_left - self.indent_right
        avail = avail1 - self.indent_hang
        v = self.top
        i = 0
        for tuple in self.lines:
            wordcount, firstfont, charcount, width, stretch, \
                    ascent, descent = tuple
            h = self.left + self.indent_left
            if i == 0: h = h + self.indent_hang
            extra = 0
            if self.just == 'r': h = h + avail - width
            elif self.just == 'c': h = h + (avail - width) / 2
            elif self.just == 'lr' and stretch > 0:
                extra = avail - width
            v2 = v + ascent + descent
            for j in range(i, i+wordcount):
                word = self.words[j]
                if type(word) is Int:
                    ok = anchorfunc(self, tuple, word, \
                                    h, v)
                    if ok is not None: return ok
                    continue
                fo, te, wi, sp, st, as, de = word
                if extra > 0 and stretch > 0:
                    # Distribute leftover width proportionally to stretch.
                    ex = extra * st / stretch
                    extra = extra - ex
                    stretch = stretch - st
                else:
                    ex = 0
                h2 = h + wi + sp + ex
                ok = wordfunc(self, tuple, word, h, v, \
                        h2, v2, (j==i), (j==i+wordcount-1))
                if ok is not None: return ok
                h = h2
            v = v2
            i = i + wordcount
            avail = avail1
    #
    # Render a paragraph in "drawing object" d, using the rectangle
    # given by (left, top, right) with an unspecified bottom.
    # Return the computed bottom of the text.
    def render(self, d, left, top, right):
        """Draw the paragraph with 'd' inside (left, top, right); returns
        the bottom coordinate of the drawn text."""
        if self.width != right-left:
            self.layout(right-left)
        self.left = left
        self.top = top
        self.right = right
        self.bottom = self.top + self.height
        self.anchorid = 0
        try:
            self.d = d
            self.visit(self.__class__._renderword, \
                       self.__class__._renderanchor)
        finally:
            self.d = None
        return self.bottom
    #
    def _renderword(self, tuple, word, h, v, h2, v2, isfirst, islast):
        if word[0] is not None: self.d.setfont(word[0])
        baseline = v + tuple[5]
        self.d.text((h, baseline - word[5]), word[1])
        # Underline words inside an active anchor.
        if self.anchorid > 0:
            self.d.line((h, baseline+2), (h2, baseline+2))
    #
    def _renderanchor(self, tuple, word, h, v):
        self.anchorid = word
    #
    # Return which anchor(s) was hit by the mouse
    def hitcheck(self, mouseh, mousev):
        """Return the list of anchor ids whose words contain the point."""
        self.mouseh = mouseh
        self.mousev = mousev
        self.anchorid = 0
        self.hits = []
        self.visit(self.__class__._hitcheckword, \
                   self.__class__._hitcheckanchor)
        return self.hits
    #
    def _hitcheckword(self, tuple, word, h, v, h2, v2, isfirst, islast):
        if self.anchorid > 0 and h <= self.mouseh <= h2 and \
                v <= self.mousev <= v2:
            self.hits.append(self.anchorid)
    #
    def _hitcheckanchor(self, tuple, word, h, v):
        self.anchorid = word
    #
    # Return whether the given anchor id is present
    def hasanchor(self, id):
        return id in self.words or -id in self.words
    #
    # Extract the raw text from the word list, substituting one space
    # for non-empty inter-word space, and terminating with '\n'
    def extract(self):
        """Return the paragraph's plain text, '\\n'-terminated."""
        text = ''
        for w in self.words:
            if type(w) is not Int:
                word = w[1]
                if w[3]: word = word + ' '
                text = text + word
        return text + '\n'
    #
    # Return which character position was hit by the mouse, as
    # an offset in the entire text as returned by extract().
    # Return None if the mouse was not in this paragraph
    def whereis(self, d, mouseh, mousev):
        """Map a mouse position to a character offset (or None)."""
        if mousev < self.top or mousev > self.bottom:
            return None
        self.mouseh = mouseh
        self.mousev = mousev
        self.lastfont = None
        self.charcount = 0
        try:
            self.d = d
            return self.visit(self.__class__._whereisword, \
                              self.__class__._whereisanchor)
        finally:
            self.d = None
    #
    def _whereisword(self, tuple, word, h1, v1, h2, v2, isfirst, islast):
        fo, te, wi, sp, st, as, de = word
        if fo is not None: self.lastfont = fo
        h = h1
        # Extend the first/last word's hit box to the line edges.
        if isfirst: h1 = 0
        if islast: h2 = 999999
        if not (v1 <= self.mousev <= v2 and h1 <= self.mouseh <= h2):
            self.charcount = self.charcount + len(te) + (sp > 0)
            return
        if self.lastfont is not None:
            self.d.setfont(self.lastfont)
        # Walk the characters, snapping to the nearer character boundary.
        cc = 0
        for c in te:
            cw = self.d.textwidth(c)
            if self.mouseh <= h + cw/2:
                return self.charcount + cc
            cc = cc+1
            h = h+cw
        self.charcount = self.charcount + cc
        if self.mouseh <= (h+h2) / 2:
            return self.charcount
        else:
            return self.charcount + 1
    #
    def _whereisanchor(self, tuple, word, h, v):
        pass
    #
    # Return screen position corresponding to position in paragraph.
    # Return tuple (h, vtop, vbaseline, vbottom).
    # This is more or less the inverse of whereis()
    def screenpos(self, d, pos):
        """Map a character offset to (h, vtop, vbaseline, vbottom)."""
        if pos < 0:
            ascent, descent = self.lines[0][5:7]
            return self.left, self.top, self.top + ascent, \
                    self.top + ascent + descent
        self.pos = pos
        self.lastfont = None
        try:
            self.d = d
            ok = self.visit(self.__class__._screenposword, \
                            self.__class__._screenposanchor)
        finally:
            self.d = None
        if ok is None:
            # Position past the end: report the paragraph's bottom-right.
            ascent, descent = self.lines[-1][5:7]
            ok = self.right, self.bottom - ascent - descent, \
                    self.bottom - descent, self.bottom
        return ok
    #
    def _screenposword(self, tuple, word, h1, v1, h2, v2, isfirst, islast):
        fo, te, wi, sp, st, as, de = word
        if fo is not None: self.lastfont = fo
        cc = len(te) + (sp > 0)
        if self.pos > cc:
            self.pos = self.pos - cc
            return
        if self.pos < cc:
            self.d.setfont(self.lastfont)
            h = h1 + self.d.textwidth(te[:self.pos])
        else:
            h = h2
        ascent, descent = tuple[5:7]
        return h, v1, v1+ascent, v2
    #
    def _screenposanchor(self, tuple, word, h, v):
        pass
    #
    # Invert the stretch of text between pos1 and pos2.
    # If pos1 is None, the beginning is implied;
    # if pos2 is None, the end is implied.
    # Undoes its own effect when called again with the same arguments
    def invert(self, d, pos1, pos2):
        """Toggle highlight of the text between character offsets pos1 and
        pos2 (None means paragraph start / end respectively)."""
        if pos1 is None:
            pos1 = self.left, self.top, self.top, self.top
        else:
            pos1 = self.screenpos(d, pos1)
        if pos2 is None:
            pos2 = self.right, self.bottom,self.bottom,self.bottom
        else:
            pos2 = self.screenpos(d, pos2)
        h1, top1, baseline1, bottom1 = pos1
        h2, top2, baseline2, bottom2 = pos2
        if bottom1 <= top2:
            # The endpoints are on different lines: invert the tail of the
            # first line, the whole lines in between, then the final span.
            d.invert((h1, top1), (self.right, bottom1))
            h1 = self.left
            if bottom1 < top2:
                d.invert((h1, bottom1), (self.right, top2))
            top1, bottom1 = top2, bottom2
        d.invert((h1, top1), (h2, bottom2))
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/addpack.py b/depot_tools/release/win/python_24/Lib/lib-old/addpack.py
new file mode 100644
index 0000000..2fb2601
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/addpack.py
@@ -0,0 +1,67 @@
+# This module provides standard support for "packages".
+#
+# The idea is that large groups of related modules can be placed in
+# their own subdirectory, which can be added to the Python search path
+# in a relatively easy way.
+#
+# The current version takes a package name and searches the Python
+# search path for a directory by that name, and if found adds it to
+# the module search path (sys.path).  It maintains a list of packages
+# that have already been added so adding the same package many times
+# is OK.
+#
+# It is intended to be used in a fairly stylized manner: each module
+# that wants to use a particular package, say 'Foo', is supposed to
+# contain the following code:
+#
+#   from addpack import addpack
+#   addpack('Foo')
+#   <import modules from package Foo>
+#
+# Additional arguments, when present, provide additional places where
+# to look for the package before trying sys.path (these may be either
+# strings or lists/tuples of strings).  Also, if the package name is a
+# full pathname, first the last component is tried in the usual way,
+# then the full pathname is tried last.  If the package name is a
+# *relative* pathname (UNIX: contains a slash but doesn't start with
+# one), then nothing special is done.  The packages "/foo/bar/bletch"
+# and "bletch" are considered the same, but unrelated to "bar/bletch".
+#
+# If the algorithm finds more than one suitable subdirectory, all are
+# added to the search path -- this makes it possible to override part
+# of a package.  The same path will not be added more than once.
+#
+# If no directory is found, ImportError is raised.
+
+_packs = {}                             # {pack: [pathname, ...], ...}
+
+def addpack(pack, *locations):
+    """Add package 'pack' to sys.path, trying 'locations' then sys.path.
+
+    Adding the same package more than once is a no-op.
+    Raises ImportError if no directory for the package is found.
+    """
+    import os
+    if os.path.isabs(pack):
+        base = os.path.basename(pack)
+    else:
+        base = pack
+    if _packs.has_key(base):
+        return
+    import sys
+    path = []
+    # Collect every matching subdirectory, extra locations first.
+    for loc in _flatten(locations) + sys.path:
+        fn = os.path.join(loc, base)
+        if fn not in path and os.path.isdir(fn):
+            path.append(fn)
+    # A full pathname is tried last, as described in the module header.
+    if pack != base and pack not in path and os.path.isdir(pack):
+        path.append(pack)
+    if not path: raise ImportError, 'package ' + pack + ' not found'
+    _packs[base] = path
+    for fn in path:
+        if fn not in sys.path:
+            sys.path.append(fn)
+
+def _flatten(locations):
+    # Flatten a nested structure of strings and lists/tuples of
+    # strings into a flat list of strings.
+    locs = []
+    for loc in locations:
+        if type(loc) == type(''):
+            locs.append(loc)
+        else:
+            locs = locs + _flatten(loc)
+    return locs
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/cmp.py b/depot_tools/release/win/python_24/Lib/lib-old/cmp.py
new file mode 100644
index 0000000..1146a25
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/cmp.py
@@ -0,0 +1,63 @@
+"""Efficiently compare files, boolean outcome only (equal / not equal).
+
+Tricks (used in this order):
+    - Files with identical type, size & mtime are assumed to be clones
+    - Files with different type or size cannot be identical
+    - We keep a cache of outcomes of earlier comparisons
+    - We don't fork a process to run 'cmp' but read the files ourselves
+"""
+
+import os
+
+cache = {}
+
+def cmp(f1, f2, shallow=1):
+    """Compare two files, use the cache if possible.
+    Return 1 for identical files, 0 for different.
+    Raise exceptions if either file could not be statted, read, etc."""
+    s1, s2 = sig(os.stat(f1)), sig(os.stat(f2))
+    if s1[0] != 8 or s2[0] != 8:
+        # 8 is the type code of a regular file (S_IFREG >> 12).
+        # Either is not a plain file -- always report as different
+        return 0
+    if shallow and s1 == s2:
+        # type, size & mtime match -- report same
+        return 1
+    if s1[:2] != s2[:2]: # Types or sizes differ, don't bother
+        # types or sizes differ -- report different
+        return 0
+    # same type and size -- look in the cache
+    key = (f1, f2)
+    try:
+        cs1, cs2, outcome = cache[key]
+        # cache hit
+        if s1 == cs1 and s2 == cs2:
+            # cached signatures match
+            return outcome
+        # stale cached signature(s)
+    except KeyError:
+        # cache miss
+        pass
+    # really compare
+    outcome = do_cmp(f1, f2)
+    cache[key] = s1, s2, outcome
+    return outcome
+
+def sig(st):
+    """Return signature (i.e., type, size, mtime) from raw stat data
+    0-5: st_mode, st_ino, st_dev, st_nlink, st_uid, st_gid
+    6-9: st_size, st_atime, st_mtime, st_ctime"""
+    # Dividing the mode by 4096 (== >> 12) keeps only the file-type
+    # bits (Python 2 integer division).
+    type = st[0] / 4096
+    size = st[6]
+    mtime = st[8]
+    return type, size, mtime
+
+def do_cmp(f1, f2):
+    """Compare two files, really."""
+    bufsize = 8*1024 # Could be tuned
+    # NOTE(review): the file objects are never closed explicitly;
+    # CPython reclaims them when they go out of scope.
+    fp1 = open(f1, 'rb')
+    fp2 = open(f2, 'rb')
+    # Compare chunk by chunk; equal EOF on both means identical.
+    while 1:
+        b1 = fp1.read(bufsize)
+        b2 = fp2.read(bufsize)
+        if b1 != b2: return 0
+        if not b1: return 1
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/cmpcache.py b/depot_tools/release/win/python_24/Lib/lib-old/cmpcache.py
new file mode 100644
index 0000000..11540f8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/cmpcache.py
@@ -0,0 +1,64 @@
+"""Efficiently compare files, boolean outcome only (equal / not equal).
+
+Tricks (used in this order):
+    - Use the statcache module to avoid statting files more than once
+    - Files with identical type, size & mtime are assumed to be clones
+    - Files with different type or size cannot be identical
+    - We keep a cache of outcomes of earlier comparisons
+    - We don't fork a process to run 'cmp' but read the files ourselves
+"""
+
+import os
+from stat import *
+import statcache
+
+
+# The cache.
+#
+cache = {}
+
+
+def cmp(f1, f2, shallow=1):
+    """Compare two files, use the cache if possible.
+    May raise os.error if a stat or open of either fails.
+    Return 1 for identical files, 0 for different.
+    Raise exceptions if either file could not be statted, read, etc."""
+    s1, s2 = sig(statcache.stat(f1)), sig(statcache.stat(f2))
+    if not S_ISREG(s1[0]) or not S_ISREG(s2[0]):
+        # Either is not a plain file -- always report as different
+        return 0
+    if shallow and s1 == s2:
+        # type, size & mtime match -- report same
+        return 1
+    if s1[:2] != s2[:2]: # Types or sizes differ, don't bother
+        # types or sizes differ -- report different
+        return 0
+    # same type and size -- look in the cache
+    key = f1 + ' ' + f2
+    if cache.has_key(key):
+        cs1, cs2, outcome = cache[key]
+        # cache hit
+        if s1 == cs1 and s2 == cs2:
+            # cached signatures match
+            return outcome
+        # stale cached signature(s)
+    # really compare
+    outcome = do_cmp(f1, f2)
+    cache[key] = s1, s2, outcome
+    return outcome
+
+def sig(st):
+    """Return signature (i.e., type, size, mtime) from raw stat data."""
+    # S_IFMT masks the mode down to just the file-type bits.
+    return S_IFMT(st[ST_MODE]), st[ST_SIZE], st[ST_MTIME]
+
+def do_cmp(f1, f2):
+    """Compare two files, really."""
+    #print '    cmp', f1, f2 # XXX remove when debugged
+    bufsize = 8*1024 # Could be tuned
+    # NOTE(review): the file objects are never closed explicitly;
+    # CPython reclaims them when they go out of scope.
+    fp1 = open(f1, 'rb')
+    fp2 = open(f2, 'rb')
+    # Compare chunk by chunk; equal EOF on both means identical.
+    while 1:
+        b1 = fp1.read(bufsize)
+        b2 = fp2.read(bufsize)
+        if b1 != b2: return 0
+        if not b1: return 1
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/codehack.py b/depot_tools/release/win/python_24/Lib/lib-old/codehack.py
new file mode 100644
index 0000000..0b5e3a17
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/codehack.py
@@ -0,0 +1,81 @@
+# A subroutine for extracting a function name from a code object
+# (with cache)
+
+import sys
+from stat import *
+import string
+import os
+import linecache
+
+# XXX The functions getcodename() and getfuncname() are now obsolete
+# XXX as code and function objects now have a name attribute --
+# XXX co.co_name and f.func_name.
+# XXX getlineno() is now also obsolete because of the new attribute
+# XXX of code objects, co.co_firstlineno.
+
+# Extract the function or class name from a code object.
+# This is a bit of a hack, since a code object doesn't contain
+# the name directly.  So what do we do:
+# - get the filename (which *is* in the code object)
+# - look in the code string to find the first SET_LINENO instruction
+#   (this must be the first instruction)
+# - get the line from the file
+# - if the line starts with 'class' or 'def' (after possible whitespace),
+#   extract the following identifier
+#
+# This breaks apart when the function was read from <stdin>
+# or constructed by exec(), when the file is not accessible,
+# and also when the file has been modified or when a line is
+# continued with a backslash before the function or class name.
+#
+# Because this is a pretty expensive hack, a cache is kept.
+
+SET_LINENO = 127 # The opcode (see "opcode.h" in the Python source)
+identchars = string.ascii_letters + string.digits + '_' # Identifier characters
+
+_namecache = {} # The cache
+
+def getcodename(co):
+    """Return the function or class name for code object 'co'.
+
+    Prefers the modern co_name attribute; otherwise falls back to
+    reading the source line named by the leading SET_LINENO opcode.
+    Results of the fallback are cached in _namecache; '' is returned
+    when no name can be determined.
+    """
+    try:
+        return co.co_name
+    except AttributeError:
+        pass
+    key = `co` # arbitrary but uniquely identifying string
+    if _namecache.has_key(key): return _namecache[key]
+    filename = co.co_filename
+    code = co.co_code
+    name = ''
+    if ord(code[0]) == SET_LINENO:
+        # Decode the little-endian 16-bit operand: the line number.
+        lineno = ord(code[1]) | ord(code[2]) << 8
+        line = linecache.getline(filename, lineno)
+        words = line.split()
+        if len(words) >= 2 and words[0] in ('def', 'class'):
+            name = words[1]
+            # Trim trailing junk such as '(' or ':' off the identifier.
+            for i in range(len(name)):
+                if name[i] not in identchars:
+                    name = name[:i]
+                    break
+    _namecache[key] = name
+    return name
+
+# Use the above routine to find a function's name.
+
+def getfuncname(func):
+    """Return the name of function 'func' (via func_name when present)."""
+    try:
+        return func.func_name
+    except AttributeError:
+        pass
+    return getcodename(func.func_code)
+
+# A part of the above code to extract just the line number from a code object.
+
+def getlineno(co):
+    """Return the first line number of code object 'co', or -1."""
+    try:
+        return co.co_firstlineno
+    except AttributeError:
+        pass
+    code = co.co_code
+    if ord(code[0]) == SET_LINENO:
+        # Little-endian 16-bit operand of the leading SET_LINENO.
+        return ord(code[1]) | ord(code[2]) << 8
+    else:
+        return -1
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/dircmp.py b/depot_tools/release/win/python_24/Lib/lib-old/dircmp.py
new file mode 100644
index 0000000..1e7bf2a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/dircmp.py
@@ -0,0 +1,202 @@
+"""A class to build directory diff tools on."""
+
+import os
+
+import dircache
+import cmpcache
+import statcache
+from stat import *
+
+class dircmp:
+    """Directory comparison class."""
+
+    def new(self, a, b):
+        """Initialize.  Return self so calls can be chained."""
+        self.a = a
+        self.b = b
+        # Properties that caller may change before calling self.run():
+        self.hide = [os.curdir, os.pardir] # Names never to be shown
+        self.ignore = ['RCS', 'tags'] # Names ignored in comparison
+
+        return self
+
+    def run(self):
+        """Compare everything except common subdirectories."""
+        # Note: 'filter' is this module's helper, not the builtin.
+        self.a_list = filter(dircache.listdir(self.a), self.hide)
+        self.b_list = filter(dircache.listdir(self.b), self.hide)
+        self.a_list.sort()
+        self.b_list.sort()
+        self.phase1()
+        self.phase2()
+        self.phase3()
+
+    def phase1(self):
+        """Compute common names."""
+        self.a_only = []
+        self.common = []
+        for x in self.a_list:
+            if x in self.b_list:
+                self.common.append(x)
+            else:
+                self.a_only.append(x)
+
+        self.b_only = []
+        for x in self.b_list:
+            if x not in self.common:
+                self.b_only.append(x)
+
+    def phase2(self):
+        """Distinguish files, directories, funnies."""
+        self.common_dirs = []
+        self.common_files = []
+        self.common_funny = []
+
+        for x in self.common:
+            a_path = os.path.join(self.a, x)
+            b_path = os.path.join(self.b, x)
+
+            # 'funny' covers anything that can't be statted or whose
+            # file types differ between the two sides.
+            ok = 1
+            try:
+                a_stat = statcache.stat(a_path)
+            except os.error, why:
+                # print 'Can\'t stat', a_path, ':', why[1]
+                ok = 0
+            try:
+                b_stat = statcache.stat(b_path)
+            except os.error, why:
+                # print 'Can\'t stat', b_path, ':', why[1]
+                ok = 0
+
+            if ok:
+                a_type = S_IFMT(a_stat[ST_MODE])
+                b_type = S_IFMT(b_stat[ST_MODE])
+                if a_type != b_type:
+                    self.common_funny.append(x)
+                elif S_ISDIR(a_type):
+                    self.common_dirs.append(x)
+                elif S_ISREG(a_type):
+                    self.common_files.append(x)
+                else:
+                    self.common_funny.append(x)
+            else:
+                self.common_funny.append(x)
+
+    def phase3(self):
+        """Find out differences between common files."""
+        xx = cmpfiles(self.a, self.b, self.common_files)
+        self.same_files, self.diff_files, self.funny_files = xx
+
+    def phase4(self):
+        """Find out differences between common subdirectories.
+        A new dircmp object is created for each common subdirectory,
+        these are stored in a dictionary indexed by filename.
+        The hide and ignore properties are inherited from the parent."""
+        self.subdirs = {}
+        for x in self.common_dirs:
+            a_x = os.path.join(self.a, x)
+            b_x = os.path.join(self.b, x)
+            self.subdirs[x] = newdd = dircmp().new(a_x, b_x)
+            newdd.hide = self.hide
+            newdd.ignore = self.ignore
+            newdd.run()
+
+    def phase4_closure(self):
+        """Recursively call phase4() on subdirectories."""
+        self.phase4()
+        for x in self.subdirs.keys():
+            self.subdirs[x].phase4_closure()
+
+    def report(self):
+        """Print a report on the differences between a and b."""
+        # Assume that phases 1 to 3 have been executed
+        # Output format is purposely lousy
+        print 'diff', self.a, self.b
+        if self.a_only:
+            print 'Only in', self.a, ':', self.a_only
+        if self.b_only:
+            print 'Only in', self.b, ':', self.b_only
+        if self.same_files:
+            print 'Identical files :', self.same_files
+        if self.diff_files:
+            print 'Differing files :', self.diff_files
+        if self.funny_files:
+            print 'Trouble with common files :', self.funny_files
+        if self.common_dirs:
+            print 'Common subdirectories :', self.common_dirs
+        if self.common_funny:
+            print 'Common funny cases :', self.common_funny
+
+    def report_closure(self):
+        """Print reports on self and on subdirs.
+        If phase 4 hasn't been done, no subdir reports are printed."""
+        self.report()
+        try:
+            x = self.subdirs
+        except AttributeError:
+            return # No subdirectories computed
+        for x in self.subdirs.keys():
+            print
+            self.subdirs[x].report_closure()
+
+    def report_phase4_closure(self):
+        """Report and do phase 4 recursively."""
+        self.report()
+        self.phase4()
+        for x in self.subdirs.keys():
+            print
+            self.subdirs[x].report_phase4_closure()
+
+
+def cmpfiles(a, b, common):
+    """Compare common files in two directories.
+    Return:
+        - files that compare equal
+        - files that compare different
+        - funny cases (can't stat etc.)"""
+
+    res = ([], [], [])
+    # cmp() returns 0, 1 or 2, which indexes the matching result list.
+    for x in common:
+        res[cmp(os.path.join(a, x), os.path.join(b, x))].append(x)
+    return res
+
+
+def cmp(a, b):
+    """Compare two files.
+    Return:
+        0 for equal
+        1 for different
+        2 for funny cases (can't stat, etc.)"""
+
+    # Note: shadows the builtin cmp(); delegates to cmpcache for speed.
+    try:
+        if cmpcache.cmp(a, b): return 0
+        return 1
+    except os.error:
+        return 2
+
+
+def filter(list, skip):
+    """Return a copy with items that occur in skip removed."""
+
+    # Note: shadows the builtin filter(), with different semantics.
+    result = []
+    for item in list:
+        if item not in skip: result.append(item)
+    return result
+
+
+def demo():
+    """Demonstration and testing."""
+
+    import sys
+    import getopt
+    options, args = getopt.getopt(sys.argv[1:], 'r')
+    if len(args) != 2:
+        raise getopt.error, 'need exactly two args'
+    dd = dircmp().new(args[0], args[1])
+    dd.run()
+    # -r requests a recursive (phase 4) report.
+    if ('-r', '') in options:
+        dd.report_phase4_closure()
+    else:
+        dd.report()
+
+if __name__ == "__main__":
+    demo()
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/dump.py b/depot_tools/release/win/python_24/Lib/lib-old/dump.py
new file mode 100644
index 0000000..60bdba8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/dump.py
@@ -0,0 +1,63 @@
+# Module 'dump'
+#
+# Print python code that reconstructs a variable.
+# This only works in certain cases.
+#
+# It works fine for:
+# - ints and floats (except NaNs and other weird things)
+# - strings
+# - compounds and lists, provided it works for all their elements
+# - imported modules, provided their name is the module name
+#
+# It works for top-level dictionaries but not for dictionaries
+# contained in other objects (could be made to work with some hassle
+# though).
+#
+# It does not work for functions (all sorts), classes, class objects,
+# windows, files etc.
+#
+# Finally, objects referenced by more than one name or contained in more
+# than one other object lose their sharing property (this is bad for
+# strings used as exception identifiers, for instance).
+
+# Dump a whole symbol table
+#
+def dumpsymtab(dict):
+    # Dump every (name, value) pair in the symbol table 'dict'.
+    for key in dict.keys():
+        dumpvar(key, dict[key])
+
+# Dump a single variable
+#
+def dumpvar(name, x):
+    # Print python code that reconstructs variable 'name' with value
+    # 'x'.  Lines for values input() could not read back are prefixed
+    # with '#'.
+    import sys
+    t = type(x)
+    if t == type({}):
+        print name, '= {}'
+        for key in x.keys():
+            item = x[key]
+            if not printable(item):
+                print '#',
+            print name, '[', `key`, '] =', `item`
+    elif t in (type(''), type(0), type(0.0), type([]), type(())):
+        if not printable(x):
+            print '#',
+        print name, '=', `x`
+    elif t == type(sys):
+        # Modules are reconstructed with an import statement.
+        print 'import', name, '#', x
+    else:
+        print '#', name, '=', x
+
+# check if a value is printable in a way that can be read back with input()
+#
+def printable(x):
+    # Return 1 if x can be written out and read back with input().
+    t = type(x)
+    if t in (type(''), type(0), type(0.0)):
+        return 1
+    if t in (type([]), type(())):
+        # Containers are printable only if all their items are.
+        for item in x:
+            if not printable(item):
+                return 0
+        return 1
+    if x == {}:
+        return 1
+    return 0
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/find.py b/depot_tools/release/win/python_24/Lib/lib-old/find.py
new file mode 100644
index 0000000..39ad7717
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/find.py
@@ -0,0 +1,26 @@
+import fnmatch
+import os
+
+_debug = 0
+
+_prune = ['(*)']
+
+def find(pattern, dir = os.curdir):
+    # Return paths under 'dir' whose basename matches fnmatch
+    # 'pattern', sorted per directory.  Directories matching any
+    # pattern in _prune are not descended into; symlinked directories
+    # are never followed.
+    list = []
+    names = os.listdir(dir)
+    names.sort()
+    for name in names:
+        if name in (os.curdir, os.pardir):
+            continue
+        fullname = os.path.join(dir, name)
+        if fnmatch.fnmatch(name, pattern):
+            list.append(fullname)
+        if os.path.isdir(fullname) and not os.path.islink(fullname):
+            for p in _prune:
+                if fnmatch.fnmatch(name, p):
+                    if _debug: print "skip", `fullname`
+                    break
+            else:
+                # for/else: no prune pattern matched -- recurse.
+                if _debug: print "descend into", `fullname`
+                list = list + find(pattern, fullname)
+    return list
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/fmt.py b/depot_tools/release/win/python_24/Lib/lib-old/fmt.py
new file mode 100644
index 0000000..997d37a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/fmt.py
@@ -0,0 +1,623 @@
+# Text formatting abstractions
+# Note -- this module is obsolete, it's too slow anyway
+
+
+import string
+import Para
+
+
+# A formatter back-end object has one method that is called by the formatter:
+# addpara(p), where p is a paragraph object.  For example:
+
+
+# Formatter back-end to do nothing at all with the paragraphs
+class NullBackEnd:
+    # Discards all paragraphs and anchors; also the base class
+    # providing the back-end interface.
+    #
+    def __init__(self):
+        pass
+    #
+    def addpara(self, p):
+        pass
+    #
+    def bgn_anchor(self, id):
+        pass
+    #
+    def end_anchor(self, id):
+        pass
+
+
+# Formatter back-end to collect the paragraphs in a list
+class SavingBackEnd(NullBackEnd):
+    #
+    def __init__(self):
+        self.paralist = []
+    #
+    def addpara(self, p):
+        self.paralist.append(p)
+    #
+    def hitcheck(self, h, v):
+        # Return the list of anchor ids hit at point (h, v).
+        hits = []
+        for p in self.paralist:
+            if p.top <= v <= p.bottom:
+                for id in p.hitcheck(h, v):
+                    if id not in hits:
+                        hits.append(id)
+        return hits
+    #
+    def extract(self):
+        # Return the concatenated text of all paragraphs.
+        text = ''
+        for p in self.paralist:
+            text = text + (p.extract())
+        return text
+    #
+    def extractpart(self, long1, long2):
+        # Extract the text between two (paragraph-index, offset)
+        # positions; the positions may be given in either order.
+        if long1 > long2: long1, long2 = long2, long1
+        para1, pos1 = long1
+        para2, pos2 = long2
+        text = ''
+        while para1 < para2:
+            ptext = self.paralist[para1].extract()
+            text = text + ptext[pos1:]
+            pos1 = 0
+            para1 = para1 + 1
+        ptext = self.paralist[para2].extract()
+        return text + ptext[pos1:pos2]
+    #
+    def whereis(self, d, h, v):
+        # Map point (h, v) to (paragraph-index, position), or None.
+        total = 0
+        for i in range(len(self.paralist)):
+            p = self.paralist[i]
+            result = p.whereis(d, h, v)
+            if result is not None:
+                return i, result
+        return None
+    #
+    def roundtowords(self, long1, long2):
+        # Widen the selection (long1, long2) to word boundaries.
+        i, offset = long1
+        text = self.paralist[i].extract()
+        while offset > 0 and text[offset-1] != ' ': offset = offset-1
+        long1 = i, offset
+        #
+        i, offset = long2
+        text = self.paralist[i].extract()
+        n = len(text)
+        while offset < n-1 and text[offset] != ' ': offset = offset+1
+        long2 = i, offset
+        #
+        return long1, long2
+    #
+    def roundtoparagraphs(self, long1, long2):
+        # Widen the selection (long1, long2) to paragraph boundaries.
+        long1 = long1[0], 0
+        long2 = long2[0], len(self.paralist[long2[0]].extract())
+        return long1, long2
+
+
+# Formatter back-end to send the text directly to the drawing object
+class WritingBackEnd(NullBackEnd):
+    #
+    def __init__(self, d, width):
+        self.d = d
+        self.width = width
+        self.lineno = 0
+    #
+    def addpara(self, p):
+        # Render immediately; render() returns the next line number.
+        self.lineno = p.render(self.d, 0, self.lineno, self.width)
+
+
+# A formatter receives a stream of formatting instructions and assembles
+# these into a stream of paragraphs on to a back-end.  The assembly is
+# parametrized by a text measurement object, which must match the output
+# operations of the back-end.  The back-end is responsible for splitting
+# paragraphs up in lines of a given maximum width.  (This is done because
+# in a windowing environment, when the window size changes, there is no
+# need to redo the assembly into paragraphs, but the splitting into lines
+# must be done taking the new window size into account.)
+
+
+# Formatter base class.  Initialize it with a text measurement object,
+# which is used for text measurements, and a back-end object,
+# which receives the completed paragraphs.  The formatting methods are:
+# setfont(font)
+# setleftindent(nspaces)
+# setjust(type) where type is 'l', 'c', 'r', or 'lr'
+# flush()
+# vspace(nlines)
+# needvspace(nlines)
+# addword(word, nspaces)
+class BaseFormatter:
+    #
+    def __init__(self, d, b):
+        # Drawing object used for text measurements
+        self.d = d
+        #
+        # BackEnd object receiving completed paragraphs
+        self.b = b
+        #
+        # Parameters of the formatting model
+        self.leftindent = 0
+        self.just = 'l'
+        self.font = None
+        self.blanklines = 0
+        #
+        # Parameters derived from the current font
+        self.space = d.textwidth(' ')
+        self.line = d.lineheight()
+        self.ascent = d.baseline()
+        self.descent = self.line - self.ascent
+        #
+        # Parameter derived from the default font
+        self.n_space = self.space
+        #
+        # Current paragraph being built
+        self.para = None
+        self.nospace = 1
+        #
+        # Font to set on the next word
+        self.nextfont = None
+    #
+    def newpara(self):
+        # Factory hook so subclasses can substitute a Para subclass.
+        return Para.Para()
+    #
+    def setfont(self, font):
+        if font is None: return
+        self.font = self.nextfont = font
+        d = self.d
+        d.setfont(font)
+        # Re-derive the font-dependent metrics.
+        self.space = d.textwidth(' ')
+        self.line = d.lineheight()
+        self.ascent = d.baseline()
+        self.descent = self.line - self.ascent
+    #
+    def setleftindent(self, nspaces):
+        self.leftindent = int(self.n_space * nspaces)
+        if self.para:
+            hang = self.leftindent - self.para.indent_left
+            # If the current paragraph still fits within the new
+            # indent, turn it into a hanging tag instead of flushing.
+            if hang > 0 and self.para.getlength() <= hang:
+                self.para.makehangingtag(hang)
+                self.nospace = 1
+            else:
+                self.flush()
+    #
+    def setrightindent(self, nspaces):
+        self.rightindent = int(self.n_space * nspaces)
+        if self.para:
+            self.para.indent_right = self.rightindent
+            self.flush()
+    #
+    def setjust(self, just):
+        self.just = just
+        if self.para:
+            self.para.just = self.just
+    #
+    def flush(self):
+        # Hand the current paragraph (if any) to the back-end.
+        if self.para:
+            self.b.addpara(self.para)
+            self.para = None
+            if self.font is not None:
+                self.d.setfont(self.font)
+        self.nospace = 1
+    #
+    def vspace(self, nlines):
+        self.flush()
+        if nlines > 0:
+            # Emit an empty paragraph whose height is nlines lines.
+            self.para = self.newpara()
+            tuple = None, '', 0, 0, 0, int(nlines*self.line), 0
+            self.para.words.append(tuple)
+            self.flush()
+            self.blanklines = self.blanklines + nlines
+    #
+    def needvspace(self, nlines):
+        self.flush() # Just to be sure
+        if nlines > self.blanklines:
+            self.vspace(nlines - self.blanklines)
+    #
+    def addword(self, text, space):
+        if self.nospace and not text:
+            return
+        self.nospace = 0
+        self.blanklines = 0
+        if not self.para:
+            self.para = self.newpara()
+            self.para.indent_left = self.leftindent
+            self.para.just = self.just
+            self.nextfont = self.font
+        space = int(space * self.space)
+        # Word tuple: (font, text, width, space, space, ascent,
+        # descent); the space field appears twice -- presumably
+        # nominal and stretchable widths, TODO confirm against Para.
+        self.para.words.append((self.nextfont, text,
+                self.d.textwidth(text), space, space,
+                self.ascent, self.descent))
+        self.nextfont = None
+    #
+    def bgn_anchor(self, id):
+        if not self.para:
+            # Force creation of a paragraph to attach the anchor to.
+            self.nospace = 0
+            self.addword('', 0)
+        self.para.bgn_anchor(id)
+    #
+    def end_anchor(self, id):
+        if not self.para:
+            self.nospace = 0
+            self.addword('', 0)
+        self.para.end_anchor(id)
+
+
+# Measuring object for measuring text as viewed on a tty
+class NullMeasurer:
+    #
+    def __init__(self):
+        pass
+    #
+    def setfont(self, font):
+        pass
+    #
+    def textwidth(self, text):
+        # One character is one unit of width on a tty.
+        return len(text)
+    #
+    def lineheight(self):
+        return 1
+    #
+    def baseline(self):
+        return 0
+
+
+# Drawing object for writing plain ASCII text to a file
+class FileWriter:
+    #
+    def __init__(self, fp):
+        self.fp = fp
+        # Current cursor position in the output stream.
+        self.lineno, self.colno = 0, 0
+    #
+    def setfont(self, font):
+        pass
+    #
+    def text(self, (h, v), str):
+        # Write 'str' at column h of line v (tuple parameter syntax
+        # is Python 2 only).  Newlines in 'str' are rejected.
+        if not str: return
+        if '\n' in str:
+            raise ValueError, 'can\'t write \\n'
+        # Move down to the requested line by writing newlines.
+        while self.lineno < v:
+            self.fp.write('\n')
+            self.colno, self.lineno = 0, self.lineno + 1
+        while self.lineno > v:
+            # XXX This should never happen...
+            self.fp.write('\033[A') # ANSI up arrow
+            self.lineno = self.lineno - 1
+        # Adjust the column with spaces or backspaces.
+        if self.colno < h:
+            self.fp.write(' ' * (h - self.colno))
+        elif self.colno > h:
+            self.fp.write('\b' * (self.colno - h))
+        self.colno = h
+        self.fp.write(str)
+        self.colno = h + len(str)
+
+
+# Formatting class to do nothing at all with the data
+class NullFormatter(BaseFormatter):
+    #
+    def __init__(self):
+        # Measure as a tty and discard all output.
+        d = NullMeasurer()
+        b = NullBackEnd()
+        BaseFormatter.__init__(self, d, b)
+
+
+# Formatting class to write directly to a file
+class WritingFormatter(BaseFormatter):
+    #
+    def __init__(self, fp, width):
+        # Measure as a tty, render as plain ASCII into file 'fp'.
+        dm = NullMeasurer()
+        dw = FileWriter(fp)
+        b = WritingBackEnd(dw, width)
+        BaseFormatter.__init__(self, dm, b)
+        # Start as if a blank line was just written, suppressing
+        # leading vertical space.
+        self.blanklines = 1
+    #
+    # Suppress multiple blank lines
+    def needvspace(self, nlines):
+        BaseFormatter.needvspace(self, min(1, nlines))
+
+
+# A "FunnyFormatter" writes ASCII text with a twist: *bold words*,
+# _italic text_ and _underlined words_, and `quoted text'.
+# It assumes that the fonts are 'r', 'i', 'b', 'u', 'q': (roman,
+# italic, bold, underline, quote).
+# Moreover, if the font is in upper case, the text is converted to
+# UPPER CASE.
+class FunnyFormatter(WritingFormatter):
+    #
+    def flush(self):
+        # Decorate the finished paragraph with font markers before
+        # the base class writes it out.
+        if self.para: finalize(self.para)
+        WritingFormatter.flush(self)
+
+
+# Surrounds *bold words* and _italic text_ in a paragraph with
+# appropriate markers, fixing the size (assuming these characters'
+# width is 1).
+# Opening/closing marker characters per font code (upper case means
+# the text is also upper-cased).
+openchar = \
+    {'b':'*', 'i':'_', 'u':'_', 'q':'`', 'B':'*', 'I':'_', 'U':'_', 'Q':'`'}
+closechar = \
+    {'b':'*', 'i':'_', 'u':'_', 'q':'\'', 'B':'*', 'I':'_', 'U':'_', 'Q':'\''}
+def finalize(para):
+    # Insert open/close markers around font changes in 'para',
+    # growing the affected words' widths accordingly, and upper-case
+    # the text of upper-case fonts.
+    oldfont = curfont = 'r'
+    # Sentinel forcing a final font change back to roman.
+    # NOTE(review): the sentinel is a 6-tuple while real words are
+    # 7-tuples; only the first 3 fields are read here, so it works.
+    para.words.append(('r', '', 0, 0, 0, 0)) # temporary, deleted at end
+    for i in range(len(para.words)):
+        fo, te, wi = para.words[i][:3]
+        if fo is not None: curfont = fo
+        if curfont != oldfont:
+            if closechar.has_key(oldfont):
+                # Append the close marker to the last non-empty word
+                # of the previous font run.
+                c = closechar[oldfont]
+                j = i-1
+                while j > 0 and para.words[j][1] == '': j = j-1
+                fo1, te1, wi1 = para.words[j][:3]
+                te1 = te1 + c
+                wi1 = wi1 + len(c)
+                para.words[j] = (fo1, te1, wi1) + \
+                        para.words[j][3:]
+            if openchar.has_key(curfont) and te:
+                # Prepend the open marker to this word.
+                c = openchar[curfont]
+                te = c + te
+                wi = len(c) + wi
+                para.words[i] = (fo, te, wi) + \
+                        para.words[i][3:]
+            if te: oldfont = curfont
+            else: oldfont = 'r'
+        if curfont in string.uppercase:
+            te = string.upper(te)
+            para.words[i] = (fo, te, wi) + para.words[i][3:]
+    del para.words[-1]
+
+
+# Formatter back-end to draw the text in a window.
+# This has an option to draw while the paragraphs are being added,
+# to minimize the delay before the user sees anything.
+# This manages the entire "document" of the window.
+class StdwinBackEnd(SavingBackEnd):
+    #
+    def __init__(self, window, drawnow):
+        # drawnow: if true, render paragraphs as they are added
+        # instead of only laying them out.
+        self.window = window
+        self.drawnow = drawnow
+        self.width = window.getwinsize()[0]
+        self.selection = None
+        self.height = 0
+        window.setorigin(0, 0)
+        window.setdocsize(0, 0)
+        self.d = window.begindrawing()
+        SavingBackEnd.__init__(self)
+    #
+    def finish(self):
+        self.d.close()
+        self.d = None
+        self.window.setdocsize(0, self.height)
+    #
+    def addpara(self, p):
+        self.paralist.append(p)
+        if self.drawnow:
+            self.height = \
+                    p.render(self.d, 0, self.height, self.width)
+        else:
+            p.layout(self.width)
+            p.left = 0
+            p.top = self.height
+            p.right = self.width
+            p.bottom = self.height + p.height
+            self.height = p.bottom
+    #
+    def resize(self):
+        self.window.change((0, 0), (self.width, self.height))
+        self.width = self.window.getwinsize()[0]
+        self.height = 0
+        for p in self.paralist:
+            p.layout(self.width)
+            p.left = 0
+            p.top = self.height
+            p.right = self.width
+            p.bottom = self.height + p.height
+            self.height = p.bottom
+        self.window.change((0, 0), (self.width, self.height))
+        self.window.setdocsize(0, self.height)
+    #
+    def redraw(self, area):
+        d = self.window.begindrawing()
+        (left, top), (right, bottom) = area
+        d.erase(area)
+        d.cliprect(area)
+        for p in self.paralist:
+            if top < p.bottom and p.top < bottom:
+                v = p.render(d, p.left, p.top, p.right)
+        if self.selection:
+            self.invert(d, self.selection)
+        d.close()
+    #
+    def setselection(self, new):
+        if new:
+            long1, long2 = new
+            pos1 = long1[:3]
+            pos2 = long2[:3]
+            new = pos1, pos2
+        if new != self.selection:
+            d = self.window.begindrawing()
+            if self.selection:
+                self.invert(d, self.selection)
+            if new:
+                self.invert(d, new)
+            d.close()
+            self.selection = new
+    #
+    def getselection(self):
+        return self.selection
+    #
+    def extractselection(self):
+        if self.selection:
+            a, b = self.selection
+            return self.extractpart(a, b)
+        else:
+            return None
+    #
+    def invert(self, d, region):
+        long1, long2 = region
+        if long1 > long2: long1, long2 = long2, long1
+        para1, pos1 = long1
+        para2, pos2 = long2
+        while para1 < para2:
+            self.paralist[para1].invert(d, pos1, None)
+            pos1 = None
+            para1 = para1 + 1
+        self.paralist[para2].invert(d, pos1, pos2)
+    #
+    def search(self, prog):
+        import re, string
+        if type(prog) is type(''):
+            prog = re.compile(string.lower(prog))
+        if self.selection:
+            iold = self.selection[0][0]
+        else:
+            iold = -1
+        hit = None
+        for i in range(len(self.paralist)):
+            if i == iold or i < iold and hit:
+                continue
+            p = self.paralist[i]
+            text = string.lower(p.extract())
+            match = prog.search(text)
+            if match:
+                a, b = match.group(0)
+                long1 = i, a
+                long2 = i, b
+                hit = long1, long2
+                if i > iold:
+                    break
+        if hit:
+            self.setselection(hit)
+            i = hit[0][0]
+            p = self.paralist[i]
+            self.window.show((p.left, p.top), (p.right, p.bottom))
+            return 1
+        else:
+            return 0
+    #
+    def showanchor(self, id):
+        for i in range(len(self.paralist)):
+            p = self.paralist[i]
+            if p.hasanchor(id):
+                long1 = i, 0
+                long2 = i, len(p.extract())
+                hit = long1, long2
+                self.setselection(hit)
+                self.window.show(
+                        (p.left, p.top), (p.right, p.bottom))
+                break
+
+
+# GL extensions
+
class GLFontCache:
    """Cache of scaled GL font handles, keyed by 'Name size' strings.

    Tracks the currently selected font (fontkey / fonthandle /
    fontinfo) and avoids re-loading or re-scaling fonts already seen.
    """
    #
    def __init__(self):
        self.reset()
        self.setfont('')
    #
    def reset(self):
        # Forget the current font and drop every cached handle.
        self.fontkey = None
        self.fonthandle = None
        self.fontinfo = None
        self.fontcache = {}
    #
    def close(self):
        self.reset()
    #
    def setfont(self, fontkey):
        """Select the font named by fontkey.

        '' means the default 'Times-Roman 12'; a bare font name gets a
        default size of 12 appended.  No-op if the font is unchanged.
        """
        if fontkey == '':
            fontkey = 'Times-Roman 12'
        elif ' ' not in fontkey:
            fontkey = fontkey + ' 12'
        if fontkey == self.fontkey:
            return
        if self.fontcache.has_key(fontkey):
            handle = self.fontcache[fontkey]
        else:
            import string
            i = string.index(fontkey, ' ')
            name, sizestr = fontkey[:i], fontkey[i:]
            # sizestr may carry leading whitespace; eval() tolerates it.
            size = eval(sizestr)
            # 'Name 1' caches the unscaled handle from fm.findfont().
            key1 = name + ' 1'
            key = name + ' ' + `size`
            # NB key may differ from fontkey!
            if self.fontcache.has_key(key):
                handle = self.fontcache[key]
            else:
                if self.fontcache.has_key(key1):
                    handle = self.fontcache[key1]
                else:
                    import fm
                    handle = fm.findfont(name)
                    self.fontcache[key1] = handle
                handle = handle.scalefont(size)
                self.fontcache[fontkey] = \
                        self.fontcache[key] = handle
        self.fontkey = fontkey
        if self.fonthandle != handle:
            self.fonthandle = handle
            self.fontinfo = handle.getfontinfo()
            handle.setfont()
+
+
class GLMeasurer(GLFontCache):
    """Text-measurement interface layered over the GL font cache."""

    def textwidth(self, text):
        """Pixel width of *text* in the currently selected font."""
        return self.fonthandle.getstrwidth(text)

    def baseline(self):
        """Distance from the top of a line down to the text baseline."""
        info = self.fontinfo
        return info[6] - info[3]

    def lineheight(self):
        """Total height of one text line in the current font."""
        return self.fontinfo[6]
+
+
+class GLWriter(GLFontCache):
+    #
+    # NOTES:
+    # (1) Use gl.ortho2 to use X pixel coordinates!
+    #
+    def text(self, (h, v), text):
+        import gl, fm
+        gl.cmov2i(h, v + self.fontinfo[6] - self.fontinfo[3])
+        fm.prstr(text)
+    #
+    def setfont(self, fontkey):
+        oldhandle = self.fonthandle
+        GLFontCache.setfont(fontkey)
+        if self.fonthandle != oldhandle:
+            handle.setfont()
+
+
class GLMeasurerWriter(GLMeasurer, GLWriter):
    # Combines measuring and drawing; a complete GL "device" object.
    pass
+
+
class GLBackEnd(SavingBackEnd):
    """Formatter back-end that draws into a GL window given by id."""
    #
    def __init__(self, wid):
        import gl
        gl.winset(wid)
        self.wid = wid
        # NOTE(review): getsize()[1] is used as the width here and in
        # redraw() -- presumably gl.getsize() returns (height, width);
        # confirm against the gl module documentation.
        self.width = gl.getsize()[1]
        self.height = 0
        self.d = GLMeasurerWriter()
        SavingBackEnd.__init__(self)
    #
    def finish(self):
        # Nothing to flush: paragraphs are drawn as they are added.
        pass
    #
    def addpara(self, p):
        # Render each new paragraph directly below the previous one.
        self.paralist.append(p)
        self.height = p.render(self.d, 0, self.height, self.width)
    #
    def redraw(self):
        """Redraw the whole document at the window's current width."""
        import gl
        gl.winset(self.wid)
        width = gl.getsize()[1]
        if width != self.width:
            # Width changed: invalidate cached paragraph positions.
            # NOTE(review): 'setdocsize' is set but never used here.
            setdocsize = 1
            self.width = width
            for p in self.paralist:
                p.top = p.bottom = None
        d = self.d
        v = 0
        for p in self.paralist:
            v = p.render(d, 0, v, width)
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/grep.py b/depot_tools/release/win/python_24/Lib/lib-old/grep.py
new file mode 100644
index 0000000..2926746
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/grep.py
@@ -0,0 +1,79 @@
+# 'grep'
+
+import regex
+from regex_syntax import *
+
# Module-level display switches, consulted by showline().
opt_show_where = 0      # when true, print a caret line marking the match
opt_show_filename = 0   # set automatically by ggrep()/pgrep()
opt_show_lineno = 1     # prefix each match with its line number
+
def grep(pat, *files):
    # Grep with basic (grep-style) regular expression syntax.
    return ggrep(RE_SYNTAX_GREP, pat, files)
+
def egrep(pat, *files):
    # Grep with extended (egrep-style) regular expression syntax.
    return ggrep(RE_SYNTAX_EGREP, pat, files)
+
def emgrep(pat, *files):
    # Grep with Emacs-style regular expression syntax.
    return ggrep(RE_SYNTAX_EMACS, pat, files)
+
def ggrep(syntax, pat, files):
    """Grep *files* for *pat*, compiling under the given old 'regex'
    module syntax; print each matching line via showline()."""
    # Accept either several filename arguments or one list of names.
    if len(files) == 1 and type(files[0]) == type([]):
        files = files[0]
    global opt_show_filename
    opt_show_filename = (len(files) != 1)
    # Temporarily switch the regex module's global syntax, restoring
    # the caller's setting even if compilation fails.
    syntax = regex.set_syntax(syntax)
    try:
        prog = regex.compile(pat)
    finally:
        syntax = regex.set_syntax(syntax)
    for filename in files:
        fp = open(filename, 'r')
        lineno = 0
        while 1:
            line = fp.readline()
            if not line: break
            lineno = lineno + 1
            # regex-module search() returns the match position, or -1.
            if prog.search(line) >= 0:
                showline(filename, lineno, line, prog)
        fp.close()
+
def pgrep(pat, *files):
    """Like ggrep(), but matches with the modern 're' module.

    Accepts either several filename arguments or a single list of
    names; prints matching lines through showline().
    """
    if len(files) == 1 and type(files[0]) == type([]):
        files = files[0]
    global opt_show_filename
    opt_show_filename = (len(files) != 1)
    import re
    prog = re.compile(pat)
    for filename in files:
        fp = open(filename, 'r')
        lineno = 0
        line = fp.readline()
        while line:
            lineno = lineno + 1
            if prog.search(line):
                showline(filename, lineno, line, prog)
            line = fp.readline()
        fp.close()
+
def showline(filename, lineno, line, prog):
    """Print one matching line, honoring the opt_show_* switches; with
    opt_show_where, print a second line marking the match columns."""
    if line[-1:] == '\n': line = line[:-1]
    if opt_show_lineno:
        prefix = `lineno`.rjust(3) + ': '
    else:
        prefix = ''
    if opt_show_filename:
        prefix = filename + ': ' + prefix
    print prefix + line
    if opt_show_where:
        # NOTE(review): regs()[0] is the (start, end) of the whole
        # match; on old regex objects 'regs' was an attribute, and re
        # patterns have no regs at all -- confirm which callers use this.
        start, end = prog.regs()[0]
        line = line[:start]
        # Preserve tabs so the caret markers line up under the match.
        if '\t' not in line:
            prefix = ' ' * (len(prefix) + start)
        else:
            prefix = ' ' * len(prefix)
            for c in line:
                if c != '\t': c = ' '
                prefix = prefix + c
        if start == end: prefix = prefix + '\\'
        else: prefix = prefix + '^'*(end-start)
        print prefix
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/lockfile.py b/depot_tools/release/win/python_24/Lib/lib-old/lockfile.py
new file mode 100644
index 0000000..cde9b48c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/lockfile.py
@@ -0,0 +1,15 @@
+import struct, fcntl
+
def writelock(f):
    """Place a blocking exclusive (write) lock on open file *f*."""
    _lock(f, fcntl.F_WRLCK)
+
def readlock(f):
    """Place a blocking shared (read) lock on open file *f*."""
    _lock(f, fcntl.F_RDLCK)
+
def unlock(f):
    """Release any record lock held on open file *f*."""
    _lock(f, fcntl.F_UNLCK)
+
+def _lock(f, op):
+    dummy = fcntl.fcntl(f.fileno(), fcntl.F_SETLKW,
+                        struct.pack('2h8l', op,
+                                    0, 0, 0, 0, 0, 0, 0, 0, 0))
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/newdir.py b/depot_tools/release/win/python_24/Lib/lib-old/newdir.py
new file mode 100644
index 0000000..356becc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/newdir.py
@@ -0,0 +1,73 @@
+# New dir() function
+
+
+# This should be the new dir(), except that it should still list
+# the current local name space by default
+
def listattrs(x):
    """Return a sorted list of x's attribute names, without duplicates.

    Combines __dict__ keys, __methods__ and __members__ (each ignored
    if absent); for a class instance, adds the class's function-valued
    attributes (methods); for a derived class, adds all base-class
    attributes.
    """
    try:
        dictkeys = x.__dict__.keys()
    except (AttributeError, TypeError):
        dictkeys = []
    #
    try:
        methods = x.__methods__
    except (AttributeError, TypeError):
        methods = []
    #
    try:
        members = x.__members__
    except (AttributeError, TypeError):
        members = []
    #
    try:
        the_class = x.__class__
    except (AttributeError, TypeError):
        the_class = None
    #
    try:
        bases = x.__bases__
    except (AttributeError, TypeError):
        bases = ()
    #
    total = dictkeys + methods + members
    if the_class:
        # It's a class instance; add the class's attributes
        # that are functions (methods)...
        class_attrs = listattrs(the_class)
        class_methods = []
        for name in class_attrs:
            if is_function(getattr(the_class, name)):
                class_methods.append(name)
        total = total + class_methods
    elif bases:
        # It's a derived class; add the base class attributes
        for base in bases:
            base_attrs = listattrs(base)
            total = total + base_attrs
    total.sort()
    # Bug fix: this duplicate-removal loop used to sit *after* an
    # unconditional 'return total', so it never ran.  The list is
    # sorted, so duplicates are adjacent.
    i = 0
    while i+1 < len(total):
        if total[i] == total[i+1]:
            del total[i+1]
        else:
            i = i+1
    return total
+
+
+# Helper to recognize functions
+
def is_function(x):
    """True iff *x* is a plain (user-defined) function object."""
    return type(x) is type(is_function)
+
+
+# Approximation of builtin dir(); but note that this lists the user's
+# variables by default, not the current local name space.
+
def dir(x = None):
    """Approximation of the builtin dir(); with no argument it lists
    the attributes of __main__ (the user's variables), not the current
    local namespace."""
    if x is None:
        import __main__
        x = __main__
    return listattrs(x)
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/ni.py b/depot_tools/release/win/python_24/Lib/lib-old/ni.py
new file mode 100644
index 0000000..074f989
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/ni.py
@@ -0,0 +1,433 @@
+"""New import scheme with package support.
+
+Quick Reference
+---------------
+
+- To enable package support, execute "import ni" before importing any
+  packages.  Importing this module automatically installs the relevant
+  import hooks.
+
+- To create a package named spam containing sub-modules ham, bacon and
+  eggs, create a directory spam somewhere on Python's module search
+  path (i.e. spam's parent directory must be one of the directories in
+  sys.path or $PYTHONPATH); then create files ham.py, bacon.py and
+  eggs.py inside spam.
+
+- To import module ham from package spam and use function hamneggs()
+  from that module, you can either do
+
+    import spam.ham             # *not* "import spam" !!!
+    spam.ham.hamneggs()
+
+  or
+
+    from spam import ham
+    ham.hamneggs()
+
+  or
+
+    from spam.ham import hamneggs
+    hamneggs()
+
+- Importing just "spam" does not do what you expect: it creates an
+  empty package named spam if one does not already exist, but it does
+  not import spam's submodules.  The only submodule that is guaranteed
+  to be imported is spam.__init__, if it exists.  Note that
+  spam.__init__ is a submodule of package spam.  It can reference to
+  spam's namespace via the '__.' prefix, for instance
+
+    __.spam_inited = 1          # Set a package-level variable
+
+
+
+Theory of Operation
+-------------------
+
+A Package is a module that can contain other modules.  Packages can be
+nested.  Packages introduce dotted names for modules, like P.Q.M, which
+could correspond to a file P/Q/M.py found somewhere on sys.path.  It
+is possible to import a package itself, though this makes little sense
+unless the package contains a module called __init__.
+
+A package has two variables that control the namespace used for
+packages and modules, both initialized to sensible defaults the first
+time the package is referenced.
+
+(1) A package's *module search path*, contained in the per-package
+variable __path__, defines a list of *directories* where submodules or
+subpackages of the package are searched.  It is initialized to the
+directory containing the package.  Setting this variable to None makes
+the module search path default to sys.path (this is not quite the same
+as setting it to sys.path, since the latter won't track later
+assignments to sys.path).
+
+(2) A package's *import domain*, contained in the per-package variable
+__domain__, defines a list of *packages* that are searched (using
+their respective module search paths) to satisfy imports.  It is
+initialized to the list consisting of the package itself, its parent
+package, its parent's parent, and so on, ending with the root package
+(the nameless package containing all top-level packages and modules,
+whose module search path is None, implying sys.path).
+
+The default domain implements a search algorithm called "expanding
+search".  An alternative search algorithm called "explicit search"
+fixes the import search path to contain only the root package,
+requiring the modules in the package to name all imported modules by
+their full name.  The convention of using '__' to refer to the current
+package (both as a per-module variable and in module names) can be
+used by packages using explicit search to refer to modules in the same
+package; this combination is known as "explicit-relative search".
+
+The PackageImporter and PackageLoader classes together implement the
+following policies:
+
+- There is a root package, whose name is ''.  It cannot be imported
+  directly but may be referenced, e.g. by using '__' from a top-level
+  module.
+
+- In each module or package, the variable '__' contains a reference to
+  the parent package; in the root package, '__' points to itself.
+
+- In the name for imported modules (e.g. M in "import M" or "from M
+  import ..."), a leading '__' refers to the current package (i.e.
+  the package containing the current module); leading '__.__' and so
+  on refer to the current package's parent, and so on.  The use of
+  '__' elsewhere in the module name is not supported.
+
+- Modules are searched using the "expanding search" algorithm by
+  virtue of the default value for __domain__.
+
+- If A.B.C is imported, A is searched using __domain__; then
+  subpackage B is searched in A using its __path__, and so on.
+
+- Built-in modules have priority: even if a file sys.py exists in a
+  package, "import sys" imports the built-in sys module.
+
+- The same holds for frozen modules, for better or for worse.
+
+- Submodules and subpackages are not automatically loaded when their
+  parent package is loaded.
+
+- The construct "from package import *" is illegal.  (It can still be
+  used to import names from a module.)
+
+- When "from package import module1, module2, ..." is used, those
+    modules are explicitly loaded.
+
+- When a package is loaded, if it has a submodule __init__, that
+  module is loaded.  This is the place where required submodules can
+  be loaded, the __path__ variable extended, etc.  The __init__ module
+  is loaded even if the package was loaded only in order to create a
+  stub for a sub-package: if "import P.Q.R" is the first reference to
+  P, and P has a submodule __init__, P.__init__ is loaded before P.Q
+  is even searched.
+
+Caveats:
+
+- It is possible to import a package that has no __init__ submodule;
+  this is not particularly useful but there may be useful applications
+  for it (e.g. to manipulate its search paths from the outside!).
+
+- There are no special provisions for os.chdir().  If you plan to use
+  os.chdir() before you have imported all your modules, it is better
+  not to have relative pathnames in sys.path.  (This could actually be
+  fixed by changing the implementation of path_join() in the hook to
+  absolutize paths.)
+
+- Packages and modules are introduced in sys.modules as soon as their
+  loading is started.  When the loading is terminated by an exception,
+  the sys.modules entries remain around.
+
+- There are no special measures to support mutually recursive modules,
+  but it will work under the same conditions where it works in the
+  flat module space system.
+
+- Sometimes dummy entries (whose value is None) are entered in
+  sys.modules, to indicate that a particular module does not exist --
+  this is done to speed up the expanding search algorithm when a
+  module residing at a higher level is repeatedly imported (Python
+  promises that importing a previously imported module is cheap!)
+
+- Although dynamically loaded extensions are allowed inside packages,
+  the current implementation (hardcoded in the interpreter) of their
+  initialization may cause problems if an extension invokes the
+  interpreter during its initialization.
+
+- reload() may find another version of the module only if it occurs on
+  the package search path.  Thus, it keeps the connection to the
+  package to which the module belongs, but may find a different file.
+
+XXX Need to have an explicit name for '', e.g. '__root__'.
+
+"""
+
+
+import imp
+import sys
+import __builtin__
+
+import ihooks
+from ihooks import ModuleLoader, ModuleImporter
+
+
class PackageLoader(ModuleLoader):

    """A subclass of ModuleLoader with package support.

    find_module_in_dir() will succeed if there's a subdirectory with
    the given name; load_module() will create a stub for a package and
    load its __init__ module if it exists.

    """

    def find_module_in_dir(self, name, dir):
        # A subdirectory named after the module is a package; the
        # pseudo suffix/type 'PACKAGE' marks it for load_module().
        if dir is not None:
            dirname = self.hooks.path_join(dir, name)
            if self.hooks.path_isdir(dirname):
                return None, dirname, ('', '', 'PACKAGE')
        return ModuleLoader.find_module_in_dir(self, name, dir)

    def load_module(self, name, stuff):
        # Dispatch on the type discovered by find_module_in_dir().
        file, filename, info = stuff
        suff, mode, type = info
        if type == 'PACKAGE':
            return self.load_package(name, stuff)
        # Enter the module in sys.modules early so mutually recursive
        # imports can find it while it is still loading.
        if sys.modules.has_key(name):
            m = sys.modules[name]
        else:
            sys.modules[name] = m = imp.new_module(name)
        self.set_parent(m)
        if type == imp.C_EXTENSION and '.' in name:
            # imp.load_dynamic() can't handle dotted names directly.
            return self.load_dynamic(name, stuff)
        else:
            return ModuleLoader.load_module(self, name, stuff)

    def load_dynamic(self, name, stuff):
        file, filename, (suff, mode, type) = stuff
        # Hack around restriction in imp.load_dynamic(): temporarily
        # register the module under its undotted tail name.
        i = name.rfind('.')
        tail = name[i+1:]
        if sys.modules.has_key(tail):
            save = sys.modules[tail]
        else:
            save = None
        sys.modules[tail] = imp.new_module(name)
        try:
            m = imp.load_dynamic(tail, filename, file)
        finally:
            # Restore (or remove) the temporary sys.modules entry.
            if save:
                sys.modules[tail] = save
            else:
                del sys.modules[tail]
        sys.modules[name] = m
        return m

    def load_package(self, name, stuff):
        # Create (or reuse) a stub module for the package, record its
        # directory as __path__, and initialize it.
        file, filename, info = stuff
        if sys.modules.has_key(name):
            package = sys.modules[name]
        else:
            sys.modules[name] = package = imp.new_module(name)
        package.__path__ = [filename]
        self.init_package(package)
        return package

    def init_package(self, package):
        # Give the package its '__' parent and __domain__, then run its
        # __init__ submodule if present.
        self.set_parent(package)
        self.set_domain(package)
        self.call_init_module(package)

    def set_parent(self, m):
        # Point m.__ at the parent package; '' is the root package,
        # whose own '__' refers to itself.
        name = m.__name__
        if '.' in name:
            name = name[:name.rfind('.')]
        else:
            name = ''
        m.__ = sys.modules[name]

    def set_domain(self, package):
        # Default "expanding search" domain: the package itself, each
        # ancestor in turn, ending with the root package ''.
        name = package.__name__
        package.__domain__ = domain = [name]
        while '.' in name:
            name = name[:name.rfind('.')]
            domain.append(name)
        if name:
            domain.append('')

    def call_init_module(self, package):
        # Load <package>.__init__ if the package provides one.
        stuff = self.find_module('__init__', package.__path__)
        if stuff:
            m = self.load_module(package.__name__ + '.__init__', stuff)
            package.__init__ = m
+
+
+class PackageImporter(ModuleImporter):
+
+    """Importer that understands packages and '__'."""
+
+    def __init__(self, loader = None, verbose = 0):
+        ModuleImporter.__init__(self,
+        loader or PackageLoader(None, verbose), verbose)
+
+    def import_module(self, name, globals={}, locals={}, fromlist=[]):
+        if globals.has_key('__'):
+            package = globals['__']
+        else:
+            # No calling context, assume in root package
+            package = sys.modules['']
+        if name[:3] in ('__.', '__'):
+            p = package
+            name = name[3:]
+            while name[:3] in ('__.', '__'):
+                p = p.__
+                name = name[3:]
+            if not name:
+                return self.finish(package, p, '', fromlist)
+            if '.' in name:
+                i = name.find('.')
+                name, tail = name[:i], name[i:]
+            else:
+                tail = ''
+            mname = p.__name__ and p.__name__+'.'+name or name
+            m = self.get1(mname)
+            return self.finish(package, m, tail, fromlist)
+        if '.' in name:
+            i = name.find('.')
+            name, tail = name[:i], name[i:]
+        else:
+            tail = ''
+        for pname in package.__domain__:
+            mname = pname and pname+'.'+name or name
+            m = self.get0(mname)
+            if m: break
+        else:
+            raise ImportError, "No such module %s" % name
+        return self.finish(m, m, tail, fromlist)
+
+    def finish(self, module, m, tail, fromlist):
+        # Got ....A; now get ....A.B.C.D
+        yname = m.__name__
+        if tail and sys.modules.has_key(yname + tail): # Fast path
+            yname, tail = yname + tail, ''
+            m = self.get1(yname)
+        while tail:
+            i = tail.find('.', 1)
+            if i > 0:
+                head, tail = tail[:i], tail[i:]
+            else:
+                head, tail = tail, ''
+            yname = yname + head
+            m = self.get1(yname)
+
+        # Got ....A.B.C.D; now finalize things depending on fromlist
+        if not fromlist:
+            return module
+        if '__' in fromlist:
+            raise ImportError, "Can't import __ from anywhere"
+        if not hasattr(m, '__path__'): return m
+        if '*' in fromlist:
+            raise ImportError, "Can't import * from a package"
+        for f in fromlist:
+            if hasattr(m, f): continue
+            fname = yname + '.' + f
+            self.get1(fname)
+        return m
+
+    def get1(self, name):
+        m = self.get(name)
+        if not m:
+            raise ImportError, "No module named %s" % name
+        return m
+
+    def get0(self, name):
+        m = self.get(name)
+        if not m:
+            sys.modules[name] = None
+        return m
+
+    def get(self, name):
+        # Internal routine to get or load a module when its parent exists
+        if sys.modules.has_key(name):
+            return sys.modules[name]
+        if '.' in name:
+            i = name.rfind('.')
+            head, tail = name[:i], name[i+1:]
+        else:
+            head, tail = '', name
+        path = sys.modules[head].__path__
+        stuff = self.loader.find_module(tail, path)
+        if not stuff:
+            return None
+        sys.modules[name] = m = self.loader.load_module(name, stuff)
+        if head:
+            setattr(sys.modules[head], tail, m)
+        return m
+
+    def reload(self, module):
+        name = module.__name__
+        if '.' in name:
+            i = name.rfind('.')
+            head, tail = name[:i], name[i+1:]
+            path = sys.modules[head].__path__
+        else:
+            tail = name
+            path = sys.modules[''].__path__
+        stuff = self.loader.find_module(tail, path)
+        if not stuff:
+            raise ImportError, "No module named %s" % name
+        return self.loader.load_module(name, stuff)
+
+    def unload(self, module):
+        if hasattr(module, '__path__'):
+            raise ImportError, "don't know how to unload packages yet"
+        PackageImporter.unload(self, module)
+
+    def install(self):
+        if not sys.modules.has_key(''):
+            sys.modules[''] = package = imp.new_module('')
+            package.__path__ = None
+            self.loader.init_package(package)
+            for m in sys.modules.values():
+                if not m: continue
+                if not hasattr(m, '__'):
+                    self.loader.set_parent(m)
+        ModuleImporter.install(self)
+
+
def install(v = 0):
    """Install a PackageImporter as the import hook; *v* is verbosity."""
    ihooks.install(PackageImporter(None, v))
+
def uninstall():
    """Restore the previous import hook."""
    ihooks.uninstall()
+
def ni(v = 0):
    """Shorthand for install(v)."""
    install(v)
+
def no():
    """Shorthand for uninstall()."""
    uninstall()
+
def test():
    """Run testproper(), dropping into pdb post-mortem on failure."""
    import pdb
    try:
        testproper()
    except:
        # Record the failure the way the interactive interpreter does,
        # then start the post-mortem debugger.
        sys.last_type, sys.last_value, sys.last_traceback = sys.exc_info()
        print
        print sys.last_type, ':', sys.last_value
        print
        pdb.pm()
+
def testproper():
    """Install the hooks, try importing 'mactest', then uninstall."""
    install(1)
    try:
        import mactest
        print dir(mactest)
        raw_input('OK?')
    finally:
        uninstall()
+
+
if __name__ == '__main__':
    # Run the interactive self-test when executed as a script.
    test()
else:
    # Merely importing this module installs the package import hooks.
    install()
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/packmail.py b/depot_tools/release/win/python_24/Lib/lib-old/packmail.py
new file mode 100644
index 0000000..e569108a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/packmail.py
@@ -0,0 +1,111 @@
+# Module 'packmail' -- create a self-unpacking shell archive.
+
+# This module works on UNIX and on the Mac; the archives can unpack
+# themselves only on UNIX.
+
+import os
+from stat import ST_MTIME
+
+# Print help
def help():
    """Print a short usage summary for this module's functions."""
    print 'All fns have a file open for writing as first parameter'
    print 'pack(f, fullname, name): pack fullname as name'
    print 'packsome(f, directory, namelist): selected files from directory'
    print 'packall(f, directory): pack all files from directory'
    print 'packnotolder(f, directory, name): pack all files from directory'
    print '                        that are not older than a file there'
    print 'packtree(f, directory): pack entire directory tree'
+
+# Pack one file
def pack(outfp, file, name):
    """Append a shell-archive member for *file* to *outfp* under *name*.

    Emits an 'echo' of the name, then a sed here-document that strips
    the leading 'X' guard from every line when the archive is run.
    """
    fp = open(file, 'r')
    try:
        outfp.write('echo ' + name + '\n')
        outfp.write('sed "s/^X//" >"' + name + '" <<"!"\n')
        while 1:
            line = fp.readline()
            if not line: break
            # Guarantee a trailing newline so the here-document stays valid.
            if line[-1:] != '\n':
                line = line + '\n'
            # The 'X' guard protects lines that sh might treat specially.
            outfp.write('X' + line)
        outfp.write('!\n')
    finally:
        # Bug fix: close the input file even if a write fails.
        fp.close()
+
+# Pack some files from a directory
def packsome(outfp, dirname, names):
    """Pack the named files from *dirname*, echoing each name packed."""
    for name in names:
        print name
        file = os.path.join(dirname, name)
        pack(outfp, file, name)
+
+# Pack all files from a directory
def packall(outfp, dirname):
    """Pack every entry of *dirname*, sorted by name."""
    names = os.listdir(dirname)
    # Some platforms historically included '.' and '..' in listdir()
    # output; drop them if present.  Narrowed from a bare 'except:' --
    # list.remove only raises ValueError when the item is absent.
    try:
        names.remove('.')
    except ValueError:
        pass
    try:
        names.remove('..')
    except ValueError:
        pass
    names.sort()
    packsome(outfp, dirname, names)
+
+# Pack all files from a directory that are not older than a given one
def packnotolder(outfp, dirname, oldest):
    """Pack files from *dirname* whose mtime is >= that of *oldest*
    (a file name within the same directory)."""
    names = os.listdir(dirname)
    try:
        names.remove('.')
    except:
        pass
    try:
        names.remove('..')
    except:
        pass
    oldest = os.path.join(dirname, oldest)
    st = os.stat(oldest)
    mtime = st[ST_MTIME]
    todo = []
    for name in names:
        print name, '...',
        st = os.stat(os.path.join(dirname, name))
        if st[ST_MTIME] >= mtime:
            print 'Yes.'
            todo.append(name)
        else:
            print 'No.'
    todo.sort()
    packsome(outfp, dirname, todo)
+
+# Pack a whole tree (no exceptions)
def packtree(outfp, dirname):
    """Recursively pack the whole tree at *dirname* (emits mkdir too)."""
    print 'packtree', dirname
    outfp.write('mkdir ' + unixfix(dirname) + '\n')
    names = os.listdir(dirname)
    try:
        names.remove('.')
    except:
        pass
    try:
        names.remove('..')
    except:
        pass
    subdirs = []
    for name in names:
        fullname = os.path.join(dirname, name)
        if os.path.isdir(fullname):
            # Recurse into subdirectories after packing plain files.
            subdirs.append(fullname)
        else:
            print 'pack', fullname
            pack(outfp, fullname, unixfix(fullname))
    for subdirname in subdirs:
        packtree(outfp, subdirname)
+
def unixfix(name):
    """Convert an OS-specific path to a '/'-separated relative path.

    Empty components (from leading or doubled separators) are dropped,
    so absolute paths come out relative.
    """
    return '/'.join([comp for comp in name.split(os.sep) if comp])
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/poly.py b/depot_tools/release/win/python_24/Lib/lib-old/poly.py
new file mode 100644
index 0000000..fe6a1dcc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/poly.py
@@ -0,0 +1,52 @@
+# module 'poly' -- Polynomials
+
+# A polynomial is represented by a list of coefficients, e.g.,
+# [1, 10, 5] represents 1*x**0 + 10*x**1 + 5*x**2 (or 1 + 10x + 5x**2).
+# There is no way to suppress internal zeros; trailing zeros are
+# taken out by normalize().
+
def normalize(p): # Strip unnecessary zero coefficients
    # Walk back from the high end past any zero coefficients and
    # return a copy up to (and including) the last nonzero one.
    end = len(p)
    while end > 0 and not p[end-1]:
        end = end - 1
    if end:
        return p[:end]
    return []
+
def plus(a, b):
    # Polynomial sum: add coefficients pairwise into a copy of the
    # longer operand, then strip trailing zeros.
    if len(b) > len(a):
        a, b = b, a
    total = a[:]
    for i, coeff in enumerate(b):
        total[i] = total[i] + coeff
    return normalize(total)
+
def minus(a, b):
    # Polynomial difference: negate b's coefficients and add.
    negated = [-coeff for coeff in b]
    return plus(a, negated)
+
def one(power, coeff): # Representation of coeff * x**power
    # A monomial is `power` zero coefficients followed by coeff.
    return [0] * power + [coeff]
+
def times(a, b):
    # Polynomial product: accumulate the cross terms a[i]*b[j]*x**(i+j).
    product = []
    for i, coeff_a in enumerate(a):
        for j, coeff_b in enumerate(b):
            product = plus(product, one(i + j, coeff_a * coeff_b))
    return product
+
def power(a, n): # Raise polynomial a to the positive integral power n
    # Exponentiation by squaring: square for even n, peel one factor
    # off for odd n.
    if n == 0:
        return [1]
    if n == 1:
        return a
    if n % 2 == 0:
        half = power(a, n // 2)
        return times(half, half)
    return times(power(a, n - 1), a)
+
def der(a): # First derivative
    # d/dx of coeff*x**k is k*coeff*x**(k-1): drop the constant term
    # and scale each remaining coefficient by its original power.
    return [coeff * (i + 1) for i, coeff in enumerate(a[1:])]
+
+# Computing a primitive function would require rational arithmetic...
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/rand.py b/depot_tools/release/win/python_24/Lib/lib-old/rand.py
new file mode 100644
index 0000000..a557b695
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/rand.py
@@ -0,0 +1,13 @@
+# Module 'rand'
+# Don't use unless you want compatibility with C's rand()!
+
+import whrandom
+
def srand(seed):
    # Split the seed into three byte-sized values for whrandom's
    # three-part seed.
    whrandom.seed(seed % 256, seed // 256 % 256, seed // 65536 % 256)
+
def rand():
    # Scale a float in [0, 1) up to C rand()'s 0..32767 range.
    scaled = int(whrandom.random() * 32768.0)
    return scaled % 32768
+
def choice(seq):
    # Map a pseudo-random rand() value onto an index into seq.
    index = rand() % len(seq)
    return seq[index]
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/tb.py b/depot_tools/release/win/python_24/Lib/lib-old/tb.py
new file mode 100644
index 0000000..9063559
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/tb.py
@@ -0,0 +1,177 @@
+# Print tracebacks, with a dump of local variables.
+# Also an interactive stack trace browser.
+# Note -- this module is obsolete -- use pdb.pm() instead.
+
+import sys
+import os
+from stat import *
+import linecache
+
def br():
    # Interactively browse the traceback of the last uncaught exception.
    browser(sys.last_traceback)
+
def tb():
    # Dump the traceback of the last uncaught exception, with locals.
    printtb(sys.last_traceback)
+
def browser(tb):
    """Interactively browse traceback tb, starting at the innermost frame.

    Commands: up, down, list, locals, globals, help/?, quit.  Any other
    non-empty input is executed in the current frame's namespaces.
    """
    if not tb:
        print 'No traceback.'
        return
    # Flatten the linked traceback into a list; ptr indexes the frame
    # currently being inspected (start at the innermost frame).
    tblist = []
    while tb:
        tblist.append(tb)
        tb = tb.tb_next
    ptr = len(tblist)-1
    tb = tblist[ptr]
    while 1:
        # Print a frame header only when 'up'/'down' moved the pointer.
        if tb != tblist[ptr]:
            tb = tblist[ptr]
            print `ptr` + ':',
            printtbheader(tb)
        try:
            line = raw_input('TB: ')
        except KeyboardInterrupt:
            print '\n[Interrupted]'
            break
        except EOFError:
            print '\n[EOF]'
            break
        cmd = line.strip()
        if cmd:
            if cmd == 'quit':
                break
            elif cmd == 'list':
                browserlist(tb)
            elif cmd == 'up':
                if ptr-1 >= 0: ptr = ptr-1
                else: print 'Bottom of stack.'
            elif cmd == 'down':
                if ptr+1 < len(tblist): ptr = ptr+1
                else: print 'Top of stack.'
            elif cmd == 'locals':
                printsymbols(tb.tb_frame.f_locals)
            elif cmd == 'globals':
                printsymbols(tb.tb_frame.f_globals)
            elif cmd in ('?', 'help'):
                browserhelp()
            else:
                browserexec(tb, cmd)
+
def browserlist(tb):
    # List up to 11 source lines ending at the frame's current line,
    # marking the current line with a '***' prefix.
    filename = tb.tb_frame.f_code.co_filename
    lineno = tb.tb_lineno
    last = lineno
    first = max(1, last-10)
    for i in range(first, last+1):
        if i == lineno: prefix = '***' + `i`.rjust(4) + ':'
        else: prefix = `i`.rjust(7) + ':'
        line = linecache.getline(filename, i)
        # Strip the trailing newline; print adds its own.
        if line[-1:] == '\n': line = line[:-1]
        print prefix + line
+
def browserexec(tb, cmd):
    # Execute cmd in the frame's own global/local namespaces so the
    # user can inspect and modify state at that stack level.
    locals = tb.tb_frame.f_locals
    globals = tb.tb_frame.f_globals
    try:
        exec cmd+'\n' in globals, locals
    except:
        t, v = sys.exc_info()[:2]
        print '*** Exception:',
        # Old-style (string) exceptions have no __name__ attribute.
        if type(t) is type(''):
            print t,
        else:
            print t.__name__,
        if v is not None:
            print ':', v,
        print
        print 'Type help to get help.'
+
def browserhelp():
    # Print the command summary for the interactive traceback browser.
    print
    print '    This is the traceback browser.  Commands are:'
    print '        up      : move one level up in the call stack'
    print '        down    : move one level down in the call stack'
    print '        locals  : print all local variables at this level'
    print '        globals : print all global variables at this level'
    print '        list    : list source code around the failure'
    print '        help    : print help (what you are reading now)'
    print '        quit    : back to command interpreter'
    print '    Typing any other 1-line statement will execute it'
    print '    using the current level\'s symbol tables'
    print
+
def printtb(tb):
    # Walk the traceback chain, printing every frame in order.
    current = tb
    while current:
        print1tb(current)
        current = current.tb_next
+
def print1tb(tb):
    # Print one frame's header; dump its locals unless this is module
    # level, where f_locals and f_globals are the same dict.
    printtbheader(tb)
    frame = tb.tb_frame
    if frame.f_locals is not frame.f_globals:
        printsymbols(frame.f_locals)
+
+def printtbheader(tb):
+    filename = tb.tb_frame.f_code.co_filename
+    lineno = tb.tb_lineno
+    info = '"' + filename + '"(' + `lineno` + ')'
+    line = linecache.getline(filename, lineno)
+    if line:
+        info = info + ': ' + line.strip()
+    print info
+
+def printsymbols(d):
+    keys = d.keys()
+    keys.sort()
+    for name in keys:
+        print '  ' + name.ljust(12) + ':',
+        printobject(d[name], 4)
+        print
+
def printobject(v, maxlevel):
    # Print an abbreviated representation of v without a trailing
    # newline; recursion into containers is bounded by maxlevel.
    if v is None:
        print 'None',
    elif type(v) in (type(0), type(0.0)):
        print v,
    elif type(v) is type(''):
        # Long strings are truncated to 17 characters plus '...'.
        if len(v) > 20:
            print `v[:17] + '...'`,
        else:
            print `v`,
    elif type(v) is type(()):
        print '(',
        printlist(v, maxlevel)
        print ')',
    elif type(v) is type([]):
        print '[',
        printlist(v, maxlevel)
        print ']',
    elif type(v) is type({}):
        print '{',
        printdict(v, maxlevel)
        print '}',
    else:
        # Anything else falls back to its default str() form.
        print v,
+
def printlist(v, maxlevel):
    # Print at most 6 elements of sequence v; '...' marks either
    # truncation or exhausted recursion depth.
    n = len(v)
    if n == 0: return
    if maxlevel <= 0:
        print '...',
        return
    for i in range(min(6, n)):
        printobject(v[i], maxlevel-1)
        if i+1 < n: print ',',
    if n > 6: print '...',
+
def printdict(v, maxlevel):
    # Print at most 6 key:value pairs of dict v in sorted key order;
    # '...' marks either truncation or exhausted recursion depth.
    keys = v.keys()
    n = len(keys)
    if n == 0: return
    if maxlevel <= 0:
        print '...',
        return
    keys.sort()
    for i in range(min(6, n)):
        key = keys[i]
        print `key` + ':',
        printobject(v[key], maxlevel-1)
        if i+1 < n: print ',',
    if n > 6: print '...',
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/util.py b/depot_tools/release/win/python_24/Lib/lib-old/util.py
new file mode 100644
index 0000000..104af1e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/util.py
@@ -0,0 +1,25 @@
+# Module 'util' -- some useful functions that don't fit elsewhere
+
+# NB: These are now built-in functions, but this module is provided
+# for compatibility.  Don't use in new programs unless you need backward
+# compatibility (i.e. need to run with old interpreters).
+
+
+# Remove an item from a list.
+# No complaints if it isn't in the list at all.
+# If it occurs more than once, remove the first occurrence.
+#
def remove(item, list):
    # Delete the first occurrence of item; silently ignore a miss.
    try:
        list.remove(item)
    except ValueError:
        pass
+
+
+# Return a string containing a file's contents.
+#
def readfile(fn):
    # Open fn for reading and delegate the slurping to readopenfile.
    fp = open(fn, 'r')
    return readopenfile(fp)
+
+
+# Read an open file until EOF.
+#
def readopenfile(fp):
    # read() with no size argument consumes the file to EOF.
    contents = fp.read()
    return contents
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/whatsound.py b/depot_tools/release/win/python_24/Lib/lib-old/whatsound.py
new file mode 100644
index 0000000..1b1df23
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/whatsound.py
@@ -0,0 +1 @@
+from sndhdr import *
diff --git a/depot_tools/release/win/python_24/Lib/lib-old/zmod.py b/depot_tools/release/win/python_24/Lib/lib-old/zmod.py
new file mode 100644
index 0000000..55f49df
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/lib-old/zmod.py
@@ -0,0 +1,94 @@
+# module 'zmod'
+
+# Compute properties of mathematical "fields" formed by taking
+# Z/n (the whole numbers modulo some whole number n) and an
+# irreducible polynomial (i.e., a polynomial with only complex zeros),
+# e.g., Z/5 and X**2 + 2.
+#
+# The field is formed by taking all possible linear combinations of
+# a set of d base vectors (where d is the degree of the polynomial).
+#
+# Note that this procedure doesn't yield a field for all combinations
+# of n and p: it may well be that some numbers have more than one
+# inverse and others have none.  This is what we check.
+#
+# Remember that a field is a ring where each element has an inverse.
+# A ring has commutative addition and multiplication, a zero and a one:
+# 0*x = x*0 = 0, 0+x = x+0 = x, 1*x = x*1 = x.  Also, the distributive
# property holds: a*(b+c) = a*b + a*c.
+# (XXX I forget if this is an axiom or follows from the rules.)
+
+import poly
+
+
+# Example N and polynomial
+
+N = 5
+P = poly.plus(poly.one(0, 2), poly.one(2, 1)) # 2 + x**2
+
+
+# Return x modulo y.  Returns >= 0 even if x < 0.
+
def mod(x, y):
    # Python's % takes the sign of y, so for positive y the result is
    # >= 0 even when x < 0 (divmod(x, y)[1] is the same value).
    return x % y
+
+
+# Normalize a polynomial modulo n and modulo p.
+
def norm(a, n, p):
    # Reduce polynomial a modulo polynomial p, then reduce every
    # coefficient modulo n, and strip trailing zero coefficients.
    # NOTE(review): poly.modulo is not defined in the poly module added
    # in this same change -- confirm where it is meant to come from.
    a = poly.modulo(a, p)
    a = a[:]
    for i in range(len(a)): a[i] = mod(a[i], n)
    a = poly.normalize(a)
    return a
+
+
+# Make a list of all n^d elements of the proposed field.
+
def make_all(mat):
    # Flatten a matrix (list of rows) into a single list of elements.
    return [element for row in mat for element in row]
+
def make_elements(n, d):
    # Build all polynomials of degree < d with coefficients in 0..n-1,
    # recursively extending each shorter polynomial by one coefficient.
    if d == 0:
        return [poly.one(0, 0)]
    shorter = make_elements(n, d - 1)
    result = []
    for base in shorter:
        for coeff in range(n):
            result.append(poly.plus(base, poly.one(d - 1, coeff)))
    return result
+
def make_inv(all, n, p):
    # Multiply every element by x and normalize modulo n and p.
    x = poly.one(1, 1)
    return [norm(poly.times(element, x), n, p) for element in all]
+
def checkfield(n, p):
    # Check whether multiplying every element by x maps the element set
    # onto itself (compared as sorted lists); prints 'BINGO!' when it
    # does, otherwise dumps both sets for inspection.
    all = make_elements(n, len(p)-1)
    inv = make_inv(all, n, p)
    all1 = all[:]
    inv1 = inv[:]
    all1.sort()
    inv1.sort()
    if all1 == inv1: print 'BINGO!'
    else:
        print 'Sorry:', n, p
        print all
        print inv
+
def rj(s, width):
    # Right-justify s in a field of the given width (converting
    # non-strings with repr() first); never truncates.
    if type(s) is not type(''):
        s = repr(s)
    return s.rjust(width)
+
def lj(s, width):
    # Left-justify s in a field of the given width (converting
    # non-strings with repr() first); never truncates.
    if type(s) is not type(''):
        s = repr(s)
    return s.ljust(width)
diff --git a/depot_tools/release/win/python_24/Lib/linecache.py b/depot_tools/release/win/python_24/Lib/linecache.py
new file mode 100644
index 0000000..2ccc6c67
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/linecache.py
@@ -0,0 +1,108 @@
+"""Cache lines from files.
+
+This is intended to read lines from modules imported -- hence if a filename
+is not found, it will look down the module search path for a file by
+that name.
+"""
+
+import sys
+import os
+
+__all__ = ["getline", "clearcache", "checkcache"]
+
def getline(filename, lineno):
    # Return the 1-based line `lineno` of filename, or '' when the
    # line number is out of range.
    lines = getlines(filename)
    if lineno < 1 or lineno > len(lines):
        return ''
    return lines[lineno - 1]
+
+
+# The cache
+
+cache = {} # The cache
+
+
def clearcache():
    """Clear the cache entirely."""

    # Rebinds the module global to a fresh dict rather than mutating in
    # place, so outstanding references to the old dict keep their
    # contents.  NOTE(review): presumably deliberate -- confirm.
    global cache
    cache = {}
+
+
def getlines(filename):
    """Get the lines for a file from the cache.
    Update the cache if it doesn't contain an entry for this file already."""

    # Cache entries are (size, mtime, lines, fullname) tuples, so a
    # None result from get() reliably means "not cached".
    entry = cache.get(filename)
    if entry is not None:
        return entry[2]
    return updatecache(filename)
+
+
def checkcache(filename=None):
    """Discard cache entries that are out of date.
    (This is not checked upon each call!)"""

    # Collect candidate names first so deleting from the cache while
    # looping is safe.
    if filename is None:
        candidates = cache.keys()
    elif filename in cache:
        candidates = [filename]
    else:
        return

    for name in candidates:
        size, mtime, lines, fullname = cache[name]
        try:
            st = os.stat(fullname)
        except os.error:
            # Underlying file vanished: drop the entry.
            del cache[name]
            continue
        if size != st.st_size or mtime != st.st_mtime:
            del cache[name]
+
+
def updatecache(filename):
    """Update a cache entry and return its list of lines.
    If something's wrong, print a message, discard the cache entry,
    and return an empty list."""

    if filename in cache:
        del cache[filename]
    # Refuse empty and pseudo-filenames such as '<stdin>' (first plus
    # last character spelling '<>').
    if not filename or filename[0] + filename[-1] == '<>':
        return []
    fullname = filename
    try:
        stat = os.stat(fullname)
    except os.error, msg:
        # Try looking through the module search path.
        basename = os.path.split(filename)[1]
        for dirname in sys.path:
            # When using imputil, sys.path may contain things other than
            # strings; ignore them when it happens.
            try:
                fullname = os.path.join(dirname, basename)
            except (TypeError, AttributeError):
                # Not sufficiently string-like to do anything useful with.
                pass
            else:
                try:
                    stat = os.stat(fullname)
                    break
                except os.error:
                    pass
        else:
            # No luck
##          print '*** Cannot stat', filename, ':', msg
            return []
    try:
        # 'U' = universal newline mode: CR and CRLF are translated to '\n'.
        fp = open(fullname, 'rU')
        lines = fp.readlines()
        fp.close()
    except IOError, msg:
##      print '*** Cannot open', fullname, ':', msg
        return []
    size, mtime = stat.st_size, stat.st_mtime
    cache[filename] = size, mtime, lines, fullname
    return lines
diff --git a/depot_tools/release/win/python_24/Lib/locale.py b/depot_tools/release/win/python_24/Lib/locale.py
new file mode 100644
index 0000000..a2318104
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/locale.py
@@ -0,0 +1,776 @@
+""" Locale support.
+
+    The module provides low-level access to the C lib's locale APIs
+    and adds high level number formatting APIs as well as a locale
+    aliasing engine to complement these.
+
+    The aliasing engine includes support for many commonly used locale
+    names and maps them to values suitable for passing to the C lib's
+    setlocale() function. It also includes default encodings for all
+    supported locale names.
+
+"""
+
+import sys
+
+# Try importing the _locale module.
+#
+# If this fails, fall back on a basic 'C' locale emulation.
+
+# Yuck:  LC_MESSAGES is non-standard:  can't tell whether it exists before
+# trying the import.  So __all__ is also fiddled at the end of the file.
+__all__ = ["setlocale","Error","localeconv","strcoll","strxfrm",
+           "format","str","atof","atoi","LC_CTYPE","LC_COLLATE",
+           "LC_TIME","LC_MONETARY","LC_NUMERIC", "LC_ALL","CHAR_MAX"]
+
# Prefer the C implementation; fall back to a minimal pure-Python
# emulation of the 'C' locale when _locale is unavailable.
try:

    from _locale import *

except ImportError:

    # Locale emulation

    CHAR_MAX = 127
    LC_ALL = 6
    LC_COLLATE = 3
    LC_CTYPE = 0
    LC_MESSAGES = 5
    LC_MONETARY = 4
    LC_NUMERIC = 1
    LC_TIME = 2
    Error = ValueError

    def localeconv():
        """ localeconv() -> dict.
            Returns numeric and monetary locale-specific parameters.
        """
        # 'C' locale default values
        # (127 == CHAR_MAX means "value not applicable").
        return {'grouping': [127],
                'currency_symbol': '',
                'n_sign_posn': 127,
                'p_cs_precedes': 127,
                'n_cs_precedes': 127,
                'mon_grouping': [],
                'n_sep_by_space': 127,
                'decimal_point': '.',
                'negative_sign': '',
                'positive_sign': '',
                'p_sep_by_space': 127,
                'int_curr_symbol': '',
                'p_sign_posn': 127,
                'thousands_sep': '',
                'mon_thousands_sep': '',
                'frac_digits': 127,
                'mon_decimal_point': '',
                'int_frac_digits': 127}

    def setlocale(category, value=None):
        """ setlocale(integer,string=None) -> string.
            Activates/queries locale processing.
        """
        # The emulation supports only the 'C' locale.
        if value not in (None, '', 'C'):
            raise Error, '_locale emulation only supports "C" locale'
        return 'C'

    def strcoll(a,b):
        """ strcoll(string,string) -> int.
            Compares two strings according to the locale.
        """
        return cmp(a,b)

    def strxfrm(s):
        """ strxfrm(string) -> string.
            Returns a string that behaves for cmp locale-aware.
        """
        return s
+
+### Number formatting APIs
+
+# Author: Martin von Loewis
+
+#perform the grouping from right to left
def _group(s):
    """Insert locale thousands separators into s from the right.

    Returns (grouped_string, number_of_separators_inserted).  Trailing
    spaces from a width specifier are preserved.
    """
    conv=localeconv()
    grouping=conv['grouping']
    if not grouping:return (s, 0)
    result=""
    seps = 0
    spaces = ""
    # Split off trailing spaces so they survive the grouping.
    if s[-1] == ' ':
        sp = s.find(' ')
        spaces = s[sp:]
        s = s[:sp]
    while s and grouping:
        # if grouping is -1, we are done
        if grouping[0]==CHAR_MAX:
            break
        # 0: re-use last group ad infinitum
        # NOTE(review): if grouping[0] == 0 on the very first pass,
        # 'group' below is referenced before assignment -- confirm that
        # grouping lists never start with 0.
        elif grouping[0]!=0:
            #process last group
            group=grouping[0]
            grouping=grouping[1:]
        if result:
            result=s[-group:]+conv['thousands_sep']+result
            seps += 1
        else:
            result=s[-group:]
        s=s[:-group]
        if s and s[-1] not in "0123456789":
            # the leading string is only spaces and signs
            return s+result+spaces,seps
    if not result:
        return s+spaces,seps
    if s:
        result=s+conv['thousands_sep']+result
        seps += 1
    return result+spaces,seps
+
def format(f,val,grouping=0):
    """Formats a value in the same way that the % formatting would use,
    but takes the current locale into account.
    Grouping is applied if the third parameter is true."""
    result = f % val
    # Split off the fractional part so grouping and the locale decimal
    # point apply only where they belong.
    fields = result.split(".")
    seps = 0
    if grouping:
        fields[0],seps=_group(fields[0])
    if len(fields)==2:
        result = fields[0]+localeconv()['decimal_point']+fields[1]
    elif len(fields)==1:
        result = fields[0]
    else:
        raise Error, "Too many decimal points in result string"

    while seps:
        # If the number was formatted for a specific width, then it
        # might have been filled with spaces to the left or right. If
        # so, remove as many spaces as there were separators inserted.
        # Leading zeroes as fillers are not yet dealt with, as it is
        # not clear how they should interact with grouping.
        sp = result.find(" ")
        if sp==-1:break
        result = result[:sp]+result[sp+1:]
        seps -= 1

    return result
+
def str(val):
    """Convert a float to a string, taking the locale into account."""
    return format("%.12g",val)
+
def atof(string,func=float):
    "Parses a string as a float according to the locale settings."
    # Strip locale grouping separators, if any.
    separator = localeconv()['thousands_sep']
    if separator:
        string = string.replace(separator, '')
    # Canonicalize the locale decimal point to '.' for func().
    point = localeconv()['decimal_point']
    if point:
        string = string.replace(point, '.')
    return func(string)
+
def atoi(str):
    "Converts a string to an integer according to the locale settings."
    # Reuses atof with int() as the conversion function.
    return atof(str, int)
+
def _test():
    # Smoke test: format/parse round-trips under the user's default
    # locale; prints the results (not run on import).
    setlocale(LC_ALL, "")
    #do grouping
    s1=format("%d", 123456789,1)
    print s1, "is", atoi(s1)
    #standard formatting
    s1=str(3.14)
    print s1, "is", atof(s1)
+
+### Locale name aliasing engine
+
+# Author: Marc-Andre Lemburg, mal@lemburg.com
+# Various tweaks by Fredrik Lundh <fredrik@pythonware.com>
+
+# store away the low-level version of setlocale (it's
+# overridden below)
+_setlocale = setlocale
+
def normalize(localename):

    """ Returns a normalized locale code for the given locale
        name.

        The returned locale code is formatted for use with
        setlocale().

        If normalization fails, the original name is returned
        unchanged.

        If the given encoding is not known, the function defaults to
        the default encoding for the locale code just like setlocale()
        does.

    """
    # Lowercase the name and canonicalize ':' (used by some systems as
    # the encoding delimiter) to '.'.
    name = localename.lower()
    if ':' in name:
        name = name.replace(':', '.')
    if '.' in name:
        lang, enc = name.split('.')[:2]
        name = lang + '.' + enc
    else:
        lang = name
        enc = ''

    # First lookup: the full name, possibly including an encoding.
    code = locale_alias.get(name)
    if code is not None:
        return code

    # Second lookup: the bare language name; give up if unknown.
    code = locale_alias.get(lang)
    if code is None:
        return localename

    # Split the alias target into language and default encoding.
    if '.' in code:
        lang, defenc = code.split('.')
    else:
        lang = code
        defenc = ''
    # Prefer the caller's (alias-resolved) encoding over the default.
    if enc:
        enc = encoding_alias.get(enc, enc)
    else:
        enc = defenc
    if enc:
        return lang + '.' + enc
    return lang
+
def _parse_localename(localename):

    """ Parses the locale code for localename and returns the
        result as tuple (language code, encoding).

        The localename is normalized and passed through the locale
        alias engine. A ValueError is raised in case the locale name
        cannot be parsed.

        The language code corresponds to RFC 1766.  code and encoding
        can be None in case the values cannot be determined or are
        unknown to this implementation.

    """
    code = normalize(localename)
    if '@' in localename:
        # Deal with locale modifiers
        code, modifier = code.split('@')
        if modifier == 'euro' and '.' not in code:
            # Assume Latin-9 for @euro locales. This is bogus,
            # since some systems may use other encodings for these
            # locales. Also, we ignore other modifiers.
            return code, 'iso-8859-15'

    # 'lang.encoding' splits cleanly; bare 'C' has neither.
    if '.' in code:
        return tuple(code.split('.')[:2])
    elif code == 'C':
        return None, None
    raise ValueError, 'unknown locale: %s' % localename
+
+def _build_localename(localetuple):
+
+    """ Builds a locale code from the given tuple (language code,
+        encoding).
+
+        No aliasing or normalizing takes place.
+
+    """
+    language, encoding = localetuple
+    if language is None:
+        language = 'C'
+    if encoding is None:
+        return language
+    else:
+        return language + '.' + encoding
+
def getdefaultlocale(envvars=('LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG')):

    """ Tries to determine the default locale settings and returns
        them as tuple (language code, encoding).

        According to POSIX, a program which has not called
        setlocale(LC_ALL, "") runs using the portable 'C' locale.
        Calling setlocale(LC_ALL, "") lets it use the default locale as
        defined by the LANG variable. Since we don't want to interfere
        with the current locale setting we thus emulate the behavior
        in the way described above.

        To maintain compatibility with other platforms, not only the
        LANG variable is tested, but a list of variables given as
        envvars parameter. The first found to be defined will be
        used. envvars defaults to the search path used in GNU gettext;
        it must always contain the variable name 'LANG'.

        Except for the code 'C', the language code corresponds to RFC
        1766.  code and encoding can be None in case the values cannot
        be determined.

    """

    try:
        # check if it's supported by the _locale module
        import _locale
        code, encoding = _locale._getdefaultlocale()
    except (ImportError, AttributeError):
        pass
    else:
        # make sure the code/encoding values are valid
        if sys.platform == "win32" and code and code[:2] == "0x":
            # map windows language identifier to language name
            # NOTE(review): the windows_locale table is expected to be
            # defined elsewhere in this module -- confirm.
            code = windows_locale.get(int(code, 0))
        # ...add other platform-specific processing here, if
        # necessary...
        return code, encoding

    # fall back on POSIX behaviour
    import os
    lookup = os.environ.get
    for variable in envvars:
        localename = lookup(variable,None)
        if localename:
            break
    else:
        # No listed environment variable was set: use the 'C' locale.
        localename = 'C'
    return _parse_localename(localename)
+
+
def getlocale(category=LC_CTYPE):

    """ Returns the current setting for the given locale category as
        tuple (language code, encoding).

        category may be one of the LC_* value except LC_ALL. It
        defaults to LC_CTYPE.

        Except for the code 'C', the language code corresponds to RFC
        1766.  code and encoding can be None in case the values cannot
        be determined.

    """
    localename = _setlocale(category)
    # A composite LC_ALL result ('LC_CTYPE=...;LC_NUMERIC=...;...')
    # cannot be expressed as a single (language, encoding) pair.
    if category == LC_ALL and ';' in localename:
        raise TypeError, 'category LC_ALL is not supported'
    return _parse_localename(localename)
+
def setlocale(category, locale=None):

    """ Set the locale for the given category.  The locale can be
        a string, a locale tuple (language code, encoding), or None.

        Locale tuples are converted to strings the locale aliasing
        engine.  Locale strings are passed directly to the C lib.

        category may be given as one of the LC_* values.

    """
    # A (language, encoding) tuple is turned into a normalized locale
    # string; plain strings are passed straight through to the C lib.
    if locale and type(locale) is not type(""):
        # convert to string
        locale = normalize(_build_localename(locale))
    return _setlocale(category, locale)
+
def resetlocale(category=LC_ALL):

    """ Sets the locale for category to the default setting.

        The default setting is determined by calling
        getdefaultlocale(). category defaults to LC_ALL.

    """
    # Rebuild a locale string from the user's default and apply it.
    _setlocale(category, _build_localename(getdefaultlocale()))
+
# getpreferredencoding: one of three variants is defined at import
# time, depending on platform and on whether nl_langinfo/CODESET came
# in via 'from _locale import *'.
if sys.platform in ('win32', 'darwin', 'mac'):
    # On Win32, this will return the ANSI code page
    # On the Mac, it should return the system encoding;
    # it might return "ascii" instead
    def getpreferredencoding(do_setlocale = True):
        """Return the charset that the user is likely using."""
        import _locale
        return _locale._getdefaultlocale()[1]
else:
    # On Unix, if CODESET is available, use that.
    try:
        CODESET
    except NameError:
        # Fall back to parsing environment variables :-(
        def getpreferredencoding(do_setlocale = True):
            """Return the charset that the user is likely using,
            by looking at environment variables."""
            return getdefaultlocale()[1]
    else:
        def getpreferredencoding(do_setlocale = True):
            """Return the charset that the user is likely using,
            according to the system configuration."""
            # nl_langinfo reports on the *current* locale, so switch to
            # the user's default temporarily unless the caller forbids
            # it (and restore the old locale afterwards).
            if do_setlocale:
                oldloc = setlocale(LC_CTYPE)
                setlocale(LC_CTYPE, "")
                result = nl_langinfo(CODESET)
                setlocale(LC_CTYPE, oldloc)
                return result
            else:
                return nl_langinfo(CODESET)
+
+
+### Database
+#
+# The following data was extracted from the locale.alias file which
+# comes with X11 and then hand edited removing the explicit encoding
+# definitions and adding some more aliases. The file is usually
+# available as /usr/lib/X11/locale/locale.alias.
+#
+
+#
+# The encoding_alias table maps lowercase encoding alias names to C
+# locale encoding names (case-sensitive).
+#
# Note that 'ascii' and 'en' map to ISO8859-1, a superset of ASCII.
encoding_alias = {
        '437':                          'C',
        'c':                            'C',
        'iso8859':                      'ISO8859-1',
        '8859':                         'ISO8859-1',
        '88591':                        'ISO8859-1',
        'ascii':                        'ISO8859-1',
        'en':                           'ISO8859-1',
        'iso88591':                     'ISO8859-1',
        'iso_8859-1':                   'ISO8859-1',
        '885915':                       'ISO8859-15',
        'iso885915':                    'ISO8859-15',
        'iso_8859-15':                  'ISO8859-15',
        'iso8859-2':                    'ISO8859-2',
        'iso88592':                     'ISO8859-2',
        'iso_8859-2':                   'ISO8859-2',
        'iso88595':                     'ISO8859-5',
        'iso88596':                     'ISO8859-6',
        'iso88597':                     'ISO8859-7',
        'iso88598':                     'ISO8859-8',
        'iso88599':                     'ISO8859-9',
        'iso-2022-jp':                  'JIS7',
        'jis':                          'JIS7',
        'jis7':                         'JIS7',
        'sjis':                         'SJIS',
        'tis620':                       'TACTIS',
        'ajec':                         'eucJP',
        'eucjp':                        'eucJP',
        'ujis':                         'eucJP',
        'utf-8':                        'utf',
        'utf8':                         'utf',
        'utf8@ucs4':                    'utf',
}
+
+#
+# The locale_alias table maps lowercase alias names to C locale names
+# (case-sensitive). Encodings are always separated from the locale
+# name using a dot ('.'); they should only be given in case the
+# language name is needed to interpret the given encoding alias
+# correctly (CJK codes often have this need).
+#
+# NOTE(review): this table is vendored verbatim from Python 2.4's locale.py.
+# Entries are kept byte-identical to upstream; a few right-hand sides look
+# dubious (see inline notes) but should only change in sync with upstream.
+locale_alias = {
+        'american':                      'en_US.ISO8859-1',
+        'ar':                            'ar_AA.ISO8859-6',
+        'ar_aa':                         'ar_AA.ISO8859-6',
+        'ar_sa':                         'ar_SA.ISO8859-6',
+        'arabic':                        'ar_AA.ISO8859-6',
+        'bg':                            'bg_BG.ISO8859-5',
+        'bg_bg':                         'bg_BG.ISO8859-5',
+        'bulgarian':                     'bg_BG.ISO8859-5',
+        'c-french':                      'fr_CA.ISO8859-1',
+        'c':                             'C',
+        'c_c':                           'C',
+        'cextend':                       'en_US.ISO8859-1',
+        'chinese-s':                     'zh_CN.eucCN',
+        'chinese-t':                     'zh_TW.eucTW',
+        'croatian':                      'hr_HR.ISO8859-2',
+        'cs':                            'cs_CZ.ISO8859-2',
+        'cs_cs':                         'cs_CZ.ISO8859-2',
+        'cs_cz':                         'cs_CZ.ISO8859-2',
+        'cz':                            'cz_CZ.ISO8859-2',  # NOTE(review): 'cz_CZ' is not a real locale (Czech is cs_CZ) -- upstream quirk, verify before changing
+        'cz_cz':                         'cz_CZ.ISO8859-2',
+        'czech':                         'cs_CS.ISO8859-2',  # NOTE(review): 'cs_CS' looks like a typo for cs_CZ -- kept as upstream
+        'da':                            'da_DK.ISO8859-1',
+        'da_dk':                         'da_DK.ISO8859-1',
+        'danish':                        'da_DK.ISO8859-1',
+        'de':                            'de_DE.ISO8859-1',
+        'de_at':                         'de_AT.ISO8859-1',
+        'de_ch':                         'de_CH.ISO8859-1',
+        'de_de':                         'de_DE.ISO8859-1',
+        'dutch':                         'nl_BE.ISO8859-1',
+        'ee':                            'ee_EE.ISO8859-4',
+        'el':                            'el_GR.ISO8859-7',
+        'el_gr':                         'el_GR.ISO8859-7',
+        'en':                            'en_US.ISO8859-1',
+        'en_au':                         'en_AU.ISO8859-1',
+        'en_ca':                         'en_CA.ISO8859-1',
+        'en_gb':                         'en_GB.ISO8859-1',
+        'en_ie':                         'en_IE.ISO8859-1',
+        'en_nz':                         'en_NZ.ISO8859-1',
+        'en_uk':                         'en_GB.ISO8859-1',
+        'en_us':                         'en_US.ISO8859-1',
+        'eng_gb':                        'en_GB.ISO8859-1',
+        'english':                       'en_EN.ISO8859-1',  # NOTE(review): 'en_EN' is not a real locale -- upstream quirk, verify before changing
+        'english_uk':                    'en_GB.ISO8859-1',
+        'english_united-states':         'en_US.ISO8859-1',
+        'english_us':                    'en_US.ISO8859-1',
+        'es':                            'es_ES.ISO8859-1',
+        'es_ar':                         'es_AR.ISO8859-1',
+        'es_bo':                         'es_BO.ISO8859-1',
+        'es_cl':                         'es_CL.ISO8859-1',
+        'es_co':                         'es_CO.ISO8859-1',
+        'es_cr':                         'es_CR.ISO8859-1',
+        'es_ec':                         'es_EC.ISO8859-1',
+        'es_es':                         'es_ES.ISO8859-1',
+        'es_gt':                         'es_GT.ISO8859-1',
+        'es_mx':                         'es_MX.ISO8859-1',
+        'es_ni':                         'es_NI.ISO8859-1',
+        'es_pa':                         'es_PA.ISO8859-1',
+        'es_pe':                         'es_PE.ISO8859-1',
+        'es_py':                         'es_PY.ISO8859-1',
+        'es_sv':                         'es_SV.ISO8859-1',
+        'es_uy':                         'es_UY.ISO8859-1',
+        'es_ve':                         'es_VE.ISO8859-1',
+        'et':                            'et_EE.ISO8859-4',
+        'et_ee':                         'et_EE.ISO8859-4',
+        'fi':                            'fi_FI.ISO8859-1',
+        'fi_fi':                         'fi_FI.ISO8859-1',
+        'finnish':                       'fi_FI.ISO8859-1',
+        'fr':                            'fr_FR.ISO8859-1',
+        'fr_be':                         'fr_BE.ISO8859-1',
+        'fr_ca':                         'fr_CA.ISO8859-1',
+        'fr_ch':                         'fr_CH.ISO8859-1',
+        'fr_fr':                         'fr_FR.ISO8859-1',
+        'fre_fr':                        'fr_FR.ISO8859-1',
+        'french':                        'fr_FR.ISO8859-1',
+        'french_france':                 'fr_FR.ISO8859-1',
+        'ger_de':                        'de_DE.ISO8859-1',
+        'german':                        'de_DE.ISO8859-1',
+        'german_germany':                'de_DE.ISO8859-1',
+        'greek':                         'el_GR.ISO8859-7',
+        'hebrew':                        'iw_IL.ISO8859-8',
+        'hr':                            'hr_HR.ISO8859-2',
+        'hr_hr':                         'hr_HR.ISO8859-2',
+        'hu':                            'hu_HU.ISO8859-2',
+        'hu_hu':                         'hu_HU.ISO8859-2',
+        'hungarian':                     'hu_HU.ISO8859-2',
+        'icelandic':                     'is_IS.ISO8859-1',
+        'id':                            'id_ID.ISO8859-1',
+        'id_id':                         'id_ID.ISO8859-1',
+        'is':                            'is_IS.ISO8859-1',
+        'is_is':                         'is_IS.ISO8859-1',
+        'iso-8859-1':                    'en_US.ISO8859-1',
+        'iso-8859-15':                   'en_US.ISO8859-15',
+        'iso8859-1':                     'en_US.ISO8859-1',
+        'iso8859-15':                    'en_US.ISO8859-15',
+        'iso_8859_1':                    'en_US.ISO8859-1',
+        'iso_8859_15':                   'en_US.ISO8859-15',
+        'it':                            'it_IT.ISO8859-1',
+        'it_ch':                         'it_CH.ISO8859-1',
+        'it_it':                         'it_IT.ISO8859-1',
+        'italian':                       'it_IT.ISO8859-1',
+        'iw':                            'iw_IL.ISO8859-8',
+        'iw_il':                         'iw_IL.ISO8859-8',
+        'ja':                            'ja_JP.eucJP',
+        'ja.jis':                        'ja_JP.JIS7',
+        'ja.sjis':                       'ja_JP.SJIS',
+        'ja_jp':                         'ja_JP.eucJP',
+        'ja_jp.ajec':                    'ja_JP.eucJP',
+        'ja_jp.euc':                     'ja_JP.eucJP',
+        'ja_jp.eucjp':                   'ja_JP.eucJP',
+        'ja_jp.iso-2022-jp':             'ja_JP.JIS7',
+        'ja_jp.jis':                     'ja_JP.JIS7',
+        'ja_jp.jis7':                    'ja_JP.JIS7',
+        'ja_jp.mscode':                  'ja_JP.SJIS',
+        'ja_jp.sjis':                    'ja_JP.SJIS',
+        'ja_jp.ujis':                    'ja_JP.eucJP',
+        'japan':                         'ja_JP.eucJP',
+        'japanese':                      'ja_JP.SJIS',
+        'japanese-euc':                  'ja_JP.eucJP',
+        'japanese.euc':                  'ja_JP.eucJP',
+        'jp_jp':                         'ja_JP.eucJP',
+        'ko':                            'ko_KR.eucKR',
+        'ko_kr':                         'ko_KR.eucKR',
+        'ko_kr.euc':                     'ko_KR.eucKR',
+        'korean':                        'ko_KR.eucKR',
+        'lt':                            'lt_LT.ISO8859-4',
+        'lv':                            'lv_LV.ISO8859-4',
+        'mk':                            'mk_MK.ISO8859-5',
+        'mk_mk':                         'mk_MK.ISO8859-5',
+        'nl':                            'nl_NL.ISO8859-1',
+        'nl_be':                         'nl_BE.ISO8859-1',
+        'nl_nl':                         'nl_NL.ISO8859-1',
+        'no':                            'no_NO.ISO8859-1',
+        'no_no':                         'no_NO.ISO8859-1',
+        'norwegian':                     'no_NO.ISO8859-1',
+        'pl':                            'pl_PL.ISO8859-2',
+        'pl_pl':                         'pl_PL.ISO8859-2',
+        'polish':                        'pl_PL.ISO8859-2',
+        'portuguese':                    'pt_PT.ISO8859-1',
+        'portuguese_brazil':             'pt_BR.ISO8859-1',
+        'posix':                         'C',
+        'posix-utf2':                    'C',
+        'pt':                            'pt_PT.ISO8859-1',
+        'pt_br':                         'pt_BR.ISO8859-1',
+        'pt_pt':                         'pt_PT.ISO8859-1',
+        'ro':                            'ro_RO.ISO8859-2',
+        'ro_ro':                         'ro_RO.ISO8859-2',
+        'ru':                            'ru_RU.ISO8859-5',
+        'ru_ru':                         'ru_RU.ISO8859-5',
+        'rumanian':                      'ro_RO.ISO8859-2',
+        'russian':                       'ru_RU.ISO8859-5',
+        'serbocroatian':                 'sh_YU.ISO8859-2',
+        'sh':                            'sh_YU.ISO8859-2',
+        'sh_hr':                         'sh_HR.ISO8859-2',
+        'sh_sp':                         'sh_YU.ISO8859-2',
+        'sh_yu':                         'sh_YU.ISO8859-2',
+        'sk':                            'sk_SK.ISO8859-2',
+        'sk_sk':                         'sk_SK.ISO8859-2',
+        'sl':                            'sl_CS.ISO8859-2',
+        'sl_cs':                         'sl_CS.ISO8859-2',
+        'sl_si':                         'sl_SI.ISO8859-2',
+        'slovak':                        'sk_SK.ISO8859-2',
+        'slovene':                       'sl_CS.ISO8859-2',
+        'sp':                            'sp_YU.ISO8859-5',
+        'sp_yu':                         'sp_YU.ISO8859-5',
+        'spanish':                       'es_ES.ISO8859-1',
+        'spanish_spain':                 'es_ES.ISO8859-1',
+        'sr_sp':                         'sr_SP.ISO8859-2',
+        'sv':                            'sv_SE.ISO8859-1',
+        'sv_se':                         'sv_SE.ISO8859-1',
+        'swedish':                       'sv_SE.ISO8859-1',
+        'th_th':                         'th_TH.TACTIS',
+        'tr':                            'tr_TR.ISO8859-9',
+        'tr_tr':                         'tr_TR.ISO8859-9',
+        'turkish':                       'tr_TR.ISO8859-9',
+        'univ':                          'en_US.utf',
+        'universal':                     'en_US.utf',
+        'zh':                            'zh_CN.eucCN',
+        'zh_cn':                         'zh_CN.eucCN',
+        'zh_cn.big5':                    'zh_TW.eucTW',
+        'zh_cn.euc':                     'zh_CN.eucCN',
+        'zh_tw':                         'zh_TW.eucTW',
+        'zh_tw.euc':                     'zh_TW.eucTW',
+}
+
+#
+# this maps windows language identifiers (as used on Windows 95 and
+# earlier) to locale strings.
+#
+# NOTE: this mapping is incomplete.  If your language is missing, please
+# submit a bug report to Python bug manager, which you can find via:
+#     http://www.python.org/dev/
+# Make sure you include the missing language identifier and the suggested
+# locale code.
+#
+
+# Maps Windows LCID (language identifier) -> locale string.  Vendored verbatim
+# from Python 2.4; see inline review notes on suspicious upstream values.
+windows_locale = {
+    0x0404: "zh_TW", # Chinese (Taiwan)
+    0x0804: "zh_CN", # Chinese (PRC)
+    0x0406: "da_DK", # Danish
+    0x0413: "nl_NL", # Dutch (Netherlands)
+    0x0409: "en_US", # English (United States)
+    0x0809: "en_UK", # English (United Kingdom)  NOTE(review): "en_UK" is non-standard (the alias table maps 'en_uk' to en_GB) -- upstream value, verify before changing
+    0x0c09: "en_AU", # English (Australian)
+    0x1009: "en_CA", # English (Canadian)
+    0x1409: "en_NZ", # English (New Zealand)
+    0x1809: "en_IE", # English (Ireland)
+    0x1c09: "en_ZA", # English (South Africa)
+    0x040b: "fi_FI", # Finnish
+    0x040c: "fr_FR", # French (Standard)
+    0x080c: "fr_BE", # French (Belgian)
+    0x0c0c: "fr_CA", # French (Canadian)
+    0x100c: "fr_CH", # French (Switzerland)
+    0x0407: "de_DE", # German (Standard)
+    0x0408: "el_GR", # Greek
+    0x040d: "iw_IL", # Hebrew
+    0x040f: "is_IS", # Icelandic
+    0x0410: "it_IT", # Italian (Standard)
+    0x0411: "ja_JA", # Japanese  NOTE(review): "ja_JA" looks like a typo for ja_JP (cf. the ja_JP entries in locale_alias) -- kept as upstream
+    0x0414: "no_NO", # Norwegian (Bokmal)
+    0x0816: "pt_PT", # Portuguese (Standard)
+    0x0c0a: "es_ES", # Spanish (Modern Sort)
+    0x0441: "sw_KE", # Swahili (Kenya)
+    0x041d: "sv_SE", # Swedish
+    0x081d: "sv_FI", # Swedish (Finland)
+    0x041f: "tr_TR", # Turkish
+}
+
+def _print_locale():
+
+    """ Test function.
+
+        Prints the default locale as reported by getdefaultlocale(), then
+        the per-category locale settings at startup, after resetlocale(),
+        and (if supported) after setlocale(LC_ALL, "").
+    """
+    # Collect all LC_* constants exported by this module into a dict.
+    categories = {}
+    def _init_categories(categories=categories):
+        for k,v in globals().items():
+            if k[:3] == 'LC_':
+                categories[k] = v
+    _init_categories()
+    # LC_ALL is not a real category of its own; drop it from the listing.
+    del categories['LC_ALL']
+
+    print 'Locale defaults as determined by getdefaultlocale():'
+    print '-'*72
+    lang, enc = getdefaultlocale()
+    print 'Language: ', lang or '(undefined)'
+    print 'Encoding: ', enc or '(undefined)'
+    print
+
+    print 'Locale settings on startup:'
+    print '-'*72
+    for name,category in categories.items():
+        print name, '...'
+        lang, enc = getlocale(category)
+        print '   Language: ', lang or '(undefined)'
+        print '   Encoding: ', enc or '(undefined)'
+        print
+
+    print
+    print 'Locale settings after calling resetlocale():'
+    print '-'*72
+    resetlocale()
+    for name,category in categories.items():
+        print name, '...'
+        lang, enc = getlocale(category)
+        print '   Language: ', lang or '(undefined)'
+        print '   Encoding: ', enc or '(undefined)'
+        print
+
+    # setlocale(LC_ALL, "") may fail when the OS environment names a locale
+    # the C library does not support; report that instead of crashing.
+    try:
+        setlocale(LC_ALL, "")
+    except:
+        print 'NOTE:'
+        print 'setlocale(LC_ALL, "") does not support the default locale'
+        print 'given in the OS environment variables.'
+    else:
+        print
+        print 'Locale settings after calling setlocale(LC_ALL, ""):'
+        print '-'*72
+        for name,category in categories.items():
+            print name, '...'
+            lang, enc = getlocale(category)
+            print '   Language: ', lang or '(undefined)'
+            print '   Encoding: ', enc or '(undefined)'
+            print
+
+###
+
+# LC_MESSAGES is only provided by some platforms' _locale module; export it
+# from this module's __all__ only when it actually exists.
+try:
+    LC_MESSAGES
+except NameError:
+    pass
+else:
+    __all__.append("LC_MESSAGES")
+
+# Self-test: dump locale aliasing information, then number formatting.
+if __name__=='__main__':
+    print 'Locale aliasing:'
+    print
+    _print_locale()
+    print
+    print 'Number formatting:'
+    print
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/logging/__init__.py b/depot_tools/release/win/python_24/Lib/logging/__init__.py
new file mode 100644
index 0000000..f0dec04
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/logging/__init__.py
@@ -0,0 +1,1304 @@
+# Copyright 2001-2004 by Vinay Sajip. All Rights Reserved.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Vinay Sajip
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
+# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
+# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
+# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Logging package for Python. Based on PEP 282 and comments thereto in
+comp.lang.python, and influenced by Apache's log4j system.
+
+Should work under Python versions >= 1.5.2, except that source line
+information is not available unless 'sys._getframe()' is.
+
+Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
+
+To use, simply 'import logging' and log away!
+"""
+
+import sys, os, types, time, string, cStringIO
+
+# Threading support is optional: on platforms without the thread module the
+# logging package still works, just without any locking ('thread' is set to
+# None and checked before every lock/thread-id operation below).
+try:
+    import thread
+    import threading
+except ImportError:
+    thread = None
+
+__author__  = "Vinay Sajip <vinay_sajip@red-dove.com>"
+__status__  = "beta"
+__version__ = "0.4.9.6"
+__date__    = "20 October 2004"
+
+#---------------------------------------------------------------------------
+#   Miscellaneous module data
+#---------------------------------------------------------------------------
+
+#
+#_srcfile is used when walking the stack to check when we've got the first
+# caller stack frame.
+#
+# __file__ may name the compiled (.pyc/.pyo) file; derive the .py name so it
+# compares equal to the source filenames found in stack frames.
+if string.lower(__file__[-4:]) in ['.pyc', '.pyo']:
+    _srcfile = __file__[:-4] + '.py'
+else:
+    _srcfile = __file__
+# normcase() makes the comparison case/separator-insensitive on Windows.
+_srcfile = os.path.normcase(_srcfile)
+
+# _srcfile is only used in conjunction with sys._getframe().
+# To provide compatibility with older versions of Python, set _srcfile
+# to None if _getframe() is not available; this value will prevent
+# findCaller() from being called.
+if not hasattr(sys, "_getframe"):
+    _srcfile = None
+
+#
+#_startTime is used as the base when calculating the relative time of events
+# (see LogRecord.relativeCreated)
+#
+_startTime = time.time()
+
+#
+#raiseExceptions is used to see if exceptions during handling should be
+#propagated (a module-level flag; users may set it to 0 to silence errors
+#raised while emitting log records)
+#
+raiseExceptions = 1
+
+#---------------------------------------------------------------------------
+#   Level related stuff
+#---------------------------------------------------------------------------
+#
+# Default levels and level names, these can be replaced with any positive set
+# of values having corresponding names. There is a pseudo-level, NOTSET, which
+# is only really there as a lower limit for user-defined levels. Handlers and
+# loggers are initialized with NOTSET so that they will log all messages, even
+# at user-defined levels.
+#
+# Numeric severity levels; FATAL and WARN are kept as aliases for
+# backward compatibility.
+CRITICAL = 50
+FATAL = CRITICAL
+ERROR = 40
+WARNING = 30
+WARN = WARNING
+INFO = 20
+DEBUG = 10
+NOTSET = 0
+
+# Two-way mapping: level number -> name AND name -> level number.
+# NOTE(review): there is no 'FATAL' string key, so getLevelName('FATAL')
+# falls through to the "Level %s" default -- presumably intentional, since
+# FATAL is just an alias; verify before adding it.
+_levelNames = {
+    CRITICAL : 'CRITICAL',
+    ERROR : 'ERROR',
+    WARNING : 'WARNING',
+    INFO : 'INFO',
+    DEBUG : 'DEBUG',
+    NOTSET : 'NOTSET',
+    'CRITICAL' : CRITICAL,
+    'ERROR' : ERROR,
+    'WARN' : WARNING,
+    'WARNING' : WARNING,
+    'INFO' : INFO,
+    'DEBUG' : DEBUG,
+    'NOTSET' : NOTSET,
+}
+
+def getLevelName(level):
+    """
+    Return the textual representation of logging level 'level'.
+
+    If the level is one of the predefined levels (CRITICAL, ERROR, WARNING,
+    INFO, DEBUG) then you get the corresponding string. If you have
+    associated levels with names using addLevelName then the name you have
+    associated with 'level' is returned.
+
+    If a numeric value corresponding to one of the defined levels is passed
+    in, the corresponding string representation is returned.
+
+    Otherwise, the string "Level %s" % level is returned.
+    """
+    # _levelNames maps both directions, so this also converts a known level
+    # name string back to its numeric value.
+    return _levelNames.get(level, ("Level %s" % level))
+
+def addLevelName(level, levelName):
+    """
+    Associate 'levelName' with 'level'.
+
+    This is used when converting levels to text during message formatting.
+    Registers the mapping in both directions (number -> name and
+    name -> number) under the module lock.
+    """
+    _acquireLock()
+    try:    #unlikely to cause an exception, but you never know...
+        _levelNames[level] = levelName
+        _levelNames[levelName] = level
+    finally:
+        _releaseLock()
+
+#---------------------------------------------------------------------------
+#   Thread-related stuff
+#---------------------------------------------------------------------------
+
+#
+#_lock is used to serialize access to shared data structures in this module.
+#This needs to be an RLock because fileConfig() creates Handlers and so
+#might arbitrary user threads. Since Handler.__init__() updates the shared
+#dictionary _handlers, it needs to acquire the lock. But if configuring,
+#the lock would already have been acquired - so we need an RLock.
+#The same argument applies to Loggers and Manager.loggerDict.
+#
+# Created lazily on first acquire; stays None when threading is unavailable.
+_lock = None
+
+def _acquireLock():
+    """
+    Acquire the module-level lock for serializing access to shared data.
+
+    This should be released with _releaseLock().  No-op when the platform
+    has no thread support (thread is None).
+    """
+    global _lock
+    if (not _lock) and thread:
+        _lock = threading.RLock()
+    if _lock:
+        _lock.acquire()
+
+def _releaseLock():
+    """
+    Release the module-level lock acquired by calling _acquireLock().
+
+    No-op when the lock was never created (no thread support).
+    """
+    if _lock:
+        _lock.release()
+
+#---------------------------------------------------------------------------
+#   The logging record
+#---------------------------------------------------------------------------
+
+class LogRecord:
+    """
+    A LogRecord instance represents an event being logged.
+
+    LogRecord instances are created every time something is logged. They
+    contain all the information pertinent to the event being logged. The
+    main information passed in is in msg and args, which are combined
+    using str(msg) % args to create the message field of the record. The
+    record also includes information such as when the record was created,
+    the source line where the logging call was made, and any exception
+    information to be logged.
+    """
+    def __init__(self, name, level, pathname, lineno, msg, args, exc_info):
+        """
+        Initialize a logging record with interesting information.
+        """
+        # Capture the creation time first, before any other work.
+        ct = time.time()
+        self.name = name
+        self.msg = msg
+        #
+        # The following statement allows passing of a dictionary as a sole
+        # argument, so that you can do something like
+        #  logging.debug("a %(a)d b %(b)s", {'a':1, 'b':2})
+        # Suggested by Stefan Behnel.
+        # Note that without the test for args[0], we get a problem because
+        # during formatting, we test to see if the arg is present using
+        # 'if self.args:'. If the event being logged is e.g. 'Value is %d'
+        # and if the passed arg fails 'if self.args:' then no formatting
+        # is done. For example, logger.warn('Value is %d', 0) would log
+        # 'Value is %d' instead of 'Value is 0'.
+        # For the use case of passing a dictionary, this should not be a
+        # problem.
+        if args and (len(args) == 1) and args[0] and (type(args[0]) == types.DictType):
+            args = args[0]
+        self.args = args
+        self.levelname = getLevelName(level)
+        self.levelno = level
+        self.pathname = pathname
+        # basename/splitext can fail on unusual pathname values; fall back
+        # to the raw pathname and a placeholder module name.
+        try:
+            self.filename = os.path.basename(pathname)
+            self.module = os.path.splitext(self.filename)[0]
+        except:
+            self.filename = pathname
+            self.module = "Unknown module"
+        self.exc_info = exc_info
+        self.exc_text = None      # used to cache the traceback text
+        self.lineno = lineno
+        self.created = ct
+        # Fractional-second part of the creation time, in milliseconds.
+        self.msecs = (ct - long(ct)) * 1000
+        # Milliseconds since the logging module was loaded (_startTime).
+        self.relativeCreated = (self.created - _startTime) * 1000
+        if thread:
+            self.thread = thread.get_ident()
+        else:
+            self.thread = None
+        # os.getpid is not available on every platform this targets.
+        if hasattr(os, 'getpid'):
+            self.process = os.getpid()
+        else:
+            self.process = None
+
+    def __str__(self):
+        return '<LogRecord: %s, %s, %s, %s, "%s">'%(self.name, self.levelno,
+            self.pathname, self.lineno, self.msg)
+
+    def getMessage(self):
+        """
+        Return the message for this LogRecord.
+
+        Return the message for this LogRecord after merging any user-supplied
+        arguments with the message.
+        """
+        if not hasattr(types, "UnicodeType"): #if no unicode support...
+            msg = str(self.msg)
+        else:
+            try:
+                msg = str(self.msg)
+            except UnicodeError:
+                msg = self.msg      #Defer encoding till later
+        if self.args:
+            msg = msg % self.args
+        return msg
+
+def makeLogRecord(dict):
+    """
+    Make a LogRecord whose attributes are defined by the specified dictionary.
+    This function is useful for converting a logging event received over
+    a socket connection (which is sent as a dictionary) into a LogRecord
+    instance.
+    """
+    # NOTE(review): the parameter name shadows the builtin 'dict'; kept for
+    # interface compatibility with upstream.
+    # Build an empty record, then overwrite its attributes wholesale.
+    rv = LogRecord(None, None, "", 0, "", (), None)
+    rv.__dict__.update(dict)
+    return rv
+
+#---------------------------------------------------------------------------
+#   Formatter classes and functions
+#---------------------------------------------------------------------------
+
+class Formatter:
+    """
+    Formatter instances are used to convert a LogRecord to text.
+
+    Formatters need to know how a LogRecord is constructed. They are
+    responsible for converting a LogRecord to (usually) a string which can
+    be interpreted by either a human or an external system. The base Formatter
+    allows a formatting string to be specified. If none is supplied, the
+    default value of "%(message)s" is used.
+
+    The Formatter can be initialized with a format string which makes use of
+    knowledge of the LogRecord attributes - e.g. the default value mentioned
+    above makes use of the fact that the user's message and arguments are pre-
+    formatted into a LogRecord's message attribute. Currently, the useful
+    attributes in a LogRecord are described by:
+
+    %(name)s            Name of the logger (logging channel)
+    %(levelno)s         Numeric logging level for the message (DEBUG, INFO,
+                        WARNING, ERROR, CRITICAL)
+    %(levelname)s       Text logging level for the message ("DEBUG", "INFO",
+                        "WARNING", "ERROR", "CRITICAL")
+    %(pathname)s        Full pathname of the source file where the logging
+                        call was issued (if available)
+    %(filename)s        Filename portion of pathname
+    %(module)s          Module (name portion of filename)
+    %(lineno)d          Source line number where the logging call was issued
+                        (if available)
+    %(created)f         Time when the LogRecord was created (time.time()
+                        return value)
+    %(asctime)s         Textual time when the LogRecord was created
+    %(msecs)d           Millisecond portion of the creation time
+    %(relativeCreated)d Time in milliseconds when the LogRecord was created,
+                        relative to the time the logging module was loaded
+                        (typically at application startup time)
+    %(thread)d          Thread ID (if available)
+    %(process)d         Process ID (if available)
+    %(message)s         The result of record.getMessage(), computed just as
+                        the record is emitted
+    """
+
+    # Class-level hook: set to time.gmtime (here or per-instance) to render
+    # all timestamps in GMT instead of local time.
+    converter = time.localtime
+
+    def __init__(self, fmt=None, datefmt=None):
+        """
+        Initialize the formatter with specified format strings.
+
+        Initialize the formatter either with the specified format string, or a
+        default as described above. Allow for specialized date formatting with
+        the optional datefmt argument (if omitted, you get the ISO8601 format).
+        """
+        if fmt:
+            self._fmt = fmt
+        else:
+            self._fmt = "%(message)s"
+        self.datefmt = datefmt
+
+    def formatTime(self, record, datefmt=None):
+        """
+        Return the creation time of the specified LogRecord as formatted text.
+
+        This method should be called from format() by a formatter which
+        wants to make use of a formatted time. This method can be overridden
+        in formatters to provide for any specific requirement, but the
+        basic behaviour is as follows: if datefmt (a string) is specified,
+        it is used with time.strftime() to format the creation time of the
+        record. Otherwise, the ISO8601 format is used. The resulting
+        string is returned. This function uses a user-configurable function
+        to convert the creation time to a tuple. By default, time.localtime()
+        is used; to change this for a particular formatter instance, set the
+        'converter' attribute to a function with the same signature as
+        time.localtime() or time.gmtime(). To change it for all formatters,
+        for example if you want all logging times to be shown in GMT,
+        set the 'converter' attribute in the Formatter class.
+        """
+        ct = self.converter(record.created)
+        if datefmt:
+            s = time.strftime(datefmt, ct)
+        else:
+            # ISO8601-style default, with milliseconds appended by hand
+            # (strftime has no millisecond directive).
+            t = time.strftime("%Y-%m-%d %H:%M:%S", ct)
+            s = "%s,%03d" % (t, record.msecs)
+        return s
+
+    def formatException(self, ei):
+        """
+        Format and return the specified exception information as a string.
+
+        This default implementation just uses
+        traceback.print_exception()
+        """
+        import traceback
+        sio = cStringIO.StringIO()
+        traceback.print_exception(ei[0], ei[1], ei[2], None, sio)
+        s = sio.getvalue()
+        sio.close()
+        # Strip the trailing newline traceback output ends with.
+        # NOTE(review): assumes the traceback text is never empty.
+        if s[-1] == "\n":
+            s = s[:-1]
+        return s
+
+    def format(self, record):
+        """
+        Format the specified record as text.
+
+        The record's attribute dictionary is used as the operand to a
+        string formatting operation which yields the returned string.
+        Before formatting the dictionary, a couple of preparatory steps
+        are carried out. The message attribute of the record is computed
+        using LogRecord.getMessage(). If the formatting string contains
+        "%(asctime)", formatTime() is called to format the event time.
+        If there is exception information, it is formatted using
+        formatException() and appended to the message.
+        """
+        record.message = record.getMessage()
+        # Only pay for time formatting when the format string asks for it.
+        if string.find(self._fmt,"%(asctime)") >= 0:
+            record.asctime = self.formatTime(record, self.datefmt)
+        s = self._fmt % record.__dict__
+        if record.exc_info:
+            # Cache the traceback text to avoid converting it multiple times
+            # (it's constant anyway)
+            if not record.exc_text:
+                record.exc_text = self.formatException(record.exc_info)
+        if record.exc_text:
+            if s[-1] != "\n":
+                s = s + "\n"
+            s = s + record.exc_text
+        return s
+
+#
+#   The default formatter to use when no other is specified
+#   (e.g. BufferingFormatter falls back to this shared instance).
+#
+_defaultFormatter = Formatter()
+
+class BufferingFormatter:
+    """
+    A formatter suitable for formatting a number of records.
+
+    Subclasses may override formatHeader()/formatFooter() to wrap the
+    batch of formatted records.
+    """
+    def __init__(self, linefmt=None):
+        """
+        Optionally specify a formatter which will be used to format each
+        individual record.  Defaults to the module's _defaultFormatter.
+        """
+        if linefmt:
+            self.linefmt = linefmt
+        else:
+            self.linefmt = _defaultFormatter
+
+    def formatHeader(self, records):
+        """
+        Return the header string for the specified records.
+        """
+        return ""
+
+    def formatFooter(self, records):
+        """
+        Return the footer string for the specified records.
+        """
+        return ""
+
+    def format(self, records):
+        """
+        Format the specified records and return the result as a string.
+
+        An empty record list yields the empty string (no header/footer).
+        """
+        rv = ""
+        if len(records) > 0:
+            rv = rv + self.formatHeader(records)
+            for record in records:
+                rv = rv + self.linefmt.format(record)
+            rv = rv + self.formatFooter(records)
+        return rv
+
+#---------------------------------------------------------------------------
+#   Filter classes and functions
+#---------------------------------------------------------------------------
+
+class Filter:
+    """
+    Filter instances are used to perform arbitrary filtering of LogRecords.
+
+    Loggers and Handlers can optionally use Filter instances to filter
+    records as desired. The base filter class only allows events which are
+    below a certain point in the logger hierarchy. For example, a filter
+    initialized with "A.B" will allow events logged by loggers "A.B",
+    "A.B.C", "A.B.C.D", "A.B.D" etc. but not "A.BB", "B.A.B" etc. If
+    initialized with the empty string, all events are passed.
+    """
+    def __init__(self, name=''):
+        """
+        Initialize a filter.
+
+        Initialize with the name of the logger which, together with its
+        children, will have its events allowed through the filter. If no
+        name is specified, allow every event.
+        """
+        self.name = name
+        # Cached length of the name; avoids recomputing it on every record.
+        self.nlen = len(name)
+
+    def filter(self, record):
+        """
+        Determine if the specified record is to be logged.
+
+        Is the specified record to be logged? Returns 0 for no, nonzero for
+        yes. If deemed appropriate, the record may be modified in-place.
+        """
+        if self.nlen == 0:
+            # Empty filter name: allow everything.
+            return 1
+        elif self.name == record.name:
+            return 1
+        elif string.find(record.name, self.name, 0, self.nlen) != 0:
+            # record.name does not start with self.name: reject.
+            return 0
+        # Prefix matched but names differ: allow only true children, i.e.
+        # "A.B.C" for filter "A.B", not "A.BB".
+        return (record.name[self.nlen] == ".")
+
+class Filterer:
+    """
+    A base class for loggers and handlers which allows them to share
+    common code.
+    """
+    def __init__(self):
+        """
+        Initialize the list of filters to be an empty list.
+        """
+        self.filters = []
+
+    def addFilter(self, filter):
+        """
+        Add the specified filter to this handler.
+
+        Adding the same filter twice is a no-op.
+        """
+        # NOTE(review): the parameter name shadows the builtin 'filter';
+        # kept for interface compatibility with upstream.
+        if not (filter in self.filters):
+            self.filters.append(filter)
+
+    def removeFilter(self, filter):
+        """
+        Remove the specified filter from this handler.
+
+        Removing a filter that was never added is a no-op.
+        """
+        if filter in self.filters:
+            self.filters.remove(filter)
+
+    def filter(self, record):
+        """
+        Determine if a record is loggable by consulting all the filters.
+
+        The default is to allow the record to be logged; any filter can veto
+        this and the record is then dropped. Returns a zero value if a record
+        is to be dropped, else non-zero.
+        """
+        rv = 1
+        for f in self.filters:
+            if not f.filter(record):
+                # First veto wins; no need to consult remaining filters.
+                rv = 0
+                break
+        return rv
+
+#---------------------------------------------------------------------------
+#   Handler classes and functions
+#---------------------------------------------------------------------------
+
_handlers = {}  #all live handlers, keyed by instance; flushed/closed by shutdown()
+
class Handler(Filterer):
    """
    Handler instances dispatch logging events to specific destinations.

    The base handler class. Acts as a placeholder which defines the Handler
    interface. Handlers can optionally use Formatter instances to format
    records as desired. By default, no formatter is specified; in this case,
    the 'raw' message as determined by record.message is logged.
    """
    def __init__(self, level=NOTSET):
        """
        Initializes the instance - basically setting the formatter to None
        and the filter list to empty.

        Also registers the handler in the module-level _handlers table, so
        that shutdown() can flush and close it at application exit, and
        creates the per-instance I/O lock.
        """
        Filterer.__init__(self)
        self.level = level
        self.formatter = None
        #get the module data lock, as we're updating a shared structure.
        _acquireLock()
        try:    #unlikely to raise an exception, but you never know...
            _handlers[self] = 1
        finally:
            _releaseLock()
        self.createLock()

    def createLock(self):
        """
        Acquire a thread lock for serializing access to the underlying I/O.

        If the thread module is unavailable, self.lock is None and
        acquire()/release() are no-ops.
        """
        if thread:
            self.lock = thread.allocate_lock()
        else:
            self.lock = None

    def acquire(self):
        """
        Acquire the I/O thread lock.
        """
        if self.lock:
            self.lock.acquire()

    def release(self):
        """
        Release the I/O thread lock.
        """
        if self.lock:
            self.lock.release()

    def setLevel(self, level):
        """
        Set the logging level of this handler.
        """
        self.level = level

    def format(self, record):
        """
        Format the specified record.

        If a formatter is set, use it. Otherwise, use the default formatter
        for the module.
        """
        if self.formatter:
            fmt = self.formatter
        else:
            fmt = _defaultFormatter
        return fmt.format(record)

    def emit(self, record):
        """
        Do whatever it takes to actually log the specified logging record.

        This version is intended to be implemented by subclasses and so
        raises a NotImplementedError.
        """
        raise NotImplementedError, 'emit must be implemented '\
                                    'by Handler subclasses'

    def handle(self, record):
        """
        Conditionally emit the specified logging record.

        Emission depends on filters which may have been added to the handler.
        Wrap the actual emission of the record with acquisition/release of
        the I/O thread lock. Returns whether the filter passed the record for
        emission.
        """
        rv = self.filter(record)
        if rv:
            #serialize emit() so interleaved threads can't mix output
            self.acquire()
            try:
                self.emit(record)
            finally:
                self.release()
        return rv

    def setFormatter(self, fmt):
        """
        Set the formatter for this handler.
        """
        self.formatter = fmt

    def flush(self):
        """
        Ensure all logging output has been flushed.

        This version does nothing and is intended to be implemented by
        subclasses.
        """
        pass

    def close(self):
        """
        Tidy up any resources used by the handler.

        This version removes the handler from an internal list
        of handlers which is closed when shutdown() is called. Subclasses
        should ensure that this gets called from overridden close()
        methods.
        """
        #get the module data lock, as we're updating a shared structure.
        _acquireLock()
        try:    #unlikely to raise an exception, but you never know...
            del _handlers[self]
        finally:
            _releaseLock()

    def handleError(self, record):
        """
        Handle errors which occur during an emit() call.

        This method should be called from handlers when an exception is
        encountered during an emit() call. If raiseExceptions is false,
        exceptions get silently ignored. This is what is mostly wanted
        for a logging system - most users will not care about errors in
        the logging system, they are more interested in application errors.
        You could, however, replace this with a custom handler if you wish.
        The record which was being processed is passed in to this method.
        """
        if raiseExceptions:
            import traceback
            ei = sys.exc_info()
            traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
            del ei
+
class StreamHandler(Handler):
    """
    A handler class which writes logging records, appropriately formatted,
    to a stream. Note that this class does not close the stream, as
    sys.stdout or sys.stderr may be used.
    """
    def __init__(self, strm=None):
        """
        Initialize the handler.

        If strm is not specified, sys.stderr is used.
        """
        Handler.__init__(self)
        self.stream = strm or sys.stderr
        self.formatter = None

    def flush(self):
        """
        Flushes the stream.
        """
        self.stream.flush()

    def emit(self, record):
        """
        Emit a record.

        If a formatter is specified, it is used to format the record.
        The record is then written to the stream with a trailing newline
        [N.B. this may be removed depending on feedback]. If exception
        information is present, it is formatted using
        traceback.print_exception and appended to the stream.
        """
        try:
            out = "%s\n" % self.format(record)
            if not hasattr(types, "UnicodeType"): #if no unicode support...
                self.stream.write(out)
            else:
                try:
                    self.stream.write(out)
                except UnicodeError:
                    #stream can't take unicode directly; retry as UTF-8 bytes
                    self.stream.write(out.encode("UTF-8"))
            self.flush()
        except:
            self.handleError(record)
+
class FileHandler(StreamHandler):
    """
    A handler class which writes formatted logging records to disk files.
    """
    def __init__(self, filename, mode="a"):
        """
        Open the specified file and use it as the stream for logging.
        """
        stream = open(filename, mode)
        StreamHandler.__init__(self, stream)
        #keep the absolute path, otherwise derived classes which use this
        #may come a cropper when the current directory changes
        self.baseFilename = os.path.abspath(filename)
        self.mode = mode

    def close(self):
        """
        Flushes and closes the stream, then runs the base-class cleanup.
        """
        self.flush()
        self.stream.close()
        StreamHandler.close(self)
+
+#---------------------------------------------------------------------------
+#   Manager classes and functions
+#---------------------------------------------------------------------------
+
class PlaceHolder:
    """
    PlaceHolder instances are used in the Manager logger hierarchy to take
    the place of nodes for which no loggers have been defined. This class is
    intended for internal use only and not as part of the public API.
    """
    def __init__(self, alogger):
        """
        Initialize with the specified logger being a child of this placeholder.
        """
        self.loggers = [alogger]

    def append(self, alogger):
        """
        Add the specified logger as a child of this placeholder.

        Appending a logger which is already a child has no effect.
        """
        if alogger not in self.loggers:
            self.loggers.append(alogger)
+
#
#   Determine which class to use when instantiating loggers.
#
_loggerClass = None  #set to Logger further down; replaceable via setLoggerClass()
+
def setLoggerClass(klass):
    """
    Set the class to be used when instantiating a logger. The class should
    define __init__() such that only a name argument is required, and the
    __init__() should call Logger.__init__()

    Raises TypeError when klass is neither Logger nor a subclass of it.
    """
    global _loggerClass
    if klass != Logger and not issubclass(klass, Logger):
        raise TypeError("logger not derived from logging.Logger: "
                        + klass.__name__)
    _loggerClass = klass
+
def getLoggerClass():
    """
    Return the class to be used when instantiating a logger (as installed
    by setLoggerClass(), or the default Logger).
    """
    return _loggerClass
+
class Manager:
    """
    There is [under normal circumstances] just one Manager instance, which
    holds the hierarchy of loggers.
    """
    def __init__(self, rootnode):
        """
        Initialize the manager with the root node of the logger hierarchy.
        """
        self.root = rootnode
        self.disable = 0                #global severity cutoff; set via logging.disable()
        self.emittedNoHandlerWarning = 0
        self.loggerDict = {}            #maps logger name -> Logger or PlaceHolder

    def getLogger(self, name):
        """
        Get a logger with the specified name (channel name), creating it
        if it doesn't yet exist. This name is a dot-separated hierarchical
        name, such as "a", "a.b", "a.b.c" or similar.

        If a PlaceHolder existed for the specified name [i.e. the logger
        didn't exist but a child of it did], replace it with the created
        logger and fix up the parent/child references which pointed to the
        placeholder to now point to the logger.
        """
        rv = None
        _acquireLock()
        try:
            #membership test via "in" rather than the deprecated has_key(),
            #matching the membership tests used elsewhere in this module
            if name in self.loggerDict:
                rv = self.loggerDict[name]
                if isinstance(rv, PlaceHolder):
                    ph = rv
                    rv = _loggerClass(name)
                    rv.manager = self
                    self.loggerDict[name] = rv
                    self._fixupChildren(ph, rv)
                    self._fixupParents(rv)
            else:
                rv = _loggerClass(name)
                rv.manager = self
                self.loggerDict[name] = rv
                self._fixupParents(rv)
        finally:
            _releaseLock()
        return rv

    def _fixupParents(self, alogger):
        """
        Ensure that there are either loggers or placeholders all the way
        from the specified logger to the root of the logger hierarchy.
        """
        name = alogger.name
        i = string.rfind(name, ".")
        rv = None
        #walk up the dotted-name chain, creating placeholders as needed,
        #until an existing ancestor Logger is found
        while (i > 0) and not rv:
            substr = name[:i]
            if substr not in self.loggerDict:
                self.loggerDict[substr] = PlaceHolder(alogger)
            else:
                obj = self.loggerDict[substr]
                if isinstance(obj, Logger):
                    rv = obj
                else:
                    assert isinstance(obj, PlaceHolder)
                    obj.append(alogger)
            i = string.rfind(name, ".", 0, i - 1)
        if not rv:
            rv = self.root
        alogger.parent = rv

    def _fixupChildren(self, ph, alogger):
        """
        Ensure that children of the placeholder ph are connected to the
        specified logger.
        """
        for c in ph.loggers:
            #"!=" replaces the deprecated "<>" operator; behaviour identical
            if string.find(c.parent.name, alogger.name) != 0:
                alogger.parent = c.parent
                c.parent = alogger
+
+#---------------------------------------------------------------------------
+#   Logger classes and functions
+#---------------------------------------------------------------------------
+
+class Logger(Filterer):
+    """
+    Instances of the Logger class represent a single logging channel. A
+    "logging channel" indicates an area of an application. Exactly how an
+    "area" is defined is up to the application developer. Since an
+    application can have any number of areas, logging channels are identified
+    by a unique string. Application areas can be nested (e.g. an area
+    of "input processing" might include sub-areas "read CSV files", "read
+    XLS files" and "read Gnumeric files"). To cater for this natural nesting,
+    channel names are organized into a namespace hierarchy where levels are
+    separated by periods, much like the Java or Python package namespace. So
+    in the instance given above, channel names might be "input" for the upper
+    level, and "input.csv", "input.xls" and "input.gnu" for the sub-levels.
+    There is no arbitrary limit to the depth of nesting.
+    """
+    def __init__(self, name, level=NOTSET):
+        """
+        Initialize the logger with a name and an optional level.
+        """
+        Filterer.__init__(self)
+        self.name = name
+        self.level = level
+        self.parent = None
+        self.propagate = 1
+        self.handlers = []
+        self.disabled = 0
+
+    def setLevel(self, level):
+        """
+        Set the logging level of this logger.
+        """
+        self.level = level
+
+    def debug(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'DEBUG'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.debug("Houston, we have a %s", "thorny problem", exc_info=1)
+        """
+        if self.manager.disable >= DEBUG:
+            return
+        if DEBUG >= self.getEffectiveLevel():
+            apply(self._log, (DEBUG, msg, args), kwargs)
+
+    def info(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'INFO'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.info("Houston, we have a %s", "interesting problem", exc_info=1)
+        """
+        if self.manager.disable >= INFO:
+            return
+        if INFO >= self.getEffectiveLevel():
+            apply(self._log, (INFO, msg, args), kwargs)
+
+    def warning(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'WARNING'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.warning("Houston, we have a %s", "bit of a problem", exc_info=1)
+        """
+        if self.manager.disable >= WARNING:
+            return
+        if self.isEnabledFor(WARNING):
+            apply(self._log, (WARNING, msg, args), kwargs)
+
+    warn = warning
+
+    def error(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'ERROR'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.error("Houston, we have a %s", "major problem", exc_info=1)
+        """
+        if self.manager.disable >= ERROR:
+            return
+        if self.isEnabledFor(ERROR):
+            apply(self._log, (ERROR, msg, args), kwargs)
+
+    def exception(self, msg, *args):
+        """
+        Convenience method for logging an ERROR with exception information.
+        """
+        apply(self.error, (msg,) + args, {'exc_info': 1})
+
+    def critical(self, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with severity 'CRITICAL'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.critical("Houston, we have a %s", "major disaster", exc_info=1)
+        """
+        if self.manager.disable >= CRITICAL:
+            return
+        if CRITICAL >= self.getEffectiveLevel():
+            apply(self._log, (CRITICAL, msg, args), kwargs)
+
+    fatal = critical
+
+    def log(self, level, msg, *args, **kwargs):
+        """
+        Log 'msg % args' with the integer severity 'level'.
+
+        To pass exception information, use the keyword argument exc_info with
+        a true value, e.g.
+
+        logger.log(level, "We have a %s", "mysterious problem", exc_info=1)
+        """
+        if type(level) != types.IntType:
+            if raiseExceptions:
+                raise TypeError, "level must be an integer"
+            else:
+                return
+        if self.manager.disable >= level:
+            return
+        if self.isEnabledFor(level):
+            apply(self._log, (level, msg, args), kwargs)
+
+    def findCaller(self):
+        """
+        Find the stack frame of the caller so that we can note the source
+        file name and line number.
+        """
+        f = sys._getframe(1)
+        while 1:
+            co = f.f_code
+            filename = os.path.normcase(co.co_filename)
+            if filename == _srcfile:
+                f = f.f_back
+                continue
+            return filename, f.f_lineno
+
+    def makeRecord(self, name, level, fn, lno, msg, args, exc_info):
+        """
+        A factory method which can be overridden in subclasses to create
+        specialized LogRecords.
+        """
+        return LogRecord(name, level, fn, lno, msg, args, exc_info)
+
+    def _log(self, level, msg, args, exc_info=None):
+        """
+        Low-level logging routine which creates a LogRecord and then calls
+        all the handlers of this logger to handle the record.
+        """
+        if _srcfile:
+            fn, lno = self.findCaller()
+        else:
+            fn, lno = "<unknown file>", 0
+        if exc_info:
+            if type(exc_info) != types.TupleType:
+                exc_info = sys.exc_info()
+        record = self.makeRecord(self.name, level, fn, lno, msg, args, exc_info)
+        self.handle(record)
+
+    def handle(self, record):
+        """
+        Call the handlers for the specified record.
+
+        This method is used for unpickled records received from a socket, as
+        well as those created locally. Logger-level filtering is applied.
+        """
+        if (not self.disabled) and self.filter(record):
+            self.callHandlers(record)
+
+    def addHandler(self, hdlr):
+        """
+        Add the specified handler to this logger.
+        """
+        if not (hdlr in self.handlers):
+            self.handlers.append(hdlr)
+
+    def removeHandler(self, hdlr):
+        """
+        Remove the specified handler from this logger.
+        """
+        if hdlr in self.handlers:
+            #hdlr.close()
+            self.handlers.remove(hdlr)
+
+    def callHandlers(self, record):
+        """
+        Pass a record to all relevant handlers.
+
+        Loop through all handlers for this logger and its parents in the
+        logger hierarchy. If no handler was found, output a one-off error
+        message to sys.stderr. Stop searching up the hierarchy whenever a
+        logger with the "propagate" attribute set to zero is found - that
+        will be the last logger whose handlers are called.
+        """
+        c = self
+        found = 0
+        while c:
+            for hdlr in c.handlers:
+                found = found + 1
+                if record.levelno >= hdlr.level:
+                    hdlr.handle(record)
+            if not c.propagate:
+                c = None    #break out
+            else:
+                c = c.parent
+        if (found == 0) and not self.manager.emittedNoHandlerWarning:
+            sys.stderr.write("No handlers could be found for logger"
+                             " \"%s\"\n" % self.name)
+            self.manager.emittedNoHandlerWarning = 1
+
+    def getEffectiveLevel(self):
+        """
+        Get the effective level for this logger.
+
+        Loop through this logger and its parents in the logger hierarchy,
+        looking for a non-zero logging level. Return the first one found.
+        """
+        logger = self
+        while logger:
+            if logger.level:
+                return logger.level
+            logger = logger.parent
+        return NOTSET
+
+    def isEnabledFor(self, level):
+        """
+        Is this logger enabled for level 'level'?
+        """
+        if self.manager.disable >= level:
+            return 0
+        return level >= self.getEffectiveLevel()
+
class RootLogger(Logger):
    """
    A root logger is not that different to any other logger, except that
    it must have a logging level and there is only one instance of it in
    the hierarchy.
    """
    def __init__(self, level):
        """
        Initialize the root logger with the fixed name "root".
        """
        Logger.__init__(self, "root", level)
+
#default concrete logger class; replaceable via setLoggerClass()
_loggerClass = Logger

#create the single root logger, and hang it and the shared Manager off the
#Logger class so that every logger instance can reach them
root = RootLogger(WARNING)
Logger.root = root
Logger.manager = Manager(Logger.root)
+
+#---------------------------------------------------------------------------
+# Configuration classes and functions
+#---------------------------------------------------------------------------
+
BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s"  #default format used by basicConfig()
+
def basicConfig(**kwargs):
    """
    Do basic configuration for the logging system.

    This function does nothing if the root logger already has handlers
    configured. It is a convenience method intended for use by simple scripts
    to do one-shot configuration of the logging package.

    The default behaviour is to create a StreamHandler which writes to
    sys.stderr, set a formatter using the BASIC_FORMAT format string, and
    add the handler to the root logger.

    Optional keyword arguments:

    filename  Specifies that a FileHandler be created, using the specified
              filename, rather than a StreamHandler.
    filemode  Specifies the mode to open the file, if filename is specified
              (if filemode is unspecified, it defaults to "a").
    format    Use the specified format string for the handler.
    datefmt   Use the specified date/time format.
    level     Set the root logger level to the specified level.
    stream    Use the specified stream to initialize the StreamHandler. Note
              that this argument is incompatible with 'filename' - if both
              are present, 'stream' is ignored.

    Note that you could specify a stream created using open(filename, mode)
    rather than passing the filename and mode in. However, it should be
    remembered that StreamHandler does not close its stream (since it may be
    using sys.stdout or sys.stderr), whereas FileHandler closes its stream
    when the handler is closed.
    """
    #already configured - leave everything alone
    if root.handlers:
        return
    filename = kwargs.get("filename")
    if filename:
        hdlr = FileHandler(filename, kwargs.get("filemode", "a"))
    else:
        hdlr = StreamHandler(kwargs.get("stream"))
    hdlr.setFormatter(Formatter(kwargs.get("format", BASIC_FORMAT),
                                kwargs.get("datefmt", None)))
    root.addHandler(hdlr)
    level = kwargs.get("level")
    if level:
        root.setLevel(level)
+
+#---------------------------------------------------------------------------
+# Utility functions at module level.
+# Basically delegate everything to the root logger.
+#---------------------------------------------------------------------------
+
def getLogger(name=None):
    """
    Return a logger with the specified name, creating it if necessary.

    If no name is specified, return the root logger.
    """
    if not name:
        return root
    return Logger.manager.getLogger(name)
+
+#def getRootLogger():
+#    """
+#    Return the root logger.
+#
+#    Note that getLogger('') now does the same thing, so this function is
+#    deprecated and may disappear in the future.
+#    """
+#    return root
+
def critical(msg, *args, **kwargs):
    """
    Log a message with severity 'CRITICAL' on the root logger.
    """
    #make sure the root logger has at least one handler before delegating
    if not root.handlers:
        basicConfig()
    root.critical(msg, *args, **kwargs)

fatal = critical
+
def error(msg, *args, **kwargs):
    """
    Log a message with severity 'ERROR' on the root logger.
    """
    #make sure the root logger has at least one handler before delegating
    if not root.handlers:
        basicConfig()
    root.error(msg, *args, **kwargs)
+
def exception(msg, *args):
    """
    Log a message with severity 'ERROR' on the root logger,
    with exception information.
    """
    error(msg, *args, **{'exc_info': 1})
+
def warning(msg, *args, **kwargs):
    """
    Log a message with severity 'WARNING' on the root logger.
    """
    #make sure the root logger has at least one handler before delegating
    if not root.handlers:
        basicConfig()
    root.warning(msg, *args, **kwargs)

warn = warning
+
def info(msg, *args, **kwargs):
    """
    Log a message with severity 'INFO' on the root logger.
    """
    #make sure the root logger has at least one handler before delegating
    if not root.handlers:
        basicConfig()
    root.info(msg, *args, **kwargs)
+
def debug(msg, *args, **kwargs):
    """
    Log a message with severity 'DEBUG' on the root logger.
    """
    #make sure the root logger has at least one handler before delegating
    if not root.handlers:
        basicConfig()
    root.debug(msg, *args, **kwargs)
+
def log(level, msg, *args, **kwargs):
    """
    Log 'msg % args' with the integer severity 'level' on the root logger.
    """
    #make sure the root logger has at least one handler before delegating
    if not root.handlers:
        basicConfig()
    root.log(level, msg, *args, **kwargs)
+
def disable(level):
    """
    Disable all logging calls less severe than 'level'.

    Sets the manager-wide 'disable' threshold that every Logger logging
    method consults (via self.manager.disable) before its own level checks.
    """
    root.manager.disable = level
+
def shutdown():
    """
    Perform any cleanup actions in the logging system (e.g. flushing
    buffers).

    Should be called at application exit.
    """
    #take a snapshot of the handler table: Handler.close() removes the
    #handler from _handlers while we iterate
    for h in list(_handlers):
        #errors might occur, for example, if files are locked
        #we just ignore them
        try:
            h.flush()
            h.close()
        except:
            pass
+
#Let's try and shutdown automatically on application exit...
try:
    import atexit
    atexit.register(shutdown)
except ImportError: # for Python versions < 2.0
    #no atexit module: wrap sys.exit so shutdown() still runs on a normal
    #exit; the default argument binds the original sys.exit at def time
    def exithook(status, old_exit=sys.exit):
        try:
            shutdown()
        finally:
            old_exit(status)

    sys.exit = exithook
diff --git a/depot_tools/release/win/python_24/Lib/logging/config.py b/depot_tools/release/win/python_24/Lib/logging/config.py
new file mode 100644
index 0000000..92f2af0a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/logging/config.py
@@ -0,0 +1,304 @@
+# Copyright 2001-2004 by Vinay Sajip. All Rights Reserved.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Vinay Sajip
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
+# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
+# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
+# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Configuration functions for the logging package for Python. The core package
+is based on PEP 282 and comments thereto in comp.lang.python, and influenced
+by Apache's log4j system.
+
+Should work under Python versions >= 1.5.2, except that source line
+information is not available unless 'sys._getframe()' is.
+
+Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
+
+To use, simply 'import logging' and log away!
+"""
+
+import sys, logging, logging.handlers, string, thread, threading, socket, struct, os
+
+from SocketServer import ThreadingTCPServer, StreamRequestHandler
+
+
+#Port on which listen() accepts new configuration files by default.
+DEFAULT_LOGGING_CONFIG_PORT = 9030
+
+#Errno raised when the peer resets the connection; the numeric value is
+#platform-specific.
+if sys.platform == "win32":
+    RESET_ERROR = 10054   #WSAECONNRESET
+else:
+    RESET_ERROR = 104     #ECONNRESET
+
+#
+#   The following code implements a socket listener for on-the-fly
+#   reconfiguration of logging.
+#
+#   _listener holds the server object doing the listening
+_listener = None
+
+def fileConfig(fname, defaults=None):
+    """
+    Read the logging configuration from a ConfigParser-format file.
+
+    This can be called several times from an application, allowing an end user
+    the ability to select from various pre-canned configurations (if the
+    developer provides a mechanism to present the choices and load the chosen
+    configuration).
+    In versions of ConfigParser which have the readfp method [typically
+    shipped in 2.x versions of Python], you can pass in a file-like object
+    rather than a filename, in which case the file-like object will be read
+    using readfp.
+    """
+    import ConfigParser
+
+    cp = ConfigParser.ConfigParser(defaults)
+    if hasattr(cp, 'readfp') and hasattr(fname, 'readline'):
+        cp.readfp(fname)
+    else:
+        cp.read(fname)
+    #first, do the formatters...
+    flist = cp.get("formatters", "keys")
+    if len(flist):
+        flist = string.split(flist, ",")
+        formatters = {}
+        for form in flist:
+            sectname = "formatter_%s" % form
+            opts = cp.options(sectname)
+            if "format" in opts:
+                fs = cp.get(sectname, "format", 1)
+            else:
+                fs = None
+            if "datefmt" in opts:
+                dfs = cp.get(sectname, "datefmt", 1)
+            else:
+                dfs = None
+            f = logging.Formatter(fs, dfs)
+            formatters[form] = f
+    #next, do the handlers...
+    #critical section...
+    logging._acquireLock()
+    try:
+        try:
+            #first, lose the existing handlers...
+            logging._handlers.clear()
+            #now set up the new ones...
+            hlist = cp.get("handlers", "keys")
+            if len(hlist):
+                hlist = string.split(hlist, ",")
+                handlers = {}
+                fixups = [] #for inter-handler references
+                for hand in hlist:
+                    try:
+                        sectname = "handler_%s" % hand
+                        klass = cp.get(sectname, "class")
+                        opts = cp.options(sectname)
+                        if "formatter" in opts:
+                            fmt = cp.get(sectname, "formatter")
+                        else:
+                            fmt = ""
+                        klass = eval(klass, vars(logging))
+                        args = cp.get(sectname, "args")
+                        args = eval(args, vars(logging))
+                        h = apply(klass, args)
+                        if "level" in opts:
+                            level = cp.get(sectname, "level")
+                            h.setLevel(logging._levelNames[level])
+                        if len(fmt):
+                            h.setFormatter(formatters[fmt])
+                        #temporary hack for FileHandler and MemoryHandler.
+                        if klass == logging.handlers.MemoryHandler:
+                            if "target" in opts:
+                                target = cp.get(sectname,"target")
+                            else:
+                                target = ""
+                            if len(target): #the target handler may not be loaded yet, so keep for later...
+                                fixups.append((h, target))
+                        handlers[hand] = h
+                    except:     #if an error occurs when instantiating a handler, too bad
+                        pass    #this could happen e.g. because of lack of privileges
+                #now all handlers are loaded, fixup inter-handler references...
+                for fixup in fixups:
+                    h = fixup[0]
+                    t = fixup[1]
+                    h.setTarget(handlers[t])
+            #at last, the loggers...first the root...
+            llist = cp.get("loggers", "keys")
+            llist = string.split(llist, ",")
+            llist.remove("root")
+            sectname = "logger_root"
+            root = logging.root
+            log = root
+            opts = cp.options(sectname)
+            if "level" in opts:
+                level = cp.get(sectname, "level")
+                log.setLevel(logging._levelNames[level])
+            for h in root.handlers[:]:
+                root.removeHandler(h)
+            hlist = cp.get(sectname, "handlers")
+            if len(hlist):
+                hlist = string.split(hlist, ",")
+                for hand in hlist:
+                    log.addHandler(handlers[hand])
+            #and now the others...
+            #we don't want to lose the existing loggers,
+            #since other threads may have pointers to them.
+            #existing is set to contain all existing loggers,
+            #and as we go through the new configuration we
+            #remove any which are configured. At the end,
+            #what's left in existing is the set of loggers
+            #which were in the previous configuration but
+            #which are not in the new configuration.
+            existing = root.manager.loggerDict.keys()
+            #now set up the new ones...
+            for log in llist:
+                sectname = "logger_%s" % log
+                qn = cp.get(sectname, "qualname")
+                opts = cp.options(sectname)
+                if "propagate" in opts:
+                    propagate = cp.getint(sectname, "propagate")
+                else:
+                    propagate = 1
+                logger = logging.getLogger(qn)
+                if qn in existing:
+                    existing.remove(qn)
+                if "level" in opts:
+                    level = cp.get(sectname, "level")
+                    logger.setLevel(logging._levelNames[level])
+                for h in logger.handlers[:]:
+                    logger.removeHandler(h)
+                logger.propagate = propagate
+                logger.disabled = 0
+                hlist = cp.get(sectname, "handlers")
+                if len(hlist):
+                    hlist = string.split(hlist, ",")
+                    for hand in hlist:
+                        logger.addHandler(handlers[hand])
+            #Disable any old loggers. There's no point deleting
+            #them as other threads may continue to hold references
+            #and by disabling them, you stop them doing any logging.
+            for log in existing:
+                root.manager.loggerDict[log].disabled = 1
+        except:
+            import traceback
+            ei = sys.exc_info()
+            traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr)
+            del ei
+    finally:
+        logging._releaseLock()
+
+def listen(port=DEFAULT_LOGGING_CONFIG_PORT):
+    """
+    Start up a socket server on the specified port, and listen for new
+    configurations.
+
+    These will be sent as a file suitable for processing by fileConfig().
+    Returns a Thread object on which you can call start() to start the server,
+    and which you can join() when appropriate. To stop the server, call
+    stopListening().
+    """
+    if not thread:
+        raise NotImplementedError, "listen() needs threading to work"
+
+    class ConfigStreamHandler(StreamRequestHandler):
+        """
+        Handler for a logging configuration request.
+
+        It expects a completely new logging configuration and uses fileConfig
+        to install it.
+        """
+        def handle(self):
+            """
+            Handle a request.
+
+            Each request is expected to be a 4-byte length,
+            followed by the config file. Uses fileConfig() to do the
+            grunt work.
+            """
+            import tempfile
+            try:
+                conn = self.connection
+                chunk = conn.recv(4)
+                if len(chunk) == 4:
+                    slen = struct.unpack(">L", chunk)[0]
+                    chunk = self.connection.recv(slen)
+                    while len(chunk) < slen:
+                        chunk = chunk + conn.recv(slen - len(chunk))
+                    #Apply new configuration. We'd like to be able to
+                    #create a StringIO and pass that in, but unfortunately
+                    #1.5.2 ConfigParser does not support reading file
+                    #objects, only actual files. So we create a temporary
+                    #file and remove it later.
+                    file = tempfile.mktemp(".ini")
+                    f = open(file, "w")
+                    f.write(chunk)
+                    f.close()
+                    fileConfig(file)
+                    os.remove(file)
+            except socket.error, e:
+                if type(e.args) != types.TupleType:
+                    raise
+                else:
+                    errcode = e.args[0]
+                    if errcode != RESET_ERROR:
+                        raise
+
+    class ConfigSocketReceiver(ThreadingTCPServer):
+        """
+        A simple TCP socket-based logging config receiver.
+        """
+
+        allow_reuse_address = 1
+
+        def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT,
+                     handler=None):
+            ThreadingTCPServer.__init__(self, (host, port), handler)
+            logging._acquireLock()
+            self.abort = 0
+            logging._releaseLock()
+            self.timeout = 1
+
+        def serve_until_stopped(self):
+            import select
+            abort = 0
+            while not abort:
+                rd, wr, ex = select.select([self.socket.fileno()],
+                                           [], [],
+                                           self.timeout)
+                if rd:
+                    self.handle_request()
+                logging._acquireLock()
+                abort = self.abort
+                logging._releaseLock()
+
+    def serve(rcvr, hdlr, port):
+        server = rcvr(port=port, handler=hdlr)
+        global _listener
+        logging._acquireLock()
+        _listener = server
+        logging._releaseLock()
+        server.serve_until_stopped()
+
+    return threading.Thread(target=serve,
+                            args=(ConfigSocketReceiver,
+                                  ConfigStreamHandler, port))
+
+def stopListening():
+    """
+    Stop the listening server which was created with a call to listen().
+    """
+    global _listener
+    if _listener:
+        logging._acquireLock()
+        _listener.abort = 1
+        _listener = None
+        logging._releaseLock()
diff --git a/depot_tools/release/win/python_24/Lib/logging/handlers.py b/depot_tools/release/win/python_24/Lib/logging/handlers.py
new file mode 100644
index 0000000..19aefa64a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/logging/handlers.py
@@ -0,0 +1,954 @@
+# Copyright 2001-2004 by Vinay Sajip. All Rights Reserved.
+#
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Vinay Sajip
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
+# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL
+# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR
+# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
+# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+"""
+Additional handlers for the logging package for Python. The core package is
+based on PEP 282 and comments thereto in comp.lang.python, and influenced by
+Apache's log4j system.
+
+Should work under Python versions >= 1.5.2, except that source line
+information is not available unless 'sys._getframe()' is.
+
+Copyright (C) 2001-2004 Vinay Sajip. All Rights Reserved.
+
+To use, simply 'import logging' and log away!
+"""
+
+import sys, logging, socket, types, os, string, cPickle, struct, time, glob
+
+#
+# Some constants...
+#
+
+#Default ports used by the network handlers in this module.
+DEFAULT_TCP_LOGGING_PORT    = 9020
+DEFAULT_UDP_LOGGING_PORT    = 9021
+DEFAULT_HTTP_LOGGING_PORT   = 9022
+DEFAULT_SOAP_LOGGING_PORT   = 9023
+#Well-known UDP port used by syslog daemons.
+SYSLOG_UDP_PORT             = 514
+
+class BaseRotatingHandler(logging.FileHandler):
+    """
+    Base class for handlers that rotate log files at a certain point.
+    Not meant to be instantiated directly.  Instead, use RotatingFileHandler
+    or TimedRotatingFileHandler.
+    """
+    def __init__(self, filename, mode):
+        """
+        Use the specified filename for streamed logging
+        """
+        logging.FileHandler.__init__(self, filename, mode)
+
+    def emit(self, record):
+        """
+        Emit a record.
+
+        Output the record to the file, catering for rollover as described
+        in doRollover().
+        """
+        try:
+            if self.shouldRollover(record):
+                self.doRollover()
+            logging.FileHandler.emit(self, record)
+        except:
+            self.handleError(record)
+
+class RotatingFileHandler(BaseRotatingHandler):
+    """
+    Handler for logging to a set of files, which switches from one file
+    to the next when the current file reaches a certain size.
+    """
+    def __init__(self, filename, mode="a", maxBytes=0, backupCount=0):
+        """
+        Open the specified file and use it as the stream for logging.
+
+        By default, the file grows indefinitely. You can specify particular
+        values of maxBytes and backupCount to allow the file to rollover at
+        a predetermined size.
+
+        Rollover occurs whenever the current log file is nearly maxBytes in
+        length. If backupCount is >= 1, the system will successively create
+        new files with the same pathname as the base file, but with extensions
+        ".1", ".2" etc. appended to it. For example, with a backupCount of 5
+        and a base file name of "app.log", you would get "app.log",
+        "app.log.1", "app.log.2", ... through to "app.log.5". The file being
+        written to is always "app.log" - when it gets filled up, it is closed
+        and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc.
+        exist, then they are renamed to "app.log.2", "app.log.3" etc.
+        respectively.
+
+        If maxBytes is zero, rollover never occurs.
+        """
+        self.mode = mode
+        if maxBytes > 0:
+            self.mode = "a" # doesn't make sense otherwise!
+        BaseRotatingHandler.__init__(self, filename, self.mode)
+        self.maxBytes = maxBytes
+        self.backupCount = backupCount
+
+    def doRollover(self):
+        """
+        Do a rollover, as described in __init__().
+        """
+
+        self.stream.close()
+        if self.backupCount > 0:
+            for i in range(self.backupCount - 1, 0, -1):
+                sfn = "%s.%d" % (self.baseFilename, i)
+                dfn = "%s.%d" % (self.baseFilename, i + 1)
+                if os.path.exists(sfn):
+                    #print "%s -> %s" % (sfn, dfn)
+                    if os.path.exists(dfn):
+                        os.remove(dfn)
+                    os.rename(sfn, dfn)
+            dfn = self.baseFilename + ".1"
+            if os.path.exists(dfn):
+                os.remove(dfn)
+            os.rename(self.baseFilename, dfn)
+            #print "%s -> %s" % (self.baseFilename, dfn)
+        self.stream = open(self.baseFilename, "w")
+
+    def shouldRollover(self, record):
+        """
+        Determine if rollover should occur.
+
+        Basically, see if the supplied record would cause the file to exceed
+        the size limit we have.
+        """
+        if self.maxBytes > 0:                   # are we rolling over?
+            msg = "%s\n" % self.format(record)
+            self.stream.seek(0, 2)  #due to non-posix-compliant Windows feature
+            if self.stream.tell() + len(msg) >= self.maxBytes:
+                return 1
+        return 0
+
+class TimedRotatingFileHandler(BaseRotatingHandler):
+    """
+    Handler for logging to a file, rotating the log file at certain timed
+    intervals.
+
+    If backupCount is > 0, when rollover is done, no more than backupCount
+    files are kept - the oldest ones are deleted.
+    """
+    def __init__(self, filename, when='h', interval=1, backupCount=0):
+        BaseRotatingHandler.__init__(self, filename, 'a')
+        self.when = string.upper(when)
+        self.backupCount = backupCount
+        # Calculate the real rollover interval, which is just the number of
+        # seconds between rollovers.  Also set the filename suffix used when
+        # a rollover occurs.  Current 'when' events supported:
+        # S - Seconds
+        # M - Minutes
+        # H - Hours
+        # D - Days
+        # midnight - roll over at midnight
+        # W{0-6} - roll over on a certain day; 0 - Monday
+        #
+        # Case of the 'when' specifier is not important; lower or upper case
+        # will work.
+        currentTime = int(time.time())
+        if self.when == 'S':
+            self.interval = 1 # one second
+            self.suffix = "%Y-%m-%d_%H-%M-%S"
+        elif self.when == 'M':
+            self.interval = 60 # one minute
+            self.suffix = "%Y-%m-%d_%H-%M"
+        elif self.when == 'H':
+            self.interval = 60 * 60 # one hour
+            self.suffix = "%Y-%m-%d_%H"
+        elif self.when == 'D' or self.when == 'MIDNIGHT':
+            self.interval = 60 * 60 * 24 # one day
+            self.suffix = "%Y-%m-%d"
+        elif self.when.startswith('W'):
+            self.interval = 60 * 60 * 24 * 7 # one week
+            if len(self.when) != 2:
+                raise ValueError("You must specify a day for weekly rollover from 0 to 6 (0 is Monday): %s" % self.when)
+            if self.when[1] < '0' or self.when[1] > '6':
+                raise ValueError("Invalid day specified for weekly rollover: %s" % self.when)
+            self.dayOfWeek = int(self.when[1])
+            self.suffix = "%Y-%m-%d"
+        else:
+            raise ValueError("Invalid rollover interval specified: %s" % self.when)
+
+        self.interval = self.interval * interval # multiply by units requested
+        self.rolloverAt = currentTime + self.interval
+
+        # If we are rolling over at midnight or weekly, then the interval is already known.
+        # What we need to figure out is WHEN the next interval is.  In other words,
+        # if you are rolling over at midnight, then your base interval is 1 day,
+        # but you want to start that one day clock at midnight, not now.  So, we
+        # have to fudge the rolloverAt value in order to trigger the first rollover
+        # at the right time.  After that, the regular interval will take care of
+        # the rest.  Note that this code doesn't care about leap seconds. :)
+        if self.when == 'MIDNIGHT' or self.when.startswith('W'):
+            # This could be done with less code, but I wanted it to be clear
+            t = time.localtime(currentTime)
+            currentHour = t[3]
+            currentMinute = t[4]
+            currentSecond = t[5]
+            # r is the number of seconds left between now and midnight
+            r = (24 - currentHour) * 60 * 60 # number of hours in seconds
+            r = r + (59 - currentMinute) * 60 # plus the number of minutes (in secs)
+            r = r + (59 - currentSecond) # plus the number of seconds
+            self.rolloverAt = currentTime + r
+            # If we are rolling over on a certain day, add in the number of days until
+            # the next rollover, but offset by 1 since we just calculated the time
+            # until the next day starts.  There are three cases:
+            # Case 1) The day to rollover is today; in this case, do nothing
+            # Case 2) The day to rollover is further in the interval (i.e., today is
+            #         day 2 (Wednesday) and rollover is on day 6 (Sunday).  Days to
+            #         next rollover is simply 6 - 2 - 1, or 3.
+            # Case 3) The day to rollover is behind us in the interval (i.e., today
+            #         is day 5 (Saturday) and rollover is on day 3 (Thursday).
+            #         Days to rollover is 6 - 5 + 3, or 4.  In this case, it's the
+            #         number of days left in the current week (1) plus the number
+            #         of days in the next week until the rollover day (3).
+            if when.startswith('W'):
+                day = t[6] # 0 is Monday
+                if day > self.dayOfWeek:
+                    daysToWait = (day - self.dayOfWeek) - 1
+                    self.rolloverAt = self.rolloverAt + (daysToWait * (60 * 60 * 24))
+                if day < self.dayOfWeek:
+                    daysToWait = (6 - self.dayOfWeek) + day
+                    self.rolloverAt = self.rolloverAt + (daysToWait * (60 * 60 * 24))
+
+        #print "Will rollover at %d, %d seconds from now" % (self.rolloverAt, self.rolloverAt - currentTime)
+
+    def shouldRollover(self, record):
+        """
+        Determine if rollover should occur
+
+        record is not used, as we are just comparing times, but it is needed so
+        the method siguratures are the same
+        """
+        t = int(time.time())
+        if t >= self.rolloverAt:
+            return 1
+        #print "No need to rollover: %d, %d" % (t, self.rolloverAt)
+        return 0
+
+    def doRollover(self):
+        """
+        do a rollover; in this case, a date/time stamp is appended to the filename
+        when the rollover happens.  However, you want the file to be named for the
+        start of the interval, not the current time.  If there is a backup count,
+        then we have to get a list of matching filenames, sort them and remove
+        the one with the oldest suffix.
+        """
+        self.stream.close()
+        # get the time that this sequence started at and make it a TimeTuple
+        t = self.rolloverAt - self.interval
+        timeTuple = time.localtime(t)
+        dfn = self.baseFilename + "." + time.strftime(self.suffix, timeTuple)
+        if os.path.exists(dfn):
+            os.remove(dfn)
+        os.rename(self.baseFilename, dfn)
+        if self.backupCount > 0:
+            # find the oldest log file and delete it
+            s = glob.glob(self.baseFilename + ".20*")
+            if len(s) > self.backupCount:
+                s.sort()
+                os.remove(s[0])
+        #print "%s -> %s" % (self.baseFilename, dfn)
+        self.stream = open(self.baseFilename, "w")
+        self.rolloverAt = int(time.time()) + self.interval
+
+class SocketHandler(logging.Handler):
+    """
+    A handler class which writes logging records, in pickle format, to
+    a streaming socket. The socket is kept open across logging calls.
+    If the peer resets it, an attempt is made to reconnect on the next call.
+    The pickle which is sent is that of the LogRecord's attribute dictionary
+    (__dict__), so that the receiver does not need to have the logging module
+    installed in order to process the logging event.
+
+    To unpickle the record at the receiving end into a LogRecord, use the
+    makeLogRecord function.
+    """
+
+    def __init__(self, host, port):
+        """
+        Initializes the handler with a specific host address and port.
+
+        The attribute 'closeOnError' is initialized to 0 here, so socket
+        errors are passed to the base class handleError(); when set to a
+        true value, a socket error instead causes the socket to be silently
+        closed and then reopened on the next logging call.
+        """
+        logging.Handler.__init__(self)
+        self.host = host
+        self.port = port
+        self.sock = None
+        self.closeOnError = 0
+        self.retryTime = None
+        #
+        # Exponential backoff parameters.
+        #
+        self.retryStart = 1.0   # initial delay (seconds) after a failure
+        self.retryMax = 30.0    # upper bound on the delay
+        self.retryFactor = 2.0  # delay multiplier per failed attempt
+
+    def makeSocket(self):
+        """
+        A factory method which allows subclasses to define the precise
+        type of socket they want.
+        """
+        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        s.connect((self.host, self.port))
+        return s
+
+    def createSocket(self):
+        """
+        Try to create a socket, using an exponential backoff with
+        a max retry time. Thanks to Robert Olson for the original patch
+        (SF #815911) which has been slightly refactored.
+        """
+        now = time.time()
+        # Either retryTime is None, in which case this
+        # is the first time back after a disconnect, or
+        # we've waited long enough.
+        if self.retryTime is None:
+            attempt = 1
+        else:
+            attempt = (now >= self.retryTime)
+        if attempt:
+            try:
+                self.sock = self.makeSocket()
+                self.retryTime = None # next time, no delay before trying
+            except:
+                #Creation failed, so set the retry time and return.
+                #retryPeriod is created lazily on the first failure.
+                if self.retryTime is None:
+                    self.retryPeriod = self.retryStart
+                else:
+                    self.retryPeriod = self.retryPeriod * self.retryFactor
+                    if self.retryPeriod > self.retryMax:
+                        self.retryPeriod = self.retryMax
+                self.retryTime = now + self.retryPeriod
+
+    def send(self, s):
+        """
+        Send a pickled string to the socket.
+
+        This function allows for partial sends which can happen when the
+        network is busy.
+        """
+        if self.sock is None:
+            self.createSocket()
+        #self.sock can be None either because we haven't reached the retry
+        #time yet, or because we have reached the retry time and retried,
+        #but are still unable to connect.
+        if self.sock:
+            try:
+                if hasattr(self.sock, "sendall"):
+                    self.sock.sendall(s)
+                else:
+                    #fall back to a manual send loop when sendall is missing
+                    sentsofar = 0
+                    left = len(s)
+                    while left > 0:
+                        sent = self.sock.send(s[sentsofar:])
+                        sentsofar = sentsofar + sent
+                        left = left - sent
+            except socket.error:
+                self.sock.close()
+                self.sock = None  # so we can call createSocket next time
+
+    def makePickle(self, record):
+        """
+        Pickles the record in binary format (protocol 1) with a 4-byte
+        big-endian length prefix, and returns it ready for transmission
+        across the socket.
+        """
+        ei = record.exc_info
+        if ei:
+            dummy = self.format(record) # just to get traceback text into record.exc_text
+            record.exc_info = None  # to avoid Unpickleable error
+        s = cPickle.dumps(record.__dict__, 1)
+        if ei:
+            record.exc_info = ei  # for next handler
+        slen = struct.pack(">L", len(s))
+        return slen + s
+
+    def handleError(self, record):
+        """
+        Handle an error during logging.
+
+        An error has occurred during logging. Most likely cause -
+        connection lost. Close the socket so that we can retry on the
+        next event.
+        """
+        if self.closeOnError and self.sock:
+            self.sock.close()
+            self.sock = None        #try to reconnect next time
+        else:
+            logging.Handler.handleError(self, record)
+
+    def emit(self, record):
+        """
+        Emit a record.
+
+        Pickles the record and writes it to the socket in binary format.
+        If there is an error with the socket, silently drop the packet.
+        If there was a problem with the socket, re-establishes the
+        socket.
+        """
+        try:
+            s = self.makePickle(record)
+            self.send(s)
+        except:
+            self.handleError(record)
+
+    def close(self):
+        """
+        Closes the socket.
+        """
+        if self.sock:
+            self.sock.close()
+            self.sock = None
+        logging.Handler.close(self)
+
+class DatagramHandler(SocketHandler):
+    """
+    A handler class which writes logging records, in pickle format, to
+    a datagram socket.  The pickle which is sent is that of the LogRecord's
+    attribute dictionary (__dict__), so that the receiver does not need to
+    have the logging module installed in order to process the logging event.
+
+    To unpickle the record at the receiving end into a LogRecord, use the
+    makeLogRecord function.
+
+    """
+    def __init__(self, host, port):
+        """
+        Initializes the handler with a specific host address and port.
+        """
+        SocketHandler.__init__(self, host, port)
+        self.closeOnError = 0
+
+    def makeSocket(self):
+        """
+        The factory method of SocketHandler is here overridden to create
+        a UDP socket (SOCK_DGRAM).
+        """
+        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+        return s
+
+    def send(self, s):
+        """
+        Send a pickled string to a socket.
+
+        This function no longer allows for partial sends which can happen
+        when the network is busy - UDP does not guarantee delivery and
+        can deliver packets out of sequence.
+        """
+        if self.sock is None:
+            self.createSocket()
+        self.sock.sendto(s, (self.host, self.port))
+
class SysLogHandler(logging.Handler):
    """
    A handler class which sends formatted logging records to a syslog
    server. Based on Sam Rushing's syslog module:
    http://www.nightmare.com/squirl/python-ext/misc/syslog.py
    Contributed by Nicolas Untz (after which minor refactoring changes
    have been made).
    """

    # from <linux/sys/syslog.h>:
    # ======================================================================
    # priorities/facilities are encoded into a single 32-bit quantity, where
    # the bottom 3 bits are the priority (0-7) and the top 28 bits are the
    # facility (0-big number). Both the priorities and the facilities map
    # roughly one-to-one to strings in the syslogd(8) source code.  This
    # mapping is included in this file.
    #
    # priorities (these are ordered)

    LOG_EMERG     = 0       #  system is unusable
    LOG_ALERT     = 1       #  action must be taken immediately
    LOG_CRIT      = 2       #  critical conditions
    LOG_ERR       = 3       #  error conditions
    LOG_WARNING   = 4       #  warning conditions
    LOG_NOTICE    = 5       #  normal but significant condition
    LOG_INFO      = 6       #  informational
    LOG_DEBUG     = 7       #  debug-level messages

    #  facility codes
    LOG_KERN      = 0       #  kernel messages
    LOG_USER      = 1       #  random user-level messages
    LOG_MAIL      = 2       #  mail system
    LOG_DAEMON    = 3       #  system daemons
    LOG_AUTH      = 4       #  security/authorization messages
    LOG_SYSLOG    = 5       #  messages generated internally by syslogd
    LOG_LPR       = 6       #  line printer subsystem
    LOG_NEWS      = 7       #  network news subsystem
    LOG_UUCP      = 8       #  UUCP subsystem
    LOG_CRON      = 9       #  clock daemon
    LOG_AUTHPRIV  = 10      #  security/authorization messages (private)

    #  other codes through 15 reserved for system use
    LOG_LOCAL0    = 16      #  reserved for local use
    LOG_LOCAL1    = 17      #  reserved for local use
    LOG_LOCAL2    = 18      #  reserved for local use
    LOG_LOCAL3    = 19      #  reserved for local use
    LOG_LOCAL4    = 20      #  reserved for local use
    LOG_LOCAL5    = 21      #  reserved for local use
    LOG_LOCAL6    = 22      #  reserved for local use
    LOG_LOCAL7    = 23      #  reserved for local use

    # Map syslog priority keywords to priority codes; the entries marked
    # DEPRECATED are legacy spellings kept for compatibility.
    priority_names = {
        "alert":    LOG_ALERT,
        "crit":     LOG_CRIT,
        "critical": LOG_CRIT,
        "debug":    LOG_DEBUG,
        "emerg":    LOG_EMERG,
        "err":      LOG_ERR,
        "error":    LOG_ERR,        #  DEPRECATED
        "info":     LOG_INFO,
        "notice":   LOG_NOTICE,
        "panic":    LOG_EMERG,      #  DEPRECATED
        "warn":     LOG_WARNING,    #  DEPRECATED
        "warning":  LOG_WARNING,
        }

    # Map syslog facility keywords to facility codes.
    facility_names = {
        "auth":     LOG_AUTH,
        "authpriv": LOG_AUTHPRIV,
        "cron":     LOG_CRON,
        "daemon":   LOG_DAEMON,
        "kern":     LOG_KERN,
        "lpr":      LOG_LPR,
        "mail":     LOG_MAIL,
        "news":     LOG_NEWS,
        "security": LOG_AUTH,       #  DEPRECATED
        "syslog":   LOG_SYSLOG,
        "user":     LOG_USER,
        "uucp":     LOG_UUCP,
        "local0":   LOG_LOCAL0,
        "local1":   LOG_LOCAL1,
        "local2":   LOG_LOCAL2,
        "local3":   LOG_LOCAL3,
        "local4":   LOG_LOCAL4,
        "local5":   LOG_LOCAL5,
        "local6":   LOG_LOCAL6,
        "local7":   LOG_LOCAL7,
        }

    def __init__(self, address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER):
        """
        Initialize a handler.

        If address is specified as a string, UNIX socket is used.
        If facility is not specified, LOG_USER is used.
        """
        logging.Handler.__init__(self)

        self.address = address
        self.facility = facility
        if type(address) == types.StringType:
            self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
            # syslog may require either DGRAM or STREAM sockets; fall back
            # to a STREAM socket if the DGRAM connect is refused.
            try:
                self.socket.connect(address)
            except socket.error:
                self.socket.close()
                self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
            self.socket.connect(address)
            self.unixsocket = 1
        else:
            self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
            self.unixsocket = 0

        self.formatter = None

    # curious: when talking to the unix-domain '/dev/log' socket, a
    #   zero-terminator seems to be required.  this string is placed
    #   into a class variable so that it can be overridden if
    #   necessary.
    log_format_string = '<%d>%s\000'

    def encodePriority (self, facility, priority):
        """
        Encode the facility and priority. You can pass in strings or
        integers - if strings are passed, the facility_names and
        priority_names mapping dictionaries are used to convert them to
        integers.
        """
        if type(facility) == types.StringType:
            facility = self.facility_names[facility]
        if type(priority) == types.StringType:
            priority = self.priority_names[priority]
        return (facility << 3) | priority

    def close (self):
        """
        Closes the socket.
        """
        if self.unixsocket:
            self.socket.close()
        logging.Handler.close(self)

    def emit(self, record):
        """
        Emit a record.

        The record is formatted, and then sent to the syslog server. If
        exception information is present, it is NOT sent to the server.
        """
        msg = self.format(record)
        # Syslog priority keywords are lower case, so lowercase the
        # record's level name before encoding it into the packet header.
        # (This was previously a bare string literal - an expression that
        # was built and discarded on every call - rather than a comment.)
        msg = self.log_format_string % (
            self.encodePriority(self.facility,
                                string.lower(record.levelname)),
            msg)
        try:
            if self.unixsocket:
                self.socket.send(msg)
            else:
                self.socket.sendto(msg, self.address)
        except:
            self.handleError(record)
+
class SMTPHandler(logging.Handler):
    """
    A handler class which sends an SMTP email for each logging event.
    """
    def __init__(self, mailhost, fromaddr, toaddrs, subject):
        """
        Initialize the handler.

        Initialize the instance with the from and to addresses and subject
        line of the email. To specify a non-standard SMTP port, use the
        (host, port) tuple format for the mailhost argument.
        """
        logging.Handler.__init__(self)
        if type(mailhost) == types.TupleType:
            self.mailhost, self.mailport = mailhost
        else:
            self.mailhost, self.mailport = mailhost, None
        self.fromaddr = fromaddr
        # A single recipient may be given as a bare string; normalize it
        # to a list for smtplib.
        if type(toaddrs) == types.StringType:
            toaddrs = [toaddrs]
        self.toaddrs = toaddrs
        self.subject = subject

    def getSubject(self, record):
        """
        Determine the subject for the email.

        If you want to specify a subject line which is record-dependent,
        override this method.
        """
        return self.subject

    weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']

    monthname = [None,
                 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
                 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']

    def date_time(self):
        """
        Return the current date and time formatted for a MIME header.
        Needed for Python 1.5.2 (no email package available)
        """
        year, month, day, hh, mm, ss, wd = time.gmtime(time.time())[:7]
        return "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
            self.weekdayname[wd], day, self.monthname[month], year,
            hh, mm, ss)

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified addressees.
        """
        try:
            import smtplib
            try:
                from email.Utils import formatdate
            except:
                # No email package (pre-2.2): fall back to our own
                # RFC-2822 date formatter.
                formatdate = self.date_time
            port = self.mailport or smtplib.SMTP_PORT
            smtp = smtplib.SMTP(self.mailhost, port)
            body = self.format(record)
            message = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % (
                            self.fromaddr,
                            string.join(self.toaddrs, ","),
                            self.getSubject(record),
                            formatdate(), body)
            smtp.sendmail(self.fromaddr, self.toaddrs, message)
            smtp.quit()
        except:
            self.handleError(record)
+
class NTEventLogHandler(logging.Handler):
    """
    A handler class which sends events to the NT Event Log. Adds a
    registry entry for the specified application name. If no dllname is
    provided, win32service.pyd (which contains some basic message
    placeholders) is used. Note that use of these placeholders will make
    your event logs big, as the entire message source is held in the log.
    If you want slimmer logs, you have to pass in the name of your own DLL
    which contains the message definitions you want to use in the event log.
    """
    def __init__(self, appname, dllname=None, logtype="Application"):
        """
        Initialize the handler, registering appname as an event source.

        If the Python Win32 extensions are not installed, a warning is
        printed and the handler becomes a no-op (self._welu is None).
        """
        logging.Handler.__init__(self)
        try:
            import win32evtlogutil, win32evtlog
            self.appname = appname
            self._welu = win32evtlogutil
            if not dllname:
                # Default to win32service.pyd, which lives two directories
                # above win32evtlogutil and ships basic message placeholders.
                dllname = os.path.split(self._welu.__file__)
                dllname = os.path.split(dllname[0])
                dllname = os.path.join(dllname[0], r'win32service.pyd')
            self.dllname = dllname
            self.logtype = logtype
            self._welu.AddSourceToRegistry(appname, dllname, logtype)
            self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE
            self.typemap = {
                logging.DEBUG   : win32evtlog.EVENTLOG_INFORMATION_TYPE,
                logging.INFO    : win32evtlog.EVENTLOG_INFORMATION_TYPE,
                logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE,
                logging.ERROR   : win32evtlog.EVENTLOG_ERROR_TYPE,
                logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE,
            }
        except ImportError:
            # Parenthesized form works as a print statement with a single
            # argument on Python 2 and as a function call on Python 3.
            print("The Python Win32 extensions for NT (service, event "
                  "logging) appear not to be available.")
            self._welu = None

    def getMessageID(self, record):
        """
        Return the message ID for the event record. If you are using your
        own messages, you could do this by having the msg passed to the
        logger being an ID rather than a formatting string. Then, in here,
        you could use a dictionary lookup to get the message ID. This
        version returns 1, which is the base message ID in win32service.pyd.
        """
        return 1

    def getEventCategory(self, record):
        """
        Return the event category for the record.

        Override this if you want to specify your own categories. This version
        returns 0.
        """
        return 0

    def getEventType(self, record):
        """
        Return the event type for the record.

        Override this if you want to specify your own types. This version does
        a mapping using the handler's typemap attribute, which is set up in
        __init__() to a dictionary which contains mappings for DEBUG, INFO,
        WARNING, ERROR and CRITICAL. If you are using your own levels you will
        either need to override this method or place a suitable dictionary in
        the handler's typemap attribute.
        """
        return self.typemap.get(record.levelno, self.deftype)

    def emit(self, record):
        """
        Emit a record.

        Determine the message ID, event category and event type. Then
        log the message in the NT event log. Does nothing when the Win32
        extensions are unavailable.
        """
        if self._welu:
            try:
                # Renamed from id/cat/type to avoid shadowing builtins.
                event_id = self.getMessageID(record)
                event_category = self.getEventCategory(record)
                event_type = self.getEventType(record)
                msg = self.format(record)
                self._welu.ReportEvent(self.appname, event_id,
                                       event_category, event_type, [msg])
            except:
                self.handleError(record)

    def close(self):
        """
        Clean up this handler.

        You can remove the application name from the registry as a
        source of event log entries. However, if you do this, you will
        not be able to see the events as you intended in the Event Log
        Viewer - it needs to be able to access the registry to get the
        DLL name.
        """
        #self._welu.RemoveSourceFromRegistry(self.appname, self.logtype)
        logging.Handler.close(self)
+
class HTTPHandler(logging.Handler):
    """
    A class which sends records to a Web server, using either GET or
    POST semantics.
    """
    def __init__(self, host, url, method="GET"):
        """
        Initialize the instance with the host, the request URL, and the method
        ("GET" or "POST", case-insensitive).

        Raises ValueError for any other method.
        """
        logging.Handler.__init__(self)
        # str methods replace the deprecated string-module functions;
        # the instance-raising form below is valid on Python 2 and 3,
        # unlike the old "raise cls, msg" statement syntax.
        method = method.upper()
        if method not in ["GET", "POST"]:
            raise ValueError("method must be GET or POST")
        self.host = host
        self.url = url
        self.method = method

    def mapLogRecord(self, record):
        """
        Default implementation of mapping the log record into a dict
        that is sent as the CGI data. Overwrite in your class.
        Contributed by Franz  Glasner.
        """
        return record.__dict__

    def emit(self, record):
        """
        Emit a record.

        Send the record to the Web server as an URL-encoded dictionary
        """
        try:
            import httplib, urllib
            h = httplib.HTTP(self.host)
            url = self.url
            data = urllib.urlencode(self.mapLogRecord(record))
            if self.method == "GET":
                # Append to any query string already present in the URL.
                if url.find('?') >= 0:
                    sep = '&'
                else:
                    sep = '?'
                url = url + "%c%s" % (sep, data)
            h.putrequest(self.method, url)
            if self.method == "POST":
                h.putheader("Content-length", str(len(data)))
            h.endheaders()
            if self.method == "POST":
                h.send(data)
            h.getreply()    #can't do anything with the result
        except:
            self.handleError(record)
+
class BufferingHandler(logging.Handler):
    """
    A handler class which buffers logging records in memory. Whenever each
    record is added to the buffer, a check is made to see if the buffer
    should be flushed. If it should, then flush() is expected to do what's
    needed.
    """
    def __init__(self, capacity):
        """
        Initialize the handler with the buffer size.
        """
        logging.Handler.__init__(self)
        self.capacity = capacity
        self.buffer = []

    def shouldFlush(self, record):
        """
        Should the handler flush its buffer?

        Returns true once the number of buffered records reaches the
        configured capacity. Override to implement custom flushing
        strategies.
        """
        return len(self.buffer) >= self.capacity

    def emit(self, record):
        """
        Emit a record: buffer it, then flush the buffer if shouldFlush()
        says it is time to.
        """
        self.buffer.append(record)
        if self.shouldFlush(record):
            self.flush()

    def flush(self):
        """
        Override to implement custom flushing behaviour.

        This version simply discards the buffered records.
        """
        self.buffer = []

    def close(self):
        """
        Flush any buffered records, then close via the parent class.
        """
        self.flush()
        logging.Handler.close(self)
+
class MemoryHandler(BufferingHandler):
    """
    A handler class which buffers logging records in memory, periodically
    flushing them to a target handler. Flushing occurs whenever the buffer
    is full, or when an event of a certain severity or greater is seen.
    """
    def __init__(self, capacity, flushLevel=logging.ERROR, target=None):
        """
        Initialize the handler with the buffer size, the level at which
        flushing should occur and an optional target.

        Note that without a target being set either here or via setTarget(),
        a MemoryHandler is no use to anyone!
        """
        BufferingHandler.__init__(self, capacity)
        self.flushLevel = flushLevel
        self.target = target

    def shouldFlush(self, record):
        """
        Flush when the buffer is full, or when the record is at
        flushLevel or more severe.
        """
        if len(self.buffer) >= self.capacity:
            return True
        return record.levelno >= self.flushLevel

    def setTarget(self, target):
        """
        Set the target handler for this handler.
        """
        self.target = target

    def flush(self):
        """
        For a MemoryHandler, flushing means forwarding every buffered
        record to the target handler and emptying the buffer. Without a
        target, the buffer is left untouched. Override if you want
        different behaviour.
        """
        target = self.target
        if target:
            for record in self.buffer:
                target.handle(record)
            self.buffer = []

    def close(self):
        """
        Flush, drop the target reference and let BufferingHandler.close()
        finish up (which also empties the buffer).
        """
        self.flush()
        self.target = None
        BufferingHandler.close(self)
diff --git a/depot_tools/release/win/python_24/Lib/macpath.py b/depot_tools/release/win/python_24/Lib/macpath.py
new file mode 100644
index 0000000..f50f6607
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/macpath.py
@@ -0,0 +1,275 @@
+"""Pathname and path-related operations for the Macintosh."""
+
+import os
+from stat import *
+
+__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
+           "basename","dirname","commonprefix","getsize","getmtime",
+           "getatime","getctime", "islink","exists","isdir","isfile",
+           "walk","expanduser","expandvars","normpath","abspath",
+           "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
+           "devnull","realpath","supports_unicode_filenames"]
+
+# strings representing various path-related bits and pieces
+curdir = ':'
+pardir = '::'
+extsep = '.'
+sep = ':'
+pathsep = '\n'
+defpath = ':'
+altsep = None
+devnull = 'Dev:Null'
+
+# Normalize the case of a pathname.  Dummy in Posix, but <s>.lower() here.
+
def normcase(path):
    """Normalize the case of a pathname.  Mac filesystems are
    case-insensitive, so this simply lower-cases the string (a dummy
    on Posix)."""
    return path.lower()
+
+
def isabs(s):
    """Return true if a path is absolute.
    On the Mac, relative paths begin with a colon,
    but as a special case, paths with no colons at all are also relative.
    Anything else is absolute (the string up to the first colon is the
    volume name)."""

    return ':' in s and s[0] != ':'


def join(s, *p):
    """Join path components, inserting ':' separators as needed.

    An absolute component restarts the accumulated result, mirroring
    the behaviour of os.path.join on other platforms.
    """
    path = s
    for t in p:
        # Bug fix: test the accumulated path, not the original first
        # argument.  The old "if (not s)" made join('', 'a', 'b')
        # discard every component but the last (returning 'b' instead
        # of ':a:b'); CPython later applied the same correction.
        if (not path) or isabs(t):
            path = t
            continue
        if t[:1] == ':':
            t = t[1:]
        if ':' not in path:
            path = ':' + path
        if path[-1:] != ':':
            path = path + ':'
        path = path + t
    return path
+
+
def split(s):
    """Split a pathname into two parts: the directory leading up to the final
    bit, and the basename (the filename, without colons, in that directory).
    The result (s, t) is such that join(s, t) yields the original argument."""

    if ':' not in s:
        return '', s
    # Cut just after the last colon.
    cut = s.rfind(':') + 1
    head, tail = s[:cut - 1], s[cut:]
    if head and ':' not in head:
        head = head + ':'
    return head, tail
+
+
def splitext(p):
    """Split a path into root and extension.
    The extension is everything starting at the last dot in the last
    pathname component; the root is everything before that.
    It is always true that root + ext == p."""

    dot = p.rfind('.')
    colon = p.rfind(':')
    # A dot only counts as an extension separator when it falls inside
    # the last component, i.e. after the last colon.
    if dot > colon:
        return p[:dot], p[dot:]
    return p, ''
+
+
def splitdrive(p):
    """Split a pathname into a drive specification and the rest of the
    path.  Useful on DOS/Windows/NT; on the Mac there is no drive
    concept (the volume name lacks the per-drive current-directory
    semantics of DOS drive letters), so the drive part is always
    empty."""

    return '', p
+
+
+# Short interfaces to split()
+
def dirname(s):
    """Return the directory part of a pathname (everything up to the
    final colon)."""
    return split(s)[0]

def basename(s):
    """Return the final component of a pathname (everything after the
    last colon)."""
    return split(s)[1]
+
def ismount(s):
    """Return true if the path names a volume root: an absolute path
    whose final component is empty."""
    if not isabs(s):
        return False
    head, tail = split(s)
    return tail == ''
+
def isdir(s):
    """Return true if the pathname refers to an existing directory."""

    try:
        statinfo = os.stat(s)
    except os.error:
        # Nonexistent or inaccessible: report "not a directory".
        return 0
    return S_ISDIR(statinfo.st_mode)
+
+
+# Get size, mtime, atime of files.
+
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    info = os.stat(filename)
    return info.st_size

def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    info = os.stat(filename)
    return info.st_mtime

def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    info = os.stat(filename)
    return info.st_atime
+
+
def islink(s):
    """Return true if the pathname refers to a symbolic link (a Finder
    alias).  Without the Carbon toolbox, or on any error, this is
    always False."""

    try:
        import Carbon.File
    except ImportError:
        return False
    try:
        return Carbon.File.ResolveAliasFile(s, 0)[2]
    except:
        return False
+
+
def isfile(s):
    """Return true if the pathname refers to an existing regular file."""

    try:
        statinfo = os.stat(s)
    except os.error:
        # Nonexistent or inaccessible: report "not a file".
        return False
    return S_ISREG(statinfo.st_mode)
+
def getctime(filename):
    """Return the creation time of a file, reported by os.stat()."""
    info = os.stat(filename)
    return info.st_ctime
+
def exists(s):
    """Test whether a path exists.  Returns False for broken symbolic links"""

    # os.stat() is called purely for its success/failure; the old code
    # bound the result to an unused local.
    try:
        os.stat(s)
    except os.error:
        return False
    return True
+
+# Is `stat`/`lstat` a meaningful difference on the Mac?  This is safe in any
+# case.
+
def lexists(path):
    """Test whether a path exists.  Returns True for broken symbolic links"""

    # os.lstat() is called purely for its success/failure; the old code
    # bound the result to an unused local.
    try:
        os.lstat(path)
    except os.error:
        return False
    return True
+
+# Return the longest prefix of all list elements.
+
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    if not m:
        return ''
    # Comparing only the lexicographic extremes suffices: any leading
    # character shared by min() and max() is shared by every string in
    # between.  This replaces the old loop's quadratic prefix-slice
    # comparisons with a single linear scan (same approach as CPython's
    # genericpath.commonprefix).
    lo = min(m)
    hi = max(m)
    for i in range(len(lo)):
        if lo[i] != hi[i]:
            return lo[:i]
    return lo
+
def expandvars(path):
    """Dummy to retain interface-compatibility with other operating
    systems: the Mac has no $VARIABLE expansion, so the path is
    returned unchanged."""
    return path


def expanduser(path):
    """Dummy to retain interface-compatibility with other operating
    systems: there is no ~user notation on the Mac, so the path is
    returned unchanged."""
    return path
+
class norm_error(Exception):
    """Path cannot be normalized"""

def normpath(s):
    """Normalize a pathname.  Will return the same result for
    equivalent paths: "dir::" pairs are collapsed (":" pops the
    preceding component) and a redundant trailing colon is dropped.

    Raises norm_error when "::" appears immediately after the volume
    name, since there is no parent component to pop there.
    """

    if ":" not in s:
        return ":" + s

    comps = s.split(":")
    i = 1
    while i < len(comps) - 1:
        if comps[i] == "" and comps[i-1] != "":
            if i > 1:
                # "name::" - drop the name and the empty component.
                del comps[i-1:i+1]
                i = i - 1
            else:
                # Instance-raising form is valid on Python 2 and 3,
                # unlike the old "raise cls, msg" statement syntax.
                raise norm_error('Cannot use :: immediately after volume name')
        else:
            i = i + 1

    s = ":".join(comps)

    # remove trailing ":" except for ":" and "Volume:"
    if s[-1] == ":" and len(comps) > 2 and s != ":" * len(s):
        s = s[:-1]
    return s
+
+
def walk(top, func, arg):
    """Directory tree walk with callback function.

    For each directory in the directory tree rooted at top (including top
    itself, but excluding '.' and '..'), call func(arg, dirname, fnames).
    dirname is the name of the directory, and fnames a list of the names of
    the files and subdirectories in dirname (excluding '.' and '..').  func
    may modify the fnames list in-place (e.g. via del or slice assignment),
    and walk will only recurse into the subdirectories whose names remain in
    fnames; this can be used to implement a filter, or to impose a specific
    order of visiting.  No semantics are defined for, or required of, arg,
    beyond that arg is always passed to func.  It can be used, e.g., to pass
    a filename pattern, or a mutable object designed to accumulate
    statistics.  Passing None for arg is common."""

    try:
        entries = os.listdir(top)
    except os.error:
        # Unreadable directory: silently prune this subtree.
        return
    # The callback runs before recursion so it may prune `entries`.
    func(arg, top, entries)
    for entry in entries:
        fullname = join(top, entry)
        if isdir(fullname) and not islink(fullname):
            walk(fullname, func, arg)
+
+
def abspath(path):
    """Return an absolute, normalized version of *path*, anchoring a
    relative path at the current working directory."""
    if isabs(path):
        return normpath(path)
    return normpath(join(os.getcwd(), path))
+
# realpath is a no-op on systems without islink support
def realpath(path):
    """Resolve Finder aliases in *path*, component by component.
    Without the Carbon toolbox this simply returns abspath(path)."""
    path = abspath(path)
    try:
        import Carbon.File
    except ImportError:
        return path
    if not path:
        return path
    parts = path.split(':')
    resolved = parts[0] + ':'
    for part in parts[1:]:
        resolved = join(resolved, part)
        # Resolve any alias at this level before descending further.
        resolved = Carbon.File.FSResolveAliasFile(resolved, 1)[0].as_pathname()
    return resolved

supports_unicode_filenames = False
diff --git a/depot_tools/release/win/python_24/Lib/macurl2path.py b/depot_tools/release/win/python_24/Lib/macurl2path.py
new file mode 100644
index 0000000..ed23883c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/macurl2path.py
@@ -0,0 +1,95 @@
+"""Macintosh-specific module for conversion between pathnames and URLs.
+
+Do not import directly; use urllib instead."""
+
+import urllib
+import os
+
+__all__ = ["url2pathname","pathname2url"]
+
def url2pathname(pathname):
    """Convert a /-delimited pathname (a file: URL path) to a Mac
    colon-delimited pathname, unquoting %-escapes at the end.

    Raises RuntimeError for non-local URLs.
    """
    #
    # XXXX The .. handling should be fixed...
    #
    tp = urllib.splittype(pathname)[0]
    if tp and tp != 'file':
        # Instance-raising form is valid on Python 2 and 3, unlike the
        # old "raise cls, msg" statement syntax.
        raise RuntimeError('Cannot convert non-local URL to pathname')
    # Turn starting /// into /, an empty hostname means current host
    if pathname[:3] == '///':
        pathname = pathname[2:]
    elif pathname[:2] == '//':
        raise RuntimeError('Cannot convert non-local URL to pathname')
    components = pathname.split('/')
    # Remove . and embedded ..
    i = 0
    while i < len(components):
        if components[i] == '.':
            del components[i]
        elif components[i] == '..' and i > 0 and \
                                  components[i-1] not in ('', '..'):
            del components[i-1:i+1]
            i = i-1
        elif components[i] == '' and i > 0 and components[i-1] != '':
            del components[i]
        else:
            i = i+1
    if not components[0]:
        # Absolute unix path, don't start with colon
        rv = ':'.join(components[1:])
    else:
        # relative unix path, start with colon. First replace
        # leading .. by empty strings (giving ::file)
        i = 0
        while i < len(components) and components[i] == '..':
            components[i] = ''
            i = i + 1
        rv = ':' + ':'.join(components)
    # and finally unquote slashes and other funny characters
    return urllib.unquote(rv)
+
def pathname2url(pathname):
    """Convert a Mac colon-delimited pathname to a /-delimited pathname.

    Raises RuntimeError if the pathname itself contains slashes.
    """
    if '/' in pathname:
        # Instance-raising form is valid on Python 2 and 3, unlike the
        # old "raise cls, msg" statement syntax.
        raise RuntimeError("Cannot convert pathname containing slashes")
    components = pathname.split(':')
    # Remove empty first and/or last component
    if components[0] == '':
        del components[0]
    if components[-1] == '':
        del components[-1]
    # Replace empty string ('::') by .. (will result in '/../' later)
    for i in range(len(components)):
        if components[i] == '':
            components[i] = '..'
    # Truncate names longer than 31 bytes
    components = map(_pncomp2url, components)

    if os.path.isabs(pathname):
        return '/' + '/'.join(components)
    else:
        return '/'.join(components)
+
def _pncomp2url(component):
    # Mac path components are capped at 31 bytes; quote everything,
    # slashes included (safe='').
    return urllib.quote(component[:31], safe='')
+
def test():
    """Smoke test: print the result of converting a few sample URLs to
    Mac pathnames and a few sample Mac pathnames to URLs."""
    for url in ["index.html",
                "bar/index.html",
                "/foo/bar/index.html",
                "/foo/bar/",
                "/"]:
        print '%r -> %r' % (url, url2pathname(url))
    for path in ["drive:",
                 "drive:dir:",
                 "drive:dir:file",
                 "drive:file",
                 "file",
                 ":file",
                 ":dir:",
                 ":dir:file"]:
        print '%r -> %r' % (path, pathname2url(path))

# Run the smoke test when executed as a script.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/mailbox.py b/depot_tools/release/win/python_24/Lib/mailbox.py
new file mode 100644
index 0000000..c89c1a4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/mailbox.py
@@ -0,0 +1,321 @@
+#! /usr/bin/env python
+
+"""Classes to handle Unix style, MMDF style, and MH style mailboxes."""
+
+
+import rfc822
+import os
+
+__all__ = ["UnixMailbox","MmdfMailbox","MHMailbox","Maildir","BabylMailbox",
+           "PortableUnixMailbox"]
+
class _Mailbox:
    """Base class for mailbox parsers that scan one seekable file.

    Subclasses supply _search_start() (seek to the next message, raising
    EOFError at end of mailbox) and _search_end() (advance past the
    message body); this class turns those into a next()/iterator API.
    """

    def __init__(self, fp, factory=rfc822.Message):
        self.fp = fp            # the underlying mailbox file
        self.seekp = 0          # offset at which the next scan resumes
        self.factory = factory  # called with a file-like object per message

    def __iter__(self):
        # next() returns None when exhausted, which iter() uses as the
        # sentinel that terminates iteration.
        return iter(self.next, None)

    def next(self):
        """Return the next message, or None when the mailbox is exhausted."""
        while True:
            self.fp.seek(self.seekp)
            try:
                self._search_start()
            except EOFError:
                # No further delimiter: remember where scanning stopped.
                self.seekp = self.fp.tell()
                return None
            start = self.fp.tell()
            self._search_end()
            stop = self.fp.tell()
            self.seekp = stop
            # Skip zero-length matches and keep scanning.
            if start != stop:
                return self.factory(_Subfile(self.fp, start, stop))
+
+
class _Subfile:
    """Read-only file-like view onto the byte range [start, stop) of fp."""

    def __init__(self, fp, start, stop):
        self.fp = fp
        self.start = start
        self.stop = stop
        self.pos = self.start

    def _read(self, length, read_function):
        # Clamp the request to what remains of the window, delegate to the
        # underlying file's read/readline, and track our own position.
        if self.pos >= self.stop:
            return ''
        left = self.stop - self.pos
        if not (length is not None and 0 <= length <= left):
            length = left
        self.fp.seek(self.pos)
        data = read_function(length)
        self.pos = self.fp.tell()
        return data

    def read(self, length=None):
        return self._read(length, self.fp.read)

    def readline(self, length=None):
        return self._read(length, self.fp.readline)

    def readlines(self, sizehint=-1):
        """Return a list of lines; stop early once sizehint bytes are read."""
        lines = []
        while True:
            line = self.readline()
            if not line:
                break
            lines.append(line)
            if sizehint < 0:
                continue
            sizehint = sizehint - len(line)
            if sizehint <= 0:
                break
        return lines

    def tell(self):
        # Positions are reported relative to the window start.
        return self.pos - self.start

    def seek(self, pos, whence=0):
        # whence: 0 = from window start, 1 = relative, 2 = from window end.
        # An unrecognized whence is silently ignored, as in the original.
        if whence == 0:
            base = self.start
        elif whence == 1:
            base = self.pos
        elif whence == 2:
            base = self.stop
        else:
            return
        self.pos = base + pos

    def close(self):
        # Drop our reference; the shared underlying file stays open.
        del self.fp
+
+
# Recommended to use PortableUnixMailbox instead!
class UnixMailbox(_Mailbox):
    """Parse a classic BSD ("From ") delimited Unix mailbox file.

    Message boundaries are lines beginning with "From " that also satisfy
    _isrealfromline().  The strict matcher below tries to validate the
    entire From_ line, but per
    http://home.netscape.com/eng/mozilla/2.0/relnotes/demo/content-length.html
    real-world mailboxes vary too much for strict parsing to be reliable;
    PortableUnixMailbox accepts any "From " line instead.  Neither variant
    checks that the delimiter is preceded by a blank line or the start of
    the file, which a fully correct parser would require.
    """

    def _search_start(self):
        # Advance to the next From_ delimiter and leave the file
        # positioned at its first character.
        while True:
            pos = self.fp.tell()
            line = self.fp.readline()
            if not line:
                raise EOFError
            if line[:5] == 'From ' and self._isrealfromline(line):
                self.fp.seek(pos)
                return

    def _search_end(self):
        # Step over the delimiter itself, then stop just before the next
        # one (or at end of file).
        self.fp.readline()
        while True:
            pos = self.fp.tell()
            line = self.fp.readline()
            if not line:
                return
            if line[:5] == 'From ' and self._isrealfromline(line):
                self.fp.seek(pos)
                return

    # Strict From_ recognizer: "From <sender> <weekday> <month> <day>
    # <time> [<tz>] <year>".  The regexp is compiled lazily on first use.
    _fromlinepattern = r"From \s*[^\s]+\s+\w\w\w\s+\w\w\w\s+\d?\d\s+" \
                       r"\d?\d:\d\d(:\d\d)?(\s+[^\s]+)?\s+\d\d\d\d\s*$"
    _regexp = None

    def _strict_isrealfromline(self, line):
        if not self._regexp:
            import re
            self._regexp = re.compile(self._fromlinepattern)
        return self._regexp.match(line)

    def _portable_isrealfromline(self, line):
        # The caller has already checked the "From " prefix; accept any
        # such line as a delimiter.
        return True

    # Default to the strict recognizer; PortableUnixMailbox overrides this.
    _isrealfromline = _strict_isrealfromline
+
+
class PortableUnixMailbox(UnixMailbox):
    # Identical to UnixMailbox except that any line starting with "From "
    # is accepted as a message delimiter, instead of strictly validating
    # the whole From_ line.  This is the recommended class.
    _isrealfromline = UnixMailbox._portable_isrealfromline
+
+
class MmdfMailbox(_Mailbox):
    """Parse an MMDF mailbox, whose messages are bracketed by ^A^A^A^A lines."""

    def _search_start(self):
        # Consume input up to and including the next delimiter line.
        while True:
            line = self.fp.readline()
            if not line:
                raise EOFError
            if line[:5] == '\001\001\001\001\n':
                return

    def _search_end(self):
        # Stop just before the closing delimiter (or at end of file).
        while True:
            pos = self.fp.tell()
            line = self.fp.readline()
            if not line:
                return
            if line == '\001\001\001\001\n':
                self.fp.seek(pos)
                return
+
+
class MHMailbox:
    """Iterate over the numerically named message files of an MH folder."""

    def __init__(self, dirname, factory=rfc822.Message):
        import re
        pat = re.compile('^[1-9][0-9]*$')
        self.dirname = dirname
        # Collect the message files, sorted by message number.
        # (Fix: the temporary used to be named "list", shadowing the builtin.)
        msgnums = os.listdir(self.dirname)
        msgnums = filter(pat.match, msgnums)
        msgnums = map(long, msgnums)
        msgnums.sort()
        # This only works in Python 1.6 or later;
        # before that str() added 'L':
        self.boxes = map(str, msgnums)
        # Reversed so next() can pop() the lowest number off the end.
        self.boxes.reverse()
        self.factory = factory

    def __iter__(self):
        # next() returns None at the end, the sentinel terminating iter().
        return iter(self.next, None)

    def next(self):
        """Return the next message, or None when the folder is exhausted."""
        if not self.boxes:
            return None
        fn = self.boxes.pop()
        fp = open(os.path.join(self.dirname, fn))
        msg = self.factory(fp)
        try:
            # Record the MH message number on the message when possible.
            msg._mh_msgno = fn
        except (AttributeError, TypeError):
            pass
        return msg
+
+
class Maildir:
    """Iterate over messages in a qmail-style maildir (new/, then cur/)."""

    def __init__(self, dirname, factory=rfc822.Message):
        self.dirname = dirname
        self.factory = factory

        boxes = []
        # New mail first, then already-seen mail.  Names starting with a
        # dot are internal to the maildir protocol and are skipped.
        for subdir in ('new', 'cur'):
            path = os.path.join(self.dirname, subdir)
            boxes = boxes + [os.path.join(path, name)
                             for name in os.listdir(path) if name[0] != '.']
        # Reversed so next() can pop() entries off the end in order.
        boxes.reverse()
        self.boxes = boxes

    def __iter__(self):
        # next() returns None at the end, the sentinel terminating iter().
        return iter(self.next, None)

    def next(self):
        """Return the next message, or None when the maildir is exhausted."""
        if not self.boxes:
            return None
        fn = self.boxes.pop()
        fp = open(fn)
        return self.factory(fp)
+
+
class BabylMailbox(_Mailbox):
    """Parse an Rmail/Babyl mailbox.

    Message text starts after a '*** EOOH ***' line and runs until the
    next control-underscore (optionally with control-L) delimiter.
    """

    def _search_start(self):
        while True:
            line = self.fp.readline()
            if not line:
                raise EOFError
            if line == '*** EOOH ***\n':
                return

    def _search_end(self):
        while True:
            pos = self.fp.tell()
            line = self.fp.readline()
            if not line:
                return
            if line in ('\037\014\n', '\037'):
                self.fp.seek(pos)
                return
+
+
+def _test():
+    import sys
+
+    args = sys.argv[1:]
+    if not args:
+        for key in 'MAILDIR', 'MAIL', 'LOGNAME', 'USER':
+            if key in os.environ:
+                mbox = os.environ[key]
+                break
+        else:
+            print "$MAIL, $LOGNAME nor $USER set -- who are you?"
+            return
+    else:
+        mbox = args[0]
+    if mbox[:1] == '+':
+        mbox = os.environ['HOME'] + '/Mail/' + mbox[1:]
+    elif not '/' in mbox:
+        if os.path.isfile('/var/mail/' + mbox):
+            mbox = '/var/mail/' + mbox
+        else:
+            mbox = '/usr/mail/' + mbox
+    if os.path.isdir(mbox):
+        if os.path.isdir(os.path.join(mbox, 'cur')):
+            mb = Maildir(mbox)
+        else:
+            mb = MHMailbox(mbox)
+    else:
+        fp = open(mbox, 'r')
+        mb = PortableUnixMailbox(fp)
+
+    msgs = []
+    while 1:
+        msg = mb.next()
+        if msg is None:
+            break
+        msgs.append(msg)
+        if len(args) <= 1:
+            msg.fp = None
+    if len(args) > 1:
+        num = int(args[1])
+        print 'Message %d body:'%num
+        msg = msgs[num-1]
+        msg.rewindbody()
+        sys.stdout.write(msg.fp.read())
+    else:
+        print 'Mailbox',mbox,'has',len(msgs),'messages:'
+        for msg in msgs:
+            f = msg.getheader('from') or ""
+            s = msg.getheader('subject') or ""
+            d = msg.getheader('date') or ""
+            print '-%20.20s   %20.20s   %-30.30s'%(f, d[5:], s)
+
+
+if __name__ == '__main__':
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/mailcap.py b/depot_tools/release/win/python_24/Lib/mailcap.py
new file mode 100644
index 0000000..b2ddacd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/mailcap.py
@@ -0,0 +1,255 @@
+"""Mailcap file handling.  See RFC 1524."""
+
+import os
+
+__all__ = ["getcaps","findmatch"]
+
+# Part 1: top-level interface.
+
def getcaps():
    """Return a dictionary containing the mailcap database.

    The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain')
    to a list of dictionaries corresponding to mailcap entries.  The list
    collects all the entries for that MIME type from all available mailcap
    files.  Each dictionary contains key-value pairs for that MIME type,
    where the viewing command is stored with the key "view".
    """
    caps = {}
    for mailcap in listmailcapfiles():
        try:
            cap_file = open(mailcap, 'r')
        except IOError:
            # Missing or unreadable mailcap files are simply skipped.
            continue
        file_caps = readmailcapfile(cap_file)
        cap_file.close()
        # Merge, concatenating entry lists for MIME types seen before.
        for key, value in file_caps.iteritems():
            if key in caps:
                caps[key] = caps[key] + value
            else:
                caps[key] = value
    return caps
+
def listmailcapfiles():
    """Return a list of all mailcap files found on the system.

    $MAILCAPS (a colon-separated search path) wins when set; otherwise
    the RFC 1524 default path rooted at $HOME is returned.
    """
    # XXX Actually, this is Unix-specific
    if 'MAILCAPS' in os.environ:
        # Fix: the temporary used to be named "str", shadowing the builtin.
        pathstr = os.environ['MAILCAPS']
        mailcaps = pathstr.split(':')
    else:
        if 'HOME' in os.environ:
            home = os.environ['HOME']
        else:
            # Don't bother with getpwuid()
            home = '.' # Last resort
        mailcaps = [home + '/.mailcap', '/etc/mailcap',
                '/usr/etc/mailcap', '/usr/local/etc/mailcap']
    return mailcaps
+
+
+# Part 2: the parser.
+
def readmailcapfile(fp):
    """Read a mailcap file and return a dictionary keyed by MIME type.

    Each MIME type is mapped to an entry consisting of a list of
    dictionaries; the list will contain more than one such dictionary
    if a given MIME type appears more than once in the mailcap file.
    Each dictionary contains key-value pairs for that MIME type, where
    the viewing command is stored with the key "view".
    """
    caps = {}
    while True:
        line = fp.readline()
        if not line:
            break
        # Skip comments and blank lines.
        if line[0] == '#' or line.strip() == '':
            continue
        nextline = line
        # A trailing backslash continues the entry on the next line.
        while nextline[-2:] == '\\\n':
            nextline = fp.readline()
            if not nextline:
                nextline = '\n'
            line = line[:-2] + nextline
        key, fields = parseline(line)
        if not (key and fields):
            continue
        # Normalize the key: strip whitespace around each part of the
        # type and lowercase the whole thing.
        types = [t.strip() for t in key.split('/')]
        key = '/'.join(types).lower()
        # Append to an existing entry list, or start a new one.
        caps.setdefault(key, []).append(fields)
    return caps
+
def parseline(line):
    """Parse one entry in a mailcap file and return a dictionary.

    The viewing command is stored as the value with the key "view",
    and the rest of the fields produce key-value pairs in the dict.
    Returns (None, None) for a line with fewer than two fields.
    """
    fields = []
    i, n = 0, len(line)
    # Split the line on unescaped semicolons.
    while i < n:
        field, i = parsefield(line, i, n)
        fields.append(field)
        i = i + 1   # step over the terminating semicolon
    if len(fields) < 2:
        return None, None
    key, view, rest = fields[0], fields[1], fields[2:]
    fields = {'view': view}
    for field in rest:
        eq = field.find('=')
        if eq < 0:
            # A bare flag such as "needsterminal" gets an empty value.
            fkey, fvalue = field, ""
        else:
            fkey = field[:eq].strip()
            fvalue = field[eq+1:].strip()
        # First occurrence of a key wins; later duplicates are ignored.
        if fkey not in fields:
            fields[fkey] = fvalue
    return key, fields
+
def parsefield(line, i, n):
    """Separate one key-value pair in a mailcap entry.

    Scan line[i:n] up to the next unescaped ';' and return the stripped
    field text together with the index of the terminator (or n).
    """
    start = i
    while i < n:
        c = line[i]
        if c == ';':
            break
        i = i + 1
        if c == '\\':
            # A backslash escapes the next character; skip it too.
            i = i + 1
    return line[start:i].strip(), i
+
+
+# Part 3: using the database.
+
def findmatch(caps, MIMEtype, key='view', filename="/dev/null", plist=[]):
    """Find a match for a mailcap entry.

    Return a tuple containing the command line, and the mailcap entry
    used; (None, None) if no match is found.  This may invoke the
    'test' command of several matching entries before deciding which
    entry to use.

    """
    entries = lookup(caps, MIMEtype, key)
    # XXX This code should somehow check for the needsterminal flag.
    for e in entries:
        if 'test' in e:
            # Bug fix: subst() takes (field, MIMEtype, filename, plist);
            # the MIMEtype argument was previously omitted here, so %t
            # substituted the filename and %s the plist (a TypeError).
            test = subst(e['test'], MIMEtype, filename, plist)
            if test and os.system(test) != 0:
                # Test command failed; try the next matching entry.
                continue
        command = subst(e[key], MIMEtype, filename, plist)
        return command, e
    return None, None
+
def lookup(caps, MIMEtype, key=None):
    """Return the mailcap entries matching MIMEtype (exact, then type/*).

    When key is given, only entries defining that field are returned.
    """
    entries = []
    if MIMEtype in caps:
        entries = entries + caps[MIMEtype]
    # Also try the wildcard form, e.g. 'text/*' for 'text/plain'.
    wildcard = MIMEtype.split('/')[0] + '/*'
    if wildcard in caps:
        entries = entries + caps[wildcard]
    if key is not None:
        entries = filter(lambda e, key=key: key in e, entries)
    return entries
+
def subst(field, MIMEtype, filename, plist=[]):
    """Perform RFC 1524 %-substitution on a mailcap command template.

    %s -> filename, %t -> MIME type, %{name} -> named parameter looked
    up in plist, %% -> a literal '%'.  A backslash makes the following
    character literal.
    """
    # XXX Actually, this is Unix-specific
    res = ''
    i, n = 0, len(field)
    while i < n:
        c = field[i]
        i = i + 1
        if c != '%':
            if c == '\\':
                # Escaped character: emit the next character literally.
                c = field[i:i+1]
                i = i + 1
            res = res + c
            continue
        c = field[i]
        i = i + 1
        if c == '%':
            res = res + c
        elif c == 's':
            res = res + filename
        elif c == 't':
            res = res + MIMEtype
        elif c == '{':
            # %{name}: substitute the named parameter from plist.
            start = i
            while i < n and field[i] != '}':
                i = i + 1
            name = field[start:i]
            i = i + 1
            res = res + findparam(name, plist)
        # XXX To do:
        # %n == number of parts if type is multipart/*
        # %F == list of alternating type and filename for parts
        else:
            # Unknown escape: pass it through unchanged.
            res = res + '%' + c
    return res
+
def findparam(name, plist):
    """Return the value of parameter *name* from plist, or '' if absent.

    plist entries have the form 'name=value'; matching on the name part
    is case-insensitive.
    """
    prefix = name.lower() + '='
    plen = len(prefix)
    for p in plist:
        if p[:plen].lower() == prefix:
            return p[plen:]
    return ''
+
+
+# Part 4: test program.
+
+def test():
+    import sys
+    caps = getcaps()
+    if not sys.argv[1:]:
+        show(caps)
+        return
+    for i in range(1, len(sys.argv), 2):
+        args = sys.argv[i:i+2]
+        if len(args) < 2:
+            print "usage: mailcap [MIMEtype file] ..."
+            return
+        MIMEtype = args[0]
+        file = args[1]
+        command, e = findmatch(caps, MIMEtype, 'view', file)
+        if not command:
+            print "No viewer found for", type
+        else:
+            print "Executing:", command
+            sts = os.system(command)
+            if sts:
+                print "Exit status:", sts
+
+def show(caps):
+    print "Mailcap files:"
+    for fn in listmailcapfiles(): print "\t" + fn
+    print
+    if not caps: caps = getcaps()
+    print "Mailcap entries:"
+    print
+    ckeys = caps.keys()
+    ckeys.sort()
+    for type in ckeys:
+        print type
+        entries = caps[type]
+        for e in entries:
+            keys = e.keys()
+            keys.sort()
+            for k in keys:
+                print "  %-15s" % k, e[k]
+            print
+
# Run the command-line driver when executed as a script.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/markupbase.py b/depot_tools/release/win/python_24/Lib/markupbase.py
new file mode 100644
index 0000000..b8d8d5d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/markupbase.py
@@ -0,0 +1,383 @@
"""Shared support for scanning document type declarations in HTML and XHTML."""

import re

# Matchers for the lexical pieces of a declaration: a name token followed
# by optional whitespace, and a quoted string literal.
_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
# Terminators for comments ("-->") and marked sections ("]]>"),
# allowing whitespace between the closing characters.
_commentclose = re.compile(r'--\s*>')
_markedsectionclose = re.compile(r']\s*]\s*>')

# An analysis of the MS-Word extensions is available at
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf

# MS Office marked sections ("<![if ...]>") close with a bare "]>".
_msmarkedsectionclose = re.compile(r']\s*>')

# Only the precompiled matchers are part of the module API; drop "re".
del re
+
+
class ParserBase:
    """Parser base class which provides some common support methods used
    by the SGML/HTML and XHTML parsers."""

    def __init__(self):
        # This class is only useful as a mixin for a concrete parser.
        if self.__class__ is ParserBase:
            raise RuntimeError(
                "markupbase.ParserBase must be subclassed")

    def error(self, message):
        # Subclasses decide how syntax errors are reported.
        raise NotImplementedError(
            "subclasses of ParserBase must override error()")

    def reset(self):
        # Restart position tracking at line 1, column 0.
        self.lineno = 1
        self.offset = 0

    def getpos(self):
        """Return current line number and offset."""
        return self.lineno, self.offset

    # Internal -- update line number and offset.  This should be
    # called for each piece of data exactly once, in order -- in other
    # words the concatenation of all the input strings to this
    # function should be exactly the entire input.
    def updatepos(self, i, j):
        if i >= j:
            return j
        rawdata = self.rawdata
        nlines = rawdata.count("\n", i, j)
        if nlines:
            self.lineno = self.lineno + nlines
            pos = rawdata.rindex("\n", i, j) # Should not fail
            self.offset = j-(pos+1)
        else:
            self.offset = self.offset + j-i
        return j

    # Extra characters a subclass allows inside a declaration body.
    _decl_otherchars = ''

    # Internal -- parse declaration (for use by subclasses).
    # Returns the index just past the declaration, or -1 if the buffer
    # ends before the declaration is complete.
    def parse_declaration(self, i):
        # This is some sort of declaration; in "HTML as
        # deployed," this should only be the document type
        # declaration ("<!DOCTYPE html...>").
        # ISO 8879:1986, however, has more complex
        # declaration syntax for elements in <!...>, including:
        # --comment--
        # [marked section]
        # name in the following list: ENTITY, DOCTYPE, ELEMENT,
        # ATTLIST, NOTATION, SHORTREF, USEMAP,
        # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
        rawdata = self.rawdata
        j = i + 2
        assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
        if rawdata[j:j+1] in ("-", ""):
            # Start of comment followed by buffer boundary,
            # or just a buffer boundary.
            return -1
        # A simple, practical version could look like: ((name|stringlit) S*) + '>'
        n = len(rawdata)
        if rawdata[j:j+1] == '--': #comment
            # NOTE(review): a length-1 slice can never equal '--', and the
            # '-' case already returned -1 above, so this branch appears
            # unreachable -- confirm against upstream markupbase.py.
            # Locate --.*-- as the body of the comment
            return self.parse_comment(i)
        elif rawdata[j] == '[': #marked section
            # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
            # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
            # Note that this is extended by Microsoft Office "Save as Web" function
            # to include [if...] and [endif].
            return self.parse_marked_section(i)
        else: #all other declaration elements
            decltype, j = self._scan_name(j, i)
        if j < 0:
            return j
        if decltype == "doctype":
            self._decl_otherchars = ''
        while j < n:
            c = rawdata[j]
            if c == ">":
                # end of declaration syntax
                data = rawdata[i+2:j]
                if decltype == "doctype":
                    self.handle_decl(data)
                else:
                    self.unknown_decl(data)
                return j + 1
            if c in "\"'":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1 # incomplete
                j = m.end()
            elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
                name, j = self._scan_name(j, i)
            elif c in self._decl_otherchars:
                j = j + 1
            elif c == "[":
                # this could be handled in a separate doctype parser
                if decltype == "doctype":
                    j = self._parse_doctype_subset(j + 1, i)
                elif decltype in ("attlist", "linktype", "link", "element"):
                    # must tolerate []'d groups in a content model in an element declaration
                    # also in data attribute specifications of attlist declaration
                    # also link type declaration subsets in linktype declarations
                    # also link attribute specification lists in link declarations
                    self.error("unsupported '[' char in %s declaration" % decltype)
                else:
                    self.error("unexpected '[' char in declaration")
            else:
                self.error(
                    "unexpected %r char in declaration" % rawdata[j])
            if j < 0:
                return j
        return -1 # incomplete

    # Internal -- parse a marked section
    # Override this to handle MS-word extension syntax <![if word]>content<![endif]>
    # Returns the index just past the section, or -1 if incomplete.
    def parse_marked_section( self, i, report=1 ):
        rawdata= self.rawdata
        assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
        sectName, j = self._scan_name( i+3, i )
        if j < 0:
            return j
        if sectName in ("temp", "cdata", "ignore", "include", "rcdata"):
            # look for standard ]]> ending
            match= _markedsectionclose.search(rawdata, i+3)
        elif sectName in ("if", "else", "endif"):
            # look for MS Office ]> ending
            match= _msmarkedsectionclose.search(rawdata, i+3)
        else:
            self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
        if not match:
            return -1
        if report:
            # Hand the section contents (sans delimiters) to the subclass.
            j = match.start(0)
            self.unknown_decl(rawdata[i+3: j])
        return match.end(0)

    # Internal -- parse comment, return length or -1 if not terminated
    def parse_comment(self, i, report=1):
        rawdata = self.rawdata
        if rawdata[i:i+4] != '<!--':
            self.error('unexpected call to parse_comment()')
        match = _commentclose.search(rawdata, i+4)
        if not match:
            return -1
        if report:
            # Hand the comment text (between <!-- and -->) to the subclass.
            j = match.start(0)
            self.handle_comment(rawdata[i+4: j])
        return match.end(0)

    # Internal -- scan past the internal subset in a <!DOCTYPE declaration,
    # returning the index just past any whitespace following the trailing ']'.
    def _parse_doctype_subset(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        j = i
        while j < n:
            c = rawdata[j]
            if c == "<":
                s = rawdata[j:j+2]
                if s == "<":
                    # end of buffer; incomplete
                    return -1
                if s != "<!":
                    self.updatepos(declstartpos, j + 1)
                    self.error("unexpected char in internal subset (in %r)" % s)
                if (j + 2) == n:
                    # end of buffer; incomplete
                    return -1
                if (j + 4) > n:
                    # end of buffer; incomplete
                    return -1
                if rawdata[j:j+4] == "<!--":
                    j = self.parse_comment(j, report=0)
                    if j < 0:
                        return j
                    continue
                name, j = self._scan_name(j + 2, declstartpos)
                if j == -1:
                    return -1
                if name not in ("attlist", "element", "entity", "notation"):
                    self.updatepos(declstartpos, j + 2)
                    self.error(
                        "unknown declaration %r in internal subset" % name)
                # handle the individual names
                meth = getattr(self, "_parse_doctype_" + name)
                j = meth(j, declstartpos)
                if j < 0:
                    return j
            elif c == "%":
                # parameter entity reference
                if (j + 1) == n:
                    # end of buffer; incomplete
                    return -1
                s, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                if rawdata[j] == ";":
                    j = j + 1
            elif c == "]":
                # End of the subset: skip whitespace and expect '>'.
                j = j + 1
                while j < n and rawdata[j].isspace():
                    j = j + 1
                if j < n:
                    if rawdata[j] == ">":
                        return j
                    self.updatepos(declstartpos, j)
                    self.error("unexpected char after internal subset")
                else:
                    return -1
            elif c.isspace():
                j = j + 1
            else:
                self.updatepos(declstartpos, j)
                self.error("unexpected char %r in internal subset" % c)
        # end of buffer reached
        return -1

    # Internal -- scan past <!ELEMENT declarations
    def _parse_doctype_element(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j == -1:
            return -1
        # style content model; just skip until '>'
        rawdata = self.rawdata
        if '>' in rawdata[j:]:
            return rawdata.find(">", j) + 1
        return -1

    # Internal -- scan past <!ATTLIST declarations
    def _parse_doctype_attlist(self, i, declstartpos):
        rawdata = self.rawdata
        name, j = self._scan_name(i, declstartpos)
        c = rawdata[j:j+1]
        if c == "":
            return -1
        if c == ">":
            return j + 1
        while 1:
            # scan a series of attribute descriptions; simplified:
            #   name type [value] [#constraint]
            name, j = self._scan_name(j, declstartpos)
            if j < 0:
                return j
            c = rawdata[j:j+1]
            if c == "":
                return -1
            if c == "(":
                # an enumerated type; look for ')'
                if ")" in rawdata[j:]:
                    j = rawdata.find(")", j) + 1
                else:
                    return -1
                while rawdata[j:j+1].isspace():
                    j = j + 1
                if not rawdata[j:]:
                    # end of buffer, incomplete
                    return -1
            else:
                name, j = self._scan_name(j, declstartpos)
            c = rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                # Default value given as a string literal.
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == "#":
                # A #REQUIRED / #IMPLIED / #FIXED style constraint.
                if rawdata[j:] == "#":
                    # end of buffer
                    return -1
                name, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == '>':
                # all done
                return j + 1

    # Internal -- scan past <!NOTATION declarations
    def _parse_doctype_notation(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j < 0:
            return j
        rawdata = self.rawdata
        while 1:
            c = rawdata[j:j+1]
            if not c:
                # end of buffer; incomplete
                return -1
            if c == '>':
                return j + 1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1
                j = m.end()
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan past <!ENTITY declarations
    def _parse_doctype_entity(self, i, declstartpos):
        rawdata = self.rawdata
        if rawdata[i:i+1] == "%":
            # Parameter entity: skip whitespace after the '%'.
            j = i + 1
            while 1:
                c = rawdata[j:j+1]
                if not c:
                    return -1
                if c.isspace():
                    j = j + 1
                else:
                    break
        else:
            j = i
        name, j = self._scan_name(j, declstartpos)
        if j < 0:
            return j
        while 1:
            c = self.rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1    # incomplete
            elif c == ">":
                return j + 1
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j

    # Internal -- scan a name token; returns (name, end-position), or
    # (None, -1) if we've reached the end of the buffer.
    def _scan_name(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        if i == n:
            return None, -1
        m = _declname_match(rawdata, i)
        if m:
            s = m.group()
            name = s.strip()
            if (i + len(s)) == n:
                return None, -1  # end of buffer
            return name.lower(), m.end()
        else:
            self.updatepos(declstartpos, i)
            self.error("expected name token at %r"
                       % rawdata[declstartpos:declstartpos+20])

    # To be overridden -- handlers for unknown objects
    def unknown_decl(self, data):
        pass
diff --git a/depot_tools/release/win/python_24/Lib/mhlib.py b/depot_tools/release/win/python_24/Lib/mhlib.py
new file mode 100644
index 0000000..0a8c444
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/mhlib.py
@@ -0,0 +1,998 @@
+"""MH interface -- purely object-oriented (well, almost)
+
+Executive summary:
+
+import mhlib
+
+mh = mhlib.MH()         # use default mailbox directory and profile
+mh = mhlib.MH(mailbox)  # override mailbox location (default from profile)
+mh = mhlib.MH(mailbox, profile) # override mailbox and profile
+
+mh.error(format, ...)   # print error message -- can be overridden
+s = mh.getprofile(key)  # profile entry (None if not set)
+path = mh.getpath()     # mailbox pathname
+name = mh.getcontext()  # name of current folder
+mh.setcontext(name)     # set name of current folder
+
+list = mh.listfolders() # names of top-level folders
+list = mh.listallfolders() # names of all folders, including subfolders
+list = mh.listsubfolders(name) # direct subfolders of given folder
+list = mh.listallsubfolders(name) # all subfolders of given folder
+
+mh.makefolder(name)     # create new folder
+mh.deletefolder(name)   # delete folder -- must have no subfolders
+
+f = mh.openfolder(name) # new open folder object
+
+f.error(format, ...)    # same as mh.error(format, ...)
+path = f.getfullname()  # folder's full pathname
+path = f.getsequencesfilename() # full pathname of folder's sequences file
+path = f.getmessagefilename(n)  # full pathname of message n in folder
+
+list = f.listmessages() # list of messages in folder (as numbers)
+n = f.getcurrent()      # get current message
+f.setcurrent(n)         # set current message
+list = f.parsesequence(seq)     # parse msgs syntax into list of messages
+n = f.getlast()         # get last message (0 if no messages)
+f.setlast(n)            # set last message (internal use only)
+
+dict = f.getsequences() # dictionary of sequences in folder {name: list}
+f.putsequences(dict)    # write sequences back to folder
+
+f.createmessage(n, fp)  # add message from open file fp as number n
+f.removemessages(list)  # remove messages in list from folder
+f.refilemessages(list, tofolder) # move messages in list to other folder
+f.movemessage(n, tofolder, ton)  # move one message to a given destination
+f.copymessage(n, tofolder, ton)  # copy one message to a given destination
+
+m = f.openmessage(n)    # new open message object (costs a file descriptor)
+m is a derived class of mimetools.Message(rfc822.Message), with:
+s = m.getheadertext()   # text of message's headers
+s = m.getheadertext(pred) # text of message's headers, filtered by pred
+s = m.getbodytext()     # text of message's body, decoded
+s = m.getbodytext(0)    # text of message's body, not decoded
+"""
+
+# XXX To do, functionality:
+# - annotate messages
+# - send messages
+#
+# XXX To do, organization:
+# - move IntSet to separate file
+# - move most Message functionality to module mimetools
+
+
+# Customizable defaults
+
MH_PROFILE = '~/.mh_profile'    # default MH profile file
PATH = '~/Mail'                 # default mailbox directory
MH_SEQUENCES = '.mh_sequences'  # name of the per-folder sequences file
FOLDER_PROTECT = 0700           # default creation mode for new folders (octal)
+
+
+# Imported modules
+
+import os
+import sys
+import re
+import mimetools
+import multifile
+import shutil
+from bisect import bisect
+
+__all__ = ["MH","Error","Folder","Message"]
+
+# Exported constants
+
class Error(Exception):
    """Base class for all errors raised by this module."""
    pass
+
+
class MH:
    """Class representing a particular collection of folders.
    Optional constructor arguments are the pathname for the directory
    containing the collection, and the MH profile to use.
    If either is omitted or empty a default is used; the default
    directory is taken from the MH profile if it is specified there."""

    def __init__(self, path = None, profile = None):
        """Constructor."""
        if profile is None: profile = MH_PROFILE
        self.profile = os.path.expanduser(profile)
        if path is None: path = self.getprofile('Path')
        if not path: path = PATH
        # A relative path is interpreted relative to the home directory.
        if not os.path.isabs(path) and path[0] != '~':
            path = os.path.join('~', path)
        path = os.path.expanduser(path)
        if not os.path.isdir(path): raise Error, 'MH() path not found'
        self.path = path

    def __repr__(self):
        """String representation."""
        return 'MH(%r, %r)' % (self.path, self.profile)

    def error(self, msg, *args):
        """Routine to print an error.  May be overridden by a derived class."""
        sys.stderr.write('MH error: %s\n' % (msg % args))

    def getprofile(self, key):
        """Return a profile entry (a string), None if not found."""
        return pickline(self.profile, key)

    def getpath(self):
        """Return the path (the name of the collection's directory)."""
        return self.path

    def getcontext(self):
        """Return the name of the current folder ('inbox' when unset)."""
        context = pickline(os.path.join(self.getpath(), 'context'),
                  'Current-Folder')
        if not context: context = 'inbox'
        return context

    def setcontext(self, context):
        """Set the name of the current folder (rewrites the context file)."""
        fn = os.path.join(self.getpath(), 'context')
        f = open(fn, "w")
        f.write("Current-Folder: %s\n" % context)
        f.close()

    def listfolders(self):
        """Return the names of the top-level folders, sorted."""
        folders = []
        path = self.getpath()
        for name in os.listdir(path):
            fullname = os.path.join(path, name)
            if os.path.isdir(fullname):
                folders.append(name)
        folders.sort()
        return folders

    def listsubfolders(self, name):
        """Return the names of the subfolders in a given folder
        (prefixed with the given folder name)."""
        fullname = os.path.join(self.path, name)
        # Get the link count so we can avoid listing folders
        # that have no subfolders.
        # NOTE(review): the nlinks-2 trick assumes a traditional Unix
        # filesystem where each subdirectory adds a hard link -- confirm
        # on the target platform.
        nlinks = os.stat(fullname).st_nlink
        if nlinks <= 2:
            return []
        subfolders = []
        subnames = os.listdir(fullname)
        for subname in subnames:
            fullsubname = os.path.join(fullname, subname)
            if os.path.isdir(fullsubname):
                name_subname = os.path.join(name, subname)
                subfolders.append(name_subname)
                # Stop looking for subfolders when
                # we've seen them all
                nlinks = nlinks - 1
                if nlinks <= 2:
                    break
        subfolders.sort()
        return subfolders

    def listallfolders(self):
        """Return the names of all folders and subfolders, recursively."""
        return self.listallsubfolders('')

    def listallsubfolders(self, name):
        """Return the names of subfolders in a given folder, recursively."""
        fullname = os.path.join(self.path, name)
        # Get the link count so we can avoid listing folders
        # that have no subfolders.
        nlinks = os.stat(fullname).st_nlink
        if nlinks <= 2:
            return []
        subfolders = []
        subnames = os.listdir(fullname)
        for subname in subnames:
            # Skip backup files (comma-prefixed) and message files
            # (purely numeric names).
            if subname[0] == ',' or isnumeric(subname): continue
            fullsubname = os.path.join(fullname, subname)
            if os.path.isdir(fullsubname):
                name_subname = os.path.join(name, subname)
                subfolders.append(name_subname)
                # Don't recurse through symlinks, to avoid cycles.
                if not os.path.islink(fullsubname):
                    subsubfolders = self.listallsubfolders(
                              name_subname)
                    subfolders = subfolders + subsubfolders
                # Stop looking for subfolders when
                # we've seen them all
                nlinks = nlinks - 1
                if nlinks <= 2:
                    break
        subfolders.sort()
        return subfolders

    def openfolder(self, name):
        """Return a new Folder object for the named folder."""
        return Folder(self, name)

    def makefolder(self, name):
        """Create a new folder (or raise os.error if it cannot be created)."""
        # The Folder-Protect profile entry, when present, is an octal
        # mode string.
        protect = pickline(self.profile, 'Folder-Protect')
        if protect and isnumeric(protect):
            mode = int(protect, 8)
        else:
            mode = FOLDER_PROTECT
        os.mkdir(os.path.join(self.getpath(), name), mode)

    def deletefolder(self, name):
        """Delete a folder.  This removes files in the folder but not
        subdirectories.  Raise os.error if deleting the folder itself fails."""
        fullname = os.path.join(self.getpath(), name)
        for subname in os.listdir(fullname):
            fullsubname = os.path.join(fullname, subname)
            try:
                # Best effort: report but keep going on per-file failures.
                os.unlink(fullsubname)
            except os.error:
                self.error('%s not deleted, continuing...' %
                          fullsubname)
        os.rmdir(fullname)
+
+
numericprog = re.compile('^[1-9][0-9]*$')
def isnumeric(str):
    """True if str looks like an MH message number: a decimal integer
    with no sign and no leading zero."""
    m = numericprog.match(str)
    if m is None:
        return False
    return True
+
class Folder:
    """Class representing a particular folder."""

    def __init__(self, mh, name):
        """Constructor.  mh is the owning MH collection, name the
        folder's (possibly nested) name within it."""
        self.mh = mh
        self.name = name
        if not os.path.isdir(self.getfullname()):
            raise Error, 'no folder %s' % name

    def __repr__(self):
        """String representation."""
        return 'Folder(%r, %r)' % (self.mh, self.name)

    def error(self, *args):
        """Error message handler -- delegates to the owning MH object."""
        self.mh.error(*args)

    def getfullname(self):
        """Return the full pathname of the folder."""
        return os.path.join(self.mh.path, self.name)

    def getsequencesfilename(self):
        """Return the full pathname of the folder's sequences file."""
        return os.path.join(self.getfullname(), MH_SEQUENCES)

    def getmessagefilename(self, n):
        """Return the full pathname of a message in the folder."""
        return os.path.join(self.getfullname(), str(n))

    def listsubfolders(self):
        """Return list of direct subfolders."""
        return self.mh.listsubfolders(self.name)

    def listallsubfolders(self):
        """Return list of all subfolders."""
        return self.mh.listallsubfolders(self.name)

    def listmessages(self):
        """Return the list of messages currently present in the folder.
        As a side effect, set self.last to the last message (or 0)."""
        messages = []
        match = numericprog.match
        append = messages.append
        # Message files are exactly the purely-numeric directory entries.
        for name in os.listdir(self.getfullname()):
            if match(name):
                append(name)
        messages = map(int, messages)
        messages.sort()
        if messages:
            self.last = messages[-1]
        else:
            self.last = 0
        return messages

    def getsequences(self):
        """Return the set of sequences for the folder as a dictionary
        mapping sequence name to a sorted list of message numbers."""
        sequences = {}
        fullname = self.getsequencesfilename()
        try:
            f = open(fullname, 'r')
        except IOError:
            # No sequences file means no sequences.
            return sequences
        while 1:
            line = f.readline()
            if not line: break
            fields = line.split(':')
            if len(fields) != 2:
                self.error('bad sequence in %s: %s' %
                          (fullname, line.strip()))
                # NOTE(review): execution still falls through after the
                # error report, so a malformed line can raise IndexError
                # below -- confirm whether a 'continue' was intended.
            key = fields[0].strip()
            value = IntSet(fields[1].strip(), ' ').tolist()
            sequences[key] = value
        return sequences

    def putsequences(self, sequences):
        """Write the set of sequences back to the folder.  An empty
        mapping removes the sequences file entirely."""
        fullname = self.getsequencesfilename()
        # The file is opened lazily so an empty mapping never creates it.
        f = None
        for key, seq in sequences.iteritems():
            s = IntSet('', ' ')
            s.fromlist(seq)
            if not f: f = open(fullname, 'w')
            f.write('%s: %s\n' % (key, s.tostring()))
        if not f:
            try:
                os.unlink(fullname)
            except os.error:
                pass
        else:
            f.close()

    def getcurrent(self):
        """Return the current message.  Raise Error when there is none."""
        seqs = self.getsequences()
        try:
            return max(seqs['cur'])
        except (ValueError, KeyError):
            # KeyError: no 'cur' sequence; ValueError: max() of empty list.
            raise Error, "no cur message"

    def setcurrent(self, n):
        """Set the current message."""
        updateline(self.getsequencesfilename(), 'cur', str(n), 0)

    def parsesequence(self, seq):
        """Parse an MH sequence specification into a message list.
        Attempt to mimic mh-sequence(5) as close as possible.
        Also attempt to mimic observed behavior regarding which
        conditions cause which error messages."""
        # XXX Still not complete (see mh-format(5)).
        # Missing are:
        # - 'prev', 'next' as count
        # - Sequence-Negation option
        all = self.listmessages()
        # Observed behavior: test for empty folder is done first
        if not all:
            raise Error, "no messages in %s" % self.name
        # Common case first: all is frequently the default
        if seq == 'all':
            return all
        # Test for X:Y before X-Y because 'seq:-n' matches both
        i = seq.find(':')
        if i >= 0:
            head, dir, tail = seq[:i], '', seq[i+1:]
            if tail[:1] in '-+':
                dir, tail = tail[:1], tail[1:]
            if not isnumeric(tail):
                raise Error, "bad message list %s" % seq
            try:
                count = int(tail)
            except (ValueError, OverflowError):
                # Can't use sys.maxint because of i+count below
                count = len(all)
            try:
                anchor = self._parseindex(head, all)
            except Error, msg:
                # Not an index: try the head as a named sequence.
                seqs = self.getsequences()
                if not head in seqs:
                    if not msg:
                        msg = "bad message list %s" % seq
                    raise Error, msg, sys.exc_info()[2]
                msgs = seqs[head]
                if not msgs:
                    raise Error, "sequence %s empty" % head
                if dir == '-':
                    return msgs[-count:]
                else:
                    return msgs[:count]
            else:
                if not dir:
                    if head in ('prev', 'last'):
                        dir = '-'
                if dir == '-':
                    i = bisect(all, anchor)
                    return all[max(0, i-count):i]
                else:
                    i = bisect(all, anchor-1)
                    return all[i:i+count]
        # Test for X-Y next
        i = seq.find('-')
        if i >= 0:
            begin = self._parseindex(seq[:i], all)
            end = self._parseindex(seq[i+1:], all)
            i = bisect(all, begin-1)
            j = bisect(all, end)
            r = all[i:j]
            if not r:
                raise Error, "bad message list %s" % seq
            return r
        # Neither X:Y nor X-Y; must be a number or a (pseudo-)sequence
        try:
            n = self._parseindex(seq, all)
        except Error, msg:
            seqs = self.getsequences()
            if not seq in seqs:
                if not msg:
                    msg = "bad message list %s" % seq
                raise Error, msg
            return seqs[seq]
        else:
            if n not in all:
                if isnumeric(seq):
                    raise Error, "message %d doesn't exist" % n
                else:
                    raise Error, "no %s message" % seq
            else:
                return [n]

    def _parseindex(self, seq, all):
        """Internal: parse a message number (or cur, first, etc.)."""
        if isnumeric(seq):
            try:
                return int(seq)
            except (OverflowError, ValueError):
                return sys.maxint
        if seq in ('cur', '.'):
            return self.getcurrent()
        if seq == 'first':
            return all[0]
        if seq == 'last':
            return all[-1]
        if seq == 'next':
            n = self.getcurrent()
            i = bisect(all, n)
            try:
                return all[i]
            except IndexError:
                raise Error, "no next message"
        if seq == 'prev':
            n = self.getcurrent()
            i = bisect(all, n-1)
            if i == 0:
                raise Error, "no prev message"
            try:
                return all[i-1]
            except IndexError:
                raise Error, "no prev message"
        # Not an index at all; caller decides on the error message.
        raise Error, None

    def openmessage(self, n):
        """Open a message -- returns a Message object."""
        return Message(self, n)

    def removemessages(self, list):
        """Remove one or more messages -- may raise os.error."""
        errors = []
        deleted = []
        for n in list:
            path = self.getmessagefilename(n)
            # MH convention: a removed message n is kept as backup ',n'.
            commapath = self.getmessagefilename(',' + str(n))
            try:
                os.unlink(commapath)
            except os.error:
                pass
            try:
                os.rename(path, commapath)
            except os.error, msg:
                errors.append(msg)
            else:
                deleted.append(n)
        if deleted:
            self.removefromallsequences(deleted)
        if errors:
            if len(errors) == 1:
                raise os.error, errors[0]
            else:
                raise os.error, ('multiple errors:', errors)

    def refilemessages(self, list, tofolder, keepsequences=0):
        """Refile one or more messages -- may raise os.error.
        'tofolder' is an open folder object."""
        errors = []
        refiled = {}
        for n in list:
            ton = tofolder.getlast() + 1
            path = self.getmessagefilename(n)
            topath = tofolder.getmessagefilename(ton)
            try:
                os.rename(path, topath)
            except os.error:
                # Try copying
                try:
                    shutil.copy2(path, topath)
                    os.unlink(path)
                except (IOError, os.error), msg:
                    errors.append(msg)
                    # Clean up a possibly partial copy.
                    try:
                        os.unlink(topath)
                    except os.error:
                        pass
                    continue
            tofolder.setlast(ton)
            refiled[n] = ton
        if refiled:
            if keepsequences:
                tofolder._copysequences(self, refiled.items())
            self.removefromallsequences(refiled.keys())
        if errors:
            if len(errors) == 1:
                raise os.error, errors[0]
            else:
                raise os.error, ('multiple errors:', errors)

    def _copysequences(self, fromfolder, refileditems):
        """Helper for refilemessages() to copy sequences."""
        fromsequences = fromfolder.getsequences()
        tosequences = self.getsequences()
        changed = 0
        for name, seq in fromsequences.items():
            try:
                toseq = tosequences[name]
                new = 0
            except KeyError:
                toseq = []
                new = 1
            for fromn, ton in refileditems:
                if fromn in seq:
                    toseq.append(ton)
                    changed = 1
            if new and toseq:
                tosequences[name] = toseq
        if changed:
            self.putsequences(tosequences)

    def movemessage(self, n, tofolder, ton):
        """Move one message over a specific destination message,
        which may or may not already exist."""
        path = self.getmessagefilename(n)
        # Open it to check that it exists
        f = open(path)
        f.close()
        del f
        topath = tofolder.getmessagefilename(ton)
        # Keep any existing destination message as a ',ton' backup.
        backuptopath = tofolder.getmessagefilename(',%d' % ton)
        try:
            os.rename(topath, backuptopath)
        except os.error:
            pass
        try:
            os.rename(path, topath)
        except os.error:
            # Try copying
            ok = 0
            try:
                tofolder.setlast(None)
                shutil.copy2(path, topath)
                ok = 1
            finally:
                if not ok:
                    try:
                        os.unlink(topath)
                    except os.error:
                        pass
            os.unlink(path)
        self.removefromallsequences([n])

    def copymessage(self, n, tofolder, ton):
        """Copy one message over a specific destination message,
        which may or may not already exist."""
        path = self.getmessagefilename(n)
        # Open it to check that it exists
        f = open(path)
        f.close()
        del f
        topath = tofolder.getmessagefilename(ton)
        # Keep any existing destination message as a ',ton' backup.
        backuptopath = tofolder.getmessagefilename(',%d' % ton)
        try:
            os.rename(topath, backuptopath)
        except os.error:
            pass
        ok = 0
        try:
            tofolder.setlast(None)
            shutil.copy2(path, topath)
            ok = 1
        finally:
            if not ok:
                # Remove a possibly partial copy on failure.
                try:
                    os.unlink(topath)
                except os.error:
                    pass

    def createmessage(self, n, txt):
        """Create a message, with text from the open file txt."""
        path = self.getmessagefilename(n)
        # Keep any existing message n as a ',n' backup.
        backuppath = self.getmessagefilename(',%d' % n)
        try:
            os.rename(path, backuppath)
        except os.error:
            pass
        ok = 0
        BUFSIZE = 16*1024
        try:
            f = open(path, "w")
            while 1:
                buf = txt.read(BUFSIZE)
                if not buf:
                    break
                f.write(buf)
            f.close()
            ok = 1
        finally:
            if not ok:
                # Remove a possibly partial message on failure.
                try:
                    os.unlink(path)
                except os.error:
                    pass

    def removefromallsequences(self, list):
        """Remove one or more messages from all sequences (including last)
        -- but not from 'cur'!!!"""
        if hasattr(self, 'last') and self.last in list:
            del self.last
        sequences = self.getsequences()
        changed = 0
        for name, seq in sequences.items():
            if name == 'cur':
                continue
            for n in list:
                if n in seq:
                    seq.remove(n)
                    changed = 1
                    # Drop sequences that have become empty.
                    if not seq:
                        del sequences[name]
        if changed:
            self.putsequences(sequences)

    def getlast(self):
        """Return the last message number."""
        if not hasattr(self, 'last'):
            self.listmessages() # Set self.last
        return self.last

    def setlast(self, last):
        """Set the last message number (None invalidates the cache)."""
        if last is None:
            if hasattr(self, 'last'):
                del self.last
        else:
            self.last = last
+
class Message(mimetools.Message):
    """A single message in a Folder, parsed via mimetools/rfc822."""

    def __init__(self, f, n, fp = None):
        """Constructor.  f is the Folder, n the message number; fp is an
        optional already-open file, otherwise the message file is opened."""
        self.folder = f
        self.number = n
        if fp is None:
            path = f.getmessagefilename(n)
            fp = open(path, 'r')
        mimetools.Message.__init__(self, fp)

    def __repr__(self):
        """String representation."""
        return 'Message(%s, %s)' % (repr(self.folder), self.number)

    def getheadertext(self, pred = None):
        """Return the message's header text as a string.  If an
        argument is specified, it is used as a filter predicate to
        decide which headers to return (its argument is the header
        name converted to lower case)."""
        if pred is None:
            return ''.join(self.headers)
        headers = []
        # 'hit' carries the predicate result across continuation lines
        # (which start with whitespace) of the last tested header.
        hit = 0
        for line in self.headers:
            if not line[0].isspace():
                i = line.find(':')
                if i > 0:
                    hit = pred(line[:i].lower())
            if hit: headers.append(line)
        return ''.join(headers)

    def getbodytext(self, decode = 1):
        """Return the message's body text as string.  This undoes a
        Content-Transfer-Encoding, but does not interpret other MIME
        features (e.g. multipart messages).  To suppress decoding,
        pass 0 as an argument."""
        self.fp.seek(self.startofbody)
        encoding = self.getencoding()
        # These encodings are identity transforms; return the raw text.
        if not decode or encoding in ('', '7bit', '8bit', 'binary'):
            return self.fp.read()
        from StringIO import StringIO
        output = StringIO()
        mimetools.decode(self.fp, output, encoding)
        return output.getvalue()

    def getbodyparts(self):
        """Only for multipart messages: return the message's body as a
        list of SubMessage objects.  Each submessage object behaves
        (almost) as a Message object."""
        if self.getmaintype() != 'multipart':
            raise Error, 'Content-Type is not multipart/*'
        bdry = self.getparam('boundary')
        if not bdry:
            raise Error, 'multipart/* without boundary param'
        self.fp.seek(self.startofbody)
        mf = multifile.MultiFile(self.fp)
        mf.push(bdry)
        parts = []
        while mf.next():
            # Parts are numbered "<msg>.<index>", starting at 1.
            n = "%s.%r" % (self.number, 1 + len(parts))
            part = SubMessage(self.folder, n, mf)
            parts.append(part)
        mf.pop()
        return parts

    def getbody(self):
        """Return body, either a string or a list of messages."""
        if self.getmaintype() == 'multipart':
            return self.getbodyparts()
        else:
            return self.getbodytext()
+
+
class SubMessage(Message):
    """One part of a multipart Message.

    Both the decoded and the undecoded body are read eagerly at
    construction time, so the object stays usable after the underlying
    MultiFile has advanced past this part.
    """

    def __init__(self, f, n, fp):
        """Parse one message part from the open file fp."""
        Message.__init__(self, f, n, fp)
        if self.getmaintype() == 'multipart':
            body = Message.getbodyparts(self)
        else:
            body = Message.getbodytext(self)
        self.body = body
        # Also keep the raw (undecoded) text for getbodytext(decode=0).
        self.bodyencoded = Message.getbodytext(self, decode=0)
            # XXX If this is big, should remember file pointers

    def __repr__(self):
        """String representation."""
        return 'SubMessage(%s, %s, %s)' % (self.folder, self.number, self.fp)

    def getbodytext(self, decode = 1):
        """Return the cached body text; None if this part is multipart."""
        if not decode:
            return self.bodyencoded
        body = self.body
        if type(body) == type(''):
            return body

    def getbodyparts(self):
        """Return the cached part list; None if this part is not multipart."""
        body = self.body
        if type(body) == type([]):
            return body

    def getbody(self):
        """Return the cached body, whatever its type."""
        return self.body
+
+
class IntSet:
    """Class implementing sets of integers.

    This is an efficient representation for sets consisting of several
    continuous ranges, e.g. 1-100,200-400,402-1000 is represented
    internally as a list of three pairs: [(1,100), (200,400),
    (402,1000)].  The internal representation is always kept normalized.

    The constructor has up to three arguments:
    - the string used to initialize the set (default ''),
    - the separator between ranges (default ',')
    - the separator between begin and end of a range (default '-')
    The separators must be strings (not regexprs) and should be different.

    The tostring() function yields a string that can be passed to another
    IntSet constructor; __repr__() is a valid IntSet constructor itself.
    """

    # XXX The default begin/end separator means that negative numbers are
    #     not supported very well.
    #
    # XXX There are currently no operations to remove set elements.

    def __init__(self, data = None, sep = ',', rng = '-'):
        """Create a set, optionally initialized from a string like '1-5,7'."""
        self.pairs = []
        self.sep = sep
        self.rng = rng
        if data: self.fromstring(data)

    def reset(self):
        """Make the set empty."""
        self.pairs = []

    def __cmp__(self, other):
        # Python 2 comparison protocol; compares the normalized pair lists.
        return cmp(self.pairs, other.pairs)

    def __hash__(self):
        # Bug fix: self.pairs is a list and hash(list) raises TypeError,
        # so the original hash(self.pairs) could never succeed.  Hash the
        # equivalent tuple instead; equal sets hash equal, consistent
        # with __cmp__.
        return hash(tuple(self.pairs))

    def __repr__(self):
        """Return a string that is a valid IntSet constructor call."""
        return 'IntSet(%r, %r, %r)' % (self.tostring(), self.sep, self.rng)

    def normalize(self):
        """Sort the pairs and merge adjacent or overlapping ranges."""
        self.pairs.sort()
        i = 1
        while i < len(self.pairs):
            alo, ahi = self.pairs[i-1]
            blo, bhi = self.pairs[i]
            if ahi >= blo-1:
                # Ranges touch or overlap: fuse them into one.
                self.pairs[i-1:i+1] = [(alo, max(ahi, bhi))]
            else:
                i = i+1

    def tostring(self):
        """Return the set in the string form accepted by fromstring()."""
        s = ''
        for lo, hi in self.pairs:
            if lo == hi: t = repr(lo)
            else: t = repr(lo) + self.rng + repr(hi)
            if s: s = s + (self.sep + t)
            else: s = t
        return s

    def tolist(self):
        """Return the set as a sorted list of individual integers."""
        l = []
        for lo, hi in self.pairs:
            # list() so the result is a real list on any Python version
            # (identical behavior on Python 2, where range() is a list).
            l = l + list(range(lo, hi+1))
        return l

    def fromlist(self, list):
        """Add every integer in the given list to the set."""
        for i in list:
            self.append(i)

    def clone(self):
        """Return an independent copy of the set."""
        new = IntSet()
        new.pairs = self.pairs[:]
        return new

    def min(self):
        """Return the smallest element (IndexError if the set is empty)."""
        return self.pairs[0][0]

    def max(self):
        """Return the largest element (IndexError if the set is empty)."""
        return self.pairs[-1][-1]

    def contains(self, x):
        """Return True if x is an element of the set."""
        for lo, hi in self.pairs:
            if lo <= x <= hi: return True
        return False

    def append(self, x):
        """Add the single integer x to the set, keeping it normalized."""
        for i in range(len(self.pairs)):
            lo, hi = self.pairs[i]
            if x < lo: # Need to insert before
                if x+1 == lo:
                    # x extends the following range downward.
                    self.pairs[i] = (x, hi)
                else:
                    self.pairs.insert(i, (x, x))
                if i > 0 and x-1 == self.pairs[i-1][1]:
                    # Merge with previous
                    self.pairs[i-1:i+1] = [
                            (self.pairs[i-1][0],
                             self.pairs[i][1])
                          ]
                return
            if x <= hi: # Already in set
                return
        i = len(self.pairs) - 1
        if i >= 0:
            lo, hi = self.pairs[i]
            if x-1 == hi:
                # x extends the last range upward.
                self.pairs[i] = lo, x
                return
        self.pairs.append((x, x))

    def addpair(self, xlo, xhi):
        """Add the inclusive range xlo..xhi (no-op if the range is empty)."""
        if xlo > xhi: return
        self.pairs.append((xlo, xhi))
        self.normalize()

    def fromstring(self, data):
        """Add ranges parsed from a string such as '1-5,7'.

        Raises ValueError for malformed input (non-numeric field,
        reversed range, or more than two range bounds).
        """
        new = []
        for part in data.split(self.sep):
            bounds = []
            for subp in part.split(self.rng):
                bounds.append(int(subp.strip()))
            if len(bounds) == 1:
                new.append((bounds[0], bounds[0]))
            elif len(bounds) == 2 and bounds[0] <= bounds[1]:
                new.append((bounds[0], bounds[1]))
            else:
                # Parenthesized raise form: equivalent on Python 2.4 and
                # also valid on Python 3.
                raise ValueError('bad data passed to IntSet')
        self.pairs = self.pairs + new
        self.normalize()
+
+
+# Subroutines to read/write entries in .mh_profile and .mh_sequences
+
def pickline(file, key, casefold = 1):
    """Return the value of the first "key: value" entry in the file.

    Continuation lines (lines starting with whitespace) are appended to
    the value.  Matching is case-insensitive unless casefold is false.
    Returns None if the file cannot be opened or the key is not found.
    """
    try:
        f = open(file, 'r')
    except IOError:
        return None
    # Bug fix: the original never closed f, leaking a file descriptor
    # on every call; close it on all exit paths.
    try:
        pat = re.escape(key) + ':'
        prog = re.compile(pat, casefold and re.IGNORECASE)
        while 1:
            line = f.readline()
            if not line: break
            if prog.match(line):
                text = line[len(key)+1:]
                while 1:
                    line = f.readline()
                    if not line or not line[0].isspace():
                        break
                    text = text + line
                return text.strip()
        return None
    finally:
        f.close()
+
def updateline(file, key, value, casefold = 1):
    """Set, replace or delete a "key: value" line in the given file.

    If value is None the first matching line is removed; otherwise the
    first matching line is replaced (or a new line is appended).  The
    file is rewritten via a "file~" temporary and os.rename, and is
    created if it does not yet exist.  Matching is case-insensitive
    unless casefold is false.
    """
    try:
        fp = open(file, 'r')
        lines = fp.readlines()
        fp.close()
    except IOError:
        # Missing file: start from an empty line list.
        lines = []
    prog = re.compile(re.escape(key) + ':(.*)\n',
                      casefold and re.IGNORECASE)
    if value is None:
        newline = None
    else:
        newline = '%s: %s\n' % (key, value)
    hit = 0
    for i, line in enumerate(lines):
        if prog.match(line):
            if newline is None:
                del lines[i]
            else:
                lines[i] = newline
            hit = 1
            break
    if not hit and newline is not None:
        lines.append(newline)
    # Write to a sibling temporary, then rename into place.
    backup = file + "~"
    fp = open(backup, 'w')
    fp.writelines(lines)
    fp.close()
    os.rename(backup, file)
+
+
+# Test program
+
def test():
    """Interactive self-test: exercises folder creation, sequences and
    parsesequence against a live MH setup.  Requires a real ~/Mail
    directory and the external MH 'pick' command; not a unit test."""
    global mh, f
    os.system('rm -rf $HOME/Mail/@test')
    mh = MH()
    # Print each expression and its value.
    def do(s): print s; print eval(s)
    do('mh.listfolders()')
    do('mh.listallfolders()')
    testfolders = ['@test', '@test/test1', '@test/test2',
                   '@test/test1/test11', '@test/test1/test12',
                   '@test/test1/test11/test111']
    for t in testfolders: do('mh.makefolder(%r)' % (t,))
    do('mh.listsubfolders(\'@test\')')
    do('mh.listallsubfolders(\'@test\')')
    f = mh.openfolder('@test')
    do('f.listsubfolders()')
    do('f.listallsubfolders()')
    do('f.getsequences()')
    seqs = f.getsequences()
    seqs['foo'] = IntSet('1-10 12-20', ' ').tolist()
    print seqs
    f.putsequences(seqs)
    do('f.getsequences()')
    # Delete deepest folders first: deletefolder() does not recurse.
    for t in reversed(testfolders): do('mh.deletefolder(%r)' % (t,))
    do('mh.getcontext()')
    context = mh.getcontext()
    f = mh.openfolder(context)
    do('f.getcurrent()')
    for seq in ['first', 'last', 'cur', '.', 'prev', 'next',
                'first:3', 'last:3', 'cur:3', 'cur:-3',
                'prev:3', 'next:3',
                '1:3', '1:-3', '100:3', '100:-3', '10000:3', '10000:-3',
                'all']:
        try:
            do('f.parsesequence(%r)' % (seq,))
        except Error, msg:
            print "Error:", msg
        # Compare against what the external MH 'pick' command produces.
        stuff = os.popen("pick %r 2>/dev/null" % (seq,)).read()
        list = map(int, stuff.split())
        print list, "<-- pick"
    do('f.listmessages()')
+
+
if __name__ == '__main__':
    # Run the interactive self-test when executed as a script.
    test()
diff --git a/depot_tools/release/win/python_24/Lib/mimetools.py b/depot_tools/release/win/python_24/Lib/mimetools.py
new file mode 100644
index 0000000..0b698ac
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/mimetools.py
@@ -0,0 +1,238 @@
+"""Various tools used by MIME-reading or MIME-writing programs."""
+
+
+import os
+import rfc822
+import tempfile
+
+__all__ = ["Message","choose_boundary","encode","decode","copyliteral",
+           "copybinary"]
+
class Message(rfc822.Message):
    """An rfc822.Message that also understands MIME headers.

    On construction the Content-Transfer-Encoding and Content-Type
    headers are captured, and the type and its parameter list are broken
    out into the attributes `type`, `maintype`, `subtype`, `plisttext`
    and `plist`.
    """

    def __init__(self, fp, seekable = 1):
        rfc822.Message.__init__(self, fp, seekable)
        self.encodingheader = self.getheader('content-transfer-encoding')
        self.typeheader = self.getheader('content-type')
        self.parsetype()
        self.parseplist()

    def parsetype(self):
        # Split the Content-Type header into the type proper and the
        # raw parameter text (everything from the first ';' onward).
        ctype = self.typeheader
        if ctype is None:
            ctype = 'text/plain'
        if ';' in ctype:
            semi = ctype.index(';')
            self.plisttext = ctype[semi:]
            ctype = ctype[:semi]
        else:
            self.plisttext = ''
        fields = [part.strip().lower() for part in ctype.split('/')]
        self.type = '/'.join(fields)
        self.maintype = fields[0]
        self.subtype = '/'.join(fields[1:])

    def parseplist(self):
        # Turn the ';'-separated "name=value" parameters into a list of
        # 'name=value' strings with the name lower-cased.
        rest = self.plisttext
        self.plist = []
        while rest[:1] == ';':
            rest = rest[1:]
            if ';' in rest:
                # XXX Should parse quotes!
                end = rest.index(';')
            else:
                end = len(rest)
            param = rest[:end]
            if '=' in param:
                eq = param.index('=')
                param = param[:eq].strip().lower() + '=' + param[eq+1:].strip()
            self.plist.append(param.strip())
            rest = rest[end:]

    def getplist(self):
        """Return the list of 'name=value' Content-Type parameters."""
        return self.plist

    def getparam(self, name):
        """Return the unquoted value of parameter `name`, or None."""
        wanted = name.lower() + '='
        wlen = len(wanted)
        for param in self.plist:
            if param[:wlen] == wanted:
                return rfc822.unquote(param[wlen:])
        return None

    def getparamnames(self):
        """Return the lower-cased names of all Content-Type parameters."""
        names = []
        for param in self.plist:
            eq = param.find('=')
            if eq >= 0:
                names.append(param[:eq].lower())
        return names

    def getencoding(self):
        """Return the transfer encoding, defaulting to '7bit'."""
        header = self.encodingheader
        if header is None:
            return '7bit'
        return header.lower()

    def gettype(self):
        """Return the full 'maintype/subtype' string."""
        return self.type

    def getmaintype(self):
        """Return the major type (e.g. 'text')."""
        return self.maintype

    def getsubtype(self):
        """Return the subtype (e.g. 'plain')."""
        return self.subtype
+
+
+
+
+# Utility functions
+# -----------------
+
# A module-private lock serialises access to the boundary counter so
# choose_boundary() yields unique values even across threads.
try:
    import thread
except ImportError:
    import dummy_thread as thread
_counter_lock = thread.allocate_lock()
del thread

# Monotonically increasing counter folded into each boundary string.
_counter = 0
def _get_next_counter():
    """Return the next counter value, incrementing it under the lock."""
    global _counter
    _counter_lock.acquire()
    _counter += 1
    result = _counter
    _counter_lock.release()
    return result
+
_prefix = None  # cached "hostid.uid.pid" part of the boundary, built lazily

def choose_boundary():
    """Return a string usable as a multipart boundary.

    The string chosen is unique within a single program run, and
    incorporates the user id (if available), process id (if available),
    and current time.  So it's very unlikely the returned string appears
    in message text, but there's no guarantee.

    The boundary contains dots so you have to quote it in the header."""

    global _prefix
    import time
    if _prefix is None:
        import socket
        # The host's IP distinguishes machines; gethostbyname may block
        # on a DNS lookup the first time through.
        hostid = socket.gethostbyname(socket.gethostname())
        try:
            uid = repr(os.getuid())
        except AttributeError:
            # Platform without getuid() (e.g. Windows).
            uid = '1'
        try:
            pid = repr(os.getpid())
        except AttributeError:
            # Platform without getpid().
            pid = '1'
        _prefix = hostid + '.' + uid + '.' + pid
    return "%s.%.3f.%d" % (_prefix, time.time(), _get_next_counter())
+
+
+# Subroutines for decoding some common content-transfer-types
+
def decode(input, output, encoding):
    """Decode common content-transfer-encodings (base64, quopri, uuencode).

    Reads encoded data from file object *input* and writes the decoded
    payload to file object *output*.  Encodings not handled natively
    fall back to the external-command table `decodetab`; a ValueError
    is raised for encodings unknown there as well.
    """
    if encoding == 'base64':
        import base64
        return base64.decode(input, output)
    if encoding == 'quoted-printable':
        import quopri
        return quopri.decode(input, output)
    if encoding in ('uuencode', 'x-uuencode', 'uue', 'x-uue'):
        import uu
        return uu.decode(input, output)
    if encoding in ('7bit', '8bit'):
        # Identity encodings: pass the payload through unchanged.
        return output.write(input.read())
    if encoding in decodetab:
        pipethrough(input, decodetab[encoding], output)
    else:
        # Callable form works on Python 2.4 and 3.x alike; the previous
        # `raise ValueError, msg` tuple syntax is Python-2-only.
        raise ValueError('unknown Content-Transfer-Encoding: %s' % encoding)
+
def encode(input, output, encoding):
    """Encode common content-transfer-encodings (base64, quopri, uuencode).

    Reads raw data from file object *input* and writes the encoded form
    to file object *output*.  Encodings not handled natively fall back
    to the external-command table `encodetab`; a ValueError is raised
    for encodings unknown there as well.
    """
    if encoding == 'base64':
        import base64
        return base64.encode(input, output)
    if encoding == 'quoted-printable':
        import quopri
        # Third argument 0: do not quote tabs/spaces.
        return quopri.encode(input, output, 0)
    if encoding in ('uuencode', 'x-uuencode', 'uue', 'x-uue'):
        import uu
        return uu.encode(input, output)
    if encoding in ('7bit', '8bit'):
        # Identity encodings: pass the payload through unchanged.
        return output.write(input.read())
    if encoding in encodetab:
        pipethrough(input, encodetab[encoding], output)
    else:
        # Callable form works on Python 2.4 and 3.x alike; the previous
        # `raise ValueError, msg` tuple syntax is Python-2-only.
        raise ValueError('unknown Content-Transfer-Encoding: %s' % encoding)
+
# The following is no longer used for standard encodings

# XXX This requires that uudecode and mmencode are in $PATH

# Shell pipeline that uudecodes into a throwaway temp file: the sed
# command rewrites the "begin" line so the embedded target filename in
# the uuencoded data is ignored, then the decoded bytes are cat'ed back.
uudecode_pipe = '''(
TEMP=/tmp/@uu.$$
sed "s%^begin [0-7][0-7]* .*%begin 600 $TEMP%" | uudecode
cat $TEMP
rm $TEMP
)'''

# External decode commands used by decode()'s fallback path, keyed by
# Content-Transfer-Encoding value.
decodetab = {
        'uuencode':             uudecode_pipe,
        'x-uuencode':           uudecode_pipe,
        'uue':                  uudecode_pipe,
        'x-uue':                uudecode_pipe,
        'quoted-printable':     'mmencode -u -q',
        'base64':               'mmencode -u -b',
}

# External encode commands (inverse direction of decodetab).
encodetab = {
        'x-uuencode':           'uuencode tempfile',
        'uuencode':             'uuencode tempfile',
        'x-uue':                'uuencode tempfile',
        'uue':                  'uuencode tempfile',
        'quoted-printable':     'mmencode -q',
        'base64':               'mmencode -b',
}
+
def pipeto(input, command):
    """Copy *input* line by line into shell *command*'s standard input.

    NOTE(review): *command* is handed to a shell via os.popen, so
    callers must never pass untrusted strings.
    """
    pipe = os.popen(command, 'w')
    copyliteral(input, pipe)
    pipe.close()
+
def pipethrough(input, command, output):
    """Filter *input* through shell *command*, writing its stdout to *output*.

    The input is first staged in a temporary file which is fed to the
    command via shell redirection.  The temp file is now removed even if
    copying or the command fails (previously an exception leaked it).

    NOTE(review): *command* is handed to a shell via os.popen, so
    callers must never pass untrusted strings.
    """
    (fd, tempname) = tempfile.mkstemp()
    try:
        temp = os.fdopen(fd, 'w')
        copyliteral(input, temp)
        temp.close()
        pipe = os.popen(command + ' <' + tempname, 'r')
        copybinary(pipe, output)
        pipe.close()
    finally:
        os.unlink(tempname)
+
def copyliteral(input, output):
    """Copy *input* to *output* line by line until EOF."""
    line = input.readline()
    while line:
        output.write(line)
        line = input.readline()
+
def copybinary(input, output):
    """Copy *input* to *output* in fixed-size chunks until EOF."""
    BUFSIZE = 8192
    chunk = input.read(BUFSIZE)
    while chunk:
        output.write(chunk)
        chunk = input.read(BUFSIZE)
diff --git a/depot_tools/release/win/python_24/Lib/mimetypes.py b/depot_tools/release/win/python_24/Lib/mimetypes.py
new file mode 100644
index 0000000..88b9e2f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/mimetypes.py
@@ -0,0 +1,518 @@
+"""Guess the MIME type of a file.
+
+This module defines two useful functions:
+
+guess_type(url, strict=1) -- guess the MIME type and encoding of a URL.
+
+guess_extension(type, strict=1) -- guess the extension for a given MIME type.
+
+It also contains the following, for tuning the behavior:
+
+Data:
+
+knownfiles -- list of files to parse
+inited -- flag set when init() has been called
+suffix_map -- dictionary mapping suffixes to suffixes
+encodings_map -- dictionary mapping suffixes to encodings
+types_map -- dictionary mapping suffixes to types
+
+Functions:
+
+init([files]) -- parse a list of files, default knownfiles
+read_mime_types(file) -- parse one file, return a dictionary or None
+"""
+
+import os
+import posixpath
+import urllib
+
+__all__ = [
+    "guess_type","guess_extension","guess_all_extensions",
+    "add_type","read_mime_types","init"
+]
+
# Candidate mime.types files consulted by init(); paths that do not
# exist are simply skipped.  These are Unix locations, so on Windows
# (where this copy ships) normally none of them will be found.
knownfiles = [
    "/etc/mime.types",
    "/usr/local/etc/httpd/conf/mime.types",
    "/usr/local/lib/netscape/mime.types",
    "/usr/local/etc/httpd/conf/mime.types",     # Apache 1.2
    "/usr/local/etc/mime.types",                # Apache 1.3
    ]

# True once init() has populated the module-level maps and rebound the
# module-level convenience functions to a default MimeTypes instance.
inited = False
+
+
class MimeTypes:
    """MIME-types datastore.

    This datastore can handle information from mime.types-style files
    and supports basic determination of MIME type from a filename or
    URL, and can guess a reasonable extension given a MIME type.
    """

    def __init__(self, filenames=(), strict=True):
        # Bootstrap from the module-level default tables (building them
        # first if needed), then layer any caller-supplied files on top.
        if not inited:
            init()
        self.encodings_map = encodings_map.copy()
        self.suffix_map = suffix_map.copy()
        # Index True holds standard types, index False the non-standard
        # ("common") ones; guess_type() consults True first.
        self.types_map = ({}, {}) # dict for (non-strict, strict)
        self.types_map_inv = ({}, {})
        for (ext, type) in types_map.items():
            self.add_type(type, ext, True)
        for (ext, type) in common_types.items():
            self.add_type(type, ext, False)
        for name in filenames:
            self.read(name, strict)

    def add_type(self, type, ext, strict=True):
        """Add a mapping between a type and an extension.

        When the extension is already known, the new
        type will replace the old one. When the type
        is already known the extension will be added
        to the list of known extensions.

        If strict is true, information will be added to
        list of standard types, else to the list of non-standard
        types.
        """
        self.types_map[strict][ext] = type
        # Maintain the inverse map (type -> extension list) used by
        # guess_all_extensions(); the first extension added stays first.
        exts = self.types_map_inv[strict].setdefault(type, [])
        if ext not in exts:
            exts.append(ext)

    def guess_type(self, url, strict=True):
        """Guess the type of a file based on its URL.

        Return value is a tuple (type, encoding) where type is None if
        the type can't be guessed (no or unknown suffix) or a string
        of the form type/subtype, usable for a MIME Content-type
        header; and encoding is None for no encoding or the name of
        the program used to encode (e.g. compress or gzip).  The
        mappings are table driven.  Encoding suffixes are case
        sensitive; type suffixes are first tried case sensitive, then
        case insensitive.

        The suffixes .tgz, .taz and .tz (case sensitive!) are all
        mapped to '.tar.gz'.  (This is table-driven too, using the
        dictionary suffix_map.)

        Optional `strict' argument when False adds a bunch of commonly found,
        but non-standard types.
        """
        scheme, url = urllib.splittype(url)
        if scheme == 'data':
            # syntax of data URLs:
            # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
            # mediatype := [ type "/" subtype ] *( ";" parameter )
            # data      := *urlchar
            # parameter := attribute "=" value
            # type/subtype defaults to "text/plain"
            comma = url.find(',')
            if comma < 0:
                # bad data URL
                return None, None
            semi = url.find(';', 0, comma)
            if semi >= 0:
                type = url[:semi]
            else:
                type = url[:comma]
            if '=' in type or '/' not in type:
                type = 'text/plain'
            return type, None           # never compressed, so encoding is None
        # Collapse compound suffixes (e.g. '.tgz' -> '.tar.gz') before
        # peeling off a possible encoding suffix ('.gz', '.Z').
        base, ext = posixpath.splitext(url)
        while ext in self.suffix_map:
            base, ext = posixpath.splitext(base + self.suffix_map[ext])
        if ext in self.encodings_map:
            encoding = self.encodings_map[ext]
            base, ext = posixpath.splitext(base)
        else:
            encoding = None
        # Standard types first (exact case, then lower-case); only fall
        # through to the non-standard table when strict is false.
        types_map = self.types_map[True]
        if ext in types_map:
            return types_map[ext], encoding
        elif ext.lower() in types_map:
            return types_map[ext.lower()], encoding
        elif strict:
            return None, encoding
        types_map = self.types_map[False]
        if ext in types_map:
            return types_map[ext], encoding
        elif ext.lower() in types_map:
            return types_map[ext.lower()], encoding
        else:
            return None, encoding

    def guess_all_extensions(self, type, strict=True):
        """Guess the extensions for a file based on its MIME type.

        Return value is a list of strings giving the possible filename
        extensions, including the leading dot ('.').  The extension is not
        guaranteed to have been associated with any particular data stream,
        but would be mapped to the MIME type `type' by guess_type().  An
        empty list is returned when nothing is known about `type'.

        Optional `strict' argument when false adds a bunch of commonly found,
        but non-standard types.
        """
        type = type.lower()
        extensions = self.types_map_inv[True].get(type, [])
        if not strict:
            # Append non-standard extensions, avoiding duplicates while
            # keeping the standard ones first.
            for ext in self.types_map_inv[False].get(type, []):
                if ext not in extensions:
                    extensions.append(ext)
        return extensions

    def guess_extension(self, type, strict=True):
        """Guess the extension for a file based on its MIME type.

        Return value is a string giving a filename extension,
        including the leading dot ('.').  The extension is not
        guaranteed to have been associated with any particular data
        stream, but would be mapped to the MIME type `type' by
        guess_type().  If no extension can be guessed for `type', None
        is returned.

        Optional `strict' argument when false adds a bunch of commonly found,
        but non-standard types.
        """
        extensions = self.guess_all_extensions(type, strict)
        if not extensions:
            return None
        return extensions[0]

    def read(self, filename, strict=True):
        """
        Read a single mime.types-format file, specified by pathname.

        If strict is true, information will be added to
        list of standard types, else to the list of non-standard
        types.
        """
        fp = open(filename)
        self.readfp(fp, strict)
        fp.close()

    def readfp(self, fp, strict=True):
        """
        Read a single mime.types-format file.

        Each non-blank, non-comment line has the form
        "type suffix suffix ..."; a '#' word starts a trailing comment.

        If strict is true, information will be added to
        list of standard types, else to the list of non-standard
        types.
        """
        while 1:
            line = fp.readline()
            if not line:
                break
            words = line.split()
            # Drop everything from the first word that starts with '#'.
            for i in range(len(words)):
                if words[i][0] == '#':
                    del words[i:]
                    break
            if not words:
                continue
            type, suffixes = words[0], words[1:]
            for suff in suffixes:
                self.add_type(type, '.' + suff, strict)
+
def guess_type(url, strict=True):
    """Guess the type of a file based on its URL.

    Return value is a tuple (type, encoding) where type is None if the
    type can't be guessed (no or unknown suffix) or a string of the
    form type/subtype, usable for a MIME Content-type header; and
    encoding is None for no encoding or the name of the program used
    to encode (e.g. compress or gzip).  The mappings are table
    driven.  Encoding suffixes are case sensitive; type suffixes are
    first tried case sensitive, then case insensitive.

    The suffixes .tgz, .taz and .tz (case sensitive!) are all mapped
    to ".tar.gz".  (This is table-driven too, using the dictionary
    suffix_map).

    Optional `strict' argument when false adds a bunch of commonly found, but
    non-standard types.
    """
    init()
    # Not actually recursive: init() rebinds the module-level name
    # `guess_type` to the bound method of a default MimeTypes instance,
    # so the call below dispatches to that method.  Subsequent callers
    # bypass this stub entirely.
    return guess_type(url, strict)
+
+
def guess_all_extensions(type, strict=True):
    """Guess the extensions for a file based on its MIME type.

    Return value is a list of strings giving the possible filename
    extensions, including the leading dot ('.').  The extension is not
    guaranteed to have been associated with any particular data
    stream, but would be mapped to the MIME type `type' by
    guess_type().  An empty list is returned when nothing is known
    about `type'.

    Optional `strict' argument when false adds a bunch of commonly found,
    but non-standard types.
    """
    init()
    # Not actually recursive: init() rebinds this module-level name to a
    # bound method of a default MimeTypes instance (see init()).
    return guess_all_extensions(type, strict)
+
def guess_extension(type, strict=True):
    """Guess the extension for a file based on its MIME type.

    Return value is a string giving a filename extension, including the
    leading dot ('.').  The extension is not guaranteed to have been
    associated with any particular data stream, but would be mapped to the
    MIME type `type' by guess_type().  If no extension can be guessed for
    `type', None is returned.

    Optional `strict' argument when false adds a bunch of commonly found,
    but non-standard types.
    """
    init()
    # Not actually recursive: init() rebinds this module-level name to a
    # bound method of a default MimeTypes instance (see init()).
    return guess_extension(type, strict)
+
def add_type(type, ext, strict=True):
    """Add a mapping between a type and an extension.

    When the extension is already known, the new
    type will replace the old one. When the type
    is already known the extension will be added
    to the list of known extensions.

    If strict is true, information will be added to
    list of standard types, else to the list of non-standard
    types.
    """
    init()
    # Not actually recursive: init() rebinds this module-level name to a
    # bound method of a default MimeTypes instance (see init()).
    return add_type(type, ext, strict)
+
+
def init(files=None):
    """(Re)build the module-level tables from `files` (default: knownfiles)
    and install a default MimeTypes instance's bound methods as the
    module-level API functions (guess_type, guess_extension, ...)."""
    global guess_all_extensions, guess_extension, guess_type
    global suffix_map, types_map, encodings_map, common_types
    global add_type, inited
    # Set the flag before constructing MimeTypes(), whose __init__ would
    # otherwise call init() again.
    inited = True
    db = MimeTypes()
    if files is None:
        files = knownfiles
    for file in files:
        if os.path.isfile(file):
            db.readfp(open(file))
    # Re-export the instance's tables and bound methods at module level;
    # the stub functions above are shadowed from this point on.
    encodings_map = db.encodings_map
    suffix_map = db.suffix_map
    types_map = db.types_map[True]
    guess_all_extensions = db.guess_all_extensions
    guess_extension = db.guess_extension
    guess_type = db.guess_type
    add_type = db.add_type
    common_types = db.types_map[False]
+
+
def read_mime_types(file):
    """Parse one mime.types-style file given by pathname.

    Return a dictionary mapping extensions (with leading dot) to type
    strings, or None if the file cannot be opened.  The file handle is
    now closed deterministically, even if parsing fails (it previously
    leaked).
    """
    try:
        f = open(file)
    except IOError:
        return None
    try:
        db = MimeTypes()
        db.readfp(f, True)
    finally:
        f.close()
    return db.types_map[True]
+
+
# Compound suffixes collapsed before type lookup (e.g. 'x.tgz' is
# treated as 'x.tar.gz').
suffix_map = {
    '.tgz': '.tar.gz',
    '.taz': '.tar.gz',
    '.tz': '.tar.gz',
    }

# Suffixes that denote a content *encoding* rather than a type; they
# are stripped and reported as the second element of guess_type()'s
# result.
encodings_map = {
    '.gz': 'gzip',
    '.Z': 'compress',
    }
+
+# Before adding new types, make sure they are either registered with IANA, at
+# http://www.isi.edu/in-notes/iana/assignments/media-types
+# or extensions, i.e. using the x- prefix
+
+# If you add to these, please keep them sorted!
# Default extension -> standard MIME type table.  The previous version
# contained literal duplicate keys for '.cdf' and '.xls'; a duplicate
# key in a dict literal is silently discarded, so only the later entry
# ever took effect and the dead entries have been removed.
types_map = {
    '.a'      : 'application/octet-stream',
    '.ai'     : 'application/postscript',
    '.aif'    : 'audio/x-aiff',
    '.aifc'   : 'audio/x-aiff',
    '.aiff'   : 'audio/x-aiff',
    '.au'     : 'audio/basic',
    '.avi'    : 'video/x-msvideo',
    '.bat'    : 'text/plain',
    '.bcpio'  : 'application/x-bcpio',
    '.bin'    : 'application/octet-stream',
    '.bmp'    : 'image/x-ms-bmp',
    '.c'      : 'text/plain',
    '.cdf'    : 'application/x-netcdf',
    '.cpio'   : 'application/x-cpio',
    '.csh'    : 'application/x-csh',
    '.css'    : 'text/css',
    '.dll'    : 'application/octet-stream',
    '.doc'    : 'application/msword',
    '.dot'    : 'application/msword',
    '.dvi'    : 'application/x-dvi',
    '.eml'    : 'message/rfc822',
    '.eps'    : 'application/postscript',
    '.etx'    : 'text/x-setext',
    '.exe'    : 'application/octet-stream',
    '.gif'    : 'image/gif',
    '.gtar'   : 'application/x-gtar',
    '.h'      : 'text/plain',
    '.hdf'    : 'application/x-hdf',
    '.htm'    : 'text/html',
    '.html'   : 'text/html',
    '.ief'    : 'image/ief',
    '.jpe'    : 'image/jpeg',
    '.jpeg'   : 'image/jpeg',
    '.jpg'    : 'image/jpeg',
    '.js'     : 'application/x-javascript',
    '.ksh'    : 'text/plain',
    '.latex'  : 'application/x-latex',
    '.m1v'    : 'video/mpeg',
    '.man'    : 'application/x-troff-man',
    '.me'     : 'application/x-troff-me',
    '.mht'    : 'message/rfc822',
    '.mhtml'  : 'message/rfc822',
    '.mif'    : 'application/x-mif',
    '.mov'    : 'video/quicktime',
    '.movie'  : 'video/x-sgi-movie',
    '.mp2'    : 'audio/mpeg',
    '.mp3'    : 'audio/mpeg',
    '.mpa'    : 'video/mpeg',
    '.mpe'    : 'video/mpeg',
    '.mpeg'   : 'video/mpeg',
    '.mpg'    : 'video/mpeg',
    '.ms'     : 'application/x-troff-ms',
    '.nc'     : 'application/x-netcdf',
    '.nws'    : 'message/rfc822',
    '.o'      : 'application/octet-stream',
    '.obj'    : 'application/octet-stream',
    '.oda'    : 'application/oda',
    '.p12'    : 'application/x-pkcs12',
    '.p7c'    : 'application/pkcs7-mime',
    '.pbm'    : 'image/x-portable-bitmap',
    '.pdf'    : 'application/pdf',
    '.pfx'    : 'application/x-pkcs12',
    '.pgm'    : 'image/x-portable-graymap',
    '.pl'     : 'text/plain',
    '.png'    : 'image/png',
    '.pnm'    : 'image/x-portable-anymap',
    '.pot'    : 'application/vnd.ms-powerpoint',
    '.ppa'    : 'application/vnd.ms-powerpoint',
    '.ppm'    : 'image/x-portable-pixmap',
    '.pps'    : 'application/vnd.ms-powerpoint',
    '.ppt'    : 'application/vnd.ms-powerpoint',
    '.ps'     : 'application/postscript',
    '.pwz'    : 'application/vnd.ms-powerpoint',
    '.py'     : 'text/x-python',
    '.pyc'    : 'application/x-python-code',
    '.pyo'    : 'application/x-python-code',
    '.qt'     : 'video/quicktime',
    '.ra'     : 'audio/x-pn-realaudio',
    '.ram'    : 'application/x-pn-realaudio',
    '.ras'    : 'image/x-cmu-raster',
    '.rdf'    : 'application/xml',
    '.rgb'    : 'image/x-rgb',
    '.roff'   : 'application/x-troff',
    '.rtx'    : 'text/richtext',
    '.sgm'    : 'text/x-sgml',
    '.sgml'   : 'text/x-sgml',
    '.sh'     : 'application/x-sh',
    '.shar'   : 'application/x-shar',
    '.snd'    : 'audio/basic',
    '.so'     : 'application/octet-stream',
    '.src'    : 'application/x-wais-source',
    '.sv4cpio': 'application/x-sv4cpio',
    '.sv4crc' : 'application/x-sv4crc',
    '.swf'    : 'application/x-shockwave-flash',
    '.t'      : 'application/x-troff',
    '.tar'    : 'application/x-tar',
    '.tcl'    : 'application/x-tcl',
    '.tex'    : 'application/x-tex',
    '.texi'   : 'application/x-texinfo',
    '.texinfo': 'application/x-texinfo',
    '.tif'    : 'image/tiff',
    '.tiff'   : 'image/tiff',
    '.tr'     : 'application/x-troff',
    '.tsv'    : 'text/tab-separated-values',
    '.txt'    : 'text/plain',
    '.ustar'  : 'application/x-ustar',
    '.vcf'    : 'text/x-vcard',
    '.wav'    : 'audio/x-wav',
    '.wiz'    : 'application/msword',
    '.xbm'    : 'image/x-xbitmap',
    '.xlb'    : 'application/vnd.ms-excel',
    '.xls'    : 'application/vnd.ms-excel',
    '.xml'    : 'text/xml',
    '.xpm'    : 'image/x-xpixmap',
    '.xsl'    : 'application/xml',
    '.xwd'    : 'image/x-xwindowdump',
    '.zip'    : 'application/zip',
    }
+
+# These are non-standard types, commonly found in the wild.  They will only
+# match if strict=0 flag is given to the API methods.
+
+# Please sort these too
common_types = {
    '.jpg' : 'image/jpg',    # non-standard spelling of image/jpeg
    '.mid' : 'audio/midi',
    '.midi': 'audio/midi',
    '.pct' : 'image/pict',
    '.pic' : 'image/pict',
    '.pict': 'image/pict',
    '.rtf' : 'application/rtf',
    '.xul' : 'text/xul'
    }
+
+
if __name__ == '__main__':
    # Command-line interface: map each argument to a MIME type (or, with
    # -e, to an extension) using the tables above.
    import sys
    import getopt

    USAGE = """\
Usage: mimetypes.py [options] type

Options:
    --help / -h       -- print this message and exit
    --lenient / -l    -- additionally search of some common, but non-standard
                         types.
    --extension / -e  -- guess extension instead of type

More than one type argument may be given.
"""

    def usage(code, msg=''):
        # Print the usage text (and an optional error), then exit.
        print USAGE
        if msg: print msg
        sys.exit(code)

    try:
        opts, args = getopt.getopt(sys.argv[1:], 'hle',
                                   ['help', 'lenient', 'extension'])
    except getopt.error, msg:
        usage(1, msg)

    # strict=1 limits lookups to the standard table; -l relaxes that.
    strict = 1
    extension = 0
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            usage(0)
        elif opt in ('-l', '--lenient'):
            strict = 0
        elif opt in ('-e', '--extension'):
            extension = 1
    for gtype in args:
        if extension:
            guess = guess_extension(gtype, strict)
            if not guess: print "I don't know anything about type", gtype
            else: print guess
        else:
            guess, encoding = guess_type(gtype, strict)
            if not guess: print "I don't know anything about type", gtype
            else: print 'type:', guess, 'encoding:', encoding
diff --git a/depot_tools/release/win/python_24/Lib/mimify.py b/depot_tools/release/win/python_24/Lib/mimify.py
new file mode 100644
index 0000000..b6f6143
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/mimify.py
@@ -0,0 +1,464 @@
+#! /usr/bin/env python
+
+"""Mimification and unmimification of mail messages.
+
+Decode quoted-printable parts of a mail message or encode using
+quoted-printable.
+
+Usage:
+        mimify(input, output)
+        unmimify(input, output, decode_base64 = 0)
+to encode and decode respectively.  Input and output may be the name
+of a file or an open file object.  Only a readline() method is used
+on the input file, only a write() method is used on the output file.
+When using file names, the input and output file names may be the
+same.
+
+Interactive usage:
+        mimify.py -e [infile [outfile]]
+        mimify.py -d [infile [outfile]]
+to encode and decode respectively.  Infile defaults to standard
+input and outfile to standard output.
+"""
+
+# Configure
+MAXLEN = 200    # if lines longer than this, encode as quoted-printable
+CHARSET = 'ISO-8859-1'  # default charset for non-US-ASCII mail
+QUOTE = '> '            # string replies are quoted with
+# End configure
+
+import re
+
+__all__ = ["mimify","unmimify","mime_encode_header","mime_decode_header"]
+
# Header/body patterns (case-insensitive, as mail headers require):
qp = re.compile('^content-transfer-encoding:\\s*quoted-printable', re.I)    # QP part marker
base64_re = re.compile('^content-transfer-encoding:\\s*base64', re.I)       # base64 part marker
# Multipart Content-Type header; group 1 captures the boundary string.
mp = re.compile('^content-type:.*multipart/.*boundary="?([^;"\n]*)', re.I|re.S)
# Content-Type charset declaration; the three groups allow in-place rewriting.
chrset = re.compile('^(content-type:.*charset=")(us-ascii|iso-8859-[0-9]+)(".*)', re.I|re.S)
he = re.compile('^-*\n')                                                    # end-of-header line
mime_code = re.compile('=([0-9a-f][0-9a-f])', re.I)                         # one =XX QP escape
mime_head = re.compile('=\\?iso-8859-1\\?q\\?([^? \t\n]+)\\?=', re.I)       # RFC 2047 encoded-word
repl = re.compile('^subject:\\s+re: ', re.I)                                # Subject of a reply
+
class File:
    """A simple fake file object that knows about limited read-ahead and
    boundaries.  The only supported method is readline().

    When a boundary line (with or without the trailing '--') is read, it
    is stashed in self.peek and '' is returned from then on, simulating
    end-of-file for the current part.
    """

    def __init__(self, file, boundary):
        self.file = file
        self.boundary = boundary
        self.peek = None

    def readline(self):
        # Once a boundary has been seen, the part is exhausted.
        if self.peek is not None:
            return ''
        line = self.file.readline()
        if not line:
            return line
        if self.boundary and line in (self.boundary + '\n',
                                      self.boundary + '--\n'):
            self.peek = line
            return ''
        return line
+
class HeaderFile:
    """Readline wrapper that unfolds mail headers: a header line plus its
    whitespace-indented continuation lines come back as one string.  One
    line of read-ahead is kept in self.peek."""

    def __init__(self, file):
        self.file = file
        self.peek = None

    def readline(self):
        if self.peek is None:
            line = self.file.readline()
        else:
            line = self.peek
            self.peek = None
        if not line:
            return line
        if he.match(line):
            # End-of-header marker: return it as-is.
            return line
        # Absorb continuation lines (those starting with space or tab);
        # the first non-continuation line read ahead is kept in peek.
        while 1:
            self.peek = self.file.readline()
            if not self.peek or self.peek[0] not in ' \t':
                return line
            line = line + self.peek
            self.peek = None
+
def mime_decode(line):
    """Decode a single line of quoted-printable text to 8bit."""
    pieces = []
    pos = 0
    while 1:
        res = mime_code.search(line, pos)
        if res is None:
            break
        # Literal text up to the escape, then the decoded byte.
        pieces.append(line[pos:res.start(0)])
        pieces.append(chr(int(res.group(1), 16)))
        pos = res.end(0)
    pieces.append(line[pos:])
    return ''.join(pieces)
+
def mime_decode_header(line):
    """Decode a header line to 8bit."""
    pieces = []
    pos = 0
    while 1:
        res = mime_head.search(line, pos)
        if res is None:
            break
        word = res.group(1)
        # Underscores encode spaces; convert them before =XX decoding.
        word = word.replace('_', ' ')
        pieces.append(line[pos:res.start(0)])
        pieces.append(mime_decode(word))
        pos = res.end(0)
    pieces.append(line[pos:])
    return ''.join(pieces)
+
def unmimify_part(ifile, ofile, decode_base64 = 0):
    """Convert a quoted-printable part of a MIME mail message to 8bit.

    ifile is a File wrapper bounded by the part's boundary (or None for
    the whole message); decoded output is written to ofile.  Nested
    multipart parts are handled by recursing with a new File wrapper.
    """
    multipart = None            # '--boundary' string once a multipart header is seen
    quoted_printable = 0        # this part is quoted-printable encoded
    is_base64 = 0               # this part is base64 encoded (only if decode_base64)
    is_repl = 0                 # message looks like a quoted reply
    # If the surrounding boundary itself is quoted, strip/re-add QUOTE
    # on each line of this part.
    if ifile.boundary and ifile.boundary[:2] == QUOTE:
        prefix = QUOTE
    else:
        prefix = ''

    # read header
    hfile = HeaderFile(ifile)
    while 1:
        line = hfile.readline()
        if not line:
            return
        if prefix and line[:len(prefix)] == prefix:
            line = line[len(prefix):]
            pref = prefix
        else:
            pref = ''
        line = mime_decode_header(line)
        if qp.match(line):
            quoted_printable = 1
            continue        # skip this header
        if decode_base64 and base64_re.match(line):
            is_base64 = 1
            continue
        ofile.write(pref + line)
        if not prefix and repl.match(line):
            # we're dealing with a reply message
            is_repl = 1
        mp_res = mp.match(line)
        if mp_res:
            multipart = '--' + mp_res.group(1)
        if he.match(line):
            # Blank/dash line ends the header section.
            break
    if is_repl and (quoted_printable or multipart):
        is_repl = 0

    # read body
    while 1:
        line = ifile.readline()
        if not line:
            return
        # Decode any RFC 2047 encoded-words embedded in body text.
        line = re.sub(mime_head, '\\1', line)
        if prefix and line[:len(prefix)] == prefix:
            line = line[len(prefix):]
            pref = prefix
        else:
            pref = ''
##              if is_repl and len(line) >= 4 and line[:4] == QUOTE+'--' and line[-3:] != '--\n':
##                      multipart = line[:-1]
        while multipart:
            if line == multipart + '--\n':
                # Final boundary: this multipart is finished.
                ofile.write(pref + line)
                multipart = None
                line = None
                break
            if line == multipart + '\n':
                # Interior boundary: recurse into the next sub-part.
                ofile.write(pref + line)
                nifile = File(ifile, multipart)
                unmimify_part(nifile, ofile, decode_base64)
                line = nifile.peek
                if not line:
                    # premature end of file
                    break
                continue
            # not a boundary between parts
            break
        if line and quoted_printable:
            # Rejoin soft-wrapped lines (trailing '=') before decoding.
            while line[-2:] == '=\n':
                line = line[:-2]
                newline = ifile.readline()
                if newline[:len(QUOTE)] == QUOTE:
                    newline = newline[len(QUOTE):]
                line = line + newline
            line = mime_decode(line)
        if line and is_base64 and not pref:
            import base64
            line = base64.decodestring(line)
        if line:
            ofile.write(pref + line)
+
+def unmimify(infile, outfile, decode_base64 = 0):
+    """Convert quoted-printable parts of a MIME mail message to 8bit.
+
+    infile and outfile may each be an open file object or a path string.
+    When both are equal path strings, the input file is first renamed to
+    ',<name>' in the same directory so the output can safely overwrite
+    the original.  If decode_base64 is true, base64 parts are decoded too.
+    """
+    if type(infile) == type(''):
+        ifile = open(infile)
+        if type(outfile) == type('') and infile == outfile:
+            import os
+            d, f = os.path.split(infile)
+            # keep the original as a ',<file>' backup before overwriting in place
+            os.rename(infile, os.path.join(d, ',' + f))
+    else:
+        ifile = infile
+    if type(outfile) == type(''):
+        ofile = open(outfile, 'w')
+    else:
+        ofile = outfile
+    # wrap input so nested multipart handling can peek/push back boundary lines
+    nifile = File(ifile, None)
+    unmimify_part(nifile, ofile, decode_base64)
+    ofile.flush()
+
+mime_char = re.compile('[=\177-\377]') # quote these chars in body
+mime_header_char = re.compile('[=?\177-\377]') # quote these in header ('?' is also special there)
+
+def mime_encode(line, header):
+    """Code a single line as quoted-printable.
+    If header is set, quote some extra characters."""
+    if header:
+        reg = mime_header_char
+    else:
+        reg = mime_char
+    newline = ''
+    pos = 0
+    if len(line) >= 5 and line[:5] == 'From ':
+        # quote 'From ' at the start of a line for stupid mailers
+        newline = ('=%02x' % ord('F')).upper()
+        pos = 1
+    # replace every special character with its =XX hex escape
+    while 1:
+        res = reg.search(line, pos)
+        if res is None:
+            break
+        newline = newline + line[pos:res.start(0)] + \
+                  ('=%02x' % ord(res.group(0))).upper()
+        pos = res.end(0)
+    line = newline + line[pos:]
+
+    # soft-wrap long lines with a trailing '=' so no output line reaches 76 chars
+    newline = ''
+    while len(line) >= 75:
+        i = 73
+        # back up so we never split in the middle of an =XX escape sequence
+        while line[i] == '=' or line[i-1] == '=':
+            i = i - 1
+        i = i + 1
+        newline = newline + line[:i] + '=\n'
+        line = line[i:]
+    return newline + line
+
+# a word containing 8bit characters in a header line, with its leading delimiter
+mime_header = re.compile('([ \t(]|^)([-a-zA-Z0-9_+]*[\177-\377][-a-zA-Z0-9_+\177-\377]*)(?=[ \t)]|\n)')
+
+def mime_encode_header(line):
+    """Code a single header line as quoted-printable."""
+    newline = ''
+    pos = 0
+    while 1:
+        res = mime_header.search(line, pos)
+        if res is None:
+            break
+        # wrap each 8bit word in an encoded-word: =?charset?Q?...?=
+        newline = '%s%s%s=?%s?Q?%s?=' % \
+                  (newline, line[pos:res.start(0)], res.group(1),
+                   CHARSET, mime_encode(res.group(2), 1))
+        pos = res.end(0)
+    return newline + line[pos:]
+
+mv = re.compile('^mime-version:', re.I)
+cte = re.compile('^content-transfer-encoding:', re.I)
+iso_char = re.compile('[\177-\377]')  # any 8bit (non-ASCII) byte
+
+def mimify_part(ifile, ofile, is_mime):
+    """Convert an 8bit part of a MIME mail message to quoted-printable.
+
+    Reads one message part (header plus body) from ifile, decides whether
+    the header and/or body need quoting (8bit bytes present, or body lines
+    longer than MAXLEN), then writes the converted part to ofile.  Recurses
+    into nested multipart parts via File/mimify_part.  is_mime tells
+    whether a Mime-Version header was already seen by the caller.
+    """
+    # state gathered while scanning the header
+    has_cte = is_qp = is_base64 = 0
+    multipart = None
+    must_quote_body = must_quote_header = has_iso_chars = 0
+
+    header = []
+    header_end = ''
+    message = []
+    message_end = ''
+    # read header
+    hfile = HeaderFile(ifile)
+    while 1:
+        line = hfile.readline()
+        if not line:
+            break
+        if not must_quote_header and iso_char.search(line):
+            must_quote_header = 1
+        if mv.match(line):
+            is_mime = 1
+        if cte.match(line):
+            has_cte = 1
+            if qp.match(line):
+                is_qp = 1
+            elif base64_re.match(line):
+                is_base64 = 1
+        mp_res = mp.match(line)
+        if mp_res:
+            # remember the multipart boundary string, prefixed with '--'
+            multipart = '--' + mp_res.group(1)
+        if he.match(line):
+            header_end = line
+            break
+        header.append(line)
+
+    # read body
+    while 1:
+        line = ifile.readline()
+        if not line:
+            break
+        if multipart:
+            # stop at a part boundary (either separator or terminator)
+            if line == multipart + '--\n':
+                message_end = line
+                break
+            if line == multipart + '\n':
+                message_end = line
+                break
+        if is_base64:
+            # base64 bodies are copied through untouched
+            message.append(line)
+            continue
+        if is_qp:
+            # undo existing quoted-printable coding, joining soft-wrapped lines
+            while line[-2:] == '=\n':
+                line = line[:-2]
+                newline = ifile.readline()
+                if newline[:len(QUOTE)] == QUOTE:
+                    newline = newline[len(QUOTE):]
+                line = line + newline
+            line = mime_decode(line)
+        message.append(line)
+        if not has_iso_chars:
+            if iso_char.search(line):
+                has_iso_chars = must_quote_body = 1
+        if not must_quote_body:
+            if len(line) > MAXLEN:
+                must_quote_body = 1
+
+    # convert and output header and body
+    for line in header:
+        if must_quote_header:
+            line = mime_encode_header(line)
+        chrset_res = chrset.match(line)
+        if chrset_res:
+            if has_iso_chars:
+                # change us-ascii into iso-8859-1
+                if chrset_res.group(2).lower() == 'us-ascii':
+                    line = '%s%s%s' % (chrset_res.group(1),
+                                       CHARSET,
+                                       chrset_res.group(3))
+            else:
+                # change iso-8859-* into us-ascii
+                line = '%sus-ascii%s' % chrset_res.group(1, 3)
+        if has_cte and cte.match(line):
+            # rewrite the Content-Transfer-Encoding header to match our output
+            line = 'Content-Transfer-Encoding: '
+            if is_base64:
+                line = line + 'base64\n'
+            elif must_quote_body:
+                line = line + 'quoted-printable\n'
+            else:
+                line = line + '7bit\n'
+        ofile.write(line)
+    if (must_quote_header or must_quote_body) and not is_mime:
+        # add the MIME headers this message was missing
+        ofile.write('Mime-Version: 1.0\n')
+        ofile.write('Content-Type: text/plain; ')
+        if has_iso_chars:
+            ofile.write('charset="%s"\n' % CHARSET)
+        else:
+            ofile.write('charset="us-ascii"\n')
+    if must_quote_body and not has_cte:
+        ofile.write('Content-Transfer-Encoding: quoted-printable\n')
+    ofile.write(header_end)
+
+    for line in message:
+        if must_quote_body:
+            line = mime_encode(line, 0)
+        ofile.write(line)
+    ofile.write(message_end)
+
+    # handle remaining sibling parts of a multipart message
+    line = message_end
+    while multipart:
+        if line == multipart + '--\n':
+            # read bit after the end of the last part
+            while 1:
+                line = ifile.readline()
+                if not line:
+                    return
+                if must_quote_body:
+                    line = mime_encode(line, 0)
+                ofile.write(line)
+        if line == multipart + '\n':
+            # recurse into the next nested part
+            nifile = File(ifile, multipart)
+            mimify_part(nifile, ofile, 1)
+            line = nifile.peek
+            if not line:
+                # premature end of file
+                break
+            ofile.write(line)
+            continue
+        # unexpectedly no multipart separator--copy rest of file
+        while 1:
+            line = ifile.readline()
+            if not line:
+                return
+            if must_quote_body:
+                line = mime_encode(line, 0)
+            ofile.write(line)
+
+def mimify(infile, outfile):
+    """Convert 8bit parts of a MIME mail message to quoted-printable.
+
+    infile and outfile may each be an open file object or a path string.
+    When both are equal path strings, the input file is first renamed to
+    ',<name>' in the same directory so the output can safely overwrite
+    the original.
+    """
+    if type(infile) == type(''):
+        ifile = open(infile)
+        if type(outfile) == type('') and infile == outfile:
+            import os
+            d, f = os.path.split(infile)
+            # keep the original as a ',<file>' backup before overwriting in place
+            os.rename(infile, os.path.join(d, ',' + f))
+    else:
+        ifile = infile
+    if type(outfile) == type(''):
+        ofile = open(outfile, 'w')
+    else:
+        ofile = outfile
+    # wrap input so nested multipart handling can peek/push back boundary lines
+    nifile = File(ifile, None)
+    mimify_part(nifile, ofile, 0)
+    ofile.flush()
+
+import sys
+# Command-line driver: runs when executed as a script (or imported as 'mimify'
+# from the command line).  -e encodes, -d decodes, -l sets the line-length
+# threshold, -b (with -d) also decodes base64 parts.
+if __name__ == '__main__' or (len(sys.argv) > 0 and sys.argv[0] == 'mimify'):
+    import getopt
+    usage = 'Usage: mimify [-l len] -[ed] [infile [outfile]]'
+
+    decode_base64 = 0
+    opts, args = getopt.getopt(sys.argv[1:], 'l:edb')
+    if len(args) not in (0, 1, 2):
+        print usage
+        sys.exit(1)
+    # exactly one of -e/-d is required; -b is only valid together with -d
+    if (('-e', '') in opts) == (('-d', '') in opts) or \
+       ((('-b', '') in opts) and (('-d', '') not in opts)):
+        print usage
+        sys.exit(1)
+    for o, a in opts:
+        if o == '-e':
+            encode = mimify
+        elif o == '-d':
+            encode = unmimify
+        elif o == '-l':
+            try:
+                MAXLEN = int(a)
+            except (ValueError, OverflowError):
+                print usage
+                sys.exit(1)
+        elif o == '-b':
+            decode_base64 = 1
+    # default to stdin/stdout when file arguments are omitted
+    if len(args) == 0:
+        encode_args = (sys.stdin, sys.stdout)
+    elif len(args) == 1:
+        encode_args = (args[0], sys.stdout)
+    else:
+        encode_args = (args[0], args[1])
+    if decode_base64:
+        encode_args = encode_args + (decode_base64,)
+    encode(*encode_args)
diff --git a/depot_tools/release/win/python_24/Lib/modulefinder.py b/depot_tools/release/win/python_24/Lib/modulefinder.py
new file mode 100644
index 0000000..25e14827
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/modulefinder.py
@@ -0,0 +1,595 @@
+"""Find modules used by a script, using introspection."""
+
+# This module should be kept compatible with Python 2.2, see PEP 291.
+
+import dis
+import imp
+import marshal
+import os
+import sys
+import new
+
+if hasattr(sys.__stdout__, "newlines"):
+    READ_MODE = "U"  # universal line endings
+else:
+    # remain compatible with Python  < 2.3
+    READ_MODE = "r"
+
+# Bytecode opcodes we care about when scanning compiled code for imports.
+LOAD_CONST = dis.opname.index('LOAD_CONST')
+IMPORT_NAME = dis.opname.index('IMPORT_NAME')
+STORE_NAME = dis.opname.index('STORE_NAME')
+STORE_GLOBAL = dis.opname.index('STORE_GLOBAL')
+STORE_OPS = [STORE_NAME, STORE_GLOBAL]
+
+# Modulefinder does a good job at simulating Python's import machinery,
+# but it can not handle __path__ modifications packages make at runtime.
+# Therefore there is a mechanism whereby you can register extra paths in
+# this map for a package, and it will be honored.
+
+# Note this is a mapping of package names to lists of paths.
+packagePathMap = {}
+
+# A Public interface
+def AddPackagePath(packagename, path):
+    paths = packagePathMap.get(packagename, [])
+    paths.append(path)
+    packagePathMap[packagename] = paths
+
+replacePackageMap = {}
+
+# This ReplacePackage mechanism allows modulefinder to work around the
+# way the _xmlplus package injects itself under the name "xml" into
+# sys.modules at runtime by calling ReplacePackage("_xmlplus", "xml")
+# before running ModuleFinder.
+
+def ReplacePackage(oldname, newname):
+    replacePackageMap[oldname] = newname
+
+
+class Module:
+    """A module discovered by ModuleFinder.
+
+    Mirrors the attributes of a real module object (__name__, __file__,
+    __path__, __code__) without ever importing the module.
+    """
+
+    def __init__(self, name, file=None, path=None):
+        self.__name__ = name
+        self.__file__ = file
+        # __path__ is a list for packages, None for plain modules
+        self.__path__ = path
+        self.__code__ = None
+        # The set of global names that are assigned to in the module.
+        # This includes those names imported through starimports of
+        # Python modules.
+        self.globalnames = {}
+        # The set of starimports this module did that could not be
+        # resolved, ie. a starimport from a non-Python module.
+        self.starimports = {}
+
+    def __repr__(self):
+        s = "Module(%r" % (self.__name__,)
+        if self.__file__ is not None:
+            s = s + ", %r" % (self.__file__,)
+        if self.__path__ is not None:
+            s = s + ", %r" % (self.__path__,)
+        s = s + ")"
+        return s
+
+class ModuleFinder:
+
+    def __init__(self, path=None, debug=0, excludes=[], replace_paths=[]):
+        if path is None:
+            path = sys.path
+        self.path = path
+        self.modules = {}
+        self.badmodules = {}
+        self.debug = debug
+        self.indent = 0
+        self.excludes = excludes
+        self.replace_paths = replace_paths
+        self.processed_paths = []   # Used in debugging only
+
+    def msg(self, level, str, *args):
+        if level <= self.debug:
+            for i in range(self.indent):
+                print "   ",
+            print str,
+            for arg in args:
+                print repr(arg),
+            print
+
+    def msgin(self, *args):
+        level = args[0]
+        if level <= self.debug:
+            self.indent = self.indent + 1
+            self.msg(*args)
+
+    def msgout(self, *args):
+        level = args[0]
+        if level <= self.debug:
+            self.indent = self.indent - 1
+            self.msg(*args)
+
+    def run_script(self, pathname):
+        self.msg(2, "run_script", pathname)
+        fp = open(pathname, READ_MODE)
+        stuff = ("", "r", imp.PY_SOURCE)
+        self.load_module('__main__', fp, pathname, stuff)
+
+    def load_file(self, pathname):
+        dir, name = os.path.split(pathname)
+        name, ext = os.path.splitext(name)
+        fp = open(pathname, READ_MODE)
+        stuff = (ext, "r", imp.PY_SOURCE)
+        self.load_module(name, fp, pathname, stuff)
+
+    def import_hook(self, name, caller=None, fromlist=None):
+        self.msg(3, "import_hook", name, caller, fromlist)
+        parent = self.determine_parent(caller)
+        q, tail = self.find_head_package(parent, name)
+        m = self.load_tail(q, tail)
+        if not fromlist:
+            return q
+        if m.__path__:
+            self.ensure_fromlist(m, fromlist)
+        return None
+
+    def determine_parent(self, caller):
+        self.msgin(4, "determine_parent", caller)
+        if not caller:
+            self.msgout(4, "determine_parent -> None")
+            return None
+        pname = caller.__name__
+        if caller.__path__:
+            parent = self.modules[pname]
+            assert caller is parent
+            self.msgout(4, "determine_parent ->", parent)
+            return parent
+        if '.' in pname:
+            i = pname.rfind('.')
+            pname = pname[:i]
+            parent = self.modules[pname]
+            assert parent.__name__ == pname
+            self.msgout(4, "determine_parent ->", parent)
+            return parent
+        self.msgout(4, "determine_parent -> None")
+        return None
+
+    def find_head_package(self, parent, name):
+        self.msgin(4, "find_head_package", parent, name)
+        if '.' in name:
+            i = name.find('.')
+            head = name[:i]
+            tail = name[i+1:]
+        else:
+            head = name
+            tail = ""
+        if parent:
+            qname = "%s.%s" % (parent.__name__, head)
+        else:
+            qname = head
+        q = self.import_module(head, qname, parent)
+        if q:
+            self.msgout(4, "find_head_package ->", (q, tail))
+            return q, tail
+        if parent:
+            qname = head
+            parent = None
+            q = self.import_module(head, qname, parent)
+            if q:
+                self.msgout(4, "find_head_package ->", (q, tail))
+                return q, tail
+        self.msgout(4, "raise ImportError: No module named", qname)
+        raise ImportError, "No module named " + qname
+
+    def load_tail(self, q, tail):
+        self.msgin(4, "load_tail", q, tail)
+        m = q
+        while tail:
+            i = tail.find('.')
+            if i < 0: i = len(tail)
+            head, tail = tail[:i], tail[i+1:]
+            mname = "%s.%s" % (m.__name__, head)
+            m = self.import_module(head, mname, m)
+            if not m:
+                self.msgout(4, "raise ImportError: No module named", mname)
+                raise ImportError, "No module named " + mname
+        self.msgout(4, "load_tail ->", m)
+        return m
+
+    def ensure_fromlist(self, m, fromlist, recursive=0):
+        self.msg(4, "ensure_fromlist", m, fromlist, recursive)
+        for sub in fromlist:
+            if sub == "*":
+                if not recursive:
+                    all = self.find_all_submodules(m)
+                    if all:
+                        self.ensure_fromlist(m, all, 1)
+            elif not hasattr(m, sub):
+                subname = "%s.%s" % (m.__name__, sub)
+                submod = self.import_module(sub, subname, m)
+                if not submod:
+                    raise ImportError, "No module named " + subname
+
+    def find_all_submodules(self, m):
+        if not m.__path__:
+            return
+        modules = {}
+        # 'suffixes' used to be a list hardcoded to [".py", ".pyc", ".pyo"].
+        # But we must also collect Python extension modules - although
+        # we cannot separate normal dlls from Python extensions.
+        suffixes = []
+        for triple in imp.get_suffixes():
+            suffixes.append(triple[0])
+        for dir in m.__path__:
+            try:
+                names = os.listdir(dir)
+            except os.error:
+                self.msg(2, "can't list directory", dir)
+                continue
+            for name in names:
+                mod = None
+                for suff in suffixes:
+                    n = len(suff)
+                    if name[-n:] == suff:
+                        mod = name[:-n]
+                        break
+                if mod and mod != "__init__":
+                    modules[mod] = mod
+        return modules.keys()
+
+    def import_module(self, partname, fqname, parent):
+        self.msgin(3, "import_module", partname, fqname, parent)
+        try:
+            m = self.modules[fqname]
+        except KeyError:
+            pass
+        else:
+            self.msgout(3, "import_module ->", m)
+            return m
+        if self.badmodules.has_key(fqname):
+            self.msgout(3, "import_module -> None")
+            return None
+        if parent and parent.__path__ is None:
+            self.msgout(3, "import_module -> None")
+            return None
+        try:
+            fp, pathname, stuff = self.find_module(partname,
+                                                   parent and parent.__path__, parent)
+        except ImportError:
+            self.msgout(3, "import_module ->", None)
+            return None
+        try:
+            m = self.load_module(fqname, fp, pathname, stuff)
+        finally:
+            if fp: fp.close()
+        if parent:
+            setattr(parent, partname, m)
+        self.msgout(3, "import_module ->", m)
+        return m
+
+    def load_module(self, fqname, fp, pathname, (suffix, mode, type)):
+        self.msgin(2, "load_module", fqname, fp and "fp", pathname)
+        if type == imp.PKG_DIRECTORY:
+            m = self.load_package(fqname, pathname)
+            self.msgout(2, "load_module ->", m)
+            return m
+        if type == imp.PY_SOURCE:
+            co = compile(fp.read()+'\n', pathname, 'exec')
+        elif type == imp.PY_COMPILED:
+            if fp.read(4) != imp.get_magic():
+                self.msgout(2, "raise ImportError: Bad magic number", pathname)
+                raise ImportError, "Bad magic number in %s" % pathname
+            fp.read(4)
+            co = marshal.load(fp)
+        else:
+            co = None
+        m = self.add_module(fqname)
+        m.__file__ = pathname
+        if co:
+            if self.replace_paths:
+                co = self.replace_paths_in_code(co)
+            m.__code__ = co
+            self.scan_code(co, m)
+        self.msgout(2, "load_module ->", m)
+        return m
+
+    def _add_badmodule(self, name, caller):
+        if name not in self.badmodules:
+            self.badmodules[name] = {}
+        self.badmodules[name][caller.__name__] = 1
+
+    def _safe_import_hook(self, name, caller, fromlist):
+        # wrapper for self.import_hook() that won't raise ImportError
+        if name in self.badmodules:
+            self._add_badmodule(name, caller)
+            return
+        try:
+            self.import_hook(name, caller)
+        except ImportError, msg:
+            self.msg(2, "ImportError:", str(msg))
+            self._add_badmodule(name, caller)
+        else:
+            if fromlist:
+                for sub in fromlist:
+                    if sub in self.badmodules:
+                        self._add_badmodule(sub, caller)
+                        continue
+                    try:
+                        self.import_hook(name, caller, [sub])
+                    except ImportError, msg:
+                        self.msg(2, "ImportError:", str(msg))
+                        fullname = name + "." + sub
+                        self._add_badmodule(fullname, caller)
+
+    def scan_code(self, co, m):
+        code = co.co_code
+        n = len(code)
+        i = 0
+        fromlist = None
+        while i < n:
+            c = code[i]
+            i = i+1
+            op = ord(c)
+            if op >= dis.HAVE_ARGUMENT:
+                oparg = ord(code[i]) + ord(code[i+1])*256
+                i = i+2
+            if op == LOAD_CONST:
+                # An IMPORT_NAME is always preceded by a LOAD_CONST, it's
+                # a tuple of "from" names, or None for a regular import.
+                # The tuple may contain "*" for "from <mod> import *"
+                fromlist = co.co_consts[oparg]
+            elif op == IMPORT_NAME:
+                assert fromlist is None or type(fromlist) is tuple
+                name = co.co_names[oparg]
+                have_star = 0
+                if fromlist is not None:
+                    if "*" in fromlist:
+                        have_star = 1
+                    fromlist = [f for f in fromlist if f != "*"]
+                self._safe_import_hook(name, m, fromlist)
+                if have_star:
+                    # We've encountered an "import *". If it is a Python module,
+                    # the code has already been parsed and we can suck out the
+                    # global names.
+                    mm = None
+                    if m.__path__:
+                        # At this point we don't know whether 'name' is a
+                        # submodule of 'm' or a global module. Let's just try
+                        # the full name first.
+                        mm = self.modules.get(m.__name__ + "." + name)
+                    if mm is None:
+                        mm = self.modules.get(name)
+                    if mm is not None:
+                        m.globalnames.update(mm.globalnames)
+                        m.starimports.update(mm.starimports)
+                        if mm.__code__ is None:
+                            m.starimports[name] = 1
+                    else:
+                        m.starimports[name] = 1
+            elif op in STORE_OPS:
+                # keep track of all global names that are assigned to
+                name = co.co_names[oparg]
+                m.globalnames[name] = 1
+        for c in co.co_consts:
+            if isinstance(c, type(co)):
+                self.scan_code(c, m)
+
+    def load_package(self, fqname, pathname):
+        self.msgin(2, "load_package", fqname, pathname)
+        newname = replacePackageMap.get(fqname)
+        if newname:
+            fqname = newname
+        m = self.add_module(fqname)
+        m.__file__ = pathname
+        m.__path__ = [pathname]
+
+        # As per comment at top of file, simulate runtime __path__ additions.
+        m.__path__ = m.__path__ + packagePathMap.get(fqname, [])
+
+        fp, buf, stuff = self.find_module("__init__", m.__path__)
+        self.load_module(fqname, fp, buf, stuff)
+        self.msgout(2, "load_package ->", m)
+        return m
+
+    def add_module(self, fqname):
+        if self.modules.has_key(fqname):
+            return self.modules[fqname]
+        self.modules[fqname] = m = Module(fqname)
+        return m
+
+    def find_module(self, name, path, parent=None):
+        if parent is not None:
+            # assert path is not None
+            fullname = parent.__name__+'.'+name
+        else:
+            fullname = name
+        if fullname in self.excludes:
+            self.msgout(3, "find_module -> Excluded", fullname)
+            raise ImportError, name
+
+        if path is None:
+            if name in sys.builtin_module_names:
+                return (None, None, ("", "", imp.C_BUILTIN))
+
+            path = self.path
+        return imp.find_module(name, path)
+
+    def report(self):
+        """Print a report to stdout, listing the found modules with their
+        paths, as well as modules that are missing, or seem to be missing.
+        """
+        print
+        print "  %-25s %s" % ("Name", "File")
+        print "  %-25s %s" % ("----", "----")
+        # Print modules found
+        keys = self.modules.keys()
+        keys.sort()
+        for key in keys:
+            m = self.modules[key]
+            if m.__path__:
+                print "P",
+            else:
+                print "m",
+            print "%-25s" % key, m.__file__ or ""
+
+        # Print missing modules
+        missing, maybe = self.any_missing_maybe()
+        if missing:
+            print
+            print "Missing modules:"
+            for name in missing:
+                mods = self.badmodules[name].keys()
+                mods.sort()
+                print "?", name, "imported from", ', '.join(mods)
+        # Print modules that may be missing, but then again, maybe not...
+        if maybe:
+            print
+            print "Submodules thay appear to be missing, but could also be",
+            print "global names in the parent package:"
+            for name in maybe:
+                mods = self.badmodules[name].keys()
+                mods.sort()
+                print "?", name, "imported from", ', '.join(mods)
+
+    def any_missing(self):
+        """Return a list of modules that appear to be missing. Use
+        any_missing_maybe() if you want to know which modules are
+        certain to be missing, and which *may* be missing.
+        """
+        missing, maybe = self.any_missing_maybe()
+        return missing + maybe
+
+    def any_missing_maybe(self):
+        """Return two lists, one with modules that are certainly missing
+        and one with modules that *may* be missing. The latter names could
+        either be submodules *or* just global names in the package.
+
+        The reason it can't always be determined is that it's impossible to
+        tell which names are imported when "from module import *" is done
+        with an extension module, short of actually importing it.
+        """
+        missing = []
+        maybe = []
+        for name in self.badmodules:
+            if name in self.excludes:
+                continue
+            i = name.rfind(".")
+            if i < 0:
+                missing.append(name)
+                continue
+            subname = name[i+1:]
+            pkgname = name[:i]
+            pkg = self.modules.get(pkgname)
+            if pkg is not None:
+                if pkgname in self.badmodules[name]:
+                    # The package tried to import this module itself and
+                    # failed. It's definitely missing.
+                    missing.append(name)
+                elif subname in pkg.globalnames:
+                    # It's a global in the package: definitely not missing.
+                    pass
+                elif pkg.starimports:
+                    # It could be missing, but the package did an "import *"
+                    # from a non-Python module, so we simply can't be sure.
+                    maybe.append(name)
+                else:
+                    # It's not a global in the package, the package didn't
+                    # do funny star imports, it's very likely to be missing.
+                    # The symbol could be inserted into the package from the
+                    # outside, but since that's not good style we simply list
+                    # it missing.
+                    missing.append(name)
+            else:
+                missing.append(name)
+        missing.sort()
+        maybe.sort()
+        return missing, maybe
+
+    def replace_paths_in_code(self, co):
+        new_filename = original_filename = os.path.normpath(co.co_filename)
+        for f, r in self.replace_paths:
+            if original_filename.startswith(f):
+                new_filename = r + original_filename[len(f):]
+                break
+
+        if self.debug and original_filename not in self.processed_paths:
+            if new_filename != original_filename:
+                self.msgout(2, "co_filename %r changed to %r" \
+                                    % (original_filename,new_filename,))
+            else:
+                self.msgout(2, "co_filename %r remains unchanged" \
+                                    % (original_filename,))
+            self.processed_paths.append(original_filename)
+
+        consts = list(co.co_consts)
+        for i in range(len(consts)):
+            if isinstance(consts[i], type(co)):
+                consts[i] = self.replace_paths_in_code(consts[i])
+
+        return new.code(co.co_argcount, co.co_nlocals, co.co_stacksize,
+                         co.co_flags, co.co_code, tuple(consts), co.co_names,
+                         co.co_varnames, new_filename, co.co_name,
+                         co.co_firstlineno, co.co_lnotab,
+                         co.co_freevars, co.co_cellvars)
+
+
+def test():
+    """Command-line driver: run ModuleFinder over a script and report.
+
+    Options: -d increase debug, -q quiet, -m treat extra args as module
+    names, -p add to the search path, -x exclude a module.  Returns the
+    ModuleFinder instance for interactive (-i) inspection.
+    """
+    # Parse command line
+    import getopt
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], "dmp:qx:")
+    except getopt.error, msg:
+        print msg
+        return
+
+    # Process options
+    debug = 1
+    domods = 0
+    addpath = []
+    exclude = []
+    for o, a in opts:
+        if o == '-d':
+            debug = debug + 1
+        if o == '-m':
+            domods = 1
+        if o == '-p':
+            addpath = addpath + a.split(os.pathsep)
+        if o == '-q':
+            debug = 0
+        if o == '-x':
+            exclude.append(a)
+
+    # Provide default arguments
+    if not args:
+        script = "hello.py"
+    else:
+        script = args[0]
+
+    # Set the path based on sys.path and the script directory
+    path = sys.path[:]
+    path[0] = os.path.dirname(script)
+    path = addpath + path
+    if debug > 1:
+        print "path:"
+        for item in path:
+            print "   ", repr(item)
+
+    # Create the module finder and turn its crank
+    mf = ModuleFinder(path, debug, exclude)
+    # remaining arguments are extra modules (or files) to pull in first
+    for arg in args[1:]:
+        if arg == '-m':
+            domods = 1
+            continue
+        if domods:
+            if arg[-2:] == '.*':
+                # 'pkg.*' means import the package plus all its submodules
+                mf.import_hook(arg[:-2], None, ["*"])
+            else:
+                mf.import_hook(arg)
+        else:
+            mf.load_file(arg)
+    mf.run_script(script)
+    mf.report()
+    return mf  # for -i debugging
+
+
+if __name__ == '__main__':
+    try:
+        mf = test()
+    except KeyboardInterrupt:
+        # allow clean Ctrl-C termination during a long scan
+        print "\n[interrupt]"
diff --git a/depot_tools/release/win/python_24/Lib/multifile.py b/depot_tools/release/win/python_24/Lib/multifile.py
new file mode 100644
index 0000000..e82a3fd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/multifile.py
@@ -0,0 +1,158 @@
+"""A readline()-style interface to the parts of a multipart message.
+
+The MultiFile class makes each part of a multipart message "feel" like
+an ordinary file, as long as you use fp.readline().  Allows recursive
+use, for nested multipart messages.  Probably best used together
+with module mimetools.
+
+Suggested use:
+
+real_fp = open(...)
+fp = MultiFile(real_fp)
+
+"read some lines from fp"
+fp.push(separator)
+while 1:
+        "read lines from fp until it returns an empty string" (A)
+        if not fp.next(): break
+fp.pop()
+"read remaining lines from fp until it returns an empty string"
+
+The latter sequence may be used recursively at (A).
+It is also allowed to use multiple push()...pop() sequences.
+
+If seekable is given as 0, the class code will not do the bookkeeping
+it normally attempts in order to make seeks relative to the beginning of the
+current file part.  This may be useful when using MultiFile with a non-
+seekable stream object.
+"""
+
+__all__ = ["MultiFile","Error"]
+
+class Error(Exception):
+    pass
+
+class MultiFile:
+
+    seekable = 0
+
+    def __init__(self, fp, seekable=1):
+        self.fp = fp
+        self.stack = []
+        self.level = 0
+        self.last = 0
+        if seekable:
+            self.seekable = 1
+            self.start = self.fp.tell()
+            self.posstack = []
+
+    def tell(self):
+        if self.level > 0:
+            return self.lastpos
+        return self.fp.tell() - self.start
+
+    def seek(self, pos, whence=0):
+        here = self.tell()
+        if whence:
+            if whence == 1:
+                pos = pos + here
+            elif whence == 2:
+                if self.level > 0:
+                    pos = pos + self.lastpos
+                else:
+                    raise Error, "can't use whence=2 yet"
+        if not 0 <= pos <= here or \
+                        self.level > 0 and pos > self.lastpos:
+            raise Error, 'bad MultiFile.seek() call'
+        self.fp.seek(pos + self.start)
+        self.level = 0
+        self.last = 0
+
+    def readline(self):
+        if self.level > 0:
+            return ''
+        line = self.fp.readline()
+        # Real EOF?
+        if not line:
+            self.level = len(self.stack)
+            self.last = (self.level > 0)
+            if self.last:
+                raise Error, 'sudden EOF in MultiFile.readline()'
+            return ''
+        assert self.level == 0
+        # Fast check to see if this is just data
+        if self.is_data(line):
+            return line
+        else:
+            # Ignore trailing whitespace on marker lines
+            marker = line.rstrip()
+        # No?  OK, try to match a boundary.
+        # Return the line (unstripped) if we don't.
+        for i, sep in enumerate(reversed(self.stack)):
+            if marker == self.section_divider(sep):
+                self.last = 0
+                break
+            elif marker == self.end_marker(sep):
+                self.last = 1
+                break
+        else:
+            return line
+        # We only get here if we see a section divider or EOM line
+        if self.seekable:
+            self.lastpos = self.tell() - len(line)
+        self.level = i+1
+        if self.level > 1:
+            raise Error,'Missing endmarker in MultiFile.readline()'
+        return ''
+
+    def readlines(self):
+        list = []
+        while 1:
+            line = self.readline()
+            if not line: break
+            list.append(line)
+        return list
+
+    def read(self): # Note: no size argument -- read until EOF only!
+        return ''.join(self.readlines())
+
+    def next(self):
+        while self.readline(): pass
+        if self.level > 1 or self.last:
+            return 0
+        self.level = 0
+        self.last = 0
+        if self.seekable:
+            self.start = self.fp.tell()
+        return 1
+
+    def push(self, sep):
+        if self.level > 0:
+            raise Error, 'bad MultiFile.push() call'
+        self.stack.append(sep)
+        if self.seekable:
+            self.posstack.append(self.start)
+            self.start = self.fp.tell()
+
+    def pop(self):
+        if self.stack == []:
+            raise Error, 'bad MultiFile.pop() call'
+        if self.level <= 1:
+            self.last = 0
+        else:
+            abslastpos = self.lastpos + self.start
+        self.level = max(0, self.level - 1)
+        self.stack.pop()
+        if self.seekable:
+            self.start = self.posstack.pop()
+            if self.level > 0:
+                self.lastpos = abslastpos - self.start
+
+    def is_data(self, line):
+        return line[:2] != '--'
+
+    def section_divider(self, str):
+        return "--" + str
+
+    def end_marker(self, str):
+        return "--" + str + "--"
diff --git a/depot_tools/release/win/python_24/Lib/mutex.py b/depot_tools/release/win/python_24/Lib/mutex.py
new file mode 100644
index 0000000..5d35bdf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/mutex.py
@@ -0,0 +1,52 @@
+"""Mutual exclusion -- for use with module sched
+
+A mutex has two pieces of state -- a 'locked' bit and a queue.
+When the mutex is not locked, the queue is empty.
+Otherwise, the queue contains 0 or more (function, argument) pairs
+representing functions (or methods) waiting to acquire the lock.
+When the mutex is unlocked while the queue is not empty,
+the first queue entry is removed and its function(argument) pair called,
+implying it now has the lock.
+
+Of course, no multi-threading is implied -- hence the funny interface
+for lock, where a function is called once the lock is acquired.
+"""
+
+from collections import deque
+
+class mutex:
+    def __init__(self):
+        """Create a new mutex -- initially unlocked."""
+        self.locked = 0
+        self.queue = deque()
+
+    def test(self):
+        """Test the locked bit of the mutex."""
+        return self.locked
+
+    def testandset(self):
+        """Atomic test-and-set -- grab the lock if it is not set,
+        return True if it succeeded."""
+        if not self.locked:
+            self.locked = 1
+            return True
+        else:
+            return False
+
+    def lock(self, function, argument):
+        """Lock a mutex, call the function with supplied argument
+        when it is acquired.  If the mutex is already locked, place
+        function and argument in the queue."""
+        if self.testandset():
+            function(argument)
+        else:
+            self.queue.append((function, argument))
+
+    def unlock(self):
+        """Unlock a mutex.  If the queue is not empty, call the next
+        function with its argument."""
+        if self.queue:
+            function, argument = self.queue.popleft()
+            function(argument)
+        else:
+            self.locked = 0
diff --git a/depot_tools/release/win/python_24/Lib/netrc.py b/depot_tools/release/win/python_24/Lib/netrc.py
new file mode 100644
index 0000000..5493d77d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/netrc.py
@@ -0,0 +1,111 @@
+"""An object-oriented interface to .netrc files."""
+
+# Module and documentation by Eric S. Raymond, 21 Dec 1998
+
+import os, shlex
+
+__all__ = ["netrc", "NetrcParseError"]
+
+
+class NetrcParseError(Exception):
+    """Exception raised on syntax errors in the .netrc file."""
+    def __init__(self, msg, filename=None, lineno=None):
+        self.filename = filename
+        self.lineno = lineno
+        self.msg = msg
+        Exception.__init__(self, msg)
+
+    def __str__(self):
+        return "%s (%s, line %s)" % (self.msg, self.filename, self.lineno)
+
+
+class netrc:
+    def __init__(self, file=None):
+        if file is None:
+            try:
+                file = os.path.join(os.environ['HOME'], ".netrc")
+            except KeyError:
+                raise IOError("Could not find .netrc: $HOME is not set")
+        fp = open(file)
+        self.hosts = {}
+        self.macros = {}
+        lexer = shlex.shlex(fp)
+        lexer.wordchars += r"""!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
+        while 1:
+            # Look for a machine, default, or macdef top-level keyword
+            toplevel = tt = lexer.get_token()
+            if not tt:
+                break
+            elif tt == 'machine':
+                entryname = lexer.get_token()
+            elif tt == 'default':
+                entryname = 'default'
+            elif tt == 'macdef':                # Just skip to end of macdefs
+                entryname = lexer.get_token()
+                self.macros[entryname] = []
+                lexer.whitespace = ' \t'
+                while 1:
+                    line = lexer.instream.readline()
+                    if not line or line == '\012':
+                        lexer.whitespace = ' \t\r\n'
+                        break
+                    self.macros[entryname].append(line)
+                continue
+            else:
+                raise NetrcParseError(
+                    "bad toplevel token %r" % tt, file, lexer.lineno)
+
+            # We're looking at start of an entry for a named machine or default.
+            login = ''
+            account = password = None
+            self.hosts[entryname] = {}
+            while 1:
+                tt = lexer.get_token()
+                if (tt=='' or tt == 'machine' or
+                    tt == 'default' or tt =='macdef'):
+                    if password:
+                        self.hosts[entryname] = (login, account, password)
+                        lexer.push_token(tt)
+                        break
+                    else:
+                        raise NetrcParseError(
+                            "malformed %s entry %s terminated by %s"
+                            % (toplevel, entryname, repr(tt)),
+                            file, lexer.lineno)
+                elif tt == 'login' or tt == 'user':
+                    login = lexer.get_token()
+                elif tt == 'account':
+                    account = lexer.get_token()
+                elif tt == 'password':
+                    password = lexer.get_token()
+                else:
+                    raise NetrcParseError("bad follower token %r" % tt,
+                                          file, lexer.lineno)
+
+    def authenticators(self, host):
+        """Return a (user, account, password) tuple for given host."""
+        if host in self.hosts:
+            return self.hosts[host]
+        elif 'default' in self.hosts:
+            return self.hosts['default']
+        else:
+            return None
+
+    def __repr__(self):
+        """Dump the class data in the format of a .netrc file."""
+        rep = ""
+        for host in self.hosts.keys():
+            attrs = self.hosts[host]
+            rep = rep + "machine "+ host + "\n\tlogin " + repr(attrs[0]) + "\n"
+            if attrs[1]:
+                rep = rep + "account " + repr(attrs[1])
+            rep = rep + "\tpassword " + repr(attrs[2]) + "\n"
+        for macro in self.macros.keys():
+            rep = rep + "macdef " + macro + "\n"
+            for line in self.macros[macro]:
+                rep = rep + line
+            rep = rep + "\n"
+        return rep
+
+if __name__ == '__main__':
+    print netrc()
diff --git a/depot_tools/release/win/python_24/Lib/new.py b/depot_tools/release/win/python_24/Lib/new.py
new file mode 100644
index 0000000..99a1c3f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/new.py
@@ -0,0 +1,17 @@
+"""Create new objects of various types.  Deprecated.
+
+This module is no longer required except for backward compatibility.
+Objects of most types can now be created by calling the type object.
+"""
+
+from types import ClassType as classobj
+from types import FunctionType as function
+from types import InstanceType as instance
+from types import MethodType as instancemethod
+from types import ModuleType as module
+
+# CodeType is not accessible in restricted execution mode
+try:
+    from types import CodeType as code
+except ImportError:
+    pass
diff --git a/depot_tools/release/win/python_24/Lib/nntplib.py b/depot_tools/release/win/python_24/Lib/nntplib.py
new file mode 100644
index 0000000..8709fffc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/nntplib.py
@@ -0,0 +1,628 @@
+"""An NNTP client class based on RFC 977: Network News Transfer Protocol.
+
+Example:
+
+>>> from nntplib import NNTP
+>>> s = NNTP('news')
+>>> resp, count, first, last, name = s.group('comp.lang.python')
+>>> print 'Group', name, 'has', count, 'articles, range', first, 'to', last
+Group comp.lang.python has 51 articles, range 5770 to 5821
+>>> resp, subs = s.xhdr('subject', first + '-' + last)
+>>> resp = s.quit()
+>>>
+
+Here 'resp' is the server response line.
+Error responses are turned into exceptions.
+
+To post an article from a file:
+>>> f = open(filename, 'r') # file containing article, including header
+>>> resp = s.post(f)
+>>>
+
+For descriptions of all methods, read the comments in the code below.
+Note that all arguments and return values representing article numbers
+are strings, not numbers, since they are rarely used for calculations.
+"""
+
+# RFC 977 by Brian Kantor and Phil Lapsley.
+# xover, xgtitle, xpath, date methods by Kevan Heydon
+
+
+# Imports
+import re
+import socket
+
+__all__ = ["NNTP","NNTPReplyError","NNTPTemporaryError",
+           "NNTPPermanentError","NNTPProtocolError","NNTPDataError",
+           "error_reply","error_temp","error_perm","error_proto",
+           "error_data",]
+
+# Exceptions raised when an error or invalid response is received
+class NNTPError(Exception):
+    """Base class for all nntplib exceptions"""
+    def __init__(self, *args):
+        Exception.__init__(self, *args)
+        try:
+            self.response = args[0]
+        except IndexError:
+            self.response = 'No response given'
+
+class NNTPReplyError(NNTPError):
+    """Unexpected [123]xx reply"""
+    pass
+
+class NNTPTemporaryError(NNTPError):
+    """4xx errors"""
+    pass
+
+class NNTPPermanentError(NNTPError):
+    """5xx errors"""
+    pass
+
+class NNTPProtocolError(NNTPError):
+    """Response does not begin with [1-5]"""
+    pass
+
+class NNTPDataError(NNTPError):
+    """Error in response data"""
+    pass
+
+# for backwards compatibility
+error_reply = NNTPReplyError
+error_temp = NNTPTemporaryError
+error_perm = NNTPPermanentError
+error_proto = NNTPProtocolError
+error_data = NNTPDataError
+
+
+
+# Standard port used by NNTP servers
+NNTP_PORT = 119
+
+
+# Response numbers that are followed by additional text (e.g. article)
+LONGRESP = ['100', '215', '220', '221', '222', '224', '230', '231', '282']
+
+
+# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
+CRLF = '\r\n'
+
+
+
+# The class itself
+class NNTP:
+    def __init__(self, host, port=NNTP_PORT, user=None, password=None,
+                 readermode=None, usenetrc=True):
+        """Initialize an instance.  Arguments:
+        - host: hostname to connect to
+        - port: port to connect to (default the standard NNTP port)
+        - user: username to authenticate with
+        - password: password to use with username
+        - readermode: if true, send 'mode reader' command after
+                      connecting.
+
+        readermode is sometimes necessary if you are connecting to an
+        NNTP server on the local machine and intend to call
+        reader-specific commands, such as `group'.  If you get
+        unexpected NNTPPermanentErrors, you might need to set
+        readermode.
+        """
+        self.host = host
+        self.port = port
+        self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.sock.connect((self.host, self.port))
+        self.file = self.sock.makefile('rb')
+        self.debugging = 0
+        self.welcome = self.getresp()
+
+        # 'mode reader' is sometimes necessary to enable 'reader' mode.
+        # However, the order in which 'mode reader' and 'authinfo' need to
+        # arrive differs between some NNTP servers. Try to send
+        # 'mode reader', and if it fails with an authorization failed
+        # error, try again after sending authinfo.
+        readermode_afterauth = 0
+        if readermode:
+            try:
+                self.welcome = self.shortcmd('mode reader')
+            except NNTPPermanentError:
+                # error 500, probably 'not implemented'
+                pass
+            except NNTPTemporaryError, e:
+                if user and e.response[:3] == '480':
+                    # Need authorization before 'mode reader'
+                    readermode_afterauth = 1
+                else:
+                    raise
+        # If no login/password was specified, try to get them from ~/.netrc
+        # Presume that if .netrc has an entry, NNRP authentication is required.
+        try:
+            if usenetrc and not user:
+                import netrc
+                credentials = netrc.netrc()
+                auth = credentials.authenticators(host)
+                if auth:
+                    user = auth[0]
+                    password = auth[2]
+        except IOError:
+            pass
+        # Perform NNRP authentication if needed.
+        if user:
+            resp = self.shortcmd('authinfo user '+user)
+            if resp[:3] == '381':
+                if not password:
+                    raise NNTPReplyError(resp)
+                else:
+                    resp = self.shortcmd(
+                            'authinfo pass '+password)
+                    if resp[:3] != '281':
+                        raise NNTPPermanentError(resp)
+            if readermode_afterauth:
+                try:
+                    self.welcome = self.shortcmd('mode reader')
+                except NNTPPermanentError:
+                    # error 500, probably 'not implemented'
+                    pass
+
+
+    # Get the welcome message from the server
+    # (this is read and squirreled away by __init__()).
+    # If the response code is 200, posting is allowed;
+    # if it is 201, posting is not allowed
+
+    def getwelcome(self):
+        """Get the welcome message from the server
+        (this is read and squirreled away by __init__()).
+        If the response code is 200, posting is allowed;
+        if it is 201, posting is not allowed."""
+
+        if self.debugging: print '*welcome*', repr(self.welcome)
+        return self.welcome
+
+    def set_debuglevel(self, level):
+        """Set the debugging level.  Argument 'level' means:
+        0: no debugging output (default)
+        1: print commands and responses but not body text etc.
+        2: also print raw lines read and sent before stripping CR/LF"""
+
+        self.debugging = level
+    debug = set_debuglevel
+
+    def putline(self, line):
+        """Internal: send one line to the server, appending CRLF."""
+        line = line + CRLF
+        if self.debugging > 1: print '*put*', repr(line)
+        self.sock.sendall(line)
+
+    def putcmd(self, line):
+        """Internal: send one command to the server (through putline())."""
+        if self.debugging: print '*cmd*', repr(line)
+        self.putline(line)
+
+    def getline(self):
+        """Internal: return one line from the server, stripping CRLF.
+        Raise EOFError if the connection is closed."""
+        line = self.file.readline()
+        if self.debugging > 1:
+            print '*get*', repr(line)
+        if not line: raise EOFError
+        if line[-2:] == CRLF: line = line[:-2]
+        elif line[-1:] in CRLF: line = line[:-1]
+        return line
+
+    def getresp(self):
+        """Internal: get a response from the server.
+        Raise various errors if the response indicates an error."""
+        resp = self.getline()
+        if self.debugging: print '*resp*', repr(resp)
+        c = resp[:1]
+        if c == '4':
+            raise NNTPTemporaryError(resp)
+        if c == '5':
+            raise NNTPPermanentError(resp)
+        if c not in '123':
+            raise NNTPProtocolError(resp)
+        return resp
+
+    def getlongresp(self, file=None):
+        """Internal: get a response plus following text from the server.
+        Raise various errors if the response indicates an error."""
+
+        openedFile = None
+        try:
+            # If a string was passed then open a file with that name
+            if isinstance(file, str):
+                openedFile = file = open(file, "w")
+
+            resp = self.getresp()
+            if resp[:3] not in LONGRESP:
+                raise NNTPReplyError(resp)
+            list = []
+            while 1:
+                line = self.getline()
+                if line == '.':
+                    break
+                if line[:2] == '..':
+                    line = line[1:]
+                if file:
+                    file.write(line + "\n")
+                else:
+                    list.append(line)
+        finally:
+            # If this method created the file, then it must close it
+            if openedFile:
+                openedFile.close()
+
+        return resp, list
+
+    def shortcmd(self, line):
+        """Internal: send a command and get the response."""
+        self.putcmd(line)
+        return self.getresp()
+
+    def longcmd(self, line, file=None):
+        """Internal: send a command and get the response plus following text."""
+        self.putcmd(line)
+        return self.getlongresp(file)
+
+    def newgroups(self, date, time, file=None):
+        """Process a NEWGROUPS command.  Arguments:
+        - date: string 'yymmdd' indicating the date
+        - time: string 'hhmmss' indicating the time
+        Return:
+        - resp: server response if successful
+        - list: list of newsgroup names"""
+
+        return self.longcmd('NEWGROUPS ' + date + ' ' + time, file)
+
+    def newnews(self, group, date, time, file=None):
+        """Process a NEWNEWS command.  Arguments:
+        - group: group name or '*'
+        - date: string 'yymmdd' indicating the date
+        - time: string 'hhmmss' indicating the time
+        Return:
+        - resp: server response if successful
+        - list: list of article ids"""
+
+        cmd = 'NEWNEWS ' + group + ' ' + date + ' ' + time
+        return self.longcmd(cmd, file)
+
+    def list(self, file=None):
+        """Process a LIST command.  Return:
+        - resp: server response if successful
+        - list: list of (group, last, first, flag) (strings)"""
+
+        resp, list = self.longcmd('LIST', file)
+        for i in range(len(list)):
+            # Parse lines into "group last first flag"
+            list[i] = tuple(list[i].split())
+        return resp, list
+
+    def description(self, group):
+
+        """Get a description for a single group.  If more than one
+        group matches ('group' is a pattern), return the first.  If no
+        group matches, return an empty string.
+
+        This elides the response code from the server, since it can
+        only be '215' or '285' (for xgtitle) anyway.  If the response
+        code is needed, use the 'descriptions' method.
+
+        NOTE: This neither checks for a wildcard in 'group' nor does
+        it check whether the group actually exists."""
+
+        resp, lines = self.descriptions(group)
+        if len(lines) == 0:
+            return ""
+        else:
+            return lines[0][1]
+
+    def descriptions(self, group_pattern):
+        """Get descriptions for a range of groups."""
+        line_pat = re.compile("^(?P<group>[^ \t]+)[ \t]+(.*)$")
+        # Try the more std (acc. to RFC2980) LIST NEWSGROUPS first
+        resp, raw_lines = self.longcmd('LIST NEWSGROUPS ' + group_pattern)
+        if resp[:3] != "215":
+            # Now the deprecated XGTITLE.  This either raises an error
+            # or succeeds with the same output structure as LIST
+            # NEWSGROUPS.
+            resp, raw_lines = self.longcmd('XGTITLE ' + group_pattern)
+        lines = []
+        for raw_line in raw_lines:
+            match = line_pat.search(raw_line.strip())
+            if match:
+                lines.append(match.group(1, 2))
+        return resp, lines
+
+    def group(self, name):
+        """Process a GROUP command.  Argument:
+        - group: the group name
+        Returns:
+        - resp: server response if successful
+        - count: number of articles (string)
+        - first: first article number (string)
+        - last: last article number (string)
+        - name: the group name"""
+
+        resp = self.shortcmd('GROUP ' + name)
+        if resp[:3] != '211':
+            raise NNTPReplyError(resp)
+        words = resp.split()
+        count = first = last = 0
+        n = len(words)
+        if n > 1:
+            count = words[1]
+            if n > 2:
+                first = words[2]
+                if n > 3:
+                    last = words[3]
+                    if n > 4:
+                        name = words[4].lower()
+        return resp, count, first, last, name
+
+    def help(self, file=None):
+        """Process a HELP command.  Returns:
+        - resp: server response if successful
+        - list: list of strings"""
+
+        return self.longcmd('HELP',file)
+
+    def statparse(self, resp):
+        """Internal: parse the response of a STAT, NEXT or LAST command."""
+        if resp[:2] != '22':
+            raise NNTPReplyError(resp)
+        words = resp.split()
+        nr = 0
+        id = ''
+        n = len(words)
+        if n > 1:
+            nr = words[1]
+            if n > 2:
+                id = words[2]
+        return resp, nr, id
+
+    def statcmd(self, line):
+        """Internal: process a STAT, NEXT or LAST command."""
+        resp = self.shortcmd(line)
+        return self.statparse(resp)
+
+    def stat(self, id):
+        """Process a STAT command.  Argument:
+        - id: article number or message id
+        Returns:
+        - resp: server response if successful
+        - nr:   the article number
+        - id:   the article id"""
+
+        return self.statcmd('STAT ' + id)
+
+    def next(self):
+        """Process a NEXT command.  No arguments.  Return as for STAT."""
+        return self.statcmd('NEXT')
+
+    def last(self):
+        """Process a LAST command.  No arguments.  Return as for STAT."""
+        return self.statcmd('LAST')
+
+    def artcmd(self, line, file=None):
+        """Internal: process a HEAD, BODY or ARTICLE command."""
+        resp, list = self.longcmd(line, file)
+        resp, nr, id = self.statparse(resp)
+        return resp, nr, id, list
+
+    def head(self, id):
+        """Process a HEAD command.  Argument:
+        - id: article number or message id
+        Returns:
+        - resp: server response if successful
+        - nr: article number
+        - id: message id
+        - list: the lines of the article's header"""
+
+        return self.artcmd('HEAD ' + id)
+
+    def body(self, id, file=None):
+        """Process a BODY command.  Argument:
+        - id: article number or message id
+        - file: Filename string or file object to store the article in
+        Returns:
+        - resp: server response if successful
+        - nr: article number
+        - id: message id
+        - list: the lines of the article's body or an empty list
+                if file was used"""
+
+        return self.artcmd('BODY ' + id, file)
+
+    def article(self, id):
+        """Process an ARTICLE command.  Argument:
+        - id: article number or message id
+        Returns:
+        - resp: server response if successful
+        - nr: article number
+        - id: message id
+        - list: the lines of the article"""
+
+        return self.artcmd('ARTICLE ' + id)
+
+    def slave(self):
+        """Process a SLAVE command.  Returns:
+        - resp: server response if successful"""
+
+        return self.shortcmd('SLAVE')
+
+    def xhdr(self, hdr, str, file=None):
+        """Process an XHDR command (optional server extension).  Arguments:
+        - hdr: the header type (e.g. 'subject')
+        - str: an article nr, a message id, or a range nr1-nr2
+        Returns:
+        - resp: server response if successful
+        - list: list of (nr, value) strings"""
+
+        pat = re.compile('^([0-9]+) ?(.*)\n?')
+        resp, lines = self.longcmd('XHDR ' + hdr + ' ' + str, file)
+        for i in range(len(lines)):
+            line = lines[i]
+            m = pat.match(line)
+            if m:
+                lines[i] = m.group(1, 2)
+        return resp, lines
+
+    def xover(self, start, end, file=None):
+        """Process an XOVER command (optional server extension) Arguments:
+        - start: start of range
+        - end: end of range
+        Returns:
+        - resp: server response if successful
+        - list: list of (art-nr, subject, poster, date,
+                         id, references, size, lines)"""
+
+        resp, lines = self.longcmd('XOVER ' + start + '-' + end, file)
+        xover_lines = []
+        for line in lines:
+            elem = line.split("\t")
+            try:
+                xover_lines.append((elem[0],
+                                    elem[1],
+                                    elem[2],
+                                    elem[3],
+                                    elem[4],
+                                    elem[5].split(),
+                                    elem[6],
+                                    elem[7]))
+            except IndexError:
+                raise NNTPDataError(line)
+        return resp,xover_lines
+
+    def xgtitle(self, group, file=None):
+        """Process an XGTITLE command (optional server extension) Arguments:
+        - group: group name wildcard (i.e. news.*)
+        Returns:
+        - resp: server response if successful
+        - list: list of (name,title) strings"""
+
+        line_pat = re.compile("^([^ \t]+)[ \t]+(.*)$")
+        resp, raw_lines = self.longcmd('XGTITLE ' + group, file)
+        lines = []
+        for raw_line in raw_lines:
+            match = line_pat.search(raw_line.strip())
+            if match:
+                lines.append(match.group(1, 2))
+        return resp, lines
+
+    def xpath(self,id):
+        """Process an XPATH command (optional server extension) Arguments:
+        - id: Message id of article
+        Returns:
+        resp: server response if successful
+        path: directory path to article"""
+
+        resp = self.shortcmd("XPATH " + id)
+        if resp[:3] != '223':
+            raise NNTPReplyError(resp)
+        try:
+            [resp_num, path] = resp.split()
+        except ValueError:
+            raise NNTPReplyError(resp)
+        else:
+            return resp, path
+
+    def date (self):
+        """Process the DATE command. Arguments:
+        None
+        Returns:
+        resp: server response if successful
+        date: Date suitable for newnews/newgroups commands etc.
+        time: Time suitable for newnews/newgroups commands etc."""
+
+        resp = self.shortcmd("DATE")
+        if resp[:3] != '111':
+            raise NNTPReplyError(resp)
+        elem = resp.split()
+        if len(elem) != 2:
+            raise NNTPDataError(resp)
+        date = elem[1][2:8]
+        time = elem[1][-6:]
+        if len(date) != 6 or len(time) != 6:
+            raise NNTPDataError(resp)
+        return resp, date, time
+
+
+    def post(self, f):
+        """Process a POST command.  Arguments:
+        - f: file containing the article
+        Returns:
+        - resp: server response if successful"""
+
+        resp = self.shortcmd('POST')
+        # Raises error_??? if posting is not allowed
+        if resp[0] != '3':
+            raise NNTPReplyError(resp)
+        while 1:
+            line = f.readline()
+            if not line:
+                break
+            if line[-1] == '\n':
+                line = line[:-1]
+            if line[:1] == '.':
+                line = '.' + line
+            self.putline(line)
+        self.putline('.')
+        return self.getresp()
+
+    def ihave(self, id, f):
+        """Process an IHAVE command.  Arguments:
+        - id: message-id of the article
+        - f:  file containing the article
+        Returns:
+        - resp: server response if successful
+        Note that if the server refuses the article an exception is raised."""
+
+        resp = self.shortcmd('IHAVE ' + id)
+        # Raises error_??? if the server already has it
+        if resp[0] != '3':
+            raise NNTPReplyError(resp)
+        while 1:
+            line = f.readline()
+            if not line:
+                break
+            if line[-1] == '\n':
+                line = line[:-1]
+            if line[:1] == '.':
+                line = '.' + line
+            self.putline(line)
+        self.putline('.')
+        return self.getresp()
+
+    def quit(self):
+        """Process a QUIT command and close the socket.  Returns:
+        - resp: server response if successful"""
+
+        resp = self.shortcmd('QUIT')
+        self.file.close()
+        self.sock.close()
+        del self.file, self.sock
+        return resp
+
+
+# Test retrieval when run as a script.
+# Assumption: if there's a local news server, it's called 'news'.
+# Assumption: if user queries a remote news server, it's named
+# in the environment variable NNTPSERVER (used by slrn and kin)
+# and we want readermode off.
+if __name__ == '__main__':
+    import os
+    newshost = 'news' and os.environ["NNTPSERVER"]
+    if newshost.find('.') == -1:
+        mode = 'readermode'
+    else:
+        mode = None
+    s = NNTP(newshost, readermode=mode)
+    resp, count, first, last, name = s.group('comp.lang.python')
+    print resp
+    print 'Group', name, 'has', count, 'articles, range', first, 'to', last
+    resp, subs = s.xhdr('subject', first + '-' + last)
+    print resp
+    for item in subs:
+        print "%7s %s" % item
+    resp = s.quit()
+    print resp
diff --git a/depot_tools/release/win/python_24/Lib/ntpath.py b/depot_tools/release/win/python_24/Lib/ntpath.py
new file mode 100644
index 0000000..649e424
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/ntpath.py
@@ -0,0 +1,511 @@
+# Module 'ntpath' -- common operations on WinNT/Win95 pathnames
+"""Common pathname manipulations, WindowsNT/95 version.
+
+Instead of importing this module directly, import os and refer to this
+module as os.path.
+"""
+
+import os
+import stat
+import sys
+
+__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
+           "basename","dirname","commonprefix","getsize","getmtime",
+           "getatime","getctime", "islink","exists","isdir","isfile","ismount",
+           "walk","expanduser","expandvars","normpath","abspath","splitunc",
+           "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
+           "devnull","realpath","supports_unicode_filenames"]
+
# strings representing various path-related bits and pieces
curdir = '.'            # current-directory component
pardir = '..'           # parent-directory component
extsep = '.'            # separator between base name and extension
sep = '\\'              # primary path separator
pathsep = ';'           # separator between entries of $PATH-style lists
altsep = '/'            # alternate separator accepted on Windows
defpath = '.;C:\\bin'   # default executable search path
if 'ce' in sys.builtin_module_names:
    # Windows CE has no C:\bin; executables live under \Windows.
    defpath = '\\Windows'
elif 'os2' in sys.builtin_module_names:
    # OS/2 w/ VACPP
    altsep = '/'        # NOTE: redundant -- altsep is already '/' above
devnull = 'nul'         # name of the null device
+
+# Normalize the case of a pathname and map slashes to backslashes.
+# Other normalizations (such as optimizing '../' away) are not done
+# (this is done by normpath).
+
def normcase(s):
    """Normalize the case of a pathname.

    Lowercases the whole string and converts forward slashes to
    backslashes; no other normalization (see normpath for that)."""
    return s.lower().replace("/", "\\")
+
+
+# Return whether a path is absolute.
+# Trivial in Posix, harder on the Mac or MS-DOS.
+# For DOS it is absolute if it starts with a slash or backslash (current
+# volume), or if a pathname after the volume letter and colon / UNC resource
+# starts with a slash or backslash.
+
def isabs(s):
    """Test whether a path is absolute."""
    # Strip any drive letter first; what remains is absolute iff it
    # begins with a slash or backslash.
    rest = splitdrive(s)[1]
    if not rest:
        return False
    return rest[0] in '/\\'
+
+
+# Join two (or more) paths.
+
def join(a, *p):
    """Join two or more pathname components, inserting "\\" as needed.

    An absolute later component discards what came before it, with
    special care for drive letters (see the numbered cases below)."""
    path = a
    for b in p:
        b_wins = 0  # set to 1 iff b makes path irrelevant
        if path == "":
            b_wins = 1

        elif isabs(b):
            # This probably wipes out path so far.  However, it's more
            # complicated if path begins with a drive letter:
            #     1. join('c:', '/a') == 'c:/a'
            #     2. join('c:/', '/a') == 'c:/a'
            # But
            #     3. join('c:/a', '/b') == '/b'
            #     4. join('c:', 'd:/') = 'd:/'
            #     5. join('c:/', 'd:/') = 'd:/'
            if path[1:2] != ":" or b[1:2] == ":":
                # Path doesn't start with a drive letter, or cases 4 and 5.
                b_wins = 1

            # Else path has a drive letter, and b doesn't but is absolute.
            elif len(path) > 3 or (len(path) == 3 and
                                   path[-1] not in "/\\"):
                # case 3
                b_wins = 1

        if b_wins:
            path = b
        else:
            # Join, and ensure there's a separator.
            assert len(path) > 0
            if path[-1] in "/\\":
                # path already ends with a separator; avoid doubling it.
                if b and b[0] in "/\\":
                    path += b[1:]
                else:
                    path += b
            elif path[-1] == ":":
                # Bare drive ('c:'): append without a separator so the
                # result stays drive-relative (case 1 above).
                path += b
            elif b:
                if b[0] in "/\\":
                    path += b
                else:
                    path += "\\" + b
            else:
                # path is not empty and does not end with a backslash,
                # but b is empty; since, e.g., split('a/') produces
                # ('a', ''), it's best if join() adds a backslash in
                # this case.
                path += '\\'

    return path
+
+
+# Split a path in a drive specification (a drive letter followed by a
+# colon) and the path specification.
+# It is always true that drivespec + pathspec == p
def splitdrive(p):
    """Split a pathname into a (drive, tail) 2-tuple.

    The drive is a leading "X:" when present, otherwise ''.  Either
    part may be empty; drive + tail == p always holds."""
    drive, tail = '', p
    if len(p) >= 2 and p[1] == ':':
        drive, tail = p[:2], p[2:]
    return drive, tail
+
+
+# Parse UNC paths
+def splitunc(p):
+    """Split a pathname into UNC mount point and relative path specifiers.
+
+    Return a 2-tuple (unc, rest); either part may be empty.
+    If unc is not empty, it has the form '//host/mount' (or similar
+    using backslashes).  unc+rest is always the input path.
+    Paths containing drive letters never have an UNC part.
+    """
+    if p[1:2] == ':':
+        return '', p # Drive letter present
+    firstTwo = p[0:2]
+    if firstTwo == '//' or firstTwo == '\\\\':
+        # is a UNC path:
+        # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
+        # \\machine\mountpoint\directories...
+        #           directory ^^^^^^^^^^^^^^^
+        normp = normcase(p)
+        index = normp.find('\\', 2)
+        if index == -1:
+            ##raise RuntimeError, 'illegal UNC path: "' + p + '"'
+            return ("", p)
+        index = normp.find('\\', index + 1)
+        if index == -1:
+            index = len(p)
+        return p[:index], p[index:]
+    return '', p
+
+
+# Split a path in head (everything up to the last '/') and tail (the
+# rest).  After the trailing '/' is stripped, the invariant
+# join(head, tail) == p holds.
+# The resulting head won't end in '/' unless it is the root.
+
def split(p):
    """Split a pathname into (head, tail).

    tail is everything after the final slash or backslash; either part
    may be empty.  Trailing separators are stripped from head unless
    head consists entirely of separators."""

    drive, rest = splitdrive(p)
    # Walk back from the end to the character after the last separator.
    cut = len(rest)
    while cut and rest[cut - 1] not in '/\\':
        cut = cut - 1
    head, tail = rest[:cut], rest[cut:]  # tail contains no separators
    # Strip trailing separators from head, unless head is all separators.
    stripped = head
    while stripped and stripped[-1] in '/\\':
        stripped = stripped[:-1]
    return drive + (stripped or head), tail
+
+
+# Split a path in root and extension.
+# The extension is everything starting at the last dot in the last
+# pathname component; the root is everything before that.
+# It is always true that root + ext == p.
+
def splitext(p):
    """Split the extension from a pathname.

    The extension is everything from the last dot in the last path
    component to the end.  Return (root, ext); either part may be
    empty, and root + ext == p."""

    dot = p.rfind('.')
    last_sep = max(p.rfind('/'), p.rfind('\\'))
    # Only a dot that falls after the final separator starts an extension.
    if dot > last_sep:
        return p[:dot], p[dot:]
    return p, ''
+
+
+# Return the tail (basename) part of a path.
+
def basename(p):
    """Return the final component of a pathname."""
    head, tail = split(p)
    return tail
+
+
+# Return the head (dirname) part of a path.
+
def dirname(p):
    """Return the directory component of a pathname."""
    head, tail = split(p)
    return head
+
+
+# Return the longest prefix of all list elements.
+
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    if not m:
        return ''
    # The lexicographically smallest and largest entries bound every
    # other string, so the common prefix of the whole list equals the
    # common prefix of just these two.
    lo = min(m)
    hi = max(m)
    for i, ch in enumerate(lo):
        if ch != hi[i]:
            return lo[:i]
    return lo
+
+
+# Get size, mtime, atime of files.
+
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_size

def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_mtime

def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_atime

def getctime(filename):
    """Return the creation time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_ctime
+
+# Is a path a symbolic link?
+# This will always return false on systems where posix.lstat doesn't exist.
+
def islink(path):
    """Test for a symbolic link.

    Always returns False: WindowsNT/95 filesystems (as of this module)
    have no symlinks, so this exists only for posixpath API parity."""
    return False
+
+
+# Does a path exist?
+
def exists(path):
    """Test whether a path exists.

    Returns False for anything os.stat() rejects: missing files,
    malformed names, permission failures."""
    try:
        # The stat result itself is irrelevant; only success matters.
        os.stat(path)
    except os.error:
        return False
    return True

# No symlinks on these systems, so a non-following "lexists" is just exists.
lexists = exists
+
+
+# Is a path a dos directory?
+# This follows symbolic links, so both islink() and isdir() can be true
+# for the same path.
+
def isdir(path):
    """Test whether a path is a directory."""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        # Anything stat() rejects is not a directory.
        return False
    return stat.S_ISDIR(mode)
+
+
+# Is a path a regular file?
+# This follows symbolic links, so both islink() and isdir() can be true
+# for the same path.
+
def isfile(path):
    """Test whether a path is a regular file."""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        # Anything stat() rejects is not a regular file.
        return False
    return stat.S_ISREG(mode)
+
+
+# Is a path a mount point?  Either a root (with or without drive letter)
+# or an UNC path with at most a / or \ after the mount point.
+
def ismount(path):
    """Test whether a path is a mount point (defined as the root of a drive)."""
    unc, rest = splitunc(path)
    if unc:
        # A UNC mount point may carry at most one trailing separator.
        return rest in ("", "/", "\\")
    tail = splitdrive(path)[1]
    return len(tail) == 1 and tail[0] in '/\\'
+
+
+# Directory tree walk.
+# For each directory under top (including top itself, but excluding
+# '.' and '..'), func(arg, dirname, filenames) is called, where
+# dirname is the name of the directory and filenames is the list
+# of files (and subdirectories etc.) in the directory.
+# The func may modify the filenames list, to implement a filter,
+# or to impose a different order of visiting.
+
def walk(top, func, arg):
    """Directory tree walk with callback function.

    For each directory in the directory tree rooted at top (including top
    itself, but excluding '.' and '..'), call func(arg, dirname, fnames).
    dirname is the name of the directory, and fnames a list of the names of
    the files and subdirectories in dirname (excluding '.' and '..').  func
    may modify the fnames list in-place (e.g. via del or slice assignment),
    and walk will only recurse into the subdirectories whose names remain in
    fnames; this can be used to implement a filter, or to impose a specific
    order of visiting.  No semantics are defined for, or required of, arg,
    beyond that arg is always passed to func.  It can be used, e.g., to pass
    a filename pattern, or a mutable object designed to accumulate
    statistics.  Passing None for arg is common."""

    try:
        names = os.listdir(top)
    except os.error:
        # Unreadable or vanished directory: visit nothing, raise nothing.
        return
    # Visit top before its children (pre-order); the callback may prune
    # `names` in place, which limits the recursion below.
    func(arg, top, names)
    exceptions = ('.', '..')
    for name in names:
        if name not in exceptions:
            name = join(top, name)
            if isdir(name):
                walk(name, func, arg)
+
+
+# Expand paths beginning with '~' or '~user'.
+# '~' means $HOME; '~user' means that user's home directory.
+# If the path doesn't begin with '~', or if the user or $HOME is unknown,
+# the path is returned unchanged (leaving error reporting to whatever
+# function is called with the expanded path as argument).
+# See also module 'glob' for expansion of *, ? and [...] in pathnames.
+# (A function should also be defined to do full *sh-style environment
+# variable expansion.)
+
def expanduser(path):
    """Expand a leading '~' to the user's home directory.

    Only the bare '~' form is handled here: '~user' paths, and paths
    whose home cannot be determined from $HOME or $HOMEDRIVE/$HOMEPATH,
    are returned unchanged."""
    if path[:1] != '~':
        return path
    # i ends up just past the '~[user]' prefix (at the first separator).
    i, n = 1, len(path)
    while i < n and path[i] not in '/\\':
        i = i + 1
    if i == 1:
        # Bare '~': prefer $HOME, then $HOMEDRIVE + $HOMEPATH.
        if 'HOME' in os.environ:
            userhome = os.environ['HOME']
        elif not 'HOMEPATH' in os.environ:
            return path
        else:
            try:
                drive = os.environ['HOMEDRIVE']
            except KeyError:
                drive = ''
            userhome = join(drive, os.environ['HOMEPATH'])
    else:
        # '~user' form: no way to look up another user's home here.
        return path
    return userhome + path[i:]
+
+
+# Expand paths containing shell variable substitutions.
+# The following rules apply:
+#       - no expansion within single quotes
+#       - no escape character, except for '$$' which is translated into '$'
+#       - ${varname} is accepted.
+#       - varnames can be made out of letters, digits and the character '_'
+# XXX With COMMAND.COM you can use any characters in a variable name,
+# XXX except '^|<>='.
+
def expandvars(path):
    """Expand shell variables of form $var and ${var}.

    No expansion happens within single quotes; '$$' becomes a literal
    '$'.  NOTE(review): despite the historical claim that unknown
    variables are left unchanged, the code below silently DROPS a
    reference to an undefined $var or ${var} -- confirm before relying
    on either behavior."""
    if '$' not in path:
        return path
    import string
    # Characters allowed in a bare $var name (note '-' is included).
    varchars = string.ascii_letters + string.digits + '_-'
    res = ''
    index = 0
    pathlen = len(path)
    # `path`/`pathlen`/`index` are re-bound as quoted or ${} sections are
    # consumed, so `index` is always relative to the current `path`.
    while index < pathlen:
        c = path[index]
        if c == '\'':   # no expansion within single quotes
            path = path[index + 1:]
            pathlen = len(path)
            try:
                index = path.index('\'')
                res = res + '\'' + path[:index + 1]
            except ValueError:
                # Unterminated quote: keep the rest verbatim and stop.
                res = res + path
                index = pathlen - 1
        elif c == '$':  # variable or '$$'
            if path[index + 1:index + 2] == '$':
                # '$$' escapes a single '$'.
                res = res + c
                index = index + 1
            elif path[index + 1:index + 2] == '{':
                # ${varname} form.
                path = path[index+2:]
                pathlen = len(path)
                try:
                    index = path.index('}')
                    var = path[:index]
                    if var in os.environ:
                        res = res + os.environ[var]
                except ValueError:
                    # No closing brace: emit the remainder unmodified.
                    res = res + path
                    index = pathlen - 1
            else:
                # Bare $varname: consume the longest run of varchars.
                var = ''
                index = index + 1
                c = path[index:index + 1]
                while c != '' and c in varchars:
                    var = var + c
                    index = index + 1
                    c = path[index:index + 1]
                if var in os.environ:
                    res = res + os.environ[var]
                if c != '':
                    # The terminating character is ordinary text.
                    res = res + c
        else:
            res = res + c
        index = index + 1
    return res
+
+
+# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A\B.
+# Previously, this function also truncated pathnames to 8+3 format,
+# but as this module is called "ntpath", that's obviously wrong!
+
def normpath(path):
    """Normalize path, eliminating double slashes, '.' and '..' components."""
    path = path.replace("/", "\\")
    prefix, path = splitdrive(path)
    # We need to be careful here. If the prefix is empty, and the path starts
    # with a backslash, it could either be an absolute path on the current
    # drive (\dir1\dir2\file) or a UNC filename (\\server\mount\dir1\file). It
    # is therefore imperative NOT to collapse multiple backslashes blindly in
    # that case.
    # The code below preserves multiple backslashes when there is no drive
    # letter. This means that the invalid filename \\\a\b is preserved
    # unchanged, where a\\\b is normalised to a\b. It's not clear that there
    # is any better behaviour for such edge cases.
    if prefix == '':
        # No drive letter - preserve initial backslashes
        while path[:1] == "\\":
            prefix = prefix + "\\"
            path = path[1:]
    else:
        # We have a drive letter - collapse initial backslashes
        if path.startswith("\\"):
            prefix = prefix + "\\"
            path = path.lstrip("\\")
    comps = path.split("\\")
    i = 0
    while i < len(comps):
        if comps[i] in ('.', ''):
            # '.' components and empty components (from doubled slashes)
            # vanish.
            del comps[i]
        elif comps[i] == '..':
            if i > 0 and comps[i-1] != '..':
                # '..' cancels the preceding real component.
                del comps[i-1:i+1]
                i -= 1
            elif i == 0 and prefix.endswith("\\"):
                # '..' at an absolute root goes nowhere; drop it.
                del comps[i]
            else:
                # Leading '..' on a relative path must be kept.
                i += 1
        else:
            i += 1
    # If the path is now empty, substitute '.'
    if not prefix and not comps:
        comps.append('.')
    return prefix + "\\".join(comps)
+
+
+# Return an absolute path.
+def abspath(path):
+    """Return the absolute version of a path"""
+    try:
+        from nt import _getfullpathname
+    except ImportError: # Not running on Windows - mock up something sensible.
+        global abspath
+        def _abspath(path):
+            if not isabs(path):
+                path = join(os.getcwd(), path)
+            return normpath(path)
+        abspath = _abspath
+        return _abspath(path)
+
+    if path: # Empty path must return current working directory.
+        try:
+            path = _getfullpathname(path)
+        except WindowsError:
+            pass # Bad path - return unchanged.
+    else:
+        path = os.getcwd()
+    return normpath(path)
+
# realpath is a no-op on systems without islink support
realpath = abspath
# Win9x family and earlier have no Unicode filename support.
# getwindowsversion()[3] is the platform id; >= 2 means the NT family.
supports_unicode_filenames = (hasattr(sys, "getwindowsversion") and
                              sys.getwindowsversion()[3] >= 2)
diff --git a/depot_tools/release/win/python_24/Lib/nturl2path.py b/depot_tools/release/win/python_24/Lib/nturl2path.py
new file mode 100644
index 0000000..4fb2148
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/nturl2path.py
@@ -0,0 +1,66 @@
+"""Convert a NT pathname to a file URL and vice versa."""
+
def url2pathname(url):
    r"""Convert a URL to a DOS path.

            ///C|/foo/bar/spam.foo

                    becomes

            C:\foo\bar\spam.foo
    """
    import string, urllib
    if not '|' in url:
        # No drive specifier, just convert slashes
        if url[:4] == '////':
            # path is something like ////host/path/on/remote/host
            # convert this to \\host\path\on\remote\host
            # (notice halving of slashes at the start of the path)
            url = url[2:]
        components = url.split('/')
        # make sure not to convert quoted slashes :-)
        return urllib.unquote('\\'.join(components))
    # Drive form: exactly one '|', preceded by a single letter.
    comp = url.split('|')
    if len(comp) != 2 or comp[0][-1] not in string.ascii_letters:
        error = 'Bad URL: ' + url
        raise IOError, error
    drive = comp[0][-1].upper()
    components = comp[1].split('/')
    path = drive + ':'
    # Unquote each component; empty components (doubled slashes) vanish.
    for  comp in components:
        if comp:
            path = path + '\\' + urllib.unquote(comp)
    return path
+
def pathname2url(p):
    r"""Convert a DOS path name to a file url.

            C:\foo\bar\spam.foo

                    becomes

            ///C|/foo/bar/spam.foo
    """

    import urllib
    if not ':' in p:
        # No drive specifier, just convert slashes and quote the name
        if p[:2] == '\\\\':
            # path is something like \\host\path\on\remote\host
            # convert this to ////host/path/on/remote/host
            # (notice doubling of slashes at the start of the path)
            p = '\\\\' + p
        components = p.split('\\')
        return urllib.quote('/'.join(components))
    # Drive form: exactly one ':', preceded by a one-character drive.
    comp = p.split(':')
    if len(comp) != 2 or len(comp[0]) > 1:
        error = 'Bad path: ' + p
        raise IOError, error

    drive = urllib.quote(comp[0].upper())
    components = comp[1].split('\\')
    path = '///' + drive + '|'
    # Quote each component; empty components (doubled backslashes) vanish.
    for comp in components:
        if comp:
            path = path + '/' + urllib.quote(comp)
    return path
diff --git a/depot_tools/release/win/python_24/Lib/opcode.py b/depot_tools/release/win/python_24/Lib/opcode.py
new file mode 100644
index 0000000..9517c43
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/opcode.py
@@ -0,0 +1,190 @@
+
+"""
+opcode module - potentially shared between dis and other modules which
+operate on bytecodes (e.g. peephole optimizers).
+"""
+
__all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs",
           "haslocal", "hascompare", "hasfree", "opname", "opmap",
           "HAVE_ARGUMENT", "EXTENDED_ARG"]

# Operand names for COMPARE_OP, indexed by the opcode's argument.
cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is',
        'is not', 'exception match', 'BAD')

# Lists of opcode numbers grouped by the kind of argument they take;
# populated by the def_op/name_op/jrel_op/jabs_op helpers below.
hasconst = []
hasname = []
hasjrel = []
hasjabs = []
haslocal = []
hascompare = []
hasfree = []

opmap = {}                  # mnemonic -> opcode number
opname = [''] * 256         # opcode number -> mnemonic
# Placeholder names like '<123>' for opcodes that are never defined.
for op in range(256): opname[op] = '<%r>' % (op,)
del op
+
def def_op(name, op):
    """Register an opcode: record its name in opname and number in opmap."""
    opname[op] = name
    opmap[name] = op

def name_op(name, op):
    """Register an opcode whose argument is an index into the names list."""
    def_op(name, op)
    hasname.append(op)

def jrel_op(name, op):
    """Register an opcode whose argument is a relative jump offset."""
    def_op(name, op)
    hasjrel.append(op)

def jabs_op(name, op):
    """Register an opcode whose argument is an absolute jump target."""
    def_op(name, op)
    hasjabs.append(op)
+
# Instruction opcodes for compiled code
# Opcodes below HAVE_ARGUMENT (90) take no operand; gaps in the
# numbering are opcode values unused by this interpreter version.

# Stack manipulation
def_op('STOP_CODE', 0)
def_op('POP_TOP', 1)
def_op('ROT_TWO', 2)
def_op('ROT_THREE', 3)
def_op('DUP_TOP', 4)
def_op('ROT_FOUR', 5)

def_op('NOP', 9)
# Unary operators
def_op('UNARY_POSITIVE', 10)
def_op('UNARY_NEGATIVE', 11)
def_op('UNARY_NOT', 12)
def_op('UNARY_CONVERT', 13)

def_op('UNARY_INVERT', 15)

def_op('LIST_APPEND', 18)
# Binary operators
def_op('BINARY_POWER', 19)

def_op('BINARY_MULTIPLY', 20)
def_op('BINARY_DIVIDE', 21)
def_op('BINARY_MODULO', 22)
def_op('BINARY_ADD', 23)
def_op('BINARY_SUBTRACT', 24)
def_op('BINARY_SUBSCR', 25)
def_op('BINARY_FLOOR_DIVIDE', 26)
def_op('BINARY_TRUE_DIVIDE', 27)
def_op('INPLACE_FLOOR_DIVIDE', 28)
def_op('INPLACE_TRUE_DIVIDE', 29)

# Slice opcodes: +0/+1/+2/+3 encode which of the bounds are on the stack
def_op('SLICE+0', 30)
def_op('SLICE+1', 31)
def_op('SLICE+2', 32)
def_op('SLICE+3', 33)

def_op('STORE_SLICE+0', 40)
def_op('STORE_SLICE+1', 41)
def_op('STORE_SLICE+2', 42)
def_op('STORE_SLICE+3', 43)

def_op('DELETE_SLICE+0', 50)
def_op('DELETE_SLICE+1', 51)
def_op('DELETE_SLICE+2', 52)
def_op('DELETE_SLICE+3', 53)

# In-place (augmented assignment) operators
def_op('INPLACE_ADD', 55)
def_op('INPLACE_SUBTRACT', 56)
def_op('INPLACE_MULTIPLY', 57)
def_op('INPLACE_DIVIDE', 58)
def_op('INPLACE_MODULO', 59)
def_op('STORE_SUBSCR', 60)
def_op('DELETE_SUBSCR', 61)

def_op('BINARY_LSHIFT', 62)
def_op('BINARY_RSHIFT', 63)
def_op('BINARY_AND', 64)
def_op('BINARY_XOR', 65)
def_op('BINARY_OR', 66)
def_op('INPLACE_POWER', 67)
def_op('GET_ITER', 68)

# Printing and miscellaneous statements
def_op('PRINT_EXPR', 70)
def_op('PRINT_ITEM', 71)
def_op('PRINT_NEWLINE', 72)
def_op('PRINT_ITEM_TO', 73)
def_op('PRINT_NEWLINE_TO', 74)
def_op('INPLACE_LSHIFT', 75)
def_op('INPLACE_RSHIFT', 76)
def_op('INPLACE_AND', 77)
def_op('INPLACE_XOR', 78)
def_op('INPLACE_OR', 79)
def_op('BREAK_LOOP', 80)

def_op('LOAD_LOCALS', 82)
def_op('RETURN_VALUE', 83)
def_op('IMPORT_STAR', 84)
def_op('EXEC_STMT', 85)
def_op('YIELD_VALUE', 86)

def_op('POP_BLOCK', 87)
def_op('END_FINALLY', 88)
def_op('BUILD_CLASS', 89)

HAVE_ARGUMENT = 90              # Opcodes from here have an argument:

name_op('STORE_NAME', 90)       # Index in name list
name_op('DELETE_NAME', 91)      # ""
def_op('UNPACK_SEQUENCE', 92)   # Number of tuple items
jrel_op('FOR_ITER', 93)

name_op('STORE_ATTR', 95)       # Index in name list
name_op('DELETE_ATTR', 96)      # ""
name_op('STORE_GLOBAL', 97)     # ""
name_op('DELETE_GLOBAL', 98)    # ""
def_op('DUP_TOPX', 99)          # number of items to duplicate
def_op('LOAD_CONST', 100)       # Index in const list
hasconst.append(100)
name_op('LOAD_NAME', 101)       # Index in name list
def_op('BUILD_TUPLE', 102)      # Number of tuple items
def_op('BUILD_LIST', 103)       # Number of list items
def_op('BUILD_MAP', 104)        # Always zero for now
name_op('LOAD_ATTR', 105)       # Index in name list
def_op('COMPARE_OP', 106)       # Comparison operator
hascompare.append(106)
name_op('IMPORT_NAME', 107)     # Index in name list
name_op('IMPORT_FROM', 108)     # Index in name list

jrel_op('JUMP_FORWARD', 110)    # Number of bytes to skip
jrel_op('JUMP_IF_FALSE', 111)   # ""
jrel_op('JUMP_IF_TRUE', 112)    # ""
jabs_op('JUMP_ABSOLUTE', 113)   # Target byte offset from beginning of code

name_op('LOAD_GLOBAL', 116)     # Index in name list

jabs_op('CONTINUE_LOOP', 119)   # Target address
jrel_op('SETUP_LOOP', 120)      # Distance to target address
jrel_op('SETUP_EXCEPT', 121)    # ""
jrel_op('SETUP_FINALLY', 122)   # ""

def_op('LOAD_FAST', 124)        # Local variable number
haslocal.append(124)
def_op('STORE_FAST', 125)       # Local variable number
haslocal.append(125)
def_op('DELETE_FAST', 126)      # Local variable number
haslocal.append(126)

def_op('RAISE_VARARGS', 130)    # Number of raise arguments (1, 2, or 3)
def_op('CALL_FUNCTION', 131)    # #args + (#kwargs << 8)
def_op('MAKE_FUNCTION', 132)    # Number of args with default values
def_op('BUILD_SLICE', 133)      # Number of items

# Closure support
def_op('MAKE_CLOSURE', 134)
def_op('LOAD_CLOSURE', 135)
hasfree.append(135)
def_op('LOAD_DEREF', 136)
hasfree.append(136)
def_op('STORE_DEREF', 137)
hasfree.append(137)

def_op('CALL_FUNCTION_VAR', 140)     # #args + (#kwargs << 8)
def_op('CALL_FUNCTION_KW', 141)      # #args + (#kwargs << 8)
def_op('CALL_FUNCTION_VAR_KW', 142)  # #args + (#kwargs << 8)

def_op('EXTENDED_ARG', 143)
EXTENDED_ARG = 143

# The helpers are registration-time only; keep the module namespace clean.
del def_op, name_op, jrel_op, jabs_op
diff --git a/depot_tools/release/win/python_24/Lib/optparse.py b/depot_tools/release/win/python_24/Lib/optparse.py
new file mode 100644
index 0000000..4a3d3a8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/optparse.py
@@ -0,0 +1,1567 @@
+"""optparse - a powerful, extensible, and easy-to-use option parser.
+
+By Greg Ward <gward@python.net>
+
+Originally distributed as Optik; see http://optik.sourceforge.net/ .
+
+If you have problems with this module, please do not file bugs,
+patches, or feature requests with Python; instead, use Optik's
+SourceForge project page:
+  http://sourceforge.net/projects/optik
+
+For support, use the optik-users@lists.sourceforge.net mailing list
+(http://lists.sourceforge.net/lists/listinfo/optik-users).
+"""
+
+# Python developers: please do not make changes to this file, since
+# it is automatically generated from the Optik source code.
+
+__version__ = "1.5a2"
+
+__all__ = ['Option',
+           'SUPPRESS_HELP',
+           'SUPPRESS_USAGE',
+           'Values',
+           'OptionContainer',
+           'OptionGroup',
+           'OptionParser',
+           'HelpFormatter',
+           'IndentedHelpFormatter',
+           'TitledHelpFormatter',
+           'OptParseError',
+           'OptionError',
+           'OptionConflictError',
+           'OptionValueError',
+           'BadOptionError']
+
+__copyright__ = """
+Copyright (c) 2001-2004 Gregory P. Ward.  All rights reserved.
+Copyright (c) 2002-2004 Python Software Foundation.  All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+  * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above copyright
+    notice, this list of conditions and the following disclaimer in the
+    documentation and/or other materials provided with the distribution.
+
+  * Neither the name of the author nor the names of its
+    contributors may be used to endorse or promote products derived from
+    this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
+IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""
+
+import sys, os
+import types
+import textwrap
+from gettext import gettext as _
+
+def _repr(self):
+    return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self)
+
+
+# This file was generated from:
+#   Id: option_parser.py 421 2004-10-26 00:45:16Z greg
+#   Id: option.py 422 2004-10-26 00:53:47Z greg
+#   Id: help.py 367 2004-07-24 23:21:21Z gward
+#   Id: errors.py 367 2004-07-24 23:21:21Z gward
+
class OptParseError (Exception):
    """Root of the optparse exception hierarchy; carries a message string."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg


class OptionError (OptParseError):
    """
    Raised if an Option instance is created with invalid or
    inconsistent arguments.
    """

    def __init__(self, msg, option):
        self.msg = msg
        self.option_id = str(option)

    def __str__(self):
        if not self.option_id:
            return self.msg
        return "option %s: %s" % (self.option_id, self.msg)
+
class OptionConflictError (OptionError):
    """
    Raised if conflicting options (duplicate option strings) are added
    to an OptionParser.
    """

class OptionValueError (OptParseError):
    """
    Raised if an invalid option value is encountered on the command
    line.
    """

class BadOptionError (OptParseError):
    """
    Raised if an invalid or ambiguous option is seen on the command-line.
    """
+
+
+class HelpFormatter:
+
+    """
+    Abstract base class for formatting option help.  OptionParser
+    instances should use one of the HelpFormatter subclasses for
+    formatting help; by default IndentedHelpFormatter is used.
+
+    Instance attributes:
+      parser : OptionParser
+        the controlling OptionParser instance
+      indent_increment : int
+        the number of columns to indent per nesting level
+      max_help_position : int
+        the maximum starting column for option help text
+      help_position : int
+        the calculated starting column for option help text;
+        initially the same as the maximum
+      width : int
+        total number of columns for output (pass None to constructor for
+        this value to be taken from the $COLUMNS environment variable)
+      level : int
+        current indentation level
+      current_indent : int
+        current indentation level (in columns)
+      help_width : int
+        number of columns available for option help text (calculated)
+      default_tag : str
+        text to replace with each option's default value, "%default"
+        by default.  Set to false value to disable default value expansion.
+      option_strings : { Option : str }
+        maps Option instances to the snippet of help text explaining
+        the syntax of that option, e.g. "-h, --help" or
+        "-fFILE, --file=FILE"
+      _short_opt_fmt : str
+        format string controlling how short options with values are
+        printed in help text.  Must be either "%s%s" ("-fFILE") or
+        "%s %s" ("-f FILE"), because those are the two syntaxes that
+        Optik supports.
+      _long_opt_fmt : str
+        similar but for long options; must be either "%s %s" ("--file FILE")
+        or "%s=%s" ("--file=FILE").
+    """
+
+    NO_DEFAULT_VALUE = "none"
+
+    def __init__(self,
+                 indent_increment,
+                 max_help_position,
+                 width,
+                 short_first):
+        self.parser = None
+        self.indent_increment = indent_increment
+        self.help_position = self.max_help_position = max_help_position
+        if width is None:
+            try:
+                width = int(os.environ['COLUMNS'])
+            except (KeyError, ValueError):
+                width = 80
+            width -= 2
+        self.width = width
+        self.current_indent = 0
+        self.level = 0
+        self.help_width = None          # computed later
+        self.short_first = short_first
+        self.default_tag = "%default"
+        self.option_strings = {}
+        self._short_opt_fmt = "%s %s"
+        self._long_opt_fmt = "%s=%s"
+
    def set_parser(self, parser):
        # Attach the controlling parser; expand_default() reads
        # parser.defaults through this reference.
        self.parser = parser
+
+    def set_short_opt_delimiter(self, delim):
+        if delim not in ("", " "):
+            raise ValueError(
+                "invalid metavar delimiter for short options: %r" % delim)
+        self._short_opt_fmt = "%s" + delim + "%s"
+
+    def set_long_opt_delimiter(self, delim):
+        if delim not in ("=", " "):
+            raise ValueError(
+                "invalid metavar delimiter for long options: %r" % delim)
+        self._long_opt_fmt = "%s" + delim + "%s"
+
    def indent(self):
        # Push one indentation level (indent_increment columns deeper).
        self.current_indent += self.indent_increment
        self.level += 1
+
    def dedent(self):
        # Pop one indentation level; indent()/dedent() calls must balance.
        self.current_indent -= self.indent_increment
        assert self.current_indent >= 0, "Indent decreased below 0."
        self.level -= 1
+
    def format_usage(self, usage):
        # Abstract: concrete formatters must render the usage line.
        raise NotImplementedError, "subclasses must implement"
+
    def format_heading(self, heading):
        # Abstract: concrete formatters must render section headings.
        raise NotImplementedError, "subclasses must implement"
+
+    def format_description(self, description):
+        if not description:
+            return ""
+        desc_width = self.width - self.current_indent
+        indent = " "*self.current_indent
+        return textwrap.fill(description,
+                             desc_width,
+                             initial_indent=indent,
+                             subsequent_indent=indent) + "\n"
+
+    def expand_default(self, option):
+        if self.parser is None or not self.default_tag:
+            return option.help
+
+        default_value = self.parser.defaults.get(option.dest)
+        if default_value is NO_DEFAULT or default_value is None:
+            default_value = self.NO_DEFAULT_VALUE
+
+        return option.help.replace(self.default_tag, str(default_value))
+
    def format_option(self, option):
        """Render one option's help entry (option strings plus wrapped
        help text) as a string ending in a newline."""
        # The help for each option consists of two parts:
        #   * the opt strings and metavars
        #     eg. ("-x", or "-fFILENAME, --file=FILENAME")
        #   * the user-supplied help string
        #     eg. ("turn on expert mode", "read data from FILENAME")
        #
        # If possible, we write both of these on the same line:
        #   -x      turn on expert mode
        #
        # But if the opt string list is too long, we put the help
        # string on a second line, indented to the same column it would
        # start in if it fit on the first line.
        #   -fFILENAME, --file=FILENAME
        #           read data from FILENAME
        result = []
        opts = self.option_strings[option]
        # "- 2" accounts for the two-space gap before the help column.
        opt_width = self.help_position - self.current_indent - 2
        if len(opts) > opt_width:
            opts = "%*s%s\n" % (self.current_indent, "", opts)
            indent_first = self.help_position
        else:                       # start help on same line as opts
            opts = "%*s%-*s  " % (self.current_indent, "", opt_width, opts)
            indent_first = 0
        result.append(opts)
        if option.help:
            help_text = self.expand_default(option)
            help_lines = textwrap.wrap(help_text, self.help_width)
            result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
            result.extend(["%*s%s\n" % (self.help_position, "", line)
                           for line in help_lines[1:]])
        elif opts[-1] != "\n":
            result.append("\n")
        return "".join(result)
+
+    def store_option_strings(self, parser):
+        self.indent()
+        max_len = 0
+        for opt in parser.option_list:
+            strings = self.format_option_strings(opt)
+            self.option_strings[opt] = strings
+            max_len = max(max_len, len(strings) + self.current_indent)
+        self.indent()
+        for group in parser.option_groups:
+            for opt in group.option_list:
+                strings = self.format_option_strings(opt)
+                self.option_strings[opt] = strings
+                max_len = max(max_len, len(strings) + self.current_indent)
+        self.dedent()
+        self.dedent()
+        self.help_position = min(max_len + 2, self.max_help_position)
+        self.help_width = self.width - self.help_position
+
+    def format_option_strings(self, option):
+        """Return a comma-separated list of option strings & metavariables."""
+        if option.takes_value():
+            metavar = option.metavar or option.dest.upper()
+            short_opts = [self._short_opt_fmt % (sopt, metavar)
+                          for sopt in option._short_opts]
+            long_opts = [self._long_opt_fmt % (lopt, metavar)
+                         for lopt in option._long_opts]
+        else:
+            short_opts = option._short_opts
+            long_opts = option._long_opts
+
+        if self.short_first:
+            opts = short_opts + long_opts
+        else:
+            opts = long_opts + short_opts
+
+        return ", ".join(opts)
+
class IndentedHelpFormatter (HelpFormatter):
    """Format help with indented section bodies.
    """

    def __init__(self,
                 indent_increment=2,
                 max_help_position=24,
                 width=None,
                 short_first=1):
        HelpFormatter.__init__(
            self, indent_increment, max_help_position, width, short_first)

    def format_usage(self, usage):
        # The "usage:" prefix goes through gettext for localization.
        return _("usage: %s\n") % usage

    def format_heading(self, heading):
        # Heading is indented to the current level and ends with ":".
        return " " * self.current_indent + heading + ":\n"
+
+
class TitledHelpFormatter (HelpFormatter):
    """Format help with underlined section headers.
    """

    def __init__(self,
                 indent_increment=0,
                 max_help_position=24,
                 width=None,
                 short_first=0):
        HelpFormatter.__init__ (
            self, indent_increment, max_help_position, width, short_first)

    def format_usage(self, usage):
        heading = self.format_heading(_("Usage"))
        return "%s  %s\n" % (heading, usage)

    def format_heading(self, heading):
        # Underline with "=" at level 0 and "-" at level 1.
        underline = "=-"[self.level] * len(heading)
        return heading + "\n" + underline + "\n"
+
+
# Maps an option type name to (conversion function, human-readable type
# description); the description (run through _() for translation) is used
# by check_builtin() in its error message.
_builtin_cvt = { "int" : (int, _("integer")),
                 "long" : (long, _("long integer")),
                 "float" : (float, _("floating-point")),
                 "complex" : (complex, _("complex")) }
+
def check_builtin(option, opt, value):
    """Convert 'value' with the builtin converter registered for
    option.type in _builtin_cvt; raise OptionValueError if the string
    cannot be converted."""
    cvt, what = _builtin_cvt[option.type]
    try:
        return cvt(value)
    except ValueError:
        msg = _("option %s: invalid %s value: %r") % (opt, what, value)
        raise OptionValueError(msg)
+
def check_choice(option, opt, value):
    """Return 'value' unchanged if it is one of option.choices,
    else raise OptionValueError listing the allowed choices."""
    if value not in option.choices:
        choices = ", ".join(map(repr, option.choices))
        raise OptionValueError(
            _("option %s: invalid choice: %r (choose from %s)")
            % (opt, value, choices))
    return value
+
# Not supplying a default is different from a default of None,
# so we need an explicit "not supplied" value.
# (Always compared by identity, e.g. "default is not NO_DEFAULT".)
NO_DEFAULT = ("NO", "DEFAULT")
+
+
class Option:
    """
    Instance attributes:
      _short_opts : [string]
      _long_opts : [string]

      action : string
      type : string
      dest : string
      default : any
      nargs : int
      const : any
      choices : [string]
      callback : function
      callback_args : (any*)
      callback_kwargs : { string : any }
      help : string
      metavar : string
    """

    # The list of instance attributes that may be set through
    # keyword args to the constructor.
    ATTRS = ['action',
             'type',
             'dest',
             'default',
             'nargs',
             'const',
             'choices',
             'callback',
             'callback_args',
             'callback_kwargs',
             'help',
             'metavar']

    # The set of actions allowed by option parsers.  Explicitly listed
    # here so the constructor can validate its arguments.
    ACTIONS = ("store",
               "store_const",
               "store_true",
               "store_false",
               "append",
               "count",
               "callback",
               "help",
               "version")

    # The set of actions that involve storing a value somewhere;
    # also listed just for constructor argument validation.  (If
    # the action is one of these, there must be a destination.)
    STORE_ACTIONS = ("store",
                     "store_const",
                     "store_true",
                     "store_false",
                     "append",
                     "count")

    # The set of actions for which it makes sense to supply a value
    # type, ie. which may consume an argument from the command line.
    TYPED_ACTIONS = ("store",
                     "append",
                     "callback")

    # The set of actions which *require* a value type, ie. that
    # always consume an argument from the command line.
    ALWAYS_TYPED_ACTIONS = ("store",
                            "append")

    # The set of known types for option parsers.  Again, listed here for
    # constructor argument validation.
    TYPES = ("string", "int", "long", "float", "complex", "choice")

    # Dictionary of argument checking functions, which convert and
    # validate option arguments according to the option type.
    #
    # Signature of checking functions is:
    #   check(option : Option, opt : string, value : string) -> any
    # where
    #   option is the Option instance calling the checker
    #   opt is the actual option seen on the command-line
    #     (eg. "-a", "--file")
    #   value is the option argument seen on the command-line
    #
    # The return value should be in the appropriate Python type
    # for option.type -- eg. an integer if option.type == "int".
    #
    # If no checker is defined for a type, arguments will be
    # unchecked and remain strings.
    TYPE_CHECKER = { "int"    : check_builtin,
                     "long"   : check_builtin,
                     "float"  : check_builtin,
                     "complex": check_builtin,
                     "choice" : check_choice,
                   }


    # CHECK_METHODS is a list of unbound method objects; they are called
    # by the constructor, in order, after all attributes are
    # initialized.  The list is created and filled in later, after all
    # the methods are actually defined.  (I just put it here because I
    # like to define and document all class attributes in the same
    # place.)  Subclasses that add another _check_*() method should
    # define their own CHECK_METHODS list that adds their check method
    # to those from this class.
    CHECK_METHODS = None


    # -- Constructor/initialization methods ----------------------------

    def __init__(self, *opts, **attrs):
        """Create an Option from one or more option strings ('opts',
        eg. "-f", "--file") and keyword attributes drawn from ATTRS.
        Raises OptionError (via the CHECK_METHODS validators) if the
        resulting combination of attributes is inconsistent.
        """
        # Set _short_opts, _long_opts attrs from 'opts' tuple.
        # Have to be set now, in case no option strings are supplied.
        self._short_opts = []
        self._long_opts = []
        opts = self._check_opt_strings(opts)
        self._set_opt_strings(opts)

        # Set all other attrs (action, type, etc.) from 'attrs' dict
        self._set_attrs(attrs)

        # Check all the attributes we just set.  There are lots of
        # complicated interdependencies, but luckily they can be farmed
        # out to the _check_*() methods listed in CHECK_METHODS -- which
        # could be handy for subclasses!  The one thing these all share
        # is that they raise OptionError if they discover a problem.
        for checker in self.CHECK_METHODS:
            checker(self)

    def _check_opt_strings(self, opts):
        """Drop None entries from 'opts' and require that at least one
        option string remains."""
        # Filter out None because early versions of Optik had exactly
        # one short option and one long option, either of which
        # could be None.
        opts = filter(None, opts)
        if not opts:
            raise TypeError("at least one option string must be supplied")
        return opts

    def _set_opt_strings(self, opts):
        """Sort each option string into _short_opts ("-x") or _long_opts
        ("--long"), validating its form along the way."""
        for opt in opts:
            if len(opt) < 2:
                raise OptionError(
                    "invalid option string %r: "
                    "must be at least two characters long" % opt, self)
            elif len(opt) == 2:
                if not (opt[0] == "-" and opt[1] != "-"):
                    raise OptionError(
                        "invalid short option string %r: "
                        "must be of the form -x, (x any non-dash char)" % opt,
                        self)
                self._short_opts.append(opt)
            else:
                if not (opt[0:2] == "--" and opt[2] != "-"):
                    raise OptionError(
                        "invalid long option string %r: "
                        "must start with --, followed by non-dash" % opt,
                        self)
                self._long_opts.append(opt)

    def _set_attrs(self, attrs):
        """Initialize every attribute named in ATTRS from the keyword
        dict 'attrs'.  Unsupplied attributes become None, except
        'default' which becomes the NO_DEFAULT sentinel.  Any leftover
        keywords are reported as an error."""
        for attr in self.ATTRS:
            if attrs.has_key(attr):
                setattr(self, attr, attrs[attr])
                del attrs[attr]
            else:
                if attr == 'default':
                    setattr(self, attr, NO_DEFAULT)
                else:
                    setattr(self, attr, None)
        if attrs:
            raise OptionError(
                "invalid keyword arguments: %s" % ", ".join(attrs.keys()),
                self)


    # -- Constructor validation methods --------------------------------

    def _check_action(self):
        """Default the action to "store" and reject unknown actions."""
        if self.action is None:
            self.action = "store"
        elif self.action not in self.ACTIONS:
            raise OptionError("invalid action: %r" % self.action, self)

    def _check_type(self):
        """Infer or normalize self.type, and reject type/action
        combinations that make no sense."""
        if self.type is None:
            if self.action in self.ALWAYS_TYPED_ACTIONS:
                if self.choices is not None:
                    # The "choices" attribute implies "choice" type.
                    self.type = "choice"
                else:
                    # No type given?  "string" is the most sensible default.
                    self.type = "string"
        else:
            # Allow type objects as an alternative to their names.
            if type(self.type) is type:
                self.type = self.type.__name__
            if self.type == "str":
                self.type = "string"

            if self.type not in self.TYPES:
                raise OptionError("invalid option type: %r" % self.type, self)
            if self.action not in self.TYPED_ACTIONS:
                raise OptionError(
                    "must not supply a type for action %r" % self.action, self)

    def _check_choice(self):
        """Require a list/tuple of choices for type "choice", and forbid
        choices for every other type."""
        if self.type == "choice":
            if self.choices is None:
                raise OptionError(
                    "must supply a list of choices for type 'choice'", self)
            elif type(self.choices) not in (types.TupleType, types.ListType):
                raise OptionError(
                    "choices must be a list of strings ('%s' supplied)"
                    % str(type(self.choices)).split("'")[1], self)
        elif self.choices is not None:
            raise OptionError(
                "must not supply choices for type %r" % self.type, self)

    def _check_dest(self):
        """Derive self.dest from the option strings when it is needed
        but was not supplied."""
        # No destination given, and we need one for this action.  The
        # self.type check is for callbacks that take a value.
        takes_value = (self.action in self.STORE_ACTIONS or
                       self.type is not None)
        if self.dest is None and takes_value:

            # Glean a destination from the first long option string,
            # or from the first short option string if no long options.
            if self._long_opts:
                # eg. "--foo-bar" -> "foo_bar"
                self.dest = self._long_opts[0][2:].replace('-', '_')
            else:
                self.dest = self._short_opts[0][1]

    def _check_const(self):
        """Only "store_const" may carry a 'const' attribute."""
        if self.action != "store_const" and self.const is not None:
            raise OptionError(
                "'const' must not be supplied for action %r" % self.action,
                self)

    def _check_nargs(self):
        """Default nargs to 1 for typed actions; forbid it elsewhere."""
        if self.action in self.TYPED_ACTIONS:
            if self.nargs is None:
                self.nargs = 1
        elif self.nargs is not None:
            raise OptionError(
                "'nargs' must not be supplied for action %r" % self.action,
                self)

    def _check_callback(self):
        """Validate callback/callback_args/callback_kwargs: required and
        well-typed for action "callback", forbidden otherwise."""
        if self.action == "callback":
            if not callable(self.callback):
                raise OptionError(
                    "callback not callable: %r" % self.callback, self)
            if (self.callback_args is not None and
                type(self.callback_args) is not types.TupleType):
                raise OptionError(
                    "callback_args, if supplied, must be a tuple: not %r"
                    % self.callback_args, self)
            if (self.callback_kwargs is not None and
                type(self.callback_kwargs) is not types.DictType):
                raise OptionError(
                    "callback_kwargs, if supplied, must be a dict: not %r"
                    % self.callback_kwargs, self)
        else:
            if self.callback is not None:
                raise OptionError(
                    "callback supplied (%r) for non-callback option"
                    % self.callback, self)
            if self.callback_args is not None:
                raise OptionError(
                    "callback_args supplied for non-callback option", self)
            if self.callback_kwargs is not None:
                raise OptionError(
                    "callback_kwargs supplied for non-callback option", self)


    CHECK_METHODS = [_check_action,
                     _check_type,
                     _check_choice,
                     _check_dest,
                     _check_const,
                     _check_nargs,
                     _check_callback]


    # -- Miscellaneous methods -----------------------------------------

    def __str__(self):
        # eg. "-f/--file" -- all option strings joined by "/".
        return "/".join(self._short_opts + self._long_opts)

    __repr__ = _repr

    def takes_value(self):
        """True if this option consumes an argument (has a type)."""
        return self.type is not None

    def get_opt_string(self):
        """Return the canonical option string: the first long option if
        there is one, else the first short option."""
        if self._long_opts:
            return self._long_opts[0]
        else:
            return self._short_opts[0]


    # -- Processing methods --------------------------------------------

    def check_value(self, opt, value):
        """Run 'value' through the TYPE_CHECKER for this option's type;
        unknown types pass through unchecked."""
        checker = self.TYPE_CHECKER.get(self.type)
        if checker is None:
            return value
        else:
            return checker(self, opt, value)

    def convert_value(self, opt, value):
        """Convert a single value (nargs == 1) or a tuple of values.
        Returns None (implicitly) when 'value' is None."""
        if value is not None:
            if self.nargs == 1:
                return self.check_value(opt, value)
            else:
                return tuple([self.check_value(opt, v) for v in value])

    def process(self, opt, value, values, parser):
        """Convert 'value' and carry out this option's action.  Called
        by the parser when the option is seen on the command line."""

        # First, convert the value(s) to the right type.  Howl if any
        # value(s) are bogus.
        value = self.convert_value(opt, value)

        # And then take whatever action is expected of us.
        # This is a separate method to make life easier for
        # subclasses to add new actions.
        return self.take_action(
            self.action, self.dest, opt, value, values, parser)

    def take_action(self, action, dest, opt, value, values, parser):
        """Dispatch on 'action', updating 'values' (or invoking the
        callback / help / version machinery).  Returns 1 on success;
        raises RuntimeError for an unknown action."""
        if action == "store":
            setattr(values, dest, value)
        elif action == "store_const":
            setattr(values, dest, self.const)
        elif action == "store_true":
            setattr(values, dest, True)
        elif action == "store_false":
            setattr(values, dest, False)
        elif action == "append":
            values.ensure_value(dest, []).append(value)
        elif action == "count":
            setattr(values, dest, values.ensure_value(dest, 0) + 1)
        elif action == "callback":
            args = self.callback_args or ()
            kwargs = self.callback_kwargs or {}
            self.callback(self, opt, value, parser, *args, **kwargs)
        elif action == "help":
            parser.print_help()
            parser.exit()
        elif action == "version":
            parser.print_version()
            parser.exit()
        else:
            raise RuntimeError, "unknown action %r" % self.action

        return 1
+
+# class Option
+
+
# Sentinel values: an option whose 'help' is SUPPRESS_HELP is omitted
# from help output (compared by identity in format_option_help).
# Built by concatenation, presumably to avoid identity collisions with
# ordinary interned string literals -- TODO confirm.
SUPPRESS_HELP = "SUPPRESS"+"HELP"
SUPPRESS_USAGE = "SUPPRESS"+"USAGE"

# For compatibility with Python 2.2
try:
    True, False
except NameError:
    (True, False) = (1, 0)
try:
    basestring
except NameError:
    basestring = (str, unicode)
+
+
class Values:
    """Bag of option values; attributes are created dynamically from
    option destinations and default dictionaries."""

    def __init__(self, defaults=None):
        if defaults:
            for (name, value) in defaults.items():
                setattr(self, name, value)

    def __str__(self):
        return str(self.__dict__)

    __repr__ = _repr

    def __eq__(self, other):
        # Equal to another Values with the same attributes, or to a
        # plain dict with the same contents; everything else is unequal.
        if isinstance(other, Values):
            other_attrs = other.__dict__
        elif isinstance(other, dict):
            other_attrs = other
        else:
            return False
        return self.__dict__ == other_attrs

    def __ne__(self, other):
        return not (self == other)

    def _update_careful(self, dict):
        """
        Update the option values from an arbitrary dictionary, but only
        use keys from dict that already have a corresponding attribute
        in self.  Any keys in dict without a corresponding attribute
        are silently ignored.
        """
        for attr in dir(self):
            if attr in dict:
                new_val = dict[attr]
                if new_val is not None:
                    setattr(self, attr, new_val)

    def _update_loose(self, dict):
        """
        Update the option values from an arbitrary dictionary,
        using all keys from the dictionary regardless of whether
        they have a corresponding attribute in self or not.
        """
        self.__dict__.update(dict)

    def _update(self, dict, mode):
        # Dispatch to the requested update policy.
        if mode == "careful":
            self._update_careful(dict)
        elif mode == "loose":
            self._update_loose(dict)
        else:
            raise ValueError("invalid update mode: %r" % mode)

    def read_module(self, modname, mode="careful"):
        """Import 'modname' and update values from its module globals."""
        __import__(modname)
        self._update(vars(sys.modules[modname]), mode)

    def read_file(self, filename, mode="careful"):
        """Exec 'filename' and update values from the names it defines."""
        file_vars = {}
        execfile(filename, file_vars)
        self._update(file_vars, mode)

    def ensure_value(self, attr, value):
        """Set 'attr' to 'value' if it is missing or None, then return
        the (possibly pre-existing) attribute."""
        if getattr(self, attr, None) is None:
            setattr(self, attr, value)
        return getattr(self, attr)
+
+
class OptionContainer:

    """
    Abstract base class.

    Class attributes:
      standard_option_list : [Option]
        list of standard options that will be accepted by all instances
        of this parser class (intended to be overridden by subclasses).

    Instance attributes:
      option_list : [Option]
        the list of Option objects contained by this OptionContainer
      _short_opt : { string : Option }
        dictionary mapping short option strings, eg. "-f" or "-X",
        to the Option instances that implement them.  If an Option
        has multiple short option strings, it will appears in this
        dictionary multiple times. [1]
      _long_opt : { string : Option }
        dictionary mapping long option strings, eg. "--file" or
        "--exclude", to the Option instances that implement them.
        Again, a given Option can occur multiple times in this
        dictionary. [1]
      defaults : { string : any }
        dictionary mapping option destination names to default
        values for each destination [1]

    [1] These mappings are common to (shared by) all components of the
        controlling OptionParser, where they are initially created.

    """

    def __init__(self, option_class, conflict_handler, description):
        # Initialize the option list and related data structures.
        # This method must be provided by subclasses, and it must
        # initialize at least the following instance attributes:
        # option_list, _short_opt, _long_opt, defaults.
        self._create_option_list()

        self.option_class = option_class
        self.set_conflict_handler(conflict_handler)
        self.set_description(description)

    def _create_option_mappings(self):
        # For use by OptionParser constructor -- create the master
        # option mappings used by this OptionParser and all
        # OptionGroups that it owns.
        self._short_opt = {}            # single letter -> Option instance
        self._long_opt = {}             # long option -> Option instance
        self.defaults = {}              # maps option dest -> default value


    def _share_option_mappings(self, parser):
        # For use by OptionGroup constructor -- use shared option
        # mappings from the OptionParser that owns this OptionGroup.
        self._short_opt = parser._short_opt
        self._long_opt = parser._long_opt
        self.defaults = parser.defaults

    def set_conflict_handler(self, handler):
        """Set the policy for conflicting option strings: "error" raises,
        "resolve" removes the older option string."""
        if handler not in ("error", "resolve"):
            raise ValueError, "invalid conflict_resolution value %r" % handler
        self.conflict_handler = handler

    def set_description(self, description):
        self.description = description

    def get_description(self):
        return self.description


    # -- Option-adding methods -----------------------------------------

    def _check_conflict(self, option):
        """Apply the conflict handler to any of option's option strings
        that are already registered in the shared mappings."""
        conflict_opts = []
        for opt in option._short_opts:
            if self._short_opt.has_key(opt):
                conflict_opts.append((opt, self._short_opt[opt]))
        for opt in option._long_opts:
            if self._long_opt.has_key(opt):
                conflict_opts.append((opt, self._long_opt[opt]))

        if conflict_opts:
            handler = self.conflict_handler
            if handler == "error":
                raise OptionConflictError(
                    "conflicting option string(s): %s"
                    % ", ".join([co[0] for co in conflict_opts]),
                    option)
            elif handler == "resolve":
                # Strip the conflicting strings from the older option;
                # drop that option entirely once it has no strings left.
                for (opt, c_option) in conflict_opts:
                    if opt.startswith("--"):
                        c_option._long_opts.remove(opt)
                        del self._long_opt[opt]
                    else:
                        c_option._short_opts.remove(opt)
                        del self._short_opt[opt]
                    if not (c_option._short_opts or c_option._long_opts):
                        c_option.container.option_list.remove(c_option)

    def add_option(self, *args, **kwargs):
        """add_option(Option)
           add_option(opt_str, ..., kwarg=val, ...)
        """
        if type(args[0]) is types.StringType:
            option = self.option_class(*args, **kwargs)
        elif len(args) == 1 and not kwargs:
            option = args[0]
            if not isinstance(option, Option):
                raise TypeError, "not an Option instance: %r" % option
        else:
            raise TypeError, "invalid arguments"

        self._check_conflict(option)

        self.option_list.append(option)
        option.container = self
        for opt in option._short_opts:
            self._short_opt[opt] = option
        for opt in option._long_opts:
            self._long_opt[opt] = option

        if option.dest is not None:     # option has a dest, we need a default
            if option.default is not NO_DEFAULT:
                self.defaults[option.dest] = option.default
            elif not self.defaults.has_key(option.dest):
                # Don't clobber a default already set for this dest
                # (eg. by another option sharing it).
                self.defaults[option.dest] = None

        return option

    def add_options(self, option_list):
        """Add a sequence of pre-built Option instances."""
        for option in option_list:
            self.add_option(option)

    # -- Option query/removal methods ----------------------------------

    def get_option(self, opt_str):
        """Return the Option for 'opt_str' (short or long), or None."""
        return (self._short_opt.get(opt_str) or
                self._long_opt.get(opt_str))

    def has_option(self, opt_str):
        """True if 'opt_str' (short or long) is registered here."""
        return (self._short_opt.has_key(opt_str) or
                self._long_opt.has_key(opt_str))

    def remove_option(self, opt_str):
        """Remove the option matching 'opt_str' -- all of its option
        strings and its option_list entry.  Raises ValueError if no
        option matches."""
        option = self._short_opt.get(opt_str)
        if option is None:
            option = self._long_opt.get(opt_str)
        if option is None:
            raise ValueError("no such option %r" % opt_str)

        for opt in option._short_opts:
            del self._short_opt[opt]
        for opt in option._long_opts:
            del self._long_opt[opt]
        option.container.option_list.remove(option)


    # -- Help-formatting methods ---------------------------------------

    def format_option_help(self, formatter):
        """Format help for every option except those with SUPPRESS_HELP."""
        if not self.option_list:
            return ""
        result = []
        for option in self.option_list:
            if not option.help is SUPPRESS_HELP:
                result.append(formatter.format_option(option))
        return "".join(result)

    def format_description(self, formatter):
        return formatter.format_description(self.get_description())

    def format_help(self, formatter):
        """Combine the description (if any) and the option help."""
        result = []
        if self.description:
            result.append(self.format_description(formatter))
        if self.option_list:
            result.append(self.format_option_help(formatter))
        return "\n".join(result)
+
+
class OptionGroup (OptionContainer):
    """A titled group of options owned by an OptionParser; shares the
    parser's option mappings but keeps its own option_list."""

    def __init__(self, parser, title, description=None):
        # self.parser must be set before OptionContainer.__init__ runs,
        # because _create_option_list() reads it.
        self.parser = parser
        OptionContainer.__init__(
            self, parser.option_class, parser.conflict_handler, description)
        self.title = title

    def _create_option_list(self):
        self.option_list = []
        self._share_option_mappings(self.parser)

    def set_title(self, title):
        self.title = title

    # -- Help-formatting methods ---------------------------------------

    def format_help(self, formatter):
        """Render the group heading followed by its indented option help."""
        heading = formatter.format_heading(self.title)
        formatter.indent()
        body = OptionContainer.format_help(self, formatter)
        formatter.dedent()
        return heading + body
+
+
class OptionParser (OptionContainer):

    """
    Class attributes:
      standard_option_list : [Option]
        list of standard options that will be accepted by all instances
        of this parser class (intended to be overridden by subclasses).

    Instance attributes:
      usage : string
        a usage string for your program.  Before it is displayed
        to the user, "%prog" will be expanded to the name of
        your program (self.prog or os.path.basename(sys.argv[0])).
      prog : string
        the name of the current program (to override
        os.path.basename(sys.argv[0])).

      option_groups : [OptionGroup]
        list of option groups in this parser (option groups are
        irrelevant for parsing the command-line, but very useful
        for generating help)

      allow_interspersed_args : bool = true
        if true, positional arguments may be interspersed with options.
        Assuming -a and -b each take a single argument, the command-line
          -ablah foo bar -bboo baz
        will be interpreted the same as
          -ablah -bboo -- foo bar baz
        If this flag were false, that command line would be interpreted as
          -ablah -- foo bar -bboo baz
        -- ie. we stop processing options as soon as we see the first
        non-option argument.  (This is the tradition followed by
        Python's getopt module, Perl's Getopt::Std, and other argument-
        parsing libraries, but it is generally annoying to users.)

      process_default_values : bool = true
        if true, option default values are processed similarly to option
        values from the command line: that is, they are passed to the
        type-checking function for the option's type (as long as the
        default value is a string).  (This really only matters if you
        have defined custom types; see SF bug #955889.)  Set it to false
        to restore the behaviour of Optik 1.4.1 and earlier.

      rargs : [string]
        the argument list currently being parsed.  Only set when
        parse_args() is active, and continually trimmed down as
        we consume arguments.  Mainly there for the benefit of
        callback options.
      largs : [string]
        the list of leftover arguments that we have skipped while
        parsing options.  If allow_interspersed_args is false, this
        list is always empty.
      values : Values
        the set of option values currently being accumulated.  Only
        set when parse_args() is active.  Also mainly for callbacks.

    Because of the 'rargs', 'largs', and 'values' attributes,
    OptionParser is not thread-safe.  If, for some perverse reason, you
    need to parse command-line arguments simultaneously in different
    threads, use different OptionParser instances.

    """

    standard_option_list = []

    def __init__(self,
                 usage=None,
                 option_list=None,
                 option_class=Option,
                 version=None,
                 conflict_handler="error",
                 description=None,
                 formatter=None,
                 add_help_option=True,
                 prog=None):
        OptionContainer.__init__(
            self, option_class, conflict_handler, description)
        self.set_usage(usage)
        self.prog = prog
        self.version = version
        self.allow_interspersed_args = True
        self.process_default_values = True
        if formatter is None:
            formatter = IndentedHelpFormatter()
        self.formatter = formatter
        # Give the formatter a back-reference so it can query the parser
        # while formatting help.
        self.formatter.set_parser(self)

        # Populate the option list; initial sources are the
        # standard_option_list class attribute, the 'option_list'
        # argument, and (if applicable) the _add_version_option() and
        # _add_help_option() methods.
        self._populate_option_list(option_list,
                                   add_help=add_help_option)

        self._init_parsing_state()

    # -- Private methods -----------------------------------------------
    # (used by our or OptionContainer's constructor)

    def _create_option_list(self):
        self.option_list = []
        self.option_groups = []
        self._create_option_mappings()

    def _add_help_option(self):
        self.add_option("-h", "--help",
                        action="help",
                        help=_("show this help message and exit"))

    def _add_version_option(self):
        self.add_option("--version",
                        action="version",
                        help=_("show program's version number and exit"))

    def _populate_option_list(self, option_list, add_help=True):
        if self.standard_option_list:
            self.add_options(self.standard_option_list)
        if option_list:
            self.add_options(option_list)
        # --version is only offered when a version string was supplied.
        if self.version:
            self._add_version_option()
        if add_help:
            self._add_help_option()

    def _init_parsing_state(self):
        # These are set in parse_args() for the convenience of callbacks.
        self.rargs = None
        self.largs = None
        self.values = None


    # -- Simple modifier methods ---------------------------------------

    def set_usage(self, usage):
        if usage is None:
            self.usage = _("%prog [options]")
        elif usage is SUPPRESS_USAGE:
            self.usage = None
        # For backwards compatibility with Optik 1.3 and earlier.
        elif usage.startswith("usage:" + " "):
            # Strip the 7-character "usage: " prefix; the formatter adds
            # its own when printing.
            self.usage = usage[7:]
        else:
            self.usage = usage

    def enable_interspersed_args(self):
        self.allow_interspersed_args = True

    def disable_interspersed_args(self):
        self.allow_interspersed_args = False

    def set_process_default_values(self, process):
        self.process_default_values = process

    def set_default(self, dest, value):
        self.defaults[dest] = value

    def set_defaults(self, **kwargs):
        self.defaults.update(kwargs)

    def _get_all_options(self):
        # Options owned directly by the parser plus those in every group.
        options = self.option_list[:]
        for group in self.option_groups:
            options.extend(group.option_list)
        return options

    def get_default_values(self):
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return Values(self.defaults)

        defaults = self.defaults.copy()
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            # Only string defaults are run through the option's
            # type-checker; non-string defaults are used as-is.
            if isinstance(default, basestring):
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)

        return Values(defaults)


    # -- OptionGroup methods -------------------------------------------

    def add_option_group(self, *args, **kwargs):
        # XXX lots of overlap with OptionContainer.add_option()
        # Two call styles: add_option_group(title, ...) constructs a new
        # group; add_option_group(group) adopts an existing one.
        if type(args[0]) is types.StringType:
            group = OptionGroup(self, *args, **kwargs)
        elif len(args) == 1 and not kwargs:
            group = args[0]
            if not isinstance(group, OptionGroup):
                raise TypeError, "not an OptionGroup instance: %r" % group
            if group.parser is not self:
                raise ValueError, "invalid OptionGroup (wrong parser)"
        else:
            raise TypeError, "invalid arguments"

        self.option_groups.append(group)
        return group

    def get_option_group(self, opt_str):
        # Returns the group containing the option, or None if the option
        # is unknown or belongs directly to the parser.
        option = (self._short_opt.get(opt_str) or
                  self._long_opt.get(opt_str))
        if option and option.container is not self:
            return option.container
        return None


    # -- Option-parsing methods ----------------------------------------

    def _get_args(self, args):
        if args is None:
            return sys.argv[1:]
        else:
            return args[:]              # don't modify caller's list

    def parse_args(self, args=None, values=None):
        """
        parse_args(args : [string] = sys.argv[1:],
                   values : Values = None)
        -> (values : Values, args : [string])

        Parse the command-line options found in 'args' (default:
        sys.argv[1:]).  Any errors result in a call to 'error()', which
        by default prints the usage message to stderr and calls
        sys.exit() with an error message.  On success returns a pair
        (values, args) where 'values' is an Values instance (with all
        your option values) and 'args' is the list of arguments left
        over after parsing options.
        """
        rargs = self._get_args(args)
        if values is None:
            values = self.get_default_values()

        # Store the halves of the argument list as attributes for the
        # convenience of callbacks:
        #   rargs
        #     the rest of the command-line (the "r" stands for
        #     "remaining" or "right-hand")
        #   largs
        #     the leftover arguments -- ie. what's left after removing
        #     options and their arguments (the "l" stands for "leftover"
        #     or "left-hand")
        self.rargs = rargs
        self.largs = largs = []
        self.values = values

        try:
            # NOTE: _process_args() only ever returns None (all its
            # return statements are bare), so 'stop' is effectively
            # unused here.
            stop = self._process_args(largs, rargs, values)
        except (BadOptionError, OptionValueError), err:
            self.error(err.msg)

        args = largs + rargs
        return self.check_values(values, args)

    def check_values(self, values, args):
        """
        check_values(values : Values, args : [string])
        -> (values : Values, args : [string])

        Check that the supplied option values and leftover arguments are
        valid.  Returns the option values and leftover arguments
        (possibly adjusted, possibly completely new -- whatever you
        like).  Default implementation just returns the passed-in
        values; subclasses may override as desired.
        """
        return (values, args)

    def _process_args(self, largs, rargs, values):
        """_process_args(largs : [string],
                         rargs : [string],
                         values : Values)

        Process command-line arguments and populate 'values', consuming
        options and arguments from 'rargs'.  If 'allow_interspersed_args' is
        false, stop at the first non-option argument.  If true, accumulate any
        interspersed non-option arguments in 'largs'.
        """
        while rargs:
            arg = rargs[0]
            # We handle bare "--" explicitly, and bare "-" is handled by the
            # standard arg handler since the short arg case ensures that the
            # len of the opt string is greater than 1.
            if arg == "--":
                del rargs[0]
                return
            elif arg[0:2] == "--":
                # process a single long option (possibly with value(s))
                self._process_long_opt(rargs, values)
            elif arg[:1] == "-" and len(arg) > 1:
                # process a cluster of short options (possibly with
                # value(s) for the last one only)
                self._process_short_opts(rargs, values)
            elif self.allow_interspersed_args:
                largs.append(arg)
                del rargs[0]
            else:
                return                  # stop now, leave this arg in rargs

        # Say this is the original argument list:
        # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
        #                            ^
        # (we are about to process arg(i)).
        #
        # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
        # [arg0, ..., arg(i-1)] (any options and their arguments will have
        # been removed from largs).
        #
        # The while loop will usually consume 1 or more arguments per pass.
        # If it consumes 1 (eg. arg is an option that takes no arguments),
        # then after _process_arg() is done the situation is:
        #
        #   largs = subset of [arg0, ..., arg(i)]
        #   rargs = [arg(i+1), ..., arg(N-1)]
        #
        # If allow_interspersed_args is false, largs will always be
        # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
        # not a very interesting subset!

    def _match_long_opt(self, opt):
        """_match_long_opt(opt : string) -> string

        Determine which long option string 'opt' matches, ie. which one
        it is an unambiguous abbreviation for.  Raises BadOptionError if
        'opt' doesn't unambiguously match any long option string.
        """
        return _match_abbrev(opt, self._long_opt)

    def _process_long_opt(self, rargs, values):
        arg = rargs.pop(0)

        # Value explicitly attached to arg?  Pretend it's the next
        # argument.
        if "=" in arg:
            (opt, next_arg) = arg.split("=", 1)
            rargs.insert(0, next_arg)
            had_explicit_value = True
        else:
            opt = arg
            had_explicit_value = False

        opt = self._match_long_opt(opt)
        option = self._long_opt[opt]
        if option.takes_value():
            nargs = option.nargs
            if len(rargs) < nargs:
                if nargs == 1:
                    self.error(_("%s option requires an argument") % opt)
                else:
                    self.error(_("%s option requires %d arguments")
                               % (opt, nargs))
            elif nargs == 1:
                value = rargs.pop(0)
            else:
                # Multi-argument option: consume the next nargs words.
                value = tuple(rargs[0:nargs])
                del rargs[0:nargs]

        elif had_explicit_value:
            self.error(_("%s option does not take a value") % opt)

        else:
            value = None

        option.process(opt, value, values, self)

    def _process_short_opts(self, rargs, values):
        arg = rargs.pop(0)
        stop = False
        # 'i' tracks how many characters of 'arg' have been consumed
        # (position 0 is the leading "-").
        i = 1
        for ch in arg[1:]:
            opt = "-" + ch
            option = self._short_opt.get(opt)
            i += 1                      # we have consumed a character

            if not option:
                self.error(_("no such option: %s") % opt)
            if option.takes_value():
                # Any characters left in arg?  Pretend they're the
                # next arg, and stop consuming characters of arg.
                if i < len(arg):
                    rargs.insert(0, arg[i:])
                    stop = True

                nargs = option.nargs
                if len(rargs) < nargs:
                    if nargs == 1:
                        self.error(_("%s option requires an argument") % opt)
                    else:
                        self.error(_("%s option requires %d arguments")
                                   % (opt, nargs))
                elif nargs == 1:
                    value = rargs.pop(0)
                else:
                    value = tuple(rargs[0:nargs])
                    del rargs[0:nargs]

            else:                       # option doesn't take a value
                value = None

            option.process(opt, value, values, self)

            if stop:
                break


    # -- Feedback methods ----------------------------------------------

    def get_prog_name(self):
        if self.prog is None:
            return os.path.basename(sys.argv[0])
        else:
            return self.prog

    def expand_prog_name(self, s):
        return s.replace("%prog", self.get_prog_name())

    def get_description(self):
        # NOTE(review): raises AttributeError if self.description is
        # None; callers such as format_help() guard with
        # "if self.description" first.
        return self.expand_prog_name(self.description)

    def exit(self, status=0, msg=None):
        if msg:
            sys.stderr.write(msg)
        sys.exit(status)

    def error(self, msg):
        """error(msg : string)

        Print a usage message incorporating 'msg' to stderr and exit.
        If you override this in a subclass, it should not return -- it
        should either exit or raise an exception.
        """
        self.print_usage(sys.stderr)
        self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg))

    def get_usage(self):
        if self.usage:
            return self.formatter.format_usage(
                self.expand_prog_name(self.usage))
        else:
            return ""

    def print_usage(self, file=None):
        """print_usage(file : file = stdout)

        Print the usage message for the current program (self.usage) to
        'file' (default stdout).  Any occurrence of the string "%prog" in
        self.usage is replaced with the name of the current program
        (basename of sys.argv[0]).  Does nothing if self.usage is empty
        or not defined.
        """
        if self.usage:
            print >>file, self.get_usage()

    def get_version(self):
        if self.version:
            return self.expand_prog_name(self.version)
        else:
            return ""

    def print_version(self, file=None):
        """print_version(file : file = stdout)

        Print the version message for this program (self.version) to
        'file' (default stdout).  As with print_usage(), any occurrence
        of "%prog" in self.version is replaced by the current program's
        name.  Does nothing if self.version is empty or undefined.
        """
        if self.version:
            print >>file, self.get_version()

    def format_option_help(self, formatter=None):
        if formatter is None:
            formatter = self.formatter
        formatter.store_option_strings(self)
        result = []
        result.append(formatter.format_heading(_("options")))
        formatter.indent()
        if self.option_list:
            result.append(OptionContainer.format_option_help(self, formatter))
            result.append("\n")
        for group in self.option_groups:
            result.append(group.format_help(formatter))
            result.append("\n")
        formatter.dedent()
        # Drop the last "\n", or the header if no options or option groups:
        return "".join(result[:-1])

    def format_help(self, formatter=None):
        if formatter is None:
            formatter = self.formatter
        result = []
        if self.usage:
            result.append(self.get_usage() + "\n")
        if self.description:
            result.append(self.format_description(formatter) + "\n")
        result.append(self.format_option_help(formatter))
        return "".join(result)

    def print_help(self, file=None):
        """print_help(file : file = stdout)

        Print an extended help message, listing all options and any
        help text provided with them, to 'file' (default stdout).
        """
        if file is None:
            file = sys.stdout
        file.write(self.format_help())

# class OptionParser
+
+
+def _match_abbrev(s, wordmap):
+    """_match_abbrev(s : string, wordmap : {string : Option}) -> string
+
+    Return the string key in 'wordmap' for which 's' is an unambiguous
+    abbreviation.  If 's' is found to be ambiguous or doesn't match any of
+    'words', raise BadOptionError.
+    """
+    # Is there an exact match?
+    if wordmap.has_key(s):
+        return s
+    else:
+        # Isolate all words with s as a prefix.
+        possibilities = [word for word in wordmap.keys()
+                         if word.startswith(s)]
+        # No exact match, so there had better be just one possibility.
+        if len(possibilities) == 1:
+            return possibilities[0]
+        elif not possibilities:
+            raise BadOptionError(_("no such option: %s") % s)
+        else:
+            # More than one possible completion: ambiguous prefix.
+            raise BadOptionError(_("ambiguous option: %s (%s?)")
+                                 % (s, ", ".join(possibilities)))
+
+
# Some day, there might be many Option classes.  As of Optik 1.3, the
# preferred way to instantiate Options is indirectly, via make_option(),
# which will become a factory function when there are many Option
# classes.
make_option = Option  # currently just an alias for the Option class itself
diff --git a/depot_tools/release/win/python_24/Lib/os.py b/depot_tools/release/win/python_24/Lib/os.py
new file mode 100644
index 0000000..514d440
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/os.py
@@ -0,0 +1,729 @@
+r"""OS routines for Mac, DOS, NT, or Posix depending on what system we're on.
+
+This exports:
+  - all functions from posix, nt, os2, mac, or ce, e.g. unlink, stat, etc.
+  - os.path is one of the modules posixpath, ntpath, or macpath
+  - os.name is 'posix', 'nt', 'os2', 'mac', 'ce' or 'riscos'
+  - os.curdir is a string representing the current directory ('.' or ':')
+  - os.pardir is a string representing the parent directory ('..' or '::')
+  - os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
+  - os.extsep is the extension separator ('.' or '/')
+  - os.altsep is the alternate pathname separator (None or '/')
+  - os.pathsep is the component separator used in $PATH etc
+  - os.linesep is the line separator in text files ('\r' or '\n' or '\r\n')
+  - os.defpath is the default search path for executables
+  - os.devnull is the file path of the null device ('/dev/null', etc.)
+
+Programs that import and use 'os' stand a better chance of being
+portable between different platforms.  Of course, they must then
+only use functions that are defined by all platforms (e.g., unlink
+and opendir), and leave all pathname manipulation to os.path
+(e.g., split and join).
+"""
+
+#'
+
+import sys
+
# Names of every module compiled into this interpreter; consulted below
# to pick the platform-specific module (posix, nt, os2, mac, ce, riscos).
_names = sys.builtin_module_names

# Note:  more names are added to __all__ later.
__all__ = ["altsep", "curdir", "pardir", "sep", "pathsep", "linesep",
           "defpath", "name", "path", "devnull"]
+
+def _get_exports_list(module):
+    try:
+        return list(module.__all__)
+    except AttributeError:
+        return [n for n in dir(module) if n[0] != '_']
+
# Exactly one of the platform-specific built-in modules below should be
# present; re-export its contents from this module and pick the matching
# path-manipulation module.
if 'posix' in _names:
    name = 'posix'
    linesep = '\n'
    from posix import *
    try:
        from posix import _exit
    except ImportError:
        pass
    import posixpath as path

    import posix
    __all__.extend(_get_exports_list(posix))
    del posix

elif 'nt' in _names:
    name = 'nt'
    linesep = '\r\n'
    from nt import *
    try:
        from nt import _exit
    except ImportError:
        pass
    import ntpath as path

    import nt
    __all__.extend(_get_exports_list(nt))
    del nt

elif 'os2' in _names:
    name = 'os2'
    linesep = '\r\n'
    from os2 import *
    try:
        from os2 import _exit
    except ImportError:
        pass
    # EMX GCC builds of OS/2 Python get their own path module (and a
    # link() emulation); other builds use the Windows-style one.
    if sys.version.find('EMX GCC') == -1:
        import ntpath as path
    else:
        import os2emxpath as path
        from _emx_link import link

    import os2
    __all__.extend(_get_exports_list(os2))
    del os2

elif 'mac' in _names:
    name = 'mac'
    linesep = '\r'
    from mac import *
    try:
        from mac import _exit
    except ImportError:
        pass
    import macpath as path

    import mac
    __all__.extend(_get_exports_list(mac))
    del mac

elif 'ce' in _names:
    name = 'ce'
    linesep = '\r\n'
    from ce import *
    try:
        from ce import _exit
    except ImportError:
        pass
    # We can use the standard Windows path.
    import ntpath as path

    import ce
    __all__.extend(_get_exports_list(ce))
    del ce

elif 'riscos' in _names:
    name = 'riscos'
    linesep = '\n'
    from riscos import *
    try:
        from riscos import _exit
    except ImportError:
        pass
    import riscospath as path

    import riscos
    __all__.extend(_get_exports_list(riscos))
    del riscos

else:
    raise ImportError, 'no os specific module found'

# Register the chosen path module under "os.path" so that
# "import os.path" works even though os is not a package.
sys.modules['os.path'] = path
from os.path import (curdir, pardir, sep, pathsep, defpath, extsep, altsep,
    devnull)

del _names

#'
+
+# Super directory utilities.
+# (Inspired by Eric Raymond; the doc strings are mostly his)
+
+def makedirs(name, mode=0777):
+    """makedirs(path [, mode=0777])
+
+    Super-mkdir; create a leaf directory and all intermediate ones.
+    Works like mkdir, except that any intermediate path segment (not
+    just the rightmost) will be created if it does not exist.  This is
+    recursive.
+
+    """
+    head, tail = path.split(name)
+    if not tail:
+        head, tail = path.split(head)
+    if head and tail and not path.exists(head):
+        makedirs(head, mode)
+        if tail == curdir:           # xxx/newdir/. exists if xxx/newdir exists
+            return
+    mkdir(name, mode)
+
def removedirs(name):
    """removedirs(path)

    Recursive directory removal: remove the leaf directory, then prune
    each now-empty ancestor directory in turn.  Errors while pruning
    ancestors are ignored -- they generally mean a directory was not
    empty -- but an error removing the leaf itself propagates.
    """
    rmdir(name)
    parent, leaf = path.split(name)
    if not leaf:
        # Trailing separator: split again to get the real components.
        parent, leaf = path.split(parent)
    while parent and leaf:
        try:
            rmdir(parent)
        except error:
            break
        parent, leaf = path.split(parent)
+
def renames(old, new):
    """renames(old, new)

    Super-rename: create any directories needed to make 'new' valid
    before renaming, then prune away now-empty directories from the
    rightmost path segments of 'old'.

    Note: this function can fail with the new directory structure made
    if you lack permissions needed to unlink the leaf directory or
    file.
    """
    new_dir, new_leaf = path.split(new)
    if new_dir and new_leaf and not path.exists(new_dir):
        makedirs(new_dir)
    rename(old, new)
    old_dir, old_leaf = path.split(old)
    if old_dir and old_leaf:
        try:
            removedirs(old_dir)
        except error:
            # Best effort only: a non-empty ancestor simply stays.
            pass
+
+__all__.extend(["makedirs", "removedirs", "renames"])
+
def walk(top, topdown=True, onerror=None):
    """Directory tree generator.

    For each directory in the directory tree rooted at top (including top
    itself, but excluding '.' and '..'), yields a 3-tuple

        dirpath, dirnames, filenames

    dirpath is a string, the path to the directory.  dirnames is a list of
    the names of the subdirectories in dirpath (excluding '.' and '..').
    filenames is a list of the names of the non-directory files in dirpath.
    Note that the names in the lists are just names, with no path components.
    To get a full path (which begins with top) to a file or directory in
    dirpath, do os.path.join(dirpath, name).

    If optional arg 'topdown' is true or not specified, the triple for a
    directory is generated before the triples for any of its subdirectories
    (directories are generated top down).  If topdown is false, the triple
    for a directory is generated after the triples for all of its
    subdirectories (directories are generated bottom up).

    When topdown is true, the caller can modify the dirnames list in-place
    (e.g., via del or slice assignment), and walk will only recurse into the
    subdirectories whose names remain in dirnames; this can be used to prune
    the search, or to impose a specific order of visiting.  Modifying
    dirnames when topdown is false is ineffective, since the directories in
    dirnames have already been generated by the time dirnames itself is
    generated.

    By default errors from the os.listdir() call are ignored.  If
    optional arg 'onerror' is specified, it should be a function; it
    will be called with one argument, an os.error instance.  It can
    report the error to continue with the walk, or raise the exception
    to abort the walk.  Note that the filename is available as the
    filename attribute of the exception object.

    Caution:  if you pass a relative pathname for top, don't change the
    current working directory between resumptions of walk.  walk never
    changes the current directory, and assumes that the client doesn't
    either.

    Example:

    from os.path import join, getsize
    for root, dirs, files in walk('python/Lib/email'):
        print root, "consumes",
        print sum([getsize(join(root, name)) for name in files]),
        print "bytes in", len(files), "non-directory files"
        if 'CVS' in dirs:
            dirs.remove('CVS')  # don't visit CVS directories
    """

    from os.path import join, isdir, islink

    # We may not have read permission for top, in which case we can't
    # get a list of the files the directory contains.  os.path.walk
    # always suppressed the exception then, rather than blow up for a
    # minor reason when (say) a thousand readable directories are still
    # left to visit.  That logic is copied here.
    try:
        # Note that listdir and error are globals in this module due
        # to earlier import-*.
        names = listdir(top)
    except error, err:
        if onerror is not None:
            onerror(err)
        return

    # Partition the directory entries into subdirectories and everything
    # else (symlinks to directories count as directories here).
    dirs, nondirs = [], []
    for name in names:
        if isdir(join(top, name)):
            dirs.append(name)
        else:
            nondirs.append(name)

    if topdown:
        yield top, dirs, nondirs
    for name in dirs:
        # NOTE: this local deliberately shadows the module-global 'path'
        # (the path module) for the duration of the loop body.
        path = join(top, name)
        # Symlinked directories are listed in 'dirs' but never recursed
        # into, to avoid infinite loops.
        if not islink(path):
            for x in walk(path, topdown, onerror):
                yield x
    if not topdown:
        yield top, dirs, nondirs

__all__.append("walk")
+
# Make sure os.environ exists, at least
try:
    environ
except NameError:
    # The platform module did not export one; fall back to an empty
    # mapping so os.environ is always defined.
    environ = {}
+
def execl(file, *args):
    """Replace the current process by executing 'file' with the
    positional arguments as its argument list.  Thin wrapper that
    forwards to execv(). """
    execv(file, args)
+
def execle(file, *args):
    """Replace the current process by executing 'file'.

    The final positional argument is the environment mapping; the
    arguments before it form the argv handed to execve(). """
    env = args[-1]
    argv = args[:-1]
    execve(file, argv, env)
+
def execlp(file, *args):
    """Replace the current process by executing 'file' (located by
    searching $PATH), with the positional arguments as its argument
    list.  Thin wrapper that forwards to execvp(). """
    execvp(file, args)
+
def execlpe(file, *args):
    """Replace the current process by executing 'file' (located by
    searching $PATH).

    The final positional argument is the environment mapping; the
    arguments before it form the argv handed to execvpe(). """
    env = args[-1]
    argv = args[:-1]
    execvpe(file, argv, env)
+
def execvp(file, args):
    """Replace the current process by executing 'file', searching the
    directories on $PATH for it.  'args' may be a list or tuple of
    strings. """
    _execvpe(file, args)
+
def execvpe(file, args, env):
    """Replace the current process by executing 'file' (searched for
    along the $PATH taken from 'env'), with 'args' as its argument list
    and 'env' as its environment.  'args' may be a list or tuple of
    strings. """
    _execvpe(file, args, env)
+
+__all__.extend(["execl","execle","execlp","execlpe","execvp","execvpe"])
+
def _execvpe(file, args, env=None):
    # Shared implementation behind execvp()/execvpe(): emulates a $PATH
    # search on top of execv()/execve().  On success it never returns
    # (the process image is replaced).
    from errno import ENOENT, ENOTDIR

    if env is not None:
        func = execve
        argrest = (args, env)
    else:
        func = execv
        argrest = (args,)
        env = environ

    # A name containing a directory component is executed directly,
    # with no $PATH search.
    head, tail = path.split(file)
    if head:
        func(file, *argrest)
        return
    if 'PATH' in env:
        envpath = env['PATH']
    else:
        envpath = defpath
    PATH = envpath.split(pathsep)
    saved_exc = None
    saved_tb = None
    for dir in PATH:
        fullname = path.join(dir, file)
        try:
            func(fullname, *argrest)
        except error, e:
            tb = sys.exc_info()[2]
            # Remember the first "interesting" failure (anything other
            # than not-found) so it can be re-raised after the search.
            if (e.errno != ENOENT and e.errno != ENOTDIR
                and saved_exc is None):
                saved_exc = e
                saved_tb = tb
    if saved_exc:
        raise error, saved_exc, saved_tb
    # Every attempt failed with not-found: re-raise the last error.
    raise error, e, tb
+
+# Change environ to automatically call putenv() if it exists
+try:
+    # This will fail if there's no putenv
+    putenv
+except NameError:
+    pass
+else:
+    import UserDict
+
+    # Fake unsetenv() for Windows
+    # not sure about os2 here but
+    # I'm guessing they are the same.
+
+    if name in ('os2', 'nt'):
+        def unsetenv(key):
+            putenv(key, "")
+
+    if name == "riscos":
+        # On RISC OS, all env access goes through getenv and putenv
+        from riscosenviron import _Environ
+    elif name in ('os2', 'nt'):  # Where Env Var Names Must Be UPPERCASE
+        # But we store them as upper case
+        # Dict subclass that mirrors every assignment/deletion into the
+        # real process environment via putenv()/unsetenv(), normalizing
+        # key lookups to upper case.
+        class _Environ(UserDict.IterableUserDict):
+            def __init__(self, environ):
+                UserDict.UserDict.__init__(self)
+                data = self.data
+                for k, v in environ.items():
+                    data[k.upper()] = v
+            def __setitem__(self, key, item):
+                # Keep the real environment in sync before storing.
+                putenv(key, item)
+                self.data[key.upper()] = item
+            def __getitem__(self, key):
+                return self.data[key.upper()]
+            try:
+                unsetenv
+            except NameError:
+                def __delitem__(self, key):
+                    del self.data[key.upper()]
+            else:
+                def __delitem__(self, key):
+                    unsetenv(key)
+                    del self.data[key.upper()]
+            def has_key(self, key):
+                return key.upper() in self.data
+            def __contains__(self, key):
+                return key.upper() in self.data
+            def get(self, key, failobj=None):
+                return self.data.get(key.upper(), failobj)
+            def update(self, dict=None, **kwargs):
+                if dict:
+                    try:
+                        keys = dict.keys()
+                    except AttributeError:
+                        # List of (key, value)
+                        for k, v in dict:
+                            self[k] = v
+                    else:
+                        # got keys
+                        # cannot use items(), since mappings
+                        # may not have them.
+                        for k in keys:
+                            self[k] = dict[k]
+                if kwargs:
+                    self.update(kwargs)
+            def copy(self):
+                return dict(self)
+
+    else:  # Where Env Var Names Can Be Mixed Case
+        class _Environ(UserDict.IterableUserDict):
+            def __init__(self, environ):
+                UserDict.UserDict.__init__(self)
+                self.data = environ
+            def __setitem__(self, key, item):
+                # Keep the real environment in sync before storing.
+                putenv(key, item)
+                self.data[key] = item
+            def update(self,  dict=None, **kwargs):
+                if dict:
+                    try:
+                        keys = dict.keys()
+                    except AttributeError:
+                        # List of (key, value)
+                        for k, v in dict:
+                            self[k] = v
+                    else:
+                        # got keys
+                        # cannot use items(), since mappings
+                        # may not have them.
+                        for k in keys:
+                            self[k] = dict[k]
+                if kwargs:
+                    self.update(kwargs)
+            try:
+                unsetenv
+            except NameError:
+                pass
+            else:
+                def __delitem__(self, key):
+                    unsetenv(key)
+                    del self.data[key]
+            def copy(self):
+                return dict(self)
+
+
+    # Replace the plain environ dict with the synchronizing wrapper.
+    environ = _Environ(environ)
+
+def getenv(key, default=None):
+    """Get an environment variable, return None if it doesn't exist.
+    The optional second argument can specify an alternate default."""
+    return environ.get(key, default)
+__all__.append("getenv")
+
+def _exists(name):
+    # Return True if 'name' evaluates in this module's namespace,
+    # i.e. the platform module supplied that primitive.
+    try:
+        eval(name)
+        return True
+    except NameError:
+        return False
+
+# Supply spawn*() (probably only for Unix)
+if _exists("fork") and not _exists("spawnv") and _exists("execv"):
+
+    P_WAIT = 0
+    P_NOWAIT = P_NOWAITO = 1
+
+    # XXX Should we support P_DETACH?  I suppose it could fork()**2
+    # and close the std I/O streams.  Also, P_OVERLAY is the same
+    # as execv*()?
+
+    def _spawnvef(mode, file, args, env, func):
+        # Internal helper; func is the exec*() function to use
+        pid = fork()
+        if not pid:
+            # Child
+            try:
+                if env is None:
+                    func(file, args)
+                else:
+                    func(file, args, env)
+            except:
+                # Never let an exception escape the forked child;
+                # exit with 127 like a shell's "command not found".
+                _exit(127)
+        else:
+            # Parent
+            if mode == P_NOWAIT:
+                return pid # Caller is responsible for waiting!
+            while 1:
+                wpid, sts = waitpid(pid, 0)
+                if WIFSTOPPED(sts):
+                    continue
+                elif WIFSIGNALED(sts):
+                    return -WTERMSIG(sts)
+                elif WIFEXITED(sts):
+                    return WEXITSTATUS(sts)
+                else:
+                    raise error, "Not stopped, signaled or exited???"
+
+    def spawnv(mode, file, args):
+        """spawnv(mode, file, args) -> integer
+
+Execute file with arguments from args in a subprocess.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        return _spawnvef(mode, file, args, None, execv)
+
+    def spawnve(mode, file, args, env):
+        """spawnve(mode, file, args, env) -> integer
+
+Execute file with arguments from args in a subprocess with the
+specified environment.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        return _spawnvef(mode, file, args, env, execve)
+
+    # Note: spawnvp[e] isn't currently supported on Windows
+
+    def spawnvp(mode, file, args):
+        """spawnvp(mode, file, args) -> integer
+
+Execute file (which is looked for along $PATH) with arguments from
+args in a subprocess.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        return _spawnvef(mode, file, args, None, execvp)
+
+    def spawnvpe(mode, file, args, env):
+        """spawnvpe(mode, file, args, env) -> integer
+
+Execute file (which is looked for along $PATH) with arguments from
+args in a subprocess with the supplied environment.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        return _spawnvef(mode, file, args, env, execvpe)
+
+if _exists("spawnv"):
+    # These aren't supplied by the basic Windows code
+    # but can be easily implemented in Python
+
+    def spawnl(mode, file, *args):
+        """spawnl(mode, file, *args) -> integer
+
+Execute file with arguments from args in a subprocess.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        return spawnv(mode, file, args)
+
+    def spawnle(mode, file, *args):
+        """spawnle(mode, file, *args, env) -> integer
+
+Execute file with arguments from args in a subprocess with the
+supplied environment.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        # The environment mapping is the trailing positional argument.
+        env = args[-1]
+        return spawnve(mode, file, args[:-1], env)
+
+
+    __all__.extend(["spawnv", "spawnve", "spawnl", "spawnle",])
+
+
+if _exists("spawnvp"):
+    # At the moment, Windows doesn't implement spawnvp[e],
+    # so it won't have spawnlp[e] either.
+    def spawnlp(mode, file, *args):
+        """spawnlp(mode, file, *args) -> integer
+
+Execute file (which is looked for along $PATH) with arguments from
+args in a subprocess with the supplied environment.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        return spawnvp(mode, file, args)
+
+    def spawnlpe(mode, file, *args):
+        """spawnlpe(mode, file, *args, env) -> integer
+
+Execute file (which is looked for along $PATH) with arguments from
+args in a subprocess with the supplied environment.
+If mode == P_NOWAIT return the pid of the process.
+If mode == P_WAIT return the process's exit code if it exits normally;
+otherwise return -SIG, where SIG is the signal that killed it. """
+        # The environment mapping is the trailing positional argument.
+        env = args[-1]
+        return spawnvpe(mode, file, args[:-1], env)
+
+
+    __all__.extend(["spawnvp", "spawnvpe", "spawnlp", "spawnlpe",])
+
+
+# Supply popen2 etc. (for Unix)
+if _exists("fork"):
+    if not _exists("popen2"):
+        def popen2(cmd, mode="t", bufsize=-1):
+            """Execute the shell command 'cmd' in a sub-process.  On UNIX, 'cmd'
+            may be a sequence, in which case arguments will be passed directly to
+            the program without shell intervention (as with os.spawnv()).  If 'cmd'
+            is a string it will be passed to the shell (as with os.system()). If
+            'bufsize' is specified, it sets the buffer size for the I/O pipes.  The
+            file objects (child_stdin, child_stdout) are returned."""
+            import popen2
+            # The popen2 module returns (stdout, stdin); this wrapper
+            # swaps them to present (child_stdin, child_stdout).
+            stdout, stdin = popen2.popen2(cmd, bufsize)
+            return stdin, stdout
+        __all__.append("popen2")
+
+    if not _exists("popen3"):
+        def popen3(cmd, mode="t", bufsize=-1):
+            """Execute the shell command 'cmd' in a sub-process.  On UNIX, 'cmd'
+            may be a sequence, in which case arguments will be passed directly to
+            the program without shell intervention (as with os.spawnv()).  If 'cmd'
+            is a string it will be passed to the shell (as with os.system()). If
+            'bufsize' is specified, it sets the buffer size for the I/O pipes.  The
+            file objects (child_stdin, child_stdout, child_stderr) are returned."""
+            import popen2
+            stdout, stdin, stderr = popen2.popen3(cmd, bufsize)
+            return stdin, stdout, stderr
+        __all__.append("popen3")
+
+    if not _exists("popen4"):
+        def popen4(cmd, mode="t", bufsize=-1):
+            """Execute the shell command 'cmd' in a sub-process.  On UNIX, 'cmd'
+            may be a sequence, in which case arguments will be passed directly to
+            the program without shell intervention (as with os.spawnv()).  If 'cmd'
+            is a string it will be passed to the shell (as with os.system()). If
+            'bufsize' is specified, it sets the buffer size for the I/O pipes.  The
+            file objects (child_stdin, child_stdout_stderr) are returned."""
+            import popen2
+            stdout, stdin = popen2.popen4(cmd, bufsize)
+            return stdin, stdout
+        __all__.append("popen4")
+
+import copy_reg as _copy_reg
+
+# Pickling support for the stat_result / statvfs_result structseq
+# types: register a reduce function so copy/pickle round-trips work.
+
+def _make_stat_result(tup, dict):
+    return stat_result(tup, dict)
+
+def _pickle_stat_result(sr):
+    (type, args) = sr.__reduce__()
+    return (_make_stat_result, args)
+
+try:
+    _copy_reg.pickle(stat_result, _pickle_stat_result, _make_stat_result)
+except NameError: # stat_result may not exist
+    pass
+
+def _make_statvfs_result(tup, dict):
+    return statvfs_result(tup, dict)
+
+def _pickle_statvfs_result(sr):
+    (type, args) = sr.__reduce__()
+    return (_make_statvfs_result, args)
+
+try:
+    _copy_reg.pickle(statvfs_result, _pickle_statvfs_result,
+                     _make_statvfs_result)
+except NameError: # statvfs_result may not exist
+    pass
+
+if not _exists("urandom"):
+    # Cached file descriptor for /dev/urandom, or NotImplementedError
+    # as a sentinel meaning "tried and failed".
+    _urandomfd = None
+    def urandom(n):
+        """urandom(n) -> str
+
+        Return a string of n random bytes suitable for cryptographic use.
+
+        """
+        global _urandomfd
+        if _urandomfd is None:
+            try:
+                # NOTE(review): 'open' here is the low-level fd-returning
+                # open from this module, not the builtin -- confirm; the
+                # bare except also hides unrelated failures.
+                _urandomfd = open("/dev/urandom", O_RDONLY)
+            except:
+                _urandomfd = NotImplementedError
+        if _urandomfd is NotImplementedError:
+            raise NotImplementedError("/dev/urandom (or equivalent) not found")
+        bytes = ""
+        while len(bytes) < n:
+            # read() may return fewer bytes than requested; loop to fill.
+            bytes += read(_urandomfd, n - len(bytes))
+        return bytes
diff --git a/depot_tools/release/win/python_24/Lib/os2emxpath.py b/depot_tools/release/win/python_24/Lib/os2emxpath.py
new file mode 100644
index 0000000..4c64324e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/os2emxpath.py
@@ -0,0 +1,424 @@
+# Module 'os2emxpath' -- common operations on OS/2 pathnames
+"""Common pathname manipulations, OS/2 EMX version.
+
+Instead of importing this module directly, import os and refer to this
+module as os.path.
+"""
+
+import os
+import stat
+
+__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
+           "basename","dirname","commonprefix","getsize","getmtime",
+           "getatime","getctime", "islink","exists","isdir","isfile","ismount",
+           "walk","expanduser","expandvars","normpath","abspath","splitunc",
+           "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
+           "devnull","realpath","supports_unicode_filenames"]
+
+# strings representing various path-related bits and pieces
+curdir = '.'
+pardir = '..'
+extsep = '.'
+sep = '/'
+altsep = '\\'
+pathsep = ';'
+defpath = '.;C:\\bin'
+devnull = 'nul'
+
+# Normalize the case of a pathname and map slashes to backslashes.
+# Other normalizations (such as optimizing '../' away) are not done
+# (this is done by normpath).
+
+def normcase(s):
+    """Normalize case of pathname.
+
+    Makes all characters lowercase and all altseps into seps."""
+    # On EMX, '/' is sep and '\\' is altsep (see constants above).
+    return s.replace('\\', '/').lower()
+
+
+# Return whether a path is absolute.
+# Trivial in Posix, harder on the Mac or MS-DOS.
+# For DOS it is absolute if it starts with a slash or backslash (current
+# volume), or if a pathname after the volume letter and colon / UNC resource
+# starts with a slash or backslash.
+
+def isabs(s):
+    """Test whether a path is absolute"""
+    # Absolute iff the part after any drive letter starts with a slash.
+    s = splitdrive(s)[1]
+    return s != '' and s[:1] in '/\\'
+
+
+# Join two (or more) paths.
+
+def join(a, *p):
+    """Join two or more pathname components, inserting sep as needed"""
+    path = a
+    for b in p:
+        if isabs(b):
+            # An absolute component discards everything before it.
+            path = b
+        elif path == '' or path[-1:] in '/\\:':
+            path = path + b
+        else:
+            path = path + '/' + b
+    return path
+
+
+# Split a path in a drive specification (a drive letter followed by a
+# colon) and the path specification.
+# It is always true that drivespec + pathspec == p
+def splitdrive(p):
+    """Split a pathname into drive and path specifiers. Returns a 2-tuple
+"(drive,path)";  either part may be empty"""
+    # A drive is exactly one letter followed by ':'.
+    if p[1:2] == ':':
+        return p[0:2], p[2:]
+    return '', p
+
+
+# Parse UNC paths
+def splitunc(p):
+    """Split a pathname into UNC mount point and relative path specifiers.
+
+    Return a 2-tuple (unc, rest); either part may be empty.
+    If unc is not empty, it has the form '//host/mount' (or similar
+    using backslashes).  unc+rest is always the input path.
+    Paths containing drive letters never have an UNC part.
+    """
+    if p[1:2] == ':':
+        return '', p # Drive letter present
+    firstTwo = p[0:2]
+    if firstTwo == '/' * 2 or firstTwo == '\\' * 2:
+        # is a UNC path:
+        # vvvvvvvvvvvvvvvvvvvv equivalent to drive letter
+        # \\machine\mountpoint\directories...
+        #           directory ^^^^^^^^^^^^^^^
+        normp = normcase(p)
+        # Find the slash ending the machine name, then the one ending
+        # the mount point; everything up to the latter is the UNC part.
+        index = normp.find('/', 2)
+        if index == -1:
+            ##raise RuntimeError, 'illegal UNC path: "' + p + '"'
+            return ("", p)
+        index = normp.find('/', index + 1)
+        if index == -1:
+            index = len(p)
+        return p[:index], p[index:]
+    return '', p
+
+
+# Split a path in head (everything up to the last '/') and tail (the
+# rest).  After the trailing '/' is stripped, the invariant
+# join(head, tail) == p holds.
+# The resulting head won't end in '/' unless it is the root.
+
+def split(p):
+    """Split a pathname.
+
+    Return tuple (head, tail) where tail is everything after the final slash.
+    Either part may be empty."""
+
+    d, p = splitdrive(p)
+    # set i to index beyond p's last slash
+    i = len(p)
+    while i and p[i-1] not in '/\\':
+        i = i - 1
+    head, tail = p[:i], p[i:]  # now tail has no slashes
+    # remove trailing slashes from head, unless it's all slashes
+    head2 = head
+    while head2 and head2[-1] in '/\\':
+        head2 = head2[:-1]
+    head = head2 or head
+    return d + head, tail
+
+
+# Split a path in root and extension.
+# The extension is everything starting at the last dot in the last
+# pathname component; the root is everything before that.
+# It is always true that root + ext == p.
+
+def splitext(p):
+    """Split the extension from a pathname.
+
+    Extension is everything from the last dot to the end.
+    Return (root, ext), either part may be empty."""
+    # Single left-to-right scan; 'ext' tracks the current candidate
+    # extension and is folded back into 'root' when superseded.
+    root, ext = '', ''
+    for c in p:
+        if c in ['/','\\']:
+            # A separator invalidates any extension seen so far.
+            root, ext = root + ext + c, ''
+        elif c == '.':
+            if ext:
+                root, ext = root + ext, c
+            else:
+                ext = c
+        elif ext:
+            ext = ext + c
+        else:
+            root = root + c
+    return root, ext
+
+
+# Return the tail (basename) part of a path.
+
+def basename(p):
+    """Returns the final component of a pathname"""
+    return split(p)[1]
+
+
+# Return the head (dirname) part of a path.
+
+def dirname(p):
+    """Returns the directory component of a pathname"""
+    return split(p)[0]
+
+
+# Return the longest prefix of all list elements.
+
+def commonprefix(m):
+    "Given a list of pathnames, returns the longest common leading component"
+    # Note: the comparison is character-wise, not path-component-wise.
+    if not m: return ''
+    prefix = m[0]
+    for item in m:
+        for i in range(len(prefix)):
+            if prefix[:i+1] != item[:i+1]:
+                prefix = prefix[:i]
+                if i == 0: return ''
+                break
+    return prefix
+
+
+# Get size, mtime, atime of files.
+
+# Thin wrappers over the corresponding os.stat() result attributes.
+
+def getsize(filename):
+    """Return the size of a file, reported by os.stat()"""
+    return os.stat(filename).st_size
+
+def getmtime(filename):
+    """Return the last modification time of a file, reported by os.stat()"""
+    return os.stat(filename).st_mtime
+
+def getatime(filename):
+    """Return the last access time of a file, reported by os.stat()"""
+    return os.stat(filename).st_atime
+
+def getctime(filename):
+    """Return the creation time of a file, reported by os.stat()."""
+    return os.stat(filename).st_ctime
+
+# Is a path a symbolic link?
+# This will always return false on systems where posix.lstat doesn't exist.
+
+def islink(path):
+    """Test for symbolic link.  On OS/2 always returns false"""
+    return False
+
+
+# Does a path exist?
+# This is false for dangling symbolic links.
+
+def exists(path):
+    """Test whether a path exists"""
+    try:
+        st = os.stat(path)
+    except os.error:
+        return False
+    return True
+
+# islink() is always false on this platform, so lexists == exists.
+lexists = exists
+
+
+# Is a path a directory?
+
+def isdir(path):
+    """Test whether a path is a directory"""
+    try:
+        st = os.stat(path)
+    except os.error:
+        # Nonexistent / unstatable paths are simply "not a directory".
+        return False
+    return stat.S_ISDIR(st.st_mode)
+
+
+# Is a path a regular file?
+# This follows symbolic links, so both islink() and isdir() can be true
+# for the same path.
+
+def isfile(path):
+    """Test whether a path is a regular file"""
+    try:
+        st = os.stat(path)
+    except os.error:
+        return False
+    return stat.S_ISREG(st.st_mode)
+
+
+# Is a path a mount point?  Either a root (with or without drive letter)
+# or an UNC path with at most a / or \ after the mount point.
+
+def ismount(path):
+    """Test whether a path is a mount point (defined as root of drive)"""
+    unc, rest = splitunc(path)
+    if unc:
+        # UNC mount point: nothing (or a lone slash) after //host/mount.
+        return rest in ("", "/", "\\")
+    p = splitdrive(path)[1]
+    return len(p) == 1 and p[0] in '/\\'
+
+
+# Directory tree walk.
+# For each directory under top (including top itself, but excluding
+# '.' and '..'), func(arg, dirname, filenames) is called, where
+# dirname is the name of the directory and filenames is the list
+# of files (and subdirectories etc.) in the directory.
+# The func may modify the filenames list, to implement a filter,
+# or to impose a different order of visiting.
+
+def walk(top, func, arg):
+    """Directory tree walk with callback function.
+
+    walk(top, func, arg) calls func(arg, d, files) for each directory d
+    in the tree rooted at top (including top itself); files is a list
+    of all the files and subdirs in directory d."""
+    try:
+        names = os.listdir(top)
+    except os.error:
+        # Unreadable directories are silently skipped.
+        return
+    func(arg, top, names)
+    exceptions = ('.', '..')
+    for name in names:
+        if name not in exceptions:
+            name = join(top, name)
+            if isdir(name):
+                walk(name, func, arg)
+
+
+# Expand paths beginning with '~' or '~user'.
+# '~' means $HOME; '~user' means that user's home directory.
+# If the path doesn't begin with '~', or if the user or $HOME is unknown,
+# the path is returned unchanged (leaving error reporting to whatever
+# function is called with the expanded path as argument).
+# See also module 'glob' for expansion of *, ? and [...] in pathnames.
+# (A function should also be defined to do full *sh-style environment
+# variable expansion.)
+
+def expanduser(path):
+    """Expand ~ and ~user constructs.
+
+    If user or $HOME is unknown, do nothing."""
+    if path[:1] != '~':
+        return path
+    # i ends up just past the '~user' prefix (at the first slash).
+    i, n = 1, len(path)
+    while i < n and path[i] not in '/\\':
+        i = i + 1
+    if i == 1:
+        # Bare '~': resolve via $HOME, else $HOMEDRIVE + $HOMEPATH.
+        if 'HOME' in os.environ:
+            userhome = os.environ['HOME']
+        elif not 'HOMEPATH' in os.environ:
+            return path
+        else:
+            try:
+                drive = os.environ['HOMEDRIVE']
+            except KeyError:
+                drive = ''
+            userhome = join(drive, os.environ['HOMEPATH'])
+    else:
+        # '~user' lookup is not supported on this platform.
+        return path
+    return userhome + path[i:]
+
+
+# Expand paths containing shell variable substitutions.
+# The following rules apply:
+#       - no expansion within single quotes
+#       - no escape character, except for '$$' which is translated into '$'
+#       - ${varname} is accepted.
+#       - varnames can be made out of letters, digits and the character '_'
+# XXX With COMMAND.COM you can use any characters in a variable name,
+# XXX except '^|<>='.
+
+def expandvars(path):
+    """Expand shell variables of form $var and ${var}.
+
+    Unknown variables are left unchanged."""
+    if '$' not in path:
+        return path
+    import string
+    varchars = string.letters + string.digits + '_-'
+    res = ''
+    index = 0
+    pathlen = len(path)
+    while index < pathlen:
+        c = path[index]
+        if c == '\'':   # no expansion within single quotes
+            # Copy the quoted span verbatim, through the closing quote.
+            path = path[index + 1:]
+            pathlen = len(path)
+            try:
+                index = path.index('\'')
+                res = res + '\'' + path[:index + 1]
+            except ValueError:
+                # Unterminated quote: copy the rest and stop.
+                res = res + path
+                index = pathlen - 1
+        elif c == '$':  # variable or '$$'
+            if path[index + 1:index + 2] == '$':
+                # '$$' is an escaped literal '$'.
+                res = res + c
+                index = index + 1
+            elif path[index + 1:index + 2] == '{':
+                path = path[index+2:]
+                pathlen = len(path)
+                try:
+                    index = path.index('}')
+                    var = path[:index]
+                    if var in os.environ:
+                        res = res + os.environ[var]
+                except ValueError:
+                    # Unterminated '${': copy the rest and stop.
+                    res = res + path
+                    index = pathlen - 1
+            else:
+                # Bare $var: consume the longest run of varchars.
+                var = ''
+                index = index + 1
+                c = path[index:index + 1]
+                while c != '' and c in varchars:
+                    var = var + c
+                    index = index + 1
+                    c = path[index:index + 1]
+                if var in os.environ:
+                    res = res + os.environ[var]
+                if c != '':
+                    res = res + c
+        else:
+            res = res + c
+        index = index + 1
+    return res
+
+
+# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
+
+def normpath(path):
+    """Normalize path, eliminating double slashes, etc."""
+    path = path.replace('\\', '/')
+    prefix, path = splitdrive(path)
+    # Fold any leading slashes into the prefix.
+    while path[:1] == '/':
+        prefix = prefix + '/'
+        path = path[1:]
+    comps = path.split('/')
+    i = 0
+    while i < len(comps):
+        if comps[i] == '.':
+            del comps[i]
+        elif comps[i] == '..' and i > 0 and comps[i-1] not in ('', '..'):
+            # Cancel a '..' against the preceding real component.
+            del comps[i-1:i+1]
+            i = i - 1
+        elif comps[i] == '' and i > 0 and comps[i-1] != '':
+            # Collapse empty components from doubled slashes.
+            del comps[i]
+        else:
+            i = i + 1
+    # If the path is now empty, substitute '.'
+    if not prefix and not comps:
+        comps.append('.')
+    return prefix + '/'.join(comps)
+
+
+# Return an absolute path.
+def abspath(path):
+    """Return the absolute version of a path"""
+    if not isabs(path):
+        path = join(os.getcwd(), path)
+    return normpath(path)
+
+# realpath is a no-op on systems without islink support
+realpath = abspath
+
+supports_unicode_filenames = False
diff --git a/depot_tools/release/win/python_24/Lib/pdb.doc b/depot_tools/release/win/python_24/Lib/pdb.doc
new file mode 100644
index 0000000..81df323
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pdb.doc
@@ -0,0 +1,192 @@
+The Python Debugger Pdb
+=======================
+
+To use the debugger in its simplest form:
+
+        >>> import pdb
+        >>> pdb.run('<a statement>')
+
+The debugger's prompt is '(Pdb) '.  This will stop in the first
+function call in <a statement>.
+
+Alternatively, if a statement terminated with an unhandled exception,
+you can use pdb's post-mortem facility to inspect the contents of the
+traceback:
+
+        >>> <a statement>
+        <exception traceback>
+        >>> import pdb
+        >>> pdb.pm()
+
+The commands recognized by the debugger are listed in the next
+section.  Most can be abbreviated as indicated; e.g., h(elp) means
+that 'help' can be typed as 'h' or 'help' (but not as 'he' or 'hel',
+nor as 'H' or 'Help' or 'HELP').  Optional arguments are enclosed in
+square brackets.
+
+A blank line repeats the previous command literally, except for
+'list', where it lists the next 11 lines.
+
+Commands that the debugger doesn't recognize are assumed to be Python
+statements and are executed in the context of the program being
+debugged.  Python statements can also be prefixed with an exclamation
+point ('!').  This is a powerful way to inspect the program being
+debugged; it is even possible to change variables.  When an exception
+occurs in such a statement, the exception name is printed but the
+debugger's state is not changed.
+
+The debugger supports aliases, which can save typing.  And aliases can
+have parameters (see the alias help entry) which allows one a certain
+level of adaptability to the context under examination.
+
+Multiple commands may be entered on a single line, separated by the
+pair ';;'.  No intelligence is applied to separating the commands; the
+input is split at the first ';;', even if it is in the middle of a
+quoted string.
+
+If a file ".pdbrc" exists in your home directory or in the current
+directory, it is read in and executed as if it had been typed at the
+debugger prompt.  This is particularly useful for aliases.  If both
+files exist, the one in the home directory is read first and aliases
+defined there can be overridden by the local file.
+
+Aside from aliases, the debugger is not directly programmable; but it
+is implemented as a class from which you can derive your own debugger
+class, which you can make as fancy as you like.
+
+
+Debugger commands
+=================
+
+h(elp)
+        Without argument, print the list of available commands.  With
+        a command name as argument, print help about that command
+        (this is currently not implemented).
+
+w(here)
+        Print a stack trace, with the most recent frame at the bottom.
+        An arrow indicates the "current frame", which determines the
+        context of most commands.
+
+d(own)
+        Move the current frame one level down in the stack trace
+        (to a newer frame).
+
+u(p)
+        Move the current frame one level up in the stack trace
+        (to an older frame).
+
+b(reak) [ ([filename:]lineno | function) [, condition] ]
+        With a filename:line number argument, set a break there.  If
+        filename is omitted, use the current file.  With a function
+        name, set a break at the first executable line of that
+        function.  Without argument, list all breaks.  Each breakpoint
+        is assigned a number to which all the other breakpoint
+        commands refer.
+
+        The condition argument, if present, is a string which must
+        evaluate to true in order for the breakpoint to be honored.
+
+tbreak [ ([filename:]lineno | function) [, condition] ]
+        Temporary breakpoint, which is removed automatically when it
+        is first hit.  The arguments are the same as break.
+
+cl(ear) [bpnumber [bpnumber ...] ]
+        With a space separated list of breakpoint numbers, clear those
+        breakpoints.  Without argument, clear all breaks (but first
+        ask confirmation).
+
+disable bpnumber [bpnumber ...]
+        Disables the breakpoints given as a space separated list of
+        breakpoint numbers.  Disabling a breakpoint means it cannot
+        cause the program to stop execution, but unlike clearing a
+        breakpoint, it remains in the list of breakpoints and can be
+        (re-)enabled.
+
+enable bpnumber [bpnumber ...]
+        Enables the breakpoints specified.
+
+ignore bpnumber count
+        Sets the ignore count for the given breakpoint number.  If
+        count is omitted, the ignore count is set to 0.  A breakpoint
+        becomes active when the ignore count is zero.  When non-zero,
+        the count is decremented each time the breakpoint is reached
+        and the breakpoint is not disabled and any associated
+        condition evaluates to true.
+
+condition bpnumber condition
+        condition is an expression which must evaluate to true before
+        the breakpoint is honored.  If condition is absent, any
+        existing condition is removed; i.e., the breakpoint is made
+        unconditional.
+
+s(tep)
+        Execute the current line, stop at the first possible occasion
+        (either in a function that is called or in the current function).
+
+n(ext)
+        Continue execution until the next line in the current function
+        is reached or it returns.
+
+r(eturn)
+        Continue execution until the current function returns.
+
+c(ont(inue))
+        Continue execution, only stop when a breakpoint is encountered.
+
+l(ist) [first [,last]]
+        List source code for the current file.
+        Without arguments, list 11 lines around the current line
+        or continue the previous listing.
+        With one argument, list 11 lines starting at that line.
+        With two arguments, list the given range;
+        if the second argument is less than the first, it is a count.
+
+a(rgs)
+        Print the argument list of the current function.
+
+p expression
+        Print the value of the expression.
+
+(!) statement
+        Execute the (one-line) statement in the context of the current
+        stack frame.  The exclamation point can be omitted unless the
+        first word of the statement resembles a debugger command.  To
+        assign to a global variable you must always prefix the command
+        with a 'global' command, e.g.:
+        (Pdb) global list_options; list_options = ['-l']
+        (Pdb)
+
+
+whatis arg
+         Prints the type of the argument.
+
+alias [name [command]]
+        Creates an alias called 'name' that executes 'command'.  The
+        command must *not* be enclosed in quotes.  Replaceable
+        parameters can be indicated by %1, %2, and so on, while %* is
+        replaced by all the parameters.  If no command is given, the
+        current alias for name is shown. If no name is given, all
+        aliases are listed.
+
+        Aliases may be nested and can contain anything that can be
+        legally typed at the pdb prompt.  Note!  You *can* override
+        internal pdb commands with aliases!  Those internal commands
+        are then hidden until the alias is removed.  Aliasing is
+        recursively applied to the first word of the command line; all
+        other words in the line are left alone.
+
+        As an example, here are two useful aliases (especially when
+        placed in the .pdbrc file):
+
+        #Print instance variables (usage "pi classInst")
+        alias pi for k in %1.__dict__.keys(): print "%1.",k,"=",%1.__dict__[k]
+        #Print instance variables in self
+        alias ps pi self
+                
+unalias name
+        Deletes the specified alias.
+
+q(uit)
+        Quit from the debugger.
+        The program being executed is aborted.
diff --git a/depot_tools/release/win/python_24/Lib/pdb.py b/depot_tools/release/win/python_24/Lib/pdb.py
new file mode 100644
index 0000000..7b5dffa3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pdb.py
@@ -0,0 +1,1078 @@
+#! /usr/bin/env python
+
+"""A Python debugger."""
+
+# (See pdb.doc for documentation.)
+
+import sys
+import linecache
+import cmd
+import bdb
+from repr import Repr
+import os
+import re
+import pprint
+import traceback
+# Create a custom safe Repr instance and increase its maxstring.
+# The default of 30 truncates error messages too easily.
+_repr = Repr()
+_repr.maxstring = 200
+_saferepr = _repr.repr
+
+__all__ = ["run", "pm", "Pdb", "runeval", "runctx", "runcall", "set_trace",
+           "post_mortem", "help"]
+
def find_function(funcname, filename):
    """Return (funcname, filename, lineno) for the first line in
    'filename' that looks like 'def funcname(', or None if the file
    cannot be opened or contains no such definition."""
    cre = re.compile(r'def\s+%s\s*[(]' % funcname)
    try:
        fp = open(filename)
    except IOError:
        return None
    try:
        # consumer of this info expects the first line to be 1
        for index, line in enumerate(fp):
            if cre.match(line):
                return funcname, filename, index + 1
        return None
    finally:
        # Close the file even if matching raises.
        fp.close()
+
+
+# Interaction prompt line will separate file and call info from code
+# text using value of line_prefix string.  A newline and arrow may
+# be to your liking.  You can set it once pdb is imported using the
+# command "pdb.line_prefix = '\n% '".
+# line_prefix = ': '    # Use this to get the old situation back
+line_prefix = '\n-> '   # Probably a better default
+
+class Pdb(bdb.Bdb, cmd.Cmd):
+
    def __init__(self):
        """Set up the debugger: initialize both base classes, the
        prompt and alias table, and buffer any .pdbrc commands."""
        bdb.Bdb.__init__(self)
        cmd.Cmd.__init__(self)
        self.prompt = '(Pdb) '
        self.aliases = {}
        self.mainpyfile = ''
        self.  # (never mind)
+
+    # Override Bdb methods
+
+    def user_call(self, frame, argument_list):
+        """This method is called when there is the remote possibility
+        that we ever need to stop in this function."""
+        if self._wait_for_mainpyfile:
+            return
+        if self.stop_here(frame):
+            print '--Call--'
+            self.interaction(frame, None)
+
+    def user_line(self, frame):
+        """This function is called when we stop or break at this line."""
+        if self._wait_for_mainpyfile:
+            if (self.mainpyfile != self.canonic(frame.f_code.co_filename)
+                or frame.f_lineno<= 0):
+                return
+            self._wait_for_mainpyfile = 0
+        self.interaction(frame, None)
+
+    def user_return(self, frame, return_value):
+        """This function is called when a return trap is set here."""
+        frame.f_locals['__return__'] = return_value
+        print '--Return--'
+        self.interaction(frame, None)
+
+    def user_exception(self, frame, (exc_type, exc_value, exc_traceback)):
+        """This function is called if an exception occurs,
+        but only if we are to stop at or just below this level."""
+        frame.f_locals['__exception__'] = exc_type, exc_value
+        if type(exc_type) == type(''):
+            exc_type_name = exc_type
+        else: exc_type_name = exc_type.__name__
+        print exc_type_name + ':', _saferepr(exc_value)
+        self.interaction(frame, exc_traceback)
+
+    # General interaction function
+
+    def interaction(self, frame, traceback):
+        self.setup(frame, traceback)
+        self.print_stack_entry(self.stack[self.curindex])
+        self.cmdloop()
+        self.forget()
+
+    def default(self, line):
+        if line[:1] == '!': line = line[1:]
+        locals = self.curframe.f_locals
+        globals = self.curframe.f_globals
+        try:
+            code = compile(line + '\n', '<stdin>', 'single')
+            exec code in globals, locals
+        except:
+            t, v = sys.exc_info()[:2]
+            if type(t) == type(''):
+                exc_type_name = t
+            else: exc_type_name = t.__name__
+            print '***', exc_type_name + ':', v
+
+    def precmd(self, line):
+        """Handle alias expansion and ';;' separator."""
+        if not line.strip():
+            return line
+        args = line.split()
+        while args[0] in self.aliases:
+            line = self.aliases[args[0]]
+            ii = 1
+            for tmpArg in args[1:]:
+                line = line.replace("%" + str(ii),
+                                      tmpArg)
+                ii = ii + 1
+            line = line.replace("%*", ' '.join(args[1:]))
+            args = line.split()
+        # split into ';;' separated commands
+        # unless it's an alias command
+        if args[0] != 'alias':
+            marker = line.find(';;')
+            if marker >= 0:
+                # queue up everything after marker
+                next = line[marker+2:].lstrip()
+                self.cmdqueue.append(next)
+                line = line[:marker].rstrip()
+        return line
+
+    # Command definitions, called by cmdloop()
+    # The argument is the remaining string on the command line
+    # Return true to exit from the command loop
+
+    do_h = cmd.Cmd.do_help
+
+    def do_break(self, arg, temporary = 0):
+        # break [ ([filename:]lineno | function) [, "condition"] ]
+        if not arg:
+            if self.breaks:  # There's at least one
+                print "Num Type         Disp Enb   Where"
+                for bp in bdb.Breakpoint.bpbynumber:
+                    if bp:
+                        bp.bpprint()
+            return
+        # parse arguments; comma has lowest precedence
+        # and cannot occur in filename
+        filename = None
+        lineno = None
+        cond = None
+        comma = arg.find(',')
+        if comma > 0:
+            # parse stuff after comma: "condition"
+            cond = arg[comma+1:].lstrip()
+            arg = arg[:comma].rstrip()
+        # parse stuff before comma: [filename:]lineno | function
+        colon = arg.rfind(':')
+        funcname = None
+        if colon >= 0:
+            filename = arg[:colon].rstrip()
+            f = self.lookupmodule(filename)
+            if not f:
+                print '*** ', repr(filename),
+                print 'not found from sys.path'
+                return
+            else:
+                filename = f
+            arg = arg[colon+1:].lstrip()
+            try:
+                lineno = int(arg)
+            except ValueError, msg:
+                print '*** Bad lineno:', arg
+                return
+        else:
+            # no colon; can be lineno or function
+            try:
+                lineno = int(arg)
+            except ValueError:
+                try:
+                    func = eval(arg,
+                                self.curframe.f_globals,
+                                self.curframe.f_locals)
+                except:
+                    func = arg
+                try:
+                    if hasattr(func, 'im_func'):
+                        func = func.im_func
+                    code = func.func_code
+                    #use co_name to identify the bkpt (function names
+                    #could be aliased, but co_name is invariant)
+                    funcname = code.co_name
+                    lineno = code.co_firstlineno
+                    filename = code.co_filename
+                except:
+                    # last thing to try
+                    (ok, filename, ln) = self.lineinfo(arg)
+                    if not ok:
+                        print '*** The specified object',
+                        print repr(arg),
+                        print 'is not a function'
+                        print ('or was not found '
+                               'along sys.path.')
+                        return
+                    funcname = ok # ok contains a function name
+                    lineno = int(ln)
+        if not filename:
+            filename = self.defaultFile()
+        # Check for reasonable breakpoint
+        line = self.checkline(filename, lineno)
+        if line:
+            # now set the break point
+            err = self.set_break(filename, line, temporary, cond, funcname)
+            if err: print '***', err
+            else:
+                bp = self.get_breaks(filename, line)[-1]
+                print "Breakpoint %d at %s:%d" % (bp.number,
+                                                  bp.file,
+                                                  bp.line)
+
+    # To be overridden in derived debuggers
+    def defaultFile(self):
+        """Produce a reasonable default."""
+        filename = self.curframe.f_code.co_filename
+        if filename == '<string>' and self.mainpyfile:
+            filename = self.mainpyfile
+        return filename
+
+    do_b = do_break
+
+    def do_tbreak(self, arg):
+        self.do_break(arg, 1)
+
+    def lineinfo(self, identifier):
+        failed = (None, None, None)
+        # Input is identifier, may be in single quotes
+        idstring = identifier.split("'")
+        if len(idstring) == 1:
+            # not in single quotes
+            id = idstring[0].strip()
+        elif len(idstring) == 3:
+            # quoted
+            id = idstring[1].strip()
+        else:
+            return failed
+        if id == '': return failed
+        parts = id.split('.')
+        # Protection for derived debuggers
+        if parts[0] == 'self':
+            del parts[0]
+            if len(parts) == 0:
+                return failed
+        # Best first guess at file to look at
+        fname = self.defaultFile()
+        if len(parts) == 1:
+            item = parts[0]
+        else:
+            # More than one part.
+            # First is module, second is method/class
+            f = self.lookupmodule(parts[0])
+            if f:
+                fname = f
+            item = parts[1]
+        answer = find_function(item, fname)
+        return answer or failed
+
+    def checkline(self, filename, lineno):
+        """Check whether specified line seems to be executable.
+
+        Return `lineno` if it is, 0 if not (e.g. a docstring, comment, blank
+        line or EOF). Warning: testing is not comprehensive.
+        """
+        line = linecache.getline(filename, lineno)
+        if not line:
+            print 'End of file'
+            return 0
+        line = line.strip()
+        # Don't allow setting breakpoint at a blank line
+        if (not line or (line[0] == '#') or
+             (line[:3] == '"""') or line[:3] == "'''"):
+            print '*** Blank or comment'
+            return 0
+        return lineno
+
+    def do_enable(self, arg):
+        args = arg.split()
+        for i in args:
+            try:
+                i = int(i)
+            except ValueError:
+                print 'Breakpoint index %r is not a number' % i
+                continue
+
+            if not (0 <= i < len(bdb.Breakpoint.bpbynumber)):
+                print 'No breakpoint numbered', i
+                continue
+
+            bp = bdb.Breakpoint.bpbynumber[i]
+            if bp:
+                bp.enable()
+
+    def do_disable(self, arg):
+        args = arg.split()
+        for i in args:
+            try:
+                i = int(i)
+            except ValueError:
+                print 'Breakpoint index %r is not a number' % i
+                continue
+
+            if not (0 <= i < len(bdb.Breakpoint.bpbynumber)):
+                print 'No breakpoint numbered', i
+                continue
+
+            bp = bdb.Breakpoint.bpbynumber[i]
+            if bp:
+                bp.disable()
+
+    def do_condition(self, arg):
+        # arg is breakpoint number and condition
+        args = arg.split(' ', 1)
+        bpnum = int(args[0].strip())
+        try:
+            cond = args[1]
+        except:
+            cond = None
+        bp = bdb.Breakpoint.bpbynumber[bpnum]
+        if bp:
+            bp.cond = cond
+            if not cond:
+                print 'Breakpoint', bpnum,
+                print 'is now unconditional.'
+
+    def do_ignore(self,arg):
+        """arg is bp number followed by ignore count."""
+        args = arg.split()
+        bpnum = int(args[0].strip())
+        try:
+            count = int(args[1].strip())
+        except:
+            count = 0
+        bp = bdb.Breakpoint.bpbynumber[bpnum]
+        if bp:
+            bp.ignore = count
+            if count > 0:
+                reply = 'Will ignore next '
+                if count > 1:
+                    reply = reply + '%d crossings' % count
+                else:
+                    reply = reply + '1 crossing'
+                print reply + ' of breakpoint %d.' % bpnum
+            else:
+                print 'Will stop next time breakpoint',
+                print bpnum, 'is reached.'
+
+    def do_clear(self, arg):
+        """Three possibilities, tried in this order:
+        clear -> clear all breaks, ask for confirmation
+        clear file:lineno -> clear all breaks at file:lineno
+        clear bpno bpno ... -> clear breakpoints by number"""
+        if not arg:
+            try:
+                reply = raw_input('Clear all breaks? ')
+            except EOFError:
+                reply = 'no'
+            reply = reply.strip().lower()
+            if reply in ('y', 'yes'):
+                self.clear_all_breaks()
+            return
+        if ':' in arg:
+            # Make sure it works for "clear C:\foo\bar.py:12"
+            i = arg.rfind(':')
+            filename = arg[:i]
+            arg = arg[i+1:]
+            try:
+                lineno = int(arg)
+            except:
+                err = "Invalid line number (%s)" % arg
+            else:
+                err = self.clear_break(filename, lineno)
+            if err: print '***', err
+            return
+        numberlist = arg.split()
+        for i in numberlist:
+            err = self.clear_bpbynumber(i)
+            if err:
+                print '***', err
+            else:
+                print 'Deleted breakpoint %s ' % (i,)
+    do_cl = do_clear # 'c' is already an abbreviation for 'continue'
+
+    def do_where(self, arg):
+        self.print_stack_trace()
+    do_w = do_where
+    do_bt = do_where
+
+    def do_up(self, arg):
+        if self.curindex == 0:
+            print '*** Oldest frame'
+        else:
+            self.curindex = self.curindex - 1
+            self.curframe = self.stack[self.curindex][0]
+            self.print_stack_entry(self.stack[self.curindex])
+            self.lineno = None
+    do_u = do_up
+
+    def do_down(self, arg):
+        if self.curindex + 1 == len(self.stack):
+            print '*** Newest frame'
+        else:
+            self.curindex = self.curindex + 1
+            self.curframe = self.stack[self.curindex][0]
+            self.print_stack_entry(self.stack[self.curindex])
+            self.lineno = None
+    do_d = do_down
+
+    def do_step(self, arg):
+        self.set_step()
+        return 1
+    do_s = do_step
+
+    def do_next(self, arg):
+        self.set_next(self.curframe)
+        return 1
+    do_n = do_next
+
+    def do_return(self, arg):
+        self.set_return(self.curframe)
+        return 1
+    do_r = do_return
+
+    def do_continue(self, arg):
+        self.set_continue()
+        return 1
+    do_c = do_cont = do_continue
+
+    def do_jump(self, arg):
+        if self.curindex + 1 != len(self.stack):
+            print "*** You can only jump within the bottom frame"
+            return
+        try:
+            arg = int(arg)
+        except ValueError:
+            print "*** The 'jump' command requires a line number."
+        else:
+            try:
+                # Do the jump, fix up our copy of the stack, and display the
+                # new position
+                self.curframe.f_lineno = arg
+                self.stack[self.curindex] = self.stack[self.curindex][0], arg
+                self.print_stack_entry(self.stack[self.curindex])
+            except ValueError, e:
+                print '*** Jump failed:', e
+    do_j = do_jump
+
+    def do_debug(self, arg):
+        sys.settrace(None)
+        globals = self.curframe.f_globals
+        locals = self.curframe.f_locals
+        p = Pdb()
+        p.prompt = "(%s) " % self.prompt.strip()
+        print "ENTERING RECURSIVE DEBUGGER"
+        sys.call_tracing(p.run, (arg, globals, locals))
+        print "LEAVING RECURSIVE DEBUGGER"
+        sys.settrace(self.trace_dispatch)
+        self.lastcmd = p.lastcmd
+
+    def do_quit(self, arg):
+        self._user_requested_quit = 1
+        self.set_quit()
+        return 1
+
+    do_q = do_quit
+    do_exit = do_quit
+
+    def do_EOF(self, arg):
+        print
+        self._user_requested_quit = 1
+        self.set_quit()
+        return 1
+
+    def do_args(self, arg):
+        f = self.curframe
+        co = f.f_code
+        dict = f.f_locals
+        n = co.co_argcount
+        if co.co_flags & 4: n = n+1
+        if co.co_flags & 8: n = n+1
+        for i in range(n):
+            name = co.co_varnames[i]
+            print name, '=',
+            if name in dict: print dict[name]
+            else: print "*** undefined ***"
+    do_a = do_args
+
+    def do_retval(self, arg):
+        if '__return__' in self.curframe.f_locals:
+            print self.curframe.f_locals['__return__']
+        else:
+            print '*** Not yet returned!'
+    do_rv = do_retval
+
+    def _getval(self, arg):
+        try:
+            return eval(arg, self.curframe.f_globals,
+                        self.curframe.f_locals)
+        except:
+            t, v = sys.exc_info()[:2]
+            if isinstance(t, str):
+                exc_type_name = t
+            else: exc_type_name = t.__name__
+            print '***', exc_type_name + ':', repr(v)
+            raise
+
+    def do_p(self, arg):
+        try:
+            print repr(self._getval(arg))
+        except:
+            pass
+
+    def do_pp(self, arg):
+        try:
+            pprint.pprint(self._getval(arg))
+        except:
+            pass
+
+    def do_list(self, arg):
+        self.lastcmd = 'list'
+        last = None
+        if arg:
+            try:
+                x = eval(arg, {}, {})
+                if type(x) == type(()):
+                    first, last = x
+                    first = int(first)
+                    last = int(last)
+                    if last < first:
+                        # Assume it's a count
+                        last = first + last
+                else:
+                    first = max(1, int(x) - 5)
+            except:
+                print '*** Error in argument:', repr(arg)
+                return
+        elif self.lineno is None:
+            first = max(1, self.curframe.f_lineno - 5)
+        else:
+            first = self.lineno + 1
+        if last is None:
+            last = first + 10
+        filename = self.curframe.f_code.co_filename
+        breaklist = self.get_file_breaks(filename)
+        try:
+            for lineno in range(first, last+1):
+                line = linecache.getline(filename, lineno)
+                if not line:
+                    print '[EOF]'
+                    break
+                else:
+                    s = repr(lineno).rjust(3)
+                    if len(s) < 4: s = s + ' '
+                    if lineno in breaklist: s = s + 'B'
+                    else: s = s + ' '
+                    if lineno == self.curframe.f_lineno:
+                        s = s + '->'
+                    print s + '\t' + line,
+                    self.lineno = lineno
+        except KeyboardInterrupt:
+            pass
+    do_l = do_list
+
+    def do_whatis(self, arg):
+        try:
+            value = eval(arg, self.curframe.f_globals,
+                            self.curframe.f_locals)
+        except:
+            t, v = sys.exc_info()[:2]
+            if type(t) == type(''):
+                exc_type_name = t
+            else: exc_type_name = t.__name__
+            print '***', exc_type_name + ':', repr(v)
+            return
+        code = None
+        # Is it a function?
+        try: code = value.func_code
+        except: pass
+        if code:
+            print 'Function', code.co_name
+            return
+        # Is it an instance method?
+        try: code = value.im_func.func_code
+        except: pass
+        if code:
+            print 'Method', code.co_name
+            return
+        # None of the above...
+        print type(value)
+
+    def do_alias(self, arg):
+        args = arg.split()
+        if len(args) == 0:
+            keys = self.aliases.keys()
+            keys.sort()
+            for alias in keys:
+                print "%s = %s" % (alias, self.aliases[alias])
+            return
+        if args[0] in self.aliases and len(args) == 1:
+            print "%s = %s" % (args[0], self.aliases[args[0]])
+        else:
+            self.aliases[args[0]] = ' '.join(args[1:])
+
+    def do_unalias(self, arg):
+        args = arg.split()
+        if len(args) == 0: return
+        if args[0] in self.aliases:
+            del self.aliases[args[0]]
+
+    # Print a traceback starting at the top stack frame.
+    # The most recently entered frame is printed last;
+    # this is different from dbx and gdb, but consistent with
+    # the Python interpreter's stack trace.
+    # It is also consistent with the up/down commands (which are
+    # compatible with dbx and gdb: up moves towards 'main()'
+    # and down moves towards the most recent stack frame).
+
+    def print_stack_trace(self):
+        try:
+            for frame_lineno in self.stack:
+                self.print_stack_entry(frame_lineno)
+        except KeyboardInterrupt:
+            pass
+
+    def print_stack_entry(self, frame_lineno, prompt_prefix=line_prefix):
+        frame, lineno = frame_lineno
+        if frame is self.curframe:
+            print '>',
+        else:
+            print ' ',
+        print self.format_stack_entry(frame_lineno, prompt_prefix)
+
+
+    # Help methods (derived from pdb.doc)
+
+    def help_help(self):
+        self.help_h()
+
+    def help_h(self):
+        print """h(elp)
+Without argument, print the list of available commands.
+With a command name as argument, print help about that command
+"help pdb" pipes the full documentation file to the $PAGER
+"help exec" gives help on the ! command"""
+
+    def help_where(self):
+        self.help_w()
+
+    def help_w(self):
+        print """w(here)
+Print a stack trace, with the most recent frame at the bottom.
+An arrow indicates the "current frame", which determines the
+context of most commands.  'bt' is an alias for this command."""
+
+    help_bt = help_w
+
+    def help_down(self):
+        self.help_d()
+
+    def help_d(self):
+        print """d(own)
+Move the current frame one level down in the stack trace
+(to a newer frame)."""
+
+    def help_up(self):
+        self.help_u()
+
+    def help_u(self):
+        print """u(p)
+Move the current frame one level up in the stack trace
+(to an older frame)."""
+
+    def help_break(self):
+        self.help_b()
+
+    def help_b(self):
+        print """b(reak) ([file:]lineno | function) [, condition]
+With a line number argument, set a break there in the current
+file.  With a function name, set a break at first executable line
+of that function.  Without argument, list all breaks.  If a second
+argument is present, it is a string specifying an expression
+which must evaluate to true before the breakpoint is honored.
+
+The line number may be prefixed with a filename and a colon,
+to specify a breakpoint in another file (probably one that
+hasn't been loaded yet).  The file is searched for on sys.path;
+the .py suffix may be omitted."""
+
+    def help_clear(self):
+        self.help_cl()
+
+    def help_cl(self):
+        print "cl(ear) filename:lineno"
+        print """cl(ear) [bpnumber [bpnumber...]]
+With a space separated list of breakpoint numbers, clear
+those breakpoints.  Without argument, clear all breaks (but
+first ask confirmation).  With a filename:lineno argument,
+clear all breaks at that line in that file.
+
+Note that the argument is different from previous versions of
+the debugger (in python distributions 1.5.1 and before) where
+a linenumber was used instead of either filename:lineno or
+breakpoint numbers."""
+
+    def help_tbreak(self):
+        print """tbreak  same arguments as break, but breakpoint is
+removed when first hit."""
+
+    def help_enable(self):
+        print """enable bpnumber [bpnumber ...]
+Enables the breakpoints given as a space separated list of
+bp numbers."""
+
+    def help_disable(self):
+        print """disable bpnumber [bpnumber ...]
+Disables the breakpoints given as a space separated list of
+bp numbers."""
+
+    def help_ignore(self):
+        print """ignore bpnumber count
+Sets the ignore count for the given breakpoint number.  A breakpoint
+becomes active when the ignore count is zero.  When non-zero, the
+count is decremented each time the breakpoint is reached and the
+breakpoint is not disabled and any associated condition evaluates
+to true."""
+
+    def help_condition(self):
+        print """condition bpnumber str_condition
+str_condition is a string specifying an expression which
+must evaluate to true before the breakpoint is honored.
+If str_condition is absent, any existing condition is removed;
+i.e., the breakpoint is made unconditional."""
+
+    def help_step(self):
+        self.help_s()
+
+    def help_s(self):
+        print """s(tep)
+Execute the current line, stop at the first possible occasion
+(either in a function that is called or in the current function)."""
+
+    def help_next(self):
+        self.help_n()
+
+    def help_n(self):
+        print """n(ext)
+Continue execution until the next line in the current function
+is reached or it returns."""
+
+    def help_return(self):
+        self.help_r()
+
+    def help_r(self):
+        print """r(eturn)
+Continue execution until the current function returns."""
+
+    def help_continue(self):
+        self.help_c()
+
+    def help_cont(self):
+        self.help_c()
+
+    def help_c(self):
+        print """c(ont(inue))
+Continue execution, only stop when a breakpoint is encountered."""
+
+    def help_jump(self):
+        self.help_j()
+
+    def help_j(self):
+        print """j(ump) lineno
+Set the next line that will be executed."""
+
+    def help_debug(self):
+        print """debug code
+Enter a recursive debugger that steps through the code argument
+(which is an arbitrary expression or statement to be executed
+in the current environment)."""
+
+    def help_list(self):
+        self.help_l()
+
+    def help_l(self):
+        print """l(ist) [first [,last]]
+List source code for the current file.
+Without arguments, list 11 lines around the current line
+or continue the previous listing.
+With one argument, list 11 lines starting at that line.
+With two arguments, list the given range;
+if the second argument is less than the first, it is a count."""
+
+    def help_args(self):
+        self.help_a()
+
+    def help_a(self):
+        print """a(rgs)
+Print the arguments of the current function."""
+
+    def help_p(self):
+        print """p expression
+Print the value of the expression."""
+
+    def help_pp(self):
+        print """pp expression
+Pretty-print the value of the expression."""
+
+    def help_exec(self):
+        print """(!) statement
+Execute the (one-line) statement in the context of
+the current stack frame.
+The exclamation point can be omitted unless the first word
+of the statement resembles a debugger command.
+To assign to a global variable you must always prefix the
+command with a 'global' command, e.g.:
+(Pdb) global list_options; list_options = ['-l']
+(Pdb)"""
+
+    def help_quit(self):
+        self.help_q()
+
+    def help_q(self):
+        print """q(uit) or exit - Quit from the debugger.
+The program being executed is aborted."""
+
+    help_exit = help_q
+
+    def help_whatis(self):
+        print """whatis arg
+Prints the type of the argument."""
+
+    def help_EOF(self):
+        print """EOF
+Handles the receipt of EOF as a command."""
+
+    def help_alias(self):
+        print """alias [name [command [parameter parameter ...] ]]
+Creates an alias called 'name' the executes 'command'.  The command
+must *not* be enclosed in quotes.  Replaceable parameters are
+indicated by %1, %2, and so on, while %* is replaced by all the
+parameters.  If no command is given, the current alias for name
+is shown. If no name is given, all aliases are listed.
+
+Aliases may be nested and can contain anything that can be
+legally typed at the pdb prompt.  Note!  You *can* override
+internal pdb commands with aliases!  Those internal commands
+are then hidden until the alias is removed.  Aliasing is recursively
+applied to the first word of the command line; all other words
+in the line are left alone.
+
+Some useful aliases (especially when placed in the .pdbrc file) are:
+
+#Print instance variables (usage "pi classInst")
+alias pi for k in %1.__dict__.keys(): print "%1.",k,"=",%1.__dict__[k]
+
+#Print instance variables in self
+alias ps pi self
+"""
+
+    def help_unalias(self):
+        print """unalias name
+Deletes the specified alias."""
+
+    def help_pdb(self):
+        help()
+
+    def lookupmodule(self, filename):
+        """Helper function for break/clear parsing -- may be overridden.
+
+        lookupmodule() translates (possibly incomplete) file or module name
+        into an absolute file name.
+        """
+        if os.path.isabs(filename) and  os.path.exists(filename):
+            return filename
+        f = os.path.join(sys.path[0], filename)
+        if  os.path.exists(f) and self.canonic(f) == self.mainpyfile:
+            return f
+        root, ext = os.path.splitext(filename)
+        if ext == '':
+            filename = filename + '.py'
+        if os.path.isabs(filename):
+            return filename
+        for dirname in sys.path:
+            while os.path.islink(dirname):
+                dirname = os.readlink(dirname)
+            fullname = os.path.join(dirname, filename)
+            if os.path.exists(fullname):
+                return fullname
+        return None
+
+    def _runscript(self, filename):
+        # Start with fresh empty copy of globals and locals and tell the script
+        # that it's being run as __main__ to avoid scripts being able to access
+        # the pdb.py namespace.
+        globals_ = {"__name__" : "__main__"}
+        locals_ = globals_
+
+        # When bdb sets tracing, a number of call and line events happens
+        # BEFORE debugger even reaches user's code (and the exact sequence of
+        # events depends on python version). So we take special measures to
+        # avoid stopping before we reach the main script (see user_line and
+        # user_call for details).
+        self._wait_for_mainpyfile = 1
+        self.mainpyfile = self.canonic(filename)
+        self._user_requested_quit = 0
+        statement = 'execfile( "%s")' % filename
+        self.run(statement, globals=globals_, locals=locals_)
+
+# Simplified interface
+
+def run(statement, globals=None, locals=None):
+    Pdb().run(statement, globals, locals)
+
+def runeval(expression, globals=None, locals=None):
+    return Pdb().runeval(expression, globals, locals)
+
+def runctx(statement, globals, locals):
+    # B/W compatibility
+    run(statement, globals, locals)
+
+def runcall(*args, **kwds):
+    return Pdb().runcall(*args, **kwds)
+
+def set_trace():
+    Pdb().set_trace(sys._getframe().f_back)
+
+# Post-Mortem interface
+
+def post_mortem(t):
+    p = Pdb()
+    p.reset()
+    while t.tb_next is not None:
+        t = t.tb_next
+    p.interaction(t.tb_frame, t)
+
+def pm():
+    post_mortem(sys.last_traceback)
+
+
+# Main program for testing
+
+TESTCMD = 'import x; x.main()'
+
+def test():
+    run(TESTCMD)
+
+# print help
+def help():
+    for dirname in sys.path:
+        fullname = os.path.join(dirname, 'pdb.doc')
+        if os.path.exists(fullname):
+            sts = os.system('${PAGER-more} '+fullname)
+            if sts: print '*** Pager exit status:', sts
+            break
+    else:
+        print 'Sorry, can\'t find the help file "pdb.doc"',
+        print 'along the Python search path'
+
+def main():
+    if not sys.argv[1:]:
+        print "usage: pdb.py scriptfile [arg] ..."
+        sys.exit(2)
+
+    mainpyfile =  sys.argv[1]     # Get script filename
+    if not os.path.exists(mainpyfile):
+        print 'Error:', mainpyfile, 'does not exist'
+        sys.exit(1)
+
+    del sys.argv[0]         # Hide "pdb.py" from argument list
+
+    # Replace pdb's dir with script's dir in front of module search path.
+    sys.path[0] = os.path.dirname(mainpyfile)
+
+    # Note on saving/restoring sys.argv: it's a good idea when sys.argv was
+    # modified by the script being debugged. It's a bad idea when it was
+    # changed by the user from the command line. The best approach would be to
+    # have a "restart" command which would allow explicit specification of
+    # command line arguments.
+    pdb = Pdb()
+    while 1:
+        try:
+            pdb._runscript(mainpyfile)
+            if pdb._user_requested_quit:
+                break
+            print "The program finished and will be restarted"
+        except SystemExit:
+            # In most cases SystemExit does not warrant a post-mortem session.
+            print "The program exited via sys.exit(). Exit status: ",
+            print sys.exc_info()[1]
+        except:
+            traceback.print_exc()
+            print "Uncaught exception. Entering post mortem debugging"
+            print "Running 'cont' or 'step' will restart the program"
+            t = sys.exc_info()[2]
+            while t.tb_next is not None:
+                t = t.tb_next
+            pdb.interaction(t.tb_frame,t)
+            print "Post mortem debugger finished. The "+mainpyfile+" will be restarted"
+
+
+# When invoked as main program, invoke the debugger on a script
+if __name__=='__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/pickle.py b/depot_tools/release/win/python_24/Lib/pickle.py
new file mode 100644
index 0000000..d782b48
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pickle.py
@@ -0,0 +1,1403 @@
+"""Create portable serialized representations of Python objects.
+
+See module cPickle for a (much) faster implementation.
+See module copy_reg for a mechanism for registering custom picklers.
+See module pickletools source for extensive comments.
+
+Classes:
+
+    Pickler
+    Unpickler
+
+Functions:
+
+    dump(object, file)
+    dumps(object) -> string
+    load(file) -> object
+    loads(string) -> object
+
+Misc variables:
+
+    __version__
+    format_version
+    compatible_formats
+
+"""
+
+__version__ = "$Revision: 1.158 $"       # Code version
+
+from types import *
+from copy_reg import dispatch_table
+from copy_reg import _extension_registry, _inverted_registry, _extension_cache
+import marshal
+import sys
+import struct
+import re
+import warnings
+
+__all__ = ["PickleError", "PicklingError", "UnpicklingError", "Pickler",
+           "Unpickler", "dump", "dumps", "load", "loads"]
+
+# These are purely informational; no code uses these.
+format_version = "2.0"                  # File format version we write
+compatible_formats = ["1.0",            # Original protocol 0
+                      "1.1",            # Protocol 0 with INST added
+                      "1.2",            # Original protocol 1
+                      "1.3",            # Protocol 1 with BINFLOAT added
+                      "2.0",            # Protocol 2
+                      ]                 # Old format versions we can read
+
+# Keep in synch with cPickle.  This is the highest protocol number we
+# know how to read.
+HIGHEST_PROTOCOL = 2
+
+# Why use struct.pack() for pickling but marshal.loads() for
+# unpickling?  struct.pack() is 40% faster than marshal.dumps(), but
+# marshal.loads() is twice as fast as struct.unpack()!
+mloads = marshal.loads
+
+class PickleError(Exception):
+    """A common base class for the other pickling exceptions."""
+    pass
+
+class PicklingError(PickleError):
+    """This exception is raised when an unpicklable object is passed to the
+    dump() method.
+
+    """
+    pass
+
+class UnpicklingError(PickleError):
+    """This exception is raised when there is a problem unpickling an object,
+    such as a security violation.
+
+    Note that other exceptions may also be raised during unpickling, including
+    (but not necessarily limited to) AttributeError, EOFError, ImportError,
+    and IndexError.
+
+    """
+    pass
+
+# An instance of _Stop is raised by Unpickler.load_stop() in response to
+# the STOP opcode, passing the object that is the result of unpickling.
+class _Stop(Exception):
+    """Internal flow-control exception carrying the finished unpickled object."""
+    def __init__(self, value):
+        # The fully reconstructed object; returned by Unpickler.load().
+        self.value = value
+
+# Jython has PyStringMap; it's a dict subclass with string keys
+try:
+    from org.python.core import PyStringMap
+except ImportError:
+    PyStringMap = None
+
+# UnicodeType may or may not be exported (normally imported from types)
+try:
+    UnicodeType
+except NameError:
+    UnicodeType = None
+
+# Pickle opcodes.  See pickletools.py for extensive docs.  The listing
+# here is in kind-of alphabetical order of 1-character pickle code.
+# pickletools groups them by purpose.
+
+MARK            = '('   # push special markobject on stack
+STOP            = '.'   # every pickle ends with STOP
+POP             = '0'   # discard topmost stack item
+POP_MARK        = '1'   # discard stack top through topmost markobject
+DUP             = '2'   # duplicate top stack item
+FLOAT           = 'F'   # push float object; decimal string argument
+INT             = 'I'   # push integer or bool; decimal string argument
+BININT          = 'J'   # push four-byte signed int
+BININT1         = 'K'   # push 1-byte unsigned int
+LONG            = 'L'   # push long; decimal string argument
+BININT2         = 'M'   # push 2-byte unsigned int
+NONE            = 'N'   # push None
+PERSID          = 'P'   # push persistent object; id is taken from string arg
+BINPERSID       = 'Q'   #  "       "         "  ;  "  "   "     "  stack
+REDUCE          = 'R'   # apply callable to argtuple, both on stack
+STRING          = 'S'   # push string; NL-terminated string argument
+BINSTRING       = 'T'   # push string; counted binary string argument
+SHORT_BINSTRING = 'U'   #  "     "   ;    "      "       "      " < 256 bytes
+UNICODE         = 'V'   # push Unicode string; raw-unicode-escaped'd argument
+BINUNICODE      = 'X'   #   "     "       "  ; counted UTF-8 string argument
+APPEND          = 'a'   # append stack top to list below it
+BUILD           = 'b'   # call __setstate__ or __dict__.update()
+GLOBAL          = 'c'   # push self.find_class(modname, name); 2 string args
+DICT            = 'd'   # build a dict from stack items
+EMPTY_DICT      = '}'   # push empty dict
+APPENDS         = 'e'   # extend list on stack by topmost stack slice
+GET             = 'g'   # push item from memo on stack; index is string arg
+BINGET          = 'h'   #   "    "    "    "   "   "  ;   "    " 1-byte arg
+INST            = 'i'   # build & push class instance
+LONG_BINGET     = 'j'   # push item from memo on stack; index is 4-byte arg
+LIST            = 'l'   # build list from topmost stack items
+EMPTY_LIST      = ']'   # push empty list
+OBJ             = 'o'   # build & push class instance
+PUT             = 'p'   # store stack top in memo; index is string arg
+BINPUT          = 'q'   #   "     "    "   "   " ;   "    " 1-byte arg
+LONG_BINPUT     = 'r'   #   "     "    "   "   " ;   "    " 4-byte arg
+SETITEM         = 's'   # add key+value pair to dict
+TUPLE           = 't'   # build tuple from topmost stack items
+EMPTY_TUPLE     = ')'   # push empty tuple
+SETITEMS        = 'u'   # modify dict by adding topmost key+value pairs
+BINFLOAT        = 'G'   # push float; arg is 8-byte float encoding
+
+TRUE            = 'I01\n'  # not an opcode; see INT docs in pickletools.py
+FALSE           = 'I00\n'  # not an opcode; see INT docs in pickletools.py
+
+# Protocol 2
+
+PROTO           = '\x80'  # identify pickle protocol
+NEWOBJ          = '\x81'  # build object by applying cls.__new__ to argtuple
+EXT1            = '\x82'  # push object from extension registry; 1-byte index
+EXT2            = '\x83'  # ditto, but 2-byte index
+EXT4            = '\x84'  # ditto, but 4-byte index
+TUPLE1          = '\x85'  # build 1-tuple from stack top
+TUPLE2          = '\x86'  # build 2-tuple from two topmost stack items
+TUPLE3          = '\x87'  # build 3-tuple from three topmost stack items
+NEWTRUE         = '\x88'  # push True
+NEWFALSE        = '\x89'  # push False
+LONG1           = '\x8a'  # push long from < 256 bytes
+LONG4           = '\x8b'  # push really big long
+
+_tuplesize2code = [EMPTY_TUPLE, TUPLE1, TUPLE2, TUPLE3]
+
+
+__all__.extend([x for x in dir() if re.match("[A-Z][A-Z0-9_]+$",x)])
+del x
+
+
+# Pickling machinery
+
+class Pickler:
+
+    def __init__(self, file, protocol=None, bin=None):
+        """This takes a file-like object for writing a pickle data stream.
+
+        The optional protocol argument tells the pickler to use the
+        given protocol; supported protocols are 0, 1, 2.  The default
+        protocol is 0, to be backwards compatible.  (Protocol 0 is the
+        only protocol that can be written to a file opened in text
+        mode and read back successfully.  When using a protocol higher
+        than 0, make sure the file is opened in binary mode, both when
+        pickling and unpickling.)
+
+        Protocol 1 is more efficient than protocol 0; protocol 2 is
+        more efficient than protocol 1.
+
+        Specifying a negative protocol version selects the highest
+        protocol version supported.  The higher the protocol used, the
+        more recent the version of Python needed to read the pickle
+        produced.
+
+        The file parameter must have a write() method that accepts a single
+        string argument.  It can thus be an open file object, a StringIO
+        object, or any other custom object that meets this interface.
+
+        """
+        # 'bin' is the older spelling of 'protocol'; the two are mutually
+        # exclusive and 'bin' is accepted only for backward compatibility.
+        if protocol is not None and bin is not None:
+            raise ValueError, "can't specify both 'protocol' and 'bin'"
+        if bin is not None:
+            warnings.warn("The 'bin' argument to Pickler() is deprecated",
+                          DeprecationWarning)
+            protocol = bin
+        if protocol is None:
+            protocol = 0
+        if protocol < 0:
+            # Negative selects the newest protocol this module supports.
+            protocol = HIGHEST_PROTOCOL
+        elif not 0 <= protocol <= HIGHEST_PROTOCOL:
+            raise ValueError("pickle protocol must be <= %d" % HIGHEST_PROTOCOL)
+        self.write = file.write
+        self.memo = {}              # id(obj) -> (memo key, obj); see memoize()
+        self.proto = int(protocol)
+        self.bin = protocol >= 1    # any protocol >= 1 uses binary opcodes
+        self.fast = 0               # nonzero disables memoization (see memoize)
+
+    def clear_memo(self):
+        """Clears the pickler's "memo".
+
+        The memo is the data structure that remembers which objects the
+        pickler has already seen, so that shared or recursive objects are
+        pickled by reference and not by value.  This method is useful when
+        re-using picklers.
+
+        """
+        self.memo.clear()
+
+    def dump(self, obj):
+        """Write a pickled representation of obj to the open file."""
+        # Protocol 2 pickles start with a PROTO opcode announcing the version.
+        if self.proto >= 2:
+            self.write(PROTO + chr(self.proto))
+        self.save(obj)
+        self.write(STOP)
+
+    def memoize(self, obj):
+        """Store an object in the memo."""
+
+        # The Pickler memo is a dictionary mapping object ids to 2-tuples
+        # that contain the Unpickler memo key and the object being memoized.
+        # The memo key is written to the pickle and will become
+        # the key in the Unpickler's memo.  The object is stored in the
+        # Pickler memo so that transient objects are kept alive during
+        # pickling.
+
+        # The use of the Unpickler memo length as the memo key is just a
+        # convention.  The only requirement is that the memo values be unique.
+        # But there appears no advantage to any other scheme, and this
+        # scheme allows the Unpickler memo to be implemented as a plain (but
+        # growable) array, indexed by memo key.
+        if self.fast:
+            return
+        assert id(obj) not in self.memo
+        memo_len = len(self.memo)
+        self.write(self.put(memo_len))
+        self.memo[id(obj)] = memo_len, obj
+
+    # Return a PUT (BINPUT, LONG_BINPUT) opcode string, with argument i.
+    def put(self, i, pack=struct.pack):
+        if self.bin:
+            # One-byte index when it fits, else a 4-byte little-endian index.
+            if i < 256:
+                return BINPUT + chr(i)
+            else:
+                return LONG_BINPUT + pack("<i", i)
+
+        return PUT + repr(i) + '\n'
+
+    # Return a GET (BINGET, LONG_BINGET) opcode string, with argument i.
+    def get(self, i, pack=struct.pack):
+        if self.bin:
+            if i < 256:
+                return BINGET + chr(i)
+            else:
+                return LONG_BINGET + pack("<i", i)
+
+        return GET + repr(i) + '\n'
+
+    def save(self, obj):
+        """Pickle obj, trying each strategy in order: persistent id, memo
+        reference, per-type dispatch table, class object, and finally the
+        copy_reg / __reduce_ex__ / __reduce__ protocol."""
+        # Check for persistent id (defined by a subclass)
+        pid = self.persistent_id(obj)
+        if pid:
+            self.save_pers(pid)
+            return
+
+        # Check the memo
+        x = self.memo.get(id(obj))
+        if x:
+            # Already pickled once; emit a GET referencing its memo key.
+            self.write(self.get(x[0]))
+            return
+
+        # Check the type dispatch table
+        t = type(obj)
+        f = self.dispatch.get(t)
+        if f:
+            f(self, obj) # Call unbound method with explicit self
+            return
+
+        # Check for a class with a custom metaclass; treat as regular class
+        try:
+            issc = issubclass(t, TypeType)
+        except TypeError: # t is not a class (old Boost; see SF #502085)
+            issc = 0
+        if issc:
+            self.save_global(obj)
+            return
+
+        # Check copy_reg.dispatch_table
+        reduce = dispatch_table.get(t)
+        if reduce:
+            rv = reduce(obj)
+        else:
+            # Check for a __reduce_ex__ method, fall back to __reduce__
+            reduce = getattr(obj, "__reduce_ex__", None)
+            if reduce:
+                rv = reduce(self.proto)
+            else:
+                reduce = getattr(obj, "__reduce__", None)
+                if reduce:
+                    rv = reduce()
+                else:
+                    raise PicklingError("Can't pickle %r object: %r" %
+                                        (t.__name__, obj))
+
+        # Check for string returned by reduce(), meaning "save as global"
+        if type(rv) is StringType:
+            self.save_global(obj, rv)
+            return
+
+        # Assert that reduce() returned a tuple
+        if type(rv) is not TupleType:
+            raise PicklingError("%s must return string or tuple" % reduce)
+
+        # Assert that it returned an appropriately sized tuple
+        l = len(rv)
+        if not (2 <= l <= 5):
+            raise PicklingError("Tuple returned by %s must have "
+                                "two to five elements" % reduce)
+
+        # Save the reduce() output and finally memoize the object
+        self.save_reduce(obj=obj, *rv)
+
+    def persistent_id(self, obj):
+        # This exists so a subclass can override it
+        # Returning None means "no persistent id"; see save() above.
+        return None
+
+    def save_pers(self, pid):
+        # Save a persistent id reference
+        if self.bin:
+            self.save(pid)
+            self.write(BINPERSID)
+        else:
+            # Protocol 0 writes the persistent id as a text line.
+            self.write(PERSID + str(pid) + '\n')
+
+    def save_reduce(self, func, args, state=None,
+                    listitems=None, dictitems=None, obj=None):
+        """Write the pickle for a reduce()-style 2-to-5 tuple: a callable
+        plus argtuple (REDUCE or NEWOBJ), then optional list items, dict
+        items, and state (applied via BUILD); memoizes obj if given."""
+        # This API is called by some subclasses
+
+        # Assert that args is a tuple or None
+        if not isinstance(args, TupleType):
+            if args is None:
+                # A hack for Jim Fulton's ExtensionClass, now deprecated.
+                # See load_reduce()
+                warnings.warn("__basicnew__ special case is deprecated",
+                              DeprecationWarning)
+            else:
+                raise PicklingError(
+                    "args from reduce() should be a tuple")
+
+        # Assert that func is callable
+        if not callable(func):
+            raise PicklingError("func from reduce should be callable")
+
+        save = self.save
+        write = self.write
+
+        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
+        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
+            # A __reduce__ implementation can direct protocol 2 to
+            # use the more efficient NEWOBJ opcode, while still
+            # allowing protocol 0 and 1 to work normally.  For this to
+            # work, the function returned by __reduce__ should be
+            # called __newobj__, and its first argument should be a
+            # new-style class.  The implementation for __newobj__
+            # should be as follows, although pickle has no way to
+            # verify this:
+            #
+            # def __newobj__(cls, *args):
+            #     return cls.__new__(cls, *args)
+            #
+            # Protocols 0 and 1 will pickle a reference to __newobj__,
+            # while protocol 2 (and above) will pickle a reference to
+            # cls, the remaining args tuple, and the NEWOBJ code,
+            # which calls cls.__new__(cls, *args) at unpickling time
+            # (see load_newobj below).  If __reduce__ returns a
+            # three-tuple, the state from the third tuple item will be
+            # pickled regardless of the protocol, calling __setstate__
+            # at unpickling time (see load_build below).
+            #
+            # Note that no standard __newobj__ implementation exists;
+            # you have to provide your own.  This is to enforce
+            # compatibility with Python 2.2 (pickles written using
+            # protocol 0 or 1 in Python 2.3 should be unpicklable by
+            # Python 2.2).
+            cls = args[0]
+            if not hasattr(cls, "__new__"):
+                raise PicklingError(
+                    "args[0] from __newobj__ args has no __new__")
+            if obj is not None and cls is not obj.__class__:
+                raise PicklingError(
+                    "args[0] from __newobj__ args has the wrong class")
+            args = args[1:]
+            save(cls)
+            save(args)
+            write(NEWOBJ)
+        else:
+            save(func)
+            save(args)
+            write(REDUCE)
+
+        if obj is not None:
+            self.memoize(obj)
+
+        # More new special cases (that work with older protocols as
+        # well): when __reduce__ returns a tuple with 4 or 5 items,
+        # the 4th and 5th item should be iterators that provide list
+        # items and dict items (as (key, value) tuples), or None.
+
+        if listitems is not None:
+            self._batch_appends(listitems)
+
+        if dictitems is not None:
+            self._batch_setitems(dictitems)
+
+        if state is not None:
+            save(state)
+            write(BUILD)
+
+    # Methods below this point are dispatched through the dispatch table
+
+    # dispatch maps a concrete type to the unbound save method for it;
+    # save() consults this table before any other strategy.
+    dispatch = {}
+
+    def save_none(self, obj):
+        self.write(NONE)
+    dispatch[NoneType] = save_none
+
+    def save_bool(self, obj):
+        # Protocol 2 has dedicated one-byte opcodes; older protocols spell
+        # booleans as the special INT strings TRUE/FALSE ('I01'/'I00').
+        if self.proto >= 2:
+            self.write(obj and NEWTRUE or NEWFALSE)
+        else:
+            self.write(obj and TRUE or FALSE)
+    dispatch[bool] = save_bool
+
+    def save_int(self, obj, pack=struct.pack):
+        if self.bin:
+            # If the int is small enough to fit in a signed 4-byte 2's-comp
+            # format, we can store it more efficiently than the general
+            # case.
+            # First one- and two-byte unsigned ints:
+            if obj >= 0:
+                if obj <= 0xff:
+                    self.write(BININT1 + chr(obj))
+                    return
+                if obj <= 0xffff:
+                    self.write("%c%c%c" % (BININT2, obj&0xff, obj>>8))
+                    return
+            # Next check for 4-byte signed ints:
+            high_bits = obj >> 31  # note that Python shift sign-extends
+            if high_bits == 0 or high_bits == -1:
+                # All high bits are copies of bit 2**31, so the value
+                # fits in a 4-byte signed int.
+                self.write(BININT + pack("<i", obj))
+                return
+        # Text pickle, or int too big to fit in signed 4-byte format.
+        self.write(INT + repr(obj) + '\n')
+    dispatch[IntType] = save_int
+
+    def save_long(self, obj, pack=struct.pack):
+        if self.proto >= 2:
+            # Protocol 2: raw two's-complement bytes with a 1-byte (LONG1)
+            # or 4-byte (LONG4) length prefix.
+            bytes = encode_long(obj)
+            n = len(bytes)
+            if n < 256:
+                self.write(LONG1 + chr(n) + bytes)
+            else:
+                self.write(LONG4 + pack("<i", n) + bytes)
+            return
+        self.write(LONG + repr(obj) + '\n')
+    dispatch[LongType] = save_long
+
+    def save_float(self, obj, pack=struct.pack):
+        if self.bin:
+            # 8-byte big-endian IEEE double ('>d').
+            self.write(BINFLOAT + pack('>d', obj))
+        else:
+            self.write(FLOAT + repr(obj) + '\n')
+    dispatch[FloatType] = save_float
+
+    def save_string(self, obj, pack=struct.pack):
+        if self.bin:
+            n = len(obj)
+            if n < 256:
+                self.write(SHORT_BINSTRING + chr(n) + obj)
+            else:
+                self.write(BINSTRING + pack("<i", n) + obj)
+        else:
+            self.write(STRING + repr(obj) + '\n')
+        self.memoize(obj)
+    dispatch[StringType] = save_string
+
+    def save_unicode(self, obj, pack=struct.pack):
+        if self.bin:
+            encoding = obj.encode('utf-8')
+            n = len(encoding)
+            self.write(BINUNICODE + pack("<i", n) + encoding)
+        else:
+            # Protocol 0 is line-oriented, so backslashes and newlines must
+            # be escaped before the raw-unicode-escape encoding is applied.
+            obj = obj.replace("\\", "\\u005c")
+            obj = obj.replace("\n", "\\u000a")
+            self.write(UNICODE + obj.encode('raw-unicode-escape') + '\n')
+        self.memoize(obj)
+    dispatch[UnicodeType] = save_unicode
+
+    if StringType == UnicodeType:
+        # This is true for Jython
+        def save_string(self, obj, pack=struct.pack):
+            # On Jython a str object may hold unicode data; choose between
+            # the string and unicode opcode families accordingly.
+            unicode = obj.isunicode()
+
+            if self.bin:
+                if unicode:
+                    obj = obj.encode("utf-8")
+                l = len(obj)
+                if l < 256 and not unicode:
+                    self.write(SHORT_BINSTRING + chr(l) + obj)
+                else:
+                    s = pack("<i", l)
+                    if unicode:
+                        self.write(BINUNICODE + s + obj)
+                    else:
+                        self.write(BINSTRING + s + obj)
+            else:
+                if unicode:
+                    # Same line-oriented escaping as save_unicode above.
+                    obj = obj.replace("\\", "\\u005c")
+                    obj = obj.replace("\n", "\\u000a")
+                    obj = obj.encode('raw-unicode-escape')
+                    self.write(UNICODE + obj + '\n')
+                else:
+                    self.write(STRING + repr(obj) + '\n')
+            self.memoize(obj)
+        dispatch[StringType] = save_string
+
+    def save_tuple(self, obj):
+        """Pickle a tuple; empty tuples and (under protocol 2) tuples of
+        length 1-3 get dedicated opcodes, and recursive tuples are detected
+        through the memo."""
+        write = self.write
+        proto = self.proto
+
+        n = len(obj)
+        if n == 0:
+            if proto:
+                write(EMPTY_TUPLE)
+            else:
+                write(MARK + TUPLE)
+            return
+
+        save = self.save
+        memo = self.memo
+        if n <= 3 and proto >= 2:
+            for element in obj:
+                save(element)
+            # Subtle.  Same as in the big comment below.
+            if id(obj) in memo:
+                get = self.get(memo[id(obj)][0])
+                write(POP * n + get)
+            else:
+                write(_tuplesize2code[n])
+                self.memoize(obj)
+            return
+
+        # proto 0 or proto 1 and tuple isn't empty, or proto > 1 and tuple
+        # has more than 3 elements.
+        write(MARK)
+        for element in obj:
+            save(element)
+
+        if id(obj) in memo:
+            # Subtle.  d was not in memo when we entered save_tuple(), so
+            # the process of saving the tuple's elements must have saved
+            # the tuple itself:  the tuple is recursive.  The proper action
+            # now is to throw away everything we put on the stack, and
+            # simply GET the tuple (it's already constructed).  This check
+            # could have been done in the "for element" loop instead, but
+            # recursive tuples are a rare thing.
+            get = self.get(memo[id(obj)][0])
+            if proto:
+                write(POP_MARK + get)
+            else:   # proto 0 -- POP_MARK not available
+                write(POP * (n+1) + get)
+            return
+
+        # No recursion.
+        self.write(TUPLE)
+        self.memoize(obj)
+
+    dispatch[TupleType] = save_tuple
+
+    # save_empty_tuple() isn't used by anything in Python 2.3.  However, I
+    # found a Pickler subclass in Zope3 that calls it, so it's not harmless
+    # to remove it.
+    def save_empty_tuple(self, obj):
+        self.write(EMPTY_TUPLE)
+
+    def save_list(self, obj):
+        """Pickle a list: emit the empty list, memoize it, then append the
+        items in batches."""
+        write = self.write
+
+        if self.bin:
+            write(EMPTY_LIST)
+        else:   # proto 0 -- can't use EMPTY_LIST
+            write(MARK + LIST)
+
+        # Memoize before saving the items, so a self-referential list is
+        # resolved by save() to a GET of the list already on the stack.
+        self.memoize(obj)
+        self._batch_appends(iter(obj))
+
+    dispatch[ListType] = save_list
+
+    # Keep in synch with cPickle's BATCHSIZE.  Nothing will break if it gets
+    # out of synch, though.
+    _BATCHSIZE = 1000
+
+    def _batch_appends(self, items):
+        # Helper to batch up APPENDS sequences
+        save = self.save
+        write = self.write
+
+        if not self.bin:
+            # Protocol 0 has no APPENDS; emit one APPEND per item.
+            for x in items:
+                save(x)
+                write(APPEND)
+            return
+
+        # Pull up to _BATCHSIZE items at a time from the iterator, writing
+        # MARK ... APPENDS per batch (or a single APPEND for a lone item).
+        r = xrange(self._BATCHSIZE)
+        while items is not None:
+            tmp = []
+            for i in r:
+                try:
+                    x = items.next()
+                    tmp.append(x)
+                except StopIteration:
+                    items = None
+                    break
+            n = len(tmp)
+            if n > 1:
+                write(MARK)
+                for x in tmp:
+                    save(x)
+                write(APPENDS)
+            elif n:
+                save(tmp[0])
+                write(APPEND)
+            # else tmp is empty, and we're done
+
+    def save_dict(self, obj):
+        """Pickle a dict: emit the empty dict, memoize it, then set the
+        items in batches."""
+        write = self.write
+
+        if self.bin:
+            write(EMPTY_DICT)
+        else:   # proto 0 -- can't use EMPTY_DICT
+            write(MARK + DICT)
+
+        # Memoize before the items so self-referential dicts work.
+        self.memoize(obj)
+        self._batch_setitems(obj.iteritems())
+
+    dispatch[DictionaryType] = save_dict
+    if not PyStringMap is None:
+        dispatch[PyStringMap] = save_dict
+
+    def _batch_setitems(self, items):
+        # Helper to batch up SETITEMS sequences; proto >= 1 only
+        save = self.save
+        write = self.write
+
+        if not self.bin:
+            # Protocol 0 has no SETITEMS; emit one SETITEM per pair.
+            for k, v in items:
+                save(k)
+                save(v)
+                write(SETITEM)
+            return
+
+        # Pull up to _BATCHSIZE (key, value) pairs at a time, writing
+        # MARK ... SETITEMS per batch (or a single SETITEM for a lone pair).
+        r = xrange(self._BATCHSIZE)
+        while items is not None:
+            tmp = []
+            for i in r:
+                try:
+                    tmp.append(items.next())
+                except StopIteration:
+                    items = None
+                    break
+            n = len(tmp)
+            if n > 1:
+                write(MARK)
+                for k, v in tmp:
+                    save(k)
+                    save(v)
+                write(SETITEMS)
+            elif n:
+                k, v = tmp[0]
+                save(k)
+                save(v)
+                write(SETITEM)
+            # else tmp is empty, and we're done
+
+    def save_inst(self, obj):
+        """Pickle an old-style class instance: OBJ (binary) or INST (text)
+        with any __getinitargs__ arguments, then its state and BUILD."""
+        cls = obj.__class__
+
+        memo  = self.memo
+        write = self.write
+        save  = self.save
+
+        if hasattr(obj, '__getinitargs__'):
+            args = obj.__getinitargs__()
+            len(args) # XXX Assert it's a sequence
+            _keep_alive(args, memo)
+        else:
+            args = ()
+
+        write(MARK)
+
+        if self.bin:
+            save(cls)
+            for arg in args:
+                save(arg)
+            write(OBJ)
+        else:
+            for arg in args:
+                save(arg)
+            write(INST + cls.__module__ + '\n' + cls.__name__ + '\n')
+
+        self.memoize(obj)
+
+        # State comes from __getstate__ if defined, else the instance dict.
+        try:
+            getstate = obj.__getstate__
+        except AttributeError:
+            stuff = obj.__dict__
+        else:
+            stuff = getstate()
+            _keep_alive(stuff, memo)
+        save(stuff)
+        write(BUILD)
+
+    dispatch[InstanceType] = save_inst
+
+    def save_global(self, obj, name=None, pack=struct.pack):
+        """Pickle a class or function by reference (module + name), using a
+        registered extension code under protocol 2 when one exists."""
+        write = self.write
+        memo = self.memo
+
+        if name is None:
+            name = obj.__name__
+
+        module = getattr(obj, "__module__", None)
+        if module is None:
+            module = whichmodule(obj, name)
+
+        # Verify that obj really is importable as module.name; otherwise the
+        # resulting pickle could never be loaded back.
+        try:
+            __import__(module)
+            mod = sys.modules[module]
+            klass = getattr(mod, name)
+        except (ImportError, KeyError, AttributeError):
+            raise PicklingError(
+                "Can't pickle %r: it's not found as %s.%s" %
+                (obj, module, name))
+        else:
+            if klass is not obj:
+                raise PicklingError(
+                    "Can't pickle %r: it's not the same object as %s.%s" %
+                    (obj, module, name))
+
+        if self.proto >= 2:
+            code = _extension_registry.get((module, name))
+            if code:
+                assert code > 0
+                if code <= 0xff:
+                    write(EXT1 + chr(code))
+                elif code <= 0xffff:
+                    # Two-byte extension code, little-endian.
+                    write("%c%c%c" % (EXT2, code&0xff, code>>8))
+                else:
+                    write(EXT4 + pack("<i", code))
+                return
+
+        write(GLOBAL + module + '\n' + name + '\n')
+        self.memoize(obj)
+
+    dispatch[ClassType] = save_global
+    dispatch[FunctionType] = save_global
+    dispatch[BuiltinFunctionType] = save_global
+    dispatch[TypeType] = save_global
+
+# Pickling helpers
+
+def _keep_alive(x, memo):
+    """Keeps a reference to the object x in the memo.
+
+    Because we remember objects by their id, we have
+    to assure that possibly temporary objects are kept
+    alive by referencing them.
+    We store a reference at the id of the memo, which should
+    normally not be used unless someone tries to deepcopy
+    the memo itself...
+    """
+    try:
+        memo[id(memo)].append(x)
+    except KeyError:
+        # aha, this is the first one :-)
+        memo[id(memo)]=[x]
+
+
+# A cache for whichmodule(), mapping a function object to the name of
+# the module in which the function was found.
+
+classmap = {} # called classmap for backwards compatibility
+
+def whichmodule(func, funcname):
+    """Figure out the module in which a function occurs.
+
+    Search sys.modules for the module.
+    Cache in classmap.
+    Return a module name.
+    If the function cannot be found, return "__main__".
+    """
+    # Python functions should always get an __module__ from their globals.
+    mod = getattr(func, "__module__", None)
+    if mod is not None:
+        return mod
+    if func in classmap:
+        return classmap[func]
+
+    # Scan every loaded module for an attribute named funcname that is
+    # bound to this very object (identity, not equality).
+    for name, module in sys.modules.items():
+        if module is None:
+            continue # skip dummy package entries
+        if name != '__main__' and getattr(module, funcname, None) is func:
+            break
+    else:
+        name = '__main__'
+    classmap[func] = name
+    return name
+
+
+# Unpickling machinery
+
+class Unpickler:
+
+    def __init__(self, file):
+        """This takes a file-like object for reading a pickle data stream.
+
+        The protocol version of the pickle is detected automatically, so no
+        proto argument is needed.
+
+        The file-like object must have two methods, a read() method that
+        takes an integer argument, and a readline() method that requires no
+        arguments.  Both methods should return a string.  Thus file-like
+        object can be a file object opened for reading, a StringIO object,
+        or any other custom object that meets this interface.
+        """
+        self.readline = file.readline
+        self.read = file.read
+        # Maps memo key (written by PUT opcodes) -> already-built object.
+        self.memo = {}
+
+    def load(self):
+        """Read a pickled object representation from the open file.
+
+        Return the reconstituted object hierarchy specified in the file.
+        """
+        self.mark = object() # any new unique object
+        self.stack = []
+        self.append = self.stack.append
+        read = self.read
+        dispatch = self.dispatch
+        # Execute one opcode handler per iteration until the STOP handler
+        # raises _Stop carrying the finished object.
+        try:
+            while 1:
+                key = read(1)
+                dispatch[key](self)
+        except _Stop, stopinst:
+            return stopinst.value
+
+    # Return largest index k such that self.stack[k] is self.mark.
+    # If the stack doesn't contain a mark, eventually raises IndexError.
+    # This could be sped by maintaining another stack, of indices at which
+    # the mark appears.  For that matter, the latter stack would suffice,
+    # and we wouldn't need to push mark objects on self.stack at all.
+    # Doing so is probably a good thing, though, since if the pickle is
+    # corrupt (or hostile) we may get a clue from finding self.mark embedded
+    # in unpickled objects.
+    def marker(self):
+        stack = self.stack
+        mark = self.mark
+        k = len(stack)-1
+        while stack[k] is not mark: k = k-1
+        return k
+
+    dispatch = {}
+
+    def load_eof(self):
+        raise EOFError
+    dispatch[''] = load_eof
+
+    def load_proto(self):
+        # PROTO <1 byte>: declares the pickle's protocol version (0..2).
+        proto = ord(self.read(1))
+        if not 0 <= proto <= 2:
+            raise ValueError, "unsupported pickle protocol: %d" % proto
+    dispatch[PROTO] = load_proto
+
+    def load_persid(self):
+        # PERSID <id>\n: push the object named by a textual persistent id.
+        # persistent_load is not defined here; it must be supplied
+        # externally (by a subclass or by assignment on the instance).
+        pid = self.readline()[:-1]
+        self.append(self.persistent_load(pid))
+    dispatch[PERSID] = load_persid
+
+    def load_binpersid(self):
+        # BINPERSID: like PERSID, but the id is taken off the stack.
+        pid = self.stack.pop()
+        self.append(self.persistent_load(pid))
+    dispatch[BINPERSID] = load_binpersid
+
+    def load_none(self):
+        # NONE: push None.
+        self.append(None)
+    dispatch[NONE] = load_none
+
+    def load_false(self):
+        # NEWFALSE (protocol 2): push False.
+        self.append(False)
+    dispatch[NEWFALSE] = load_false
+
+    def load_true(self):
+        # NEWTRUE (protocol 2): push True.
+        self.append(True)
+    dispatch[NEWTRUE] = load_true
+
+    def load_int(self):
+        # INT <decimal>\n.  The payloads "00" and "01" (FALSE/TRUE minus
+        # their leading opcode byte) are how protocol >= 0 spells bools.
+        data = self.readline()
+        if data == FALSE[1:]:
+            val = False
+        elif data == TRUE[1:]:
+            val = True
+        else:
+            try:
+                val = int(data)
+            except ValueError:
+                # Doesn't fit in a plain int -- fall back to a long.
+                val = long(data)
+        self.append(val)
+    dispatch[INT] = load_int
+
+    def load_binint(self):
+        # BININT: 4-byte signed little-endian int ('i' via marshal loads).
+        self.append(mloads('i' + self.read(4)))
+    dispatch[BININT] = load_binint
+
+    def load_binint1(self):
+        # BININT1: 1-byte unsigned int.
+        self.append(ord(self.read(1)))
+    dispatch[BININT1] = load_binint1
+
+    def load_binint2(self):
+        # BININT2: 2-byte unsigned little-endian int, zero-padded to the
+        # 4 bytes the 'i' marshal format expects.
+        self.append(mloads('i' + self.read(2) + '\000\000'))
+    dispatch[BININT2] = load_binint2
+
+    def load_long(self):
+        # LONG <repr>\n: decimal text, possibly with a trailing 'L';
+        # base 0 lets long() accept the repr form directly.
+        self.append(long(self.readline()[:-1], 0))
+    dispatch[LONG] = load_long
+
+    def load_long1(self):
+        # LONG1 (protocol 2): 1-byte length, then that many bytes of
+        # little-endian two's-complement.
+        n = ord(self.read(1))
+        bytes = self.read(n)
+        self.append(decode_long(bytes))
+    dispatch[LONG1] = load_long1
+
+    def load_long4(self):
+        # LONG4 (protocol 2): 4-byte length, then that many bytes of
+        # little-endian two's-complement.
+        n = mloads('i' + self.read(4))
+        bytes = self.read(n)
+        self.append(decode_long(bytes))
+    dispatch[LONG4] = load_long4
+
+    def load_float(self):
+        # FLOAT <repr>\n: text form of a float.
+        self.append(float(self.readline()[:-1]))
+    dispatch[FLOAT] = load_float
+
+    def load_binfloat(self, unpack=struct.unpack):
+        # BINFLOAT: 8-byte big-endian IEEE double ('>d').
+        self.append(unpack('>d', self.read(8))[0])
+    dispatch[BINFLOAT] = load_binfloat
+
+    def load_string(self):
+        # STRING <repr>\n: a quoted, escape-encoded string.  Input whose
+        # bracketing quotes are absent or mismatched is rejected.
+        rep = self.readline()[:-1]
+        for q in "\"'": # double or single quote
+            if rep.startswith(q):
+                if not rep.endswith(q):
+                    raise ValueError, "insecure string pickle"
+                rep = rep[len(q):-len(q)]
+                break
+        else:
+            raise ValueError, "insecure string pickle"
+        self.append(rep.decode("string-escape"))
+    dispatch[STRING] = load_string
+
+    def load_binstring(self):
+        # BINSTRING: 4-byte length, then that many raw bytes.
+        # (The local name shadows the len() builtin; kept as upstream.)
+        len = mloads('i' + self.read(4))
+        self.append(self.read(len))
+    dispatch[BINSTRING] = load_binstring
+
+    def load_unicode(self):
+        # UNICODE <raw-unicode-escape text>\n.
+        self.append(unicode(self.readline()[:-1],'raw-unicode-escape'))
+    dispatch[UNICODE] = load_unicode
+
+    def load_binunicode(self):
+        # BINUNICODE: 4-byte length, then that many UTF-8 bytes.
+        len = mloads('i' + self.read(4))
+        self.append(unicode(self.read(len),'utf-8'))
+    dispatch[BINUNICODE] = load_binunicode
+
+    def load_short_binstring(self):
+        # SHORT_BINSTRING: 1-byte length, then that many raw bytes.
+        len = ord(self.read(1))
+        self.append(self.read(len))
+    dispatch[SHORT_BINSTRING] = load_short_binstring
+
+    def load_tuple(self):
+        # TUPLE: collapse everything above the topmost mark into a tuple,
+        # replacing the mark itself.
+        k = self.marker()
+        self.stack[k:] = [tuple(self.stack[k+1:])]
+    dispatch[TUPLE] = load_tuple
+
+    def load_empty_tuple(self):
+        # EMPTY_TUPLE: push ().
+        self.stack.append(())
+    dispatch[EMPTY_TUPLE] = load_empty_tuple
+
+    def load_tuple1(self):
+        # TUPLE1 (protocol 2): wrap the top stack item -- no mark needed.
+        self.stack[-1] = (self.stack[-1],)
+    dispatch[TUPLE1] = load_tuple1
+
+    def load_tuple2(self):
+        # TUPLE2 (protocol 2): pair the top two stack items.
+        self.stack[-2:] = [(self.stack[-2], self.stack[-1])]
+    dispatch[TUPLE2] = load_tuple2
+
+    def load_tuple3(self):
+        # TUPLE3 (protocol 2): triple the top three stack items.
+        self.stack[-3:] = [(self.stack[-3], self.stack[-2], self.stack[-1])]
+    dispatch[TUPLE3] = load_tuple3
+
+    def load_empty_list(self):
+        # EMPTY_LIST: push [].
+        self.stack.append([])
+    dispatch[EMPTY_LIST] = load_empty_list
+
+    def load_empty_dictionary(self):
+        # EMPTY_DICT: push {}.
+        self.stack.append({})
+    dispatch[EMPTY_DICT] = load_empty_dictionary
+
+    def load_list(self):
+        # LIST: collapse everything above the topmost mark into a list.
+        k = self.marker()
+        self.stack[k:] = [self.stack[k+1:]]
+    dispatch[LIST] = load_list
+
+    def load_dict(self):
+        # DICT: the items above the topmost mark alternate key, value;
+        # build a dict from them and replace the whole run (mark included).
+        k = self.marker()
+        d = {}
+        items = self.stack[k+1:]
+        for i in range(0, len(items), 2):
+            key = items[i]
+            value = items[i+1]
+            d[key] = value
+        self.stack[k:] = [d]
+    dispatch[DICT] = load_dict
+
+    # INST and OBJ differ only in how they get a class object.  It's not
+    # only sensible to do the rest in a common routine, the two routines
+    # previously diverged and grew different bugs.
+    # klass is the class to instantiate, and k points to the topmost mark
+    # object, following which are the arguments for klass.__init__.
+    def _instantiate(self, klass, k):
+        args = tuple(self.stack[k+1:])
+        del self.stack[k:]
+        instantiated = 0
+        # Fast path for old-style classes pickled with no ctor args and no
+        # __getinitargs__: make a shell instance and retarget its class,
+        # bypassing klass.__init__ entirely.
+        if (not args and
+                type(klass) is ClassType and
+                not hasattr(klass, "__getinitargs__")):
+            try:
+                value = _EmptyClass()
+                value.__class__ = klass
+                instantiated = 1
+            except RuntimeError:
+                # In restricted execution, assignment to inst.__class__ is
+                # prohibited
+                pass
+        if not instantiated:
+            try:
+                value = klass(*args)
+            except TypeError, err:
+                # Re-raise with the class name added, preserving the
+                # original traceback (third expression of the raise).
+                raise TypeError, "in constructor for %s: %s" % (
+                    klass.__name__, str(err)), sys.exc_info()[2]
+        self.append(value)
+
+    def load_inst(self):
+        # INST <module>\n<name>\n: the class comes from the bytestream;
+        # constructor args are the stack items above the topmost mark.
+        module = self.readline()[:-1]
+        name = self.readline()[:-1]
+        klass = self.find_class(module, name)
+        self._instantiate(klass, self.marker())
+    dispatch[INST] = load_inst
+
+    def load_obj(self):
+        # Stack is ... markobject classobject arg1 arg2 ...
+        # OBJ: like INST, but the class object sits on the stack just
+        # above the mark instead of being named in the bytestream.
+        k = self.marker()
+        klass = self.stack.pop(k+1)
+        self._instantiate(klass, k)
+    dispatch[OBJ] = load_obj
+
+    def load_newobj(self):
+        # NEWOBJ (protocol 2): stack holds cls, args-tuple; build the
+        # instance via cls.__new__ without calling __init__.
+        args = self.stack.pop()
+        cls = self.stack[-1]
+        obj = cls.__new__(cls, *args)
+        self.stack[-1] = obj
+    dispatch[NEWOBJ] = load_newobj
+
+    def load_global(self):
+        # GLOBAL <module>\n<name>\n: push the named module attribute
+        # (class, function, ...) itself, not an instance of it.
+        module = self.readline()[:-1]
+        name = self.readline()[:-1]
+        klass = self.find_class(module, name)
+        self.append(klass)
+    dispatch[GLOBAL] = load_global
+
+    def load_ext1(self):
+        # EXT1 (protocol 2): 1-byte extension-registry code.
+        code = ord(self.read(1))
+        self.get_extension(code)
+    dispatch[EXT1] = load_ext1
+
+    def load_ext2(self):
+        # EXT2 (protocol 2): 2-byte little-endian extension-registry code.
+        code = mloads('i' + self.read(2) + '\000\000')
+        self.get_extension(code)
+    dispatch[EXT2] = load_ext2
+
+    def load_ext4(self):
+        # EXT4 (protocol 2): 4-byte little-endian extension-registry code.
+        code = mloads('i' + self.read(4))
+        self.get_extension(code)
+    dispatch[EXT4] = load_ext4
+
+    def get_extension(self, code):
+        """Push the object registered under the given extension code."""
+        # A fresh list is a sentinel no cached value can equal by identity,
+        # so a cache hit is distinguishable even for falsy cached objects.
+        nil = []
+        obj = _extension_cache.get(code, nil)
+        if obj is not nil:
+            self.append(obj)
+            return
+        key = _inverted_registry.get(code)
+        if not key:
+            raise ValueError("unregistered extension code %d" % code)
+        obj = self.find_class(*key)
+        _extension_cache[code] = obj
+        self.append(obj)
+
+    def find_class(self, module, name):
+        # Subclasses may override this
+        # NOTE: this imports whatever module the pickle names -- one
+        # reason unpickling untrusted data is unsafe.
+        __import__(module)
+        mod = sys.modules[module]
+        klass = getattr(mod, name)
+        return klass
+
+    def load_reduce(self):
+        # REDUCE: stack holds callable, args-tuple; replace both with the
+        # result of calling the callable.
+        stack = self.stack
+        args = stack.pop()
+        func = stack[-1]
+        if args is None:
+            # A hack for Jim Fulton's ExtensionClass, now deprecated
+            warnings.warn("__basicnew__ special case is deprecated",
+                          DeprecationWarning)
+            value = func.__basicnew__()
+        else:
+            value = func(*args)
+        stack[-1] = value
+    dispatch[REDUCE] = load_reduce
+
+    def load_pop(self):
+        # POP: discard the top stack item.
+        del self.stack[-1]
+    dispatch[POP] = load_pop
+
+    def load_pop_mark(self):
+        # POP_MARK: discard everything down to and including the topmost mark.
+        k = self.marker()
+        del self.stack[k:]
+    dispatch[POP_MARK] = load_pop_mark
+
+    def load_dup(self):
+        # DUP: push a second reference to the top stack item.
+        self.append(self.stack[-1])
+    dispatch[DUP] = load_dup
+
+    def load_get(self):
+        # GET <decimal>\n: push memo[index]; memo keys are decimal strings.
+        self.append(self.memo[self.readline()[:-1]])
+    dispatch[GET] = load_get
+
+    def load_binget(self):
+        # BINGET: 1-byte index; repr(i) matches the string keys GET uses.
+        i = ord(self.read(1))
+        self.append(self.memo[repr(i)])
+    dispatch[BINGET] = load_binget
+
+    def load_long_binget(self):
+        # LONG_BINGET: 4-byte little-endian index into the memo.
+        i = mloads('i' + self.read(4))
+        self.append(self.memo[repr(i)])
+    dispatch[LONG_BINGET] = load_long_binget
+
+    def load_put(self):
+        # PUT <decimal>\n: store the top stack item in the memo (no pop).
+        self.memo[self.readline()[:-1]] = self.stack[-1]
+    dispatch[PUT] = load_put
+
+    def load_binput(self):
+        # BINPUT: 1-byte memo index; stores the top stack item (no pop).
+        i = ord(self.read(1))
+        self.memo[repr(i)] = self.stack[-1]
+    dispatch[BINPUT] = load_binput
+
+    def load_long_binput(self):
+        # LONG_BINPUT: 4-byte memo index; stores the top stack item (no pop).
+        i = mloads('i' + self.read(4))
+        self.memo[repr(i)] = self.stack[-1]
+    dispatch[LONG_BINPUT] = load_long_binput
+
+    def load_append(self):
+        # APPEND: pop one value and append it to the list below it.
+        stack = self.stack
+        value = stack.pop()
+        list = stack[-1]
+        list.append(value)
+    dispatch[APPEND] = load_append
+
+    def load_appends(self):
+        # APPENDS: extend the list just below the topmost mark with every
+        # item above the mark, then drop the mark and those items.
+        stack = self.stack
+        mark = self.marker()
+        list = stack[mark - 1]
+        list.extend(stack[mark + 1:])
+        del stack[mark:]
+    dispatch[APPENDS] = load_appends
+
+    def load_setitem(self):
+        # SETITEM: pop value then key; assign into the dict below them.
+        stack = self.stack
+        value = stack.pop()
+        key = stack.pop()
+        dict = stack[-1]
+        dict[key] = value
+    dispatch[SETITEM] = load_setitem
+
+    def load_setitems(self):
+        # SETITEMS: items above the topmost mark alternate key, value;
+        # assign them all into the dict just below the mark.
+        stack = self.stack
+        mark = self.marker()
+        dict = stack[mark - 1]
+        for i in range(mark + 1, len(stack), 2):
+            dict[stack[i]] = stack[i + 1]
+
+        del stack[mark:]
+    dispatch[SETITEMS] = load_setitems
+
+    def load_build(self):
+        # BUILD: pop a state object and apply it to the instance below --
+        # via inst.__setstate__ when defined, else by updating __dict__
+        # (and, for a (dict, slots-dict) pair, setting slot attributes).
+        stack = self.stack
+        state = stack.pop()
+        inst = stack[-1]
+        setstate = getattr(inst, "__setstate__", None)
+        if setstate:
+            setstate(state)
+            return
+        slotstate = None
+        if isinstance(state, tuple) and len(state) == 2:
+            state, slotstate = state
+        if state:
+            try:
+                inst.__dict__.update(state)
+            except RuntimeError:
+                # XXX In restricted execution, the instance's __dict__
+                # is not accessible.  Use the old way of unpickling
+                # the instance variables.  This is a semantic
+                # difference when unpickling in restricted
+                # vs. unrestricted modes.
+                # Note, however, that cPickle has never tried to do the
+                # .update() business, and always uses
+                #     PyObject_SetItem(inst.__dict__, key, value) in a
+                # loop over state.items().
+                for k, v in state.items():
+                    setattr(inst, k, v)
+        if slotstate:
+            for k, v in slotstate.items():
+                setattr(inst, k, v)
+    dispatch[BUILD] = load_build
+
+    def load_mark(self):
+        # MARK: push the sentinel object created at the start of load().
+        self.append(self.mark)
+    dispatch[MARK] = load_mark
+
+    def load_stop(self):
+        # STOP: the pickle is complete; deliver the result to load() by
+        # raising _Stop with the final object as its payload.
+        value = self.stack.pop()
+        raise _Stop(value)
+    dispatch[STOP] = load_stop
+
+# Helper class for load_inst/load_obj
+
+class _EmptyClass:
+    # Instantiated cheaply, then its __class__ is reassigned to the real
+    # target class in _instantiate, skipping the target's __init__.
+    pass
+
+# Encode/decode longs in linear time.
+
+import binascii as _binascii
+
+def encode_long(x):
+    r"""Encode a long to a two's complement little-endian binary string.
+    Note that 0L is a special case, returning an empty string, to save a
+    byte in the LONG1 pickling context.
+
+    >>> encode_long(0L)
+    ''
+    >>> encode_long(255L)
+    '\xff\x00'
+    >>> encode_long(32767L)
+    '\xff\x7f'
+    >>> encode_long(-256L)
+    '\x00\xff'
+    >>> encode_long(-32768L)
+    '\x00\x80'
+    >>> encode_long(-128L)
+    '\x80'
+    >>> encode_long(127L)
+    '\x7f'
+    >>>
+    """
+
+    if x == 0:
+        return ''
+    if x > 0:
+        ashex = hex(x)
+        assert ashex.startswith("0x")
+        # njunkchars counts the "0x" prefix plus an optional trailing 'L'.
+        njunkchars = 2 + ashex.endswith('L')
+        nibbles = len(ashex) - njunkchars
+        if nibbles & 1:
+            # need an even # of nibbles for unhexlify
+            ashex = "0x0" + ashex[2:]
+        elif int(ashex[2], 16) >= 8:
+            # "looks negative", so need a byte of sign bits
+            ashex = "0x00" + ashex[2:]
+    else:
+        # Build the 256's-complement:  (1L << nbytes) + x.  The trick is
+        # to find the number of bytes in linear time (although that should
+        # really be a constant-time task).
+        ashex = hex(-x)
+        assert ashex.startswith("0x")
+        njunkchars = 2 + ashex.endswith('L')
+        nibbles = len(ashex) - njunkchars
+        if nibbles & 1:
+            # Extend to a full byte.
+            nibbles += 1
+        nbits = nibbles * 4
+        x += 1L << nbits
+        assert x > 0
+        ashex = hex(x)
+        njunkchars = 2 + ashex.endswith('L')
+        newnibbles = len(ashex) - njunkchars
+        if newnibbles < nibbles:
+            ashex = "0x" + "0" * (nibbles - newnibbles) + ashex[2:]
+        if int(ashex[2], 16) < 8:
+            # "looks positive", so need a byte of sign bits
+            ashex = "0xff" + ashex[2:]
+
+    # Strip the "0x" prefix and any trailing 'L' from the hex text.
+    if ashex.endswith('L'):
+        ashex = ashex[2:-1]
+    else:
+        ashex = ashex[2:]
+    assert len(ashex) & 1 == 0, (x, ashex)
+    binary = _binascii.unhexlify(ashex)
+    # unhexlify produced big-endian bytes; reverse for little-endian.
+    return binary[::-1]
+
+def decode_long(data):
+    r"""Decode a long from a two's complement little-endian binary string.
+
+    >>> decode_long('')
+    0L
+    >>> decode_long("\xff\x00")
+    255L
+    >>> decode_long("\xff\x7f")
+    32767L
+    >>> decode_long("\x00\xff")
+    -256L
+    >>> decode_long("\x00\x80")
+    -32768L
+    >>> decode_long("\x80")
+    -128L
+    >>> decode_long("\x7f")
+    127L
+    """
+
+    nbytes = len(data)
+    if nbytes == 0:
+        # The empty string is encode_long's special spelling of zero.
+        return 0L
+    # Reverse to big-endian, hexlify, and parse as one base-16 literal.
+    ashex = _binascii.hexlify(data[::-1])
+    n = long(ashex, 16) # quadratic time before Python 2.3; linear now
+    if data[-1] >= '\x80':
+        # High bit of the most significant byte set: the value is
+        # negative; undo the 256's-complement.
+        n -= 1L << (nbytes * 8)
+    return n
+
+# Shorthands
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+def dump(obj, file, protocol=None, bin=None):
+    """Write a pickled representation of obj to the open file object."""
+    Pickler(file, protocol, bin).dump(obj)
+
+def dumps(obj, protocol=None, bin=None):
+    """Return the pickled representation of obj as a string."""
+    file = StringIO()
+    Pickler(file, protocol, bin).dump(obj)
+    return file.getvalue()
+
+def load(file):
+    """Read a pickled object from the open file object and return it."""
+    return Unpickler(file).load()
+
+def loads(str):
+    """Reconstitute and return the object pickled in the string str."""
+    file = StringIO(str)
+    return Unpickler(file).load()
+
+# Doctest
+
+def _test():
+    """Run this module's doctests and return the doctest results."""
+    import doctest
+    return doctest.testmod()
+
+# Self-test: run the doctests when executed as a script.
+if __name__ == "__main__":
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/pickletools.py b/depot_tools/release/win/python_24/Lib/pickletools.py
new file mode 100644
index 0000000..5e05b64
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pickletools.py
@@ -0,0 +1,2246 @@
+'''"Executable documentation" for the pickle module.
+
+Extensive comments about the pickle protocols and pickle-machine opcodes
+can be found here.  Some functions meant for external use:
+
+genops(pickle)
+   Generate all the opcodes in a pickle, as (opcode, arg, position) triples.
+
+dis(pickle, out=None, memo=None, indentlevel=4)
+   Print a symbolic disassembly of a pickle.
+'''
+
+# Public API; the remaining module contents are descriptive machinery.
+__all__ = ['dis',
+           'genops',
+          ]
+
+# Other ideas:
+#
+# - A pickle verifier:  read a pickle and check it exhaustively for
+#   well-formedness.  dis() does a lot of this already.
+#
+# - A protocol identifier:  examine a pickle and return its protocol number
+#   (== the highest .proto attr value among all the opcodes in the pickle).
+#   dis() already prints this info at the end.
+#
+# - A pickle optimizer:  for example, tuple-building code is sometimes more
+#   elaborate than necessary, catering for the possibility that the tuple
+#   is recursive.  Or lots of times a PUT is generated that's never accessed
+#   by a later GET.
+
+
+"""
+"A pickle" is a program for a virtual pickle machine (PM, but more accurately
+called an unpickling machine).  It's a sequence of opcodes, interpreted by the
+PM, building an arbitrarily complex Python object.
+
+For the most part, the PM is very simple:  there are no looping, testing, or
+conditional instructions, no arithmetic and no function calls.  Opcodes are
+executed once each, from first to last, until a STOP opcode is reached.
+
+The PM has two data areas, "the stack" and "the memo".
+
+Many opcodes push Python objects onto the stack; e.g., INT pushes a Python
+integer object on the stack, whose value is gotten from a decimal string
+literal immediately following the INT opcode in the pickle bytestream.  Other
+opcodes take Python objects off the stack.  The result of unpickling is
+whatever object is left on the stack when the final STOP opcode is executed.
+
+The memo is simply an array of objects, or it can be implemented as a dict
+mapping little integers to objects.  The memo serves as the PM's "long term
+memory", and the little integers indexing the memo are akin to variable
+names.  Some opcodes pop a stack object into the memo at a given index,
+and others push a memo object at a given index onto the stack again.
+
+At heart, that's all the PM has.  Subtleties arise for these reasons:
+
++ Object identity.  Objects can be arbitrarily complex, and subobjects
+  may be shared (for example, the list [a, a] refers to the same object a
+  twice).  It can be vital that unpickling recreate an isomorphic object
+  graph, faithfully reproducing sharing.
+
++ Recursive objects.  For example, after "L = []; L.append(L)", L is a
+  list, and L[0] is the same list.  This is related to the object identity
+  point, and some sequences of pickle opcodes are subtle in order to
+  get the right result in all cases.
+
++ Things pickle doesn't know everything about.  Examples of things pickle
+  does know everything about are Python's builtin scalar and container
+  types, like ints and tuples.  They generally have opcodes dedicated to
+  them.  For things like module references and instances of user-defined
+  classes, pickle's knowledge is limited.  Historically, many enhancements
+  have been made to the pickle protocol in order to do a better (faster,
+  and/or more compact) job on those.
+
++ Backward compatibility and micro-optimization.  As explained below,
+  pickle opcodes never go away, not even when better ways to do a thing
+  get invented.  The repertoire of the PM just keeps growing over time.
+  For example, protocol 0 had two opcodes for building Python integers (INT
+  and LONG), protocol 1 added three more for more-efficient pickling of short
+  integers, and protocol 2 added two more for more-efficient pickling of
+  long integers (before protocol 2, the only ways to pickle a Python long
+  took time quadratic in the number of digits, for both pickling and
+  unpickling).  "Opcode bloat" isn't so much a subtlety as a source of
+  wearying complication.
+
+
+Pickle protocols:
+
+For compatibility, the meaning of a pickle opcode never changes.  Instead new
+pickle opcodes get added, and each version's unpickler can handle all the
+pickle opcodes in all protocol versions to date.  So old pickles continue to
+be readable forever.  The pickler can generally be told to restrict itself to
+the subset of opcodes available under previous protocol versions too, so that
+users can create pickles under the current version readable by older
+versions.  However, a pickle does not contain its version number embedded
+within it.  If an older unpickler tries to read a pickle using a later
+protocol, the result is most likely an exception due to seeing an unknown (in
+the older unpickler) opcode.
+
+The original pickle used what's now called "protocol 0", and what was called
+"text mode" before Python 2.3.  The entire pickle bytestream is made up of
+printable 7-bit ASCII characters, plus the newline character, in protocol 0.
+That's why it was called text mode.  Protocol 0 is small and elegant, but
+sometimes painfully inefficient.
+
+The second major set of additions is now called "protocol 1", and was called
+"binary mode" before Python 2.3.  This added many opcodes with arguments
+consisting of arbitrary bytes, including NUL bytes and unprintable "high bit"
+bytes.  Binary mode pickles can be substantially smaller than equivalent
+text mode pickles, and sometimes faster too; e.g., BININT represents a 4-byte
+int as 4 bytes following the opcode, which is cheaper to unpickle than the
+(perhaps) 11-character decimal string attached to INT.  Protocol 1 also added
+a number of opcodes that operate on many stack elements at once (like APPENDS
+and SETITEMS), and "shortcut" opcodes (like EMPTY_DICT and EMPTY_TUPLE).
+
+The third major set of additions came in Python 2.3, and is called "protocol
+2".  This added:
+
+- A better way to pickle instances of new-style classes (NEWOBJ).
+
+- A way for a pickle to identify its protocol (PROTO).
+
+- Time- and space- efficient pickling of long ints (LONG{1,4}).
+
+- Shortcuts for small tuples (TUPLE{1,2,3}).
+
+- Dedicated opcodes for bools (NEWTRUE, NEWFALSE).
+
+- The "extension registry", a vector of popular objects that can be pushed
+  efficiently by index (EXT{1,2,4}).  This is akin to the memo and GET, but
+  the registry contents are predefined (there's nothing akin to the memo's
+  PUT).
+
+Another independent change with Python 2.3 is the abandonment of any
+pretense that it might be safe to load pickles received from untrusted
+parties -- no sufficient security analysis has been done to guarantee
+this and there isn't a use case that warrants the expense of such an
+analysis.
+
+To this end, all tests for __safe_for_unpickling__ or for
+copy_reg.safe_constructors are removed from the unpickling code.
+References to these variables in the descriptions below are to be seen
+as describing unpickling in Python 2.2 and before.
+"""
+
+# Meta-rule:  Descriptions are stored in instances of descriptor objects,
+# with plain constructors.  No meta-language is defined from which
+# descriptors could be constructed.  If you want, e.g., XML, write a little
+# program to generate XML from the objects.
+
+##############################################################################
+# Some pickle opcodes have an argument, following the opcode in the
+# bytestream.  An argument is of a specific type, described by an instance
+# of ArgumentDescriptor.  These are not to be confused with arguments taken
+# off the stack -- ArgumentDescriptor applies only to arguments embedded in
+# the opcode stream, immediately following an opcode.
+
+# Represents the number of bytes consumed by an argument delimited by the
+# next newline character.
+# (These sentinels are stored in ArgumentDescriptor.n, below.)
+UP_TO_NEWLINE = -1
+
+# Represents the number of bytes consumed by a two-argument opcode where
+# the first argument gives the number of bytes in the second argument.
+TAKEN_FROM_ARGUMENT1 = -2   # num bytes is 1-byte unsigned int
+TAKEN_FROM_ARGUMENT4 = -3   # num bytes is 4-byte signed little-endian int
+
+class ArgumentDescriptor(object):
+    """Describes one kind of in-bytestream opcode argument."""
+
+    __slots__ = (
+        # name of descriptor record, also a module global name; a string
+        'name',
+
+        # length of argument, in bytes; an int; UP_TO_NEWLINE and
+        # TAKEN_FROM_ARGUMENT{1,4} are negative values for variable-length
+        # cases
+        'n',
+
+        # a function taking a file-like object, reading this kind of argument
+        # from the object at the current position, advancing the current
+        # position by n bytes, and returning the value of the argument
+        'reader',
+
+        # human-readable docs for this arg descriptor; a string
+        'doc',
+    )
+
+    def __init__(self, name, n, reader, doc):
+        # The asserts only validate module-internal construction; every
+        # instance in this file is built with literal arguments.
+        assert isinstance(name, str)
+        self.name = name
+
+        assert isinstance(n, int) and (n >= 0 or
+                                       n in (UP_TO_NEWLINE,
+                                             TAKEN_FROM_ARGUMENT1,
+                                             TAKEN_FROM_ARGUMENT4))
+        self.n = n
+
+        self.reader = reader
+
+        assert isinstance(doc, str)
+        self.doc = doc
+
+from struct import unpack as _unpack
+
+def read_uint1(f):
+    r"""Read a 1-byte unsigned int from f; ValueError on exhausted stream.
+
+    >>> import StringIO
+    >>> read_uint1(StringIO.StringIO('\xff'))
+    255
+    """
+
+    data = f.read(1)
+    if data:
+        return ord(data)
+    raise ValueError("not enough data in stream to read uint1")
+
+uint1 = ArgumentDescriptor(
+            name='uint1',
+            n=1,
+            reader=read_uint1,
+            doc="One-byte unsigned integer.")
+
+
+def read_uint2(f):
+    r"""Read a 2-byte little-endian unsigned int from f.
+
+    Raises ValueError if fewer than 2 bytes remain.
+
+    >>> import StringIO
+    >>> read_uint2(StringIO.StringIO('\xff\x00'))
+    255
+    >>> read_uint2(StringIO.StringIO('\xff\xff'))
+    65535
+    """
+
+    data = f.read(2)
+    if len(data) == 2:
+        return _unpack("<H", data)[0]
+    raise ValueError("not enough data in stream to read uint2")
+
+uint2 = ArgumentDescriptor(
+            name='uint2',
+            n=2,
+            reader=read_uint2,
+            doc="Two-byte unsigned integer, little-endian.")
+
+
+def read_int4(f):
+    r"""Read a 4-byte little-endian signed int from f.
+
+    Raises ValueError if fewer than 4 bytes remain.
+
+    >>> import StringIO
+    >>> read_int4(StringIO.StringIO('\xff\x00\x00\x00'))
+    255
+    >>> read_int4(StringIO.StringIO('\x00\x00\x00\x80')) == -(2**31)
+    True
+    """
+
+    data = f.read(4)
+    if len(data) == 4:
+        return _unpack("<i", data)[0]
+    raise ValueError("not enough data in stream to read int4")
+
+int4 = ArgumentDescriptor(
+           name='int4',
+           n=4,
+           reader=read_int4,
+           doc="Four-byte signed integer, little-endian, 2's complement.")
+
+
+def read_stringnl(f, decode=True, stripquotes=True):
+    r"""
+    >>> import StringIO
+    >>> read_stringnl(StringIO.StringIO("'abcd'\nefg\n"))
+    'abcd'
+
+    >>> read_stringnl(StringIO.StringIO("\n"))
+    Traceback (most recent call last):
+    ...
+    ValueError: no string quotes around ''
+
+    >>> read_stringnl(StringIO.StringIO("\n"), stripquotes=False)
+    ''
+
+    >>> read_stringnl(StringIO.StringIO("''\n"))
+    ''
+
+    >>> read_stringnl(StringIO.StringIO('"abcd"'))
+    Traceback (most recent call last):
+    ...
+    ValueError: no newline found when trying to read stringnl
+
+    Embedded escapes are undone in the result.
+    >>> read_stringnl(StringIO.StringIO(r"'a\n\\b\x00c\td'" + "\n'e'"))
+    'a\n\\b\x00c\td'
+    """
+
+    data = f.readline()
+    if not data.endswith('\n'):
+        raise ValueError("no newline found when trying to read stringnl")
+    data = data[:-1]    # lose the newline
+
+    if stripquotes:
+        for q in "'\"":
+            if data.startswith(q):
+                if not data.endswith(q):
+                    raise ValueError("strinq quote %r not found at both "
+                                     "ends of %r" % (q, data))
+                data = data[1:-1]
+                break
+        else:
+            raise ValueError("no string quotes around %r" % data)
+
+    # I'm not sure when 'string_escape' was added to the std codecs; it's
+    # crazy not to use it if it's there.
+    if decode:
+        data = data.decode('string_escape')
+    return data
+
+stringnl = ArgumentDescriptor(
+               name='stringnl',
+               n=UP_TO_NEWLINE,
+               reader=read_stringnl,
+               doc="""A newline-terminated string.
+
+                   This is a repr-style string, with embedded escapes, and
+                   bracketing quotes.
+                   """)
+
+def read_stringnl_noescape(f):
+    """Read a newline-terminated string without quote/escape handling."""
+    return read_stringnl(f, decode=False, stripquotes=False)
+
+stringnl_noescape = ArgumentDescriptor(
+                        name='stringnl_noescape',
+                        n=UP_TO_NEWLINE,
+                        reader=read_stringnl_noescape,
+                        doc="""A newline-terminated string.
+
+                        This is a str-style string, without embedded escapes,
+                        or bracketing quotes.  It should consist solely of
+                        printable ASCII characters.
+                        """)
+
+def read_stringnl_noescape_pair(f):
+    r"""Read two consecutive newline-terminated strings, joined by a blank.
+
+    >>> import StringIO
+    >>> read_stringnl_noescape_pair(StringIO.StringIO("Queue\nEmpty\njunk"))
+    'Queue Empty'
+    """
+
+    return "%s %s" % (read_stringnl_noescape(f), read_stringnl_noescape(f))
+
+stringnl_noescape_pair = ArgumentDescriptor(
+                             name='stringnl_noescape_pair',
+                             n=UP_TO_NEWLINE,
+                             reader=read_stringnl_noescape_pair,
+                             doc="""A pair of newline-terminated strings.
+
+                             These are str-style strings, without embedded
+                             escapes, or bracketing quotes.  They should
+                             consist solely of printable ASCII characters.
+                             The pair is returned as a single string, with
+                             a single blank separating the two strings.
+                             """)
+
+def read_string4(f):
+    r"""Read a string prefixed by a 4-byte little-endian signed length.
+
+    Raises ValueError on a negative count or a truncated stream.
+
+    >>> import StringIO
+    >>> read_string4(StringIO.StringIO("\x00\x00\x00\x00abc"))
+    ''
+    >>> read_string4(StringIO.StringIO("\x03\x00\x00\x00abcdef"))
+    'abc'
+    >>> read_string4(StringIO.StringIO("\x00\x00\x00\x03abcdef"))
+    Traceback (most recent call last):
+    ...
+    ValueError: expected 50331648 bytes in a string4, but only 6 remain
+    """
+
+    n = read_int4(f)
+    if n < 0:
+        raise ValueError("string4 byte count < 0: %d" % n)
+    data = f.read(n)
+    if len(data) == n:
+        return data
+    raise ValueError("expected %d bytes in a string4, but only %d remain" %
+                     (n, len(data)))
+
+string4 = ArgumentDescriptor(
+              name="string4",
+              n=TAKEN_FROM_ARGUMENT4,
+              reader=read_string4,
+              doc="""A counted string.
+
+              The first argument is a 4-byte little-endian signed int giving
+              the number of bytes in the string, and the second argument is
+              that many bytes.
+              """)
+
+
+def read_string1(f):
+    r"""Read a string prefixed by a 1-byte unsigned length.
+
+    >>> import StringIO
+    >>> read_string1(StringIO.StringIO("\x00"))
+    ''
+    >>> read_string1(StringIO.StringIO("\x03abcdef"))
+    'abc'
+    """
+
+    n = read_uint1(f)
+    assert n >= 0
+    data = f.read(n)
+    if len(data) == n:
+        return data
+    raise ValueError("expected %d bytes in a string1, but only %d remain" %
+                     (n, len(data)))
+
+string1 = ArgumentDescriptor(
+              name="string1",
+              n=TAKEN_FROM_ARGUMENT1,
+              reader=read_string1,
+              doc="""A counted string.
+
+              The first argument is a 1-byte unsigned int giving the number
+              of bytes in the string, and the second argument is that many
+              bytes.
+              """)
+
+
+def read_unicodestringnl(f):
+    r"""Read a newline-terminated raw-unicode-escape encoded string.
+
+    >>> import StringIO
+    >>> read_unicodestringnl(StringIO.StringIO("abc\uabcd\njunk"))
+    u'abc\uabcd'
+    """
+
+    data = f.readline()
+    if not data.endswith('\n'):
+        raise ValueError("no newline found when trying to read "
+                         "unicodestringnl")
+    data = data[:-1]    # lose the newline
+    return unicode(data, 'raw-unicode-escape')
+
+unicodestringnl = ArgumentDescriptor(
+                      name='unicodestringnl',
+                      n=UP_TO_NEWLINE,
+                      reader=read_unicodestringnl,
+                      doc="""A newline-terminated Unicode string.
+
+                      This is raw-unicode-escape encoded, so consists of
+                      printable ASCII characters, and may contain embedded
+                      escape sequences.
+                      """)
+
def read_unicodestring4(f):
    r"""Read a 4-byte-counted, UTF-8 encoded Unicode string from `f`.

    >>> import StringIO
    >>> s = u'abcd\uabcd'
    >>> enc = s.encode('utf-8')
    >>> enc
    'abcd\xea\xaf\x8d'
    >>> n = chr(len(enc)) + chr(0) * 3  # little-endian 4-byte length
    >>> t = read_unicodestring4(StringIO.StringIO(n + enc + 'junk'))
    >>> s == t
    True

    >>> read_unicodestring4(StringIO.StringIO(n + enc[:-1]))
    Traceback (most recent call last):
    ...
    ValueError: expected 7 bytes in a unicodestring4, but only 6 remain
    """

    byte_count = read_int4(f)
    if byte_count < 0:
        raise ValueError("unicodestring4 byte count < 0: %d" % byte_count)
    data = f.read(byte_count)
    if len(data) != byte_count:
        raise ValueError("expected %d bytes in a unicodestring4, but only %d "
                         "remain" % (byte_count, len(data)))
    # The payload is the UTF-8 encoding of the Unicode string.
    return unicode(data, 'utf-8')
+
# ArgumentDescriptor for BINUNICODE's argument:  a 4-byte little-endian
# signed byte count, then that many bytes of UTF-8 encoded text.
unicodestring4 = ArgumentDescriptor(
                    name="unicodestring4",
                    n=TAKEN_FROM_ARGUMENT4,
                    reader=read_unicodestring4,
                    doc="""A counted Unicode string.

                    The first argument is a 4-byte little-endian signed int
                    giving the number of bytes in the string, and the second
                    argument-- the UTF-8 encoding of the Unicode string --
                    contains that many bytes.
                    """)
+
+
def read_decimalnl_short(f):
    r"""Read a newline-terminated decimal int (no trailing 'L') from `f`.

    >>> import StringIO
    >>> read_decimalnl_short(StringIO.StringIO("1234\n56"))
    1234

    >>> read_decimalnl_short(StringIO.StringIO("1234L\n56"))
    Traceback (most recent call last):
    ...
    ValueError: trailing 'L' not allowed in '1234L'
    """

    text = read_stringnl(f, decode=False, stripquotes=False)
    if text.endswith("L"):
        raise ValueError("trailing 'L' not allowed in %r" % text)

    # Protocol 0 spells True as "01" and False as "00"; genuine integers are
    # never written with a leading zero, so the two spellings are unambiguous.
    if text == "01":
        return True
    if text == "00":
        return False

    # The result may not fit in a short int here even though it did on the
    # writing box (e.g. a pickle produced on a 64-bit machine).
    try:
        return int(text)
    except OverflowError:
        return long(text)
+
def read_decimalnl_long(f):
    r"""Read a newline-terminated decimal long (trailing 'L' required) from `f`.

    >>> import StringIO

    >>> read_decimalnl_long(StringIO.StringIO("1234\n56"))
    Traceback (most recent call last):
    ...
    ValueError: trailing 'L' required in '1234'

    Someday the trailing 'L' will probably go away from this output.

    >>> read_decimalnl_long(StringIO.StringIO("1234L\n56"))
    1234L

    >>> read_decimalnl_long(StringIO.StringIO("123456789012345678901234L\n6"))
    123456789012345678901234L
    """

    text = read_stringnl(f, decode=False, stripquotes=False)
    if text.endswith("L"):
        # long() happily digests the trailing 'L'.
        return long(text)
    raise ValueError("trailing 'L' required in %r" % text)
+
+
# Argument of INT, GET and PUT:  a newline-terminated decimal literal with no
# trailing 'L' (also carries the "00"/"01" bool encodings -- see
# read_decimalnl_short).
decimalnl_short = ArgumentDescriptor(
                      name='decimalnl_short',
                      n=UP_TO_NEWLINE,
                      reader=read_decimalnl_short,
                      doc="""A newline-terminated decimal integer literal.

                          This never has a trailing 'L', and the integer fit
                          in a short Python int on the box where the pickle
                          was written -- but there's no guarantee it will fit
                          in a short Python int on the box where the pickle
                          is read.
                          """)

# Argument of LONG:  a newline-terminated decimal literal carrying a
# mandatory trailing 'L'.
decimalnl_long = ArgumentDescriptor(
                     name='decimalnl_long',
                     n=UP_TO_NEWLINE,
                     reader=read_decimalnl_long,
                     doc="""A newline-terminated decimal integer literal.

                         This has a trailing 'L', and can represent integers
                         of any size.
                         """)
+
+
def read_floatnl(f):
    r"""Read a newline-terminated decimal float literal from `f`.

    >>> import StringIO
    >>> read_floatnl(StringIO.StringIO("-1.25\n6"))
    -1.25
    """
    # The literal runs up to (but does not include) the next newline.
    return float(read_stringnl(f, decode=False, stripquotes=False))
+
# Argument of FLOAT:  repr() of a float, newline-terminated.
floatnl = ArgumentDescriptor(
              name='floatnl',
              n=UP_TO_NEWLINE,
              reader=read_floatnl,
              doc="""A newline-terminated decimal floating literal.

              In general this requires 17 significant digits for roundtrip
              identity, and pickling then unpickling infinities, NaNs, and
              minus zero doesn't work across boxes, or on some boxes even
              on itself (e.g., Windows can't read the strings it produces
              for infinities or NaNs).
              """)
+
def read_float8(f):
    r"""Read a big-endian 8-byte binary float from `f`.

    >>> import StringIO, struct
    >>> raw = struct.pack(">d", -1.25)
    >>> raw
    '\xbf\xf4\x00\x00\x00\x00\x00\x00'
    >>> read_float8(StringIO.StringIO(raw + "\n"))
    -1.25
    """

    raw = f.read(8)
    if len(raw) != 8:
        raise ValueError("not enough data in stream to read float8")
    # '>d' is struct's big-endian IEEE-754 double format.
    return _unpack(">d", raw)[0]
+
+
# Argument of BINFLOAT:  8 bytes, big-endian, struct format '>d'.
float8 = ArgumentDescriptor(
             name='float8',
             n=8,
             reader=read_float8,
             doc="""An 8-byte binary representation of a float, big-endian.

             The format is unique to Python, and shared with the struct
             module (format string '>d') "in theory" (the struct and cPickle
             implementations don't share the code -- they should).  It's
             strongly related to the IEEE-754 double format, and, in normal
             cases, is in fact identical to the big-endian 754 double format.
             On other boxes the dynamic range is limited to that of a 754
             double, and "add a half and chop" rounding is used to reduce
             the precision to 53 bits.  However, even on a 754 box,
             infinities, NaNs, and minus zero may not be handled correctly
             (may not survive roundtrip pickling intact).
             """)
+
+# Protocol 2 formats
+
+from pickle import decode_long
+
def read_long1(f):
    r"""Read a 1-byte-counted, little-endian 2's-complement long from `f`.

    >>> import StringIO
    >>> read_long1(StringIO.StringIO("\x00"))
    0L
    >>> read_long1(StringIO.StringIO("\x02\xff\x00"))
    255L
    >>> read_long1(StringIO.StringIO("\x02\xff\x7f"))
    32767L
    >>> read_long1(StringIO.StringIO("\x02\x00\xff"))
    -256L
    >>> read_long1(StringIO.StringIO("\x02\x00\x80"))
    -32768L
    """

    size = read_uint1(f)
    payload = f.read(size)
    if len(payload) == size:
        # decode_long treats an empty payload as 0L.
        return decode_long(payload)
    raise ValueError("not enough data in stream to read long1")
+
# Argument of LONG1:  1-byte unsigned length, then that many bytes of
# little-endian 2's-complement long data.
long1 = ArgumentDescriptor(
    name="long1",
    n=TAKEN_FROM_ARGUMENT1,
    reader=read_long1,
    doc="""A binary long, little-endian, using 1-byte size.

    This first reads one byte as an unsigned size, then reads that
    many bytes and interprets them as a little-endian 2's-complement long.
    If the size is 0, that's taken as a shortcut for the long 0L.
    """)
+
def read_long4(f):
    r"""Read a 4-byte-counted, little-endian 2's-complement long from `f`.

    >>> import StringIO
    >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x00"))
    255L
    >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\xff\x7f"))
    32767L
    >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\xff"))
    -256L
    >>> read_long4(StringIO.StringIO("\x02\x00\x00\x00\x00\x80"))
    -32768L
    >>> read_long4(StringIO.StringIO("\x00\x00\x00\x00"))
    0L
    """

    n = read_int4(f)
    if n < 0:
        raise ValueError("long4 byte count < 0: %d" % n)
    data = f.read(n)
    if len(data) != n:
        raise ValueError("not enough data in stream to read long4")
    # decode_long treats an empty payload as 0L.
    return decode_long(data)
+
# Argument of LONG4:  4-byte little-endian signed length (must be >= 0),
# then that many bytes of little-endian 2's-complement long data.
long4 = ArgumentDescriptor(
    name="long4",
    n=TAKEN_FROM_ARGUMENT4,
    reader=read_long4,
    doc="""A binary representation of a long, little-endian.

    This first reads four bytes as a signed size (but requires the
    size to be >= 0), then reads that many bytes and interprets them
    as a little-endian 2's-complement long.  If the size is 0, that's taken
    as a shortcut for the long 0L, although LONG1 should really be used
    then instead (and in any case where # of bytes < 256).
    """)
+
+
+##############################################################################
+# Object descriptors.  The stack used by the pickle machine holds objects,
+# and in the stack_before and stack_after attributes of OpcodeInfo
+# descriptors we need names to describe the various types of objects that can
+# appear on the stack.
+
class StackObject(object):
    """Describes one kind of object that can sit on the pickle-machine stack.

    Instances are used in the stack_before/stack_after attributes of opcode
    descriptors to name the object types an opcode consumes and produces.
    """

    __slots__ = (
        # name of descriptor record, for info only
        'name',

        # type of object, or tuple of type objects (meaning the object can
        # be of any type in the tuple)
        'obtype',

        # human-readable docs for this kind of stack object; a string
        'doc',
    )

    def __init__(self, name, obtype, doc):
        assert isinstance(name, str)
        self.name = name

        # obtype is either a single type or a tuple whose members are all
        # types (meaning "any of these").
        if isinstance(obtype, tuple):
            for member in obtype:
                assert isinstance(member, type)
        else:
            assert isinstance(obtype, type)
        self.obtype = obtype

        assert isinstance(doc, str)
        self.doc = doc

    def __repr__(self):
        # A stack object prints simply as its name.
        return self.name
+
+
# Singleton StackObject instances, referenced by the stack_before and
# stack_after lists of the opcode descriptors below.
pyint = StackObject(
            name='int',
            obtype=int,
            doc="A short (as opposed to long) Python integer object.")

pylong = StackObject(
             name='long',
             obtype=long,
             doc="A long (as opposed to short) Python integer object.")

pyinteger_or_bool = StackObject(
                        name='int_or_bool',
                        obtype=(int, long, bool),
                        doc="A Python integer object (short or long), or "
                            "a Python bool.")

# NOTE(review): obtype here is a 1-tuple, unlike the bare types used by
# pyint/pyfloat etc.; harmless, but inconsistent.
pybool = StackObject(
             name='bool',
             obtype=(bool,),
             doc="A Python bool object.")

pyfloat = StackObject(
              name='float',
              obtype=float,
              doc="A Python float object.")

pystring = StackObject(
               name='str',
               obtype=str,
               doc="A Python string object.")

pyunicode = StackObject(
                name='unicode',
                obtype=unicode,
                doc="A Python Unicode string object.")

pynone = StackObject(
             name="None",
             obtype=type(None),
             doc="The Python None object.")

pytuple = StackObject(
              name="tuple",
              obtype=tuple,
              doc="A Python tuple object.")

pylist = StackObject(
             name="list",
             obtype=list,
             doc="A Python list object.")

pydict = StackObject(
             name="dict",
             obtype=dict,
             doc="A Python dict object.")

anyobject = StackObject(
                name='any',
                obtype=object,
                doc="Any kind of object whatsoever.")

# The next two are fictitious "objects" used only to describe stack effects;
# their obtype is StackObject itself since no real Python object corresponds.
markobject = StackObject(
                 name="mark",
                 obtype=StackObject,
                 doc="""'The mark' is a unique object.

                 Opcodes that operate on a variable number of objects
                 generally don't embed the count of objects in the opcode,
                 or pull it off the stack.  Instead the MARK opcode is used
                 to push a special marker object on the stack, and then
                 some other opcodes grab all the objects from the top of
                 the stack down to (but not including) the topmost marker
                 object.
                 """)

stackslice = StackObject(
                 name="stackslice",
                 obtype=StackObject,
                 doc="""An object representing a contiguous slice of the stack.

                 This is used in conjunction with markobject, to represent all
                 of the stack following the topmost markobject.  For example,
                 the POP_MARK opcode changes the stack from

                     [..., markobject, stackslice]
                 to
                     [...]

                 No matter how many objects are on the stack after the topmost
                 markobject, POP_MARK gets rid of all of them (including the
                 topmost markobject too).
                 """)
+
+##############################################################################
+# Descriptors for pickle opcodes.
+
class OpcodeInfo(object):
    # Describes one pickle opcode:  its name, byte code, embedded-argument
    # format, stack effect, and the protocol that introduced it.

    __slots__ = (
        # symbolic name of opcode; a string
        'name',

        # the code used in a bytestream to represent the opcode; a
        # one-character string
        'code',

        # If the opcode has an argument embedded in the byte string, an
        # instance of ArgumentDescriptor specifying its type.  Note that
        # arg.reader(s) can be used to read and decode the argument from
        # the bytestream s, and arg.doc documents the format of the raw
        # argument bytes.  If the opcode doesn't have an argument embedded
        # in the bytestream, arg should be None.
        'arg',

        # what the stack looks like before this opcode runs; a list
        'stack_before',

        # what the stack looks like after this opcode runs; a list
        'stack_after',

        # the protocol number in which this opcode was introduced; an int
        'proto',

        # human-readable docs for this opcode; a string
        'doc',
    )

    def __init__(self, name, code, arg,
                 stack_before, stack_after, proto, doc):
        # Validate each field eagerly; the opcode table below is built once
        # at import time, so any mistake fails fast.
        assert isinstance(name, str)
        self.name = name

        assert isinstance(code, str)
        assert len(code) == 1
        self.code = code

        assert arg is None or isinstance(arg, ArgumentDescriptor)
        self.arg = arg

        assert isinstance(stack_before, list)
        for x in stack_before:
            assert isinstance(x, StackObject)
        self.stack_before = stack_before

        assert isinstance(stack_after, list)
        for x in stack_after:
            assert isinstance(x, StackObject)
        self.stack_after = stack_after

        # Only protocols 0, 1 and 2 exist at this point.
        assert isinstance(proto, int) and 0 <= proto <= 2
        self.proto = proto

        assert isinstance(doc, str)
        self.doc = doc
+
+I = OpcodeInfo
+opcodes = [
+
+    # Ways to spell integers.
+
+    I(name='INT',
+      code='I',
+      arg=decimalnl_short,
+      stack_before=[],
+      stack_after=[pyinteger_or_bool],
+      proto=0,
+      doc="""Push an integer or bool.
+
+      The argument is a newline-terminated decimal literal string.
+
+      The intent may have been that this always fit in a short Python int,
+      but INT can be generated in pickles written on a 64-bit box that
+      require a Python long on a 32-bit box.  The difference between this
+      and LONG then is that INT skips a trailing 'L', and produces a short
+      int whenever possible.
+
+      Another difference is due to that, when bool was introduced as a
+      distinct type in 2.3, builtin names True and False were also added to
+      2.2.2, mapping to ints 1 and 0.  For compatibility in both directions,
+      True gets pickled as INT + "I01\\n", and False as INT + "I00\\n".
+      Leading zeroes are never produced for a genuine integer.  The 2.3
+      (and later) unpicklers special-case these and return bool instead;
+      earlier unpicklers ignore the leading "0" and return the int.
+      """),
+
+    I(name='BININT',
+      code='J',
+      arg=int4,
+      stack_before=[],
+      stack_after=[pyint],
+      proto=1,
+      doc="""Push a four-byte signed integer.
+
+      This handles the full range of Python (short) integers on a 32-bit
+      box, directly as binary bytes (1 for the opcode and 4 for the integer).
+      If the integer is non-negative and fits in 1 or 2 bytes, pickling via
+      BININT1 or BININT2 saves space.
+      """),
+
+    I(name='BININT1',
+      code='K',
+      arg=uint1,
+      stack_before=[],
+      stack_after=[pyint],
+      proto=1,
+      doc="""Push a one-byte unsigned integer.
+
+      This is a space optimization for pickling very small non-negative ints,
+      in range(256).
+      """),
+
+    I(name='BININT2',
+      code='M',
+      arg=uint2,
+      stack_before=[],
+      stack_after=[pyint],
+      proto=1,
+      doc="""Push a two-byte unsigned integer.
+
+      This is a space optimization for pickling small positive ints, in
+      range(256, 2**16).  Integers in range(256) can also be pickled via
+      BININT2, but BININT1 instead saves a byte.
+      """),
+
+    I(name='LONG',
+      code='L',
+      arg=decimalnl_long,
+      stack_before=[],
+      stack_after=[pylong],
+      proto=0,
+      doc="""Push a long integer.
+
+      The same as INT, except that the literal ends with 'L', and always
+      unpickles to a Python long.  There doesn't seem a real purpose to the
+      trailing 'L'.
+
+      Note that LONG takes time quadratic in the number of digits when
+      unpickling (this is simply due to the nature of decimal->binary
+      conversion).  Proto 2 added linear-time (in C; still quadratic-time
+      in Python) LONG1 and LONG4 opcodes.
+      """),
+
+    I(name="LONG1",
+      code='\x8a',
+      arg=long1,
+      stack_before=[],
+      stack_after=[pylong],
+      proto=2,
+      doc="""Long integer using one-byte length.
+
+      A more efficient encoding of a Python long; the long1 encoding
+      says it all."""),
+
+    I(name="LONG4",
+      code='\x8b',
+      arg=long4,
+      stack_before=[],
+      stack_after=[pylong],
+      proto=2,
+      doc="""Long integer using found-byte length.
+
+      A more efficient encoding of a Python long; the long4 encoding
+      says it all."""),
+
+    # Ways to spell strings (8-bit, not Unicode).
+
+    I(name='STRING',
+      code='S',
+      arg=stringnl,
+      stack_before=[],
+      stack_after=[pystring],
+      proto=0,
+      doc="""Push a Python string object.
+
+      The argument is a repr-style string, with bracketing quote characters,
+      and perhaps embedded escapes.  The argument extends until the next
+      newline character.
+      """),
+
+    I(name='BINSTRING',
+      code='T',
+      arg=string4,
+      stack_before=[],
+      stack_after=[pystring],
+      proto=1,
+      doc="""Push a Python string object.
+
+      There are two arguments:  the first is a 4-byte little-endian signed int
+      giving the number of bytes in the string, and the second is that many
+      bytes, which are taken literally as the string content.
+      """),
+
+    I(name='SHORT_BINSTRING',
+      code='U',
+      arg=string1,
+      stack_before=[],
+      stack_after=[pystring],
+      proto=1,
+      doc="""Push a Python string object.
+
+      There are two arguments:  the first is a 1-byte unsigned int giving
+      the number of bytes in the string, and the second is that many bytes,
+      which are taken literally as the string content.
+      """),
+
+    # Ways to spell None.
+
+    I(name='NONE',
+      code='N',
+      arg=None,
+      stack_before=[],
+      stack_after=[pynone],
+      proto=0,
+      doc="Push None on the stack."),
+
+    # Ways to spell bools, starting with proto 2.  See INT for how this was
+    # done before proto 2.
+
+    I(name='NEWTRUE',
+      code='\x88',
+      arg=None,
+      stack_before=[],
+      stack_after=[pybool],
+      proto=2,
+      doc="""True.
+
+      Push True onto the stack."""),
+
+    I(name='NEWFALSE',
+      code='\x89',
+      arg=None,
+      stack_before=[],
+      stack_after=[pybool],
+      proto=2,
+      doc="""True.
+
+      Push False onto the stack."""),
+
+    # Ways to spell Unicode strings.
+
+    I(name='UNICODE',
+      code='V',
+      arg=unicodestringnl,
+      stack_before=[],
+      stack_after=[pyunicode],
+      proto=0,  # this may be pure-text, but it's a later addition
+      doc="""Push a Python Unicode string object.
+
+      The argument is a raw-unicode-escape encoding of a Unicode string,
+      and so may contain embedded escape sequences.  The argument extends
+      until the next newline character.
+      """),
+
+    I(name='BINUNICODE',
+      code='X',
+      arg=unicodestring4,
+      stack_before=[],
+      stack_after=[pyunicode],
+      proto=1,
+      doc="""Push a Python Unicode string object.
+
+      There are two arguments:  the first is a 4-byte little-endian signed int
+      giving the number of bytes in the string.  The second is that many
+      bytes, and is the UTF-8 encoding of the Unicode string.
+      """),
+
+    # Ways to spell floats.
+
+    I(name='FLOAT',
+      code='F',
+      arg=floatnl,
+      stack_before=[],
+      stack_after=[pyfloat],
+      proto=0,
+      doc="""Newline-terminated decimal float literal.
+
+      The argument is repr(a_float), and in general requires 17 significant
+      digits for roundtrip conversion to be an identity (this is so for
+      IEEE-754 double precision values, which is what Python float maps to
+      on most boxes).
+
+      In general, FLOAT cannot be used to transport infinities, NaNs, or
+      minus zero across boxes (or even on a single box, if the platform C
+      library can't read the strings it produces for such things -- Windows
+      is like that), but may do less damage than BINFLOAT on boxes with
+      greater precision or dynamic range than IEEE-754 double.
+      """),
+
+    I(name='BINFLOAT',
+      code='G',
+      arg=float8,
+      stack_before=[],
+      stack_after=[pyfloat],
+      proto=1,
+      doc="""Float stored in binary form, with 8 bytes of data.
+
+      This generally requires less than half the space of FLOAT encoding.
+      In general, BINFLOAT cannot be used to transport infinities, NaNs, or
+      minus zero, raises an exception if the exponent exceeds the range of
+      an IEEE-754 double, and retains no more than 53 bits of precision (if
+      there are more than that, "add a half and chop" rounding is used to
+      cut it back to 53 significant bits).
+      """),
+
+    # Ways to build lists.
+
+    I(name='EMPTY_LIST',
+      code=']',
+      arg=None,
+      stack_before=[],
+      stack_after=[pylist],
+      proto=1,
+      doc="Push an empty list."),
+
+    I(name='APPEND',
+      code='a',
+      arg=None,
+      stack_before=[pylist, anyobject],
+      stack_after=[pylist],
+      proto=0,
+      doc="""Append an object to a list.
+
+      Stack before:  ... pylist anyobject
+      Stack after:   ... pylist+[anyobject]
+
+      although pylist is really extended in-place.
+      """),
+
+    I(name='APPENDS',
+      code='e',
+      arg=None,
+      stack_before=[pylist, markobject, stackslice],
+      stack_after=[pylist],
+      proto=1,
+      doc="""Extend a list by a slice of stack objects.
+
+      Stack before:  ... pylist markobject stackslice
+      Stack after:   ... pylist+stackslice
+
+      although pylist is really extended in-place.
+      """),
+
+    I(name='LIST',
+      code='l',
+      arg=None,
+      stack_before=[markobject, stackslice],
+      stack_after=[pylist],
+      proto=0,
+      doc="""Build a list out of the topmost stack slice, after markobject.
+
+      All the stack entries following the topmost markobject are placed into
+      a single Python list, which single list object replaces all of the
+      stack from the topmost markobject onward.  For example,
+
+      Stack before: ... markobject 1 2 3 'abc'
+      Stack after:  ... [1, 2, 3, 'abc']
+      """),
+
+    # Ways to build tuples.
+
+    I(name='EMPTY_TUPLE',
+      code=')',
+      arg=None,
+      stack_before=[],
+      stack_after=[pytuple],
+      proto=1,
+      doc="Push an empty tuple."),
+
+    I(name='TUPLE',
+      code='t',
+      arg=None,
+      stack_before=[markobject, stackslice],
+      stack_after=[pytuple],
+      proto=0,
+      doc="""Build a tuple out of the topmost stack slice, after markobject.
+
+      All the stack entries following the topmost markobject are placed into
+      a single Python tuple, which single tuple object replaces all of the
+      stack from the topmost markobject onward.  For example,
+
+      Stack before: ... markobject 1 2 3 'abc'
+      Stack after:  ... (1, 2, 3, 'abc')
+      """),
+
+    I(name='TUPLE1',
+      code='\x85',
+      arg=None,
+      stack_before=[anyobject],
+      stack_after=[pytuple],
+      proto=2,
+      doc="""One-tuple.
+
+      This code pops one value off the stack and pushes a tuple of
+      length 1 whose one item is that value back onto it.  IOW:
+
+          stack[-1] = tuple(stack[-1:])
+      """),
+
+    I(name='TUPLE2',
+      code='\x86',
+      arg=None,
+      stack_before=[anyobject, anyobject],
+      stack_after=[pytuple],
+      proto=2,
+      doc="""One-tuple.
+
+      This code pops two values off the stack and pushes a tuple
+      of length 2 whose items are those values back onto it.  IOW:
+
+          stack[-2:] = [tuple(stack[-2:])]
+      """),
+
+    I(name='TUPLE3',
+      code='\x87',
+      arg=None,
+      stack_before=[anyobject, anyobject, anyobject],
+      stack_after=[pytuple],
+      proto=2,
+      doc="""One-tuple.
+
+      This code pops three values off the stack and pushes a tuple
+      of length 3 whose items are those values back onto it.  IOW:
+
+          stack[-3:] = [tuple(stack[-3:])]
+      """),
+
+    # Ways to build dicts.
+
+    I(name='EMPTY_DICT',
+      code='}',
+      arg=None,
+      stack_before=[],
+      stack_after=[pydict],
+      proto=1,
+      doc="Push an empty dict."),
+
+    I(name='DICT',
+      code='d',
+      arg=None,
+      stack_before=[markobject, stackslice],
+      stack_after=[pydict],
+      proto=0,
+      doc="""Build a dict out of the topmost stack slice, after markobject.
+
+      All the stack entries following the topmost markobject are placed into
+      a single Python dict, which single dict object replaces all of the
+      stack from the topmost markobject onward.  The stack slice alternates
+      key, value, key, value, ....  For example,
+
+      Stack before: ... markobject 1 2 3 'abc'
+      Stack after:  ... {1: 2, 3: 'abc'}
+      """),
+
+    I(name='SETITEM',
+      code='s',
+      arg=None,
+      stack_before=[pydict, anyobject, anyobject],
+      stack_after=[pydict],
+      proto=0,
+      doc="""Add a key+value pair to an existing dict.
+
+      Stack before:  ... pydict key value
+      Stack after:   ... pydict
+
+      where pydict has been modified via pydict[key] = value.
+      """),
+
+    I(name='SETITEMS',
+      code='u',
+      arg=None,
+      stack_before=[pydict, markobject, stackslice],
+      stack_after=[pydict],
+      proto=1,
+      doc="""Add an arbitrary number of key+value pairs to an existing dict.
+
+      The slice of the stack following the topmost markobject is taken as
+      an alternating sequence of keys and values, added to the dict
+      immediately under the topmost markobject.  Everything at and after the
+      topmost markobject is popped, leaving the mutated dict at the top
+      of the stack.
+
+      Stack before:  ... pydict markobject key_1 value_1 ... key_n value_n
+      Stack after:   ... pydict
+
+      where pydict has been modified via pydict[key_i] = value_i for i in
+      1, 2, ..., n, and in that order.
+      """),
+
+    # Stack manipulation.
+
+    I(name='POP',
+      code='0',
+      arg=None,
+      stack_before=[anyobject],
+      stack_after=[],
+      proto=0,
+      doc="Discard the top stack item, shrinking the stack by one item."),
+
+    I(name='DUP',
+      code='2',
+      arg=None,
+      stack_before=[anyobject],
+      stack_after=[anyobject, anyobject],
+      proto=0,
+      doc="Push the top stack item onto the stack again, duplicating it."),
+
+    I(name='MARK',
+      code='(',
+      arg=None,
+      stack_before=[],
+      stack_after=[markobject],
+      proto=0,
+      doc="""Push markobject onto the stack.
+
+      markobject is a unique object, used by other opcodes to identify a
+      region of the stack containing a variable number of objects for them
+      to work on.  See markobject.doc for more detail.
+      """),
+
+    I(name='POP_MARK',
+      code='1',
+      arg=None,
+      stack_before=[markobject, stackslice],
+      stack_after=[],
+      proto=0,
+      doc="""Pop all the stack objects at and above the topmost markobject.
+
+      When an opcode using a variable number of stack objects is done,
+      POP_MARK is used to remove those objects, and to remove the markobject
+      that delimited their starting position on the stack.
+      """),
+
+    # Memo manipulation.  There are really only two operations (get and put),
+    # each in all-text, "short binary", and "long binary" flavors.
+
+    I(name='GET',
+      code='g',
+      arg=decimalnl_short,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=0,
+      doc="""Read an object from the memo and push it on the stack.
+
+      The index of the memo object to push is given by the newline-teriminated
+      decimal string following.  BINGET and LONG_BINGET are space-optimized
+      versions.
+      """),
+
+    I(name='BINGET',
+      code='h',
+      arg=uint1,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=1,
+      doc="""Read an object from the memo and push it on the stack.
+
+      The index of the memo object to push is given by the 1-byte unsigned
+      integer following.
+      """),
+
+    I(name='LONG_BINGET',
+      code='j',
+      arg=int4,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=1,
+      doc="""Read an object from the memo and push it on the stack.
+
+      The index of the memo object to push is given by the 4-byte signed
+      little-endian integer following.
+      """),
+
+    I(name='PUT',
+      code='p',
+      arg=decimalnl_short,
+      stack_before=[],
+      stack_after=[],
+      proto=0,
+      doc="""Store the stack top into the memo.  The stack is not popped.
+
+      The index of the memo location to write into is given by the newline-
+      terminated decimal string following.  BINPUT and LONG_BINPUT are
+      space-optimized versions.
+      """),
+
+    I(name='BINPUT',
+      code='q',
+      arg=uint1,
+      stack_before=[],
+      stack_after=[],
+      proto=1,
+      doc="""Store the stack top into the memo.  The stack is not popped.
+
+      The index of the memo location to write into is given by the 1-byte
+      unsigned integer following.
+      """),
+
+    I(name='LONG_BINPUT',
+      code='r',
+      arg=int4,
+      stack_before=[],
+      stack_after=[],
+      proto=1,
+      doc="""Store the stack top into the memo.  The stack is not popped.
+
+      The index of the memo location to write into is given by the 4-byte
+      signed little-endian integer following.
+      """),
+
+    # Access the extension registry (predefined objects).  Akin to the GET
+    # family.
+
+    I(name='EXT1',
+      code='\x82',
+      arg=uint1,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=2,
+      doc="""Extension code.
+
+      This code and the similar EXT2 and EXT4 allow using a registry
+      of popular objects that are pickled by name, typically classes.
+      It is envisioned that through a global negotiation and
+      registration process, third parties can set up a mapping between
+      ints and object names.
+
+      In order to guarantee pickle interchangeability, the extension
+      code registry ought to be global, although a range of codes may
+      be reserved for private use.
+
+      EXT1 has a 1-byte integer argument.  This is used to index into the
+      extension registry, and the object at that index is pushed on the stack.
+      """),
+
+    I(name='EXT2',
+      code='\x83',
+      arg=uint2,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=2,
+      doc="""Extension code.
+
+      See EXT1.  EXT2 has a two-byte integer argument.
+      """),
+
+    I(name='EXT4',
+      code='\x84',
+      arg=int4,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=2,
+      doc="""Extension code.
+
+      See EXT1.  EXT4 has a four-byte integer argument.
+      """),
+
+    # Push a class object, or module function, on the stack, via its module
+    # and name.
+
+    I(name='GLOBAL',
+      code='c',
+      arg=stringnl_noescape_pair,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=0,
+      doc="""Push a global object (module.attr) on the stack.
+
+      Two newline-terminated strings follow the GLOBAL opcode.  The first is
+      taken as a module name, and the second as a class name.  The class
+      object module.class is pushed on the stack.  More accurately, the
+      object returned by self.find_class(module, class) is pushed on the
+      stack, so unpickling subclasses can override this form of lookup.
+      """),
+
+    # Ways to build objects of classes pickle doesn't know about directly
+    # (user-defined classes).  I despair of documenting this accurately
+    # and comprehensibly -- you really have to read the pickle code to
+    # find all the special cases.
+
+    I(name='REDUCE',
+      code='R',
+      arg=None,
+      stack_before=[anyobject, anyobject],
+      stack_after=[anyobject],
+      proto=0,
+      doc="""Push an object built from a callable and an argument tuple.
+
+      The opcode is named to remind of the __reduce__() method.
+
+      Stack before: ... callable pytuple
+      Stack after:  ... callable(*pytuple)
+
+      The callable and the argument tuple are the first two items returned
+      by a __reduce__ method.  Applying the callable to the argtuple is
+      supposed to reproduce the original object, or at least get it started.
+      If the __reduce__ method returns a 3-tuple, the last component is an
+      argument to be passed to the object's __setstate__, and then the REDUCE
+      opcode is followed by code to create setstate's argument, and then a
+      BUILD opcode to apply  __setstate__ to that argument.
+
+      There are lots of special cases here.  The argtuple can be None, in
+      which case callable.__basicnew__() is called instead to produce the
+      object to be pushed on the stack.  This appears to be a trick unique
+      to ExtensionClasses, and is deprecated regardless.
+
+      If type(callable) is not ClassType, REDUCE complains unless the
+      callable has been registered with the copy_reg module's
+      safe_constructors dict, or the callable has a magic
+      '__safe_for_unpickling__' attribute with a true value.  I'm not sure
+      why it does this, but I've sure seen this complaint often enough when
+      I didn't want to <wink>.
+      """),
+
+    I(name='BUILD',
+      code='b',
+      arg=None,
+      stack_before=[anyobject, anyobject],
+      stack_after=[anyobject],
+      proto=0,
+      doc="""Finish building an object, via __setstate__ or dict update.
+
+      Stack before: ... anyobject argument
+      Stack after:  ... anyobject
+
+      where anyobject may have been mutated, as follows:
+
+      If the object has a __setstate__ method,
+
+          anyobject.__setstate__(argument)
+
+      is called.
+
+      Else the argument must be a dict, the object must have a __dict__, and
+      the object is updated via
+
+          anyobject.__dict__.update(argument)
+
+      This may raise RuntimeError in restricted execution mode (which
+      disallows access to __dict__ directly); in that case, the object
+      is updated instead via
+
+          for k, v in argument.items():
+              anyobject[k] = v
+      """),
+
+    I(name='INST',
+      code='i',
+      arg=stringnl_noescape_pair,
+      stack_before=[markobject, stackslice],
+      stack_after=[anyobject],
+      proto=0,
+      doc="""Build a class instance.
+
+      This is the protocol 0 version of protocol 1's OBJ opcode.
+      INST is followed by two newline-terminated strings, giving a
+      module and class name, just as for the GLOBAL opcode (and see
+      GLOBAL for more details about that).  self.find_class(module, name)
+      is used to get a class object.
+
+      In addition, all the objects on the stack following the topmost
+      markobject are gathered into a tuple and popped (along with the
+      topmost markobject), just as for the TUPLE opcode.
+
+      Now it gets complicated.  If all of these are true:
+
+        + The argtuple is empty (markobject was at the top of the stack
+          at the start).
+
+        + It's an old-style class object (the type of the class object is
+          ClassType).
+
+        + The class object does not have a __getinitargs__ attribute.
+
+      then we want to create an old-style class instance without invoking
+      its __init__() method (pickle has waffled on this over the years; not
+      calling __init__() is current wisdom).  In this case, an instance of
+      an old-style dummy class is created, and then we try to rebind its
+      __class__ attribute to the desired class object.  If this succeeds,
+      the new instance object is pushed on the stack, and we're done.  In
+      restricted execution mode it can fail (assignment to __class__ is
+      disallowed), and I'm not really sure what happens then -- it looks
+      like the code ends up calling the class object's __init__ anyway,
+      via falling into the next case.
+
+      Else (the argtuple is not empty, it's not an old-style class object,
+      or the class object does have a __getinitargs__ attribute), the code
+      first insists that the class object have a __safe_for_unpickling__
+      attribute.  Unlike as for the __safe_for_unpickling__ check in REDUCE,
+      it doesn't matter whether this attribute has a true or false value, it
+      only matters whether it exists (XXX this is a bug; cPickle
+      requires the attribute to be true).  If __safe_for_unpickling__
+      doesn't exist, UnpicklingError is raised.
+
+      Else (the class object does have a __safe_for_unpickling__ attr),
+      the class object obtained from INST's arguments is applied to the
+      argtuple obtained from the stack, and the resulting instance object
+      is pushed on the stack.
+
+      NOTE:  checks for __safe_for_unpickling__ went away in Python 2.3.
+      """),
+
+    I(name='OBJ',
+      code='o',
+      arg=None,
+      stack_before=[markobject, anyobject, stackslice],
+      stack_after=[anyobject],
+      proto=1,
+      doc="""Build a class instance.
+
+      This is the protocol 1 version of protocol 0's INST opcode, and is
+      very much like it.  The major difference is that the class object
+      is taken off the stack, allowing it to be retrieved from the memo
+      repeatedly if several instances of the same class are created.  This
+      can be much more efficient (in both time and space) than repeatedly
+      embedding the module and class names in INST opcodes.
+
+      Unlike INST, OBJ takes no arguments from the opcode stream.  Instead
+      the class object is taken off the stack, immediately above the
+      topmost markobject:
+
+      Stack before: ... markobject classobject stackslice
+      Stack after:  ... new_instance_object
+
+      As for INST, the remainder of the stack above the markobject is
+      gathered into an argument tuple, and then the logic seems identical,
+      except that no __safe_for_unpickling__ check is done (XXX this is
+      a bug; cPickle does test __safe_for_unpickling__).  See INST for
+      the gory details.
+
+      NOTE:  In Python 2.3, INST and OBJ are identical except for how they
+      get the class object.  That was always the intent; the implementations
+      had diverged for accidental reasons.
+      """),
+
+    I(name='NEWOBJ',
+      code='\x81',
+      arg=None,
+      stack_before=[anyobject, anyobject],
+      stack_after=[anyobject],
+      proto=2,
+      doc="""Build an object instance.
+
+      The stack before should be thought of as containing a class
+      object followed by an argument tuple (the tuple being the stack
+      top).  Call these cls and args.  They are popped off the stack,
+      and the value returned by cls.__new__(cls, *args) is pushed back
+      onto the stack.
+      """),
+
+    # Machine control.
+
+    I(name='PROTO',
+      code='\x80',
+      arg=uint1,
+      stack_before=[],
+      stack_after=[],
+      proto=2,
+      doc="""Protocol version indicator.
+
+      For protocol 2 and above, a pickle must start with this opcode.
+      The argument is the protocol version, an int in range(2, 256).
+      """),
+
+    I(name='STOP',
+      code='.',
+      arg=None,
+      stack_before=[anyobject],
+      stack_after=[],
+      proto=0,
+      doc="""Stop the unpickling machine.
+
+      Every pickle ends with this opcode.  The object at the top of the stack
+      is popped, and that's the result of unpickling.  The stack should be
+      empty then.
+      """),
+
+    # Ways to deal with persistent IDs.
+
+    I(name='PERSID',
+      code='P',
+      arg=stringnl_noescape,
+      stack_before=[],
+      stack_after=[anyobject],
+      proto=0,
+      doc="""Push an object identified by a persistent ID.
+
+      The pickle module doesn't define what a persistent ID means.  PERSID's
+      argument is a newline-terminated str-style (no embedded escapes, no
+      bracketing quote characters) string, which *is* "the persistent ID".
+      The unpickler passes this string to self.persistent_load().  Whatever
+      object that returns is pushed on the stack.  There is no implementation
+      of persistent_load() in Python's unpickler:  it must be supplied by an
+      unpickler subclass.
+      """),
+
+    I(name='BINPERSID',
+      code='Q',
+      arg=None,
+      stack_before=[anyobject],
+      stack_after=[anyobject],
+      proto=1,
+      doc="""Push an object identified by a persistent ID.
+
+      Like PERSID, except the persistent ID is popped off the stack (instead
+      of being a string embedded in the opcode bytestream).  The persistent
+      ID is passed to self.persistent_load(), and whatever object that
+      returns is pushed on the stack.  See PERSID for more detail.
+      """),
+]
+del I
+
+# Verify uniqueness of .name and .code members.
+name2i = {}
+code2i = {}
+
+for i, d in enumerate(opcodes):
+    if d.name in name2i:
+        raise ValueError("repeated name %r at indices %d and %d" %
+                         (d.name, name2i[d.name], i))
+    if d.code in code2i:
+        raise ValueError("repeated code %r at indices %d and %d" %
+                         (d.code, code2i[d.code], i))
+
+    name2i[d.name] = i
+    code2i[d.code] = i
+
+del name2i, code2i, i, d
+
+##############################################################################
+# Build a code2op dict, mapping opcode characters to OpcodeInfo records.
+# Also ensure we've got the same stuff as pickle.py, although the
+# introspection here is dicey.
+
+code2op = {}
+for d in opcodes:
+    code2op[d.code] = d
+del d
+
+def assure_pickle_consistency(verbose=False):
+    import pickle, re
+
+    copy = code2op.copy()
+    for name in pickle.__all__:
+        if not re.match("[A-Z][A-Z0-9_]+$", name):
+            if verbose:
+                print "skipping %r: it doesn't look like an opcode name" % name
+            continue
+        picklecode = getattr(pickle, name)
+        if not isinstance(picklecode, str) or len(picklecode) != 1:
+            if verbose:
+                print ("skipping %r: value %r doesn't look like a pickle "
+                       "code" % (name, picklecode))
+            continue
+        if picklecode in copy:
+            if verbose:
+                print "checking name %r w/ code %r for consistency" % (
+                      name, picklecode)
+            d = copy[picklecode]
+            if d.name != name:
+                raise ValueError("for pickle code %r, pickle.py uses name %r "
+                                 "but we're using name %r" % (picklecode,
+                                                              name,
+                                                              d.name))
+            # Forget this one.  Any left over in copy at the end are a problem
+            # of a different kind.
+            del copy[picklecode]
+        else:
+            raise ValueError("pickle.py appears to have a pickle opcode with "
+                             "name %r and code %r, but we don't" %
+                             (name, picklecode))
+    if copy:
+        msg = ["we appear to have pickle opcodes that pickle.py doesn't have:"]
+        for code, d in copy.items():
+            msg.append("    name %r with code %r" % (d.name, code))
+        raise ValueError("\n".join(msg))
+
+# Run the consistency check once at import time, then remove the function:
+# it is a one-shot sanity check, not part of the module's public API.
+assure_pickle_consistency()
+del assure_pickle_consistency
+
+##############################################################################
+# A pickle opcode generator.
+
+def genops(pickle):
+    """Generate all the opcodes in a pickle.
+
+    'pickle' is a file-like object, or string, containing the pickle.
+
+    Each opcode in the pickle is generated, from the current pickle position,
+    stopping after a STOP opcode is delivered.  A triple is generated for
+    each opcode:
+
+        opcode, arg, pos
+
+    opcode is an OpcodeInfo record, describing the current opcode.
+
+    If the opcode has an argument embedded in the pickle, arg is its decoded
+    value, as a Python object.  If the opcode doesn't have an argument, arg
+    is None.
+
+    If the pickle has a tell() method, pos was the value of pickle.tell()
+    before reading the current opcode.  If the pickle is a string object,
+    it's wrapped in a StringIO object, and the latter's tell() result is
+    used.  Else (the pickle doesn't have a tell(), and it's not obvious how
+    to query its current position) pos is None.
+    """
+
+    import cStringIO as StringIO
+
+    if isinstance(pickle, str):
+        pickle = StringIO.StringIO(pickle)
+
+    if hasattr(pickle, "tell"):
+        getpos = pickle.tell
+    else:
+        getpos = lambda: None
+
+    while True:
+        pos = getpos()
+        code = pickle.read(1)
+        opcode = code2op.get(code)
+        if opcode is None:
+            if code == "":
+                raise ValueError("pickle exhausted before seeing STOP")
+            else:
+                raise ValueError("at position %s, opcode %r unknown" % (
+                                 pos is None and "<unknown>" or pos,
+                                 code))
+        if opcode.arg is None:
+            arg = None
+        else:
+            arg = opcode.arg.reader(pickle)
+        yield opcode, arg, pos
+        if code == '.':
+            assert opcode.name == 'STOP'
+            break
+
+##############################################################################
+# A symbolic pickle disassembler.
+
+def dis(pickle, out=None, memo=None, indentlevel=4):
+    """Produce a symbolic disassembly of a pickle.
+
+    'pickle' is a file-like object, or string, containing a (at least one)
+    pickle.  The pickle is disassembled from the current position, through
+    the first STOP opcode encountered.
+
+    Optional arg 'out' is a file-like object to which the disassembly is
+    printed.  It defaults to sys.stdout.
+
+    Optional arg 'memo' is a Python dict, used as the pickle's memo.  It
+    may be mutated by dis(), if the pickle contains PUT or BINPUT opcodes.
+    Passing the same memo object to another dis() call then allows disassembly
+    to proceed across multiple pickles that were all created by the same
+    pickler with the same memo.  Ordinarily you don't need to worry about this.
+
+    Optional arg indentlevel is the number of blanks by which to indent
+    a new MARK level.  It defaults to 4.
+
+    In addition to printing the disassembly, some sanity checks are made:
+
+    + All embedded opcode arguments "make sense".
+
+    + Explicit and implicit pop operations have enough items on the stack.
+
+    + When an opcode implicitly refers to a markobject, a markobject is
+      actually on the stack.
+
+    + A memo entry isn't referenced before it's defined.
+
+    + The markobject isn't stored in the memo.
+
+    + A memo entry isn't redefined.
+    """
+
+    # Most of the hair here is for sanity checks, but most of it is needed
+    # anyway to detect when a protocol 0 POP takes a MARK off the stack
+    # (which in turn is needed to indent MARK blocks correctly).
+
+    stack = []          # crude emulation of unpickler stack
+    if memo is None:
+        memo = {}       # crude emulation of unpickler memo
+    maxproto = -1       # max protocol number seen
+    markstack = []      # bytecode positions of MARK opcodes
+    indentchunk = ' ' * indentlevel
+    errormsg = None
+    for opcode, arg, pos in genops(pickle):
+        if pos is not None:
+            # Byte offset of the opcode; the trailing comma keeps the
+            # rest of the disassembly line on the same output line.
+            print >> out, "%5d:" % pos,
+
+        # Opcode character (repr'd, minus the quotes), indentation
+        # proportional to the current MARK nesting depth, then the
+        # symbolic opcode name.
+        line = "%-4s %s%s" % (repr(opcode.code)[1:-1],
+                              indentchunk * len(markstack),
+                              opcode.name)
+
+        maxproto = max(maxproto, opcode.proto)
+        before = opcode.stack_before    # don't mutate
+        after = opcode.stack_after      # don't mutate
+        numtopop = len(before)
+
+        # See whether a MARK should be popped.  Either the opcode's
+        # stack_before explicitly names markobject, or it's a protocol 0
+        # POP whose stack top happens to be a markobject.
+        markmsg = None
+        if markobject in before or (opcode.name == "POP" and
+                                    stack and
+                                    stack[-1] is markobject):
+            assert markobject not in after
+            if __debug__:
+                if markobject in before:
+                    assert before[-1] is stackslice
+            if markstack:
+                markpos = markstack.pop()
+                if markpos is None:
+                    markmsg = "(MARK at unknown opcode offset)"
+                else:
+                    markmsg = "(MARK at %d)" % markpos
+                # Pop everything at and after the topmost markobject.
+                while stack[-1] is not markobject:
+                    stack.pop()
+                stack.pop()
+                # Stop later code from popping too much.
+                try:
+                    numtopop = before.index(markobject)
+                except ValueError:
+                    assert opcode.name == "POP"
+                    numtopop = 0
+            else:
+                errormsg = markmsg = "no MARK exists on stack"
+
+        # Check for correct memo usage.
+        if opcode.name in ("PUT", "BINPUT", "LONG_BINPUT"):
+            assert arg is not None
+            if arg in memo:
+                errormsg = "memo key %r already defined" % arg
+            elif not stack:
+                errormsg = "stack is empty -- can't store into memo"
+            elif stack[-1] is markobject:
+                errormsg = "can't store markobject in the memo"
+            else:
+                memo[arg] = stack[-1]
+
+        elif opcode.name in ("GET", "BINGET", "LONG_BINGET"):
+            if arg in memo:
+                assert len(after) == 1
+                after = [memo[arg]]     # for better stack emulation
+            else:
+                errormsg = "memo key %r has never been stored into" % arg
+
+        if arg is not None or markmsg:
+            # make a mild effort to align arguments
+            line += ' ' * (10 - len(opcode.name))
+            if arg is not None:
+                line += ' ' + repr(arg)
+            if markmsg:
+                line += ' ' + markmsg
+        print >> out, line
+
+        if errormsg:
+            # Note that we delayed complaining until the offending opcode
+            # was printed.
+            raise ValueError(errormsg)
+
+        # Emulate the stack effects.
+        if len(stack) < numtopop:
+            raise ValueError("tries to pop %d items from stack with "
+                             "only %d items" % (numtopop, len(stack)))
+        if numtopop:
+            del stack[-numtopop:]
+        if markobject in after:
+            assert markobject not in before
+            markstack.append(pos)
+
+        stack.extend(after)
+
+    # After STOP, the emulated stack should be empty -- anything left over
+    # indicates a malformed pickle (or a bug in this emulation).
+    print >> out, "highest protocol among opcodes =", maxproto
+    if stack:
+        raise ValueError("stack not empty after STOP: %r" % stack)
+
+# Doctest text exercising dis() on protocol 0, 1, and 2 pickles, including
+# recursive objects and the INST/OBJ/BUILD family.  Run via the __test__
+# mapping defined later in this module.
+_dis_test = r"""
+>>> import pickle
+>>> x = [1, 2, (3, 4), {'abc': u"def"}]
+>>> pkl = pickle.dumps(x, 0)
+>>> dis(pkl)
+    0: (    MARK
+    1: l        LIST       (MARK at 0)
+    2: p    PUT        0
+    5: I    INT        1
+    8: a    APPEND
+    9: I    INT        2
+   12: a    APPEND
+   13: (    MARK
+   14: I        INT        3
+   17: I        INT        4
+   20: t        TUPLE      (MARK at 13)
+   21: p    PUT        1
+   24: a    APPEND
+   25: (    MARK
+   26: d        DICT       (MARK at 25)
+   27: p    PUT        2
+   30: S    STRING     'abc'
+   37: p    PUT        3
+   40: V    UNICODE    u'def'
+   45: p    PUT        4
+   48: s    SETITEM
+   49: a    APPEND
+   50: .    STOP
+highest protocol among opcodes = 0
+
+Try again with a "binary" pickle.
+
+>>> pkl = pickle.dumps(x, 1)
+>>> dis(pkl)
+    0: ]    EMPTY_LIST
+    1: q    BINPUT     0
+    3: (    MARK
+    4: K        BININT1    1
+    6: K        BININT1    2
+    8: (        MARK
+    9: K            BININT1    3
+   11: K            BININT1    4
+   13: t            TUPLE      (MARK at 8)
+   14: q        BINPUT     1
+   16: }        EMPTY_DICT
+   17: q        BINPUT     2
+   19: U        SHORT_BINSTRING 'abc'
+   24: q        BINPUT     3
+   26: X        BINUNICODE u'def'
+   34: q        BINPUT     4
+   36: s        SETITEM
+   37: e        APPENDS    (MARK at 3)
+   38: .    STOP
+highest protocol among opcodes = 1
+
+Exercise the INST/OBJ/BUILD family.
+
+>>> import random
+>>> dis(pickle.dumps(random.random, 0))
+    0: c    GLOBAL     'random random'
+   15: p    PUT        0
+   18: .    STOP
+highest protocol among opcodes = 0
+
+>>> x = [pickle.PicklingError()] * 2
+>>> dis(pickle.dumps(x, 0))
+    0: (    MARK
+    1: l        LIST       (MARK at 0)
+    2: p    PUT        0
+    5: (    MARK
+    6: i        INST       'pickle PicklingError' (MARK at 5)
+   28: p    PUT        1
+   31: (    MARK
+   32: d        DICT       (MARK at 31)
+   33: p    PUT        2
+   36: S    STRING     'args'
+   44: p    PUT        3
+   47: (    MARK
+   48: t        TUPLE      (MARK at 47)
+   49: s    SETITEM
+   50: b    BUILD
+   51: a    APPEND
+   52: g    GET        1
+   55: a    APPEND
+   56: .    STOP
+highest protocol among opcodes = 0
+
+>>> dis(pickle.dumps(x, 1))
+    0: ]    EMPTY_LIST
+    1: q    BINPUT     0
+    3: (    MARK
+    4: (        MARK
+    5: c            GLOBAL     'pickle PicklingError'
+   27: q            BINPUT     1
+   29: o            OBJ        (MARK at 4)
+   30: q        BINPUT     2
+   32: }        EMPTY_DICT
+   33: q        BINPUT     3
+   35: U        SHORT_BINSTRING 'args'
+   41: q        BINPUT     4
+   43: )        EMPTY_TUPLE
+   44: s        SETITEM
+   45: b        BUILD
+   46: h        BINGET     2
+   48: e        APPENDS    (MARK at 3)
+   49: .    STOP
+highest protocol among opcodes = 1
+
+Try "the canonical" recursive-object test.
+
+>>> L = []
+>>> T = L,
+>>> L.append(T)
+>>> L[0] is T
+True
+>>> T[0] is L
+True
+>>> L[0][0] is L
+True
+>>> T[0][0] is T
+True
+>>> dis(pickle.dumps(L, 0))
+    0: (    MARK
+    1: l        LIST       (MARK at 0)
+    2: p    PUT        0
+    5: (    MARK
+    6: g        GET        0
+    9: t        TUPLE      (MARK at 5)
+   10: p    PUT        1
+   13: a    APPEND
+   14: .    STOP
+highest protocol among opcodes = 0
+
+>>> dis(pickle.dumps(L, 1))
+    0: ]    EMPTY_LIST
+    1: q    BINPUT     0
+    3: (    MARK
+    4: h        BINGET     0
+    6: t        TUPLE      (MARK at 3)
+    7: q    BINPUT     1
+    9: a    APPEND
+   10: .    STOP
+highest protocol among opcodes = 1
+
+Note that, in the protocol 0 pickle of the recursive tuple, the disassembler
+has to emulate the stack in order to realize that the POP opcode at 16 gets
+rid of the MARK at 0.
+
+>>> dis(pickle.dumps(T, 0))
+    0: (    MARK
+    1: (        MARK
+    2: l            LIST       (MARK at 1)
+    3: p        PUT        0
+    6: (        MARK
+    7: g            GET        0
+   10: t            TUPLE      (MARK at 6)
+   11: p        PUT        1
+   14: a        APPEND
+   15: 0        POP
+   16: 0        POP        (MARK at 0)
+   17: g    GET        1
+   20: .    STOP
+highest protocol among opcodes = 0
+
+>>> dis(pickle.dumps(T, 1))
+    0: (    MARK
+    1: ]        EMPTY_LIST
+    2: q        BINPUT     0
+    4: (        MARK
+    5: h            BINGET     0
+    7: t            TUPLE      (MARK at 4)
+    8: q        BINPUT     1
+   10: a        APPEND
+   11: 1        POP_MARK   (MARK at 0)
+   12: h    BINGET     1
+   14: .    STOP
+highest protocol among opcodes = 1
+
+Try protocol 2.
+
+>>> dis(pickle.dumps(L, 2))
+    0: \x80 PROTO      2
+    2: ]    EMPTY_LIST
+    3: q    BINPUT     0
+    5: h    BINGET     0
+    7: \x85 TUPLE1
+    8: q    BINPUT     1
+   10: a    APPEND
+   11: .    STOP
+highest protocol among opcodes = 2
+
+>>> dis(pickle.dumps(T, 2))
+    0: \x80 PROTO      2
+    2: ]    EMPTY_LIST
+    3: q    BINPUT     0
+    5: h    BINGET     0
+    7: \x85 TUPLE1
+    8: q    BINPUT     1
+   10: a    APPEND
+   11: 0    POP
+   12: h    BINGET     1
+   14: .    STOP
+highest protocol among opcodes = 2
+"""
+
+# Doctest text verifying that a shared memo dict lets dis() disassemble
+# several pickles produced by a single Pickler (the second dump is just a
+# BINGET referencing the first).  Run via the __test__ mapping.
+_memo_test = r"""
+>>> import pickle
+>>> from StringIO import StringIO
+>>> f = StringIO()
+>>> p = pickle.Pickler(f, 2)
+>>> x = [1, 2, 3]
+>>> p.dump(x)
+>>> p.dump(x)
+>>> f.seek(0)
+>>> memo = {}
+>>> dis(f, memo=memo)
+    0: \x80 PROTO      2
+    2: ]    EMPTY_LIST
+    3: q    BINPUT     0
+    5: (    MARK
+    6: K        BININT1    1
+    8: K        BININT1    2
+   10: K        BININT1    3
+   12: e        APPENDS    (MARK at 5)
+   13: .    STOP
+highest protocol among opcodes = 2
+>>> dis(f, memo=memo)
+   14: \x80 PROTO      2
+   16: h    BINGET     0
+   18: .    STOP
+highest protocol among opcodes = 2
+"""
+
+# Register the doctest strings so doctest.testmod() picks them up.
+__test__ = {'disassembler_test': _dis_test,
+            'disassembler_memo_test': _memo_test,
+           }
+
+def _test():
+    import doctest
+    return doctest.testmod()
+
+# When executed as a script, exercise the doctests.
+if __name__ == "__main__":
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/pipes.py b/depot_tools/release/win/python_24/Lib/pipes.py
new file mode 100644
index 0000000..295d9c8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pipes.py
@@ -0,0 +1,298 @@
+"""Conversion pipeline templates.
+
+The problem:
+------------
+
+Suppose you have some data that you want to convert to another format,
+such as from GIF image format to PPM image format.  Maybe the
+conversion involves several steps (e.g. piping it through compress or
+uuencode).  Some of the conversion steps may require that their input
+is a disk file, others may be able to read standard input; similar for
+their output.  The input to the entire conversion may also be read
+from a disk file or from an open file, and similar for its output.
+
+The module lets you construct a pipeline template by sticking one or
+more conversion steps together.  It will take care of creating and
+removing temporary files if they are necessary to hold intermediate
+data.  You can then use the template to do conversions from many
+different sources to many different destinations.  The temporary
+file names used are different each time the template is used.
+
+The templates are objects so you can create templates for many
+different conversion steps and store them in a dictionary, for
+instance.
+
+
+Directions:
+-----------
+
+To create a template:
+    t = Template()
+
+To add a conversion step to a template:
+   t.append(command, kind)
+where kind is a string of two characters: the first is '-' if the
+command reads its standard input or 'f' if it requires a file; the
+second likewise for the output. The command must be valid /bin/sh
+syntax.  If input or output files are required, they are passed as
+$IN and $OUT; otherwise, it must be possible to use the command in
+a pipeline.
+
+To add a conversion step at the beginning:
+   t.prepend(command, kind)
+
+To convert a file to another file using a template:
+  sts = t.copy(infile, outfile)
+If infile or outfile are the empty string, standard input is read or
+standard output is written, respectively.  The return value is the
+exit status of the conversion pipeline.
+
+To open a file for reading or writing through a conversion pipeline:
+   fp = t.open(file, mode)
+where mode is 'r' to read the file, or 'w' to write it -- just like
+for the built-in function open() or for os.popen().
+
+To create a new template object initialized to a given one:
+   t2 = t.clone()
+
+For an example, see the function test() at the end of the file.
+"""                                     # '
+
+
+import re
+
+import os
+import tempfile
+import string
+
+__all__ = ["Template"]
+
+# Conversion step kinds
+#
+# Each kind is a two-character string: character 0 describes the step's
+# input ('f' = needs a real file, '-' = reads stdin, '.' = no input), and
+# character 1 likewise describes its output.
+
+FILEIN_FILEOUT = 'ff'                   # Must read & write real files
+STDIN_FILEOUT  = '-f'                   # Must write a real file
+FILEIN_STDOUT  = 'f-'                   # Must read a real file
+STDIN_STDOUT   = '--'                   # Normal pipeline element
+SOURCE         = '.-'                   # Must be first, writes stdout
+SINK           = '-.'                   # Must be last, reads stdin
+
+# All kinds accepted by Template.append()/prepend().
+stepkinds = [FILEIN_FILEOUT, STDIN_FILEOUT, FILEIN_STDOUT, STDIN_STDOUT, \
+             SOURCE, SINK]
+
+
+class Template:
+    """Class representing a pipeline template."""
+
+    def __init__(self):
+        """Template() returns a fresh pipeline template."""
+        self.debugging = 0
+        self.reset()
+
+    def __repr__(self):
+        """t.__repr__() implements repr(t)."""
+        return '<Template instance, steps=%r>' % (self.steps,)
+
+    def reset(self):
+        """t.reset() restores a pipeline template to its initial state."""
+        # steps is a list of (cmd, kind) pairs.
+        self.steps = []
+
+    def clone(self):
+        """t.clone() returns a new pipeline template with identical
+        initial state as the current one."""
+        t = Template()
+        # Shallow-copy the step list so later append/prepend on either
+        # template does not affect the other.
+        t.steps = self.steps[:]
+        t.debugging = self.debugging
+        return t
+
+    def debug(self, flag):
+        """t.debug(flag) turns debugging on or off."""
+        self.debugging = flag
+
+    def append(self, cmd, kind):
+        """t.append(cmd, kind) adds a new step at the end.
+
+        Validates that cmd is a string, kind is a known step kind, the
+        pipeline is still open at the back, and cmd references $IN/$OUT
+        whenever kind requires real files.  Raises TypeError/ValueError.
+        """
+        if type(cmd) is not type(''):
+            raise TypeError, \
+                  'Template.append: cmd must be a string'
+        if kind not in stepkinds:
+            raise ValueError, \
+                  'Template.append: bad kind %r' % (kind,)
+        if kind == SOURCE:
+            raise ValueError, \
+                  'Template.append: SOURCE can only be prepended'
+        if self.steps and self.steps[-1][1] == SINK:
+            raise ValueError, \
+                  'Template.append: already ends with SINK'
+        if kind[0] == 'f' and not re.search(r'\$IN\b', cmd):
+            raise ValueError, \
+                  'Template.append: missing $IN in cmd'
+        if kind[1] == 'f' and not re.search(r'\$OUT\b', cmd):
+            raise ValueError, \
+                  'Template.append: missing $OUT in cmd'
+        self.steps.append((cmd, kind))
+
+    def prepend(self, cmd, kind):
+        """t.prepend(cmd, kind) adds a new step at the front.
+
+        Mirror image of append(): same validation, but rejects SINK and an
+        already-present leading SOURCE instead.
+        """
+        if type(cmd) is not type(''):
+            raise TypeError, \
+                  'Template.prepend: cmd must be a string'
+        if kind not in stepkinds:
+            raise ValueError, \
+                  'Template.prepend: bad kind %r' % (kind,)
+        if kind == SINK:
+            raise ValueError, \
+                  'Template.prepend: SINK can only be appended'
+        if self.steps and self.steps[0][1] == SOURCE:
+            raise ValueError, \
+                  'Template.prepend: already begins with SOURCE'
+        if kind[0] == 'f' and not re.search(r'\$IN\b', cmd):
+            raise ValueError, \
+                  'Template.prepend: missing $IN in cmd'
+        if kind[1] == 'f' and not re.search(r'\$OUT\b', cmd):
+            raise ValueError, \
+                  'Template.prepend: missing $OUT in cmd'
+        self.steps.insert(0, (cmd, kind))
+
+    def open(self, file, rw):
+        """t.open(file, rw) returns a pipe or file object open for
+        reading or writing; the file is the other end of the pipeline."""
+        if rw == 'r':
+            return self.open_r(file)
+        if rw == 'w':
+            return self.open_w(file)
+        raise ValueError, \
+              'Template.open: rw must be \'r\' or \'w\', not %r' % (rw,)
+
+    def open_r(self, file):
+        """t.open_r(file) and t.open_w(file) implement
+        t.open(file, 'r') and t.open(file, 'w') respectively."""
+        # With no steps the "pipeline" degenerates to opening the file.
+        if not self.steps:
+            return open(file, 'r')
+        if self.steps[-1][1] == SINK:
+            # NOTE(review): "width" below is an upstream CPython typo for
+            # "with"; the message is runtime behavior, so it is left as-is.
+            raise ValueError, \
+                  'Template.open_r: pipeline ends width SINK'
+        cmd = self.makepipeline(file, '')
+        return os.popen(cmd, 'r')
+
+    def open_w(self, file):
+        # Write-side counterpart of open_r(); see its docstring.
+        if not self.steps:
+            return open(file, 'w')
+        if self.steps[0][1] == SOURCE:
+            raise ValueError, \
+                  'Template.open_w: pipeline begins with SOURCE'
+        cmd = self.makepipeline('', file)
+        return os.popen(cmd, 'w')
+
+    def copy(self, infile, outfile):
+        # Run the whole pipeline synchronously; returns the os.system()
+        # exit status.
+        return os.system(self.makepipeline(infile, outfile))
+
+    def makepipeline(self, infile, outfile):
+        # Build the shell command for this template's steps.
+        cmd = makepipeline(infile, self.steps, outfile)
+        if self.debugging:
+            print cmd
+            # 'set -x' makes the shell echo each command as it executes.
+            cmd = 'set -x; ' + cmd
+        return cmd
+
+
+def makepipeline(infile, steps, outfile):
+    """Return a /bin/sh command string realizing *steps* as a pipeline
+    from *infile* to *outfile* ('' means stdin/stdout respectively).
+
+    Inserts 'cat' steps where a file-reading/-writing step meets a
+    stream end, connects file-requiring neighbours with temporary files,
+    and wraps the result in a trap/rm cleanup when temp files are made.
+    """
+    # NOTE(review): the local names 'list' and (below) 'file' shadow
+    # builtins; kept as-is to stay byte-identical to upstream CPython 2.4.
+    # Build a list with for each command:
+    # [input filename or '', command string, kind, output filename or '']
+
+    list = []
+    for cmd, kind in steps:
+        list.append(['', cmd, kind, ''])
+    #
+    # Make sure there is at least one step
+    #
+    if not list:
+        list.append(['', 'cat', '--', ''])
+    #
+    # Take care of the input and output ends
+    #
+    [cmd, kind] = list[0][1:3]
+    if kind[0] == 'f' and not infile:
+        list.insert(0, ['', 'cat', '--', ''])
+    list[0][0] = infile
+    #
+    [cmd, kind] = list[-1][1:3]
+    if kind[1] == 'f' and not outfile:
+        list.append(['', 'cat', '--', ''])
+    list[-1][-1] = outfile
+    #
+    # Invent temporary files to connect stages that need files
+    #
+    garbage = []
+    for i in range(1, len(list)):
+        lkind = list[i-1][2]
+        rkind = list[i][2]
+        if lkind[1] == 'f' or rkind[0] == 'f':
+            (fd, temp) = tempfile.mkstemp()
+            os.close(fd)
+            garbage.append(temp)
+            list[i-1][-1] = list[i][0] = temp
+    #
+    # Turn each step into a shell command: $IN/$OUT assignments for file
+    # steps, </> redirections for stream steps with a bound file.
+    for item in list:
+        [inf, cmd, kind, outf] = item
+        if kind[1] == 'f':
+            cmd = 'OUT=' + quote(outf) + '; ' + cmd
+        if kind[0] == 'f':
+            cmd = 'IN=' + quote(inf) + '; ' + cmd
+        if kind[0] == '-' and inf:
+            cmd = cmd + ' <' + quote(inf)
+        if kind[1] == '-' and outf:
+            cmd = cmd + ' >' + quote(outf)
+        item[1] = cmd
+    #
+    # Join the steps: '|' when the next step reads the previous one's
+    # stdout, plain newline (sequential) when a temp file connects them.
+    cmdlist = list[0][1]
+    for item in list[1:]:
+        [cmd, kind] = item[1:3]
+        if item[0] == '':
+            if 'f' in kind:
+                # Group so the $IN/$OUT assignment stays with its command.
+                cmd = '{ ' + cmd + '; }'
+            cmdlist = cmdlist + ' |\n' + cmd
+        else:
+            cmdlist = cmdlist + '\n' + cmd
+    #
+    # Remove the temp files on exit and on common fatal signals.
+    if garbage:
+        rmcmd = 'rm -f'
+        for file in garbage:
+            rmcmd = rmcmd + ' ' + quote(file)
+        trapcmd = 'trap ' + quote(rmcmd + '; exit') + ' 1 2 3 13 14 15'
+        cmdlist = trapcmd + '\n' + cmdlist + '\n' + rmcmd
+    #
+    return cmdlist
+
+
+# Reliably quote a string as a single argument for /bin/sh
+
+_safechars = string.ascii_letters + string.digits + '!@%_-+=:,./' # Safe unquoted
+_funnychars = '"`$\\'                           # Unsafe inside "double quotes"
+
+def quote(file):
+    """Return *file* quoted so /bin/sh treats it as a single argument."""
+    # for/else: the else runs only when no unsafe character was found,
+    # in which case the string needs no quoting at all.
+    for c in file:
+        if c not in _safechars:
+            break
+    else:
+        return file
+    # No embedded single quote: single-quoting is safe and simplest.
+    if '\'' not in file:
+        return '\'' + file + '\''
+    # Fall back to double quotes, backslash-escaping the characters that
+    # remain special inside "..." (see _funnychars).
+    res = ''
+    for c in file:
+        if c in _funnychars:
+            c = '\\' + c
+        res = res + c
+    return '"' + res + '"'
+
+
+# Small test program and example
+
+def test():
+    # Demonstration only: builds a four-step GIF round-trip pipeline,
+    # enables debug echoing, and runs it on a hard-coded input path
+    # (so it is only meaningful on a machine that has that file and
+    # the togif/giftoppm/ppmtogif/fromgif tools).
+    print 'Testing...'
+    t = Template()
+    t.append('togif $IN $OUT', 'ff')
+    t.append('giftoppm', '--')
+    t.append('ppmtogif >$OUT', '-f')
+    t.append('fromgif $IN $OUT', 'ff')
+    t.debug(1)
+    FILE = '/usr/local/images/rgb/rogues/guido.rgb'
+    t.copy(FILE, '@temp')
+    print 'Done.'
diff --git a/depot_tools/release/win/python_24/Lib/pkgutil.py b/depot_tools/release/win/python_24/Lib/pkgutil.py
new file mode 100644
index 0000000..fbd708ac
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pkgutil.py
@@ -0,0 +1,78 @@
+"""Utilities to support packages."""
+
+import os
+import sys
+
+def extend_path(path, name):
+    """Extend a package's path.
+
+    Intended use is to place the following code in a package's __init__.py:
+
+        from pkgutil import extend_path
+        __path__ = extend_path(__path__, __name__)
+
+    This will add to the package's __path__ all subdirectories of
+    directories on sys.path named after the package.  This is useful
+    if one wants to distribute different parts of a single logical
+    package as multiple directories.
+
+    It also looks for *.pkg files beginning where * matches the name
+    argument.  This feature is similar to *.pth files (see site.py),
+    except that it doesn't special-case lines starting with 'import'.
+    A *.pkg file is trusted at face value: apart from checking for
+    duplicates, all entries found in a *.pkg file are added to the
+    path, regardless of whether they exist on the filesystem.  (This
+    is a feature.)
+
+    If the input path is not a list (as is the case for frozen
+    packages) it is returned unchanged.  The input path is not
+    modified; an extended copy is returned.  Items are only appended
+    to the copy at the end.
+
+    It is assumed that sys.path is a sequence.  Items of sys.path that
+    are not (unicode or 8-bit) strings referring to existing
+    directories are ignored.  Unicode items of sys.path that cause
+    errors when used as filenames may cause this function to raise an
+    exception (in line with os.path.isdir() behavior).
+    """
+
+    if not isinstance(path, list):
+        # This could happen e.g. when this is called from inside a
+        # frozen package.  Return the path unchanged in that case.
+        return path
+
+    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
+    # Just in case os.extsep != '.'
+    sname = os.extsep.join(name.split('.'))
+    sname_pkg = sname + os.extsep + "pkg"
+    init_py = "__init__" + os.extsep + "py"
+
+    path = path[:] # Start with a copy of the existing path
+
+    # NOTE(review): 'dir' below shadows the builtin; harmless here.
+    for dir in sys.path:
+        if not isinstance(dir, basestring) or not os.path.isdir(dir):
+            continue
+        subdir = os.path.join(dir, pname)
+        # XXX This may still add duplicate entries to path on
+        # case-insensitive filesystems
+        initfile = os.path.join(subdir, init_py)
+        if subdir not in path and os.path.isfile(initfile):
+            path.append(subdir)
+        # XXX Is this the right thing for subpackages like zope.app?
+        # It looks for a file named "zope.app.pkg"
+        pkgfile = os.path.join(dir, sname_pkg)
+        if os.path.isfile(pkgfile):
+            try:
+                f = open(pkgfile)
+            except IOError, msg:
+                # Unreadable .pkg file: warn on stderr but keep scanning.
+                sys.stderr.write("Can't open %s: %s\n" %
+                                 (pkgfile, msg))
+            else:
+                for line in f:
+                    line = line.rstrip('\n')
+                    # Skip blanks and '#' comment lines.
+                    if not line or line.startswith('#'):
+                        continue
+                    path.append(line) # Don't check for existence!
+                f.close()
+
+    return path
diff --git a/depot_tools/release/win/python_24/Lib/platform.py b/depot_tools/release/win/python_24/Lib/platform.py
new file mode 100644
index 0000000..372fe921
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/platform.py
@@ -0,0 +1,1248 @@
+#!/usr/bin/env python
+
+""" This module tries to retrieve as much platform-identifying data as
+    possible. It makes this information available via function APIs.
+
+    If called from the command line, it prints the platform
+    information concatenated as single string to stdout. The output
+    format is useable as part of a filename.
+
+"""
+#    This module is maintained by Marc-Andre Lemburg <mal@egenix.com>.
+#    If you find problems, please submit bug reports/patches via the
+#    Python SourceForge Project Page and assign them to "lemburg".
+#
+#    Note: Please keep this module compatible to Python 1.5.2.
+#
+#    Still needed:
+#    * more support for WinCE
+#    * support for MS-DOS (PythonDX ?)
+#    * support for Amiga and other still unsupported platforms running Python
+#    * support for additional Linux distributions
+#
+#    Many thanks to all those who helped adding platform-specific
+#    checks (in no particular order):
+#
+#      Charles G Waldman, David Arnold, Gordon McMillan, Ben Darnell,
+#      Jeff Bauer, Cliff Crawford, Ivan Van Laningham, Josef
+#      Betancourt, Randall Hopper, Karl Putland, John Farrell, Greg
+#      Andruk, Just van Rossum, Thomas Heller, Mark R. Levinson, Mark
+#      Hammond, Bill Tutt, Hans Nowak, Uwe Zessin (OpenVMS support),
+#      Colin Kong, Trent Mick, Guido van Rossum
+#
+#    History:
+#    1.0.3 - added normalization of Windows system name
+#    1.0.2 - added more Windows support
+#    1.0.1 - reformatted to make doc.py happy
+#    1.0.0 - reformatted a bit and checked into Python CVS
+#    0.8.0 - added sys.version parser and various new access
+#            APIs (python_version(), python_compiler(), etc.)
+#    0.7.2 - fixed architecture() to use sizeof(pointer) where available
+#    0.7.1 - added support for Caldera OpenLinux
+#    0.7.0 - some fixes for WinCE; untabified the source file
+#    0.6.2 - support for OpenVMS - requires version 1.5.2-V006 or higher and
+#            vms_lib.getsyi() configured
+#    0.6.1 - added code to prevent 'uname -p' on platforms which are
+#            known not to support it
+#    0.6.0 - fixed win32_ver() to hopefully work on Win95,98,NT and Win2k;
+#            did some cleanup of the interfaces - some APIs have changed
+#    0.5.5 - fixed another typo in the MacOS code... should have
+#            used more coffee today ;-)
+#    0.5.4 - fixed a few typos in the MacOS code
+#    0.5.3 - added experimental MacOS support; added better popen()
+#            workarounds in _syscmd_ver() -- still not 100% elegant
+#            though
+#    0.5.2 - fixed uname() to return '' instead of 'unknown' in all
+#            return values (the system uname command tends to return
+#            'unknown' instead of just leaving the field empty)
+#    0.5.1 - included code for slackware dist; added exception handlers
+#            to cover up situations where platforms don't have os.popen
+#            (e.g. Mac) or fail on socket.gethostname(); fixed libc
+#            detection RE
+#    0.5.0 - changed the API names referring to system commands to *syscmd*;
+#            added java_ver(); made syscmd_ver() a private
+#            API (was system_ver() in previous versions) -- use uname()
+#            instead; extended the win32_ver() to also return processor
+#            type information
+#    0.4.0 - added win32_ver() and modified the platform() output for WinXX
+#    0.3.4 - fixed a bug in _follow_symlinks()
+#    0.3.3 - fixed popen() and "file" command invocation bugs
+#    0.3.2 - added architecture() API and support for it in platform()
+#    0.3.1 - fixed syscmd_ver() RE to support Windows NT
+#    0.3.0 - added system alias support
+#    0.2.3 - removed 'wince' again... oh well.
+#    0.2.2 - added 'wince' to syscmd_ver() supported platforms
+#    0.2.1 - added cache logic and changed the platform string format
+#    0.2.0 - changed the API to use functions instead of module globals
+#            since some action take too long to be run on module import
+#    0.1.0 - first release
+#
+#    You can always get the latest version of this module at:
+#
+#             http://www.egenix.com/files/python/platform.py
+#
+#    If that URL should fail, try contacting the author.
+
+__copyright__ = """
+    Copyright (c) 1999-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
+    Copyright (c) 2000-2003, eGenix.com Software GmbH; mailto:info@egenix.com
+
+    Permission to use, copy, modify, and distribute this software and its
+    documentation for any purpose and without fee or royalty is hereby granted,
+    provided that the above copyright notice appear in all copies and that
+    both that copyright notice and this permission notice appear in
+    supporting documentation or portions thereof, including modifications,
+    that you make.
+
+    EGENIX.COM SOFTWARE GMBH DISCLAIMS ALL WARRANTIES WITH REGARD TO
+    THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+    FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
+    INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
+    FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+    NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+    WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
+
+"""
+
+__version__ = '1.0.2'
+
+import sys,string,os,re
+
+### Platform specific APIs
+
+# Markers scanned for by libc_ver() in the executable's binary image.
+# Alternatives (group layout): (__libc_init) | (GLIBC_<version>) |
+# (libc[_<threads>].so[.<soversion>]).
+_libc_search = re.compile(r'(__libc_init)'
+                          '|'
+                          '(GLIBC_([0-9.]+))'
+                          '|'
+                          '(libc(_\w+)?\.so(?:\.(\d[0-9.]*))?)')
+
+def libc_ver(executable=sys.executable,lib='',version='',
+
+             chunksize=2048):
+
+    """ Tries to determine the libc version that the file executable
+        (which defaults to the Python interpreter) is linked against.
+
+        Returns a tuple of strings (lib,version) which default to the
+        given parameters in case the lookup fails.
+
+        Note that the function has intimate knowledge of how different
+        libc versions add symbols to the executable and thus is probably
+        only useable for executables compiled using gcc.
+
+        The file is read and scanned in chunks of chunksize bytes.
+
+    """
+    f = open(executable,'rb')
+    binary = f.read(chunksize)
+    pos = 0
+    # NOTE(review): a marker split across a chunk boundary is missed --
+    # each fresh chunk is rescanned from pos 0 (upstream behavior).
+    while 1:
+        m = _libc_search.search(binary,pos)
+        if not m:
+            binary = f.read(chunksize)
+            if not binary:
+                break
+            pos = 0
+            continue
+        libcinit,glibc,glibcversion,so,threads,soversion = m.groups()
+        if libcinit and not lib:
+            lib = 'libc'
+        elif glibc:
+            # GLIBC markers win over plain libc.so ones; keep the highest
+            # version seen.  NOTE(review): versions compare as strings,
+            # which only works for simple dotted forms.
+            if lib != 'glibc':
+                lib = 'glibc'
+                version = glibcversion
+            elif glibcversion > version:
+                version = glibcversion
+        elif so:
+            if lib != 'glibc':
+                lib = 'libc'
+                if soversion > version:
+                    version = soversion
+                if threads and version[-len(threads):] != threads:
+                    # Append the threads suffix (e.g. '_pthreads') once.
+                    version = version + threads
+        pos = m.end()
+    f.close()
+    return lib,version
+
+def _dist_try_harder(distname,version,id):
+
+    """ Tries some special tricks to get the distribution
+        information in case the default method fails.
+
+        Currently supports older SuSE Linux, Caldera OpenLinux and
+        Slackware Linux distributions.
+
+    """
+    if os.path.exists('/var/adm/inst-log/info'):
+        # SuSE Linux stores distribution information in that file
+        info = open('/var/adm/inst-log/info').readlines()
+        distname = 'SuSE'
+        for line in info:
+            # Lines are "TAG value" pairs; skip anything else.
+            tv = string.split(line)
+            if len(tv) == 2:
+                tag,value = tv
+            else:
+                continue
+            if tag == 'MIN_DIST_VERSION':
+                version = string.strip(value)
+            elif tag == 'DIST_IDENT':
+                values = string.split(value,'-')
+                id = values[2]
+        return distname,version,id
+
+    if os.path.exists('/etc/.installed'):
+        # Caldera OpenLinux has some infos in that file (thanks to Colin Kong)
+        info = open('/etc/.installed').readlines()
+        for line in info:
+            pkg = string.split(line,'-')
+            if len(pkg) >= 2 and pkg[0] == 'OpenLinux':
+                # XXX does Caldera support non Intel platforms ? If yes,
+                #     where can we find the needed id ?
+                return 'OpenLinux',pkg[1],id
+
+    if os.path.isdir('/usr/lib/setup'):
+        # Check for slackware version tag file (thanks to Greg Andruk)
+        verfiles = os.listdir('/usr/lib/setup')
+        # Iterate backwards so deleting entries doesn't skip any.
+        for n in range(len(verfiles)-1, -1, -1):
+            if verfiles[n][:14] != 'slack-version-':
+                del verfiles[n]
+        if verfiles:
+            verfiles.sort()
+            distname = 'slackware'
+            # Highest-sorting file wins; its suffix is the version.
+            version = verfiles[-1][14:]
+            return distname,version,id
+
+    # Nothing matched: hand back the caller's defaults.
+    return distname,version,id
+
+# Filenames like "redhat-release" / "SuSE_version" mark a distro release
+# file in /etc; _release_version then extracts "<version> (<id>)" from its
+# first line.  Both are used by dist() below.
+_release_filename = re.compile(r'(\w+)[-_](release|version)')
+_release_version = re.compile(r'([\d.]+)[^(]*(?:\((.+)\))?')
+
+def dist(distname='',version='',id='',
+
+         supported_dists=('SuSE','debian','redhat','mandrake')):
+
+    """ Tries to determine the name of the Linux OS distribution.
+
+        The function first looks for a distribution release file in
+        /etc and then reverts to _dist_try_harder() in case no
+        suitable files are found.
+
+        Returns a tuple (distname,version,id) which default to the
+        args given as parameters.
+
+    """
+    try:
+        etc = os.listdir('/etc')
+    except os.error:
+        # Probably not a Unix system
+        return distname,version,id
+    for file in etc:
+        m = _release_filename.match(file)
+        if m:
+            _distname,dummy = m.groups()
+            if _distname in supported_dists:
+                distname = _distname
+                break
+    else:
+        # for/else: no known release file found -> try the heuristics.
+        return _dist_try_harder(distname,version,id)
+    # Parse "<version> (<id>)" out of the release file's first line.
+    f = open('/etc/'+file,'r')
+    firstline = f.readline()
+    f.close()
+    m = _release_version.search(firstline)
+    if m:
+        _version,_id = m.groups()
+        if _version:
+            version = _version
+        if _id:
+            id = _id
+    else:
+        # Unknown format... take the first two words
+        l = string.split(string.strip(firstline))
+        if l:
+            version = l[0]
+            if len(l) > 1:
+                id = l[1]
+    return distname,version,id
+
+class _popen:
+
+    """ Fairly portable (alternative) popen implementation.
+
+        This is mostly needed in case os.popen() is not available, or
+        doesn't work as advertised, e.g. in Win9X GUI programs like
+        PythonWin or IDLE.
+
+        Writing to the pipe is currently not supported.
+
+    """
+    # Class-level defaults; overwritten per instance in __init__.
+    tmpfile = ''
+    pipe = None
+    bufsize = None
+    mode = 'r'
+
+    def __init__(self,cmd,mode='r',bufsize=None):
+
+        if mode != 'r':
+            raise ValueError,'popen()-emulation only supports read mode'
+        import tempfile
+        # NOTE(review): tempfile.mktemp() is race-prone; mkstemp() would
+        # be safer, but this matches upstream Python 2.4.
+        self.tmpfile = tmpfile = tempfile.mktemp()
+        # Run the command synchronously, capturing stdout in the temp file.
+        os.system(cmd + ' > %s' % tmpfile)
+        self.pipe = open(tmpfile,'rb')
+        self.bufsize = bufsize
+        self.mode = mode
+
+    def read(self):
+
+        return self.pipe.read()
+
+    def readlines(self):
+
+        # NOTE(review): returns None when bufsize is None -- looks like an
+        # upstream bug; callers in this module only use read().
+        if self.bufsize is not None:
+            return self.pipe.readlines()
+
+    def close(self,
+
+              remove=os.unlink,error=os.error):
+
+        # remove/error are bound as defaults, presumably so they stay
+        # available when this runs as __del__ during interpreter teardown.
+        if self.pipe:
+            rc = self.pipe.close()
+        else:
+            rc = 255
+        if self.tmpfile:
+            try:
+                remove(self.tmpfile)
+            except error:
+                pass
+        return rc
+
+    # Alias
+    __del__ = close
+
+def popen(cmd, mode='r', bufsize=None):
+
+    """ Portable popen() interface.
+    """
+    # Find a working popen implementation preferring win32pipe.popen
+    # over os.popen over _popen
+    popen = None
+    if os.environ.get('OS','') == 'Windows_NT':
+        # On NT win32pipe should work; on Win9x it hangs due to bugs
+        # in the MS C lib (see MS KnowledgeBase article Q150956)
+        try:
+            import win32pipe
+        except ImportError:
+            pass
+        else:
+            popen = win32pipe.popen
+    if popen is None:
+        if hasattr(os,'popen'):
+            popen = os.popen
+            # Check whether it works... it doesn't in GUI programs
+            # on Windows platforms
+            if sys.platform == 'win32': # XXX Others too ?
+                try:
+                    # Probe with an empty command; os.error here means
+                    # os.popen is unusable, so fall back to _popen.
+                    popen('')
+                except os.error:
+                    popen = _popen
+        else:
+            popen = _popen
+    if bufsize is None:
+        return popen(cmd,mode)
+    else:
+        return popen(cmd,mode,bufsize)
+
+def _norm_version(version,build=''):
+
+    """ Normalize the version and build strings and return a single
+        version string using the format major.minor.build (or patchlevel).
+    """
+    l = string.split(version,'.')
+    if build:
+        l.append(build)
+    try:
+        # Round-trip through int to drop leading zeros; leave the parts
+        # untouched if any of them is non-numeric.
+        ints = map(int,l)
+    except ValueError:
+        strings = l
+    else:
+        strings = map(str,ints)
+    # Keep at most the first three components.
+    version = string.join(strings[:3],'.')
+    return version
+
+# Parses the output of the "ver" shell command; groups are
+# (system, release, version) as unpacked by _syscmd_ver() below.
+_ver_output = re.compile(r'(?:([\w ]+) ([\w.]+) '
+                         '.*'
+                         'Version ([\d.]+))')
+
+def _syscmd_ver(system='',release='',version='',
+
+               supported_platforms=('win32','win16','dos','os2')):
+
+    """ Tries to figure out the OS version used and returns
+        a tuple (system,release,version).
+
+        It uses the "ver" shell command for this which is known
+        to exist on Windows, DOS and OS/2. XXX Others too ?
+
+        In case this fails, the given parameters are used as
+        defaults.
+
+    """
+    if sys.platform not in supported_platforms:
+        return system,release,version
+
+    # Try some common cmd strings
+    for cmd in ('ver','command /c ver','cmd /c ver'):
+        try:
+            pipe = popen(cmd)
+            info = pipe.read()
+            if pipe.close():
+                # Non-zero/true close() status means the command failed.
+                raise os.error,'command failed'
+            # XXX How can I suppress shell errors from being written
+            #     to stderr ?
+        except os.error,why:
+            #print 'Command %s failed: %s' % (cmd,why)
+            continue
+        except IOError,why:
+            #print 'Command %s failed: %s' % (cmd,why)
+            continue
+        else:
+            break
+    else:
+        # for/else: every candidate command failed.
+        return system,release,version
+
+    # Parse the output
+    info = string.strip(info)
+    m = _ver_output.match(info)
+    if m:
+        system,release,version = m.groups()
+        # Strip trailing dots from version and release
+        if release[-1] == '.':
+            release = release[:-1]
+        if version[-1] == '.':
+            version = version[:-1]
+        # Normalize the version and build strings (eliminating additional
+        # zeros)
+        version = _norm_version(version)
+    return system,release,version
+
+def _win32_getvalue(key,name,default=''):
+
+    """ Read a value for name from the registry key.
+
+        In case this fails, default is returned.
+
+    """
+    from win32api import RegQueryValueEx
+    try:
+        return RegQueryValueEx(key,name)
+    except:
+        # NOTE(review): bare except -- any failure (missing value, closed
+        # key, ...) silently yields the default.
+        return default
+
+def win32_ver(release='',version='',csd='',ptype=''):
+
+    """ Get additional version information from the Windows Registry
+        and return a tuple (version,csd,ptype) referring to version
+        number, CSD level and OS type (multi/single
+        processor).
+
+        As a hint: ptype returns 'Uniprocessor Free' on single
+        processor NT machines and 'Multiprocessor Free' on multi
+        processor machines. The 'Free' refers to the OS version being
+        free of debugging code. It could also state 'Checked' which
+        means the OS version uses debugging code, i.e. code that
+        checks arguments, ranges, etc. (Thomas Heller).
+
+        Note: this function only works if Mark Hammond's win32
+        package is installed and obviously only runs on Win32
+        compatible platforms.
+
+    """
+    # XXX Is there any way to find out the processor type on WinXX ?
+    # XXX Is win32 available on Windows CE ?
+    #
+    # Adapted from code posted by Karl Putland to comp.lang.python.
+    #
+    # The mappings between reg. values and release names can be found
+    # here: http://msdn.microsoft.com/library/en-us/sysinfo/base/osversioninfo_str.asp
+
+    # Import the needed APIs
+    try:
+        import win32api
+    except ImportError:
+        # win32 extensions not installed: return the caller's defaults.
+        return release,version,csd,ptype
+    from win32api import RegQueryValueEx,RegOpenKeyEx,RegCloseKey,GetVersionEx
+    from win32con import HKEY_LOCAL_MACHINE,VER_PLATFORM_WIN32_NT,\
+                         VER_PLATFORM_WIN32_WINDOWS
+
+    # Find out the registry key and some general version infos
+    maj,min,buildno,plat,csd = GetVersionEx()
+    version = '%i.%i.%i' % (maj,min,buildno & 0xFFFF)
+    if csd[:13] == 'Service Pack ':
+        # Abbreviate e.g. 'Service Pack 2' to 'SP2'.
+        csd = 'SP' + csd[13:]
+    if plat == VER_PLATFORM_WIN32_WINDOWS:
+        # Win9x/Me family.
+        regkey = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion'
+        # Try to guess the release name
+        if maj == 4:
+            if min == 0:
+                release = '95'
+            elif min == 10:
+                release = '98'
+            elif min == 90:
+                release = 'Me'
+            else:
+                release = 'postMe'
+        elif maj == 5:
+            release = '2000'
+    elif plat == VER_PLATFORM_WIN32_NT:
+        # NT family (NT4, 2000, XP, 2003).
+        regkey = 'SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion'
+        if maj <= 4:
+            release = 'NT'
+        elif maj == 5:
+            if min == 0:
+                release = '2000'
+            elif min == 1:
+                release = 'XP'
+            elif min == 2:
+                release = '2003Server'
+            else:
+                release = 'post2003'
+    else:
+        if not release:
+            # E.g. Win3.1 with win32s
+            release = '%i.%i' % (maj,min)
+        return release,version,csd,ptype
+
+    # Open the registry key
+    try:
+        keyCurVer = RegOpenKeyEx(HKEY_LOCAL_MACHINE,regkey)
+        # Get a value to make sure the key exists...
+        RegQueryValueEx(keyCurVer,'SystemRoot')
+    except:
+        # NOTE(review): bare except -- any registry failure returns the
+        # values computed so far without the build/ptype refinements.
+        return release,version,csd,ptype
+
+    # Parse values
+    #subversion = _win32_getvalue(keyCurVer,
+    #                            'SubVersionNumber',
+    #                            ('',1))[0]
+    #if subversion:
+    #   release = release + subversion # 95a, 95b, etc.
+    build = _win32_getvalue(keyCurVer,
+                            'CurrentBuildNumber',
+                            ('',1))[0]
+    ptype = _win32_getvalue(keyCurVer,
+                           'CurrentType',
+                           (ptype,1))[0]
+
+    # Normalize version
+    version = _norm_version(version,build)
+
+    # Close key
+    RegCloseKey(keyCurVer)
+    return release,version,csd,ptype
+
def _mac_ver_lookup(selectors,default=None):

    """ Query the MacOS gestalt() API for each selector in selectors.

        Returns a list with one entry per selector; entries whose
        lookup failed are set to default.
    """
    from gestalt import gestalt
    import MacOS
    values = []
    for code in selectors:
        try:
            values.append(gestalt(code))
        except (RuntimeError, MacOS.Error):
            # Selector not supported on this system
            values.append(default)
    return values
+
def _bcd2str(bcd):

    """ Convert a BCD coded integer into the string of its hex
        digits, e.g. 0x1234 -> '1234'.
    """
    # hex() renders the value with a '0x' prefix; drop that prefix.
    digits = hex(bcd)
    return digits[2:]
+
def mac_ver(release='',versioninfo=('','',''),machine=''):

    """ Get MacOS version information and return it as tuple (release,
        versioninfo, machine) with versioninfo being a tuple (version,
        dev_stage, non_release_version).

        Entries which cannot be determined are set to the parameter
        values which default to ''. All tuple entries are strings.

        Thanks to Mark R. Levinson for mailing documentation links and
        code examples for this function. Documentation for the
        gestalt() API is available online at:

           http://www.rgaros.nl/gestalt/

    """
    # Without the gestalt APIs there is nothing we can determine.
    try:
        import gestalt
        import MacOS
    except ImportError:
        return release,versioninfo,machine

    # sysv: system version, sysu: detailed version, sysa: architecture
    sysv,sysu,sysa = _mac_ver_lookup(('sysv','sysu','sysa'))

    if sysv:
        # Decode the BCD packed version, e.g. 0x0924 -> '9.2.4'
        release = '%s.%i.%i' % (_bcd2str((sysv & 0xFF00) >> 8),
                                (sysv & 0x00F0) >> 4,
                                sysv & 0x000F)
    if sysu:
        # Detailed version: major/minor/bugfix plus the release stage
        # and a non-release version number.
        major = int((sysu & 0xFF000000) >> 24)
        minor = (sysu & 0x00F00000) >> 20
        bugfix = (sysu & 0x000F0000) >> 16
        stage_code = (sysu & 0x0000FF00) >> 8
        nonrel = (sysu & 0x000000FF)
        stage = {0x20:'development',
                 0x40:'alpha',
                 0x60:'beta',
                 0x80:'final'}.get(stage_code,'')
        versioninfo = ('%s.%i.%i' % (_bcd2str(major),minor,bugfix),
                       stage,
                       _bcd2str(nonrel))
    if sysa:
        # CPU architecture gestalt: 1 = 68k, 2 = PowerPC
        machine = {0x1: '68k',
                   0x2: 'PowerPC'}.get(sysa,'')
    return release,versioninfo,machine
+
def _java_getprop(name,default):

    """ Return the Java system property name, or default when the
        lookup fails.  Only usable on Jython.
    """
    from java.lang import System
    try:
        value = System.getProperty(name)
    except:
        # Property lookup failed (e.g. security restrictions)
        return default
    return value
+
def java_ver(release='',vendor='',vminfo=('','',''),osinfo=('','','')):

    """ Version interface for Jython.

        Returns a tuple (release,vendor,vminfo,osinfo) with vminfo being
        a tuple (vm_name,vm_release,vm_vendor) and osinfo being a
        tuple (os_name,os_version,os_arch).

        Values which cannot be determined are set to the defaults
        given as parameters (which all default to '').
    """
    # Not running on Jython ?  Then there is nothing to report.
    try:
        import java.lang
    except ImportError:
        return release,vendor,vminfo,osinfo

    vendor = _java_getprop('java.vendor',vendor)
    release = _java_getprop('java.version',release)

    vm_name_dflt,vm_release_dflt,vm_vendor_dflt = vminfo
    vminfo = (_java_getprop('java.vm.name',vm_name_dflt),
              _java_getprop('java.vm.version',vm_release_dflt),
              _java_getprop('java.vm.vendor',vm_vendor_dflt))

    os_name_dflt,os_version_dflt,os_arch_dflt = osinfo
    osinfo = (_java_getprop('java.os.name',os_name_dflt),
              _java_getprop('java.os.version',os_version_dflt),
              _java_getprop('java.os.arch',os_arch_dflt))

    return release,vendor,vminfo,osinfo
+
+### System name aliasing
+
def system_alias(system,release,version):

    """ Returns (system,release,version) aliased to common
        marketing names used for some systems.

        It also does some reordering of the information in some cases
        where it would otherwise cause confusion.
    """
    if system == 'Rhapsody':
        # Apple's BSD derivative; fold the system name into the release.
        # XXX How can we determine the marketing release number ?
        return 'MacOS X Server',system+release,version

    if system == 'SunOS':
        if release < '5':
            # These releases use the old name SunOS
            return system,release,version
        # Modify release (marketing release = SunOS release - 3)
        parts = string.split(release,'.')
        if parts:
            try:
                major = int(parts[0]) - 3
            except ValueError:
                # Leave the release untouched if it doesn't parse
                pass
            else:
                parts[0] = str(major)
                release = string.join(parts,'.')
        # SunOS 5.x and later are all marketed as Solaris
        return 'Solaris',release,version

    if system == 'IRIX64':
        # IRIX reports IRIX64 on platforms with 64-bit support; yet it
        # is really a version and not a different platform, since 32-bit
        # apps are also supported..
        system = 'IRIX'
        if version:
            version = version + ' (64bit)'
        else:
            version = '64bit'
    elif system in ('win32','win16'):
        # In case one of the other tricks
        system = 'Windows'

    return system,release,version
+
+### Various internal helpers
+
def _platform(*args):

    """ Helper to format the platform string in a filename
        compatible format e.g. "system-version-machine".

        Empty components and the pseudo-value 'unknown' are dropped;
        characters that are awkward in filenames are mapped to '-'
        or '_'.
    """
    # Join the non-empty components (str methods are available since
    # Python 1.6 and behave identically to the string module functions)
    platform = '-'.join([x.strip() for x in args if x])

    # Cleanup some possible filename obstacles...
    platform = platform.replace(' ','_')
    for ch in '/\\:;"()':
        platform = platform.replace(ch,'-')

    # No need to report 'unknown' information...
    platform = platform.replace('unknown','')

    # Fold '--'s and remove trailing '-'
    while 1:
        cleaned = platform.replace('--','-')
        if cleaned == platform:
            break
        platform = cleaned
    # Guard against an empty result (all components empty or
    # 'unknown'); without the check this raised IndexError.
    while platform and platform[-1] == '-':
        platform = platform[:-1]

    return platform
+
def _node(default=''):

    """ Helper to determine the node name of this machine.

        Returns default when the name cannot be determined.
    """
    try:
        import socket
    except ImportError:
        # No networking support compiled in
        return default
    try:
        name = socket.gethostname()
    except socket.error:
        # Lookup failed; fall back to the default
        name = default
    return name
+
# os.path.abspath is new in Python 1.5.2:
if hasattr(os.path,'abspath'):

    _abspath = os.path.abspath

else:

    # Emulate os.path.abspath() for pre-1.5.2 interpreters.  The
    # keyword defaults bind the needed os.path helpers at definition
    # time.
    def _abspath(path,
                 isabs=os.path.isabs,join=os.path.join,getcwd=os.getcwd,
                 normpath=os.path.normpath):

        if not isabs(path):
            path = join(getcwd(), path)
        return normpath(path)
+
def _follow_symlinks(filepath):

    """ In case filepath is a symlink, follow it until a
        real file is reached.
    """
    filepath = _abspath(filepath)
    # os.readlink() may return a relative target, so re-join it
    # against the link's own path before normalizing.
    while os.path.islink(filepath):
        target = os.readlink(filepath)
        filepath = os.path.normpath(os.path.join(filepath,target))
    return filepath
+
def _syscmd_uname(option,default=''):

    """ Interface to the system's uname command.

        Returns the stripped command output, or default when the
        command is unavailable or fails.
    """
    if sys.platform in ('dos','win32','win16','os2'):
        # XXX Others too ?
        return default
    try:
        pipe = os.popen('uname %s 2> /dev/null' % option)
    except (AttributeError,os.error):
        # os.popen() missing or the spawn itself failed
        return default
    output = pipe.read().strip()
    rc = pipe.close()
    if rc or not output:
        return default
    return output
+
def _syscmd_file(target,default=''):

    """ Interface to the system's file command.

        Resolves symlinks in target, runs 'file <target>' and returns
        the stripped output (which normally starts with the filename),
        or default in case the command fails.
    """
    target = _follow_symlinks(target)
    try:
        pipe = os.popen('file %s 2> /dev/null' % target)
    except (AttributeError,os.error):
        return default
    output = pipe.read()
    rc = pipe.close()
    output = output.strip()
    if rc or not output:
        return default
    return output
+
### Information about the used architecture

# Default (bits, linkage) values per sys.platform; non-empty strings
# override the parameter presets.  Used by architecture() when the
# 'file' command is unavailable and the queried executable is the
# Python interpreter itself.
_default_architecture = {
    'win32': ('','WindowsPE'),
    'win16': ('','Windows'),
    'dos': ('','MSDOS'),
}

# Splitter used to tokenize the 'file' command output on whitespace
# and commas.
_architecture_split = re.compile(r'[\s,]').split
+
def architecture(executable=sys.executable,bits='',linkage=''):

    """ Queries the given executable (defaults to the Python interpreter
        binary) for various architecture information.

        Returns a tuple (bits,linkage) which contains information about
        the bit architecture and the linkage format used for the
        executable. Both values are returned as strings.

        Values that cannot be determined are returned as given by the
        parameter presets. If bits is given as '', the sizeof(pointer)
        (or sizeof(long) on Python version < 1.5.2) is used as
        indicator for the supported pointer size.

        The function relies on the system's "file" command to do the
        actual work. This is available on most if not all Unix
        platforms. On some non-Unix platforms where the "file" command
        does not exist and the executable is set to the Python interpreter
        binary defaults from _default_architecture are used.
    """
    # Use the sizeof(pointer) as default number of bits if nothing
    # else is given as default.
    if not bits:
        import struct
        try:
            size = struct.calcsize('P')
        except struct.error:
            # Older installations can only query longs
            size = struct.calcsize('l')
        bits = str(size*8) + 'bit'

    # Get data from the 'file' system command
    output = _syscmd_file(executable,'')

    if not output and \
       executable == sys.executable:
        # "file" command did not return anything; we'll try to provide
        # some sensible defaults then...
        # (use the 'in' operator instead of the deprecated dict.has_key())
        if sys.platform in _default_architecture:
            b,l = _default_architecture[sys.platform]
            if b:
                bits = b
            if l:
                linkage = l
        return bits,linkage

    # Split the output into a list of strings omitting the filename
    fileout = _architecture_split(output)[1:]

    if 'executable' not in fileout:
        # Format not supported
        return bits,linkage

    # Bits
    if '32-bit' in fileout:
        bits = '32bit'
    elif 'N32' in fileout:
        # On Irix only
        bits = 'n32bit'
    elif '64-bit' in fileout:
        bits = '64bit'

    # Linkage
    if 'ELF' in fileout:
        linkage = 'ELF'
    elif 'PE' in fileout:
        # E.g. Windows uses this format
        if 'Windows' in fileout:
            linkage = 'WindowsPE'
        else:
            linkage = 'PE'
    elif 'COFF' in fileout:
        linkage = 'COFF'
    elif 'MS-DOS' in fileout:
        linkage = 'MSDOS'
    else:
        # XXX the A.OUT format also falls under this class...
        pass

    return bits,linkage
+
### Portable uname() interface

# Cache holding the uname() result tuple; filled in on first call.
_uname_cache = None

def uname():

    """ Fairly portable uname interface. Returns a tuple
        of strings (system,node,release,version,machine,processor)
        identifying the underlying platform.

        Note that unlike the os.uname function this also returns
        possible processor information as an additional tuple entry.

        Entries which cannot be determined are set to ''.
    """
    global _uname_cache

    if _uname_cache is not None:
        return _uname_cache

    # Preset processor: not every branch below assigns it (e.g. the
    # OpenVMS path without the vms_lib module) and the 'unknown'
    # normalization at the end would then raise a NameError.
    processor = ''

    # Get some infos from the builtin os.uname API...
    try:
        system,node,release,version,machine = os.uname()

    except AttributeError:
        # Hmm, no uname... we'll have to poke around the system then.
        system = sys.platform
        release = ''
        version = ''
        node = _node()
        machine = ''
        use_syscmd_ver = 1

        # Try win32_ver() on win32 platforms
        if system == 'win32':
            release,version,csd,ptype = win32_ver()
            if release and version:
                use_syscmd_ver = 0

        # Try the 'ver' system command available on some
        # platforms
        if use_syscmd_ver:
            system,release,version = _syscmd_ver(system)
            # Normalize system to what win32_ver() normally returns
            # (_syscmd_ver() tends to return the vendor name as well)
            if system == 'Microsoft Windows':
                system = 'Windows'

        # In case we still don't know anything useful, we'll try to
        # help ourselves
        if system in ('win32','win16'):
            if not version:
                if system == 'win32':
                    version = '32bit'
                else:
                    version = '16bit'
            system = 'Windows'

        elif system[:4] == 'java':
            release,vendor,vminfo,osinfo = java_ver()
            system = 'Java'
            version = string.join(vminfo,', ')
            if not version:
                version = vendor

        elif os.name == 'mac':
            release,(version,stage,nonrel),machine = mac_ver()
            system = 'MacOS'

    else:
        # System specific extensions
        if system == 'OpenVMS':
            # OpenVMS seems to have release and version mixed up
            if not release or release == '0':
                release = version
                version = ''
            # Get processor information
            try:
                import vms_lib
            except ImportError:
                # No vms_lib available; processor stays '' (preset above)
                pass
            else:
                csid, cpu_number = vms_lib.getsyi('SYI$_CPU',0)
                if (cpu_number >= 128):
                    processor = 'Alpha'
                else:
                    processor = 'VAX'
        else:
            # Get processor information from the uname system command
            processor = _syscmd_uname('-p','')

    # 'unknown' is not really any useful as information; we'll convert
    # it to '' which is more portable
    if system == 'unknown':
        system = ''
    if node == 'unknown':
        node = ''
    if release == 'unknown':
        release = ''
    if version == 'unknown':
        version = ''
    if machine == 'unknown':
        machine = ''
    if processor == 'unknown':
        processor = ''
    _uname_cache = system,node,release,version,machine,processor
    return _uname_cache
+
+### Direct interfaces to some of the uname() return values
+
def system():

    """ Return the system/OS name, e.g. 'Linux', 'Windows' or 'Java';
        '' if it cannot be determined.
    """
    # uname() tuple layout: (system, node, release, version, machine,
    # processor); field 0 is the system name.
    return uname()[0]
+
def node():

    """ Return the computer's network name, which may or may not be
        fully qualified; '' if it cannot be determined.
    """
    # Field 1 of the uname() tuple is the node name.
    return uname()[1]
+
def release():

    """ Return the system's release, e.g. '2.2.0' or 'NT';
        '' if it cannot be determined.
    """
    # Field 2 of the uname() tuple is the release.
    return uname()[2]
+
def version():

    """ Return the system's release version, e.g. '#3 on degas';
        '' if it cannot be determined.
    """
    # Field 3 of the uname() tuple is the version.
    return uname()[3]
+
def machine():

    """ Return the machine type, e.g. 'i386';
        '' if it cannot be determined.
    """
    # Field 4 of the uname() tuple is the machine type.
    return uname()[4]
+
def processor():

    """ Return the (true) processor name, e.g. 'amdk6'.

        '' is returned if the value cannot be determined.  Note that
        many platforms do not provide this information or simply
        return the same value as for machine(), e.g. NetBSD does this.
    """
    # Field 5 of the uname() tuple is the processor name.
    return uname()[5]
+
### Various APIs for extracting information from sys.version

# Parses sys.version strings of the form:
#   '<version> (#<buildno>, <builddate>, <buildtime>) [<compiler>]'
# into the groups (version, buildno, builddate, buildtime, compiler).
_sys_version_parser = re.compile(r'([\w.+]+)\s*'
                                  '\(#(\d+),\s*([\w ]+),\s*([\w :]+)\)\s*'
                                  '\[([^\]]+)\]?')
# Cache for the _sys_version() result; computed on first use.
_sys_version_cache = None
+
def _sys_version():

    """ Returns a parsed version of Python's sys.version as tuple
        (version, buildno, builddate, compiler) referring to the Python
        version, build number, build date/time as string and the compiler
        identification string.

        Note that unlike the Python sys.version, the returned value
        for the Python version will always include the patchlevel (it
        defaults to '.0').

        Raises ValueError when sys.version has an unrecognized format.
    """
    global _sys_version_cache

    if _sys_version_cache is not None:
        return _sys_version_cache
    match = _sys_version_parser.match(sys.version)
    if match is None:
        # Fail with a clear message instead of the obscure
        # AttributeError that None.groups() would raise below.
        raise ValueError('failed to parse sys.version: %r' % (sys.version,))
    version, buildno, builddate, buildtime, compiler = match.groups()
    buildno = int(buildno)
    builddate = builddate + ' ' + buildtime
    # Make sure the version always includes a patchlevel
    parts = version.split('.')
    if len(parts) == 2:
        parts.append('0')
        version = '.'.join(parts)
    _sys_version_cache = (version, buildno, builddate, compiler)
    return _sys_version_cache
+
def python_version():

    """ Return the Python version as string 'major.minor.patchlevel'.

        Unlike sys.version, the result always includes the patchlevel
        (it defaults to 0).
    """
    # Element 0 of _sys_version() is the normalized version string.
    return _sys_version()[0]
+
def python_version_tuple():

    """ Return the Python version components (major, minor,
        patchlevel) as a list of strings.

        Unlike sys.version, the result always includes the patchlevel
        (it defaults to 0).
    """
    # str.split returns a list; the name is kept for API compatibility.
    return _sys_version()[0].split('.')
+
def python_build():

    """ Return a tuple (buildno, builddate) stating the Python
        build number and date as strings.
    """
    # Elements 1 and 2 of _sys_version() are buildno and builddate.
    return _sys_version()[1:3]
+
def python_compiler():

    """ Return a string identifying the compiler used for compiling
        Python.
    """
    # Element 3 of _sys_version() is the compiler identification.
    return _sys_version()[3]
+
### The Opus Magnum of platform strings :-)

# Cache mapping (aliased, terse) argument pairs to computed platform
# strings.
_platform_cache = {}
+
def platform(aliased=0, terse=0):

    """ Returns a single string identifying the underlying platform
        with as much useful information as possible (but no more :).

        The output is intended to be human readable rather than
        machine parseable. It may look different on different
        platforms and this is intended.

        If "aliased" is true, the function will use aliases for
        various platforms that report system names which differ from
        their common names, e.g. SunOS will be reported as
        Solaris. The system_alias() function is used to implement
        this.

        Setting terse to true causes the function to return only the
        absolute minimum information needed to identify the platform.

    """
    # Check the cache first; the result only depends on (aliased, terse).
    result = _platform_cache.get((aliased, terse), None)
    if result is not None:
        return result

    # Get uname information and then apply platform specific cosmetics
    # to it...
    system,node,release,version,machine,processor = uname()
    if machine == processor:
        # Identical machine/processor entries add no information.
        processor = ''
    if aliased:
        system,release,version = system_alias(system,release,version)

    if system == 'Windows':
        # MS platforms
        # NOTE(review): rel/vers are unused below -- the (possibly
        # aliased) release/version from uname() are formatted instead;
        # only csd is taken from win32_ver().  Confirm this is intended.
        rel,vers,csd,ptype = win32_ver(version)
        if terse:
            platform = _platform(system,release)
        else:
            platform = _platform(system,release,version,csd)

    elif system in ('Linux',):
        # Linux based systems
        distname,distversion,distid = dist('')
        if distname and not terse:
            platform = _platform(system,release,machine,processor,
                                 'with',
                                 distname,distversion,distid)
        else:
            # If the distribution name is unknown check for libc vs. glibc
            libcname,libcversion = libc_ver(sys.executable)
            platform = _platform(system,release,machine,processor,
                                 'with',
                                 libcname+libcversion)
    elif system == 'Java':
        # Java platforms
        r,v,vminfo,(os_name,os_version,os_arch) = java_ver()
        if terse:
            platform = _platform(system,release,version)
        else:
            platform = _platform(system,release,version,
                                 'on',
                                 os_name,os_version,os_arch)

    elif system == 'MacOS':
        # MacOS platforms
        if terse:
            platform = _platform(system,release)
        else:
            platform = _platform(system,release,machine)

    else:
        # Generic handler: include architecture info in the verbose form.
        if terse:
            platform = _platform(system,release)
        else:
            bits,linkage = architecture(sys.executable)
            platform = _platform(system,release,machine,processor,bits,linkage)

    # Cache and return the computed string.
    _platform_cache[(aliased, terse)] = platform
    return platform
+
### Command line interface

if __name__ == '__main__':
    # Default is to print the aliased verbose platform string; the
    # 'terse' and 'nonaliased' arguments (with or without '--') modify
    # this.  The parentheses keep the print form portable.
    terse = ('terse' in sys.argv or '--terse' in sys.argv)
    aliased = (not 'nonaliased' in sys.argv and not '--nonaliased' in sys.argv)
    print(platform(aliased,terse))
    sys.exit(0)
diff --git a/depot_tools/release/win/python_24/Lib/popen2.py b/depot_tools/release/win/python_24/Lib/popen2.py
new file mode 100644
index 0000000..54543be
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/popen2.py
@@ -0,0 +1,224 @@
+"""Spawn a command with pipes to its stdin, stdout, and optionally stderr.
+
+The normal os.popen(cmd, mode) call spawns a shell command and provides a
+file interface to just the input or output of the process depending on
+whether mode is 'r' or 'w'.  This module provides the functions popen2(cmd)
+and popen3(cmd) which return two or three pipes to the spawned command.
+"""
+
+import os
+import sys
+
+__all__ = ["popen2", "popen3", "popen4"]
+
# Highest file descriptor to close in the child; taken from the OS
# limit where available.
try:
    MAXFD = os.sysconf('SC_OPEN_MAX')
except (AttributeError, ValueError):
    # os.sysconf missing (non-Unix) or 'SC_OPEN_MAX' unsupported.
    MAXFD = 256

# Child processes that have not yet been reaped.
_active = []

def _cleanup():
    # Poll every outstanding child once; poll() removes finished
    # children from _active, hence the copy.
    for inst in _active[:]:
        inst.poll()
+
class Popen3:
    """Class representing a child process.  Normally instances are created
    by the factory functions popen2() and popen3()."""

    sts = -1                    # Child not completed yet

    def __init__(self, cmd, capturestderr=False, bufsize=-1):
        """The parameter 'cmd' is the shell command to execute in a
        sub-process.  On UNIX, 'cmd' may be a sequence, in which case arguments
        will be passed directly to the program without shell intervention (as
        with os.spawnv()).  If 'cmd' is a string it will be passed to the shell
        (as with os.system()).   The 'capturestderr' flag, if true, specifies
        that the object should capture standard error output of the child
        process.  The default is false.  If the 'bufsize' parameter is
        specified, it specifies the size of the I/O buffers to/from the child
        process."""
        # Reap any finished children first so _active doesn't grow
        # without bound.
        _cleanup()
        # One pipe per direction: parent->child stdin, child->parent stdout.
        p2cread, p2cwrite = os.pipe()
        c2pread, c2pwrite = os.pipe()
        if capturestderr:
            errout, errin = os.pipe()
        self.pid = os.fork()
        if self.pid == 0:
            # Child: wire the pipe ends onto stdin/stdout(/stderr), then exec.
            os.dup2(p2cread, 0)
            os.dup2(c2pwrite, 1)
            if capturestderr:
                os.dup2(errin, 2)
            self._run_child(cmd)
        # Parent: close the child's pipe ends and wrap ours in file objects.
        os.close(p2cread)
        self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
        os.close(c2pwrite)
        self.fromchild = os.fdopen(c2pread, 'r', bufsize)
        if capturestderr:
            os.close(errin)
            self.childerr = os.fdopen(errout, 'r', bufsize)
        else:
            # No stderr capture requested.
            self.childerr = None
        _active.append(self)

    def _run_child(self, cmd):
        # Exec 'cmd' in the child process; never returns.
        if isinstance(cmd, basestring):
            # A plain string is run through the shell, as os.system() would.
            cmd = ['/bin/sh', '-c', cmd]
        # Close all fds above stderr so the child does not inherit the
        # parent's open files (including our own pipe ends).
        for i in range(3, MAXFD):
            try:
                os.close(i)
            except OSError:
                pass
        try:
            os.execvp(cmd[0], cmd)
        finally:
            # exec failed; exit immediately without running any parent
            # cleanup handlers.
            os._exit(1)

    def poll(self):
        """Return the exit status of the child process if it has finished,
        or -1 if it hasn't finished yet."""
        if self.sts < 0:
            try:
                # WNOHANG: do not block if the child is still running.
                pid, sts = os.waitpid(self.pid, os.WNOHANG)
                if pid == self.pid:
                    self.sts = sts
                    _active.remove(self)
            except os.error:
                pass
        return self.sts

    def wait(self):
        """Wait for and return the exit status of the child process."""
        if self.sts < 0:
            pid, sts = os.waitpid(self.pid, 0)
            if pid == self.pid:
                self.sts = sts
                _active.remove(self)
        return self.sts
+
+
class Popen4(Popen3):
    # The child's stderr is merged into its stdout stream, so there is
    # never a separate childerr file object.
    childerr = None

    def __init__(self, cmd, bufsize=-1):
        """Start 'cmd' with stdout and stderr combined; see
        Popen3.__init__ for the meaning of 'cmd' and 'bufsize'."""
        _cleanup()
        p2cread, p2cwrite = os.pipe()
        c2pread, c2pwrite = os.pipe()
        self.pid = os.fork()
        if self.pid == 0:
            # Child: stdout and stderr share the same pipe.
            os.dup2(p2cread, 0)
            os.dup2(c2pwrite, 1)
            os.dup2(c2pwrite, 2)
            self._run_child(cmd)
        os.close(p2cread)
        self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
        os.close(c2pwrite)
        self.fromchild = os.fdopen(c2pread, 'r', bufsize)
        _active.append(self)
+
+
if sys.platform[:3] == "win" or sys.platform == "os2emx":
    # Some things don't make sense on non-Unix platforms.
    del Popen3, Popen4

    # On Windows/OS2 the os.popen*() primitives are used directly; note
    # that they return the handles in (write, read) order, which is
    # swapped here to this module's (read, write) convention.
    def popen2(cmd, bufsize=-1, mode='t'):
        """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may
        be a sequence, in which case arguments will be passed directly to the
        program without shell intervention (as with os.spawnv()). If 'cmd' is a
        string it will be passed to the shell (as with os.system()). If
        'bufsize' is specified, it sets the buffer size for the I/O pipes. The
        file objects (child_stdout, child_stdin) are returned."""
        w, r = os.popen2(cmd, mode, bufsize)
        return r, w

    def popen3(cmd, bufsize=-1, mode='t'):
        """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may
        be a sequence, in which case arguments will be passed directly to the
        program without shell intervention (as with os.spawnv()). If 'cmd' is a
        string it will be passed to the shell (as with os.system()). If
        'bufsize' is specified, it sets the buffer size for the I/O pipes. The
        file objects (child_stdout, child_stdin, child_stderr) are returned."""
        w, r, e = os.popen3(cmd, mode, bufsize)
        return r, w, e

    def popen4(cmd, bufsize=-1, mode='t'):
        """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may
        be a sequence, in which case arguments will be passed directly to the
        program without shell intervention (as with os.spawnv()). If 'cmd' is a
        string it will be passed to the shell (as with os.system()). If
        'bufsize' is specified, it sets the buffer size for the I/O pipes. The
        file objects (child_stdout_stderr, child_stdin) are returned."""
        w, r = os.popen4(cmd, mode, bufsize)
        return r, w
else:
    # Unix: implement the popen*() functions on top of the fork/exec
    # based Popen3/Popen4 classes.  The 'mode' parameter is accepted
    # for symmetry with the Windows variants but is unused here.
    def popen2(cmd, bufsize=-1, mode='t'):
        """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may
        be a sequence, in which case arguments will be passed directly to the
        program without shell intervention (as with os.spawnv()). If 'cmd' is a
        string it will be passed to the shell (as with os.system()). If
        'bufsize' is specified, it sets the buffer size for the I/O pipes. The
        file objects (child_stdout, child_stdin) are returned."""
        inst = Popen3(cmd, False, bufsize)
        return inst.fromchild, inst.tochild

    def popen3(cmd, bufsize=-1, mode='t'):
        """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may
        be a sequence, in which case arguments will be passed directly to the
        program without shell intervention (as with os.spawnv()). If 'cmd' is a
        string it will be passed to the shell (as with os.system()). If
        'bufsize' is specified, it sets the buffer size for the I/O pipes. The
        file objects (child_stdout, child_stdin, child_stderr) are returned."""
        inst = Popen3(cmd, True, bufsize)
        return inst.fromchild, inst.tochild, inst.childerr

    def popen4(cmd, bufsize=-1, mode='t'):
        """Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd' may
        be a sequence, in which case arguments will be passed directly to the
        program without shell intervention (as with os.spawnv()). If 'cmd' is a
        string it will be passed to the shell (as with os.system()). If
        'bufsize' is specified, it sets the buffer size for the I/O pipes. The
        file objects (child_stdout_stderr, child_stdin) are returned."""
        inst = Popen4(cmd, bufsize)
        return inst.fromchild, inst.tochild

    # The classes are part of the public API on Unix only.
    __all__.extend(["Popen3", "Popen4"])
+
def _test():
    # Smoke test: echo a string through the popen variants and check
    # that it comes back unchanged.
    cmd  = "cat"
    if os.name == "nt":
        cmd = "more"
    teststr = "ab cd\n"
    # "more" doesn't act the same way across Windows flavors,
    # sometimes adding an extra newline at the start or the
    # end.  So we strip whitespace off both ends for comparison.
    expected = teststr.strip()

    print("testing popen2...")
    r, w = popen2(cmd)
    w.write(teststr)
    w.close()
    got = r.read()
    if got.strip() != expected:
        raise ValueError("wrote %r read %r" % (teststr, got))

    print("testing popen3...")
    try:
        r, w, e = popen3([cmd])
    except:
        r, w, e = popen3(cmd)
    w.write(teststr)
    w.close()
    got = r.read()
    if got.strip() != expected:
        raise ValueError("wrote %r read %r" % (teststr, got))
    got = e.read()
    if got:
        raise ValueError("unexpected %r on stderr" % (got,))

    # Every child should be reapable by now.
    for inst in _active[:]:
        inst.wait()
    if _active:
        raise ValueError("_active not empty")
    print("All OK")

if __name__ == '__main__':
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/poplib.py b/depot_tools/release/win/python_24/Lib/poplib.py
new file mode 100644
index 0000000..1475bdc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/poplib.py
@@ -0,0 +1,423 @@
+"""A POP3 client class.
+
+Based on the J. Myers POP3 draft, Jan. 96
+"""
+
+# Author: David Ascher <david_ascher@brown.edu>
+#         [heavily stealing from nntplib.py]
+# Updated: Piers Lauder <piers@cs.su.oz.au> [Jul '97]
+# String method conversion and test jig improvements by ESR, February 2001.
+# Added the POP3_SSL class. Methods loosely based on IMAP_SSL. Hector Urtubia <urtubia@mrbook.org> Aug 2003
+
+# Example (see the test function at the end of this file)
+
+# Imports
+
+import re, socket
+
+__all__ = ["POP3","error_proto","POP3_SSL"]
+
+# Exception raised when an error or invalid response is received:
+
+class error_proto(Exception): pass
+
# Standard POP3 port
POP3_PORT = 110

# Standard port for POP3 over SSL
POP3_SSL_PORT = 995

# Line terminators (we always output CRLF, but accept any of CRLF, LFCR, LF)
CR = '\r'
LF = '\n'
CRLF = CR+LF
+
+
class POP3:

    """This class supports both the minimal and optional command sets.
    Arguments can be strings or integers (where appropriate)
    (e.g.: retr(1) and retr('1') both work equally well.

    Minimal Command Set:
            USER name               user(name)
            PASS string             pass_(string)
            STAT                    stat()
            LIST [msg]              list(msg = None)
            RETR msg                retr(msg)
            DELE msg                dele(msg)
            NOOP                    noop()
            RSET                    rset()
            QUIT                    quit()

    Optional Commands (some servers support these):
            RPOP name               rpop(name)
            APOP name digest        apop(name, digest)
            TOP msg n               top(msg, n)
            UIDL [msg]              uidl(msg = None)

    Raises one exception: 'error_proto'.

    Instantiate with:
            POP3(hostname, port=110)

    NB:     the POP protocol locks the mailbox from user
            authorization until QUIT, so be sure to get in, suck
            the messages, and quit, each time you access the
            mailbox.

            POP is a line-based protocol, which means large mail
            messages consume lots of python cycles reading them
            line-by-line.

            If it's available on your mail server, use IMAP4
            instead, it doesn't suffer from the two problems
            above.
    """


    def __init__(self, host, port = POP3_PORT):
        """Connect to host:port (trying each address getaddrinfo yields
        until one succeeds) and read the server greeting into self.welcome."""
        self.host = host
        self.port = port
        msg = "getaddrinfo returns an empty list"
        self.sock = None
        for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                self.sock = socket.socket(af, socktype, proto)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            # msg is either the placeholder above or the last connect error.
            raise socket.error, msg
        self.file = self.sock.makefile('rb')
        self._debugging = 0
        self.welcome = self._getresp()


    # Internal: send one line to the server, appending CRLF.

    def _putline(self, line):
        if self._debugging > 1: print '*put*', repr(line)
        self.sock.sendall('%s%s' % (line, CRLF))


    # Internal: send one command to the server (through _putline())

    def _putcmd(self, line):
        if self._debugging: print '*cmd*', repr(line)
        self._putline(line)


    # Internal: return one line from the server, stripping CRLF.
    # This is where all the CPU time of this module is consumed.
    # Raise error_proto('-ERR EOF') if the connection is closed.
    # Returns (stripped line, octet count of the raw line).

    def _getline(self):
        line = self.file.readline()
        if self._debugging > 1: print '*get*', repr(line)
        if not line: raise error_proto('-ERR EOF')
        octets = len(line)
        # server can send any combination of CR & LF
        # however, 'readline()' returns lines ending in LF
        # so only possibilities are ...LF, ...CRLF, CR...LF
        if line[-2:] == CRLF:
            return line[:-2], octets
        if line[0] == CR:
            return line[1:-1], octets
        return line[:-1], octets


    # Internal: get a response from the server.
    # Raise 'error_proto' if the response doesn't start with '+'.

    def _getresp(self):
        resp, o = self._getline()
        if self._debugging > 1: print '*resp*', repr(resp)
        c = resp[:1]
        if c != '+':
            raise error_proto(resp)
        return resp


    # Internal: get a response plus following text from the server.
    # The multi-line body ends at a line containing only '.'; a leading
    # '..' is byte-stuffing for a data line that really starts with '.'.

    def _getlongresp(self):
        resp = self._getresp()
        list = []; octets = 0
        line, o = self._getline()
        while line != '.':
            if line[:2] == '..':
                # Undo byte-stuffing and correct the octet count.
                o = o-1
                line = line[1:]
            octets = octets + o
            list.append(line)
            line, o = self._getline()
        return resp, list, octets


    # Internal: send a command and get the response

    def _shortcmd(self, line):
        self._putcmd(line)
        return self._getresp()


    # Internal: send a command and get the response plus following text

    def _longcmd(self, line):
        self._putcmd(line)
        return self._getlongresp()


    # These can be useful:

    def getwelcome(self):
        """Return the server's greeting line (read at connect time)."""
        return self.welcome


    def set_debuglevel(self, level):
        """Set protocol tracing: 0 = off, 1 = commands, >1 = full I/O."""
        self._debugging = level


    # Here are all the POP commands:

    def user(self, user):
        """Send user name, return response

        (should indicate password required).
        """
        return self._shortcmd('USER %s' % user)


    def pass_(self, pswd):
        """Send password, return response

        (response includes message count, mailbox size).

        NB: mailbox is locked by server from here to 'quit()'
        """
        return self._shortcmd('PASS %s' % pswd)


    def stat(self):
        """Get mailbox status.

        Result is tuple of 2 ints (message count, mailbox size)
        """
        retval = self._shortcmd('STAT')
        rets = retval.split()
        if self._debugging: print '*stat*', repr(rets)
        numMessages = int(rets[1])
        sizeMessages = int(rets[2])
        return (numMessages, sizeMessages)


    def list(self, which=None):
        """Request listing, return result.

        Result without a message number argument is in form
        ['response', ['mesg_num octets', ...]].

        Result when a message number argument is given is a
        single response: the "scan listing" for that message.
        """
        if which is not None:
            return self._shortcmd('LIST %s' % which)
        return self._longcmd('LIST')


    def retr(self, which):
        """Retrieve whole message number 'which'.

        Result is in form ['response', ['line', ...], octets].
        """
        return self._longcmd('RETR %s' % which)


    def dele(self, which):
        """Delete message number 'which'.

        Result is 'response'.
        """
        return self._shortcmd('DELE %s' % which)


    def noop(self):
        """Does nothing.

        One supposes the response indicates the server is alive.
        """
        return self._shortcmd('NOOP')


    def rset(self):
        """Unmark any messages scheduled for deletion (POP3 RSET)."""
        return self._shortcmd('RSET')


    def quit(self):
        """Signoff: commit changes on server, unlock mailbox, close connection."""
        try:
            resp = self._shortcmd('QUIT')
        except error_proto, val:
            # An error response is returned (not raised) so callers still
            # get the server's text after the connection is torn down.
            resp = val
        self.file.close()
        self.sock.close()
        del self.file, self.sock
        return resp

    #__del__ = quit


    # optional commands:

    def rpop(self, user):
        """Send the (obsolete) RPOP authentication command for 'user'."""
        return self._shortcmd('RPOP %s' % user)


    # Matches the '<...>' msgid/timestamp an APOP-capable server embeds
    # in its greeting line.
    timestamp = re.compile(r'\+OK.*(<[^>]+>)')

    def apop(self, user, secret):
        """Authorisation

        - only possible if server has supplied a timestamp in initial greeting.

        Args:
                user    - mailbox user;
                secret  - secret shared between client and server.

        NB: mailbox is locked by server from here to 'quit()'
        """
        m = self.timestamp.match(self.welcome)
        if not m:
            raise error_proto('-ERR APOP not supported by server')
        import md5
        # Digest is the hex MD5 of the greeting timestamp + shared secret.
        digest = md5.new(m.group(1)+secret).digest()
        digest = ''.join(map(lambda x:'%02x'%ord(x), digest))
        return self._shortcmd('APOP %s %s' % (user, digest))


    def top(self, which, howmuch):
        """Retrieve message header of message number 'which'
        and first 'howmuch' lines of message body.

        Result is in form ['response', ['line', ...], octets].
        """
        return self._longcmd('TOP %s %s' % (which, howmuch))


    def uidl(self, which=None):
        """Return message digest (unique id) list.

        If 'which', result contains unique id for that message
        in the form 'response mesgnum uid', otherwise result is
        the list ['response', ['mesgnum uid', ...], octets]
        """
        if which is not None:
            return self._shortcmd('UIDL %s' % which)
        return self._longcmd('UIDL')
+
class POP3_SSL(POP3):
    """POP3 client class over SSL connection

    Instantiate with: POP3_SSL(hostname, port=995, keyfile=None, certfile=None)

           hostname - the hostname of the pop3 over ssl server
           port - port number
           keyfile - PEM formatted file that contains your private key
           certfile - PEM formatted certificate chain file

        See the methods of the parent class POP3 for more documentation.
    """

    def __init__(self, host, port = POP3_SSL_PORT, keyfile = None, certfile = None):
        """Connect to host:port, wrap the socket in SSL and read the greeting."""
        self.host = host
        self.port = port
        self.keyfile = keyfile
        self.certfile = certfile
        # Decrypted-but-unconsumed data accumulated by _fillBuffer().
        self.buffer = ""
        msg = "getaddrinfo returns an empty list"
        self.sock = None
        for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                self.sock = socket.socket(af, socktype, proto)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            raise socket.error, msg
        # NOTE(review): self.file is created for parity with POP3, but all
        # reads below go through self.sslobj, and quit() never closes it.
        self.file = self.sock.makefile('rb')
        self.sslobj = socket.ssl(self.sock, self.keyfile, self.certfile)
        self._debugging = 0
        self.welcome = self._getresp()

    # Internal: read more decrypted bytes into self.buffer (EOF -> error).

    def _fillBuffer(self):
        localbuf = self.sslobj.read()
        if len(localbuf) == 0:
            raise error_proto('-ERR EOF')
        self.buffer += localbuf

    # Internal: pop one LF-terminated line off self.buffer, refilling from
    # the SSL object as needed; returns (line, octets) like POP3._getline().

    def _getline(self):
        line = ""
        renewline = re.compile(r'.*?\n')
        match = renewline.match(self.buffer)
        while not match:
            self._fillBuffer()
            match = renewline.match(self.buffer)
        line = match.group(0)
        self.buffer = renewline.sub('' ,self.buffer, 1)
        if self._debugging > 1: print '*get*', repr(line)

        octets = len(line)
        if line[-2:] == CRLF:
            return line[:-2], octets
        if line[0] == CR:
            return line[1:-1], octets
        return line[:-1], octets

    # Internal: write line + CRLF through the SSL object, looping because
    # ssl.write() may send only part of the data.

    def _putline(self, line):
        if self._debugging > 1: print '*put*', repr(line)
        line += CRLF
        bytes = len(line)
        while bytes > 0:
            sent = self.sslobj.write(line)
            if sent == bytes:
                break    # avoid copy
            line = line[sent:]
            bytes = bytes - sent

    def quit(self):
        """Signoff: commit changes on server, unlock mailbox, close connection."""
        try:
            resp = self._shortcmd('QUIT')
        except error_proto, val:
            resp = val
        self.sock.close()
        del self.sslobj, self.sock
        return resp
+
+
# Minimal interactive test: connect, authenticate, and dump every message.
# Usage: python poplib.py <server> <user> <password>
if __name__ == "__main__":
    import sys
    a = POP3(sys.argv[1])
    print a.getwelcome()
    a.user(sys.argv[2])
    a.pass_(sys.argv[3])
    a.list()
    (numMsgs, totalSize) = a.stat()
    for i in range(1, numMsgs + 1):
        (header, msg, octets) = a.retr(i)
        print "Message %d:" % i
        for line in msg:
            print '   ' + line
        print '-----------------------'
    a.quit()
diff --git a/depot_tools/release/win/python_24/Lib/posixfile.py b/depot_tools/release/win/python_24/Lib/posixfile.py
new file mode 100644
index 0000000..5a7ed061
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/posixfile.py
@@ -0,0 +1,240 @@
+"""Extended file operations available in POSIX.
+
+f = posixfile.open(filename, [mode, [bufsize]])
+      will create a new posixfile object
+
+f = posixfile.fileopen(fileobject)
+      will create a posixfile object from a builtin file object
+
+f.file()
+      will return the original builtin file object
+
+f.dup()
+      will return a new file object based on a new filedescriptor
+
+f.dup2(fd)
+      will return a new file object based on the given filedescriptor
+
+f.flags(mode)
+      will turn on the associated flag (merge)
+      mode can contain the following characters:
+
+  (character representing a flag)
+      a       append only flag
+      c       close on exec flag
+      n       no delay flag
+      s       synchronization flag
+  (modifiers)
+      !       turn flags 'off' instead of default 'on'
+      =       copy flags 'as is' instead of default 'merge'
+      ?       return a string in which the characters represent the flags
+              that are set
+
+      note: - the '!' and '=' modifiers are mutually exclusive.
+            - the '?' modifier will return the status of the flags after they
+              have been changed by other characters in the mode string
+
+f.lock(mode [, len [, start [, whence]]])
+      will (un)lock a region
+      mode can contain the following characters:
+
+  (character representing type of lock)
+      u       unlock
+      r       read lock
+      w       write lock
+  (modifiers)
+      |       wait until the lock can be granted
+      ?       return the first lock conflicting with the requested lock
+              or 'None' if there is no conflict. The lock returned is in the
+              format (mode, len, start, whence, pid) where mode is a
+              character representing the type of lock ('r' or 'w')
+
+      note: - the '?' modifier prevents a region from being locked; it is
+              query only
+"""
+
# Warn at import time that this whole module is deprecated; 'del' keeps
# the helper import out of the module namespace.
import warnings
warnings.warn(
    "The posixfile module is obsolete and will disappear in the future",
    DeprecationWarning)
del warnings
+
+
+class _posixfile_:
+    """File wrapper class that provides extra POSIX file routines."""
+
+    states = ['open', 'closed']
+
+    #
+    # Internal routines
+    #
+    def __repr__(self):
+        file = self._file_
+        return "<%s posixfile '%s', mode '%s' at %s>" % \
+                (self.states[file.closed], file.name, file.mode, \
+                 hex(id(self))[2:])
+
+    #
+    # Initialization routines
+    #
+    def open(self, name, mode='r', bufsize=-1):
+        import __builtin__
+        return self.fileopen(__builtin__.open(name, mode, bufsize))
+
+    def fileopen(self, file):
+        import types
+        if repr(type(file)) != "<type 'file'>":
+            raise TypeError, 'posixfile.fileopen() arg must be file object'
+        self._file_  = file
+        # Copy basic file methods
+        for maybemethod in dir(file):
+            if not maybemethod.startswith('_'):
+                attr = getattr(file, maybemethod)
+                if isinstance(attr, types.BuiltinMethodType):
+                    setattr(self, maybemethod, attr)
+        return self
+
+    #
+    # New methods
+    #
+    def file(self):
+        return self._file_
+
+    def dup(self):
+        import posix
+
+        if not hasattr(posix, 'fdopen'):
+            raise AttributeError, 'dup() method unavailable'
+
+        return posix.fdopen(posix.dup(self._file_.fileno()), self._file_.mode)
+
+    def dup2(self, fd):
+        import posix
+
+        if not hasattr(posix, 'fdopen'):
+            raise AttributeError, 'dup() method unavailable'
+
+        posix.dup2(self._file_.fileno(), fd)
+        return posix.fdopen(fd, self._file_.mode)
+
+    def flags(self, *which):
+        import fcntl, os
+
+        if which:
+            if len(which) > 1:
+                raise TypeError, 'Too many arguments'
+            which = which[0]
+        else: which = '?'
+
+        l_flags = 0
+        if 'n' in which: l_flags = l_flags | os.O_NDELAY
+        if 'a' in which: l_flags = l_flags | os.O_APPEND
+        if 's' in which: l_flags = l_flags | os.O_SYNC
+
+        file = self._file_
+
+        if '=' not in which:
+            cur_fl = fcntl.fcntl(file.fileno(), fcntl.F_GETFL, 0)
+            if '!' in which: l_flags = cur_fl & ~ l_flags
+            else: l_flags = cur_fl | l_flags
+
+        l_flags = fcntl.fcntl(file.fileno(), fcntl.F_SETFL, l_flags)
+
+        if 'c' in which:
+            arg = ('!' not in which)    # 0 is don't, 1 is do close on exec
+            l_flags = fcntl.fcntl(file.fileno(), fcntl.F_SETFD, arg)
+
+        if '?' in which:
+            which = ''                  # Return current flags
+            l_flags = fcntl.fcntl(file.fileno(), fcntl.F_GETFL, 0)
+            if os.O_APPEND & l_flags: which = which + 'a'
+            if fcntl.fcntl(file.fileno(), fcntl.F_GETFD, 0) & 1:
+                which = which + 'c'
+            if os.O_NDELAY & l_flags: which = which + 'n'
+            if os.O_SYNC & l_flags: which = which + 's'
+            return which
+
+    def lock(self, how, *args):
+        import struct, fcntl
+
+        if 'w' in how: l_type = fcntl.F_WRLCK
+        elif 'r' in how: l_type = fcntl.F_RDLCK
+        elif 'u' in how: l_type = fcntl.F_UNLCK
+        else: raise TypeError, 'no type of lock specified'
+
+        if '|' in how: cmd = fcntl.F_SETLKW
+        elif '?' in how: cmd = fcntl.F_GETLK
+        else: cmd = fcntl.F_SETLK
+
+        l_whence = 0
+        l_start = 0
+        l_len = 0
+
+        if len(args) == 1:
+            l_len = args[0]
+        elif len(args) == 2:
+            l_len, l_start = args
+        elif len(args) == 3:
+            l_len, l_start, l_whence = args
+        elif len(args) > 3:
+            raise TypeError, 'too many arguments'
+
+        # Hack by davem@magnet.com to get locking to go on freebsd;
+        # additions for AIX by Vladimir.Marangozov@imag.fr
+        import sys, os
+        if sys.platform in ('netbsd1',
+                            'openbsd2',
+                            'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5',
+                            'freebsd6', 'bsdos2', 'bsdos3', 'bsdos4'):
+            flock = struct.pack('lxxxxlxxxxlhh', \
+                  l_start, l_len, os.getpid(), l_type, l_whence)
+        elif sys.platform in ['aix3', 'aix4']:
+            flock = struct.pack('hhlllii', \
+                  l_type, l_whence, l_start, l_len, 0, 0, 0)
+        else:
+            flock = struct.pack('hhllhh', \
+                  l_type, l_whence, l_start, l_len, 0, 0)
+
+        flock = fcntl.fcntl(self._file_.fileno(), cmd, flock)
+
+        if '?' in how:
+            if sys.platform in ('netbsd1',
+                                'openbsd2',
+                                'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5',
+                                'bsdos2', 'bsdos3', 'bsdos4'):
+                l_start, l_len, l_pid, l_type, l_whence = \
+                    struct.unpack('lxxxxlxxxxlhh', flock)
+            elif sys.platform in ['aix3', 'aix4']:
+                l_type, l_whence, l_start, l_len, l_sysid, l_pid, l_vfs = \
+                    struct.unpack('hhlllii', flock)
+            elif sys.platform == "linux2":
+                l_type, l_whence, l_start, l_len, l_pid, l_sysid = \
+                    struct.unpack('hhllhh', flock)
+            else:
+                l_type, l_whence, l_start, l_len, l_sysid, l_pid = \
+                    struct.unpack('hhllhh', flock)
+
+            if l_type != fcntl.F_UNLCK:
+                if l_type == fcntl.F_RDLCK:
+                    return 'r', l_len, l_start, l_whence, l_pid
+                else:
+                    return 'w', l_len, l_start, l_whence, l_pid
+
def open(name, mode='r', bufsize=-1):
    """Public routine to open a file as a posixfile object."""
    wrapper = _posixfile_()
    return wrapper.open(name, mode, bufsize)
+
def fileopen(file):
    """Public routine to get a posixfile object from a Python file object."""
    wrapper = _posixfile_()
    return wrapper.fileopen(file)
+
+#
+# Constants
+#
+SEEK_SET = 0
+SEEK_CUR = 1
+SEEK_END = 2
+
+#
+# End of posixfile.py
+#
diff --git a/depot_tools/release/win/python_24/Lib/posixpath.py b/depot_tools/release/win/python_24/Lib/posixpath.py
new file mode 100644
index 0000000..b29eedc3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/posixpath.py
@@ -0,0 +1,454 @@
+"""Common operations on Posix pathnames.
+
+Instead of importing this module directly, import os and refer to
+this module as os.path.  The "os.path" name is an alias for this
+module on Posix systems; on other systems (e.g. Mac, Windows),
+os.path provides the same operations in a manner specific to that
+platform, and is an alias to another module (e.g. macpath, ntpath).
+
+Some of this can actually be useful on non-Posix systems too, e.g.
+for manipulation of the pathname component of URLs.
+"""
+
+import os
+import stat
+
# Public API when this module is used as os.path.
# NOTE(review): lexists() is defined below but missing from this list.
__all__ = ["normcase","isabs","join","splitdrive","split","splitext",
           "basename","dirname","commonprefix","getsize","getmtime",
           "getatime","getctime","islink","exists","isdir","isfile","ismount",
           "walk","expanduser","expandvars","normpath","abspath",
           "samefile","sameopenfile","samestat",
           "curdir","pardir","sep","pathsep","defpath","altsep","extsep",
           "devnull","realpath","supports_unicode_filenames"]
+
# strings representing various path-related bits and pieces
curdir = '.'                # current-directory marker
pardir = '..'               # parent-directory marker
extsep = '.'                # extension separator
sep = '/'                   # path component separator
pathsep = ':'               # separator used in $PATH-style lists
defpath = ':/bin:/usr/bin'  # default executable search path
altsep = None               # POSIX has no alternative separator
devnull = '/dev/null'       # path of the null device
+
+# Normalize the case of a pathname.  Trivial in Posix, string.lower on Mac.
+# On MS-DOS this may also turn slashes into backslashes; however, other
+# normalizations (such as optimizing '../' away) are not allowed
+# (another function should be defined to do that).
+
def normcase(s):
    """Normalize the case of a pathname; a no-op on POSIX."""
    return s
+
+
+# Return whether a path is absolute.
+# Trivial in Posix, harder on the Mac or MS-DOS.
+
def isabs(s):
    """Test whether a path is absolute"""
    # Absolute means the first character is a slash.
    return s[:1] == '/'
+
+
+# Join pathnames.
+# Ignore the previous parts if a part is absolute.
+# Insert a '/' unless the first part is empty or already ends in '/'.
+
def join(a, *p):
    """Join two or more pathname components, inserting '/' as needed"""
    result = a
    for segment in p:
        if segment[:1] == '/':
            # An absolute segment discards everything joined so far.
            result = segment
        elif not result or result[-1:] == '/':
            result = result + segment
        else:
            result = result + '/' + segment
    return result
+
+
+# Split a path in head (everything up to the last '/') and tail (the
+# rest).  If the path ends in '/', tail will be empty.  If there is no
+# '/' in the path, head  will be empty.
+# Trailing '/'es are stripped from head unless it is the root.
+
def split(p):
    """Split a pathname.  Returns tuple "(head, tail)" where "tail" is
    everything after the final slash.  Either part may be empty."""
    cut = p.rfind('/') + 1
    head, tail = p[:cut], p[cut:]
    # Strip trailing slashes from head unless head is all slashes (root).
    if head and head.strip('/'):
        head = head.rstrip('/')
    return head, tail
+
+
+# Split a path in root and extension.
+# The extension is everything starting at the last dot in the last
+# pathname component; the root is everything before that.
+# It is always true that root + ext == p.
+
def splitext(p):
    """Split the extension from a pathname.  Extension is everything from the
    last dot to the end.  Returns "(root, ext)", either part may be empty."""
    dot = p.rfind('.')
    # Only a dot inside the final component starts an extension.
    if dot > p.rfind('/'):
        return p[:dot], p[dot:]
    return p, ''
+
+
+# Split a pathname into a drive specification and the rest of the
+# path.  Useful on DOS/Windows/NT; on Unix, the drive is always empty.
+
def splitdrive(p):
    """Split a pathname into drive and path; POSIX paths have no drive
    letter, so the drive part is always the empty string."""
    return '', p
+
+
+# Return the tail (basename) part of a path.
+
def basename(p):
    """Returns the final component of a pathname"""
    # Everything after the last slash (the tail of split()).
    return p[p.rfind('/') + 1:]
+
+
+# Return the head (dirname) part of a path.
+
def dirname(p):
    """Returns the directory component of a pathname"""
    # The head of split(): everything up to the last slash, with trailing
    # slashes stripped unless the head is all slashes (root).
    head = p[:p.rfind('/') + 1]
    if head and head.strip('/'):
        head = head.rstrip('/')
    return head
+
+
+# Return the longest prefix of all list elements.
+
def commonprefix(m):
    "Given a list of pathnames, returns the longest common leading component"
    if not m: return ''
    # The lexicographic min and max bound every other element, so their
    # shared prefix is the prefix common to the whole list.
    low, high = min(m), max(m)
    bound = min(len(low), len(high))
    for i in range(bound):
        if low[i] != high[i]:
            return low[:i]
    return low[:bound]
+
+# Get size, mtime, atime of files.
+
def getsize(filename):
    """Return the size of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_size
+
def getmtime(filename):
    """Return the last modification time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_mtime
+
def getatime(filename):
    """Return the last access time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_atime
+
def getctime(filename):
    """Return the metadata change time of a file, reported by os.stat()."""
    st = os.stat(filename)
    return st.st_ctime
+
+# Is a path a symbolic link?
+# This will always return false on systems where os.lstat doesn't exist.
+
def islink(path):
    """Test whether a path is a symbolic link"""
    # AttributeError covers platforms whose os module lacks lstat().
    try:
        mode = os.lstat(path).st_mode
    except (os.error, AttributeError):
        return False
    return stat.S_ISLNK(mode)
+
+
+# Does a path exist?
+# This is false for dangling symbolic links.
+
def exists(path):
    """Test whether a path exists.  Returns False for broken symbolic links"""
    try:
        os.stat(path)
    except os.error:
        return False
    return True
+
+
+# Being true for dangling symbolic links is also useful.
+
def lexists(path):
    """Test whether a path exists.  Returns True for broken symbolic links"""
    # lstat() succeeds on a dangling symlink where stat() would not.
    try:
        os.lstat(path)
    except os.error:
        return False
    return True
+
+
+# Is a path a directory?
+# This follows symbolic links, so both islink() and isdir() can be true
+# for the same path.
+
def isdir(path):
    """Test whether a path is a directory"""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        return False
    return stat.S_ISDIR(mode)
+
+
+# Is a path a regular file?
+# This follows symbolic links, so both islink() and isfile() can be true
+# for the same path.
+
def isfile(path):
    """Test whether a path is a regular file"""
    try:
        mode = os.stat(path).st_mode
    except os.error:
        return False
    return stat.S_ISREG(mode)
+
+
+# Are two filenames really pointing to the same file?
+
def samefile(f1, f2):
    """Test whether two pathnames reference the same actual file"""
    return samestat(os.stat(f1), os.stat(f2))
+
+
+# Are two open files really referencing the same file?
+# (Not necessarily the same file descriptor!)
+
def sameopenfile(fp1, fp2):
    """Test whether two open file objects reference the same file"""
    return samestat(os.fstat(fp1), os.fstat(fp2))
+
+
+# Are two stat buffers (obtained from stat, fstat or lstat)
+# describing the same file?
+
def samestat(s1, s2):
    """Test whether two stat buffers reference the same file"""
    # Same file means same inode on the same device.
    return (s1.st_ino, s1.st_dev) == (s2.st_ino, s2.st_dev)
+
+
+# Is a path a mount point?
+# (Does this work for all UNIXes?  Is it even guaranteed to work by Posix?)
+
def ismount(path):
    """Test whether a path is a mount point"""
    try:
        st_here = os.stat(path)
        st_parent = os.stat(join(path, '..'))
    except os.error:
        return False # It doesn't exist -- so not a mount point :-)
    if st_here.st_dev != st_parent.st_dev:
        return True     # path/.. on a different device as path
    if st_here.st_ino == st_parent.st_ino:
        return True     # path/.. is the same i-node as path
    return False
+
+
+# Directory tree walk.
+# For each directory under top (including top itself, but excluding
+# '.' and '..'), func(arg, dirname, filenames) is called, where
+# dirname is the name of the directory and filenames is the list
+# of files (and subdirectories etc.) in the directory.
+# The func may modify the filenames list, to implement a filter,
+# or to impose a different order of visiting.
+
def walk(top, func, arg):
    """Directory tree walk with callback function.

    For each directory in the directory tree rooted at top (including top
    itself, but excluding '.' and '..'), call func(arg, dirname, fnames).
    dirname is the name of the directory, and fnames a list of the names of
    the files and subdirectories in dirname (excluding '.' and '..').  func
    may modify the fnames list in-place (e.g. via del or slice assignment),
    and walk will only recurse into the subdirectories whose names remain in
    fnames; this can be used to implement a filter, or to impose a specific
    order of visiting.  No semantics are defined for, or required of, arg,
    beyond that arg is always passed to func.  It can be used, e.g., to pass
    a filename pattern, or a mutable object designed to accumulate
    statistics.  Passing None for arg is common."""

    try:
        entries = os.listdir(top)
    except os.error:
        # Unreadable/vanished directory: silently skip it.
        return
    # Callback first, so func may prune 'entries' before recursion.
    func(arg, top, entries)
    for entry in entries:
        full = join(top, entry)
        try:
            st = os.lstat(full)
        except os.error:
            continue
        if stat.S_ISDIR(st.st_mode):
            walk(full, func, arg)
+
+
+# Expand paths beginning with '~' or '~user'.
+# '~' means $HOME; '~user' means that user's home directory.
+# If the path doesn't begin with '~', or if the user or $HOME is unknown,
+# the path is returned unchanged (leaving error reporting to whatever
+# function is called with the expanded path as argument).
+# See also module 'glob' for expansion of *, ? and [...] in pathnames.
+# (A function should also be defined to do full *sh-style environment
+# variable expansion.)
+
def expanduser(path):
    """Expand ~ and ~user constructions.  If user or $HOME is unknown,
    do nothing."""
    if not path.startswith('~'):
        return path
    cut = path.find('/', 1)
    if cut < 0:
        cut = len(path)
    if cut == 1:
        # Bare '~': prefer $HOME, fall back to the password database.
        if 'HOME' in os.environ:
            userhome = os.environ['HOME']
        else:
            import pwd
            userhome = pwd.getpwuid(os.getuid()).pw_dir
    else:
        # '~user': unknown users leave the path untouched.
        import pwd
        try:
            entry = pwd.getpwnam(path[1:cut])
        except KeyError:
            return path
        userhome = entry.pw_dir
    # Avoid a double slash when the home directory ends in '/'.
    if userhome.endswith('/'):
        cut += 1
    return userhome + path[cut:]
+
+
# Expand paths containing shell variable substitutions.
# Only the forms $variable and ${variable} are handled; variables that
# are not set in the environment are left in place.

_varprog = None         # lazily compiled pattern, cached across calls

def expandvars(path):
    """Expand shell variables of form $var and ${var}.  Unknown variables
    are left unchanged."""
    global _varprog
    if '$' not in path:
        return path
    if _varprog is None:
        import re
        _varprog = re.compile(r'\$(\w+|\{[^}]*\})')
    pos = 0
    while True:
        match = _varprog.search(path, pos)
        if match is None:
            return path
        start, end = match.span(0)
        name = match.group(1)
        if name[:1] == '{' and name[-1:] == '}':
            name = name[1:-1]
        if name in os.environ:
            value = os.environ[name]
            path = path[:start] + value + path[end:]
            # Resume scanning after the substituted text so the value
            # itself is never re-expanded.
            pos = start + len(value)
        else:
            pos = end
+
+
+# Normalize a path, e.g. A//B, A/./B and A/foo/../B all become A/B.
+# It should be understood that this may change the meaning of the path
+# if it contains symbolic links!
+
def normpath(path):
    """Normalize path, eliminating double slashes, etc."""
    if path == '':
        return '.'
    # POSIX treats exactly two leading slashes as significant; one, or
    # three or more, collapse to a single slash.
    slashes = 0
    if path.startswith('/'):
        slashes = 1
        if path.startswith('//') and not path.startswith('///'):
            slashes = 2
    kept = []
    for part in path.split('/'):
        # Empty components (doubled slashes) and '.' disappear.
        if part == '' or part == '.':
            continue
        if part != '..':
            kept.append(part)
        elif (not slashes and not kept) or (kept and kept[-1] == '..'):
            # A '..' is preserved when there is nothing to climb out of:
            # at the start of a relative path, or after another '..'.
            kept.append(part)
        elif kept:
            # Otherwise '..' cancels the previous component.
            kept.pop()
    result = '/' * slashes + '/'.join(kept)
    return result or '.'
+
+
def abspath(path):
    """Return an absolute version of path: a relative path is joined
    onto the current working directory, and the result is normalized."""
    if isabs(path):
        return normpath(path)
    return normpath(join(os.getcwd(), path))
+
+
+# Return a canonical path (i.e. the absolute location of a file on the
+# filesystem).
+
def realpath(filename):
    """Return the canonical path of the specified filename, eliminating any
symbolic links encountered in the path."""
    if isabs(filename):
        parts = ['/'] + filename.split('/')[1:]
    else:
        parts = filename.split('/')

    # Grow the path one component at a time; on the first symlink found,
    # resolve it and restart on the rewritten path.
    for end in range(2, len(parts) + 1):
        component = join(*parts[:end])
        if not islink(component):
            continue
        resolved = _resolve_link(component)
        if resolved is None:
            # Symlink loop: keep the link itself plus the remainder.
            return abspath(join(*([component] + parts[end:])))
        return realpath(join(*([resolved] + parts[end:])))

    # No symlinks anywhere in the path.
    return abspath(filename)
+
+
def _resolve_link(path):
    """Internal helper.  Follow symlinks starting at path until reaching
    something that is not a symlink; return the final path, or None if a
    link cycle is detected."""
    visited = set()
    while islink(path):
        if path in visited:
            # Seen this link before on this chain: symlink loop.
            return None
        visited.add(path)
        target = os.readlink(path)
        if isabs(target):
            path = normpath(target)
        else:
            # Relative targets are taken relative to the link's directory.
            path = normpath(join(dirname(path), target))
    return path
+
# POSIX (this module) does not promise that arbitrary Unicode filenames
# are supported by the platform's filesystem APIs.
supports_unicode_filenames = False
diff --git a/depot_tools/release/win/python_24/Lib/pprint.py b/depot_tools/release/win/python_24/Lib/pprint.py
new file mode 100644
index 0000000..da6ab1a9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pprint.py
@@ -0,0 +1,314 @@
+#  Author:      Fred L. Drake, Jr.
+#               fdrake@acm.org
+#
+#  This is a simple little module I wrote to make life easier.  I didn't
+#  see anything quite like it in the library, though I may have overlooked
+#  something.  I wrote this when I was trying to read some heavily nested
+#  tuples with fairly non-descriptive content.  This is modeled very much
+#  after Lisp/Scheme - style pretty-printing of lists.  If you find it
+#  useful, thank small children who sleep at night.
+
+"""Support to pretty-print lists, tuples, & dictionaries recursively.
+
+Very simple, but useful, especially in debugging data structures.
+
+Classes
+-------
+
+PrettyPrinter()
+    Handle pretty-printing operations onto a stream using a configured
+    set of formatting parameters.
+
+Functions
+---------
+
+pformat()
+    Format a Python object into a pretty-printed representation.
+
+pprint()
+    Pretty-print a Python object to a stream [default is sys.stdout].
+
+saferepr()
+    Generate a 'standard' repr()-like value, but protect against recursive
+    data structures.
+
+"""
+
+import sys as _sys
+
+from cStringIO import StringIO as _StringIO
+
+__all__ = ["pprint","pformat","isreadable","isrecursive","saferepr",
+           "PrettyPrinter"]
+
# Cache these builtins and the bound join method in module globals:
# global lookups are cheaper than builtin/attribute lookups in the hot
# formatting paths below (CPython micro-optimization).
_commajoin = ", ".join
_id = id
_len = len
_type = type
+
+
def pprint(object, stream=None, indent=1, width=80, depth=None):
    """Pretty-print a Python object to a stream [default is sys.stdout]."""
    PrettyPrinter(stream=stream, indent=indent,
                  width=width, depth=depth).pprint(object)
+
def pformat(object, indent=1, width=80, depth=None):
    """Format a Python object into a pretty-printed representation."""
    printer = PrettyPrinter(indent=indent, width=width, depth=depth)
    return printer.pformat(object)
+
def saferepr(object):
    """Version of repr() which can handle recursive data structures."""
    rep, readable, recursive = _safe_repr(object, {}, None, 0)
    return rep
+
def isreadable(object):
    """Determine if saferepr(object) is readable by eval()."""
    rep, readable, recursive = _safe_repr(object, {}, None, 0)
    return readable
+
def isrecursive(object):
    """Determine if object requires a recursive representation."""
    rep, readable, recursive = _safe_repr(object, {}, None, 0)
    return recursive
+
class PrettyPrinter:
    """Pretty-print Python data structures onto a stream.

    The formatting parameters (indent, width, depth, target stream) are
    fixed when the instance is constructed; see __init__.
    """
    def __init__(self, indent=1, width=80, depth=None, stream=None):
        """Handle pretty printing operations onto a stream using a set of
        configured parameters.

        indent
            Number of spaces to indent for each level of nesting.

        width
            Attempted maximum number of columns in the output.

        depth
            The maximum depth to print out nested structures.

        stream
            The desired output stream.  If omitted (or false), the standard
            output stream available at construction will be used.

        """
        indent = int(indent)
        width = int(width)
        assert indent >= 0, "indent must be >= 0"
        assert depth is None or depth > 0, "depth must be > 0"
        assert width, "width must be != 0"
        self._depth = depth
        self._indent_per_level = indent
        self._width = width
        if stream is not None:
            self._stream = stream
        else:
            self._stream = _sys.stdout

    def pprint(self, object):
        """Write the formatted representation of object to the configured
        stream, followed by a newline."""
        self._stream.write(self.pformat(object) + "\n")

    def pformat(self, object):
        """Return the formatted representation of object as a string."""
        sio = _StringIO()
        self._format(object, sio, 0, 0, {}, 0)
        return sio.getvalue()

    def isrecursive(self, object):
        """Determine if object requires a recursive representation."""
        return self.format(object, {}, 0, 0)[2]

    def isreadable(self, object):
        """Determine if the representation of object is readable by eval()
        (readable and free of recursive references)."""
        s, readable, recursive = self.format(object, {}, 0, 0)
        return readable and not recursive

    def _format(self, object, stream, indent, allowance, context, level):
        # Recursively write object's representation to stream.  indent is
        # the current left margin, allowance is the space that must be
        # left free at the end of the last line, context maps id()s of
        # containers currently being formatted (cycle detection), and
        # level is the nesting depth.
        # NOTE(review): self._readable/self._recursive are only ever
        # assigned here and in _repr(), never pre-initialized; nothing in
        # this class reads them -- confirm against external callers.
        level = level + 1
        objid = _id(object)
        if objid in context:
            # This container is already being formatted higher up the
            # call stack: emit a recursion marker instead of recursing
            # forever.
            stream.write(_recursion(object))
            self._recursive = True
            self._readable = False
            return
        rep = self._repr(object, context, level - 1)
        typ = _type(object)
        # Use multi-line output only when the one-line repr would
        # overflow the space remaining on this line.
        sepLines = _len(rep) > (self._width - 1 - indent - allowance)
        write = stream.write

        if sepLines:
            r = getattr(typ, "__repr__", None)
            # Dicts that use the stock dict repr: one "key: value" per
            # line, keys in sorted order (Python 2: items() is a list).
            if issubclass(typ, dict) and r is dict.__repr__:
                write('{')
                if self._indent_per_level > 1:
                    write((self._indent_per_level - 1) * ' ')
                length = _len(object)
                if length:
                    # Register this dict for cycle detection while its
                    # entries are being formatted.
                    context[objid] = 1
                    indent = indent + self._indent_per_level
                    items  = object.items()
                    items.sort()
                    key, ent = items[0]
                    rep = self._repr(key, context, level)
                    write(rep)
                    write(': ')
                    self._format(ent, stream, indent + _len(rep) + 2,
                                  allowance + 1, context, level)
                    if length > 1:
                        for key, ent in items[1:]:
                            rep = self._repr(key, context, level)
                            write(',\n%s%s: ' % (' '*indent, rep))
                            self._format(ent, stream, indent + _len(rep) + 2,
                                          allowance + 1, context, level)
                    indent = indent - self._indent_per_level
                    del context[objid]
                write('}')
                return

            # Lists/tuples using the stock reprs: one element per line.
            if (issubclass(typ, list) and r is list.__repr__) or \
               (issubclass(typ, tuple) and r is tuple.__repr__):
                if issubclass(typ, list):
                    write('[')
                    endchar = ']'
                else:
                    write('(')
                    endchar = ')'
                if self._indent_per_level > 1:
                    write((self._indent_per_level - 1) * ' ')
                length = _len(object)
                if length:
                    context[objid] = 1
                    indent = indent + self._indent_per_level
                    self._format(object[0], stream, indent, allowance + 1,
                                 context, level)
                    if length > 1:
                        for ent in object[1:]:
                            write(',\n' + ' '*indent)
                            self._format(ent, stream, indent,
                                          allowance + 1, context, level)
                    indent = indent - self._indent_per_level
                    del context[objid]
                # One-element tuples need the trailing comma.
                if issubclass(typ, tuple) and length == 1:
                    write(',')
                write(endchar)
                return

        # Everything else (or anything that fits): the one-line repr.
        write(rep)

    def _repr(self, object, context, level):
        # Delegate to format() with a copy of the cycle-detection context,
        # folding the readability/recursion flags into instance state.
        repr, readable, recursive = self.format(object, context.copy(),
                                                self._depth, level)
        if not readable:
            self._readable = False
        if recursive:
            self._recursive = True
        return repr

    def format(self, object, context, maxlevels, level):
        """Format object for a specific context, returning a string
        and flags indicating whether the representation is 'readable'
        and whether the object represents a recursive construct.
        """
        return _safe_repr(object, context, maxlevels, level)
+
+
+# Return triple (repr_string, isreadable, isrecursive).
+
def _safe_repr(object, context, maxlevels, level):
    # Workhorse behind saferepr()/isreadable()/isrecursive(): returns a
    # (repr_string, isreadable, isrecursive) triple.  context maps id()s
    # of the containers currently being formatted (cycle detection);
    # maxlevels bounds the nesting depth (None/0 means unbounded).
    typ = _type(object)
    if typ is str:
        # Strings: when the locale module has never been imported, the
        # builtin repr() is used directly; otherwise the repr is rebuilt
        # by hand, passing alphabetic characters through unescaped
        # (presumably to sidestep locale-dependent behaviour in Python 2
        # -- NOTE(review): confirm intent).
        if 'locale' not in _sys.modules:
            return repr(object), True, False
        # Choose the quote style the way repr() does: double quotes only
        # when the string contains a single quote and no double quote.
        if "'" in object and '"' not in object:
            closure = '"'
            quotes = {'"': '\\"'}
        else:
            closure = "'"
            quotes = {"'": "\\'"}
        qget = quotes.get
        sio = _StringIO()
        write = sio.write
        for char in object:
            if char.isalpha():
                write(char)
            else:
                # Non-alphabetic characters reuse repr()'s escaping,
                # stripped of its surrounding quotes.
                write(qget(char, repr(char)[1:-1]))
        return ("%s%s%s" % (closure, sio.getvalue(), closure)), True, False

    r = getattr(typ, "__repr__", None)
    # Dicts that use the stock dict repr.
    if issubclass(typ, dict) and r is dict.__repr__:
        if not object:
            return "{}", True, False
        objid = _id(object)
        if maxlevels and level > maxlevels:
            # Depth limit reached: summarize; recursion flag reflects
            # whether this dict is already on the formatting stack.
            return "{...}", False, objid in context
        if objid in context:
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        saferepr = _safe_repr
        for k, v in object.iteritems():  # Python 2 dict iteration
            krepr, kreadable, krecur = saferepr(k, context, maxlevels, level)
            vrepr, vreadable, vrecur = saferepr(v, context, maxlevels, level)
            append("%s: %s" % (krepr, vrepr))
            readable = readable and kreadable and vreadable
            if krecur or vrecur:
                recursive = True
        del context[objid]
        return "{%s}" % _commajoin(components), readable, recursive

    # Lists/tuples that use the stock reprs.
    if (issubclass(typ, list) and r is list.__repr__) or \
       (issubclass(typ, tuple) and r is tuple.__repr__):
        if issubclass(typ, list):
            if not object:
                return "[]", True, False
            format = "[%s]"
        elif _len(object) == 1:
            # One-element tuples need the trailing comma.
            format = "(%s,)"
        else:
            if not object:
                return "()", True, False
            format = "(%s)"
        objid = _id(object)
        if maxlevels and level > maxlevels:
            return format % "...", False, objid in context
        if objid in context:
            return _recursion(object), False, True
        context[objid] = 1
        readable = True
        recursive = False
        components = []
        append = components.append
        level += 1
        for o in object:
            orepr, oreadable, orecur = _safe_repr(o, context, maxlevels, level)
            append(orepr)
            if not oreadable:
                readable = False
            if orecur:
                recursive = True
        del context[objid]
        return format % _commajoin(components), readable, recursive

    # Anything else: trust repr().  Reprs of the form '<...>' (default
    # instance reprs) cannot be read back by eval().
    rep = repr(object)
    return rep, (rep and not rep.startswith('<')), False
+
+
+def _recursion(object):
+    return ("<Recursion on %s with id=%s>"
+            % (_type(object).__name__, _id(object)))
+
+
def _perfcheck(object=None):
    """Crude benchmark: time _safe_repr() and PrettyPrinter.pformat()
    on a large nested structure and print both timings."""
    import time
    if object is None:
        # Default workload: 100000 copies of a small mixed container.
        object = [("string", (1, 2), [3, 4], {5: 6, 7: 8})] * 100000
    p = PrettyPrinter()
    t1 = time.time()
    _safe_repr(object, {}, None, 0)
    t2 = time.time()
    p.pformat(object)
    t3 = time.time()
    # Python 2 print statements (this tree vendors Python 2.4).
    print "_safe_repr:", t2 - t1
    print "pformat:", t3 - t2

if __name__ == "__main__":
    _perfcheck()
diff --git a/depot_tools/release/win/python_24/Lib/profile.doc b/depot_tools/release/win/python_24/Lib/profile.doc
new file mode 100644
index 0000000..8724484
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/profile.doc
@@ -0,0 +1,702 @@
+profile.doc                     last updated 6/23/94 [by Guido]
+
+ PROFILER DOCUMENTATION and (mini) USER'S MANUAL
+
+Copyright 1994, by InfoSeek Corporation, all rights reserved.
+Written by James Roskind
+
+Permission to use, copy, modify, and distribute this Python software
+and its associated documentation for any purpose (subject to the
+restriction in the following sentence) without fee is hereby granted,
+provided that the above copyright notice appears in all copies, and
+that both that copyright notice and this permission notice appear in
+supporting documentation, and that the name of InfoSeek not be used in
+advertising or publicity pertaining to distribution of the software
+without specific, written prior permission.  This permission is
+explicitly restricted to the copying and modification of the software
+to remain in Python, compiled Python, or other languages (such as C)
+wherein the modified or derived code is exclusively imported into a
+Python module.
+
+INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
+SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY
+SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
+RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+The profiler was written after only programming in Python for 3 weeks.
+As a result, it is probably clumsy code, but I don't know for sure yet
+'cause I'm a beginner :-).  I did work hard to make the code run fast,
+so that profiling would be a reasonable thing to do.  I tried not to
+repeat code fragments, but I'm sure I did some stuff in really awkward
+ways at times.  Please send suggestions for improvements to:
+jar@infoseek.com.  I won't promise *any* support.  ...but I'd
+appreciate the feedback.
+
+
+SECTION HEADING LIST:
+  INTRODUCTION
+  HOW IS THIS profile DIFFERENT FROM THE OLD profile MODULE?
+  INSTANT USERS MANUAL
+  WHAT IS DETERMINISTIC PROFILING?
+  REFERENCE MANUAL			  
+    FUNCTION	profile.run(string, filename_opt)
+    CLASS	Stats(filename, ...)
+    METHOD	strip_dirs()
+    METHOD	add(filename, ...)
+    METHOD	sort_stats(key, ...)
+    METHOD	reverse_order()
+    METHOD	print_stats(restriction, ...)
+    METHOD	print_callers(restrictions, ...)
+    METHOD	print_callees(restrictions, ...)
+    METHOD	ignore()
+  LIMITATIONS
+  CALIBRATION
+  EXTENSIONS: Deriving Better Profilers
+
+
+
+INTRODUCTION
+
+A "profiler" is a program that describes the run time performance of a
+program, providing a variety of statistics.  This documentation
+describes the profiler functionality provided in the modules
+"profile" and "pstats."  This profiler provides "deterministic
+profiling" of any Python programs.  It also provides a series of
+report generation tools to allow users to rapidly examine the results
+of a profile operation.
+
+
+HOW IS THIS profile DIFFERENT FROM THE OLD profile MODULE?
+
+The big changes from standard profiling module are that you get more
+information, and you pay less CPU time.  It's not a trade-off, it's a
+trade-up.
+
+To be specific:
+
+ bugs removed: local stack frame is no longer molested, execution time
+      is now charged to correct functions, ....
+
+ accuracy increased: profiler execution time is no longer charged to
+      user's code, calibration for platform is supported, file reads
+      are not done *by* profiler *during* profiling (and charged to
+      user's code!), ...
+
+ speed increased: Overhead CPU cost was reduced by more than a factor of
+      two (perhaps a factor of five), lightweight profiler module is
+      all that must be loaded, and the report generating module
+      (pstats) is not needed during profiling. 
+
+ recursive functions support: cumulative times in recursive functions
+      are correctly calculated; recursive entries are counted; ...
+
 large growth in report generating UI: distinct profile runs can be added
+       together forming a comprehensive report; functions that import
+       statistics take arbitrary lists of files; sorting criteria is now
+       based on keywords (instead of 4 integer options); reports shows
+       what functions were profiled as well as what profile file was
+       referenced; output format has been improved, ...
+
+
+INSTANT USERS MANUAL
+
+This section is provided for users that "don't want to read the
+manual." It provides a very brief overview, and allows a user to
+rapidly perform profiling on an existing application.
+
+To profile an application with a main entry point of "foo()", you
+would add the following to your module:
+
+	import profile
+	profile.run("foo()")
+
+The above action would cause "foo()" to be run, and a series of
+informative lines (the profile) to be printed.  The above approach is
+most useful when working with the interpreter.  If you would like to
+save the results of a profile into a file for later examination, you
+can supply a file name as the second argument to the run() function:
+
+	import profile
+	profile.run("foo()", 'fooprof')
+
+When you wish to review the profile, you should use the methods in the
+pstats module.  Typically you would load the statistics data as
+follows:
+
+	import pstats
+	p = pstats.Stats('fooprof')
+
+The class "Stats" (the above code just created an instance of this
+class) has a variety of methods for manipulating and printing the data
+that was just read into "p".  When you ran profile.run() above, what
+was printed was the result of three method calls:
+
+	p.strip_dirs().sort_stats(-1).print_stats()
+
+The first method removed the extraneous path from all the module
+names. The second method sorted all the entries according to the
+standard module/line/name string that is printed (this is to comply
+with the semantics of the old profiler).  The third method printed out
+all the statistics.  You might try the following sort calls:
+
+	p.sort_stats('name')
+	p.print_stats()
+
+The first call will actually sort the list by function name, and the
+second call will print out the statistics.  The following are some
+interesting calls to experiment with:
+
+	p.sort_stats('cumulative').print_stats(10)
+
+This sorts the profile by cumulative time in a function, and then only
+prints the ten most significant lines.  If you want to understand what
+algorithms are taking time, the above line is what you would use.
+
+If you were looking to see what functions were looping a lot, and
+taking a lot of time, you would do:
+
+	p.sort_stats('time').print_stats(10)
+
+to sort according to time spent within each function, and then print
+the statistics for the top ten functions.
+
+You might also try:
+
+	p.sort_stats('file').print_stats('__init__')
+
+This will sort all the statistics by file name, and then print out
+statistics for only the class init methods ('cause they are spelled
+with "__init__" in them).  As one final example, you could try:
+
+	p.sort_stats('time', 'cum').print_stats(.5, 'init')
+
+This line sorts stats with a primary key of time, and a secondary key
+of cumulative time, and then prints out some of the statistics.  To be
+specific, the list is first culled down to 50% (re: .5) of its
+original size, then only lines containing "init" are maintained, and
+that sub-sub-list is printed.
+
+If you wondered what functions called the above functions, you could
+now (p is still sorted according to the last criteria) do:
+
+	p.print_callers(.5, 'init')
+
+and you would get a list of callers for each of the listed functions. 
+
+If you want more functionality, you're going to have to read the
+manual (or guess) what the following functions do:
+
+	p.print_callees()
+	p.add('fooprof')
+
+
+WHAT IS DETERMINISTIC PROFILING?
+
+"Deterministic profiling" is meant to reflect the fact that all
+"function call", "function return", and "exception" events are
+monitored, and precise timings are made for the intervals between
+these events (during which time the user's code is executing).  In
+contrast, "statistical profiling" (which is not done by this module)
+randomly samples the effective instruction pointer, and deduces where
+time is being spent.  The latter technique traditionally involves less
+overhead (as the code does not need to be instrumented), but provides
+only relative indications of where time is being spent.
+
+In Python, since there is an interpreter active during execution, the
+presence of instrumented code is not required to do deterministic
+profiling. Python automatically provides a hook (optional callback)
+for each event.  In addition, the interpreted nature of Python tends
+to add so much overhead to execution, that deterministic profiling
+tends to only add small processing overhead, in typical applications.
+The result is that deterministic profiling is not that expensive, but
+yet provides extensive run time statistics about the execution of a
+Python program.  
+
+Call count statistics can be used to identify bugs in code (surprising
+counts), and to identify possible inline-expansion points (high call
+counts).  Internal time statistics can be used to identify hot loops
+that should be carefully optimized.  Cumulative time statistics should
+be used to identify high level errors in the selection of algorithms.
+Note that the unusual handling of cumulative times in this profiler
+allows statistics for recursive implementations of algorithms to be
+directly compared to iterative implementations.
+
+
+REFERENCE MANUAL			  
+
+The primary entry point for the profiler is the global function
+profile.run().  It is typically used to create any profile
+information.  The reports are formatted and printed using methods for
+the class pstats.Stats.  The following is a description of all of
+these standard entry points and functions.  For a more in-depth view
+of some of the code, consider reading the later section on "Profiler
+Extensions," which includes discussion of how to derive "better"
+profilers from the classes presented, or reading the source code for
+these modules.
+
+
+FUNCTION	profile.run(string, filename_opt)
+
This function takes a single argument that can be passed to the
+"exec" statement, and an optional file name.  In all cases this
+routine attempts to "exec" its first argument, and gather profiling
+statistics from the execution. If no file name is present, then this
+function automatically prints a simple profiling report, sorted by the
+standard name string (file/line/function-name) that is presented in
+each line.  The following is a typical output from such a call:
+
+cut here----
+
+         main()
+         2706 function calls (2004 primitive calls) in 4.504 CPU seconds
+
+   Ordered by: standard name
+
+   ncalls  tottime  percall  cumtime  percall filename:lineno(function)
+        2    0.006    0.003    0.953    0.477 pobject.py:75(save_objects)
+     43/3    0.533    0.012    0.749    0.250 pobject.py:99(evaluate)
+	...
+
+cut here----
+
+The first line indicates that this profile was generated by the call:
+profile.run('main()'), and hence the exec'ed string is 'main()'.  The
+second line indicates that 2706 calls were monitored.  Of those calls,
+2004 were "primitive."  We define "primitive" to mean that the call
+was not induced via recursion.  The next line: "Ordered by: standard
+name", indicates that the text string in the far right column was used
+to sort the output.  The column headings include:
+
+	"ncalls" for the number of calls, 
+	"tottime" for the total time spent in the given function
+		(and excluding time made in calls to sub-functions), 
+	"percall" is the quotient of "tottime" divided by "ncalls"
+	"cumtime" is the total time spent in this and all subfunctions
+		(i.e., from invocation till exit). This figure is
+		accurate *even* for recursive functions.
+	"percall" is the quotient of "cumtime" divided by primitive
+		calls
+	"filename:lineno(function)" provides the respective data of
+		each function
+
+When there are two numbers in the first column (e.g.: 43/3), then the
+latter is the number of primitive calls, and the former is the actual
+number of calls.  Note that when the function does not recurse, these
+two values are the same, and only the single figure is printed.
+
+
+CLASS	Stats(filename, ...)
+
+This class constructor creates an instance of a statistics object from
+a filename (or set of filenames).  Stats objects are manipulated by
+methods, in order to print useful reports.  
+
+The file selected by the above constructor must have been created by
+the corresponding version of profile.  To be specific, there is *NO*
+file compatibility guaranteed with future versions of this profiler,
+and there is no compatibility with files produced by other profilers
+(e.g., the standard system profiler).
+
+If several files are provided, all the statistics for identical
+functions will be coalesced, so that an overall view of several
+processes can be considered in a single report.  If additional files
+need to be combined with data in an existing Stats object, the add()
+method can be used.
+
+
+METHOD	strip_dirs()
+
+This method for the Stats class removes all leading path information
+from file names.  It is very useful in reducing the size of the
+printout to fit within (close to) 80 columns.  This method modifies
the object, and the stripped information is lost.  After performing a
+strip operation, the object is considered to have its entries in a
+"random" order, as it was just after object initialization and
loading.  If strip_dirs() causes two function names to be
+indistinguishable (i.e., they are on the same line of the same
+filename, and have the same function name), then the statistics for
+these two entries are accumulated into a single entry.
+
+
+METHOD	add(filename, ...)
+
This method of the Stats class accumulates additional profiling
+information into the current profiling object.  Its arguments should
refer to filenames created by the corresponding version of
+profile.run().  Statistics for identically named (re: file, line,
+name) functions are automatically accumulated into single function
+statistics.
+
+
+METHOD	sort_stats(key, ...)
+
+This method modifies the Stats object by sorting it according to the
+supplied criteria.  The argument is typically a string identifying the
+basis of a sort (example: "time" or "name").
+
+When more than one key is provided, then additional keys are used as
+secondary criteria when there is equality in all keys selected
+before them.  For example, sort_stats('name', 'file') will sort all
+the entries according to their function name, and resolve all ties
+(identical function names) by sorting by file name.
+
+Abbreviations can be used for any key names, as long as the
+abbreviation is unambiguous.  The following are the keys currently
+defined: 
+
+		Valid Arg       Meaning
+		  "calls"      call count
+		  "cumulative" cumulative time
+		  "file"       file name
+		  "module"     file name
+		  "pcalls"     primitive call count
+		  "line"       line number
+		  "name"       function name
+		  "nfl"        name/file/line
+		  "stdname"    standard name
+		  "time"       internal time
+
+Note that all sorts on statistics are in descending order (placing most
time consuming items first), whereas name, file, and line number
+searches are in ascending order (i.e., alphabetical). The subtle
+distinction between "nfl" and "stdname" is that the standard name is a
+sort of the name as printed, which means that the embedded line
+numbers get compared in an odd way.  For example, lines 3, 20, and 40
+would (if the file names were the same) appear in the string order
+"20" "3" and "40".  In contrast, "nfl" does a numeric compare of the
+line numbers.  In fact, sort_stats("nfl") is the same as
+sort_stats("name", "file", "line").
+
+For compatibility with the standard profiler, the numeric argument -1,
+0, 1, and 2 are permitted.  They are interpreted as "stdname",
+"calls", "time", and "cumulative" respectively.  If this old style
+format (numeric) is used, only one sort key (the numeric key) will be
+used, and additionally arguments will be silently ignored.
+
+
+METHOD	reverse_order()
+
+This method for the Stats class reverses the ordering of the basic
+list within the object.  This method is provided primarily for
+compatibility with the standard profiler.  Its utility is questionable
+now that ascending vs descending order is properly selected based on
+the sort key of choice.
+
+
+METHOD	print_stats(restriction, ...)
+
+This method for the Stats class prints out a report as described in
+the profile.run() definition.  
+
+The order of the printing is based on the last sort_stats() operation
+done on the object (subject to caveats in add() and strip_dirs()).
+
+The arguments provided (if any) can be used to limit the list down to
+the significant entries.  Initially, the list is taken to be the
+complete set of profiled functions.  Each restriction is either an
+integer (to select a count of lines), or a decimal fraction between
+0.0 and 1.0 inclusive (to select a percentage of lines), or a regular
+expression (to pattern match the standard name that is printed).  If
+several restrictions are provided, then they are applied sequentially.
+For example:
+
+	print_stats(.1, "foo:")
+
+would first limit the printing to first 10% of list, and then only 
+print functions that were part of filename ".*foo:".  In contrast, the
+command: 
+
+	print_stats("foo:", .1)
+
+would limit the list to all functions having file names ".*foo:", and
+then proceed to only print the first 10% of them.
+
+
+METHOD	print_callers(restrictions, ...)
+
+This method for the Stats class prints a list of all functions that
+called each function in the profiled database.  The ordering is
+identical to that provided by print_stats(), and the definition of the
+restricting argument is also identical.  For convenience, a number is
+shown in parentheses after each caller to show how many times this
+specific call was made.  A second non-parenthesized number is the
+cumulative time spent in the function at the right.
+
+
+METHOD	print_callees(restrictions, ...)
+
+This method for the Stats class prints a list of all functions that
+were called by the indicated function.  Aside from this reversal of
+direction of calls (re: called vs was called by), the arguments and
+ordering are identical to the print_callers() method.
+
+
+METHOD	ignore()
+
+This method of the Stats class is used to dispose of the value
+returned by earlier methods.  All standard methods in this class
+return the instance that is being processed, so that the commands can
+be strung together.  For example:
+
+pstats.Stats('foofile').strip_dirs().sort_stats('cum').print_stats().ignore()
+
+would perform all the indicated functions, but it would not return
+the final reference to the Stats instance.
+
+
+
+	
+LIMITATIONS
+
+There are two fundamental limitations on this profiler.  The first is
+that it relies on the Python interpreter to dispatch "call", "return",
+and "exception" events.  Compiled C code does not get interpreted,
+and hence is "invisible" to the profiler.  All time spent in C code
+(including builtin functions) will be charged to the Python function
+that invoked the C code.  If the C code calls out to some native
+Python code, then those calls will be profiled properly.
+
+The second limitation has to do with accuracy of timing information.
+There is a fundamental problem with deterministic profilers involving
+accuracy.  The most obvious restriction is that the underlying "clock"
+is only ticking at a rate (typically) of about .001 seconds.  Hence no
+measurements will be more accurate than that underlying clock.  If
+enough measurements are taken, then the "error" will tend to average
+out. Unfortunately, removing this first error induces a second source
+of error...
+
+The second problem is that it "takes a while" from when an event is
+dispatched until the profiler's call to get the time actually *gets*
+the state of the clock.  Similarly, there is a certain lag when
+exiting the profiler event handler from the time that the clock's
+value was obtained (and then squirreled away), until the user's code
+is once again executing.  As a result, functions that are called many
+times, or call many functions, will typically accumulate this error.
+The error that accumulates in this fashion is typically less than the
+accuracy of the clock (i.e., less than one clock tick), but it *can*
+accumulate and become very significant.  This profiler provides a
+means of calibrating itself for a given platform so that this error can
+be probabilistically (i.e., on the average) removed.  After the
+profiler is calibrated, it will be more accurate (in a least square
+sense), but it will sometimes produce negative numbers (when call
+counts are exceptionally low, and the gods of probability work against
+you :-). )  Do *NOT* be alarmed by negative numbers in the profile.
+They should *only* appear if you have calibrated your profiler, and
+the results are actually better than without calibration.
+
+
+CALIBRATION
+
+The profiler class has a hard coded constant that is added to each
+event handling time to compensate for the overhead of calling the time
+function, and socking away the results.  The following procedure can
+be used to obtain this constant for a given platform (see discussion
+in LIMITATIONS above). 
+
+	import profile
+	pr = profile.Profile()
+	pr.calibrate(100)
+	pr.calibrate(100)
+	pr.calibrate(100)
+
+The argument to calibrate() is the number of times to try to do the
+sample calls to get the CPU times.  If your computer is *very* fast,
+you might have to do:
+
+	pr.calibrate(1000)
+
+or even:
+
+	pr.calibrate(10000)
+
+The object of this exercise is to get a fairly consistent result.
+When you have a consistent answer, you are ready to use that number in
+the source code.  For a Sun Sparcstation 1000 running Solaris 2.3, the
+magical number is about .00053.  If you have a choice, you are better
+off with a smaller constant, and your results will "less often" show
+up as negative in profile statistics.
+
+The following shows how the trace_dispatch() method in the Profile
+class should be modified to install the calibration constant on a Sun
+Sparcstation 1000:
+
+	def trace_dispatch(self, frame, event, arg):
+		t = self.timer()
+		t = t[0] + t[1] - self.t - .00053 # Calibration constant
+
+		if self.dispatch[event](frame,t):
+			t = self.timer()
+			self.t = t[0] + t[1]
+		else:
+			r = self.timer()
+			self.t = r[0] + r[1] - t # put back unrecorded delta
+		return
+
+Note that if there is no calibration constant, then the line
+containing the calibration constant should simply say:
+
+		t = t[0] + t[1] - self.t  # no calibration constant
+
+You can also achieve the same results using a derived class (and the
+profiler will actually run equally fast!!), but the above method is
+the simplest to use.  I could have made the profiler "self
+calibrating", but it would have made the initialization of the
+profiler class slower, and would have required some *very* fancy
+coding, or else the use of a variable where the constant .00053 was
+placed in the code shown.  This is a ****VERY**** critical performance
+section, and there is no reason to use a variable lookup at this
+point, when a constant can be used.
+
+
+EXTENSIONS: Deriving Better Profilers
+
+The Profile class of profile was written so that derived classes
+could be developed to extend the profiler.  Rather than describing all
+the details of such an effort, I'll just present the following two
+examples of derived classes that can be used to do profiling.  If the
+reader is an avid Python programmer, then it should be possible to use
+these as a model and create similar (and perchance better) profile
+classes. 
+
+If all you want to do is change how the timer is called, or which
+timer function is used, then the basic class has an option for that in
+the constructor for the class.  Consider passing the name of a
+function to call into the constructor:
+
+	pr = profile.Profile(your_time_func)
+
+The resulting profiler will call your time function instead of
+os.times().  The function should return either a single number, or a
+list of numbers (like what os.times() returns).  If the function
+returns a single time number, or the list of returned numbers has
+length 2, then you will get an especially fast version of the dispatch
+routine.  
+
+Be warned that you *should* calibrate the profiler class for the
+timer function that you choose.  For most machines, a timer that
+returns a lone integer value will provide the best results in terms of
+low overhead during profiling.  (os.times is *pretty* bad, 'cause it
+returns a tuple of floating point values, so all arithmetic is
+floating point in the profiler!).  If you want to substitute a
+better timer in the cleanest fashion, you should derive a class, and
+simply put in the replacement dispatch method that better handles your timer
+call, along with the appropriate calibration constant :-).
+
+
+cut here------------------------------------------------------------------
+#****************************************************************************
+# OldProfile class documentation
+#****************************************************************************
+#
+# The following derived profiler simulates the old style profile, providing
+# errant results on recursive functions. The reason for the usefulness of this
+# profiler is that it runs faster (i.e., less overhead) than the old
+# profiler.  It still creates all the caller stats, and is quite
+# useful when there is *no* recursion in the user's code.  It is also
+# a lot more accurate than the old profiler, as it does not charge all
+# its overhead time to the user's code. 
+#****************************************************************************
+class OldProfile(Profile):
+	# Example derived profiler that simulates the old-style profile
+	# module: it keys timings by `frame.f_code` and (per the comment
+	# block above) gives errant results on recursive functions.
+	def trace_dispatch_exception(self, frame, t):
+		# An exception propagating out of a different frame means the
+		# current record's frame is exiting: treat it as a return.
+		rt, rtt, rct, rfn, rframe, rcur = self.cur
+		if rcur and not rframe is frame:
+			return self.trace_dispatch_return(rframe, t)
+		return 0
+
+	def trace_dispatch_call(self, frame, t):
+		fn = `frame.f_code`
+		
+		self.cur = (t, 0, 0, fn, frame, self.cur)
+		# Ensure a timings entry exists; the counts themselves are only
+		# updated at return time, so an existing entry is stored back
+		# unchanged here.
+		if self.timings.has_key(fn):
+			tt, ct, callers = self.timings[fn]
+			self.timings[fn] = tt, ct, callers
+		else:
+			self.timings[fn] = 0, 0, {}
+		return 1
+
+	def trace_dispatch_return(self, frame, t):
+		# Pop the returning frame's record; charge its internal time
+		# (rtt) to itself and its whole subtree time (sft) to the parent.
+		rt, rtt, rct, rfn, frame, rcur = self.cur
+		rtt = rtt + t
+		sft = rtt + rct
+
+		pt, ptt, pct, pfn, pframe, pcur = rcur
+		self.cur = pt, ptt+rt, pct+sft, pfn, pframe, pcur
+
+		tt, ct, callers = self.timings[rfn]
+		if callers.has_key(pfn):
+			callers[pfn] = callers[pfn] + 1
+		else:
+			callers[pfn] = 1
+		self.timings[rfn] = tt+rtt, ct + sft, callers
+
+		return 1
+
+
+	def snapshot_stats(self):
+		# Build self.stats in pstats format; the call count is the sum
+		# of per-caller counts (cc and nc are the same here).
+		self.stats = {}
+		for func in self.timings.keys():
+			tt, ct, callers = self.timings[func]
+			nor_func = self.func_normalize(func)
+			nor_callers = {}
+			nc = 0
+			for func_caller in callers.keys():
+				nor_callers[self.func_normalize(func_caller)]=\
+					  callers[func_caller]
+				nc = nc + callers[func_caller]
+			self.stats[nor_func] = nc, nc, tt, ct, nor_callers
+
+		
+
+#****************************************************************************
+# HotProfile class documentation
+#****************************************************************************
+#
+# This profiler is the fastest derived profile example.  It does not
+# calculate caller-callee relationships, and does not calculate cumulative
+# time under a function.  It only calculates time spent in a function, so
+# it runs very quickly (re: very low overhead).  In truth, the basic
+# profiler is so fast, that is probably not worth the savings to give
+# up the data, but this class still provides a nice example.
+#****************************************************************************
+class HotProfile(Profile):
+	def trace_dispatch_exception(self, frame, t):
+		rt, rtt, rfn, rframe, rcur = self.cur
+		if rcur and not rframe is frame:
+			return self.trace_dispatch_return(rframe, t)
+		return 0
+
+	def trace_dispatch_call(self, frame, t):
+		self.cur = (t, 0, frame, self.cur)
+		return 1
+
+	def trace_dispatch_return(self, frame, t):
+		rt, rtt, frame, rcur = self.cur
+
+		rfn = `frame.f_code`
+
+		pt, ptt, pframe, pcur = rcur
+		self.cur = pt, ptt+rt, pframe, pcur
+
+		if self.timings.has_key(rfn):
+			nc, tt = self.timings[rfn]
+			self.timings[rfn] = nc + 1, rt + rtt + tt
+		else:
+			self.timings[rfn] =      1, rt + rtt
+
+		return 1
+
+
+	def snapshot_stats(self):
+		self.stats = {}
+		for func in self.timings.keys():
+			nc, tt = self.timings[func]
+			nor_func = self.func_normalize(func)
+			self.stats[nor_func] = nc, nc, tt, 0, {}
+
+		
+
+cut here------------------------------------------------------------------
diff --git a/depot_tools/release/win/python_24/Lib/profile.py b/depot_tools/release/win/python_24/Lib/profile.py
new file mode 100644
index 0000000..00a0ae96
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/profile.py
@@ -0,0 +1,613 @@
+#! /usr/bin/env python
+#
+# Class for profiling python code. rev 1.0  6/2/94
+#
+# Based on prior profile module by Sjoerd Mullender...
+#   which was hacked somewhat by: Guido van Rossum
+#
+# See profile.doc for more information
+
+"""Class for profiling Python code."""
+
+# Copyright 1994, by InfoSeek Corporation, all rights reserved.
+# Written by James Roskind
+#
+# Permission to use, copy, modify, and distribute this Python software
+# and its associated documentation for any purpose (subject to the
+# restriction in the following sentence) without fee is hereby granted,
+# provided that the above copyright notice appears in all copies, and
+# that both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of InfoSeek not be used in
+# advertising or publicity pertaining to distribution of the software
+# without specific, written prior permission.  This permission is
+# explicitly restricted to the copying and modification of the software
+# to remain in Python, compiled Python, or other languages (such as C)
+# wherein the modified or derived code is exclusively imported into a
+# Python module.
+#
+# INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
+# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+# FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY
+# SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
+# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+
+import sys
+import os
+import time
+import marshal
+from optparse import OptionParser
+
+__all__ = ["run", "runctx", "help", "Profile"]
+
+# Sample timer for use with
+#i_count = 0
+#def integer_timer():
+#       global i_count
+#       i_count = i_count + 1
+#       return i_count
+#itimes = integer_timer # replace with C coded timer returning integers
+
+#**************************************************************************
+# The following are the static member functions for the profiler class
+# Note that an instance of Profile() is *not* needed to call them.
+#**************************************************************************
+
+def run(statement, filename=None, sort=-1):
+    """Run statement under profiler optionally saving results in filename
+
+    This function takes a single argument that can be passed to the
+    "exec" statement, and an optional file name.  In all cases this
+    routine attempts to "exec" its first argument and gather profiling
+    statistics from the execution. If no file name is present, then this
+    function automatically prints a simple profiling report, sorted by the
+    standard name string (file/line/function-name) that is presented in
+    each line.
+
+    The optional sort argument is forwarded to print_stats() when no
+    filename is given; the default -1 selects standard-name ordering.
+    """
+    prof = Profile()
+    try:
+        prof = prof.run(statement)
+    except SystemExit:
+        # Allow the profiled code to call sys.exit() without losing the
+        # statistics gathered so far.
+        pass
+    if filename is not None:
+        prof.dump_stats(filename)
+    else:
+        return prof.print_stats(sort)
+
+def runctx(statement, globals, locals, filename=None):
+    """Run statement under profiler, supplying your own globals and locals,
+    optionally saving results in filename.
+
+    statement and filename have the same semantics as profile.run
+    """
+    prof = Profile()
+    try:
+        prof = prof.runctx(statement, globals, locals)
+    except SystemExit:
+        # Allow the profiled code to call sys.exit() without losing the
+        # statistics gathered so far.
+        pass
+
+    if filename is not None:
+        prof.dump_stats(filename)
+    else:
+        return prof.print_stats()
+
+# print help
+def help():
+    # Look for "profile.doc" along sys.path and show it through the
+    # user's pager (the PAGER environment variable, defaulting to more).
+    for dirname in sys.path:
+        fullname = os.path.join(dirname, 'profile.doc')
+        if os.path.exists(fullname):
+            sts = os.system('${PAGER-more} ' + fullname)
+            if sts: print '*** Pager exit status:', sts
+            break
+    else:
+        # for/else: only reached when no candidate file was found.
+        print 'Sorry, can\'t find the help file "profile.doc"',
+        print 'along the Python search path.'
+
+
+if os.name == "mac":
+    import MacOS
+    # MacOS.GetTicks() counts 1/60th-second ticks; convert to seconds.
+    def _get_time_mac(timer=MacOS.GetTicks):
+        return timer() / 60.0
+
+if hasattr(os, "times"):
+    # os.times() returns a tuple whose first two items are user and
+    # system CPU time; the profiled time is their sum.
+    def _get_time_times(timer=os.times):
+        t = timer()
+        return t[0] + t[1]
+
+
+class Profile:
+    """Profiler class.
+
+    self.cur is always a tuple.  Each such tuple corresponds to a stack
+    frame that is currently active (self.cur[-2]).  The following are the
+    definitions of its members.  We use this external "parallel stack" to
+    avoid contaminating the program that we are profiling. (old profiler
+    used to write into the frames local dictionary!!) Derived classes
+    can change the definition of some entries, as long as they leave
+    [-2:] intact (frame and previous tuple).  In case an internal error is
+    detected, the -3 element is used as the function name.
+
+    [ 0] = Time that needs to be charged to the parent frame's function.
+           It is used so that a function call will not have to access the
+           timing data for the parent frame.
+    [ 1] = Total time spent in this frame's function, excluding time in
+           subfunctions (this latter is tallied in cur[2]).
+    [ 2] = Total time spent in subfunctions, excluding time executing the
+           frame's function (this latter is tallied in cur[1]).
+    [-3] = Name of the function that corresponds to this frame.
+    [-2] = Actual frame that we correspond to (used to sync exception handling).
+    [-1] = Our parent 6-tuple (corresponds to frame.f_back).
+
+    Timing data for each function is stored as a 5-tuple in the dictionary
+    self.timings[].  The index is always the name stored in self.cur[-3].
+    The following are the definitions of the members:
+
+    [0] = The number of times this function was called, not counting direct
+          or indirect recursion,
+    [1] = Number of times this function appears on the stack, minus one
+    [2] = Total time spent internal to this function
+    [3] = Cumulative time that this function was present on the stack.  In
+          non-recursive functions, this is the total execution time from start
+          to finish of each invocation of a function, including time spent in
+          all subfunctions.
+    [4] = A dictionary indicating for each function name, the number of times
+          it was called by us.
+    """
+
+    bias = 0  # calibration constant
+
+    def __init__(self, timer=None, bias=None):
+        # timer: optional replacement clock callable; bias: per-event
+        # calibration constant.  Both default to class-level choices.
+        self.timings = {}
+        self.cur = None
+        self.cmd = ""
+        self.c_func_name = ""
+
+        if bias is None:
+            bias = self.bias
+        self.bias = bias     # Materialize in local dict for lookup speed.
+
+        # Pick the dispatcher variant matching the timer's return shape:
+        # scalar -> trace_dispatch_i, 2-tuple -> trace_dispatch,
+        # other sequence -> trace_dispatch_l (generic, slowest).
+        if timer is None:
+            if os.name == 'mac':
+                self.timer = MacOS.GetTicks
+                self.dispatcher = self.trace_dispatch_mac
+                self.get_time = _get_time_mac
+            elif hasattr(time, 'clock'):
+                self.timer = self.get_time = time.clock
+                self.dispatcher = self.trace_dispatch_i
+            elif hasattr(os, 'times'):
+                self.timer = os.times
+                self.dispatcher = self.trace_dispatch
+                self.get_time = _get_time_times
+            else:
+                self.timer = self.get_time = time.time
+                self.dispatcher = self.trace_dispatch_i
+        else:
+            self.timer = timer
+            t = self.timer() # test out timer function
+            try:
+                length = len(t)
+            except TypeError:
+                self.get_time = timer
+                self.dispatcher = self.trace_dispatch_i
+            else:
+                if length == 2:
+                    self.dispatcher = self.trace_dispatch
+                else:
+                    self.dispatcher = self.trace_dispatch_l
+                # This get_time() implementation needs to be defined
+                # here to capture the passed-in timer in the parameter
+                # list (for performance).  Note that we can't assume
+                # the timer() result contains two values in all
+                # cases.
+                def get_time_timer(timer=timer, sum=sum):
+                    return sum(timer())
+                self.get_time = get_time_timer
+        self.t = self.get_time()
+        # Seed the parallel stack with a root frame named 'profiler'.
+        self.simulate_call('profiler')
+
+    # Heavily optimized dispatch routine for os.times() timer
+
+    def trace_dispatch(self, frame, event, arg):
+        # Dispatcher for 2-tuple timers such as os.times(): elapsed time
+        # is (user + system) minus the last recorded stamp and the bias.
+        timer = self.timer
+        t = timer()
+        t = t[0] + t[1] - self.t - self.bias
+
+        if event == "c_call":
+            self.c_func_name = arg.__name__
+
+        # Handlers return true on success; on failure, roll the clock
+        # back so the unhandled interval is not charged to anyone.
+        if self.dispatch[event](self, frame,t):
+            t = timer()
+            self.t = t[0] + t[1]
+        else:
+            r = timer()
+            self.t = r[0] + r[1] - t # put back unrecorded delta
+
+    # Dispatch routine for best timer program (return = scalar, fastest if
+    # an integer but float works too -- and time.clock() relies on that).
+
+    def trace_dispatch_i(self, frame, event, arg):
+        # Dispatcher for scalar timers (e.g. time.clock): single
+        # subtraction, no tuple arithmetic — the fastest variant.
+        timer = self.timer
+        t = timer() - self.t - self.bias
+
+        if event == "c_call":
+            self.c_func_name = arg.__name__
+
+        if self.dispatch[event](self, frame, t):
+            self.t = timer()
+        else:
+            self.t = timer() - t  # put back unrecorded delta
+
+    # Dispatch routine for macintosh (timer returns time in ticks of
+    # 1/60th second)
+
+    def trace_dispatch_mac(self, frame, event, arg):
+        # Macintosh dispatcher: the timer counts 1/60th-second ticks,
+        # so every reading is scaled to seconds.
+        timer = self.timer
+        t = timer()/60.0 - self.t - self.bias
+
+        if event == "c_call":
+            self.c_func_name = arg.__name__
+
+        if self.dispatch[event](self, frame, t):
+            self.t = timer()/60.0
+        else:
+            self.t = timer()/60.0 - t  # put back unrecorded delta
+
+    # SLOW generic dispatch routine for timer returning lists of numbers
+
+    def trace_dispatch_l(self, frame, event, arg):
+        # Generic (slowest) dispatcher: get_time sums whatever sequence
+        # the user's timer returns.
+        get_time = self.get_time
+        t = get_time() - self.t - self.bias
+
+        if event == "c_call":
+            self.c_func_name = arg.__name__
+
+        if self.dispatch[event](self, frame, t):
+            self.t = get_time()
+        else:
+            self.t = get_time() - t # put back unrecorded delta
+
+    # In the event handlers, the first 3 elements of self.cur are unpacked
+    # into vrbls w/ 3-letter names.  The last two characters are meant to be
+    # mnemonic:
+    #     _pt  self.cur[0] "parent time"   time to be charged to parent frame
+    #     _it  self.cur[1] "internal time" time spent directly in the function
+    #     _et  self.cur[2] "external time" time spent in subfunctions
+
+    def trace_dispatch_exception(self, frame, t):
+        # An exception event in a frame other than the current record's
+        # means the current frame is being unwound: handle it as a
+        # return.  Otherwise just charge the time to internal time.
+        rpt, rit, ret, rfn, rframe, rcur = self.cur
+        if (rframe is not frame) and rcur:
+            return self.trace_dispatch_return(rframe, t)
+        self.cur = rpt, rit+t, ret, rfn, rframe, rcur
+        return 1
+
+
+    def trace_dispatch_call(self, frame, t):
+        # If the interpreter switched frames without a matching return
+        # event (seen after handled exceptions), synthesize the missing
+        # return first so the parallel stack stays in sync.
+        if self.cur and frame.f_back is not self.cur[-2]:
+            rpt, rit, ret, rfn, rframe, rcur = self.cur
+            if not isinstance(rframe, Profile.fake_frame):
+                assert rframe.f_back is frame.f_back, ("Bad call", rfn,
+                                                       rframe, rframe.f_back,
+                                                       frame, frame.f_back)
+                self.trace_dispatch_return(rframe, 0)
+                assert (self.cur is None or \
+                        frame.f_back is self.cur[-2]), ("Bad call",
+                                                        self.cur[-3])
+        # Functions are keyed by (filename, first line, name).
+        fcode = frame.f_code
+        fn = (fcode.co_filename, fcode.co_firstlineno, fcode.co_name)
+        self.cur = (t, 0, 0, fn, frame, self.cur)
+        timings = self.timings
+        if fn in timings:
+            cc, ns, tt, ct, callers = timings[fn]
+            # ns counts active occurrences of fn on the stack.
+            timings[fn] = cc, ns + 1, tt, ct, callers
+        else:
+            timings[fn] = 0, 0, 0, 0, {}
+        return 1
+
+    def trace_dispatch_c_call (self, frame, t):
+        # C functions have no code object, so synthesize a key with an
+        # empty filename and line 0; the name was stashed by the
+        # dispatcher from the c_call event's arg.
+        fn = ("", 0, self.c_func_name)
+        self.cur = (t, 0, 0, fn, frame, self.cur)
+        timings = self.timings
+        if timings.has_key(fn):
+            cc, ns, tt, ct, callers = timings[fn]
+            timings[fn] = cc, ns+1, tt, ct, callers
+        else:
+            timings[fn] = 0, 0, 0, 0, {}
+        return 1
+
+    def trace_dispatch_return(self, frame, t):
+        # A return from a frame that is not the current record (can
+        # happen when an intermediate return event was missed):
+        # synthesize the missing return first.
+        if frame is not self.cur[-2]:
+            assert frame is self.cur[-2].f_back, ("Bad return", self.cur[-3])
+            self.trace_dispatch_return(self.cur[-2], 0)
+
+        # Prefix "r" means part of the Returning or exiting frame.
+        # Prefix "p" means part of the Previous or Parent or older frame.
+
+        rpt, rit, ret, rfn, frame, rcur = self.cur
+        rit = rit + t
+        frame_total = rit + ret
+
+        # Charge the frame's total to the parent's "external" bucket and
+        # propagate the pending parent-time charge.
+        ppt, pit, pet, pfn, pframe, pcur = rcur
+        self.cur = ppt, pit + rpt, pet + frame_total, pfn, pframe, pcur
+
+        timings = self.timings
+        cc, ns, tt, ct, callers = timings[rfn]
+        if not ns:
+            # This is the only occurrence of the function on the stack.
+            # Else this is a (directly or indirectly) recursive call, and
+            # its cumulative time will get updated when the topmost call to
+            # it returns.
+            ct = ct + frame_total
+            cc = cc + 1
+
+        if pfn in callers:
+            callers[pfn] = callers[pfn] + 1  # hack: gather more
+            # stats such as the amount of time added to ct courtesy
+            # of this specific call, and the contribution to cc
+            # courtesy of this call.
+        else:
+            callers[pfn] = 1
+
+        timings[rfn] = cc, ns - 1, tt + rit, ct, callers
+
+        return 1
+
+
+    # Map sys.setprofile event names to unbound handler methods; the
+    # dispatch routines invoke these as self.dispatch[event](self, frame, t).
+    dispatch = {
+        "call": trace_dispatch_call,
+        "exception": trace_dispatch_exception,
+        "return": trace_dispatch_return,
+        "c_call": trace_dispatch_c_call,
+        "c_exception": trace_dispatch_exception,
+        "c_return": trace_dispatch_return,
+        }
+
+
+    # The next few functions play with self.cmd. By carefully preloading
+    # our parallel stack, we can force the profiled result to include
+    # an arbitrary string as the name of the calling function.
+    # We use self.cmd as that string, and the resulting stats look
+    # very nice :-).
+
+    def set_cmd(self, cmd):
+        # Record the profiled command string and push a fake frame named
+        # after it, so reports show the command as the root caller.
+        if self.cur[-1]: return   # already set
+        self.cmd = cmd
+        self.simulate_call(cmd)
+
+    class fake_code:
+        # Minimal stand-in for a code object, carrying only the
+        # attributes the profiler reads.
+        def __init__(self, filename, line, name):
+            self.co_filename = filename
+            self.co_line = line
+            self.co_name = name
+            self.co_firstlineno = 0
+
+        def __repr__(self):
+            return repr((self.co_filename, self.co_line, self.co_name))
+
+    class fake_frame:
+        # Minimal stand-in for a frame object: just f_code and f_back.
+        def __init__(self, code, prior):
+            self.f_code = code
+            self.f_back = prior
+
+    def simulate_call(self, name):
+        # Push a fake frame named `name` onto the parallel stack, exactly
+        # as if a call event had occurred, charging it zero time so far.
+        code = self.fake_code('profile', 0, name)
+        if self.cur:
+            pframe = self.cur[-2]
+        else:
+            pframe = None
+        frame = self.fake_frame(code, pframe)
+        self.dispatch['call'](self, frame, 0)
+
+    # collect stats from pending stack, including getting final
+    # timings for self.cmd frame.
+
+    def simulate_cmd_complete(self):
+        get_time = self.get_time
+        t = get_time() - self.t
+        # Unwind every simulated frame still on the parallel stack,
+        # charging the outstanding time only to the innermost one.
+        while self.cur[-1]:
+            # We *can* cause assertion errors here if
+            # dispatch_trace_return checks for a frame match!
+            self.dispatch['return'](self, self.cur[-2], t)
+            t = 0
+        self.t = get_time() - t
+
+
+    def print_stats(self, sort=-1):
+        # Convenience wrapper: build a pstats.Stats from this profiler,
+        # strip directory prefixes, sort (-1 = standard name) and print.
+        import pstats
+        pstats.Stats(self).strip_dirs().sort_stats(sort). \
+                  print_stats()
+
+    def dump_stats(self, file):
+        f = open(file, 'wb')
+        self.create_stats()
+        marshal.dump(self.stats, f)
+        f.close()
+
+    def create_stats(self):
+        # Finalize timings for everything still on the simulated stack,
+        # then convert them into the pstats-compatible self.stats dict.
+        self.simulate_cmd_complete()
+        self.snapshot_stats()
+
+    def snapshot_stats(self):
+        self.stats = {}
+        for func, (cc, ns, tt, ct, callers) in self.timings.iteritems():
+            callers = callers.copy()
+            nc = 0
+            for callcnt in callers.itervalues():
+                nc += callcnt
+            self.stats[func] = cc, nc, tt, ct, callers
+
+
+    # The following two methods can be called by clients to use
+    # a profiler to profile a statement, given as a string.
+
+    def run(self, cmd):
+        import __main__
+        dict = __main__.__dict__
+        return self.runctx(cmd, dict, dict)
+
+    def runctx(self, cmd, globals, locals):
+        # Install the dispatcher, exec the command in the supplied
+        # namespaces, and always remove the profile hook afterwards,
+        # even if the command raises.
+        self.set_cmd(cmd)
+        sys.setprofile(self.dispatcher)
+        try:
+            exec cmd in globals, locals
+        finally:
+            sys.setprofile(None)
+        return self
+
+    # This method is more useful to profile a single function call.
+    def runcall(self, func, *args, **kw):
+        # Profile one call of func, returning its result; the profile
+        # hook is removed even if func raises.
+        self.set_cmd(repr(func))
+        sys.setprofile(self.dispatcher)
+        try:
+            return func(*args, **kw)
+        finally:
+            sys.setprofile(None)
+
+
+    #******************************************************************
+    # The following calculates the overhead for using a profiler.  The
+    # problem is that it takes a fair amount of time for the profiler
+    # to stop the stopwatch (from the time it receives an event).
+    # Similarly, there is a delay from the time that the profiler
+    # re-starts the stopwatch before the user's code really gets to
+    # continue.  The following code tries to measure the difference on
+    # a per-event basis.
+    #
+    # Note that this difference is only significant if there are a lot of
+    # events, and relatively little user code per event.  For example,
+    # code with small functions will typically benefit from having the
+    # profiler calibrated for the current platform.  This *could* be
+    # done on the fly during init() time, but it is not worth the
+    # effort.  Also note that if too large a value specified, then
+    # execution time on some functions will actually appear as a
+    # negative number.  It is *normal* for some functions (with very
+    # low call counts) to have such negative stats, even if the
+    # calibration figure is "correct."
+    #
+    # One alternative to profile-time calibration adjustments (i.e.,
+    # adding in the magic little delta during each event) is to track
+    # more carefully the number of events (and cumulatively, the number
+    # of events during sub functions) that are seen.  If this were
+    # done, then the arithmetic could be done after the fact (i.e., at
+    # display time).  Currently, we track only call/return events.
+    # These values can be deduced by examining the callees and callers
+    # vectors for each functions.  Hence we *can* almost correct the
+    # internal time figure at print time (note that we currently don't
+    # track exception event processing counts).  Unfortunately, there
+    # is currently no similar information for cumulative sub-function
+    # time.  It would not be hard to "get all this info" at profiler
+    # time.  Specifically, we would have to extend the tuples to keep
+    # counts of this in each frame, and then extend the defs of timing
+    # tuples to include the significant two figures. I'm a bit fearful
+    # that this additional feature will slow the heavily optimized
+# event/time ratio (i.e., the profiler would run slower, for a very
+    # low "value added" feature.)
+    #**************************************************************
+
+    def calibrate(self, m, verbose=0):
+        if self.__class__ is not Profile:
+            raise TypeError("Subclasses must override .calibrate().")
+
+        saved_bias = self.bias
+        self.bias = 0
+        try:
+            return self._calibrate_inner(m, verbose)
+        finally:
+            self.bias = saved_bias
+
+    def _calibrate_inner(self, m, verbose):
+        get_time = self.get_time
+
+        # Set up a test case to be run with and without profiling.  Include
+        # lots of calls, because we're trying to quantify stopwatch overhead.
+        # Do not raise any exceptions, though, because we want to know
+        # exactly how many profile events are generated (one call event, +
+        # one return event, per Python-level call).
+
+        def f1(n):
+            for i in range(n):
+                x = 1
+
+        def f(m, f1=f1):
+            for i in range(m):
+                f1(100)
+
+        f(m)    # warm up the cache
+
+        # elapsed_noprofile <- time f(m) takes without profiling.
+        t0 = get_time()
+        f(m)
+        t1 = get_time()
+        elapsed_noprofile = t1 - t0
+        if verbose:
+            print "elapsed time without profiling =", elapsed_noprofile
+
+        # elapsed_profile <- time f(m) takes with profiling.  The difference
+        # is profiling overhead, only some of which the profiler subtracts
+        # out on its own.
+        p = Profile()
+        t0 = get_time()
+        p.runctx('f(m)', globals(), locals())
+        t1 = get_time()
+        elapsed_profile = t1 - t0
+        if verbose:
+            print "elapsed time with profiling =", elapsed_profile
+
+        # reported_time <- "CPU seconds" the profiler charged to f and f1.
+        total_calls = 0.0
+        reported_time = 0.0
+        for (filename, line, funcname), (cc, ns, tt, ct, callers) in \
+                p.timings.items():
+            if funcname in ("f", "f1"):
+                total_calls += cc
+                reported_time += tt
+
+        if verbose:
+            print "'CPU seconds' profiler reported =", reported_time
+            print "total # calls =", total_calls
+        if total_calls != m + 1:
+            raise ValueError("internal error: total calls = %d" % total_calls)
+
+        # reported_time - elapsed_noprofile = overhead the profiler wasn't
+        # able to measure.  Divide by twice the number of calls (since there
+        # are two profiler events per call in this test) to get the hidden
+        # overhead per event.
+        mean = (reported_time - elapsed_noprofile) / 2.0 / total_calls
+        if verbose:
+            print "mean stopwatch overhead per profile event =", mean
+        return mean
+
+#****************************************************************************
+def Stats(*args):
+    print 'Report generating functions are in the "pstats" module\a'
+
+
+# When invoked as main program, invoke the profiler on a script
+if __name__ == '__main__':
+    usage = "profile.py [-o output_file_path] [-s sort] scriptfile [arg] ..."
+    if not sys.argv[1:]:
+        print "Usage: ", usage
+        sys.exit(2)
+
+    class ProfileParser(OptionParser):
+        def __init__(self, usage):
+            OptionParser.__init__(self)
+            self.usage = usage
+
+    parser = ProfileParser(usage)
+    parser.allow_interspersed_args = False
+    parser.add_option('-o', '--outfile', dest="outfile",
+        help="Save stats to <outfile>", default=None)
+    parser.add_option('-s', '--sort', dest="sort",
+        help="Sort order when printing to stdout, based on pstats.Stats class", default=-1)
+
+    (options, args) = parser.parse_args()
+    sys.argv[:] = args
+
+    if (len(sys.argv) > 0):
+        sys.path.insert(0, os.path.dirname(sys.argv[0]))
+        run('execfile(%r)' % (sys.argv[0],), options.outfile, options.sort)
+    else:
+        print "Usage: ", usage
diff --git a/depot_tools/release/win/python_24/Lib/pstats.py b/depot_tools/release/win/python_24/Lib/pstats.py
new file mode 100644
index 0000000..5979a61
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pstats.py
@@ -0,0 +1,643 @@
+"""Class for printing reports on profiled python code."""
+
+# Class for printing reports on profiled python code. rev 1.0  4/1/94
+#
+# Based on prior profile module by Sjoerd Mullender...
+#   which was hacked somewhat by: Guido van Rossum
+#
+# see profile.doc and profile.py for more info.
+
+# Copyright 1994, by InfoSeek Corporation, all rights reserved.
+# Written by James Roskind
+#
+# Permission to use, copy, modify, and distribute this Python software
+# and its associated documentation for any purpose (subject to the
+# restriction in the following sentence) without fee is hereby granted,
+# provided that the above copyright notice appears in all copies, and
+# that both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of InfoSeek not be used in
+# advertising or publicity pertaining to distribution of the software
+# without specific, written prior permission.  This permission is
+# explicitly restricted to the copying and modification of the software
+# to remain in Python, compiled Python, or other languages (such as C)
+# wherein the modified or derived code is exclusively imported into a
+# Python module.
+#
+# INFOSEEK CORPORATION DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
+# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+# FITNESS. IN NO EVENT SHALL INFOSEEK CORPORATION BE LIABLE FOR ANY
+# SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER
+# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF
+# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
+# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+
+import os
+import time
+import marshal
+import re
+
+__all__ = ["Stats"]
+
+class Stats:
+    """This class is used for creating reports from data generated by the
+    Profile class.  It is a "friend" of that class, and imports data either
+    by direct access to members of Profile class, or by reading in a dictionary
+    that was emitted (via marshal) from the Profile class.
+
+    The big change from the previous Profiler (in terms of raw functionality)
+    is that an "add()" method has been provided to combine Stats from
+    several distinct profile runs.  Both the constructor and the add()
+    method now take arbitrarily many file names as arguments.
+
+    All the print methods now take an argument that indicates how many lines
+    to print.  If the arg is a floating point number between 0 and 1.0, then
+    it is taken as a decimal percentage of the available lines to be printed
+    (e.g., .1 means print 10% of all available lines).  If it is an integer,
+    it is taken to mean the number of lines of data that you wish to have
+    printed.
+
+    The sort_stats() method now processes some additional options (i.e., in
+    addition to the old -1, 0, 1, or 2).  It takes an arbitrary number of quoted
+    strings to select the sort order.  For example sort_stats('time', 'name')
+    sorts on the major key of "internal function time", and on the minor
+    key of 'the name of the function'.  Look at the two tables in sort_stats()
+    and get_sort_arg_defs(self) for more examples.
+
+    All methods now return "self",  so you can string together commands like:
+        Stats('foo', 'goo').strip_dirs().sort_stats('calls').\
+                            print_stats(5).print_callers(5)
+    """
+
+    def __init__(self, *args):
+        if not len(args):
+            arg = None
+        else:
+            arg = args[0]
+            args = args[1:]
+        self.init(arg)
+        self.add(*args)
+
+    def init(self, arg):
+        self.all_callees = None  # calc only if needed
+        self.files = []
+        self.fcn_list = None
+        self.total_tt = 0
+        self.total_calls = 0
+        self.prim_calls = 0
+        self.max_name_len = 0
+        self.top_level = {}
+        self.stats = {}
+        self.sort_arg_dict = {}
+        self.load_stats(arg)
+        trouble = 1
+        try:
+            self.get_top_level_stats()
+            trouble = 0
+        finally:
+            if trouble:
+                print "Invalid timing data",
+                if self.files: print self.files[-1],
+                print
+
+    def load_stats(self, arg):
+        if not arg:  self.stats = {}
+        elif type(arg) == type(""):
+            f = open(arg, 'rb')
+            self.stats = marshal.load(f)
+            f.close()
+            try:
+                file_stats = os.stat(arg)
+                arg = time.ctime(file_stats.st_mtime) + "    " + arg
+            except:  # in case this is not unix
+                pass
+            self.files = [ arg ]
+        elif hasattr(arg, 'create_stats'):
+            arg.create_stats()
+            self.stats = arg.stats
+            arg.stats = {}
+        if not self.stats:
+            raise TypeError,  "Cannot create or construct a %r object from '%r''" % (
+                              self.__class__, arg)
+        return
+
+    def get_top_level_stats(self):
+        for func, (cc, nc, tt, ct, callers) in self.stats.items():
+            self.total_calls += nc
+            self.prim_calls  += cc
+            self.total_tt    += tt
+            if callers.has_key(("jprofile", 0, "profiler")):
+                self.top_level[func] = None
+            if len(func_std_string(func)) > self.max_name_len:
+                self.max_name_len = len(func_std_string(func))
+
+    def add(self, *arg_list):
+        if not arg_list: return self
+        if len(arg_list) > 1: self.add(*arg_list[1:])
+        other = arg_list[0]
+        if type(self) != type(other) or self.__class__ != other.__class__:
+            other = Stats(other)
+        self.files += other.files
+        self.total_calls += other.total_calls
+        self.prim_calls += other.prim_calls
+        self.total_tt += other.total_tt
+        for func in other.top_level:
+            self.top_level[func] = None
+
+        if self.max_name_len < other.max_name_len:
+            self.max_name_len = other.max_name_len
+
+        self.fcn_list = None
+
+        for func, stat in other.stats.iteritems():
+            if func in self.stats:
+                old_func_stat = self.stats[func]
+            else:
+                old_func_stat = (0, 0, 0, 0, {},)
+            self.stats[func] = add_func_stats(old_func_stat, stat)
+        return self
+
+    def dump_stats(self, filename):
+        """Write the profile data to a file we know how to load back."""
+        f = file(filename, 'wb')
+        try:
+            marshal.dump(self.stats, f)
+        finally:
+            f.close()
+
+    # list the tuple indices and directions for sorting,
+    # along with some printable description
+    sort_arg_dict_default = {
+              "calls"     : (((1,-1),              ), "call count"),
+              "cumulative": (((3,-1),              ), "cumulative time"),
+              "file"      : (((4, 1),              ), "file name"),
+              "line"      : (((5, 1),              ), "line number"),
+              "module"    : (((4, 1),              ), "file name"),
+              "name"      : (((6, 1),              ), "function name"),
+              "nfl"       : (((6, 1),(4, 1),(5, 1),), "name/file/line"),
+              "pcalls"    : (((0,-1),              ), "call count"),
+              "stdname"   : (((7, 1),              ), "standard name"),
+              "time"      : (((2,-1),              ), "internal time"),
+              }
+
+    def get_sort_arg_defs(self):
+        """Expand all abbreviations that are unique."""
+        if not self.sort_arg_dict:
+            self.sort_arg_dict = dict = {}
+            bad_list = {}
+            for word, tup in self.sort_arg_dict_default.iteritems():
+                fragment = word
+                while fragment:
+                    if not fragment:
+                        break
+                    if fragment in dict:
+                        bad_list[fragment] = 0
+                        break
+                    dict[fragment] = tup
+                    fragment = fragment[:-1]
+            for word in bad_list:
+                del dict[word]
+        return self.sort_arg_dict
+
+    def sort_stats(self, *field):
+        if not field:
+            self.fcn_list = 0
+            return self
+        if len(field) == 1 and type(field[0]) == type(1):
+            # Be compatible with old profiler
+            field = [ {-1: "stdname",
+                      0:"calls",
+                      1:"time",
+                      2: "cumulative" }  [ field[0] ] ]
+
+        sort_arg_defs = self.get_sort_arg_defs()
+        sort_tuple = ()
+        self.sort_type = ""
+        connector = ""
+        for word in field:
+            sort_tuple = sort_tuple + sort_arg_defs[word][0]
+            self.sort_type += connector + sort_arg_defs[word][1]
+            connector = ", "
+
+        stats_list = []
+        for func, (cc, nc, tt, ct, callers) in self.stats.iteritems():
+            stats_list.append((cc, nc, tt, ct) + func +
+                              (func_std_string(func), func))
+
+        stats_list.sort(TupleComp(sort_tuple).compare)
+
+        self.fcn_list = fcn_list = []
+        for tuple in stats_list:
+            fcn_list.append(tuple[-1])
+        return self
+
+    def reverse_order(self):
+        if self.fcn_list:
+            self.fcn_list.reverse()
+        return self
+
+    def strip_dirs(self):
+        oldstats = self.stats
+        self.stats = newstats = {}
+        max_name_len = 0
+        for func, (cc, nc, tt, ct, callers) in oldstats.iteritems():
+            newfunc = func_strip_path(func)
+            if len(func_std_string(newfunc)) > max_name_len:
+                max_name_len = len(func_std_string(newfunc))
+            newcallers = {}
+            for func2, caller in callers.iteritems():
+                newcallers[func_strip_path(func2)] = caller
+
+            if newfunc in newstats:
+                newstats[newfunc] = add_func_stats(
+                                        newstats[newfunc],
+                                        (cc, nc, tt, ct, newcallers))
+            else:
+                newstats[newfunc] = (cc, nc, tt, ct, newcallers)
+        old_top = self.top_level
+        self.top_level = new_top = {}
+        for func in old_top:
+            new_top[func_strip_path(func)] = None
+
+        self.max_name_len = max_name_len
+
+        self.fcn_list = None
+        self.all_callees = None
+        return self
+
+    def calc_callees(self):
+        if self.all_callees: return
+        self.all_callees = all_callees = {}
+        for func, (cc, nc, tt, ct, callers) in self.stats.iteritems():
+            if not func in all_callees:
+                all_callees[func] = {}
+            for func2, caller in callers.iteritems():
+                if not func2 in all_callees:
+                    all_callees[func2] = {}
+                all_callees[func2][func]  = caller
+        return
+
+    #******************************************************************
+    # The following functions support actual printing of reports
+    #******************************************************************
+
+    # Optional "amount" is either a line count, or a percentage of lines.
+
+    def eval_print_amount(self, sel, list, msg):
+        new_list = list
+        if type(sel) == type(""):
+            new_list = []
+            for func in list:
+                if re.search(sel, func_std_string(func)):
+                    new_list.append(func)
+        else:
+            count = len(list)
+            if type(sel) == type(1.0) and 0.0 <= sel < 1.0:
+                count = int(count * sel + .5)
+                new_list = list[:count]
+            elif type(sel) == type(1) and 0 <= sel < count:
+                count = sel
+                new_list = list[:count]
+        if len(list) != len(new_list):
+            msg = msg + "   List reduced from %r to %r due to restriction <%r>\n" % (
+                         len(list), len(new_list), sel)
+
+        return new_list, msg
+
+    def get_print_list(self, sel_list):
+        width = self.max_name_len
+        if self.fcn_list:
+            list = self.fcn_list[:]
+            msg = "   Ordered by: " + self.sort_type + '\n'
+        else:
+            list = self.stats.keys()
+            msg = "   Random listing order was used\n"
+
+        for selection in sel_list:
+            list, msg = self.eval_print_amount(selection, list, msg)
+
+        count = len(list)
+
+        if not list:
+            return 0, list
+        print msg
+        if count < len(self.stats):
+            width = 0
+            for func in list:
+                if  len(func_std_string(func)) > width:
+                    width = len(func_std_string(func))
+        return width+2, list
+
+    def print_stats(self, *amount):
+        for filename in self.files:
+            print filename
+        if self.files: print
+        indent = ' ' * 8
+        for func in self.top_level:
+            print indent, func_get_function_name(func)
+
+        print indent, self.total_calls, "function calls",
+        if self.total_calls != self.prim_calls:
+            print "(%d primitive calls)" % self.prim_calls,
+        print "in %.3f CPU seconds" % self.total_tt
+        print
+        width, list = self.get_print_list(amount)
+        if list:
+            self.print_title()
+            for func in list:
+                self.print_line(func)
+            print
+            print
+        return self
+
+    def print_callees(self, *amount):
+        width, list = self.get_print_list(amount)
+        if list:
+            self.calc_callees()
+
+            self.print_call_heading(width, "called...")
+            for func in list:
+                if func in self.all_callees:
+                    self.print_call_line(width, func, self.all_callees[func])
+                else:
+                    self.print_call_line(width, func, {})
+            print
+            print
+        return self
+
+    def print_callers(self, *amount):
+        width, list = self.get_print_list(amount)
+        if list:
+            self.print_call_heading(width, "was called by...")
+            for func in list:
+                cc, nc, tt, ct, callers = self.stats[func]
+                self.print_call_line(width, func, callers)
+            print
+            print
+        return self
+
+    def print_call_heading(self, name_size, column_title):
+        print "Function ".ljust(name_size) + column_title
+
+    def print_call_line(self, name_size, source, call_dict):
+        print func_std_string(source).ljust(name_size),
+        if not call_dict:
+            print "--"
+            return
+        clist = call_dict.keys()
+        clist.sort()
+        name_size = name_size + 1
+        indent = ""
+        for func in clist:
+            name = func_std_string(func)
+            print indent*name_size + name + '(%r)' % (call_dict[func],), \
+                      f8(self.stats[func][3])
+            indent = " "
+
+    def print_title(self):
+        print '   ncalls  tottime  percall  cumtime  percall', \
+              'filename:lineno(function)'
+
+    def print_line(self, func):  # hack : should print percentages
+        cc, nc, tt, ct, callers = self.stats[func]
+        c = str(nc)
+        if nc != cc:
+            c = c + '/' + str(cc)
+        print c.rjust(9),
+        print f8(tt),
+        if nc == 0:
+            print ' '*8,
+        else:
+            print f8(tt/nc),
+        print f8(ct),
+        if cc == 0:
+            print ' '*8,
+        else:
+            print f8(ct/cc),
+        print func_std_string(func)
+
+    def ignore(self):
+        # Deprecated since 1.5.1 -- see the docs.
+        pass # has no return value, so use at end of line :-)
+
+class TupleComp:
+    """This class provides a generic function for comparing any two tuples.
+    Each instance records a list of tuple-indices (from most significant
+    to least significant), and sort direction (ascending or descending) for
+    each tuple-index.  The compare functions can then be used as the function
+    argument to the system sort() function when a list of tuples need to be
+    sorted in the instances order."""
+
+    def __init__(self, comp_select_list):
+        self.comp_select_list = comp_select_list
+
+    def compare (self, left, right):
+        for index, direction in self.comp_select_list:
+            l = left[index]
+            r = right[index]
+            if l < r:
+                return -direction
+            if l > r:
+                return direction
+        return 0
+
+#**************************************************************************
+# func_name is a triple (file:string, line:int, name:string)
+
+def func_strip_path(func_name):
+    filename, line, name = func_name
+    return os.path.basename(filename), line, name
+
+def func_get_function_name(func):
+    return func[2]
+
+def func_std_string(func_name): # match what old profile produced
+    return "%s:%d(%s)" % func_name
+
+#**************************************************************************
+# The following functions combine statistics for pairs of functions.
+# The bulk of the processing involves correctly handling "call" lists,
+# such as callers and callees.
+#**************************************************************************
+
+def add_func_stats(target, source):
+    """Add together all the stats for two profile entries."""
+    cc, nc, tt, ct, callers = source
+    t_cc, t_nc, t_tt, t_ct, t_callers = target
+    return (cc+t_cc, nc+t_nc, tt+t_tt, ct+t_ct,
+              add_callers(t_callers, callers))
+
+def add_callers(target, source):
+    """Combine two caller lists in a single list."""
+    new_callers = {}
+    for func, caller in target.iteritems():
+        new_callers[func] = caller
+    for func, caller in source.iteritems():
+        if func in new_callers:
+            new_callers[func] = caller + new_callers[func]
+        else:
+            new_callers[func] = caller
+    return new_callers
+
+def count_calls(callers):
+    """Sum the caller statistics to get total number of calls received."""
+    nc = 0
+    for calls in callers.itervalues():
+        nc += calls
+    return nc
+
+#**************************************************************************
+# The following functions support printing of reports
+#**************************************************************************
+
+def f8(x):
+    return "%8.3f" % x
+
+#**************************************************************************
+# Statistics browser added by ESR, April 2001
+#**************************************************************************
+
+if __name__ == '__main__':
+    import cmd
+    try:
+        import readline
+    except ImportError:
+        pass
+
+    class ProfileBrowser(cmd.Cmd):
+        def __init__(self, profile=None):
+            cmd.Cmd.__init__(self)
+            self.prompt = "% "
+            if profile is not None:
+                self.stats = Stats(profile)
+            else:
+                self.stats = None
+
+        def generic(self, fn, line):
+            args = line.split()
+            processed = []
+            for term in args:
+                try:
+                    processed.append(int(term))
+                    continue
+                except ValueError:
+                    pass
+                try:
+                    frac = float(term)
+                    if frac > 1 or frac < 0:
+                        print "Fraction argument mus be in [0, 1]"
+                        continue
+                    processed.append(frac)
+                    continue
+                except ValueError:
+                    pass
+                processed.append(term)
+            if self.stats:
+                getattr(self.stats, fn)(*processed)
+            else:
+                print "No statistics object is loaded."
+            return 0
+        def generic_help(self):
+            print "Arguments may be:"
+            print "* An integer maximum number of entries to print."
+            print "* A decimal fractional number between 0 and 1, controlling"
+            print "  what fraction of selected entries to print."
+            print "* A regular expression; only entries with function names"
+            print "  that match it are printed."
+
+        def do_add(self, line):
+            self.stats.add(line)
+            return 0
+        def help_add(self):
+            print "Add profile info from given file to current statistics object."
+
+        def do_callees(self, line):
+            return self.generic('print_callees', line)
+        def help_callees(self):
+            print "Print callees statistics from the current stat object."
+            self.generic_help()
+
+        def do_callers(self, line):
+            return self.generic('print_callers', line)
+        def help_callers(self):
+            print "Print callers statistics from the current stat object."
+            self.generic_help()
+
+        def do_EOF(self, line):
+            print ""
+            return 1
+        def help_EOF(self):
+            print "Leave the profile brower."
+
+        def do_quit(self, line):
+            return 1
+        def help_quit(self):
+            print "Leave the profile brower."
+
+        def do_read(self, line):
+            if line:
+                try:
+                    self.stats = Stats(line)
+                except IOError, args:
+                    print args[1]
+                    return
+                self.prompt = line + "% "
+            elif len(self.prompt) > 2:
+                line = self.prompt[-2:]
+            else:
+                print "No statistics object is current -- cannot reload."
+            return 0
+        def help_read(self):
+            print "Read in profile data from a specified file."
+
+        def do_reverse(self, line):
+            self.stats.reverse_order()
+            return 0
+        def help_reverse(self):
+            print "Reverse the sort order of the profiling report."
+
+        def do_sort(self, line):
+            abbrevs = self.stats.get_sort_arg_defs()
+            if line and not filter(lambda x,a=abbrevs: x not in a,line.split()):
+                self.stats.sort_stats(*line.split())
+            else:
+                print "Valid sort keys (unique prefixes are accepted):"
+                for (key, value) in Stats.sort_arg_dict_default.iteritems():
+                    print "%s -- %s" % (key, value[1])
+            return 0
+        def help_sort(self):
+            print "Sort profile data according to specified keys."
+            print "(Typing `sort' without arguments lists valid keys.)"
+        def complete_sort(self, text, *args):
+            return [a for a in Stats.sort_arg_dict_default if a.startswith(text)]
+
+        def do_stats(self, line):
+            return self.generic('print_stats', line)
+        def help_stats(self):
+            print "Print statistics from the current stat object."
+            self.generic_help()
+
+        def do_strip(self, line):
+            self.stats.strip_dirs()
+            return 0
+        def help_strip(self):
+            print "Strip leading path information from filenames in the report."
+
+        def postcmd(self, stop, line):
+            if stop:
+                return stop
+            return None
+
+    import sys
+    print "Welcome to the profile statistics browser."
+    if len(sys.argv) > 1:
+        initprofile = sys.argv[1]
+    else:
+        initprofile = None
+    try:
+        ProfileBrowser(initprofile).cmdloop()
+        print "Goodbye."
+    except KeyboardInterrupt:
+        pass
+
+# That's all, folks.
diff --git a/depot_tools/release/win/python_24/Lib/pty.py b/depot_tools/release/win/python_24/Lib/pty.py
new file mode 100644
index 0000000..fae162d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pty.py
@@ -0,0 +1,168 @@
+"""Pseudo terminal utilities."""
+
+# Bugs: No signal handling.  Doesn't set slave termios and window size.
+#       Only tested on Linux.
+# See:  W. Richard Stevens. 1992.  Advanced Programming in the
+#       UNIX Environment.  Chapter 19.
+# Author: Steen Lumholt -- with additions by Guido.
+
+from select import select
+import os
+import tty
+
+__all__ = ["openpty","fork","spawn"]
+
+STDIN_FILENO = 0
+STDOUT_FILENO = 1
+STDERR_FILENO = 2
+
+CHILD = 0
+
+def openpty():
+    """openpty() -> (master_fd, slave_fd)
+    Open a pty master/slave pair, using os.openpty() if possible."""
+
+    try:
+        return os.openpty()
+    except (AttributeError, OSError):
+        pass
+    master_fd, slave_name = _open_terminal()
+    slave_fd = slave_open(slave_name)
+    return master_fd, slave_fd
+
+def master_open():
+    """master_open() -> (master_fd, slave_name)
+    Open a pty master and return the fd, and the filename of the slave end.
+    Deprecated, use openpty() instead."""
+
+    try:
+        master_fd, slave_fd = os.openpty()
+    except (AttributeError, OSError):
+        pass
+    else:
+        slave_name = os.ttyname(slave_fd)
+        os.close(slave_fd)
+        return master_fd, slave_name
+
+    return _open_terminal()
+
+def _open_terminal():
+    """Open pty master and return (master_fd, tty_name).
+    SGI and generic BSD version, for when openpty() fails."""
+    try:
+        import sgi
+    except ImportError:
+        pass
+    else:
+        try:
+            tty_name, master_fd = sgi._getpty(os.O_RDWR, 0666, 0)
+        except IOError, msg:
+            raise os.error, msg
+        return master_fd, tty_name
+    for x in 'pqrstuvwxyzPQRST':
+        for y in '0123456789abcdef':
+            pty_name = '/dev/pty' + x + y
+            try:
+                fd = os.open(pty_name, os.O_RDWR)
+            except os.error:
+                continue
+            return (fd, '/dev/tty' + x + y)
+    raise os.error, 'out of pty devices'
+
+def slave_open(tty_name):
+    """slave_open(tty_name) -> slave_fd
+    Open the pty slave and acquire the controlling terminal, returning
+    opened filedescriptor.
+    Deprecated, use openpty() instead."""
+
+    result = os.open(tty_name, os.O_RDWR)
+    try:
+        from fcntl import ioctl, I_PUSH
+    except ImportError:
+        return result
+    try:
+        ioctl(result, I_PUSH, "ptem")
+        ioctl(result, I_PUSH, "ldterm")
+    except IOError:
+        pass
+    return result
+
+def fork():
+    """fork() -> (pid, master_fd)
+    Fork and make the child a session leader with a controlling terminal."""
+
+    try:
+        pid, fd = os.forkpty()
+    except (AttributeError, OSError):
+        pass
+    else:
+        if pid == CHILD:
+            try:
+                os.setsid()
+            except OSError:
+                # os.forkpty() already set us session leader
+                pass
+        return pid, fd
+
+    master_fd, slave_fd = openpty()
+    pid = os.fork()
+    if pid == CHILD:
+        # Establish a new session.
+        os.setsid()
+        os.close(master_fd)
+
+        # Slave becomes stdin/stdout/stderr of child.
+        os.dup2(slave_fd, STDIN_FILENO)
+        os.dup2(slave_fd, STDOUT_FILENO)
+        os.dup2(slave_fd, STDERR_FILENO)
+        if (slave_fd > STDERR_FILENO):
+            os.close (slave_fd)
+
+    # Parent and child process.
+    return pid, master_fd
+
+def _writen(fd, data):
+    """Write all the data to a descriptor."""
+    while data != '':
+        n = os.write(fd, data)
+        data = data[n:]
+
+def _read(fd):
+    """Default read function."""
+    return os.read(fd, 1024)
+
+def _copy(master_fd, master_read=_read, stdin_read=_read):
+    """Parent copy loop.
+    Copies
+            pty master -> standard output   (master_read)
+            standard input -> pty master    (stdin_read)"""
+    while 1:
+        rfds, wfds, xfds = select(
+                [master_fd, STDIN_FILENO], [], [])
+        if master_fd in rfds:
+            data = master_read(master_fd)
+            os.write(STDOUT_FILENO, data)
+        if STDIN_FILENO in rfds:
+            data = stdin_read(STDIN_FILENO)
+            _writen(master_fd, data)
+
+def spawn(argv, master_read=_read, stdin_read=_read):
+    """Create a spawned process."""
+    if type(argv) == type(''):
+        argv = (argv,)
+    pid, master_fd = fork()
+    if pid == CHILD:
+        os.execlp(argv[0], *argv)
+    try:
+        mode = tty.tcgetattr(STDIN_FILENO)
+        tty.setraw(STDIN_FILENO)
+        restore = 1
+    except tty.error:    # This is the same as termios.error
+        restore = 0
+    try:
+        _copy(master_fd, master_read, stdin_read)
+    except (IOError, OSError):
+        if restore:
+            tty.tcsetattr(STDIN_FILENO, tty.TCSAFLUSH, mode)
+
+    os.close(master_fd)
diff --git a/depot_tools/release/win/python_24/Lib/py_compile.py b/depot_tools/release/win/python_24/Lib/py_compile.py
new file mode 100644
index 0000000..02b0136
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/py_compile.py
@@ -0,0 +1,164 @@
+"""Routine to "compile" a .py file to a .pyc (or .pyo) file.
+
+This module has intimate knowledge of the format of .pyc files.
+"""
+
+import __builtin__
+import imp
+import marshal
+import os
+import sys
+import traceback
+
+MAGIC = imp.get_magic()
+
+__all__ = ["compile", "main", "PyCompileError"]
+
+
class PyCompileError(Exception):
    """Exception raised when an error occurs while attempting to
    compile the file.

    To raise this exception, use

        raise PyCompileError(exc_type, exc_value, file[, msg])

    where

        exc_type:   exception type to be used in error message
                    type name can be accessed as instance attribute
                    'exc_type_name'

        exc_value:  exception value to be used in error message
                    can be accessed as instance attribute 'exc_value'

        file:       name of file being compiled to be used in error message
                    can be accessed as instance attribute 'file'

        msg:        string message to be written as error message
                    If no value is given, a default exception message
                    will be given, consistent with 'standard'
                    py_compile output.
                    message (or default) can be accessed as instance
                    attribute 'msg'
    """

    def __init__(self, exc_type, exc_value, file, msg=''):
        exc_type_name = exc_type.__name__
        if exc_type is SyntaxError:
            # Re-point the formatted traceback at the real file name:
            # the source was compiled from a string, so it reports the
            # location as "<string>".
            tbtext = ''.join(traceback.format_exception_only(exc_type, exc_value))
            errmsg = tbtext.replace('File "<string>"', 'File "%s"' % file)
        else:
            errmsg = "Sorry: %s: %s" % (exc_type_name,exc_value)

        Exception.__init__(self,msg or errmsg,exc_type_name,exc_value,file)

        self.exc_type_name = exc_type_name
        self.exc_value = exc_value
        self.file = file
        self.msg = msg or errmsg

    def __str__(self):
        # The printable form is just the (possibly defaulted) message.
        return self.msg
+
+
# Define an internal helper according to the platform
if os.name == "mac":
    import MacOS
    def set_creator_type(file):
        # Tag the new bytecode file for classic Mac OS Finder:
        # creator 'Pyth', file type 'PYC '.
        MacOS.SetCreatorAndType(file, 'Pyth', 'PYC ')
else:
    def set_creator_type(file):
        # No-op everywhere else; only classic Mac OS tracks
        # creator/type metadata.
        pass
+
def wr_long(f, x):
    """Internal; write a 32-bit int to a file in little-endian order."""
    # Emit the four bytes least-significant first.
    for shift in (0, 8, 16, 24):
        f.write(chr((x >> shift) & 0xff))
+
def compile(file, cfile=None, dfile=None, doraise=False):
    """Byte-compile one Python source file to Python bytecode.

    Arguments:

    file:    source filename
    cfile:   target filename; defaults to source with 'c' or 'o' appended
             ('c' normally, 'o' in optimizing mode, giving .pyc or .pyo)
    dfile:   purported filename; defaults to source (this is the filename
             that will show up in error messages)
    doraise: flag indicating whether or not an exception should be
             raised when a compile error is found. If an exception
             occurs and this flag is set to False, a string
             indicating the nature of the exception will be printed,
             and the function will return to the caller. If an
             exception occurs and this flag is set to True, a
             PyCompileError exception will be raised.

    Note that it isn't necessary to byte-compile Python modules for
    execution efficiency -- Python itself byte-compiles a module when
    it is loaded, and if it can, writes out the bytecode to the
    corresponding .pyc (or .pyo) file.

    However, if a Python installation is shared between users, it is a
    good idea to byte-compile all modules upon installation, since
    other users may not be able to write in the source directories,
    and thus they won't be able to write the .pyc/.pyo file, and then
    they would be byte-compiling every module each time it is loaded.
    This can slow down program start-up considerably.

    See compileall.py for a script/module that uses this module to
    byte-compile all installed files (or all files in selected
    directories).

    """
    # 'U' = universal newlines, so CR/CRLF sources compile the same as LF.
    f = open(file, 'U')
    try:
        timestamp = long(os.fstat(f.fileno()).st_mtime)
    except AttributeError:
        # os.fstat is not available on every platform; fall back to
        # stat'ing the path.
        timestamp = long(os.stat(file).st_mtime)
    codestring = f.read()
    f.close()
    # compile() requires the source to end in a newline.
    if codestring and codestring[-1] != '\n':
        codestring = codestring + '\n'
    try:
        codeobject = __builtin__.compile(codestring, dfile or file,'exec')
    except Exception,err:
        py_exc = PyCompileError(err.__class__,err.args,dfile or file)
        if doraise:
            raise py_exc
        else:
            sys.stderr.write(py_exc.msg)
            return
    if cfile is None:
        # .pyc normally, .pyo when running with -O (__debug__ false).
        cfile = file + (__debug__ and 'c' or 'o')
    fc = open(cfile, 'wb')
    # Write a zero placeholder where the magic number belongs; the real
    # MAGIC is filled in below only after the code object has been
    # marshalled and flushed, so an interrupted compile never leaves a
    # file with a valid-looking header.
    fc.write('\0\0\0\0')
    wr_long(fc, timestamp)
    marshal.dump(codeobject, fc)
    fc.flush()
    fc.seek(0, 0)
    fc.write(MAGIC)
    fc.close()
    set_creator_type(cfile)
+
def main(args=None):
    """Compile several source files.

    The files named in 'args' (or on the command line, if 'args' is
    not specified) are compiled and the resulting bytecode is cached
    in the normal manner.  This function does not search a directory
    structure to locate source files; it only compiles files named
    explicitly.

    """
    if args is None:
        args = sys.argv[1:]
    for filename in args:
        try:
            compile(filename, doraise=True)
        except PyCompileError,err:
            # Report this failure but keep compiling the remaining files.
            sys.stderr.write(err.msg)
+
+if __name__ == "__main__":
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/pyclbr.py b/depot_tools/release/win/python_24/Lib/pyclbr.py
new file mode 100644
index 0000000..0812e22
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pyclbr.py
@@ -0,0 +1,338 @@
+"""Parse a Python module and describe its classes and methods.
+
+Parse enough of a Python file to recognize imports and class and
+method definitions, and to find out the superclasses of a class.
+
+The interface consists of a single function:
+        readmodule_ex(module [, path])
+where module is the name of a Python module, and path is an optional
+list of directories where the module is to be searched.  If present,
+path is prepended to the system search path sys.path.  The return
+value is a dictionary.  The keys of the dictionary are the names of
+the classes defined in the module (including classes that are defined
+via the from XXX import YYY construct).  The values are class
+instances of the class Class defined here.  One special key/value pair
+is present for packages: the key '__path__' has a list as its value
+which contains the package search path.
+
+A class is described by the class Class in this module.  Instances
+of this class have the following instance variables:
+        module -- the module name
+        name -- the name of the class
+        super -- a list of super classes (Class instances)
+        methods -- a dictionary of methods
+        file -- the file in which the class was defined
+        lineno -- the line in the file on which the class statement occurred
+The dictionary of methods uses the method names as keys and the line
+numbers on which the method was defined as values.
+If the name of a super class is not recognized, the corresponding
+entry in the list of super classes is not a class instance but a
+string giving the name of the super class.  Since import statements
+are recognized and imported modules are scanned as well, this
+shouldn't happen often.
+
+A function is described by the class Function in this module.
+Instances of this class have the following instance variables:
+        module -- the module name
+        name -- the name of the class
+        file -- the file in which the class was defined
+        lineno -- the line in the file on which the class statement occurred
+"""
+
+import sys
+import imp
+import tokenize # Python tokenizer
+from token import NAME, DEDENT, NEWLINE
+from operator import itemgetter
+
+__all__ = ["readmodule", "readmodule_ex", "Class", "Function"]
+
+_modules = {}                           # cache of modules we've seen
+
+# each Python class is represented by an instance of this class
class Class:
    '''Class to represent a Python class.'''

    def __init__(self, module, name, super, file, lineno):
        # Record where the class lives and what it inherits from.
        self.module = module
        self.name = name
        self.super = super
        if self.super is None:
            self.super = []
        self.methods = {}
        self.file = file
        self.lineno = lineno

    def _addmethod(self, name, lineno):
        # Remember the line on which method *name* was defined.
        self.methods[name] = lineno
+
class Function:
    '''Class to represent a top-level Python function'''
    def __init__(self, module, name, file, lineno):
        self.module = module    # name of the module defining the function
        self.name = name        # the function's name
        self.file = file        # file in which the function was defined
        self.lineno = lineno    # line of the 'def' statement in that file
+
def readmodule(module, path=[]):
    '''Backwards compatible interface.

    Call readmodule_ex() and then only keep Class objects from the
    resulting dictionary.'''

    res = {}
    for name, obj in _readmodule(module, path).items():
        if isinstance(obj, Class):
            res[name] = obj
    return res
+
def readmodule_ex(module, path=[]):
    '''Read a module file and return a dictionary of classes.

    Search for MODULE in PATH and sys.path, read and parse the
    module and return a dictionary with one entry for each class
    and top-level function found in the module.
    '''
    return _readmodule(module, path)
+
def _readmodule(module, path, inpackage=None):
    '''Do the hard work for readmodule[_ex].

    If INPACKAGE is given, it must be the dotted name of the package in
    which we are searching for a submodule, and then PATH must be the
    package search path; otherwise, we are searching for a top-level
    module, and PATH is combined with sys.path.
    '''
    # Compute the full module name (prepending inpackage if set)
    if inpackage:
        fullmodule = "%s.%s" % (inpackage, module)
    else:
        fullmodule = module

    # Check in the cache
    if fullmodule in _modules:
        return _modules[fullmodule]

    # Initialize the dict for this module's contents
    dict = {}

    # Check if it is a built-in module; we don't do much for these
    if module in sys.builtin_module_names and not inpackage:
        _modules[module] = dict
        return dict

    # Check for a dotted module name: read the package, then recurse
    # into it to find the trailing submodule.
    i = module.rfind('.')
    if i >= 0:
        package = module[:i]
        submodule = module[i+1:]
        parent = _readmodule(package, path, inpackage)
        if inpackage:
            package = "%s.%s" % (inpackage, package)
        return _readmodule(submodule, parent['__path__'], package)

    # Search the path for the module
    f = None
    if inpackage:
        f, file, (suff, mode, type) = imp.find_module(module, path)
    else:
        f, file, (suff, mode, type) = imp.find_module(module, path + sys.path)
    if type == imp.PKG_DIRECTORY:
        # Package: record its search path and parse its __init__ instead.
        dict['__path__'] = [file]
        path = [file] + path
        f, file, (suff, mode, type) = imp.find_module('__init__', [file])
    # Cache the (possibly still empty) dict before parsing, so a
    # recursive import of this module finds it instead of recursing.
    _modules[fullmodule] = dict
    if type != imp.PY_SOURCE:
        # not Python source, can't do anything with this module
        f.close()
        return dict

    stack = [] # stack of (class, indent) pairs

    # Scan the token stream for top-level 'def', 'class', 'import' and
    # 'from ... import' statements; indentation (via DEDENT tokens)
    # tells us when a class or def body ends.
    g = tokenize.generate_tokens(f.readline)
    try:
        for tokentype, token, start, end, line in g:
            if tokentype == DEDENT:
                lineno, thisindent = start
                # close nested classes and defs
                while stack and stack[-1][1] >= thisindent:
                    del stack[-1]
            elif token == 'def':
                lineno, thisindent = start
                # close previous nested classes and defs
                while stack and stack[-1][1] >= thisindent:
                    del stack[-1]
                tokentype, meth_name, start, end, line = g.next()
                if tokentype != NAME:
                    continue # Syntax error
                if stack:
                    cur_class = stack[-1][0]
                    if isinstance(cur_class, Class):
                        # it's a method
                        cur_class._addmethod(meth_name, lineno)
                    # else it's a nested def
                else:
                    # it's a function
                    dict[meth_name] = Function(module, meth_name, file, lineno)
                stack.append((None, thisindent)) # Marker for nested fns
            elif token == 'class':
                lineno, thisindent = start
                # close previous nested classes and defs
                while stack and stack[-1][1] >= thisindent:
                    del stack[-1]
                tokentype, class_name, start, end, line = g.next()
                if tokentype != NAME:
                    continue # Syntax error
                # parse what follows the class name
                tokentype, token, start, end, line = g.next()
                inherit = None
                if token == '(':
                    names = [] # List of superclasses
                    # there's a list of superclasses; accumulate tokens
                    # until the matching ')' -- commas at nesting level 1
                    # separate the individual superclass expressions.
                    level = 1
                    super = [] # Tokens making up current superclass
                    while True:
                        tokentype, token, start, end, line = g.next()
                        if token in (')', ',') and level == 1:
                            n = "".join(super)
                            if n in dict:
                                # we know this super class
                                n = dict[n]
                            else:
                                c = n.split('.')
                                if len(c) > 1:
                                    # super class is of the form
                                    # module.class: look in module for
                                    # class
                                    m = c[-2]
                                    c = c[-1]
                                    if m in _modules:
                                        d = _modules[m]
                                        if c in d:
                                            n = d[c]
                            names.append(n)
                            super = []
                        if token == '(':
                            level += 1
                        elif token == ')':
                            level -= 1
                            if level == 0:
                                break
                        elif token == ',' and level == 1:
                            pass
                        else:
                            super.append(token)
                    inherit = names
                cur_class = Class(fullmodule, class_name, inherit, file, lineno)
                if not stack:
                    dict[class_name] = cur_class
                stack.append((cur_class, thisindent))
            elif token == 'import' and start[1] == 0:
                modules = _getnamelist(g)
                for mod, mod2 in modules:
                    try:
                        # Recursively read the imported module
                        if not inpackage:
                            _readmodule(mod, path)
                        else:
                            try:
                                _readmodule(mod, path, inpackage)
                            except ImportError:
                                _readmodule(mod, [])
                    except:
                        # If we can't find or parse the imported module,
                        # too bad -- don't die here.
                        pass
            elif token == 'from' and start[1] == 0:
                mod, token = _getname(g)
                if not mod or token != "import":
                    continue
                names = _getnamelist(g)
                try:
                    # Recursively read the imported module
                    d = _readmodule(mod, path, inpackage)
                except:
                    # If we can't find or parse the imported module,
                    # too bad -- don't die here.
                    continue
                # add any classes that were defined in the imported module
                # to our name space if they were mentioned in the list
                for n, n2 in names:
                    if n in d:
                        dict[n2 or n] = d[n]
                    elif n == '*':
                        # don't add names that start with _
                        for n in d:
                            if n[0] != '_':
                                dict[n] = d[n]
    except StopIteration:
        pass

    f.close()
    return dict
+
def _getnamelist(g):
    # Helper to get a comma-separated list of dotted names plus 'as'
    # clauses.  Return a list of pairs (name, name2) where name2 is
    # the 'as' name, or None if there is no 'as' clause.
    result = []
    while True:
        name, token = _getname(g)
        if not name:
            return result
        alias = None
        if token == 'as':
            alias, token = _getname(g)
        result.append((name, alias))
        # Skip ahead to the comma separating entries, stopping at the
        # end of the logical line.
        while token != "," and "\n" not in token:
            tokentype, token, start, end, line = g.next()
        if token != ",":
            return result
+
+def _getname(g):
+    # Helper to get a dotted name, return a pair (name, token) where
+    # name is the dotted name, or None if there was no dotted name,
+    # and token is the next input token.
+    parts = []
+    tokentype, token, start, end, line = g.next()
+    if tokentype != NAME and token != '*':
+        return (None, token)
+    parts.append(token)
+    while True:
+        tokentype, token, start, end, line = g.next()
+        if token != '.':
+            break
+        tokentype, token, start, end, line = g.next()
+        if tokentype != NAME:
+            break
+        parts.append(token)
+    return (".".join(parts), token)
+
def _main():
    # Main program for testing: dump the classes/functions found in the
    # module named (or the file path given) on the command line.
    import os
    mod = sys.argv[1]
    if os.path.exists(mod):
        # Argument is a file path: search only its directory and strip
        # the .py suffix to get the module name.
        path = [os.path.dirname(mod)]
        mod = os.path.basename(mod)
        if mod.lower().endswith(".py"):
            mod = mod[:-3]
    else:
        path = []
    dict = readmodule_ex(mod, path)
    objs = dict.values()
    # Report classes and functions in source-line order.
    objs.sort(lambda a, b: cmp(getattr(a, 'lineno', 0),
                               getattr(b, 'lineno', 0)))
    for obj in objs:
        if isinstance(obj, Class):
            print "class", obj.name, obj.super, obj.lineno
            methods = sorted(obj.methods.iteritems(), key=itemgetter(1))
            for name, lineno in methods:
                if name != "__path__":
                    print "  def", name, lineno
        elif isinstance(obj, Function):
            print "def", obj.name, obj.lineno
+
+if __name__ == "__main__":
+    _main()
diff --git a/depot_tools/release/win/python_24/Lib/pydoc.py b/depot_tools/release/win/python_24/Lib/pydoc.py
new file mode 100644
index 0000000..3aacd7e3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/pydoc.py
@@ -0,0 +1,2266 @@
+#!/usr/bin/env python
+# -*- coding: Latin-1 -*-
+"""Generate Python documentation in HTML or text for interactive use.
+
+In the Python interpreter, do "from pydoc import help" to provide online
+help.  Calling help(thing) on a Python object documents the object.
+
+Or, at the shell command line outside of Python:
+
+Run "pydoc <name>" to show documentation on something.  <name> may be
+the name of a function, module, package, or a dotted reference to a
+class or function within a module or module in a package.  If the
+argument contains a path segment delimiter (e.g. slash on Unix,
+backslash on Windows) it is treated as the path to a Python source file.
+
+Run "pydoc -k <keyword>" to search for a keyword in the synopsis lines
+of all available modules.
+
+Run "pydoc -p <port>" to start an HTTP server on a given port on the
+local machine to generate documentation web pages.
+
+For platforms without a command line, "pydoc -g" starts the HTTP server
+and also pops up a little window for controlling it.
+
+Run "pydoc -w <name>" to write out the HTML documentation for a module
+to a file named "<name>.html".
+
+Module docs for core modules are assumed to be in
+
+    http://www.python.org/doc/current/lib/
+
+This can be overridden by setting the PYTHONDOCS environment variable
+to a different URL or to a local directory containing the Library
+Reference Manual pages.
+"""
+
+__author__ = "Ka-Ping Yee <ping@lfw.org>"
+__date__ = "26 February 2001"
+__version__ = "$Revision: 1.100.2.2 $"
+__credits__ = """Guido van Rossum, for an excellent programming language.
+Tommy Burnette, the original creator of manpy.
+Paul Prescod, for all his work on onlinehelp.
+Richard Chamberlain, for the first implementation of textdoc.
+"""
+
+# Known bugs that can't be fixed here:
+#   - imp.load_module() cannot be prevented from clobbering existing
+#     loaded modules, so calling synopsis() on a binary module file
+#     changes the contents of any existing module with the same name.
+#   - If the __file__ attribute on a module is a relative path and
+#     the current directory is changed with os.chdir(), an incorrect
+#     path will be displayed.
+
+import sys, imp, os, re, types, inspect, __builtin__
+from repr import Repr
+from string import expandtabs, find, join, lower, split, strip, rfind, rstrip
+from collections import deque
+
+# --------------------------------------------------------- common routines
+
def pathdirs():
    """Convert sys.path into a list of absolute, existing, unique paths."""
    result = []
    seen = []
    for entry in sys.path:
        # An empty entry means the current directory.
        entry = os.path.abspath(entry or '.')
        key = os.path.normcase(entry)
        # Deduplicate case-insensitively where the OS does.
        if key not in seen and os.path.isdir(entry):
            result.append(entry)
            seen.append(key)
    return result
+
def getdoc(object):
    """Get the doc string or comments for an object."""
    text = inspect.getdoc(object) or inspect.getcomments(object)
    if not text:
        return ''
    # Drop one leading blank line and any trailing whitespace.
    return re.sub('^ *\n', '', text.rstrip()) or ''
+
def splitdoc(doc):
    """Split a doc string into a synopsis line (if any) and the rest."""
    lines = doc.strip().split('\n')
    if len(lines) == 1:
        # A single line is all synopsis.
        return lines[0], ''
    if len(lines) >= 2 and not lines[1].rstrip():
        # Synopsis, blank line, then the body.
        return lines[0], '\n'.join(lines[2:])
    # No blank separator: there is no synopsis, only a body.
    return '', '\n'.join(lines)
+
def classname(object, modname):
    """Get a class name and qualify it with a module name if necessary."""
    # Only qualify when the class comes from some other module.
    if object.__module__ == modname:
        return object.__name__
    return object.__module__ + '.' + object.__name__
+
def isdata(object):
    """Check if an object is of a type that probably means it's data."""
    # Anything that is not a module, class, routine, frame, traceback
    # or code object is treated as plain data.
    for check in (inspect.ismodule, inspect.isclass, inspect.isroutine,
                  inspect.isframe, inspect.istraceback, inspect.iscode):
        if check(object):
            return False
    return True
+
def replace(text, *pairs):
    """Do a series of global replacements on a string."""
    # pairs is a flat (old1, new1, old2, new2, ...) sequence; apply the
    # substitutions left to right.
    while pairs:
        old, new = pairs[0], pairs[1]
        text = new.join(text.split(old))
        pairs = pairs[2:]
    return text
+
def cram(text, maxlen):
    """Omit part of a string if needed to make it fit in a maximum length."""
    if len(text) <= maxlen:
        return text
    # Keep roughly equal head and tail, joined by an ellipsis.
    pre = max(0, (maxlen-3)//2)
    post = max(0, maxlen-3-pre)
    return text[:pre] + '...' + text[len(text)-post:]
+
_re_stripid = re.compile(r' at 0x[0-9a-f]{6,16}(>+)$', re.IGNORECASE)
def stripid(text):
    """Remove the hexadecimal id from a Python object representation."""
    # The behaviour of %p is implementation-dependent in terms of case;
    # only strip when this interpreter's repr() actually embeds ids.
    if not _re_stripid.search(repr(Exception)):
        return text
    return _re_stripid.sub(r'\1', text)
+
+def _is_some_method(obj):
+    return inspect.ismethod(obj) or inspect.ismethoddescriptor(obj)
+
def allmethods(cl):
    """Return a dict mapping method name -> attribute of *cl*,
    including methods inherited from all base classes."""
    methods = {}
    # Collect names from the bases first ...
    for base in cl.__bases__:
        methods.update(allmethods(base)) # all your base are belong to us
    # ... then from cl itself; values are rebound below anyway.
    for name, attr in inspect.getmembers(cl, _is_some_method):
        methods[name] = 1
    # Resolve every collected name against cl so overrides win.
    for name in methods.keys():
        methods[name] = getattr(cl, name)
    return methods
+
+def _split_list(s, predicate):
+    """Split sequence s via predicate, and return pair ([true], [false]).
+
+    The return value is a 2-tuple of lists,
+        ([x for x in s if predicate(x)],
+         [x for x in s if not predicate(x)])
+    """
+
+    yes = []
+    no = []
+    for x in s:
+        if predicate(x):
+            yes.append(x)
+        else:
+            no.append(x)
+    return yes, no
+
def visiblename(name, all=None):
    """Decide whether to show documentation on a variable."""
    # Certain special names are redundant.
    if name in ('__builtins__', '__doc__', '__file__', '__path__',
                '__module__', '__name__'):
        return 0
    # Private names are hidden, but special names are displayed.
    if name.startswith('__') and name.endswith('__'):
        return 1
    if all is None:
        return not name.startswith('_')
    # Only document that which the programmer exported in __all__.
    return name in all
+
+# ----------------------------------------------------- module manipulation
+
def ispackage(path):
    """Guess whether a path refers to a package directory."""
    if not os.path.isdir(path):
        return False
    # A directory is a package if it carries an __init__ module in
    # source or compiled form.
    for ext in ('.py', '.pyc', '.pyo'):
        if os.path.isfile(os.path.join(path, '__init__' + ext)):
            return True
    return False
+
def synopsis(filename, cache={}):
    """Get the one-line summary out of a module file.

    Results are memoized in *cache* (keyed by filename) and only
    recomputed when the file's modification time changes.
    """
    mtime = os.stat(filename).st_mtime
    lastupdate, result = cache.get(filename, (0, None))
    if lastupdate < mtime:
        info = inspect.getmoduleinfo(filename)
        file = open(filename)
        if info and 'b' in info[2]: # binary modules have to be imported
            try: module = imp.load_module('__temp__', file, filename, info[1:])
            except: return None
            result = split(module.__doc__ or '', '\n')[0]
            del sys.modules['__temp__']
        else: # text modules can be directly examined
            # Skip comment lines and blank lines before the docstring.
            line = file.readline()
            while line[:1] == '#' or not strip(line):
                line = file.readline()
                if not line: break
            line = strip(line)
            # Only plain or raw triple-double-quoted docstrings are
            # recognized by this lightweight scan.
            if line[:4] == 'r"""': line = line[1:]
            if line[:3] == '"""':
                line = line[3:]
                # A trailing backslash continues the line.
                if line[-1:] == '\\': line = line[:-1]
                while not strip(line):
                    line = file.readline()
                    if not line: break
                result = strip(split(line, '"""')[0])
            else: result = None
        file.close()
        cache[filename] = (mtime, result)
    return result
+
class ErrorDuringImport(Exception):
    """Errors that occurred while trying to import something to document it."""
    # The second parameter is the (type, value, traceback) triple as
    # returned by sys.exc_info().
    def __init__(self, filename, (exc, value, tb)):
        self.filename = filename
        self.exc = exc
        self.value = value
        self.tb = tb

    def __str__(self):
        exc = self.exc
        if type(exc) is types.ClassType:
            # Class-based exceptions: show the bare class name.
            exc = exc.__name__
        return 'problem in %s - %s: %s' % (self.filename, exc, self.value)
+
def importfile(path):
    """Import a Python source file or compiled file given its path."""
    magic = imp.get_magic()
    # Sniff the first bytes: compiled files start with the magic number.
    file = open(path, 'r')
    if file.read(len(magic)) == magic:
        kind = imp.PY_COMPILED
    else:
        kind = imp.PY_SOURCE
    file.close()
    filename = os.path.basename(path)
    name, ext = os.path.splitext(filename)
    # NOTE(review): the file is reopened in text mode even when it was
    # detected as compiled; confirm this is intended on platforms that
    # distinguish text and binary mode.
    file = open(path, 'r')
    try:
        module = imp.load_module(name, file, path, (ext, 'r', kind))
    except:
        # Wrap whatever went wrong; note the handle is not closed on
        # this path (historical behavior).
        raise ErrorDuringImport(path, sys.exc_info())
    file.close()
    return module
+
def safeimport(path, forceload=0, cache={}):
    """Import a module; handle errors; return None if the module isn't found.

    If the module *is* found but an exception occurs, it's wrapped in an
    ErrorDuringImport exception and reraised.  Unlike __import__, if a
    package path is specified, the module at the end of the path is returned,
    not the package at the beginning.  If the optional 'forceload' argument
    is 1, we reload the module from disk (unless it's a dynamic extension)."""
    if forceload and path in sys.modules:
        # This is the only way to be sure.  Checking the mtime of the file
        # isn't good enough (e.g. what if the module contains a class that
        # inherits from another module that has changed?).
        if path not in sys.builtin_module_names:
            # Python never loads a dynamic extension a second time from the
            # same path, even if the file is changed or missing.  Deleting
            # the entry in sys.modules doesn't help for dynamic extensions,
            # so we're not even going to try to keep them up to date.
            info = inspect.getmoduleinfo(sys.modules[path].__file__)
            if info[3] != imp.C_EXTENSION:
                cache[path] = sys.modules[path] # prevent module from clearing
                del sys.modules[path]
    try:
        module = __import__(path)
    except:
        # Did the error occur before or after the module was found?
        (exc, value, tb) = info = sys.exc_info()
        if path in sys.modules:
            # An error occured while executing the imported module.
            raise ErrorDuringImport(sys.modules[path].__file__, info)
        elif exc is SyntaxError:
            # A SyntaxError occurred before we could execute the module.
            raise ErrorDuringImport(value.filename, info)
        elif exc is ImportError and \
             split(lower(str(value)))[:2] == ['no', 'module']:
            # The module was not found.
            return None
        else:
            # Some other error occurred during the importing process.
            raise ErrorDuringImport(path, sys.exc_info())
    # __import__ returns the top-level package; walk down the dotted
    # path to reach the leaf module.
    for part in split(path, '.')[1:]:
        try: module = getattr(module, part)
        except AttributeError: return None
    return module
+
+# ---------------------------------------------------- formatter base class
+
class Doc:
    """Base class for documentation formatters.

    Subclasses are expected to supply docmodule/docclass/docroutine/
    docother (and optionally docproperty); document() below dispatches
    an arbitrary object to the appropriate one.
    """

    def document(self, object, name=None, *args):
        """Generate documentation for an object."""
        args = (object, name) + args
        # 'try' clause is to attempt to handle the possibility that inspect
        # identifies something in a way that pydoc itself has issues handling;
        # think 'super' and how it is a descriptor (which raises the exception
        # by lacking a __name__ attribute) and an instance.
        try:
            if inspect.ismodule(object): return self.docmodule(*args)
            if inspect.isclass(object): return self.docclass(*args)
            if inspect.isroutine(object): return self.docroutine(*args)
        except AttributeError:
            pass
        if isinstance(object, property): return self.docproperty(*args)
        return self.docother(*args)

    def fail(self, object, name=None, *args):
        """Raise an exception for unimplemented types."""
        message = "don't know how to document object%s of type %s" % (
            name and ' ' + repr(name), type(object).__name__)
        raise TypeError, message

    # Subclasses override these; unimplemented kinds fail loudly.
    docmodule = docclass = docroutine = docother = fail

    def getdocloc(self, object):
        """Return the location of module docs or None"""

        try:
            file = inspect.getabsfile(object)
        except TypeError:
            file = '(built-in)'

        docloc = os.environ.get("PYTHONDOCS",
                                "http://www.python.org/doc/current/lib")
        basedir = os.path.join(sys.exec_prefix, "lib",
                               "python"+sys.version[0:3])
        # Only core modules get a docs URL: either a known built-in-ish
        # name, or a file under the stdlib directory that is not in
        # site-packages.
        if (isinstance(object, type(os)) and
            (object.__name__ in ('errno', 'exceptions', 'gc', 'imp',
                                 'marshal', 'posix', 'signal', 'sys',
                                 'thread', 'zipimport') or
             (file.startswith(basedir) and
              not file.startswith(os.path.join(basedir, 'site-packages'))))):
            htmlfile = "module-%s.html" % object.__name__
            if docloc.startswith("http://"):
                docloc = "%s/%s" % (docloc.rstrip("/"), htmlfile)
            else:
                docloc = os.path.join(docloc, htmlfile)
        else:
            docloc = None
        return docloc
+
+# -------------------------------------------- HTML documentation generator
+
+class HTMLRepr(Repr):
+    """Class for safely making an HTML representation of a Python object."""
+    def __init__(self):
+        Repr.__init__(self)
+        # Tighter truncation limits than Repr's defaults keep pages compact.
+        self.maxlist = self.maxtuple = 20
+        self.maxdict = 10
+        self.maxstring = self.maxother = 100
+
+    def escape(self, text):
+        # Escape the three characters that are special in HTML text.
+        return replace(text, '&', '&amp;', '<', '&lt;', '>', '&gt;')
+
+    def repr(self, object):
+        return Repr.repr(self, object)
+
+    def repr1(self, x, level):
+        # Dispatch to a repr_<typename> method if one exists for x's type
+        # (spaces in the type name are mapped to underscores).
+        if hasattr(type(x), '__name__'):
+            methodname = 'repr_' + join(split(type(x).__name__), '_')
+            if hasattr(self, methodname):
+                return getattr(self, methodname)(x, level)
+        return self.escape(cram(stripid(repr(x)), self.maxother))
+
+    def repr_string(self, x, level):
+        test = cram(x, self.maxstring)
+        testrepr = repr(test)
+        if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
+            # Backslashes are only literal in the string and are never
+            # needed to make any special characters, so show a raw string.
+            return 'r' + testrepr[0] + self.escape(test) + testrepr[0]
+        # Otherwise highlight escape sequences inside the quoted repr.
+        return re.sub(r'((\\[\\abfnrtv\'"]|\\[0-9]..|\\x..|\\u....)+)',
+                      r'<font color="#c040c0">\1</font>',
+                      self.escape(testrepr))
+
+    repr_str = repr_string
+
+    def repr_instance(self, x, level):
+        try:
+            return self.escape(cram(stripid(repr(x)), self.maxstring))
+        except:
+            # repr() of the instance itself raised; fall back to class name.
+            return self.escape('<%s instance>' % x.__class__.__name__)
+
+    repr_unicode = repr_string
+
+class HTMLDoc(Doc):
+    """Formatter class for HTML documentation."""
+
+    # ------------------------------------------- HTML formatting utilities
+
+    _repr_instance = HTMLRepr()
+    repr = _repr_instance.repr
+    escape = _repr_instance.escape
+
+    def page(self, title, contents):
+        """Format an HTML page."""
+        return '''
+<!doctype html PUBLIC "-//W3C//DTD HTML 4.0 Transitional//EN">
+<html><head><title>Python: %s</title>
+</head><body bgcolor="#f0f0f8">
+%s
+</body></html>''' % (title, contents)
+
+    def heading(self, title, fgcol, bgcol, extras=''):
+        """Format a page heading."""
+        return '''
+<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="heading">
+<tr bgcolor="%s">
+<td valign=bottom>&nbsp;<br>
+<font color="%s" face="helvetica, arial">&nbsp;<br>%s</font></td
+><td align=right valign=bottom
+><font color="%s" face="helvetica, arial">%s</font></td></tr></table>
+    ''' % (bgcol, fgcol, title, fgcol, extras or '&nbsp;')
+
+    def section(self, title, fgcol, bgcol, contents, width=6,
+                prelude='', marginalia=None, gap='&nbsp;'):
+        """Format a section with a heading."""
+        if marginalia is None:
+            marginalia = '<tt>' + '&nbsp;' * width + '</tt>'
+        result = '''<p>
+<table width="100%%" cellspacing=0 cellpadding=2 border=0 summary="section">
+<tr bgcolor="%s">
+<td colspan=3 valign=bottom>&nbsp;<br>
+<font color="%s" face="helvetica, arial">%s</font></td></tr>
+    ''' % (bgcol, fgcol, title)
+        if prelude:
+            result = result + '''
+<tr bgcolor="%s"><td rowspan=2>%s</td>
+<td colspan=2>%s</td></tr>
+<tr><td>%s</td>''' % (bgcol, marginalia, prelude, gap)
+        else:
+            result = result + '''
+<tr><td bgcolor="%s">%s</td><td>%s</td>''' % (bgcol, marginalia, gap)
+
+        return result + '\n<td width="100%%">%s</td></tr></table>' % contents
+
+    def bigsection(self, title, *args):
+        """Format a section with a big heading."""
+        title = '<big><strong>%s</strong></big>' % title
+        return self.section(title, *args)
+
+    def preformat(self, text):
+        """Format literal preformatted text."""
+        text = self.escape(expandtabs(text))
+        return replace(text, '\n\n', '\n \n', '\n\n', '\n \n',
+                             ' ', '&nbsp;', '\n', '<br>\n')
+
+    def multicolumn(self, list, format, cols=4):
+        """Format a list of items into a multi-column list."""
+        result = ''
+        rows = (len(list)+cols-1)/cols
+        for col in range(cols):
+            result = result + '<td width="%d%%" valign=top>' % (100/cols)
+            for i in range(rows*col, rows*col+rows):
+                if i < len(list):
+                    result = result + format(list[i]) + '<br>\n'
+            result = result + '</td>'
+        return '<table width="100%%" summary="list"><tr>%s</tr></table>' % result
+
+    def grey(self, text): return '<font color="#909090">%s</font>' % text
+
+    def namelink(self, name, *dicts):
+        """Make a link for an identifier, given name-to-URL mappings."""
+        for dict in dicts:
+            if name in dict:
+                return '<a href="%s">%s</a>' % (dict[name], name)
+        return name
+
+    def classlink(self, object, modname):
+        """Make a link for a class."""
+        name, module = object.__name__, sys.modules.get(object.__module__)
+        if hasattr(module, name) and getattr(module, name) is object:
+            return '<a href="%s.html#%s">%s</a>' % (
+                module.__name__, name, classname(object, modname))
+        return classname(object, modname)
+
+    def modulelink(self, object):
+        """Make a link for a module."""
+        return '<a href="%s.html">%s</a>' % (object.__name__, object.__name__)
+
+    def modpkglink(self, (name, path, ispackage, shadowed)):
+        """Make a link for a module or package to display in an index."""
+        if shadowed:
+            return self.grey(name)
+        if path:
+            url = '%s.%s.html' % (path, name)
+        else:
+            url = '%s.html' % name
+        if ispackage:
+            text = '<strong>%s</strong>&nbsp;(package)' % name
+        else:
+            text = name
+        return '<a href="%s">%s</a>' % (url, text)
+
+    def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
+        """Mark up some plain text, given a context of symbols to look for.
+        Each context dictionary maps object names to anchor names."""
+        escape = escape or self.escape
+        results = []
+        here = 0
+        pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
+                                r'RFC[- ]?(\d+)|'
+                                r'PEP[- ]?(\d+)|'
+                                r'(self\.)?(\w+))')
+        while True:
+            match = pattern.search(text, here)
+            if not match: break
+            start, end = match.span()
+            results.append(escape(text[here:start]))
+
+            all, scheme, rfc, pep, selfdot, name = match.groups()
+            if scheme:
+                url = escape(all).replace('"', '&quot;')
+                results.append('<a href="%s">%s</a>' % (url, url))
+            elif rfc:
+                url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
+                results.append('<a href="%s">%s</a>' % (url, escape(all)))
+            elif pep:
+                url = 'http://www.python.org/peps/pep-%04d.html' % int(pep)
+                results.append('<a href="%s">%s</a>' % (url, escape(all)))
+            elif text[end:end+1] == '(':
+                results.append(self.namelink(name, methods, funcs, classes))
+            elif selfdot:
+                results.append('self.<strong>%s</strong>' % name)
+            else:
+                results.append(self.namelink(name, classes))
+            here = end
+        results.append(escape(text[here:]))
+        return join(results, '')
+
+    # ---------------------------------------------- type-specific routines
+
+    def formattree(self, tree, modname, parent=None):
+        """Produce HTML for a class tree as given by inspect.getclasstree()."""
+        result = ''
+        for entry in tree:
+            if type(entry) is type(()):
+                c, bases = entry
+                result = result + '<dt><font face="helvetica, arial">'
+                result = result + self.classlink(c, modname)
+                if bases and bases != (parent,):
+                    parents = []
+                    for base in bases:
+                        parents.append(self.classlink(base, modname))
+                    result = result + '(' + join(parents, ', ') + ')'
+                result = result + '\n</font></dt>'
+            elif type(entry) is type([]):
+                result = result + '<dd>\n%s</dd>\n' % self.formattree(
+                    entry, modname, c)
+        return '<dl>\n%s</dl>\n' % result
+
+    def docmodule(self, object, name=None, mod=None, *ignored):
+        """Produce HTML documentation for a module object."""
+        name = object.__name__ # ignore the passed-in name
+        try:
+            all = object.__all__
+        except AttributeError:
+            all = None
+        parts = split(name, '.')
+        links = []
+        for i in range(len(parts)-1):
+            links.append(
+                '<a href="%s.html"><font color="#ffffff">%s</font></a>' %
+                (join(parts[:i+1], '.'), parts[i]))
+        linkedname = join(links + parts[-1:], '.')
+        head = '<big><big><strong>%s</strong></big></big>' % linkedname
+        try:
+            path = inspect.getabsfile(object)
+            url = path
+            if sys.platform == 'win32':
+                import nturl2path
+                url = nturl2path.pathname2url(path)
+            filelink = '<a href="file:%s">%s</a>' % (url, path)
+        except TypeError:
+            filelink = '(built-in)'
+        info = []
+        if hasattr(object, '__version__'):
+            version = str(object.__version__)
+            if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
+                version = strip(version[11:-1])
+            info.append('version %s' % self.escape(version))
+        if hasattr(object, '__date__'):
+            info.append(self.escape(str(object.__date__)))
+        if info:
+            head = head + ' (%s)' % join(info, ', ')
+        docloc = self.getdocloc(object)
+        if docloc is not None:
+            docloc = '<br><a href="%(docloc)s">Module Docs</a>' % locals()
+        else:
+            docloc = ''
+        result = self.heading(
+            head, '#ffffff', '#7799ee',
+            '<a href=".">index</a><br>' + filelink + docloc)
+
+        modules = inspect.getmembers(object, inspect.ismodule)
+
+        classes, cdict = [], {}
+        for key, value in inspect.getmembers(object, inspect.isclass):
+            # if __all__ exists, believe it.  Otherwise use old heuristic.
+            if (all is not None or
+                (inspect.getmodule(value) or object) is object):
+                if visiblename(key, all):
+                    classes.append((key, value))
+                    cdict[key] = cdict[value] = '#' + key
+        for key, value in classes:
+            for base in value.__bases__:
+                key, modname = base.__name__, base.__module__
+                module = sys.modules.get(modname)
+                if modname != name and module and hasattr(module, key):
+                    if getattr(module, key) is base:
+                        if not key in cdict:
+                            cdict[key] = cdict[base] = modname + '.html#' + key
+        funcs, fdict = [], {}
+        for key, value in inspect.getmembers(object, inspect.isroutine):
+            # if __all__ exists, believe it.  Otherwise use old heuristic.
+            if (all is not None or
+                inspect.isbuiltin(value) or inspect.getmodule(value) is object):
+                if visiblename(key, all):
+                    funcs.append((key, value))
+                    fdict[key] = '#-' + key
+                    if inspect.isfunction(value): fdict[value] = fdict[key]
+        data = []
+        for key, value in inspect.getmembers(object, isdata):
+            if visiblename(key, all):
+                data.append((key, value))
+
+        doc = self.markup(getdoc(object), self.preformat, fdict, cdict)
+        doc = doc and '<tt>%s</tt>' % doc
+        result = result + '<p>%s</p>\n' % doc
+
+        if hasattr(object, '__path__'):
+            modpkgs = []
+            modnames = []
+            for file in os.listdir(object.__path__[0]):
+                path = os.path.join(object.__path__[0], file)
+                modname = inspect.getmodulename(file)
+                if modname != '__init__':
+                    if modname and modname not in modnames:
+                        modpkgs.append((modname, name, 0, 0))
+                        modnames.append(modname)
+                    elif ispackage(path):
+                        modpkgs.append((file, name, 1, 0))
+            modpkgs.sort()
+            contents = self.multicolumn(modpkgs, self.modpkglink)
+            result = result + self.bigsection(
+                'Package Contents', '#ffffff', '#aa55cc', contents)
+        elif modules:
+            contents = self.multicolumn(
+                modules, lambda (key, value), s=self: s.modulelink(value))
+            result = result + self.bigsection(
+                'Modules', '#fffff', '#aa55cc', contents)
+
+        if classes:
+            classlist = map(lambda (key, value): value, classes)
+            contents = [
+                self.formattree(inspect.getclasstree(classlist, 1), name)]
+            for key, value in classes:
+                contents.append(self.document(value, key, name, fdict, cdict))
+            result = result + self.bigsection(
+                'Classes', '#ffffff', '#ee77aa', join(contents))
+        if funcs:
+            contents = []
+            for key, value in funcs:
+                contents.append(self.document(value, key, name, fdict, cdict))
+            result = result + self.bigsection(
+                'Functions', '#ffffff', '#eeaa77', join(contents))
+        if data:
+            contents = []
+            for key, value in data:
+                contents.append(self.document(value, key))
+            result = result + self.bigsection(
+                'Data', '#ffffff', '#55aa55', join(contents, '<br>\n'))
+        if hasattr(object, '__author__'):
+            contents = self.markup(str(object.__author__), self.preformat)
+            result = result + self.bigsection(
+                'Author', '#ffffff', '#7799ee', contents)
+        if hasattr(object, '__credits__'):
+            contents = self.markup(str(object.__credits__), self.preformat)
+            result = result + self.bigsection(
+                'Credits', '#ffffff', '#7799ee', contents)
+
+        return result
+
+    def docclass(self, object, name=None, mod=None, funcs={}, classes={},
+                 *ignored):
+        """Produce HTML documentation for a class object."""
+        realname = object.__name__
+        name = name or realname
+        bases = object.__bases__
+
+        contents = []
+        push = contents.append
+
+        # Cute little class to pump out a horizontal rule between sections.
+        class HorizontalRule:
+            def __init__(self):
+                self.needone = 0
+            def maybe(self):
+                if self.needone:
+                    push('<hr>\n')
+                self.needone = 1
+        hr = HorizontalRule()
+
+        # List the mro, if non-trivial.
+        mro = deque(inspect.getmro(object))
+        if len(mro) > 2:
+            hr.maybe()
+            push('<dl><dt>Method resolution order:</dt>\n')
+            for base in mro:
+                push('<dd>%s</dd>\n' % self.classlink(base,
+                                                      object.__module__))
+            push('</dl>\n')
+
+        def spill(msg, attrs, predicate):
+            ok, attrs = _split_list(attrs, predicate)
+            if ok:
+                hr.maybe()
+                push(msg)
+                for name, kind, homecls, value in ok:
+                    push(self.document(getattr(object, name), name, mod,
+                                       funcs, classes, mdict, object))
+                    push('\n')
+            return attrs
+
+        def spillproperties(msg, attrs, predicate):
+            ok, attrs = _split_list(attrs, predicate)
+            if ok:
+                hr.maybe()
+                push(msg)
+                for name, kind, homecls, value in ok:
+                    push(self._docproperty(name, value, mod))
+            return attrs
+
+        def spilldata(msg, attrs, predicate):
+            ok, attrs = _split_list(attrs, predicate)
+            if ok:
+                hr.maybe()
+                push(msg)
+                for name, kind, homecls, value in ok:
+                    base = self.docother(getattr(object, name), name, mod)
+                    if callable(value) or inspect.isdatadescriptor(value):
+                        doc = getattr(value, "__doc__", None)
+                    else:
+                        doc = None
+                    if doc is None:
+                        push('<dl><dt>%s</dl>\n' % base)
+                    else:
+                        doc = self.markup(getdoc(value), self.preformat,
+                                          funcs, classes, mdict)
+                        doc = '<dd><tt>%s</tt>' % doc
+                        push('<dl><dt>%s%s</dl>\n' % (base, doc))
+                    push('\n')
+            return attrs
+
+        attrs = filter(lambda (name, kind, cls, value): visiblename(name),
+                       inspect.classify_class_attrs(object))
+        mdict = {}
+        for key, kind, homecls, value in attrs:
+            mdict[key] = anchor = '#' + name + '-' + key
+            value = getattr(object, key)
+            try:
+                # The value may not be hashable (e.g., a data attr with
+                # a dict or list value).
+                mdict[value] = anchor
+            except TypeError:
+                pass
+
+        while attrs:
+            if mro:
+                thisclass = mro.popleft()
+            else:
+                thisclass = attrs[0][2]
+            attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)
+
+            if thisclass is __builtin__.object:
+                attrs = inherited
+                continue
+            elif thisclass is object:
+                tag = 'defined here'
+            else:
+                tag = 'inherited from %s' % self.classlink(thisclass,
+                                                           object.__module__)
+            tag += ':<br>\n'
+
+            # Sort attrs by name.
+            attrs.sort(key=lambda t: t[0])
+
+            # Pump out the attrs, segregated by kind.
+            attrs = spill('Methods %s' % tag, attrs,
+                          lambda t: t[1] == 'method')
+            attrs = spill('Class methods %s' % tag, attrs,
+                          lambda t: t[1] == 'class method')
+            attrs = spill('Static methods %s' % tag, attrs,
+                          lambda t: t[1] == 'static method')
+            attrs = spillproperties('Properties %s' % tag, attrs,
+                                    lambda t: t[1] == 'property')
+            attrs = spilldata('Data and other attributes %s' % tag, attrs,
+                              lambda t: t[1] == 'data')
+            assert attrs == []
+            attrs = inherited
+
+        contents = ''.join(contents)
+
+        if name == realname:
+            title = '<a name="%s">class <strong>%s</strong></a>' % (
+                name, realname)
+        else:
+            title = '<strong>%s</strong> = <a name="%s">class %s</a>' % (
+                name, name, realname)
+        if bases:
+            parents = []
+            for base in bases:
+                parents.append(self.classlink(base, object.__module__))
+            title = title + '(%s)' % join(parents, ', ')
+        doc = self.markup(getdoc(object), self.preformat, funcs, classes, mdict)
+        doc = doc and '<tt>%s<br>&nbsp;</tt>' % doc
+
+        return self.section(title, '#000000', '#ffc8d8', contents, 3, doc)
+
+    def formatvalue(self, object):
+        """Format an argument default value as text."""
+        return self.grey('=' + self.repr(object))
+
+    def docroutine(self, object, name=None, mod=None,
+                   funcs={}, classes={}, methods={}, cl=None):
+        """Produce HTML documentation for a function or method object."""
+        realname = object.__name__
+        name = name or realname
+        anchor = (cl and cl.__name__ or '') + '-' + name
+        note = ''
+        skipdocs = 0
+        if inspect.ismethod(object):
+            imclass = object.im_class
+            if cl:
+                if imclass is not cl:
+                    note = ' from ' + self.classlink(imclass, mod)
+            else:
+                if object.im_self:
+                    note = ' method of %s instance' % self.classlink(
+                        object.im_self.__class__, mod)
+                else:
+                    note = ' unbound %s method' % self.classlink(imclass,mod)
+            object = object.im_func
+
+        if name == realname:
+            title = '<a name="%s"><strong>%s</strong></a>' % (anchor, realname)
+        else:
+            if (cl and realname in cl.__dict__ and
+                cl.__dict__[realname] is object):
+                reallink = '<a href="#%s">%s</a>' % (
+                    cl.__name__ + '-' + realname, realname)
+                skipdocs = 1
+            else:
+                reallink = realname
+            title = '<a name="%s"><strong>%s</strong></a> = %s' % (
+                anchor, name, reallink)
+        if inspect.isfunction(object):
+            args, varargs, varkw, defaults = inspect.getargspec(object)
+            argspec = inspect.formatargspec(
+                args, varargs, varkw, defaults, formatvalue=self.formatvalue)
+            if realname == '<lambda>':
+                title = '<strong>%s</strong> <em>lambda</em> ' % name
+                argspec = argspec[1:-1] # remove parentheses
+        else:
+            argspec = '(...)'
+
+        decl = title + argspec + (note and self.grey(
+               '<font face="helvetica, arial">%s</font>' % note))
+
+        if skipdocs:
+            return '<dl><dt>%s</dt></dl>\n' % decl
+        else:
+            doc = self.markup(
+                getdoc(object), self.preformat, funcs, classes, methods)
+            doc = doc and '<dd><tt>%s</tt></dd>' % doc
+            return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
+
+    def _docproperty(self, name, value, mod):
+        results = []
+        push = results.append
+
+        if name:
+            push('<dl><dt><strong>%s</strong></dt>\n' % name)
+        if value.__doc__ is not None:
+            doc = self.markup(getdoc(value), self.preformat)
+            push('<dd><tt>%s</tt></dd>\n' % doc)
+        for attr, tag in [('fget', '<em>get</em>'),
+                          ('fset', '<em>set</em>'),
+                          ('fdel', '<em>delete</em>')]:
+            func = getattr(value, attr)
+            if func is not None:
+                base = self.document(func, tag, mod)
+                push('<dd>%s</dd>\n' % base)
+        push('</dl>\n')
+
+        return ''.join(results)
+
+    def docproperty(self, object, name=None, mod=None, cl=None):
+        """Produce html documentation for a property."""
+        return self._docproperty(name, object, mod)
+
+    def docother(self, object, name=None, mod=None, *ignored):
+        """Produce HTML documentation for a data object."""
+        lhs = name and '<strong>%s</strong> = ' % name or ''
+        return lhs + self.repr(object)
+
+    def index(self, dir, shadowed=None):
+        """Generate an HTML index for a directory of modules."""
+        modpkgs = []
+        if shadowed is None: shadowed = {}
+        seen = {}
+        files = os.listdir(dir)
+
+        def found(name, ispackage,
+                  modpkgs=modpkgs, shadowed=shadowed, seen=seen):
+            if name not in seen:
+                modpkgs.append((name, '', ispackage, name in shadowed))
+                seen[name] = 1
+                shadowed[name] = 1
+
+        # Package spam/__init__.py takes precedence over module spam.py.
+        for file in files:
+            path = os.path.join(dir, file)
+            if ispackage(path): found(file, 1)
+        for file in files:
+            path = os.path.join(dir, file)
+            if os.path.isfile(path):
+                modname = inspect.getmodulename(file)
+                if modname: found(modname, 0)
+
+        modpkgs.sort()
+        contents = self.multicolumn(modpkgs, self.modpkglink)
+        return self.bigsection(dir, '#ffffff', '#ee77aa', contents)
+
+# -------------------------------------------- text documentation generator
+
<![CDATA[]]>+class TextRepr(Repr):
+    """Class for safely making a text representation of a Python object."""
+    def __init__(self):
+        Repr.__init__(self)
+        # Tighter truncation limits than Repr's defaults keep output compact.
+        self.maxlist = self.maxtuple = 20
+        self.maxdict = 10
+        self.maxstring = self.maxother = 100
+
+    def repr1(self, x, level):
+        # Dispatch to a repr_<typename> method if one exists for x's type
+        # (spaces in the type name are mapped to underscores).
+        if hasattr(type(x), '__name__'):
+            methodname = 'repr_' + join(split(type(x).__name__), '_')
+            if hasattr(self, methodname):
+                return getattr(self, methodname)(x, level)
+        return cram(stripid(repr(x)), self.maxother)
+
+    def repr_string(self, x, level):
+        test = cram(x, self.maxstring)
+        testrepr = repr(test)
+        if '\\' in test and '\\' not in replace(testrepr, r'\\', ''):
+            # Backslashes are only literal in the string and are never
+            # needed to make any special characters, so show a raw string.
+            return 'r' + testrepr[0] + test + testrepr[0]
+        return testrepr
+
+    repr_str = repr_string
+
+    # NOTE(review): unlike HTMLRepr, no repr_unicode alias is defined here,
+    # so unicode objects fall through to the generic repr1 path -- confirm
+    # against upstream pydoc whether this asymmetry is intentional.
+
+    def repr_instance(self, x, level):
+        try:
+            return cram(stripid(repr(x)), self.maxstring)
+        except:
+            # repr() of the instance itself raised; fall back to class name.
+            return '<%s instance>' % x.__class__.__name__
+
+class TextDoc(Doc):
+    """Formatter class for text documentation."""
+
+    # ------------------------------------------- text formatting utilities
+
+    # Shared TextRepr instance; its bound repr serves as this class's repr.
+    _repr_instance = TextRepr()
+    repr = _repr_instance.repr
+
+    def bold(self, text):
+        """Format a string in bold by overstriking."""
+        # Each character becomes char + backspace + char: the classic way
+        # to get bold rendering from terminal pagers such as 'less'.
+        return join(map(lambda ch: ch + '\b' + ch, text), '')
+
+    def indent(self, text, prefix='    '):
+        """Indent text by prepending a given prefix to each line."""
+        if not text: return ''
+        lines = split(text, '\n')
+        lines = map(lambda line, prefix=prefix: prefix + line, lines)
+        # Strip trailing whitespace from the last line only, so the
+        # indented block does not end with dangling spaces.
+        if lines: lines[-1] = rstrip(lines[-1])
+        return join(lines, '\n')
+
+    def section(self, title, contents):
+        """Format a section with a given heading."""
+        # Bold heading, indented body, then a blank separator line.
+        return self.bold(title) + '\n' + rstrip(self.indent(contents)) + '\n\n'
+
+    # ---------------------------------------------- type-specific routines
+
+    def formattree(self, tree, modname, parent=None, prefix=''):
+        """Render in text a class tree as returned by inspect.getclasstree()."""
+        result = ''
+        for entry in tree:
+            if type(entry) is type(()):
+                # (class, bases) tuple: one output line for the class itself.
+                c, bases = entry
+                result = result + prefix + classname(c, modname)
+                if bases and bases != (parent,):
+                    parents = map(lambda c, m=modname: classname(c, m), bases)
+                    result = result + '(%s)' % join(parents, ', ')
+                result = result + '\n'
+            elif type(entry) is type([]):
+                # Nested list: subclasses, indented one level deeper.
+                result = result + self.formattree(
+                    entry, modname, c, prefix + '    ')
+        return result
+
+    def docmodule(self, object, name=None, mod=None):
+        """Produce text documentation for a given module object."""
+        name = object.__name__ # ignore the passed-in name
+        synop, desc = splitdoc(getdoc(object))
+        result = self.section('NAME', name + (synop and ' - ' + synop))
+
+        try:
+            all = object.__all__
+        except AttributeError:
+            all = None
+
+        try:
+            file = inspect.getabsfile(object)
+        except TypeError:
+            # Built-in modules have no source file.
+            file = '(built-in)'
+        result = result + self.section('FILE', file)
+
+        docloc = self.getdocloc(object)
+        if docloc is not None:
+            result = result + self.section('MODULE DOCS', docloc)
+
+        if desc:
+            result = result + self.section('DESCRIPTION', desc)
+
+        # Collect visible classes, functions and data, honoring __all__
+        # when present.
+        classes = []
+        for key, value in inspect.getmembers(object, inspect.isclass):
+            # if __all__ exists, believe it.  Otherwise use old heuristic.
+            if (all is not None
+                or (inspect.getmodule(value) or object) is object):
+                if visiblename(key, all):
+                    classes.append((key, value))
+        funcs = []
+        for key, value in inspect.getmembers(object, inspect.isroutine):
+            # if __all__ exists, believe it.  Otherwise use old heuristic.
+            if (all is not None or
+                inspect.isbuiltin(value) or inspect.getmodule(value) is object):
+                if visiblename(key, all):
+                    funcs.append((key, value))
+        data = []
+        for key, value in inspect.getmembers(object, isdata):
+            if visiblename(key, all):
+                data.append((key, value))
+
+        if hasattr(object, '__path__'):
+            # The object is a package: list its contents.
+            modpkgs = []
+            for file in os.listdir(object.__path__[0]):
+                path = os.path.join(object.__path__[0], file)
+                modname = inspect.getmodulename(file)
+                if modname != '__init__':
+                    if modname and modname not in modpkgs:
+                        modpkgs.append(modname)
+                    elif ispackage(path):
+                        modpkgs.append(file + ' (package)')
+            modpkgs.sort()
+            result = result + self.section(
+                'PACKAGE CONTENTS', join(modpkgs, '\n'))
+
+        if classes:
+            classlist = map(lambda (key, value): value, classes)
+            contents = [self.formattree(
+                inspect.getclasstree(classlist, 1), name)]
+            for key, value in classes:
+                contents.append(self.document(value, key, name))
+            result = result + self.section('CLASSES', join(contents, '\n'))
+
+        if funcs:
+            contents = []
+            for key, value in funcs:
+                contents.append(self.document(value, key, name))
+            result = result + self.section('FUNCTIONS', join(contents, '\n'))
+
+        if data:
+            contents = []
+            for key, value in data:
+                contents.append(self.docother(value, key, name, 70))
+            result = result + self.section('DATA', join(contents, '\n'))
+
+        if hasattr(object, '__version__'):
+            version = str(object.__version__)
+            # '$' + 'Revision: ' is concatenated so this source line itself
+            # is not rewritten by revision-control keyword expansion.
+            if version[:11] == '$' + 'Revision: ' and version[-1:] == '$':
+                version = strip(version[11:-1])
+            result = result + self.section('VERSION', version)
+        if hasattr(object, '__date__'):
+            result = result + self.section('DATE', str(object.__date__))
+        if hasattr(object, '__author__'):
+            result = result + self.section('AUTHOR', str(object.__author__))
+        if hasattr(object, '__credits__'):
+            result = result + self.section('CREDITS', str(object.__credits__))
+        return result
+
    def docclass(self, object, name=None, mod=None):
        """Produce text documentation for a given class object.

        Renders the class title (with base classes), its docstring, the
        method resolution order when non-trivial, and then each attribute
        grouped by kind (methods, class methods, static methods,
        properties, data), segregated by the class that defines it.
        """
        realname = object.__name__
        name = name or realname
        bases = object.__bases__

        # Bind the class's module at definition time so every name printed
        # below is qualified relative to it.
        def makename(c, m=object.__module__):
            return classname(c, m)

        if name == realname:
            title = 'class ' + self.bold(realname)
        else:
            # Documented under an alias: show the binding explicitly.
            title = self.bold(name) + ' = class ' + realname
        if bases:
            parents = map(makename, bases)
            title = title + '(%s)' % join(parents, ', ')

        doc = getdoc(object)
        contents = doc and [doc + '\n'] or []
        push = contents.append

        # List the mro, if non-trivial.
        mro = deque(inspect.getmro(object))
        if len(mro) > 2:
            push("Method resolution order:")
            for base in mro:
                push('    ' + makename(base))
            push('')

        # Cute little class to pump out a horizontal rule between sections.
        class HorizontalRule:
            def __init__(self):
                self.needone = 0
            def maybe(self):
                if self.needone:
                    push('-' * 70)
                self.needone = 1
        hr = HorizontalRule()

        # Emit the attributes matched by 'predicate' under heading 'msg';
        # return the attributes that did not match, for the next section.
        def spill(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self.document(getattr(object, name),
                                       name, mod, object))
            return attrs

        # Same as spill(), but for properties (get/set/delete accessors).
        def spillproperties(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    push(self._docproperty(name, value, mod))
            return attrs

        # Same as spill(), but for plain data attributes.
        def spilldata(msg, attrs, predicate):
            ok, attrs = _split_list(attrs, predicate)
            if ok:
                hr.maybe()
                push(msg)
                for name, kind, homecls, value in ok:
                    if callable(value) or inspect.isdatadescriptor(value):
                        doc = getdoc(value)
                    else:
                        doc = None
                    push(self.docother(getattr(object, name),
                                       name, mod, 70, doc) + '\n')
            return attrs

        attrs = filter(lambda (name, kind, cls, value): visiblename(name),
                       inspect.classify_class_attrs(object))
        # Walk the mro, documenting at each step only the attributes that
        # the current class defines itself.
        while attrs:
            if mro:
                thisclass = mro.popleft()
            else:
                thisclass = attrs[0][2]
            attrs, inherited = _split_list(attrs, lambda t: t[2] is thisclass)

            if thisclass is __builtin__.object:
                # Don't document the universal machinery inherited from object.
                attrs = inherited
                continue
            elif thisclass is object:
                tag = "defined here"
            else:
                tag = "inherited from %s" % classname(thisclass,
                                                      object.__module__)
            # NOTE(review): this filter()'s result is discarded, so the call
            # has no effect; presumably it was meant to be assigned to attrs.
            filter(lambda t: not t[0].startswith('_'), attrs)

            # Sort attrs by name.
            attrs.sort()

            # Pump out the attrs, segregated by kind.
            attrs = spill("Methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'method')
            attrs = spill("Class methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'class method')
            attrs = spill("Static methods %s:\n" % tag, attrs,
                          lambda t: t[1] == 'static method')
            attrs = spillproperties("Properties %s:\n" % tag, attrs,
                                    lambda t: t[1] == 'property')
            attrs = spilldata("Data and other attributes %s:\n" % tag, attrs,
                              lambda t: t[1] == 'data')
            # Every kind must have been handled by one of the spills above.
            assert attrs == []
            attrs = inherited

        contents = '\n'.join(contents)
        if not contents:
            return title + '\n'
        # Indent the body with the ' |  ' gutter used by text-mode pydoc.
        return title + '\n' + self.indent(rstrip(contents), ' |  ') + '\n'
+
+    def formatvalue(self, object):
+        """Format an argument default value as text."""
+        return '=' + self.repr(object)
+
    def docroutine(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a function or method object.

        'cl' is the class currently being documented, if any; it is used to
        annotate inherited methods and to skip the docs of simple aliases.
        """
        realname = object.__name__
        name = name or realname
        note = ''
        skipdocs = 0
        if inspect.ismethod(object):
            # Python 2 method object: note where it comes from, then
            # document the underlying function (im_func).
            imclass = object.im_class
            if cl:
                if imclass is not cl:
                    # Inherited: mention the defining class.
                    note = ' from ' + classname(imclass, mod)
            else:
                if object.im_self:
                    note = ' method of %s instance' % classname(
                        object.im_self.__class__, mod)
                else:
                    note = ' unbound %s method' % classname(imclass,mod)
            object = object.im_func

        if name == realname:
            title = self.bold(realname)
        else:
            if (cl and realname in cl.__dict__ and
                cl.__dict__[realname] is object):
                # An alias for a routine documented elsewhere in the same
                # class under its real name: show the binding, skip the docs.
                skipdocs = 1
            title = self.bold(name) + ' = ' + realname
        if inspect.isfunction(object):
            args, varargs, varkw, defaults = inspect.getargspec(object)
            argspec = inspect.formatargspec(
                args, varargs, varkw, defaults, formatvalue=self.formatvalue)
            if realname == '<lambda>':
                title = 'lambda'
                argspec = argspec[1:-1] # remove parentheses
        else:
            # Built-ins and other routines expose no introspectable signature.
            argspec = '(...)'
        decl = title + argspec + note

        if skipdocs:
            return decl + '\n'
        else:
            doc = getdoc(object) or ''
            return decl + '\n' + (doc and rstrip(self.indent(doc)) + '\n')
+
+    def _docproperty(self, name, value, mod):
+        results = []
+        push = results.append
+
+        if name:
+            push(name)
+        need_blank_after_doc = 0
+        doc = getdoc(value) or ''
+        if doc:
+            push(self.indent(doc))
+            need_blank_after_doc = 1
+        for attr, tag in [('fget', '<get>'),
+                          ('fset', '<set>'),
+                          ('fdel', '<delete>')]:
+            func = getattr(value, attr)
+            if func is not None:
+                if need_blank_after_doc:
+                    push('')
+                    need_blank_after_doc = 0
+                base = self.document(func, tag, mod)
+                push(self.indent(base))
+
+        return '\n'.join(results)
+
    def docproperty(self, object, name=None, mod=None, cl=None):
        """Produce text documentation for a property."""
        # 'cl' is accepted for signature compatibility with docroutine but
        # is not used; the shared _docproperty helper does all the work.
        return self._docproperty(name, object, mod)
+
+    def docother(self, object, name=None, mod=None, maxlen=None, doc=None):
+        """Produce text documentation for a data object."""
+        repr = self.repr(object)
+        if maxlen:
+            line = (name and name + ' = ' or '') + repr
+            chop = maxlen - len(line)
+            if chop < 0: repr = repr[:chop] + '...'
+        line = (name and self.bold(name) + ' = ' or '') + repr
+        if doc is not None:
+            line += '\n' + self.indent(str(doc))
+        return line
+
+# --------------------------------------------------------- user interfaces
+
def pager(text):
    """The first time this is called, determine what kind of pager to use."""
    # Rebind the module-level name 'pager' to the concrete pager chosen by
    # getpager(), so detection happens only once; then delegate to it.
    global pager
    pager = getpager()
    pager(text)
+
def getpager():
    """Decide what method to use for paging through text.

    Returns a one-argument callable that displays a string of text using
    the most capable mechanism the current platform/terminal supports.
    """
    if type(sys.stdout) is not types.FileType:
        # stdout has been replaced (e.g. by an IDE); don't try to page.
        return plainpager
    if not sys.stdin.isatty() or not sys.stdout.isatty():
        return plainpager
    if 'PAGER' in os.environ:
        if sys.platform == 'win32': # pipes completely broken in Windows
            return lambda text: tempfilepager(plain(text), os.environ['PAGER'])
        elif os.environ.get('TERM') in ['dumb', 'emacs']:
            # Dumb terminals can't render backspace-overstrike bold, so
            # strip formatting before feeding the user's chosen pager.
            return lambda text: pipepager(plain(text), os.environ['PAGER'])
        else:
            return lambda text: pipepager(text, os.environ['PAGER'])
    # BUGFIX: this dumb-terminal check previously came *before* the PAGER
    # handling above, which made the PAGER-on-dumb-terminal branch
    # unreachable and silently ignored an explicitly configured PAGER.
    if os.environ.get('TERM') in ['dumb', 'emacs']:
        return plainpager
    if sys.platform == 'win32' or sys.platform.startswith('os2'):
        return lambda text: tempfilepager(plain(text), 'more <')
    if hasattr(os, 'system') and os.system('(less) 2>/dev/null') == 0:
        return lambda text: pipepager(text, 'less')

    # Probe for 'more' by running it on an empty temporary file.
    import tempfile
    (fd, filename) = tempfile.mkstemp()
    os.close(fd)
    try:
        if hasattr(os, 'system') and os.system('more %s' % filename) == 0:
            return lambda text: pipepager(text, 'more')
        else:
            return ttypager
    finally:
        os.unlink(filename)
+
def plain(text):
    """Remove boldface formatting, rendered as 'c\\bc' overstrikes, from text."""
    # Delete every character that is immediately followed by a backspace,
    # together with the backspace itself.
    return re.compile('.\b').sub('', text)
+
def pipepager(text, cmd):
    """Page through text by piping it into an external program."""
    stream = os.popen(cmd, 'w')
    try:
        stream.write(text)
        stream.close()
    except IOError:
        # The user quit the pager before it consumed all of the input;
        # the resulting broken pipe is expected, not an error.
        pass
+
def tempfilepager(text, cmd):
    """Page through text by invoking a program on a temporary file.

    Used where piping into a pager is unreliable (e.g. Windows).  The
    temporary file is removed once the pager command returns.
    """
    import tempfile
    # BUGFIX: tempfile.mktemp() only *names* a file, leaving a race window
    # in which another process can create it first.  mkstemp() creates the
    # file atomically (getpager() above already uses it for this reason).
    (fd, filename) = tempfile.mkstemp()
    try:
        file = os.fdopen(fd, 'w')
        file.write(text)
        file.close()
        os.system(cmd + ' ' + filename)
    finally:
        os.unlink(filename)
+
def ttypager(text):
    """Page through text on a text terminal, one screenful at a time.

    Uses single-keystroke input via cbreak mode when the tty module is
    available, falling back to line-buffered input otherwise.  Keys:
    Return advances one line; 'b'/'B'/ESC page backwards; 'q'/'Q' quit;
    anything else advances a page.
    """
    lines = split(plain(text), '\n')
    try:
        import tty
        fd = sys.stdin.fileno()
        old = tty.tcgetattr(fd)
        tty.setcbreak(fd)
        getchar = lambda: sys.stdin.read(1)
    except (ImportError, AttributeError):
        # No tty support (e.g. Windows): read whole lines, keep first char.
        tty = None
        getchar = lambda: sys.stdin.readline()[:-1][:1]

    try:
        # BUGFIX: os.environ values are strings, so LINES must be converted
        # before the arithmetic; previously a set LINES variable raised
        # TypeError ('str' - 'int').
        r = inc = int(os.environ.get('LINES', 25)) - 1
        sys.stdout.write(join(lines[:inc], '\n') + '\n')
        while lines[r:]:
            sys.stdout.write('-- more --')
            sys.stdout.flush()
            c = getchar()

            if c in ['q', 'Q']:
                sys.stdout.write('\r          \r')
                break
            elif c in ['\r', '\n']:
                sys.stdout.write('\r          \r' + lines[r] + '\n')
                r = r + 1
                continue
            if c in ['b', 'B', '\x1b']:
                r = r - inc - inc
                if r < 0: r = 0
            sys.stdout.write('\n' + join(lines[r:r+inc], '\n') + '\n')
            r = r + inc

    finally:
        if tty:
            # Restore the terminal settings saved before entering cbreak.
            tty.tcsetattr(fd, tty.TCSAFLUSH, old)
+
def plainpager(text):
    """Simply print unformatted text.  This is the ultimate fallback."""
    # Strip backspace-overstrike bolding so raw control characters never
    # reach a stream that cannot interpret them.
    sys.stdout.write(plain(text))
+
def describe(thing):
    """Produce a short, human-readable description of the given thing."""
    if inspect.ismodule(thing):
        if thing.__name__ in sys.builtin_module_names:
            return 'built-in module ' + thing.__name__
        if hasattr(thing, '__path__'):
            return 'package ' + thing.__name__
        return 'module ' + thing.__name__
    # Checked in order; the first matching predicate decides the label.
    for predicate, label in ((inspect.isbuiltin, 'built-in function '),
                             (inspect.isclass, 'class '),
                             (inspect.isfunction, 'function '),
                             (inspect.ismethod, 'method ')):
        if predicate(thing):
            return label + thing.__name__
    if type(thing) is types.InstanceType:
        # Old-style (Python 2) class instance.
        return 'instance of ' + thing.__class__.__name__
    return type(thing).__name__
+
def locate(path, forceload=0):
    """Locate an object by name or dotted path, importing as necessary."""
    parts = [piece for piece in path.split('.') if piece]
    # Import the longest importable prefix of the dotted path.
    module, n = None, 0
    while n < len(parts):
        candidate = safeimport('.'.join(parts[:n + 1]), forceload)
        if not candidate:
            break
        module, n = candidate, n + 1
    if module:
        # Resolve the remaining components as attributes of the module.
        object = module
        for part in parts[n:]:
            try:
                object = getattr(object, part)
            except AttributeError:
                return None
        return object
    # Nothing importable: fall back to builtins (implicitly None otherwise).
    if hasattr(__builtin__, path):
        return getattr(__builtin__, path)
+
+# --------------------------------------- interactive interpreter interface
+
# Module-level formatter singletons shared by the user-interface helpers
# below: doc() renders through 'text', writedoc() through 'html'.
text = TextDoc()
html = HTMLDoc()
+
def resolve(thing, forceload=0):
    """Given an object or a path to an object, get the object and its name.

    Returns an (object, name) pair; 'name' is None when the argument is an
    object with no __name__.  Raises ImportError when a dotted-path string
    cannot be resolved to anything documentable.
    """
    if not isinstance(thing, str):
        return thing, getattr(thing, '__name__', None)
    object = locate(thing, forceload)
    if not object:
        # FIX: use the instance-call raise form, which is accepted by both
        # Python 2 and Python 3; 'raise E, msg' is Python-2-only syntax.
        raise ImportError('no Python documentation found for %r' % thing)
    return object, thing
+
+def doc(thing, title='Python Library Documentation: %s', forceload=0):
+    """Display text documentation, given an object or a path to an object."""
+    try:
+        object, name = resolve(thing, forceload)
+        desc = describe(object)
+        module = inspect.getmodule(object)
+        if name and '.' in name:
+            desc += ' in ' + name[:name.rfind('.')]
+        elif module and module is not object:
+            desc += ' in module ' + module.__name__
+        if not (inspect.ismodule(object) or
+                inspect.isclass(object) or
+                inspect.isroutine(object) or
+                isinstance(object, property)):
+            # If the passed object is a piece of data or an instance,
+            # document its available methods instead of its value.
+            object = type(object)
+            desc += ' object'
+        pager(title % desc + '\n\n' + text.document(object, name))
+    except (ImportError, ErrorDuringImport), value:
+        print value
+
+def writedoc(thing, forceload=0):
+    """Write HTML documentation to a file in the current directory."""
+    try:
+        object, name = resolve(thing, forceload)
+        page = html.page(describe(object), html.document(object, name))
+        file = open(name + '.html', 'w')
+        file.write(page)
+        file.close()
+        print 'wrote', name + '.html'
+    except (ImportError, ErrorDuringImport), value:
+        print value
+
def writedocs(dir, pkgpath='', done=None):
    """Write out HTML documentation for all modules in a directory tree.

    'pkgpath' is the dotted prefix accumulated while recursing into
    packages; 'done' tracks module names already written.
    """
    if done is None:
        done = {}
    for entry in os.listdir(dir):
        path = os.path.join(dir, entry)
        if ispackage(path):
            # Recurse into the package, extending the dotted prefix.
            writedocs(path, pkgpath + entry + '.', done)
            continue
        if not os.path.isfile(path):
            continue
        modname = inspect.getmodulename(path)
        if not modname:
            continue
        if modname == '__init__':
            modname = pkgpath[:-1] # remove trailing period
        else:
            modname = pkgpath + modname
        if modname not in done:
            done[modname] = 1
            writedoc(modname)
+
class Helper:
    """Interactive help utility driven from the interpreter prompt.

    Reads requests from 'input' and writes responses to 'output'; topic
    and keyword entries point into the installed HTML documentation tree
    (located once in __init__).
    """

    # Maps each keyword either to another entry's name to defer to (a bare
    # string), or to a (doc-file, related-topics) pair used by showtopic().
    keywords = {
        'and': 'BOOLEAN',
        'assert': ('ref/assert', ''),
        'break': ('ref/break', 'while for'),
        'class': ('ref/class', 'CLASSES SPECIALMETHODS'),
        'continue': ('ref/continue', 'while for'),
        'def': ('ref/function', ''),
        'del': ('ref/del', 'BASICMETHODS'),
        'elif': 'if',
        'else': ('ref/if', 'while for'),
        'except': 'try',
        'exec': ('ref/exec', ''),
        'finally': 'try',
        'for': ('ref/for', 'break continue while'),
        'from': 'import',
        'global': ('ref/global', 'NAMESPACES'),
        'if': ('ref/if', 'TRUTHVALUE'),
        'import': ('ref/import', 'MODULES'),
        'in': ('ref/comparisons', 'SEQUENCEMETHODS2'),
        'is': 'COMPARISON',
        'lambda': ('ref/lambdas', 'FUNCTIONS'),
        'not': 'BOOLEAN',
        'or': 'BOOLEAN',
        'pass': ('ref/pass', ''),
        'print': ('ref/print', ''),
        'raise': ('ref/raise', 'EXCEPTIONS'),
        'return': ('ref/return', 'FUNCTIONS'),
        'try': ('ref/try', 'EXCEPTIONS'),
        'while': ('ref/while', 'break continue if TRUTHVALUE'),
        'yield': ('ref/yield', ''),
    }

    # Same shape as 'keywords': topic name -> (doc-file, related topics),
    # or a bare string naming another topic to defer to.
    topics = {
        'TYPES': ('ref/types', 'STRINGS UNICODE NUMBERS SEQUENCES MAPPINGS FUNCTIONS CLASSES MODULES FILES inspect'),
        'STRINGS': ('ref/strings', 'str UNICODE SEQUENCES STRINGMETHODS FORMATTING TYPES'),
        'STRINGMETHODS': ('lib/string-methods', 'STRINGS FORMATTING'),
        'FORMATTING': ('lib/typesseq-strings', 'OPERATORS'),
        'UNICODE': ('ref/strings', 'encodings unicode SEQUENCES STRINGMETHODS FORMATTING TYPES'),
        'NUMBERS': ('ref/numbers', 'INTEGER FLOAT COMPLEX TYPES'),
        'INTEGER': ('ref/integers', 'int range'),
        'FLOAT': ('ref/floating', 'float math'),
        'COMPLEX': ('ref/imaginary', 'complex cmath'),
        'SEQUENCES': ('lib/typesseq', 'STRINGMETHODS FORMATTING xrange LISTS'),
        'MAPPINGS': 'DICTIONARIES',
        'FUNCTIONS': ('lib/typesfunctions', 'def TYPES'),
        'METHODS': ('lib/typesmethods', 'class def CLASSES TYPES'),
        'CODEOBJECTS': ('lib/bltin-code-objects', 'compile FUNCTIONS TYPES'),
        'TYPEOBJECTS': ('lib/bltin-type-objects', 'types TYPES'),
        'FRAMEOBJECTS': 'TYPES',
        'TRACEBACKS': 'TYPES',
        'NONE': ('lib/bltin-null-object', ''),
        'ELLIPSIS': ('lib/bltin-ellipsis-object', 'SLICINGS'),
        'FILES': ('lib/bltin-file-objects', ''),
        'SPECIALATTRIBUTES': ('lib/specialattrs', ''),
        'CLASSES': ('ref/types', 'class SPECIALMETHODS PRIVATENAMES'),
        'MODULES': ('lib/typesmodules', 'import'),
        'PACKAGES': 'import',
        'EXPRESSIONS': ('ref/summary', 'lambda or and not in is BOOLEAN COMPARISON BITWISE SHIFTING BINARY FORMATTING POWER UNARY ATTRIBUTES SUBSCRIPTS SLICINGS CALLS TUPLES LISTS DICTIONARIES BACKQUOTES'),
        'OPERATORS': 'EXPRESSIONS',
        'PRECEDENCE': 'EXPRESSIONS',
        'OBJECTS': ('ref/objects', 'TYPES'),
        'SPECIALMETHODS': ('ref/specialnames', 'BASICMETHODS ATTRIBUTEMETHODS CALLABLEMETHODS SEQUENCEMETHODS1 MAPPINGMETHODS SEQUENCEMETHODS2 NUMBERMETHODS CLASSES'),
        'BASICMETHODS': ('ref/customization', 'cmp hash repr str SPECIALMETHODS'),
        'ATTRIBUTEMETHODS': ('ref/attribute-access', 'ATTRIBUTES SPECIALMETHODS'),
        'CALLABLEMETHODS': ('ref/callable-types', 'CALLS SPECIALMETHODS'),
        'SEQUENCEMETHODS1': ('ref/sequence-types', 'SEQUENCES SEQUENCEMETHODS2 SPECIALMETHODS'),
        'SEQUENCEMETHODS2': ('ref/sequence-methods', 'SEQUENCES SEQUENCEMETHODS1 SPECIALMETHODS'),
        'MAPPINGMETHODS': ('ref/sequence-types', 'MAPPINGS SPECIALMETHODS'),
        'NUMBERMETHODS': ('ref/numeric-types', 'NUMBERS AUGMENTEDASSIGNMENT SPECIALMETHODS'),
        'EXECUTION': ('ref/execmodel', 'NAMESPACES DYNAMICFEATURES EXCEPTIONS'),
        'NAMESPACES': ('ref/naming', 'global ASSIGNMENT DELETION DYNAMICFEATURES'),
        'DYNAMICFEATURES': ('ref/dynamic-features', ''),
        'SCOPING': 'NAMESPACES',
        'FRAMES': 'NAMESPACES',
        'EXCEPTIONS': ('ref/exceptions', 'try except finally raise'),
        'COERCIONS': ('ref/coercion-rules','CONVERSIONS'),
        'CONVERSIONS': ('ref/conversions', 'COERCIONS'),
        'IDENTIFIERS': ('ref/identifiers', 'keywords SPECIALIDENTIFIERS'),
        'SPECIALIDENTIFIERS': ('ref/id-classes', ''),
        'PRIVATENAMES': ('ref/atom-identifiers', ''),
        'LITERALS': ('ref/atom-literals', 'STRINGS BACKQUOTES NUMBERS TUPLELITERALS LISTLITERALS DICTIONARYLITERALS'),
        'TUPLES': 'SEQUENCES',
        'TUPLELITERALS': ('ref/exprlists', 'TUPLES LITERALS'),
        'LISTS': ('lib/typesseq-mutable', 'LISTLITERALS'),
        'LISTLITERALS': ('ref/lists', 'LISTS LITERALS'),
        'DICTIONARIES': ('lib/typesmapping', 'DICTIONARYLITERALS'),
        'DICTIONARYLITERALS': ('ref/dict', 'DICTIONARIES LITERALS'),
        'BACKQUOTES': ('ref/string-conversions', 'repr str STRINGS LITERALS'),
        'ATTRIBUTES': ('ref/attribute-references', 'getattr hasattr setattr ATTRIBUTEMETHODS'),
        'SUBSCRIPTS': ('ref/subscriptions', 'SEQUENCEMETHODS1'),
        'SLICINGS': ('ref/slicings', 'SEQUENCEMETHODS2'),
        'CALLS': ('ref/calls', 'EXPRESSIONS'),
        'POWER': ('ref/power', 'EXPRESSIONS'),
        'UNARY': ('ref/unary', 'EXPRESSIONS'),
        'BINARY': ('ref/binary', 'EXPRESSIONS'),
        'SHIFTING': ('ref/shifting', 'EXPRESSIONS'),
        'BITWISE': ('ref/bitwise', 'EXPRESSIONS'),
        'COMPARISON': ('ref/comparisons', 'EXPRESSIONS BASICMETHODS'),
        'BOOLEAN': ('ref/Booleans', 'EXPRESSIONS TRUTHVALUE'),
        'ASSERTION': 'assert',
        'ASSIGNMENT': ('ref/assignment', 'AUGMENTEDASSIGNMENT'),
        'AUGMENTEDASSIGNMENT': ('ref/augassign', 'NUMBERMETHODS'),
        'DELETION': 'del',
        'PRINTING': 'print',
        'RETURNING': 'return',
        'IMPORTING': 'import',
        'CONDITIONAL': 'if',
        'LOOPING': ('ref/compound', 'for while break continue'),
        'TRUTHVALUE': ('lib/truth', 'if while and or not BASICMETHODS'),
        'DEBUGGING': ('lib/module-pdb', 'pdb'),
    }

    def __init__(self, input, output):
        """Bind the helper to I/O streams and locate the HTML doc tree."""
        self.input = input
        self.output = output
        self.docdir = None
        execdir = os.path.dirname(sys.executable)
        homedir = os.environ.get('PYTHONHOME')
        # Probe the usual install locations for the docs; note the loop
        # does not break early, so the *last* matching directory wins.
        for dir in [os.environ.get('PYTHONDOCS'),
                    homedir and os.path.join(homedir, 'doc'),
                    os.path.join(execdir, 'doc'),
                    '/usr/doc/python-docs-' + split(sys.version)[0],
                    '/usr/doc/python-' + split(sys.version)[0],
                    '/usr/doc/python-docs-' + sys.version[:3],
                    '/usr/doc/python-' + sys.version[:3],
                    os.path.join(sys.prefix, 'Resources/English.lproj/Documentation')]:
            if dir and os.path.isdir(os.path.join(dir, 'lib')):
                self.docdir = dir

    def __repr__(self):
        # When evaluated bare at the interactive prompt (stack frame named
        # '?'), behave like a call so typing 'help' starts the utility.
        if inspect.stack()[1][3] == '?':
            self()
            return ''
        return '<pydoc.Helper instance>'

    def __call__(self, request=None):
        """Answer a single request, or run the interactive loop if None."""
        if request is not None:
            self.help(request)
        else:
            self.intro()
            self.interact()
            self.output.write('''
You are now leaving help and returning to the Python interpreter.
If you want to ask for help on a particular object directly from the
interpreter, you can type "help(object)".  Executing "help('string')"
has the same effect as typing a particular string at the help> prompt.
''')

    def interact(self):
        """Run the help> prompt loop until EOF, interrupt, or quit."""
        self.output.write('\n')
        while True:
            try:
                request = self.getline('help> ')
                if not request: break
            except (KeyboardInterrupt, EOFError):
                break
            # Strip quotes so help('foo') style input also works here.
            request = strip(replace(request, '"', '', "'", ''))
            if lower(request) in ['q', 'quit']: break
            self.help(request)

    def getline(self, prompt):
        """Read one line, using raw_input when available."""
        if self.input is sys.stdin:
            return raw_input(prompt)
        else:
            self.output.write(prompt)
            self.output.flush()
            return self.input.readline()

    def help(self, request):
        """Dispatch a single help request (string, Helper, or object)."""
        if type(request) is type(''):
            if request == 'help': self.intro()
            elif request == 'keywords': self.listkeywords()
            elif request == 'topics': self.listtopics()
            elif request == 'modules': self.listmodules()
            elif request[:8] == 'modules ':
                # 'modules spam' searches module synopses for 'spam'.
                self.listmodules(split(request)[1])
            elif request in self.keywords: self.showtopic(request)
            elif request in self.topics: self.showtopic(request)
            elif request: doc(request, 'Help on %s:')
        elif isinstance(request, Helper): self()
        else: doc(request, 'Help on %s:')
        self.output.write('\n')

    def intro(self):
        """Print the welcome banner for the interactive help utility."""
        self.output.write('''
Welcome to Python %s!  This is the online help utility.

If this is your first time using Python, you should definitely check out
the tutorial on the Internet at http://www.python.org/doc/tut/.

Enter the name of any module, keyword, or topic to get help on writing
Python programs and using Python modules.  To quit this help utility and
return to the interpreter, just type "quit".

To get a list of available modules, keywords, or topics, type "modules",
"keywords", or "topics".  Each module also comes with a one-line summary
of what it does; to list the modules whose summaries contain a given word
such as "spam", type "modules spam".
''' % sys.version[:3])

    def list(self, items, columns=4, width=80):
        """Write items sorted, in a column-major multi-column layout."""
        items = items[:]
        items.sort()
        # Python 2: '/' on ints is floor division.
        colw = width / columns
        rows = (len(items) + columns - 1) / columns
        for row in range(rows):
            for col in range(columns):
                i = col * rows + row
                if i < len(items):
                    self.output.write(items[i])
                    if col < columns - 1:
                        self.output.write(' ' + ' ' * (colw-1 - len(items[i])))
            self.output.write('\n')

    def listkeywords(self):
        """List all documented Python keywords."""
        self.output.write('''
Here is a list of the Python keywords.  Enter any keyword to get more help.

''')
        self.list(self.keywords.keys())

    def listtopics(self):
        """List all documented help topics."""
        self.output.write('''
Here is a list of available topics.  Enter any topic name to get more help.

''')
        self.list(self.topics.keys())

    def showtopic(self, topic):
        """Render the HTML documentation page for a topic or keyword as text."""
        if not self.docdir:
            self.output.write('''
Sorry, topic and keyword documentation is not available because the Python
HTML documentation files could not be found.  If you have installed them,
please set the environment variable PYTHONDOCS to indicate their location.
''')
            return
        target = self.topics.get(topic, self.keywords.get(topic))
        if not target:
            self.output.write('no documentation found for %s\n' % repr(topic))
            return
        if type(target) is type(''):
            # A bare string is an alias for another topic; follow it.
            return self.showtopic(target)

        filename, xrefs = target
        filename = self.docdir + '/' + filename + '.html'
        try:
            file = open(filename)
        except:
            self.output.write('could not read docs from %s\n' % filename)
            return

        # Strip navigation bars and the address footer from the HTML page.
        divpat = re.compile('<div[^>]*navigat.*?</div.*?>', re.I | re.S)
        addrpat = re.compile('<address.*?>.*?</address.*?>', re.I | re.S)
        document = re.sub(addrpat, '', re.sub(divpat, '', file.read()))
        file.close()

        # Convert the remaining HTML to plain text and page through it.
        import htmllib, formatter, StringIO
        buffer = StringIO.StringIO()
        parser = htmllib.HTMLParser(
            formatter.AbstractFormatter(formatter.DumbWriter(buffer)))
        parser.start_table = parser.do_p
        parser.end_table = lambda parser=parser: parser.do_p({})
        parser.start_tr = parser.do_br
        parser.start_td = parser.start_th = lambda a, b=buffer: b.write('\t')
        parser.feed(document)
        buffer = replace(buffer.getvalue(), '\xa0', ' ', '\n', '\n  ')
        pager('  ' + strip(buffer) + '\n')
        if xrefs:
            buffer = StringIO.StringIO()
            formatter.DumbWriter(buffer).send_flowing_data(
                'Related help topics: ' + join(split(xrefs), ', ') + '\n')
            self.output.write('\n%s\n' % buffer.getvalue())

    def listmodules(self, key=''):
        """List available modules, or those whose synopses contain 'key'."""
        if key:
            self.output.write('''
Here is a list of matching modules.  Enter any module name to get more help.

''')
            apropos(key)
        else:
            self.output.write('''
Please wait a moment while I gather a list of all available modules...

''')
            modules = {}
            def callback(path, modname, desc, modules=modules):
                if modname and modname[-9:] == '.__init__':
                    modname = modname[:-9] + ' (package)'
                # Only top-level names (no dots) are listed here.
                if find(modname, '.') < 0:
                    modules[modname] = 1
            ModuleScanner().run(callback)
            self.list(modules.keys())
            self.output.write('''
Enter any module name to get more help.  Or, type "modules spam" to search
for modules whose descriptions contain the word "spam".
''')
+
# Module-level Helper instance bound to the interpreter's standard streams.
help = Helper(sys.stdin, sys.stdout)
+
class Scanner:
    """A generic tree iterator.

    roots    -- list of starting nodes (the roots themselves are expanded
                but not returned)
    children -- callable returning a node's list of children
    descendp -- callable deciding whether a node's subtree is expanded
    """

    def __init__(self, roots, children, descendp):
        # Copy so that consuming the iterator never mutates the caller's list.
        self.roots = roots[:]
        self.state = []                # stack of (node, remaining children)
        self.children = children
        self.descendp = descendp

    def next(self):
        """Return the next node in depth-first order, or None when exhausted."""
        while True:
            if not self.state:
                if not self.roots:
                    return None
                root = self.roots.pop(0)
                self.state = [(root, self.children(root))]
            node, pending = self.state[-1]
            if not pending:
                # This subtree is exhausted; resume with its parent.
                self.state.pop()
                continue
            child = pending.pop(0)
            if self.descendp(child):
                self.state.append((child, self.children(child)))
            return child
+
class ModuleScanner(Scanner):
    """An interruptible scanner that searches module synopses."""
    def __init__(self):
        # Scan every directory on the module search path; each tree node
        # is a (directory, dotted-package-prefix) pair.
        roots = map(lambda dir: (dir, ''), pathdirs())
        Scanner.__init__(self, roots, self.submodules, self.isnewpackage)
        # Remember each root's inode so circular symlinks can be detected.
        self.inodes = map(lambda (dir, pkg): os.stat(dir).st_ino, roots)

    def submodules(self, (dir, package)):
        """Return (path, package) child nodes for every entry in 'dir'."""
        children = []
        for file in os.listdir(dir):
            path = os.path.join(dir, file)
            if ispackage(path):
                # Subpackage: extend the dotted prefix with its name.
                children.append((path, package + (package and '.') + file))
            else:
                children.append((path, package))
        children.sort() # so that spam.py comes before spam.pyc or spam.pyo
        return children

    def isnewpackage(self, (dir, package)):
        """Descend into 'dir' only if it is a package not visited before."""
        inode = os.path.exists(dir) and os.stat(dir).st_ino
        if not (os.path.islink(dir) and inode in self.inodes):
            self.inodes.append(inode) # detect circular symbolic links
            return ispackage(dir)
        return False

    def run(self, callback, key=None, completer=None):
        """Walk all modules, invoking callback(path, modname, desc) for each.

        When 'key' is given, only modules whose name or one-line synopsis
        contains it (case-insensitively) are reported.  Setting self.quit
        stops the scan early; 'completer' (if given) runs when it ends.
        """
        if key: key = lower(key)
        self.quit = False
        seen = {}

        # Built-in modules have no file path; report them (path=None) first.
        for modname in sys.builtin_module_names:
            if modname != '__main__':
                seen[modname] = 1
                if key is None:
                    callback(None, modname, '')
                else:
                    desc = split(__import__(modname).__doc__ or '', '\n')[0]
                    if find(lower(modname + ' - ' + desc), key) >= 0:
                        callback(None, modname, desc)

        while not self.quit:
            node = self.next()
            if not node: break
            path, package = node
            modname = inspect.getmodulename(path)
            if os.path.isfile(path) and modname:
                modname = package + (package and '.') + modname
                if not modname in seen:
                    seen[modname] = 1 # if we see spam.py, skip spam.pyc
                    if key is None:
                        callback(path, modname, '')
                    else:
                        desc = synopsis(path) or ''
                        if find(lower(modname + ' - ' + desc), key) >= 0:
                            callback(path, modname, desc)
        if completer: completer()
+
+def apropos(key):
+    """Print all the one-line module summaries that contain a substring."""
+    def callback(path, modname, desc):
+        # Display a package's __init__ module under the package's own name.
+        if modname[-9:] == '.__init__':
+            modname = modname[:-9] + ' (package)'
+        print modname, desc and '- ' + desc
+    try: import warnings
+    except ImportError: pass
+    else: warnings.filterwarnings('ignore') # ignore problems during import
+    ModuleScanner().run(callback, key)
+
+# --------------------------------------------------- web browser interface
+
+def serve(port, callback=None, completer=None):
+    """Start an HTTP server on 'port' serving pydoc HTML pages.
+
+    'callback' (if given) is invoked with the server object once it is
+    listening; 'completer' (if given) is invoked when serving stops.
+    """
+    import BaseHTTPServer, mimetools, select
+
+    # Patch up mimetools.Message so it doesn't break if rfc822 is reloaded.
+    class Message(mimetools.Message):
+        def __init__(self, fp, seekable=1):
+            Message = self.__class__
+            Message.__bases__[0].__bases__[0].__init__(self, fp, seekable)
+            self.encodingheader = self.getheader('content-transfer-encoding')
+            self.typeheader = self.getheader('content-type')
+            self.parsetype()
+            self.parseplist()
+
+    class DocHandler(BaseHTTPServer.BaseHTTPRequestHandler):
+        def send_document(self, title, contents):
+            # Emit a complete HTML page; ignore client disconnects.
+            try:
+                self.send_response(200)
+                self.send_header('Content-Type', 'text/html')
+                self.end_headers()
+                self.wfile.write(html.page(title, contents))
+            except IOError: pass
+
+        def do_GET(self):
+            # '/name.html' documents 'name'; '/' serves the module index.
+            path = self.path
+            if path[-5:] == '.html': path = path[:-5]
+            if path[:1] == '/': path = path[1:]
+            if path and path != '.':
+                try:
+                    obj = locate(path, forceload=1)
+                except ErrorDuringImport, value:
+                    self.send_document(path, html.escape(str(value)))
+                    return
+                if obj:
+                    self.send_document(describe(obj), html.document(obj, path))
+                else:
+                    self.send_document(path,
+'no Python documentation found for %s' % repr(path))
+            else:
+                heading = html.heading(
+'<big><big><strong>Python: Index of Modules</strong></big></big>',
+'#ffffff', '#7799ee')
+                def bltinlink(name):
+                    return '<a href="%s.html">%s</a>' % (name, name)
+                names = filter(lambda x: x != '__main__',
+                               sys.builtin_module_names)
+                contents = html.multicolumn(names, bltinlink)
+                indices = ['<p>' + html.bigsection(
+                    'Built-in Modules', '#ffffff', '#ee77aa', contents)]
+
+                seen = {}
+                for dir in pathdirs():
+                    indices.append(html.index(dir, seen))
+                contents = heading + join(indices) + '''<p align=right>
+<font color="#909090" face="helvetica, arial"><strong>
+pydoc</strong> by Ka-Ping Yee &lt;ping@lfw.org&gt;</font>'''
+                self.send_document('Index of Modules', contents)
+
+        def log_message(self, *args): pass
+
+    class DocServer(BaseHTTPServer.HTTPServer):
+        def __init__(self, port, callback):
+            # Binds to all interfaces; 'host' is only used for the display URL.
+            host = (sys.platform == 'mac') and '127.0.0.1' or 'localhost'
+            self.address = ('', port)
+            self.url = 'http://%s:%d/' % (host, port)
+            self.callback = callback
+            self.base.__init__(self, self.address, self.handler)
+
+        def serve_until_quit(self):
+            # Poll with a 1-second timeout so self.quit is rechecked regularly.
+            import select
+            self.quit = False
+            while not self.quit:
+                rd, wr, ex = select.select([self.socket.fileno()], [], [], 1)
+                if rd: self.handle_request()
+
+        def server_activate(self):
+            self.base.server_activate(self)
+            if self.callback: self.callback(self)
+
+    DocServer.base = BaseHTTPServer.HTTPServer
+    DocServer.handler = DocHandler
+    DocHandler.MessageClass = Message
+    try:
+        try:
+            DocServer(port, callback).serve_until_quit()
+        except (KeyboardInterrupt, select.error):
+            pass
+    finally:
+        if completer: completer()
+
+# ----------------------------------------------------- graphical interface
+
+def gui():
+    """Graphical interface (starts web server and pops up a control window)."""
+    class GUI:
+        def __init__(self, window, port=7464):
+            self.window = window
+            self.server = None
+            self.scanner = None
+
+            import Tkinter
+            self.server_frm = Tkinter.Frame(window)
+            self.title_lbl = Tkinter.Label(self.server_frm,
+                text='Starting server...\n ')
+            self.open_btn = Tkinter.Button(self.server_frm,
+                text='open browser', command=self.open, state='disabled')
+            self.quit_btn = Tkinter.Button(self.server_frm,
+                text='quit serving', command=self.quit, state='disabled')
+
+            self.search_frm = Tkinter.Frame(window)
+            self.search_lbl = Tkinter.Label(self.search_frm, text='Search for')
+            self.search_ent = Tkinter.Entry(self.search_frm)
+            self.search_ent.bind('<Return>', self.search)
+            self.stop_btn = Tkinter.Button(self.search_frm,
+                text='stop', pady=0, command=self.stop, state='disabled')
+            if sys.platform == 'win32':
+                # Trying to hide and show this button crashes under Windows.
+                self.stop_btn.pack(side='right')
+
+            self.window.title('pydoc')
+            self.window.protocol('WM_DELETE_WINDOW', self.quit)
+            self.title_lbl.pack(side='top', fill='x')
+            self.open_btn.pack(side='left', fill='x', expand=1)
+            self.quit_btn.pack(side='right', fill='x', expand=1)
+            self.server_frm.pack(side='top', fill='x')
+
+            self.search_lbl.pack(side='left')
+            self.search_ent.pack(side='right', fill='x', expand=1)
+            self.search_frm.pack(side='top', fill='x')
+            self.search_ent.focus_set()
+
+            font = ('helvetica', sys.platform == 'win32' and 8 or 10)
+            self.result_lst = Tkinter.Listbox(window, font=font, height=6)
+            self.result_lst.bind('<Button-1>', self.select)
+            self.result_lst.bind('<Double-Button-1>', self.goto)
+            self.result_scr = Tkinter.Scrollbar(window,
+                orient='vertical', command=self.result_lst.yview)
+            self.result_lst.config(yscrollcommand=self.result_scr.set)
+
+            self.result_frm = Tkinter.Frame(window)
+            self.goto_btn = Tkinter.Button(self.result_frm,
+                text='go to selected', command=self.goto)
+            self.hide_btn = Tkinter.Button(self.result_frm,
+                text='hide results', command=self.hide)
+            self.goto_btn.pack(side='left', fill='x', expand=1)
+            self.hide_btn.pack(side='right', fill='x', expand=1)
+
+            # Record the collapsed/expanded window geometry for expand()/collapse().
+            self.window.update()
+            self.minwidth = self.window.winfo_width()
+            self.minheight = self.window.winfo_height()
+            self.bigminheight = (self.server_frm.winfo_reqheight() +
+                                 self.search_frm.winfo_reqheight() +
+                                 self.result_lst.winfo_reqheight() +
+                                 self.result_frm.winfo_reqheight())
+            self.bigwidth, self.bigheight = self.minwidth, self.bigminheight
+            self.expanded = 0
+            self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
+            self.window.wm_minsize(self.minwidth, self.minheight)
+            # NOTE(review): willdispatch() appears needed for cross-thread Tk
+            # event dispatch from the server thread -- confirm against Tkinter docs.
+            self.window.tk.willdispatch()
+
+            # Run the HTTP server in a background thread; ready()/quit are
+            # its start/stop callbacks (see serve()).
+            import threading
+            threading.Thread(
+                target=serve, args=(port, self.ready, self.quit)).start()
+
+        def ready(self, server):
+            # Called once the HTTP server is listening; enable the buttons.
+            self.server = server
+            self.title_lbl.config(
+                text='Python documentation server at\n' + server.url)
+            self.open_btn.config(state='normal')
+            self.quit_btn.config(state='normal')
+
+        def open(self, event=None, url=None):
+            url = url or self.server.url
+            try:
+                import webbrowser
+                webbrowser.open(url)
+            except ImportError: # pre-webbrowser.py compatibility
+                if sys.platform == 'win32':
+                    os.system('start "%s"' % url)
+                elif sys.platform == 'mac':
+                    try: import ic
+                    except ImportError: pass
+                    else: ic.launchurl(url)
+                else:
+                    rc = os.system('netscape -remote "openURL(%s)" &' % url)
+                    if rc: os.system('netscape "%s" &' % url)
+
+        def quit(self, event=None):
+            if self.server:
+                self.server.quit = 1
+            self.window.quit()
+
+        def search(self, event=None):
+            key = self.search_ent.get()
+            self.stop_btn.pack(side='right')
+            self.stop_btn.config(state='normal')
+            self.search_lbl.config(text='Searching for "%s"...' % key)
+            self.search_ent.forget()
+            self.search_lbl.pack(side='left')
+            self.result_lst.delete(0, 'end')
+            self.goto_btn.config(state='disabled')
+            self.expand()
+
+            # Cancel any in-flight scan before starting a new background search.
+            import threading
+            if self.scanner:
+                self.scanner.quit = 1
+            self.scanner = ModuleScanner()
+            threading.Thread(target=self.scanner.run,
+                             args=(self.update, key, self.done)).start()
+
+        def update(self, path, modname, desc):
+            # Scanner callback: append one search hit to the result list.
+            if modname[-9:] == '.__init__':
+                modname = modname[:-9] + ' (package)'
+            self.result_lst.insert('end',
+                modname + ' - ' + (desc or '(no description)'))
+
+        def stop(self, event=None):
+            if self.scanner:
+                self.scanner.quit = 1
+                self.scanner = None
+
+        def done(self):
+            # Scanner finished: restore the search entry UI.
+            self.scanner = None
+            self.search_lbl.config(text='Search for')
+            self.search_lbl.pack(side='left')
+            self.search_ent.pack(side='right', fill='x', expand=1)
+            if sys.platform != 'win32': self.stop_btn.forget()
+            self.stop_btn.config(state='disabled')
+
+        def select(self, event=None):
+            self.goto_btn.config(state='normal')
+
+        def goto(self, event=None):
+            # Open the documentation page for the selected module name.
+            selection = self.result_lst.curselection()
+            if selection:
+                modname = split(self.result_lst.get(selection[0]))[0]
+                self.open(url=self.server.url + modname + '.html')
+
+        def collapse(self):
+            if not self.expanded: return
+            self.result_frm.forget()
+            self.result_scr.forget()
+            self.result_lst.forget()
+            self.bigwidth = self.window.winfo_width()
+            self.bigheight = self.window.winfo_height()
+            self.window.wm_geometry('%dx%d' % (self.minwidth, self.minheight))
+            self.window.wm_minsize(self.minwidth, self.minheight)
+            self.expanded = 0
+
+        def expand(self):
+            if self.expanded: return
+            self.result_frm.pack(side='bottom', fill='x')
+            self.result_scr.pack(side='right', fill='y')
+            self.result_lst.pack(side='top', fill='both', expand=1)
+            self.window.wm_geometry('%dx%d' % (self.bigwidth, self.bigheight))
+            self.window.wm_minsize(self.minwidth, self.bigminheight)
+            self.expanded = 1
+
+        def hide(self, event=None):
+            self.stop()
+            self.collapse()
+
+    import Tkinter
+    try:
+        root = Tkinter.Tk()
+        # Tk will crash if pythonw.exe has an XP .manifest
+        # file and the root is not destroyed explicitly.
+        # If the problem is ever fixed in Tk, the explicit
+        # destroy can go.
+        try:
+            gui = GUI(root)
+            root.mainloop()
+        finally:
+            root.destroy()
+    except KeyboardInterrupt:
+        pass
+
+# -------------------------------------------------- command-line interface
+
+def ispath(x):
+    # True if x is a string containing a path separator (treated as a filename).
+    return isinstance(x, str) and find(x, os.sep) >= 0
+
+def cli():
+    """Command-line interface (looks at sys.argv to decide what to do)."""
+    import getopt
+    class BadUsage: pass
+
+    # Scripts don't get the current directory in their path by default.
+    scriptdir = os.path.dirname(sys.argv[0])
+    if scriptdir in sys.path:
+        sys.path.remove(scriptdir)
+    sys.path.insert(0, '.')
+
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], 'gk:p:w')
+        writing = 0
+
+        for opt, val in opts:
+            if opt == '-g':
+                # -g: Tk GUI with embedded server.
+                gui()
+                return
+            if opt == '-k':
+                # -k: keyword search over module synopses.
+                apropos(val)
+                return
+            if opt == '-p':
+                # -p: run the HTTP documentation server on the given port.
+                try:
+                    port = int(val)
+                except ValueError:
+                    raise BadUsage
+                def ready(server):
+                    print 'pydoc server ready at %s' % server.url
+                def stopped():
+                    print 'pydoc server stopped'
+                serve(port, ready, stopped)
+                return
+            if opt == '-w':
+                # -w: write HTML documentation instead of showing text help.
+                writing = 1
+
+        if not args: raise BadUsage
+        for arg in args:
+            # An argument containing os.sep is treated as a source-file path.
+            if ispath(arg) and not os.path.exists(arg):
+                print 'file %r does not exist' % arg
+                break
+            try:
+                if ispath(arg) and os.path.isfile(arg):
+                    arg = importfile(arg)
+                if writing:
+                    if ispath(arg) and os.path.isdir(arg):
+                        writedocs(arg)
+                    else:
+                        writedoc(arg)
+                else:
+                    help.help(arg)
+            except ErrorDuringImport, value:
+                print value
+
+    except (getopt.error, BadUsage):
+        cmd = os.path.basename(sys.argv[0])
+        print """pydoc - the Python documentation tool
+
+%s <name> ...
+    Show text documentation on something.  <name> may be the name of a
+    Python keyword, topic, function, module, or package, or a dotted
+    reference to a class or function within a module or module in a
+    package.  If <name> contains a '%s', it is used as the path to a
+    Python source file to document. If name is 'keywords', 'topics',
+    or 'modules', a listing of these things is displayed.
+
+%s -k <keyword>
+    Search for a keyword in the synopsis lines of all available modules.
+
+%s -p <port>
+    Start an HTTP server on the given port on the local machine.
+
+%s -g
+    Pop up a graphical interface for finding and serving documentation.
+
+%s -w <name> ...
+    Write out the HTML documentation for a module to a file in the current
+    directory.  If <name> contains a '%s', it is treated as a filename; if
+    it names a directory, documentation is written for all the contents.
+""" % (cmd, os.sep, cmd, cmd, cmd, cmd, os.sep)
+
+if __name__ == '__main__': cli()  # command-line entry point
diff --git a/depot_tools/release/win/python_24/Lib/quopri.py b/depot_tools/release/win/python_24/Lib/quopri.py
new file mode 100644
index 0000000..8788afc2f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/quopri.py
@@ -0,0 +1,237 @@
+#! /usr/bin/env python
+
+"""Conversions to/from quoted-printable transport encoding as per RFC 1521."""
+
+# (Dec 1991 version).
+
+__all__ = ["encode", "decode", "encodestring", "decodestring"]
+
+ESCAPE = '='             # quoted-printable escape character
+MAXLINESIZE = 76         # maximum encoded line length (RFC 1521)
+HEX = '0123456789ABCDEF' # digits used when emitting =XX escapes
+EMPTYSTRING = ''
+
+try:
+    from binascii import a2b_qp, b2a_qp
+except ImportError:
+    # C accelerators unavailable; the pure-Python code paths below are used.
+    a2b_qp = None
+    b2a_qp = None
+
+
+def needsquoting(c, quotetabs, header):
+    """Decide whether a particular character needs to be quoted.
+
+    The 'quotetabs' flag indicates whether embedded tabs and spaces should be
+    quoted.  Note that line-ending tabs and spaces are always encoded, as per
+    RFC 1521.
+    """
+    if c in ' \t':
+        return quotetabs
+    # if header, we have to escape _ because _ is used to escape space
+    if c == '_':
+        return header
+    # Quote '=' itself and anything outside printable ASCII (space..tilde).
+    return c == ESCAPE or not (' ' <= c <= '~')
+
+def quote(c):
+    """Quote a single character."""
+    i = ord(c)
+    # '=' followed by two uppercase hex digits, e.g. '=' -> '=3D'.
+    return ESCAPE + HEX[i//16] + HEX[i%16]
+
+
+
+def encode(input, output, quotetabs, header = 0):
+    """Read 'input', apply quoted-printable encoding, and write to 'output'.
+
+    'input' and 'output' are files with readline() and write() methods.
+    The 'quotetabs' flag indicates whether embedded tabs and spaces should be
+    quoted.  Note that line-ending tabs and spaces are always encoded, as per
+    RFC 1521.
+    The 'header' flag indicates whether we are encoding spaces as _ as per
+    RFC 1522.
+    """
+
+    if b2a_qp is not None:
+        # Fast path: delegate to the C implementation in binascii.
+        data = input.read()
+        odata = b2a_qp(data, quotetabs = quotetabs, header = header)
+        output.write(odata)
+        return
+
+    def write(s, output=output, lineEnd='\n'):
+        # RFC 1521 requires that the line ending in a space or tab must have
+        # that trailing character encoded.
+        if s and s[-1:] in ' \t':
+            output.write(s[:-1] + quote(s[-1]) + lineEnd)
+        elif s == '.':
+            output.write(quote(s) + lineEnd)
+        else:
+            output.write(s + lineEnd)
+
+    prevline = None
+    while 1:
+        line = input.readline()
+        if not line:
+            break
+        outline = []
+        # Strip off any readline induced trailing newline
+        stripped = ''
+        if line[-1:] == '\n':
+            line = line[:-1]
+            stripped = '\n'
+        # Calculate the un-length-limited encoded line
+        for c in line:
+            if needsquoting(c, quotetabs, header):
+                c = quote(c)
+            if header and c == ' ':
+                outline.append('_')
+            else:
+                outline.append(c)
+        # First, write out the previous line
+        if prevline is not None:
+            write(prevline)
+        # Now see if we need any soft line breaks because of RFC-imposed
+        # length limitations.  Then do the thisline->prevline dance.
+        thisline = EMPTYSTRING.join(outline)
+        while len(thisline) > MAXLINESIZE:
+            # Don't forget to include the soft line break `=' sign in the
+            # length calculation!
+            write(thisline[:MAXLINESIZE-1], lineEnd='=\n')
+            thisline = thisline[MAXLINESIZE-1:]
+        # Write out the current line
+        prevline = thisline
+    # Write out the last line, without a trailing newline
+    if prevline is not None:
+        # 'stripped' reflects whether the final input line ended with '\n'.
+        write(prevline, lineEnd=stripped)
+
+def encodestring(s, quotetabs = 0, header = 0):
+    """Encode string 's' as quoted-printable and return the result."""
+    if b2a_qp is not None:
+        return b2a_qp(s, quotetabs = quotetabs, header = header)
+    from cStringIO import StringIO
+    infp = StringIO(s)
+    outfp = StringIO()
+    encode(infp, outfp, quotetabs, header)
+    return outfp.getvalue()
+
+
+
+def decode(input, output, header = 0):
+    """Read 'input', apply quoted-printable decoding, and write to 'output'.
+    'input' and 'output' are files with readline() and write() methods.
+    If 'header' is true, decode underscore as space (per RFC 1522)."""
+
+    if a2b_qp is not None:
+        # Fast path: delegate to the C implementation in binascii.
+        data = input.read()
+        odata = a2b_qp(data, header = header)
+        output.write(odata)
+        return
+
+    new = ''
+    while 1:
+        line = input.readline()
+        if not line: break
+        i, n = 0, len(line)
+        if n > 0 and line[n-1] == '\n':
+            partial = 0; n = n-1
+            # Strip trailing whitespace
+            while n > 0 and line[n-1] in " \t\r":
+                n = n-1
+        else:
+            # No terminating newline: keep accumulating into 'new'.
+            partial = 1
+        while i < n:
+            c = line[i]
+            if c == '_' and header:
+                new = new + ' '; i = i+1
+            elif c != ESCAPE:
+                new = new + c; i = i+1
+            elif i+1 == n and not partial:
+                # Soft line break ('=' at end of line): continue on next line.
+                partial = 1; break
+            elif i+1 < n and line[i+1] == ESCAPE:
+                new = new + ESCAPE; i = i+2
+            elif i+2 < n and ishex(line[i+1]) and ishex(line[i+2]):
+                new = new + chr(unhex(line[i+1:i+3])); i = i+3
+            else: # Bad escape sequence -- leave it in
+                new = new + c; i = i+1
+        if not partial:
+            output.write(new + '\n')
+            new = ''
+    if new:
+        output.write(new)
+
+def decodestring(s, header = 0):
+    """Decode quoted-printable string 's' and return the result."""
+    if a2b_qp is not None:
+        return a2b_qp(s, header = header)
+    from cStringIO import StringIO
+    infp = StringIO(s)
+    outfp = StringIO()
+    decode(infp, outfp, header = header)
+    return outfp.getvalue()
+
+
+
+# Other helper functions
+def ishex(c):
+    """Return true if the character 'c' is a hexadecimal digit."""
+    # Accepts both upper- and lower-case digits.
+    return '0' <= c <= '9' or 'a' <= c <= 'f' or 'A' <= c <= 'F'
+
+def unhex(s):
+    """Get the integer value of a hexadecimal number."""
+    bits = 0
+    for c in s:
+        if '0' <= c <= '9':
+            i = ord('0')
+        elif 'a' <= c <= 'f':
+            i = ord('a')-10
+        elif 'A' <= c <= 'F':
+            i = ord('A')-10
+        else:
+            # Stop at the first non-hex character.
+            break
+        bits = bits*16 + (ord(c) - i)
+    return bits
+
+
+
+def main():
+    # Command-line driver: -t quotes tabs, -d decodes; '-' or no args = stdin.
+    import sys
+    import getopt
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], 'td')
+    except getopt.error, msg:
+        sys.stdout = sys.stderr
+        print msg
+        print "usage: quopri [-t | -d] [file] ..."
+        print "-t: quote tabs"
+        print "-d: decode; default encode"
+        sys.exit(2)
+    deco = 0
+    tabs = 0
+    for o, a in opts:
+        if o == '-t': tabs = 1
+        if o == '-d': deco = 1
+    if tabs and deco:
+        sys.stdout = sys.stderr
+        print "-t and -d are mutually exclusive"
+        sys.exit(2)
+    if not args: args = ['-']
+    sts = 0
+    for file in args:
+        if file == '-':
+            fp = sys.stdin
+        else:
+            try:
+                fp = open(file)
+            except IOError, msg:
+                sys.stderr.write("%s: can't open (%s)\n" % (file, msg))
+                sts = 1
+                continue
+        if deco:
+            decode(fp, sys.stdout)
+        else:
+            encode(fp, sys.stdout, tabs)
+        if fp is not sys.stdin:
+            fp.close()
+    if sts:
+        # Exit non-zero if any input file could not be opened.
+        sys.exit(sts)
+
+
+
+# Script entry point: encode/decode stdin or the named files.
+if __name__ == '__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/random.py b/depot_tools/release/win/python_24/Lib/random.py
new file mode 100644
index 0000000..0a02787
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/random.py
@@ -0,0 +1,852 @@
+"""Random variable generators.
+
+    integers
+    --------
+           uniform within range
+
+    sequences
+    ---------
+           pick random element
+           pick random sample
+           generate random permutation
+
+    distributions on the real line:
+    ------------------------------
+           uniform
+           normal (Gaussian)
+           lognormal
+           negative exponential
+           gamma
+           beta
+           pareto
+           Weibull
+
+    distributions on the circle (angles 0 to 2pi)
+    ---------------------------------------------
+           circular uniform
+           von Mises
+
+General notes on the underlying Mersenne Twister core generator:
+
+* The period is 2**19937-1.
+* It is one of the most extensively tested generators in existence
+* Without a direct way to compute N steps forward, the
+  semantics of jumpahead(n) are weakened to simply jump
+  to another distant state and rely on the large period
+  to avoid overlapping sequences.
+* The random() method is implemented in C, executes in
+  a single Python step, and is, therefore, threadsafe.
+
+"""
+
+from warnings import warn as _warn
+from types import MethodType as _MethodType, BuiltinMethodType as _BuiltinMethodType
+from math import log as _log, exp as _exp, pi as _pi, e as _e
+from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin
+from math import floor as _floor
+from os import urandom as _urandom
+from binascii import hexlify as _hexlify
+
+__all__ = ["Random","seed","random","uniform","randint","choice","sample",
+           "randrange","shuffle","normalvariate","lognormvariate",
+           "expovariate","vonmisesvariate","gammavariate",
+           "gauss","betavariate","paretovariate","weibullvariate",
+           "getstate","setstate","jumpahead", "WichmannHill", "getrandbits",
+           "SystemRandom"]
+
+# Precomputed constants shared by the variate generators.
+NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0)
+TWOPI = 2.0*_pi
+LOG4 = _log(4.0)
+SG_MAGICCONST = 1.0 + _log(4.5)
+BPF = 53        # Number of bits in a float
+RECIP_BPF = 2**-BPF     # == 2**-53
+
+
+# Translated by Guido van Rossum from C source provided by
+# Adrian Baddeley.  Adapted by Raymond Hettinger for use with
+# the Mersenne Twister  and os.urandom() core generators.
+
+import _random
+
+class Random(_random.Random):
+    """Random number generator base class used by bound module functions.
+
+    Used to instantiate instances of Random to get generators that don't
+    share state.  Especially useful for multi-threaded programs, creating
+    a different instance of Random for each thread, and using the jumpahead()
+    method to ensure that the generated sequences seen by each thread don't
+    overlap.
+
+    Class Random can also be subclassed if you want to use a different basic
+    generator of your own devising: in that case, override the following
+    methods:  random(), seed(), getstate(), setstate() and jumpahead().
+    Optionally, implement a getrandombits() method so that randrange()
+    can cover arbitrarily large ranges.
+
+    """
+
+    VERSION = 2     # used by getstate/setstate
+
+    def __init__(self, x=None):
+        """Initialize an instance.
+
+        Optional argument x controls seeding, as for Random.seed().
+        """
+
+        self.seed(x)
+        self.gauss_next = None  # cleared here and on reseed; saved by getstate()
+
+    def seed(self, a=None):
+        """Initialize internal state from hashable object.
+
+        None or no argument seeds from current time or from an operating
+        system specific randomness source if available.
+
+        If a is not None or an int or long, hash(a) is used instead.
+        """
+
+        if a is None:
+            try:
+                a = long(_hexlify(_urandom(16)), 16)
+            except NotImplementedError:
+                # Platform has no OS entropy source; fall back to the clock.
+                import time
+                a = long(time.time() * 256) # use fractional seconds
+
+        super(Random, self).seed(a)
+        self.gauss_next = None
+
+    def getstate(self):
+        """Return internal state; can be passed to setstate() later."""
+        # (format version, core generator state, cached gaussian value)
+        return self.VERSION, super(Random, self).getstate(), self.gauss_next
+
+    def setstate(self, state):
+        """Restore internal state from object returned by getstate()."""
+        version = state[0]
+        if version == 2:
+            version, internalstate, self.gauss_next = state
+            super(Random, self).setstate(internalstate)
+        else:
+            # Only VERSION 2 state tuples are understood; reject anything else.
+            raise ValueError("state with version %s passed to "
+                             "Random.setstate() of version %s" %
+                             (version, self.VERSION))
+
+## ---- Methods below this point do not need to be overridden when
+## ---- subclassing for the purpose of using a different core generator.
+
+## -------------------- pickle support  -------------------
+
+    def __getstate__(self): # for pickle
+        return self.getstate()
+
+    def __setstate__(self, state):  # for pickle
+        self.setstate(state)
+
+    def __reduce__(self):
+        # Pickle via the versioned getstate() tuple (see setstate()).
+        return self.__class__, (), self.getstate()
+
+## -------------------- integer methods  -------------------
+
+    def randrange(self, start, stop=None, step=1, int=int, default=None,
+                  maxwidth=1L<<BPF):
+        """Choose a random item from range(start, stop[, step]).
+
+        This fixes the problem with randint() which includes the
+        endpoint; in Python this is usually not what you want.
+        Do not supply the 'int', 'default', and 'maxwidth' arguments.
+        """
+
+        # This code is a bit messy to make it fast for the
+        # common case while still doing adequate error checking.
+        istart = int(start)
+        if istart != start:
+            raise ValueError, "non-integer arg 1 for randrange()"
+        if stop is default:
+            # One-argument form: randrange(stop).
+            if istart > 0:
+                if istart >= maxwidth:
+                    return self._randbelow(istart)
+                return int(self.random() * istart)
+            raise ValueError, "empty range for randrange()"
+
+        # stop argument supplied.
+        istop = int(stop)
+        if istop != stop:
+            raise ValueError, "non-integer stop for randrange()"
+        width = istop - istart
+        if step == 1 and width > 0:
+            # Note that
+            #     int(istart + self.random()*width)
+            # instead would be incorrect.  For example, consider istart
+            # = -2 and istop = 0.  Then the guts would be in
+            # -2.0 to 0.0 exclusive on both ends (ignoring that random()
+            # might return 0.0), and because int() truncates toward 0, the
+            # final result would be -1 or 0 (instead of -2 or -1).
+            #     istart + int(self.random()*width)
+            # would also be incorrect, for a subtler reason:  the RHS
+            # can return a long, and then randrange() would also return
+            # a long, but we're supposed to return an int (for backward
+            # compatibility).
+
+            if width >= maxwidth:
+                # maxwidth is 2**BPF (2**53): wider ranges need more bits
+                # than a single random() float can represent exactly.
+                return int(istart + self._randbelow(width))
+            return int(istart + int(self.random()*width))
+        if step == 1:
+            raise ValueError, "empty range for randrange() (%d,%d, %d)" % (istart, istop, width)
+
+        # Non-unit step argument supplied.
+        istep = int(step)
+        if istep != step:
+            raise ValueError, "non-integer step for randrange()"
+        if istep > 0:
+            n = (width + istep - 1) // istep
+        elif istep < 0:
+            n = (width + istep + 1) // istep
+        else:
+            raise ValueError, "zero step for randrange()"
+
+        if n <= 0:
+            raise ValueError, "empty range for randrange()"
+
+        if n >= maxwidth:
+            return istart + self._randbelow(n)
+        return istart + istep*int(self.random() * n)
+
+    def randint(self, a, b):
+        """Return random integer in range [a, b], including both end points.
+        """
+
+        # Inclusive upper bound: equivalent to randrange(a, b+1).
+        return self.randrange(a, b+1)
+
    def _randbelow(self, n, _log=_log, int=int, _maxwidth=1L<<BPF,
                   _Method=_MethodType, _BuiltinMethod=_BuiltinMethodType):
        """Return a random int in the range [0,n)

        Handles the case where n has more bits than returned
        by a single call to the underlying generator.

        The extra default arguments bind module globals and builtins to
        fast locals at definition time; callers pass only n.
        """

        try:
            getrandbits = self.getrandbits
        except AttributeError:
            # No getrandbits available: fall through to the float path.
            pass
        else:
            # Only call self.getrandbits if the original random() builtin method
            # has not been overridden or if a new getrandbits() was supplied.
            # This assures that the two methods correspond.
            if type(self.random) is _BuiltinMethod or type(getrandbits) is _Method:
                k = int(1.00001 + _log(n-1, 2.0))   # 2**k > n-1 > 2**(k-2)
                # Rejection sampling: redraw k-bit values until one falls
                # below n, giving an exactly uniform result.
                r = getrandbits(k)
                while r >= n:
                    r = getrandbits(k)
                return r
        # Fallback scales a single float; loses uniformity once n exceeds
        # the bit width of the generator's output, hence the warning.
        if n >= _maxwidth:
            _warn("Underlying random() generator does not supply \n"
                "enough bits to choose from a population range this large")
        return int(self.random() * n)
+
+## -------------------- sequence methods  -------------------
+
+    def choice(self, seq):
+        """Choose a random element from a non-empty sequence."""
+        return seq[int(self.random() * len(seq))]  # raises IndexError if seq is empty
+
+    def shuffle(self, x, random=None, int=int):
+        """x, random=random.random -> shuffle list x in place; return None.
+
+        Optional arg random is a 0-argument function returning a random
+        float in [0.0, 1.0); by default, the standard random.random.
+
+        Note that for even rather small len(x), the total number of
+        permutations of x is larger than the period of most random number
+        generators; this implies that "most" permutations of a long
+        sequence can never be generated.
+        """
+
+        if random is None:
+            random = self.random
+        for i in reversed(xrange(1, len(x))):
+            # pick an element in x[:i+1] with which to exchange x[i]
+            j = int(random() * (i+1))
+            x[i], x[j] = x[j], x[i]
+
    def sample(self, population, k):
        """Chooses k unique random elements from a population sequence.

        Returns a new list containing elements from the population while
        leaving the original population unchanged.  The resulting list is
        in selection order so that all sub-slices will also be valid random
        samples.  This allows raffle winners (the sample) to be partitioned
        into grand prize and second place winners (the subslices).

        Members of the population need not be hashable or unique.  If the
        population contains repeats, then each occurrence is a possible
        selection in the sample.

        To choose a sample in a range of integers, use xrange as an argument.
        This is especially fast and space efficient for sampling from a
        large population:   sample(xrange(10000000), 60)

        Raises ValueError if k is negative or larger than the population.
        """

        # Sampling without replacement entails tracking either potential
        # selections (the pool) in a list or previous selections in a
        # dictionary.

        # When the number of selections is small compared to the
        # population, then tracking selections is efficient, requiring
        # only a small dictionary and an occasional reselection.  For
        # a larger number of selections, the pool tracking method is
        # preferred since the list takes less space than the
        # dictionary and it doesn't suffer from frequent reselections.

        n = len(population)
        if not 0 <= k <= n:
            raise ValueError, "sample larger than population"
        random = self.random
        _int = int  # local alias for speed inside the loops
        result = [None] * k
        if n < 6 * k:     # if n len list takes less space than a k len dict
            # Pool method: copy the population and fill vacated slots
            # from the shrinking tail, so selected items can't repeat.
            pool = list(population)
            for i in xrange(k):         # invariant:  non-selected at [0,n-i)
                j = _int(random() * (n-i))
                result[i] = pool[j]
                pool[j] = pool[n-i-1]   # move non-selected item into vacancy
        else:
            # Selection-tracking method: probe that indexing works (sets
            # and dicts don't support it) and fall back to a tuple copy.
            try:
                n > 0 and (population[0], population[n//2], population[n-1])
            except (TypeError, KeyError):   # handle sets and dictionaries
                population = tuple(population)
            selected = {}
            for i in xrange(k):
                j = _int(random() * n)
                while j in selected:
                    j = _int(random() * n)
                result[i] = selected[j] = population[j]
        return result
+
+## -------------------- real-valued distributions  -------------------
+
+## -------------------- uniform distribution -------------------
+
+    def uniform(self, a, b):
+        """Get a random number in the range [a, b)."""
+        return a + (b-a) * self.random()
+
+## -------------------- normal distribution --------------------
+
+    def normalvariate(self, mu, sigma):
+        """Normal distribution.
+
+        mu is the mean, and sigma is the standard deviation.
+
+        """
+        # mu = mean, sigma = standard deviation
+
+        # Uses Kinderman and Monahan method. Reference: Kinderman,
+        # A.J. and Monahan, J.F., "Computer generation of random
+        # variables using the ratio of uniform deviates", ACM Trans
+        # Math Software, 3, (1977), pp257-260.
+
+        random = self.random
+        while True:
+            u1 = random()
+            u2 = 1.0 - random()
+            z = NV_MAGICCONST*(u1-0.5)/u2
+            zz = z*z/4.0
+            if zz <= -_log(u2):
+                break
+        return mu + z*sigma
+
+## -------------------- lognormal distribution --------------------
+
+    def lognormvariate(self, mu, sigma):
+        """Log normal distribution.
+
+        If you take the natural logarithm of this distribution, you'll get a
+        normal distribution with mean mu and standard deviation sigma.
+        mu can have any value, and sigma must be greater than zero.
+
+        """
+        return _exp(self.normalvariate(mu, sigma))
+
+## -------------------- exponential distribution --------------------
+
+    def expovariate(self, lambd):
+        """Exponential distribution.
+
+        lambd is 1.0 divided by the desired mean.  (The parameter would be
+        called "lambda", but that is a reserved word in Python.)  Returned
+        values range from 0 to positive infinity.
+
+        """
+        # lambd: rate lambd = 1/mean
+        # ('lambda' is a Python reserved word)
+
+        random = self.random
+        u = random()
+        while u <= 1e-7:
+            u = random()
+        return -_log(u)/lambd
+
+## -------------------- von Mises distribution --------------------
+
    def vonmisesvariate(self, mu, kappa):
        """Circular data distribution.

        mu is the mean angle, expressed in radians between 0 and 2*pi, and
        kappa is the concentration parameter, which must be greater than or
        equal to zero.  If kappa is equal to zero, this distribution reduces
        to a uniform random angle over the range 0 to 2*pi.

        """
        # mu:    mean angle (in radians between 0 and 2*pi)
        # kappa: concentration parameter kappa (>= 0)
        # if kappa = 0 generate uniform random angle

        # Based upon an algorithm published in: Fisher, N.I.,
        # "Statistical Analysis of Circular Data", Cambridge
        # University Press, 1993.

        # Thanks to Magnus Kessler for a correction to the
        # implementation of step 4.

        random = self.random
        # Near-zero kappa degenerates to the uniform circle.
        if kappa <= 1e-6:
            return TWOPI * random()

        a = 1.0 + _sqrt(1.0 + 4.0 * kappa * kappa)
        b = (a - _sqrt(2.0 * a))/(2.0 * kappa)
        r = (1.0 + b * b)/(2.0 * b)

        # Rejection loop: draw candidates until the acceptance test
        # (the negated condition below) passes.
        while True:
            u1 = random()

            z = _cos(_pi * u1)
            f = (1.0 + r * z)/(r + z)
            c = kappa * (r - f)

            u2 = random()

            if not (u2 >= c * (2.0 - c) and u2 > c * _exp(1.0 - c)):
                break

        # Reflect the accepted deviate to either side of the mean with
        # probability 1/2.
        u3 = random()
        if u3 > 0.5:
            theta = (mu % TWOPI) + _acos(f)
        else:
            theta = (mu % TWOPI) - _acos(f)

        return theta
+
+## -------------------- gamma distribution --------------------
+
    def gammavariate(self, alpha, beta):
        """Gamma distribution.  Not the gamma function!

        Conditions on the parameters are alpha > 0 and beta > 0.
        Raises ValueError otherwise.

        """

        # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2

        # Warning: a few older sources define the gamma distribution in terms
        # of alpha > -1.0
        if alpha <= 0.0 or beta <= 0.0:
            raise ValueError, 'gammavariate: alpha and beta must be > 0.0'

        random = self.random
        if alpha > 1.0:

            # Uses R.C.H. Cheng, "The generation of Gamma
            # variables with non-integral shape parameters",
            # Applied Statistics, (1977), 26, No. 1, p71-74

            ainv = _sqrt(2.0 * alpha - 1.0)
            bbb = alpha - LOG4
            ccc = alpha + ainv

            while True:
                u1 = random()
                # Keep u1 away from 0 and 1 so the logit below is finite.
                if not 1e-7 < u1 < .9999999:
                    continue
                u2 = 1.0 - random()
                v = _log(u1/(1.0-u1))/ainv
                x = alpha*_exp(v)
                z = u1*u1*u2
                r = bbb+ccc*v-x
                # Cheng's squeeze (fast pre-test) or the exact test.
                if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z):
                    return x * beta

        elif alpha == 1.0:
            # expovariate(1)
            u = random()
            while u <= 1e-7:
                u = random()
            return -_log(u) * beta

        else:   # alpha is between 0 and 1 (exclusive)

            # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle

            while True:
                u = random()
                b = (_e + alpha)/_e
                p = b*u
                if p <= 1.0:
                    x = pow(p, 1.0/alpha)
                else:
                    # p > 1
                    x = -_log((b-p)/alpha)
                u1 = random()
                # Accept x when neither rejection condition holds.
                if not (((p <= 1.0) and (u1 > _exp(-x))) or
                          ((p > 1)  and  (u1 > pow(x, alpha - 1.0)))):
                    break
            return x * beta
+
+## -------------------- Gauss (faster alternative) --------------------
+
+    def gauss(self, mu, sigma):
+        """Gaussian distribution.
+
+        mu is the mean, and sigma is the standard deviation.  This is
+        slightly faster than the normalvariate() function.
+
+        Not thread-safe without a lock around calls.
+
+        """
+
+        # When x and y are two variables from [0, 1), uniformly
+        # distributed, then
+        #
+        #    cos(2*pi*x)*sqrt(-2*log(1-y))
+        #    sin(2*pi*x)*sqrt(-2*log(1-y))
+        #
+        # are two *independent* variables with normal distribution
+        # (mu = 0, sigma = 1).
+        # (Lambert Meertens)
+        # (corrected version; bug discovered by Mike Miller, fixed by LM)
+
+        # Multithreading note: When two threads call this function
+        # simultaneously, it is possible that they will receive the
+        # same return value.  The window is very small though.  To
+        # avoid this, you have to use a lock around all calls.  (I
+        # didn't want to slow this down in the serial case by using a
+        # lock here.)
+
+        random = self.random
+        z = self.gauss_next
+        self.gauss_next = None
+        if z is None:
+            x2pi = random() * TWOPI
+            g2rad = _sqrt(-2.0 * _log(1.0 - random()))
+            z = _cos(x2pi) * g2rad
+            self.gauss_next = _sin(x2pi) * g2rad
+
+        return mu + z*sigma
+
+## -------------------- beta --------------------
+## See
+## http://sourceforge.net/bugs/?func=detailbug&bug_id=130030&group_id=5470
+## for Ivan Frohne's insightful analysis of why the original implementation:
+##
+##    def betavariate(self, alpha, beta):
+##        # Discrete Event Simulation in C, pp 87-88.
+##
+##        y = self.expovariate(alpha)
+##        z = self.expovariate(1.0/beta)
+##        return z/(y+z)
+##
+## was dead wrong, and how it probably got that way.
+
    def betavariate(self, alpha, beta):
        """Beta distribution.

        Conditions on the parameters are alpha > 0 and beta > 0 (the
        underlying gammavariate() calls raise ValueError otherwise).
        Returned values range between 0 and 1.

        """

        # This version due to Janne Sinkkonen, and matches all the std
        # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution").
        y = self.gammavariate(alpha, 1.)
        if y == 0:
            return 0.0
        else:
            return y / (y + self.gammavariate(beta, 1.))
+
+## -------------------- Pareto --------------------
+
+    def paretovariate(self, alpha):
+        """Pareto distribution.  alpha is the shape parameter."""
+        # Jain, pg. 495
+
+        u = 1.0 - self.random()
+        return 1.0 / pow(u, 1.0/alpha)
+
+## -------------------- Weibull --------------------
+
+    def weibullvariate(self, alpha, beta):
+        """Weibull distribution.
+
+        alpha is the scale parameter and beta is the shape parameter.
+
+        """
+        # Jain, pg. 499; bug fix courtesy Bill Arms
+
+        u = 1.0 - self.random()
+        return alpha * pow(-_log(u), 1.0/beta)
+
+## -------------------- Wichmann-Hill -------------------
+
class WichmannHill(Random):
    """Random subclass using the Wichmann-Hill generator (Algorithm
    AS 183) as the core generator instead of the default one; see the
    references in random() below.  State is a triple of small ints.
    """

    VERSION = 1     # used by getstate/setstate

    def seed(self, a=None):
        """Initialize internal state from hashable object.

        None or no argument seeds from current time or from an operating
        system specific randomness source if available.

        If a is not None or an int or long, hash(a) is used instead.

        If a is an int or long, a is used directly.  Distinct values between
        0 and 27814431486575L inclusive are guaranteed to yield distinct
        internal states (this guarantee is specific to the default
        Wichmann-Hill generator).
        """

        if a is None:
            try:
                a = long(_hexlify(_urandom(16)), 16)
            except NotImplementedError:
                import time
                a = long(time.time() * 256) # use fractional seconds

        if not isinstance(a, (int, long)):
            a = hash(a)

        # Decompose a into three components, one per congruential
        # generator; the +1 keeps each component nonzero.
        a, x = divmod(a, 30268)
        a, y = divmod(a, 30306)
        a, z = divmod(a, 30322)
        self._seed = int(x)+1, int(y)+1, int(z)+1

        # Invalidate any cached gauss() deviate from the old state.
        self.gauss_next = None

    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""

        # Wichman-Hill random number generator.
        #
        # Wichmann, B. A. & Hill, I. D. (1982)
        # Algorithm AS 183:
        # An efficient and portable pseudo-random number generator
        # Applied Statistics 31 (1982) 188-190
        #
        # see also:
        #        Correction to Algorithm AS 183
        #        Applied Statistics 33 (1984) 123
        #
        #        McLeod, A. I. (1985)
        #        A remark on Algorithm AS 183
        #        Applied Statistics 34 (1985),198-200

        # This part is thread-unsafe:
        # BEGIN CRITICAL SECTION
        x, y, z = self._seed
        x = (171 * x) % 30269
        y = (172 * y) % 30307
        z = (170 * z) % 30323
        self._seed = x, y, z
        # END CRITICAL SECTION

        # Note:  on a platform using IEEE-754 double arithmetic, this can
        # never return 0.0 (asserted by Tim; proof too long for a comment).
        return (x/30269.0 + y/30307.0 + z/30323.0) % 1.0

    def getstate(self):
        """Return internal state; can be passed to setstate() later."""
        return self.VERSION, self._seed, self.gauss_next

    def setstate(self, state):
        """Restore internal state from object returned by getstate().

        Raises ValueError for a state saved by an unknown version.
        """
        version = state[0]
        if version == 1:
            version, self._seed, self.gauss_next = state
        else:
            raise ValueError("state with version %s passed to "
                             "Random.setstate() of version %s" %
                             (version, self.VERSION))

    def jumpahead(self, n):
        """Act as if n calls to random() were made, but quickly.

        n is an int, greater than or equal to 0.

        Example use:  If you have 2 threads and know that each will
        consume no more than a million random numbers, create two Random
        objects r1 and r2, then do
            r2.setstate(r1.getstate())
            r2.jumpahead(1000000)
        Then r1 and r2 will use guaranteed-disjoint segments of the full
        period.
        """

        if not n >= 0:
            raise ValueError("n must be >= 0")
        # Advance each congruential component n steps at once using
        # modular exponentiation: x' = x * m**n (mod p).
        x, y, z = self._seed
        x = int(x * pow(171, n, 30269)) % 30269
        y = int(y * pow(172, n, 30307)) % 30307
        z = int(z * pow(170, n, 30323)) % 30323
        self._seed = x, y, z

    def __whseed(self, x=0, y=0, z=0):
        """Set the Wichmann-Hill seed from (x, y, z).

        These must be integers in the range [0, 256).
        """

        if not type(x) == type(y) == type(z) == int:
            raise TypeError('seeds must be integers')
        if not (0 <= x < 256 and 0 <= y < 256 and 0 <= z < 256):
            raise ValueError('seeds must be in range(0, 256)')
        if 0 == x == y == z:
            # Initialize from current time
            import time
            t = long(time.time() * 256)
            t = int((t&0xffffff) ^ (t>>24))
            t, x = divmod(t, 256)
            t, y = divmod(t, 256)
            t, z = divmod(t, 256)
        # Zero is a poor seed, so substitute 1
        self._seed = (x or 1, y or 1, z or 1)

        self.gauss_next = None

    def whseed(self, a=None):
        """Seed from hashable object's hash code.

        None or no argument seeds from current time.  It is not guaranteed
        that objects with distinct hash codes lead to distinct internal
        states.

        This is obsolete, provided for compatibility with the seed routine
        used prior to Python 2.1.  Use the .seed() method instead.
        """

        if a is None:
            self.__whseed()
            return
        a = hash(a)
        # Spread the hash across the three byte-sized seed components;
        # the "or 1" avoids the poor all-zero byte values.
        a, x = divmod(a, 256)
        a, y = divmod(a, 256)
        a, z = divmod(a, 256)
        x = (x + a) % 256 or 1
        y = (y + a) % 256 or 1
        z = (z + a) % 256 or 1
        self.__whseed(x, y, z)
+
+## --------------- Operating System Random Source  ------------------
+
class SystemRandom(Random):
    """Alternate random number generator using sources provided
    by the operating system (such as /dev/urandom on Unix or
    CryptGenRandom on Windows).

     Not available on all systems (see os.urandom() for details).
    """

    def random(self):
        """Get the next random number in the range [0.0, 1.0)."""
        # Draw 7 random bytes (56 bits) and discard the low 3 bits,
        # then scale by RECIP_BPF; presumably this yields exactly the
        # float precision BPF implies -- defined earlier in the module.
        return (long(_hexlify(_urandom(7)), 16) >> 3) * RECIP_BPF

    def getrandbits(self, k):
        """getrandbits(k) -> x.  Generates a long int with k random bits.

        Raises ValueError for k <= 0 and TypeError for non-integer k.
        """
        if k <= 0:
            raise ValueError('number of bits must be greater than zero')
        if k != int(k):
            raise TypeError('number of bits should be an integer')
        bytes = (k + 7) // 8                    # bits / 8 and rounded up
        x = long(_hexlify(_urandom(bytes)), 16)
        return x >> (bytes * 8 - k)             # trim excess bits

    def _stub(self, *args, **kwds):
        "Stub method.  Not used for a system random number generator."
        return None
    # Seeding and jumping ahead are meaningless for an OS entropy
    # source, so they silently do nothing.
    seed = jumpahead = _stub

    def _notimplemented(self, *args, **kwds):
        "Method should not be called for a system random number generator."
        raise NotImplementedError('System entropy source does not have state.')
    # The OS source has no observable state to save or restore.
    getstate = setstate = _notimplemented
+
+## -------------------- test program --------------------
+
def _test_generator(n, func, args):
    """Call func(*args) n times; print elapsed time and summary
    statistics (average, standard deviation, min, max) of the results.
    """
    import time
    print n, 'times', func.__name__
    total = 0.0
    sqsum = 0.0
    smallest = 1e10
    largest = -1e10
    t0 = time.time()
    for i in range(n):
        x = func(*args)
        total += x
        sqsum = sqsum + x*x
        smallest = min(x, smallest)
        largest = max(x, largest)
    t1 = time.time()
    print round(t1-t0, 3), 'sec,',
    avg = total/n
    # Population standard deviation via E[x^2] - E[x]^2.
    stddev = _sqrt(sqsum/n - avg*avg)
    print 'avg %g, stddev %g, min %g, max %g' % \
              (avg, stddev, smallest, largest)
+
+
def _test(N=2000):
    """Smoke-test the module-level distributions, printing timing and
    summary statistics for N samples of each.
    """
    _test_generator(N, random, ())
    _test_generator(N, normalvariate, (0.0, 1.0))
    _test_generator(N, lognormvariate, (0.0, 1.0))
    _test_generator(N, vonmisesvariate, (0.0, 1.0))
    # gammavariate is exercised across the three alpha regimes
    # (alpha < 1, alpha == 1, alpha > 1).
    _test_generator(N, gammavariate, (0.01, 1.0))
    _test_generator(N, gammavariate, (0.1, 1.0))
    _test_generator(N, gammavariate, (0.1, 2.0))
    _test_generator(N, gammavariate, (0.5, 1.0))
    _test_generator(N, gammavariate, (0.9, 1.0))
    _test_generator(N, gammavariate, (1.0, 1.0))
    _test_generator(N, gammavariate, (2.0, 1.0))
    _test_generator(N, gammavariate, (20.0, 1.0))
    _test_generator(N, gammavariate, (200.0, 1.0))
    _test_generator(N, gauss, (0.0, 1.0))
    _test_generator(N, betavariate, (3.0, 3.0))
+
# Create one instance, seeded from current time, and export its methods
# as module-level functions.  The functions share state across all uses
#(both in the user's code and in the Python libraries), but that's fine
# for most programs and is easier for the casual user than making them
# instantiate their own Random() instance.

_inst = Random()
# Each module-level name below is a bound method of the shared instance.
seed = _inst.seed
random = _inst.random
uniform = _inst.uniform
randint = _inst.randint
choice = _inst.choice
randrange = _inst.randrange
sample = _inst.sample
shuffle = _inst.shuffle
normalvariate = _inst.normalvariate
lognormvariate = _inst.lognormvariate
expovariate = _inst.expovariate
vonmisesvariate = _inst.vonmisesvariate
gammavariate = _inst.gammavariate
gauss = _inst.gauss
betavariate = _inst.betavariate
paretovariate = _inst.paretovariate
weibullvariate = _inst.weibullvariate
getstate = _inst.getstate
setstate = _inst.setstate
jumpahead = _inst.jumpahead
getrandbits = _inst.getrandbits

if __name__ == '__main__':
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/re.py b/depot_tools/release/win/python_24/Lib/re.py
new file mode 100644
index 0000000..f1cbe2c12
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/re.py
@@ -0,0 +1,6 @@
"""Minimal "re" compatibility wrapper.  See "sre" for documentation."""

engine = "sre" # Some apps might use this undocumented variable

# Re-export the entire sre API (including its __all__, which a bare
# star-import would not copy) under the "re" name.
from sre import *
from sre import __all__
diff --git a/depot_tools/release/win/python_24/Lib/reconvert.py b/depot_tools/release/win/python_24/Lib/reconvert.py
new file mode 100644
index 0000000..2c77ee9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/reconvert.py
@@ -0,0 +1,192 @@
+#! /usr/bin/env python
+
+r"""Convert old ("regex") regular expressions to new syntax ("re").
+
+When imported as a module, there are two functions, with their own
+strings:
+
+  convert(s, syntax=None) -- convert a regex regular expression to re syntax
+
+  quote(s) -- return a quoted string literal
+
+When used as a script, read a Python string literal (or any other
+expression evaluating to a string) from stdin, and write the
+translated expression to stdout as a string literal.  Unless stdout is
+a tty, no trailing \n is written to stdout.  This is done so that it
+can be used with Emacs C-U M-| (shell-command-on-region with argument
+which filters the region through the shell command).
+
+No attempt has been made at coding for performance.
+
+Translation table...
+
+    \(    (     (unless RE_NO_BK_PARENS set)
+    \)    )     (unless RE_NO_BK_PARENS set)
+    \|    |     (unless RE_NO_BK_VBAR set)
+    \<    \b    (not quite the same, but alla...)
+    \>    \b    (not quite the same, but alla...)
+    \`    \A
+    \'    \Z
+
+Not translated...
+
+    .
+    ^
+    $
+    *
+    +           (unless RE_BK_PLUS_QM set, then to \+)
+    ?           (unless RE_BK_PLUS_QM set, then to \?)
+    \
+    \b
+    \B
+    \w
+    \W
+    \1 ... \9
+
+Special cases...
+
+    Non-printable characters are always replaced by their 3-digit
+    escape code (except \t, \n, \r, which use mnemonic escapes)
+
+    Newline is turned into | when RE_NEWLINE_OR is set
+
+XXX To be done...
+
+    [...]     (different treatment of backslashed items?)
+    [^...]    (different treatment of backslashed items?)
+    ^ $ * + ? (in some error contexts these are probably treated differently)
+    \vDD  \DD (in the regex docs but only works when RE_ANSI_HEX set)
+
+"""
+
+
+import warnings
+warnings.filterwarnings("ignore", ".* regex .*", DeprecationWarning, __name__,
+                        append=1)
+
+import regex
+from regex_syntax import * # RE_*
+
__all__ = ["convert","quote"]

# Default translation table, keyed by the regex token (a backslash
# escape or a single character) and mapping to its re equivalent.
# convert() copies and prunes it according to the syntax flags.
mastertable = {
    # Backslashed forms that change meaning under re.
    r'\<': r'\b',
    r'\>': r'\b',
    r'\`': r'\A',
    r'\'': r'\Z',
    r'\(': '(',
    r'\)': ')',
    r'\|': '|',
    # Bare metacharacters that must be escaped for re.
    '(': r'\(',
    ')': r'\)',
    '|': r'\|',
    # Control characters rendered as mnemonic escapes.
    '\t': r'\t',
    '\n': r'\n',
    '\r': r'\r',
}
+
+
def convert(s, syntax=None):
    """Convert a regex regular expression to re syntax.

    The first argument is the regular expression, as a string object,
    just like it would be passed to regex.compile().  (I.e., pass the
    actual string object -- string quotes must already have been
    removed and the standard escape processing has already been done,
    e.g. by eval().)

    The optional second argument is the regex syntax variant to be
    used.  This is an integer mask as passed to regex.set_syntax();
    the flag bits are defined in regex_syntax.  When not specified, or
    when None is given, the current regex syntax mask (as retrieved by
    regex.get_syntax()) is used -- which is 0 by default.

    The return value is a regular expression, as a string object that
    could be passed to re.compile().  (I.e., no string quotes have
    been added -- use quote() below, or repr().)

    The conversion is not always guaranteed to be correct.  More
    syntactical analysis should be performed to detect borderline
    cases and decide what to do with them.  For example, 'x*?' is not
    translated correctly.

    """
    # Work on a per-call copy so flag-dependent deletions below never
    # mutate the shared mastertable.
    table = mastertable.copy()
    if syntax is None:
        syntax = regex.get_syntax()
    if syntax & RE_NO_BK_PARENS:
        del table[r'\('], table[r'\)']
        del table['('], table[')']
    if syntax & RE_NO_BK_VBAR:
        del table[r'\|']
        del table['|']
    if syntax & RE_BK_PLUS_QM:
        table['+'] = r'\+'
        table['?'] = r'\?'
        table[r'\+'] = '+'
        table[r'\?'] = '?'
    if syntax & RE_NEWLINE_OR:
        table['\n'] = '|'
    res = ""

    # Scan the pattern, translating one character (or one backslash
    # pair) at a time; untranslated tokens pass through unchanged.
    i = 0
    end = len(s)
    while i < end:
        c = s[i]
        i = i+1
        if c == '\\':
            # Note: a pattern ending in a lone backslash raises
            # IndexError here (i == end); such a pattern is malformed
            # input anyway.
            c = s[i]
            i = i+1
            key = '\\' + c
            key = table.get(key, key)
            res = res + key
        else:
            c = table.get(c, c)
            res = res + c
    return res
+
+
def quote(s, quote=None):
    """Convert a string object to a quoted string literal.

    This is similar to repr() but will return a "raw" string (r'...'
    or r"...") when the string contains backslashes, instead of
    doubling all backslashes.  The resulting string does *not* always
    evaluate to the same string as the original; however it will do
    just the right thing when passed into re.compile().

    The optional second argument forces the string quote; it must be
    a single character which is a valid Python string quote.

    """
    if quote is None:
        q = "'"
        # Prefer double quotes when the string contains single quotes
        # but no double quotes, so the quote character need not be
        # escaped.  (Bug fix: altq was previously "'", identical to q,
        # which made this fallback branch a dead no-op.)
        altq = '"'
        if q in s and altq not in s:
            q = altq
    else:
        assert quote in ('"', "'")
        q = quote
    res = q
    for c in s:
        if c == q: c = '\\' + c              # escape the quote char itself
        elif c < ' ' or c > '~': c = "\\%03o" % ord(c)  # octal-escape non-printables
        res = res + c
    res = res + q
    if '\\' in res:
        # Emit a raw string so the backslashes survive eval()/re.compile().
        res = 'r' + res
    return res
+
+
def main():
    """Main program -- called when run as a script.

    Reads a Python expression from stdin, evaluates it to obtain the
    pattern string, and writes the converted pattern to stdout as a
    quoted literal (with a trailing newline only on a tty).
    """
    import sys
    # NOTE(review): eval() of stdin executes arbitrary expressions;
    # acceptable only because this is a developer-run filter script.
    s = eval(sys.stdin.read())
    sys.stdout.write(quote(convert(s)))
    if sys.stdout.isatty():
        sys.stdout.write("\n")


if __name__ == '__main__':
    main()
diff --git a/depot_tools/release/win/python_24/Lib/regex_syntax.py b/depot_tools/release/win/python_24/Lib/regex_syntax.py
new file mode 100644
index 0000000..b0a0dbf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/regex_syntax.py
@@ -0,0 +1,53 @@
+"""Constants for selecting regexp syntaxes for the obsolete regex module.
+
+This module is only for backward compatibility.  "regex" has now
+been replaced by the new regular expression module, "re".
+
+These bits are passed to regex.set_syntax() to choose among
+alternative regexp syntaxes.
+"""
+
# Individual flag bits.  Each constant is a single bit so they can be
# OR-ed together into a syntax mask.

# 1 means plain parentheses serve as grouping, and backslash
#   parentheses are needed for literal searching.
# 0 means backslash-parentheses are grouping, and plain parentheses
#   are for literal searching.
RE_NO_BK_PARENS = 1

# 1 means plain | serves as the "or"-operator, and \| is a literal.
# 0 means \| serves as the "or"-operator, and | is a literal.
RE_NO_BK_VBAR = 2

# 0 means plain + or ? serves as an operator, and \+, \? are literals.
# 1 means \+, \? are operators and plain +, ? are literals.
RE_BK_PLUS_QM = 4

# 1 means | binds tighter than ^ or $.
# 0 means the contrary.
RE_TIGHT_VBAR = 8

# 1 means treat \n as an _OR operator
# 0 means treat it as a normal character
RE_NEWLINE_OR = 16

# 0 means that special characters (such as *, ^, and $) always have
#   their special meaning regardless of the surrounding context.
# 1 means that special characters may act as normal characters in some
#   contexts.  Specifically, this applies to:
#       ^ - only special at the beginning, or after ( or |
#       $ - only special at the end, or before ) or |
#       *, +, ? - only special when not after the beginning, (, or |
RE_CONTEXT_INDEP_OPS = 32

# ANSI sequences (\n etc) and \xhh
RE_ANSI_HEX = 64

# No GNU extensions
RE_NO_GNU_EXTENSIONS = 128

# Now define combinations of bits for the standard possibilities.
RE_SYNTAX_AWK = (RE_NO_BK_PARENS | RE_NO_BK_VBAR | RE_CONTEXT_INDEP_OPS)
RE_SYNTAX_EGREP = (RE_SYNTAX_AWK | RE_NEWLINE_OR)
RE_SYNTAX_GREP = (RE_BK_PLUS_QM | RE_NEWLINE_OR)
RE_SYNTAX_EMACS = 0

# (Python's obsolete "regexp" module used a syntax similar to awk.)
diff --git a/depot_tools/release/win/python_24/Lib/regsub.py b/depot_tools/release/win/python_24/Lib/regsub.py
new file mode 100644
index 0000000..0fc10a5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/regsub.py
@@ -0,0 +1,198 @@
+"""Regexp-based split and replace using the obsolete regex module.
+
+This module is only for backward compatibility.  These operations
+are now provided by the new regular expression module, "re".
+
+sub(pat, repl, str):        replace first occurrence of pattern in string
+gsub(pat, repl, str):       replace all occurrences of pattern in string
+split(str, pat, maxsplit):  split string using pattern as delimiter
+splitx(str, pat, maxsplit): split string using pattern as delimiter plus
+                            return delimiters
+"""
+
import warnings
# Warn once, at import time, that this whole module is obsolete.
warnings.warn("the regsub module is deprecated; please use re.sub()",
              DeprecationWarning)

# Ignore further deprecation warnings about this module
warnings.filterwarnings("ignore", "", DeprecationWarning, __name__)

import regex

__all__ = ["sub","gsub","split","splitx","capwords"]
+
+# Replace first occurrence of pattern pat in string str by replacement
+# repl.  If the pattern isn't found, the string is returned unchanged.
+# The replacement may contain references \digit to subpatterns and
+# escaped backslashes.  The pattern may be a string or an already
+# compiled pattern.
+
def sub(pat, repl, str):
    """Replace the first match of pat in str with repl.

    If the pattern does not match, str is returned unchanged.  The
    replacement may contain group references and escaped backslashes
    (see expand()).  pat may be a pattern string or an already
    compiled pattern object.
    """
    prog = compile(pat)
    if prog.search(str) < 0:
        return str
    # search() left the match bounds in prog.regs.
    groups = prog.regs
    lo, hi = groups[0]
    return str[:lo] + expand(repl, groups, str) + str[hi:]
+
+
+# Replace all (non-overlapping) occurrences of pattern pat in string
+# str by replacement repl.  The same rules as for sub() apply.
+# Empty matches for the pattern are replaced only when not adjacent to
+# a previous match, so e.g. gsub('', '-', 'abc') returns '-a-b-c-'.
+
def gsub(pat, repl, str):
    """Replace all non-overlapping matches of pat in str by repl.

    The same group-reference expansion rules as for sub() apply.  An
    empty match is replaced only when not adjacent to a previous match,
    so e.g. gsub('', '-', 'abc') returns '-a-b-c-'.
    """
    prog = compile(pat)
    new = ''
    start = 0
    first = 1
    while prog.search(str, start) >= 0:
        # search() stores the match bounds in prog.regs as a side effect.
        regs = prog.regs
        a, b = regs[0]
        if a == b == start and not first:
            # Empty match immediately after the previous one: retry one
            # position further on so we don't loop forever.
            if start >= len(str) or prog.search(str, start+1) < 0:
                break
            regs = prog.regs
            a, b = regs[0]
        new = new + str[start:a] + expand(repl, regs, str)
        start = b
        first = 0
    new = new + str[start:]
    return new
+
+
+# Split string str in fields separated by delimiters matching pattern
+# pat.  Only non-empty matches for the pattern are considered, so e.g.
+# split('abc', '') returns ['abc'].
+# The optional 3rd argument sets the number of splits that are performed.
+
def split(str, pat, maxsplit = 0):
    """Split str on non-empty matches of pat; maxsplit=0 means no limit."""
    return intsplit(str, pat, maxsplit, 0)
+
+# Split string str in fields separated by delimiters matching pattern
+# pat.  Only non-empty matches for the pattern are considered, so e.g.
+# split('abc', '') returns ['abc']. The delimiters are also included
+# in the list.
+# The optional 3rd argument sets the number of splits that are performed.
+
+
def splitx(str, pat, maxsplit = 0):
    """Like split(), but the matched delimiters are kept in the result."""
    return intsplit(str, pat, maxsplit, 1)
+
+# Internal function used to implement split() and splitx().
+
def intsplit(str, pat, maxsplit, retain):
    """Helper implementing split() and splitx().

    retain=1 interleaves the matched delimiter substrings into the
    result list; maxsplit=0 means no limit on the number of splits.
    """
    prog = compile(pat)
    res = []
    start = next = 0
    splitcount = 0
    while prog.search(str, next) >= 0:
        # search() stores the match bounds in prog.regs as a side effect.
        regs = prog.regs
        a, b = regs[0]
        if a == b:
            # Empty matches never split; just advance one position.
            next = next + 1
            if next >= len(str):
                break
        else:
            res.append(str[start:a])
            if retain:
                res.append(str[a:b])
            start = next = b
            splitcount = splitcount + 1
            if (maxsplit and (splitcount >= maxsplit)):
                break
    res.append(str[start:])
    return res
+
+
+# Capitalize words split using a pattern
+
def capwords(str, pat='[^a-zA-Z0-9_]+'):
    """Capitalize each word of str, splitting words on matches of pat.

    The delimiters themselves are preserved in the result unchanged.
    """
    # splitx() puts words at even indices and delimiters at odd ones.
    pieces = splitx(str, pat)
    pieces[::2] = [word.capitalize() for word in pieces[::2]]
    return "".join(pieces)
+
+
+# Internal subroutines:
+# compile(pat): compile a pattern, caching already compiled patterns
+# expand(repl, regs, str): expand \digit escapes in replacement string
+
+
+# Manage a cache of compiled regular expressions.
+#
+# If the pattern is a string a compiled version of it is returned.  If
+# the pattern has been used before we return an already compiled
+# version from the cache; otherwise we compile it now and save the
+# compiled version in the cache, along with the syntax it was compiled
+# with.  Instead of a string, a compiled regular expression can also
+# be passed.
+
cache = {}  # maps (pattern string, regex syntax) -> compiled pattern object
+
def compile(pat):
    """Return a compiled pattern for pat, memoized in the module cache.

    Non-string arguments are assumed to be already compiled and are
    passed through untouched.  The cache key includes the current regex
    syntax, so the same pattern string compiled under different syntax
    settings gets distinct cache entries.
    """
    if type(pat) != type(''):
        return pat              # not a string: assume already compiled
    key = (pat, regex.get_syntax())
    try:
        return cache[key]       # cache hit
    except KeyError:
        prog = cache[key] = regex.compile(pat)  # compile and memoize
        return prog
+
+
def clear_cache():
    """Forget all previously compiled patterns."""
    global cache
    cache = {}
+
+
+# Expand \digit in the replacement.
+# Each occurrence of \digit is replaced by the substring of str
+# indicated by regs[digit].  To include a literal \ in the
+# replacement, double it; other \ escapes are left unchanged (i.e.
+# the \ and the following character are both copied).
+
def expand(repl, regs, str):
    """Expand backslash-digit group references in the replacement repl.

    Each backslash followed by a digit d is replaced by the substring of
    str delimited by regs[d].  A doubled backslash yields a single
    backslash; any other backslash escape is copied through verbatim
    (backslash plus the following character).
    """
    if '\\' not in repl:
        return repl             # fast path: nothing to expand
    pieces = []
    i, n = 0, len(repl)
    while i < n:
        c = repl[i]
        i = i + 1
        if c != '\\' or i >= n:
            # Ordinary character, or a trailing backslash with nothing
            # after it: copy as-is.
            pieces.append(c)
        else:
            c = repl[i]
            i = i + 1
            if '0' <= c <= '9':
                a, b = regs[ord(c) - ord('0')]
                pieces.append(str[a:b])
            elif c == '\\':
                pieces.append(c)
            else:
                pieces.append('\\' + c)
    return ''.join(pieces)
+
+
+# Test program, reads sequences "pat repl str" from stdin.
+# Optional argument specifies pattern used to split lines.
+
def test():
    """Interactive driver: read "pat repl str" triples from stdin and
    print the results of sub() and gsub() for each."""
    import sys
    if sys.argv[1:]:
        delpat = sys.argv[1]
    else:
        delpat = '[ \t\n]+'
    while 1:
        # Prompt only when reading from an interactive terminal.
        if sys.stdin.isatty(): sys.stderr.write('--> ')
        line = sys.stdin.readline()
        if not line: break
        if line[-1] == '\n': line = line[:-1]
        fields = split(line, delpat)
        if len(fields) != 3:
            print 'Sorry, not three fields'
            print 'split:', repr(fields)
            continue
        [pat, repl, str] = split(line, delpat)
        print 'sub :', repr(sub(pat, repl, str))
        print 'gsub:', repr(gsub(pat, repl, str))
diff --git a/depot_tools/release/win/python_24/Lib/repr.py b/depot_tools/release/win/python_24/Lib/repr.py
new file mode 100644
index 0000000..53b5207
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/repr.py
@@ -0,0 +1,122 @@
+"""Redo the `...` (representation) but with limits on most sizes."""
+
__all__ = ["Repr","repr"]  # the class plus a ready-to-use bound function
+
+import __builtin__
+from itertools import islice
+
class Repr:
    """Produce repr()-like strings with limits on most sizes.

    Each max* attribute bounds how much of the corresponding type is
    shown; anything longer is elided with '...'.  Subclass and define
    repr_<typename> methods to customize handling of further types.
    """

    def __init__(self):
        # Maximum nesting depth, then per-type element/character limits.
        self.maxlevel = 6
        self.maxtuple = 6
        self.maxlist = 6
        self.maxarray = 5
        self.maxdict = 4
        self.maxset = 6
        self.maxfrozenset = 6
        self.maxdeque = 6
        self.maxstring = 30
        self.maxlong = 40
        self.maxother = 20

    def repr(self, x):
        """Return a size-limited representation of x."""
        return self.repr1(x, self.maxlevel)

    def repr1(self, x, level):
        """Dispatch on type name; level is the remaining nesting budget."""
        typename = type(x).__name__
        if ' ' in typename:
            # e.g. 'instance method' -> 'instance_method' so that it can
            # form a valid repr_* method name.
            parts = typename.split()
            typename = '_'.join(parts)
        if hasattr(self, 'repr_' + typename):
            return getattr(self, 'repr_' + typename)(x, level)
        else:
            # Unknown type: fall back to the real repr(), truncated in
            # the middle to at most maxother characters.
            s = __builtin__.repr(x)
            if len(s) > self.maxother:
                i = max(0, (self.maxother-3)//2)
                j = max(0, self.maxother-3-i)
                s = s[:i] + '...' + s[len(s)-j:]
            return s

    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        """Render up to maxiter elements of x between the given
        delimiters; trail is the comma appended for 1-element tuples."""
        n = len(x)
        if level <= 0 and n:
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter:  pieces.append('...')
            s = ', '.join(pieces)
            if n == 1 and trail:  right = trail + right
        return '%s%s%s' % (left, s, right)

    def repr_tuple(self, x, level):
        # Bug fix: honor self.maxtuple (this mistakenly used self.maxlist,
        # so setting maxtuple had no effect on tuple truncation).
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')

    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)

    def repr_array(self, x, level):
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)

    def repr_set(self, x, level):
        # Sort first so the display order is deterministic.
        x = sorted(x)
        return self._repr_iterable(x, level, 'set([', '])', self.maxset)

    def repr_frozenset(self, x, level):
        x = sorted(x)
        return self._repr_iterable(x, level, 'frozenset([', '])',
                                   self.maxfrozenset)

    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)

    def repr_dict(self, x, level):
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        # Show at most maxdict items, in sorted key order.
        for key in islice(sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)

    def repr_str(self, x, level):
        s = __builtin__.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            # Truncate the string itself first, then re-repr the result
            # so that quoting and escaping stay valid.
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = __builtin__.repr(x[:i] + x[len(x)-j:])
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_long(self, x, level):
        s = __builtin__.repr(x) # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            i = max(0, (self.maxlong-3)//2)
            j = max(0, self.maxlong-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s

    def repr_instance(self, x, level):
        try:
            s = __builtin__.repr(x)
            # Bugs in x.__repr__() can cause arbitrary
            # exceptions -- then make up something
        except:
            return '<%s instance at %x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxstring:
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
+
# Module-level convenience instance; `repr` deliberately shadows the
# builtin so callers can do `from repr import repr`.
aRepr = Repr()
repr = aRepr.repr
diff --git a/depot_tools/release/win/python_24/Lib/rexec.py b/depot_tools/release/win/python_24/Lib/rexec.py
new file mode 100644
index 0000000..89ff509
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/rexec.py
@@ -0,0 +1,585 @@
+"""Restricted execution facilities.
+
+The class RExec exports methods r_exec(), r_eval(), r_execfile(), and
+r_import(), which correspond roughly to the built-in operations
+exec, eval(), execfile() and import, but executing the code in an
+environment that only exposes those built-in operations that are
+deemed safe.  To this end, a modest collection of 'fake' modules is
+created which mimics the standard modules by the same names.  It is a
+policy decision which built-in modules and operations are made
+available; this module provides a reasonable default, but derived
+classes can change the policies e.g. by overriding or extending class
+variables like ok_builtin_modules or methods like make_sys().
+
+XXX To do:
+- r_open should allow writing tmp dir
+- r_exec etc. with explicit globals/locals? (Use rexec("exec ... in ...")?)
+
+"""
+
+
+import sys
+import __builtin__
+import os
+import ihooks
+import imp
+
+__all__ = ["RExec"]
+
class FileBase:
    """Shared base naming the file-object methods that are safe to
    expose to restricted code."""

    # The order matters only for readability; membership is what counts.
    ok_file_methods = (
        'fileno', 'flush', 'isatty', 'read', 'readline', 'readlines',
        'seek', 'tell', 'write', 'writelines', 'xreadlines', '__iter__')
+
+
class FileWrapper(FileBase):
    """Wrap a real file object, exposing only the allowed methods.

    XXX This is just like a Bastion -- should use that!
    """

    def __init__(self, f):
        # Copy every permitted method that the real file provides and
        # that the wrapper does not already define.
        for name in self.ok_file_methods:
            if hasattr(f, name) and not hasattr(self, name):
                setattr(self, name, getattr(f, name))

    def close(self):
        # Closing only flushes; the underlying file is left open.
        self.flush()
+
+
# Source template used (via exec in FileDelegate's class body) to
# generate forwarding methods: each one looks up the real file on the
# delegate's module attribute and forwards the call to it.
TEMPLATE = """
def %s(self, *args):
        return getattr(self.mod, self.name).%s(*args)
"""
+
class FileDelegate(FileBase):
    """File-like object that forwards every call to mod.<name>.

    Used for sys.stdin/stdout/stderr so that rebinding the attribute on
    the (restricted) sys module takes effect immediately for the
    delegating side.
    """

    def __init__(self, mod, name):
        # mod holds the real file object under attribute `name`.
        self.mod = mod
        self.name = name

    # Generate one forwarding method per allowed file method plus
    # close(); the class-body exec defines them at class scope.
    for m in FileBase.ok_file_methods + ('close',):
        exec TEMPLATE % (m, m)
+
+
class RHooks(ihooks.Hooks):
    """Import hooks that delegate all policy decisions to an RExec."""

    def __init__(self, *args):
        # Hacks to support both old and new interfaces:
        # old interface was RHooks(rexec[, verbose])
        # new interface is RHooks([verbose])
        verbose = 0
        rexec = None
        if args and type(args[-1]) == type(0):
            verbose = args[-1]
            args = args[:-1]
        if args and hasattr(args[0], '__class__'):
            rexec = args[0]
            args = args[1:]
        if args:
            raise TypeError, "too many arguments"
        ihooks.Hooks.__init__(self, verbose)
        self.rexec = rexec

    def set_rexec(self, rexec):
        # Called by RExec instance to complete initialization
        self.rexec = rexec

    # The remaining methods simply forward policy questions to the
    # controlling RExec instance.

    def get_suffixes(self):
        return self.rexec.get_suffixes()

    def is_builtin(self, name):
        return self.rexec.is_builtin(name)

    def init_builtin(self, name):
        # Import the real built-in module, then hand back an
        # attribute-copied version for the restricted environment.
        m = __import__(name)
        return self.rexec.copy_except(m, ())

    # Loading frozen/source/compiled modules directly would bypass the
    # restricted loader, so these are forbidden outright.
    def init_frozen(self, name): raise SystemError, "don't use this"
    def load_source(self, *args): raise SystemError, "don't use this"
    def load_compiled(self, *args): raise SystemError, "don't use this"
    def load_package(self, *args): raise SystemError, "don't use this"

    def load_dynamic(self, name, filename, file):
        return self.rexec.load_dynamic(name, filename, file)

    def add_module(self, name):
        return self.rexec.add_module(name)

    def modules_dict(self):
        return self.rexec.modules

    def default_path(self):
        return self.rexec.modules['sys'].path
+
+
# XXX Backwards compatibility: older code imported these names from
# here; they are plain aliases for the ihooks implementations.
RModuleLoader = ihooks.FancyModuleLoader
RModuleImporter = ihooks.ModuleImporter
+
+
class RExec(ihooks._Verbose):
    """Basic restricted execution framework.

    Code executed in this restricted environment will only have access to
    modules and functions that are deemed safe; you can subclass RExec to
    add or remove capabilities as desired.

    The RExec class can prevent code from performing unsafe operations like
    reading or writing disk files, or using TCP/IP sockets.  However, it does
    not protect against code using extremely large amounts of memory or
    processor time.

    """

    # Class-level policy knobs; subclasses override these to tighten or
    # loosen what restricted code may touch.

    ok_path = tuple(sys.path)           # That's a policy decision

    ok_builtin_modules = ('audioop', 'array', 'binascii',
                          'cmath', 'errno', 'imageop',
                          'marshal', 'math', 'md5', 'operator',
                          'parser', 'regex', 'select',
                          'sha', '_sre', 'strop', 'struct', 'time',
                          '_weakref')

    ok_posix_names = ('error', 'fstat', 'listdir', 'lstat', 'readlink',
                      'stat', 'times', 'uname', 'getpid', 'getppid',
                      'getcwd', 'getuid', 'getgid', 'geteuid', 'getegid')

    ok_sys_names = ('byteorder', 'copyright', 'exit', 'getdefaultencoding',
                    'getrefcount', 'hexversion', 'maxint', 'maxunicode',
                    'platform', 'ps1', 'ps2', 'version', 'version_info')

    # Builtins that are explicitly denied; everything else is copied over.
    nok_builtin_names = ('open', 'file', 'reload', '__import__')

    # Only source files and C extensions may be imported; byte-compiled
    # files are excluded.
    ok_file_types = (imp.C_EXTENSION, imp.PY_SOURCE)

    def __init__(self, hooks = None, verbose = 0):
        """Returns an instance of the RExec class.

        The hooks parameter is an instance of the RHooks class or a subclass
        of it.  If it is omitted or None, the default RHooks class is
        instantiated.

        Whenever the RExec module searches for a module (even a built-in one)
        or reads a module's code, it doesn't actually go out to the file
        system itself.  Rather, it calls methods of an RHooks instance that
        was passed to or created by its constructor.  (Actually, the RExec
        object doesn't make these calls --- they are made by a module loader
        object that's part of the RExec object.  This allows another level of
        flexibility, which can be useful when changing the mechanics of
        import within the restricted environment.)

        By providing an alternate RHooks object, we can control the file
        system accesses made to import a module, without changing the
        actual algorithm that controls the order in which those accesses are
        made.  For instance, we could substitute an RHooks object that
        passes all filesystem requests to a file server elsewhere, via some
        RPC mechanism such as ILU.  Grail's applet loader uses this to support
        importing applets from a URL for a directory.

        If the verbose parameter is true, additional debugging output may be
        sent to standard output.

        """

        # Deliberately disabled: rexec is known to be insecure, so
        # construction always fails and the code below is unreachable.
        raise RuntimeError, "This code is not secure in Python 2.2 and 2.3"

        ihooks._Verbose.__init__(self, verbose)
        # XXX There's a circular reference here:
        self.hooks = hooks or RHooks(verbose)
        self.hooks.set_rexec(self)
        self.modules = {}
        self.ok_dynamic_modules = self.ok_builtin_modules
        # Keep only the allowed builtins actually compiled into this
        # interpreter.  (NB: `list` temporarily shadows the builtin.)
        list = []
        for mname in self.ok_builtin_modules:
            if mname in sys.builtin_module_names:
                list.append(mname)
        self.ok_builtin_modules = tuple(list)
        self.set_trusted_path()
        self.make_builtin()
        self.make_initial_modules()
        # make_sys must be last because it adds the already created
        # modules to its builtin_module_names
        self.make_sys()
        self.loader = RModuleLoader(self.hooks, verbose)
        self.importer = RModuleImporter(self.loader, verbose)

    def set_trusted_path(self):
        # Set the path from which dynamic modules may be loaded.
        # Those dynamic modules must also occur in ok_builtin_modules
        self.trusted_path = filter(os.path.isabs, sys.path)

    def load_dynamic(self, name, filename, file):
        # Reuse the process-wide extension module if already loaded, but
        # hand the restricted environment an attribute-copied view.
        if name not in self.ok_dynamic_modules:
            raise ImportError, "untrusted dynamic module: %s" % name
        if name in sys.modules:
            src = sys.modules[name]
        else:
            src = imp.load_dynamic(name, filename, file)
        dst = self.copy_except(src, [])
        return dst

    def make_initial_modules(self):
        # Populate the restricted environment with __main__ and the
        # restricted os-equivalent module.
        self.make_main()
        self.make_osname()

    # Helpers for RHooks

    def get_suffixes(self):
        # Only suffixes whose file type is in ok_file_types are exposed.
        return [item   # (suff, mode, type)
                for item in imp.get_suffixes()
                if item[2] in self.ok_file_types]

    def is_builtin(self, mname):
        return mname in self.ok_builtin_modules

    # The make_* methods create specific built-in modules

    def make_builtin(self):
        # Restricted __builtin__: everything except the denied names,
        # with import/reload/open replaced by the r_* policy methods.
        m = self.copy_except(__builtin__, self.nok_builtin_names)
        m.__import__ = self.r_import
        m.reload = self.r_reload
        m.open = m.file = self.r_open

    def make_main(self):
        # __main__ only needs to exist; the result is not used here.
        m = self.add_module('__main__')

    def make_osname(self):
        # Restricted os-level module (posix/nt/...): a safe subset of
        # names plus a snapshot copy of the environment.
        osname = os.name
        src = __import__(osname)
        dst = self.copy_only(src, self.ok_posix_names)
        dst.environ = e = {}
        for key, value in os.environ.items():
            e[key] = value

    def make_sys(self):
        m = self.copy_only(sys, self.ok_sys_names)
        m.modules = self.modules
        m.argv = ['RESTRICTED']
        # map(None, seq) makes a fresh list copy (Python 2 idiom).
        m.path = map(None, self.ok_path)
        m.exc_info = self.r_exc_info
        m = self.modules['sys']
        l = self.modules.keys() + list(self.ok_builtin_modules)
        l.sort()
        m.builtin_module_names = tuple(l)

    # The copy_* methods copy existing modules with some changes

    def copy_except(self, src, exceptions):
        # Copy all attributes of src, then delete the excepted names.
        dst = self.copy_none(src)
        for name in dir(src):
            setattr(dst, name, getattr(src, name))
        for name in exceptions:
            try:
                delattr(dst, name)
            except AttributeError:
                pass
        return dst

    def copy_only(self, src, names):
        # Copy only the whitelisted names; missing ones are skipped.
        dst = self.copy_none(src)
        for name in names:
            try:
                value = getattr(src, name)
            except AttributeError:
                continue
            setattr(dst, name, value)
        return dst

    def copy_none(self, src):
        # Create an empty restricted module with the same name and doc.
        m = self.add_module(src.__name__)
        m.__doc__ = src.__doc__
        return m

    # Add a module -- return an existing module or create one

    def add_module(self, mname):
        m = self.modules.get(mname)
        if m is None:
            self.modules[mname] = m = self.hooks.new_module(mname)
        # Every restricted module shares the restricted builtins.
        m.__builtins__ = self.modules['__builtin__']
        return m

    # The r* methods are public interfaces

    def r_exec(self, code):
        """Execute code within a restricted environment.

        The code parameter must either be a string containing one or more
        lines of Python code, or a compiled code object, which will be
        executed in the restricted environment's __main__ module.

        """
        m = self.add_module('__main__')
        exec code in m.__dict__

    def r_eval(self, code):
        """Evaluate code within a restricted environment.

        The code parameter must either be a string containing a Python
        expression, or a compiled code object, which will be evaluated in
        the restricted environment's __main__ module.  The value of the
        expression or code object will be returned.

        """
        m = self.add_module('__main__')
        return eval(code, m.__dict__)

    def r_execfile(self, file):
        """Execute the Python code in the file in the restricted
        environment's __main__ module.

        """
        m = self.add_module('__main__')
        execfile(file, m.__dict__)

    def r_import(self, mname, globals={}, locals={}, fromlist=[]):
        """Import a module, raising an ImportError exception if the module
        is considered unsafe.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        """
        return self.importer.import_module(mname, globals, locals, fromlist)

    def r_reload(self, m):
        """Reload the module object, re-parsing and re-initializing it.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        """
        return self.importer.reload(m)

    def r_unload(self, m):
        """Unload the module.

        Removes it from the restricted environment's sys.modules dictionary.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        """
        return self.importer.unload(m)

    # The s_* methods are similar but also swap std{in,out,err}

    def make_delegate_files(self):
        # Build both directions of the stdio swap: delegates that track
        # the restricted sys module, and wrappers around the real files.
        s = self.modules['sys']
        self.delegate_stdin = FileDelegate(s, 'stdin')
        self.delegate_stdout = FileDelegate(s, 'stdout')
        self.delegate_stderr = FileDelegate(s, 'stderr')
        self.restricted_stdin = FileWrapper(sys.stdin)
        self.restricted_stdout = FileWrapper(sys.stdout)
        self.restricted_stderr = FileWrapper(sys.stderr)

    def set_files(self):
        # Install the swapped stdio: restricted code sees the wrappers,
        # while the real sys delegates back to the restricted sys.
        if not hasattr(self, 'save_stdin'):
            self.save_files()
        if not hasattr(self, 'delegate_stdin'):
            self.make_delegate_files()
        s = self.modules['sys']
        s.stdin = self.restricted_stdin
        s.stdout = self.restricted_stdout
        s.stderr = self.restricted_stderr
        sys.stdin = self.delegate_stdin
        sys.stdout = self.delegate_stdout
        sys.stderr = self.delegate_stderr

    def reset_files(self):
        # Undo the swap, then adopt whatever the restricted code left in
        # its sys module as the new restricted streams.
        self.restore_files()
        s = self.modules['sys']
        self.restricted_stdin = s.stdin
        self.restricted_stdout = s.stdout
        self.restricted_stderr = s.stderr


    def save_files(self):
        # Remember the real stdio so restore_files() can put it back.
        self.save_stdin = sys.stdin
        self.save_stdout = sys.stdout
        self.save_stderr = sys.stderr

    def restore_files(self):
        sys.stdin = self.save_stdin
        sys.stdout = self.save_stdout
        sys.stderr = self.save_stderr

    def s_apply(self, func, args=(), kw={}):
        # Run func with stdio swapped in, guaranteeing restoration even
        # if func raises.
        self.save_files()
        try:
            self.set_files()
            r = func(*args, **kw)
        finally:
            self.restore_files()
        return r

    def s_exec(self, *args):
        """Execute code within a restricted environment.

        Similar to the r_exec() method, but the code will be granted access
        to restricted versions of the standard I/O streams sys.stdin,
        sys.stderr, and sys.stdout.

        The code parameter must either be a string containing one or more
        lines of Python code, or a compiled code object, which will be
        executed in the restricted environment's __main__ module.

        """
        return self.s_apply(self.r_exec, args)

    def s_eval(self, *args):
        """Evaluate code within a restricted environment.

        Similar to the r_eval() method, but the code will be granted access
        to restricted versions of the standard I/O streams sys.stdin,
        sys.stderr, and sys.stdout.

        The code parameter must either be a string containing a Python
        expression, or a compiled code object, which will be evaluated in
        the restricted environment's __main__ module.  The value of the
        expression or code object will be returned.

        """
        return self.s_apply(self.r_eval, args)

    def s_execfile(self, *args):
        """Execute the Python code in the file in the restricted
        environment's __main__ module.

        Similar to the r_execfile() method, but the code will be granted
        access to restricted versions of the standard I/O streams sys.stdin,
        sys.stderr, and sys.stdout.

        """
        return self.s_apply(self.r_execfile, args)

    def s_import(self, *args):
        """Import a module, raising an ImportError exception if the module
        is considered unsafe.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        Similar to the r_import() method, but has access to restricted
        versions of the standard I/O streams sys.stdin, sys.stderr, and
        sys.stdout.

        """
        return self.s_apply(self.r_import, args)

    def s_reload(self, *args):
        """Reload the module object, re-parsing and re-initializing it.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        Similar to the r_reload() method, but has access to restricted
        versions of the standard I/O streams sys.stdin, sys.stderr, and
        sys.stdout.

        """
        return self.s_apply(self.r_reload, args)

    def s_unload(self, *args):
        """Unload the module.

        Removes it from the restricted environment's sys.modules dictionary.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        Similar to the r_unload() method, but has access to restricted
        versions of the standard I/O streams sys.stdin, sys.stderr, and
        sys.stdout.

        """
        return self.s_apply(self.r_unload, args)

    # Restricted open(...)

    def r_open(self, file, mode='r', buf=-1):
        """Method called when open() is called in the restricted environment.

        The arguments are identical to those of the open() function, and a
        file object (or a class instance compatible with file objects)
        should be returned.  RExec's default behaviour is allow opening
        any file for reading, but forbidding any attempt to write a file.

        This method is implicitly called by code executing in the
        restricted environment.  Overriding this method in a subclass is
        used to change the policies enforced by a restricted environment.

        """
        mode = str(mode)
        if mode not in ('r', 'rb'):
            raise IOError, "can't open files for writing in restricted mode"
        return open(file, mode, buf)

    # Restricted version of sys.exc_info()

    def r_exc_info(self):
        # Hide the traceback object from restricted code; it would give
        # access to frames and through them to unrestricted state.
        ty, va, tr = sys.exc_info()
        tr = None
        return ty, va, tr
+
+
def test():
    """Command-line driver: run a script or an interactive console
    inside a restricted environment.

    Options: -v increases verbosity; -t NAME trusts an extra builtin
    module.  A filename argument (or '-') selects the input script.
    """
    import getopt, traceback
    opts, args = getopt.getopt(sys.argv[1:], 'vt:')
    verbose = 0
    trusted = []
    for o, a in opts:
        if o == '-v':
            verbose = verbose+1
        if o == '-t':
            trusted.append(a)
    r = RExec(verbose=verbose)
    if trusted:
        r.ok_builtin_modules = r.ok_builtin_modules + tuple(trusted)
    if args:
        # Mimic the normal interpreter: argv[0] is the script, and its
        # directory goes at the front of the restricted sys.path.
        r.modules['sys'].argv = args
        r.modules['sys'].path.insert(0, os.path.dirname(args[0]))
    else:
        r.modules['sys'].path.insert(0, "")
    fp = sys.stdin
    if args and args[0] != '-':
        try:
            fp = open(args[0])
        except IOError, msg:
            print "%s: can't open file %r" % (sys.argv[0], args[0])
            return 1
    if fp.isatty():
        # Interactive: run a console whose code executes restricted.
        try:
            import readline
        except ImportError:
            pass
        import code
        class RestrictedConsole(code.InteractiveConsole):
            def runcode(self, co):
                self.locals['__builtins__'] = r.modules['__builtin__']
                r.s_apply(code.InteractiveConsole.runcode, (self, co))
        try:
            RestrictedConsole(r.modules['__main__'].__dict__).interact()
        except SystemExit, n:
            return n
    else:
        # Non-interactive: compile the whole file and execute it once.
        text = fp.read()
        fp.close()
        c = compile(text, fp.name, 'exec')
        try:
            r.s_exec(c)
        except SystemExit, n:
            return n
        except:
            traceback.print_exc()
            return 1
+
+
# Allow running this module directly as a restricted-execution shell.
if __name__ == '__main__':
    sys.exit(test())
diff --git a/depot_tools/release/win/python_24/Lib/rfc822.py b/depot_tools/release/win/python_24/Lib/rfc822.py
new file mode 100644
index 0000000..18277d6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/rfc822.py
@@ -0,0 +1,1013 @@
+"""RFC 2822 message manipulation.
+
+Note: This is only a very rough sketch of a full RFC-822 parser; in particular
+the tokenizing of addresses does not adhere to all the quoting rules.
+
+Note: RFC 2822 is a long awaited update to RFC 822.  This module should
+conform to RFC 2822, and is thus mis-named (it's not worth renaming it).  Some
+effort at RFC 2822 updates have been made, but a thorough audit has not been
+performed.  Consider any RFC 2822 non-conformance to be a bug.
+
+    RFC 2822: http://www.faqs.org/rfcs/rfc2822.html
+    RFC 822 : http://www.faqs.org/rfcs/rfc822.html (obsolete)
+
+Directions for use:
+
+To create a Message object: first open a file, e.g.:
+
+  fp = open(file, 'r')
+
+You can use any other legal way of getting an open file object, e.g. use
+sys.stdin or call os.popen().  Then pass the open file object to the Message()
+constructor:
+
+  m = Message(fp)
+
+This class can work with any input object that supports a readline method.  If
+the input object has seek and tell capability, the rewindbody method will
+work; also illegal lines will be pushed back onto the input stream.  If the
+input object lacks seek but has an `unread' method that can push back a line
+of input, Message will use that to push back illegal lines.  Thus this class
+can be used to parse messages coming from a buffered stream.
+
+The optional `seekable' argument is provided as a workaround for certain stdio
+libraries in which tell() discards buffered data before discovering that the
+lseek() system call doesn't work.  For maximum portability, you should set the
+seekable argument to zero to prevent that initial tell() when passing in
+an unseekable object such as a file object created from a socket object.  If
+it is 1 on entry -- which it is by default -- the tell() method of the open
+file object is called once; if this raises an exception, seekable is reset to
+0.  For other nonzero values of seekable, this test is not made.
+
+To get the text of a particular header there are several methods:
+
+  str = m.getheader(name)
+  str = m.getrawheader(name)
+
+where name is the name of the header, e.g. 'Subject'.  The difference is that
+getheader() strips the leading and trailing whitespace, while getrawheader()
+doesn't.  Both functions retain embedded whitespace (including newlines)
+exactly as they are specified in the header, and leave the case of the text
+unchanged.
+
+For addresses and address lists there are functions
+
+  realname, mailaddress = m.getaddr(name)
+  list = m.getaddrlist(name)
+
+where the latter returns a list of (realname, mailaddr) tuples.
+
+There is also a method
+
+  time = m.getdate(name)
+
+which parses a Date-like field and returns a time-compatible tuple,
+i.e. a tuple such as returned by time.localtime() or accepted by
+time.mktime().
+
+See the class definition for lower level access methods.
+
+There are also some utility functions here.
+"""
+# Cleanup and extensions by Eric S. Raymond <esr@thyrsus.com>
+
+import time
+
# Names exported by `from rfc822 import *': the two public classes plus
# the date-parsing helpers.
__all__ = ["Message","AddressList","parsedate","parsedate_tz","mktime_tz"]

_blanklines = ('\r\n', '\n')            # Optimization for islast()
+
+
class Message:
    """Represents a single RFC 2822-compliant message."""

    def __init__(self, fp, seekable = 1):
        """Initialize the class instance and read the headers."""
        if seekable == 1:
            # Exercise tell() to make sure it works
            # (and then assume seek() works, too)
            try:
                fp.tell()
            except (AttributeError, IOError):
                seekable = 0
            else:
                seekable = 1
        self.fp = fp
        self.seekable = seekable
        self.startofheaders = None
        self.startofbody = None
        #
        if self.seekable:
            try:
                self.startofheaders = self.fp.tell()
            except IOError:
                self.seekable = 0
        #
        self.readheaders()
        #
        if self.seekable:
            try:
                self.startofbody = self.fp.tell()
            except IOError:
                self.seekable = 0

    def rewindbody(self):
        """Rewind the file to the start of the body (if seekable)."""
        if not self.seekable:
            raise IOError, "unseekable file"
        self.fp.seek(self.startofbody)

    def readheaders(self):
        """Read header lines.

        Read header lines up to the entirely blank line that terminates them.
        The (normally blank) line that ends the headers is skipped, but not
        included in the returned list.  If a non-header line ends the headers,
        (which is an error), an attempt is made to backspace over it; it is
        never included in the returned list.

        The variable self.status is set to the empty string if all went well,
        otherwise it is an error message.  The variable self.headers is a
        completely uninterpreted list of lines contained in the header (so
        printing them will reproduce the header exactly as it appears in the
        file).
        """
        self.dict = {}                  # lowercased header name -> last value
        self.unixfrom = ''
        self.headers = list = []
        self.status = ''
        headerseen = ""                 # lowercased name of current header
        firstline = 1
        startofline = unread = tell = None
        # Prefer the input's own unread() for pushback; fall back to seek().
        if hasattr(self.fp, 'unread'):
            unread = self.fp.unread
        elif self.seekable:
            tell = self.fp.tell
        while 1:
            if tell:
                try:
                    startofline = tell()
                except IOError:
                    startofline = tell = None
                    self.seekable = 0
            line = self.fp.readline()
            if not line:
                self.status = 'EOF in headers'
                break
            # Skip unix From name time lines
            if firstline and line.startswith('From '):
                self.unixfrom = self.unixfrom + line
                continue
            firstline = 0
            if headerseen and line[0] in ' \t':
                # It's a continuation line.
                list.append(line)
                x = (self.dict[headerseen] + "\n " + line.strip())
                self.dict[headerseen] = x.strip()
                continue
            elif self.iscomment(line):
                # It's a comment.  Ignore it.
                continue
            elif self.islast(line):
                # Note! No pushback here!  The delimiter line gets eaten.
                break
            headerseen = self.isheader(line)
            if headerseen:
                # It's a legal header line, save it.
                list.append(line)
                self.dict[headerseen] = line[len(headerseen)+1:].strip()
                continue
            else:
                # It's not a header line; throw it back and stop here.
                if not self.dict:
                    self.status = 'No headers'
                else:
                    self.status = 'Non-header line where header expected'
                # Try to undo the read.
                if unread:
                    unread(line)
                elif tell:
                    self.fp.seek(startofline)
                else:
                    self.status = self.status + '; bad seek'
                break

    def isheader(self, line):
        """Determine whether a given line is a legal header.

        This method should return the header name, suitably canonicalized.
        You may override this method in order to use Message parsing on tagged
        data in RFC 2822-like formats with special header formats.
        """
        i = line.find(':')
        if i > 0:
            return line[:i].lower()
        else:
            return None

    def islast(self, line):
        """Determine whether a line is a legal end of RFC 2822 headers.

        You may override this method if your application wants to bend the
        rules, e.g. to strip trailing whitespace, or to recognize MH template
        separators ('--------').  For convenience (e.g. for code reading from
        sockets) a line consisting of \r\n also matches.
        """
        return line in _blanklines

    def iscomment(self, line):
        """Determine whether a line should be skipped entirely.

        You may override this method in order to use Message parsing on tagged
        data in RFC 2822-like formats that support embedded comments or
        free-text data.
        """
        return False

    def getallmatchingheaders(self, name):
        """Find all header lines matching a given header name.

        Look through the list of headers and find all lines matching a given
        header name (and their continuation lines).  A list of the lines is
        returned, without interpretation.  If the header does not occur, an
        empty list is returned.  If the header occurs multiple times, all
        occurrences are returned.  Case is not important in the header name.
        """
        name = name.lower() + ':'
        n = len(name)
        list = []
        hit = 0
        for line in self.headers:
            if line[:n].lower() == name:
                hit = 1
            elif not line[:1].isspace():
                # A new, non-continuation header ends the current match.
                hit = 0
            if hit:
                list.append(line)
        return list

    def getfirstmatchingheader(self, name):
        """Get the first header line matching name.

        This is similar to getallmatchingheaders, but it returns only the
        first matching header (and its continuation lines).
        """
        name = name.lower() + ':'
        n = len(name)
        list = []
        hit = 0
        for line in self.headers:
            if hit:
                if not line[:1].isspace():
                    break
            elif line[:n].lower() == name:
                hit = 1
            if hit:
                list.append(line)
        return list

    def getrawheader(self, name):
        """A higher-level interface to getfirstmatchingheader().

        Return a string containing the literal text of the header but with the
        keyword stripped.  All leading, trailing and embedded whitespace is
        kept in the string, however.  Return None if the header does not
        occur.
        """

        list = self.getfirstmatchingheader(name)
        if not list:
            return None
        list[0] = list[0][len(name) + 1:]
        return ''.join(list)

    def getheader(self, name, default=None):
        """Get the header value for a name.

        This is the normal interface: it returns a stripped version of the
        header value for a given header name, or None if it doesn't exist.
        This uses the dictionary version which finds the *last* such header.
        """
        try:
            return self.dict[name.lower()]
        except KeyError:
            return default
    get = getheader     # alias kept for mapping-style access

    def getheaders(self, name):
        """Get all values for a header.

        This returns a list of values for headers given more than once; each
        value in the result list is stripped in the same way as the result of
        getheader().  If the header is not given, return an empty list.
        """
        result = []
        current = ''
        have_header = 0
        for s in self.getallmatchingheaders(name):
            if s[0].isspace():
                # Continuation line: fold into the value being accumulated.
                if current:
                    current = "%s\n %s" % (current, s.strip())
                else:
                    current = s.strip()
            else:
                if have_header:
                    result.append(current)
                current = s[s.find(":") + 1:].strip()
                have_header = 1
        if have_header:
            result.append(current)
        return result

    def getaddr(self, name):
        """Get a single address from a header, as a tuple.

        An example return value:
        ('Guido van Rossum', 'guido@cwi.nl')
        """
        # New, by Ben Escoto
        alist = self.getaddrlist(name)
        if alist:
            return alist[0]
        else:
            return (None, None)

    def getaddrlist(self, name):
        """Get a list of addresses from a header.

        Retrieves a list of addresses from a header, where each address is a
        tuple as returned by getaddr().  Scans all named headers, so it works
        properly with multiple To: or Cc: headers for example.
        """
        raw = []
        for h in self.getallmatchingheaders(name):
            if h[0] in ' \t':
                raw.append(h)
            else:
                if raw:
                    raw.append(', ')
                i = h.find(':')
                if i > 0:
                    addr = h[i+1:]
                # Non-continuation lines from getallmatchingheaders were
                # matched on "name:", so ':' is present and addr is bound.
                raw.append(addr)
        alladdrs = ''.join(raw)
        a = AddressList(alladdrs)
        return a.addresslist

    def getdate(self, name):
        """Retrieve a date field from a header.

        Retrieves a date field from the named header, returning a tuple
        compatible with time.mktime().
        """
        try:
            data = self[name]
        except KeyError:
            return None
        return parsedate(data)

    def getdate_tz(self, name):
        """Retrieve a date field from a header as a 10-tuple.

        The first 9 elements make up a tuple compatible with time.mktime(),
        and the 10th is the offset of the poster's time zone from GMT/UTC.
        """
        try:
            data = self[name]
        except KeyError:
            return None
        return parsedate_tz(data)


    # Access as a dictionary (only finds *last* header of each type):

    def __len__(self):
        """Get the number of headers in a message."""
        return len(self.dict)

    def __getitem__(self, name):
        """Get a specific header, as from a dictionary."""
        return self.dict[name.lower()]

    def __setitem__(self, name, value):
        """Set the value of a header.

        Note: This is not a perfect inversion of __getitem__, because any
        changed headers get stuck at the end of the raw-headers list rather
        than where the altered header was.
        """
        del self[name] # Won't fail if it doesn't exist
        self.dict[name.lower()] = value
        text = name + ": " + value
        lines = text.split("\n")
        for line in lines:
            self.headers.append(line + "\n")

    def __delitem__(self, name):
        """Delete all occurrences of a specific header, if it is present."""
        name = name.lower()
        if not name in self.dict:
            return
        del self.dict[name]
        name = name + ':'
        n = len(name)
        list = []
        hit = 0
        for i in range(len(self.headers)):
            line = self.headers[i]
            if line[:n].lower() == name:
                hit = 1
            elif not line[:1].isspace():
                hit = 0
            if hit:
                list.append(i)
        # Delete from the end so earlier indices stay valid.
        for i in reversed(list):
            del self.headers[i]

    def setdefault(self, name, default=""):
        """Return the header's value, setting it to default if absent."""
        lowername = name.lower()
        if lowername in self.dict:
            return self.dict[lowername]
        else:
            text = name + ": " + default
            lines = text.split("\n")
            for line in lines:
                self.headers.append(line + "\n")
            self.dict[lowername] = default
            return default

    def has_key(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __contains__(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict

    def __iter__(self):
        return iter(self.dict)

    def keys(self):
        """Get all of a message's header field names."""
        return self.dict.keys()

    def values(self):
        """Get all of a message's header field values."""
        return self.dict.values()

    def items(self):
        """Get all of a message's headers.

        Returns a list of name, value tuples.
        """
        return self.dict.items()

    def __str__(self):
        return ''.join(self.headers)
+
+
+# Utility functions
+# -----------------
+
+# XXX Should fix unquote() and quote() to be really conformant.
+# XXX The inverses of the parse functions may also be useful.
+
+
def unquote(str):
    """Strip surrounding double quotes or angle brackets from a string.

    For a double-quoted string, backslash escapes for backslash and quote
    are also undone.  Strings of length 0 or 1 are returned unchanged.
    """
    if len(str) > 1:
        first, last = str[0], str[-1]
        if first == '"' and last == '"':
            inner = str[1:-1]
            return inner.replace('\\\\', '\\').replace('\\"', '"')
        if first == '<' and last == '>':
            return str[1:-1]
    return str
+
+
def quote(str):
    """Backslash-escape backslashes and double quotes in a string."""
    escaped = str.replace('\\', '\\\\')
    return escaped.replace('"', '\\"')
+
+
def parseaddr(address):
    """Parse an address into a (realname, mailaddr) tuple.

    Returns (None, None) if no address could be parsed.
    """
    parsed = AddressList(address).addresslist
    if parsed:
        return parsed[0]
    return (None, None)
+
+
class AddrlistClass:
    """Address parser class by Ben Escoto.

    To understand what this class does, it helps to have a copy of
    RFC 2822 in front of you.

    http://www.faqs.org/rfcs/rfc2822.html

    Note: this class interface is deprecated and may be removed in the future.
    Use rfc822.AddressList instead.
    """

    def __init__(self, field):
        """Initialize a new instance.

        `field' is an unparsed address header field, containing one or more
        addresses.
        """
        self.specials = '()<>@,:;.\"[]'
        self.pos = 0                    # current scan position within field
        self.LWS = ' \t'
        self.CR = '\r\n'
        self.atomends = self.specials + self.LWS + self.CR
        # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it
        # is obsolete syntax.  RFC 2822 requires that we recognize obsolete
        # syntax, so allow dots in phrases.
        self.phraseends = self.atomends.replace('.', '')
        self.field = field
        self.commentlist = []           # comments seen since last reset

    def gotonext(self):
        """Parse up to the start of the next address."""
        # Skips whitespace/newlines and collects any parenthesized comments.
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS + '\n\r':
                self.pos = self.pos + 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            else: break

    def getaddrlist(self):
        """Parse all addresses.

        Returns a list containing all of the addresses.
        """
        result = []
        while 1:
            ad = self.getaddress()
            if ad:
                result += ad
            else:
                # An empty result means the field is exhausted.
                break
        return result

    def getaddress(self):
        """Parse the next address."""
        self.commentlist = []
        self.gotonext()

        # Remember where we were in case this turns out to be a bare
        # addr-spec and we must re-scan from the start.
        oldpos = self.pos
        oldcl = self.commentlist
        plist = self.getphraselist()

        self.gotonext()
        returnlist = []

        if self.pos >= len(self.field):
            # Bad email address technically, no domain.
            if plist:
                returnlist = [(' '.join(self.commentlist), plist[0])]

        elif self.field[self.pos] in '.@':
            # email address is just an addrspec
            # this isn't very efficient since we start over
            self.pos = oldpos
            self.commentlist = oldcl
            addrspec = self.getaddrspec()
            returnlist = [(' '.join(self.commentlist), addrspec)]

        elif self.field[self.pos] == ':':
            # address is a group
            returnlist = []

            fieldlen = len(self.field)
            self.pos = self.pos + 1
            while self.pos < len(self.field):
                self.gotonext()
                if self.pos < fieldlen and self.field[self.pos] == ';':
                    self.pos = self.pos + 1
                    break
                returnlist = returnlist + self.getaddress()

        elif self.field[self.pos] == '<':
            # Address is a phrase then a route addr
            routeaddr = self.getrouteaddr()

            if self.commentlist:
                returnlist = [(' '.join(plist) + ' (' + \
                         ' '.join(self.commentlist) + ')', routeaddr)]
            else: returnlist = [(' '.join(plist), routeaddr)]

        else:
            if plist:
                returnlist = [(' '.join(self.commentlist), plist[0])]
            elif self.field[self.pos] in self.specials:
                # Skip a stray special character so parsing can make progress.
                self.pos = self.pos + 1

        self.gotonext()
        if self.pos < len(self.field) and self.field[self.pos] == ',':
            self.pos = self.pos + 1
        return returnlist

    def getrouteaddr(self):
        """Parse a route address (Return-path value).

        This method just skips all the route stuff and returns the addrspec.
        """
        # Implicitly returns None when not positioned at '<'.
        if self.field[self.pos] != '<':
            return

        expectroute = 0
        self.pos = self.pos + 1
        self.gotonext()
        adlist = ""
        while self.pos < len(self.field):
            if expectroute:
                # A route domain follows '@'; parse and discard it.
                self.getdomain()
                expectroute = 0
            elif self.field[self.pos] == '>':
                self.pos = self.pos + 1
                break
            elif self.field[self.pos] == '@':
                self.pos = self.pos + 1
                expectroute = 1
            elif self.field[self.pos] == ':':
                self.pos = self.pos + 1
            else:
                adlist = self.getaddrspec()
                self.pos = self.pos + 1
                break
            self.gotonext()

        return adlist

    def getaddrspec(self):
        """Parse an RFC 2822 addr-spec."""
        aslist = []

        self.gotonext()
        while self.pos < len(self.field):
            if self.field[self.pos] == '.':
                aslist.append('.')
                self.pos = self.pos + 1
            elif self.field[self.pos] == '"':
                aslist.append('"%s"' % self.getquote())
            elif self.field[self.pos] in self.atomends:
                break
            else: aslist.append(self.getatom())
            self.gotonext()

        if self.pos >= len(self.field) or self.field[self.pos] != '@':
            # No domain part; return just the local part collected so far.
            return ''.join(aslist)

        aslist.append('@')
        self.pos = self.pos + 1
        self.gotonext()
        return ''.join(aslist) + self.getdomain()

    def getdomain(self):
        """Get the complete domain name from an address."""
        sdlist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos = self.pos + 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] == '[':
                sdlist.append(self.getdomainliteral())
            elif self.field[self.pos] == '.':
                self.pos = self.pos + 1
                sdlist.append('.')
            elif self.field[self.pos] in self.atomends:
                break
            else: sdlist.append(self.getatom())
        return ''.join(sdlist)

    def getdelimited(self, beginchar, endchars, allowcomments = 1):
        """Parse a header fragment delimited by special characters.

        `beginchar' is the start character for the fragment.  If self is not
        looking at an instance of `beginchar' then getdelimited returns the
        empty string.

        `endchars' is a sequence of allowable end-delimiting characters.
        Parsing stops when one of these is encountered.

        If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
        within the parsed fragment.
        """
        if self.field[self.pos] != beginchar:
            return ''

        slist = ['']
        quote = 0               # 1 while the previous char was a backslash
        self.pos = self.pos + 1
        while self.pos < len(self.field):
            if quote == 1:
                # Take the escaped character literally.
                slist.append(self.field[self.pos])
                quote = 0
            elif self.field[self.pos] in endchars:
                self.pos = self.pos + 1
                break
            elif allowcomments and self.field[self.pos] == '(':
                slist.append(self.getcomment())
            elif self.field[self.pos] == '\\':
                quote = 1
            else:
                slist.append(self.field[self.pos])
            self.pos = self.pos + 1

        return ''.join(slist)

    def getquote(self):
        """Get a quote-delimited fragment from self's field."""
        return self.getdelimited('"', '"\r', 0)

    def getcomment(self):
        """Get a parenthesis-delimited fragment from self's field."""
        return self.getdelimited('(', ')\r', 1)

    def getdomainliteral(self):
        """Parse an RFC 2822 domain-literal."""
        return '[%s]' % self.getdelimited('[', ']\r', 0)

    def getatom(self, atomends=None):
        """Parse an RFC 2822 atom.

        Optional atomends specifies a different set of end token delimiters
        (the default is to use self.atomends).  This is used e.g. in
        getphraselist() since phrase endings must not include the `.' (which
        is legal in phrases)."""
        atomlist = ['']
        if atomends is None:
            atomends = self.atomends

        while self.pos < len(self.field):
            if self.field[self.pos] in atomends:
                break
            else: atomlist.append(self.field[self.pos])
            self.pos = self.pos + 1

        return ''.join(atomlist)

    def getphraselist(self):
        """Parse a sequence of RFC 2822 phrases.

        A phrase is a sequence of words, which are in turn either RFC 2822
        atoms or quoted-strings.  Phrases are canonicalized by squeezing all
        runs of continuous whitespace into one space.
        """
        plist = []

        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos = self.pos + 1
            elif self.field[self.pos] == '"':
                plist.append(self.getquote())
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] in self.phraseends:
                break
            else:
                plist.append(self.getatom(self.phraseends))

        return plist
+
class AddressList(AddrlistClass):
    """An AddressList encapsulates a list of parsed RFC 2822 addresses."""

    def __init__(self, field):
        AddrlistClass.__init__(self, field)
        if field:
            self.addresslist = self.getaddrlist()
        else:
            self.addresslist = []

    def __len__(self):
        return len(self.addresslist)

    def __str__(self):
        return ", ".join([dump_address_pair(pair)
                          for pair in self.addresslist])

    def __add__(self, other):
        # Set union: our addresses plus any of other's we don't have yet.
        result = AddressList(None)
        result.addresslist = self.addresslist[:]
        result.addresslist.extend([addr for addr in other.addresslist
                                   if addr not in self.addresslist])
        return result

    def __iadd__(self, other):
        # Set union, in-place.
        ours = self.addresslist
        for addr in other.addresslist:
            if addr not in ours:
                ours.append(addr)
        return self

    def __sub__(self, other):
        # Set difference: our addresses that other doesn't have.
        result = AddressList(None)
        result.addresslist = [addr for addr in self.addresslist
                              if addr not in other.addresslist]
        return result

    def __isub__(self, other):
        # Set difference, in-place.
        for addr in other.addresslist:
            if addr in self.addresslist:
                self.addresslist.remove(addr)
        return self

    def __getitem__(self, index):
        # Make indexing, slices, and 'in' work
        return self.addresslist[index]
+
def dump_address_pair(pair):
    """Return the canonical string form of a (realname, mailaddr) pair."""
    realname, mailaddr = pair[0], pair[1]
    if not realname:
        return mailaddr
    return '"%s" <%s>' % (realname, mailaddr)
+
# Parse a date field

# Month names accepted by parsedate_tz().  Abbreviated names occupy
# indexes 0-11 and full names 12-23, hence the modulo-12 fixup there.
_monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
               'aug', 'sep', 'oct', 'nov', 'dec',
               'january', 'february', 'march', 'april', 'may', 'june', 'july',
               'august', 'september', 'october', 'november', 'december']
_daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']

# The timezone table does not include the military time zones defined
# in RFC822, other than Z.  According to RFC1123, the description in
# RFC822 gets the signs wrong, so we can't rely on any such time
# zones.  RFC1123 recommends that numeric timezone indicators be used
# instead of timezone names.

# Values are HHMM-style offsets (hours*100 + minutes); parsedate_tz()
# converts them to seconds.
_timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0,
              'AST': -400, 'ADT': -300,  # Atlantic (used in Canada)
              'EST': -500, 'EDT': -400,  # Eastern
              'CST': -600, 'CDT': -500,  # Central
              'MST': -700, 'MDT': -600,  # Mountain
              'PST': -800, 'PDT': -700   # Pacific
              }
+
+
def parsedate_tz(data):
    """Convert a date string to a 10-tuple (time tuple + zone offset).

    The first 9 items are compatible with time.mktime(); the 10th is the
    poster's zone offset from UTC in seconds, or None when no zone could
    be determined.  Returns None if the string cannot be parsed.

    NOTE(review): despite the original docstring's claim about military
    timezones, only 'Z' and the named zones in _timezones plus numeric
    offsets are recognized -- confirm against callers.
    """
    if not data:
        return None
    data = data.split()
    if data[0][-1] in (',', '.') or data[0].lower() in _daynames:
        # There's a dayname here. Skip it
        del data[0]
    if len(data) == 3: # RFC 850 date, deprecated
        stuff = data[0].split('-')
        if len(stuff) == 3:
            data = stuff + data[1:]
    if len(data) == 4:
        s = data[3]
        i = s.find('+')
        if i > 0:
            # Zone glued onto the time field, e.g. '12:00:00+0100'.
            data[3:] = [s[:i], s[i+1:]]
        else:
            data.append('') # Dummy tz
    if len(data) < 5:
        return None
    data = data[:5]
    [dd, mm, yy, tm, tz] = data
    mm = mm.lower()
    if not mm in _monthnames:
        # Day and month may be swapped ('Jun 25' vs '25 Jun').
        dd, mm = mm, dd.lower()
        if not mm in _monthnames:
            return None
    mm = _monthnames.index(mm)+1
    if mm > 12: mm = mm - 12    # full month names occupy slots 13-24
    if dd[-1] == ',':
        dd = dd[:-1]
    i = yy.find(':')
    if i > 0:
        # Year and time fields are swapped (RFC 850 style).
        yy, tm = tm, yy
    if yy[-1] == ',':
        yy = yy[:-1]
    if not yy[0].isdigit():
        # Year and zone fields are swapped.
        yy, tz = tz, yy
    if tm[-1] == ',':
        tm = tm[:-1]
    tm = tm.split(':')
    if len(tm) == 2:
        [thh, tmm] = tm
        tss = '0'
    elif len(tm) == 3:
        [thh, tmm, tss] = tm
    else:
        return None
    try:
        yy = int(yy)
        dd = int(dd)
        thh = int(thh)
        tmm = int(tmm)
        tss = int(tss)
    except ValueError:
        return None
    tzoffset = None
    tz = tz.upper()
    if tz in _timezones:
        tzoffset = _timezones[tz]
    else:
        try:
            tzoffset = int(tz)
        except ValueError:
            pass
    # Convert a timezone offset into seconds ; -0500 -> -18000
    if tzoffset:
        if tzoffset < 0:
            tzsign = -1
            tzoffset = -tzoffset
        else:
            tzsign = 1
        tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
    tuple = (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset)
    return tuple
+
+
def parsedate(data):
    """Convert a time string to a 9-item time tuple (or None on failure)."""
    parsed = parsedate_tz(data)
    if isinstance(parsed, tuple):
        # Drop the timezone-offset element from the 10-tuple.
        return parsed[:9]
    return parsed
+
+
def mktime_tz(data):
    """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp."""
    zone_offset = data[9]
    if zone_offset is None:
        # No zone info, so localtime is better assumption than GMT
        return time.mktime(data[:8] + (-1,))
    local = time.mktime(data[:8] + (0,))
    return local - zone_offset - time.timezone
+
def formatdate(timeval=None):
    """Return a GMT timestamp in the format preferred for Internet standards.

    Sun, 06 Nov 1994 08:49:37 GMT  ; RFC 822, updated by RFC 1123

    RFC 1123 requires English day and month names, so strftime() cannot
    be used here: it honors the locale and could generate non-English
    names.  `timeval' is seconds since the epoch; defaults to now.
    """
    days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun",
              "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
    if timeval is None:
        timeval = time.time()
    year, month, day, hh, mm, ss, wday = time.gmtime(timeval)[:7]
    return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
            days[wday], day, months[month - 1], year, hh, mm, ss)
+
+
+# When used as script, run a small test program.
+# The first command line argument must be a filename containing one
+# message in RFC-822 format.
+
if __name__ == '__main__':
    import sys, os
    # Default to the conventional MH inbox location; a filename argument
    # overrides it.
    file = os.path.join(os.environ['HOME'], 'Mail/inbox/1')
    if sys.argv[1:]: file = sys.argv[1]
    f = open(file, 'r')
    m = Message(f)
    print 'From:', m.getaddr('from')
    print 'To:', m.getaddrlist('to')
    print 'Subject:', m.getheader('subject')
    print 'Date:', m.getheader('date')
    date = m.getdate_tz('date')
    # NOTE(review): date is indexed and passed to mktime_tz() before the
    # 'if date:' check below, so a missing/unparsable Date header raises
    # here instead of reaching the 'ParsedDate: None' branch -- confirm.
    tz = date[-1]
    date = time.localtime(mktime_tz(date))
    if date:
        print 'ParsedDate:', time.asctime(date),
        hhmmss = tz
        hhmm, ss = divmod(hhmmss, 60)
        hh, mm = divmod(hhmm, 60)
        print "%+03d%02d" % (hh, mm),
        if ss: print ".%02d" % ss,
        print
    else:
        print 'ParsedDate:', None
    m.rewindbody()
    n = 0
    while f.readline():
        n = n + 1
    print 'Lines:', n
    print '-'*70
    print 'len =', len(m)
    if 'Date' in m: print 'Date =', m['Date']
    if 'X-Nonsense' in m: pass
    print 'keys =', m.keys()
    print 'values =', m.values()
    print 'items =', m.items()
diff --git a/depot_tools/release/win/python_24/Lib/rlcompleter.py b/depot_tools/release/win/python_24/Lib/rlcompleter.py
new file mode 100644
index 0000000..1d29167
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/rlcompleter.py
@@ -0,0 +1,150 @@
+"""Word completion for GNU readline 2.0.
+
+This requires the latest extension to the readline module. The completer
+completes keywords, built-ins and globals in a selectable namespace (which
+defaults to __main__); when completing NAME.NAME..., it evaluates (!) the
+expression up to the last dot and completes its attributes.
+
+It's very cool to do "import sys" type "sys.", hit the
+completion key (twice), and see the list of names defined by the
+sys module!
+
+Tip: to use the tab key as the completion key, call
+
+    readline.parse_and_bind("tab: complete")
+
+Notes:
+
+- Exceptions raised by the completer function are *ignored* (and
+generally cause the completion to fail).  This is a feature -- since
+readline sets the tty device in raw (or cbreak) mode, printing a
+traceback wouldn't work well without some complicated hoopla to save,
+reset and restore the tty state.
+
+- The evaluation of the NAME.NAME... form may cause arbitrary
+application defined code to be executed if an object with a
+__getattr__ hook is found.  Since it is the responsibility of the
+application (or the user) to enable this feature, I consider this an
+acceptable risk.  More complicated expressions (e.g. function calls or
+indexing operations) are *not* evaluated.
+
+- GNU readline is also used by the built-in functions input() and
+raw_input(), and thus these also benefit/suffer from the completer
+features.  Clearly an interactive application can benefit by
+specifying its own completer function and using raw_input() for all
+its input.
+
+- When the original stdin is not a tty device, GNU readline is never
+used, and this module (and the readline module) are silently inactive.
+
+"""
+
+import readline
+import __builtin__
+import __main__
+
+__all__ = ["Completer"]
+
class Completer:
    def __init__(self, namespace = None):
        """Create a new completer for the command line.

        Completer([namespace]) -> completer instance.

        The optional *namespace* is the dictionary in which completions
        are looked up; when omitted, __main__.__dict__ is consulted at
        completion time (so definitions made later in the interactive
        session are still seen).

        Hook an instance into readline with:

        readline.set_completer(Completer(my_namespace).complete)
        """

        if namespace and not isinstance(namespace, dict):
            raise TypeError('namespace must be a dictionary')

        # Record only whether we should follow __main__.__dict__; binding
        # is deferred until complete() runs, not done here.
        if namespace is None:
            self.use_main_ns = 1
        else:
            self.use_main_ns = 0
            self.namespace = namespace

    def complete(self, text, state):
        """Return the next possible completion for 'text'.

        Called successively with state == 0, 1, 2, ... until it returns
        None.  Every completion begins with 'text'.
        """
        if self.use_main_ns:
            self.namespace = __main__.__dict__

        if state == 0:
            # First call for this word: (re)compute the candidate list.
            if "." in text:
                finder = self.attr_matches
            else:
                finder = self.global_matches
            self.matches = finder(text)
        try:
            return self.matches[state]
        except IndexError:
            return None

    def global_matches(self, text):
        """Compute matches when text is a simple name.

        Return all keywords, built-in functions and names currently
        defined in self.namespace that start with 'text'.
        """
        import keyword
        matches = []
        n = len(text)
        candidates = [keyword.kwlist,
                      __builtin__.__dict__,
                      self.namespace]
        for source in candidates:
            for word in source:
                if word[:n] == text and word != "__builtins__":
                    matches.append(word)
        return matches

    def attr_matches(self, text):
        """Compute matches when text contains a dot.

        Assuming text has the form NAME.NAME....[NAME] and is evaluatable
        in self.namespace, evaluate it and offer its attributes (per
        dir(), plus class members for instances) as completions.

        WARNING: this can still invoke arbitrary C code, if an object
        with a __getattr__ hook is evaluated.
        """
        import re
        m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text)
        if not m:
            return
        expr, attr = m.group(1, 3)
        # NOTE: eval of user-typed text -- accepted risk, see module docs.
        thisobject = eval(expr, self.namespace)
        words = dir(thisobject)
        if hasattr(thisobject, '__class__'):
            words.append('__class__')
            words.extend(get_class_members(thisobject.__class__))
        matches = []
        n = len(attr)
        for word in words:
            if word[:n] == attr and word != "__builtins__":
                matches.append("%s.%s" % (expr, word))
        return matches
+
def get_class_members(klass):
    """Return dir(klass) extended with the members of every base class,
    collected recursively (duplicates are not removed)."""
    members = dir(klass)
    # Old-style classes may lack __bases__, hence the defaulted getattr.
    for base in getattr(klass, '__bases__', ()):
        members.extend(get_class_members(base))
    return members
+
+readline.set_completer(Completer().complete)
diff --git a/depot_tools/release/win/python_24/Lib/robotparser.py b/depot_tools/release/win/python_24/Lib/robotparser.py
new file mode 100644
index 0000000..48ea066
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/robotparser.py
@@ -0,0 +1,292 @@
+""" robotparser.py
+
+    Copyright (C) 2000  Bastian Kleineidam
+
+    You can choose between two licenses when using this package:
+    1) GNU GPLv2
+    2) PSF license for Python 2.2
+
+    The robots.txt Exclusion Protocol is implemented as specified in
+    http://info.webcrawler.com/mak/projects/robots/norobots-rfc.html
+"""
+import urlparse,urllib
+
+__all__ = ["RobotFileParser"]
+
+debug = 0
+
+def _debug(msg):
+    if debug: print msg
+
+
class RobotFileParser:
    """ This class provides a set of methods to read, parse and answer
    questions about a single robots.txt file.

    """

    def __init__(self, url=''):
        self.entries = []           # Entry objects for named user-agents.
        self.default_entry = None   # Entry for the catch-all "*" agent.
        self.disallow_all = False   # Fetching robots.txt was forbidden.
        self.allow_all = False      # robots.txt missing/unreadable (>= 400).
        self.set_url(url)
        self.last_checked = 0

    def mtime(self):
        """Returns the time the robots.txt file was last fetched.

        This is useful for long-running web spiders that need to
        check for new robots.txt files periodically.

        """
        return self.last_checked

    def modified(self):
        """Sets the time the robots.txt file was last fetched to the
        current time.

        """
        import time
        self.last_checked = time.time()

    def set_url(self, url):
        """Sets the URL referring to a robots.txt file."""
        self.url = url
        # Keep the network location and path components for later use.
        self.host, self.path = urlparse.urlparse(url)[1:3]

    def read(self):
        """Reads the robots.txt URL and feeds it to the parser."""
        opener = URLopener()
        f = opener.open(self.url)
        lines = []
        line = f.readline()
        while line:
            lines.append(line.strip())
            line = f.readline()
        self.errcode = opener.errcode
        if self.errcode == 401 or self.errcode == 403:
            # Access to robots.txt itself is restricted: disallow everything.
            self.disallow_all = True
            _debug("disallow all")
        elif self.errcode >= 400:
            # No usable robots.txt: everything is allowed.
            self.allow_all = True
            _debug("allow all")
        elif self.errcode == 200 and lines:
            _debug("parse lines")
            self.parse(lines)

    def _add_entry(self, entry):
        if "*" in entry.useragents:
            # the default entry is considered last
            self.default_entry = entry
        else:
            self.entries.append(entry)

    def parse(self, lines):
        """parse the input lines from a robots.txt file.
           We allow that a user-agent: line is not preceded by
           one or more blank lines."""
        # state: 0 = expecting a user-agent line, 1 = inside a user-agent
        # group with no rules yet, 2 = current group has at least one rule.
        state = 0
        linenumber = 0
        entry = Entry()

        for line in lines:
            linenumber = linenumber + 1
            if not line:
                # A blank line terminates the current record.
                if state==1:
                    _debug("line %d: warning: you should insert"
                           " allow: or disallow: directives below any"
                           " user-agent: line" % linenumber)
                    entry = Entry()
                    state = 0
                elif state==2:
                    self._add_entry(entry)
                    entry = Entry()
                    state = 0
            # remove optional comment and strip line
            i = line.find('#')
            if i>=0:
                line = line[:i]
            line = line.strip()
            if not line:
                continue
            line = line.split(':', 1)
            if len(line) == 2:
                line[0] = line[0].strip().lower()
                line[1] = urllib.unquote(line[1].strip())
                if line[0] == "user-agent":
                    if state==2:
                        _debug("line %d: warning: you should insert a blank"
                               " line before any user-agent"
                               " directive" % linenumber)
                        self._add_entry(entry)
                        entry = Entry()
                    entry.useragents.append(line[1])
                    state = 1
                elif line[0] == "disallow":
                    if state==0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], False))
                        state = 2
                elif line[0] == "allow":
                    if state==0:
                        _debug("line %d: error: you must insert a user-agent:"
                               " directive before this line" % linenumber)
                    else:
                        entry.rulelines.append(RuleLine(line[1], True))
                else:
                    _debug("line %d: warning: unknown key %s" % (linenumber,
                               line[0]))
            else:
                _debug("line %d: error: malformed line %s"%(linenumber, line))
        if state==2:
            # Bug fix: route the final record through _add_entry() so that a
            # trailing "User-agent: *" block becomes the default entry rather
            # than an ordinary one (matches the later upstream robotparser
            # fix; previously this was self.entries.append(entry)).
            self._add_entry(entry)
        _debug("Parsed rules:\n%s" % str(self))


    def can_fetch(self, useragent, url):
        """using the parsed robots.txt decide if useragent can fetch url"""
        _debug("Checking robots.txt allowance for:\n  user agent: %s\n  url: %s" %
               (useragent, url))
        if self.disallow_all:
            return False
        if self.allow_all:
            return True
        # search for given user agent matches
        # the first match counts
        url = urllib.quote(urlparse.urlparse(urllib.unquote(url))[2]) or "/"
        for entry in self.entries:
            if entry.applies_to(useragent):
                return entry.allowance(url)
        # try the default entry last
        if self.default_entry:
            return self.default_entry.allowance(url)
        # agent not found ==> access granted
        return True


    def __str__(self):
        ret = ""
        for entry in self.entries:
            ret = ret + str(entry) + "\n"
        return ret
+
+
class RuleLine:
    """A single "Allow:" (allowance == True) or "Disallow:"
       (allowance == False) directive together with its path prefix."""

    def __init__(self, path, allowance):
        # An empty "Disallow:" value means everything is allowed.
        if path == '' and not allowance:
            allowance = True
        self.path = urllib.quote(path)
        self.allowance = allowance

    def applies_to(self, filename):
        # "*" matches every path; otherwise match on the path prefix.
        if self.path == "*":
            return True
        return filename.startswith(self.path)

    def __str__(self):
        if self.allowance:
            verb = "Allow"
        else:
            verb = "Disallow"
        return verb + ": " + self.path
+
+
class Entry:
    """One robots.txt record: one or more user-agents plus zero or more
    rule lines."""

    def __init__(self):
        self.useragents = []
        self.rulelines = []

    def __str__(self):
        pieces = []
        for agent in self.useragents:
            pieces.append("User-agent: " + agent + "\n")
        for line in self.rulelines:
            pieces.append(str(line) + "\n")
        return "".join(pieces)

    def applies_to(self, useragent):
        """check if this entry applies to the specified agent"""
        # Compare against the name token only, case-insensitively.
        name = useragent.split("/")[0].lower()
        for agent in self.useragents:
            if agent == '*':
                # we have the catch-all agent
                return True
            if agent.lower() in name:
                return True
        return False

    def allowance(self, filename):
        """Preconditions:
        - our agent applies to this entry
        - filename is URL decoded"""
        # First matching rule line wins; no match means allowed.
        for line in self.rulelines:
            _debug((filename, str(line), line.allowance))
            if line.applies_to(filename):
                return line.allowance
        return True
+
class URLopener(urllib.FancyURLopener):
    # URL opener that records the last HTTP error status instead of only
    # handling it, so RobotFileParser.read() can inspect the fetch result.
    def __init__(self, *args):
        urllib.FancyURLopener.__init__(self, *args)
        self.errcode = 200  # assume success until an error handler fires

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        # Remember the status code, then defer to the default behaviour.
        self.errcode = errcode
        return urllib.FancyURLopener.http_error_default(self, url, fp, errcode,
                                                        errmsg, headers)
+
def _check(a,b):
    # Test helper: compare an actual can_fetch() result *a* against the
    # expected truth value *b* and print a one-line pass/fail verdict.
    if not b:
        ac = "access denied"
    else:
        ac = "access allowed"
    if a!=b:
        print "failed"
    else:
        print "ok (%s)" % ac
    print
+
def _test():
    """Smoke-test the parser against live robots.txt files.

    NOTE: requires network access and depends on the contents of the
    remote sites' robots.txt at the time of the run.
    """
    global debug
    rp = RobotFileParser()
    debug = 1

    # robots.txt that exists, gotten to by redirection
    rp.set_url('http://www.musi-cal.com/robots.txt')
    rp.read()

    # test for re.escape
    _check(rp.can_fetch('*', 'http://www.musi-cal.com/'), 1)
    # this should match the first rule, which is a disallow
    _check(rp.can_fetch('', 'http://www.musi-cal.com/'), 0)
    # various cherry pickers
    _check(rp.can_fetch('CherryPickerSE',
                       'http://www.musi-cal.com/cgi-bin/event-search'
                       '?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.0',
                       'http://www.musi-cal.com/cgi-bin/event-search'
                       '?city=San+Francisco'), 0)
    _check(rp.can_fetch('CherryPickerSE/1.5',
                       'http://www.musi-cal.com/cgi-bin/event-search'
                       '?city=San+Francisco'), 0)
    # case sensitivity
    _check(rp.can_fetch('ExtractorPro', 'http://www.musi-cal.com/blubba'), 0)
    _check(rp.can_fetch('extractorpro', 'http://www.musi-cal.com/blubba'), 0)
    # substring test
    _check(rp.can_fetch('toolpak/1.1', 'http://www.musi-cal.com/blubba'), 0)
    # tests for catch-all * agent
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/search'), 0)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/Musician/me'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)
    _check(rp.can_fetch('spam', 'http://www.musi-cal.com/'), 1)

    # robots.txt that does not exist
    rp.set_url('http://www.lycos.com/robots.txt')
    rp.read()
    _check(rp.can_fetch('Mozilla', 'http://www.lycos.com/search'), 1)
+
# Run the (network-dependent) self-test when executed as a script.
if __name__ == '__main__':
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/sched.py b/depot_tools/release/win/python_24/Lib/sched.py
new file mode 100644
index 0000000..2b599ee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sched.py
@@ -0,0 +1,106 @@
+"""A generally useful event scheduler class.
+
+Each instance of this class manages its own queue.
+No multi-threading is implied; you are supposed to hack that
+yourself, or use a single instance per application.
+
+Each instance is parametrized with two functions, one that is
+supposed to return the current time, one that is supposed to
+implement a delay.  You can implement real-time scheduling by
+substituting time and sleep from built-in module time, or you can
+implement simulated time by writing your own functions.  This can
+also be used to integrate scheduling with STDWIN events; the delay
+function is allowed to modify the queue.  Time can be expressed as
+integers or floating point numbers, as long as it is consistent.
+
+Events are specified by tuples (time, priority, action, argument).
+As in UNIX, lower priority numbers mean higher priority; in this
+way the queue can be maintained fully sorted.  Execution of the
+event means calling the action function, passing it the argument.
+Remember that in Python, multiple function arguments can be packed
+in a tuple.   The action function may be an instance method so it
+has another way to reference private data (besides global variables).
+Parameterless functions or methods cannot be used, however.
+"""
+
+# XXX The timefunc and delayfunc should have been defined as methods
+# XXX so you can define new kinds of schedulers using subclassing
+# XXX instead of having to define a module or class just to hold
+# XXX the global state of your particular time and delay functions.
+
+import bisect
+
+__all__ = ["scheduler"]
+
class scheduler:
    def __init__(self, timefunc, delayfunc):
        """Initialize a new instance.

        timefunc  -- callable returning the current time (any consistent
                     numeric clock, real or simulated).
        delayfunc -- callable taking one argument, the time to wait; it
                     may also modify the queue.
        """
        self.queue = []  # kept sorted as (time, priority, action, argument)
        self.timefunc = timefunc
        self.delayfunc = delayfunc

    def enterabs(self, time, priority, action, argument):
        """Enter a new event in the queue at an absolute time.

        Returns an ID for the event which can be used to remove it,
        if necessary.

        """
        event = time, priority, action, argument
        # Tuple comparison orders by time first, then priority (lower
        # priority number == higher priority), keeping the queue sorted.
        bisect.insort(self.queue, event)
        return event # The ID

    def enter(self, delay, priority, action, argument):
        """A variant that specifies the time as a relative time.

        This is actually the more commonly used interface.

        """
        time = self.timefunc() + delay
        return self.enterabs(time, priority, action, argument)

    def cancel(self, event):
        """Remove an event from the queue.

        This must be presented the ID as returned by enter()/enterabs().
        If the event is not in the queue, this raises ValueError (from
        list.remove).  Note: the docstring previously claimed
        RuntimeError, which was incorrect.

        """
        self.queue.remove(event)

    def empty(self):
        """Check whether the queue is empty."""
        return len(self.queue) == 0

    def run(self):
        """Execute events until the queue is empty.

        When there is a positive delay until the first event, the
        delay function is called and the event is left in the queue;
        otherwise, the event is removed from the queue and executed
        (its action function is called, passing it the argument).  If
        the delay function returns prematurely, it is simply
        restarted.

        It is legal for both the delay function and the action
        function to modify the queue or to raise an exception;
        exceptions are not caught but the scheduler's state remains
        well-defined so run() may be called again.

        A questionable hack is added to allow other threads to run:
        just after an event is executed, a delay of 0 is executed, to
        avoid monopolizing the CPU when other threads are also
        runnable.

        """
        q = self.queue
        while q:
            time, priority, action, argument = q[0]
            now = self.timefunc()
            if now < time:
                self.delayfunc(time - now)
            else:
                del q[0]
                action(*argument)   # return value is ignored
                self.delayfunc(0)   # Let other threads run
diff --git a/depot_tools/release/win/python_24/Lib/sets.py b/depot_tools/release/win/python_24/Lib/sets.py
new file mode 100644
index 0000000..8ec7e2f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sets.py
@@ -0,0 +1,573 @@
+"""Classes to represent arbitrary sets (including sets of sets).
+
+This module implements sets using dictionaries whose values are
+ignored.  The usual operations (union, intersection, deletion, etc.)
+are provided as both methods and operators.
+
+Important: sets are not sequences!  While they support 'x in s',
+'len(s)', and 'for x in s', none of those operations are unique for
+sequences; for example, mappings support all three as well.  The
+characteristic operation for sequences is subscripting with small
+integers: s[i], for i in range(len(s)).  Sets don't support
+subscripting at all.  Also, sequences allow multiple occurrences and
+their elements have a definite order; sets on the other hand don't
+record multiple occurrences and don't remember the order of element
+insertion (which is why they don't support s[i]).
+
+The following classes are provided:
+
+BaseSet -- All the operations common to both mutable and immutable
+    sets. This is an abstract class, not meant to be directly
+    instantiated.
+
+Set -- Mutable sets, subclass of BaseSet; not hashable.
+
+ImmutableSet -- Immutable sets, subclass of BaseSet; hashable.
+    An iterable argument is mandatory to create an ImmutableSet.
+
+_TemporarilyImmutableSet -- A wrapper around a Set, hashable,
+    giving the same hash value as the immutable set equivalent
+    would have.  Do not use this class directly.
+
+Only hashable objects can be added to a Set. In particular, you cannot
+really add a Set as an element to another Set; if you try, what is
+actually added is an ImmutableSet built from it (it compares equal to
+the one you tried adding).
+
+When you ask if `x in y' where x is a Set and y is a Set or
+ImmutableSet, x is wrapped into a _TemporarilyImmutableSet z, and
+what's tested is actually `z in y'.
+
+"""
+
+# Code history:
+#
+# - Greg V. Wilson wrote the first version, using a different approach
+#   to the mutable/immutable problem, and inheriting from dict.
+#
+# - Alex Martelli modified Greg's version to implement the current
+#   Set/ImmutableSet approach, and make the data an attribute.
+#
+# - Guido van Rossum rewrote much of the code, made some API changes,
+#   and cleaned up the docstrings.
+#
+# - Raymond Hettinger added a number of speedups and other
+#   improvements.
+
+from __future__ import generators
try:
    from itertools import ifilter, ifilterfalse
except ImportError:
    # Code to make the module run under Py2.2
    def ifilter(predicate, iterable):
        # Pure-Python clone of itertools.ifilter: yield the items of
        # *iterable* for which predicate(x) is true (identity test when
        # predicate is None).
        if predicate is None:
            def predicate(x):
                return x
        for x in iterable:
            if predicate(x):
                yield x
    def ifilterfalse(predicate, iterable):
        # Complement of ifilter: yield the items for which the predicate
        # is false.
        if predicate is None:
            def predicate(x):
                return x
        for x in iterable:
            if not predicate(x):
                yield x
    try:
        True, False
    except NameError:
        # Python < 2.3 had no bool constants; synthesize them as ints.
        True, False = (0==0, 0!=0)
+
+__all__ = ['BaseSet', 'Set', 'ImmutableSet']
+
+class BaseSet(object):
+    """Common base class for mutable and immutable sets."""
+
+    __slots__ = ['_data']
+
+    # Constructor
+
+    def __init__(self):
+        """This is an abstract class."""
+        # Don't call this from a concrete subclass!
+        if self.__class__ is BaseSet:
+            raise TypeError, ("BaseSet is an abstract class.  "
+                              "Use Set or ImmutableSet.")
+
+    # Standard protocols: __len__, __repr__, __str__, __iter__
+
+    def __len__(self):
+        """Return the number of elements of a set."""
+        return len(self._data)
+
+    def __repr__(self):
+        """Return string representation of a set.
+
+        This looks like 'Set([<list of elements>])'.
+        """
+        return self._repr()
+
+    # __str__ is the same as __repr__
+    __str__ = __repr__
+
+    def _repr(self, sorted=False):
+        elements = self._data.keys()
+        if sorted:
+            elements.sort()
+        return '%s(%r)' % (self.__class__.__name__, elements)
+
+    def __iter__(self):
+        """Return an iterator over the elements or a set.
+
+        This is the keys iterator for the underlying dict.
+        """
+        return self._data.iterkeys()
+
+    # Three-way comparison is not supported.  However, because __eq__ is
+    # tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and
+    # then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this
+    # case).
+
+    def __cmp__(self, other):
+        raise TypeError, "can't compare sets using cmp()"
+
+    # Equality comparisons using the underlying dicts.  Mixed-type comparisons
+    # are allowed here, where Set == z for non-Set z always returns False,
+    # and Set != z always True.  This allows expressions like "x in y" to
+    # give the expected result when y is a sequence of mixed types, not
+    # raising a pointless TypeError just because y contains a Set, or x is
+    # a Set and y contain's a non-set ("in" invokes only __eq__).
+    # Subtle:  it would be nicer if __eq__ and __ne__ could return
+    # NotImplemented instead of True or False.  Then the other comparand
+    # would get a chance to determine the result, and if the other comparand
+    # also returned NotImplemented then it would fall back to object address
+    # comparison (which would always return False for __eq__ and always
+    # True for __ne__).  However, that doesn't work, because this type
+    # *also* implements __cmp__:  if, e.g., __eq__ returns NotImplemented,
+    # Python tries __cmp__ next, and the __cmp__ here then raises TypeError.
+
+    def __eq__(self, other):
+        if isinstance(other, BaseSet):
+            return self._data == other._data
+        else:
+            return False
+
+    def __ne__(self, other):
+        if isinstance(other, BaseSet):
+            return self._data != other._data
+        else:
+            return True
+
+    # Copying operations
+
+    def copy(self):
+        """Return a shallow copy of a set."""
+        result = self.__class__()
+        result._data.update(self._data)
+        return result
+
+    __copy__ = copy # For the copy module
+
+    def __deepcopy__(self, memo):
+        """Return a deep copy of a set; used by copy module."""
+        # This pre-creates the result and inserts it in the memo
+        # early, in case the deep copy recurses into another reference
+        # to this same set.  A set can't be an element of itself, but
+        # it can certainly contain an object that has a reference to
+        # itself.
+        from copy import deepcopy
+        result = self.__class__()
+        memo[id(self)] = result
+        data = result._data
+        value = True
+        for elt in self:
+            data[deepcopy(elt, memo)] = value
+        return result
+
+    # Standard set operations: union, intersection, both differences.
+    # Each has an operator version (e.g. __or__, invoked with |) and a
+    # method version (e.g. union).
+    # Subtle:  Each pair requires distinct code so that the outcome is
+    # correct when the type of other isn't suitable.  For example, if
+    # we did "union = __or__" instead, then Set().union(3) would return
+    # NotImplemented instead of raising TypeError (albeit that *why* it
+    # raises TypeError as-is is also a bit subtle).
+
+    def __or__(self, other):
+        """Return the union of two sets as a new set.
+
+        (I.e. all elements that are in either set.)
+        """
+        if not isinstance(other, BaseSet):
+            return NotImplemented
+        return self.union(other)
+
+    def union(self, other):
+        """Return the union of two sets as a new set.
+
+        (I.e. all elements that are in either set.)
+        """
+        result = self.__class__(self)
+        result._update(other)
+        return result
+
+    def __and__(self, other):
+        """Return the intersection of two sets as a new set.
+
+        (I.e. all elements that are in both sets.)
+        """
+        if not isinstance(other, BaseSet):
+            return NotImplemented
+        return self.intersection(other)
+
+    def intersection(self, other):
+        """Return the intersection of two sets as a new set.
+
+        (I.e. all elements that are in both sets.)
+        """
+        if not isinstance(other, BaseSet):
+            other = Set(other)
+        if len(self) <= len(other):
+            little, big = self, other
+        else:
+            little, big = other, self
+        common = ifilter(big._data.has_key, little)
+        return self.__class__(common)
+
+    def __xor__(self, other):
+        """Return the symmetric difference of two sets as a new set.
+
+        (I.e. all elements that are in exactly one of the sets.)
+        """
+        if not isinstance(other, BaseSet):
+            return NotImplemented
+        return self.symmetric_difference(other)
+
+    def symmetric_difference(self, other):
+        """Return the symmetric difference of two sets as a new set.
+
+        (I.e. all elements that are in exactly one of the sets.)
+        """
+        result = self.__class__()
+        data = result._data
+        value = True
+        selfdata = self._data
+        try:
+            otherdata = other._data
+        except AttributeError:
+            otherdata = Set(other)._data
+        for elt in ifilterfalse(otherdata.has_key, selfdata):
+            data[elt] = value
+        for elt in ifilterfalse(selfdata.has_key, otherdata):
+            data[elt] = value
+        return result
+
+    def  __sub__(self, other):
+        """Return the difference of two sets as a new Set.
+
+        (I.e. all elements that are in this set and not in the other.)
+        """
+        if not isinstance(other, BaseSet):
+            return NotImplemented
+        return self.difference(other)
+
+    def difference(self, other):
+        """Return the difference of two sets as a new Set.
+
+        (I.e. all elements that are in this set and not in the other.)
+        """
+        result = self.__class__()
+        data = result._data
+        try:
+            otherdata = other._data
+        except AttributeError:
+            otherdata = Set(other)._data
+        value = True
+        for elt in ifilterfalse(otherdata.has_key, self):
+            data[elt] = value
+        return result
+
+    # Membership test
+
+    def __contains__(self, element):
+        """Report whether an element is a member of a set.
+
+        (Called in response to the expression `element in self'.)
+        """
+        try:
+            return element in self._data
+        except TypeError:
+            transform = getattr(element, "__as_temporarily_immutable__", None)
+            if transform is None:
+                raise # re-raise the TypeError exception we caught
+            return transform() in self._data
+
+    # Subset and superset test
+
+    def issubset(self, other):
+        """Report whether another set contains this set."""
+        self._binary_sanity_check(other)
+        if len(self) > len(other):  # Fast check for obvious cases
+            return False
+        for elt in ifilterfalse(other._data.has_key, self):
+            return False
+        return True
+
+    def issuperset(self, other):
+        """Report whether this set contains another set."""
+        self._binary_sanity_check(other)
+        if len(self) < len(other):  # Fast check for obvious cases
+            return False
+        for elt in ifilterfalse(self._data.has_key, other):
+            return False
+        return True
+
    # Inequality comparisons using the is-subset relation.
    # These alias the named predicates so the <= and >= operators work
    # between sets: s <= t is s.issubset(t), s >= t is s.issuperset(t).
    __le__ = issubset
    __ge__ = issuperset
+
+    def __lt__(self, other):
+        self._binary_sanity_check(other)
+        return len(self) < len(other) and self.issubset(other)
+
+    def __gt__(self, other):
+        self._binary_sanity_check(other)
+        return len(self) > len(other) and self.issuperset(other)
+
+    # Assorted helpers
+
+    def _binary_sanity_check(self, other):
+        # Check that the other argument to a binary operation is also
+        # a set, raising a TypeError otherwise.
+        if not isinstance(other, BaseSet):
+            raise TypeError, "Binary operation only permitted between sets"
+
+    def _compute_hash(self):
+        # Calculate hash code for a set by xor'ing the hash codes of
+        # the elements.  This ensures that the hash code does not depend
+        # on the order in which elements are added to the set.  This is
+        # not called __hash__ because a BaseSet should not be hashable;
+        # only an ImmutableSet is hashable.
+        result = 0
+        for elt in self:
+            result ^= hash(elt)
+        return result
+
    def _update(self, iterable):
        # The main loop for update() and the subclass __init__() methods.
        data = self._data

        # Use the fast update() method when a dictionary is available.
        if isinstance(iterable, BaseSet):
            data.update(iterable._data)
            return

        value = True

        if type(iterable) in (list, tuple, xrange):
            # Optimized: we know that __iter__() and next() can't
            # raise TypeError, so we can move 'try:' out of the loop.
            it = iter(iterable)
            while True:
                try:
                    for element in it:
                        data[element] = value
                    return
                except TypeError:
                    # 'element' still names the item that failed to
                    # hash; the iterator resumes after it on the next
                    # pass of the while loop.
                    transform = getattr(element, "__as_immutable__", None)
                    if transform is None:
                        raise # re-raise the TypeError exception we caught
                    data[transform()] = value
        else:
            # Safe: only catch TypeError where intended
            for element in iterable:
                try:
                    data[element] = value
                except TypeError:
                    transform = getattr(element, "__as_immutable__", None)
                    if transform is None:
                        raise # re-raise the TypeError exception we caught
                    data[transform()] = value
+
+
class ImmutableSet(BaseSet):
    """Immutable set class."""

    # Slot caching the lazily computed hash value (None until needed).
    __slots__ = ['_hashcode']

    # BaseSet + hashing

    def __init__(self, iterable=None):
        """Construct an immutable set from an optional iterable."""
        self._hashcode = None
        self._data = {}
        if iterable is not None:
            self._update(iterable)

    def __hash__(self):
        # Compute lazily and cache: the contents never change, so one
        # computation suffices for the lifetime of the instance.
        if self._hashcode is None:
            self._hashcode = self._compute_hash()
        return self._hashcode

    def __getstate__(self):
        # Pickle support: preserve both the data and the cached hash.
        return self._data, self._hashcode

    def __setstate__(self, state):
        self._data, self._hashcode = state
+
class Set(BaseSet):
    """Mutable set class."""

    __slots__ = []

    # BaseSet + operations requiring mutability; no hashing

    def __init__(self, iterable=None):
        """Construct a set from an optional iterable."""
        self._data = {}
        if iterable is not None:
            self._update(iterable)

    def __getstate__(self):
        # Pickle support: the state is a 1-tuple holding the backing dict.
        return self._data,

    def __setstate__(self, data):
        # Pickle support: restore the backing dict from the 1-tuple state.
        self._data, = data

    def __hash__(self):
        """A Set cannot be hashed."""
        # We inherit object.__hash__, so we must deny this explicitly
        raise TypeError, "Can't hash a Set, only an ImmutableSet."

    # In-place union, intersection, differences.
    # Subtle:  The xyz_update() functions deliberately return None,
    # as do all mutating operations on built-in container types.
    # The __xyz__ spellings have to return self, though.

    def __ior__(self, other):
        """Update a set with the union of itself and another."""
        self._binary_sanity_check(other)
        self._data.update(other._data)
        return self

    def union_update(self, other):
        """Update a set with the union of itself and another."""
        self._update(other)

    def __iand__(self, other):
        """Update a set with the intersection of itself and another."""
        self._binary_sanity_check(other)
        # Rebind rather than mutate in place: reuse the binary & operator.
        self._data = (self & other)._data
        return self

    def intersection_update(self, other):
        """Update a set with the intersection of itself and another."""
        if isinstance(other, BaseSet):
            self &= other
        else:
            # Non-set iterable: intersection() copes with any iterable.
            self._data = (self.intersection(other))._data

    def __ixor__(self, other):
        """Update a set with the symmetric difference of itself and another."""
        self._binary_sanity_check(other)
        self.symmetric_difference_update(other)
        return self

    def symmetric_difference_update(self, other):
        """Update a set with the symmetric difference of itself and another."""
        data = self._data
        value = True
        if not isinstance(other, BaseSet):
            other = Set(other)
        # Toggle membership: drop elements present in both, add the rest.
        for elt in other:
            if elt in data:
                del data[elt]
            else:
                data[elt] = value

    def __isub__(self, other):
        """Remove all elements of another set from this set."""
        self._binary_sanity_check(other)
        self.difference_update(other)
        return self

    def difference_update(self, other):
        """Remove all elements of another set from this set."""
        data = self._data
        if not isinstance(other, BaseSet):
            other = Set(other)
        # ifilter keeps only the elements actually present, so the
        # del below can never raise KeyError.
        for elt in ifilter(data.has_key, other):
            del data[elt]

    # Python dict-like mass mutations: update, clear

    def update(self, iterable):
        """Add all values from an iterable (such as a list or file)."""
        self._update(iterable)

    def clear(self):
        """Remove all elements from this set."""
        self._data.clear()

    # Single-element mutations: add, remove, discard

    def add(self, element):
        """Add an element to a set.

        This has no effect if the element is already present.
        """
        try:
            self._data[element] = True
        except TypeError:
            # Unhashable element: fall back to an immutable snapshot of
            # it, if the element type provides one.
            transform = getattr(element, "__as_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            self._data[transform()] = True

    def remove(self, element):
        """Remove an element from a set; it must be a member.

        If the element is not a member, raise a KeyError.
        """
        try:
            del self._data[element]
        except TypeError:
            # Unhashable element: look it up via a temporarily immutable
            # wrapper, if the element type provides one.
            transform = getattr(element, "__as_temporarily_immutable__", None)
            if transform is None:
                raise # re-raise the TypeError exception we caught
            del self._data[transform()]

    def discard(self, element):
        """Remove an element from a set if it is a member.

        If the element is not a member, do nothing.
        """
        try:
            self.remove(element)
        except KeyError:
            pass

    def pop(self):
        """Remove and return an arbitrary set element."""
        return self._data.popitem()[0]

    def __as_immutable__(self):
        # Return a copy of self as an immutable set
        return ImmutableSet(self)

    def __as_temporarily_immutable__(self):
        # Return self wrapped in a temporarily immutable set
        return _TemporarilyImmutableSet(self)
+
+
class _TemporarilyImmutableSet(BaseSet):
    # Wrap a mutable set as if it was temporarily immutable.
    # This only supplies hashing and equality comparisons.
    # Used via Set.__as_temporarily_immutable__() so that a mutable Set
    # can be looked up inside another set's backing dictionary.

    def __init__(self, set):
        self._set = set
        self._data = set._data  # Needed by ImmutableSet.__eq__()

    def __hash__(self):
        # Recompute on every call: the wrapped set may have mutated
        # since the last hash, so no caching here.
        return self._set._compute_hash()
diff --git a/depot_tools/release/win/python_24/Lib/sgmllib.py b/depot_tools/release/win/python_24/Lib/sgmllib.py
new file mode 100644
index 0000000..08e365b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sgmllib.py
@@ -0,0 +1,510 @@
+"""A parser for SGML, using the derived class as a static DTD."""
+
+# XXX This only supports those SGML features used by HTML.
+
+# XXX There should be a way to distinguish between PCDATA (parsed
+# character data -- the normal case), RCDATA (replaceable character
+# data -- only char and entity references and end tags are special)
+# and CDATA (character data -- only end tags are special).  RCDATA is
+# not supported at all.
+
+
+import markupbase
+import re
+
+__all__ = ["SGMLParser", "SGMLParseError"]
+
# Regular expressions used for parsing

# Next character of interest: start of a reference ('&') or tag ('<').
interesting = re.compile('[&<]')
# A possibly-incomplete reference or tag at the end of the buffer;
# used to decide whether to wait for more input.
incomplete = re.compile('&([a-zA-Z][a-zA-Z0-9]*|#[0-9]*)?|'
                           '<([a-zA-Z][^<>]*|'
                              '/([a-zA-Z][^<>]*)?|'
                              '![^<>]*)?')

# Complete entity reference, e.g. '&amp;' (requires a terminator char).
entityref = re.compile('&([a-zA-Z][-.a-zA-Z0-9]*)[^a-zA-Z0-9]')
# Complete decimal character reference, e.g. '&#38;'.
charref = re.compile('&#([0-9]+)[^0-9]')

# '<' followed by a tag letter, or '<>' (SGML empty start tag shorthand).
starttagopen = re.compile('<[>a-zA-Z]')
# Opening of the SGML short tag form '<tag/data/'.
shorttagopen = re.compile('<[a-zA-Z][-.a-zA-Z0-9]*/')
# Full SGML short tag form, capturing tag name and data.
shorttag = re.compile('<([a-zA-Z][-.a-zA-Z0-9]*)/([^/]*)/')
piclose = re.compile('>')
endbracket = re.compile('[<>]')
tagfind = re.compile('[a-zA-Z][-_.a-zA-Z0-9]*')
# Attribute name with optional value; the value may be single-quoted,
# double-quoted, or an unquoted word.
attrfind = re.compile(
    r'\s*([a-zA-Z_][-:.a-zA-Z_0-9]*)(\s*=\s*'
    r'(\'[^\']*\'|"[^"]*"|[-a-zA-Z0-9./,:;+*%?!&$\(\)_#=~\'"@]*))?')
+
+
class SGMLParseError(RuntimeError):
    """Exception raised for all parse errors."""
+
+
+# SGML parser base class -- find tags and call handler functions.
+# Usage: p = SGMLParser(); p.feed(data); ...; p.close().
+# The dtd is defined by deriving a class which defines methods
+# with special names to handle tags: start_foo and end_foo to handle
+# <foo> and </foo>, respectively, or do_foo to handle <foo> by itself.
+# (Tags are converted to lower case for this purpose.)  The data
+# between tags is passed to the parser by calling self.handle_data()
+# with some data as argument (the data may be split up in arbitrary
+# chunks).  Entity references are passed by calling
+# self.handle_entityref() with the entity reference as argument.
+
class SGMLParser(markupbase.ParserBase):
    """SGML parser base class -- find tags and call handler functions.

    Usage: p = SGMLParser(); p.feed(data); ...; p.close().  Subclasses
    define start_foo/end_foo (balanced) or do_foo (standalone) methods
    to handle <foo> tags; unrecognized constructs go to the unknown_*
    hooks and character data goes to handle_data().
    """

    def __init__(self, verbose=0):
        """Initialize and reset this instance."""
        self.verbose = verbose
        self.reset()

    def reset(self):
        """Reset this instance. Loses all unprocessed data."""
        self.__starttag_text = None   # raw text of the most recent start tag
        self.rawdata = ''             # buffered, not-yet-processed input
        self.stack = []               # open balanced tags, innermost last
        self.lasttag = '???'          # last start tag, for '<>' shorthand
        self.nomoretags = 0           # true in CDATA-till-EOF mode
        self.literal = 0              # true in literal (CDATA) mode
        markupbase.ParserBase.reset(self)

    def setnomoretags(self):
        """Enter literal mode (CDATA) till EOF.

        Intended for derived classes only.
        """
        self.nomoretags = self.literal = 1

    def setliteral(self, *args):
        """Enter literal mode (CDATA).

        Intended for derived classes only.
        """
        self.literal = 1

    def feed(self, data):
        """Feed some data to the parser.

        Call this as often as you want, with as little or as much text
        as you want (may include '\n').  (This just saves the text,
        all the processing is done by goahead().)
        """

        self.rawdata = self.rawdata + data
        self.goahead(0)

    def close(self):
        """Handle the remaining data."""
        self.goahead(1)

    def error(self, message):
        raise SGMLParseError(message)

    # Internal -- handle data as far as reasonable.  May leave state
    # and data to be processed by a subsequent call.  If 'end' is
    # true, force handling all data as if followed by EOF marker.
    def goahead(self, end):
        rawdata = self.rawdata
        i = 0
        n = len(rawdata)
        while i < n:
            if self.nomoretags:
                # CDATA-till-EOF mode: everything is character data.
                self.handle_data(rawdata[i:n])
                i = n
                break
            match = interesting.search(rawdata, i)
            if match: j = match.start()
            else: j = n
            if i < j:
                self.handle_data(rawdata[i:j])
            i = j
            if i == n: break
            if rawdata[i] == '<':
                if starttagopen.match(rawdata, i):
                    if self.literal:
                        self.handle_data(rawdata[i])
                        i = i+1
                        continue
                    k = self.parse_starttag(i)
                    if k < 0: break
                    i = k
                    continue
                if rawdata.startswith("</", i):
                    k = self.parse_endtag(i)
                    if k < 0: break
                    i = k
                    self.literal = 0
                    continue
                if self.literal:
                    if n > (i + 1):
                        self.handle_data("<")
                        i = i+1
                    else:
                        # incomplete
                        break
                    continue
                if rawdata.startswith("<!--", i):
                        # Strictly speaking, a comment is --.*--
                        # within a declaration tag <!...>.
                        # This should be removed,
                        # and comments handled only in parse_declaration.
                    k = self.parse_comment(i)
                    if k < 0: break
                    i = k
                    continue
                if rawdata.startswith("<?", i):
                    k = self.parse_pi(i)
                    if k < 0: break
                    i = i+k
                    continue
                if rawdata.startswith("<!", i):
                    # This is some sort of declaration; in "HTML as
                    # deployed," this should only be the document type
                    # declaration ("<!DOCTYPE html...>").
                    k = self.parse_declaration(i)
                    if k < 0: break
                    i = k
                    continue
            elif rawdata[i] == '&':
                if self.literal:
                    self.handle_data(rawdata[i])
                    i = i+1
                    continue
                match = charref.match(rawdata, i)
                if match:
                    name = match.group(1)
                    self.handle_charref(name)
                    i = match.end(0)
                    # The terminator is only consumed when it is ';'.
                    if rawdata[i-1] != ';': i = i-1
                    continue
                match = entityref.match(rawdata, i)
                if match:
                    name = match.group(1)
                    self.handle_entityref(name)
                    i = match.end(0)
                    if rawdata[i-1] != ';': i = i-1
                    continue
            else:
                self.error('neither < nor & ??')
            # We get here only if incomplete matches but
            # nothing else
            match = incomplete.match(rawdata, i)
            if not match:
                self.handle_data(rawdata[i])
                i = i+1
                continue
            j = match.end(0)
            if j == n:
                break # Really incomplete
            self.handle_data(rawdata[i:j])
            i = j
        # end while
        if end and i < n:
            self.handle_data(rawdata[i:n])
            i = n
        # Keep whatever could not be processed for the next feed().
        self.rawdata = rawdata[i:]
        # XXX if end: check for empty stack

    # Extensions for the DOCTYPE scanner:
    _decl_otherchars = '='

    # Internal -- parse processing instr, return length or -1 if not terminated
    def parse_pi(self, i):
        rawdata = self.rawdata
        if rawdata[i:i+2] != '<?':
            self.error('unexpected call to parse_pi()')
        match = piclose.search(rawdata, i+2)
        if not match:
            return -1
        j = match.start(0)
        self.handle_pi(rawdata[i+2: j])
        j = match.end(0)
        # NOTE: unlike the other parse_* methods, this returns a
        # length, not an end index (see the caller in goahead()).
        return j-i

    def get_starttag_text(self):
        """Return the raw text of the most recently parsed start tag."""
        return self.__starttag_text

    # Internal -- handle starttag, return length or -1 if not terminated
    def parse_starttag(self, i):
        self.__starttag_text = None
        start_pos = i
        rawdata = self.rawdata
        if shorttagopen.match(rawdata, i):
            # SGML shorthand: <tag/data/ == <tag>data</tag>
            # XXX Can data contain &... (entity or char refs)?
            # XXX Can data contain < or > (tag characters)?
            # XXX Can there be whitespace before the first /?
            match = shorttag.match(rawdata, i)
            if not match:
                return -1
            tag, data = match.group(1, 2)
            self.__starttag_text = '<%s/' % tag
            tag = tag.lower()
            k = match.end(0)
            self.finish_shorttag(tag, data)
            self.__starttag_text = rawdata[start_pos:match.end(1) + 1]
            return k
        # XXX The following should skip matching quotes (' or ")
        match = endbracket.search(rawdata, i+1)
        if not match:
            return -1
        j = match.start(0)
        # Now parse the data between i+1 and j into a tag and attrs
        attrs = []
        if rawdata[i:i+2] == '<>':
            # SGML shorthand: <> == <last open tag seen>
            k = j
            tag = self.lasttag
        else:
            match = tagfind.match(rawdata, i+1)
            if not match:
                self.error('unexpected call to parse_starttag')
            k = match.end(0)
            tag = rawdata[i+1:k].lower()
            self.lasttag = tag
        while k < j:
            match = attrfind.match(rawdata, k)
            if not match: break
            attrname, rest, attrvalue = match.group(1, 2, 3)
            if not rest:
                # Attribute without a value: the name doubles as value.
                attrvalue = attrname
            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                # Strip matching surrounding quotes.
                attrvalue = attrvalue[1:-1]
            attrs.append((attrname.lower(), attrvalue))
            k = match.end(0)
        if rawdata[j] == '>':
            j = j+1
        self.__starttag_text = rawdata[start_pos:j]
        self.finish_starttag(tag, attrs)
        return j

    # Internal -- parse endtag
    def parse_endtag(self, i):
        rawdata = self.rawdata
        match = endbracket.search(rawdata, i+1)
        if not match:
            return -1
        j = match.start(0)
        tag = rawdata[i+2:j].strip().lower()
        if rawdata[j] == '>':
            j = j+1
        self.finish_endtag(tag)
        return j

    # Internal -- finish parsing of <tag/data/ (same as <tag>data</tag>)
    def finish_shorttag(self, tag, data):
        self.finish_starttag(tag, [])
        self.handle_data(data)
        self.finish_endtag(tag)

    # Internal -- finish processing of start tag
    # Return -1 for unknown tag, 0 for open-only tag, 1 for balanced tag
    def finish_starttag(self, tag, attrs):
        try:
            method = getattr(self, 'start_' + tag)
        except AttributeError:
            try:
                method = getattr(self, 'do_' + tag)
            except AttributeError:
                self.unknown_starttag(tag, attrs)
                return -1
            else:
                self.handle_starttag(tag, method, attrs)
                return 0
        else:
            # A start_<tag> handler exists, so the tag is balanced and
            # goes on the open-tag stack.
            self.stack.append(tag)
            self.handle_starttag(tag, method, attrs)
            return 1

    # Internal -- finish processing of end tag
    def finish_endtag(self, tag):
        if not tag:
            # Empty end tag '</>': close the most recently opened tag.
            found = len(self.stack) - 1
            if found < 0:
                self.unknown_endtag(tag)
                return
        else:
            if tag not in self.stack:
                try:
                    method = getattr(self, 'end_' + tag)
                except AttributeError:
                    self.unknown_endtag(tag)
                else:
                    # An end_<tag> handler exists but the tag was never
                    # opened: report it as unbalanced.
                    self.report_unbalanced(tag)
                return
            found = len(self.stack)
            for i in range(found):
                if self.stack[i] == tag: found = i
        while len(self.stack) > found:
            # Implicitly close everything opened after the matched tag.
            tag = self.stack[-1]
            try:
                method = getattr(self, 'end_' + tag)
            except AttributeError:
                method = None
            if method:
                self.handle_endtag(tag, method)
            else:
                self.unknown_endtag(tag)
            del self.stack[-1]

    # Overridable -- handle start tag
    def handle_starttag(self, tag, method, attrs):
        method(attrs)

    # Overridable -- handle end tag
    def handle_endtag(self, tag, method):
        method()

    # Example -- report an unbalanced </...> tag.
    def report_unbalanced(self, tag):
        if self.verbose:
            print '*** Unbalanced </' + tag + '>'
            print '*** Stack:', self.stack

    def handle_charref(self, name):
        """Handle character reference, no need to override."""
        try:
            n = int(name)
        except ValueError:
            self.unknown_charref(name)
            return
        if not 0 <= n <= 255:
            # Outside the 8-bit range this parser supports.
            self.unknown_charref(name)
            return
        self.handle_data(chr(n))

    # Definition of entities -- derived classes may override
    entitydefs = \
            {'lt': '<', 'gt': '>', 'amp': '&', 'quot': '"', 'apos': '\''}

    def handle_entityref(self, name):
        """Handle entity references.

        There should be no need to override this method; it can be
        tailored by setting up the self.entitydefs mapping appropriately.
        """
        table = self.entitydefs
        if name in table:
            self.handle_data(table[name])
        else:
            self.unknown_entityref(name)
            return

    # Example -- handle data, should be overridden
    def handle_data(self, data):
        pass

    # Example -- handle comment, could be overridden
    def handle_comment(self, data):
        pass

    # Example -- handle declaration, could be overridden
    def handle_decl(self, decl):
        pass

    # Example -- handle processing instruction, could be overridden
    def handle_pi(self, data):
        pass

    # To be overridden -- handlers for unknown objects
    def unknown_starttag(self, tag, attrs): pass
    def unknown_endtag(self, tag): pass
    def unknown_charref(self, ref): pass
    def unknown_entityref(self, ref): pass
+
+
class TestSGMLParser(SGMLParser):
    """Parser subclass that prints every parse event, for manual testing."""

    def __init__(self, verbose=0):
        self.testdata = ""   # accumulated character data, flushed in chunks
        SGMLParser.__init__(self, verbose)

    def handle_data(self, data):
        self.testdata = self.testdata + data
        # Flush once the repr grows past one line's worth of output.
        if len(repr(self.testdata)) >= 70:
            self.flush()

    def flush(self):
        # Print and clear any accumulated character data.
        data = self.testdata
        if data:
            self.testdata = ""
            print 'data:', repr(data)

    def handle_comment(self, data):
        self.flush()
        r = repr(data)
        # Truncate very long comments, keeping head and tail.
        if len(r) > 68:
            r = r[:32] + '...' + r[-32:]
        print 'comment:', r

    def unknown_starttag(self, tag, attrs):
        self.flush()
        if not attrs:
            print 'start tag: <' + tag + '>'
        else:
            print 'start tag: <' + tag,
            for name, value in attrs:
                print name + '=' + '"' + value + '"',
            print '>'

    def unknown_endtag(self, tag):
        self.flush()
        print 'end tag: </' + tag + '>'

    def unknown_entityref(self, ref):
        self.flush()
        print '*** unknown entity ref: &' + ref + ';'

    def unknown_charref(self, ref):
        self.flush()
        print '*** unknown char ref: &#' + ref + ';'

    def unknown_decl(self, data):
        self.flush()
        print '*** unknown decl: [' + data + ']'

    def close(self):
        SGMLParser.close(self)
        self.flush()
+
+
+def test(args = None):
+    import sys
+
+    if args is None:
+        args = sys.argv[1:]
+
+    if args and args[0] == '-s':
+        args = args[1:]
+        klass = SGMLParser
+    else:
+        klass = TestSGMLParser
+
+    if args:
+        file = args[0]
+    else:
+        file = 'test.html'
+
+    if file == '-':
+        f = sys.stdin
+    else:
+        try:
+            f = open(file, 'r')
+        except IOError, msg:
+            print file, ":", msg
+            sys.exit(1)
+
+    data = f.read()
+    if f is not sys.stdin:
+        f.close()
+
+    x = klass()
+    for c in data:
+        x.feed(c)
+    x.close()
+
+
# Run the demo driver when executed as a script.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/shelve.py b/depot_tools/release/win/python_24/Lib/shelve.py
new file mode 100644
index 0000000..5e680bc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/shelve.py
@@ -0,0 +1,231 @@
+"""Manage shelves of pickled objects.
+
+A "shelf" is a persistent, dictionary-like object.  The difference
+with dbm databases is that the values (not the keys!) in a shelf can
+be essentially arbitrary Python objects -- anything that the "pickle"
+module can handle.  This includes most class instances, recursive data
+types, and objects containing lots of shared sub-objects.  The keys
+are ordinary strings.
+
+To summarize the interface (key is a string, data is an arbitrary
+object):
+
+        import shelve
+        d = shelve.open(filename) # open, with (g)dbm filename -- no suffix
+
+        d[key] = data   # store data at key (overwrites old data if
+                        # using an existing key)
+        data = d[key]   # retrieve a COPY of the data at key (raise
+                        # KeyError if no such key) -- NOTE that this
+                        # access returns a *copy* of the entry!
+        del d[key]      # delete data stored at key (raises KeyError
+                        # if no such key)
+        flag = d.has_key(key)   # true if the key exists; same as "key in d"
+        list = d.keys() # a list of all existing keys (slow!)
+
+        d.close()       # close it
+
+Dependent on the implementation, closing a persistent dictionary may
+or may not be necessary to flush changes to disk.
+
+Normally, d[key] returns a COPY of the entry.  This needs care when
+mutable entries are mutated: for example, if d[key] is a list,
+        d[key].append(anitem)
+does NOT modify the entry d[key] itself, as stored in the persistent
+mapping -- it only modifies the copy, which is then immediately
+discarded, so that the append has NO effect whatsoever.  To append an
+item to d[key] in a way that will affect the persistent mapping, use:
+        data = d[key]
+        data.append(anitem)
+        d[key] = data
+
+To avoid the problem with mutable entries, you may pass the keyword
+argument writeback=True in the call to shelve.open.  When you use:
+        d = shelve.open(filename, writeback=True)
+then d keeps a cache of all entries you access, and writes them all back
+to the persistent mapping when you call d.close().  This ensures that
+such usage as d[key].append(anitem) works as intended.
+
+However, using keyword argument writeback=True may consume vast amount
+of memory for the cache, and it may make d.close() very slow, if you
+access many of d's entries after opening it in this way: d has no way to
+check which of the entries you access are mutable and/or which ones you
+actually mutate, so it must cache, and write back at close, all of the
+entries that you access.  You can call d.sync() to write back all the
+entries in the cache, and empty the cache (d.sync() also synchronizes
+the persistent dictionary on disk, if feasible).
+"""
+
+# Try using cPickle and cStringIO if available.
+
+try:
+    from cPickle import Pickler, Unpickler
+except ImportError:
+    from pickle import Pickler, Unpickler
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+import UserDict
+import warnings
+
+__all__ = ["Shelf","BsdDbShelf","DbfilenameShelf","open"]
+
class Shelf(UserDict.DictMixin):
    """Base class for shelf implementations.

    This is initialized with a dictionary-like object.
    See the module's __doc__ string for an overview of the interface.
    """

    def __init__(self, dict, protocol=None, writeback=False, binary=None):
        # dict: the underlying persistent mapping (e.g. a dbm object).
        # protocol: pickle protocol used to serialize values (default 0).
        # writeback: if true, cache accessed entries and write them all
        #     back on sync()/close() so in-place mutation works.
        # binary: deprecated alias for protocol; mutually exclusive.
        self.dict = dict
        if protocol is not None and binary is not None:
            raise ValueError, "can't specify both 'protocol' and 'binary'"
        if binary is not None:
            warnings.warn("The 'binary' argument to Shelf() is deprecated",
                          PendingDeprecationWarning)
            protocol = int(binary)
        if protocol is None:
            protocol = 0
        self._protocol = protocol
        self.writeback = writeback
        self.cache = {}

    def keys(self):
        return self.dict.keys()

    def __len__(self):
        return len(self.dict)

    def has_key(self, key):
        return self.dict.has_key(key)

    def __contains__(self, key):
        return self.dict.has_key(key)

    def get(self, key, default=None):
        if self.dict.has_key(key):
            return self[key]
        return default

    def __getitem__(self, key):
        # Serve from the writeback cache when possible; otherwise
        # unpickle a fresh copy from the underlying mapping.
        try:
            value = self.cache[key]
        except KeyError:
            f = StringIO(self.dict[key])
            value = Unpickler(f).load()
            if self.writeback:
                self.cache[key] = value
        return value

    def __setitem__(self, key, value):
        if self.writeback:
            self.cache[key] = value
        # Store the pickled representation in the underlying mapping.
        f = StringIO()
        p = Pickler(f, self._protocol)
        p.dump(value)
        self.dict[key] = f.getvalue()

    def __delitem__(self, key):
        del self.dict[key]
        # The cache entry, if any, must go too.
        try:
            del self.cache[key]
        except KeyError:
            pass

    def close(self):
        self.sync()
        try:
            self.dict.close()
        except AttributeError:
            pass
        self.dict = 0

    def __del__(self):
        self.close()

    def sync(self):
        # Flush the writeback cache, then the underlying mapping.
        if self.writeback and self.cache:
            # Temporarily disable writeback so the __setitem__ calls
            # below do not repopulate the cache while we drain it.
            self.writeback = False
            for key, entry in self.cache.iteritems():
                self[key] = entry
            self.writeback = True
            self.cache = {}
        if hasattr(self.dict, 'sync'):
            self.dict.sync()
+
+
class BsdDbShelf(Shelf):
    """Shelf implementation using the "BSD" db interface.

    This adds methods first(), next(), previous(), last() and
    set_location() that have no counterpart in [g]dbm databases.

    The actual database must be opened using one of the "bsddb"
    modules "open" routines (i.e. bsddb.hashopen, bsddb.btopen or
    bsddb.rnopen) and passed to the constructor.

    See the module's __doc__ string for an overview of the interface.
    """

    def __init__(self, dict, protocol=None, writeback=False, binary=None):
        Shelf.__init__(self, dict, protocol, writeback, binary)

    def _unpickle_pair(self, pair):
        # Decode a (key, pickled-value) pair from the underlying db.
        (key, value) = pair
        return (key, Unpickler(StringIO(value)).load())

    def set_location(self, key):
        return self._unpickle_pair(self.dict.set_location(key))

    def next(self):
        return self._unpickle_pair(self.dict.next())

    def previous(self):
        return self._unpickle_pair(self.dict.previous())

    def first(self):
        return self._unpickle_pair(self.dict.first())

    def last(self):
        return self._unpickle_pair(self.dict.last())
+
+
class DbfilenameShelf(Shelf):
    """Shelf implementation using the "anydbm" generic dbm interface.

    This is initialized with the filename for the dbm database.
    See the module's __doc__ string for an overview of the interface.
    """

    def __init__(self, filename, flag='c', protocol=None, writeback=False, binary=None):
        # Deferred import: only pay for a dbm backend when one is used.
        import anydbm
        db = anydbm.open(filename, flag)
        Shelf.__init__(self, db, protocol, writeback, binary)
+
+
def open(filename, flag='c', protocol=None, writeback=False, binary=None):
    """Open a persistent dictionary for reading and writing.

    The filename parameter is the base filename for the underlying
    database; as a side-effect an extension may be added to it and
    more than one file may be created.  The optional flag parameter
    has the same interpretation as the flag parameter of anydbm.open().
    The optional protocol parameter specifies the pickle protocol
    version (0, 1, or 2).

    The optional binary parameter is deprecated and may be set to True
    to force the use of binary pickles for serializing data values.

    See the module's __doc__ string for an overview of the interface.
    """

    shelf = DbfilenameShelf(filename, flag, protocol, writeback, binary)
    return shelf
diff --git a/depot_tools/release/win/python_24/Lib/shlex.py b/depot_tools/release/win/python_24/Lib/shlex.py
new file mode 100644
index 0000000..6632b87
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/shlex.py
@@ -0,0 +1,292 @@
+# -*- coding: iso-8859-1 -*-
+"""A lexical analyzer class for simple shell-like syntaxes."""
+
+# Module and documentation by Eric S. Raymond, 21 Dec 1998
+# Input stacking and error message cleanup added by ESR, March 2000
+# push_source() and pop_source() made explicit by ESR, January 2001.
+# Posix compliance, split(), string arguments, and
+# iterator interface by Gustavo Niemeyer, April 2003.
+
+import os.path
+import sys
+from collections import deque
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+__all__ = ["shlex", "split"]
+
class shlex:
    "A lexical analyzer class for simple shell-like syntaxes."
    def __init__(self, instream=None, infile=None, posix=False):
        # A plain string argument is wrapped in a StringIO so the rest
        # of the lexer can treat every source uniformly as a stream.
        if isinstance(instream, basestring):
            instream = StringIO(instream)
        if instream is not None:
            self.instream = instream
            self.infile = infile
        else:
            # No input given: lex standard input.
            self.instream = sys.stdin
            self.infile = None
        self.posix = posix
        # In POSIX mode EOF is None so that an empty-string token ('')
        # produced by quotes remains distinguishable from end of input.
        if posix:
            self.eof = None
        else:
            self.eof = ''
        self.commenters = '#'
        # NOTE(review): 'abcdfe...' has 'e' and 'f' transposed; harmless
        # since this string is only used for membership tests.
        self.wordchars = ('abcdfeghijklmnopqrstuvwxyz'
                          'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_')
        if self.posix:
            # POSIX mode additionally treats Latin-1 accented letters
            # as word characters.
            self.wordchars += ('ßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿ'
                               'ÀÁÂÃÄÅÆÇÈÉÊËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞ')
        self.whitespace = ' \t\r\n'
        self.whitespace_split = False
        self.quotes = '\'"'
        self.escape = '\\'
        # Quote characters inside which escape processing applies.
        self.escapedquotes = '"'
        # Lexer state: ' ' = between tokens, 'a' = in a word, a quote
        # char = inside that quote, an escape char = after that escape,
        # None = past end of file.
        self.state = ' '
        self.pushback = deque()
        self.lineno = 1
        self.debug = 0
        self.token = ''
        # Stack of (infile, instream, lineno) saved by push_source.
        self.filestack = deque()
        # When set to a token string (e.g. 'source'), that token
        # triggers inclusion of another input file; see get_token.
        self.source = None
        if self.debug:
            print 'shlex: reading from %s, line %d' \
                  % (self.instream, self.lineno)

    def push_token(self, tok):
        "Push a token onto the stack popped by the get_token method"
        if self.debug >= 1:
            print "shlex: pushing token " + repr(tok)
        self.pushback.appendleft(tok)

    def push_source(self, newstream, newfile=None):
        "Push an input source onto the lexer's input source stack."
        # Strings are wrapped just like in __init__.
        if isinstance(newstream, basestring):
            newstream = StringIO(newstream)
        # Save the current source so pop_source can restore it.
        self.filestack.appendleft((self.infile, self.instream, self.lineno))
        self.infile = newfile
        self.instream = newstream
        self.lineno = 1
        if self.debug:
            if newfile is not None:
                print 'shlex: pushing to file %s' % (self.infile,)
            else:
                print 'shlex: pushing to stream %s' % (self.instream,)

    def pop_source(self):
        "Pop the input source stack."
        self.instream.close()
        (self.infile, self.instream, self.lineno) = self.filestack.popleft()
        if self.debug:
            print 'shlex: popping to %s, line %d' \
                  % (self.instream, self.lineno)
        # Reset to the between-tokens state for the restored stream.
        self.state = ' '

    def get_token(self):
        "Get a token from the input stream (or from stack if it's nonempty)"
        if self.pushback:
            tok = self.pushback.popleft()
            if self.debug >= 1:
                print "shlex: popping token " + repr(tok)
            return tok
        # No pushback.  Get a token.
        raw = self.read_token()
        # Handle inclusions
        if self.source is not None:
            while raw == self.source:
                # The token after the source keyword names the file to
                # include; sourcehook resolves it to (filename, stream).
                spec = self.sourcehook(self.read_token())
                if spec:
                    (newfile, newstream) = spec
                    self.push_source(newstream, newfile)
                raw = self.get_token()
        # Maybe we got EOF instead?
        while raw == self.eof:
            if not self.filestack:
                return self.eof
            else:
                # EOF of an included file: resume the including one.
                self.pop_source()
                raw = self.get_token()
        # Neither inclusion nor EOF
        if self.debug >= 1:
            if raw != self.eof:
                print "shlex: token=" + repr(raw)
            else:
                print "shlex: token=EOF"
        return raw

    def read_token(self):
        # Core state machine: reads one character at a time and
        # accumulates self.token until a complete token is recognized.
        # 'quoted' records whether any part of the token was quoted, so
        # POSIX mode can tell an empty quoted token ('') from EOF.
        quoted = False
        escapedstate = ' '
        while True:
            nextchar = self.instream.read(1)
            if nextchar == '\n':
                self.lineno = self.lineno + 1
            if self.debug >= 3:
                print "shlex: in state", repr(self.state), \
                      "I see character:", repr(nextchar)
            if self.state is None:
                self.token = ''        # past end of file
                break
            elif self.state == ' ':
                # Between tokens: decide what kind of token starts here.
                if not nextchar:
                    self.state = None  # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print "shlex: I see whitespace in whitespace state"
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    # Discard the rest of the comment line.
                    self.instream.readline()
                    self.lineno = self.lineno + 1
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif nextchar in self.wordchars:
                    self.token = nextchar
                    self.state = 'a'
                elif nextchar in self.quotes:
                    # In non-POSIX mode the quotes are kept in the token.
                    if not self.posix:
                        self.token = nextchar
                    self.state = nextchar
                elif self.whitespace_split:
                    self.token = nextchar
                    self.state = 'a'
                else:
                    # Punctuation: a single-character token.
                    self.token = nextchar
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
            elif self.state in self.quotes:
                # Inside a quoted section; state holds the quote char.
                quoted = True
                if not nextchar:      # end of file
                    if self.debug >= 2:
                        print "shlex: I see EOF in quotes state"
                    # XXX what error should be raised here?
                    raise ValueError, "No closing quotation"
                if nextchar == self.state:
                    if not self.posix:
                        self.token = self.token + nextchar
                        self.state = ' '
                        break
                    else:
                        # POSIX: closing quote ends the quoted section
                        # but not the token; keep reading the word.
                        self.state = 'a'
                elif self.posix and nextchar in self.escape and \
                     self.state in self.escapedquotes:
                    escapedstate = self.state
                    self.state = nextchar
                else:
                    self.token = self.token + nextchar
            elif self.state in self.escape:
                # Just after an escape char; state holds that char and
                # escapedstate is the state to return to.
                if not nextchar:      # end of file
                    if self.debug >= 2:
                        print "shlex: I see EOF in escape state"
                    # XXX what error should be raised here?
                    raise ValueError, "No escaped character"
                # In posix shells, only the quote itself or the escape
                # character may be escaped within quotes.
                if escapedstate in self.quotes and \
                   nextchar != self.state and nextchar != escapedstate:
                    # The backslash is literal here, so keep it.
                    self.token = self.token + self.state
                self.token = self.token + nextchar
                self.state = escapedstate
            elif self.state == 'a':
                # Accumulating an unquoted word.
                if not nextchar:
                    self.state = None   # end of file
                    break
                elif nextchar in self.whitespace:
                    if self.debug >= 2:
                        print "shlex: I see whitespace in word state"
                    self.state = ' '
                    if self.token or (self.posix and quoted):
                        break   # emit current token
                    else:
                        continue
                elif nextchar in self.commenters:
                    self.instream.readline()
                    self.lineno = self.lineno + 1
                    if self.posix:
                        # POSIX: a comment also terminates the word.
                        self.state = ' '
                        if self.token or (self.posix and quoted):
                            break   # emit current token
                        else:
                            continue
                elif self.posix and nextchar in self.quotes:
                    self.state = nextchar
                elif self.posix and nextchar in self.escape:
                    escapedstate = 'a'
                    self.state = nextchar
                elif nextchar in self.wordchars or nextchar in self.quotes \
                    or self.whitespace_split:
                    self.token = self.token + nextchar
                else:
                    # Punctuation ends the word; push it back so the
                    # next call sees it as its own token.
                    self.pushback.appendleft(nextchar)
                    if self.debug >= 2:
                        print "shlex: I see punctuation in word state"
                    self.state = ' '
                    if self.token:
                        break   # emit current token
                    else:
                        continue
        result = self.token
        self.token = ''
        # POSIX: an empty unquoted result means EOF (self.eof is None).
        if self.posix and not quoted and result == '':
            result = None
        if self.debug > 1:
            if result:
                print "shlex: raw token=" + repr(result)
            else:
                print "shlex: raw token=EOF"
        return result

    def sourcehook(self, newfile):
        "Hook called on a filename to be sourced."
        # Strip surrounding double quotes if present.
        if newfile[0] == '"':
            newfile = newfile[1:-1]
        # This implements cpp-like semantics for relative-path inclusion.
        if isinstance(self.infile, basestring) and not os.path.isabs(newfile):
            newfile = os.path.join(os.path.dirname(self.infile), newfile)
        return (newfile, open(newfile, "r"))

    def error_leader(self, infile=None, lineno=None):
        "Emit a C-compiler-like, Emacs-friendly error-message leader."
        if infile is None:
            infile = self.infile
        if lineno is None:
            lineno = self.lineno
        return "\"%s\", line %d: " % (infile, lineno)

    def __iter__(self):
        # The lexer is its own iterator; next() yields tokens.
        return self

    def next(self):
        token = self.get_token()
        if token == self.eof:
            raise StopIteration
        return token
+
def split(s, comments=False):
    """Split the string s using shell-like syntax (POSIX rules).

    If comments is false (the default), '#' is not treated as a
    comment character and is returned as part of the tokens.
    """
    lexer = shlex(s, posix=True)
    lexer.whitespace_split = True
    if not comments:
        lexer.commenters = ''
    return [tok for tok in lexer]
+
if __name__ == '__main__':
    # Demo driver: lex standard input, or the file named on the command
    # line, printing each token until EOF.
    if len(sys.argv) == 1:
        lexer = shlex()
    else:
        file = sys.argv[1]
        lexer = shlex(open(file), file)
    while 1:
        tt = lexer.get_token()
        if tt:
            print "Token: " + repr(tt)
        else:
            break
diff --git a/depot_tools/release/win/python_24/Lib/shutil.py b/depot_tools/release/win/python_24/Lib/shutil.py
new file mode 100644
index 0000000..5bc4377
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/shutil.py
@@ -0,0 +1,193 @@
+"""Utility functions for copying files and directory trees.
+
+XXX The functions here don't copy the resource fork or other metadata on Mac.
+
+"""
+
+import os
+import sys
+import stat
+import exceptions
+from os.path import abspath
+
+__all__ = ["copyfileobj","copyfile","copymode","copystat","copy","copy2",
+           "copytree","move","rmtree","Error"]
+
class Error(exceptions.EnvironmentError):
    """Exception raised by this module's copy/move helpers.

    copyfile raises it for a same-file copy; copytree and move raise it
    with a list of (srcname, dstname, why) tuples or a message.
    """
    pass
+
def copyfileobj(fsrc, fdst, length=16*1024):
    """Copy the contents of file-like object fsrc into fdst.

    Data is transferred in chunks of at most `length` bytes until
    fsrc.read returns an empty result.
    """
    chunk = fsrc.read(length)
    while chunk:
        fdst.write(chunk)
        chunk = fsrc.read(length)
+
+def _samefile(src, dst):
+    # Macintosh, Unix.
+    if hasattr(os.path,'samefile'):
+        try:
+            return os.path.samefile(src, dst)
+        except OSError:
+            return False
+
+    # All other platforms: check for same pathname.
+    return (os.path.normcase(os.path.abspath(src)) ==
+            os.path.normcase(os.path.abspath(dst)))
+
def copyfile(src, dst):
    """Copy data from src to dst"""
    # Refuse to copy a file onto itself (same inode where the platform
    # can tell, otherwise same normalized pathname).
    if _samefile(src, dst):
        raise Error, "`%s` and `%s` are the same file" % (src, dst)

    fsrc = None
    fdst = None
    try:
        fsrc = open(src, 'rb')
        fdst = open(dst, 'wb')
        copyfileobj(fsrc, fdst)
    finally:
        # Close whichever handles were successfully opened, even if the
        # copy (or the second open) failed.
        if fdst:
            fdst.close()
        if fsrc:
            fsrc.close()
+
def copymode(src, dst):
    """Copy the permission bits of src onto dst.

    A no-op on platforms without os.chmod.
    """
    if not hasattr(os, 'chmod'):
        return
    permissions = stat.S_IMODE(os.stat(src).st_mode)
    os.chmod(dst, permissions)
+
def copystat(src, dst):
    """Copy mode bits, access time and modification time from src to dst.

    Each piece is copied only where the platform supports the relevant
    os call (os.utime / os.chmod).
    """
    st = os.stat(src)
    if hasattr(os, 'utime'):
        os.utime(dst, (st.st_atime, st.st_mtime))
    if hasattr(os, 'chmod'):
        os.chmod(dst, stat.S_IMODE(st.st_mode))
+
+
def copy(src, dst):
    """Copy file data and permission bits ("cp src dst").

    If dst names a directory, the file is copied into it under its
    original basename.
    """
    target = dst
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copymode(src, target)
+
def copy2(src, dst):
    """Copy file data plus all stat info ("cp -p src dst").

    If dst names a directory, the file is copied into it under its
    original basename.
    """
    target = dst
    if os.path.isdir(target):
        target = os.path.join(target, os.path.basename(src))
    copyfile(src, target)
    copystat(src, target)
+
+
def copytree(src, dst, symlinks=False):
    """Recursively copy a directory tree using copy2().

    The destination directory must not already exist.
    If exception(s) occur, an Error is raised with a list of reasons.

    If the optional symlinks flag is true, symbolic links in the
    source tree result in symbolic links in the destination tree; if
    it is false, the contents of the files pointed to by symbolic
    links are copied.

    XXX Consider this example code rather than the ultimate tool.

    """
    names = os.listdir(src)
    # Fails (OSError) if dst already exists, by design.
    os.mkdir(dst)
    errors = []
    for name in names:
        srcname = os.path.join(src, name)
        dstname = os.path.join(dst, name)
        try:
            if symlinks and os.path.islink(srcname):
                linkto = os.readlink(srcname)
                os.symlink(linkto, dstname)
            elif os.path.isdir(srcname):
                copytree(srcname, dstname, symlinks)
            else:
                copy2(srcname, dstname)
            # XXX What about devices, sockets etc.?
        except (IOError, os.error), why:
            # Collect failures instead of aborting, so as much of the
            # tree as possible gets copied; report them all at the end.
            errors.append((srcname, dstname, why))
    if errors:
        raise Error, errors
+
def rmtree(path, ignore_errors=False, onerror=None):
    """Recursively delete a directory tree.

    If ignore_errors is set, errors are ignored; otherwise, if onerror
    is set, it is called to handle the error with arguments (func,
    path, exc_info) where func is os.listdir, os.remove, or os.rmdir;
    path is the argument to that function that caused it to fail; and
    exc_info is a tuple returned by sys.exc_info().  If ignore_errors
    is false and onerror is None, an exception is raised.

    """
    # Normalize the three error-handling modes into a single callable.
    if ignore_errors:
        def onerror(*args):
            pass
    elif onerror is None:
        def onerror(*args):
            # Re-raise the exception currently being handled.
            raise
    names = []
    try:
        names = os.listdir(path)
    except os.error, err:
        onerror(os.listdir, path, sys.exc_info())
    for name in names:
        fullname = os.path.join(path, name)
        try:
            # lstat, not stat: a symlink to a directory must be removed
            # as a link, not recursed into.
            mode = os.lstat(fullname).st_mode
        except os.error:
            mode = 0
        if stat.S_ISDIR(mode):
            rmtree(fullname, ignore_errors, onerror)
        else:
            try:
                os.remove(fullname)
            except os.error, err:
                onerror(os.remove, fullname, sys.exc_info())
    # Remove the (now hopefully empty) directory itself.
    try:
        os.rmdir(path)
    except os.error:
        onerror(os.rmdir, path, sys.exc_info())
+
def move(src, dst):
    """Recursively move a file or directory to another location.

    If the destination is on our current filesystem, then simply use
    rename.  Otherwise, copy src to the dst and then remove src.
    A lot more could be done here...  A look at a mv.c shows a lot of
    the issues this implementation glosses over.

    """

    try:
        # Fast path: same filesystem, atomic rename.
        os.rename(src, dst)
    except OSError:
        # Cross-device (or otherwise failed) rename: copy then delete.
        if os.path.isdir(src):
            if destinsrc(src, dst):
                raise Error, "Cannot move a directory '%s' into itself '%s'." % (src, dst)
            copytree(src, dst, symlinks=True)
            rmtree(src)
        else:
            copy2(src,dst)
            os.unlink(src)
+
def destinsrc(src, dst):
    """Return True if dst lies inside src (or is src itself).

    Used by move() to refuse moving a directory into itself.

    Bug fix: the original bare prefix test reported '/a/b' as being
    inside '/ab' (and vice versa) because startswith has no notion of
    path components.  Appending a trailing separator to both absolute
    paths before the prefix test makes the comparison component-wise.
    """
    src = abspath(src)
    dst = abspath(dst)
    if not src.endswith(os.path.sep):
        src += os.path.sep
    if not dst.endswith(os.path.sep):
        dst += os.path.sep
    return dst.startswith(src)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/README b/depot_tools/release/win/python_24/Lib/site-packages/README
new file mode 100644
index 0000000..273f625
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/README
@@ -0,0 +1,2 @@
+This directory exists so that 3rd party packages can be installed
+here.  Read the source for site.py for more details.
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/__init__.py
new file mode 100644
index 0000000..30c21c9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/__init__.py
@@ -0,0 +1,13 @@
+VERSION = "0.2c2"
+SVN = 'svn.bat'
+SVNADMIN = 'svnadmin'
+def DiagLog(format, *args):
+  """Log diagnostics; default implementation does nothing.
+
+  Callers provide terminating newlines.
+
+  Arguments:
+  format  -- format string
+  *args   -- tuple for format string: format % args
+  """
+  return
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/changebranch.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/changebranch.py
new file mode 100644
index 0000000..26c2819
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/changebranch.py
@@ -0,0 +1,1499 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""changebranch - stuff for change branches
+
+Classes:
+ChangeBranch -- representation of a change branch
+
+Functions:
+ListChangeBranches -- generate changebranch names and dirents
+Diff            -- diff a change branch
+
+"""
+
+import cStringIO
+import os
+import posixpath
+import random
+import re
+import shutil
+import subprocess
+import sys
+
+from errno import EEXIST
+
+import svn.diff
+import svn.ra
+import svn.wc
+
+from svn.core import SVN_DIRENT_CREATED_REV, SVN_DIRENT_KIND
+from svn.core import SWIG_SVN_INVALID_REVNUM as SVN_INVALID_REVNUM
+from svn.core import svn_node_dir, svn_node_file, svn_node_none, svn_node_unknown
+from svn.core import svn_path_local_style
+from svn.core import svn_prop_regular_kind, svn_property_kind
+
+import gvn.commit
+import gvn.errors
+import gvn.platform
+import gvn.repository
+import gvn.svncmd
+import gvn.util
+import gvn.wc
+
+try:
+  import simplejson
+except ImportError:
+  from gvn.third_party import simplejson
+
+
def IsRegularProp(name):
  """Return True if name names a regular (user-visible) svn property."""
  kind = svn.core.svn_property_kind(name)[0]
  return kind == svn.core.svn_prop_regular_kind
+
def FilterRegularProps(properties):
  """Return a copy of properties keeping only regular svn properties."""
  filtered = {}
  for (name, value) in properties.iteritems():
    if IsRegularProp(name):
      filtered[name] = value
  return filtered
+
+
+_ADD_CODES =['A', 'R']
+
class State(object):
  """State of a changebranch snapshot.

  Attributes:
  base_path     -- path the snapshot is based on
  base_revision -- revision the snapshot is based on
  paths         -- dict mapping unicode path to PathState
  """
  def __init__(self, base_path, base_revision):
    """Record the snapshot base; the path map starts out empty."""
    self.paths = {}
    self.base_path = base_path
    self.base_revision = base_revision
+
+
class PathState(object):
  """State of a single path within a changebranch snapshot.

  Note: kind is always svn_node_unknown for old-style changebranches,
  which are going away soon.

  Attributes:
  action                -- 'A'dd, 'D'elete, 'R'eplace, 'M'odify
  base_revision         -- revision of working copy item this represents
  snap_revision         -- revision of last snapshot
  copyfrom_path         -- path copied from or None
  copyfrom_revision     -- revision copied from or SVN_INVALID_REVNUM
  kind                  -- svn_node_kind_t
  """

  # The attributes that participate in equality comparison.
  _COMPARED = ('action', 'base_revision', 'snap_revision',
               'copyfrom_path', 'copyfrom_revision', 'kind')

  def __init__(self, action, base_revision, snap_revision,
               copyfrom_path, copyfrom_revision, kind):
    self.action = action
    self.base_revision = base_revision
    self.snap_revision = snap_revision
    self.copyfrom_path = copyfrom_path
    self.copyfrom_revision = copyfrom_revision
    self.kind = kind

  def __eq__(self, other):
    """Structural equality: every compared attribute must match."""
    return all(getattr(self, name) == getattr(other, name)
               for name in self._COMPARED)

  def __ne__(self, other):
    return not self == other
+
+
def _ParseStateKind(state_dict):
  """Parse path kind from a State dict (helper for ParseState).

  A missing 'kind' entry means 'file' (old state files omitted it).
  """
  kind = state_dict.get('kind')
  if kind is None or kind == 'file':
    return svn_node_file
  return svn_node_dir
+
def ParseState(state_utf8, snap_revision):
  """Parse state_utf8 at snap_revision.

  Arguments:
  state_utf8            -- utf8-encoded str from the repository state file
  snap_revision         -- revision of the snapshot being examined

  Returns:
  State object populated with a PathState per path

  Raises:
  gvn.errors.ParseState
  """
  # Decode the UTF-8 text.
  try:
    decoded = state_utf8.decode('utf8')
  except UnicodeDecodeError, e:
    raise gvn.errors.ParseState(child=gvn.errors.Encoding(e))
  # Parse the dict from the decoded text.
  try:
    parsed = simplejson.loads(decoded)
  except ValueError, e:
    raise gvn.errors.ParseState(child=e)
  # Check the format.
  try:
    format = parsed['format']
  except KeyError:
    # no format => format 0
    format = 0
  if format != 0:
    # which is all we support
    raise gvn.errors.ParseState(message='unknown format %d' % (format,))
  # Get base path and revision.
  try:
    base_path = parsed['base']
  except KeyError:
    raise gvn.errors.ParseState(message='missing base path')
  try:
    base_revision = parsed['base_rev']
  except KeyError:
    raise gvn.errors.ParseState(message='missing base revision')

  change_state = State(base_path, base_revision)
  # Get path state.
  try:
    path_state = parsed['paths']
  except KeyError:
    raise gvn.errors.ParseState(message='missing paths')
  for (path, state) in path_state.iteritems():
    # 'action' is the only required per-path key.
    try:
      action = state['action']
    except KeyError:
      raise gvn.errors.ParseState(message="missing action for '%s'" % (path,))
    # base, snap, copyfrom_path/rev, and kind may be missing; they
    # default to the snapshot-wide values (or no-copyfrom).
    change_state.paths[path] = PathState(action,
                                         state.get('base', base_revision),
                                         state.get('snap', snap_revision),
                                         state.get('copyfrom_path'),
                                         state.get('copyfrom_rev',
                                                   SVN_INVALID_REVNUM),
                                         _ParseStateKind(state))
  return change_state
+
+
class _WCBranch(gvn.wc.Edit):
  """Apply a change from a working copy to a change branch."""

  def __init__(self, *args, **kwargs):
    """Same as gvn.wc.Edit.__init__ plus source_revision.

    Additional arguments:
    source_revision -- revision of source branch copied to change branch
                       (required keyword argument)
    """

    # pop() so the base class never sees the extra keyword.
    self.source_revision = kwargs.pop('source_revision')
    gvn.wc.Edit.__init__(self, *args, **kwargs)

  def _GetAction(self):
    """gvn.wc.Edit._GetAction but treat conflicted as modified."""
    try:
      action = gvn.wc.Edit._GetAction(self)
    except KeyError, e:
      # Only swallow the conflicted-status lookup failure; anything
      # else is a real error.
      if e.args[0] != svn.wc.status_conflicted:
        raise
      action = self._Modified
    return action

  def _PostProcess(self, pool):
    # Record the new change state for this path; no checksum (None)
    # at this stage.
    state = gvn.wc.ChangeState(None, self.wc.AbsolutePath(self.wc_path),
                               None, self.status)
    self.wc.UpdateChangeState(self.wc_path, state)

  def _Modified(self, parent, path, editor, pool):
    if (self.source_revision is not None
        and self.status.entry.revision != self.source_revision):
      self.wc.Notify(self.wc_path, svn.wc.notify_commit_modified,
                     self.status.entry.kind, pool)
      # If wc_path's base revision is not the same as the
      # source_revision the overall change branch is based on, replace
      # this node in the change branch with a copy from wc_path's base
      # and base change on that.
      if not self.status.entry.repos.startswith('http'):
        # As long as we need the 2939 hack in _AddHelper, only for non-http.
        editor.delete_entry(path, self.status.entry.revision, parent, pool)
      return self._AddHelper(parent, path, editor,
                             self.status.entry.url,      # copyfrom
                             self.status.entry.revision, # copyfrom
                             pool)
    else:
      # Base revisions agree: an ordinary modify is sufficient.
      return gvn.wc.Edit._Modified(self, parent, path, editor, pool)

  # XXX workaround http://subversion.tigris.org/issues/show_bug.cgi?id=2939
  def _AddHelper(self, parent, path, editor,
                 copyfrom_url=None, copyfrom_rev=None, pool=None):
    self.DiagLog('')
    if self.status.entry.repos.startswith('http'):
      # Over http the delete must happen here rather than in _Modified;
      # see the issue 2939 workaround note above.
      args = (path, self.status.entry.revision, parent, pool)
      gvn.DiagLog('delete_entry%s; ', args)
      editor.delete_entry(*args)
    result = gvn.wc.Edit._AddHelper(self, parent, path, editor,
                                    copyfrom_url, copyfrom_rev, pool)
    gvn.DiagLog('\n')
    return result
+
+
class PostfixTextDeltaManager(gvn.wc.PostfixTextDeltaManager):
  """Text-delta manager specialized for changebranch snapshots."""

  def PostProcess(self, wc_path, status, checksum):
    """Update gvn change state.

    No wc bump, as this isn't a commit to the working copy's URL.
    """
    abs_path = self.wc.AbsolutePath(wc_path)
    new_state = gvn.wc.ChangeState(None, abs_path, checksum, status)
    self.wc.UpdateChangeState(wc_path, new_state)

  def HandleTmp(self, tmp):
    """Unlink tmp file, rather than ignoring it.

    For snapshots, we must remove this file, otherwise later submits try
    to use it as the new text base, resulting in corruption.
    """
    os.unlink(tmp)
+
+
+# TODO(next-release): The new changebranch creation code fixes all these
+# TODOs about self.revision.
+class ChangeBranch(object):
+  """Representation of a change branch."""
+
  def __init__(self, config, project, name, username=None, revision=None):
    """Initialize ChangeBranch, which may or may not exist in the repo.

    Arguments:
    config   -- gvn.config.Config
    project  -- gvn.project.Project
    name     -- name of change branch
    username -- username; defaults to the repository's username
    revision -- revision number of existing change branch

    Raises whatever gvn.util.ValidateChangeName raises for an invalid
    change name.
    """

    gvn.util.ValidateChangeName(name)
    self._name = name
    self._config = config
    self.project = project

    if username is None:
      self._username = self.project.repository.username
    else:
      self._username = username

    if revision is None:
      # Unknown yet; _GetRevision will look it up lazily on demand.
      self._revision = revision
    else:
      # TODO(epg): We should fill this in on the fly.
      (self._revision,) = self.project.repository.GetRevisions(start=revision)

    # Lazily-computed caches; see _GetDescription.
    self._description = None
    self._revisions = []

    # TODO(next-release): Remove; this is just to support old-style
    # changebranches during the transition; see also _GetChangedPathsHelper
    # and BranchPath methods.
    self._branch_name = 'branch'
+
  # Read-only identity accessors.
  name = property(lambda self: self._name)
  username = property(lambda self: self._username)
  # 'username/name' -- the changebranch name without a peg revision.
  change_name_at_head = property(lambda self: '/'.join([self.username,
                                                        self.name]))
  # 'username/name@N' -- the changebranch name pegged at its revision.
  change_name = property(lambda self: '@'.join(['/'.join([self.username,
                                                          self.name]),
                                                str(self.revision.number)]))
+
+  def BranchPath(self, path=None):
+    """Return root-relative path of (or within) the branch.
+
+    If path is None return the branch path itself, otherwise return the
+    root-relative path of path within the branch.
+    """
+    if path is None:
+      path = self._branch_name
+    else:
+      path = self._branch_name + '/' + path
+    return self.project.ChangeBranchPath(self.username, self.name, path)
+
+  def StatePath(self):
+    """Return root-relative path of the state file."""
+    return self.project.ChangeBranchPath(self.username, self.name, 'state')
+
+  def _GetRevision(self):
+    if self._revision is None:
+      dirent = self.project.repository.Stat(
+                            self.project.ChangeBranchPath(self.username,
+                                                           self.name),
+                            self.project.repository.GetHead())
+      self._revision = dirent.last_changed
+    return self._revision
+  revision = property(_GetRevision)
+
+  def _GetDescription(self):
+    if self._description is None:
+      try:
+        revision = self.revision
+      except gvn.errors.RepoPath:
+        return
+      self._description = revision['svn:log'].decode('utf-8')
+    return self._description
+  description = property(_GetDescription)
+
  def _GetChangedPathsHelper(self):
    """Collect the changebranch's changed paths.

    Returns (source, source_revision, changes) where source is the
    copied-from branch path, source_revision its revision, and changes
    a list of gvn.repository.ChangedPath.

    NOTE(review): if the branch-creation copy is never found in the
    revision's paths, 'source' and 'source_revision' are unbound and
    the final return raises UnboundLocalError -- confirm callers only
    reach this for well-formed changebranches.
    """
    revision = self.revision

    cbp = self.project.ChangeBranchPath(self.username, self.name)

    # TODO(epg): Er, isn't this entirely redundant?  Just use self.revision...
    (revision,) = self.project.repository.GetRevisions([cbp],
                                                        start=revision.number,
                                                        end=1, limit=1)

    changes = []
    branch_name = None
    for i in sorted(revision.paths, key=lambda x: x.path):
      # When we start through here, we don't know the "top name"
      # ('trunk' for '//trunk', 'projects' for '//public/projects').
      if branch_name is None:
        (dirname, basename) = posixpath.split(i.path.lstrip('/'))
        if dirname == cbp:
          # Found the branch; save branch_name, path, and copyfrom.
          self._branch_name = branch_name = basename
          cbp = self.project.ChangeBranchPath(self.username, self.name,
                                               branch_name)
          source = i.copyfrom_path
          source_revision = i.copyfrom_revision
          continue
        # If we get here, we're looking at creation of directories
        # above the changebranch; skip those.
        continue
      try:
        relative_path = gvn.util.RelativePath(cbp, i.path.lstrip('/'))
      except gvn.errors.PathNotChild:
        # i is not in our changebranch
        continue
      changes.append(gvn.repository.ChangedPath(i.path, i.revision, i.action,
                                            i.copyfrom_path, i.copyfrom_revision,
                                            relative_path, source,
                                            source_revision))
    return (source, source_revision, changes)
+
+  def _GetChangedPaths(self):
+    # TODO(next-release): Preserving this crapitude for just one more...
+    try:
+      return self._GetChangedPathsHelper()[2]
+    except gvn.errors.RepoPath:
+      return []
+  changed_paths = property(_GetChangedPaths, doc="""List of ChangedPaths.""")
+
+  def _OldGetRepoState(self):
+    (source, source_revision, changes) = self._GetChangedPathsHelper()
+    state = State(source.lstrip('/'), source_revision)
+    for cp in changes:
+      relative_path = cp.relative_path.lstrip('/')
+      if cp.copyfrom_path is None:
+        copyfrom_path = None
+      else:
+        copyfrom_path = cp.copyfrom_path.lstrip('/')
+      state.paths[relative_path] = PathState(cp.action, cp.old_revision,
+                                             self.revision.number,
+                                             copyfrom_path,
+                                             cp.copyfrom_revision,
+                                             svn_node_unknown)
+    return state
+
+  def _NewGetRepoState(self, pool):
+    """Return State object parsed from repository state file.
+
+    Will replace GetRepoState in next release.
+
+    Raises:
+    SubversionException.apr_err in gvn.errors.SVN_NOENT
+    SubversionException.apr_err==???
+    gvn.errors.ParseState
+    """
+    sio = cStringIO.StringIO()
+    svn.ra.get_file(self.project.repository.ra,
+                    self.StatePath(), self.revision.number,
+                    sio, pool)
+    return ParseState(sio.getvalue(), self.revision.number)
+
+  def GetRepoState(self, pool):
+    """Return State object from old or new style changebranch.
+
+    First try _NewGetRepoState, but if the state file does not exist, try
+    _OldGetRepoState.
+
+    Will be replaced by _NewGetRepoState in next release.
+    """
+    try:
+      return self._NewGetRepoState(pool)
+    except svn.core.SubversionException, e:
+      if e.apr_err not in gvn.errors.SVN_NOENT:
+        raise
+      # No state path; try old-style changebranch.
+      return self._OldGetRepoState()
+
+  def ChangedPathsRelative(self):
+    """Return iterator returning relative paths of changed_paths."""
+    return (x.relative_path for x in self.changed_paths)
+
+  def Exists(self):
+    try:
+      # Contact the repository to find the last changed revision.
+      self.revision
+      return True
+    except gvn.errors.RepoPath:
+      # Changebranch does not exist.
+      return False
+
+  def _MissingDirectories(self, path, revision, pool):
+    """Return list of directories to make in order to mkdir path.
+
+    In an empty repository:
+      _MissingDirectories(foo/bar/baz)  => [foo, foo/bar, foo/bar/baz]
+    If foo/bar already exists:
+      _MissingDirectories(foo)          => []
+      _MissingDirectories(foo/bar)      => []
+      _MissingDirectories(foo/bar/baz)  => [foo/bar/baz]
+
+    Arguments:
+    path                -- path relative to repository root
+    revision            -- revision the transaction is based on
+    pool                -- memory pool
+    """
+    if path == '':
+      return []
+    kind = svn.ra.check_path(self.project.repository.ra, path, revision, pool)
+    if kind != svn_node_none:
+      return []
+    return (self._MissingDirectories(posixpath.dirname(path), revision, pool)
+            + [path])
+
  def Branch(self, _wc, paths=None, description=None, pool=None):
    """Replace (or create) change branch with paths.

    paths may be either a dict mapping path to svn_wc_status2_t or a
    list of paths.  If it is a list, .Branch will open the deepest
    common path and discover the svn_wc_status2_t for each path
    itself; if a dict, .Branch assumes the wc is already open.

    Arguments:
    _wc         -- working copy object (presumably gvn.wc; opened here if
                   paths is a list, saved and closed on success)
    paths       -- list of paths to change; defaults to .changed_paths
    description -- svn:log for change; defaults to .description; default ''
    pool        -- memory pool

    Raises:
    gvn.errors.BranchFromRoot   -- when the wc is the repository root
    gvn.errors.ChangeBranch     -- when a path is already in another change
    gvn.errors.OutOfDateParent  -- when a parent directory is out of date
    """

    if _wc.name == '':
      raise gvn.errors.BranchFromRoot

    # Save the current changebranched paths so we can remove local
    # meta-data for paths being removed from the changebranch.
    orig_paths = set(self.ChangedPathsRelative())

    if paths is None:
      paths = orig_paths

    if not hasattr(paths, 'iterkeys'):
      # Treat it as a list.
      targets = paths
      _wc.Open(targets)
      # Collect a status object for every target; from here on paths is
      # the dict form (wc-relative path -> svn_wc_status2_t).
      paths = {}
      def status_cb(target, status):
        paths[_wc.RelativePath(target)] = status
      _wc.Status(targets, recursive=False, get_all=False, no_ignore=False,
                 show_children=False, callback=status_cb)

    if description is None:
      description = self.description
      if description is None:
        description = ''

    head = self.project.repository.GetHead(pool)

    # NOTE(review): changes_path and user_path are assigned but never used
    # below; candidates for removal.
    changes_path = self.project.ChangeBranchPath()
    user_path = self.project.ChangeBranchPath(self.username)
    container_path = self.project.ChangeBranchPath(self.username, self.name)
    branch_path = self.project.ChangeBranchPath(self.username, self.name,
                                                 _wc.name)

    # Directories above the branch that must be created, and whether an
    # existing branch directory must be replaced (delete+copy).
    mkdirs = self._MissingDirectories(container_path, head, pool)
    delete_branch_path = (container_path not in mkdirs
                          and svn.ra.check_path(self.project.repository.ra,
                                                branch_path, head,
                                                pool) != svn_node_none)
    # Full ordered list of repository paths the commit will touch:
    # missing parents, the branch dir itself, then each changed path.
    repo_paths = list(mkdirs)
    repo_paths.append(branch_path)
    for i in sorted(paths.iterkeys()):
      repo_paths.append(self.project.ChangeBranchPath(self.username,
                                                       self.name,
                                                       '/'.join([_wc.name,
                                                                 i])))

    source_revision = _wc.Entry(pool=pool).revision

    deltaman = PostfixTextDeltaManager(_wc)
    # Map each repository path to the commit action that builds it.
    def action(path, pool):
      if path in mkdirs:
        return gvn.commit.Mkdir(head)
      elif path == branch_path:
        # Branch dir is a copy of the wc URL, optionally replacing a
        # previous incarnation of the changebranch.
        return gvn.commit.CopyDir(_wc.URL(), source_revision,
                                  replace=delete_branch_path,
                                  replace_revision=head)
      wc_path = path[len(branch_path)+1:]
      try:
        if _wc.change_state[wc_path].change_name != self.name:
          # ye olde stupid generic exception; we gotta fix that
          raise gvn.errors.ChangeBranch('%s already in %s' % (wc_path,
                                        _wc.change_state[wc_path].change_name))
      except KeyError:
        # not yet changebranched at all
        pass
      return _WCBranch(_wc, wc_path, paths[wc_path],
                       source_revision=source_revision,
                       file_baton_cb=deltaman.Add)
    try:
      try:
        commit_info = gvn.commit.Drive(self.project.repository,
                                       {'svn:log': description.encode('utf-8')},
                                       head, repo_paths, action,
                                       deltaman.Transmit)
      except svn.core.SubversionException, e:
        # Try to report decent error message for out of date parent.
        if e.apr_err == svn.core.SVN_ERR_FS_NOT_DIRECTORY:
          # local and svn use this
          m = re.match(r"Path '(.*)' not present$", e.args[0])
          if m is None:
            # Not a case we know about; let it out.
            raise
          parent = m.group(1)[len(branch_path)+1:]
          raise gvn.errors.OutOfDateParent(e, _wc.subpath, source_revision,
                                           parent)
        elif e.apr_err == svn.core.SVN_ERR_FS_NOT_FOUND:
          # neon (TODO(epg): and serf?) use this
          m = re.match(r"File not found: transaction '[^']+', path '(.*)'$",
                       e.args[0])
          if m is None:
            # Not a case we know about; let it out.
            raise
          # TODO(epg): neon has leading /; does serf?
          parent = posixpath.dirname(m.group(1)[len(branch_path)+2:])
          raise gvn.errors.OutOfDateParent(e, _wc.subpath, source_revision,
                                           parent)
        # Not a case we know about; let it out.
        raise
    except:
      # Any failure: drop pending local change state before re-raising.
      _wc.ClearPendingChangeState()
      raise

    # Update local change state.
    for path in orig_paths.difference(paths):
      # User removed path from changebranch.
      _wc.DeleteChangeState(path)
    for i in paths:
      _wc.UpdateChangeState(i, change_name=self.name)
    _wc.Save()
    _wc.Close()

    self._description = description
    # TODO(epg): We just created this revision, so we can construct
    # the Revision object without having to svn log.
    (self._revision,) = self.project.repository.GetRevisions(start=commit_info.revision)

    if callable(_wc.notify_func):
      _wc.notify_func('Changed %s.\n' % (self.change_name,))
      if commit_info.post_commit_err is not None:
        _wc.notify_func('\nWarning: %s\n' % (commit_info.post_commit_err,))
+
  def Delete(self, _wc, description=None, pool=None):
    """Delete, leaving self invalid and paths associated with no change.

    Commits a deletion of the changebranch container directory, then
    removes the local change state for every path that was in the change.

    Arguments:
    _wc         -- working copy object whose change state is updated
    description -- svn:log for deletion; defaults to 'Delete.\\n'
    pool        -- memory pool

    """

    # Save this for updating _wc.change_state .
    paths = list(self.ChangedPathsRelative())

    if description is None:
      description = 'Delete.\n'

    head = self.project.repository.GetHead(pool)
    container_path = self.project.ChangeBranchPath(self.username, self.name)

    # Single action: delete the container at its last-changed revision.
    def action(path, pool):
      return gvn.commit.Delete(self.revision.number)
    try:
      commit_info = gvn.commit.Drive(self.project.repository,
                                     {'svn:log': description.encode('utf-8')},
                                     head, [container_path], action, pool=pool)
    except:
      # Commit failed; drop pending local change state before re-raising.
      _wc.ClearPendingChangeState()
      raise

    # Update local change state.
    for i in paths:
      _wc.DeleteChangeState(i)
    _wc.Save()

    # Invalidate cached revision/description; this object no longer
    # refers to an existing changebranch.
    self._revision = self._description = None

    if callable(_wc.notify_func):
      _wc.notify_func('Deleted %s@%d.\n' % (self.change_name_at_head,
                                            commit_info.revision))
      if commit_info.post_commit_err is not None:
        _wc.notify_func('\nWarning: %s\n' % (commit_info.post_commit_err,))
+
+  def Submit(self, _wc, description=None, revprops=None, pool=None):
+    """Submit (and remove) change branch.
+
+    Arguments:
+    description -- svn:log for submit; defaults to .description or ''
+    revprops    -- mapping of revprop names to values (default None)
+    pool        -- memory pool
+
+    """
+
+    if description is None:
+      description = self.description.encode('utf-8')
+      if description is None:
+        description = ''
+
+    if revprops is None:
+      revprops = {}
+    revprops['svn:log'] = description
+    revprops['gvn:change'] = self.change_name
+
+    head = self.project.repository.GetHead(pool)
+
+    if callable(_wc.notify_func):
+      _wc.notify_func('Submitting %s\n' % (self.change_name,), pool)
+
+    container_path = self.project.ChangeBranchPath(self.username, self.name)
+
+    paths = []
+    repo_paths = [container_path]
+    for i in self.changed_paths:
+      paths.append(i.relative_path)
+      repo_paths.append(_wc.RepoPath(i.relative_path))
+
+    _wc.Open(paths, write_lock=True)
+
+    path_status = {}
+    def status_cb(target, status):
+      wc_path = _wc.RelativePath(target)
+      # TODO(epg): Need to check for other kinds of status as well.
+      # For example, if i have a changebranch in which i add new/ and
+      # new/file and someone else commits new/ i can't submit (out of
+      # date).  If i update and snapshot i'll be fine, but if i update
+      # and forget to snapshot, status here is svn_wc_status_none for
+      # new/ (because it's an Add on cb but unmodified in wc) which
+      # causes _GetAction to have a KeyError.
+      if gvn.wc.IsConflicted(status):
+        raise gvn.errors.Conflict(wc_path)
+      path_status[wc_path] = status
+    _wc.Status(paths, recursive=False, get_all=False, no_ignore=False,
+               show_children=False, callback=status_cb)
+
+    deltaman = gvn.wc.PostfixTextDeltaManager(_wc)
+    def action(path, editpool):
+      if path == container_path:
+        return gvn.commit.Delete(self.revision.number)
+      wc_path = _wc.LocalPath(path)
+      return gvn.wc.Edit(_wc, wc_path, path_status[wc_path],
+                         file_baton_cb=deltaman.Add)
+    try:
+      commit_info = gvn.commit.Drive(self.project.repository,
+                                     revprops,
+                                     head, repo_paths, action,
+                                     deltaman.Transmit, pool)
+    except:
+      _wc.ClearPendingChangeState()
+      raise
+
+    # TODO(epg): Any of these may fail independently: MarkSubmitted,
+    # .gvnstate updating, wc bump, and anything else we add here.  But
+    # we want to try them all, anyway, but let the callers see all
+    # exceptions.  Collin agreed that a composite exception class,
+    # containing a list of exception objects for caller to handle, is
+    # a sane way to cover this.  For now, ignore MarkSubmitted errors.
+    # Eventually, the post-commit processor will add missing
+    # gvn:submitted properties.
+    try:
+      self.MarkSubmitted(commit_info.revision, pool)
+    except:
+      #import traceback; traceback.print_exc()
+      pass
+
+    # Update local change state.
+    for i in paths:
+      _wc.DeleteChangeState(i)
+    _wc.Save()
+
+    if callable(_wc.notify_func):
+      _wc.notify_func('Submitted r%d.\n' % (commit_info.revision,), pool)
+      if commit_info.post_commit_err is not None:
+        _wc.notify_func('\nWarning: %s\n' % (commit_info.post_commit_err,))
+
+    # TODO(epg): Huh?  Why are we clobbering these?
+    self._revision = self._description = None
+
+    _wc.ProcessCommittedQueue(commit_info, pool)
+    _wc.Close()
+
+    return commit_info.revision
+
+  def Approve(self, pool):
+    """Mark this change branch approved."""
+
+    propname = ':'.join(['gvn:approve', self.project.repository.username])
+    svn.ra.change_rev_prop(self.project.repository.ra,
+                           self.revision.number, propname, '',
+                           pool)
+
+  def MarkSubmitted(self, revision, pool):
+    """Mark this changebranch submitted in revision."""
+    svn.ra.change_rev_prop(self.project.repository.ra,
+                           self.revision.number, 'gvn:submitted',
+                           str(revision), pool)
+
def ListChangeBranches(project, dirent_fields=0, user=None, pool=None):
  """Return a generator yielding (change_name, svn_dirent_t) tuples.

  change_name has the form 'user/name@created_rev'.

  Arguments:
  project               -- gvn.project.Project
  dirent_fields         -- see svn_ra_get_dir2; use SVN_DIRENT_CREATED_REV
                           and SVN_DIRENT_KIND even if not specified here
  user                  -- user whose changebranches to yield;
                           yield all if None (the default)
  pool                  -- memory pool

  Raises:
  gvn.errors.NoChangeBranchBase
  gvn.errors.NoUserChangeBranch
  svn.core.SubversionException
  """
  if user is None:
    # No user given: list the changebranch base directory and take every
    # subdirectory as a user.
    try:
      (dirents, revision, properties) = svn.ra.get_dir2(project.repository.ra,
                                                    project.ChangeBranchPath(),
                                                        SVN_INVALID_REVNUM,
                                                        SVN_DIRENT_KIND,
                                                        pool)
    except svn.core.SubversionException, e:
      if e.apr_err in [svn.core.SVN_ERR_FS_NOT_FOUND,
                       svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND]:
        raise gvn.errors.NoChangeBranchBase(project.ChangeBranchPath())
      raise
    users = (name for (name, dirent) in dirents.iteritems()
             if dirent.kind == svn_node_dir)
  else:
    users = [user]

  # No matter what the caller wanted, we need at least these two.
  dirent_fields |= SVN_DIRENT_CREATED_REV
  dirent_fields |= SVN_DIRENT_KIND

  # NOTE(review): iterpool is created and cleared each iteration but never
  # passed to get_dir2 (pool is used instead), so it frees nothing; also,
  # as this is a generator, iterpool.destroy() only runs if the caller
  # exhausts it.  Confirm intent before changing.
  iterpool = svn.core.Pool(pool)
  for user in users:
    iterpool.clear()
    try:
      (dirents, revision, properties) = svn.ra.get_dir2(
                                        project.repository.ra,
                                        project.ChangeBranchPath(user),
                                        SVN_INVALID_REVNUM,
                                        dirent_fields, pool)
    except svn.core.SubversionException, e:
      if e.apr_err in [svn.core.SVN_ERR_FS_NOT_FOUND,
                       svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND]:
        raise gvn.errors.NoUserChangeBranch(project.ChangeBranchPath(user))
      raise
    for (name, dirent) in dirents.iteritems():
      # Each subdirectory of the user's directory is one changebranch.
      if dirent.kind == svn_node_dir:
        yield ('%s/%s@%d' % (user, name, dirent.created_rev), dirent)
  iterpool.destroy()
+
def UniqueChangeName(project, username, base, pool):
  """Return a change name starting with base that is not already in use.

  If base itself is free, return it unchanged; otherwise append integer
  suffixes (base1, base2, ...) until an unused name is found.

  """

  try:
    existing = set(change_name for (change_name, dirent) in
                   ListChangeBranches(project, user=username, pool=pool))
  except gvn.errors.NoUserChangeBranch:
    # User has no changebranches at all, so base is certainly free.
    return base
  if base not in existing:
    return base

  suffix = 1
  while base + str(suffix) in existing:
    suffix += 1
  return base + str(suffix)
+
def RandomBranchName(length=4):
  """Return a random branchname of length 'length'.

  Branchnames are only lowercase characters and numbers, excluding:
     lower case 'o' and the number 0 so as not to be confused
     lower case 'l' and the number 1 so as not to be confused

  Letters and numbers alternate beginning with a letter.

  Examples of a 4 character branchname:  'a2i3', 'w2b8', 'd5j4'.

  Uses of names returned by this method should be wrapped in
  UniqueChangeName() to ensure uniqueness.

  """

  branchname = []
  # range rather than xrange: identical iteration behavior here, and
  # keeps this function portable to Python 3.
  for i in range(length):
    if i % 2 == 0:
      # Even positions: a letter ('o' and 'l' excluded as confusable).
      branchname.append(random.choice('abcdefghijkmnpqrstuvwxyz'))
    else:
      # Odd positions: a digit ('0' and '1' excluded as confusable).
      branchname.append(random.choice('23456789'))

  return ''.join(branchname)
+
+###########################################################################
+# changebranch diffing
+
+# first, the callbacks
+
class DiffCallbacks(object):
  """Base class for callbacks called by Diff functions.

  Subclasses override Directory, File, Abort, and Finish; SetChangeState
  is normally inherited as-is.
  """
  # ChangeBranch being diffed; set by SetChangeState.
  cb = None
  # State object for the changebranch (or None); set by SetChangeState.
  repo_state = None

  def SetChangeState(self, cb, state):
    """Called with ChangeBranch and State objects before any other callback.

    Callback implementors should not need to implement this one, as this
    implementation saves them as the cb and repo_state attributes.
    """
    self.cb = cb
    self.repo_state = state

  def Directory(self, path,
                left_properties, right_properties,
                left_header, right_header, pool):
    """Called for added or changed directories.

    Signature matches the concrete implementations (SvnDiffCallbacks,
    TkDiffCallbacks), which also receive the diff headers; the base class
    previously lacked left_header/right_header and so no longer described
    the actual callback protocol.

    Arguments:
    path                -- unicode path relative to the base path
    left_properties     -- properties for the left side
    right_properties    -- properties for the right side
    left_header         -- diff header text for the left side
    right_header        -- diff header text for the right side
    pool                -- scratch pool
    """
    pass

  def File(self, path, left_path, right_path,
           left_properties, right_properties,
           left_header, right_header, pool):
    """Called for added or changed files.

    Implementations may freely modify the two tmp files, but may not
    delete them.

    Arguments:
    path                -- unicode path relative to the base path
    left_path           -- name of local temporary file for the left side
    right_path          -- name of local temporary file for the right side
    left_properties     -- properties for the left side
    right_properties    -- properties for the right side
    left_header         -- diff header text for the left side
    right_header        -- diff header text for the right side
    pool                -- scratch pool
    """
    pass

  def Abort(self, pool):
    """Called if something goes wrong before finishing the diff.

    Arguments:
    pool                -- scratch pool
    """
    pass

  def Finish(self, pool):
    """Called when the diff is finished.

    Arguments:
    pool                -- scratch pool

    Returns:
    Whatever the implementor wants (this implementation returns None); Diff
    functions will return this.
    """
    return None
+
+
class SvnDiffCallbacks(DiffCallbacks):
  """DiffCallbacks implementation using svn's internal textual diff."""

  def __init__(self, options, out_file, encoding, pool, command=None):
    """Initialize from list, file-like, str, svn.core.Pool.

    It does not make much sense for a caller to pass anything but
    sys.stdout for out_file when specifying a custom command, unless it
    really wants the property diffs and 'Index' headings to go to a
    different output stream from the diff command output.

    Arguments:
    options     -- list of option strings for svn.diff.file_options_parse
    out_file    -- file-like object to which to write the diff
    encoding    -- user's encoding
    pool        -- scratch memory pool; may be cleared after instantiation
    command     -- optional custom diff; passed to subprocess.call (with shell)
    """
    self.options = svn.diff.file_options_create()
    svn.diff.file_options_parse(self.options, options, pool)
    self.fp = out_file
    self.encoding = encoding
    self.command = command
    # Exit status reported by Finish; nonzero if any custom diff command
    # returned nonzero.
    self.result = 0

  def _DiffProps(self, path, left_properties, right_properties, pool):
    """Write a svn-style property-change section for path, if any differ."""
    prop_diffs = svn.core.svn_prop_diffs(right_properties, left_properties,
                                         pool)
    if len(prop_diffs) == 0:
      return
    self.fp.write('\n'
                  'Property changes on: %s\n'
                  '___________________________________________________________________\n'
                  % (path.encode(self.encoding),))
    for (name, value) in prop_diffs.iteritems():
      original = left_properties.get(name)
      # TODO(glasser): Use svn.diff.mem_string stuff here.
      if original is None:
        self.fp.write('Added: %s\n'
                      '   + %s\n'
                      % (name.encode(self.encoding), value))
      elif value is None:
        self.fp.write('Deleted: %s\n'
                      '   - %s\n'
                      % (name.encode(self.encoding), original))
      else:
        self.fp.write('Modified: %s\n'
                      '   - %s\n'
                      '   + %s\n'
                      % (name.encode(self.encoding),
                         original, value))

  def _IsDeletedPath(self, path):
    """Return True if path is recorded as deleted in the repo state.

    Returns False when there is no repo state (old-style changebranch).
    """
    return (self.repo_state is not None
            and self.repo_state.paths[path].action == 'D')

  def Directory(self, path,
                left_properties, right_properties,
                left_header, right_header, pool):
    """Write property diffs for a directory; skip deleted directories."""
    # TODO(epg): Support --no-diff-deleted
    no_diff_deleted = True
    if no_diff_deleted and self._IsDeletedPath(path):
      return
    self._DiffProps(path, left_properties, right_properties, pool)

  def File(self, path, left_path, right_path,
           left_properties, right_properties,
           left_header, right_header, pool):
    """Write an 'Index' heading, the file diff, and any property diffs."""
    # TODO(epg): Support --no-diff-deleted
    no_diff_deleted = True
    # Evaluate the deleted check once instead of three times.
    deleted = no_diff_deleted and self._IsDeletedPath(path)
    if deleted:
      maybe_deleted = ' (deleted)'
    else:
      maybe_deleted = ''
    self.fp.write('Index: %s%s\n'
                  '===================================================================\n'
                  % (path.encode(self.encoding), maybe_deleted))
    if deleted:
      return

    if self.command is None:
      # Internal svn diff straight to our output stream.
      diff = svn.diff.file_diff_2(left_path, right_path,
                                  self.options, pool)
      relative_to_dir = None
      svn.diff.file_output_unified3(self.fp, diff,
                                    left_path, right_path,
                                    left_header, right_header,
                                    self.encoding, relative_to_dir,
                                    self.options.show_c_function, pool)
      # TODO(epg): Would be nice if svn.diff could tell us if anything
      # changed so we could set self.result here.
      #self.result = ...
    else:
      # External diff command; flush our stream first so its output
      # interleaves correctly.
      cmd = '%s %s %s' % (self.command, left_path, right_path)
      gvn.DiagLog('SvnDiffCallbacks.File => subprocess.call(%s, shell=True)\n',
                  cmd)
      self.fp.flush()
      result = subprocess.call(cmd, shell=True)
      if result != 0:
        self.result = result

    self._DiffProps(path, left_properties, right_properties, pool)

  def Finish(self, pool):
    """Return the accumulated exit status (0 unless a custom diff failed)."""
    return self.result
+
+
+def _MaybeWriteProperties(properties, filename):
+  """Serialize properties into the file filename, if any properties.
+
+  Helper for TkDiffCallbacks.
+  """
+  if len(properties) == 0:
+    return
+  fp = open(filename, mode='ab')
+  fp.write('\n\nProperties:\n')
+  for (name, value) in sorted(properties.iteritems(), key=lambda x: x[0]):
+    fp.write('%s: %s\n' % (name, value))
+  fp.close()
+
class TkDiffCallbacks(DiffCallbacks):
  """DiffCallbacks implementation that hands file pairs to tkdiff."""

  def __init__(self, tkdiff, encoding):
    """Initialize from str, str.

    Arguments:
    tkdiff      -- tkdiff command; passed to subprocess.call (without shell)
    encoding    -- user's encoding
    """
    self.tkdiff = tkdiff
    self.encoding = encoding
    # (left_path, right_path) tuples collected by File, consumed by Finish.
    self.pairs = []

  def Directory(self, path,
                left_properties, right_properties,
                left_header, right_header, pool):
    # TODO(epg): _MaybeWriteProperties in some parallel tree (so names match
    # up) for directory prop diffs.
    return

  def File(self, path, left_path, right_path,
           left_properties, right_properties,
           left_header, right_header, pool):
    """Queue the file pair for tkdiff, appending property text if changed."""
    self.pairs.append((left_path, right_path))
    prop_diffs = svn.core.svn_prop_diffs(right_properties, left_properties,
                                         pool)
    if len(prop_diffs) == 0:
      return
    _MaybeWriteProperties(left_properties, left_path)
    _MaybeWriteProperties(right_properties, right_path)

  def Finish(self, pool):
    """Run tkdiff over all queued pairs; return its exit status."""
    if not self.pairs:
      # Nothing actually changed, so don't run tkdiff.
      return 0
    argv = [self.tkdiff]
    for (left, right) in self.pairs:
      argv.extend([':', left, right])
    return subprocess.call(argv)
+
+#################################################################
+# now, the Diff functions
+
+# first, DiffBaseToSnapshot
+
def _FindLeft(path, base_revision, repo_state, copied_directory_stack):
  """Return (root-relative path, revision, header) for the left side of a diff.

  In the simple, common case this is just

    (repo_state.base_path + path, base_revision)

  But if some parent directory of path was copied
  (i.e. copied_directory_stack is not empty), we need to find the
  left side under the path from which that copy was made.

  The returned header is suitable for svn.diff.file_output_unified3 and
  similar functions.

  Arguments:
  path                          -- base-relative path
  base_revision                 -- what the caller thinks the base revision
                                   of path is
  repo_state                    -- State object
  copied_directory_stack        -- list of copied directories
                                   (see DiffBaseToSnapshot)
  """
  if copied_directory_stack:
    # A parent directory was copied; the left side lives under the copy
    # source of the innermost such directory.
    (copied_dir, copyfrom_path, copyfrom_revision) = copied_directory_stack[-1]
    path_in_copied_dir = path[len(copied_dir) + 1:]
    left_path = '/'.join([copyfrom_path, path_in_copied_dir])
    left_revision = copyfrom_revision
  else:
    # No copied parent; the left side is the same path under the base.
    left_path = '/'.join([repo_state.base_path, path])
    left_revision = base_revision

  # Build the header: added paths have no meaningful left side to cite.
  if repo_state.paths[path].action in _ADD_CODES:
    left_header = path + '\t(added)'
  else:
    left_header = ('%s\t(^/%s@%d)'
                   % (left_path[len(repo_state.base_path) + 1:],
                      left_path, left_revision))
  return (left_path, left_revision, left_header)
+
+
+def _RaGetDirOrFile(ra, path, revision, fp, dirent_fields, pool,
+                    kind=svn_node_unknown):
+  """Call get_file or get_dir for DiffBaseToSnapshot.
+
+  This is for old-style changebranches, where we don't know whether path
+  is a directory or file.  Arguments are a combination of svn.ra.get_file
+  and svn.ra.get_dir2 arguments, plus the optional kind, which we know
+  with new-style changebranches.
+  """
+  if kind == svn_node_file:
+    result = svn.ra.get_file(ra, path, revision, fp, pool)
+    return (kind, result)
+
+  if kind == svn_node_dir:
+    result = svn.ra.get_dir2(ra, path, revision, dirent_fields, pool)
+    return (kind, result)
+
+  gvn.DiagLog('_RaGetDirOrFile(%s@%d) => try get_file => ', path, revision)
+  try:
+    result = svn.ra.get_file(ra, path, revision, fp, pool)
+    kind = svn_node_file
+    gvn.DiagLog('is file')
+  except svn.core.SubversionException, e:
+    if e.apr_err not in [svn.core.SVN_ERR_FS_NOT_FILE,
+                         # ra-neon screws us again; don't even ask
+                         # about serf, which just wrote some HTML 301
+                         # foo into fp!  see svn issue 3206
+                         svn.core.SVN_ERR_RA_DAV_RELOCATED]:
+      raise
+    gvn.DiagLog('get_dir2 => ')
+    result = svn.ra.get_dir2(ra, path, revision, dirent_fields, pool)
+    kind = svn_node_dir
+    gvn.DiagLog('is dir')
+  gvn.DiagLog('\n')
+  return (kind, result)
+
+
def DiffBaseToSnapshot(cb, callbacks, encoding, pool):
  """Diff the base of a snapshot to the snapshot itself.

  This is 'gvn review'; it shows the change being proposed.

  Arguments:
  cb            -- ChangeBranch object
  callbacks     -- DiffCallbacks implementation
  encoding      -- user's encoding
  pool          -- memory pool

  Returns:
  the return value of callbacks.Finish
  """
  # Stack of (path, copyfrom_path, copyfrom_revision) tuples for the
  # copied directories enclosing the current path; entries are pushed when
  # we see a copied dir and popped when iteration leaves its subtree.
  copied_directory_stack = []
  # Temporary tree holding the 'left' and 'right' file contents to diff.
  tree = gvn.util.TmpTree()
  try:
    try:
      change_state = cb.GetRepoState(pool)
      callbacks.SetChangeState(cb, change_state)
      # Per-iteration subpool; cleared at the top of each loop pass, so
      # nothing allocated in it may outlive one path's processing.
      iterpool = svn.core.Pool(pool)
      for (path, state) in sorted(change_state.paths.iteritems(),
                                  key=lambda x: x[0]):
        iterpool.clear()
        # See if we just came out from under a copied directory.
        if len(copied_directory_stack) > 0:
          last_copied_dir = copied_directory_stack[-1][0]
          if not gvn.util.IsChild(path, last_copied_dir):
            # Yep, so pop it off the stack.
            copied_directory_stack.pop()

        kind = state.kind
        local_path = svn_path_local_style(path, iterpool).decode('utf8')
        local_path_encoded = local_path.encode(encoding)

        # Get left path/revision/header now, as we need them in a few
        # different blocks below.
        (left_path, left_revision, left_header) = _FindLeft(
          path, state.base_revision, change_state, copied_directory_stack)

        # This is more complicated than it could be in order to support
        # old-style changebranches (no repo state file), where we don't
        # know what kind of path we have.

        #################################################################
        # left side

        if kind != svn_node_dir:
          # Create tmp file, possibly incorrectly for old-style.
          fp = tree.MkFile(['left', local_path_encoded])
        else:
          fp = None

        if state.action in _ADD_CODES:
          if state.copyfrom_path is None:
            # This is an added dir or file, so no left-side properties.
            properties = {}
            if kind == svn_node_file and state.action == 'R':
              # This is a Replaced file that isn't copied; svn diffs
              # these as Modified files, so let's do the same.
              # TODO(next-release): Note that for old-style changebranches
              # we don't yet know kind, so we never get here.  That's OK,
              # this can be a teensy bit broken until we kick the old
              # style to the curb.  Don't forget to adjust testDiff in
              # test_changebranch.py
              svn.ra.get_file(cb.project.repository.ra,
                              left_path, left_revision,
                              fp, iterpool)
          else:
            # This is a copied dir or file, so get properties of copyfrom
            # for the left side.
            (kind, result) = _RaGetDirOrFile(cb.project.repository.ra,
                                             state.copyfrom_path.encode('utf8'),
                                             state.copyfrom_revision,
                                             fp, dirent_fields=0, kind=kind,
                                             pool=iterpool)
            # The properties come last in the result for both dir and file.
            properties = result[-1]
            # And push it onto the stack.
            copied_directory_stack.append((path, state.copyfrom_path,
                                           state.copyfrom_revision))

        else:
          # Modified or Deleted
          # TODO: Don't fetch if no_diff_deleted.
          (kind, result) = _RaGetDirOrFile(cb.project.repository.ra,
                                           left_path, left_revision,
                                           fp, dirent_fields=0, kind=kind,
                                           pool=iterpool)
          if kind == svn_node_file:
            (unused_rev, properties) = result
          else:
            (unused_dirents, unused_rev, properties) = result
        # Note whether we made a left tmp file; further down, we'll
        # delete bogus left and right files for directories in
        # old-style changebranches.
        if fp is None:
          made_left = False
        else:
          made_left = True
          fp.close()
        left_properties = FilterRegularProps(properties)

        #################################################################
        # right side

        # You might think we definitely know the kind of this path by now
        # even in the old style, but if this is an Add, we didn't do
        # anything for the left side, so we still don't know.

        branch_path = cb.BranchPath(path)
        if kind != svn_node_dir:
          # Create tmp file, possibly incorrectly for old-style.
          fp = tree.MkFile(['right', local_path_encoded])
        else:
          fp = None
        if state.action == 'D':
          # Deleted: nothing to fetch for the right side.
          properties = {}
        else:
          # Added or Modified
          branch_path_utf8 = branch_path.encode('utf8')
          (kind, result) = _RaGetDirOrFile(cb.project.repository.ra,
                                           branch_path_utf8, state.snap_revision,
                                           fp, dirent_fields=0, kind=kind,
                                           pool=iterpool)
          if kind == svn_node_file:
            (unused_rev, properties) = result
          else:
            (unused_dirents, unused_rev, properties) = result
        # Note whether we made a bogus tmp file; further down, we'll
        # delete bogus left and right files for directories in
        # old-style changebranches.
        if fp is None:
          made_right = False
        else:
          made_right = True
          fp.close()
        right_properties = FilterRegularProps(properties)

        # And here it is: if this was a directory, delete any files we
        # created incorrectly.
        if kind == svn_node_dir:
          if made_left:
            tree.RmFile(['left', local_path_encoded])
          if made_right:
            tree.RmFile(['right', local_path_encoded])

        #################################################################
        # Now diff the left and right sides.

        right_header = ('%s\t(^/%s@%d)'
                        % (path, branch_path,
                           state.snap_revision))
        if kind == svn_node_file:
          callbacks.File(path,
                         tree.Path(['left', local_path_encoded]),
                         tree.Path(['right', local_path_encoded]),
                         left_properties, right_properties,
                         left_header, right_header,
                         iterpool)
        else:
          callbacks.Directory(path,
                              left_properties, right_properties,
                              left_header, right_header, iterpool)
      iterpool.destroy()
    except:
      # Let the callbacks clean up partial output, then re-raise.
      callbacks.Abort(pool)
      raise
    else:
      return callbacks.Finish(pool)
  finally:
    # Always remove the temporary tree, success or failure.
    tree.Close()
+
+
def DiffSnapshotToWC(cb, wc, callbacks, encoding, pool):
  """Diff a snapshot to the working copy.

  This is used to show what the user has changed since last snapshot.

  Arguments:
  cb            -- ChangeBranch object
  wc            -- working copy object (gvn.wc)
  callbacks     -- DiffCallbacks implementation
  encoding      -- user's encoding
  pool          -- memory pool

  Returns:
  the return value of callbacks.Finish
  """
  # TODO(next-release): This is easier in upcoming .gvnstate format.
  # Restrict to the wc paths associated with this changebranch.
  paths = dict((path, state) for (path, state) in wc.change_state.iteritems()
               if state.change_name == cb.name)
  wc.Open(paths)
  # TODO(next-release): Remove; this is only here to support old-style
  # changebranches; we need it to find the branch name (for BranchPath),
  # and we need the repo path state later on (since we won't have the snap
  # revision until the next release).
  change_state = cb.GetRepoState(pool)
  tree = gvn.util.TmpTree()
  try:
    try:
      # Shouldn't status callback receive a pool, like most callbacks?
      # Fine, we'll handle it ourselves.
      status_pool = svn.core.Pool(pool)
      def status_cb(target, status):
        # Cleared on entry: allocations here must not outlive one call.
        status_pool.clear()
        path = wc.RelativePath(target)
        if not wc.NeedsSnapshot(path, status, status_pool):
          return

        branch_path = cb.BranchPath(path)
        branch_path_utf8 = branch_path.encode('utf8')
        # TODO(next-release): Get from wc state instead.
        snap_revision = change_state.paths[path].snap_revision

        left_header = ('%s\t(^/%s@%d)' % (path, branch_path,
                                          snap_revision))
        right_header = path + '\t(working copy)'

        if status.entry.kind == svn_node_dir:
          # Directories: compare snapshot properties against local ones.
          (unused_dirents, unused_rev,
           properties) = svn.ra.get_dir2(cb.project.repository.ra,
                                         branch_path_utf8,
                                         snap_revision,
                                         0, # dirent_fields
                                         status_pool)
          properties = FilterRegularProps(properties)
          (local_prop_diffs, local_props) = wc.GetPropDiffs(target, status_pool)
          gvn.util.ApplyPropDiffs(local_props, local_prop_diffs)
          callbacks.Directory(path, properties, local_props,
                              left_header, right_header, status_pool)
        else:
          local_path = svn_path_local_style(path, status_pool).decode('utf8')
          local_path_encoded = local_path.encode(encoding)

          # Left side: the file contents as of the snapshot revision.
          fp = tree.MkFile(['left', local_path_encoded])
          (unused_rev, properties) = svn.ra.get_file(
            cb.project.repository.ra, branch_path_utf8,
            snap_revision, fp, status_pool)
          fp.close()
          properties = FilterRegularProps(properties)
          # Right side: the working file, run through svn translation.
          # NOTE(review): allocated in pool (not status_pool), so the
          # translated file survives status_pool.clear(); confirm this is
          # intentional and not a per-file accumulation.
          right_path = svn.wc.translated_file2(target.encode('utf8'),
                                               target.encode('utf8'),
                                               wc.AdmRetrieve(target,
                                                              status_pool),
                                               svn.wc.TRANSLATE_FROM_NF,
                                               pool)
          (local_prop_diffs, local_props) = wc.GetPropDiffs(target, status_pool)
          gvn.util.ApplyPropDiffs(local_props, local_prop_diffs)
          callbacks.File(path,
                         tree.Path(['left', local_path_encoded]), right_path,
                         properties, local_props,
                         left_header, right_header,
                         status_pool)
      wc.Status(paths.keys(),
                recursive=False, get_all=False, no_ignore=False,
                show_children=False, callback=status_cb)
      status_pool.destroy()
      wc.Close()
    except:
      callbacks.Abort(pool)
      raise
    else:
      return callbacks.Finish(pool)
  finally:
    tree.Close()
+
+
def DiffSnapshots():
  """Diff two snapshots of the same changebranch (not implemented).

  See the discussion below for why a simple URL-to-URL diff report
  cannot produce the behavior we need here.
  """
  pass
  # Similar to DiffFromChange; the only way we can do this is by
  # fetching every file and comparing them.  In the common case, a
  # URL-URL diff would work, but when it doesn't, there's nothing we
  # can do in the report that will give us the behavior we need.

  # The way it would work in the common case is say:
  #    want diff between branch@20 -> branch@30
  #    report change state, telling it *what versions of branch@20 we have*
  # receive deltas telling us how to turn that into branch@30

  # That's no good:

  # r20
  # A /changes/epg/foo/branch from trunk@17
  # M /changes/epg/foo/branch/a
  # R /changes/epg/foo/branch/b from trunk@19
  # =>
  # epg/foo@20
  # M a
  # M b

  # r30
  # R /changes/epg/foo/branch from trunk@28
  # M /changes/epg/foo/branch/a
  # M /changes/epg/foo/branch/b
  # =>
  # epg/foo@30
  # M a
  # M b

  # But there are files other than a and b!  And they changed between
  # 17 and 28.  So, for example 'c': we'll receive a delta turning
  # trunk/c@17 into trunk/c@28 which is no good at all.  In large
  # trees, we could receive quite a lot of crap we don't want.
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/cmdline.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/cmdline.py
new file mode 100644
index 0000000..0a9b917
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/cmdline.py
@@ -0,0 +1,1101 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Facilities for processing a command-line and dispatching subcommands
+
+Classes:
+Option        -- representation of an option
+OptionParser  -- customized optparse.OptionParser
+Command       -- representation of a subcommand
+TextDeltaProgressBase   -- base class for text delta progress reporters
+TextDeltaProgressDots   -- prints dots, like svn
+TextDeltaProgressMeter  -- shows a progress meter
+
+Variables:
+SvnOptions    -- map of option names to Option objects for svn
+GvnOptions    -- map of option names to Option objects for gvn
+CommandNames  -- list of command names (no aliases)
+NameToCommand -- map of command names (including aliases) to Command objects
+
+Functions:
+FindProject   -- Find which project to use.
+AuthOptions   -- Return the standard auth options.
+LogOptions    -- Return the standard log options.
+AddCommand    -- Add a new Command to the global list.
+RunSvnCommand -- Run an svn command.
+Run           -- Run a subcommand.
+Notify        -- notify callback
+main          -- main function
+
+Call AddCommand with a function implementing a subcommand and some
+metadata about it (see the Command class's documentation for details)
+for each subcommand.  Call main with sys.argv; it uses OptionParser to
+parse the command-line arguments and Run to dispatch the subcommands.
+
+Run passes a Config object, an optparse.Values object, a list of
+subcommand name + operands, and a svn.core.Pool.  Use the
+optparse.Values object to test for options.  The operands are the
+non-option arguments from the command line, without interpretation.
+
+"""
+
+
+import codecs
+import optparse
+import os
+import sys
+import time
+import traceback
+
+from errno import EPIPE
+
+import svn.core
+import svn.wc
+
+from svn.core import svn_node_file
+from svn.core import svn_path_internal_style, svn_path_local_style
+
+import gvn.config
+import gvn.errors
+import gvn.platform
+import gvn.project
+import gvn.svncmdline
+import gvn.svnauth
+import gvn.svncmd
+import gvn.util
+import gvn.wc
+
+
class Option(object):
  """Description of a single command-line option.

  Attributes:
  short         -- single-character alias, or None if there is none
  has_argument  -- whether the option consumes an argument
  help          -- help text shown to the user
  """

  def __init__(self, short_option, has_argument, help):
    (self.short, self.has_argument, self.help) = (short_option,
                                                  has_argument, help)
+
# Map of option name -> Option for the svn options gvn understands,
# seeded from gvn.svncmdline and then specialized below.
SvnOptions = dict((k, Option(*v))
                  for k,v in gvn.svncmdline.Options.iteritems())
# Get specific about svn options that gvn subcommands take.
SvnOptions.update({
  # options all subcommands accept
  'config-dir': Option(None, True,
                       'read user configuration files from directory ARG'),
  'force': Option(None, False, 'force operation to run'),
  'non-interactive': Option(None, False, 'do no interactive prompting'),
  'quiet': Option('q', False, 'print nothing, or only summary information'),
  'verbose': Option('v', False, 'print extra information'),
  # auth options
  'no-auth-cache': Option(None, False, 'do not cache authentication tokens'),
  'username': Option(None, True, 'specify a username ARG'),
  'password': Option(None, True, 'specify a password ARG'),

  # options about getting user input
  'editor-cmd': Option(None, True, 'use ARG as external editor'),
  'encoding': Option(None, True,
                     'treat value as being in charset encoding ARG'),
  'file': Option('F', True, 'read log message from file ARG'),
  'message': Option('m', True, 'specify log message ARG'),

  # diff
  'diff-cmd': Option(None, True, 'use ARG as diff command'),
  'extensions': Option('x', True,
                          """Default: '-u'. When Subversion is invoking an
                             external diff program, ARG is simply passed along
                             to the program. But when Subversion is using its
                             default internal diff implementation, or when
                             Subversion is displaying blame annotations, ARG
                             could be any of the following:
                                -u (--unified):
                                   Output 3 lines of unified context.
                                -b (--ignore-space-change):
                                   Ignore changes in the amount of white space.
                                -w (--ignore-all-space):
                                   Ignore all white space.
                                --ignore-eol-style:
                                   Ignore changes in EOL style"""),
  'no-diff-deleted': Option(None, False, 'do not print differences for deleted files'),
  'notice-ancestry': Option(None, False, 'notice ancestry when calculating differences'),
  'old': Option(None, True, 'use ARG as the older target'),
  'new': Option(None, True, 'use ARG as the newer target'),
  'summarize': Option(None, False, 'show a summary of the results'),

  # depth
  'depth': Option(None, True,
                          """pass depth ('empty', 'files', 'immediates', or
                            'infinity') as ARG"""),
  'non-recursive': Option('N', False,
                    'obsolete; try --depth=files or --depth=immediates'),
  'recursive': Option('R', False,
                      'descend recursively, same as --depth=infinity'),

  # Fixed: a stray "')," from an earlier single-quoted version of this
  # string had leaked into the user-visible help text.
  'change': Option('c', True,
                          """the change made by revision ARG (like -r ARG-1:ARG)
                             If ARG is negative this is like -r ARG:ARG-1"""),
  'revision': Option('r', True,
                          """ARG (some commands also take ARG1:ARG2 range)
                             A revision argument can be one of:
                                NUMBER       revision number
                                '{' DATE '}' revision at start of the date
                                'HEAD'       latest in repository
                                'BASE'       base rev of item's working copy
                                'COMMITTED'  last commit at or before BASE
                                'PREV'       revision just before COMMITTED"""),
  'targets': Option(None, True, 'pass contents of file ARG as additional args'),
  'force-log': Option(None, False, 'force validity of log message source'),
})
+
# These options exist only for gvn (not passed through to svn).
# Map of option name -> Option, grouped by the subcommand that uses them.
GvnOptions = {
  # special options
  'diag': Option(None, False, 'show diagnostics about what gvn is doing'),
  'help': Option('h', False, 'help'),
  'version': Option(None, False, 'show program version information'),

  # options all subcommands accept
  'gvn-config-dir': Option(None, True,
                       'read user gvn configuration files from directory ARG'),
  'project': Option(None, True, 'use project ARG'),

  # gvn change
  'add': Option(None, False, 'add to changebranch'),
  'delete': Option(None, False, 'delete entire changebranch'),
  'remove': Option(None, False, 'remove changebranch association'),

  # gvn changes
  'all': Option(None, False, 'show all'),
  'user': Option(None, True, 'operate on USER (does not affect auth)'),

  # gvn diff
  'from-change': Option(None, False,
                        'show diff from changebranch to working copy'),

  # gvn mail
  'cc': Option(None, True, 'comma-separated list of addresses to CC'),
  'reviewers': Option(None, True,
                      'comma-separated list of reviewer addresses'),

  # commands that snapshot (change, mail, snapshot)
  'force-change': Option(None,  False, 'force validity of changebranch name'),
}
+
+
# list of command names (no aliases); populated via AddCommand
CommandNames = []
# map of command names (including aliases) to Command objects; also
# populated via AddCommand
NameToCommand = {}
+
+
+# sugary utilities for building option lists
def AuthOptions(options=None):
  """Return the standard auth options, appended to options if specified.

  Arguments:
  options -- optional list of option names to prepend (never mutated)

  Returns:
  a new list: options (if any) followed by the auth option names
  """
  # Use a None sentinel rather than a mutable default argument; the
  # original default ([]) was never mutated, but this is the safe idiom.
  if options is None:
    options = []
  return options + [
    'no-auth-cache',
    'non-interactive',
    'password',
    'username',
    ]
+
def LogOptions(options=None):
  """Return the standard log options, appended to options if specified.

  Arguments:
  options -- optional list of option names to prepend (never mutated)

  Returns:
  a new list: options (if any) followed by the log option names
  """
  # Use a None sentinel rather than a mutable default argument; the
  # original default ([]) was never mutated, but this is the safe idiom.
  if options is None:
    options = []
  return options + [
    'editor-cmd',
    'encoding',
    'file',
    'force-log',
    'message',
    ]
+
+
+# WHINE(epg): They told me optparse is nice and extensible.  They
+# lied.  It's not at all extensible, and quite painful.  I wish i'd
+# done something else.
class OptionParser(optparse.OptionParser):
  """optparse.OptionParser specialized for gvn.

  Registers every option from SvnOptions and GvnOptions, records the
  canonical (long) name of each option the user actually supplied in
  values.gvn_options, and raises gvn.errors.BadOptions instead of
  printing to stderr and exiting on error.
  """

  def __init__(self):
    # Use 'resolve' so we can redefine svn options in GvnOptions.
    optparse.OptionParser.__init__(self, conflict_handler='resolve')
    # Maps each option name and short alias to its canonical long name.
    self.canonical_options = {}
    # We do our own help processing.
    self.remove_option('--help')
    self.AddOptions(SvnOptions)
    self.AddOptions(GvnOptions)

  def AddOptions(self, options):
    """Register each (name, Option) pair from options with add_option.

    Argument-less options are defaulted to False so the
    'if values.some_option' idiom works even when the user did not pass
    the option.
    """
    defaults = {}
    for (name, option) in options.iteritems():
      self.canonical_options[name] = name
      if option.short is not None:
        self.canonical_options[option.short] = name

      # Build args and kwargs to pass to add_option, and defaults for
      # set_defaults.
      args = ['--' + name]
      if option.short is not None:
        args.append('-' + option.short)
      kwargs = {'action': 'callback', 'callback': self.Callback,
                'help': option.help}
      if option.has_argument:
        kwargs['type'] = 'str'
        # optparse defaults these to None.
      else:
        # optparse doesn't do anything with these if the user doesn't
        # specify, breaking the if options.some_option model
        # completely.  Don't ask me why.
        defaults[name.replace('-', '_')] = False
      self.add_option(*args, **kwargs)
    self.set_defaults(**defaults)

  def Callback(self, option, opt_str, value, parser):
    """Callback action to process each option.

    All i want is a list of options the user gave, but the only way to
    get it is to reimplement the option handling entirely.

    """

    # Normalize '-q'/'--quiet' etc. to the canonical long name.
    opt_str = self.canonical_options[opt_str.lstrip('-')]

    # Maintain list of options the user gave.
    if opt_str != 'diag':
      try:
        gvn_options = parser.values.gvn_options
      except AttributeError:
        gvn_options = parser.values.gvn_options = []
      gvn_options.append(opt_str)

    # Set the option in the Values object, as optparse would.
    attr = opt_str.replace('-', '_')
    if value is None:
      setattr(parser.values, attr, True)
    else:
      setattr(parser.values, attr, value)

  def error(self, msg):
    """Turn optparse's attempts at sabotage into exceptions.

    optparse comes from the "why raise an exception like a sane
    library when you can just spew to stderr and then sys.exit?"
    school of thought.  How nice.

    """

    raise gvn.errors.BadOptions(msg)

  def parse_args(self, *args, **kwargs):
    """Parse args, guaranteeing gvn_options exists and splitting -x."""
    (values, argv) = optparse.OptionParser.parse_args(self, *args, **kwargs)
    if not hasattr(values, 'gvn_options'):
      values.gvn_options = []
    # Post-process --extensions/-x option.
    # With multiple -x options, the last wins rather than appending;
    # this is how svn behaves.
    if values.extensions is None:
      values.extensions = []
    else:
      values.extensions = values.extensions.split()
    return (values, argv)
+
+
class TextDeltaProgressBase(object):
  """Base class for text delta progress reporters.

  The base implementations of NewFile, Finish, and Update are no-ops;
  subclasses override them to render progress to self.fp.
  """

  def __init__(self, total, fp):
    """Remember the file count and output stream.

    Arguments:
    total       -- total number of files to be transmitted
    fp          -- file-like object for output (must support write and flush)
    """
    # Total number of files that will be transmitted.
    self.file_total = total
    # Output stream used by subclasses for rendering.
    self.fp = fp

  def NewFile(self, path):
    """Begin reporting progress for a new path (no-op here)."""
    pass

  def Finish(self):
    """Finish progress reporting (no-op here)."""
    pass

  def Update(self, progress, total):
    """Update progress; see svn_ra_progress_notify_func_t (no-op here)."""
    pass
+
+
class TextDeltaProgressDots(TextDeltaProgressBase):
  """Progress reporter that prints one dot per transmitted file, like svn."""

  def __init__(self, total, fp):
    TextDeltaProgressBase.__init__(self, total, fp)
    # Print the banner immediately; dots follow as files go out.
    self.fp.write('Transmitting file data ')
    self.fp.flush()

  def NewFile(self, path):
    # One dot per file; path itself is not shown.
    out = self.fp
    out.write('.')
    out.flush()

  def Finish(self):
    # Terminate the dot line.
    self.fp.write('\n')
+
+
class TextDeltaProgressMeter(TextDeltaProgressBase):
  """text delta progress reporter that shows a progress meter"""

  def __init__(self, total, fp):
    TextDeltaProgressBase.__init__(self, total, fp)
    #: length of last line written to fp (used to clear it)
    self.last_line_len = 0
    #: number of files transmitted so far
    self.file_count = 0
    #: current average transmission rate
    self.average = 0
    #: current total bytes to be transferred
    self.total = 0
    #: current number of bytes transferred so far
    self.transmitted = 0
    #: progress from previous Update call
    self.last_progress = 0
    #: total bytes to be transferred from previous Update call
    self.last_total = 0
    #: time of last Update call
    self.last_time = int(time.time())
    #: actual progress to show, shared by Update and _Print
    self.progress = 0

  def NewFile(self, path):
    # Remember the path so _Print can include it in the meter line.
    self.path = path
    self.file_count += 1
    self._Print()

  def Finish(self):
    # Blank out the last meter line and return the cursor to column 0.
    self.fp.write('\r' + ' ' * self.last_line_len + '\r')

  def Update(self, progress, total):
    # TODO(epg): This can't help but be a little broken, simply due to
    # the way the ra layers report progress.  But maybe we can make it
    # better.  Anyway, here's how it works now:

    # ra-local: Reports no progress at all.  This is lame, as even
    # local commits can take a long time, if the change is big enough.

    # ra-neon: Reports progress with resetting progress and total at
    # various times.  Given that we get a proper total for text delta
    # transmission, with increasing progress values up through total,
    # I think the total is the total to be transmitted for a single
    # HTTP request.

    # ra-serf: Reports no progress at all.  The underlying serf
    # library doesn't even support progress notification.

    # ra-svn: Reports progress starting at 0 and increasing with each
    # call, never resetting; total is never available.  It actually
    # counts sends and receives in the same number.  We don't get a
    # new total for individual files or operations.

    # We really can't do anything to improve the ra-svn situation,
    # except to ra-svn itself.  Unlike neon and serf, it's fully
    # streamy, and does not know the size of text deltas in advance,
    # though, so we'll never be able to get totals out of it.  So,
    # it's probably best just to leave it alone.

    # For neon and serf, we print a useful meter for individual files
    # that are large enough.  But, for a commit of a few hundred small
    # files, the rate is wrong, never getting over 1K despite that we
    # are actually transmitting multiple M/s.

    # Based on Tortoise's progress meter.
    # Same transfer continuing: count only the newly sent delta;
    # otherwise the layer reset its counters, so count it all.
    if progress >= self.last_progress and total == self.last_total:
      self.transmitted += progress - self.last_progress
    else:
      self.transmitted += progress
    self.last_progress = progress
    self.last_total = total

    # Recompute the rate at most once per second.
    now = int(time.time())
    if self.last_time < now:
      elapsed = now - self.last_time
      if elapsed == 0:
        # Defensive: unreachable while last_time < now, but keeps the
        # division below safe if the guard ever changes.
        elapsed = 1
      self.last_time = now
      # NOTE: Python 2 integer division; average is whole bytes/sec.
      self.average = self.transmitted / elapsed
      self.transmitted = 0
      self.total = total
      self.progress = progress
      self._Print()

  def _Print(self):
    """Print more to the meter."""

    # Based on Tortoise's progress meter.
    # Erase the previous line, then rewrite the whole meter.
    self.fp.write('\r' + ' ' * self.last_line_len)
    msg = ['[%d/%d]' % (self.file_count, self.file_total)]

    # Rate, scaled to B/K/M per second.
    if self.average < 1024:
      msg.append('%dB/s' % (self.average,))
    elif self.average < 1024 * 1024:
      msg.append('%.1fK/s' % (self.average / 1024.0,))
    else:
      msg.append('%.1fM/s' % (self.average / 1024.0 / 1024.0,))

    # Progress/total, only when the ra layer gave us a total.
    if (self.total > 0):
      if self.total < 1024:
        msg.append('%d/%dB' % (self.progress, self.total))
      elif self.total < 1024 * 1024:
        msg.append('%d/%dK' % (self.progress / 1024, self.total / 1024))
      elif self.total < 1024 * 1024 * 1024:
        msg.append('%d/%dM' % (self.progress / 1024 / 1024,
                               self.total / 1024 / 1024))

    msg.append(self.path)
    msg = ' '.join(msg)
    self.last_line_len = len(msg)
    self.fp.write('\r' + msg)
    self.fp.flush()
+
+
+class Context(object):
+  """Context for a command: Project, WorkingCopy, Pool, callbacks, ...
+
+  Just reference .project and .wc when you need them; they'll be
+  automatically found based on the current directory (or operands; see
+  next paragraph) or --project options, and setup with an auth baton.
+
+  .wc_operands indicates whether the operands from the command line
+  represent working copy paths.  If True (the default) and all
+  operands are absolute paths, the deepest common of those paths is
+  used to find the working copy, rather than the current directory.
+
+  Also, if .wc_operands, the operands are converted to internal style.
+
+  If Context knows about a working copy (because you've accessed .wc
+  or accessing .project caused a working copy to be found), then .path
+  is the current path within the working copy (e.g. /tmp/wc/src is
+  current directory, /tmp/wc is the top of the working copy, so .path
+  is 'src').
+
+  Call .Finish when finished with the object.
+
+  """
+
  def __init__(self, options, operands):
    """Initialize from parsed command-line options and operands.

    Arguments:
    options   -- optparse.Values from OptionParser
    operands  -- non-option command-line arguments
    """
    self.options = options
    self._operands = operands

    self._stderr = sys.stderr

    # Lazily-computed state backing the properties below; None until
    # first access.
    self._processed_operands = None
    self.wc_operands = True
    self._path = None
    self._project = self._project_config = self._wc = None
    self.pool = svn.core.Pool()
    self.config = gvn.config.Get(configdir=options.gvn_config_dir,
                                 svn_configdir=options.config_dir,
                                 pool=self.pool)

    # Initialize with --username optarg if specified.
    self._username = options.username

    #: current progress reporting object; only text deltas for now
    self.progress = None
    #: number of files we'll be transmitting
    self.txdelta_count = 0

    self._encoding = None

    self.UpdateConfigFromCommandLine()
+
+  def UpdateConfigFromCommandLine(self):
+    # Why should these be treated differently from encoding?  Maybe any
+    # setting settable via command line and config file should be accessed
+    # as ctx.foo rather than ctx.{config,options}.foo.
+    if self.options.no_auth_cache:
+      self.config.store_auth_creds = False
+    if self.options.editor_cmd is not None:
+      self.config.editor_command = self.options.editor_cmd
+
  def _GetEncoding(self):
    """Return the user's encoding, validating it on first access."""
    # The only real reason we treat this one differently from the above
    # two is that it needs validation, but only if we're trying to do an
    # operation that actually needs encoding.
    if self._encoding is None:
      # --encoding on the command line wins over the config file.
      if self.options.encoding is None:
        encoding = self.config.encoding
      else:
        encoding = self.options.encoding
      # It's a shame Python uses such a generic exception for such an
      # important error.  Translate it to something sane so we can
      # give the user a good error message.
      try:
        codecs.lookup(encoding)
      except LookupError:
        raise gvn.errors.UnknownEncoding(encoding)
      self._encoding = encoding
    return self._encoding
  encoding = property(_GetEncoding)
+
+  def _GetProject(self):
+    if self._project is None:
+      self._FindProject()
+    return self._project
+  project = property(_GetProject)
+
+  def _GetProjectConfig(self):
+    if self._project_config is None:
+      self._FindProject()
+    return self._project_config
+  project_config = property(_GetProjectConfig)
+
+  def _GetOperands(self):
+    if self.wc_operands:
+      if not self._processed_operands:
+        self._operands = [svn_path_internal_style(x, self.pool)
+                          for x in self._operands]
+        self._processed_operands = True
+    return self._operands
+  operands = property(_GetOperands)
+
+  def _GuessWCPath(self):
+    if self.wc_operands:
+      # If operands are wc paths, guess from those if absolute.
+      absolute = relative = False
+      for i in self.operands:
+        if os.path.isabs(i):
+          absolute = True
+        else:
+          relative = True
+      if absolute:
+        if relative:
+          raise gvn.errors.MixedPaths
+        # Operands are all absolute; use common prefix.
+        return gvn.util.CommonPrefix(self.operands)
+      prefix = gvn.util.CommonPrefix(self.operands)
+      if prefix != '':
+        # Operands are all relative and prefix is more than just '';
+        # use cwd/prefix.
+        return '/'.join([svn_path_internal_style(os.getcwd(), self.pool),
+                         prefix])
+    # Operands are not wc paths or are relative and prefix is ''; use cwd.
+    return svn_path_internal_style(os.getcwd(), self.pool)
+
  def _GetWC(self):
    """Return the working copy object, locating it on first access.

    The first call also fills in the project and project config from
    the working copy (unless something already set them) and wires up
    the RA callbacks.

    Raises:
    gvn.errors.NotWC -- presumably from FindWorkingCopy when no
                        working copy is found; _FindProject catches
                        this (TODO confirm the raising path)
    """
    if self._wc is None:
      (self._wc,
       self._path) = gvn.wc.FindWorkingCopy(self._GuessWCPath(),
                                           cancel_func=self.Cancel,
                                           notify_func=self.Notify,
                                           config=self.config,
                                           pool=self.pool)
      if self._project is None:
        # First discovery: adopt the wc's project and hook up auth,
        # progress, and cancellation callbacks.
        self._project = self._wc.project
        self._project_config = self._wc.project_config
        self._SetRaCallbacks()
    return self._wc
  wc = property(_GetWC)
  path = property(lambda self: self._GetWC() and self._path,
doc="""WC-relative path that was used to find the wc.

    This is determined by what _GuessWCPath discovers.  If the
    operands are wc paths, then this is the longest common prefix of
    them (which may be '').  If not, this is ''.
    """)
+
+  def Username(self, default):
+    """Return user-input username if any, else default.
+
+    If --username was specified, this returns that until overridden by
+    a username entered at an auth prompt.  Whether --username was
+    given or not, if the user entered a username at an auth prompt and
+    authentication was successful, this returns that username.
+    """
+    if self._username is None:
+      self._username = default
+    return self._username
+
+  def _SetUsername(self, username):
+    """Set self._username to username ."""
+    self._username = username
+
+  def _SetRaCallbacks(self):
+    auth_baton = gvn.svnauth.GetAuthBaton(
+      self.Username(self.project_config.username),
+      not self.options.non_interactive, self.config, self.pool,
+      username_cb=self._SetUsername)
+    self.project.repository.ra_callbacks.SetAuthBaton(auth_baton)
+    self.project.repository.ra_callbacks.progress_func = self.Progress
+    self.project.repository.ra_callbacks.cancel_func = self.Cancel
+
+  def DisplayPath(self, path):
+    """Return path suitable to display to the user from path.
+
+    If self.wc.AbsolutePath(path) is a child of the current working
+    directory, the display path is relative to that.  The display path
+    is in local style.
+
+    Arguments:
+    path -- path relative to working copy top
+    """
+    try:
+      result = gvn.util.RelativePath(svn_path_internal_style(os.getcwd(),
+                                                             self.pool),
+                                     self.wc.AbsolutePath(path))
+    except gvn.errors.PathNotChild:
+      result = path
+    return svn_path_local_style(result, self.pool)
+
+  def ValidateChangeName(self, cb):
+    if cb.Exists():
+      # Don't harass user if the changebranch already exists.
+      return
+    if self.options.force_change:
+      # Bypass these checks with --force-change.
+      return
+    if cb.name == 'c':
+      # A changebranch named c?  The user probably meant --cc.
+      raise gvn.errors.ChangeIsC
+    if os.path.exists(cb.name):
+      # An changebranched named after a file?  Probably a mistake.
+      raise gvn.errors.ChangeIsPath(cb.name)
+
+  def Finish(self):
+    """Finish working with the object.  Saves WorkingCopy state."""
+    if self._wc is not None:
+      self._wc.Save()
+
+  def Cancel(self):
+    """Implements svn_cancel_func_t."""
+    # TODO(epg): yeah...
+#     if should cancel:
+#       return svn.core.SVN_ERR_CANCELLED
+    return None
+
+  def Progress(self, progress, total, pool):
+    """Implements svn_ra_progress_notify_func_t."""
+    if self.progress is not None:
+      self.progress.Update(progress, total)
+
  def Notify(self, n, pool=None):
    """Notification callback for wc and commit operations.

    n is one of three things: a plain string message (gvn-internal
    convention), the gvn.wc.notify_postfix_txdeltas_completed marker,
    or an svn notify object carrying .action, .kind, and .path.
    Output is suppressed entirely under --quiet.
    """
    # TODO(epg): OOPS, fool, these Notify needs to write to stdout!

    if self.options.quiet:
      return

    # TODO(epg): Notifying with string messages is lame, and sucks for
    # any gvn users but gvn itself.  Use appropriate notify actions,
    # inventing new ones if necessary.
    if isinstance(n, basestring):
      self._stderr.write(n)
      return

    # TODO(epg): Make this into a new svn_wc_notify_action_t value.
    if n == gvn.wc.notify_postfix_txdeltas_completed:
      # All text deltas sent; tear down the progress meter.
      # TODO(epg): Make .Finish setup an alarm to print a
      # "Finalizing... |/-|\" spinner until gvn.wc.notify_committed or
      # whatever, then we'd set .progress = None.
      self.progress.Finish()
      self.progress = None
      return

    if n.action == svn.wc.notify_commit_postfix_txdelta:
      # Transmitting a file's text: lazily create a progress meter
      # (fancy when stderr is a tty, plain dots otherwise) sized by
      # the txdelta_count accumulated below.
      if self.progress is None:
        if gvn.util.isatty(self._stderr):
          klass = TextDeltaProgressMeter
        else:
          klass = TextDeltaProgressDots
        self.progress = klass(self.txdelta_count, self._stderr)
      self.progress.NewFile(self.DisplayPath(n.path).encode(self.encoding))
      return

    if (n.action in [svn.wc.notify_commit_added,
                     svn.wc.notify_commit_modified]
        and n.kind == svn_node_file):
      # Count added or modified files for use by TextDeltaProgress
      # when we start transmitting text deltas.
      self.txdelta_count += 1

    # NOTE(review): an action outside this map would raise KeyError
    # below; confirm all reachable commit actions are covered.
    action_messages = {
      svn.wc.notify_commit_added:    'Adding',
      svn.wc.notify_commit_deleted:  'Deleting',
      svn.wc.notify_commit_modified: 'Sending',
      svn.wc.notify_commit_replaced: 'Replacing',
      }
    self._stderr.write('%-14s %s\n' % (action_messages[n.action],
                                       self.DisplayPath(n.path).encode(self.encoding)))
+
  def _FindProject(self):
    """Locate the Project and ProjectConfig, storing them on self.

    With --project, resolution order is: a project file of that name,
    then a project file for that URL, then a ProjectConfig fabricated
    from the value as a bare URL.  Without --project: the working
    copy's project; failing that, the user's default project.
    """
    pconfig = None
    if self.options.project is not None:
      try:
        # Start by trying a project file named self.options.project .
        pconfig = self.config.ProjectByName(self.options.project)
      except gvn.errors.InvalidProjectName:
        try:
          # Next try a project file for the URL self.options.project .
          pconfig = self.config.ProjectByURL(self.options.project)
        except gvn.errors.NoProject:
          # Finally, just create a ProjectConfig from whole cloth
          # using self.options.project as the URL.
          pconfig = gvn.config.ProjectConfig(self.Username(self.config.default_username),
                                             self.options.project)
      project = gvn.project.Project(self.Username(pconfig.username), pconfig.URL,
                                 self.config, self.pool)
    else:
      try:
        # _GetWC sets self._project/_project_config itself on success,
        # so we can simply return.
        return self._GetWC()
      except gvn.errors.NotWC:
        # Construct ProjectConfig and Project objects for the default
        # project; if the user has no default project, the user sees
        # the no default project error.  epg thinks this block looks
        # kinda out of place in this file.  Seems we need some
        # function to return the Project and ProjectConfig.
        pconfig = self.config.ProjectDefault()
        project = gvn.project.Project(self.Username(pconfig.username), pconfig.URL,
                                      self.config, self.pool)

    self._project = project
    self._project_config = pconfig
    self._SetRaCallbacks()
+
+  def GetDiffCallbacks(self, pool=None):
+    """Return user's configured gvn.changebranch.DiffCallbacks implementation.
+    """
+    if pool is None:
+      pool = self.pool
+    stdout = gvn.platform.StrippedCRFile(sys.stdout)
+    if self.config.diff_command in [None, 'internal']:
+      callbacks = gvn.changebranch.SvnDiffCallbacks(self.options.extensions,
+                                                    stdout, self.encoding,
+                                                    pool)
+    elif self.config.diff_command == 'tkdiff':
+      callbacks = gvn.changebranch.TkDiffCallbacks(self.config.diff_command,
+                                                   self.encoding)
+    else:
+      stdout.flush()
+      callbacks = gvn.changebranch.SvnDiffCallbacks(
+        self.options.extensions, stdout, self.encoding, pool,
+        command=self.config.diff_command)
+    return callbacks
+
+
def AddCommand(name, *args, **kwargs):
  """Register a new Command in the global tables.

  Arguments:
  name    -- name of the new Command
  aliases -- optional list of aliases for the Command

  Any further arguments are passed through to the Command constructor.

  """

  global CommandNames, NameToCommand
  CommandNames.append(name)
  cmd = Command(name, *args, **kwargs)
  NameToCommand[name] = cmd
  # TODO(epg): Bleh, this command/aliases stuff needs reworking; we
  # handle it here and in the Command class...
  for alias in kwargs.get('aliases', []):
    NameToCommand[alias] = cmd
+
def AssertValidSvnCommand(subcommand):
  """Raise gvn.errors.BadCommand unless subcommand is a real svn command."""
  for (canonical, aliases) in gvn.svncmdline.Subcommands.iteritems():
    if subcommand == canonical or subcommand in aliases:
      return
  raise gvn.errors.BadCommand(subcommand)
+
def RunSvnCommand(ctx, subcommand, operands, runner=gvn.svncmd.RunSvn):
  """Run an svn command.

  Arguments:
  ctx        -- cmdline.Context object
  subcommand -- svn subcommand to run
  operands   -- operands from OptionParser.parse_args
  runner     -- for unit testing; ignore
  """

  AssertValidSvnCommand(subcommand)

  # If any operands are working copies, svn will figure out what to
  # do; were we to leave this True, we'd mangle 'gvn log -v //' to
  # 'gvn log -v /', "short URL" processing would therefore never
  # occur, and we'd run completely the wrong svn command.
  ctx.wc_operands = False

  # TODO(epg): Consider having svncmd.RunSvn take separate subcommand
  # name, list of options and list of operands.  Let it worry about
  # inserting the command and -- protecting the operands.

  cmd = [subcommand]

  # Pass every svn option straight through; svn itself rejects any
  # that are invalid for this subcommand.  gvn-only options stay
  # behind, and --config-dir gets special treatment just below.
  for name in ctx.options.gvn_options:
    if name in GvnOptions or name == 'config-dir':
      continue
    cmd.append('--' + name)
    if SvnOptions[name].has_argument:
      cmd.append(getattr(ctx.options, name.replace('-', '_')))

  cmd.extend(['--config-dir', ctx.config.svn_config_dir])

  # '--' protects any operands that start with dashes.
  cmd.append('--')

  # Massage any // pseudo-URL operands into real URLs.
  for operand in operands:
    try:
      cmd.append(ctx.project.repository.ShortToLongURL(operand))
    except (gvn.errors.NoProject, gvn.errors.NotShortURL):
      cmd.append(operand)

  return runner(cmd)
+
def Run(options, argv):
  """Run a subcommand.

  Arguments:
  options -- options from OptionParser.parse_args
  argv    -- subcommand + operands

  """

  if len(argv) == 0:
    raise gvn.errors.BadOperands("Type 'gvn help' for usage.")
  subcommand = argv.pop(0)

  # TODO(epg): Remove this compat in next release.
  if subcommand == 'ack':
    sys.stderr.write("WARNING: 'ack' is deprecated; use 'approve' instead.\n")
    subcommand = 'approve'

  ctx = Context(options, argv)
  cmd = NameToCommand.get(subcommand)
  if cmd is None:
    # Not a gvn subcommand; maybe svn knows it.
    return RunSvnCommand(ctx, subcommand, argv)

  try:
    return cmd(ctx)
  except:
    # Name the failing subcommand, then let the error propagate to
    # main's exception handling.
    sys.stderr.write('gvn %s failed:\n' % (subcommand,))
    raise
+
+
class Command(object):
  """A gvn subcommand: implementation, help text, and accepted options."""

  def __init__(self, name, impl, helptext, options=None, options_help=None,
               aliases=None):
    """Initialize Command from str, callable, str.

    Arguments:
    name         -- name
    impl         -- callable implementing the subcommand
    helptext     -- help text
    options      -- list of accepted options
    options_help -- map of option names to help text, overriding the
                    standard help text for an option
    aliases      -- optional list of aliases

    The order of the options is retained for the help text, so callers
    may control the ordering and grouping of option help.  The global
    options 'config-dir' and 'gvn-config-dir' are always added at the
    end; do not pass them in.

    """

    self.name = name
    self.impl = impl
    self.helptext = helptext
    # Use None sentinels and copy the caller's containers: the old
    # mutable defaults ([], {}) were shared across instances, and
    # retaining the caller's objects let later mutation by the caller
    # silently change this Command.
    if options is None:
      options = []
    self.options = list(options) + [
      # --project is not here because some commands don't want that.
      'config-dir',
      'gvn-config-dir',
      ]
    if options_help is None:
      self.options_help = {}
    else:
      self.options_help = dict(options_help)
    if aliases is None:
      self.aliases = []
    else:
      self.aliases = list(aliases)

  def __call__(self, ctx):
    """Validate ctx's options against self.options, then run the command.

    Raises:
    gvn.errors.BadOptions -- an option on the command line is not
                             accepted by this subcommand
    """
    bad_options = set(ctx.options.gvn_options).difference(self.options)
    if len(bad_options) > 0:
      raise gvn.errors.BadOptions("Subcommand '%s' doesn't accept %s\n"
                                  "Type 'gvn help %s' for usage."
                            % (self.name, ' '.join(sorted(bad_options)),
                               self.name))
    result = self.impl(ctx)
    # Persist working copy state once the command succeeds.
    ctx.Finish()
    return result

  def Help(self):
    """Return the full help text: helptext plus the valid-options list."""
    result = [self.helptext]
    result.append('Valid options:')

    for option_name in self.options:
      summary = ['--']
      summary.append(option_name)
      option = GvnOptions.get(option_name, SvnOptions.get(option_name))
      if option.short is not None:
        # Put the short option first.
        summary.insert(0, '-')
        summary.insert(1, option.short)
        # And bracket the long option.
        summary.insert(2, ' [')
        summary.append(']')

      if option.has_argument:
        # svn help has no clever meta-variables like we could have,
        # only 'arg'.  Do we want to be different?
        summary.append(' ARG')
      result.append('  %-22s: %s' % (''.join(summary),
                                     self.options_help.get(option_name,
                                                           option.help)))

    return '\n'.join(result)
+
+def Help(operands):
+  if len(operands) > 0 and operands[0] == 'help':
+    operands.pop(0)
+
+  if len(operands) == 0:
+    commands = {}
+    for (name, aliases) in gvn.svncmdline.Subcommands.iteritems():
+      commands[name] = ("   %s" % (name,))
+      if len(aliases) > 0:
+        commands[name] = '%s (%s)' % (commands[name],
+                                      ', '.join(a for a in aliases))
+    for name in CommandNames:
+      cmd = NameToCommand[name]
+      if cmd.helptext is None:
+        # skip alias objects
+        continue
+      commands[name] = ("   %s" % (name,))
+      if len(cmd.aliases) > 0:
+        commands[name] = '%s (%s)' % (commands[name],
+                                      ', '.join(a for a in cmd.aliases))
+
+    print 'usage: gvn help <subcommand>'
+    print
+    print 'Available subcommands:'
+    for name in sorted(commands):
+      print commands[name]
+
+  else:
+    for subcommand in operands:
+      # TODO(epg): Remove this compat in next release.
+      if subcommand == 'ack':
+        sys.stderr.write("WARNING: 'ack' is deprecated; use 'approve' instead.\n")
+        subcommand = 'approve'
+
+      try:
+        cmd = NameToCommand[subcommand]
+      except KeyError:
+        AssertValidSvnCommand(subcommand)
+        gvn.svncmd.RunSvn(['help', subcommand])
+      else:
+        print cmd.Help()
+        print
+
+  return 0
+
+
def main(argv):
  """gvn entry point: parse options, dispatch, map errors to exit codes.

  Arguments:
  argv -- full process argv; argv[0] (the program name) is discarded

  Returns an int exit code.
  """
  diag = False

  try:
    parser = OptionParser()
    (options, argv) = parser.parse_args(argv[1:])

    diag = options.diag
    if diag:
      # --diag: route diagnostic logging to stderr and echo the svn
      # commands gvn runs.
      gvn.DiagLog = lambda f,*a: sys.stderr.write(f % a)
      gvn.svncmd.EnableCommandDebug()

    if options.version:
      print 'gvn %s\n' % (gvn.VERSION,)
      print "Using 'svn' at '%s'" % (gvn.SVN,)
      print
      gvn.svncmd.RunSvn(['--version'])
      return 0

    if options.help or len(argv) > 0 and argv[0] == 'help':
      return Help(argv)

    return Run(options, argv)

  except gvn.errors.Cmdline, e:
    # Bad usage: terse message plus a usage pointer.
    if diag:
      traceback.print_exc()
    else:
      print >>sys.stderr, e
    print "Type 'gvn -h' for usage."
    return e.code
  except gvn.errors.User, e:
    # User-level errors carry their own messages and exit codes.
    if diag:
      traceback.print_exc()
      print >>sys.stderr, e.diag_message
    else:
      print >>sys.stderr, e
    return e.code
  except svn.core.SubversionException, e:
    if diag:
      traceback.print_exc()
    # Walk the chain of nested svn errors, printing each message.
    while e is not None:
      try:
        msg = e.message
        e = e.child
      except AttributeError:
        # old-style SubversionException, no children
        msg = e.args[0]
        e = None
      sys.stderr.write('gvn: %s\n' % (msg,))
    # svn just exits 1; we might want to capture the most common
    # ones and turn them into gvn.errors.User exceptions with their
    # own exit codes, e.g. catch out-of-dateness in gvn.commit.Drive
    # and raise gvn.errors.OutOfDate .
    return 1
  except gvn.errors.Internal, e:
    traceback.print_exc()
    return e.code
  except KeyboardInterrupt:
    return 1
  except IOError, e:
    # EPIPE (e.g. 'gvn ... | head') is routine; report it briefly.
    if e.errno == EPIPE:
      if diag:
        traceback.print_exc()
      sys.stderr.write('gvn: %s\n' % (e,))
    else:
      traceback.print_exc()
    return gvn.errors.Internal.code

  except UnicodeError, e:
    # Wrap so the user sees a gvn encoding error, not a bare traceback.
    e = gvn.errors.Encoding(e)
    if diag:
      traceback.print_exc()
      print >>sys.stderr, e.diag_message
    else:
      print >>sys.stderr, e
    return e.code

  except:
    # Anything else is a gvn bug; show the traceback.
    traceback.print_exc()
    return gvn.errors.Internal.code
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/commit.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/commit.py
new file mode 100644
index 0000000..a22061e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/commit.py
@@ -0,0 +1,213 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""commit - classes for committing to a repository
+
+This module provides facilities to commit changes to a repository.
+
+Classes:
+EditorAction -- base class for commit editor actions
+Mkdir        -- add a directory
+CopyDir      -- copy a directory
+Delete       -- delete a node
+
+Functions:
+Drive        -- create and drive a commit editor
+
+"""
+
+import sys
+
+import gvn
+
+import svn.delta
+import svn.ra
+
+from svn.core import SWIG_SVN_INVALID_REVNUM as SVN_INVALID_REVNUM
+
+
class EditorAction(object):
  """Base class for commit editor actions; the base itself does nothing.

  Instances are callable with the arguments below.

  Arguments:
  parent  -- parent baton
  path    -- path to edit
  editor  -- svn_delta_editor_t
  pool    -- memory pool
  """

  def DiagLog(self, format, *args):
    """Call gvn.DiagLog, prefixing format with this action's class name."""
    cls = type(self)
    prefix = '%s.%s' % (cls.__module__, cls.__name__)
    return gvn.DiagLog(prefix + format, *args)
+
class Mkdir(EditorAction):
  """Editor action that adds a brand-new directory.

  Raises:
  SubversionException.apr_err==???
  ???

  """

  def __init__(self, revision):
    self.revision = revision

  def __call__(self, parent, path, editor, pool):
    # Not a copy, so there is no copyfrom source or revision.
    add_args = (path, parent,
                None, SVN_INVALID_REVNUM, # copyfrom
                pool)
    self.DiagLog('(%d) => add_directory%s\n', self.revision, add_args)
    return editor.add_directory(*add_args)
+
class CopyDir(EditorAction):
  """Editor action that adds a directory copied from existing history.

  Raises:
  SubversionException.apr_err==SVN_ERR_FS_NOT_DIRECTORY
  SubversionException.apr_err==SVN_ERR_RA_DAV_PATH_NOT_FOUND
  SubversionException.apr_err==???
  ???

  """

  def __init__(self, copyfrom_url, copyfrom_revision,
               replace=False, replace_revision=SVN_INVALID_REVNUM):
    """Initialize.

    Arguments:
    copyfrom_url      -- internal_style str URL to copy from
    copyfrom_revision -- int revision to copy from
    replace           -- if True, delete the target path first
                         (default False)
    replace_revision  -- if replace, revision number passed to
                         delete_entry (default SVN_INVALID_REVNUM)
    """
    self.copyfrom_url = copyfrom_url
    self.copyfrom_revision = copyfrom_revision
    self.replace = replace
    self.replace_revision = replace_revision

  def __call__(self, parent, path, editor, pool):
    self.DiagLog('(%s, %d, %s, %s) => ',
                 self.copyfrom_url, self.copyfrom_revision,
                 self.replace, self.replace_revision)
    if self.replace:
      # Replacement: clear the existing target before the copy.
      delete_args = (path, self.replace_revision, parent, pool)
      gvn.DiagLog('delete_entry%s; ', delete_args)
      editor.delete_entry(*delete_args)
    add_args = (path, parent, self.copyfrom_url, self.copyfrom_revision, pool)
    gvn.DiagLog('add_directory%s\n', add_args)
    return editor.add_directory(*add_args)
+
class Delete(EditorAction):
  """Editor action that deletes the node at the driven path.

  Raises:
  SubversionException.apr_err==SVN_ERR_FS_NOT_DIRECTORY
  SubversionException.apr_err==SVN_ERR_RA_DAV_PATH_NOT_FOUND
  SubversionException.apr_err==???
  ???

  """

  def __init__(self, revision):
    self.revision = revision

  def __call__(self, parent, path, editor, pool):
    del_args = (path, self.revision, parent, pool)
    self.DiagLog('(%d) => delete_entry%s\n', self.revision, del_args)
    editor.delete_entry(*del_args)
    # A deletion yields no child baton for the driver to descend into.
    return None
+
def Drive(repo, revprops, revision, paths, action_cb,
          postfix_txdelta_cb=None, pool=None):
  """Create and drive a commit editor; return svn_commit_info_t .

  If any revprop values are text (e.g. for svn:log), it must be a
  utf-8 encoded str, not unicode.  Callers must encode themselves
  because this function cannot know which values should be text.  It
  could accept unicode *keys* and encode those, but since the caller
  must already encode values, it seems pointless.  See also
  gvn.repository.Revision.

  Arguments:
  repo        -- Repository object
  revprops    -- dict mapping utf-8 encode str (revprop name) to a str of
                 arbitrary binary data (revprop value)
  revision    -- base revision
  paths       -- paths to commit
  action_cb             -- required callback(path, pool) that returns
                           an EditorAction for a path from paths
  postfix_txdelta_cb    -- optional callback(editor, edit_baton, pool)
                           called just before close_edit (usually
                           PostfixTextDeltaManager.Transmit)
  pool        -- memory pool

  Raises:
  SubversionException.apr_err==SVN_ERR_FS_NOT_DIRECTORY
  SubversionException.apr_err==SVN_ERR_RA_DAV_PATH_NOT_FOUND
  SubversionException.apr_err==???
  ???

  """

  # One-element list gives commit_cb a mutable cell to deliver the
  # commit info through (no 'nonlocal' in this Python).
  commit_info = [None]
  def commit_cb(_commit_info, pool):
    commit_info[0] = _commit_info
  (editor, edit_baton) = svn.ra.get_commit_editor3(repo.ra,
                                                   revprops,
                                                   commit_cb,
                                                   None,  # lock_tokens
                                                   False, # keep_locks
                                                   pool)
  try:
    # Hang onto dir batons in this stack, popping them out when we
    # leave a directory.  This is because svn (at least as of r31145
    # which is in 1.5.0) does not take a reference to the baton (how
    # could it?), it just borrows ours.
    dir_batons = []
    def driver_cb(parent, path, pool):
      # Run the EditorAction for this path; it returns the new
      # directory baton (None for e.g. Delete).
      dir_baton = action_cb(path, pool)(parent, path, editor, pool)
      try:
        (last_path, last_baton) = dir_batons[-1]
      except IndexError:
        last_path = None
      # Stepped out of the previous directory: drop its baton.
      if last_path is not None and not gvn.util.IsChild(path, last_path):
        dir_batons.pop(-1)
      dir_batons.append((path, dir_baton))
      return dir_baton
    svn.delta.path_driver(editor, edit_baton, revision, paths,
                          driver_cb, pool)
    if callable(postfix_txdelta_cb):
      postfix_txdelta_cb(editor, edit_baton, pool)
    editor.close_edit(edit_baton, pool)
  except:
    # Save full exception info so we can completely ignore abort_edit errors.
    (exc_type, exc_val, exc_tb) = sys.exc_info()
    try:
      editor.abort_edit(edit_baton, pool)
    except:
      # We already have an exception in progress, not much we can do
      # about this.  TODO(epg): Should we at least print it, before
      # discarding it?  If that's too confusing, maybe only in diag.
      pass
    # I'm assured by my trusted Python guru friend that just 'raise e'
    # will do the right thing (i.e. cause the original traceback, not
    # a new one from this line) in Python 3.
    raise exc_type, exc_val, exc_tb

  return commit_info[0]
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/config.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/config.py
new file mode 100644
index 0000000..2ea1288
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/config.py
@@ -0,0 +1,366 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""
+
+Classes:
+Config -- Ye Olde Configuration
+
+Functions:
+Get -- Return a new Config object, loaded from configdir and /etc (if system)
+
+"""
+
+
+import ConfigParser
+import os
+import re
+import socket
+
+from errno import ENOENT
+
+import svn.core
+from svn.core import SVN_CONFIG_OPTION_STORE_PASSWORDS
+from svn.core import SVN_CONFIG_OPTION_STORE_AUTH_CREDS
+from svn.core import SVN_CONFIG_SECTION_AUTH
+from svn.core import svn_config_get_bool, svn_config_merge, svn_config_read
+from svn.core import svn_config_get_config
+
+import gvn.errors
+import gvn.platform
+import gvn.util
+
+
+PROJECT_FILENAME_PAT = '^[A-Za-z0-9][A-Za-z0-9_-]*$'
+PROJECT_FILENAME_RE = re.compile(PROJECT_FILENAME_PAT)
+
+
+# These only exist for ease of testing.
+_GetDefaultUser = gvn.platform.GetUserName
+_GetDefaultSvnUserConfigDir = gvn.platform.GetSvnUserConfigDir
+_GetDefaultSvnSystemConfigDir = gvn.platform.GetSvnSystemConfigDir
+_GetDefaultGvnUserConf = gvn.platform.GetGvnUserConf
+_GetDefaultGvnSystemConf = gvn.platform.GetGvnSystemConf
+
+def _GetDomainName():
+  return '.'.join(socket.getfqdn().split('.')[-2:])
+
+_DEFAULT_STORE_AUTH_CREDS = True
+_DEFAULT_STORE_PASSWORDS = True
+
+
+class BaseConfig(object):
+  """Base class for configuration objects.
+
+  All option values live in the self._data dict; subclasses build
+  properties on top of the accessors below.
+  """
+
+  def __init__(self):
+    self._data = {}
+
+  # Accessors for self._data; _GetBool raises KeyError if name was
+  # never set.
+  def _GetBool(self, name):
+    return self._data[name]
+  def _SetBool(self, name, value):
+    self._data[name] = value
+  # The str accessors are the very same functions; the separate names
+  # exist only to document intent at the property definition sites.
+  _GetStr = _GetBool
+  _SetStr = _SetBool
+
+
+class ProjectConfig(BaseConfig):
+  """Per-project settings: URL, username, and email address/domain."""
+
+  def __init__(self, username=None, url=None, email_domain=None):
+    BaseConfig.__init__(self)
+    self.username = username
+    self.URL = url
+    self.email_domain = email_domain
+
+  URL = property(lambda s: s._GetStr('URL'),
+                 lambda s, val: s._SetStr('URL', val))
+  username = property(lambda s: s._GetStr('Username'),
+                      lambda s, val: s._SetStr('Username', val))
+
+  def _GetEmailAddress(self):
+    """Return the best email address for this project's user.
+
+    Search order: explicitly set address, $EMAIL, username (if it
+    already contains '@'), else username@<host DNS domain>.
+    """
+    try:
+      if self._data['email_address'] is not None:
+        return self._data['email_address']
+    except KeyError:
+      pass
+    try:
+      address = os.environ['EMAIL'].strip()
+      if len(address) > 0:
+        return address
+    except KeyError:
+      pass
+    if '@' in self.username:
+      return self.username
+    # NOTE(review): the stored email_domain is not consulted here; the
+    # fallback uses the host's DNS domain instead -- confirm intended.
+    return '@'.join([self.username, _GetDomainName()])
+  email_address = property(_GetEmailAddress,
+                           lambda s, val: s._SetStr('email_address', val))
+  email_domain = property(lambda s: s._GetStr('email_domain'),
+                          lambda s, val: s._SetStr('email_domain', val))
+
+
+class Config(BaseConfig):
+  """Top-level gvn configuration.
+
+  Combines built-in defaults, the gvn system/user config files, and
+  the user's svn configuration (see Get at the bottom of this module
+  for the load order).
+  """
+
+  def __init__(self):
+    BaseConfig.__init__(self)
+
+    # All supported options; None means "not yet set".
+    self._data = {
+        'default_username': None,
+        'svn_config_hash': None,
+        'svn_config_dir': None,
+        'store_auth_creds': None,
+        'store_passwords': None,
+        'smtp_server': None,
+        'smtp_user': None,
+        'smtp_password': None,
+        'diff_command': None,
+        'editor_command': None,
+        'encoding': None,
+        }
+
+    # Directories searched (in order) for project files; _Load
+    # prepends to this list, so later loads take precedence.
+    self.project_dirs = []
+
+  # TODO(epg): Get store_auth_creds and store_passwords from svn_config.
+  # It's bogus to parse the config files here after svn has already
+  # done it, but we can't seem to get the svn_config_t out of the
+  # apr_hash_t svn_config_get_config returns.
+  store_auth_creds = property(lambda s: s._GetBool('store_auth_creds'),
+                            lambda s, val: s._SetBool('store_auth_creds', val),
+   doc="""Whether auth info other than passwords are cached
+
+   e.g. ssl certificate trust""")
+  store_passwords = property(lambda s: s._GetBool('store_passwords'),
+                            lambda s, val: s._SetBool('store_passwords', val),
+                            doc="""Whether passwords are cached""")
+
+  def SetSvnConfig(self, value, pool):
+    """Load svn configuration from the directory named by value.
+
+    Also re-reads the system and user 'config' files directly to pick
+    up store_auth_creds/store_passwords (see the TODO above).
+    """
+    self._data['svn_config_hash'] = svn_config_get_config(value.encode('utf-8'),
+                                                          pool)
+    self._data['svn_config_dir'] = value
+
+    # See comment above store_auth_creds above; this is bogus.
+    # NOTE(review): the system path is not utf-8 encoded here, unlike
+    # the user path below -- confirm this is harmless for non-ASCII
+    # system config dirs.
+    self.svn_config = svn_config_read(os.path.join(_GetDefaultSvnSystemConfigDir(),
+                                              'config'),
+                              False,    # must_exist
+                              pool)
+    svn_config_merge(self.svn_config,
+                     os.path.join(self._data['svn_config_dir'],
+                                  'config').encode('utf-8'),
+                     False)             # must_exist
+
+    self.store_auth_creds = svn_config_get_bool(self.svn_config,
+                                              SVN_CONFIG_SECTION_AUTH,
+                                            SVN_CONFIG_OPTION_STORE_AUTH_CREDS,
+                                              _DEFAULT_STORE_AUTH_CREDS)
+    self.store_passwords = svn_config_get_bool(self.svn_config,
+                                              SVN_CONFIG_SECTION_AUTH,
+                                              SVN_CONFIG_OPTION_STORE_PASSWORDS,
+                                              _DEFAULT_STORE_PASSWORDS)
+
+  svn_config_hash = property(lambda s: s._GetStr('svn_config_hash'))
+  svn_config_dir = property(lambda s: s._GetStr('svn_config_dir'))
+
+  smtp_server = property(lambda s: s._GetStr('smtp_server'),
+                        lambda s, val: s._SetStr('smtp_server', val))
+  smtp_user = property(lambda s: s._GetStr('smtp_user'),
+                       lambda s, val: s._SetStr('smtp_user', val))
+  smtp_password = property(lambda s: s._GetStr('smtp_password'),
+                           lambda s, val: s._SetStr('smtp_password', val))
+  diff_command = property(lambda s: s._GetStr('diff_command'),
+                         lambda s, val: s._SetStr('diff_command', val))
+
+  def _GetSvnConfigOption(self, option, svn_section, svn_option, default=None):
+    """Return the value for option from run-time, svn, or default value.
+
+    If set at run-time (e.g. via environment variable or command-line
+    option) return that value, else if set in svn config return that,
+    else return default.
+
+    Arguments:
+    option      -- name of the option
+    svn_section -- name of the section in svn config
+    svn_option  -- name of the option in svn config
+    default     -- default if not set from svn config or at run-time
+    """
+    result = self._GetStr(option)
+    if result is None:
+      # Cache the svn-config (or default) value so later reads are cheap.
+      result = svn.core.svn_config_get(self.svn_config,
+                                       svn_section, svn_option, default)
+      self._SetStr(option, result)
+    return result
+
+  editor_command = property(lambda self: self._GetSvnConfigOption(
+                                         'editor_command',
+                                         svn.core.SVN_CONFIG_SECTION_HELPERS,
+                                         svn.core.SVN_CONFIG_OPTION_EDITOR_CMD,
+                                         gvn.platform.DEFAULT_EDITOR_COMMAND),
+                            lambda s, val: s._SetStr('editor_command', val))
+  encoding = property(lambda self: self._GetSvnConfigOption('encoding',
+                                       svn.core.SVN_CONFIG_SECTION_MISCELLANY,
+                                       svn.core.SVN_CONFIG_OPTION_LOG_ENCODING,
+                                       gvn.platform.DefaultEncoding()),
+                      lambda s, val: s._SetStr('encoding', val))
+
+  default_username = property(lambda s: s._GetStr('default_username'))
+
+  # XXX not sure about any of this config stuff, but least sure about
+  # these project parts
+  # XXX(pamg): Not positive about these project parts either, on Windows.
+
+  def ProjectFile(self, name, mode='r'):
+    """Return a file object for project name; mode is same as for file.
+
+    Searches self.project_dirs in order; first existing file wins.
+
+    Raises gvn.errors.NoProject if no project by this name.
+
+    """
+
+    for i in self.project_dirs:
+      try:
+        return open(os.path.join(i, name), mode)
+      except IOError, e:
+        # A missing file just means "try the next dir"; anything else
+        # (e.g. EACCES) is a real error.
+        if e.errno != ENOENT:
+          raise
+    raise gvn.errors.NoProject(name)
+
+  def ProjectByName(self, name):
+    """Return a ProjectConfig for the first project matching name.
+
+    Raises gvn.errors.InvalidProjectName if does not match the
+    filename constraining pattern.
+
+    Raises gvn.errors.NoProject if no project by this name.
+
+    """
+
+    if not PROJECT_FILENAME_RE.match(name):
+      raise gvn.errors.InvalidProjectName(name)
+
+    result = ProjectConfig()
+    parser = ConfigParser.RawConfigParser()
+    parser.readfp(self.ProjectFile(name))
+    # username, email_address, and email_domain are all optional.
+    try:
+      result.username = parser.get('project', 'username')
+    except ConfigParser.NoOptionError:
+      result.username = self.default_username
+    try:
+      result.email_address = parser.get('project', 'email_address')
+    except ConfigParser.NoOptionError:
+      result.email_address = None
+    try:
+      result.email_domain = parser.get('project', 'email_domain')
+    except ConfigParser.NoOptionError:
+      result.email_domain = None
+    # URL is required, though.
+    result.URL = parser.get('project', 'URL').rstrip('/')
+    return result
+
+  def ProjectByURL(self, url, testing=False):
+    """Return Project whose URL has the longest match for url.
+
+    Raises gvn.errors.NoProject if no project matches.
+
+    """
+
+    names = set()
+    for i in self.project_dirs:
+      try:
+        for name in os.listdir(i):
+          names.add(name)
+      except OSError, e:
+        # Nonexistent project dirs are fine; skip them.
+        if e.errno not in gvn.platform.ENOENT_codes:
+          raise
+
+    if testing:
+      # Deterministic iteration order for tests.
+      names = sorted(names)
+
+    found = None
+    for name in names:
+      try:
+        project = self.ProjectByName(name)
+      except gvn.errors.InvalidProjectName, e:
+        # Ignore invalid filenames and move on.
+        continue
+
+      if gvn.util.IsChild(url, project.URL):
+        if found is None or len(project.URL) > len(found.URL):
+          # Save this as the winning match if it matches longer than
+          # the last match or there is no last match yet.
+          found = project
+
+    if found is None:
+      raise gvn.errors.NoProject(url)
+    return found
+
+  def ProjectDefault(self):
+    """Return ProjectConfig instance for default project.
+
+    Raises gvn.errors.NoProject if no default project.
+
+    """
+
+    return self.ProjectByName('default')
+
+
+def _SetDefaults(config):
+  config._data['default_username'] = _GetDefaultUser()
+  config._data['svn_config_dir'] = _GetDefaultSvnSystemConfigDir()
+  config.smtp_server = 'smtp'
+  config.diff_command = 'internal'
+
+
+def _Load(config, configdir):
+  parser = ConfigParser.RawConfigParser()
+  fn = os.path.join(configdir, 'config')
+  if parser.read(fn) == [fn]:
+    # These settings are optional and should therefore not raise an Error.
+    for option in ['smtp_server', 'smtp_user', 'smtp_password',
+                   'diff_command']:
+      try:
+        setattr(config, option, parser.get('external', option))
+      except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
+        pass
+
+  config.project_dirs.insert(0, os.path.join(configdir, 'projects'))
+
+def Get(configdir=None, svn_configdir=None, system=True, pool=None):
+  """Return a new Config object, loaded from configdir and /etc (if system).
+
+  Arguments:
+  configdir     -- path to a gvn config directory (defaults to ~/.gvn)
+  svn_configdir -- path to a subversion config directory
+                   (defaults to ~/.subversion)
+  system        -- whether to load system config (/etc/gvn)
+  pool          -- memory pool
+
+  """
+
+  config = Config()
+  _SetDefaults(config)
+
+  # Load order gives the user config precedence over the system config:
+  # each _Load overwrites options it finds and prepends its projects dir.
+  if system:
+    _Load(config, _GetDefaultGvnSystemConf())
+
+  if configdir is None:
+    configdir = _GetDefaultGvnUserConf()
+  _Load(config, configdir)
+
+  if svn_configdir is None:
+    svn_configdir = _GetDefaultSvnUserConfigDir()
+  config.SetSvnConfig(svn_configdir, pool)
+
+  # TODO(epg): Belongs in cmdline.  Also --diff-cmd and GVNDIFF or
+  # something like that.
+  # First environment variable found wins.
+  for i in 'GVN_EDITOR', 'SVN_EDITOR', 'VISUAL', 'EDITOR':
+    try:
+      config.editor_command = os.environ[i]
+      break
+    except KeyError:
+      pass
+
+  return config
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/description.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/description.py
new file mode 100644
index 0000000..d2f47a1e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/description.py
@@ -0,0 +1,163 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import datetime
+import time
+
+import gvn.changebranch
+import gvn.util
+
+import svn.core
+
+from svn.core import SWIG_SVN_INVALID_REVNUM as SVN_INVALID_REVNUM
+
+
+class RevisionDescriptionImpl(object):
+  """Description backend for a plain repository revision.
+
+  Presents the same interface as ChangeBranchDescriptionImpl so that
+  Description can treat revisions and changebranches uniformly.
+  """
+
+  def __init__(self, config, project, revnum):
+    # May raise errors.NonExistentRevision; let it go.
+    (self._revision,) = project.repository.GetRevisions([''], revnum)
+
+  def Revision(self):
+    return self._revision.number
+
+  def Username(self):
+    return self._revision['svn:author']
+
+  def Date(self):
+    return self._revision['svn:date']
+
+  def ChangeBranchName(self):
+    # Plain revisions have no changebranch.
+    return None
+
+  def Description(self):
+    return self._revision['svn:log']
+
+  def AffectedPaths(self, pool):
+    # (path, action) pairs; pool is unused here but kept for interface
+    # parity with ChangeBranchDescriptionImpl.
+    return [(x.path, x.action) for x in self._revision.paths]
+
+
+# TODO(epg): Replace all this jazz with a gvn.changebranch.DiffCallbacks
+# class that prints the summary info before showing diff or running tkdiff.
+class ChangeBranchDescriptionImpl(object):
+  """Description backend for a changebranch (see RevisionDescriptionImpl)."""
+
+  def __init__(self, config, project, change_name):
+    self._project = project
+    (username, name, revision) = gvn.util.ParseChangeName(change_name)
+
+    self._cb = gvn.changebranch.ChangeBranch(config, self._project,
+                                             name, username, revision)
+    (self._revision,) = self._project.repository.GetRevisions(start=self._cb.revision.number)
+
+  def Revision(self):
+    # NOTE(review): returns the revision object itself, while
+    # RevisionDescriptionImpl.Revision returns its .number; Output's
+    # "r%s" formatting tolerates both, but confirm this is intended.
+    return self._revision
+
+  def Username(self):
+    return self._cb.username
+
+  def Date(self):
+    return self._revision['svn:date']
+
+  def ChangeBranchName(self):
+    return self._cb.change_name
+
+  def Description(self):
+    return self._cb.description
+
+  def AffectedPaths(self, pool):
+    """Generate (path, action) pairs for the changebranch, sorted by path."""
+    change_state = self._cb.GetRepoState(pool)
+    for (path, state) in sorted(change_state.paths.iteritems(),
+                                key=lambda x: x[0]):
+      yield ('/' + '/'.join([change_state.base_path, path]), state.action)
+
+
+class Description(object):
+  """A utility class to unify description formats.
+
+  Instantiate objects of this class with either a change branch
+  name or a revision number.
+  """
+
+  def __init__(self, config, project, change_or_revision):
+    self._impl = None
+    revnum = gvn.util.MatchRevision(change_or_revision)
+    if revnum is not None:
+      self._impl = RevisionDescriptionImpl(config, project, int(revnum))
+    else:
+      self._impl = ChangeBranchDescriptionImpl(config, project, change_or_revision)
+
+  def _TZHours(self):
+    return time.timezone/60/60
+
+  def _TZSeconds(self):
+    return time.timezone - self._TZHours()*3600
+
+  def _TZOffsetString(self):
+    sign = '-'
+    if time.timezone < 0:
+      sign = '+'
+    return "%s%02d%02d" % (sign, self._TZHours(), self._TZSeconds())
+
+  def Author(self):
+    # I would expect a KeyError when authz prevents the user from seeing
+    # the revision, but instead we get back None; cover both cases.
+    try:
+      author = self._impl.Username()
+    except KeyError:
+      author = None
+    if author is None:
+      return '(no author)'
+    return author
+
+  def Date(self):
+    # See Author comment about KeyError vs. returning None.
+    try:
+      date = self._impl.Date()
+    except KeyError:
+      date = None
+    if date is None:
+      return '(no date)'
+
+    dateobj = gvn.util.ConvertStringDateToDateTime(date)
+    dateobj -= datetime.timedelta(seconds=time.timezone)
+    datestring = dateobj.strftime("%Y-%m-%d %H:%M:%S %%s (%a, %d %b %Y)")
+    return datestring % (self._TZOffsetString(),)
+
+  def Log(self):
+    log = self._impl.Description()
+    if log is None:
+      return ''
+    return log
+
+  def Output(self, pool):
+    rev = "r%s" % self._impl.Revision()
+    if self._impl.ChangeBranchName():
+      rev = '*' + self._impl.ChangeBranchName()
+
+    header = ' | '.join([rev, self.Author(), self.Date()])
+
+    affected_paths = self._impl.AffectedPaths(pool)
+    affected_path_string = '\n'.join(["   %s /%s" % (a, p)
+                                      for (p, a) in affected_paths])
+    return '\n'.join([
+      header,
+      '',
+      'Description:',
+      '',
+      self.Log(),
+      '',
+      '',
+      'Affected Paths:',
+      affected_path_string,
+      '',
+      ])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/diff.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/diff.py
new file mode 100644
index 0000000..9043ba9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/diff.py
@@ -0,0 +1,83 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# TODO(epg): Reconsider this stuff.  GetShortDiff, for example, is
+# probably only used by mail, and it runs DiffBaseToSnapshot exactly
+# the way mail wants it; it probably belongs over there.
+
+"""Utility interfaces to calling svn diff
+
+Simple interfaces to isolate calls to the svn diff command.  The first
+handles all the ways that you can specify an external command and includes
+the options in the svn diff command, while the second provides only the
+first n lines of diff output.
+"""
+
+
+import gvn.changebranch
+import gvn.svncmd
+
+
+def RunSvnDiff(options, config, args):
+  """Run svn diff args, inserting --diff-cmd and --extensions arguments
+  if an external diff program is specified.
+
+  """
+
+  svn_args = ['diff']
+  diff_cmd = config.diff_command
+  # TODO(epg): Fix up, generalize, and use diff stuff from
+  # changebranch.py so we can use tkdiff.
+  if diff_cmd is not None and diff_cmd != 'internal':
+    diff_cmd_parts = diff_cmd.split(None, 1)
+    svn_args.extend(['--diff-cmd', diff_cmd_parts[0]])
+    if len(diff_cmd_parts) > 1:
+      svn_args.extend(['--extensions', diff_cmd_parts[1]])
+  svn_args.extend(args)
+  return gvn.svncmd.RunSvn(svn_args, options=options)
+
+def GetShortDiff(project, cb, pool):
+  """Return up to project.diff_lines lines of the ChangeBranch cb's diff.
+
+  Return value is a tuple of the diff (as a str) and an estimate of the number
+  of changed lines in the diff.
+
+  """
+
+  # Raised by DiffReader.write to abort diff production at the limit.
+  class truncate(Exception):
+    pass
+
+  shortdiff = []
+  # File-like sink handed to the diff callbacks; collects lines until
+  # the limit, then raises truncate to stop further diff work.
+  class DiffReader(object):
+    def write(self, line):
+      if len(shortdiff) < project.diff_lines:
+        shortdiff.append(line)
+        return
+      raise truncate
+  fp = DiffReader()
+  try:
+    options = ['-pu']
+    encoding = 'utf8'
+    callbacks = gvn.changebranch.SvnDiffCallbacks(options, fp,
+                                                  encoding, pool)
+    gvn.changebranch.DiffBaseToSnapshot(cb, callbacks, encoding, pool)
+  except truncate:
+    shortdiff.append('\n*** TRUNCATED TO %d lines ***\n'
+                     % (project.diff_lines,))
+
+  # Unified diffs: lines starting with + or - are changes.
+  # NOTE(review): this also counts '+++'/'---' file headers, hence
+  # "estimate" above.
+  changed_lines = [x for x in shortdiff if x[0] in ('+', '-')]
+  return (''.join(shortdiff).decode('utf-8'), len(changed_lines))
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/errors.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/errors.py
new file mode 100644
index 0000000..686bc9a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/errors.py
@@ -0,0 +1,403 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Exception classes used throughout gvn
+
+Exception classes all inherit from either User or Internal, both of
+which inherit from Root.  Internal errors represent programmer misuse
+of gvn APIs; User errors represent external errors resulting from user
+action, such as files in conflict, invalid server names, and so on.
+
+All classes have a code attribute, which may be used as an exit code,
+or other numeric error identification.  When gvn reaches 1.0, these
+will not change, for compatibility.
+
+All exception classes may be stringified (str(e) where e is an
+exception object) to produce an error message.  User errors have a
+diag_message property, which for some classes contains additional
+diagnostic information to be shown if the user desires.  It is mainly
+useful for debugging.  Since Internal represent programmer errors, the
+normal stringification message includes any diagnostic information;
+these have no diag_message property.
+
+Some classes have other attributes, used to compute the error message
+dynamically.  Only the final, outer-most gvn caller (e.g. the user
+interface) should reset these attributes to affect that message.  See
+each exception class for additional attributes.
+
+A common example is the path attribute, which contains the internal
+(unicode, unix path separator, relative to working copy root) form.
+If the target of the operation was not the working copy root, callers
+should reset this with the appropriate path (e.g. replace it with
+gvn.cmdline.Context.DisplayPath(path)).
+"""
+
+# TODO(epg): Probably some of these are stale...
+
+# TODO(epg): And of course all str here must be unicode, and
+# gvn.cmdline.main needs to get the Context itself and encode before
+# printing error messages.
+
+import os
+import platform
+
+import svn.core
+
+
+#: list of SubversionException.apr_err values which mean "out of date"
+SVN_OUT_OF_DATE = [svn.core.SVN_ERR_FS_TXN_OUT_OF_DATE,
+                   # different code with ra-dav, oddly enough
+                   svn.core.SVN_ERR_FS_CONFLICT]
+# list of SubversionException.apr_err values which mean "no such
+# path"; TODO(epg): we do this all over the place rather than using
+# this, but it looks like the fix for this crap is going to make it
+# into 1.5.0 so track it all down and remove it.
+SVN_NOENT = [svn.core.SVN_ERR_FS_NOT_FOUND,
+             svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND]
+
+
+class Root(Exception):
+  """Root of error class hierarchy
+
+  Attributes:
+  code          -- sys.exit code to use if caller considers the error fatal
+  """
+  # Subclasses override this; None here means "no code assigned".
+  code = None
+
+
+class User(Root):
+  """User errors are everything else, and not necessarily the user's
+  fault: connection refused, auth errors, bad gvn command-line
+  parameters, and so on.
+  """
+  # By default the diagnostic message is just the normal message;
+  # subclasses with extra context (e.g. OutOfDateParent) override it.
+  diag_message = property(lambda self: str(self),
+                       doc="""error message to use in debug/diagnostic mode""")
+
+
+class BadOptions(User):
+  """Exception to raise if options, or arguments to options, are invalid,
+  inappropriate, missing, misused, abused, or subject to corporate bribery.
+  """
+  # Exit code 2, the conventional "usage error" status.
+  code = 2
+
+
+class BadOperands(User):
+  """Exception to raise if an operand is similarly troublesome (see
+  BadOptions's docstring).
+  """
+  code = 3
+
+
+class InvalidChangeName(User):
+  """The given changebranch name is not valid."""
+  code = 4
+
+
+class NotWC(User):
+  """The given path is not a working copy."""
+  code = 5
+  def __init__(self, path):
+    User.__init__(self, "'%s' is not a working copy" % (path,))
+
+
+class Editor(User):
+  code = 6
+  def __init__(self, cmd, status):
+    User.__init__(self, platform.DescribeSubprocessError(cmd, status))
+
+
+class Unmodified(User):
+  """The target was not modified; the given status string says what."""
+  code = 7
+  def __init__(self, status):
+    User.__init__(self, 'not modified: ' + status)
+
+
+class Conflict(User):
+  """Tried to commit a path in conflict.
+
+  Attributes:
+  path       -- path in conflict, in internal form
+  """
+  code = 8
+  def __init__(self, path):
+    self.path = path
+  # Stringified lazily so the UI may first rewrite self.path into
+  # display form (see the module docstring).
+  def __str__(self):
+    return "'%s' remains in conflict" % (self.path,)
+
+
+class RepoPath(User):
+  code = 9
+  def __init__(self, path, revision):
+    User.__init__(self,
+                       "URL '%s' non-existent in revision %d" % (path,
+                                                                 revision))
+
+class NoProject(User):
+  """No configured project matches the given name or URL."""
+  code = 10
+  def __init__(self, project):
+    User.__init__(self, 'No project for ' + project)
+
+
+class ChangeIsPath(User):
+  """Tried to use local file name to name a changebranch.
+
+  Attributes:
+  name       -- attempted changebranch name
+  """
+  code = 11
+  def __init__(self, name):
+    self.name = name
+  # The message tells the user about the --force-change escape hatch.
+  def __str__(self):
+    return ("Change '%s' is a path name; use '--force-change' to override"
+            % (self.name,))
+
+
+class ChangeIsC(User):
+  """User named a changebranch 'c', most likely a typo for '--cc'."""
+  code = 28
+  def __init__(self):
+    User.__init__(self,
+                  ("Changebranch named 'c'; did you mean '--cc'?"
+                   "  Use '--force-change' to override"))
+
+
+class NoReviewers(User):
+  """A change mail was attempted with no reviewers specified."""
+  code = 12
+  def __init__(self):
+    User.__init__(self,
+                  "Must use --reviewers or fill in To field of the email form")
+
+
+class NoUserChangeBranch(User):
+  """The per-user changebranch path '//<user_path>' does not exist."""
+  code = 13
+  def __init__(self, user_path):
+    User.__init__(self, ("User changebranch path '//%s' does not exist."
+                         % (user_path,)))
+
+
+class InvalidProjectName(User):
+  code = 14
+  def __init__(self, project):
+    User.__init__(self, 'Project "' + project +
+                        '" is not a valid project name.')
+
+
+class NoChangeBranchBase(User):
+  """The project's changebranch base path is missing from the repository."""
+  code = 15
+  def __init__(self, base_path):
+    User.__init__(self, ("project changebranch base '//%s' does not exist."
+                         % (base_path,)))
+
+
+class MixedPaths(User):
+  """The given paths mix absolute and relative forms."""
+  code = 16
+  def __init__(self):
+    User.__init__(self, 'All must be absolute or relative')
+
+
+class OutOfDateParent(User):
+  """Tried to changebranch a path whose parent is out-of-date.
+
+  Attributes:
+  svn_error     -- svn.core.SubversionException object
+  branch_path   -- path of changebranch relative to repository root
+  revision      -- attempted copyfrom of changebranch
+  change_path   -- path missing in revision relative to source branch
+  """
+  code = 18
+  def __init__(self, svn_error, branch_path, revision, change_path):
+    self.svn_error = svn_error
+    self.branch_path = branch_path
+    self.revision = revision
+    self.change_path = change_path
+  # Diagnostic mode adds the underlying svn error and copyfrom details.
+  diag_message = property(lambda self: 'svn: %s\n'
+                          'Tried to branch from %s@%d\n'
+                          "but %s doesn't exist in that revision"
+                          % (self.svn_error.args[0], self.branch_path,
+                             self.revision, self.change_path))
+  def __str__(self):
+    return ("'%s/' is newer than '%s/'; try gvn update '%s'"
+            % (self.change_path, self.branch_path, self.branch_path))
+
+
+class BranchFromRoot(User):
+  """Tried to create a changebranch of the repository root itself."""
+  code = 19
+  def __init__(self):
+    User.__init__(self, 'Cannot branch from repository root')
+
+
+class Mail(User):
+  """Failure sending change mail (raise sites live in the mail code)."""
+  code = 20
+
+
+class Cmdline(User):
+  """Base class for command-line usage errors (e.g. BadCommand)."""
+  pass
+
+
+class BadCommand(Cmdline):
+  """Tried to use unknown command.
+
+  Attributes:
+  command    -- the unknown command
+  """
+  code = 21
+  def __init__(self, command):
+    self.command = command
+  def __str__(self):
+    return "Unknown command: '%s'" % (self.command,)
+
+
+# TODO(epg): Use these two instead of BadOptions and BadOperands above.
+# And document them.
+# class BadOperands(Cmdline):
+#   code = 22
+#   def __init__(self, operands, message='Not enough arguments provided'):
+#     self.operands = operands
+#     self.message = message
+#   def __str__(self):
+#     return self.message % {'operands': operands}
+# class BadOption(Cmdline):
+#   code = 23
+#   def __init__(self, option, message='invalid option: %(option)s'):
+#     self.option = option
+#     self.message = message
+#   def __str__(self):
+#     return self.message % {'option': self.option}
+
+
+class UnknownEncoding(User):
+  """Tried to use unknown encoding.
+
+  Attributes:
+  encoding      -- name of the unknown encoding
+  """
+  code = 24
+  def __init__(self, encoding):
+    self.encoding = encoding
+  def __str__(self):
+    return 'unknown encoding: %s' % (self.encoding,)
+
+
+class Encoding(User):
+  """Error encoding or decoding.
+
+  No part of gvn raises this exception.  Instead, callers should catch
+  UnicodeError and instantiate this class with that exception object
+  as an argument.  This gives a much nicer error message.
+  """
+  code = 25
+  def __init__(self, unicode_error):
+    self._unicode_error = unicode_error
+  def __str__(self):
+    e = self._unicode_error
+    if isinstance(e, UnicodeDecodeError):
+      action = 'decode bytes'
+    elif isinstance(e, UnicodeEncodeError):
+      action = 'encode character'
+    else:
+      # Shouldn't be called with anything else, e.g. UnicodeTranslateError.
+      return str(e)
+    # Get everything up to but not including the bogon.
+    preceding = e.object[:e.start]
+    # Get the column number of the line of the bogon.
+    column = e.start - preceding.rindex('\n')
+    # Get the line number of the bogon; number of newlines + 1 because
+    # preceding doesn't have the final newline (if any).
+    line = preceding.count('\n') + 1
+    # Get no more than 40 bytes of the line containing the bogon (or
+    # the preceding line if the bogon started the line).
+    preceding = preceding[-40:].splitlines()[-1]
+    return ("'%s' codec can't %s at column %d of line %d; preceding text:\n%s"
+            % (e.encoding, action, column, line, preceding))
+
+class WCPath(User):
+  """The given working-copy path does not exist."""
+  code = 26
+  def __init__(self, path):
+    User.__init__(self, "Path '%s' does not exist" % (path,))
+
+class ChangeBranchOutsideTarget(User):
+  """A changebranched path lies outside the operation's target path."""
+  code = 27
+  def __init__(self, cb_element_path, target_path):
+    User.__init__(self, "Changebranch path '%s' is not in the "
+                        "target path '%s'" % (cb_element_path, target_path,))
+
+
+class NotVersioned(User):
+  """Tried to operate on unversioned file.
+
+  Attributes:
+  path       -- unversioned path, in internal form
+  """
+  code = 29
+  def __init__(self, path):
+    self.path = path
+  # Stringified lazily so the UI may first rewrite self.path into
+  # display form (see the module docstring).
+  def __str__(self):
+    return ("'%s' is not under version control"
+            % (self.path,))
+
+
+class Internal(Root):
+  """Internal errors are caused by gvn bugs: improper use of svn (or
+  an svn binding), one layer of gvn misusing another, and so on.
+  """
+  # Generic internal-error exit code; subclasses refine it (105+).
+  code = 100
+
+
+# XXX I think we have multiple errors here, and maybe some are
+# internal and some are User; i haven't looked yet.
+class ChangeBranch(Internal):
+  """Changebranch bookkeeping error; per the XXX note above, this may
+  actually cover several distinct conditions, some of them User errors.
+  """
+  code = 105
+
+
+class PathNotChild(Internal):
+  code = 106
+  def __init__(self, child, parent):
+    Internal.__init__(self,
+                           "'%s' not a child of '%s'" % (child, parent))
+
+
+class WCReOpen(Internal):
+  """WorkingCopy.Open re-entered with mismatched write_lock/recursive flags."""
+  code = 107
+  def __init__(self, write_lock, requested_write_lock,
+               recursive, requested_recursive):
+    Internal.__init__(self,
+                           "WorkingCopy.Open mismatched write_lock (%s vs. %s) (%s vs. %s)"
+                           % (write_lock, requested_write_lock, recursive, requested_recursive))
+
+
+class WCClosed(Internal):
+  """Operation attempted on an already-closed WorkingCopy."""
+  code = 108
+
+
+class NotShortURL(Internal):
+  """The given URL is not in gvn's short-URL form."""
+  code = 109
+  def __init__(self, url):
+    Internal.__init__(self, "'%s' is not a short URL." % (url,))
+
+class NotChangeBranched(Internal):
+  """path has no changebranch.
+
+  Attributes:
+  path       -- the offending path
+  """
+  code = 110
+  def __init__(self, path):
+    Internal.__init__(self, "'%s' not changebranched" % (path,))
+    self.path = path
+
+class ParseState(Internal):
+  """Repository state of a changebranch failed to parse.
+
+  Attributes:
+  child      -- optional underlying exception
+  message    -- optional extra detail
+  """
+  code = 111
+  def __init__(self, child=None, message=None):
+    self.child = child
+    self.message = message
+  def __str__(self):
+    # Join the base message with whatever detail is available.
+    message = ['repository state of changebranch is invalid']
+    if self.message is not None:
+      message.append(self.message)
+    if self.child is not None:
+      message.append(str(self.child))
+    return ': '.join(message)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/info.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/info.py
new file mode 100644
index 0000000..e0e07e5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/info.py
@@ -0,0 +1,352 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
import cPickle
import os
import re
import sys

import svn.core
import svn.fs
import svn.repos

import gvn.project
import gvn.userdb
import gvn.util
+
+
+SVN_USERDB_PATH = "conf/gvn/userdb"
+
+
class HookInfo(object):
  """Lazily-computed context for one Subversion hook invocation.

  argv[0]'s basename names the hook being run; ARGV_INDEX_MAP and
  STDIN_MAP describe where each logical value (user, revision, txn
  name, ...) arrives for that hook.  Repository objects (repo, fs,
  txn, ...) are exposed as properties and opened only on first access.
  """

  # XXX everything references hi.pool, but this doesn't have it?!  How
  # the hell have the tests been passing?  Anyway, the hooks mustn't
  # be sharing one giant pool anyway.  runner.py needs to pass an
  # iterpool to each hook.
  pool = None

  # Subversion sends different values as different argument numbers
  # depending on the hook that's called; the variables below map
  # positional arguments in argv based on argv[0].
  #
  # For more definitions on the hooks and their arguments, see:
  #   http://svnbook.red-bean.com/nightly/en/svn-book.html#svn.ref.reposhooks

  ARGV_INDEX_MAP = {
    'start-commit': ['repos_path', 'user', 'capabilities'],
    'pre-commit':   ['repos_path', 'txn_name'],
    'post-commit':  ['repos_path', 'revision'],
    'pre-revprop-change':
                    ['repos_path', 'revision', 'user', 'propname', 'action'],
    'post-revprop-change':
                    ['repos_path', 'revision', 'user', 'propname', 'action'],
    'pre-lock':     ['repos_path', 'path', 'user'],
    'post-lock':    ['repos_path', 'user'],
    'pre-unlock':   ['repos_path', 'path', 'user'],
    'post-unlock':  ['repos_path', 'user'],
    }

  # Hooks that receive additional data on stdin, and what that data means.
  STDIN_MAP = {
    'pre-revprop-change':  'propvalue',
    'post-revprop-change': 'propvalue',
    'post-lock':   'locked_paths',
    'post-unlock': 'locked_paths',
  }

  def __init__(self, argv, fh=None, userdb=None):
    """argv is the hook's argv (argv[0] identifies the hook); fh is a
    file-like object for hooks that receive data on stdin; userdb
    optionally supplies a pre-built gvn.userdb.UserDB (otherwise one is
    loaded on demand from the repository)."""
    self._fh =             fh
    self._input =          None

    ## common to all hook scripts
    self._argv =           argv
    self._hook_name =      os.path.basename(argv[0])

    self._locked_paths = None
    # Fix: _capabilities was never initialized, so the capabilities
    # property raised AttributeError on first access.
    self._capabilities = None

    # common svn objects used by a few methods
    self._repo =       None
    self._fs =         None

    # svn computed data for some hooks (populated on demand)
    self._head = self._head_root = None
    self._txn =            None
    self._txn_root =       None
    self._paths_changed =  None
    self._prefix_changed = None
    self._super_users =    None
    self._action_word =    None
    self._txn_props =      None
    self._author = self._date = self._log = None
    self._project_config = None

    # The easy one: repository path is always the first arg
    self.repos_path = argv[1]
    self.hook_dir = os.path.join(self.repos_path, 'hooks',
                                 self._hook_name + '.d')

    self._userdb = userdb

  def _GetArgvValue(self, name):
    """Return the positional argument named NAME for this hook, or None
    if this hook does not receive it."""
    try:
      idx = self.ARGV_INDEX_MAP[self._hook_name].index(name)
      return self._argv[idx+1]
    except (KeyError, ValueError):
      pass

    return None

  def _GetInputValue(self, name):
    """Return the stdin payload if this hook's stdin carries NAME, else
    None.  stdin is read (and cached) at most once."""
    try:
      if self.STDIN_MAP[self._hook_name] == name:
        if self._input is None and self._fh is not None:
          self._input = self._fh.read()
        return self._input
    except (KeyError, ValueError):
      pass

    return None

  # XXX Conflates hook failure with successful answer to the yes/no
  # authz question.
  def BreakOnHookFailure(self):
    """Check what the per-hook failure disposition is.

    Generally if a single hook fails then the code immediately stops
    running hooks.  However in certain situations even if a single hook
    fails the script should continue running all hooks.
    """
    # post-* hooks can't veto anything, so keep running the rest.
    if self._hook_name and self._hook_name.startswith('post-'):
      return False
    return True

  def PostProcessHookReturnCode(self, rcode):
    """Allow for last minute massaging of the return code.

    Generally this just passes the return code through.  However in certain
    situations the code pretends that everything succeeded regardless.
    """
    # A failing post-* hook must not fail the already-finished operation.
    if self._hook_name and self._hook_name.startswith('post-'):
      return 0
    return rcode

  def repos_name(self):
    """Convenience method to return the 'short name' of a repository.

    This basically assumes that all the repositories of interest are in
    the same directory and the last component of the repository path is
    unique enough to be considered the 'short name'.
    """
    return os.path.basename(self.repos_path)

  def logger_name(self, script=None):
    """Returns a dotted name suitable for use with logging.getLogger(),
    or None if the repository short name is empty."""
    if not self.repos_name():
      return None

    name = ['svn', self.repos_name(), 'hook', self.hook_name]
    if script is not None:
      name.append(script)
    # Dots inside a component would introduce bogus logger hierarchy
    # levels, so flatten them.  (The old try/except TypeError around the
    # join was unreachable: every element is a string by this point.)
    return '.'.join([word.replace('.', '_') for word in name])


  arguments = property(lambda self: self._argv[1:])

  hook_name = property(lambda self: self._hook_name)

  action = property(lambda self: self._GetArgvValue('action'))

  path = property(lambda self: self._GetArgvValue('path'))

  propname = property(lambda self: self._GetArgvValue('propname'))

  # NOTE(review): raises TypeError for hooks without a revision argument.
  revision = property(lambda self: int(self._GetArgvValue('revision')))

  txn_name = property(lambda self: self._GetArgvValue('txn_name'))

  user = property(lambda self: self._GetArgvValue('user'))

  propvalue = property(lambda self: self._GetInputValue('propvalue'))

  def _GetLockedPaths(self):
    # stdin is one locked path per line (post-lock/post-unlock).
    if self._locked_paths is None:
      self._locked_paths = self._GetInputValue('locked_paths')
      if self._locked_paths is not None:
        self._locked_paths = self._locked_paths.split('\n')
    return self._locked_paths
  locked_paths = property(_GetLockedPaths)

  def _GetCapabilities(self):
    if self._capabilities is None:
      # Fix: read the colon-separated 'capabilities' argument (which
      # start-commit receives, per ARGV_INDEX_MAP); this previously read
      # 'action', which start-commit never receives.
      self._capabilities = set(self._GetArgvValue('capabilities').split(':'))
    return self._capabilities
  capabilities = property(_GetCapabilities,
      doc="""set of client capabilities (e.g. svn.ra.CAPABILITY_MERGEINFO)""")

  # We allocate everything in the global pool; seems unlikely we'll
  # need to get more complicated.

  def _GetRepo(self):
    if self._repo is None:
      self._repo = svn.repos.open(self.repos_path)
    return self._repo
  repo = property(_GetRepo)

  def _GetFs(self):
    if self._fs is None:
      self._fs = svn.repos.fs(self.repo)
    return self._fs
  fs = property(_GetFs)

  def _GetHead(self):
    if self._head is None:
      self._head = svn.fs.youngest_rev(self.fs)
    return self._head
  head = property(_GetHead)

  def _GetHeadRoot(self):
    if self._head_root is None:
      self._head_root = svn.fs.revision_root(self.fs, self.head)
    return self._head_root
  head_root = property(_GetHeadRoot)

  def _GetTxn(self):
    if self._txn is None:
      self._txn = svn.fs.open_txn(self.fs, self.txn_name)
    return self._txn
  txn = property(_GetTxn)

  def _GetTxnRoot(self):
    if self._txn_root is None:
      self._txn_root = svn.fs.txn_root(self.txn)
    return self._txn_root
  txn_root = property(_GetTxnRoot)

  def _GetPathsChanged(self):
    if self._paths_changed is None:
      self._paths_changed = svn.fs.paths_changed(self.txn_root)
    return self._paths_changed
  paths_changed = property(_GetPathsChanged)

  def _GetPrefixChanged(self):
    if self._prefix_changed is None:
      tmp = gvn.util.CommonPrefix(list(self.paths_changed.iterkeys()))
      # paths_changed have a leading /
      self._prefix_changed = tmp.lstrip('/')
    return self._prefix_changed
  prefix_changed = property(_GetPrefixChanged)

  # We store our list of svn superusers as a property attached to the repo root
  def _GetSuperUsers(self, index):
    # index 0 selects the user set, 1 the group set (see the two
    # properties below).
    if self._super_users is None:
      contents = svn.fs.node_prop(self.head_root, "", "gvn:superusers")
      if contents is None:
        self._super_users = (set(), set())
      else:
        self._super_users = gvn.util.ParseOwners(contents)
    return self._super_users[index]
  super_users = property(lambda self: self._GetSuperUsers(0))
  super_groups = property(lambda self: self._GetSuperUsers(1))

  def _Action_Word(self):
    """Translate the one-letter revprop action into a word (raises
    KeyError for anything outside A/M/D)."""
    if self._action_word is None:
      mappings = {
        "A": "add",
        "M": "modify",
        "D": "delete",
      }
      self._action_word = mappings[self.action]
    # Fix: the return was inside the 'if' above, so every cached
    # (second and later) access returned None.
    return self._action_word
  action_word = property(_Action_Word)

  def _GetUserdb(self):
    if self._userdb is None:
      self._userdb = gvn.userdb.UserDB(os.path.join(self.repos_path,
                                                    SVN_USERDB_PATH))
    return self._userdb
  userdb = property(_GetUserdb)

  # TODO(epg): Various hooks compute these locally; make them use this instead.
  def _GetTxnProps(self):
    if self._txn_props is None:
      self._txn_props = svn.fs.txn_proplist(self.txn, self.pool)
    return self._txn_props
  txn_props = property(_GetTxnProps)

  # TODO(epg): Various hooks compute these (or at least author)
  # locally; make them use these instead.
  def _GetSvnProp(self, name):
    # Cache svn:NAME from the txn in self._NAME on first access.
    attr = '_' + name
    val = getattr(self, attr)
    if val is not None:
      return val
    val = svn.fs.txn_prop(self.txn, 'svn:' + name, self.pool)
    setattr(self, attr, val)
    return val
  author = property(lambda self: self._GetSvnProp('author'))
  date = property(lambda self: self._GetSvnProp('date'))
  log = property(lambda self: self._GetSvnProp('log'))

  def _GetProjectConfig(self):
    # TODO(epg): Much of this is duplicated from gvn.project, because it's
    # heavily ra-based.  And, I think I've fixed some bugs here not fixed
    # for gvn.project's version of this :(.  Need to abstract this out.
    if self._project_config is None:
      # Start with defaults.
      config = dict(gvn.project._DEFAULT_CONFIG)
      # Try to find gvn:project property on self.prefix_changed or any of
      # its parents.
      prop = [None]
      def find_prop(path):
        try:
          prop[0] = svn.fs.node_prop(self.head_root, path, 'gvn:project',
                                     self.pool)
        except svn.core.SubversionException:
          # sys.exc_info() instead of 'except ..., e' keeps this clause
          # parseable by both old and new Pythons.
          e = sys.exc_info()[1]
          if e.apr_err != svn.core.SVN_ERR_FS_NOT_FOUND:
            raise
          # Keep looking.
          return -1
        if prop[0] is None:
          # Keep looking.
          return -1
        # Got it.
        return 0
      path = gvn.util.ClimbAndFind(self.prefix_changed, find_prop)
      if path == '/':
        path = ''
      if prop[0] is not None:
        # Parse the project config dict from the property and process it.
        config.update(cPickle.loads(prop[0]))
      # Make change-branch-base relative to repository root.
      cbb = config['change-branch-base']
      if cbb.startswith('/'):
        config['change-branch-base'] = cbb.lstrip('/')
      else:
        if path == '':
          config['change-branch-base'] = cbb
        else:
          config['change-branch-base'] = '/'.join([path, cbb])
      config['path'] = path
      self._project_config = config
    return self._project_config
  project_config = property(_GetProjectConfig)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/__init__.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/__init__.pyo
new file mode 100644
index 0000000..ad10743
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/__init__.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/mailer.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/mailer.py
new file mode 100644
index 0000000..bbeb6eb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/mailer.py
@@ -0,0 +1,1387 @@
+# Mail hook for gvn hook infrastructure:
+#
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+
+# Based on original script from CollabNet:
+# Copyright (c) 2000-2006 CollabNet.  All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# 1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#
+# 2. Redistributions in binary form must reproduce the above copyright
+#    notice, this list of conditions and the following disclaimer in
+#    the documentation and/or other materials provided with the
+#    distribution.
+#
+# 3. The end-user documentation included with the redistribution,
+#    if any, must include the following acknowledgment:
+#       "This product includes software developed by
+#        CollabNet (http://www.Collab.Net/)."
+#    Alternately, this acknowledgment may appear in the software itself,
+#    if and wherever such third-party acknowledgments normally appear.
+#
+# 4. The hosted project names must not be used to endorse or promote
+#    products derived from this software without prior written
+#    permission. For written permission, please contact info@collab.net.
+#
+# 5. Products derived from this software may not use the "Tigris" name
+#    nor may "Tigris" appear in their names without prior written
+#    permission of CollabNet.
+#
+# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL COLLABNET OR ITS CONTRIBUTORS BE LIABLE FOR ANY
+# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
+# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
+# IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
+# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
+# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+#
+
+"""gvn hook runner wrapper for mailer.py from collab.net
+"""
+
+import svn.fs
+
def RunHook(hi, logger):
  """gvn hook-runner entry point: mail out the commit described by hi."""
  config_path = os.path.join(hi.repos_path, 'conf', 'mailer.conf')
  main(hi.pool, 'commit', config_path, hi.repos_path, [hi.revision])
  # main() returns nothing but raises on error (as pointed out by epg),
  # so reaching this point means success.
  return 0
+
+
+# Original script pasted as is, below:
+
+# $HeadURL: http://svn.collab.net/repos/svn/branches/1.4.x/tools/hook-scripts/mailer/mailer.py $
+# $LastChangedDate: 2006-07-27 19:44:08 +0000 (Thu, 27 Jul 2006) $
+# $LastChangedBy: dlr $
+# $LastChangedRevision: 20878 $
+#
+# USAGE: mailer.py commit      REPOS REVISION [CONFIG-FILE]
+#        mailer.py propchange  REPOS REVISION AUTHOR REVPROPNAME [CONFIG-FILE]
+#        mailer.py propchange2 REPOS REVISION AUTHOR REVPROPNAME ACTION \
+#                              [CONFIG-FILE]
+#        mailer.py lock        REPOS AUTHOR [CONFIG-FILE]
+#        mailer.py unlock      REPOS AUTHOR [CONFIG-FILE]
+#
+#   Using CONFIG-FILE, deliver an email describing the changes between
+#   REV and REV-1 for the repository REPOS.
+#
+#   ACTION was added as a fifth argument to the post-revprop-change hook
+#   in Subversion 1.2.0.  Its value is one of 'A', 'M' or 'D' to indicate
+#   if the property was added, modified or deleted, respectively.
+#
+#   This version of mailer.py requires the python bindings from
+#   subversion 1.2.0 or later.
+#
+
+import os
+import sys
+import string
+import ConfigParser
+import time
+import popen2
+import cStringIO
+import smtplib
+import re
+import tempfile
+import types
+import urllib
+
+import svn.fs
+import svn.delta
+import svn.repos
+import svn.core
+
+SEPARATOR = '=' * 78
+
+
def main(pool, cmd, config_fname, repos_dir, cmd_args):
  """Build the messenger matching CMD and generate its mail.

  CMD_ARGS holds the remaining hook arguments; see the usage comment at
  the top of this script for the per-subcommand layout.
  """
  ### TODO:  Sanity check the incoming args

  if cmd == 'commit':
    rev = int(cmd_args[0])
    repos = Repository(repos_dir, rev, pool)
    cfg = Config(config_fname, repos, { 'author' : repos.author })
    messenger = Commit(pool, cfg, repos)
  elif cmd in ('propchange', 'propchange2'):
    rev = int(cmd_args[0])
    author = cmd_args[1]
    propname = cmd_args[2]
    # propchange2 (svn >= 1.2) passes the action letter; plain
    # propchange did not, so default to 'A'.  A falsy action string
    # also falls back to 'A', matching the original and/or expression.
    action = 'A'
    if cmd == 'propchange2' and cmd_args[3]:
      action = cmd_args[3]
    repos = Repository(repos_dir, rev, pool)
    # Report the propchange author, not the revision's original author.
    repos.author = author
    cfg = Config(config_fname, repos, { 'author' : author })
    messenger = PropChange(pool, cfg, repos, author, propname, action)
  elif cmd in ('lock', 'unlock'):
    author = cmd_args[0]
    repos = Repository(repos_dir, 0, pool) ### any old revision will do
    # Report the lock/unlock author, not a revision author.
    repos.author = author
    cfg = Config(config_fname, repos, { 'author' : author })
    messenger = Lock(pool, cfg, repos, author, cmd == 'lock')
  else:
    # NOTE(review): UnknownSubcommand is not defined anywhere in this
    # script, so in practice this raises NameError -- TODO confirm.
    raise UnknownSubcommand(cmd)

  messenger.generate()
+
+
# Minimal, incomplete, versions of popen2.Popen[34] for those platforms
# for which popen2 does not provide them.
try:
  Popen3 = popen2.Popen3
  Popen4 = popen2.Popen4
except AttributeError:
  # popen2.Popen3/Popen4 are missing (e.g. on Windows); emulate just the
  # pieces this script uses: the child pipes and wait().
  class Popen3:
    # Run CMD with stdin/stdout pipes; optionally capture stderr too.
    # CMD may be an argv list or a shell string.
    def __init__(self, cmd, capturestderr = False):
      if type(cmd) != types.StringType:
        cmd = svn.core.argv_to_command_string(cmd)
      if capturestderr:
        self.fromchild, self.tochild, self.childerr \
            = popen2.popen3(cmd, mode='b')
      else:
        self.fromchild, self.tochild = popen2.popen2(cmd, mode='b')
        self.childerr = None

    # Close all pipes; returns the last non-None close() result
    # (the child's exit status, per the file-object protocol).
    def wait(self):
      rv = self.fromchild.close()
      rv = self.tochild.close() or rv
      if self.childerr is not None:
        rv = self.childerr.close() or rv
      return rv

  class Popen4:
    # Like Popen3, but child stderr is merged into stdout.
    def __init__(self, cmd):
      if type(cmd) != types.StringType:
        cmd = svn.core.argv_to_command_string(cmd)
      self.fromchild, self.tochild = popen2.popen4(cmd, mode='b')

    def wait(self):
      rv = self.fromchild.close()
      rv = self.tochild.close() or rv
      return rv
+
+
class OutputBase:
  "Abstract base class to formalize the interface of output methods"

  def __init__(self, cfg, repos, prefix_param):
    self.cfg = cfg
    self.repos = repos
    self.prefix_param = prefix_param
    self._CHUNKSIZE = 128 * 1024
    # Public member: callers must assign a descriptive subject string
    # here before make_subject() is invoked.
    self.subject = ""

  def make_subject(self, group, params):
    """Build the final subject: optional group prefix, then truncation."""
    subject = self.subject
    prefix = self.cfg.get(self.prefix_param, group, params)
    if prefix:
      subject = prefix + ' ' + subject

    try:
      limit = int(self.cfg.get('truncate_subject', group, params))
    except ValueError:
      # Unset / non-numeric config value means "do not truncate".
      limit = 0
    if limit and len(subject) > limit:
      subject = subject[:(limit - 3)] + "..."
    return subject

  def start(self, group, params):
    """Override this method.
    Begin writing an output representation. GROUP is the name of the
    configuration file group which is causing this output to be produced.
    PARAMS is a dictionary of any named subexpressions of regular expressions
    defined in the configuration file, plus the key 'author' contains the
    author of the action being reported."""
    raise NotImplementedError

  def finish(self):
    """Override this method.
    Flush any cached information and finish writing the output
    representation."""
    raise NotImplementedError

  def write(self, output):
    """Override this method.
    Append the literal text string OUTPUT to the output representation."""
    raise NotImplementedError

  def run(self, cmd):
    """Override this method, if the default implementation is not sufficient.
    Execute CMD, writing the stdout produced to the output representation."""
    # By default we fold the child's stderr into the output as well.
    child = Popen4(cmd)
    while 1:
      chunk = child.fromchild.read(self._CHUNKSIZE)
      if not chunk:
        break
      self.write(chunk)
    # Reap the child so we don't accumulate zombies.
    child.wait()
+
+
class MailedOutput(OutputBase):
  """Base class for outputs that deliver the message as an email."""

  def __init__(self, cfg, repos, prefix_param):
    OutputBase.__init__(self, cfg, repos, prefix_param)

  def start(self, group, params):
    """Resolve the To/From/Reply-To addressing for this group."""
    # 'to_addr' is a whitespace-separated address list; str.split() with
    # no argument already drops empty fields, so the old
    # filter(None, string.split(...)) is equivalent.  (str methods
    # replace the long-deprecated string-module functions.)
    self.to_addrs = self.cfg.get('to_addr', group, params).split()
    self.from_addr = self.cfg.get('from_addr', group, params) \
                     or self.repos.author or 'no_author'
    self.reply_to = self.cfg.get('reply_to', group, params)

  def mail_headers(self, group, params):
    """Return the message header block, terminated by a blank line."""
    subject = self.make_subject(group, params)
    # RFC 2047-encode the subject only when it isn't plain ASCII.
    try:
      subject.encode('ascii')
    except UnicodeError:
      from email.Header import Header
      subject = Header(subject, 'utf-8').encode()
    hdrs = 'From: %s\n'    \
           'To: %s\n'      \
           'Subject: %s\n' \
           'MIME-Version: 1.0\n' \
           'Content-Type: text/plain; charset=UTF-8\n' \
           'Content-Transfer-Encoding: 8bit\n' \
           % (self.from_addr, ', '.join(self.to_addrs), subject)
    if self.reply_to:
      hdrs = '%sReply-To: %s\n' % (hdrs, self.reply_to)
    return hdrs + '\n'
+
+
class SMTPOutput(MailedOutput):
  "Deliver a mail message to an MTA using SMTP."

  def start(self, group, params):
    MailedOutput.start(self, group, params)

    # Accumulate the entire message in memory; finish() sends it in a
    # single SMTP transaction.
    buf = cStringIO.StringIO()
    self.buffer = buf
    self.write = buf.write

    self.write(self.mail_headers(group, params))

  def finish(self):
    smtp = smtplib.SMTP(self.cfg.general.smtp_hostname)
    # Authenticate only if the config provides credentials.
    if self.cfg.is_set('general.smtp_username'):
      smtp.login(self.cfg.general.smtp_username,
                 self.cfg.general.smtp_password)
    smtp.sendmail(self.from_addr, self.to_addrs, self.buffer.getvalue())
    smtp.quit()
+
+
class StandardOutput(OutputBase):
  "Print the commit message to stdout."

  def __init__(self, cfg, repos, prefix_param):
    OutputBase.__init__(self, cfg, repos, prefix_param)
    # Everything goes straight to stdout; nothing is buffered.
    self.write = sys.stdout.write

  def start(self, group, params):
    self.write('Group: %s\n' % (group or 'defaults'))
    self.write('Subject: %s\n\n' % self.make_subject(group, params))

  def finish(self):
    # Nothing buffered, so nothing to flush.
    pass
+
+
class PipeOutput(MailedOutput):
  "Deliver a mail message to an MTA via a pipe."

  def __init__(self, cfg, repos, prefix_param):
    MailedOutput.__init__(self, cfg, repos, prefix_param)

    # Figure out the command (argv list) for delivery.  str.split()
    # replaces the long-deprecated string.split() module function.
    self.cmd = cfg.general.mail_command.split()

  def start(self, group, params):
    MailedOutput.start(self, group, params)

    ### gotta fix this. this is pretty specific to sendmail and qmail's
    ### mailwrapper program. should be able to use option param substitution
    cmd = self.cmd + [ '-f', self.from_addr ] + self.to_addrs

    # construct the pipe for talking to the mailer
    self.pipe = Popen3(cmd)
    self.write = self.pipe.tochild.write

    # we don't need the read-from-mailer descriptor, so close it
    self.pipe.fromchild.close()

    # start writing out the mail message
    self.write(self.mail_headers(group, params))

  def finish(self):
    # signal that we're done sending content
    self.pipe.tochild.close()

    # wait to avoid zombies
    self.pipe.wait()
+
+
class Messenger:
  """Common driver: holds config/repos and selects the output backend."""

  def __init__(self, pool, cfg, repos, prefix_param):
    self.pool = pool
    self.cfg = cfg
    self.repos = repos

    # Delivery preference: explicit mail command, then SMTP, then stdout.
    if cfg.is_set('general.mail_command'):
      output_cls = PipeOutput
    elif cfg.is_set('general.smtp_hostname'):
      output_cls = SMTPOutput
    else:
      output_cls = StandardOutput

    self.output = output_cls(cfg, repos, prefix_param)
+
+
class Commit(Messenger):
  """Messenger for a committed revision.

  Replays the revision to collect its changed paths, buckets them by
  configuration group, and derives the subject line from the common
  parent directory of all changes.
  """
  def __init__(self, pool, cfg, repos):
    Messenger.__init__(self, pool, cfg, repos, 'commit_subject_prefix')

    # get all the changes and sort by path
    editor = svn.repos.ChangeCollector(repos.fs_ptr, repos.root_this, self.pool)
    e_ptr, e_baton = svn.delta.make_editor(editor, self.pool)
    svn.repos.replay(repos.root_this, e_ptr, e_baton, self.pool)

    self.changelist = editor.get_changes().items()
    self.changelist.sort()

    # collect the set of groups and the unique sets of params for the options
    self.groups = { }
    for path, change in self.changelist:
      for (group, params) in self.cfg.which_groups(path):
        # turn the params into a hashable object and stash it away
        param_list = params.items()
        param_list.sort()
        # collect the set of paths belonging to this group
        # (paths is used as a set; values are always None)
        if self.groups.has_key( (group, tuple(param_list)) ):
          old_param, paths = self.groups[group, tuple(param_list)]
        else:
          paths = { }
        paths[path] = None
        self.groups[group, tuple(param_list)] = (params, paths)

    # figure out the changed directories
    # (a changed directory contributes itself; a changed file contributes
    # its parent; '' stands for the repository root)
    dirs = { }
    for path, change in self.changelist:
      if change.item_kind == svn.core.svn_node_dir:
        dirs[path] = None
      else:
        idx = string.rfind(path, '/')
        if idx == -1:
          dirs[''] = None
        else:
          dirs[path[:idx]] = None

    dirlist = dirs.keys()

    commondir, dirlist = get_commondir(dirlist)

    # compose the basic subject line. later, we can prefix it.
    dirlist.sort()
    dirlist = string.join(dirlist)
    if commondir:
      self.output.subject = 'r%d - in %s: %s' % (repos.rev, commondir, dirlist)
    else:
      self.output.subject = 'r%d - %s' % (repos.rev, dirlist)

  def generate(self):
    "Generate email for the various groups and option-params."

    ### the groups need to be further compressed. if the headers and
    ### body are the same across groups, then we can have multiple To:
    ### addresses. SMTPOutput holds the entire message body in memory,
    ### so if the body doesn't change, then it can be sent N times
    ### rather than rebuilding it each time.

    subpool = svn.core.svn_pool_create(self.pool)

    # build a renderer, tied to our output stream
    renderer = TextCommitRenderer(self.output)

    for (group, param_tuple), (params, paths) in self.groups.items():
      self.output.start(group, params)

      # generate the content for this group and set of params
      generate_content(renderer, self.cfg, self.repos, self.changelist,
                       group, params, paths, subpool)

      self.output.finish()
      # reuse the subpool for the next group
      svn.core.svn_pool_clear(subpool)

    svn.core.svn_pool_destroy(subpool)
+
+
try:
  from tempfile import NamedTemporaryFile
except ImportError:
  # NamedTemporaryFile was added in Python 2.3, so we need to emulate it
  # for older Pythons.
  class NamedTemporaryFile:
    # NOTE(review): mktemp() has a name-reuse race; acceptable here only
    # because these temp files feed a local diff command.
    def __init__(self):
      self.name = tempfile.mktemp()
      self.file = open(self.name, 'w+b')
    # Remove the file when the wrapper is garbage-collected.
    def __del__(self):
      os.remove(self.name)
    def write(self, data):
      self.file.write(data)
    def flush(self):
      self.file.flush()
+
+
class PropChange(Messenger):
  """Messenger for a revision property change (post-revprop-change)."""

  def __init__(self, pool, cfg, repos, author, propname, action):
    Messenger.__init__(self, pool, cfg, repos, 'propchange_subject_prefix')
    self.author = author
    self.propname = propname
    self.action = action

    # Collect the set of groups and the unique sets of params for the
    # options.  A propchange isn't tied to a path, so match against ''.
    self.groups = { }
    for (group, params) in self.cfg.which_groups(''):
      # turn the params into a hashable object and stash it away
      param_list = params.items()
      param_list.sort()
      self.groups[group, tuple(param_list)] = params

    self.output.subject = 'r%d - %s' % (repos.rev, propname)

  def generate(self):
    """Write one message per group describing the property change."""
    actions = { 'A': 'added', 'M': 'modified', 'D': 'deleted' }
    for (group, param_tuple), params in self.groups.items():
      self.output.start(group, params)
      self.output.write('Author: %s\n'
                        'Revision: %s\n'
                        'Property Name: %s\n'
                        'Action: %s\n'
                        '\n'
                        % (self.author, self.repos.rev, self.propname,
                           actions.get(self.action, 'Unknown (\'%s\')' \
                                       % self.action)))
      # 'in' replaces the deprecated dict.has_key().
      if self.action == 'A' or self.action not in actions:
        self.output.write('Property value:\n')
        # NOTE(review): get_rev_prop may presumably return None for a
        # deleted/missing prop, which write() would reject -- TODO confirm.
        propvalue = self.repos.get_rev_prop(self.propname)
        self.output.write(propvalue)
      elif self.action == 'M':
        # Diff the old value (delivered on stdin) against the new one.
        self.output.write('Property diff:\n')
        tempfile1 = NamedTemporaryFile()
        tempfile1.write(sys.stdin.read())
        tempfile1.flush()
        tempfile2 = NamedTemporaryFile()
        tempfile2.write(self.repos.get_rev_prop(self.propname))
        tempfile2.flush()
        self.output.run(self.cfg.get_diff_cmd(group, {
          'label_from' : 'old property value',
          'label_to' : 'new property value',
          'from' : tempfile1.name,
          'to' : tempfile2.name,
          }))
      self.output.finish()
+
+
def get_commondir(dirlist):
  """Figure out the common portion/parent (commondir) of all the paths
  in DIRLIST and return a tuple consisting of commondir, dirlist.  If
  a commondir is found, the dirlist returned is rooted in that
  commondir.  If no commondir is found, dirlist is returned unchanged,
  and commondir is the empty string."""
  # A single path, or any change at the repository root ('/'), means
  # there is no useful common parent.
  if len(dirlist) == 1 or '/' in dirlist:
    return '', dirlist

  # Trim the candidate prefix against each remaining path, component by
  # component.  (str methods replace the deprecated string-module
  # functions; behavior is identical.)
  common = dirlist[0].split('/')
  for d in dirlist[1:]:
    parts = d.split('/')
    for i in range(len(common)):
      if i == len(parts) or common[i] != parts[i]:
        del common[i:]
        break
  commondir = '/'.join(common)
  if not commondir:
    # nothing in common, so return the directories unchanged
    return '', dirlist

  # Strip the common portion (and its trailing '/') from each directory;
  # the common directory itself becomes '.'.
  offset = len(commondir) + 1
  newdirs = [ ]
  for d in dirlist:
    if d == commondir:
      newdirs.append('.')
    else:
      newdirs.append(d[offset:])
  return commondir, newdirs
+
+
class Lock(Messenger):
  """Messenger that mails notification of lock/unlock hook events.

  The affected paths are read from stdin (one per line) at construction
  time; generate() then writes one message per (group, params) combo.
  """

  def __init__(self, pool, cfg, repos, author, do_lock):
    self.author = author
    # truthy for the 'lock' hook, falsy for 'unlock'; selects subject
    # prefix and message wording below
    self.do_lock = do_lock

    Messenger.__init__(self, pool, cfg, repos,
                       (do_lock and 'lock_subject_prefix'
                        or 'unlock_subject_prefix'))

    # read all the locked paths from STDIN and strip off the trailing newlines
    self.dirlist = map(lambda x: x.rstrip(), sys.stdin.readlines())

    # collect the set of groups and the unique sets of params for the options
    self.groups = { }
    for path in self.dirlist:
      for (group, params) in self.cfg.which_groups(path):
        # turn the params into a hashable object and stash it away
        param_list = params.items()
        param_list.sort()
        # collect the set of paths belonging to this group
        if self.groups.has_key( (group, tuple(param_list)) ):
          old_param, paths = self.groups[group, tuple(param_list)]
        else:
          paths = { }
        # paths is used as a set: keys matter, values do not
        paths[path] = None
        self.groups[group, tuple(param_list)] = (params, paths)

    commondir, dirlist = get_commondir(self.dirlist)

    # compose the basic subject line. later, we can prefix it.
    dirlist.sort()
    dirlist = string.join(dirlist)
    if commondir:
      self.output.subject = '%s: %s' % (commondir, dirlist)
    else:
      self.output.subject = '%s' % (dirlist)

    # The lock comment is the same for all paths, so we can just pull
    # the comment for the first path in the dirlist and cache it.
    self.lock = svn.fs.svn_fs_get_lock(self.repos.fs_ptr,
                                       self.dirlist[0], self.pool)

  def generate(self):
    """Write one lock/unlock notification per (group, params) combination."""
    for (group, param_tuple), (params, paths) in self.groups.items():
      self.output.start(group, params)

      self.output.write('Author: %s\n'
                        '%s paths:\n' %
                        (self.author, self.do_lock and 'Locked' or 'Unlocked'))

      self.dirlist.sort()
      for dir in self.dirlist:
        self.output.write('   %s\n\n' % dir)

      # only lock events carry a comment (see self.lock above)
      if self.do_lock:
        self.output.write('Comment:\n%s\n' % (self.lock.comment or ''))

      self.output.finish()
+
+
class DiffSelections:
  """Which change kinds should have inline diffs generated.

  Sets the booleans self.add / self.copy / self.delete / self.modify
  from the group's 'generate_diffs' option; when that option is empty,
  all kinds are enabled and the deprecated suppress_adds /
  suppress_deletes options are consulted instead.
  """

  def __init__(self, cfg, group, params):
    self.add = False
    self.copy = False
    self.delete = False
    self.modify = False

    gen_diffs = cfg.get('generate_diffs', group, params)

    ### Do a little dance for deprecated options.  Note that even if you
    ### don't have an option anywhere in your configuration file, it
    ### still gets returned as non-None.
    if len(gen_diffs):
      # 'generate_diffs' is a space-separated list of change kinds.
      # (str.split replaces the deprecated string.split; the original
      # also shadowed the builtin 'list' with its loop variable.)
      for item in gen_diffs.split(" "):
        if item == 'add':
          self.add = True
        if item == 'copy':
          self.copy = True
        if item == 'delete':
          self.delete = True
        if item == 'modify':
          self.modify = True
    else:
      self.add = True
      self.copy = True
      self.delete = True
      self.modify = True
      ### These options are deprecated
      suppress = cfg.get('suppress_deletes', group, params)
      if suppress == 'yes':
        self.delete = False
      suppress = cfg.get('suppress_adds', group, params)
      if suppress == 'yes':
        self.add = False
+
+
class DiffURLSelections:
  """Build per-change viewer URLs from the group's diff_*_url options."""

  def __init__(self, cfg, group, params):
    self.cfg = cfg
    self.group = group
    self.params = params

  def _get_url(self, action, repos_rev, change):
    # The parameters for the URLs generation need to be placed in the
    # parameters for the configuration module, otherwise we may get
    # KeyError exceptions.  Work on a copy so self.params stays pristine.
    params = self.params.copy()
    if change.path:
      params['path'] = urllib.quote(change.path)
    else:
      params['path'] = None
    if change.base_path:
      params['base_path'] = urllib.quote(change.base_path)
    else:
      params['base_path'] = None
    params['rev'] = repos_rev
    params['base_rev'] = change.base_rev

    return self.cfg.get("diff_%s_url" % action, self.group, params)

  def get_add_url(self, repos_rev, change):
    """URL describing an added path."""
    return self._get_url('add', repos_rev, change)

  def get_copy_url(self, repos_rev, change):
    """URL describing a copied path."""
    return self._get_url('copy', repos_rev, change)

  def get_delete_url(self, repos_rev, change):
    """URL describing a deleted path."""
    return self._get_url('delete', repos_rev, change)

  def get_modify_url(self, repos_rev, change):
    """URL describing a modified path."""
    return self._get_url('modify', repos_rev, change)
+
def generate_content(renderer, cfg, repos, changelist, group, params, paths,
                     pool):
  """Assemble the data bundle for one mail group and hand it to RENDERER."""

  svndate = repos.get_rev_prop(svn.core.SVN_PROP_REVISION_DATE)
  ### pick a different date format?
  date = time.ctime(svn.core.secs_from_timestr(svndate, pool))

  diffsels = DiffSelections(cfg, group, params)
  diffurls = DiffURLSelections(cfg, group, params)

  show_nonmatching_paths = cfg.get('show_nonmatching_paths', group, params) \
      or 'yes'

  # are there changes that fall outside this group's matched paths?
  have_other_changes = len(paths) != len(changelist)

  # summary lists for the changes outside the selected path-space
  other_added_data = [ ]
  other_removed_data = [ ]
  other_modified_data = [ ]
  if have_other_changes and show_nonmatching_paths != 'no':
    other_added_data = generate_list('A', changelist, paths, False)
    other_removed_data = generate_list('R', changelist, paths, False)
    other_modified_data = generate_list('M', changelist, paths, False)

  # diffs for the non-matching changes are only produced for 'yes'
  # (not for 'summary')
  other_diffs = None
  if have_other_changes and show_nonmatching_paths == 'yes':
    other_diffs = DiffGenerator(changelist, paths, False, cfg, repos, date,
                                group, params, diffsels, diffurls, pool)

  data = _data(
    author=repos.author,
    date=date,
    rev=repos.rev,
    log=repos.get_rev_prop(svn.core.SVN_PROP_REVISION_LOG) or '',
    added_data=generate_list('A', changelist, paths, True),
    removed_data=generate_list('R', changelist, paths, True),
    modified_data=generate_list('M', changelist, paths, True),
    show_nonmatching_paths=show_nonmatching_paths,
    other_added_data=other_added_data,
    other_removed_data=other_removed_data,
    other_modified_data=other_modified_data,
    diffs=DiffGenerator(changelist, paths, True, cfg, repos, date, group,
                        params, diffsels, diffurls, pool),
    other_diffs=other_diffs,
    )
  renderer.render(data)
+
+
def generate_list(changekind, changelist, paths, in_paths):
  """Return summary items for changes of CHANGEKIND ('A', 'R' or 'M').

  CHANGELIST is a sequence of (path, change) pairs; PATHS is a dict
  whose keys are the group's matched paths.  IN_PATHS selects whether
  to summarize changes inside (True) or outside (False) that set.
  """
  if changekind == 'A':
    selection = lambda change: change.added
  elif changekind == 'R':
    selection = lambda change: change.path is None
  elif changekind == 'M':
    selection = lambda change: not change.added and change.path is not None

  items = [ ]
  for path, change in changelist:
    # ('in' replaces the deprecated dict.has_key; the membership bool is
    # compared against in_paths to pick the matching or non-matching half)
    if selection(change) and (path in paths) == in_paths:
      item = _data(
        path=path,
        is_dir=change.item_kind == svn.core.svn_node_dir,
        props_changed=change.prop_changes,
        text_changed=change.text_changed,
        copied=change.added and change.base_path,
        base_path=change.base_path,
        base_rev=change.base_rev,
        )
      items.append(item)

  return items
+
+
class DiffGenerator:
  """Generator-like object returning one data item per diffable change.

  Uses the old __getitem__/IndexError iteration protocol.  Directories,
  changes on the wrong side of the matched-path set, and unmodified
  files are skipped.
  """

  def __init__(self, changelist, paths, in_paths, cfg, repos, date, group,
               params, diffsels, diffurls, pool):
    # changelist: sequence of (path, change) pairs to examine
    # paths: dict whose keys are the paths matched by this group
    # in_paths: yield changes inside (True) or outside (False) that set
    self.changelist = changelist
    self.paths = paths
    self.in_paths = in_paths
    self.cfg = cfg
    self.repos = repos
    self.date = date
    self.group = group
    self.params = params
    # diffsels: DiffSelections flags for which kinds get inline diffs
    self.diffsels = diffsels
    # diffurls: DiffURLSelections producing per-change viewer URLs
    self.diffurls = diffurls
    self.pool = pool

    self.diff = self.diff_url = None

    # position of the next changelist entry; advanced by __getitem__ so
    # skipped entries are never revisited
    self.idx = 0

  def __nonzero__(self):
    # we always have some items
    return True

  def __getitem__(self, idx):
    # Called with idx = 0, 1, 2, ... until IndexError; the passed idx is
    # ignored in favor of self.idx so non-diffable entries can be skipped.
    while 1:
      if self.idx == len(self.changelist):
        raise IndexError

      path, change = self.changelist[self.idx]
      self.idx = self.idx + 1

      diff = diff_url = None
      kind = None
      label1 = None
      label2 = None
      src_fname = None
      dst_fname = None
      binary = None
      singular = None
      content = None

      # just skip directories. they have no diffs.
      if change.item_kind == svn.core.svn_node_dir:
        continue

      # is this change in (or out of) the set of matched paths?
      if self.paths.has_key(path) != self.in_paths:
        continue

      # figure out if/how to generate a diff

      if not change.path:
        # it was delete.
        kind = 'D'

        # get the diff url, if any is specified
        diff_url = self.diffurls.get_delete_url(self.repos.rev, change)

        # show the diff?
        if self.diffsels.delete:
          diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev),
                                 change.base_path, None, None, self.pool)

          label1 = '%s\t%s' % (change.base_path, self.date)
          label2 = '(empty file)'
          singular = True

      elif change.added:
        if change.base_path and (change.base_rev != -1):
          # this file was copied.
          kind = 'C'

          # any diff of interest?
          if change.text_changed:

            # get the diff url, if any is specified
            diff_url = self.diffurls.get_copy_url(self.repos.rev, change)

            # show the diff?
            if self.diffsels.copy:
              diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev),
                                     change.base_path,
                                     self.repos.root_this, change.path,
                                     self.pool)
              label1 = change.base_path + '\t(original)'
              label2 = '%s\t%s' % (change.path, self.date)
              singular = False
        else:
          # the file was added.
          kind = 'A'

          # get the diff url, if any is specified
          diff_url = self.diffurls.get_add_url(self.repos.rev, change)

          # show the diff?
          if self.diffsels.add:
            diff = svn.fs.FileDiff(None, None, self.repos.root_this,
                                   change.path, self.pool)
            label1 = '(empty file)'
            label2 = '%s\t%s' % (change.path, self.date)
            singular = True

      elif not change.text_changed:
        # the text didn't change, so nothing to show.
        continue
      else:
        # a simple modification.
        kind = 'M'

        # get the diff url, if any is specified
        diff_url = self.diffurls.get_modify_url(self.repos.rev, change)

        # show the diff?
        if self.diffsels.modify:
          diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev),
                                 change.base_path,
                                 self.repos.root_this, change.path,
                                 self.pool)
          label1 = change.base_path + '\t(original)'
          label2 = '%s\t%s' % (change.path, self.date)
          singular = False

      if diff:
        binary = diff.either_binary()
        if binary:
          # binary diffs are never rendered; only the flag survives
          content = src_fname = dst_fname = None
        else:
          src_fname, dst_fname = diff.get_files()
          content = DiffContent(self.cfg.get_diff_cmd(self.group, {
            'label_from' : label1,
            'label_to' : label2,
            'from' : src_fname,
            'to' : dst_fname,
            }))

      # return a data item for this diff
      return _data(
        path=change.path,
        base_path=change.base_path,
        base_rev=change.base_rev,
        diff=diff,
        diff_url=diff_url,
        kind=kind,
        label_from=label1,
        label_to=label2,
        from_fname=src_fname,
        to_fname=dst_fname,
        binary=binary,
        singular=singular,
        content=content,
        )
+
+
class DiffContent:
  """Generator-like object returning annotated lines of a diff.

  Each item is a _data with the raw line, the line text (marker and
  newline stripped) and a one-letter classification: H)eader, A)dded,
  D)eleted, C)ontext, F)rom-file header, T)o-file header, U)nknown.
  """

  def __init__(self, cmd):
    self.seen_change = False

    # By default we choose to incorporate child stderr into the output
    self.pipe = Popen4(cmd)

  def __nonzero__(self):
    # we always have some items
    return True

  def __getitem__(self, idx):
    # old-style iteration protocol; idx is ignored, lines are streamed
    if self.pipe is None:
      raise IndexError

    line = self.pipe.fromchild.readline()
    if not line:
      # wait on the child so we don't end up with a billion zombies
      self.pipe.wait()
      self.pipe = None
      raise IndexError

    # classify the type of line by its first character; '-'/'+' lines
    # before the first hunk header are the file-name headers.
    marker = line[:1]
    if marker == '@':
      self.seen_change = True
      ltype = 'H'
    elif marker == ' ':
      ltype = 'C'
    elif marker in ('-', '+'):
      if self.seen_change:
        ltype = {'-': 'D', '+': 'A'}[marker]
      else:
        ltype = {'-': 'F', '+': 'T'}[marker]
    else:
      ltype = 'U'

    return _data(
      raw=line,
      text=line[1:-1],  # remove indicator and newline
      type=ltype,
      )
+
+
class TextCommitRenderer:
  """Render the commit notification mail as plain text."""

  def __init__(self, output):
    self.output = output

  def render(self, data):
    "Render the commit defined by 'data'."
    out = self.output.write

    out('Author: %s\nDate: %s\nNew Revision: %s\n\nLog:\n%s\n\n'
        % (data.author, data.date, data.rev, data.log))

    # summary sections for the paths this group matched
    self._render_list('Added', data.added_data)
    self._render_list('Removed', data.removed_data)
    self._render_list('Modified', data.modified_data)

    has_other = (data.other_added_data or data.other_removed_data
                 or data.other_modified_data)
    if has_other:
      if data.show_nonmatching_paths:
        out('\nChanges in other areas also in this revision:\n')
        self._render_list('Added', data.other_added_data)
        self._render_list('Removed', data.other_removed_data)
        self._render_list('Modified', data.other_modified_data)
      else:
        out('and changes in other areas\n')

    self._render_diffs(data.diffs, '')
    if data.other_diffs:
      self._render_diffs(data.other_diffs,
                         '\nDiffs of changes in other areas also'
                         ' in this revision:\n')

  def _render_list(self, header, data_list):
    """Write HEADER plus one summary line per item; write nothing for an
    empty list."""
    if not data_list:
      return

    out = self.output.write
    out(header + ':\n')
    for item in data_list:
      if item.is_dir:
        is_dir = '/'
      else:
        is_dir = ''
      props = ''
      if item.props_changed:
        if item.text_changed:
          props = '   (contents, props changed)'
        else:
          props = '   (props changed)'
      out('   %s%s%s\n' % (item.path, is_dir, props))
      if item.copied:
        # annotate copies with their source and whether the text changed
        if is_dir:
          text = ''
        elif item.text_changed:
          text = ', changed'
        else:
          text = ' unchanged'
        out('      - copied%s from r%d, %s%s\n'
            % (text, item.base_rev, item.base_path, is_dir))

  def _render_diffs(self, diffs, section_header):
    """Render diffs. Write the SECTION_HEADER iff there are actually
    any diffs to render."""
    out = self.output.write
    wrote_header = False

    for diff in diffs:
      if not diff.diff and not diff.diff_url:
        continue
      if not wrote_header:
        out(section_header)
        wrote_header = True

      if diff.kind == 'D':
        out('\nDeleted: %s\n' % diff.base_path)
      elif diff.kind == 'C':
        out('\nCopied: %s (from r%d, %s)\n'
            % (diff.path, diff.base_rev, diff.base_path))
      elif diff.kind == 'A':
        out('\nAdded: %s\n' % diff.path)
      else:
        # kind == 'M'
        out('\nModified: %s\n' % diff.path)

      if diff.diff_url:
        out('URL: %s\n' % diff.diff_url)

      if not diff.diff:
        continue

      out(SEPARATOR + '\n')

      if diff.binary:
        if diff.singular:
          out('Binary file. No diff available.\n')
        else:
          out('Binary files. No diff available.\n')
        continue

      for line in diff.content:
        out(line.raw)
+
+
class Repository:
  "Hold roots and other information about the repository."

  def __init__(self, repos_dir, rev, pool):
    self.repos_dir = repos_dir
    self.rev = rev
    self.pool = pool

    self.repos_ptr = svn.repos.open(repos_dir, pool)
    self.fs_ptr = svn.repos.fs(self.repos_ptr)

    # cache of revision number -> root object, filled lazily by get_root()
    self.roots = { }

    self.root_this = self.get_root(rev)

    self.author = self.get_rev_prop(svn.core.SVN_PROP_REVISION_AUTHOR)

  def get_rev_prop(self, propname):
    """Return the value of revision property PROPNAME for self.rev."""
    return svn.fs.revision_prop(self.fs_ptr, self.rev, propname, self.pool)

  def get_root(self, rev):
    """Return the root object for REV, creating and caching it on demand."""
    if rev not in self.roots:
      self.roots[rev] = svn.fs.revision_root(self.fs_ptr, rev, self.pool)
    return self.roots[rev]
+
+
class Config:
  """Parsed representation of a mailer.conf file.

  Every INI section becomes an attribute of this object holding a
  _sub_section whose attributes are the section's raw option values.
  Sections other than the predefined ones are treated as mail groups.
  """

  # The predefined configuration sections. These are omitted from the
  # set of groups.
  _predefined = ('general', 'defaults', 'maps')

  def __init__(self, fname, repos, global_params):
    """Read FNAME and prepare group/map structures for REPOS.

    GLOBAL_PARAMS is a dict of substitution parameters that is always
    available (it may be overridden by regex match groups later).
    """
    cp = ConfigParser.ConfigParser()
    cp.read(fname)

    # record the (non-default) groups that we find
    self._groups = [ ]

    for section in cp.sections():
      if not hasattr(self, section):
        section_ob = _sub_section()
        setattr(self, section, section_ob)
        if section not in self._predefined:
          self._groups.append(section)
      else:
        section_ob = getattr(self, section)
      for option in cp.options(section):
        # get the raw value -- we use the same format for *our* interpolation
        value = cp.get(section, option, raw=1)
        setattr(section_ob, option, value)

    # be compatible with old format config files
    # (NOTE(review): assumes [general] and [defaults] sections exist; a
    # config without them would raise AttributeError here -- confirm)
    if hasattr(self.general, 'diff') and not hasattr(self.defaults, 'diff'):
      self.defaults.diff = self.general.diff
    if not hasattr(self, 'maps'):
      self.maps = _sub_section()

    # these params are always available, although they may be overridden
    self._global_params = global_params.copy()

    # prepare maps. this may remove sections from consideration as a group.
    self._prep_maps()

    # process all the group sections.
    self._prep_groups(repos)

  def is_set(self, option):
    """Return None if the option is not set; otherwise, its value is returned.

    The option is specified as a dotted symbol, such as 'general.mail_command'
    """
    ob = self
    for part in string.split(option, '.'):
      if not hasattr(ob, part):
        return None
      ob = getattr(ob, part)
    return ob

  def get(self, option, group, params):
    "Get a config value with appropriate substitutions and value mapping."

    # find the right value: the group's own setting wins, then [defaults],
    # then the empty string
    value = None
    if group:
      sub = getattr(self, group)
      value = getattr(sub, option, None)
    if value is None:
      value = getattr(self.defaults, option, '')

    # parameterize it
    if params is not None:
      value = value % params

    # apply any mapper
    mapper = getattr(self.maps, option, None)
    if mapper is not None:
      value = mapper(value)

      # Apply any parameters that may now be available for
      # substitution that were not before the mapping.
      if value is not None and params is not None:
        value = value % params

    return value

  def get_diff_cmd(self, group, args):
    "Get a diff command as a list of argv elements."
    ### do some better splitting to enable quoting of spaces
    diff_cmd = string.split(self.get('diff', group, None))

    cmd = [ ]
    for part in diff_cmd:
      # each argv element may contain %(...)s substitutions from ARGS
      cmd.append(part % args)
    return cmd

  def _prep_maps(self):
    "Rewrite the [maps] options into callables that look up values."

    mapsections = []

    for optname, mapvalue in vars(self.maps).items():
      if mapvalue[:1] == '[':
        # a section is acting as a mapping
        sectname = mapvalue[1:-1]
        if not hasattr(self, sectname):
          raise UnknownMappingSection(sectname)
        # construct a lambda to look up the given value as an option name,
        # and return the option's value. if the option is not present,
        # then just return the value unchanged.
        # (sect is bound as a default argument so each lambda captures the
        # section object at definition time)
        setattr(self.maps, optname,
                lambda value,
                       sect=getattr(self, sectname): getattr(sect, value,
                                                             value))
        # mark for removal when all optnames are done
        if sectname not in mapsections:
          mapsections.append(sectname)

      # elif test for other mapper types. possible examples:
      #   dbm:filename.db
      #   file:two-column-file.txt
      #   ldap:some-query-spec
      # just craft a mapper function and insert it appropriately

      else:
        raise UnknownMappingSpec(mapvalue)

    # remove each mapping section from consideration as a group
    for sectname in mapsections:
      self._groups.remove(sectname)


  def _prep_groups(self, repos):
    """Compile the per-group path regexes and substitution params for REPOS."""
    self._group_re = [ ]

    repos_dir = os.path.abspath(repos.repos_dir)

    # compute the default repository-based parameters. start with some
    # basic parameters, then bring in the regex-based params.
    self._default_params = self._global_params

    try:
      match = re.match(self.defaults.for_repos, repos_dir)
      if match:
        self._default_params = self._default_params.copy()
        self._default_params.update(match.groupdict())
    except AttributeError:
      # there is no self.defaults.for_repos
      pass

    # select the groups that apply to this repository
    for group in self._groups:
      sub = getattr(self, group)
      params = self._default_params
      if hasattr(sub, 'for_repos'):
        match = re.match(sub.for_repos, repos_dir)
        if not match:
          continue
        params = params.copy()
        params.update(match.groupdict())

      # if a matching rule hasn't been given, then use the empty string
      # as it will match all paths
      for_paths = getattr(sub, 'for_paths', '')
      exclude_paths = getattr(sub, 'exclude_paths', None)
      if exclude_paths:
        exclude_paths_re = re.compile(exclude_paths)
      else:
        exclude_paths_re = None

      self._group_re.append((group, re.compile(for_paths),
                             exclude_paths_re, params))

    # after all the groups are done, add in the default group
    try:
      self._group_re.append((None,
                             re.compile(self.defaults.for_paths),
                             None,
                             self._default_params))
    except AttributeError:
      # there is no self.defaults.for_paths
      pass

  def which_groups(self, path):
    "Return the path's associated groups."
    groups = []
    for group, pattern, exclude_pattern, repos_params in self._group_re:
      match = pattern.match(path)
      if match:
        if exclude_pattern and exclude_pattern.match(path):
          continue
        params = repos_params.copy()
        params.update(match.groupdict())
        groups.append((group, params))
    if not groups:
      # no group matched; fall back to the default params with no group
      groups.append((None, self._default_params))
    return groups
+
+
class _sub_section:
  """Empty attribute bag; Config stores each INI section's options on one."""
  pass
+
+class _data:
+  "Helper class to define an attribute-based hunk o' data."
+  def __init__(self, **kw):
+    vars(self).update(kw)
+
class MissingConfig(Exception):
  """No mailer.conf could be located (see the config search in __main__)."""
  pass
class UnknownMappingSection(Exception):
  """A [maps] option referenced a section that does not exist."""
  pass
class UnknownMappingSpec(Exception):
  """A [maps] option value was not of the supported '[section]' form."""
  pass
class UnknownSubcommand(Exception):
  """Unrecognized mailer subcommand."""
  pass
+
+
# enable True/False in older vsns of Python
# (on interpreters that predate the True/False builtins, referencing True
# raises NameError and we fall back to integer stand-ins)
try:
  _unused = True
except NameError:
  True = 1
  False = 0
+
+
if __name__ == '__main__':
  def usage():
    """Write the usage text to stderr and exit with status 1."""
    scriptname = os.path.basename(sys.argv[0])
    sys.stderr.write(
"""USAGE: %s commit      REPOS REVISION [CONFIG-FILE]
       %s propchange  REPOS REVISION AUTHOR REVPROPNAME [CONFIG-FILE]
       %s propchange2 REPOS REVISION AUTHOR REVPROPNAME ACTION [CONFIG-FILE]
       %s lock        REPOS AUTHOR [CONFIG-FILE]
       %s unlock      REPOS AUTHOR [CONFIG-FILE]

If no CONFIG-FILE is provided, the script will first search for a mailer.conf
file in REPOS/conf/.  Failing that, it will search the directory in which
the script itself resides.

ACTION was added as a fifth argument to the post-revprop-change hook
in Subversion 1.2.0.  Its value is one of 'A', 'M' or 'D' to indicate
if the property was added, modified or deleted, respectively.

""" % (scriptname, scriptname, scriptname, scriptname, scriptname))
    sys.exit(1)

  # Command list:  subcommand -> number of arguments expected (not including
  #                              the repository directory and config-file)
  cmd_list = {'commit'     : 1,
              'propchange' : 3,
              'propchange2': 4,
              'lock'       : 1,
              'unlock'     : 1,
              }

  config_fname = None
  argc = len(sys.argv)
  # need at least: script-name, subcommand, repos-dir
  if argc < 3:
    usage()

  cmd = sys.argv[1]
  repos_dir = svn.core.svn_path_canonicalize(sys.argv[2])
  try:
    expected_args = cmd_list[cmd]
  except KeyError:
    usage()

  # argv layout: [script, cmd, repos-dir, <expected_args...>, [config-file]]
  if argc < (expected_args + 3):
    usage()
  elif argc > expected_args + 4:
    usage()
  elif argc == (expected_args + 4):
    config_fname = sys.argv[expected_args + 3]

  # Settle on a config file location, and open it.
  if config_fname is None:
    # Default to REPOS-DIR/conf/mailer.conf.
    config_fname = os.path.join(repos_dir, 'conf', 'mailer.conf')
    if not os.path.exists(config_fname):
      # Okay.  Look for 'mailer.conf' as a sibling of this script.
      config_fname = os.path.join(os.path.dirname(sys.argv[0]), 'mailer.conf')
  if not os.path.exists(config_fname):
    raise MissingConfig(config_fname)

  # run_app supplies the svn pool that main() threads through everything
  svn.core.run_app(main, cmd, config_fname, repos_dir,
                   sys.argv[3:3+expected_args])
+
+# ------------------------------------------------------------------------
+# TODO
+#
+# * add configuration options
+#   - each group defines delivery info:
+#     o whether to set Reply-To and/or Mail-Followup-To
+#       (btw: it is legal do set Reply-To since this is the originator of the
+#        mail; i.e. different from MLMs that munge it)
+#   - each group defines content construction:
+#     o max size of diff before trimming
+#     o max size of entire commit message before truncation
+#   - per-repository configuration
+#     o extra config living in repos
+#     o optional, non-mail log file
+#     o look up authors (username -> email; for the From: header) in a
+#       file(s) or DBM
+# * get rid of global functions that should properly be class methods
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/mailer.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/mailer.pyo
new file mode 100644
index 0000000..8850552
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/mailer.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/verify.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/verify.py
new file mode 100644
index 0000000..1cf31551
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/verify.py
@@ -0,0 +1,59 @@
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import subprocess
+import sys
+import gvn
+
"""gvn hook runner post-commit svnadmin verify hook
"""
# NOTE(review): this string sits after the imports, so it is a no-op
# statement rather than the module __doc__ -- confirm whether it should
# move above the imports.

# This will fail until you fix the path to work for your system
SVNADMIN=gvn.SVNADMIN
# Path of the sendmail binary used to deliver failure reports.
SENDMAIL='/usr/sbin/sendmail'
# Obviously, you'd want to adjust that too, set to empty string to disable
EMAIL_ADDRESS = 'svn-team'
+
+
def RunHook(hi, logger, email=None):
  """Implements gvn.hooks.runner's RunHook interface.

  Runs `svnadmin verify` on the revision just committed.  If verify
  produces any output (i.e. fails), the output is mailed to EMAIL
  (defaulting to EMAIL_ADDRESS; empty string disables mail) and then
  returned so the hook runner can report the error.  Returns 0 on
  success.
  """

  if email is None:
    email = EMAIL_ADDRESS

  args = (SVNADMIN, "verify", '-qr', str(hi.revision), hi.repos_path)
  logger.debug("%s" % ' '.join(args))
  p = subprocess.Popen(args, close_fds=True, stdout=subprocess.PIPE,
                       stderr=subprocess.STDOUT)

  # Read stdout to EOF *before* reaping the child.  The original called
  # p.wait() first, which can deadlock once the child fills the pipe
  # buffer; communicate() drains the pipe and then waits.
  output = p.communicate()[0]

  if output:
    # if configured to mail, do so before returning the error to the sender
    if email:
      mailargs = (SENDMAIL, "-oi", "-f", email, email)
      sendmail = subprocess.Popen(mailargs, stdin=subprocess.PIPE)
      sendmail.stdin.write("""From: %s
To: %s
Subject: svnadmin verify error on repository %s rev %d

Output from %s is:
%s"""% (email, email, hi.repos_path, hi.revision, args, output))
      # Close the pipe so sendmail sees EOF, and reap the child so it
      # does not linger as a zombie.
      sendmail.stdin.close()
      sendmail.wait()

    return output

  return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/verify.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/verify.pyo
new file mode 100644
index 0000000..21d67a7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/post_commit/verify.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/__init__.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/__init__.pyo
new file mode 100644
index 0000000..406d41b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/__init__.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_bad_path_names.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_bad_path_names.py
new file mode 100644
index 0000000..478d8dc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_bad_path_names.py
@@ -0,0 +1,58 @@
+
+# This trigger is meant to disallow filenames that may cause problems with
+# unix and windows filesystems, independently of what svn may or may not
+# disallow itself.
+
+import posixpath
+import re
+import svn.fs
+
# Disallow all kinds of whitespace (space, tab, nl)
BADCHARS_WS = { " "  : "' ' (space)",
                "\t" : "'\t' (tab)",
                "\n" : "LF (new line)",
                "\r" : "CR (carriage return)",
              }
# These make windows unhappy
BADCHARS_OTHER = {  "*" : "'*'",
                    "?" : "'?'",
                    "\\" : "'\\' (backslash)",
                    ":" : "':' (colon)",
                    ";" : "';' (semi-colon)",
                    "'" : "\"'\" (single quote)",
                    '"' : "'\"' (double quote)",
                 }
# Merged lookup of every forbidden character.  Copy BADCHARS_WS first:
# the previous plain assignment aliased the dict, so the update() below
# silently added the non-whitespace characters into BADCHARS_WS too.
BADCHARS = dict(BADCHARS_WS)
BADCHARS.update(BADCHARS_OTHER)

# As per http://msdn2.microsoft.com/en-us/library/aa365247.aspx
BAD_WINDOWS_FILENAMES = ( "CON", "PRN", "AUX", "NUL",
    "LPT1", "LPT2", "LPT3", "LPT4", "LPT5", "LPT6", "LPT7", "LPT8", "LPT9",
    "COM1", "COM2", "COM3", "COM4", "COM5", "COM6", "COM7", "COM8", "COM9",
     )
+
def RunHook(hi, logger):
  """Reject adds of paths that would be unusable on Windows or unix.

  Implements gvn.hooks.runner's RunHook interface.

  Returns:
    0 when every added path is acceptable, or an error string naming the
    offending path.
  """

  for (path, cl) in hi.paths_changed.iteritems():
    # If the file already got in somehow, any damage is already done.
    # And we certainly want folks to be able to remove paths with bad
    # names.  So we only block adds.  Replace, therefore, is also fine.
    if cl.change_kind != svn.fs.path_change_add:
      continue

    name = posixpath.basename(path)

    # Forbidden characters are rejected anywhere in the full path, not
    # just in the basename.
    for char in BADCHARS:
      if char in path:
        return "%s is forbidden in pathname '%s'" % (BADCHARS[char], path)

    # Reserved Windows device names are forbidden as a basename, both
    # bare (AUX) and with any extension (AUX.foo), case-insensitively.
    for bad_name in BAD_WINDOWS_FILENAMES:
      # The original pattern ended with the inline flag "(?i)", which is
      # only valid at the start of a pattern in modern Python (an error
      # since 3.11); use the re.IGNORECASE flag instead.  Also use a raw
      # string so "\." is a literal dot, not a string escape.
      pattern = "^" + bad_name + r"(\.[^/]*)*$"
      if re.match(pattern, name, re.IGNORECASE):
        return "Sorry, %s contains %s, which is not compatible with Windows" \
                                                % (path, bad_name)

  return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_bad_path_names.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_bad_path_names.pyo
new file mode 100644
index 0000000..ba6e18e6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_bad_path_names.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_monitor_commit_tests.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_monitor_commit_tests.py
new file mode 100644
index 0000000..eb8a389
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_monitor_commit_tests.py
@@ -0,0 +1,43 @@
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Block monitoring test commits
+
+This is used to test commits and have them be rejected by the last hook in
+the chain (this one) so that we don't create a lot of revisions due to
+monitoring
+"""
+
+
+import svn.fs
+
# Presence of this revprop on a transaction marks it as a monitoring
# test commit that must be rejected.
reject_propname = "gvn:block-this-commit"

def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface

  Rejects transactions that carry the reject_propname revprop, so that
  monitoring systems can exercise the commit path without creating real
  revisions.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """

  props = svn.fs.txn_proplist(hi.txn, hi.pool)
  logger.debug("got rev props %s" % (str(props)))

  marker = svn.fs.txn_prop(hi.txn, reject_propname, hi.pool)
  if marker is None:
    return 0

  return("Rejecting commit as requested by revprop %s" % reject_propname)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_monitor_commit_tests.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_monitor_commit_tests.pyo
new file mode 100644
index 0000000..0879b83
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_monitor_commit_tests.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_some_prop_mods.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_some_prop_mods.py
new file mode 100644
index 0000000..96bff42d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_some_prop_mods.py
@@ -0,0 +1,82 @@
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Block some property modifications:
+
+- gvn:superusers on repository root
+  You should set the property before you install this hook:
+  svn propset gvn:superusers group:svn-team .
+  as once the hook is installed it will prevent non superusers from adding,
+  modifying, or removing the property.
+
+- any property with a name not allowed by svn.core.svn_prop_name_is_valid
+"""
+
+
+import svn.fs
+
+
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface

  Rejects revprops and node props with names svn considers invalid
  (grandfathering ones that already exist), and blocks non-superuser
  changes to gvn:superusers on the repository root.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """
  # NOTE(review): this module only imports svn.fs but also calls
  # svn.core functions; it relies on svn.core being reachable through
  # the svn package -- confirm.

  author = svn.fs.txn_prop(hi.txn, "svn:author", hi.pool)

  # Revision properties: reject new ones with invalid names, but allow
  # modifications of invalid-named props that already exist on HEAD.
  for prop in svn.fs.txn_proplist(hi.txn, hi.pool):
    logger.debug("Inspecting syntax of prop %s", prop)
    if not svn.core.svn_prop_name_is_valid(prop):
      if svn.fs.revision_prop(hi.fs, hi.head, prop) is not None:
        logger.debug("Allow modifying bad prop %s since it's existing" % prop)
      else:
        return("Rejecting rev property with forbidden name: '%s' for user %s" %
               (prop, author))


  # Node properties: quick verification of each property name on every
  # changed path.  (Loop variable renamed from `property`, which
  # shadowed the builtin.)
  for node in hi.paths_changed:
    props = svn.fs.node_proplist(hi.txn_root, node, hi.pool)
    for prop_name in props:
      logger.debug("Found property %s on repository %s" % (prop_name, node))
      # is this a bad property name according to svn?
      if not svn.core.svn_prop_name_is_valid(prop_name):
        # we'll only reject if the property doesn't already exist (we
        # grandfather existing props)
        if not svn.fs.node_prop(hi.head_root, node, prop_name, hi.pool):
          return("Rejecting new property on %s with forbidden name: '%s' for "
                 "user %s" % (node, prop_name, author))

  # We only care about changes to the root of the repository
  if "/" not in hi.paths_changed:
    logger.debug("No changes to /, therefore no node properties to check, pass")
    return 0

  # Get current gvn:superusers value from HEAD.
  old = svn.fs.node_prop(hi.head_root, "", "gvn:superusers", hi.pool)

  # Get new gvn:superusers value from the txn.
  new = svn.fs.node_prop(hi.txn_root, "", "gvn:superusers", hi.pool)

  if old != new:
    return ("user %s cannot modify %s property on repository root"
              % (author, "gvn:superusers"))

  logger.info("all root properties changes accepted, success!")
  return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_some_prop_mods.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_some_prop_mods.pyo
new file mode 100644
index 0000000..d3f7e58c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/block_some_prop_mods.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/bypass.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/bypass.py
new file mode 100644
index 0000000..5088cae9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/bypass.py
@@ -0,0 +1,53 @@
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Allow superusers to bypass hooks.
+
+Return -1 (immediate success, bypassing remaining hooks) if:
+- user is in gvn:superusers
+- transaction includes gvn:bypass-hooks revprop
+
+The gvn:bypass-hooks is left on the committed revision, as it may be
+of interest for auditing purposes see
+gvn.hooks.pre_revprop_change.check_gvn_revprops for a hook module that
+blocks later changes to this revprop.
+"""
+
+
+import svn.fs
+
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface

  Lets superusers skip the remaining hooks by setting the
  gvn:bypass-hooks revprop on the transaction; anyone else attempting
  the bypass has the commit rejected.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """

  # Pass hi.pool explicitly, as every other txn_prop call in these hook
  # modules does; it was omitted here.
  if svn.fs.txn_prop(hi.txn, "gvn:bypass-hooks", hi.pool) is None:
    # Bypass not invoked, we're done.
    return 0

  author = svn.fs.txn_prop(hi.txn, "svn:author", hi.pool)
  if (author in hi.super_users
      or hi.super_groups.intersection(hi.userdb.UserGroups(author))):
    logger.info("Superuser %s invoked gvn:bypass-hooks" % (author,))
    # Immediately allow commit, bypassing further hooks.
    return -1

  # Ordinary users may not bypass the hooks; reject the commit.
  return ("Non Superuser %s tried to invoke gvn:bypass-hooks" % (author,))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/bypass.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/bypass.pyo
new file mode 100644
index 0000000..d87ee04
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/bypass.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_changebranch.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_changebranch.py
new file mode 100644
index 0000000..b6d2af1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_changebranch.py
@@ -0,0 +1,52 @@
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Block users from mucking about with other users' changebranch areas.
+"""
+
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface

  Blocks commits into another user's changebranch area.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """
  base = hi.project_config['change-branch-base'] + '/'

  # With base == 'changes/':
  #   hi.prefix_changed                         => verdict
  #   ----------------------------------------------------------------
  #   'trunk'                                   => not a changebranch
  #   'project/changes/foo'                     => not a changebranch
  #   'changes/basil'                           => basil's area
  #   'changes/basil/foo/trunk/doc/changes/foo' => basil's area
  pieces = hi.prefix_changed.split(base, 1)
  if len(pieces) != 2 or pieces[0] != '':
    # The changed prefix does not start with the changebranch base, so
    # this hook has nothing to police.
    return 0

  # The area owner is the first path component after the base.
  area_owner = pieces[1].split('/')[0]
  if area_owner == hi.author:
    return 0

  return "%s cannot commit to %s's changebranch area" % (hi.author,
                                                         area_owner)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_changebranch.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_changebranch.pyo
new file mode 100644
index 0000000..c729db2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_changebranch.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_gvn_revprops.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_gvn_revprops.py
new file mode 100644
index 0000000..3b760c1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_gvn_revprops.py
@@ -0,0 +1,57 @@
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Check gvn:* revprops.
+
+- Block gvn:approve*
+- Block gvn:submitted
+- Sanity check gvn:change
+"""
+
+
+import gvn.util
+
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface

  Blocks gvn:approve* and gvn:submitted at commit time, and sanity
  checks any gvn:change revprop on the transaction.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """
  for (name, value) in hi.txn_props.iteritems():
    if name.startswith("gvn:approve"):
      return "gvn:approve* not allowed at commit time"

    if name == "gvn:submitted":
      return "gvn:submitted not allowed at commit time"

    # Only gvn:change with an actual value needs further checking.
    if name != "gvn:change" or value is None:
      continue

    (user_name, change_name, revision) = gvn.util.ParseChangeName(value)
    if None in [user_name, change_name, revision]:
      return "Invalid gvn:change value %s" % (value,)
    if user_name != hi.author:
      return ("%s not allowed to submit %s's change (%s)"
              % (hi.author, user_name, value))
    # TODO(epg): Check that the changebranch actually exists;
    # checking for a delete of it in this txn should cover that.
    if not 1 <= revision <= hi.head:
      return ("Invalid revision number for gvn:change %d (%s)"
              % (revision, value))

  return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_gvn_revprops.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_gvn_revprops.pyo
new file mode 100644
index 0000000..f090d44
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_gvn_revprops.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_no_bad_chars.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_no_bad_chars.py
new file mode 100644
index 0000000..3f0c2ad1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_no_bad_chars.py
@@ -0,0 +1,22 @@
import os

# This trigger is meant to disallow filenames that may cause problems with
# unix and windows filesystems, independently of what svn may or may not
# disallow itself.
# NOTE(review): the os import appears unused in this module; kept to
# avoid changing the module surface -- confirm before removing.

# Disallow all kinds of whitespace (space, tab, nl)
BADCHARS_WS = " \t\n"
# These make windows unhappy
BADCHARS_OTHER = '*?\\:;'
BADCHARS = BADCHARS_WS + BADCHARS_OTHER

def RunHook(hi, logger):
  """Reject the commit if any changed path contains a forbidden character.

  Args:
    hi: HookInfo object; only hi.paths_changed (iterable of paths) is read.
    logger: logger used to report the offending character and path.

  Returns:
    0 when every path is clean, 255 when a forbidden character is found.
  """
  # The warning/return lines were originally tab-indented while the rest
  # of the file uses spaces (a TabError in Python 3); normalized to spaces.
  for path in hi.paths_changed:
    for char in BADCHARS:
      if char in path:
        logger.warning("'%s' is forbidden in pathname '%s'" % (char, path))
        return 255

  return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_no_bad_chars.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_no_bad_chars.pyo
new file mode 100644
index 0000000..61614a5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_no_bad_chars.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_owners.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_owners.py
new file mode 100644
index 0000000..bd1ad2b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_owners.py
@@ -0,0 +1,223 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""checks for OWNERS files recursively and only allow commits accordingly.
+
+The idea is that every directory in DIR_ROOT can have a file named OWNERS.
+This file determines who has write permissions to that directory, and
+every directory beneath it.  Permissions are additive, so you have
+permissions to write to file //repo/foo/bar/baz if you're listed in
+//repo/OWNERS, //repo/foo/OWNERS, or //repo/foo/bar/OWNERS.
+
+Note that syntax errors are logged, the relevant line ignored, and parsing
+continues (i.e. it doesn't prevent access if another line allows it)
+
+This blocks superusers from committing without approval when not in an
+OWNERS file (but see bypass.py), but *does* allow a superuser's approval
+to count even if he's not in OWNERS.
+"""
+
+import posixpath
+import re
+
+import svn.core
+import svn.fs
+
+import gvn.util
+
+
class OwnersCache(object):

  def __init__(self, hi, logger):
    """computes and keeps track of authorized owners and groups.

    Args:
      hi: HookInfo object
      logger: initialized logger object

    This object populates and keeps a dict of directory names to list of
    allowed users and groups
    """

    # Maps directory path -> (set of allowed users, set of allowed groups),
    # filled lazily by GetOwnerUsersGroups.
    self._dir_cache = {}
    self._hi = hi
    self._logger = logger

  def GetOwnerUsersGroups(self, path, recursive=True):
    """Computes and returns allowed users and groups.

    Args:
      path: checks the repository starting at path and working up
            must not have a trailing slash
      recursive: when implemented allows this function to check a given
                 level without working up (for OWNERS includes)
    Returns:
      two sets: one with allowed users and one with allowed groups

    Method looks for OWNERS in path, and recurses all the way to the svn root
    """

    hi, logger = self._hi, self._logger
    logger.debug("path is " + path)

    # Serve repeated lookups for the same directory from the cache.
    if path in self._dir_cache:
      logger.info(path + " cached, returning from _dir_cache")
      return self._dir_cache[path]

    # Repository root uses a bare 'OWNERS'; no leading slash anywhere.
    if path == '':
      owner_file = 'OWNERS'
    else:
      owner_file = path + "/OWNERS"

    # Read <path>/OWNERS from the HEAD revision, if present and a file.
    try:
      contents = svn.fs.file_contents(hi.head_root, owner_file, hi.pool)
    except svn.core.SubversionException, e:
      if e.apr_err not in [svn.core.SVN_ERR_FS_NOT_FOUND,
                           svn.core.SVN_ERR_FS_NOT_FILE]:
        raise
      # No OWNERS file, or not a file.
      contents = None

    if contents is None:
      # No OWNERS here: this level contributes nothing, but parents may
      # still grant access (permissions are additive).
      users = set()
      groups = set()
    else:
      contents = svn.core.Stream(contents).read()
      (users, groups, noparent) = gvn.util.ParseOwners(contents)
      if noparent:
        # This OWNERS file opted out of inheriting parent permissions.
        recursive = False

    # Merge in the parent directory's owners, recursing toward the root,
    # unless we are at the root or noparent was set.
    if recursive and path != "":
      parent = posixpath.dirname(path)
      parent_users, parent_groups = self.GetOwnerUsersGroups(parent)
      users.update(parent_users)
      groups.update(parent_groups)

    self._dir_cache[path] = (users, groups)
    logger.debug("after: %s %s" % self._dir_cache[path])
    return self._dir_cache[path]
+
def CheckUsers(check_users, users, groups, hi):
  """Return whether any of the users in check_users are allowed.

  A user is allowed when listed directly in users, or when hi.userdb
  places him in one of the allowed groups.

  Arguments:
  check_users   -- list of users to check
  users         -- set of allowed users
  groups        -- set of allowed groups
  hi            -- gvn.hooks.HookInfo
  """
  # Direct listing in an OWNERS file is sufficient.
  if users.intersection(check_users):
    return True

  # Otherwise resolve group membership through the user database.
  for group in groups:
    for user in check_users:
      if hi.userdb.UserInGroup(user, group):
        return True

  return False
+
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface

  Authorizes every changed path against OWNERS files, accepting either
  the author directly or an approver recorded on the change's snapshot
  revision.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """
  # First, see if this is a changebranch snapshot, in which we don't
  # check OWNERS.
  cbp = hi.project_config['change-branch-base'] + '/' + hi.author
  if hi.prefix_changed == cbp or hi.prefix_changed.startswith(cbp + '/'):
    return 0

  # which directories we've already checked OWNERS access in
  checked_dir_list = {}
  owner = OwnersCache(hi, logger)

  # Collect approvers from the gvn:approve:<user> revprops of the
  # snapshot revision named by this txn's gvn:change property.
  approvers = []
  logger.debug("looking for approvers")
  gvn_prop = svn.fs.txn_prop(hi.txn, "gvn:change", hi.pool)
  if gvn_prop is not None:
    (user_name, change_name, gvn_rev) = gvn.util.ParseChangeName(gvn_prop)
    if None in [user_name, change_name, gvn_rev]:
      return "Invalid gvn:change value %s" % (gvn_prop,)
    props = svn.fs.revision_proplist(hi.fs, gvn_rev, hi.pool)
    logger.debug("got rev %s and props %s" % (gvn_rev, str(props)))
    for key in props:
      # this property is sanity checked by the hook that checks it in
      match = re.match(r"gvn:approve:(.+)", key)
      if match is not None:
        approver = match.group(1)
        logger.debug("Found approver %s in changeset %d" % (approver, gvn_rev))
        approvers.append(approver)
  else:
    logger.debug("No gvn:change in props (%s)" % str(gvn_prop))

  for (path, cl) in hi.paths_changed.iteritems():
    # Eat leading /
    path = path[1:]
    # NOTE(review): group_match is never read below -- dead assignment.
    group_match = False

    # if a directory gets a modify request, it's a property change. Directory
    # changes are special since they are controlled by the OWNERS file in that
    # same directory, and not one level up
    if (svn.fs.check_path(hi.head_root, path, hi.pool) == svn.core.svn_node_dir
        and cl.change_kind == svn.fs.path_change_modify):
      directory = path
    else:
      # otherwise, we check OWNERS one level up
      directory = posixpath.dirname(path)

    logger.debug("%s in %s changed" % (path, directory))

    # One directory's verdict covers every path in it; a previously
    # passed directory would have returned already if it had failed.
    if directory in checked_dir_list:
      continue

    checked_dir_list[directory] = True

    users, groups = owner.GetOwnerUsersGroups(directory)
    if not users and not groups:
      # No OWNERS anywhere up the tree: unrestricted.
      logger.info("%s authorized for %s (no OWNERS)" % (hi.author, path))
      continue

    if CheckUsers([hi.author], users, groups, hi):
      # Author is allowed for this path; continue to next path.
      logger.info("%s directly allowed for %s" % (hi.author, path))
      continue
    else:
      # The author isn't allowed for this path, so check approvers.
      # Add in the superusers, so they can approve even if not in OWNERS.
      users.update(hi.super_users)
      groups.update(hi.super_groups)
      if CheckUsers(approvers, users, groups, hi):
        # Some approver is allowed for this path; continue to next path.
        logger.info("%s allowed by approval for %s" % (hi.author, path))
        continue

    # Neither the author nor any approvers are allowed for this path.
    return ("%s not authorized in any OWNERS file for %s, sorry"
            % (hi.author, path))
  # end for path

  logger.info("All files authorized, success!")
  return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_owners.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_owners.pyo
new file mode 100644
index 0000000..c7d8f51
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_commit/check_owners.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/__init__.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/__init__.pyo
new file mode 100644
index 0000000..1be7d65
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/__init__.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/block_bad_revprop_names.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/block_bad_revprop_names.py
new file mode 100644
index 0000000..afe24c0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/block_bad_revprop_names.py
@@ -0,0 +1,38 @@
+# Copyright 2008 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Prevent adding revprops with bad names.
+"""
+
+import os
+
+import svn.core
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface

  Only property additions are validated; modifying or deleting an
  existing revprop is allowed regardless of its name.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """

  if hi.action != "A":
    # Not an add (modify or remove); nothing to check.
    return 0

  if svn.core.svn_prop_name_is_valid(hi.propname):
    return 0

  return("Rejecting new property with forbidden name: '%s' for user %s"
          % (hi.propname, hi.user))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/block_bad_revprop_names.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/block_bad_revprop_names.pyo
new file mode 100644
index 0000000..9f5daba
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/block_bad_revprop_names.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_gvn_revprops.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_gvn_revprops.py
new file mode 100644
index 0000000..2e9f3d4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_gvn_revprops.py
@@ -0,0 +1,122 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Check modifications to gvn:* revprops.
+
+- Only allow users to Add gvn:approve:theirusername
+- Only allow users to Add gvn:submitted if valid
+- Block Add of any other gvn:* revprops
+- Block Modification and Delete of all gvn:* revprops, including
+  gvn:approve:*, gvn:change, and gvn:submitted.
+"""
+
+import os
+
+import svn.core
+import svn.fs
+
+import gvn.util
+
+
def CheckSubmitted(hi):
  """Validate an Add of the gvn:submitted revision property.

  When gvn submits a revision 100, it sets a gvn:change property on it
  (at commit time, i.e. not through this hook) pointing at the snapshot
  the change came from (say rev 90); gvn:change looks like
  user/branchname@90.  It then sets gvn:submitted on rev 90 to 100
  (which is hi.propvalue here).  This verifies the two revprops
  cross-reference each other and belong to hi.user.

  Returns an error string on failure, or None if the property is valid.
  """
  try:
    target = int(hi.propvalue)
  except ValueError:
    return ("Error: %s tried to set revprop %s to invalid %s" %
            (hi.user, hi.propname, hi.propvalue))

  # The submit comes after the snapshot, so the submitted revision must
  # be newer than the one being annotated, and it must already exist
  # (i.e. be no newer than head).
  if target <= hi.revision or target > hi.head:
    return ("%d is an invalid revision number for gvn:submitted"
            % (target,))

  # The revision being pointed at must itself carry a gvn:change revprop.
  change = svn.fs.revision_prop(hi.fs, target, "gvn:change", hi.pool)
  if change is None:
    return ("Cannot set gvn:submitted to r%d, which has no gvn:change"
            % (target,))

  # Parse the change name gvn:change points back with.
  (owner, branch, snap_rev) = gvn.util.ParseChangeName(change)
  if owner is None or branch is None or snap_rev is None:
    return ("Cannot set gvn:submitted to r%d, whose gvn:change (%s) is invalid"
            % (target, change))

  # gvn:change must point straight back at the revision we're annotating.
  if snap_rev != hi.revision:
    return ("Cannot set gvn:submitted to r%d, whose gvn:change (%s) does not point to %d"
            % (target, change, hi.revision))

  # Only the change's owner may mark it submitted.
  if owner != hi.user:
    return ("Cannot set gvn:submitted on %s's change (%s)"
            % (owner, change))

  # TODO(epg): Verify that changebranch actually exists?

  # Looks good.
  return None
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface.

  Policy for gvn:* revision properties:
  - a user may Add gvn:approve:<their own username>
  - a user may Add gvn:submitted when CheckSubmitted validates it
  - every other action on a gvn:* revprop is denied

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """
  # Non-gvn properties are outside this hook's jurisdiction.
  if not hi.propname.startswith('gvn:'):
    return 0

  # Users may add (never remove or modify) an approval in their own name.
  if hi.action == "A" and hi.propname == "gvn:approve:" + hi.user:
    logger.info("%s set gvn:approve:%s on revision %s" %
                                                (hi.user, hi.user, hi.revision))
    return 0

  # Allow setting gvn:submitted to a revision whose gvn:change property
  # corresponds to this revision (hi.revision).
  if hi.propname == 'gvn:submitted':
    if hi.action != 'A':
      # gvn:submitted can only be added, not removed or modified.
      return ("Error: user %s is not allowed to %s property "
                "gvn:submitted from revision %s" %
                 (hi.user, hi.action_word, hi.revision))

    error = CheckSubmitted(hi)
    if error is not None:
      # gvn:submitted is somehow invalid; report it to the user.
      return error
    # All is well, things match.
    return 0

  # Any other gvn:* change is denied outright.
  return ("%s cannot %s %s on revision %s" %
            (hi.user, hi.action_word,  hi.propname, hi.revision))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_gvn_revprops.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_gvn_revprops.pyo
new file mode 100644
index 0000000..4ce1bc89
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_gvn_revprops.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_svn_revprops.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_svn_revprops.py
new file mode 100644
index 0000000..f019d58
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_svn_revprops.py
@@ -0,0 +1,36 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Block all actions (Add/Modify/Delete/...) to svn:* revprops.
+"""
+
+import os
+
+
def RunHook(hi, logger):
  """Implements gvn.hooks.runner's RunHook interface.

  Denies every action (Add/Modify/Delete/...) on svn:* revision
  properties; anything else passes through untouched.

  Returns:
    -1: pass and bypass other hooks
    0: pass
    1: fail
    "string": fail and print string to the user
  """
  if hi.propname.startswith('svn:'):
    # All svn:* revprop changes are forbidden, regardless of action.
    return ("%s is not allowed to %s property %s for rev %s"
              % (hi.user, hi.action_word, hi.propname, hi.revision))
  # Not an svn: property; this hook passes.
  return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_svn_revprops.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_svn_revprops.pyo
new file mode 100644
index 0000000..0f38d66
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/pre_revprop_change/check_svn_revprops.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/runner.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/runner.py
new file mode 100644
index 0000000..b1c7bb3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/hooks/runner.py
@@ -0,0 +1,158 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import imp
+import logging
+import os
+import socket
+import sys
+import traceback
+
+from errno import ENOENT
+
+import gvn.hooks.info
+
+
# Shown to committers when a hook fails unexpectedly and no per-repository
# conf/gvn/rejection-message file is configured (see main below).
DEFAULT_MESSAGE = 'Unknown hook failure; contact your administrator for help.'

# XXX(epg): I'm not sure about this path.
# If we do have any local logging, it should be somehow configurable,
# defaulting to something in /var/log (or maybe just log to
# repo_dir/hooks/log, and let admins symlink that where they want, or
# use a named pipe.
DEFAULT_LOG_FILE = '/var/tmp/svn.log'
+
# Module-wide logger, configured once by SetupLogging.
_logger = None
def SetupLogging(logger_name, stream=None, logfile=None, level=logging.INFO):
  """Configure the module-wide logger exactly once.

  Subsequent calls are no-ops, so handlers are never installed twice.
  A bare-message stream handler is attached when stream is given; a
  timestamped file handler is attached when logfile is given.
  """
  global _logger
  if _logger is not None:
    return

  _logger = logging.getLogger(logger_name)
  _logger.setLevel(level)

  if stream is not None:
    handler = logging.StreamHandler(stream)
    handler.setFormatter(logging.Formatter('%(message)s'))
    _logger.addHandler(handler)

  if logfile is not None:
    handler = logging.FileHandler(logfile, 'a')
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)s %(levelname)-8s %(message)s',
        '%a %d %b %Y %H:%M:%S'))
    _logger.addHandler(handler)

def DEBUG(message):
  """Log at DEBUG level, if SetupLogging has run."""
  if _logger is not None:
    _logger.debug(message)

def INFO(message):
  """Log at INFO level, if SetupLogging has run."""
  if _logger is not None:
    _logger.info(message)

def WARN(message):
  """Log at WARNING level, if SetupLogging has run."""
  if _logger is not None:
    _logger.warning(message)
+
+
def ImportHook(hook_dir, hook_name):
  """Import hook module hook_name (a '*.py' filename) from hook_dir.

  Returns the module's RunHook function.
  """
  module_name = hook_name[:-3]  # strip the '.py' suffix
  (fp, path, description) = imp.find_module(module_name, [hook_dir])
  module = imp.load_module(module_name, fp, path, description)
  return module.RunHook
+
+
def ListHooks(hook_info):
  """Return the sorted list of '*.py' hook filenames in hook_info.hook_dir.

  Sorting is plain asciibetical so hooks run in a predictable order.

  Raises:
  whatever os.listdir would
  """
  names = [name for name in os.listdir(hook_info.hook_dir)
           if name.endswith('.py')]
  names.sort()
  return names
+
def main(argv, print_output=True):
  '''Run every hook module in hook_info.hook_dir, in sorted order.

  print_output can be turned off for unittests.

  Returns a process exit code:
    0 -- all hooks passed (or one returned -1 to short-circuit success)
    2 -- a hook rejected the action
    3 -- a hook failed to load or crashed
    4 -- reading conf/gvn/rejection-message crashed unexpectedly
  '''

  # Drop an "--invoked-as <name>" prefix some callers prepend.
  if len(argv) >= 2 and argv[1] == '--invoked-as':
    argv = argv[2:]

  hook_info = gvn.hooks.info.HookInfo(argv, sys.stdin)

  SetupLogging(hook_info.logger_name(), None, DEFAULT_LOG_FILE)

  # Load the admin-configured rejection message, if any; it replaces any
  # non-string value a hook returns on failure.
  try:
    try:
      fp = open(hook_info.repos_path + '/conf/gvn/rejection-message')
    except IOError, e:
      if e.errno != ENOENT:
        raise
      # No rejection-message file => use default.
      message = DEFAULT_MESSAGE
    else:
      # We have the rejection file => use it.
      message = fp.read()
      fp.close()
  except:
    # Any exception other than the rejection-message file not existing
    # => log the traceback and deny the commit with the default message.
    WARN(traceback.format_exc())
    sys.stderr.write("%s: %s" % (socket.gethostname(), DEFAULT_MESSAGE))
    return 4

  # It is important to know which host the error is on so that the admin can
  # read the right logs and not have to guess which host to read from
  message = "%s: %s" % (socket.gethostname(), message)

  try:
    try:
      for path in ListHooks(hook_info):
        try:
          run_hook = ImportHook(hook_info.hook_dir, path)
        except:
          # Record which module broke, then let the outer handler deny.
          WARN("failed to load %s/%s\n" % (hook_info.hook_dir, path))
          raise
        logger_name = hook_info.logger_name(path)
        logger = logging.getLogger(logger_name)
        # Hook contract: -1 pass-and-stop, 0 pass, anything else fail
        # (a string carries the message to show the user).
        hookret = run_hook(hook_info, logger)
        if hookret == -1:
          # Succeed immediately.
          return 0
        if hookret == 0:
          # Pass, keep checking.
          continue
        # Failure of some kind.
        if not isinstance(hookret, basestring):
          WARN("hook module return not 0, -1, or string: '%s'" % (hookret,))
          hookret = message
        if print_output:
          INFO(hookret)
          sys.stderr.write(hookret)
        return 2
    except:
      # A hook (or its import) blew up; deny with the generic message.
      WARN(traceback.format_exc())
      sys.stderr.write(message)
      return 3
    # All hook modules passed.
    return 0
  finally:
    logging.shutdown()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/nt.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/nt.py
new file mode 100644
index 0000000..b3ecaae
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/nt.py
@@ -0,0 +1,161 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Windows implementations of platform-specific utilities and pseudo-constants
+
+Certain functions must be implemented in OS-specific ways due to differences
+between Linux and Windows.  The constants and functions defined here are
+imported into a common interface by platform.py.
+
+"""
+
+
+import codecs
+import os
+
+from errno import ENOENT
+
+import win32api
+import win32com.shell.shell
+import win32com.shell.shellcon
+import win32con
+
+# As of Python 2.4, Win32 error codes overwrite the OSError.errno
+# values when a WinError is raised.  This is expected to be fixed in
+# 2.5, in which case the default value of ENOENT will apply instead.
+ENOENT_codes = [ENOENT, 3]
+
+DEFAULT_EDITOR_COMMAND = 'notepad'
+
+
def DescribeSubprocessError(cmd, status):
  """Return a one-line description of a subprocess failure.

  On Windows there is no wait()-status decoding to do, so the status is
  reported as a plain exit code.

  Arguments:
    cmd    -- the name or description of the command that was executed
    status -- the status code returned by the executed command
  """
  return "'%s' exited %d" % (cmd, status)
+
+
# No, svn_io_remove_dir2 doesn't work any better at all; see
# epg/hack-rmtree@431 for proof.
def rmtree(path):
  """Best-effort recursive delete of a directory tree, even if read-only.

  shutil.rmtree() doesn't work on Windows if any of the files or
  directories are read-only, which svn repositories are, so each entry
  is forced to FILE_ATTRIBUTE_NORMAL before deletion.

  Even so, this doesn't work very well: os.remove (and maybe os.rmdir,
  too, epg doesn't remember) randomly fails to delete files yet raises
  no error, and SetFileAttributes randomly dies with permission denied.
  Later attempts to delete the tree, even within the same process, seem
  to work sometimes.  So, this suppresses all failures.

  Arguments:
    path -- a path to a directory to be deleted

  Raises:
    nothing, ever; sad, innit?
  """
  try:
    for entry in os.listdir(path):
      child = os.path.join(path, entry)
      if os.path.isfile(child):
        # Clear read-only etc. so the delete can succeed.
        win32api.SetFileAttributes(child, win32con.FILE_ATTRIBUTE_NORMAL)
        os.remove(child)
      elif os.path.isdir(child):
        rmtree(child)
    win32api.SetFileAttributes(path, win32con.FILE_ATTRIBUTE_NORMAL)
    os.rmdir(path)
  except Exception:
    # Deliberately best-effort; see docstring.
    pass
+
+
def GetUserName():
  """Return the current user's local username.

  Delegates to the Win32 GetUserName API.
  """
  return win32api.GetUserName()
+
+
def ExecutableNames(short_name):
  """Return a list of potential executable names based on the given short name.

  Arguments:
    short_name -- a candidate executable name, with no "dot" extension

  Returns:
    A list containing the given short name with each of the possible
    executable extensions (.exe, .bat, .com) appended.  For example,
    'command' => ['command.exe', 'command.bat', 'command.com']
  """
  # XXX(pamg): Is this really the best way to do this?  GetBinaryType()
  # doesn't appear to be available in win32api.
  return ['.'.join((short_name, ext)) for ext in ('exe', 'bat', 'com')]
+
+
def EditorWantsBOM(editor, encoding):
  """Return True when the editor is Notepad and the encoding is UTF-8.

  Notepad expects a byte-order mark at the start of UTF-8 files.
  """
  if 'notepad' not in editor:
    return False
  return codecs.lookup(encoding) == codecs.lookup('utf-8')
+
+
def _GetAllAppdata():
  """Return the all-users application data folder (CSIDL_COMMON_APPDATA)."""
  return win32com.shell.shell.SHGetFolderPath(0,
                                  win32com.shell.shellcon.CSIDL_COMMON_APPDATA,
                                              0, 0)
def _GetUserAppdata():
  """Return the per-user application data folder (CSIDL_APPDATA)."""
  return win32com.shell.shell.SHGetFolderPath(0,
                                         win32com.shell.shellcon.CSIDL_APPDATA,
                                              0, 0)
+
def GetSvnUserConfigDir():
  """Per-user Subversion configuration directory."""
  return os.path.join(_GetUserAppdata(), 'Subversion')
def GetSvnSystemConfigDir():
  """System-wide Subversion configuration directory."""
  return os.path.join(_GetAllAppdata(), 'Subversion')
def GetGvnUserConf():
  """Per-user gvn configuration directory."""
  return os.path.join(_GetUserAppdata(), 'gvn')
def GetGvnSystemConf():
  """System-wide gvn configuration directory."""
  return os.path.join(_GetAllAppdata(), 'gvn')
+
class StrippedCRFile(object):
  """File-like wrapper that converts CRLF to LF on write.

  svn.diff writes out CRLF-terminated lines on Windows, which happens to
  work well for svn because it uses apr file objects, which apparently do
  no newline translation of their own (despite 30 (?) years of stdio
  precedent, *grumble*).  But, our file objects do such translation on
  Windows, so we need to turn all CRLFs into LFs before letting the file
  object add the CRs back.  Sigh.

  Only write() is intercepted; every other attribute access is proxied
  to the wrapped file object via __getattr__ (the usual Python proxy
  idiom).
  """

  def __init__(self, fp):
    # The underlying file object receiving the translated output.
    self.fp = fp

  def write(self, s):
    # Collapse CRLF to LF; the wrapped file re-adds CRs as needed.
    return self.fp.write(s.replace('\r\n', '\n'))

  def __getattr__(self, name):
    # Delegate everything else (read, close, flush, ...) to the file.
    return getattr(self.fp, name)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/platform.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/platform.py
new file mode 100644
index 0000000..35f161e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/platform.py
@@ -0,0 +1,40 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Platform-specific utilities and pseudo-constants
+
+Any functions and constants whose implementations or values differ between
+Linux and Windows should be defined in posix.py and nt.py, respectively.  The
+appropriate one of those two files will be imported into this module to provide
+a common, platform-independent interface to callers.
+
+"""
+
+
+import locale
+import sys
+
+
+if sys.platform == 'win32':
+  from gvn.nt import *
+else:
+  from gvn.posix import *
+
+
def DefaultEncoding():
  """Return the locale's default character encoding, or 'utf-8'.

  locale.getdefaultlocale() may report no encoding at all (e.g. in a
  bare environment); fall back to utf-8 in that case.
  """
  encoding = locale.getdefaultlocale()[1]
  if encoding is None:
    return 'utf-8'
  return encoding
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/posix.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/posix.py
new file mode 100644
index 0000000..3d787e8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/posix.py
@@ -0,0 +1,97 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Linux implementations of platform-specific utilities and pseudo-constants
+
+Certain functions must be implemented in OS-specific ways due to differences
+between Linux and Windows.  The constants and functions defined here are
+imported into a common interface by platform.py.
+
+"""
+
+
+import os
+import pwd
+import shutil
+import sys
+
+from errno import ENOENT
+
+
+# See nt.py for an explanation of why ENOENT_codes is needed.
+ENOENT_codes = [ENOENT]
+DEFAULT_EDITOR_COMMAND = 'vi'
+
+
def DescribeSubprocessError(cmd, status):
  """Return a string describing an error returned by a subprocess.

  status is a wait()-style exit status; the description distinguishes
  normal exit, death by signal, and stop by signal.

  Arguments:
    cmd    -- the name or description of the command that was executed
    status -- the status code returned by the executed command
  """
  if os.WIFEXITED(status):
    return "'%s' exited %d" % (cmd, os.WEXITSTATUS(status))
  if os.WIFSIGNALED(status):
    return "'%s' killed with signal %d" % (cmd, os.WTERMSIG(status))
  if os.WIFSTOPPED(status):
    return "'%s' stopped with signal %d" % (cmd, os.WSTOPSIG(status))
  # None of the decoders recognized the status.
  return "'%s' exited with invalid status (%d)" % (cmd, status)
+
+
# shutil.rmtree works fine on unix (no read-only workarounds needed, unlike
# the Windows implementation in nt.py), so just alias it.
rmtree = shutil.rmtree
+
+
def GetUserName():
  """Return the current user's local username (from the passwd database)."""
  passwd_entry = pwd.getpwuid(os.getuid())
  return passwd_entry[0]
+
+
def ExecutableNames(short_name):
  """Return a list of potential executable names based on the given short name.

  Unix executables carry no special extension, so the candidate list is
  just the name itself.

  Arguments:
    short_name -- a candidate executable name, with no "dot" extension

  Returns:
    A list containing only the unchanged short name.
  """
  return [short_name]
+
+
def EditorWantsBOM(editor, encoding):
  # No unix editor needs a byte-order mark, regardless of encoding.
  return False
+
+
def GetSvnUserConfigDir():
  """Per-user Subversion configuration directory (~/.subversion)."""
  return os.path.expanduser(os.path.join('~', '.subversion'))

def GetSvnSystemConfigDir():
  """System-wide Subversion configuration directory."""
  return '/etc/subversion'

def GetGvnUserConf():
  """Per-user gvn configuration directory (~/.gvn)."""
  return os.path.expanduser(os.path.join('~', '.gvn'))

def GetGvnSystemConf():
  """System-wide gvn configuration directory."""
  return '/etc/gvn'
+
+
def StrippedCRFile(fp):
  """Return fp unmolested; unix file objects need no CRLF stripping."""
  return fp
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/project.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/project.py
new file mode 100644
index 0000000..0c338c14
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/project.py
@@ -0,0 +1,174 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""project stuffs
+
+Classes:
+Project -- Project context
+
+"""
+
+
+import cPickle
+
+import svn.ra
+
+from svn.core import SWIG_SVN_INVALID_REVNUM as SVN_INVALID_REVNUM
+
+import gvn.errors
+import gvn.repository
+import gvn.util
+
+
# Per-project defaults; Project.Load overlays these with whatever the
# repository's pickled gvn:project property provides.  NOTE: the
# mail-template value is user-visible output and must not be reflowed.
_DEFAULT_CONFIG = {
  'change-branch-base': 'changes',
  'diff-lines': 1000,
  'mail-template': """Subject: %(diff_size_desc)s (%(change_name_at_head)s)

I'd like you to do a code review.  To review this change, run

  gvn review --project %(project)s %(change_name)s

Alternatively, to review the latest snapshot of this change
branch, run

  gvn --project %(project)s review %(change_name_at_head)s

to review the following change:

%(description)s

This is a semiautomated message from "gvn mail".  See
<http://code.google.com/p/gvn/> to learn more.

%(shortdiff)s
""",

  'bug-links': [],
}
+
+
class Project(object):
  """Project context: lazily resolves its path, Repository, and config.

  All expensive state (the Repository connection, the project path, and
  the per-project configuration stored in the gvn:project revprop) is
  created on first access, so constructing a Project is cheap.
  """

  def __init__(self, username, url, config, pool=None, path=None):
    """Initialize Project.

    One of two conditions must be true:
      1. url is the project url and path is None
      2. url is the repository root and path is relative project path

    E.g.
      1. url='http://svn.apache.org/repos/asf/apr/apr', path=None
      2. url='http://svn.apache.org/repos/asf', path='apr/apr'

    See the path docstring for more.

    TODO(epg): If the caller wants to use the repository root as the
    project and wants to prevent the Project.path network hit, can he
    specify path=''?  Not sure if that does or should work...

    Arguments:
    username -- used for changebranch paths and Repository auth
    url      -- URL of either the project or the repository root
    config   -- Config for Repository
    pool     -- memory pool for Repository and Load (c.f.)
    path     -- optional path of project (default None)
    """

    self._username = username
    self._url = url
    self._config = config
    self._path = path
    self._pool = pool

    # Lazily-populated caches; see _GetProjectConfig and _GetRepository.
    self._project_config = None
    self._repository = None

  def _GetPath(self):
    # Lazily derive the project path from the repository root on first use.
    if self._path is None:
      self._path = gvn.util.RelativePath(self.repository.URL(), self._url)
    return self._path
  path = property(_GetPath,
doc="""If Project was initialized with path=None (the default),
    accessing this requires a round-trip or two to the repository (and
    therefore the network, for remote repositories), because _GetPath
    must use Repository.URL to figure out the project path, and
    Repository.URL must ask the repository for the root URL
    (round-trip #1) and reparent if the specified url is not the root
    URL (round-trip #2).
    """)

  def _GetRepository(self):
    # Lazily open the Repository on first use and cache it.
    if self._repository is None:
      self._repository = gvn.repository.Repository(self._username,
                                                   self._url,
                                                   self._config, self._pool)
      if self._path is not None:
        # If self._path is not None, it had to have been specified at
        # initialization; the only other way it can be not None is
        # *after* self._repository is initialized.  So, this means
        # condition 2 in the __init__ docstring: self._url is the URL
        # of the repository root.
        self._repository.have_root_url = True
    return self._repository
  repository = property(_GetRepository)

  def _GetProjectConfig(self, key):
    # Fetch the project config from the repository on first access.
    if self._project_config is None:
      self.Load()
    return self._project_config[key]
  # Convenience accessors for the individual config keys.
  changebranch_base = property(lambda s: s._GetProjectConfig('change-branch-base'))
  diff_lines = property(lambda s: s._GetProjectConfig('diff-lines'))
  mail_template = property(lambda s: s._GetProjectConfig('mail-template'))
  bug_links = property(lambda s: s._GetProjectConfig('bug-links'))

  def ChangeBranchPath(self, username=None, changename=None, rest=None):
    # Build base[/username[/changename[/rest]]]; each deeper component
    # is only appended when its parent was given.
    components = [self.changebranch_base]
    if username is not None:
      components.append(username)
      if changename is not None:
        components.append(changename)
        if rest is not None:
          components.append(rest)
    return '/'.join(components)

  # XXX maybe should be private
  def Load(self):
    """Load project metadata from repository.

    Use self._pool for the svn.ra.get_dir2 call.
    """

    self._project_config = dict(_DEFAULT_CONFIG)

    (dirents, revision, properties) = svn.ra.get_dir2(self.repository.ra,
                                                      self.path,
                                                      SVN_INVALID_REVNUM,
                                                      0, # dirent_fields
                                                      self._pool)
    try:
      # NOTE(review): cPickle.loads on data read from the repository --
      # this trusts whoever can set the gvn:project property; confirm
      # that is restricted to admins.
      config = cPickle.loads(properties['gvn:project'])
      try:
        # Make change-branch-base relative to repository root.
        cbb = config['change-branch-base']
        if cbb.startswith('/'):
          config['change-branch-base'] = cbb.lstrip('/')
        else:
          config['change-branch-base'] = '/'.join([self.path, cbb])
      except KeyError:
        # No change-branch-base override; keep the default.
        pass

      self._project_config.update(config)
    except KeyError:
      # No gvn:project property at all; defaults stand.
      pass
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/repository.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/repository.py
new file mode 100644
index 0000000..6b5b788
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/repository.py
@@ -0,0 +1,505 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""repository stuffs
+
+Classes:
+Repository   -- Repository context
+ChangedPath  -- Representation of a changed path
+Dirent       -- Wrapper around svn_ra_stat and svn_dirent_t
+Revision     -- Container for metadata about a revision
+
+TODO(epg): Callers should not be instantiating Dirent or Revision, but
+instead using Repository.Stat and Repository.GetRevisions.  But the
+class interfaces are definitely public.  Should these two classes be
+prefixed with _ ?
+
+"""
+
+
+import os
+import tempfile
+
+import svn.core
+import svn.ra
+
+from svn.core import svn_node_none, svn_node_file, svn_node_dir
+from svn.core import svn_node_unknown
+from svn.core import SVN_ERR_FS_NOT_DIRECTORY, SVN_ERR_FS_NOT_FOUND
+from svn.core import SubversionException
+
+import gvn
+import gvn.errors
+
+
+class ChangedPath(object):
+  """Representation of a changed path.
+
+  Use the old_path, old_revision, path, and NewRevision properties
+  together to diff or merge the change this represents.
+
+  """
+
+  def __init__(self, path, revision, action, copyfrom_path, copyfrom_rev,
+               relative_path=None, source_path=None, source_revision=None):
+    """Initialize ChangedPath from str, str, str, int [, str, str, int].
+
+    Arguments:
+    path            -- full path in the repository
+    revision        -- revision number
+    action          -- A(dded), D(eleted), M(odified), or R(eplaced)
+    copyfrom_path   -- full path in the repository or None
+    copyfrom_rev    -- revision number or -1
+
+    Optional arguments for ChangeBranch.changed_paths:
+    relative_path   -- project relative path
+    source_path     -- full path to source branch
+    source_revision -- revision of source branch
+
+    """
+
+    self._path = path
+    self._revision = revision
+    self._action = action
+    self._copyfrom_path = copyfrom_path
+    self._copyfrom_rev = copyfrom_rev
+    # The next three are only set for changebranch members.
+    self._relative_path = relative_path
+    self._source_path = source_path
+    self._source_revision = source_revision
+
+  path = property(lambda self: self._path,
+                  doc="""Real absolute path for this change.
+
+  If this ChangedPath is part of a change branch, this is the
+  absolute path in the source branch.
+
+  """)
+  action = property(lambda self: self._action,
+                    doc="""A(dded), D(eleted), M(odified), or R(eplaced).""")
+  copyfrom_path = property(lambda self: self._copyfrom_path)
+  copyfrom_revision = property(lambda self: self._copyfrom_rev)
+  relative_path = property(lambda self: self._relative_path,
+                           doc="""Path relative to the project.
+
+  Only available when part of a ChangeBranch.
+  XXX More reason to think changebranch.py should have a subclass.
+
+  """)
+  revision = property(lambda self: self._revision)
+
+  # Er, maybe we need a subclass for the changebranch stuff.
+  def _GetOld(self):
+    # Precedence for the "old" side: explicit copyfrom, then the
+    # changebranch source, then the path itself one revision back
+    # (or revision 0 for an add).
+    if self.copyfrom_path is not None:
+      path = self.copyfrom_path
+      revision = self.copyfrom_revision
+    elif self._source_path is not None:
+      path = self.source_path
+      revision = self._source_revision
+    else:
+      path = self.path
+      if self.action == 'A':
+        revision = 0
+      else:
+        revision = self.revision - 1
+    return (path, revision)
+  # NOTE(review): each access re-runs _GetOld; results are not cached.
+  old_path = property(lambda self: self._GetOld()[0])
+  old_revision = property(lambda self: self._GetOld()[1])
+  # NOTE(review): raises TypeError unless _source_path and _relative_path
+  # were provided (the ChangeBranch case) -- confirm callers guard this.
+  source_path = property(lambda self: '/'.join([self._source_path,
+                                               self._relative_path]))
+
+
+class Revision(object):
+  """Container for metadata about a revision; also acts as dict of revprops.
+
+  Revprop names (the keys) are utf-8 encoded str, and the values are
+  str of arbitrary binary data.  Only callers know that some of them
+  are text to be decoded (e.g. svn:{author,log}, which are
+  utf-8-encoded).  This function could accept unicode *keys* and
+  encode those, but since the caller must already decode values, it
+  seems pointless.  See also gvn.commit.Drive.
+  """
+
+  def __init__(self, ra, paths, revision, properties={}, pool=None):
+    """Initialize Revision from svn_ra_session_t, dict, int, dict, Pool.
+
+    Arguments:
+    ra         -- svn_ra_session_t
+    paths      -- dict mapping path in the repository to svn_log_changed_path_t
+    revision   -- number of this revision
+    properties -- any properties of this revision the caller already knows
+    pool       -- memory pool
+
+    """
+
+    self._ra = ra
+    self._number = revision
+    # Mutable default {} is safe here: it is copied and never mutated.
+    self._properties = dict(properties)
+    self._pool = pool
+
+    if paths is None:
+      self._paths = []
+    else:
+      self._paths = [ChangedPath(path, self._number, entry.action,
+                                 entry.copyfrom_path, entry.copyfrom_rev)
+                     for (path, entry) in paths.iteritems()]
+
+  def __contains__(self, key):
+    # Fetch all revprops first so the membership test is authoritative.
+    self._GetProperties()
+    return key in self._properties
+
+  def __getitem__(self, key):
+    # EAFP: try what we already have, then fetch from the repository
+    # and try once more (a still-missing key raises KeyError).
+    try:
+      return self._properties[key]
+    except KeyError:
+      self._GetProperties()
+      return self._properties[key]
+
+  def get(self, key, default=None):
+    """Dict-style get: self[key], or default if the revprop is unset."""
+    try:
+      return self[key]
+    except KeyError:
+      return default
+
+  number = property(lambda self: self._number)
+  paths = property(lambda self: self._paths)
+
+  # TODO(epg): Consider only going to the network once, not every time
+  # a caller does '[someprop]' when someprop isn't set or 'someprop
+  # in'.  Would need a .Flush() method.
+  def _GetProperties(self):
+    # Hits the repository (rev_proplist) on every call; see TODO above.
+    for (prop, val) in svn.ra.rev_proplist(self._ra, self.number,
+                                           self._pool).iteritems():
+      self._properties[prop] = val
+
+  def SortPaths(self):
+    """TODO(epg): Sort paths asciibetically, with cb deletion first if submit"""
+    pass
+
+
+def _Log(ra, paths, start, end, peg=None, limit=0,
+         strict_node_history=False, revision_cache=None, pool=None):
+  """Return a list of Revisions, possibly by asking ra for the log of paths.
+
+  If revision_cache is provided, this will store Revisions there.  If
+  any revision this finds is already in revision_cache, that Revision
+  will be returned instead of a new one.
+
+  For any revision, the same Revision object is returned every time.
+
+  Arguments:
+  paths               -- paths whose revisions to include; default all
+  start               -- revision number to start log
+  end                 -- revision number to end log
+  peg                 -- peg revision for paths
+  limit               -- return at most this many Revisions
+  strict_node_history -- do not trace back to copyfrom
+  revision_cache      -- dict mapping number to Revision
+  pool                -- memory pool
+
+  """
+
+  if peg is None:
+    peg = start
+  # Resolve each path@peg to its repository-relative location at
+  # 'start'; [''] means the whole repository and needs no translation.
+  if paths != ['']:
+    paths = [svn.ra.get_locations(ra, x, peg, [start])[start].lstrip('/')
+             for x in paths]
+
+  if revision_cache is None:
+    # Make a temporary one: not returned, soon to be garbage collected.
+    revision_cache = {}
+
+  result = []
+  def receiver(paths, revnum, author, date, message, unused_pool):
+    # Log receiver: reuse the cached Revision if we have one, else
+    # build one seeded with the revprops the log already gave us.
+    try:
+      revision = revision_cache[revnum]
+    except KeyError:
+      revision = Revision(ra, paths, revnum,
+                          {'svn:author': author,
+                           'svn:date':   date,
+                           'svn:log':    message},
+                          pool)
+      revision_cache[revnum] = revision
+    result.append(revision)
+
+  discover_changed_paths = True
+  svn.ra.get_log(ra, paths, start, end, limit, discover_changed_paths,
+                 strict_node_history, receiver, pool)
+
+  return result
+
+
+class Dirent(object):
+  """Wrapper around svn_ra_stat and svn_dirent_t."""
+
+  # svn_node_kind_t value -> human-readable name; see kind_str.
+  _kinds = {
+    svn_node_none:    'none',
+    svn_node_file:    'file',
+    svn_node_dir:     'dir',
+    svn_node_unknown: 'unknown',
+    }
+
+  def __init__(self, repository, path, peg, pool=None):
+    """Initialize SvnDirent from svn_ra_stat(repository.ra, path, peg, pool).
+
+    Arguments are self-explanatory.
+
+    Raises:
+    gvn.errors.RepoPath
+    SubversionException.apr_err == ???
+    ???
+
+    """
+
+    self._repository = repository
+    self._revision = None  # lazily filled by _GetLastChanged
+
+    try:
+      self._dirent = svn.ra.stat(self._repository.ra, path, peg, pool)
+    except SubversionException, e:
+      if e.apr_err in [SVN_ERR_FS_NOT_DIRECTORY, SVN_ERR_FS_NOT_FOUND]:
+        # Caller asked for /foo/bar where /foo is not a directory.
+        self._dirent = None
+      else:
+        # Something broke.
+        raise
+    # _dirent may be None from the handler above or, presumably, from
+    # svn.ra.stat itself for a nonexistent path -- confirm.
+    if self._dirent is None:
+      raise gvn.errors.RepoPath(self._repository.URL(path), peg)
+
+  def _GetLastChanged(self):
+    # XXX This is bogus as hell; I doubt any users of this class need
+    # this stupid thing to fetch the log.  But I this class's very
+    # existence is probably bogus; why not use svn_dirent_t directly,
+    # as we use all other svn types directly.
+    if self._revision is None:
+      # Yes, believe it or not, created_rev is in fact last changed rev.
+      # Tuple-unpack enforces that exactly one Revision comes back.
+      (self._revision,) = self._repository.GetRevisions(start=self._dirent.created_rev)
+    return self._revision
+  last_changed = property(_GetLastChanged)
+
+  kind = property(lambda self: self._dirent.kind)
+  kind_str = property(lambda self: self._kinds.get(self._dirent.kind,
+                                                  'unknown'))
+  size = property(lambda self: self._dirent.size)
+  has_props = property(lambda self: self._dirent.has_props)
+
+  def KindIsNone(self): return self._dirent.kind == svn_node_none
+  def KindIsFile(self): return self._dirent.kind == svn_node_file
+  def KindIsDir(self): return self._dirent.kind == svn_node_dir
+  def KindIsUnknown(self):
+    return self._dirent.kind not in [svn_node_none,
+                                     svn_node_file,
+                                     svn_node_dir]
+
+
+class RaCallbacks(svn.ra.Callbacks):
+  """RA callback vtable: auth baton, temp files, and wc-prop access.
+
+  NOTE(review): this module uses svn.wc below but does not import it
+  at the top of the file; presumably it is pulled in transitively --
+  confirm, or add the import in a follow-up.
+  """
+
+  def SetAuthBaton(self, baton):
+    # NOTE(review): holds on to the whole baton sequence (auth_hack),
+    # presumably to keep it from being garbage collected -- confirm.
+    self.auth_hack = baton
+    self.auth_baton = self.auth_hack[-1]
+
+  wc = None  # working copy, if any; set via SetWC
+  def SetWC(self, wc):
+    self.wc = wc
+
+  def open_tmp_file(self, pool):
+    # The svn_swig_py_make_file wrapper is broken for both file descriptors
+    # and files on Windows, so we'll return a path.
+    if self.wc is None:
+      (fd, fn) = tempfile.mkstemp()
+    else:
+      # Create the temp file inside the wc's administrative tmp area.
+      path = self.wc.AbsolutePath('/'.join([svn.wc.get_adm_dir(pool), 'tmp']))
+      (fd, fn) = tempfile.mkstemp(dir=path)
+    os.close(fd)
+    return fn
+
+  def get_wc_prop(self, path, name, pool):
+    # Return the wc prop value, or None when there is no wc or the
+    # path lies outside it.
+    if self.wc is None:
+      return None
+    try:
+      path = self.wc.LocalPath(path)
+    except gvn.errors.PathNotChild:
+      return None
+    return svn.wc.prop_get(name, path, self.wc._adm_access, pool)
+
+  def push_wc_prop(self, path, name, value, pool):
+    # Silently ignore paths with no wc or outside the wc.
+    if self.wc is None:
+      return None
+    try:
+      path = self.wc.LocalPath(path)
+    except gvn.errors.PathNotChild:
+      return
+    self.wc.WCPropSet(path, name, value)
+
+  def get_client_string(self, pool):
+    # Identifies this client (with its version) to the server.
+    return 'gvn-' + gvn.VERSION
+
+
+class Repository(object):
+  """Repository context: a lazily-opened RA session plus revision caches."""
+
+  def __init__(self, username, url, config, pool):
+    """Initialize Repository from str, str, Config, Pool.
+
+    url must be in canonical form (i.e. as returned by
+    svn_path_canonicalize).
+
+    """
+
+    self._username = username
+    self._url = url
+    self._config = config
+    self._pool = pool
+    # Scratch subpool for per-call work; cleared in GetRevisions.
+    self._scratch_pool = svn.core.Pool(pool)
+
+    self.ra_callbacks = RaCallbacks()
+    # The RA session is opened lazily by _GetRa.
+    self._ra = None
+
+    # GetRevisions caches: full-argument-tuple -> [Revision], and
+    # revnum -> Revision (shared with _Log), respectively.
+    self._revision_path_cache = {}
+    self._revision_cache = {}
+
+  have_root_url = property(lambda self: getattr(self, '_have_root_', False),
+                           lambda self, v: setattr(self, '_have_root_', v),
+doc="""Whether this Repository was created with the root URL.
+
+       Defaults to False, meaning this object must contact the
+       repository (hitting the network, for remote repositories) to
+       learn if self.URL() is the repository root.
+       """)
+  username = property(lambda self: self._username)
+
+  def OpenRa(self, url=None, pool=None):
+    """Open and return a new RA session for url (default self._url)."""
+    if url is None:
+      url = self._url
+    return svn.ra.open2(url.encode('utf8'), self.ra_callbacks,
+                        self._config.svn_config_hash, pool)
+
+  def _GetRa(self):
+    # Lazily open the RA session, then reparent it at the repository
+    # root so all further paths are repository-relative.
+    if self._ra is None:
+      self._ra = self.OpenRa(pool=self._pool)
+      url = svn.ra.get_repos_root(self._ra, self._pool).decode('utf8')
+      if url != self._url:
+        self._url = url
+        svn.ra.reparent(self._ra, self._url.encode('utf8'), self._pool)
+    return self._ra
+  ra = property(_GetRa)
+
+  def URL(self, path=None):
+    """Return URL of path, or repository root if path is unspecified."""
+
+    # Make sure self._url is ready.
+    if not self.have_root_url:
+      self._GetRa()
+
+    if path in [None, '']:
+      return self._url
+    return '/'.join([self._url, path])
+
+  def ShortToLongURL(self, short_url):
+    """Convert short_url to a fully qualified (long) URL.
+
+    Returns a string with the leading '^/' replaced by the repository root
+    URL or raises NotShortURL if 'short_url' does not begin with '^/'.
+
+    For compatibility with older releases, treat // the same way.
+
+    Raises:
+      AttributeError  (if short_url is not a string)
+      gvn.errors.NotShortURL  (if short_url is not in "short form")
+
+    """
+
+    if not short_url.startswith('^/') and not short_url.startswith('//'):
+      raise gvn.errors.NotShortURL(short_url)
+
+    if short_url in ['^/', '//']:
+      return self.URL()
+
+    return self.URL(short_url[2:])
+
+  def LongToShortURL(self, long_url):
+    """Convert a fully qualified (long) URL to a short URL.
+
+    Returns a string with the leading repository root URL replaced by '^/'
+    or raises PathNotChild if 'long_url' does not begin with the
+    repository root.
+
+    Raises:
+      AttributeError  (if long_url is not a string)
+      gvn.errors.PathNotChild  (if long_url does not begin with the
+                                 repository root)
+    """
+    if long_url == self.URL():
+      return '^/'
+
+    if not long_url.startswith(self.URL() + '/'):
+      raise gvn.errors.PathNotChild(long_url, self.URL())
+
+    return '^/' + long_url[len(self.URL()):].lstrip('/')
+
+  def GetHead(self, pool=None):
+    """Return the HEAD revision number of this repository."""
+    if pool is None:
+      pool = self._pool
+    return svn.ra.get_latest_revnum(self.ra, pool)
+
+  def GetRevisions(self, paths=[''], start=None, end=None, peg=None, limit=0,
+                   strict_node_history=False):
+    """Return a list of Revisions, possibly by asking .ra for the log of paths.
+
+    To get a Revision for a specific revision, specify only start; all
+    other arguments are redundant.  Returned list is ordered from
+    start to end regardless which is higher.
+
+    For any revision, the same Revision object is returned every time.
+
+    If this is called a second time with the same arguments, it won't
+    hit the repository at all.
+
+    Arguments:
+    paths               -- paths whose revisions to include; default all
+    start               -- revision number to start log
+    end                 -- revision number to end log
+    peg                 -- peg revision for paths
+    limit               -- return at most this many Revisions
+    strict_node_history -- do not trace back to copyfrom
+
+    """
+
+    # NOTE(review): the mutable default paths=[''] is safe; neither
+    # this method nor _Log mutates it (_Log builds a new list).
+    path_cache_key = [peg, start, end, limit, strict_node_history]
+    path_cache_key.extend(paths)
+    path_cache_key = tuple(path_cache_key)
+    try:
+      hit = self._revision_path_cache[path_cache_key]
+    except KeyError:
+      hit = None
+    if hit is not None:
+      return hit
+
+    # Default range: only start given -> log just that revision;
+    # neither given -> log from HEAD back to r1.
+    if end is None:
+      if start is None:
+        start = self.GetHead()
+        end = 1
+      else:
+        end = start
+
+    result = _Log(self.ra, paths, start, end, peg, limit, strict_node_history,
+                  self._revision_cache, self._scratch_pool)
+    self._scratch_pool.clear()
+
+    self._revision_path_cache[path_cache_key] = result
+    return result
+
+  def GetRevProp(self, revision, propname):
+    """Return the value of revprop propname in the given revision."""
+    return self.GetRevisions(start=revision)[0][propname]
+
+  def Stat(self, path, peg):
+    """Return a Dirent for path at peg, using a fresh subpool."""
+    return Dirent(self, path, peg, svn.core.Pool(self._pool))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/__init__.py
new file mode 100644
index 0000000..9cc8209e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/__init__.py
@@ -0,0 +1 @@
+# NOTE(review): this list imports __init__ (this module itself) and omits
+# ack, even though subcommands/ack.py is added in this change -- confirm
+# whether 'ack' should be listed and '__init__' dropped.
+from gvn.subcommands import approve, change, changes, describe, diff, mail, nothave, opened, patch, pith, rdiff, revert, review, snapshot, submit, __init__
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/ack.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/ack.py
new file mode 100644
index 0000000..b9d2ca6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/ack.py
@@ -0,0 +1,61 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import sys
+
+import svn.core
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.errors
+
+
+helptext__gvn_ack = """ack: Acknowledge a changebranch.
+usage: ack CHANGE
+"""
+
+
+def TryAcknowledgement(change_name, ctx, pool):
+  """Acknowledge one changebranch; return 0, or the error's exit code.
+
+  NOTE(review): uses gvn.util.ParseChangeName but this module does not
+  import gvn.util; presumably it arrives transitively via the other
+  gvn imports -- confirm, or add the import.
+  """
+  try:
+    (username, name, revision) = gvn.util.ParseChangeName(change_name)
+    cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project,
+                                       name, username, revision)
+    cb.Acknowledge(pool)
+  except gvn.errors.User, e:
+    # User-level failure: report it and surface the error's code.
+    ctx.Notify('%s\n' % (e,))
+    return e.code
+
+  ctx.Notify("acknowledged '%s'\n" % (cb.change_name,), pool)
+  return 0
+
+
+def Handle_GvnAck(ctx):
+  """Entry point for 'gvn ack': acknowledge each CHANGE operand.
+
+  Returns 0 on success, else the exit code of the last failure.
+  """
+  # Operands are change names, not working-copy paths.
+  ctx.wc_operands = False
+  iterpool = svn.core.Pool(ctx.pool)
+  rval = 0
+  for change_name in ctx.operands:
+    # One subpool, cleared per iteration.
+    iterpool.clear()
+    r = TryAcknowledgement(change_name, ctx, iterpool)
+    if r != 0:
+      # Use the last error code as the exit code.
+      rval = r
+  iterpool.destroy()
+
+  return rval
+
+
+gvn.cmdline.AddCommand('ack', Handle_GvnAck, helptext__gvn_ack,
+                       gvn.cmdline.AuthOptions(['project', 'quiet']))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/approve.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/approve.py
new file mode 100644
index 0000000..2d13cd6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/approve.py
@@ -0,0 +1,61 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import sys
+
+import svn.core
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.errors
+
+
+helptext__gvn_approve = """approve: Approve a changebranch.
+usage: approve CHANGE
+"""
+
+
+def TryApprove(change_name, ctx, pool):
+  """Approve one changebranch; return 0, or the error's exit code.
+
+  NOTE(review): uses gvn.util.ParseChangeName but this module does not
+  import gvn.util; presumably it arrives transitively via the other
+  gvn imports -- confirm, or add the import.
+  """
+  try:
+    (username, name, revision) = gvn.util.ParseChangeName(change_name)
+    cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project,
+                                       name, username, revision)
+    cb.Approve(pool)
+  except gvn.errors.User, e:
+    # User-level failure: report it and surface the error's code.
+    ctx.Notify('%s\n' % (e,))
+    return e.code
+
+  ctx.Notify("approved '%s'\n" % (cb.change_name,), pool)
+  return 0
+
+
+def Handle_GvnApprove(ctx):
+  """Entry point for 'gvn approve': approve each CHANGE operand.
+
+  Returns 0 on success, else the exit code of the last failure.
+  """
+  # Operands are change names, not working-copy paths.
+  ctx.wc_operands = False
+  iterpool = svn.core.Pool(ctx.pool)
+  rval = 0
+  for change_name in ctx.operands:
+    # One subpool, cleared per iteration.
+    iterpool.clear()
+    r = TryApprove(change_name, ctx, iterpool)
+    if r != 0:
+      # Use the last error code as the exit code.
+      rval = r
+  iterpool.destroy()
+
+  return rval
+
+
+gvn.cmdline.AddCommand('approve', Handle_GvnApprove, helptext__gvn_approve,
+                       gvn.cmdline.AuthOptions(['project', 'quiet']))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/change.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/change.py
new file mode 100644
index 0000000..ef66ef8b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/change.py
@@ -0,0 +1,328 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# TODO(epg): --with-revprop support, but disallow gvn:* setting with
+# that mechanism.  Add options to set some gvn: properties
+# (e.g. --reviewers => gvn:reviewers --bug => gvn:bug).
+
+import codecs
+import os
+import posixpath
+import sys
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.errors
+import gvn.platform
+import gvn.util
+import gvn.wc
+
+
+helptext__gvn_change = """change: Manage a changebranch.
+usage: 1. change [-c CHANGE] [--add] PATH...
+       2. change -c CHANGE --remove PATH...
+       3. change -c CHANGE --delete
+
+  1. Add PATHs to changebranch, creating it if it does not exist,
+     and generating a random changebranch name if none is provided.
+  2. Remove PATHs from changebranch.
+  3. Delete changebranch.
+"""
+
+
+def _CurrentlyChangeBranched(path, cb):
+  """Return True if path falls under any path already on changebranch cb."""
+  for change in cb.changed_paths:
+    if gvn.util.IsChild(path, change.relative_path):
+      return True
+  return False
+
+def _InTargets(path, targets):
+  """Return True if path falls under any of the given target paths."""
+  for target in targets:
+    if gvn.util.IsChild(path, target):
+      return True
+  return False
+
+
+# Markers used to structure (and later re-parse) the edit form.
+_MESSAGE_DIVIDER = '--All lines above this line become the description--'
+_PATHS_IN = 'Paths in this changebranch'
+_PATHS_OUT = 'Paths modified but not in any changebranch:'
+def ChangeEditor(ctx, cb, desc, action, path_status, changed_paths,
+                 editor, encoding):
+  """Return (desc, {path: svn_wc_status2_t}) for paths to be changebranched.
+
+  Run the user's editor on a form representing the state of the
+  working copy, with desc in first section, cb.changed_paths in
+  second, and non-changedbranched modified paths in a third.  Return
+  the change description and changed paths from the edited form.
+
+  Returns (None, None) if the user cancelled the edit.
+
+  Arguments:
+  ctx           -- gvn.cmdline.Context
+  cb            -- gvn.changebranch.ChangeBranch
+  desc          -- change description (utf-8 str)
+  action        -- action being taken ('add'/None or 'remove')
+  path_status   -- dict of all modified paths in the working copy, with
+                   svn_wc_status2_t for values
+  changed_paths -- paths to be changebranched (utf-8 str keys)
+  editor        -- gvn.util.Editor for the user's editor
+  encoding      -- user's encoding
+  """
+
+  if desc is None:
+    desc = ''
+
+  form = [
+    desc.rstrip(),
+    _MESSAGE_DIVIDER,
+    '',
+    'Move files between the next two sections to add or remove them from',
+    'the changebranch.',
+    '',
+    '%s (%s):' % (_PATHS_IN, cb.name),
+    ]
+
+  # Map display paths back to wc paths.
+  display_to_wc = {}
+
+  # Section two: paths slated for the changebranch ('*' marks a path
+  # newly moved in, i.e. not yet on the branch).
+  for wc_path in sorted(changed_paths):
+    rel_path = ctx.DisplayPath(wc_path)
+    display_to_wc[rel_path] = wc_path
+
+    if _CurrentlyChangeBranched(wc_path, cb):
+      moved = ' '
+    else:
+      moved = '*'
+    form.append('%s%s    %s' % (moved,
+                                gvn.wc.ActionCode(path_status[wc_path]),
+                                rel_path))
+
+  form.append('')
+  form.append(_PATHS_OUT)
+  form.append('')
+
+  # Section three: modified paths not being changebranched ('*' marks a
+  # path being moved out, i.e. currently on the branch).
+  for wc_path in sorted(path_status.iterkeys()):
+    if wc_path in changed_paths:
+      continue
+    if wc_path in ctx.wc.change_state:
+      # Skip paths on some other changebranch, unless we are removing
+      # from this one.
+      if (ctx.wc.change_state[wc_path].change_name != cb.name
+          or action != 'remove'):
+        continue
+    rel_path = ctx.DisplayPath(wc_path)
+    display_to_wc[rel_path] = wc_path
+
+    if _CurrentlyChangeBranched(wc_path, cb):
+      moved = '*'
+    else:
+      moved = ' '
+    form.append('%s%s    %s' % (moved,
+                                gvn.wc.ActionCode(path_status[wc_path]),
+                                rel_path))
+  form.append('')
+
+  answer = editor.Edit('\n'.join(form), encoding, tmp_prefix='gvnchange.')
+  if answer is None:
+    return (None, None)
+
+  # Re-parse the edited form with a small state machine:
+  # 0 = collecting description, 1 = seeking the paths-in header,
+  # 2 = collecting paths until the paths-out header.
+  state = 0
+  result_paths = {}
+  desc = []
+  for line in answer.splitlines():
+    if state == 0:
+      if line == _MESSAGE_DIVIDER:
+        state = 1
+      else:
+        desc.append(line)
+    elif state == 1 and line.startswith(_PATHS_IN):
+      state = 2
+    elif state == 2:
+      if line == '':
+        continue
+      elif line == _PATHS_OUT:
+        break
+      # TODO(epg): Wrong place to be encoding to utf-8; gvn.wc should only
+      # return unicode paths, and should .encode all its input; that's not
+      # where we are today, though, so have to do this for now.
+      # line[6:] strips the marker char, action code, and 4 spaces
+      # written by the '%s%s    %s' format above.
+      path = display_to_wc[line[6:].encode('utf-8')]
+      result_paths[path] = path_status[path]
+
+  # Restore final newline.
+  desc.append('')
+
+  return (u'\n'.join(desc), result_paths)
+
+
+def Handle_GvnChange(ctx, editor_class=gvn.util.Editor):
+  """Entry point for 'gvn change': create, modify, or delete a changebranch.
+
+  Returns the ChangeBranch on success, or None if the user cancelled
+  the interactive edit.
+  """
+
+  # TODO(epg): This description block is repeated a few places, i
+  # think, along with popping up an editor for the log message.
+  # *After* we implement full editor support, see what can be
+  # refactored.  Doing it before would just be premature, as we don't
+  # know what that's going to look like
+  description = None
+  if ctx.options.file is not None:
+    description = codecs.open(ctx.options.file,
+                              encoding=ctx.encoding).read()
+  elif ctx.options.message is not None:
+    description = ctx.options.message.decode(ctx.encoding)
+  # TODO(epg): force-log
+
+  # Determine the requested action; --add/--delete/--remove are
+  # mutually exclusive.
+  action = None
+  if ctx.options.add:
+    # Add files on the command line into the change branch.
+    if action is not None:
+      raise gvn.errors.BadOptions("cannot specify both 'add' and '%s'"
+                                   % (action,))
+    action = 'add'
+  elif ctx.options.delete:
+    # Delete the changebranch.
+    if action is not None:
+      raise gvn.errors.BadOptions("cannot specify both 'delete' and '%s'"
+                                   % (action,))
+    action = 'delete'
+  elif ctx.options.remove:
+    # Remove files on the command line from the change branch.
+    if action is not None:
+      raise gvn.errors.BadOptions("cannot specify both 'remove' and '%s'"
+                                   % (action,))
+    action = 'remove'
+
+  change_name = ctx.options.change
+  if change_name is None:
+    if action != 'add' and action is not None:
+      # Must specify a change name on which to operate.
+      raise gvn.errors.BadOptions("must provide a change name (via '-c')")
+
+    # Generate a unique, random (and short) change name.
+    change_name = gvn.changebranch.UniqueChangeName(ctx.project,
+                      ctx.project.repository.username,
+                      gvn.changebranch.RandomBranchName(),
+                      ctx.pool)
+
+  (username, cname, revision) = gvn.util.ParseChangeName(change_name)
+  cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project,
+                                     cname, username, revision)
+  ctx.ValidateChangeName(cb)
+
+  if action == 'delete':
+    # Deletion needs no path handling; do it and return early.
+    cb.Delete(ctx.wc, description, ctx.pool)
+    return cb
+
+  wc_paths = set([ctx.path])
+  already_changed_paths = set(cb.ChangedPathsRelative())
+  if already_changed_paths:
+    # Make sure we don't open more deeply than shallowest path on cb
+    wc_paths.update(already_changed_paths)
+  ctx.wc.Open(wc_paths)
+
+  # Get status of everything from ctx.path down; this is wasted effort
+  # in --non-interactive mode...
+  paths = {}
+  def status_cb(target, status):
+    if gvn.wc.IsModified(status):
+      paths[ctx.wc.RelativePath(target)] = status
+  ctx.wc.Status(wc_paths, recursive=not ctx.options.non_recursive,
+                get_all=False, no_ignore=False, show_children=False,
+                callback=status_cb)
+
+  if not paths:
+    # No modified files, so exit silently like 'svn commit'.
+    return cb
+
+  # This is the list of paths to changebranch; initialize it to the
+  # list of paths currently on this changebranch, if any.
+  changed_paths = already_changed_paths
+
+  # Build the list of paths to add or remove from changed_paths.
+  if len(ctx.operands) == 1:
+    # If we have only one operand, ctx.path is already it.  TODO(epg):
+    # I'm not sure this is the right fix.  See also cmdline.py .
+    targets = [ctx.path]
+  else:
+    targets = [posixpath.join(ctx.path, x) for x in ctx.operands]
+  if len(targets) == 0 and not cb.Exists():
+    # If changebranch does not exist, add everything
+    # (already-changebranched paths are pruned later).
+    targets = [ctx.path]
+
+  # Remove all paths in changed_paths starting with any path in targets
+  # (e.g. targets=['foo'] changed_paths=['food', 'foo', 'foo/bar'],
+  # remove 'foo' and 'foo/bar' but not 'food').
+  if action == 'remove':
+    # Iterate over a copy so we can mutate changed_paths safely.
+    for i in list(changed_paths):
+      if _InTargets(i, targets):
+        changed_paths.remove(i)
+
+  # Add all non-changebranched, locally modified paths starting with
+  # any path in targets.
+  elif action == 'add' or action is None:
+    for i in paths.iterkeys():
+      if _InTargets(i, targets) and i not in ctx.wc.change_state:
+        changed_paths.add(i)
+
+  if description is None:
+    description = cb.description
+
+  editor = None
+  try:
+    if ctx.options.non_interactive:
+      # Set paths to status of changed_paths.
+      tmp = {}
+      for i in changed_paths:
+        try:
+          tmp[i] = paths[i]
+        except KeyError:
+          # User specified unmodified file; ignore it.
+          pass
+      paths = tmp
+    else:
+      # Get paths from the user via $EDITOR.
+      editor = editor_class(ctx.config.editor_command)
+      (description, paths) = ChangeEditor(ctx, cb, description, action,
+                                          paths, changed_paths, editor,
+                                          ctx.encoding)
+      if description is None:
+        # User cancelled the edit.
+        if not ctx.options.quiet:
+          ctx.Notify('cancelled\n')
+        return None
+
+    if not paths:
+      raise gvn.errors.BadOperands("no pathnames specified!")
+
+    try:
+      cb.Branch(ctx.wc, paths, description, ctx.pool)
+    except gvn.errors.OutOfDateParent, e:
+      # Rewrite the path for display before re-raising.
+      e.branch_path = ctx.DisplayPath(e.branch_path)
+      raise
+    if editor is not None:
+      editor.Done()
+      editor = None
+  finally:
+    # Preserve the user's edits if anything failed mid-flight.
+    if editor is not None and not editor.IsDone():
+      sys.stderr.write('Your change form was left in a temporary file:\n'
+                       '%s\n' % (editor.tmpfile,))
+
+  return cb
+
+
+def wrap_Handle_GvnChange(ctx):
+  """Adapt Handle_GvnChange's ChangeBranch-or-None result to an exit code."""
+  if Handle_GvnChange(ctx) is None:
+    return 1
+  return 0
+
+
+options = gvn.cmdline.AuthOptions(gvn.cmdline.LogOptions(
+  ['add', 'delete', 'remove', 'change', 'force-change',
+   'project', 'quiet']))
+gvn.cmdline.AddCommand('change', wrap_Handle_GvnChange, helptext__gvn_change,
+                       options, {'change': 'changebranch ARG'})
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/changes.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/changes.py
new file mode 100644
index 0000000..6f5a735
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/changes.py
@@ -0,0 +1,64 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import time
+
+from svn.core import SVN_DIRENT_TIME
+
+import gvn.changebranch
+import gvn.cmdline
+
+
+helptext__gvn_changes = """changes: List changebranches.
+usage: changes [--all | --user USERNAME]
+
+Without --all or --user, list local changebranches.
+"""
+
+
+def Handle_GvnChanges(ctx):
+  # TODO(epg): Make sure we don't hit the repository for simple 'gvn
+  # changes'; that's a pure local operation.
+
+  if ctx.options.verbose:
+    fields = SVN_DIRENT_TIME
+  else:
+    fields = 0
+  if ctx.options.all or ctx.options.user is not None:
+    changes = gvn.changebranch.ListChangeBranches(ctx.project,
+                                                  dirent_fields=fields,
+                                                  user=ctx.options.user,
+                                                  pool =ctx.pool)
+    for (cname, dirent) in changes:
+      if ctx.options.verbose:
+        print '%s  %s' % (time.strftime('%F %T',
+                                        time.localtime(dirent.time/1000000)),
+                          cname)
+      else:
+        print cname
+  else:
+    # TODO(epg): Store snapshot time in ChangeState so we can be
+    # verbose here, too.
+    for change_name in set(x.change_name
+                           for x in ctx.wc.change_state.itervalues()):
+      print change_name
+
+  return 0
+
+
+gvn.cmdline.AddCommand('changes', Handle_GvnChanges, helptext__gvn_changes,
+                       gvn.cmdline.AuthOptions(['all', 'user', 'project',
+                                                'verbose']),
+                       {'all':  'show all changebranches for this project',
+                        'user': 'show all changebranches for user ARG'})
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/describe.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/describe.py
new file mode 100644
index 0000000..371ca3b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/describe.py
@@ -0,0 +1,58 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import sys
+
+import svn.core
+
+import gvn.cmdline
+import gvn.description
+import gvn.errors
+
+
+helptext__gvn_describe = """describe: Describe a revision or changebranch.
+usage: describe CHANGE | REVISION
+"""
+
+
+def TryDescribe(name, ctx, pool):
+  try:
+    desc = gvn.description.Description(ctx.config, ctx.project, name)
+    print desc.Output(pool)
+  except gvn.errors.User, e:
+    ctx.Notify('%s\n' % (e,))
+    return e.code
+
+  return 0
+
+
def Handle_GvnDescribe(ctx):
  """Describe each operand (a revision or changebranch name) in turn.

  Operands are repository-side names, not working-copy paths.  Returns
  the error code of the last operand that failed, or 0 if all succeeded.
  """
  ctx.wc_operands = False
  exit_code = 0
  subpool = svn.core.Pool(ctx.pool)
  for name in ctx.operands:
    # Recycle one subpool across operands instead of allocating per item.
    subpool.clear()
    code = TryDescribe(name, ctx, subpool)
    if code != 0:
      exit_code = code
  subpool.destroy()

  return exit_code
+
+
# Register 'gvn describe' with 'desc' as an alias.
gvn.cmdline.AddCommand('describe', Handle_GvnDescribe, helptext__gvn_describe,
                       gvn.cmdline.AuthOptions(['project']),
                       aliases=['desc'])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/diff.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/diff.py
new file mode 100644
index 0000000..15558c95
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/diff.py
@@ -0,0 +1,114 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
import os
import sys

import svn.core

import gvn.changebranch
import gvn.cmdline
import gvn.errors
import gvn.util
import gvn.wc
+
+
+helptext__gvn_diff = """\
+diff (di): Display the differences for locally modified paths.
+usage: diff [-c CHANGE | PATH...]
+
+  Display the changes made to the paths changed in CHANGE or PATHS.
+  Without CHANGE or PATHs, display all changes in the working copy.
+"""
+
+# TODO(epg): This is nearly identical with the Status block in opened
+# (and maybe others); put this somewhere common.  Name sucks too...
def GetModified(ctx, changebranched=None, modified=None):
  """Call changebranched or modified with (target, status) of modified paths.

  Runs a recursive status walk over ctx.path and classifies each
  modified (or broken) path by whether it is attached to a changebranch.

  Arguments:
  ctx               -- run recursive status on ctx.path
  changebranched    -- called for changebranched paths
  modified          -- called for paths modified but not changebranched

  """

  # XXX(epg): Hmm, seems to me the callback ought to be receiving a
  # pool, like most callbacks.  Fine, we'll handle it ourselves.
  pool = svn.core.Pool(ctx.pool)
  def cb(target, status):
    # Recycle the shared pool on every status callback.
    pool.clear()
    # We're only interested in Modified or Broken paths.
    if gvn.wc.IsModified(status) or gvn.wc.IsBroken(status):
      target = ctx.wc.RelativePath(target)
      try:
        # A KeyError here means the path is on no changebranch; the
        # looked-up change_name itself is not otherwise used.
        change_name = ctx.wc.change_state[target].change_name
      except KeyError:
        if callable(modified):
          modified(target, status)
      else:
        if callable(changebranched):
          changebranched(target, status)
  ctx.wc.Status([ctx.path], recursive=True, get_all=False, no_ignore=False,
                show_children=False, callback=cb)
+
def Handle_GvnDiff(ctx):
  """Display local modifications, optionally scoped to a changebranch.

  Three different commands in one:
    1. diff --from-change -c foo: diff the changebranch snapshot against
       the working copy.
    2. diff -c none: diff only modified paths NOT on any changebranch.
    3. diff [-c foo] [PATHs]: diff the given paths plus the paths on
       changebranch foo.

  Returns the result of the underlying svn diff, or 0 when there is
  nothing to show.
  """

  ######################################################################
  # 1. diff --from-change -c foo
  if ctx.options.from_change:
    change = ctx.options.change
    if change is None:
      raise gvn.errors.BadOptions("Can't use --from-change without -c")
    if change == 'none':
      raise gvn.errors.BadOptions("Can't use --from-change with -c none")

    (username, name, revision) = gvn.util.ParseChangeName(change)
    cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project,
                                       name, username, revision)
    return gvn.changebranch.DiffSnapshotToWC(cb, ctx.wc, ctx.GetDiffCallbacks(),
                                             ctx.encoding, ctx.pool)

  targets = set(ctx.operands)

  ######################################################################
  # 2. diff -c none
  if ctx.options.change == 'none':
    ctx.wc.Open([ctx.path])
    GetModified(ctx, modified=lambda t, s: targets.add(t))
    if not targets:
      # All modified paths are changebranched and no operands were
      # specified; show nothing.
      return 0
    # 'change' is fully handled here; don't pass -c none through to svn.
    ctx.options.gvn_options.remove('change')

  ######################################################################
  # 3. diff [-c foo] [PATHs]
  elif ctx.options.change is not None:
    for (path, state) in ctx.wc.change_state.iteritems():
      if state.change_name == ctx.options.change:
        # It seems odd to call get the DisplayPath here, but really it
        # isn't.  path is relative to the wc top, so directly passing
        # that to svn only works if current directory == wc top, which
        # is frequently not true.  DisplayPath localizes (fine, svn
        # expects that anyway) and converts to cwd-relative.
        targets.add(ctx.DisplayPath(path))
    ctx.options.gvn_options.remove('change')
  return gvn.cmdline.RunSvnCommand(ctx, 'diff', sorted(targets))
+
# Register 'gvn diff' ('di') with svn diff's pass-through options.
gvn.cmdline.AddCommand('diff', Handle_GvnDiff, helptext__gvn_diff,
                       ['change', 'non-recursive', 'diff-cmd', 'extensions',
                        'no-diff-deleted', 'notice-ancestry', 'from-change',
                        'force'],
                       {'change': 'changebranch ARG'},
                       aliases=['di'])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/mail.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/mail.py
new file mode 100644
index 0000000..6e31b75
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/mail.py
@@ -0,0 +1,260 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import email.Charset
+import email.FeedParser
+import email.Utils
+import smtplib
+import sys
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.description
+import gvn.diff
+import gvn.subcommands.change
+import gvn.util
+
+
+helptext__gvn_mail = """mail: Mail a changebranch review request.
+usage: mail [--reviewers REVIEWER,...] [--cc CC,...] -c CHANGE | PATH...
+
+If CHANGE does not exist, create it based on PATHs.  If CHANGE is
+unspecified, generate a random name for the new changebranch.
+"""
+
+
def AppendEmailDomain(pconfig, username):
  """Qualify username with pconfig.email_domain when needed.

  Returns username unchanged when no domain is configured or when the
  name already contains an '@'; otherwise returns username@domain.
  """
  domain = pconfig.email_domain
  if domain is None or '@' in username:
    return username
  return '%s@%s' % (username, domain)
+
def GetDiffSizeDesc(diff_size):
  """Provide text describing the size of the code review.

  diff_size is a line count; 0 means the size is unknown.
  """
  if diff_size == 0:
    return "a code review of unknown size"
  # (exclusive upper bound, description) pairs, checked in ascending order.
  size_table = [
    (2, "a wee code review"),
    (5, "a tiny code review"),
    (30, "a small code review"),
    (100, "a medium-size code review"),
    (1000, "a code review"),
    (2000, "a freakin huge code review"),
    (3000, "a jupiterian code review"),
    (4000, "a month long code review"),
    (5000, "a whopping big code review"),
    (10000, "the mother of all code reviews"),
    (20000, "the grandmother of all code reviews"),
    ]
  for (limit, desc) in size_table:
    if diff_size < limit:
      return desc
  return "a category 5 code review"
+
def ParseMail(pconfig, message):
  """Return ([recipient addresses], utf-8-encoded message str) from message.

  Parse recipient addresses from message, then remove any BCC headers from
  the returned message str.

  Arguments:
  pconfig -- gvn.config.ProjectConfig
  message -- unicode text of the message

  Raises gvn.errors.NoReviewers when the To: header has no addresses.
  """

  # Pass unicode message in...
  parser = email.FeedParser.FeedParser()
  parser.feed(message.encode('utf-8'))
  message = parser.close()

  # Collect To/Cc/Bcc addresses, qualifying bare usernames with the
  # project's email domain.
  recipients = []
  for header in 'to', 'cc', 'bcc':
    addresses = email.Utils.getaddresses(message.get_all(header, []))
    if header == 'to' and len(addresses) == 0:
      raise gvn.errors.NoReviewers
    for (name, address) in addresses:
      recipients.append(AppendEmailDomain(pconfig, address))

  # BCC recipients get the mail but must not appear in the sent headers.
  del message['bcc']

  message['User-Agent'] = 'gvn/' + gvn.VERSION

  # Here's what we have to do to send 8bit, non-base64, utf-8 text.
  email.Charset.add_charset('utf-8', header_enc=email.Charset.SHORTEST,
                            body_enc=None, output_charset='utf-8')
  message.set_charset('utf-8')

  # ...get RFC2047-encoded UTF-8 str back.
  return (recipients, message.as_string())
+
def Handle_GvnMail(ctx):
  """Mail a review request for a changebranch.

  Builds the mail from the project's mail template, optionally lets the
  user edit it in their editor, and sends it via SMTP (with TLS and
  authentication when configured).  When -c names a changebranch that
  does not exist yet, creates it via Handle_GvnChange first.

  Returns 0 on success, 1 when changebranch creation fails or the user
  cancels out of the editor.
  """
  pconfig = ctx.project_config
  project = ctx.project

  # Dicts keyed by address, used only to de-duplicate the option lists.
  recipients = {}
  ccrecipients = {}

  # TODO(epg): should we handle this list parsing stuff where we
  # process the ctx.options?  I think so.
  if ctx.options.reviewers is not None:
    for recipient in ctx.options.reviewers.split(','):
      recipients[recipient] = 1
  if ctx.options.cc is not None:
    for ccrecipient in ctx.options.cc.split(','):
      ccrecipients[ccrecipient] = 1
  recipients = list(recipients.iterkeys())
  ccrecipients = list(ccrecipients.iterkeys())

  # Resolve -c CHANGE to an existing changebranch, or create one.
  cb = None
  if ctx.options.change is not None:
    (username, name, revision) = gvn.util.ParseChangeName(ctx.options.change)
    cb = gvn.changebranch.ChangeBranch(ctx.config, project, name,
                                       username, revision)
    if not cb.Exists():
      cb = None
  if cb is None:
    cb =  gvn.subcommands.change.Handle_GvnChange(ctx)
    if cb is None:
      return 1

  if project.diff_lines > 0:
    # TODO(epg): Should return unicode, today returns str in user's
    # default encoding.
    (shortdiff, diff_size) = gvn.diff.GetShortDiff(project, cb, ctx.pool)
  else:
    shortdiff = ''
    diff_size = 0
  diff_size_desc = GetDiffSizeDesc(diff_size)

  fromaddr = AppendEmailDomain(pconfig, pconfig.email_address)

  toaddrs = [AppendEmailDomain(pconfig, r) for r in recipients]
  ccaddrs = [AppendEmailDomain(pconfig, r) for r in ccrecipients]
  desc_object = gvn.description.Description(ctx.config, project,
                                            cb.change_name)
  cb_desc = desc_object.Output(ctx.pool)
  # Trim the one-line summary to at most 39 characters.
  short_desc = desc_object.Log()
  if len(short_desc) > 39:
    short_desc = short_desc[:36] + '...'
  short_desc = short_desc.replace('\n', ' ')

  template_dict = {
    'project': project.repository.URL(project.path),
    'reviewers': ', '.join(recipients),
    'change_name': cb.change_name,
    'change_name_at_head': cb.change_name_at_head,
    'description': cb_desc,
    'short_desc': short_desc,
    'shortdiff': shortdiff,
    'diff_size_desc': diff_size_desc,
    'diff_size': diff_size,
  }
  # NOTE(review): mail_template_name is never used below -- candidate
  # for removal.
  mail_template_name = project.mail_template
  message = '\n'.join([
    'From: ' + fromaddr,
    'To: ' + ', '.join(toaddrs),
    'Cc: ' + ', '.join(ccaddrs),
    project.mail_template % template_dict,
    ])

  # On Windows, the interaction among Python, Cygwin or the cmd shell, and
  # various versions of diff can be a mess, leaving the message with mixed
  # line endings that confuse some editors and either drop or double endings
  # in the final mail.  We convert them all to Unix endings (\n) here, and let
  # Python and/or Cygwin handle converting that to the OS's ending when they
  # write the file out.
  # Assume endings are some combination of \r, \n, and \r\n.
  if sys.platform == 'win32':
    message = message.replace('\r\n', '\n').replace('\r', '\n')

  editor = None
  try:
    if ctx.options.non_interactive:
      # Just send unedited form to --reviewers + --cc optargs.
      toaddrs.extend(ccaddrs)
    else:
      # Run the user's editor on the form, then send edited form to
      # addresses parsed from the edited form.
      editor = gvn.util.Editor(ctx.config.editor_command)
      message = editor.Edit(message, ctx.encoding, tmp_prefix='gvnmail.')
      if message is None:
        # User cancelled the edit.
        # NOTE(review): calling editor.Done() only under the quiet check
        # looks accidental -- with --quiet the form file is always kept;
        # confirm that is intended.
        if not ctx.options.quiet:
          editor.Done()
          ctx._stderr.write('not sending')
          ctx._stderr.flush()
        return 1
      (toaddrs, message) = ParseMail(pconfig, message)

    # Send the user a copy, too.
    toaddrs.append(fromaddr)

    if not ctx.options.quiet:
      ctx._stderr.write('sending')
      ctx._stderr.flush()

    smtp_conn = smtplib.SMTP(ctx.config.smtp_server)
    # We have to EHLO on our own if we want to use TLS.
    (response_code, response_msg) = smtp_conn.ehlo()
    if not 200 <= response_code <= 299:
      raise gvn.errors.Mail('Bad EHLO response(%d): %s' % (response_code,
                                                           response_msg))
    # If the user requested SMTP authentication, use it if the server
    # supports TLS.
    if (ctx.config.smtp_user is not None
        and ctx.config.smtp_password is not None):
      if smtp_conn.has_extn("starttls"):
        smtp_conn.starttls()
        # Resend EHLO command to retrieve new ESMTP features for TLS.
        smtp_conn.ehlo()
        smtp_conn.login(ctx.config.smtp_user, ctx.config.smtp_password)
      else:
        ctx._stderr.write('%s does not support TLS, ignoring smtp_user/password'
                          ' options\n' % (ctx.config.smtp_server,))
    smtp_conn.sendmail(fromaddr, toaddrs, message)
    smtp_conn.quit()

    if editor is not None:
      editor.Done()
      editor = None
  finally:
    # Always terminate the progress line; if we bailed before sending,
    # tell the user where the edited form was left.
    if not ctx.options.quiet:
      ctx._stderr.write('.\n')
    if editor is not None and not editor.IsDone():
      ctx._stderr.write('Your email form was left in a temporary file:\n'
                        '%s\n' % (editor.tmpfile,))

  return 0
+
+
# Register 'gvn mail' with the authentication and log-message option sets.
# NOTE(review): the 'syn' alias looks like a leftover (perhaps from an
# earlier command spelling) -- confirm it is intentional.
options = gvn.cmdline.AuthOptions(
 gvn.cmdline.LogOptions(
  ['change', 'cc', 'editor-cmd', 'force-change',
   'project', 'quiet', 'reviewers'])
)
gvn.cmdline.AddCommand('mail', Handle_GvnMail, helptext__gvn_mail,
                       options, {'change': 'changebranch ARG'},
                       aliases=['syn'])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/nothave.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/nothave.py
new file mode 100644
index 0000000..1691695
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/nothave.py
@@ -0,0 +1,35 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import svn.wc
+
+import gvn.cmdline
+import gvn.wc
+
+
+helptext__gvn_nothave = """nothave: List unversioned files.
+usage: nothave
+"""
+
def Handle_GvnNotHave(ctx):
  """Print every unversioned path under ctx.path, one per line."""
  ctx.wc.Open([ctx.path])
  def report(target, status):
    # Only paths svn knows nothing about are interesting here.
    if status.text_status == svn.wc.status_unversioned:
      print(ctx.DisplayPath(ctx.wc.RelativePath(target)))
  ctx.wc.Status([ctx.path], recursive=True, get_all=False, no_ignore=False,
                callback=report)
  return 0
+
+gvn.cmdline.AddCommand('nothave', Handle_GvnNotHave, helptext__gvn_nothave)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/opened.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/opened.py
new file mode 100644
index 0000000..320afb8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/opened.py
@@ -0,0 +1,67 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import svn.core
+
+import gvn.cmdline
+import gvn.util
+import gvn.wc
+
+
+helptext__gvn_opened = """opened: List locally modified files.
+usage: opened
+"""
+
+
def Handle_GvnOpened(ctx):
  """Print status for locally modified files, grouped by changebranch.

  Paths not on any changebranch are printed as they are found; paths on
  a changebranch are buffered and printed afterwards under a
  "--- Changebranch" heading, flagged with '*' when they need a new
  snapshot.

  Returns 0 (process exit status).
  """
  # Maps change_name -> {wc-relative path -> formatted status line}.
  changebranches = {}
  ctx.wc.Open([ctx.path])

  # XXX(epg): Hmm, seems to me the callback ought to be receiving a
  # pool, like most callbacks.  Fine, we'll handle it ourselves.
  pool = svn.core.Pool(ctx.pool)
  def cb(target, status):
    # Recycle the shared pool on every status callback.
    pool.clear()
    # We're only interested in Modified or Broken paths.
    if gvn.wc.IsModified(status) or gvn.wc.IsBroken(status):
      target = ctx.wc.RelativePath(target)
      string = gvn.wc.StatusString(ctx.DisplayPath(target), status)
      try:
        change_name = ctx.wc.change_state[target].change_name
      except KeyError:
        # This path is not on any changebranch; print it now.
        print ' ' + string
      else:
        # '*' marks a path whose changebranch snapshot is out of date.
        code = [' ', '*'][ctx.wc.NeedsSnapshot(target, status, pool)]
        string = code + string
        try:
          # This path is on a changebranch; stash it away for now.
          changebranches[change_name][target] = string
        except KeyError:
          # First time we see this changebranch; changebranches has no
          # change_name key yet: create it.
          changebranches[change_name] = {target: string}
  ctx.wc.Status([ctx.path], recursive=True, get_all=False, no_ignore=False,
                show_children=False, callback=cb)

  # Now we'll print status for each changebranched file.
  for changename in sorted(changebranches.iterkeys()):
    print "--- Changebranch '%s':" % (changename,)
    for target in sorted(changebranches[changename].iterkeys()):
      print changebranches[changename][target]

  return 0
+
+gvn.cmdline.AddCommand('opened', Handle_GvnOpened, helptext__gvn_opened)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/patch.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/patch.py
new file mode 100644
index 0000000..b108253
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/patch.py
@@ -0,0 +1,42 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.util
+
+
+helptext__gvn_patch = """\
+XXX Only barely implemented; read the code to understand the
+narrow use patterns this works with if you want to use it.
+"""
+
+
def Handle_GvnPatch(ctx):
  """Stub for 'gvn patch': print the warning helptext and exit 1.

  Everything after the early return is deliberately unreachable draft
  code sketching the intended changebranch-merge flow.
  """
  print helptext__gvn_patch
  return 1
  # NOTE: dead code below (see early return above); kept as a sketch.
  (username, cname, revision) = gvn.util.ParseChangeName(ctx.options.change)
  cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project,
                                     cname, username, revision)
  ctx.Notify('Merging %s\n' % (cb.change_name,))
  cb = gvn.changebranch.Merge(cb, ctx.project, ctx.wc, ctx.path, ctx.config,
                              ctx.pool)
  print "Changed %s." % (cb.change_name,)

  return 0
+
+gvn.cmdline.AddCommand('patch', Handle_GvnPatch, helptext__gvn_patch,
+                       gvn.cmdline.AuthOptions(['change']),
+                       {'change': 'changebranch ARG'})
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/pith.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/pith.py
new file mode 100644
index 0000000..6864494d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/pith.py
@@ -0,0 +1,39 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import random
+import gvn.cmdline
+
+
+PITHY_STATEMENTS = [
+  'It will be better one day.',
+  'Looks like a party in your working copy!',
+  'One repository to rule them all!',
+  'How hard could it be?',
+  'epg is always right.',
+  ]
+
+
+helptext__gvn_pith = """pith: See a random pithy statement.
+usage: pith
+"""
+
+
def Handle_GvnPith(ctx):
  """Print one randomly chosen pithy statement; always succeeds."""
  statement = random.choice(PITHY_STATEMENTS)
  print(statement)
  return 0
+
+
+gvn.cmdline.AddCommand('pith', Handle_GvnPith, helptext__gvn_pith)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/rdiff.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/rdiff.py
new file mode 100644
index 0000000..c43912a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/rdiff.py
@@ -0,0 +1,54 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import gvn.cmdline
+import gvn.util
+
+
+helptext__gvn_rdiff = """\
+rdiff: Display the differences between two revisions or repository paths.
+usage: 1. rdiff [-c M | -r N[:M]] [URL[@REV]...]
+       2. rdiff [-r N[:M]] --old=OLD-URL[@OLDREV] [--new=NEW-URL[@NEWREV]] \
+                [PATH...]
+       3. rdiff OLD-URL[@OLDREV] NEW-URL[@NEWREV]
+
+  1. Display the changes made to URLs as they are seen in REV between two
+     revisions.  N must be specified and M defaults to HEAD.  The '-c M' option
+     is equivalent to '-r N:M' where N = M-1.  Using '-c -M' does the reverse:
+     '-r M:N' where N = M-1.
+
+  2. Display the differences between OLD-URL as it was seen in OLDREV and
+     NEW-URL as it was seen in NEWREV.  PATHs, if given, are relative to OLD-URL
+     and NEW-URL and restrict the output to differences for those paths.
+     OLD-URL and NEW-URL must be URL[@REV].  NEW-URL defaults to OLD-URL if not
+     specified.  -r N makes OLDREV default to N, -r N:M makes OLDREV default to
+     N and NEWREV default to M.
+
+  3. Shorthand for 'gvn rdiff --old=OLD-URL[@OLDREV] --new=NEW-URL[@NEWREV]'
+"""
+
+
def Handle_GvnRdiff(ctx):
  """Diff revisions or repository paths by delegating to 'svn diff'.

  Operands are repository URLs (optionally with peg revisions), not
  working-copy paths.  Raises gvn.errors.BadOperands when no operand is
  given.
  """
  # This module does not import gvn.errors at the top; import it here so
  # the no-operands path raises BadOperands rather than NameError.
  import gvn.errors
  ctx.wc_operands = False
  if len(ctx.operands) == 0:
    raise gvn.errors.BadOperands(
      "Not enough arguments provided; try 'gvn rdiff -h' for more info")
  return gvn.cmdline.RunSvnCommand(ctx, 'diff', ctx.operands)
+
# Register 'gvn rdiff' with svn diff's pass-through options.
options = gvn.cmdline.AuthOptions(
  ['project', 'revision', 'change', 'old', 'new', 'non-recursive',
   'diff-cmd', 'extensions', 'no-diff-deleted', 'notice-ancestry',
   'summarize', 'force'])
gvn.cmdline.AddCommand('rdiff', Handle_GvnRdiff, helptext__gvn_rdiff, options)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/revert.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/revert.py
new file mode 100644
index 0000000..2506d15
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/revert.py
@@ -0,0 +1,49 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import gvn.cmdline
+import gvn.util
+
+
+helptext__gvn_revert = """\
+revert: Restore pristine working copy file (undo most local edits).
+usage: revert [-c CHANGE | PATH...]
+
+  Revert the changes made to the paths changed in CHANGE or PATHS.
+"""
+
+
def Handle_GvnRevert(ctx):
  """Revert local edits for PATH operands and/or a whole changebranch.

  With -c CHANGE, every working-copy path attached to CHANGE is added to
  the target list (recursively) and detached from the changebranch.  The
  actual revert is delegated to 'svn revert'.
  """
  targets = set(ctx.operands)
  if ctx.options.change is not None:
    # TODO(epg): Refuse to revert if changebranch is out of date?  Do
    # a snapshot here for the user?
    ctx.options.recursive = True
    ctx.options.gvn_options.append('recursive')
    # Snapshot the items up front: we pop entries while walking them.
    for (path, state) in list(ctx.wc.change_state.iteritems()):
      if state.change_name != ctx.options.change:
        continue
      # path is relative to the wc top; DisplayPath converts it to the
      # localized, cwd-relative form svn expects.
      targets.add(ctx.DisplayPath(path))
      ctx.wc.change_state.pop(path)
    ctx.options.gvn_options.remove('change')
  return gvn.cmdline.RunSvnCommand(ctx, 'revert', sorted(targets))
+
+gvn.cmdline.AddCommand('revert', Handle_GvnRevert, helptext__gvn_revert,
+                       ['change', 'targets', 'recursive', 'quiet'],
+                       {'change': 'changebranch ARG'})
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/review.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/review.py
new file mode 100644
index 0000000..c7085ad0b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/review.py
@@ -0,0 +1,71 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import sys
+
+import svn.core
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.description
+import gvn.diff
+import gvn.util
+
+
+# Usage text for 'gvn help review'; registered with AddCommand below.
+helptext__gvn_review = """review: Review a revision or changebranch.
+usage: review CHANGE | REVISION
+"""
+
+
+def Handle_GvnReview(ctx):
+  """Handler for 'gvn review'.
+
+  For each operand, prints its Description followed by its diff:
+  changebranch operands diff base-to-snapshot, rNNN/NNN operands run
+  'svn diff -c' against the repository URL.
+  """
+  # Operands are change names / revision numbers, not working-copy paths.
+  ctx.wc_operands = False
+  iterpool = svn.core.Pool(ctx.pool)
+  for reviewable in ctx.operands:
+    iterpool.clear()
+    if gvn.util.IsValidChangeName(reviewable):
+      # XXX Ugh, Description gets a ChangeBranch and then we get a
+      # second one here.
+      print gvn.description.Description(ctx.config, ctx.project,
+                                        reviewable).Output(iterpool)
+
+      (username, name, revision) = gvn.util.ParseChangeName(reviewable)
+      cb = gvn.changebranch.ChangeBranch(ctx.config,
+                                         ctx.project,
+                                         name,
+                                         username,
+                                         revision)
+      callbacks = ctx.GetDiffCallbacks(pool=iterpool)
+      # NOTE(review): this return exits after the FIRST changebranch
+      # operand, silently skipping any remaining operands and bypassing
+      # iterpool.destroy() below (the pool is a child of ctx.pool, so it
+      # is presumably reclaimed with its parent) -- confirm intended.
+      return gvn.changebranch.DiffBaseToSnapshot(cb, callbacks, ctx.encoding,
+                                                 iterpool)
+
+    elif gvn.util.MatchRevision(reviewable):
+      # Accept both 'r1234' and '1234' forms.
+      if reviewable[0] == 'r':
+        reviewable = reviewable[1:]
+      print gvn.description.Description(ctx.config, ctx.project,
+                                        reviewable).Output(iterpool)
+      gvn.diff.RunSvnDiff(ctx.options, ctx.config, ['-c', reviewable,
+                                       ctx.project.repository.URL()])
+
+    else:
+      # NOTE(review): gvn.errors is not imported by this module; reaching
+      # this branch would raise NameError instead of BadOperands unless a
+      # sibling import exposes it -- verify and add 'import gvn.errors'.
+      raise gvn.errors.BadOperands('\n'.join([
+                "invalid operand: '%s'" % reviewable,
+                "neither a change name nor a revision number"
+                ]))
+  iterpool.destroy()
+  return 0
+
+
+gvn.cmdline.AddCommand('review', Handle_GvnReview, helptext__gvn_review,
+                       gvn.cmdline.AuthOptions(['extensions', 'project']))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/snapshot.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/snapshot.py
new file mode 100644
index 0000000..77f3736
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/snapshot.py
@@ -0,0 +1,115 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import codecs
+
+import svn.core
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.errors
+import gvn.util
+
+
+# Usage text for 'gvn help snapshot'; registered with AddCommand below.
+helptext__gvn_snapshot = """snapshot: Snapshot a changebranch.
+usage: snapshot [-c CHANGE]
+
+Without options, snapshot all local changebranches.  Won't snapshot a
+changebranch unless necessary; use gvn change to force it.
+"""
+
+
+def Check(cb, ctx):
+  """Return {path: status} if cb needs snapshot, else None.
+
+  Arguments:
+  cb     -- gvn.changebranch.ChangeBranch to examine
+  ctx    -- command context providing the working copy and options
+
+  Raises:
+  gvn.errors.ChangeBranch     -- if any path in cb not under ctx.path
+
+  """
+
+  # We need a boolean indicating whether any cb path needs snapshot
+  # because if any one path needs it, we need them all in paths.
+  # Default based on --force option (or lack thereof).
+  # (Single-element list so the nested callback below can mutate it;
+  # this predates 'nonlocal'.)
+  needs_snapshot = [ctx.options.force]
+  paths = {}
+
+  # Shouldn't the callback receive a pool, like most callbacks?  Fine,
+  # we'll handle it ourselves.
+  pool = svn.core.Pool(ctx.pool)
+  def status_cb(target, status):
+    # Recycle the subpool on every status notification.
+    pool.clear()
+    target = ctx.wc.RelativePath(target)
+    if not gvn.util.IsChild(target, ctx.path):
+      raise gvn.errors.ChangeBranch("'%s' outside '%s'" % (target, ctx.path))
+    if ctx.wc.NeedsSnapshot(target, status, pool):
+      needs_snapshot[0] = True
+    # Collect every status regardless; see comment above.
+    paths[target] = status
+  ctx.wc.Status(list(cb.ChangedPathsRelative()),
+                recursive=False, get_all=False, no_ignore=False,
+                show_children=False, callback=status_cb)
+
+  if needs_snapshot[0]:
+    return paths
+  return None
+
+
+def Handle_GvnSnapshot(ctx):
+  """Handler for 'gvn snapshot' (alias 'snap').
+
+  With -c CHANGE, snapshots that single changebranch; otherwise walks
+  every changebranch known to the working copy and snapshots those that
+  Check() says need it.  -F FILE / -m MSG override the description.
+  """
+  description = None
+  if ctx.options.file is not None:
+    # Read the log message file in the user's configured encoding.
+    description = codecs.open(ctx.options.file,
+                              encoding=ctx.encoding).read()
+  elif ctx.options.message is not None:
+    description = ctx.options.message.decode(ctx.encoding)
+  # TODO(epg): force-log
+
+  if ctx.options.change is None:
+    # No -c: consider every changebranch with state in this working copy.
+    ctx.wc.Open([ctx.path])
+    iterpool = svn.core.Pool(ctx.pool)
+    for i in set(x.change_name for x in ctx.wc.change_state.itervalues()):
+      iterpool.clear()
+      cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project, i)
+      try:
+        paths = Check(cb, ctx)
+        if not paths:
+          # Up to date; nothing to snapshot for this changebranch.
+          continue
+      except gvn.errors.ChangeBranch, e:
+        # Paths outside the target tree: warn and move on rather than abort.
+        ctx.Notify('%s, skipping %s\n' % (e, cb.name))
+        continue
+      cb.Branch(ctx.wc, paths, description=description, pool=iterpool)
+    iterpool.destroy()
+
+  else:
+    # Explicit -c CHANGE: snapshot exactly that changebranch.
+    (username, cname, revision) = gvn.util.ParseChangeName(ctx.options.change)
+    cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project,
+                                       cname, username, revision)
+
+    if description is None:
+      # Keep the changebranch's existing description.
+      description = cb.description
+
+    ctx.wc.Open(list(cb.ChangedPathsRelative()))
+    paths = Check(cb, ctx)
+    if paths:
+      cb.Branch(ctx.wc, paths, description=description, pool=ctx.pool)
+
+  return 0
+
+options = gvn.cmdline.AuthOptions(gvn.cmdline.LogOptions([
+  'change',
+  'force',
+  'quiet'
+]))
+gvn.cmdline.AddCommand('snapshot', Handle_GvnSnapshot,
+                       helptext__gvn_snapshot, options,
+                       {'change': 'changebranch ARG'},
+                       aliases=['snap'])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/submit.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/submit.py
new file mode 100644
index 0000000..4c8cbd2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/subcommands/submit.py
@@ -0,0 +1,65 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# TODO(epg): No --with-revprop support here; see gvn change.
+
+
+import gvn.changebranch
+import gvn.cmdline
+import gvn.errors
+
+
+# Usage text for 'gvn help submit'; registered with AddCommand below.
+helptext__gvn_submit = """submit: Submit a changebranch to the source branch.
+usage: submit -c CHANGE
+"""
+
+
+def Handle_GvnSubmit(ctx):
+  """Handler for 'gvn submit': commit changebranch -c CHANGE to the
+  source branch after verifying all its paths fall under ctx.path.
+  """
+  cb = gvn.changebranch.ChangeBranch(ctx.config, ctx.project,
+                                     ctx.options.change)
+
+  # Refuse to submit a changebranch that touches paths outside the target.
+  # NOTE(review): gvn.util is not imported by this module; IsChild would
+  # raise NameError here unless a sibling import exposes it -- verify and
+  # add 'import gvn.util'.
+  for i in cb.changed_paths:
+    if not gvn.util.IsChild(i.relative_path, ctx.path):
+      raise gvn.errors.ChangeBranchOutsideTarget(i.relative_path, ctx.path)
+
+  if ctx.options.with_revprop is not None:
+    # TODO(epg): Support more than one --with-revprop.
+    # Split NAME=VALUE; a bare NAME gets an empty value.
+    split = ctx.options.with_revprop.split('=', 1)
+    if len(split) == 1:
+      value = ''
+    else:
+      value = split[1]
+    revprops = {split[0]: value}
+  else:
+    revprops = None
+  try:
+    cb.Submit(ctx.wc, revprops=revprops, pool=ctx.pool)
+  except gvn.errors.Conflict, e:
+    # TODO(epg): This isn't quite the right way to make the path in the
+    # error message display-ready.  Maybe gvn.errors.Root should have
+    # a DisplayString method or something that takes as an argument a
+    # function(path) => display path.  gvn.cmdline.main would use
+    # e.DisplayString(ctx.DisplayPath) instead of str(e).  In other
+    # words, each subcommand shouldn't have to handle this on its own,
+    # for each kind of error.
+    e.path = ctx.DisplayPath(e.path)
+    raise
+
+  return 0
+
+
+gvn.cmdline.AddCommand('submit', Handle_GvnSubmit, helptext__gvn_submit,
+                       gvn.cmdline.AuthOptions(['change', 'with-revprop']),
+                       {'change': 'changebranch ARG'})
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/svn_options.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svn_options.py
new file mode 100644
index 0000000..b8616545
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svn_options.py
@@ -0,0 +1,55 @@
+# svn long-option table: maps each long option name to a tuple of
+# (short option letter or None, takes-an-argument flag, None).
+# The third slot is always None here; its purpose is not evident from
+# this file -- presumably a default value, to be confirmed against the
+# option parser that consumes this table.
+Options = {
+  'accept': (None, True, None),
+  'auto-props': (None, False, None),
+  'change': ('c', True, None),
+  'changelist': (None, True, None),
+  'config-dir': (None, True, None),
+  'depth': (None, True, None),
+  'diff-cmd': (None, True, None),
+  'diff3-cmd': (None, True, None),
+  'dry-run': (None, False, None),
+  'editor-cmd': (None, True, None),
+  'encoding': (None, True, None),
+  'extensions': ('x', True, None),
+  'file': ('F', True, None),
+  'force': (None, False, None),
+  'force-log': (None, False, None),
+  'ignore-ancestry': (None, False, None),
+  'ignore-externals': (None, False, None),
+  'incremental': (None, False, None),
+  'keep-changelist': (None, False, None),
+  'keep-local': (None, False, None),
+  'limit': ('l', True, None),
+  'message': ('m', True, None),
+  'native-eol': (None, True, None),
+  'new': (None, True, None),
+  'no-auth-cache': (None, False, None),
+  'no-auto-props': (None, False, None),
+  'no-diff-deleted': (None, False, None),
+  'no-ignore': (None, False, None),
+  'no-unlock': (None, False, None),
+  'non-interactive': (None, False, None),
+  'non-recursive': ('N', False, None),
+  'notice-ancestry': (None, False, None),
+  'old': (None, True, None),
+  'parents': (None, False, None),
+  'password': (None, True, None),
+  'quiet': ('q', False, None),
+  'record-only': (None, False, None),
+  'recursive': ('R', False, None),
+  'relocate': (None, False, None),
+  'remove': (None, False, None),
+  'revision': ('r', True, None),
+  'revprop': (None, False, None),
+  'show-updates': ('u', False, None),
+  'stop-on-copy': (None, False, None),
+  'strict': (None, False, None),
+  'summarize': (None, False, None),
+  'targets': (None, True, None),
+  'use-merge-history': ('g', False, None),
+  'username': (None, True, None),
+  'verbose': ('v', False, None),
+  'with-all-revprops': (None, False, None),
+  'with-revprop': (None, True, None),
+  'xml': (None, False, None),
+}
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/svnauth.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svnauth.py
new file mode 100644
index 0000000..4b69291
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svnauth.py
@@ -0,0 +1,219 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import getpass
+import sys
+
+import svn.wc
+import svn.client
+import svn.core
+
+from svn.core import SVN_AUTH_SSL_CNMISMATCH, SVN_AUTH_SSL_EXPIRED
+from svn.core import SVN_AUTH_SSL_NOTYETVALID, SVN_AUTH_SSL_OTHER
+from svn.core import SVN_AUTH_SSL_UNKNOWNCA
+
+
+def _getpass(prompt='Password: ', stream=sys.stderr):
+  """Add stream argument to getpass, as in Python 2.5."""
+  try:
+    return getpass.getpass(prompt, stream)
+  except TypeError:
+    # Pre-2.5 getpass() takes no stream argument; emit the prompt on
+    # the stream ourselves, then read with an empty prompt.
+    stream.write(prompt)
+    stream.flush()
+    return getpass.getpass('')
+
+
+def GetAuthBaton(username, interactive, config, pool, username_cb=None):
+  """Return new auth callbacks and new auth baton.
+
+  Arguments:
+  username              -- username to try before prompting
+  interactive           -- bool whether prompting should happen
+  config                -- gvn.config.Config
+  pool                  -- memory pool
+  username_cb           -- optional callback to be passed each username entered
+                           by the user to the prompt callbacks
+
+  Returns:
+  tuple of prompt callbacks and the auth baton at index -1; caller
+  must hold references to the entire tuple, as the svn binding does not
+  """
+  # Each nested prompt callback returns None (svn treats that as "no
+  # credentials") when interactive is false.  All prompts go to stderr
+  # so stdout stays clean for command output.
+  def SimplePrompt(realm, username, may_save, pool):
+    if not interactive:
+        return
+    result = svn.core.svn_auth_cred_simple_t()
+
+    # XXX may_save is 1, yet svn isn't storing the password.  Hmph.
+    result.may_save = may_save
+
+    sys.stderr.write('Authentication realm: %s\n' % (realm,))
+    if username is None:
+      sys.stderr.write('Username: ')
+      sys.stderr.flush()
+      result.username = sys.stdin.readline().strip()
+    else:
+      result.username = username
+    result.password = _getpass("Password for '%s': " % (result.username,),
+                               stream=sys.stderr)
+    # Report what the user typed back to the caller, if requested.
+    if callable(username_cb):
+      username_cb(result.username)
+    return result
+
+  def UsernamePrompt(realm, may_save, pool):
+    if not interactive:
+        return
+    result = svn.core.svn_auth_cred_username_t()
+    result.may_save = may_save
+    sys.stderr.write('Authentication realm: %s\n' % (realm,))
+    sys.stderr.write('Username: ')
+    sys.stderr.flush()
+    result.username = sys.stdin.readline().strip()
+    if callable(username_cb):
+      username_cb(result.username)
+    return result
+
+  def SSLServerTrustPrompt(realm, failures, cert_info, may_save, pool):
+    # Mirrors the svn command-line client's certificate-failure prompt.
+    if not interactive:
+      return
+
+    buf = ["Error validating server certificate for '%s':" % (realm,)]
+
+    # failures is a bitmask; report every failure bit that is set.
+    if failures & SVN_AUTH_SSL_UNKNOWNCA:
+      buf.append(
+          ' - The certificate is not issued by a trusted authority. Use the')
+      buf.append('   fingerprint to validate the certificate manually!')
+
+    if failures & SVN_AUTH_SSL_CNMISMATCH:
+      buf.append(' - The certificate hostname does not match.')
+
+    if failures & SVN_AUTH_SSL_NOTYETVALID:
+      buf.append(' - The certificate is not yet valid.')
+
+    if failures & SVN_AUTH_SSL_EXPIRED:
+      buf.append(' - The certificate has expired.')
+
+    if failures & SVN_AUTH_SSL_OTHER:
+      buf.append(' - The certificate has an unknown error.')
+
+    buf.extend(['Certificate information:',
+                ' - Hostname: ' + cert_info.hostname,
+                ' - Valid: from %s until %s' % (cert_info.valid_from,
+                                                  cert_info.valid_until),
+                ' - Issuer: ' + cert_info.issuer_dname,
+                ' - Fingerprint: ' + cert_info.fingerprint])
+
+    # Permanent acceptance is only offered when svn says it may save.
+    if may_save:
+      buf.append('(R)eject, accept (t)emporarily or accept (p)ermanently? ')
+    else:
+      buf.append('(R)eject or accept (t)emporarily? ')
+
+    sys.stderr.write('\n'.join(buf))
+    sys.stderr.flush()
+    choice = sys.stdin.readline().strip().lower()
+
+    if choice == 't':
+      result = svn.core.svn_auth_cred_ssl_server_trust_t()
+      result.may_save = False
+      result.accepted_failures = failures
+    elif may_save and choice == 'p':
+      result = svn.core.svn_auth_cred_ssl_server_trust_t()
+      result.may_save = True
+      result.accepted_failures = failures
+    else:
+      # Anything else (including plain Enter) rejects the certificate.
+      result = None
+
+    return result
+
+  # SSLClientCertPrompt and SSLClientCertPwPrompt are untested.
+  def SSLClientCertPrompt(realm, may_save, pool):
+    if not interactive:
+        return
+    result = svn.core.svn_auth_cred_ssl_client_cert_t()
+    result.may_save = may_save
+    sys.stderr.write('Authentication realm: %s\n' % (realm,))
+    sys.stderr.write('Client certificate filename: ')
+    sys.stderr.flush()
+    result.cert_file = sys.stdin.readline().strip()
+    return result
+
+  def SSLClientCertPwPrompt(realm, may_save, pool):
+    if not interactive:
+        return
+    result = svn.core.svn_auth_cred_ssl_client_cert_pw_t()
+    result.may_save = may_save
+    result.password = _getpass("Passphrase for '%s': " % (realm,),
+                               stream=sys.stderr)
+    return result
+
+  # Assemble providers in priority order: cached/file credentials first,
+  # interactive prompts last.  Platform-specific providers may not exist
+  # in this binding, hence the AttributeError probes.
+  providers = [
+    svn.client.get_simple_provider(),
+    svn.client.get_username_provider(),
+    ]
+  try:
+    providers.append(svn.core.svn_auth_get_windows_ssl_server_trust_provider())
+  except AttributeError:
+    pass
+  providers.extend([
+    svn.client.get_ssl_server_trust_file_provider(),
+    svn.client.get_ssl_client_cert_file_provider(),
+    svn.client.get_ssl_client_cert_pw_file_provider(),
+    ])
+
+  try:
+    # Mac OS X keychain integration
+    providers.append(svn.core.svn_auth_get_keychain_simple_provider())
+  except AttributeError:
+    pass
+  try:
+    providers.append(svn.core.svn_auth_get_windows_simple_provider())
+  except AttributeError:
+    pass
+  # The integer 2 is the retry limit passed to each prompt provider.
+  providers.extend([
+      svn.client.get_simple_prompt_provider(SimplePrompt, 2),
+      svn.client.get_username_prompt_provider(UsernamePrompt, 2),
+      svn.client.get_ssl_server_trust_prompt_provider(SSLServerTrustPrompt),
+      svn.client.get_ssl_client_cert_prompt_provider(SSLClientCertPrompt, 2),
+      svn.client.get_ssl_client_cert_pw_prompt_provider(SSLClientCertPwPrompt,
+                                                        2),
+      ])
+
+  auth_baton = svn.core.svn_auth_open(providers, pool)
+
+  # The C API expects UTF-8 bytes for the config dir.
+  svn_config_dir_utf8 = config.svn_config_dir.encode('utf8')
+  svn.core.svn_auth_set_parameter(auth_baton,
+                                  svn.core.SVN_AUTH_PARAM_CONFIG_DIR,
+                                  svn_config_dir_utf8)
+
+  svn.core.svn_auth_set_parameter(auth_baton,
+                                  svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME,
+                                  username)
+
+  # These three parameters are flags: setting them (to any value) turns
+  # the behavior on.
+  if not interactive:
+    svn.core.svn_auth_set_parameter(auth_baton,
+                                    svn.core.SVN_AUTH_PARAM_NON_INTERACTIVE,
+                                    '')
+
+  if not config.store_passwords:
+    svn.core.svn_auth_set_parameter(auth_baton,
+                                  svn.core.SVN_AUTH_PARAM_DONT_STORE_PASSWORDS,
+                                    '')
+  if not config.store_auth_creds:
+    svn.core.svn_auth_set_parameter(auth_baton,
+                                    svn.core.SVN_AUTH_PARAM_NO_AUTH_CACHE,
+                                    '')
+
+  # The bindings don't hold references to my callbacks, so when we
+  # return, they're all freed.  Work around it for now.
+  return (svn_config_dir_utf8, SimplePrompt, UsernamePrompt, SSLServerTrustPrompt, SSLClientCertPrompt, SSLClientCertPwPrompt, auth_baton)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/svncmd.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svncmd.py
new file mode 100644
index 0000000..3dd91f4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svncmd.py
@@ -0,0 +1,161 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Subversion command abstraction.
+
+This module exec's/call's the svn client commands (the sort of commands an
+svn client wrapper might like to call).
+
+This module simply handles the various svn operations by using Python's
+subprocess module. This results in several "fork && exec"'s.
+
+In the future this interface should be replaced by lovingly hand-crafted
+calls to the Python svn swig bindings.
+"""
+
+
+import inspect
+import os
+import subprocess
+import sys
+import types
+import xml.dom.minidom
+
+import gvn.errors
+import gvn.platform
+
+
+# Module-level switch: when enabled, _ProcessArgv echoes each command
+# line to stderr before it is run.
+_do_command_debug = False
+def EnableCommandDebug():
+  """Turn on echoing of svn command lines (one-way; no disable)."""
+  global _do_command_debug
+  _do_command_debug = True
+
+def DoCommandDebug():
+  """Return True if command-line echoing is enabled."""
+  return _do_command_debug
+
+
+# XXX Username belongs in a context baton, not per-process.
+_effective_username = None
+def SetEffectiveUsername(username):
+  """Set the username passed as --username to subsequent svn commands."""
+  global _effective_username
+  _effective_username = username
+
+def EffectiveUsername():
+  """Return the username set by SetEffectiveUsername, or None."""
+  return _effective_username
+
+# TODO(epg): These commands don't take auth options such as
+# --username; this is a stupid way to pass --username to all commands
+# but these.  Do we really need to be passing that all the time
+# anyway?  Seems to me the user should just pass --username himself,
+# same as with svn.
+# svn subcommands that must NOT receive --username (see _ProcessArgv).
+_no_auth = [
+  'add',
+  'changelist',
+  'cleanup',
+  'help',
+  'resolved',
+  'revert',
+]
+
+
+def quote(s):
+  return ''.join(["'", s.replace("'", "'\''"), "'"])
+
+def _ProcessArgv(argv):
+  """Inject --username into an svn command line and optionally echo it.
+
+  Mutates and returns argv.  Note the insert()s modify the caller's
+  list in place.
+  """
+  # NOTE(review): the global statement is unnecessary (_no_auth is only
+  # read) and 'argv = argv' is a no-op.
+  global _no_auth
+  argv = argv
+
+  # Only add --username for svn invocations of subcommands that accept
+  # auth options, and only when an effective username has been set.
+  if ((len(argv) >= 2
+       and os.path.basename(argv[0]) == 'svn'
+       and argv[1] not in _no_auth)
+      and EffectiveUsername()):
+    # Two inserts at index 1 yield: svn --username USER subcommand ...
+    argv.insert(1, EffectiveUsername())
+    argv.insert(1, '--username')
+
+  if DoCommandDebug():
+    # Echo the (shell-quoted, for display only) command line to stderr.
+    sys.stderr.write(' '.join(quote(x) for x in argv))
+    sys.stderr.write('\n')
+    # Disabled stack-trace dump kept for future debugging.
+#     for (_, filename, line, function, lines, _) in inspect.stack():
+#         sys.stderr.write(' File "%s", line %d, in %s\n'
+#                          % (filename, line, function))
+#         for line in lines:
+#           sys.stderr.write(line)
+  return argv
+
+def Start(argv):
+  """Launch argv asynchronously; return the Popen with piped stdout/stderr."""
+  return subprocess.Popen(_ProcessArgv(argv),
+                          stdout=subprocess.PIPE,
+                          stderr=subprocess.PIPE)
+
+def Capture(argv):
+  """Run argv to completion; return [returncode, stdout, stderr]."""
+  proc = Start(argv)
+  # communicate() returns (stdout, stderr); prepend the exit status.
+  result = list(proc.communicate())
+  result.insert(0, proc.returncode)
+  return result
+
+def Run(argv, stdout_callback=None, stderr_callback=None):
+  """Run argv with output passed through; return its exit status.
+
+  The per-line callback mode is not implemented yet and raises
+  gvn.errors.Internal.
+  """
+  if stdout_callback is None and stderr_callback is None:
+    # Make sure anything *we* have printed goes out before we let svn
+    # start printing things.
+    sys.stdout.flush()
+    sys.stderr.flush()
+    return subprocess.call(_ProcessArgv(argv))
+
+  raise gvn.errors.Internal('callbacks not yet supported')
+
+  # Start threads like Popen.communicate does, but call the callback
+  # with a line each time we get one from the corresponding output.
+  # This is so 'gvn sync' can print update lines as they come in, not
+  # buffer all 7 million of them sitting there printing nothing for 20
+  # minutes, then spewing them all at the end (*cough* gcheckout).
+
+  # NOTE(review): unreachable after the raise above, and 'proc' is
+  # never defined in this function; placeholder for the planned
+  # implementation.
+  return proc.retcode
+
+def _Call(f, cmd, argv=None, options=None):
+  """Prepend cmd (and selected auth options) to argv, then invoke f(argv).
+
+  f is one of Start/Capture/Run; cmd is the executable (e.g. gvn.SVN).
+  Mutates argv in place when one is supplied.
+  """
+  if argv is None:
+    argv = [cmd]
+  else:
+    argv.insert(0, cmd)
+
+  if options is not None:
+    # TODO(epg): these ain't all...
+    # Each insert at index 1 places the flag right after the executable.
+    if options.no_auth_cache is not None:
+      argv.insert(1, '--no-auth-cache')
+    if options.username is not None:
+      argv.insert(1, options.username)
+      argv.insert(1, '--username')
+    if options.password is not None:
+      argv.insert(1, options.password)
+      argv.insert(1, '--password')
+
+  return f(argv)
+
+# Thin wrappers binding _Call to the svn / svnadmin executables.
+# gvn.SVN and gvn.SVNADMIN are presumably the binary paths defined in
+# the gvn package -- not visible in this module.
+def CaptureSvn(argv=None):
+  """Run svn with argv; return [returncode, stdout, stderr]."""
+  return _Call(Capture, gvn.SVN, argv)
+
+def CaptureSvnAdmin(argv=None):
+  """Run svnadmin with argv; return [returncode, stdout, stderr]."""
+  return _Call(Capture, gvn.SVNADMIN, argv)
+
+def RunSvn(argv=None, options=None):
+  """Run svn with argv and pass-through output; return exit status."""
+  return _Call(Run, gvn.SVN, argv, options)
+
+def RunSvnAdmin(argv=None):
+  """Run svnadmin with argv and pass-through output; return exit status."""
+  return _Call(Run, gvn.SVNADMIN, argv)
+
+def StartSvn(argv=None):
+  """Start svn asynchronously; return the Popen object."""
+  return _Call(Start, gvn.SVN, argv)
+
+def StartSvnAdmin(argv=None):
+  """Start svnadmin asynchronously; return the Popen object."""
+  return _Call(Start, gvn.SVNADMIN, argv)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/svncmdline.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svncmdline.py
new file mode 100644
index 0000000..df081ff
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/svncmdline.py
@@ -0,0 +1,88 @@
+# Maps each svn subcommand name to its list of aliases (empty list if none).
+Subcommands = {
+  'add': [],
+  'blame': ['praise', 'annotate', 'ann'],
+  'cat': [],
+  'changelist': ['cl'],
+  'checkout': ['co'],
+  'cleanup': [],
+  'commit': ['ci'],
+  'copy': ['cp'],
+  'delete': ['del', 'remove', 'rm'],
+  'diff': ['di'],
+  'export': [],
+  'help': [],
+  'import': [],
+  'info': [],
+  'list': ['ls'],
+  'lock': [],
+  'log': [],
+  'merge': [],
+  'mergeinfo': [],
+  'mkdir': [],
+  'move': ['mv', 'rename', 'ren'],
+  'propdel': ['pdel', 'pd'],
+  'propedit': ['pedit', 'pe'],
+  'propget': ['pget', 'pg'],
+  'proplist': ['plist', 'pl'],
+  'propset': ['pset', 'ps'],
+  'resolve': [],
+  'resolved': [],
+  'revert': [],
+  'status': ['stat', 'st'],
+  'switch': ['sw'],
+  'unlock': [],
+  'update': ['up'],
+}
+# svn long-option table: long option name -> (short option letter or
+# None, takes-an-argument flag, None).  Third slot is always None here;
+# its meaning is not evident from this file -- confirm against the
+# option parser that consumes it.
+Options = {
+  'no-ignore': (None, False, None),
+  'force': (None, False, None),
+  'quiet': ('q', False, None),
+  'depth': (None, True, None),
+  'parents': (None, False, None),
+  'auto-props': (None, False, None),
+  'non-recursive': ('N', False, None),
+  'no-auto-props': (None, False, None),
+  'targets': (None, True, None),
+  'xml': (None, False, None),
+  'verbose': ('v', False, None),
+  'incremental': (None, False, None),
+  'extensions': ('x', True, None),
+  'use-merge-history': ('g', False, None),
+  'revision': ('r', True, None),
+  'recursive': ('R', False, None),
+  'changelist': (None, True, None),
+  'remove': (None, False, None),
+  'ignore-externals': (None, False, None),
+  'diff3-cmd': (None, True, None),
+  'encoding': (None, True, None),
+  'with-revprop': (None, True, None),
+  'file': ('F', True, None),
+  'force-log': (None, False, None),
+  'message': ('m', True, None),
+  'keep-changelists': (None, False, None),
+  'editor-cmd': (None, True, None),
+  'no-unlock': (None, False, None),
+  'keep-local': (None, False, None),
+  'old': (None, True, None),
+  'no-diff-deleted': (None, False, None),
+  'diff-cmd': (None, True, None),
+  'new': (None, True, None),
+  'summarize': (None, False, None),
+  'notice-ancestry': (None, False, None),
+  'change': ('c', True, None),
+  'native-eol': (None, True, None),
+  'stop-on-copy': (None, False, None),
+  'with-all-revprops': (None, False, None),
+  'limit': ('l', True, None),
+  'reintegrate': (None, False, None),
+  'accept': (None, True, None),
+  'record-only': (None, False, None),
+  'dry-run': (None, False, None),
+  'ignore-ancestry': (None, False, None),
+  'show-revs': (None, True, None),
+  'revprop': (None, False, None),
+  'strict': (None, False, None),
+  'show-updates': ('u', False, None),
+  'set-depth': (None, True, None),
+  'relocate': (None, False, None),
+}
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/__init__.py
new file mode 100644
index 0000000..1454ed6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/__init__.py
@@ -0,0 +1,400 @@
+r"""
+A simple, fast, extensible JSON encoder and decoder
+
+JSON (JavaScript Object Notation) <http://json.org> is a subset of
+JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
+interchange format.
+
simplejson exposes an API familiar to users of the standard library
+marshal and pickle modules.
+
+Encoding basic Python object hierarchies::
+    
+    >>> import simplejson
+    >>> simplejson.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
+    '["foo", {"bar": ["baz", null, 1.0, 2]}]'
+    >>> print simplejson.dumps("\"foo\bar")
+    "\"foo\bar"
+    >>> print simplejson.dumps(u'\u1234')
+    "\u1234"
+    >>> print simplejson.dumps('\\')
+    "\\"
+    >>> print simplejson.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
+    {"a": 0, "b": 0, "c": 0}
+    >>> from StringIO import StringIO
+    >>> io = StringIO()
+    >>> simplejson.dump(['streaming API'], io)
+    >>> io.getvalue()
+    '["streaming API"]'
+
+Compact encoding::
+
+    >>> import simplejson
+    >>> simplejson.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
+    '[1,2,3,{"4":5,"6":7}]'
+
+Pretty printing::
+
+    >>> import simplejson
+    >>> print simplejson.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
+    {
+        "4": 5, 
+        "6": 7
+    }
+
+Decoding JSON::
+    
+    >>> import simplejson
+    >>> simplejson.loads('["foo", {"bar":["baz", null, 1.0, 2]}]')
+    [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
+    >>> simplejson.loads('"\\"foo\\bar"')
+    u'"foo\x08ar'
+    >>> from StringIO import StringIO
+    >>> io = StringIO('["streaming API"]')
+    >>> simplejson.load(io)
+    [u'streaming API']
+
+Specializing JSON object decoding::
+
+    >>> import simplejson
+    >>> def as_complex(dct):
+    ...     if '__complex__' in dct:
+    ...         return complex(dct['real'], dct['imag'])
+    ...     return dct
+    ... 
+    >>> simplejson.loads('{"__complex__": true, "real": 1, "imag": 2}',
+    ...     object_hook=as_complex)
+    (1+2j)
+    >>> import decimal
+    >>> simplejson.loads('1.1', parse_float=decimal.Decimal)
+    Decimal("1.1")
+
+Extending JSONEncoder::
+    
+    >>> import simplejson
+    >>> class ComplexEncoder(simplejson.JSONEncoder):
+    ...     def default(self, obj):
+    ...         if isinstance(obj, complex):
+    ...             return [obj.real, obj.imag]
+    ...         return simplejson.JSONEncoder.default(self, obj)
+    ... 
    >>> simplejson.dumps(2 + 1j, cls=ComplexEncoder)
+    '[2.0, 1.0]'
+    >>> ComplexEncoder().encode(2 + 1j)
+    '[2.0, 1.0]'
+    >>> list(ComplexEncoder().iterencode(2 + 1j))
+    ['[', '2.0', ', ', '1.0', ']']
+    
+
+Using simplejson from the shell to validate and
+pretty-print::
+    
+    $ echo '{"json":"obj"}' | python -msimplejson
+    {
+        "json": "obj"
+    }
+    $ echo '{ 1.2:3.4}' | python -msimplejson
+    Expecting property name: line 1 column 2 (char 2)
+
+Note that the JSON produced by this module's default settings
+is a subset of YAML, so it may be used as a serializer for that as well.
+"""
__version__ = '1.9.1'
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONEncoder',
]

# When executed as a script (python -msimplejson) the package must be
# imported absolutely; when imported as a module, the implicit-relative
# form works on Python 2.4 (which predates absolute-import semantics).
if __name__ == '__main__':
    from simplejson.decoder import JSONDecoder
    from simplejson.encoder import JSONEncoder
else:
    from decoder import JSONDecoder
    from encoder import JSONEncoder

# Shared encoder used by dump()/dumps() when the caller passes only the
# default arguments -- avoids constructing a fresh encoder per call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
+
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """
    Write ``obj`` to the file-like object ``fp`` as a JSON document.

    Keyword arguments mirror ``dumps``:

    - ``skipkeys``: skip (rather than raise ``TypeError`` on) dict keys
      that are not of a basic type (``str``, ``unicode``, ``int``,
      ``long``, ``float``, ``bool``, ``None``).
    - ``ensure_ascii``: when false, some chunks written to ``fp`` may be
      ``unicode`` instances, which ``fp.write()`` must then accept
      (e.g. a ``codecs.getwriter()`` wrapper).
    - ``check_circular``: when false, the circular-reference check is
      skipped; a cycle then causes ``OverflowError`` (or worse).
    - ``allow_nan``: when false, out-of-range floats (``nan``, ``inf``,
      ``-inf``) raise ``ValueError`` instead of being emitted as the
      JavaScript names ``NaN``/``Infinity``/``-Infinity``.
    - ``indent``: non-negative integer for pretty-printing; 0 inserts
      only newlines, ``None`` (default) is the most compact form.
    - ``separators``: ``(item_separator, dict_separator)`` tuple; the
      default is ``(', ', ': ')`` and ``(',', ':')`` is most compact.
    - ``encoding``: character encoding for ``str`` instances (UTF-8).
    - ``default``: function returning a serializable version of an
      otherwise unserializable object, or raising ``TypeError``.
    - ``cls``: a ``JSONEncoder`` subclass to use instead of the default.
    """
    everything_default = (
        not kw and cls is None and default is None and
        skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        indent is None and separators is None and
        encoding == 'utf-8')
    if everything_default:
        # Common path: reuse the shared module-level encoder.
        encoder = _default_encoder
    else:
        if cls is None:
            cls = JSONEncoder
        encoder = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, encoding=encoding,
            default=default, **kw)
    # Stream chunk by chunk; writelines could be faster on some Python
    # versions but at a debuggability cost.
    for piece in encoder.iterencode(obj):
        fp.write(piece)
+
+
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """
    Serialize ``obj`` to a JSON formatted ``str``.

    Keyword arguments:

    - ``skipkeys``: skip (rather than raise ``TypeError`` on) dict keys
      that are not of a basic type (``str``, ``unicode``, ``int``,
      ``long``, ``float``, ``bool``, ``None``).
    - ``ensure_ascii``: when false, the result is a ``unicode`` instance
      instead of an ASCII-escaped ``str``.
    - ``check_circular``: when false, the circular-reference check is
      skipped; a cycle then causes ``OverflowError`` (or worse).
    - ``allow_nan``: when false, out-of-range floats (``nan``, ``inf``,
      ``-inf``) raise ``ValueError`` instead of being emitted as the
      JavaScript names ``NaN``/``Infinity``/``-Infinity``.
    - ``indent``: non-negative integer for pretty-printing; 0 inserts
      only newlines, ``None`` (default) is the most compact form.
    - ``separators``: ``(item_separator, dict_separator)`` tuple; the
      default is ``(', ', ': ')`` and ``(',', ':')`` is most compact.
    - ``encoding``: character encoding for ``str`` instances (UTF-8).
    - ``default``: function returning a serializable version of an
      otherwise unserializable object, or raising ``TypeError``.
    - ``cls``: a ``JSONEncoder`` subclass to use instead of the default.
    """
    fast_path = (
        not kw and cls is None and default is None and
        skipkeys is False and ensure_ascii is True and
        check_circular is True and allow_nan is True and
        indent is None and separators is None and
        encoding == 'utf-8')
    if fast_path:
        # Reuse the shared module-level encoder for default arguments.
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    encoder = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default, **kw)
    return encoder.encode(obj)
+
+
# Shared decoder used by loads() when no custom options are given.
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
+
+
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """
    Deserialize the JSON document read from ``fp`` (a
    ``.read()``-supporting file-like object) to a Python object.

    ``encoding`` names an ASCII-based codec of the content when it is
    not utf-8; non-ASCII-based encodings (such as UCS-2) must be wrapped
    with ``codecs.getreader(fp)(encoding)`` or decoded to ``unicode``
    and passed to ``loads()``.  ``object_hook`` post-processes every
    decoded JSON object (``dict``) and its return value is used in place
    of the ``dict`` (e.g. for JSON-RPC class hinting).  ``cls`` selects
    a ``JSONDecoder`` subclass.
    """
    # Slurp the whole stream and delegate to loads().
    text = fp.read()
    return loads(text,
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, **kw)
+
+
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """
    Deserialize ``s`` (a ``str`` or ``unicode`` JSON document) to a
    Python object.

    ``encoding`` names an ASCII-based codec of a ``str`` input when it
    is not utf-8; non-ASCII-based encodings must be decoded to
    ``unicode`` first.  ``object_hook`` post-processes every decoded
    JSON object (``dict``) and its return value is used in place of the
    ``dict``.  ``parse_float``/``parse_int`` are called with the string
    of every JSON float/int and replace the default ``float``/``int``
    constructors (e.g. ``decimal.Decimal``).  ``parse_constant`` is
    called with one of: -Infinity, Infinity, NaN, null, true, false.
    ``cls`` selects a ``JSONDecoder`` subclass; remaining keyword
    arguments are forwarded to its constructor.
    """
    all_default = (cls is None and encoding is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and not kw)
    if all_default:
        # Reuse the shared module-level decoder for the common case.
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied.
    for name, value in (('object_hook', object_hook),
                        ('parse_float', parse_float),
                        ('parse_int', parse_int),
                        ('parse_constant', parse_constant)):
        if value is not None:
            kw[name] = value
    return cls(encoding=encoding, **kw).decode(s)
+
+
+#
+# Compatibility cruft from other libraries
+#
+
+
def decode(s):
    """
    Deprecated alias for ``loads`` kept for demjson / python-cjson API
    compatibility.  Use ``loads(s)`` instead.
    """
    import warnings
    warnings.warn("simplejson.loads(s) should be used instead of decode(s)",
        category=DeprecationWarning)
    return loads(s)
+
+
def encode(obj):
    """
    Deprecated alias for ``dumps`` kept for demjson / python-cjson API
    compatibility.  Use ``dumps(obj)`` instead.
    """
    import warnings
    warnings.warn("simplejson.dumps(s) should be used instead of encode(s)",
        category=DeprecationWarning)
    return dumps(obj)
+
+
def read(s):
    """
    Deprecated alias for ``loads`` kept for jsonlib / JsonUtils /
    python-json / json-py API compatibility.  Use ``loads(s)`` instead.
    """
    import warnings
    warnings.warn("simplejson.loads(s) should be used instead of read(s)",
        category=DeprecationWarning)
    return loads(s)
+
+
def write(obj):
    """
    Deprecated alias for ``dumps`` kept for jsonlib / JsonUtils /
    python-json / json-py API compatibility.  Use ``dumps(obj)`` instead.
    """
    import warnings
    warnings.warn("simplejson.dumps(s) should be used instead of write(s)",
        category=DeprecationWarning)
    return dumps(obj)
+
+
+#
+# Pretty printer:
+#     curl http://mochikit.com/examples/ajax_tables/domains.json | python -msimplejson
+#
+
+
def main():
    """
    Command-line entry point: read a JSON document from a file (or
    stdin), validate it, and pretty-print it with sorted keys to a file
    (or stdout).
    """
    import sys
    # Zero, one or two positional arguments: [infile [outfile]].
    if len(sys.argv) == 1:
        infile = sys.stdin
        outfile = sys.stdout
    elif len(sys.argv) == 2:
        infile = open(sys.argv[1], 'rb')
        outfile = sys.stdout
    elif len(sys.argv) == 3:
        infile = open(sys.argv[1], 'rb')
        outfile = open(sys.argv[2], 'wb')
    else:
        raise SystemExit("%s [infile [outfile]]" % (sys.argv[0],))
    try:
        obj = load(infile)
    except ValueError, e:
        # Invalid JSON: exit with the decoder's message, not a traceback.
        raise SystemExit(e)
    # sort_keys/indent are forwarded to JSONEncoder via dump's **kw.
    dump(obj, outfile, sort_keys=True, indent=4)
    outfile.write('\n')
+
+
+if __name__ == '__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/__init__.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/__init__.pyo
new file mode 100644
index 0000000..3f23d2f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/__init__.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/decoder.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/decoder.py
new file mode 100644
index 0000000..9992251
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/decoder.py
@@ -0,0 +1,343 @@
+"""
+Implementation of JSONDecoder
+"""
+import re
+import sys
+
+from gvn.third_party.simplejson.scanner import Scanner, pattern
+try:
+    from simplejson._speedups import scanstring as c_scanstring
+except ImportError:
+    pass
+
# Regex flags shared by every decoder pattern.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
+
+def _floatconstants():
+    import struct
+    import sys
+    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
+    if sys.byteorder != 'big':
+        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
+    nan, inf = struct.unpack('dd', _BYTES)
+    return nan, inf, -inf
+
+NaN, PosInf, NegInf = _floatconstants()
+
+
def linecol(doc, pos):
    """
    Map character offset ``pos`` in ``doc`` to a 1-based (line, column)
    pair.  On the first line the column is the raw offset; on later
    lines it is the distance from the preceding newline.
    """
    line = doc.count('\n', 0, pos) + 1
    if line > 1:
        col = pos - doc.rindex('\n', 0, pos)
    else:
        col = pos
    return line, col
+
+
def errmsg(msg, doc, pos, end=None):
    """
    Format a decoder error message with line/column context for ``pos``
    (and for ``end``, when a span rather than a point is reported).
    """
    startline, startcol = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, startline, startcol, pos)
    endline, endcol = linecol(doc, end)
    fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
    return fmt % (msg, startline, startcol, endline, endcol, pos, end)
+
+
# Literal JSON constants mapped to their Python values.  The
# Infinity/NaN entries are extensions beyond the strict JSON grammar.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
    'true': True,
    'false': False,
    'null': None,
}
+
def JSONConstant(match, context, c=_CONSTANTS):
    """
    Scanner action for literal constants (true/false/null and the
    non-standard Infinity/NaN forms).  Honours ``context.parse_constant``
    when set; otherwise maps the token through the table ``c``.
    """
    token = match.group(0)
    hook = getattr(context, 'parse_constant', None)
    if hook is not None:
        return hook(token), None
    return c[token], None
pattern('(-?Infinity|NaN|true|false|null)')(JSONConstant)
+
+
def JSONNumber(match, context):
    """
    Scanner action for JSON numbers.  Re-matches with this action's own
    regex (installed by the ``pattern`` decorator) to split the integer,
    fraction and exponent groups, then dispatches to the context's
    parse_int/parse_float hooks (defaulting to ``int``/``float``).
    """
    m = JSONNumber.regex.match(match.string, *match.span())
    integer, frac, exp = m.groups()
    if not frac and not exp:
        make_int = getattr(context, 'parse_int', None) or int
        return make_int(integer), None
    make_float = getattr(context, 'parse_float', None) or float
    text = integer + (frac or '') + (exp or '')
    return make_float(text), None
pattern(r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?')(JSONNumber)
+
+
# Matches a run of ordinary characters followed by a quote, a backslash
# or a raw control character -- the points where string scanning must
# stop and decide what to do next.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character backslash escapes and their decoded values.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

# Encoding assumed for str input when the decoder was given none.
DEFAULT_ENCODING = "utf-8"
+
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """
    Scan a JSON string literal in ``s`` starting just after the opening
    quote at index ``end - 1``.

    Returns ``(decoded_unicode, end)`` where ``end`` is the index one
    past the closing quote.  ``encoding`` decodes plain ``str`` chunks
    (utf-8 by default); when ``strict`` is true, raw control characters
    inside the string raise ``ValueError``.
    """
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Ordinary (non-escape) characters: decode to unicode and keep.
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        if terminator == '"':
            break
        elif terminator != '\\':
            # Raw control character inside the string.
            if strict:
                # Interpolate the offending character into the message;
                # previously the bare format string was passed through,
                # so the error literally contained "%r".
                raise ValueError(errmsg(
                    "Invalid control character %r at" % (terminator,), s, end))
            else:
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        if esc != 'u':
            # Single-character escape (\n, \t, ...).
            try:
                m = _b[esc]
            except KeyError:
                raise ValueError(
                    errmsg("Invalid \\escape: %r" % (esc,), s, end))
            end += 1
        else:
            # \uXXXX escape; on wide (UCS-4) builds a high surrogate must
            # be followed by a second \uXXXX forming a surrogate pair.
            esc = s[end + 1:end + 5]
            next_end = end + 5
            msg = "Invalid \\uXXXX escape"
            try:
                if len(esc) != 4:
                    raise ValueError
                uni = int(esc, 16)
                if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                    msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                    if not s[end + 5:end + 7] == '\\u':
                        raise ValueError
                    esc2 = s[end + 7:end + 11]
                    if len(esc2) != 4:
                        raise ValueError
                    uni2 = int(esc2, 16)
                    uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                    next_end += 6
                m = unichr(uni)
            except ValueError:
                raise ValueError(errmsg(msg, s, end))
            end = next_end
        _append(m)
    return u''.join(chunks), end
+
+
# Prefer the C implementation when the optional _speedups extension was
# imported above; otherwise fall back to the pure-Python scanner.
try:
    scanstring = c_scanstring
except NameError:
    scanstring = py_scanstring
+
def JSONString(match, context):
    """
    Scanner action for a string literal: hand off to ``scanstring`` just
    past the opening quote, honouring the context's encoding and strict
    settings.
    """
    enc = getattr(context, 'encoding', None)
    return scanstring(match.string, match.end(), enc,
                      getattr(context, 'strict', True))
pattern(r'"')(JSONString)
+
+
# Matches a (possibly empty) run of whitespace; bound as _w below.
WHITESPACE = re.compile(r'\s*', FLAGS)
+
def JSONObject(match, context, _w=WHITESPACE.match):
    """
    Scanner action for a JSON object: parse ``{"key": value, ...}``
    starting just past the opening brace and return ``(dict, end)``,
    where ``end`` is the index past the closing brace.  Applies
    ``context.object_hook`` to the finished dict when set.
    """
    pairs = {}
    s = match.string
    end = _w(s, match.end()).end()
    nextchar = s[end:end + 1]
    # Trivial empty object
    if nextchar == '}':
        return pairs, end + 1
    if nextchar != '"':
        raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    encoding = getattr(context, 'encoding', None)
    strict = getattr(context, 'strict', True)
    iterscan = JSONScanner.iterscan
    while True:
        # Key string (we are just past its opening quote), then ':'.
        key, end = scanstring(s, end, encoding, strict)
        end = _w(s, end).end()
        if s[end:end + 1] != ':':
            raise ValueError(errmsg("Expecting : delimiter", s, end))
        end = _w(s, end + 1).end()
        try:
            # Recurse via the module-level scanner for the value.
            value, end = iterscan(s, idx=end, context=context).next()
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar == '}':
            break
        if nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        # After the comma the next member's opening quote must follow.
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    object_hook = getattr(context, 'object_hook', None)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
pattern(r'{')(JSONObject)
+
+
def JSONArray(match, context, _w=WHITESPACE.match):
    """
    Scanner action for a JSON array: parse ``[value, ...]`` starting
    just past the opening bracket and return ``(list, end)``, where
    ``end`` is the index past the closing bracket.
    """
    values = []
    s = match.string
    end = _w(s, match.end()).end()
    # Look-ahead for trivial empty array
    nextchar = s[end:end + 1]
    if nextchar == ']':
        return values, end + 1
    iterscan = JSONScanner.iterscan
    while True:
        try:
            # Recurse via the module-level scanner for each element.
            value, end = iterscan(s, idx=end, context=context).next()
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        values.append(value)
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        if nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        end = _w(s, end).end()
    return values, end
pattern(r'\[')(JSONArray)
+
+
# Ordered list of scanner actions; the first pattern that matches at the
# current index wins.
ANYTHING = [
    JSONObject,
    JSONArray,
    JSONString,
    JSONConstant,
    JSONNumber,
]

# Module-level scanner used by the object/array actions for recursion.
JSONScanner = Scanner(ANYTHING)
+
+
class JSONDecoder(object):
    """
    Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:
    
    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """

    # Per-class scanner over the full set of JSON productions; the
    # decoder instance itself is passed as the scan context.
    _scanner = Scanner(ANYTHING)
    __all__ = ['__init__', 'decode', 'raw_decode']

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True):
        """
        ``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default).  It has no
        effect when decoding ``unicode`` objects.
        
        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded. By default this is equivalent to
        float(num_str). This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded. By default this is equivalent to
        int(num_str). This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN, null, true, false.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        ``strict``, when true (the default), rejects raw control
        characters inside string literals.
        """
        # These attributes are read by the scanner actions via
        # getattr(context, ...) during decoding.
        self.encoding = encoding
        self.object_hook = object_hook
        self.parse_float = parse_float
        self.parse_int = parse_int
        self.parse_constant = parse_constant
        self.strict = strict

    def decode(self, s, _w=WHITESPACE.match):
        """
        Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document).  Raises ``ValueError`` if
        anything other than trailing whitespace follows the document.
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, **kw):
        """
        Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        # Unless the caller supplies its own context, this decoder's
        # attributes configure the scan.
        kw.setdefault('context', self)
        try:
            obj, end = self._scanner.iterscan(s, **kw).next()
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end
+
+__all__ = ['JSONDecoder']
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/decoder.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/decoder.pyo
new file mode 100644
index 0000000..5e81074
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/decoder.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/encoder.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/encoder.py
new file mode 100644
index 0000000..772a2614
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/encoder.py
@@ -0,0 +1,385 @@
+"""
+Implementation of JSONEncoder
+"""
+import re
+
+try:
+    from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
+except ImportError:
+    pass
+
# Characters that must be escaped inside a JSON string: control characters,
# backslash and double-quote.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ASCII-only output: additionally escape everything outside printable
# ASCII (space through '~').
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Matches any byte with the high bit set -- used to spot byte strings that
# presumably contain UTF-8 data and need decoding first (TODO: confirm
# callers only pass UTF-8 byte strings).
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Map from a character to its JSON escape sequence.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# Fill in \uXXXX escapes for the remaining control characters; setdefault
# preserves the short forms (\b, \n, ...) registered above.
for i in range(0x20):
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
+
def floatstr(o, allow_nan=True):
    """Format the float ``o`` for JSON output.

    Ordinary floats use FLOAT_REPR; the special values are spelled NaN,
    Infinity and -Infinity.  When ``allow_nan`` is false a ValueError is
    raised for the specials instead, since they are not strict JSON.
    """
    # Detect the specials without poking at processor/platform internals:
    # NaN is the only float that compares unequal to itself, and the
    # infinities compare equal to the module-level INFINITY constant.
    if o != o:
        special = 'NaN'
    elif o == INFINITY:
        special = 'Infinity'
    elif o == -INFINITY:
        special = '-Infinity'
    else:
        return FLOAT_REPR(o)
    if allow_nan:
        return special
    raise ValueError("Out of range float values are not JSON compliant: %r"
        % (o,))
+
+
def encode_basestring(s):
    """
    Return a JSON representation of a Python string
    """
    # Substitute every character that needs escaping with its mapped escape
    # sequence, then wrap the result in double quotes.
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return '"%s"' % (escaped,)
+
+
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON string literal for ``s``.

    Pure-Python fallback for the C ``encode_basestring_ascii`` speedup:
    every non-printable-ASCII character is emitted as a \\uXXXX escape.
    """
    # Byte strings containing high bytes are decoded as UTF-8 first so the
    # escaping below operates on unicode code points (Python 2 semantics).
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            # Short escape (\n, \t, ...) when one exists.
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
+
+
# Prefer the C-accelerated implementation when the _speedups import at the
# top of this module succeeded; otherwise c_encode_basestring_ascii is
# unbound and we fall back to the pure-Python version.
try:
    encode_basestring_ascii = c_encode_basestring_ascii
except NameError:
    encode_basestring_ascii = py_encode_basestring_ascii
+
+
class JSONEncoder(object):
    """
    Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    __all__ = ['__init__', 'default', 'encode', 'iterencode']
    # Class-level defaults, overridden per instance when ``separators`` is
    # passed to the constructor.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """
        Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is False, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is True, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is True, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is True, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is True, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level.  An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """

        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        # Nesting depth used by _newline_indent() during pretty-printing.
        self.current_indent_level = 0
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            self.default = default
        self.encoding = encoding

    def _newline_indent(self):
        # A newline followed by the indentation for the current depth.
        return '\n' + (' ' * (self.indent * self.current_indent_level))

    def _iterencode_list(self, lst, markers=None):
        # Yield the JSON chunks for a list or tuple.  ``markers`` maps
        # id(container) -> container for every container currently being
        # encoded; seeing the same id again means a circular reference.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        yield '['
        if self.indent is not None:
            self.current_indent_level += 1
            newline_indent = self._newline_indent()
            separator = self.item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            separator = self.item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                yield separator
            for chunk in self._iterencode(value, markers):
                yield chunk
        if newline_indent is not None:
            self.current_indent_level -= 1
            yield self._newline_indent()
        yield ']'
        # Done with this container; allow it to appear again as a sibling.
        if markers is not None:
            del markers[markerid]

    def _iterencode_dict(self, dct, markers=None):
        # Yield the JSON chunks for a dict; keys are coerced to strings per
        # the branches below, values are encoded recursively.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        key_separator = self.key_separator
        if self.indent is not None:
            self.current_indent_level += 1
            newline_indent = self._newline_indent()
            item_separator = self.item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = self.item_separator
        first = True
        if self.ensure_ascii:
            encoder = encode_basestring_ascii
        else:
            encoder = encode_basestring
        allow_nan = self.allow_nan
        if self.sort_keys:
            keys = dct.keys()
            keys.sort()
            items = [(k, dct[k]) for k in keys]
        else:
            items = dct.iteritems()
        # Byte-string keys are decoded to unicode unless the encoding is
        # UTF-8 (in which case the encoder functions handle them directly).
        _encoding = self.encoding
        _do_decode = (_encoding is not None
            and not (_encoding == 'utf-8'))
        for key, value in items:
            if isinstance(key, str):
                if _do_decode:
                    key = key.decode(_encoding)
            elif isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = floatstr(key, allow_nan)
            elif isinstance(key, (int, long)):
                key = str(key)
            # NOTE(review): the True/False branches below look unreachable --
            # in Python 2 bool is a subclass of int, so True/False are caught
            # by the (int, long) branch above and rendered via str().  Confirm
            # against upstream simplejson before relying on 'true'/'false'
            # keys.
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif self.skipkeys:
                continue
            else:
                raise TypeError("key %r is not a string" % (key,))
            if first:
                first = False
            else:
                yield item_separator
            yield encoder(key)
            yield key_separator
            for chunk in self._iterencode(value, markers):
                yield chunk
        if newline_indent is not None:
            self.current_indent_level -= 1
            yield self._newline_indent()
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(self, o, markers=None):
        # Central dispatch: yield the JSON chunks for any supported object.
        if isinstance(o, basestring):
            if self.ensure_ascii:
                encoder = encode_basestring_ascii
            else:
                encoder = encode_basestring
            _encoding = self.encoding
            if (_encoding is not None and isinstance(o, str)
                    and not (_encoding == 'utf-8')):
                o = o.decode(_encoding)
            yield encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield floatstr(o, self.allow_nan)
        elif isinstance(o, (list, tuple)):
            for chunk in self._iterencode_list(o, markers):
                yield chunk
        elif isinstance(o, dict):
            for chunk in self._iterencode_dict(o, markers):
                yield chunk
        else:
            # Unknown type: let self.default() convert it, guarding against
            # a default() implementation that returns a cycle.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            for chunk in self._iterencode_default(o, markers):
                yield chunk
            if markers is not None:
                del markers[markerid]

    def _iterencode_default(self, o, markers=None):
        # Convert ``o`` via the (possibly user-supplied) default() hook and
        # encode the result.
        newobj = self.default(o)
        return self._iterencode(newobj, markers)

    def default(self, o):
        """
        Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError("%r is not JSON serializable" % (o,))

    def encode(self, o):
        """
        Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None 
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = list(self.iterencode(o))
        return ''.join(chunks)

    def iterencode(self, o):
        """
        Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        return self._iterencode(o, markers)
+
+__all__ = ['JSONEncoder']
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/encoder.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/encoder.pyo
new file mode 100644
index 0000000..de380ef
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/encoder.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/scanner.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/scanner.py
new file mode 100644
index 0000000..2a18390d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/scanner.py
@@ -0,0 +1,67 @@
+"""
+Iterator based sre token scanner
+"""
+import re
+from re import VERBOSE, MULTILINE, DOTALL
+import sre_parse
+import sre_compile
+import sre_constants
+from sre_constants import BRANCH, SUBPATTERN
+
__all__ = ['Scanner', 'pattern']

# Default regex flags applied to every lexicon pattern.
FLAGS = (VERBOSE | MULTILINE | DOTALL)
+
class Scanner(object):
    """Combine a lexicon of ``@pattern``-decorated callables into a single
    compiled alternation and drive the callables from the matches."""

    def __init__(self, lexicon, flags=FLAGS):
        # actions[0] is a placeholder: m.lastindex is 1-based, matching the
        # 1-based subpattern group numbers constructed below.
        self.actions = [None]
        # Combine phrases into a compound pattern
        s = sre_parse.Pattern()
        s.flags = flags
        p = []
        for idx, token in enumerate(lexicon):
            # ``token.pattern`` is the regex source attached by @pattern.
            phrase = token.pattern
            # NOTE(review): this try/except re-raises unchanged, so it is a
            # no-op -- presumably a hook left for a friendlier error message.
            try:
                subpattern = sre_parse.SubPattern(s,
                    [(SUBPATTERN, (idx + 1, sre_parse.parse(phrase, flags)))])
            except sre_constants.error:
                raise
            p.append(subpattern)
            self.actions.append(token)

        s.groups = len(p) + 1 # NOTE(guido): Added to make SRE validation work
        # One big (a)|(b)|(c)... alternation over all lexicon entries.
        p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
        self.scanner = sre_compile.compile(p)

    def iterscan(self, string, idx=0, context=None):
        """
        Yield match, end_idx for each match
        """
        match = self.scanner.scanner(string, idx).match
        actions = self.actions
        lastend = idx
        end = len(string)
        while True:
            m = match()
            if m is None:
                break
            matchbegin, matchend = m.span()
            if lastend == matchend:
                # No progress was made: stop rather than loop forever.
                break
            # m.lastindex identifies which lexicon entry matched.
            action = actions[m.lastindex]
            if action is not None:
                rval, next_pos = action(m, context)
                if next_pos is not None and next_pos != matchend:
                    # "fast forward" the scanner
                    matchend = next_pos
                    match = self.scanner.scanner(string, matchend).match
                yield rval, matchend
            lastend = matchend
+
+
def pattern(pattern, flags=FLAGS):
    """Decorator factory: tag the decorated function with the regex source
    it handles (``fn.pattern``) and a compiled form of it (``fn.regex``),
    for consumption by ``Scanner``."""
    def attach(fn):
        fn.pattern = pattern
        fn.regex = re.compile(pattern, flags)
        return fn
    return attach
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/scanner.pyo b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/scanner.pyo
new file mode 100644
index 0000000..69cc886b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/third_party/simplejson/scanner.pyo
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/userdb.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/userdb.py
new file mode 100644
index 0000000..830f196
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/userdb.py
@@ -0,0 +1,395 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Handles reading and writing from our svn userdb.
+
+This class is meant to be replaceable with another one that implements access to
+whatever database type/format you want to use. The current implementation works
+fine for us, but you might want to use mysql, or pickles, or whatever other
+access method
+"""
+
+import crypt
+import os
+import random
+import shutil
+import tempfile
+import time
+import re
+from errno import ENOENT, ENOTEMPTY
+
# Name of a marker file dropped in a user's directory for hand-created
# ("svn only") users, so that they are not auto-deleted as part of a sync
# with an external user database.
SVN_ONLY_FLAG = "user_is_svn_only;not_autosynced"
+
+
class NoUserError(Exception):
  """Raised when a username is not present in the user database."""

  def __init__(self, username, child):
    # ``child`` carries the underlying exception (e.g. the IOError from a
    # missing password file) for callers that want the root cause.
    Exception.__init__(self, 'No such user %s' % username)
    self.username = username
    self.child = child
+
+
def GeneratePassword():
  """Return a new 8-character random password for subversion."""
  # 'l', 'O' and 'o' are excluded so they are not confused with '1' and '0';
  # the characters : " \ ' ` are excluded as well.
  char_list = ( "ABCDEFGHIJKLMNPQRSTUVWXYZabcdefghijkmnpqrstuvwxyz"
                "0123456789!@#$%^&*()-=_+[]|{};<>?,./~" )
  # Eight independent draws from the alphabet above.
  return ''.join([random.choice(char_list) for _ in range(8)])
+
def GenerateSalt():
  """Generate a crypt()-style 2 byte salt string."""
  salty_characters = ("abcdefghijklmnopqrstuvwxyz"
                      "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
                      "0123456789./")
  # Two independent draws from the crypt() salt alphabet.
  return random.choice(salty_characters) + random.choice(salty_characters)
+
def HashPassword(password):
  """Return a crypt()-style hash of the password, a la htpasswd,
  suitable for use by Apache httpd's BasicAuth mechanism.
  """
  # A fresh random two-character salt is generated for every hash.
  salt = GenerateSalt()
  return crypt.crypt(password, salt)
+
def WriteUserData(filepath, filename, data):
  """Creates a tmpfile in filepath, write data and rename as filename.

  data is a sequence of lines (without trailing newlines).  Writing to a
  temp file and renaming it into place keeps readers from ever seeing a
  partially written file (on filesystems where rename is atomic).
  """

  tmpfile = None
  try:
    fd,tmpfile = tempfile.mkstemp(dir=filepath, prefix=filename, text=True)
    # mkstemp ignores the umask and makes a file not readable by group or others
    fh = os.fdopen(fd, "w")
    fh.write("\n".join(data) + "\n")
    fh.close()
    # Loosen mkstemp's restrictive 0600 mode so group members can read it.
    os.chmod(tmpfile, 0664)
    os.rename(tmpfile, filepath + "/" + filename)
    # Rename succeeded; clear the variable so cleanup below is skipped.
    tmpfile = None
  finally:
    # Best-effort removal of the temp file if anything above failed.
    if tmpfile is not None:
      try:
        os.unlink(tmpfile)
      except:
        # NOTE(review): bare except also swallows SystemExit and
        # KeyboardInterrupt; 'except OSError' would be safer here.
        pass
+
+
class UserDB(object):
  """A class for interfacing with mod_authn_dir-style directory data.

  This object can be replaced with another one to support any kind of storage
  format (flat file, pickle, SQL, etc...)

  It contains all the methods necessary to access data in our database

  On-disk layout (per user): <dirpath>/<first 3 chars>/<username>/ containing
  a "password" file ("crypted:plaintext:username" lines), an optional
  "groups" file (one group per line), and optionally an SVN_ONLY_FLAG
  marker file.
  """

  def __init__(self, dbinit, create_db=False):
    """Initializes the DB object with whatever DB specific data is passed.

    dbinit can contain whatever init data this object needs (dir, file, etc).
    create_db set to True allows for the database to be created at init/open
    time

    Here, we just take a directory name, and optionally create the said
    directory if needed.
    """

    if not os.path.exists(dbinit):
      if create_db:
        # this userdb implementation just needs a top level directory as to
        # be valid
        os.makedirs(dbinit)
      else:
        # NOTE(review): raising a string is deprecated (and removed in later
        # Python versions); prefer raising an Exception subclass.
        raise "userdb init failed: no such directory " + dbinit

    self._dirpath = dbinit

    # depending on the caller, we may build and cache a list of groups
    # (see UserGroups below).
    self._usergroups = {}


  def _HashUserDir(self, username):
    """Create a hashed directory for a given username.

    We use the first 3 letters of the username, which gives a maximum number
    of 17,576 directories, and probably around 5000 directories max in most
    configurations (we do want fewer than 32,000 dirs which is the limit for
    ext2/ext3 without extended attributes)

    This in turn, gives 100 to 500 user sub-directories per hash bucket if you
    have one million users
    This is not the best hashing function, but it's simple and allows for easy
    lookup by an admin or a shell script

    Note: despite the name, this only computes the path; it does not create
    anything on disk (see _MakeUserDir for that).
    """

    userdir = os.path.join(self._dirpath, username[:3], username)
    return userdir

  def _GetUserDir(self, username):
    """Return a hash path to a user directory, or None if not present.
    """

    userdir = self._HashUserDir(username)
    if os.path.exists(userdir):
      return userdir
    else:
      return None


  def _MakeUserDir(self, username):
    """Make a new user directory if needed, and return the hashed location.

    In other words, you should call this function just like you'd call
    _HashUserDir, and it will just happen to create the hashed directory for
    you if needed.
    """

    userdir = self._HashUserDir(username)
    hash_bucket = os.path.dirname(userdir)
    # make the parent and current directories if needed
    # (OSError from mkdir on an existing directory is expected and ignored;
    # NOTE(review): this also hides permission errors -- confirm acceptable.)
    try:
      os.mkdir(hash_bucket)
      os.chmod(hash_bucket, 02775)
    except OSError:
      pass

    try:
      os.mkdir(userdir)
      os.chmod(userdir, 02775)
    except OSError:
      pass

    return userdir


  def ReadUserPassword(self, username):
    """Returns a (username, plaintext_password, crypted_password) tuple.

    Note: if the username does not exist a partially empty tuple of the
    form (username, None, None) is returned.

    Raises NoUserError if the password file is missing.
    """
    plaintext = None
    crypted = None

    userdir = self._HashUserDir(username)

    # Turn ENOENT into NoUserError, but let other errors out.
    try:
      fp = open(userdir + "/password", "r")
    except IOError, e:
      if e.errno != ENOENT:
        raise
      raise NoUserError(username, e)

    # Password file line format (see WriteUserPassword):
    # crypted:plaintext:username
    for line in fp:
      if not line or line[0] == "#":
        continue
      parts = line.split(':')
      crypted = parts[0]
      plaintext = parts[1]
      # Ignore all lines after this one (not that there should be any)
      break

    return (username, plaintext, crypted)


  def WriteUserPassword(self, username, password):
    """Create a new user entry if needed and sets/changes the password.

    Returns the (username, plaintext, crypted) tuple that was stored.
    """

    hashed_pwd = HashPassword(password)
    pwd_line = "%s:%s:%s\n" % (hashed_pwd, password, username)
    WriteUserData(self._MakeUserDir(username), "password", [ pwd_line ])

    return (username, password, hashed_pwd)


  def ReadUserGroups(self, username):
    """Get a list of all groups for a user, or return None if no such user.

    Returns None if the user does not exist, or an empty list of the user has
    no groups

    NOTE(review): the code below actually returns [] (not None) for a
    missing user as well -- confirm which behavior callers expect.
    """

    userdir = self._GetUserDir(username)
    # We accept users with no group files and just say that they have no groups
    if userdir is None:
      return []

    grouplist = []
    try:
      f = open(userdir + "/groups", "r")
    except IOError, e:
      if e.errno != ENOENT:
        raise
      # Non-existent groups file is no error; just means no groups.
      return []
    buf = f.read()
    f.close()
    lines = buf.split('\n')
    for line in lines:
      # Skip blanks and comment lines.
      if not line or line[0] == "#":
        continue
      grouplist.append(line)

    return grouplist


  def WriteUserGroups(self, username, grouplist):
    """Create a new user entry if needed and sets/changes the group list.

    grouplist is a sequence of group names, one stored per line.
    """

    WriteUserData(self._MakeUserDir(username), "groups", grouplist)


  def DeleteUser(self, username):
    """delete user hash dir with all data, and hash bucket, if possible.

    Return None if no such user existed
    """

    userdir = self._GetUserDir(username)
    if userdir is None:
      return None

    shutil.rmtree(userdir)

    # Try to delete the hash bucket, just in case it became empty
    hash_bucket = os.path.dirname(userdir)
    try:
      os.rmdir(hash_bucket)
    except OSError, e:
      if e.errno != ENOTEMPTY:
        raise

  def DoesUserExist(self, username, require_password=False):
    """simply does what it says :).

       Obviously, a user can be added or removed between the time you run
       this call, and the time you use the data you got from it. Keep that in
       mind.

       Setting require_password to True if asking for a user with password
    """

    if self._GetUserDir(username):
      if not require_password:
        return True
      else:
        # NOTE(review): ReadUserPassword raises NoUserError (not IOError)
        # when the password file is missing, so this except clause appears
        # never to trigger for that case and the NoUserError would propagate
        # to the caller -- confirm intended behavior.
        try:
          self.ReadUserPassword(username)
          return True
        except IOError:
          return False
    else:
      return False

  def GetAllUsers(self, require_password=True, return_not_autosynced=True):
    """user list generator.

    require_password: if True, yield only users with password entries.
    return_not_autosynced: If False, do not return users with SVN_ONLY_FLAG
         (useful for not deleting users if they are autosynced from another DB)

    """

    for hashdir in os.listdir(self._dirpath):
      # our hashdirs are 1 to 3 letters, skip all others
      if len(hashdir) > 3:
        continue

      hashdir = self._dirpath + "/" + hashdir
      # skip whatever could be there, but isn't a directory (just being safe)
      if not os.path.isdir(hashdir):
        continue

      for user in os.listdir(hashdir):

        # if we want to exclude non autosynced users:
        if return_not_autosynced is False:
          if os.path.isfile("%s/%s/%s" % (hashdir, user, SVN_ONLY_FLAG)):
            # we don't want to return not_autosynced users, and we found the
            # autosynced flag, so:
            continue

        if not require_password:
          yield user
        else:
          # Nice, re-uses password reading code, but too slow
          #try:
          #  self.ReadUserPassword(user)
          #except IOError:
          #  pass

          # this is 3-5x faster
          if os.path.isfile("%s/%s/password" % (hashdir,user)):
            yield user


  def LockUser(self, username):
    """Pretty much does what it says.

    Locking is implemented by prefixing both password fields with "!!",
    which makes the crypted field match no password.
    """

    (username, plaintext, crypted) = self.ReadUserPassword(username)
    pwd_line = "!!%s:!!%s:%s\n" % (crypted, plaintext, username)
    WriteUserData(self._MakeUserDir(username), "password", [ pwd_line ])

  def UnLockUser(self, username):
    """Pretty much does what it says. Invariant if the user isn't locked"""

    (username, plaintext, crypted) = self.ReadUserPassword(username)
    # Strip the "!!" lock prefix added by LockUser, if present.
    plaintext = re.sub(r'^!!', '', plaintext)
    crypted = re.sub(r'^!!', '', crypted)
    pwd_line = "%s:%s:%s\n" % (crypted, plaintext, username)
    WriteUserData(self._MakeUserDir(username), "password", [ pwd_line ])

  def RandomizeUserPassword(self, username):
    """Create a new random password for user 'username'.

    Returns the new (username, plain, crypted) tuple for this user.
    Note: as a side effect user 'username' is created if necessary.
    """
    return self.WriteUserPassword(username, GeneratePassword())

  def CreateSvnOnlyUser(self, username):
    """Creates a user with the 'svn only' flag and a random password.

    This special flag is used to indicate to auto DB syncing scripts that
    this user should not be removed if it's absent from the DB we're syncing
    from.

    Returns the new (username, plain, crypted) tuple for the user.
    """
    tuplet = self.WriteUserPassword(username, GeneratePassword())
    # NOTE(review): WriteUserData expects a sequence of lines and does
    # "\n".join(data); passing the bare string joins its individual
    # characters -- this probably should be ["not autosynced"].
    WriteUserData(self._HashUserDir(username), SVN_ONLY_FLAG,
                                                               "not autosynced")
    return tuplet


  def UserGroups(self, user):
    """Like ReadUserGroups, but cache the output."""

    if user not in self._usergroups:
      self._usergroups[user] = self.ReadUserGroups(user)

    return self._usergroups[user]

  def UserInGroup(self, user, group):
    # Membership test against the (cached) group list.
    return group in self.UserGroups(user)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/util.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/util.py
new file mode 100644
index 0000000..d22f1121
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/util.py
@@ -0,0 +1,623 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""Utility routines used throughout the 'gvn' wrapper.
+
+   Note that all path manipulations assume Unix-style paths, as expected
+   by internal classes.  This means that they can't use os.path methods,
+   in case we're actually on Windows!
+"""
+
+import codecs
+import datetime
+import os
+import posixpath
+import re
+import shutil
+import sys
+import tempfile
+import time
+
+from errno import ENOENT
+from tempfile import mkstemp
+
+import gvn.errors
+import gvn.platform
+
+# XXX We had this in 1.4, but lost it at some point on trunk.
+try:
+  from svn.core import APR_OS_START_SYSERR
+except ImportError:
+  APR_OS_START_SYSERR = 720000
+
+
+VALID_CHANGE_NAME_PAT = r'^[A-Za-z][A-Za-z0-9._-]{,49}$'
+VALID_CHANGE_NAME_RE  = re.compile(VALID_CHANGE_NAME_PAT)
+
+
class ListDict(dict):
  """Dictionary mapping each key to a list of values.

  Sugar so callers don't have to:

  try:
    d[k].append(v)
  except KeyError:
    d[k] = [v]

  """

  def append(self, key, val):
    # setdefault materializes the empty list on first touch of key.
    return self.setdefault(key, []).append(val)

  def extend(self, key, l):
    return self.setdefault(key, []).extend(l)
+
+
class BoolProperty(property):
  """property subclass translating 'true'/'false' strings to Python bools.

  The wrapped getter's result reads as True only for the exact string
  'true'; the wrapped setter receives 'true' or 'false' according to the
  truthiness of the assigned value.
  """

  def __init__(self, fget=None, fset=None, fdel=None, doc=None):
    bfget = fget
    if fget is not None:
      def bfget(self):
        # Anything but the exact string 'true' reads as False.
        return fget(self) == 'true'

    bfset = fset
    if fset is not None:
      def bfset(self, value):
        if value:
          text = 'true'
        else:
          text = 'false'
        return fset(self, text)

    property.__init__(self, bfget, bfset, fdel, doc)
+
+
# An svn revision spec: optional r/R prefix, then digits.
REVISION_PAT = r'^[rR]?([0-9]+)$'
REVISION_RE  = re.compile(REVISION_PAT)

def MatchRevision(word):
  """Match word against an svn revision pattern.

  Returns the revision number (sub)string of word if present
  otherwise None.
  """
  if word:
    matched = REVISION_RE.match(word)
    if matched:
      return matched.group(1)
  return None
+
+
def ParseChangeName(change_name):
  """Parse a change_name into useful constituent parts.

  Change names are of the form:

    [user/]branchname[@revision]

  NOTE: Validation of the branchname is *not* done here.

  Returns a tuple of the form (username, branchname, revision).
  Any of these can be None to indicate that no such element could be
  parsed.
  """
  username = None
  branchname = None
  revision = None

  if change_name:
    # Everything before the first '/' is the username.
    if '/' in change_name:
      (username, change_name) = change_name.split('/', 1)

    # Everything after the last '@' is the revision.
    if '@' in change_name:
      (change_name, revision) = change_name.rsplit('@', 1)
      # For some reason this triggers a bug:
      # revision = MatchRevision(revision)
      if len(revision) > 1 and revision[0] in ('r', 'R'):
        revision = revision[1:]

      # TODO(epg): I'm retaining the behavior where 'changefoo@' ==
      # 'changefoo', but i think that's an error and we should just
      # let the int() constructor raise ValueError.
      if revision == '':
        revision = None
      else:
        # TODO(epg): I've said before that we should raise exceptions
        # when asked to parse an invalid change spec.  But, as long as
        # this function works this way and all callers expect that, we
        # need to catch this.
        try:
          revision = int(revision)
        except ValueError:
          revision = None

    branchname = change_name

  # Normalize empty strings to None.
  if username == '':
    username = None
  if branchname == '':
    branchname = None

  return (username, branchname, revision)
+
+
def ValidateChangeName(change_name):
  """Validates that a given change name is acceptable.

  Raises gvn.errors.InvalidChangeName unless both of these hold:
    1. the branchname conforms to VALID_CHANGE_NAME_PAT
    2. the branchname could not be mistaken for a revision number
  """
  (_, branchname, _) = ParseChangeName(change_name)

  if branchname is None or VALID_CHANGE_NAME_RE.match(branchname) is None:
    raise gvn.errors.InvalidChangeName(
        "'%s' not a valid change name. Must conform to '%s'"
        % (branchname, VALID_CHANGE_NAME_PAT))

  if MatchRevision(branchname) is not None:
    raise gvn.errors.InvalidChangeName(
        "'%s' should not look like a repository revision." % branchname)
+
+
def IsValidChangeName(change_name):
  """A convenience predicate so callers don't have to try/except."""
  try:
    ValidateChangeName(change_name)
    return True
  except gvn.errors.InvalidChangeName:
    return False
+
+
def Prompt(prompt_string):
  """Display prompt_string, read one line from stdin, return it stripped.

  Python 2 raw_input, so the returned value is a str with surrounding
  whitespace (including the newline) removed.
  """
  return raw_input(prompt_string).strip()
+
+
def ClimbAndFind(path, test):
  """Work up the directory tree from the given path, calling test(path)
  until test returns either 0 or 1.  If test(X) returns 0, return X.  If
  test(X) returns 1, return the previous directory (one deeper than X).
  Return '/' if the path is exhausted without test() ever returning 0 or 1.

  Examples:
    test(/path/to/foo/bar)  returns -1 => call test(/path/to/foo)
    test(/path/to/foo)      returns -1 => call test(/path/to)
    test(/path/to)          returns 0  => return /path/to
    test(/path/to)          returns 1  => return /path/to/foo
  """
  current = path
  previous = path
  while current != '/':
    verdict = test(current)
    if verdict == 0:
      return current
    if verdict == 1:
      return previous
    previous = current
    # We have /-separated paths on all platforms, so we need to use posixpath
    # instead of os.path.
    current = posixpath.dirname(current)
    # An absolute Windows path (C:/foo/bar) runs out of '/' entirely;
    # treat that like reaching the root.
    if current == '':
      return '/'

  return current
+
def ConvertStringDateToDateTime(date):
  """Convert a string date in the svn standard format into a
  datetime.datetime object, to 1-second precision.
  """
  # Drop fractional seconds and the trailing Z (e.g. '.123456Z'), then
  # parse the remaining ISO-ish stamp.
  stamp = re.sub(r'\.\d*Z$', '', date)
  parsed = time.strptime(stamp, '%Y-%m-%dT%H:%M:%S')
  return datetime.datetime(*parsed[:6])
+
def RelativePath(parent, child):
  """Return the path of child relative to parent.

  If parent is '', simply return child.  Else, if child is not a child
  of parent (e.g. RelativePath('/tmp', '/etc')), raise
  gvn.errors.PathNotChild.  Else, return the relative path
  (e.g. RelativePath('/tmp/wc', '/tmp/wc/lib' => 'lib')).

  Raises:
  gvn.errors.PathNotChild

  """

  if parent == '':
    if child.startswith('/'):
      raise gvn.errors.PathNotChild(child, parent)
    return child

  # child.split(parent, 1) was used here before, but it also accepted
  # paths that merely share a string prefix with parent: e.g. parent
  # '/a' and child '/ab/c' yielded 'b/c'.  Require a real path-component
  # boundary (or equality) instead.
  if child != parent and not child.startswith(parent + '/'):
    raise gvn.errors.PathNotChild(child, parent)
  return child[len(parent):].lstrip('/')
+
def IsChild(child, parent):
  """Return True if child is parent (a directory) or is a path under parent."""

  if parent == '':
    # Every relative path is "under" the empty parent.
    return not child.startswith('/')

  if child == parent:
    return True
  return child.startswith(parent + '/')
+
def PathSplit(path, maxsplit=-1, uniform=True):
  """Return list of components of path, like str.split not os.path.split .

  path must be normal form (no doubled or trailing / characters)

  Arguments:
  path     -- path to split
  maxsplit -- do at most maxsplit splits (default full split)
  uniform  -- whether to use / or system-specific separator (default True)
  """

  # Rely on the undocumented behavior that maxsplit=-1 is the same as
  # not specifying maxsplit at all.
  if not uniform and sys.platform == 'win32':
    # ntpath's separator is just '\\'.  This used to split on the
    # two-character literal '\\/' (backslash followed by slash), which
    # never occurs in a normal-form path, so Windows paths came back
    # unsplit.
    return path.split('\\', maxsplit)
  else:
    return path.split('/', maxsplit)
+
def CommonPrefix(paths, uniform=True):
  """Return the longest common leading component of paths.

  paths must be:
   - all absolute or all relative, not mixed
   - normal form (no doubled or trailing / characters)

  Otherwise the result is undefined.

  You might think we've reinvented os.path.commonprefix .  You'd be wrong:
  >>> os.path.commonprefix(['/tmp/ab', '/tmp/ac'])
  '/tmp/a'

  This brokenness is actually documented in the HTML help, but not in
  the doc string.

  NOTE(review): absolute inputs with no common component beyond the
  root (e.g. ['/a', '/ab']) come back as '' rather than '/' -- confirm
  callers expect that.

  Arguments:
  paths   -- list of paths
  uniform -- whether to use / or system-specific separator (default True)
  """

  if len(paths) == 0:
    return ''
  if len(paths) == 1:
    return paths[0]
  # TODO(epg): More special-casing, bleh; I'm sure this whole function
  # is stupid and should be thrown out.
  # If any element *is* the bare root, the root is the answer.
  if not uniform and sys.platform == 'win32' and '\\' in paths:
    return '\\'
  if '/' in paths:
    return '/'

  common = paths[0]
  common_components = PathSplit(common, uniform=uniform)
  for path in paths[1:]:
    if path == common:
      continue

    components = PathSplit(path, uniform=uniform)
    # We need to know whether we matched all components or broke out early.
    partial_match = False
    for (index, component) in enumerate(components):
      try:
        if component != common_components[index]:
          common_components = components[:index]
          #   '/'.join(['', 'a'])   => '/a'
          #   os.path.join('', 'a') => 'a'
          # While we're at it, why does os.path.join not take a list
          # anyway?  Are you trying to minimize consistency?
          # And don't worry about uniform=False; Windows works just
          # fine with / as a separator.
          common = '/'.join(common_components)
          partial_match = True
          break
      except IndexError:
        # For e.g. common='/a' path='/a/b' we get here on 'b', so
        # leave '/a' as common.
        partial_match = True
        break
    if not partial_match and len(common_components) > len(components):
      # Got all the way through this path without hitting a different
      # component, and this path is shorter than our current guess on
      # common; that means this is more common.
      common_components = components
      common = path

  return common
+
+
# XXX This really should be bound.  Maybe in ctypes?
def APR_TO_OS_ERROR(e):
  """Map an APR status code to a native OS error number (0 passes through)."""
  if e == 0:
    return 0
  return e - APR_OS_START_SYSERR
+
+
+class Editor(object):
+  """Abstraction for exchanging information with a user via forms in a
+  text editor.
+
+  Callers should not call Done unless the user's action has been
+  completed successfully.  If anything has gone wrong, callers should
+  not call Done, instead informing the user that the form remains
+  saved at the path in the tmpfile member.
+  """
+
+  def __init__(self, executable):
+    self._executable = executable
+    self.tmpfile = None
+
+  def Edit(self, text, encoding, tmp_prefix='gvn.', _system=os.system):
+    """Return user-edited text, or None if user aborted.
+
+    Arguments:
+    text                -- text for the user to edit
+    encoding            -- user's encoding, for encoding text in the file
+    tmp_prefix          -- prefix to use on the temporary filename
+    _system             -- for testing, ignore
+    """
+    # Some editors on Windows (at least notepad) prepend a BOM.
+    do_bom = gvn.platform.EditorWantsBOM(self._executable, encoding)
+
+    # Create a temporary file.
+    (fd, self.tmpfile) = mkstemp(suffix='.txt', prefix=tmp_prefix, text=True)
+    f = os.fdopen(fd, 'w')
+    if do_bom:
+      f.write(codecs.BOM_UTF8)
+    f.write(text.encode(encoding))
+    f.close()
+    cmd = ' '.join((self._executable, self.tmpfile))
+    status = _system(cmd)
+
+    # If the editor failed, return None for abort.
+    if status != 0:
+      raise gvn.errors.Editor(cmd, status)
+
+    # If the file does not exist or is empty, abort.
+    try:
+      post_edit_stat = os.stat(self.tmpfile)
+    except OSError, e:
+      if e.errno != ENOENT:
+        raise
+      # User removed tmpfile; don't let callers think it's still there.
+      self.tmpfile = None
+      post_edit_stat = None
+    if post_edit_stat is None or post_edit_stat.st_size == 0:
+      return None
+
+    f = codecs.open(self.tmpfile, 'r', encoding=encoding)
+    text = f.read()
+    if do_bom:
+      text = text.lstrip(codecs.BOM_UTF8.decode(encoding))
+    f.close()
+
+    # If the file contained nothing but the BOM, abort.
+    if len(text) == 0:
+      return None
+
+    return text
+
+  def IsDone(self):
+    """Return False if Editor.Done should be called."""
+    return self.tmpfile is None
+
+  def Done(self):
+    """Remove the tmpfile, marking this Editor as done."""
+    if self.IsDone():
+      return
+
+    try:
+      os.unlink(self.tmpfile)
+    except:
+      # It's a temp file, who cares.
+      pass
+    self.tmpfile = None
+
def isatty(fp):
  """Return True if fp is connected to a tty, False otherwise.

  Never raises for ordinary file-like objects: missing fileno
  (AttributeError) and failing fileno/isatty (EnvironmentError) both
  simply report False.
  """
  try:
    return os.isatty(fp.fileno())
  # AttributeError for file-like objects with no fileno method
  # EnvironmentError for errors from fileno
  except (AttributeError, EnvironmentError):
    return False
+
+
def ParseOwners(owners_contents):
  """Return (users, groups, noparent) parse from owners_contents.

  Arguments:
  owners_contents       -- OWNERS-type text (must have splitlines method)

  Returns:
  tuple of (set(), set(), bool())

  Format for OWNERS::

      # takes comments
      # This special line means we don't inherit OWNERS from higher dirs
      set noparent
      # single user names, one per line, as such:
      user
      # or groups like so:
      group: svngroup1  # and trailing comments are allowed
      # include files not supported, nor planned
      file:../some_other_dir/OWNERS
  """
  users = set()
  groups = set()
  noparent = False

  for raw in owners_contents.splitlines():
    # Drop comments and surrounding whitespace; skip what's left if empty.
    line = re.sub('#.*', '', raw).strip()
    if not line:
      continue

    if re.match(r'set\s+noparent', line) is not None:
      noparent = True
      continue

    group_match = re.match(r'group:\s*(.*)', line)
    if group_match is not None:
      groups.add(group_match.group(1))
      continue

    if re.match(r'file:\s*(.*)', line) is not None:
      # The OWNERS system Google implemented for p4 allowed includes,
      # but we think groups are a better solution.
      # For now, we'll see if users can do without it.
      continue

    # TODO(epg): Behave like Marc's original code for now.  It ignored
    # lines with embedded whitespace, but I don't see why.  For
    # organizations that have no spaces in usernames, treating such
    # lines as usernames has the same effect as ignoring them; for
    # organizations that have spaces in usernames (which svn is cool
    # with), allowing this is a big win.
    if re.search(r'\s+', line) is not None:
      continue

    users.add(line)
  return (users, groups, noparent)
+
+
def ApplyPropDiffs(properties, property_diffs):
  """Apply property_diffs to properties, mutating properties in place.

  Arguments:
  properties        -- dict to be transformed
  property_diffs    -- dict as returned by svn.core.svn_prop_diffs

  A value of None in property_diffs means "delete this property";
  deleting a name not present in properties raises KeyError.
  """
  for (name, value) in property_diffs.iteritems():
    if value is None:
      del properties[name]
    else:
      properties[name] = value
+
+
class TmpTree(object):
  """A temporary directory tree that tracks and removes what it creates.

  All paths are given as sequences of local-style path components.
  Close() removes only files and directories created through this
  object.
  """

  def __init__(self, *args, **kwargs):
    """Initialize temporary tree.

    Pass any arguments to tempfile.mkdtemp .
    """
    tmpdir = tempfile.mkdtemp(*args, **kwargs)
    # _tmp_dirs[0] is the tree root; later entries are subdirs we made.
    self._tmp_dirs = [tmpdir]
    self._tmp_files = []

  def Path(self, path_components):
    """Return absolute temporary path for joined path_components.

    Arguments:
    path_components     -- sequence of local style path components
                           (encoded str, not unicode)
    """
    # Work on a copy; this used to pop/insert on the caller's own list,
    # a surprising side effect.
    components = list(path_components)
    if components[-1] == '':
      # Caller is asking for one of the two base dir, e.g.
      #   self.Path(['left', os.path.dirname('COPYING')])
      # dirname returns '' but caller wants '.../left' not '.../left/'.
      components.pop(-1)
    components.insert(0, self._tmp_dirs[0])
    return os.path.join(*components)

  def Mkdir(self, path_components):
    """Make temporary directory, creating missing parents we own.

    Arguments:
    path_components     -- sequence of local style path components
                           (encoded str, not unicode)
    """
    path = self.Path(path_components)
    # Build list of directories we need to create.
    todo = []
    while path not in self._tmp_dirs:
      todo.append(path)
      path = os.path.dirname(path)
    # Create those directories, remembering that we created them.
    for path in reversed(todo):
      os.mkdir(path)
      self._tmp_dirs.append(path)

  def MkFile(self, path_components, mode='wb'):
    """Return file object opened for binary writing.

    Parent directories are created as needed and the file is remembered
    for removal by Close().

    Arguments:
    path_components     -- sequence of local style path components
                           (encoded str, not unicode)
    """
    path = self.Path(path_components)
    self.Mkdir([os.path.dirname(path)])
    fp = open(path, mode)
    self._tmp_files.append(path)
    return fp

  def OpenFile(self, path_components):
    """Return file object opened for binary reading.

    Arguments:
    path_components     -- sequence of local style path components
                           (encoded str, not unicode)
    """
    path = self.Path(path_components)
    return open(path, 'rb')

  def RmFile(self, path_components):
    """Remove a tmp file and forget it (Close() won't touch it again).

    Arguments:
    path_components     -- sequence of local style path components
                           (encoded str, not unicode)
    """
    path = self.Path(path_components)
    os.unlink(path)
    self._tmp_files.remove(path)

  def Close(self):
    """Remove temporary files and directories created by this object."""
    for path in self._tmp_files:
      os.unlink(path)
    # Deepest-first so each directory is empty when rmdir reaches it.
    for path in reversed(self._tmp_dirs):
      os.rmdir(path)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/gvn/wc.py b/depot_tools/release/win/python_24/Lib/site-packages/gvn/wc.py
new file mode 100644
index 0000000..9efd1c1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/gvn/wc.py
@@ -0,0 +1,1018 @@
+# Copyright 2007 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+"""working copy stuff
+
+Classes:
+WorkingCopy     -- Representation of a working copy
+Edit            -- commit.EditorAction to apply a change from a working copy
+ChangeState     -- Representation of a path's changebranch state
+PostfixTextDeltaManager -- Postfix text-delta transmission manager
+
+Functions:
+FindWorkingCopy -- Return a WorkingCopy for the top of a working copy path
+StatusString    -- Return a string suitable for printing about an entry
+
+Objects:
+notify_postfix_txdeltas_completed  -- Hack for gvn.cmdline.Context.Notify
+
+"""
+
+
+import cPickle
+import md5
+import os
+import posixpath
+import sys
+
+from errno import ENOENT
+from errno import EEXIST
+from tempfile import mkstemp
+
+import svn.wc
+
+from svn.core import SubversionException, SVN_ERR_WC_NOT_DIRECTORY
+from svn.core import svn_node_dir, svn_node_file
+
+import gvn.commit
+import gvn.config
+import gvn.errors
+import gvn.project
+import gvn.util
+
+
+STATE_FILE = '.gvnstate'
+
+
def _GetAccessEntry(path, cancel_func, write_lock, depth, pool):
  """Return svn_wc_adm_access_t and svn_wc_entry_t for path.

  Arguments:
  path        -- path to open
  cancel_func -- cancellation callback
  write_lock  -- whether to take out a write lock
  depth       -- -1: lock from path down; 0: just lock path; N: how many
                 directories deep to lock
  pool        -- memory pool

  Returns:
  (anchor_access, entry); the target_access and target values from
  adm_open_anchor are discarded, and the entry lookup excludes hidden
  entries (show_hidden=False).
  """

  (anchor_access, target_access,
   target) = svn.wc.adm_open_anchor(path, write_lock, depth, cancel_func,
                                    pool)
  return (anchor_access,
          svn.wc.entry(path, anchor_access,
                       False,           # show_hidden
                       pool))
+
+
class ChangeState(object):
  """Representation of a path's changebranch state

  This is meta-data about the working file (not text-base) as last
  snapshotted.

  """

  def __init__(self, change_name, path, checksum, status):
    """Initialize from change_name, path, checksum, svn_wc_status2_t."""

    # Name of the changebranch this snapshot belongs to.
    self.change_name = change_name

    # Copy from svn_wc_entry_t.
    self.kind = status.entry.kind
    self.copyfrom_url = status.entry.copyfrom_url
    self.copyfrom_rev = status.entry.copyfrom_rev
    self.has_prop_mods = status.entry.has_prop_mods

    # Copy from svn_wc_status2_t.
    self.text_status = status.text_status
    self.prop_status = status.prop_status

    # Checksum of the working file as snapshotted (opaque to this class).
    self.checksum = checksum

    if status.text_status == svn.wc.status_deleted:
      # No working file to stat once it's scheduled for deletion.
      self.mtime = self.size = None
    else:
      st = os.stat(path)
      self.mtime = st.st_mtime
      self.size = st.st_size

  def __eq__(self, other):
    # Compares only the snapshot identity fields below; change_name,
    # checksum, text_status and prop_status are deliberately(?) ignored.
    # NOTE(review): confirm that asymmetry is intended.  Also raises
    # AttributeError when other lacks these attributes.
    for i in ['kind', 'copyfrom_url', 'copyfrom_rev', 'has_prop_mods',
              'mtime', 'size']:
      if getattr(self, i) != getattr(other, i):
        return False
    return True

  def __ne__(self, other):
    return not self == other
+
+
+class WorkingCopy(object):
  def __init__(self, path, cancel_func, notify_func, config, pool):
    """Initialize and open from str, callable, callable, Config, Pool.

    Arguments:
    path        -- absolute path to a working copy top
    cancel_func -- callback()
    notify_func -- callback(svn_wc_notify_t, pool)
    config      -- Config object
    pool        -- memory pool

    """

    # absolute path to top of working copy
    self._path = path

    # stuff for svn.wc
    self._cancel_func = cancel_func
    self._notify_func = notify_func
    self._config = config
    self._pool = pool

    # Open/lock state; populated by Open(), reset by Close().
    self._adm_access = self._entry = self._write_lock = self._recursive = None
    self._committed = []
    self._wcprop_changes = {}

    self._change_state = {}
    # Use this second change_state dict for pending changes, i.e.
    # those that will be written to the change_state when it is saved.
    # Keeping this separate and clearing it on error ensure change_state is
    # not inconsistent with actual change state.  A value of None marks an
    # item to be removed from the change_state.
    self._pending_change_state = {}

    # Merge any previously saved state file into change_state.
    self.Load()

    # Get URL from quick non-recursive wc open.
    (adm_access, entry) = _GetAccessEntry(path, cancel_func, depth=0,
                                          write_lock=False, pool=self._pool)
    self._repository_root = entry.repos
    self._url = entry.url
    # Repository-relative path of the wc root (root URL and '/' stripped).
    self._base = self._url[len(self.repository_root)+1:]
    svn.wc.adm_close(adm_access)

    # Get ProjectConfig and Project from URL.
    try:
      self._project_config = self._config.ProjectByURL(self._url)
    except gvn.errors.NoProject:
      # URL not configured; synthesize a default ProjectConfig for it.
      self._project_config = gvn.config.ProjectConfig(
                                                 self._config.default_username,
                                                 self._url)
    root = self.repository_root
    self._project = gvn.project.Project(self._project_config.username, root,
                                        self._config, self._pool,
                                        self._project_config.URL[len(root)+1:])
    self._project.repository.have_root_url = True
    # Let the RA layer reach back into this working copy.
    self._project.repository.ra_callbacks.SetWC(self)
+
  # Read-only views onto internals; cancel_func and notify_func also
  # accept assignment so callers can swap callbacks after construction.
  name = property(lambda self: os.path.basename(self._base))
  path = property(lambda self: self._path)
  cancel_func = property(lambda self: self._cancel_func,
                        lambda self, f: setattr(self, '_cancel_func', f))
  notify_func = property(lambda self: self._notify_func,
                        lambda self, f: setattr(self, '_notify_func', f))

  project = property(lambda self: self._project)
  project_config = property(lambda self: self._project_config)

  change_state = property(lambda self: self._change_state,
                       doc="""Map of working copy path to ChangeState.""")

  repository_root = property(lambda self: self._repository_root)
+
+  def Load(self):
+    """Load STATE_FILE file."""
+
+    try:
+      fp = open(os.path.join(self.path, STATE_FILE))
+    except IOError, e:
+      if e.errno == ENOENT:
+        return
+      raise
+
+    # Do we want to complain about corrupt state files?  Maybe a warning.
+    try:
+      p = cPickle.load(fp)
+    except EOFError:
+      return
+
+    try:
+      d = p['change_state']
+    except (KeyError, TypeError):
+      d = {}
+    self.change_state.update(d)
+
  def UpdateChangeState(self, path, state=None, **attrs):
    """Schedule a change_state update for path.

    Arguments:
    path        -- path whose state to update
    state       -- optional new ChangeState object

    Additionally, attribute names of ChangeState may be passed as
    keyword arguments to update specific fields.  For example, to move
    'foo' from one changebranch to another:

      wc.UpdateChangeState('foo', change_name='new_change')

    Raises:
    AttributeError      -- if state argument is None and change_state
                           for path has been deleted;
                           or if a non-existent ChangeState attribute
                           has been passed as a keyword argument
    KeyError            -- if state argument is None and no change_state
                           for path has been set
    """
    if state is not None:
      self._pending_change_state[path] = state
    # Keyword updates apply only to the *pending* entry; a path whose
    # state lives solely in change_state (already saved) raises KeyError.
    for (attr, value) in attrs.iteritems():
      setattr(self._pending_change_state[path], attr, value)
+
  def DeleteChangeState(self, path):
    """Schedule a deletion of change_state for path.

    None is the deletion marker consumed by Save(); this overwrites any
    pending update for path.
    """
    self._pending_change_state[path] = None
+
  def ClearPendingChangeState(self):
    """Clear all pending change_state.

    Call this on error when changing a changebranch.
    """
    # Drops only the staged updates; saved change_state is untouched.
    self._pending_change_state.clear()
+
  def Save(self):
    """Flush pending change_state and save STATE_FILE file."""

    # Apply deletions first (None marks removal), then merge the rest
    # of the pending updates and reset the pending map.
    for (path, state) in list(self._pending_change_state.iteritems()):
      if state is None:
        del self._change_state[path]
        del self._pending_change_state[path]
    self._change_state.update(self._pending_change_state)
    self.ClearPendingChangeState()

    # Pickle into a temp file in the same directory, then rename over
    # the old state file so readers never see a half-written pickle.
    # NOTE(review): mkstemp creates the file mode 0600, which becomes
    # the state file's permanent mode -- confirm that's acceptable.
    (fd, fn) = mkstemp(dir=self.path)
    fp = os.fdopen(fd, 'w')
    cPickle.dump({
      'change_state': self.change_state,
      }, fp)
    fp.close()
    state_file = os.path.join(self.path, STATE_FILE)
    try:
      os.rename(fn, state_file)
    except OSError, e:
      if sys.platform != 'win32' or e.errno != EEXIST:
        raise
      # "On Windows, if dst already exists, OSError will be raised even if it
      # is a file; there may be no way to implement an atomic rename when dst
      # names an existing file."
      os.remove(state_file)
      os.rename(fn, state_file)
+
  # TODO(epg): see what else should use adm_access instead of _adm_access
  def _GetAdmAccess(self):
    # Guard: raise rather than hand back None once Close()d.
    if self._adm_access is None:
      raise gvn.errors.WCClosed
    return self._adm_access
  adm_access = property(_GetAdmAccess,
doc="""svn_wc_adm_access_t if WorkingCopy is Open

       Raises:
       gvn.errors.WCClosed
""")
+
  def AdmRetrieve(self, path, pool):
    """Return the adm access baton for path.

    Files resolve to their parent directory's baton; the working copy
    root resolves to our own adm_access.

    Raises:
    gvn.errors.WCClosed -- via the adm_access property when not Open
    """
    # TODO(epg): This is duplicated all over the place; make them all
    # call this instead.

    # This is stupid; I have to stat the file just to get an adm baton?!
    if os.path.isdir(path):
      (directory, target) = (path, '')
    else:
      (directory, target) = os.path.split(path)
    if directory == self.path:
      return self.adm_access
    return svn.wc.adm_retrieve(self.adm_access, directory, pool)
+
  def GetPropDiffs(self, path, pool):
    """Return svn property diffs for path (svn.wc.get_prop_diffs)."""
    return svn.wc.get_prop_diffs(path.encode('utf8'),
                                 self.AdmRetrieve(path, pool), pool)
+
  def Open(self, paths=None, write_lock=False, recursive=True):
    """Open the working copy to deepest common of paths.

    Arguments:
    paths      -- paths whose deepest common path to open; default [self.path]
    write_lock -- bool whether to take out write lock; default False
    recursive  -- bool whether to open recursively; default True

    Raises:
    gvn.errors.WCReOpen     -- already open with a different lock mode
                               or recursion setting
    gvn.errors.NotVersioned -- no svn entry found for the common path

    """

    if self._adm_access is not None:
      # Re-opening is OK only with the same lock mode and recursion we
      # already hold.
      if self._write_lock != write_lock or self._recursive != recursive:
        raise gvn.errors.WCReOpen(self._write_lock, write_lock,
                                  self._recursive, recursive)
      return

    if paths is None:
      path = self.path
    else:
      path = gvn.util.CommonPrefix([self.AbsolutePath(x) for x in paths])

    self._write_lock = write_lock
    self._recursive = recursive

    if recursive:
      depth = -1
    else:
      depth = 0

    # path may be a new directory scheduled for addition, whose entry
    # is not complete; specifically, it has no base revision, which we
    # need in order to changebranch.  So, climb up from path until we
    # find a complete entry.
    # TODO(epg): Actually, we only need to do this when
    # changebranching; for opened, submit, etc. it's wasted effort;
    # maybe Open should take an option for skipping this?
    while True:
      (adm_access, entry) = _GetAccessEntry(path, self.cancel_func,
                                            self._write_lock, depth,
                                            self._pool)
      if entry is None:
        # NOTE(review): adm_access is not closed on this error path;
        # looks like a leaked access baton -- confirm.
        raise gvn.errors.NotVersioned(path)
      if entry.revision > 0:
        break
      path = os.path.dirname(path)
      svn.wc.adm_close(adm_access)

    self._adm_access = adm_access
    self._entry = entry
    self.subpath = self.RelativePath(path)
+
  def Close(self):
    """Close the working copy, releasing any locks.

    Note that this WorkingCopy remains valid, though some methods will
    not work until .Open is called again.  See the individual method
    docstrings for that information.

    Raises:
    gvn.errors.WCClosed
    """

    if self._adm_access is None:
      raise gvn.errors.WCClosed

    svn.wc.adm_close(self._adm_access)
    # Reset to the same closed state __init__ starts with.
    self._adm_access = self._entry = self._write_lock = self._recursive = None
+
+  def AbsolutePath(self, path=None):
+    """Return the absolute path of path in the working copy.
+
+    Arguments:
+    path -- path to make absolute; default self.path
+    """
+    if path is None or path == '':
+      return self.path
+    return '/'.join([self.path, path])
+
+  def RelativePath(self, path):
+    """Return path relative to the working copy for path."""
+    return gvn.util.RelativePath(self.path, path)
+
+  def RepoPath(self, path=''):
+    """Return the path in the repository for path."""
+    if self._base == '':
+      return path
+    if path == '':
+      return self._base
+    return '/'.join([self._base, path])
+
+  def URL(self, path=''):
+    """Return the repository URL for path."""
+    return self.project.repository.URL(self.RepoPath(path))
+
+  def LocalPath(self, path):
+    """Return the relative path in the working copy for repository path."""
+    try:
+      return gvn.util.RelativePath(self._url, path)
+    except gvn.errors.PathNotChild:
+      return gvn.util.RelativePath(self._base, path)
+
+  def Notify(self, path, action, kind=None, pool=None):
+    """Notify caller of some action on path.
+
+    This is a convenience wrapper around the svn_wc_notify_func2_t
+    provided at instantiation.
+
+    Arguments:
+    path            -- internal_style str being acted upon
+    action          -- svn_wc_notify_action_t of path
+    kind            -- svn_node_kind_t of path (default None)
+    pool            -- memory pool
+    """
+    if callable(self._notify_func):
+      n = svn.wc.create_notify(path, action, pool)
+      if kind is not None:
+        n.kind = kind
+      self._notify_func(n, pool)
+
+  def Entry(self, path=None, pool=None):
+    """Return svn_wc_entry_t for path (default self.subpath).
+
+    Arguments:
+    path -- path in the working copy (default self.subpath)
+    pool -- pool for local processing
+
+    Raises:
+    gvn.errors.WCClosed
+    """
+    if self._adm_access is None:
+      raise gvn.errors.WCClosed
+
+    if path in [None, self.subpath]:
+      return self._entry
+
+    return svn.wc.entry(self.AbsolutePath(path), self._adm_access,
+                        True,           # show_hidden
+                        pool)
+
  def Status(self, paths=[''], recursive=False, get_all=True, no_ignore=True,
             show_children=True,
             callback=None):
    """Report (path, svn_wc_status2_t) pairs for paths.

    Either returns a list of (absolute path, status) tuples, or, when
    callback is given, invokes callback(absolute path, status) for each
    item instead and returns None.

    Arguments:
    paths         -- list of paths for which to get status
    recursive     -- whether to descend into children of paths
    get_all       -- whether to get status for unmodified files
    no_ignore     -- whether to get status for ignored files
    show_children -- report children of deleted directories
                     when not explicitly specified in paths (default=True)
    callback      -- if provided, call with status instead of returning a list

    """

    # NOTE(review): the mutable default [''] is never mutated (only
    # re-bound below), so sharing it across calls is harmless.
    if not callable(callback):
      result = []

    # An explicitly empty list means the whole wc, same as the default.
    if len(paths) == 0:
      paths = ['']
    for i in paths:
      path = self.AbsolutePath(i)

      # This is stupid.
      # The status editor wants an anchor directory plus a target name;
      # for a directory the directory itself is the anchor.
      if os.path.isdir(path):
        (directory, target) = (path, '')
      else:
        (directory, target) = os.path.split(path)

      adm_access = svn.wc.adm_retrieve(self._adm_access, directory, self._pool)

      # Deleted directory trees seen so far for this path, used by the
      # show_children=False filtering below.  Reset for each i in paths.
      deleted_trees = []
      def status_func(target, status):
        # For whatever reason, this is sometimes called with some (but
        # not all) children of path even when recursive is False.
        if not recursive and target != path:
          return

        # Never report gvn's own state file.
        if self.RelativePath(target) == STATE_FILE:
          return

        if not show_children:
          # What we're trying to do here is turn this:
          # 0 gvn% svn mv testdata foo
          # 0 gvn% gvn opened
          # D      testdata
          # D      testdata/hooks
          # A  +   foo
          # into this
          # 0 gvn% gvn opened
          # D      testdata
          # A  +   foo
          for tree in deleted_trees:
            if gvn.util.IsChild(target, tree):
              if status.text_status == svn.wc.status_deleted:
                # target (e.g. testdata/hooks) is a child of tree
                # (e.g. testdata) and is deleted; hide it.
                return
          if (status.entry is not None
              and status.entry.kind == svn_node_dir
              and status.text_status == svn.wc.status_deleted):
            # target is a deleted tree; save it so we can hide its children.
            deleted_trees.append(target)

        # I'd love to do this, but it doesn't work.  Search the web for
        # [python callback generator] and you'll find that the only
        # thing people have to suggest is threads.  Blech.
        #yield status

        # So we callback or build and return a list.
        if callable(callback):
          return callback(target, status)
        result.append((target, status))

      # Drive the status editor; status_func above is invoked once per
      # reported node.  Closing the edit performs the actual crawl.
      (editor, edit_baton, set_locks_baton,
       edit_revision) = svn.wc.get_status_editor2(adm_access,
                                                  target,
                                                  self._config.svn_config_hash,
                                                  recursive,
                                                  get_all,
                                                  no_ignore,
                                                  status_func,
                                                  self.cancel_func,
                                                  None, # traversal_info
                                                  self._pool)
      editor.close_edit(edit_baton, self._pool)

    if not callable(callback):
      return result
+
+  def WCPropSet(self, path, name, value):
+    """Save value of wcprop name for path for post-commit processing."""
+    try:
+      d = self._wcprop_changes[path]
+    except KeyError:
+      d = self._wcprop_changes[path] = {}
+    d[name] = value
+
+  def ProcessCommitted(self, path, recursive, kind, remove_lock, checksum):
+    """Append a committed item to the post-commit processing queue."""
+    self._committed.append((path, recursive, kind, remove_lock, checksum))
+
  def ProcessCommittedQueue(self, commit_info, pool):
    """Post-process committed items.

    Bumps every path queued by ProcessCommitted to the new revision via
    svn.wc.process_committed3, then clears the queue and the pending
    wcprop changes.

    Arguments:
    commit_info -- object carrying .revision, .date, .author of the commit
    pool        -- memory pool; a subpool is created and cleared per item
    """

    iterpool = svn.core.Pool(pool)
    for i in self._committed:
      iterpool.clear()
      # i is (path, recursive, kind, remove_lock, checksum).  The
      # pop/insert dance below rearranges it into the positional
      # argument list for svn_wc_process_committed3:
      #   (path, adm_access, recursive, new_revnum, rev_date, rev_author,
      #    wcprop_changes, remove_lock, digest, pool)
      args = list(i)
      kind = args.pop(2)
      relative = args[0]
      args[0] = self.AbsolutePath(args[0])

      if kind == svn_node_dir:
        # For a directory, the access baton comes from its parent; when
        # the parent is the wc root, reuse the already-open top access.
        adm_access_path = args[0]
        parent = os.path.dirname(adm_access_path)
        if parent == self.path:
          base_dir_access = self._adm_access
        else:
          base_dir_access = svn.wc.adm_retrieve(self._adm_access,
                                                parent,
                                                iterpool)
      else:
        # For a file, the access baton is that of its containing dir.
        base_dir_access = self._adm_access
        adm_access_path = os.path.dirname(args[0])

      args.insert(1, svn.wc.adm_retrieve(base_dir_access,
                                         adm_access_path,
                                         iterpool))

      args.insert(3, commit_info.revision)
      args.insert(4, commit_info.date)
      args.insert(5, commit_info.author)
      # wcprop changes were keyed by the wc-relative path.
      args.insert(6, self._wcprop_changes.get(relative))
      args.append(iterpool)
      svn.wc.process_committed3(*args)
    iterpool.destroy()

    # Both queues are fully consumed; reset for the next commit.
    del self._committed[:]
    self._wcprop_changes.clear()
+
  def NeedsSnapshot(self, path, status, pool):
    """Return whether path needs snapshotting.

    Compares the recorded ChangeState for path against its current
    working-copy state; when metadata alone cannot decide, checksums
    the normalized form of the working file and compares digests.

    Arguments:
    path        -- path in working copy to be tested
    status      -- svn_wc_status2_t for path
    pool        -- used for temporary allocations

    Raises:
    gvn.errors.NotChangeBranched -- path has no recorded change state

    """

    try:
      old = self.change_state[path]
    except KeyError:
      raise gvn.errors.NotChangeBranched(path)

    abs_path = self.AbsolutePath(path)

    # Current state of the node, computed fresh; no checksum yet (it is
    # only needed if the cheap comparisons below are inconclusive).
    new = ChangeState(None, abs_path, checksum=None, status=status)

    # If kind has changed, no question this needs snapshot.
    if new.kind != old.kind:
      return True

    # Now, let's be kinda like svn_wc_text_modified_p.

    # XXX(epg): svn_wc_text_modified_p returns FALSE if old == new.
    # It only returns TRUE if old != new and a full file compare finds
    # differences.  Seems odd to me; if old != new, we *know* the file
    # is modified, so why do we checksum at all?

    if old == new:
      return False

    if status.entry.kind != svn_node_file:
      # If it's not a file, no checksumming, they're just different.
      return True

    # It's a file that may be different, let's compare.

    # Here's the tricky bit: old.checksum is of the *normal* form;
    # i.e. without keyword expansion, eol translation, or other forms
    # of text mangling.  Man i hate that stuff.  So we have to
    # checksum the normal form of the current working file.
    adm_access = svn.wc.adm_probe_retrieve(self._adm_access,
                                           abs_path, pool)
    stream = svn.wc.translated_stream(abs_path, abs_path, adm_access,
                                      svn.wc.TRANSLATE_TO_NF, pool)
    # NOTE(review): the md5 module is deprecated in favor of hashlib in
    # later Pythons; presumably kept because this tree bundles Python 2.4.
    m = md5.new()
    while True:
      data = svn.core.svn_stream_read(stream, svn.core.SVN_STREAM_CHUNK_SIZE)
      if not data:
        break
      m.update(data)

    return old.checksum != m.digest()
+
def FindWorkingCopy(path, cancel_func, notify_func, config, pool=None):
  """Return (WorkingCopy, subdir) for the top of the working copy path.

  The second element of the returned tuple is the path within
  WorkingCopy.path for path, examples:

    '/tmp/wc'       => (WorkingCopy, '')
    '/tmp/wc/lib'   => (WorkingCopy, 'lib')

  Arguments:
  path        -- absolute, internal-style path to a working copy top
  cancel_func -- callback()
  notify_func -- callback(svn_wc_notify_t, pool)
  config      -- Config object
  pool        -- memory pool

  Raises:
  gvn.errors.NotWC -- path is not inside any working copy

  """

  # First, find the deepest path at path or above that is a
  # working copy.
  # A list rather than two locals so the nested callbacks can assign to
  # it (Python 2 has no 'nonlocal'): [uuid, url] of the found wc.
  orig_uuid_url = [None, None]
  def is_wc(d):
    # ClimbAndFind probe: return 0 to stop at d, -1 to keep climbing.
    try:
      (adm_access, entry) = _GetAccessEntry(d, cancel_func, write_lock=False,
                                            depth=0, pool=pool)
    except SubversionException, e:
      if e.apr_err != SVN_ERR_WC_NOT_DIRECTORY:
        raise
      # Keep looking.
      return -1
    if entry is None or entry.uuid is None:
      svn.wc.adm_close(adm_access)
      # Keep looking.
      return -1
    orig_uuid_url[:] = (entry.uuid, entry.url)
    svn.wc.adm_close(adm_access)
    return 0
  curr = gvn.util.ClimbAndFind(path, is_wc)

  # Did we find anything?
  if orig_uuid_url[0] is None:
    # path is not in a wc at all; bail
    raise gvn.errors.NotWC(path)

  # Now let's look for the shallowest path at path or above that is a
  # working copy of the same repository.
  def is_top_wc(d):
    # Probe: return 1 to stop one level deeper than d, -1 to keep climbing.
    try:
      (adm_access, entry) = _GetAccessEntry(d, cancel_func, write_lock=False,
                                            depth=0, pool=pool)
    except SubversionException, e:
      if e.apr_err != SVN_ERR_WC_NOT_DIRECTORY:
        raise
      # We've walked up out of the wc completely, return one deeper.
      return 1

    if entry is None or [entry.uuid, entry.url] != orig_uuid_url:
      # We've walked up into some other repository checkout, or not a
      # working copy at all, or out of an external; return one deeper.
      svn.wc.adm_close(adm_access)
      return 1

    # Keep looking.
    svn.wc.adm_close(adm_access)
    # So eat the last path component of the saved URL
    # (the expected URL one directory further up).
    orig_uuid_url[1] = posixpath.dirname(orig_uuid_url[1])
    return -1
  top = gvn.util.ClimbAndFind(curr, is_top_wc)

  subpath = gvn.util.RelativePath(top, path)
  return (WorkingCopy(top, cancel_func, notify_func, config, pool), subpath)
+
+
def IsBroken(status):
  """Return whether the svn_wc_status2_t indicates a broken node."""
  broken = (svn.wc.status_missing, svn.wc.status_incomplete,
            svn.wc.status_obstructed)
  if status.text_status in broken:
    return True
  return status.prop_status in broken
+
def IsConflicted(status):
  """Return whether the svn_wc_status2_t indicates a conflicted node."""
  conflicted = svn.wc.status_conflicted
  return (status.text_status == conflicted
          or status.prop_status == conflicted)
+
def IsModified(status):
  """Return whether the svn_wc_status2_t indicates a modified node.

  A node also counts as modified when its entry carries copy history
  or a schedule-delete flag, regardless of the status codes.
  """
  modified = (svn.wc.status_added, svn.wc.status_conflicted,
              svn.wc.status_deleted, svn.wc.status_modified,
              svn.wc.status_replaced)
  if status.text_status in modified or status.prop_status in modified:
    return True
  entry = status.entry
  if entry is None:
    return False
  return entry.copied or entry.deleted
+
+
# Map svn_wc_status_kind values to the single-character codes printed by
# the svn command-line client's status output.
_status_codes = {
  svn.wc.status_none:        ' ',
  svn.wc.status_normal:      ' ',
  svn.wc.status_added:       'A',
  svn.wc.status_missing:     '!',
  svn.wc.status_incomplete:  '!',
  svn.wc.status_deleted:     'D',
  svn.wc.status_replaced:    'R',
  svn.wc.status_modified:    'M',
  svn.wc.status_merged:      'G',
  svn.wc.status_conflicted:  'C',
  svn.wc.status_obstructed:  '~',
  svn.wc.status_ignored:     'I',
  svn.wc.status_external:    'X',
  svn.wc.status_unversioned: '?',
}
def _StatusCode(status):
  """Return the one-character display code for an svn_wc_status_kind.

  Unknown status values fall back to '?', same as unversioned.
  """
  # Fixed: this function was indented with 4 spaces while the rest of
  # the file consistently uses 2-space indentation.
  return _status_codes.get(status, '?')
def StatusString(path, status):
  """Return a string suitable for printing about this status entry.

  The intent is to duplicate the actual svn client syntax: text and
  property status codes, then locked / copied / switched flag columns.

  """

  flags = [_StatusCode(status.text_status),
           _StatusCode(status.prop_status),
           ' ', ' ', ' ', ' ']

  # Column 3: wc lock; column 4: copy history; column 5: switched.
  if status.locked:
    flags[2] = 'L'
  entry = status.entry
  if entry is not None and entry.copied:
    flags[3] = '+'
  if status.switched:
    flags[4] = 'S'

  return '%s %s' % (''.join(flags), path)
+
def ActionCode(status):
  """Return single-letter code for svn_wc_status2_t.

  Adds and deletes keep their own codes; everything else is reported
  as a modification.
  """
  text = status.text_status
  if text in (svn.wc.status_added, svn.wc.status_deleted):
    return _status_codes[text]
  return _status_codes[svn.wc.status_modified]
+
+
class Edit(gvn.commit.EditorAction):
  """Apply a change from a working copy; return a baton if adding a directory.

  Dispatches on the node's status to the matching svn_delta_editor_t
  operation (add/delete/replace/open).  File content is not sent here;
  file batons are handed to file_baton_cb for postfix transmission.

  Raises:
  SubversionException.apr_err==SVN_ERR_FS_NOT_DIRECTORY
  SubversionException.apr_err==SVN_ERR_RA_DAV_PATH_NOT_FOUND
  SubversionException.apr_err==???
  ???

  """

  def __init__(self, wc, wc_path, status, file_baton_cb=None):
    """Initialize Edit.

    Arguments:
    wc         -- WorkingCopy object
    wc_path    -- path in the working copy
    status     -- svn_wc_status2_t for wc_path
    file_baton_cb       -- optional callback(file baton, fulltext, wc_path,
                                             svn_wc_status2_t) for callers
                           to save these objects for use in postfix
                           text-delta transmission (usually
                           PostfixTextDeltaManager.Add)
    """

    self.wc = wc
    self.wc_path = wc_path
    self.status = status
    # Default to a no-op so file actions can call it unconditionally.
    if file_baton_cb is None:
      def file_baton_cb(*args): pass
    self.file_baton_cb = file_baton_cb
    self.tempfile = None

  # TODO(epg): changebranch._WCBranch needs to override this
  def DiagLog(self, format, *args):
    """Call gvn.commit.EditorAction.DiagLog, prepending attributes from init."""
    return gvn.commit.EditorAction.DiagLog(
      self, '(%s, %s, %s) => %s' % (self.wc_path, self.status,
                                    self.file_baton_cb, format),
      *args)

  def _GetAction(self):
    # Return the bound method that applies this node's change.  A node
    # whose text is status_normal but whose properties are not
    # dispatches on the property status instead.
    if (self.status.text_status == svn.wc.status_normal
        and self.status.prop_status != svn.wc.status_normal):
      status = self.status.prop_status
    else:
      status = self.status.text_status
    actions = {
      svn.wc.status_added:       self._Added,
      svn.wc.status_deleted:     self._Deleted,
      svn.wc.status_replaced:    self._Replaced,
      svn.wc.status_modified:    self._Modified,
    }
    # A status outside this table raises KeyError (not a committable
    # change).
    return actions[status]

  def _PostProcess(self, pool):
    """Post-process after file delete or any directory operation.

    This implementation calls self.wc.ProcessCommitted to save this
    path for the post-commit wc bump.  A copied directory add is queued
    recursively so its whole tree is bumped.

    Do not call any file operation except delete; instead use the
    file_baton_cb.
    """
    self.wc.ProcessCommitted(self.wc_path,
                    recursive=(self.status.text_status == svn.wc.status_added
                               and self.status.entry.kind == svn_node_dir
                               and self.status.entry.copyfrom_url is not None),
                             kind=self.status.entry.kind,
                             remove_lock=False, checksum=None)

  def __call__(self, parent, path, editor, pool):
    # Dispatch to the status-appropriate action; see _GetAction.
    return self._GetAction()(parent, path, editor, pool)

  def _TransmitPropDeltas(self, baton, editor, pool):
    """Transmit property deltas into baton."""
    abs_path = self.wc.AbsolutePath(self.wc_path)
    adm_access = svn.wc.adm_probe_retrieve(self.wc._adm_access,
                                           abs_path, pool)
    svn.wc.transmit_prop_deltas(abs_path, adm_access, self.status.entry,
                                editor, baton, pool)

  def _AddHelper(self, parent, path, editor,
                 copyfrom_url=None, copyfrom_rev=None, pool=None):
    # Shared by _Added and _Replaced.  Copy history defaults to the
    # entry's own copyfrom fields.
    if copyfrom_url is None:
      copyfrom_url = self.status.entry.copyfrom_url
      copyfrom_rev = self.status.entry.copyfrom_rev
    
    if self.status.entry.kind == svn_node_dir:
      args = (path, parent, copyfrom_url, copyfrom_rev, pool)
      gvn.DiagLog('add_directory%s', args)
      baton = editor.add_directory(*args)
      self._TransmitPropDeltas(baton, editor, pool)
      self._PostProcess(pool)
    else:
      args = (path, parent, copyfrom_url, copyfrom_rev, pool)
      gvn.DiagLog('add_file%s', args)
      baton = editor.add_file(*args)
      self._TransmitPropDeltas(baton, editor, pool)
      # Hand the file baton off for postfix text-delta transmission;
      # a brand-new file sends its full text.
      self.file_baton_cb(self.wc_path, baton,
                         fulltext=True, status=self.status)
      baton = None
    return baton

  def _Added(self, parent, path, editor, pool):
    self.wc.Notify(self.wc_path, svn.wc.notify_commit_added,
                   self.status.entry.kind, pool)
    self.DiagLog('')
    result = self._AddHelper(parent, path, editor, pool=pool)
    gvn.DiagLog('\n')
    return result

  def _Deleted(self, parent, path, editor, pool):
    self.wc.Notify(self.wc_path, svn.wc.notify_commit_deleted, pool=pool)
    args = (path, self.status.entry.revision, parent, pool)
    self.DiagLog('delete_entry%s\n', args)
    editor.delete_entry(*args)
    self._PostProcess(pool)
    return None

  def _Replaced(self, parent, path, editor, pool):
    # Replace = delete followed by re-add at the same path.
    self.wc.Notify(self.wc_path, svn.wc.notify_commit_replaced, pool=pool)
    args = (path, self.status.entry.revision, parent, pool)
    gvn.DiagLog('delete_entry%s; ', args)
    editor.delete_entry(*args)
    result = self._AddHelper(parent, path, editor, pool=pool)
    gvn.DiagLog('\n')
    return result

  def _Modified(self, parent, path, editor, pool):
    self.wc.Notify(self.wc_path, svn.wc.notify_commit_modified,
                   self.status.entry.kind, pool)
    if self.status.entry.kind == svn_node_dir:
      args = (path, parent, self.status.entry.revision, pool)
      self.DiagLog('open_directory%s\n', args)
      baton = editor.open_directory(*args)
      self._TransmitPropDeltas(baton, editor, pool)
      self._PostProcess(pool)
    else:
      args = (path, parent, self.status.entry.revision, pool)
      self.DiagLog('open_file%s\n', args)
      baton = editor.open_file(*args)
      self._TransmitPropDeltas(baton, editor, pool)
      # Existing file: postfix transmission sends a delta, not fulltext.
      self.file_baton_cb(self.wc_path, baton,
                         fulltext=False, status=self.status)
      baton = None
    return baton
+
#: We have svn_wc_notify for update_completed, and status_completed, but not
#: for txdeltas_completed.  TODO(epg): Add this to svn_wc_notify.
#: Unique sentinel passed to the notify callback once all postfix
#: text-deltas have been transmitted (see PostfixTextDeltaManager.Transmit).
notify_postfix_txdeltas_completed = object()
+
class PostfixTextDeltaManager(object):
  """Queue file batons during an edit and send their text-deltas afterward.

  Edit actions register each added/opened file via Add; Transmit then
  sends every queued file's text ("postfix" style) and queues each path
  for the post-commit wc bump.
  """

  def __init__(self, wc):
    """Initialize from WorkingCopy object."""
    self.wc = wc
    # (baton, fulltext, wc_path, status) tuples queued by Add.
    self.files = []

  def Add(self, wc_path, baton, fulltext, status):
    """Save objects for postfix text-delta transmission.

    Arguments:
    wc_path             -- path relative to working copy top
    baton               -- file baton from add_file or open_file
    fulltext            -- whether to send full-text of wc_path or delta
                           from text-base
    status              -- svn_wc_status2_t for wc_path
    """
    self.files.append((baton, fulltext, wc_path, status))

  def Transmit(self, editor, edit_baton, pool):
    """Transmit text-deltas for all self.Add()ed paths.

    Arguments:
    editor              -- svn_delta_editor_t
    edit_baton          -- baton from editor open
    pool                -- memory pool
    """
    if len(self.files) == 0:
      return
    for (baton, fulltext, wc_path, status) in self.files:
      self.wc.Notify(wc_path, svn.wc.notify_commit_postfix_txdelta, pool=pool)
      abs_path = self.wc.AbsolutePath(wc_path)
      adm_access = svn.wc.adm_probe_retrieve(self.wc._adm_access,
                                             abs_path, pool)
      # tmp is the temporary text-base written during transmission;
      # checksum is the digest of the transmitted text.
      (tmp, checksum) = svn.wc.transmit_text_deltas2(abs_path, adm_access,
                                                     fulltext, editor,
                                                     baton, pool)
      self.HandleTmp(tmp)
      self.PostProcess(wc_path, status, checksum)
    # TODO(epg): Change self.wc.Notify so that we can use it instead of
    # poking behind its back; though this is moot if we don't need this
    # custom sentinel at all...
    if callable(self.wc._notify_func):
      self.wc._notify_func(notify_postfix_txdeltas_completed)

  def PostProcess(self, wc_path, status, checksum):
    """Save wc_path and meta-data for wc bump.

    Subclasses may want to override this to perform some other
    post-processing instead (see gvn.changebranch.PostfixTextDeltaManager).

    Arguments:
    wc_path             -- path relative to working copy top
    status              -- svn_wc_status2_t for wc_path
    checksum            -- checksum of transmitted text
    """
    # A copied directory add is queued recursively, same as
    # Edit._PostProcess.
    self.wc.ProcessCommitted(wc_path,
                         recursive=(status.text_status == svn.wc.status_added
                                    and status.entry.kind == svn_node_dir
                                    and status.entry.copyfrom_url is not None),
                             kind=status.entry.kind,
                             remove_lock=False, checksum=checksum)

  def HandleTmp(self, tmp):
    """Do nothing with tmp file.

    On a real commit (i.e. one that should mark the working copy files as
    committed), we leave this file (.svn/tmp/text-base/rho.svn-base) alone,
    and it becomes the new text-base.  Subclasses can override this to
    decide what to do (see gvn.changebranch.PostfixTextDeltaManager).
    """
    pass
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/__init__.py
new file mode 100644
index 0000000..f4cf92b8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/__init__.py
@@ -0,0 +1,17 @@
+#
+# __init__.py:  defines this directory as the 'libsvn' package.
+#
+#  Subversion is a tool for revision control.
+#  See http://subversion.tigris.org for more information.
+#
+# ====================================================================
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+#
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_client.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_client.dll
new file mode 100644
index 0000000..918cbc2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_client.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_core.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_core.dll
new file mode 100644
index 0000000..6bfa521
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_core.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_delta.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_delta.dll
new file mode 100644
index 0000000..61cc0a0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_delta.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_diff.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_diff.dll
new file mode 100644
index 0000000..d865ec2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_diff.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_fs.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_fs.dll
new file mode 100644
index 0000000..e517602
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_fs.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_ra.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_ra.dll
new file mode 100644
index 0000000..14edccb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_ra.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_repos.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_repos.dll
new file mode 100644
index 0000000..99191239
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_repos.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_wc.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_wc.dll
new file mode 100644
index 0000000..06cb2c8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/_wc.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/client.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/client.py
new file mode 100644
index 0000000..264bb1e5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/client.py
@@ -0,0 +1,2657 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+import core
+import delta
+import wc
+import ra
+
+import _client
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
+    if (name == "this"):
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)
+
+def _swig_setattr(self,class_type,name,value):
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)
+
+def _swig_getattr(self,class_type,name):
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)
+    raise AttributeError,name
+
+
+def svn_client_version(*args):
+    """svn_client_version() -> svn_version_t"""
+    return apply(_client.svn_client_version, args)
+
+def svn_client_get_simple_prompt_provider(*args):
+    """
+    svn_client_get_simple_prompt_provider(svn_auth_provider_object_t provider, svn_auth_simple_prompt_func_t prompt_func, 
+        int retry_limit, 
+        apr_pool_t pool)
+    """
+    return apply(_client.svn_client_get_simple_prompt_provider, args)
+
+def svn_client_get_username_prompt_provider(*args):
+    """
+    svn_client_get_username_prompt_provider(svn_auth_provider_object_t provider, svn_auth_username_prompt_func_t prompt_func, 
+        int retry_limit, 
+        apr_pool_t pool)
+    """
+    return apply(_client.svn_client_get_username_prompt_provider, args)
+
+def svn_client_get_simple_provider(*args):
+    """svn_client_get_simple_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
+    return apply(_client.svn_client_get_simple_provider, args)
+
+def svn_client_get_windows_simple_provider(*args):
+    """svn_client_get_windows_simple_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
+    return apply(_client.svn_client_get_windows_simple_provider, args)
+
+def svn_client_get_username_provider(*args):
+    """svn_client_get_username_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
+    return apply(_client.svn_client_get_username_provider, args)
+
+def svn_client_get_ssl_server_trust_file_provider(*args):
+    """svn_client_get_ssl_server_trust_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
+    return apply(_client.svn_client_get_ssl_server_trust_file_provider, args)
+
+def svn_client_get_ssl_client_cert_file_provider(*args):
+    """svn_client_get_ssl_client_cert_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
+    return apply(_client.svn_client_get_ssl_client_cert_file_provider, args)
+
+def svn_client_get_ssl_client_cert_pw_file_provider(*args):
+    """svn_client_get_ssl_client_cert_pw_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
+    return apply(_client.svn_client_get_ssl_client_cert_pw_file_provider, args)
+
+def svn_client_get_ssl_server_trust_prompt_provider(*args):
+    """
+    svn_client_get_ssl_server_trust_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_server_trust_prompt_func_t prompt_func, 
+        apr_pool_t pool)
+    """
+    return apply(_client.svn_client_get_ssl_server_trust_prompt_provider, args)
+
+def svn_client_get_ssl_client_cert_prompt_provider(*args):
+    """
+    svn_client_get_ssl_client_cert_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_client_cert_prompt_func_t prompt_func, 
+        int retry_limit, 
+        apr_pool_t pool)
+    """
+    return apply(_client.svn_client_get_ssl_client_cert_prompt_provider, args)
+
+def svn_client_get_ssl_client_cert_pw_prompt_provider(*args):
+    """
+    svn_client_get_ssl_client_cert_pw_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_client_cert_pw_prompt_func_t prompt_func, 
+        int retry_limit, 
+        apr_pool_t pool)
+    """
+    return apply(_client.svn_client_get_ssl_client_cert_pw_prompt_provider, args)
+
+def svn_client_proplist_item_dup(*args):
+    """svn_client_proplist_item_dup( item, apr_pool_t pool)"""
+    return apply(_client.svn_client_proplist_item_dup, args)
+class svn_client_commit_info_t:
+    """Proxy of C svn_client_commit_info_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_info_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_info_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_commit_info_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["revision"] = _client.svn_client_commit_info_t_revision_set
+    __swig_getmethods__["revision"] = _client.svn_client_commit_info_t_revision_get
+    __swig_setmethods__["date"] = _client.svn_client_commit_info_t_date_set
+    __swig_getmethods__["date"] = _client.svn_client_commit_info_t_date_get
+    __swig_setmethods__["author"] = _client.svn_client_commit_info_t_author_set
+    __swig_getmethods__["author"] = _client.svn_client_commit_info_t_author_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_commit_info_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_client_commit_info_t"""
+        _swig_setattr(self, svn_client_commit_info_t, 'this', apply(_client.new_svn_client_commit_info_t, args))
+        _swig_setattr(self, svn_client_commit_info_t, 'thisown', 1)
+    def __del__(self, destroy=_client.delete_svn_client_commit_info_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_client_commit_info_tPtr(svn_client_commit_info_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_commit_info_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_commit_info_t, 'thisown', 0)
+        self.__class__ = svn_client_commit_info_t
+_client.svn_client_commit_info_t_swigregister(svn_client_commit_info_tPtr)
+
+SVN_CLIENT_COMMIT_ITEM_ADD = _client.SVN_CLIENT_COMMIT_ITEM_ADD
+SVN_CLIENT_COMMIT_ITEM_DELETE = _client.SVN_CLIENT_COMMIT_ITEM_DELETE
+SVN_CLIENT_COMMIT_ITEM_TEXT_MODS = _client.SVN_CLIENT_COMMIT_ITEM_TEXT_MODS
+SVN_CLIENT_COMMIT_ITEM_PROP_MODS = _client.SVN_CLIENT_COMMIT_ITEM_PROP_MODS
+SVN_CLIENT_COMMIT_ITEM_IS_COPY = _client.SVN_CLIENT_COMMIT_ITEM_IS_COPY
+SVN_CLIENT_COMMIT_ITEM_LOCK_TOKEN = _client.SVN_CLIENT_COMMIT_ITEM_LOCK_TOKEN
+class svn_client_commit_item3_t:
+    """Proxy of C svn_client_commit_item3_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_item3_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_item3_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_commit_item3_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["path"] = _client.svn_client_commit_item3_t_path_set
+    __swig_getmethods__["path"] = _client.svn_client_commit_item3_t_path_get
+    __swig_setmethods__["kind"] = _client.svn_client_commit_item3_t_kind_set
+    __swig_getmethods__["kind"] = _client.svn_client_commit_item3_t_kind_get
+    __swig_setmethods__["url"] = _client.svn_client_commit_item3_t_url_set
+    __swig_getmethods__["url"] = _client.svn_client_commit_item3_t_url_get
+    __swig_setmethods__["revision"] = _client.svn_client_commit_item3_t_revision_set
+    __swig_getmethods__["revision"] = _client.svn_client_commit_item3_t_revision_get
+    __swig_setmethods__["copyfrom_url"] = _client.svn_client_commit_item3_t_copyfrom_url_set
+    __swig_getmethods__["copyfrom_url"] = _client.svn_client_commit_item3_t_copyfrom_url_get
+    __swig_setmethods__["copyfrom_rev"] = _client.svn_client_commit_item3_t_copyfrom_rev_set
+    __swig_getmethods__["copyfrom_rev"] = _client.svn_client_commit_item3_t_copyfrom_rev_get
+    __swig_setmethods__["state_flags"] = _client.svn_client_commit_item3_t_state_flags_set
+    __swig_getmethods__["state_flags"] = _client.svn_client_commit_item3_t_state_flags_get
+    __swig_setmethods__["incoming_prop_changes"] = _client.svn_client_commit_item3_t_incoming_prop_changes_set
+    __swig_getmethods__["incoming_prop_changes"] = _client.svn_client_commit_item3_t_incoming_prop_changes_get
+    __swig_setmethods__["outgoing_prop_changes"] = _client.svn_client_commit_item3_t_outgoing_prop_changes_set
+    __swig_getmethods__["outgoing_prop_changes"] = _client.svn_client_commit_item3_t_outgoing_prop_changes_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_commit_item3_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_client_commit_item3_t"""
+        _swig_setattr(self, svn_client_commit_item3_t, 'this', apply(_client.new_svn_client_commit_item3_t, args))
+        _swig_setattr(self, svn_client_commit_item3_t, 'thisown', 1)
+    def __del__(self, destroy=_client.delete_svn_client_commit_item3_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_client_commit_item3_tPtr(svn_client_commit_item3_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_commit_item3_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_commit_item3_t, 'thisown', 0)
+        self.__class__ = svn_client_commit_item3_t
+_client.svn_client_commit_item3_t_swigregister(svn_client_commit_item3_tPtr)
+
+class svn_client_commit_item2_t:
+    """Proxy of C svn_client_commit_item2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_item2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_item2_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_commit_item2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["path"] = _client.svn_client_commit_item2_t_path_set
+    __swig_getmethods__["path"] = _client.svn_client_commit_item2_t_path_get
+    __swig_setmethods__["kind"] = _client.svn_client_commit_item2_t_kind_set
+    __swig_getmethods__["kind"] = _client.svn_client_commit_item2_t_kind_get
+    __swig_setmethods__["url"] = _client.svn_client_commit_item2_t_url_set
+    __swig_getmethods__["url"] = _client.svn_client_commit_item2_t_url_get
+    __swig_setmethods__["revision"] = _client.svn_client_commit_item2_t_revision_set
+    __swig_getmethods__["revision"] = _client.svn_client_commit_item2_t_revision_get
+    __swig_setmethods__["copyfrom_url"] = _client.svn_client_commit_item2_t_copyfrom_url_set
+    __swig_getmethods__["copyfrom_url"] = _client.svn_client_commit_item2_t_copyfrom_url_get
+    __swig_setmethods__["copyfrom_rev"] = _client.svn_client_commit_item2_t_copyfrom_rev_set
+    __swig_getmethods__["copyfrom_rev"] = _client.svn_client_commit_item2_t_copyfrom_rev_get
+    __swig_setmethods__["state_flags"] = _client.svn_client_commit_item2_t_state_flags_set
+    __swig_getmethods__["state_flags"] = _client.svn_client_commit_item2_t_state_flags_get
+    __swig_setmethods__["wcprop_changes"] = _client.svn_client_commit_item2_t_wcprop_changes_set
+    __swig_getmethods__["wcprop_changes"] = _client.svn_client_commit_item2_t_wcprop_changes_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_commit_item2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_client_commit_item2_t"""
+        _swig_setattr(self, svn_client_commit_item2_t, 'this', apply(_client.new_svn_client_commit_item2_t, args))
+        _swig_setattr(self, svn_client_commit_item2_t, 'thisown', 1)
+    def __del__(self, destroy=_client.delete_svn_client_commit_item2_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_client_commit_item2_tPtr(svn_client_commit_item2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_commit_item2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_commit_item2_t, 'thisown', 0)
+        self.__class__ = svn_client_commit_item2_t
+_client.svn_client_commit_item2_t_swigregister(svn_client_commit_item2_tPtr)
+
+class svn_client_commit_item_t:
+    """Proxy of C svn_client_commit_item_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_commit_item_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_commit_item_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_commit_item_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["path"] = _client.svn_client_commit_item_t_path_set
+    __swig_getmethods__["path"] = _client.svn_client_commit_item_t_path_get
+    __swig_setmethods__["kind"] = _client.svn_client_commit_item_t_kind_set
+    __swig_getmethods__["kind"] = _client.svn_client_commit_item_t_kind_get
+    __swig_setmethods__["url"] = _client.svn_client_commit_item_t_url_set
+    __swig_getmethods__["url"] = _client.svn_client_commit_item_t_url_get
+    __swig_setmethods__["revision"] = _client.svn_client_commit_item_t_revision_set
+    __swig_getmethods__["revision"] = _client.svn_client_commit_item_t_revision_get
+    __swig_setmethods__["copyfrom_url"] = _client.svn_client_commit_item_t_copyfrom_url_set
+    __swig_getmethods__["copyfrom_url"] = _client.svn_client_commit_item_t_copyfrom_url_get
+    __swig_setmethods__["state_flags"] = _client.svn_client_commit_item_t_state_flags_set
+    __swig_getmethods__["state_flags"] = _client.svn_client_commit_item_t_state_flags_get
+    __swig_setmethods__["wcprop_changes"] = _client.svn_client_commit_item_t_wcprop_changes_set
+    __swig_getmethods__["wcprop_changes"] = _client.svn_client_commit_item_t_wcprop_changes_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_commit_item_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_client_commit_item_t"""
+        _swig_setattr(self, svn_client_commit_item_t, 'this', apply(_client.new_svn_client_commit_item_t, args))
+        _swig_setattr(self, svn_client_commit_item_t, 'thisown', 1)
+    def __del__(self, destroy=_client.delete_svn_client_commit_item_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_client_commit_item_tPtr(svn_client_commit_item_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_commit_item_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_commit_item_t, 'thisown', 0)
+        self.__class__ = svn_client_commit_item_t
+_client.svn_client_commit_item_t_swigregister(svn_client_commit_item_tPtr)
+
+
+def svn_client_commit_item_create(*args):
+    """svn_client_commit_item_create(svn_client_commit_item3_t item, apr_pool_t pool) -> svn_error_t"""
+    return apply(_client.svn_client_commit_item_create, args)
+
+def svn_client_commit_item3_dup(*args):
+    """svn_client_commit_item3_dup(svn_client_commit_item3_t item, apr_pool_t pool) -> svn_client_commit_item3_t"""
+    return apply(_client.svn_client_commit_item3_dup, args)
+
+def svn_client_commit_item2_dup(*args):
+    """svn_client_commit_item2_dup(svn_client_commit_item2_t item, apr_pool_t pool) -> svn_client_commit_item2_t"""
+    return apply(_client.svn_client_commit_item2_dup, args)
+svn_client_diff_summarize_kind_normal = _client.svn_client_diff_summarize_kind_normal
+svn_client_diff_summarize_kind_added = _client.svn_client_diff_summarize_kind_added
+svn_client_diff_summarize_kind_modified = _client.svn_client_diff_summarize_kind_modified
+svn_client_diff_summarize_kind_deleted = _client.svn_client_diff_summarize_kind_deleted
+class svn_client_diff_summarize_t:
+    """Proxy of C svn_client_diff_summarize_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_diff_summarize_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_diff_summarize_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_diff_summarize_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["path"] = _client.svn_client_diff_summarize_t_path_set
+    __swig_getmethods__["path"] = _client.svn_client_diff_summarize_t_path_get
+    __swig_setmethods__["summarize_kind"] = _client.svn_client_diff_summarize_t_summarize_kind_set
+    __swig_getmethods__["summarize_kind"] = _client.svn_client_diff_summarize_t_summarize_kind_get
+    __swig_setmethods__["prop_changed"] = _client.svn_client_diff_summarize_t_prop_changed_set
+    __swig_getmethods__["prop_changed"] = _client.svn_client_diff_summarize_t_prop_changed_get
+    __swig_setmethods__["node_kind"] = _client.svn_client_diff_summarize_t_node_kind_set
+    __swig_getmethods__["node_kind"] = _client.svn_client_diff_summarize_t_node_kind_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_diff_summarize_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_client_diff_summarize_t"""
+        _swig_setattr(self, svn_client_diff_summarize_t, 'this', apply(_client.new_svn_client_diff_summarize_t, args))
+        _swig_setattr(self, svn_client_diff_summarize_t, 'thisown', 1)
+    def __del__(self, destroy=_client.delete_svn_client_diff_summarize_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_client_diff_summarize_tPtr(svn_client_diff_summarize_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_diff_summarize_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_diff_summarize_t, 'thisown', 0)
+        self.__class__ = svn_client_diff_summarize_t
+_client.svn_client_diff_summarize_t_swigregister(svn_client_diff_summarize_tPtr)
+
+
+def svn_client_diff_summarize_dup(*args):
+    """svn_client_diff_summarize_dup(svn_client_diff_summarize_t diff, apr_pool_t pool) -> svn_client_diff_summarize_t"""
+    return apply(_client.svn_client_diff_summarize_dup, args)
+class svn_client_ctx_t:
+    """Proxy of C svn_client_ctx_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_ctx_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_ctx_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_ctx_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["auth_baton"] = _client.svn_client_ctx_t_auth_baton_set
+    __swig_getmethods__["auth_baton"] = _client.svn_client_ctx_t_auth_baton_get
+    __swig_setmethods__["notify_func"] = _client.svn_client_ctx_t_notify_func_set
+    __swig_getmethods__["notify_func"] = _client.svn_client_ctx_t_notify_func_get
+    __swig_setmethods__["notify_baton"] = _client.svn_client_ctx_t_notify_baton_set
+    __swig_getmethods__["notify_baton"] = _client.svn_client_ctx_t_notify_baton_get
+    __swig_setmethods__["log_msg_func"] = _client.svn_client_ctx_t_log_msg_func_set
+    __swig_getmethods__["log_msg_func"] = _client.svn_client_ctx_t_log_msg_func_get
+    __swig_setmethods__["log_msg_baton"] = _client.svn_client_ctx_t_log_msg_baton_set
+    __swig_getmethods__["log_msg_baton"] = _client.svn_client_ctx_t_log_msg_baton_get
+    __swig_setmethods__["config"] = _client.svn_client_ctx_t_config_set
+    __swig_getmethods__["config"] = _client.svn_client_ctx_t_config_get
+    __swig_setmethods__["cancel_func"] = _client.svn_client_ctx_t_cancel_func_set
+    __swig_getmethods__["cancel_func"] = _client.svn_client_ctx_t_cancel_func_get
+    __swig_setmethods__["cancel_baton"] = _client.svn_client_ctx_t_cancel_baton_set
+    __swig_getmethods__["cancel_baton"] = _client.svn_client_ctx_t_cancel_baton_get
+    __swig_setmethods__["notify_func2"] = _client.svn_client_ctx_t_notify_func2_set
+    __swig_getmethods__["notify_func2"] = _client.svn_client_ctx_t_notify_func2_get
+    __swig_setmethods__["notify_baton2"] = _client.svn_client_ctx_t_notify_baton2_set
+    __swig_getmethods__["notify_baton2"] = _client.svn_client_ctx_t_notify_baton2_get
+    __swig_setmethods__["log_msg_func2"] = _client.svn_client_ctx_t_log_msg_func2_set
+    __swig_getmethods__["log_msg_func2"] = _client.svn_client_ctx_t_log_msg_func2_get
+    __swig_setmethods__["log_msg_baton2"] = _client.svn_client_ctx_t_log_msg_baton2_set
+    __swig_getmethods__["log_msg_baton2"] = _client.svn_client_ctx_t_log_msg_baton2_get
+    __swig_setmethods__["progress_func"] = _client.svn_client_ctx_t_progress_func_set
+    __swig_getmethods__["progress_func"] = _client.svn_client_ctx_t_progress_func_get
+    __swig_setmethods__["progress_baton"] = _client.svn_client_ctx_t_progress_baton_set
+    __swig_getmethods__["progress_baton"] = _client.svn_client_ctx_t_progress_baton_get
+    __swig_setmethods__["log_msg_func3"] = _client.svn_client_ctx_t_log_msg_func3_set
+    __swig_getmethods__["log_msg_func3"] = _client.svn_client_ctx_t_log_msg_func3_get
+    __swig_setmethods__["log_msg_baton3"] = _client.svn_client_ctx_t_log_msg_baton3_set
+    __swig_getmethods__["log_msg_baton3"] = _client.svn_client_ctx_t_log_msg_baton3_get
+    __swig_setmethods__["mimetypes_map"] = _client.svn_client_ctx_t_mimetypes_map_set
+    __swig_getmethods__["mimetypes_map"] = _client.svn_client_ctx_t_mimetypes_map_get
+    __swig_setmethods__["conflict_func"] = _client.svn_client_ctx_t_conflict_func_set
+    __swig_getmethods__["conflict_func"] = _client.svn_client_ctx_t_conflict_func_get
+    __swig_setmethods__["conflict_baton"] = _client.svn_client_ctx_t_conflict_baton_set
+    __swig_getmethods__["conflict_baton"] = _client.svn_client_ctx_t_conflict_baton_get
+    __swig_setmethods__["client_name"] = _client.svn_client_ctx_t_client_name_set
+    __swig_getmethods__["client_name"] = _client.svn_client_ctx_t_client_name_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_ctx_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_client_ctx_t"""
+        _swig_setattr(self, svn_client_ctx_t, 'this', apply(_client.new_svn_client_ctx_t, args))
+        _swig_setattr(self, svn_client_ctx_t, 'thisown', 1)
+    def __del__(self, destroy=_client.delete_svn_client_ctx_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_client_ctx_tPtr(svn_client_ctx_t):
+    # Thin SWIG "Ptr" wrapper: adopts an existing C pointer without taking
+    # ownership (thisown stays 0 unless already set), then rebrands the
+    # instance as the main proxy class.
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_ctx_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_ctx_t, 'thisown', 0)
+        self.__class__ = svn_client_ctx_t
+# Register the pointer class with the C extension so it can build proxies.
+_client.svn_client_ctx_t_swigregister(svn_client_ctx_tPtr)
+
+# Constants re-exported from the C extension module.
+SVN_CLIENT_AUTH_USERNAME = _client.SVN_CLIENT_AUTH_USERNAME
+SVN_CLIENT_AUTH_PASSWORD = _client.SVN_CLIENT_AUTH_PASSWORD
+
+def svn_client_create_context(*args):
+    """svn_client_create_context(svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
+    return apply(_client.svn_client_create_context, args)
+
+def svn_client_checkout3(*args):
+    """
+    svn_client_checkout3(svn_revnum_t result_rev, char URL, char path, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t revision, 
+        svn_depth_t depth, svn_boolean_t ignore_externals, 
+        svn_boolean_t allow_unver_obstructions, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_checkout3, args)
+
+def svn_client_checkout2(*args):
+    """
+    svn_client_checkout2(svn_revnum_t result_rev, char URL, char path, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t revision, 
+        svn_boolean_t recurse, svn_boolean_t ignore_externals, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_checkout2, args)
+
+def svn_client_checkout(*args):
+    """
+    svn_client_checkout(svn_revnum_t result_rev, char URL, char path, svn_opt_revision_t revision, 
+        svn_boolean_t recurse, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_checkout, args)
+
+def svn_client_update3(*args):
+    """
+    svn_client_update3(apr_array_header_t result_revs, apr_array_header_t paths, 
+        svn_opt_revision_t revision, svn_depth_t depth, 
+        svn_boolean_t depth_is_sticky, svn_boolean_t ignore_externals, 
+        svn_boolean_t allow_unver_obstructions, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_update3, args)
+
+def svn_client_update2(*args):
+    """
+    svn_client_update2(apr_array_header_t result_revs, apr_array_header_t paths, 
+        svn_opt_revision_t revision, svn_boolean_t recurse, 
+        svn_boolean_t ignore_externals, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_update2, args)
+
+def svn_client_update(*args):
+    """
+    svn_client_update(svn_revnum_t result_rev, char path, svn_opt_revision_t revision, 
+        svn_boolean_t recurse, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_update, args)
+
+def svn_client_switch2(*args):
+    """
+    svn_client_switch2(svn_revnum_t result_rev, char path, char url, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t revision, 
+        svn_depth_t depth, svn_boolean_t depth_is_sticky, 
+        svn_boolean_t ignore_externals, 
+        svn_boolean_t allow_unver_obstructions, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_switch2, args)
+
+def svn_client_switch(*args):
+    """
+    svn_client_switch(svn_revnum_t result_rev, char path, char url, svn_opt_revision_t revision, 
+        svn_boolean_t recurse, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_switch, args)
+
+def svn_client_add4(*args):
+    """
+    svn_client_add4(char path, svn_depth_t depth, svn_boolean_t force, 
+        svn_boolean_t no_ignore, svn_boolean_t add_parents, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_add4, args)
+
+def svn_client_add3(*args):
+    """
+    svn_client_add3(char path, svn_boolean_t recursive, svn_boolean_t force, 
+        svn_boolean_t no_ignore, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_add3, args)
+
+def svn_client_add2(*args):
+    """
+    svn_client_add2(char path, svn_boolean_t recursive, svn_boolean_t force, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_add2, args)
+
+def svn_client_add(*args):
+    """
+    svn_client_add(char path, svn_boolean_t recursive, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_add, args)
+
+def svn_client_mkdir3(*args):
+    """
+    svn_client_mkdir3(svn_commit_info_t commit_info_p, apr_array_header_t paths, 
+        svn_boolean_t make_parents, apr_hash_t revprop_table, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_mkdir3, args)
+
+def svn_client_mkdir2(*args):
+    """
+    svn_client_mkdir2(svn_commit_info_t commit_info_p, apr_array_header_t paths, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_mkdir2, args)
+
+def svn_client_mkdir(*args):
+    """
+    svn_client_mkdir(svn_client_commit_info_t commit_info_p, apr_array_header_t paths, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_mkdir, args)
+
+def svn_client_delete3(*args):
+    """
+    svn_client_delete3(svn_commit_info_t commit_info_p, apr_array_header_t paths, 
+        svn_boolean_t force, svn_boolean_t keep_local, 
+        apr_hash_t revprop_table, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_delete3, args)
+
+def svn_client_delete2(*args):
+    """
+    svn_client_delete2(svn_commit_info_t commit_info_p, apr_array_header_t paths, 
+        svn_boolean_t force, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_delete2, args)
+
+def svn_client_delete(*args):
+    """
+    svn_client_delete(svn_client_commit_info_t commit_info_p, apr_array_header_t paths, 
+        svn_boolean_t force, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_delete, args)
+
+def svn_client_import3(*args):
+    """
+    svn_client_import3(svn_commit_info_t commit_info_p, char path, char url, 
+        svn_depth_t depth, svn_boolean_t no_ignore, 
+        svn_boolean_t ignore_unknown_node_types, apr_hash_t revprop_table, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_import3, args)
+
+def svn_client_import2(*args):
+    """
+    svn_client_import2(svn_commit_info_t commit_info_p, char path, char url, 
+        svn_boolean_t nonrecursive, svn_boolean_t no_ignore, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_import2, args)
+
+def svn_client_import(*args):
+    """
+    svn_client_import(svn_client_commit_info_t commit_info_p, char path, 
+        char url, svn_boolean_t nonrecursive, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_import, args)
+
+def svn_client_commit4(*args):
+    """
+    svn_client_commit4(svn_commit_info_t commit_info_p, apr_array_header_t targets, 
+        svn_depth_t depth, svn_boolean_t keep_locks, 
+        svn_boolean_t keep_changelists, apr_array_header_t changelists, 
+        apr_hash_t revprop_table, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_commit4, args)
+
+def svn_client_commit3(*args):
+    """
+    svn_client_commit3(svn_commit_info_t commit_info_p, apr_array_header_t targets, 
+        svn_boolean_t recurse, svn_boolean_t keep_locks, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_commit3, args)
+
+def svn_client_commit2(*args):
+    """
+    svn_client_commit2(svn_client_commit_info_t commit_info_p, apr_array_header_t targets, 
+        svn_boolean_t recurse, svn_boolean_t keep_locks, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_commit2, args)
+
+def svn_client_commit(*args):
+    """
+    svn_client_commit(svn_client_commit_info_t commit_info_p, apr_array_header_t targets, 
+        svn_boolean_t nonrecursive, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_commit, args)
+
+def svn_client_status3(*args):
+    """
+    svn_client_status3(svn_revnum_t result_rev, char path, svn_opt_revision_t revision, 
+        svn_wc_status_func2_t status_func, 
+        svn_depth_t depth, svn_boolean_t get_all, 
+        svn_boolean_t update, svn_boolean_t no_ignore, 
+        svn_boolean_t ignore_externals, apr_array_header_t changelists, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_status3, args)
+
+def svn_client_status2(*args):
+    """
+    svn_client_status2(svn_revnum_t result_rev, char path, svn_opt_revision_t revision, 
+        svn_wc_status_func2_t status_func, 
+        svn_boolean_t recurse, svn_boolean_t get_all, 
+        svn_boolean_t update, svn_boolean_t no_ignore, 
+        svn_boolean_t ignore_externals, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_status2, args)
+
+def svn_client_status(*args):
+    """
+    svn_client_status(svn_revnum_t result_rev, char path, svn_opt_revision_t revision, 
+        svn_wc_status_func_t status_func, 
+        svn_boolean_t recurse, svn_boolean_t get_all, 
+        svn_boolean_t update, svn_boolean_t no_ignore, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_status, args)
+
+def svn_client_log4(*args):
+    """
+    svn_client_log4(apr_array_header_t targets, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start, svn_opt_revision_t end, 
+        int limit, svn_boolean_t discover_changed_paths, 
+        svn_boolean_t strict_node_history, 
+        svn_boolean_t include_merged_revisions, 
+        apr_array_header_t revprops, svn_log_entry_receiver_t receiver, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_log4, args)
+
+def svn_client_log3(*args):
+    """
+    svn_client_log3(apr_array_header_t targets, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start, svn_opt_revision_t end, 
+        int limit, svn_boolean_t discover_changed_paths, 
+        svn_boolean_t strict_node_history, 
+        svn_log_message_receiver_t receiver, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_log3, args)
+
+def svn_client_log2(*args):
+    """
+    svn_client_log2(apr_array_header_t targets, svn_opt_revision_t start, 
+        svn_opt_revision_t end, int limit, svn_boolean_t discover_changed_paths, 
+        svn_boolean_t strict_node_history, 
+        svn_log_message_receiver_t receiver, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_log2, args)
+
+def svn_client_log(*args):
+    """
+    svn_client_log(apr_array_header_t targets, svn_opt_revision_t start, 
+        svn_opt_revision_t end, svn_boolean_t discover_changed_paths, 
+        svn_boolean_t strict_node_history, 
+        svn_log_message_receiver_t receiver, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_log, args)
+
+def svn_client_blame4(*args):
+    """
+    svn_client_blame4(char path_or_url, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start, svn_opt_revision_t end, 
+        svn_diff_file_options_t diff_options, svn_boolean_t ignore_mime_type, 
+        svn_boolean_t include_merged_revisions, 
+        svn_client_blame_receiver2_t receiver, 
+        void receiver_baton, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_blame4, args)
+
+def svn_client_blame3(*args):
+    """
+    svn_client_blame3(char path_or_url, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start, svn_opt_revision_t end, 
+        svn_diff_file_options_t diff_options, svn_boolean_t ignore_mime_type, 
+        svn_client_blame_receiver_t receiver, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_blame3, args)
+
+def svn_client_blame2(*args):
+    """
+    svn_client_blame2(char path_or_url, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start, svn_opt_revision_t end, 
+        svn_client_blame_receiver_t receiver, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_blame2, args)
+
+def svn_client_blame(*args):
+    """
+    svn_client_blame(char path_or_url, svn_opt_revision_t start, svn_opt_revision_t end, 
+        svn_client_blame_receiver_t receiver, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_blame, args)
+
+def svn_client_diff4(*args):
+    """
+    svn_client_diff4(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1, 
+        char path2, svn_opt_revision_t revision2, 
+        char relative_to_dir, svn_depth_t depth, 
+        svn_boolean_t ignore_ancestry, 
+        svn_boolean_t no_diff_deleted, svn_boolean_t ignore_content_type, 
+        char header_encoding, 
+        apr_file_t outfile, apr_file_t errfile, apr_array_header_t changelists, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff4, args)
+
+def svn_client_diff3(*args):
+    """
+    svn_client_diff3(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1, 
+        char path2, svn_opt_revision_t revision2, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t no_diff_deleted, 
+        svn_boolean_t ignore_content_type, 
+        char header_encoding, apr_file_t outfile, 
+        apr_file_t errfile, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff3, args)
+
+def svn_client_diff2(*args):
+    """
+    svn_client_diff2(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1, 
+        char path2, svn_opt_revision_t revision2, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t no_diff_deleted, 
+        svn_boolean_t ignore_content_type, 
+        apr_file_t outfile, apr_file_t errfile, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff2, args)
+
+def svn_client_diff(*args):
+    """
+    svn_client_diff(apr_array_header_t diff_options, char path1, svn_opt_revision_t revision1, 
+        char path2, svn_opt_revision_t revision2, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t no_diff_deleted, 
+        apr_file_t outfile, apr_file_t errfile, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff, args)
+
+def svn_client_diff_peg4(*args):
+    """
+    svn_client_diff_peg4(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start_revision, 
+        svn_opt_revision_t end_revision, 
+        char relative_to_dir, svn_depth_t depth, 
+        svn_boolean_t ignore_ancestry, svn_boolean_t no_diff_deleted, 
+        svn_boolean_t ignore_content_type, 
+        char header_encoding, apr_file_t outfile, 
+        apr_file_t errfile, apr_array_header_t changelists, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_peg4, args)
+
+def svn_client_diff_peg3(*args):
+    """
+    svn_client_diff_peg3(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start_revision, 
+        svn_opt_revision_t end_revision, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t no_diff_deleted, svn_boolean_t ignore_content_type, 
+        char header_encoding, 
+        apr_file_t outfile, apr_file_t errfile, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_peg3, args)
+
+def svn_client_diff_peg2(*args):
+    """
+    svn_client_diff_peg2(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start_revision, 
+        svn_opt_revision_t end_revision, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t no_diff_deleted, svn_boolean_t ignore_content_type, 
+        apr_file_t outfile, 
+        apr_file_t errfile, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_peg2, args)
+
+def svn_client_diff_peg(*args):
+    """
+    svn_client_diff_peg(apr_array_header_t diff_options, char path, svn_opt_revision_t peg_revision, 
+        svn_opt_revision_t start_revision, 
+        svn_opt_revision_t end_revision, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t no_diff_deleted, apr_file_t outfile, 
+        apr_file_t errfile, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_peg, args)
+
+def svn_client_diff_summarize2(*args):
+    """
+    svn_client_diff_summarize2(char path1, svn_opt_revision_t revision1, char path2, 
+        svn_opt_revision_t revision2, svn_depth_t depth, 
+        svn_boolean_t ignore_ancestry, apr_array_header_t changelists, 
+        svn_client_diff_summarize_func_t summarize_func, 
+        void summarize_baton, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_summarize2, args)
+
+def svn_client_diff_summarize(*args):
+    """
+    svn_client_diff_summarize(char path1, svn_opt_revision_t revision1, char path2, 
+        svn_opt_revision_t revision2, svn_boolean_t recurse, 
+        svn_boolean_t ignore_ancestry, svn_client_diff_summarize_func_t summarize_func, 
+        void summarize_baton, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_summarize, args)
+
+def svn_client_diff_summarize_peg2(*args):
+    """
+    svn_client_diff_summarize_peg2(char path, svn_opt_revision_t peg_revision, svn_opt_revision_t start_revision, 
+        svn_opt_revision_t end_revision, 
+        svn_depth_t depth, svn_boolean_t ignore_ancestry, 
+        apr_array_header_t changelists, 
+        svn_client_diff_summarize_func_t summarize_func, 
+        void summarize_baton, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_summarize_peg2, args)
+
+def svn_client_diff_summarize_peg(*args):
+    """
+    svn_client_diff_summarize_peg(char path, svn_opt_revision_t peg_revision, svn_opt_revision_t start_revision, 
+        svn_opt_revision_t end_revision, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_client_diff_summarize_func_t summarize_func, 
+        void summarize_baton, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_diff_summarize_peg, args)
+
+def svn_client_merge3(*args):
+    """
+    svn_client_merge3(char source1, svn_opt_revision_t revision1, char source2, 
+        svn_opt_revision_t revision2, char target_wcpath, 
+        svn_depth_t depth, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t force, svn_boolean_t record_only, 
+        svn_boolean_t dry_run, apr_array_header_t merge_options, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_merge3, args)
+
+def svn_client_merge2(*args):
+    """
+    svn_client_merge2(char source1, svn_opt_revision_t revision1, char source2, 
+        svn_opt_revision_t revision2, char target_wcpath, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t force, svn_boolean_t dry_run, 
+        apr_array_header_t merge_options, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_merge2, args)
+
+def svn_client_merge(*args):
+    """
+    svn_client_merge(char source1, svn_opt_revision_t revision1, char source2, 
+        svn_opt_revision_t revision2, char target_wcpath, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t force, svn_boolean_t dry_run, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_merge, args)
+
+def svn_client_merge_reintegrate(*args):
+    """
+    svn_client_merge_reintegrate(char source, svn_opt_revision_t peg_revision, char target_wcpath, 
+        svn_boolean_t dry_run, apr_array_header_t merge_options, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_merge_reintegrate, args)
+
+def svn_client_merge_peg3(*args):
+    """
+    svn_client_merge_peg3(char source, apr_array_header_t ranges_to_merge, svn_opt_revision_t peg_revision, 
+        char target_wcpath, 
+        svn_depth_t depth, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t force, svn_boolean_t record_only, 
+        svn_boolean_t dry_run, apr_array_header_t merge_options, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_merge_peg3, args)
+
+def svn_client_merge_peg2(*args):
+    """
+    svn_client_merge_peg2(char source, svn_opt_revision_t revision1, svn_opt_revision_t revision2, 
+        svn_opt_revision_t peg_revision, 
+        char target_wcpath, svn_boolean_t recurse, 
+        svn_boolean_t ignore_ancestry, svn_boolean_t force, 
+        svn_boolean_t dry_run, apr_array_header_t merge_options, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_merge_peg2, args)
+
+def svn_client_merge_peg(*args):
+    """
+    svn_client_merge_peg(char source, svn_opt_revision_t revision1, svn_opt_revision_t revision2, 
+        svn_opt_revision_t peg_revision, 
+        char target_wcpath, svn_boolean_t recurse, 
+        svn_boolean_t ignore_ancestry, svn_boolean_t force, 
+        svn_boolean_t dry_run, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_merge_peg, args)
+
+def svn_client_suggest_merge_sources(*args):
+    """
+    svn_client_suggest_merge_sources(apr_array_header_t suggestions, char path_or_url, svn_opt_revision_t peg_revision, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_suggest_merge_sources, args)
+
+def svn_client_mergeinfo_get_merged(*args):
+    """
+    svn_client_mergeinfo_get_merged(apr_hash_t mergeinfo, char path_or_url, svn_opt_revision_t peg_revision, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_mergeinfo_get_merged, args)
+
+def svn_client_mergeinfo_log_merged(*args):
+    """
+    svn_client_mergeinfo_log_merged(char path_or_url, svn_opt_revision_t peg_revision, 
+        char merge_source_url, svn_opt_revision_t src_peg_revision, 
+        svn_log_entry_receiver_t receiver, 
+        svn_boolean_t discover_changed_paths, apr_array_header_t revprops, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_mergeinfo_log_merged, args)
+
+def svn_client_mergeinfo_log_eligible(*args):
+    """
+    svn_client_mergeinfo_log_eligible(char path_or_url, svn_opt_revision_t peg_revision, 
+        char merge_source_url, svn_opt_revision_t src_peg_revision, 
+        svn_log_entry_receiver_t receiver, 
+        svn_boolean_t discover_changed_paths, apr_array_header_t revprops, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_mergeinfo_log_eligible, args)
+
+def svn_client_cleanup(*args):
+    """svn_client_cleanup(char dir, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
+    return apply(_client.svn_client_cleanup, args)
+
+def svn_client_relocate(*args):
+    """
+    svn_client_relocate(char dir, char from, char to, svn_boolean_t recurse, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_relocate, args)
+
+def svn_client_revert2(*args):
+    """
+    svn_client_revert2(apr_array_header_t paths, svn_depth_t depth, apr_array_header_t changelists, 
+        svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_revert2, args)
+
+def svn_client_revert(*args):
+    """
+    svn_client_revert(apr_array_header_t paths, svn_boolean_t recursive, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_revert, args)
+
+def svn_client_resolved(*args):
+    """
+    svn_client_resolved(char path, svn_boolean_t recursive, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_resolved, args)
+
+def svn_client_resolve(*args):
+    """
+    svn_client_resolve(char path, svn_depth_t depth, svn_wc_conflict_choice_t conflict_choice, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_resolve, args)
+class svn_client_copy_source_t:
+    """Proxy of C svn_client_copy_source_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_copy_source_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_copy_source_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_copy_source_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["path"] = _client.svn_client_copy_source_t_path_set
+    __swig_getmethods__["path"] = _client.svn_client_copy_source_t_path_get
+    __swig_setmethods__["revision"] = _client.svn_client_copy_source_t_revision_set
+    __swig_getmethods__["revision"] = _client.svn_client_copy_source_t_revision_get
+    __swig_setmethods__["peg_revision"] = _client.svn_client_copy_source_t_peg_revision_set
+    __swig_getmethods__["peg_revision"] = _client.svn_client_copy_source_t_peg_revision_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_copy_source_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_client_copy_source_t"""
+        _swig_setattr(self, svn_client_copy_source_t, 'this', apply(_client.new_svn_client_copy_source_t, args))
+        _swig_setattr(self, svn_client_copy_source_t, 'thisown', 1)
+    def __del__(self, destroy=_client.delete_svn_client_copy_source_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_client_copy_source_tPtr(svn_client_copy_source_t):
+    # Thin SWIG "Ptr" wrapper: adopts an existing C pointer without taking
+    # ownership (thisown stays 0 unless already set), then rebrands the
+    # instance as the main proxy class.
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_copy_source_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_copy_source_t, 'thisown', 0)
+        self.__class__ = svn_client_copy_source_t
+# Register the pointer class with the C extension so it can build proxies.
+_client.svn_client_copy_source_t_swigregister(svn_client_copy_source_tPtr)
+
+
+def svn_client_copy4(*args):
+    """
+    svn_client_copy4(svn_commit_info_t commit_info_p, apr_array_header_t sources, 
+        char dst_path, svn_boolean_t copy_as_child, 
+        svn_boolean_t make_parents, apr_hash_t revprop_table, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_copy4, args)
+
+def svn_client_copy3(*args):
+    """
+    svn_client_copy3(svn_commit_info_t commit_info_p, char src_path, svn_opt_revision_t src_revision, 
+        char dst_path, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_copy3, args)
+
+def svn_client_copy2(*args):
+    """
+    svn_client_copy2(svn_commit_info_t commit_info_p, char src_path, svn_opt_revision_t src_revision, 
+        char dst_path, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_copy2, args)
+
+def svn_client_copy(*args):
+    """
+    svn_client_copy(svn_client_commit_info_t commit_info_p, char src_path, 
+        svn_opt_revision_t src_revision, char dst_path, 
+        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_copy, args)
+
+def svn_client_move5(*args):
+    """
+    svn_client_move5(svn_commit_info_t commit_info_p, apr_array_header_t src_paths, 
+        char dst_path, svn_boolean_t force, 
+        svn_boolean_t move_as_child, svn_boolean_t make_parents, 
+        apr_hash_t revprop_table, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_move5, args)
+
+def svn_client_move4(*args):
+    """
+    svn_client_move4(svn_commit_info_t commit_info_p, char src_path, char dst_path, 
+        svn_boolean_t force, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_move4, args)
+
+def svn_client_move3(*args):
+    """
+    svn_client_move3(svn_commit_info_t commit_info_p, char src_path, char dst_path, 
+        svn_boolean_t force, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_move3, args)
+
+def svn_client_move2(*args):
+    """
+    svn_client_move2(svn_client_commit_info_t commit_info_p, char src_path, 
+        char dst_path, svn_boolean_t force, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_move2, args)
+
+def svn_client_move(*args):
+    """
+    svn_client_move(svn_client_commit_info_t commit_info_p, char src_path, 
+        svn_opt_revision_t src_revision, char dst_path, 
+        svn_boolean_t force, svn_client_ctx_t ctx, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_client.svn_client_move, args)
+
def svn_client_propset3(*args):
    """
    svn_client_propset3(svn_commit_info_t commit_info_p, char propname, svn_string_t propval,
        char target, svn_depth_t depth,
        svn_boolean_t skip_checks, svn_revnum_t base_revision_for_url,
        apr_array_header_t changelists,
        apr_hash_t revprop_table, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propset3(*args)
+
def svn_client_propset2(*args):
    """
    svn_client_propset2(char propname, svn_string_t propval, char target, svn_boolean_t recurse,
        svn_boolean_t skip_checks,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propset2(*args)
+
def svn_client_propset(*args):
    """
    svn_client_propset(char propname, svn_string_t propval, char target, svn_boolean_t recurse,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propset(*args)
+
def svn_client_revprop_set(*args):
    """
    svn_client_revprop_set(char propname, svn_string_t propval, char URL, svn_opt_revision_t revision,
        svn_revnum_t set_rev,
        svn_boolean_t force, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_revprop_set(*args)
+
def svn_client_propget3(*args):
    """
    svn_client_propget3(apr_hash_t props, char propname, char target, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision,
        svn_revnum_t actual_revnum, svn_depth_t depth,
        apr_array_header_t changelists,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propget3(*args)
+
def svn_client_propget2(*args):
    """
    svn_client_propget2(apr_hash_t props, char propname, char target, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propget2(*args)
+
def svn_client_propget(*args):
    """
    svn_client_propget(apr_hash_t props, char propname, char target, svn_opt_revision_t revision,
        svn_boolean_t recurse,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_propget(*args)
+
def svn_client_revprop_get(*args):
    """
    svn_client_revprop_get(char propname, svn_string_t propval, char URL, svn_opt_revision_t revision,
        svn_revnum_t set_rev,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_revprop_get(*args)
+
def svn_client_proplist3(*args):
    """
    svn_client_proplist3(char target, svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_depth_t depth, apr_array_header_t changelists,
        svn_proplist_receiver_t receiver,
        void receiver_baton, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_proplist3(*args)
+
def svn_client_proplist2(*args):
    """
    svn_client_proplist2(apr_array_header_t props, char target, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_proplist2(*args)
+
def svn_client_proplist(*args):
    """
    svn_client_proplist(apr_array_header_t props, char target, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_proplist(*args)
+
def svn_client_revprop_list(*args):
    """
    svn_client_revprop_list(apr_hash_t props, char URL, svn_opt_revision_t revision,
        svn_revnum_t set_rev, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_revprop_list(*args)
+
def svn_client_export4(*args):
    """
    svn_client_export4(svn_revnum_t result_rev, char from, char to, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision,
        svn_boolean_t overwrite, svn_boolean_t ignore_externals,
        svn_depth_t depth, char native_eol,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export4(*args)
+
def svn_client_export3(*args):
    """
    svn_client_export3(svn_revnum_t result_rev, char from, char to, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision,
        svn_boolean_t overwrite, svn_boolean_t ignore_externals,
        svn_boolean_t recurse,
        char native_eol, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export3(*args)
+
def svn_client_export2(*args):
    """
    svn_client_export2(svn_revnum_t result_rev, char from, char to, svn_opt_revision_t revision,
        svn_boolean_t force, char native_eol,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export2(*args)
+
def svn_client_export(*args):
    """
    svn_client_export(svn_revnum_t result_rev, char from, char to, svn_opt_revision_t revision,
        svn_boolean_t force, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_export(*args)
+
def svn_client_list2(*args):
    """
    svn_client_list2(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_depth_t depth,
        apr_uint32_t dirent_fields, svn_boolean_t fetch_locks,
        svn_client_list_func_t list_func,
        void baton, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_list2(*args)
+
def svn_client_list(*args):
    """
    svn_client_list(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_boolean_t recurse,
        apr_uint32_t dirent_fields, svn_boolean_t fetch_locks,
        svn_client_list_func_t list_func,
        void baton, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_list(*args)
+
def svn_client_ls3(*args):
    """
    svn_client_ls3(apr_hash_t dirents, apr_hash_t locks, char path_or_url,
        svn_opt_revision_t peg_revision, svn_opt_revision_t revision,
        svn_boolean_t recurse,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_ls3(*args)
+
def svn_client_ls2(*args):
    """
    svn_client_ls2(apr_hash_t dirents, char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_ls2(*args)
+
def svn_client_ls(*args):
    """
    svn_client_ls(apr_hash_t dirents, char path_or_url, svn_opt_revision_t revision,
        svn_boolean_t recurse, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_ls(*args)
+
def svn_client_cat2(*args):
    """
    svn_client_cat2(svn_stream_t out, char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_cat2(*args)
+
def svn_client_cat(*args):
    """
    svn_client_cat(svn_stream_t out, char path_or_url, svn_opt_revision_t revision,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_cat(*args)
+
def svn_client_add_to_changelist(*args):
    """
    svn_client_add_to_changelist(apr_array_header_t paths, char changelist, svn_depth_t depth,
        apr_array_header_t changelists, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_add_to_changelist(*args)
+
def svn_client_remove_from_changelists(*args):
    """
    svn_client_remove_from_changelists(apr_array_header_t paths, svn_depth_t depth, apr_array_header_t changelists,
        svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_remove_from_changelists(*args)
+
def svn_client_get_changelists(*args):
    """
    svn_client_get_changelists(char path, apr_array_header_t changelists, svn_depth_t depth,
        svn_changelist_receiver_t callback_func,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_get_changelists(*args)
+
def svn_client_lock(*args):
    """
    svn_client_lock(apr_array_header_t targets, char comment, svn_boolean_t steal_lock,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_lock(*args)
+
def svn_client_unlock(*args):
    """
    svn_client_unlock(apr_array_header_t targets, svn_boolean_t break_lock,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_unlock(*args)
class svn_info_t:
    """Proxy of C svn_info_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_info_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_info_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_info_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["URL"] = _client.svn_info_t_URL_set
    __swig_getmethods__["URL"] = _client.svn_info_t_URL_get
    __swig_setmethods__["rev"] = _client.svn_info_t_rev_set
    __swig_getmethods__["rev"] = _client.svn_info_t_rev_get
    __swig_setmethods__["kind"] = _client.svn_info_t_kind_set
    __swig_getmethods__["kind"] = _client.svn_info_t_kind_get
    __swig_setmethods__["repos_root_URL"] = _client.svn_info_t_repos_root_URL_set
    __swig_getmethods__["repos_root_URL"] = _client.svn_info_t_repos_root_URL_get
    __swig_setmethods__["repos_UUID"] = _client.svn_info_t_repos_UUID_set
    __swig_getmethods__["repos_UUID"] = _client.svn_info_t_repos_UUID_get
    __swig_setmethods__["last_changed_rev"] = _client.svn_info_t_last_changed_rev_set
    __swig_getmethods__["last_changed_rev"] = _client.svn_info_t_last_changed_rev_get
    __swig_setmethods__["last_changed_date"] = _client.svn_info_t_last_changed_date_set
    __swig_getmethods__["last_changed_date"] = _client.svn_info_t_last_changed_date_get
    __swig_setmethods__["last_changed_author"] = _client.svn_info_t_last_changed_author_set
    __swig_getmethods__["last_changed_author"] = _client.svn_info_t_last_changed_author_get
    __swig_setmethods__["lock"] = _client.svn_info_t_lock_set
    __swig_getmethods__["lock"] = _client.svn_info_t_lock_get
    __swig_setmethods__["has_wc_info"] = _client.svn_info_t_has_wc_info_set
    __swig_getmethods__["has_wc_info"] = _client.svn_info_t_has_wc_info_get
    __swig_setmethods__["schedule"] = _client.svn_info_t_schedule_set
    __swig_getmethods__["schedule"] = _client.svn_info_t_schedule_get
    __swig_setmethods__["copyfrom_url"] = _client.svn_info_t_copyfrom_url_set
    __swig_getmethods__["copyfrom_url"] = _client.svn_info_t_copyfrom_url_get
    __swig_setmethods__["copyfrom_rev"] = _client.svn_info_t_copyfrom_rev_set
    __swig_getmethods__["copyfrom_rev"] = _client.svn_info_t_copyfrom_rev_get
    __swig_setmethods__["text_time"] = _client.svn_info_t_text_time_set
    __swig_getmethods__["text_time"] = _client.svn_info_t_text_time_get
    __swig_setmethods__["prop_time"] = _client.svn_info_t_prop_time_set
    __swig_getmethods__["prop_time"] = _client.svn_info_t_prop_time_get
    __swig_setmethods__["checksum"] = _client.svn_info_t_checksum_set
    __swig_getmethods__["checksum"] = _client.svn_info_t_checksum_get
    __swig_setmethods__["conflict_old"] = _client.svn_info_t_conflict_old_set
    __swig_getmethods__["conflict_old"] = _client.svn_info_t_conflict_old_get
    __swig_setmethods__["conflict_new"] = _client.svn_info_t_conflict_new_set
    __swig_getmethods__["conflict_new"] = _client.svn_info_t_conflict_new_get
    __swig_setmethods__["conflict_wrk"] = _client.svn_info_t_conflict_wrk_set
    __swig_getmethods__["conflict_wrk"] = _client.svn_info_t_conflict_wrk_get
    __swig_setmethods__["prejfile"] = _client.svn_info_t_prejfile_set
    __swig_getmethods__["prejfile"] = _client.svn_info_t_prejfile_get
    __swig_setmethods__["changelist"] = _client.svn_info_t_changelist_set
    __swig_getmethods__["changelist"] = _client.svn_info_t_changelist_get
    __swig_setmethods__["depth"] = _client.svn_info_t_depth_set
    __swig_getmethods__["depth"] = _client.svn_info_t_depth_get
    __swig_setmethods__["working_size"] = _client.svn_info_t_working_size_set
    __swig_getmethods__["working_size"] = _client.svn_info_t_working_size_get
    __swig_setmethods__["size"] = _client.svn_info_t_size_set
    __swig_getmethods__["size"] = _client.svn_info_t_size_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_info_t"""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_info_t"""
        # Direct call instead of the deprecated apply() builtin.
        _swig_setattr(self, svn_info_t, 'this', _client.new_svn_info_t(*args))
        _swig_setattr(self, svn_info_t, 'thisown', 1)
    def __del__(self, destroy=_client.delete_svn_info_t):
        """__del__(self)"""
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
class svn_info_tPtr(svn_info_t):
    # Thin wrapper that adopts an existing SWIG 'this' pointer (thisown stays
    # 0 so Python never frees the underlying C struct) and then re-classes the
    # instance as svn_info_t.
    def __init__(self, this):
        _swig_setattr(self, svn_info_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_info_t, 'thisown', 0)
        self.__class__ = svn_info_t
# Register the proxy class with the SWIG runtime type table.
_client.svn_info_t_swigregister(svn_info_tPtr)
+
+
def svn_info_dup(*args):
    """svn_info_dup(svn_info_t info, apr_pool_t pool) -> svn_info_t"""
    return _client.svn_info_dup(*args)
+
def svn_client_info2(*args):
    """
    svn_client_info2(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_info_receiver_t receiver,
        svn_depth_t depth, apr_array_header_t changelists,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_info2(*args)
+
def svn_client_info(*args):
    """
    svn_client_info(char path_or_url, svn_opt_revision_t peg_revision,
        svn_opt_revision_t revision, svn_info_receiver_t receiver,
        svn_boolean_t recurse, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_info(*args)
+
def svn_client_url_from_path(*args):
    """svn_client_url_from_path(char url, char path_or_url, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_url_from_path(*args)
+
def svn_client_root_url_from_path(*args):
    """svn_client_root_url_from_path(char url, char path_or_url, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_root_url_from_path(*args)
+
def svn_client_uuid_from_url(*args):
    """svn_client_uuid_from_url(char uuid, char url, svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t"""
    return _client.svn_client_uuid_from_url(*args)
+
def svn_client_uuid_from_path(*args):
    """
    svn_client_uuid_from_path(char uuid, char path, svn_wc_adm_access_t adm_access,
        svn_client_ctx_t ctx, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_uuid_from_path(*args)
+
def svn_client_open_ra_session(*args):
    """
    svn_client_open_ra_session(svn_ra_session_t session, char url, svn_client_ctx_t ctx,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_open_ra_session(*args)
+
def svn_proplist_invoke_receiver(*args):
    """
    svn_proplist_invoke_receiver(svn_proplist_receiver_t _obj, void baton, char path,
        apr_hash_t prop_hash, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_proplist_invoke_receiver(*args)
+
def svn_client_invoke_get_commit_log3(*args):
    """
    svn_client_invoke_get_commit_log3(svn_client_get_commit_log3_t _obj, char log_msg, char tmp_file,
        apr_array_header_t commit_items,
        void baton, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_get_commit_log3(*args)
+
def svn_client_invoke_get_commit_log2(*args):
    """
    svn_client_invoke_get_commit_log2(svn_client_get_commit_log2_t _obj, char log_msg, char tmp_file,
        apr_array_header_t commit_items,
        void baton, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_get_commit_log2(*args)
+
def svn_client_invoke_get_commit_log(*args):
    """
    svn_client_invoke_get_commit_log(svn_client_get_commit_log_t _obj, char log_msg, char tmp_file,
        apr_array_header_t commit_items,
        void baton, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_get_commit_log(*args)
+
def svn_client_invoke_blame_receiver2(*args):
    """
    svn_client_invoke_blame_receiver2(svn_client_blame_receiver2_t _obj, void baton, apr_int64_t line_no,
        svn_revnum_t revision, char author,
        char date, svn_revnum_t merged_revision,
        char merged_author, char merged_date, char merged_path,
        char line, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_blame_receiver2(*args)
+
def svn_client_invoke_blame_receiver(*args):
    """
    svn_client_invoke_blame_receiver(svn_client_blame_receiver_t _obj, void baton, apr_int64_t line_no,
        svn_revnum_t revision, char author,
        char date, char line, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_blame_receiver(*args)
+
def svn_client_invoke_diff_summarize_func(*args):
    """
    svn_client_invoke_diff_summarize_func(svn_client_diff_summarize_func_t _obj, svn_client_diff_summarize_t diff,
        void baton, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_diff_summarize_func(*args)
+
def svn_client_invoke_list_func(*args):
    """
    svn_client_invoke_list_func(svn_client_list_func_t _obj, void baton, char path,
        svn_dirent_t dirent, svn_lock_t lock, char abs_path,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_client_invoke_list_func(*args)
+
def svn_changelist_invoke_receiver(*args):
    """
    svn_changelist_invoke_receiver(svn_changelist_receiver_t _obj, void baton, char path,
        char changelist, apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_changelist_invoke_receiver(*args)
+
def svn_info_invoke_receiver(*args):
    """
    svn_info_invoke_receiver(svn_info_receiver_t _obj, void baton, char path, svn_info_t info,
        apr_pool_t pool) -> svn_error_t
    """
    return _client.svn_info_invoke_receiver(*args)
class svn_proplist_receiver_t:
    """Proxy of C svn_proplist_receiver_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_proplist_receiver_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_proplist_receiver_t, name)
    # Parenthesized raise: the `raise E, "msg"` form is Python-2-only syntax.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_proplist_receiver_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_proplist_receiver_t"""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      return svn_proplist_invoke_receiver(self, *args)
+
+
class svn_proplist_receiver_tPtr(svn_proplist_receiver_t):
    # Adopts an existing SWIG 'this' pointer without taking ownership, then
    # re-classes the instance as svn_proplist_receiver_t.
    def __init__(self, this):
        _swig_setattr(self, svn_proplist_receiver_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_proplist_receiver_t, 'thisown', 0)
        self.__class__ = svn_proplist_receiver_t
# Register the proxy class with the SWIG runtime type table.
_client.svn_proplist_receiver_t_swigregister(svn_proplist_receiver_tPtr)
+
class svn_client_get_commit_log3_t:
    """Proxy of C svn_client_get_commit_log3_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_get_commit_log3_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_get_commit_log3_t, name)
    # Parenthesized raise: the `raise E, "msg"` form is Python-2-only syntax.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_client_get_commit_log3_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_get_commit_log3_t"""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      return svn_client_invoke_get_commit_log3(self, *args)
+
+
class svn_client_get_commit_log3_tPtr(svn_client_get_commit_log3_t):
    # Adopts an existing SWIG 'this' pointer without taking ownership, then
    # re-classes the instance as svn_client_get_commit_log3_t.
    def __init__(self, this):
        _swig_setattr(self, svn_client_get_commit_log3_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_get_commit_log3_t, 'thisown', 0)
        self.__class__ = svn_client_get_commit_log3_t
# Register the proxy class with the SWIG runtime type table.
_client.svn_client_get_commit_log3_t_swigregister(svn_client_get_commit_log3_tPtr)
+
class svn_client_get_commit_log2_t:
    """Proxy of C svn_client_get_commit_log2_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_get_commit_log2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_get_commit_log2_t, name)
    # Parenthesized raise: the `raise E, "msg"` form is Python-2-only syntax.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_client_get_commit_log2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_get_commit_log2_t"""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      return svn_client_invoke_get_commit_log2(self, *args)
+
+
class svn_client_get_commit_log2_tPtr(svn_client_get_commit_log2_t):
    # Adopts an existing SWIG 'this' pointer without taking ownership, then
    # re-classes the instance as svn_client_get_commit_log2_t.
    def __init__(self, this):
        _swig_setattr(self, svn_client_get_commit_log2_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_get_commit_log2_t, 'thisown', 0)
        self.__class__ = svn_client_get_commit_log2_t
# Register the proxy class with the SWIG runtime type table.
_client.svn_client_get_commit_log2_t_swigregister(svn_client_get_commit_log2_tPtr)
+
class svn_client_get_commit_log_t:
    """Proxy of C svn_client_get_commit_log_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_get_commit_log_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_get_commit_log_t, name)
    # Parenthesized raise: the `raise E, "msg"` form is Python-2-only syntax.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_client_get_commit_log_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_get_commit_log_t"""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      return svn_client_invoke_get_commit_log(self, *args)
+
+
class svn_client_get_commit_log_tPtr(svn_client_get_commit_log_t):
    # Adopts an existing SWIG 'this' pointer without taking ownership, then
    # re-classes the instance as svn_client_get_commit_log_t.
    def __init__(self, this):
        _swig_setattr(self, svn_client_get_commit_log_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_get_commit_log_t, 'thisown', 0)
        self.__class__ = svn_client_get_commit_log_t
# Register the proxy class with the SWIG runtime type table.
_client.svn_client_get_commit_log_t_swigregister(svn_client_get_commit_log_tPtr)
+
class svn_client_blame_receiver2_t:
    """Proxy of C svn_client_blame_receiver2_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_blame_receiver2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_blame_receiver2_t, name)
    # Parenthesized raise: the `raise E, "msg"` form is Python-2-only syntax.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_client_blame_receiver2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_client_blame_receiver2_t"""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      return svn_client_invoke_blame_receiver2(self, *args)
+
+
+class svn_client_blame_receiver2_tPtr(svn_client_blame_receiver2_t):  # legacy SWIG 1.3 "Ptr" alias class
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_blame_receiver2_t, 'this', this)  # adopt the C pointer
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_blame_receiver2_t, 'thisown', 0)  # default: C side owns the memory
+        self.__class__ = svn_client_blame_receiver2_t  # rebrand the instance as the real proxy class
+_client.svn_client_blame_receiver2_t_swigregister(svn_client_blame_receiver2_tPtr)  # register the Ptr class with the C module
+
+class svn_client_blame_receiver_t:  # SWIG proxy for the C callback type; instances are callable
+    """Proxy of C svn_client_blame_receiver_t struct"""
+    __swig_setmethods__ = {}  # attr name -> C setter, populated by SWIG
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_blame_receiver_t, name, value)  # shadowed by the def __setattr__ below
+    __swig_getmethods__ = {}  # attr name -> C getter, populated by SWIG
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_blame_receiver_t, name)  # shadowed by the def __getattr__ below
+    def __init__(self): raise RuntimeError, "No constructor defined"  # instances are only created from C
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_blame_receiver_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_blame_receiver_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)  # weakref lets assert_valid detect pool destruction
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)  # fresh wrapper from the C getter
+
+
+
+      members = self.__dict__.get("_members")  # Python-side state cached by __setattr__
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)  # carry cached state onto the new wrapper
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value  # remember for __getattr__'s state merge
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_client_invoke_blame_receiver(self, *args)  # invoke the underlying C callback
+
+
+class svn_client_blame_receiver_tPtr(svn_client_blame_receiver_t):  # legacy SWIG 1.3 "Ptr" alias class
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_blame_receiver_t, 'this', this)  # adopt the C pointer
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_blame_receiver_t, 'thisown', 0)  # default: C side owns the memory
+        self.__class__ = svn_client_blame_receiver_t  # rebrand the instance as the real proxy class
+_client.svn_client_blame_receiver_t_swigregister(svn_client_blame_receiver_tPtr)  # register the Ptr class with the C module
+
+class svn_client_diff_summarize_func_t:  # SWIG proxy for the C callback type; instances are callable
+    """Proxy of C svn_client_diff_summarize_func_t struct"""
+    __swig_setmethods__ = {}  # attr name -> C setter, populated by SWIG
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_diff_summarize_func_t, name, value)  # shadowed by the def __setattr__ below
+    __swig_getmethods__ = {}  # attr name -> C getter, populated by SWIG
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_diff_summarize_func_t, name)  # shadowed by the def __getattr__ below
+    def __init__(self): raise RuntimeError, "No constructor defined"  # instances are only created from C
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_diff_summarize_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_diff_summarize_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)  # weakref lets assert_valid detect pool destruction
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)  # fresh wrapper from the C getter
+
+
+
+      members = self.__dict__.get("_members")  # Python-side state cached by __setattr__
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)  # carry cached state onto the new wrapper
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value  # remember for __getattr__'s state merge
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_client_invoke_diff_summarize_func(self, *args)  # invoke the underlying C callback
+
+
+class svn_client_diff_summarize_func_tPtr(svn_client_diff_summarize_func_t):  # legacy SWIG 1.3 "Ptr" alias class
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_diff_summarize_func_t, 'this', this)  # adopt the C pointer
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_diff_summarize_func_t, 'thisown', 0)  # default: C side owns the memory
+        self.__class__ = svn_client_diff_summarize_func_t  # rebrand the instance as the real proxy class
+_client.svn_client_diff_summarize_func_t_swigregister(svn_client_diff_summarize_func_tPtr)  # register the Ptr class with the C module
+
+class svn_client_list_func_t:  # SWIG proxy for the C callback type; instances are callable
+    """Proxy of C svn_client_list_func_t struct"""
+    __swig_setmethods__ = {}  # attr name -> C setter, populated by SWIG
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_client_list_func_t, name, value)  # shadowed by the def __setattr__ below
+    __swig_getmethods__ = {}  # attr name -> C getter, populated by SWIG
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_client_list_func_t, name)  # shadowed by the def __getattr__ below
+    def __init__(self): raise RuntimeError, "No constructor defined"  # instances are only created from C
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_client_list_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_client_list_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)  # weakref lets assert_valid detect pool destruction
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)  # fresh wrapper from the C getter
+
+
+
+      members = self.__dict__.get("_members")  # Python-side state cached by __setattr__
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)  # carry cached state onto the new wrapper
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value  # remember for __getattr__'s state merge
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_client_invoke_list_func(self, *args)  # invoke the underlying C callback
+
+
+class svn_client_list_func_tPtr(svn_client_list_func_t):  # legacy SWIG 1.3 "Ptr" alias class
+    def __init__(self, this):
+        _swig_setattr(self, svn_client_list_func_t, 'this', this)  # adopt the C pointer
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_client_list_func_t, 'thisown', 0)  # default: C side owns the memory
+        self.__class__ = svn_client_list_func_t  # rebrand the instance as the real proxy class
+_client.svn_client_list_func_t_swigregister(svn_client_list_func_tPtr)  # register the Ptr class with the C module
+
+class svn_changelist_receiver_t:  # SWIG proxy for the C callback type; instances are callable
+    """Proxy of C svn_changelist_receiver_t struct"""
+    __swig_setmethods__ = {}  # attr name -> C setter, populated by SWIG
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_changelist_receiver_t, name, value)  # shadowed by the def __setattr__ below
+    __swig_getmethods__ = {}  # attr name -> C getter, populated by SWIG
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_changelist_receiver_t, name)  # shadowed by the def __getattr__ below
+    def __init__(self): raise RuntimeError, "No constructor defined"  # instances are only created from C
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_changelist_receiver_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_changelist_receiver_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)  # weakref lets assert_valid detect pool destruction
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)  # fresh wrapper from the C getter
+
+
+
+      members = self.__dict__.get("_members")  # Python-side state cached by __setattr__
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)  # carry cached state onto the new wrapper
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value  # remember for __getattr__'s state merge
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_changelist_invoke_receiver(self, *args)  # invoke the underlying C callback
+
+
+class svn_changelist_receiver_tPtr(svn_changelist_receiver_t):  # legacy SWIG 1.3 "Ptr" alias class
+    def __init__(self, this):
+        _swig_setattr(self, svn_changelist_receiver_t, 'this', this)  # adopt the C pointer
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_changelist_receiver_t, 'thisown', 0)  # default: C side owns the memory
+        self.__class__ = svn_changelist_receiver_t  # rebrand the instance as the real proxy class
+_client.svn_changelist_receiver_t_swigregister(svn_changelist_receiver_tPtr)  # register the Ptr class with the C module
+
+class svn_info_receiver_t:  # SWIG proxy for the C callback type; instances are callable
+    """Proxy of C svn_info_receiver_t struct"""
+    __swig_setmethods__ = {}  # attr name -> C setter, populated by SWIG
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_info_receiver_t, name, value)  # shadowed by the def __setattr__ below
+    __swig_getmethods__ = {}  # attr name -> C getter, populated by SWIG
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_info_receiver_t, name)  # shadowed by the def __getattr__ below
+    def __init__(self): raise RuntimeError, "No constructor defined"  # instances are only created from C
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_info_receiver_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_info_receiver_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)  # weakref lets assert_valid detect pool destruction
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)  # fresh wrapper from the C getter
+
+
+
+      members = self.__dict__.get("_members")  # Python-side state cached by __setattr__
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)  # carry cached state onto the new wrapper
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value  # remember for __getattr__'s state merge
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_info_invoke_receiver(self, *args)  # invoke the underlying C callback
+
+
+class svn_info_receiver_tPtr(svn_info_receiver_t):  # legacy SWIG 1.3 "Ptr" alias class
+    def __init__(self, this):
+        _swig_setattr(self, svn_info_receiver_t, 'this', this)  # adopt the C pointer
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_info_receiver_t, 'thisown', 0)  # default: C side owns the memory
+        self.__class__ = svn_info_receiver_t  # rebrand the instance as the real proxy class
+_client.svn_info_receiver_t_swigregister(svn_info_receiver_tPtr)  # register the Ptr class with the C module
+
+SWIG_SVN_INFO_SIZE_UNKNOWN = _client.SWIG_SVN_INFO_SIZE_UNKNOWN  # constants and C thunks re-exported from the extension module
+svn_swig_py_cancel_func = _client.svn_swig_py_cancel_func
+svn_swig_py_get_commit_log_func = _client.svn_swig_py_get_commit_log_func
+svn_swig_py_notify_func = _client.svn_swig_py_notify_func
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/core.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/core.py
new file mode 100644
index 0000000..cae527a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/core.py
@@ -0,0 +1,5427 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+
+import _core
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):  # route attribute writes through SWIG's setter table
+    if (name == "this"):  # special case: adopting another wrapper's C pointer
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown  # ownership flag moves to this wrapper
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)  # a C-level setter takes precedence
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)  # static mode forbids new attributes
+
+def _swig_setattr(self,class_type,name,value):
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)  # static=0: new attributes are allowed
+
+def _swig_getattr(self,class_type,name):
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)  # call the C-level getter when one is registered
+    raise AttributeError,name  # Python 2 raise syntax; this file targets 2.x
+
+SWIG_SVN_INVALID_REVNUM = _core.SWIG_SVN_INVALID_REVNUM  # revision-number constants re-exported from the C module
+SWIG_SVN_IGNORED_REVNUM = _core.SWIG_SVN_IGNORED_REVNUM
+
+def apr_initialize(*args):
+    """apr_initialize() -> apr_status_t"""
+    return apply(_core.apr_initialize, args)  # thin wrapper over the C function (Python 2 apply)
+
+def apr_terminate(*args):
+    """apr_terminate()"""
+    return apply(_core.apr_terminate, args)  # thin wrapper over the C function
+
+def apr_time_ansi_put(*args):
+    """apr_time_ansi_put(apr_time_t result, time_t input) -> apr_status_t"""
+    return apply(_core.apr_time_ansi_put, args)  # thin wrapper over the C function
+
+def apr_pool_destroy(*args):
+    """apr_pool_destroy(apr_pool_t p)"""
+    return apply(_core.apr_pool_destroy, args)  # thin wrapper over the C function
+
+def apr_pool_clear(*args):
+    """apr_pool_clear(apr_pool_t p)"""
+    return apply(_core.apr_pool_clear, args)  # thin wrapper over the C function
+
+def apr_file_open_stdout(*args):
+    """apr_file_open_stdout(apr_file_t out, apr_pool_t pool) -> apr_status_t"""
+    return apply(_core.apr_file_open_stdout, args)  # thin wrapper over the C function
+
+def apr_file_open_stderr(*args):
+    """apr_file_open_stderr(apr_file_t out, apr_pool_t pool) -> apr_status_t"""
+    return apply(_core.apr_file_open_stderr, args)  # thin wrapper over the C function
+SVN_ERR_CATEGORY_SIZE = _core.SVN_ERR_CATEGORY_SIZE
+SVN_WARNING = _core.SVN_WARNING
+SVN_ERR_BAD_CONTAINING_POOL = _core.SVN_ERR_BAD_CONTAINING_POOL
+SVN_ERR_BAD_FILENAME = _core.SVN_ERR_BAD_FILENAME
+SVN_ERR_BAD_URL = _core.SVN_ERR_BAD_URL
+SVN_ERR_BAD_DATE = _core.SVN_ERR_BAD_DATE
+SVN_ERR_BAD_MIME_TYPE = _core.SVN_ERR_BAD_MIME_TYPE
+SVN_ERR_BAD_PROPERTY_VALUE = _core.SVN_ERR_BAD_PROPERTY_VALUE
+SVN_ERR_BAD_VERSION_FILE_FORMAT = _core.SVN_ERR_BAD_VERSION_FILE_FORMAT
+SVN_ERR_BAD_RELATIVE_PATH = _core.SVN_ERR_BAD_RELATIVE_PATH
+SVN_ERR_BAD_UUID = _core.SVN_ERR_BAD_UUID
+SVN_ERR_XML_ATTRIB_NOT_FOUND = _core.SVN_ERR_XML_ATTRIB_NOT_FOUND
+SVN_ERR_XML_MISSING_ANCESTRY = _core.SVN_ERR_XML_MISSING_ANCESTRY
+SVN_ERR_XML_UNKNOWN_ENCODING = _core.SVN_ERR_XML_UNKNOWN_ENCODING
+SVN_ERR_XML_MALFORMED = _core.SVN_ERR_XML_MALFORMED
+SVN_ERR_XML_UNESCAPABLE_DATA = _core.SVN_ERR_XML_UNESCAPABLE_DATA
+SVN_ERR_IO_INCONSISTENT_EOL = _core.SVN_ERR_IO_INCONSISTENT_EOL
+SVN_ERR_IO_UNKNOWN_EOL = _core.SVN_ERR_IO_UNKNOWN_EOL
+SVN_ERR_IO_CORRUPT_EOL = _core.SVN_ERR_IO_CORRUPT_EOL
+SVN_ERR_IO_UNIQUE_NAMES_EXHAUSTED = _core.SVN_ERR_IO_UNIQUE_NAMES_EXHAUSTED
+SVN_ERR_IO_PIPE_FRAME_ERROR = _core.SVN_ERR_IO_PIPE_FRAME_ERROR
+SVN_ERR_IO_PIPE_READ_ERROR = _core.SVN_ERR_IO_PIPE_READ_ERROR
+SVN_ERR_IO_WRITE_ERROR = _core.SVN_ERR_IO_WRITE_ERROR
+SVN_ERR_STREAM_UNEXPECTED_EOF = _core.SVN_ERR_STREAM_UNEXPECTED_EOF
+SVN_ERR_STREAM_MALFORMED_DATA = _core.SVN_ERR_STREAM_MALFORMED_DATA
+SVN_ERR_STREAM_UNRECOGNIZED_DATA = _core.SVN_ERR_STREAM_UNRECOGNIZED_DATA
+SVN_ERR_NODE_UNKNOWN_KIND = _core.SVN_ERR_NODE_UNKNOWN_KIND
+SVN_ERR_NODE_UNEXPECTED_KIND = _core.SVN_ERR_NODE_UNEXPECTED_KIND
+SVN_ERR_ENTRY_NOT_FOUND = _core.SVN_ERR_ENTRY_NOT_FOUND
+SVN_ERR_ENTRY_EXISTS = _core.SVN_ERR_ENTRY_EXISTS
+SVN_ERR_ENTRY_MISSING_REVISION = _core.SVN_ERR_ENTRY_MISSING_REVISION
+SVN_ERR_ENTRY_MISSING_URL = _core.SVN_ERR_ENTRY_MISSING_URL
+SVN_ERR_ENTRY_ATTRIBUTE_INVALID = _core.SVN_ERR_ENTRY_ATTRIBUTE_INVALID
+SVN_ERR_WC_OBSTRUCTED_UPDATE = _core.SVN_ERR_WC_OBSTRUCTED_UPDATE
+SVN_ERR_WC_UNWIND_MISMATCH = _core.SVN_ERR_WC_UNWIND_MISMATCH
+SVN_ERR_WC_UNWIND_EMPTY = _core.SVN_ERR_WC_UNWIND_EMPTY
+SVN_ERR_WC_UNWIND_NOT_EMPTY = _core.SVN_ERR_WC_UNWIND_NOT_EMPTY
+SVN_ERR_WC_LOCKED = _core.SVN_ERR_WC_LOCKED
+SVN_ERR_WC_NOT_LOCKED = _core.SVN_ERR_WC_NOT_LOCKED
+SVN_ERR_WC_INVALID_LOCK = _core.SVN_ERR_WC_INVALID_LOCK
+SVN_ERR_WC_NOT_DIRECTORY = _core.SVN_ERR_WC_NOT_DIRECTORY
+SVN_ERR_WC_NOT_FILE = _core.SVN_ERR_WC_NOT_FILE
+SVN_ERR_WC_BAD_ADM_LOG = _core.SVN_ERR_WC_BAD_ADM_LOG
+SVN_ERR_WC_PATH_NOT_FOUND = _core.SVN_ERR_WC_PATH_NOT_FOUND
+SVN_ERR_WC_NOT_UP_TO_DATE = _core.SVN_ERR_WC_NOT_UP_TO_DATE
+SVN_ERR_WC_LEFT_LOCAL_MOD = _core.SVN_ERR_WC_LEFT_LOCAL_MOD
+SVN_ERR_WC_SCHEDULE_CONFLICT = _core.SVN_ERR_WC_SCHEDULE_CONFLICT
+SVN_ERR_WC_PATH_FOUND = _core.SVN_ERR_WC_PATH_FOUND
+SVN_ERR_WC_FOUND_CONFLICT = _core.SVN_ERR_WC_FOUND_CONFLICT
+SVN_ERR_WC_CORRUPT = _core.SVN_ERR_WC_CORRUPT
+SVN_ERR_WC_CORRUPT_TEXT_BASE = _core.SVN_ERR_WC_CORRUPT_TEXT_BASE
+SVN_ERR_WC_NODE_KIND_CHANGE = _core.SVN_ERR_WC_NODE_KIND_CHANGE
+SVN_ERR_WC_INVALID_OP_ON_CWD = _core.SVN_ERR_WC_INVALID_OP_ON_CWD
+SVN_ERR_WC_BAD_ADM_LOG_START = _core.SVN_ERR_WC_BAD_ADM_LOG_START
+SVN_ERR_WC_UNSUPPORTED_FORMAT = _core.SVN_ERR_WC_UNSUPPORTED_FORMAT
+SVN_ERR_WC_BAD_PATH = _core.SVN_ERR_WC_BAD_PATH
+SVN_ERR_WC_INVALID_SCHEDULE = _core.SVN_ERR_WC_INVALID_SCHEDULE
+SVN_ERR_WC_INVALID_RELOCATION = _core.SVN_ERR_WC_INVALID_RELOCATION
+SVN_ERR_WC_INVALID_SWITCH = _core.SVN_ERR_WC_INVALID_SWITCH
+SVN_ERR_WC_MISMATCHED_CHANGELIST = _core.SVN_ERR_WC_MISMATCHED_CHANGELIST
+SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE = _core.SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE
+SVN_ERR_WC_COPYFROM_PATH_NOT_FOUND = _core.SVN_ERR_WC_COPYFROM_PATH_NOT_FOUND
+SVN_ERR_WC_CHANGELIST_MOVE = _core.SVN_ERR_WC_CHANGELIST_MOVE
+SVN_ERR_FS_GENERAL = _core.SVN_ERR_FS_GENERAL
+SVN_ERR_FS_CLEANUP = _core.SVN_ERR_FS_CLEANUP
+SVN_ERR_FS_ALREADY_OPEN = _core.SVN_ERR_FS_ALREADY_OPEN
+SVN_ERR_FS_NOT_OPEN = _core.SVN_ERR_FS_NOT_OPEN
+SVN_ERR_FS_CORRUPT = _core.SVN_ERR_FS_CORRUPT
+SVN_ERR_FS_PATH_SYNTAX = _core.SVN_ERR_FS_PATH_SYNTAX
+SVN_ERR_FS_NO_SUCH_REVISION = _core.SVN_ERR_FS_NO_SUCH_REVISION
+SVN_ERR_FS_NO_SUCH_TRANSACTION = _core.SVN_ERR_FS_NO_SUCH_TRANSACTION
+SVN_ERR_FS_NO_SUCH_ENTRY = _core.SVN_ERR_FS_NO_SUCH_ENTRY
+SVN_ERR_FS_NO_SUCH_REPRESENTATION = _core.SVN_ERR_FS_NO_SUCH_REPRESENTATION
+SVN_ERR_FS_NO_SUCH_STRING = _core.SVN_ERR_FS_NO_SUCH_STRING
+SVN_ERR_FS_NO_SUCH_COPY = _core.SVN_ERR_FS_NO_SUCH_COPY
+SVN_ERR_FS_TRANSACTION_NOT_MUTABLE = _core.SVN_ERR_FS_TRANSACTION_NOT_MUTABLE
+SVN_ERR_FS_NOT_FOUND = _core.SVN_ERR_FS_NOT_FOUND
+SVN_ERR_FS_ID_NOT_FOUND = _core.SVN_ERR_FS_ID_NOT_FOUND
+SVN_ERR_FS_NOT_ID = _core.SVN_ERR_FS_NOT_ID
+SVN_ERR_FS_NOT_DIRECTORY = _core.SVN_ERR_FS_NOT_DIRECTORY
+SVN_ERR_FS_NOT_FILE = _core.SVN_ERR_FS_NOT_FILE
+SVN_ERR_FS_NOT_SINGLE_PATH_COMPONENT = _core.SVN_ERR_FS_NOT_SINGLE_PATH_COMPONENT
+SVN_ERR_FS_NOT_MUTABLE = _core.SVN_ERR_FS_NOT_MUTABLE
+SVN_ERR_FS_ALREADY_EXISTS = _core.SVN_ERR_FS_ALREADY_EXISTS
+SVN_ERR_FS_ROOT_DIR = _core.SVN_ERR_FS_ROOT_DIR
+SVN_ERR_FS_NOT_TXN_ROOT = _core.SVN_ERR_FS_NOT_TXN_ROOT
+SVN_ERR_FS_NOT_REVISION_ROOT = _core.SVN_ERR_FS_NOT_REVISION_ROOT
+SVN_ERR_FS_CONFLICT = _core.SVN_ERR_FS_CONFLICT
+SVN_ERR_FS_REP_CHANGED = _core.SVN_ERR_FS_REP_CHANGED
+SVN_ERR_FS_REP_NOT_MUTABLE = _core.SVN_ERR_FS_REP_NOT_MUTABLE
+SVN_ERR_FS_MALFORMED_SKEL = _core.SVN_ERR_FS_MALFORMED_SKEL
+SVN_ERR_FS_TXN_OUT_OF_DATE = _core.SVN_ERR_FS_TXN_OUT_OF_DATE
+SVN_ERR_FS_BERKELEY_DB = _core.SVN_ERR_FS_BERKELEY_DB
+SVN_ERR_FS_BERKELEY_DB_DEADLOCK = _core.SVN_ERR_FS_BERKELEY_DB_DEADLOCK
+SVN_ERR_FS_TRANSACTION_DEAD = _core.SVN_ERR_FS_TRANSACTION_DEAD
+SVN_ERR_FS_TRANSACTION_NOT_DEAD = _core.SVN_ERR_FS_TRANSACTION_NOT_DEAD
+SVN_ERR_FS_UNKNOWN_FS_TYPE = _core.SVN_ERR_FS_UNKNOWN_FS_TYPE
+SVN_ERR_FS_NO_USER = _core.SVN_ERR_FS_NO_USER
+SVN_ERR_FS_PATH_ALREADY_LOCKED = _core.SVN_ERR_FS_PATH_ALREADY_LOCKED
+SVN_ERR_FS_PATH_NOT_LOCKED = _core.SVN_ERR_FS_PATH_NOT_LOCKED
+SVN_ERR_FS_BAD_LOCK_TOKEN = _core.SVN_ERR_FS_BAD_LOCK_TOKEN
+SVN_ERR_FS_NO_LOCK_TOKEN = _core.SVN_ERR_FS_NO_LOCK_TOKEN
+SVN_ERR_FS_LOCK_OWNER_MISMATCH = _core.SVN_ERR_FS_LOCK_OWNER_MISMATCH
+SVN_ERR_FS_NO_SUCH_LOCK = _core.SVN_ERR_FS_NO_SUCH_LOCK
+SVN_ERR_FS_LOCK_EXPIRED = _core.SVN_ERR_FS_LOCK_EXPIRED
+SVN_ERR_FS_OUT_OF_DATE = _core.SVN_ERR_FS_OUT_OF_DATE
+SVN_ERR_FS_UNSUPPORTED_FORMAT = _core.SVN_ERR_FS_UNSUPPORTED_FORMAT
+SVN_ERR_FS_REP_BEING_WRITTEN = _core.SVN_ERR_FS_REP_BEING_WRITTEN
+SVN_ERR_FS_TXN_NAME_TOO_LONG = _core.SVN_ERR_FS_TXN_NAME_TOO_LONG
+SVN_ERR_FS_NO_SUCH_NODE_ORIGIN = _core.SVN_ERR_FS_NO_SUCH_NODE_ORIGIN
+SVN_ERR_FS_UNSUPPORTED_UPGRADE = _core.SVN_ERR_FS_UNSUPPORTED_UPGRADE
+SVN_ERR_REPOS_LOCKED = _core.SVN_ERR_REPOS_LOCKED
+SVN_ERR_REPOS_HOOK_FAILURE = _core.SVN_ERR_REPOS_HOOK_FAILURE
+SVN_ERR_REPOS_BAD_ARGS = _core.SVN_ERR_REPOS_BAD_ARGS
+SVN_ERR_REPOS_NO_DATA_FOR_REPORT = _core.SVN_ERR_REPOS_NO_DATA_FOR_REPORT
+SVN_ERR_REPOS_BAD_REVISION_REPORT = _core.SVN_ERR_REPOS_BAD_REVISION_REPORT
+SVN_ERR_REPOS_UNSUPPORTED_VERSION = _core.SVN_ERR_REPOS_UNSUPPORTED_VERSION
+SVN_ERR_REPOS_DISABLED_FEATURE = _core.SVN_ERR_REPOS_DISABLED_FEATURE
+SVN_ERR_REPOS_POST_COMMIT_HOOK_FAILED = _core.SVN_ERR_REPOS_POST_COMMIT_HOOK_FAILED
+SVN_ERR_REPOS_POST_LOCK_HOOK_FAILED = _core.SVN_ERR_REPOS_POST_LOCK_HOOK_FAILED
+SVN_ERR_REPOS_POST_UNLOCK_HOOK_FAILED = _core.SVN_ERR_REPOS_POST_UNLOCK_HOOK_FAILED
+SVN_ERR_REPOS_UNSUPPORTED_UPGRADE = _core.SVN_ERR_REPOS_UNSUPPORTED_UPGRADE
+SVN_ERR_RA_ILLEGAL_URL = _core.SVN_ERR_RA_ILLEGAL_URL
+SVN_ERR_RA_NOT_AUTHORIZED = _core.SVN_ERR_RA_NOT_AUTHORIZED
+SVN_ERR_RA_UNKNOWN_AUTH = _core.SVN_ERR_RA_UNKNOWN_AUTH
+SVN_ERR_RA_NOT_IMPLEMENTED = _core.SVN_ERR_RA_NOT_IMPLEMENTED
+SVN_ERR_RA_OUT_OF_DATE = _core.SVN_ERR_RA_OUT_OF_DATE
+SVN_ERR_RA_NO_REPOS_UUID = _core.SVN_ERR_RA_NO_REPOS_UUID
+SVN_ERR_RA_UNSUPPORTED_ABI_VERSION = _core.SVN_ERR_RA_UNSUPPORTED_ABI_VERSION
+SVN_ERR_RA_NOT_LOCKED = _core.SVN_ERR_RA_NOT_LOCKED
+SVN_ERR_RA_PARTIAL_REPLAY_NOT_SUPPORTED = _core.SVN_ERR_RA_PARTIAL_REPLAY_NOT_SUPPORTED
+SVN_ERR_RA_UUID_MISMATCH = _core.SVN_ERR_RA_UUID_MISMATCH
+SVN_ERR_RA_DAV_SOCK_INIT = _core.SVN_ERR_RA_DAV_SOCK_INIT
+SVN_ERR_RA_DAV_CREATING_REQUEST = _core.SVN_ERR_RA_DAV_CREATING_REQUEST
+SVN_ERR_RA_DAV_REQUEST_FAILED = _core.SVN_ERR_RA_DAV_REQUEST_FAILED
+SVN_ERR_RA_DAV_OPTIONS_REQ_FAILED = _core.SVN_ERR_RA_DAV_OPTIONS_REQ_FAILED
+SVN_ERR_RA_DAV_PROPS_NOT_FOUND = _core.SVN_ERR_RA_DAV_PROPS_NOT_FOUND
+SVN_ERR_RA_DAV_ALREADY_EXISTS = _core.SVN_ERR_RA_DAV_ALREADY_EXISTS
+SVN_ERR_RA_DAV_INVALID_CONFIG_VALUE = _core.SVN_ERR_RA_DAV_INVALID_CONFIG_VALUE
+SVN_ERR_RA_DAV_PATH_NOT_FOUND = _core.SVN_ERR_RA_DAV_PATH_NOT_FOUND
+SVN_ERR_RA_DAV_PROPPATCH_FAILED = _core.SVN_ERR_RA_DAV_PROPPATCH_FAILED
+SVN_ERR_RA_DAV_MALFORMED_DATA = _core.SVN_ERR_RA_DAV_MALFORMED_DATA
+SVN_ERR_RA_DAV_RESPONSE_HEADER_BADNESS = _core.SVN_ERR_RA_DAV_RESPONSE_HEADER_BADNESS
+SVN_ERR_RA_DAV_RELOCATED = _core.SVN_ERR_RA_DAV_RELOCATED
+SVN_ERR_RA_LOCAL_REPOS_NOT_FOUND = _core.SVN_ERR_RA_LOCAL_REPOS_NOT_FOUND
+SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED = _core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED
+SVN_ERR_RA_SVN_CMD_ERR = _core.SVN_ERR_RA_SVN_CMD_ERR
+SVN_ERR_RA_SVN_UNKNOWN_CMD = _core.SVN_ERR_RA_SVN_UNKNOWN_CMD
+SVN_ERR_RA_SVN_CONNECTION_CLOSED = _core.SVN_ERR_RA_SVN_CONNECTION_CLOSED
+SVN_ERR_RA_SVN_IO_ERROR = _core.SVN_ERR_RA_SVN_IO_ERROR
+SVN_ERR_RA_SVN_MALFORMED_DATA = _core.SVN_ERR_RA_SVN_MALFORMED_DATA
+SVN_ERR_RA_SVN_REPOS_NOT_FOUND = _core.SVN_ERR_RA_SVN_REPOS_NOT_FOUND
+SVN_ERR_RA_SVN_BAD_VERSION = _core.SVN_ERR_RA_SVN_BAD_VERSION
+SVN_ERR_RA_SVN_NO_MECHANISMS = _core.SVN_ERR_RA_SVN_NO_MECHANISMS
+SVN_ERR_RA_SERF_SSPI_INITIALISATION_FAILED = _core.SVN_ERR_RA_SERF_SSPI_INITIALISATION_FAILED
+SVN_ERR_RA_SERF_SSL_CERT_UNTRUSTED = _core.SVN_ERR_RA_SERF_SSL_CERT_UNTRUSTED
+SVN_ERR_AUTHN_CREDS_UNAVAILABLE = _core.SVN_ERR_AUTHN_CREDS_UNAVAILABLE
+SVN_ERR_AUTHN_NO_PROVIDER = _core.SVN_ERR_AUTHN_NO_PROVIDER
+SVN_ERR_AUTHN_PROVIDERS_EXHAUSTED = _core.SVN_ERR_AUTHN_PROVIDERS_EXHAUSTED
+SVN_ERR_AUTHN_CREDS_NOT_SAVED = _core.SVN_ERR_AUTHN_CREDS_NOT_SAVED
+SVN_ERR_AUTHN_FAILED = _core.SVN_ERR_AUTHN_FAILED
+SVN_ERR_AUTHZ_ROOT_UNREADABLE = _core.SVN_ERR_AUTHZ_ROOT_UNREADABLE
+SVN_ERR_AUTHZ_UNREADABLE = _core.SVN_ERR_AUTHZ_UNREADABLE
+SVN_ERR_AUTHZ_PARTIALLY_READABLE = _core.SVN_ERR_AUTHZ_PARTIALLY_READABLE
+SVN_ERR_AUTHZ_INVALID_CONFIG = _core.SVN_ERR_AUTHZ_INVALID_CONFIG
+SVN_ERR_AUTHZ_UNWRITABLE = _core.SVN_ERR_AUTHZ_UNWRITABLE
+SVN_ERR_SVNDIFF_INVALID_HEADER = _core.SVN_ERR_SVNDIFF_INVALID_HEADER
+SVN_ERR_SVNDIFF_CORRUPT_WINDOW = _core.SVN_ERR_SVNDIFF_CORRUPT_WINDOW
+SVN_ERR_SVNDIFF_BACKWARD_VIEW = _core.SVN_ERR_SVNDIFF_BACKWARD_VIEW
+SVN_ERR_SVNDIFF_INVALID_OPS = _core.SVN_ERR_SVNDIFF_INVALID_OPS
+SVN_ERR_SVNDIFF_UNEXPECTED_END = _core.SVN_ERR_SVNDIFF_UNEXPECTED_END
+SVN_ERR_SVNDIFF_INVALID_COMPRESSED_DATA = _core.SVN_ERR_SVNDIFF_INVALID_COMPRESSED_DATA
+SVN_ERR_DIFF_DATASOURCE_MODIFIED = _core.SVN_ERR_DIFF_DATASOURCE_MODIFIED
+SVN_ERR_APMOD_MISSING_PATH_TO_FS = _core.SVN_ERR_APMOD_MISSING_PATH_TO_FS
+SVN_ERR_APMOD_MALFORMED_URI = _core.SVN_ERR_APMOD_MALFORMED_URI
+SVN_ERR_APMOD_ACTIVITY_NOT_FOUND = _core.SVN_ERR_APMOD_ACTIVITY_NOT_FOUND
+SVN_ERR_APMOD_BAD_BASELINE = _core.SVN_ERR_APMOD_BAD_BASELINE
+SVN_ERR_APMOD_CONNECTION_ABORTED = _core.SVN_ERR_APMOD_CONNECTION_ABORTED
+SVN_ERR_CLIENT_VERSIONED_PATH_REQUIRED = _core.SVN_ERR_CLIENT_VERSIONED_PATH_REQUIRED
+SVN_ERR_CLIENT_RA_ACCESS_REQUIRED = _core.SVN_ERR_CLIENT_RA_ACCESS_REQUIRED
+SVN_ERR_CLIENT_BAD_REVISION = _core.SVN_ERR_CLIENT_BAD_REVISION
+SVN_ERR_CLIENT_DUPLICATE_COMMIT_URL = _core.SVN_ERR_CLIENT_DUPLICATE_COMMIT_URL
+SVN_ERR_CLIENT_IS_BINARY_FILE = _core.SVN_ERR_CLIENT_IS_BINARY_FILE
+SVN_ERR_CLIENT_INVALID_EXTERNALS_DESCRIPTION = _core.SVN_ERR_CLIENT_INVALID_EXTERNALS_DESCRIPTION
+SVN_ERR_CLIENT_MODIFIED = _core.SVN_ERR_CLIENT_MODIFIED
+SVN_ERR_CLIENT_IS_DIRECTORY = _core.SVN_ERR_CLIENT_IS_DIRECTORY
+SVN_ERR_CLIENT_REVISION_RANGE = _core.SVN_ERR_CLIENT_REVISION_RANGE
+SVN_ERR_CLIENT_INVALID_RELOCATION = _core.SVN_ERR_CLIENT_INVALID_RELOCATION
+SVN_ERR_CLIENT_REVISION_AUTHOR_CONTAINS_NEWLINE = _core.SVN_ERR_CLIENT_REVISION_AUTHOR_CONTAINS_NEWLINE
+SVN_ERR_CLIENT_PROPERTY_NAME = _core.SVN_ERR_CLIENT_PROPERTY_NAME
+SVN_ERR_CLIENT_UNRELATED_RESOURCES = _core.SVN_ERR_CLIENT_UNRELATED_RESOURCES
+SVN_ERR_CLIENT_MISSING_LOCK_TOKEN = _core.SVN_ERR_CLIENT_MISSING_LOCK_TOKEN
+SVN_ERR_CLIENT_MULTIPLE_SOURCES_DISALLOWED = _core.SVN_ERR_CLIENT_MULTIPLE_SOURCES_DISALLOWED
+SVN_ERR_CLIENT_NO_VERSIONED_PARENT = _core.SVN_ERR_CLIENT_NO_VERSIONED_PARENT
+SVN_ERR_CLIENT_NOT_READY_TO_MERGE = _core.SVN_ERR_CLIENT_NOT_READY_TO_MERGE
+SVN_ERR_BASE = _core.SVN_ERR_BASE
+SVN_ERR_PLUGIN_LOAD_FAILURE = _core.SVN_ERR_PLUGIN_LOAD_FAILURE
+SVN_ERR_MALFORMED_FILE = _core.SVN_ERR_MALFORMED_FILE
+# Error-code constants re-exported verbatim from the compiled _core
+# extension module so callers can reference them at package level.
+SVN_ERR_INCOMPLETE_DATA = _core.SVN_ERR_INCOMPLETE_DATA
+SVN_ERR_INCORRECT_PARAMS = _core.SVN_ERR_INCORRECT_PARAMS
+SVN_ERR_UNVERSIONED_RESOURCE = _core.SVN_ERR_UNVERSIONED_RESOURCE
+SVN_ERR_TEST_FAILED = _core.SVN_ERR_TEST_FAILED
+SVN_ERR_UNSUPPORTED_FEATURE = _core.SVN_ERR_UNSUPPORTED_FEATURE
+SVN_ERR_BAD_PROP_KIND = _core.SVN_ERR_BAD_PROP_KIND
+SVN_ERR_ILLEGAL_TARGET = _core.SVN_ERR_ILLEGAL_TARGET
+SVN_ERR_DELTA_MD5_CHECKSUM_ABSENT = _core.SVN_ERR_DELTA_MD5_CHECKSUM_ABSENT
+SVN_ERR_DIR_NOT_EMPTY = _core.SVN_ERR_DIR_NOT_EMPTY
+SVN_ERR_EXTERNAL_PROGRAM = _core.SVN_ERR_EXTERNAL_PROGRAM
+SVN_ERR_SWIG_PY_EXCEPTION_SET = _core.SVN_ERR_SWIG_PY_EXCEPTION_SET
+SVN_ERR_CHECKSUM_MISMATCH = _core.SVN_ERR_CHECKSUM_MISMATCH
+SVN_ERR_CANCELLED = _core.SVN_ERR_CANCELLED
+SVN_ERR_INVALID_DIFF_OPTION = _core.SVN_ERR_INVALID_DIFF_OPTION
+SVN_ERR_PROPERTY_NOT_FOUND = _core.SVN_ERR_PROPERTY_NOT_FOUND
+SVN_ERR_NO_AUTH_FILE_PATH = _core.SVN_ERR_NO_AUTH_FILE_PATH
+SVN_ERR_VERSION_MISMATCH = _core.SVN_ERR_VERSION_MISMATCH
+SVN_ERR_MERGEINFO_PARSE_ERROR = _core.SVN_ERR_MERGEINFO_PARSE_ERROR
+SVN_ERR_CEASE_INVOCATION = _core.SVN_ERR_CEASE_INVOCATION
+SVN_ERR_REVNUM_PARSE_FAILURE = _core.SVN_ERR_REVNUM_PARSE_FAILURE
+SVN_ERR_ITER_BREAK = _core.SVN_ERR_ITER_BREAK
+SVN_ERR_UNKNOWN_CHANGELIST = _core.SVN_ERR_UNKNOWN_CHANGELIST
+SVN_ERR_RESERVED_FILENAME_SPECIFIED = _core.SVN_ERR_RESERVED_FILENAME_SPECIFIED
+SVN_ERR_UNKNOWN_CAPABILITY = _core.SVN_ERR_UNKNOWN_CAPABILITY
+SVN_ERR_CL_ARG_PARSING_ERROR = _core.SVN_ERR_CL_ARG_PARSING_ERROR
+SVN_ERR_CL_INSUFFICIENT_ARGS = _core.SVN_ERR_CL_INSUFFICIENT_ARGS
+SVN_ERR_CL_MUTUALLY_EXCLUSIVE_ARGS = _core.SVN_ERR_CL_MUTUALLY_EXCLUSIVE_ARGS
+SVN_ERR_CL_ADM_DIR_RESERVED = _core.SVN_ERR_CL_ADM_DIR_RESERVED
+SVN_ERR_CL_LOG_MESSAGE_IS_VERSIONED_FILE = _core.SVN_ERR_CL_LOG_MESSAGE_IS_VERSIONED_FILE
+SVN_ERR_CL_LOG_MESSAGE_IS_PATHNAME = _core.SVN_ERR_CL_LOG_MESSAGE_IS_PATHNAME
+SVN_ERR_CL_COMMIT_IN_ADDED_DIR = _core.SVN_ERR_CL_COMMIT_IN_ADDED_DIR
+SVN_ERR_CL_NO_EXTERNAL_EDITOR = _core.SVN_ERR_CL_NO_EXTERNAL_EDITOR
+SVN_ERR_CL_BAD_LOG_MESSAGE = _core.SVN_ERR_CL_BAD_LOG_MESSAGE
+SVN_ERR_CL_UNNECESSARY_LOG_MESSAGE = _core.SVN_ERR_CL_UNNECESSARY_LOG_MESSAGE
+SVN_ERR_CL_NO_EXTERNAL_MERGE_TOOL = _core.SVN_ERR_CL_NO_EXTERNAL_MERGE_TOOL
+SVN_ERR_LAST = _core.SVN_ERR_LAST
+
+def svn_time_to_cstring(*args):
+    """svn_time_to_cstring(apr_time_t when, apr_pool_t pool) -> char"""
+    return apply(_core.svn_time_to_cstring, args)
+
+def svn_time_from_cstring(*args):
+    """svn_time_from_cstring(apr_time_t when, char data, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_time_from_cstring, args)
+
+def svn_time_to_human_cstring(*args):
+    """svn_time_to_human_cstring(apr_time_t when, apr_pool_t pool) -> char"""
+    return apply(_core.svn_time_to_human_cstring, args)
+
+def svn_parse_date(*args):
+    """
+    svn_parse_date(svn_boolean_t matched, apr_time_t result, char text, 
+        apr_time_t now, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_parse_date, args)
+
+def svn_sleep_for_timestamps(*args):
+    """svn_sleep_for_timestamps()"""
+    return apply(_core.svn_sleep_for_timestamps, args)
+class svn_error_t:
+    """Proxy of C svn_error_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_error_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_error_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_error_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["apr_err"] = _core.svn_error_t_apr_err_set
+    __swig_getmethods__["apr_err"] = _core.svn_error_t_apr_err_get
+    __swig_getmethods__["message"] = _core.svn_error_t_message_get
+    __swig_setmethods__["child"] = _core.svn_error_t_child_set
+    __swig_getmethods__["child"] = _core.svn_error_t_child_get
+    __swig_setmethods__["pool"] = _core.svn_error_t_pool_set
+    __swig_getmethods__["pool"] = _core.svn_error_t_pool_get
+    __swig_getmethods__["file"] = _core.svn_error_t_file_get
+    __swig_setmethods__["line"] = _core.svn_error_t_line_set
+    __swig_getmethods__["line"] = _core.svn_error_t_line_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_error_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_error_t"""
+        _swig_setattr(self, svn_error_t, 'this', apply(_core.new_svn_error_t, args))
+        _swig_setattr(self, svn_error_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_error_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_error_t.
+class svn_error_tPtr(svn_error_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_error_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_error_t, 'thisown', 0)
+        self.__class__ = svn_error_t
+_core.svn_error_t_swigregister(svn_error_tPtr)
+
+# Node-kind enumeration values re-exported from _core.
+svn_node_none = _core.svn_node_none
+svn_node_file = _core.svn_node_file
+svn_node_dir = _core.svn_node_dir
+svn_node_unknown = _core.svn_node_unknown
+
+def svn_revnum_parse(*args):
+    """svn_revnum_parse(svn_revnum_t rev, char str, char endptr) -> svn_error_t"""
+    return apply(_core.svn_revnum_parse, args)
+# Revision-number formatting, boolean, recursion, and depth constants
+# re-exported from _core.
+SVN_REVNUM_T_FMT = _core.SVN_REVNUM_T_FMT
+TRUE = _core.TRUE
+FALSE = _core.FALSE
+svn_nonrecursive = _core.svn_nonrecursive
+svn_recursive = _core.svn_recursive
+svn_depth_unknown = _core.svn_depth_unknown
+svn_depth_exclude = _core.svn_depth_exclude
+svn_depth_empty = _core.svn_depth_empty
+svn_depth_files = _core.svn_depth_files
+svn_depth_immediates = _core.svn_depth_immediates
+svn_depth_infinity = _core.svn_depth_infinity
+
+def svn_depth_to_word(*args):
+    """svn_depth_to_word(svn_depth_t depth) -> char"""
+    return apply(_core.svn_depth_to_word, args)
+
+def svn_depth_from_word(*args):
+    """svn_depth_from_word(char word) -> int"""
+    return apply(_core.svn_depth_from_word, args)
+# Dirent field-selector constants re-exported from _core.
+SVN_DIRENT_KIND = _core.SVN_DIRENT_KIND
+SVN_DIRENT_SIZE = _core.SVN_DIRENT_SIZE
+SVN_DIRENT_HAS_PROPS = _core.SVN_DIRENT_HAS_PROPS
+SVN_DIRENT_CREATED_REV = _core.SVN_DIRENT_CREATED_REV
+SVN_DIRENT_TIME = _core.SVN_DIRENT_TIME
+SVN_DIRENT_LAST_AUTHOR = _core.SVN_DIRENT_LAST_AUTHOR
+class svn_dirent_t:
+    """Proxy of C svn_dirent_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_dirent_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_dirent_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_dirent_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["kind"] = _core.svn_dirent_t_kind_set
+    __swig_getmethods__["kind"] = _core.svn_dirent_t_kind_get
+    __swig_setmethods__["size"] = _core.svn_dirent_t_size_set
+    __swig_getmethods__["size"] = _core.svn_dirent_t_size_get
+    __swig_setmethods__["has_props"] = _core.svn_dirent_t_has_props_set
+    __swig_getmethods__["has_props"] = _core.svn_dirent_t_has_props_get
+    __swig_setmethods__["created_rev"] = _core.svn_dirent_t_created_rev_set
+    __swig_getmethods__["created_rev"] = _core.svn_dirent_t_created_rev_get
+    __swig_setmethods__["time"] = _core.svn_dirent_t_time_set
+    __swig_getmethods__["time"] = _core.svn_dirent_t_time_get
+    __swig_getmethods__["last_author"] = _core.svn_dirent_t_last_author_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_dirent_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_dirent_t"""
+        _swig_setattr(self, svn_dirent_t, 'this', apply(_core.new_svn_dirent_t, args))
+        _swig_setattr(self, svn_dirent_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_dirent_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_dirent_t.
+class svn_dirent_tPtr(svn_dirent_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_dirent_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_dirent_t, 'thisown', 0)
+        self.__class__ = svn_dirent_t
+_core.svn_dirent_t_swigregister(svn_dirent_tPtr)
+
+
+def svn_dirent_dup(*args):
+    """svn_dirent_dup(svn_dirent_t dirent, apr_pool_t pool) -> svn_dirent_t"""
+    return apply(_core.svn_dirent_dup, args)
+# Keyword-expansion constants re-exported from _core.
+SVN_KEYWORD_MAX_LEN = _core.SVN_KEYWORD_MAX_LEN
+SVN_KEYWORD_REVISION_LONG = _core.SVN_KEYWORD_REVISION_LONG
+SVN_KEYWORD_REVISION_SHORT = _core.SVN_KEYWORD_REVISION_SHORT
+SVN_KEYWORD_REVISION_MEDIUM = _core.SVN_KEYWORD_REVISION_MEDIUM
+SVN_KEYWORD_DATE_LONG = _core.SVN_KEYWORD_DATE_LONG
+SVN_KEYWORD_DATE_SHORT = _core.SVN_KEYWORD_DATE_SHORT
+SVN_KEYWORD_AUTHOR_LONG = _core.SVN_KEYWORD_AUTHOR_LONG
+SVN_KEYWORD_AUTHOR_SHORT = _core.SVN_KEYWORD_AUTHOR_SHORT
+SVN_KEYWORD_URL_LONG = _core.SVN_KEYWORD_URL_LONG
+SVN_KEYWORD_URL_SHORT = _core.SVN_KEYWORD_URL_SHORT
+SVN_KEYWORD_ID = _core.SVN_KEYWORD_ID
+class svn_commit_info_t:
+    """Proxy of C svn_commit_info_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_commit_info_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_commit_info_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_commit_info_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["revision"] = _core.svn_commit_info_t_revision_set
+    __swig_getmethods__["revision"] = _core.svn_commit_info_t_revision_get
+    __swig_setmethods__["date"] = _core.svn_commit_info_t_date_set
+    __swig_getmethods__["date"] = _core.svn_commit_info_t_date_get
+    __swig_setmethods__["author"] = _core.svn_commit_info_t_author_set
+    __swig_getmethods__["author"] = _core.svn_commit_info_t_author_get
+    __swig_setmethods__["post_commit_err"] = _core.svn_commit_info_t_post_commit_err_set
+    __swig_getmethods__["post_commit_err"] = _core.svn_commit_info_t_post_commit_err_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_commit_info_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_commit_info_t"""
+        _swig_setattr(self, svn_commit_info_t, 'this', apply(_core.new_svn_commit_info_t, args))
+        _swig_setattr(self, svn_commit_info_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_commit_info_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_commit_info_t.
+class svn_commit_info_tPtr(svn_commit_info_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_commit_info_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_commit_info_t, 'thisown', 0)
+        self.__class__ = svn_commit_info_t
+_core.svn_commit_info_t_swigregister(svn_commit_info_tPtr)
+
+
+def svn_create_commit_info(*args):
+    """svn_create_commit_info(apr_pool_t pool) -> svn_commit_info_t"""
+    return apply(_core.svn_create_commit_info, args)
+
+def svn_commit_info_dup(*args):
+    """svn_commit_info_dup(svn_commit_info_t src_commit_info, apr_pool_t pool) -> svn_commit_info_t"""
+    return apply(_core.svn_commit_info_dup, args)
+class svn_log_changed_path_t:
+    """Proxy of C svn_log_changed_path_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_log_changed_path_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_log_changed_path_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_log_changed_path_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["action"] = _core.svn_log_changed_path_t_action_set
+    __swig_getmethods__["action"] = _core.svn_log_changed_path_t_action_get
+    __swig_getmethods__["copyfrom_path"] = _core.svn_log_changed_path_t_copyfrom_path_get
+    __swig_setmethods__["copyfrom_rev"] = _core.svn_log_changed_path_t_copyfrom_rev_set
+    __swig_getmethods__["copyfrom_rev"] = _core.svn_log_changed_path_t_copyfrom_rev_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_log_changed_path_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_log_changed_path_t"""
+        _swig_setattr(self, svn_log_changed_path_t, 'this', apply(_core.new_svn_log_changed_path_t, args))
+        _swig_setattr(self, svn_log_changed_path_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_log_changed_path_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_log_changed_path_t.
+class svn_log_changed_path_tPtr(svn_log_changed_path_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_log_changed_path_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_log_changed_path_t, 'thisown', 0)
+        self.__class__ = svn_log_changed_path_t
+_core.svn_log_changed_path_t_swigregister(svn_log_changed_path_tPtr)
+
+
+def svn_log_changed_path_dup(*args):
+    """svn_log_changed_path_dup(svn_log_changed_path_t changed_path, apr_pool_t pool) -> svn_log_changed_path_t"""
+    return apply(_core.svn_log_changed_path_dup, args)
+class svn_log_entry_t:
+    """Proxy of C svn_log_entry_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_log_entry_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_log_entry_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_log_entry_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["changed_paths"] = _core.svn_log_entry_t_changed_paths_set
+    __swig_getmethods__["changed_paths"] = _core.svn_log_entry_t_changed_paths_get
+    __swig_setmethods__["revision"] = _core.svn_log_entry_t_revision_set
+    __swig_getmethods__["revision"] = _core.svn_log_entry_t_revision_get
+    __swig_setmethods__["revprops"] = _core.svn_log_entry_t_revprops_set
+    __swig_getmethods__["revprops"] = _core.svn_log_entry_t_revprops_get
+    __swig_setmethods__["has_children"] = _core.svn_log_entry_t_has_children_set
+    __swig_getmethods__["has_children"] = _core.svn_log_entry_t_has_children_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_log_entry_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_log_entry_t"""
+        _swig_setattr(self, svn_log_entry_t, 'this', apply(_core.new_svn_log_entry_t, args))
+        _swig_setattr(self, svn_log_entry_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_log_entry_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_log_entry_t.
+class svn_log_entry_tPtr(svn_log_entry_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_log_entry_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_log_entry_t, 'thisown', 0)
+        self.__class__ = svn_log_entry_t
+_core.svn_log_entry_t_swigregister(svn_log_entry_tPtr)
+
+
+def svn_log_entry_create(*args):
+    """svn_log_entry_create(apr_pool_t pool) -> svn_log_entry_t"""
+    return apply(_core.svn_log_entry_create, args)
+# Stream buffer-size constants re-exported from _core.
+SVN_STREAM_CHUNK_SIZE = _core.SVN_STREAM_CHUNK_SIZE
+SVN__STREAM_CHUNK_SIZE = _core.SVN__STREAM_CHUNK_SIZE
+
+def svn_mime_type_validate(*args):
+    """svn_mime_type_validate(char mime_type, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_mime_type_validate, args)
+
+def svn_mime_type_is_binary(*args):
+    """svn_mime_type_is_binary(char mime_type) -> svn_boolean_t"""
+    return apply(_core.svn_mime_type_is_binary, args)
+class svn_lock_t:
+    """Proxy of C svn_lock_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_lock_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_lock_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_lock_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["path"] = _core.svn_lock_t_path_set
+    __swig_getmethods__["path"] = _core.svn_lock_t_path_get
+    __swig_setmethods__["token"] = _core.svn_lock_t_token_set
+    __swig_getmethods__["token"] = _core.svn_lock_t_token_get
+    __swig_setmethods__["owner"] = _core.svn_lock_t_owner_set
+    __swig_getmethods__["owner"] = _core.svn_lock_t_owner_get
+    __swig_setmethods__["comment"] = _core.svn_lock_t_comment_set
+    __swig_getmethods__["comment"] = _core.svn_lock_t_comment_get
+    __swig_setmethods__["is_dav_comment"] = _core.svn_lock_t_is_dav_comment_set
+    __swig_getmethods__["is_dav_comment"] = _core.svn_lock_t_is_dav_comment_get
+    __swig_setmethods__["creation_date"] = _core.svn_lock_t_creation_date_set
+    __swig_getmethods__["creation_date"] = _core.svn_lock_t_creation_date_get
+    __swig_setmethods__["expiration_date"] = _core.svn_lock_t_expiration_date_set
+    __swig_getmethods__["expiration_date"] = _core.svn_lock_t_expiration_date_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_lock_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_lock_t"""
+        _swig_setattr(self, svn_lock_t, 'this', apply(_core.new_svn_lock_t, args))
+        _swig_setattr(self, svn_lock_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_lock_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_lock_t.
+class svn_lock_tPtr(svn_lock_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_lock_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_lock_t, 'thisown', 0)
+        self.__class__ = svn_lock_t
+_core.svn_lock_t_swigregister(svn_lock_tPtr)
+
+
+def svn_lock_create(*args):
+    """svn_lock_create(apr_pool_t pool) -> svn_lock_t"""
+    return apply(_core.svn_lock_create, args)
+
+def svn_lock_dup(*args):
+    """svn_lock_dup(svn_lock_t lock, apr_pool_t pool) -> svn_lock_t"""
+    return apply(_core.svn_lock_dup, args)
+
+def svn_uuid_generate(*args):
+    """svn_uuid_generate(apr_pool_t pool) -> char"""
+    return apply(_core.svn_uuid_generate, args)
+class svn_merge_range_t:
+    """Proxy of C svn_merge_range_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_merge_range_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_merge_range_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_merge_range_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["start"] = _core.svn_merge_range_t_start_set
+    __swig_getmethods__["start"] = _core.svn_merge_range_t_start_get
+    __swig_setmethods__["end"] = _core.svn_merge_range_t_end_set
+    __swig_getmethods__["end"] = _core.svn_merge_range_t_end_get
+    __swig_setmethods__["inheritable"] = _core.svn_merge_range_t_inheritable_set
+    __swig_getmethods__["inheritable"] = _core.svn_merge_range_t_inheritable_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_merge_range_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_merge_range_t"""
+        _swig_setattr(self, svn_merge_range_t, 'this', apply(_core.new_svn_merge_range_t, args))
+        _swig_setattr(self, svn_merge_range_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_merge_range_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_merge_range_t.
+class svn_merge_range_tPtr(svn_merge_range_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_merge_range_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_merge_range_t, 'thisown', 0)
+        self.__class__ = svn_merge_range_t
+_core.svn_merge_range_t_swigregister(svn_merge_range_tPtr)
+
+
+def svn_merge_range_dup(*args):
+    """svn_merge_range_dup(svn_merge_range_t range, apr_pool_t pool) -> svn_merge_range_t"""
+    return apply(_core.svn_merge_range_dup, args)
+
+def svn_merge_range_contains_rev(*args):
+    """svn_merge_range_contains_rev(svn_merge_range_t range, svn_revnum_t rev) -> svn_boolean_t"""
+    return apply(_core.svn_merge_range_contains_rev, args)
+class svn_location_segment_t:
+    """Proxy of C svn_location_segment_t struct"""
+    # SWIG-generated proxy: attribute reads/writes are routed through the
+    # _core getter/setter tables populated below. Do not hand-edit logic.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_location_segment_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_location_segment_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_location_segment_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["range_start"] = _core.svn_location_segment_t_range_start_set
+    __swig_getmethods__["range_start"] = _core.svn_location_segment_t_range_start_get
+    __swig_setmethods__["range_end"] = _core.svn_location_segment_t_range_end_set
+    __swig_getmethods__["range_end"] = _core.svn_location_segment_t_range_end_get
+    __swig_setmethods__["path"] = _core.svn_location_segment_t_path_set
+    __swig_getmethods__["path"] = _core.svn_location_segment_t_path_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_location_segment_t"""
+      # Ties this proxy's validity to parent_pool (or the global
+      # application pool) via a weakref checked in assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # NOTE: this def overrides the class-level __getattr__ lambda above.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Re-apply Python-side state cached in _members onto the freshly
+      # fetched proxy value so wrapper attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_location_segment_t"""
+        _swig_setattr(self, svn_location_segment_t, 'this', apply(_core.new_svn_location_segment_t, args))
+        _swig_setattr(self, svn_location_segment_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_location_segment_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# Legacy SWIG pointer shim: instances reclassify themselves as svn_location_segment_t.
+class svn_location_segment_tPtr(svn_location_segment_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_location_segment_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_location_segment_t, 'thisown', 0)
+        self.__class__ = svn_location_segment_t
+_core.svn_location_segment_t_swigregister(svn_location_segment_tPtr)
+
+
+def svn_location_segment_dup(*args):
+    """svn_location_segment_dup(svn_location_segment_t segment, apr_pool_t pool) -> svn_location_segment_t"""
+    return apply(_core.svn_location_segment_dup, args)
+
+def svn_log_invoke_entry_receiver(*args):
+    """
+    svn_log_invoke_entry_receiver(svn_log_entry_receiver_t _obj, void baton, svn_log_entry_t log_entry, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_log_invoke_entry_receiver, args)
+
+def svn_log_invoke_message_receiver(*args):
+    """
+    svn_log_invoke_message_receiver(svn_log_message_receiver_t _obj, void baton, apr_hash_t changed_paths, 
+        svn_revnum_t revision, char author, 
+        char date, char message, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_log_invoke_message_receiver, args)
+
+def svn_commit_invoke_callback2(*args):
+    """
+    svn_commit_invoke_callback2(svn_commit_callback2_t _obj, svn_commit_info_t commit_info, 
+        void baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_commit_invoke_callback2, args)
+
+def svn_commit_invoke_callback(*args):
+    """
+    svn_commit_invoke_callback(svn_commit_callback_t _obj, svn_revnum_t new_revision, 
+        char date, char author, void baton) -> svn_error_t
+    """
+    return apply(_core.svn_commit_invoke_callback, args)
+
+def svn_cancel_invoke_func(*args):
+    """svn_cancel_invoke_func(svn_cancel_func_t _obj, void cancel_baton) -> svn_error_t"""
+    return apply(_core.svn_cancel_invoke_func, args)
+
+def svn_location_invoke_segment_receiver(*args):
+    """
+    svn_location_invoke_segment_receiver(svn_location_segment_receiver_t _obj, svn_location_segment_t segment, 
+        void baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_location_invoke_segment_receiver, args)
+class svn_log_entry_receiver_t:
+    """Proxy of C svn_log_entry_receiver_t struct"""
+    # SWIG-generated callback proxy: attribute access is dispatched to the
+    # underlying C struct via _swig_getattr/_swig_setattr, and calling the
+    # instance invokes the wrapped C callback.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_log_entry_receiver_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_log_entry_receiver_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_log_entry_receiver_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_log_entry_receiver_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      # Invoke the wrapped C callback with this proxy as the _obj argument.
+      return svn_log_invoke_entry_receiver(self, *args)
+
+
+class svn_log_entry_receiver_tPtr(svn_log_entry_receiver_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_log_entry_receiver_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_log_entry_receiver_t, 'thisown', 0)
+        self.__class__ = svn_log_entry_receiver_t
+_core.svn_log_entry_receiver_t_swigregister(svn_log_entry_receiver_tPtr)
+
+class svn_log_message_receiver_t:
+    """Proxy of C svn_log_message_receiver_t struct"""
+    # SWIG-generated callback proxy: attribute access is dispatched to the
+    # underlying C struct via _swig_getattr/_swig_setattr, and calling the
+    # instance invokes the wrapped C callback.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_log_message_receiver_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_log_message_receiver_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_log_message_receiver_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_log_message_receiver_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      # Invoke the wrapped C callback with this proxy as the _obj argument.
+      return svn_log_invoke_message_receiver(self, *args)
+
+
+class svn_log_message_receiver_tPtr(svn_log_message_receiver_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_log_message_receiver_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_log_message_receiver_t, 'thisown', 0)
+        self.__class__ = svn_log_message_receiver_t
+_core.svn_log_message_receiver_t_swigregister(svn_log_message_receiver_tPtr)
+
+class svn_commit_callback2_t:
+    """Proxy of C svn_commit_callback2_t struct"""
+    # SWIG-generated callback proxy: attribute access is dispatched to the
+    # underlying C struct via _swig_getattr/_swig_setattr, and calling the
+    # instance invokes the wrapped C callback.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_commit_callback2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_commit_callback2_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_commit_callback2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_commit_callback2_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      # Invoke the wrapped C callback with this proxy as the _obj argument.
+      return svn_commit_invoke_callback2(self, *args)
+
+
+class svn_commit_callback2_tPtr(svn_commit_callback2_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_commit_callback2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_commit_callback2_t, 'thisown', 0)
+        self.__class__ = svn_commit_callback2_t
+_core.svn_commit_callback2_t_swigregister(svn_commit_callback2_tPtr)
+
+class svn_commit_callback_t:
+    """Proxy of C svn_commit_callback_t struct"""
+    # SWIG-generated callback proxy: attribute access is dispatched to the
+    # underlying C struct via _swig_getattr/_swig_setattr, and calling the
+    # instance invokes the wrapped C callback.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_commit_callback_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_commit_callback_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_commit_callback_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_commit_callback_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      # Invoke the wrapped C callback with this proxy as the _obj argument.
+      return svn_commit_invoke_callback(self, *args)
+
+
+class svn_commit_callback_tPtr(svn_commit_callback_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_commit_callback_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_commit_callback_t, 'thisown', 0)
+        self.__class__ = svn_commit_callback_t
+_core.svn_commit_callback_t_swigregister(svn_commit_callback_tPtr)
+
+class svn_cancel_func_t:
+    """Proxy of C svn_cancel_func_t struct"""
+    # SWIG-generated callback proxy: attribute access is dispatched to the
+    # underlying C struct via _swig_getattr/_swig_setattr, and calling the
+    # instance invokes the wrapped C callback.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_cancel_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_cancel_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_cancel_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_cancel_func_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      # Invoke the wrapped C callback with this proxy as the _obj argument.
+      return svn_cancel_invoke_func(self, *args)
+
+
+class svn_cancel_func_tPtr(svn_cancel_func_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_cancel_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_cancel_func_t, 'thisown', 0)
+        self.__class__ = svn_cancel_func_t
+_core.svn_cancel_func_t_swigregister(svn_cancel_func_tPtr)
+
+class svn_location_segment_receiver_t:
+    """Proxy of C svn_location_segment_receiver_t struct"""
+    # SWIG-generated callback proxy: attribute access is dispatched to the
+    # underlying C struct via _swig_getattr/_swig_setattr, and calling the
+    # instance invokes the wrapped C callback.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_location_segment_receiver_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_location_segment_receiver_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_location_segment_receiver_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_location_segment_receiver_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      # Invoke the wrapped C callback with this proxy as the _obj argument.
+      return svn_location_invoke_segment_receiver(self, *args)
+
+
+class svn_location_segment_receiver_tPtr(svn_location_segment_receiver_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_location_segment_receiver_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_location_segment_receiver_t, 'thisown', 0)
+        self.__class__ = svn_location_segment_receiver_t
+_core.svn_location_segment_receiver_t_swigregister(svn_location_segment_receiver_tPtr)
+
+# Re-export of the C constant from the extension module.
+SVN_ALLOCATOR_RECOMMENDED_MAX_FREE = _core.SVN_ALLOCATOR_RECOMMENDED_MAX_FREE
+
+def svn_pool_create(*args):
+    """svn_pool_create(apr_pool_t parent_pool, apr_allocator_t allocator) -> apr_pool_t"""
+    return apply(_core.svn_pool_create, args)
+# Version-number constants re-exported from the C extension module.
+SVN_VER_MAJOR = _core.SVN_VER_MAJOR
+SVN_VER_MINOR = _core.SVN_VER_MINOR
+SVN_VER_PATCH = _core.SVN_VER_PATCH
+SVN_VER_MICRO = _core.SVN_VER_MICRO
+SVN_VER_LIBRARY = _core.SVN_VER_LIBRARY
+SVN_VER_TAG = _core.SVN_VER_TAG
+SVN_VER_NUMTAG = _core.SVN_VER_NUMTAG
+SVN_VER_REVISION = _core.SVN_VER_REVISION
+class svn_version_t:
+    """Proxy of C svn_version_t struct"""
+    # SWIG-generated struct proxy: the tables below map Python attribute
+    # names (major/minor/patch/tag) to C getter/setter functions.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_version_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_version_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_version_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["major"] = _core.svn_version_t_major_set
+    __swig_getmethods__["major"] = _core.svn_version_t_major_get
+    __swig_setmethods__["minor"] = _core.svn_version_t_minor_set
+    __swig_getmethods__["minor"] = _core.svn_version_t_minor_get
+    __swig_setmethods__["patch"] = _core.svn_version_t_patch_set
+    __swig_getmethods__["patch"] = _core.svn_version_t_patch_get
+    __swig_setmethods__["tag"] = _core.svn_version_t_tag_set
+    __swig_getmethods__["tag"] = _core.svn_version_t_tag_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_version_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_version_t"""
+        # Allocate the C struct and take ownership so __del__ frees it.
+        _swig_setattr(self, svn_version_t, 'this', apply(_core.new_svn_version_t, args))
+        _swig_setattr(self, svn_version_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_version_t):
+        """__del__(self)"""
+        # Best-effort destruction; errors during shutdown are ignored.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_version_tPtr(svn_version_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_version_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_version_t, 'thisown', 0)
+        self.__class__ = svn_version_t
+_core.svn_version_t_swigregister(svn_version_tPtr)
+
+
+def svn_ver_compatible(*args):
+    """svn_ver_compatible(svn_version_t my_version, svn_version_t lib_version) -> svn_boolean_t"""
+    return apply(_core.svn_ver_compatible, args)
+
+def svn_ver_equal(*args):
+    """svn_ver_equal(svn_version_t my_version, svn_version_t lib_version) -> svn_boolean_t"""
+    return apply(_core.svn_ver_equal, args)
+class svn_version_checklist_t:
+    """Proxy of C svn_version_checklist_t struct"""
+    # SWIG-generated struct proxy: the tables below map Python attribute
+    # names (label/version_query) to C getter/setter functions.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_version_checklist_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_version_checklist_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_version_checklist_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["label"] = _core.svn_version_checklist_t_label_set
+    __swig_getmethods__["label"] = _core.svn_version_checklist_t_label_get
+    __swig_setmethods__["version_query"] = _core.svn_version_checklist_t_version_query_set
+    __swig_getmethods__["version_query"] = _core.svn_version_checklist_t_version_query_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_version_checklist_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def version_query(self, *args):
+      # Invoke the struct's version_query function pointer.
+      return svn_version_checklist_invoke_version_query(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_version_checklist_t"""
+        # Allocate the C struct and take ownership so __del__ frees it.
+        _swig_setattr(self, svn_version_checklist_t, 'this', apply(_core.new_svn_version_checklist_t, args))
+        _swig_setattr(self, svn_version_checklist_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_version_checklist_t):
+        """__del__(self)"""
+        # Best-effort destruction; errors during shutdown are ignored.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_version_checklist_tPtr(svn_version_checklist_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_version_checklist_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_version_checklist_t, 'thisown', 0)
+        self.__class__ = svn_version_checklist_t
+_core.svn_version_checklist_t_swigregister(svn_version_checklist_tPtr)
+
+
+def svn_ver_check_list(*args):
+    """svn_ver_check_list(svn_version_t my_version, svn_version_checklist_t checklist) -> svn_error_t"""
+    return apply(_core.svn_ver_check_list, args)
+
+def svn_subr_version(*args):
+    """svn_subr_version() -> svn_version_t"""
+    return apply(_core.svn_subr_version, args)
+
+def svn_version_checklist_invoke_version_query(*args):
+    """svn_version_checklist_invoke_version_query(svn_version_checklist_t _obj) -> svn_version_t"""
+    return apply(_core.svn_version_checklist_invoke_version_query, args)
+
+def svn_prop_dup(*args):
+    """svn_prop_dup( prop, apr_pool_t pool)"""
+    return apply(_core.svn_prop_dup, args)
+
+def svn_prop_array_dup(*args):
+    """svn_prop_array_dup(apr_array_header_t array, apr_pool_t pool) -> apr_array_header_t"""
+    return apply(_core.svn_prop_array_dup, args)
+
+def svn_prop_hash_to_array(*args):
+    """svn_prop_hash_to_array(apr_hash_t hash, apr_pool_t pool) -> apr_array_header_t"""
+    return apply(_core.svn_prop_hash_to_array, args)
+# svn_prop_kind enum values re-exported from the C extension module.
+svn_prop_entry_kind = _core.svn_prop_entry_kind
+svn_prop_wc_kind = _core.svn_prop_wc_kind
+svn_prop_regular_kind = _core.svn_prop_regular_kind
+
+def svn_property_kind(*args):
+    """svn_property_kind(int prefix_len, char prop_name) -> int"""
+    return apply(_core.svn_property_kind, args)
+
+def svn_prop_is_svn_prop(*args):
+    """svn_prop_is_svn_prop(char prop_name) -> svn_boolean_t"""
+    return apply(_core.svn_prop_is_svn_prop, args)
+
+def svn_prop_has_svn_prop(*args):
+    """svn_prop_has_svn_prop(apr_hash_t props, apr_pool_t pool) -> svn_boolean_t"""
+    return apply(_core.svn_prop_has_svn_prop, args)
+
+def svn_prop_is_boolean(*args):
+    """svn_prop_is_boolean(char prop_name) -> svn_boolean_t"""
+    return apply(_core.svn_prop_is_boolean, args)
+
+def svn_prop_needs_translation(*args):
+    """svn_prop_needs_translation(char prop_name) -> svn_boolean_t"""
+    return apply(_core.svn_prop_needs_translation, args)
+
+def svn_categorize_props(*args):
+    """
+    svn_categorize_props(apr_array_header_t proplist, apr_array_header_t entry_props, 
+        apr_array_header_t wc_props, apr_array_header_t regular_props, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_categorize_props, args)
+
+def svn_prop_diffs(*args):
+    """
+    svn_prop_diffs(apr_array_header_t propdiffs, apr_hash_t target_props, 
+        apr_hash_t source_props, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_prop_diffs, args)
+
+def svn_prop_name_is_valid(*args):
+    """svn_prop_name_is_valid(char prop_name) -> svn_boolean_t"""
+    return apply(_core.svn_prop_name_is_valid, args)
+# Property-name and option-parsing constants re-exported from the C
+# extension module (svn:* properties, svnsync bookkeeping, option limits).
+SVN_PROP_PREFIX = _core.SVN_PROP_PREFIX
+SVN_PROP_BOOLEAN_TRUE = _core.SVN_PROP_BOOLEAN_TRUE
+SVN_PROP_MIME_TYPE = _core.SVN_PROP_MIME_TYPE
+SVN_PROP_IGNORE = _core.SVN_PROP_IGNORE
+SVN_PROP_EOL_STYLE = _core.SVN_PROP_EOL_STYLE
+SVN_PROP_KEYWORDS = _core.SVN_PROP_KEYWORDS
+SVN_PROP_EXECUTABLE = _core.SVN_PROP_EXECUTABLE
+SVN_PROP_EXECUTABLE_VALUE = _core.SVN_PROP_EXECUTABLE_VALUE
+SVN_PROP_NEEDS_LOCK = _core.SVN_PROP_NEEDS_LOCK
+SVN_PROP_NEEDS_LOCK_VALUE = _core.SVN_PROP_NEEDS_LOCK_VALUE
+SVN_PROP_SPECIAL = _core.SVN_PROP_SPECIAL
+SVN_PROP_SPECIAL_VALUE = _core.SVN_PROP_SPECIAL_VALUE
+SVN_PROP_EXTERNALS = _core.SVN_PROP_EXTERNALS
+SVN_PROP_MERGEINFO = _core.SVN_PROP_MERGEINFO
+SVN_PROP_WC_PREFIX = _core.SVN_PROP_WC_PREFIX
+SVN_PROP_ENTRY_PREFIX = _core.SVN_PROP_ENTRY_PREFIX
+SVN_PROP_ENTRY_COMMITTED_REV = _core.SVN_PROP_ENTRY_COMMITTED_REV
+SVN_PROP_ENTRY_COMMITTED_DATE = _core.SVN_PROP_ENTRY_COMMITTED_DATE
+SVN_PROP_ENTRY_LAST_AUTHOR = _core.SVN_PROP_ENTRY_LAST_AUTHOR
+SVN_PROP_ENTRY_UUID = _core.SVN_PROP_ENTRY_UUID
+SVN_PROP_ENTRY_LOCK_TOKEN = _core.SVN_PROP_ENTRY_LOCK_TOKEN
+SVN_PROP_CUSTOM_PREFIX = _core.SVN_PROP_CUSTOM_PREFIX
+SVN_PROP_REVISION_AUTHOR = _core.SVN_PROP_REVISION_AUTHOR
+SVN_PROP_REVISION_LOG = _core.SVN_PROP_REVISION_LOG
+SVN_PROP_REVISION_DATE = _core.SVN_PROP_REVISION_DATE
+SVN_PROP_REVISION_ORIG_DATE = _core.SVN_PROP_REVISION_ORIG_DATE
+SVN_PROP_REVISION_AUTOVERSIONED = _core.SVN_PROP_REVISION_AUTOVERSIONED
+SVNSYNC_PROP_PREFIX = _core.SVNSYNC_PROP_PREFIX
+SVNSYNC_PROP_LOCK = _core.SVNSYNC_PROP_LOCK
+SVNSYNC_PROP_FROM_URL = _core.SVNSYNC_PROP_FROM_URL
+SVNSYNC_PROP_FROM_UUID = _core.SVNSYNC_PROP_FROM_UUID
+SVNSYNC_PROP_LAST_MERGED_REV = _core.SVNSYNC_PROP_LAST_MERGED_REV
+SVNSYNC_PROP_CURRENTLY_COPYING = _core.SVNSYNC_PROP_CURRENTLY_COPYING
+SVN_OPT_MAX_ALIASES = _core.SVN_OPT_MAX_ALIASES
+SVN_OPT_MAX_OPTIONS = _core.SVN_OPT_MAX_OPTIONS
+SVN_OPT_FIRST_LONGOPT_ID = _core.SVN_OPT_FIRST_LONGOPT_ID
+class svn_opt_subcommand_desc2_t:
+    """Proxy of C svn_opt_subcommand_desc2_t struct"""
+    # SWIG-generated struct proxy: the tables below map Python attribute
+    # names to C getter/setter functions (desc_overrides is read-only:
+    # it has a getter but no setter entry).
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_opt_subcommand_desc2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_opt_subcommand_desc2_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_opt_subcommand_desc2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["name"] = _core.svn_opt_subcommand_desc2_t_name_set
+    __swig_getmethods__["name"] = _core.svn_opt_subcommand_desc2_t_name_get
+    __swig_setmethods__["cmd_func"] = _core.svn_opt_subcommand_desc2_t_cmd_func_set
+    __swig_getmethods__["cmd_func"] = _core.svn_opt_subcommand_desc2_t_cmd_func_get
+    __swig_setmethods__["aliases"] = _core.svn_opt_subcommand_desc2_t_aliases_set
+    __swig_getmethods__["aliases"] = _core.svn_opt_subcommand_desc2_t_aliases_get
+    __swig_setmethods__["help"] = _core.svn_opt_subcommand_desc2_t_help_set
+    __swig_getmethods__["help"] = _core.svn_opt_subcommand_desc2_t_help_get
+    __swig_setmethods__["valid_options"] = _core.svn_opt_subcommand_desc2_t_valid_options_set
+    __swig_getmethods__["valid_options"] = _core.svn_opt_subcommand_desc2_t_valid_options_get
+    __swig_getmethods__["desc_overrides"] = _core.svn_opt_subcommand_desc2_t_desc_overrides_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_opt_subcommand_desc2_t"""
+      # Tie this proxy's lifetime checks to its APR pool: _is_valid is a
+      # weakref to the pool's liveness flag, consulted by assert_valid().
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # If a proxy for this member was handed out earlier (recorded in
+      # _members by __setattr__), copy its Python-side state onto the
+      # fresh proxy so user-added attributes survive round-trips.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Remember the Python wrapper so __getattr__ can restore its state.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_opt_subcommand_desc2_t"""
+        # Allocate the C struct and take ownership so __del__ frees it.
+        _swig_setattr(self, svn_opt_subcommand_desc2_t, 'this', apply(_core.new_svn_opt_subcommand_desc2_t, args))
+        _swig_setattr(self, svn_opt_subcommand_desc2_t, 'thisown', 1)
+    def __del__(self, destroy=_core.delete_svn_opt_subcommand_desc2_t):
+        """__del__(self)"""
+        # Best-effort destruction; errors during shutdown are ignored.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_opt_subcommand_desc2_tPtr(svn_opt_subcommand_desc2_t):
+    # SWIG pointer-adoption shim: wraps an existing C pointer (not owned)
+    # and immediately re-classes the instance to the main proxy type.
+    def __init__(self, this):
+        _swig_setattr(self, svn_opt_subcommand_desc2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_opt_subcommand_desc2_t, 'thisown', 0)
+        self.__class__ = svn_opt_subcommand_desc2_t
+_core.svn_opt_subcommand_desc2_t_swigregister(svn_opt_subcommand_desc2_tPtr)
+
class svn_opt_subcommand_desc2_t_desc_overrides:
    """Proxy of C svn_opt_subcommand_desc2_t_desc_overrides struct"""
    # SWIG 1.3-era shadow class: attribute reads/writes are routed through
    # the C accessor functions registered in the tables below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_opt_subcommand_desc2_t_desc_overrides, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_opt_subcommand_desc2_t_desc_overrides, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_opt_subcommand_desc2_t_desc_overrides instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["optch"] = _core.svn_opt_subcommand_desc2_t_desc_overrides_optch_set
    __swig_getmethods__["optch"] = _core.svn_opt_subcommand_desc2_t_desc_overrides_optch_get
    __swig_setmethods__["desc"] = _core.svn_opt_subcommand_desc2_t_desc_overrides_desc_set
    __swig_getmethods__["desc"] = _core.svn_opt_subcommand_desc2_t_desc_overrides_desc_get
    def __init__(self, *args):
        """__init__(self) -> svn_opt_subcommand_desc2_t_desc_overrides"""
        # f(*args) replaces the deprecated apply() builtin; same behavior.
        _swig_setattr(self, svn_opt_subcommand_desc2_t_desc_overrides, 'this', _core.new_svn_opt_subcommand_desc2_t_desc_overrides(*args))
        _swig_setattr(self, svn_opt_subcommand_desc2_t_desc_overrides, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_opt_subcommand_desc2_t_desc_overrides):
        """__del__(self)"""
        # Best-effort destroy; __del__ may run during interpreter shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass


class svn_opt_subcommand_desc2_t_desc_overridesPtr(svn_opt_subcommand_desc2_t_desc_overrides):
    # Pointer wrapper: record the C pointer, default ownership off, then
    # rebind the instance to the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_opt_subcommand_desc2_t_desc_overrides, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_opt_subcommand_desc2_t_desc_overrides, 'thisown', 0)
        self.__class__ = svn_opt_subcommand_desc2_t_desc_overrides
# Tell the C module which Python class wraps this struct type.
_core.svn_opt_subcommand_desc2_t_desc_overrides_swigregister(svn_opt_subcommand_desc2_t_desc_overridesPtr)
+
class svn_opt_subcommand_desc_t:
    """Proxy of C svn_opt_subcommand_desc_t struct"""
    # SWIG 1.3-era shadow class: attribute reads/writes are routed through
    # the C accessor functions registered in the tables below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_opt_subcommand_desc_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_opt_subcommand_desc_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_opt_subcommand_desc_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["name"] = _core.svn_opt_subcommand_desc_t_name_set
    __swig_getmethods__["name"] = _core.svn_opt_subcommand_desc_t_name_get
    __swig_setmethods__["cmd_func"] = _core.svn_opt_subcommand_desc_t_cmd_func_set
    __swig_getmethods__["cmd_func"] = _core.svn_opt_subcommand_desc_t_cmd_func_get
    __swig_setmethods__["aliases"] = _core.svn_opt_subcommand_desc_t_aliases_set
    __swig_getmethods__["aliases"] = _core.svn_opt_subcommand_desc_t_aliases_get
    __swig_setmethods__["help"] = _core.svn_opt_subcommand_desc_t_help_set
    __swig_getmethods__["help"] = _core.svn_opt_subcommand_desc_t_help_get
    __swig_setmethods__["valid_options"] = _core.svn_opt_subcommand_desc_t_valid_options_set
    __swig_getmethods__["valid_options"] = _core.svn_opt_subcommand_desc_t_valid_options_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_opt_subcommand_desc_t"""
      # Tie lifetime checks to a pool (the global application pool by
      # default) via a weakref to the pool's _is_valid sentinel.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # A dead weakref means the parent pool (and this struct) is gone.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # Restore Python-side instance state onto the freshly wrapped value
      # if this attribute was previously assigned from Python.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass
      # Propagate pool-validity checking to wrapped member objects.
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Remember the Python-side object so __getattr__ can restore its
      # instance state when the attribute is read back.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_opt_subcommand_desc_t"""
        # f(*args) replaces the deprecated apply() builtin; same behavior.
        _swig_setattr(self, svn_opt_subcommand_desc_t, 'this', _core.new_svn_opt_subcommand_desc_t(*args))
        _swig_setattr(self, svn_opt_subcommand_desc_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_opt_subcommand_desc_t):
        """__del__(self)"""
        # Best-effort destroy; __del__ may run during interpreter shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass


class svn_opt_subcommand_desc_tPtr(svn_opt_subcommand_desc_t):
    # Pointer wrapper: record the C pointer, default ownership off, then
    # rebind the instance to the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_opt_subcommand_desc_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_opt_subcommand_desc_t, 'thisown', 0)
        self.__class__ = svn_opt_subcommand_desc_t
# Tell the C module which Python class wraps this struct type.
_core.svn_opt_subcommand_desc_t_swigregister(svn_opt_subcommand_desc_tPtr)
+
+
def svn_opt_get_canonical_subcommand2(*args):
    """svn_opt_get_canonical_subcommand2(svn_opt_subcommand_desc2_t table, char cmd_name) -> svn_opt_subcommand_desc2_t"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_get_canonical_subcommand2(*args)
+
def svn_opt_get_canonical_subcommand(*args):
    """svn_opt_get_canonical_subcommand(svn_opt_subcommand_desc_t table, char cmd_name) -> svn_opt_subcommand_desc_t"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_get_canonical_subcommand(*args)
+
def svn_opt_get_option_from_code2(*args):
    """
    svn_opt_get_option_from_code2(int code, apr_getopt_option_t option_table, svn_opt_subcommand_desc2_t command, 
        apr_pool_t pool) -> apr_getopt_option_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_get_option_from_code2(*args)
+
def svn_opt_get_option_from_code(*args):
    """svn_opt_get_option_from_code(int code, apr_getopt_option_t option_table) -> apr_getopt_option_t"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_get_option_from_code(*args)
+
def svn_opt_subcommand_takes_option3(*args):
    """
    svn_opt_subcommand_takes_option3(svn_opt_subcommand_desc2_t command, int option_code, 
        int global_options) -> svn_boolean_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_subcommand_takes_option3(*args)
+
def svn_opt_subcommand_takes_option2(*args):
    """svn_opt_subcommand_takes_option2(svn_opt_subcommand_desc2_t command, int option_code) -> svn_boolean_t"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_subcommand_takes_option2(*args)
+
def svn_opt_subcommand_takes_option(*args):
    """svn_opt_subcommand_takes_option(svn_opt_subcommand_desc_t command, int option_code) -> svn_boolean_t"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_subcommand_takes_option(*args)
+
def svn_opt_print_generic_help2(*args):
    """
    svn_opt_print_generic_help2(char header, svn_opt_subcommand_desc2_t cmd_table, 
        apr_getopt_option_t opt_table, char footer, apr_pool_t pool, 
        FILE stream)
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_print_generic_help2(*args)
+
def svn_opt_format_option(*args):
    """
    svn_opt_format_option(char string, apr_getopt_option_t opt, svn_boolean_t doc, 
        apr_pool_t pool)
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_format_option(*args)
+
def svn_opt_subcommand_help3(*args):
    """
    svn_opt_subcommand_help3(char subcommand, svn_opt_subcommand_desc2_t table, 
        apr_getopt_option_t options_table, int global_options, 
        apr_pool_t pool)
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_subcommand_help3(*args)
+
def svn_opt_subcommand_help2(*args):
    """
    svn_opt_subcommand_help2(char subcommand, svn_opt_subcommand_desc2_t table, 
        apr_getopt_option_t options_table, apr_pool_t pool)
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_subcommand_help2(*args)
+
def svn_opt_subcommand_help(*args):
    """
    svn_opt_subcommand_help(char subcommand, svn_opt_subcommand_desc_t table, apr_getopt_option_t options_table, 
        apr_pool_t pool)
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_subcommand_help(*args)
# Re-export the svn_opt_revision_kind enumeration values from the C module.
svn_opt_revision_unspecified = _core.svn_opt_revision_unspecified
svn_opt_revision_number = _core.svn_opt_revision_number
svn_opt_revision_date = _core.svn_opt_revision_date
svn_opt_revision_committed = _core.svn_opt_revision_committed
svn_opt_revision_previous = _core.svn_opt_revision_previous
svn_opt_revision_base = _core.svn_opt_revision_base
svn_opt_revision_working = _core.svn_opt_revision_working
svn_opt_revision_head = _core.svn_opt_revision_head
class svn_opt_revision_value_t:
    """Proxy of C svn_opt_revision_value_t struct"""
    # SWIG 1.3-era shadow class: attribute reads/writes are routed through
    # the C accessor functions registered in the tables below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_opt_revision_value_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_opt_revision_value_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_opt_revision_value_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["number"] = _core.svn_opt_revision_value_t_number_set
    __swig_getmethods__["number"] = _core.svn_opt_revision_value_t_number_get
    __swig_setmethods__["date"] = _core.svn_opt_revision_value_t_date_set
    __swig_getmethods__["date"] = _core.svn_opt_revision_value_t_date_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_opt_revision_value_t"""
      # Tie lifetime checks to a pool (the global application pool by
      # default) via a weakref to the pool's _is_valid sentinel.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # A dead weakref means the parent pool (and this struct) is gone.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # Restore Python-side instance state onto the freshly wrapped value
      # if this attribute was previously assigned from Python.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass
      # Propagate pool-validity checking to wrapped member objects.
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Remember the Python-side object so __getattr__ can restore its
      # instance state when the attribute is read back.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_opt_revision_value_t"""
        # f(*args) replaces the deprecated apply() builtin; same behavior.
        _swig_setattr(self, svn_opt_revision_value_t, 'this', _core.new_svn_opt_revision_value_t(*args))
        _swig_setattr(self, svn_opt_revision_value_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_opt_revision_value_t):
        """__del__(self)"""
        # Best-effort destroy; __del__ may run during interpreter shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass


class svn_opt_revision_value_tPtr(svn_opt_revision_value_t):
    # Pointer wrapper: record the C pointer, default ownership off, then
    # rebind the instance to the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_opt_revision_value_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_opt_revision_value_t, 'thisown', 0)
        self.__class__ = svn_opt_revision_value_t
# Tell the C module which Python class wraps this struct type.
_core.svn_opt_revision_value_t_swigregister(svn_opt_revision_value_tPtr)
+
class svn_opt_revision_t:
    """Proxy of C svn_opt_revision_t struct"""
    # SWIG 1.3-era shadow class: attribute reads/writes are routed through
    # the C accessor functions registered in the tables below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_opt_revision_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_opt_revision_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_opt_revision_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["kind"] = _core.svn_opt_revision_t_kind_set
    __swig_getmethods__["kind"] = _core.svn_opt_revision_t_kind_get
    __swig_setmethods__["value"] = _core.svn_opt_revision_t_value_set
    __swig_getmethods__["value"] = _core.svn_opt_revision_t_value_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_opt_revision_t"""
      # Tie lifetime checks to a pool (the global application pool by
      # default) via a weakref to the pool's _is_valid sentinel.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # A dead weakref means the parent pool (and this struct) is gone.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # Restore Python-side instance state onto the freshly wrapped value
      # if this attribute was previously assigned from Python.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass
      # Propagate pool-validity checking to wrapped member objects.
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Remember the Python-side object so __getattr__ can restore its
      # instance state when the attribute is read back.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_opt_revision_t"""
        # f(*args) replaces the deprecated apply() builtin; same behavior.
        _swig_setattr(self, svn_opt_revision_t, 'this', _core.new_svn_opt_revision_t(*args))
        _swig_setattr(self, svn_opt_revision_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_opt_revision_t):
        """__del__(self)"""
        # Best-effort destroy; __del__ may run during interpreter shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass


class svn_opt_revision_tPtr(svn_opt_revision_t):
    # Pointer wrapper: record the C pointer, default ownership off, then
    # rebind the instance to the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_opt_revision_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_opt_revision_t, 'thisown', 0)
        self.__class__ = svn_opt_revision_t
# Tell the C module which Python class wraps this struct type.
_core.svn_opt_revision_t_swigregister(svn_opt_revision_tPtr)
+
class svn_opt_revision_range_t:
    """Proxy of C svn_opt_revision_range_t struct"""
    # SWIG 1.3-era shadow class: attribute reads/writes are routed through
    # the C accessor functions registered in the tables below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_opt_revision_range_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_opt_revision_range_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_opt_revision_range_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["start"] = _core.svn_opt_revision_range_t_start_set
    __swig_getmethods__["start"] = _core.svn_opt_revision_range_t_start_get
    __swig_setmethods__["end"] = _core.svn_opt_revision_range_t_end_set
    __swig_getmethods__["end"] = _core.svn_opt_revision_range_t_end_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_opt_revision_range_t"""
      # Tie lifetime checks to a pool (the global application pool by
      # default) via a weakref to the pool's _is_valid sentinel.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # A dead weakref means the parent pool (and this struct) is gone.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # Restore Python-side instance state onto the freshly wrapped value
      # if this attribute was previously assigned from Python.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass
      # Propagate pool-validity checking to wrapped member objects.
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Remember the Python-side object so __getattr__ can restore its
      # instance state when the attribute is read back.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_opt_revision_range_t"""
        # f(*args) replaces the deprecated apply() builtin; same behavior.
        _swig_setattr(self, svn_opt_revision_range_t, 'this', _core.new_svn_opt_revision_range_t(*args))
        _swig_setattr(self, svn_opt_revision_range_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_opt_revision_range_t):
        """__del__(self)"""
        # Best-effort destroy; __del__ may run during interpreter shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass


class svn_opt_revision_range_tPtr(svn_opt_revision_range_t):
    # Pointer wrapper: record the C pointer, default ownership off, then
    # rebind the instance to the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_opt_revision_range_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_opt_revision_range_t, 'thisown', 0)
        self.__class__ = svn_opt_revision_range_t
# Tell the C module which Python class wraps this struct type.
_core.svn_opt_revision_range_t_swigregister(svn_opt_revision_range_tPtr)
+
+
def svn_opt_parse_revision(*args):
    """
    svn_opt_parse_revision(svn_opt_revision_t start_revision, svn_opt_revision_t end_revision, 
        char arg, apr_pool_t pool) -> int
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_parse_revision(*args)
+
def svn_opt_parse_revision_to_range(*args):
    """svn_opt_parse_revision_to_range(apr_array_header_t opt_ranges, char arg, apr_pool_t pool) -> int"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_parse_revision_to_range(*args)
+
def svn_opt_resolve_revisions(*args):
    """
    svn_opt_resolve_revisions(svn_opt_revision_t peg_rev, svn_opt_revision_t op_rev, 
        svn_boolean_t is_url, svn_boolean_t notice_local_mods, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_resolve_revisions(*args)
+
def svn_opt_args_to_target_array3(*args):
    """
    svn_opt_args_to_target_array3(apr_array_header_t targets_p, apr_getopt_t os, apr_array_header_t known_targets, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_args_to_target_array3(*args)
+
def svn_opt_args_to_target_array2(*args):
    """
    svn_opt_args_to_target_array2(apr_array_header_t targets_p, apr_getopt_t os, apr_array_header_t known_targets, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_args_to_target_array2(*args)
+
def svn_opt_push_implicit_dot_target(*args):
    """svn_opt_push_implicit_dot_target(apr_array_header_t targets, apr_pool_t pool)"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_push_implicit_dot_target(*args)
+
def svn_opt_parse_num_args(*args):
    """
    svn_opt_parse_num_args(apr_array_header_t args_p, apr_getopt_t os, int num_args, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_parse_num_args(*args)
+
def svn_opt_parse_all_args(*args):
    """svn_opt_parse_all_args(apr_array_header_t args_p, apr_getopt_t os, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_parse_all_args(*args)
+
def svn_opt_parse_path(*args):
    """svn_opt_parse_path(svn_opt_revision_t rev, char truepath, char path, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_parse_path(*args)
+
def svn_opt_print_help3(*args):
    """
    svn_opt_print_help3(apr_getopt_t os, char pgm_name, svn_boolean_t print_version, 
        svn_boolean_t quiet, char version_footer, 
        char header, svn_opt_subcommand_desc2_t cmd_table, 
        apr_getopt_option_t option_table, 
        int global_options, char footer, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_print_help3(*args)
+
def svn_opt_print_help2(*args):
    """
    svn_opt_print_help2(apr_getopt_t os, char pgm_name, svn_boolean_t print_version, 
        svn_boolean_t quiet, char version_footer, 
        char header, svn_opt_subcommand_desc2_t cmd_table, 
        apr_getopt_option_t option_table, 
        char footer, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_print_help2(*args)
+
def svn_opt_print_help(*args):
    """
    svn_opt_print_help(apr_getopt_t os, char pgm_name, svn_boolean_t print_version, 
        svn_boolean_t quiet, char version_footer, 
        char header, svn_opt_subcommand_desc_t cmd_table, 
        apr_getopt_option_t option_table, 
        char footer, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the deprecated apply() builtin; same behavior.
    return _core.svn_opt_print_help(*args)
class svn_auth_provider_t:
    """Proxy of C svn_auth_provider_t struct"""
    # SWIG 1.3-era shadow class: attribute reads/writes are routed through
    # the C accessor functions registered in the tables below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_provider_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_provider_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_provider_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["cred_kind"] = _core.svn_auth_provider_t_cred_kind_set
    __swig_getmethods__["cred_kind"] = _core.svn_auth_provider_t_cred_kind_get
    __swig_setmethods__["first_credentials"] = _core.svn_auth_provider_t_first_credentials_set
    __swig_getmethods__["first_credentials"] = _core.svn_auth_provider_t_first_credentials_get
    __swig_setmethods__["next_credentials"] = _core.svn_auth_provider_t_next_credentials_set
    __swig_getmethods__["next_credentials"] = _core.svn_auth_provider_t_next_credentials_get
    __swig_setmethods__["save_credentials"] = _core.svn_auth_provider_t_save_credentials_set
    __swig_getmethods__["save_credentials"] = _core.svn_auth_provider_t_save_credentials_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_provider_t"""
      # Tie lifetime checks to a pool (the global application pool by
      # default) via a weakref to the pool's _is_valid sentinel.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # A dead weakref means the parent pool (and this struct) is gone.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # Restore Python-side instance state onto the freshly wrapped value
      # if this attribute was previously assigned from Python.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass
      # Propagate pool-validity checking to wrapped member objects.
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Remember the Python-side object so __getattr__ can restore its
      # instance state when the attribute is read back.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)

    def first_credentials(self, *args):
      # Invoke this provider's first_credentials vtable callback.
      return svn_auth_provider_invoke_first_credentials(self, *args)

    def next_credentials(self, *args):
      # Invoke this provider's next_credentials vtable callback.
      return svn_auth_provider_invoke_next_credentials(self, *args)

    def save_credentials(self, *args):
      # Invoke this provider's save_credentials vtable callback.
      return svn_auth_provider_invoke_save_credentials(self, *args)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_provider_t"""
        # f(*args) replaces the deprecated apply() builtin; same behavior.
        _swig_setattr(self, svn_auth_provider_t, 'this', _core.new_svn_auth_provider_t(*args))
        _swig_setattr(self, svn_auth_provider_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_provider_t):
        """__del__(self)"""
        # Best-effort destroy; __del__ may run during interpreter shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass


class svn_auth_provider_tPtr(svn_auth_provider_t):
    # Pointer wrapper: record the C pointer, default ownership off, then
    # rebind the instance to the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_auth_provider_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_provider_t, 'thisown', 0)
        self.__class__ = svn_auth_provider_t
# Tell the C module which Python class wraps this struct type.
_core.svn_auth_provider_t_swigregister(svn_auth_provider_tPtr)
+
class svn_auth_provider_object_t:
    """Proxy of C svn_auth_provider_object_t struct"""
    # SWIG 1.3-era shadow class: attribute reads/writes are routed through
    # the C accessor functions registered in the tables below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_provider_object_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_provider_object_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_provider_object_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["vtable"] = _core.svn_auth_provider_object_t_vtable_set
    __swig_getmethods__["vtable"] = _core.svn_auth_provider_object_t_vtable_get
    __swig_setmethods__["provider_baton"] = _core.svn_auth_provider_object_t_provider_baton_set
    __swig_getmethods__["provider_baton"] = _core.svn_auth_provider_object_t_provider_baton_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_provider_object_t"""
      # Tie lifetime checks to a pool (the global application pool by
      # default) via a weakref to the pool's _is_valid sentinel.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # A dead weakref means the parent pool (and this struct) is gone.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()
      value = _swig_getattr(self, self.__class__, name)
      # Restore Python-side instance state onto the freshly wrapped value
      # if this attribute was previously assigned from Python.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass
      # Propagate pool-validity checking to wrapped member objects.
      if hasattr(value, "assert_valid"):
        value.assert_valid()
      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()
      # Remember the Python-side object so __getattr__ can restore its
      # instance state when the attribute is read back.
      self.__dict__.setdefault("_members",{})[name] = value
      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_provider_object_t"""
        # f(*args) replaces the deprecated apply() builtin; same behavior.
        _swig_setattr(self, svn_auth_provider_object_t, 'this', _core.new_svn_auth_provider_object_t(*args))
        _swig_setattr(self, svn_auth_provider_object_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_provider_object_t):
        """__del__(self)"""
        # Best-effort destroy; __del__ may run during interpreter shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass


class svn_auth_provider_object_tPtr(svn_auth_provider_object_t):
    # Pointer wrapper: record the C pointer, default ownership off, then
    # rebind the instance to the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_auth_provider_object_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_provider_object_t, 'thisown', 0)
        self.__class__ = svn_auth_provider_object_t
# Tell the C module which Python class wraps this struct type.
_core.svn_auth_provider_object_t_swigregister(svn_auth_provider_object_tPtr)
+
# Credential-kind identifier for simple username/password credentials.
SVN_AUTH_CRED_SIMPLE = _core.SVN_AUTH_CRED_SIMPLE
+class svn_auth_cred_simple_t:
+    """Proxy of C svn_auth_cred_simple_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_cred_simple_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_cred_simple_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_auth_cred_simple_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["username"] = _core.svn_auth_cred_simple_t_username_set
+    __swig_getmethods__["username"] = _core.svn_auth_cred_simple_t_username_get
+    __swig_setmethods__["password"] = _core.svn_auth_cred_simple_t_password_set
+    __swig_getmethods__["password"] = _core.svn_auth_cred_simple_t_password_get
+    __swig_setmethods__["may_save"] = _core.svn_auth_cred_simple_t_may_save_set
+    __swig_getmethods__["may_save"] = _core.svn_auth_cred_simple_t_may_save_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_cred_simple_t"""
      # Remember the pool that owns the underlying C struct (defaults to the
      # global application pool).  _is_valid is a weak reference, so it goes
      # dead as soon as the parent pool is destroyed.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # Calling the dead weakref returns None (falsy) -> assert fires.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # _swig_getattr builds a fresh proxy wrapper on every access; copy over
      # any Python-side state that was cached on a previously returned wrapper
      # for this member (see the _members cache kept by __setattr__).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      # Propagate pool-validity checking to the returned member proxy.
      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper so Python-side attributes set on it survive the
      # next __getattr__, then write through to the C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_cred_simple_t"""
        # Allocate a new C struct via the extension module and take ownership.
        _swig_setattr(self, svn_auth_cred_simple_t, 'this', apply(_core.new_svn_auth_cred_simple_t, args))
        _swig_setattr(self, svn_auth_cred_simple_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_cred_simple_t):
        """__del__(self)"""
        # Free the C struct only if this proxy owns it.  The bare except is
        # the generated-SWIG idiom: swallow errors during interpreter teardown.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_cred_simple_tPtr(svn_auth_cred_simple_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_cred_simple_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_cred_simple_t, 'thisown', 0)
        self.__class__ = svn_auth_cred_simple_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_cred_simple_t_swigregister(svn_auth_cred_simple_tPtr)

# Credential-kind constant re-exported from the C extension module.
SVN_AUTH_CRED_USERNAME = _core.SVN_AUTH_CRED_USERNAME
class svn_auth_cred_username_t:
    """Proxy of C svn_auth_cred_username_t struct"""
    # Accessor tables consulted by _swig_setattr/_swig_getattr: field name ->
    # C-level setter/getter from the _core extension module.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_cred_username_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_cred_username_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class;
    # definition order matters in this generated code.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_cred_username_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["username"] = _core.svn_auth_cred_username_t_username_set
    __swig_getmethods__["username"] = _core.svn_auth_cred_username_t_username_get
    __swig_setmethods__["may_save"] = _core.svn_auth_cred_username_t_may_save_set
    __swig_getmethods__["may_save"] = _core.svn_auth_cred_username_t_may_save_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_cred_username_t"""
      # Track the pool that owns the C memory; the weakref goes dead when the
      # pool is destroyed, which assert_valid() below relies on.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_cred_username_t"""
        # Allocate a new C struct and take ownership (thisown = 1).
        _swig_setattr(self, svn_auth_cred_username_t, 'this', apply(_core.new_svn_auth_cred_username_t, args))
        _swig_setattr(self, svn_auth_cred_username_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_cred_username_t):
        """__del__(self)"""
        # Bare except is the generated idiom: ignore errors at shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_cred_username_tPtr(svn_auth_cred_username_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_cred_username_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_cred_username_t, 'thisown', 0)
        self.__class__ = svn_auth_cred_username_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_cred_username_t_swigregister(svn_auth_cred_username_tPtr)

# Credential-kind constant re-exported from the C extension module.
SVN_AUTH_CRED_SSL_CLIENT_CERT = _core.SVN_AUTH_CRED_SSL_CLIENT_CERT
class svn_auth_cred_ssl_client_cert_t:
    """Proxy of C svn_auth_cred_ssl_client_cert_t struct"""
    # Accessor tables consulted by _swig_setattr/_swig_getattr: field name ->
    # C-level setter/getter from the _core extension module.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_cred_ssl_client_cert_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_cred_ssl_client_cert_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_cred_ssl_client_cert_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["cert_file"] = _core.svn_auth_cred_ssl_client_cert_t_cert_file_set
    __swig_getmethods__["cert_file"] = _core.svn_auth_cred_ssl_client_cert_t_cert_file_get
    __swig_setmethods__["may_save"] = _core.svn_auth_cred_ssl_client_cert_t_may_save_set
    __swig_getmethods__["may_save"] = _core.svn_auth_cred_ssl_client_cert_t_may_save_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_cred_ssl_client_cert_t"""
      # Track the pool owning the C memory; the weakref dies with the pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_cred_ssl_client_cert_t"""
        # Allocate a new C struct and take ownership (thisown = 1).
        _swig_setattr(self, svn_auth_cred_ssl_client_cert_t, 'this', apply(_core.new_svn_auth_cred_ssl_client_cert_t, args))
        _swig_setattr(self, svn_auth_cred_ssl_client_cert_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_cred_ssl_client_cert_t):
        """__del__(self)"""
        # Bare except is the generated idiom: ignore errors at shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_cred_ssl_client_cert_tPtr(svn_auth_cred_ssl_client_cert_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_cred_ssl_client_cert_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_cred_ssl_client_cert_t, 'thisown', 0)
        self.__class__ = svn_auth_cred_ssl_client_cert_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_cred_ssl_client_cert_t_swigregister(svn_auth_cred_ssl_client_cert_tPtr)

# Credential-kind constant re-exported from the C extension module.
SVN_AUTH_CRED_SSL_CLIENT_CERT_PW = _core.SVN_AUTH_CRED_SSL_CLIENT_CERT_PW
class svn_auth_cred_ssl_client_cert_pw_t:
    """Proxy of C svn_auth_cred_ssl_client_cert_pw_t struct"""
    # Accessor tables consulted by _swig_setattr/_swig_getattr: field name ->
    # C-level setter/getter from the _core extension module.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_cred_ssl_client_cert_pw_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_cred_ssl_client_cert_pw_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_cred_ssl_client_cert_pw_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["password"] = _core.svn_auth_cred_ssl_client_cert_pw_t_password_set
    __swig_getmethods__["password"] = _core.svn_auth_cred_ssl_client_cert_pw_t_password_get
    __swig_setmethods__["may_save"] = _core.svn_auth_cred_ssl_client_cert_pw_t_may_save_set
    __swig_getmethods__["may_save"] = _core.svn_auth_cred_ssl_client_cert_pw_t_may_save_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_cred_ssl_client_cert_pw_t"""
      # Track the pool owning the C memory; the weakref dies with the pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_cred_ssl_client_cert_pw_t"""
        # Allocate a new C struct and take ownership (thisown = 1).
        _swig_setattr(self, svn_auth_cred_ssl_client_cert_pw_t, 'this', apply(_core.new_svn_auth_cred_ssl_client_cert_pw_t, args))
        _swig_setattr(self, svn_auth_cred_ssl_client_cert_pw_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_cred_ssl_client_cert_pw_t):
        """__del__(self)"""
        # Bare except is the generated idiom: ignore errors at shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_cred_ssl_client_cert_pw_tPtr(svn_auth_cred_ssl_client_cert_pw_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_cred_ssl_client_cert_pw_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_cred_ssl_client_cert_pw_t, 'thisown', 0)
        self.__class__ = svn_auth_cred_ssl_client_cert_pw_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_cred_ssl_client_cert_pw_t_swigregister(svn_auth_cred_ssl_client_cert_pw_tPtr)

# Credential-kind constant re-exported from the C extension module.
SVN_AUTH_CRED_SSL_SERVER_TRUST = _core.SVN_AUTH_CRED_SSL_SERVER_TRUST
class svn_auth_ssl_server_cert_info_t:
    """Proxy of C svn_auth_ssl_server_cert_info_t struct"""
    # Accessor tables consulted by _swig_setattr/_swig_getattr: field name ->
    # C-level setter/getter from the _core extension module.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_ssl_server_cert_info_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_ssl_server_cert_info_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_ssl_server_cert_info_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["hostname"] = _core.svn_auth_ssl_server_cert_info_t_hostname_set
    __swig_getmethods__["hostname"] = _core.svn_auth_ssl_server_cert_info_t_hostname_get
    __swig_setmethods__["fingerprint"] = _core.svn_auth_ssl_server_cert_info_t_fingerprint_set
    __swig_getmethods__["fingerprint"] = _core.svn_auth_ssl_server_cert_info_t_fingerprint_get
    __swig_setmethods__["valid_from"] = _core.svn_auth_ssl_server_cert_info_t_valid_from_set
    __swig_getmethods__["valid_from"] = _core.svn_auth_ssl_server_cert_info_t_valid_from_get
    __swig_setmethods__["valid_until"] = _core.svn_auth_ssl_server_cert_info_t_valid_until_set
    __swig_getmethods__["valid_until"] = _core.svn_auth_ssl_server_cert_info_t_valid_until_get
    __swig_setmethods__["issuer_dname"] = _core.svn_auth_ssl_server_cert_info_t_issuer_dname_set
    __swig_getmethods__["issuer_dname"] = _core.svn_auth_ssl_server_cert_info_t_issuer_dname_get
    __swig_setmethods__["ascii_cert"] = _core.svn_auth_ssl_server_cert_info_t_ascii_cert_set
    __swig_getmethods__["ascii_cert"] = _core.svn_auth_ssl_server_cert_info_t_ascii_cert_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_ssl_server_cert_info_t"""
      # Track the pool owning the C memory; the weakref dies with the pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_ssl_server_cert_info_t"""
        # Allocate a new C struct and take ownership (thisown = 1).
        _swig_setattr(self, svn_auth_ssl_server_cert_info_t, 'this', apply(_core.new_svn_auth_ssl_server_cert_info_t, args))
        _swig_setattr(self, svn_auth_ssl_server_cert_info_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_ssl_server_cert_info_t):
        """__del__(self)"""
        # Bare except is the generated idiom: ignore errors at shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_ssl_server_cert_info_tPtr(svn_auth_ssl_server_cert_info_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_ssl_server_cert_info_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_ssl_server_cert_info_t, 'thisown', 0)
        self.__class__ = svn_auth_ssl_server_cert_info_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_ssl_server_cert_info_t_swigregister(svn_auth_ssl_server_cert_info_tPtr)
+
+
def svn_auth_ssl_server_cert_info_dup(*args):
    """svn_auth_ssl_server_cert_info_dup(svn_auth_ssl_server_cert_info_t info, apr_pool_t pool) -> svn_auth_ssl_server_cert_info_t"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_ssl_server_cert_info_dup(*args)
class svn_auth_cred_ssl_server_trust_t:
    """Proxy of C svn_auth_cred_ssl_server_trust_t struct"""
    # Accessor tables consulted by _swig_setattr/_swig_getattr: field name ->
    # C-level setter/getter from the _core extension module.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_cred_ssl_server_trust_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_cred_ssl_server_trust_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_cred_ssl_server_trust_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    __swig_setmethods__["may_save"] = _core.svn_auth_cred_ssl_server_trust_t_may_save_set
    __swig_getmethods__["may_save"] = _core.svn_auth_cred_ssl_server_trust_t_may_save_get
    __swig_setmethods__["accepted_failures"] = _core.svn_auth_cred_ssl_server_trust_t_accepted_failures_set
    __swig_getmethods__["accepted_failures"] = _core.svn_auth_cred_ssl_server_trust_t_accepted_failures_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_cred_ssl_server_trust_t"""
      # Track the pool owning the C memory; the weakref dies with the pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_auth_cred_ssl_server_trust_t"""
        # Allocate a new C struct and take ownership (thisown = 1).
        _swig_setattr(self, svn_auth_cred_ssl_server_trust_t, 'this', apply(_core.new_svn_auth_cred_ssl_server_trust_t, args))
        _swig_setattr(self, svn_auth_cred_ssl_server_trust_t, 'thisown', 1)
    def __del__(self, destroy=_core.delete_svn_auth_cred_ssl_server_trust_t):
        """__del__(self)"""
        # Bare except is the generated idiom: ignore errors at shutdown.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_cred_ssl_server_trust_tPtr(svn_auth_cred_ssl_server_trust_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_cred_ssl_server_trust_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_cred_ssl_server_trust_t, 'thisown', 0)
        self.__class__ = svn_auth_cred_ssl_server_trust_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_cred_ssl_server_trust_t_swigregister(svn_auth_cred_ssl_server_trust_tPtr)

# SSL certificate failure bit-flags re-exported from the C extension module.
SVN_AUTH_SSL_NOTYETVALID = _core.SVN_AUTH_SSL_NOTYETVALID
SVN_AUTH_SSL_EXPIRED = _core.SVN_AUTH_SSL_EXPIRED
SVN_AUTH_SSL_CNMISMATCH = _core.SVN_AUTH_SSL_CNMISMATCH
SVN_AUTH_SSL_UNKNOWNCA = _core.SVN_AUTH_SSL_UNKNOWNCA
SVN_AUTH_SSL_OTHER = _core.SVN_AUTH_SSL_OTHER
+
def svn_auth_open(*args):
    """
    svn_auth_open(svn_auth_baton_t auth_baton, apr_array_header_t providers, 
        apr_pool_t pool)
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_open(*args)
+
def svn_auth_set_parameter(*args):
    """svn_auth_set_parameter(svn_auth_baton_t auth_baton, char name, void value)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_set_parameter(*args)
# Runtime auth-parameter name constants re-exported from the C module; use
# these as the `name` argument to svn_auth_set_parameter().
SVN_AUTH_PARAM_PREFIX = _core.SVN_AUTH_PARAM_PREFIX
SVN_AUTH_PARAM_DEFAULT_USERNAME = _core.SVN_AUTH_PARAM_DEFAULT_USERNAME
SVN_AUTH_PARAM_DEFAULT_PASSWORD = _core.SVN_AUTH_PARAM_DEFAULT_PASSWORD
SVN_AUTH_PARAM_NON_INTERACTIVE = _core.SVN_AUTH_PARAM_NON_INTERACTIVE
SVN_AUTH_PARAM_DONT_STORE_PASSWORDS = _core.SVN_AUTH_PARAM_DONT_STORE_PASSWORDS
SVN_AUTH_PARAM_NO_AUTH_CACHE = _core.SVN_AUTH_PARAM_NO_AUTH_CACHE
SVN_AUTH_PARAM_SSL_SERVER_FAILURES = _core.SVN_AUTH_PARAM_SSL_SERVER_FAILURES
SVN_AUTH_PARAM_SSL_SERVER_CERT_INFO = _core.SVN_AUTH_PARAM_SSL_SERVER_CERT_INFO
SVN_AUTH_PARAM_CONFIG = _core.SVN_AUTH_PARAM_CONFIG
SVN_AUTH_PARAM_SERVER_GROUP = _core.SVN_AUTH_PARAM_SERVER_GROUP
SVN_AUTH_PARAM_CONFIG_DIR = _core.SVN_AUTH_PARAM_CONFIG_DIR
+
def svn_auth_first_credentials(*args):
    """
    svn_auth_first_credentials(void credentials, svn_auth_iterstate_t state, char cred_kind, 
        char realmstring, svn_auth_baton_t auth_baton, 
        apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_first_credentials(*args)
+
def svn_auth_next_credentials(*args):
    """svn_auth_next_credentials(void credentials, svn_auth_iterstate_t state, apr_pool_t pool) -> svn_error_t"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_next_credentials(*args)
+
def svn_auth_save_credentials(*args):
    """svn_auth_save_credentials(svn_auth_iterstate_t state, apr_pool_t pool) -> svn_error_t"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_save_credentials(*args)
+
def svn_auth_get_simple_prompt_provider(*args):
    """
    svn_auth_get_simple_prompt_provider(svn_auth_provider_object_t provider, svn_auth_simple_prompt_func_t prompt_func, 
        int retry_limit, 
        apr_pool_t pool)
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_simple_prompt_provider(*args)
+
def svn_auth_get_username_prompt_provider(*args):
    """
    svn_auth_get_username_prompt_provider(svn_auth_provider_object_t provider, svn_auth_username_prompt_func_t prompt_func, 
        int retry_limit, 
        apr_pool_t pool)
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_username_prompt_provider(*args)
+
def svn_auth_get_simple_provider(*args):
    """svn_auth_get_simple_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_simple_provider(*args)
+
def svn_auth_get_windows_simple_provider(*args):
    """svn_auth_get_windows_simple_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_windows_simple_provider(*args)
+
def svn_auth_get_username_provider(*args):
    """svn_auth_get_username_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_username_provider(*args)
+
def svn_auth_get_ssl_server_trust_file_provider(*args):
    """svn_auth_get_ssl_server_trust_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_ssl_server_trust_file_provider(*args)
+
def svn_auth_get_windows_ssl_server_trust_provider(*args):
    """svn_auth_get_windows_ssl_server_trust_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_windows_ssl_server_trust_provider(*args)
+
def svn_auth_get_ssl_client_cert_file_provider(*args):
    """svn_auth_get_ssl_client_cert_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_ssl_client_cert_file_provider(*args)
+
def svn_auth_get_ssl_client_cert_pw_file_provider(*args):
    """svn_auth_get_ssl_client_cert_pw_file_provider(svn_auth_provider_object_t provider, apr_pool_t pool)"""
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_ssl_client_cert_pw_file_provider(*args)
+
def svn_auth_get_ssl_server_trust_prompt_provider(*args):
    """
    svn_auth_get_ssl_server_trust_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_server_trust_prompt_func_t prompt_func, 
        apr_pool_t pool)
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_ssl_server_trust_prompt_provider(*args)
+
def svn_auth_get_ssl_client_cert_prompt_provider(*args):
    """
    svn_auth_get_ssl_client_cert_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_client_cert_prompt_func_t prompt_func, 
        int retry_limit, 
        apr_pool_t pool)
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_ssl_client_cert_prompt_provider(*args)
+
def svn_auth_get_ssl_client_cert_pw_prompt_provider(*args):
    """
    svn_auth_get_ssl_client_cert_pw_prompt_provider(svn_auth_provider_object_t provider, svn_auth_ssl_client_cert_pw_prompt_func_t prompt_func, 
        int retry_limit, 
        apr_pool_t pool)
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_get_ssl_client_cert_pw_prompt_provider(*args)
class svn_auth_baton_t:
    """Proxy of C svn_auth_baton_t struct"""
    # Opaque struct: no field accessors, no Python-side constructor/destructor.
    # Instances only come back from the C layer via the Ptr class below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_baton_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_baton_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_baton_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_baton_t"""
      # Track the pool owning the C memory; the weakref dies with the pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_baton_tPtr(svn_auth_baton_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_baton_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_baton_t, 'thisown', 0)
        self.__class__ = svn_auth_baton_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_baton_t_swigregister(svn_auth_baton_tPtr)
+
class svn_auth_iterstate_t:
    """Proxy of C svn_auth_iterstate_t struct"""
    # Opaque struct: no field accessors, no Python-side constructor/destructor.
    # Instances only come back from the C layer via the Ptr class below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_iterstate_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_iterstate_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_iterstate_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_iterstate_t"""
      # Track the pool owning the C memory; the weakref dies with the pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
# Backwards-compatible pointer class: wraps an existing C pointer without
# taking ownership, then re-tags the instance as the main proxy class.
class svn_auth_iterstate_tPtr(svn_auth_iterstate_t):
    def __init__(self, this):
        _swig_setattr(self, svn_auth_iterstate_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_iterstate_t, 'thisown', 0)
        self.__class__ = svn_auth_iterstate_t
# Register the proxy with the C module so C-created structs surface as it.
_core.svn_auth_iterstate_t_swigregister(svn_auth_iterstate_tPtr)
+
+
def svn_auth_provider_invoke_first_credentials(*args):
    """
    svn_auth_provider_invoke_first_credentials(svn_auth_provider_t _obj, void credentials, void iter_baton, 
        void provider_baton, apr_hash_t parameters, 
        char realmstring, apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_provider_invoke_first_credentials(*args)
+
def svn_auth_provider_invoke_next_credentials(*args):
    """
    svn_auth_provider_invoke_next_credentials(svn_auth_provider_t _obj, void credentials, void iter_baton, 
        void provider_baton, apr_hash_t parameters, 
        char realmstring, apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_provider_invoke_next_credentials(*args)
+
def svn_auth_provider_invoke_save_credentials(*args):
    """
    svn_auth_provider_invoke_save_credentials(svn_auth_provider_t _obj, svn_boolean_t saved, void credentials, 
        void provider_baton, apr_hash_t parameters, 
        char realmstring, apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_provider_invoke_save_credentials(*args)
+
def svn_auth_invoke_simple_prompt_func(*args):
    """
    svn_auth_invoke_simple_prompt_func(svn_auth_simple_prompt_func_t _obj, svn_auth_cred_simple_t cred, 
        void baton, char realm, char username, 
        svn_boolean_t may_save, apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_invoke_simple_prompt_func(*args)
+
def svn_auth_invoke_username_prompt_func(*args):
    """
    svn_auth_invoke_username_prompt_func(svn_auth_username_prompt_func_t _obj, svn_auth_cred_username_t cred, 
        void baton, char realm, svn_boolean_t may_save, 
        apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_invoke_username_prompt_func(*args)
+
def svn_auth_invoke_ssl_server_trust_prompt_func(*args):
    """
    svn_auth_invoke_ssl_server_trust_prompt_func(svn_auth_ssl_server_trust_prompt_func_t _obj, svn_auth_cred_ssl_server_trust_t cred, 
        void baton, 
        char realm, apr_uint32_t failures, svn_auth_ssl_server_cert_info_t cert_info, 
        svn_boolean_t may_save, 
        apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_invoke_ssl_server_trust_prompt_func(*args)
+
def svn_auth_invoke_ssl_client_cert_prompt_func(*args):
    """
    svn_auth_invoke_ssl_client_cert_prompt_func(svn_auth_ssl_client_cert_prompt_func_t _obj, svn_auth_cred_ssl_client_cert_t cred, 
        void baton, char realm, 
        svn_boolean_t may_save, apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_invoke_ssl_client_cert_prompt_func(*args)
+
def svn_auth_invoke_ssl_client_cert_pw_prompt_func(*args):
    """
    svn_auth_invoke_ssl_client_cert_pw_prompt_func(svn_auth_ssl_client_cert_pw_prompt_func_t _obj, svn_auth_cred_ssl_client_cert_pw_t cred, 
        void baton, 
        char realm, svn_boolean_t may_save, apr_pool_t pool) -> svn_error_t
    """
    # Forward all positional arguments straight to the C binding.
    return _core.svn_auth_invoke_ssl_client_cert_pw_prompt_func(*args)
class svn_auth_simple_prompt_func_t:
    """Proxy of C svn_auth_simple_prompt_func_t struct"""
    # Opaque callback-pointer wrapper: no field accessors and no Python-side
    # constructor; calling an instance invokes the underlying C callback.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_simple_prompt_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_simple_prompt_func_t, name)
    # NOTE: the two lambdas above are intentionally shadowed by the
    # `def __getattr__` / `def __setattr__` methods later in this class.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_simple_prompt_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_simple_prompt_func_t"""
      # Track the pool owning the C memory; the weakref dies with the pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Merge Python-side state cached by __setattr__ onto the freshly built
      # member proxy (a new wrapper is constructed on every access).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Cache the wrapper, then write through to the underlying C struct.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Invoke the wrapped C callback through the module-level helper.
      return svn_auth_invoke_simple_prompt_func(self, *args)
+
+
+class svn_auth_simple_prompt_func_tPtr(svn_auth_simple_prompt_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_auth_simple_prompt_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_simple_prompt_func_t, 'thisown', 0)
+        self.__class__ = svn_auth_simple_prompt_func_t
+_core.svn_auth_simple_prompt_func_t_swigregister(svn_auth_simple_prompt_func_tPtr)
+
class svn_auth_username_prompt_func_t:
    """Proxy of C svn_auth_username_prompt_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_username_prompt_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_username_prompt_func_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_username_prompt_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_username_prompt_func_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the underlying C callback.
      return svn_auth_invoke_username_prompt_func(self, *args)


class svn_auth_username_prompt_func_tPtr(svn_auth_username_prompt_func_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_auth_username_prompt_func_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_username_prompt_func_t, 'thisown', 0)
        self.__class__ = svn_auth_username_prompt_func_t
_core.svn_auth_username_prompt_func_t_swigregister(svn_auth_username_prompt_func_tPtr)
+
class svn_auth_ssl_server_trust_prompt_func_t:
    """Proxy of C svn_auth_ssl_server_trust_prompt_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_ssl_server_trust_prompt_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_ssl_server_trust_prompt_func_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_ssl_server_trust_prompt_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_ssl_server_trust_prompt_func_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the underlying C callback.
      return svn_auth_invoke_ssl_server_trust_prompt_func(self, *args)


class svn_auth_ssl_server_trust_prompt_func_tPtr(svn_auth_ssl_server_trust_prompt_func_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_auth_ssl_server_trust_prompt_func_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_ssl_server_trust_prompt_func_t, 'thisown', 0)
        self.__class__ = svn_auth_ssl_server_trust_prompt_func_t
_core.svn_auth_ssl_server_trust_prompt_func_t_swigregister(svn_auth_ssl_server_trust_prompt_func_tPtr)
+
class svn_auth_ssl_client_cert_prompt_func_t:
    """Proxy of C svn_auth_ssl_client_cert_prompt_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_ssl_client_cert_prompt_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_ssl_client_cert_prompt_func_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_ssl_client_cert_prompt_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_ssl_client_cert_prompt_func_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the underlying C callback.
      return svn_auth_invoke_ssl_client_cert_prompt_func(self, *args)


class svn_auth_ssl_client_cert_prompt_func_tPtr(svn_auth_ssl_client_cert_prompt_func_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_auth_ssl_client_cert_prompt_func_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_ssl_client_cert_prompt_func_t, 'thisown', 0)
        self.__class__ = svn_auth_ssl_client_cert_prompt_func_t
_core.svn_auth_ssl_client_cert_prompt_func_t_swigregister(svn_auth_ssl_client_cert_prompt_func_tPtr)
+
class svn_auth_ssl_client_cert_pw_prompt_func_t:
    """Proxy of C svn_auth_ssl_client_cert_pw_prompt_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_auth_ssl_client_cert_pw_prompt_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_auth_ssl_client_cert_pw_prompt_func_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_auth_ssl_client_cert_pw_prompt_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_auth_ssl_client_cert_pw_prompt_func_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the underlying C callback.
      return svn_auth_invoke_ssl_client_cert_pw_prompt_func(self, *args)


class svn_auth_ssl_client_cert_pw_prompt_func_tPtr(svn_auth_ssl_client_cert_pw_prompt_func_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_auth_ssl_client_cert_pw_prompt_func_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_auth_ssl_client_cert_pw_prompt_func_t, 'thisown', 0)
        self.__class__ = svn_auth_ssl_client_cert_pw_prompt_func_t
_core.svn_auth_ssl_client_cert_pw_prompt_func_t_swigregister(svn_auth_ssl_client_cert_pw_prompt_func_tPtr)
+
# Re-export the SVN_CONFIG_* constants from the C extension module so they
# are available at this module's top level (SWIG-generated pass-through).
SVN_CONFIG_CATEGORY_SERVERS = _core.SVN_CONFIG_CATEGORY_SERVERS
SVN_CONFIG_SECTION_GROUPS = _core.SVN_CONFIG_SECTION_GROUPS
SVN_CONFIG_SECTION_GLOBAL = _core.SVN_CONFIG_SECTION_GLOBAL
SVN_CONFIG_OPTION_HTTP_PROXY_HOST = _core.SVN_CONFIG_OPTION_HTTP_PROXY_HOST
SVN_CONFIG_OPTION_HTTP_PROXY_PORT = _core.SVN_CONFIG_OPTION_HTTP_PROXY_PORT
SVN_CONFIG_OPTION_HTTP_PROXY_USERNAME = _core.SVN_CONFIG_OPTION_HTTP_PROXY_USERNAME
SVN_CONFIG_OPTION_HTTP_PROXY_PASSWORD = _core.SVN_CONFIG_OPTION_HTTP_PROXY_PASSWORD
SVN_CONFIG_OPTION_HTTP_PROXY_EXCEPTIONS = _core.SVN_CONFIG_OPTION_HTTP_PROXY_EXCEPTIONS
SVN_CONFIG_OPTION_HTTP_TIMEOUT = _core.SVN_CONFIG_OPTION_HTTP_TIMEOUT
SVN_CONFIG_OPTION_HTTP_COMPRESSION = _core.SVN_CONFIG_OPTION_HTTP_COMPRESSION
SVN_CONFIG_OPTION_NEON_DEBUG_MASK = _core.SVN_CONFIG_OPTION_NEON_DEBUG_MASK
SVN_CONFIG_OPTION_HTTP_AUTH_TYPES = _core.SVN_CONFIG_OPTION_HTTP_AUTH_TYPES
SVN_CONFIG_OPTION_SSL_AUTHORITY_FILES = _core.SVN_CONFIG_OPTION_SSL_AUTHORITY_FILES
SVN_CONFIG_OPTION_SSL_TRUST_DEFAULT_CA = _core.SVN_CONFIG_OPTION_SSL_TRUST_DEFAULT_CA
SVN_CONFIG_OPTION_SSL_CLIENT_CERT_FILE = _core.SVN_CONFIG_OPTION_SSL_CLIENT_CERT_FILE
SVN_CONFIG_OPTION_SSL_CLIENT_CERT_PASSWORD = _core.SVN_CONFIG_OPTION_SSL_CLIENT_CERT_PASSWORD
SVN_CONFIG_OPTION_SSL_PKCS11_PROVIDER = _core.SVN_CONFIG_OPTION_SSL_PKCS11_PROVIDER
SVN_CONFIG_OPTION_HTTP_LIBRARY = _core.SVN_CONFIG_OPTION_HTTP_LIBRARY
SVN_CONFIG_CATEGORY_CONFIG = _core.SVN_CONFIG_CATEGORY_CONFIG
SVN_CONFIG_SECTION_AUTH = _core.SVN_CONFIG_SECTION_AUTH
SVN_CONFIG_OPTION_STORE_PASSWORDS = _core.SVN_CONFIG_OPTION_STORE_PASSWORDS
SVN_CONFIG_OPTION_STORE_AUTH_CREDS = _core.SVN_CONFIG_OPTION_STORE_AUTH_CREDS
SVN_CONFIG_SECTION_HELPERS = _core.SVN_CONFIG_SECTION_HELPERS
SVN_CONFIG_OPTION_EDITOR_CMD = _core.SVN_CONFIG_OPTION_EDITOR_CMD
SVN_CONFIG_OPTION_DIFF_CMD = _core.SVN_CONFIG_OPTION_DIFF_CMD
SVN_CONFIG_OPTION_DIFF3_CMD = _core.SVN_CONFIG_OPTION_DIFF3_CMD
SVN_CONFIG_OPTION_DIFF3_HAS_PROGRAM_ARG = _core.SVN_CONFIG_OPTION_DIFF3_HAS_PROGRAM_ARG
SVN_CONFIG_OPTION_MERGE_TOOL_CMD = _core.SVN_CONFIG_OPTION_MERGE_TOOL_CMD
SVN_CONFIG_SECTION_MISCELLANY = _core.SVN_CONFIG_SECTION_MISCELLANY
SVN_CONFIG_OPTION_GLOBAL_IGNORES = _core.SVN_CONFIG_OPTION_GLOBAL_IGNORES
SVN_CONFIG_OPTION_LOG_ENCODING = _core.SVN_CONFIG_OPTION_LOG_ENCODING
SVN_CONFIG_OPTION_USE_COMMIT_TIMES = _core.SVN_CONFIG_OPTION_USE_COMMIT_TIMES
SVN_CONFIG_OPTION_TEMPLATE_ROOT = _core.SVN_CONFIG_OPTION_TEMPLATE_ROOT
SVN_CONFIG_OPTION_ENABLE_AUTO_PROPS = _core.SVN_CONFIG_OPTION_ENABLE_AUTO_PROPS
SVN_CONFIG_OPTION_NO_UNLOCK = _core.SVN_CONFIG_OPTION_NO_UNLOCK
SVN_CONFIG_OPTION_MIMETYPES_FILE = _core.SVN_CONFIG_OPTION_MIMETYPES_FILE
SVN_CONFIG_OPTION_PRESERVED_CF_EXTS = _core.SVN_CONFIG_OPTION_PRESERVED_CF_EXTS
SVN_CONFIG_OPTION_INTERACTIVE_CONFLICTS = _core.SVN_CONFIG_OPTION_INTERACTIVE_CONFLICTS
SVN_CONFIG_SECTION_TUNNELS = _core.SVN_CONFIG_SECTION_TUNNELS
SVN_CONFIG_SECTION_AUTO_PROPS = _core.SVN_CONFIG_SECTION_AUTO_PROPS
SVN_CONFIG_SECTION_GENERAL = _core.SVN_CONFIG_SECTION_GENERAL
SVN_CONFIG_OPTION_ANON_ACCESS = _core.SVN_CONFIG_OPTION_ANON_ACCESS
SVN_CONFIG_OPTION_AUTH_ACCESS = _core.SVN_CONFIG_OPTION_AUTH_ACCESS
SVN_CONFIG_OPTION_PASSWORD_DB = _core.SVN_CONFIG_OPTION_PASSWORD_DB
SVN_CONFIG_OPTION_REALM = _core.SVN_CONFIG_OPTION_REALM
SVN_CONFIG_OPTION_AUTHZ_DB = _core.SVN_CONFIG_OPTION_AUTHZ_DB
SVN_CONFIG_SECTION_SASL = _core.SVN_CONFIG_SECTION_SASL
SVN_CONFIG_OPTION_USE_SASL = _core.SVN_CONFIG_OPTION_USE_SASL
SVN_CONFIG_OPTION_MIN_SSF = _core.SVN_CONFIG_OPTION_MIN_SSF
SVN_CONFIG_OPTION_MAX_SSF = _core.SVN_CONFIG_OPTION_MAX_SSF
SVN_CONFIG_SECTION_USERS = _core.SVN_CONFIG_SECTION_USERS
SVN_CONFIG__DEFAULT_GLOBAL_IGNORES_LINE_1 = _core.SVN_CONFIG__DEFAULT_GLOBAL_IGNORES_LINE_1
SVN_CONFIG__DEFAULT_GLOBAL_IGNORES_LINE_2 = _core.SVN_CONFIG__DEFAULT_GLOBAL_IGNORES_LINE_2
SVN_CONFIG_DEFAULT_GLOBAL_IGNORES = _core.SVN_CONFIG_DEFAULT_GLOBAL_IGNORES
SVN_CONFIG_TRUE = _core.SVN_CONFIG_TRUE
SVN_CONFIG_FALSE = _core.SVN_CONFIG_FALSE
+
def svn_config_get_config(*args):
    """svn_config_get_config(apr_hash_t cfg_hash, char config_dir, apr_pool_t pool) -> svn_error_t"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_get_config(*args)
+
def svn_config_read(*args):
    """
    svn_config_read(svn_config_t cfgp, char file, svn_boolean_t must_exist,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_read(*args)
+
def svn_config_merge(*args):
    """svn_config_merge(svn_config_t cfg, char file, svn_boolean_t must_exist) -> svn_error_t"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_merge(*args)
+
def svn_config_get(*args):
    """
    svn_config_get(svn_config_t cfg, char valuep, char section, char option,
        char default_value)
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_get(*args)
+
def svn_config_set(*args):
    """svn_config_set(svn_config_t cfg, char section, char option, char value)"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_set(*args)
+
def svn_config_get_bool(*args):
    """
    svn_config_get_bool(svn_config_t cfg, svn_boolean_t valuep, char section,
        char option, svn_boolean_t default_value) -> svn_error_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_get_bool(*args)
+
def svn_config_set_bool(*args):
    """svn_config_set_bool(svn_config_t cfg, char section, char option, svn_boolean_t value)"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_set_bool(*args)
+
def svn_config_enumerate_sections(*args):
    """
    svn_config_enumerate_sections(svn_config_t cfg, svn_config_section_enumerator_t callback,
        void baton) -> int
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_enumerate_sections(*args)
+
def svn_config_enumerate_sections2(*args):
    """
    svn_config_enumerate_sections2(svn_config_t cfg, svn_config_section_enumerator2_t callback,
        void baton, apr_pool_t pool) -> int
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_enumerate_sections2(*args)
+
def svn_config_enumerate(*args):
    """
    svn_config_enumerate(svn_config_t cfg, char section, svn_config_enumerator_t callback,
        void baton) -> int
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_enumerate(*args)
+
def svn_config_enumerate2(*args):
    """
    svn_config_enumerate2(svn_config_t cfg, char section, svn_config_enumerator2_t callback,
        void baton, apr_pool_t pool) -> int
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_enumerate2(*args)
+
def svn_config_has_section(*args):
    """svn_config_has_section(svn_config_t cfg, char section) -> svn_boolean_t"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_has_section(*args)
+
def svn_config_find_group(*args):
    """svn_config_find_group(svn_config_t cfg, char key, char master_section, apr_pool_t pool) -> char"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_find_group(*args)
+
def svn_config_get_server_setting(*args):
    """
    svn_config_get_server_setting(svn_config_t cfg, char server_group, char option_name,
        char default_value) -> char
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_get_server_setting(*args)
+
def svn_config_get_server_setting_int(*args):
    """
    svn_config_get_server_setting_int(svn_config_t cfg, char server_group, char option_name,
        apr_int64_t default_value, apr_int64_t result_value,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_get_server_setting_int(*args)
+
def svn_config_ensure(*args):
    """svn_config_ensure(char config_dir, apr_pool_t pool) -> svn_error_t"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_ensure(*args)
# Re-exported constant from the C extension module (SWIG pass-through).
SVN_CONFIG_REALMSTRING_KEY = _core.SVN_CONFIG_REALMSTRING_KEY
+
def svn_config_read_auth_data(*args):
    """
    svn_config_read_auth_data(apr_hash_t hash, char cred_kind, char realmstring,
        char config_dir, apr_pool_t pool) -> svn_error_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_read_auth_data(*args)
+
def svn_config_write_auth_data(*args):
    """
    svn_config_write_auth_data(apr_hash_t hash, char cred_kind, char realmstring,
        char config_dir, apr_pool_t pool) -> svn_error_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_write_auth_data(*args)
class svn_config_t:
    """Proxy of C svn_config_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_config_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_config_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_config_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_config_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)


class svn_config_tPtr(svn_config_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_config_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_config_t, 'thisown', 0)
        self.__class__ = svn_config_t
_core.svn_config_t_swigregister(svn_config_tPtr)
+
+
def svn_config_invoke_section_enumerator(*args):
    """svn_config_invoke_section_enumerator(svn_config_section_enumerator_t _obj, char name, void baton) -> svn_boolean_t"""
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_invoke_section_enumerator(*args)
+
def svn_config_invoke_section_enumerator2(*args):
    """
    svn_config_invoke_section_enumerator2(svn_config_section_enumerator2_t _obj, char name, void baton,
        apr_pool_t pool) -> svn_boolean_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_invoke_section_enumerator2(*args)
+
def svn_config_invoke_enumerator(*args):
    """
    svn_config_invoke_enumerator(svn_config_enumerator_t _obj, char name, char value,
        void baton) -> svn_boolean_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_invoke_enumerator(*args)
+
def svn_config_invoke_enumerator2(*args):
    """
    svn_config_invoke_enumerator2(svn_config_enumerator2_t _obj, char name, char value,
        void baton, apr_pool_t pool) -> svn_boolean_t
    """
    # Direct *args call replaces the deprecated apply() builtin (same behavior).
    return _core.svn_config_invoke_enumerator2(*args)
class svn_config_section_enumerator_t:
    """Proxy of C svn_config_section_enumerator_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_config_section_enumerator_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_config_section_enumerator_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_config_section_enumerator_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_config_section_enumerator_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the underlying C callback.
      return svn_config_invoke_section_enumerator(self, *args)


class svn_config_section_enumerator_tPtr(svn_config_section_enumerator_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_config_section_enumerator_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_config_section_enumerator_t, 'thisown', 0)
        self.__class__ = svn_config_section_enumerator_t
_core.svn_config_section_enumerator_t_swigregister(svn_config_section_enumerator_tPtr)
+
class svn_config_section_enumerator2_t:
    """Proxy of C svn_config_section_enumerator2_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_config_section_enumerator2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_config_section_enumerator2_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_config_section_enumerator2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_config_section_enumerator2_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the underlying C callback.
      return svn_config_invoke_section_enumerator2(self, *args)


class svn_config_section_enumerator2_tPtr(svn_config_section_enumerator2_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_config_section_enumerator2_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_config_section_enumerator2_t, 'thisown', 0)
        self.__class__ = svn_config_section_enumerator2_t
_core.svn_config_section_enumerator2_t_swigregister(svn_config_section_enumerator2_tPtr)
+
class svn_config_enumerator_t:
    """Proxy of C svn_config_enumerator_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_config_enumerator_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_config_enumerator_t, name)
    def __init__(self):
        # Instances are only created from the C layer; direct construction is
        # an error.  Parenthesized raise replaces the Python-2-only
        # "raise Class, msg" form (behavior unchanged).
        raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_config_enumerator_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_config_enumerator_t"""
      import libsvn.core, weakref
      # Default to the global application pool when no pool is supplied.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        # Weak reference lets assert_valid() detect pool destruction.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # If this attribute was previously set through __setattr__, copy the
      # old proxy's instance dict onto the freshly wrapped value so
      # Python-side state survives the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the Python-side object so __getattr__ can restore its
      # instance dict later.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the underlying C callback.
      return svn_config_invoke_enumerator(self, *args)


class svn_config_enumerator_tPtr(svn_config_enumerator_t):
    """Pointer wrapper; rebinds itself to the plain proxy class."""
    def __init__(self, this):
        _swig_setattr(self, svn_config_enumerator_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_config_enumerator_t, 'thisown', 0)
        self.__class__ = svn_config_enumerator_t
_core.svn_config_enumerator_t_swigregister(svn_config_enumerator_tPtr)
+
class svn_config_enumerator2_t:
    """Proxy of C svn_config_enumerator2_t struct.

    SWIG-generated callback wrapper; instances are created by the C layer,
    never constructed directly from Python.
    """
    __swig_setmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the full __setattr__ /
    # __getattr__ definitions below; kept for SWIG-generated-code fidelity.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_config_enumerator2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_config_enumerator2_t, name)

    def __init__(self):
        # Call form of raise (Python 2/3 compatible); the old
        # "raise E, msg" statement syntax was removed in Python 3.
        raise RuntimeError("No constructor defined")

    def __repr__(self):
        return "<%s.%s; proxy of C svn_config_enumerator2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)

    def set_parent_pool(self, parent_pool=None):
        """Bind this proxy to parent_pool (default: the application pool)."""
        import libsvn.core, weakref
        # Direct __dict__ writes bypass the overridden __setattr__ below.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's validity sentinel so stale proxies can
            # be detected after the pool is destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state previously stored for this member so
        # wrapper objects keep their attributes across C round-trips.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    # Wrapped value has no __dict__ (e.g. int, string).
                    pass
        # Propagate pool-validity checking to wrapped member objects.
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value

    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the assigned wrapper so __getattr__ can restore its
        # Python-side state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
        # Invoke the wrapped C callback.
        return svn_config_invoke_enumerator2(self, *args)
+
class svn_config_enumerator2_tPtr(svn_config_enumerator2_t):
    # Back-compat pointer class: adopts an existing C 'this' pointer without
    # taking ownership (thisown=0), then re-brands the instance as the main
    # proxy class so all behavior comes from svn_config_enumerator2_t.
    def __init__(self, this):
        _swig_setattr(self, svn_config_enumerator2_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_config_enumerator2_t, 'thisown', 0)
        self.__class__ = svn_config_enumerator2_t
# Register the pointer class with the C extension so C-created objects get
# wrapped in this proxy type.
_core.svn_config_enumerator2_t_swigregister(svn_config_enumerator2_tPtr)
+
+
def svn_utf_initialize(*args):
    """svn_utf_initialize(apr_pool_t pool)"""
    # apply() was removed in Python 3; direct *args expansion is the
    # equivalent, portable spelling (same change applied to every generated
    # wrapper below).
    return _core.svn_utf_initialize(*args)

def svn_utf_stringbuf_to_utf8(*args):
    """svn_utf_stringbuf_to_utf8(svn_stringbuf_t dest, svn_stringbuf_t src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_stringbuf_to_utf8(*args)

def svn_utf_string_to_utf8(*args):
    """svn_utf_string_to_utf8(svn_string_t dest, svn_string_t src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_string_to_utf8(*args)

def svn_utf_cstring_to_utf8(*args):
    """svn_utf_cstring_to_utf8(char dest, char src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_cstring_to_utf8(*args)

def svn_utf_cstring_to_utf8_ex2(*args):
    """svn_utf_cstring_to_utf8_ex2(char dest, char src, char frompage, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_cstring_to_utf8_ex2(*args)

def svn_utf_cstring_to_utf8_ex(*args):
    """
    svn_utf_cstring_to_utf8_ex(char dest, char src, char frompage, char convset_key,
        apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_utf_cstring_to_utf8_ex(*args)

def svn_utf_stringbuf_from_utf8(*args):
    """svn_utf_stringbuf_from_utf8(svn_stringbuf_t dest, svn_stringbuf_t src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_stringbuf_from_utf8(*args)

def svn_utf_string_from_utf8(*args):
    """svn_utf_string_from_utf8(svn_string_t dest, svn_string_t src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_string_from_utf8(*args)

def svn_utf_cstring_from_utf8(*args):
    """svn_utf_cstring_from_utf8(char dest, char src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_cstring_from_utf8(*args)

def svn_utf_cstring_from_utf8_ex2(*args):
    """svn_utf_cstring_from_utf8_ex2(char dest, char src, char topage, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_cstring_from_utf8_ex2(*args)

def svn_utf_cstring_from_utf8_ex(*args):
    """
    svn_utf_cstring_from_utf8_ex(char dest, char src, char topage, char convset_key,
        apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_utf_cstring_from_utf8_ex(*args)

def svn_utf_cstring_from_utf8_fuzzy(*args):
    """svn_utf_cstring_from_utf8_fuzzy(char src, apr_pool_t pool) -> char"""
    return _core.svn_utf_cstring_from_utf8_fuzzy(*args)

def svn_utf_cstring_from_utf8_stringbuf(*args):
    """svn_utf_cstring_from_utf8_stringbuf(char dest, svn_stringbuf_t src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_cstring_from_utf8_stringbuf(*args)

def svn_utf_cstring_from_utf8_string(*args):
    """svn_utf_cstring_from_utf8_string(char dest, svn_string_t src, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_utf_cstring_from_utf8_string(*args)

def svn_nls_init(*args):
    """svn_nls_init() -> svn_error_t"""
    return _core.svn_nls_init(*args)
+
def svn_path_internal_style(*args):
    """svn_path_internal_style(char path, apr_pool_t pool) -> char"""
    # apply() was removed in Python 3; *args expansion is equivalent.
    return _core.svn_path_internal_style(*args)

def svn_path_local_style(*args):
    """svn_path_local_style(char path, apr_pool_t pool) -> char"""
    return _core.svn_path_local_style(*args)

def svn_path_splitext(*args):
    """svn_path_splitext(char path_root, char path_ext, char path, apr_pool_t pool)"""
    return _core.svn_path_splitext(*args)

def svn_path_is_empty(*args):
    """svn_path_is_empty(char path) -> int"""
    return _core.svn_path_is_empty(*args)

def svn_dirent_is_root(*args):
    """svn_dirent_is_root(char dirent, apr_size_t len) -> svn_boolean_t"""
    return _core.svn_dirent_is_root(*args)

def svn_path_canonicalize(*args):
    """svn_path_canonicalize(char path, apr_pool_t pool) -> char"""
    return _core.svn_path_canonicalize(*args)

def svn_path_is_canonical(*args):
    """svn_path_is_canonical(char path, apr_pool_t pool) -> svn_boolean_t"""
    return _core.svn_path_is_canonical(*args)

def svn_path_compare_paths(*args):
    """svn_path_compare_paths(char path1, char path2) -> int"""
    return _core.svn_path_compare_paths(*args)

def svn_path_get_longest_ancestor(*args):
    """svn_path_get_longest_ancestor(char path1, char path2, apr_pool_t pool) -> char"""
    return _core.svn_path_get_longest_ancestor(*args)

def svn_path_is_uri_safe(*args):
    """svn_path_is_uri_safe(char path) -> svn_boolean_t"""
    return _core.svn_path_is_uri_safe(*args)

# Constant re-exported from the C extension module.
SVN_MERGEINFO_NONINHERITABLE_STR = _core.SVN_MERGEINFO_NONINHERITABLE_STR
+
def svn_mergeinfo_parse(*args):
    """svn_mergeinfo_parse(svn_mergeinfo_t mergeinfo, char input, apr_pool_t pool) -> svn_error_t"""
    # apply() was removed in Python 3; *args expansion is equivalent.
    return _core.svn_mergeinfo_parse(*args)

def svn_mergeinfo_diff(*args):
    """
    svn_mergeinfo_diff(svn_mergeinfo_t deleted, svn_mergeinfo_t added, svn_mergeinfo_t mergefrom,
        svn_mergeinfo_t mergeto,
        svn_boolean_t consider_inheritance, apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_mergeinfo_diff(*args)

def svn_mergeinfo_remove(*args):
    """
    svn_mergeinfo_remove(svn_mergeinfo_t mergeinfo, svn_mergeinfo_t eraser,
        svn_mergeinfo_t whiteboard, apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_mergeinfo_remove(*args)

def svn_rangelist_diff(*args):
    """
    svn_rangelist_diff(apr_array_header_t deleted, apr_array_header_t added,
        apr_array_header_t from, apr_array_header_t to,
        svn_boolean_t consider_inheritance, apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_rangelist_diff(*args)

def svn_rangelist_remove(*args):
    """
    svn_rangelist_remove(apr_array_header_t output, apr_array_header_t eraser,
        apr_array_header_t whiteboard, svn_boolean_t consider_inheritance,
        apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_rangelist_remove(*args)

def svn_mergeinfo_intersect(*args):
    """
    svn_mergeinfo_intersect(svn_mergeinfo_t mergeinfo, svn_mergeinfo_t mergeinfo1,
        svn_mergeinfo_t mergeinfo2, apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_mergeinfo_intersect(*args)

def svn_rangelist_intersect(*args):
    """
    svn_rangelist_intersect(apr_array_header_t rangelist, apr_array_header_t rangelist1,
        apr_array_header_t rangelist2, svn_boolean_t consider_inheritance,
        apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_rangelist_intersect(*args)

def svn_rangelist_to_string(*args):
    """
    svn_rangelist_to_string(svn_string_t output, apr_array_header_t rangelist,
        apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_rangelist_to_string(*args)

def svn_rangelist_inheritable(*args):
    """
    svn_rangelist_inheritable(apr_array_header_t inheritable_rangelist, apr_array_header_t rangelist,
        svn_revnum_t start, svn_revnum_t end,
        apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_rangelist_inheritable(*args)

def svn_mergeinfo_inheritable(*args):
    """
    svn_mergeinfo_inheritable(svn_mergeinfo_t inheritable_mergeinfo, svn_mergeinfo_t mergeinfo,
        char path, svn_revnum_t start,
        svn_revnum_t end, apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_mergeinfo_inheritable(*args)

def svn_mergeinfo_to_string(*args):
    """svn_mergeinfo_to_string(svn_string_t output, svn_mergeinfo_t mergeinput, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_mergeinfo_to_string(*args)

def svn_mergeinfo_dup(*args):
    """svn_mergeinfo_dup(svn_mergeinfo_t mergeinfo, apr_pool_t pool) -> svn_mergeinfo_t"""
    return _core.svn_mergeinfo_dup(*args)

def svn_rangelist_dup(*args):
    """svn_rangelist_dup(apr_array_header_t rangelist, apr_pool_t pool) -> apr_array_header_t"""
    return _core.svn_rangelist_dup(*args)

# svn_mergeinfo_inheritance_t enum values re-exported from the C module.
svn_mergeinfo_explicit = _core.svn_mergeinfo_explicit
svn_mergeinfo_inherited = _core.svn_mergeinfo_inherited
svn_mergeinfo_nearest_ancestor = _core.svn_mergeinfo_nearest_ancestor

def svn_inheritance_to_word(*args):
    """svn_inheritance_to_word(svn_mergeinfo_inheritance_t inherit) -> char"""
    return _core.svn_inheritance_to_word(*args)

def svn_inheritance_from_word(*args):
    """svn_inheritance_from_word(char word) -> int"""
    return _core.svn_inheritance_from_word(*args)

# svn_io_file_del_t enum values re-exported from the C module.
svn_io_file_del_none = _core.svn_io_file_del_none
svn_io_file_del_on_close = _core.svn_io_file_del_on_close
svn_io_file_del_on_pool_cleanup = _core.svn_io_file_del_on_pool_cleanup
class svn_io_dirent_t:
    """Proxy of C svn_io_dirent_t struct.

    SWIG-generated data wrapper exposing the 'kind' and 'special' members.
    """
    __swig_setmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the full __setattr__ /
    # __getattr__ definitions below; kept for SWIG-generated-code fidelity.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_io_dirent_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_io_dirent_t, name)

    def __repr__(self):
        return "<%s.%s; proxy of C svn_io_dirent_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)

    # Accessor hooks for the underlying C struct members.
    __swig_setmethods__["kind"] = _core.svn_io_dirent_t_kind_set
    __swig_getmethods__["kind"] = _core.svn_io_dirent_t_kind_get
    __swig_setmethods__["special"] = _core.svn_io_dirent_t_special_set
    __swig_getmethods__["special"] = _core.svn_io_dirent_t_special_get

    def set_parent_pool(self, parent_pool=None):
        """Bind this proxy to parent_pool (default: the application pool)."""
        import libsvn.core, weakref
        # Direct __dict__ writes bypass the overridden __setattr__ below.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's validity sentinel so stale proxies can
            # be detected after the pool is destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state previously stored for this member so
        # wrapper objects keep their attributes across C round-trips.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    # Wrapped value has no __dict__ (e.g. int, string).
                    pass
        # Propagate pool-validity checking to wrapped member objects.
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value

    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the assigned wrapper so __getattr__ can restore its
        # Python-side state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_io_dirent_t"""
        # apply() was removed in Python 3; *args expansion is equivalent.
        _swig_setattr(self, svn_io_dirent_t, 'this', _core.new_svn_io_dirent_t(*args))
        _swig_setattr(self, svn_io_dirent_t, 'thisown', 1)

    def __del__(self, destroy=_core.delete_svn_io_dirent_t):
        """__del__(self)"""
        try:
            if self.thisown: destroy(self)
        except: pass  # best-effort cleanup; never raise from __del__
+
class svn_io_dirent_tPtr(svn_io_dirent_t):
    # Back-compat pointer class: adopts an existing C 'this' pointer without
    # taking ownership (thisown=0), then re-brands the instance as the main
    # proxy class so all behavior comes from svn_io_dirent_t.
    def __init__(self, this):
        _swig_setattr(self, svn_io_dirent_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_io_dirent_t, 'thisown', 0)
        self.__class__ = svn_io_dirent_t
# Register the pointer class with the C extension so C-created objects get
# wrapped in this proxy type.
_core.svn_io_dirent_t_swigregister(svn_io_dirent_tPtr)
+
+
def svn_io_open_unique_file2(*args):
    """
    svn_io_open_unique_file2(apr_file_t f, char unique_name_p, char path, char suffix,
        svn_io_file_del_t delete_when, apr_pool_t pool) -> svn_error_t
    """
    # apply() was removed in Python 3; *args expansion is equivalent.
    return _core.svn_io_open_unique_file2(*args)

def svn_io_open_unique_file(*args):
    """
    svn_io_open_unique_file(apr_file_t f, char unique_name_p, char path, char suffix,
        svn_boolean_t delete_on_close, apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_io_open_unique_file(*args)

def svn_io_file_checksum(*args):
    """svn_io_file_checksum(unsigned char digest, char file, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_io_file_checksum(*args)

def svn_io_files_contents_same_p(*args):
    """svn_io_files_contents_same_p(svn_boolean_t same, char file1, char file2, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_io_files_contents_same_p(*args)

def svn_stream_empty(*args):
    """svn_stream_empty(apr_pool_t pool) -> svn_stream_t"""
    return _core.svn_stream_empty(*args)

def svn_stream_disown(*args):
    """svn_stream_disown(svn_stream_t stream, apr_pool_t pool) -> svn_stream_t"""
    return _core.svn_stream_disown(*args)

def svn_stream_from_aprfile2(*args):
    """svn_stream_from_aprfile2(apr_file_t file, svn_boolean_t disown, apr_pool_t pool) -> svn_stream_t"""
    return _core.svn_stream_from_aprfile2(*args)

def svn_stream_from_aprfile(*args):
    """svn_stream_from_aprfile(apr_file_t file, apr_pool_t pool) -> svn_stream_t"""
    return _core.svn_stream_from_aprfile(*args)

def svn_stream_for_stdout(*args):
    """svn_stream_for_stdout(svn_stream_t out, apr_pool_t pool) -> svn_error_t"""
    return _core.svn_stream_for_stdout(*args)
+
+def svn_stream_readline(*args):
+    """
+    svn_stream_readline(svn_stream_t stream, svn_stringbuf_t stringbuf, char eol, 
+        svn_boolean_t eof, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_stream_readline, args)
+
+def svn_stream_copy2(*args):
+    """
+    svn_stream_copy2(svn_stream_t from, svn_stream_t to, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_stream_copy2, args)
+
+def svn_stream_copy(*args):
+    """svn_stream_copy(svn_stream_t from, svn_stream_t to, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_stream_copy, args)
+
+def svn_stream_contents_same(*args):
+    """
+    svn_stream_contents_same(svn_boolean_t same, svn_stream_t stream1, svn_stream_t stream2, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_stream_contents_same, args)
+
+def svn_stringbuf_from_file2(*args):
+    """svn_stringbuf_from_file2(svn_stringbuf_t result, char filename, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_stringbuf_from_file2, args)
+
+def svn_stringbuf_from_file(*args):
+    """svn_stringbuf_from_file(svn_stringbuf_t result, char filename, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_stringbuf_from_file, args)
+
+def svn_stringbuf_from_aprfile(*args):
+    """svn_stringbuf_from_aprfile(svn_stringbuf_t result, apr_file_t file, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_stringbuf_from_aprfile, args)
+
+def svn_io_remove_file(*args):
+    """svn_io_remove_file(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_io_remove_file, args)
+
+def svn_io_remove_dir2(*args):
+    """
+    svn_io_remove_dir2(char path, svn_boolean_t ignore_enoent, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_io_remove_dir2, args)
+
+def svn_io_remove_dir(*args):
+    """svn_io_remove_dir(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_io_remove_dir, args)
+
+def svn_io_parse_mimetypes_file(*args):
+    """svn_io_parse_mimetypes_file(apr_hash_t type_map, char mimetypes_file, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_io_parse_mimetypes_file, args)
+
+def svn_io_detect_mimetype2(*args):
+    """
+    svn_io_detect_mimetype2(char mimetype, char file, apr_hash_t mimetype_map, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_io_detect_mimetype2, args)
+
+def svn_io_detect_mimetype(*args):
+    """svn_io_detect_mimetype(char mimetype, char file, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_io_detect_mimetype, args)
class svn_stream_t:
    """Proxy of C svn_stream_t struct.

    SWIG-generated opaque handle; instances are created by the C layer,
    never constructed directly from Python.
    """
    __swig_setmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the full __setattr__ /
    # __getattr__ definitions below; kept for SWIG-generated-code fidelity.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_stream_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_stream_t, name)

    def __init__(self):
        # Call form of raise (Python 2/3 compatible); the old
        # "raise E, msg" statement syntax was removed in Python 3.
        raise RuntimeError("No constructor defined")

    def __repr__(self):
        return "<%s.%s; proxy of C svn_stream_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)

    def set_parent_pool(self, parent_pool=None):
        """Bind this proxy to parent_pool (default: the application pool)."""
        import libsvn.core, weakref
        # Direct __dict__ writes bypass the overridden __setattr__ below.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's validity sentinel so stale proxies can
            # be detected after the pool is destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state previously stored for this member so
        # wrapper objects keep their attributes across C round-trips.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    # Wrapped value has no __dict__ (e.g. int, string).
                    pass
        # Propagate pool-validity checking to wrapped member objects.
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value

    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the assigned wrapper so __getattr__ can restore its
        # Python-side state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)
+
class svn_stream_tPtr(svn_stream_t):
    # Back-compat pointer class: adopts an existing C 'this' pointer without
    # taking ownership (thisown=0), then re-brands the instance as the main
    # proxy class so all behavior comes from svn_stream_t.
    def __init__(self, this):
        _swig_setattr(self, svn_stream_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_stream_t, 'thisown', 0)
        self.__class__ = svn_stream_t
# Register the pointer class with the C extension so C-created objects get
# wrapped in this proxy type.
_core.svn_stream_t_swigregister(svn_stream_tPtr)
+
+
def svn_read_invoke_fn(*args):
    """svn_read_invoke_fn(svn_read_fn_t _obj, void baton, char buffer) -> svn_error_t"""
    # apply() was removed in Python 3; *args expansion is equivalent.
    return _core.svn_read_invoke_fn(*args)

def svn_write_invoke_fn(*args):
    """svn_write_invoke_fn(svn_write_fn_t _obj, void baton, char data) -> svn_error_t"""
    return _core.svn_write_invoke_fn(*args)

def svn_close_invoke_fn(*args):
    """svn_close_invoke_fn(svn_close_fn_t _obj, void baton) -> svn_error_t"""
    return _core.svn_close_invoke_fn(*args)

def svn_io_invoke_walk_func(*args):
    """
    svn_io_invoke_walk_func(svn_io_walk_func_t _obj, void baton, char path, apr_finfo_t finfo,
        apr_pool_t pool) -> svn_error_t
    """
    return _core.svn_io_invoke_walk_func(*args)
class svn_read_fn_t:
    """Proxy of C svn_read_fn_t struct.

    SWIG-generated callback wrapper; instances are created by the C layer,
    never constructed directly from Python.
    """
    __swig_setmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the full __setattr__ /
    # __getattr__ definitions below; kept for SWIG-generated-code fidelity.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_read_fn_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_read_fn_t, name)

    def __init__(self):
        # Call form of raise (Python 2/3 compatible); the old
        # "raise E, msg" statement syntax was removed in Python 3.
        raise RuntimeError("No constructor defined")

    def __repr__(self):
        return "<%s.%s; proxy of C svn_read_fn_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)

    def set_parent_pool(self, parent_pool=None):
        """Bind this proxy to parent_pool (default: the application pool)."""
        import libsvn.core, weakref
        # Direct __dict__ writes bypass the overridden __setattr__ below.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's validity sentinel so stale proxies can
            # be detected after the pool is destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state previously stored for this member so
        # wrapper objects keep their attributes across C round-trips.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    # Wrapped value has no __dict__ (e.g. int, string).
                    pass
        # Propagate pool-validity checking to wrapped member objects.
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value

    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the assigned wrapper so __getattr__ can restore its
        # Python-side state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
        # Invoke the wrapped C callback.
        return svn_read_invoke_fn(self, *args)
+
class svn_read_fn_tPtr(svn_read_fn_t):
    # Back-compat pointer class: adopts an existing C 'this' pointer without
    # taking ownership (thisown=0), then re-brands the instance as the main
    # proxy class so all behavior comes from svn_read_fn_t.
    def __init__(self, this):
        _swig_setattr(self, svn_read_fn_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_read_fn_t, 'thisown', 0)
        self.__class__ = svn_read_fn_t
# Register the pointer class with the C extension so C-created objects get
# wrapped in this proxy type.
_core.svn_read_fn_t_swigregister(svn_read_fn_tPtr)
+
class svn_write_fn_t:
    """Proxy of C svn_write_fn_t struct.

    SWIG-generated callback wrapper; instances are created by the C layer,
    never constructed directly from Python.
    """
    __swig_setmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the full __setattr__ /
    # __getattr__ definitions below; kept for SWIG-generated-code fidelity.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_write_fn_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_write_fn_t, name)

    def __init__(self):
        # Call form of raise (Python 2/3 compatible); the old
        # "raise E, msg" statement syntax was removed in Python 3.
        raise RuntimeError("No constructor defined")

    def __repr__(self):
        return "<%s.%s; proxy of C svn_write_fn_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)

    def set_parent_pool(self, parent_pool=None):
        """Bind this proxy to parent_pool (default: the application pool)."""
        import libsvn.core, weakref
        # Direct __dict__ writes bypass the overridden __setattr__ below.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's validity sentinel so stale proxies can
            # be detected after the pool is destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state previously stored for this member so
        # wrapper objects keep their attributes across C round-trips.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    # Wrapped value has no __dict__ (e.g. int, string).
                    pass
        # Propagate pool-validity checking to wrapped member objects.
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value

    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the assigned wrapper so __getattr__ can restore its
        # Python-side state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
        # Invoke the wrapped C callback.
        return svn_write_invoke_fn(self, *args)
+
class svn_write_fn_tPtr(svn_write_fn_t):
    # Back-compat pointer class: adopts an existing C 'this' pointer without
    # taking ownership (thisown=0), then re-brands the instance as the main
    # proxy class so all behavior comes from svn_write_fn_t.
    def __init__(self, this):
        _swig_setattr(self, svn_write_fn_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_write_fn_t, 'thisown', 0)
        self.__class__ = svn_write_fn_t
# Register the pointer class with the C extension so C-created objects get
# wrapped in this proxy type.
_core.svn_write_fn_t_swigregister(svn_write_fn_tPtr)
+
class svn_close_fn_t:
    """Proxy of C svn_close_fn_t struct.

    SWIG-generated callback wrapper; instances are created by the C layer,
    never constructed directly from Python.
    """
    __swig_setmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the full __setattr__ /
    # __getattr__ definitions below; kept for SWIG-generated-code fidelity.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_close_fn_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_close_fn_t, name)

    def __init__(self):
        # Call form of raise (Python 2/3 compatible); the old
        # "raise E, msg" statement syntax was removed in Python 3.
        raise RuntimeError("No constructor defined")

    def __repr__(self):
        return "<%s.%s; proxy of C svn_close_fn_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)

    def set_parent_pool(self, parent_pool=None):
        """Bind this proxy to parent_pool (default: the application pool)."""
        import libsvn.core, weakref
        # Direct __dict__ writes bypass the overridden __setattr__ below.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's validity sentinel so stale proxies can
            # be detected after the pool is destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state previously stored for this member so
        # wrapper objects keep their attributes across C round-trips.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    # Wrapped value has no __dict__ (e.g. int, string).
                    pass
        # Propagate pool-validity checking to wrapped member objects.
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value

    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the assigned wrapper so __getattr__ can restore its
        # Python-side state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
        # Invoke the wrapped C callback.
        return svn_close_invoke_fn(self, *args)
+
class svn_close_fn_tPtr(svn_close_fn_t):
    # Back-compat pointer class: adopts an existing C 'this' pointer without
    # taking ownership (thisown=0), then re-brands the instance as the main
    # proxy class so all behavior comes from svn_close_fn_t.
    def __init__(self, this):
        _swig_setattr(self, svn_close_fn_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_close_fn_t, 'thisown', 0)
        self.__class__ = svn_close_fn_t
# Register the pointer class with the C extension so C-created objects get
# wrapped in this proxy type.
_core.svn_close_fn_t_swigregister(svn_close_fn_tPtr)
+
class svn_io_walk_func_t:
    """Proxy of C svn_io_walk_func_t struct.

    SWIG-generated callback wrapper; instances are created by the C layer,
    never constructed directly from Python.
    """
    __swig_setmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the full __setattr__ /
    # __getattr__ definitions below; kept for SWIG-generated-code fidelity.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_io_walk_func_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_io_walk_func_t, name)

    def __init__(self):
        # Call form of raise (Python 2/3 compatible); the old
        # "raise E, msg" statement syntax was removed in Python 3.
        raise RuntimeError("No constructor defined")

    def __repr__(self):
        return "<%s.%s; proxy of C svn_io_walk_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)

    def set_parent_pool(self, parent_pool=None):
        """Bind this proxy to parent_pool (default: the application pool)."""
        import libsvn.core, weakref
        # Direct __dict__ writes bypass the overridden __setattr__ below.
        self.__dict__["_parent_pool"] = \
            parent_pool or libsvn.core.application_pool
        if self.__dict__["_parent_pool"]:
            # Weakly track the pool's validity sentinel so stale proxies can
            # be detected after the pool is destroyed.
            self.__dict__["_is_valid"] = weakref.ref(
                self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
        """Assert that this object is using valid pool memory"""
        if "_is_valid" in self.__dict__:
            assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
        """Get an attribute from this object"""
        self.assert_valid()
        value = _swig_getattr(self, self.__class__, name)
        # Restore any Python-side state previously stored for this member so
        # wrapper objects keep their attributes across C round-trips.
        members = self.__dict__.get("_members")
        if members is not None:
            old_value = members.get(name)
            if (old_value is not None and value is not None and
                    value is not old_value):
                try:
                    value.__dict__.update(old_value.__dict__)
                except AttributeError:
                    # Wrapped value has no __dict__ (e.g. int, string).
                    pass
        # Propagate pool-validity checking to wrapped member objects.
        if hasattr(value, "assert_valid"):
            value.assert_valid()
        return value

    def __setattr__(self, name, value):
        """Set an attribute on this object"""
        self.assert_valid()
        # Remember the assigned wrapper so __getattr__ can restore its
        # Python-side state later.
        self.__dict__.setdefault("_members", {})[name] = value
        return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
        # Invoke the wrapped C callback.
        return svn_io_invoke_walk_func(self, *args)
+
+class svn_io_walk_func_tPtr(svn_io_walk_func_t):
+    # SWIG 1.3 "Ptr" shim: adopts a raw C pointer, then rebrands the
+    # instance as the main proxy class.
+    def __init__(self, this):
+        _swig_setattr(self, svn_io_walk_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_io_walk_func_t, 'thisown', 0)
+        self.__class__ = svn_io_walk_func_t
+_core.svn_io_walk_func_t_swigregister(svn_io_walk_func_tPtr)
+
+
+# Thin wrappers over the _core C extension; apply() is the Python 2
+# spelling of f(*args).
+def svn_swig_py_set_application_pool(*args):
+    """svn_swig_py_set_application_pool(PyObject py_pool, apr_pool_t pool)"""
+    return apply(_core.svn_swig_py_set_application_pool, args)
+
+def svn_swig_py_clear_application_pool(*args):
+    """svn_swig_py_clear_application_pool()"""
+    return apply(_core.svn_swig_py_clear_application_pool, args)
+class apr_array_header_t:
+    """Proxy of C apr_array_header_t struct"""
+    # Opaque proxy: no data members are registered; instances come from
+    # the C layer via the *Ptr shim below.  The lambda hooks here are
+    # rebound by the def __getattr__/__setattr__ further down.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, apr_array_header_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, apr_array_header_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C apr_array_header_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for apr_array_header_t"""
+      # Tie lifetime to parent_pool (default: the global application
+      # pool); __dict__ writes bypass the custom __setattr__ below.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        # Weak ref to the pool's validity flag (avoids a reference cycle).
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # Unbound proxies (no _is_valid) always pass.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-side state saved from a previous wrapper of this
+      # member (recorded in _members by __setattr__).
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      # Propagate validity checking to the returned wrapper.
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Record the wrapper so __getattr__ can restore its state later.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class apr_array_header_tPtr(apr_array_header_t):
+    # Pointer shim: adopt the raw C pointer, then rebrand as the proxy.
+    def __init__(self, this):
+        _swig_setattr(self, apr_array_header_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, apr_array_header_t, 'thisown', 0)
+        self.__class__ = apr_array_header_t
+_core.apr_array_header_t_swigregister(apr_array_header_tPtr)
+
+class apr_file_t:
+    """Proxy of C apr_file_t struct"""
+    # Opaque proxy: no data members are registered; instances come from
+    # the C layer via the *Ptr shim below.  The lambda hooks here are
+    # rebound by the def __getattr__/__setattr__ further down.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, apr_file_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, apr_file_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C apr_file_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for apr_file_t"""
+      # Tie lifetime to parent_pool (default: the global application
+      # pool); __dict__ writes bypass the custom __setattr__ below.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        # Weak ref to the pool's validity flag (avoids a reference cycle).
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # Unbound proxies (no _is_valid) always pass.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-side state saved from a previous wrapper of this
+      # member (recorded in _members by __setattr__).
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      # Propagate validity checking to the returned wrapper.
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Record the wrapper so __getattr__ can restore its state later.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class apr_file_tPtr(apr_file_t):
+    # Pointer shim: adopt the raw C pointer, then rebrand as the proxy.
+    def __init__(self, this):
+        _swig_setattr(self, apr_file_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, apr_file_t, 'thisown', 0)
+        self.__class__ = apr_file_t
+_core.apr_file_t_swigregister(apr_file_tPtr)
+
+class apr_hash_t:
+    """Proxy of C apr_hash_t struct"""
+    # Opaque proxy: no data members are registered; instances come from
+    # the C layer via the *Ptr shim below.  The lambda hooks here are
+    # rebound by the def __getattr__/__setattr__ further down.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, apr_hash_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, apr_hash_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C apr_hash_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for apr_hash_t"""
+      # Tie lifetime to parent_pool (default: the global application
+      # pool); __dict__ writes bypass the custom __setattr__ below.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        # Weak ref to the pool's validity flag (avoids a reference cycle).
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # Unbound proxies (no _is_valid) always pass.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-side state saved from a previous wrapper of this
+      # member (recorded in _members by __setattr__).
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      # Propagate validity checking to the returned wrapper.
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Record the wrapper so __getattr__ can restore its state later.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class apr_hash_tPtr(apr_hash_t):
+    # Pointer shim: adopt the raw C pointer, then rebrand as the proxy.
+    def __init__(self, this):
+        _swig_setattr(self, apr_hash_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, apr_hash_t, 'thisown', 0)
+        self.__class__ = apr_hash_t
+_core.apr_hash_t_swigregister(apr_hash_tPtr)
+
+import threading
+
+# Module-global application-level pool (set lazily by
+# apr_pool_t.set_parent_pool) and the lock guarding its creation.
+application_pool = None
+application_pool_lock = threading.Lock()
+class GenericSWIGWrapper:
+  def __init__(self, this, pool):
+    """Create new Generic SWIG wrapper object"""
+    # Fallback wrapper used by apr_pool_t._wrap for SWIG objects that
+    # have no set_parent_pool of their own; ties `this` to `pool`.
+    import weakref
+    self.this = this
+    self._parent_pool = pool
+    self._is_valid = weakref.ref(pool._is_valid)
+
+  def set_parent_pool(self, pool):
+    """Set the parent pool of this object"""
+    self._parent_pool = pool
+
+  def valid(self):
+    """Is this object valid?"""
+    # Returns the pool's validity object while it is alive, or None
+    # (falsy) once the pool has been destroyed.
+    return self._is_valid()
+
+  def assert_valid(self):
+    """Assert that this object is still valid"""
+    assert self.valid(), "This object has already been destroyed"
+
+  def _unwrap(self):
+    """Return underlying SWIG object"""
+    self.assert_valid()
+    return self.this
+
+def _mark_weakpool_invalid(weakpool):
+  # weakref-callback helper: when a parent pool dies, drop the
+  # _is_valid flag on the (weakly referenced) child pool so that its
+  # valid() starts returning False.
+  if weakpool and weakpool() and hasattr(weakpool(), "_is_valid"):
+    del weakpool()._is_valid
+
+
+class apr_pool_t:
+    """Proxy of C apr_pool_t struct"""
+    # Memory-pool proxy with Python-side lifetime tracking: validity is
+    # a _is_valid attribute whose disappearance (via weakref callbacks)
+    # invalidates this pool and, transitively, its subpools.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, apr_pool_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, apr_pool_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C apr_pool_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new memory pool"""
+      global application_pool
+
+      # Lock: creation of the application-level pool must be atomic
+      # across threads.
+      try:
+        application_pool_lock.acquire()
+
+        self._parent_pool = parent_pool or application_pool
+        self._mark_valid()
+
+        # Protect important functions from GC
+        # (bound here so destroy() still works during module teardown).
+        self._apr_pool_destroy = _core.apr_pool_destroy
+        self._svn_swig_py_clear_application_pool = \
+          _core.svn_swig_py_clear_application_pool
+
+        # If we are an application-level pool,
+        # then set this pool to be the application-level pool
+        if not self._parent_pool:
+          svn_swig_py_set_application_pool(self, self)
+          application_pool = self
+      finally:
+        application_pool_lock.release()
+
+    def valid(self):
+      """Check whether this memory pool and its parents
+      are still valid"""
+      # _is_valid is deleted (by destroy() or a parent's weakref
+      # callback) when the pool dies.
+      return hasattr(self,"_is_valid")
+
+    def assert_valid(self):
+      """Assert that this memory_pool is still valid."""
+      assert self.valid(), "This pool has already been destroyed"
+
+    def clear(self):
+      """Clear embedded memory pool. Invalidate all subpools."""
+      # Re-registering with the saved parent refreshes _is_valid, which
+      # invalidates subpools that weakly referenced the old flag.
+      pool = self._parent_pool
+      apr_pool_clear(self)
+      self.set_parent_pool(pool)
+
+    def destroy(self):
+      """Destroy embedded memory pool. If you do not destroy
+      the memory pool manually, Python will destroy it
+      automatically."""
+      global application_pool
+
+      self.assert_valid()
+
+      is_application_pool = not self._parent_pool
+
+      # Destroy pool
+      self._apr_pool_destroy(self)
+
+      # Clear application pool if necessary
+      if is_application_pool:
+        application_pool = None
+        self._svn_swig_py_clear_application_pool()
+
+      # Mark self as invalid
+      if hasattr(self, "_parent_pool"):
+        del self._parent_pool
+      if hasattr(self, "_is_valid"):
+        del self._is_valid
+
+    def __del__(self):
+      """Automatically destroy memory pools, if necessary"""
+      if self.valid():
+        self.destroy()
+
+    def _mark_valid(self):
+      """Mark pool as valid"""
+
+      self._weakparent = None
+
+      if self._parent_pool:
+        import weakref
+
+        # Make sure that the parent object is valid
+        self._parent_pool.assert_valid()
+
+        # Refer to self using a weak reference so that we don't
+        # create a reference cycle
+        weakself = weakref.ref(self)
+
+        # Set up callbacks to mark pool as invalid when parents
+        # are destroyed
+        self._weakparent = weakref.ref(self._parent_pool._is_valid,
+          lambda x: _mark_weakpool_invalid(weakself))
+
+      # Mark pool as valid
+      self._is_valid = lambda: 1
+
+    def _wrap(self, obj):
+      """Mark a SWIG object as owned by this pool"""
+      self.assert_valid()
+      if hasattr(obj, "set_parent_pool"):
+        obj.set_parent_pool(self)
+        return obj
+      elif obj is None:
+        return None
+      else:
+        return GenericSWIGWrapper(obj, self)
+
+
+
+class apr_pool_tPtr(apr_pool_t):
+    # Pointer shim: adopt the raw C pointer, then rebrand as apr_pool_t.
+    def __init__(self, this):
+        _swig_setattr(self, apr_pool_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, apr_pool_t, 'thisown', 0)
+        self.__class__ = apr_pool_t
+_core.apr_pool_t_swigregister(apr_pool_tPtr)
+
+# Initialize a global pool
+# (svn_pool_create is defined earlier in this generated module -- not
+# visible here; with no parent it presumably becomes the application
+# pool, see apr_pool_t.set_parent_pool.  TODO confirm.)
+svn_pool_create()
+
+
+# Thin wrappers over the _core C extension; apply() is the Python 2
+# spelling of f(*args).
+def svn_swig_mergeinfo_merge(*args):
+    """svn_swig_mergeinfo_merge(apr_hash_t mergeinfo_inout, apr_hash_t changes, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_swig_mergeinfo_merge, args)
+
+def svn_swig_mergeinfo_sort(*args):
+    """svn_swig_mergeinfo_sort(apr_hash_t mergeinfo_inout, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_swig_mergeinfo_sort, args)
+
+def svn_swig_rangelist_merge(*args):
+    """
+    svn_swig_rangelist_merge(apr_array_header_t rangelist_inout, apr_array_header_t changes, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_core.svn_swig_rangelist_merge, args)
+
+def svn_swig_rangelist_reverse(*args):
+    """svn_swig_rangelist_reverse(apr_array_header_t rangelist_inout, apr_pool_t pool) -> svn_error_t"""
+    return apply(_core.svn_swig_rangelist_reverse, args)
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/delta.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/delta.py
new file mode 100644
index 0000000..8f97ef5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/delta.py
@@ -0,0 +1,1158 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+import core
+
+import _delta
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
+    # Shared SWIG helper: assigning "this" adopts another proxy's C
+    # pointer (and its ownership flag); other names go through the
+    # generated setter table, falling back to plain __dict__ storage.
+    if (name == "this"):
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)
+    # static=1 forbids inventing brand-new attributes on the proxy.
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)
+
+def _swig_setattr(self,class_type,name,value):
+    # Dynamic variant: always allows adding new attributes (static=0).
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)
+
+def _swig_getattr(self,class_type,name):
+    # Look the name up in the generated getter table; raising
+    # AttributeError (Python 2 raise syntax) preserves normal getattr
+    # semantics for unknown names.
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)
+    raise AttributeError,name
+
+
+# Thin wrappers over the _delta C extension; apply() is the Python 2
+# spelling of f(*args).
+def svn_swig_py_make_editor(*args):
+    """
+    svn_swig_py_make_editor(svn_delta_editor_t editor, void edit_baton, PyObject py_editor, 
+        apr_pool_t pool)
+    """
+    return apply(_delta.svn_swig_py_make_editor, args)
+
+def svn_delta_version(*args):
+    """svn_delta_version() -> svn_version_t"""
+    return apply(_delta.svn_delta_version, args)
+# Direct re-exports of C-module callables.
+svn_txdelta_source = _delta.svn_txdelta_source
+svn_txdelta_target = _delta.svn_txdelta_target
+svn_txdelta_new = _delta.svn_txdelta_new
+class svn_txdelta_op_t:
+    """Proxy of C svn_txdelta_op_t struct"""
+    # Struct proxy with real data members; the accessor tables below
+    # route field access through the generated C getters/setters.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_txdelta_op_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_txdelta_op_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_txdelta_op_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["action_code"] = _delta.svn_txdelta_op_t_action_code_set
+    __swig_getmethods__["action_code"] = _delta.svn_txdelta_op_t_action_code_get
+    __swig_setmethods__["offset"] = _delta.svn_txdelta_op_t_offset_set
+    __swig_getmethods__["offset"] = _delta.svn_txdelta_op_t_offset_get
+    __swig_setmethods__["length"] = _delta.svn_txdelta_op_t_length_set
+    __swig_getmethods__["length"] = _delta.svn_txdelta_op_t_length_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_txdelta_op_t"""
+      # Tie lifetime to parent_pool (default: the global application
+      # pool); __dict__ writes bypass the custom __setattr__ below.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        # Weak ref to the pool's validity flag (avoids a reference cycle).
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # Unbound proxies (no _is_valid) always pass.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-side state saved from a previous wrapper of this
+      # member (recorded in _members by __setattr__).
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      # Propagate validity checking to the returned wrapper.
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Record the wrapper so __getattr__ can restore its state later.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_txdelta_op_t"""
+        _swig_setattr(self, svn_txdelta_op_t, 'this', apply(_delta.new_svn_txdelta_op_t, args))
+        _swig_setattr(self, svn_txdelta_op_t, 'thisown', 1)
+    def __del__(self, destroy=_delta.delete_svn_txdelta_op_t):
+        """__del__(self)"""
+        # destroy is captured as a default argument, presumably so it
+        # stays reachable during interpreter shutdown.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_txdelta_op_tPtr(svn_txdelta_op_t):
+    # Pointer shim: adopt the raw C pointer, then rebrand as the proxy.
+    def __init__(self, this):
+        _swig_setattr(self, svn_txdelta_op_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_txdelta_op_t, 'thisown', 0)
+        self.__class__ = svn_txdelta_op_t
+_delta.svn_txdelta_op_t_swigregister(svn_txdelta_op_tPtr)
+
+class svn_txdelta_window_t:
+    """Proxy of C svn_txdelta_window_t struct"""
+    # Struct proxy with real data members; the accessor tables below
+    # route field access through the generated C getters/setters.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_txdelta_window_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_txdelta_window_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_txdelta_window_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["sview_offset"] = _delta.svn_txdelta_window_t_sview_offset_set
+    __swig_getmethods__["sview_offset"] = _delta.svn_txdelta_window_t_sview_offset_get
+    __swig_setmethods__["sview_len"] = _delta.svn_txdelta_window_t_sview_len_set
+    __swig_getmethods__["sview_len"] = _delta.svn_txdelta_window_t_sview_len_get
+    __swig_setmethods__["tview_len"] = _delta.svn_txdelta_window_t_tview_len_set
+    __swig_getmethods__["tview_len"] = _delta.svn_txdelta_window_t_tview_len_get
+    __swig_setmethods__["num_ops"] = _delta.svn_txdelta_window_t_num_ops_set
+    __swig_getmethods__["num_ops"] = _delta.svn_txdelta_window_t_num_ops_get
+    __swig_setmethods__["src_ops"] = _delta.svn_txdelta_window_t_src_ops_set
+    __swig_getmethods__["src_ops"] = _delta.svn_txdelta_window_t_src_ops_get
+    __swig_setmethods__["ops"] = _delta.svn_txdelta_window_t_ops_set
+    __swig_getmethods__["ops"] = _delta.svn_txdelta_window_t_ops_get
+    # new_data has a getter only -- read-only from Python.
+    __swig_getmethods__["new_data"] = _delta.svn_txdelta_window_t_new_data_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_txdelta_window_t"""
+      # Tie lifetime to parent_pool (default: the global application
+      # pool); __dict__ writes bypass the custom __setattr__ below.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        # Weak ref to the pool's validity flag (avoids a reference cycle).
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # Unbound proxies (no _is_valid) always pass.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-side state saved from a previous wrapper of this
+      # member (recorded in _members by __setattr__).
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      # Propagate validity checking to the returned wrapper.
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Record the wrapper so __getattr__ can restore its state later.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_txdelta_window_t"""
+        _swig_setattr(self, svn_txdelta_window_t, 'this', apply(_delta.new_svn_txdelta_window_t, args))
+        _swig_setattr(self, svn_txdelta_window_t, 'thisown', 1)
+    def __del__(self, destroy=_delta.delete_svn_txdelta_window_t):
+        """__del__(self)"""
+        # destroy is captured as a default argument, presumably so it
+        # stays reachable during interpreter shutdown.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_txdelta_window_tPtr(svn_txdelta_window_t):
+    # Pointer shim: adopt the raw C pointer, then rebrand as the proxy.
+    def __init__(self, this):
+        _swig_setattr(self, svn_txdelta_window_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_txdelta_window_t, 'thisown', 0)
+        self.__class__ = svn_txdelta_window_t
+_delta.svn_txdelta_window_t_swigregister(svn_txdelta_window_tPtr)
+
+
+# Thin wrappers over the _delta C extension; apply() is the Python 2
+# spelling of f(*args).  Docstrings mirror the C prototypes.
+def svn_txdelta_window_dup(*args):
+    """svn_txdelta_window_dup(svn_txdelta_window_t window, apr_pool_t pool) -> svn_txdelta_window_t"""
+    return apply(_delta.svn_txdelta_window_dup, args)
+
+def svn_txdelta_compose_windows(*args):
+    """
+    svn_txdelta_compose_windows(svn_txdelta_window_t window_A, svn_txdelta_window_t window_B, 
+        apr_pool_t pool) -> svn_txdelta_window_t
+    """
+    return apply(_delta.svn_txdelta_compose_windows, args)
+
+def svn_txdelta_apply_instructions(*args):
+    """
+    svn_txdelta_apply_instructions(svn_txdelta_window_t window, char sbuf, char tbuf, 
+        apr_size_t tlen)
+    """
+    return apply(_delta.svn_txdelta_apply_instructions, args)
+
+def svn_txdelta_stream_create(*args):
+    """
+    svn_txdelta_stream_create(void baton, svn_txdelta_next_window_fn_t next_window, 
+        svn_txdelta_md5_digest_fn_t md5_digest, apr_pool_t pool) -> svn_txdelta_stream_t
+    """
+    return apply(_delta.svn_txdelta_stream_create, args)
+
+def svn_txdelta_next_window(*args):
+    """
+    svn_txdelta_next_window(svn_txdelta_window_t window, svn_txdelta_stream_t stream, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_txdelta_next_window, args)
+
+def svn_txdelta_md5_digest(*args):
+    """svn_txdelta_md5_digest(svn_txdelta_stream_t stream) -> unsigned char"""
+    return apply(_delta.svn_txdelta_md5_digest, args)
+
+def svn_txdelta(*args):
+    """
+    svn_txdelta(svn_txdelta_stream_t stream, svn_stream_t source, svn_stream_t target, 
+        apr_pool_t pool)
+    """
+    return apply(_delta.svn_txdelta, args)
+
+def svn_txdelta_target_push(*args):
+    """
+    svn_txdelta_target_push(svn_txdelta_window_handler_t handler, void handler_baton, 
+        svn_stream_t source, apr_pool_t pool) -> svn_stream_t
+    """
+    return apply(_delta.svn_txdelta_target_push, args)
+
+def svn_txdelta_send_string(*args):
+    """
+    svn_txdelta_send_string(svn_string_t string, svn_txdelta_window_handler_t handler, 
+        void handler_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_txdelta_send_string, args)
+
+def svn_txdelta_send_stream(*args):
+    """
+    svn_txdelta_send_stream(svn_stream_t stream, svn_txdelta_window_handler_t handler, 
+        void handler_baton, unsigned char digest, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_txdelta_send_stream, args)
+
+def svn_txdelta_send_txstream(*args):
+    """
+    svn_txdelta_send_txstream(svn_txdelta_stream_t txstream, svn_txdelta_window_handler_t handler, 
+        void handler_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_txdelta_send_txstream, args)
+
+def svn_txdelta_apply(*args):
+    """
+    svn_txdelta_apply(svn_stream_t source, svn_stream_t target, unsigned char result_digest, 
+        char error_info, apr_pool_t pool, 
+        svn_txdelta_window_handler_t handler, 
+        void handler_baton)
+    """
+    return apply(_delta.svn_txdelta_apply, args)
+
+def svn_txdelta_to_svndiff2(*args):
+    """
+    svn_txdelta_to_svndiff2(svn_txdelta_window_handler_t handler, void handler_baton, 
+        svn_stream_t output, int svndiff_version, 
+        apr_pool_t pool)
+    """
+    return apply(_delta.svn_txdelta_to_svndiff2, args)
+
+def svn_txdelta_to_svndiff(*args):
+    """
+    svn_txdelta_to_svndiff(svn_stream_t output, apr_pool_t pool, svn_txdelta_window_handler_t handler, 
+        void handler_baton)
+    """
+    return apply(_delta.svn_txdelta_to_svndiff, args)
+
+def svn_txdelta_parse_svndiff(*args):
+    """
+    svn_txdelta_parse_svndiff(svn_txdelta_window_handler_t handler, void handler_baton, 
+        svn_boolean_t error_on_early_close, apr_pool_t pool) -> svn_stream_t
+    """
+    return apply(_delta.svn_txdelta_parse_svndiff, args)
+
+def svn_txdelta_read_svndiff_window(*args):
+    """
+    svn_txdelta_read_svndiff_window(svn_txdelta_window_t window, svn_stream_t stream, int svndiff_version, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_txdelta_read_svndiff_window, args)
+
+def svn_txdelta_skip_svndiff_window(*args):
+    """svn_txdelta_skip_svndiff_window(apr_file_t file, int svndiff_version, apr_pool_t pool) -> svn_error_t"""
+    return apply(_delta.svn_txdelta_skip_svndiff_window, args)
+class svn_delta_editor_t:
+    """Proxy of C svn_delta_editor_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_delta_editor_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_delta_editor_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_delta_editor_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["set_target_revision"] = _delta.svn_delta_editor_t_set_target_revision_set
+    __swig_getmethods__["set_target_revision"] = _delta.svn_delta_editor_t_set_target_revision_get
+    __swig_setmethods__["open_root"] = _delta.svn_delta_editor_t_open_root_set
+    __swig_getmethods__["open_root"] = _delta.svn_delta_editor_t_open_root_get
+    __swig_setmethods__["delete_entry"] = _delta.svn_delta_editor_t_delete_entry_set
+    __swig_getmethods__["delete_entry"] = _delta.svn_delta_editor_t_delete_entry_get
+    __swig_setmethods__["add_directory"] = _delta.svn_delta_editor_t_add_directory_set
+    __swig_getmethods__["add_directory"] = _delta.svn_delta_editor_t_add_directory_get
+    __swig_setmethods__["open_directory"] = _delta.svn_delta_editor_t_open_directory_set
+    __swig_getmethods__["open_directory"] = _delta.svn_delta_editor_t_open_directory_get
+    __swig_setmethods__["change_dir_prop"] = _delta.svn_delta_editor_t_change_dir_prop_set
+    __swig_getmethods__["change_dir_prop"] = _delta.svn_delta_editor_t_change_dir_prop_get
+    __swig_setmethods__["close_directory"] = _delta.svn_delta_editor_t_close_directory_set
+    __swig_getmethods__["close_directory"] = _delta.svn_delta_editor_t_close_directory_get
+    __swig_setmethods__["absent_directory"] = _delta.svn_delta_editor_t_absent_directory_set
+    __swig_getmethods__["absent_directory"] = _delta.svn_delta_editor_t_absent_directory_get
+    __swig_setmethods__["add_file"] = _delta.svn_delta_editor_t_add_file_set
+    __swig_getmethods__["add_file"] = _delta.svn_delta_editor_t_add_file_get
+    __swig_setmethods__["open_file"] = _delta.svn_delta_editor_t_open_file_set
+    __swig_getmethods__["open_file"] = _delta.svn_delta_editor_t_open_file_get
+    __swig_setmethods__["apply_textdelta"] = _delta.svn_delta_editor_t_apply_textdelta_set
+    __swig_getmethods__["apply_textdelta"] = _delta.svn_delta_editor_t_apply_textdelta_get
+    __swig_setmethods__["change_file_prop"] = _delta.svn_delta_editor_t_change_file_prop_set
+    __swig_getmethods__["change_file_prop"] = _delta.svn_delta_editor_t_change_file_prop_get
+    __swig_setmethods__["close_file"] = _delta.svn_delta_editor_t_close_file_set
+    __swig_getmethods__["close_file"] = _delta.svn_delta_editor_t_close_file_get
+    __swig_setmethods__["absent_file"] = _delta.svn_delta_editor_t_absent_file_set
+    __swig_getmethods__["absent_file"] = _delta.svn_delta_editor_t_absent_file_get
+    __swig_setmethods__["close_edit"] = _delta.svn_delta_editor_t_close_edit_set
+    __swig_getmethods__["close_edit"] = _delta.svn_delta_editor_t_close_edit_get
+    __swig_setmethods__["abort_edit"] = _delta.svn_delta_editor_t_abort_edit_set
+    __swig_getmethods__["abort_edit"] = _delta.svn_delta_editor_t_abort_edit_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_delta_editor_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    # Editor vtable wrappers: each method forwards to the matching
+    # module-level svn_delta_editor_invoke_* function, passing this
+    # editor struct as the _obj argument.
+    def set_target_revision(self, *args):
+      return svn_delta_editor_invoke_set_target_revision(self, *args)
+
+    def open_root(self, *args):
+      return svn_delta_editor_invoke_open_root(self, *args)
+
+    def delete_entry(self, *args):
+      return svn_delta_editor_invoke_delete_entry(self, *args)
+
+    def add_directory(self, *args):
+      return svn_delta_editor_invoke_add_directory(self, *args)
+
+    def open_directory(self, *args):
+      return svn_delta_editor_invoke_open_directory(self, *args)
+
+    def change_dir_prop(self, *args):
+      return svn_delta_editor_invoke_change_dir_prop(self, *args)
+
+    def close_directory(self, *args):
+      return svn_delta_editor_invoke_close_directory(self, *args)
+
+    def absent_directory(self, *args):
+      return svn_delta_editor_invoke_absent_directory(self, *args)
+
+    def add_file(self, *args):
+      return svn_delta_editor_invoke_add_file(self, *args)
+
+    def open_file(self, *args):
+      return svn_delta_editor_invoke_open_file(self, *args)
+
+    def apply_textdelta(self, *args):
+      return svn_delta_editor_invoke_apply_textdelta(self, *args)
+
+    def change_file_prop(self, *args):
+      return svn_delta_editor_invoke_change_file_prop(self, *args)
+
+    def close_file(self, *args):
+      return svn_delta_editor_invoke_close_file(self, *args)
+
+    def absent_file(self, *args):
+      return svn_delta_editor_invoke_absent_file(self, *args)
+
+    def close_edit(self, *args):
+      return svn_delta_editor_invoke_close_edit(self, *args)
+
+    def abort_edit(self, *args):
+      return svn_delta_editor_invoke_abort_edit(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_delta_editor_t"""
+        # Allocate the underlying C struct and take ownership of it.
+        _swig_setattr(self, svn_delta_editor_t, 'this', apply(_delta.new_svn_delta_editor_t, args))
+        _swig_setattr(self, svn_delta_editor_t, 'thisown', 1)
+    def __del__(self, destroy=_delta.delete_svn_delta_editor_t):
+        """__del__(self)"""
+        # Destroy the C struct only if this proxy owns it; errors are
+        # swallowed because interpreter shutdown may already have torn
+        # down the _delta extension module.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_delta_editor_tPtr(svn_delta_editor_t):
+    # Pointer proxy: wraps an existing C pointer without taking
+    # ownership, then masquerades as the main proxy class.
+    def __init__(self, this):
+        _swig_setattr(self, svn_delta_editor_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_delta_editor_t, 'thisown', 0)
+        self.__class__ = svn_delta_editor_t
+_delta.svn_delta_editor_t_swigregister(svn_delta_editor_tPtr)
+
+
+# Thin module-level wrappers around the _delta C extension; each
+# docstring below is the SWIG-generated C signature of the wrapped call.
+def svn_delta_default_editor(*args):
+    """svn_delta_default_editor(apr_pool_t pool) -> svn_delta_editor_t"""
+    return apply(_delta.svn_delta_default_editor, args)
+
+def svn_delta_noop_window_handler(*args):
+    """svn_delta_noop_window_handler(svn_txdelta_window_t window, void baton) -> svn_error_t"""
+    return apply(_delta.svn_delta_noop_window_handler, args)
+
+def svn_delta_get_cancellation_editor(*args):
+    """
+    svn_delta_get_cancellation_editor(svn_cancel_func_t cancel_func, svn_delta_editor_t wrapped_editor, 
+        void wrapped_baton, svn_delta_editor_t editor, 
+        void edit_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_get_cancellation_editor, args)
+
+def svn_delta_depth_filter_editor(*args):
+    """
+    svn_delta_depth_filter_editor(svn_delta_editor_t editor, void edit_baton, svn_delta_editor_t wrapped_editor, 
+        void wrapped_edit_baton, 
+        svn_depth_t requested_depth, svn_boolean_t has_target, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_depth_filter_editor, args)
+
+def svn_delta_path_driver(*args):
+    """
+    svn_delta_path_driver(svn_delta_editor_t editor, void edit_baton, svn_revnum_t revision, 
+        apr_array_header_t paths, svn_delta_path_driver_cb_func_t callback_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_path_driver, args)
+
+def svn_compat_wrap_file_rev_handler(*args):
+    """
+    svn_compat_wrap_file_rev_handler(svn_file_rev_handler_t handler2, void handler2_baton, 
+        svn_file_rev_handler_old_t handler, void handler_baton, 
+        apr_pool_t pool)
+    """
+    return apply(_delta.svn_compat_wrap_file_rev_handler, args)
+class svn_txdelta_stream_t:
+    """Proxy of C svn_txdelta_stream_t struct"""
+    # Class-level fallbacks used by SWIG; the instance-level __getattr__
+    # and __setattr__ defined below take precedence for instances.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_txdelta_stream_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_txdelta_stream_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_txdelta_stream_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_txdelta_stream_t"""
+      # Tie this proxy's lifetime checks to the given pool (or the
+      # global application pool) via a weakref to its validity sentinel.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-level state remembered by __setattr__ onto the
+      # proxy object SWIG re-created from the C side.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_txdelta_stream_tPtr(svn_txdelta_stream_t):
+    # Pointer proxy: wraps an existing C pointer without ownership.
+    def __init__(self, this):
+        _swig_setattr(self, svn_txdelta_stream_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_txdelta_stream_t, 'thisown', 0)
+        self.__class__ = svn_txdelta_stream_t
+_delta.svn_txdelta_stream_t_swigregister(svn_txdelta_stream_tPtr)
+
+
+# Invoker wrappers: call one function pointer out of an editor/handler
+# struct through the _delta C extension.  The first argument (_obj) is
+# the struct holding the callback; the docstrings give the C signatures.
+def svn_delta_editor_invoke_set_target_revision(*args):
+    """
+    svn_delta_editor_invoke_set_target_revision(svn_delta_editor_t _obj, void edit_baton, svn_revnum_t target_revision, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_set_target_revision, args)
+
+def svn_delta_editor_invoke_open_root(*args):
+    """
+    svn_delta_editor_invoke_open_root(svn_delta_editor_t _obj, void edit_baton, svn_revnum_t base_revision, 
+        apr_pool_t dir_pool, void root_baton) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_open_root, args)
+
+def svn_delta_editor_invoke_delete_entry(*args):
+    """
+    svn_delta_editor_invoke_delete_entry(svn_delta_editor_t _obj, char path, svn_revnum_t revision, 
+        void parent_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_delete_entry, args)
+
+def svn_delta_editor_invoke_add_directory(*args):
+    """
+    svn_delta_editor_invoke_add_directory(svn_delta_editor_t _obj, char path, void parent_baton, 
+        char copyfrom_path, svn_revnum_t copyfrom_revision, 
+        apr_pool_t dir_pool, void child_baton) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_add_directory, args)
+
+def svn_delta_editor_invoke_open_directory(*args):
+    """
+    svn_delta_editor_invoke_open_directory(svn_delta_editor_t _obj, char path, void parent_baton, 
+        svn_revnum_t base_revision, apr_pool_t dir_pool, 
+        void child_baton) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_open_directory, args)
+
+def svn_delta_editor_invoke_change_dir_prop(*args):
+    """
+    svn_delta_editor_invoke_change_dir_prop(svn_delta_editor_t _obj, void dir_baton, char name, 
+        svn_string_t value, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_change_dir_prop, args)
+
+def svn_delta_editor_invoke_close_directory(*args):
+    """svn_delta_editor_invoke_close_directory(svn_delta_editor_t _obj, void dir_baton, apr_pool_t pool) -> svn_error_t"""
+    return apply(_delta.svn_delta_editor_invoke_close_directory, args)
+
+def svn_delta_editor_invoke_absent_directory(*args):
+    """
+    svn_delta_editor_invoke_absent_directory(svn_delta_editor_t _obj, char path, void parent_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_absent_directory, args)
+
+def svn_delta_editor_invoke_add_file(*args):
+    """
+    svn_delta_editor_invoke_add_file(svn_delta_editor_t _obj, char path, void parent_baton, 
+        char copyfrom_path, svn_revnum_t copyfrom_revision, 
+        apr_pool_t file_pool, void file_baton) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_add_file, args)
+
+def svn_delta_editor_invoke_open_file(*args):
+    """
+    svn_delta_editor_invoke_open_file(svn_delta_editor_t _obj, char path, void parent_baton, 
+        svn_revnum_t base_revision, apr_pool_t file_pool, 
+        void file_baton) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_open_file, args)
+
+def svn_delta_editor_invoke_apply_textdelta(*args):
+    """
+    svn_delta_editor_invoke_apply_textdelta(svn_delta_editor_t _obj, void file_baton, char base_checksum, 
+        apr_pool_t pool, svn_txdelta_window_handler_t handler, 
+        void handler_baton) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_apply_textdelta, args)
+
+def svn_delta_editor_invoke_change_file_prop(*args):
+    """
+    svn_delta_editor_invoke_change_file_prop(svn_delta_editor_t _obj, void file_baton, char name, 
+        svn_string_t value, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_change_file_prop, args)
+
+def svn_delta_editor_invoke_close_file(*args):
+    """
+    svn_delta_editor_invoke_close_file(svn_delta_editor_t _obj, void file_baton, char text_checksum, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_close_file, args)
+
+def svn_delta_editor_invoke_absent_file(*args):
+    """
+    svn_delta_editor_invoke_absent_file(svn_delta_editor_t _obj, char path, void parent_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_editor_invoke_absent_file, args)
+
+def svn_delta_editor_invoke_close_edit(*args):
+    """svn_delta_editor_invoke_close_edit(svn_delta_editor_t _obj, void edit_baton, apr_pool_t pool) -> svn_error_t"""
+    return apply(_delta.svn_delta_editor_invoke_close_edit, args)
+
+def svn_delta_editor_invoke_abort_edit(*args):
+    """svn_delta_editor_invoke_abort_edit(svn_delta_editor_t _obj, void edit_baton, apr_pool_t pool) -> svn_error_t"""
+    return apply(_delta.svn_delta_editor_invoke_abort_edit, args)
+
+def svn_txdelta_invoke_window_handler(*args):
+    """
+    svn_txdelta_invoke_window_handler(svn_txdelta_window_handler_t _obj, svn_txdelta_window_t window, 
+        void baton) -> svn_error_t
+    """
+    return apply(_delta.svn_txdelta_invoke_window_handler, args)
+
+def svn_txdelta_invoke_next_window_fn(*args):
+    """
+    svn_txdelta_invoke_next_window_fn(svn_txdelta_next_window_fn_t _obj, svn_txdelta_window_t window, 
+        void baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_txdelta_invoke_next_window_fn, args)
+
+def svn_txdelta_invoke_md5_digest_fn(*args):
+    """svn_txdelta_invoke_md5_digest_fn(svn_txdelta_md5_digest_fn_t _obj, void baton) -> unsigned char"""
+    return apply(_delta.svn_txdelta_invoke_md5_digest_fn, args)
+
+def svn_delta_invoke_path_driver_cb_func(*args):
+    """
+    svn_delta_invoke_path_driver_cb_func(svn_delta_path_driver_cb_func_t _obj, void dir_baton, 
+        void parent_baton, void callback_baton, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_delta_invoke_path_driver_cb_func, args)
+
+def svn_file_invoke_rev_handler(*args):
+    """
+    svn_file_invoke_rev_handler(svn_file_rev_handler_t _obj, void baton, char path, 
+        svn_revnum_t rev, apr_hash_t rev_props, svn_boolean_t result_of_merge, 
+        svn_txdelta_window_handler_t delta_handler, 
+        void delta_baton, apr_array_header_t prop_diffs, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_file_invoke_rev_handler, args)
+
+def svn_file_invoke_rev_handler_old(*args):
+    """
+    svn_file_invoke_rev_handler_old(svn_file_rev_handler_old_t _obj, void baton, char path, 
+        svn_revnum_t rev, apr_hash_t rev_props, 
+        svn_txdelta_window_handler_t delta_handler, void delta_baton, 
+        apr_array_header_t prop_diffs, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_delta.svn_file_invoke_rev_handler_old, args)
+class svn_txdelta_window_handler_t:
+    """Proxy of C svn_txdelta_window_handler_t struct"""
+    # Callable proxy for a C function pointer; __call__ below forwards
+    # to svn_txdelta_invoke_window_handler.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_txdelta_window_handler_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_txdelta_window_handler_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_txdelta_window_handler_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_txdelta_window_handler_t"""
+      # Tie lifetime checks to the given pool (or the application pool).
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-level state remembered by __setattr__.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_txdelta_invoke_window_handler(self, *args)
+
+
+class svn_txdelta_window_handler_tPtr(svn_txdelta_window_handler_t):
+    # Pointer proxy: wraps an existing C pointer without ownership.
+    def __init__(self, this):
+        _swig_setattr(self, svn_txdelta_window_handler_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_txdelta_window_handler_t, 'thisown', 0)
+        self.__class__ = svn_txdelta_window_handler_t
+_delta.svn_txdelta_window_handler_t_swigregister(svn_txdelta_window_handler_tPtr)
+
+class svn_txdelta_next_window_fn_t:
+    """Proxy of C svn_txdelta_next_window_fn_t struct"""
+    # Callable proxy for a C function pointer; __call__ below forwards
+    # to svn_txdelta_invoke_next_window_fn.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_txdelta_next_window_fn_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_txdelta_next_window_fn_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_txdelta_next_window_fn_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_txdelta_next_window_fn_t"""
+      # Tie lifetime checks to the given pool (or the application pool).
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-level state remembered by __setattr__.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_txdelta_invoke_next_window_fn(self, *args)
+
+
+class svn_txdelta_next_window_fn_tPtr(svn_txdelta_next_window_fn_t):
+    # Pointer proxy: wraps an existing C pointer without ownership.
+    def __init__(self, this):
+        _swig_setattr(self, svn_txdelta_next_window_fn_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_txdelta_next_window_fn_t, 'thisown', 0)
+        self.__class__ = svn_txdelta_next_window_fn_t
+_delta.svn_txdelta_next_window_fn_t_swigregister(svn_txdelta_next_window_fn_tPtr)
+
+class svn_txdelta_md5_digest_fn_t:
+    """Proxy of C svn_txdelta_md5_digest_fn_t struct"""
+    # Callable proxy for a C function pointer; __call__ below forwards
+    # to svn_txdelta_invoke_md5_digest_fn.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_txdelta_md5_digest_fn_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_txdelta_md5_digest_fn_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_txdelta_md5_digest_fn_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_txdelta_md5_digest_fn_t"""
+      # Tie lifetime checks to the given pool (or the application pool).
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-level state remembered by __setattr__.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_txdelta_invoke_md5_digest_fn(self, *args)
+
+
+class svn_txdelta_md5_digest_fn_tPtr(svn_txdelta_md5_digest_fn_t):
+    # Pointer proxy: wraps an existing C pointer without ownership.
+    def __init__(self, this):
+        _swig_setattr(self, svn_txdelta_md5_digest_fn_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_txdelta_md5_digest_fn_t, 'thisown', 0)
+        self.__class__ = svn_txdelta_md5_digest_fn_t
+_delta.svn_txdelta_md5_digest_fn_t_swigregister(svn_txdelta_md5_digest_fn_tPtr)
+
+class svn_delta_path_driver_cb_func_t:
+    """Proxy of C svn_delta_path_driver_cb_func_t struct"""
+    # Callable proxy for a C function pointer; __call__ below forwards
+    # to svn_delta_invoke_path_driver_cb_func.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_delta_path_driver_cb_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_delta_path_driver_cb_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_delta_path_driver_cb_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_delta_path_driver_cb_func_t"""
+      # Tie lifetime checks to the given pool (or the application pool).
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-level state remembered by __setattr__.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_delta_invoke_path_driver_cb_func(self, *args)
+
+
+class svn_delta_path_driver_cb_func_tPtr(svn_delta_path_driver_cb_func_t):
+    # Pointer proxy: wraps an existing C pointer without ownership.
+    def __init__(self, this):
+        _swig_setattr(self, svn_delta_path_driver_cb_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_delta_path_driver_cb_func_t, 'thisown', 0)
+        self.__class__ = svn_delta_path_driver_cb_func_t
+_delta.svn_delta_path_driver_cb_func_t_swigregister(svn_delta_path_driver_cb_func_tPtr)
+
+class svn_file_rev_handler_t:
+    """Proxy of C svn_file_rev_handler_t struct"""
+    # Callable proxy for a C function pointer; __call__ below forwards
+    # to svn_file_invoke_rev_handler.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_file_rev_handler_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_file_rev_handler_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_file_rev_handler_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_file_rev_handler_t"""
+      # Tie lifetime checks to the given pool (or the application pool).
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-level state remembered by __setattr__.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_file_invoke_rev_handler(self, *args)
+
+
+class svn_file_rev_handler_tPtr(svn_file_rev_handler_t):
+    # Pointer proxy: wraps an existing C pointer without ownership.
+    def __init__(self, this):
+        _swig_setattr(self, svn_file_rev_handler_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_file_rev_handler_t, 'thisown', 0)
+        self.__class__ = svn_file_rev_handler_t
+_delta.svn_file_rev_handler_t_swigregister(svn_file_rev_handler_tPtr)
+
+class svn_file_rev_handler_old_t:
+    """Proxy of C svn_file_rev_handler_old_t struct"""
+    # Callable proxy for a C function pointer; __call__ below forwards
+    # to svn_file_invoke_rev_handler_old.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_file_rev_handler_old_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_file_rev_handler_old_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_file_rev_handler_old_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_file_rev_handler_old_t"""
+      # Tie lifetime checks to the given pool (or the application pool).
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore Python-level state remembered by __setattr__.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_file_invoke_rev_handler_old(self, *args)
+
+
+class svn_file_rev_handler_old_tPtr(svn_file_rev_handler_old_t):
+    # Pointer proxy: wraps an existing C pointer without ownership.
+    def __init__(self, this):
+        _swig_setattr(self, svn_file_rev_handler_old_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_file_rev_handler_old_t, 'thisown', 0)
+        self.__class__ = svn_file_rev_handler_old_t
+_delta.svn_file_rev_handler_old_t_swigregister(svn_file_rev_handler_old_tPtr)
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/diff.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/diff.py
new file mode 100644
index 0000000..9718a12
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/diff.py
@@ -0,0 +1,638 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+import core
+
+import _diff
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
+    if (name == "this"):
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)
+
+def _swig_setattr(self,class_type,name,value):
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)
+
+def _swig_getattr(self,class_type,name):
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)
+    raise AttributeError,name
+
+
def svn_diff_version(*args):
    """svn_diff_version() -> svn_version_t"""
    # *args expansion replaces the deprecated apply() builtin (removed in
    # Python 3; the unpacking form works on Python 2 as well).
    return _diff.svn_diff_version(*args)
# Enumeration constants: which datasource a diff callback is addressing.
svn_diff_datasource_original = _diff.svn_diff_datasource_original
svn_diff_datasource_modified = _diff.svn_diff_datasource_modified
svn_diff_datasource_latest = _diff.svn_diff_datasource_latest
svn_diff_datasource_ancestor = _diff.svn_diff_datasource_ancestor
class svn_diff_fns_t:
    """Proxy of C svn_diff_fns_t struct"""
    # Vtable of callbacks consumed by svn_diff_diff(): open/close a
    # datasource, pull tokens from it, and compare/discard tokens.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_diff_fns_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_diff_fns_t, name)
    # NOTE: the lambda __getattr__/__setattr__ above are shadowed by the
    # def versions further down; instances use the defs.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_diff_fns_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Getter/setter thunks for each C struct member.
    __swig_setmethods__["datasource_open"] = _diff.svn_diff_fns_t_datasource_open_set
    __swig_getmethods__["datasource_open"] = _diff.svn_diff_fns_t_datasource_open_get
    __swig_setmethods__["datasource_close"] = _diff.svn_diff_fns_t_datasource_close_set
    __swig_getmethods__["datasource_close"] = _diff.svn_diff_fns_t_datasource_close_get
    __swig_setmethods__["datasource_get_next_token"] = _diff.svn_diff_fns_t_datasource_get_next_token_set
    __swig_getmethods__["datasource_get_next_token"] = _diff.svn_diff_fns_t_datasource_get_next_token_get
    __swig_setmethods__["token_compare"] = _diff.svn_diff_fns_t_token_compare_set
    __swig_getmethods__["token_compare"] = _diff.svn_diff_fns_t_token_compare_get
    __swig_setmethods__["token_discard"] = _diff.svn_diff_fns_t_token_discard_set
    __swig_getmethods__["token_discard"] = _diff.svn_diff_fns_t_token_discard_get
    __swig_setmethods__["token_discard_all"] = _diff.svn_diff_fns_t_token_discard_all_set
    __swig_getmethods__["token_discard_all"] = _diff.svn_diff_fns_t_token_discard_all_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_diff_fns_t"""
      # Bind this proxy to its owning pool (defaulting to the global
      # application pool) and keep a weakref used by assert_valid().
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # SWIG creates a fresh proxy on every C-level attribute read; copy
      # back any Python-side state remembered from earlier assignments so
      # it is not lost on the new proxy.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # value carries no __dict__ (plain value); nothing to restore.
            pass

      # Proxies handed back to the caller must themselves be pool-valid.
      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the assigned object so __getattr__ can restore its
      # Python-side state onto future SWIG-created proxies.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    # Convenience dispatchers: call the wrapped C function pointers via the
    # module-level invoke helpers defined later in this file.
    def datasource_open(self, *args):
      return svn_diff_fns_invoke_datasource_open(self, *args)

    def datasource_close(self, *args):
      return svn_diff_fns_invoke_datasource_close(self, *args)

    def datasource_get_next_token(self, *args):
      return svn_diff_fns_invoke_datasource_get_next_token(self, *args)

    def token_compare(self, *args):
      return svn_diff_fns_invoke_token_compare(self, *args)

    def token_discard(self, *args):
      return svn_diff_fns_invoke_token_discard(self, *args)

    def token_discard_all(self, *args):
      return svn_diff_fns_invoke_token_discard_all(self, *args)

    def __init__(self, *args):
        """__init__(self) -> svn_diff_fns_t"""
        _swig_setattr(self, svn_diff_fns_t, 'this', apply(_diff.new_svn_diff_fns_t, args))
        # thisown == 1: this proxy owns the C struct and frees it in __del__.
        _swig_setattr(self, svn_diff_fns_t, 'thisown', 1)
    def __del__(self, destroy=_diff.delete_svn_diff_fns_t):
        """__del__(self)"""
        # Best-effort cleanup; errors during interpreter shutdown are ignored.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
class svn_diff_fns_tPtr(svn_diff_fns_t):
    # Non-owning "pointer" wrapper: adopts an existing C pointer without
    # freeing it, then rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_diff_fns_t, 'this', this)
        # thisown == 0: the C struct is owned elsewhere.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_diff_fns_t, 'thisown', 0)
        self.__class__ = svn_diff_fns_t
# Register the pointer class so SWIG returns it for C-level results.
_diff.svn_diff_fns_t_swigregister(svn_diff_fns_tPtr)
+
+
# Thin wrappers over the C extension module.  Each call forwards its
# arguments with *args expansion, replacing the deprecated apply() builtin
# (removed in Python 3; unpacking also works on Python 2).

def svn_diff_diff(*args):
    """
    svn_diff_diff(svn_diff_t diff, void diff_baton, svn_diff_fns_t diff_fns,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_diff(*args)

def svn_diff_diff3(*args):
    """
    svn_diff_diff3(svn_diff_t diff, void diff_baton, svn_diff_fns_t diff_fns,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_diff3(*args)

def svn_diff_diff4(*args):
    """
    svn_diff_diff4(svn_diff_t diff, void diff_baton, svn_diff_fns_t diff_fns,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_diff4(*args)

def svn_diff_contains_conflicts(*args):
    """svn_diff_contains_conflicts(svn_diff_t diff) -> svn_boolean_t"""
    return _diff.svn_diff_contains_conflicts(*args)

def svn_diff_contains_diffs(*args):
    """svn_diff_contains_diffs(svn_diff_t diff) -> svn_boolean_t"""
    return _diff.svn_diff_contains_diffs(*args)
class svn_diff_output_fns_t:
    """Proxy of C svn_diff_output_fns_t struct"""
    # Vtable of callbacks used by svn_diff_output(): one callback per kind
    # of diff region (common, modified, latest, conflict, ...).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_diff_output_fns_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_diff_output_fns_t, name)
    # NOTE: the lambda __getattr__/__setattr__ above are shadowed by the
    # def versions further down; instances use the defs.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_diff_output_fns_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Getter/setter thunks for each C struct member.
    __swig_setmethods__["output_common"] = _diff.svn_diff_output_fns_t_output_common_set
    __swig_getmethods__["output_common"] = _diff.svn_diff_output_fns_t_output_common_get
    __swig_setmethods__["output_diff_modified"] = _diff.svn_diff_output_fns_t_output_diff_modified_set
    __swig_getmethods__["output_diff_modified"] = _diff.svn_diff_output_fns_t_output_diff_modified_get
    __swig_setmethods__["output_diff_latest"] = _diff.svn_diff_output_fns_t_output_diff_latest_set
    __swig_getmethods__["output_diff_latest"] = _diff.svn_diff_output_fns_t_output_diff_latest_get
    __swig_setmethods__["output_diff_common"] = _diff.svn_diff_output_fns_t_output_diff_common_set
    __swig_getmethods__["output_diff_common"] = _diff.svn_diff_output_fns_t_output_diff_common_get
    __swig_setmethods__["output_conflict"] = _diff.svn_diff_output_fns_t_output_conflict_set
    __swig_getmethods__["output_conflict"] = _diff.svn_diff_output_fns_t_output_conflict_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_diff_output_fns_t"""
      # Bind this proxy to its owning pool (defaulting to the global
      # application pool) and keep a weakref used by assert_valid().
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # SWIG creates a fresh proxy on every C-level attribute read; copy
      # back any Python-side state remembered from earlier assignments so
      # it is not lost on the new proxy.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # value carries no __dict__ (plain value); nothing to restore.
            pass

      # Proxies handed back to the caller must themselves be pool-valid.
      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the assigned object so __getattr__ can restore its
      # Python-side state onto future SWIG-created proxies.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    # Convenience dispatchers: call the wrapped C function pointers via the
    # module-level invoke helpers defined later in this file.
    def output_common(self, *args):
      return svn_diff_output_fns_invoke_output_common(self, *args)

    def output_diff_modified(self, *args):
      return svn_diff_output_fns_invoke_output_diff_modified(self, *args)

    def output_diff_latest(self, *args):
      return svn_diff_output_fns_invoke_output_diff_latest(self, *args)

    def output_diff_common(self, *args):
      return svn_diff_output_fns_invoke_output_diff_common(self, *args)

    def output_conflict(self, *args):
      return svn_diff_output_fns_invoke_output_conflict(self, *args)

    def __init__(self, *args):
        """__init__(self) -> svn_diff_output_fns_t"""
        _swig_setattr(self, svn_diff_output_fns_t, 'this', apply(_diff.new_svn_diff_output_fns_t, args))
        # thisown == 1: this proxy owns the C struct and frees it in __del__.
        _swig_setattr(self, svn_diff_output_fns_t, 'thisown', 1)
    def __del__(self, destroy=_diff.delete_svn_diff_output_fns_t):
        """__del__(self)"""
        # Best-effort cleanup; errors during interpreter shutdown are ignored.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
class svn_diff_output_fns_tPtr(svn_diff_output_fns_t):
    # Non-owning "pointer" wrapper: adopts an existing C pointer without
    # freeing it, then rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_diff_output_fns_t, 'this', this)
        # thisown == 0: the C struct is owned elsewhere.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_diff_output_fns_t, 'thisown', 0)
        self.__class__ = svn_diff_output_fns_t
# Register the pointer class so SWIG returns it for C-level results.
_diff.svn_diff_output_fns_t_swigregister(svn_diff_output_fns_tPtr)
+
+
def svn_diff_output(*args):
    """svn_diff_output(svn_diff_t diff, void output_baton, svn_diff_output_fns_t output_fns) -> svn_error_t"""
    # *args expansion replaces the deprecated apply() builtin.
    return _diff.svn_diff_output(*args)
# Whitespace-handling modes for file diffs.
svn_diff_file_ignore_space_none = _diff.svn_diff_file_ignore_space_none
svn_diff_file_ignore_space_change = _diff.svn_diff_file_ignore_space_change
svn_diff_file_ignore_space_all = _diff.svn_diff_file_ignore_space_all
class svn_diff_file_options_t:
    """Proxy of C svn_diff_file_options_t struct"""
    # Options controlling file diffs: whitespace handling, EOL-style
    # handling and whether to show the enclosing C function in hunks.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_diff_file_options_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_diff_file_options_t, name)
    # NOTE: the lambda __getattr__/__setattr__ above are shadowed by the
    # def versions further down; instances use the defs.
    def __repr__(self):
        return "<%s.%s; proxy of C svn_diff_file_options_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Getter/setter thunks for each C struct member.
    __swig_setmethods__["ignore_space"] = _diff.svn_diff_file_options_t_ignore_space_set
    __swig_getmethods__["ignore_space"] = _diff.svn_diff_file_options_t_ignore_space_get
    __swig_setmethods__["ignore_eol_style"] = _diff.svn_diff_file_options_t_ignore_eol_style_set
    __swig_getmethods__["ignore_eol_style"] = _diff.svn_diff_file_options_t_ignore_eol_style_get
    __swig_setmethods__["show_c_function"] = _diff.svn_diff_file_options_t_show_c_function_set
    __swig_getmethods__["show_c_function"] = _diff.svn_diff_file_options_t_show_c_function_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_diff_file_options_t"""
      # Bind this proxy to its owning pool (defaulting to the global
      # application pool) and keep a weakref used by assert_valid().
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # SWIG creates a fresh proxy on every C-level attribute read; copy
      # back any Python-side state remembered from earlier assignments so
      # it is not lost on the new proxy.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # value carries no __dict__ (plain value); nothing to restore.
            pass

      # Proxies handed back to the caller must themselves be pool-valid.
      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the assigned object so __getattr__ can restore its
      # Python-side state onto future SWIG-created proxies.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_diff_file_options_t"""
        _swig_setattr(self, svn_diff_file_options_t, 'this', apply(_diff.new_svn_diff_file_options_t, args))
        # thisown == 1: this proxy owns the C struct and frees it in __del__.
        _swig_setattr(self, svn_diff_file_options_t, 'thisown', 1)
    def __del__(self, destroy=_diff.delete_svn_diff_file_options_t):
        """__del__(self)"""
        # Best-effort cleanup; errors during interpreter shutdown are ignored.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
class svn_diff_file_options_tPtr(svn_diff_file_options_t):
    # Non-owning "pointer" wrapper: adopts an existing C pointer without
    # freeing it, then rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_diff_file_options_t, 'this', this)
        # thisown == 0: the C struct is owned elsewhere.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_diff_file_options_t, 'thisown', 0)
        self.__class__ = svn_diff_file_options_t
# Register the pointer class so SWIG returns it for C-level results.
_diff.svn_diff_file_options_t_swigregister(svn_diff_file_options_tPtr)
+
+
# Thin wrappers over the C extension module.  Each call forwards its
# arguments with *args expansion, replacing the deprecated apply() builtin
# (removed in Python 3; unpacking also works on Python 2).

def svn_diff_file_options_create(*args):
    """svn_diff_file_options_create(apr_pool_t pool) -> svn_diff_file_options_t"""
    return _diff.svn_diff_file_options_create(*args)

def svn_diff_file_options_parse(*args):
    """
    svn_diff_file_options_parse(svn_diff_file_options_t options, apr_array_header_t args,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_options_parse(*args)

def svn_diff_file_diff_2(*args):
    """
    svn_diff_file_diff_2(svn_diff_t diff, char original, char modified, svn_diff_file_options_t options,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_diff_2(*args)

def svn_diff_file_diff(*args):
    """svn_diff_file_diff(svn_diff_t diff, char original, char modified, apr_pool_t pool) -> svn_error_t"""
    return _diff.svn_diff_file_diff(*args)

def svn_diff_file_diff3_2(*args):
    """
    svn_diff_file_diff3_2(svn_diff_t diff, char original, char modified, char latest,
        svn_diff_file_options_t options, apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_diff3_2(*args)

def svn_diff_file_diff3(*args):
    """
    svn_diff_file_diff3(svn_diff_t diff, char original, char modified, char latest,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_diff3(*args)

def svn_diff_file_diff4_2(*args):
    """
    svn_diff_file_diff4_2(svn_diff_t diff, char original, char modified, char latest,
        char ancestor, svn_diff_file_options_t options,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_diff4_2(*args)

def svn_diff_file_diff4(*args):
    """
    svn_diff_file_diff4(svn_diff_t diff, char original, char modified, char latest,
        char ancestor, apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_diff4(*args)

def svn_diff_file_output_unified3(*args):
    """
    svn_diff_file_output_unified3(svn_stream_t output_stream, svn_diff_t diff, char original_path,
        char modified_path, char original_header,
        char modified_header, char header_encoding,
        char relative_to_dir, svn_boolean_t show_c_function,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_output_unified3(*args)

def svn_diff_file_output_unified2(*args):
    """
    svn_diff_file_output_unified2(svn_stream_t output_stream, svn_diff_t diff, char original_path,
        char modified_path, char original_header,
        char modified_header, char header_encoding,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_output_unified2(*args)

def svn_diff_file_output_unified(*args):
    """
    svn_diff_file_output_unified(svn_stream_t output_stream, svn_diff_t diff, char original_path,
        char modified_path, char original_header,
        char modified_header, apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_output_unified(*args)

def svn_diff_file_output_merge(*args):
    """
    svn_diff_file_output_merge(svn_stream_t output_stream, svn_diff_t diff, char original_path,
        char modified_path, char latest_path,
        char conflict_original, char conflict_modified,
        char conflict_latest, char conflict_separator,
        svn_boolean_t display_original_in_conflict,
        svn_boolean_t display_resolved_conflicts,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_file_output_merge(*args)

def svn_diff_mem_string_diff(*args):
    """
    svn_diff_mem_string_diff(svn_diff_t diff, svn_string_t original, svn_string_t modified,
        svn_diff_file_options_t options,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_mem_string_diff(*args)

def svn_diff_mem_string_diff3(*args):
    """
    svn_diff_mem_string_diff3(svn_diff_t diff, svn_string_t original, svn_string_t modified,
        svn_string_t latest, svn_diff_file_options_t options,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_mem_string_diff3(*args)

def svn_diff_mem_string_diff4(*args):
    """
    svn_diff_mem_string_diff4(svn_diff_t diff, svn_string_t original, svn_string_t modified,
        svn_string_t latest, svn_string_t ancestor,
        svn_diff_file_options_t options, apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_mem_string_diff4(*args)

def svn_diff_mem_string_output_unified(*args):
    """
    svn_diff_mem_string_output_unified(svn_stream_t output_stream, svn_diff_t diff, char original_header,
        char modified_header, char header_encoding,
        svn_string_t original, svn_string_t modified,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_mem_string_output_unified(*args)

def svn_diff_mem_string_output_merge(*args):
    """
    svn_diff_mem_string_output_merge(svn_stream_t output_stream, svn_diff_t diff, svn_string_t original,
        svn_string_t modified, svn_string_t latest,
        char conflict_original, char conflict_modified,
        char conflict_latest, char conflict_separator,
        svn_boolean_t display_original_in_conflict,
        svn_boolean_t display_resolved_conflicts,
        apr_pool_t pool) -> svn_error_t
    """
    return _diff.svn_diff_mem_string_output_merge(*args)
class svn_diff_t:
    """Proxy of C svn_diff_t struct"""
    # Opaque diff result: Python code never constructs one directly; these
    # objects are produced by the svn_diff_* functions above.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_diff_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_diff_t, name)
    # NOTE: the lambda __getattr__/__setattr__ above are shadowed by the
    # def versions further down; instances use the defs.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_diff_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_diff_t"""
      # Bind this proxy to its owning pool (defaulting to the global
      # application pool) and keep a weakref used by assert_valid().
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      # The weakref goes dead once the parent pool is destroyed.
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # SWIG creates a fresh proxy on every C-level attribute read; copy
      # back any Python-side state remembered from earlier assignments so
      # it is not lost on the new proxy.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            # value carries no __dict__ (plain value); nothing to restore.
            pass

      # Proxies handed back to the caller must themselves be pool-valid.
      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the assigned object so __getattr__ can restore its
      # Python-side state onto future SWIG-created proxies.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
class svn_diff_tPtr(svn_diff_t):
    # Non-owning "pointer" wrapper: adopts an existing C pointer without
    # freeing it, then rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_diff_t, 'this', this)
        # thisown == 0: the C struct is owned elsewhere.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_diff_t, 'thisown', 0)
        self.__class__ = svn_diff_t
# Register the pointer class so SWIG returns it for C-level results.
_diff.svn_diff_t_swigregister(svn_diff_tPtr)
+
+
# Invoke helpers: call a function pointer stored in a *_fns_t vtable proxy.
# Each call forwards its arguments with *args expansion, replacing the
# deprecated apply() builtin (removed in Python 3).

def svn_diff_fns_invoke_datasource_open(*args):
    """svn_diff_fns_invoke_datasource_open(svn_diff_fns_t _obj, void diff_baton, svn_diff_datasource_e datasource) -> svn_error_t"""
    return _diff.svn_diff_fns_invoke_datasource_open(*args)

def svn_diff_fns_invoke_datasource_close(*args):
    """svn_diff_fns_invoke_datasource_close(svn_diff_fns_t _obj, void diff_baton, svn_diff_datasource_e datasource) -> svn_error_t"""
    return _diff.svn_diff_fns_invoke_datasource_close(*args)

def svn_diff_fns_invoke_datasource_get_next_token(*args):
    """
    svn_diff_fns_invoke_datasource_get_next_token(svn_diff_fns_t _obj, apr_uint32_t hash, void token,
        void diff_baton, svn_diff_datasource_e datasource) -> svn_error_t
    """
    return _diff.svn_diff_fns_invoke_datasource_get_next_token(*args)

def svn_diff_fns_invoke_token_compare(*args):
    """
    svn_diff_fns_invoke_token_compare(svn_diff_fns_t _obj, void diff_baton, void ltoken,
        void rtoken, int compare) -> svn_error_t
    """
    return _diff.svn_diff_fns_invoke_token_compare(*args)

def svn_diff_fns_invoke_token_discard(*args):
    """svn_diff_fns_invoke_token_discard(svn_diff_fns_t _obj, void diff_baton, void token)"""
    return _diff.svn_diff_fns_invoke_token_discard(*args)

def svn_diff_fns_invoke_token_discard_all(*args):
    """svn_diff_fns_invoke_token_discard_all(svn_diff_fns_t _obj, void diff_baton)"""
    return _diff.svn_diff_fns_invoke_token_discard_all(*args)

def svn_diff_output_fns_invoke_output_common(*args):
    """
    svn_diff_output_fns_invoke_output_common(svn_diff_output_fns_t _obj, void output_baton, apr_off_t original_start,
        apr_off_t original_length,
        apr_off_t modified_start, apr_off_t modified_length,
        apr_off_t latest_start, apr_off_t latest_length) -> svn_error_t
    """
    return _diff.svn_diff_output_fns_invoke_output_common(*args)

def svn_diff_output_fns_invoke_output_diff_modified(*args):
    """
    svn_diff_output_fns_invoke_output_diff_modified(svn_diff_output_fns_t _obj, void output_baton, apr_off_t original_start,
        apr_off_t original_length,
        apr_off_t modified_start, apr_off_t modified_length,
        apr_off_t latest_start, apr_off_t latest_length) -> svn_error_t
    """
    return _diff.svn_diff_output_fns_invoke_output_diff_modified(*args)

def svn_diff_output_fns_invoke_output_diff_latest(*args):
    """
    svn_diff_output_fns_invoke_output_diff_latest(svn_diff_output_fns_t _obj, void output_baton, apr_off_t original_start,
        apr_off_t original_length,
        apr_off_t modified_start, apr_off_t modified_length,
        apr_off_t latest_start, apr_off_t latest_length) -> svn_error_t
    """
    return _diff.svn_diff_output_fns_invoke_output_diff_latest(*args)

def svn_diff_output_fns_invoke_output_diff_common(*args):
    """
    svn_diff_output_fns_invoke_output_diff_common(svn_diff_output_fns_t _obj, void output_baton, apr_off_t original_start,
        apr_off_t original_length,
        apr_off_t modified_start, apr_off_t modified_length,
        apr_off_t latest_start, apr_off_t latest_length) -> svn_error_t
    """
    return _diff.svn_diff_output_fns_invoke_output_diff_common(*args)

def svn_diff_output_fns_invoke_output_conflict(*args):
    """
    svn_diff_output_fns_invoke_output_conflict(svn_diff_output_fns_t _obj, void output_baton, apr_off_t original_start,
        apr_off_t original_length,
        apr_off_t modified_start, apr_off_t modified_length,
        apr_off_t latest_start, apr_off_t latest_length,
        svn_diff_t resolved_diff) -> svn_error_t
    """
    return _diff.svn_diff_output_fns_invoke_output_conflict(*args)
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/fs.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/fs.py
new file mode 100644
index 0000000..732423e8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/fs.py
@@ -0,0 +1,1245 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+import core
+import delta
+
+import _fs
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
+    if (name == "this"):
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)
+
+def _swig_setattr(self,class_type,name,value):
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)
+
+def _swig_getattr(self,class_type,name):
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)
+    raise AttributeError,name
+
+
+def svn_fs_version(*args):
+    """svn_fs_version() -> svn_version_t"""
+    return apply(_fs.svn_fs_version, args)
+SVN_FS_CONFIG_BDB_TXN_NOSYNC = _fs.SVN_FS_CONFIG_BDB_TXN_NOSYNC
+SVN_FS_CONFIG_BDB_LOG_AUTOREMOVE = _fs.SVN_FS_CONFIG_BDB_LOG_AUTOREMOVE
+SVN_FS_CONFIG_FS_TYPE = _fs.SVN_FS_CONFIG_FS_TYPE
+SVN_FS_TYPE_BDB = _fs.SVN_FS_TYPE_BDB
+SVN_FS_TYPE_FSFS = _fs.SVN_FS_TYPE_FSFS
+SVN_FS_CONFIG_PRE_1_4_COMPATIBLE = _fs.SVN_FS_CONFIG_PRE_1_4_COMPATIBLE
+SVN_FS_CONFIG_PRE_1_5_COMPATIBLE = _fs.SVN_FS_CONFIG_PRE_1_5_COMPATIBLE
+
+def svn_fs_initialize(*args):
+    """svn_fs_initialize(apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_initialize, args)
+
+def svn_fs_set_warning_func(*args):
+    """svn_fs_set_warning_func(svn_fs_t fs, svn_fs_warning_callback_t warning, void warning_baton)"""
+    return apply(_fs.svn_fs_set_warning_func, args)
+
+def svn_fs_create(*args):
+    """svn_fs_create(svn_fs_t fs_p, char path, apr_hash_t fs_config, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_create, args)
+
+def svn_fs_open(*args):
+    """svn_fs_open(svn_fs_t fs_p, char path, apr_hash_t fs_config, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_open, args)
+
+def svn_fs_upgrade(*args):
+    """svn_fs_upgrade(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_upgrade, args)
+
+def svn_fs_type(*args):
+    """svn_fs_type(char fs_type, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_type, args)
+
+def svn_fs_path(*args):
+    """svn_fs_path(svn_fs_t fs, apr_pool_t pool) -> char"""
+    return apply(_fs.svn_fs_path, args)
+
+def svn_fs_delete_fs(*args):
+    """svn_fs_delete_fs(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_delete_fs, args)
+
+def svn_fs_hotcopy(*args):
+    """
+    svn_fs_hotcopy(char src_path, char dest_path, svn_boolean_t clean, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_hotcopy, args)
+
+def svn_fs_recover(*args):
+    """svn_fs_recover(char path, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_recover, args)
+
+def svn_fs_berkeley_logfiles(*args):
+    """
+    svn_fs_berkeley_logfiles(apr_array_header_t logfiles, char path, svn_boolean_t only_unused, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_berkeley_logfiles, args)
+
+def svn_fs_new(*args):
+    """svn_fs_new(apr_hash_t fs_config, apr_pool_t pool) -> svn_fs_t"""
+    return apply(_fs.svn_fs_new, args)
+
+def svn_fs_create_berkeley(*args):
+    """svn_fs_create_berkeley(svn_fs_t fs, char path) -> svn_error_t"""
+    return apply(_fs.svn_fs_create_berkeley, args)
+
+def svn_fs_open_berkeley(*args):
+    """svn_fs_open_berkeley(svn_fs_t fs, char path) -> svn_error_t"""
+    return apply(_fs.svn_fs_open_berkeley, args)
+
+def svn_fs_berkeley_path(*args):
+    """svn_fs_berkeley_path(svn_fs_t fs, apr_pool_t pool) -> char"""
+    return apply(_fs.svn_fs_berkeley_path, args)
+
+def svn_fs_delete_berkeley(*args):
+    """svn_fs_delete_berkeley(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_delete_berkeley, args)
+
+def svn_fs_hotcopy_berkeley(*args):
+    """
+    svn_fs_hotcopy_berkeley(char src_path, char dest_path, svn_boolean_t clean_logs, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_hotcopy_berkeley, args)
+
+def svn_fs_berkeley_recover(*args):
+    """svn_fs_berkeley_recover(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_berkeley_recover, args)
+
+def svn_fs_create_access(*args):
+    """svn_fs_create_access(svn_fs_access_t access_ctx, char username, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_create_access, args)
+
+def svn_fs_set_access(*args):
+    """svn_fs_set_access(svn_fs_t fs, svn_fs_access_t access_ctx) -> svn_error_t"""
+    return apply(_fs.svn_fs_set_access, args)
+
+def svn_fs_get_access(*args):
+    """svn_fs_get_access(svn_fs_access_t access_ctx, svn_fs_t fs) -> svn_error_t"""
+    return apply(_fs.svn_fs_get_access, args)
+
+def svn_fs_access_get_username(*args):
+    """svn_fs_access_get_username(char username, svn_fs_access_t access_ctx) -> svn_error_t"""
+    return apply(_fs.svn_fs_access_get_username, args)
+
+def svn_fs_access_add_lock_token(*args):
+    """svn_fs_access_add_lock_token(svn_fs_access_t access_ctx, char token) -> svn_error_t"""
+    return apply(_fs.svn_fs_access_add_lock_token, args)
+
+def svn_fs_compare_ids(*args):
+    """svn_fs_compare_ids(svn_fs_id_t a, svn_fs_id_t b) -> int"""
+    return apply(_fs.svn_fs_compare_ids, args)
+
+def svn_fs_check_related(*args):
+    """svn_fs_check_related(svn_fs_id_t id1, svn_fs_id_t id2) -> svn_boolean_t"""
+    return apply(_fs.svn_fs_check_related, args)
+
+def svn_fs_parse_id(*args):
+    """svn_fs_parse_id(char data, apr_pool_t pool) -> svn_fs_id_t"""
+    return apply(_fs.svn_fs_parse_id, args)
+
+def svn_fs_unparse_id(*args):
+    """svn_fs_unparse_id(svn_fs_id_t id, apr_pool_t pool) -> svn_string_t"""
+    return apply(_fs.svn_fs_unparse_id, args)
+SVN_FS_TXN_CHECK_OOD = _fs.SVN_FS_TXN_CHECK_OOD
+SVN_FS_TXN_CHECK_LOCKS = _fs.SVN_FS_TXN_CHECK_LOCKS
+
+def svn_fs_begin_txn2(*args):
+    """
+    svn_fs_begin_txn2(svn_fs_txn_t txn_p, svn_fs_t fs, svn_revnum_t rev, 
+        apr_uint32_t flags, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_begin_txn2, args)
+
+def svn_fs_begin_txn(*args):
+    """
+    svn_fs_begin_txn(svn_fs_txn_t txn_p, svn_fs_t fs, svn_revnum_t rev, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_begin_txn, args)
+
+def svn_fs_commit_txn(*args):
+    """
+    svn_fs_commit_txn(char conflict_p, svn_revnum_t new_rev, svn_fs_txn_t txn, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_commit_txn, args)
+
+def svn_fs_abort_txn(*args):
+    """svn_fs_abort_txn(svn_fs_txn_t txn, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_abort_txn, args)
+
+def svn_fs_purge_txn(*args):
+    """svn_fs_purge_txn(svn_fs_t fs, char txn_id, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_purge_txn, args)
+
+def svn_fs_txn_name(*args):
+    """svn_fs_txn_name(char name_p, svn_fs_txn_t txn, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_txn_name, args)
+
+def svn_fs_txn_base_revision(*args):
+    """svn_fs_txn_base_revision(svn_fs_txn_t txn) -> svn_revnum_t"""
+    return apply(_fs.svn_fs_txn_base_revision, args)
+
+def svn_fs_open_txn(*args):
+    """svn_fs_open_txn(svn_fs_txn_t txn, svn_fs_t fs, char name, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_open_txn, args)
+
+def svn_fs_list_transactions(*args):
+    """svn_fs_list_transactions(apr_array_header_t names_p, svn_fs_t fs, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_list_transactions, args)
+
+def svn_fs_txn_prop(*args):
+    """
+    svn_fs_txn_prop(svn_string_t value_p, svn_fs_txn_t txn, char propname, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_txn_prop, args)
+
+def svn_fs_txn_proplist(*args):
+    """svn_fs_txn_proplist(apr_hash_t table_p, svn_fs_txn_t txn, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_txn_proplist, args)
+
+def svn_fs_change_txn_prop(*args):
+    """svn_fs_change_txn_prop(svn_fs_txn_t txn, char name, svn_string_t value, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_change_txn_prop, args)
+
+def svn_fs_change_txn_props(*args):
+    """svn_fs_change_txn_props(svn_fs_txn_t txn, apr_array_header_t props, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_change_txn_props, args)
+
+def svn_fs_revision_root(*args):
+    """
+    svn_fs_revision_root(svn_fs_root_t root_p, svn_fs_t fs, svn_revnum_t rev, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_revision_root, args)
+
+def svn_fs_txn_root(*args):
+    """svn_fs_txn_root(svn_fs_root_t root_p, svn_fs_txn_t txn, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_txn_root, args)
+
+def svn_fs_close_root(*args):
+    """svn_fs_close_root(svn_fs_root_t root)"""
+    return apply(_fs.svn_fs_close_root, args)
+
+def svn_fs_root_fs(*args):
+    """svn_fs_root_fs(svn_fs_root_t root) -> svn_fs_t"""
+    return apply(_fs.svn_fs_root_fs, args)
+
+def svn_fs_is_txn_root(*args):
+    """svn_fs_is_txn_root(svn_fs_root_t root) -> svn_boolean_t"""
+    return apply(_fs.svn_fs_is_txn_root, args)
+
+def svn_fs_is_revision_root(*args):
+    """svn_fs_is_revision_root(svn_fs_root_t root) -> svn_boolean_t"""
+    return apply(_fs.svn_fs_is_revision_root, args)
+
+def svn_fs_txn_root_name(*args):
+    """svn_fs_txn_root_name(svn_fs_root_t root, apr_pool_t pool) -> char"""
+    return apply(_fs.svn_fs_txn_root_name, args)
+
+def svn_fs_txn_root_base_revision(*args):
+    """svn_fs_txn_root_base_revision(svn_fs_root_t root) -> svn_revnum_t"""
+    return apply(_fs.svn_fs_txn_root_base_revision, args)
+
+def svn_fs_revision_root_revision(*args):
+    """svn_fs_revision_root_revision(svn_fs_root_t root) -> svn_revnum_t"""
+    return apply(_fs.svn_fs_revision_root_revision, args)
+svn_fs_path_change_modify = _fs.svn_fs_path_change_modify
+svn_fs_path_change_add = _fs.svn_fs_path_change_add
+svn_fs_path_change_delete = _fs.svn_fs_path_change_delete
+svn_fs_path_change_replace = _fs.svn_fs_path_change_replace
+svn_fs_path_change_reset = _fs.svn_fs_path_change_reset
+class svn_fs_path_change_t:
+    """Proxy of C svn_fs_path_change_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_path_change_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_path_change_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_path_change_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["node_rev_id"] = _fs.svn_fs_path_change_t_node_rev_id_set
+    __swig_getmethods__["node_rev_id"] = _fs.svn_fs_path_change_t_node_rev_id_get
+    __swig_setmethods__["change_kind"] = _fs.svn_fs_path_change_t_change_kind_set
+    __swig_getmethods__["change_kind"] = _fs.svn_fs_path_change_t_change_kind_get
+    __swig_setmethods__["text_mod"] = _fs.svn_fs_path_change_t_text_mod_set
+    __swig_getmethods__["text_mod"] = _fs.svn_fs_path_change_t_text_mod_get
+    __swig_setmethods__["prop_mod"] = _fs.svn_fs_path_change_t_prop_mod_set
+    __swig_getmethods__["prop_mod"] = _fs.svn_fs_path_change_t_prop_mod_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_path_change_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_path_change_tPtr(svn_fs_path_change_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_path_change_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_path_change_t, 'thisown', 0)
+        self.__class__ = svn_fs_path_change_t
+_fs.svn_fs_path_change_t_swigregister(svn_fs_path_change_tPtr)
+
+
+def svn_fs_paths_changed(*args):
+    """svn_fs_paths_changed(apr_hash_t changed_paths_p, svn_fs_root_t root, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_paths_changed, args)
+
+def svn_fs_check_path(*args):
+    """
+    svn_fs_check_path(svn_node_kind_t kind_p, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_check_path, args)
+
+def svn_fs_node_history(*args):
+    """
+    svn_fs_node_history(svn_fs_history_t history_p, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_node_history, args)
+
+def svn_fs_history_prev(*args):
+    """
+    svn_fs_history_prev(svn_fs_history_t prev_history_p, svn_fs_history_t history, 
+        svn_boolean_t cross_copies, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_history_prev, args)
+
+def svn_fs_history_location(*args):
+    """
+    svn_fs_history_location(char path, svn_revnum_t revision, svn_fs_history_t history, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_history_location, args)
+
+def svn_fs_is_dir(*args):
+    """
+    svn_fs_is_dir(svn_boolean_t is_dir, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_is_dir, args)
+
+def svn_fs_is_file(*args):
+    """
+    svn_fs_is_file(svn_boolean_t is_file, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_is_file, args)
+
+def svn_fs_node_id(*args):
+    """svn_fs_node_id(svn_fs_id_t id_p, svn_fs_root_t root, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_node_id, args)
+
+def svn_fs_node_created_rev(*args):
+    """
+    svn_fs_node_created_rev(svn_revnum_t revision, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_node_created_rev, args)
+
+def svn_fs_node_origin_rev(*args):
+    """
+    svn_fs_node_origin_rev(svn_revnum_t revision, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_node_origin_rev, args)
+
+def svn_fs_node_created_path(*args):
+    """svn_fs_node_created_path(char created_path, svn_fs_root_t root, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_node_created_path, args)
+
+def svn_fs_node_prop(*args):
+    """
+    svn_fs_node_prop(svn_string_t value_p, svn_fs_root_t root, char path, 
+        char propname, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_node_prop, args)
+
+def svn_fs_node_proplist(*args):
+    """
+    svn_fs_node_proplist(apr_hash_t table_p, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_node_proplist, args)
+
+def svn_fs_change_node_prop(*args):
+    """
+    svn_fs_change_node_prop(svn_fs_root_t root, char path, char name, svn_string_t value, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_change_node_prop, args)
+
+def svn_fs_props_changed(*args):
+    """
+    svn_fs_props_changed(svn_boolean_t changed_p, svn_fs_root_t root1, char path1, 
+        svn_fs_root_t root2, char path2, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_props_changed, args)
+
+def svn_fs_copied_from(*args):
+    """
+    svn_fs_copied_from(svn_revnum_t rev_p, char path_p, svn_fs_root_t root, 
+        char path, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_copied_from, args)
+
+def svn_fs_closest_copy(*args):
+    """
+    svn_fs_closest_copy(svn_fs_root_t root_p, char path_p, svn_fs_root_t root, 
+        char path, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_closest_copy, args)
+
+def svn_fs_get_mergeinfo(*args):
+    """
+    svn_fs_get_mergeinfo(svn_mergeinfo_catalog_t catalog, svn_fs_root_t root, 
+        apr_array_header_t paths, svn_mergeinfo_inheritance_t inherit, 
+        svn_boolean_t include_descendants, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_get_mergeinfo, args)
+
+def svn_fs_merge(*args):
+    """
+    svn_fs_merge(char conflict_p, svn_fs_root_t source_root, char source_path, 
+        svn_fs_root_t target_root, char target_path, 
+        svn_fs_root_t ancestor_root, char ancestor_path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_merge, args)
+class svn_fs_dirent_t:
+    """Proxy of C svn_fs_dirent_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_dirent_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_dirent_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_dirent_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["name"] = _fs.svn_fs_dirent_t_name_set
+    __swig_getmethods__["name"] = _fs.svn_fs_dirent_t_name_get
+    __swig_setmethods__["id"] = _fs.svn_fs_dirent_t_id_set
+    __swig_getmethods__["id"] = _fs.svn_fs_dirent_t_id_get
+    __swig_setmethods__["kind"] = _fs.svn_fs_dirent_t_kind_set
+    __swig_getmethods__["kind"] = _fs.svn_fs_dirent_t_kind_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_dirent_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_dirent_tPtr(svn_fs_dirent_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_dirent_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_dirent_t, 'thisown', 0)
+        self.__class__ = svn_fs_dirent_t
+_fs.svn_fs_dirent_t_swigregister(svn_fs_dirent_tPtr)
+
+
+def svn_fs_dir_entries(*args):
+    """
+    svn_fs_dir_entries(apr_hash_t entries_p, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_dir_entries, args)
+
+def svn_fs_make_dir(*args):
+    """svn_fs_make_dir(svn_fs_root_t root, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_make_dir, args)
+
+def svn_fs_delete(*args):
+    """svn_fs_delete(svn_fs_root_t root, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_delete, args)
+
+def svn_fs_copy(*args):
+    """
+    svn_fs_copy(svn_fs_root_t from_root, char from_path, svn_fs_root_t to_root, 
+        char to_path, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_copy, args)
+
+def svn_fs_revision_link(*args):
+    """
+    svn_fs_revision_link(svn_fs_root_t from_root, svn_fs_root_t to_root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_revision_link, args)
+
+def svn_fs_file_length(*args):
+    """
+    svn_fs_file_length(svn_filesize_t length_p, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_file_length, args)
+
+def svn_fs_file_md5_checksum(*args):
+    """
+    svn_fs_file_md5_checksum(unsigned char digest, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_file_md5_checksum, args)
+
+def svn_fs_file_contents(*args):
+    """
+    svn_fs_file_contents(svn_stream_t contents, svn_fs_root_t root, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_file_contents, args)
+
+def svn_fs_make_file(*args):
+    """svn_fs_make_file(svn_fs_root_t root, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_make_file, args)
+
+def svn_fs_apply_textdelta(*args):
+    """
+    svn_fs_apply_textdelta(svn_txdelta_window_handler_t contents_p, void contents_baton_p, 
+        svn_fs_root_t root, char path, char base_checksum, 
+        char result_checksum, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_apply_textdelta, args)
+
+def svn_fs_apply_text(*args):
+    """
+    svn_fs_apply_text(svn_stream_t contents_p, svn_fs_root_t root, char path, 
+        char result_checksum, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_apply_text, args)
+
+def svn_fs_contents_changed(*args):
+    """
+    svn_fs_contents_changed(svn_boolean_t changed_p, svn_fs_root_t root1, char path1, 
+        svn_fs_root_t root2, char path2, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_contents_changed, args)
+
+def svn_fs_youngest_rev(*args):
+    """svn_fs_youngest_rev(svn_revnum_t youngest_p, svn_fs_t fs, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_youngest_rev, args)
+
+def svn_fs_deltify_revision(*args):
+    """svn_fs_deltify_revision(svn_fs_t fs, svn_revnum_t revision, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_deltify_revision, args)
+
+def svn_fs_revision_prop(*args):
+    """
+    svn_fs_revision_prop(svn_string_t value_p, svn_fs_t fs, svn_revnum_t rev, 
+        char propname, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_revision_prop, args)
+
+def svn_fs_revision_proplist(*args):
+    """
+    svn_fs_revision_proplist(apr_hash_t table_p, svn_fs_t fs, svn_revnum_t rev, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_revision_proplist, args)
+
+def svn_fs_change_rev_prop(*args):
+    """
+    svn_fs_change_rev_prop(svn_fs_t fs, svn_revnum_t rev, char name, svn_string_t value, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_change_rev_prop, args)
+
+def svn_fs_get_file_delta_stream(*args):
+    """
+    svn_fs_get_file_delta_stream(svn_txdelta_stream_t stream_p, svn_fs_root_t source_root, 
+        char source_path, svn_fs_root_t target_root, 
+        char target_path, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_get_file_delta_stream, args)
+
+def svn_fs_get_uuid(*args):
+    """svn_fs_get_uuid(svn_fs_t fs, char uuid, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_get_uuid, args)
+
+def svn_fs_set_uuid(*args):
+    """svn_fs_set_uuid(svn_fs_t fs, char uuid, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_set_uuid, args)
+
+def svn_fs_lock(*args):
+    """
+    svn_fs_lock(svn_lock_t lock, svn_fs_t fs, char path, char token, 
+        char comment, svn_boolean_t is_dav_comment, 
+        apr_time_t expiration_date, svn_revnum_t current_rev, 
+        svn_boolean_t steal_lock, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_lock, args)
+
+def svn_fs_generate_lock_token(*args):
+    """svn_fs_generate_lock_token(char token, svn_fs_t fs, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_generate_lock_token, args)
+
+def svn_fs_unlock(*args):
+    """
+    svn_fs_unlock(svn_fs_t fs, char path, char token, svn_boolean_t break_lock, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_unlock, args)
+
+def svn_fs_get_lock(*args):
+    """svn_fs_get_lock(svn_lock_t lock, svn_fs_t fs, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_get_lock, args)
+
+def svn_fs_get_locks(*args):
+    """
+    svn_fs_get_locks(svn_fs_t fs, char path, svn_fs_get_locks_callback_t get_locks_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_get_locks, args)
+
+def svn_fs_print_modules(*args):
+    """svn_fs_print_modules(svn_stringbuf_t output, apr_pool_t pool) -> svn_error_t"""
+    return apply(_fs.svn_fs_print_modules, args)
+class svn_fs_t:
+    """Proxy of C svn_fs_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_tPtr(svn_fs_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_t, 'thisown', 0)
+        self.__class__ = svn_fs_t
+_fs.svn_fs_t_swigregister(svn_fs_tPtr)
+
+class svn_fs_access_t:
+    """Proxy of C svn_fs_access_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_access_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_access_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_access_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_access_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_access_tPtr(svn_fs_access_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_access_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_access_t, 'thisown', 0)
+        self.__class__ = svn_fs_access_t
+_fs.svn_fs_access_t_swigregister(svn_fs_access_tPtr)
+
+class svn_fs_id_t:
+    """Proxy of C svn_fs_id_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_id_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_id_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_id_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_id_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_id_tPtr(svn_fs_id_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_id_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_id_t, 'thisown', 0)
+        self.__class__ = svn_fs_id_t
+_fs.svn_fs_id_t_swigregister(svn_fs_id_tPtr)
+
+class svn_fs_txn_t:
+    """Proxy of C svn_fs_txn_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_txn_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_txn_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_txn_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_txn_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_txn_tPtr(svn_fs_txn_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_txn_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_txn_t, 'thisown', 0)
+        self.__class__ = svn_fs_txn_t
+_fs.svn_fs_txn_t_swigregister(svn_fs_txn_tPtr)
+
+class svn_fs_root_t:
+    """Proxy of C svn_fs_root_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_root_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_root_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_root_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_root_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_root_tPtr(svn_fs_root_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_root_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_root_t, 'thisown', 0)
+        self.__class__ = svn_fs_root_t
+_fs.svn_fs_root_t_swigregister(svn_fs_root_tPtr)
+
+class svn_fs_history_t:
+    """Proxy of C svn_fs_history_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_history_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_history_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_history_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_history_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_fs_history_tPtr(svn_fs_history_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_history_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_history_t, 'thisown', 0)
+        self.__class__ = svn_fs_history_t
+_fs.svn_fs_history_t_swigregister(svn_fs_history_tPtr)
+
+
+def svn_fs_invoke_warning_callback(*args):
+    """svn_fs_invoke_warning_callback(svn_fs_warning_callback_t _obj, void baton, svn_error_t err)"""
+    return apply(_fs.svn_fs_invoke_warning_callback, args)
+
+def svn_fs_invoke_get_locks_callback(*args):
+    """
+    svn_fs_invoke_get_locks_callback(svn_fs_get_locks_callback_t _obj, void baton, svn_lock_t lock, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_fs.svn_fs_invoke_get_locks_callback, args)
+class svn_fs_warning_callback_t:
+    """Proxy of C svn_fs_warning_callback_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_warning_callback_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_warning_callback_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_warning_callback_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_warning_callback_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_fs_invoke_warning_callback(self, *args)
+
+
+class svn_fs_warning_callback_tPtr(svn_fs_warning_callback_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_warning_callback_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_warning_callback_t, 'thisown', 0)
+        self.__class__ = svn_fs_warning_callback_t
+_fs.svn_fs_warning_callback_t_swigregister(svn_fs_warning_callback_tPtr)
+
+class svn_fs_get_locks_callback_t:
+    """Proxy of C svn_fs_get_locks_callback_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_fs_get_locks_callback_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_fs_get_locks_callback_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_fs_get_locks_callback_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_fs_get_locks_callback_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_fs_invoke_get_locks_callback(self, *args)
+
+
+class svn_fs_get_locks_callback_tPtr(svn_fs_get_locks_callback_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_fs_get_locks_callback_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_fs_get_locks_callback_t, 'thisown', 0)
+        self.__class__ = svn_fs_get_locks_callback_t
+_fs.svn_fs_get_locks_callback_t_swigregister(svn_fs_get_locks_callback_tPtr)
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libapr-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libapr-1.dll
new file mode 100644
index 0000000..cf77bb1f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libapr-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libapriconv-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libapriconv-1.dll
new file mode 100644
index 0000000..3ec773c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libapriconv-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libaprutil-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libaprutil-1.dll
new file mode 100644
index 0000000..bb85898
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libaprutil-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libeay32.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libeay32.dll
new file mode 100644
index 0000000..0d5a3c5c9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libeay32.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libeay32.dll.manifest b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libeay32.dll.manifest
new file mode 100644
index 0000000..324c707e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libeay32.dll.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>

+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>

+  <dependency>

+    <dependentAssembly>

+      <assemblyIdentity type='win32' name='Microsoft.VC80.CRT' version='8.0.50727.762' processorArchitecture='x86' publicKeyToken='1fc8b3b9a1e18e3b' />

+    </dependentAssembly>

+  </dependency>

+</assembly>

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_client-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_client-1.dll
new file mode 100644
index 0000000..932eadd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_client-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_delta-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_delta-1.dll
new file mode 100644
index 0000000..7560bf91
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_delta-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_diff-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_diff-1.dll
new file mode 100644
index 0000000..1364d28
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_diff-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_fs-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_fs-1.dll
new file mode 100644
index 0000000..5781a54
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_fs-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_ra-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_ra-1.dll
new file mode 100644
index 0000000..f7e7ecaf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_ra-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_repos-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_repos-1.dll
new file mode 100644
index 0000000..75d44e9e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_repos-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_subr-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_subr-1.dll
new file mode 100644
index 0000000..1892842f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_subr-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_swig_py-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_swig_py-1.dll
new file mode 100644
index 0000000..e91fbff9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_swig_py-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_wc-1.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_wc-1.dll
new file mode 100644
index 0000000..8cf2f112f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/libsvn_wc-1.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ra.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ra.py
new file mode 100644
index 0000000..54c9dda
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ra.py
@@ -0,0 +1,2355 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+import core
+import delta
+
+import _ra
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
+    if (name == "this"):
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)
+
+def _swig_setattr(self,class_type,name,value):
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)
+
+def _swig_getattr(self,class_type,name):
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)
+    raise AttributeError,name
+
+
+def svn_ra_version(*args):
+    """svn_ra_version() -> svn_version_t"""
+    return apply(_ra.svn_ra_version, args)
+class svn_ra_reporter3_t:
+    """Proxy of C svn_ra_reporter3_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_reporter3_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_reporter3_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_reporter3_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["set_path"] = _ra.svn_ra_reporter3_t_set_path_set
+    __swig_getmethods__["set_path"] = _ra.svn_ra_reporter3_t_set_path_get
+    __swig_setmethods__["delete_path"] = _ra.svn_ra_reporter3_t_delete_path_set
+    __swig_getmethods__["delete_path"] = _ra.svn_ra_reporter3_t_delete_path_get
+    __swig_setmethods__["link_path"] = _ra.svn_ra_reporter3_t_link_path_set
+    __swig_getmethods__["link_path"] = _ra.svn_ra_reporter3_t_link_path_get
+    __swig_setmethods__["finish_report"] = _ra.svn_ra_reporter3_t_finish_report_set
+    __swig_getmethods__["finish_report"] = _ra.svn_ra_reporter3_t_finish_report_get
+    __swig_setmethods__["abort_report"] = _ra.svn_ra_reporter3_t_abort_report_set
+    __swig_getmethods__["abort_report"] = _ra.svn_ra_reporter3_t_abort_report_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_reporter3_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def set_path(self, *args):
+      return svn_ra_reporter3_invoke_set_path(self, *args)
+
+    def delete_path(self, *args):
+      return svn_ra_reporter3_invoke_delete_path(self, *args)
+
+    def link_path(self, *args):
+      return svn_ra_reporter3_invoke_link_path(self, *args)
+
+    def finish_report(self, *args):
+      return svn_ra_reporter3_invoke_finish_report(self, *args)
+
+    def abort_report(self, *args):
+      return svn_ra_reporter3_invoke_abort_report(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_ra_reporter3_t"""
+        _swig_setattr(self, svn_ra_reporter3_t, 'this', apply(_ra.new_svn_ra_reporter3_t, args))
+        _swig_setattr(self, svn_ra_reporter3_t, 'thisown', 1)
+    def __del__(self, destroy=_ra.delete_svn_ra_reporter3_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_ra_reporter3_tPtr(svn_ra_reporter3_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_reporter3_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_reporter3_t, 'thisown', 0)
+        self.__class__ = svn_ra_reporter3_t
+_ra.svn_ra_reporter3_t_swigregister(svn_ra_reporter3_tPtr)
+
+class svn_ra_reporter2_t:
+    """Proxy of C svn_ra_reporter2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_reporter2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_reporter2_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_reporter2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["set_path"] = _ra.svn_ra_reporter2_t_set_path_set
+    __swig_getmethods__["set_path"] = _ra.svn_ra_reporter2_t_set_path_get
+    __swig_setmethods__["delete_path"] = _ra.svn_ra_reporter2_t_delete_path_set
+    __swig_getmethods__["delete_path"] = _ra.svn_ra_reporter2_t_delete_path_get
+    __swig_setmethods__["link_path"] = _ra.svn_ra_reporter2_t_link_path_set
+    __swig_getmethods__["link_path"] = _ra.svn_ra_reporter2_t_link_path_get
+    __swig_setmethods__["finish_report"] = _ra.svn_ra_reporter2_t_finish_report_set
+    __swig_getmethods__["finish_report"] = _ra.svn_ra_reporter2_t_finish_report_get
+    __swig_setmethods__["abort_report"] = _ra.svn_ra_reporter2_t_abort_report_set
+    __swig_getmethods__["abort_report"] = _ra.svn_ra_reporter2_t_abort_report_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_reporter2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def set_path(self, *args):
+      return svn_ra_reporter2_invoke_set_path(self, *args)
+
+    def delete_path(self, *args):
+      return svn_ra_reporter2_invoke_delete_path(self, *args)
+
+    def link_path(self, *args):
+      return svn_ra_reporter2_invoke_link_path(self, *args)
+
+    def finish_report(self, *args):
+      return svn_ra_reporter2_invoke_finish_report(self, *args)
+
+    def abort_report(self, *args):
+      return svn_ra_reporter2_invoke_abort_report(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_ra_reporter2_t"""
+        _swig_setattr(self, svn_ra_reporter2_t, 'this', apply(_ra.new_svn_ra_reporter2_t, args))
+        _swig_setattr(self, svn_ra_reporter2_t, 'thisown', 1)
+    def __del__(self, destroy=_ra.delete_svn_ra_reporter2_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_ra_reporter2_tPtr(svn_ra_reporter2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_reporter2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_reporter2_t, 'thisown', 0)
+        self.__class__ = svn_ra_reporter2_t
+_ra.svn_ra_reporter2_t_swigregister(svn_ra_reporter2_tPtr)
+
+class svn_ra_reporter_t:
+    """Proxy of C svn_ra_reporter_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_reporter_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_reporter_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_reporter_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["set_path"] = _ra.svn_ra_reporter_t_set_path_set
+    __swig_getmethods__["set_path"] = _ra.svn_ra_reporter_t_set_path_get
+    __swig_setmethods__["delete_path"] = _ra.svn_ra_reporter_t_delete_path_set
+    __swig_getmethods__["delete_path"] = _ra.svn_ra_reporter_t_delete_path_get
+    __swig_setmethods__["link_path"] = _ra.svn_ra_reporter_t_link_path_set
+    __swig_getmethods__["link_path"] = _ra.svn_ra_reporter_t_link_path_get
+    __swig_setmethods__["finish_report"] = _ra.svn_ra_reporter_t_finish_report_set
+    __swig_getmethods__["finish_report"] = _ra.svn_ra_reporter_t_finish_report_get
+    __swig_setmethods__["abort_report"] = _ra.svn_ra_reporter_t_abort_report_set
+    __swig_getmethods__["abort_report"] = _ra.svn_ra_reporter_t_abort_report_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_reporter_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def set_path(self, *args):
+      return svn_ra_reporter_invoke_set_path(self, *args)
+
+    def delete_path(self, *args):
+      return svn_ra_reporter_invoke_delete_path(self, *args)
+
+    def link_path(self, *args):
+      return svn_ra_reporter_invoke_link_path(self, *args)
+
+    def finish_report(self, *args):
+      return svn_ra_reporter_invoke_finish_report(self, *args)
+
+    def abort_report(self, *args):
+      return svn_ra_reporter_invoke_abort_report(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_ra_reporter_t"""
+        # Allocate the underlying C struct and mark this proxy as its owner.
+        _swig_setattr(self, svn_ra_reporter_t, 'this', apply(_ra.new_svn_ra_reporter_t, args))
+        _swig_setattr(self, svn_ra_reporter_t, 'thisown', 1)
+    def __del__(self, destroy=_ra.delete_svn_ra_reporter_t):
+        """__del__(self)"""
+        # Free the C struct only if owned; errors are swallowed because
+        # destructors may run during interpreter shutdown.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_ra_reporter_tPtr(svn_ra_reporter_t):
+    # SWIG 1.3 back-compat shim: wraps an existing C pointer without taking
+    # ownership, then rebrands the instance as the real proxy class.  The
+    # __class__ assignment must come last.
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_reporter_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_reporter_t, 'thisown', 0)
+        self.__class__ = svn_ra_reporter_t
+_ra.svn_ra_reporter_t_swigregister(svn_ra_reporter_tPtr)
+
+class svn_ra_callbacks2_t:
+    """Proxy of C svn_ra_callbacks2_t struct"""
+    # Class-level accessor tables mapping member names to C getter/setter
+    # stubs; the lambda dunders here are later shadowed by the defs below.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_callbacks2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_callbacks2_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_callbacks2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["open_tmp_file"] = _ra.svn_ra_callbacks2_t_open_tmp_file_set
+    __swig_getmethods__["open_tmp_file"] = _ra.svn_ra_callbacks2_t_open_tmp_file_get
+    __swig_setmethods__["auth_baton"] = _ra.svn_ra_callbacks2_t_auth_baton_set
+    __swig_getmethods__["auth_baton"] = _ra.svn_ra_callbacks2_t_auth_baton_get
+    __swig_setmethods__["get_wc_prop"] = _ra.svn_ra_callbacks2_t_get_wc_prop_set
+    __swig_getmethods__["get_wc_prop"] = _ra.svn_ra_callbacks2_t_get_wc_prop_get
+    __swig_setmethods__["set_wc_prop"] = _ra.svn_ra_callbacks2_t_set_wc_prop_set
+    __swig_getmethods__["set_wc_prop"] = _ra.svn_ra_callbacks2_t_set_wc_prop_get
+    __swig_setmethods__["push_wc_prop"] = _ra.svn_ra_callbacks2_t_push_wc_prop_set
+    __swig_getmethods__["push_wc_prop"] = _ra.svn_ra_callbacks2_t_push_wc_prop_get
+    __swig_setmethods__["invalidate_wc_props"] = _ra.svn_ra_callbacks2_t_invalidate_wc_props_set
+    __swig_getmethods__["invalidate_wc_props"] = _ra.svn_ra_callbacks2_t_invalidate_wc_props_get
+    __swig_setmethods__["progress_func"] = _ra.svn_ra_callbacks2_t_progress_func_set
+    __swig_getmethods__["progress_func"] = _ra.svn_ra_callbacks2_t_progress_func_get
+    __swig_setmethods__["progress_baton"] = _ra.svn_ra_callbacks2_t_progress_baton_set
+    __swig_getmethods__["progress_baton"] = _ra.svn_ra_callbacks2_t_progress_baton_get
+    __swig_setmethods__["cancel_func"] = _ra.svn_ra_callbacks2_t_cancel_func_set
+    __swig_getmethods__["cancel_func"] = _ra.svn_ra_callbacks2_t_cancel_func_get
+    __swig_setmethods__["get_client_string"] = _ra.svn_ra_callbacks2_t_get_client_string_set
+    __swig_getmethods__["get_client_string"] = _ra.svn_ra_callbacks2_t_get_client_string_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_callbacks2_t"""
+      # Tie this proxy's validity to parent_pool (default: the global
+      # application pool) via a weakref to the pool's _is_valid object.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # The weakref returns None once the parent pool has been destroyed.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore user-added Python attributes cached by __setattr__ onto the
+      # freshly wrapped C value.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Cache the Python-side value for later restoration by __getattr__.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def open_tmp_file(self, *args):
+      # Invoke the open_tmp_file callback stored in the C struct.
+      return svn_ra_callbacks2_invoke_open_tmp_file(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_ra_callbacks2_t"""
+        # Allocate the C struct and mark this proxy as its owner.
+        _swig_setattr(self, svn_ra_callbacks2_t, 'this', apply(_ra.new_svn_ra_callbacks2_t, args))
+        _swig_setattr(self, svn_ra_callbacks2_t, 'thisown', 1)
+    def __del__(self, destroy=_ra.delete_svn_ra_callbacks2_t):
+        """__del__(self)"""
+        # Free only if owned; swallow errors during interpreter shutdown.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+class svn_ra_callbacks2_tPtr(svn_ra_callbacks2_t):
+    # SWIG 1.3 back-compat shim: wraps an existing C pointer without taking
+    # ownership, then rebrands the instance as the real proxy class.
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_callbacks2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_callbacks2_t, 'thisown', 0)
+        self.__class__ = svn_ra_callbacks2_t
+_ra.svn_ra_callbacks2_t_swigregister(svn_ra_callbacks2_tPtr)
+
+class svn_ra_callbacks_t:
+    """Proxy of C svn_ra_callbacks_t struct"""
+    # Accessor tables mapping member names to C getter/setter stubs; the
+    # lambda dunders are later shadowed by the defs below.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_callbacks_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_callbacks_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_callbacks_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["open_tmp_file"] = _ra.svn_ra_callbacks_t_open_tmp_file_set
+    __swig_getmethods__["open_tmp_file"] = _ra.svn_ra_callbacks_t_open_tmp_file_get
+    __swig_setmethods__["auth_baton"] = _ra.svn_ra_callbacks_t_auth_baton_set
+    __swig_getmethods__["auth_baton"] = _ra.svn_ra_callbacks_t_auth_baton_get
+    __swig_setmethods__["get_wc_prop"] = _ra.svn_ra_callbacks_t_get_wc_prop_set
+    __swig_getmethods__["get_wc_prop"] = _ra.svn_ra_callbacks_t_get_wc_prop_get
+    __swig_setmethods__["set_wc_prop"] = _ra.svn_ra_callbacks_t_set_wc_prop_set
+    __swig_getmethods__["set_wc_prop"] = _ra.svn_ra_callbacks_t_set_wc_prop_get
+    __swig_setmethods__["push_wc_prop"] = _ra.svn_ra_callbacks_t_push_wc_prop_set
+    __swig_getmethods__["push_wc_prop"] = _ra.svn_ra_callbacks_t_push_wc_prop_get
+    __swig_setmethods__["invalidate_wc_props"] = _ra.svn_ra_callbacks_t_invalidate_wc_props_set
+    __swig_getmethods__["invalidate_wc_props"] = _ra.svn_ra_callbacks_t_invalidate_wc_props_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_callbacks_t"""
+      # Tie this proxy's validity to parent_pool (default: the global
+      # application pool) via a weakref to the pool's _is_valid object.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # The weakref returns None once the parent pool has been destroyed.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore user-added Python attributes cached by __setattr__ onto the
+      # freshly wrapped C value.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Cache the Python-side value for later restoration by __getattr__.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def open_tmp_file(self, *args):
+      # Invoke the open_tmp_file callback stored in the C struct.
+      return svn_ra_callbacks_invoke_open_tmp_file(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_ra_callbacks_t"""
+        # Allocate the C struct and mark this proxy as its owner.
+        _swig_setattr(self, svn_ra_callbacks_t, 'this', apply(_ra.new_svn_ra_callbacks_t, args))
+        _swig_setattr(self, svn_ra_callbacks_t, 'thisown', 1)
+    def __del__(self, destroy=_ra.delete_svn_ra_callbacks_t):
+        """__del__(self)"""
+        # Free only if owned; swallow errors during interpreter shutdown.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_ra_callbacks_tPtr(svn_ra_callbacks_t):
+    # SWIG 1.3 back-compat shim: wraps an existing C pointer without taking
+    # ownership, then rebrands the instance as the real proxy class.
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_callbacks_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_callbacks_t, 'thisown', 0)
+        self.__class__ = svn_ra_callbacks_t
+_ra.svn_ra_callbacks_t_swigregister(svn_ra_callbacks_tPtr)
+
+
+def svn_ra_initialize(*args):
+    """svn_ra_initialize(apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_initialize, args)
+
+def svn_ra_create_callbacks(*args):
+    """svn_ra_create_callbacks(svn_ra_callbacks2_t callbacks, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_create_callbacks, args)
+
+def svn_ra_open3(*args):
+    """
+    svn_ra_open3(svn_ra_session_t session_p, char repos_URL, char uuid, 
+        svn_ra_callbacks2_t callbacks, apr_hash_t config, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_open3, args)
+
+def svn_ra_open2(*args):
+    """
+    svn_ra_open2(svn_ra_session_t session_p, char repos_URL, svn_ra_callbacks2_t callbacks, 
+        apr_hash_t config, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_open2, args)
+
+def svn_ra_open(*args):
+    """
+    svn_ra_open(svn_ra_session_t session_p, char repos_URL, svn_ra_callbacks_t callbacks, 
+        void callback_baton, apr_hash_t config, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_open, args)
+
+def svn_ra_reparent(*args):
+    """svn_ra_reparent(svn_ra_session_t ra_session, char url, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_reparent, args)
+
+def svn_ra_get_session_url(*args):
+    """svn_ra_get_session_url(svn_ra_session_t ra_session, char url, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_get_session_url, args)
+
+def svn_ra_get_latest_revnum(*args):
+    """
+    svn_ra_get_latest_revnum(svn_ra_session_t session, svn_revnum_t latest_revnum, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_latest_revnum, args)
+
+def svn_ra_get_dated_revision(*args):
+    """
+    svn_ra_get_dated_revision(svn_ra_session_t session, svn_revnum_t revision, apr_time_t tm, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_dated_revision, args)
+
+def svn_ra_change_rev_prop(*args):
+    """
+    svn_ra_change_rev_prop(svn_ra_session_t session, svn_revnum_t rev, char name, 
+        svn_string_t value, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_change_rev_prop, args)
+
+def svn_ra_rev_proplist(*args):
+    """
+    svn_ra_rev_proplist(svn_ra_session_t session, svn_revnum_t rev, apr_hash_t props, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_rev_proplist, args)
+
+def svn_ra_rev_prop(*args):
+    """
+    svn_ra_rev_prop(svn_ra_session_t session, svn_revnum_t rev, char name, 
+        svn_string_t value, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_rev_prop, args)
+
+def svn_ra_get_commit_editor3(*args):
+    """
+    svn_ra_get_commit_editor3(svn_ra_session_t session, svn_delta_editor_t editor, 
+        void edit_baton, apr_hash_t revprop_table, 
+        svn_commit_callback2_t callback, apr_hash_t lock_tokens, 
+        svn_boolean_t keep_locks, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_commit_editor3, args)
+
+def svn_ra_get_commit_editor2(*args):
+    """
+    svn_ra_get_commit_editor2(svn_ra_session_t session, svn_delta_editor_t editor, 
+        void edit_baton, char log_msg, svn_commit_callback2_t callback, 
+        apr_hash_t lock_tokens, 
+        svn_boolean_t keep_locks, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_commit_editor2, args)
+
+def svn_ra_get_commit_editor(*args):
+    """
+    svn_ra_get_commit_editor(svn_ra_session_t session, svn_delta_editor_t editor, 
+        void edit_baton, char log_msg, svn_commit_callback_t callback, 
+        apr_hash_t lock_tokens, svn_boolean_t keep_locks, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_commit_editor, args)
+
+def svn_ra_get_file(*args):
+    """
+    svn_ra_get_file(svn_ra_session_t session, char path, svn_revnum_t revision, 
+        svn_stream_t stream, svn_revnum_t fetched_rev, 
+        apr_hash_t props, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_file, args)
+
+def svn_ra_get_dir2(*args):
+    """
+    svn_ra_get_dir2(svn_ra_session_t session, apr_hash_t dirents, svn_revnum_t fetched_rev, 
+        apr_hash_t props, char path, 
+        svn_revnum_t revision, apr_uint32_t dirent_fields, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_dir2, args)
+
+def svn_ra_get_dir(*args):
+    """
+    svn_ra_get_dir(svn_ra_session_t session, char path, svn_revnum_t revision, 
+        apr_hash_t dirents, svn_revnum_t fetched_rev, 
+        apr_hash_t props, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_dir, args)
+
+def svn_ra_get_mergeinfo(*args):
+    """
+    svn_ra_get_mergeinfo(svn_ra_session_t session, svn_mergeinfo_catalog_t catalog, 
+        apr_array_header_t paths, svn_revnum_t revision, 
+        svn_mergeinfo_inheritance_t inherit, 
+        svn_boolean_t include_descendants, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_mergeinfo, args)
+
+def svn_ra_do_update2(*args):
+    """
+    svn_ra_do_update2(svn_ra_session_t session, svn_ra_reporter3_t reporter, 
+        void report_baton, svn_revnum_t revision_to_update_to, 
+        char update_target, svn_depth_t depth, 
+        svn_boolean_t send_copyfrom_args, svn_delta_editor_t update_editor, 
+        void update_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_update2, args)
+
+def svn_ra_do_update(*args):
+    """
+    svn_ra_do_update(svn_ra_session_t session, svn_ra_reporter2_t reporter, 
+        void report_baton, svn_revnum_t revision_to_update_to, 
+        char update_target, svn_boolean_t recurse, 
+        svn_delta_editor_t update_editor, 
+        void update_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_update, args)
+
+def svn_ra_do_switch2(*args):
+    """
+    svn_ra_do_switch2(svn_ra_session_t session, svn_ra_reporter3_t reporter, 
+        void report_baton, svn_revnum_t revision_to_switch_to, 
+        char switch_target, svn_depth_t depth, 
+        char switch_url, svn_delta_editor_t switch_editor, 
+        void switch_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_switch2, args)
+
+def svn_ra_do_switch(*args):
+    """
+    svn_ra_do_switch(svn_ra_session_t session, svn_ra_reporter2_t reporter, 
+        void report_baton, svn_revnum_t revision_to_switch_to, 
+        char switch_target, svn_boolean_t recurse, 
+        char switch_url, svn_delta_editor_t switch_editor, 
+        void switch_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_switch, args)
+
+def svn_ra_do_status2(*args):
+    """
+    svn_ra_do_status2(svn_ra_session_t session, svn_ra_reporter3_t reporter, 
+        void report_baton, char status_target, svn_revnum_t revision, 
+        svn_depth_t depth, svn_delta_editor_t status_editor, 
+        void status_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_status2, args)
+
+def svn_ra_do_status(*args):
+    """
+    svn_ra_do_status(svn_ra_session_t session, svn_ra_reporter2_t reporter, 
+        void report_baton, char status_target, svn_revnum_t revision, 
+        svn_boolean_t recurse, svn_delta_editor_t status_editor, 
+        void status_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_status, args)
+
+def svn_ra_do_diff3(*args):
+    """
+    svn_ra_do_diff3(svn_ra_session_t session, svn_ra_reporter3_t reporter, 
+        void report_baton, svn_revnum_t revision, 
+        char diff_target, svn_depth_t depth, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t text_deltas, 
+        char versus_url, svn_delta_editor_t diff_editor, 
+        void diff_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_diff3, args)
+
+def svn_ra_do_diff2(*args):
+    """
+    svn_ra_do_diff2(svn_ra_session_t session, svn_ra_reporter2_t reporter, 
+        void report_baton, svn_revnum_t revision, 
+        char diff_target, svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_boolean_t text_deltas, 
+        char versus_url, svn_delta_editor_t diff_editor, 
+        void diff_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_diff2, args)
+
+def svn_ra_do_diff(*args):
+    """
+    svn_ra_do_diff(svn_ra_session_t session, svn_ra_reporter2_t reporter, 
+        void report_baton, svn_revnum_t revision, 
+        char diff_target, svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        char versus_url, 
+        svn_delta_editor_t diff_editor, void diff_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_do_diff, args)
+
+def svn_ra_get_log2(*args):
+    """
+    svn_ra_get_log2(svn_ra_session_t session, apr_array_header_t paths, 
+        svn_revnum_t start, svn_revnum_t end, int limit, 
+        svn_boolean_t discover_changed_paths, svn_boolean_t strict_node_history, 
+        svn_boolean_t include_merged_revisions, 
+        apr_array_header_t revprops, 
+        svn_log_entry_receiver_t receiver, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_log2, args)
+
+def svn_ra_get_log(*args):
+    """
+    svn_ra_get_log(svn_ra_session_t session, apr_array_header_t paths, 
+        svn_revnum_t start, svn_revnum_t end, int limit, 
+        svn_boolean_t discover_changed_paths, svn_boolean_t strict_node_history, 
+        svn_log_message_receiver_t receiver, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_log, args)
+
+def svn_ra_check_path(*args):
+    """
+    svn_ra_check_path(svn_ra_session_t session, char path, svn_revnum_t revision, 
+        svn_node_kind_t kind, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_check_path, args)
+
+def svn_ra_stat(*args):
+    """
+    svn_ra_stat(svn_ra_session_t session, char path, svn_revnum_t revision, 
+        svn_dirent_t dirent, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_stat, args)
+
+def svn_ra_get_uuid2(*args):
+    """svn_ra_get_uuid2(svn_ra_session_t session, char uuid, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_get_uuid2, args)
+
+def svn_ra_get_uuid(*args):
+    """svn_ra_get_uuid(svn_ra_session_t session, char uuid, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_get_uuid, args)
+
+def svn_ra_get_repos_root2(*args):
+    """svn_ra_get_repos_root2(svn_ra_session_t session, char url, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_get_repos_root2, args)
+
+def svn_ra_get_repos_root(*args):
+    """svn_ra_get_repos_root(svn_ra_session_t session, char url, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_get_repos_root, args)
+
+def svn_ra_get_locations(*args):
+    """
+    svn_ra_get_locations(svn_ra_session_t session, apr_hash_t locations, char path, 
+        svn_revnum_t peg_revision, apr_array_header_t location_revisions, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_locations, args)
+
+def svn_ra_get_location_segments(*args):
+    """
+    svn_ra_get_location_segments(svn_ra_session_t session, char path, svn_revnum_t peg_revision, 
+        svn_revnum_t start_rev, svn_revnum_t end_rev, 
+        svn_location_segment_receiver_t receiver, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_location_segments, args)
+
+def svn_ra_get_file_revs2(*args):
+    """
+    svn_ra_get_file_revs2(svn_ra_session_t session, char path, svn_revnum_t start, 
+        svn_revnum_t end, svn_boolean_t include_merged_revisions, 
+        svn_file_rev_handler_t handler, 
+        void handler_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_file_revs2, args)
+
+def svn_ra_get_file_revs(*args):
+    """
+    svn_ra_get_file_revs(svn_ra_session_t session, char path, svn_revnum_t start, 
+        svn_revnum_t end, svn_ra_file_rev_handler_t handler, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_file_revs, args)
+
+def svn_ra_lock(*args):
+    """
+    svn_ra_lock(svn_ra_session_t session, apr_hash_t path_revs, char comment, 
+        svn_boolean_t steal_lock, svn_ra_lock_callback_t lock_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_lock, args)
+
+def svn_ra_unlock(*args):
+    """
+    svn_ra_unlock(svn_ra_session_t session, apr_hash_t path_tokens, svn_boolean_t break_lock, 
+        svn_ra_lock_callback_t lock_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_unlock, args)
+
+def svn_ra_get_lock(*args):
+    """
+    svn_ra_get_lock(svn_ra_session_t session, svn_lock_t lock, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_lock, args)
+
+def svn_ra_get_locks(*args):
+    """
+    svn_ra_get_locks(svn_ra_session_t session, apr_hash_t locks, char path, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_get_locks, args)
+
+def svn_ra_replay_range(*args):
+    """
+    svn_ra_replay_range(svn_ra_session_t session, svn_revnum_t start_revision, 
+        svn_revnum_t end_revision, svn_revnum_t low_water_mark, 
+        svn_boolean_t send_deltas, svn_ra_replay_revstart_callback_t revstart_func, 
+        svn_ra_replay_revfinish_callback_t revfinish_func, 
+        void replay_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_replay_range, args)
+
+def svn_ra_replay(*args):
+    """
+    svn_ra_replay(svn_ra_session_t session, svn_revnum_t revision, svn_revnum_t low_water_mark, 
+        svn_boolean_t send_deltas, 
+        svn_delta_editor_t editor, void edit_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_replay, args)
+
+def svn_ra_has_capability(*args):
+    """
+    svn_ra_has_capability(svn_ra_session_t session, svn_boolean_t has, char capability, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_ra.svn_ra_has_capability, args)
+SVN_RA_CAPABILITY_DEPTH = _ra.SVN_RA_CAPABILITY_DEPTH
+SVN_RA_CAPABILITY_MERGEINFO = _ra.SVN_RA_CAPABILITY_MERGEINFO
+SVN_RA_CAPABILITY_LOG_REVPROPS = _ra.SVN_RA_CAPABILITY_LOG_REVPROPS
+SVN_RA_CAPABILITY_PARTIAL_REPLAY = _ra.SVN_RA_CAPABILITY_PARTIAL_REPLAY
+SVN_RA_CAPABILITY_COMMIT_REVPROPS = _ra.SVN_RA_CAPABILITY_COMMIT_REVPROPS
+
+def svn_ra_print_modules(*args):
+    """svn_ra_print_modules(svn_stringbuf_t output, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_print_modules, args)
+
+def svn_ra_print_ra_libraries(*args):
+    """svn_ra_print_ra_libraries(svn_stringbuf_t descriptions, void ra_baton, apr_pool_t pool) -> svn_error_t"""
+    return apply(_ra.svn_ra_print_ra_libraries, args)
+class svn_ra_plugin_t:
+    """Proxy of C svn_ra_plugin_t struct"""
+    # Accessor tables mapping member names to C getter/setter stubs; the
+    # lambda dunders are later shadowed by the defs below.
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_plugin_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_plugin_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_plugin_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["name"] = _ra.svn_ra_plugin_t_name_set
+    __swig_getmethods__["name"] = _ra.svn_ra_plugin_t_name_get
+    __swig_setmethods__["description"] = _ra.svn_ra_plugin_t_description_set
+    __swig_getmethods__["description"] = _ra.svn_ra_plugin_t_description_get
+    __swig_setmethods__["open"] = _ra.svn_ra_plugin_t_open_set
+    __swig_getmethods__["open"] = _ra.svn_ra_plugin_t_open_get
+    __swig_setmethods__["get_latest_revnum"] = _ra.svn_ra_plugin_t_get_latest_revnum_set
+    __swig_getmethods__["get_latest_revnum"] = _ra.svn_ra_plugin_t_get_latest_revnum_get
+    __swig_setmethods__["get_dated_revision"] = _ra.svn_ra_plugin_t_get_dated_revision_set
+    __swig_getmethods__["get_dated_revision"] = _ra.svn_ra_plugin_t_get_dated_revision_get
+    __swig_setmethods__["change_rev_prop"] = _ra.svn_ra_plugin_t_change_rev_prop_set
+    __swig_getmethods__["change_rev_prop"] = _ra.svn_ra_plugin_t_change_rev_prop_get
+    __swig_setmethods__["rev_proplist"] = _ra.svn_ra_plugin_t_rev_proplist_set
+    __swig_getmethods__["rev_proplist"] = _ra.svn_ra_plugin_t_rev_proplist_get
+    __swig_setmethods__["rev_prop"] = _ra.svn_ra_plugin_t_rev_prop_set
+    __swig_getmethods__["rev_prop"] = _ra.svn_ra_plugin_t_rev_prop_get
+    __swig_setmethods__["get_commit_editor"] = _ra.svn_ra_plugin_t_get_commit_editor_set
+    __swig_getmethods__["get_commit_editor"] = _ra.svn_ra_plugin_t_get_commit_editor_get
+    __swig_setmethods__["get_file"] = _ra.svn_ra_plugin_t_get_file_set
+    __swig_getmethods__["get_file"] = _ra.svn_ra_plugin_t_get_file_get
+    __swig_setmethods__["get_dir"] = _ra.svn_ra_plugin_t_get_dir_set
+    __swig_getmethods__["get_dir"] = _ra.svn_ra_plugin_t_get_dir_get
+    __swig_setmethods__["do_update"] = _ra.svn_ra_plugin_t_do_update_set
+    __swig_getmethods__["do_update"] = _ra.svn_ra_plugin_t_do_update_get
+    __swig_setmethods__["do_switch"] = _ra.svn_ra_plugin_t_do_switch_set
+    __swig_getmethods__["do_switch"] = _ra.svn_ra_plugin_t_do_switch_get
+    __swig_setmethods__["do_status"] = _ra.svn_ra_plugin_t_do_status_set
+    __swig_getmethods__["do_status"] = _ra.svn_ra_plugin_t_do_status_get
+    __swig_setmethods__["do_diff"] = _ra.svn_ra_plugin_t_do_diff_set
+    __swig_getmethods__["do_diff"] = _ra.svn_ra_plugin_t_do_diff_get
+    __swig_setmethods__["get_log"] = _ra.svn_ra_plugin_t_get_log_set
+    __swig_getmethods__["get_log"] = _ra.svn_ra_plugin_t_get_log_get
+    __swig_setmethods__["check_path"] = _ra.svn_ra_plugin_t_check_path_set
+    __swig_getmethods__["check_path"] = _ra.svn_ra_plugin_t_check_path_get
+    __swig_setmethods__["get_uuid"] = _ra.svn_ra_plugin_t_get_uuid_set
+    __swig_getmethods__["get_uuid"] = _ra.svn_ra_plugin_t_get_uuid_get
+    __swig_setmethods__["get_repos_root"] = _ra.svn_ra_plugin_t_get_repos_root_set
+    __swig_getmethods__["get_repos_root"] = _ra.svn_ra_plugin_t_get_repos_root_get
+    __swig_setmethods__["get_locations"] = _ra.svn_ra_plugin_t_get_locations_set
+    __swig_getmethods__["get_locations"] = _ra.svn_ra_plugin_t_get_locations_get
+    __swig_setmethods__["get_file_revs"] = _ra.svn_ra_plugin_t_get_file_revs_set
+    __swig_getmethods__["get_file_revs"] = _ra.svn_ra_plugin_t_get_file_revs_get
+    __swig_setmethods__["get_version"] = _ra.svn_ra_plugin_t_get_version_set
+    __swig_getmethods__["get_version"] = _ra.svn_ra_plugin_t_get_version_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_plugin_t"""
+      # Tie this proxy's validity to parent_pool (default: the global
+      # application pool) via a weakref to the pool's _is_valid object.
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      # The weakref returns None once the parent pool has been destroyed.
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      # Restore user-added Python attributes cached by __setattr__ onto the
+      # freshly wrapped C value.
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      # Cache the Python-side value for later restoration by __getattr__.
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    # Thin wrappers that invoke the corresponding function pointers stored
+    # in the underlying C svn_ra_plugin_t vtable.
+    def open(self, *args):
+      return svn_ra_plugin_invoke_open(self, *args)
+
+    def get_latest_revnum(self, *args):
+      return svn_ra_plugin_invoke_get_latest_revnum(self, *args)
+
+    def get_dated_revision(self, *args):
+      return svn_ra_plugin_invoke_get_dated_revision(self, *args)
+
+    def change_rev_prop(self, *args):
+      return svn_ra_plugin_invoke_change_rev_prop(self, *args)
+
+    def rev_proplist(self, *args):
+      return svn_ra_plugin_invoke_rev_proplist(self, *args)
+
+    def rev_prop(self, *args):
+      return svn_ra_plugin_invoke_rev_prop(self, *args)
+
+    def get_commit_editor(self, *args):
+      return svn_ra_plugin_invoke_get_commit_editor(self, *args)
+
+    def get_file(self, *args):
+      return svn_ra_plugin_invoke_get_file(self, *args)
+
+    def get_dir(self, *args):
+      return svn_ra_plugin_invoke_get_dir(self, *args)
+
+    def do_update(self, *args):
+      return svn_ra_plugin_invoke_do_update(self, *args)
+
+    def do_switch(self, *args):
+      return svn_ra_plugin_invoke_do_switch(self, *args)
+
+    def do_status(self, *args):
+      return svn_ra_plugin_invoke_do_status(self, *args)
+
+    def do_diff(self, *args):
+      return svn_ra_plugin_invoke_do_diff(self, *args)
+
+    def get_log(self, *args):
+      return svn_ra_plugin_invoke_get_log(self, *args)
+
+    def check_path(self, *args):
+      return svn_ra_plugin_invoke_check_path(self, *args)
+
+    def get_uuid(self, *args):
+      return svn_ra_plugin_invoke_get_uuid(self, *args)
+
+    def get_repos_root(self, *args):
+      return svn_ra_plugin_invoke_get_repos_root(self, *args)
+
+    def get_locations(self, *args):
+      return svn_ra_plugin_invoke_get_locations(self, *args)
+
+    def get_file_revs(self, *args):
+      return svn_ra_plugin_invoke_get_file_revs(self, *args)
+
+    def get_version(self, *args):
+      return svn_ra_plugin_invoke_get_version(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_ra_plugin_t"""
+        # Allocate the C struct and mark this proxy as its owner.
+        _swig_setattr(self, svn_ra_plugin_t, 'this', apply(_ra.new_svn_ra_plugin_t, args))
+        _swig_setattr(self, svn_ra_plugin_t, 'thisown', 1)
+    def __del__(self, destroy=_ra.delete_svn_ra_plugin_t):
+        """__del__(self)"""
+        # Free only if owned; swallow errors during interpreter shutdown.
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_ra_plugin_tPtr(svn_ra_plugin_t):
+    # SWIG 1.3 back-compat shim: wraps an existing C pointer without taking
+    # ownership, then rebrands the instance as the real proxy class.
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_plugin_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_plugin_t, 'thisown', 0)
+        self.__class__ = svn_ra_plugin_t
+_ra.svn_ra_plugin_t_swigregister(svn_ra_plugin_tPtr)
+
+SVN_RA_ABI_VERSION = _ra.SVN_RA_ABI_VERSION
+
def svn_ra_init_ra_libs(*args):
    """svn_ra_init_ra_libs(void ra_baton, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_init_ra_libs(*args)
+
def svn_ra_get_ra_library(*args):
    """svn_ra_get_ra_library(svn_ra_plugin_t library, void ra_baton, char url, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_get_ra_library(*args)
class svn_ra_session_t:
    """Proxy of C svn_ra_session_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_session_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: shadowed by the def __getattr__ below; kept for SWIG compatibility.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_session_t, name)
    # Instances come from the C layer only; direct construction is an error.
    # raise Cls(arg) replaces the Python-2-only "raise Cls, arg" statement.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_ra_session_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the APR pool owning this proxy's memory (defaults to the
      global application pool) and keep a weakref used to detect pool death."""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Restore Python-side state previously stored on this member (see
      # __setattr__) onto the fresh proxy object returned from C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the wrapper so its Python attributes survive round-trips
      # through the C layer (see __getattr__).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
class svn_ra_session_tPtr(svn_ra_session_t):
    # Pointer proxy: binds an existing C object, then re-brands itself as
    # the canonical proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_ra_session_t, 'this', this)
        if not hasattr(self, "thisown"):
            _swig_setattr(self, svn_ra_session_t, 'thisown', 0)
        self.__class__ = svn_ra_session_t
_ra.svn_ra_session_t_swigregister(svn_ra_session_tPtr)
+
+
def svn_ra_reporter3_invoke_set_path(*args):
    """
    svn_ra_reporter3_invoke_set_path(svn_ra_reporter3_t _obj, void report_baton, char path,
        svn_revnum_t revision, svn_depth_t depth,
        svn_boolean_t start_empty, char lock_token,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter3_invoke_set_path(*args)
+
def svn_ra_reporter3_invoke_delete_path(*args):
    """
    svn_ra_reporter3_invoke_delete_path(svn_ra_reporter3_t _obj, void report_baton, char path,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter3_invoke_delete_path(*args)
+
def svn_ra_reporter3_invoke_link_path(*args):
    """
    svn_ra_reporter3_invoke_link_path(svn_ra_reporter3_t _obj, void report_baton, char path,
        char url, svn_revnum_t revision, svn_depth_t depth,
        svn_boolean_t start_empty, char lock_token,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter3_invoke_link_path(*args)
+
def svn_ra_reporter3_invoke_finish_report(*args):
    """svn_ra_reporter3_invoke_finish_report(svn_ra_reporter3_t _obj, void report_baton, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter3_invoke_finish_report(*args)
+
def svn_ra_reporter3_invoke_abort_report(*args):
    """svn_ra_reporter3_invoke_abort_report(svn_ra_reporter3_t _obj, void report_baton, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter3_invoke_abort_report(*args)
+
def svn_ra_reporter2_invoke_set_path(*args):
    """
    svn_ra_reporter2_invoke_set_path(svn_ra_reporter2_t _obj, void report_baton, char path,
        svn_revnum_t revision, svn_boolean_t start_empty,
        char lock_token, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter2_invoke_set_path(*args)
+
def svn_ra_reporter2_invoke_delete_path(*args):
    """
    svn_ra_reporter2_invoke_delete_path(svn_ra_reporter2_t _obj, void report_baton, char path,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter2_invoke_delete_path(*args)
+
def svn_ra_reporter2_invoke_link_path(*args):
    """
    svn_ra_reporter2_invoke_link_path(svn_ra_reporter2_t _obj, void report_baton, char path,
        char url, svn_revnum_t revision, svn_boolean_t start_empty,
        char lock_token, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter2_invoke_link_path(*args)
+
def svn_ra_reporter2_invoke_finish_report(*args):
    """svn_ra_reporter2_invoke_finish_report(svn_ra_reporter2_t _obj, void report_baton, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter2_invoke_finish_report(*args)
+
def svn_ra_reporter2_invoke_abort_report(*args):
    """svn_ra_reporter2_invoke_abort_report(svn_ra_reporter2_t _obj, void report_baton, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter2_invoke_abort_report(*args)
+
def svn_ra_reporter_invoke_set_path(*args):
    """
    svn_ra_reporter_invoke_set_path(svn_ra_reporter_t _obj, void report_baton, char path,
        svn_revnum_t revision, svn_boolean_t start_empty,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter_invoke_set_path(*args)
+
def svn_ra_reporter_invoke_delete_path(*args):
    """
    svn_ra_reporter_invoke_delete_path(svn_ra_reporter_t _obj, void report_baton, char path,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter_invoke_delete_path(*args)
+
def svn_ra_reporter_invoke_link_path(*args):
    """
    svn_ra_reporter_invoke_link_path(svn_ra_reporter_t _obj, void report_baton, char path,
        char url, svn_revnum_t revision, svn_boolean_t start_empty,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter_invoke_link_path(*args)
+
def svn_ra_reporter_invoke_finish_report(*args):
    """svn_ra_reporter_invoke_finish_report(svn_ra_reporter_t _obj, void report_baton, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter_invoke_finish_report(*args)
+
def svn_ra_reporter_invoke_abort_report(*args):
    """svn_ra_reporter_invoke_abort_report(svn_ra_reporter_t _obj, void report_baton, apr_pool_t pool) -> svn_error_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_reporter_invoke_abort_report(*args)
+
def svn_ra_callbacks2_invoke_open_tmp_file(*args):
    """
    svn_ra_callbacks2_invoke_open_tmp_file(svn_ra_callbacks2_t _obj, apr_file_t fp, void callback_baton,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_callbacks2_invoke_open_tmp_file(*args)
+
def svn_ra_callbacks_invoke_open_tmp_file(*args):
    """
    svn_ra_callbacks_invoke_open_tmp_file(svn_ra_callbacks_t _obj, apr_file_t fp, void callback_baton,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_callbacks_invoke_open_tmp_file(*args)
+
def svn_ra_plugin_invoke_open(*args):
    """
    svn_ra_plugin_invoke_open(svn_ra_plugin_t _obj, void session_baton, char repos_URL,
        svn_ra_callbacks_t callbacks, void callback_baton,
        apr_hash_t config, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_open(*args)
+
def svn_ra_plugin_invoke_get_latest_revnum(*args):
    """
    svn_ra_plugin_invoke_get_latest_revnum(svn_ra_plugin_t _obj, void session_baton, svn_revnum_t latest_revnum,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_latest_revnum(*args)
+
def svn_ra_plugin_invoke_get_dated_revision(*args):
    """
    svn_ra_plugin_invoke_get_dated_revision(svn_ra_plugin_t _obj, void session_baton, svn_revnum_t revision,
        apr_time_t tm, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_dated_revision(*args)
+
def svn_ra_plugin_invoke_change_rev_prop(*args):
    """
    svn_ra_plugin_invoke_change_rev_prop(svn_ra_plugin_t _obj, void session_baton, svn_revnum_t rev,
        char name, svn_string_t value, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_change_rev_prop(*args)
+
def svn_ra_plugin_invoke_rev_proplist(*args):
    """
    svn_ra_plugin_invoke_rev_proplist(svn_ra_plugin_t _obj, void session_baton, svn_revnum_t rev,
        apr_hash_t props, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_rev_proplist(*args)
+
def svn_ra_plugin_invoke_rev_prop(*args):
    """
    svn_ra_plugin_invoke_rev_prop(svn_ra_plugin_t _obj, void session_baton, svn_revnum_t rev,
        char name, svn_string_t value, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_rev_prop(*args)
+
def svn_ra_plugin_invoke_get_commit_editor(*args):
    """
    svn_ra_plugin_invoke_get_commit_editor(svn_ra_plugin_t _obj, void session_baton, svn_delta_editor_t editor,
        void edit_baton, char log_msg,
        svn_commit_callback_t callback, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_commit_editor(*args)
+
def svn_ra_plugin_invoke_get_file(*args):
    """
    svn_ra_plugin_invoke_get_file(svn_ra_plugin_t _obj, void session_baton, char path,
        svn_revnum_t revision, svn_stream_t stream,
        svn_revnum_t fetched_rev, apr_hash_t props,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_file(*args)
+
def svn_ra_plugin_invoke_get_dir(*args):
    """
    svn_ra_plugin_invoke_get_dir(svn_ra_plugin_t _obj, void session_baton, char path,
        svn_revnum_t revision, apr_hash_t dirents,
        svn_revnum_t fetched_rev, apr_hash_t props, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_dir(*args)
+
def svn_ra_plugin_invoke_do_update(*args):
    """
    svn_ra_plugin_invoke_do_update(svn_ra_plugin_t _obj, void session_baton, svn_ra_reporter_t reporter,
        void report_baton, svn_revnum_t revision_to_update_to,
        char update_target,
        svn_boolean_t recurse, svn_delta_editor_t update_editor,
        void update_baton, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_do_update(*args)
+
def svn_ra_plugin_invoke_do_switch(*args):
    """
    svn_ra_plugin_invoke_do_switch(svn_ra_plugin_t _obj, void session_baton, svn_ra_reporter_t reporter,
        void report_baton, svn_revnum_t revision_to_switch_to,
        char switch_target,
        svn_boolean_t recurse, char switch_url, svn_delta_editor_t switch_editor,
        void switch_baton,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_do_switch(*args)
+
def svn_ra_plugin_invoke_do_status(*args):
    """
    svn_ra_plugin_invoke_do_status(svn_ra_plugin_t _obj, void session_baton, svn_ra_reporter_t reporter,
        void report_baton, char status_target,
        svn_revnum_t revision, svn_boolean_t recurse,
        svn_delta_editor_t status_editor,
        void status_baton, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_do_status(*args)
+
def svn_ra_plugin_invoke_do_diff(*args):
    """
    svn_ra_plugin_invoke_do_diff(svn_ra_plugin_t _obj, void session_baton, svn_ra_reporter_t reporter,
        void report_baton, svn_revnum_t revision,
        char diff_target, svn_boolean_t recurse,
        svn_boolean_t ignore_ancestry, char versus_url,
        svn_delta_editor_t diff_editor,
        void diff_baton, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_do_diff(*args)
+
def svn_ra_plugin_invoke_get_log(*args):
    """
    svn_ra_plugin_invoke_get_log(svn_ra_plugin_t _obj, void session_baton, apr_array_header_t paths,
        svn_revnum_t start, svn_revnum_t end,
        svn_boolean_t discover_changed_paths,
        svn_boolean_t strict_node_history, svn_log_message_receiver_t receiver,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_log(*args)
+
def svn_ra_plugin_invoke_check_path(*args):
    """
    svn_ra_plugin_invoke_check_path(svn_ra_plugin_t _obj, void session_baton, char path,
        svn_revnum_t revision, svn_node_kind_t kind,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_check_path(*args)
+
def svn_ra_plugin_invoke_get_uuid(*args):
    """
    svn_ra_plugin_invoke_get_uuid(svn_ra_plugin_t _obj, void session_baton, char uuid,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_uuid(*args)
+
def svn_ra_plugin_invoke_get_repos_root(*args):
    """
    svn_ra_plugin_invoke_get_repos_root(svn_ra_plugin_t _obj, void session_baton, char url,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_repos_root(*args)
+
def svn_ra_plugin_invoke_get_locations(*args):
    """
    svn_ra_plugin_invoke_get_locations(svn_ra_plugin_t _obj, void session_baton, apr_hash_t locations,
        char path, svn_revnum_t peg_revision,
        apr_array_header_t location_revisions,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_locations(*args)
+
def svn_ra_plugin_invoke_get_file_revs(*args):
    """
    svn_ra_plugin_invoke_get_file_revs(svn_ra_plugin_t _obj, void session_baton, char path,
        svn_revnum_t start, svn_revnum_t end, svn_ra_file_rev_handler_t handler,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_file_revs(*args)
+
def svn_ra_plugin_invoke_get_version(*args):
    """svn_ra_plugin_invoke_get_version(svn_ra_plugin_t _obj) -> svn_version_t"""
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_plugin_invoke_get_version(*args)
+
def svn_ra_invoke_get_wc_prop_func(*args):
    """
    svn_ra_invoke_get_wc_prop_func(svn_ra_get_wc_prop_func_t _obj, void baton, char relpath,
        char name, svn_string_t value, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_get_wc_prop_func(*args)
+
def svn_ra_invoke_set_wc_prop_func(*args):
    """
    svn_ra_invoke_set_wc_prop_func(svn_ra_set_wc_prop_func_t _obj, void baton, char path,
        char name, svn_string_t value, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_set_wc_prop_func(*args)
+
def svn_ra_invoke_push_wc_prop_func(*args):
    """
    svn_ra_invoke_push_wc_prop_func(svn_ra_push_wc_prop_func_t _obj, void baton, char path,
        char name, svn_string_t value, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_push_wc_prop_func(*args)
+
def svn_ra_invoke_invalidate_wc_props_func(*args):
    """
    svn_ra_invoke_invalidate_wc_props_func(svn_ra_invalidate_wc_props_func_t _obj, void baton,
        char path, char name, apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_invalidate_wc_props_func(*args)
+
def svn_ra_invoke_get_latest_revnum_func(*args):
    """
    svn_ra_invoke_get_latest_revnum_func(svn_ra_get_latest_revnum_func_t _obj, void session_baton,
        svn_revnum_t latest_revnum) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_get_latest_revnum_func(*args)
+
def svn_ra_invoke_get_client_string_func(*args):
    """
    svn_ra_invoke_get_client_string_func(svn_ra_get_client_string_func_t _obj, void baton, char name,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_get_client_string_func(*args)
+
def svn_ra_invoke_file_rev_handler(*args):
    """
    svn_ra_invoke_file_rev_handler(svn_ra_file_rev_handler_t _obj, void baton, char path,
        svn_revnum_t rev, apr_hash_t rev_props, svn_txdelta_window_handler_t delta_handler,
        void delta_baton,
        apr_array_header_t prop_diffs,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_file_rev_handler(*args)
+
def svn_ra_invoke_lock_callback(*args):
    """
    svn_ra_invoke_lock_callback(svn_ra_lock_callback_t _obj, void baton, char path,
        svn_boolean_t do_lock, svn_lock_t lock, svn_error_t ra_err,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_lock_callback(*args)
+
def svn_ra_invoke_progress_notify_func(*args):
    """
    svn_ra_invoke_progress_notify_func(svn_ra_progress_notify_func_t _obj, apr_off_t progress,
        apr_off_t total, void baton, apr_pool_t pool)
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_progress_notify_func(*args)
+
def svn_ra_invoke_replay_revstart_callback(*args):
    """
    svn_ra_invoke_replay_revstart_callback(svn_ra_replay_revstart_callback_t _obj, svn_revnum_t revision,
        void replay_baton, svn_delta_editor_t editor,
        void edit_baton, apr_hash_t rev_props,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_replay_revstart_callback(*args)
+
def svn_ra_invoke_replay_revfinish_callback(*args):
    """
    svn_ra_invoke_replay_revfinish_callback(svn_ra_replay_revfinish_callback_t _obj, svn_revnum_t revision,
        void replay_baton, svn_delta_editor_t editor,
        void edit_baton, apr_hash_t rev_props,
        apr_pool_t pool) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_replay_revfinish_callback(*args)
+
def svn_ra_invoke_init_func(*args):
    """
    svn_ra_invoke_init_func(svn_ra_init_func_t _obj, int abi_version, apr_pool_t pool,
        apr_hash_t hash) -> svn_error_t
    """
    # Direct call replaces the removed apply() built-in (same semantics).
    return _ra.svn_ra_invoke_init_func(*args)
class svn_ra_get_wc_prop_func_t:
    """Proxy of C svn_ra_get_wc_prop_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_get_wc_prop_func_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: shadowed by the def __getattr__ below; kept for SWIG compatibility.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_get_wc_prop_func_t, name)
    # Instances come from the C layer only; direct construction is an error.
    # raise Cls(arg) replaces the Python-2-only "raise Cls, arg" statement.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_ra_get_wc_prop_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the APR pool owning this proxy's memory (defaults to the
      global application pool) and keep a weakref used to detect pool death."""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Restore Python-side state previously stored on this member (see
      # __setattr__) onto the fresh proxy object returned from C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the wrapper so its Python attributes survive round-trips
      # through the C layer (see __getattr__).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the wrapped C callback.
      return svn_ra_invoke_get_wc_prop_func(self, *args)
+
+
class svn_ra_get_wc_prop_func_tPtr(svn_ra_get_wc_prop_func_t):
    # Pointer proxy: binds an existing C object, then re-brands itself as
    # the canonical proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_ra_get_wc_prop_func_t, 'this', this)
        if not hasattr(self, "thisown"):
            _swig_setattr(self, svn_ra_get_wc_prop_func_t, 'thisown', 0)
        self.__class__ = svn_ra_get_wc_prop_func_t
_ra.svn_ra_get_wc_prop_func_t_swigregister(svn_ra_get_wc_prop_func_tPtr)
+
class svn_ra_set_wc_prop_func_t:
    """Proxy of C svn_ra_set_wc_prop_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_set_wc_prop_func_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: shadowed by the def __getattr__ below; kept for SWIG compatibility.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_set_wc_prop_func_t, name)
    # Instances come from the C layer only; direct construction is an error.
    # raise Cls(arg) replaces the Python-2-only "raise Cls, arg" statement.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_ra_set_wc_prop_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the APR pool owning this proxy's memory (defaults to the
      global application pool) and keep a weakref used to detect pool death."""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Restore Python-side state previously stored on this member (see
      # __setattr__) onto the fresh proxy object returned from C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the wrapper so its Python attributes survive round-trips
      # through the C layer (see __getattr__).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the wrapped C callback.
      return svn_ra_invoke_set_wc_prop_func(self, *args)
+
+
class svn_ra_set_wc_prop_func_tPtr(svn_ra_set_wc_prop_func_t):
    # Pointer proxy: binds an existing C object, then re-brands itself as
    # the canonical proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_ra_set_wc_prop_func_t, 'this', this)
        if not hasattr(self, "thisown"):
            _swig_setattr(self, svn_ra_set_wc_prop_func_t, 'thisown', 0)
        self.__class__ = svn_ra_set_wc_prop_func_t
_ra.svn_ra_set_wc_prop_func_t_swigregister(svn_ra_set_wc_prop_func_tPtr)
+
class svn_ra_push_wc_prop_func_t:
    """Proxy of C svn_ra_push_wc_prop_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_push_wc_prop_func_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: shadowed by the def __getattr__ below; kept for SWIG compatibility.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_push_wc_prop_func_t, name)
    # Instances come from the C layer only; direct construction is an error.
    # raise Cls(arg) replaces the Python-2-only "raise Cls, arg" statement.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_ra_push_wc_prop_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the APR pool owning this proxy's memory (defaults to the
      global application pool) and keep a weakref used to detect pool death."""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Restore Python-side state previously stored on this member (see
      # __setattr__) onto the fresh proxy object returned from C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the wrapper so its Python attributes survive round-trips
      # through the C layer (see __getattr__).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the wrapped C callback.
      return svn_ra_invoke_push_wc_prop_func(self, *args)
+
+
class svn_ra_push_wc_prop_func_tPtr(svn_ra_push_wc_prop_func_t):
    # Pointer proxy: binds an existing C object, then re-brands itself as
    # the canonical proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_ra_push_wc_prop_func_t, 'this', this)
        if not hasattr(self, "thisown"):
            _swig_setattr(self, svn_ra_push_wc_prop_func_t, 'thisown', 0)
        self.__class__ = svn_ra_push_wc_prop_func_t
_ra.svn_ra_push_wc_prop_func_t_swigregister(svn_ra_push_wc_prop_func_tPtr)
+
class svn_ra_invalidate_wc_props_func_t:
    """Proxy of C svn_ra_invalidate_wc_props_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_invalidate_wc_props_func_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: shadowed by the def __getattr__ below; kept for SWIG compatibility.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_invalidate_wc_props_func_t, name)
    # Instances come from the C layer only; direct construction is an error.
    # raise Cls(arg) replaces the Python-2-only "raise Cls, arg" statement.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_ra_invalidate_wc_props_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the APR pool owning this proxy's memory (defaults to the
      global application pool) and keep a weakref used to detect pool death."""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Restore Python-side state previously stored on this member (see
      # __setattr__) onto the fresh proxy object returned from C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the wrapper so its Python attributes survive round-trips
      # through the C layer (see __getattr__).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the wrapped C callback.
      return svn_ra_invoke_invalidate_wc_props_func(self, *args)
+
+
class svn_ra_invalidate_wc_props_func_tPtr(svn_ra_invalidate_wc_props_func_t):
    # Pointer proxy: binds an existing C object, then re-brands itself as
    # the canonical proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_ra_invalidate_wc_props_func_t, 'this', this)
        if not hasattr(self, "thisown"):
            _swig_setattr(self, svn_ra_invalidate_wc_props_func_t, 'thisown', 0)
        self.__class__ = svn_ra_invalidate_wc_props_func_t
_ra.svn_ra_invalidate_wc_props_func_t_swigregister(svn_ra_invalidate_wc_props_func_tPtr)
+
class svn_ra_get_latest_revnum_func_t:
    """Proxy of C svn_ra_get_latest_revnum_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_get_latest_revnum_func_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: shadowed by the def __getattr__ below; kept for SWIG compatibility.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_get_latest_revnum_func_t, name)
    # Instances come from the C layer only; direct construction is an error.
    # raise Cls(arg) replaces the Python-2-only "raise Cls, arg" statement.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_ra_get_latest_revnum_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the APR pool owning this proxy's memory (defaults to the
      global application pool) and keep a weakref used to detect pool death."""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Restore Python-side state previously stored on this member (see
      # __setattr__) onto the fresh proxy object returned from C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the wrapper so its Python attributes survive round-trips
      # through the C layer (see __getattr__).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the wrapped C callback.
      return svn_ra_invoke_get_latest_revnum_func(self, *args)
+
+
class svn_ra_get_latest_revnum_func_tPtr(svn_ra_get_latest_revnum_func_t):
    # Pointer proxy: binds an existing C object, then re-brands itself as
    # the canonical proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_ra_get_latest_revnum_func_t, 'this', this)
        if not hasattr(self, "thisown"):
            _swig_setattr(self, svn_ra_get_latest_revnum_func_t, 'thisown', 0)
        self.__class__ = svn_ra_get_latest_revnum_func_t
_ra.svn_ra_get_latest_revnum_func_t_swigregister(svn_ra_get_latest_revnum_func_tPtr)
+
class svn_ra_get_client_string_func_t:
    """Proxy of C svn_ra_get_client_string_func_t struct"""
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_get_client_string_func_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: shadowed by the def __getattr__ below; kept for SWIG compatibility.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_get_client_string_func_t, name)
    # Instances come from the C layer only; direct construction is an error.
    # raise Cls(arg) replaces the Python-2-only "raise Cls, arg" statement.
    def __init__(self): raise RuntimeError("No constructor defined")
    def __repr__(self):
        return "<%s.%s; proxy of C svn_ra_get_client_string_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the APR pool owning this proxy's memory (defaults to the
      global application pool) and keep a weakref used to detect pool death."""
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)

      # Restore Python-side state previously stored on this member (see
      # __setattr__) onto the fresh proxy object returned from C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass

      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()

      # Remember the wrapper so its Python attributes survive round-trips
      # through the C layer (see __getattr__).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __call__(self, *args):
      # Calling the proxy invokes the wrapped C callback.
      return svn_ra_invoke_get_client_string_func(self, *args)
+
+
+class svn_ra_get_client_string_func_tPtr(svn_ra_get_client_string_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_get_client_string_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_get_client_string_func_t, 'thisown', 0)
+        self.__class__ = svn_ra_get_client_string_func_t
+_ra.svn_ra_get_client_string_func_t_swigregister(svn_ra_get_client_string_func_tPtr)
+
+class svn_ra_file_rev_handler_t:
+    """Proxy of C svn_ra_file_rev_handler_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_file_rev_handler_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_file_rev_handler_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_file_rev_handler_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_file_rev_handler_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_ra_invoke_file_rev_handler(self, *args)
+
+
+class svn_ra_file_rev_handler_tPtr(svn_ra_file_rev_handler_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_file_rev_handler_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_file_rev_handler_t, 'thisown', 0)
+        self.__class__ = svn_ra_file_rev_handler_t
+_ra.svn_ra_file_rev_handler_t_swigregister(svn_ra_file_rev_handler_tPtr)
+
+class svn_ra_lock_callback_t:
+    """Proxy of C svn_ra_lock_callback_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_lock_callback_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_lock_callback_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_lock_callback_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_lock_callback_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_ra_invoke_lock_callback(self, *args)
+
+
+class svn_ra_lock_callback_tPtr(svn_ra_lock_callback_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_lock_callback_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_lock_callback_t, 'thisown', 0)
+        self.__class__ = svn_ra_lock_callback_t
+_ra.svn_ra_lock_callback_t_swigregister(svn_ra_lock_callback_tPtr)
+
+class svn_ra_progress_notify_func_t:
+    """Proxy of C svn_ra_progress_notify_func_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_progress_notify_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_progress_notify_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_progress_notify_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_progress_notify_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_ra_invoke_progress_notify_func(self, *args)
+
+
+class svn_ra_progress_notify_func_tPtr(svn_ra_progress_notify_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_progress_notify_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_progress_notify_func_t, 'thisown', 0)
+        self.__class__ = svn_ra_progress_notify_func_t
+_ra.svn_ra_progress_notify_func_t_swigregister(svn_ra_progress_notify_func_tPtr)
+
+class svn_ra_replay_revstart_callback_t:
+    """Proxy of C svn_ra_replay_revstart_callback_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_replay_revstart_callback_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_replay_revstart_callback_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_replay_revstart_callback_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_replay_revstart_callback_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_ra_invoke_replay_revstart_callback(self, *args)
+
+
+class svn_ra_replay_revstart_callback_tPtr(svn_ra_replay_revstart_callback_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_replay_revstart_callback_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_replay_revstart_callback_t, 'thisown', 0)
+        self.__class__ = svn_ra_replay_revstart_callback_t
+_ra.svn_ra_replay_revstart_callback_t_swigregister(svn_ra_replay_revstart_callback_tPtr)
+
+class svn_ra_replay_revfinish_callback_t:
+    """Proxy of C svn_ra_replay_revfinish_callback_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_replay_revfinish_callback_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_replay_revfinish_callback_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_replay_revfinish_callback_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_replay_revfinish_callback_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_ra_invoke_replay_revfinish_callback(self, *args)
+
+
+class svn_ra_replay_revfinish_callback_tPtr(svn_ra_replay_revfinish_callback_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_replay_revfinish_callback_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_replay_revfinish_callback_t, 'thisown', 0)
+        self.__class__ = svn_ra_replay_revfinish_callback_t
+_ra.svn_ra_replay_revfinish_callback_t_swigregister(svn_ra_replay_revfinish_callback_tPtr)
+
+class svn_ra_init_func_t:
+    """Proxy of C svn_ra_init_func_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_ra_init_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_ra_init_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_ra_init_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_ra_init_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_ra_invoke_init_func(self, *args)
+
+
+class svn_ra_init_func_tPtr(svn_ra_init_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_ra_init_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_ra_init_func_t, 'thisown', 0)
+        self.__class__ = svn_ra_init_func_t
+_ra.svn_ra_init_func_t_swigregister(svn_ra_init_func_tPtr)
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/repos.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/repos.py
new file mode 100644
index 0000000..a57847e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/repos.py
@@ -0,0 +1,1538 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+import core
+import delta
+import fs
+
+import _repos
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
+    if (name == "this"):
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)
+
+def _swig_setattr(self,class_type,name,value):
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)
+
+def _swig_getattr(self,class_type,name):
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)
+    raise AttributeError,name
+
+
+def svn_repos_version(*args):
+    """svn_repos_version() -> svn_version_t"""
+    return apply(_repos.svn_repos_version, args)
+svn_authz_none = _repos.svn_authz_none
+svn_authz_read = _repos.svn_authz_read
+svn_authz_write = _repos.svn_authz_write
+svn_authz_recursive = _repos.svn_authz_recursive
+
+def svn_repos_find_root_path(*args):
+    """svn_repos_find_root_path(char path, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_find_root_path, args)
+
+def svn_repos_open(*args):
+    """svn_repos_open(svn_repos_t repos_p, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_repos.svn_repos_open, args)
+
+def svn_repos_create(*args):
+    """
+    svn_repos_create(svn_repos_t repos_p, char path, char unused_1, char unused_2, 
+        apr_hash_t config, apr_hash_t fs_config, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_create, args)
+
+def svn_repos_upgrade(*args):
+    """
+    svn_repos_upgrade(char path, svn_boolean_t nonblocking, svn_error_t start_callback, 
+        void start_callback_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_upgrade, args)
+
+def svn_repos_delete(*args):
+    """svn_repos_delete(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_repos.svn_repos_delete, args)
+
+def svn_repos_has_capability(*args):
+    """
+    svn_repos_has_capability(svn_repos_t repos, svn_boolean_t has, char capability, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_has_capability, args)
+SVN_REPOS_CAPABILITY_MERGEINFO = _repos.SVN_REPOS_CAPABILITY_MERGEINFO
+
+def svn_repos_fs(*args):
+    """svn_repos_fs(svn_repos_t repos) -> svn_fs_t"""
+    return apply(_repos.svn_repos_fs, args)
+
+def svn_repos_hotcopy(*args):
+    """
+    svn_repos_hotcopy(char src_path, char dst_path, svn_boolean_t clean_logs, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_hotcopy, args)
+
+def svn_repos_recover3(*args):
+    """
+    svn_repos_recover3(char path, svn_boolean_t nonblocking, svn_error_t start_callback, 
+        void start_callback_baton, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_recover3, args)
+
+def svn_repos_recover2(*args):
+    """
+    svn_repos_recover2(char path, svn_boolean_t nonblocking, svn_error_t start_callback, 
+        void start_callback_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_recover2, args)
+
+def svn_repos_recover(*args):
+    """svn_repos_recover(char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_repos.svn_repos_recover, args)
+
+def svn_repos_db_logfiles(*args):
+    """
+    svn_repos_db_logfiles(apr_array_header_t logfiles, char path, svn_boolean_t only_unused, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_db_logfiles, args)
+
+def svn_repos_path(*args):
+    """svn_repos_path(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_path, args)
+
+def svn_repos_db_env(*args):
+    """svn_repos_db_env(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_db_env, args)
+
+def svn_repos_conf_dir(*args):
+    """svn_repos_conf_dir(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_conf_dir, args)
+
+def svn_repos_svnserve_conf(*args):
+    """svn_repos_svnserve_conf(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_svnserve_conf, args)
+
+def svn_repos_lock_dir(*args):
+    """svn_repos_lock_dir(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_lock_dir, args)
+
+def svn_repos_db_lockfile(*args):
+    """svn_repos_db_lockfile(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_db_lockfile, args)
+
+def svn_repos_db_logs_lockfile(*args):
+    """svn_repos_db_logs_lockfile(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_db_logs_lockfile, args)
+
+def svn_repos_hook_dir(*args):
+    """svn_repos_hook_dir(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_hook_dir, args)
+
+def svn_repos_start_commit_hook(*args):
+    """svn_repos_start_commit_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_start_commit_hook, args)
+
+def svn_repos_pre_commit_hook(*args):
+    """svn_repos_pre_commit_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_pre_commit_hook, args)
+
+def svn_repos_post_commit_hook(*args):
+    """svn_repos_post_commit_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_post_commit_hook, args)
+
+def svn_repos_pre_revprop_change_hook(*args):
+    """svn_repos_pre_revprop_change_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_pre_revprop_change_hook, args)
+
+def svn_repos_post_revprop_change_hook(*args):
+    """svn_repos_post_revprop_change_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_post_revprop_change_hook, args)
+
+def svn_repos_pre_lock_hook(*args):
+    """svn_repos_pre_lock_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_pre_lock_hook, args)
+
+def svn_repos_post_lock_hook(*args):
+    """svn_repos_post_lock_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_post_lock_hook, args)
+
+def svn_repos_pre_unlock_hook(*args):
+    """svn_repos_pre_unlock_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_pre_unlock_hook, args)
+
+def svn_repos_post_unlock_hook(*args):
+    """svn_repos_post_unlock_hook(svn_repos_t repos, apr_pool_t pool) -> char"""
+    return apply(_repos.svn_repos_post_unlock_hook, args)
+
+def svn_repos_begin_report2(*args):
+    """
+    svn_repos_begin_report2(void report_baton, svn_revnum_t revnum, svn_repos_t repos, 
+        char fs_base, char target, char tgt_path, 
+        svn_boolean_t text_deltas, svn_depth_t depth, 
+        svn_boolean_t ignore_ancestry, svn_boolean_t send_copyfrom_args, 
+        svn_delta_editor_t editor, 
+        void edit_baton, svn_repos_authz_func_t authz_read_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_begin_report2, args)
+
+def svn_repos_begin_report(*args):
+    """
+    svn_repos_begin_report(void report_baton, svn_revnum_t revnum, char username, 
+        svn_repos_t repos, char fs_base, char target, 
+        char tgt_path, svn_boolean_t text_deltas, 
+        svn_boolean_t recurse, svn_boolean_t ignore_ancestry, 
+        svn_delta_editor_t editor, void edit_baton, 
+        svn_repos_authz_func_t authz_read_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_begin_report, args)
+
+def svn_repos_set_path3(*args):
+    """
+    svn_repos_set_path3(void report_baton, char path, svn_revnum_t revision, 
+        svn_depth_t depth, svn_boolean_t start_empty, 
+        char lock_token, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_set_path3, args)
+
+def svn_repos_set_path2(*args):
+    """
+    svn_repos_set_path2(void report_baton, char path, svn_revnum_t revision, 
+        svn_boolean_t start_empty, char lock_token, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_set_path2, args)
+
+def svn_repos_set_path(*args):
+    """
+    svn_repos_set_path(void report_baton, char path, svn_revnum_t revision, 
+        svn_boolean_t start_empty, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_set_path, args)
+
+def svn_repos_link_path3(*args):
+    """
+    svn_repos_link_path3(void report_baton, char path, char link_path, svn_revnum_t revision, 
+        svn_depth_t depth, svn_boolean_t start_empty, 
+        char lock_token, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_link_path3, args)
+
+def svn_repos_link_path2(*args):
+    """
+    svn_repos_link_path2(void report_baton, char path, char link_path, svn_revnum_t revision, 
+        svn_boolean_t start_empty, 
+        char lock_token, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_link_path2, args)
+
+def svn_repos_link_path(*args):
+    """
+    svn_repos_link_path(void report_baton, char path, char link_path, svn_revnum_t revision, 
+        svn_boolean_t start_empty, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_link_path, args)
+
+def svn_repos_delete_path(*args):
+    """svn_repos_delete_path(void report_baton, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_repos.svn_repos_delete_path, args)
+
+def svn_repos_finish_report(*args):
+    """svn_repos_finish_report(void report_baton, apr_pool_t pool) -> svn_error_t"""
+    return apply(_repos.svn_repos_finish_report, args)
+
+def svn_repos_abort_report(*args):
+    """svn_repos_abort_report(void report_baton, apr_pool_t pool) -> svn_error_t"""
+    return apply(_repos.svn_repos_abort_report, args)
+
+def svn_repos_dir_delta2(*args):
+    """
+    svn_repos_dir_delta2(svn_fs_root_t src_root, char src_parent_dir, char src_entry, 
+        svn_fs_root_t tgt_root, char tgt_path, 
+        svn_delta_editor_t editor, void edit_baton, 
+        svn_repos_authz_func_t authz_read_func, svn_boolean_t text_deltas, 
+        svn_depth_t depth, svn_boolean_t entry_props, 
+        svn_boolean_t ignore_ancestry, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_dir_delta2, args)
+
+def svn_repos_dir_delta(*args):
+    """
+    svn_repos_dir_delta(svn_fs_root_t src_root, char src_parent_dir, char src_entry, 
+        svn_fs_root_t tgt_root, char tgt_path, 
+        svn_delta_editor_t editor, void edit_baton, 
+        svn_repos_authz_func_t authz_read_func, svn_boolean_t text_deltas, 
+        svn_boolean_t recurse, 
+        svn_boolean_t entry_props, svn_boolean_t ignore_ancestry, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_dir_delta, args)
+
+def svn_repos_replay2(*args):
+    """
+    svn_repos_replay2(svn_fs_root_t root, char base_dir, svn_revnum_t low_water_mark, 
+        svn_boolean_t send_deltas, svn_delta_editor_t editor, 
+        void edit_baton, svn_repos_authz_func_t authz_read_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_replay2, args)
+
+def svn_repos_replay(*args):
+    """
+    svn_repos_replay(svn_fs_root_t root, svn_delta_editor_t editor, void edit_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_replay, args)
+
+def svn_repos_get_commit_editor5(*args):
+    """
+    svn_repos_get_commit_editor5(svn_delta_editor_t editor, void edit_baton, svn_repos_t repos, 
+        svn_fs_txn_t txn, char repos_url, 
+        char base_path, apr_hash_t revprop_table, svn_commit_callback2_t callback, 
+        svn_repos_authz_callback_t authz_callback, 
+        void authz_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_get_commit_editor5, args)
+
+def svn_repos_get_commit_editor4(*args):
+    """
+    svn_repos_get_commit_editor4(svn_delta_editor_t editor, void edit_baton, svn_repos_t repos, 
+        svn_fs_txn_t txn, char repos_url, 
+        char base_path, char user, char log_msg, svn_commit_callback2_t callback, 
+        svn_repos_authz_callback_t authz_callback, 
+        void authz_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_get_commit_editor4, args)
+
+def svn_repos_get_commit_editor3(*args):
+    """
+    svn_repos_get_commit_editor3(svn_delta_editor_t editor, void edit_baton, svn_repos_t repos, 
+        svn_fs_txn_t txn, char repos_url, 
+        char base_path, char user, char log_msg, svn_commit_callback_t callback, 
+        svn_repos_authz_callback_t authz_callback, 
+        void authz_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_get_commit_editor3, args)
+
+def svn_repos_get_commit_editor2(*args):
+    """
+    svn_repos_get_commit_editor2(svn_delta_editor_t editor, void edit_baton, svn_repos_t repos, 
+        svn_fs_txn_t txn, char repos_url, 
+        char base_path, char user, char log_msg, svn_commit_callback_t callback, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_get_commit_editor2, args)
+
+def svn_repos_get_commit_editor(*args):
+    """
+    svn_repos_get_commit_editor(svn_delta_editor_t editor, void edit_baton, svn_repos_t repos, 
+        char repos_url, char base_path, char user, 
+        char log_msg, svn_commit_callback_t callback, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_get_commit_editor, args)
+
def svn_repos_dated_revision(*args):
    """
    svn_repos_dated_revision(svn_revnum_t revision, svn_repos_t repos, apr_time_t tm, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_dated_revision(*args)

def svn_repos_get_committed_info(*args):
    """
    svn_repos_get_committed_info(svn_revnum_t committed_rev, char committed_date, char last_author, 
        svn_fs_root_t root, char path, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_committed_info(*args)

def svn_repos_stat(*args):
    """
    svn_repos_stat(svn_dirent_t dirent, svn_fs_root_t root, char path, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_stat(*args)

def svn_repos_deleted_rev(*args):
    """
    svn_repos_deleted_rev(svn_fs_t fs, char path, svn_revnum_t start, svn_revnum_t end, 
        svn_revnum_t deleted, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_deleted_rev(*args)
+
def svn_repos_history2(*args):
    """
    svn_repos_history2(svn_fs_t fs, char path, svn_repos_history_func_t history_func, 
        svn_repos_authz_func_t authz_read_func, 
        svn_revnum_t start, svn_revnum_t end, 
        svn_boolean_t cross_copies, apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_history2(*args)

def svn_repos_history(*args):
    """
    svn_repos_history(svn_fs_t fs, char path, svn_repos_history_func_t history_func, 
        svn_revnum_t start, svn_revnum_t end, 
        svn_boolean_t cross_copies, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_history(*args)

def svn_repos_trace_node_locations(*args):
    """
    svn_repos_trace_node_locations(svn_fs_t fs, apr_hash_t locations, char fs_path, svn_revnum_t peg_revision, 
        apr_array_header_t location_revisions, 
        svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_trace_node_locations(*args)

def svn_repos_node_location_segments(*args):
    """
    svn_repos_node_location_segments(svn_repos_t repos, char path, svn_revnum_t peg_revision, 
        svn_revnum_t start_rev, svn_revnum_t end_rev, 
        svn_location_segment_receiver_t receiver, 
        svn_repos_authz_func_t authz_read_func, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_node_location_segments(*args)
+
def svn_repos_get_logs4(*args):
    """
    svn_repos_get_logs4(svn_repos_t repos, apr_array_header_t paths, svn_revnum_t start, 
        svn_revnum_t end, int limit, svn_boolean_t discover_changed_paths, 
        svn_boolean_t strict_node_history, 
        svn_boolean_t include_merged_revisions, 
        apr_array_header_t revprops, 
        svn_repos_authz_func_t authz_read_func, 
        svn_log_entry_receiver_t receiver, apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_get_logs4(*args)

def svn_repos_get_logs3(*args):
    """
    svn_repos_get_logs3(svn_repos_t repos, apr_array_header_t paths, svn_revnum_t start, 
        svn_revnum_t end, int limit, svn_boolean_t discover_changed_paths, 
        svn_boolean_t strict_node_history, 
        svn_repos_authz_func_t authz_read_func, 
        svn_log_message_receiver_t receiver, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_logs3(*args)

def svn_repos_get_logs2(*args):
    """
    svn_repos_get_logs2(svn_repos_t repos, apr_array_header_t paths, svn_revnum_t start, 
        svn_revnum_t end, svn_boolean_t discover_changed_paths, 
        svn_boolean_t strict_node_history, 
        svn_repos_authz_func_t authz_read_func, 
        svn_log_message_receiver_t receiver, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_logs2(*args)

def svn_repos_get_logs(*args):
    """
    svn_repos_get_logs(svn_repos_t repos, apr_array_header_t paths, svn_revnum_t start, 
        svn_revnum_t end, svn_boolean_t discover_changed_paths, 
        svn_boolean_t strict_node_history, 
        svn_log_message_receiver_t receiver, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_logs(*args)
+
def svn_repos_fs_get_mergeinfo(*args):
    """
    svn_repos_fs_get_mergeinfo(svn_mergeinfo_catalog_t catalog, svn_repos_t repos, 
        apr_array_header_t paths, svn_revnum_t revision, 
        svn_mergeinfo_inheritance_t inherit, svn_boolean_t include_descendants, 
        svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_fs_get_mergeinfo(*args)

def svn_repos_get_file_revs2(*args):
    """
    svn_repos_get_file_revs2(svn_repos_t repos, char path, svn_revnum_t start, svn_revnum_t end, 
        svn_boolean_t include_merged_revisions, 
        svn_repos_authz_func_t authz_read_func, 
        svn_file_rev_handler_t handler, void handler_baton, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_file_revs2(*args)

def svn_repos_get_file_revs(*args):
    """
    svn_repos_get_file_revs(svn_repos_t repos, char path, svn_revnum_t start, svn_revnum_t end, 
        svn_repos_authz_func_t authz_read_func, 
        svn_repos_file_rev_handler_t handler, 
        void handler_baton, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_file_revs(*args)
+
def svn_repos_fs_commit_txn(*args):
    """
    svn_repos_fs_commit_txn(char conflict_p, svn_repos_t repos, svn_revnum_t new_rev, 
        svn_fs_txn_t txn, apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_fs_commit_txn(*args)

def svn_repos_fs_begin_txn_for_commit2(*args):
    """
    svn_repos_fs_begin_txn_for_commit2(svn_fs_txn_t txn_p, svn_repos_t repos, svn_revnum_t rev, 
        apr_hash_t revprop_table, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_begin_txn_for_commit2(*args)

def svn_repos_fs_begin_txn_for_commit(*args):
    """
    svn_repos_fs_begin_txn_for_commit(svn_fs_txn_t txn_p, svn_repos_t repos, svn_revnum_t rev, 
        char author, char log_msg, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_begin_txn_for_commit(*args)

def svn_repos_fs_begin_txn_for_update(*args):
    """
    svn_repos_fs_begin_txn_for_update(svn_fs_txn_t txn_p, svn_repos_t repos, svn_revnum_t rev, 
        char author, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_begin_txn_for_update(*args)

def svn_repos_fs_lock(*args):
    """
    svn_repos_fs_lock(svn_lock_t lock, svn_repos_t repos, char path, char token, 
        char comment, svn_boolean_t is_dav_comment, 
        apr_time_t expiration_date, svn_revnum_t current_rev, 
        svn_boolean_t steal_lock, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_lock(*args)

def svn_repos_fs_unlock(*args):
    """
    svn_repos_fs_unlock(svn_repos_t repos, char path, char token, svn_boolean_t break_lock, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_unlock(*args)

def svn_repos_fs_get_locks(*args):
    """
    svn_repos_fs_get_locks(apr_hash_t locks, svn_repos_t repos, char path, svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_get_locks(*args)
+
def svn_repos_fs_change_rev_prop3(*args):
    """
    svn_repos_fs_change_rev_prop3(svn_repos_t repos, svn_revnum_t rev, char author, char name, 
        svn_string_t new_value, svn_boolean_t use_pre_revprop_change_hook, 
        svn_boolean_t use_post_revprop_change_hook, 
        svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_fs_change_rev_prop3(*args)

def svn_repos_fs_change_rev_prop2(*args):
    """
    svn_repos_fs_change_rev_prop2(svn_repos_t repos, svn_revnum_t rev, char author, char name, 
        svn_string_t new_value, svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_change_rev_prop2(*args)

def svn_repos_fs_change_rev_prop(*args):
    """
    svn_repos_fs_change_rev_prop(svn_repos_t repos, svn_revnum_t rev, char author, char name, 
        svn_string_t new_value, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_change_rev_prop(*args)

def svn_repos_fs_revision_prop(*args):
    """
    svn_repos_fs_revision_prop(svn_string_t value_p, svn_repos_t repos, svn_revnum_t rev, 
        char propname, svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_revision_prop(*args)

def svn_repos_fs_revision_proplist(*args):
    """
    svn_repos_fs_revision_proplist(apr_hash_t table_p, svn_repos_t repos, svn_revnum_t rev, 
        svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_revision_proplist(*args)

def svn_repos_fs_change_node_prop(*args):
    """
    svn_repos_fs_change_node_prop(svn_fs_root_t root, char path, char name, svn_string_t value, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_fs_change_node_prop(*args)

def svn_repos_fs_change_txn_prop(*args):
    """svn_repos_fs_change_txn_prop(svn_fs_txn_t txn, char name, svn_string_t value, apr_pool_t pool) -> svn_error_t"""
    return _repos.svn_repos_fs_change_txn_prop(*args)

def svn_repos_fs_change_txn_props(*args):
    """svn_repos_fs_change_txn_props(svn_fs_txn_t txn, apr_array_header_t props, apr_pool_t pool) -> svn_error_t"""
    return _repos.svn_repos_fs_change_txn_props(*args)
class svn_repos_node_t:
    """Proxy of C svn_repos_node_t struct"""
    # SWIG dispatch tables: attribute name -> generated C accessor in _repos.
    # The class-level __setattr__/__getattr__ lambdas are shadowed by the
    # explicit defs further down; _swig_setattr/_swig_getattr consult these
    # tables to reach the underlying C struct fields.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_node_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_node_t, name)
    # Instances are only materialized from C pointers (see svn_repos_node_tPtr);
    # direct Python construction is forbidden.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_repos_node_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Struct fields exposed as Python attributes via the tables above.
    __swig_setmethods__["kind"] = _repos.svn_repos_node_t_kind_set
    __swig_getmethods__["kind"] = _repos.svn_repos_node_t_kind_get
    __swig_setmethods__["action"] = _repos.svn_repos_node_t_action_set
    __swig_getmethods__["action"] = _repos.svn_repos_node_t_action_get
    __swig_setmethods__["text_mod"] = _repos.svn_repos_node_t_text_mod_set
    __swig_getmethods__["text_mod"] = _repos.svn_repos_node_t_text_mod_get
    __swig_setmethods__["prop_mod"] = _repos.svn_repos_node_t_prop_mod_set
    __swig_getmethods__["prop_mod"] = _repos.svn_repos_node_t_prop_mod_get
    __swig_setmethods__["name"] = _repos.svn_repos_node_t_name_set
    __swig_getmethods__["name"] = _repos.svn_repos_node_t_name_get
    __swig_setmethods__["copyfrom_rev"] = _repos.svn_repos_node_t_copyfrom_rev_set
    __swig_getmethods__["copyfrom_rev"] = _repos.svn_repos_node_t_copyfrom_rev_get
    __swig_setmethods__["copyfrom_path"] = _repos.svn_repos_node_t_copyfrom_path_set
    __swig_getmethods__["copyfrom_path"] = _repos.svn_repos_node_t_copyfrom_path_get
    __swig_setmethods__["sibling"] = _repos.svn_repos_node_t_sibling_set
    __swig_getmethods__["sibling"] = _repos.svn_repos_node_t_sibling_get
    __swig_setmethods__["child"] = _repos.svn_repos_node_t_child_set
    __swig_getmethods__["child"] = _repos.svn_repos_node_t_child_get
    __swig_setmethods__["parent"] = _repos.svn_repos_node_t_parent_set
    __swig_getmethods__["parent"] = _repos.svn_repos_node_t_parent_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_repos_node_t"""
      # Ties this proxy's validity to an APR pool: a weakref to the pool's
      # _is_valid flag lets assert_valid() detect a destroyed pool.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If this attribute was previously set from Python, carry any extra
      # Python-side state (instance __dict__) over onto the freshly built
      # proxy returned by the C layer, so wrapper state survives round-trips.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can restore its extra
      # state later (see _members handling above).
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
class svn_repos_node_tPtr(svn_repos_node_t):
    # Thin wrapper used by the C layer to hand a raw pointer to Python;
    # immediately rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_repos_node_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_node_t, 'thisown', 0)
        self.__class__ = svn_repos_node_t
# Register the pointer class with the C extension so it can build proxies.
_repos.svn_repos_node_t_swigregister(svn_repos_node_tPtr)
+
+
def svn_repos_node_editor(*args):
    """
    svn_repos_node_editor(svn_delta_editor_t editor, void edit_baton, svn_repos_t repos, 
        svn_fs_root_t base_root, svn_fs_root_t root, 
        apr_pool_t node_pool, apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_node_editor(*args)

def svn_repos_node_from_baton(*args):
    """svn_repos_node_from_baton(void edit_baton) -> svn_repos_node_t"""
    return _repos.svn_repos_node_from_baton(*args)
# Dumpfile record-header field names and format constants, re-exported from
# the C extension module.
SVN_REPOS_DUMPFILE_MAGIC_HEADER = _repos.SVN_REPOS_DUMPFILE_MAGIC_HEADER
SVN_REPOS_DUMPFILE_FORMAT_VERSION = _repos.SVN_REPOS_DUMPFILE_FORMAT_VERSION
SVN_REPOS_DUMPFILE_UUID = _repos.SVN_REPOS_DUMPFILE_UUID
SVN_REPOS_DUMPFILE_CONTENT_LENGTH = _repos.SVN_REPOS_DUMPFILE_CONTENT_LENGTH
SVN_REPOS_DUMPFILE_REVISION_NUMBER = _repos.SVN_REPOS_DUMPFILE_REVISION_NUMBER
SVN_REPOS_DUMPFILE_NODE_PATH = _repos.SVN_REPOS_DUMPFILE_NODE_PATH
SVN_REPOS_DUMPFILE_NODE_KIND = _repos.SVN_REPOS_DUMPFILE_NODE_KIND
SVN_REPOS_DUMPFILE_NODE_ACTION = _repos.SVN_REPOS_DUMPFILE_NODE_ACTION
SVN_REPOS_DUMPFILE_NODE_COPYFROM_PATH = _repos.SVN_REPOS_DUMPFILE_NODE_COPYFROM_PATH
SVN_REPOS_DUMPFILE_NODE_COPYFROM_REV = _repos.SVN_REPOS_DUMPFILE_NODE_COPYFROM_REV
SVN_REPOS_DUMPFILE_TEXT_COPY_SOURCE_CHECKSUM = _repos.SVN_REPOS_DUMPFILE_TEXT_COPY_SOURCE_CHECKSUM
SVN_REPOS_DUMPFILE_TEXT_CONTENT_CHECKSUM = _repos.SVN_REPOS_DUMPFILE_TEXT_CONTENT_CHECKSUM
SVN_REPOS_DUMPFILE_PROP_CONTENT_LENGTH = _repos.SVN_REPOS_DUMPFILE_PROP_CONTENT_LENGTH
SVN_REPOS_DUMPFILE_TEXT_CONTENT_LENGTH = _repos.SVN_REPOS_DUMPFILE_TEXT_CONTENT_LENGTH
SVN_REPOS_DUMPFILE_PROP_DELTA = _repos.SVN_REPOS_DUMPFILE_PROP_DELTA
SVN_REPOS_DUMPFILE_TEXT_DELTA = _repos.SVN_REPOS_DUMPFILE_TEXT_DELTA
SVN_REPOS_DUMPFILE_TEXT_DELTA_BASE_CHECKSUM = _repos.SVN_REPOS_DUMPFILE_TEXT_DELTA_BASE_CHECKSUM
# Values of the C enum svn_node_action (action recorded per dumped node).
svn_node_action_change = _repos.svn_node_action_change
svn_node_action_add = _repos.svn_node_action_add
svn_node_action_delete = _repos.svn_node_action_delete
svn_node_action_replace = _repos.svn_node_action_replace
# Values of the C enum svn_repos_load_uuid (UUID handling during load).
svn_repos_load_uuid_default = _repos.svn_repos_load_uuid_default
svn_repos_load_uuid_ignore = _repos.svn_repos_load_uuid_ignore
svn_repos_load_uuid_force = _repos.svn_repos_load_uuid_force
+
def svn_repos_verify_fs(*args):
    """
    svn_repos_verify_fs(svn_repos_t repos, svn_stream_t feedback_stream, svn_revnum_t start_rev, 
        svn_revnum_t end_rev, svn_cancel_func_t cancel_func, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_verify_fs(*args)

def svn_repos_dump_fs2(*args):
    """
    svn_repos_dump_fs2(svn_repos_t repos, svn_stream_t dumpstream, svn_stream_t feedback_stream, 
        svn_revnum_t start_rev, 
        svn_revnum_t end_rev, svn_boolean_t incremental, 
        svn_boolean_t use_deltas, svn_cancel_func_t cancel_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_dump_fs2(*args)

def svn_repos_dump_fs(*args):
    """
    svn_repos_dump_fs(svn_repos_t repos, svn_stream_t dumpstream, svn_stream_t feedback_stream, 
        svn_revnum_t start_rev, 
        svn_revnum_t end_rev, svn_boolean_t incremental, 
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_dump_fs(*args)

def svn_repos_load_fs2(*args):
    """
    svn_repos_load_fs2(svn_repos_t repos, svn_stream_t dumpstream, svn_stream_t feedback_stream, 
        enum svn_repos_load_uuid uuid_action, 
        char parent_dir, svn_boolean_t use_pre_commit_hook, 
        svn_boolean_t use_post_commit_hook, 
        svn_cancel_func_t cancel_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_load_fs2(*args)

def svn_repos_load_fs(*args):
    """
    svn_repos_load_fs(svn_repos_t repos, svn_stream_t dumpstream, svn_stream_t feedback_stream, 
        enum svn_repos_load_uuid uuid_action, 
        char parent_dir, svn_cancel_func_t cancel_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_load_fs(*args)
class svn_repos_parse_fns2_t:
    """Proxy of C svn_repos_parse_fns2_t struct"""
    # SWIG dispatch tables: attribute name -> generated C accessor in _repos.
    # The class-level __setattr__/__getattr__ lambdas are shadowed by the
    # explicit defs below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_parse_fns2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_parse_fns2_t, name)
    # Instances are only materialized from C pointers; direct construction
    # from Python is forbidden.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_repos_parse_fns2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Parser vtable callbacks, exposed as assignable attributes.
    __swig_setmethods__["new_revision_record"] = _repos.svn_repos_parse_fns2_t_new_revision_record_set
    __swig_getmethods__["new_revision_record"] = _repos.svn_repos_parse_fns2_t_new_revision_record_get
    __swig_setmethods__["uuid_record"] = _repos.svn_repos_parse_fns2_t_uuid_record_set
    __swig_getmethods__["uuid_record"] = _repos.svn_repos_parse_fns2_t_uuid_record_get
    __swig_setmethods__["new_node_record"] = _repos.svn_repos_parse_fns2_t_new_node_record_set
    __swig_getmethods__["new_node_record"] = _repos.svn_repos_parse_fns2_t_new_node_record_get
    __swig_setmethods__["set_revision_property"] = _repos.svn_repos_parse_fns2_t_set_revision_property_set
    __swig_getmethods__["set_revision_property"] = _repos.svn_repos_parse_fns2_t_set_revision_property_get
    __swig_setmethods__["set_node_property"] = _repos.svn_repos_parse_fns2_t_set_node_property_set
    __swig_getmethods__["set_node_property"] = _repos.svn_repos_parse_fns2_t_set_node_property_get
    __swig_setmethods__["delete_node_property"] = _repos.svn_repos_parse_fns2_t_delete_node_property_set
    __swig_getmethods__["delete_node_property"] = _repos.svn_repos_parse_fns2_t_delete_node_property_get
    __swig_setmethods__["remove_node_props"] = _repos.svn_repos_parse_fns2_t_remove_node_props_set
    __swig_getmethods__["remove_node_props"] = _repos.svn_repos_parse_fns2_t_remove_node_props_get
    __swig_setmethods__["set_fulltext"] = _repos.svn_repos_parse_fns2_t_set_fulltext_set
    __swig_getmethods__["set_fulltext"] = _repos.svn_repos_parse_fns2_t_set_fulltext_get
    __swig_setmethods__["apply_textdelta"] = _repos.svn_repos_parse_fns2_t_apply_textdelta_set
    __swig_getmethods__["apply_textdelta"] = _repos.svn_repos_parse_fns2_t_apply_textdelta_get
    __swig_setmethods__["close_node"] = _repos.svn_repos_parse_fns2_t_close_node_set
    __swig_getmethods__["close_node"] = _repos.svn_repos_parse_fns2_t_close_node_get
    __swig_setmethods__["close_revision"] = _repos.svn_repos_parse_fns2_t_close_revision_set
    __swig_getmethods__["close_revision"] = _repos.svn_repos_parse_fns2_t_close_revision_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_repos_parse_fns2_t"""
      # Ties this proxy's validity to an APR pool via a weakref to the pool's
      # _is_valid flag (checked by assert_valid()).
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # Restore any Python-side state previously attached to this attribute
      # (see _members bookkeeping in __setattr__).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can restore its extra
      # state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    # Convenience methods: invoke the corresponding vtable callback through
    # the module-level svn_repos_parse_fns2_invoke_* helpers.
    def new_revision_record(self, *args):
      return svn_repos_parse_fns2_invoke_new_revision_record(self, *args)

    def uuid_record(self, *args):
      return svn_repos_parse_fns2_invoke_uuid_record(self, *args)

    def new_node_record(self, *args):
      return svn_repos_parse_fns2_invoke_new_node_record(self, *args)

    def set_revision_property(self, *args):
      return svn_repos_parse_fns2_invoke_set_revision_property(self, *args)

    def set_node_property(self, *args):
      return svn_repos_parse_fns2_invoke_set_node_property(self, *args)

    def delete_node_property(self, *args):
      return svn_repos_parse_fns2_invoke_delete_node_property(self, *args)

    def remove_node_props(self, *args):
      return svn_repos_parse_fns2_invoke_remove_node_props(self, *args)

    def set_fulltext(self, *args):
      return svn_repos_parse_fns2_invoke_set_fulltext(self, *args)

    def apply_textdelta(self, *args):
      return svn_repos_parse_fns2_invoke_apply_textdelta(self, *args)

    def close_node(self, *args):
      return svn_repos_parse_fns2_invoke_close_node(self, *args)

    def close_revision(self, *args):
      return svn_repos_parse_fns2_invoke_close_revision(self, *args)
+
+
class svn_repos_parse_fns2_tPtr(svn_repos_parse_fns2_t):
    # Thin wrapper used by the C layer to hand a raw pointer to Python;
    # immediately rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_repos_parse_fns2_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_parse_fns2_t, 'thisown', 0)
        self.__class__ = svn_repos_parse_fns2_t
# Register the pointer class with the C extension so it can build proxies.
_repos.svn_repos_parse_fns2_t_swigregister(svn_repos_parse_fns2_tPtr)
+
+
def svn_repos_parse_dumpstream2(*args):
    """
    svn_repos_parse_dumpstream2(svn_stream_t stream, svn_repos_parse_fns2_t parse_fns, 
        void parse_baton, svn_cancel_func_t cancel_func, 
        apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_parse_dumpstream2(*args)

def svn_repos_get_fs_build_parser2(*args):
    """
    svn_repos_get_fs_build_parser2(svn_repos_parse_fns2_t parser, void parse_baton, svn_repos_t repos, 
        svn_boolean_t use_history, enum svn_repos_load_uuid uuid_action, 
        svn_stream_t outstream, 
        char parent_dir, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_fs_build_parser2(*args)
class svn_repos_parser_fns_t:
    """Proxy of C svn_repos_parser_fns_t struct"""
    # SWIG dispatch tables: attribute name -> generated C accessor in _repos.
    # The class-level __setattr__/__getattr__ lambdas are shadowed by the
    # explicit defs below.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_parser_fns_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_parser_fns_t, name)
    # Instances are only materialized from C pointers; direct construction
    # from Python is forbidden.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_repos_parser_fns_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Legacy (v1) parser vtable callbacks, exposed as assignable attributes.
    __swig_setmethods__["new_revision_record"] = _repos.svn_repos_parser_fns_t_new_revision_record_set
    __swig_getmethods__["new_revision_record"] = _repos.svn_repos_parser_fns_t_new_revision_record_get
    __swig_setmethods__["uuid_record"] = _repos.svn_repos_parser_fns_t_uuid_record_set
    __swig_getmethods__["uuid_record"] = _repos.svn_repos_parser_fns_t_uuid_record_get
    __swig_setmethods__["new_node_record"] = _repos.svn_repos_parser_fns_t_new_node_record_set
    __swig_getmethods__["new_node_record"] = _repos.svn_repos_parser_fns_t_new_node_record_get
    __swig_setmethods__["set_revision_property"] = _repos.svn_repos_parser_fns_t_set_revision_property_set
    __swig_getmethods__["set_revision_property"] = _repos.svn_repos_parser_fns_t_set_revision_property_get
    __swig_setmethods__["set_node_property"] = _repos.svn_repos_parser_fns_t_set_node_property_set
    __swig_getmethods__["set_node_property"] = _repos.svn_repos_parser_fns_t_set_node_property_get
    __swig_setmethods__["remove_node_props"] = _repos.svn_repos_parser_fns_t_remove_node_props_set
    __swig_getmethods__["remove_node_props"] = _repos.svn_repos_parser_fns_t_remove_node_props_get
    __swig_setmethods__["set_fulltext"] = _repos.svn_repos_parser_fns_t_set_fulltext_set
    __swig_getmethods__["set_fulltext"] = _repos.svn_repos_parser_fns_t_set_fulltext_get
    __swig_setmethods__["close_node"] = _repos.svn_repos_parser_fns_t_close_node_set
    __swig_getmethods__["close_node"] = _repos.svn_repos_parser_fns_t_close_node_get
    __swig_setmethods__["close_revision"] = _repos.svn_repos_parser_fns_t_close_revision_set
    __swig_getmethods__["close_revision"] = _repos.svn_repos_parser_fns_t_close_revision_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_repos_parse_fns_t"""
      # Ties this proxy's validity to an APR pool via a weakref to the pool's
      # _is_valid flag (checked by assert_valid()).
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # Restore any Python-side state previously attached to this attribute
      # (see _members bookkeeping in __setattr__).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can restore its extra
      # state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
class svn_repos_parser_fns_tPtr(svn_repos_parser_fns_t):
    # Thin wrapper used by the C layer to hand a raw pointer to Python;
    # immediately rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_repos_parser_fns_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_parser_fns_t, 'thisown', 0)
        self.__class__ = svn_repos_parser_fns_t
# Register the pointer class with the C extension so it can build proxies.
_repos.svn_repos_parser_fns_t_swigregister(svn_repos_parser_fns_tPtr)
+
+
def svn_repos_parse_dumpstream(*args):
    """
    svn_repos_parse_dumpstream(svn_stream_t stream,  parse_fns, void parse_baton, 
        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
    """
    # Direct *args unpacking replaces the deprecated apply() builtin.
    return _repos.svn_repos_parse_dumpstream(*args)

def svn_repos_get_fs_build_parser(*args):
    """
    svn_repos_get_fs_build_parser( parser, void parse_baton, svn_repos_t repos, svn_boolean_t use_history, 
        enum svn_repos_load_uuid uuid_action, 
        svn_stream_t outstream, char parent_dir, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_get_fs_build_parser(*args)

def svn_repos_authz_read(*args):
    """
    svn_repos_authz_read(svn_authz_t authz_p, char file, svn_boolean_t must_exist, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_authz_read(*args)

def svn_repos_authz_check_access(*args):
    """
    svn_repos_authz_check_access(svn_authz_t authz, char repos_name, char path, char user, 
        svn_repos_authz_access_t required_access, 
        svn_boolean_t access_granted, apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_authz_check_access(*args)
# Values of the C enum svn_repos_revision_access_level_t.
svn_repos_revision_access_none = _repos.svn_repos_revision_access_none
svn_repos_revision_access_partial = _repos.svn_repos_revision_access_partial
svn_repos_revision_access_full = _repos.svn_repos_revision_access_full

def svn_repos_check_revision_access(*args):
    """
    svn_repos_check_revision_access(svn_repos_revision_access_level_t access_level, svn_repos_t repos, 
        svn_revnum_t revision, svn_repos_authz_func_t authz_read_func, 
        apr_pool_t pool) -> svn_error_t
    """
    return _repos.svn_repos_check_revision_access(*args)

def svn_repos_remember_client_capabilities(*args):
    """svn_repos_remember_client_capabilities(svn_repos_t repos, apr_array_header_t capabilities) -> svn_error_t"""
    return _repos.svn_repos_remember_client_capabilities(*args)
class svn_repos_t:
    """Proxy of C svn_repos_t struct"""
    # SWIG dispatch tables: attribute name -> generated C accessor in _repos.
    # svn_repos_t is opaque in C, so no struct fields are registered here.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_t, name)
    # Instances are only materialized from C pointers (see svn_repos_tPtr);
    # direct Python construction is forbidden.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_repos_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_repos_t"""
      # Ties this proxy's validity to an APR pool via a weakref to the pool's
      # _is_valid flag (checked by assert_valid()).
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # Restore any Python-side state previously attached to this attribute
      # (see _members bookkeeping in __setattr__).
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can restore its extra
      # state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)


class svn_repos_tPtr(svn_repos_t):
    # Thin wrapper used by the C layer to hand a raw pointer to Python;
    # immediately rebrands itself as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_repos_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_t, 'thisown', 0)
        self.__class__ = svn_repos_t
# Register the pointer class with the C extension so it can build proxies.
_repos.svn_repos_t_swigregister(svn_repos_tPtr)
+
+class svn_authz_t:
+    """Proxy of C svn_authz_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_authz_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_authz_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_authz_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_authz_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_authz_tPtr(svn_authz_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_authz_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_authz_t, 'thisown', 0)
+        self.__class__ = svn_authz_t
+_repos.svn_authz_t_swigregister(svn_authz_tPtr)
+
+
+def svn_repos_parse_fns2_invoke_new_revision_record(*args):
+    """
+    svn_repos_parse_fns2_invoke_new_revision_record(svn_repos_parse_fns2_t _obj, void revision_baton, apr_hash_t headers, 
+        void parse_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_parse_fns2_invoke_new_revision_record, args)
+
+def svn_repos_parse_fns2_invoke_uuid_record(*args):
+    """
+    svn_repos_parse_fns2_invoke_uuid_record(svn_repos_parse_fns2_t _obj, char uuid, void parse_baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_parse_fns2_invoke_uuid_record, args)
+
+def svn_repos_parse_fns2_invoke_new_node_record(*args):
+    """
+    svn_repos_parse_fns2_invoke_new_node_record(svn_repos_parse_fns2_t _obj, void node_baton, apr_hash_t headers, 
+        void revision_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_parse_fns2_invoke_new_node_record, args)
+
+def svn_repos_parse_fns2_invoke_set_revision_property(*args):
+    """
+    svn_repos_parse_fns2_invoke_set_revision_property(svn_repos_parse_fns2_t _obj, void revision_baton, char name, 
+        svn_string_t value) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_parse_fns2_invoke_set_revision_property, args)
+
+def svn_repos_parse_fns2_invoke_set_node_property(*args):
+    """
+    svn_repos_parse_fns2_invoke_set_node_property(svn_repos_parse_fns2_t _obj, void node_baton, char name, 
+        svn_string_t value) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_parse_fns2_invoke_set_node_property, args)
+
+def svn_repos_parse_fns2_invoke_delete_node_property(*args):
+    """svn_repos_parse_fns2_invoke_delete_node_property(svn_repos_parse_fns2_t _obj, void node_baton, char name) -> svn_error_t"""
+    return apply(_repos.svn_repos_parse_fns2_invoke_delete_node_property, args)
+
+def svn_repos_parse_fns2_invoke_remove_node_props(*args):
+    """svn_repos_parse_fns2_invoke_remove_node_props(svn_repos_parse_fns2_t _obj, void node_baton) -> svn_error_t"""
+    return apply(_repos.svn_repos_parse_fns2_invoke_remove_node_props, args)
+
+def svn_repos_parse_fns2_invoke_set_fulltext(*args):
+    """svn_repos_parse_fns2_invoke_set_fulltext(svn_repos_parse_fns2_t _obj, svn_stream_t stream, void node_baton) -> svn_error_t"""
+    return apply(_repos.svn_repos_parse_fns2_invoke_set_fulltext, args)
+
+def svn_repos_parse_fns2_invoke_apply_textdelta(*args):
+    """
+    svn_repos_parse_fns2_invoke_apply_textdelta(svn_repos_parse_fns2_t _obj, svn_txdelta_window_handler_t handler, 
+        void handler_baton, void node_baton) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_parse_fns2_invoke_apply_textdelta, args)
+
+def svn_repos_parse_fns2_invoke_close_node(*args):
+    """svn_repos_parse_fns2_invoke_close_node(svn_repos_parse_fns2_t _obj, void node_baton) -> svn_error_t"""
+    return apply(_repos.svn_repos_parse_fns2_invoke_close_node, args)
+
+def svn_repos_parse_fns2_invoke_close_revision(*args):
+    """svn_repos_parse_fns2_invoke_close_revision(svn_repos_parse_fns2_t _obj, void revision_baton) -> svn_error_t"""
+    return apply(_repos.svn_repos_parse_fns2_invoke_close_revision, args)
+
+def svn_repos_invoke_authz_func(*args):
+    """
+    svn_repos_invoke_authz_func(svn_repos_authz_func_t _obj, svn_boolean_t allowed, 
+        svn_fs_root_t root, char path, void baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_invoke_authz_func, args)
+
+def svn_repos_invoke_authz_callback(*args):
+    """
+    svn_repos_invoke_authz_callback(svn_repos_authz_callback_t _obj, svn_repos_authz_access_t required, 
+        svn_boolean_t allowed, svn_fs_root_t root, 
+        char path, void baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_invoke_authz_callback, args)
+
+def svn_repos_invoke_file_rev_handler(*args):
+    """
+    svn_repos_invoke_file_rev_handler(svn_repos_file_rev_handler_t _obj, void baton, char path, 
+        svn_revnum_t rev, apr_hash_t rev_props, 
+        svn_txdelta_window_handler_t delta_handler, 
+        void delta_baton, apr_array_header_t prop_diffs, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_invoke_file_rev_handler, args)
+
+def svn_repos_invoke_history_func(*args):
+    """
+    svn_repos_invoke_history_func(svn_repos_history_func_t _obj, void baton, char path, 
+        svn_revnum_t revision, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_repos.svn_repos_invoke_history_func, args)
+class svn_repos_authz_func_t:
+    """Proxy of C svn_repos_authz_func_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_authz_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_authz_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_repos_authz_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_repos_authz_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_repos_invoke_authz_func(self, *args)
+
+
+class svn_repos_authz_func_tPtr(svn_repos_authz_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_repos_authz_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_authz_func_t, 'thisown', 0)
+        self.__class__ = svn_repos_authz_func_t
+_repos.svn_repos_authz_func_t_swigregister(svn_repos_authz_func_tPtr)
+
+class svn_repos_authz_callback_t:
+    """Proxy of C svn_repos_authz_callback_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_authz_callback_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_authz_callback_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_repos_authz_callback_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_repos_authz_callback_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_repos_invoke_authz_callback(self, *args)
+
+
+class svn_repos_authz_callback_tPtr(svn_repos_authz_callback_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_repos_authz_callback_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_authz_callback_t, 'thisown', 0)
+        self.__class__ = svn_repos_authz_callback_t
+_repos.svn_repos_authz_callback_t_swigregister(svn_repos_authz_callback_tPtr)
+
+class svn_repos_file_rev_handler_t:
+    """Proxy of C svn_repos_file_rev_handler_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_file_rev_handler_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_file_rev_handler_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_repos_file_rev_handler_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_repos_file_rev_handler_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_repos_invoke_file_rev_handler(self, *args)
+
+
+class svn_repos_file_rev_handler_tPtr(svn_repos_file_rev_handler_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_repos_file_rev_handler_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_file_rev_handler_t, 'thisown', 0)
+        self.__class__ = svn_repos_file_rev_handler_t
+_repos.svn_repos_file_rev_handler_t_swigregister(svn_repos_file_rev_handler_tPtr)
+
+class svn_repos_history_func_t:
+    """Proxy of C svn_repos_history_func_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_repos_history_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_repos_history_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_repos_history_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_repos_history_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_repos_invoke_history_func(self, *args)
+
+
+class svn_repos_history_func_tPtr(svn_repos_history_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_repos_history_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_repos_history_func_t, 'thisown', 0)
+        self.__class__ = svn_repos_history_func_t
+_repos.svn_repos_history_func_t_swigregister(svn_repos_history_func_tPtr)
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/sqlite3.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/sqlite3.dll
new file mode 100644
index 0000000..472769b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/sqlite3.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ssleay32.dll b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ssleay32.dll
new file mode 100644
index 0000000..e1c4ffcc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ssleay32.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ssleay32.dll.manifest b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ssleay32.dll.manifest
new file mode 100644
index 0000000..324c707e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/ssleay32.dll.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>

+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>

+  <dependency>

+    <dependentAssembly>

+      <assemblyIdentity type='win32' name='Microsoft.VC80.CRT' version='8.0.50727.762' processorArchitecture='x86' publicKeyToken='1fc8b3b9a1e18e3b' />

+    </dependentAssembly>

+  </dependency>

+</assembly>

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/libsvn/wc.py b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/wc.py
new file mode 100644
index 0000000..fb438c56
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/libsvn/wc.py
@@ -0,0 +1,3529 @@
+# This file was created automatically by SWIG 1.3.27.
+# Don't modify this file, modify the SWIG interface instead.
+import core
+import delta
+import ra
+
+import _wc
+
+# This file is compatible with both classic and new-style classes.
+def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
+    if (name == "this"):
+        if isinstance(value, class_type):
+            self.__dict__[name] = value.this
+            if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
+            del value.thisown
+            return
+    method = class_type.__swig_setmethods__.get(name,None)
+    if method: return method(self,value)
+    if (not static) or hasattr(self,name) or (name == "thisown"):
+        self.__dict__[name] = value
+    else:
+        raise AttributeError("You cannot add attributes to %s" % self)
+
+def _swig_setattr(self,class_type,name,value):
+    return _swig_setattr_nondynamic(self,class_type,name,value,0)
+
+def _swig_getattr(self,class_type,name):
+    method = class_type.__swig_getmethods__.get(name,None)
+    if method: return method(self)
+    raise AttributeError,name
+
+
+def svn_wc_version(*args):
+    """svn_wc_version() -> svn_version_t"""
+    return apply(_wc.svn_wc_version, args)
+SVN_WC_TRANSLATE_FROM_NF = _wc.SVN_WC_TRANSLATE_FROM_NF
+SVN_WC_TRANSLATE_TO_NF = _wc.SVN_WC_TRANSLATE_TO_NF
+SVN_WC_TRANSLATE_FORCE_EOL_REPAIR = _wc.SVN_WC_TRANSLATE_FORCE_EOL_REPAIR
+SVN_WC_TRANSLATE_NO_OUTPUT_CLEANUP = _wc.SVN_WC_TRANSLATE_NO_OUTPUT_CLEANUP
+SVN_WC_TRANSLATE_FORCE_COPY = _wc.SVN_WC_TRANSLATE_FORCE_COPY
+SVN_WC_TRANSLATE_USE_GLOBAL_TMP = _wc.SVN_WC_TRANSLATE_USE_GLOBAL_TMP
+
+def svn_wc_adm_open3(*args):
+    """
+    svn_wc_adm_open3(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        int levels_to_lock, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_open3, args)
+
+def svn_wc_adm_open2(*args):
+    """
+    svn_wc_adm_open2(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        int levels_to_lock, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_open2, args)
+
+def svn_wc_adm_open(*args):
+    """
+    svn_wc_adm_open(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        svn_boolean_t tree_lock, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_open, args)
+
+def svn_wc_adm_probe_open3(*args):
+    """
+    svn_wc_adm_probe_open3(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        int levels_to_lock, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_probe_open3, args)
+
+def svn_wc_adm_probe_open2(*args):
+    """
+    svn_wc_adm_probe_open2(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        int levels_to_lock, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_probe_open2, args)
+
+def svn_wc_adm_probe_open(*args):
+    """
+    svn_wc_adm_probe_open(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        svn_boolean_t tree_lock, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_probe_open, args)
+
+def svn_wc_adm_open_anchor(*args):
+    """
+    svn_wc_adm_open_anchor(svn_wc_adm_access_t anchor_access, svn_wc_adm_access_t target_access, 
+        char target, char path, svn_boolean_t write_lock, 
+        int levels_to_lock, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_open_anchor, args)
+
+def svn_wc_adm_retrieve(*args):
+    """
+    svn_wc_adm_retrieve(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_retrieve, args)
+
+def svn_wc_adm_probe_retrieve(*args):
+    """
+    svn_wc_adm_probe_retrieve(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_probe_retrieve, args)
+
+def svn_wc_adm_probe_try3(*args):
+    """
+    svn_wc_adm_probe_try3(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        int levels_to_lock, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_probe_try3, args)
+
+def svn_wc_adm_probe_try2(*args):
+    """
+    svn_wc_adm_probe_try2(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        int levels_to_lock, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_probe_try2, args)
+
+def svn_wc_adm_probe_try(*args):
+    """
+    svn_wc_adm_probe_try(svn_wc_adm_access_t adm_access, svn_wc_adm_access_t associated, 
+        char path, svn_boolean_t write_lock, 
+        svn_boolean_t tree_lock, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_adm_probe_try, args)
+
+def svn_wc_adm_close(*args):
+    """svn_wc_adm_close(svn_wc_adm_access_t adm_access) -> svn_error_t"""
+    return apply(_wc.svn_wc_adm_close, args)
+
+def svn_wc_adm_access_path(*args):
+    """svn_wc_adm_access_path(svn_wc_adm_access_t adm_access) -> char"""
+    return apply(_wc.svn_wc_adm_access_path, args)
+
+def svn_wc_adm_access_pool(*args):
+    """svn_wc_adm_access_pool(svn_wc_adm_access_t adm_access) -> apr_pool_t"""
+    return apply(_wc.svn_wc_adm_access_pool, args)
+
+def svn_wc_adm_locked(*args):
+    """svn_wc_adm_locked(svn_wc_adm_access_t adm_access) -> svn_boolean_t"""
+    return apply(_wc.svn_wc_adm_locked, args)
+
+def svn_wc_locked(*args):
+    """svn_wc_locked(svn_boolean_t locked, char path, apr_pool_t pool) -> svn_error_t"""
+    return apply(_wc.svn_wc_locked, args)
+
+def svn_wc_is_adm_dir(*args):
+    """svn_wc_is_adm_dir(char name, apr_pool_t pool) -> svn_boolean_t"""
+    return apply(_wc.svn_wc_is_adm_dir, args)
+
+def svn_wc_get_adm_dir(*args):
+    """svn_wc_get_adm_dir(apr_pool_t pool) -> char"""
+    return apply(_wc.svn_wc_get_adm_dir, args)
+
+def svn_wc_set_adm_dir(*args):
+    """svn_wc_set_adm_dir(char name, apr_pool_t pool) -> svn_error_t"""
+    return apply(_wc.svn_wc_set_adm_dir, args)
+
+def svn_wc_init_traversal_info(*args):
+    """svn_wc_init_traversal_info(apr_pool_t pool) -> svn_wc_traversal_info_t"""
+    return apply(_wc.svn_wc_init_traversal_info, args)
+
+def svn_wc_edited_externals(*args):
+    """
+    svn_wc_edited_externals(apr_hash_t externals_old, apr_hash_t externals_new, 
+        svn_wc_traversal_info_t traversal_info)
+    """
+    return apply(_wc.svn_wc_edited_externals, args)
+
+def svn_wc_traversed_depths(*args):
+    """svn_wc_traversed_depths(apr_hash_t depths, svn_wc_traversal_info_t traversal_info)"""
+    return apply(_wc.svn_wc_traversed_depths, args)
+class svn_wc_external_item2_t:
+    """Proxy of C svn_wc_external_item2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_external_item2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_external_item2_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_external_item2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["target_dir"] = _wc.svn_wc_external_item2_t_target_dir_set
+    __swig_getmethods__["target_dir"] = _wc.svn_wc_external_item2_t_target_dir_get
+    __swig_setmethods__["url"] = _wc.svn_wc_external_item2_t_url_set
+    __swig_getmethods__["url"] = _wc.svn_wc_external_item2_t_url_get
+    __swig_setmethods__["revision"] = _wc.svn_wc_external_item2_t_revision_set
+    __swig_getmethods__["revision"] = _wc.svn_wc_external_item2_t_revision_get
+    __swig_setmethods__["peg_revision"] = _wc.svn_wc_external_item2_t_peg_revision_set
+    __swig_getmethods__["peg_revision"] = _wc.svn_wc_external_item2_t_peg_revision_get
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_external_item2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_wc_external_item2_t"""
+        _swig_setattr(self, svn_wc_external_item2_t, 'this', apply(_wc.new_svn_wc_external_item2_t, args))
+        _swig_setattr(self, svn_wc_external_item2_t, 'thisown', 1)
+    def __del__(self, destroy=_wc.delete_svn_wc_external_item2_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+class svn_wc_external_item2_tPtr(svn_wc_external_item2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_external_item2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_external_item2_t, 'thisown', 0)
+        self.__class__ = svn_wc_external_item2_t
+_wc.svn_wc_external_item2_t_swigregister(svn_wc_external_item2_tPtr)
+
+
+def svn_wc_external_item_create(*args):
+    """svn_wc_external_item_create(svn_wc_external_item2_t item, apr_pool_t pool) -> svn_error_t"""
+    return apply(_wc.svn_wc_external_item_create, args)
+
+def svn_wc_external_item2_dup(*args):
+    """svn_wc_external_item2_dup(svn_wc_external_item2_t item, apr_pool_t pool) -> svn_wc_external_item2_t"""
+    return apply(_wc.svn_wc_external_item2_dup, args)
# SWIG-generated proxy class for the C struct `svn_wc_external_item_t`.
# Struct-member reads/writes are dispatched through the accessor functions
# registered in __swig_setmethods__/__swig_getmethods__, and the pool
# helpers below guard against touching memory whose APR pool is gone.
class svn_wc_external_item_t:
    """Proxy of C svn_wc_external_item_t struct"""
    __swig_setmethods__ = {}
    # NOTE: these class-level lambdas are shadowed by the `def __setattr__`
    # / `def __getattr__` overrides later in this class body.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_external_item_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_external_item_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_external_item_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Generated accessors for the C struct members.
    __swig_setmethods__["target_dir"] = _wc.svn_wc_external_item_t_target_dir_set
    __swig_getmethods__["target_dir"] = _wc.svn_wc_external_item_t_target_dir_get
    __swig_setmethods__["url"] = _wc.svn_wc_external_item_t_url_set
    __swig_getmethods__["url"] = _wc.svn_wc_external_item_t_url_get
    __swig_setmethods__["revision"] = _wc.svn_wc_external_item_t_revision_set
    __swig_getmethods__["revision"] = _wc.svn_wc_external_item_t_revision_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_wc_external_item_t"""
      # Record the pool that owns the underlying C memory (defaulting to the
      # global application pool) and keep a weakref to its validity flag so
      # assert_valid() can detect that the pool has been destroyed.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If a value was previously assigned to this member (recorded by
      # __setattr__), copy its Python-side state onto the freshly fetched
      # proxy so wrapper attributes survive the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the assigned value so __getattr__ can re-attach its
      # Python-side state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_wc_external_item_t"""
        _swig_setattr(self, svn_wc_external_item_t, 'this', apply(_wc.new_svn_wc_external_item_t, args))
        _swig_setattr(self, svn_wc_external_item_t, 'thisown', 1)
    def __del__(self, destroy=_wc.delete_svn_wc_external_item_t):
        """__del__(self)"""
        # Free the underlying C struct only if this proxy owns it; any
        # error (e.g. during interpreter shutdown) is deliberately swallowed.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Thin SWIG pointer wrapper: wraps a raw C pointer and immediately rebrands
# itself as the full proxy class so callers only ever see that class.
class svn_wc_external_item_tPtr(svn_wc_external_item_t):
    def __init__(self, this):
        _swig_setattr(self, svn_wc_external_item_t, 'this', this)
        # A bare pointer wrapper does not own the C memory.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_external_item_t, 'thisown', 0)
        self.__class__ = svn_wc_external_item_t
# Register the wrapper with the C extension module (SWIG runtime bookkeeping).
_wc.svn_wc_external_item_t_swigregister(svn_wc_external_item_tPtr)
+
+
def svn_wc_external_item_dup(*args):
    """svn_wc_external_item_dup(svn_wc_external_item_t item, apr_pool_t pool) -> svn_wc_external_item_t"""
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_external_item_dup(*args)
+
def svn_wc_parse_externals_description3(*args):
    """
    svn_wc_parse_externals_description3(apr_array_header_t externals_p, char parent_directory, 
        char desc, svn_boolean_t canonicalize_url, 
        apr_pool_t pool) -> svn_error_t
    """
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_parse_externals_description3(*args)
+
def svn_wc_parse_externals_description2(*args):
    """
    svn_wc_parse_externals_description2(apr_array_header_t externals_p, char parent_directory, 
        char desc, apr_pool_t pool) -> svn_error_t
    """
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_parse_externals_description2(*args)
+
def svn_wc_parse_externals_description(*args):
    """
    svn_wc_parse_externals_description(apr_hash_t externals_p, char parent_directory, char desc, 
        apr_pool_t pool) -> svn_error_t
    """
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_parse_externals_description(*args)
# Notification action constants (svn_wc_notify_*) re-exported from the
# _wc extension module.
svn_wc_notify_add = _wc.svn_wc_notify_add
svn_wc_notify_copy = _wc.svn_wc_notify_copy
svn_wc_notify_delete = _wc.svn_wc_notify_delete
svn_wc_notify_restore = _wc.svn_wc_notify_restore
svn_wc_notify_revert = _wc.svn_wc_notify_revert
svn_wc_notify_failed_revert = _wc.svn_wc_notify_failed_revert
svn_wc_notify_resolved = _wc.svn_wc_notify_resolved
svn_wc_notify_skip = _wc.svn_wc_notify_skip
svn_wc_notify_update_delete = _wc.svn_wc_notify_update_delete
svn_wc_notify_update_add = _wc.svn_wc_notify_update_add
svn_wc_notify_update_update = _wc.svn_wc_notify_update_update
svn_wc_notify_update_completed = _wc.svn_wc_notify_update_completed
svn_wc_notify_update_external = _wc.svn_wc_notify_update_external
svn_wc_notify_status_completed = _wc.svn_wc_notify_status_completed
svn_wc_notify_status_external = _wc.svn_wc_notify_status_external
svn_wc_notify_commit_modified = _wc.svn_wc_notify_commit_modified
svn_wc_notify_commit_added = _wc.svn_wc_notify_commit_added
svn_wc_notify_commit_deleted = _wc.svn_wc_notify_commit_deleted
svn_wc_notify_commit_replaced = _wc.svn_wc_notify_commit_replaced
svn_wc_notify_commit_postfix_txdelta = _wc.svn_wc_notify_commit_postfix_txdelta
svn_wc_notify_blame_revision = _wc.svn_wc_notify_blame_revision
svn_wc_notify_locked = _wc.svn_wc_notify_locked
svn_wc_notify_unlocked = _wc.svn_wc_notify_unlocked
svn_wc_notify_failed_lock = _wc.svn_wc_notify_failed_lock
svn_wc_notify_failed_unlock = _wc.svn_wc_notify_failed_unlock
svn_wc_notify_exists = _wc.svn_wc_notify_exists
svn_wc_notify_changelist_set = _wc.svn_wc_notify_changelist_set
svn_wc_notify_changelist_clear = _wc.svn_wc_notify_changelist_clear
svn_wc_notify_changelist_moved = _wc.svn_wc_notify_changelist_moved
svn_wc_notify_merge_begin = _wc.svn_wc_notify_merge_begin
svn_wc_notify_foreign_merge_begin = _wc.svn_wc_notify_foreign_merge_begin
svn_wc_notify_update_replace = _wc.svn_wc_notify_update_replace
# Content/property state constants (svn_wc_notify_state_*).
svn_wc_notify_state_inapplicable = _wc.svn_wc_notify_state_inapplicable
svn_wc_notify_state_unknown = _wc.svn_wc_notify_state_unknown
svn_wc_notify_state_unchanged = _wc.svn_wc_notify_state_unchanged
svn_wc_notify_state_missing = _wc.svn_wc_notify_state_missing
svn_wc_notify_state_obstructed = _wc.svn_wc_notify_state_obstructed
svn_wc_notify_state_changed = _wc.svn_wc_notify_state_changed
svn_wc_notify_state_merged = _wc.svn_wc_notify_state_merged
svn_wc_notify_state_conflicted = _wc.svn_wc_notify_state_conflicted
# Lock state constants (svn_wc_notify_lock_state_*).
svn_wc_notify_lock_state_inapplicable = _wc.svn_wc_notify_lock_state_inapplicable
svn_wc_notify_lock_state_unknown = _wc.svn_wc_notify_lock_state_unknown
svn_wc_notify_lock_state_unchanged = _wc.svn_wc_notify_lock_state_unchanged
svn_wc_notify_lock_state_locked = _wc.svn_wc_notify_lock_state_locked
svn_wc_notify_lock_state_unlocked = _wc.svn_wc_notify_lock_state_unlocked
# SWIG-generated proxy class for the C struct `svn_wc_notify_t`.
# Struct-member reads/writes are dispatched through the accessor functions
# registered in __swig_setmethods__/__swig_getmethods__, and the pool
# helpers below guard against touching memory whose APR pool is gone.
class svn_wc_notify_t:
    """Proxy of C svn_wc_notify_t struct"""
    __swig_setmethods__ = {}
    # NOTE: these class-level lambdas are shadowed by the `def __setattr__`
    # / `def __getattr__` overrides later in this class body.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_notify_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_notify_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_notify_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Generated accessors for the C struct members.
    __swig_setmethods__["path"] = _wc.svn_wc_notify_t_path_set
    __swig_getmethods__["path"] = _wc.svn_wc_notify_t_path_get
    __swig_setmethods__["action"] = _wc.svn_wc_notify_t_action_set
    __swig_getmethods__["action"] = _wc.svn_wc_notify_t_action_get
    __swig_setmethods__["kind"] = _wc.svn_wc_notify_t_kind_set
    __swig_getmethods__["kind"] = _wc.svn_wc_notify_t_kind_get
    __swig_setmethods__["mime_type"] = _wc.svn_wc_notify_t_mime_type_set
    __swig_getmethods__["mime_type"] = _wc.svn_wc_notify_t_mime_type_get
    __swig_setmethods__["lock"] = _wc.svn_wc_notify_t_lock_set
    __swig_getmethods__["lock"] = _wc.svn_wc_notify_t_lock_get
    __swig_setmethods__["err"] = _wc.svn_wc_notify_t_err_set
    __swig_getmethods__["err"] = _wc.svn_wc_notify_t_err_get
    __swig_setmethods__["content_state"] = _wc.svn_wc_notify_t_content_state_set
    __swig_getmethods__["content_state"] = _wc.svn_wc_notify_t_content_state_get
    __swig_setmethods__["prop_state"] = _wc.svn_wc_notify_t_prop_state_set
    __swig_getmethods__["prop_state"] = _wc.svn_wc_notify_t_prop_state_get
    __swig_setmethods__["lock_state"] = _wc.svn_wc_notify_t_lock_state_set
    __swig_getmethods__["lock_state"] = _wc.svn_wc_notify_t_lock_state_get
    __swig_setmethods__["revision"] = _wc.svn_wc_notify_t_revision_set
    __swig_getmethods__["revision"] = _wc.svn_wc_notify_t_revision_get
    __swig_setmethods__["changelist_name"] = _wc.svn_wc_notify_t_changelist_name_set
    __swig_getmethods__["changelist_name"] = _wc.svn_wc_notify_t_changelist_name_get
    __swig_setmethods__["merge_range"] = _wc.svn_wc_notify_t_merge_range_set
    __swig_getmethods__["merge_range"] = _wc.svn_wc_notify_t_merge_range_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_wc_notify_t"""
      # Record the pool that owns the underlying C memory (defaulting to the
      # global application pool) and keep a weakref to its validity flag so
      # assert_valid() can detect that the pool has been destroyed.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If a value was previously assigned to this member (recorded by
      # __setattr__), copy its Python-side state onto the freshly fetched
      # proxy so wrapper attributes survive the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the assigned value so __getattr__ can re-attach its
      # Python-side state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_wc_notify_t"""
        _swig_setattr(self, svn_wc_notify_t, 'this', apply(_wc.new_svn_wc_notify_t, args))
        _swig_setattr(self, svn_wc_notify_t, 'thisown', 1)
    def __del__(self, destroy=_wc.delete_svn_wc_notify_t):
        """__del__(self)"""
        # Free the underlying C struct only if this proxy owns it; any
        # error (e.g. during interpreter shutdown) is deliberately swallowed.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Thin SWIG pointer wrapper: wraps a raw C pointer and immediately rebrands
# itself as the full proxy class so callers only ever see that class.
class svn_wc_notify_tPtr(svn_wc_notify_t):
    def __init__(self, this):
        _swig_setattr(self, svn_wc_notify_t, 'this', this)
        # A bare pointer wrapper does not own the C memory.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_notify_t, 'thisown', 0)
        self.__class__ = svn_wc_notify_t
# Register the wrapper with the C extension module (SWIG runtime bookkeeping).
_wc.svn_wc_notify_t_swigregister(svn_wc_notify_tPtr)
+
+
def svn_wc_create_notify(*args):
    """svn_wc_create_notify(char path, svn_wc_notify_action_t action, apr_pool_t pool) -> svn_wc_notify_t"""
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_create_notify(*args)
+
def svn_wc_dup_notify(*args):
    """svn_wc_dup_notify(svn_wc_notify_t notify, apr_pool_t pool) -> svn_wc_notify_t"""
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_dup_notify(*args)
# Conflict action, reason, and kind constants re-exported from the _wc
# extension module.
svn_wc_conflict_action_edit = _wc.svn_wc_conflict_action_edit
svn_wc_conflict_action_add = _wc.svn_wc_conflict_action_add
svn_wc_conflict_action_delete = _wc.svn_wc_conflict_action_delete
svn_wc_conflict_reason_edited = _wc.svn_wc_conflict_reason_edited
svn_wc_conflict_reason_obstructed = _wc.svn_wc_conflict_reason_obstructed
svn_wc_conflict_reason_deleted = _wc.svn_wc_conflict_reason_deleted
svn_wc_conflict_reason_missing = _wc.svn_wc_conflict_reason_missing
svn_wc_conflict_reason_unversioned = _wc.svn_wc_conflict_reason_unversioned
svn_wc_conflict_kind_text = _wc.svn_wc_conflict_kind_text
svn_wc_conflict_kind_property = _wc.svn_wc_conflict_kind_property
# SWIG-generated proxy class for the C struct `svn_wc_conflict_description_t`.
# Struct-member reads/writes are dispatched through the accessor functions
# registered in __swig_setmethods__/__swig_getmethods__, and the pool
# helpers below guard against touching memory whose APR pool is gone.
class svn_wc_conflict_description_t:
    """Proxy of C svn_wc_conflict_description_t struct"""
    __swig_setmethods__ = {}
    # NOTE: these class-level lambdas are shadowed by the `def __setattr__`
    # / `def __getattr__` overrides later in this class body.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_description_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_description_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_conflict_description_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Generated accessors for the C struct members.
    __swig_setmethods__["path"] = _wc.svn_wc_conflict_description_t_path_set
    __swig_getmethods__["path"] = _wc.svn_wc_conflict_description_t_path_get
    __swig_setmethods__["node_kind"] = _wc.svn_wc_conflict_description_t_node_kind_set
    __swig_getmethods__["node_kind"] = _wc.svn_wc_conflict_description_t_node_kind_get
    __swig_setmethods__["kind"] = _wc.svn_wc_conflict_description_t_kind_set
    __swig_getmethods__["kind"] = _wc.svn_wc_conflict_description_t_kind_get
    __swig_setmethods__["property_name"] = _wc.svn_wc_conflict_description_t_property_name_set
    __swig_getmethods__["property_name"] = _wc.svn_wc_conflict_description_t_property_name_get
    __swig_setmethods__["is_binary"] = _wc.svn_wc_conflict_description_t_is_binary_set
    __swig_getmethods__["is_binary"] = _wc.svn_wc_conflict_description_t_is_binary_get
    __swig_setmethods__["mime_type"] = _wc.svn_wc_conflict_description_t_mime_type_set
    __swig_getmethods__["mime_type"] = _wc.svn_wc_conflict_description_t_mime_type_get
    __swig_setmethods__["access"] = _wc.svn_wc_conflict_description_t_access_set
    __swig_getmethods__["access"] = _wc.svn_wc_conflict_description_t_access_get
    __swig_setmethods__["action"] = _wc.svn_wc_conflict_description_t_action_set
    __swig_getmethods__["action"] = _wc.svn_wc_conflict_description_t_action_get
    __swig_setmethods__["reason"] = _wc.svn_wc_conflict_description_t_reason_set
    __swig_getmethods__["reason"] = _wc.svn_wc_conflict_description_t_reason_get
    __swig_setmethods__["base_file"] = _wc.svn_wc_conflict_description_t_base_file_set
    __swig_getmethods__["base_file"] = _wc.svn_wc_conflict_description_t_base_file_get
    __swig_setmethods__["their_file"] = _wc.svn_wc_conflict_description_t_their_file_set
    __swig_getmethods__["their_file"] = _wc.svn_wc_conflict_description_t_their_file_get
    __swig_setmethods__["my_file"] = _wc.svn_wc_conflict_description_t_my_file_set
    __swig_getmethods__["my_file"] = _wc.svn_wc_conflict_description_t_my_file_get
    __swig_setmethods__["merged_file"] = _wc.svn_wc_conflict_description_t_merged_file_set
    __swig_getmethods__["merged_file"] = _wc.svn_wc_conflict_description_t_merged_file_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_wc_conflict_description_t"""
      # Record the pool that owns the underlying C memory (defaulting to the
      # global application pool) and keep a weakref to its validity flag so
      # assert_valid() can detect that the pool has been destroyed.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If a value was previously assigned to this member (recorded by
      # __setattr__), copy its Python-side state onto the freshly fetched
      # proxy so wrapper attributes survive the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the assigned value so __getattr__ can re-attach its
      # Python-side state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_wc_conflict_description_t"""
        _swig_setattr(self, svn_wc_conflict_description_t, 'this', apply(_wc.new_svn_wc_conflict_description_t, args))
        _swig_setattr(self, svn_wc_conflict_description_t, 'thisown', 1)
    def __del__(self, destroy=_wc.delete_svn_wc_conflict_description_t):
        """__del__(self)"""
        # Free the underlying C struct only if this proxy owns it; any
        # error (e.g. during interpreter shutdown) is deliberately swallowed.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Thin SWIG pointer wrapper: wraps a raw C pointer and immediately rebrands
# itself as the full proxy class so callers only ever see that class.
class svn_wc_conflict_description_tPtr(svn_wc_conflict_description_t):
    def __init__(self, this):
        _swig_setattr(self, svn_wc_conflict_description_t, 'this', this)
        # A bare pointer wrapper does not own the C memory.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_conflict_description_t, 'thisown', 0)
        self.__class__ = svn_wc_conflict_description_t
# Register the wrapper with the C extension module (SWIG runtime bookkeeping).
_wc.svn_wc_conflict_description_t_swigregister(svn_wc_conflict_description_tPtr)
+
# Conflict-resolution choice constants re-exported from the _wc extension
# module.
svn_wc_conflict_choose_postpone = _wc.svn_wc_conflict_choose_postpone
svn_wc_conflict_choose_base = _wc.svn_wc_conflict_choose_base
svn_wc_conflict_choose_theirs_full = _wc.svn_wc_conflict_choose_theirs_full
svn_wc_conflict_choose_mine_full = _wc.svn_wc_conflict_choose_mine_full
svn_wc_conflict_choose_theirs_conflict = _wc.svn_wc_conflict_choose_theirs_conflict
svn_wc_conflict_choose_mine_conflict = _wc.svn_wc_conflict_choose_mine_conflict
svn_wc_conflict_choose_merged = _wc.svn_wc_conflict_choose_merged
# SWIG-generated proxy class for the C struct `svn_wc_conflict_result_t`.
# Struct-member reads/writes are dispatched through the accessor functions
# registered in __swig_setmethods__/__swig_getmethods__, and the pool
# helpers below guard against touching memory whose APR pool is gone.
class svn_wc_conflict_result_t:
    """Proxy of C svn_wc_conflict_result_t struct"""
    __swig_setmethods__ = {}
    # NOTE: these class-level lambdas are shadowed by the `def __setattr__`
    # / `def __getattr__` overrides later in this class body.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_result_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_result_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_conflict_result_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Generated accessors for the C struct members.
    __swig_setmethods__["choice"] = _wc.svn_wc_conflict_result_t_choice_set
    __swig_getmethods__["choice"] = _wc.svn_wc_conflict_result_t_choice_get
    __swig_setmethods__["merged_file"] = _wc.svn_wc_conflict_result_t_merged_file_set
    __swig_getmethods__["merged_file"] = _wc.svn_wc_conflict_result_t_merged_file_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_wc_conflict_result_t"""
      # Record the pool that owns the underlying C memory (defaulting to the
      # global application pool) and keep a weakref to its validity flag so
      # assert_valid() can detect that the pool has been destroyed.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If a value was previously assigned to this member (recorded by
      # __setattr__), copy its Python-side state onto the freshly fetched
      # proxy so wrapper attributes survive the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the assigned value so __getattr__ can re-attach its
      # Python-side state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_wc_conflict_result_t"""
        _swig_setattr(self, svn_wc_conflict_result_t, 'this', apply(_wc.new_svn_wc_conflict_result_t, args))
        _swig_setattr(self, svn_wc_conflict_result_t, 'thisown', 1)
    def __del__(self, destroy=_wc.delete_svn_wc_conflict_result_t):
        """__del__(self)"""
        # Free the underlying C struct only if this proxy owns it; any
        # error (e.g. during interpreter shutdown) is deliberately swallowed.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Thin SWIG pointer wrapper: wraps a raw C pointer and immediately rebrands
# itself as the full proxy class so callers only ever see that class.
class svn_wc_conflict_result_tPtr(svn_wc_conflict_result_t):
    def __init__(self, this):
        _swig_setattr(self, svn_wc_conflict_result_t, 'this', this)
        # A bare pointer wrapper does not own the C memory.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_conflict_result_t, 'thisown', 0)
        self.__class__ = svn_wc_conflict_result_t
# Register the wrapper with the C extension module (SWIG runtime bookkeeping).
_wc.svn_wc_conflict_result_t_swigregister(svn_wc_conflict_result_tPtr)
+
+
def svn_wc_create_conflict_result(*args):
    """
    svn_wc_create_conflict_result(svn_wc_conflict_choice_t choice, char merged_file, 
        apr_pool_t pool) -> svn_wc_conflict_result_t
    """
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_create_conflict_result(*args)
# SWIG-generated proxy class for the C struct `svn_wc_diff_callbacks2_t`
# (a table of diff callback function pointers).  Member access is dispatched
# through __swig_setmethods__/__swig_getmethods__, and the named methods
# below invoke the stored C callbacks via module-level helper functions.
class svn_wc_diff_callbacks2_t:
    """Proxy of C svn_wc_diff_callbacks2_t struct"""
    __swig_setmethods__ = {}
    # NOTE: these class-level lambdas are shadowed by the `def __setattr__`
    # / `def __getattr__` overrides later in this class body.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_diff_callbacks2_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_diff_callbacks2_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_diff_callbacks2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Generated accessors for the callback members of the C struct.
    __swig_setmethods__["file_changed"] = _wc.svn_wc_diff_callbacks2_t_file_changed_set
    __swig_getmethods__["file_changed"] = _wc.svn_wc_diff_callbacks2_t_file_changed_get
    __swig_setmethods__["file_added"] = _wc.svn_wc_diff_callbacks2_t_file_added_set
    __swig_getmethods__["file_added"] = _wc.svn_wc_diff_callbacks2_t_file_added_get
    __swig_setmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks2_t_file_deleted_set
    __swig_getmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks2_t_file_deleted_get
    __swig_setmethods__["dir_added"] = _wc.svn_wc_diff_callbacks2_t_dir_added_set
    __swig_getmethods__["dir_added"] = _wc.svn_wc_diff_callbacks2_t_dir_added_get
    __swig_setmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks2_t_dir_deleted_set
    __swig_getmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks2_t_dir_deleted_get
    __swig_setmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks2_t_dir_props_changed_set
    __swig_getmethods__["dir_props_changed"] = _wc.svn_wc_diff_callbacks2_t_dir_props_changed_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_wc_diff_callbacks2_t"""
      # Record the pool that owns the underlying C memory (defaulting to the
      # global application pool) and keep a weakref to its validity flag so
      # assert_valid() can detect that the pool has been destroyed.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If a value was previously assigned to this member (recorded by
      # __setattr__), copy its Python-side state onto the freshly fetched
      # proxy so wrapper attributes survive the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the assigned value so __getattr__ can re-attach its
      # Python-side state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    # Each method below invokes the corresponding C callback via a
    # module-level helper defined elsewhere in this generated module.
    def file_changed(self, *args):
      return svn_wc_diff_callbacks2_invoke_file_changed(self, *args)

    def file_added(self, *args):
      return svn_wc_diff_callbacks2_invoke_file_added(self, *args)

    def file_deleted(self, *args):
      return svn_wc_diff_callbacks2_invoke_file_deleted(self, *args)

    def dir_added(self, *args):
      return svn_wc_diff_callbacks2_invoke_dir_added(self, *args)

    def dir_deleted(self, *args):
      return svn_wc_diff_callbacks2_invoke_dir_deleted(self, *args)

    def dir_props_changed(self, *args):
      return svn_wc_diff_callbacks2_invoke_dir_props_changed(self, *args)

    def __init__(self, *args):
        """__init__(self) -> svn_wc_diff_callbacks2_t"""
        _swig_setattr(self, svn_wc_diff_callbacks2_t, 'this', apply(_wc.new_svn_wc_diff_callbacks2_t, args))
        _swig_setattr(self, svn_wc_diff_callbacks2_t, 'thisown', 1)
    def __del__(self, destroy=_wc.delete_svn_wc_diff_callbacks2_t):
        """__del__(self)"""
        # Free the underlying C struct only if this proxy owns it; any
        # error (e.g. during interpreter shutdown) is deliberately swallowed.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Thin SWIG pointer wrapper: wraps a raw C pointer and immediately rebrands
# itself as the full proxy class so callers only ever see that class.
class svn_wc_diff_callbacks2_tPtr(svn_wc_diff_callbacks2_t):
    def __init__(self, this):
        _swig_setattr(self, svn_wc_diff_callbacks2_t, 'this', this)
        # A bare pointer wrapper does not own the C memory.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_diff_callbacks2_t, 'thisown', 0)
        self.__class__ = svn_wc_diff_callbacks2_t
# Register the wrapper with the C extension module (SWIG runtime bookkeeping).
_wc.svn_wc_diff_callbacks2_t_swigregister(svn_wc_diff_callbacks2_tPtr)
+
# SWIG-generated proxy class for the C struct `svn_wc_diff_callbacks_t`
# (the older callback table; note `props_changed` here vs.
# `dir_props_changed` in svn_wc_diff_callbacks2_t).  Member access is
# dispatched through __swig_setmethods__/__swig_getmethods__.
class svn_wc_diff_callbacks_t:
    """Proxy of C svn_wc_diff_callbacks_t struct"""
    __swig_setmethods__ = {}
    # NOTE: these class-level lambdas are shadowed by the `def __setattr__`
    # / `def __getattr__` overrides later in this class body.
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_diff_callbacks_t, name, value)
    __swig_getmethods__ = {}
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_diff_callbacks_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_diff_callbacks_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Generated accessors for the callback members of the C struct.
    __swig_setmethods__["file_changed"] = _wc.svn_wc_diff_callbacks_t_file_changed_set
    __swig_getmethods__["file_changed"] = _wc.svn_wc_diff_callbacks_t_file_changed_get
    __swig_setmethods__["file_added"] = _wc.svn_wc_diff_callbacks_t_file_added_set
    __swig_getmethods__["file_added"] = _wc.svn_wc_diff_callbacks_t_file_added_get
    __swig_setmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks_t_file_deleted_set
    __swig_getmethods__["file_deleted"] = _wc.svn_wc_diff_callbacks_t_file_deleted_get
    __swig_setmethods__["dir_added"] = _wc.svn_wc_diff_callbacks_t_dir_added_set
    __swig_getmethods__["dir_added"] = _wc.svn_wc_diff_callbacks_t_dir_added_get
    __swig_setmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks_t_dir_deleted_set
    __swig_getmethods__["dir_deleted"] = _wc.svn_wc_diff_callbacks_t_dir_deleted_get
    __swig_setmethods__["props_changed"] = _wc.svn_wc_diff_callbacks_t_props_changed_set
    __swig_getmethods__["props_changed"] = _wc.svn_wc_diff_callbacks_t_props_changed_get
    def set_parent_pool(self, parent_pool=None):
      """Create a new proxy object for svn_wc_diff_callbacks_t"""
      # Record the pool that owns the underlying C memory (defaulting to the
      # global application pool) and keep a weakref to its validity flag so
      # assert_valid() can detect that the pool has been destroyed.
      import libsvn.core, weakref
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If a value was previously assigned to this member (recorded by
      # __setattr__), copy its Python-side state onto the freshly fetched
      # proxy so wrapper attributes survive the round trip through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the assigned value so __getattr__ can re-attach its
      # Python-side state on later reads.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    # Each method below invokes the corresponding C callback via a
    # module-level helper defined elsewhere in this generated module.
    def file_changed(self, *args):
      return svn_wc_diff_callbacks_invoke_file_changed(self, *args)

    def file_added(self, *args):
      return svn_wc_diff_callbacks_invoke_file_added(self, *args)

    def file_deleted(self, *args):
      return svn_wc_diff_callbacks_invoke_file_deleted(self, *args)

    def dir_added(self, *args):
      return svn_wc_diff_callbacks_invoke_dir_added(self, *args)

    def dir_deleted(self, *args):
      return svn_wc_diff_callbacks_invoke_dir_deleted(self, *args)

    def props_changed(self, *args):
      return svn_wc_diff_callbacks_invoke_props_changed(self, *args)

    def __init__(self, *args):
        """__init__(self) -> svn_wc_diff_callbacks_t"""
        _swig_setattr(self, svn_wc_diff_callbacks_t, 'this', apply(_wc.new_svn_wc_diff_callbacks_t, args))
        _swig_setattr(self, svn_wc_diff_callbacks_t, 'thisown', 1)
    def __del__(self, destroy=_wc.delete_svn_wc_diff_callbacks_t):
        """__del__(self)"""
        # Free the underlying C struct only if this proxy owns it; any
        # error (e.g. during interpreter shutdown) is deliberately swallowed.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
# Thin SWIG pointer wrapper: wraps a raw C pointer and immediately rebrands
# itself as the full proxy class so callers only ever see that class.
class svn_wc_diff_callbacks_tPtr(svn_wc_diff_callbacks_t):
    def __init__(self, this):
        _swig_setattr(self, svn_wc_diff_callbacks_t, 'this', this)
        # A bare pointer wrapper does not own the C memory.
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_diff_callbacks_t, 'thisown', 0)
        self.__class__ = svn_wc_diff_callbacks_t
# Register the wrapper with the C extension module (SWIG runtime bookkeeping).
_wc.svn_wc_diff_callbacks_t_swigregister(svn_wc_diff_callbacks_tPtr)
+
+
def svn_wc_check_wc(*args):
    """svn_wc_check_wc(char path, int wc_format, apr_pool_t pool) -> svn_error_t"""
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_check_wc(*args)
+
def svn_wc_has_binary_prop(*args):
    """
    svn_wc_has_binary_prop(svn_boolean_t has_binary_prop, char path, svn_wc_adm_access_t adm_access, 
        apr_pool_t pool) -> svn_error_t
    """
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_has_binary_prop(*args)
+
def svn_wc_text_modified_p(*args):
    """
    svn_wc_text_modified_p(svn_boolean_t modified_p, char filename, svn_boolean_t force_comparison, 
        svn_wc_adm_access_t adm_access, 
        apr_pool_t pool) -> svn_error_t
    """
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_text_modified_p(*args)
+
def svn_wc_props_modified_p(*args):
    """
    svn_wc_props_modified_p(svn_boolean_t modified_p, char path, svn_wc_adm_access_t adm_access, 
        apr_pool_t pool) -> svn_error_t
    """
    # `apply()` is deprecated (removed in Python 3); *-unpacking is the
    # exact equivalent.
    return _wc.svn_wc_props_modified_p(*args)
# Administrative-directory name and scheduling constants re-exported from
# the _wc extension module.
SVN_WC_ADM_DIR_NAME = _wc.SVN_WC_ADM_DIR_NAME
svn_wc_schedule_normal = _wc.svn_wc_schedule_normal
svn_wc_schedule_add = _wc.svn_wc_schedule_add
svn_wc_schedule_delete = _wc.svn_wc_schedule_delete
svn_wc_schedule_replace = _wc.svn_wc_schedule_replace
SVN_WC_ENTRY_WORKING_SIZE_UNKNOWN = _wc.SVN_WC_ENTRY_WORKING_SIZE_UNKNOWN
+# NOTE(review): SWIG-generated proxy for the C struct svn_wc_entry_t (one
+# working-copy administrative entry).  Field access is dispatched through the
+# __swig_setmethods__/__swig_getmethods__ tables populated below; do not
+# hand-edit -- regenerate from the SWIG interface instead.
+class svn_wc_entry_t:
+    """Proxy of C svn_wc_entry_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_entry_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_entry_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_entry_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    # Per-field accessor table: each entry maps an attribute name to the
+    # generated C getter/setter in _wc.
+    __swig_setmethods__["name"] = _wc.svn_wc_entry_t_name_set
+    __swig_getmethods__["name"] = _wc.svn_wc_entry_t_name_get
+    __swig_setmethods__["revision"] = _wc.svn_wc_entry_t_revision_set
+    __swig_getmethods__["revision"] = _wc.svn_wc_entry_t_revision_get
+    __swig_setmethods__["url"] = _wc.svn_wc_entry_t_url_set
+    __swig_getmethods__["url"] = _wc.svn_wc_entry_t_url_get
+    __swig_setmethods__["repos"] = _wc.svn_wc_entry_t_repos_set
+    __swig_getmethods__["repos"] = _wc.svn_wc_entry_t_repos_get
+    __swig_setmethods__["uuid"] = _wc.svn_wc_entry_t_uuid_set
+    __swig_getmethods__["uuid"] = _wc.svn_wc_entry_t_uuid_get
+    __swig_setmethods__["kind"] = _wc.svn_wc_entry_t_kind_set
+    __swig_getmethods__["kind"] = _wc.svn_wc_entry_t_kind_get
+    __swig_setmethods__["schedule"] = _wc.svn_wc_entry_t_schedule_set
+    __swig_getmethods__["schedule"] = _wc.svn_wc_entry_t_schedule_get
+    __swig_setmethods__["copied"] = _wc.svn_wc_entry_t_copied_set
+    __swig_getmethods__["copied"] = _wc.svn_wc_entry_t_copied_get
+    __swig_setmethods__["deleted"] = _wc.svn_wc_entry_t_deleted_set
+    __swig_getmethods__["deleted"] = _wc.svn_wc_entry_t_deleted_get
+    __swig_setmethods__["absent"] = _wc.svn_wc_entry_t_absent_set
+    __swig_getmethods__["absent"] = _wc.svn_wc_entry_t_absent_get
+    __swig_setmethods__["incomplete"] = _wc.svn_wc_entry_t_incomplete_set
+    __swig_getmethods__["incomplete"] = _wc.svn_wc_entry_t_incomplete_get
+    __swig_setmethods__["copyfrom_url"] = _wc.svn_wc_entry_t_copyfrom_url_set
+    __swig_getmethods__["copyfrom_url"] = _wc.svn_wc_entry_t_copyfrom_url_get
+    __swig_setmethods__["copyfrom_rev"] = _wc.svn_wc_entry_t_copyfrom_rev_set
+    __swig_getmethods__["copyfrom_rev"] = _wc.svn_wc_entry_t_copyfrom_rev_get
+    __swig_setmethods__["conflict_old"] = _wc.svn_wc_entry_t_conflict_old_set
+    __swig_getmethods__["conflict_old"] = _wc.svn_wc_entry_t_conflict_old_get
+    __swig_setmethods__["conflict_new"] = _wc.svn_wc_entry_t_conflict_new_set
+    __swig_getmethods__["conflict_new"] = _wc.svn_wc_entry_t_conflict_new_get
+    __swig_setmethods__["conflict_wrk"] = _wc.svn_wc_entry_t_conflict_wrk_set
+    __swig_getmethods__["conflict_wrk"] = _wc.svn_wc_entry_t_conflict_wrk_get
+    __swig_setmethods__["prejfile"] = _wc.svn_wc_entry_t_prejfile_set
+    __swig_getmethods__["prejfile"] = _wc.svn_wc_entry_t_prejfile_get
+    __swig_setmethods__["text_time"] = _wc.svn_wc_entry_t_text_time_set
+    __swig_getmethods__["text_time"] = _wc.svn_wc_entry_t_text_time_get
+    __swig_setmethods__["prop_time"] = _wc.svn_wc_entry_t_prop_time_set
+    __swig_getmethods__["prop_time"] = _wc.svn_wc_entry_t_prop_time_get
+    __swig_setmethods__["checksum"] = _wc.svn_wc_entry_t_checksum_set
+    __swig_getmethods__["checksum"] = _wc.svn_wc_entry_t_checksum_get
+    __swig_setmethods__["cmt_rev"] = _wc.svn_wc_entry_t_cmt_rev_set
+    __swig_getmethods__["cmt_rev"] = _wc.svn_wc_entry_t_cmt_rev_get
+    __swig_setmethods__["cmt_date"] = _wc.svn_wc_entry_t_cmt_date_set
+    __swig_getmethods__["cmt_date"] = _wc.svn_wc_entry_t_cmt_date_get
+    __swig_setmethods__["cmt_author"] = _wc.svn_wc_entry_t_cmt_author_set
+    __swig_getmethods__["cmt_author"] = _wc.svn_wc_entry_t_cmt_author_get
+    __swig_setmethods__["lock_token"] = _wc.svn_wc_entry_t_lock_token_set
+    __swig_getmethods__["lock_token"] = _wc.svn_wc_entry_t_lock_token_get
+    __swig_setmethods__["lock_owner"] = _wc.svn_wc_entry_t_lock_owner_set
+    __swig_getmethods__["lock_owner"] = _wc.svn_wc_entry_t_lock_owner_get
+    __swig_setmethods__["lock_comment"] = _wc.svn_wc_entry_t_lock_comment_set
+    __swig_getmethods__["lock_comment"] = _wc.svn_wc_entry_t_lock_comment_get
+    __swig_setmethods__["lock_creation_date"] = _wc.svn_wc_entry_t_lock_creation_date_set
+    __swig_getmethods__["lock_creation_date"] = _wc.svn_wc_entry_t_lock_creation_date_get
+    __swig_setmethods__["has_props"] = _wc.svn_wc_entry_t_has_props_set
+    __swig_getmethods__["has_props"] = _wc.svn_wc_entry_t_has_props_get
+    __swig_setmethods__["has_prop_mods"] = _wc.svn_wc_entry_t_has_prop_mods_set
+    __swig_getmethods__["has_prop_mods"] = _wc.svn_wc_entry_t_has_prop_mods_get
+    __swig_setmethods__["cachable_props"] = _wc.svn_wc_entry_t_cachable_props_set
+    __swig_getmethods__["cachable_props"] = _wc.svn_wc_entry_t_cachable_props_get
+    __swig_setmethods__["present_props"] = _wc.svn_wc_entry_t_present_props_set
+    __swig_getmethods__["present_props"] = _wc.svn_wc_entry_t_present_props_get
+    __swig_setmethods__["changelist"] = _wc.svn_wc_entry_t_changelist_set
+    __swig_getmethods__["changelist"] = _wc.svn_wc_entry_t_changelist_get
+    __swig_setmethods__["working_size"] = _wc.svn_wc_entry_t_working_size_set
+    __swig_getmethods__["working_size"] = _wc.svn_wc_entry_t_working_size_get
+    __swig_setmethods__["keep_local"] = _wc.svn_wc_entry_t_keep_local_set
+    __swig_getmethods__["keep_local"] = _wc.svn_wc_entry_t_keep_local_get
+    __swig_setmethods__["depth"] = _wc.svn_wc_entry_t_depth_set
+    __swig_getmethods__["depth"] = _wc.svn_wc_entry_t_depth_get
+    # Bind this proxy to the APR pool that owns the underlying C memory and
+    # keep a weakref-backed flag so assert_valid() can detect a freed pool.
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_entry_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # Fetch the attribute from the C struct, then overlay any Python-side
+    # state previously cached for that member in self._members (see
+    # __setattr__ below).
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    # Cache the assigned value in self._members before writing it through to
+    # the C struct, so Python-side object state survives round-trips.
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_wc_entry_t"""
+        _swig_setattr(self, svn_wc_entry_t, 'this', apply(_wc.new_svn_wc_entry_t, args))
+        _swig_setattr(self, svn_wc_entry_t, 'thisown', 1)
+    # Destroy the C struct only when this proxy owns it; errors (e.g. during
+    # interpreter shutdown) are swallowed by the bare except.
+    def __del__(self, destroy=_wc.delete_svn_wc_entry_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# NOTE(review): SWIG 1.3-era pointer-compatibility class; constructing one
+# immediately reclasses the instance back to svn_wc_entry_t.
+class svn_wc_entry_tPtr(svn_wc_entry_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_entry_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_entry_t, 'thisown', 0)
+        self.__class__ = svn_wc_entry_t
+# Register the proxy with the C module so C-created structs wrap correctly.
+_wc.svn_wc_entry_t_swigregister(svn_wc_entry_tPtr)
+
+SVN_WC_ENTRY_THIS_DIR = _wc.SVN_WC_ENTRY_THIS_DIR
+
+# Thin SWIG-generated wrappers over the C entry-query API; each docstring
+# records the wrapped C signature.  `apply(f, args)` is the Python-2
+# spelling of `f(*args)`.
+def svn_wc_entry(*args):
+    """
+    svn_wc_entry(svn_wc_entry_t entry, char path, svn_wc_adm_access_t adm_access, 
+        svn_boolean_t show_hidden, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_entry, args)
+
+def svn_wc_entries_read(*args):
+    """
+    svn_wc_entries_read(apr_hash_t entries, svn_wc_adm_access_t adm_access, 
+        svn_boolean_t show_hidden, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_entries_read, args)
+
+def svn_wc_entry_dup(*args):
+    """svn_wc_entry_dup(svn_wc_entry_t entry, apr_pool_t pool) -> svn_wc_entry_t"""
+    return apply(_wc.svn_wc_entry_dup, args)
+
+def svn_wc_conflicted_p(*args):
+    """
+    svn_wc_conflicted_p(svn_boolean_t text_conflicted_p, svn_boolean_t prop_conflicted_p, 
+        char dir_path, svn_wc_entry_t entry, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_conflicted_p, args)
+
+def svn_wc_get_ancestry(*args):
+    """
+    svn_wc_get_ancestry(char url, svn_revnum_t rev, char path, svn_wc_adm_access_t adm_access, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_get_ancestry, args)
+# NOTE(review): SWIG-generated proxy for the C struct svn_wc_entry_callbacks2_t
+# (entry-walker callbacks, v2: found_entry plus handle_error).
+class svn_wc_entry_callbacks2_t:
+    """Proxy of C svn_wc_entry_callbacks2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_entry_callbacks2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_entry_callbacks2_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_entry_callbacks2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["found_entry"] = _wc.svn_wc_entry_callbacks2_t_found_entry_set
+    __swig_getmethods__["found_entry"] = _wc.svn_wc_entry_callbacks2_t_found_entry_get
+    __swig_setmethods__["handle_error"] = _wc.svn_wc_entry_callbacks2_t_handle_error_set
+    __swig_getmethods__["handle_error"] = _wc.svn_wc_entry_callbacks2_t_handle_error_get
+    # Bind this proxy to the APR pool that owns the underlying C memory.
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_entry_callbacks2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # Fetch from the C struct, then overlay cached Python-side member state.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    # Convenience shims forwarding to the module-level invoke helpers.
+    def found_entry(self, *args):
+      return svn_wc_entry_callbacks2_invoke_found_entry(self, *args)
+
+    def handle_error(self, *args):
+      return svn_wc_entry_callbacks2_invoke_handle_error(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_wc_entry_callbacks2_t"""
+        _swig_setattr(self, svn_wc_entry_callbacks2_t, 'this', apply(_wc.new_svn_wc_entry_callbacks2_t, args))
+        _swig_setattr(self, svn_wc_entry_callbacks2_t, 'thisown', 1)
+    def __del__(self, destroy=_wc.delete_svn_wc_entry_callbacks2_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# SWIG 1.3-era pointer-compatibility class; reclasses to the proxy type.
+class svn_wc_entry_callbacks2_tPtr(svn_wc_entry_callbacks2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_entry_callbacks2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_entry_callbacks2_t, 'thisown', 0)
+        self.__class__ = svn_wc_entry_callbacks2_t
+_wc.svn_wc_entry_callbacks2_t_swigregister(svn_wc_entry_callbacks2_tPtr)
+
+# NOTE(review): SWIG-generated proxy for the C struct svn_wc_entry_callbacks_t
+# (entry-walker callbacks, v1: found_entry only).
+class svn_wc_entry_callbacks_t:
+    """Proxy of C svn_wc_entry_callbacks_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_entry_callbacks_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_entry_callbacks_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_entry_callbacks_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    __swig_setmethods__["found_entry"] = _wc.svn_wc_entry_callbacks_t_found_entry_set
+    __swig_getmethods__["found_entry"] = _wc.svn_wc_entry_callbacks_t_found_entry_get
+    # Bind this proxy to the APR pool that owns the underlying C memory.
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_entry_callbacks_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # Fetch from the C struct, then overlay cached Python-side member state.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    # Convenience shim forwarding to the module-level invoke helper.
+    def found_entry(self, *args):
+      return svn_wc_entry_callbacks_invoke_found_entry(self, *args)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_wc_entry_callbacks_t"""
+        _swig_setattr(self, svn_wc_entry_callbacks_t, 'this', apply(_wc.new_svn_wc_entry_callbacks_t, args))
+        _swig_setattr(self, svn_wc_entry_callbacks_t, 'thisown', 1)
+    def __del__(self, destroy=_wc.delete_svn_wc_entry_callbacks_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# SWIG 1.3-era pointer-compatibility class; reclasses to the proxy type.
+class svn_wc_entry_callbacks_tPtr(svn_wc_entry_callbacks_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_entry_callbacks_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_entry_callbacks_t, 'thisown', 0)
+        self.__class__ = svn_wc_entry_callbacks_t
+_wc.svn_wc_entry_callbacks_t_swigregister(svn_wc_entry_callbacks_tPtr)
+
+
+# Thin SWIG-generated wrappers: entry walkers (v3/v2/v1), admin-area creation
+# (ensure_adm, v3/v2/v1), and related helpers.  Docstrings record the wrapped
+# C signatures; `apply(f, args)` is the Python-2 spelling of `f(*args)`.
+def svn_wc_walk_entries3(*args):
+    """
+    svn_wc_walk_entries3(char path, svn_wc_adm_access_t adm_access, svn_wc_entry_callbacks2_t walk_callbacks, 
+        void walk_baton, 
+        svn_depth_t depth, svn_boolean_t show_hidden, 
+        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_walk_entries3, args)
+
+def svn_wc_walk_entries2(*args):
+    """
+    svn_wc_walk_entries2(char path, svn_wc_adm_access_t adm_access, svn_wc_entry_callbacks_t walk_callbacks, 
+        void walk_baton, 
+        svn_boolean_t show_hidden, svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_walk_entries2, args)
+
+def svn_wc_walk_entries(*args):
+    """
+    svn_wc_walk_entries(char path, svn_wc_adm_access_t adm_access, svn_wc_entry_callbacks_t walk_callbacks, 
+        void walk_baton, 
+        svn_boolean_t show_hidden, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_walk_entries, args)
+
+def svn_wc_mark_missing_deleted(*args):
+    """svn_wc_mark_missing_deleted(char path, svn_wc_adm_access_t parent, apr_pool_t pool) -> svn_error_t"""
+    return apply(_wc.svn_wc_mark_missing_deleted, args)
+
+def svn_wc_ensure_adm3(*args):
+    """
+    svn_wc_ensure_adm3(char path, char uuid, char url, char repos, svn_revnum_t revision, 
+        svn_depth_t depth, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_ensure_adm3, args)
+
+def svn_wc_ensure_adm2(*args):
+    """
+    svn_wc_ensure_adm2(char path, char uuid, char url, char repos, svn_revnum_t revision, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_ensure_adm2, args)
+
+def svn_wc_ensure_adm(*args):
+    """
+    svn_wc_ensure_adm(char path, char uuid, char url, svn_revnum_t revision, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_ensure_adm, args)
+
+def svn_wc_maybe_set_repos_root(*args):
+    """
+    svn_wc_maybe_set_repos_root(svn_wc_adm_access_t adm_access, char path, char repos, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_maybe_set_repos_root, args)
+# Re-exported values of the C enum svn_wc_status_kind: the possible item
+# statuses reported by the working-copy status functions below.
+svn_wc_status_none = _wc.svn_wc_status_none
+svn_wc_status_unversioned = _wc.svn_wc_status_unversioned
+svn_wc_status_normal = _wc.svn_wc_status_normal
+svn_wc_status_added = _wc.svn_wc_status_added
+svn_wc_status_missing = _wc.svn_wc_status_missing
+svn_wc_status_deleted = _wc.svn_wc_status_deleted
+svn_wc_status_replaced = _wc.svn_wc_status_replaced
+svn_wc_status_modified = _wc.svn_wc_status_modified
+svn_wc_status_merged = _wc.svn_wc_status_merged
+svn_wc_status_conflicted = _wc.svn_wc_status_conflicted
+svn_wc_status_ignored = _wc.svn_wc_status_ignored
+svn_wc_status_obstructed = _wc.svn_wc_status_obstructed
+svn_wc_status_external = _wc.svn_wc_status_external
+svn_wc_status_incomplete = _wc.svn_wc_status_incomplete
+# NOTE(review): SWIG-generated proxy for the C struct svn_wc_status2_t
+# (extended working-copy status, including repository out-of-date fields).
+class svn_wc_status2_t:
+    """Proxy of C svn_wc_status2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status2_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_status2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    # Per-field accessor table mapping attribute names to C getters/setters.
+    __swig_setmethods__["entry"] = _wc.svn_wc_status2_t_entry_set
+    __swig_getmethods__["entry"] = _wc.svn_wc_status2_t_entry_get
+    __swig_setmethods__["text_status"] = _wc.svn_wc_status2_t_text_status_set
+    __swig_getmethods__["text_status"] = _wc.svn_wc_status2_t_text_status_get
+    __swig_setmethods__["prop_status"] = _wc.svn_wc_status2_t_prop_status_set
+    __swig_getmethods__["prop_status"] = _wc.svn_wc_status2_t_prop_status_get
+    __swig_setmethods__["locked"] = _wc.svn_wc_status2_t_locked_set
+    __swig_getmethods__["locked"] = _wc.svn_wc_status2_t_locked_get
+    __swig_setmethods__["copied"] = _wc.svn_wc_status2_t_copied_set
+    __swig_getmethods__["copied"] = _wc.svn_wc_status2_t_copied_get
+    __swig_setmethods__["switched"] = _wc.svn_wc_status2_t_switched_set
+    __swig_getmethods__["switched"] = _wc.svn_wc_status2_t_switched_get
+    __swig_setmethods__["repos_text_status"] = _wc.svn_wc_status2_t_repos_text_status_set
+    __swig_getmethods__["repos_text_status"] = _wc.svn_wc_status2_t_repos_text_status_get
+    __swig_setmethods__["repos_prop_status"] = _wc.svn_wc_status2_t_repos_prop_status_set
+    __swig_getmethods__["repos_prop_status"] = _wc.svn_wc_status2_t_repos_prop_status_get
+    __swig_setmethods__["repos_lock"] = _wc.svn_wc_status2_t_repos_lock_set
+    __swig_getmethods__["repos_lock"] = _wc.svn_wc_status2_t_repos_lock_get
+    __swig_setmethods__["url"] = _wc.svn_wc_status2_t_url_set
+    __swig_getmethods__["url"] = _wc.svn_wc_status2_t_url_get
+    __swig_setmethods__["ood_last_cmt_rev"] = _wc.svn_wc_status2_t_ood_last_cmt_rev_set
+    __swig_getmethods__["ood_last_cmt_rev"] = _wc.svn_wc_status2_t_ood_last_cmt_rev_get
+    __swig_setmethods__["ood_last_cmt_date"] = _wc.svn_wc_status2_t_ood_last_cmt_date_set
+    __swig_getmethods__["ood_last_cmt_date"] = _wc.svn_wc_status2_t_ood_last_cmt_date_get
+    __swig_setmethods__["ood_kind"] = _wc.svn_wc_status2_t_ood_kind_set
+    __swig_getmethods__["ood_kind"] = _wc.svn_wc_status2_t_ood_kind_get
+    __swig_setmethods__["ood_last_cmt_author"] = _wc.svn_wc_status2_t_ood_last_cmt_author_set
+    __swig_getmethods__["ood_last_cmt_author"] = _wc.svn_wc_status2_t_ood_last_cmt_author_get
+    # Bind this proxy to the APR pool that owns the underlying C memory.
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_status2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # Fetch from the C struct, then overlay cached Python-side member state.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_wc_status2_t"""
+        _swig_setattr(self, svn_wc_status2_t, 'this', apply(_wc.new_svn_wc_status2_t, args))
+        _swig_setattr(self, svn_wc_status2_t, 'thisown', 1)
+    def __del__(self, destroy=_wc.delete_svn_wc_status2_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# SWIG 1.3-era pointer-compatibility class; reclasses to the proxy type.
+class svn_wc_status2_tPtr(svn_wc_status2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_status2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_status2_t, 'thisown', 0)
+        self.__class__ = svn_wc_status2_t
+_wc.svn_wc_status2_t_swigregister(svn_wc_status2_tPtr)
+
+# NOTE(review): SWIG-generated proxy for the C struct svn_wc_status_t
+# (original working-copy status; superseded by svn_wc_status2_t above).
+class svn_wc_status_t:
+    """Proxy of C svn_wc_status_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_t, name)
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_status_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    # Per-field accessor table mapping attribute names to C getters/setters.
+    __swig_setmethods__["entry"] = _wc.svn_wc_status_t_entry_set
+    __swig_getmethods__["entry"] = _wc.svn_wc_status_t_entry_get
+    __swig_setmethods__["text_status"] = _wc.svn_wc_status_t_text_status_set
+    __swig_getmethods__["text_status"] = _wc.svn_wc_status_t_text_status_get
+    __swig_setmethods__["prop_status"] = _wc.svn_wc_status_t_prop_status_set
+    __swig_getmethods__["prop_status"] = _wc.svn_wc_status_t_prop_status_get
+    __swig_setmethods__["locked"] = _wc.svn_wc_status_t_locked_set
+    __swig_getmethods__["locked"] = _wc.svn_wc_status_t_locked_get
+    __swig_setmethods__["copied"] = _wc.svn_wc_status_t_copied_set
+    __swig_getmethods__["copied"] = _wc.svn_wc_status_t_copied_get
+    __swig_setmethods__["switched"] = _wc.svn_wc_status_t_switched_set
+    __swig_getmethods__["switched"] = _wc.svn_wc_status_t_switched_get
+    __swig_setmethods__["repos_text_status"] = _wc.svn_wc_status_t_repos_text_status_set
+    __swig_getmethods__["repos_text_status"] = _wc.svn_wc_status_t_repos_text_status_get
+    __swig_setmethods__["repos_prop_status"] = _wc.svn_wc_status_t_repos_prop_status_set
+    __swig_getmethods__["repos_prop_status"] = _wc.svn_wc_status_t_repos_prop_status_get
+    # Bind this proxy to the APR pool that owns the underlying C memory.
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_status_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    # Fetch from the C struct, then overlay cached Python-side member state.
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __init__(self, *args):
+        """__init__(self) -> svn_wc_status_t"""
+        _swig_setattr(self, svn_wc_status_t, 'this', apply(_wc.new_svn_wc_status_t, args))
+        _swig_setattr(self, svn_wc_status_t, 'thisown', 1)
+    def __del__(self, destroy=_wc.delete_svn_wc_status_t):
+        """__del__(self)"""
+        try:
+            if self.thisown: destroy(self)
+        except: pass
+
+
+# SWIG 1.3-era pointer-compatibility class; reclasses to the proxy type.
+class svn_wc_status_tPtr(svn_wc_status_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_status_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_status_t, 'thisown', 0)
+        self.__class__ = svn_wc_status_t
+_wc.svn_wc_status_t_swigregister(svn_wc_status_tPtr)
+
+
+# Thin SWIG-generated wrappers: status duplication, single-item status query,
+# status editors (v3/v2/v1) and repository-lock injection for the status
+# crawl.  Docstrings record the wrapped C signatures.
+def svn_wc_dup_status2(*args):
+    """svn_wc_dup_status2(svn_wc_status2_t orig_stat, apr_pool_t pool) -> svn_wc_status2_t"""
+    return apply(_wc.svn_wc_dup_status2, args)
+
+def svn_wc_dup_status(*args):
+    """svn_wc_dup_status(svn_wc_status_t orig_stat, apr_pool_t pool) -> svn_wc_status_t"""
+    return apply(_wc.svn_wc_dup_status, args)
+
+def svn_wc_status2(*args):
+    """
+    svn_wc_status2(svn_wc_status2_t status, char path, svn_wc_adm_access_t adm_access, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_status2, args)
+
+def svn_wc_status(*args):
+    """
+    svn_wc_status(svn_wc_status_t status, char path, svn_wc_adm_access_t adm_access, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_status, args)
+
+def svn_wc_get_status_editor3(*args):
+    """
+    svn_wc_get_status_editor3(svn_delta_editor_t editor, void edit_baton, void set_locks_baton, 
+        svn_revnum_t edit_revision, svn_wc_adm_access_t anchor, 
+        char target, svn_depth_t depth, 
+        svn_boolean_t get_all, svn_boolean_t no_ignore, 
+        apr_array_header_t ignore_patterns, 
+        svn_wc_status_func2_t status_func, svn_cancel_func_t cancel_func, 
+        svn_wc_traversal_info_t traversal_info, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_get_status_editor3, args)
+
+def svn_wc_get_status_editor2(*args):
+    """
+    svn_wc_get_status_editor2(svn_delta_editor_t editor, void edit_baton, void set_locks_baton, 
+        svn_revnum_t edit_revision, svn_wc_adm_access_t anchor, 
+        char target, apr_hash_t config, 
+        svn_boolean_t recurse, svn_boolean_t get_all, 
+        svn_boolean_t no_ignore, svn_wc_status_func2_t status_func, 
+        svn_cancel_func_t cancel_func, 
+        svn_wc_traversal_info_t traversal_info, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_get_status_editor2, args)
+
+def svn_wc_get_status_editor(*args):
+    """
+    svn_wc_get_status_editor(svn_delta_editor_t editor, void edit_baton, svn_revnum_t edit_revision, 
+        svn_wc_adm_access_t anchor, 
+        char target, apr_hash_t config, svn_boolean_t recurse, 
+        svn_boolean_t get_all, svn_boolean_t no_ignore, 
+        svn_wc_status_func_t status_func, 
+        svn_cancel_func_t cancel_func, svn_wc_traversal_info_t traversal_info, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_get_status_editor, args)
+
+def svn_wc_status_set_repos_locks(*args):
+    """
+    svn_wc_status_set_repos_locks(void set_locks_baton, apr_hash_t locks, char repos_root, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_status_set_repos_locks, args)
+
+# Thin SWIG-generated wrappers for working-copy mutation: copy, delete
+# (v3/v2/v1), add (v2/v1) and add-from-repository helpers.  Docstrings record
+# the wrapped C signatures.
+def svn_wc_copy2(*args):
+    """
+    svn_wc_copy2(char src, svn_wc_adm_access_t dst_parent, char dst_basename, 
+        svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_copy2, args)
+
+def svn_wc_copy(*args):
+    """
+    svn_wc_copy(char src, svn_wc_adm_access_t dst_parent, char dst_basename, 
+        svn_cancel_func_t cancel_func, svn_wc_notify_func_t notify_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_copy, args)
+
+def svn_wc_delete3(*args):
+    """
+    svn_wc_delete3(char path, svn_wc_adm_access_t adm_access, svn_cancel_func_t cancel_func, 
+        svn_wc_notify_func2_t notify_func, 
+        svn_boolean_t keep_local, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_delete3, args)
+
+def svn_wc_delete2(*args):
+    """
+    svn_wc_delete2(char path, svn_wc_adm_access_t adm_access, svn_cancel_func_t cancel_func, 
+        svn_wc_notify_func2_t notify_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_delete2, args)
+
+def svn_wc_delete(*args):
+    """
+    svn_wc_delete(char path, svn_wc_adm_access_t adm_access, svn_cancel_func_t cancel_func, 
+        svn_wc_notify_func_t notify_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_delete, args)
+
+def svn_wc_add2(*args):
+    """
+    svn_wc_add2(char path, svn_wc_adm_access_t parent_access, char copyfrom_url, 
+        svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func, 
+        svn_wc_notify_func2_t notify_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_add2, args)
+
+def svn_wc_add(*args):
+    """
+    svn_wc_add(char path, svn_wc_adm_access_t parent_access, char copyfrom_url, 
+        svn_revnum_t copyfrom_rev, svn_cancel_func_t cancel_func, 
+        svn_wc_notify_func_t notify_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_add, args)
+
+def svn_wc_add_repos_file2(*args):
+    """
+    svn_wc_add_repos_file2(char dst_path, svn_wc_adm_access_t adm_access, char new_text_base_path, 
+        char new_text_path, apr_hash_t new_base_props, 
+        apr_hash_t new_props, 
+        char copyfrom_url, svn_revnum_t copyfrom_rev, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_add_repos_file2, args)
+
+def svn_wc_add_repos_file(*args):
+    """
+    svn_wc_add_repos_file(char dst_path, svn_wc_adm_access_t adm_access, char new_text_path, 
+        apr_hash_t new_props, char copyfrom_url, 
+        svn_revnum_t copyfrom_rev, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_add_repos_file, args)
+
+def svn_wc_remove_from_revision_control(*args):
+    """
+    svn_wc_remove_from_revision_control(svn_wc_adm_access_t adm_access, char name, svn_boolean_t destroy_wf, 
+        svn_boolean_t instant_error, 
+        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_remove_from_revision_control, args)
+
+def svn_wc_resolved_conflict3(*args):
+    """
+    svn_wc_resolved_conflict3(char path, svn_wc_adm_access_t adm_access, svn_boolean_t resolve_text, 
+        svn_boolean_t resolve_props, 
+        svn_depth_t depth, svn_wc_conflict_choice_t conflict_choice, 
+        svn_wc_notify_func2_t notify_func, 
+        svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_resolved_conflict3, args)
+
+def svn_wc_resolved_conflict2(*args):
+    """
+    svn_wc_resolved_conflict2(char path, svn_wc_adm_access_t adm_access, svn_boolean_t resolve_text, 
+        svn_boolean_t resolve_props, 
+        svn_boolean_t recurse, svn_wc_notify_func2_t notify_func, 
+        svn_cancel_func_t cancel_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_resolved_conflict2, args)
+
+def svn_wc_resolved_conflict(*args):
+    """
+    svn_wc_resolved_conflict(char path, svn_wc_adm_access_t adm_access, svn_boolean_t resolve_text, 
+        svn_boolean_t resolve_props, 
+        svn_boolean_t recurse, svn_wc_notify_func_t notify_func, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_resolved_conflict, args)
+
# SWIG-generated thin wrappers around the commit-queue and
# process-committed C routines in the _wc extension module.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_committed_queue_create(*args):
    """svn_wc_committed_queue_create(apr_pool_t pool) -> svn_wc_committed_queue_t"""
    return _wc.svn_wc_committed_queue_create(*args)

def svn_wc_queue_committed(*args):
    """svn_wc_queue_committed(svn_wc_committed_queue_t queue, char path, svn_wc_adm_access_t adm_access, svn_boolean_t recurse, apr_array_header_t wcprop_changes, svn_boolean_t remove_lock, svn_boolean_t remove_changelist, unsigned char digest, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_queue_committed(*args)

def svn_wc_process_committed_queue(*args):
    """svn_wc_process_committed_queue(svn_wc_committed_queue_t queue, svn_wc_adm_access_t adm_access, svn_revnum_t new_revnum, char rev_date, char rev_author, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_process_committed_queue(*args)

def svn_wc_process_committed4(*args):
    """svn_wc_process_committed4(char path, svn_wc_adm_access_t adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum, char rev_date, char rev_author, apr_array_header_t wcprop_changes, svn_boolean_t remove_lock, svn_boolean_t remove_changelist, unsigned char digest, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_process_committed4(*args)

def svn_wc_process_committed3(*args):
    """svn_wc_process_committed3(char path, svn_wc_adm_access_t adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum, char rev_date, char rev_author, apr_array_header_t wcprop_changes, svn_boolean_t remove_lock, unsigned char digest, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_process_committed3(*args)

def svn_wc_process_committed2(*args):
    """svn_wc_process_committed2(char path, svn_wc_adm_access_t adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum, char rev_date, char rev_author, apr_array_header_t wcprop_changes, svn_boolean_t remove_lock, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_process_committed2(*args)

def svn_wc_process_committed(*args):
    """svn_wc_process_committed(char path, svn_wc_adm_access_t adm_access, svn_boolean_t recurse, svn_revnum_t new_revnum, char rev_date, char rev_author, apr_array_header_t wcprop_changes, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_process_committed(*args)
+
# SWIG-generated thin wrappers for the crawl/report C routines.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_crawl_revisions3(*args):
    """svn_wc_crawl_revisions3(char path, svn_wc_adm_access_t adm_access, svn_ra_reporter3_t reporter, void report_baton, svn_boolean_t restore_files, svn_depth_t depth, svn_boolean_t depth_compatibility_trick, svn_boolean_t use_commit_times, svn_wc_notify_func2_t notify_func, svn_wc_traversal_info_t traversal_info, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_crawl_revisions3(*args)

def svn_wc_crawl_revisions2(*args):
    """svn_wc_crawl_revisions2(char path, svn_wc_adm_access_t adm_access, svn_ra_reporter2_t reporter, svn_boolean_t restore_files, svn_boolean_t recurse, svn_boolean_t use_commit_times, svn_wc_notify_func2_t notify_func, svn_wc_traversal_info_t traversal_info, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_crawl_revisions2(*args)

def svn_wc_crawl_revisions(*args):
    """svn_wc_crawl_revisions(char path, svn_wc_adm_access_t adm_access, svn_ra_reporter_t reporter, void report_baton, svn_boolean_t restore_files, svn_boolean_t recurse, svn_boolean_t use_commit_times, svn_wc_notify_func_t notify_func, svn_wc_traversal_info_t traversal_info, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_crawl_revisions(*args)

def svn_wc_is_wc_root(*args):
    """svn_wc_is_wc_root(svn_boolean_t wc_root, char path, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_is_wc_root(*args)

def svn_wc_get_actual_target(*args):
    """svn_wc_get_actual_target(char path, char anchor, char target, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_actual_target(*args)
+
# SWIG-generated thin wrappers for the update/switch editor factories.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_get_update_editor3(*args):
    """svn_wc_get_update_editor3(svn_revnum_t target_revision, svn_wc_adm_access_t anchor, char target, svn_boolean_t use_commit_times, svn_depth_t depth, svn_boolean_t depth_is_sticky, svn_boolean_t allow_unver_obstructions, svn_wc_notify_func2_t notify_func, svn_cancel_func_t cancel_func, svn_wc_conflict_resolver_func_t conflict_func, void conflict_baton, svn_wc_get_file_t fetch_func, void fetch_baton, char diff3_cmd, apr_array_header_t preserved_exts, svn_delta_editor_t editor, void edit_baton, svn_wc_traversal_info_t ti, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_update_editor3(*args)

def svn_wc_get_update_editor2(*args):
    """svn_wc_get_update_editor2(svn_revnum_t target_revision, svn_wc_adm_access_t anchor, char target, svn_boolean_t use_commit_times, svn_boolean_t recurse, svn_wc_notify_func2_t notify_func, svn_cancel_func_t cancel_func, char diff3_cmd, svn_delta_editor_t editor, void edit_baton, svn_wc_traversal_info_t ti, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_update_editor2(*args)

def svn_wc_get_update_editor(*args):
    """svn_wc_get_update_editor(svn_revnum_t target_revision, svn_wc_adm_access_t anchor, char target, svn_boolean_t use_commit_times, svn_boolean_t recurse, svn_wc_notify_func_t notify_func, svn_cancel_func_t cancel_func, char diff3_cmd, svn_delta_editor_t editor, void edit_baton, svn_wc_traversal_info_t ti, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_update_editor(*args)

def svn_wc_get_switch_editor3(*args):
    """svn_wc_get_switch_editor3(svn_revnum_t target_revision, svn_wc_adm_access_t anchor, char target, char switch_url, svn_boolean_t use_commit_times, svn_depth_t depth, svn_boolean_t depth_is_sticky, svn_boolean_t allow_unver_obstructions, svn_wc_notify_func2_t notify_func, svn_cancel_func_t cancel_func, svn_wc_conflict_resolver_func_t conflict_func, void conflict_baton, char diff3_cmd, apr_array_header_t preserved_exts, svn_delta_editor_t editor, void edit_baton, svn_wc_traversal_info_t ti, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_switch_editor3(*args)

def svn_wc_get_switch_editor2(*args):
    """svn_wc_get_switch_editor2(svn_revnum_t target_revision, svn_wc_adm_access_t anchor, char target, char switch_url, svn_boolean_t use_commit_times, svn_boolean_t recurse, svn_wc_notify_func2_t notify_func, svn_cancel_func_t cancel_func, char diff3_cmd, svn_delta_editor_t editor, void edit_baton, svn_wc_traversal_info_t ti, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_switch_editor2(*args)

def svn_wc_get_switch_editor(*args):
    """svn_wc_get_switch_editor(svn_revnum_t target_revision, svn_wc_adm_access_t anchor, char target, char switch_url, svn_boolean_t use_commit_times, svn_boolean_t recurse, svn_wc_notify_func_t notify_func, svn_cancel_func_t cancel_func, char diff3_cmd, svn_delta_editor_t editor, void edit_baton, svn_wc_traversal_info_t ti, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_switch_editor(*args)
+
# SWIG-generated thin wrappers for the versioned-property C routines.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_prop_list(*args):
    """svn_wc_prop_list(apr_hash_t props, char path, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_prop_list(*args)

def svn_wc_prop_get(*args):
    """svn_wc_prop_get(svn_string_t value, char name, char path, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_prop_get(*args)

def svn_wc_prop_set2(*args):
    """svn_wc_prop_set2(char name, svn_string_t value, char path, svn_wc_adm_access_t adm_access, svn_boolean_t skip_checks, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_prop_set2(*args)

def svn_wc_prop_set(*args):
    """svn_wc_prop_set(char name, svn_string_t value, char path, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_prop_set(*args)

def svn_wc_is_normal_prop(*args):
    """svn_wc_is_normal_prop(char name) -> svn_boolean_t"""
    return _wc.svn_wc_is_normal_prop(*args)

def svn_wc_is_wc_prop(*args):
    """svn_wc_is_wc_prop(char name) -> svn_boolean_t"""
    return _wc.svn_wc_is_wc_prop(*args)

def svn_wc_is_entry_prop(*args):
    """svn_wc_is_entry_prop(char name) -> svn_boolean_t"""
    return _wc.svn_wc_is_entry_prop(*args)

def svn_wc_canonicalize_svn_prop(*args):
    """svn_wc_canonicalize_svn_prop(svn_string_t propval_p, char propname, svn_string_t propval, char path, svn_node_kind_t kind, svn_boolean_t skip_some_checks, svn_wc_canonicalize_svn_prop_get_file_t prop_getter, void getter_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_canonicalize_svn_prop(*args)
+
# SWIG-generated thin wrappers for the diff editor factories and diff
# drivers.  Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_get_diff_editor4(*args):
    """svn_wc_get_diff_editor4(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks2_t callbacks, svn_depth_t depth, svn_boolean_t ignore_ancestry, svn_boolean_t use_text_base, svn_boolean_t reverse_order, svn_cancel_func_t cancel_func, apr_array_header_t changelists, svn_delta_editor_t editor, void edit_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_diff_editor4(*args)

def svn_wc_get_diff_editor3(*args):
    """svn_wc_get_diff_editor3(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks2_t callbacks, svn_boolean_t recurse, svn_boolean_t ignore_ancestry, svn_boolean_t use_text_base, svn_boolean_t reverse_order, svn_cancel_func_t cancel_func, svn_delta_editor_t editor, void edit_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_diff_editor3(*args)

def svn_wc_get_diff_editor2(*args):
    """svn_wc_get_diff_editor2(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks_t callbacks, void callback_baton, svn_boolean_t recurse, svn_boolean_t ignore_ancestry, svn_boolean_t use_text_base, svn_boolean_t reverse_order, svn_cancel_func_t cancel_func, svn_delta_editor_t editor, void edit_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_diff_editor2(*args)

def svn_wc_get_diff_editor(*args):
    """svn_wc_get_diff_editor(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks_t callbacks, void callback_baton, svn_boolean_t recurse, svn_boolean_t use_text_base, svn_boolean_t reverse_order, svn_cancel_func_t cancel_func, svn_delta_editor_t editor, void edit_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_diff_editor(*args)

def svn_wc_diff4(*args):
    """svn_wc_diff4(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks2_t callbacks, svn_depth_t depth, svn_boolean_t ignore_ancestry, apr_array_header_t changelists, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_diff4(*args)

def svn_wc_diff3(*args):
    """svn_wc_diff3(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks2_t callbacks, svn_boolean_t recurse, svn_boolean_t ignore_ancestry, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_diff3(*args)

def svn_wc_diff2(*args):
    """svn_wc_diff2(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks_t callbacks, void callback_baton, svn_boolean_t recurse, svn_boolean_t ignore_ancestry, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_diff2(*args)

def svn_wc_diff(*args):
    """svn_wc_diff(svn_wc_adm_access_t anchor, char target, svn_wc_diff_callbacks_t callbacks, void callback_baton, svn_boolean_t recurse, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_diff(*args)

def svn_wc_get_prop_diffs(*args):
    """svn_wc_get_prop_diffs(apr_array_header_t propchanges, apr_hash_t original_props, char path, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_prop_diffs(*args)
# Merge-outcome constants re-exported from the C extension module; the
# names mirror the C enum svn_wc_merge_outcome_t values used by the
# svn_wc_merge* wrappers below.
svn_wc_merge_unchanged = _wc.svn_wc_merge_unchanged
svn_wc_merge_merged = _wc.svn_wc_merge_merged
svn_wc_merge_conflict = _wc.svn_wc_merge_conflict
svn_wc_merge_no_merge = _wc.svn_wc_merge_no_merge
+
# SWIG-generated thin wrappers for the text-merge C routines.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_merge3(*args):
    """svn_wc_merge3(enum svn_wc_merge_outcome_t merge_outcome, char left, char right, char merge_target, svn_wc_adm_access_t adm_access, char left_label, char right_label, char target_label, svn_boolean_t dry_run, char diff3_cmd, apr_array_header_t merge_options, apr_array_header_t prop_diff, svn_wc_conflict_resolver_func_t conflict_func, void conflict_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_merge3(*args)

def svn_wc_merge2(*args):
    """svn_wc_merge2(enum svn_wc_merge_outcome_t merge_outcome, char left, char right, char merge_target, svn_wc_adm_access_t adm_access, char left_label, char right_label, char target_label, svn_boolean_t dry_run, char diff3_cmd, apr_array_header_t merge_options, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_merge2(*args)

def svn_wc_merge(*args):
    """svn_wc_merge(char left, char right, char merge_target, svn_wc_adm_access_t adm_access, char left_label, char right_label, char target_label, svn_boolean_t dry_run, enum svn_wc_merge_outcome_t merge_outcome, char diff3_cmd, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_merge(*args)
+
# SWIG-generated thin wrappers for the property-merge C routines.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_merge_props2(*args):
    """svn_wc_merge_props2(svn_wc_notify_state_t state, char path, svn_wc_adm_access_t adm_access, apr_hash_t baseprops, apr_array_header_t propchanges, svn_boolean_t base_merge, svn_boolean_t dry_run, svn_wc_conflict_resolver_func_t conflict_func, void conflict_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_merge_props2(*args)

def svn_wc_merge_props(*args):
    """svn_wc_merge_props(svn_wc_notify_state_t state, char path, svn_wc_adm_access_t adm_access, apr_hash_t baseprops, apr_array_header_t propchanges, svn_boolean_t base_merge, svn_boolean_t dry_run, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_merge_props(*args)

def svn_wc_merge_prop_diffs(*args):
    """svn_wc_merge_prop_diffs(svn_wc_notify_state_t state, char path, svn_wc_adm_access_t adm_access, apr_array_header_t propchanges, svn_boolean_t base_merge, svn_boolean_t dry_run, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_merge_prop_diffs(*args)
+
# SWIG-generated thin wrappers for pristine-copy, cleanup and relocate
# C routines.  Deprecated apply(f, args) replaced by the equivalent
# f(*args).
def svn_wc_get_pristine_copy_path(*args):
    """svn_wc_get_pristine_copy_path(char path, char pristine_path, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_pristine_copy_path(*args)

def svn_wc_cleanup2(*args):
    """svn_wc_cleanup2(char path, char diff3_cmd, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_cleanup2(*args)

def svn_wc_cleanup(*args):
    """svn_wc_cleanup(char path, svn_wc_adm_access_t optional_adm_access, char diff3_cmd, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_cleanup(*args)

def svn_wc_relocate3(*args):
    """svn_wc_relocate3(char path, svn_wc_adm_access_t adm_access, char from, char to, svn_boolean_t recurse, svn_wc_relocation_validator3_t validator, void validator_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_relocate3(*args)

def svn_wc_relocate2(*args):
    """svn_wc_relocate2(char path, svn_wc_adm_access_t adm_access, char from, char to, svn_boolean_t recurse, svn_wc_relocation_validator2_t validator, void validator_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_relocate2(*args)

def svn_wc_relocate(*args):
    """svn_wc_relocate(char path, svn_wc_adm_access_t adm_access, char from, char to, svn_boolean_t recurse, svn_wc_relocation_validator_t validator, void validator_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_relocate(*args)
+    return apply(_wc.svn_wc_relocate, args)
+
# SWIG-generated thin wrappers for revert and temp-file C routines.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_revert3(*args):
    """svn_wc_revert3(char path, svn_wc_adm_access_t parent_access, svn_depth_t depth, svn_boolean_t use_commit_times, apr_array_header_t changelists, svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_revert3(*args)

def svn_wc_revert2(*args):
    """svn_wc_revert2(char path, svn_wc_adm_access_t parent_access, svn_boolean_t recursive, svn_boolean_t use_commit_times, svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_revert2(*args)

def svn_wc_revert(*args):
    """svn_wc_revert(char path, svn_wc_adm_access_t parent_access, svn_boolean_t recursive, svn_boolean_t use_commit_times, svn_cancel_func_t cancel_func, svn_wc_notify_func_t notify_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_revert(*args)

def svn_wc_create_tmp_file2(*args):
    """svn_wc_create_tmp_file2(apr_file_t fp, char new_name, char path, svn_io_file_del_t delete_when, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_create_tmp_file2(*args)

def svn_wc_create_tmp_file(*args):
    """svn_wc_create_tmp_file(apr_file_t fp, char path, svn_boolean_t delete_on_close, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_create_tmp_file(*args)
+
# SWIG-generated thin wrappers for translation and delta-transmission
# C routines.  Deprecated apply(f, args) replaced by the equivalent
# f(*args).
def svn_wc_translated_file2(*args):
    """svn_wc_translated_file2(char xlated_path, char src, char versioned_file, svn_wc_adm_access_t adm_access, apr_uint32_t flags, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_translated_file2(*args)

def svn_wc_translated_file(*args):
    """svn_wc_translated_file(char xlated_p, char vfile, svn_wc_adm_access_t adm_access, svn_boolean_t force_repair, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_translated_file(*args)

def svn_wc_translated_stream(*args):
    """svn_wc_translated_stream(svn_stream_t stream, char path, char versioned_file, svn_wc_adm_access_t adm_access, apr_uint32_t flags, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_translated_stream(*args)

def svn_wc_transmit_text_deltas2(*args):
    """svn_wc_transmit_text_deltas2(char tempfile, unsigned char digest, char path, svn_wc_adm_access_t adm_access, svn_boolean_t fulltext, svn_delta_editor_t editor, void file_baton, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_transmit_text_deltas2(*args)

def svn_wc_transmit_text_deltas(*args):
    """svn_wc_transmit_text_deltas(char path, svn_wc_adm_access_t adm_access, svn_boolean_t fulltext, svn_delta_editor_t editor, void file_baton, char tempfile, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_transmit_text_deltas(*args)

def svn_wc_transmit_prop_deltas(*args):
    """svn_wc_transmit_prop_deltas(char path, svn_wc_adm_access_t adm_access, svn_wc_entry_t entry, svn_delta_editor_t editor, void baton, char tempfile, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_transmit_prop_deltas(*args)
+
# SWIG-generated thin wrappers for ignore-pattern and lock C routines.
# Deprecated apply(f, args) replaced by the equivalent f(*args).
def svn_wc_get_default_ignores(*args):
    """svn_wc_get_default_ignores(apr_array_header_t patterns, apr_hash_t config, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_default_ignores(*args)

def svn_wc_get_ignores(*args):
    """svn_wc_get_ignores(apr_array_header_t patterns, apr_hash_t config, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_get_ignores(*args)

def svn_wc_match_ignore_list(*args):
    """svn_wc_match_ignore_list(char str, apr_array_header_t list, apr_pool_t pool) -> svn_boolean_t"""
    return _wc.svn_wc_match_ignore_list(*args)

def svn_wc_add_lock(*args):
    """svn_wc_add_lock(char path, svn_lock_t lock, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_add_lock(*args)

def svn_wc_remove_lock(*args):
    """svn_wc_remove_lock(char path, svn_wc_adm_access_t adm_access, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_remove_lock(*args)
class svn_wc_revision_status_t:
    """Proxy of C svn_wc_revision_status_t struct"""
    # Per-field setter/getter tables populated below; consulted by the
    # module-level _swig_setattr/_swig_getattr helpers.
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_revision_status_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the def __getattr__ /
    # def __setattr__ defined later in this class body (the later
    # definitions win in a class statement).
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_revision_status_t, name)
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_revision_status_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    # Map each struct field to its C accessor pair so attribute access
    # on the proxy reads/writes the underlying C struct.
    __swig_setmethods__["min_rev"] = _wc.svn_wc_revision_status_t_min_rev_set
    __swig_getmethods__["min_rev"] = _wc.svn_wc_revision_status_t_min_rev_get
    __swig_setmethods__["max_rev"] = _wc.svn_wc_revision_status_t_max_rev_set
    __swig_getmethods__["max_rev"] = _wc.svn_wc_revision_status_t_max_rev_get
    __swig_setmethods__["switched"] = _wc.svn_wc_revision_status_t_switched_set
    __swig_getmethods__["switched"] = _wc.svn_wc_revision_status_t_switched_get
    __swig_setmethods__["modified"] = _wc.svn_wc_revision_status_t_modified_set
    __swig_getmethods__["modified"] = _wc.svn_wc_revision_status_t_modified_get
    __swig_setmethods__["sparse_checkout"] = _wc.svn_wc_revision_status_t_sparse_checkout_set
    __swig_getmethods__["sparse_checkout"] = _wc.svn_wc_revision_status_t_sparse_checkout_get
    def set_parent_pool(self, parent_pool=None):
      """Record the pool that owns this proxy's memory (defaults to the
      application pool) and set up weak-reference validity tracking."""
      import libsvn.core, weakref
      # Assign through __dict__ to bypass the custom __setattr__ below.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weakref lets us detect pool destruction without keeping the
        # pool alive.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If this attribute was previously assigned via __setattr__, copy
      # the cached Python-side instance state onto the freshly built
      # proxy so wrapper-level attributes survive round trips through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can later restore its
      # instance state.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)

    def __init__(self, *args):
        """__init__(self) -> svn_wc_revision_status_t"""
        # 'thisown' = 1: this proxy owns the C struct and must free it.
        _swig_setattr(self, svn_wc_revision_status_t, 'this', apply(_wc.new_svn_wc_revision_status_t, args))
        _swig_setattr(self, svn_wc_revision_status_t, 'thisown', 1)
    def __del__(self, destroy=_wc.delete_svn_wc_revision_status_t):
        """__del__(self)"""
        # Swallow all errors: exceptions in __del__ during interpreter
        # shutdown are unsafe to propagate.
        try:
            if self.thisown: destroy(self)
        except: pass
+
+
class svn_wc_revision_status_tPtr(svn_wc_revision_status_t):
    # SWIG pointer-wrapper: adopts an existing C pointer (non-owning by
    # default) and then rebrands the instance as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_wc_revision_status_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_revision_status_t, 'thisown', 0)
        self.__class__ = svn_wc_revision_status_t
# Register the pointer class with the C extension so pointers returned
# from C are exposed through this proxy type.
_wc.svn_wc_revision_status_t_swigregister(svn_wc_revision_status_tPtr)
+
+
# SWIG-generated thin wrapper; deprecated apply(f, args) replaced by
# the equivalent f(*args).
def svn_wc_revision_status(*args):
    """svn_wc_revision_status(svn_wc_revision_status_t result_p, char wc_path, char trail_url, svn_boolean_t committed, svn_cancel_func_t cancel_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_revision_status(*args)
+
# SWIG-generated thin wrapper; deprecated apply(f, args) replaced by
# the equivalent f(*args).
def svn_wc_set_changelist(*args):
    """svn_wc_set_changelist(char path, char changelist, svn_wc_adm_access_t adm_access, svn_cancel_func_t cancel_func, svn_wc_notify_func2_t notify_func, apr_pool_t pool) -> svn_error_t"""
    return _wc.svn_wc_set_changelist(*args)
class svn_wc_adm_access_t:
    """Proxy of C svn_wc_adm_access_t struct"""
    # Per-field setter/getter tables consulted by the module-level
    # _swig_setattr/_swig_getattr helpers (empty: opaque struct).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_adm_access_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the def __getattr__ /
    # def __setattr__ defined later in this class body.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_adm_access_t, name)
    # Opaque type: instances only come from the C layer, never Python.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_adm_access_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the pool that owns this proxy's memory (defaults to the
      application pool) and set up weak-reference validity tracking."""
      import libsvn.core, weakref
      # Assign through __dict__ to bypass the custom __setattr__ below.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weakref lets us detect pool destruction without keeping the
        # pool alive.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If this attribute was previously assigned via __setattr__, copy
      # the cached Python-side instance state onto the freshly built
      # proxy so wrapper-level attributes survive round trips through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can later restore its
      # instance state.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
class svn_wc_adm_access_tPtr(svn_wc_adm_access_t):
    # SWIG pointer-wrapper: adopts an existing C pointer (non-owning by
    # default) and then rebrands the instance as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_wc_adm_access_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_adm_access_t, 'thisown', 0)
        self.__class__ = svn_wc_adm_access_t
# Register the pointer class with the C extension so pointers returned
# from C are exposed through this proxy type.
_wc.svn_wc_adm_access_t_swigregister(svn_wc_adm_access_tPtr)
+
class svn_wc_traversal_info_t:
    """Proxy of C svn_wc_traversal_info_t struct"""
    # Per-field setter/getter tables consulted by the module-level
    # _swig_setattr/_swig_getattr helpers (empty: opaque struct).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_traversal_info_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the def __getattr__ /
    # def __setattr__ defined later in this class body.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_traversal_info_t, name)
    # Opaque type: instances only come from the C layer, never Python.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_traversal_info_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the pool that owns this proxy's memory (defaults to the
      application pool) and set up weak-reference validity tracking."""
      import libsvn.core, weakref
      # Assign through __dict__ to bypass the custom __setattr__ below.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weakref lets us detect pool destruction without keeping the
        # pool alive.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If this attribute was previously assigned via __setattr__, copy
      # the cached Python-side instance state onto the freshly built
      # proxy so wrapper-level attributes survive round trips through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can later restore its
      # instance state.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
class svn_wc_traversal_info_tPtr(svn_wc_traversal_info_t):
    # SWIG pointer-wrapper: adopts an existing C pointer (non-owning by
    # default) and then rebrands the instance as the main proxy class.
    def __init__(self, this):
        _swig_setattr(self, svn_wc_traversal_info_t, 'this', this)
        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_traversal_info_t, 'thisown', 0)
        self.__class__ = svn_wc_traversal_info_t
# Register the pointer class with the C extension so pointers returned
# from C are exposed through this proxy type.
_wc.svn_wc_traversal_info_t_swigregister(svn_wc_traversal_info_tPtr)
+
class svn_wc_committed_queue_t:
    """Proxy of C svn_wc_committed_queue_t struct"""
    # Per-field setter/getter tables consulted by the module-level
    # _swig_setattr/_swig_getattr helpers (empty: opaque struct).
    __swig_setmethods__ = {}
    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_committed_queue_t, name, value)
    __swig_getmethods__ = {}
    # NOTE: these lambda hooks are shadowed by the def __getattr__ /
    # def __setattr__ defined later in this class body.
    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_committed_queue_t, name)
    # Opaque type: create instances via svn_wc_committed_queue_create,
    # not this Python constructor.
    def __init__(self): raise RuntimeError, "No constructor defined"
    def __repr__(self):
        return "<%s.%s; proxy of C svn_wc_committed_queue_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
    def set_parent_pool(self, parent_pool=None):
      """Record the pool that owns this proxy's memory (defaults to the
      application pool) and set up weak-reference validity tracking."""
      import libsvn.core, weakref
      # Assign through __dict__ to bypass the custom __setattr__ below.
      self.__dict__["_parent_pool"] = \
        parent_pool or libsvn.core.application_pool;
      if self.__dict__["_parent_pool"]:
        # Weakref lets us detect pool destruction without keeping the
        # pool alive.
        self.__dict__["_is_valid"] = weakref.ref(
          self.__dict__["_parent_pool"]._is_valid)

    def assert_valid(self):
      """Assert that this object is using valid pool memory"""
      if "_is_valid" in self.__dict__:
        assert self.__dict__["_is_valid"](), "Variable has already been deleted"

    def __getattr__(self, name):
      """Get an attribute from this object"""
      self.assert_valid()

      value = _swig_getattr(self, self.__class__, name)



      # If this attribute was previously assigned via __setattr__, copy
      # the cached Python-side instance state onto the freshly built
      # proxy so wrapper-level attributes survive round trips through C.
      members = self.__dict__.get("_members")
      if members is not None:
        old_value = members.get(name)
        if (old_value is not None and value is not None and
            value is not old_value):
          try:
            value.__dict__.update(old_value.__dict__)
          except AttributeError:
            pass


      if hasattr(value, "assert_valid"):
        value.assert_valid()

      return value

    def __setattr__(self, name, value):
      """Set an attribute on this object"""
      self.assert_valid()




      # Remember the Python object so __getattr__ can later restore its
      # instance state.
      self.__dict__.setdefault("_members",{})[name] = value

      return _swig_setattr(self, self.__class__, name, value)
+
+
+class svn_wc_committed_queue_tPtr(svn_wc_committed_queue_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_committed_queue_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_committed_queue_t, 'thisown', 0)
+        self.__class__ = svn_wc_committed_queue_t
+_wc.svn_wc_committed_queue_t_swigregister(svn_wc_committed_queue_tPtr)
+
+
+def svn_wc_diff_callbacks2_invoke_file_changed(*args):
+    """
+    svn_wc_diff_callbacks2_invoke_file_changed(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t contentstate, 
+        svn_wc_notify_state_t propstate, char path, 
+        char tmpfile1, char tmpfile2, svn_revnum_t rev1, 
+        svn_revnum_t rev2, char mimetype1, char mimetype2, 
+        apr_array_header_t propchanges, apr_hash_t originalprops, 
+        void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks2_invoke_file_changed, args)
+
+def svn_wc_diff_callbacks2_invoke_file_added(*args):
+    """
+    svn_wc_diff_callbacks2_invoke_file_added(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t contentstate, 
+        svn_wc_notify_state_t propstate, char path, 
+        char tmpfile1, char tmpfile2, svn_revnum_t rev1, 
+        svn_revnum_t rev2, char mimetype1, char mimetype2, 
+        apr_array_header_t propchanges, apr_hash_t originalprops, 
+        void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks2_invoke_file_added, args)
+
+def svn_wc_diff_callbacks2_invoke_file_deleted(*args):
+    """
+    svn_wc_diff_callbacks2_invoke_file_deleted(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        char tmpfile1, char tmpfile2, char mimetype1, 
+        char mimetype2, apr_hash_t originalprops, 
+        void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks2_invoke_file_deleted, args)
+
+def svn_wc_diff_callbacks2_invoke_dir_added(*args):
+    """
+    svn_wc_diff_callbacks2_invoke_dir_added(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        svn_revnum_t rev, void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks2_invoke_dir_added, args)
+
+def svn_wc_diff_callbacks2_invoke_dir_deleted(*args):
+    """
+    svn_wc_diff_callbacks2_invoke_dir_deleted(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks2_invoke_dir_deleted, args)
+
+def svn_wc_diff_callbacks2_invoke_dir_props_changed(*args):
+    """
+    svn_wc_diff_callbacks2_invoke_dir_props_changed(svn_wc_diff_callbacks2_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        apr_array_header_t propchanges, apr_hash_t original_props, 
+        void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks2_invoke_dir_props_changed, args)
+
+def svn_wc_diff_callbacks_invoke_file_changed(*args):
+    """
+    svn_wc_diff_callbacks_invoke_file_changed(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        char tmpfile1, char tmpfile2, svn_revnum_t rev1, 
+        svn_revnum_t rev2, char mimetype1, 
+        char mimetype2, void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks_invoke_file_changed, args)
+
+def svn_wc_diff_callbacks_invoke_file_added(*args):
+    """
+    svn_wc_diff_callbacks_invoke_file_added(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        char tmpfile1, char tmpfile2, svn_revnum_t rev1, 
+        svn_revnum_t rev2, char mimetype1, 
+        char mimetype2, void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks_invoke_file_added, args)
+
+def svn_wc_diff_callbacks_invoke_file_deleted(*args):
+    """
+    svn_wc_diff_callbacks_invoke_file_deleted(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        char tmpfile1, char tmpfile2, char mimetype1, 
+        char mimetype2, void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks_invoke_file_deleted, args)
+
+def svn_wc_diff_callbacks_invoke_dir_added(*args):
+    """
+    svn_wc_diff_callbacks_invoke_dir_added(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        svn_revnum_t rev, void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks_invoke_dir_added, args)
+
+def svn_wc_diff_callbacks_invoke_dir_deleted(*args):
+    """
+    svn_wc_diff_callbacks_invoke_dir_deleted(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks_invoke_dir_deleted, args)
+
+def svn_wc_diff_callbacks_invoke_props_changed(*args):
+    """
+    svn_wc_diff_callbacks_invoke_props_changed(svn_wc_diff_callbacks_t _obj, svn_wc_adm_access_t adm_access, 
+        svn_wc_notify_state_t state, char path, 
+        apr_array_header_t propchanges, apr_hash_t original_props, 
+        void diff_baton) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_diff_callbacks_invoke_props_changed, args)
+
+def svn_wc_entry_callbacks2_invoke_found_entry(*args):
+    """
+    svn_wc_entry_callbacks2_invoke_found_entry(svn_wc_entry_callbacks2_t _obj, char path, svn_wc_entry_t entry, 
+        void walk_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_entry_callbacks2_invoke_found_entry, args)
+
+def svn_wc_entry_callbacks2_invoke_handle_error(*args):
+    """
+    svn_wc_entry_callbacks2_invoke_handle_error(svn_wc_entry_callbacks2_t _obj, char path, svn_error_t err, 
+        void walk_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_entry_callbacks2_invoke_handle_error, args)
+
+def svn_wc_entry_callbacks_invoke_found_entry(*args):
+    """
+    svn_wc_entry_callbacks_invoke_found_entry(svn_wc_entry_callbacks_t _obj, char path, svn_wc_entry_t entry, 
+        void walk_baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_entry_callbacks_invoke_found_entry, args)
+
+def svn_wc_invoke_notify_func2(*args):
+    """
+    svn_wc_invoke_notify_func2(svn_wc_notify_func2_t _obj, void baton, svn_wc_notify_t notify, 
+        apr_pool_t pool)
+    """
+    return apply(_wc.svn_wc_invoke_notify_func2, args)
+
+def svn_wc_invoke_notify_func(*args):
+    """
+    svn_wc_invoke_notify_func(svn_wc_notify_func_t _obj, void baton, char path, svn_wc_notify_action_t action, 
+        svn_node_kind_t kind, 
+        char mime_type, svn_wc_notify_state_t content_state, 
+        svn_wc_notify_state_t prop_state, 
+        svn_revnum_t revision)
+    """
+    return apply(_wc.svn_wc_invoke_notify_func, args)
+
+def svn_wc_invoke_get_file(*args):
+    """
+    svn_wc_invoke_get_file(svn_wc_get_file_t _obj, void baton, char path, svn_revnum_t revision, 
+        svn_stream_t stream, svn_revnum_t fetched_rev, 
+        apr_hash_t props, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_invoke_get_file, args)
+
+def svn_wc_invoke_conflict_resolver_func(*args):
+    """
+    svn_wc_invoke_conflict_resolver_func(svn_wc_conflict_resolver_func_t _obj, svn_wc_conflict_result_t result, 
+        svn_wc_conflict_description_t description, 
+        void baton, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_invoke_conflict_resolver_func, args)
+
+def svn_wc_invoke_status_func2(*args):
+    """
+    svn_wc_invoke_status_func2(svn_wc_status_func2_t _obj, void baton, char path, 
+        svn_wc_status2_t status)
+    """
+    return apply(_wc.svn_wc_invoke_status_func2, args)
+
+def svn_wc_invoke_status_func(*args):
+    """svn_wc_invoke_status_func(svn_wc_status_func_t _obj, void baton, char path, svn_wc_status_t status)"""
+    return apply(_wc.svn_wc_invoke_status_func, args)
+
+def svn_wc_invoke_canonicalize_svn_prop_get_file(*args):
+    """
+    svn_wc_invoke_canonicalize_svn_prop_get_file(svn_wc_canonicalize_svn_prop_get_file_t _obj, svn_string_t mime_type, 
+        svn_stream_t stream, void baton, 
+        apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_invoke_canonicalize_svn_prop_get_file, args)
+
+def svn_wc_invoke_relocation_validator3(*args):
+    """
+    svn_wc_invoke_relocation_validator3(svn_wc_relocation_validator3_t _obj, void baton, char uuid, 
+        char url, char root_url, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_invoke_relocation_validator3, args)
+
+def svn_wc_invoke_relocation_validator2(*args):
+    """
+    svn_wc_invoke_relocation_validator2(svn_wc_relocation_validator2_t _obj, void baton, char uuid, 
+        char url, svn_boolean_t root, apr_pool_t pool) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_invoke_relocation_validator2, args)
+
+def svn_wc_invoke_relocation_validator(*args):
+    """
+    svn_wc_invoke_relocation_validator(svn_wc_relocation_validator_t _obj, void baton, char uuid, 
+        char url) -> svn_error_t
+    """
+    return apply(_wc.svn_wc_invoke_relocation_validator, args)
+class svn_wc_notify_func2_t:
+    """Proxy of C svn_wc_notify_func2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_notify_func2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_notify_func2_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_notify_func2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_notify_func2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_notify_func2(self, *args)
+
+
+class svn_wc_notify_func2_tPtr(svn_wc_notify_func2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_notify_func2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_notify_func2_t, 'thisown', 0)
+        self.__class__ = svn_wc_notify_func2_t
+_wc.svn_wc_notify_func2_t_swigregister(svn_wc_notify_func2_tPtr)
+
+class svn_wc_notify_func_t:
+    """Proxy of C svn_wc_notify_func_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_notify_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_notify_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_notify_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_notify_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_notify_func(self, *args)
+
+
+class svn_wc_notify_func_tPtr(svn_wc_notify_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_notify_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_notify_func_t, 'thisown', 0)
+        self.__class__ = svn_wc_notify_func_t
+_wc.svn_wc_notify_func_t_swigregister(svn_wc_notify_func_tPtr)
+
+class svn_wc_get_file_t:
+    """Proxy of C svn_wc_get_file_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_get_file_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_get_file_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_get_file_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_get_file_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_get_file(self, *args)
+
+
+class svn_wc_get_file_tPtr(svn_wc_get_file_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_get_file_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_get_file_t, 'thisown', 0)
+        self.__class__ = svn_wc_get_file_t
+_wc.svn_wc_get_file_t_swigregister(svn_wc_get_file_tPtr)
+
+class svn_wc_conflict_resolver_func_t:
+    """Proxy of C svn_wc_conflict_resolver_func_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_conflict_resolver_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_conflict_resolver_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_conflict_resolver_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_conflict_resolver_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_conflict_resolver_func(self, *args)
+
+
+class svn_wc_conflict_resolver_func_tPtr(svn_wc_conflict_resolver_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_conflict_resolver_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_conflict_resolver_func_t, 'thisown', 0)
+        self.__class__ = svn_wc_conflict_resolver_func_t
+_wc.svn_wc_conflict_resolver_func_t_swigregister(svn_wc_conflict_resolver_func_tPtr)
+
+class svn_wc_status_func2_t:
+    """Proxy of C svn_wc_status_func2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_func2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_func2_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_status_func2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_status_func2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_status_func2(self, *args)
+
+
+class svn_wc_status_func2_tPtr(svn_wc_status_func2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_status_func2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_status_func2_t, 'thisown', 0)
+        self.__class__ = svn_wc_status_func2_t
+_wc.svn_wc_status_func2_t_swigregister(svn_wc_status_func2_tPtr)
+
+class svn_wc_status_func_t:
+    """Proxy of C svn_wc_status_func_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_status_func_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_status_func_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_status_func_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_status_func_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_status_func(self, *args)
+
+
+class svn_wc_status_func_tPtr(svn_wc_status_func_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_status_func_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_status_func_t, 'thisown', 0)
+        self.__class__ = svn_wc_status_func_t
+_wc.svn_wc_status_func_t_swigregister(svn_wc_status_func_tPtr)
+
+class svn_wc_canonicalize_svn_prop_get_file_t:
+    """Proxy of C svn_wc_canonicalize_svn_prop_get_file_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_canonicalize_svn_prop_get_file_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_canonicalize_svn_prop_get_file_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_canonicalize_svn_prop_get_file_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_canonicalize_svn_prop_get_file_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_canonicalize_svn_prop_get_file(self, *args)
+
+
+class svn_wc_canonicalize_svn_prop_get_file_tPtr(svn_wc_canonicalize_svn_prop_get_file_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_canonicalize_svn_prop_get_file_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_canonicalize_svn_prop_get_file_t, 'thisown', 0)
+        self.__class__ = svn_wc_canonicalize_svn_prop_get_file_t
+_wc.svn_wc_canonicalize_svn_prop_get_file_t_swigregister(svn_wc_canonicalize_svn_prop_get_file_tPtr)
+
+class svn_wc_relocation_validator3_t:
+    """Proxy of C svn_wc_relocation_validator3_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_relocation_validator3_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_relocation_validator3_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_relocation_validator3_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_relocation_validator3_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_relocation_validator3(self, *args)
+
+
+class svn_wc_relocation_validator3_tPtr(svn_wc_relocation_validator3_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_relocation_validator3_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_relocation_validator3_t, 'thisown', 0)
+        self.__class__ = svn_wc_relocation_validator3_t
+_wc.svn_wc_relocation_validator3_t_swigregister(svn_wc_relocation_validator3_tPtr)
+
+class svn_wc_relocation_validator2_t:
+    """Proxy of C svn_wc_relocation_validator2_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_relocation_validator2_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_relocation_validator2_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_relocation_validator2_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_relocation_validator2_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_relocation_validator2(self, *args)
+
+
+class svn_wc_relocation_validator2_tPtr(svn_wc_relocation_validator2_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_relocation_validator2_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_relocation_validator2_t, 'thisown', 0)
+        self.__class__ = svn_wc_relocation_validator2_t
+_wc.svn_wc_relocation_validator2_t_swigregister(svn_wc_relocation_validator2_tPtr)
+
+class svn_wc_relocation_validator_t:
+    """Proxy of C svn_wc_relocation_validator_t struct"""
+    __swig_setmethods__ = {}
+    __setattr__ = lambda self, name, value: _swig_setattr(self, svn_wc_relocation_validator_t, name, value)
+    __swig_getmethods__ = {}
+    __getattr__ = lambda self, name: _swig_getattr(self, svn_wc_relocation_validator_t, name)
+    def __init__(self): raise RuntimeError, "No constructor defined"
+    def __repr__(self):
+        return "<%s.%s; proxy of C svn_wc_relocation_validator_t instance at %s>" % (self.__class__.__module__, self.__class__.__name__, self.this,)
+    def set_parent_pool(self, parent_pool=None):
+      """Create a new proxy object for svn_wc_relocation_validator_t"""
+      import libsvn.core, weakref
+      self.__dict__["_parent_pool"] = \
+        parent_pool or libsvn.core.application_pool;
+      if self.__dict__["_parent_pool"]:
+        self.__dict__["_is_valid"] = weakref.ref(
+          self.__dict__["_parent_pool"]._is_valid)
+
+    def assert_valid(self):
+      """Assert that this object is using valid pool memory"""
+      if "_is_valid" in self.__dict__:
+        assert self.__dict__["_is_valid"](), "Variable has already been deleted"
+
+    def __getattr__(self, name):
+      """Get an attribute from this object"""
+      self.assert_valid()
+
+      value = _swig_getattr(self, self.__class__, name)
+
+
+
+      members = self.__dict__.get("_members")
+      if members is not None:
+        old_value = members.get(name)
+        if (old_value is not None and value is not None and
+            value is not old_value):
+          try:
+            value.__dict__.update(old_value.__dict__)
+          except AttributeError:
+            pass
+
+
+      if hasattr(value, "assert_valid"):
+        value.assert_valid()
+
+      return value
+
+    def __setattr__(self, name, value):
+      """Set an attribute on this object"""
+      self.assert_valid()
+
+
+
+
+      self.__dict__.setdefault("_members",{})[name] = value
+
+      return _swig_setattr(self, self.__class__, name, value)
+
+    def __call__(self, *args):
+      return svn_wc_invoke_relocation_validator(self, *args)
+
+
+class svn_wc_relocation_validator_tPtr(svn_wc_relocation_validator_t):
+    def __init__(self, this):
+        _swig_setattr(self, svn_wc_relocation_validator_t, 'this', this)
+        if not hasattr(self,"thisown"): _swig_setattr(self, svn_wc_relocation_validator_t, 'thisown', 0)
+        self.__class__ = svn_wc_relocation_validator_t
+_wc.svn_wc_relocation_validator_t_swigregister(svn_wc_relocation_validator_tPtr)
+
+
+def svn_wc_swig_init_asp_dot_net_hack(*args):
+    """svn_wc_swig_init_asp_dot_net_hack(apr_pool_t pool) -> svn_error_t"""
+    return apply(_wc.svn_wc_swig_init_asp_dot_net_hack, args)
+svn_wc_swig_init_asp_dot_net_hack() 
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythoncom.py b/depot_tools/release/win/python_24/Lib/site-packages/pythoncom.py
new file mode 100644
index 0000000..ca7fda4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythoncom.py
@@ -0,0 +1,3 @@
+# Magic utility that "redirects" to pythoncomxx.dll
+import pywintypes
+pywintypes.__import_pywin32_system_module__("pythoncom", globals())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/Pythonwin.exe b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/Pythonwin.exe
new file mode 100644
index 0000000..441ae82
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/Pythonwin.exe
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/dde.pyd b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/dde.pyd
new file mode 100644
index 0000000..f32369f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/dde.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/license.txt b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/license.txt
new file mode 100644
index 0000000..dd05084
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/license.txt
@@ -0,0 +1,30 @@
+Unless stated in the specfic source file, this work is
+Copyright (c) 1994-2001, Mark Hammond 
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without 
+modification, are permitted provided that the following conditions 
+are met:
+
+Redistributions of source code must retain the above copyright notice, 
+this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright 
+notice, this list of conditions and the following disclaimer in 
+the documentation and/or other materials provided with the distribution.
+
+Neither name of Mark Hammond nor the name of contributors may be used 
+to endorse or promote products derived from this software without 
+specific prior written permission. 
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS
+IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/basictimerapp.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/basictimerapp.py
new file mode 100644
index 0000000..72027ab0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/basictimerapp.py
@@ -0,0 +1,222 @@
+# basictimerapp - a really simple timer application.
+# This should be run using the command line:
+# pythonwin /app demos\basictimerapp.py
+import win32ui
+import win32api
+import win32con
+import sys
+from pywin.framework import app, cmdline, dlgappcore, cmdline
+import timer
+import time
+import string
+import regsub
+
+class TimerAppDialog(dlgappcore.AppDialog):
+	softspace=1
+	def __init__(self, appName = ""):
+		dlgappcore.AppDialog.__init__(self, win32ui.IDD_GENERAL_STATUS)
+		self.timerAppName = appName
+		self.argOff = 0
+		if len(self.timerAppName)==0:
+			if len(sys.argv)>1 and sys.argv[1][0]!='/': 
+				self.timerAppName = sys.argv[1]
+				self.argOff = 1
+
+	def PreDoModal(self):
+#		sys.stderr = sys.stdout
+		pass
+
+	def ProcessArgs(self, args):
+		for arg in args:
+			if arg=="/now":
+				self.OnOK()
+
+	def OnInitDialog(self):
+		win32ui.SetProfileFileName('pytimer.ini')
+		self.title = win32ui.GetProfileVal(self.timerAppName, "Title", "Remote System Timer")
+		self.buildTimer = win32ui.GetProfileVal(self.timerAppName, "Timer", "EachMinuteIntervaler()")
+		self.doWork = win32ui.GetProfileVal(self.timerAppName, "Work", "DoDemoWork()")
+		# replace "\n" with real \n.
+		self.doWork =  regsub.gsub('\\\\n','\n', self.doWork)
+		dlgappcore.AppDialog.OnInitDialog(self)
+
+		self.SetWindowText(self.title)
+		self.prompt1 = self.GetDlgItem(win32ui.IDC_PROMPT1)
+		self.prompt2 = self.GetDlgItem(win32ui.IDC_PROMPT2)
+		self.prompt3 = self.GetDlgItem(win32ui.IDC_PROMPT3)
+		self.butOK = self.GetDlgItem(win32con.IDOK)
+		self.butCancel = self.GetDlgItem(win32con.IDCANCEL)
+		self.prompt1.SetWindowText("Python Timer App")
+		self.prompt2.SetWindowText("")
+		self.prompt3.SetWindowText("")
+		self.butOK.SetWindowText("Do it now")
+		self.butCancel.SetWindowText("Close")
+
+		self.timerManager = TimerManager(self)
+		self.ProcessArgs(sys.argv[self.argOff:])
+		self.timerManager.go()
+		return 1
+
+	def OnDestroy(self,msg):
+		dlgappcore.AppDialog.OnDestroy(self, msg)
+		self.timerManager.stop()
+	def OnOK(self):
+		# stop the timer, then restart after setting special boolean
+		self.timerManager.stop()
+		self.timerManager.bConnectNow = 1
+		self.timerManager.go()
+		return
+#	def OnCancel(self): default behaviour - cancel == close.
+#		return 
+
+class TimerManager:
+	def __init__(self, dlg):
+		self.dlg = dlg
+		self.timerId = None
+		self.intervaler = eval(self.dlg.buildTimer)
+		self.bConnectNow = 0
+		self.bHaveSetPrompt1 = 0
+	def CaptureOutput(self):
+		self.oldOut = sys.stdout
+		self.oldErr = sys.stderr
+		sys.stdout = sys.stderr = self
+		self.bHaveSetPrompt1 = 0
+	def ReleaseOutput(self):
+		sys.stdout = self.oldOut
+		sys.stderr = self.oldErr
+	def write(self, str):
+		s = string.strip(str)
+		if len(s):
+			if self.bHaveSetPrompt1:
+				dest = self.dlg.prompt3
+			else:
+				dest = self.dlg.prompt1
+				self.bHaveSetPrompt1 = 1
+			dest.SetWindowText(s)
+	def go(self):
+		self.OnTimer(None,None)
+	def stop(self):
+		if self.timerId: timer.kill_timer (self.timerId)
+		self.timerId = None
+
+	def OnTimer(self, id, timeVal):
+		if id: timer.kill_timer (id)
+		if self.intervaler.IsTime() or self.bConnectNow :
+			# do the work.
+			try:
+				self.dlg.SetWindowText(self.dlg.title + " - Working...")
+				self.dlg.butOK.EnableWindow(0)
+				self.dlg.butCancel.EnableWindow(0)
+				self.CaptureOutput()
+				try:
+					exec(self.dlg.doWork)
+					print "The last operation completed successfully."
+				except:
+					t, v, tb = sys.exc_info()
+					str = "Failed: %s: %s" % (t, `v`)
+					print str
+					self.oldErr.write(str)
+					tb = None # Prevent cycle
+			finally:
+				self.ReleaseOutput()
+				self.dlg.butOK.EnableWindow()
+				self.dlg.butCancel.EnableWindow()
+				self.dlg.SetWindowText(self.dlg.title)
+		else:
+			now = time.time()
+			nextTime = self.intervaler.GetNextTime()
+			if nextTime:
+				timeDiffSeconds = nextTime - now
+				timeDiffMinutes = int(timeDiffSeconds / 60)
+				timeDiffSeconds = timeDiffSeconds % 60
+				timeDiffHours = int(timeDiffMinutes / 60)
+				timeDiffMinutes = timeDiffMinutes % 60
+				self.dlg.prompt1.SetWindowText("Next connection due in %02d:%02d:%02d" % (timeDiffHours,timeDiffMinutes,timeDiffSeconds))
+		self.timerId = timer.set_timer (self.intervaler.GetWakeupInterval(), self.OnTimer)
+		self.bConnectNow = 0
+		
+class TimerIntervaler:
+	def __init__(self):
+		self.nextTime = None
+		self.wakeUpInterval = 2000
+	def GetWakeupInterval(self):
+		return self.wakeUpInterval
+	def GetNextTime(self):
+		return self.nextTime
+	def IsTime(self):
+		now = time.time()
+		if self.nextTime is None:
+			self.nextTime = self.SetFirstTime(now)
+		ret = 0
+		if now >= self.nextTime:
+			ret = 1
+			self.nextTime = self.SetNextTime(self.nextTime, now)
+			# do the work.
+		return ret
+	
+class EachAnyIntervaler(TimerIntervaler):
+	def __init__(self, timeAt, timePos, timeAdd, wakeUpInterval = None):
+		TimerIntervaler.__init__(self)
+		self.timeAt = timeAt
+		self.timePos = timePos
+		self.timeAdd = timeAdd
+		if wakeUpInterval:
+			self.wakeUpInterval = wakeUpInterval
+	def SetFirstTime(self, now):
+		timeTup = time.localtime(now)
+		lst = []
+		for item in timeTup:
+			lst.append(item)
+		bAdd = timeTup[self.timePos] > self.timeAt
+		lst[self.timePos] = self.timeAt
+		for pos in range(self.timePos+1, 6):
+			lst[pos]=0
+		ret = time.mktime(tuple(lst))
+		if (bAdd):
+			ret = ret + self.timeAdd
+		return ret;			
+		
+	def SetNextTime(self, lastTime, now):
+		return lastTime + self.timeAdd
+
+class EachMinuteIntervaler(EachAnyIntervaler):
+	def __init__(self, at=0):
+		EachAnyIntervaler.__init__(self, at, 5, 60, 2000)
+
+class EachHourIntervaler(EachAnyIntervaler):
+	def __init__(self, at=0):
+		EachAnyIntervaler.__init__(self, at, 4, 3600, 10000)
+
+class EachDayIntervaler(EachAnyIntervaler):
+	def __init__(self,at=0):
+		EachAnyIntervaler.__init__(self, at, 3, 86400, 10000)
+
+class TimerDialogApp(dlgappcore.DialogApp):
+	def CreateDialog(self):
+		return TimerAppDialog()
+
+def DoDemoWork():
+	print "Doing the work..."
+	print "About to connect"
+	win32api.MessageBeep(win32con.MB_ICONASTERISK)
+	win32api.Sleep(2000)
+	print "Doing something else..."
+	win32api.MessageBeep(win32con.MB_ICONEXCLAMATION)
+	win32api.Sleep(2000)
+	print "More work."
+	win32api.MessageBeep(win32con.MB_ICONHAND)
+	win32api.Sleep(2000)
+	print "The last bit."
+	win32api.MessageBeep(win32con.MB_OK)
+	win32api.Sleep(2000)
+
+app = TimerDialogApp()
+
+def t():
+	t = TimerAppDialog("Test Dialog")
+	t.DoModal()
+	return t
+
+if __name__=='__main__':
+		import demoutils
+		demoutils.NeedApp()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/customprint.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/customprint.py
new file mode 100644
index 0000000..31c899a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/customprint.py
@@ -0,0 +1,194 @@
+# A demo of an Application object that has some custom print functionality.
+
+# If you desire, you can also run this from inside Pythonwin, in which
+# case it will do the demo inside the Pythonwin environment.
+
+# This sample was contributed by Roger Burnham.
+
+from pywin.mfc import docview, dialog, afxres
+from pywin.framework import app
+
+import win32con
+import win32ui
+import win32api
+
+PRINTDLGORD = 1538
+IDC_PRINT_MAG_EDIT = 1010
+
+
+class PrintDemoTemplate(docview.DocTemplate):
+    def _SetupSharedMenu_(self):
+        pass
+
+class PrintDemoView(docview.ScrollView):
+        
+    def OnInitialUpdate(self):
+        ret = self._obj_.OnInitialUpdate()
+        self.colors = {'Black'  : (0x00<<0) + (0x00<<8) + (0x00<<16),
+                       'Red'    : (0xff<<0) + (0x00<<8) + (0x00<<16),
+                       'Green'  : (0x00<<0) + (0xff<<8) + (0x00<<16),
+                       'Blue'   : (0x00<<0) + (0x00<<8) + (0xff<<16),
+                       'Cyan'   : (0x00<<0) + (0xff<<8) + (0xff<<16),
+                       'Magenta': (0xff<<0) + (0x00<<8) + (0xff<<16),
+                       'Yellow' : (0xff<<0) + (0xff<<8) + (0x00<<16),
+                       }
+        self.pens = {}
+        for name, color in self.colors.items():
+            self.pens[name] = win32ui.CreatePen(win32con.PS_SOLID,
+                                                 5, color)
+        self.pen = None
+        self.size = (128,128)
+        self.SetScaleToFitSize(self.size)
+        self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT)
+        self.HookCommand(self.OnFilePrintPreview,
+                         win32ui.ID_FILE_PRINT_PREVIEW)
+        return ret
+
+    def OnDraw(self, dc):
+        oldPen = None
+        x,y = self.size
+        delta = 2
+        colors = self.colors.keys()
+        colors.sort()
+        colors = colors*2
+        for color in colors:
+            if oldPen is None:
+                oldPen = dc.SelectObject(self.pens[color])
+            else:
+                dc.SelectObject(self.pens[color])
+            dc.MoveTo((  delta,   delta))
+            dc.LineTo((x-delta,   delta))
+            dc.LineTo((x-delta, y-delta))
+            dc.LineTo((  delta, y-delta))
+            dc.LineTo((  delta,   delta))
+            delta = delta + 4
+            if x-delta <= 0 or y-delta <= 0:
+                break
+        dc.SelectObject(oldPen)
+
+    def OnPrepareDC (self, dc, pInfo):
+        if dc.IsPrinting():
+            mag = self.prtDlg['mag']
+            dc.SetMapMode(win32con.MM_ANISOTROPIC);
+            dc.SetWindowOrg((0, 0))
+            dc.SetWindowExt((1, 1))
+            dc.SetViewportOrg((0, 0))
+            dc.SetViewportExt((mag, mag))
+
+    def OnPreparePrinting(self, pInfo):
+        flags = (win32ui.PD_USEDEVMODECOPIES|
+                 win32ui.PD_PAGENUMS|
+                 win32ui.PD_NOPAGENUMS|
+                 win32ui.PD_NOSELECTION)
+        self.prtDlg = ImagePrintDialog(pInfo, PRINTDLGORD, flags)
+        pInfo.SetPrintDialog(self.prtDlg)
+        pInfo.SetMinPage(1)
+        pInfo.SetMaxPage(1)
+        pInfo.SetFromPage(1)
+        pInfo.SetToPage(1)
+        ret = self.DoPreparePrinting(pInfo)
+        return ret
+
+    def OnBeginPrinting(self, dc, pInfo):
+        return self._obj_.OnBeginPrinting(dc, pInfo)
+
+    def OnEndPrinting(self, dc, pInfo):
+        del self.prtDlg
+        return self._obj_.OnEndPrinting(dc, pInfo)
+
+    def OnFilePrintPreview(self, *arg):
+        self._obj_.OnFilePrintPreview()
+
+    def OnFilePrint(self, *arg):
+        self._obj_.OnFilePrint()
+        
+    def OnPrint(self, dc, pInfo):
+        doc = self.GetDocument()
+        metrics = dc.GetTextMetrics()
+        cxChar = metrics['tmAveCharWidth']
+        cyChar = metrics['tmHeight']
+        left, top, right, bottom = pInfo.GetDraw()
+        dc.TextOut(0, 2*cyChar, doc.GetTitle())
+        top = top + (7*cyChar)/2
+        dc.MoveTo(left, top)
+        dc.LineTo(right, top)
+        top = top + cyChar
+        # this seems to have no effect...
+        # get what I want with the dc.SetWindowOrg calls
+        pInfo.SetDraw((left, top, right, bottom))
+        dc.SetWindowOrg((0, -top))
+
+        self.OnDraw(dc)
+        dc.SetTextAlign(win32con.TA_LEFT|win32con.TA_BOTTOM)
+
+        rect = self.GetWindowRect()
+        rect = self.ScreenToClient(rect)
+        height = (rect[3]-rect[1])
+        dc.SetWindowOrg((0, -(top+height+cyChar)))
+        dc.MoveTo(left, 0)
+        dc.LineTo(right, 0)
+
+        x = 0
+        y = (3*cyChar)/2
+
+        dc.TextOut(x, y, doc.GetTitle())
+        y = y + cyChar
+
+
+class PrintDemoApp(app.CApp):
+    def __init__(self):
+        app.CApp.__init__(self)
+    
+    def InitInstance(self):
+        template = PrintDemoTemplate(None, None,
+                                     None, PrintDemoView)
+        self.AddDocTemplate(template)
+        self._obj_.InitMDIInstance()
+        self.LoadMainFrame()
+        doc = template.OpenDocumentFile(None)
+        doc.SetTitle('Custom Print Document')
+
+        
+class ImagePrintDialog(dialog.PrintDialog):
+
+    sectionPos = 'Image Print Demo'
+    
+    def __init__(self, pInfo, dlgID, flags=win32ui.PD_USEDEVMODECOPIES):
+        dialog.PrintDialog.__init__(self, pInfo, dlgID, flags=flags)
+        mag = win32ui.GetProfileVal(self.sectionPos,
+                                    'Document Magnification',
+                                    0)
+        if mag <= 0:
+            mag = 2
+            win32ui.WriteProfileVal(self.sectionPos,
+                                    'Document Magnification',
+                                    mag)
+                
+        self['mag'] = mag
+
+    def OnInitDialog(self):
+        self.magCtl = self.GetDlgItem(IDC_PRINT_MAG_EDIT)
+        self.magCtl.SetWindowText(`self['mag']`)
+        return dialog.PrintDialog.OnInitDialog(self)
+    def OnOK(self):
+        dialog.PrintDialog.OnOK(self)
+        strMag = self.magCtl.GetWindowText()
+        try:
+            self['mag'] = string.atoi(strMag)
+        except:
+            pass
+        win32ui.WriteProfileVal(self.sectionPos,
+                                'Document Magnification',
+                                self['mag'])
+
+
+if __name__=='__main__':
+        # Running under Pythonwin
+        def test():
+                template = PrintDemoTemplate(None, None,
+                                     None, PrintDemoView)
+                template.OpenDocumentFile(None)
+        test()
+else:
+        app = PrintDemoApp()
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/demoutils.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/demoutils.py
new file mode 100644
index 0000000..74352011
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/demoutils.py
@@ -0,0 +1,52 @@
+# Utilities for the demos
+
+import sys, win32api, win32con, win32ui
+
+NotScriptMsg = """\
+This demo program is not designed to be run as a Script, but is
+probably used by some other test program.  Please try another demo.
+"""
+
+NeedGUIMsg = """\
+This demo program can only be run from inside of Pythonwin
+
+You must start Pythonwin, and select 'Run' from the toolbar or File menu
+"""
+
+
+NeedAppMsg = """\
+This demo program is a 'Pythonwin Application'.
+
+It is more demo code than an example of Pythonwin's capabilities.
+
+To run it, you must execute the command:
+pythonwin.exe /app "%s"
+
+Would you like to execute it now?
+"""
+
+def NotAScript():
+	import win32ui
+	win32ui.MessageBox(NotScriptMsg, "Demos")
+
+def NeedGoodGUI():
+	from pywin.framework.app import HaveGoodGUI
+	rc = HaveGoodGUI()
+	if not rc:
+		win32ui.MessageBox(NeedGUIMsg, "Demos")
+	return rc
+
+def NeedApp():
+	import win32ui
+	rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO)
+	if rc==win32con.IDYES:
+		try:
+			parent = win32ui.GetMainFrame().GetSafeHwnd()
+			win32api.ShellExecute(parent, None, 'pythonwin.exe', '/app "%s"' % sys.argv[0], None, 1)
+		except win32api.error, details:
+			win32ui.MessageBox("Error executing command - %s" % (details), "Demos")
+
+
+if __name__=='__main__':
+	import demoutils
+	demoutils.NotAScript()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/dlgappdemo.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/dlgappdemo.py
new file mode 100644
index 0000000..3390c8c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/dlgappdemo.py
@@ -0,0 +1,48 @@
+# dlgappdemo - a demo of a dialog application.
+# This is a demonstration of both a custom "application" module,
+# and a Python program in a dialog box.
+#
+# NOTE:  You CAN NOT import this module from either PythonWin or Python.
+# This module must be specified on the commandline to PythonWin only.
+# eg, PythonWin /app dlgappdemo.py
+
+from pywin.framework import dlgappcore, app
+import win32ui
+import sys
+import regsub
+
+class TestDialogApp(dlgappcore.DialogApp):
+	def CreateDialog(self):
+		return TestAppDialog()
+
+	
+class TestAppDialog(dlgappcore.AppDialog):
+	def __init__(self):
+		self.edit = None
+		dlgappcore.AppDialog.__init__(self, win32ui.IDD_LARGE_EDIT)
+	def OnInitDialog(self):
+		self.SetWindowText('Test dialog application')
+		self.edit = self.GetDlgItem(win32ui.IDC_EDIT1)
+		print "Hello from Python"
+		print "args are:", 
+		for arg in sys.argv:
+			print arg
+		return 1
+
+	def PreDoModal(self):
+		sys.stdout = sys.stderr = self
+
+	def write(self, str):
+		if self.edit:
+			self.edit.SetSel(-2)
+			# translate \n to \r\n
+			self.edit.ReplaceSel(regsub.gsub('\n','\r\n',str))
+		else:
+			win32ui.OutputDebug("dlgapp - no edit control! >>\n%s\n<<\n" % str )
+
+app.AppBuilder = TestDialogApp
+
+if __name__=='__main__':
+	import demoutils
+	demoutils.NeedApp()
+	
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/dojobapp.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/dojobapp.py
new file mode 100644
index 0000000..bdd22cd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/dojobapp.py
@@ -0,0 +1,62 @@
+# dojobapp - do a job, show the result in a dialog, and exit.
+#
+# Very simple - fairly minimal dialog based app.
+#
+# This should be run using the command line:
+# pythonwin /app demos\dojobapp.py
+
+import win32ui
+import win32api
+import win32con
+import sys
+from pywin.framework import app, dlgappcore
+import string
+
+class DoJobAppDialog(dlgappcore.AppDialog):
+	softspace=1
+	def __init__(self, appName = ""):
+		self.appName = appName
+		dlgappcore.AppDialog.__init__(self, win32ui.IDD_GENERAL_STATUS)
+
+	def PreDoModal(self):
+		pass
+
+	def ProcessArgs(self, args):
+		pass
+
+	def OnInitDialog(self):
+		self.SetWindowText(self.appName)
+		butCancel = self.GetDlgItem(win32con.IDCANCEL)
+		butCancel.ShowWindow(win32con.SW_HIDE)
+		p1 = self.GetDlgItem(win32ui.IDC_PROMPT1)
+		p2 = self.GetDlgItem(win32ui.IDC_PROMPT2)
+
+		# Do something here!
+
+		p1.SetWindowText("Hello there")
+		p2.SetWindowText("from the demo")
+	def OnDestroy(self,msg):
+		pass
+#	def OnOK(self):
+#		pass	
+#	def OnCancel(self): default behaviour - cancel == close.
+#		return 
+
+class DoJobDialogApp(dlgappcore.DialogApp):
+	def CreateDialog(self):
+		return DoJobAppDialog("Do Something")
+
+class CopyToDialogApp(DoJobDialogApp):
+	def __init__(self):
+		DoJobDialogApp.__init__(self)
+
+app.AppBuilder = DoJobDialogApp
+
+def t():
+	t = DoJobAppDialog("Copy To")
+	t.DoModal()
+	return t
+	
+if __name__=='__main__':
+	import demoutils
+	demoutils.NeedApp()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/helloapp.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/helloapp.py
new file mode 100644
index 0000000..20ff3706
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/app/helloapp.py
@@ -0,0 +1,44 @@
+##
+## helloapp.py
+##
+##
+## A nice, small 'hello world' Pythonwin application.
+## NOT an MDI application - just a single, normal, top-level window.
+##
+## MUST be run with the command line "pythonwin.exe /app helloapp.py"
+## (or if you are really keen, rename "pythonwin.exe" to something else, then
+## using MSVC or similar, edit the string section in the .EXE to name this file)
+##
+## Originally by Willy Heineman <wheineman@uconect.net>
+
+
+import win32con
+import win32ui
+from pywin.mfc import window, dialog, thread, afxres
+
+# The main frame.
+# Does almost nothing at all - doesn't even create a child window!
+class HelloWindow(window.Wnd):
+    def __init__(self):
+        # The window.Wnd ctor creates a Window object, and places it in
+        # self._obj_.  Note the window object exists, but the window itself
+        # does not!
+        window.Wnd.__init__(self, win32ui.CreateWnd())
+
+        # Now we ask the window object to create the window itself.
+        self._obj_.CreateWindowEx(win32con.WS_EX_CLIENTEDGE, \
+            win32ui.RegisterWndClass(0, 0, win32con.COLOR_WINDOW + 1), \
+            'Hello World!', win32con.WS_OVERLAPPEDWINDOW, \
+            (100, 100, 400, 300), None, 0, None)
+
+# The application object itself.       
+class HelloApp(thread.WinApp):
+
+    def InitInstance(self):
+        self.frame = HelloWindow()
+        self.frame.ShowWindow(win32con.SW_SHOWNORMAL)
+        # We need to tell MFC what our main frame is.
+        self.SetMainFrame(self.frame)
+        
+# Now create the application object itself!   
+app = HelloApp()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py
new file mode 100644
index 0000000..a7af9b10
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/cmdserver.py
@@ -0,0 +1,108 @@
+# cmdserver.py
+
+# Demo code that is not Pythonwin related, but too good to throw away...
+
+import win32api
+import sys
+from pywin.framework import winout
+
+import thread, sys
+
+import traceback
+
+class ThreadWriter:
+	"Assign an instance to sys.stdout for per-thread printing objects - Courtesy Guido!"
+	def __init__(self):
+		"Constructor -- initialize the table of writers"
+		self.writers = {}
+		self.origStdOut = None
+	def register(self, writer):
+		"Register the writer for the current thread"
+		self.writers[thread.get_ident()] = writer
+		if self.origStdOut is None:
+			self.origStdOut = sys.stdout
+			sys.stdout = self		
+
+	def unregister(self):
+		"Remove the writer for the current thread, if any"
+		try:
+			del self.writers[thread.get_ident()]
+		except KeyError:
+			pass
+		if len(self.writers)==0:
+			sys.stdout = self.origStdOut
+			self.origStdOut = None
+
+	def getwriter(self):
+		"Return the current thread's writer, default sys.stdout"
+		try:
+			return self.writers[thread.get_ident()]
+		except KeyError:
+			return self.origStdOut
+
+	def write(self, str):
+		"Write to the current thread's writer, default sys.stdout"
+		self.getwriter().write(str)
+
+def Test():
+	num=1
+	while num<1000:
+		print 'Hello there no ' + str(num)
+		win32api.Sleep(50)
+		num = num + 1
+
+class flags:
+	SERVER_BEST = 0
+	SERVER_IMMEDIATE = 1
+	SERVER_THREAD = 2
+	SERVER_PROCESS = 3
+	
+def StartServer( cmd, title=None, bCloseOnEnd=0, serverFlags = flags.SERVER_BEST ):
+	out = winout.WindowOutput( title, None, winout.flags.WQ_IDLE )
+	if not title:
+		title=cmd
+	out.Create(title)
+#	ServerThread((out, cmd, title, bCloseOnEnd))
+#	out = sys.stdout
+	thread.start_new_thread( ServerThread, (out, cmd, title, bCloseOnEnd) )
+
+def ServerThread(myout, cmd, title, bCloseOnEnd):
+	try:
+		writer.register(myout)
+		print 'Executing "%s"\n' % cmd
+		bOK = 1
+		try:
+			import __main__
+			exec (cmd+'\n', __main__.__dict__)
+		except:
+			bOK = 0
+		if bOK:
+			print "Command terminated without errors."
+		else:
+			t, v, tb = sys.exc_info()
+			print t, ': ', v
+			traceback.print_tb(tb)
+			tb = None # prevent a cycle
+			print "Command terminated with an unhandled exception"
+		writer.unregister()
+		if bOK and bCloseOnEnd:
+			myout.frame.DestroyWindow()
+
+	# Unhandled exception of any kind in a thread kills the gui!
+	except:
+		t, v, tb = sys.exc_info()
+		print t, ': ', v
+		traceback.print_tb(tb)
+		tb = None
+		print "Thread failed"
+
+# assist for reloading (when debugging) - use only 1 tracer object,
+# else a large chain of tracer objects will exist.
+#try:
+#	writer
+#except NameError:
+#	writer=ThreadWriter()
+if __name__=='__main__':
+	import demoutils
+	demoutils.NotAScript()
+	
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/createwin.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/createwin.py
new file mode 100644
index 0000000..3a174c3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/createwin.py
@@ -0,0 +1,99 @@
+#
+# Window creation example
+#
+#	This example creates a minimal "control" that just fills in its
+#	window with red.  To make your own control, subclass Control and
+#	write your own OnPaint() method.  See PyCWnd.HookMessage for what
+#	the parameters to OnPaint are.
+#
+
+from pywin.mfc import dialog, window
+import win32ui
+import win32con
+import win32api
+
+class Control(window.Wnd):
+	"""Generic control class"""
+	def __init__ (self):
+		window.Wnd.__init__(self, win32ui.CreateWnd ())
+
+	def OnPaint (self):
+		dc, paintStruct = self.BeginPaint()
+		self.DoPaint(dc)
+		self.EndPaint(paintStruct)
+		
+	def DoPaint (self, dc):	# Override this!
+		pass
+
+class RedBox (Control):
+	def DoPaint (self, dc):
+		dc.FillSolidRect (self.GetClientRect(), win32api.RGB(255,0,0))
+	
+
+class RedBoxWithPie (RedBox):
+	def DoPaint (self, dc):
+		RedBox.DoPaint(self, dc)
+		r = self.GetClientRect()
+		dc.Pie(r[0], r[1], r[2], r[3], 0,0,r[2], r[3]/2)
+
+def MakeDlgTemplate():
+    style = (win32con.DS_MODALFRAME |
+	     win32con.WS_POPUP |
+	     win32con.WS_VISIBLE |
+	     win32con.WS_CAPTION |
+	     win32con.WS_SYSMENU |
+	     win32con.DS_SETFONT)
+    cs = (win32con.WS_CHILD |
+	  win32con.WS_VISIBLE)
+
+    w = 64
+    h = 64
+
+    dlg = [["Red box",
+	    (0, 0, w, h),
+	    style,
+	    None,
+	    (8, "MS Sans Serif")],
+	   ]
+
+    s = win32con.WS_TABSTOP | cs
+
+    dlg.append([128,
+		"Cancel",
+		win32con.IDCANCEL,
+		(7, h - 18, 50, 14), s | win32con.BS_PUSHBUTTON])
+
+    return dlg
+
+class TestDialog(dialog.Dialog):
+    def OnInitDialog(self):
+        rc = dialog.Dialog.OnInitDialog(self)
+        self.redbox = RedBox ()
+        self.redbox.CreateWindow (None, "RedBox",
+                                 win32con.WS_CHILD |
+                                 win32con.WS_VISIBLE,
+                                 (5, 5, 90, 68),
+                                 self, 1003)
+        return rc
+
+class TestPieDialog(dialog.Dialog):
+    def OnInitDialog(self):
+        rc = dialog.Dialog.OnInitDialog(self)
+        self.control = RedBoxWithPie()
+        self.control.CreateWindow (None, "RedBox with Pie",
+                                 win32con.WS_CHILD |
+                                 win32con.WS_VISIBLE,
+                                 (5, 5, 90, 68),
+                                 self, 1003)
+                                 
+def demo(modal=0):
+	d = TestPieDialog (MakeDlgTemplate())
+	if modal:
+		d.DoModal()
+	else:
+		d.CreateWindow()
+
+if __name__=='__main__':
+	demo(1)
+
+# II Cor. 12:9 $Header: /cvsroot/pywin32/pywin32/Pythonwin/pywin/Demos/createwin.py,v 1.1 1999/09/01 23:33:35 mhammond Exp $
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/demoutils.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/demoutils.py
new file mode 100644
index 0000000..818313d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/demoutils.py
@@ -0,0 +1,54 @@
+# Utilities for the demos
+
+import sys, win32api, win32con, win32ui
+
+NotScriptMsg = """\
+This demo program is not designed to be run as a Script, but is
+probably used by some other test program.  Please try another demo.
+"""
+
+NeedGUIMsg = """\
+This demo program can only be run from inside of Pythonwin
+
+You must start Pythonwin, and select 'Run' from the toolbar or File menu
+"""
+
+
+NeedAppMsg = """\
+This demo program is a 'Pythonwin Application'.
+
+It is more demo code than an example of Pythonwin's capabilities.
+
+To run it, you must execute the command:
+pythonwin.exe /app "%s"
+
+Would you like to execute it now?
+"""
+
+def NotAScript():
+	import win32ui
+	win32ui.MessageBox(NotScriptMsg, "Demos")
+
+def NeedGoodGUI():
+	from pywin.framework.app import HaveGoodGUI
+	rc = HaveGoodGUI()
+	if not rc:
+		win32ui.MessageBox(NeedGUIMsg, "Demos")
+	return rc
+
+def NeedApp():
+	import win32ui
+	rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO)
+	if rc==win32con.IDYES:
+		try:
+			parent = win32ui.GetMainFrame().GetSafeHwnd()
+			win32api.ShellExecute(parent, None, 'pythonwin.exe', '/app "%s"' % sys.argv[0], None, 1)
+		except win32api.error, details:
+			win32ui.MessageBox("Error executing command - %s" % (details), "Demos")
+
+
+from pywin.framework.app import HaveGoodGUI
+
+if __name__=='__main__':
+	import demoutils
+	demoutils.NotAScript()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dibdemo.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dibdemo.py
new file mode 100644
index 0000000..10ca222
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dibdemo.py
@@ -0,0 +1,69 @@
+# A demo which creates a view and a frame which displays a PPM format bitmap
+#
+# This hasnnt been run in a while, as I dont have many of that format around!
+import win32ui
+import win32con
+import win32api
+import string
+
+class DIBView:
+	def __init__(self,  doc, dib):
+		self.dib = dib
+		self.view = win32ui.CreateView(doc)
+		self.width = self.height = 0
+		# set up message handlers
+#		self.view.OnPrepareDC = self.OnPrepareDC
+		self.view.HookMessage (self.OnSize, win32con.WM_SIZE)
+
+	def OnSize (self, params):
+		lParam = params[3]
+		self.width = win32api.LOWORD(lParam)
+		self.height = win32api.HIWORD(lParam)
+
+	def OnDraw (self, ob, dc):
+		# set sizes used for "non strecth" mode.
+		self.view.SetScrollSizes(win32con.MM_TEXT, self.dib.GetSize())
+		dibSize = self.dib.GetSize()
+		dibRect = (0,0,dibSize[0], dibSize[1])
+		# stretch BMP.
+		#self.dib.Paint(dc, (0,0,self.width, self.height),dibRect)
+		# non stretch.
+		self.dib.Paint(dc)
+	
+class DIBDemo:
+	def __init__(self, filename, * bPBM):
+		# init data members
+		f = open(filename, 'rb')
+		dib=win32ui.CreateDIBitmap()
+		if len(bPBM)>0:
+			magic=f.readline()
+			if magic <> "P6\n":
+				print "The file is not a PBM format file"
+				raise "Failed"
+			# check magic?
+			rowcollist=string.split(f.readline())
+			cols=string.atoi(rowcollist[0])
+			rows=string.atoi(rowcollist[1])
+			f.readline()	# whats this one?
+			dib.LoadPBMData(f,(cols,rows))
+		else:
+			dib.LoadWindowsFormatFile(f)
+		f.close()
+		# create doc/view
+		self.doc = win32ui.CreateDoc()
+		self.dibView = DIBView( self.doc, dib )
+		self.frame = win32ui.CreateMDIFrame()
+		self.frame.LoadFrame()	# this will force OnCreateClient
+		self.doc.SetTitle ('DIB Demo')
+		self.frame.ShowWindow()
+		
+		# display the sucka
+		self.frame.ActivateFrame()
+
+	def OnCreateClient( self, createparams, context ):
+		self.dibView.view.CreateWindow(self.frame)
+		return 1
+	
+if __name__=='__main__':
+	import demoutils
+	demoutils.NotAScript()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dlgtest.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dlgtest.py
new file mode 100644
index 0000000..b652846
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dlgtest.py
@@ -0,0 +1,138 @@
+# A Demo of Pythonwin's Dialog and Property Page support.
+
+###################
+#
+# First demo - use the built-in to Pythonwin "Tab Stop" dialog, but
+# customise it heavily.
+#
+# ID's for the tabstop dialog - out test.
+#
+from win32ui import IDD_SET_TABSTOPS
+from win32ui import IDC_EDIT_TABS
+from win32ui import IDC_PROMPT_TABS
+from win32con import IDOK
+from win32con import IDCANCEL
+
+import win32ui
+import win32con
+
+from pywin.mfc import dialog
+
+class TestDialog(dialog.Dialog):
+	def __init__(self, modal=1):
+		dialog.Dialog.__init__(self, IDD_SET_TABSTOPS)
+		self.counter=0
+		if modal:
+			self.DoModal()
+		else:
+			self.CreateWindow()
+
+	def OnInitDialog(self):
+		# Set the caption of the dialog itself.
+		self.SetWindowText("Used to be Tab Stops!")
+		# Get a child control, remember it, and change its text.
+		self.edit=self.GetDlgItem(IDC_EDIT_TABS)	# the text box.
+		self.edit.SetWindowText("Test")
+		# Hook a Windows message for the dialog.
+		self.edit.HookMessage(self.KillFocus, win32con.WM_KILLFOCUS)
+		# Get the prompt control, and change its next.
+		prompt=self.GetDlgItem(IDC_PROMPT_TABS)	# the prompt box.
+		prompt.SetWindowText("Prompt")
+		# And the same for the button..
+		cancel=self.GetDlgItem(IDCANCEL)	# the cancel button
+		cancel.SetWindowText("&Kill me")
+
+		# And just for demonstration purposes, we hook the notify message for the dialog.
+		# This allows us to be notified when the Edit Control text changes.
+		self.HookCommand(self.OnNotify, IDC_EDIT_TABS)
+
+	def OnNotify(self, controlid, code):
+		if code==win32con.EN_CHANGE:
+			print "Edit text changed!"
+		return 1 # I handled this, so no need to call defaults!
+
+	# kill focus for the edit box.
+	# Simply increment the value in the text box.
+	def KillFocus(self,msg):
+		self.counter=self.counter+1
+		if self.edit != None:
+			self.edit.SetWindowText(str(self.counter))
+
+	# Called when the dialog box is terminating...
+	def OnDestroy(self,msg):
+		del self.edit
+		del self.counter
+
+# A very simply Property Sheet.
+# We only make a new class for demonstration purposes.
+class TestSheet(dialog.PropertySheet):
+	def __init__(self, title):
+		dialog.PropertySheet.__init__(self, title)
+		self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE)
+	def OnActivate(self, msg):
+		pass
+
+# A very simply Property Page, which will be "owned" by the above
+# Property Sheet.
+# We create a new class, just so we can hook a control notification.
+class TestPage(dialog.PropertyPage):
+	def OnInitDialog(self):
+		# We use the HookNotify function to allow Python to respond to
+		# Windows WM_NOTIFY messages.
+		# In this case, we are interested in BN_CLICKED messages.
+		self.HookNotify(self.OnNotify, win32con.BN_CLICKED)
+		
+	def OnNotify(self, std, extra):
+		print "OnNotify", std, extra
+
+# Some code that actually uses these objects.
+def demo(modal = 0):
+	TestDialog(modal)
+	
+	# property sheet/page demo
+	ps=win32ui.CreatePropertySheet('Property Sheet/Page Demo')
+	# Create a completely standard PropertyPage.
+	page1=win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO1)
+	# Create our custom property page.
+	page2=TestPage(win32ui.IDD_PROPDEMO2)
+	ps.AddPage(page1)
+	ps.AddPage(page2)
+	if modal:
+		ps.DoModal()
+	else:
+		style = win32con.WS_SYSMENU|win32con.WS_POPUP|win32con.WS_CAPTION|win32con.DS_MODALFRAME|win32con.WS_VISIBLE
+		styleex = win32con.WS_EX_DLGMODALFRAME | win32con.WS_EX_PALETTEWINDOW
+		ps.CreateWindow(win32ui.GetMainFrame(), style, styleex)
+	
+	
+def test(modal=1):
+
+#	dlg=dialog.Dialog(1010)
+#	dlg.CreateWindow()
+#	dlg.EndDialog(0)
+#	del dlg
+#	return
+	# property sheet/page demo
+	ps=TestSheet('Property Sheet/Page Demo')
+	page1=win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO1)
+	page2=win32ui.CreatePropertyPage(win32ui.IDD_PROPDEMO2)
+	ps.AddPage(page1)
+	ps.AddPage(page2)
+	del page1
+	del page2
+	if modal:
+		ps.DoModal()
+	else:
+		ps.CreateWindow(win32ui.GetMainFrame())
+	return ps
+
+def d():
+	dlg = win32ui.CreateDialog(win32ui.IDD_DEBUGGER)
+	dlg.datalist.append((win32ui.IDC_DBG_RADIOSTACK, "radio"))
+	print "data list is ", dlg.datalist
+	dlg.data['radio']=1
+	dlg.DoModal()
+	print dlg.data['radio']
+
+if __name__=='__main__':
+	demo(1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dyndlg.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dyndlg.py
new file mode 100644
index 0000000..5d36690
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/dyndlg.py
@@ -0,0 +1,73 @@
+# dyndlg.py
+# contributed by Curt Hagenlocher <chi@earthlink.net>
+
+# Dialog Template params:
+#	Parameter 0 - Window caption
+#	Parameter 1 - Bounds (rect tuple)
+#	Parameter 2 - Window style
+#	Parameter 3 - Extended style
+#	Parameter 4 - Font tuple
+#	Parameter 5 - Menu name
+#	Parameter 6 - Window class
+# Dialog item params:
+#	Parameter 0 - Window class
+#	Parameter 1 - Text
+#	Parameter 2 - ID
+#	Parameter 3 - Bounds
+#	Parameter 4 - Style
+#	Parameter 5 - Extended style
+#	Parameter 6 - Extra data
+
+
+import win32ui
+import win32con
+from pywin.mfc import dialog, window
+
+def MakeDlgTemplate():
+	style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
+	cs = win32con.WS_CHILD | win32con.WS_VISIBLE
+	dlg = [ ["Select Warehouse", (0, 0, 177, 93), style, None, (8, "MS Sans Serif")], ]
+	dlg.append([130, "Current Warehouse:", -1, (7, 7, 69, 9), cs | win32con.SS_LEFT])
+	dlg.append([130, "ASTORIA", 128, (16, 17, 99, 7), cs | win32con.SS_LEFT])
+	dlg.append([130, "New &Warehouse:", -1, (7, 29, 69, 9), cs | win32con.SS_LEFT])
+	s = win32con.WS_TABSTOP | cs
+#	dlg.append([131, None, 130, (5, 40, 110, 48),
+#		s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER])
+	dlg.append(["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),win32con.WS_TABSTOP])
+
+	dlg.append([128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
+	s = win32con.BS_PUSHBUTTON | s
+	dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14), s])
+	dlg.append([128, "&Help", 100, (124, 74, 50, 14), s])
+	
+	return dlg
+
+def test1():
+	win32ui.CreateDialogIndirect( MakeDlgTemplate() ).DoModal()
+	
+def test2():
+	dialog.Dialog( MakeDlgTemplate() ).DoModal()
+
+def test3():
+	dlg = win32ui.LoadDialogResource(win32ui.IDD_SET_TABSTOPS)
+	dlg[0][0] = 'New Dialog Title'
+	dlg[0][1] = (80, 20, 161, 60)
+	dlg[1][1] = '&Confusion:'
+	cs = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON
+	dlg.append([128, "&Help", 100, (111, 41, 40, 14), cs])
+	dialog.Dialog( dlg ).DoModal()
+
+def test4():
+	page1=dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO1))
+	page2=dialog.PropertyPage(win32ui.LoadDialogResource(win32ui.IDD_PROPDEMO2))
+	ps=dialog.PropertySheet('Property Sheet/Page Demo', None, [page1, page2])
+	ps.DoModal()
+
+def testall():
+	test1()
+	test2()
+	test3()
+	test4()
+
+if __name__=='__main__':
+	testall()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/fontdemo.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/fontdemo.py
new file mode 100644
index 0000000..449e3da8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/fontdemo.py
@@ -0,0 +1,79 @@
+# Demo of Generic document windows, DC, and Font usage
+# by Dave Brennan (brennan@hal.com)
+
+# usage examples:
+
+# >>> from fontdemo import *
+# >>> d = FontDemo('Hello, Python')
+# >>> f1 = { 'name':'Arial', 'height':36, 'weight':win32con.FW_BOLD}
+# >>> d.SetFont(f1)
+# >>> f2 = {'name':'Courier New', 'height':24, 'italic':1}
+# >>> d.SetFont (f2)
+
+import win32ui
+import win32con
+import win32api
+
+from pywin.mfc import docview
+
+
+# font is a dictionary in which the following elements matter:
+# (the best matching font to supplied parameters is returned)
+#   name		string name of the font as known by Windows
+#   size		point size of font in logical units
+#   weight		weight of font (win32con.FW_NORMAL, win32con.FW_BOLD)
+#   italic		boolean; true if set to anything but None
+#   underline	boolean; true if set to anything but None
+
+class FontView(docview.ScrollView):
+	def __init__(self,  doc, text = 'Python Rules!', font_spec = {'name':'Arial', 'height':42}):
+		docview.ScrollView.__init__(self, doc)
+		self.font = win32ui.CreateFont (font_spec)
+		self.text = text
+		self.width = self.height = 0
+		# set up message handlers
+		self.HookMessage (self.OnSize, win32con.WM_SIZE)
+	def OnAttachedObjectDeath(self):
+		docview.ScrollView.OnAttachedObjectDeath(self)
+		del self.font
+
+	def SetFont (self, new_font):
+		# Change font on the fly
+		self.font = win32ui.CreateFont (new_font)
+		# redraw the entire client window
+		selfInvalidateRect (None)
+	def OnSize (self, params):
+		lParam = params[3]
+		self.width = win32api.LOWORD(lParam)
+		self.height = win32api.HIWORD(lParam)
+
+	def OnPrepareDC (self, dc, printinfo):
+		# Set up the DC for forthcoming OnDraw call
+		self.SetScrollSizes(win32con.MM_TEXT, (100,100))
+		dc.SetTextColor (win32api.RGB(0,0,255))
+		dc.SetBkColor (win32api.GetSysColor (win32con.COLOR_WINDOW))
+		dc.SelectObject (self.font)
+		dc.SetTextAlign (win32con.TA_CENTER | win32con.TA_BASELINE)
+
+	def OnDraw (self, dc):
+		if (self.width == 0 and self.height == 0):
+			left, top, right, bottom = self.GetClientRect()
+			self.width = right - left
+			self.height = bottom - top
+		x, y = self.width / 2, self.height / 2
+		dc.TextOut (x, y, self.text)
+	
+def FontDemo():
+	# create doc/view
+	template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, None, None, FontView)
+	doc=template.OpenDocumentFile(None)
+	doc.SetTitle ('Font Demo')
+#	print "template is ", template, "obj is", template._obj_
+	template.close()
+#	print "closed"
+#	del template
+
+if __name__=='__main__':
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		FontDemo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/guidemo.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/guidemo.py
new file mode 100644
index 0000000..d57e66ce
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/guidemo.py
@@ -0,0 +1,68 @@
+# GUI Demo - just a worker script to invoke all the other demo/test scripts.
+import win32ui
+import __main__
+import sys
+import regutil
+import win32api
+
+demos = [ \
+#	('Font', 'import fontdemo;fontdemo.FontDemo()'),
+	('Open GL Demo', 'import openGLDemo;openGLDemo.test()'),
+	('Threaded GUI', 'import threadedgui;threadedgui.ThreadedDemo()'),
+	('Tree View Demo', 'import hiertest;hiertest.demoboth()'),
+	('3-Way Splitter Window', 'import splittst;splittst.demo()'),
+	('Custom Toolbars and Tooltips', 'import toolbar;toolbar.test()'),
+	('Progress Bar', 'import progressbar;progressbar.demo()'),
+	('Slider Control', 'import sliderdemo;sliderdemo.demo()'),
+	('Dynamic window creation', 'import createwin;createwin.demo()'),
+	('Various Dialog demos', 'import dlgtest;dlgtest.demo()'),
+	('OCX Control Demo', 'from ocx import ocxtest;ocxtest.demo()'),
+	('OCX Serial Port Demo', 'from ocx import ocxserialtest;	ocxserialtest.test()'),
+	('IE4 Control Demo', 'from ocx import webbrowser; webbrowser.Demo()'),
+]
+
+def demo():
+	try:
+		# seeif I can locate the demo files.
+		import fontdemo
+	except ImportError:
+		# else put the demos direectory on the path (if not already)
+		try:
+			instPath = regutil.GetRegistryDefaultValue(regutil.BuildDefaultPythonKey() + "\\InstallPath")
+		except win32api.error:
+			print "The InstallPath can not be located, and the Demos directory is not on the path"
+			instPath="."
+			
+		demosDir = win32ui.FullPath(instPath + "\\Demos")
+		for path in sys.path:
+			if win32ui.FullPath(path)==demosDir:
+				break
+		else:
+			sys.path.append(demosDir)
+		import fontdemo
+
+	import sys
+	if "/go" in sys.argv:
+		for name, cmd in demos:		
+			try:
+				exec cmd
+			except:
+				print "Demo of %s failed - %s:%s" % (cmd,sys.exc_info()[0], sys.exc_info()[1])
+		return
+	# Otherwise allow the user to select the demo to run
+
+	import pywin.dialogs.list
+	while 1:
+		rc = pywin.dialogs.list.SelectFromLists( "Select a Demo", demos, ['Demo Title'] )
+		if rc is None:
+			break
+		title, cmd = demos[rc]
+		try:
+			exec cmd
+		except:
+			print "Demo of %s failed - %s:%s" % (title,sys.exc_info()[0], sys.exc_info()[1])
+
+if __name__==__main__.__name__:
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		demo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/hiertest.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/hiertest.py
new file mode 100644
index 0000000..1d856c1b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/hiertest.py
@@ -0,0 +1,104 @@
+import win32ui
+import os
+import commctrl
+
+from pywin.tools import hierlist
+from pywin.mfc import docview, window
+
+# directory listbox
+# This has obvious limitations - doesnt track subdirs, etc.  Demonstrates
+# simple use of Python code for querying the tree as needed.
+# Only use strings, and lists of strings (from curdir())
+class DirHierList(hierlist.HierList):
+	def __init__(self, root, listBoxID = win32ui.IDC_LIST1):
+		hierlist.HierList.__init__(self, root, win32ui.IDB_HIERFOLDERS, listBoxID)
+	def GetText(self, item):
+		return os.path.basename(item)
+	def GetSubList(self, item):
+		if os.path.isdir(item):
+			ret = map(lambda path, base=item: os.path.join(base, path), os.listdir(item))
+		else:
+			ret = None
+		return ret
+	# if the item is a dir, it is expandable.
+	def IsExpandable(self, item):
+		return os.path.isdir(item)
+	def GetSelectedBitmapColumn(self, item):
+		return self.GetBitmapColumn(item)+6 # Use different color for selection
+
+class TestDocument(docview.Document):
+	def __init__(self, template):
+		docview.Document.__init__(self, template)
+		self.hierlist = hierlist.HierListWithItems(HLIFileDir("\\"), win32ui.IDB_HIERFOLDERS, win32ui.AFX_IDW_PANE_FIRST)
+
+class HierListView(docview.TreeView):
+	def OnInitialUpdate(self):
+		rc = self._obj_.OnInitialUpdate()
+		self.hierList = self.GetDocument().hierlist
+		self.hierList.HierInit(self.GetParent())
+		self.hierList.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS)
+		return rc
+
+class HierListFrame(window.MDIChildWnd):
+	pass
+
+def GetTestRoot():
+	tree1 = ('Tree 1',[('Item 1','Item 1 data'),'Item 2',3])
+	tree2 = ('Tree 2',[('Item 2.1','Item 2 data'),'Item 2.2',2.3])
+	return ('Root',[tree1,tree2,'Item 3'])
+
+def demoboth():
+	template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, TestDocument, HierListFrame, HierListView)
+	template.OpenDocumentFile(None).SetTitle("Hierlist demo")
+	
+	demomodeless()
+
+def demomodeless():
+	testList2=DirHierList("\\")
+	dlg=hierlist.HierDialog('hier list test',testList2)
+	dlg.CreateWindow()
+
+def demodlg	():
+	testList2=DirHierList("\\")
+	dlg=hierlist.HierDialog('hier list test',testList2)
+	dlg.DoModal()
+
+def demo():
+	template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, TestDocument, HierListFrame, HierListView)
+	template.OpenDocumentFile(None).SetTitle("Hierlist demo")
+
+#
+# Demo/Test for HierList items.
+#
+# Easy to make a better directory program.
+#
+class HLIFileDir(hierlist.HierListItem):
+	def __init__( self, filename ):
+		self.filename = filename
+		hierlist.HierListItem.__init__(self)
+	def GetText(self):
+		try:
+			return "%-20s %d bytes" % (os.path.basename(self.filename), os.stat(self.filename)[6])
+		except os.error, details:
+			return "%-20s - %s" % (self.filename, details[1])
+
+	def IsExpandable(self):
+		return os.path.isdir(self.filename)
+	def GetSubList(self):
+		ret = []
+		for newname in os.listdir(self.filename):
+			if newname not in ['.', '..']:
+				ret.append( HLIFileDir( os.path.join(self.filename,newname ) ) )
+		return ret
+
+
+def demohli():
+	template = docview.DocTemplate(win32ui.IDR_PYTHONTYPE, TestDocument, hierlist.HierListFrame, hierlist.HierListView)
+	template.OpenDocumentFile(None).SetTitle("Hierlist demo")
+
+if __name__=='__main__':
+	import demoutils
+	if demoutils.HaveGoodGUI():
+		demoboth()
+	else:
+		demodlg()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/menutest.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/menutest.py
new file mode 100644
index 0000000..18ceeb2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/menutest.py
@@ -0,0 +1,12 @@
+# Run this as a python script, to gray "close" off the edit window system menu.
+from pywin.framework import interact
+import win32con
+
+if __name__=='__main__':
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		win=interact.edit.currentView.GetParent()
+		menu=win.GetSystemMenu()
+		id=menu.GetMenuItemID(6)
+		menu.EnableMenuItem(id,win32con.MF_BYCOMMAND|win32con.MF_GRAYED)
+		print "The interactive window's 'Close' menu item is now disabled."
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/objdoc.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/objdoc.py
new file mode 100644
index 0000000..c455fd8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/objdoc.py
@@ -0,0 +1,49 @@
+# This is a sample file, and shows the basic framework for using an "Object" based
+# document, rather than a "filename" based document.
+# This is referenced by the Pythonwin .html documentation.
+
+# In the example below, the OpenObject() method is used instead of OpenDocumentFile,
+# and all the core MFC document open functionality is retained.
+
+import win32ui
+from pywin.mfc import docview
+
+class object_template (docview.DocTemplate):
+	def __init__(self):
+		docview.DocTemplate.__init__(self, None, None, None, object_view)
+	def OpenObject(self, object): # Use this instead of OpenDocumentFile.
+		# Look for existing open document
+		for doc in self.GetDocumentList():
+			print "document is ", doc
+			if doc.object is object:
+				doc.GetFirstView().ActivateFrame()
+				return doc
+		# not found - new one.
+		doc = object_document(self, object)
+		frame = self.CreateNewFrame(doc)
+		doc.OnNewDocument()
+		doc.SetTitle(str(object))
+		self.InitialUpdateFrame(frame, doc)
+		return doc
+
+class object_document (docview.Document):
+	def __init__(self, template, object):
+		docview.Document.__init__(self, template)
+		self.object = object
+	def OnOpenDocument (self, name):
+		raise error, "Should not be called if template strings set up correctly"
+		return 0
+
+class object_view (docview.EditView):
+	def OnInitialUpdate (self):
+		self.ReplaceSel("Object is %s" % `self.GetDocument().object`)
+
+def demo ():
+	t = object_template()		
+	d = t.OpenObject(win32ui)
+	return (t, d)
+
+if __name__=='__main__':
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		demo()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/demoutils.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/demoutils.py
new file mode 100644
index 0000000..818313d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/demoutils.py
@@ -0,0 +1,54 @@
+# Utilities for the demos
+
+import sys, win32api, win32con, win32ui
+
+NotScriptMsg = """\
+This demo program is not designed to be run as a Script, but is
+probably used by some other test program.  Please try another demo.
+"""
+
+NeedGUIMsg = """\
+This demo program can only be run from inside of Pythonwin
+
+You must start Pythonwin, and select 'Run' from the toolbar or File menu
+"""
+
+
+NeedAppMsg = """\
+This demo program is a 'Pythonwin Application'.
+
+It is more demo code than an example of Pythonwin's capabilities.
+
+To run it, you must execute the command:
+pythonwin.exe /app "%s"
+
+Would you like to execute it now?
+"""
+
+def NotAScript():
+	import win32ui
+	win32ui.MessageBox(NotScriptMsg, "Demos")
+
+def NeedGoodGUI():
+	from pywin.framework.app import HaveGoodGUI
+	rc = HaveGoodGUI()
+	if not rc:
+		win32ui.MessageBox(NeedGUIMsg, "Demos")
+	return rc
+
+def NeedApp():
+	import win32ui
+	rc = win32ui.MessageBox(NeedAppMsg % sys.argv[0], "Demos", win32con.MB_YESNO)
+	if rc==win32con.IDYES:
+		try:
+			parent = win32ui.GetMainFrame().GetSafeHwnd()
+			win32api.ShellExecute(parent, None, 'pythonwin.exe', '/app "%s"' % sys.argv[0], None, 1)
+		except win32api.error, details:
+			win32ui.MessageBox("Error executing command - %s" % (details), "Demos")
+
+
+from pywin.framework.app import HaveGoodGUI
+
+if __name__=='__main__':
+	import demoutils
+	demoutils.NotAScript()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/flash.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/flash.py
new file mode 100644
index 0000000..5f6c7e5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/flash.py
@@ -0,0 +1,84 @@
+# By Bradley Schatz
+# simple flash/python application demonstrating bidirectional
+# communicaion between flash and python. Click the sphere to see
+# behavior. Uses Bounce.swf from FlashBounce.zip, available from
+# http://pages.cpsc.ucalgary.ca/~saul/vb_examples/tutorial12/
+
+# Update to the path of the .swf file (note it could be a true URL)
+flash_url = "c:\\bounce.swf"
+
+import win32ui, win32con, win32api, regutil
+from pywin.mfc import window, activex
+from win32com.client import gencache
+import sys
+
+FlashModule = gencache.EnsureModule("{D27CDB6B-AE6D-11CF-96B8-444553540000}", 0, 1, 0)
+
+if FlashModule is None:
+ raise ImportError, "Flash does not appear to be installed."
+
+class MyFlashComponent(activex.Control, FlashModule.ShockwaveFlash):
+ def __init__(self):
+  activex.Control.__init__(self)
+  FlashModule.ShockwaveFlash.__init__(self)
+  self.x = 50
+  self.y = 50
+  self.angle = 30
+  self.started = 0
+
+ def OnFSCommand(self, command, args):
+  print "FSCommend" , command, args
+  self.x = self.x + 20
+  self.y = self.y + 20
+  self.angle = self.angle + 20
+  if self.x > 200 or self.y > 200:
+      self.x = 0
+      self.y = 0
+  if self.angle > 360:
+      self.angle = 0
+  self.SetVariable("xVal", self.x)
+  self.SetVariable("yVal", self.y)
+  self.SetVariable("angle", self.angle)
+  self.TPlay("_root.mikeBall")
+
+ def OnProgress(self, percentDone):
+  print "PercentDone", percentDone
+ def OnReadyStateChange(self, newState):
+  # 0=Loading, 1=Uninitialized, 2=Loaded, 3=Interactive, 4=Complete
+  print "State", newState
+
+
+class BrowserFrame(window.MDIChildWnd):
+ def __init__(self, url = None):
+  if url is None:
+   self.url = regutil.GetRegisteredHelpFile("Main Python Documentation")
+  else:
+   self.url = url
+  pass # Dont call base class doc/view version...
+ def Create(self, title, rect = None, parent = None):
+  style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW
+  self._obj_ = win32ui.CreateMDIChild()
+  self._obj_.AttachObject(self)
+  self._obj_.CreateWindow(None, title, style, rect, parent)
+  rect = self.GetClientRect()
+  rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+  self.ocx = MyFlashComponent()
+  self.ocx.CreateControl("Flash Player", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000)
+  self.ocx.LoadMovie(0,flash_url)
+  self.ocx.Play()
+  self.HookMessage (self.OnSize, win32con.WM_SIZE)
+
+ def OnSize (self, params):
+  rect = self.GetClientRect()
+  rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+  self.ocx.SetWindowPos(0, rect, 0)
+
+def Demo():
+ url = None
+ if len(sys.argv)>1:
+  url = win32api.GetFullPathName(sys.argv[1])
+ f = BrowserFrame(url)
+ f.Create("Flash Player")
+
+if __name__=='__main__':
+ Demo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/msoffice.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/msoffice.py
new file mode 100644
index 0000000..8ec42ff
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/msoffice.py
@@ -0,0 +1,127 @@
+# This demo uses some of the Microsoft Office components.
+#
+# It was taken from an MSDN article showing how to embed Excel.
+# It is not complete yet, but it _does_ show an Excel spreadsheet in a frame!
+#
+
+import win32ui, win32uiole, win32con, regutil
+from pywin.mfc import window, activex, object, docview
+from win32com.client import gencache
+
+#WordModule = gencache.EnsureModule('{00020905-0000-0000-C000-000000000046}', 1033, 8, 0)
+#if WordModule is None:
+#	raise ImportError, "Microsoft Word version 8 does not appear to be installed."
+
+
+class OleClientItem(object.CmdTarget):
+	def __init__(self, doc):
+		object.CmdTarget.__init__(self, win32uiole.CreateOleClientItem(doc))
+
+	def OnGetItemPosition(self):
+		# For now return a hard-coded rect.
+		return (10, 10, 210, 210)
+
+	def OnActivate(self):
+		# Allow only one inplace activate item per frame
+		view = self.GetActiveView()
+		item = self.GetDocument().GetInPlaceActiveItem(view)
+		if item is not None and item._obj_ != self._obj_:
+			item.Close()
+		self._obj_.OnActivate()
+		
+	def OnChange(self, oleNotification, dwParam):
+		self._obj_.OnChange(oleNotification, dwParam)
+		self.GetDocument().UpdateAllViews(None)
+		
+	def OnChangeItemPosition(self, rect):
+		# During in-place activation CEmbed_ExcelCntrItem::OnChangeItemPosition
+		#  is called by the server to change the position of the in-place
+		#  window.  Usually, this is a result of the data in the server
+		#  document changing such that the extent has changed or as a result
+		#  of in-place resizing.
+		#
+		# The default here is to call the base class, which will call
+		#  COleClientItem::SetItemRects to move the item
+		#  to the new position.
+		if not self._obj_.OnChangeItemPosition(self, rect):
+			return 0
+
+		# TODO: update any cache you may have of the item's rectangle/extent
+		return 1
+		
+class OleDocument(object.CmdTarget):
+	def __init__(self, template):
+		object.CmdTarget.__init__(self, win32uiole.CreateOleDocument(template))
+		self.EnableCompoundFile()
+
+class ExcelView(docview.ScrollView):
+	def OnInitialUpdate(self):
+		self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS)
+		self.HookMessage (self.OnSize, win32con.WM_SIZE)
+
+		self.SetScrollSizes(win32con.MM_TEXT, (100, 100))
+		rc = self._obj_.OnInitialUpdate()
+		self.EmbedExcel()
+		return rc
+		
+	def EmbedExcel(self):
+		doc = self.GetDocument()
+		self.clientItem = OleClientItem(doc)
+		self.clientItem.CreateNewItem("Excel.Sheet")
+		self.clientItem.DoVerb(-1, self)
+		doc.UpdateAllViews(None)
+		
+	def OnDraw(self, dc):
+		doc = self.GetDocument()
+		pos = doc.GetStartPosition()
+		clientItem, pos = doc.GetNextItem(pos)
+		clientItem.Draw(dc, (10, 10, 210, 210) )
+
+	# Special handling of OnSetFocus and OnSize are required for a container
+	#  when an object is being edited in-place.
+	def OnSetFocus(self, msg):
+		item = self.GetDocument().GetInPlaceActiveItem(self)
+		if item is not None and item.GetItemState()==win32uiole.COleClientItem_activeUIState:
+			wnd = item.GetInPlaceWindow()
+			if wnd is not None:
+				wnd.SetFocus()
+			return 0 # Dont get the base version called.
+		return 1 # Call the base version.
+	
+	def OnSize (self, params):
+		item = self.GetDocument().GetInPlaceActiveItem(self)
+		if item is not None:
+			item.SetItemRects()
+		return 1 # do call the base!
+		 	
+class OleTemplate(docview.DocTemplate):
+	def __init__(self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None):
+		if MakeDocument is None: MakeDocument = OleDocument
+		if MakeView is None: MakeView = ExcelView
+		docview.DocTemplate.__init__(self, resourceId, MakeDocument, MakeFrame, MakeView)
+
+class WordFrame(window.MDIChildWnd):
+	def __init__(self, doc = None):
+		self._obj_ = win32ui.CreateMDIChild()
+		self._obj_.AttachObject(self)
+		# Dont call base class doc/view version...
+	def Create(self, title, rect = None, parent = None):
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW
+		self._obj_.CreateWindow(None, title, style, rect, parent)
+		
+		rect = self.GetClientRect()
+		rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+		self.ocx = MyWordControl()
+		self.ocx.CreateControl("Microsoft Word", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 20000)
+
+def Demo():
+	import sys, win32api
+	docName = None
+	if len(sys.argv)>1:
+		docName = win32api.GetFullPathName(sys.argv[1])
+	OleTemplate().OpenDocumentFile(None)
+#	f = WordFrame(docName)
+#	f.Create("Microsoft Office")
+
+if __name__=='__main__':
+	Demo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxserialtest.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxserialtest.py
new file mode 100644
index 0000000..47f4cb7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxserialtest.py
@@ -0,0 +1,101 @@
+# ocxserialtest.py
+#
+# Sample that uses the mscomm OCX to talk to a serial
+# device.
+
+# Very simple - queries a modem for ATI responses
+
+import win32ui, win32uiole
+import win32con
+from pywin.mfc import dialog, activex
+from win32com.client import gencache
+import pythoncom
+
+SERIAL_SETTINGS = '19200,n,8,1'
+SERIAL_PORT = 2
+
+win32ui.DoWaitCursor(1)
+serialModule = gencache.EnsureModule("{648A5603-2C6E-101B-82B6-000000000014}", 0, 1, 1)
+win32ui.DoWaitCursor(0)
+if serialModule is None:
+	raise ImportError, "MS COMM Control does not appear to be installed on the PC"
+
+
+def MakeDlgTemplate():
+    style = win32con.DS_MODALFRAME | win32con.WS_POPUP \
+            | win32con.WS_VISIBLE | win32con.WS_CAPTION \
+            | win32con.WS_SYSMENU | win32con.DS_SETFONT
+    cs = win32con.WS_CHILD | win32con.WS_VISIBLE
+    dlg = [ ["Very Basic Terminal",
+             (0, 0, 350, 180), style, None, (8, "MS Sans Serif")], ]
+    s = win32con.WS_TABSTOP | cs
+    dlg.append(["RICHEDIT", None, 132, (5, 5, 340, 170),s | win32con.ES_WANTRETURN | win32con.ES_MULTILINE | win32con.ES_AUTOVSCROLL | win32con.WS_VSCROLL])
+    return dlg
+
+
+####################################
+#
+# Serial Control
+#
+class MySerialControl(activex.Control, serialModule.MSComm):
+	def __init__(self, parent):
+		activex.Control.__init__(self)
+		serialModule.MSComm.__init__(self)
+		self.parent = parent
+	def OnComm(self):
+		self.parent.OnComm()
+
+class TestSerDialog(dialog.Dialog):
+	def __init__(self, *args):
+		apply( dialog.Dialog.__init__, (self,)+args )
+		self.olectl = None
+	def OnComm(self):
+		event = self.olectl.CommEvent
+		if event == serialModule.OnCommConstants.comEvReceive:
+			self.editwindow.ReplaceSel(self.olectl.Input)
+
+	def OnKey(self, key):
+		if self.olectl:
+			self.olectl.Output = chr(key)
+
+	def OnInitDialog(self):
+		rc = dialog.Dialog.OnInitDialog(self)
+		self.editwindow = self.GetDlgItem(132)
+		self.editwindow.HookAllKeyStrokes(self.OnKey)
+
+		self.olectl = MySerialControl(self)
+		try:
+			self.olectl.CreateControl("OCX",
+			                          win32con.WS_TABSTOP | win32con.WS_VISIBLE,
+			                          (7,43,500,300), self._obj_, 131)
+		except win32ui.error:
+			self.MessageBox("The Serial Control could not be created")
+			self.olectl = None
+			self.EndDialog(win32con.IDCANCEL)
+		if self.olectl:			                        
+			self.olectl.Settings = SERIAL_SETTINGS
+			self.olectl.CommPort = SERIAL_PORT
+			self.olectl.RThreshold = 1
+			try:
+				self.olectl.PortOpen = 1
+			except pythoncom.com_error, details:
+				print "Could not open the specified serial port - %s" % (details[2][2])
+				self.EndDialog(win32con.IDCANCEL)
+		return rc
+
+	def OnDestroy(self, msg):
+		if self.olectl:
+			try:
+				self.olectl.PortOpen = 0
+			except pythoncom.com_error, details:
+				print "Error closing port - %s" % (details[2][2])
+		return dialog.Dialog.OnDestroy(self, msg)
+
+def test():
+    d = TestSerDialog(MakeDlgTemplate() )
+    d.DoModal()
+
+if __name__ == "__main__":
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxtest.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxtest.py
new file mode 100644
index 0000000..b1fa4c4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/ocxtest.py
@@ -0,0 +1,186 @@
+# OCX Tester for Pythonwin
+#
+# This file _is_ ready to run.  All that is required is that the OCXs being tested
+# are installed on your machine.
+#
+# The .py files behind the OCXs will be automatically generated and imported.
+
+from pywin.mfc import dialog, window, activex
+import win32ui, win32uiole
+import win32con
+import os, sys, win32api, glob
+from win32com.client import gencache
+
+
+def MakeDlgTemplate():
+	style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
+	cs = win32con.WS_CHILD | win32con.WS_VISIBLE
+	dlg = [ ["OCX Demos", (0, 0, 350, 350), style, None, (8, "MS Sans Serif")], ]
+	s = win32con.WS_TABSTOP | cs
+#	dlg.append([131, None, 130, (5, 40, 110, 48),
+#		s | win32con.LBS_NOTIFY | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL | win32con.WS_BORDER])
+#	dlg.append(["{8E27C92B-1264-101C-8A2F-040224009C02}", None, 131, (5, 40, 110, 48),win32con.WS_TABSTOP])
+
+	dlg.append([128, "About", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
+	s = win32con.BS_PUSHBUTTON | s
+	dlg.append([128, "Close", win32con.IDCANCEL, (124, 22, 50, 14), s])
+	
+	return dlg
+
+####################################
+#
+# Calendar test code
+#
+
+def GetTestCalendarClass():
+	global calendarParentModule
+	win32ui.DoWaitCursor(1)
+	calendarParentModule = gencache.EnsureModule("{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0)
+	win32ui.DoWaitCursor(0)
+	if calendarParentModule is None:
+		return None
+
+	class TestCalDialog(dialog.Dialog):
+		def OnInitDialog(self):
+		
+			class MyCal(activex.Control, calendarParentModule.Calendar):
+				def OnAfterUpdate(self):
+					print "OnAfterUpdate"
+				def OnClick(self):
+					print "OnClick"
+				def OnDblClick(self):
+					print "OnDblClick"
+				def OnKeyDown(self, KeyCode, Shift):
+					print "OnKeyDown", KeyCode, Shift
+				def OnKeyPress(self, KeyAscii):
+					print "OnKeyPress", KeyAscii
+				def OnKeyUp(self, KeyCode, Shift):
+					print "OnKeyUp", KeyCode, Shift
+				def OnBeforeUpdate(self, Cancel):
+					print "OnBeforeUpdate", Cancel
+				def OnNewMonth(self):
+					print "OnNewMonth"
+				def OnNewYear(self):
+					print "OnNewYear"
+
+			rc = dialog.Dialog.OnInitDialog(self)
+			self.olectl = MyCal()
+			try:
+				self.olectl.CreateControl("OCX", win32con.WS_TABSTOP | win32con.WS_VISIBLE, (7,43,500,300), self._obj_, 131)
+			except win32ui.error:
+				self.MessageBox("The Calendar Control could not be created")
+				self.olectl = None
+				self.EndDialog(win32con.IDCANCEL)
+
+			return rc
+		def OnOK(self):
+			self.olectl.AboutBox()
+			
+	return TestCalDialog
+		
+
+####################################
+#
+# Video Control
+#
+def GetTestVideoModule():
+	global videoControlModule, videoControlFileName
+	win32ui.DoWaitCursor(1)
+	videoControlModule = gencache.EnsureModule("{05589FA0-C356-11CE-BF01-00AA0055595A}", 0, 2, 0)
+	win32ui.DoWaitCursor(0)
+	if videoControlModule is None:
+		return None
+	fnames = glob.glob(os.path.join(win32api.GetWindowsDirectory(), "*.avi"))
+	if not fnames:
+		print "No AVI files available in system directory"
+		return None
+	videoControlFileName = fnames[0]
+	return videoControlModule
+
+def GetTestVideoDialogClass():
+	if GetTestVideoModule() is None:
+		return None
+	class TestVideoDialog(dialog.Dialog):
+		def OnInitDialog(self):
+			rc = dialog.Dialog.OnInitDialog(self)
+			try:
+				self.olectl = activex.MakeControlInstance(videoControlModule.ActiveMovie)
+				self.olectl.CreateControl("", win32con.WS_TABSTOP | win32con.WS_VISIBLE, (7,43,500,300), self._obj_, 131)
+			except win32ui.error:
+				self.MessageBox("The Video Control could not be created")
+				self.olectl = None
+				self.EndDialog(win32con.IDCANCEL)
+				return
+
+			self.olectl.FileName = videoControlFileName
+#			self.olectl.Run()
+			return rc
+		def OnOK(self):
+			self.olectl.AboutBox()
+	return TestVideoDialog
+
+###############
+#
+# An OCX in an MDI Frame
+#
+class OCXFrame(window.MDIChildWnd):
+	def __init__(self):
+		pass # Dont call base class doc/view version...
+	def Create(self, controlClass, title, rect = None, parent = None):
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW
+		self._obj_ = win32ui.CreateMDIChild()
+		self._obj_.AttachObject(self)
+		self._obj_.CreateWindow(None, title, style, rect, parent)
+		
+		rect = self.GetClientRect()
+		rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+		self.ocx = controlClass()
+		self.ocx.CreateControl("", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000)		
+
+def MDITest():
+	calendarParentModule = gencache.EnsureModule("{8E27C92E-1264-101C-8A2F-040224009C02}", 0, 7, 0)
+	class MyCal(activex.Control, calendarParentModule.Calendar):
+		def OnAfterUpdate(self):
+			print "OnAfterUpdate"
+		def OnClick(self):
+			print "OnClick"
+	
+	f = OCXFrame()
+	f.Create(MyCal, "Calendar Test")
+
+
+def test1():
+	klass = GetTestCalendarClass()
+	if klass is None:
+		print "Can not test the MSAccess Calendar control - it does not appear to be installed"
+		return
+
+	d = klass(MakeDlgTemplate() )
+	d.DoModal()
+
+def test2():
+	klass = GetTestVideoDialogClass()
+	if klass is None:
+		print "Can not test the Video OCX - it does not appear to be installed,"
+		print "or no AVI files can be found."
+		return
+	d = klass(MakeDlgTemplate() )
+	d.DoModal()
+	d = None
+
+def test3():
+	d = TestCOMMDialog(MakeDlgTemplate() )
+	d.DoModal()
+	d = None
+
+def testall():
+	test1()
+	test2()
+
+def demo():
+	testall()
+
+if __name__=='__main__':
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		testall()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/webbrowser.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/webbrowser.py
new file mode 100644
index 0000000..83af271
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/ocx/webbrowser.py
@@ -0,0 +1,54 @@
+# This demo uses the IE4 Web Browser control.
+
+# It catches an "OnNavigate" event, and updates the frame title.
+# (event stuff by Neil Hodgson)
+
+import win32ui, win32con, win32api, regutil
+from pywin.mfc import window, activex
+from win32com.client import gencache
+import sys
+
+WebBrowserModule = gencache.EnsureModule("{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}", 0, 1, 1)
+if WebBrowserModule is None:
+	raise ImportError, "IE4 does not appear to be installed."
+
+class MyWebBrowser(activex.Control, WebBrowserModule.WebBrowser):
+	def OnBeforeNavigate2(self, pDisp, URL, Flags, TargetFrameName, PostData, Headers, Cancel):
+		self.GetParent().OnNavigate(URL)
+		#print "BeforeNavigate2", pDisp, URL, Flags, TargetFrameName, PostData, Headers, Cancel
+
+class BrowserFrame(window.MDIChildWnd):
+	def __init__(self, url = None):
+		if url is None:
+			self.url = regutil.GetRegisteredHelpFile("Main Python Documentation")
+		else:
+			self.url = url
+		pass # Dont call base class doc/view version...
+	def Create(self, title, rect = None, parent = None):
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW
+		self._obj_ = win32ui.CreateMDIChild()
+		self._obj_.AttachObject(self)
+		self._obj_.CreateWindow(None, title, style, rect, parent)
+		rect = self.GetClientRect()
+		rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+		self.ocx = MyWebBrowser()
+		self.ocx.CreateControl("Web Browser", win32con.WS_VISIBLE | win32con.WS_CHILD, rect, self, 1000)		
+		self.ocx.Navigate(self.url)
+		self.HookMessage (self.OnSize, win32con.WM_SIZE)
+	def OnSize (self, params):
+		rect = self.GetClientRect()
+		rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+		self.ocx.SetWindowPos(0, rect, 0)
+	def OnNavigate(self, url):
+		title = "Web Browser - %s" % (url,)
+		self.SetWindowText(title)
+
+def Demo():
+	url = None
+	if len(sys.argv)>1:
+		url = win32api.GetFullPathName(sys.argv[1])
+	f = BrowserFrame(url)
+	f.Create("Web Browser")
+
+if __name__=='__main__':
+	Demo()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/openGLDemo.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/openGLDemo.py
new file mode 100644
index 0000000..5322df10
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/openGLDemo.py
@@ -0,0 +1,357 @@
+# Ported from the win32 and MFC OpenGL Samples.
+
+from pywin.mfc import docview
+import sys
+try:
+	from OpenGL.GL import *
+	from OpenGL.GLU import *
+except ImportError:
+	print "The OpenGL extensions do not appear to be installed."
+	print "This Pythonwin demo can not run"
+	sys.exit(1)
+
+import win32con
+import win32ui
+import win32api
+import timer
+
+PFD_TYPE_RGBA =       0
+PFD_TYPE_COLORINDEX = 1
+PFD_MAIN_PLANE =      0
+PFD_OVERLAY_PLANE =   1
+PFD_UNDERLAY_PLANE =  (-1)
+PFD_DOUBLEBUFFER =           0x00000001
+PFD_STEREO =                 0x00000002
+PFD_DRAW_TO_WINDOW =         0x00000004
+PFD_DRAW_TO_BITMAP =         0x00000008
+PFD_SUPPORT_GDI =            0x00000010
+PFD_SUPPORT_OPENGL =         0x00000020
+PFD_GENERIC_FORMAT =         0x00000040
+PFD_NEED_PALETTE =           0x00000080
+PFD_NEED_SYSTEM_PALETTE =    0x00000100
+PFD_SWAP_EXCHANGE =          0x00000200
+PFD_SWAP_COPY =              0x00000400
+PFD_SWAP_LAYER_BUFFERS =     0x00000800
+PFD_GENERIC_ACCELERATED =    0x00001000
+PFD_DEPTH_DONTCARE =         0x20000000
+PFD_DOUBLEBUFFER_DONTCARE =  0x40000000
+PFD_STEREO_DONTCARE =        0x80000000
+
+
+threeto8 = [0, 0111>>1, 0222>>1, 0333>>1, 0444>>1, 0555>>1, 0666>>1, 0377]
+twoto8 = [0, 0x55, 0xaa, 0xff]
+oneto8 = [0, 255]
+
+def ComponentFromIndex(i, nbits, shift):
+	# val = (unsigned char) (i >> shift);
+	val = (i >> shift) & 0xF;
+	if nbits==1:
+		val = val & 0x1
+		return oneto8[val]
+	elif nbits==2:
+		val = val & 0x3
+		return twoto8[val]
+	elif nbits==3:
+		val = val & 0x7
+		return threeto8[val]
+	else:
+		return 0;
+
+OpenGLViewParent=docview.ScrollView
+class OpenGLView(OpenGLViewParent):
+	def PreCreateWindow(self, cc):
+		self.HookMessage (self.OnSize, win32con.WM_SIZE)
+		# An OpenGL window must be created with the following flags and must not
+		# include CS_PARENTDC for the class style. Refer to SetPixelFormat
+		# documentation in the "Comments" section for further information.
+		style = cc[5]
+		style = style | win32con.WS_CLIPSIBLINGS | win32con.WS_CLIPCHILDREN
+		cc = cc[0], cc[1], cc[2], cc[3], cc[4], style, cc[6], cc[7], cc[8]
+		cc = self._obj_.PreCreateWindow(cc)
+		return cc
+
+	def OnSize (self, params):
+		lParam = params[3]
+		cx = win32api.LOWORD(lParam)
+		cy = win32api.HIWORD(lParam)
+		glViewport(0, 0, cx, cy)
+
+		if self.oldrect[2] > cx or self.oldrect[3] > cy:
+			self.RedrawWindow()
+			
+		self.OnSizeChange(cx, cy)
+
+		self.oldrect = self.oldrect[0], self.oldrect[1], cx, cy
+		
+	def OnInitialUpdate(self):
+		self.SetScaleToFitSize((100,100)) # or SetScrollSizes() - A Pythonwin requirement
+		return self._obj_.OnInitialUpdate()
+#		return rc
+
+	def OnCreate(self, cs):
+		self.oldrect = self.GetClientRect()
+		self._InitContexts()
+		self.Init()
+			
+	def OnDestroy(self, msg):
+		self.Term()
+		self._DestroyContexts()
+		return OpenGLViewParent.OnDestroy(self, msg)
+
+
+	def OnDraw(self, dc):
+		self.DrawScene()
+	
+	def OnEraseBkgnd(self, dc):
+		return 1
+
+	# The OpenGL helpers
+	def _SetupPixelFormat(self):
+		dc = self.dc.GetSafeHdc()
+		pfd = CreatePIXELFORMATDESCRIPTOR()
+		pfd.dwFlags = PFD_DRAW_TO_WINDOW | PFD_SUPPORT_OPENGL | PFD_DOUBLEBUFFER
+		pfd.iPixelType = PFD_TYPE_RGBA
+		pfd.cColorBits = 24
+		pfd.cDepthBits = 32
+		pfd.iLayerType = PFD_MAIN_PLANE
+		pixelformat = ChoosePixelFormat(dc, pfd)
+		SetPixelFormat(dc, pixelformat, pfd)
+		self._CreateRGBPalette()
+		
+	def _CreateRGBPalette(self):
+		dc = self.dc.GetSafeHdc()
+		n = GetPixelFormat(dc)
+		pfd = DescribePixelFormat(dc, n)
+		if pfd.dwFlags & PFD_NEED_PALETTE:
+			n = 1 << pfd.cColorBits
+			pal = []
+			for i in range(n):
+				this = ComponentFromIndex(i, pfd.cRedBits, pfd.cRedShift), \
+				      ComponentFromIndex(i, pfd.cGreenBits, pfd.cGreenShift), \
+				      ComponentFromIndex(i, pfd.cBlueBits, pfd.cBlueShift), \
+				      0
+				pal.append(this)
+			hpal = win32ui.CreatePalette(pal)
+			self.dc.SelectPalette(hpal, 0)
+			self.dc.RealizePalette()
+			
+	def _InitContexts(self):
+		self.dc = self.GetDC()
+		self._SetupPixelFormat()
+		hrc = wglCreateContext(self.dc.GetSafeHdc())
+		wglMakeCurrent(self.dc.GetSafeHdc(), hrc)
+
+	def _DestroyContexts(self):
+		hrc = wglGetCurrentContext()
+		wglMakeCurrent(0, 0)
+		if hrc: wglDeleteContext(hrc)
+
+	# The methods to support OpenGL
+	def DrawScene(self):
+		assert 0, "You must override this method"
+		
+	def Init(self):
+		assert 0, "You must override this method"
+		
+	def OnSizeChange(self, cx, cy):
+		pass
+
+	def Term(self):
+		pass
+
+
+class TestView(OpenGLView):
+
+	def OnSizeChange(self, right, bottom):
+		glClearColor( 0.0, 0.0, 0.0, 1.0 );
+		glClearDepth( 1.0 );
+		glEnable(GL_DEPTH_TEST)
+
+		glMatrixMode( GL_PROJECTION )
+		if bottom:
+			aspect = right / bottom
+		else:
+			aspect = 0 # When window created!
+		glLoadIdentity()
+		gluPerspective( 45.0, aspect, 3.0, 7.0 )
+		glMatrixMode( GL_MODELVIEW )
+		
+		near_plane = 3.0;
+		far_plane = 7.0;
+		maxObjectSize = 3.0;
+		self.radius = near_plane + maxObjectSize/2.0;
+
+
+	def Init(self):
+		pass
+
+	def DrawScene(self):
+		glClearColor(0.0, 0.0, 0.0, 1.0)
+		glClear( GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT )
+
+		glPushMatrix()
+		glTranslatef(0.0, 0.0, -self.radius);
+
+		self._DrawCone()
+
+		self._DrawPyramid()
+
+		glPopMatrix()
+		glFinish()
+
+		SwapBuffers( wglGetCurrentDC() )
+
+	def _DrawCone(self):
+		glColor3f(0.0, 1.0, 0.0)
+
+		glPushMatrix()
+		glTranslatef(-1.0, 0.0, 0.0);
+		quadObj = gluNewQuadric();
+		gluQuadricDrawStyle(quadObj, GLU_FILL);
+		gluQuadricNormals(quadObj, GLU_SMOOTH);
+		gluCylinder(quadObj, 1.0, 0.0, 1.0, 20, 10);
+#		gluDeleteQuadric(quadObj);
+		glPopMatrix();
+
+	def _DrawPyramid(self):
+		glPushMatrix()
+		glTranslatef(1.0, 0.0, 0.0)
+		glBegin(GL_TRIANGLE_FAN)
+		glColor3f(1.0, 0.0, 0.0)
+		glVertex3f(0.0, 1.0, 0.0)
+		glColor3f(0.0, 1.0, 0.0)
+		glVertex3f(-1.0, 0.0, 0.0)
+		glColor3f(0.0, 0.0, 1.0)
+		glVertex3f(0.0, 0.0, 1.0)
+		glColor3f(0.0, 1.0, 0.0)
+		glVertex3f(1.0, 0.0, 0.0)
+		glEnd()
+		glPopMatrix()
+
+class CubeView(OpenGLView):
+	def OnSizeChange(self, right, bottom):
+		glClearColor( 0.0, 0.0, 0.0, 1.0 );
+		glClearDepth( 1.0 );
+		glEnable(GL_DEPTH_TEST)
+
+		glMatrixMode( GL_PROJECTION )
+		if bottom:
+			aspect = right / bottom
+		else:
+			aspect = 0 # When window created!
+		glLoadIdentity()
+		gluPerspective( 45.0, aspect, 3.0, 7.0 )
+		glMatrixMode( GL_MODELVIEW )
+		
+		near_plane = 3.0;
+		far_plane = 7.0;
+		maxObjectSize = 3.0;
+		self.radius = near_plane + maxObjectSize/2.0;
+
+	def Init(self):
+		self.busy = 0
+		self.wAngleY = 10.0
+		self.wAngleX = 1.0
+		self.wAngleZ = 5.0
+		self.timerid = timer.set_timer (150, self.OnTimer)
+		
+	def OnTimer(self, id, timeVal):
+		self.DrawScene()
+	
+	def Term(self):
+		timer.kill_timer(self.timerid)
+
+	def DrawScene(self):
+		if self.busy: return
+		self.busy = 1
+
+		glClearColor(0.0, 0.0, 0.0, 1.0);
+		glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
+		
+		glPushMatrix();
+		
+		glTranslatef(0.0, 0.0, -self.radius);
+		glRotatef(self.wAngleX, 1.0, 0.0, 0.0);
+		glRotatef(self.wAngleY, 0.0, 1.0, 0.0);
+		glRotatef(self.wAngleZ, 0.0, 0.0, 1.0);
+		
+		self.wAngleX = self.wAngleX + 1.0
+		self.wAngleY = self.wAngleY + 10.0
+		self.wAngleZ = self.wAngleZ + 5.0;
+		
+		
+		glBegin(GL_QUAD_STRIP);
+		glColor3f(1.0, 0.0, 1.0);
+		glVertex3f(-0.5, 0.5, 0.5);
+		
+		glColor3f(1.0, 0.0, 0.0);
+		glVertex3f(-0.5, -0.5, 0.5);
+		
+		glColor3f(1.0, 1.0, 1.0);
+		glVertex3f(0.5, 0.5, 0.5);
+		
+		glColor3f(1.0, 1.0, 0.0);
+		glVertex3f(0.5, -0.5, 0.5);
+		
+		glColor3f(0.0, 1.0, 1.0);
+		glVertex3f(0.5, 0.5, -0.5);
+		
+		glColor3f(0.0, 1.0, 0.0);
+		glVertex3f(0.5, -0.5, -0.5);
+		
+		glColor3f(0.0, 0.0, 1.0);
+		glVertex3f(-0.5, 0.5, -0.5);
+		
+		glColor3f(0.0, 0.0, 0.0);
+		glVertex3f(-0.5, -0.5,  -0.5);
+		
+		glColor3f(1.0, 0.0, 1.0);
+		glVertex3f(-0.5, 0.5, 0.5);
+		
+		glColor3f(1.0, 0.0, 0.0);
+		glVertex3f(-0.5, -0.5, 0.5);
+		
+		glEnd();
+		
+		glBegin(GL_QUADS);
+		glColor3f(1.0, 0.0, 1.0);
+		glVertex3f(-0.5, 0.5, 0.5);
+		
+		glColor3f(1.0, 1.0, 1.0);
+		glVertex3f(0.5, 0.5, 0.5);
+		
+		glColor3f(0.0, 1.0, 1.0);
+		glVertex3f(0.5, 0.5, -0.5);
+		
+		glColor3f(0.0, 0.0, 1.0);
+		glVertex3f(-0.5, 0.5, -0.5);
+		glEnd();
+		
+		glBegin(GL_QUADS);
+		glColor3f(1.0, 0.0, 0.0);
+		glVertex3f(-0.5, -0.5, 0.5);
+		
+		glColor3f(1.0, 1.0, 0.0);
+		glVertex3f(0.5, -0.5, 0.5);
+		
+		glColor3f(0.0, 1.0, 0.0);
+		glVertex3f(0.5, -0.5, -0.5);
+		
+		glColor3f(0.0, 0.0, 0.0);
+		glVertex3f(-0.5, -0.5,  -0.5);
+		glEnd();
+		
+		glPopMatrix();
+		
+		glFinish();
+		SwapBuffers(wglGetCurrentDC());
+		
+		self.busy = 0
+
+def test():
+	template = docview.DocTemplate(None, None, None, CubeView )
+#	template = docview.DocTemplate(None, None, None, TestView )
+	template.OpenDocumentFile(None)
+	
+if __name__=='__main__':
+	test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/progressbar.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/progressbar.py
new file mode 100644
index 0000000..2a38b0e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/progressbar.py
@@ -0,0 +1,91 @@
+#
+# Progress bar control example
+#
+#	PyCProgressCtrl encapsulates the MFC CProgressCtrl class.  To use it,
+#	you:
+#
+#	- Create the control with win32ui.CreateProgressCtrl()
+#	- Create the control window with PyCProgressCtrl.CreateWindow()
+#	- Initialize the range if you want it to be other than (0, 100) using
+#	  PyCProgressCtrl.SetRange()
+#	- Either:
+#	  - Set the step size with PyCProgressCtrl.SetStep(), and
+#	  - Increment using PyCProgressCtrl.StepIt()
+#	  or:
+#	  - Set the amount completed using PyCProgressCtrl.SetPos()
+#
+# Example and progress bar code courtesy of KDL Technologies, Ltd., Hong Kong SAR, China.
+#
+
+from pywin.mfc import dialog
+import win32ui
+import win32con
+
+def MakeDlgTemplate():
+	style = (win32con.DS_MODALFRAME |
+		win32con.WS_POPUP |
+		win32con.WS_VISIBLE |
+		win32con.WS_CAPTION |
+		win32con.WS_SYSMENU |
+		win32con.DS_SETFONT)
+	cs = (win32con.WS_CHILD |
+		win32con.WS_VISIBLE)
+
+	w = 215
+	h = 36
+
+	dlg = [["Progress bar control example",
+		(0, 0, w, h),
+		style,
+		None,
+		(8, "MS Sans Serif")],
+		]
+
+	s = win32con.WS_TABSTOP | cs
+
+	dlg.append([128,
+		"Tick",
+		win32con.IDOK,
+		(10, h - 18, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
+
+	dlg.append([128,
+		"Cancel",
+		win32con.IDCANCEL,
+		(w - 60, h - 18, 50, 14), s | win32con.BS_PUSHBUTTON])
+
+	return dlg
+
+class TestDialog(dialog.Dialog):
+	def OnInitDialog(self):
+		rc = dialog.Dialog.OnInitDialog(self)
+		self.pbar = win32ui.CreateProgressCtrl()
+		self.pbar.CreateWindow (win32con.WS_CHILD |
+						win32con.WS_VISIBLE,
+						(10, 10, 310, 24),
+						self, 1001)
+		# self.pbar.SetStep (5)
+		self.progress = 0
+		self.pincr = 5
+		return rc
+
+	def OnOK(self):
+		# NB: StepIt wraps at the end if you increment past the upper limit!
+		# self.pbar.StepIt()
+		self.progress = self.progress + self.pincr
+		if self.progress > 100:
+	    		self.progress = 100
+		if self.progress <= 100:
+			self.pbar.SetPos(self.progress)
+
+def demo(modal = 0):
+	d = TestDialog (MakeDlgTemplate())
+	if modal:
+		d.DoModal()
+	else:
+		d.CreateWindow ()
+
+if __name__=='__main__':
+	demo(1)
+
+# $Header: /cvsroot/pywin32/pywin32/Pythonwin/pywin/Demos/progressbar.py,v 1.1 1999/09/01 23:33:35 mhammond Exp $
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/sliderdemo.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/sliderdemo.py
new file mode 100644
index 0000000..b077fd0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/sliderdemo.py
@@ -0,0 +1,49 @@
+# sliderdemo.py
+# Demo of the slider control courtesy of Mike Fletcher.
+
+import win32con, win32ui
+from pywin.mfc import dialog
+
+class MyDialog(dialog.Dialog):
+	'''
+	Example using simple controls
+	'''
+	_dialogstyle = (win32con.WS_MINIMIZEBOX | win32con.WS_DLGFRAME |
+                    win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE |
+                    win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT )
+	_buttonstyle = (win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP |
+                    win32con.WS_CHILD | win32con.WS_VISIBLE)
+	### The static template, contains all "normal" dialog items
+	DIALOGTEMPLATE = [
+		# the dialog itself is the first element in the template
+		["Example slider", (0, 0, 50, 43), _dialogstyle, None, (8, "MS SansSerif")],
+		# rest of elements are the controls within the dialog
+		# standard "Close" button
+		[128, "Close", win32con.IDCANCEL, (0, 30, 50, 13), _buttonstyle],	]
+	### ID of the control to be created during dialog initialisation
+	IDC_SLIDER = 9500
+	def __init__(self ):
+		dialog.Dialog.__init__(self, self.DIALOGTEMPLATE)
+	def OnInitDialog(self):
+		rc = dialog.Dialog.OnInitDialog(self)
+		# now initialise your controls that you want to create
+		# programmatically, including those which are OLE controls
+		# those created directly by win32ui.Create*
+		# and your "custom controls" which are subclasses/whatever
+		win32ui.EnableControlContainer()
+		self.slider = win32ui.CreateSliderCtrl( )
+		self.slider.CreateWindow( win32con.WS_TABSTOP | win32con.WS_VISIBLE,
+                                  (0,0,100,30),
+                                  self._obj_,
+                                  self.IDC_SLIDER)
+		return rc
+	def OnCancel(self):
+		print "The slider control is at position", self.slider.GetPos()
+		self._obj_.OnCancel()
+###
+def demo():
+	dia = MyDialog()
+	dia.DoModal()
+
+if __name__ == "__main__":
+	demo()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/splittst.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/splittst.py
new file mode 100644
index 0000000..2e72168
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/splittst.py
@@ -0,0 +1,72 @@
+import win32ui
+import win32con
+import fontdemo
+from pywin.mfc import window, docview
+import commctrl
+
+# derive from CMDIChild.  This does much work for us.
+		
+class SplitterFrame(window.MDIChildWnd):
+	# MDI child frame whose client area is a 2x1 static splitter: the top
+	# row is itself split into an edit view and a font-demo view, and the
+	# bottom row holds a list view backed by an image list we own.
+	def __init__(self):
+		# call base CreateFrame
+		self.images = None
+		window.MDIChildWnd.__init__(self)
+
+	def OnCreateClient(self, cp, context):
+		# Build the splitter hierarchy and the three views.
+		splitter = win32ui.CreateSplitter()
+		doc = context.doc
+		frame_rect = self.GetWindowRect()
+		size = ((frame_rect[2] - frame_rect[0]),
+		        (frame_rect[3] - frame_rect[1])/2)
+		sub_size = (size[0]/2, size[1])
+		splitter.CreateStatic (self, 2, 1)
+		self.v1 = win32ui.CreateEditView(doc)
+		self.v2 = fontdemo.FontView(doc)
+		# CListControl view
+		self.v3 = win32ui.CreateListView(doc)
+		sub_splitter = win32ui.CreateSplitter()
+		# pass "splitter" so each view knows how to get to the others
+		sub_splitter.CreateStatic (splitter, 1, 2)
+		sub_splitter.CreateView(self.v1, 0, 0, (sub_size))
+		sub_splitter.CreateView(self.v2, 0, 1, (0,0)) # size ignored.
+		splitter.SetRowInfo(0, size[1] ,0)
+		splitter.CreateView (self.v3, 1, 0, (0,0)) # size ignored.
+		# Setup items in the imagelist
+		self.images = win32ui.CreateImageList(32,32,1,5,5)
+		self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_MAINFRAME))
+		self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_PYTHONCONTYPE))
+		self.images.Add(win32ui.GetApp().LoadIcon(win32ui.IDR_TEXTTYPE))
+		self.v3.SetImageList(self.images, commctrl.LVSIL_NORMAL)
+		self.v3.InsertItem(0, "Icon 1", 0)
+		self.v3.InsertItem(0, "Icon 2", 1)
+		self.v3.InsertItem(0, "Icon 3", 2)
+#		self.v3.Arrange(commctrl.LVA_DEFAULT) Hmmm - win95 aligns left always???
+		return 1
+	def OnDestroy(self, msg):
+		# Free the image list we own before the window goes away.
+		window.MDIChildWnd.OnDestroy(self, msg)
+		if self.images:
+			self.images.DeleteImageList()
+			self.images = None
+
+	def InitialUpdateFrame(self, doc, makeVisible):
+		# Seed the edit pane with sample text, leaving it "unmodified".
+		self.v1.ReplaceSel("Hello from Edit Window 1")
+		self.v1.SetModifiedFlag(0)
+
+class SampleTemplate(docview.DocTemplate):
+	# Doc template that pairs a document with the SplitterFrame above.
+	def __init__(self):
+		docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, None, SplitterFrame, None)
+	def InitialUpdateFrame(self, frame, doc, makeVisible):
+		# Run the default MFC handler, then give the frame a chance to
+		# initialise its views.
+#		print "frame is ", frame, frame._obj_
+#		print "doc is ", doc, doc._obj_
+		self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler.
+		frame.InitialUpdateFrame(doc, makeVisible)
+		
+def demo():
+	# Open a new document using the splitter template.
+	template = SampleTemplate()
+	doc=template.OpenDocumentFile(None)
+	doc.SetTitle("Splitter Demo")
+	
+if __name__=='__main__':
+	# Only run when a usable Pythonwin GUI is available.
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		demo()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/threadedgui.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/threadedgui.py
new file mode 100644
index 0000000..14c5713
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/threadedgui.py
@@ -0,0 +1,171 @@
+# Demo of using just windows, without documents and views.
+
+# Also demo of a GUI thread, pretty much direct from the MFC C++ sample MTMDI.
+
+import win32ui
+import win32con
+import win32api
+import timer
+
+from pywin.mfc import window, docview, thread
+
+
+WM_USER_PREPARE_TO_CLOSE = win32con.WM_USER + 32
+
+# font is a dictionary in which the following elements matter:
+# (the best matching font to supplied parameters is returned)
+#   name		string name of the font as known by Windows
+#   size		point size of font in logical units
+#   weight		weight of font (win32con.FW_NORMAL, win32con.FW_BOLD)
+#   italic		boolean; true if set to anything but None
+#   underline	boolean; true if set to anything but None
+
+# This window is a child window of a frame.  It is not the frame window itself.
+# This window is a child window of a frame.  It is not the frame window itself.
+class FontWindow(window.Wnd):
+	# Child window that paints "typewriter"-animated text, driven by a
+	# 100ms timer that grows and shrinks the visible prefix of the text.
+	def __init__(self,  text = 'Python Rules!'):
+		window.Wnd.__init__(self)
+		self.text = text
+		self.index = 0
+		self.incr = 1
+		self.width = self.height = 0
+		self.ChangeAttributes()
+		# set up message handlers
+		
+	def Create(self, title, style, rect, parent):
+		# Register a window class, create the real HWND, hook the
+		# messages this demo needs, and start the animation timer.
+		classStyle = win32con.CS_HREDRAW | win32con.CS_VREDRAW
+		className = win32ui.RegisterWndClass(classStyle, 0, win32con.COLOR_WINDOW+1, 0)
+		self._obj_ = win32ui.CreateWnd()
+		self._obj_.AttachObject(self)
+		self._obj_.CreateWindow(className, title, style, rect, parent, win32ui.AFX_IDW_PANE_FIRST)
+		self.HookMessage (self.OnSize, win32con.WM_SIZE)
+		self.HookMessage (self.OnPrepareToClose, WM_USER_PREPARE_TO_CLOSE)
+		self.HookMessage (self.OnDestroy, win32con.WM_DESTROY)
+		self.timerid = timer.set_timer (100, self.OnTimer)
+		self.InvalidateRect()
+
+	def OnDestroy (self, msg):
+		# Stop the animation timer when the window dies.
+		timer.kill_timer(self.timerid)
+
+	def OnTimer(self, id, timeVal):
+		# Advance the visible prefix, bouncing between 0 and len(text).
+		self.index = self.index + self.incr
+		if self.index > len(self.text):
+			self.incr = -1
+			self.index = len(self.text)
+		elif self.index < 0:
+			self.incr = 1
+			self.index = 0
+		self.InvalidateRect()
+
+	def OnPaint (self):
+		# Draw the current text prefix centred in the client area.
+#		print "Paint message from thread", win32api.GetCurrentThreadId()
+		dc, paintStruct = self.BeginPaint()
+		self.OnPrepareDC(dc, None)
+		
+		if (self.width == 0 and self.height == 0):
+			left, top, right, bottom = self.GetClientRect()
+			self.width = right - left
+			self.height = bottom - top
+		x, y = self.width / 2, self.height / 2
+		dc.TextOut (x, y, self.text[:self.index])
+		self.EndPaint(paintStruct)
+
+	def ChangeAttributes(self):
+		# Create the font used by OnPrepareDC.
+		font_spec = {'name':'Arial', 'height':42}
+		self.font = win32ui.CreateFont (font_spec)
+		
+	def OnPrepareToClose(self, params):
+		# Posted from another thread; destroy ourselves on our own thread.
+		self.DestroyWindow()
+
+	def OnSize (self, params):
+		# Cache the new client size from the WM_SIZE lParam.
+		lParam = params[3]
+		self.width = win32api.LOWORD(lParam)
+		self.height = win32api.HIWORD(lParam)
+
+	def OnPrepareDC (self, dc, printinfo):
+		# Set up the DC for forthcoming OnDraw call
+		dc.SetTextColor (win32api.RGB(0,0,255))
+		dc.SetBkColor (win32api.GetSysColor (win32con.COLOR_WINDOW))
+		dc.SelectObject (self.font)
+		dc.SetTextAlign (win32con.TA_CENTER | win32con.TA_BASELINE)
+
+class FontFrame(window.MDIChildWnd):
+	# MDI child that hosts a single FontWindow on the *main* GUI thread.
+	def __init__(self):
+		pass # Dont call base class doc/view version...
+	def Create(self, title, rect = None, parent = None):
+		# Create the MDI child, then fill its client area with the demo
+		# child window.
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW
+		self._obj_ = win32ui.CreateMDIChild()
+		self._obj_.AttachObject(self)
+		self._obj_.CreateWindow(None, title, style, rect, parent)
+		rect = self.GetClientRect()
+		rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+		self.child = FontWindow("Not threaded")
+		self.child.Create("FontDemo", win32con.WS_CHILD | win32con.WS_VISIBLE, rect, self)
+
+
+class TestThread(thread.WinThread):
+	# GUI thread which owns its own FontWindow inside the given parent.
+	def __init__(self, parentWindow):
+		self.parentWindow = parentWindow
+		self.child = None
+		thread.WinThread.__init__(self)
+	def InitInstance(self):
+		# Runs on the new thread: create the child window there so the
+		# thread owns it and pumps its messages.
+		rect = self.parentWindow.GetClientRect()
+		rect = (0,0,rect[2]-rect[0], rect[3]-rect[1])
+		self.child = FontWindow()
+		self.child.Create("FontDemo", win32con.WS_CHILD | win32con.WS_VISIBLE, rect, self.parentWindow)
+		self.SetMainFrame(self.child)
+		return thread.WinThread.InitInstance(self)
+
+	def ExitInstance(self):
+		return 0
+
+class ThreadedFontFrame(window.MDIChildWnd):
+	# MDI child whose FontWindow is created and run by a separate GUI
+	# thread (TestThread).
+	def __init__(self):
+		pass # Dont call base class doc/view version...
+		# (note: this assignment still executes - "pass" does not return)
+		self.thread = None
+	def Create(self, title, rect = None, parent = None):
+		# Create the MDI child on this thread, then spin up the worker
+		# GUI thread which creates the child window.
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_OVERLAPPEDWINDOW
+		self._obj_ = win32ui.CreateMDIChild()
+		self._obj_.CreateWindow(None, title, style, rect, parent)
+		self._obj_.HookMessage(self.OnDestroy, win32con.WM_DESTROY)
+		self._obj_.HookMessage (self.OnSize, win32con.WM_SIZE)
+		
+		self.thread = TestThread(self)
+		self.thread.CreateThread()
+		
+	def OnSize(self, msg):
+		pass
+
+	def OnDestroy(self, msg):
+		# Ask the other thread's window to destroy itself; it must be
+		# destroyed on the thread that created it.
+		win32ui.OutputDebugString("OnDestroy\n")
+		if self.thread and self.thread.child:
+			child = self.thread.child
+			child.SendMessage(WM_USER_PREPARE_TO_CLOSE, 0, 0)
+			win32ui.OutputDebugString("Destroyed\n")
+
+	
+def Demo():
+	# Single-frame, non-threaded variant of the demo.
+	f = FontFrame()
+	f.Create("Font Demo")
+
+def ThreadedDemo():
+	# Tile four frames along the bottom of the MDI client: one running on
+	# the main thread, three each running their own GUI thread.
+	rect = win32ui.GetMainFrame().GetMDIClient().GetClientRect()
+	rect = rect[0], rect[3]*3/4, rect[2]/4, rect[3]
+	incr = rect[2]
+	for i in range(4):
+		if i==0:
+			f = FontFrame()
+			title = "Not threaded"
+		else:
+			f = ThreadedFontFrame()
+			title = "Threaded GUI Demo"
+		f.Create(title, rect)
+		rect = rect[0] + incr, rect[1], rect[2]+incr, rect[3]
+	# Give them a chance to start
+	win32api.Sleep(100)
+	win32ui.PumpWaitingMessages()
+
+if __name__=='__main__':
+	# Only run when a usable Pythonwin GUI is available.
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		ThreadedDemo()
+#		Demo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/toolbar.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/toolbar.py
new file mode 100644
index 0000000..e5eabfd9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/Demos/toolbar.py
@@ -0,0 +1,85 @@
+# Demo of ToolBars
+
+# Shows the toolbar control.
+# Demos how to make custom tooltips, etc.
+
+import win32ui
+import win32con
+import win32api
+from pywin.mfc import docview, window, afxres
+import commctrl
+
+class GenericFrame(window.MDIChildWnd):
+	# MDI child frame that dynamically creates a dockable toolbar and
+	# supplies custom tooltip text for one of its buttons.
+	def OnCreateClient(self, cp, context):
+		# handlers for toolbar buttons
+		self.HookCommand (self.OnPrevious, 401)
+		self.HookCommand (self.OnNext, 402)
+		self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXT)
+		self.HookNotify(self.GetTTText, commctrl.TTN_NEEDTEXTW)
+	
+#		parent = win32ui.GetMainFrame()
+		parent = self
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | \
+		    afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY
+
+		buttons = (win32ui.ID_APP_ABOUT,win32ui.ID_VIEW_INTERACTIVE)
+		bitmap = win32ui.IDB_BROWSER_HIER
+		tbid = 0xE840
+		self.toolbar = tb = win32ui.CreateToolBar (parent, style, tbid)
+		tb.LoadBitmap(bitmap)
+		tb.SetButtons(buttons)
+		    
+		tb.EnableDocking(afxres.CBRS_ALIGN_ANY)
+		tb.SetWindowText("Test")
+		parent.EnableDocking(afxres.CBRS_ALIGN_ANY)
+		parent.DockControlBar(tb)
+		parent.LoadBarState("ToolbarTest")
+		window.MDIChildWnd.OnCreateClient(self, cp, context)
+		return 1
+
+	def OnDestroy(self, msg):
+		# Persist the toolbar docking state for next time.
+		self.SaveBarState("ToolbarTest")
+		
+	def GetTTText(self, std, extra):
+		# Tooltip-notify handler: supply text for the About button only.
+		(hwndFrom, idFrom, code) = std
+		text, hinst, flags = extra
+		if flags & commctrl.TTF_IDISHWND:
+			return # Not handled
+		if (idFrom==win32ui.ID_APP_ABOUT):
+			return 0, ("It works!", idFrom, code)
+		return None # not handled.
+			
+	def GetMessageString(self, id):
+		# Status-bar text for our command; defer to MFC for the rest.
+		if id==win32ui.ID_APP_ABOUT:
+			return "Dialog Test\nTest"
+		else:
+			return self._obj_.GetMessageString(id)
+
+	def OnSize (self, params):
+		print 'OnSize called with ', params
+
+	def OnNext (self, id, cmd):
+		print 'OnNext called'
+	
+	def OnPrevious (self, id, cmd):
+		print 'OnPrevious called'
+	
+# Explanatory text placed into the demo document's edit view by test().
+msg = """\
+This toolbar was dynamically created.\r
+\r
+The first item's tooltips is provided by Python code.\r
+\r
+(Dont close the window with the toolbar in a floating state - it may not re-appear!)\r
+"""
+
+def test():
+	# Open a document using GenericFrame (which builds the toolbar) and
+	# fill its edit view with the explanatory text.
+	template = docview.DocTemplate( win32ui.IDR_PYTHONTYPE, None, GenericFrame, docview.EditView)
+	doc = template.OpenDocumentFile(None)
+	doc.SetTitle("Toolbar Test")
+	view = doc.GetFirstView()
+	view.SetWindowText(msg)
+
+if __name__=='__main__':
+	# Only run when a usable Pythonwin GUI is available.
+	import demoutils
+	if demoutils.NeedGoodGUI():
+		test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/IDLE.cfg b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/IDLE.cfg
new file mode 100644
index 0000000..c724b95
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/IDLE.cfg
@@ -0,0 +1,29 @@
+[General]

+# We base this configuration on the default config.

+# You can list "Based On" as many times as you like

+Based On          = default

+

+[Keys]

+# Only list keys that differ from the default.

+# Note you may wish to rebind some of the default

+# Pythonwin keys to "Beep" or "DoNothing"

+

+Alt+L             = LocateSelectedFile

+Ctrl+Q            = AppExit

+

+# Other non-default Pythonwin keys

+Alt+A             = EditSelectAll

+Alt+M             = LocateModule

+

+# Movement

+Ctrl+D            = GotoEndOfFile

+

+# Tabs and other indent features

+Alt+T            = <<toggle-tabs>>

+Ctrl+[            = <<indent-region>>

+Ctrl+]            = <<dedent-region>>

+

+[Keys:Interactive]

+Alt+P             = <<history-previous>>

+Alt+N             = <<history-next>>

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/__init__.py
new file mode 100644
index 0000000..d824e21
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/__init__.py
@@ -0,0 +1,11 @@
+# See if we run in Unicode mode.
+# This may be referenced all over the place, so we save it globally.
+import win32api, win32con, __builtin__
+
+# This doesn't seem to work correctly on NT - see bug 716708
+is_platform_unicode  = 0
+#is_platform_unicode = hasattr(__builtin__, "unicode") and win32api.GetVersionEx()[3] == win32con.VER_PLATFORM_WIN32_NT
+default_platform_encoding = "mbcs" # Will it ever be necessary to change this?
+default_scintilla_encoding = "utf-8" # Scintilla _only_ supports this ATM
+
+# Drop the helper modules - only the constants above are this package's API.
+del win32api, win32con, __builtin__
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/__init__.py
new file mode 100644
index 0000000..bad0001
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/__init__.py
@@ -0,0 +1,113 @@
+import sys
+
+# Some cruft to deal with the Pythonwin GUI booting up from a non GUI app.
+def _MakeDebuggerGUI():
+	# NOTE(review): "app" is not defined or imported in this module, so
+	# this looks like dead code or relies on a global set elsewhere -
+	# confirm before calling.
+	app.InitInstance()
+
+isInprocApp = -1 # cached IsInproc() result; -1 means "not yet determined"
+def _CheckNeedGUI():
+	# Decide whether a Pythonwin GUI must be created for the debugger,
+	# creating one as a side effect when needed.  Returns true when a
+	# new GUI was created.
+	global isInprocApp
+	if isInprocApp==-1:
+		import win32ui
+		isInprocApp = win32ui.GetApp().IsInproc()
+	if isInprocApp:
+		# MAY Need it - may already have one
+		need = sys.modules.has_key("pywin.debugger.dbgpyapp")==0
+	else:
+		need = 0
+	if need:
+		import pywin.framework.app
+		import dbgpyapp
+		pywin.framework.app.CreateDefaultGUI(dbgpyapp.DebuggerPythonApp)
+
+	else:
+		# Check we have the appropriate editor
+		# No longer necessary!
+		pass
+	return need
+
+# Inject some methods in the top level name-space.
+currentDebugger = None # Wipe out any old one on reload.
+
+def _GetCurrentDebugger():
+	# Return the singleton Debugger, creating it (and the GUI, if one is
+	# required) on first use.
+	global currentDebugger
+	if currentDebugger is None:
+		_CheckNeedGUI()
+		import debugger
+		currentDebugger = debugger.Debugger()
+	return currentDebugger
+
+def GetDebugger():
+	# Return the debugger with its GUI initialised, or None on failure.
+	# An error here is not nice - as we are probably trying to
+	# break into the debugger on a Python error, any
+	# error raised by this is usually silent, and causes
+	# big problems later!
+	try:
+		rc = _GetCurrentDebugger()
+		rc.GUICheckInit()
+		return rc
+	except:
+		# Deliberately broad: report and return None rather than raise
+		# while an unrelated error is already being handled.
+		print "Could not create the debugger!"
+		import traceback
+		traceback.print_exc()
+		return None
+
+def close():
+	# Shut down the current debugger, if one was ever created.
+	if currentDebugger is not None:
+		currentDebugger.close()
+
+def run(cmd,globals=None, locals=None, start_stepping = 1):
+	# Execute "cmd" under the debugger (wrapper over Debugger.run).
+	_GetCurrentDebugger().run(cmd, globals,locals, start_stepping)
+
+def runeval(expression, globals=None, locals=None):
+	# Evaluate an expression under the debugger and return its value.
+	return _GetCurrentDebugger().runeval(expression, globals, locals)
+
+def runcall(*args):
+	# Call args[0](*args[1:]) under the debugger; returns its result.
+	return apply(_GetCurrentDebugger().runcall, args)
+
+def set_trace():
+	# Break into the debugger at the caller's frame (like pdb.set_trace),
+	# unless the app is shutting down or the debugger is already active.
+	import sys
+	d = _GetCurrentDebugger()
+
+	if d.frameShutdown: return # App closing
+
+	if d.stopframe != d.botframe:
+		# If im not "running"
+		return
+
+	sys.settrace(None) # May be hooked
+	d.reset()
+	d.set_trace()
+
+# "brk" is an alias for "set_trace" ("break" is a reserved word :-(
+brk = set_trace
+
+# Post-Mortem interface
+
+def post_mortem(t=None):
+	# Debug a traceback after the fact.  Defaults to the exception being
+	# handled, falling back to sys.last_traceback.
+	if t is None:
+		t = sys.exc_info()[2] # Will be valid if we are called from an except handler.
+	if t is None:
+		try:
+			t = sys.last_traceback
+		except AttributeError:
+			print "No traceback can be found from which to perform post-mortem debugging!"
+			print "No debugging can continue"
+			return
+	p = _GetCurrentDebugger()
+	if p.frameShutdown: return # App closing
+	# No idea why I need to settrace to None - it should have been reset by now?
+	sys.settrace(None)
+	p.reset()
+	# Walk to the innermost frame of the traceback.
+	while t.tb_next <> None: t = t.tb_next
+	p.bAtPostMortem = 1
+	p.prep_run(None)
+	try:
+		p.interaction(t.tb_frame, t)
+	finally:
+		# Drop the traceback reference and restore state even on error.
+		t = None
+		p.bAtPostMortem = 0
+		p.done_run()
+
+def pm(t=None):
+	# Shorthand for post_mortem, matching pdb.pm().
+	post_mortem(t)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/configui.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/configui.py
new file mode 100644
index 0000000..ff0f260
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/configui.py
@@ -0,0 +1,30 @@
+from dbgcon import *
+from pywin.mfc import dialog
+
+class DebuggerOptionsPropPage(dialog.PropertyPage):
+	# Property page for debugger options, persisted via the registry
+	# profile (LoadDebuggerOptions / SaveDebuggerOptions).
+	def __init__(self):
+		dialog.PropertyPage.__init__(self, win32ui.IDD_PP_DEBUGGER)
+
+	def OnInitDialog(self):
+		# Load persisted options and wire each checkbox to its option key
+		# via DDX; the page's mapping interface holds the working values.
+		options = self.options = LoadDebuggerOptions()
+		self.AddDDX(win32ui.IDC_CHECK1, OPT_HIDE)
+		self[OPT_STOP_EXCEPTIONS] = options[OPT_STOP_EXCEPTIONS]
+		self.AddDDX(win32ui.IDC_CHECK2, OPT_STOP_EXCEPTIONS)
+		self[OPT_HIDE] = options[OPT_HIDE]
+		return dialog.PropertyPage.OnInitDialog(self)
+
+	def OnOK(self):
+		# Copy edited values back, persist only when something changed,
+		# and push the new options into any live debugger.
+		self.UpdateData()
+		dirty = 0
+		for key, val in self.items():
+			if self.options.has_key(key):
+				if self.options[key] != val:
+					self.options[key] = val
+					dirty = 1
+		if dirty:
+			SaveDebuggerOptions(self.options)
+		# If there is a debugger open, then set its options.
+		import pywin.debugger
+		if pywin.debugger.currentDebugger is not None:
+			pywin.debugger.currentDebugger.options = self.options
+		return 1
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/dbgcon.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/dbgcon.py
new file mode 100644
index 0000000..1feb21f1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/dbgcon.py
@@ -0,0 +1,28 @@
+# General constants for the debugger
+
+DBGSTATE_NOT_DEBUGGING = 0
+DBGSTATE_RUNNING = 1
+DBGSTATE_BREAK = 2
+DBGSTATE_QUITTING = 3 # Attempting to back out of the debug session.
+
+LINESTATE_CURRENT    = 0x1 # This line is where we are stopped
+LINESTATE_BREAKPOINT = 0x2 # This line is a breakpoint
+LINESTATE_CALLSTACK  = 0x4 # This line is in the callstack.
+
+# Registry value names used to persist debugger options.
+OPT_HIDE = 'hide'
+OPT_STOP_EXCEPTIONS = 'stopatexceptions'
+
+import win32api, win32ui
+
+def DoGetOption(optsDict, optName, default):
+	# Fetch one option from the registry profile into optsDict.
+	optsDict[optName] = win32ui.GetProfileVal("Debugger Options", optName, default)
+
+def LoadDebuggerOptions():
+	# Return a dict of all debugger options with defaults applied.
+	opts = {}
+	DoGetOption(opts, OPT_HIDE, 0)
+	DoGetOption(opts, OPT_STOP_EXCEPTIONS, 1)
+	return opts
+
+def SaveDebuggerOptions(opts):
+	# Persist every option back to the registry profile.
+	for key, val in opts.items():
+		win32ui.WriteProfileVal("Debugger Options", key, val)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/dbgpyapp.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/dbgpyapp.py
new file mode 100644
index 0000000..ebfe354
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/dbgpyapp.py
@@ -0,0 +1,59 @@
+# dbgpyapp.py  - Debugger Python application class
+#
+import win32con
+import win32ui
+import sys
+import string
+import os
+from pywin.framework import intpyapp
+
+version = '0.3.0'
+
+class DebuggerPythonApp(intpyapp.InteractivePythonApp):
+	# Application class used when the debugger boots its own Pythonwin
+	# GUI (see pywin.debugger._CheckNeedGUI).
+	def LoadMainFrame(self):
+		" Create the main applications frame "
+		self.frame = self.CreateMainFrame()
+		self.SetMainFrame(self.frame)
+		self.frame.LoadFrame(win32ui.IDR_DEBUGGER, win32con.WS_OVERLAPPEDWINDOW)
+		self.frame.DragAcceptFiles()	# we can accept these.
+		self.frame.ShowWindow(win32con.SW_HIDE);
+		self.frame.UpdateWindow();
+
+		# but we do rehook, hooking the new code objects.
+		self.HookCommands()
+
+	def InitInstance(self):
+		# Full application start-up: profile settings, main frame,
+		# interactive window, then the user/system modules.
+		# Use a registry path of "Python\Pythonwin Debugger
+		win32ui.SetAppName(win32ui.LoadString(win32ui.IDR_DEBUGGER))
+		win32ui.SetRegistryKey("Python %s" % (sys.winver,))
+		# We _need_ the Scintilla color editor.
+		# (and we _always_ get it now :-)
+
+		numMRU = win32ui.GetProfileVal("Settings","Recent File List Size", 10)
+		win32ui.LoadStdProfileSettings(numMRU)
+
+		self.LoadMainFrame()
+
+		# Display the interactive window if the user wants it.
+		from pywin.framework import interact
+		interact.CreateInteractiveWindowUserPreference()
+
+		# Load the modules we use internally.
+		self.LoadSystemModules()
+		# Load additional module the user may want.
+		self.LoadUserModules()
+
+#		win32ui.CreateDebuggerThread()
+		win32ui.EnableControlContainer()
+
+		# Load the ToolBar state near the end of the init process, as
+		# there may be Toolbar IDs created by the user or other modules.
+		# By now all these modules should be loaded, so all the toolbar IDs loaded.
+		try:
+			self.frame.LoadBarState("ToolbarDefault")
+		except win32ui.error:
+			# MFC sucks.  It does essentially "GetDlgItem(x)->Something", so if the
+			# toolbar with ID x does not exist, MFC crashes!  Pythonwin has a trap for this
+			# but I need to investigate more how to prevent it (AFAIK, ensuring all the
+			# toolbars are created by now _should_ stop it!)
+			pass
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/debugger.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/debugger.py
new file mode 100644
index 0000000..978aff6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/debugger.py
@@ -0,0 +1,968 @@
+# debugger.py
+
+# A debugger for Pythonwin.  Built from pdb.
+
+# Mark Hammond (MHammond@skippinet.com.au) - Dec 94.
+
+# usage:
+# >>> import pywin.debugger
+# >>> pywin.debugger.GetDebugger().run("command")
+
+import pdb
+import bdb
+import sys
+import string
+import os
+import types
+
+import win32ui
+import win32api
+import win32con
+import pywin.docking.DockingBar
+from pywin.mfc import dialog, object, afxres, window
+from pywin.framework import app, interact, editor, scriptutils
+from pywin.framework.editor.color.coloreditor import MARKER_CURRENT, MARKER_BREAKPOINT
+from pywin.tools import browser, hierlist
+import commctrl
+import traceback
+
+#import win32traceutil
+
+from dbgcon import *
+
+error = "pywin.debugger.error"
+
+def SetInteractiveContext(globs, locs):
+	# Point the interactive window at the given namespaces; a no-op when
+	# the interactive window is not open.
+	if interact.edit is not None and interact.edit.currentView is not None:
+		interact.edit.currentView.SetContext(globs, locs)
+
+def _LineStateToMarker(ls):
+	# Map a LINESTATE_* flag to the editor marker used to display it;
+	# everything except the current line shares the breakpoint marker.
+	if ls==LINESTATE_CURRENT:
+		return MARKER_CURRENT
+#	elif ls == LINESTATE_CALLSTACK:
+#		return MARKER_CALLSTACK
+	return MARKER_BREAKPOINT
+
+class HierListItem(browser.HLIPythonObject):
+	# Common base for the debugger's hierarchical-list items.
+	pass
+
+class HierFrameItem(HierListItem):
+	# A single stack frame in the stack window; expands to its locals
+	# and globals, and double-click selects it as the current frame.
+	def __init__(self, frame, debugger):
+		HierListItem.__init__(self, frame, None)
+		self.debugger = debugger
+	def GetText(self):
+		# "name   (file.py:lineno)", with a fallback name for frames
+		# without a useful co_name.
+		name = self.myobject.f_code.co_name
+		if not name or name == '?' :
+			# See if locals has a '__name__' (ie, a module)
+			if self.myobject.f_locals.has_key('__name__'):
+				name = self.myobject.f_locals['__name__'] + " module"
+			else:
+				name = '<Debugger Context>'
+
+		return "%s   (%s:%d)" % (name, os.path.split(self.myobject.f_code.co_filename)[1], self.myobject.f_lineno)
+	def GetBitmapColumn(self):
+		# Different bitmap for the currently-selected frame.
+		if self.debugger.curframe is self.myobject:
+			return 7
+		else:
+			return 8
+	def GetSubList(self):
+		ret = []
+		ret.append(HierFrameDict(self.myobject.f_locals, "Locals", 2))
+		ret.append(HierFrameDict(self.myobject.f_globals, "Globals", 1))
+		return ret
+	def IsExpandable(self):
+		return 1
+	def TakeDefaultAction(self):
+		# Set the default frame to be this frame.
+		self.debugger.set_cur_frame(self.myobject)
+		return 1
+
+class HierFrameDict(browser.HLIDict):
+	# A frame's locals or globals dict, with a caller-chosen bitmap.
+	def __init__(self, dict, name, bitmapColumn):
+		self.bitmapColumn=bitmapColumn
+		browser.HLIDict.__init__(self, dict, name)
+	def GetBitmapColumn(self):
+		return self.bitmapColumn
+
+class NoStackAvailableItem(HierListItem):
+	# Placeholder leaf item shown when there is no stack to display.
+	def __init__(self, why):
+		HierListItem.__init__(self, None, why)
+	def IsExpandable(self):
+		return 0
+	def GetText(self):
+		return self.name
+	def GetBitmapColumn(self):
+		return 8
+
+class HierStackRoot(HierListItem):
+	# Invisible root of the stack tree; its sub-list is the (reversed)
+	# frame stack, cached in last_stack while not at a breakpoint.
+	def __init__( self, debugger ):
+		HierListItem.__init__(self, debugger, None)
+		self.last_stack = []
+##	def __del__(self):
+##		print "HierStackRoot dying"
+	def GetSubList(self):
+		debugger = self.myobject
+#		print self.debugger.stack, self.debugger.curframe
+		ret = []
+		if debugger.debuggerState==DBGSTATE_BREAK:
+			# Rebuild the cache from the live stack, innermost first.
+			stackUse=debugger.stack[:]
+			stackUse.reverse()
+			self.last_stack = []
+			for frame, lineno in stackUse:
+				self.last_stack.append( (frame, lineno) )
+				if frame is debugger.userbotframe: # Dont bother showing frames below our bottom frame.
+					break
+		for frame, lineno in self.last_stack:
+			ret.append( HierFrameItem( frame, debugger ) )
+##		elif debugger.debuggerState==DBGSTATE_NOT_DEBUGGING:
+##			ret.append(NoStackAvailableItem('<nothing is being debugged>'))
+##		else:
+##			ret.append(NoStackAvailableItem('<stack not available while running>'))
+		return ret
+	def GetText(self):
+		return 'root item'
+	def IsExpandable(self):
+		return 1
+
+class HierListDebugger(hierlist.HierListWithItems):
+	""" Hier List of stack frames, breakpoints, whatever """
+	def __init__(self):
+		hierlist.HierListWithItems.__init__(self, None, win32ui.IDB_DEBUGGER_HIER, None, win32api.RGB(255,0,0))
+	def Setup(self, debugger):
+		# Install a fresh stack root for the given debugger.
+		root = HierStackRoot(debugger)
+		self.AcceptRoot(root)
+#	def Refresh(self):
+#		self.Setup()
+
+class DebuggerWindow(window.Wnd):
+	def __init__(self, ob):
+		window.Wnd.__init__(self, ob)
+		self.debugger = None
+
+	def Init(self, debugger):
+		self.debugger = debugger
+
+	def GetDefRect(self):
+		defRect = app.LoadWindowSize("Debugger Windows\\" + self.title)
+		if defRect[2]-defRect[0]==0:
+			defRect = 0, 0, 150, 150
+		return defRect
+
+	def OnDestroy(self, msg):
+		newSize = self.GetWindowPlacement()[4]
+		pywin.framework.app.SaveWindowSize("Debugger Windows\\" + self.title, newSize)
+		return window.Wnd.OnDestroy(self, msg)
+
+	def OnKeyDown(self, msg):
+		key = msg[2]
+		if key in [13, 27, 32]: return 1
+		if key in [46,8]: # delete/BS key
+			self.DeleteSelected()
+			return 0
+		view = scriptutils.GetActiveView()
+		try:
+			firer = view.bindings.fire_key_event
+		except AttributeError:
+			firer = None
+		if firer is not None:
+			return firer(msg)
+		else:
+			return 1
+
+	def DeleteSelected(self):
+		win32api.MessageBeep()
+
+	def EditSelected(self):
+		win32api.MessageBeep()
+
+class DebuggerStackWindow(DebuggerWindow):
+	# Tree-control pane showing the call stack as a hierarchy of frames.
+	title = "Stack"
+	def __init__(self):
+		DebuggerWindow.__init__(self, win32ui.CreateTreeCtrl())
+		self.list = HierListDebugger()
+		self.listOK = 0
+	def SaveState(self):
+		self.list.DeleteAllItems()
+		self.listOK = 0
+	def CreateWindow(self, parent):
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS
+		self._obj_.CreateWindow(style, self.GetDefRect(), parent, win32ui.IDC_LIST1)
+		self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
+		self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN)
+		self.list.HierInit (parent, self)
+		self.listOK = 0 # delayed setup
+		#self.list.Setup()
+
+	def RespondDebuggerState(self, state):
+		# Build the stack tree the first time; refresh it thereafter.
+		assert self.debugger is not None, "Init not called"
+		if not self.listOK:
+			self.listOK = 1
+			self.list.Setup(self.debugger)
+		else:			
+			self.list.Refresh()
+
+	def RespondDebuggerData(self):
+		# Update each top-level item's bitmap to reflect which frame is
+		# current, without rebuilding the tree.
+		try:
+			handle = self.GetChildItem(0)
+		except win32ui.error:
+			return # No items
+		while 1:
+			item = self.list.ItemFromHandle(handle)
+			col = self.list.GetBitmapColumn(item)
+			selCol = self.list.GetSelectedBitmapColumn(item)
+			if selCol is None: selCol = col
+			if self.list.GetItemImage(handle)!= (col, selCol):
+				self.list.SetItemImage(handle, col, selCol)
+			try:
+				handle = self.GetNextSiblingItem(handle)
+			except win32ui.error:
+				break
+
+class DebuggerListViewWindow(DebuggerWindow):
+	# Report-style list-control pane with in-place label editing and a
+	# right-click Edit/Delete menu.  Subclasses define "columns" as a
+	# list of (title, width) pairs.
+	def __init__(self):
+		DebuggerWindow.__init__(self, win32ui.CreateListCtrl())
+	def CreateWindow(self, parent):
+		# NOTE(review): "list" is rebound to self again below; this first
+		# binding is redundant.
+		list = self
+		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.LVS_EDITLABELS | commctrl.LVS_REPORT
+		self._obj_.CreateWindow(style, self.GetDefRect(), parent, win32ui.IDC_LIST1)
+		self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
+		self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN)
+		list = self
+		title, width = self.columns[0]
+		itemDetails = (commctrl.LVCFMT_LEFT, width, title, 0)
+		list.InsertColumn(0, itemDetails)
+		col = 1
+		for title, width in self.columns[1:]:
+			col = col + 1
+			itemDetails = (commctrl.LVCFMT_LEFT, width, title, 0)
+			list.InsertColumn(col, itemDetails)
+		parent.HookNotify( self.OnListEndLabelEdit, commctrl.LVN_ENDLABELEDIT)
+		parent.HookNotify(self.OnItemRightClick, commctrl.NM_RCLICK)
+		parent.HookNotify(self.OnItemDoubleClick, commctrl.NM_DBLCLK)
+
+	def RespondDebuggerData(self):
+		pass
+
+	def RespondDebuggerState(self, state):
+		pass
+
+	def EditSelected(self):
+		# Begin label-editing on the selected row, if any.
+		sel = self.GetNextItem(-1, commctrl.LVNI_SELECTED)
+		if sel == -1:
+			return
+		self.EditLabel(sel)
+
+	def OnKeyDown(self, msg):
+		key = msg[2]
+		# If someone starts typing, they probably are trying to edit the text!
+		if chr(key) in string.uppercase:
+			self.EditSelected()
+			return 0
+		return DebuggerWindow.OnKeyDown(self, msg)
+
+	def OnItemDoubleClick(self, notify_data, extra):
+		self.EditSelected()
+
+	def OnItemRightClick(self, notify_data, extra):
+		# First select the item we right-clicked on.
+		pt = self.ScreenToClient(win32api.GetCursorPos())
+		flags, hItem, subitem = self.HitTest(pt)
+		if hItem==-1 or commctrl.TVHT_ONITEM & flags==0:
+			return None
+		self.SetItemState(hItem, commctrl.LVIS_SELECTED, commctrl.LVIS_SELECTED)
+
+		# Build the context menu; commands are hooked on the frame that
+		# will receive them (main frame when floating).
+		menu = win32ui.CreatePopupMenu()
+		menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1000, "Edit item")
+		menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1001, "Delete item")
+		dockbar = self.GetParent()
+		if dockbar.IsFloating():
+			hook_parent = win32ui.GetMainFrame()
+		else:
+			hook_parent = self.GetParentFrame()
+		hook_parent.HookCommand(self.OnEditItem, 1000)
+		hook_parent.HookCommand(self.OnDeleteItem, 1001)
+		menu.TrackPopupMenu(win32api.GetCursorPos()) # track at mouse position.
+		return None
+
+	def OnDeleteItem(self,command, code):
+		self.DeleteSelected()
+	def OnEditItem(self, command, code):
+		self.EditSelected()
+
+class DebuggerBreakpointsWindow(DebuggerListViewWindow):
+	# List pane over bdb's global Breakpoint.bplist; rows are matched
+	# back to breakpoints via id(bp) stored as the item's lParam.
+	title = "Breakpoints"
+	columns = [ ("Condition", 70), ("Location", 1024)]
+
+	def SaveState(self):
+		pass
+
+	def OnListEndLabelEdit(self, std, extra):
+		# Editing the "Condition" column sets the breakpoint condition;
+		# the literal text "none" clears it.
+		item = extra[0]
+		text = item[4]
+		if text is None: return
+
+		item_id = self.GetItem(item[0])[6]
+
+		from bdb import Breakpoint
+		for bplist in Breakpoint.bplist.values():
+			for bp in bplist:
+				if id(bp)==item_id:
+					if string.lower(string.strip(text))=="none":
+						text = None
+					bp.cond = text
+					break
+		self.RespondDebuggerData()
+
+	def DeleteSelected(self):
+		# Clear the breakpoint behind the selected row, then refresh.
+		try:
+			num = self.GetNextItem(-1, commctrl.LVNI_SELECTED)
+			item_id = self.GetItem(num)[6]
+			from bdb import Breakpoint
+			for bplist in Breakpoint.bplist.values():
+				for bp in bplist:
+					if id(bp)==item_id:
+						self.debugger.clear_break(bp.file, bp.line)
+						break
+		except win32ui.error:
+			win32api.MessageBeep()
+		self.RespondDebuggerData()
+
+	def RespondDebuggerData(self):
+		# Rebuild the whole list from bdb's breakpoint table.
+		list = self
+		list.DeleteAllItems()
+		index = -1
+		from bdb import Breakpoint
+		for bplist in Breakpoint.bplist.values():
+			for bp in bplist:
+				baseName = os.path.split(bp.file)[1]
+				cond = bp.cond
+				item = index+1, 0, 0, 0, str(cond), 0, id(bp)
+				index = list.InsertItem(item)
+				list.SetItemText(index, 1, "%s: %s" % (baseName, bp.line))
+
+class DebuggerWatchWindow(DebuggerListViewWindow):
+	"""List-view control bar evaluating watch expressions.
+
+	Column 0 holds user-entered expressions, column 1 their current
+	values.  The last row is always a "<New Item>" placeholder; typing
+	over it appends a new watch.  Expressions are persisted in the
+	profile as a tab-separated string.
+	"""
+	title = "Watch"
+	columns = [ ("Expression", 70), ("Value", 1024)]
+
+	def CreateWindow(self, parent):
+		DebuggerListViewWindow.CreateWindow(self, parent)
+		# Restore the saved watch expressions (tab-separated).
+		items = string.split(win32ui.GetProfileVal("Debugger Windows\\" + self.title, "Items", ""), "\t")
+		index = -1
+		for item in items:
+			if item:
+				index = self.InsertItem(index+1, item)
+		self.InsertItem(index+1, "<New Item>")
+
+	def SaveState(self):
+		# Persist every row except the trailing "<New Item>" placeholder.
+		items = []
+		for i in range(self.GetItemCount()-1):
+			items.append(self.GetItemText(i,0))
+		win32ui.WriteProfileVal("Debugger Windows\\" + self.title, "Items", string.join(items,"\t"))
+		return 1
+
+	def OnListEndLabelEdit(self, std, extra):
+		# An expression was edited in place; item[4] is the new text
+		# (None means the edit was cancelled).
+		item = extra[0]
+		itemno = item[0]
+		text = item[4]
+		if text is None: return
+		self.SetItemText(itemno, 0, text)
+		# Editing the placeholder row creates a fresh placeholder below it.
+		if itemno == self.GetItemCount()-1:
+			self.InsertItem(itemno+1, "<New Item>")
+		self.RespondDebuggerState(self.debugger.debuggerState)
+
+	def DeleteSelected(self):
+		try:
+			num = self.GetNextItem(-1, commctrl.LVNI_SELECTED)
+			if num < self.GetItemCount()-1: # We cant delete the last
+				self.DeleteItem(num)
+		except win32ui.error:
+			# No selection - just beep.
+			win32api.MessageBeep()
+
+	def RespondDebuggerState(self, state):
+		# Re-evaluate every watch expression in the context implied by the
+		# debugger state: the current frame when broken, __main__ when not
+		# debugging, and no context at all (blank values) while running.
+		globs = locs = None
+		if state==DBGSTATE_BREAK:
+			if self.debugger.curframe:
+				globs = self.debugger.curframe.f_globals
+				locs = self.debugger.curframe.f_locals
+		elif state==DBGSTATE_NOT_DEBUGGING:
+			import __main__
+			globs = locs = __main__.__dict__
+		for i in range(self.GetItemCount()-1):
+			text = self.GetItemText(i, 0)
+			if globs is None:
+				val = ""
+			else:
+				# NOTE(review): watch expressions are eval()'d verbatim -
+				# side effects in an expression will execute here.
+				try:
+					val = repr( eval( text, globs, locs) )
+				except SyntaxError:
+					val = "Syntax Error"
+				except:
+					# Show the exception text as the value.
+					t, v, tb = sys.exc_info()
+					val = string.strip(traceback.format_exception_only(t, v)[0])
+					tb = None # prevent a cycle.
+			self.SetItemText(i, 1, val)
+
+def CreateDebuggerDialog(parent, klass):
+	# Child-creator callback used by DockingBar.CreateWindow: instantiate
+	# the given dialog class and create its window inside `parent`.
+	control = klass()
+	control.CreateWindow(parent)
+	return control
+
+# (control-bar id, dialog class, float position or None to dock) for each
+# debugger control bar.  The ids must be stable - they key the saved
+# toolbar state.
+DebuggerDialogInfos = (
+	(0xe810, DebuggerStackWindow, None),
+	(0xe811, DebuggerBreakpointsWindow, (10, 10)),
+	(0xe812, DebuggerWatchWindow, None),
+	)
+
+# Prepare all the "control bars" for this package.
+# If control bars are not all loaded when the toolbar-state functions are
+# called, things go horribly wrong.
+def PrepareControlBars(frame):
+	# Create the debugger toolbar, docked at the top of `frame`.
+	style = win32con.WS_CHILD | afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY
+	tbd = win32ui.CreateToolBar (frame, style, win32ui.ID_VIEW_TOOLBAR_DBG)
+	tbd.ModifyStyle(0, commctrl.TBSTYLE_FLAT)
+	tbd.LoadToolBar(win32ui.IDR_DEBUGGER)
+	tbd.EnableDocking(afxres.CBRS_ALIGN_ANY)
+	tbd.SetWindowText("Debugger")
+	frame.DockControlBar(tbd)
+
+	# and the other windows.
+	for id, klass, float in DebuggerDialogInfos:
+		# Skip any bar that already exists (GetControlBar raises if not).
+		try:
+			frame.GetControlBar(id)
+			exists=1
+		except win32ui.error:
+			exists=0
+		if exists: continue
+		bar = pywin.docking.DockingBar.DockingBar()
+		style=win32con.WS_CHILD | afxres.CBRS_LEFT # don't create visible.
+		bar.CreateWindow(frame, CreateDebuggerDialog, klass.title, id, style, childCreatorArgs=(klass,))
+		bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC)
+		bar.EnableDocking(afxres.CBRS_ALIGN_ANY)
+		if float is None:
+			frame.DockControlBar(bar)
+		else:
+			frame.FloatControlBar(bar, float, afxres.CBRS_ALIGN_ANY)
+
+		# Created hidden; shown later when the debugger initializes.
+		frame.ShowControlBar(bar, 0, 1)
+
+
+# How the bottom (user) frame is treated when the debugger stops there:
+# SKIP_NONE - stop normally; SKIP_STEP - keep single-stepping past it;
+# SKIP_RUN - continue (used for plain "run" rather than "step into").
+SKIP_NONE=0
+SKIP_STEP=1
+SKIP_RUN=2
+
+# The GUI debugger derives from the standard pdb debugger.
+debugger_parent=pdb.Pdb
+class Debugger(debugger_parent):
+	"""The Pythonwin GUI debugger, built on pdb.Pdb.
+
+	Integrates bdb/pdb tracing with the Pythonwin MFC frame: it drives
+	the stack/breakpoint/watch control bars, highlights breakpoint and
+	current lines in the editor via Scintilla markers, and runs a
+	message pump (win32ui.StartDebuggerPump) while the debuggee is
+	stopped, so the GUI stays responsive at a break.
+	"""
+	def __init__(self):
+		self.inited = 0 # GUI (control bars/toolbars) not yet attached.
+		self.skipBotFrame = SKIP_NONE
+		self.userbotframe = None
+		self.frameShutdown = 0
+		self.pumping = 0 # true while the break-time message pump runs.
+		self.debuggerState = DBGSTATE_NOT_DEBUGGING # Assume so, anyway.
+		self.shownLineCurrent = None # The last filename I highlighted.
+		self.shownLineCallstack = None # The last filename I highlighted.
+		self.last_cmd_debugged = ""
+		self.abortClosed = 0
+		self.isInitialBreakpoint = 0
+		debugger_parent.__init__(self)
+
+		# See if any break-points have been set in the editor
+		for doc in editor.editorTemplate.GetDocumentList():
+			lineNo = -1
+			while 1:
+				lineNo = doc.MarkerGetNext(lineNo+1, MARKER_BREAKPOINT)
+				if lineNo <= 0: break
+				self.set_break(doc.GetPathName(), lineNo)
+
+		self.reset()
+		self.inForcedGUI = win32ui.GetApp().IsInproc()
+		self.options = LoadDebuggerOptions()
+		self.bAtException = self.bAtPostMortem = 0
+
+	def __del__(self):
+		self.close()
+	def close(self, frameShutdown = 0):
+		# frameShutdown indicates if we have total shutdown
+		# (ie, the main window is dying)
+		if self.pumping:
+			# Can stop pump here, as it only posts a message, and
+			# returns immediately.
+			if not self.StopDebuggerPump(): # User cancelled close.
+				return 0
+			# NOTE - from this point on the close can not be
+			# stopped - the WM_QUIT message is already in the queue.
+		self.frameShutdown = frameShutdown
+		if not self.inited: return 1
+		self.inited = 0
+
+		SetInteractiveContext(None, None)
+
+		frame = win32ui.GetMainFrame()
+		frame.SaveBarState("ToolbarDebugging")
+		# Hide the debugger toolbars (as they won't normally form part of the main toolbar state).
+		for id, klass, float in DebuggerDialogInfos:
+			try:
+				tb = frame.GetControlBar(id)
+				if tb.dialog is not None: # We may never have actually been shown.
+					tb.dialog.SaveState()
+					frame.ShowControlBar(tb, 0, 1)
+			except win32ui.error:
+				pass
+
+		# Restore the standard toolbar config
+		try:
+			frame.LoadBarState("ToolbarDefault")
+		except win32ui.error, msg: # When once created toolbars no longer exist.
+			pass
+#			print msg # LoadBarState failed (with win32 exception!)
+		self._UnshowCurrentLine()
+		self.set_quit()
+		return 1
+
+	def StopDebuggerPump(self):
+		# Returns 0 if the user vetoed stopping (eg, cancelled a save).
+		assert self.pumping, "Can't stop the debugger pump if Im not pumping!"
+		# After stopping a pump, I may never return.
+		if self.GUIAboutToFinishInteract():
+			self.pumping = 0
+			win32ui.StopDebuggerPump() # Posts a message, so we do return.
+			return 1
+		return 0
+
+	def get_option(self, option):
+		"""Public interface into debugger options
+		"""
+		try:
+			return self.options[option]
+		except KeyError:
+			raise error, "Option %s is not a valid option" % option
+
+	def prep_run(self, cmd):
+		pass
+	def done_run(self, cmd=None):
+		# A debug run finished - reset GUI state and tear down.
+		self.RespondDebuggerState(DBGSTATE_NOT_DEBUGGING)
+		self.close()
+	def canonic(self, fname):
+		# Lower-cased absolute path - Windows filenames compare
+		# case-insensitively, so bdb's breakpoint keys must too.
+		return string.lower(os.path.abspath(fname))
+	def reset(self):
+		debugger_parent.reset(self)
+		self.userbotframe = None
+		self.UpdateAllLineStates()
+		self._UnshowCurrentLine()
+
+
+	def setup(self, f, t):
+		debugger_parent.setup(self, f, t)
+		# A traceback means we are stopped at an exception.
+		self.bAtException = t is not None
+
+	def set_break(self, filename, lineno, temporary=0, cond = None):
+		# As for bdb, but also shows the breakpoint marker in the editor.
+		filename = self.canonic(filename)
+		self.SetLineState(filename, lineno, LINESTATE_BREAKPOINT)
+		return debugger_parent.set_break(self, filename, lineno, temporary, cond)
+
+	def clear_break(self, filename, lineno):
+		# As for bdb, but also removes the editor breakpoint marker.
+		filename = self.canonic(filename)
+		self.ResetLineState(filename, lineno, LINESTATE_BREAKPOINT)
+		return debugger_parent.clear_break(self, filename, lineno)
+
+	def cmdloop(self):
+		# pdb would prompt on the console here; we break into the GUI.
+		if self.frameShutdown: return # App in the process of closing - never break in!
+		self.GUIAboutToBreak()
+
+	def print_stack_entry(self, frame):
+		# We don't want a stack printed - our GUI is better :-)
+		pass
+
+	def user_return(self, frame, return_value):
+		# Same as parent, just no "print"
+		# This function is called when a return trap is set here
+		frame.f_locals['__return__'] = return_value
+		self.interaction(frame, None)
+
+	def user_exception(self, frame, (exc_type, exc_value, exc_traceback)):
+		# NOTE: Python 2 tuple-parameter syntax above.
+		# This function is called if an exception occurs,
+		# but only if we are to stop at or just below this level
+		if self.get_option(OPT_STOP_EXCEPTIONS):
+			frame.f_locals['__exception__'] = exc_type, exc_value
+			print "Unhandled exception while debugging..."
+			traceback.print_exception(exc_type, exc_value, exc_traceback)
+			self.interaction(frame, exc_traceback)
+
+	def user_line(self, frame):
+		# Line 0 events (module entry artifacts) are ignored.
+		if frame.f_lineno==0: return
+		debugger_parent.user_line(self, frame)
+
+	def stop_here(self, frame):
+		# Filter bdb's stop decision: skip the synthetic initial break,
+		# and step/run past the bottom frame per self.skipBotFrame.
+		if self.isInitialBreakpoint:
+			self.isInitialBreakpoint = 0
+			self.set_continue()
+			return 0
+		if frame is self.botframe and self.skipBotFrame == SKIP_RUN:
+			self.set_continue()
+			return 0
+		if frame is self.botframe and self.skipBotFrame == SKIP_STEP:
+			self.set_step()
+			return 0
+		return debugger_parent.stop_here(self, frame)
+
+	def run(self, cmd,globals=None, locals=None, start_stepping = 1):
+		"""Debug a string or code object.
+
+		start_stepping true means break at the first line; otherwise run
+		until a breakpoint is hit.  done_run() is always called when the
+		command finishes (normally or otherwise).
+		"""
+		if type(cmd) not in [types.StringType, types.CodeType]:
+			raise TypeError, "Only strings can be run"
+		self.last_cmd_debugged = cmd
+		if start_stepping:
+			self.isInitialBreakpoint = 0
+		else:
+			self.isInitialBreakpoint = 1
+		try:
+			if globals is None:
+				import __main__
+				globals = __main__.__dict__
+			if locals is None:
+				locals = globals
+			self.reset()
+			self.prep_run(cmd)
+			sys.settrace(self.trace_dispatch)
+			if type(cmd) <> types.CodeType:
+				cmd = cmd+'\n'
+			try:
+				try:
+					if start_stepping: self.skipBotFrame = SKIP_STEP
+					else: self.skipBotFrame = SKIP_RUN
+					if sys.version_info > (2,2):
+						exec cmd in globals, locals
+					else:
+						# Pre-2.3 needs the exec in a separate frame.
+						_doexec(cmd, globals, locals)
+				except bdb.BdbQuit:
+					pass
+			finally:
+				self.skipBotFrame = SKIP_NONE
+				self.quitting = 1
+				sys.settrace(None)
+
+		finally:
+			self.done_run(cmd)
+
+	def runeval(self, expr, globals=None, locals=None):
+		# As for pdb.runeval, with GUI setup/teardown around it.
+		self.prep_run(expr)
+		try:
+			debugger_parent.runeval(self, expr, globals, locals)
+		finally:
+			self.done_run(expr)
+
+	def runexec(self, what, globs=None, locs=None):
+		# Execute `what` under the trace function, without prep/done hooks.
+		self.reset()
+		sys.settrace(self.trace_dispatch)
+		try:
+			try:
+				exec what in globs, locs
+			except bdb.BdbQuit:
+				pass
+		finally:
+			self.quitting = 1
+			sys.settrace(None)
+
+	# GUI command handlers - each stops the pump before resuming.
+	def do_set_step(self):
+		if self.GUIAboutToRun():
+			self.set_step()
+
+	def do_set_next(self):
+		if self.GUIAboutToRun():
+			self.set_next(self.curframe)
+
+	def do_set_return(self):
+		if self.GUIAboutToRun():
+			self.set_return(self.curframe)
+
+	def do_set_continue(self):
+		if self.GUIAboutToRun():
+			self.set_continue()
+
+	def set_quit(self):
+		# As for bdb.set_quit, but first stop the message pump (which the
+		# user may veto).
+		ok = 1
+		if self.pumping:
+			ok = self.StopDebuggerPump()
+		if ok:
+			debugger_parent.set_quit(self)
+
+	def _dump_frame_(self, frame,name=None):
+		# Debugging aid - print a one-line description of `frame`.
+		if name is None: name = ""
+		if frame:
+			if frame.f_code and frame.f_code.co_filename:
+				fname = os.path.split(frame.f_code.co_filename)[1]
+			else:
+				fname = "??"
+			print `name`, fname, frame.f_lineno, frame
+		else:
+			print `name`, "None"
+
+	def set_trace(self):
+		# Start debugging from _2_ levels up!
+		# (raise/catch is the Python 2 idiom for getting at the caller's
+		# frame without sys._getframe.)
+		try:
+			1 + ''
+		except:
+			frame = sys.exc_info()[2].tb_frame.f_back.f_back
+		self.reset()
+		self.userbotframe = None
+		while frame:
+			# scriptutils.py creates a local variable with name
+			# '_debugger_stop_frame_', and we don't go past it
+			# (everything above this is Pythonwin framework code)
+			if frame.f_locals.has_key("_debugger_stop_frame_"):
+				self.userbotframe = frame
+				break
+
+			frame.f_trace = self.trace_dispatch
+			self.botframe = frame
+			frame = frame.f_back
+		self.set_step()
+		sys.settrace(self.trace_dispatch)
+
+	def set_cur_frame(self, frame):
+		# Sets the "current" frame - ie, the frame with focus.  This is the
+		# frame on which "step out" etc actions are taken.
+		# This may or may not be the top of the stack.
+		assert frame is not None, "You must pass a valid frame"
+		self.curframe = frame
+		for f, index in self.stack:
+			if f is frame:
+				self.curindex = index
+				break
+		else:
+			assert 0, "Can't find the frame in the stack."
+		SetInteractiveContext(frame.f_globals, frame.f_locals)
+		self.GUIRespondDebuggerData()
+		self.ShowCurrentLine()
+
+	def IsBreak(self):
+		return self.debuggerState == DBGSTATE_BREAK
+
+	def IsDebugging(self):
+		return self.debuggerState != DBGSTATE_NOT_DEBUGGING
+
+	def RespondDebuggerState(self, state):
+		# Central state-transition handler: updates the frame caption,
+		# notifies every debugger control bar and open editor window.
+		if state == self.debuggerState: return
+		if state==DBGSTATE_NOT_DEBUGGING: # Debugger exists, but not doing anything
+			title = ""
+		elif state==DBGSTATE_RUNNING: # Code is running under the debugger.
+			title = " - running"
+		elif state==DBGSTATE_BREAK: # We are at a breakpoint or stepping or whatever.
+			if self.bAtException:
+				if self.bAtPostMortem:
+					title = " - post mortem exception"
+				else:
+					title = " - exception"
+			else:
+				title = " - break"
+		else:
+			raise error, "Invalid debugger state passed!"
+		win32ui.GetMainFrame().SetWindowText(win32ui.LoadString(win32ui.IDR_MAINFRAME) + title)
+		if self.debuggerState == DBGSTATE_QUITTING and state != DBGSTATE_NOT_DEBUGGING:
+			print "Ignoring state change cos Im trying to stop!", state
+			return
+		self.debuggerState = state
+		try:
+			frame = win32ui.GetMainFrame()
+		except win32ui.error:
+			frame = None
+		if frame is not None:
+			for id, klass, float in DebuggerDialogInfos:
+				cb = win32ui.GetMainFrame().GetControlBar(id).dialog
+				cb.RespondDebuggerState(state)
+		# Tell each open editor window about the state transition
+		for doc in editor.editorTemplate.GetDocumentList():
+			doc.OnDebuggerStateChange(state)
+		self.ShowCurrentLine()
+
+	#
+	# GUI debugger interface.
+	#
+	def GUICheckInit(self):
+		# Lazily attach the debugger to its control bars and restore the
+		# debugging toolbar layout - done once, on first break.
+		if self.inited: return
+		self.inited = 1
+		frame = win32ui.GetMainFrame()
+
+		# Ensure the debugger windows are attached to the debugger.
+		for id, klass, float in DebuggerDialogInfos:
+			w = frame.GetControlBar(id).dialog
+			w.Init(self)
+
+		try:
+			frame.LoadBarState("ToolbarDebugging")
+		except win32ui.error, details:
+			print "LoadBarState failed - %s" % details
+
+		# ALWAYS show debugging toolbar, regardless of saved state
+		tb = frame.GetControlBar(win32ui.ID_VIEW_TOOLBAR_DBG)
+		frame.ShowControlBar(tb, 1, 1)
+		self.GUIRespondDebuggerData()
+
+#		frame.RecalcLayout()
+
+	def GetDebuggerBar(self, barName):
+		# Look a control bar up by its klass.title ("Stack" etc).
+		frame = win32ui.GetMainFrame()
+		for id, klass, float in DebuggerDialogInfos:
+			if klass.title == barName:
+				return frame.GetControlBar(id)
+		assert 0, "Can't find a bar of that name!"
+
+	def GUIRespondDebuggerData(self):
+		# Push current breakpoint/stack data into every control bar.
+		if not self.inited: # GUI not inited - no toolbars etc.
+			return
+
+		for id, klass, float in DebuggerDialogInfos:
+			cb = win32ui.GetMainFrame().GetControlBar(id).dialog
+			cb.RespondDebuggerData()
+
+	def GUIAboutToRun(self):
+		# About to resume the debuggee.  Returns 0 if the user vetoed
+		# stopping the pump (so the caller must not resume).
+		if not self.StopDebuggerPump():
+			return 0
+		self._UnshowCurrentLine()
+		self.RespondDebuggerState(DBGSTATE_RUNNING)
+		SetInteractiveContext(None, None)
+		return 1
+
+	def GUIAboutToBreak(self):
+		"Called as the GUI debugger is about to get context, and take control of the running program."
+		self.GUICheckInit()
+		self.RespondDebuggerState(DBGSTATE_BREAK)
+		self.GUIAboutToInteract()
+		if self.pumping:
+			print "!!! Already pumping - outa here"
+			return
+		self.pumping = 1
+		win32ui.StartDebuggerPump() # NOTE - This will NOT return until the user is finished interacting
+		assert not self.pumping, "Should not be pumping once the pump has finished"
+		if self.frameShutdown: # User shut down app while debugging
+			win32ui.GetMainFrame().PostMessage(win32con.WM_CLOSE)
+
+	def GUIAboutToInteract(self):
+		"Called as the GUI is about to perform any interaction with the user"
+		frame = win32ui.GetMainFrame()
+		# Remember the enabled state of our main frame
+		# may be disabled primarily if a modal dialog is displayed.
+		# Only get at enabled via GetWindowLong.
+		self.bFrameEnabled = frame.IsWindowEnabled()
+		self.oldForeground = None
+		fw = win32ui.GetForegroundWindow()
+		if fw is not frame:
+			self.oldForeground = fw
+#			fw.EnableWindow(0) Leave enabled for now?
+			self.oldFrameEnableState = frame.IsWindowEnabled()
+			frame.EnableWindow(1)
+		if self.inForcedGUI and not frame.IsWindowVisible():
+			frame.ShowWindow(win32con.SW_SHOW)
+			frame.UpdateWindow()
+		if self.curframe:
+			SetInteractiveContext(self.curframe.f_globals, self.curframe.f_locals)
+		else:
+			SetInteractiveContext(None, None)
+		self.GUIRespondDebuggerData()
+
+	def GUIAboutToFinishInteract(self):
+		"""Called as the GUI is about to finish any interaction with the user
+		   Returns non zero if we are allowed to stop interacting"""
+		if self.oldForeground is not None:
+			try:
+				win32ui.GetMainFrame().EnableWindow(self.oldFrameEnableState)
+				self.oldForeground.EnableWindow(1)
+			except win32ui.error:
+				# old window may be dead.
+				pass
+#			self.oldForeground.SetForegroundWindow() - fails??
+		if not self.inForcedGUI:
+			return 1 # Never a problem, and nothing else to do.
+		# If we are running a forced GUI, we may never get an opportunity
+		# to interact again.  Therefore we perform a "SaveAll", to make sure that
+		# any documents are saved before leaving.
+		for template in win32ui.GetApp().GetDocTemplateList():
+			for doc in template.GetDocumentList():
+				if not doc.SaveModified():
+					return 0
+		# All documents saved - now hide the app and debugger.
+		if self.get_option(OPT_HIDE):
+			frame = win32ui.GetMainFrame()
+			frame.ShowWindow(win32con.SW_HIDE)
+		return 1
+
+	#
+	# Pythonwin interface - all stuff to do with showing source files,
+	# changing line states etc.
+	#
+	def ShowLineState(self, fileName, lineNo, lineState):
+		# Set the state of a line, open if not already
+		self.ShowLineNo(fileName, lineNo)
+		self.SetLineState(fileName, lineNo, lineState)
+
+	def SetLineState(self, fileName, lineNo, lineState):
+		# Set the state of a line if the document is open.
+		doc = editor.editorTemplate.FindOpenDocument(fileName)
+		if doc is not None:
+			marker = _LineStateToMarker(lineState)
+			if not doc.MarkerCheck(lineNo, marker):
+				doc.MarkerAdd(lineNo, marker)
+
+	def ResetLineState(self, fileName, lineNo, lineState):
+		# Clear the state of a line if the document is open.
+		doc = editor.editorTemplate.FindOpenDocument(fileName)
+		if doc is not None:
+			marker = _LineStateToMarker(lineState)
+			doc.MarkerDelete(lineNo, marker)
+
+	def UpdateDocumentLineStates(self, doc):
+		# Show all lines in their special status color.  If the doc is open
+		# all line states are reset.
+		doc.MarkerDeleteAll( MARKER_BREAKPOINT )
+		doc.MarkerDeleteAll( MARKER_CURRENT )
+		fname = self.canonic(doc.GetPathName())
+		# Now loop over all break-points
+		for line in self.breaks.get(fname, []):
+			doc.MarkerAdd(line, MARKER_BREAKPOINT)
+		# And the current line if in this document.
+		if self.shownLineCurrent and fname == self.shownLineCurrent[0]:
+			lineNo = self.shownLineCurrent[1]
+			if not doc.MarkerCheck(lineNo, MARKER_CURRENT):
+				doc.MarkerAdd(lineNo, MARKER_CURRENT)
+#		if self.shownLineCallstack and fname == self.shownLineCallstack[0]:
+#			doc.MarkerAdd(self.shownLineCallstack[1], MARKER_CURRENT)
+
+	def UpdateAllLineStates(self):
+		for doc in editor.editorTemplate.GetDocumentList():
+			self.UpdateDocumentLineStates(doc)
+
+	def ShowCurrentLine(self):
+		# Show the current line.  Only ever 1 current line - undoes last current
+		# The "Current Line" is self.curframe.
+		# The "Callstack Line" is the top of the stack.
+		# If current == callstack, only show as current.
+		self._UnshowCurrentLine() # un-highlight the old one.
+		if self.curframe:
+			fileName = self.canonic(self.curframe.f_code.co_filename)
+			lineNo = self.curframe.f_lineno
+			self.shownLineCurrent = fileName, lineNo
+			self.ShowLineState(fileName, lineNo, LINESTATE_CURRENT)
+
+	def _UnshowCurrentLine(self):
+		"Unshow the current line, and forget it"
+		if self.shownLineCurrent is not None:
+			fname, lineno = self.shownLineCurrent
+			self.ResetLineState(fname, lineno, LINESTATE_CURRENT)
+			self.shownLineCurrent = None
+
+	def ShowLineNo( self, filename, lineno ):
+		# Open/jump to filename:lineno in the editor; returns 1 on success.
+		wasOpen = editor.editorTemplate.FindOpenDocument(filename) is not None
+		if os.path.isfile(filename) and scriptutils.JumpToDocument(filename, lineno):
+			if not wasOpen:
+				# Newly opened - paint its breakpoint/current markers.
+				doc = editor.editorTemplate.FindOpenDocument(filename)
+				if doc is not None:
+					self.UpdateDocumentLineStates(doc)
+					return 1
+				return 0
+			return 1
+		else:
+			# Can't find the source file - linecache may have it?
+			import linecache
+			line = linecache.getline(filename, lineno)
+			print "%s(%d): %s" % (os.path.basename(filename), lineno, string.expandtabs(line[:-1],4))
+			return 0
+
+def _doexec(cmd, globals, locals):
+	# Helper for pre-2.3 Pythons: run the exec in its own frame so the
+	# debugger's bottom-frame bookkeeping works (see Debugger.run).
+	exec cmd in globals, locals
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/fail.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/fail.py
new file mode 100644
index 0000000..ce6e367
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/debugger/fail.py
@@ -0,0 +1,48 @@
+# NOTE NOTE - This module is designed to fail!
+#
+# The ONLY purpose for this script is testing/demoing the
+# Pythonwin debugger package.
+
+# It does nothing useful, and it even doesn't do that!
+# Call chain: a() -> b() [set_trace] -> c() -> d() -> e(), then d()
+# raises ValueError, which a() hands to the post-mortem debugger.
+
+import pywin.debugger, sys, time
+import traceback
+
+def a():
+	a=1
+	try:
+		b()
+	except:
+		# Break into the debugger with the exception information.
+		pywin.debugger.post_mortem(sys.exc_info()[2])
+		a=1
+		a=2
+		a=3
+		a=4
+		pass
+
+def b():
+	b=1
+	pywin.debugger.set_trace()
+	# After importing or running this module, you are likely to be
+	# sitting at the next line.  This is because we explicitly
+	# broke into the debugger using the "set_trace()" function.
+	# "pywin.debugger.brk()" is a shorter alias for this.
+	c()
+	pass
+
+def c():
+	c=1
+	d()
+
+def d():
+	d=1
+	e(d)
+	# Deliberately raise so a()'s handler demos post-mortem debugging.
+	raise ValueError, "Hi"
+
+def e(arg):
+	e=1
+	time.sleep(1)
+	return e
+
+a()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/default.cfg b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/default.cfg
new file mode 100644
index 0000000..9f4433b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/default.cfg
@@ -0,0 +1,214 @@
+# The default keyboard etc configuration file for Pythonwin.

+#

+# The format of this file is very similar to a Windows INI file.

+# Sections are identified with [Section] lines, but comments

+# use the standard Python # character.  Depending on the section,

+# lines may not be in the standard "key=value" format.

+

+# NOTE:  You should not need to modify this file.

+# Simply create a new .CFG file, and add an entry:

+# [General]

+# BasedOn = Default

+#

+# and add your customisations.  Then select your new configuration 

+# from the Pythonwin View/Options/Editor dialog.

+# This way you get to add your own customisations,

+# but still take advantage of changes to the default

+# configuration in new releases.

+

+# See IDLE.cfg for an example extension configuration.

+#

+##########################################################################

+

+[IDLE Extensions]

+

+# The list of IDLE extensions to load.  The extensions

+# AutoIndent, AutoFormat and possibly others are

+# "built-in", so do not need specifying.

+

+FormatParagraph

+CallTips

+

+

+[Keys]

+

+# The list of _default_ key definitions.  

+#  See [Keys:Interactive] and [Keys:Editor] below for further defs.

+

+#Events of the format <<event-name>> 

+# are events defined in IDLE extensions.

+

+Alt+Q             = <<format-paragraph>>

+

+Ctrl+W            = ViewWhitespace

+Ctrl+Shift+8      = ViewWhitespace # The MSVC default key def.

+

+Ctrl+Shift+F      = ViewFixedFont

+

+# Auto-complete, call-tips, etc.

+Alt+/             = <<expand-word>>

+Ctrl+Space   = <<expand-word>>

+Shift+(           = <<paren-open>>

+Shift-)           = <<paren-close>>

+Up                = <<check-calltip-cancel>>

+Down              = <<check-calltip-cancel>>

+Left              = <<check-calltip-cancel>>

+Right             = <<check-calltip-cancel>>

+.                 = KeyDot

+

+# Debugger - These are the MSVC default keys, for want of a better choice.

+F9                = DbgBreakpointToggle

+F5                = DbgGo

+Shift+F5          = DbgClose

+F11               = DbgStep

+F10               = DbgStepOver

+Shift+F11         = DbgStepOut

+

+Ctrl+F3           = AutoFindNext

+

+

+[Keys:Editor]

+# Key bindings specific to the editor

+F2                = GotoNextBookmark

+Ctrl+F2           = ToggleBookmark

+Ctrl+G            = GotoLine

+

+Alt+I             = ShowInteractiveWindow

+Alt-B             = AddBanner # A sample Event defined in this file.

+

+# Block operations

+Alt+3             = <<comment-region>>

+Shift+Alt+3       = <<uncomment-region>>

+Alt+4             = <<uncomment-region>> # IDLE default.

+Alt+5             = <<tabify-region>>

+Alt+6             = <<untabify-region>>

+

+# Tabs and other indent features

+Back              = <<smart-backspace>>

+Ctrl+T            = <<toggle-tabs>>

+Alt+U             = <<change-indentwidth>>

+Enter             = EnterKey

+Tab               = TabKey

+Shift-Tab         = <<dedent-region>>

+

+# Folding

+Add               = FoldExpand

+Subtract          = FoldCollapse

+Alt+Add           = FoldExpandAll

+Subtract          = FoldCollapse # NOTE: duplicate of the binding two lines above.

+Alt+Subtract      = FoldCollapseAll

+Multiply          = FoldTopLevel

+

+[Keys:Interactive]

+# Key bindings specific to the interactive window.

+# History for the interactive window

+Ctrl+Up           = <<history-previous>>

+Ctrl+Down         = <<history-next>>

+Enter             = ProcessEnter

+Ctrl+Enter        = ProcessEnter

+Shift+Enter       = ProcessEnter

+Esc               = ProcessEsc

+Alt+I             = WindowBack # Toggle back to previous window.

+Home              = InteractiveHome # A sample Event defined in this file.

+Shift+Home        = InteractiveHomeExtend # A sample Event defined in this file.

+

+# When docked, the Ctrl+Tab and Shift+Ctrl+Tab keys don't work as expected.

+Ctrl+Tab          = MDINext

+Ctrl+Shift+Tab    = MDIPrev

+

+[Extensions]

+# Python event handlers specific to this config file.

+# All functions not starting with an "_" are assumed

+# to be events, and take 2 params:

+# * editor_window is the same object passed to IDLE

+#   extensions.  editor_window.text is a text widget

+#   that conforms to the Tk text widget interface.

+# * event is the event being fired.  Will always be None

+#   in the current implementation.

+

+# Simply by defining these functions, they are available as

+# events.

+# Note that we bind keystrokes to these events in the various

+# [Keys] sections.

+

+# Add a simple file/class/function simple banner

+def AddBanner(editor_window, event):

+

+	text = editor_window.text

+	big_line = "#" * 70

+	banner = "%s\n## \n## \n## \n%s\n" % (big_line, big_line)

+

+	# Insert at the start of the current line.

+	pos = text.index("insert linestart")

+

+	text.undo_block_start() # Allow action to be undone as a single unit.

+	text.insert(pos, banner)

+	text.undo_block_stop()

+

+	# Now set the insert point to the middle of the banner.

+	import string

+	line, col = map(int, string.split(pos, "."))

+	text.mark_set("insert", "%d.1 lineend" % (line+2, ) )

+

+

+# Here is a sample event bound to the "Home" key in the

+# interactive window

+def InteractiveHome(editor_window, event):

+	return _DoInteractiveHome(editor_window.text, 0)

+

+def InteractiveHomeExtend(editor_window, event):

+	return _DoInteractiveHome(editor_window.text, 1)

+

+def _DoInteractiveHome(text, extend):

+	import sys

+	# If Scintilla has an autocomplete window open, then let Scintilla handle it.

+	if text.edit.SCIAutoCActive():

+		return 1

+	of_interest = "insert linestart + %d c" % len(sys.ps1)

+	if not text.compare("insert", "==", of_interest) and \

+	   text.get("insert linestart", of_interest) in [sys.ps1, sys.ps2]: # Not sys.ps? line

+		end = of_interest

+	else:

+		end = "insert linestart"

+

+	if extend: start = "insert"

+	else: start = end

+	text.tag_add("sel", start, end)

+

+# From Niki Spahie

+def AutoFindNext(editor_window, event):

+    "find selected text or word under cursor"

+

+    from pywin.scintilla import find

+    from pywin.scintilla import scintillacon

+

+    try:

+        sci = editor_window.edit

+        word = sci.GetSelText()

+        if word:

+            find.lastSearch.findText = word

+            find.lastSearch.sel = sci.GetSel()

+        else:

+            pos = sci.SendScintilla( scintillacon.SCI_GETCURRENTPOS )

+            start = sci.SendScintilla( scintillacon.SCI_WORDSTARTPOSITION, pos, 1 )

+            end = sci.SendScintilla( scintillacon.SCI_WORDENDPOSITION, pos, 1 )

+            word = sci.GetTextRange( start, end )

+            if word:

+                find.lastSearch.findText = word

+                find.lastSearch.sel = (start,end)

+    except Exception, why:

+        print repr(why), why

+    find.FindNext()

+

+

+# A couple of generic events.

+def Beep(editor_window, event):

+	editor_window.text.beep()

+

+def DoNothing(editor_window, event):

+	pass

+

+def ContinueEvent(editor_window, event):

+	# Almost an "unbind" - allows Pythonwin/MFC to handle the keystroke

+	return 1

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/ideoptions.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/ideoptions.py
new file mode 100644
index 0000000..a074dd1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/ideoptions.py
@@ -0,0 +1,116 @@
+# The property page to define generic IDE options for Pythonwin
+
+from pywin.mfc import dialog
+from pywin.framework import interact
+import win32ui
+import win32con
+
+buttonControlMap = {
+	win32ui.IDC_BUTTON1: win32ui.IDC_EDIT1,
+	win32ui.IDC_BUTTON2: win32ui.IDC_EDIT2,
+	win32ui.IDC_BUTTON3: win32ui.IDC_EDIT3,
+}
+
+class OptionsPropPage(dialog.PropertyPage):
+	def __init__(self):
+		dialog.PropertyPage.__init__(self, win32ui.IDD_PP_IDE)
+		self.AddDDX(win32ui.IDC_CHECK1, "bShowAtStartup")
+		self.AddDDX(win32ui.IDC_CHECK2, "bDocking")
+		self.AddDDX(win32ui.IDC_EDIT4, 'MRUSize', "i")
+
+	def OnInitDialog(self):
+
+		edit = self.GetDlgItem(win32ui.IDC_EDIT1)
+		format = eval(win32ui.GetProfileVal(interact.sectionProfile, interact.STYLE_INTERACTIVE_PROMPT, str(interact.formatInput)))
+		edit.SetDefaultCharFormat(format)
+		edit.SetWindowText("Input Text")
+		
+		edit = self.GetDlgItem(win32ui.IDC_EDIT2)
+		format = eval(win32ui.GetProfileVal(interact.sectionProfile, interact.STYLE_INTERACTIVE_OUTPUT, str(interact.formatOutput)))
+		edit.SetDefaultCharFormat(format)
+		edit.SetWindowText("Output Text")
+		
+		edit = self.GetDlgItem(win32ui.IDC_EDIT3)
+		format = eval(win32ui.GetProfileVal(interact.sectionProfile, interact.STYLE_INTERACTIVE_ERROR, str(interact.formatOutputError)))
+		edit.SetDefaultCharFormat(format)
+		edit.SetWindowText("Error Text")
+
+		self['bShowAtStartup'] = interact.LoadPreference("Show at startup", 1)
+		self['bDocking'] = interact.LoadPreference("Docking", 0)
+		self['MRUSize'] = win32ui.GetProfileVal("Settings","Recent File List Size", 10)
+
+		# Hook the button clicks.
+		self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON1)
+		self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON2)
+		self.HookCommand(self.HandleCharFormatChange, win32ui.IDC_BUTTON3)
+
+		# Ensure the spin control remains in range.
+		spinner = self.GetDlgItem(win32ui.IDC_SPIN1)
+		spinner.SetRange(1, 16)
+
+		return dialog.PropertyPage.OnInitDialog(self)
+
+	# Called to save away the new format tuple for the specified item.
+	def HandleCharFormatChange(self, id, code):
+		if code == win32con.BN_CLICKED:
+			editId = buttonControlMap.get(id)
+			assert editId is not None, "Format button has no associated edit control"
+			editControl = self.GetDlgItem(editId)
+			existingFormat = editControl.GetDefaultCharFormat()
+			flags = win32con.CF_SCREENFONTS
+			d=win32ui.CreateFontDialog(existingFormat, flags, None, self)
+			if d.DoModal()==win32con.IDOK:
+				cf = d.GetCharFormat()
+				editControl.SetDefaultCharFormat(cf)
+				self.SetModified(1)
+			return 0 # We handled this fully!
+
+	def OnOK(self):
+		# Handle the edit controls - get all the fonts, put them back into interact, then
+		# get interact to save its stuff!
+		controlAttrs = [
+			(win32ui.IDC_EDIT1, interact.STYLE_INTERACTIVE_PROMPT),
+			(win32ui.IDC_EDIT2, interact.STYLE_INTERACTIVE_OUTPUT),
+			(win32ui.IDC_EDIT3, interact.STYLE_INTERACTIVE_ERROR)]
+		for id, key in controlAttrs:
+			control = self.GetDlgItem(id)
+			fmt = control.GetDefaultCharFormat()
+			win32ui.WriteProfileVal(interact.sectionProfile, key, str(fmt))
+
+		# Save the other interactive window options.
+		interact.SavePreference("Show at startup", self['bShowAtStartup'])
+		interact.SavePreference("Docking", self['bDocking'])
+
+		# And the other options.
+		win32ui.WriteProfileVal("Settings","Recent File List Size", self['MRUSize'])
+		
+		return 1
+	def ChangeFormat(self, fmtAttribute, fmt):
+		dlg = win32ui.CreateFontDialog(fmt)
+		if dlg.DoModal() <> win32con.IDOK: return None
+		return dlg.GetCharFormat()
+
+	def OnFormatTitle(self, command, code):
+		fmt = self.GetFormat(interact.formatTitle)
+		if fmt:
+			formatTitle = fmt
+			SaveFontPreferences()
+
+	def OnFormatInput(self, command, code):
+		global formatInput
+		fmt = self.GetFormat(formatInput)
+		if fmt:
+			formatInput = fmt
+			SaveFontPreferences()
+	def OnFormatOutput(self, command, code):
+		global formatOutput
+		fmt = self.GetFormat(formatOutput)
+		if fmt:
+			formatOutput = fmt
+			SaveFontPreferences()
+	def OnFormatError(self, command, code):
+		global formatOutputError
+		fmt = self.GetFormat(formatOutputError)
+		if fmt:
+			formatOutputError = fmt
+			SaveFontPreferences()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/list.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/list.py
new file mode 100644
index 0000000..2cfa9d7d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/list.py
@@ -0,0 +1,122 @@
+from pywin.mfc import dialog
+import win32ui, win32con, commctrl, win32api
+
+class ListDialog (dialog.Dialog):
+	
+	def __init__ (self, title, list):
+		dialog.Dialog.__init__ (self, self._maketemplate(title))
+		self.HookMessage (self.on_size, win32con.WM_SIZE)
+		self.HookNotify(self.OnListItemChange, commctrl.LVN_ITEMCHANGED)
+		self.HookCommand(self.OnListClick, win32ui.IDC_LIST1)
+		self.items = list
+
+	def _maketemplate(self, title):
+		style = win32con.WS_DLGFRAME | win32con.WS_SYSMENU | win32con.WS_VISIBLE
+		ls = (
+			win32con.WS_CHILD           |
+			win32con.WS_VISIBLE         |
+			commctrl.LVS_ALIGNLEFT      |
+			commctrl.LVS_REPORT
+		    )
+		bs = (
+			win32con.WS_CHILD           |
+			win32con.WS_VISIBLE
+		     )
+		return [ [title, (0, 0, 200, 200), style, None, (8, "MS Sans Serif")],
+			["SysListView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), ls], 
+			[128,	"OK", win32con.IDOK, (10, 0, 50, 14), bs | win32con.BS_DEFPUSHBUTTON],
+			[128,	"Cancel",win32con.IDCANCEL,(0, 0, 50, 14), bs],
+		    ]
+
+	def FillList(self):
+		size = self.GetWindowRect()
+		width = size[2] - size[0] - (10)
+		itemDetails = (commctrl.LVCFMT_LEFT, width, "Item", 0)
+		self.itemsControl.InsertColumn(0, itemDetails)
+		index = 0
+		for item in self.items:
+			index = self.itemsControl.InsertItem(index+1, str(item), 0)
+	
+	def OnListClick(self, id, code):
+		if code==commctrl.NM_DBLCLK:
+			self.EndDialog(win32con.IDOK)
+		return 1
+	
+	def OnListItemChange(self,std, extra):
+		(hwndFrom, idFrom, code), (itemNotify, sub, newState, oldState, change, point, lparam) = std, extra
+		oldSel = (oldState & commctrl.LVIS_SELECTED)<>0
+		newSel = (newState & commctrl.LVIS_SELECTED)<>0
+		if oldSel <> newSel:
+			try:
+				self.selecteditem = itemNotify
+				self.butOK.EnableWindow(1)
+			except win32ui.error:
+				self.selecteditem = None
+	
+	
+	def OnInitDialog (self):
+		rc = dialog.Dialog.OnInitDialog (self)
+		self.itemsControl = self.GetDlgItem(win32ui.IDC_LIST1)
+		self.butOK = self.GetDlgItem(win32con.IDOK)
+		self.butCancel = self.GetDlgItem(win32con.IDCANCEL)
+		
+		self.FillList()		
+		
+		size = self.GetWindowRect()
+		self.LayoutControls(size[2]-size[0], size[3]-size[1])
+		self.butOK.EnableWindow(0) # wait for first selection
+		return rc
+		
+	def LayoutControls(self, w, h):
+		self.itemsControl.MoveWindow((0,0,w,h-30))
+		self.butCancel.MoveWindow((10, h-24, 60, h-4))
+		self.butOK.MoveWindow((w-60, h-24, w-10, h-4))
+	
+	def on_size (self, params):
+		lparam = params[3]
+		w = win32api.LOWORD(lparam)
+		h = win32api.HIWORD(lparam)
+		self.LayoutControls(w, h)
+
+class ListsDialog(ListDialog):
+	def __init__(self, title, list, colHeadings = ['Item']):
+		ListDialog.__init__(self, title, list)
+		self.colHeadings = colHeadings
+
+	def FillList(self):
+		index = 0
+		size = self.GetWindowRect()
+		width = size[2] - size[0] - (10) - win32api.GetSystemMetrics(win32con.SM_CXVSCROLL)
+		numCols = len(self.colHeadings)
+
+		for col in self.colHeadings:
+			itemDetails = (commctrl.LVCFMT_LEFT, width/numCols, col, 0)
+			self.itemsControl.InsertColumn(index, itemDetails)
+			index = index + 1
+		index = 0
+		for items in self.items:
+			index = self.itemsControl.InsertItem(index+1, str(items[0]), 0)
+			for itemno in range(1,numCols):
+				item = items[itemno]
+				self.itemsControl.SetItemText(index, itemno, str(item))
+	
+def SelectFromList (title, lst):
+	dlg = ListDialog(title, lst)
+	if dlg.DoModal()==win32con.IDOK:
+		return dlg.selecteditem
+	else:
+		return None
+    	
+def SelectFromLists (title, lists, headings):
+	dlg = ListsDialog(title, lists, headings)
+	if dlg.DoModal()==win32con.IDOK:
+		return dlg.selecteditem
+	else:
+		return None
+
+def test():
+#	print SelectFromList('Single list',  [1,2,3])
+	print SelectFromLists('Multi-List', [ ('1',1, 'a'), ('2',2, 'b'), ('3',3, 'c' )], ['Col 1', 'Col 2'])
+
+if __name__=='__main__':	
+	test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/login.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/login.py
new file mode 100644
index 0000000..efaa15a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/login.py
@@ -0,0 +1,121 @@
+'''login -- PythonWin user ID and password dialog box
+
+(Adapted from originally distributed with Mark Hammond's PythonWin - 
+this now replaces it!)
+
+login.GetLogin() displays a modal "OK/Cancel" dialog box with input
+fields for a user ID and password. The password field input is masked
+with *'s. GetLogin takes two optional parameters, a window title, and a
+default user ID. If these parameters are omitted, the title defaults to
+"Login", and the user ID is left blank. GetLogin returns a (userid, password)
+tuple. GetLogin can be called from scripts running on the console - i.e. you
+don't need to write a full-blown GUI app to use it.
+
+login.GetPassword() is similar, except there is no username field.
+
+Example:
+import pywin.dialogs.login
+title = "FTP Login"
+def_user = "fred"
+userid, password = pywin.dialogs.login.GetLogin(title, def_user)
+
+Jim Eggleston, 28 August 1996
+Merged with dlgpass and moved to pywin.dialogs by Mark Hammond Jan 1998.
+'''
+
+import win32ui
+import win32api
+import win32con
+from pywin.mfc import dialog
+
+def MakeLoginDlgTemplate(title):
+	style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
+	cs = win32con.WS_CHILD | win32con.WS_VISIBLE
+
+	# Window frame and title
+	dlg = [ [title, (0, 0, 184, 40), style, None, (8, "MS Sans Serif")], ]
+
+	# ID label and text box
+	dlg.append([130, "User ID:", -1, (7, 9, 69, 9), cs | win32con.SS_LEFT])
+	s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER 
+	dlg.append(['EDIT', None, win32ui.IDC_EDIT1, (50, 7, 60, 12), s])
+
+	# Password label and text box
+	dlg.append([130, "Password:", -1, (7, 22, 69, 9), cs | win32con.SS_LEFT])
+	s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER 
+	dlg.append(['EDIT', None, win32ui.IDC_EDIT2, (50, 20, 60, 12), s | win32con.ES_PASSWORD])
+
+	# OK/Cancel Buttons
+	s = cs | win32con.WS_TABSTOP 
+	dlg.append([128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
+	s = win32con.BS_PUSHBUTTON | s
+	dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 20, 50, 14), s])
+	return dlg
+
+def MakePasswordDlgTemplate(title):
+	style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
+	cs = win32con.WS_CHILD | win32con.WS_VISIBLE
+	# Window frame and title
+	dlg = [ [title, (0, 0, 177, 45), style, None, (8, "MS Sans Serif")], ]
+	
+	# Password label and text box
+	dlg.append([130, "Password:", -1, (7, 7, 69, 9), cs | win32con.SS_LEFT])
+	s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER 
+	dlg.append(['EDIT', None, win32ui.IDC_EDIT1, (50, 7, 60, 12), s | win32con.ES_PASSWORD])
+	
+	# OK/Cancel Buttons
+	s = cs | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON
+	dlg.append([128, "OK", win32con.IDOK, (124, 5, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
+	dlg.append([128, "Cancel", win32con.IDCANCEL, (124, 22, 50, 14), s])
+	return dlg
+
+class LoginDlg(dialog.Dialog):
+	Cancel = 0
+	def __init__(self, title):
+		dialog.Dialog.__init__(self, MakeLoginDlgTemplate(title) )
+		self.AddDDX(win32ui.IDC_EDIT1,'userid')
+		self.AddDDX(win32ui.IDC_EDIT2,'password')
+
+def GetLogin(title='Login', userid='', password=''):
+	d = LoginDlg(title)
+	d['userid'] = userid
+	d['password'] = password
+	if d.DoModal() != win32con.IDOK:
+		return (None, None)
+	else:	
+		return (d['userid'], d['password'])
+
+class PasswordDlg(dialog.Dialog):
+	def __init__(self, title):
+		dialog.Dialog.__init__(self, MakePasswordDlgTemplate(title) )
+		self.AddDDX(win32ui.IDC_EDIT1,'password')
+
+def GetPassword(title='Password', password=''):
+	d = PasswordDlg(title)
+	d['password'] = password
+	if d.DoModal()!=win32con.IDOK:
+		return None
+	return d['password']
+
+if __name__ == "__main__":
+	import sys
+	title = 'Login'
+	def_user = ''
+	if len(sys.argv) > 1:
+		title = sys.argv[1]
+	if len(sys.argv) > 2:
+		def_userid = sys.argv[2]
+	userid, password = GetLogin(title, def_user)
+	if userid == password == None:
+		print "User pressed Cancel"
+	else:
+		print "User ID: ", userid
+		print "Password:", password
+		newpassword = GetPassword("Reenter just for fun", password)
+		if newpassword is None:
+			print "User cancelled"
+		else:
+			what = ""
+			if newpassword != password:
+				what = "not "
+			print "The passwords did %smatch" % (what)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/status.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/status.py
new file mode 100644
index 0000000..bd188f5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/dialogs/status.py
@@ -0,0 +1,226 @@
+# No cancel button.
+
+from pywin.mfc import dialog, thread
+import threading
+import win32ui
+import win32con
+import win32api
+import time
+
+def MakeProgressDlgTemplate(caption, staticText = ""):
+    style = (win32con.DS_MODALFRAME |
+	     win32con.WS_POPUP |
+	     win32con.WS_VISIBLE |
+	     win32con.WS_CAPTION |
+	     win32con.WS_SYSMENU |
+	     win32con.DS_SETFONT)
+    cs = (win32con.WS_CHILD |
+	  win32con.WS_VISIBLE)
+
+    w = 215
+    h = 36 # With button
+    h = 40
+
+    dlg = [[caption,
+	    (0, 0, w, h),
+	    style,
+	    None,
+	    (8, "MS Sans Serif")],
+	   ]
+
+    s = win32con.WS_TABSTOP | cs
+    
+    dlg.append([130, staticText, 1000, (7, 7, w-7, h-32), cs | win32con.SS_LEFT])
+
+#    dlg.append([128,
+#		"Cancel",
+#		win32con.IDCANCEL,
+#		(w - 60, h - 18, 50, 14), s | win32con.BS_PUSHBUTTON])
+
+    return dlg
+
+class CStatusProgressDialog(dialog.Dialog):
+	def __init__(self, title, msg = "", maxticks = 100, tickincr = 1):
+		self.initMsg = msg
+		templ = MakeProgressDlgTemplate(title, msg)
+		dialog.Dialog.__init__(self, templ)
+		self.maxticks = maxticks
+		self.tickincr = tickincr
+		self.pbar = None
+		
+	def OnInitDialog(self):
+		rc = dialog.Dialog.OnInitDialog(self)
+		self.static = self.GetDlgItem(1000)
+		self.pbar = win32ui.CreateProgressCtrl()
+		self.pbar.CreateWindow (win32con.WS_CHILD |
+						win32con.WS_VISIBLE,
+						(10, 30, 310, 44),
+						self, 1001)
+		self.pbar.SetRange(0, self.maxticks)
+		self.pbar.SetStep(self.tickincr)
+		self.progress = 0
+		self.pincr = 5
+		return rc
+	
+	def Close(self):
+		self.EndDialog(0)
+
+	def SetMaxTicks(self, maxticks):
+		if self.pbar is not None:
+			self.pbar.SetRange(0, maxticks)
+
+	def Tick(self):
+		if self.pbar is not None:
+			self.pbar.StepIt()
+
+	def SetTitle(self, text):
+		self.SetWindowText(text)
+
+	def SetText(self, text):
+		self.SetDlgItemText(1000, text)
+
+	def Set(self, pos, max = None):
+		if self.pbar is not None:
+			self.pbar.SetPos(pos)
+			if max is not None:
+				self.pbar.SetRange(0, max)
+
+# a progress dialog created in a new thread - especially suitable for
+# console apps with no message loop.
+MYWM_SETTITLE = win32con.WM_USER+10
+MYWM_SETMSG = win32con.WM_USER+11
+MYWM_TICK = win32con.WM_USER+12
+MYWM_SETMAXTICKS = win32con.WM_USER+13
+MYWM_SET = win32con.WM_USER+14
+
+class CThreadedStatusProcessDialog(CStatusProgressDialog):
+	def __init__(self, title, msg = "", maxticks = 100, tickincr = 1):
+		self.title = title
+		self.msg = msg
+		self.threadid = win32api.GetCurrentThreadId()
+		CStatusProgressDialog.__init__(self, title, msg, maxticks, tickincr)
+
+	def OnInitDialog(self):
+		rc = CStatusProgressDialog.OnInitDialog(self)
+		self.HookMessage(self.OnTitle, MYWM_SETTITLE)
+		self.HookMessage(self.OnMsg, MYWM_SETMSG)
+		self.HookMessage(self.OnTick, MYWM_TICK)
+		self.HookMessage(self.OnMaxTicks, MYWM_SETMAXTICKS)
+		self.HookMessage(self.OnSet, MYWM_SET)
+		return rc
+
+	def _Send(self, msg):
+		try:
+			self.PostMessage(msg)
+		except win32ui.error:
+			# the user closed the window - but this does not cancel the
+			# process - so just ignore it.
+			pass
+
+	def OnTitle(self, msg):
+		CStatusProgressDialog.SetTitle(self, self.title)
+
+	def OnMsg(self, msg):
+		CStatusProgressDialog.SetText(self, self.msg)
+
+	def OnTick(self, msg):
+		CStatusProgressDialog.Tick(self)
+
+	def OnMaxTicks(self, msg):
+		CStatusProgressDialog.SetMaxTicks(self, self.maxticks)
+
+	def OnSet(self, msg):
+		CStatusProgressDialog.Set(self, self.pos, self.max)
+
+	def Close(self):
+		assert self.threadid, "No thread!"
+		win32api.PostThreadMessage(self.threadid, win32con.WM_QUIT, 0, 0)
+
+	def SetMaxTicks(self, maxticks):
+		self.maxticks = maxticks
+		self._Send(MYWM_SETMAXTICKS)
+	def SetTitle(self, title):
+		self.title = title
+		self._Send(MYWM_SETTITLE)
+	def SetText(self, text):
+		self.msg = text
+		self._Send(MYWM_SETMSG)
+	def Tick(self):
+		self._Send(MYWM_TICK)
+	def Set(self, pos, max = None):
+		self.pos = pos
+		self.max = max
+		self._Send(MYWM_SET)
+
+class ProgressThread(thread.WinThread):
+	def __init__(self,  title, msg = "", maxticks = 100, tickincr = 1):
+		self.title = title
+		self.msg = msg
+		self.maxticks = maxticks
+		self.tickincr = tickincr
+		self.dialog = None
+		thread.WinThread.__init__(self)
+		self.createdEvent = threading.Event()
+
+	def InitInstance(self):
+		self.dialog = CThreadedStatusProcessDialog( self.title, self.msg, self.maxticks, self.tickincr)
+		self.dialog.CreateWindow()
+		try:
+			self.dialog.SetForegroundWindow()
+		except win32ui.error:
+			pass
+		self.createdEvent.set()
+		return thread.WinThread.InitInstance(self)
+
+	def ExitInstance(self):
+		return 0
+
+
+def StatusProgressDialog(title, msg = "", maxticks = 100, parent = None):
+	d = CStatusProgressDialog (title, msg, maxticks)
+	d.CreateWindow (parent)
+	return d
+
+def ThreadedStatusProgressDialog(title, msg = "", maxticks = 100):
+	t = ProgressThread(title, msg, maxticks)
+	t.CreateThread()
+	# Need to run a basic "PumpWaitingMessages" loop just incase we are
+	# running inside Pythonwin.
+	# Basic timeout incase things go terribly wrong.  Ideally we should use
+	# win32event.MsgWaitForMultipleObjects(), but we use a threading module
+	# event - so use a dumb strategy
+	end_time = time.time() + 10
+	while time.time() < end_time:
+		if t.createdEvent.isSet():
+			break
+		win32ui.PumpWaitingMessages()
+		time.sleep(0.1)
+	return t.dialog
+
+def demo():
+	d = StatusProgressDialog("A Demo", "Doing something...")
+	import win32api
+	for i in range(100):
+		if i == 50:
+			d.SetText("Getting there...")
+		if i==90:
+			d.SetText("Nearly done...")
+		win32api.Sleep(20)
+		d.Tick()
+	d.Close()
+
+def thread_demo():
+	d = ThreadedStatusProgressDialog("A threaded demo", "Doing something")
+	import win32api
+	for i in range(100):
+		if i == 50:
+			d.SetText("Getting there...")
+		if i==90:
+			d.SetText("Nearly done...")
+		win32api.Sleep(20)
+		d.Tick()
+	d.Close()
+
+if __name__=='__main__':
+	thread_demo()
+	#demo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/docking/DockingBar.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/docking/DockingBar.py
new file mode 100644
index 0000000..0238b4c4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/docking/DockingBar.py
@@ -0,0 +1,527 @@
+# DockingBar.py
+
+# Ported directly (comments and all) from the samples at www.codeguru.com
+
+# WARNING: Use at your own risk, as this interface is highly likely to change.
+# Currently we support only one child per DockingBar.  Later we need to add
+# support for multiple children.
+
+import win32api, win32con, win32ui
+from pywin.mfc import afxres, window
+import struct
+
+clrBtnHilight = win32api.GetSysColor(win32con.COLOR_BTNHILIGHT)
+clrBtnShadow = win32api.GetSysColor(win32con.COLOR_BTNSHADOW)
+
+def CenterPoint(rect):
+	width = rect[2]-rect[0]
+	height = rect[3]-rect[1]
+	return rect[0] + width/2, rect[1] + height/2
+
+def OffsetRect(rect, (x, y) ):
+	return rect[0]+x, rect[1]+y, rect[2]+x, rect[3]+y
+
+def DeflateRect(rect, (x,y) ):
+	return rect[0]+x, rect[1]+y, rect[2]-x, rect[3]-y
+
+def PtInRect(rect, pt):
+	return rect[0] <= pt[0] < rect[2] and rect[1] <= pt[1] < rect[3]
+
+class DockingBar(window.Wnd):
+	def __init__(self, obj=None):
+		if obj is None:
+			obj = win32ui.CreateControlBar()
+		window.Wnd.__init__(self, obj)
+		self.dialog = None
+		self.nDockBarID = 0
+		self.sizeMin = 32, 32
+		self.sizeHorz = 200, 200
+		self.sizeVert = 200, 200
+		self.sizeFloat = 200, 200
+		self.bTracking = 0
+		self.bInRecalcNC = 0
+		self.cxEdge = 6
+		self.cxBorder = 3
+		self.cxGripper = 20
+		self.brushBkgd = win32ui.CreateBrush()
+		self.brushBkgd.CreateSolidBrush(win32api.GetSysColor(win32con.COLOR_BTNFACE))
+
+		# Support for diagonal resizing
+		self.cyBorder = 3
+		self.cCaptionSize = win32api.GetSystemMetrics(win32con.SM_CYSMCAPTION)
+		self.cMinWidth = win32api.GetSystemMetrics(win32con.SM_CXMIN)
+		self.cMinHeight = win32api.GetSystemMetrics(win32con.SM_CYMIN)
+
+	def OnUpdateCmdUI(self, target, bDisableIfNoHndler):
+		return self.UpdateDialogControls(target, bDisableIfNoHndler)
+
+	def CreateWindow(self, parent, childCreator, title, id, style=win32con.WS_CHILD | win32con.WS_VISIBLE | afxres.CBRS_LEFT, childCreatorArgs=()):
+		assert not ((style & afxres.CBRS_SIZE_FIXED) and (style & afxres.CBRS_SIZE_DYNAMIC)), "Invalid style"
+		self.rectClose = self.rectBorder = self.rectGripper = self.rectTracker = 0,0,0,0
+
+		# save the style
+		self._obj_.dwStyle = style & afxres.CBRS_ALL
+
+		cursor = win32api.LoadCursor(0, win32con.IDC_ARROW)
+		wndClass = win32ui.RegisterWndClass(win32con.CS_DBLCLKS, cursor, self.brushBkgd.GetSafeHandle(), 0)
+
+		self._obj_.CreateWindow(wndClass, title, style, (0,0,0,0), parent, id)
+
+		# Create the child dialog
+		self.dialog = apply(childCreator, (self,) + childCreatorArgs)
+
+		# use the dialog dimensions as default base dimensions
+		assert self.dialog.IsWindow(), "The childCreator function %s did not create a window!" % childCreator
+		rect = self.dialog.GetWindowRect()
+		self.sizeHorz = self.sizeVert = self.sizeFloat = rect[2]-rect[0], rect[3]-rect[1]
+
+		self.sizeHorz = self.sizeHorz[0], self.sizeHorz[1] + self.cxEdge + self.cxBorder
+		self.sizeVert = self.sizeVert[0] + self.cxEdge + self.cxBorder, self.sizeVert[1]
+		self.HookMessages()
+
+	def CalcFixedLayout(self, bStretch, bHorz):
+		rectTop = self.dockSite.GetControlBar(afxres.AFX_IDW_DOCKBAR_TOP).GetWindowRect()
+		rectLeft = self.dockSite.GetControlBar(afxres.AFX_IDW_DOCKBAR_LEFT).GetWindowRect()
+		if bStretch:
+			nHorzDockBarWidth = 32767
+			nVertDockBarHeight = 32767
+		else:
+			nHorzDockBarWidth = rectTop[2]-rectTop[0] + 4
+			nVertDockBarHeight = rectLeft[3]-rectLeft[1] + 4
+
+		if self.IsFloating():
+			return self.sizeFloat
+		if bHorz:
+			return nHorzDockBarWidth, self.sizeHorz[1]
+		return self.sizeVert[0], nVertDockBarHeight
+
+	def CalcDynamicLayout(self, length, mode):
+		# Support for diagonal sizing.
+		if self.IsFloating():
+			self.GetParent().GetParent().ModifyStyle(win32ui.MFS_4THICKFRAME, 0)
+		if mode & (win32ui.LM_HORZDOCK | win32ui.LM_VERTDOCK):
+			flags = win32con.SWP_NOSIZE | win32con.SWP_NOMOVE | win32con.SWP_NOZORDER |\
+				win32con.SWP_NOACTIVATE | win32con.SWP_FRAMECHANGED
+			self.SetWindowPos(0, (0, 0, 0, 0,), flags)
+			self.dockSite.RecalcLayout()
+			return self._obj_.CalcDynamicLayout(length, mode)
+
+		if mode & win32ui.LM_MRUWIDTH:
+			return self.sizeFloat
+		if mode & win32ui.LM_COMMIT:
+			self.sizeFloat = length, self.sizeFloat[1]
+			return self.sizeFloat
+		# More diagonal sizing.
+		if self.IsFloating():
+			dc = self.dockContext
+			pt = win32api.GetCursorPos()
+			windowRect = self.GetParent().GetParent().GetWindowRect()
+
+			hittest = dc.nHitTest
+			if hittest==win32con.HTTOPLEFT:
+				cx = max(windowRect[2] - pt[0], self.cMinWidth) - self.cxBorder
+				cy = max(windowRect[3] - self.cCaptionSize - pt[1],self.cMinHeight) - 1
+				self.sizeFloat = cx, cy
+
+				top = min(pt[1], windowRect[3] - self.cCaptionSize - self.cMinHeight) - self.cyBorder
+				left = min(pt[0], windowRect[2] - self.cMinWidth) - 1
+				dc.rectFrameDragHorz = left, top, dc.rectFrameDragHorz[2], dc.rectFrameDragHorz[3]
+				return self.sizeFloat
+			if hittest==win32con.HTTOPRIGHT:
+				cx = max(pt[0] - windowRect[0], self.cMinWidth)
+				cy = max(windowRect[3] - self.cCaptionSize - pt[1], self.cMinHeight) - 1
+				self.sizeFloat = cx, cy
+
+				top = min(pt[1], windowRect[3] - self.cCaptionSize - self.cMinHeight) - self.cyBorder
+				dc.rectFrameDragHorz = dc.rectFrameDragHorz[0], top, dc.rectFrameDragHorz[2], dc.rectFrameDragHorz[3]
+				return self.sizeFloat
+
+			if hittest==win32con.HTBOTTOMLEFT:
+				cx = max(windowRect[2] - pt[0], self.cMinWidth) - self.cxBorder
+				cy = max(pt[1] - windowRect[1] - self.cCaptionSize, self.cMinHeight)
+				self.sizeFloat = cx, cy
+
+				left = min(pt[0], windowRect[2] -self.cMinWidth) - 1
+				dc.rectFrameDragHorz = left, dc.rectFrameDragHorz[1], dc.rectFrameDragHorz[2], dc.rectFrameDragHorz[3]
+				return self.sizeFloat
+
+			if hittest==win32con.HTBOTTOMRIGHT:
+				cx = max(pt[0] - windowRect[0], self.cMinWidth)
+				cy = max(pt[1] - windowRect[1] - self.cCaptionSize, self.cMinHeight)
+				self.sizeFloat = cx, cy
+				return self.sizeFloat
+
+		if mode & win32ui.LM_LENGTHY:
+			self.sizeFloat = self.sizeFloat[0], max(self.sizeMin[1], length)
+			return self.sizeFloat
+		else:
+			return max(self.sizeMin[0], length), self.sizeFloat[1]
+
+	def OnWindowPosChanged(self, msg):
+		if self.GetSafeHwnd()==0 or self.dialog is None:
+			return 0
+		lparam = msg[3]
+		format = "iiiiiii"
+		bytes = win32ui.GetBytes( lparam, struct.calcsize(format) )
+		hwnd, hwndAfter, x, y, cx, cy, flags = struct.unpack(format, bytes)
+
+		if self.bInRecalcNC:
+			rc = self.GetClientRect()
+			self.dialog.MoveWindow(rc)
+			return 0
+		# Find on which side are we docked
+		nDockBarID = self.GetParent().GetDlgCtrlID()
+		# Return if dropped at same location
+		# no docking side change and no size change
+		if (nDockBarID == self.nDockBarID) and \
+			(flags & win32con.SWP_NOSIZE) and \
+			((self._obj_.dwStyle & afxres.CBRS_BORDER_ANY) != afxres.CBRS_BORDER_ANY):
+			return
+		self.nDockBarID = nDockBarID
+
+		# Force recalc the non-client area
+		self.bInRecalcNC = 1
+		try:
+			swpflags = win32con.SWP_NOSIZE | win32con.SWP_NOMOVE | win32con.SWP_NOZORDER | win32con.SWP_FRAMECHANGED
+			self.SetWindowPos(0, (0,0,0,0), swpflags)
+		finally:
+			self.bInRecalcNC = 0
+		return 0
+
+	# This is a virtual and not a message hook.
+	def OnSetCursor(self, window, nHitTest, wMouseMsg):
+		if nHitTest != win32con.HTSIZE or self.bTracking:
+			return self._obj_.OnSetCursor(window, nHitTest, wMouseMsg)
+
+		if self.IsHorz():
+			win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_SIZENS))
+		else:
+			win32api.SetCursor(win32api.LoadCursor(0, win32con.IDC_SIZEWE))
+		return 1
+
+	# Mouse Handling
+	def OnLButtonUp(self, msg):
+		if not self.bTracking:
+			return 1 # pass it on.
+		self.StopTracking(1)
+		return 0 # Dont pass on
+
+	def OnLButtonDown(self, msg):
+		# UINT nFlags, CPoint point) 
+		# only start dragging if clicked in "void" space
+		if self.dockBar is not None:
+			# start the drag
+			pt = msg[5]
+			pt = self.ClientToScreen(pt)
+			self.dockContext.StartDrag(pt)
+			return 0
+		return 1
+
+	def OnNcLButtonDown(self, msg):
+		if self.bTracking: return 0
+		nHitTest = wparam = msg[2]
+		pt = msg[5]
+
+		if nHitTest==win32con.HTSYSMENU and not self.IsFloating():
+			self.GetDockingFrame().ShowControlBar(self, 0, 0)
+		elif nHitTest == win32con.HTMINBUTTON and not self.IsFloating():
+			self.dockContext.ToggleDocking()
+		elif nHitTest == win32con.HTCAPTION and not self.IsFloating() and self.dockBar is not None:
+			self.dockContext.StartDrag(pt)
+		elif nHitTest == win32con.HTSIZE and not self.IsFloating():
+			self.StartTracking()
+		else:
+			return 1
+		return 0
+
+	def OnLButtonDblClk(self, msg):
+		# only toggle docking if clicked in "void" space
+		if self.dockBar is not None:
+			# toggle docking
+			self.dockContext.ToggleDocking()
+			return 0
+		return 1
+
+	def OnNcLButtonDblClk(self, msg):
+		nHitTest = wparam = msg[2]
+		# UINT nHitTest, CPoint point) 
+		if self.dockBar is not None and nHitTest == win32con.HTCAPTION:
+			# toggle docking
+			self.dockContext.ToggleDocking()
+			return 0
+		return 1
+
+	def OnMouseMove(self, msg):
+		flags = wparam = msg[2]
+		lparam = msg[3]
+		if self.IsFloating() or not self.bTracking:
+			return 1
+
+		# Convert unsigned 16 bit to signed 32 bit.
+		x=win32api.LOWORD(lparam)
+		if x & 32768: x = x | -65536
+		y = win32api.HIWORD(lparam)
+		if y & 32768: y = y | -65536
+		pt = x, y
+		cpt = CenterPoint(self.rectTracker)
+		pt = self.ClientToWnd(pt)
+		if self.IsHorz():
+			if cpt[1] != pt[1]:
+				self.OnInvertTracker(self.rectTracker)
+				self.rectTracker = OffsetRect(self.rectTracker, (0, pt[1] - cpt[1]))
+				self.OnInvertTracker(self.rectTracker)
+		else:
+			if cpt[0] != pt[0]:
+				self.OnInvertTracker(self.rectTracker)
+				self.rectTracker = OffsetRect(self.rectTracker, (pt[0]-cpt[0], 0))
+				self.OnInvertTracker(self.rectTracker)
+
+		return 0 # Dont pass it on.
+
+#	def OnBarStyleChange(self, old, new):
+
+	def OnNcCalcSize(self, bCalcValid, (rc0, rc1, rc2, pos)):
+		self.rectBorder = self.GetWindowRect()
+		self.rectBorder = OffsetRect( self.rectBorder, (-self.rectBorder[0], -self.rectBorder[1]) )
+
+		dwBorderStyle = self._obj_.dwStyle | afxres.CBRS_BORDER_ANY
+
+		if self.nDockBarID==afxres.AFX_IDW_DOCKBAR_TOP:
+			dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_BOTTOM;
+			rc0.left = rc0.left + self.cxGripper
+			rc0.bottom = rc0.bottom-self.cxEdge
+			rc0.top = rc0.top + self.cxBorder
+			rc0.right = rc0.right - self.cxBorder
+			self.rectBorder = self.rectBorder[0], self.rectBorder[3]-self.cxEdge, self.rectBorder[2], self.rectBorder[3]
+		elif self.nDockBarID==afxres.AFX_IDW_DOCKBAR_BOTTOM:
+			dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_TOP
+			rc0.left = rc0.left + self.cxGripper
+			rc0.top = rc0.top + self.cxEdge
+			rc0.bottom = rc0.bottom - self.cxBorder
+			rc0.right = rc0.right - self.cxBorder
+			self.rectBorder = self.rectBorder[0], self.rectBorder[1], self.rectBorder[2], self.rectBorder[1]+self.cxEdge
+		elif self.nDockBarID==afxres.AFX_IDW_DOCKBAR_LEFT:
+			dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_RIGHT
+			rc0.right = rc0.right - self.cxEdge
+			rc0.left = rc0.left + self.cxBorder
+			rc0.bottom = rc0.bottom - self.cxBorder
+			rc0.top = rc0.top + self.cxGripper
+			self.rectBorder = self.rectBorder[2] - self.cxEdge, self.rectBorder[1], self.rectBorder[2], self.rectBorder[3]
+		elif self.nDockBarID==afxres.AFX_IDW_DOCKBAR_RIGHT:
+			dwBorderStyle = dwBorderStyle & ~afxres.CBRS_BORDER_LEFT
+			rc0.left = rc0.left + self.cxEdge
+			rc0.right = rc0.right - self.cxBorder
+			rc0.bottom = rc0.bottom - self.cxBorder
+			rc0.top = rc0.top + self.cxGripper
+			self.rectBorder = self.rectBorder[0], self.rectBorder[1], self.rectBorder[0]+self.cxEdge, self.rectBorder[3]
+		else:
+			self.rectBorder = 0,0,0,0
+		
+		self.SetBarStyle(dwBorderStyle)
+		return 0
+
+	def OnNcPaint(self, msg):
+		# WM_NCPAINT handler: draw the 3D sizing edge and the gripper into
+		# the non-client area, then invalidate the client area so it repaints.
+		self.EraseNonClient()
+		dc = self.GetWindowDC()
+		ctl = win32api.GetSysColor(win32con.COLOR_BTNHIGHLIGHT)
+		cbr = win32api.GetSysColor(win32con.COLOR_BTNSHADOW)
+		dc.Draw3dRect(self.rectBorder, ctl, cbr)
+
+		self.DrawGripper(dc)
+
+		rect = self.GetClientRect()
+		self.InvalidateRect( rect, 1)
+		return 0
+
<doc_update>
+	def OnNcHitTest(self, pt): # A virtual, not a hooked message.
+		# Map a screen point to an HT* hit-test code so the bar's close box,
+		# undock box, gripper and sizing edge each behave appropriately.
+		if self.IsFloating():
+			return 1
+
+		ptOrig = pt
+		rect = self.GetWindowRect()
+		# Convert the screen point to window-relative coordinates.
+		pt = pt[0] - rect[0], pt[1] - rect[1]
+		
+		if PtInRect(self.rectClose, pt):
+			return win32con.HTSYSMENU
+		elif PtInRect(self.rectUndock, pt):
+			return win32con.HTMINBUTTON
+		elif PtInRect(self.rectGripper, pt):
+			return win32con.HTCAPTION
+		elif PtInRect(self.rectBorder, pt):
+			return win32con.HTSIZE
+		else:
+			# Not one of ours - let the default implementation decide.
+			return self._obj_.OnNcHitTest(ptOrig)
+
+	def StartTracking(self):
+		# Begin an interactive resize drag: capture the mouse, freeze the
+		# dock site's painting, and draw the initial XOR tracker rectangle.
+		self.SetCapture()
+
+		# make sure no updates are pending
+		self.RedrawWindow(None, None, win32con.RDW_ALLCHILDREN | win32con.RDW_UPDATENOW)
+		self.dockSite.LockWindowUpdate()
+
+		# Remember where the drag started so StopTracking can compute travel.
+		self.ptOld = CenterPoint(self.rectBorder)
+		self.bTracking = 1
+
+		self.rectTracker = self.rectBorder;
+		if not self.IsHorz():
+			l, t, r, b = self.rectTracker
+			b = b - 4
+			self.rectTracker = l, t, r, b
+
+		self.OnInvertTracker(self.rectTracker);
+
+	def OnCaptureChanged(self, msg):
+		# WM_CAPTURECHANGED handler: if another window stole the mouse
+		# capture mid-drag, abandon the resize without applying it.
+		hwnd = lparam = msg[3]
+		if self.bTracking and hwnd != self.GetSafeHwnd():
+			self.StopTracking(0) # cancel tracking
+		return 1
+
+	def StopTracking(self, bAccept):
+		# End a resize drag.  Always erases the tracker and releases the
+		# capture/lock; if bAccept is true, applies the new size (clamped to
+		# [minsize, maxsize]) and asks the dock site to re-lay out.
+		self.OnInvertTracker(self.rectTracker)
+		self.dockSite.UnlockWindowUpdate()
+		self.bTracking = 0
+		self.ReleaseCapture()
+		if not bAccept: return
+
+		rcc = self.dockSite.GetWindowRect()
+		if self.IsHorz():
+			newsize = self.sizeHorz[1]
+			maxsize = newsize + (rcc[3]-rcc[1])
+			minsize = self.sizeMin[1]
+		else:
+			newsize = self.sizeVert[0]
+			maxsize = newsize + (rcc[2]-rcc[0])
+			minsize = self.sizeMin[0]
+
+		# How far the tracker travelled from the drag origin determines the
+		# size delta; direction depends on which edge the bar is docked to.
+		pt = CenterPoint(self.rectTracker)
+		if self.nDockBarID== afxres.AFX_IDW_DOCKBAR_TOP:
+			newsize = newsize + (pt[1] - self.ptOld[1])
+		elif self.nDockBarID== afxres.AFX_IDW_DOCKBAR_BOTTOM:
+			newsize = newsize + (- pt[1] + self.ptOld[1])
+		elif self.nDockBarID== afxres.AFX_IDW_DOCKBAR_LEFT:
+			newsize = newsize + (pt[0] - self.ptOld[0])
+		elif self.nDockBarID== afxres.AFX_IDW_DOCKBAR_RIGHT:
+			newsize = newsize + (- pt[0] + self.ptOld[0])
+		newsize = max(minsize, min(maxsize, newsize))
+		if self.IsHorz():
+			self.sizeHorz = self.sizeHorz[0], newsize
+		else:
+			self.sizeVert = newsize, self.sizeVert[1]
+		self.dockSite.RecalcLayout()
+		return 0
+
+	def OnInvertTracker(self, rect):
+		# XOR-draw the drag tracker rectangle on the dock site using a
+		# halftone brush and PATINVERT, so calling this twice with the same
+		# rect erases it again.
+		assert rect[2]-rect[0]>0 and rect[3]-rect[1]>0, "rect is empty"
+		assert self.bTracking
+		rcc = self.GetWindowRect()
+		rcf = self.dockSite.GetWindowRect()
+
+		# Translate from this bar's coordinates into the dock site's.
+		rect = OffsetRect(rect, (rcc[0] - rcf[0], rcc[1] - rcf[1]))
+		rect = DeflateRect(rect, (1, 1));
+
+		# DCX_LOCKWINDOWUPDATE lets us draw while the site is update-locked.
+		flags = win32con.DCX_WINDOW|win32con.DCX_CACHE|win32con.DCX_LOCKWINDOWUPDATE
+		dc = self.dockSite.GetDCEx(None, flags)
+		try:
+			brush = win32ui.GetHalftoneBrush()
+			oldBrush = dc.SelectObject(brush)
+
+			dc.PatBlt((rect[0], rect[1]), (rect[2]-rect[0], rect[3]-rect[1]), win32con.PATINVERT)
+			dc.SelectObject(oldBrush)
+		finally:
+			self.dockSite.ReleaseDC(dc)
+
+	def IsHorz(self):
+		# True if the bar is docked along the top or bottom edge.
+		return self.nDockBarID == afxres.AFX_IDW_DOCKBAR_TOP or \
+			self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM
+
+	def ClientToWnd(self, pt):
+		# Convert a client-area point to window coordinates, compensating
+		# for the sizing edge on bottom/right-docked bars.
+		x, y=pt
+		if self.nDockBarID == afxres.AFX_IDW_DOCKBAR_BOTTOM:
+			y = y + self.cxEdge
+		elif self.nDockBarID == afxres.AFX_IDW_DOCKBAR_RIGHT:
+			x = x + self.cxEdge
+		return x,y
+
+	def DrawGripper(self, dc):
+		# Draw the gripper bars plus the close and undock boxes, and record
+		# their rectangles (rectGripper/rectClose/rectUndock) for hit-testing.
+		# no gripper if floating
+		if self._obj_.dwStyle & afxres.CBRS_FLOATING:
+			return
+
+		# -==HACK==-
+		# in order to calculate the client area properly after docking,
+		# the client area must be recalculated twice (I have no idea why)
+		self.dockSite.RecalcLayout()
+		# -==END HACK==-
+
+		gripper = self.GetWindowRect()
+		gripper = self.ScreenToClient( gripper )
+		gripper = OffsetRect( gripper, (-gripper[0], -gripper[1]) )
+		gl, gt, gr, gb = gripper
+
+		# The magic offsets below size/position the boxes and the two 3-pixel
+		# gripper bars; values are in pixels relative to the window rect.
+		if self._obj_.dwStyle & afxres.CBRS_ORIENT_HORZ:
+			# gripper at left
+			self.rectGripper = gl, gt + 40, gl+20, gb
+			# draw close box
+			self.rectClose = gl+7, gt + 10, gl+19, gt+22
+			dc.DrawFrameControl(self.rectClose, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONCLOSE)
+			# draw docking toggle box
+			self.rectUndock = OffsetRect(self.rectClose, (0,13))
+			dc.DrawFrameControl(self.rectUndock, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONMAX);
+
+			gt = gt + 38
+			gb = gb - 10
+			gl = gl + 10
+			gr = gl + 3
+			gripper = gl, gt, gr, gb
+			dc.Draw3dRect( gripper, clrBtnHilight, clrBtnShadow )
+			dc.Draw3dRect( OffsetRect(gripper, (4,0)), clrBtnHilight, clrBtnShadow )
+		else:
+			# gripper at top
+			self.rectGripper = gl, gt, gr-40, gt+20
+			# draw close box
+			self.rectClose = gr-21, gt+7, gr-10, gt+18
+			dc.DrawFrameControl(self.rectClose, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONCLOSE)
+			#  draw docking toggle box
+			self.rectUndock = OffsetRect( self.rectClose, (-13,0) )
+			dc.DrawFrameControl(self.rectUndock, win32con.DFC_CAPTION, win32con.DFCS_CAPTIONMAX)
+			gr = gr - 38;
+			gl = gl + 10
+			gt = gt + 10
+			gb = gt + 3
+
+			gripper = gl, gt, gr, gb
+			dc.Draw3dRect( gripper, clrBtnHilight, clrBtnShadow )
+			dc.Draw3dRect( OffsetRect(gripper, (0,4) ), clrBtnHilight, clrBtnShadow )
+
+	def HookMessages(self):
+		# Wire the mouse and painting window messages to their handlers.
+		self.HookMessage(self.OnLButtonUp, win32con.WM_LBUTTONUP)
+		self.HookMessage(self.OnLButtonDown, win32con.WM_LBUTTONDOWN)
+		self.HookMessage(self.OnLButtonDblClk, win32con.WM_LBUTTONDBLCLK)
+		self.HookMessage(self.OnNcLButtonDown, win32con.WM_NCLBUTTONDOWN)
+		self.HookMessage(self.OnNcLButtonDblClk, win32con.WM_NCLBUTTONDBLCLK)
+		self.HookMessage(self.OnMouseMove, win32con.WM_MOUSEMOVE)
+		self.HookMessage(self.OnNcPaint, win32con.WM_NCPAINT)
+		self.HookMessage(self.OnCaptureChanged, win32con.WM_CAPTURECHANGED)
+		self.HookMessage(self.OnWindowPosChanged, win32con.WM_WINDOWPOSCHANGED)
+#		self.HookMessage(self.OnSize, win32con.WM_SIZE)
+
+def EditCreator(parent):
+	# Demo child-window factory: creates a bordered multi-line edit control
+	# inside the given parent (used as the content of the docking bar).
+	d = win32ui.CreateEdit()
+	es = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | win32con.ES_MULTILINE | win32con.ES_WANTRETURN
+	d.CreateWindow( es, (0,0,150,150), parent, 1000)
+	return d
+
+def test():
+	# Demo: create a DockingBar hosting an edit control and dock it at the
+	# bottom of the Pythonwin main frame.  Must run inside Pythonwin.
+	import pywin.mfc.dialog
+	global bar
+	bar = DockingBar()
+	creator = EditCreator
+	bar.CreateWindow(win32ui.GetMainFrame(), creator, "Coolbar Demo")
+#	win32ui.GetMainFrame().ShowControlBar(bar, 1, 0)
+	bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC)
+	bar.EnableDocking(afxres.CBRS_ALIGN_ANY)
+	win32ui.GetMainFrame().DockControlBar(bar, afxres.AFX_IDW_DOCKBAR_BOTTOM)
+
+
+# Run the demo when executed as a script (inside Pythonwin).
+if __name__=='__main__':
+	test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/docking/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/docking/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/docking/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/app.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/app.py
new file mode 100644
index 0000000..2275dc8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/app.py
@@ -0,0 +1,404 @@
+# App.py
+# Application stuff.
+# The application is responsible for managing the main frame window.
+#
+# We also grab the FileOpen command, to invoke our Python editor
+" The PythonWin application code. Manages most aspects of MDI, etc "
+import win32con
+import win32api
+import win32ui
+import sys
+import string
+import os
+from pywin.mfc import window, dialog, thread, afxres
+import traceback
+from pywin.framework import scriptutils
+
+## NOTE: App and AppBuilder should NOT be used - instead, you should construct your
+## APP class manually whenever you like (just ensure you leave these 2 params None!)
+## Whoever wants the generic "Application" should get it via win32ui.GetApp()
+
+# These are "legacy"
+AppBuilder = None 
+App = None	# default - if used, must end up a CApp derived class. 
+
+# Helpers that should one day be removed!
+def AddIdleHandler(handler):
+	# Deprecated module-level shim; forwards to the application object.
+	print "app.AddIdleHandler is deprecated - please use win32ui.GetApp().AddIdleHandler() instead."
+	return win32ui.GetApp().AddIdleHandler(handler)
+def DeleteIdleHandler(handler):
+	# Deprecated module-level shim; forwards to the application object.
+	print "app.DeleteIdleHandler is deprecated - please use win32ui.GetApp().DeleteIdleHandler() instead."
+	return win32ui.GetApp().DeleteIdleHandler(handler)
+
+# Helper for writing a Window position by name, and later loading it.
+def SaveWindowSize(section,rect,state=""):
+	""" Writes a rectangle to an INI file
+	Args: section = section name in the applications INI file
+	      rect = a (left, top, right, bottom) rectangle tuple
+	      state = optional prefix for the value names (eg "max") """
+	left, top, right, bottom = rect
+	if state: state = state + " "
+	win32ui.WriteProfileVal(section,state+"left",left)
+	win32ui.WriteProfileVal(section,state+"top",top)
+	win32ui.WriteProfileVal(section,state+"right",right)
+	win32ui.WriteProfileVal(section,state+"bottom",bottom)
+
+def LoadWindowSize(section, state=""):
+	""" Loads a section from an INI file, and returns a rect in a
+	(left, top, right, bottom) tuple (see SaveWindowSize).
+	Missing values default to 0. """
+	if state: state = state + " "
+	left = win32ui.GetProfileVal(section,state+"left",0)
+	top = win32ui.GetProfileVal(section,state+"top",0)
+	right = win32ui.GetProfileVal(section,state+"right",0)
+	bottom = win32ui.GetProfileVal(section,state+"bottom",0)
+	return (left, top, right, bottom)
+
+def RectToCreateStructRect(rect):
+	# Convert a (left, top, right, bottom) rect to the (cy, cx, y, x)
+	# position tuple used in CREATESTRUCT-style tuples.
+	return (rect[3]-rect[1], rect[2]-rect[0], rect[1], rect[0] )
+
+
+# Define FrameWindow and Application objects
+#
+# The Main Frame of the application.
+class MainFrame(window.MDIFrameWnd):
+	"""The application's main MDI frame: owns the status bar and persists
+	the window position in the profile under `sectionPos`."""
+	sectionPos = "Main Window"
+	statusBarIndicators = ( afxres.ID_SEPARATOR, #// status line indicator
+	                        afxres.ID_INDICATOR_CAPS,
+	                        afxres.ID_INDICATOR_NUM,
+	                        afxres.ID_INDICATOR_SCRL,
+	                        win32ui.ID_INDICATOR_LINENUM,
+	                        win32ui.ID_INDICATOR_COLNUM )
+
+	def OnCreate(self, cs):
+		# WM_CREATE: build the status bar; 0 lets creation proceed.
+		self._CreateStatusBar()
+		return 0
+
+	def _CreateStatusBar(self):
+		# Create the status bar and route the line/column panes through
+		# OnUpdatePosIndicator.
+		self.statusBar = win32ui.CreateStatusBar(self)
+		self.statusBar.SetIndicators(self.statusBarIndicators)
+		self.HookCommandUpdate(self.OnUpdatePosIndicator, win32ui.ID_INDICATOR_LINENUM)
+		self.HookCommandUpdate(self.OnUpdatePosIndicator, win32ui.ID_INDICATOR_COLNUM)
+
+	def OnUpdatePosIndicator(self, cmdui):
+		# Update-UI handler: show the caret's 1-based line/column for the
+		# active edit control, or blanks when there is none.
+		editControl = scriptutils.GetActiveEditControl()
+		value = " " * 5
+		if editControl is not None:
+			try:
+				startChar, endChar = editControl.GetSel()
+				lineNo = editControl.LineFromChar(startChar)
+				colNo = endChar - editControl.LineIndex(lineNo)
+
+				if cmdui.m_nID==win32ui.ID_INDICATOR_LINENUM:
+					value = "%0*d" % (5, lineNo + 1)
+				else:
+					value = "%0*d" % (3, colNo + 1)
+			except win32ui.error:
+				pass
+		cmdui.SetText(value)
+		cmdui.Enable()
+
+	def PreCreateWindow(self, cc):
+		# Restore the saved window position (if any non-empty one exists)
+		# by patching the position member of the create-struct tuple.
+		cc = self._obj_.PreCreateWindow(cc)
+		pos = LoadWindowSize(self.sectionPos)
+		self.startRect = pos
+		if pos[2] - pos[0]:
+			rect = RectToCreateStructRect(pos)
+			cc = cc[0], cc[1], cc[2], cc[3], rect, cc[5], cc[6], cc[7], cc[8]
+		return cc
+
+	def OnDestroy(self, msg):
+		# Persist the window position, but only if it actually changed.
+		# use GetWindowPlacement(), as it works even when min'd or max'd
+		rectNow = self.GetWindowPlacement()[4]
+		if rectNow != self.startRect:
+			SaveWindowSize(self.sectionPos, rectNow)
+		return 0
+
+class CApp(thread.WinApp):
+	""" A class for the application.  Creates the main frame, dispatches
+	idle handlers, and hooks the standard file/help commands. """
+	def __init__(self):
+		self.oldCallbackCaller = None
+		thread.WinApp.__init__(self, win32ui.GetApp() )
+		# List of callables invoked from OnIdle.
+		self.idleHandlers = []
+		
+	def InitInstance(self):
+		" Called to crank up the app "
+		numMRU = win32ui.GetProfileVal("Settings","Recent File List Size", 10)
+		win32ui.LoadStdProfileSettings(numMRU)
+#		self._obj_.InitMDIInstance()
+		# Pre-Win95/NT4 (major version < 4) needs explicit 3D control setup.
+		if win32api.GetVersionEx()[0]<4:
+			win32ui.SetDialogBkColor()
+			win32ui.Enable3dControls()
+
+		# install a "callback caller" - a manager for the callbacks
+#		self.oldCallbackCaller = win32ui.InstallCallbackCaller(self.CallbackManager)
+		self.LoadMainFrame()
+		self.SetApplicationPaths()
+
+	def ExitInstance(self):
+		" Called as the app dies - too late to prevent it here! "
+		win32ui.OutputDebug("Application shutdown\n")
+		# Restore the callback manager, if any.
+		try:
+			win32ui.InstallCallbackCaller(self.oldCallbackCaller)
+		except AttributeError:
+			pass
+		if self.oldCallbackCaller:
+			del self.oldCallbackCaller
+		self.frame=None	# clean Python references to the now destroyed window object.
+		self.idleHandlers = []
+		# Attempt cleanup if not already done!
+		if self._obj_: self._obj_.AttachObject(None)
+		self._obj_ = None
+		global App
+		global AppBuilder
+		App = None
+		AppBuilder = None
+		return 0
+
+	def HaveIdleHandler(self, handler):
+		# True if the handler is already registered.
+		return handler in self.idleHandlers
+	def AddIdleHandler(self, handler):
+		# Register a callable invoked as handler(handler, count) on idle.
+		self.idleHandlers.append(handler)
+	def DeleteIdleHandler(self, handler):
+		# Unregister an idle handler; raises ValueError if not registered.
+		self.idleHandlers.remove(handler)
+	def OnIdle(self, count):
+		# Run all idle handlers; a handler that raises is reported and
+		# removed.  Returns true if any handler wants more idle time.
+		try:
+			ret = 0
+			handlers = self.idleHandlers[:] # copy list, as may be modified during loop
+			for handler in handlers:
+				try:
+					thisRet = handler(handler, count)
+				except:
+					print "Idle handler %s failed" % (`handler`)
+					traceback.print_exc()
+					print "Idle handler removed from list"
+					try:
+						self.DeleteIdleHandler(handler)
+					except ValueError: # Item not in list.
+						pass
+					thisRet = 0
+				ret = ret or thisRet
+			return ret
+		except KeyboardInterrupt:
+			pass
+	def CreateMainFrame(self):
+		# Factory hook - subclasses may return a different frame class.
+		return MainFrame()
+
+	def LoadMainFrame(self):
+		" Create the main applications frame "
+		self.frame = self.CreateMainFrame()
+		self.SetMainFrame(self.frame)
+		self.frame.LoadFrame(win32ui.IDR_MAINFRAME, win32con.WS_OVERLAPPEDWINDOW)
+		self.frame.DragAcceptFiles()	# we can accept these.
+		self.frame.ShowWindow(win32ui.GetInitialStateRequest())
+		self.frame.UpdateWindow()
+		self.HookCommands()
+
+	def OnHelp(self,id, code):
+		# Open the registered help file for either the GUI reference or the
+		# main Python documentation, depending on the command id.
+		try:
+			import regutil
+			if id==win32ui.ID_HELP_GUI_REF:
+				helpFile = regutil.GetRegisteredHelpFile("Pythonwin Reference")
+				helpCmd = win32con.HELP_CONTENTS
+			else:
+				helpFile = regutil.GetRegisteredHelpFile("Main Python Documentation")
+				helpCmd = win32con.HELP_FINDER
+			if helpFile is None:
+				win32ui.MessageBox("The help file is not registered!")
+			else:
+				import help
+				help.OpenHelpFile(helpFile, helpCmd)
+		except:
+			t, v, tb = sys.exc_info()
+			win32ui.MessageBox("Internal error in help file processing\r\n%s: %s" % (t,v))
+			tb = None # Prevent a cycle
+	
+	def DoLoadModules(self, modules):
+		# XXX - this should go, but the debugger uses it :-(
+		# dont do much checking!
+		for module in modules:
+			__import__(module)
+
+	def HookCommands(self):
+		# Hook the standard file/help commands and the drop-files message.
+		self.frame.HookMessage(self.OnDropFiles,win32con.WM_DROPFILES)
+		self.HookCommand(self.HandleOnFileOpen,win32ui.ID_FILE_OPEN)
+		self.HookCommand(self.HandleOnFileNew,win32ui.ID_FILE_NEW)
+		self.HookCommand(self.OnFileMRU,win32ui.ID_FILE_MRU_FILE1)
+		self.HookCommand(self.OnHelpAbout,win32ui.ID_APP_ABOUT)
+		self.HookCommand(self.OnHelp, win32ui.ID_HELP_PYTHON)
+		self.HookCommand(self.OnHelp, win32ui.ID_HELP_GUI_REF)
+		# Hook for the right-click menu.
+		self.frame.GetWindow(win32con.GW_CHILD).HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN)
+
+	def SetApplicationPaths(self):
+		# Load the users/application paths from the profile ("Application
+		# Path", then "Application Path 1..10") and prepend them to sys.path.
+		new_path = []
+		apppath=string.splitfields(win32ui.GetProfileVal('Python','Application Path',''),';')
+		for path in apppath:
+			if len(path)>0:
+				new_path.append(win32ui.FullPath(path))
+		for extra_num in range(1,11):
+			apppath=string.splitfields(win32ui.GetProfileVal('Python','Application Path %d'%extra_num,''),';')
+			if len(apppath) == 0:
+				break
+			for path in apppath:
+				if len(path)>0:
+					new_path.append(win32ui.FullPath(path))
+		sys.path = new_path + sys.path
+		
+	def OnRClick(self,params):
+		" Handle right click message "
+		# put up the entire FILE menu!
+		menu = win32ui.LoadMenu(win32ui.IDR_TEXTTYPE).GetSubMenu(0)
+		menu.TrackPopupMenu(params[5]) # track at mouse position.
+		return 0
+
+	def OnDropFiles(self,msg):
+		" Handle a file being dropped from file manager "
+		hDropInfo = msg[2]
+		self.frame.SetActiveWindow()	# active us
+		nFiles = win32api.DragQueryFile(hDropInfo)
+		try:
+			for iFile in range(0,nFiles):
+				fileName = win32api.DragQueryFile(hDropInfo, iFile)
+				win32ui.GetApp().OpenDocumentFile( fileName )
+		finally:
+			win32api.DragFinish(hDropInfo);
+
+		return 0
+
+# No longer used by Pythonwin, as the C++ code has this same basic functionality
+# but handles errors slightly better.
+# It all still works, tho, so if you need similar functionality, you can use it.
+# Therefore I haven't deleted this code completely!
+#	def CallbackManager( self, ob, args = () ):
+#		"""Manage win32 callbacks.  Trap exceptions, report on them, then return 'All OK'
+#		to the frame-work. """
+#		import traceback
+#		try:
+#			ret = apply(ob, args)
+#			return ret
+#		except:
+#			# take copies of the exception values, else other (handled) exceptions may get
+#			# copied over by the other fns called.
+#			win32ui.SetStatusText('An exception occurred in a windows command handler.')
+#			t, v, tb = sys.exc_info()
+#			traceback.print_exception(t, v, tb.tb_next)
+#			try:
+#				sys.stdout.flush()
+#			except (NameError, AttributeError):
+#				pass
+
+	# Command handlers.
+	def OnFileMRU( self, id, code ):
+		" Called when a File 1-n message is received "
+		fileName = win32ui.GetRecentFileList()[id - win32ui.ID_FILE_MRU_FILE1]
+		win32ui.GetApp().OpenDocumentFile(fileName)
+
+	def HandleOnFileOpen( self, id, code ):
+		" Called when FileOpen message is received "
+		win32ui.GetApp().OnFileOpen()
+
+	def HandleOnFileNew( self, id, code ):
+		" Called when FileNew message is received "
+		win32ui.GetApp().OnFileNew()
+
+	def OnHelpAbout( self, id, code ):
+		" Called when HelpAbout message is received.  Displays the About dialog. "
+		win32ui.InitRichEdit()
+		dlg=AboutBox()
+		dlg.DoModal()
+
+def _GetRegistryValue(key, val, default = None):
+	# val is registry value - None for default val.
+	# Looks in HKEY_CURRENT_USER first, then falls back to
+	# HKEY_LOCAL_MACHINE; returns `default` if neither has the value.
+	try:
+		hkey = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, key)
+		return win32api.RegQueryValueEx(hkey, val)[0]
+	except win32api.error:
+		try:
+			hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, key)
+			return win32api.RegQueryValueEx(hkey, val)[0]
+		except win32api.error:
+			return default
+
+# Credit strings displayed in the About dialog (see AboutBox below).
+scintilla = "Scintilla is Copyright 1998-2004 Neil Hodgson (http://www.scintilla.org)"
+idle = "This program uses IDLE extensions by Guido van Rossum, Tim Peters and others."
+contributors = "Thanks to the following people for making significant contributions: Sam Rushing, Curt Hagenlocher, Dave Brennan, Roger Burnham, Gordon McMillan, Neil Hodgson, Laramie Leavitt. (let me know if I have forgotten you!)"
+# The About Box
+class AboutBox(dialog.Dialog):
+	"""The About dialog: shows credits and the pywin32/ActivePython build
+	number, with a button linking to the project home page."""
+	def __init__(self, idd=win32ui.IDD_ABOUTBOX):
+		dialog.Dialog.__init__(self, idd)
+	def OnInitDialog(self):
+		text = "Pythonwin - Python IDE and GUI Framework for Windows.\n\n%s\n\nPython is %s\n\n%s\n\n%s\n\n%s" % (win32ui.copyright, sys.copyright, scintilla, idle, contributors)
+		self.SetDlgItemText(win32ui.IDC_EDIT1, text)
+		# Get the build number - written by installers.
+		# For distutils build, read pywin32.version.txt
+		import distutils.sysconfig
+		site_packages = distutils.sysconfig.get_python_lib(plat_specific=1)
+		try:
+			build_no = open(os.path.join(site_packages, "pywin32.version.txt")).read().strip()
+			ver = "pywin32 build %s" % build_no
+		except EnvironmentError:
+			ver = None
+		if ver is None:
+			# See if we are Part of Active Python
+			ver = _GetRegistryValue("SOFTWARE\\ActiveState\\ActivePython", "CurrentVersion")
+			if ver is not None:
+				ver = "ActivePython build %s" % (ver,)
+		if ver is None:
+			ver = ""
+		self.SetDlgItemText(win32ui.IDC_ABOUT_VERSION, ver)
+		self.HookCommand(self.OnButHomePage, win32ui.IDC_BUTTON1)
+
+	def OnButHomePage(self, id, code):
+		# Open the project home page in the default browser.
+		if code == win32con.BN_CLICKED:
+			win32api.ShellExecute(0, "open", "http://starship.python.net/crew/mhammond/win32", None, "", 1)
+
+def Win32RawInput(prompt=None):
+	"""Provide raw_input() for gui apps.
+	Prompts via a simple dialog; raises KeyboardInterrupt if cancelled."""
+	# flush stderr/out first.
+	try:
+		sys.stdout.flush()
+		sys.stderr.flush()
+	except:
+		pass
+	if prompt is None: prompt = ""
+	ret=dialog.GetSimpleInput(prompt)
+	if ret==None:
+		raise KeyboardInterrupt, "operation cancelled"
+	return ret
+
+def Win32Input(prompt=None):
+	"Provide input() for gui apps - evaluates the raw_input() result."
+	return eval(raw_input(prompt))
+
+# Replace the builtin console input functions with the GUI versions so
+# code run under Pythonwin prompts via dialogs instead of blocking on stdin.
+sys.modules['__builtin__'].raw_input=Win32RawInput
+sys.modules['__builtin__'].input=Win32Input
+
+try:
+	# LocatePythonFile used to be here.
+	# THIS WILL BE DELETED SOON.
+	from scriptutils import LocatePythonFile
+except:
+	pass
+
+def HaveGoodGUI():
+	"""Returns true if we currently have a good gui available.
+	(Detected by whether the Pythonwin startup module has been imported.)
+	"""
+	return sys.modules.has_key("pywin.framework.startup")
+
+def CreateDefaultGUI( appClass = None):
+	"""Creates a default GUI environment.
+	appClass defaults to the interactive Pythonwin application class.
+	"""
+	if appClass is None:
+		import intpyapp # Bring in the default app - could be param'd later.
+		appClass = intpyapp.InteractivePythonApp
+	# Create and init the app.
+	appClass().InitInstance()
+
+def CheckCreateDefaultGUI():
+	"""Checks and creates if necessary a default GUI environment.
+	Returns whether a good GUI already existed before the call.
+	"""
+	rc = HaveGoodGUI()
+	if not rc:
+		CreateDefaultGUI()
+	return rc
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/bitmap.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/bitmap.py
new file mode 100644
index 0000000..641ea84
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/bitmap.py
@@ -0,0 +1,143 @@
+import win32ui
+import win32con
+import win32api
+import string
+import os
+import app
+import sys
+
+from pywin.mfc import docview, window
+
+bStretch = 1
+
+class BitmapDocument(docview.Document):
+	"A bitmap document.  Holds the bitmap data itself."
+	def __init__(self, template):
+		docview.Document.__init__(self, template)
+		# The loaded win32ui bitmap object, or None when empty.
+		self.bitmap=None
+	def OnNewDocument(self):
+		# I can not create new bitmaps.
+		win32ui.MessageBox("Bitmaps can not be created.")
+	def OnOpenDocument(self, filename):
+		# Load the bitmap from disk; returns 1 on success, 0 on failure.
+		self.bitmap=win32ui.CreateBitmap()
+		# init data members
+		f = open(filename, 'rb')
+		try:
+			try:
+				self.bitmap.LoadBitmapFile(f)
+			except IOError:
+				win32ui.MessageBox("Could not load the bitmap from %s" % filename)
+				return 0
+		finally:
+			f.close()
+		self.size = self.bitmap.GetSize()
+		return 1
+	def DeleteContents(self):
+		# Drop the bitmap so its resources can be reclaimed.
+		self.bitmap=None
+
+class BitmapView(docview.ScrollView):
+	"A view of a bitmap.  Obtains data from document."
+	def __init__(self, doc):
+		docview.ScrollView.__init__(self, doc)
+		# Current client-area size, tracked via WM_SIZE.
+		self.width = self.height = 0
+		# set up message handlers
+		self.HookMessage (self.OnSize, win32con.WM_SIZE)
+		
+	def OnInitialUpdate(self):
+		# Size the scroll range to the bitmap's dimensions.
+		doc = self.GetDocument()
+		if doc.bitmap:
+			bitmapSize = doc.bitmap.GetSize()
+			self.SetScrollSizes(win32con.MM_TEXT, bitmapSize)
+
+	def OnSize (self, params):
+		# WM_SIZE: new client width/height are packed into lParam.
+		lParam = params[3]
+		self.width = win32api.LOWORD(lParam)
+		self.height = win32api.HIWORD(lParam)
+
+	def OnDraw (self, dc):
+		# Paint the bitmap, stretched to the view if the module-level
+		# bStretch flag is set, else at natural size.
+		# set sizes used for "non stretch" mode.
+		doc = self.GetDocument()
+		if doc.bitmap is None: return
+		bitmapSize = doc.bitmap.GetSize()
+		if bStretch:
+			# stretch BMP.
+			viewRect = (0,0,self.width, self.height)
+			bitmapRect = (0,0,bitmapSize[0], bitmapSize[1])
+			doc.bitmap.Paint(dc, viewRect, bitmapRect)
+		else:
+			# non stretch.
+			doc.bitmap.Paint(dc)
+	
+class BitmapFrame(window.MDIChildWnd):
+	"""MDI child frame hosting a bitmap view.  The window-fitting logic is
+	currently disabled (commented out) - it just defers to the default."""
+	def OnCreateClient( self, createparams, context ):
+		borderX = win32api.GetSystemMetrics(win32con.SM_CXFRAME)
+		borderY = win32api.GetSystemMetrics(win32con.SM_CYFRAME)
+		titleY = win32api.GetSystemMetrics(win32con.SM_CYCAPTION)	# includes border
+		# try and maintain default window pos, else adjust if cant fit
+		# get the main client window dimensions.
+		mdiClient = win32ui.GetMainFrame().GetWindow(win32con.GW_CHILD)
+		clientWindowRect=mdiClient.ScreenToClient(mdiClient.GetWindowRect())
+		clientWindowSize=(clientWindowRect[2]-clientWindowRect[0],clientWindowRect[3]-clientWindowRect[1])
+		left, top, right, bottom=mdiClient.ScreenToClient(self.GetWindowRect())
+#		width, height=context.doc.size[0], context.doc.size[1]
+#		width = width+borderX*2
+#		height= height+titleY+borderY*2-1
+#		if (left+width)>clientWindowSize[0]:
+#			left = clientWindowSize[0] - width
+#		if left<0:
+#			left = 0
+#			width = clientWindowSize[0]
+#		if (top+height)>clientWindowSize[1]:
+#			top = clientWindowSize[1] - height
+#		if top<0:
+#			top = 0
+#			height = clientWindowSize[1]
+#		self.frame.MoveWindow((left, top, left+width, top+height),0)
+		window.MDIChildWnd.OnCreateClient(self, createparams, context)
+		return 1
+
+
+class BitmapTemplate(docview.DocTemplate):
+	"""Document template tying bitmap files to the document/frame/view trio."""
+	def __init__(self):
+		docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, BitmapDocument, BitmapFrame, BitmapView)
+	def MatchDocType(self, fileName, fileType):
+		# Return an already-open document, or a confidence level for
+		# opening this file type (.bmp -> native attempt).
+		doc = self.FindOpenDocument(fileName)
+		if doc: return doc
+		ext = string.lower(os.path.splitext(fileName)[1])
+		if ext =='.bmp': # removed due to PIL! or ext=='.ppm':
+			return win32ui.CDocTemplate_Confidence_yesAttemptNative
+		return win32ui.CDocTemplate_Confidence_maybeAttemptForeign
+#		return win32ui.CDocTemplate_Confidence_noAttempt
+
+# For debugging purposes, when this module may be reloaded many times.
+try:
+	win32ui.GetApp().RemoveDocTemplate(bitmapTemplate)
+except NameError:
+	pass
+	
+# Register the bitmap template with the application at import time.
+bitmapTemplate = BitmapTemplate()
+bitmapTemplate.SetDocStrings('\nBitmap\nBitmap\nBitmap (*.bmp)\n.bmp\nPythonBitmapFileType\nPython Bitmap File')
+win32ui.GetApp().AddDocTemplate(bitmapTemplate)
+
+# This works, but just didnt make it through the code reorg.
+#class PPMBitmap(Bitmap):
+#	def LoadBitmapFile(self, file ):
+#		magic=file.readline()
+#		if magic <> "P6\n":
+#			raise TypeError, "The file is not a PPM format file"
+#		rowcollist=string.split(file.readline())
+#		cols=string.atoi(rowcollist[0])
+#		rows=string.atoi(rowcollist[1])
+#		file.readline()	# whats this one?
+#		self.bitmap.LoadPPMFile(file,(cols,rows))
+
+
+def t():
+	# Quick manual test - opens a hard-coded bitmap path.
+	bitmapTemplate.OpenDocumentFile('d:\\winnt\\arcade.bmp')
+	#OpenBMPFile( 'd:\\winnt\\arcade.bmp')
+
+def demo():
+	# Open up to two .bmp files from the Windows directory as documents.
+	import glob
+	winDir=win32api.GetWindowsDirectory()
+	for fileName in glob.glob1(winDir, '*.bmp')[:2]:
+		bitmapTemplate.OpenDocumentFile(os.path.join(winDir, fileName))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/cmdline.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/cmdline.py
new file mode 100644
index 0000000..ca25816
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/cmdline.py
@@ -0,0 +1,49 @@
+# cmdline - command line utilities.
+import sys
+import win32ui
+import string
+
+def ParseArgs( str ):
+	"""Split a command line string into a list of arguments.
+	Arguments are whitespace separated; double quotes group an argument
+	containing whitespace (an unterminated quote runs to end of string)."""
+	import string
+	ret=[]
+	pos = 0
+	length=len(str)
+	while pos<length:
+		# Skip leading whitespace before the next argument.
+		try:
+			while str[pos] in string.whitespace: pos = pos+1
+		except IndexError:
+			break 	
+		if pos>=length:
+			break
+		if str[pos]=='"':
+			# Quoted argument: take everything up to the closing quote.
+			pos=pos+1
+			try:
+				endPos = string.index(str, '"', pos)-1
+				nextPos = endPos+2
+			except ValueError:
+				endPos=length
+				nextPos=endPos+1
+		else:
+			# Unquoted argument: take up to the next whitespace.
+			endPos = pos
+			while endPos<length and not str[endPos] in string.whitespace: endPos = endPos+1
+			nextPos=endPos+1
+		ret.append(string.strip(str[pos:endPos+1]))
+		pos = nextPos
+	return ret
+
+def FixArgFileName(fileName):
+	"""Convert a filename on the commandline to something useful.
+	Given an automatic filename on the commandline, turn it a python module name,
+	with the path added to sys.path.  Returns the extension-less module name."""
+	import os
+	path, fname = os.path.split(fileName)
+	if len(path)==0:
+		path = os.curdir
+	path=os.path.abspath(path)
+	# must check that the command line arg's path is in sys.path
+	for syspath in sys.path:
+		if os.path.abspath(syspath)==path:
+			break
+	else:
+		# Not found on sys.path - append it so the module can be imported.
+		sys.path.append(path)
+	return os.path.splitext(fname)[0]
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/dbgcommands.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/dbgcommands.py
new file mode 100644
index 0000000..4c3b3c5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/dbgcommands.py
@@ -0,0 +1,163 @@
+# Command Handlers for the debugger.
+
+# Not in the debugger package, as I always want these interfaces to be
+# available, even if the debugger has not yet been (or can not be)
+# imported
+import win32ui, win32con
+import scriptutils
+
# Map debugger control-bar command IDs to (bar name, always-enabled flag).
# The flag marks bars whose menu item stays enabled even while the
# debugger is not actively debugging (see OnUpdateDebuggerBar below).
IdToBarNames = {
	win32ui.IDC_DBG_STACK : ("Stack",0),
	win32ui.IDC_DBG_BREAKPOINTS : ("Breakpoints",0),
	win32ui.IDC_DBG_WATCH : ("Watch",1),
	}
+
class DebuggerCommandHandler:
	"""Hooks the debugger menu/toolbar commands into the main frame.

	These handlers must work even when the debugger package has not
	been (or can not be) imported - commands that need a live debugger
	fall back to starting a debug run via scriptutils.RunScript.
	"""
	def HookCommands(self):
		# (handler, update-handler-or-None, command id) triples.
		commands = ( (self.OnStep, None, win32ui.IDC_DBG_STEP),
		                  (self.OnStepOut, self.OnUpdateOnlyBreak, win32ui.IDC_DBG_STEPOUT),
		                  (self.OnStepOver, None, win32ui.IDC_DBG_STEPOVER),
		                  (self.OnGo, None, win32ui.IDC_DBG_GO),
		                  (self.OnClose, self.OnUpdateClose, win32ui.IDC_DBG_CLOSE),
		                  (self.OnAdd, self.OnUpdateAddBreakpoints, win32ui.IDC_DBG_ADD),
		                  (self.OnClearAll, self.OnUpdateClearAllBreakpoints, win32ui.IDC_DBG_CLEAR),
#		                  (self.OnDebuggerToolbar, self.OnUpdateDebuggerToolbar, win32ui.ID_DEBUGGER_TOOLBAR),
		                )

		frame = win32ui.GetMainFrame()

		for methHandler, methUpdate, id in commands:
			frame.HookCommand(methHandler, id)
			if methUpdate is not None:
				frame.HookCommandUpdate(methUpdate, id)

		# All debugger control bars share a single pair of handlers.
		for id in IdToBarNames.keys():
			frame.HookCommand( self.OnDebuggerBar, id)
			frame.HookCommandUpdate(self.OnUpdateDebuggerBar, id)

	def OnDebuggerToolbar(self, id, code):
		# Toggle the debugger toolbar (currently unhooked - see above).
		if code==0:
			return not win32ui.GetMainFrame().OnBarCheck(id)

	def OnUpdateDebuggerToolbar(self, cmdui):
		win32ui.GetMainFrame().OnUpdateControlBarMenu(cmdui)
		cmdui.Enable(1)

	def _GetDebugger(self):
		# Return the current debugger instance, or None when the debugger
		# package is unavailable (or no debugger has been created yet).
		try:
			import pywin.debugger
			return pywin.debugger.currentDebugger
		except ImportError:
			return None

	def _DoOrStart(self, doMethod, startFlag):
		# If a debug session is active, invoke the named method on it;
		# otherwise start a debug run with the given scriptutils flag.
		d=self._GetDebugger()
		if d is not None and d.IsDebugging():
			method = getattr(d, doMethod)
			method()
		else:
			scriptutils.RunScript(defName=None, defArgs=None, bShowDialog = 0, debuggingType=startFlag)

	def OnStep(self, msg, code):
		self._DoOrStart("do_set_step", scriptutils.RS_DEBUGGER_STEP)

	def OnStepOver(self, msg, code):
		self._DoOrStart("do_set_next", scriptutils.RS_DEBUGGER_STEP)

	def OnStepOut(self, msg, code):
		d=self._GetDebugger()
		if d is not None and d.IsDebugging():
			d.do_set_return()

	def OnGo(self, msg, code):
		self._DoOrStart("do_set_continue", scriptutils.RS_DEBUGGER_GO)

	def OnClose(self, msg, code):
		d=self._GetDebugger()
		if d is not None:
			if d.IsDebugging():
				d.set_quit()
			else:
				d.close()

	def OnUpdateClose(self, cmdui):
		d=self._GetDebugger()
		if d is not None and d.inited:
			cmdui.Enable(1)
		else:
			cmdui.Enable(0)

	def OnAdd(self, msg, code):
		# Toggle a breakpoint on the current line of the active editor.
		doc = scriptutils.GetActiveEditorDocument()
		if doc is None:
			win32ui.MessageBox('There is no active window - no breakpoint can be added')
			return # BUGFIX: previously fell through and crashed on doc=None.
		pathName = doc.GetPathName()
		view = doc.GetFirstView()
		lineNo = view.LineFromChar(view.GetSel()[0])+1
		# If I have a debugger, then tell it, otherwise just add a marker
		d=self._GetDebugger()
		if d is None:
			import pywin.framework.editor.color.coloreditor
			doc.MarkerToggle(lineNo, pywin.framework.editor.color.coloreditor.MARKER_BREAKPOINT)
		else:
			if d.get_break(pathName, lineNo):
				win32ui.SetStatusText('Clearing breakpoint',1)
				rc = d.clear_break(pathName, lineNo)
			else:
				win32ui.SetStatusText('Setting breakpoint',1)
				rc = d.set_break(pathName, lineNo)
			if rc:
				# set/clear_break return an error message on failure.
				win32ui.MessageBox(rc)
			d.GUIRespondDebuggerData()

	def OnClearAll(self, msg, code):
		win32ui.SetStatusText('Clearing all breakpoints')
		d=self._GetDebugger()
		if d is None:
			# No debugger - just clear the breakpoint markers in each editor.
			import pywin.framework.editor
			import pywin.framework.editor.color.coloreditor
			for doc in pywin.framework.editor.editorTemplate.GetDocumentList():
				doc.MarkerDeleteAll(pywin.framework.editor.color.coloreditor.MARKER_BREAKPOINT)
		else:
			d.clear_all_breaks()
			d.UpdateAllLineStates()
			d.GUIRespondDebuggerData()

	def OnUpdateOnlyBreak(self, cmdui):
		# Enable only while the debugger is stopped at a breakpoint.
		d=self._GetDebugger()
		ok = d is not None and d.IsBreak()
		cmdui.Enable(ok)

	def OnUpdateAddBreakpoints(self, cmdui):
		doc = scriptutils.GetActiveEditorDocument()
		if doc is None:
			enabled = 0
		else:
			enabled = 1
			view = doc.GetFirstView()
			lineNo = view.LineFromChar(view.GetSel()[0])+1
			import pywin.framework.editor.color.coloreditor
			# Check the menu item when the current line has a breakpoint marker.
			cmdui.SetCheck(doc.MarkerAtLine(lineNo, pywin.framework.editor.color.coloreditor.MARKER_BREAKPOINT) != 0)
		cmdui.Enable(enabled)

	def OnUpdateClearAllBreakpoints(self, cmdui):
		d=self._GetDebugger()
		cmdui.Enable(d is None or len(d.breaks)!=0)

	def OnUpdateDebuggerBar(self, cmdui):
		name, always = IdToBarNames.get(cmdui.m_nID)
		enabled = always
		d=self._GetDebugger()
		if d is not None and d.IsDebugging() and name is not None:
			enabled = 1
			bar = d.GetDebuggerBar(name)
			cmdui.SetCheck(bar.IsWindowVisible())
		cmdui.Enable(enabled)

	def OnDebuggerBar(self, id, code):
		# Toggle visibility of the named debugger control bar.
		name = IdToBarNames.get(id)[0]
		d=self._GetDebugger()
		if d is not None and name is not None:
			bar = d.GetDebuggerBar(name)
			newState = not bar.IsWindowVisible()
			win32ui.GetMainFrame().ShowControlBar(bar, newState, 1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/dlgappcore.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/dlgappcore.py
new file mode 100644
index 0000000..a4b691b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/dlgappcore.py
@@ -0,0 +1,69 @@
+# dlgappcore.
+#
+# base classes for dialog based apps.
+
+import app
+import win32ui
+import win32con
+import win32api
+import sys
+from pywin.mfc import dialog
+
+error = "Dialog Application Error" 
+
class AppDialog(dialog.Dialog):
	"""The main dialog box for a dialog-based application.

	Adds minimized-icon painting on top of the generic pywin.mfc dialog.
	"""
	def __init__(self, id, dll=None):
		self.iconId = win32ui.IDR_MAINFRAME
		dialog.Dialog.__init__(self, id, dll)

	def OnInitDialog(self):
		return dialog.Dialog.OnInitDialog(self)

	# Provide support for a dlg app using an icon
	def OnPaint(self):
		if not self.IsIconic(): return self._obj_.OnPaint()
		# BUGFIX: 'dc' was referenced here without ever being obtained,
		# raising a NameError whenever the dialog painted while iconic.
		dc = self.GetDC()
		self.DefWindowProc(win32con.WM_ICONERASEBKGND, dc.GetHandleOutput(), 0)
		# Center the icon within the client area.
		left, top, right, bottom = self.GetClientRect()
		left = (right  - win32api.GetSystemMetrics(win32con.SM_CXICON)) >> 1
		top  = (bottom - win32api.GetSystemMetrics(win32con.SM_CYICON)) >> 1
		hIcon = win32ui.GetApp().LoadIcon(self.iconId)
		dc.DrawIcon((left, top), hIcon)

	# Only needed to provide a minimized icon (and this seems
	# less important under win95/NT4
	def OnEraseBkgnd(self, dc):
		if self.IsIconic():
			return 1
		else:
			return self._obj_.OnEraseBkgnd(dc)

	def OnQueryDragIcon(self):
		return win32ui.GetApp().LoadIcon(self.iconId)

	def PreDoModal(self):
		# Hook for subclasses - called just before the dialog is shown.
		pass
+
+
+class DialogApp(app.CApp):
+	"An application class, for an app with main dialog box"
+	def InitInstance(self):
+#		win32ui.SetProfileFileName('dlgapp.ini')
+		win32ui.LoadStdProfileSettings()
+		win32ui.EnableControlContainer()
+		win32ui.Enable3dControls()
+		self.dlg = self.frame = self.CreateDialog()
+	
+		if self.frame is None:
+			raise error, "No dialog was created by CreateDialog()"
+			return
+
+		self._obj_.InitDlgInstance(self.dlg)
+		self.PreDoModal()
+		self.dlg.PreDoModal()
+		self.dlg.DoModal()
+
+	def CreateDialog(self):
+		pass
+	def PreDoModal(self):
+		pass
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/ModuleBrowser.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/ModuleBrowser.py
new file mode 100644
index 0000000..bc1280e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/ModuleBrowser.py
@@ -0,0 +1,205 @@
+# ModuleBrowser.py - A view that provides a module browser for an editor document.
+import pywin.mfc.docview
+import win32ui
+import win32con
+import commctrl
+import win32api
+from pywin.tools import hierlist, browser
+import pywin.framework.scriptutils
+import afxres
+
+import pyclbr
+
class HierListCLBRModule(hierlist.HierListItem):
    """Root browser item: a module together with its pyclbr scan results."""
    def __init__(self, modName, clbrdata):
        self.modName = modName
        self.clbrdata = clbrdata
    def GetText(self):
        return self.modName
    def GetSubList(self):
        # Wrap each pyclbr entry in the matching hierlist item type; anything
        # that is not a Class is treated as a function (pyclbr only grew a
        # Function type post 1.5.2).
        children = []
        for entry in self.clbrdata.values():
            if entry.__class__ == pyclbr.Class:
                children.append(HierListCLBRClass(entry))
            else:
                children.append(HierListCLBRFunction(entry))
        children.sort()
        return children
    def IsExpandable(self):
        return 1
+
class HierListCLBRItem(hierlist.HierListItem):
    """Common behaviour for browser items that map to a source location."""
    def __init__(self, name, file, lineno, suffix = ""):
        self.name = str(name)
        self.file = file
        self.lineno = lineno
        self.suffix = suffix
    def __cmp__(self, other):
        # Items order alphabetically by name.
        return cmp(self.name, other.name)
    def GetText(self):
        return self.name + self.suffix
    def TakeDefaultAction(self):
        # Double-click: jump to the definition when we know where it lives.
        if not self.file:
            win32ui.SetStatusText("Can not locate the source code for this object.")
        else:
            pywin.framework.scriptutils.JumpToDocument(self.file, self.lineno, bScrollToTop = 1)
    def PerformItemSelected(self):
        # Selection: describe the item's source location in the status bar.
        if self.file is None:
            msg = "%s - source can not be located." % (self.name, )
        else:
            msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file)
        win32ui.SetStatusText(msg)
+
class HierListCLBRClass(HierListCLBRItem):
    """A class found by pyclbr; children are its base classes and methods."""
    def __init__(self, clbrclass, suffix = ""):
        try:
            name = clbrclass.name
            file = clbrclass.file
            lineno = clbrclass.lineno
            self.super = clbrclass.super
            self.methods = clbrclass.methods
        except AttributeError:
            # pyclbr may hand us a bare name (e.g. an unresolved base class)
            # instead of a Class object - show it with no location/children.
            name = clbrclass
            file = lineno = None
            self.super = []; self.methods = {}
        HierListCLBRItem.__init__(self, name, file, lineno, suffix)
    def __cmp__(self,other):
        ret = cmp(self.name,other.name)
        # NOTE(review): when two *distinct* items compare equal for the same
        # file, this copies the other item's scan data onto self - it looks
        # intended to let a tree refresh adopt freshly-scanned data in place;
        # confirm against hierlist's refresh logic.
        if ret==0 and (self is not other) and self.file==other.file:
            self.methods = other.methods
            self.super = other.super
            self.lineno = other.lineno
        return ret
    def GetSubList(self):
        # Parent classes first (sorted), then methods (sorted).
        r1 = []
        for c in self.super:
            r1.append(HierListCLBRClass(c, " (Parent class)"))
        r1.sort()
        r2=[]
        for meth, lineno in self.methods.items():
            r2.append(HierListCLBRMethod(meth, self.file, lineno))
        r2.sort()
        return r1+r2
    def IsExpandable(self):
        # Non-zero (truthy) when there is anything to show.
        return len(self.methods) + len(self.super)
    def GetBitmapColumn(self):
        # Index of the "class" image in the browser bitmap strip.
        return 21
+
class HierListCLBRFunction(HierListCLBRItem):
    """A top-level function found by pyclbr."""
    def __init__(self, clbrfunc, suffix = ""):
        HierListCLBRItem.__init__(self, clbrfunc.name, clbrfunc.file, clbrfunc.lineno, suffix)
    def GetBitmapColumn(self):
        # Index of the "function" image in the browser bitmap strip.
        return 22
+
class HierListCLBRMethod(HierListCLBRItem):
    # A class method - displayed with the same bitmap as a function.
    def GetBitmapColumn(self):
        return 22
+
class HierListCLBRErrorItem(hierlist.HierListItem):
    """A plain-text item used to show scan errors/messages in the tree."""
    def __init__(self, text):
        self.text = text
    def GetText(self):
        return self.text
    def GetSubList(self):
        # A single child repeating the same text (only reachable via
        # HierListCLBRErrorRoot, which reports itself expandable).
        return [HierListCLBRErrorItem(self.text)]
    def IsExpandable(self):
        return 0
+
class HierListCLBRErrorRoot(HierListCLBRErrorItem):
    # Root variant - expandable so the message text shows as a child node.
    def IsExpandable(self):
        return 1
+
class BrowserView(pywin.mfc.docview.TreeView):
    """Tree view showing a pyclbr class/function browser for the document.

    The tree is built lazily: document edits set bDirty (via
    _UpdateUIForState) and the class list is rebuilt the next time the
    view becomes active.
    """
    def OnInitialUpdate(self):
        self.list = None
        rc = self._obj_.OnInitialUpdate()
        self.HookMessage(self.OnSize, win32con.WM_SIZE)
        self.bDirty = 0       # has the document changed since the last scan?
        self.destroying = 0   # re-entrancy guard while tearing down the list
        return rc

    def DestroyBrowser(self):
        self.DestroyList()

    def OnActivateView(self, activate, av, dv):
        # Rebuild a stale tree only when the view becomes active.
        if activate:
            self.CheckRefreshList()
        return self._obj_.OnActivateView(activate, av, dv)

    def _MakeRoot(self):
        # Scan the document with pyclbr and return the root hierlist item
        # (an error item when the scan is impossible or finds nothing).
        path = self.GetDocument().GetPathName()
        if not path:
            return HierListCLBRErrorRoot("Error: Can not browse a file until it is saved")
        else:
            mod, path = pywin.framework.scriptutils.GetPackageModuleName(path)
            if self.bDirty:
                what = "Refreshing"
                # Hack for pyclbr being too smart - drop its module cache
                # so the file really is re-read.
                try:
                    del pyclbr._modules[mod]
                except (KeyError, AttributeError):
                    pass
            else:
                what = "Building"
            win32ui.SetStatusText("%s class list - please wait..." % (what,), 1)
            win32ui.DoWaitCursor(1)
            try:
                reader = pyclbr.readmodule_ex # new version post 1.5.2
            except AttributeError:
                reader = pyclbr.readmodule
            try:
                data = reader(mod, [path])
                if data:
                    return HierListCLBRModule(mod, data)
                else:
                    return HierListCLBRErrorRoot("No Python classes in module.")

            finally:
                win32ui.DoWaitCursor(0)
                win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE))

    def DestroyList(self):
        self.destroying = 1
        # If the document was not successfully opened, we may not have a list.
        list = getattr(self, "list", None)
        self.list = None
        if list is not None:
            list.HierTerm()
        self.destroying = 0

    def CheckMadeList(self):
        if self.list is not None or self.destroying: return
        self.rootitem = root = self._MakeRoot()
        self.list = list = hierlist.HierListWithItems( root, win32ui.IDB_BROWSER_HIER)
        list.HierInit(self.GetParentFrame(), self)
        list.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS)

    def CheckRefreshList(self):
        if self.bDirty:
            if self.list is None:
                self.CheckMadeList()
            else:
                new_root = self._MakeRoot()
                if self.rootitem.__class__==new_root.__class__==HierListCLBRModule:
                    # Same root kind - update the existing root in place.
                    self.rootitem.modName = new_root.modName
                    self.rootitem.clbrdata = new_root.clbrdata
                    self.list.Refresh()
                else:
                    # BUGFIX: reuse new_root instead of calling _MakeRoot()
                    # a second time (a redundant full re-scan), and keep
                    # self.rootitem in sync with the displayed root.
                    self.rootitem = new_root
                    self.list.AcceptRoot(new_root)
            self.bDirty = 0

    def OnSize(self, params):
        lparam = params[3]
        w = win32api.LOWORD(lparam)
        h = win32api.HIWORD(lparam)
        # Build the tree on the first real size; tear it down at zero width.
        if w != 0:
            self.CheckMadeList()
        else:
            self.DestroyList()
        return 1

    def _UpdateUIForState(self):
        # The document changed - just mark the tree stale.
        self.bDirty = 1
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/__init__.py
new file mode 100644
index 0000000..b4f03e2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/__init__.py
@@ -0,0 +1,90 @@
+# __init__ for the Pythonwin editor package.
+#
+# We used to support optional editors - eg, color or non-color.
+#
+# This really isnt necessary with Scintilla, and scintilla
+# is getting so deeply embedded that it was too much work.
+
+import win32ui, sys, win32con
+
+defaultCharacterFormat = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New')
+
+##def GetDefaultEditorModuleName():
+##	import pywin
+##	# If someone has set pywin.editormodulename, then this is what we use
+##	try:
+##		prefModule = pywin.editormodulename
+##	except AttributeError:
+##		prefModule = win32ui.GetProfileVal("Editor","Module", "")
+##	return prefModule
+##
+##def WriteDefaultEditorModule(module):
+##	try:
+##		module = module.__name__
+##	except:
+##		pass
+##	win32ui.WriteProfileVal("Editor", "Module", module)
+
def LoadDefaultEditor():
	# Historical hook for selecting an editor implementation - now a no-op,
	# as the Scintilla color editor (imported at the bottom of this module)
	# is always used.  See the commented-out implementation below.
	pass
+##	prefModule = GetDefaultEditorModuleName()
+##	restorePrefModule = None
+##	mod = None
+##	if prefModule:
+##		try:
+##			mod = __import__(prefModule)
+##		except 'xx':
+##			msg = "Importing your preferred editor ('%s') failed.\n\nError %s: %s\n\nAn attempt will be made to load the default editor.\n\nWould you like this editor disabled in the future?" % (prefModule, sys.exc_info()[0], sys.exc_info()[1])
+##			rc = win32ui.MessageBox(msg, "Error importing editor", win32con.MB_YESNO)
+##			if rc == win32con.IDNO:
+##				restorePrefModule = prefModule
+##			WriteDefaultEditorModule("")
+##			del rc
+##	
+##	try:
+##		# Try and load the default one - dont catch errors here.		
+##		if mod is None:
+##			prefModule = "pywin.framework.editor.color.coloreditor"
+##			mod = __import__(prefModule)
+##	
+##		# Get at the real module.
+##		mod = sys.modules[prefModule]
+##	
+##		# Do a "from mod import *"
+##		globals().update(mod.__dict__)
+##
+##	finally:
+##		# Restore the users default editor if it failed and they requested not to disable it.
+##		if restorePrefModule:
+##			WriteDefaultEditorModule(restorePrefModule)
+
def GetEditorOption(option, defaultValue, min=None, max = None):
	"""Fetch an "Editor" profile value, substituting defaultValue when
	the stored value falls outside the optional [min, max] range."""
	rc = win32ui.GetProfileVal("Editor", option, defaultValue)
	out_of_range = (min is not None and rc < min) or (max is not None and rc > max)
	if out_of_range:
		rc = defaultValue
	return rc
+
def SetEditorOption(option, newValue):
	# Persist an "Editor" preference to the Pythonwin profile store.
	win32ui.WriteProfileVal("Editor", option, newValue)
+
def DeleteEditorOption(option):
	# Remove an "Editor" preference, silently ignoring any error from the
	# underlying profile store.
	try:
		win32ui.WriteProfileVal("Editor", option, None)
	except win32ui.error:
		pass
+
+# Load and save font tuples
+def GetEditorFontOption(option, default = None):
+	if default is None: default = defaultCharacterFormat
+	fmt = GetEditorOption( option, "" )
+	if fmt == "": return default
+	try:
+		return eval(fmt)
+	except:
+		print "WARNING: Invalid font setting in registry - setting ignored"
+		return default
+		
def SetEditorFontOption(option, newValue):
	# Fonts are stored as the str() of their tuple; see GetEditorFontOption.
	SetEditorOption(option, str(newValue))
+
+from pywin.framework.editor.color.coloreditor import *
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/color/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/color/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/color/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/color/coloreditor.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/color/coloreditor.py
new file mode 100644
index 0000000..4b1c3c5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/color/coloreditor.py
@@ -0,0 +1,477 @@
+# Color Editor originally by Neil Hodgson, but restructured by mh to integrate
+# even tighter into Pythonwin.
+import win32ui
+import win32con
+import win32api
+import sys
+
+import pywin.scintilla.keycodes
+from pywin.scintilla import bindings
+
+from pywin.framework.editor import GetEditorOption, SetEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat
+#from pywin.framework.editor import EditorPropertyPage
+
# Private window message used to trigger a "has the file changed on disk?"
# check (see OnCheckExternalDocumentUpdated).
MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in document.py and editor.py

# Define a few common markers (Scintilla margin marker numbers)
MARKER_BOOKMARK = 0
MARKER_BREAKPOINT = 1
MARKER_CURRENT = 2

# Debugger states - XXX - copied from debugger\dbgcon.py
DBGSTATE_NOT_DEBUGGING = 0
DBGSTATE_RUNNING = 1
DBGSTATE_BREAK = 2
+
+from pywin.scintilla.document import CScintillaDocument
+from pywin.framework.editor.document import EditorDocumentBase
+from pywin.scintilla.scintillacon import * # For the marker definitions
+import pywin.scintilla.view
+
class SyntEditDocument(EditorDocumentBase):
	"A SyntEdit document. "
	def OnDebuggerStateChange(self, state):
		# Forward the debugger state (a DBGSTATE_* value) to any view
		# that implements a handler for it.
		self._ApplyOptionalToViews("OnDebuggerStateChange", state)
	def HookViewNotifications(self, view):
		EditorDocumentBase.HookViewNotifications(self, view)
		view.SCISetUndoCollection(1)  # turn on Scintilla undo collection
	def FinalizeViewCreation(self, view):
		EditorDocumentBase.FinalizeViewCreation(self, view)
		# Only the first view triggers the IDLE menu check.
		if view==self.GetFirstView():
			self.GetDocTemplate().CheckIDLEMenus(view.idle)
+
+SyntEditViewParent=pywin.scintilla.view.CScintillaView
+class SyntEditView(SyntEditViewParent):
+	"A view of a SyntEdit.  Obtains data from document."
+	def __init__(self, doc):
+		SyntEditViewParent.__init__(self, doc)
+		self.bCheckingFile = 0
+
+	def OnInitialUpdate(self):
+		SyntEditViewParent.OnInitialUpdate(self)
+
+		self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN)
+
+		for id in [win32ui.ID_VIEW_FOLD_COLLAPSE, win32ui.ID_VIEW_FOLD_COLLAPSE_ALL,
+				   win32ui.ID_VIEW_FOLD_EXPAND, win32ui.ID_VIEW_FOLD_EXPAND_ALL]:
+		
+			self.HookCommand(self.OnCmdViewFold, id)
+			self.HookCommandUpdate(self.OnUpdateViewFold, id)
+		self.HookCommand(self.OnCmdViewFoldTopLevel, win32ui.ID_VIEW_FOLD_TOPLEVEL)
+
+		# Define the markers
+#		self.SCIMarkerDeleteAll()
+		self.SCIMarkerDefineAll(MARKER_BOOKMARK, SC_MARK_ROUNDRECT, win32api.RGB(0x0, 0x0, 0x0), win32api.RGB(0, 0xff, 0xff))
+
+		self.SCIMarkerDefine(MARKER_CURRENT, SC_MARK_ARROW)
+		self.SCIMarkerSetBack(MARKER_CURRENT, win32api.RGB(0xff, 0xff, 0x00))
+
+		# Define the folding markers
+		if 1: #traditional markers
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPEN, SC_MARK_MINUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDER, SC_MARK_PLUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERSUB, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERTAIL, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEREND, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPENMID, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERMIDTAIL, SC_MARK_EMPTY, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+		else: # curved markers
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPEN, SC_MARK_CIRCLEMINUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDER, SC_MARK_CIRCLEPLUS, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERSUB, SC_MARK_VLINE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERTAIL, SC_MARK_LCORNERCURVE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEREND, SC_MARK_CIRCLEPLUSCONNECTED, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDEROPENMID, SC_MARK_CIRCLEMINUSCONNECTED, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+			self.SCIMarkerDefineAll(SC_MARKNUM_FOLDERMIDTAIL, SC_MARK_TCORNERCURVE, win32api.RGB(0xff, 0xff, 0xff), win32api.RGB(0, 0, 0))
+		
+		self.SCIMarkerDefine(MARKER_BREAKPOINT, SC_MARK_CIRCLE)
+		# Marker background depends on debugger state
+		self.SCIMarkerSetFore(MARKER_BREAKPOINT, win32api.RGB(0x0, 0, 0))
+		# Get the current debugger state.
+		try:
+			import pywin.debugger
+			if pywin.debugger.currentDebugger is None:
+				state = DBGSTATE_NOT_DEBUGGING
+			else:
+				state = pywin.debugger.currentDebugger.debuggerState
+		except ImportError:
+			state = DBGSTATE_NOT_DEBUGGING
+		self.OnDebuggerStateChange(state)
+
+	def _GetSubConfigNames(self):
+		return ["editor"] # Allow [Keys:Editor] sections to be specific to us
+
+	def DoConfigChange(self):
+		SyntEditViewParent.DoConfigChange(self)
+		tabSize = GetEditorOption("Tab Size", 4, 2)
+		indentSize = GetEditorOption("Indent Size", 4, 2)
+		bUseTabs = GetEditorOption("Use Tabs", 0)
+		bSmartTabs = GetEditorOption("Smart Tabs", 1)
+		ext = self.idle.IDLEExtension("AutoIndent") # Required extension.
+
+		self.SCISetViewWS( GetEditorOption("View Whitespace", 0) )
+		self.SCISetViewEOL( GetEditorOption("View EOL", 0) )
+		self.SCISetIndentationGuides( GetEditorOption("View Indentation Guides", 0) )
+
+		if GetEditorOption("Right Edge Enabled", 0):
+			mode = EDGE_BACKGROUND
+		else:
+			mode = EDGE_NONE
+		self.SCISetEdgeMode(mode)
+		self.SCISetEdgeColumn( GetEditorOption("Right Edge Column", 75) )
+		self.SCISetEdgeColor( GetEditorOption("Right Edge Color", win32api.RGB(0xef, 0xef, 0xef)))
+
+		width = GetEditorOption("Marker Margin Width", 16)
+		self.SCISetMarginWidthN(1, width)
+		width = GetEditorOption("Folding Margin Width", 12)
+		self.SCISetMarginWidthN(2, width)
+		width = GetEditorOption("Line Number Margin Width", 0)
+		self.SCISetMarginWidthN(0, width)
+		self.bFolding = GetEditorOption("Enable Folding", 1)
+		fold_flags = 0
+		self.SendScintilla(SCI_SETMODEVENTMASK, SC_MOD_CHANGEFOLD);
+		if self.bFolding:
+			if GetEditorOption("Fold Lines", 1):
+				fold_flags = 16
+
+		self.SCISetProperty("fold", self.bFolding)
+		self.SCISetFoldFlags(fold_flags)
+
+		tt_color = GetEditorOption("Tab Timmy Color", win32api.RGB(0xff, 0, 0))
+		self.SendScintilla(SCI_INDICSETFORE, 1, tt_color)
+
+		tt_use = GetEditorOption("Use Tab Timmy", 1)
+		if tt_use:
+			self.SCISetProperty("tab.timmy.whinge.level", "1")
+
+		# Auto-indent has very complicated behaviour.  In a nutshell, the only
+		# way to get sensible behaviour from it is to ensure tabwidth != indentsize.
+		# Further, usetabs will only ever go from 1->0, never 0->1.
+		# This is _not_ the behaviour Pythonwin wants:
+		# * Tab width is arbitary, so should have no impact on smarts.
+		# * bUseTabs setting should reflect how new files are created, and
+		#   if Smart Tabs disabled, existing files are edited
+		# * If "Smart Tabs" is enabled, bUseTabs should have no bearing
+		#   for existing files (unless of course no context can be determined)
+		#
+		# So for smart tabs we configure the widget with completely dummy
+		# values (ensuring tabwidth != indentwidth), ask it to guess, then
+		# look at the values it has guessed, and re-configure
+		if bSmartTabs:
+			ext.config(usetabs=1, tabwidth=5, indentwidth=4)
+			ext.set_indentation_params(1)
+			if ext.indentwidth==5:
+				# Either 5 literal spaces, or a single tab character. Assume a tab
+				usetabs = 1
+				indentwidth = tabSize
+			else:
+				# Either Indented with spaces, and indent size has been guessed or
+				# an empty file (or no context found - tough!)
+				if self.GetTextLength()==0: # emtpy
+					usetabs = bUseTabs
+					indentwidth = indentSize
+				else: # guessed.
+					indentwidth = ext.indentwidth
+					usetabs = 0
+			# Tab size can never be guessed - set at user preference.
+			ext.config(usetabs=usetabs, indentwidth=indentwidth, tabwidth=tabSize)
+		else:
+			# Dont want smart-tabs - just set the options!
+			ext.config(usetabs=bUseTabs, tabwidth=tabSize, indentwidth=indentSize)
+		self.SCISetIndent(indentSize)
+		self.SCISetTabWidth(tabSize)
+
+	def OnDebuggerStateChange(self, state):
+		if state == DBGSTATE_NOT_DEBUGGING:
+			# Indicate breakpoints arent really usable.
+			# Not quite white - useful when no marker margin, so set as background color.
+			self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xef, 0xef, 0xef))
+		else:
+			# A light-red, so still readable when no marker margin.
+			self.SCIMarkerSetBack(MARKER_BREAKPOINT, win32api.RGB(0xff, 0x80, 0x80))
+
+	def HookDocumentHandlers(self):
+		SyntEditViewParent.HookDocumentHandlers(self)
+		self.HookMessage(self.OnCheckExternalDocumentUpdated,MSG_CHECK_EXTERNAL_FILE)
+
+	def HookHandlers(self):
+		SyntEditViewParent.HookHandlers(self)
+		self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS)
+
+	def _PrepareUserStateChange(self):
+		return self.GetSel(), self.GetFirstVisibleLine()
+	def _EndUserStateChange(self, info):
+		scrollOff = info[1] - self.GetFirstVisibleLine()
+		if scrollOff:
+			self.LineScroll(scrollOff)
+		# Make sure we dont reset the cursor beyond the buffer.
+		max = self.GetTextLength()
+		newPos = min(info[0][0], max), min(info[0][1], max)
+		self.SetSel(newPos)
+
+	#######################################
+	# The Windows Message or Notify handlers.
+	#######################################
+	def OnMarginClick(self, std, extra):
+		notify = self.SCIUnpackNotifyMessage(extra)
+		if notify.margin==2: # Our fold margin
+			line_click = self.LineFromChar(notify.position)
+#			max_line = self.GetLineCount()
+			if self.SCIGetFoldLevel(line_click) & SC_FOLDLEVELHEADERFLAG:
+				# If a fold point.
+				self.SCIToggleFold(line_click)
+		return 1
+
+	def OnSetFocus(self,msg):
+		# Even though we use file change notifications, we should be very sure about it here.
+		self.OnCheckExternalDocumentUpdated(msg)
+		return 1
+
+	def OnCheckExternalDocumentUpdated(self, msg):
+		if self.bCheckingFile: return
+		self.bCheckingFile = 1
+		self.GetDocument().CheckExternalDocumentUpdated()
+		self.bCheckingFile = 0
+
+	def OnRClick(self,params):
+		menu = win32ui.CreatePopupMenu()
+		self.AppendMenu(menu, "&Locate module", "LocateModule")
+		self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
+		self.AppendMenu(menu, "&Undo", "EditUndo")
+		self.AppendMenu(menu, '&Redo', 'EditRedo')
+		self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
+		self.AppendMenu(menu, 'Cu&t', 'EditCut')
+		self.AppendMenu(menu, '&Copy', 'EditCopy')
+		self.AppendMenu(menu, '&Paste', 'EditPaste')
+		self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
+		self.AppendMenu(menu, '&Select all', 'EditSelectAll')
+		self.AppendMenu(menu, 'View &Whitespace', 'ViewWhitespace', checked=self.SCIGetViewWS())
+		self.AppendMenu(menu, "&Fixed Font", "ViewFixedFont", checked = self._GetColorizer().bUseFixed)
+		self.AppendMenu(menu, flags=win32con.MF_SEPARATOR)
+		self.AppendMenu(menu, "&Goto line...", "GotoLine")
+
+		submenu = win32ui.CreatePopupMenu()
+		newitems = self.idle.GetMenuItems("edit")
+		for text, event in newitems:
+			self.AppendMenu(submenu, text, event)
+
+		flags=win32con.MF_STRING|win32con.MF_ENABLED|win32con.MF_POPUP
+		menu.AppendMenu(flags, submenu.GetHandle(), "&Source code")
+
+		flags = win32con.TPM_LEFTALIGN|win32con.TPM_LEFTBUTTON|win32con.TPM_RIGHTBUTTON
+		menu.TrackPopupMenu(params[5], flags, self)
+		return 0
+	def OnCmdViewFold(self, cid, code): # Handle the menu command
+		if cid == win32ui.ID_VIEW_FOLD_EXPAND_ALL:
+			self.FoldExpandAllEvent(None)
+		elif cid == win32ui.ID_VIEW_FOLD_EXPAND:
+			self.FoldExpandEvent(None)
+		elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE_ALL:
+			self.FoldCollapseAllEvent(None)
+		elif cid == win32ui.ID_VIEW_FOLD_COLLAPSE:
+			self.FoldCollapseEvent(None)
+		else:
+			print "Unknown collapse/expand ID"
+	def OnUpdateViewFold(self, cmdui): # Update the tick on the UI.
+		"""Enable/disable the View/Fold menu items based on folding state.
+
+		The "all" variants are always available when folding is on; the
+		single expand/collapse items are enabled only when the current line
+		is a fold header in the appropriate state.
+		"""
+		if not self.bFolding:
+			# Folding disabled for this view - grey out every fold command.
+			cmdui.Enable(0)
+			return
+		id = cmdui.m_nID
+		if id in [win32ui.ID_VIEW_FOLD_EXPAND_ALL, win32ui.ID_VIEW_FOLD_COLLAPSE_ALL]:
+			cmdui.Enable()
+		else:
+			enable = 0
+			# Line containing the start of the current selection.
+			lineno = self.LineFromChar(self.GetSel()[0])
+			foldable = self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG
+			is_expanded = self.SCIGetFoldExpanded(lineno)
+			if id == win32ui.ID_VIEW_FOLD_EXPAND:
+				if foldable and not is_expanded:
+					enable = 1
+			elif id == win32ui.ID_VIEW_FOLD_COLLAPSE:
+				if foldable and is_expanded:
+					enable = 1
+			cmdui.Enable(enable)
+
+	def OnCmdViewFoldTopLevel(self, cid, code): # Handle the menu command
+			# Toggle folding of all top-level fold points; cid/code unused.
+			# (The body is indented one level deeper than sibling methods,
+			# but it is a consistent block so Python accepts it as-is.)
+			self.FoldTopLevelEvent(None)
+
+	#######################################
+	# The Events
+	#######################################
+	def ToggleBookmarkEvent(self, event, pos = -1):
+		"""Toggle a bookmark at the specified or current position.
+
+		pos is a character position; -1 (the default) means the start of
+		the current selection.  Returns 0 (keystroke fully handled).
+		"""
+		if pos==-1:
+			pos, end = self.GetSel()
+		startLine = self.LineFromChar(pos)
+		# MarkerToggle takes a 1-based line number.
+		self.GetDocument().MarkerToggle(startLine+1, MARKER_BOOKMARK)
+		return 0
+
+	def GotoNextBookmarkEvent(self, event, fromPos=-1):
+		"""Move the caret to the next bookmark, wrapping to the top of the file.
+
+		fromPos is a character position to search from; -1 (the default)
+		means the start of the current selection.  Beeps if there is no
+		bookmark (or only the one the caret is already on).  Returns 0.
+		"""
+		if fromPos==-1:
+			fromPos, end = self.GetSel()
+		startLine = self.LineFromChar(fromPos)+1 # Zero based line to start
+		# MarkerGetNext uses 1-based lines; convert the result back to 0-based.
+		nextLine = self.GetDocument().MarkerGetNext(startLine+1, MARKER_BOOKMARK)-1
+		if nextLine<0:
+			# Nothing below the caret - wrap and search from the top.
+			nextLine = self.GetDocument().MarkerGetNext(0, MARKER_BOOKMARK)-1
+		if nextLine <0 or nextLine == startLine-1:
+			win32api.MessageBeep()
+		else:
+			self.SCIEnsureVisible(nextLine)
+			self.SCIGotoLine(nextLine)
+		return 0
+
+	def TabKeyEvent(self, event):
+		"""Insert an indent.  If no selection, a single indent, otherwise a block indent
+		"""
+		# Handle auto-complete first: Tab accepts the current completion.
+		if self.SCIAutoCActive():
+			self.SCIAutoCComplete()
+			return 0
+		# Call the IDLE event.
+		return self.bindings.fire("<<smart-indent>>", event)
+
+	def EnterKeyEvent(self, event):
+		"""Handle the enter key with special handling for auto-complete
+		"""
+		# Handle auto-complete first: Enter dismisses the completion list,
+		# then falls through to the normal newline handling below.
+		if self.SCIAutoCActive():
+			self.SCIAutoCCancel()
+		# Call the IDLE event.
+		return self.bindings.fire("<<newline-and-indent>>", event)
+
+	def ShowInteractiveWindowEvent(self, event):
+		# Display (and focus) Pythonwin's interactive window.
+		# Imported lazily so the editor does not pull the interactive
+		# framework in until it is actually needed.
+		import pywin.framework.interact
+		pywin.framework.interact.ShowInteractiveWindow()
+
+	def FoldTopLevelEvent(self, event = None):
+		"""Toggle folding of every top-level fold point in the document.
+
+		The direction (expand vs collapse) is taken from the state of the
+		first fold header found: if it is collapsed everything is expanded,
+		and vice-versa.
+		"""
+		win32ui.DoWaitCursor(1)
+		try:
+			# Fold data is only valid once the document has been colorized.
+			self.Colorize()
+			maxLine = self.GetLineCount()
+			# Find the first line, and check out its state.
+			for lineSeek in xrange(maxLine):
+				if self.SCIGetFoldLevel(lineSeek) & SC_FOLDLEVELHEADERFLAG:
+					expanding = not self.SCIGetFoldExpanded(lineSeek)
+					break
+			else:
+				# no folds here!
+				return
+			for lineSeek in xrange(lineSeek, maxLine):
+				level = self.SCIGetFoldLevel(lineSeek)
+				# NOTE(review): '-' binds tighter than '&', so this computes
+				# level & (SC_FOLDLEVELNUMBERMASK - SC_FOLDLEVELBASE), not
+				# (level & MASK) - BASE.  The two agree for fold depths below
+				# 0x400 so it works in practice - confirm before changing.
+				level_no = level & SC_FOLDLEVELNUMBERMASK - SC_FOLDLEVELBASE
+				is_header = level & SC_FOLDLEVELHEADERFLAG
+	#			print lineSeek, level_no, is_header
+				if level_no == 0 and is_header:
+					# Toggle only headers not already in the target state.
+					if (expanding and not self.SCIGetFoldExpanded(lineSeek)) or \
+					   (not expanding and self.SCIGetFoldExpanded(lineSeek)):
+						self.SCIToggleFold(lineSeek)
+		finally:
+			win32ui.DoWaitCursor(-1)
+
+	def FoldExpandEvent(self, event):
+		"""Expand the fold header on the current line, if it is collapsed."""
+		if not self.bFolding:
+			win32api.MessageBeep()
+			return
+		win32ui.DoWaitCursor(1)
+		lineno = self.LineFromChar(self.GetSel()[0])
+		# Only toggle a fold header that is currently collapsed.
+		if self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG and \
+				not self.SCIGetFoldExpanded(lineno):
+			self.SCIToggleFold(lineno)
+		win32ui.DoWaitCursor(-1)
+
+	def FoldExpandAllEvent(self, event):
+		"""Expand every collapsed fold header in the document."""
+		if not self.bFolding:
+			win32api.MessageBeep()
+			return
+		win32ui.DoWaitCursor(1)
+		for lineno in xrange(0, self.GetLineCount()):
+			if self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG and \
+					not self.SCIGetFoldExpanded(lineno):
+				self.SCIToggleFold(lineno)
+		win32ui.DoWaitCursor(-1)
+
+	def FoldCollapseEvent(self, event):
+		"""Collapse the fold header on the current line, if it is expanded."""
+		if not self.bFolding:
+			win32api.MessageBeep()
+			return
+		win32ui.DoWaitCursor(1)
+		lineno = self.LineFromChar(self.GetSel()[0])
+		# Only toggle a fold header that is currently expanded.
+		if self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG and \
+				self.SCIGetFoldExpanded(lineno):
+			self.SCIToggleFold(lineno)
+		win32ui.DoWaitCursor(-1)
+
+	def FoldCollapseAllEvent(self, event):
+		"""Collapse every expanded fold header in the document."""
+		if not self.bFolding:
+			win32api.MessageBeep()
+			return
+		win32ui.DoWaitCursor(1)
+		# Fold data is only valid once the document has been colorized.
+		self.Colorize()
+		for lineno in xrange(0, self.GetLineCount()):
+			if self.SCIGetFoldLevel(lineno) & SC_FOLDLEVELHEADERFLAG and \
+					self.SCIGetFoldExpanded(lineno):
+				self.SCIToggleFold(lineno)
+		win32ui.DoWaitCursor(-1)
+
+
+from pywin.framework.editor.frame import EditorFrame
+class SplitterFrame(EditorFrame):
+	"""Editor frame that adds a Window/Split command to the standard frame."""
+	def OnCreate(self, cs):
+		self.HookCommand(self.OnWindowSplit, win32ui.ID_WINDOW_SPLIT)
+		return 1
+	def OnWindowSplit(self, id, code):
+		# Ask the splitter (the first pane) to enter keyboard split mode.
+		self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST).DoKeyboardSplit()
+		return 1
+
+from pywin.framework.editor.template import EditorTemplateBase
+class SyntEditTemplate(EditorTemplateBase):
+	"""Document template tying the colour editor doc/frame/view together."""
+	def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None):
+		# Default the doc/view/frame classes to the colour-editor variants.
+		if makeDoc is None: makeDoc = SyntEditDocument
+		if makeView is None: makeView = SyntEditView
+		if makeFrame is None: makeFrame = SplitterFrame
+		self.bSetMenus = 0	# Have the IDLE extension menus been added yet?
+		EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView)
+
+	def CheckIDLEMenus(self, idle):
+		"""Append the IDLE extension commands to the shared Edit menu (once)."""
+		if self.bSetMenus: return
+		self.bSetMenus = 1
+
+		submenu = win32ui.CreatePopupMenu()
+		newitems = idle.GetMenuItems("edit")
+		flags=win32con.MF_STRING|win32con.MF_ENABLED
+		for text, event in newitems:
+			id = bindings.event_to_commands.get(event)
+			if id is not None:
+				# Show the configured accelerator (if any) after a tab.
+				keyname = pywin.scintilla.view.configManager.get_key_binding( event, ["editor"] )
+				if keyname is not None:
+					text = text + "\t" + keyname
+				submenu.AppendMenu(flags, id, text)
+
+		mainMenu = self.GetSharedMenu()
+		editMenu = mainMenu.GetSubMenu(1)	# Edit is the second top-level menu.
+		editMenu.AppendMenu(win32con.MF_SEPARATOR, 0, "")
+		editMenu.AppendMenu(win32con.MF_STRING | win32con.MF_POPUP | win32con.MF_ENABLED, submenu.GetHandle(), "&Source Code")
+
+	def _CreateDocTemplate(self, resourceId):
+		return win32ui.CreateDocTemplate(resourceId)
+
+	def CreateWin32uiDocument(self):
+		return self.DoCreateDoc()
+
+	def GetPythonPropertyPages(self):
+		"""Returns a list of property pages
+		"""
+		from pywin.scintilla import configui
+		return EditorTemplateBase.GetPythonPropertyPages(self) + [configui.ScintillaFormatPropertyPage()]
+		
+# For debugging purposes, when this module may be reloaded many times.
+# Remove any template registered by a previous import of this module;
+# on a fresh import 'editorTemplate' is unbound and the NameError is ignored.
+try:
+	win32ui.GetApp().RemoveDocTemplate(editorTemplate)
+except NameError:
+	pass
+
+# Register the colour-editor template with the application.
+editorTemplate = SyntEditTemplate()
+win32ui.GetApp().AddDocTemplate(editorTemplate)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/configui.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/configui.py
new file mode 100644
index 0000000..a266196
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/configui.py
@@ -0,0 +1,255 @@
+from pywin.mfc import dialog
+import document
+import win32ui
+import win32con
+import win32api
+
+from pywin.framework.editor import GetEditorOption, SetEditorOption, DeleteEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat, editorTemplate
+import pywin.scintilla.config
+
+# The standard 16 color VGA palette should always be possible    
+# Each entry is (name, red, green, blue); used to populate the
+# "Tab Timmy" colour combo on the whitespace property page.
+paletteVGA = ( ("Black",0,0,0), ("Navy",0,0,128), ("Green",0,128,0), ("Cyan",0,128,128), 
+	("Maroon",128,0,0), ("Purple",128,0,128), ("Olive",128,128,0), ("Gray",128,128,128), 
+	("Silver",192,192,192), ("Blue",0,0,255), ("Lime",0,255,0), ("Aqua",0,255,255), 
+	("Red",255,0,0), ("Fuchsia",255,0,255), ("Yellow",255,255,0), ("White",255,255,255) )
+
+######################################################
+#
+# Property Page for editor options
+#
+class EditorPropertyPage(dialog.PropertyPage):
+	"""Property page for general editor options (backup, margins, folding,
+	right edge, source control and keyboard configuration).
+
+	Most options are registered via _AddEditorOption, which both hooks the
+	control (DDX) and loads/saves the value from the editor options store.
+	"""
+	def __init__(self):
+		dialog.PropertyPage.__init__(self, win32ui.IDD_PP_EDITOR)
+		self.autooptions = []	# (optionName, defaultVal) pairs saved in OnOK.
+		self._AddEditorOption(win32ui.IDC_AUTO_RELOAD, "i", "Auto Reload", 1)
+		self._AddEditorOption(win32ui.IDC_COMBO1, "i", "Backup Type", document.BAK_DOT_BAK_BAK_DIR)
+		self._AddEditorOption(win32ui.IDC_AUTOCOMPLETE, "i", "Autocomplete Attributes", 1)
+		self._AddEditorOption(win32ui.IDC_CALLTIPS, "i", "Show Call Tips", 1)
+		self._AddEditorOption(win32ui.IDC_MARGIN_LINENUMBER, "i", "Line Number Margin Width", 0)
+		self._AddEditorOption(win32ui.IDC_RADIO1, "i", "MarkersInMargin", None)
+		self._AddEditorOption(win32ui.IDC_MARGIN_MARKER, "i", "Marker Margin Width", None)
+		self["Marker Margin Width"] = GetEditorOption("Marker Margin Width", 16)
+
+		# Folding		
+		self._AddEditorOption(win32ui.IDC_MARGIN_FOLD, "i", "Fold Margin Width", 20)
+		self._AddEditorOption(win32ui.IDC_FOLD_ENABLE, "i", "Enable Folding", 1)
+		self._AddEditorOption(win32ui.IDC_FOLD_ON_OPEN, "i", "Fold On Open", 0)
+		self._AddEditorOption(win32ui.IDC_FOLD_SHOW_LINES, "i", "Fold Lines", 1)
+
+		# Right edge.
+		self._AddEditorOption(win32ui.IDC_RIGHTEDGE_ENABLE, "i", "Right Edge Enabled", 0)
+		self._AddEditorOption(win32ui.IDC_RIGHTEDGE_COLUMN, "i", "Right Edge Column", 75)
+
+		# Source control, etc		
+		self.AddDDX(win32ui.IDC_VSS_INTEGRATE, "bVSS")
+		self.AddDDX(win32ui.IDC_KEYBOARD_CONFIG, "Configs", "l")
+		self["Configs"] = pywin.scintilla.config.find_config_files()
+
+
+
+	def _AddEditorOption(self, idd, typ, optionName, defaultVal):
+		"""Hook control idd to optionName (DDX type typ) and load its value.
+
+		A defaultVal of None registers the DDX only (for derived options).
+		"""
+		self.AddDDX(idd, optionName, typ)
+		# some options are "derived" - ie, can be implied from others
+		# (eg, "view markers in background" is implied from "markerMarginWidth==0"
+		# So we don't actually store these values, but they do still get DDX support.
+		if defaultVal is not None:
+			self[optionName] = GetEditorOption(optionName, defaultVal)
+			self.autooptions.append((optionName, defaultVal))
+
+	def OnInitDialog(self):
+		"""Populate the controls from the stored options and hook buttons."""
+		for name, val in self.autooptions:
+			self[name] = GetEditorOption(name, val)
+
+		# Note that these MUST be in the same order as the BAK constants.
+		cbo = self.GetDlgItem(win32ui.IDC_COMBO1)
+		cbo.AddString("None")
+		cbo.AddString(".BAK File")
+		cbo.AddString("TEMP dir")
+		cbo.AddString("Own dir")
+
+		# Source Safe
+		bVSS = GetEditorOption("Source Control Module", "") == "pywin.framework.editor.vss"
+		self['bVSS'] = bVSS
+
+		edit = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE)
+		edit.SetWindowText("Sample Color")
+
+		rc = dialog.PropertyPage.OnInitDialog(self)
+
+		try:
+			self.GetDlgItem(win32ui.IDC_KEYBOARD_CONFIG).SelectString(-1, GetEditorOption("Keyboard Config", "default"))
+		except win32ui.error:
+			# The stored config name is not in the list - not fatal.
+			import traceback
+			traceback.print_exc()
+			pass
+
+		self.HookCommand(self.OnButSimple, win32ui.IDC_FOLD_ENABLE)
+		self.HookCommand(self.OnButSimple, win32ui.IDC_RADIO1)
+		self.HookCommand(self.OnButSimple, win32ui.IDC_RADIO2)
+		self.HookCommand(self.OnButSimple, win32ui.IDC_RIGHTEDGE_ENABLE)
+		self.HookCommand(self.OnButEdgeColor, win32ui.IDC_RIGHTEDGE_DEFINE)
+
+		# RADIO1/RADIO2 select "markers in margin" vs "in background".
+		butMarginEnabled = self['Marker Margin Width'] > 0
+		self.GetDlgItem(win32ui.IDC_RADIO1).SetCheck(butMarginEnabled)
+		self.GetDlgItem(win32ui.IDC_RADIO2).SetCheck(not butMarginEnabled)
+
+		self.edgeColor = self.initialEdgeColor = GetEditorOption("Right Edge Color", win32api.RGB(0xef, 0xef, 0xef))
+
+		self.UpdateUIForState()
+
+		return rc
+
+	def OnButSimple(self, id, code):
+		# Any simple checkbox/radio click just refreshes dependent controls.
+		if code == win32con.BN_CLICKED:
+			self.UpdateUIForState()
+
+	def OnButEdgeColor(self, id, code):
+		"""Let the user pick the right-edge colour via the colour dialog."""
+		if code == win32con.BN_CLICKED:
+			d = win32ui.CreateColorDialog(self.edgeColor, 0, self)
+			# Ensure the current color is a custom color (as it may not be in the swatch)
+			# plus some other nice gray scales.
+			ccs = [self.edgeColor]
+			for c in range(0xef, 0x4f, -0x10):
+				ccs.append(win32api.RGB(c,c,c))
+			d.SetCustomColors( ccs )
+			if d.DoModal() == win32con.IDOK:
+				self.edgeColor = d.GetColor()
+				self.UpdateUIForState()
+				
+	def UpdateUIForState(self):
+		"""Enable/disable dependent controls to match the master checkboxes."""
+		folding = self.GetDlgItem(win32ui.IDC_FOLD_ENABLE).GetCheck()
+		self.GetDlgItem(win32ui.IDC_FOLD_ON_OPEN).EnableWindow(folding)
+		self.GetDlgItem(win32ui.IDC_FOLD_SHOW_LINES).EnableWindow(folding)
+
+		widthEnabled = self.GetDlgItem(win32ui.IDC_RADIO1).GetCheck()
+		self.GetDlgItem(win32ui.IDC_MARGIN_MARKER).EnableWindow(widthEnabled)
+		self.UpdateData() # Ensure self[] is up to date with the control data.
+		if widthEnabled and self["Marker Margin Width"] == 0:
+			# A zero width means "in background"; force a sensible default.
+			self["Marker Margin Width"] = 16
+			self.UpdateData(0) # Ensure control up to date with self[]
+
+		# Right edge
+		edgeEnabled = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_ENABLE).GetCheck()
+		self.GetDlgItem(win32ui.IDC_RIGHTEDGE_COLUMN).EnableWindow(edgeEnabled)
+		self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE).EnableWindow(edgeEnabled)
+		self.GetDlgItem(win32ui.IDC_RIGHTEDGE_DEFINE).EnableWindow(edgeEnabled)
+
+		edit = self.GetDlgItem(win32ui.IDC_RIGHTEDGE_SAMPLE)
+		edit.SetBackgroundColor(0, self.edgeColor)
+
+	def OnOK(self):
+		"""Persist every option back to the editor options store."""
+		for name, defVal in self.autooptions:
+			SetEditorOption(name, self[name])
+		# Margin width gets handled differently.
+		if self['MarkersInMargin'] == 0:
+			SetEditorOption("Marker Margin Width", self["Marker Margin Width"])
+		else:
+			SetEditorOption("Marker Margin Width", 0)
+		if self.edgeColor != self.initialEdgeColor:
+			SetEditorOption("Right Edge Color", self.edgeColor)
+		if self['bVSS']:
+			SetEditorOption("Source Control Module", "pywin.framework.editor.vss")
+		else:
+			if GetEditorOption("Source Control Module", "")=='pywin.framework.editor.vss':
+				SetEditorOption("Source Control Module", "")
+		# Keyboard config
+		configname = self.GetDlgItem(win32ui.IDC_KEYBOARD_CONFIG).GetWindowText()
+		if configname:
+			if configname == "default":
+				DeleteEditorOption("Keyboard Config")
+			else:
+				SetEditorOption("Keyboard Config", configname)
+
+			import pywin.scintilla.view
+			pywin.scintilla.view.LoadConfiguration()
+
+		# Now tell all views we have changed.
+##		for doc in editorTemplate.GetDocumentList():
+##			for view in doc.GetAllViews():
+##				try:
+##					fn = view.OnConfigChange
+##				except AttributeError:
+##					continue
+##				fn()
+		return 1
+
+class EditorWhitespacePropertyPage(dialog.PropertyPage):
+	"""Property page for tab/indent sizes and whitespace display options,
+	including the "Tab Timmy" bad-indent indicator and its colour.
+	"""
+	def __init__(self):
+		dialog.PropertyPage.__init__(self, win32ui.IDD_PP_TABS)
+		self.autooptions = []	# (optionName, defaultVal) pairs saved in OnOK.
+		self._AddEditorOption(win32ui.IDC_TAB_SIZE, "i", "Tab Size", 4)
+		self._AddEditorOption(win32ui.IDC_INDENT_SIZE, "i", "Indent Size", 4)
+		self._AddEditorOption(win32ui.IDC_USE_SMART_TABS, "i", "Smart Tabs", 1)
+		self._AddEditorOption(win32ui.IDC_VIEW_WHITESPACE, "i", "View Whitespace", 0)
+		self._AddEditorOption(win32ui.IDC_VIEW_EOL, "i", "View EOL", 0)
+		self._AddEditorOption(win32ui.IDC_VIEW_INDENTATIONGUIDES, "i", "View Indentation Guides", 0)
+
+	def _AddEditorOption(self, idd, typ, optionName, defaultVal):
+		"""Hook control idd to optionName (DDX type typ) and load its value."""
+		self.AddDDX(idd, optionName, typ)
+		self[optionName] = GetEditorOption(optionName, defaultVal)
+		self.autooptions.append((optionName, defaultVal))
+
+	def OnInitDialog(self):
+		"""Populate the controls from the stored options and hook buttons."""
+		for name, val in self.autooptions:
+			self[name] = GetEditorOption(name, val)
+
+		rc = dialog.PropertyPage.OnInitDialog(self)
+
+		# Tab Timmy mode: none, or indicator.
+		idc = win32ui.IDC_TABTIMMY_NONE
+		if GetEditorOption("Use Tab Timmy", 1):
+			idc = win32ui.IDC_TABTIMMY_IND
+		self.GetDlgItem(idc).SetCheck(1)
+
+		# Tabs vs spaces radio.
+		idc = win32ui.IDC_RADIO1
+		if GetEditorOption("Use Tabs", 0):
+			idc = win32ui.IDC_USE_TABS
+		self.GetDlgItem(idc).SetCheck(1)
+
+		# Select the palette entry matching the stored colour (-1 if custom).
+		tt_color = GetEditorOption("Tab Timmy Color", win32api.RGB(0xff, 0, 0))
+		self.cbo = self.GetDlgItem(win32ui.IDC_COMBO1)
+		for c in paletteVGA:
+			self.cbo.AddString(c[0])
+		sel = 0
+		for c in paletteVGA:
+			if tt_color == win32api.RGB(c[1], c[2], c[3]):
+				break
+			sel = sel + 1
+		else:
+			sel = -1
+		self.cbo.SetCurSel(sel)
+		self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_NONE)
+		self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_IND)
+		self.HookCommand(self.OnButSimple, win32ui.IDC_TABTIMMY_BG)
+		# Set ranges for the spinners.
+		for spinner_id in [win32ui.IDC_SPIN1, win32ui.IDC_SPIN2]:
+			spinner = self.GetDlgItem(spinner_id)
+			spinner.SetRange(1, 16)
+		return rc
+
+	def OnButSimple(self, id, code):
+		# Any radio click just refreshes dependent controls.
+		if code == win32con.BN_CLICKED:
+			self.UpdateUIForState()
+
+	def UpdateUIForState(self):
+		# The colour combo only matters when Tab Timmy is enabled.
+		timmy = self.GetDlgItem(win32ui.IDC_TABTIMMY_NONE).GetCheck()
+		self.GetDlgItem(win32ui.IDC_COMBO1).EnableWindow(not timmy)
+
+	def OnOK(self):
+		"""Persist every option back to the editor options store."""
+		for name, defVal in self.autooptions:
+			SetEditorOption(name, self[name])
+
+		SetEditorOption("Use Tabs", self.GetDlgItem(win32ui.IDC_USE_TABS).GetCheck())
+
+		SetEditorOption("Use Tab Timmy", self.GetDlgItem(win32ui.IDC_TABTIMMY_IND).GetCheck())
+		c = paletteVGA[self.cbo.GetCurSel()]
+		SetEditorOption("Tab Timmy Color", win32api.RGB(c[1], c[2], c[3]))
+
+		return 1
+
+def testpp():
+	"""Manual test: show the whitespace property page in a property sheet."""
+	ps = dialog.PropertySheet("Editor Options")
+	ps.AddPage(EditorWhitespacePropertyPage())
+	ps.DoModal()
+
+if __name__=='__main__':
+    testpp()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/document.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/document.py
new file mode 100644
index 0000000..579ea57
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/document.py
@@ -0,0 +1,310 @@
+# We no longer support the old, non-colour editor!
+
+from pywin.mfc import docview, object
+from pywin.framework.editor import GetEditorOption
+import win32ui
+import os
+import win32con
+import string
+import traceback
+import win32api
+
+# Backup strategies used when saving (see EditorDocumentBase.OnSaveDocument).
+BAK_NONE=0				# No backup is made.
+BAK_DOT_BAK=1			# foo.bak next to the original file.
+BAK_DOT_BAK_TEMP_DIR=2	# foo.bak in the TEMP directory.
+BAK_DOT_BAK_BAK_DIR=3	# Original name, in TEMP\bak.
+
+# Private window message used by the watcher thread to ask the UI thread
+# to re-check the file on disk.
+MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in editor.py and coloreditor.py
+
+import pywin.scintilla.document
+ParentEditorDocument=pywin.scintilla.document.CScintillaDocument
+class EditorDocumentBase(ParentEditorDocument):
+	"""Base editor document: adds backup-on-save, watching the file on disk
+	for external changes (via FileWatchingThread), read-only handling and
+	optional source-control checkout.
+	"""
+	def __init__(self, template):
+		self.bAutoReload = GetEditorOption("Auto Reload", 1)
+		self.bDeclinedReload = 0 # Has the user declined to reload.
+		self.fileStat = None	# os.stat() of the file as last seen by us.
+		self.bReportedFileNotFound = 0
+
+		# what sort of bak file should I create.
+		# default to write to %temp%/bak/filename.ext
+		self.bakFileType=GetEditorOption("Backup Type", BAK_DOT_BAK_BAK_DIR)
+
+		self.watcherThread = FileWatchingThread(self)
+		self.watcherThread.CreateThread()
+		# Should I try and use VSS integration?
+		self.scModuleName=GetEditorOption("Source Control Module", "")
+		self.scModule = None # Loaded when first used.
+		# Skip the direct parent - initialize the pywin.mfc.object.CmdTarget
+		# base directly with a fresh win32ui document.
+		object.CmdTarget.__init__(self, template.CreateWin32uiDocument())
+
+	def OnCloseDocument(self ):
+		# Stop the watcher thread before the document goes away.
+		self.watcherThread.SignalStop()
+		return self._obj_.OnCloseDocument()
+
+#	def OnOpenDocument(self, name):
+#		rc = ParentEditorDocument.OnOpenDocument(self, name)
+#		self.GetFirstView()._SetLoadedText(self.text)
+#		self._DocumentStateChanged()
+#		return rc
+
+	def OnSaveDocument( self, fileName ):
+		"""Save the document, first moving the old file to a backup location
+		chosen by self.bakFileType.  Returns 1 on success, 0 on failure.
+		"""
+		win32ui.SetStatusText("Saving file...",1)
+		# rename to bak if required.
+		dir, basename = os.path.split(fileName)
+		if self.bakFileType==BAK_DOT_BAK:
+			bakFileName=dir+'\\'+os.path.splitext(basename)[0]+'.bak'
+		elif self.bakFileType==BAK_DOT_BAK_TEMP_DIR:
+			bakFileName=win32api.GetTempPath()+'\\'+os.path.splitext(basename)[0]+'.bak'
+		elif self.bakFileType==BAK_DOT_BAK_BAK_DIR:
+			tempPath=os.path.join(win32api.GetTempPath(),'bak')
+			try:
+				# NOTE(review): mode 0 looks odd for mkdir (usually 0777 and
+				# ignored on Windows anyway) - confirm before changing.
+				os.mkdir(tempPath,0)
+			except os.error:
+				pass
+			bakFileName=os.path.join(tempPath,basename)
+		try:
+			os.unlink(bakFileName)	# NameError if no backup wanted (bakFileName never assigned).
+		except (os.error, NameError):
+			pass
+		try:
+			os.rename(fileName, bakFileName)
+		except (os.error, NameError):
+			pass
+		try:
+			self.SaveFile(fileName)
+		except IOError, details:
+			win32ui.MessageBox("Error - could not save file\r\n\r\n%s"%details)
+			return 0
+		self.SetModifiedFlag(0) # No longer dirty
+		self.bDeclinedReload = 0 # They probably want to know if it changes again!
+		win32ui.AddToRecentFileList(fileName)
+		self.SetPathName(fileName)
+		win32ui.SetStatusText("Ready")
+		self._DocumentStateChanged()
+		return 1
+
+	def FinalizeViewCreation(self, view):
+		"""Per-view initialization; also applies "Fold On Open" to the first view."""
+		ParentEditorDocument.FinalizeViewCreation(self, view)
+		if view == self.GetFirstView():
+			self._DocumentStateChanged()
+			if view.bFolding and GetEditorOption("Fold On Open", 0):
+				view.FoldTopLevelEvent()
+
+	def HookViewNotifications(self, view):
+		ParentEditorDocument.HookViewNotifications(self, view)
+
+	# Support for reloading the document from disk - presumably after some
+	# external application has modified it (or possibly source control has
+	# checked it out.
+	def ReloadDocument(self):
+		"""Reloads the document from disk.  Assumes the file has
+		been saved and user has been asked if necessary - it just does it!
+		"""
+		win32ui.SetStatusText("Reloading document.  Please wait...", 1)
+		self.SetModifiedFlag(0)
+		# Loop over all views, saving their state, then reload the document
+		views = self.GetAllViews()
+		states = []
+		for view in views:
+			try:
+				info = view._PrepareUserStateChange()
+			except AttributeError: # Not our editor view?
+				info = None
+			states.append(info)
+		self.OnOpenDocument(self.GetPathName())
+		# map(None, a, b) is the Python 2 pad-with-None zip (zip_longest).
+		for view, info in map(None, views, states):
+			if info is not None:
+				view._EndUserStateChange(info)
+		self._DocumentStateChanged()
+		win32ui.SetStatusText("Document reloaded.")
+
+	# Reloading the file
+	def CheckExternalDocumentUpdated(self):
+		"""Compare the on-disk stat with our snapshot; prompt/reload if changed.
+
+		Called on the UI thread in response to MSG_CHECK_EXTERNAL_FILE.
+		"""
+		if self.bDeclinedReload or not self.GetPathName():
+			return
+		try:
+			newstat = os.stat(self.GetPathName())
+		except os.error, (code, msg):
+			if not self.bReportedFileNotFound:
+				print "The file '%s' is open for editing, but\nchecking it for changes caused the error: %s" % (self.GetPathName(), msg)
+				self.bReportedFileNotFound = 1
+			return
+		if self.bReportedFileNotFound:
+			print "The file '%s' has re-appeared - continuing to watch for changes..." % (self.GetPathName(),)
+			self.bReportedFileNotFound = 0 # Once found again we want to start complaining.
+		# Compare device, size, atime and mtime (stat tuple indexes 0/6/8/9).
+		changed = (self.fileStat is None) or \
+			self.fileStat[0] != newstat[0] or \
+			self.fileStat[6] != newstat[6] or \
+			self.fileStat[8] != newstat[8] or \
+			self.fileStat[9] != newstat[9]
+		if changed:
+			question = None
+			if self.IsModified():
+				question = "%s\r\n\r\nThis file has been modified outside of the source editor.\r\nDo you want to reload it and LOSE THE CHANGES in the source editor?" % self.GetPathName()
+				mbStyle = win32con.MB_YESNO | win32con.MB_DEFBUTTON2 # Default to "No"
+			else:
+				if not self.bAutoReload:
+					question = "%s\r\n\r\nThis file has been modified outside of the source editor.\r\nDo you want to reload it?" % self.GetPathName()
+					mbStyle = win32con.MB_YESNO # Default to "Yes"
+			if question:
+				rc = win32ui.MessageBox(question, None, mbStyle)
+				if rc!=win32con.IDYES:
+					self.bDeclinedReload = 1
+					return
+			self.ReloadDocument()
+
+	def _DocumentStateChanged(self):
+		"""Called whenever the documents state (on disk etc) has been changed
+		by the editor (eg, as the result of a save operation)
+		"""
+		if self.GetPathName():
+			try:
+				self.fileStat = os.stat(self.GetPathName())
+			except os.error:
+				self.fileStat = None
+		else:
+			self.fileStat = None
+		self.watcherThread._DocumentStateChanged()
+		self._UpdateUIForState()
+		self._ApplyOptionalToViews("_UpdateUIForState")
+		self._ApplyOptionalToViews("SetReadOnly", self._IsReadOnly())
+		self._ApplyOptionalToViews("SCISetSavePoint")
+		# Allow the debugger to reset us too.
+		import pywin.debugger
+		if pywin.debugger.currentDebugger is not None:
+			pywin.debugger.currentDebugger.UpdateDocumentLineStates(self)
+			
+	# Read-only document support - make it obvious to the user
+	# that the file is read-only.
+	def _IsReadOnly(self):
+		# 128 (0200 octal) is the owner-write bit of st_mode.
+		return self.fileStat is not None and (self.fileStat[0] & 128)==0
+
+	def _UpdateUIForState(self):
+		"""Change the title to reflect the state of the document - 
+		eg ReadOnly, Dirty, etc
+		"""
+		filename = self.GetPathName()
+		if not filename: return # New file - nothing to do
+		try:
+			# This seems necessary so the internal state of the window becomes
+			# "visible".  without it, it is still shown, but certain functions
+			# (such as updating the title) dont immediately work?
+			self.GetFirstView().ShowWindow(win32con.SW_SHOW)
+			title = win32ui.GetFileTitle(filename)
+		except win32ui.error:
+			title = filename
+		if self._IsReadOnly():
+			title = title + " (read-only)"
+		self.SetTitle(title)
+
+	def MakeDocumentWritable(self):
+		"""Attempt a source-control checkout so the file can be edited.
+
+		Returns 1 if the document became writable, else 0.
+		"""
+		pretend_ss = 0 # Set to 1 to test this without source safe :-)
+		if not self.scModuleName and not pretend_ss: # No Source Control support.
+			win32ui.SetStatusText("Document is read-only, and no source-control system is configured")
+			win32api.MessageBeep()
+			return 0
+
+		# We have source control support - check if the user wants to use it.
+		msg = "Would you like to check this file out?"
+		defButton = win32con.MB_YESNO
+		if self.IsModified(): 
+			msg = msg + "\r\n\r\nALL CHANGES IN THE EDITOR WILL BE LOST"
+			defButton = win32con.MB_YESNO
+		if win32ui.MessageBox(msg, None, defButton)!=win32con.IDYES:
+			return 0
+
+		if pretend_ss:
+			print "We are only pretending to check it out!"
+			win32api.SetFileAttributes(self.GetPathName(), win32con.FILE_ATTRIBUTE_NORMAL)
+			self.ReloadDocument()
+			return 1
+			
+		# Now call on the module to do it.
+		if self.scModule is None:
+			try:
+				# Resolve the dotted module name to the leaf module.
+				self.scModule = __import__(self.scModuleName)
+				for part in string.split(self.scModuleName,'.')[1:]:
+					self.scModule = getattr(self.scModule, part)
+			except:
+				traceback.print_exc()
+				print "Error loading source control module."
+				return 0
+		
+		if self.scModule.CheckoutFile(self.GetPathName()):
+			self.ReloadDocument()
+			return 1
+		return 0
+
+	def CheckMakeDocumentWritable(self):
+		# Convenience wrapper: only prompt for checkout when actually read-only.
+		if self._IsReadOnly():
+			return self.MakeDocumentWritable()
+		return 1
+
+	def SaveModified(self):
+		# Called as the document is closed.  If we are about
+		# to prompt for a save, bring the document to the foreground.
+		if self.IsModified():
+			frame = self.GetFirstView().GetParentFrame()
+			try:
+				frame.MDIActivate()
+				frame.AutoRestore()
+			except:
+				print "Could not bring document to foreground"
+		return self._obj_.SaveModified()
+
+# NOTE - I DONT use the standard threading module,
+# as this waits for all threads to terminate at shutdown.
+# When using the debugger, it is possible shutdown will
+# occur without Pythonwin getting a complete shutdown,
+# so we deadlock at the end - threading is waiting for threads that will
+# never report they have finished.
+import pywin.mfc.thread
+import win32event
+class FileWatchingThread(pywin.mfc.thread.WinThread):
+	"""Background thread that watches the document's directory for changes
+	and posts MSG_CHECK_EXTERNAL_FILE to the view so the UI thread can
+	re-check the file (CheckExternalDocumentUpdated).
+	"""
+	def __init__(self, doc):
+		self.doc = doc
+		# adminEvent: the document path changed, rebuild the watch.
+		self.adminEvent = win32event.CreateEvent(None, 0, 0, None)
+		# stopEvent: the document is closing, exit the thread.
+		self.stopEvent = win32event.CreateEvent(None, 0, 0, None)
+		self.watchEvent = None	# FindFirstChangeNotification handle, if any.
+		pywin.mfc.thread.WinThread.__init__(self)
+
+	def _DocumentStateChanged(self):
+		# Called from the UI thread; wakes Run() to refresh the watch.
+		win32event.SetEvent(self.adminEvent)
+
+	def RefreshEvent(self):
+		"""(Re)create the change notification for the document's directory."""
+		self.hwnd = self.doc.GetFirstView().GetSafeHwnd()
+		if self.watchEvent is not None:
+			win32api.FindCloseChangeNotification(self.watchEvent)
+			self.watchEvent = None
+		path = self.doc.GetPathName()
+		if path: path = os.path.dirname(path)
+		if path:
+			filter = win32con.FILE_NOTIFY_CHANGE_FILE_NAME | \
+					 win32con.FILE_NOTIFY_CHANGE_ATTRIBUTES | \
+					 win32con.FILE_NOTIFY_CHANGE_LAST_WRITE
+			try:
+				self.watchEvent = win32api.FindFirstChangeNotification(path, 0, filter)
+			except win32api.error, (rc, fn, msg):
+				print "Can not watch file", path, "for changes -", msg
+	def SignalStop(self):
+		# Called from the UI thread to terminate Run().
+		win32event.SetEvent(self.stopEvent)
+	def Run(self):
+		"""Thread main loop: wait on stop/admin/change events until stopped."""
+		while 1:
+			handles = [self.stopEvent, self.adminEvent]
+			if self.watchEvent is not None:
+				handles.append(self.watchEvent)
+			rc = win32event.WaitForMultipleObjects(handles, 0, win32event.INFINITE)
+			if rc == win32event.WAIT_OBJECT_0:
+				# stopEvent - document closing.
+				break
+			elif rc == win32event.WAIT_OBJECT_0+1:
+				# adminEvent - path changed, rebuild the directory watch.
+				self.RefreshEvent()
+			else:
+				# watchEvent fired - let the UI thread check the file.
+				win32api.PostMessage(self.hwnd, MSG_CHECK_EXTERNAL_FILE, 0, 0)
+				try:
+					# If the directory has been removed underneath us, we get this error.
+					win32api.FindNextChangeNotification(self.watchEvent)
+				except win32api.error, (rc, fn, msg):
+					print "Can not watch file", self.doc.GetPathName(), "for changes -", msg
+					break
+
+		# close a circular reference
+		self.doc = None
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py
new file mode 100644
index 0000000..df98549
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/editor.py
@@ -0,0 +1,465 @@
+#####################################################################
+#
+# editor.py
+#
+# A general purpose text editor, built on top of the win32ui edit
+# type, which is built on an MFC CEditView
+#
+#
+# We now support reloading of externally modified documented
+# (eg, presumably by some other process, such as source control or
+# another editor.
+# We also suport auto-loading of externally modified files.
+# - if the current document has not been modified in this
+# editor, but has been modified on disk, then the file
+# can be automatically reloaded.
+#
+# Note that it will _always_ prompt you if the file in the editor has been modified.
+
+
+import win32ui
+import win32api
+import win32con
+import regex
+import re
+import string
+import sys, os
+import traceback
+from pywin.mfc import docview, dialog, afxres
+
+from pywin.framework.editor import GetEditorOption, SetEditorOption, GetEditorFontOption, SetEditorFontOption, defaultCharacterFormat
+
+# Matches an "import" line; the old "regex" module's symbolic group <name>
+# captures the module name (used by the context menu's "Locate" command).
+patImport=regex.symcomp('import \(<name>.*\)')
+# Captures the leading whitespace (indent) of a line.
+patIndent=regex.compile('^\\([ \t]*[~ \t]\\)')
+
+# Private command IDs for the context menu.
+ID_LOCATE_FILE = 0xe200
+ID_GOTO_LINE = 0xe2001
+MSG_CHECK_EXTERNAL_FILE = win32con.WM_USER+1999 ## WARNING: Duplicated in document.py and coloreditor.py
+
+# Key Codes that modify the buffer when Ctrl or Alt are NOT pressed.
+MODIFYING_VK_KEYS = [win32con.VK_BACK, win32con.VK_TAB, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
+for k in range(48, 91):
+	# VK codes 48-90: the digit and letter keys (plus a few in between).
+	MODIFYING_VK_KEYS.append(k)
+
+# Key Codes that modify the buffer when Ctrl is pressed.
+MODIFYING_VK_KEYS_CTRL = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
+
+# Key Codes that modify the buffer when Alt is pressed.
+MODIFYING_VK_KEYS_ALT = [win32con.VK_BACK, win32con.VK_RETURN, win32con.VK_SPACE, win32con.VK_DELETE]
+
+
+# The editor itself starts here.
+# Using the MFC Document/View model, we have an EditorDocument, which is responsible for
+# managing the contents of the file, and a view which is responsible for rendering it.
+#
+# Due to a limitation in the Windows edit controls, we are limited to one view
+# per document, although nothing in this code assumes this (I hope!)
+
+isRichText=1 # We are using the Rich Text control.  This has not been tested with value "0" for quite some time!
+
+#ParentEditorDocument=docview.Document
+from document import EditorDocumentBase
+ParentEditorDocument=EditorDocumentBase
+class EditorDocument(ParentEditorDocument):
+	#
+	# File loading and saving operations
+	#
+	def OnOpenDocument(self, filename):
+		#
+		# handle Unix and PC text file format.
+		#
+
+		# Get the "long name" of the file name, as it may have been translated
+		# to short names by the shell.
+		self.SetPathName(filename) # Must set this early!
+		# Now do the work!
+		self.BeginWaitCursor()
+		win32ui.SetStatusText("Loading file...",1)
+		try:
+			f = open(filename,"rb")
+		except IOError:
+			win32ui.MessageBox(filename + '\nCan not find this file\nPlease verify that the correct path and file name are given')
+			self.EndWaitCursor()
+			return 0
+		raw=f.read()
+		f.close()
+		contents = self.TranslateLoadedData(raw)
+		rc = 0
+		if win32ui.IsWin32s() and len(contents)>62000: # give or take a few bytes
+			win32ui.MessageBox("This file is too big for Python on Windows 3.1\r\nPlease use another editor to view this file.")
+		else:
+			try:
+				self.GetFirstView().SetWindowText(contents)
+				rc = 1
+			except TypeError: # Null byte in file.
+				win32ui.MessageBox("This file contains NULL bytes, and can not be edited")
+				rc = 0
+				
+			self.EndWaitCursor()
+			self.SetModifiedFlag(0) # No longer dirty
+			self._DocumentStateChanged()
+		return rc
+
+	def TranslateLoadedData(self, data):
+		"""Given raw data read from a file, massage it suitable for the edit window"""
+		# if a CR in the first 250 chars, then perform the expensive translate
+		if string.find(data[:250],'\r')==-1:
+			win32ui.SetStatusText("Translating from Unix file format - please wait...",1)
+			return re.sub('\r*\n','\r\n',data)
+		else:
+			return data
+		
+	def SaveFile(self, fileName):
+		if isRichText:
+			view = self.GetFirstView()
+			view.SaveTextFile(fileName)
+		else: # Old style edit view window.
+			self.GetFirstView().SaveFile(fileName)
+		try:
+			# Make sure line cache has updated info about me!
+			import linecache
+			linecache.checkcache()
+		except:
+			pass
+
+	#
+	# Color state stuff
+	#
+	def SetAllLineColors(self, color = None):
+		for view in self.GetAllViews():
+			view.SetAllLineColors(color)
+
+	def SetLineColor(self, lineNo, color):
+		"Color a line of all views"
+		for view in self.GetAllViews():
+			view.SetLineColor(lineNo, color)
+
+		
+#	def StreamTextOut(self, data): ### This seems unreliable???
+#		self.saveFileHandle.write(data)
+#		return 1 # keep em coming!
+
+#ParentEditorView=docview.EditView
+ParentEditorView=docview.RichEditView
+class EditorView(ParentEditorView):
+	def __init__(self, doc):
+		ParentEditorView.__init__(self, doc)
+		if isRichText:
+			self.SetWordWrap(win32ui.CRichEditView_WrapNone)
+
+		self.addToMRU = 1
+		self.HookHandlers()
+		self.bCheckingFile = 0
+		
+		self.defCharFormat = GetEditorFontOption("Default Font", defaultCharacterFormat)
+
+		# Smart tabs override everything else if context can be worked out.
+		self.bSmartTabs = GetEditorOption("Smart Tabs", 1)
+
+		self.tabSize = GetEditorOption("Tab Size", 8)
+		self.indentSize = GetEditorOption("Indent Size", 8)
+		# If next indent is at a tab position, and useTabs is set, a tab will be inserted.
+		self.bUseTabs = GetEditorOption("Use Tabs", 1)
+
+	def OnInitialUpdate(self):
+		rc = self._obj_.OnInitialUpdate()
+		self.SetDefaultCharFormat(self.defCharFormat)
+		return rc
+
+	def CutCurLine(self):
+		curLine = self._obj_.LineFromChar()
+		nextLine = curLine+1
+		start = self._obj_.LineIndex(curLine)
+		end = self._obj_.LineIndex(nextLine)
+		if end==0:	# must be last line.
+			end = start + self.end.GetLineLength(curLine)
+		self._obj_.SetSel(start,end)
+		self._obj_.Cut()
+	def _PrepareUserStateChange(self):
+		"Return selection, lineindex, etc info, so it can be restored"
+		self.SetRedraw(0)
+		return self.GetModify(), self.GetSel(), self.GetFirstVisibleLine()
+	def _EndUserStateChange(self, info):
+		scrollOff = info[2] - self.GetFirstVisibleLine()
+		if scrollOff:
+			self.LineScroll(scrollOff)
+		self.SetSel(info[1])
+		self.SetModify(info[0])
+		self.SetRedraw(1)
+		self.InvalidateRect()
+		self.UpdateWindow()
+
+	def _UpdateUIForState(self):
+		self.SetReadOnly(self.GetDocument()._IsReadOnly())
+	
+	def SetAllLineColors(self, color = None):
+		if isRichText:
+			info = self._PrepareUserStateChange()
+			try:
+				if color is None: color = self.defCharFormat[4]
+				self.SetSel(0,-1)
+				self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color))
+			finally:
+				self._EndUserStateChange(info)
+
+	def SetLineColor(self, lineNo, color):
+		"lineNo is the 1 based line number to set.  If color is None, default color is used."
+		if isRichText:
+			info = self._PrepareUserStateChange()
+			try:
+				if color is None: color = self.defCharFormat[4]
+				lineNo = lineNo-1
+				startIndex = self.LineIndex(lineNo)
+				if startIndex!=-1:
+					self.SetSel(startIndex, self.LineIndex(lineNo+1))
+					self.SetSelectionCharFormat((win32con.CFM_COLOR, 0,0,0,color))
+			finally:
+				self._EndUserStateChange(info)
+				
+	def Indent(self):
+		"""Insert an indent to move the cursor to the next tab position.
+		
+		Honors the tab size and 'use tabs' settings.  Assumes the cursor is already at the
+		position to be indented, and the selection is a single character (ie, not a block)
+		"""
+		start, end = self._obj_.GetSel()
+		startLine = self._obj_.LineFromChar(start)
+		line = self._obj_.GetLine(startLine)
+		realCol = start - self._obj_.LineIndex(startLine)
+		# Calulate the next tab stop.
+		# Expand existing tabs.
+		curCol = 0
+		for ch in line[:realCol]:
+			if ch=='\t':
+				curCol = ((curCol / self.tabSize) + 1) * self.tabSize
+			else:
+				curCol = curCol + 1
+		nextColumn = ((curCol / self.indentSize) + 1) * self.indentSize
+#		print "curCol is", curCol, "nextColumn is", nextColumn
+		ins = None
+		if self.bSmartTabs:
+			# Look for some context.
+			if realCol==0: # Start of the line - see if the line above can tell us
+				lookLine = startLine-1
+				while lookLine >= 0:
+					check = self._obj_.GetLine(lookLine)[0:1]
+					if check in ['\t', ' ']:
+						ins = check
+						break
+					lookLine = lookLine - 1
+			else: # See if the previous char can tell us
+				check = line[realCol-1]
+				if check in ['\t', ' ']:
+					ins = check
+
+		# Either smart tabs off, or not smart enough!
+		# Use the "old style" settings.
+		if ins is None: 			
+			if self.bUseTabs and nextColumn % self.tabSize==0:
+				ins = '\t'
+			else:
+				ins = ' '
+				
+		if ins == ' ': 
+			# Calc the number of spaces to take us to the next stop
+			ins = ins * (nextColumn - curCol)
+
+		self._obj_.ReplaceSel(ins)
+		
+
+	def BlockDent(self, isIndent, startLine, endLine):
+		" Indent/Undent all lines specified "
+		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
+		tabSize=self.tabSize	# hard-code for now!
+		info = self._PrepareUserStateChange()
+		try:
+			for lineNo in range(startLine, endLine):
+				pos=self._obj_.LineIndex(lineNo)
+				self._obj_.SetSel(pos, pos)
+				if isIndent:
+					self.Indent()
+				else:
+					line = self._obj_.GetLine(lineNo)
+					try:
+						noToDel = 0
+						if line[0]=='\t':
+							noToDel = 1
+						elif line[0]==' ':
+							for noToDel in range(0,tabSize):
+								if line[noToDel]!=' ':
+									break
+							else:
+								noToDel=tabSize
+						if noToDel:
+							self._obj_.SetSel(pos, pos+noToDel)
+							self._obj_.Clear()
+					except IndexError:
+						pass
+		finally:
+			self._EndUserStateChange(info)
+		self.GetDocument().SetModifiedFlag(1) # Now dirty
+		self._obj_.SetSel(self.LineIndex(startLine), self.LineIndex(endLine))
+	
+	def GotoLine(self, lineNo = None):
+		try:
+			if lineNo is None:
+				lineNo = string.atoi(raw_input("Enter Line Number"))
+		except (ValueError, KeyboardInterrupt):
+			return 0
+		self.GetLineCount() # Seems to be needed when file first opened???
+		charNo = self.LineIndex(lineNo-1)
+		self.SetSel(charNo)
+
+	def HookHandlers(self):	# children can override, but should still call me!
+#		self.HookAllKeyStrokes(self.OnKey)
+		self.HookMessage(self.OnCheckExternalDocumentUpdated,MSG_CHECK_EXTERNAL_FILE)
+		self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN)
+		self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS)
+		self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
+		self.HookKeyStroke(self.OnKeyCtrlY, 25)	# ^Y
+		self.HookKeyStroke(self.OnKeyCtrlG, 7)	# ^G
+		self.HookKeyStroke(self.OnKeyTab, 9)	# TAB
+		self.HookKeyStroke(self.OnKeyEnter, 13) # Enter
+		self.HookCommand(self.OnCmdLocateFile, ID_LOCATE_FILE)
+		self.HookCommand(self.OnCmdGotoLine, ID_GOTO_LINE)
+		self.HookCommand(self.OnEditPaste, afxres.ID_EDIT_PASTE)
+		self.HookCommand(self.OnEditCut, afxres.ID_EDIT_CUT)
+
+	# Hook Handlers
+	def OnSetFocus(self,msg):
+		# Even though we use file change notifications, we should be very sure about it here.
+		self.OnCheckExternalDocumentUpdated(msg)
+
+	def OnRClick(self,params):
+		menu = win32ui.CreatePopupMenu()
+		
+		# look for a module name
+		line=string.strip(self._obj_.GetLine())
+		flags=win32con.MF_STRING|win32con.MF_ENABLED
+		if patImport.match(line)==len(line):
+			menu.AppendMenu(flags, ID_LOCATE_FILE, "&Locate %s.py"%patImport.group('name'))
+			menu.AppendMenu(win32con.MF_SEPARATOR);
+		menu.AppendMenu(flags, win32ui.ID_EDIT_UNDO, '&Undo')
+		menu.AppendMenu(win32con.MF_SEPARATOR);
+		menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, 'Cu&t')
+		menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, '&Copy')
+		menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, '&Paste')
+		menu.AppendMenu(flags, win32con.MF_SEPARATOR);
+		menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all')
+		menu.AppendMenu(flags, win32con.MF_SEPARATOR);
+		menu.AppendMenu(flags, ID_GOTO_LINE, '&Goto line...')
+		menu.TrackPopupMenu(params[5])
+		return 0
+
+	def OnCmdGotoLine(self, cmd, code):
+		self.GotoLine()
+		return 0
+
+	def OnCmdLocateFile(self, cmd, code):
+		modName = patImport.group('name')
+		if not modName:
+			return 0
+		import pywin.framework.scriptutils
+		fileName = pywin.framework.scriptutils.LocatePythonFile(modName)
+		if fileName is None:
+			win32ui.SetStatusText("Can't locate module %s" % modName)
+		else:
+			win32ui.GetApp().OpenDocumentFile(fileName)
+		return 0	
+
+	# Key handlers
+	def OnKeyEnter(self, key):
+		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
+		curLine = self._obj_.GetLine()
+		self._obj_.ReplaceSel('\r\n')	# insert the newline
+		# If the current line indicates the next should be indented,
+		# then copy the current indentation to this line.
+		res = patIndent.match(curLine,0)
+		if res>0 and string.strip(curLine):
+			curIndent = patIndent.group(1)
+			self._obj_.ReplaceSel(curIndent)
+		return 0	# dont pass on
+
+	def OnKeyCtrlY(self, key):
+		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
+		self.CutCurLine()
+		return 0	# dont let him have it!
+	def OnKeyCtrlG(self, key):
+		self.GotoLine()
+		return 0	# dont let him have it!
+	def OnKeyTab(self, key):
+		if not self.GetDocument().CheckMakeDocumentWritable(): return 0
+		start, end = self._obj_.GetSel()
+		if start==end:	# normal TAB key
+			self.Indent()
+			return 0 # we handled this.
+
+		# Otherwise it is a block indent/dedent.
+		if start>end:
+			start, end = end, start	# swap them.
+		startLine = self._obj_.LineFromChar(start)
+		endLine = self._obj_.LineFromChar(end)
+
+		self.BlockDent(win32api.GetKeyState(win32con.VK_SHIFT)>=0, startLine, endLine)
+		return 0
+
+	
+	def OnEditPaste(self, id, code):
+		# Return 1 if we can make the file editable.(or it already is!)
+		return self.GetDocument().CheckMakeDocumentWritable()
+
+	def OnEditCut(self, id, code):
+		# Return 1 if we can make the file editable.(or it already is!)
+		return self.GetDocument().CheckMakeDocumentWritable()
+
+	def OnKeyDown(self, msg):
+		key = msg[2]
+		if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000:
+			modList = MODIFYING_VK_KEYS_CTRL
+		elif win32api.GetKeyState(win32con.VK_MENU) & 0x8000:
+			modList = MODIFYING_VK_KEYS_ALT
+		else:
+			modList = MODIFYING_VK_KEYS
+
+		if key in modList:
+			# Return 1 if we can make the file editable.(or it already is!)
+			return self.GetDocument().CheckMakeDocumentWritable()
+		return 1 # Pass it on OK
+
+#	def OnKey(self, key):
+#		return self.GetDocument().CheckMakeDocumentWritable()
+
+	def OnCheckExternalDocumentUpdated(self, msg):
+		if self._obj_ is None or self.bCheckingFile: return
+		self.bCheckingFile = 1
+		self.GetDocument().CheckExternalDocumentUpdated()
+		self.bCheckingFile = 0
+
+from template import EditorTemplateBase
+class EditorTemplate(EditorTemplateBase):
+	# Doc/frame/view template for the RichEdit-based editor; supplies
+	# EditorDocument/EditorView as the defaults.
+	def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None):
+		if makeDoc is None: makeDoc = EditorDocument
+		if makeView is None: makeView = EditorView
+		EditorTemplateBase.__init__(self, res, makeDoc, makeFrame, makeView)
+
+	def _CreateDocTemplate(self, resourceId):
+		# Required override - see EditorTemplateBase._CreateDocTemplate.
+		return win32ui.CreateRichEditDocTemplate(resourceId)
+
+	def CreateWin32uiDocument(self):
+		# Required override - see EditorTemplateBase.CreateWin32uiDocument.
+		return self.DoCreateRichEditDoc()
+	
+def Create(fileName = None, title=None, template = None):
+	# Open 'fileName' in the module-level editorTemplate.
+	# NOTE(review): 'title' and 'template' are accepted but ignored.
+	return editorTemplate.OpenDocumentFile(fileName)
+
+from pywin.framework.editor import GetDefaultEditorModuleName
+prefModule = GetDefaultEditorModuleName()
+# Initialize only if this is the "default" editor.
+if __name__==prefModule:
+	# For debugging purposes, when this module may be reloaded many times.
+	# Remove any template registered by a previous import of this module.
+	try:
+		win32ui.GetApp().RemoveDocTemplate(editorTemplate)
+	except (NameError, win32ui.error):
+		pass
+
+	editorTemplate = EditorTemplate()
+	win32ui.GetApp().AddDocTemplate(editorTemplate)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/frame.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/frame.py
new file mode 100644
index 0000000..4313374
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/frame.py
@@ -0,0 +1,66 @@
+# frame.py - The MDI frame window for an editor.
+import pywin.framework.window
+import win32ui
+import win32con
+import afxres
+
+import ModuleBrowser
+
+class EditorFrame(pywin.framework.window.MDIChildWnd):
+    # MDI child frame for the editor: a static splitter with the module
+    # browser on the left and two stacked editor views on the right.
+    def OnCreateClient(self, cp, context):
+
+        # Create the default view as specified by the template (ie, the editor view)
+        view = context.template.MakeView(context.doc)
+        # Create the browser view.
+        browserView = ModuleBrowser.BrowserView(context.doc)
+        view2 = context.template.MakeView(context.doc)
+
+        splitter = win32ui.CreateSplitter()
+        style = win32con.WS_CHILD | win32con.WS_VISIBLE
+        splitter.CreateStatic (self, 1, 2, style, win32ui.AFX_IDW_PANE_FIRST)
+        sub_splitter = self.sub_splitter = win32ui.CreateSplitter()
+        sub_splitter.CreateStatic (splitter, 2, 1, style, win32ui.AFX_IDW_PANE_FIRST+1)
+
+        # Note we must add the default view first, so that doc.GetFirstView() returns the editor view.
+        sub_splitter.CreateView(view, 1, 0, (0,0)) 
+        splitter.CreateView (browserView, 0, 0, (0,0))
+        sub_splitter.CreateView(view2,0, 0, (0,0)) 
+
+##        print "First view is", context.doc.GetFirstView()
+##        print "Views are", view, view2, browserView
+##        print "Parents are", view.GetParent(), view2.GetParent(), browserView.GetParent()
+##        print "Splitter is", splitter
+##        print "sub splitter is", sub_splitter
+        ## Old 
+##        splitter.CreateStatic (self, 1, 2)
+##        splitter.CreateView(view, 0, 1, (0,0)) # size ignored.
+##        splitter.CreateView (browserView, 0, 0, (0, 0))
+
+        # Restrict the size of the browser splitter (and we can avoid filling
+        # it until it is shown)
+        splitter.SetColumnInfo(0, 10, 20)
+        # And the active view is our default view (so it gets initial focus)
+        self.SetActiveView(view)
+
+    def GetEditorView(self):
+        # In a multi-view (eg, splitter) environment, get
+        # an editor (ie, scintilla) view
+        # Look for the splitter opened the most!
+        if self.sub_splitter is None:
+            return self.GetDlgItem(win32ui.AFX_IDW_PANE_FIRST)
+        v1 = self.sub_splitter.GetPane(0,0)
+        v2 = self.sub_splitter.GetPane(1,0)
+        r1 = v1.GetWindowRect()
+        r2 = v2.GetWindowRect()
+        # Return whichever editor pane is taller (rect is (l, t, r, b)).
+        if r1[3]-r1[1] > r2[3]-r2[1]:
+            return v1
+        return v2
+
+    def GetBrowserView(self):
+        # XXX - should fix this :-)
+        # Assumes the browser view is always the document's second view.
+        return self.GetActiveDocument().GetAllViews()[1]
+    def OnClose(self):
+        # Must force the module browser to close itself here (OnDestroy for the view itself is too late!)
+        self.sub_splitter = None # ensure no circles!
+        self.GetBrowserView().DestroyBrowser()
+        return self._obj_.OnClose()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/template.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/template.py
new file mode 100644
index 0000000..42ebacf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/template.py
@@ -0,0 +1,50 @@
+import string
+import win32ui
+import win32api
+from pywin.mfc import docview
+import pywin.framework.window
+import os
+import frame
+
+ParentEditorTemplate=docview.DocTemplate
+class EditorTemplateBase(ParentEditorTemplate):
+	# Base doc-template for editors; subclasses must override
+	# _CreateDocTemplate and CreateWin32uiDocument.
+	def __init__(self, res=win32ui.IDR_TEXTTYPE, makeDoc=None, makeFrame=None, makeView=None):
+		if makeFrame is None: makeFrame = frame.EditorFrame
+		ParentEditorTemplate.__init__(self, res, makeDoc, makeFrame, makeView)
+
+	def _CreateDocTemplate(self, resourceId):
+		assert 0, "You must override this"
+	def CreateWin32uiDocument(self):
+		assert 0, "You must override this"
+	def GetFileExtensions(self):
+		# Extensions (lowercase, with dot) this template claims natively.
+		return ".txt", ".py"
+	def MatchDocType(self, fileName, fileType):
+		# Return an already-open document, or an MFC confidence value.
+		doc = self.FindOpenDocument(fileName)
+		if doc: return doc
+		ext = string.lower(os.path.splitext(fileName)[1])
+		if ext in self.GetFileExtensions():
+			return win32ui.CDocTemplate_Confidence_yesAttemptNative
+		return win32ui.CDocTemplate_Confidence_maybeAttemptForeign
+
+	def InitialUpdateFrame(self, frame, doc, makeVisible=1):
+		# NOTE(review): parameter 'frame' shadows the module import of the
+		# same name - harmless here, as the module is not used in this method.
+		self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler.
+		doc._UpdateUIForState()
+		
+	def GetPythonPropertyPages(self):
+		"""Returns a list of property pages
+		"""
+		import configui
+		return [configui.EditorPropertyPage(), configui.EditorWhitespacePropertyPage()]
+
+	def OpenDocumentFile(self, filename, bMakeVisible = 1):
+		# Normalize the filename's case/long-name via FindFiles before opening
+		# (field 8 of the WIN32_FIND_DATA tuple is the file name).
+		if filename is not None:
+			try:
+				path = os.path.split(filename)[0]
+#				print "The editor is translating", `filename`,"to",
+				filename = win32api.FindFiles(filename)[0][8]
+				filename = os.path.join(path, filename)
+#				print `filename`
+			except (win32api.error, IndexError), details:
+				# Best effort - fall back to the name as given.
+				pass
+#				print "Couldnt get the full filename!", details
+		return self._obj_.OpenDocumentFile(filename, bMakeVisible)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/vss.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/vss.py
new file mode 100644
index 0000000..28a895e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/editor/vss.py
@@ -0,0 +1,95 @@
+# vss.py -- Source Control using Microsoft VSS.
+
+# Provides routines for checking files out of VSS.
+#
+# Uses an INI file very similar to how VB integrates with VSS - even
+# as far as using the same name.
+
+# The file must be named "Mssccprj.scc", and be in the format of
+# an INI file.  This file may be in a parent directory, in which
+# case the project name will be built from what is specified in the
+# ini file, plus the path from the INI file to the file itself.
+#
+# The INI file should have a [Python] section, and a
+# Project=Project Name
+#  and optionally
+# Database=??
+
+
+import win32ui, win32api, win32con, os, string, sys
+
+import traceback
+
+g_iniName = "Mssccprj.scc" # Use the same INI name as VB!
+
+# Cached SourceSafe COM object, created on first checkout - see CheckoutFile.
+g_sourceSafe = None
+
+def FindVssProjectInfo(fullfname):
+	"""Looks up the file system for an INI file describing the project.
+	
+	Looking up the tree is for ni style packages.
+	
+	Returns (projectName, pathToFileName, database) where pathToFileName
+	contains the "/"-joined path from the ini file to the actual file, and
+	database is the INI file's "Database" value (may be "").
+	Returns None (after showing a message box) if no INI file is found.
+	"""
+	path, fnameonly = os.path.split(fullfname)
+	origPath = path
+	project = ""
+	retPaths = [fnameonly]
+	while not project:
+		iniName = os.path.join(path, g_iniName)
+		database = win32api.GetProfileVal("Python","Database", "", iniName)
+		project = win32api.GetProfileVal("Python","Project", "", iniName)
+		if project:
+			break;
+		# No valid INI file in this directory - look up a level.
+		path, addpath = os.path.split(path)
+		if not addpath: # Root?
+			break
+		retPaths.insert(0, addpath)
+	if not project:
+		win32ui.MessageBox("%s\r\n\r\nThis directory is not configured for Python/VSS" % origPath)
+		return
+	return project, string.join(retPaths, "/"), database
+		
+	
+def CheckoutFile(fileName):
+	"""Check 'fileName' out of VSS.  Returns 1 on success, else 0 (or None
+	when the project INI file can not be found)."""
+	global g_sourceSafe
+	import pythoncom
+	ok = 0
+	# Assumes the fileName has a complete path,
+	# and that the INI file can be found in that path
+	# (or a parent path if a ni style package)
+	try:
+		import win32com.client, win32com.client.gencache
+		# Ensure the generated support for the VSS type library exists.
+		mod = win32com.client.gencache.EnsureModule('{783CD4E0-9D54-11CF-B8EE-00608CC9A71F}', 0, 5, 0)
+		if mod is None:
+			win32ui.MessageBox("VSS does not appear to be installed.  The TypeInfo can not be created")
+			return ok
+
+		rc = FindVssProjectInfo(fileName)
+		if rc is None:
+			return
+		project, vssFname, database = rc
+		if g_sourceSafe is None:
+			g_sourceSafe=win32com.client.Dispatch("SourceSafe")
+			# SS seems a bit wierd.  It defaults the arguments as empty strings, but
+			# then complains when they are used - so we pass "Missing"
+			if not database:
+				database = pythoncom.Missing
+			g_sourceSafe.Open(database, pythoncom.Missing, pythoncom.Missing)
+		item = g_sourceSafe.VSSItem("$/%s/%s" % (project, vssFname))
+		item.Checkout(None, fileName)
+		ok = 1
+	except pythoncom.com_error, (hr, msg, exc, arg):
+		if exc:
+			msg = exc[2]
+		win32ui.MessageBox(msg, "Error checking out file")
+	except:
+		# Deliberate catch-all: report any unexpected error, never propagate.
+		typ, val, tb = sys.exc_info()
+		traceback.print_exc()
+		win32ui.MessageBox("%s - %s" % (str(typ), str(val)),"Error checking out file")
+		tb = None # Cleanup a cycle
+	return ok
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/help.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/help.py
new file mode 100644
index 0000000..7daf271c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/help.py
@@ -0,0 +1,147 @@
+ # help.py - help utilities for PythonWin.
+import win32api
+import win32con
+import win32ui
+import string
+import sys
+import regutil
+import string, os
+
+# Cookie returned by HtmlHelp HH_INITIALIZE; None until initialized.
+htmlhelp_handle = None
+
+# Translate WinHelp command constants into HtmlHelp command values.
+html_help_command_translators = {
+	win32con.HELP_CONTENTS : 1, # HH_DISPLAY_TOC
+	win32con.HELP_CONTEXT : 15, # HH_HELP_CONTEXT
+	win32con.HELP_FINDER : 1, # HH_DISPLAY_TOC
+}
+
+def FinalizeHelp():
+	# Shut down the htmlhelp subsystem if OpenHelpFile ever initialized it.
+	global htmlhelp_handle
+	if htmlhelp_handle is not None:
+			import win32help
+			try:
+				#frame = win32ui.GetMainFrame().GetSafeHwnd()
+				frame = 0
+				win32help.HtmlHelp(frame, None, win32help.HH_UNINITIALIZE, htmlhelp_handle)
+			except win32help.error:
+				print "Failed to finalize htmlhelp!"
+			htmlhelp_handle = None
+			
+def OpenHelpFile(fileName, helpCmd = None, helpArg = None):
+	"Open a help file, given a full path"
+	# Dispatch on extension: .hlp via WinHelp, anything else via the shell.
+	# default help arg.
+	win32ui.DoWaitCursor(1)
+	try:
+		if helpCmd is None: helpCmd = win32con.HELP_CONTENTS
+		ext = string.lower(os.path.splitext(fileName)[1])
+		if ext == ".hlp":
+			win32api.WinHelp( win32ui.GetMainFrame().GetSafeHwnd(), fileName, helpCmd, helpArg)
+		# XXX - using the htmlhelp API wreaks havoc with keyboard shortcuts
+		# so we disable it, forcing ShellExecute, which works fine (but
+		# doesn't close the help file when Pythonwin is closed.
+		# Tom Heller also points out http://www.microsoft.com/mind/0499/faq/faq0499.asp,
+		# which may or may not be related.
+		elif 0 and ext == ".chm":
+			# NOTE: "0 and" deliberately disables this branch - see XXX above.
+			import win32help
+			global htmlhelp_handle
+			helpCmd = html_help_command_translators.get(helpCmd, helpCmd)
+			#frame = win32ui.GetMainFrame().GetSafeHwnd()
+			frame = 0 # Dont want it overlapping ours!
+			if htmlhelp_handle is None:
+				htmlhelp_hwnd, htmlhelp_handle = win32help.HtmlHelp(frame, None, win32help.HH_INITIALIZE)
+			win32help.HtmlHelp(frame, fileName, helpCmd, helpArg)
+		else:
+			# Hope that the extension is registered, and we know what to do!
+			win32api.ShellExecute(0, "open", fileName, None, "", win32con.SW_SHOW)
+		return fileName
+	finally:
+		win32ui.DoWaitCursor(-1)
+
+def ListAllHelpFiles():
+	ret = []
+	ret = _ListAllHelpFilesInRoot(win32con.HKEY_LOCAL_MACHINE)
+	# Ensure we don't get dups.
+	for item in _ListAllHelpFilesInRoot(win32con.HKEY_CURRENT_USER):
+		if item not in ret:
+			ret.append(item)
+	return ret
+
+def _ListAllHelpFilesInRoot(root):
+	"""Returns a list of (helpDesc, helpFname) for all registered help files
+	under the given registry root (eg HKEY_LOCAL_MACHINE).
+	"""
+	import regutil
+	retList = []
+	try:
+		key = win32api.RegOpenKey(root, regutil.BuildDefaultPythonKey() + "\\Help", 0, win32con.KEY_READ)
+	except win32api.error, (code, fn, details):
+		import winerror
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, details)
+		# No "Help" key at all - nothing registered under this root.
+		return retList
+	try:
+		keyNo = 0
+		while 1:
+			try:
+				# Each subkey name is the description; its default value is the file.
+				helpDesc = win32api.RegEnumKey(key, keyNo)
+				helpFile = win32api.RegQueryValue(key, helpDesc)
+				retList.append((helpDesc, helpFile))
+				keyNo = keyNo + 1
+			except win32api.error, (code, fn, desc):
+				import winerror
+				if code!=winerror.ERROR_NO_MORE_ITEMS:
+					raise win32api.error, (code, fn, desc)
+				break
+	finally:
+		win32api.RegCloseKey(key)
+	return retList
+
+def SelectAndRunHelpFile():
+	# Let the user pick a registered help file from a list dialog, then open it.
+	from pywin.dialogs import list
+	helpFiles = ListAllHelpFiles()
+	index = list.SelectFromLists("Select Help file", helpFiles, ["Title"])
+	if index is not None:
+		OpenHelpFile(helpFiles[index][1])
+
+
+# Maps command ID -> (helpDesc, helpFile); lazily built by SetHelpMenuOtherHelp.
+helpIDMap = None
+
+def SetHelpMenuOtherHelp(mainMenu):
+	"""Modifies the main Help Menu to handle all registered help files.
+	   mainMenu -- The main menu to modify - usually from docTemplate.GetSharedMenu()
+	"""
+
+	# Load all help files from the registry.
+	global helpIDMap
+	if helpIDMap is None:
+		helpIDMap = {}
+		cmdID = win32ui.ID_HELP_OTHER
+		excludeList = ['Main Python Documentation', 'Pythonwin Reference']
+		firstList = ListAllHelpFiles()
+		# We actually want to not only exclude these entries, but
+		# their help file names (as many entries may share the same name)
+		excludeFnames = []
+		for desc, fname in firstList:
+			if desc in excludeList:
+				excludeFnames.append(fname)
+
+		# NOTE(review): helpDescs is assigned but never used.
+		helpDescs = []
+		for desc, fname in firstList:
+			if fname not in excludeFnames:
+				helpIDMap[cmdID] = (desc, fname)
+				win32ui.GetMainFrame().HookCommand(HandleHelpOtherCommand, cmdID)
+				cmdID = cmdID + 1
+
+	helpMenu = mainMenu.GetSubMenu(mainMenu.GetMenuItemCount()-1) # Help menu always last.
+	otherHelpMenuPos = 2 # cant search for ID, as sub-menu has no ID.
+	otherMenu = helpMenu.GetSubMenu(otherHelpMenuPos)
+	# Rebuild the "other help" sub-menu from scratch each call.
+	while otherMenu.GetMenuItemCount():
+		otherMenu.DeleteMenu(0, win32con.MF_BYPOSITION)
+	
+	if helpIDMap:
+		for id, (desc, fname) in helpIDMap.items():
+			otherMenu.AppendMenu(win32con.MF_ENABLED|win32con.MF_STRING,id, desc)
+	else:
+		# Nothing registered - grey out the sub-menu.
+		helpMenu.EnableMenuItem(otherHelpMenuPos, win32con.MF_BYPOSITION | win32con.MF_GRAYED)
+		
+def HandleHelpOtherCommand(cmd, code):
+	# Menu handler - open the help file registered for this command ID.
+	OpenHelpFile(helpIDMap[cmd][1])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/interact.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/interact.py
new file mode 100644
index 0000000..8ee7d52
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/interact.py
@@ -0,0 +1,807 @@
+
+##################################################################
+##
+## Interactive Shell Window
+##
+
+import sys
+import code
+import string
+
+import win32ui
+import win32api
+import win32con
+import traceback
+import afxres
+import array
+import __main__
+
+import pywin.scintilla.formatter
+import pywin.scintilla.control
+import pywin.scintilla.IDLEenvironment
+import pywin.framework.app
+
+trace=pywin.scintilla.formatter.trace
+
+import winout
+
+import re
+# from IDLE.
+_is_block_opener = re.compile(r":\s*(#.*)?$").search
+_is_block_closer = re.compile(r"""
+    \s*
+    ( return
+    | break
+    | continue
+    | raise
+    | pass
+    )
+    \b
+""", re.VERBOSE).match
+
# Prefix written by showsyntaxerror/showtraceback; the colorizer keys on it
# to switch into the "error" style.
tracebackHeader = "Traceback ("

# Profile (registry) section and value names for persisted preferences.
sectionProfile = "Interactive Window"
valueFormatTitle = "FormatTitle"
valueFormatInput = "FormatInput"
valueFormatOutput = "FormatOutput"
valueFormatOutputError = "FormatOutputError"

# These are defaults only.  Values are read from the registry.
formatTitle = (-536870897, 0, 220, 0, 16711680, 184, 34, 'Arial')
formatInput =  (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New')
formatOutput =  (-402653169, 0, 200, 0, 8421376, 0, 49, 'Courier New')
formatOutputError = (-402653169, 0, 200, 0, 255, 0, 49, 'Courier New')

# couple of exceptions defined for this module
excNoValidCommand = 'No Valid Command'
excEmptyCommand = 'Empty Command'
excContinueCommand = 'Continue Command'

# sys.ps1/ps2 only exist in interactive sessions - ensure they are set
# before the prompt-handling code below relies on them.
try:
	sys.ps1
except AttributeError:
	sys.ps1 = '>>> '
	sys.ps2 = '... '
+
def LoadPreference(preference, default = ""):
	"""Read one Interactive Window preference from the application profile."""
	section = sectionProfile
	value = win32ui.GetProfileVal(section, preference, default)
	return value
+
def SavePreference( prefName, prefValue ):
	"""Persist one Interactive Window preference into the application profile."""
	section = sectionProfile
	win32ui.WriteProfileVal(section, prefName, prefValue)
+
def GetPromptPrefix(line):
	"""Return the Python prompt (sys.ps1 or sys.ps2) that *line* starts
	with, or None if the line carries no prompt.
	"""
	for prompt in (sys.ps1, sys.ps2):
		if line[:len(prompt)] == prompt:
			return prompt
	return None
+
+#############################################################
+#
+# Colorizer related code.
+#
+#############################################################
+STYLE_INTERACTIVE_EOL = "Interactive EOL"
+STYLE_INTERACTIVE_OUTPUT = "Interactive Output"
+STYLE_INTERACTIVE_PROMPT = "Interactive Prompt"
+STYLE_INTERACTIVE_BANNER = "Interactive Banner"
+STYLE_INTERACTIVE_ERROR = "Interactive Error"
+STYLE_INTERACTIVE_ERROR_FINALLINE = "Interactive Error (final line)"
+
+INTERACTIVE_STYLES = [STYLE_INTERACTIVE_EOL, STYLE_INTERACTIVE_OUTPUT, STYLE_INTERACTIVE_PROMPT, STYLE_INTERACTIVE_BANNER, STYLE_INTERACTIVE_ERROR, STYLE_INTERACTIVE_ERROR_FINALLINE]
+
FormatterParent = pywin.scintilla.formatter.PythonSourceFormatter
class InteractiveFormatter(FormatterParent):
	"""Colorizer for the interactive window.

	Extends the standard Python source formatter with styles for prompts,
	program output, the startup banner and tracebacks.  Non-code lines are
	colored directly by a per-character state machine; code lines are
	delegated to the inherited Python colorizer.
	"""
	def __init__(self, scintilla):
		FormatterParent.__init__(self, scintilla)

	def SetStyles(self):
		# Register the interactive-only styles in addition to the
		# standard Python source styles from the parent.
		FormatterParent.SetStyles(self)
		Style = pywin.scintilla.formatter.Style
		self.RegisterStyle( Style(STYLE_INTERACTIVE_EOL, STYLE_INTERACTIVE_PROMPT ) )
		self.RegisterStyle( Style(STYLE_INTERACTIVE_PROMPT, formatInput ) )
		self.RegisterStyle( Style(STYLE_INTERACTIVE_OUTPUT, formatOutput) )
		self.RegisterStyle( Style(STYLE_INTERACTIVE_BANNER, formatTitle ) )
		self.RegisterStyle( Style(STYLE_INTERACTIVE_ERROR, formatOutputError ) )
		self.RegisterStyle( Style(STYLE_INTERACTIVE_ERROR_FINALLINE, STYLE_INTERACTIVE_ERROR ) )

	def LoadPreference(self, name, default):
		# Look in the shared "Format" section first, falling back to the
		# Interactive Window's own section.
		rc = win32ui.GetProfileVal("Format", name, default)
		if rc==default:
			rc = win32ui.GetProfileVal(sectionProfile, name, default)
		return rc

	def ColorizeInteractiveCode(self, cdoc, styleStart, stylePyStart):
		"""Color the text *cdoc* via a line-oriented state machine.

		styleStart   -- interactive style in effect at the first character.
		stylePyStart -- Python style to resume when a prompt introduces code.
		"""
		lengthDoc = len(cdoc)
		if lengthDoc == 0: return
		state = styleStart
		chNext = cdoc[0]
		startSeg = 0
		i = 0
		lastState=state # debug only
		while i < lengthDoc:
			ch = chNext
			chNext = cdoc[i+1:i+2]
			
#			trace("ch=%r, i=%d, next=%r, state=%s" % (ch, i, chNext, state))
			if state == STYLE_INTERACTIVE_EOL:
				# Start of a new line - decide what kind of line it is.
				if ch not in '\r\n':
					self.ColorSeg(startSeg, i-1, state)
					startSeg = i
					if ch in [sys.ps1[0], sys.ps2[0]]:
						state = STYLE_INTERACTIVE_PROMPT
					elif cdoc[i:i+len(tracebackHeader)]==tracebackHeader:
						state = STYLE_INTERACTIVE_ERROR
					else:
						state = STYLE_INTERACTIVE_OUTPUT
			elif state == STYLE_INTERACTIVE_PROMPT:
				# Consume prompt characters; the rest of the line is code.
				if ch not in sys.ps1 + sys.ps2 + " ":
					self.ColorSeg(startSeg, i-1, state)
					startSeg = i
					if ch in '\r\n':
						state = STYLE_INTERACTIVE_EOL
					else:
						state = stylePyStart # Start coloring Python code.
			elif state in [STYLE_INTERACTIVE_OUTPUT]:
				if ch in '\r\n':
					self.ColorSeg(startSeg, i-1, state)
					startSeg = i
					state = STYLE_INTERACTIVE_EOL
			elif state == STYLE_INTERACTIVE_ERROR:
				# A traceback body; its last line (the exception itself)
				# gets the distinct FINALLINE style.
				if ch in '\r\n' and chNext and chNext not in string.whitespace:
					# Everything including me
					self.ColorSeg(startSeg, i, state)
					startSeg = i+1
					state = STYLE_INTERACTIVE_ERROR_FINALLINE
				elif i == 0 and ch not in string.whitespace:
					# If we are coloring from the start of a line,
					# we need this better check for the last line
					# Color up to not including me
					self.ColorSeg(startSeg, i-1, state)
					startSeg = i
					state = STYLE_INTERACTIVE_ERROR_FINALLINE
			elif state == STYLE_INTERACTIVE_ERROR_FINALLINE:
				if ch in '\r\n':
					self.ColorSeg(startSeg, i-1, state)
					startSeg = i
					state = STYLE_INTERACTIVE_EOL
			elif state == STYLE_INTERACTIVE_BANNER:
				# Banner runs until a line whose follower looks like a
				# prompt ('>') or traceback start ('[').
				if ch in '\r\n' and (chNext=='' or chNext in ">["):
					# Everything including me
					self.ColorSeg(startSeg, i-1, state)
					startSeg = i
					state = STYLE_INTERACTIVE_EOL
			else:
				# It is a PythonColorizer state - seek past the end of the line
				# and ask the Python colorizer to color that.
				end = startSeg
				while end < lengthDoc and cdoc[end] not in '\r\n':
					end = end + 1
				self.ColorizePythonCode( cdoc[:end], startSeg, state)
				stylePyStart = self.GetStringStyle(end-1)
				if stylePyStart is None:
					stylePyStart = pywin.scintilla.formatter.STYLE_DEFAULT
				else:
					stylePyStart = stylePyStart.name
				startSeg =end
				i = end - 1 # ready for increment.
				chNext = cdoc[end:end+1]
				state = STYLE_INTERACTIVE_EOL
			if lastState != state:
				lastState = state
			i = i + 1
		# and the rest
		if startSeg<i:
			self.ColorSeg(startSeg, i-1, state)

	def Colorize(self, start=0, end=-1):
		"""Entry point called by scintilla - (re)color the range [start, end).

		Determines the style in effect just before *start* (including
		whether an unterminated triple-quoted string carries over) and
		runs the interactive state machine from there.
		"""
		stringVal = self.scintilla.GetTextRange(start, end)
		styleStart = None
		stylePyStart = None
		if start > 1:
			# Likely we are being asked to color from the start of the line.
			# We find the last formatted character on the previous line.
			# If TQString, we continue it.  Otherwise, we reset.
			look = start -1
			while look and self.scintilla.SCIGetCharAt(look) in '\n\r':
				look = look - 1
			if look and look < start-1: # Did we find a char before the \n\r sets?
				strstyle = self.GetStringStyle(look)
				quote_char = None
				if strstyle is not None:
					if strstyle.name == pywin.scintilla.formatter.STYLE_TQSSTRING:
						quote_char = "'"
					elif strstyle.name == pywin.scintilla.formatter.STYLE_TQDSTRING:
						quote_char = '"'
					if quote_char is not None:
						# It is a TQS.  If the TQS is not terminated, we
						# carry the style through.
						if look > 2:
							look_str = self.scintilla.SCIGetCharAt(look-2) + self.scintilla.SCIGetCharAt(look-1) + self.scintilla.SCIGetCharAt(look)
							if look_str != quote_char * 3:
								stylePyStart = strstyle.name
		if stylePyStart is None: stylePyStart =  pywin.scintilla.formatter.STYLE_DEFAULT

		if start > 0:
			stylenum = self.scintilla.SCIGetStyleAt(start - 1)
			styleStart = self.GetStyleByNum(stylenum).name
		else:
			# Start of document - it begins with the banner.
			styleStart = STYLE_INTERACTIVE_BANNER
		self.scintilla.SCIStartStyling(start, 31)
		self.style_buffer = array.array("c", chr(0)*len(stringVal))
		self.ColorizeInteractiveCode(stringVal, styleStart, stylePyStart)
		self.scintilla.SCISetStylingEx(self.style_buffer)
		self.style_buffer = None
+
+###############################################################
+#
+# This class handles the Python interactive interpreter.
+#
+# It uses a basic EditWindow, and does all the magic.
+# This is triggered by the enter key hander attached by the
+# start-up code.  It determines if a command is to be executed
+# or continued (ie, emit "... ") by snooping around the current
+# line, looking for the prompts
+#
class PythonwinInteractiveInterpreter(code.InteractiveInterpreter):
	"""InteractiveInterpreter supporting distinct global and local
	namespaces (used when the debugger points the window at a frame).
	"""
	def __init__(self, locals = None, globals = None):
		# Default both namespaces to __main__ so the interactive window
		# shares state with the rest of the application.
		if locals is None: locals = __main__.__dict__
		if globals is None: globals = locals
		self.globals = globals
		code.InteractiveInterpreter.__init__(self, locals)
	def showsyntaxerror(self, filename=None):
		sys.stderr.write(tracebackHeader) # So the color syntaxer recognises it.
		code.InteractiveInterpreter.showsyntaxerror(self, filename)
	def runcode(self, code):
		# Override so exec uses our globals/locals pair rather than the
		# single namespace the base class supports.
		try:
			exec code in self.globals, self.locals
		except SystemExit:
			raise
		except:
			self.showtraceback()
+
+class InteractiveCore:
+	def __init__(self, banner = None):
+		self.banner = banner
+#		LoadFontPreferences()
+	def Init(self):
+		self.oldStdOut = self.oldStdErr = None
+
+#		self.SetWordWrap(win32ui.CRichEditView_WrapNone)
+		self.interp = PythonwinInteractiveInterpreter()
+
+		self.OutputGrab()	# Release at cleanup.
+
+		if self.GetTextLength()==0:
+			if self.banner is None:
+				suffix = ""
+				if win32ui.debug: suffix = ", debug build"
+				sys.stderr.write("PythonWin %s on %s%s.\n" % (sys.version, sys.platform, suffix) )
+				sys.stderr.write("Portions %s - see 'Help/About PythonWin' for further copyright information.\n" % (win32ui.copyright,) )
+			else:
+				sys.stderr.write(banner)
+		self.AppendToPrompt([])
+
+	def SetContext(self, globals, locals, name = "Dbg"):
+		oldPrompt = sys.ps1
+		if globals is None:
+			# Reset
+			sys.ps1 = ">>> "
+			sys.ps2 = "... "
+			locals = globals = __main__.__dict__
+		else:
+			sys.ps1 = "[%s]>>> " % name
+			sys.ps2 = "[%s]... " % name
+		self.interp.locals = locals
+		self.interp.globals = globals
+		self.AppendToPrompt([], oldPrompt)
+
+	def GetContext(self):
+		return self.interp.globals, self.interp.locals
+
+	def DoGetLine(self, line=-1):
+		if line==-1: line = self.LineFromChar()
+		line = self.GetLine(line)
+		if pywin.is_platform_unicode:
+			try:
+				line = unicode(line, pywin.default_scintilla_encoding).encode(pywin.default_platform_encoding)
+			except:
+				# We should fix the underlying problem rather than always masking errors
+				# so make it complain.
+				print "Unicode error converting", repr(line)
+				line = unicode(line, pywin.default_scintilla_encoding, "ignore").encode(pywin.default_platform_encoding)
+
+		while line and line[-1] in ['\r', '\n']:
+			line = line[:-1]
+		return line
+	def AppendToPrompt(self,bufLines, oldPrompt = None):
+		" Take a command and stick it at the end of the buffer (with python prompts inserted if required)."
+		self.flush()
+		lastLineNo = self.GetLineCount()-1
+		line = self.DoGetLine(lastLineNo)
+		if oldPrompt and line==oldPrompt:
+			self.SetSel(self.GetTextLength()-len(oldPrompt), self.GetTextLength())
+			self.ReplaceSel(sys.ps1)
+		elif (line!=str(sys.ps1)):
+			if len(line)!=0: self.write('\n')
+			self.write(sys.ps1)
+		self.flush()
+		self.idle.text.mark_set("iomark", "end-1c")
+		if not bufLines:
+			return
+		terms = (["\n" + sys.ps2] * (len(bufLines)-1)) + ['']
+		for bufLine, term in map(None, bufLines, terms):
+			if string.strip(bufLine):
+				self.write( bufLine + term )
+		self.flush()
+
+	def EnsureNoPrompt(self):
+		# Get ready to write some text NOT at a Python prompt.
+		self.flush()
+		lastLineNo = self.GetLineCount()-1
+		line = self.DoGetLine(lastLineNo)
+		if not line or line in [sys.ps1, sys.ps2]:
+			self.SetSel(self.GetTextLength()-len(line), self.GetTextLength())
+			self.ReplaceSel('')
+		else:
+			# Just add a new line.
+			self.write('\n')
+		
+	def _GetSubConfigNames(self):
+		return ["interactive"] # Allow [Keys:Interactive] sections to be specific
+
+	def HookHandlers(self):
+		# Hook menu command (executed when a menu item with that ID is selected from a menu/toolbar
+		self.HookCommand(self.OnSelectBlock, win32ui.ID_EDIT_SELECT_BLOCK)
+		mod = pywin.scintilla.IDLEenvironment.GetIDLEModule("IdleHistory")
+		if mod is not None:
+			self.history = mod.History(self.idle.text, "\n" + sys.ps2)
+		else:
+			self.history = None
+		# hack for now for event handling.
+
+	# GetBlockBoundary takes a line number, and will return the
+	# start and and line numbers of the block, and a flag indicating if the
+	# block is a Python code block.
+	# If the line specified has a Python prompt, then the lines are parsed
+	# backwards and forwards, and the flag is true.
+	# If the line does not start with a prompt, the block is searched forward
+	# and backward until a prompt _is_ found, and all lines in between without
+	# prompts are returned, and the flag is false.
+	def GetBlockBoundary( self, lineNo ):
+		line = self.DoGetLine(lineNo)
+		maxLineNo = self.GetLineCount()-1
+		prefix = GetPromptPrefix(line)
+		if prefix is None:        # Non code block
+			flag = 0
+			startLineNo = lineNo
+			while startLineNo>0:
+				if GetPromptPrefix(self.DoGetLine(startLineNo-1)) is not None:
+					break                     # there _is_ a prompt
+				startLineNo = startLineNo-1
+			endLineNo = lineNo
+			while endLineNo<maxLineNo:
+				if GetPromptPrefix(self.DoGetLine(endLineNo+1)) is not None:
+					break                     # there _is_ a prompt
+				endLineNo = endLineNo+1
+		else:                                    # Code block
+			flag = 1
+			startLineNo = lineNo
+			while startLineNo>0 and prefix!=str(sys.ps1):
+				prefix = GetPromptPrefix(self.DoGetLine(startLineNo-1))
+				if prefix is None:
+					break;	# there is no prompt.
+				startLineNo = startLineNo - 1
+			endLineNo = lineNo
+			while endLineNo<maxLineNo:
+				prefix = GetPromptPrefix(self.DoGetLine(endLineNo+1))
+				if prefix is None:
+					break	# there is no prompt
+				if prefix==str(sys.ps1):
+					break	# this is another command
+				endLineNo = endLineNo+1
+				# continue until end of buffer, or no prompt
+		return (startLineNo, endLineNo, flag)
+
+	def ExtractCommand( self, lines ):
+		start, end = lines
+		retList = []
+		while end >= start:
+			thisLine = self.DoGetLine(end)
+			promptLen = len(GetPromptPrefix(thisLine))
+			retList = [thisLine[promptLen:]] + retList
+			end = end-1
+		return retList
+
+	def OutputGrab(self):
+#		import win32traceutil; return
+		self.oldStdOut = sys.stdout
+		self.oldStdErr = sys.stderr
+		sys.stdout=self
+		sys.stderr=self
+		self.flush()
+
+	def OutputRelease(self):
+		# a command may have overwritten these - only restore if not.
+		if self.oldStdOut is not None:
+			if sys.stdout == self:
+				sys.stdout=self.oldStdOut
+		if self.oldStdErr is not None:
+			if sys.stderr == self:
+				sys.stderr=self.oldStdErr
+		self.oldStdOut = None
+		self.oldStdErr = None
+		self.flush()
+
+	###################################
+	#
+	# Message/Command/Key Hooks.
+	#
+	# Enter key handler
+	#
+	def ProcessEnterEvent(self, event ):
+		self.SCICancel()
+		# First, check for an error message
+		haveGrabbedOutput = 0
+		if self.HandleSpecialLine(): return 0
+
+		lineNo = self.LineFromChar()
+		start, end, isCode = self.GetBlockBoundary(lineNo)
+		# If we are not in a code block just go to the prompt (or create a new one)
+		if not isCode:
+			self.AppendToPrompt([])
+			win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE))
+			return
+
+		lines = self.ExtractCommand((start,end))
+
+		# If we are in a code-block, but it isnt at the end of the buffer
+		# then copy it to the end ready for editing and subsequent execution
+		if end!=self.GetLineCount()-1:
+			win32ui.SetStatusText('Press ENTER to execute command')
+			self.AppendToPrompt(lines)
+			self.SetSel(-2)
+			return
+
+		# If SHIFT held down, we want new code here and now!
+		bNeedIndent = win32api.GetKeyState(win32con.VK_SHIFT)<0 or win32api.GetKeyState(win32con.VK_CONTROL)<0
+		if bNeedIndent:
+			self.ReplaceSel("\n")
+		else:
+			self.SetSel(-2)
+			self.ReplaceSel("\n")
+			source = string.join(lines, '\n')
+			while source and source[-1] in '\t ':
+				source = source[:-1]
+			self.OutputGrab()	# grab the output for the command exec.
+			try:
+				if self.interp.runsource(source, "<interactive input>"): # Need more input!
+					bNeedIndent = 1
+				else:
+					# If the last line isnt empty, append a newline
+					if self.history is not None:
+						self.history.history_store(source)
+					self.AppendToPrompt([])
+					win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE))
+#					win32ui.SetStatusText('Successfully executed statement')
+			finally:
+				self.OutputRelease()
+		if bNeedIndent:
+			win32ui.SetStatusText('Ready to continue the command')
+			# Now attempt correct indentation (should use IDLE?)
+			curLine = self.DoGetLine(lineNo)[len(sys.ps2):]
+			pos = 0
+			indent=''
+			while len(curLine)>pos and curLine[pos] in string.whitespace:
+				indent = indent + curLine[pos]
+				pos = pos + 1
+			if _is_block_opener(curLine):
+				indent = indent + '\t'
+			elif _is_block_closer(curLine):
+				indent = indent[:-1]
+			# use ReplaceSel to ensure it goes at the cursor rather than end of buffer.
+			self.ReplaceSel(sys.ps2+indent)
+		return 0
+
+	# ESC key handler
+	def ProcessEscEvent(self, event):
+		# Implement a cancel.
+		if self.SCIAutoCActive() or self.SCICallTipActive():
+			self.SCICancel()
+		else:
+			win32ui.SetStatusText('Cancelled.')
+			self.AppendToPrompt(('',))
+		return 0
+
+	def OnSelectBlock(self,command, code):
+		lineNo = self.LineFromChar()
+		start, end, isCode = self.GetBlockBoundary(lineNo)
+		startIndex = self.LineIndex(start)
+		endIndex = self.LineIndex(end+1)-2	# skip \r + \n
+		if endIndex<0:	# must be beyond end of buffer
+			endIndex = -2	# self.Length()
+		self.SetSel(startIndex,endIndex)
+
+	def GetRightMenuItems(self):
+		# Just override parents
+		ret = []
+		flags = 0
+		ret.append((flags, win32ui.ID_EDIT_UNDO, '&Undo'))
+		ret.append(win32con.MF_SEPARATOR)
+		ret.append((flags, win32ui.ID_EDIT_CUT, 'Cu&t'))
+		ret.append((flags, win32ui.ID_EDIT_COPY, '&Copy'))
+		ret.append((flags, win32ui.ID_EDIT_PASTE, '&Paste'))
+		ret.append(win32con.MF_SEPARATOR)
+		ret.append((flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all'))
+		ret.append((flags, win32ui.ID_EDIT_SELECT_BLOCK, 'Select &block'))
+		ret.append((flags, win32ui.ID_VIEW_WHITESPACE, "View &Whitespace"))
+		return ret
+
+	def MDINextEvent(self, event):
+		win32ui.GetMainFrame().MDINext(0)
+	def MDIPrevEvent(self, event):
+		win32ui.GetMainFrame().MDINext(0)
+
+	def WindowBackEvent(self, event):
+		parent = self.GetParentFrame()
+		if parent == win32ui.GetMainFrame():
+			# It is docked.
+			try:
+				wnd, isactive = parent.MDIGetActive()
+				wnd.SetFocus()
+			except win32ui.error:
+				# No MDI window active!
+				pass
+		else:
+			# Normal Window
+			try:
+				lastActive = self.GetParentFrame().lastActive
+				# If the window is invalid, reset it.
+				if lastActive is not None and (lastActive._obj_ is None or lastActive.GetSafeHwnd()==0):
+					lastActive = self.GetParentFrame().lastActive = None
+					win32ui.SetStatusText("The last active Window has been closed.")
+			except AttributeError:
+				print "Can't find the last active window!"
+				lastActive = None
+			if lastActive is not None:
+				lastActive.MDIActivate()
+
class InteractiveView(InteractiveCore, winout.WindowOutputView):
	"""The concrete interactive view: InteractiveCore behaviour mixed into
	a standard output-window scintilla view."""
	def __init__(self,  doc):
		InteractiveCore.__init__(self)
		winout.WindowOutputView.__init__(self, doc)
	def _MakeColorizer(self):
		# Use the interactive-aware formatter rather than the default.
		return InteractiveFormatter(self)
	def OnInitialUpdate(self):
		winout.WindowOutputView.OnInitialUpdate(self)
		self.SetWordWrap()
		self.Init()
	def HookHandlers(self):
		# Install both the output-window and the interactive hooks.
		winout.WindowOutputView.HookHandlers(self)
		InteractiveCore.HookHandlers(self)
+
class CInteractivePython(winout.WindowOutput):
	"""The interactive window "document" object - owns the view and only
	releases the grabbed stdout/stderr on the final destroy."""
	def __init__(self, makeDoc = None, makeFrame = None):
		self.IsFinalDestroy = 0
		winout.WindowOutput.__init__(self, sectionProfile, sectionProfile, \
		                             winout.flags.WQ_LINE, 1, None, makeDoc, makeFrame, InteractiveView )
		self.Create()

	def OnViewDestroy(self, view):
		# Restore sys.stdout/stderr only when really going away - a view
		# destroy can also be a transient close/re-create.
		if self.IsFinalDestroy:
			view.OutputRelease()
		winout.WindowOutput.OnViewDestroy(self, view)

	def Close(self):
		self.IsFinalDestroy = 1
		winout.WindowOutput.Close(self)
+
class InteractiveFrame(winout.WindowOutputFrame):
	"""Frame for the non-docked interactive window; remembers the window
	that was active before it, so WindowBackEvent can return there."""
	def __init__(self):
		self.lastActive = None
		winout.WindowOutputFrame.__init__(self)

	def OnMDIActivate(self, bActive, wndActive, wndDeactive):
		if bActive:
			self.lastActive = wndDeactive
+
+######################################################################
+## 
+## Dockable Window Support
+## 
+######################################################################
+ID_DOCKED_INTERACTIVE_CONTROLBAR = 0xe802
+
DockedInteractiveViewParent = InteractiveView
class DockedInteractiveView(DockedInteractiveViewParent):
	"""Interactive view hosted in a docking bar rather than an MDI child.
	Tracks focus so the hosting frame knows its active view, and persists
	its size on destroy."""
	def HookHandlers(self):
		DockedInteractiveViewParent.HookHandlers(self)
		self.HookMessage(self.OnSetFocus, win32con.WM_SETFOCUS)
		self.HookMessage(self.OnKillFocus, win32con.WM_KILLFOCUS)
	def OnSetFocus(self, msg):
		self.GetParentFrame().SetActiveView(self)
		return 1
	def OnKillFocus(self, msg):
		# If we are losing focus to another in this app, reset the main frame's active view.
		hwnd = wparam = msg[2]
		try:
			wnd = win32ui.CreateWindowFromHandle(hwnd)
			reset  = wnd.GetTopLevelFrame()==self.GetTopLevelFrame()
		except win32ui.error:
			reset = 0 # Not my window
		if reset: self.GetParentFrame().SetActiveView(None)
		return 1
	def OnDestroy(self, msg):
		# Persist the docked size for next session.
		newSize = self.GetWindowPlacement()[4]
		pywin.framework.app.SaveWindowSize("Interactive Window", newSize, "docked")
		try:
			# Fixed: was 'GetActiveView==self', comparing the bound method
			# object to the view (always False), so the parent frame's
			# active view was never reset.
			if self.GetParentFrame().GetActiveView()==self:
				self.GetParentFrame().SetActiveView(None)
		except win32ui.error:
			pass
		try:
			if win32ui.GetMainFrame().GetActiveView()==self:
				win32ui.GetMainFrame().SetActiveView(None)
		except win32ui.error:
			pass
		return DockedInteractiveViewParent.OnDestroy(self, msg)
+
class CDockedInteractivePython(CInteractivePython):
	"""Interactive window variant living inside a docking control bar."""
	def __init__(self, dockbar):
		self.bFirstCreated = 0
		self.dockbar = dockbar
		CInteractivePython.__init__(self)
	def NeedRecreateWindow(self):
		# True when the control bar is missing or hidden (and we are not
		# mid-create or shutting down).
		if self.bCreating:
			return 0
		try:
				frame = win32ui.GetMainFrame()
				if frame.closing:
					return 0 # Dying!
		except (win32ui.error, AttributeError):
			return 0 # The app is dying!
		try:
			cb = frame.GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR)
			return not cb.IsWindowVisible()
		except win32ui.error:
			return 1 # Control bar does not exist!
	def RecreateWindow(self):
		# Re-show the existing dock bar, or build it from scratch.
		try:
			dockbar = win32ui.GetMainFrame().GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR)
			win32ui.GetMainFrame().ShowControlBar(dockbar, 1, 1)
		except win32ui.error:
			CreateDockedInteractiveWindow()

	def Create(self):
		"""Create the docked view as a child of self.dockbar, restoring
		the persisted size (falling back to 500x200)."""
		self.bCreating = 1
		doc = InteractiveDocument(None, self.DoCreateDoc())
		view = DockedInteractiveView(doc)
		defRect = pywin.framework.app.LoadWindowSize("Interactive Window", "docked")
		if defRect[2]-defRect[0]==0:
			defRect = 0, 0, 500, 200
		style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER
		id = 1050 # win32ui.AFX_IDW_PANE_FIRST
		view.CreateWindow(self.dockbar, id, style, defRect)
		view.OnInitialUpdate()
		self.bFirstCreated = 1

		self.currentView = doc.GetFirstView()
		self.bCreating = 0
		if self.title: doc.SetTitle(self.title)
+
+# The factory we pass to the dockable window support.
# The factory we pass to the dockable window support.
def InteractiveViewCreator(parent):
	# Creates the docked interactive python object, recording it in the
	# module-level 'edit' singleton, and returns its view for the dock bar.
	global edit
	edit = CDockedInteractivePython(parent)
	return edit.currentView
+
def CreateDockedInteractiveWindow():
	"""Create the docking bar hosting the interactive window and dock it
	at the bottom of the main frame."""
	# Later, the DockingBar should be capable of hosting multiple
	# children.
	from pywin.docking.DockingBar import DockingBar
	bar = DockingBar()
	creator = InteractiveViewCreator
	bar.CreateWindow(win32ui.GetMainFrame(), creator, "Interactive Window", ID_DOCKED_INTERACTIVE_CONTROLBAR)
	bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC)
	bar.EnableDocking(afxres.CBRS_ALIGN_ANY)
	win32ui.GetMainFrame().DockControlBar(bar, afxres.AFX_IDW_DOCKBAR_BOTTOM)
+
+######################################################################
+# 
+# The public interface to this module.
+# 
+######################################################################
+# No extra functionality now, but maybe later, so
+# publicize these names.
+InteractiveDocument = winout.WindowOutputDocument
+
+# We remember our one and only interactive window in the "edit" variable.
+edit = None
+
def CreateInteractiveWindowUserPreference(makeDoc = None, makeFrame = None):
	"""Create some sort of interactive window if the user's preference say we should.
	"""
	if LoadPreference("Show at startup", 1):
		CreateInteractiveWindow(makeDoc, makeFrame)
+
def CreateInteractiveWindow(makeDoc = None, makeFrame = None):
	"""Create a standard or docked interactive window unconditionally
	(docked vs MDI is chosen by the persisted "Docking" preference).
	"""
	assert edit is None, "Creating second interactive window!"
	bDocking = LoadPreference("Docking", 0)
	if bDocking:
		CreateDockedInteractiveWindow()
	else:
		CreateMDIInteractiveWindow(makeDoc, makeFrame)
	# Both creation paths are responsible for setting the 'edit' global.
	assert edit is not None, "Created interactive window, but did not set the global!"
	edit.currentView.SetFocus()
+
def CreateMDIInteractiveWindow(makeDoc = None, makeFrame = None):
	"""Create a standard (non-docked) interactive window unconditionally
	"""
	global edit
	if makeDoc is None: makeDoc = InteractiveDocument
	if makeFrame is None: makeFrame = InteractiveFrame
	edit = CInteractivePython(makeDoc=makeDoc,makeFrame=makeFrame)
+
def DestroyInteractiveWindow():
	""" Destroy the interactive window.
	This is different to Closing the window,
	which may automatically re-appear.  Once destroyed, it can never be recreated,
	and a complete new instance must be created (which the various other helper
	functions will then do after making this call
	"""
	global edit
	if edit is not None and edit.currentView is not None:
		if edit.currentView.GetParentFrame() == win32ui.GetMainFrame():
			# It is docked - do nothing now (this is only called at shutdown!)
			pass
		else:
			# It is a standard window - call Close on the container.
			edit.Close()
			edit = None
+
def CloseInteractiveWindow():
	"""Close the interactive window, allowing it to be re-created on demand.
	"""
	global edit
	if edit is not None and edit.currentView is not None:
		if edit.currentView.GetParentFrame() == win32ui.GetMainFrame():
			# It is docked, just hide the dock bar.
			frame = win32ui.GetMainFrame()
			cb = frame.GetControlBar(ID_DOCKED_INTERACTIVE_CONTROLBAR)
			frame.ShowControlBar(cb, 0, 1)
		else:
			# It is a standard window - destroy the frame/view, allowing the object itself to remain.
			edit.currentView.GetParentFrame().DestroyWindow()
+
def ToggleInteractiveWindow():
	"""If the interactive window is visible, hide it, otherwise show it.
	"""
	if edit is None:
		CreateInteractiveWindow()
	elif edit.NeedRecreateWindow():
		edit.RecreateWindow()
	else:
		# Close it, allowing a reopen.
		CloseInteractiveWindow()
+
def ShowInteractiveWindow():
	"""Shows (or creates if necessary) an interactive window"""
	if edit is None:
		CreateInteractiveWindow()
	else:
		if edit.NeedRecreateWindow():
			edit.RecreateWindow()
		else:
			parent = edit.currentView.GetParentFrame()
			if parent == win32ui.GetMainFrame(): # It is docked.
				edit.currentView.SetFocus()
			else: # It is a "normal" window
				# Restore it from min/max state and bring it to the front.
				edit.currentView.GetParentFrame().AutoRestore()
				win32ui.GetMainFrame().MDIActivate(edit.currentView.GetParentFrame())
+
def IsInteractiveWindowVisible():
	"""True when an interactive window exists and would not need recreating."""
	if edit is None:
		return False
	return not edit.NeedRecreateWindow()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py
new file mode 100644
index 0000000..bd67421
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/intpyapp.py
@@ -0,0 +1,442 @@
+# intpyapp.py  - Interactive Python application class
+#
+import win32con
+import win32api
+import win32ui
+import __main__
+import sys
+import string
+import app
+import traceback
+from pywin.mfc import window, afxres, dialog
+import commctrl
+import dbgcommands
+
+lastLocateFileName = ".py" # used in the "File/Locate" dialog...
+
+class MainFrame(app.MainFrame):
+	"""Main frame for the interactive Pythonwin application.
+
+	Extends the generic app.MainFrame with the standard toolbar, the
+	debugger control bars, the "Tools" and "Help" menus, and close-time
+	cleanup of the control bars and help system.
+	"""
+	def OnCreate(self, createStruct):
+		# Flag set to 1 once OnClose has committed to shutting down.
+		self.closing = 0
+		if app.MainFrame.OnCreate(self, createStruct)==-1:
+			return -1
+		style = win32con.WS_CHILD | afxres.CBRS_SIZE_DYNAMIC | afxres.CBRS_TOP | afxres.CBRS_TOOLTIPS | afxres.CBRS_FLYBY
+
+		self.EnableDocking(afxres.CBRS_ALIGN_ANY)
+
+		tb = win32ui.CreateToolBar (self, style | win32con.WS_VISIBLE)
+		tb.ModifyStyle(0, commctrl.TBSTYLE_FLAT)
+		tb.LoadToolBar(win32ui.IDR_MAINFRAME)
+		tb.EnableDocking(afxres.CBRS_ALIGN_ANY)
+		tb.SetWindowText("Standard")
+		self.DockControlBar(tb)
+		# Any other packages which use toolbars
+		from pywin.debugger.debugger import PrepareControlBars
+		PrepareControlBars(self)
+		# Note "interact" also uses dockable windows, but they already happen
+
+		# And a "Tools" menu on the main frame.
+		menu = self.GetMenu()
+		import toolmenu
+		toolmenu.SetToolsMenu(menu, 2)
+		# And fix the "Help" menu on the main frame
+		from pywin.framework import help
+		help.SetHelpMenuOtherHelp(menu)
+
+	def OnClose(self):
+		try:
+			# If the debugger is still pumping messages, ask it to close and
+			# abort this close - the close can be retried once it has shut down.
+			import pywin.debugger
+			if pywin.debugger.currentDebugger is not None and pywin.debugger.currentDebugger.pumping:
+				try:
+					pywin.debugger.currentDebugger.close(1)
+				except:
+					import traceback
+					traceback.print_exc()
+				return
+		except win32ui.error:
+			pass
+		self.closing = 1
+		self.SaveBarState("ToolbarDefault")
+		self.SetActiveView(None) # Otherwise MFC's OnClose may _not_ prompt for save.
+
+		from pywin.framework import help
+		help.FinalizeHelp()
+
+		# Tear down the toolbars we created before letting MFC close the frame.
+		self.DestroyControlBar(afxres.AFX_IDW_TOOLBAR)
+		self.DestroyControlBar(win32ui.ID_VIEW_TOOLBAR_DBG)
+		
+		return self._obj_.OnClose()
+
+	def DestroyControlBar(self, id):
+		# Destroy the control bar with the given ID, silently ignoring a missing bar.
+		try:
+			bar = self.GetControlBar(id)
+		except win32ui.error:
+			return
+		bar.DestroyWindow()
+
+	def OnCommand(self, wparam, lparam):
+		# By default, the current MDI child frame will process WM_COMMAND
+		# messages before any docked control bars - even if the control bar
+		# has focus.  This is a problem for the interactive window when docked.
+		# Therefore, we detect the situation of a view having the main frame
+		# as its parent, and assume it must be a docked view (which it will in an MDI app)
+		try:
+			v = self.GetActiveView() # Raise an exception if none - good - then we want default handling
+			# Main frame _does_ have a current view (ie, a docking view) - see if it wants it.
+			if v.OnCommand(wparam, lparam):
+				return 1
+		except (win32ui.error, AttributeError):
+			pass
+		return self._obj_.OnCommand(wparam, lparam)
+
+class InteractivePythonApp(app.CApp):
+	"""The Pythonwin application object.
+
+	Hooks the menu/toolbar command handlers, manages the DDE server so that
+	a second invocation can forward its command line to an existing instance,
+	and processes the command line arguments (/edit, /rundlg, /run, /app,
+	/new, /dde, /nodde, /newinstance).
+	"""
+	# This works if necessary - just we dont need to override the Run method.
+#	def Run(self):
+#		return self._obj_.Run()
+
+	def HookCommands(self):
+		# Register handlers for every menu/toolbar command this app services.
+		app.CApp.HookCommands(self)
+		dbgcommands.DebuggerCommandHandler().HookCommands()
+		self.HookCommand(self.OnViewBrowse,win32ui.ID_VIEW_BROWSE)
+		self.HookCommand(self.OnFileImport,win32ui.ID_FILE_IMPORT)
+		self.HookCommand(self.OnFileCheck,win32ui.ID_FILE_CHECK)
+		self.HookCommandUpdate(self.OnUpdateFileCheck, win32ui.ID_FILE_CHECK)
+		self.HookCommand(self.OnFileRun,win32ui.ID_FILE_RUN)
+		self.HookCommand(self.OnFileLocate,win32ui.ID_FILE_LOCATE)
+		self.HookCommand(self.OnInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE)
+		self.HookCommandUpdate(self.OnUpdateInteractiveWindow, win32ui.ID_VIEW_INTERACTIVE)
+		self.HookCommand(self.OnViewOptions, win32ui.ID_VIEW_OPTIONS)
+		self.HookCommand(self.OnHelpIndex, afxres.ID_HELP_INDEX)
+		self.HookCommand(self.OnFileSaveAll, win32ui.ID_FILE_SAVE_ALL)
+		self.HookCommand(self.OnViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG)
+		self.HookCommandUpdate(self.OnUpdateViewToolbarDbg, win32ui.ID_VIEW_TOOLBAR_DBG)
+
+	def CreateMainFrame(self):
+		return MainFrame()
+
+	def MakeExistingDDEConnection(self):
+		# Use DDE to connect to an existing instance
+		# Return None if no existing instance
+		try:
+			import intpydde
+		except ImportError:
+			# No dde support!
+			return None
+		conv = intpydde.CreateConversation(self.ddeServer)
+		try:
+			conv.ConnectTo("Pythonwin", "System")
+			return conv
+		except intpydde.error:
+			return None
+
+	def InitDDE(self):
+		# Do all the magic DDE handling.  
+		# Returns TRUE if we have pumped the arguments to our
+		# remote DDE app, and we should terminate.
+		try:
+			import intpydde
+		except ImportError:
+			self.ddeServer = None
+			intpydde = None
+		if intpydde is not None:
+			self.ddeServer = intpydde.DDEServer(self)
+			self.ddeServer.Create("Pythonwin", intpydde.CBF_FAIL_SELFCONNECTIONS )
+			try:
+				# If there is an existing instance, pump the arguments to it.
+				connection = self.MakeExistingDDEConnection()
+				if connection is not None:
+					if self.ProcessArgs(sys.argv, connection) is None:
+						return 1
+			except:
+				win32ui.MessageBox("There was an error in the DDE conversation with Pythonwin")
+				traceback.print_exc()
+
+	def InitInstance(self):
+		# Allow "/nodde" and "/newinstance" to optimize this!
+		if "/nodde" not in sys.argv and "/newinstance" not in sys.argv:
+			if self.InitDDE():
+				return 1 # A remote DDE client is doing it for us!
+		else:
+			self.ddeServer = None
+
+		win32ui.SetRegistryKey("Python %s" % (sys.winver,)) # MFC automatically puts the main frame caption on!
+		app.CApp.InitInstance(self)
+
+		# Create the taskbar icon
+		win32ui.CreateDebuggerThread()
+
+		# Allow Pythonwin to host OCX controls.
+		win32ui.EnableControlContainer()
+
+		# Display the interactive window if the user wants it.
+		import interact
+		interact.CreateInteractiveWindowUserPreference()
+
+		# Load the modules we use internally.
+		self.LoadSystemModules()
+
+		# Load additional module the user may want.
+		self.LoadUserModules()
+
+		# Load the ToolBar state near the end of the init process, as
+		# there may be Toolbar IDs created by the user or other modules.
+		# By now all these modules should be loaded, so all the toolbar IDs loaded.
+		try:
+			self.frame.LoadBarState("ToolbarDefault")
+		except win32ui.error:
+			# MFC sucks.  It does essentially "GetDlgItem(x)->Something", so if the
+			# toolbar with ID x does not exist, MFC crashes!  Pythonwin has a trap for this
+			# but I need to investigate more how to prevent it (AFAIK, ensuring all the
+			# toolbars are created by now _should_ stop it!)
+			pass
+
+		# Finally process the command line arguments.
+		self.ProcessArgs(sys.argv)
+
+	def ExitInstance(self):
+		# Shut down the debugger thread, interactive window and DDE server
+		# before handing off to the base class.
+		win32ui.DestroyDebuggerThread()
+		try:
+			import interact
+			interact.DestroyInteractiveWindow()
+		except:
+			pass
+		if self.ddeServer is not None:
+			self.ddeServer.Shutdown()
+			self.ddeServer = None
+		return app.CApp.ExitInstance(self)
+
+	def Activate(self):
+		# Bring to the foreground.  Mainly used when another app starts up, it asks
+		# this one to activate itself, then it terminates.
+		frame = win32ui.GetMainFrame()
+		frame.SetForegroundWindow()
+		if frame.GetWindowPlacement()[1]==win32con.SW_SHOWMINIMIZED:
+			frame.ShowWindow(win32con.SW_RESTORE)
+
+	def ProcessArgs(self, args, dde = None):
+		"""Process the command line arguments.
+
+		If 'dde' is a conversation to a remote instance, the commands are
+		executed in that instance instead of locally.  Returns 1 when a new
+		instance should be started (the /new flag), otherwise None.
+		"""
+		# If we are going to talk to a remote app via DDE, then
+		# activate it!
+		if dde is not None: dde.Exec("self.Activate()")
+		if len(args) and args[0] in ['/nodde','/newinstance']: del args[0] # already handled.
+		if len(args)<1 or not args[0]: # argv[0]=='' when started without args, just like Python.exe!
+			return
+		try:
+			if args[0] and args[0][0]!='/':
+				argStart = 0
+				argType = string.lower(win32ui.GetProfileVal("Python","Default Arg Type","/edit"))
+			else:
+				argStart = 1
+				argType = args[0]
+			if argStart >= len(args):
+				raise TypeError, "The command line requires an additional arg."
+			if argType=="/edit":
+				# Load up the default application.
+				if dde:
+					fname = win32api.GetFullPathName(args[argStart])
+					dde.Exec("win32ui.GetApp().OpenDocumentFile(%s)" % (`fname`))
+				else:
+					win32ui.GetApp().OpenDocumentFile(args[argStart])
+			elif argType=="/rundlg":
+				if dde:
+					dde.Exec("import scriptutils;scriptutils.RunScript('%s', '%s', 1)" % (args[argStart], string.join(args[argStart+1:])))
+				else:
+					import scriptutils
+					scriptutils.RunScript(args[argStart], string.join(args[argStart+1:]))
+			elif argType=="/run":
+				if dde:
+					dde.Exec("import scriptutils;scriptutils.RunScript('%s', '%s', 0)" % (args[argStart], string.join(args[argStart+1:])))
+				else:
+					import scriptutils
+					scriptutils.RunScript(args[argStart], string.join(args[argStart+1:]), 0)
+			elif argType=="/app":
+				raise RuntimeError, "/app only supported for new instances of Pythonwin.exe"
+			elif argType=='/new': # Allow a new instance of Pythonwin
+				return 1
+			elif argType=='/dde': # Send arbitary command
+				if dde is not None:
+					dde.Exec(args[argStart])
+				else:
+					win32ui.MessageBox("The /dde command can only be used\r\nwhen Pythonwin is already running")
+			else:
+				raise TypeError, "Command line arguments not recognised"
+		except:
+			typ, val, tb = sys.exc_info()
+			print "There was an error processing the command line args"
+			traceback.print_exception(typ, val, tb, None, sys.stdout)
+			win32ui.OutputDebug("There was a problem with the command line args - %s: %s" % (`typ`,`val`))
+			tb = None # Prevent a cycle
+
+
+	def LoadSystemModules(self):
+		self.DoLoadModules("editor,stdin")
+
+	def LoadUserModules(self, moduleNames = None):
+		# Load the users modules.
+		if moduleNames is None:
+			default = "sgrepmdi"
+			moduleNames=win32ui.GetProfileVal('Python','Startup Modules',default)
+		self.DoLoadModules(moduleNames)
+
+	def DoLoadModules(self, moduleNames): # Import each module in a comma-separated string of names.
+		if not moduleNames: return
+		modules = string.splitfields(moduleNames,",")
+		for module in modules:
+			try:
+				exec "import "+module
+			except: # Catch em all, else the app itself dies! 'ImportError:
+				traceback.print_exc()
+				msg = 'Startup import of user module "%s" failed' % module
+				print msg
+				win32ui.MessageBox(msg)
+
+	#
+	# DDE Callback
+	#
+	def OnDDECommand(self, command):
+		# NOTE: this exec's whatever the DDE client sent - it is trusted
+		# local IPC between Pythonwin instances, not safe for untrusted input.
+#		print "DDE Executing", `command`
+		try:
+			exec command + "\n"
+		except:
+			print "ERROR executing DDE command: ", command
+			traceback.print_exc()
+			raise
+
+	#
+	# General handlers
+	#
+	def OnViewBrowse( self, id, code ):
+		" Called when ViewBrowse message is received "
+		from pywin.mfc import dialog
+		from pywin.tools import browser
+		obName = dialog.GetSimpleInput('Object', '__builtins__', 'Browse Python Object')
+		if obName is None:
+			return
+		try:
+			browser.Browse(eval(obName, __main__.__dict__, __main__.__dict__))
+		except NameError:
+			win32ui.MessageBox('This is no object with this name')
+		except AttributeError:
+			win32ui.MessageBox('The object has no attribute of that name')
+		except:
+			traceback.print_exc()
+			win32ui.MessageBox('This object can not be browsed')
+
+	def OnFileImport( self, id, code ):
+		" Called when a FileImport message is received. Import the current or specified file"
+		import scriptutils
+		scriptutils.ImportFile()
+
+	def OnFileCheck( self, id, code ):
+		" Called when a FileCheck message is received. Check the current file."
+		import scriptutils
+		scriptutils.CheckFile()
+
+	def OnUpdateFileCheck(self, cmdui):
+		# Only enable "Check" when there is an active file (no auto-save).
+		import scriptutils
+		cmdui.Enable( scriptutils.GetActiveFileName(0) is not None )
+
+	def OnFileRun( self, id, code ):
+		" Called when a FileRun message is received. "
+		import scriptutils
+		# Holding SHIFT down suppresses the "Run Script" dialog.
+		showDlg = win32api.GetKeyState(win32con.VK_SHIFT) >= 0
+		scriptutils.RunScript(None, None, showDlg)
+
+	def OnFileLocate( self, id, code ):
+		" Prompt for a module name, locate it on sys.path, and open it. "
+		from pywin.mfc import dialog
+		import scriptutils
+		import os
+		global lastLocateFileName # save the new version away for next time...
+
+		# Loop until a good name, or cancel
+		while 1:
+			name = dialog.GetSimpleInput('File name', lastLocateFileName, 'Locate Python File')
+			if name is None: # Cancelled.
+				break
+			lastLocateFileName = name
+			# if ".py" supplied, rip it off!
+			if string.lower(lastLocateFileName[-3:])=='.py':
+				lastLocateFileName = lastLocateFileName[:-3]
+			# Turn a dotted module name into a relative path.
+			lastLocateFileName = string.translate(lastLocateFileName, string.maketrans(".","\\"))
+			newName = scriptutils.LocatePythonFile(lastLocateFileName)
+			if newName is None:
+				win32ui.MessageBox("The file '%s' can not be located" % lastLocateFileName)
+			else:
+				win32ui.GetApp().OpenDocumentFile(newName)
+				break
+
+	# Display all the "options" proprety pages we can find
+	def OnViewOptions(self, id, code):
+		win32ui.InitRichEdit()
+		sheet = dialog.PropertySheet("Pythonwin Options")
+		# Add property pages we know about that need manual work.
+		from pywin.dialogs import ideoptions
+		sheet.AddPage( ideoptions.OptionsPropPage() )
+
+		import toolmenu
+		sheet.AddPage( toolmenu.ToolMenuPropPage() )
+
+		# Get other dynamic pages from templates.
+		pages = []
+		for template in self.GetDocTemplateList():
+			try:
+				# Dont actually call the function with the exception handler.
+				getter = template.GetPythonPropertyPages
+			except AttributeError:
+				# Template does not provide property pages!
+				continue
+			pages = pages + getter()
+
+		# Debugger template goes at the end
+		try:
+			from pywin.debugger import configui
+		except ImportError:
+			configui = None
+		if configui is not None: pages.append(configui.DebuggerOptionsPropPage())
+		# Now simply add the pages, and display the dialog.
+		for page in pages:
+			sheet.AddPage(page)
+
+		if sheet.DoModal()==win32con.IDOK:
+			win32ui.SetStatusText("Applying configuration changes...", 1)
+			win32ui.DoWaitCursor(1)
+			# Tell every Window in our app that win.ini has changed!
+			win32ui.GetMainFrame().SendMessageToDescendants(win32con.WM_WININICHANGE, 0, 0)
+			win32ui.DoWaitCursor(0)
+
+	def OnInteractiveWindow(self, id, code):
+		# toggle the existing state.
+		import interact
+		interact.ToggleInteractiveWindow()
+
+	def OnUpdateInteractiveWindow(self, cmdui):
+		# Check-mark the menu item according to the window's visibility,
+		# without forcing an import of the interact module.
+		try:
+			interact=sys.modules['pywin.framework.interact']
+			state = interact.IsInteractiveWindowVisible()
+		except KeyError: # Interactive module hasnt ever been imported.
+			state = 0
+		cmdui.Enable()
+		cmdui.SetCheck(state)
+
+	def OnFileSaveAll(self, id, code):
+		# Only attempt to save editor documents.
+		from pywin.framework.editor import editorTemplate
+		docs = filter(lambda doc: doc.IsModified() and doc.GetPathName(), editorTemplate.GetDocumentList())
+		map(lambda doc: doc.OnSaveDocument(doc.GetPathName()), docs)
+		win32ui.SetStatusText("%d documents saved" % len(docs), 1)
+
+	def OnViewToolbarDbg(self, id, code):
+		if code==0:
+			return not win32ui.GetMainFrame().OnBarCheck(id)
+
+	def OnUpdateViewToolbarDbg(self, cmdui):
+		win32ui.GetMainFrame().OnUpdateControlBarMenu(cmdui)
+		cmdui.Enable(1)
+
+	def OnHelpIndex( self, id, code ):
+		import help
+		help.SelectAndRunHelpFile()
+
+# As per the comments in app.py, this use is deprecated.
+# app.AppBuilder = InteractivePythonApp
+
+# Now all we do is create the application - instantiating it at import time
+# is what registers this app object with the framework.
+thisApp = InteractivePythonApp()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/intpydde.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/intpydde.py
new file mode 100644
index 0000000..2ca6f753
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/intpydde.py
@@ -0,0 +1,56 @@
+# DDE support for Pythonwin
+#
+# Seems to work fine (in the context that IE4 seems to have broken
+# DDE on _all_ NT4 machines I have tried, but only when a "Command Prompt" window
+# is open.  Strange, but true.  If you have problems with this, close all Command Prompts!
+
+
+import win32ui
+import win32api, win32con
+from pywin.mfc import object
+from dde import *
+import traceback
+import string
+
+class DDESystemTopic(object.Object):
+	"""The DDE "System" topic - forwards Exec data to the app's OnDDECommand."""
+	def __init__(self, app):
+		self.app = app
+		object.Object.__init__(self, CreateServerSystemTopic())
+	def Exec(self, data):
+		# Returns 0 on failure (any exception from the command is swallowed
+		# after being printed).
+		try:
+#			print "Executing", cmd
+			self.app.OnDDECommand(data)
+		except:
+			# The DDE Execution failed.
+			print "Error executing DDE command."
+			traceback.print_exc()
+			return 0
+
+class DDEServer(object.Object):
+	"""The DDE server object for a Pythonwin instance."""
+	def __init__(self, app):
+		self.app = app
+		object.Object.__init__(self, CreateServer())
+		# topic/item are created elsewhere and destroyed in Shutdown.
+		self.topic = self.item = None
+		
+	def CreateSystemTopic(self):
+		return DDESystemTopic(self.app)
+
+	def Shutdown(self):
+		# Tear down the underlying server, then any topic/item objects.
+		self._obj_.Shutdown()
+		self._obj_.Destroy()
+		if self.topic is not None:
+			self.topic.Destroy()
+			self.topic = None
+		if self.item is not None:
+			self.item.Destroy()
+			self.item = None
+		
+	def OnCreate(self):
+		return 1
+		
+	def Status(self, msg):
+		# Show msg on the status bar, falling back to stdout when no GUI.
+		try:
+			win32ui.SetStatusText(msg)
+		except win32ui.error:
+			print msg
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/scriptutils.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/scriptutils.py
new file mode 100644
index 0000000..997e2f6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/scriptutils.py
@@ -0,0 +1,590 @@
+"""
+Various utilities for running/importing a script
+"""
+import app
+import sys
+import win32ui
+import win32api
+import win32con
+import __main__
+from pywin.mfc import dialog
+import os
+import string
+import traceback
+import linecache
+import bdb
+
+from cmdline import ParseArgs
+
+# "Debugging type" codes accepted by RunScript and shown in DlgRunScript.
+RS_DEBUGGER_NONE=0 # Dont run under the debugger.
+RS_DEBUGGER_STEP=1 # Start stepping under the debugger
+RS_DEBUGGER_GO=2 # Just run under the debugger, stopping only at break-points.
+RS_DEBUGGER_PM=3 # Dont run under debugger, but do post-mortem analysis on exception.
+
+# Combo-box entries for the dialog - index corresponds to the RS_DEBUGGER_* value.
+debugging_options = string.split("""No debugging
+Step-through in the debugger
+Run in the debugger
+Post-Mortem of unhandled exceptions""", "\n")
+
+# A dialog box for the "Run Script" command.
+class DlgRunScript(dialog.Dialog):
+	"A class for the 'run script' dialog"
+	def __init__(self, bHaveDebugger):
+		dialog.Dialog.__init__(self, win32ui.IDD_RUN_SCRIPT )
+		self.AddDDX(win32ui.IDC_EDIT1, "script")
+		self.AddDDX(win32ui.IDC_EDIT2, "args")
+		self.AddDDX(win32ui.IDC_COMBO1, "debuggingType", "i")
+		self.HookCommand(self.OnBrowse, win32ui.IDC_BUTTON2)
+		self.bHaveDebugger = bHaveDebugger
+	def OnInitDialog(self):
+		rc = dialog.Dialog.OnInitDialog(self)
+		cbo = self.GetDlgItem(win32ui.IDC_COMBO1)
+		for o in debugging_options:
+			cbo.AddString(o)
+		cbo.SetCurSel(self['debuggingType'])
+		if not self.bHaveDebugger:
+			cbo.EnableWindow(0)
+
+	def OnBrowse(self, id, cmd):
+		openFlags = win32con.OFN_OVERWRITEPROMPT|win32con.OFN_FILEMUSTEXIST
+		dlg = win32ui.CreateFileDialog(1,None,None,openFlags, "Python Scripts (*.py)|*.py||", self)
+		dlg.SetOFNTitle("Run Script")
+		if dlg.DoModal()!=win32con.IDOK:
+			return 0
+		self['script'] = dlg.GetPathName()
+		self.UpdateData(0)
+		return 0
+
+def GetDebugger():
+	"""Get the default Python debugger.  Returns the debugger, or None.
+	
+	It is assumed the debugger has a standard "pdb" defined interface.
+	Currently always returns the 'pywin.debugger' debugger, or None
+	(pdb is _not_ returned as it is not effective in this GUI environment)
+	"""
+	try:
+		import pywin.debugger
+		return pywin.debugger
+	except ImportError:
+		return None
+
+def IsOnPythonPath(path):
+	"Given a path only, see if it is on the Pythonpath.  Assumes path is a full path spec."
+	# must check that the command line arg's path is in sys.path
+	for syspath in sys.path:
+		try:
+			# Python 1.5 and later allows an empty sys.path entry.
+			# Comparison is between win32ui-normalized full paths.
+			if syspath and win32ui.FullPath(syspath)==path:
+				return 1
+		except win32ui.error, details:
+			print "Warning: The sys.path entry '%s' is invalid\n%s" % (syspath, details)
+	return 0
+
+def GetPackageModuleName(fileName):
+	"""Given a filename, return (module name, new path).
+	   eg - given "c:\a\b\c\my.py", return ("b.c.my",None) if "c:\a" is on sys.path.
+	   If no package found, will return ("my", "c:\a\b\c")
+	"""
+	path, fname = os.path.split(fileName)
+	path=origPath=win32ui.FullPath(path)
+	fname = os.path.splitext(fname)[0]
+	modBits = []
+	newPathReturn = None
+	if not IsOnPythonPath(path):
+		# Module not directly on the search path - see if under a package.
+		while len(path)>3: # ie 'C:\'
+			path, modBit = os.path.split(path)
+			modBits.append(modBit)
+			# If on path, _and_ existing package of that name loaded.
+			if IsOnPythonPath(path) and sys.modules.has_key(modBit) and \
+			   ( os.path.exists(os.path.join(path, '__init__.py')) or \
+			     os.path.exists(os.path.join(path, '__init__.pyc')) or \
+			     os.path.exists(os.path.join(path, '__init__.pyo')) \
+			   ):
+				modBits.reverse()
+				return string.join(modBits, ".") + "." + fname, newPathReturn
+			# Not found - look a level higher
+		else:
+			# while/else: reached the drive root without finding a package,
+			# so report the original directory as the path to add.
+			newPathReturn = origPath
+		
+	return fname, newPathReturn
+
+def GetActiveView():
+	"""Gets the edit control (eg, EditView) with the focus, or None
+	"""
+	try:
+		# The active view of the active MDI child frame.
+		childFrame, bIsMaximised = win32ui.GetMainFrame().MDIGetActive()
+		return childFrame.GetActiveView()
+	except win32ui.error:
+		return None
+
+def GetActiveEditControl():
+	"""Return the edit control of the active view, or None.
+
+	A scintilla view is itself the control; otherwise fall back to the
+	view's rich-edit or plain edit control (implicit None if neither).
+	"""
+	view = GetActiveView()
+	if view is None: return None
+	if hasattr(view, "SCIAddText"): # Is it a scintilla control?
+		return view
+	try:
+		return view.GetRichEditCtrl()
+	except AttributeError:
+		pass
+	try:
+		return view.GetEditCtrl()
+	except AttributeError:
+		pass
+
+def GetActiveEditorDocument():
+	"""Returns the active editor document, or None if no
+	active document or its not an editor document.
+	"""
+	view = GetActiveView()
+	if view is None: return None
+	doc = view.GetDocument()
+	if hasattr(doc, "MarkerAdd"): # Is it an Editor document?
+		return doc
+	return None
+
+def GetActiveFileName(bAutoSave = 1):
+	"""Gets the file name for the active frame, saving it if necessary.
+	
+	Returns None if it cant be found, or raises KeyboardInterrupt.
+	KeyboardInterrupt is raised when the auto-save of a modified document
+	fails (ie, the user cancelled the save).
+	"""
+	pathName = None
+	active = GetActiveView()
+	if active is None:
+		return None
+	try:
+		doc = active.GetDocument()
+		pathName = doc.GetPathName()
+
+		if bAutoSave and \
+		   (len(pathName)>0 or \
+		    doc.GetTitle()[:8]=="Untitled" or \
+		    doc.GetTitle()[:6]=="Script"): # if not a special purpose window
+			if doc.IsModified():
+				try:
+					doc.OnSaveDocument(pathName)
+					pathName = doc.GetPathName()
+					
+					# clear the linecache buffer
+					linecache.clearcache()
+
+				except win32ui.error:
+					raise KeyboardInterrupt
+
+	except (win32ui.error, AttributeError):
+		pass
+	if not pathName:
+		return None
+	return pathName
+
+# Remembered between RunScript calls so the dialog defaults to the
+# previously-run script, its args and debugging mode.
+lastScript = ''
+lastArgs = ''
+lastDebuggingType = RS_DEBUGGER_NONE
+
+def RunScript(defName=None, defArgs=None, bShowDialog = 1, debuggingType=None):
+	"""Locate, compile and execute a Python script, optionally under the debugger.
+
+	defName - script path, or None to use the active window's file.
+	defArgs - argument string, or None to reuse the last args for this script.
+	bShowDialog - show the "Run Script" dialog first.
+	debuggingType - one of the RS_DEBUGGER_* constants, or None for the last used.
+	"""
+	global lastScript, lastArgs, lastDebuggingType
+	_debugger_stop_frame_ = 1 # Magic variable so the debugger will hide me!
+
+	# Get the debugger - may be None!
+	debugger = GetDebugger()
+
+	if defName is None:
+		try:
+			pathName = GetActiveFileName()
+		except KeyboardInterrupt:
+			return # User cancelled save.
+	else:
+		pathName = defName
+	if not pathName:
+		pathName = lastScript
+	if defArgs is None:
+		args = ''
+		if pathName==lastScript:
+			args = lastArgs
+	else:
+		args = defArgs
+	if debuggingType is None: debuggingType = lastDebuggingType
+
+	if not pathName or bShowDialog:
+		dlg = DlgRunScript(debugger is not None)
+		dlg['script'] = pathName
+		dlg['args'] = args
+		dlg['debuggingType'] = debuggingType
+		if dlg.DoModal() != win32con.IDOK:
+			return
+		script=dlg['script']
+		args=dlg['args']
+		debuggingType = dlg['debuggingType']
+		if not script: return
+		if debuggingType == RS_DEBUGGER_GO and debugger is not None:
+			# This may surprise users - they select "Run under debugger", but
+			# it appears not to!  Only warn when they pick from the dialog!
+			# First - ensure the debugger is activated to pickup any break-points
+			# set in the editor.
+			try:
+				# Create the debugger, but _dont_ init the debugger GUI.
+				rd = debugger._GetCurrentDebugger()
+			except AttributeError:
+				rd = None
+			if rd is not None and len(rd.breaks)==0:
+				msg = "There are no active break-points.\r\n\r\nSelecting this debug option without any\r\nbreak-points is unlikely to have the desired effect\r\nas the debugger is unlikely to be invoked..\r\n\r\nWould you like to step-through in the debugger instead?"
+				rc = win32ui.MessageBox(msg, win32ui.LoadString(win32ui.IDR_DEBUGGER), win32con.MB_YESNOCANCEL | win32con.MB_ICONINFORMATION)
+				if rc == win32con.IDCANCEL:
+					return
+				if rc == win32con.IDYES:
+					debuggingType = RS_DEBUGGER_STEP
+
+		lastDebuggingType = debuggingType
+		lastScript = script
+		lastArgs = args
+	else:
+		script = pathName
+
+	# try and open the script.
+	if len(os.path.splitext(script)[1])==0:	# check if no extension supplied, and give one.
+			script = script + '.py'
+	# If no path specified, try and locate the file
+	path, fnameonly = os.path.split(script)
+	if len(path)==0:
+		try:
+			os.stat(fnameonly) # See if it is OK as is...
+			script = fnameonly
+		except os.error:
+			fullScript = app.LocatePythonFile(script)
+			if fullScript is None:
+				win32ui.MessageBox("The file '%s' can not be located" % script )
+				return
+			script = fullScript
+	else:
+		path = win32ui.FullPath(path)
+		if not IsOnPythonPath(path): sys.path.append(path)
+
+	try:
+		f = open(script)
+	except IOError, (code, msg):
+		win32ui.MessageBox("The file could not be opened - %s (%d)" % (msg, code))
+		return
+
+	# Remember and hack sys.argv for the script.
+	oldArgv = sys.argv
+	sys.argv = ParseArgs(args)
+	sys.argv.insert(0, script)
+	# sys.path[0] is the path of the script
+	oldPath0 = sys.path[0]
+	newPath0 = os.path.split(script)[0]
+	if not oldPath0: # if sys.path[0] is empty
+		sys.path[0] = newPath0
+		insertedPath0 = 0
+	else:
+		sys.path.insert(0, newPath0)
+		insertedPath0 = 1
+	bWorked = 0
+	win32ui.DoWaitCursor(1)
+	base = os.path.split(script)[1]
+	# Allow windows to repaint before starting.
+	win32ui.PumpWaitingMessages()
+	win32ui.SetStatusText('Running script %s...' % base,1 )
+	exitCode = 0
+	from pywin.framework import interact
+	# Check the debugger flags
+	if debugger is None and (debuggingType != RS_DEBUGGER_NONE):
+		win32ui.MessageBox("No debugger is installed.  Debugging options have been ignored!")
+		debuggingType = RS_DEBUGGER_NONE
+
+	# Get a code object - ignore the debugger for this, as it is probably a syntax error
+	# at this point
+	try:
+		codeObject = compile(f.read()+"\n", script, "exec")
+	except:
+		# Almost certainly a syntax error!
+		_HandlePythonFailure("run script", script)
+		# No code object which to run/debug.
+		return
+	try:
+		if debuggingType == RS_DEBUGGER_STEP:
+			debugger.run(codeObject, __main__.__dict__, start_stepping=1)
+		elif debuggingType == RS_DEBUGGER_GO:
+			debugger.run(codeObject, __main__.__dict__, start_stepping=0)
+		else:
+			# Post mortem or no debugging
+			exec codeObject in __main__.__dict__
+		bWorked = 1
+	except bdb.BdbQuit:
+		# Dont print tracebacks when the debugger quit, but do print a message.
+		print "Debugging session cancelled."
+		exitCode = 1
+		bWorked = 1
+	except SystemExit, code:
+		exitCode = code
+		bWorked = 1
+	except KeyboardInterrupt:
+		# Consider this successful, as we dont want the debugger.
+		# (but we do want a traceback!)
+		if interact.edit and interact.edit.currentView:
+			interact.edit.currentView.EnsureNoPrompt()
+		traceback.print_exc()
+		if interact.edit and interact.edit.currentView:
+			interact.edit.currentView.AppendToPrompt([])
+		bWorked = 1
+	except:
+		if interact.edit and interact.edit.currentView:
+			interact.edit.currentView.EnsureNoPrompt()
+		traceback.print_exc()
+		if interact.edit and interact.edit.currentView:
+			interact.edit.currentView.AppendToPrompt([])
+		if debuggingType == RS_DEBUGGER_PM:
+			debugger.pm()
+	# Restore sys.argv and sys.path[0] to their pre-run state.
+	sys.argv = oldArgv
+	if insertedPath0:
+		del sys.path[0]
+	else:
+		sys.path[0] = oldPath0
+	f.close()
+	if bWorked:
+		win32ui.SetStatusText("Script '%s' returned exit code %s" %(script, exitCode))
+	else:
+		win32ui.SetStatusText('Exception raised while running script  %s' % base)
+	try:
+		sys.stdout.flush()
+	except AttributeError:
+		pass
+
+	win32ui.DoWaitCursor(0)
+
+def ImportFile():
+	""" This code looks for the current window, and determines if it can be imported.  If not,
+	it will prompt for a file name, and allow it to be imported. """
+	try:
+		pathName = GetActiveFileName()
+	except KeyboardInterrupt:
+		pathName = None
+
+	if pathName is not None:
+		# Only .py files can be imported.
+		if string.lower(os.path.splitext(pathName)[1]) <> ".py":
+			pathName = None
+
+	if pathName is None:
+		openFlags = win32con.OFN_OVERWRITEPROMPT|win32con.OFN_FILEMUSTEXIST
+		dlg = win32ui.CreateFileDialog(1,None,None,openFlags, "Python Scripts (*.py)|*.py||")
+		dlg.SetOFNTitle("Import Script")
+		if dlg.DoModal()!=win32con.IDOK:
+			return 0
+
+		pathName = dlg.GetPathName()
+		
+	# If already imported, dont look for package
+	path, modName = os.path.split(pathName)
+	modName, modExt = os.path.splitext(modName)
+	newPath = None
+	# Search sys.modules for a module whose __file__ matches this path
+	# (comparing against the .py source even for .pyc/.pyo modules).
+	for key, mod in sys.modules.items():
+		if hasattr(mod, '__file__'):
+			fname = mod.__file__
+			base, ext = os.path.splitext(fname)
+			if string.lower(ext) in ['.pyo', '.pyc']:
+				ext = '.py'
+			fname = base + ext
+			if win32ui.ComparePath(fname, pathName):
+				modName = key
+				break
+	else: # for not broken
+		modName, newPath = GetPackageModuleName(pathName)
+		if newPath: sys.path.append(newPath)
+
+	if sys.modules.has_key(modName):
+		bNeedReload = 1
+		what = "reload"
+	else:
+		what = "import"
+		bNeedReload = 0
+	
+	win32ui.SetStatusText(string.capitalize(what)+'ing module...',1)
+	win32ui.DoWaitCursor(1)
+#	win32ui.GetMainFrame().BeginWaitCursor()
+	try:
+		# always do an import, as it is cheap is already loaded.  This ensures
+		# it is in our name space.
+		codeObj = compile('import '+modName,'<auto import>','exec')
+		exec codeObj in __main__.__dict__
+		if bNeedReload:
+			reload(sys.modules[modName])
+#			codeObj = compile('reload('+modName+')','<auto import>','eval')
+#			exec codeObj in __main__.__dict__
+		win32ui.SetStatusText('Successfully ' + what + "ed module '"+modName+"'")
+	except:
+		_HandlePythonFailure(what)
+	win32ui.DoWaitCursor(0)
+
+def CheckFile():
+	""" This code looks for the current window, and gets Python to check it
+	without actually executing any code (ie, by compiling only)
+	"""
+	try:
+		pathName = GetActiveFileName()
+	except KeyboardInterrupt:
+		return
+
+	what = "check"	
+	win32ui.SetStatusText(string.capitalize(what)+'ing module...',1)
+	win32ui.DoWaitCursor(1)
+	try:
+		f = open(pathName)
+	except IOError, details:
+		print "Cant open file '%s' - %s" % (pathName, details)
+		return
+	try:
+		code = f.read() + "\n"
+	finally:
+		f.close()
+	try:
+		# compile() is called only for its side-effect of raising
+		# SyntaxError - the resulting code object is deliberately unused.
+		codeObj = compile(code, pathName,'exec')
+		if RunTabNanny(pathName):
+			win32ui.SetStatusText("Python and the TabNanny successfully checked the file '"+os.path.basename(pathName)+"'")
+	except SyntaxError:
+		_HandlePythonFailure(what, pathName)
+	except:
+		traceback.print_exc()
+		_HandlePythonFailure(what)
+	win32ui.DoWaitCursor(0)
+
+def RunTabNanny(filename):
+	"""Run the tabnanny over the file, jumping to the first complaint.
+
+	Returns 1 when the file is clean, 0 when the nanny complained, None
+	when no tabnanny module could be found.
+	"""
+	import cStringIO
+	tabnanny = FindTabNanny()
+	if tabnanny is None:
+		win32ui.MessageBox("The TabNanny is not around, so the children can run amok!" )
+		return
+		
+	# Capture the tab-nanny output
+	newout = cStringIO.StringIO()
+	old_out = sys.stderr, sys.stdout
+	sys.stderr = sys.stdout = newout
+	try:
+		tabnanny.check(filename)
+	finally:
+		# Restore output
+		sys.stderr, sys.stdout = old_out
+	data = newout.getvalue()
+	if data:
+		try:
+			# The second whitespace-separated token of the output is the line number.
+			lineno = string.split(data)[1]
+			lineno = int(lineno)
+			_JumpToPosition(filename, lineno)
+			try: # Try and display whitespace
+				GetActiveEditControl().SCISetViewWS(1)
+			except:
+				pass
+			win32ui.SetStatusText("The TabNanny found trouble at line %d" % lineno)
+		except (IndexError, TypeError, ValueError):
+			print "The tab nanny complained, but I cant see where!"
+			print data
+		return 0
+	return 1
+
+def _JumpToPosition(fileName, lineno, col = 1):
+	# Convenience wrapper - jump without selecting text or scrolling to top.
+	JumpToDocument(fileName, lineno, col)
+
+def JumpToDocument(fileName, lineno=0, col = 1, nChars = 0, bScrollToTop = 0):
+	"""Open (or re-activate) fileName and optionally move the caret.
+
+	Returns 1 on success, 0 if the document could not be opened.
+	"""
+	# Jump to the position in a file.
+	# If lineno is <= 0, dont move the position - just open/restore.
+	# if nChars > 0, select that many characters.
+	# if bScrollToTop, the specified line will be moved to the top of the window
+	#  (eg, bScrollToTop should be false when jumping to an error line to retain the
+	#  context, but true when jumping to a method defn, where we want the full body.
+	doc = win32ui.GetApp().OpenDocumentFile(fileName)
+	if doc is None: return 0
+	frame = doc.GetFirstView().GetParentFrame()
+	try:
+		view = frame.GetEditorView()
+		if frame.GetActiveView() != view:
+			frame.SetActiveView(view)
+		frame.AutoRestore()
+	except AttributeError: # Not an editor frame??
+		view = doc.GetFirstView()
+	if lineno > 0:
+		charNo = view.LineIndex(lineno-1)
+		start = charNo + col - 1
+		size = view.GetTextLength()
+		try:
+			view.EnsureCharsVisible(charNo)
+		except AttributeError:
+			print "Doesnt appear to be one of our views?"
+		# Clamp the selection to the document size.
+		view.SetSel(min(start, size), min(start + nChars, size))
+	if bScrollToTop:
+		curTop = view.GetFirstVisibleLine()
+		nScroll = (lineno-1) - curTop
+		view.LineScroll(nScroll, 0)
+	view.SetFocus()
+	return 1
+
+def _HandlePythonFailure(what, syntaxErrorPathName = None):
+	"""Report the exception currently being handled in the status bar.
+
+	For SyntaxErrors the editor is jumped to the offending position; if
+	the error carries no usable file name (empty or "<string>"),
+	syntaxErrorPathName is substituted.  Other exceptions also get their
+	traceback printed.
+	"""
+	typ, details, tb = sys.exc_info()
+	if isinstance(details, SyntaxError):
+		try:
+			# Old-style SyntaxError unpacking: (msg, (file, line, col, text)).
+			msg, (fileName, line, col, text) = details
+			if (not fileName or fileName =="<string>") and syntaxErrorPathName:
+				fileName = syntaxErrorPathName
+			_JumpToPosition(fileName, line, col)
+		except (TypeError, ValueError):
+			msg = str(details)
+		win32ui.SetStatusText('Failed to ' + what + ' - syntax error - %s' % msg)
+	else:	
+		traceback.print_exc()
+		win32ui.SetStatusText('Failed to ' + what + ' - ' + str(details) )
+	tb = None # Clean up a cycle.
+
+# Find the Python TabNanny in either the standard library or the Python Tools/Scripts directory.
+def FindTabNanny():
+	"""Import and return the tabnanny module, or None if it cannot be found.
+
+	Tries the standard library first, then falls back to the Tools/Scripts
+	directory of the Python installation registered under HKLM.
+	"""
+	try:
+		return __import__("tabnanny")
+	except ImportError:
+		pass
+	# OK - not in the standard library - go looking.
+	filename = "tabnanny.py"
+	try:
+		path = win32api.RegQueryValue(win32con.HKEY_LOCAL_MACHINE, "SOFTWARE\\Python\\PythonCore\\%s\\InstallPath" % (sys.winver))
+	except win32api.error:
+		print "WARNING - The Python registry does not have an 'InstallPath' setting"
+		print "          The file '%s' can not be located" % (filename)
+		return None
+	fname = os.path.join(path, "Tools\\Scripts\\%s" % filename)
+	try:
+		os.stat(fname)
+	except os.error:
+		print "WARNING - The file '%s' can not be located in path '%s'" % (filename, path)
+		return None
+
+	tabnannyhome, tabnannybase = os.path.split(fname)
+	tabnannybase = os.path.splitext(tabnannybase)[0]
+	# Put tab nanny at the top of the path.
+	sys.path.insert(0, tabnannyhome)
+	try:
+		return __import__(tabnannybase)
+	finally:
+		# remove the tab-nanny from the path
+		del sys.path[0]
+		
+def LocatePythonFile( fileName, bBrowseIfDir = 1 ):
+	"""Given a file name, return a fully qualified file name, or None.
+
+	If the name is not an existing file, each sys.path entry is tried,
+	both as a directory to browse (when bBrowseIfDir) and with ".py"
+	appended.
+	"""
+	# first look for the exact file as specified
+	if not os.path.isfile(fileName):
+		# Go looking!
+		baseName = fileName
+		for path in sys.path:
+			fileName = os.path.join(path, baseName)
+			if os.path.isdir(fileName):
+				if bBrowseIfDir:
+					# Resolved to a directory - let the user pick a file from it.
+					d=win32ui.CreateFileDialog(1, "*.py", None, 0, "Python Files (*.py)|*.py|All files|*.*")
+					d.SetOFNInitialDir(fileName)
+					rc=d.DoModal()
+					if rc==win32con.IDOK:
+						fileName = d.GetPathName()
+						break
+					else:
+						return None
+			else:
+				# Try the name with a .py extension appended.
+				fileName = fileName + ".py"
+				if os.path.isfile(fileName):
+					break # Found it!
+
+		else:	# for not broken out of
+			return None
+	return win32ui.FullPath(fileName)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/sgrepmdi.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/sgrepmdi.py
new file mode 100644
index 0000000..9ff340f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/sgrepmdi.py
@@ -0,0 +1,523 @@
+#SGrepMDI is by Gordon McMillan (gmcm@hypernet.com)
+#It does basically what Find In Files does in MSVC with a couple enhancements.
+# - It saves any directories in the app's ini file (if you want to get rid
+#	of them you'll have to edit the file)
+# - "Directories" can be directories, 
+#  -	semicolon separated lists of "directories",
+#  -	environment variables that evaluate to "directories",
+#  -	registry path names that evaluate to "directories",
+#  -	all of which is recursive, so you can mix them all up.
+# - It is MDI, so you can 'nest' greps and return to earlier ones,
+#	(ie, have multiple results open at the same time)
+# - Like FIF, double clicking a line opens an editor and takes you to the line.
+# - You can highlight text, right click and start a new grep with the selected
+#	text as search pattern and same directories etc as before.
+# - You can save grep parameters (so you don't lose your hardearned pattern)
+#	from File|Save
+# - You can save grep results by right clicking in the result window.
+# Hats off to Mark Hammond for providing an environment where I could cobble
+# something like this together in a couple evenings!
+
+import win32ui
+import win32api
+from pywin.mfc import docview, dialog, window
+import win32con
+import string
+import re
+import glob
+import os
+import stat
+import glob
+import scriptutils
+
+def getsubdirs(d):
+	"""Recursively collect all subdirectories of d (Windows paths) into a flat list."""
+	dlist = []
+	flist = glob.glob(d+'\\*')
+	for f in flist:
+		if os.path.isdir(f):
+			dlist.append(f)
+			dlist = dlist + getsubdirs(f)
+	return dlist
+
+class dirpath:
+	"""A list-like collection of directories resolved from a ';'-separated spec.
+
+	Each element of the spec may be a directory, an environment variable
+	whose value is another spec, or an HKEY_* registry path whose value is
+	another spec - all resolved recursively.  Directories are lowercased
+	and de-duplicated; with recurse=1 all subdirectories are included too.
+	Order follows dict.keys(), so it is effectively arbitrary.
+	"""
+	def __init__(self, str, recurse=0):
+		dp = string.split(str, ';')
+		dirs = {}
+		for d in dp:
+			if os.path.isdir(d):
+				d = string.lower(d)
+				if not dirs.has_key(d):
+					dirs[d] = None
+					if recurse:
+						subdirs = getsubdirs(d)
+						for sd in subdirs:
+							sd = string.lower(sd)
+							if not dirs.has_key(sd):
+								dirs[sd] = None
+			elif os.path.isfile(d):
+				# Plain files are silently ignored.
+				pass
+			else:
+				x = None
+				if os.environ.has_key(d):
+					# An environment variable holding another spec.
+					x = dirpath(os.environ[d])
+				elif d[:5] == 'HKEY_':
+					keystr = string.split(d,'\\')
+					try:
+						root = eval('win32con.'+keystr[0])
+					except:
+						win32ui.MessageBox("Can't interpret registry key name '%s'" % keystr[0])
+					try:
+						subkey = string.join(keystr[1:], '\\')
+						val = win32api.RegQueryValue(root, subkey)
+						if val:
+							x = dirpath(val)
+						else:
+							win32ui.MessageBox("Registry path '%s' did not return a path entry" % d)
+					except:
+						win32ui.MessageBox("Can't interpret registry key value: %s" % keystr[1:])
+				else:
+					win32ui.MessageBox("Directory '%s' not found" % d)
+				if x:
+					for xd in x:
+						if not dirs.has_key(xd):
+							dirs[xd] = None
+							if recurse:
+								subdirs = getsubdirs(xd)
+								for sd in subdirs:
+									sd = string.lower(sd)
+									if not dirs.has_key(sd):
+										dirs[sd] = None
+		self.dirs = []
+		for d in dirs.keys():
+			self.dirs.append(d)
+
+	def __getitem__(self, key):
+		return self.dirs[key]
+	def __len__(self):
+		return len(self.dirs)
+	def __setitem__(self, key, value):
+		self.dirs[key] = value
+	def __delitem__(self, key):
+		del self.dirs[key]
+	def __getslice__(self, lo, hi):
+		return self.dirs[lo:hi]
+	def __setslice__(self, lo, hi, seq):
+		self.dirs[lo:hi] = seq
+	def __delslice__(self, lo, hi):
+		del self.dirs[lo:hi]
+	def __add__(self, other):
+		# NOTE(review): a plain list passes the type test but has no .dirs
+		# attribute, so "dirpath + list" raises AttributeError, and any
+		# other type silently returns None - confirm intended behaviour.
+		if type(other) == type(self) or type(other) == type([]):
+			return self.dirs + other.dirs
+	def __radd__(self, other):
+		if type(other) == type(self) or type(other) == type([]):
+			return other.dirs + self.dirs
+
+# Group(1) is the filename, group(2) is the lineno.
+#regexGrepResult=regex.compile("^\\([a-zA-Z]:.*\\)(\\([0-9]+\\))")
+
+# Matches result lines of the form "c:\path\file(123)" as emitted by
+# GrepDocument.SearchFile.
+regexGrep=re.compile(r"^([a-zA-Z]:[^(]*)\(([0-9]+)\)")
+
+#these are the atom numbers defined by Windows for basic dialog controls
+
+BUTTON    = 0x80
+EDIT      = 0x81
+STATIC    = 0x82
+LISTBOX   = 0x83
+SCROLLBAR = 0x84
+COMBOBOX  = 0x85
+
+class GrepTemplate(docview.RichEditDocTemplate):
+	"""Doc template for grep windows, registered with the app at construction.
+
+	Also acts as a hand-off point for grep parameters: setParams() stashes
+	a parameter string which the next new GrepDocument retrieves via
+	readParams().
+	"""
+	def __init__(self):
+		docview.RichEditDocTemplate.__init__(self, win32ui.IDR_TEXTTYPE, GrepDocument, GrepFrame, GrepView)
+		self.SetDocStrings("\nGrep\nGrep\nGrep params (*.grep)\n.grep\n\n\n")
+		win32ui.GetApp().AddDocTemplate(self)
+		self.docparams = None
+
+	def MatchDocType(self, fileName, fileType):
+		# Claim ".grep" files; an already-open document is returned directly.
+		doc = self.FindOpenDocument(fileName)
+		if doc: return doc
+		ext = string.lower(os.path.splitext(fileName)[1])
+		if ext =='.grep': 
+			return win32ui.CDocTemplate_Confidence_yesAttemptNative
+		return win32ui.CDocTemplate_Confidence_noAttempt
+
+	def setParams(self, params):
+		self.docparams = params
+
+	def readParams(self):
+		# One-shot read: the stashed params are cleared after use.
+		tmp = self.docparams
+		self.docparams = None
+		return tmp
+
+class GrepFrame(window.MDIChildWnd):
+	"""MDI child frame hosting a grep results view."""
+	# The template and doc params will one day be removed.
+	def __init__(self, wnd = None):
+		window.MDIChildWnd.__init__(self, wnd)
+
+class GrepDocument(docview.RichEditDoc):
+	"""The grep 'document': holds the search parameters and drives the search.
+
+	The search itself runs from an idle handler (SearchFile) so the UI
+	stays responsive; hits are appended to the first view as lines of the
+	form "file(lineno) text".  Saving the document persists only the
+	parameters (a .grep file), not the results.
+	"""
+	def __init__(self, template):
+		docview.RichEditDoc.__init__(self, template)
+		self.dirpattern = ''
+		self.filpattern = ''
+		self.greppattern = ''
+		self.casesensitive = 1
+		self.recurse = 1
+		self.verbose = 0
+
+	def OnOpenDocument(self, fnm):
+		#this bizarre stuff with params is so right clicking in a result window
+		#and starting a new grep can communicate the default parameters to the
+		#new grep.
+		try:
+			params = open(fnm,'r').read()
+		except:
+			params = None
+		self.setInitParams(params)
+		return self.OnNewDocument()
+
+	def OnCloseDocument(self):
+		# Make sure the idle-time search is stopped before we go away.
+		try:
+			win32ui.GetApp().DeleteIdleHandler(self.SearchFile)
+		except:
+			pass
+		return self._obj_.OnCloseDocument()
+
+	def saveInitParams(self):
+		# Only save the flags, not the text boxes.
+		paramstr = "\t%s\t\t%d\t%d" % (self.filpattern, self.casesensitive, self.recurse)
+		win32ui.WriteProfileVal("Grep", "Params", paramstr)
+
+	def setInitParams(self, paramstr):
+		# paramstr is tab-separated: dirpattern, filpattern, greppattern,
+		# casesensitive, recurse, verbose (missing fields are defaulted).
+		if paramstr is None:
+			paramstr = win32ui.GetProfileVal("Grep", "Params", '\t\t\t1\t0\t0')
+		params = string.split(paramstr, '\t')
+		if len(params) < 3:
+			params = params + ['']*(3-len(params))
+		if len(params) < 6:
+			params = params + [0]*(6-len(params))
+		self.dirpattern = params[0]
+		self.filpattern = params[1]
+		self.greppattern = params[2]
+		self.casesensitive = int(params[3])
+		self.recurse = int(params[4])
+		self.verbose = int(params[5])
+		# setup some reasonable defaults.
+		if not self.dirpattern:
+			self.dirpattern = os.getcwd()
+		if not self.filpattern:
+			self.filpattern = "*.py"
+
+	def OnNewDocument(self):
+		if self.dirpattern == '':
+			self.setInitParams(greptemplate.readParams())
+		d = GrepDialog(self.dirpattern, self.filpattern, self.greppattern, self.casesensitive, self.recurse, self.verbose)
+		if d.DoModal() == win32con.IDOK:
+			self.dirpattern = d['dirpattern']
+			self.filpattern = d['filpattern']
+			self.greppattern = d['greppattern']
+			self.casesensitive = d['casesensitive']
+			self.recurse = d['recursive']
+			self.verbose = d['verbose']
+			self.doSearch()
+			self.saveInitParams()
+			return 1
+		return 0 # cancelled - return zero to stop frame creation.
+
+	def doSearch(self):
+		# Resolve the directory spec, echo the parameters to the view, and
+		# kick off the idle-time search.
+		self.dp = dirpath(self.dirpattern, self.recurse)
+		self.SetTitle("Grep for %s in %s" % (self.greppattern, self.filpattern))
+		#self.text = []
+		self.GetFirstView().Append('#Search '+self.dirpattern+'\n')
+		if self.verbose:
+			self.GetFirstView().Append('#   ='+`self.dp.dirs`+'\n')
+		self.GetFirstView().Append('# Files '+self.filpattern+'\n')
+		self.GetFirstView().Append('#   For '+self.greppattern+'\n')
+		self.fplist = string.split(self.filpattern,';')
+		if self.casesensitive:
+			self.pat = re.compile(self.greppattern)
+		else:
+			self.pat = re.compile(self.greppattern, re.IGNORECASE)
+		win32ui.SetStatusText("Searching.  Please wait...", 0)
+		# dpndx/fpndx index the directory list and file-pattern list; fndx
+		# walks the current glob result - all advanced by SearchFile.
+		self.dpndx = self.fpndx = 0
+		self.fndx = -1
+		if not self.dp:
+			self.GetFirstView().Append("# ERROR: '%s' does not resolve to any search locations" % self.dirpattern)
+			self.SetModifiedFlag(0)
+		else:
+			self.flist = glob.glob(self.dp[0]+'\\'+self.fplist[0])
+			win32ui.GetApp().AddIdleHandler(self.SearchFile)
+
+	def SearchFile(self, handler, count):
+		# Idle handler: search one file per call.  Returns 1 to be called
+		# again, 0 once every pattern/directory combination is exhausted.
+		self.fndx = self.fndx + 1
+		if self.fndx < len(self.flist):
+			f = self.flist[self.fndx]
+			if self.verbose:
+				self.GetFirstView().Append('# ..'+f+'\n')
+			win32ui.SetStatusText("Searching "+f, 0)
+			lines = open(f, 'r').readlines()
+			for i in range(len(lines)):
+				line = lines[i]
+				if self.pat.search(line) != None:
+					self.GetFirstView().Append(f+'('+`i+1` + ') '+line)
+		else:
+			# Current file list exhausted - advance to the next file
+			# pattern, then to the next directory.
+			self.fndx = -1
+			self.fpndx = self.fpndx + 1
+			if self.fpndx < len(self.fplist):
+				self.flist = glob.glob(self.dp[self.dpndx] + '\\' + self.fplist[self.fpndx])
+			else:
+				self.fpndx = 0
+				self.dpndx = self.dpndx + 1
+				if self.dpndx < len(self.dp):
+					self.flist = glob.glob(self.dp[self.dpndx] + '\\' + self.fplist[self.fpndx])
+				else:
+					win32ui.SetStatusText("Search complete.", 0)
+					self.SetModifiedFlag(0) # default to not modified.
+					try:
+						win32ui.GetApp().DeleteIdleHandler(self.SearchFile)
+					except:
+						pass
+					return 0
+		return 1
+
+	def GetParams(self):
+		# Serialize the parameters as a tab-separated string - the inverse
+		# of setInitParams.
+		return self.dirpattern+'\t'+self.filpattern+'\t'+self.greppattern+'\t'+`self.casesensitive`+'\t'+`self.recurse`+'\t'+`self.verbose`
+
+	def OnSaveDocument(self, filename):
+		# Saving a .grep document saves just the parameter line.
+#		print 'OnSaveDocument() filename=',filename
+		savefile = open(filename,"wb")
+		txt = self.GetParams()+'\n'
+#		print 'writing',txt
+		savefile.write(txt)
+		savefile.close()
+		self.SetModifiedFlag(0)
+		return 1
+
+# Command IDs for the context menu.  NOTE(review): the first two use 0xe4xx
+# while the last two use 0x4xx - possibly a typo, though they only need to
+# be unique within the view's command map.
+ID_OPEN_FILE = 0xe400
+ID_GREP	 = 0xe401
+ID_SAVERESULTS = 0x402
+ID_TRYAGAIN = 0x403
+
+class GrepView(docview.RichEditView):
+	"""Rich-edit view showing grep results; double-click jumps to the hit."""
+	def __init__(self, doc):
+		docview.RichEditView.__init__(self, doc)
+		self.SetWordWrap(win32ui.CRichEditView_WrapNone)
+		self.HookHandlers()
+
+	def OnInitialUpdate(self):
+		rc = self._obj_.OnInitialUpdate()
+		# Use a fixed-pitch font for the results.
+		format = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New')
+		self.SetDefaultCharFormat(format)
+		return rc
+
+	def HookHandlers(self):
+		self.HookMessage(self.OnRClick, win32con.WM_RBUTTONDOWN)
+		self.HookCommand(self.OnCmdOpenFile, ID_OPEN_FILE)
+		self.HookCommand(self.OnCmdGrep, ID_GREP)
+		self.HookCommand(self.OnCmdSave, ID_SAVERESULTS)
+		self.HookCommand(self.OnTryAgain, ID_TRYAGAIN)
+		self.HookMessage(self.OnLDblClick,win32con.WM_LBUTTONDBLCLK)
+
+	def OnLDblClick(self,params):
+		# Double-clicking a "file(lineno)" result line opens the file there.
+		line = self.GetLine()
+		regexGrepResult = regexGrep.match(line)
+		if regexGrepResult:
+			fname = regexGrepResult.group(1)
+			line = string.atoi(regexGrepResult.group(2))
+			scriptutils.JumpToDocument(fname, line)
+			return 0	# dont pass on
+		return 1	# pass it on by default.
+
+	def OnRClick(self, params):
+		# Build the context menu; extra items appear when the clicked line
+		# is a result line, or when there is a selection to grep for.
+		menu = win32ui.CreatePopupMenu()
+		flags=win32con.MF_STRING|win32con.MF_ENABLED
+		lineno = self._obj_.LineFromChar(-1)	#selection or current line
+		line = self._obj_.GetLine(lineno)
+		regexGrepResult = regexGrep.match(line)
+		if regexGrepResult:
+			self.fnm = regexGrepResult.group(1)
+			self.lnnum = string.atoi(regexGrepResult.group(2))
+			menu.AppendMenu(flags, ID_OPEN_FILE, "&Open "+self.fnm)
+			menu.AppendMenu(win32con.MF_SEPARATOR)
+		menu.AppendMenu(flags, ID_TRYAGAIN, "&Try Again")
+		charstart, charend = self._obj_.GetSel()
+		if charstart != charend:
+			linestart = self._obj_.LineIndex(lineno)
+			self.sel = line[charstart-linestart:charend-linestart]
+			menu.AppendMenu(flags, ID_GREP, "&Grep for "+self.sel)
+			menu.AppendMenu(win32con.MF_SEPARATOR)
+		menu.AppendMenu(flags, win32ui.ID_EDIT_CUT, 'Cu&t')
+		menu.AppendMenu(flags, win32ui.ID_EDIT_COPY, '&Copy')
+		menu.AppendMenu(flags, win32ui.ID_EDIT_PASTE, '&Paste')
+		menu.AppendMenu(flags, win32con.MF_SEPARATOR);
+		menu.AppendMenu(flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all')
+		menu.AppendMenu(flags, win32con.MF_SEPARATOR);
+		menu.AppendMenu(flags, ID_SAVERESULTS, 'Sa&ve results')
+		menu.TrackPopupMenu(params[5])
+		return 0
+
+	def OnCmdOpenFile(self, cmd, code):
+		doc = win32ui.GetApp().OpenDocumentFile(self.fnm)
+		if doc:
+			vw = doc.GetFirstView()
+			#hope you have an editor that implements GotoLine()!
+			# NOTE(review): self.lnnum is already an int (see OnRClick), so
+			# string.atoi() here raises and is swallowed by the bare except,
+			# meaning GotoLine is likely never reached - confirm.
+			try:
+				vw.GotoLine(string.atoi(self.lnnum))
+			except:
+				pass
+		return 0
+
+	def OnCmdGrep(self, cmd, code):
+		# Start a new grep using the current selection as the pattern.
+		curparamsstr = self.GetDocument().GetParams()
+		params = string.split(curparamsstr,'\t')
+		params[2] = self.sel
+		greptemplate.setParams(string.join(params,'\t'))
+		greptemplate.OpenDocumentFile()
+		return 0
+	
+	def OnTryAgain(self, cmd, code):
+		# Re-run the grep with identical parameters in a new document.
+		greptemplate.setParams(self.GetDocument().GetParams())
+		greptemplate.OpenDocumentFile()
+		return 0
+
+	def OnCmdSave(self, cmd, code):
+		flags = win32con.OFN_OVERWRITEPROMPT 
+		dlg = win32ui.CreateFileDialog(0, None, None, flags, "Text Files (*.txt)|*.txt||", self)
+		dlg.SetOFNTitle("Save Results As")
+		if dlg.DoModal() == win32con.IDOK:
+			pn = dlg.GetPathName()
+			self._obj_.SaveFile(pn)
+		return 0
+		
+	def Append(self, strng):
+		# Append strng at the very end of the control's text.
+		numlines = self.GetLineCount()
+		endpos = self.LineIndex(numlines-1) + len(self.GetLine(numlines-1))
+		self.SetSel(endpos, endpos)
+		self.ReplaceSel(strng)
+
+
+class GrepDialog(dialog.Dialog):
+	"""Modal dialog gathering the grep parameters (pattern, dirs, files, flags)."""
+	def __init__(self, dp, fp, gp, cs, r, v):
+		style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
+		CS = win32con.WS_CHILD | win32con.WS_VISIBLE
+		tmp = [ ["Grep", (0, 0, 210, 90), style, None, (8, "MS Sans Serif")], ]
+		tmp.append([STATIC, "Grep For:",            -1, (7,   7,  50,  9), CS ])
+		tmp.append([EDIT,   gp,                    101, (52,  7, 144,  11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER])
+		tmp.append([STATIC, "Directories:",         -1, (7,  20,  50,  9), CS ])
+		tmp.append([EDIT,   dp,                    102, (52, 20, 128,  11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER])
+		tmp.append([BUTTON, '...',                 110, (182,20,  16,  11), CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP]) 
+		tmp.append([STATIC, "File types:",          -1, (7,  33,  50,  9), CS ])
+		tmp.append([EDIT,   fp,                    103, (52, 33, 128,  11), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER ])
+		tmp.append([BUTTON, '...',                 111, (182,33,  16,  11), CS | win32con.BS_PUSHBUTTON | win32con.WS_TABSTOP]) 
+		tmp.append([BUTTON,'Case sensitive',       104, (7,  45,  72,  9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP])
+		tmp.append([BUTTON,'Subdirectories',       105, (7,  56,  72,  9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP])
+		tmp.append([BUTTON,'Verbose',              106, (7,  67,  72,  9), CS | win32con.BS_AUTOCHECKBOX | win32con.BS_LEFTTEXT| win32con.WS_TABSTOP])
+		tmp.append([BUTTON,'OK',         win32con.IDOK, (166,53,  32, 12), CS | win32con.BS_DEFPUSHBUTTON| win32con.WS_TABSTOP])
+		tmp.append([BUTTON,'Cancel', win32con.IDCANCEL, (166,67,  32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP])
+		dialog.Dialog.__init__(self, tmp)
+		# DDX bindings: control ID <-> self[...] data member.
+		self.AddDDX(101,'greppattern')
+		self.AddDDX(102,'dirpattern')
+		self.AddDDX(103,'filpattern')
+		self.AddDDX(104,'casesensitive')
+		self.AddDDX(105,'recursive')
+		self.AddDDX(106,'verbose')
+		self._obj_.data['greppattern'] = gp
+		self._obj_.data['dirpattern']  = dp
+		self._obj_.data['filpattern']  = fp
+		self._obj_.data['casesensitive']  = cs
+		self._obj_.data['recursive'] = r
+		self._obj_.data['verbose']  = v
+		self.HookCommand(self.OnMoreDirectories, 110)
+		self.HookCommand(self.OnMoreFiles, 111)
+
+	def OnMoreDirectories(self, cmd, code):
+		self.getMore('Grep\\Directories', 'dirpattern')
+
+	def OnMoreFiles(self, cmd, code):
+		self.getMore('Grep\\File Types', 'filpattern')
+		
+	def getMore(self, section, key):
+		# Let the user pick (and add) saved values for the given field from
+		# the INI section, persisting any newly typed entries.
+		self.UpdateData(1)
+		#get the items out of the ini file
+		ini = win32ui.GetProfileFileName()
+		secitems = win32api.GetProfileSection(section, ini)
+		items = []
+		for secitem in secitems:
+			items.append(string.split(secitem,'=')[1])
+		dlg = GrepParamsDialog(items)
+		if dlg.DoModal() == win32con.IDOK:
+			itemstr = string.join(dlg.getItems(),';')
+			self._obj_.data[key] = itemstr
+			#update the ini file with dlg.getNew()
+			i = 0
+			newitems = dlg.getNew()
+			if newitems:
+				items = items + newitems
+				for item in items:
+					win32api.WriteProfileVal(section, `i`, item, ini)
+					i = i + 1
+			self.UpdateData(0)
+
+	def OnOK(self):
+		# Refuse to close while any of the three text fields is empty.
+		self.UpdateData(1)
+		for id, name in [(101,'greppattern'), (102,'dirpattern'), (103,'filpattern')]:
+			if not self[name]:
+				self.GetDlgItem(id).SetFocus()
+				win32api.MessageBeep()
+				win32ui.SetStatusText("Please enter a value")
+				return
+		self._obj_.OnOK()
+
+class GrepParamsDialog(dialog.Dialog):
+	"""List dialog used by GrepDialog.getMore: pick saved values and/or add new ones."""
+	def __init__(self, items):
+		self.items = items
+		self.newitems = []
+		style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
+		CS = win32con.WS_CHILD | win32con.WS_VISIBLE
+		tmp = [ ["Grep Parameters", (0, 0, 205, 100), style, None, (8, "MS Sans Serif")], ]
+		tmp.append([LISTBOX, '',                   107, (7,   7,  150,  72), CS | win32con.LBS_MULTIPLESEL| win32con.LBS_STANDARD | win32con.LBS_HASSTRINGS | win32con.WS_TABSTOP | win32con.LBS_NOTIFY])
+		tmp.append([BUTTON,'OK',         win32con.IDOK, (167, 7,  32, 12), CS | win32con.BS_DEFPUSHBUTTON| win32con.WS_TABSTOP])
+		tmp.append([BUTTON,'Cancel', win32con.IDCANCEL, (167,23,  32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP])
+		tmp.append([STATIC,'New:',                  -1, (2,  83,  15,  12), CS])
+		tmp.append([EDIT,  '',                     108, (18, 83,  139,  12), CS | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL | win32con.WS_BORDER])
+		tmp.append([BUTTON,'Add',                  109, (167,83,  32, 12), CS | win32con.BS_PUSHBUTTON| win32con.WS_TABSTOP]) 
+		dialog.Dialog.__init__(self, tmp)
+		self.HookCommand(self.OnAddItem, 109)
+		self.HookCommand(self.OnListDoubleClick, 107)
+
+	def OnInitDialog(self):
+		# Populate the listbox with the previously saved items.
+		lb = self.GetDlgItem(107)
+		for item in self.items:
+			lb.AddString(item)
+		return self._obj_.OnInitDialog()
+
+	def OnAddItem(self, cmd, code):
+		# Move the 'New:' edit field into the list (pre-selected) and
+		# remember it so the caller can persist it via getNew().
+		eb = self.GetDlgItem(108)
+		item = eb.GetLine(0)
+		self.newitems.append(item)
+		lb = self.GetDlgItem(107)
+		i = lb.AddString(item)
+		lb.SetSel(i, 1)
+		return 1
+
+	def OnListDoubleClick(self, cmd, code):
+		if code == win32con.LBN_DBLCLK:
+			self.OnOK()
+			return 1
+
+	def OnOK(self):
+		# Snapshot the selection before the control is destroyed.
+		lb = self.GetDlgItem(107)
+		self.selections = lb.GetSelTextItems()
+		self._obj_.OnOK()
+
+	def getItems(self):
+		# The items selected when OK was pressed.
+		return self.selections
+
+	def getNew(self):
+		# Items typed into the 'New:' field during this dialog session.
+		return self.newitems
+
+# Support module reload: if a template instance survives from a previous
+# import, unregister it before creating a fresh one.
+try:
+	win32ui.GetApp().RemoveDocTemplate(greptemplate)
+except NameError:
+	pass
+
+greptemplate = GrepTemplate() 
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/startup.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/startup.py
new file mode 100644
index 0000000..c8554d8c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/startup.py
@@ -0,0 +1,62 @@
+# startup.py
+#
+"The main application startup code for PythonWin."
+
+#
+# This does the basic command line handling.
+
+# Keep this as short as possible, because error output is only redirected if
+# this runs OK.  Errors in imported modules are much better - the messages go somewhere (not any more :-)
+
+import sys
+import win32ui
+import strop
+
+# You may wish to redirect error output somewhere useful if you have startup errors.
+# eg, 'import win32traceutil' will do this for you.
+# import win32traceutil # Just uncomment this line to see error output!
+
+# An old class I used to use - generally only useful if Pythonwin is running under MSVC
+#class DebugOutput:
+#	softspace=1
+#	def write(self,message):
+#		win32ui.OutputDebug(message)
+#sys.stderr=sys.stdout=DebugOutput()
+
+# To fix a problem with Pythonwin when started from the Pythonwin directory,
+# we update the pywin path to ensure it is absolute.
+# If it is indeed relative, it will be relative to our current directory.
+# If it is already absolute, then this will have no effect.
+import pywin, pywin.framework
+pywin.__path__[0] = win32ui.FullPath(pywin.__path__[0])
+pywin.framework.__path__[0] = win32ui.FullPath(pywin.framework.__path__[0])
+
+# Make a few weird sys values.  This is so later we can clobber sys.argv to trick
+# scripts when running under a GUI environment.
+
+moduleName = "intpyapp"
+sys.appargvoffset = 0
+sys.appargv = sys.argv[:]
+# Must check for /app param here.
+# NOTE(review): this tests sys.argv[0] (normally the program name) rather
+# than sys.argv[1] - confirm the hosting .EXE strips the program name from
+# argv before this runs.
+if len(sys.argv)>=2 and strop.lower(sys.argv[0])=='/app': 
+	import cmdline
+	moduleName = cmdline.FixArgFileName(sys.argv[1])
+	sys.appargvoffset = 2
+	newargv=sys.argv[sys.appargvoffset:]
+#	newargv.insert(0, sys.argv[0])
+	sys.argv = newargv
+	
+# Import the main application module (default: intpyapp).
+exec "import %s\n" % moduleName
+
+try:
+	win32ui.GetApp()._obj_
+	# This worked - an app already exists - do nothing more
+except (AttributeError, win32ui.error):
+	# This means either no app object exists at all, or the one
+	# that does exist does not have a Python class (ie, was created
+	# by the host .EXE).  In this case, we do the "old style" init...
+	import app
+	if app.AppBuilder is None:
+		raise TypeError, "No application object has been registered"
+
+	app.App = app.AppBuilder()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/stdin.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/stdin.py
new file mode 100644
index 0000000..2e39f41
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/stdin.py
@@ -0,0 +1,180 @@
+# Copyright (c) 2000 David Abrahams. Permission to copy, use, modify, sell
+# and distribute this software is granted provided this copyright
+# notice appears in all copies. This software is provided "as is" without
+# express or implied warranty, and with no claim as to its suitability for
+# any purpose.
+"""Provides a class Stdin which can be used to emulate the regular old
+sys.stdin for the PythonWin interactive window. Right now it just pops
+up a raw_input() dialog. With luck, someone will integrate it into the
+actual PythonWin interactive window someday.
+
+WARNING: Importing this file automatically replaces sys.stdin with an
+instance of Stdin (below). This is useful because you can just open
+Stdin.py in PythonWin and hit the import button to get it set up right
+if you don't feel like changing PythonWin's source. To put things back
+the way they were, simply use this magic incantation:
+    import sys
+    sys.stdin = sys.stdin.real_file
+"""
+import sys
+import string
+
+# Pre-Boolean-type truth constants, plus the line-input hook; the self-test
+# at the bottom of this file temporarily swaps get_input_line for a fake reader.
+true = 1
+false = 0
+get_input_line = raw_input
+
+class Stdin:
+    """File-like replacement for sys.stdin which prompts via get_input_line().
+
+    Input is gathered a line at a time into an internal string buffer;
+    read()/readline()/readlines() then serve from that buffer.  Everything
+    else is delegated to the real sys.stdin (kept in self.real_file).
+    """
+    def __init__(self):
+        self.real_file = sys.stdin
+        self.buffer = ""
+
+    def __getattr__(self, name):
+        """Forward most functions to the real sys.stdin for absolute realism.
+        """
+        return getattr(self.real_file, name)
+    
+    def isatty(self):
+        """Return 1 if the file is connected to a tty(-like) device, else 0. 
+        """
+        return 1
+
+    def read(self, size = -1):
+        """Read at most size bytes from the file (less if the read
+        hits EOF or no more data is immediately available on a pipe,
+        tty or similar device). If the size argument is negative or
+        omitted, read all data until EOF is reached. The bytes are
+        returned as a string object. An empty string is returned when
+        EOF is encountered immediately. (For certain files, like ttys,
+        it makes sense to continue reading after an EOF is hit.)"""
+        if self.closed:
+            return self.real_file.read(size)
+        
+        result_size = self.__get_lines(size)
+        return self.__extract_from_buffer(result_size)
+            
+
+    def readline(self, size = -1):
+        """Read one entire line from the file. A trailing newline
+        character is kept in the string (but may be absent when a file ends
+        with an incomplete line). If the size argument is present and
+        non-negative, it is a maximum byte count (including the trailing
+        newline) and an incomplete line may be returned. An empty string is
+        returned when EOF is hit immediately. Note: unlike stdio's fgets(),
+        the returned string contains null characters ('\0') if they occurred
+        in the input.
+        """
+        if self.closed:
+            return self.real_file.readline(size)
+        
+        maximum_result_size = self.__get_lines(size, lambda buffer: '\n' in buffer)
+
+        if '\n' in self.buffer[:maximum_result_size]:
+            # Stop at the first newline within the permitted size.
+            result_size = string.find(self.buffer, '\n', 0, maximum_result_size) + 1
+            assert(result_size > 0)
+        else:
+            result_size = maximum_result_size
+            
+        return self.__extract_from_buffer(result_size)
+
+    def __extract_from_buffer(self, character_count):
+        """Remove the first character_count characters from the internal buffer and
+        return them.
+        """
+        result = self.buffer[:character_count]
+        self.buffer = self.buffer[character_count:]
+        return result
+    
+    def __get_lines(self, desired_size, done_reading = lambda buffer: false):
+        """Keep adding lines to our internal buffer until done_reading(self.buffer)
+        is true or EOF has been reached or we have desired_size bytes in the buffer.
+        If desired_size < 0, we are never satisfied until we reach EOF. If done_reading
+        is not supplied, it is not consulted.
+
+        If desired_size < 0, returns the length of the internal buffer. Otherwise,
+        returns desired_size.
+        """
+        while not done_reading(self.buffer) and (desired_size < 0
+                                                 or len(self.buffer) < desired_size):
+            try:
+                self.__get_line()
+            except: # deal with cancellation of get_input_line dialog
+                desired_size = len(self.buffer) # Be satisfied!
+                pass
+        if desired_size < 0:
+            return len(self.buffer)
+        else:
+            return desired_size
+        
+    def __get_line(self):
+        """Grab one line from get_input_line() and append it to the buffer.
+        """
+        line = get_input_line()
+        print '>>>',line  # echo input to console
+        self.buffer = self.buffer + line + '\n'
+
+    def readlines(self, *sizehint): 
+        """Read until EOF using readline() and return a list containing the lines
+        thus read. If the optional sizehint argument is present, instead of
+        reading up to EOF, whole lines totalling approximately sizehint bytes
+        (possibly after rounding up to an internal buffer size) are read.
+        """
+        if self.closed:
+            return apply(self.real_file.readlines, sizehint)
+        
+        result = []
+        total_read = 0
+        while sizehint == () or total_read < sizehint[0]:
+            line = self.readline()
+            if line == '':
+                break
+            total_read = total_read + len(line)
+            result.append(line)
+        return result
+
+if __name__ == "__main__":
+    # Crude self-test: replace get_input_line with a fake that feeds the
+    # canned text below; a '~' line (or empty input) simulates EOF/cancel.
+    test_input = r"""this is some test
+input that I am hoping
+~
+will be very instructive
+and when I am done
+I will have tested everything.
+Twelve and twenty blackbirds
+baked in a pie. Patty cake
+patty cake so am I.
+~
+Thirty-five niggling idiots!
+Sell you soul to the devil, baby
+"""
+
+    def fake_raw_input(prompt=None):
+        """Replacement for raw_input() which pulls lines out of global test_input.
+        For testing only!
+        """
+        global test_input
+        if '\n' not in test_input:
+            end_of_line_pos = len(test_input)
+        else:
+            end_of_line_pos = string.find(test_input, '\n')
+        result = test_input[:end_of_line_pos]
+        test_input = test_input[end_of_line_pos + 1:]
+        if len(result) == 0 or result[0] == '~':
+            # NOTE(review): a string exception - only legal in old Python 2.
+            raise 'EOF'
+        return result
+    
+    get_input_line = fake_raw_input
+
+    # Some completely inadequate tests, just to make sure the code's not totally broken    
+    try:    
+        x = Stdin()
+        print x.read()
+        print x.readline()
+        print x.read(12)
+        print x.readline(47)
+        print x.readline(3)
+        print x.readlines()
+    finally:
+        get_input_line = raw_input
+else:
+    # Imported normally: install ourselves as sys.stdin (see module docstring).
+    import sys
+    sys.stdin = Stdin()
+    
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/toolmenu.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/toolmenu.py
new file mode 100644
index 0000000..cff34745
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/toolmenu.py
@@ -0,0 +1,248 @@
+# toolmenu.py
+
+import win32ui
+import win32con
+import win32api
+import app
+import sys
+import string
+
+# Global state for the Tools menu:
+#   tools maps a menu command id -> (menuText, pythonCommand, description),
+#   filled in by SetToolsMenu; idPos is the next command id to assign.
+tools = {}
+idPos = 100
+
+# The default items should no tools menu exist in the INI file.
+defaultToolMenuItems = [
+	('Browser', 'win32ui.GetApp().OnViewBrowse(0,0)'),
+       ('Browse PythonPath', 'from pywin.tools import browseProjects;browseProjects.Browse()'),
+       ('Edit Python Path', 'from pywin.tools import regedit;regedit.EditRegistry()'),
+       ('COM Makepy utility', 'from win32com.client import makepy;makepy.main()'),
+       ('COM Browser', 'from win32com.client import combrowse;combrowse.main()'),
+       ('Trace Collector Debugging tool', 'from pywin.tools import TraceCollector;TraceCollector.MakeOutputWindow()'),
+]
+
+def LoadToolMenuItems():
+	"""Return the list of (menuText, pythonCommand) Tools menu items.
+
+	Reads "Tools Menu\\<n>" entries from the application profile, starting
+	at 1, until an entry with an empty menu text is found.  Falls back to
+	defaultToolMenuItems when the profile holds no items at all.
+	"""
+	# Load from the registry.
+	items = []
+	lookNo = 1
+	while 1:
+		menu = win32ui.GetProfileVal("Tools Menu\\%s" % lookNo, "", "")
+		if menu=="":
+			break
+		cmd = win32ui.GetProfileVal("Tools Menu\\%s" % lookNo, "Command", "")
+		items.append((menu, cmd))
+		lookNo = lookNo + 1
+
+	if len(items)==0: 
+		items = defaultToolMenuItems
+	return items
+
+def WriteToolMenuItems( items ):
+	"""Persist the (menu, command) items to the application's registry profile.
+
+	The existing "Tools Menu" registry subtree is removed first.  If the
+	items are exactly the defaults, nothing is written, so later changes
+	to defaultToolMenuItems still take effect.
+	"""
+	# Items is a list of (menu, command)
+	# Delete the entire registry tree.
+	try:
+		mainKey = win32ui.GetAppRegistryKey()
+		toolKey = win32api.RegOpenKey(mainKey, "Tools Menu")
+	except win32ui.error:
+		toolKey = None
+	if toolKey is not None:
+		while 1:
+			try:
+				subkey = win32api.RegEnumKey(toolKey, 0)
+			except win32api.error:
+				break
+			win32api.RegDeleteKey(toolKey, subkey)
+	# Keys are now removed - write the new ones.
+	# But first check if we have the defaults - and if so, dont write anything!
+	if items==defaultToolMenuItems:
+		return
+	itemNo = 1
+	for menu, cmd in items:
+		win32ui.WriteProfileVal("Tools Menu\\%s" % itemNo, "", menu)
+		win32ui.WriteProfileVal("Tools Menu\\%s" % itemNo, "Command", cmd)
+		itemNo = itemNo + 1
+	
+def SetToolsMenu(menu, menuPos = None):
+	"""Build the Tools popup menu and insert it into the given menu.
+
+	Each loaded item is appended with a fresh command id (idPos), recorded
+	in the global 'tools' map, and hooked to HandleToolCommand.  menuPos
+	defaults to two positions before the end of the target menu.
+	"""
+	global tools
+	global idPos
+	
+	# todo - check the menu does not already exist.
+	# Create the new menu
+	toolsMenu = win32ui.CreatePopupMenu()
+	
+	# Load from the ini file.
+	items = LoadToolMenuItems()
+	for menuString, cmd in items:
+		tools[idPos] = (menuString, cmd, menuString)
+		toolsMenu.AppendMenu(win32con.MF_ENABLED|win32con.MF_STRING,idPos, menuString)
+		win32ui.GetMainFrame().HookCommand(HandleToolCommand, idPos)
+		idPos=idPos+1
+
+	# Find the correct spot to insert the new tools menu.
+	if menuPos is None:
+		menuPos = menu.GetMenuItemCount()-2
+		if menuPos<0: menuPos=0
+
+	menu.InsertMenu(menuPos, win32con.MF_BYPOSITION|win32con.MF_ENABLED|win32con.MF_STRING|win32con.MF_POPUP, toolsMenu.GetHandle(), '&Tools')
+
+def HandleToolCommand(cmd, code):
+	"""Menu-command handler: exec the Python command registered for 'cmd'.
+
+	Looks up (menuString, pyCmd, desc) in the global 'tools' map, converts
+	literal backslash-n sequences in the stored command to real newlines,
+	and exec's it, reporting progress and failure on the status bar.
+	"""
+	import traceback
+	import re
+	global tools
+	(menuString, pyCmd, desc) = tools[cmd]
+	win32ui.SetStatusText("Executing tool %s" % desc, 1)
+	pyCmd = re.sub('\\\\n','\n', pyCmd)
+	win32ui.DoWaitCursor(1)
+	oldFlag = None
+	try:
+		# Write tool output straight through while it runs; queueing is
+		# restored below.  NOTE(review): assumes sys.stdout is the
+		# interactive window (has a .template) - confirm.
+		oldFlag = sys.stdout.template.writeQueueing
+		sys.stdout.template.writeQueueing = 0
+	except (NameError, AttributeError):
+		pass
+	
+	try:
+		exec "%s\n" % pyCmd
+		worked=1
+	except SystemExit:
+		# The program raised a SystemExit - ignore it.
+		worked = 1
+	except:
+		print "Failed to execute command:\n%s" % pyCmd
+		traceback.print_exc()
+		worked=0
+	if oldFlag is not None:
+		sys.stdout.template.writeQueueing = oldFlag
+	win32ui.DoWaitCursor(0)
+	if worked:
+		text = "Completed successfully."
+	else:
+		text = "Error executing %s." % desc
+	win32ui.SetStatusText(text, 1)
+
+# The property page for maintaining the items on the Tools menu.
+import commctrl
+from pywin.mfc import dialog
+
+class ToolMenuPropPage(dialog.PropertyPage):
+	"""Property page for editing the user-defined Tools menu items.
+
+	Presents a two-column list (menu text, Python command); changes are
+	written back to the registry via WriteToolMenuItems on OK.
+	"""
+	def __init__(self):
+		self.bImChangingEditControls = 0 # Am I programatically changing the controls?
+		dialog.PropertyPage.__init__(self, win32ui.IDD_PP_TOOLMENU)
+
+	def OnInitDialog(self):
+		"""Wire up controls, hook notifications and populate the list."""
+		self.editMenuCommand = self.GetDlgItem(win32ui.IDC_EDIT2)
+		self.butNew = self.GetDlgItem(win32ui.IDC_BUTTON3)
+		
+		# Now hook the change notification messages for the edit controls.
+		self.HookCommand(self.OnCommandEditControls, win32ui.IDC_EDIT1)
+		self.HookCommand(self.OnCommandEditControls, win32ui.IDC_EDIT2)
+
+		self.HookNotify(self.OnNotifyListControl, commctrl.LVN_ITEMCHANGED)
+		self.HookNotify(self.OnNotifyListControlEndLabelEdit, commctrl.LVN_ENDLABELEDIT)
+		
+		# Hook the button clicks.
+		self.HookCommand(self.OnButtonNew, win32ui.IDC_BUTTON3) # New Item
+		self.HookCommand(self.OnButtonDelete, win32ui.IDC_BUTTON4) # Delete item
+		self.HookCommand(self.OnButtonMove, win32ui.IDC_BUTTON1) # Move up
+		self.HookCommand(self.OnButtonMove, win32ui.IDC_BUTTON2) # Move down
+
+		# Setup the columns in the list control
+		lc = self.GetDlgItem(win32ui.IDC_LIST1)
+		rect = lc.GetWindowRect()
+		cx = rect[2] - rect[0]
+		colSize = cx/2 - win32api.GetSystemMetrics(win32con.SM_CXBORDER) - 1
+
+		item = commctrl.LVCFMT_LEFT, colSize, "Menu Text"
+		lc.InsertColumn(0, item)
+
+		item = commctrl.LVCFMT_LEFT, colSize, "Python Command"
+		lc.InsertColumn(1, item)
+
+		# Insert the existing tools menu
+		itemNo = 0
+		for desc, cmd in LoadToolMenuItems():
+			lc.InsertItem(itemNo, desc)
+			lc.SetItemText(itemNo, 1, cmd)
+			itemNo = itemNo + 1
+
+		self.listControl = lc
+		return dialog.PropertyPage.OnInitDialog(self)
+
+	def OnOK(self):
+		"""Collect every row from the list and persist via WriteToolMenuItems."""
+		# Write the menu back to the registry.
+		items = []
+		itemLook = 0
+		while 1:
+			try:
+				items.append( ( self.listControl.GetItemText(itemLook, 0), self.listControl.GetItemText(itemLook, 1) ) )
+			except win32ui.error:
+				# no more items!
+				break
+			itemLook = itemLook + 1
+		WriteToolMenuItems( items )
+		return self._obj_.OnOK()
+
+	def OnCommandEditControls(self, id, cmd):
+		"""Mirror typing in the command edit box into the selected row."""
+#		print "OnEditControls", id, cmd
+		if cmd==win32con.EN_CHANGE and not self.bImChangingEditControls:
+			itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED)
+			newText = self.editMenuCommand.GetWindowText()
+			self.listControl.SetItemText(itemNo, 1, newText)
+
+		return 0
+		
+	def OnNotifyListControlEndLabelEdit(self, id, cmd):
+		"""Commit an in-place label edit to the menu-text column."""
+		newText = self.listControl.GetEditControl().GetWindowText()
+		itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED)
+		self.listControl.SetItemText(itemNo, 0, newText)
+
+	def OnNotifyListControl(self, id, cmd):
+		"""Selection changed - show the selected row's command in the edit box."""
+#		print id, cmd
+		try:
+			itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED)
+		except win32ui.error: # No selection!
+			return
+
+		# Guard so the EN_CHANGE we trigger doesn't write back into the list.
+		self.bImChangingEditControls = 1
+		try:
+			item = self.listControl.GetItem(itemNo, 1)
+			self.editMenuCommand.SetWindowText(item[4])
+		finally:
+			self.bImChangingEditControls = 0
+		
+		return 0 # we have handled this!
+
+	def OnButtonNew(self, id, cmd):
+		"""Append a placeholder row for the user to edit."""
+		if cmd==win32con.BN_CLICKED:
+			newIndex = self.listControl.GetItemCount()
+			self.listControl.InsertItem(newIndex, "Click to edit the text")
+			self.listControl.EnsureVisible(newIndex, 0)
+
+	def OnButtonMove(self, id, cmd):
+		"""Move the selected row up (IDC_BUTTON1) or down (IDC_BUTTON2)."""
+		if cmd==win32con.BN_CLICKED:
+			try:
+				itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED)
+			except win32ui.error:
+				return
+			menu = self.listControl.GetItemText(itemNo, 0)
+			cmd = self.listControl.GetItemText(itemNo, 1)
+			if id == win32ui.IDC_BUTTON1:
+				# Move up
+				if itemNo > 0:
+					self.listControl.DeleteItem(itemNo)
+					# reinsert it.
+					self.listControl.InsertItem(itemNo-1, menu)
+					self.listControl.SetItemText(itemNo-1, 1, cmd)
+			else:
+				# Move down.
+				if itemNo < self.listControl.GetItemCount()-1:
+					self.listControl.DeleteItem(itemNo)
+					# reinsert it.
+					self.listControl.InsertItem(itemNo+1, menu)
+					self.listControl.SetItemText(itemNo+1, 1, cmd)
+
+	def OnButtonDelete(self, id, cmd):
+		"""Delete the selected row (no-op when nothing is selected)."""
+		if cmd==win32con.BN_CLICKED:
+			try:
+				itemNo = self.listControl.GetNextItem(-1, commctrl.LVNI_SELECTED)
+			except win32ui.error: # No selection!
+				return
+			self.listControl.DeleteItem(itemNo)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/window.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/window.py
new file mode 100644
index 0000000..c90b091
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/window.py
@@ -0,0 +1,13 @@
+# Framework Window classes.
+
+# Most Pythonwin windows should use these classes rather than
+# the raw MFC ones if they want Pythonwin specific functionality.
+import pywin.mfc.window
+import win32con
+
+class MDIChildWnd(pywin.mfc.window.MDIChildWnd):
+	"""Pythonwin-specific MDI child window."""
+	def AutoRestore(self):
+		"If the window is minimised or maximised, restore it."
+		# p[1] is the show-command; restore only when currently minimised.
+		p = self.GetWindowPlacement()
+		if p[1]==win32con.SW_MINIMIZE or p[1]==win32con.SW_SHOWMINIMIZED:
+			self.SetWindowPlacement(p[0], win32con.SW_RESTORE, p[2], p[3], p[4])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/winout.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/winout.py
new file mode 100644
index 0000000..7501ff4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/framework/winout.py
@@ -0,0 +1,521 @@
+# winout.py
+#
+# generic "output window"
+#
+# This Window will detect itself closing, and recreate next time output is
+# written to it.
+
+# This has the option of writing output at idle time (by hooking the
+# idle message, and queueing output) or writing as each
+# write is executed.
+# Updating the window directly gives a jerky appearance as many writes
+# take place between commands, and the windows scrolls, and updates etc
+# Updating at idle-time may defer all output of a long process, giving the 
+# appearance that nothing is happening.
+# There is a compromise "line" mode, which will output whenever
+# a complete line is available.
+
+# behaviour depends on self.writeQueueing
+
+# This module is thread safe - output can originate from any thread.  If any thread 
+# other than the main thread attempts to print, it is always queued until next idle time
+
+import sys, string, re
+from pywin.mfc import docview
+from pywin.framework import app, window
+from pywintypes import UnicodeType
+import win32ui, win32api, win32con
+import Queue
+
+debug = lambda msg: None
+
+##debug=win32ui.OutputDebugString
+##import win32trace;win32trace.InitWrite() # for debugging - delete me!
+##debug = win32trace.write
+
+class flags:
+	# queueing of output.
+	WQ_NONE = 0	# write through immediately
+	WQ_LINE = 1	# flush queued output whenever a newline arrives
+	WQ_IDLE = 2	# flush queued output only at idle time
+
+#WindowOutputDocumentParent=docview.RichEditDoc
+#WindowOutputDocumentParent=docview.Document
+import pywin.scintilla.document
+from pywin.scintilla import scintillacon
+from pywin import is_platform_unicode, default_platform_encoding, default_scintilla_encoding
+
+WindowOutputDocumentParent=pywin.scintilla.document.CScintillaDocument
+class WindowOutputDocument(WindowOutputDocumentParent):
+	def SaveModified(self):
+		# Never prompt to save - output windows are disposable.
+		return 1	# say it is OK to destroy my document
+
+class WindowOutputFrame(window.MDIChildWnd):
+	"""MDI child frame for an output window.  Tracks its size/position on
+	the owning template so a recreated window reappears in place."""
+	def __init__(self, wnd = None):
+		window.MDIChildWnd.__init__(self, wnd)
+		self.HookMessage(self.OnSizeMove, win32con.WM_SIZE)
+		self.HookMessage(self.OnSizeMove, win32con.WM_MOVE)
+
+	def LoadFrame( self, idResource, style, wndParent, context ):
+		# Remember the owning template so size/destroy events can reach it.
+		self.template = context.template
+		return self._obj_.LoadFrame(idResource, style, wndParent, context)
+		
+	def PreCreateWindow(self, cc):
+		cc = self._obj_.PreCreateWindow(cc)
+		# Apply the template's remembered default size, if any.
+		if self.template.defSize and self.template.defSize[0] != self.template.defSize[1]:
+			rect = app.RectToCreateStructRect(self.template.defSize)
+			cc = cc[0], cc[1], cc[2], cc[3], rect, cc[5], cc[6], cc[7], cc[8]
+		return cc
+	def OnSizeMove(self, msg):
+		# so recreate maintains position.
+		# Need to map coordinates from the
+		# frame windows first child.
+		mdiClient = self.GetParent()
+		self.template.defSize = mdiClient.ScreenToClient(self.GetWindowRect())
+	def OnDestroy(self, message):
+		self.template.OnFrameDestroy(self)
+		return 1
+
+class WindowOutputViewImpl:
+	"""Mixin with behaviour shared by the RichEdit and Scintilla output
+	views: right-click menu, error-line navigation and the file-like
+	write interface.  Expects to be combined with an MFC view class
+	that provides GetDocument/HookMessage/SetSel etc."""
+	def __init__(self):
+		# Matches Python traceback lines: File "<name>", line <no>
+		self.patErrorMessage=re.compile('\W*File "(.*)", line ([0-9]+)')
+		self.template = self.GetDocument().GetDocTemplate()
+
+	def HookHandlers(self):
+		# Hook for the right-click menu.
+		self.HookMessage(self.OnRClick,win32con.WM_RBUTTONDOWN)
+
+	def OnDestroy(self, msg):
+		self.template.OnViewDestroy(self)
+
+	def OnInitialUpdate(self):
+		# Restore any text saved when the previous incarnation was destroyed.
+		self.RestoreKillBuffer()
+		self.SetSel(-2)	# end of buffer
+
+	def GetRightMenuItems(self):
+		"""Return (flags, id, text) tuples for the context menu."""
+		ret = []
+		flags=win32con.MF_STRING|win32con.MF_ENABLED
+		ret.append((flags, win32ui.ID_EDIT_COPY, '&Copy'))
+		ret.append((flags, win32ui.ID_EDIT_SELECT_ALL, '&Select all'))
+		return ret
+
+	#
+	# Windows command handlers, virtuals, etc.
+	#
+	def OnRClick(self,params):
+		"""Show the context menu at the mouse position."""
+		paramsList = self.GetRightMenuItems()
+		menu = win32ui.CreatePopupMenu()
+		for appendParams in paramsList:
+			if type(appendParams)!=type(()):
+				appendParams = (appendParams,)
+			apply(menu.AppendMenu, appendParams)
+		menu.TrackPopupMenu(params[5]) # track at mouse position.
+		return 0
+
+	# as this is often used as an output window, exceptions will often
+	# be printed.  Therefore, we support this functionality at this level.
+	# Returns TRUE if the current line is an error message line, and will
+	# jump to it.  FALSE if no error (and no action taken)
+	def HandleSpecialLine(self):
+		import scriptutils
+		line = self.GetLine()
+		if line[:11]=="com_error: ":
+			# An OLE Exception - pull apart the exception
+			# and try and locate a help file.
+			try:
+				import win32api, win32con
+				det = eval(string.strip(line[string.find(line,":")+1:]))
+				win32ui.SetStatusText("Opening help file on OLE error...");
+				import help
+				help.OpenHelpFile(det[2][3],win32con.HELP_CONTEXT, det[2][4])
+				return 1
+			except win32api.error, details:
+				try:
+					msg = details[2]
+				except:
+					msg = str(details)
+				win32ui.SetStatusText("The help file could not be opened - %s" % msg)
+				return 1
+			except:
+				win32ui.SetStatusText("Line is a COM error, but no WinHelp details can be parsed");
+		# Look for a Python traceback.
+		matchResult = self.patErrorMessage.match(line)
+		if matchResult is None:
+			# No match - try the previous line
+			lineNo = self.LineFromChar()
+			if lineNo > 0:
+				line = self.GetLine(lineNo-1)
+				matchResult = self.patErrorMessage.match(line)
+		if matchResult is not None:
+			# we have an error line.
+			fileName = matchResult.group(1)
+			if fileName[0]=="<":
+				win32ui.SetStatusText("Can not load this file")
+				return 1	# still was an error message.
+			else:
+				lineNoString = matchResult.group(2)
+				# Attempt to locate the file (in case it is a relative spec)
+				fileNameSpec = fileName
+				fileName = scriptutils.LocatePythonFile(fileName)
+				if fileName is None:
+					# Dont force update, so it replaces the idle prompt.
+					win32ui.SetStatusText("Cant locate the file '%s'" % (fileNameSpec), 0)
+					return 1
+
+				win32ui.SetStatusText("Jumping to line "+lineNoString+" of file "+fileName,1)
+				if not scriptutils.JumpToDocument(fileName, string.atoi(lineNoString)):
+					win32ui.SetStatusText("Could not open %s" % fileName)
+					return 1	# still was an error message.
+				return 1
+		return 0	# not an error line
+	def write(self, msg):
+		# File-like interface - delegate to the owning template.
+		return self.template.write(msg)
+	def writelines(self, lines):
+		for line in lines:
+			self.write(line)
+	def flush(self):
+		self.template.flush()
+
+class WindowOutputViewRTF(docview.RichEditView, WindowOutputViewImpl):
+	"""RichEdit-based output view; the kill buffer is saved/restored
+	as streamed RTF chunks."""
+	def __init__(self, doc):
+		docview.RichEditView.__init__(self, doc)
+		WindowOutputViewImpl.__init__(self)
+
+	def OnInitialUpdate(self):
+		WindowOutputViewImpl.OnInitialUpdate(self)
+		return docview.RichEditView.OnInitialUpdate(self)
+
+	def OnDestroy(self, msg):
+		WindowOutputViewImpl.OnDestroy(self, msg)
+		docview.RichEditView.OnDestroy(self, msg)
+
+	def HookHandlers(self):
+		WindowOutputViewImpl.HookHandlers(self)
+		# Hook for finding and locating error messages
+		self.HookMessage(self.OnLDoubleClick,win32con.WM_LBUTTONDBLCLK)
+#		docview.RichEditView.HookHandlers(self)
+
+	def OnLDoubleClick(self,params):
+		"""Jump to the error line under a double-click, if any."""
+		if self.HandleSpecialLine():
+			return 0	# dont pass on
+		return 1	# pass it on by default.
+		
+	def RestoreKillBuffer(self):
+		"""Stream previously saved RTF back into the (recreated) control."""
+		if len(self.template.killBuffer):
+			self.StreamIn(win32con.SF_RTF, self._StreamRTFIn)
+			self.template.killBuffer = []
+
+	def SaveKillBuffer(self):
+		"""Stream the control's contents out into the template's kill buffer."""
+		self.StreamOut(win32con.SF_RTFNOOBJS, self._StreamRTFOut)
+
+	def _StreamRTFOut(self, data):
+		# StreamOut callback - collect each chunk.
+		self.template.killBuffer.append(data)
+		return 1 # keep em coming!
+
+	def _StreamRTFIn(self, bytes):
+		# StreamIn callback - hand back one saved chunk per call; None ends it.
+		try:
+			item = self.template.killBuffer[0]
+			self.template.killBuffer.remove(item)
+			if bytes < len(item):
+				print "Warning - output buffer not big enough!"
+			return item
+		except IndexError:
+			return None
+	def dowrite(self, str):
+		# Append at the end of the buffer.
+		self.SetSel(-2)
+		self.ReplaceSel(str)
+
+import pywin.scintilla.view
+class WindowOutputViewScintilla(pywin.scintilla.view.CScintillaView, WindowOutputViewImpl):
+	"""Scintilla-based output view (the default view class)."""
+	def __init__(self, doc):
+		pywin.scintilla.view.CScintillaView.__init__(self, doc)
+		WindowOutputViewImpl.__init__(self)
+
+	def OnInitialUpdate(self):
+		pywin.scintilla.view.CScintillaView.OnInitialUpdate(self)
+		self.SCISetMarginWidth(3)
+		WindowOutputViewImpl.OnInitialUpdate(self)
+
+	def OnDestroy(self, msg):
+		WindowOutputViewImpl.OnDestroy(self, msg)
+		pywin.scintilla.view.CScintillaView.OnDestroy(self, msg)
+
+	def HookHandlers(self):
+		WindowOutputViewImpl.HookHandlers(self)
+		pywin.scintilla.view.CScintillaView.HookHandlers(self)
+		self.GetParent().HookNotify(self.OnScintillaDoubleClick, scintillacon.SCN_DOUBLECLICK)
+##		self.HookMessage(self.OnLDoubleClick,win32con.WM_LBUTTONDBLCLK)
+
+	def OnScintillaDoubleClick(self, std, extra):
+		# Jump to the error line under a double-click, if any.
+		self.HandleSpecialLine()
+
+##	def OnLDoubleClick(self,params):
+##			return 0	# never dont pass on
+
+	def RestoreKillBuffer(self):
+		"""Re-insert text saved by a previous incarnation of the view."""
+		assert len(self.template.killBuffer) in [0,1], "Unexpected killbuffer contents"
+		if self.template.killBuffer:
+			self.SCIAddText(self.template.killBuffer[0])
+		self.template.killBuffer = []
+	def SaveKillBuffer(self):
+		"""Save the entire buffer text so a recreated view can restore it."""
+		self.template.killBuffer = [self.GetTextRange(0,-1)]
+	def dowrite(self, str):
+		# Append at the end; keep the caret at the end only if it was there.
+		end = self.GetTextLength()
+		atEnd = end==self.GetSel()[0]
+		self.SCIInsertText(str, end)
+		if atEnd:
+			self.SetSel(self.GetTextLength())
+
+	def SetWordWrap(self, bWrapOn = 1):
+		"""Turn Scintilla word-wrap on (default) or off."""
+		if bWrapOn:
+			wrap_mode = scintillacon.SC_WRAP_WORD
+		else:
+			wrap_mode = scintillacon.SC_WRAP_NONE
+		self.SCISetWrapMode(wrap_mode)
+
+	def _MakeColorizer(self):
+		return None # No colorizer for me!
+
+# Default view implementation used for output windows.
+WindowOutputView = WindowOutputViewScintilla
+# The WindowOutput class is actually an MFC template.  This is a convenient way of
+# making sure that my state can exist beyond the life of the windows themselves.
+# This is primarily to support the functionality of a WindowOutput window automatically
+# being recreated if necessary when written to.
+class WindowOutput(docview.DocTemplate):
+	""" Looks like a general Output Window - text can be written by the 'write' method.
+		Will auto-create itself on first write, and also on next write after being closed """
+	softspace=1
+	def __init__(self, title=None, defSize=None, queueing = flags.WQ_LINE, \
+	             bAutoRestore = 1, style=None,
+	             makeDoc = None, makeFrame = None, makeView = None):
+		""" init the output window - 
+		Params
+		title=None -- What is the title of the window
+		defSize=None -- What is the default size for the window - if this
+		                is a string, the size will be loaded from the ini file.
+		queueing = flags.WQ_LINE -- When should output be written
+		bAutoRestore=1 -- Should a minimized window be restored.
+		style -- Style for Window, or None for default.
+		makeDoc, makeFrame, makeView -- Classes for frame, view and window respectively.
+		"""
+		if makeDoc is None: makeDoc = WindowOutputDocument
+		if makeFrame is None: makeFrame = WindowOutputFrame
+		if makeView is None: makeView = WindowOutputViewScintilla
+		docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, \
+		    makeDoc, makeFrame, makeView)
+		self.SetDocStrings("\nOutput\n\n\n\n\n\n")
+		win32ui.GetApp().AddDocTemplate(self)
+		self.writeQueueing = queueing
+		self.errorCantRecreate = 0	# set once recreation fails; never retried
+		self.killBuffer=[]	# text saved across view destroy/recreate
+		self.style = style
+		self.bAutoRestore = bAutoRestore
+		self.title = title
+		self.bCreating = 0	# guard while OpenDocumentFile is in progress
+		self.interruptCount = 0
+		if type(defSize)==type(''):	# is a string - maintain size pos from ini file.
+			self.iniSizeSection = defSize
+			self.defSize = app.LoadWindowSize(defSize)
+			self.loadedSize = self.defSize
+		else:
+			self.iniSizeSection = None
+			self.defSize=defSize
+		self.currentView = None
+		self.outputQueue = Queue.Queue(-1)	# unbounded, thread-safe
+		self.mainThreadId = win32api.GetCurrentThreadId()
+		self.idleHandlerSet = 0
+		self.SetIdleHandler()
+
+	def __del__(self):
+		self.Close()
+
+	def Create(self, title=None, style = None):
+		"""Create (or recreate) the document/frame/view for this window."""
+		self.bCreating = 1
+		if title: self.title = title
+		if style: self.style = style
+		doc=self.OpenDocumentFile()
+		if doc is None: return
+		self.currentView = doc.GetFirstView()
+		self.bCreating = 0
+		if self.title: doc.SetTitle(self.title)
+
+	def Close(self):
+		"""Drop the idle handler and destroy the window if it still exists."""
+		self.RemoveIdleHandler()
+		try:
+			parent = self.currentView.GetParent()
+		except (AttributeError, win32ui.error): # Already closed
+			return
+		parent.DestroyWindow()
+
+	def SetTitle(self, title):
+		"""Set the window title (applied immediately if the view exists)."""
+		self.title = title
+		if self.currentView: self.currentView.GetDocument().SetTitle(self.title)
+
+	def OnViewDestroy(self, view):
+		# Called by the view - keep its text so a recreated window can restore it.
+		self.currentView.SaveKillBuffer()
+		self.currentView = None
+
+	def OnFrameDestroy(self, frame):
+		"""Persist the window size to the ini section, if one was given."""
+		if self.iniSizeSection:
+			# use GetWindowPlacement(), as it works even when min'd or max'd
+			newSize = frame.GetWindowPlacement()[4]
+			if self.loadedSize!=newSize:
+				app.SaveWindowSize(self.iniSizeSection, newSize)
+
+	def SetIdleHandler(self):
+		"""Install QueueIdleHandler as an application idle handler (once)."""
+		if not self.idleHandlerSet:
+			debug("Idle handler set\n")
+			win32ui.GetApp().AddIdleHandler(self.QueueIdleHandler)
+			self.idleHandlerSet = 1
+
+	def RemoveIdleHandler(self):
+		"""Remove the idle handler installed by SetIdleHandler."""
+		if self.idleHandlerSet:
+			debug("Idle handler reset\n")
+			if (win32ui.GetApp().DeleteIdleHandler(self.QueueIdleHandler)==0):
+				debug('Error deleting idle handler\n')
+			self.idleHandlerSet = 0
+
+	def RecreateWindow(self):
+		"""Try to recreate a closed window; returns true on success.
+		A failure is remembered and never retried (app may be shutting down)."""
+		if self.errorCantRecreate:
+			debug("Error = not trying again")
+			return 0
+		try:
+			# This will fail if app shutting down
+			win32ui.GetMainFrame().GetSafeHwnd()
+			self.Create()
+			return 1
+		except (win32ui.error, AttributeError):
+			self.errorCantRecreate = 1
+			debug("Winout can not recreate the Window!\n")
+			return 0
+
+	# this handles the idle message, and does the printing.
+	def QueueIdleHandler(self,handler,count):
+		try:
+			bEmpty = self.QueueFlush(20)
+			# If the queue is empty, then we are back to idle and restart interrupt logic.
+			if bEmpty: self.interruptCount = 0
+		except KeyboardInterrupt:
+			# First interrupt since idle we just pass on.
+			# later ones we dump the queue and give up.
+			self.interruptCount = self.interruptCount + 1
+			if self.interruptCount > 1:
+				# Drop the queue quickly as the user is already annoyed :-)
+				self.outputQueue = Queue.Queue(-1)
+				print "Interrupted."
+				bEmpty = 1
+			else:
+				raise # re-raise the error so the users exception filters up.
+		return not bEmpty # More to do if not empty.
+
+	# Returns true if the Window needs to be recreated.
+	def NeedRecreateWindow(self):
+		try:
+			if self.currentView is not None and self.currentView.IsWindow():
+				return 0
+		except (win32ui.error, AttributeError): # Attribute error if the win32ui object has died.
+			pass
+		return 1
+
+	# Returns true if the Window is OK (either cos it was, or because it was recreated
+	def CheckRecreateWindow(self):
+		if self.bCreating: return 1
+		if not self.NeedRecreateWindow():
+			return 1
+		if self.bAutoRestore:
+			if self.RecreateWindow():
+				return 1
+		return 0
+
+	def QueueFlush(self, max = sys.maxint):
+		"""Drain up to 'max' queued strings into the view, recreating the
+		window if needed.  Returns true if the queue emptied."""
+		# Returns true if the queue is empty after the flush
+#		debug("Queueflush - %d, %d\n" % (max, self.outputQueue.qsize()))
+		if self.bCreating: return 1
+		items = []
+		rc = 0
+		while max > 0:
+			try:
+				item = self.outputQueue.get_nowait()
+				if is_platform_unicode:
+					# Note is_platform_unicode is never true any more!
+					if not isinstance(item, UnicodeType):
+						item = unicode(item, default_platform_encoding)
+					item = item.encode(default_scintilla_encoding) # What scintilla uses.
+				else:
+					# try and display using mbcs encoding
+					if isinstance(item, UnicodeType):
+						item = item.encode("mbcs")
+				items.append(item)
+			except Queue.Empty:
+				rc = 1
+				break
+			max = max - 1
+		if len(items) != 0:
+			if not self.CheckRecreateWindow():
+				debug(":Recreate failed!\n")
+				return 1 # In trouble - so say we have nothing to do.
+			win32ui.PumpWaitingMessages() # Pump paint messages
+			self.currentView.dowrite(string.join(items,''))
+		return rc
+
+	def HandleOutput(self,message):
+		"""Queue 'message'; depending on writeQueueing (and which thread is
+		calling) either flush now or wake the main thread's idle handler."""
+#		debug("QueueOutput on thread %d, flags %d with '%s'...\n" % (win32api.GetCurrentThreadId(), self.writeQueueing, message ))
+		self.outputQueue.put(message)
+		if win32api.GetCurrentThreadId() != self.mainThreadId:
+			pass
+#			debug("not my thread - ignoring queue options!\n")
+		elif self.writeQueueing==flags.WQ_LINE:
+			pos = string.rfind(message, '\n')
+			if pos>=0:
+#				debug("Line queueing - forcing flush\n")
+				self.QueueFlush()
+				return
+		elif self.writeQueueing==flags.WQ_NONE:
+#			debug("WQ_NONE - flushing!\n")
+			self.QueueFlush()
+			return
+		# Let our idle handler get it - wake it up
+		try:
+			win32ui.GetMainFrame().PostMessage(win32con.WM_USER) # Kick main thread off.
+		except win32ui.error:
+			# This can happen as the app is shutting down, so we send it to the C++ debugger
+			win32api.OutputDebugString(message)
+
+	# delegate certain fns to my view.
+	def writelines(self, lines):
+		for line in lines:
+			self.write(line)
+
+	def write(self,message):
+		self.HandleOutput(message)
+			
+	def flush(self):
+		self.QueueFlush()
+
+	def HandleSpecialLine(self):
+		self.currentView.HandleSpecialLine()
+
+def RTFWindowOutput(*args, **kw):
+	"""Factory: a WindowOutput that uses the RichEdit (RTF) view class."""
+	kw['makeView'] = WindowOutputViewRTF
+	return apply( WindowOutput, args, kw )
+
+
+def thread_test(o):
+	# Used by test() - writes from a worker thread to exercise queueing.
+	for i in range(5):
+		o.write("Hi from thread %d\n" % (win32api.GetCurrentThreadId()))
+		win32api.Sleep(100)
+		
+def test():
+	"""Manual smoke test: create an idle-queued window and write to it
+	from the main thread and several worker threads."""
+	w = WindowOutput(queueing=flags.WQ_IDLE)
+	w.write("First bit of text\n")
+	import thread
+	for i in range(5):
+		w.write("Hello from the main thread\n")
+		thread.start_new(thread_test, (w,))
+	for i in range(2):
+		w.write("Hello from the main thread\n")
+		win32api.Sleep(50)
+	return w
+
+# Run the smoke test when executed directly.
+if __name__=='__main__':
+	test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/AutoExpand.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/AutoExpand.py
new file mode 100644
index 0000000..0d57be4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/AutoExpand.py
@@ -0,0 +1,92 @@
+import string
+import re
+
+###$ event <<expand-word>>
+###$ win <Alt-slash>
+###$ unix <Alt-slash>
+
+class AutoExpand:
+    """IDLE extension: expand the word before the cursor (Alt-/ on
+    Windows, Meta-/ on Unix) by cycling through other words in the
+    buffer that share its prefix."""
+
+    keydefs = {
+        '<<expand-word>>': ['<Alt-slash>'],
+    }
+
+    unix_keydefs = {
+        '<<expand-word>>': ['<Meta-slash>'],
+    }
+
+    menudefs = [
+        ('edit', [
+            ('E_xpand word', '<<expand-word>>'),
+         ]),
+    ]
+
+    # Characters considered part of a word.  NOTE: string.letters is
+    # Python 2 only.
+    wordchars = string.letters + string.digits + "_"
+
+    def __init__(self, editwin):
+        self.text = editwin.text
+        self.text.wordlist = None # XXX what is this?
+        # (words, index, insert-position, line) of an expansion in progress.
+        self.state = None
+
+    def expand_word_event(self, event):
+        """Replace the word before the cursor with the next candidate,
+        ringing the bell when the candidate list wraps around."""
+        curinsert = self.text.index("insert")
+        curline = self.text.get("insert linestart", "insert lineend")
+        if not self.state:
+            words = self.getwords()
+            index = 0
+        else:
+            words, index, insert, line = self.state
+            # Any edit or cursor movement since last time restarts the cycle.
+            if insert != curinsert or line != curline:
+                words = self.getwords()
+                index = 0
+        if not words:
+            self.text.bell()
+            return "break"
+        word = self.getprevword()
+        self.text.delete("insert - %d chars" % len(word), "insert")
+        newword = words[index]
+        index = (index + 1) % len(words)
+        if index == 0:
+            self.text.bell()            # Warn we cycled around
+        self.text.insert("insert", newword)
+        curinsert = self.text.index("insert")
+        curline = self.text.get("insert linestart", "insert lineend")
+        self.state = words, index, curinsert, curline
+        return "break"
+
+    def getwords(self):
+        """Return expansion candidates: words sharing the current prefix -
+        those before the cursor (nearest first), then those after, with
+        duplicates removed, and finally the prefix itself."""
+        word = self.getprevword()
+        if not word:
+            return []
+        before = self.text.get("1.0", "insert wordstart")
+        wbefore = re.findall(r"\b" + word + r"\w+\b", before)
+        del before
+        after = self.text.get("insert wordend", "end")
+        wafter = re.findall(r"\b" + word + r"\w+\b", after)
+        del after
+        if not wbefore and not wafter:
+            return []
+        words = []
+        dict = {}
+        # search backwards through words before
+        wbefore.reverse()
+        for w in wbefore:
+            if dict.get(w):
+                continue
+            words.append(w)
+            dict[w] = w
+        # search onwards through words after
+        for w in wafter:
+            if dict.get(w):
+                continue
+            words.append(w)
+            dict[w] = w
+        words.append(word)
+        return words
+
+    def getprevword(self):
+        """Return the run of word characters immediately before the cursor."""
+        line = self.text.get("insert linestart", "insert")
+        i = len(line)
+        while i > 0 and line[i-1] in self.wordchars:
+            i = i-1
+        return line[i:]
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/AutoIndent.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/AutoIndent.py
new file mode 100644
index 0000000..d994e9e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/AutoIndent.py
@@ -0,0 +1,553 @@
+import string
+#from Tkinter import TclError
+#import tkMessageBox
+#import tkSimpleDialog
+
+###$ event <<newline-and-indent>>
+###$ win <Key-Return>
+###$ win <KP_Enter>
+###$ unix <Key-Return>
+###$ unix <KP_Enter>
+
+###$ event <<indent-region>>
+###$ win <Control-bracketright>
+###$ unix <Alt-bracketright>
+###$ unix <Control-bracketright>
+
+###$ event <<dedent-region>>
+###$ win <Control-bracketleft>
+###$ unix <Alt-bracketleft>
+###$ unix <Control-bracketleft>
+
+###$ event <<comment-region>>
+###$ win <Alt-Key-3>
+###$ unix <Alt-Key-3>
+
+###$ event <<uncomment-region>>
+###$ win <Alt-Key-4>
+###$ unix <Alt-Key-4>
+
+###$ event <<tabify-region>>
+###$ win <Alt-Key-5>
+###$ unix <Alt-Key-5>
+
+###$ event <<untabify-region>>
+###$ win <Alt-Key-6>
+###$ unix <Alt-Key-6>
+
+import PyParse
+
+class AutoIndent:
+    """Editor extension implementing smart indentation for Python source.
+
+    Ported from IDLE.  Handles Return/Tab/BackSpace keystrokes and the
+    region indent/dedent/comment/tabify commands listed in menudefs,
+    using PyParse to decide how continuations and block openers/closers
+    affect the indent of a new line.
+    """
+
+    menudefs = [
+        ('edit', [
+            None,
+            ('_Indent region', '<<indent-region>>'),
+            ('_Dedent region', '<<dedent-region>>'),
+            ('Comment _out region', '<<comment-region>>'),
+            ('U_ncomment region', '<<uncomment-region>>'),
+            ('Tabify region', '<<tabify-region>>'),
+            ('Untabify region', '<<untabify-region>>'),
+            ('Toggle tabs', '<<toggle-tabs>>'),
+            ('New indent width', '<<change-indentwidth>>'),
+        ]),
+    ]
+
+    keydefs = {
+        '<<smart-backspace>>': ['<Key-BackSpace>'],
+        '<<newline-and-indent>>': ['<Key-Return>', '<KP_Enter>'],
+        '<<smart-indent>>': ['<Key-Tab>']
+    }
+
+    windows_keydefs = {
+        '<<indent-region>>': ['<Control-bracketright>'],
+        '<<dedent-region>>': ['<Control-bracketleft>'],
+        '<<comment-region>>': ['<Alt-Key-3>'],
+        '<<uncomment-region>>': ['<Alt-Key-4>'],
+        '<<tabify-region>>': ['<Alt-Key-5>'],
+        '<<untabify-region>>': ['<Alt-Key-6>'],
+        '<<toggle-tabs>>': ['<Alt-Key-t>'],
+        '<<change-indentwidth>>': ['<Alt-Key-u>'],
+    }
+
+    unix_keydefs = {
+        '<<indent-region>>': ['<Alt-bracketright>',
+                              '<Meta-bracketright>',
+                              '<Control-bracketright>'],
+        '<<dedent-region>>': ['<Alt-bracketleft>',
+                              '<Meta-bracketleft>',
+                              '<Control-bracketleft>'],
+        '<<comment-region>>': ['<Alt-Key-3>', '<Meta-Key-3>'],
+        '<<uncomment-region>>': ['<Alt-Key-4>', '<Meta-Key-4>'],
+        '<<tabify-region>>': ['<Alt-Key-5>', '<Meta-Key-5>'],
+        '<<untabify-region>>': ['<Alt-Key-6>', '<Meta-Key-6>'],
+        '<<toggle-tabs>>': ['<Alt-Key-t>'],
+        '<<change-indentwidth>>': ['<Alt-Key-u>'],
+    }
+
+    # usetabs true  -> literal tab characters are used by indent and
+    #                  dedent cmds, possibly mixed with spaces if
+    #                  indentwidth is not a multiple of tabwidth
+    #         false -> tab characters are converted to spaces by indent
+    #                  and dedent cmds, and ditto TAB keystrokes
+    # indentwidth is the number of characters per logical indent level.
+    # tabwidth is the display width of a literal tab character.
+    # CAUTION:  telling Tk to use anything other than its default
+    # tab setting causes it to use an entirely different tabbing algorithm,
+    # treating tab stops as fixed distances from the left margin.
+    # Nobody expects this, so for now tabwidth should never be changed.
+    usetabs = 1
+    indentwidth = 4
+    tabwidth = 8    # for IDLE use, must remain 8 until Tk is fixed
+
+    # If context_use_ps1 is true, parsing searches back for a ps1 line;
+    # else searches for a popular (if, def, ...) Python stmt.
+    context_use_ps1 = 0
+
+    # When searching backwards for a reliable place to begin parsing,
+    # first start num_context_lines[0] lines back, then
+    # num_context_lines[1] lines back if that didn't work, and so on.
+    # The last value should be huge (larger than the # of lines in a
+    # conceivable file).
+    # Making the initial values larger slows things down more often.
+    num_context_lines = 50, 500, 5000000
+
+    def __init__(self, editwin):
+        # editwin supplies the Text widget plus helpers such as
+        # get_selection_indices, set_tabwidth and is_char_in_string.
+        self.editwin = editwin
+        self.text = editwin.text
+
+    def config(self, **options):
+        """Set any of the usetabs/indentwidth/tabwidth/context_use_ps1
+        options; raises KeyError for an unknown option name."""
+        for key, value in options.items():
+            if key == 'usetabs':
+                self.usetabs = value
+            elif key == 'indentwidth':
+                self.indentwidth = value
+            elif key == 'tabwidth':
+                self.tabwidth = value
+            elif key == 'context_use_ps1':
+                self.context_use_ps1 = value
+            else:
+                raise KeyError, "bad option name: %s" % `key`
+
+    # If ispythonsource and guess are true, guess a good value for
+    # indentwidth based on file content (if possible), and if
+    # indentwidth != tabwidth set usetabs false.
+    # In any case, adjust the Text widget's view of what a tab
+    # character means.
+
+    def set_indentation_params(self, ispythonsource, guess=1):
+        if guess and ispythonsource:
+            i = self.guess_indent()
+            if 2 <= i <= 8:
+                self.indentwidth = i
+            if self.indentwidth != self.tabwidth:
+                self.usetabs = 0
+
+        self.editwin.set_tabwidth(self.tabwidth)
+
+    def smart_backspace_event(self, event):
+        """BackSpace handler: delete the selection if any, else delete
+        back to the previous virtual tab stop within leading whitespace,
+        else delete one character."""
+        text = self.text
+        first, last = self.editwin.get_selection_indices()
+        if first and last:
+            text.delete(first, last)
+            text.mark_set("insert", first)
+            return "break"
+        # Delete whitespace left, until hitting a real char or closest
+        # preceding virtual tab stop.
+        chars = text.get("insert linestart", "insert")
+        if chars == '':
+            if text.compare("insert", ">", "1.0"):
+                # easy: delete preceding newline
+                text.delete("insert-1c")
+            else:
+                text.bell()     # at start of buffer
+            return "break"
+        if  chars[-1] not in " \t":
+            # easy: delete preceding real char
+            text.delete("insert-1c")
+            return "break"
+        # Ick.  It may require *inserting* spaces if we back up over a
+        # tab character!  This is written to be clear, not fast.
+        expand, tabwidth = string.expandtabs, self.tabwidth
+        have = len(expand(chars, tabwidth))
+        assert have > 0
+        want = int((have - 1) / self.indentwidth) * self.indentwidth
+        ncharsdeleted = 0
+        while 1:
+            chars = chars[:-1]
+            ncharsdeleted = ncharsdeleted + 1
+            have = len(expand(chars, tabwidth))
+            if have <= want or chars[-1] not in " \t":
+                break
+        text.undo_block_start()
+        text.delete("insert-%dc" % ncharsdeleted, "insert")
+        if have < want:
+            text.insert("insert", ' ' * (want - have))
+        text.undo_block_stop()
+        return "break"
+
+    def smart_indent_event(self, event):
+        # if intraline selection:
+        #     delete it
+        # elif multiline selection:
+        #     do indent-region & return
+        # indent one level
+        text = self.text
+        first, last = self.editwin.get_selection_indices()
+        text.undo_block_start()
+        try:
+            if first and last:
+                if index2line(first) != index2line(last):
+                    return self.indent_region_event(event)
+                text.delete(first, last)
+                text.mark_set("insert", first)
+            prefix = text.get("insert linestart", "insert")
+            raw, effective = classifyws(prefix, self.tabwidth)
+            if raw == len(prefix):
+                # only whitespace to the left
+                self.reindent_to(effective + self.indentwidth)
+            else:
+                if self.usetabs:
+                    pad = '\t'
+                else:
+                    effective = len(string.expandtabs(prefix,
+                                                      self.tabwidth))
+                    n = self.indentwidth
+                    pad = ' ' * (n - effective % n)
+                text.insert("insert", pad)
+            text.see("insert")
+            return "break"
+        finally:
+            text.undo_block_stop()
+
+    def newline_and_indent_event(self, event):
+        """Return-key handler: strip whitespace around the insertion
+        point, open a new line, and indent it according to the PyParse
+        analysis of the statement being continued or just ended."""
+        text = self.text
+        first, last = self.editwin.get_selection_indices()
+        text.undo_block_start()
+        try:
+            if first and last:
+                text.delete(first, last)
+                text.mark_set("insert", first)
+            line = text.get("insert linestart", "insert")
+            i, n = 0, len(line)
+            while i < n and line[i] in " \t":
+                i = i+1
+            if i == n:
+                # the cursor is in or at leading indentation; just inject
+                # an empty line at the start
+                text.insert("insert linestart", '\n')
+                return "break"
+            indent = line[:i]
+            # strip whitespace before insert point
+            i = 0
+            while line and line[-1] in " \t":
+                line = line[:-1]
+                i = i+1
+            if i:
+                text.delete("insert - %d chars" % i, "insert")
+            # strip whitespace after insert point
+            while text.get("insert") in " \t":
+                text.delete("insert")
+            # start new line
+            text.insert("insert", '\n')
+
+            # adjust indentation for continuations and block
+            # open/close first need to find the last stmt
+            lno = index2line(text.index('insert'))
+            y = PyParse.Parser(self.indentwidth, self.tabwidth)
+            # Try progressively larger backward contexts until PyParse
+            # finds a safe place to start, or we hit the top of the file.
+            for context in self.num_context_lines:
+                startat = max(lno - context, 1)
+                startatindex = `startat` + ".0"
+                rawtext = text.get(startatindex, "insert")
+                y.set_str(rawtext)
+                bod = y.find_good_parse_start(
+                          self.context_use_ps1,
+                          self._build_char_in_string_func(startatindex))
+                if bod is not None or startat == 1:
+                    break
+            y.set_lo(bod or 0)
+            c = y.get_continuation_type()
+            if c != PyParse.C_NONE:
+                # The current stmt hasn't ended yet.
+                if c == PyParse.C_STRING:
+                    # inside a string; just mimic the current indent
+                    text.insert("insert", indent)
+                elif c == PyParse.C_BRACKET:
+                    # line up with the first (if any) element of the
+                    # last open bracket structure; else indent one
+                    # level beyond the indent of the line with the
+                    # last open bracket
+                    self.reindent_to(y.compute_bracket_indent())
+                elif c == PyParse.C_BACKSLASH:
+                    # if more than one line in this stmt already, just
+                    # mimic the current indent; else if initial line
+                    # has a start on an assignment stmt, indent to
+                    # beyond leftmost =; else to beyond first chunk of
+                    # non-whitespace on initial line
+                    if y.get_num_lines_in_stmt() > 1:
+                        text.insert("insert", indent)
+                    else:
+                        self.reindent_to(y.compute_backslash_indent())
+                else:
+                    assert 0, "bogus continuation type " + `c`
+                return "break"
+
+            # This line starts a brand new stmt; indent relative to
+            # indentation of initial line of closest preceding
+            # interesting stmt.
+            indent = y.get_base_indent_string()
+            text.insert("insert", indent)
+            if y.is_block_opener():
+                self.smart_indent_event(event)
+            elif indent and y.is_block_closer():
+                self.smart_backspace_event(event)
+            return "break"
+        finally:
+            text.see("insert")
+            text.undo_block_stop()
+
+    auto_indent = newline_and_indent_event
+
+    # Our editwin provides a is_char_in_string function that works
+    # with a Tk text index, but PyParse only knows about offsets into
+    # a string. This builds a function for PyParse that accepts an
+    # offset.
+
+    def _build_char_in_string_func(self, startindex):
+        def inner(offset, _startindex=startindex,
+                  _icis=self.editwin.is_char_in_string):
+            return _icis(_startindex + "+%dc" % offset)
+        return inner
+
+    def indent_region_event(self, event):
+        # Shift every non-blank line in the region one indent level right.
+        head, tail, chars, lines = self.get_region()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if line:
+                raw, effective = classifyws(line, self.tabwidth)
+                effective = effective + self.indentwidth
+                lines[pos] = self._make_blanks(effective) + line[raw:]
+        self.set_region(head, tail, chars, lines)
+        return "break"
+
+    def dedent_region_event(self, event):
+        # Shift every non-blank line one indent level left (not past col 0).
+        head, tail, chars, lines = self.get_region()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if line:
+                raw, effective = classifyws(line, self.tabwidth)
+                effective = max(effective - self.indentwidth, 0)
+                lines[pos] = self._make_blanks(effective) + line[raw:]
+        self.set_region(head, tail, chars, lines)
+        return "break"
+
+    def comment_region_event(self, event):
+        head, tail, chars, lines = self.get_region()
+        # len(lines) - 1: the final list element is the empty string after
+        # the region's trailing newline, which must not be commented.
+        for pos in range(len(lines) - 1):
+            line = lines[pos]
+            lines[pos] = '##' + line
+        self.set_region(head, tail, chars, lines)
+
+    def uncomment_region_event(self, event):
+        # Strip one '##' or a single '#' prefix from each line.
+        head, tail, chars, lines = self.get_region()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if not line:
+                continue
+            if line[:2] == '##':
+                line = line[2:]
+            elif line[:1] == '#':
+                line = line[1:]
+            lines[pos] = line
+        self.set_region(head, tail, chars, lines)
+
+    def tabify_region_event(self, event):
+        # Convert leading whitespace to tabs (plus spaces for the remainder).
+        head, tail, chars, lines = self.get_region()
+        tabwidth = self._asktabwidth()
+        for pos in range(len(lines)):
+            line = lines[pos]
+            if line:
+                raw, effective = classifyws(line, tabwidth)
+                ntabs, nspaces = divmod(effective, tabwidth)
+                lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:]
+        self.set_region(head, tail, chars, lines)
+
+    def untabify_region_event(self, event):
+        head, tail, chars, lines = self.get_region()
+        tabwidth = self._asktabwidth()
+        for pos in range(len(lines)):
+            lines[pos] = string.expandtabs(lines[pos], tabwidth)
+        self.set_region(head, tail, chars, lines)
+
+    def toggle_tabs_event(self, event):
+        if self.editwin.askyesno(
+              "Toggle tabs",
+              "Turn tabs " + ("on", "off")[self.usetabs] + "?",
+              parent=self.text):
+            self.usetabs = not self.usetabs
+        return "break"
+
+    # XXX this isn't bound to anything -- see class tabwidth comments
+    def change_tabwidth_event(self, event):
+        new = self._asktabwidth()
+        if new != self.tabwidth:
+            self.tabwidth = new
+            self.set_indentation_params(0, guess=0)
+        return "break"
+
+    def change_indentwidth_event(self, event):
+        new = self.editwin.askinteger(
+                  "Indent width",
+                  "New indent width (1-16)",
+                  parent=self.text,
+                  initialvalue=self.indentwidth,
+                  minvalue=1,
+                  maxvalue=16)
+        if new and new != self.indentwidth:
+            self.indentwidth = new
+        return "break"
+
+    def get_region(self):
+        """Return (head, tail, chars, lines) for the selection expanded
+        to whole lines (or the insert line if there is no selection);
+        chars includes the trailing newline, so lines ends with ''."""
+        text = self.text
+        first, last = self.editwin.get_selection_indices()
+        if first and last:
+            head = text.index(first + " linestart")
+            tail = text.index(last + "-1c lineend +1c")
+        else:
+            head = text.index("insert linestart")
+            tail = text.index("insert lineend +1c")
+        chars = text.get(head, tail)
+        lines = string.split(chars, "\n")
+        return head, tail, chars, lines
+
+    def set_region(self, head, tail, chars, lines):
+        """Replace the region with the (possibly modified) lines as a
+        single undoable action, reselecting it; bell if nothing changed."""
+        text = self.text
+        newchars = string.join(lines, "\n")
+        if newchars == chars:
+            text.bell()
+            return
+        text.tag_remove("sel", "1.0", "end")
+        text.mark_set("insert", head)
+        text.undo_block_start()
+        text.delete(head, tail)
+        text.insert(head, newchars)
+        text.undo_block_stop()
+        text.tag_add("sel", head, "insert")
+
+    # Make string that displays as n leading blanks.
+
+    def _make_blanks(self, n):
+        if self.usetabs:
+            ntabs, nspaces = divmod(n, self.tabwidth)
+            return '\t' * ntabs + ' ' * nspaces
+        else:
+            return ' ' * n
+
+    # Delete from beginning of line to insert point, then reinsert
+    # column logical (meaning use tabs if appropriate) spaces.
+
+    def reindent_to(self, column):
+        text = self.text
+        text.undo_block_start()
+        if text.compare("insert linestart", "!=", "insert"):
+            text.delete("insert linestart", "insert")
+        if column:
+            text.insert("insert", self._make_blanks(column))
+        text.undo_block_stop()
+
+    def _asktabwidth(self):
+        # Prompt for a tab width; fall back to the current one on cancel.
+        return self.editwin.askinteger(
+            "Tab width",
+            "Spaces per tab?",
+            parent=self.text,
+            initialvalue=self.tabwidth,
+            minvalue=1,
+            maxvalue=16) or self.tabwidth
+
+    # Guess indentwidth from text content.
+    # Return guessed indentwidth.  This should not be believed unless
+    # it's in a reasonable range (e.g., it will be 0 if no indented
+    # blocks are found).
+
+    def guess_indent(self):
+        opener, indented = IndentSearcher(self.text, self.tabwidth).run()
+        if opener and indented:
+            raw, indentsmall = classifyws(opener, self.tabwidth)
+            raw, indentlarge = classifyws(indented, self.tabwidth)
+        else:
+            indentsmall = indentlarge = 0
+        return indentlarge - indentsmall
+
+# "line.col" -> line, as an int
+def index2line(index):
+    # e.g. "12.5" -> 12: int(float(...)) truncates, dropping the column.
+    return int(float(index))
+
+# Look at the leading whitespace in s.
+# Return pair (# of leading ws characters,
+#              effective # of leading blanks after expanding
+#              tabs to width tabwidth)
+
+def classifyws(s, tabwidth):
+    raw = effective = 0
+    for ch in s:
+        if ch == ' ':
+            raw = raw + 1
+            effective = effective + 1
+        elif ch == '\t':
+            raw = raw + 1
+            # Python 2 integer division: advance to the next multiple
+            # of tabwidth (the next tab stop).
+            effective = (effective / tabwidth + 1) * tabwidth
+        else:
+            break
+    return raw, effective
+
+import tokenize
+_tokenize = tokenize
+del tokenize
+
+class IndentSearcher:
+    """Tokenize the Text widget's contents to find the first indented
+    block, from which guess_indent derives the file's indent width."""
+
+    # .run() chews over the Text widget, looking for a block opener
+    # and the stmt following it.  Returns a pair,
+    #     (line containing block opener, line containing stmt)
+    # Either or both may be None.
+
+    def __init__(self, text, tabwidth):
+        self.text = text
+        self.tabwidth = tabwidth
+        self.i = self.finished = 0
+        self.blkopenline = self.indentedline = None
+
+    def readline(self):
+        # File-like readline over the Text widget, one buffer line per
+        # call, for feeding tokenize; returns "" at end (or once done).
+        if self.finished:
+            return ""
+        i = self.i = self.i + 1
+        mark = `i` + ".0"
+        if self.text.compare(mark, ">=", "end"):
+            return ""
+        return self.text.get(mark, mark + " lineend+1c")
+
+    def tokeneater(self, type, token, start, end, line,
+                   INDENT=_tokenize.INDENT,
+                   NAME=_tokenize.NAME,
+                   OPENERS=('class', 'def', 'for', 'if', 'try', 'while')):
+        # Remember the most recent block-opener line; stop at the first
+        # INDENT token that follows one.
+        if self.finished:
+            pass
+        elif type == NAME and token in OPENERS:
+            self.blkopenline = line
+        elif type == INDENT and self.blkopenline:
+            self.indentedline = line
+            self.finished = 1
+
+    def run(self):
+        # tokenize expands tabs using the module-global tabsize; set it
+        # to ours for the duration and always restore it.
+        save_tabsize = _tokenize.tabsize
+        _tokenize.tabsize = self.tabwidth
+        try:
+            try:
+                _tokenize.tokenize(self.readline, self.tokeneater)
+            except (_tokenize.TokenError, IndentationError):
+                # since we cut off the tokenizer early, we can trigger
+                # spurious errors
+                pass
+        finally:
+            _tokenize.tabsize = save_tabsize
+        return self.blkopenline, self.indentedline
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/CallTips.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/CallTips.py
new file mode 100644
index 0000000..04eccde
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/CallTips.py
@@ -0,0 +1,190 @@
+# CallTips.py - An IDLE extension that provides "Call Tips" - ie, a floating window that
+# displays parameter information as you open parens.
+
+import string
+import sys
+import types
+
+class CallTips:
+    """Editor extension showing a floating argument-hint window when an
+    open parenthesis is typed after a resolvable name."""
+
+    menudefs = [
+    ]
+
+    keydefs = {
+        '<<paren-open>>': ['<Key-parenleft>'],
+        '<<paren-close>>': ['<Key-parenright>'],
+        '<<check-calltip-cancel>>': ['<KeyRelease>'],
+        '<<calltip-cancel>>': ['<ButtonPress>', '<Key-Escape>'],
+    }
+
+    windows_keydefs = {
+    }
+
+    unix_keydefs = {
+    }
+
+    def __init__(self, editwin):
+        self.editwin = editwin
+        self.text = editwin.text
+        self.calltip = None
+        # Hosts (e.g. Pythonwin) may supply their own tip-window factory;
+        # otherwise fall back to the Tk implementation.
+        if hasattr(self.text, "make_calltip_window"):
+            self._make_calltip_window = self.text.make_calltip_window
+        else:
+            self._make_calltip_window = self._make_tk_calltip_window
+
+    def close(self):
+        self._make_calltip_window = None
+
+    # Makes a Tk based calltip window.  Used by IDLE, but not Pythonwin.
+    # See __init__ above for how this is used.
+    def _make_tk_calltip_window(self):
+        import CallTipWindow
+        return CallTipWindow.CallTip(self.text)
+
+    def _remove_calltip_window(self):
+        if self.calltip:
+            self.calltip.hidetip()
+            self.calltip = None
+        
+    def paren_open_event(self, event):
+        self._remove_calltip_window()
+        arg_text = get_arg_text(self.get_object_at_cursor())
+        if arg_text:
+            self.calltip_start = self.text.index("insert")
+            self.calltip = self._make_calltip_window()
+            self.calltip.showtip(arg_text)
+        return "" #so the event is handled normally.
+
+    def paren_close_event(self, event):
+        # Now just hides, but later we should check if other
+        # paren'd expressions remain open.
+        self._remove_calltip_window()
+        return "" #so the event is handled normally.
+
+    def check_calltip_cancel_event(self, event):
+        if self.calltip:
+            # If we have moved before the start of the calltip,
+            # or off the calltip line, then cancel the tip.
+            # (Later need to be smarter about multi-line, etc)
+            if self.text.compare("insert", "<=", self.calltip_start) or \
+               self.text.compare("insert", ">", self.calltip_start + " lineend"):
+                self._remove_calltip_window()
+        return "" #so the event is handled normally.
+
+    def calltip_cancel_event(self, event):
+        self._remove_calltip_window()
+        return "" #so the event is handled normally.
+
+    def get_object_at_cursor(self,
+                             wordchars="._" + string.uppercase + string.lowercase + string.digits):
+        # XXX - This needs to be moved to a better place
+        # so the "." attribute lookup code can also use it.
+        text = self.text
+        chars = text.get("insert linestart", "insert")
+        i = len(chars)
+        while i and chars[i-1] in wordchars:
+            i = i-1
+        word = chars[i:]
+        if word:
+            # How is this for a hack!
+            import sys, __main__
+            namespace = sys.modules.copy()
+            namespace.update(__main__.__dict__)
+            # NOTE(review): eval of arbitrary editor text against the live
+            # namespace — can run code with side effects; bare except hides
+            # all lookup failures.  Deliberate hack per the comment above.
+            try:
+                    return eval(word, namespace)
+            except:
+                    pass
+        return None # Can't find an object.
+
+def _find_constructor(class_ob):
+    # Given a class object, return a function object used for the
+    # constructor (ie, __init__() ) or None if we can't find one.
+    # Recurses through __bases__ (old-style classes, Python 2 im_func).
+    try:
+        return class_ob.__init__.im_func
+    except AttributeError:
+        for base in class_ob.__bases__:
+            rc = _find_constructor(base)
+            if rc is not None: return rc
+    return None
+
+def get_arg_text(ob):
+    # Get a string describing the arguments for the given object.
+    # Builds "(arg, kw=default, ..., ***)" from the code object when
+    # possible ("..." marks *args, "***" marks **kwargs), then appends
+    # the first line (max 70 chars) of the docstring if there is one.
+    argText = ""
+    if ob is not None:
+        argOffset = 0
+        if type(ob)==types.ClassType:
+            # Look for the highest __init__ in the class chain.
+            fob = _find_constructor(ob)
+            if fob is None:
+                fob = lambda: None
+            else:
+                argOffset = 1
+        elif type(ob)==types.MethodType:
+            # bit of a hack for methods - turn it into a function
+            # but we drop the "self" param.
+            fob = ob.im_func
+            argOffset = 1
+        else:
+            fob = ob
+        # Try and build one for Python defined functions
+        if type(fob) in [types.FunctionType, types.LambdaType]:
+            try:
+                realArgs = fob.func_code.co_varnames[argOffset:fob.func_code.co_argcount]
+                defaults = fob.func_defaults or []
+                defaults = list(map(lambda name: "=%s" % name, defaults))
+                defaults = [""] * (len(realArgs)-len(defaults)) + defaults
+                items = map(lambda arg, dflt: arg+dflt, realArgs, defaults)
+                # co_flags bits: 0x4 = CO_VARARGS, 0x8 = CO_VARKEYWORDS
+                if fob.func_code.co_flags & 0x4:
+                    items.append("...")
+                if fob.func_code.co_flags & 0x8:
+                    items.append("***")
+                argText = string.join(items , ", ")
+                argText = "(%s)" % argText
+            except:
+                pass
+        # See if we can use the docstring
+        if hasattr(ob, "__doc__") and ob.__doc__:
+            pos = string.find(ob.__doc__, "\n")
+            if pos<0 or pos>70: pos=70
+            if argText: argText = argText + "\n"
+            argText = argText + ob.__doc__[:pos]
+
+    return argText
+
+#################################################
+#
+# Test code
+#
+if __name__=='__main__':
+
+    # Self-test: each fixture's docstring states the calltip expected
+    # for it, so the expected output is docstring + "\n" + docstring.
+    def t1(): "()"
+    def t2(a, b=None): "(a, b=None)"
+    def t3(a, *args): "(a, ...)"
+    def t4(*args): "(...)"
+    def t5(a, *args): "(a, ...)"
+    def t6(a, b=None, *args, **kw): "(a, b=None, ..., ***)"
+
+    class TC:
+        "(a=None, ...)"
+        def __init__(self, a=None, *b): "(a=None, ...)"
+        def t1(self): "()"
+        def t2(self, a, b=None): "(a, b=None)"
+        def t3(self, a, *args): "(a, ...)"
+        def t4(self, *args): "(...)"
+        def t5(self, a, *args): "(a, ...)"
+        def t6(self, a, b=None, *args, **kw): "(a, b=None, ..., ***)"
+
+    def test( tests ):
+        failed=[]
+        for t in tests:
+            expected = t.__doc__ + "\n" + t.__doc__
+            if get_arg_text(t) != expected:
+                failed.append(t)
+                print "%s - expected %s, but got %s" % (t, `expected`, `get_arg_text(t)`)
+        print "%d of %d tests failed" % (len(failed), len(tests))
+
+    tc = TC()
+    tests = t1, t2, t3, t4, t5, t6, \
+            TC, tc.t1, tc.t2, tc.t3, tc.t4, tc.t5, tc.t6
+
+    test(tests)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/FormatParagraph.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/FormatParagraph.py
new file mode 100644
index 0000000..76c52f55
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/FormatParagraph.py
@@ -0,0 +1,155 @@
+# Extension to format a paragraph
+
+# Does basic, standard text formatting, and also understands Python
+# comment blocks.  Thus, for editing Python source code, this
+# extension is really only suitable for reformatting these comment
+# blocks or triple-quoted strings.
+
+# Known problems with comment reformatting:
+# * If there is a selection marked, and the first line of the
+#   selection is not complete, the block will probably not be detected
+#   as comments, and will have the normal "text formatting" rules
+#   applied.
+# * If a comment block has leading whitespace that mixes tabs and
+#   spaces, they will not be considered part of the same block.
+# * Fancy comments, like this bulleted list, arent handled :-)
+
+import string
+import re
+
+class FormatParagraph:
+    """Editor extension that rewraps the selection or the paragraph
+    around the cursor to ~70 columns, preserving a '#' comment prefix."""
+
+    menudefs = [
+        ('edit', [
+            ('Format Paragraph', '<<format-paragraph>>'),
+         ])
+    ]
+
+    keydefs = {
+        '<<format-paragraph>>': ['<Alt-q>'],
+    }
+    
+    unix_keydefs = {
+        '<<format-paragraph>>': ['<Meta-q>'],
+    } 
+
+    def __init__(self, editwin):
+        self.editwin = editwin
+
+    def close(self):
+        self.editwin = None
+
+    def format_paragraph_event(self, event):
+        text = self.editwin.text
+        first, last = self.editwin.get_selection_indices()
+        if first and last:
+            # Explicit selection: treat it as plain text.
+            data = text.get(first, last)
+            comment_header = ''
+        else:
+            first, last, comment_header, data = \
+                    find_paragraph(text, text.index("insert"))
+        if comment_header:
+            # Reformat the comment lines - convert to text sans header.
+            lines = string.split(data, "\n")
+            lines = map(lambda st, l=len(comment_header): st[l:], lines)
+            data = string.join(lines, "\n")
+            # Reformat to 70 chars or a 20 char width, whichever is greater.
+            format_width = max(70-len(comment_header), 20)
+            newdata = reformat_paragraph(data, format_width)
+            # re-split and re-insert the comment header.
+            newdata = string.split(newdata, "\n")
+            # If the block ends in a \n, we dont want the comment
+            # prefix inserted after it. (Im not sure it makes sense to
+            # reformat a comment block that isnt made of complete
+            # lines, but whatever!)  Can't think of a clean soltution,
+            # so we hack away
+            block_suffix = ""
+            if not newdata[-1]:
+                block_suffix = "\n"
+                newdata = newdata[:-1]
+            builder = lambda item, prefix=comment_header: prefix+item
+            newdata = string.join(map(builder, newdata), '\n') + block_suffix
+        else:
+            # Just a normal text format
+            newdata = reformat_paragraph(data)
+        text.tag_remove("sel", "1.0", "end")
+        if newdata != data:
+            # Replace as one undoable action.
+            text.mark_set("insert", first)
+            text.undo_block_start()
+            text.delete(first, last)
+            text.insert(first, newdata)
+            text.undo_block_stop()
+        else:
+            text.mark_set("insert", last)
+        text.see("insert")
+
+def find_paragraph(text, mark):
+    """Locate the paragraph containing Tk index `mark`.
+
+    A paragraph is a maximal run of lines sharing the same comment
+    header (possibly '') that are not blank after that header.
+    Returns (first, last, comment_header, text_of_paragraph).
+    """
+    lineno, col = map(int, string.split(mark, "."))
+    line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    # Skip forward over blank lines to reach paragraph text.
+    while text.compare("%d.0" % lineno, "<", "end") and is_all_white(line):
+        lineno = lineno + 1
+        line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    first_lineno = lineno
+    comment_header = get_comment_header(line)
+    comment_header_len = len(comment_header)
+    # Scan forward to the end of the paragraph.
+    while get_comment_header(line)==comment_header and \
+              not is_all_white(line[comment_header_len:]):
+        lineno = lineno + 1
+        line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    last = "%d.0" % lineno
+    # Search back to beginning of paragraph
+    lineno = first_lineno - 1
+    line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    while lineno > 0 and \
+              get_comment_header(line)==comment_header and \
+              not is_all_white(line[comment_header_len:]):
+        lineno = lineno - 1
+        line = text.get("%d.0" % lineno, "%d.0 lineend" % lineno)
+    first = "%d.0" % (lineno+1)
+    return first, last, comment_header, text.get(first, last)
+
+def reformat_paragraph(data, limit=70):
+    """Greedily rewrap the first paragraph of `data` to `limit` columns.
+
+    The first line's indent is kept; continuation lines take the second
+    line's indent.  Lines after the first blank line are left untouched.
+    """
+    lines = string.split(data, "\n")
+    i = 0
+    n = len(lines)
+    while i < n and is_all_white(lines[i]):
+        i = i+1
+    if i >= n:
+        return data
+    indent1 = get_indent(lines[i])
+    if i+1 < n and not is_all_white(lines[i+1]):
+        indent2 = get_indent(lines[i+1])
+    else:
+        indent2 = indent1
+    new = lines[:i]
+    partial = indent1
+    while i < n and not is_all_white(lines[i]):
+        # XXX Should take double space after period (etc.) into account
+        # re.split with a captured group keeps the separators at odd
+        # indices, so step over words at even indices only.
+        words = re.split("(\s+)", lines[i])
+        for j in range(0, len(words), 2):
+            word = words[j]
+            if not word:
+                continue # Can happen when line ends in whitespace
+            if len(string.expandtabs(partial + word)) > limit and \
+               partial != indent1:
+                new.append(string.rstrip(partial))
+                partial = indent2
+            partial = partial + word + " "
+            if j+1 < len(words) and words[j+1] != " ":
+                partial = partial + " "
+        i = i+1
+    new.append(string.rstrip(partial))
+    # XXX Should reformat remaining paragraphs as well
+    new.extend(lines[i:])
+    return string.join(new, "\n")
+
+def is_all_white(line):
+    # True for empty or whitespace-only lines.
+    return re.match(r"^\s*$", line) is not None
+
+def get_indent(line):
+    # Leading whitespace of the line (possibly '').
+    return re.match(r"^(\s*)", line).group()
+
+def get_comment_header(line):
+    # Leading whitespace plus any '#' run; '' when the line is plain text.
+    m = re.match(r"^(\s*#*)", line)
+    if m is None: return ""
+    return m.group(1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/IdleHistory.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/IdleHistory.py
new file mode 100644
index 0000000..aa41b73
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/IdleHistory.py
@@ -0,0 +1,89 @@
+import string
+
class History:
    """Input-history browsing for an interactive (shell-style) text widget.

    Remembers previously entered source chunks and binds the virtual
    events <<history-previous>> / <<history-next>> so the user can cycle
    through entries whose beginning matches whatever is currently typed
    after the widget's "iomark" mark.  `text` is presumably a
    Tkinter-Text-compatible widget (bind/get/insert/marks/tags) --
    confirm against the caller.
    """

    def __init__(self, text, output_sep = "\n"):
        self.text = text
        self.history = []              # previously entered source chunks
        self.history_prefix = None     # prefix currently being matched, or None
        self.history_pointer = None    # index into self.history, or None
        self.output_sep = output_sep   # line separator used inside the widget
        text.bind("<<history-previous>>", self.history_prev)
        text.bind("<<history-next>>", self.history_next)

    def history_next(self, event):
        """Event handler: step toward newer history entries."""
        self.history_do(0)
        return "break"

    def history_prev(self, event):
        """Event handler: step toward older history entries."""
        self.history_do(1)
        return "break"

    def _get_source(self, start, end):
        # Get source code from start index to end index.  Lines in the
        # text control may be separated by sys.ps2 .
        lines = string.split(self.text.get(start, end), self.output_sep)
        return string.join(lines, "\n")

    def _put_source(self, where, source):
        # Inverse of _get_source: convert "\n" separators back to the
        # widget's output separator before inserting.
        output = string.join(string.split(source, "\n"), self.output_sep)
        self.text.insert(where, output)

    def history_do(self, reverse):
        """Replace the current input with the next matching history entry.

        reverse is true to walk toward older entries.  The text between
        "iomark" and end-1c is the prefix to match; the saved pointer and
        prefix are discarded whenever the user has edited that region.
        """
        nhist = len(self.history)
        pointer = self.history_pointer
        prefix = self.history_prefix
        if pointer is not None and prefix is not None:
            # Invalidate the saved walk if the cursor moved or the input
            # region no longer equals the entry we last displayed.
            if self.text.compare("insert", "!=", "end-1c") or \
               self._get_source("iomark", "end-1c") != self.history[pointer]:
                pointer = prefix = None
        if pointer is None or prefix is None:
            # Start a fresh scan, using the current input as the prefix.
            prefix = self._get_source("iomark", "end-1c")
            if reverse:
                pointer = nhist
            else:
                pointer = -1
        nprefix = len(prefix)
        while 1:
            if reverse:
                pointer = pointer - 1
            else:
                pointer = pointer + 1
            if pointer < 0 or pointer >= nhist:
                # Ran off either end of the history: beep, restore the
                # bare prefix, and forget the walk state.
                self.text.bell()
                if self._get_source("iomark", "end-1c") != prefix:
                    self.text.delete("iomark", "end-1c")
                    self._put_source("iomark", prefix)
                pointer = prefix = None
                break
            item = self.history[pointer]
            # Show only entries that strictly extend the prefix.
            if item[:nprefix] == prefix and len(item) > nprefix:
                self.text.delete("iomark", "end-1c")
                self._put_source("iomark", item)
                break
        self.text.mark_set("insert", "end-1c")
        self.text.see("insert")
        self.text.tag_remove("sel", "1.0", "end")
        self.history_pointer = pointer
        self.history_prefix = prefix

    def history_store(self, source):
        """Record source in the history (trivially short entries are skipped)
        and reset the browsing state."""
        source = string.strip(source)
        if len(source) > 2:
            # avoid duplicates
            try:
                self.history.remove(source)
            except ValueError:
                pass
            self.history.append(source)
        self.history_pointer = None
        self.history_prefix = None

    def recall(self, s):
        """Replace the input region with s and move the cursor to its end."""
        s = string.strip(s)
        self.text.tag_remove("sel", "1.0", "end")
        self.text.delete("iomark", "end-1c")
        self.text.mark_set("insert", "end-1c")
        self.text.insert("insert", s)
        self.text.see("insert")
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/PyParse.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/PyParse.py
new file mode 100644
index 0000000..89878e5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/PyParse.py
@@ -0,0 +1,588 @@
+import string
+import re
+import sys
+
+try:
+    from types import UnicodeType
+except ImportError:
+    from pywintypes import UnicodeType
+
# Reason last stmt is continued (or C_NONE if it's not).
C_NONE, C_BACKSLASH, C_STRING, C_BRACKET = range(4)

# Flip "if 0" to "if 1" to enable the throwaway dump() debug helper.
if 0:   # for throwaway debugging output
    def dump(*stuff):
        sys.__stdout__.write(string.join(map(str, stuff), " ") + "\n")

# Find what looks like the start of a popular stmt.

_synchre = re.compile(r"""
    ^
    [ \t]*
    (?: if
    |   for
    |   while
    |   else
    |   def
    |   return
    |   assert
    |   break
    |   class
    |   continue
    |   elif
    |   try
    |   except
    |   raise
    |   import
    )
    \b
""", re.VERBOSE | re.MULTILINE).search

# Match blank line or non-indenting comment line.

_junkre = re.compile(r"""
    [ \t]*
    (?: \# \S .* )?
    \n
""", re.VERBOSE).match

# Match any flavor of string; the terminating quote is optional
# so that we're robust in the face of incomplete program text.

_match_stringre = re.compile(r"""
    \""" [^"\\]* (?:
                     (?: \\. | "(?!"") )
                     [^"\\]*
                 )*
    (?: \""" )?

|   " [^"\\\n]* (?: \\. [^"\\\n]* )* "?

|   ''' [^'\\]* (?:
                   (?: \\. | '(?!'') )
                   [^'\\]*
                )*
    (?: ''' )?

|   ' [^'\\\n]* (?: \\. [^'\\\n]* )* '?
""", re.VERBOSE | re.DOTALL).match

# Match a line that starts with something interesting;
# used to find the first item of a bracket structure.

_itemre = re.compile(r"""
    [ \t]*
    [^\s#\\]    # if we match, m.end()-1 is the interesting char
""", re.VERBOSE).match

# Match start of stmts that should be followed by a dedent.

_closere = re.compile(r"""
    \s*
    (?: return
    |   break
    |   continue
    |   raise
    |   pass
    )
    \b
""", re.VERBOSE).match

# Chew up non-special chars as quickly as possible.  If match is
# successful, m.end() less 1 is the index of the last boring char
# matched.  If match is unsuccessful, the string starts with an
# interesting char.

_chew_ordinaryre = re.compile(r"""
    [^[\](){}#'"\\]+
""", re.VERBOSE).match

# Build translation table to map uninteresting chars to "x", open
# brackets to "(", and close brackets to ")".
# NOTE: the table covers 8-bit strings only; Parser.set_str() maps any
# Unicode input down to plain "x" characters before it is translated.

_tran = ['x'] * 256
for ch in "({[":
    _tran[ord(ch)] = '('
for ch in ")}]":
    _tran[ord(ch)] = ')'
for ch in "\"'\\\n#":
    _tran[ord(ch)] = ch
_tran = string.join(_tran, '')
del ch
+
class Parser:
    """Study (possibly incomplete) Python source for auto-indent support.

    The text is supplied via set_str()/set_lo() and must end in a newline.
    The parser answers the questions an editor needs: whether the last
    statement is continued (and why: string, bracket, or backslash), how
    far the next line should be indented, and whether the last statement
    opens or closes a block.  Analysis is lazy and cached per study level
    (_study1 / _study2).
    """

    def __init__(self, indentwidth, tabwidth):
        self.indentwidth = indentwidth
        self.tabwidth = tabwidth

    def set_str(self, str):
        """Set the source text to study; must be empty or end in a newline."""
        assert len(str) == 0 or str[-1] == '\n', "Oops - have str %r" % (str,)
        if type(str) == UnicodeType:
            # The parse functions have no idea what to do with Unicode, so
            # replace all Unicode characters with "x".  This is "safe"
            # so long as the only characters germane to parsing the structure
            # of Python are 7-bit ASCII.  It's *necessary* because Unicode
            # strings don't have a .translate() method that supports
            # deletechars.
            uniphooey = str
            str = []
            push = str.append
            for raw in map(ord, uniphooey):
                push(raw < 127 and chr(raw) or "x")
            str = "".join(str)
        self.str = str
        self.study_level = 0

    # Return index of a good place to begin parsing, as close to the
    # end of the string as possible.  This will be the start of some
    # popular stmt like "if" or "def".  Return None if none found:
    # the caller should pass more prior context then, if possible, or
    # if not (the entire program text up until the point of interest
    # has already been tried) pass 0 to set_lo.
    #
    # This will be reliable iff given a reliable is_char_in_string
    # function, meaning that when it says "no", it's absolutely
    # guaranteed that the char is not in a string.
    #
    # Ack, hack: in the shell window this kills us, because there's
    # no way to tell the differences between output, >>> etc and
    # user input.  Indeed, IDLE's first output line makes the rest
    # look like it's in an unclosed paren!:
    # Python 1.5.2 (#0, Apr 13 1999, ...

    def find_good_parse_start(self, use_ps1, is_char_in_string=None,
                              _rfind=string.rfind,
                              _synchre=_synchre):
        """Return an index near the end where parsing can safely begin,
        or None if no synch point was found (see comment above)."""
        str, pos = self.str, None
        if use_ps1:
            # shell window
            ps1 = '\n' + sys.ps1
            i = _rfind(str, ps1)
            if i >= 0:
                pos = i + len(ps1)
                # make it look like there's a newline instead
                # of ps1 at the start -- hacking here once avoids
                # repeated hackery later
                self.str = str[:pos-1] + '\n' + str[pos:]
            return pos

        # File window -- real work.
        if not is_char_in_string:
            # no clue -- make the caller pass everything
            return None

        # Peek back from the end for a good place to start,
        # but don't try too often; pos will be left None, or
        # bumped to a legitimate synch point.
        limit = len(str)
        for tries in range(5):
            i = _rfind(str, ":\n", 0, limit)
            if i < 0:
                break
            i = _rfind(str, '\n', 0, i) + 1  # start of colon line
            m = _synchre(str, i, limit)
            if m and not is_char_in_string(m.start()):
                pos = m.start()
                break
            limit = i
        if pos is None:
            # Nothing looks like a block-opener, or stuff does
            # but is_char_in_string keeps returning true; most likely
            # we're in or near a giant string, the colorizer hasn't
            # caught up enough to be helpful, or there simply *aren't*
            # any interesting stmts.  In any of these cases we're
            # going to have to parse the whole thing to be sure, so
            # give it one last try from the start, but stop wasting
            # time here regardless of the outcome.
            m = _synchre(str)
            if m and not is_char_in_string(m.start()):
                pos = m.start()
            return pos

        # Peeking back worked; look forward until _synchre no longer
        # matches.
        i = pos + 1
        while 1:
            m = _synchre(str, i)
            if m:
                s, i = m.span()
                if not is_char_in_string(s):
                    pos = s
            else:
                break
        return pos

    # Throw away the start of the string.  Intended to be called with
    # find_good_parse_start's result.

    def set_lo(self, lo):
        """Discard the text before index lo; lo must start a line."""
        assert lo == 0 or self.str[lo-1] == '\n'
        if lo > 0:
            self.str = self.str[lo:]

    # As quickly as humanly possible <wink>, find the line numbers (0-
    # based) of the non-continuation lines.
    # Creates self.{goodlines, continuation}.

    def _study1(self, _replace=string.replace, _find=string.find):
        """Study level 1: compute self.goodlines and self.continuation."""
        if self.study_level >= 1:
            return
        self.study_level = 1

        # Map all uninteresting characters to "x", all open brackets
        # to "(", all close brackets to ")", then collapse runs of
        # uninteresting characters.  This can cut the number of chars
        # by a factor of 10-40, and so greatly speed the following loop.
        str = self.str
        str = string.translate(str, _tran)
        str = _replace(str, 'xxxxxxxx', 'x')
        str = _replace(str, 'xxxx', 'x')
        str = _replace(str, 'xx', 'x')
        str = _replace(str, 'xx', 'x')
        str = _replace(str, '\nx', '\n')
        # note that replacing x\n with \n would be incorrect, because
        # x may be preceded by a backslash

        # March over the squashed version of the program, accumulating
        # the line numbers of non-continued stmts, and determining
        # whether & why the last stmt is a continuation.
        continuation = C_NONE
        level = lno = 0     # level is nesting level; lno is line number
        self.goodlines = goodlines = [0]
        push_good = goodlines.append
        i, n = 0, len(str)
        while i < n:
            ch = str[i]
            i = i+1

            # cases are checked in decreasing order of frequency
            if ch == 'x':
                continue

            if ch == '\n':
                lno = lno + 1
                if level == 0:
                    push_good(lno)
                    # else we're in an unclosed bracket structure
                continue

            if ch == '(':
                level = level + 1
                continue

            if ch == ')':
                if level:
                    level = level - 1
                    # else the program is invalid, but we can't complain
                continue

            if ch == '"' or ch == "'":
                # consume the string
                quote = ch
                if str[i-1:i+2] == quote * 3:
                    quote = quote * 3
                w = len(quote) - 1
                i = i+w
                while i < n:
                    ch = str[i]
                    i = i+1

                    if ch == 'x':
                        continue

                    if str[i-1:i+w] == quote:
                        i = i+w
                        break

                    if ch == '\n':
                        lno = lno + 1
                        if w == 0:
                            # unterminated single-quoted string
                            if level == 0:
                                push_good(lno)
                            break
                        continue

                    if ch == '\\':
                        assert i < n
                        if str[i] == '\n':
                            lno = lno + 1
                        i = i+1
                        continue

                    # else comment char or paren inside string

                else:
                    # didn't break out of the loop, so we're still
                    # inside a string
                    continuation = C_STRING
                continue    # with outer loop

            if ch == '#':
                # consume the comment
                i = _find(str, '\n', i)
                assert i >= 0
                continue

            assert ch == '\\'
            assert i < n
            if str[i] == '\n':
                lno = lno + 1
                if i+1 == n:
                    continuation = C_BACKSLASH
            i = i+1

        # The last stmt may be continued for all 3 reasons.
        # String continuation takes precedence over bracket
        # continuation, which beats backslash continuation.
        if continuation != C_STRING and level > 0:
            continuation = C_BRACKET
        self.continuation = continuation

        # Push the final line number as a sentinel value, regardless of
        # whether it's continued.
        assert (continuation == C_NONE) == (goodlines[-1] == lno)
        if goodlines[-1] != lno:
            push_good(lno)

    def get_continuation_type(self):
        """Return why the last stmt is continued: one of the C_* constants."""
        self._study1()
        return self.continuation

    # study1 was sufficient to determine the continuation status,
    # but doing more requires looking at every character.  study2
    # does this for the last interesting statement in the block.
    # Creates:
    #     self.stmt_start, stmt_end
    #         slice indices of last interesting stmt
    #     self.lastch
    #         last non-whitespace character before optional trailing
    #         comment
    #     self.lastopenbracketpos
    #         if continuation is C_BRACKET, index of last open bracket

    def _study2(self, _rfind=string.rfind, _find=string.find,
                      _ws=string.whitespace):
        """Study level 2: locate and scan the last interesting stmt
        (see the attribute list in the comment above)."""
        if self.study_level >= 2:
            return
        self._study1()
        self.study_level = 2

        # Set p and q to slice indices of last interesting stmt.
        str, goodlines = self.str, self.goodlines
        i = len(goodlines) - 1
        p = len(str)    # index of newest line
        while i:
            assert p
            # p is the index of the stmt at line number goodlines[i].
            # Move p back to the stmt at line number goodlines[i-1].
            q = p
            for nothing in range(goodlines[i-1], goodlines[i]):
                # tricky: sets p to 0 if no preceding newline
                p = _rfind(str, '\n', 0, p-1) + 1
            # The stmt str[p:q] isn't a continuation, but may be blank
            # or a non-indenting comment line.
            if  _junkre(str, p):
                i = i-1
            else:
                break
        if i == 0:
            # nothing but junk!
            assert p == 0
            q = p
        self.stmt_start, self.stmt_end = p, q

        # Analyze this stmt, to find the last open bracket (if any)
        # and last interesting character (if any).
        lastch = ""
        stack = []  # stack of open bracket indices
        push_stack = stack.append
        while p < q:
            # suck up all except ()[]{}'"#\\
            m = _chew_ordinaryre(str, p, q)
            if m:
                # we skipped at least one boring char
                newp = m.end()
                # back up over totally boring whitespace
                i = newp - 1    # index of last boring char
                while i >= p and str[i] in " \t\n":
                    i = i-1
                if i >= p:
                    lastch = str[i]
                p = newp
                if p >= q:
                    break

            ch = str[p]

            if ch in "([{":
                push_stack(p)
                lastch = ch
                p = p+1
                continue

            if ch in ")]}":
                if stack:
                    del stack[-1]
                lastch = ch
                p = p+1
                continue

            if ch == '"' or ch == "'":
                # consume string
                # Note that study1 did this with a Python loop, but
                # we use a regexp here; the reason is speed in both
                # cases; the string may be huge, but study1 pre-squashed
                # strings to a couple of characters per line.  study1
                # also needed to keep track of newlines, and we don't
                # have to.
                lastch = ch
                p = _match_stringre(str, p, q).end()
                continue

            if ch == '#':
                # consume comment and trailing newline
                p = _find(str, '\n', p, q) + 1
                assert p > 0
                continue

            assert ch == '\\'
            p = p+1     # beyond backslash
            assert p < q
            if str[p] != '\n':
                # the program is invalid, but can't complain
                lastch = ch + str[p]
            p = p+1     # beyond escaped char

        # end while p < q:

        self.lastch = lastch
        if stack:
            self.lastopenbracketpos = stack[-1]

    # Assuming continuation is C_BRACKET, return the number
    # of spaces the next line should be indented.

    def compute_bracket_indent(self, _find=string.find):
        """Return the column the next line should be indented to, given
        that the last stmt is continued by an open bracket."""
        self._study2()
        assert self.continuation == C_BRACKET
        j = self.lastopenbracketpos
        str = self.str
        n = len(str)
        origi = i = string.rfind(str, '\n', 0, j) + 1
        j = j+1     # one beyond open bracket
        # find first list item; set i to start of its line
        while j < n:
            m = _itemre(str, j)
            if m:
                j = m.end() - 1     # index of first interesting char
                extra = 0
                break
            else:
                # this line is junk; advance to next line
                i = j = _find(str, '\n', j) + 1
        else:
            # nothing interesting follows the bracket;
            # reproduce the bracket line's indentation + a level
            j = i = origi
            while str[j] in " \t":
                j = j+1
            extra = self.indentwidth
        return len(string.expandtabs(str[i:j],
                                     self.tabwidth)) + extra

    # Return number of physical lines in last stmt (whether or not
    # it's an interesting stmt!  this is intended to be called when
    # continuation is C_BACKSLASH).

    def get_num_lines_in_stmt(self):
        """Return the number of physical lines spanned by the last stmt."""
        self._study1()
        goodlines = self.goodlines
        return goodlines[-1] - goodlines[-2]

    # Assuming continuation is C_BACKSLASH, return the number of spaces
    # the next line should be indented.  Also assuming the new line is
    # the first one following the initial line of the stmt.

    def compute_backslash_indent(self):
        """Return the column the next line should be indented to, given
        that the last stmt is continued by a backslash."""
        self._study2()
        assert self.continuation == C_BACKSLASH
        str = self.str
        i = self.stmt_start
        while str[i] in " \t":
            i = i+1
        startpos = i

        # See whether the initial line starts an assignment stmt; i.e.,
        # look for an = operator
        endpos = string.find(str, '\n', startpos) + 1
        found = level = 0
        while i < endpos:
            ch = str[i]
            if ch in "([{":
                level = level + 1
                i = i+1
            elif ch in ")]}":
                if level:
                    level = level - 1
                i = i+1
            elif ch == '"' or ch == "'":
                i = _match_stringre(str, i, endpos).end()
            elif ch == '#':
                break
            elif level == 0 and ch == '=' and \
                   (i == 0 or str[i-1] not in "=<>!") and \
                   str[i+1] != '=':
                found = 1
                break
            else:
                i = i+1

        if found:
            # found a legit =, but it may be the last interesting
            # thing on the line
            i = i+1     # move beyond the =
            found = re.match(r"\s*\\", str[i:endpos]) is None

        if not found:
            # oh well ... settle for moving beyond the first chunk
            # of non-whitespace chars
            i = startpos
            while str[i] not in " \t\n":
                i = i+1

        return len(string.expandtabs(str[self.stmt_start :
                                         i],
                                     self.tabwidth)) + 1

    # Return the leading whitespace on the initial line of the last
    # interesting stmt.

    def get_base_indent_string(self):
        """Return the leading whitespace of the last interesting stmt."""
        self._study2()
        i, n = self.stmt_start, self.stmt_end
        j = i
        str = self.str
        while j < n and str[j] in " \t":
            j = j + 1
        return str[i:j]

    # Did the last interesting stmt open a block?

    def is_block_opener(self):
        """Return true if the last interesting stmt ends with a colon."""
        self._study2()
        return self.lastch == ':'

    # Did the last interesting stmt close a block?

    def is_block_closer(self):
        """Return true if the last stmt starts with return/break/continue/
        raise/pass (so the next line should dedent)."""
        self._study2()
        return _closere(self.str, self.stmt_start) is not None

    # index of last open bracket ({[, or None if none
    lastopenbracketpos = None

    def get_last_open_bracket_pos(self):
        """Return the index of the last open bracket, or None if none."""
        self._study2()
        return self.lastopenbracketpos
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/__init__.py
new file mode 100644
index 0000000..c9c70df4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/idle/__init__.py
@@ -0,0 +1 @@
+# This file denotes the directory as a Python package.
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/activex.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/activex.py
new file mode 100644
index 0000000..d8b5c4c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/activex.py
@@ -0,0 +1,69 @@
+"""Support for ActiveX control hosting in Pythonwin.
+"""
+import win32ui, win32uiole, window
+import new
+
class Control(window.Wnd):
	"""An ActiveX control base class.  A new class must be derived from both
	this class and the Events class.  See the demos for more details.
	"""
	def __init__(self):
		# Write via __dict__ so __setattr__ is not triggered before the
		# control exists.
		self.__dict__["_dispobj_"] = None
		window.Wnd.__init__(self)

	def _GetControlCLSID( self ):
		# CLSID comes from the derived (generated) class.
		return self.CLSID
	def _GetDispatchClass(self):
		# Dispatch wrapper class supplied by the generated type-library module.
		return self.default_interface
	def _GetEventMap(self):
		# DISPID -> handler-method-name map from the generated source interface.
		return self.default_source._dispid_to_func_

	def CreateControl(self, windowTitle, style, rect, parent, id, lic_string=None):
		"""Create the underlying OCX window, hook its events, and attach
		the IDispatch wrapper object."""
		clsid = str(self._GetControlCLSID())
		self.__dict__["_obj_"] = win32ui.CreateControl(clsid, windowTitle, style, rect, parent, id, None, False, lic_string)
		klass = self._GetDispatchClass()
		dispobj = klass(win32uiole.GetIDispatchForWindow(self._obj_))
		self.HookOleEvents()
		self.__dict__["_dispobj_"] = dispobj

	def HookOleEvents(self):
		"""Connect each OLE event to the same-named method, if defined on self."""
		dict = self._GetEventMap()
		for dispid, methodName in dict.items():
			if hasattr(self, methodName):
				self._obj_.HookOleEvent( getattr(self, methodName), dispid )

	def __getattr__(self, attr):
		# Delegate attributes to the windows and the Dispatch object for this class
		try:
			return window.Wnd.__getattr__(self, attr)
		except AttributeError:
			pass
		return getattr(self._dispobj_, attr)
	def __setattr__(self, attr, value):
		# BUGFIX: the original tested hasattr(self.__dict__, attr), which
		# looks for attributes *of the dict object itself* and is
		# effectively never true, so existing instance attributes were
		# forwarded to the COM object instead of being updated locally.
		# Dict membership is the intended test.
		if attr in self.__dict__:
			self.__dict__[attr] = value
			return
		try:
			if self._dispobj_:
				self._dispobj_.__setattr__(attr, value)
				return
		except AttributeError:
			pass
		self.__dict__[attr] = value
+
def MakeControlClass( controlClass, name = None ):
	"""Build and return a Class object usable as an OCX control, given a
	CoClass from a generated .py file.

	Use this when you have no need to handle events from the OCX control;
	if you do need events, derive your own class from both activex.Control
	and the CoClass instead.
	"""
	ocx_name = name
	if ocx_name is None:
		ocx_name = controlClass.__name__
	# new.classobj keeps the classic-class semantics of the CoClass.
	return new.classobj("OCX" + ocx_name, (Control, controlClass), {})
+
def MakeControlInstance( controlClass, name = None ):
	"""Convenience wrapper: build the class via MakeControlClass() and
	return an instance of it."""
	klass = MakeControlClass(controlClass, name)
	return klass()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/afxres.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/afxres.py
new file mode 100644
index 0000000..e75b3f25
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/afxres.py
@@ -0,0 +1,497 @@
+# Generated by h2py from stdin
+TCS_MULTILINE = 0x0200
+CBRS_ALIGN_LEFT = 0x1000
+CBRS_ALIGN_TOP = 0x2000
+CBRS_ALIGN_RIGHT = 0x4000
+CBRS_ALIGN_BOTTOM = 0x8000
+CBRS_ALIGN_ANY = 0xF000
+CBRS_BORDER_LEFT = 0x0100
+CBRS_BORDER_TOP = 0x0200
+CBRS_BORDER_RIGHT = 0x0400
+CBRS_BORDER_BOTTOM = 0x0800
+CBRS_BORDER_ANY = 0x0F00
+CBRS_TOOLTIPS = 0x0010
+CBRS_FLYBY = 0x0020
+CBRS_FLOAT_MULTI = 0x0040
+CBRS_BORDER_3D = 0x0080
+CBRS_HIDE_INPLACE = 0x0008
+CBRS_SIZE_DYNAMIC = 0x0004
+CBRS_SIZE_FIXED = 0x0002
+CBRS_FLOATING = 0x0001
+CBRS_GRIPPER = 0x00400000
+CBRS_ORIENT_HORZ = (CBRS_ALIGN_TOP|CBRS_ALIGN_BOTTOM)
+CBRS_ORIENT_VERT = (CBRS_ALIGN_LEFT|CBRS_ALIGN_RIGHT)
+CBRS_ORIENT_ANY = (CBRS_ORIENT_HORZ|CBRS_ORIENT_VERT)
+CBRS_ALL = 0xFFFF
+CBRS_NOALIGN = 0x00000000
+CBRS_LEFT = (CBRS_ALIGN_LEFT|CBRS_BORDER_RIGHT)
+CBRS_TOP = (CBRS_ALIGN_TOP|CBRS_BORDER_BOTTOM)
+CBRS_RIGHT = (CBRS_ALIGN_RIGHT|CBRS_BORDER_LEFT)
+CBRS_BOTTOM = (CBRS_ALIGN_BOTTOM|CBRS_BORDER_TOP)
+SBPS_NORMAL = 0x0000
+SBPS_NOBORDERS = 0x0100
+SBPS_POPOUT = 0x0200
+SBPS_OWNERDRAW = 0x1000
+SBPS_DISABLED = 0x04000000
+SBPS_STRETCH = 0x08000000
+ID_INDICATOR_EXT = 0xE700
+ID_INDICATOR_CAPS = 0xE701
+ID_INDICATOR_NUM = 0xE702
+ID_INDICATOR_SCRL = 0xE703
+ID_INDICATOR_OVR = 0xE704
+ID_INDICATOR_REC = 0xE705
+ID_INDICATOR_KANA = 0xE706
+ID_SEPARATOR = 0
+AFX_IDW_CONTROLBAR_FIRST = 0xE800
+AFX_IDW_CONTROLBAR_LAST = 0xE8FF
+AFX_IDW_TOOLBAR = 0xE800
+AFX_IDW_STATUS_BAR = 0xE801
+AFX_IDW_PREVIEW_BAR = 0xE802
+AFX_IDW_RESIZE_BAR = 0xE803
+AFX_IDW_DOCKBAR_TOP = 0xE81B
+AFX_IDW_DOCKBAR_LEFT = 0xE81C
+AFX_IDW_DOCKBAR_RIGHT = 0xE81D
+AFX_IDW_DOCKBAR_BOTTOM = 0xE81E
+AFX_IDW_DOCKBAR_FLOAT = 0xE81F
+def AFX_CONTROLBAR_MASK(nIDC): return (1L << (nIDC - AFX_IDW_CONTROLBAR_FIRST))
+
+AFX_IDW_PANE_FIRST = 0xE900
+AFX_IDW_PANE_LAST = 0xE9ff
+AFX_IDW_HSCROLL_FIRST = 0xEA00
+AFX_IDW_VSCROLL_FIRST = 0xEA10
+AFX_IDW_SIZE_BOX = 0xEA20
+AFX_IDW_PANE_SAVE = 0xEA21
+AFX_IDS_APP_TITLE = 0xE000
+AFX_IDS_IDLEMESSAGE = 0xE001
+AFX_IDS_HELPMODEMESSAGE = 0xE002
+AFX_IDS_APP_TITLE_EMBEDDING = 0xE003
+AFX_IDS_COMPANY_NAME = 0xE004
+AFX_IDS_OBJ_TITLE_INPLACE = 0xE005
+ID_FILE_NEW = 0xE100
+ID_FILE_OPEN = 0xE101
+ID_FILE_CLOSE = 0xE102
+ID_FILE_SAVE = 0xE103
+ID_FILE_SAVE_AS = 0xE104
+ID_FILE_PAGE_SETUP = 0xE105
+ID_FILE_PRINT_SETUP = 0xE106
+ID_FILE_PRINT = 0xE107
+ID_FILE_PRINT_DIRECT = 0xE108
+ID_FILE_PRINT_PREVIEW = 0xE109
+ID_FILE_UPDATE = 0xE10A
+ID_FILE_SAVE_COPY_AS = 0xE10B
+ID_FILE_SEND_MAIL = 0xE10C
+ID_FILE_MRU_FIRST = 0xE110
+ID_FILE_MRU_FILE1 = 0xE110
+ID_FILE_MRU_FILE2 = 0xE111
+ID_FILE_MRU_FILE3 = 0xE112
+ID_FILE_MRU_FILE4 = 0xE113
+ID_FILE_MRU_FILE5 = 0xE114
+ID_FILE_MRU_FILE6 = 0xE115
+ID_FILE_MRU_FILE7 = 0xE116
+ID_FILE_MRU_FILE8 = 0xE117
+ID_FILE_MRU_FILE9 = 0xE118
+ID_FILE_MRU_FILE10 = 0xE119
+ID_FILE_MRU_FILE11 = 0xE11A
+ID_FILE_MRU_FILE12 = 0xE11B
+ID_FILE_MRU_FILE13 = 0xE11C
+ID_FILE_MRU_FILE14 = 0xE11D
+ID_FILE_MRU_FILE15 = 0xE11E
+ID_FILE_MRU_FILE16 = 0xE11F
+ID_FILE_MRU_LAST = 0xE11F
+ID_EDIT_CLEAR = 0xE120
+ID_EDIT_CLEAR_ALL = 0xE121
+ID_EDIT_COPY = 0xE122
+ID_EDIT_CUT = 0xE123
+ID_EDIT_FIND = 0xE124
+ID_EDIT_PASTE = 0xE125
+ID_EDIT_PASTE_LINK = 0xE126
+ID_EDIT_PASTE_SPECIAL = 0xE127
+ID_EDIT_REPEAT = 0xE128
+ID_EDIT_REPLACE = 0xE129
+ID_EDIT_SELECT_ALL = 0xE12A
+ID_EDIT_UNDO = 0xE12B
+ID_EDIT_REDO = 0xE12C
+ID_WINDOW_NEW = 0xE130
+ID_WINDOW_ARRANGE = 0xE131
+ID_WINDOW_CASCADE = 0xE132
+ID_WINDOW_TILE_HORZ = 0xE133
+ID_WINDOW_TILE_VERT = 0xE134
+ID_WINDOW_SPLIT = 0xE135
+AFX_IDM_WINDOW_FIRST = 0xE130
+AFX_IDM_WINDOW_LAST = 0xE13F
+AFX_IDM_FIRST_MDICHILD = 0xFF00
+ID_APP_ABOUT = 0xE140
+ID_APP_EXIT = 0xE141
+ID_HELP_INDEX = 0xE142
+ID_HELP_FINDER = 0xE143
+ID_HELP_USING = 0xE144
+ID_CONTEXT_HELP = 0xE145
+ID_HELP = 0xE146
+ID_DEFAULT_HELP = 0xE147
+ID_NEXT_PANE = 0xE150
+ID_PREV_PANE = 0xE151
+ID_FORMAT_FONT = 0xE160
+ID_OLE_INSERT_NEW = 0xE200
+ID_OLE_EDIT_LINKS = 0xE201
+ID_OLE_EDIT_CONVERT = 0xE202
+ID_OLE_EDIT_CHANGE_ICON = 0xE203
+ID_OLE_EDIT_PROPERTIES = 0xE204
+ID_OLE_VERB_FIRST = 0xE210
+ID_OLE_VERB_LAST = 0xE21F
+AFX_ID_PREVIEW_CLOSE = 0xE300
+AFX_ID_PREVIEW_NUMPAGE = 0xE301
+AFX_ID_PREVIEW_NEXT = 0xE302
+AFX_ID_PREVIEW_PREV = 0xE303
+AFX_ID_PREVIEW_PRINT = 0xE304
+AFX_ID_PREVIEW_ZOOMIN = 0xE305
+AFX_ID_PREVIEW_ZOOMOUT = 0xE306
+ID_VIEW_TOOLBAR = 0xE800
+ID_VIEW_STATUS_BAR = 0xE801
+ID_RECORD_FIRST = 0xE900
+ID_RECORD_LAST = 0xE901
+ID_RECORD_NEXT = 0xE902
+ID_RECORD_PREV = 0xE903
+IDC_STATIC = (-1)
+AFX_IDS_SCFIRST = 0xEF00
+AFX_IDS_SCSIZE = 0xEF00
+AFX_IDS_SCMOVE = 0xEF01
+AFX_IDS_SCMINIMIZE = 0xEF02
+AFX_IDS_SCMAXIMIZE = 0xEF03
+AFX_IDS_SCNEXTWINDOW = 0xEF04
+AFX_IDS_SCPREVWINDOW = 0xEF05
+AFX_IDS_SCCLOSE = 0xEF06
+AFX_IDS_SCRESTORE = 0xEF12
+AFX_IDS_SCTASKLIST = 0xEF13
+AFX_IDS_MDICHILD = 0xEF1F
+AFX_IDS_DESKACCESSORY = 0xEFDA
+AFX_IDS_OPENFILE = 0xF000
+AFX_IDS_SAVEFILE = 0xF001
+AFX_IDS_ALLFILTER = 0xF002
+AFX_IDS_UNTITLED = 0xF003
+AFX_IDS_SAVEFILECOPY = 0xF004
+AFX_IDS_PREVIEW_CLOSE = 0xF005
+AFX_IDS_UNNAMED_FILE = 0xF006
+AFX_IDS_ABOUT = 0xF010
+AFX_IDS_HIDE = 0xF011
+AFX_IDP_NO_ERROR_AVAILABLE = 0xF020
+AFX_IDS_NOT_SUPPORTED_EXCEPTION = 0xF021
+AFX_IDS_RESOURCE_EXCEPTION = 0xF022
+AFX_IDS_MEMORY_EXCEPTION = 0xF023
+AFX_IDS_USER_EXCEPTION = 0xF024
+AFX_IDS_PRINTONPORT = 0xF040
+AFX_IDS_ONEPAGE = 0xF041
+AFX_IDS_TWOPAGE = 0xF042
+AFX_IDS_PRINTPAGENUM = 0xF043
+AFX_IDS_PREVIEWPAGEDESC = 0xF044
+AFX_IDS_PRINTDEFAULTEXT = 0xF045
+AFX_IDS_PRINTDEFAULT = 0xF046
+AFX_IDS_PRINTFILTER = 0xF047
+AFX_IDS_PRINTCAPTION = 0xF048
+AFX_IDS_PRINTTOFILE = 0xF049
+AFX_IDS_OBJECT_MENUITEM = 0xF080
+AFX_IDS_EDIT_VERB = 0xF081
+AFX_IDS_ACTIVATE_VERB = 0xF082
+AFX_IDS_CHANGE_LINK = 0xF083
+AFX_IDS_AUTO = 0xF084
+AFX_IDS_MANUAL = 0xF085
+AFX_IDS_FROZEN = 0xF086
+AFX_IDS_ALL_FILES = 0xF087
+AFX_IDS_SAVE_MENU = 0xF088
+AFX_IDS_UPDATE_MENU = 0xF089
+AFX_IDS_SAVE_AS_MENU = 0xF08A
+AFX_IDS_SAVE_COPY_AS_MENU = 0xF08B
+AFX_IDS_EXIT_MENU = 0xF08C
+AFX_IDS_UPDATING_ITEMS = 0xF08D
+AFX_IDS_METAFILE_FORMAT = 0xF08E
+AFX_IDS_DIB_FORMAT = 0xF08F
+AFX_IDS_BITMAP_FORMAT = 0xF090
+AFX_IDS_LINKSOURCE_FORMAT = 0xF091
+AFX_IDS_EMBED_FORMAT = 0xF092
+AFX_IDS_PASTELINKEDTYPE = 0xF094
+AFX_IDS_UNKNOWNTYPE = 0xF095
+AFX_IDS_RTF_FORMAT = 0xF096
+AFX_IDS_TEXT_FORMAT = 0xF097
+AFX_IDS_INVALID_CURRENCY = 0xF098
+AFX_IDS_INVALID_DATETIME = 0xF099
+AFX_IDS_INVALID_DATETIMESPAN = 0xF09A
+AFX_IDP_INVALID_FILENAME = 0xF100
+AFX_IDP_FAILED_TO_OPEN_DOC = 0xF101
+AFX_IDP_FAILED_TO_SAVE_DOC = 0xF102
+AFX_IDP_ASK_TO_SAVE = 0xF103
+AFX_IDP_FAILED_TO_CREATE_DOC = 0xF104
+AFX_IDP_FILE_TOO_LARGE = 0xF105
+AFX_IDP_FAILED_TO_START_PRINT = 0xF106
+AFX_IDP_FAILED_TO_LAUNCH_HELP = 0xF107
+AFX_IDP_INTERNAL_FAILURE = 0xF108
+AFX_IDP_COMMAND_FAILURE = 0xF109
+AFX_IDP_FAILED_MEMORY_ALLOC = 0xF10A
+AFX_IDP_PARSE_INT = 0xF110
+AFX_IDP_PARSE_REAL = 0xF111
+AFX_IDP_PARSE_INT_RANGE = 0xF112
+AFX_IDP_PARSE_REAL_RANGE = 0xF113
+AFX_IDP_PARSE_STRING_SIZE = 0xF114
+AFX_IDP_PARSE_RADIO_BUTTON = 0xF115
+AFX_IDP_PARSE_BYTE = 0xF116
+AFX_IDP_PARSE_UINT = 0xF117
+AFX_IDP_PARSE_DATETIME = 0xF118
+AFX_IDP_PARSE_CURRENCY = 0xF119
+AFX_IDP_FAILED_INVALID_FORMAT = 0xF120
+AFX_IDP_FAILED_INVALID_PATH = 0xF121
+AFX_IDP_FAILED_DISK_FULL = 0xF122
+AFX_IDP_FAILED_ACCESS_READ = 0xF123
+AFX_IDP_FAILED_ACCESS_WRITE = 0xF124
+AFX_IDP_FAILED_IO_ERROR_READ = 0xF125
+AFX_IDP_FAILED_IO_ERROR_WRITE = 0xF126
+AFX_IDP_STATIC_OBJECT = 0xF180
+AFX_IDP_FAILED_TO_CONNECT = 0xF181
+AFX_IDP_SERVER_BUSY = 0xF182
+AFX_IDP_BAD_VERB = 0xF183
+AFX_IDP_FAILED_TO_NOTIFY = 0xF185
+AFX_IDP_FAILED_TO_LAUNCH = 0xF186
+AFX_IDP_ASK_TO_UPDATE = 0xF187
+AFX_IDP_FAILED_TO_UPDATE = 0xF188
+AFX_IDP_FAILED_TO_REGISTER = 0xF189
+AFX_IDP_FAILED_TO_AUTO_REGISTER = 0xF18A
+AFX_IDP_FAILED_TO_CONVERT = 0xF18B
+AFX_IDP_GET_NOT_SUPPORTED = 0xF18C
+AFX_IDP_SET_NOT_SUPPORTED = 0xF18D
+AFX_IDP_ASK_TO_DISCARD = 0xF18E
+AFX_IDP_FAILED_TO_CREATE = 0xF18F
+AFX_IDP_FAILED_MAPI_LOAD = 0xF190
+AFX_IDP_INVALID_MAPI_DLL = 0xF191
+AFX_IDP_FAILED_MAPI_SEND = 0xF192
+AFX_IDP_FILE_NONE = 0xF1A0
+AFX_IDP_FILE_GENERIC = 0xF1A1
+AFX_IDP_FILE_NOT_FOUND = 0xF1A2
+AFX_IDP_FILE_BAD_PATH = 0xF1A3
+AFX_IDP_FILE_TOO_MANY_OPEN = 0xF1A4
+AFX_IDP_FILE_ACCESS_DENIED = 0xF1A5
+AFX_IDP_FILE_INVALID_FILE = 0xF1A6
+AFX_IDP_FILE_REMOVE_CURRENT = 0xF1A7
+AFX_IDP_FILE_DIR_FULL = 0xF1A8
+AFX_IDP_FILE_BAD_SEEK = 0xF1A9
+AFX_IDP_FILE_HARD_IO = 0xF1AA
+AFX_IDP_FILE_SHARING = 0xF1AB
+AFX_IDP_FILE_LOCKING = 0xF1AC
+AFX_IDP_FILE_DISKFULL = 0xF1AD
+AFX_IDP_FILE_EOF = 0xF1AE
+AFX_IDP_ARCH_NONE = 0xF1B0
+AFX_IDP_ARCH_GENERIC = 0xF1B1
+AFX_IDP_ARCH_READONLY = 0xF1B2
+AFX_IDP_ARCH_ENDOFFILE = 0xF1B3
+AFX_IDP_ARCH_WRITEONLY = 0xF1B4
+AFX_IDP_ARCH_BADINDEX = 0xF1B5
+AFX_IDP_ARCH_BADCLASS = 0xF1B6
+AFX_IDP_ARCH_BADSCHEMA = 0xF1B7
+AFX_IDS_OCC_SCALEUNITS_PIXELS = 0xF1C0
+AFX_IDS_STATUS_FONT = 0xF230
+AFX_IDS_TOOLTIP_FONT = 0xF231
+AFX_IDS_UNICODE_FONT = 0xF232
+AFX_IDS_MINI_FONT = 0xF233
+AFX_IDP_SQL_FIRST = 0xF280
+AFX_IDP_SQL_CONNECT_FAIL = 0xF281
+AFX_IDP_SQL_RECORDSET_FORWARD_ONLY = 0xF282
+AFX_IDP_SQL_EMPTY_COLUMN_LIST = 0xF283
+AFX_IDP_SQL_FIELD_SCHEMA_MISMATCH = 0xF284
+AFX_IDP_SQL_ILLEGAL_MODE = 0xF285
+AFX_IDP_SQL_MULTIPLE_ROWS_AFFECTED = 0xF286
+AFX_IDP_SQL_NO_CURRENT_RECORD = 0xF287
+AFX_IDP_SQL_NO_ROWS_AFFECTED = 0xF288
+AFX_IDP_SQL_RECORDSET_READONLY = 0xF289
+AFX_IDP_SQL_SQL_NO_TOTAL = 0xF28A
+AFX_IDP_SQL_ODBC_LOAD_FAILED = 0xF28B
+AFX_IDP_SQL_DYNASET_NOT_SUPPORTED = 0xF28C
+AFX_IDP_SQL_SNAPSHOT_NOT_SUPPORTED = 0xF28D
+AFX_IDP_SQL_API_CONFORMANCE = 0xF28E
+AFX_IDP_SQL_SQL_CONFORMANCE = 0xF28F
+AFX_IDP_SQL_NO_DATA_FOUND = 0xF290
+AFX_IDP_SQL_ROW_UPDATE_NOT_SUPPORTED = 0xF291
+AFX_IDP_SQL_ODBC_V2_REQUIRED = 0xF292
+AFX_IDP_SQL_NO_POSITIONED_UPDATES = 0xF293
+AFX_IDP_SQL_LOCK_MODE_NOT_SUPPORTED = 0xF294
+AFX_IDP_SQL_DATA_TRUNCATED = 0xF295
+AFX_IDP_SQL_ROW_FETCH = 0xF296
+AFX_IDP_SQL_INCORRECT_ODBC = 0xF297
+AFX_IDP_SQL_UPDATE_DELETE_FAILED = 0xF298
+AFX_IDP_SQL_DYNAMIC_CURSOR_NOT_SUPPORTED = 0xF299
+AFX_IDP_DAO_FIRST = 0xF2A0
+AFX_IDP_DAO_ENGINE_INITIALIZATION = 0xF2A0
+AFX_IDP_DAO_DFX_BIND = 0xF2A1
+AFX_IDP_DAO_OBJECT_NOT_OPEN = 0xF2A2
+AFX_IDP_DAO_ROWTOOSHORT = 0xF2A3
+AFX_IDP_DAO_BADBINDINFO = 0xF2A4
+AFX_IDP_DAO_COLUMNUNAVAILABLE = 0xF2A5
+AFX_IDC_LISTBOX = 100
+AFX_IDC_CHANGE = 101
+AFX_IDC_PRINT_DOCNAME = 201
+AFX_IDC_PRINT_PRINTERNAME = 202
+AFX_IDC_PRINT_PORTNAME = 203
+AFX_IDC_PRINT_PAGENUM = 204
+ID_APPLY_NOW = 0x3021
+ID_WIZBACK = 0x3023
+ID_WIZNEXT = 0x3024
+ID_WIZFINISH = 0x3025
+AFX_IDC_TAB_CONTROL = 0x3020
+AFX_IDD_FILEOPEN = 28676
+AFX_IDD_FILESAVE = 28677
+AFX_IDD_FONT = 28678
+AFX_IDD_COLOR = 28679
+AFX_IDD_PRINT = 28680
+AFX_IDD_PRINTSETUP = 28681
+AFX_IDD_FIND = 28682
+AFX_IDD_REPLACE = 28683
+AFX_IDD_NEWTYPEDLG = 30721
+AFX_IDD_PRINTDLG = 30722
+AFX_IDD_PREVIEW_TOOLBAR = 30723
+AFX_IDD_PREVIEW_SHORTTOOLBAR = 30731
+AFX_IDD_INSERTOBJECT = 30724
+AFX_IDD_CHANGEICON = 30725
+AFX_IDD_CONVERT = 30726
+AFX_IDD_PASTESPECIAL = 30727
+AFX_IDD_EDITLINKS = 30728
+AFX_IDD_FILEBROWSE = 30729
+AFX_IDD_BUSY = 30730
+AFX_IDD_OBJECTPROPERTIES = 30732
+AFX_IDD_CHANGESOURCE = 30733
+AFX_IDC_CONTEXTHELP = 30977
+AFX_IDC_MAGNIFY = 30978
+AFX_IDC_SMALLARROWS = 30979
+AFX_IDC_HSPLITBAR = 30980
+AFX_IDC_VSPLITBAR = 30981
+AFX_IDC_NODROPCRSR = 30982
+AFX_IDC_TRACKNWSE = 30983
+AFX_IDC_TRACKNESW = 30984
+AFX_IDC_TRACKNS = 30985
+AFX_IDC_TRACKWE = 30986
+AFX_IDC_TRACK4WAY = 30987
+AFX_IDC_MOVE4WAY = 30988
+AFX_IDB_MINIFRAME_MENU = 30994
+AFX_IDB_CHECKLISTBOX_NT = 30995
+AFX_IDB_CHECKLISTBOX_95 = 30996
+AFX_IDR_PREVIEW_ACCEL = 30997
+AFX_IDI_STD_MDIFRAME = 31233
+AFX_IDI_STD_FRAME = 31234
+AFX_IDC_FONTPROP = 1000
+AFX_IDC_FONTNAMES = 1001
+AFX_IDC_FONTSTYLES = 1002
+AFX_IDC_FONTSIZES = 1003
+AFX_IDC_STRIKEOUT = 1004
+AFX_IDC_UNDERLINE = 1005
+AFX_IDC_SAMPLEBOX = 1006
+AFX_IDC_COLOR_BLACK = 1100
+AFX_IDC_COLOR_WHITE = 1101
+AFX_IDC_COLOR_RED = 1102
+AFX_IDC_COLOR_GREEN = 1103
+AFX_IDC_COLOR_BLUE = 1104
+AFX_IDC_COLOR_YELLOW = 1105
+AFX_IDC_COLOR_MAGENTA = 1106
+AFX_IDC_COLOR_CYAN = 1107
+AFX_IDC_COLOR_GRAY = 1108
+AFX_IDC_COLOR_LIGHTGRAY = 1109
+AFX_IDC_COLOR_DARKRED = 1110
+AFX_IDC_COLOR_DARKGREEN = 1111
+AFX_IDC_COLOR_DARKBLUE = 1112
+AFX_IDC_COLOR_LIGHTBROWN = 1113
+AFX_IDC_COLOR_DARKMAGENTA = 1114
+AFX_IDC_COLOR_DARKCYAN = 1115
+AFX_IDC_COLORPROP = 1116
+AFX_IDC_SYSTEMCOLORS = 1117
+AFX_IDC_PROPNAME = 1201
+AFX_IDC_PICTURE = 1202
+AFX_IDC_BROWSE = 1203
+AFX_IDC_CLEAR = 1204
+AFX_IDD_PROPPAGE_COLOR = 32257
+AFX_IDD_PROPPAGE_FONT = 32258
+AFX_IDD_PROPPAGE_PICTURE = 32259
+AFX_IDB_TRUETYPE = 32384
+AFX_IDS_PROPPAGE_UNKNOWN = 0xFE01
+AFX_IDS_COLOR_DESKTOP = 0xFE04
+AFX_IDS_COLOR_APPWORKSPACE = 0xFE05
+AFX_IDS_COLOR_WNDBACKGND = 0xFE06
+AFX_IDS_COLOR_WNDTEXT = 0xFE07
+AFX_IDS_COLOR_MENUBAR = 0xFE08
+AFX_IDS_COLOR_MENUTEXT = 0xFE09
+AFX_IDS_COLOR_ACTIVEBAR = 0xFE0A
+AFX_IDS_COLOR_INACTIVEBAR = 0xFE0B
+AFX_IDS_COLOR_ACTIVETEXT = 0xFE0C
+AFX_IDS_COLOR_INACTIVETEXT = 0xFE0D
+AFX_IDS_COLOR_ACTIVEBORDER = 0xFE0E
+AFX_IDS_COLOR_INACTIVEBORDER = 0xFE0F
+AFX_IDS_COLOR_WNDFRAME = 0xFE10
+AFX_IDS_COLOR_SCROLLBARS = 0xFE11
+AFX_IDS_COLOR_BTNFACE = 0xFE12
+AFX_IDS_COLOR_BTNSHADOW = 0xFE13
+AFX_IDS_COLOR_BTNTEXT = 0xFE14
+AFX_IDS_COLOR_BTNHIGHLIGHT = 0xFE15
+AFX_IDS_COLOR_DISABLEDTEXT = 0xFE16
+AFX_IDS_COLOR_HIGHLIGHT = 0xFE17
+AFX_IDS_COLOR_HIGHLIGHTTEXT = 0xFE18
+AFX_IDS_REGULAR = 0xFE19
+AFX_IDS_BOLD = 0xFE1A
+AFX_IDS_ITALIC = 0xFE1B
+AFX_IDS_BOLDITALIC = 0xFE1C
+AFX_IDS_SAMPLETEXT = 0xFE1D
+AFX_IDS_DISPLAYSTRING_FONT = 0xFE1E
+AFX_IDS_DISPLAYSTRING_COLOR = 0xFE1F
+AFX_IDS_DISPLAYSTRING_PICTURE = 0xFE20
+AFX_IDS_PICTUREFILTER = 0xFE21
+AFX_IDS_PICTYPE_UNKNOWN = 0xFE22
+AFX_IDS_PICTYPE_NONE = 0xFE23
+AFX_IDS_PICTYPE_BITMAP = 0xFE24
+AFX_IDS_PICTYPE_METAFILE = 0xFE25
+AFX_IDS_PICTYPE_ICON = 0xFE26
+AFX_IDS_COLOR_PPG = 0xFE28
+AFX_IDS_COLOR_PPG_CAPTION = 0xFE29
+AFX_IDS_FONT_PPG = 0xFE2A
+AFX_IDS_FONT_PPG_CAPTION = 0xFE2B
+AFX_IDS_PICTURE_PPG = 0xFE2C
+AFX_IDS_PICTURE_PPG_CAPTION = 0xFE2D
+AFX_IDS_PICTUREBROWSETITLE = 0xFE30
+AFX_IDS_BORDERSTYLE_0 = 0xFE31
+AFX_IDS_BORDERSTYLE_1 = 0xFE32
+AFX_IDS_VERB_EDIT = 0xFE40
+AFX_IDS_VERB_PROPERTIES = 0xFE41
+AFX_IDP_PICTURECANTOPEN = 0xFE83
+AFX_IDP_PICTURECANTLOAD = 0xFE84
+AFX_IDP_PICTURETOOLARGE = 0xFE85
+AFX_IDP_PICTUREREADFAILED = 0xFE86
+AFX_IDP_E_ILLEGALFUNCTIONCALL = 0xFEA0
+AFX_IDP_E_OVERFLOW = 0xFEA1
+AFX_IDP_E_OUTOFMEMORY = 0xFEA2
+AFX_IDP_E_DIVISIONBYZERO = 0xFEA3
+AFX_IDP_E_OUTOFSTRINGSPACE = 0xFEA4
+AFX_IDP_E_OUTOFSTACKSPACE = 0xFEA5
+AFX_IDP_E_BADFILENAMEORNUMBER = 0xFEA6
+AFX_IDP_E_FILENOTFOUND = 0xFEA7
+AFX_IDP_E_BADFILEMODE = 0xFEA8
+AFX_IDP_E_FILEALREADYOPEN = 0xFEA9
+AFX_IDP_E_DEVICEIOERROR = 0xFEAA
+AFX_IDP_E_FILEALREADYEXISTS = 0xFEAB
+AFX_IDP_E_BADRECORDLENGTH = 0xFEAC
+AFX_IDP_E_DISKFULL = 0xFEAD
+AFX_IDP_E_BADRECORDNUMBER = 0xFEAE
+AFX_IDP_E_BADFILENAME = 0xFEAF
+AFX_IDP_E_TOOMANYFILES = 0xFEB0
+AFX_IDP_E_DEVICEUNAVAILABLE = 0xFEB1
+AFX_IDP_E_PERMISSIONDENIED = 0xFEB2
+AFX_IDP_E_DISKNOTREADY = 0xFEB3
+AFX_IDP_E_PATHFILEACCESSERROR = 0xFEB4
+AFX_IDP_E_PATHNOTFOUND = 0xFEB5
+AFX_IDP_E_INVALIDPATTERNSTRING = 0xFEB6
+AFX_IDP_E_INVALIDUSEOFNULL = 0xFEB7
+AFX_IDP_E_INVALIDFILEFORMAT = 0xFEB8
+AFX_IDP_E_INVALIDPROPERTYVALUE = 0xFEB9
+AFX_IDP_E_INVALIDPROPERTYARRAYINDEX = 0xFEBA
+AFX_IDP_E_SETNOTSUPPORTEDATRUNTIME = 0xFEBB
+AFX_IDP_E_SETNOTSUPPORTED = 0xFEBC
+AFX_IDP_E_NEEDPROPERTYARRAYINDEX = 0xFEBD
+AFX_IDP_E_SETNOTPERMITTED = 0xFEBE
+AFX_IDP_E_GETNOTSUPPORTEDATRUNTIME = 0xFEBF
+AFX_IDP_E_GETNOTSUPPORTED = 0xFEC0
+AFX_IDP_E_PROPERTYNOTFOUND = 0xFEC1
+AFX_IDP_E_INVALIDCLIPBOARDFORMAT = 0xFEC2
+AFX_IDP_E_INVALIDPICTURE = 0xFEC3
+AFX_IDP_E_PRINTERERROR = 0xFEC4
+AFX_IDP_E_CANTSAVEFILETOTEMP = 0xFEC5
+AFX_IDP_E_SEARCHTEXTNOTFOUND = 0xFEC6
+AFX_IDP_E_REPLACEMENTSTOOLONG = 0xFEC7
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/dialog.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/dialog.py
new file mode 100644
index 0000000..2f6edda
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/dialog.py
@@ -0,0 +1,237 @@
+""" \
+Base class for Dialogs.  Also contains a few useful utility functions
+"""
+# dialog.py
+# Python class for Dialog Boxes in PythonWin.
+
+import win32ui
+import win32con
+import window
+
def dllFromDll(dllid):
	"""Coerce *dllid* into a win32ui DLL object.

	dllid may be:
	  None          -> returns None
	  a str         -> treated as a filename and loaded via win32ui
	  a dll object  -> returned as-is (anything with a GetFileName method)

	Raises TypeError for anything else.
	"""
	# Fixes: identity comparison with None (was ``dllid==None``),
	# isinstance instead of type(...) comparison, and the portable
	# call-style raise instead of the Python-2-only comma form.
	if dllid is None:
		return None
	elif isinstance(dllid, str):
		return win32ui.LoadLibrary(dllid)
	else:
		try:
			dllid.GetFileName()
		except AttributeError:
			raise TypeError("DLL parameter must be None, a filename or a dll object")
		return dllid
+	
class Dialog(window.Wnd):
	"""Base class for a dialog box.

	Also exposes a mapping-style interface over the dialog's DDX data
	dictionary, so a dialog instance can be indexed like a dict.
	"""
	def __init__( self, id, dllid=None ):
		"""id    -- a dialog resource ID, or a dialog template (a list)
			dllid -- None, a dll object, or the name of a resource dll"""
		# Hold a reference to the resource DLL until InitDialog.
		self.dll = dllFromDll(dllid)
		if type(id)==type([]):
			# A dialog template was supplied rather than a resource ID.
			dlg = win32ui.CreateDialogIndirect(id)
		else:
			dlg = win32ui.CreateDialog(id, self.dll)
		window.Wnd.__init__(self, dlg)
		self.HookCommands()
		self.bHaveInit = None

	def HookCommands(self):
		# Hook point for derived classes; nothing to do by default.
		pass

	def OnAttachedObjectDeath(self):
		# Keep a copy of the DDX data after the underlying object dies.
		self.data = self._obj_.data
		window.Wnd.OnAttachedObjectDeath(self)

	# Default implementations of the MFC virtuals.
	def OnOK(self):
		self._obj_.OnOK()

	def OnCancel(self):
		self._obj_.OnCancel()

	def OnInitDialog(self):
		self.bHaveInit = 1
		if self._obj_.data:
			self._obj_.UpdateData(0)
		# Non-zero return: we did NOT set focus to a child window.
		return 1

	def OnDestroy(self, msg):
		# Release the DLL reference; not strictly needed on normal destruction.
		self.dll = None

	# DDX support.
	def AddDDX( self, *args ):
		self._obj_.datalist.append(args)

	# Mapping protocol - make the dialog look like its data dictionary.
	def __nonzero__(self):
		return 1

	def __len__(self):
		return len(self.data)

	def __getitem__(self, key):
		return self.data[key]

	def __setitem__(self, key, item):
		self._obj_.data[key] = item

	def keys(self):
		return self.data.keys()

	def items(self):
		return self.data.items()

	def values(self):
		return self.data.values()

	def has_key(self, key):
		return self.data.has_key(key)
+
class PrintDialog(Dialog):
	"""Base class for the standard print dialog.

	Printer-info values are exposed through the dialog's mapping
	interface (eg ``dlg['copies']``); the user's final choices become
	available after DoModal returns via OnOK.
	"""
	def __init__(self, pInfo, dlgID,
                     printSetupOnly = 0,
                     flags=(win32ui.PD_ALLPAGES|
                            win32ui.PD_USEDEVMODECOPIES|
                            win32ui.PD_NOPAGENUMS|
                            win32ui.PD_HIDEPRINTTOFILE|
                            win32ui.PD_NOSELECTION),
                     parent=None,
                     dllid=None):
		self.dll = dllFromDll(dllid)
		if type(dlgID)==type([]):
			# Templates are not supported for print dialogs.
			raise TypeError("dlgID parameter must be an integer resource ID")
		dlg = win32ui.CreatePrintDialog(dlgID, printSetupOnly,
                                              flags, parent,
                                              self.dll)
		window.Wnd.__init__(self, dlg)
		self.HookCommands()
		self.bHaveInit = None
		self.pInfo = pInfo
		# Seed initial values (still available if PrintSetup is called).
		self._CopyPrintInfo(pInfo)
		# Only meaningful after OnOK; start with benign defaults.
		self['copies'] = 0
		self['deviceName'] = ''
		self['driverName'] = ''
		self['printAll'] = 0
		self['printCollate'] = 0
		self['printRange'] = 0
		self['printSelection'] = 0

	def _CopyPrintInfo(self, pInfo):
		# Copy the printer-info values shared by __init__ and OnOK.
		flags = pInfo.GetFlags()
		self['toFile'] = (flags&win32ui.PD_PRINTTOFILE != 0)
		self['direct'] = pInfo.GetDirect()
		self['preview'] = pInfo.GetPreview()
		self['continuePrinting'] = pInfo.GetContinuePrinting()
		self['curPage'] = pInfo.GetCurPage()
		self['numPreviewPages'] = pInfo.GetNumPreviewPages()
		self['userData'] = pInfo.GetUserData()
		self['draw'] = pInfo.GetDraw()
		self['pageDesc'] = pInfo.GetPageDesc()
		self['minPage'] = pInfo.GetMinPage()
		self['maxPage'] = pInfo.GetMaxPage()
		self['offsetPage'] = pInfo.GetOffsetPage()
		self['fromPage'] = pInfo.GetFromPage()
		self['toPage'] = pInfo.GetToPage()

	def OnInitDialog(self):
		# This also sets the hDC of the pInfo structure.
		self.pInfo.CreatePrinterDC()
		return self._obj_.OnInitDialog()

	def OnCancel(self):
		del self.pInfo

	def OnOK(self):
		'''DoModal has finished. Can now access the users choices'''
		self._obj_.OnOK()
		pInfo = self.pInfo
		# Refresh the shared values, then the post-OK-only ones.
		self._CopyPrintInfo(pInfo)
		self['copies'] = pInfo.GetCopies()
		self['deviceName'] = pInfo.GetDeviceName()
		self['driverName'] = pInfo.GetDriverName()
		self['printAll'] = pInfo.PrintAll()
		self['printCollate'] = pInfo.PrintCollate()
		self['printRange'] = pInfo.PrintRange()
		self['printSelection'] = pInfo.PrintSelection()
		del self.pInfo
+
class PropertyPage(Dialog):
	"""Base class for a single property page."""
	def __init__( self, id, dllid=None, caption=0 ):
		"""id    -- a property-page resource ID, or a template (a list)
			dllid -- None, a dll object, or the name of a resource dll"""
		self.dll = dllFromDll(dllid)
		if self.dll:
			# Make the resource dll current while the page is created.
			oldRes = win32ui.SetResource(self.dll)
		if type(id)==type([]):
			dlg = win32ui.CreatePropertyPageIndirect(id)
		else:
			dlg = win32ui.CreatePropertyPage(id, caption)
		if self.dll:
			win32ui.SetResource(oldRes)
		# Deliberately skip Dialog.__init__ - go straight to the window base.
		window.Wnd.__init__(self, dlg)
		self.HookCommands()
+
class PropertySheet(window.Wnd):
	"""A property sheet - a container of PropertyPage objects."""
	def __init__(self, caption, dll=None, pageList=None ):
		"""caption  -- the sheet caption
		dll      -- optional resource dll (object, name, or None)
		pageList -- an ID, a page, or a sequence of either, added now"""
		# Hold a reference to the DLL until InitDialog.
		self.dll = dllFromDll(dll)
		self.sheet = win32ui.CreatePropertySheet(caption)
		window.Wnd.__init__(self, self.sheet)
		if pageList is not None:
			self.AddPage(pageList)

	def OnInitDialog(self):
		return self._obj_.OnInitDialog()

	def DoModal(self):
		if self.dll:
			oldRes = win32ui.SetResource(self.dll)
		rc = self.sheet.DoModal()
		if self.dll:
			win32ui.SetResource(oldRes)
		return rc

	def AddPage(self, pages):
		"""Add a page, a page ID, or a sequence of pages/IDs to the sheet."""
		if self.dll:
			oldRes = win32ui.SetResource(self.dll)
		# Probe for sequence-style access rather than checking types.
		try:
			pages[0]
			isSeq = 1
		except (TypeError,KeyError):
			isSeq = 0
		if isSeq:
			for page in pages:
				self.DoAddSinglePage(page)
		else:
			self.DoAddSinglePage(pages)
		if self.dll:
			win32ui.SetResource(oldRes)

	def DoAddSinglePage(self, page):
		"""Add one page; *page* may be a page object or an integer ID.
		Assumes the resource DLL is already current."""
		if type(page)==type(0):
			self.sheet.AddPage(win32ui.CreatePropertyPage(page))
		else:
			self.sheet.AddPage(page)
+		
+# define some app utility functions.
def GetSimpleInput(prompt, defValue='', title=None ):
	"""Display a simple input dialog and return the entered string,
	or None if the dialog was cancelled.

	prompt   -- the prompt text
	defValue -- initial contents of the edit control
	title    -- window title (defaults to the main frame's title)
	"""
	if title is None:
		title = win32ui.GetMainFrame().GetWindowText()

	class DlgSimpleInput(Dialog):
		def __init__(self, prompt, defValue, title ):
			self.title = title
			Dialog.__init__(self, win32ui.IDD_SIMPLE_INPUT)
			self.AddDDX(win32ui.IDC_EDIT1, 'result')
			self.AddDDX(win32ui.IDC_PROMPT1, 'prompt')
			self._obj_.data['result'] = defValue
			self._obj_.data['prompt'] = prompt
		def OnInitDialog(self):
			self.SetWindowText(self.title)
			return Dialog.OnInitDialog(self)

	dlg = DlgSimpleInput(prompt, defValue, title)
	if dlg.DoModal() != win32con.IDOK:
		return None
	return dlg['result']
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/docview.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/docview.py
new file mode 100644
index 0000000..ba4971d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/docview.py
@@ -0,0 +1,122 @@
+# document and view classes for MFC.
+import win32ui
+import win32con
+import object
+import window
+
class View(window.Wnd):
	"""Base class for all MFC views."""
	def __init__(self, initobj):
		window.Wnd.__init__(self, initobj)
	def OnInitialUpdate(self):
		# Default: no work on first update; derived classes override.
		pass
+
+# Simple control based views.
class CtrlView(View):
	"""A view implemented by a simple Windows control."""
	def __init__(self, doc, wndclass, style=0):
		View.__init__(self, win32ui.CreateCtrlView(doc, wndclass, style))
+
class EditView(CtrlView):
	"""A view wrapping a standard edit control."""
	def __init__(self, doc):
		View.__init__(self, win32ui.CreateEditView(doc))
+
class RichEditView(CtrlView):
	"""A view wrapping a rich-edit control."""
	def __init__(self, doc):
		View.__init__(self, win32ui.CreateRichEditView(doc))
+
class ListView(CtrlView):
	"""A view wrapping a list-view control."""
	def __init__(self, doc):
		View.__init__(self, win32ui.CreateListView(doc))
+
class TreeView(CtrlView):
	"""A view wrapping a tree-view control."""
	def __init__(self, doc):
		View.__init__(self, win32ui.CreateTreeView(doc))
+
+# Other more advanced views.
class ScrollView(View):
	"""A scrolling view."""
	def __init__(self, doc):
		View.__init__(self, win32ui.CreateView(doc))
+
class FormView(View):
	"""A view based on a dialog resource template."""
	def __init__(self, doc, id):
		View.__init__(self, win32ui.CreateFormView(doc, id))
+
class Document(object.CmdTarget):
	"""An MFC document."""
	def __init__(self, template, docobj=None):
		# Let the template create the underlying doc if one wasn't supplied.
		if docobj is None:
			docobj = template.DoCreateDoc()
		object.CmdTarget.__init__(self, docobj)
+
class RichEditDoc(object.CmdTarget):
	"""A document for rich-edit views."""
	def __init__(self, template):
		object.CmdTarget.__init__(self, template.DoCreateRichEditDoc())
+
class CreateContext:
	"""A transient object passed around during frame/view creation,
	pairing a document template with (optionally) a document."""
	def __init__(self, template, doc = None):
		self.template = template
		self.doc = doc
	def __del__(self):
		self.close()
	def close(self):
		# Drop both references so the template and doc can be collected.
		self.doc = None
		self.template = None
+
class DocTemplate(object.CmdTarget):
	"""An MFC document template, tying together a document class, a
	frame class and a view class for a resource ID.

	The Make* arguments are factories; None means use the default class
	for that role (Document, window.MDIChildWnd, EditView).
	"""
	def __init__(self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None):
		if resourceId is None:
			resourceId = win32ui.IDR_PYTHONTYPE
		object.CmdTarget.__init__(self, self._CreateDocTemplate(resourceId))
		self.MakeDocument = MakeDocument
		self.MakeFrame = MakeFrame
		self.MakeView = MakeView
		self._SetupSharedMenu_()

	# todo - _SetupSharedMenu should be moved to a framework class.
	def _SetupSharedMenu_(self):
		sharedMenu = self.GetSharedMenu()
		from pywin.framework import toolmenu
		toolmenu.SetToolsMenu(sharedMenu)
		from pywin.framework import help
		help.SetHelpMenuOtherHelp(sharedMenu)

	def _CreateDocTemplate(self, resourceId):
		return win32ui.CreateDocTemplate(resourceId)

	def __del__(self):
		object.CmdTarget.__del__(self)

	def CreateCreateContext(self, doc=None):
		return CreateContext(self, doc)

	def CreateNewFrame(self, doc):
		makeFrame = self.MakeFrame
		if makeFrame is None:
			makeFrame = window.MDIChildWnd
		wnd = makeFrame()
		context = self.CreateCreateContext(doc)
		# LoadFrame triggers OnCreateClient...
		wnd.LoadFrame(self.GetResourceID(), -1, None, context)
		return wnd

	def CreateNewDocument(self):
		makeDocument = self.MakeDocument
		if makeDocument is None:
			makeDocument = Document
		return makeDocument(self)

	def CreateView(self, frame, context):
		makeView = self.MakeView
		if makeView is None:
			makeView = EditView
		view = makeView(context.doc)
		view.CreateWindow(frame)
+
class RichEditDocTemplate(DocTemplate):
	"""A DocTemplate whose defaults are the rich-edit doc/view classes."""
	def __init__(self, resourceId=None, MakeDocument=None, MakeFrame=None, MakeView=None):
		if MakeView is None:
			MakeView = RichEditView
		if MakeDocument is None:
			MakeDocument = RichEditDoc
		DocTemplate.__init__(self, resourceId, MakeDocument, MakeFrame, MakeView)

	def _CreateDocTemplate(self, resourceId):
		return win32ui.CreateRichEditDocTemplate(resourceId)
+
def t():
	"""Quick interactive test: open a document using a list-view template."""
	class FormTemplate(DocTemplate):
		def CreateView(self, frame, context):
			makeView = self.MakeView  # NOTE(review): unused - presumably left from experimentation
			view = ListView(context.doc)
			view.CreateWindow(frame)

	template = FormTemplate()
	return template.OpenDocumentFile(None)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/object.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/object.py
new file mode 100644
index 0000000..646379e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/object.py
@@ -0,0 +1,55 @@
+# MFC base classes.
+import sys
+import win32ui
+
class Object:
	"""Base class for all MFC wrapper objects - holds an underlying
	win32ui object in _obj_ and delegates attribute access to it."""
	def __init__(self, initObj = None):
		# Assign via __dict__ so no __setattr__ machinery is involved.
		self.__dict__['_obj_'] = initObj
		if initObj is not None:
			initObj.AttachObject(self)

	def __del__(self):
		self.close()

	def __getattr__(self, attr):
		"""Make this object look like the underlying win32ui one."""
		# During cleanup __dict__ is not available, causing recursive death.
		if attr != '__dict__':
			try:
				underlying = self.__dict__['_obj_']
			except KeyError:
				# No _obj_ at all - dont report the MFC object died when there isnt one!
				pass
			else:
				if underlying is not None:
					return getattr(underlying, attr)
				# Only raise the "died" error for non "internal" names -
				# Python may be probing __len__, __nonzero__, etc, and we
				# dont want this exception for those.
				if attr[0] != '_' and attr[-1] != '_':
					raise win32ui.error("The MFC object has died.")
		raise AttributeError(attr)

	def OnAttachedObjectDeath(self):
		# Called by win32ui when the underlying object is destroyed.
		self._obj_ = None

	def close(self):
		if '_obj_' in self.__dict__:
			if self._obj_ is not None:
				self._obj_.AttachObject(None)
				self._obj_ = None
+
class CmdTarget(Object):
	"""An MFC CCmdTarget - adds ranged command/notify hook helpers."""
	def __init__(self, initObj):
		Object.__init__(self, initObj)

	def HookNotifyRange(self, handler, firstID, lastID):
		"""Hook *handler* for every notify ID in [firstID, lastID];
		return the list of previous handlers."""
		return [self.HookNotify(handler, i) for i in range(firstID, lastID + 1)]

	def HookCommandRange(self, handler, firstID, lastID):
		"""Hook *handler* for every command ID in [firstID, lastID];
		return the list of previous handlers."""
		return [self.HookCommand(handler, i) for i in range(firstID, lastID + 1)]

	def HookCommandUpdateRange(self, handler, firstID, lastID):
		"""Hook *handler* for every command-update ID in [firstID, lastID];
		return the list of previous handlers."""
		return [self.HookCommandUpdate(handler, i) for i in range(firstID, lastID + 1)]
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/thread.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/thread.py
new file mode 100644
index 0000000..b54439f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/thread.py
@@ -0,0 +1,22 @@
+# Thread and application objects
+
+import object
+import win32ui
+
class WinThread(object.CmdTarget):
	"""An MFC user-interface thread."""
	def __init__(self, initObj = None):
		if initObj is None:
			initObj = win32ui.CreateThread()
		object.CmdTarget.__init__(self, initObj)

	def InitInstance(self):
		# Default None/0 return indicates success for InitInstance().
		pass

	def ExitInstance(self):
		pass
+		
+
class WinApp(WinThread):
	"""The application object - a WinThread wrapping the win32ui app."""
	def __init__(self, initApp = None):
		if initApp is None:
			initApp = win32ui.GetApp()
		WinThread.__init__(self, initApp)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/window.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/window.py
new file mode 100644
index 0000000..63e6caf9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/mfc/window.py
@@ -0,0 +1,41 @@
+# The MFCish window classes.
+import object
+import win32ui
+import win32con
+
class Wnd(object.CmdTarget):
	"""Base window wrapper; watches WM_DESTROY on the underlying window."""
	def __init__(self, initobj=None):
		object.CmdTarget.__init__(self, initobj)
		if self._obj_:
			# Get a callback as the underlying window is destroyed.
			self._obj_.HookMessage(self.OnDestroy, win32con.WM_DESTROY)
	def OnDestroy(self, msg):
		# Subclasses override this for cleanup; the default does nothing.
		pass
+
# NOTE NOTE - This facility is currently disabled in Pythonwin!!!!!
# Note - to process all messages for your window, add the following method
# to a derived class.  This code provides default message handling (ie, it is
# identical, except presumably in speed, to the method not existing at all),
# so you will presumably modify it to test for specific messages in order
# for it to be useful!
+#	def WindowProc(self, msg, wParam, lParam):
+#		rc, lResult = self._obj_.OnWndMsg(msg, wParam, lParam)
+#		if not rc: lResult = self._obj_.DefWindowProc(msg, wParam, lParam)
+#		return lResult
+
class FrameWnd(Wnd):
	"""A frame window - currently identical in behavior to the Wnd base."""
	def __init__(self, wnd):
		Wnd.__init__(self, wnd)
+
class MDIChildWnd(FrameWnd):
	"""An MDI child frame; creates the underlying win32ui object on demand."""
	def __init__(self, wnd=None):
		if wnd is None:
			wnd = win32ui.CreateMDIChild()
		FrameWnd.__init__(self, wnd)
	def OnCreateClient(self, cp, context):
		# When the creation context carries a document template, let the
		# template build the view for us.
		if context is not None and context.template is not None:
			context.template.CreateView(self, context)
+
class MDIFrameWnd(FrameWnd):
	"""The MDI main frame; wraps win32ui's MDI frame, creating one if needed."""
	def __init__(self, wnd=None):
		if wnd is None:
			wnd = win32ui.CreateMDIFrame()
		FrameWnd.__init__(self, wnd)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/IDLEenvironment.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/IDLEenvironment.py
new file mode 100644
index 0000000..3ca1f213
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/IDLEenvironment.py
@@ -0,0 +1,515 @@
+# Code that allows Pythonwin to pretend it is IDLE
+# (at least as far as most IDLE extensions are concerned)
+
+import string
+import win32api
+import win32ui
+import win32con
+import sys
+
+from pywin.mfc.dialog import GetSimpleInput
+
# Characters considered part of a "word" by wordstart/wordend index maths.
# NOTE(review): locale-dependent in Python 2 (string.uppercase/lowercase).
wordchars = string.uppercase + string.lowercase + string.digits
+
class TextError(Exception):
	"""Raised where IDLE code would normally raise a TclError."""
+
class EmptyRange(Exception):
	"""Internal signal: a requested text range resolved to nothing."""
+
def GetIDLEModule(module):
	"""Import and return the named IDLE extension module from pywin.idle.

	Shows a message box and returns None when the import fails.
	"""
	try:
		# First get it from Pythonwin if it exists.
		modname = "pywin.idle." + module
		__import__(modname)
	except ImportError, details:
		msg = "The IDLE extension '%s' can not be located.\r\n\r\n" \
			  "Please correct the installation and restart the" \
			  " application.\r\n\r\n%s" % (module, details)
		win32ui.MessageBox(msg)
		return None
	mod=sys.modules[modname]
	mod.TclError = TextError # A hack that can go soon!
	return mod
+
+# A class that is injected into the IDLE auto-indent extension.
+# It allows for decent performance when opening a new file,
+# as auto-indent uses the tokenizer module to determine indents.
+# The default AutoIndent readline method works OK, but it goes through
+# this layer of Tk index indirection for every single line.  For large files
+# without indents (and even small files with indents :-) it was pretty slow!
def fast_readline(self):
	"""readline() replacement injected into IDLE's AutoIndent.IndentSearcher.

	Splits the whole buffer once and serves cached lines, rather than going
	through the Tk index emulation for every line.  'self' is the
	IndentSearcher instance (expects .finished, .i and .text.edit).
	"""
	if self.finished:
		return ""
	if not self.__dict__.has_key("_scint_lines"):
		# XXX - note - assumes this is only called once the file is loaded!
		self._scint_lines = string.split(self.text.edit.GetTextRange(), "\n")
	sl = self._scint_lines
	i = self.i = self.i + 1
	if i >= len(sl):
		return ""
	return sl[i]+"\n"
+
# Patch the fast readline into the AutoIndent extension.  GetIDLEModule may
# return None (or lack IndentSearcher), in which case we silently do without.
try:
	GetIDLEModule("AutoIndent").IndentSearcher.readline = fast_readline
except AttributeError: # GetIDLEModule may return None
	pass
+
+# A class that attempts to emulate an IDLE editor window.
+# Construct with a Pythonwin view.
class IDLEEditorWindow:
	"""Emulates an IDLE EditorWindow on top of a Pythonwin (Scintilla) view.

	Loads IDLE extensions on demand and exposes the small subset of IDLE's
	"virtual UI" and text-widget protocols those extensions call back into.
	"""
	def __init__(self, edit):
		self.edit = edit
		self.text = TkText(edit)
		self.extensions = {}	# extension name -> extension instance
		self.extension_menus = {}

	def close(self):
		# Drop our references first so extension close() can't re-enter us.
		self.edit = self.text = None
		self.extension_menus = None
		try:
			for ext in self.extensions.values():
				closer = getattr(ext, "close", None)
				if closer is not None:
					closer()
		finally:
			self.extensions = {}

	def IDLEExtension(self, extension):
		"""Return the named extension instance, loading and binding it on
		first use.  Returns None when the module can not be imported."""
		ext = self.extensions.get(extension)
		if ext is not None: return ext
		mod = GetIDLEModule(extension)
		if mod is None: return None
		klass = getattr(mod, extension)
		ext = self.extensions[extension] = klass(self)
		# Find and bind all the events defined in the extension.
		events = filter(lambda item: item[-6:]=="_event", dir(klass))
		for event in events:
			name = "<<%s>>" % (string.replace(event[:-6], "_", "-"), )
			self.edit.bindings.bind(name, getattr(ext, event))
		return ext

	def GetMenuItems(self, menu_name):
		# Get all menu items for the menu name (eg, "edit")
		bindings = self.edit.bindings
		ret = []
		for ext in self.extensions.values():
			menudefs = getattr(ext, "menudefs", [])
			for name, items in menudefs:
				if name == menu_name:
					for text, event in filter(lambda item: item is not None, items):
						# Tk '_' accelerators -> Windows '&' accelerators.
						text = string.replace(text, "&", "&&")
						text = string.replace(text, "_", "&")
						ret.append((text, event))
		return ret

	######################################################################
	# The IDLE "Virtual UI" methods that are exposed to the IDLE extensions.
	#
	def askinteger(self, caption, prompt, parent=None, initialvalue=0, minvalue=None, maxvalue=None):
		"""Prompt for an integer, re-asking until valid or cancelled.
		Returns 0 on cancel."""
		while 1:
			rc = GetSimpleInput(prompt, str(initialvalue), caption)
			if rc is None: return 0 # Correct "cancel" semantics?
			err = None
			try:
				rc = int(rc)
			except ValueError:
				err = "Please enter an integer"
			if not err and minvalue is not None and rc < minvalue:
				err = "Please enter an integer greater then or equal to %s" % (minvalue,)
			if not err and maxvalue is not None and rc > maxvalue:
				err = "Please enter an integer less then or equal to %s" % (maxvalue,)
			if err:
				win32ui.MessageBox(err, caption, win32con.MB_OK)
				continue
			return rc
	def askyesno(self, caption, prompt, parent=None):
		# Returns true when the user answers "Yes".
		return win32ui.MessageBox(prompt, caption, win32con.MB_YESNO)==win32con.IDYES

	######################################################################
	# The IDLE "Virtual Text Widget" methods that are exposed to the IDLE extensions.
	#

	# Is character at text_index in a Python string?  Return 0 for
	# "guaranteed no", true for anything else.
	def is_char_in_string(self, text_index):
		# A helper for the code analyser - we need internal knowledge of
		# the colorizer to get this information
		# This assumes the colorizer has got to this point!
		text_index = self.text._getoffset(text_index)
		c = self.text.edit._GetColorizer()
		if c and c.GetStringStyle(text_index) is None:
			return 0
		return 1

	# If a selection is defined in the text widget, return
	# (start, end) as Tkinter text indices, otherwise return
	# (None, None)
	def get_selection_indices(self):
		try:
			first = self.text.index("sel.first")
			last = self.text.index("sel.last")
			return first, last
		except TextError:
			return None, None

	def set_tabwidth(self, width ):
		self.edit.SCISetTabWidth(width)

	def get_tabwidth(self):
		return self.edit.GetTabWidth()
+
+# A class providing the generic "Call Tips" interface
class CallTips:
	"""Adapts IDLE's call-tip protocol onto the Scintilla call-tip API."""
	def __init__(self, edit):
		self.edit = edit
	def showtip(self, tip_text):
		"""Display 'tip_text' as a Scintilla call tip."""
		self.edit.SCICallTipShow(tip_text)
	def hidetip(self):
		"""Dismiss any visible call tip."""
		self.edit.SCICallTipCancel()
+
+########################################
+#
+# Helpers for the TkText emulation.
def TkOffsetToIndex(offset, edit):
	"""Convert a character offset in 'edit' into a Tk "line.col" index string.

	Tk lines are 1-based and columns 0-based.  The offset is clamped to the
	buffer length, as callers may address one past the real end when we
	pretend there is a trailing newline.  (Also removes the unused local
	'lineoff' the original carried.)
	"""
	# May be 1 > actual end if we pretended there was a trailing '\n'
	offset = min(offset, edit.GetTextLength())
	line = edit.LineFromChar(offset)
	return "%d.%d" % (line + 1, offset - edit.LineIndex(line))
+
+def _NextTok(str, pos):
+	# Returns (token, endPos)
+	end = len(str)
+	if pos>=end: return None, 0
+	while pos < end and str[pos] in string.whitespace:
+		pos = pos + 1
+	# Special case for +-
+	if str[pos] in '+-':
+		return str[pos],pos+1
+	# Digits also a special case.
+	endPos = pos
+	while endPos < end and str[endPos] in string.digits+".":
+		endPos = endPos + 1
+	if pos!=endPos: return str[pos:endPos], endPos
+	endPos = pos
+	while endPos < end and str[endPos] not in string.whitespace + string.digits + "+-":
+		endPos = endPos + 1
+	if pos!=endPos: return str[pos:endPos], endPos
+	return None, 0
+
def TkIndexToOffset(bm, edit, marks):
	"""Convert a Tk-style index expression into a character offset in 'edit'.

	'bm' is a base index ("line.col", "sel.first"/"sel.last", "insert",
	"end", or a named mark from 'marks'), optionally followed by modifiers
	("+ N chars", "- N chars", "wordstart", "wordend", "linestart",
	"lineend").  Raises ValueError for bad syntax and EmptyRange when a
	selection base is requested but nothing is selected.
	"""
	base, nextTokPos = _NextTok(bm, 0)
	if base is None: raise ValueError, "Empty bookmark ID!"
	if string.find(base,".")>0:
		try:
			line, col = string.split(base, ".", 2)
			if col=="first" or col=="last":
				# Tag name
				if line != "sel": raise ValueError, "Tags arent here!"
				sel = edit.GetSel()
				if sel[0]==sel[1]:
					raise EmptyRange
				if col=="first":
					pos = sel[0]
				else:
					pos = sel[1]
			else:
				# Lines are 1 based for tkinter
				line = int(line)-1
				if line > edit.GetLineCount():
					pos = edit.GetTextLength()+1
				else:
					pos = edit.LineIndex(line)
					if pos==-1: pos = edit.GetTextLength()
					pos = pos + int(col)
		except (ValueError, IndexError):
			raise ValueError, "Unexpected literal in '%s'" % base
	elif base == 'insert':
		pos = edit.GetSel()[0]
	elif base=='end':
		pos = edit.GetTextLength()
		# Pretend there is a trailing '\n' if necessary
		if pos and edit.SCIGetCharAt(pos-1) != "\n":
			pos = pos+1
	else:
		try:
			pos = marks[base]
		except KeyError:
			raise ValueError, "Unsupported base offset or undefined mark '%s'" % base

	# Apply any trailing "+/-/wordstart/..." modifiers to the base position.
	while 1:
		word, nextTokPos = _NextTok(bm, nextTokPos)
		if word is None: break
		if word in ['+','-']:
			num, nextTokPos = _NextTok(bm, nextTokPos)
			if num is None: raise ValueError, "+/- operator needs 2 args"
			what, nextTokPos = _NextTok(bm, nextTokPos)
			if what is None: raise ValueError, "+/- operator needs 2 args"
			if what[0] <> "c": raise ValueError, "+/- only supports chars"
			if word=='+':
				pos = pos + int(num)
			else:
				pos = pos - int(num)
		elif word=='wordstart':
			while pos > 0 and edit.SCIGetCharAt(pos-1) in wordchars:
				pos = pos - 1
		elif word=='wordend':
			end = edit.GetTextLength()
			while pos < end and edit.SCIGetCharAt(pos) in wordchars:
				pos = pos + 1
		elif word=='linestart':
			while pos > 0 and edit.SCIGetCharAt(pos-1) not in '\n\r':
				pos = pos - 1
		elif word=='lineend':
			end = edit.GetTextLength()
			while pos < end and edit.SCIGetCharAt(pos) not in '\n\r':
				pos = pos + 1
		else:
			raise ValueError, "Unsupported relative offset '%s'" % word
	return max(pos, 0) # Tkinter is tolerant of -ve indexes - we aren't
+
+# A class that resembles an IDLE (ie, a Tk) text widget.
+# Construct with an edit object (eg, an editor view)
class TkText:
	"""Emulates the subset of the Tk text-widget API that IDLE extensions use.

	Indices are Tk style ("line.col", "insert", "end", marks, plus
	+/-/word/line modifiers); they are translated to character offsets in
	the underlying Scintilla edit control via TkIndexToOffset.
	"""
	def __init__(self, edit):
		self.calltips = None
		self.edit = edit
		self.marks = {}	# named Tk marks -> character offsets
##	def __getattr__(self, attr):
##		if attr=="tk": return self # So text.tk.call works.
##		if attr=="master": return None # ditto!
##		raise AttributeError, attr
##	def __getitem__(self, item):
##		if item=="tabs":
##			size = self.edit.GetTabWidth()
##			if size==8: return "" # Tk default
##			return size # correct semantics?
##		elif item=="font": # Used for measurements we dont need to do!
##			return "Dont know the font"
##		raise IndexError, "Invalid index '%s'" % item
	def make_calltip_window(self):
		# Lazily create the (single) call-tip adaptor.
		if self.calltips is None:
			self.calltips = CallTips(self.edit)
		return self.calltips
	def _getoffset(self, index):
		# Tk index expression -> character offset.
		return TkIndexToOffset(index, self.edit, self.marks)
	def _getindex(self, off):
		# Character offset -> canonical Tk "line.col" index.
		return TkOffsetToIndex(off, self.edit)
	def _fix_eol_indexes(self, start, end):
		# Widen (start, end) so a '\r\n' pair is never split in half.
		if start>0 and self.edit.SCIGetCharAt(start)=='\n' and self.edit.SCIGetCharAt(start-1)=='\r':
			start = start - 1
		if end < self.edit.GetTextLength() and self.edit.SCIGetCharAt(end-1)=='\r' and self.edit.SCIGetCharAt(end)=='\n':
			end = end + 1
		return start, end
##	def get_tab_width(self):
##		return self.edit.GetTabWidth()
##	def call(self, *rest):
##		# Crap to support Tk measurement hacks for tab widths
##		if rest[0] != "font" or rest[1] != "measure":
##			raise ValueError, "Unsupport call type"
##		return len(rest[5])
##	def configure(self, **kw):
##		for name, val in kw.items():
##			if name=="tabs":
##				self.edit.SCISetTabWidth(int(val))
##			else:
##				raise ValueError, "Unsupported configuration item %s" % kw
	def bind(self, binding, handler):
		self.edit.bindings.bind(binding, handler)
	def get(self, start, end = None):
		"""Return the text between two Tk indices ("" for an empty range)."""
		try:
			start = self._getoffset(start)
			if end is None:
				end = start+1
			else:
				end = self._getoffset(end)
		except EmptyRange:
			return ""
		# Simple semantic checks to conform to the Tk text interface
		if end <= start: return ""
		max = self.edit.GetTextLength()
		checkEnd = 0
		if end > max:
			end = max
			checkEnd = 1
		start, end = self._fix_eol_indexes(start, end)
		ret = self.edit.GetTextRange(start, end)
		# pretend a trailing '\n' exists if necessary.
		if checkEnd and (not ret or ret[-1] != '\n'): ret = ret + '\n'
		return string.replace(ret, "\r", "")
	def index(self, spec):
		"""Normalise a Tk index expression to canonical "line.col" form."""
		try:
			return self._getindex(self._getoffset(spec))
		except EmptyRange:
			return ""
	def insert(self, pos, text):
		"""Insert 'text' at index 'pos'."""
		try:
			pos = self._getoffset(pos)
		except EmptyRange:
			raise TextError, "Empty range"
		self.edit.SetSel((pos, pos))
		# IDLE only deals with "\n" - we will be nicer
		bits = string.split(text, '\n')
		self.edit.SCIAddText(bits[0])
		for bit in bits[1:]:
			self.edit.SCINewline()
			self.edit.SCIAddText(bit)

	def delete(self, start, end=None):
		"""Delete the range [start, end) - or a single char when 'end' is None."""
		try:
			start = self._getoffset(start)
			if end is not None: end = self._getoffset(end)
		except EmptyRange:
			raise TextError, "Empty range"
		# If end is specified and == start, then we must delete nothing.
		if start==end: return
		# If end is not specified, delete one char
		if end is None:
			end = start+1
		else:
			# Tk says not to delete in this case, but our control would.
			if end<start: return
		if start==self.edit.GetTextLength(): return # Nothing to delete.
		old = self.edit.GetSel()[0] # Lose a selection
		# Hack for partial '\r\n' removal
		start, end = self._fix_eol_indexes(start, end)
		self.edit.SetSel((start, end))
		self.edit.Clear()
		# Restore the caret, adjusted for the removed range.
		if old>=start and old<end:
			old=start
		elif old>=end:
			old = old - (end-start)
		self.edit.SetSel(old)

	def bell(self):
		win32api.MessageBeep()

	def see(self, pos):
		# Most commands we use in Scintilla actually force the selection
		# to be seen, making this unnecessary.
		pass

	def mark_set(self, name, pos):
		"""Set mark 'name' to index 'pos'; "insert" moves the caret itself."""
		try:
			pos = self._getoffset(pos)
		except EmptyRange:
			raise TextError, "Empty range '%s'" % pos
		if name == "insert":
			self.edit.SetSel( pos )
		else:
			self.marks[name]=pos

	def tag_add(self, name, start, end):
		# Only the "sel" tag is emulated - it maps to the selection.
		if name != "sel": raise ValueError, "Only sel tag is supported"
		try:
			start = self._getoffset(start)
			end = self._getoffset(end)
		except EmptyRange:
			raise TextError, "Empty range"
		self.edit.SetSel( start, end )

	def tag_remove(self, name, start, end):
		if name !="sel" or start != "1.0" or end != "end":
			raise ValueError, "Cant remove this tag"
		# Turn the sel into a cursor
		self.edit.SetSel(self.edit.GetSel()[0])

	def compare(self, i1, op, i2):
		"""Compare two indices with the Tk operator string 'op' (eg "<=")."""
		try:
			i1=self._getoffset(i1)
		except EmptyRange:
			i1 = ""
		try:
			i2=self._getoffset(i2)
		except EmptyRange:
			i2 = ""
		# NOTE(review): an EmptyRange leaves "" here, which the %d format
		# below would reject - presumably never hit in practice.
		return eval("%d%s%d" % (i1,op,i2))

	def undo_block_start(self):
		self.edit.SCIBeginUndoAction()

	def undo_block_stop(self):
		self.edit.SCIEndUndoAction()
+
+######################################################################
+# 
+# Test related code.
+# 
+######################################################################
def TestCheck(index, edit, expected=None):
	"""Test helper: report when TkIndexToOffset(index) != expected."""
	rc = TkIndexToOffset(index, edit, {})
	if rc != expected:
		print "ERROR: Index", index,", expected", expected, "but got", rc
+
def TestGet(fr, to, t, expected):
	"""Test helper: report when t.get(fr, to) does not return 'expected'."""
	got = t.get(fr, to)
	if got != expected:
		print "ERROR: get(%s, %s) expected %s, but got %s" % (`fr`, `to`, `expected`, `got`)
+
def test():
	"""Interactive smoke test - opens a new editor and checks EOL index maths.

	The 'skip' string below holds a block of older index tests that are
	deliberately disabled but kept for reference.
	"""
	import pywin.framework.editor
	d=pywin.framework.editor.editorTemplate.OpenDocumentFile(None)
	e=d.GetFirstView()
	t = TkText(e)
	e.SCIAddText("hi there how\nare you today\r\nI hope you are well")
	e.SetSel((4,4))

	skip = """
	TestCheck("insert", e, 4)
	TestCheck("insert wordstart", e, 3)
	TestCheck("insert wordend", e, 8)
	TestCheck("insert linestart", e, 0)
	TestCheck("insert lineend", e, 12)
	TestCheck("insert + 4 chars", e, 8)
	TestCheck("insert +4c", e, 8)
	TestCheck("insert - 2 chars", e, 2)
	TestCheck("insert -2c", e, 2)
	TestCheck("insert-2c", e, 2)
	TestCheck("insert-2 c", e, 2)
	TestCheck("insert- 2c", e, 2)
	TestCheck("1.1", e, 1)
	TestCheck("1.0", e, 0)
	TestCheck("2.0", e, 13)
	try:
		TestCheck("sel.first", e, 0)
		print "*** sel.first worked with an empty selection"
	except TextError:
		pass
	e.SetSel((4,5))
	TestCheck("sel.first- 2c", e, 2)
	TestCheck("sel.last- 2c", e, 3)
	"""
	# Check EOL semantics
	e.SetSel((4,4))
	TestGet("insert lineend", "insert lineend +1c", t, "\n")
	e.SetSel((20, 20))
	TestGet("insert lineend", "insert lineend +1c", t, "\n")
	e.SetSel((35, 35))
	TestGet("insert lineend", "insert lineend +1c", t, "\n")
+
class IDLEWrapper:
	"""Minimal stand-in for an IDLE editor window: just holds a text widget."""
	def __init__(self, control):
		self.text = control
+
def IDLETest(extension):
	"""Manual test helper: load IDLE 'extension' against a fresh editor.

	Opens a new Pythonwin editor, fills it with this file's own source, and
	returns the constructed extension instance.
	"""
	import sys, os
	modname = "pywin.idle." + extension
	__import__(modname)
	mod=sys.modules[modname]
	mod.TclError = TextError
	klass = getattr(mod, extension)

	# Create a new Scintilla Window.
	import pywin.framework.editor
	d=pywin.framework.editor.editorTemplate.OpenDocumentFile(None)
	v=d.GetFirstView()
	fname=os.path.splitext(__file__)[0] + ".py"
	v.SCIAddText(open(fname).read())
	d.SetModifiedFlag(0)
	r=klass( IDLEWrapper( TkText(v) ) )
	return r
+
+if __name__=='__main__':
+	test()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/__init__.py
new file mode 100644
index 0000000..8d8ea47
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/__init__.py
@@ -0,0 +1 @@
+# package init.
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/bindings.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/bindings.py
new file mode 100644
index 0000000..4ea05e54
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/bindings.py
@@ -0,0 +1,186 @@
+import IDLEenvironment
+import string
+import win32ui
+import win32api
+import win32con
+import keycodes
+import sys
+
# How a handler's arguments should be built when its event fires.
HANDLER_ARGS_GUESS=0	# Decide from the event name ("<<...>>" => IDLE style)
HANDLER_ARGS_NATIVE=1	# Native Pythonwin handler
HANDLER_ARGS_IDLE=2	# IDLE extension event handler
HANDLER_ARGS_EXTENSION=3	# Config-file "extension code" handler

next_id = 5000	# Next WM_COMMAND ID to allocate for an event.

event_to_commands = {}	# dict of event names to integer command IDs.
command_to_events = {}	# dict of integer command IDs to event names.
+
def assign_command_id(event, id = 0):
	"""Return the command ID for 'event', allocating a fresh one if needed.

	A caller-specified (non-zero) 'id' is recorded in event_to_commands but
	not in the reverse map - such IDs are assumed to already have a handler.
	"""
	global next_id
	if id != 0:
		event_to_commands[event] = id
		return id
	# Reuse a previous allocation for this event where possible.
	id = event_to_commands.get(event, 0)
	if id == 0:
		id = next_id
		next_id = next_id + 1
	# Only map the ones we allocated - specified ones are assumed to have a handler
	command_to_events[id] = event
	event_to_commands[event] = id
	return id
+
class SendCommandHandler:
	"""A callable that sends a WM_COMMAND with a fixed command ID to the
	main frame - used as the handler when an event is bound straight to
	a command ID."""
	def __init__(self, cmd):
		self.cmd = cmd
	def __call__(self, *args):
		# Any positional event arguments are deliberately ignored.
		win32ui.GetMainFrame().SendMessage(win32con.WM_COMMAND, self.cmd)
+
class Binding:
	"""Pairs an event handler with a HANDLER_ARGS_* calling convention."""
	def __init__(self, handler, handler_args_type):
		self.handler = handler
		self.handler_args_type = handler_args_type
+
class BindingsManager:
	"""Maps named events to handlers and keystrokes for one view.

	Handlers come from three sources: IDLE extensions, config-file
	"extension code", and native <name>Event methods on the view.
	"""
	def __init__(self, parent_view):
		self.parent_view = parent_view
		self.bindings = {} # dict of Binding instances.
		self.keymap = {}	# (key, key-state mask) -> event name

	def prepare_configure(self):
		# Reset the keymap; event bindings themselves survive a re-configure.
		self.keymap = {}

	def complete_configure(self):
		# Route every allocated command ID through our dispatcher.
		for id in command_to_events.keys():
			self.parent_view.HookCommand(self._OnCommand, id)

	def close(self):
		self.parent_view = self.bindings = self.keymap = None

	def report_error(self, problem):
		try:
			win32ui.SetStatusText(problem, 1)
		except win32ui.error:
			# No status bar!
			print problem

	def update_keymap(self, keymap):
		self.keymap.update(keymap)

	def bind(self, event, handler, handler_args_type = HANDLER_ARGS_GUESS, cid = 0):
		"""Bind 'event' to 'handler' (or to a WM_COMMAND send when handler is None)."""
		if handler is None:
			handler = SendCommandHandler(cid)
		self.bindings[event] = self._new_binding(handler, handler_args_type)
		self.bind_command(event, cid)

	def bind_command(self, event, id = 0):
		"Binds an event to a Windows control/command ID"
		id = assign_command_id(event, id)
		return id

	def get_command_id(self, event):
		"""Return the command ID for 'event', allocating one for known
		events; None when the event does not exist."""
		id = event_to_commands.get(event)
		if id is None:
			# See if we even have an event of that name!?
			if not self.bindings.has_key(event):
				return None
			id = self.bind_command(event)
		return id

	def _OnCommand(self, id, code):
		# WM_COMMAND dispatcher - translate the ID back to an event and fire.
		event = command_to_events.get(id)
		if event is None:
			self.report_error("No event associated with event ID %d" % id)
			return 1
		return self.fire(event)

	def _new_binding(self, event, handler_args_type):
		# NOTE(review): the first parameter is actually the handler,
		# despite being named 'event'.
		return Binding(event, handler_args_type)

	def _get_IDLE_handler(self, ext, handler):
		# Resolve an IDLE "event-name" to the bound method on the extension
		# instance ("-" becomes "_", with an "_event" suffix).
		try:
			instance = self.parent_view.idle.IDLEExtension(ext)
			name = string.replace(handler, "-", "_") + "_event"
			return getattr(instance, name)
		except (ImportError, AttributeError):
			msg = "Can not find event '%s' in IDLE extension '%s'" % (handler, ext)
			self.report_error(msg)
			return None

	def fire(self, event, event_param = None):
		# Fire the specified event.  Result is native Pythonwin result
		# (ie, 1==pass one, 0 or None==handled)

		# First look up the event directly - if there, we are set.
		binding = self.bindings.get(event)
		if binding is None:
			# If possible, find it!
			# A native method name
			handler = getattr(self.parent_view, event + "Event", None)
			if handler is None:
				# Can't decide if I should report an error??
				self.report_error("The event name '%s' can not be found." % event)
				# Either way, just let the default handlers grab it.
				return 1
			binding = self._new_binding(handler, HANDLER_ARGS_NATIVE)
			# Cache it.
			self.bindings[event] = binding

		handler_args_type = binding.handler_args_type
		# Now actually fire it.
		if handler_args_type==HANDLER_ARGS_GUESS:
			# Can't be native, as natives are never added with "guess".
			# Must be extension or IDLE.
			if event[0]=="<":
				handler_args_type = HANDLER_ARGS_IDLE
			else:
				handler_args_type = HANDLER_ARGS_EXTENSION
		try:
			if handler_args_type==HANDLER_ARGS_EXTENSION:
				args = self.parent_view.idle, event_param
			else:
				args = (event_param,)
			rc = apply(binding.handler, args)
			if handler_args_type==HANDLER_ARGS_IDLE:
				# Convert to our return code.
				if rc in [None, "break"]:
					rc = 0
				else:
					rc = 1
		except:
			import traceback
			message = "Firing event '%s' failed." % event
			print message
			traceback.print_exc()
			self.report_error(message)
			rc = 1 # Let any default handlers have a go!
		return rc

	def fire_key_event(self, msg):
		"""Dispatch a keystroke message to its bound event, if any.

		Returns 1 (pass on) when no binding exists for the key.
		"""
		key = msg[2]
		keyState = 0
		# Build a console-API style modifier mask from the live keyboard state.
		if win32api.GetKeyState(win32con.VK_CONTROL) & 0x8000:
			keyState = keyState | win32con.RIGHT_CTRL_PRESSED | win32con.LEFT_CTRL_PRESSED
		if win32api.GetKeyState(win32con.VK_SHIFT) & 0x8000:
			keyState = keyState | win32con.SHIFT_PRESSED
		if win32api.GetKeyState(win32con.VK_MENU) & 0x8000:
			keyState = keyState | win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED
		keyinfo = key, keyState
		# Special hacks for the dead-char key on non-US keyboards.
		# (XXX - which do not work :-(
		event = self.keymap.get( keyinfo )
		if event is None:
##			if key == 220: # Dead key
##				return 1
##			# Translate the raw scancode into an Ascii character.
##			print "translating", key, "(with state)", keyState,
##			key = win32ui.TranslateVirtualKey(key)
##			print "Got back key", `key`,
####			if key is None:
####				return 1 # Dead-key - don't handle at all!!!
##			if key:
##				# Then back to a "normalized" scan-code.
##				key = keycodes.get_scan_code(key[0])
##				keyinfo = key, keyState
##				event = self.keymap.get( keyinfo )
##			if event is None:
				return 1
		return self.fire(event, None)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/config.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/config.py
new file mode 100644
index 0000000..beb90da
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/config.py
@@ -0,0 +1,313 @@
+# config.py - deals with loading configuration information.
+
+# Loads config data from a .cfg file.  Also caches the compiled
+# data back into a .cfc file.
+
+# If you are wondering how to avoid needing .cfg files (eg,
+# if you are freezing Pythonwin etc) I suggest you create a
+# .py file, and put the config info in a docstring.  Then
+# pass a CStringIO file (rather than a filename) to the
+# config manager.
+import sys
+import string
+import keycodes
+import marshal
+import stat
+import os
+import types
+import traceback
+import pywin
+import glob
+import imp
+
# Debug support - when enabled, trace() writes progress messages to stderr
# (win32traceutil captures output written before the interactive window opens).
debugging = 0
if debugging:
    import win32traceutil # Some trace statements fire before the interactive window is open.
    def trace(*args):
        sys.stderr.write(string.join(map(str, args), " ") + "\n")
else:
    trace = lambda *args: None

# Bump this to invalidate every existing compiled (.cfc) config cache.
compiled_config_version = 2
+
def split_line(line, lineno):
    """Split a config line into (key, value) around the last '='.

    Comments ('#' to end of line) are stripped first.  Returns ("", "")
    for blank lines and (None, None) - after printing a warning naming
    'lineno' - for malformed ones.
    """
    comment_pos = string.find(line, "#")
    if comment_pos>=0: line = line[:comment_pos]
    sep_pos = string.rfind(line, "=")
    if sep_pos == -1:
        if string.strip(line):
            print "Warning: Line %d: %s is an invalid entry" % (lineno, `line`)
            return None, None
        return "", ""
    return string.strip(line[:sep_pos]), string.strip(line[sep_pos+1:])
+
def get_section_header(line):
    """Return (section, subsection) if 'line' is a "[section:subsection]"
    header, else (None, None).

    Both parts are lower-cased; the subsection is "" when absent.
    """
    # Returns the section if the line is a section header, else None
    if line[0] == "[":
        end = string.find(line, "]")
        if end==-1: end=len(line)
        rc = string.lower(line[1:end])
        try:
            i = string.index(rc, ":")
            return rc[:i], rc[i+1:]
        except ValueError:
            return rc, ""
    return None, None
+
def find_config_file(f):
    """Return the full path of config file 'f' (a base name, no extension)
    inside the pywin package directory."""
    return os.path.join(pywin.__path__[0], f + ".cfg")
+
def find_config_files():
    """Return the base names (no path, no extension) of every *.cfg file
    in the pywin package directory."""
    pattern = os.path.join(pywin.__path__[0], "*.cfg")
    return [os.path.split(os.path.splitext(full)[0])[1]
            for full in glob.glob(pattern)]
+
+class ConfigManager:
    def __init__(self, f):
        """Load configuration 'f' - either a file-like object or the base
        name of a .cfg file in the pywin directory.

        A compiled cache (.cfc) is used when it matches the source file's
        size/mtime and the current bytecode magic; otherwise the .cfg is
        parsed section by section and the cache rewritten.
        """
        self.filename = "unknown"
        self.last_error = None
        self.key_to_events = {}
        if hasattr(f, "readline"):
            fp = f
            self.filename = "<config string>"
            compiled_name = None
        else:
            try:
                f = find_config_file(f)
                src_stat = os.stat(f)
            except os.error:
                self.report_error("Config file '%s' not found" % f)
                return
            self.filename = f
            self.basename = os.path.basename(f)
            trace("Loading configuration", self.basename)
            compiled_name = os.path.splitext(f)[0] + ".cfc"
            try:
                cf = open(compiled_name, "rb")
                try:
                    ver = marshal.load(cf)
                    ok = compiled_config_version == ver
                    if ok:
                        magic = marshal.load(cf)
                        size = marshal.load(cf)
                        mtime = marshal.load(cf)
                        # Cache is valid only for an identical source file
                        # and the same Python bytecode version.
                        if magic == imp.get_magic() and \
                           src_stat[stat.ST_MTIME] == mtime and \
                           src_stat[stat.ST_SIZE] == size:
                            self.cache = marshal.load(cf)
                            trace("Configuration loaded cached", compiled_name)
                            return # We are ready to roll!
                finally:
                    cf.close()
            except (os.error, IOError, EOFError):
                pass
            fp = open(f)
        self.cache = {}
        lineno = 1
        line = fp.readline()
        while line:
            # Skip to the next section (maybe already there!)
            section, subsection = get_section_header(line)
            while line and section is None:
                line = fp.readline()
                if not line: break
                lineno = lineno + 1
                section, subsection = get_section_header(line)
            if not line: break

            # Each _load_* consumes its section and appears to return the
            # next section-header line plus the running line number.
            if section=="keys":
                line, lineno = self._load_keys(subsection, fp, lineno)
            elif section == "extensions":
                line, lineno = self._load_extensions(subsection, fp, lineno)
            elif section == "idle extensions":
                line, lineno = self._load_idle_extensions(subsection, fp, lineno)
            elif section == "general":
                line, lineno = self._load_general(subsection, fp, lineno)
            else:
                self.report_error("Unrecognised section header '%s:%s'" % (section,subsection))
                line = fp.readline()
                lineno = lineno + 1
        # Check critical data.
        if not self.cache.get("keys"):
            self.report_error("No keyboard definitions were loaded")
        if not self.last_error and compiled_name:
            # Rewrite the compiled cache for next time.
            try:
                cf = open(compiled_name, "wb")
                marshal.dump(compiled_config_version, cf)
                marshal.dump(imp.get_magic(), cf)
                marshal.dump(src_stat[stat.ST_SIZE], cf)
                marshal.dump(src_stat[stat.ST_MTIME], cf)
                marshal.dump(self.cache, cf)
                cf.close()
            except (IOError, EOFError):
                pass # Ignore errors - may be read only.
+
    def configure(self, editor, subsections = None):
        """Apply this configuration to 'editor': first any parent configs
        named by [general] "based on", then extension code, IDLE
        extensions, and finally the keymap for each subsection."""
        # Execute the extension code, and find any events.
        # First, we "recursively" connect any we are based on.
        if subsections is None: subsections = []
        subsections = [''] + subsections
        general = self.get_data("general")
        if general:
            parents = general.get("based on", [])
            for parent in parents:
                trace("Configuration based on", parent, "- loading.")
                parent = self.__class__(parent)
                parent.configure(editor, subsections)
                if parent.last_error:
                    self.report_error(parent.last_error)

        bindings = editor.bindings
        codeob = self.get_data("extension code")
        if codeob is not None:
            # Run the config file's code object; every public function it
            # defines becomes an event handler.
            ns = {}
            try:
                exec codeob in ns
            except:
                traceback.print_exc()
                self.report_error("Executing extension code failed")
                ns = None
            if ns:
                num = 0
                for name, func in ns.items():
                    if type(func)==types.FunctionType and name[:1] != '_':
                        bindings.bind(name, func)
                        num = num + 1
                trace("Configuration Extension code loaded", num, "events")
        # Load the idle extensions
        for subsection in subsections:
            for ext in self.get_data("idle extensions", {}).get(subsection, []):
                try:
                    editor.idle.IDLEExtension(ext)
                    trace("Loaded IDLE extension", ext)
                except:
                    self.report_error("Can not load the IDLE extension '%s'" % ext)

        # Now bind up the key-map (remembering a reverse map
        subsection_keymap = self.get_data("keys")
        num_bound = 0
        for subsection in subsections:
            keymap = subsection_keymap.get(subsection, {})
            bindings.update_keymap(keymap)
            num_bound = num_bound + len(keymap)
        trace("Configuration bound", num_bound, "keys")
+
+    def get_key_binding(self, event, subsections = None):
+        if subsections is None: subsections = []
+        subsections = [''] + subsections
+
+        subsection_keymap = self.get_data("keys")
+        for subsection in subsections:
+            map = self.key_to_events.get(subsection)
+            if map is None: # Build it
+                map = {}
+                keymap = subsection_keymap.get(subsection, {})
+                for key_info, map_event in keymap.items():
+                    map[map_event] = key_info
+                self.key_to_events[subsection] = map
+
+            info = map.get(event)
+            if info is not None:
+                return keycodes.make_key_name( info[0], info[1] )
+        return None
+
    def report_error(self, msg):
        # Record the error so callers (eg, a configuration that is "based
        # on" this one) can see it via self.last_error, and echo it.
        self.last_error = msg
        print "Error in %s: %s" % (self.filename, msg)
    def report_warning(self, msg):
        # Warnings are only echoed - they are not recorded in last_error.
        print "Warning in %s: %s" % (self.filename, msg)
+
+    def _readline(self, fp, lineno, bStripComments = 1):
+        line = fp.readline()
+        lineno = lineno + 1
+        if line:
+            bBreak = get_section_header(line)[0] is not None # A new section is starting
+            if bStripComments and not bBreak:
+                pos = string.find(line, "#")
+                if pos>=0: line=line[:pos]+"\n"
+        else:
+            bBreak=1
+        return line, lineno, bBreak
+
    def get_data(self, name, default=None):
        # Fetch a previously loaded section ("general", "keys", etc) from
        # the cache, returning 'default' if the section was never loaded.
        return self.cache.get(name, default)
+
    def _save_data(self, name, data):
        # Store a loaded section in the cache and return it for convenience.
        self.cache[name] = data
        return data
+
+    def _load_general(self, sub_section, fp, lineno):
+        map = {}
+        while 1:
+            line, lineno, bBreak = self._readline(fp, lineno)
+            if bBreak: break
+
+            key, val = split_line(line, lineno)
+            if not key: continue
+            key = string.lower(key)
+            l = map.get(key, [])
+            l.append(val)
+            map[key]=l
+        self._save_data("general", map)
+        return line, lineno
+
+    def _load_keys(self, sub_section, fp, lineno):
+        # Builds a nested dictionary of
+        # (scancode, flags) = event_name
+        main_map = self.get_data("keys", {})
+        map = main_map.get(sub_section, {})
+        while 1:
+            line, lineno, bBreak = self._readline(fp, lineno)
+            if bBreak: break
+
+            key, event = split_line(line, lineno)
+            if not event: continue
+            sc, flag = keycodes.parse_key_name(key)
+            if sc is None:
+                self.report_warning("Line %d: Invalid key name '%s'" % (lineno, key))
+            else:
+                map[sc, flag] = event
+        main_map[sub_section] = map
+        self._save_data("keys", main_map)
+        return line, lineno
+
    def _load_extensions(self, sub_section, fp, lineno):
        # Collect the raw source lines of the extensions section and
        # compile them as one code object, saved under "extension code".
        # The code object is executed later by configure().
        start_lineno = lineno
        lines = []
        while 1:
            # Comment stripping is disabled here - '#' is meaningful in code.
            line, lineno, bBreak = self._readline(fp, lineno, 0)
            if bBreak: break
            lines.append(line)
        try:
            c = compile(string.join(lines, ""), self.filename, "exec")
            self._save_data("extension code", c)
        except SyntaxError, details:
            msg = details[0]
            # details[1][1] is the line number relative to the compiled
            # snippet - offset it to be relative to the config file.
            errlineno = details[1][1] + start_lineno
            # Should handle syntax errors better here, and offset the lineno.
            self.report_error("Compiling extension code failed: Line %d: %s" % (errlineno, msg))
        return line, lineno
+
+    def _load_idle_extensions(self, sub_section, fp, lineno):
+        extension_map = self.get_data("idle extensions")
+        if extension_map is None: extension_map = {}
+        extensions = []
+        while 1:
+            line, lineno, bBreak = self._readline(fp, lineno)
+            if bBreak: break
+            line = string.strip(line)
+            if line:
+                extensions.append(line)
+        extension_map[sub_section] = extensions
+        self._save_data("idle extensions", extension_map)
+        return line, lineno
+
+def test():
+    import time
+    start = time.clock()
+    f="default"
+    cm = ConfigManager(f)
+    map = cm.get_data("keys")
+    took = time.clock()-start
+    print "Loaded %s items in %.4f secs" % (len(map), took)
+
+if __name__=='__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/configui.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/configui.py
new file mode 100644
index 0000000..376dfbd0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/configui.py
@@ -0,0 +1,246 @@
+from pywin.mfc import dialog
+import win32api
+import win32con
+import win32ui
+import copy
+import string
+from scintillacon import *
+
+######################################################
+# Property Page for syntax formatting options
+    
# The standard 16 color VGA palette should always be possible, plus a few
# extra colors most displays can also manage.  Each entry is a
# (name, red, green, blue) tuple.
paletteVGA = (
    ("Black", 0, 0, 0),
    ("Navy", 0, 0, 128),
    ("Green", 0, 128, 0),
    ("Cyan", 0, 128, 128),
    ("Maroon", 128, 0, 0),
    ("Purple", 128, 0, 128),
    ("Olive", 128, 128, 0),
    ("Gray", 128, 128, 128),
    ("Silver", 192, 192, 192),
    ("Blue", 0, 0, 255),
    ("Lime", 0, 255, 0),
    ("Aqua", 0, 255, 255),
    ("Red", 255, 0, 0),
    ("Fuchsia", 255, 0, 255),
    ("Yellow", 255, 255, 0),
    ("White", 255, 255, 255),
    # ... and a few others that will generally be possible.
    ("DarkGrey", 64, 64, 64),
    ("PurpleBlue", 64, 64, 192),
    ("DarkGreen", 0, 96, 0),
    ("DarkOlive", 128, 128, 64),
    ("MediumBlue", 0, 0, 192),
    ("DarkNavy", 0, 0, 96),
    ("Magenta", 96, 0, 96),
    ("OffWhite", 255, 255, 220),
    ("LightPurple", 220, 220, 255),
)
+
def BGR(b, g, r):
    """Return a Scintilla color integer built from blue, green and red bytes.

    Colors in font definitions are integers laid out as 0xBBGGRR.
    """
    return ((b * 256) + g) * 256 + r
+
class ScintillaFormatPropertyPage(dialog.PropertyPage):
	"""Property page for editing Scintilla syntax-formatting styles.

	Hosts a live Scintilla control showing the colorizer's sample text;
	the user selects a style from the listbox and edits its color,
	bold/italic flags, fonts and background via the other controls.
	Changes are applied to the control immediately for preview.
	"""
	def __init__(self, scintillaClass = None, caption = 0):
		# scintillaClass is the control class to instantiate; defaults to
		# control.CScintillaEdit (imported lazily in OnInitDialog).
		self.scintillaClass = scintillaClass
		dialog.PropertyPage.__init__(self, win32ui.IDD_PP_FORMAT, caption=caption)

	def OnInitDialog(self):
		try:
			if self.scintillaClass is None:
				import control
				sc = control.CScintillaEdit
			else:
				sc = self.scintillaClass

			self.scintilla = sc()
			style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.ES_MULTILINE
			# Convert the rect size
			rect = self.MapDialogRect( (5, 5, 120, 75))
			self.scintilla.CreateWindow(style, rect, self, 111)
			self.HookNotify(self.OnBraceMatch, SCN_CHECKBRACE)
			self.scintilla.HookKeyStroke(self.OnEsc, 27)
			self.scintilla.SCISetViewWS(1)
			self.pos_bstart = self.pos_bend = self.pos_bbad = 0

			# The sample text uses '|' markers; the first marks where the
			# caret should be placed after loading.
			colorizer = self.scintilla._GetColorizer()
			text = colorizer.GetSampleText()
			items = string.split(text, '|', 2)
			pos = len(items[0])
			self.scintilla.SCIAddText(string.join(items,''))
			self.scintilla.SetSel(pos, pos)
			self.scintilla.ApplyFormattingStyles()
			self.styles = self.scintilla._GetColorizer().styles

			# Populate the color and bold/italic combos.
			self.cbo = self.GetDlgItem(win32ui.IDC_COMBO1)
			for c in paletteVGA:
				self.cbo.AddString(c[0])

			self.cboBoldItalic = self.GetDlgItem(win32ui.IDC_COMBO2)
			for item in ["Bold Italic", "Bold", "Italic", "Regular"]:
				self.cboBoldItalic.InsertString(0, item)

			self.butIsDefault = self.GetDlgItem(win32ui.IDC_CHECK1)
			self.butIsDefaultBackground = self.GetDlgItem(win32ui.IDC_CHECK2)
			self.listbox = self.GetDlgItem(win32ui.IDC_LIST1)
			self.HookCommand(self.OnListCommand, win32ui.IDC_LIST1)
			# Only list styles that are not aliases of another style.
			names = self.styles.keys()
			names.sort()
			for name in names:
				if self.styles[name].aliased is None:
					self.listbox.AddString(name)
			self.listbox.SetCurSel(0)

			idc = win32ui.IDC_RADIO1
			if not self.scintilla._GetColorizer().bUseFixed: idc = win32ui.IDC_RADIO2
			self.GetDlgItem(idc).SetCheck(1)
			self.UpdateUIForStyle(self.styles[names[0]])

			self.scintilla.HookFormatter(self)
			self.HookCommand(self.OnButDefaultFixedFont, win32ui.IDC_BUTTON1)
			self.HookCommand(self.OnButDefaultPropFont, win32ui.IDC_BUTTON2)
			self.HookCommand(self.OnButThisFont, win32ui.IDC_BUTTON3)
			self.HookCommand(self.OnButUseDefaultFont, win32ui.IDC_CHECK1)
			self.HookCommand(self.OnButThisBackground, win32ui.IDC_BUTTON4)
			self.HookCommand(self.OnButUseDefaultBackground, win32ui.IDC_CHECK2)
			self.HookCommand(self.OnStyleUIChanged, win32ui.IDC_COMBO1)
			self.HookCommand(self.OnStyleUIChanged, win32ui.IDC_COMBO2)
			self.HookCommand(self.OnButFixedOrDefault, win32ui.IDC_RADIO1)
			self.HookCommand(self.OnButFixedOrDefault, win32ui.IDC_RADIO2)
		except:
			# Report, but never propagate - failing here would leave the
			# property sheet in a broken state.
			import traceback
			traceback.print_exc()

	def OnEsc(self, ch):
		# Escape in the sample control cancels the whole sheet.
		self.GetParent().EndDialog(win32con.IDCANCEL)

	def OnBraceMatch(self, std, extra):
		import pywin.scintilla.view
		pywin.scintilla.view.DoBraceMatch(self.scintilla)

	def GetSelectedStyle(self):
		# The style object for the currently selected listbox entry.
		return self.styles[self.listbox.GetText(self.listbox.GetCurSel())]

	def _DoButDefaultFont(self, extra_flags, attr):
		# Let the user pick a new base font, stored on the colorizer
		# attribute named by 'attr' ("baseFormatFixed"/"baseFormatProp").
		baseFormat = getattr(self.scintilla._GetColorizer(), attr)
		flags = extra_flags | win32con.CF_SCREENFONTS | win32con.CF_EFFECTS | win32con.CF_FORCEFONTEXIST
		d=win32ui.CreateFontDialog(baseFormat, flags, None, self)
		if d.DoModal()==win32con.IDOK:
			setattr(self.scintilla._GetColorizer(), attr, d.GetCharFormat())
			self.OnStyleUIChanged(0, win32con.BN_CLICKED)

	def OnButDefaultFixedFont(self, id, code):
		if code==win32con.BN_CLICKED:
			self._DoButDefaultFont(win32con.CF_FIXEDPITCHONLY, "baseFormatFixed")
			return 1

	def OnButDefaultPropFont(self, id, code):
		if code==win32con.BN_CLICKED:
			self._DoButDefaultFont(win32con.CF_SCALABLEONLY, "baseFormatProp")
			return 1

	def OnButFixedOrDefault(self, id, code):
		if code==win32con.BN_CLICKED:
			# IDC_RADIO1 selects the fixed-pitch base font.  (A stray
			# no-op statement that read-and-discarded the radio state was
			# removed here.)
			bUseFixed = id == win32ui.IDC_RADIO1
			self.scintilla._GetColorizer().bUseFixed = bUseFixed
			self.scintilla.ApplyFormattingStyles(0)
			return 1

	def OnButThisFont(self, id, code):
		if code==win32con.BN_CLICKED:
			flags = win32con.CF_SCREENFONTS | win32con.CF_EFFECTS | win32con.CF_FORCEFONTEXIST
			style = self.GetSelectedStyle()
			# If the selected style is based on the default, we need to apply
			# the default to it.
			def_format = self.scintilla._GetColorizer().GetDefaultFormat()
			format = style.GetCompleteFormat(def_format)
			d=win32ui.CreateFontDialog(format, flags, None, self)
			if d.DoModal()==win32con.IDOK:
				style.format = d.GetCharFormat()
				self.scintilla.ApplyFormattingStyles(0)
			return 1

	def OnButUseDefaultFont(self, id, code):
		if code == win32con.BN_CLICKED:
			isDef = self.butIsDefault.GetCheck()
			self.GetDlgItem(win32ui.IDC_BUTTON3).EnableWindow(not isDef)
			if isDef: # Being reset to the default font.
				style = self.GetSelectedStyle()
				style.ForceAgainstDefault()
				self.UpdateUIForStyle(style)
				self.scintilla.ApplyFormattingStyles(0)
			else:
				# User wants to override default -
				# do nothing!
				pass

	def OnButThisBackground(self, id, code):
		if code==win32con.BN_CLICKED:
			style = self.GetSelectedStyle()
			# Default the picker to white when no explicit background set.
			bg = win32api.RGB(0xff, 0xff, 0xff)
			if style.background is not None:
				bg = style.background
			d=win32ui.CreateColorDialog(bg, 0, self)
			if d.DoModal()==win32con.IDOK:
				style.background = d.GetColor()
				self.scintilla.ApplyFormattingStyles(0)
			return 1

	def OnButUseDefaultBackground(self, id, code):
		if code == win32con.BN_CLICKED:
			isDef = self.butIsDefaultBackground.GetCheck()
			self.GetDlgItem(win32ui.IDC_BUTTON4).EnableWindow(not isDef)
			if isDef: # Being reset to the default color
				style = self.GetSelectedStyle()
				style.background = None
				self.UpdateUIForStyle(style)
				self.scintilla.ApplyFormattingStyles(0)
			else:
				# User wants to override default -
				# do nothing!
				pass


	def OnListCommand(self, id, code):
		if code==win32con.LBN_SELCHANGE:
			style = self.GetSelectedStyle()
			self.UpdateUIForStyle(style)
		return 1

	def UpdateUIForStyle(self, style ):
		# Reflect the selected style's format into the dialog controls.
		format = style.format
		sel = 0
		for c in paletteVGA:
			# format[4] is the color member of the char-format tuple.
			if format[4] == BGR(c[3], c[2], c[1]):
#				print "Style", style.name, "is", c[0]
				break
			sel = sel + 1
		else:
			sel = -1 # Color not in our palette - no combo selection.
		self.cbo.SetCurSel(sel)
		self.butIsDefault.SetCheck(style.IsBasedOnDefault())
		self.GetDlgItem(win32ui.IDC_BUTTON3).EnableWindow(not style.IsBasedOnDefault())

		self.butIsDefaultBackground.SetCheck(style.background is None)
		self.GetDlgItem(win32ui.IDC_BUTTON4).EnableWindow(style.background is not None)
		
		bold = format[1] & win32con.CFE_BOLD != 0; italic = format[1] & win32con.CFE_ITALIC != 0
		self.cboBoldItalic.SetCurSel( bold*2 + italic )

	def OnStyleUIChanged(self, id, code):
		if code in [win32con.BN_CLICKED, win32con.CBN_SELCHANGE]:
			style = self.GetSelectedStyle()
			self.ApplyUIFormatToStyle(style)
			self.scintilla.ApplyFormattingStyles(0)
			return 0
		return 1

	def ApplyUIFormatToStyle(self, style):
		# Push the dialog's current color/bold/italic selection back into
		# the style's char-format tuple.
		format = style.format
		color = paletteVGA[self.cbo.GetCurSel()]
		effect = 0
		# Combo entries were inserted in reverse, so index 0 is "Regular".
		sel = self.cboBoldItalic.GetCurSel()
		if sel==0:
			effect = 0
		elif sel==1:
			effect = win32con.CFE_ITALIC
		elif sel==2:
			effect = win32con.CFE_BOLD
		else:
			effect = win32con.CFE_BOLD | win32con.CFE_ITALIC
		maskFlags=format[0]|win32con.CFM_COLOR|win32con.CFM_BOLD|win32con.CFM_ITALIC
		style.format = (maskFlags, effect, style.format[2], style.format[3], BGR(color[3], color[2], color[1])) + style.format[5:]

	def OnOK(self):
		# Persist all style changes when the sheet is accepted.
		self.scintilla._GetColorizer().SavePreferences()
		return 1
+
def test():
	"""Show the format property page on a bare property sheet.

	NOTE(review): this harness previously referenced the undefined names
	'ColorEditorPropertyPage' and 'pywin.mfc.dialog' and always raised
	NameError; it now uses the class defined above and the 'dialog'
	module imported at the top of this file.
	"""
	page = ScintillaFormatPropertyPage()
	sheet = dialog.PropertySheet("Test")
	sheet.AddPage(page)
	sheet.CreateWindow()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/control.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/control.py
new file mode 100644
index 0000000..3d9a8b4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/control.py
@@ -0,0 +1,417 @@
# A Python interface to the Scintilla control.
+#
+# Exposes Python classes that allow you to use Scintilla as
+# a "standard" MFC edit control (eg, control.GetTextLength(), control.GetSel()
+# plus many Scintilla specific features (eg control.SCIAddStyledText())
+
+from pywin.mfc import window
+import win32con
+import win32ui
+import win32api
+import array
+import struct
+import string
+import os
+from scintillacon import *
+
# Load Scintilla.dll to get access to the control.
# We expect to find this in the same directory as win32ui.pyd
dllid = None
if win32ui.debug: # If running _d version of Pythonwin...
	try:
		dllid = win32api.LoadLibrary(os.path.join(os.path.split(win32ui.__file__)[0], "Scintilla_d.DLL"))
	except win32api.error: # Not there - we dont _need_ a debug ver, so ignore this error.
		pass
if dllid is None:
	try:
		dllid = win32api.LoadLibrary(os.path.join(os.path.split(win32ui.__file__)[0], "Scintilla.DLL"))
	except win32api.error:
		pass
if dllid is None:
	# Still not there - lets see if Windows can find it by searching?
	# Note this final attempt is NOT wrapped in try/except - if the DLL
	# cannot be found anywhere, the import of this module fails.
	dllid = win32api.LoadLibrary("Scintilla.DLL")
+
# Rich-edit compatible message constants used by CScintillaEditInterface
# (Scintilla emulates these rich-edit messages).
EM_GETTEXTRANGE = 1099
EM_EXLINEFROMCHAR = 1078
EM_FINDTEXTEX = 1103
EM_GETSELTEXT = 1086
EM_EXSETSEL = win32con.WM_USER + 55
+
class ScintillaNotification:
	"""A simple record describing one Scintilla notification message.

	Every keyword argument passed to the constructor is exposed as an
	instance attribute of the same name.
	"""
	def __init__(self, **kw):
		self.__dict__.update(kw)
+
class ScintillaControlInterface:
	"""Thin Python wrappers around the raw SCI_* Scintilla messages.

	Mixin class - concrete subclasses must provide SendScintilla (and
	SendMessage / InvalidateRect).  Most methods simply forward their
	arguments as the wParam/lParam of the corresponding SCI_* message.
	"""
	def SCIUnpackNotifyMessage(self, msg):
		# Unpack a raw SCNotification structure into a friendly object.
		# The format string must match the C layout of SCNotification
		# for this Scintilla build.
		format = "iiiiPiiiiiiiii"
		bytes = win32ui.GetBytes( msg, struct.calcsize(format) )
		position, ch, modifiers, modificationType, text_ptr, \
				length, linesAdded, msg, wParam, lParam, line, \
				foldLevelNow, foldLevelPrev, margin \
				= struct.unpack(format, bytes)
		return ScintillaNotification(position=position,ch=ch,
									 modifiers=modifiers, modificationType=modificationType,
									 text_ptr = text_ptr, length=length, linesAdded=linesAdded,
									 msg = msg, wParam = wParam, lParam = lParam,
									 line = line, foldLevelNow = foldLevelNow, foldLevelPrev = foldLevelPrev,
									 margin = margin)

	def SCIAddText(self, text):
		self.SendMessage(SCI_ADDTEXT, buffer(text))
	def SCIAddStyledText(self, text, style = None):
		# If style is None, text is assumed to be a "native" Scintilla buffer.
		# If style is specified, text is a normal string, and the style is
		# assumed to apply to the entire string.
		if style is not None:
			# Interleave each character with its style byte.
			text = map(lambda char, style=style: char+chr(style), text)
			text = string.join(text, '')
		self.SendMessage(SCI_ADDSTYLEDTEXT, buffer(text))
	def SCIInsertText(self, text, pos=-1):
		# Pass the address of a NUL-terminated copy of 'text'.
		sma = array.array('c', text+"\0")
		(a,l) = sma.buffer_info()
		self.SendScintilla(SCI_INSERTTEXT, pos, a)
	def SCISetSavePoint(self):
		self.SendScintilla(SCI_SETSAVEPOINT)
	def SCISetUndoCollection(self, collectFlag):
		self.SendScintilla(SCI_SETUNDOCOLLECTION, collectFlag)
	def SCIBeginUndoAction(self):
		self.SendScintilla(SCI_BEGINUNDOACTION)
	def SCIEndUndoAction(self):
		self.SendScintilla(SCI_ENDUNDOACTION)

	def SCIGetCurrentPos(self):
		return self.SendScintilla(SCI_GETCURRENTPOS)
	def SCIGetCharAt(self, pos):
		# Must ensure char is unsigned!
		return chr(self.SendScintilla(SCI_GETCHARAT, pos) & 0xFF)
	def SCIGotoLine(self, line):
		self.SendScintilla(SCI_GOTOLINE, line)
	def SCIBraceMatch(self, pos, maxReStyle):
		return self.SendScintilla(SCI_BRACEMATCH, pos, maxReStyle)
	def SCIBraceHighlight(self, pos, posOpposite):
		return self.SendScintilla(SCI_BRACEHIGHLIGHT, pos, posOpposite)
	def SCIBraceBadHighlight(self, pos):
		return self.SendScintilla(SCI_BRACEBADLIGHT, pos)

	####################################
	# Styling
#	def SCIColourise(self, start=0, end=-1):
#   NOTE - depends on whether we use the builtin lexer, so handled below.
	def SCIGetEndStyled(self):
		return self.SendScintilla(SCI_GETENDSTYLED)
	def SCIStyleSetFore(self, num, v):
		return self.SendScintilla(SCI_STYLESETFORE, num, v)
	def SCIStyleSetBack(self, num, v):
		return self.SendScintilla(SCI_STYLESETBACK, num, v)
	def SCIStyleSetEOLFilled(self, num, v):
		return self.SendScintilla(SCI_STYLESETEOLFILLED, num, v)
	def SCIStyleSetFont(self, num, name, characterset=0):
		# Font name is passed by address of a NUL-terminated buffer.
		buff = array.array('c', name + "\0")
		addressBuffer = buff.buffer_info()[0]
		self.SendScintilla(SCI_STYLESETFONT, num, addressBuffer)
		self.SendScintilla(SCI_STYLESETCHARACTERSET, num, characterset)
	def SCIStyleSetBold(self, num, bBold):
		self.SendScintilla(SCI_STYLESETBOLD, num, bBold)
	def SCIStyleSetItalic(self, num, bItalic):
		self.SendScintilla(SCI_STYLESETITALIC, num, bItalic)
	def SCIStyleSetSize(self, num, size):
		self.SendScintilla(SCI_STYLESETSIZE, num, size)
	def SCIGetViewWS(self):
		return self.SendScintilla(SCI_GETVIEWWS)
	def SCISetViewWS(self, val):
		# Normalize val to 0/1, then force a repaint so the change shows.
		self.SendScintilla(SCI_SETVIEWWS, not (val==0))
		self.InvalidateRect()
	def SCISetIndentationGuides(self, val):
		self.SendScintilla(SCI_SETINDENTATIONGUIDES, val)
	def SCIGetIndentationGuides(self):
		return self.SendScintilla(SCI_GETINDENTATIONGUIDES)
	def SCISetIndent(self, val):
		self.SendScintilla(SCI_SETINDENT, val)
	def SCIGetIndent(self, val):
		# NOTE(review): the 'val' parameter is accepted but never used -
		# kept for backwards compatibility with existing callers.
		return self.SendScintilla(SCI_GETINDENT)

	def SCIGetViewEOL(self):
		return self.SendScintilla(SCI_GETVIEWEOL)
	def SCISetViewEOL(self, val):
		self.SendScintilla(SCI_SETVIEWEOL, not(val==0))
		self.InvalidateRect()
	def SCISetTabWidth(self, width):
		self.SendScintilla(SCI_SETTABWIDTH, width, 0)
	def SCIStartStyling(self, pos, mask):
		self.SendScintilla(SCI_STARTSTYLING, pos, mask)
	def SCISetStyling(self, pos, attr):
		self.SendScintilla(SCI_SETSTYLING, pos, attr)
	def SCISetStylingEx(self, ray): # ray is an array.
		address, length = ray.buffer_info()
		self.SendScintilla(SCI_SETSTYLINGEX, length, address)
	def SCIGetStyleAt(self, pos):
		return self.SendScintilla(SCI_GETSTYLEAT, pos)
	def SCISetMarginWidth(self, width):
		# Convenience for margin 1 (see SCISetMarginWidthN for others).
		self.SendScintilla(SCI_SETMARGINWIDTHN, 1, width)
	def SCISetMarginWidthN(self, n, width):
		self.SendScintilla(SCI_SETMARGINWIDTHN, n, width)
	def SCISetFoldFlags(self, flags):
		self.SendScintilla(SCI_SETFOLDFLAGS, flags)
	# Markers
	def SCIMarkerDefineAll(self, markerNum, markerType, fore, back):
		# Convenience - define a marker and set both colors in one call.
		self.SCIMarkerDefine(markerNum, markerType)
		self.SCIMarkerSetFore(markerNum, fore)
		self.SCIMarkerSetBack(markerNum, back)
	def SCIMarkerDefine(self, markerNum, markerType):
		self.SendScintilla(SCI_MARKERDEFINE, markerNum, markerType)
	def SCIMarkerSetFore(self, markerNum, fore):
		self.SendScintilla(SCI_MARKERSETFORE, markerNum, fore)
	def SCIMarkerSetBack(self, markerNum, back):
		self.SendScintilla(SCI_MARKERSETBACK, markerNum, back)
	def SCIMarkerAdd(self, lineNo, markerNum):
		self.SendScintilla(SCI_MARKERADD, lineNo, markerNum)
	def SCIMarkerDelete(self, lineNo, markerNum):
		self.SendScintilla(SCI_MARKERDELETE, lineNo, markerNum)
	def SCIMarkerDeleteAll(self, markerNum=-1):
		self.SendScintilla(SCI_MARKERDELETEALL, markerNum)
	def SCIMarkerGet(self, lineNo):
		return self.SendScintilla(SCI_MARKERGET, lineNo)
	def SCIMarkerNext(self, lineNo, markerNum):
		return self.SendScintilla(SCI_MARKERNEXT, lineNo, markerNum)
	def SCICancel(self):
		self.SendScintilla(SCI_CANCEL)
	# AutoComplete
	def SCIAutoCShow(self, text):
		# Accepts a string, or a list/tuple which is space-joined.
		if type(text) in [type([]), type(())]:
			text = string.join(text)
		buff = array.array('c', text + "\0")
		addressBuffer = buff.buffer_info()[0]
		return self.SendScintilla(SCI_AUTOCSHOW, 0, addressBuffer)
	def SCIAutoCCancel(self):
		self.SendScintilla(SCI_AUTOCCANCEL)
	def SCIAutoCActive(self):
		return self.SendScintilla(SCI_AUTOCACTIVE)
	def SCIAutoCComplete(self):
		return self.SendScintilla(SCI_AUTOCCOMPLETE)
	def SCIAutoCStops(self, stops):
		buff = array.array('c', stops + "\0")
		addressBuffer = buff.buffer_info()[0]
		self.SendScintilla(SCI_AUTOCSTOPS, 0, addressBuffer)
	def SCIAutoCSetAutoHide(self, hide):
		self.SendScintilla(SCI_AUTOCSETAUTOHIDE, hide)
	def SCIAutoCSetFillups(self, fillups):
		self.SendScintilla(SCI_AUTOCSETFILLUPS, fillups)
	# Call tips
	def SCICallTipShow(self, text, pos=-1):
		# Default position is the start of the current selection.
		if pos==-1: pos = self.GetSel()[0]
		if isinstance(text, unicode):
			# I'm really not sure what the correct encoding
			# to use is - but it has gotta be better than total
			# failure due to the array module
			text = text.encode("mbcs")
		buff = array.array('c', text + "\0")
		addressBuffer = buff.buffer_info()[0]
		self.SendScintilla(SCI_CALLTIPSHOW, pos, addressBuffer)
	def SCICallTipCancel(self):
		self.SendScintilla(SCI_CALLTIPCANCEL)
	def SCICallTipActive(self):
		return self.SendScintilla(SCI_CALLTIPACTIVE)
	def SCICallTipPosStart(self):
		return self.SendScintilla(SCI_CALLTIPPOSSTART)
	def SCINewline(self):
		self.SendScintilla(SCI_NEWLINE)
	# Lexer etc
	def SCISetKeywords(self, keywords, kw_list_no = 0):
		ar = array.array('c', keywords+"\0")
		(a,l) = ar.buffer_info()
		self.SendScintilla(SCI_SETKEYWORDS, kw_list_no, a)
	def SCISetProperty(self, name, value):
		# Both name and value are passed by buffer address.
		name_buff = array.array('c', name + "\0")
		val_buff = array.array("c", str(value) + "\0")
		address_name_buffer = name_buff.buffer_info()[0]
		address_val_buffer = val_buff.buffer_info()[0]
		self.SendScintilla(SCI_SETPROPERTY, address_name_buffer, address_val_buffer)
	def SCISetStyleBits(self, nbits):
		self.SendScintilla(SCI_SETSTYLEBITS, nbits)
	# Folding
	def SCIGetFoldLevel(self, lineno):
		return self.SendScintilla(SCI_GETFOLDLEVEL, lineno)
	def SCIToggleFold(self, lineno):
		return self.SendScintilla(SCI_TOGGLEFOLD, lineno)
	def SCIEnsureVisible(self, lineno):
		self.SendScintilla(SCI_ENSUREVISIBLE, lineno)
	def SCIGetFoldExpanded(self, lineno):
		return self.SendScintilla(SCI_GETFOLDEXPANDED, lineno)
	# right edge
	def SCISetEdgeColumn(self, edge):
		self.SendScintilla(SCI_SETEDGECOLUMN, edge)
	def SCIGetEdgeColumn(self):
		return self.SendScintilla(SCI_GETEDGECOLUMN)
	def SCISetEdgeMode(self, mode):
		self.SendScintilla(SCI_SETEDGEMODE, mode)
	def SCIGetEdgeMode(self):
		return self.SendScintilla(SCI_GETEDGEMODE)
	def SCISetEdgeColor(self, color):
		self.SendScintilla(SCI_SETEDGECOLOUR, color)
	def SCIGetEdgeColor(self):
		# NOTE(review): the setter above uses SCI_SETEDGECOLOUR while this
		# uses SCI_GETEDGECOLOR - confirm both constants exist in
		# scintillacon and refer to the same property.
		return self.SendScintilla(SCI_GETEDGECOLOR)
	# Multi-doc
	def SCIGetDocPointer(self):
		return self.SendScintilla(SCI_GETDOCPOINTER)
	def SCISetDocPointer(self, p):
		return self.SendScintilla(SCI_SETDOCPOINTER, 0, p)
	def SCISetWrapMode(self, mode):
		return self.SendScintilla(SCI_SETWRAPMODE, mode)
	def SCIGetWrapMode(self):
		return self.SendScintilla(SCI_GETWRAPMODE)
+
class CScintillaEditInterface(ScintillaControlInterface):
	"""MFC-edit-control style interface implemented on Scintilla messages.

	Provides GetSel/SetSel/GetTextRange etc so a Scintilla control can be
	used where code expects a standard rich-edit control.
	"""
	def close(self):
		# Drop the colorizer reference to break the control<->colorizer cycle.
		self.colorizer = None

	# NOTE: 'Clear' was previously defined twice with identical bodies;
	# the duplicate definition has been removed.
	def Clear(self):
		self.SendScintilla(win32con.WM_CLEAR)

	def FindText(self, flags, range, findText):
		"""Find 'findText' in the character range (start, end).

		Returns (rc, (start, end)) where rc is the position of the match
		(or -1) and the tuple is the matched range.
		"""
		buff = array.array('c', findText + "\0")
		addressBuffer = buff.buffer_info()[0]
		# Build a FINDTEXTEX structure: chrg.cpMin, chrg.cpMax, lpstrText,
		# chrgText.cpMin, chrgText.cpMax.
		ft = struct.pack('llLll', range[0], range[1], addressBuffer, 0, 0)
		ftBuff = array.array('c', ft)
		addressFtBuff = ftBuff.buffer_info()[0]
		rc = self.SendScintilla(EM_FINDTEXTEX, flags, addressFtBuff)
		ftUnpacked = struct.unpack('llLll', ftBuff.tostring())
		return rc, (ftUnpacked[3], ftUnpacked[4])

	def GetSel(self):
		"""Return the selection as an ordered (start, end) tuple."""
		currentPos = self.SendScintilla(SCI_GETCURRENTPOS)
		anchorPos = self.SendScintilla(SCI_GETANCHOR)
		# An unreachable 'return currentPos' that followed this if/else
		# has been removed.
		if currentPos < anchorPos:
			return (currentPos, anchorPos)
		else:
			return (anchorPos, currentPos)

	def GetSelText(self):
		"""Return the currently selected text as a string."""
		start, end = self.GetSel()
		txtBuf = array.array('c', " " * ((end-start)+1))
		addressTxtBuf = txtBuf.buffer_info()[0]
		self.SendScintilla(EM_GETSELTEXT, 0, addressTxtBuf)
		# Drop the trailing NUL written by the control.
		return txtBuf.tostring()[:-1]

	def SetSel(self, start=0, end=None):
		"""Set the selection.

		Accepts either a (start, end) tuple as the first argument, or
		separate start/end positions.  Negative positions mean the end
		of the document.
		"""
		if type(start)==type(()):
			assert end is None, "If you pass a point in the first param, the second must be None"
			start, end = start
		elif end is None:
			end = start
		if start < 0: start = self.GetTextLength()
		if end < 0: end = self.GetTextLength()
		assert start <= self.GetTextLength(), "The start position is invalid (%d/%d)" % (start, self.GetTextLength())
		assert end <= self.GetTextLength(), "The end position is invalid (%d/%d)" % (end, self.GetTextLength())
		# Pack a CHARRANGE and pass its address via EM_EXSETSEL.
		cr = struct.pack('ll', start, end)
		crBuff = array.array('c', cr)
		addressCrBuff = crBuff.buffer_info()[0]
		rc = self.SendScintilla(EM_EXSETSEL, 0, addressCrBuff)

	def GetLineCount(self):
		return self.SendScintilla(win32con.EM_GETLINECOUNT)

	def LineFromChar(self, charPos=-1):
		"""Return the line number containing 'charPos' (default: selection start)."""
		if charPos==-1: charPos = self.GetSel()[0]
		assert charPos >= 0 and charPos <= self.GetTextLength(), "The charPos position (%s) is invalid (max=%s)" % (charPos, self.GetTextLength())
		#return self.SendScintilla(EM_EXLINEFROMCHAR, charPos)
		# EM_EXLINEFROMCHAR puts charPos in lParam, not wParam
		return self.SendScintilla(EM_EXLINEFROMCHAR, 0, charPos)

	def LineIndex(self, line):
		# Character index of the first character of 'line'.
		return self.SendScintilla(win32con.EM_LINEINDEX, line)

	def ScrollCaret(self):
		return self.SendScintilla(win32con.EM_SCROLLCARET)

	def GetCurLineNumber(self):
		return self.LineFromChar(self.SCIGetCurrentPos())

	def GetTextLength(self):
		return self.SendScintilla(win32con.WM_GETTEXTLENGTH)

	def GetTextRange(self, start = 0, end = -1):
		"""Return the document text between 'start' and 'end' (-1 = end of doc)."""
		if end == -1: end = self.SendScintilla(win32con.WM_GETTEXTLENGTH)
		assert end>=start, "Negative index requested (%d/%d)" % (start, end)
		assert start >= 0 and start <= self.GetTextLength(), "The start position is invalid"
		assert end >= 0 and end <= self.GetTextLength(), "The end position is invalid"
		# Allocate a receive buffer (+1 for the trailing NUL) and pack a
		# TEXTRANGE structure pointing at it.
		initer = "=" * (end - start + 1)
		buff = array.array('c', initer)
		addressBuffer = buff.buffer_info()[0]
		tr = struct.pack('llL', start, end, addressBuffer)
		trBuff = array.array('c', tr)
		addressTrBuff = trBuff.buffer_info()[0]
		numChars = self.SendScintilla(EM_GETTEXTRANGE, 0, addressTrBuff)
		return buff.tostring()[:numChars]

	def ReplaceSel(self, str):
		buff = array.array('c', str + "\0")
		self.SendScintilla(win32con.EM_REPLACESEL, 0, buff.buffer_info()[0])
		buff = None

	def GetLine(self, line=-1):
		"""Return the text of 'line' (default: the current line)."""
		if line == -1: line = self.GetCurLineNumber()
		start = self.LineIndex(line)
		end = self.LineIndex(line+1)
		return self.GetTextRange(start, end)

	def SetReadOnly(self, flag = 1):
		return self.SendScintilla(win32con.EM_SETREADONLY, flag)

	def LineScroll(self, lines, cols=0):
		return self.SendScintilla(win32con.EM_LINESCROLL, cols, lines)

	def GetFirstVisibleLine(self):
		return self.SendScintilla(win32con.EM_GETFIRSTVISIBLELINE)

	def SetWordWrap(self, mode):
		# Only "no wrapping" is supported by this interface.
		if mode != win32ui.CRichEditView_WrapNone:
			raise ValueError("We dont support word-wrap (I dont think :-)")
+
class CScintillaColorEditInterface(CScintillaEditInterface):
	"""Adds plug-in colorizer (syntax highlighting) support to the edit interface."""
	################################
	# Plug-in colorizer support
	def _GetColorizer(self):
		# Lazily create and cache the colorizer on first use.
		if not hasattr(self, "colorizer"):
			self.colorizer = self._MakeColorizer()
		return self.colorizer
	def _MakeColorizer(self):
		# Give parent a chance to hook.
		parent_func = getattr(self.GetParentFrame(), "_MakeColorizer", None)
		if parent_func is not None:
			return parent_func()
		import formatter
##		return formatter.PythonSourceFormatter(self)
		return formatter.BuiltinPythonSourceFormatter(self)

	def Colorize(self, start=0, end=-1):
		# Delegate to the colorizer, if one could be created.
		c = self._GetColorizer()
		if c is not None: c.Colorize(start, end)

	def ApplyFormattingStyles(self, bReload=1):
		c = self._GetColorizer()
		if c is not None: c.ApplyFormattingStyles(bReload)

	# The Parent window will normally hook
	def HookFormatter(self, parent = None):
		c = self._GetColorizer()
		if c is not None: # No need if we have no color!
			c.HookFormatter(parent)
+
class CScintillaEdit(window.Wnd, CScintillaColorEditInterface):
	"""A concrete Scintilla edit control usable as a plain MFC child window."""
	def __init__(self, wnd=None):
		# Wrap an existing window object, or create a fresh (unattached) one.
		if wnd is None:
			wnd = win32ui.CreateWnd()
		window.Wnd.__init__(self, wnd)
	def SendScintilla(self, msg, w=0, l=0):
		# For a real window the Scintilla messages are plain window messages.
		return self.SendMessage(msg, w, l)
	def CreateWindow(self, style, rect, parent, id):
		# "Scintilla" is the window class registered by Scintilla.DLL,
		# loaded at module import time above.
		self._obj_.CreateWindow(
				"Scintilla",
				"Scintilla",
				style,
				rect,
				parent,
				id,
				None)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/document.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/document.py
new file mode 100644
index 0000000..10d32f1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/document.py
@@ -0,0 +1,163 @@
+import win32ui
+from pywin.mfc import docview
+from pywin import is_platform_unicode, default_platform_encoding, default_scintilla_encoding
+from scintillacon import *
+import win32con
+import string
+import array
+
ParentScintillaDocument=docview.Document
class CScintillaDocument(ParentScintillaDocument):
	"""An MFC document holding the text shown in one or more Scintilla views.

	Handles loading/saving the file, transferring text into the control,
	hooking Scintilla notifications, and 1-based marker management.
	"""
	def DeleteContents(self):
		# Nothing to free - the text lives inside the Scintilla control.
		pass

	def OnOpenDocument(self, filename):
		# Load `filename` into the first view; returns 1 on success, 0 on
		# IOError (after showing a message box).
		# init data members
		#print "Opening", filename
		self.SetPathName(filename) # Must set this early!
		try:
			if is_platform_unicode:
				# Scintilla in UTF-8 mode - translate accordingly.
				import codecs
				f = codecs.open(filename, 'rb', default_platform_encoding)
			else:
				f = open(filename, 'rb')
			try:
				text = f.read()
			finally:
				f.close()
			if is_platform_unicode:
				# Translate from locale-specific (MCBS) encoding to UTF-8 for Scintilla
				text = text.encode(default_scintilla_encoding)
		except IOError:
			win32ui.MessageBox("Could not load the file from %s" % filename)
			return 0

		self._SetLoadedText(text)
##		if self.GetFirstView():
##			self.GetFirstView()._SetLoadedText(text)
##		self.SetModifiedFlag(0) # No longer dirty
		return 1

	def SaveFile(self, fileName):
		# Delegate the write to the view; on success tell Scintilla this
		# is the new "save point" so modification tracking resets.
		view = self.GetFirstView()
		ok = view.SaveTextFile(fileName)
		if ok:
			view.SCISetSavePoint()
		return ok

	def ApplyFormattingStyles(self):
		# Re-apply color/format styles on every view that supports it.
		self._ApplyOptionalToViews("ApplyFormattingStyles")

	# #####################
	# File related functions
	# Helper to transfer text from the MFC document to the control.
	def _SetLoadedText(self, text):
		view = self.GetFirstView()
		if view.IsWindow():
			# Turn off undo collection while loading 
			view.SendScintilla(SCI_SETUNDOCOLLECTION, 0, 0)
			# Make sure the control isnt read-only
			view.SetReadOnly(0)

			# NOTE(review): `doc` is assigned but never used below.
			doc = self
			view.SendScintilla(SCI_CLEARALL)
			view.SendMessage(SCI_ADDTEXT, buffer(text))
			# Re-enable undo and drop the (useless) "load" undo record.
			view.SendScintilla(SCI_SETUNDOCOLLECTION, 1, 0)
			view.SendScintilla(win32con.EM_EMPTYUNDOBUFFER, 0, 0)

	def FinalizeViewCreation(self, view):
		# Hook for subclasses; nothing to do by default.
		pass

	def HookViewNotifications(self, view):
		# Route per-view notifications to the view, and document-level
		# ones (save point, read-only modify attempt) to the document.
		parent = view.GetParentFrame()
		parent.HookNotify(ViewNotifyDelegate(self, "OnBraceMatch"), SCN_CHECKBRACE)
		parent.HookNotify(ViewNotifyDelegate(self, "OnMarginClick"), SCN_MARGINCLICK)
		parent.HookNotify(ViewNotifyDelegate(self, "OnNeedShown"), SCN_NEEDSHOWN)

		parent.HookNotify(DocumentNotifyDelegate(self, "OnSavePointReached"), SCN_SAVEPOINTREACHED)
		parent.HookNotify(DocumentNotifyDelegate(self, "OnSavePointLeft"), SCN_SAVEPOINTLEFT)
		parent.HookNotify(DocumentNotifyDelegate(self, "OnModifyAttemptRO"), SCN_MODIFYATTEMPTRO)
		# Tell scintilla what characters should abort auto-complete.
		view.SCIAutoCStops(string.whitespace+"()[]:;+-/*=\\?'!#@$%^&,<>\"'|" )

		# Secondary views share the first view's underlying document buffer.
		if view != self.GetFirstView():
			view.SCISetDocPointer(self.GetFirstView().SCIGetDocPointer())


	def OnSavePointReached(self, std, extra):
		# Buffer is back to its saved state - clear the dirty flag.
		self.SetModifiedFlag(0)

	def OnSavePointLeft(self, std, extra):
		# Buffer differs from the saved state - mark dirty.
		self.SetModifiedFlag(1)

	def OnModifyAttemptRO(self, std, extra):
		# User typed into a read-only buffer - ask for it to be writable.
		self.MakeDocumentWritable()

	# All Marker functions are 1 based.
	def MarkerAdd( self, lineNo, marker ):
		self.GetEditorView().SCIMarkerAdd(lineNo-1, marker)

	def MarkerCheck(self, lineNo, marker ):
		# Return true if `marker` is present on the (1-based) line.
		v = self.GetEditorView()
		lineNo = lineNo - 1 # Make 0 based
		markerState = v.SCIMarkerGet(lineNo)
		return markerState & (1<<marker) != 0

	def MarkerToggle( self, lineNo, marker ):
		v = self.GetEditorView()
		if self.MarkerCheck(lineNo, marker):
			v.SCIMarkerDelete(lineNo-1, marker)
		else:
			v.SCIMarkerAdd(lineNo-1, marker)
	def MarkerDelete( self, lineNo, marker ):
		self.GetEditorView().SCIMarkerDelete(lineNo-1, marker)
	def MarkerDeleteAll( self, marker ):
		self.GetEditorView().SCIMarkerDeleteAll(marker)
	def MarkerGetNext(self, lineNo, marker):
		# Scintilla takes a marker *mask* and 0-based line; convert both ways.
		return self.GetEditorView().SCIMarkerNext( lineNo-1, 1 << marker )+1
	def MarkerAtLine(self, lineNo, marker):
		# Return the (non-zero) mask bit if the marker is on the line.
		markerState = self.GetEditorView().SCIMarkerGet(lineNo-1)
		return markerState & (1<<marker)

	# Helper for reflecting functions to views.
	def _ApplyToViews(self, funcName, *args):
		# Call `funcName` on every view; all views must implement it.
		for view in self.GetAllViews():
			func = getattr(view, funcName)
			apply(func, args)
	def _ApplyOptionalToViews(self, funcName, *args):
		# Call `funcName` only on the views that implement it.
		for view in self.GetAllViews():
			func = getattr(view, funcName, None)
			if func is not None:
				apply(func, args)
	def GetEditorView(self):
		# Find the first frame with a view,
		# then ask it to give the editor view
		# as it knows which one is "active"
		try:
			frame_gev = self.GetFirstView().GetParentFrame().GetEditorView
		except AttributeError:
			return self.GetFirstView()
		return frame_gev()
+
# Delegate to the correct view, based on the control that sent it.
class ViewNotifyDelegate:
	"""Routes a Scintilla notification to the view whose HWND sent it.

	Used as a HookNotify callback: given the standard notification
	tuple (hwndFrom, idFrom, code), finds the matching view among the
	document's views and invokes the named handler on it.  Returns the
	handler's result, or None when no view matches.
	"""
	def __init__(self, doc, name):
		self.doc = doc    # document whose views are searched
		self.name = name  # handler method name looked up on the view
	def __call__(self, std, extra):
		(hwndFrom, idFrom, code) = std
		for view in self.doc.GetAllViews():
			if view.GetSafeHwnd() == hwndFrom:
				# `apply()` is deprecated (removed in Python 3);
				# a direct call is equivalent.
				return getattr(view, self.name)(std, extra)
+
# Delegate to the document, but only from a single view (as each view sends it separately)
class DocumentNotifyDelegate:
	"""Routes a Scintilla notification to a document-level handler,
	firing only for the notification sent by the editor view's HWND so
	duplicate per-view notifications are ignored.
	"""
	def __init__(self, doc, name):
		self.doc = doc
		# Bind the handler once, at hook time.
		self.delegate = getattr(doc, name)
	def __call__(self, std, extra):
		(hwndFrom, idFrom, code) = std
		if hwndFrom == self.doc.GetEditorView().GetSafeHwnd():
			# `apply()` is deprecated (removed in Python 3); direct
			# call is equivalent.  Also fixes the over-indented body.
			self.delegate(std, extra)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/find.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/find.py
new file mode 100644
index 0000000..64eb44b8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/find.py
@@ -0,0 +1,306 @@
+# find.py - Find and Replace
+import win32con, win32api
+import win32ui
+from pywin.mfc import dialog
+import afxres
+from pywin.framework import scriptutils
+
# Result codes returned by _FindIt / _ReplaceIt.
FOUND_NOTHING=0
FOUND_NORMAL=1
FOUND_LOOPED_BACK=2	# found, but only after wrapping to the top of the file
FOUND_NEXT_FILE=3	# found in another open document
+
class SearchParams:
	"""Holds the options and state of one find/replace operation.

	The attribute set is fixed at construction time: __setattr__
	rejects any name not created in __init__, so a misspelt attribute
	raises AttributeError instead of silently creating a new one.
	Construct with another SearchParams to copy its settings.
	"""
	def __init__(self, other=None):
		# Assign through __dict__ to bypass the guard in __setattr__
		# (the attributes do not exist yet, so hasattr would be false).
		if other is None:
			self.__dict__['findText'] = ""
			self.__dict__['replaceText'] = ""
			self.__dict__['matchCase'] = 0
			self.__dict__['matchWords'] = 0
			self.__dict__['acrossFiles'] = 0
			self.__dict__['remember'] = 1
			self.__dict__['sel'] = (-1,-1)
			self.__dict__['keepDialogOpen']=0
		else:
			self.__dict__.update(other.__dict__)
	# Helper so we cant misspell attributes :-)
	def __setattr__(self, attr, val):
		if not hasattr(self, attr):
			# `raise Exc, arg` is Python-2-only syntax; the call form
			# is equivalent and also valid in Python 3.
			raise AttributeError(attr)
		self.__dict__[attr]=val
+
# The single modeless find/replace dialog (None when none is showing),
# the parameters of the most recent search, and the remembered defaults.
curDialog = None
lastSearch = defaultSearch = SearchParams()
+
def ShowFindDialog():
	# Display (or re-focus) the modeless Find dialog.
	_ShowDialog(FindDialog)

def ShowReplaceDialog():
	# Display (or re-focus) the modeless Replace dialog.
	_ShowDialog(ReplaceDialog)
+
def _ShowDialog(dlgClass):
	"""Show the single modeless dialog of the given class.

	Reuses (just focuses) the existing dialog when it is already of the
	requested class; otherwise destroys it and creates a fresh one.
	"""
	global curDialog
	if curDialog is not None and curDialog.__class__ == dlgClass:
		# Right kind of dialog already up - just bring it to the user.
		curDialog.SetFocus()
		return
	if curDialog is not None:
		# Wrong kind of dialog - replace it.
		curDialog.DestroyWindow()
	curDialog = dlgClass()
	curDialog.CreateWindow()
+
def FindNext():
	"""Repeat the last search from the current position.

	When there is no previous search text, pops up the Find dialog
	(returning None); otherwise returns a FOUND_* code from _FindIt.
	"""
	params = SearchParams(lastSearch)
	# Clear the selection so _FindIt derives it from the control.
	params.sel = -1, -1
	if params.findText:
		return _FindIt(None, params)
	ShowFindDialog()
+
+def _GetControl(control=None):
+	if control is None:
+		control = scriptutils.GetActiveEditControl()
+	return control
+
def _FindIt(control, searchParams):
	"""Perform one find operation in `control` (or the active control).

	Updates the module-level `lastSearch` (and, if remembering,
	`defaultSearch`) and returns a FOUND_* code.  May wrap to the top
	of the control, or move to another open document when
	searchParams.acrossFiles is set.
	"""
	global lastSearch, defaultSearch
	control = _GetControl(control)
	if control is None: return FOUND_NOTHING

	# Move to the next char, so we find the next one.
	flags = 0
	if searchParams.matchWords: flags = flags | win32con.FR_WHOLEWORD
	if searchParams.matchCase: flags = flags | win32con.FR_MATCHCASE
	if searchParams.sel == (-1,-1):
		sel = control.GetSel()
		# If the position is the same as we found last time,
		# then we assume it is a "FindNext"
		if sel==lastSearch.sel:
			sel = sel[0]+1, sel[0]+1
	else:
		sel = searchParams.sel

	# An empty selection means "search to the end of the buffer".
	if sel[0]==sel[1]: sel=sel[0], control.GetTextLength()

	rc = FOUND_NOTHING
	# (Old edit control will fail here!)
	posFind, foundSel = control.FindText(flags, sel, searchParams.findText)
	lastSearch = SearchParams(searchParams)
	if posFind >= 0:
		# Hit in this control: reveal the line, select the match.
		rc = FOUND_NORMAL
		lineno = control.LineFromChar(posFind)
		control.SCIEnsureVisible(lineno)
		control.SetSel(foundSel)
		control.SetFocus()
		win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE))
	if rc == FOUND_NOTHING and lastSearch.acrossFiles:
		# Loop around all documents.  First find this document.
		try:
			try:
				doc = control.GetDocument()
			except AttributeError:
				try:
					doc = control.GetParent().GetDocument()
				except AttributeError:
					print "Cant find a document for the control!"
					doc = None
			if doc is not None:
				template = doc.GetDocTemplate()
				alldocs = template.GetDocumentList()
				mypos = lookpos = alldocs.index(doc)
				# Walk the other documents (wrapping), stopping when we
				# come back to ourselves.
				while 1:
					lookpos = (lookpos+1) % len(alldocs)
					if lookpos == mypos:
						break
					view = alldocs[lookpos].GetFirstView()
					posFind, foundSel = view.FindText(flags, (0, view.GetTextLength()), searchParams.findText)
					if posFind >= 0:
						nChars = foundSel[1]-foundSel[0]
						lineNo = view.LineFromChar(posFind) # zero based.
						lineStart = view.LineIndex(lineNo)
						colNo = posFind - lineStart # zero based.
						scriptutils.JumpToDocument(alldocs[lookpos].GetPathName(), lineNo+1, colNo+1, nChars)
						rc = FOUND_NEXT_FILE
						break
		except win32ui.error:
			pass
	if rc == FOUND_NOTHING:
		# Loop around this control - attempt to find from the start of the control.
		posFind, foundSel = control.FindText(flags, (0, sel[0]-1), searchParams.findText)
		if posFind >= 0:
			control.SCIEnsureVisible(control.LineFromChar(foundSel[0]))
			control.SetSel(foundSel)
			control.SetFocus()
			win32ui.SetStatusText("Not found! Searching from the top of the file.")
			rc = FOUND_LOOPED_BACK
		else:
			lastSearch.sel=-1,-1
			win32ui.SetStatusText("Can not find '%s'" % searchParams.findText )

	# Remember where the hit was so the next call becomes a "FindNext".
	if rc != FOUND_NOTHING:
		lastSearch.sel = foundSel

	if lastSearch.remember:
		defaultSearch = lastSearch
	return rc
+
def _ReplaceIt(control):
	"""Replace the current match with lastSearch.replaceText, then find
	the next occurrence.  Returns the FOUND_* code of that next find
	(FOUND_NOTHING when there was no current match to replace).
	"""
	control = _GetControl(control)
	statusText = "Can not find '%s'." % lastSearch.findText
	rc = FOUND_NOTHING
	# Only replace when a previous find actually selected something.
	if control is not None and lastSearch.sel != (-1,-1):
		control.ReplaceSel(lastSearch.replaceText)
		rc = FindNext()
		if rc !=FOUND_NOTHING:
			statusText = win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE)
	win32ui.SetStatusText(statusText)
	return rc
+
class FindReplaceDialog(dialog.Dialog):
	"""Shared base for the Find and Replace dialogs.

	Subclasses supply _GetDialogTemplate(); the hard-coded control IDs
	here (102 find text, 105 whole word, 107 match case, 115 keep open,
	116 across files, 117 remember, 109 Find Next) must match those
	templates.
	"""
	def __init__(self):
		dialog.Dialog.__init__(self,self._GetDialogTemplate())
		self.HookCommand(self.OnFindNext, 109)

	def OnInitDialog(self):
		# Cache the dialog controls by their template IDs.
		self.editFindText = self.GetDlgItem(102)
		self.butMatchWords = self.GetDlgItem(105)
		self.butMatchCase = self.GetDlgItem(107)
		self.butKeepDialogOpen = self.GetDlgItem(115)
		self.butAcrossFiles = self.GetDlgItem(116)
		self.butRemember = self.GetDlgItem(117)

		self.editFindText.SetWindowText(defaultSearch.findText)
		control = _GetControl()
		# If we have a selection, default to that.
		sel = control.GetSelText()
		if (len(sel) != 0):
			self.editFindText.SetWindowText(sel)
			if (defaultSearch.remember):
				defaultSearch.findText = sel

		# Select the text and focus it, then restore the checkboxes from
		# the remembered defaults.
		self.editFindText.SetSel(0, -2)
		self.editFindText.SetFocus()
		self.butMatchWords.SetCheck(defaultSearch.matchWords)
		self.butMatchCase.SetCheck(defaultSearch.matchCase)
		self.butKeepDialogOpen.SetCheck(defaultSearch.keepDialogOpen)
		self.butAcrossFiles.SetCheck(defaultSearch.acrossFiles)
		self.butRemember.SetCheck(defaultSearch.remember)
		return dialog.Dialog.OnInitDialog(self)

	def OnDestroy(self, msg):
		# Forget the module-level dialog reference so a new one is made.
		global curDialog
		curDialog = None
		return dialog.Dialog.OnDestroy(self, msg)

	def DoFindNext(self):
		# Build SearchParams from the current dialog state and search.
		params = SearchParams()
		params.findText = self.editFindText.GetWindowText()
		params.matchCase = self.butMatchCase.GetCheck()
		params.matchWords = self.butMatchWords.GetCheck()
		params.acrossFiles = self.butAcrossFiles.GetCheck()
		params.remember = self.butRemember.GetCheck()
		return _FindIt(None, params)

	def OnFindNext(self, id, code):
		# Beep on empty search text; close the dialog after a successful
		# find unless "keep dialog open" is checked.
		if not self.editFindText.GetWindowText():
			win32api.MessageBeep()
			return
		if self.DoFindNext() != FOUND_NOTHING and not self.butKeepDialogOpen.GetCheck():
			self.DestroyWindow()
+
class FindDialog(FindReplaceDialog):
	def _GetDialogTemplate(self):
		# In-memory dialog template: [caption, (x, y, w, h), style, ...]
		# followed by one entry per control.  Control IDs must match
		# those used by FindReplaceDialog.OnInitDialog.
		style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
		visible = win32con.WS_CHILD | win32con.WS_VISIBLE
		dt = [
			["Find", (0, 2, 240, 75), style, None, (8, "MS Sans Serif")],
			["Static", "Fi&nd What:", 101, (5, 8, 40, 10), visible],
			["Edit", "", 102, (50, 7, 120, 12), visible | win32con.WS_BORDER | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL],
			["Button", "Match &whole word only", 105, (5, 23, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "Match &case", 107, (5, 33, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "Keep &dialog open", 115, (5, 43, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "Across &open files", 116, (5, 52, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "&Remember as default search", 117, (5, 61, 150, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "&Find Next", 109, (185, 5, 50, 14), visible | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP],
			["Button", "Cancel", win32con.IDCANCEL, (185, 23, 50, 14), visible | win32con.WS_TABSTOP],
		]
		return dt
+
class ReplaceDialog(FindReplaceDialog):
	def _GetDialogTemplate(self):
		# Same layout as FindDialog plus the replace edit (104) and the
		# Replace (110) / Replace All (111) buttons.
		style = win32con.DS_MODALFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT
		visible = win32con.WS_CHILD | win32con.WS_VISIBLE
		dt = [
			["Replace", (0, 2, 240, 95), style, None, (8, "MS Sans Serif")],
			["Static", "Fi&nd What:", 101, (5, 8, 40, 10), visible],
			["Edit", "", 102, (60, 7, 110, 12), visible | win32con.WS_BORDER | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL],
			["Static", "Re&place with:", 103, (5, 25, 50, 10), visible],
			["Edit", "", 104, (60, 24, 110, 12), visible | win32con.WS_BORDER | win32con.WS_TABSTOP | win32con.ES_AUTOHSCROLL],
			["Button", "Match &whole word only", 105, (5, 42, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "Match &case", 107, (5, 52, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "Keep &dialog open", 115, (5, 62, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "Across &open files", 116, (5, 72, 100, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "&Remember as default search", 117, (5, 81, 150, 10), visible | win32con.BS_AUTOCHECKBOX | win32con.WS_TABSTOP],
			["Button", "&Find Next", 109, (185, 5, 50, 14), visible | win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP],
			["Button", "&Replace", 110, (185, 23, 50, 14), visible | win32con.WS_TABSTOP],
			["Button", "Replace &All", 111, (185, 41, 50, 14), visible | win32con.WS_TABSTOP],
			["Button", "Cancel", win32con.IDCANCEL, (185, 59, 50, 14), visible | win32con.WS_TABSTOP],
		]
		return dt

	def OnInitDialog(self):
		rc = FindReplaceDialog.OnInitDialog(self)
		self.HookCommand(self.OnReplace, 110)
		self.HookCommand(self.OnReplaceAll, 111)
		self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE)
		self.editReplaceText = self.GetDlgItem(104)
		self.editReplaceText.SetWindowText(lastSearch.replaceText)
		self.editReplaceText.SetSel(0, -2)
		self.butReplace = self.GetDlgItem(110)
		self.butReplaceAll = self.GetDlgItem(111)
		self.CheckButtonStates()
		return rc

	def CheckButtonStates(self):
		# We can do a "Replace" or "Replace All" if the current selection
		# is the same as the search text.
		ft = self.editFindText.GetWindowText()
		control = _GetControl()
#		bCanReplace = len(ft)>0 and control.GetSelText() == ft
		bCanReplace = control is not None and lastSearch.sel == control.GetSel()
		self.butReplace.EnableWindow(bCanReplace)
#		self.butReplaceAll.EnableWindow(bCanReplace)

	def OnActivate(self, msg):
		# Re-check the button states whenever the dialog regains focus,
		# as the selection in the edit control may have changed.
		wparam = msg[2]
		fActive = win32api.LOWORD(wparam)
		if fActive != win32con.WA_INACTIVE:
			self.CheckButtonStates()

	def OnFindNext(self, id, code):
		self.DoFindNext()
		self.CheckButtonStates()

	def OnReplace(self, id, code):
		lastSearch.replaceText = self.editReplaceText.GetWindowText()
		_ReplaceIt(None)

	def OnReplaceAll(self, id, code):
		control = _GetControl(None)
		if control is not None:
			# NOTE(review): SetSel(0) presumably collapses the selection
			# to the start so the search covers the whole buffer - confirm.
			control.SetSel(0)
			num = 0
			if self.DoFindNext() == FOUND_NORMAL:
				lastSearch.replaceText = self.editReplaceText.GetWindowText()
				while _ReplaceIt(control) == FOUND_NORMAL:
					num = num + 1

			win32ui.SetStatusText("Replaced %d occurrences" % num)
			if num > 0 and not self.butKeepDialogOpen.GetCheck():
				self.DestroyWindow()
+
if __name__=='__main__':
	# Ad-hoc manual test: just pop up the Find dialog.
	ShowFindDialog()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/formatter.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/formatter.py
new file mode 100644
index 0000000..e376795
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/formatter.py
@@ -0,0 +1,579 @@
+# Does Python source formatting for Scintilla controls.
+import win32ui
+import win32api
+import win32con
+import winerror
+import string
+import array
+import scintillacon
+
# MFC's private "kick idle" message number.
WM_KICKIDLE = 0x036A

# Set to 1 to send trace() output to the win32trace collector process.
debugging = 0
if debugging:
	# Output must go to another process, else the result of the
	# printing would itself trigger another trace.
	import sys, win32traceutil, win32trace 
	def trace(*args):
		win32trace.write(string.join(map(str, args), " ") + "\n")
else:
	trace = lambda *args: None
+
class Style:
	"""A single named text format (eg "String", "Class").

	`format` is one of: a 5-tuple ("simple" - the remaining fields are
	taken from the default format on demand), a longer fully-specified
	tuple, or a string naming another style that this one aliases (in
	which case self.format is None).
	"""
	def __init__(self, name, format, background = None):
		self.name = name # Name the format represents eg, "String", "Class"
		self.background = background
		if type(format) is str:
			# A string means "alias of that other style".
			self.aliased = format
			self.format = None
		else:
			self.format = format
			self.aliased = None
		self.stylenum = None # Not yet registered.

	def IsBasedOnDefault(self):
		# Simple formats carry only 5 fields and inherit the rest.
		return len(self.format) == 5

	def NormalizeAgainstDefault(self, defaultFormat):
		# Collapse back to the simple form when the extended fields
		# (font name [7] and size [2]) match the default format.
		# Returns true when the format was changed.
		if self.IsBasedOnDefault():
			return 0 # No more to do, and not changed.
		matchesDefault = (self.format[7] == defaultFormat[7]
		                  and self.format[2] == defaultFormat[2])
		if matchesDefault:
			self.ForceAgainstDefault()
		return matchesDefault

	def ForceAgainstDefault(self):
		# Drop the extended fields, leaving a "simple" 5-field format.
		self.format = self.format[:5]

	def GetCompleteFormat(self, defaultFormat):
		# Get the complete style after applying any relevant defaults.
		if self.IsBasedOnDefault():
			full = self.format + defaultFormat[5:]
		else:
			full = self.format
		flags = (win32con.CFM_BOLD | win32con.CFM_CHARSET
		         | win32con.CFM_COLOR | win32con.CFM_FACE
		         | win32con.CFM_ITALIC | win32con.CFM_SIZE)
		return (flags,) + full[1:]
+
+# The Formatter interface
+# used primarily when the actual formatting is done by Scintilla!
+class FormatterBase:
+	def __init__(self, scintilla):
+		self.scintilla = scintilla
+		self.baseFormatFixed = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New')
+		self.baseFormatProp = (-402653169, 0, 200, 0, 0, 0, 49, 'Arial')
+		self.bUseFixed = 1
+		self.styles = {} # Indexed by name
+		self.styles_by_id = {} # Indexed by allocated ID.
+		# Default Background
+		self.default_background = None
+		self._LoadBackground()
+
+		self.SetStyles()
+	
+
+	def _LoadBackground( self ):
+		#load default background
+		bg = int( self.LoadPreference( "Default Background", -1 ) )
+		if bg != -1:
+			self.default_background = bg
+		if self.default_background is None:
+			self.default_background = win32api.RGB( 0xff, 0xff, 0xff )
+
+	def GetDefaultBackground( self ):
+		return self.default_background
+
+	def HookFormatter(self, parent = None):
+		raise NotImplementedError
+
+	# Used by the IDLE extensions to quickly determine if a character is a string.
+	def GetStringStyle(self, pos):
+		try:
+			style = self.styles_by_id[self.scintilla.SCIGetStyleAt(pos)]
+		except KeyError:
+			# A style we dont know about - probably not even a .py file - can't be a string
+			return None			
+		if style.name in self.string_style_names:
+			return style
+		return None
+
+	def RegisterStyle(self, style, stylenum):
+		assert stylenum is not None, "We must have a style number"
+		assert style.stylenum is None, "Style has already been registered"
+		assert not self.styles.has_key(stylenum), "We are reusing a style number!"
+		style.stylenum = stylenum
+		self.styles[style.name] = style
+		self.styles_by_id[stylenum] = style
+
+	def SetStyles(self):
+		raise NotImplementedError
+
+	def GetSampleText(self):
+		return "Sample Text for the Format Dialog"
+
+	def GetDefaultFormat(self):
+			if self.bUseFixed:
+				return self.baseFormatFixed
+			return self.baseFormatProp
+
+	# Update the control with the new style format.
+	def _ReformatStyle(self, style):
+		assert style.stylenum is not None, "Unregistered style."
+		#print "Reformat style", style.name, style.stylenum
+		scintilla=self.scintilla
+		stylenum = style.stylenum
+		# Now we have the style number, indirect for the actual style.
+		if style.aliased is not None:
+			style = self.styles[style.aliased]
+		f=style.format
+		if style.IsBasedOnDefault():
+			baseFormat = self.GetDefaultFormat()
+		else: baseFormat = f
+		scintilla.SCIStyleSetFore(stylenum, f[4])
+		scintilla.SCIStyleSetFont(stylenum, baseFormat[7], baseFormat[5])
+		if f[1] & 1: scintilla.SCIStyleSetBold(stylenum, 1)
+		else: scintilla.SCIStyleSetBold(stylenum, 0)
+		if f[1] & 2: scintilla.SCIStyleSetItalic(stylenum, 1)
+		else: scintilla.SCIStyleSetItalic(stylenum, 0)
+		scintilla.SCIStyleSetSize(stylenum, int(baseFormat[2]/20))
+		if style.background is not None:
+			scintilla.SCIStyleSetBack(stylenum, style.background)
+		else:
+			scintilla.SCIStyleSetBack(stylenum, self.GetDefaultBackground() )
+		scintilla.SCIStyleSetEOLFilled(stylenum, 1) # Only needed for unclosed strings.
+
+	def GetStyleByNum(self, stylenum):
+		return self.styles_by_id[stylenum]
+
+	def ApplyFormattingStyles(self, bReload=1):
+		if bReload:
+			self.LoadPreferences()
+		baseFormat = self.GetDefaultFormat()
+		defaultStyle = Style("default", baseFormat)
+		defaultStyle.stylenum = scintillacon.STYLE_DEFAULT
+		self._ReformatStyle(defaultStyle)
+		for style in self.styles.values():
+			if style.aliased is None:
+				style.NormalizeAgainstDefault(baseFormat)
+			self._ReformatStyle(style)
+		self.scintilla.InvalidateRect()
+
+	# Some functions for loading and saving preferences.  By default
+	# an INI file (well, MFC maps this to the registry) is used.
+	def LoadPreferences(self):
+		self.baseFormatFixed = eval(self.LoadPreference("Base Format Fixed", str(self.baseFormatFixed)))
+		self.baseFormatProp = eval(self.LoadPreference("Base Format Proportional", str(self.baseFormatProp)))
+		self.bUseFixed = int(self.LoadPreference("Use Fixed", 1))
+
+		for style in self.styles.values():
+			new = self.LoadPreference(style.name, str(style.format))
+			try:
+				style.format = eval(new)
+				bg = int(self.LoadPreference(style.name + " background", -1))
+				if bg != -1:
+					style.background = bg
+				if style.background == self.default_background:
+					style.background = None
+					
+			except:
+				print "Error loading style data for", style.name
+
+	def LoadPreference(self, name, default):
+		return win32ui.GetProfileVal("Format", name, default)
+
+	def SavePreferences(self):
+		self.SavePreference("Base Format Fixed", str(self.baseFormatFixed))
+		self.SavePreference("Base Format Proportional", str(self.baseFormatProp))
+		self.SavePreference("Use Fixed", self.bUseFixed)
+		for style in self.styles.values():
+			if style.aliased is None:
+				self.SavePreference(style.name, str(style.format))
+				if style.background is not None:
+					bg_name = style.name + " background"
+					self.SavePreference(bg_name, style.background) # May be None
+					
+	def SavePreference(self, name, value):
+		if value is None:
+			hkey = win32ui.GetAppRegistryKey()
+			try:
+				subkey = win32api.RegOpenKey(hkey, "Format", 0, win32con.KEY_SET_VALUE)
+			except win32api.error, (rc, fn, msg):
+				if rc != winerror.ERROR_FILE_NOT_FOUND:
+					raise
+			subkey.Close()
+		else:
+			win32ui.WriteProfileVal("Format", name, value)
+
# An abstract formatter
# For all formatters we actually implement here.
# (as opposed to those formatters built in to Scintilla)
class Formatter(FormatterBase):
	"""Formatter that styles text itself, driven by SCN_STYLENEEDED
	notifications, optionally finishing the rest of the buffer during
	idle time."""
	def __init__(self, scintilla):
		self.bCompleteWhileIdle = 0
		self.bHaveIdleHandler = 0 # Dont currently have an idle handle
		self.nextstylenum = 0	# Next auto-allocated style number.
		FormatterBase.__init__(self, scintilla)

	def HookFormatter(self, parent = None):
		if parent is None: parent = self.scintilla.GetParent() # was GetParentFrame()!?
		parent.HookNotify(self.OnStyleNeeded, scintillacon.SCN_STYLENEEDED)

	def OnStyleNeeded(self, std, extra):
		# Style from the start of the first incompletely styled line up
		# to the position Scintilla asked for.
		notify = self.scintilla.SCIUnpackNotifyMessage(extra)
		endStyledChar = self.scintilla.SendScintilla(scintillacon.SCI_GETENDSTYLED)
		lineEndStyled = self.scintilla.LineFromChar(endStyledChar)
		endStyled = self.scintilla.LineIndex(lineEndStyled)
		#print "enPosPaint %d endStyledChar %d lineEndStyled %d endStyled %d" % (endPosPaint, endStyledChar, lineEndStyled, endStyled)
		self.Colorize(endStyled, notify.position)

	def ColorSeg(self, start, end, styleName):
		# Record the style for characters start..end (inclusive) in the
		# pending style buffer; flushed in Colorize().
		end = end+1
#		assert end-start>=0, "Can't have negative styling"
		stylenum = self.styles[styleName].stylenum
		while start<end:
			self.style_buffer[start]=chr(stylenum)
			start = start+1
		#self.scintilla.SCISetStyling(end - start + 1, stylenum)

	def RegisterStyle(self, style, stylenum = None):
		# Auto-allocate a style number when the caller does not fix one.
		if stylenum is None:
			stylenum = self.nextstylenum
			self.nextstylenum = self.nextstylenum + 1
		FormatterBase.RegisterStyle(self, style, stylenum)

	def ColorizeString(self, str, charStart, styleStart):
		# NOTE(review): concrete subclasses here implement this with two
		# arguments (str, styleStart) - see the call in Colorize below.
		raise RuntimeError, "You must override this method"

	def Colorize(self, start=0, end=-1):
		scintilla = self.scintilla
		stringVal = scintilla.GetTextRange(start, end)
		# Seed the style state from the character just before `start`.
		if start > 0:
			stylenum = scintilla.SCIGetStyleAt(start - 1)
			styleStart = self.GetStyleByNum(stylenum).name
		else:
			styleStart = None
#		trace("Coloring", start, end, end-start, len(stringVal), styleStart, self.scintilla.SCIGetCharAt(start))
		# 31 = mask of the 5 style bits.
		scintilla.SCIStartStyling(start, 31)
		self.style_buffer = array.array("c", chr(0)*len(stringVal))
		self.ColorizeString(stringVal, styleStart)
		scintilla.SCISetStylingEx(self.style_buffer)
		self.style_buffer = None
#		trace("After styling, end styled is", self.scintilla.SCIGetEndStyled())
		# If more of the buffer remains, finish it during idle time.
		if self.bCompleteWhileIdle and not self.bHaveIdleHandler and end!=-1 and end < scintilla.GetTextLength():
			self.bHaveIdleHandler = 1
			win32ui.GetApp().AddIdleHandler(self.DoMoreColoring)
			# Kicking idle makes the app seem slower when initially repainting!
#			win32ui.GetMainFrame().PostMessage(WM_KICKIDLE, 0, 0)

	def DoMoreColoring(self, handler, count):
		# Idle handler: style one more line per call; returns true while
		# more work remains (keeping the handler installed).
		try:
			scintilla = self.scintilla
			endStyled = scintilla.SCIGetEndStyled()
			lineStartStyled = scintilla.LineFromChar(endStyled)
			start = scintilla.LineIndex(lineStartStyled)
			end = scintilla.LineIndex(lineStartStyled+1)
			textlen = scintilla.GetTextLength()
			if end < 0: end = textlen

			finished = end >= textlen
			self.Colorize(start, end)
		except (win32ui.error, AttributeError):
			# Window may have closed before we finished - no big deal!
			finished = 1

		if finished:
			self.bHaveIdleHandler = 0
			win32ui.GetApp().DeleteIdleHandler(handler)
		return not finished
+
# A Formatter that knows how to format Python source
from keyword import iskeyword, kwlist

# Character classes used by the hand-rolled Python tokenizer below:
# a word may start with a letter/digit/underscore, and may also contain '.'.
wordstarts = '_0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
wordchars = '._0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
operators = '%^&*()-+=|{}[]:;<>,/?!.~'

# User-visible style names (also the profile keys they are saved under).
STYLE_DEFAULT = "Whitespace"
STYLE_COMMENT = "Comment"
STYLE_COMMENT_BLOCK = "Comment Blocks"
STYLE_NUMBER = "Number"
STYLE_STRING = "String"
STYLE_SQSTRING = "SQ String"
STYLE_TQSSTRING = "TQS String"
STYLE_TQDSTRING = "TQD String"
STYLE_KEYWORD = "Keyword"
STYLE_CLASS = "Class"
STYLE_METHOD = "Method"
STYLE_OPERATOR = "Operator"
STYLE_IDENTIFIER = "Identifier"
STYLE_BRACE = "Brace/Paren - matching"
STYLE_BRACEBAD = "Brace/Paren - unmatched"
STYLE_STRINGEOL = "String with no terminator"

# Styles GetStringStyle() treats as "inside a string".
STRING_STYLES = [STYLE_STRING, STYLE_SQSTRING, STYLE_TQSSTRING, STYLE_TQDSTRING, STYLE_STRINGEOL]

# These styles can have any ID - they are not special to scintilla itself.
# However, if we use the built-in lexer, then we must use its style numbers
# so in that case, they _are_ special.
# Entries are (name, format-or-alias, background, scintilla lexer id).
PYTHON_STYLES = [
		(STYLE_DEFAULT,      (0, 0, 200, 0, 0x808080), None,     scintillacon.SCE_P_DEFAULT ),
		(STYLE_COMMENT,      (0, 2, 200, 0, 0x008000), None,     scintillacon.SCE_P_COMMENTLINE ),
		(STYLE_COMMENT_BLOCK,(0, 2, 200, 0, 0x808080), None,     scintillacon.SCE_P_COMMENTBLOCK ),
		(STYLE_NUMBER,       (0, 0, 200, 0, 0x808000), None,     scintillacon.SCE_P_NUMBER ),
		(STYLE_STRING,       (0, 0, 200, 0, 0x008080), None,     scintillacon.SCE_P_STRING ),
		(STYLE_SQSTRING,     STYLE_STRING,             None,     scintillacon.SCE_P_CHARACTER ),
		(STYLE_TQSSTRING,    STYLE_STRING,             None,     scintillacon.SCE_P_TRIPLE ),
		(STYLE_TQDSTRING,    STYLE_STRING,             None,     scintillacon.SCE_P_TRIPLEDOUBLE),
		(STYLE_STRINGEOL,    (0, 0, 200, 0, 0x000000), 0x008080, scintillacon.SCE_P_STRINGEOL),
		(STYLE_KEYWORD,      (0, 1, 200, 0, 0x800000), None,     scintillacon.SCE_P_WORD),
		(STYLE_CLASS,        (0, 1, 200, 0, 0xFF0000), None,     scintillacon.SCE_P_CLASSNAME ),
		(STYLE_METHOD,       (0, 1, 200, 0, 0x808000), None,     scintillacon.SCE_P_DEFNAME),
		(STYLE_OPERATOR,     (0, 0, 200, 0, 0x000000), None,     scintillacon.SCE_P_OPERATOR),
		(STYLE_IDENTIFIER,   (0, 0, 200, 0, 0x000000), None,     scintillacon.SCE_P_IDENTIFIER ),
]

# These styles _always_ have this specific style number, regardless of
# internal or external formatter.
SPECIAL_STYLES = [
		(STYLE_BRACE,        (0, 0, 200, 0, 0x000000), 0xffff80, scintillacon.STYLE_BRACELIGHT),
		(STYLE_BRACEBAD,     (0, 0, 200, 0, 0x000000), 0x8ea5f2, scintillacon.STYLE_BRACEBAD),
]

# Sample shown in the format-options dialog.  The broken constructs
# (unmatched paren, unterminated string) are literal sample text.
PythonSampleCode = """\
# Some Python
class Sample(Super):
  def Fn(self):
\tself.v = 1024
dest = 'dest.html'
x = func(a + 1)|)
s = "I forget...
## A large
## comment block"""
+
+class PythonSourceFormatter(Formatter):
+	string_style_names = STRING_STYLES
	def GetSampleText(self):
		# Sample shown in the format dialog; presumably its broken
		# constructs exist to preview the error styles - TODO confirm.
		return PythonSampleCode
+
	def LoadStyles(self):
		# Nothing extra to load; styles are registered in SetStyles().
		pass
+
+	def SetStyles(self):
+		for name, format, bg, ignore in PYTHON_STYLES:
+			self.RegisterStyle( Style(name, format, bg) )
+		for name, format, bg, sc_id in SPECIAL_STYLES:
+			self.RegisterStyle( Style(name, format, bg), sc_id )
+
+	def ClassifyWord(self, cdoc, start, end, prevWord):
+		word = cdoc[start:end+1]
+		attr = STYLE_IDENTIFIER
+		if prevWord == "class":
+			attr = STYLE_CLASS
+		elif prevWord == "def":
+			attr = STYLE_METHOD
+		elif cdoc[start] in string.digits:
+			attr = STYLE_NUMBER
+		elif iskeyword(word):
+			attr = STYLE_KEYWORD
+		self.ColorSeg(start, end, attr)
+		return word
+
+	def ColorizeString(self, str, styleStart):
+		if styleStart is None: styleStart = STYLE_DEFAULT
+		return self.ColorizePythonCode(str, 0, styleStart)
+
+	def ColorizePythonCode(self, cdoc, charStart, styleStart):
+		# Straight translation of C++, should do better
+		lengthDoc = len(cdoc)
+		if lengthDoc <= charStart: return
+		prevWord = ""
+		state = styleStart
+		chPrev = chPrev2 = chPrev3 = ' '
+		chNext = cdoc[charStart]
+		chNext2 = cdoc[charStart]
+		startSeg = i = charStart
+		while i < lengthDoc:
+			ch = chNext
+			chNext = ' '
+			if i+1 < lengthDoc: chNext = cdoc[i+1]
+			chNext2 = ' '
+			if i+2 < lengthDoc: chNext2 = cdoc[i+2]
+			if state == STYLE_DEFAULT:
+				if ch in wordstarts:
+					self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT)
+					state = STYLE_KEYWORD
+					startSeg = i
+				elif ch == '#':
+					self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT)
+					if chNext == '#':
+						state = STYLE_COMMENT_BLOCK
+					else:
+						state = STYLE_COMMENT
+					startSeg = i
+				elif ch == '\"':
+					self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT)
+					startSeg = i
+					state = STYLE_COMMENT
+					if chNext == '\"' and chNext2 == '\"':
+						i = i + 2
+						state = STYLE_TQDSTRING
+						ch = ' '
+						chPrev = ' '
+						chNext = ' '
+						if i+1 < lengthDoc: chNext = cdoc[i+1]
+					else:
+						state = STYLE_STRING
+				elif ch == '\'':
+					self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT)
+					startSeg = i
+					state = STYLE_COMMENT
+					if chNext == '\'' and chNext2 == '\'':
+						i = i + 2
+						state = STYLE_TQSSTRING
+						ch = ' '
+						chPrev = ' '
+						chNext = ' '
+						if i+1 < lengthDoc: chNext = cdoc[i+1]
+					else:
+						state = STYLE_SQSTRING
+				elif ch in operators:
+					self.ColorSeg(startSeg, i - 1, STYLE_DEFAULT)
+					self.ColorSeg(i, i, STYLE_OPERATOR)
+					startSeg = i+1
+			elif state == STYLE_KEYWORD:
+				if ch not in wordchars:
+					prevWord = self.ClassifyWord(cdoc, startSeg, i-1, prevWord)
+					state = STYLE_DEFAULT
+					startSeg = i
+					if ch == '#':
+						if chNext == '#':
+							state = STYLE_COMMENT_BLOCK
+						else:
+							state = STYLE_COMMENT
+					elif ch == '\"':
+						if chNext == '\"' and chNext2 == '\"':
+							i = i + 2
+							state = STYLE_TQDSTRING
+							ch = ' '
+							chPrev = ' '
+							chNext = ' '
+							if i+1 < lengthDoc: chNext = cdoc[i+1]
+						else:
+							state = STYLE_STRING
+					elif ch == '\'':
+						if chNext == '\'' and chNext2 == '\'':
+							i = i + 2
+							state = STYLE_TQSSTRING
+							ch = ' '
+							chPrev = ' '
+							chNext = ' '
+							if i+1 < lengthDoc: chNext = cdoc[i+1]
+						else:
+							state = STYLE_SQSTRING
+					elif ch in operators:
+						self.ColorSeg(startSeg, i, STYLE_OPERATOR)
+						startSeg = i+1
+			elif state == STYLE_COMMENT or state == STYLE_COMMENT_BLOCK:
+				if ch == '\r' or ch == '\n':
+					self.ColorSeg(startSeg, i-1, state)
+					state = STYLE_DEFAULT
+					startSeg = i
+			elif state == STYLE_STRING:
+				if ch == '\\':
+					if chNext == '\"' or chNext == '\'' or chNext == '\\':
+						i = i + 1
+						ch = chNext
+						chNext = ' '
+						if i+1 < lengthDoc: chNext = cdoc[i+1]
+				elif ch == '\"':
+					self.ColorSeg(startSeg, i, STYLE_STRING)
+					state = STYLE_DEFAULT
+					startSeg = i+1
+			elif state == STYLE_SQSTRING:
+				if ch == '\\':
+					if chNext == '\"' or chNext == '\'' or chNext == '\\':
+						i = i+1
+						ch = chNext
+						chNext = ' '
+						if i+1 < lengthDoc: chNext = cdoc[i+1]
+				elif ch == '\'':
+					self.ColorSeg(startSeg, i, STYLE_SQSTRING)
+					state = STYLE_DEFAULT
+					startSeg = i+1
+			elif state == STYLE_TQSSTRING:
+				if ch == '\'' and chPrev == '\'' and chPrev2 == '\'' and chPrev3 != '\\':
+					self.ColorSeg(startSeg, i, STYLE_TQSSTRING)
+					state = STYLE_DEFAULT
+					startSeg = i+1
+			elif state == STYLE_TQDSTRING and ch == '\"' and chPrev == '\"' and chPrev2 == '\"' and chPrev3 != '\\':
+					self.ColorSeg(startSeg, i, STYLE_TQDSTRING)
+					state = STYLE_DEFAULT
+					startSeg = i+1
+			chPrev3 = chPrev2
+			chPrev2 = chPrev
+			chPrev = ch
+			i = i + 1
+		if startSeg < lengthDoc:
+			if state == STYLE_KEYWORD:
+				self.ClassifyWord(cdoc, startSeg, lengthDoc-1, prevWord)
+			else:
+				self.ColorSeg(startSeg, lengthDoc-1, state)
+
+
+# These taken from the SciTE properties file.
+source_formatter_extensions = [
+	( string.split(".py .pys .pyw"), scintillacon.SCLEX_PYTHON ),
+	( string.split(".html .htm .asp .shtml"), scintillacon.SCLEX_HTML ),
+	( string.split("c .cc .cpp .cxx .h .hh .hpp .hxx .idl .odl .php3 .phtml .inc .js"),scintillacon.SCLEX_CPP ),
+	( string.split(".vbs .frm .ctl .cls"), scintillacon.SCLEX_VB ),
+	( string.split(".pl .pm .cgi .pod"), scintillacon.SCLEX_PERL ),
+	( string.split(".sql .spec .body .sps .spb .sf .sp"), scintillacon.SCLEX_SQL ),
+	( string.split(".tex .sty"), scintillacon.SCLEX_LATEX ),
+	( string.split(".xml .xul"), scintillacon.SCLEX_XML ),
+	( string.split(".err"), scintillacon.SCLEX_ERRORLIST ),
+	( string.split(".mak"), scintillacon.SCLEX_MAKEFILE ),
+	( string.split(".bat .cmd"), scintillacon.SCLEX_BATCH ),
+]
+
+class BuiltinSourceFormatter(FormatterBase):
+	# A class that represents a formatter built-in to Scintilla
+	def __init__(self, scintilla, ext):
+		self.ext = ext
+		FormatterBase.__init__(self, scintilla)
+
+	def Colorize(self, start=0, end=-1):
+		self.scintilla.SendScintilla(scintillacon.SCI_COLOURISE, start, end)
+	def RegisterStyle(self, style, stylenum = None):
+		assert style.stylenum is None, "Style has already been registered"
+		if stylenum is None:
+			stylenum = self.nextstylenum
+			self.nextstylenum = self.nextstylenum + 1
+		assert self.styles.get(stylenum) is None, "We are reusing a style number!"
+		style.stylenum = stylenum
+		self.styles[style.name] = style
+		self.styles_by_id[stylenum] = style
+
+	def HookFormatter(self, parent = None):
+		sc = self.scintilla
+		for exts, formatter in source_formatter_extensions:
+			if self.ext in exts:
+				formatter_use = formatter
+				break
+		else:
+			formatter_use = scintillacon.SCLEX_PYTHON
+		sc.SendScintilla(scintillacon.SCI_SETLEXER, formatter_use)
+		keywords = string.join(kwlist)
+		sc.SCISetKeywords(keywords)
+
+class BuiltinPythonSourceFormatter(BuiltinSourceFormatter):
+	sci_lexer_name = scintillacon.SCLEX_PYTHON
+	string_style_names = STRING_STYLES
+	def __init__(self, sc, ext = ".py"):
+		BuiltinSourceFormatter.__init__(self, sc, ext)
+	def SetStyles(self):
+		for name, format, bg, sc_id in PYTHON_STYLES:
+			self.RegisterStyle( Style(name, format, bg), sc_id )
+		for name, format, bg, sc_id in SPECIAL_STYLES:
+			self.RegisterStyle( Style(name, format, bg), sc_id )
+	def GetSampleText(self):
+		return PythonSampleCode
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/keycodes.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/keycodes.py
new file mode 100644
index 0000000..1300b52
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/keycodes.py
@@ -0,0 +1,160 @@
+import string
+import win32con
+
+# Each entry maps every character in the string to a scan code computed as
+# ord(char) + offset; _fillmap() expands this table into the dicts below.
+# NOTE(review): the offsets look tuned for a US keyboard layout - confirm
+# before relying on them for other layouts.
+char_ranges = [
+    (string.lowercase, -32),
+    (string.digits, 0),
+    ("?><:[]\\", 128),
+    (";", 127),
+    ("=", 126),
+    ("/.,", 144),
+    ("`{}|", 96),
+    ("_", 94),
+    ("-+", 144),
+    ("'", 183),
+    ('"', 188),
+    ("~", 66),
+    ("!",16),
+    ("#$%&", 18),
+    ("()", 17),
+]
+
+# Bidirectional maps between friendly key names and scan codes,
+# populated once at import time by _fillmap() below.
+key_name_to_code = {}
+key_code_to_name = {}
+
+_better_names = [
+    ("esc", win32con.VK_ESCAPE),
+    ("enter", win32con.VK_RETURN),
+    ("pgup", win32con.VK_BACK),
+    ("pgdn", win32con.VK_NEXT),
+]
+def _fillmap():
+    # Pull the VK_names from win32con
+    names = filter(lambda entry: entry[:3]=="VK_", win32con.__dict__.keys())
+    for name in names:
+        n = string.lower(name[3:])
+        val = getattr(win32con, name)
+        key_name_to_code[n] = val
+        key_code_to_name[val] = n
+    # Some better named we know about
+    for name, code in _better_names:
+        key_name_to_code[name] = code
+        key_code_to_name[code] = name
+    # And the char_ranges map above
+    for chars, offset in char_ranges:
+        for char in chars:
+            key_name_to_code[char] = ord(char)+offset
+            key_code_to_name[ord(char)+offset] = char
+
+_fillmap()
+
+def get_scan_code(chardesc):
+    return key_name_to_code.get(string.lower(chardesc))
+
+# Modifier-name -> console dwControlKeyState bits (see win32con *_PRESSED).
+# Several spellings map to the same bits; "key" is accepted and ignored so
+# descriptions like "key+x" parse cleanly.
+modifiers = {
+    "alt" : win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED,
+    "lalt" : win32con.LEFT_ALT_PRESSED,
+    "ralt" : win32con.RIGHT_ALT_PRESSED,
+    "ctrl" : win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED,
+    "ctl" : win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED,
+    "control" : win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED,
+    "lctrl" : win32con.LEFT_CTRL_PRESSED,
+    "lctl" : win32con.LEFT_CTRL_PRESSED,
+    "rctrl" : win32con.RIGHT_CTRL_PRESSED,
+    "rctl" : win32con.RIGHT_CTRL_PRESSED,
+    "shift" : win32con.SHIFT_PRESSED,
+    "key" : 0, # ignore key tag.
+}
+
+def parse_key_name(name):
+    name = name + "-" # Add a sentinal
+    start = pos = 0
+    max = len(name)
+    flags = 0
+    scancode = None
+    while pos<max:
+        if name[pos] in "+-":
+            tok = string.lower(name[start:pos])
+            mod = modifiers.get(tok)
+            if mod is None:
+                # Its a key name
+                scancode = get_scan_code(tok)
+            else:
+                flags = flags | mod
+            pos = pos + 1 # skip the sep
+            start = pos
+        pos = pos + 1
+    return scancode, flags
+
+# Modifier groups consulted by make_key_name.  Within each group the most
+# general flag combination comes first, so e.g. a left-Ctrl press renders
+# as plain "Ctrl"; only one name per group is emitted (first match wins).
+_checks = [
+    [ # Shift
+    ("Shift", win32con.SHIFT_PRESSED),
+    ],
+    [ # Ctrl key
+    ("Ctrl", win32con.LEFT_CTRL_PRESSED | win32con.RIGHT_CTRL_PRESSED),
+    ("LCtrl", win32con.LEFT_CTRL_PRESSED),
+    ("RCtrl", win32con.RIGHT_CTRL_PRESSED),
+    ],
+    [ # Alt key
+    ("Alt", win32con.LEFT_ALT_PRESSED | win32con.RIGHT_ALT_PRESSED),
+    ("LAlt", win32con.LEFT_ALT_PRESSED),
+    ("RAlt", win32con.RIGHT_ALT_PRESSED),
+    ],
+]
+
+def make_key_name(scancode, flags):
+    # Check alt keys.
+    flags_done = 0
+    parts = []
+    for moddata in _checks:
+        for name, checkflag in moddata:
+            if flags & checkflag:
+                parts.append(name)
+                flags_done = flags_done & checkflag
+                break
+    if flags_done & flags:
+        parts.append(hex( flags & ~flags_done ) )
+    # Now the key name.
+    try:
+        parts.append(key_code_to_name[scancode])
+    except KeyError:
+        parts.append( "<Unknown scan code %s>" % scancode )
+    sep = "+"
+    if sep in parts: sep = "-"
+    return string.join(map(string.capitalize, parts), sep)
+
+def _psc(char):
+    sc = get_scan_code(char)
+    print "Char %s -> %d -> %s" % (`char`, sc, key_code_to_name.get(sc))
+
+def test1():
+    for ch in """aA0/?[{}];:'"`~_-+=\\|,<.>/?""":
+        _psc(ch)
+    for code in ["Home", "End", "Left", "Right", "Up", "Down", "Menu", "Next"]:
+        _psc(code)
+
+def _pkn(n):
+    scancode, flags = parse_key_name(n)
+    print "%s -> %s,%s -> %s" % (n, scancode, flags, make_key_name(scancode, flags))
+
+def test2():
+    _pkn("ctrl+alt-shift+x")
+    _pkn("ctrl-home")
+    _pkn("Shift-+")
+    _pkn("Shift--")
+    _pkn("Shift+-")
+    _pkn("Shift++")
+    _pkn("LShift-+")
+    _pkn("ctl+home")
+    _pkn("ctl+enter")
+    _pkn("alt+return")
+    _pkn("Alt+/")
+    _pkn("Alt+BadKeyName")
+    _pkn("A")
+    _pkn("(")
+    _pkn("Ctrl+(")
+    _pkn("{")
+    _pkn("!")
+
+# Run the round-trip self test when executed directly.
+if __name__=='__main__':
+    test2()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/scintillacon.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/scintillacon.py
new file mode 100644
index 0000000..042563938
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/scintillacon.py
@@ -0,0 +1,1229 @@
+# Generated by h2py from Include\scintilla.h
+
+# Included from BaseTsd.h
+def HandleToUlong(h): return HandleToULong(h)
+
+def UlongToHandle(ul): return ULongToHandle(ul)
+
+def UlongToPtr(ul): return ULongToPtr(ul)
+
+def UintToPtr(ui): return UIntToPtr(ui)
+
+INVALID_POSITION = -1
+SCI_START = 2000
+SCI_OPTIONAL_START = 3000
+SCI_LEXER_START = 4000
+SCI_ADDTEXT = 2001
+SCI_ADDSTYLEDTEXT = 2002
+SCI_INSERTTEXT = 2003
+SCI_CLEARALL = 2004
+SCI_CLEARDOCUMENTSTYLE = 2005
+SCI_GETLENGTH = 2006
+SCI_GETCHARAT = 2007
+SCI_GETCURRENTPOS = 2008
+SCI_GETANCHOR = 2009
+SCI_GETSTYLEAT = 2010
+SCI_REDO = 2011
+SCI_SETUNDOCOLLECTION = 2012
+SCI_SELECTALL = 2013
+SCI_SETSAVEPOINT = 2014
+SCI_GETSTYLEDTEXT = 2015
+SCI_CANREDO = 2016
+SCI_MARKERLINEFROMHANDLE = 2017
+SCI_MARKERDELETEHANDLE = 2018
+SCI_GETUNDOCOLLECTION = 2019
+SCWS_INVISIBLE = 0
+SCWS_VISIBLEALWAYS = 1
+SCWS_VISIBLEAFTERINDENT = 2
+SCI_GETVIEWWS = 2020
+SCI_SETVIEWWS = 2021
+SCI_POSITIONFROMPOINT = 2022
+SCI_POSITIONFROMPOINTCLOSE = 2023
+SCI_GOTOLINE = 2024
+SCI_GOTOPOS = 2025
+SCI_SETANCHOR = 2026
+SCI_GETCURLINE = 2027
+SCI_GETENDSTYLED = 2028
+SC_EOL_CRLF = 0
+SC_EOL_CR = 1
+SC_EOL_LF = 2
+SCI_CONVERTEOLS = 2029
+SCI_GETEOLMODE = 2030
+SCI_SETEOLMODE = 2031
+SCI_STARTSTYLING = 2032
+SCI_SETSTYLING = 2033
+SCI_GETBUFFEREDDRAW = 2034
+SCI_SETBUFFEREDDRAW = 2035
+SCI_SETTABWIDTH = 2036
+SCI_GETTABWIDTH = 2121
+SC_CP_UTF8 = 65001
+SC_CP_DBCS = 1
+SCI_SETCODEPAGE = 2037
+SCI_SETUSEPALETTE = 2039
+MARKER_MAX = 31
+SC_MARK_CIRCLE = 0
+SC_MARK_ROUNDRECT = 1
+SC_MARK_ARROW = 2
+SC_MARK_SMALLRECT = 3
+SC_MARK_SHORTARROW = 4
+SC_MARK_EMPTY = 5
+SC_MARK_ARROWDOWN = 6
+SC_MARK_MINUS = 7
+SC_MARK_PLUS = 8
+SC_MARK_VLINE = 9
+SC_MARK_LCORNER = 10
+SC_MARK_TCORNER = 11
+SC_MARK_BOXPLUS = 12
+SC_MARK_BOXPLUSCONNECTED = 13
+SC_MARK_BOXMINUS = 14
+SC_MARK_BOXMINUSCONNECTED = 15
+SC_MARK_LCORNERCURVE = 16
+SC_MARK_TCORNERCURVE = 17
+SC_MARK_CIRCLEPLUS = 18
+SC_MARK_CIRCLEPLUSCONNECTED = 19
+SC_MARK_CIRCLEMINUS = 20
+SC_MARK_CIRCLEMINUSCONNECTED = 21
+SC_MARK_BACKGROUND = 22
+SC_MARK_DOTDOTDOT = 23
+SC_MARK_ARROWS = 24
+SC_MARK_PIXMAP = 25
+SC_MARK_CHARACTER = 10000
+SC_MARKNUM_FOLDEREND = 25
+SC_MARKNUM_FOLDEROPENMID = 26
+SC_MARKNUM_FOLDERMIDTAIL = 27
+SC_MARKNUM_FOLDERTAIL = 28
+SC_MARKNUM_FOLDERSUB = 29
+SC_MARKNUM_FOLDER = 30
+SC_MARKNUM_FOLDEROPEN = 31
+SC_MASK_FOLDERS = (-33554432)
+SCI_MARKERDEFINE = 2040
+SCI_MARKERSETFORE = 2041
+SCI_MARKERSETBACK = 2042
+SCI_MARKERADD = 2043
+SCI_MARKERDELETE = 2044
+SCI_MARKERDELETEALL = 2045
+SCI_MARKERGET = 2046
+SCI_MARKERNEXT = 2047
+SCI_MARKERPREVIOUS = 2048
+SCI_MARKERDEFINEPIXMAP = 2049
+SC_MARGIN_SYMBOL = 0
+SC_MARGIN_NUMBER = 1
+SCI_SETMARGINTYPEN = 2240
+SCI_GETMARGINTYPEN = 2241
+SCI_SETMARGINWIDTHN = 2242
+SCI_GETMARGINWIDTHN = 2243
+SCI_SETMARGINMASKN = 2244
+SCI_GETMARGINMASKN = 2245
+SCI_SETMARGINSENSITIVEN = 2246
+SCI_GETMARGINSENSITIVEN = 2247
+STYLE_DEFAULT = 32
+STYLE_LINENUMBER = 33
+STYLE_BRACELIGHT = 34
+STYLE_BRACEBAD = 35
+STYLE_CONTROLCHAR = 36
+STYLE_INDENTGUIDE = 37
+STYLE_LASTPREDEFINED = 39
+STYLE_MAX = 127
+SC_CHARSET_ANSI = 0
+SC_CHARSET_DEFAULT = 1
+SC_CHARSET_BALTIC = 186
+SC_CHARSET_CHINESEBIG5 = 136
+SC_CHARSET_EASTEUROPE = 238
+SC_CHARSET_GB2312 = 134
+SC_CHARSET_GREEK = 161
+SC_CHARSET_HANGUL = 129
+SC_CHARSET_MAC = 77
+SC_CHARSET_OEM = 255
+SC_CHARSET_RUSSIAN = 204
+SC_CHARSET_SHIFTJIS = 128
+SC_CHARSET_SYMBOL = 2
+SC_CHARSET_TURKISH = 162
+SC_CHARSET_JOHAB = 130
+SC_CHARSET_HEBREW = 177
+SC_CHARSET_ARABIC = 178
+SC_CHARSET_VIETNAMESE = 163
+SC_CHARSET_THAI = 222
+SCI_STYLECLEARALL = 2050
+SCI_STYLESETFORE = 2051
+SCI_STYLESETBACK = 2052
+SCI_STYLESETBOLD = 2053
+SCI_STYLESETITALIC = 2054
+SCI_STYLESETSIZE = 2055
+SCI_STYLESETFONT = 2056
+SCI_STYLESETEOLFILLED = 2057
+SCI_STYLERESETDEFAULT = 2058
+SCI_STYLESETUNDERLINE = 2059
+SC_CASE_MIXED = 0
+SC_CASE_UPPER = 1
+SC_CASE_LOWER = 2
+SCI_STYLESETCASE = 2060
+SCI_STYLESETCHARACTERSET = 2066
+SCI_STYLESETHOTSPOT = 2409
+SCI_SETSELFORE = 2067
+SCI_SETSELBACK = 2068
+SCI_SETCARETFORE = 2069
+SCI_ASSIGNCMDKEY = 2070
+SCI_CLEARCMDKEY = 2071
+SCI_CLEARALLCMDKEYS = 2072
+SCI_SETSTYLINGEX = 2073
+SCI_STYLESETVISIBLE = 2074
+SCI_GETCARETPERIOD = 2075
+SCI_SETCARETPERIOD = 2076
+SCI_SETWORDCHARS = 2077
+SCI_BEGINUNDOACTION = 2078
+SCI_ENDUNDOACTION = 2079
+INDIC_MAX = 7
+INDIC_PLAIN = 0
+INDIC_SQUIGGLE = 1
+INDIC_TT = 2
+INDIC_DIAGONAL = 3
+INDIC_STRIKE = 4
+INDIC_HIDDEN = 5
+INDIC_BOX = 6
+INDIC0_MASK = 0x20
+INDIC1_MASK = 0x40
+INDIC2_MASK = 0x80
+INDICS_MASK = 0xE0
+SCI_INDICSETSTYLE = 2080
+SCI_INDICGETSTYLE = 2081
+SCI_INDICSETFORE = 2082
+SCI_INDICGETFORE = 2083
+SCI_SETWHITESPACEFORE = 2084
+SCI_SETWHITESPACEBACK = 2085
+SCI_SETSTYLEBITS = 2090
+SCI_GETSTYLEBITS = 2091
+SCI_SETLINESTATE = 2092
+SCI_GETLINESTATE = 2093
+SCI_GETMAXLINESTATE = 2094
+SCI_GETCARETLINEVISIBLE = 2095
+SCI_SETCARETLINEVISIBLE = 2096
+SCI_GETCARETLINEBACK = 2097
+SCI_SETCARETLINEBACK = 2098
+SCI_STYLESETCHANGEABLE = 2099
+SCI_AUTOCSHOW = 2100
+SCI_AUTOCCANCEL = 2101
+SCI_AUTOCACTIVE = 2102
+SCI_AUTOCPOSSTART = 2103
+SCI_AUTOCCOMPLETE = 2104
+SCI_AUTOCSTOPS = 2105
+SCI_AUTOCSETSEPARATOR = 2106
+SCI_AUTOCGETSEPARATOR = 2107
+SCI_AUTOCSELECT = 2108
+SCI_AUTOCSETCANCELATSTART = 2110
+SCI_AUTOCGETCANCELATSTART = 2111
+SCI_AUTOCSETFILLUPS = 2112
+SCI_AUTOCSETCHOOSESINGLE = 2113
+SCI_AUTOCGETCHOOSESINGLE = 2114
+SCI_AUTOCSETIGNORECASE = 2115
+SCI_AUTOCGETIGNORECASE = 2116
+SCI_USERLISTSHOW = 2117
+SCI_AUTOCSETAUTOHIDE = 2118
+SCI_AUTOCGETAUTOHIDE = 2119
+SCI_AUTOCSETDROPRESTOFWORD = 2270
+SCI_AUTOCGETDROPRESTOFWORD = 2271
+SCI_REGISTERIMAGE = 2405
+SCI_CLEARREGISTEREDIMAGES = 2408
+SCI_AUTOCGETTYPESEPARATOR = 2285
+SCI_AUTOCSETTYPESEPARATOR = 2286
+SCI_SETINDENT = 2122
+SCI_GETINDENT = 2123
+SCI_SETUSETABS = 2124
+SCI_GETUSETABS = 2125
+SCI_SETLINEINDENTATION = 2126
+SCI_GETLINEINDENTATION = 2127
+SCI_GETLINEINDENTPOSITION = 2128
+SCI_GETCOLUMN = 2129
+SCI_SETHSCROLLBAR = 2130
+SCI_GETHSCROLLBAR = 2131
+SCI_SETINDENTATIONGUIDES = 2132
+SCI_GETINDENTATIONGUIDES = 2133
+SCI_SETHIGHLIGHTGUIDE = 2134
+SCI_GETHIGHLIGHTGUIDE = 2135
+SCI_GETLINEENDPOSITION = 2136
+SCI_GETCODEPAGE = 2137
+SCI_GETCARETFORE = 2138
+SCI_GETUSEPALETTE = 2139
+SCI_GETREADONLY = 2140
+SCI_SETCURRENTPOS = 2141
+SCI_SETSELECTIONSTART = 2142
+SCI_GETSELECTIONSTART = 2143
+SCI_SETSELECTIONEND = 2144
+SCI_GETSELECTIONEND = 2145
+SCI_SETPRINTMAGNIFICATION = 2146
+SCI_GETPRINTMAGNIFICATION = 2147
+SC_PRINT_NORMAL = 0
+SC_PRINT_INVERTLIGHT = 1
+SC_PRINT_BLACKONWHITE = 2
+SC_PRINT_COLOURONWHITE = 3
+SC_PRINT_COLOURONWHITEDEFAULTBG = 4
+SCI_SETPRINTCOLOURMODE = 2148
+SCI_GETPRINTCOLOURMODE = 2149
+SCFIND_WHOLEWORD = 2
+SCFIND_MATCHCASE = 4
+SCFIND_WORDSTART = 0x00100000
+SCFIND_REGEXP = 0x00200000
+SCFIND_POSIX = 0x00400000
+SCI_FINDTEXT = 2150
+SCI_FORMATRANGE = 2151
+SCI_GETFIRSTVISIBLELINE = 2152
+SCI_GETLINE = 2153
+SCI_GETLINECOUNT = 2154
+SCI_SETMARGINLEFT = 2155
+SCI_GETMARGINLEFT = 2156
+SCI_SETMARGINRIGHT = 2157
+SCI_GETMARGINRIGHT = 2158
+SCI_GETMODIFY = 2159
+SCI_SETSEL = 2160
+SCI_GETSELTEXT = 2161
+SCI_GETTEXTRANGE = 2162
+SCI_HIDESELECTION = 2163
+SCI_POINTXFROMPOSITION = 2164
+SCI_POINTYFROMPOSITION = 2165
+SCI_LINEFROMPOSITION = 2166
+SCI_POSITIONFROMLINE = 2167
+SCI_LINESCROLL = 2168
+SCI_SCROLLCARET = 2169
+SCI_REPLACESEL = 2170
+SCI_SETREADONLY = 2171
+SCI_NULL = 2172
+SCI_CANPASTE = 2173
+SCI_CANUNDO = 2174
+SCI_EMPTYUNDOBUFFER = 2175
+SCI_UNDO = 2176
+SCI_CUT = 2177
+SCI_COPY = 2178
+SCI_PASTE = 2179
+SCI_CLEAR = 2180
+SCI_SETTEXT = 2181
+SCI_GETTEXT = 2182
+SCI_GETTEXTLENGTH = 2183
+SCI_GETDIRECTFUNCTION = 2184
+SCI_GETDIRECTPOINTER = 2185
+SCI_SETOVERTYPE = 2186
+SCI_GETOVERTYPE = 2187
+SCI_SETCARETWIDTH = 2188
+SCI_GETCARETWIDTH = 2189
+SCI_SETTARGETSTART = 2190
+SCI_GETTARGETSTART = 2191
+SCI_SETTARGETEND = 2192
+SCI_GETTARGETEND = 2193
+SCI_REPLACETARGET = 2194
+SCI_REPLACETARGETRE = 2195
+SCI_SEARCHINTARGET = 2197
+SCI_SETSEARCHFLAGS = 2198
+SCI_GETSEARCHFLAGS = 2199
+SCI_CALLTIPSHOW = 2200
+SCI_CALLTIPCANCEL = 2201
+SCI_CALLTIPACTIVE = 2202
+SCI_CALLTIPPOSSTART = 2203
+SCI_CALLTIPSETHLT = 2204
+SCI_CALLTIPSETBACK = 2205
+SCI_CALLTIPSETFORE = 2206
+SCI_CALLTIPSETFOREHLT = 2207
+SCI_VISIBLEFROMDOCLINE = 2220
+SCI_DOCLINEFROMVISIBLE = 2221
+SC_FOLDLEVELBASE = 0x400
+SC_FOLDLEVELWHITEFLAG = 0x1000
+SC_FOLDLEVELHEADERFLAG = 0x2000
+SC_FOLDLEVELBOXHEADERFLAG = 0x4000
+SC_FOLDLEVELBOXFOOTERFLAG = 0x8000
+SC_FOLDLEVELCONTRACTED = 0x10000
+SC_FOLDLEVELUNINDENT = 0x20000
+SC_FOLDLEVELNUMBERMASK = 0x0FFF
+SCI_SETFOLDLEVEL = 2222
+SCI_GETFOLDLEVEL = 2223
+SCI_GETLASTCHILD = 2224
+SCI_GETFOLDPARENT = 2225
+SCI_SHOWLINES = 2226
+SCI_HIDELINES = 2227
+SCI_GETLINEVISIBLE = 2228
+SCI_SETFOLDEXPANDED = 2229
+SCI_GETFOLDEXPANDED = 2230
+SCI_TOGGLEFOLD = 2231
+SCI_ENSUREVISIBLE = 2232
+SC_FOLDFLAG_LINEBEFORE_EXPANDED = 0x0002
+SC_FOLDFLAG_LINEBEFORE_CONTRACTED = 0x0004
+SC_FOLDFLAG_LINEAFTER_EXPANDED = 0x0008
+SC_FOLDFLAG_LINEAFTER_CONTRACTED = 0x0010
+SC_FOLDFLAG_LEVELNUMBERS = 0x0040
+SC_FOLDFLAG_BOX = 0x0001
+SCI_SETFOLDFLAGS = 2233
+SCI_ENSUREVISIBLEENFORCEPOLICY = 2234
+SCI_SETTABINDENTS = 2260
+SCI_GETTABINDENTS = 2261
+SCI_SETBACKSPACEUNINDENTS = 2262
+SCI_GETBACKSPACEUNINDENTS = 2263
+SC_TIME_FOREVER = 10000000
+SCI_SETMOUSEDWELLTIME = 2264
+SCI_GETMOUSEDWELLTIME = 2265
+SCI_WORDSTARTPOSITION = 2266
+SCI_WORDENDPOSITION = 2267
+SC_WRAP_NONE = 0
+SC_WRAP_WORD = 1
+SCI_SETWRAPMODE = 2268
+SCI_GETWRAPMODE = 2269
+SC_CACHE_NONE = 0
+SC_CACHE_CARET = 1
+SC_CACHE_PAGE = 2
+SC_CACHE_DOCUMENT = 3
+SCI_SETLAYOUTCACHE = 2272
+SCI_GETLAYOUTCACHE = 2273
+SCI_SETSCROLLWIDTH = 2274
+SCI_GETSCROLLWIDTH = 2275
+SCI_TEXTWIDTH = 2276
+SCI_SETENDATLASTLINE = 2277
+SCI_GETENDATLASTLINE = 2278
+SCI_TEXTHEIGHT = 2279
+SCI_SETVSCROLLBAR = 2280
+SCI_GETVSCROLLBAR = 2281
+SCI_APPENDTEXT = 2282
+SCI_GETTWOPHASEDRAW = 2283
+SCI_SETTWOPHASEDRAW = 2284
+SCI_TARGETFROMSELECTION = 2287
+SCI_LINESJOIN = 2288
+SCI_LINESSPLIT = 2289
+SCI_SETFOLDMARGINCOLOUR = 2290
+SCI_SETFOLDMARGINHICOLOUR = 2291
+SCI_LINEDOWN = 2300
+SCI_LINEDOWNEXTEND = 2301
+SCI_LINEUP = 2302
+SCI_LINEUPEXTEND = 2303
+SCI_CHARLEFT = 2304
+SCI_CHARLEFTEXTEND = 2305
+SCI_CHARRIGHT = 2306
+SCI_CHARRIGHTEXTEND = 2307
+SCI_WORDLEFT = 2308
+SCI_WORDLEFTEXTEND = 2309
+SCI_WORDRIGHT = 2310
+SCI_WORDRIGHTEXTEND = 2311
+SCI_HOME = 2312
+SCI_HOMEEXTEND = 2313
+SCI_LINEEND = 2314
+SCI_LINEENDEXTEND = 2315
+SCI_DOCUMENTSTART = 2316
+SCI_DOCUMENTSTARTEXTEND = 2317
+SCI_DOCUMENTEND = 2318
+SCI_DOCUMENTENDEXTEND = 2319
+SCI_PAGEUP = 2320
+SCI_PAGEUPEXTEND = 2321
+SCI_PAGEDOWN = 2322
+SCI_PAGEDOWNEXTEND = 2323
+SCI_EDITTOGGLEOVERTYPE = 2324
+SCI_CANCEL = 2325
+SCI_DELETEBACK = 2326
+SCI_TAB = 2327
+SCI_BACKTAB = 2328
+SCI_NEWLINE = 2329
+SCI_FORMFEED = 2330
+SCI_VCHOME = 2331
+SCI_VCHOMEEXTEND = 2332
+SCI_ZOOMIN = 2333
+SCI_ZOOMOUT = 2334
+SCI_DELWORDLEFT = 2335
+SCI_DELWORDRIGHT = 2336
+SCI_LINECUT = 2337
+SCI_LINEDELETE = 2338
+SCI_LINETRANSPOSE = 2339
+SCI_LINEDUPLICATE = 2404
+SCI_LOWERCASE = 2340
+SCI_UPPERCASE = 2341
+SCI_LINESCROLLDOWN = 2342
+SCI_LINESCROLLUP = 2343
+SCI_DELETEBACKNOTLINE = 2344
+SCI_HOMEDISPLAY = 2345
+SCI_HOMEDISPLAYEXTEND = 2346
+SCI_LINEENDDISPLAY = 2347
+SCI_LINEENDDISPLAYEXTEND = 2348
+SCI_HOMEWRAP = 2349
+SCI_HOMEWRAPEXTEND = 2450
+SCI_LINEENDWRAP = 2451
+SCI_LINEENDWRAPEXTEND = 2452
+SCI_VCHOMEWRAP = 2453
+SCI_VCHOMEWRAPEXTEND = 2454
+SCI_LINECOPY = 2455
+SCI_MOVECARETINSIDEVIEW = 2401
+SCI_LINELENGTH = 2350
+SCI_BRACEHIGHLIGHT = 2351
+SCI_BRACEBADLIGHT = 2352
+SCI_BRACEMATCH = 2353
+SCI_GETVIEWEOL = 2355
+SCI_SETVIEWEOL = 2356
+SCI_GETDOCPOINTER = 2357
+SCI_SETDOCPOINTER = 2358
+SCI_SETMODEVENTMASK = 2359
+EDGE_NONE = 0
+EDGE_LINE = 1
+EDGE_BACKGROUND = 2
+SCI_GETEDGECOLUMN = 2360
+SCI_SETEDGECOLUMN = 2361
+SCI_GETEDGEMODE = 2362
+SCI_SETEDGEMODE = 2363
+SCI_GETEDGECOLOUR = 2364
+SCI_SETEDGECOLOUR = 2365
+SCI_SEARCHANCHOR = 2366
+SCI_SEARCHNEXT = 2367
+SCI_SEARCHPREV = 2368
+SCI_LINESONSCREEN = 2370
+SCI_USEPOPUP = 2371
+SCI_SELECTIONISRECTANGLE = 2372
+SCI_SETZOOM = 2373
+SCI_GETZOOM = 2374
+SCI_CREATEDOCUMENT = 2375
+SCI_ADDREFDOCUMENT = 2376
+SCI_RELEASEDOCUMENT = 2377
+SCI_GETMODEVENTMASK = 2378
+SCI_SETFOCUS = 2380
+SCI_GETFOCUS = 2381
+SCI_SETSTATUS = 2382
+SCI_GETSTATUS = 2383
+SCI_SETMOUSEDOWNCAPTURES = 2384
+SCI_GETMOUSEDOWNCAPTURES = 2385
+SC_CURSORNORMAL = -1
+SC_CURSORWAIT = 4
+SCI_SETCURSOR = 2386
+SCI_GETCURSOR = 2387
+SCI_SETCONTROLCHARSYMBOL = 2388
+SCI_GETCONTROLCHARSYMBOL = 2389
+SCI_WORDPARTLEFT = 2390
+SCI_WORDPARTLEFTEXTEND = 2391
+SCI_WORDPARTRIGHT = 2392
+SCI_WORDPARTRIGHTEXTEND = 2393
+VISIBLE_SLOP = 0x01
+VISIBLE_STRICT = 0x04
+SCI_SETVISIBLEPOLICY = 2394
+SCI_DELLINELEFT = 2395
+SCI_DELLINERIGHT = 2396
+SCI_SETXOFFSET = 2397
+SCI_GETXOFFSET = 2398
+SCI_CHOOSECARETX = 2399
+SCI_GRABFOCUS = 2400
+CARET_SLOP = 0x01
+CARET_STRICT = 0x04
+CARET_JUMPS = 0x10
+CARET_EVEN = 0x08
+SCI_SETXCARETPOLICY = 2402
+SCI_SETYCARETPOLICY = 2403
+SCI_SETPRINTWRAPMODE = 2406
+SCI_GETPRINTWRAPMODE = 2407
+SCI_SETHOTSPOTACTIVEFORE = 2410
+SCI_SETHOTSPOTACTIVEBACK = 2411
+SCI_SETHOTSPOTACTIVEUNDERLINE = 2412
+SCI_SETHOTSPOTSINGLELINE = 2421
+SCI_PARADOWN = 2413
+SCI_PARADOWNEXTEND = 2414
+SCI_PARAUP = 2415
+SCI_PARAUPEXTEND = 2416
+SCI_POSITIONBEFORE = 2417
+SCI_POSITIONAFTER = 2418
+SCI_COPYRANGE = 2419
+SCI_COPYTEXT = 2420
+SC_SEL_STREAM = 0
+SC_SEL_RECTANGLE = 1
+SC_SEL_LINES = 2
+SCI_SETSELECTIONMODE = 2422
+SCI_GETSELECTIONMODE = 2423
+SCI_GETLINESELSTARTPOSITION = 2424
+SCI_GETLINESELENDPOSITION = 2425
+SCI_LINEDOWNRECTEXTEND = 2426
+SCI_LINEUPRECTEXTEND = 2427
+SCI_CHARLEFTRECTEXTEND = 2428
+SCI_CHARRIGHTRECTEXTEND = 2429
+SCI_HOMERECTEXTEND = 2430
+SCI_VCHOMERECTEXTEND = 2431
+SCI_LINEENDRECTEXTEND = 2432
+SCI_PAGEUPRECTEXTEND = 2433
+SCI_PAGEDOWNRECTEXTEND = 2434
+SCI_STUTTEREDPAGEUP = 2435
+SCI_STUTTEREDPAGEUPEXTEND = 2436
+SCI_STUTTEREDPAGEDOWN = 2437
+SCI_STUTTEREDPAGEDOWNEXTEND = 2438
+SCI_WORDLEFTEND = 2439
+SCI_WORDLEFTENDEXTEND = 2440
+SCI_WORDRIGHTEND = 2441
+SCI_WORDRIGHTENDEXTEND = 2442
+SCI_SETWHITESPACECHARS = 2443
+SCI_SETCHARSDEFAULT = 2444
+SCI_STARTRECORD = 3001
+SCI_STOPRECORD = 3002
+SCI_SETLEXER = 4001
+SCI_GETLEXER = 4002
+SCI_COLOURISE = 4003
+SCI_SETPROPERTY = 4004
+KEYWORDSET_MAX = 8
+SCI_SETKEYWORDS = 4005
+SCI_SETLEXERLANGUAGE = 4006
+SCI_LOADLEXERLIBRARY = 4007
+SC_MOD_INSERTTEXT = 0x1
+SC_MOD_DELETETEXT = 0x2
+SC_MOD_CHANGESTYLE = 0x4
+SC_MOD_CHANGEFOLD = 0x8
+SC_PERFORMED_USER = 0x10
+SC_PERFORMED_UNDO = 0x20
+SC_PERFORMED_REDO = 0x40
+SC_LASTSTEPINUNDOREDO = 0x100
+SC_MOD_CHANGEMARKER = 0x200
+SC_MOD_BEFOREINSERT = 0x400
+SC_MOD_BEFOREDELETE = 0x800
+SC_MODEVENTMASKALL = 0xF77
+SCEN_CHANGE = 768
+SCEN_SETFOCUS = 512
+SCEN_KILLFOCUS = 256
+SCK_DOWN = 300
+SCK_UP = 301
+SCK_LEFT = 302
+SCK_RIGHT = 303
+SCK_HOME = 304
+SCK_END = 305
+SCK_PRIOR = 306
+SCK_NEXT = 307
+SCK_DELETE = 308
+SCK_INSERT = 309
+SCK_ESCAPE = 7
+SCK_BACK = 8
+SCK_TAB = 9
+SCK_RETURN = 13
+SCK_ADD = 310
+SCK_SUBTRACT = 311
+SCK_DIVIDE = 312
+SCMOD_SHIFT = 1
+SCMOD_CTRL = 2
+SCMOD_ALT = 4
+SCN_STYLENEEDED = 2000
+SCN_CHARADDED = 2001
+SCN_SAVEPOINTREACHED = 2002
+SCN_SAVEPOINTLEFT = 2003
+SCN_MODIFYATTEMPTRO = 2004
+SCN_KEY = 2005
+SCN_DOUBLECLICK = 2006
+SCN_UPDATEUI = 2007
+SCN_MODIFIED = 2008
+SCN_MACRORECORD = 2009
+SCN_MARGINCLICK = 2010
+SCN_NEEDSHOWN = 2011
+SCN_PAINTED = 2013
+SCN_USERLISTSELECTION = 2014
+SCN_URIDROPPED = 2015
+SCN_DWELLSTART = 2016
+SCN_DWELLEND = 2017
+SCN_ZOOM = 2018
+SCN_HOTSPOTCLICK = 2019
+SCN_HOTSPOTDOUBLECLICK = 2020
+SCN_CALLTIPCLICK = 2021
+SCI_SETCARETPOLICY = 2369
+CARET_CENTER = 0x02
+CARET_XEVEN = 0x08
+CARET_XJUMPS = 0x10
+SCN_POSCHANGED = 2012
+SCN_CHECKBRACE = 2007
+# Generated by h2py from Include\scilexer.h
+SCLEX_CONTAINER = 0
+SCLEX_NULL = 1
+SCLEX_PYTHON = 2
+SCLEX_CPP = 3
+SCLEX_HTML = 4
+SCLEX_XML = 5
+SCLEX_PERL = 6
+SCLEX_SQL = 7
+SCLEX_VB = 8
+SCLEX_PROPERTIES = 9
+SCLEX_ERRORLIST = 10
+SCLEX_MAKEFILE = 11
+SCLEX_BATCH = 12
+SCLEX_XCODE = 13
+SCLEX_LATEX = 14
+SCLEX_LUA = 15
+SCLEX_DIFF = 16
+SCLEX_CONF = 17
+SCLEX_PASCAL = 18
+SCLEX_AVE = 19
+SCLEX_ADA = 20
+SCLEX_LISP = 21
+SCLEX_RUBY = 22
+SCLEX_EIFFEL = 23
+SCLEX_EIFFELKW = 24
+SCLEX_TCL = 25
+SCLEX_NNCRONTAB = 26
+SCLEX_BULLANT = 27
+SCLEX_VBSCRIPT = 28
+SCLEX_ASP = 29
+SCLEX_PHP = 30
+SCLEX_BAAN = 31
+SCLEX_MATLAB = 32
+SCLEX_SCRIPTOL = 33
+SCLEX_ASM = 34
+SCLEX_CPPNOCASE = 35
+SCLEX_FORTRAN = 36
+SCLEX_F77 = 37
+SCLEX_CSS = 38
+SCLEX_POV = 39
+SCLEX_LOUT = 40
+SCLEX_ESCRIPT = 41
+SCLEX_PS = 42
+SCLEX_NSIS = 43
+SCLEX_MMIXAL = 44
+SCLEX_CLW = 45
+SCLEX_CLWNOCASE = 46
+SCLEX_LOT = 47
+SCLEX_YAML = 48
+SCLEX_TEX = 49
+SCLEX_METAPOST = 50
+SCLEX_POWERBASIC = 51
+SCLEX_FORTH = 52
+SCLEX_ERLANG = 53
+SCLEX_AUTOMATIC = 1000
+SCE_P_DEFAULT = 0
+SCE_P_COMMENTLINE = 1
+SCE_P_NUMBER = 2
+SCE_P_STRING = 3
+SCE_P_CHARACTER = 4
+SCE_P_WORD = 5
+SCE_P_TRIPLE = 6
+SCE_P_TRIPLEDOUBLE = 7
+SCE_P_CLASSNAME = 8
+SCE_P_DEFNAME = 9
+SCE_P_OPERATOR = 10
+SCE_P_IDENTIFIER = 11
+SCE_P_COMMENTBLOCK = 12
+SCE_P_STRINGEOL = 13
+SCE_C_DEFAULT = 0
+SCE_C_COMMENT = 1
+SCE_C_COMMENTLINE = 2
+SCE_C_COMMENTDOC = 3
+SCE_C_NUMBER = 4
+SCE_C_WORD = 5
+SCE_C_STRING = 6
+SCE_C_CHARACTER = 7
+SCE_C_UUID = 8
+SCE_C_PREPROCESSOR = 9
+SCE_C_OPERATOR = 10
+SCE_C_IDENTIFIER = 11
+SCE_C_STRINGEOL = 12
+SCE_C_VERBATIM = 13
+SCE_C_REGEX = 14
+SCE_C_COMMENTLINEDOC = 15
+SCE_C_WORD2 = 16
+SCE_C_COMMENTDOCKEYWORD = 17
+SCE_C_COMMENTDOCKEYWORDERROR = 18
+SCE_C_GLOBALCLASS = 19
+SCE_H_DEFAULT = 0
+SCE_H_TAG = 1
+SCE_H_TAGUNKNOWN = 2
+SCE_H_ATTRIBUTE = 3
+SCE_H_ATTRIBUTEUNKNOWN = 4
+SCE_H_NUMBER = 5
+SCE_H_DOUBLESTRING = 6
+SCE_H_SINGLESTRING = 7
+SCE_H_OTHER = 8
+SCE_H_COMMENT = 9
+SCE_H_ENTITY = 10
+SCE_H_TAGEND = 11
+SCE_H_XMLSTART = 12
+SCE_H_XMLEND = 13
+SCE_H_SCRIPT = 14
+SCE_H_ASP = 15
+SCE_H_ASPAT = 16
+SCE_H_CDATA = 17
+SCE_H_QUESTION = 18
+SCE_H_VALUE = 19
+SCE_H_XCCOMMENT = 20
+SCE_H_SGML_DEFAULT = 21
+SCE_H_SGML_COMMAND = 22
+SCE_H_SGML_1ST_PARAM = 23
+SCE_H_SGML_DOUBLESTRING = 24
+SCE_H_SGML_SIMPLESTRING = 25
+SCE_H_SGML_ERROR = 26
+SCE_H_SGML_SPECIAL = 27
+SCE_H_SGML_ENTITY = 28
+SCE_H_SGML_COMMENT = 29
+SCE_H_SGML_1ST_PARAM_COMMENT = 30
+SCE_H_SGML_BLOCK_DEFAULT = 31
+SCE_HJ_START = 40
+SCE_HJ_DEFAULT = 41
+SCE_HJ_COMMENT = 42
+SCE_HJ_COMMENTLINE = 43
+SCE_HJ_COMMENTDOC = 44
+SCE_HJ_NUMBER = 45
+SCE_HJ_WORD = 46
+SCE_HJ_KEYWORD = 47
+SCE_HJ_DOUBLESTRING = 48
+SCE_HJ_SINGLESTRING = 49
+SCE_HJ_SYMBOLS = 50
+SCE_HJ_STRINGEOL = 51
+SCE_HJ_REGEX = 52
+SCE_HJA_START = 55
+SCE_HJA_DEFAULT = 56
+SCE_HJA_COMMENT = 57
+SCE_HJA_COMMENTLINE = 58
+SCE_HJA_COMMENTDOC = 59
+SCE_HJA_NUMBER = 60
+SCE_HJA_WORD = 61
+SCE_HJA_KEYWORD = 62
+SCE_HJA_DOUBLESTRING = 63
+SCE_HJA_SINGLESTRING = 64
+SCE_HJA_SYMBOLS = 65
+SCE_HJA_STRINGEOL = 66
+SCE_HJA_REGEX = 67
+SCE_HB_START = 70
+SCE_HB_DEFAULT = 71
+SCE_HB_COMMENTLINE = 72
+SCE_HB_NUMBER = 73
+SCE_HB_WORD = 74
+SCE_HB_STRING = 75
+SCE_HB_IDENTIFIER = 76
+SCE_HB_STRINGEOL = 77
+SCE_HBA_START = 80
+SCE_HBA_DEFAULT = 81
+SCE_HBA_COMMENTLINE = 82
+SCE_HBA_NUMBER = 83
+SCE_HBA_WORD = 84
+SCE_HBA_STRING = 85
+SCE_HBA_IDENTIFIER = 86
+SCE_HBA_STRINGEOL = 87
+SCE_HP_START = 90
+SCE_HP_DEFAULT = 91
+SCE_HP_COMMENTLINE = 92
+SCE_HP_NUMBER = 93
+SCE_HP_STRING = 94
+SCE_HP_CHARACTER = 95
+SCE_HP_WORD = 96
+SCE_HP_TRIPLE = 97
+SCE_HP_TRIPLEDOUBLE = 98
+SCE_HP_CLASSNAME = 99
+SCE_HP_DEFNAME = 100
+SCE_HP_OPERATOR = 101
+SCE_HP_IDENTIFIER = 102
+SCE_HPA_START = 105
+SCE_HPA_DEFAULT = 106
+SCE_HPA_COMMENTLINE = 107
+SCE_HPA_NUMBER = 108
+SCE_HPA_STRING = 109
+SCE_HPA_CHARACTER = 110
+SCE_HPA_WORD = 111
+SCE_HPA_TRIPLE = 112
+SCE_HPA_TRIPLEDOUBLE = 113
+SCE_HPA_CLASSNAME = 114
+SCE_HPA_DEFNAME = 115
+SCE_HPA_OPERATOR = 116
+SCE_HPA_IDENTIFIER = 117
+SCE_HPHP_DEFAULT = 118
+SCE_HPHP_HSTRING = 119
+SCE_HPHP_SIMPLESTRING = 120
+SCE_HPHP_WORD = 121
+SCE_HPHP_NUMBER = 122
+SCE_HPHP_VARIABLE = 123
+SCE_HPHP_COMMENT = 124
+SCE_HPHP_COMMENTLINE = 125
+SCE_HPHP_HSTRING_VARIABLE = 126
+SCE_HPHP_OPERATOR = 127
+SCE_PL_DEFAULT = 0
+SCE_PL_ERROR = 1
+SCE_PL_COMMENTLINE = 2
+SCE_PL_POD = 3
+SCE_PL_NUMBER = 4
+SCE_PL_WORD = 5
+SCE_PL_STRING = 6
+SCE_PL_CHARACTER = 7
+SCE_PL_PUNCTUATION = 8
+SCE_PL_PREPROCESSOR = 9
+SCE_PL_OPERATOR = 10
+SCE_PL_IDENTIFIER = 11
+SCE_PL_SCALAR = 12
+SCE_PL_ARRAY = 13
+SCE_PL_HASH = 14
+SCE_PL_SYMBOLTABLE = 15
+SCE_PL_REGEX = 17
+SCE_PL_REGSUBST = 18
+SCE_PL_LONGQUOTE = 19
+SCE_PL_BACKTICKS = 20
+SCE_PL_DATASECTION = 21
+SCE_PL_HERE_DELIM = 22
+SCE_PL_HERE_Q = 23
+SCE_PL_HERE_QQ = 24
+SCE_PL_HERE_QX = 25
+SCE_PL_STRING_Q = 26
+SCE_PL_STRING_QQ = 27
+SCE_PL_STRING_QX = 28
+SCE_PL_STRING_QR = 29
+SCE_PL_STRING_QW = 30
+SCE_B_DEFAULT = 0
+SCE_B_COMMENT = 1
+SCE_B_NUMBER = 2
+SCE_B_KEYWORD = 3
+SCE_B_STRING = 4
+SCE_B_PREPROCESSOR = 5
+SCE_B_OPERATOR = 6
+SCE_B_IDENTIFIER = 7
+SCE_B_DATE = 8
+SCE_PROPS_DEFAULT = 0
+SCE_PROPS_COMMENT = 1
+SCE_PROPS_SECTION = 2
+SCE_PROPS_ASSIGNMENT = 3
+SCE_PROPS_DEFVAL = 4
+SCE_L_DEFAULT = 0
+SCE_L_COMMAND = 1
+SCE_L_TAG = 2
+SCE_L_MATH = 3
+SCE_L_COMMENT = 4
+SCE_LUA_DEFAULT = 0
+SCE_LUA_COMMENT = 1
+SCE_LUA_COMMENTLINE = 2
+SCE_LUA_COMMENTDOC = 3
+SCE_LUA_NUMBER = 4
+SCE_LUA_WORD = 5
+SCE_LUA_STRING = 6
+SCE_LUA_CHARACTER = 7
+SCE_LUA_LITERALSTRING = 8
+SCE_LUA_PREPROCESSOR = 9
+SCE_LUA_OPERATOR = 10
+SCE_LUA_IDENTIFIER = 11
+SCE_LUA_STRINGEOL = 12
+SCE_LUA_WORD2 = 13
+SCE_LUA_WORD3 = 14
+SCE_LUA_WORD4 = 15
+SCE_LUA_WORD5 = 16
+SCE_LUA_WORD6 = 17
+SCE_LUA_WORD7 = 18
+SCE_LUA_WORD8 = 19
+SCE_ERR_DEFAULT = 0
+SCE_ERR_PYTHON = 1
+SCE_ERR_GCC = 2
+SCE_ERR_MS = 3
+SCE_ERR_CMD = 4
+SCE_ERR_BORLAND = 5
+SCE_ERR_PERL = 6
+SCE_ERR_NET = 7
+SCE_ERR_LUA = 8
+SCE_ERR_CTAG = 9
+SCE_ERR_DIFF_CHANGED = 10
+SCE_ERR_DIFF_ADDITION = 11
+SCE_ERR_DIFF_DELETION = 12
+SCE_ERR_DIFF_MESSAGE = 13
+SCE_ERR_PHP = 14
+SCE_ERR_ELF = 15
+SCE_ERR_IFC = 16
+SCE_BAT_DEFAULT = 0
+SCE_BAT_COMMENT = 1
+SCE_BAT_WORD = 2
+SCE_BAT_LABEL = 3
+SCE_BAT_HIDE = 4
+SCE_BAT_COMMAND = 5
+SCE_BAT_IDENTIFIER = 6
+SCE_BAT_OPERATOR = 7
+SCE_MAKE_DEFAULT = 0
+SCE_MAKE_COMMENT = 1
+SCE_MAKE_PREPROCESSOR = 2
+SCE_MAKE_IDENTIFIER = 3
+SCE_MAKE_OPERATOR = 4
+SCE_MAKE_TARGET = 5
+SCE_MAKE_IDEOL = 9
+SCE_DIFF_DEFAULT = 0
+SCE_DIFF_COMMENT = 1
+SCE_DIFF_COMMAND = 2
+SCE_DIFF_HEADER = 3
+SCE_DIFF_POSITION = 4
+SCE_DIFF_DELETED = 5
+SCE_DIFF_ADDED = 6
+SCE_CONF_DEFAULT = 0
+SCE_CONF_COMMENT = 1
+SCE_CONF_NUMBER = 2
+SCE_CONF_IDENTIFIER = 3
+SCE_CONF_EXTENSION = 4
+SCE_CONF_PARAMETER = 5
+SCE_CONF_STRING = 6
+SCE_CONF_OPERATOR = 7
+SCE_CONF_IP = 8
+SCE_CONF_DIRECTIVE = 9
+SCE_AVE_DEFAULT = 0
+SCE_AVE_COMMENT = 1
+SCE_AVE_NUMBER = 2
+SCE_AVE_WORD = 3
+SCE_AVE_STRING = 6
+SCE_AVE_ENUM = 7
+SCE_AVE_STRINGEOL = 8
+SCE_AVE_IDENTIFIER = 9
+SCE_AVE_OPERATOR = 10
+SCE_AVE_WORD1 = 11
+SCE_AVE_WORD2 = 12
+SCE_AVE_WORD3 = 13
+SCE_AVE_WORD4 = 14
+SCE_AVE_WORD5 = 15
+SCE_AVE_WORD6 = 16
+SCE_ADA_DEFAULT = 0
+SCE_ADA_WORD = 1
+SCE_ADA_IDENTIFIER = 2
+SCE_ADA_NUMBER = 3
+SCE_ADA_DELIMITER = 4
+SCE_ADA_CHARACTER = 5
+SCE_ADA_CHARACTEREOL = 6
+SCE_ADA_STRING = 7
+SCE_ADA_STRINGEOL = 8
+SCE_ADA_LABEL = 9
+SCE_ADA_COMMENTLINE = 10
+SCE_ADA_ILLEGAL = 11
+SCE_BAAN_DEFAULT = 0
+SCE_BAAN_COMMENT = 1
+SCE_BAAN_COMMENTDOC = 2
+SCE_BAAN_NUMBER = 3
+SCE_BAAN_WORD = 4
+SCE_BAAN_STRING = 5
+SCE_BAAN_PREPROCESSOR = 6
+SCE_BAAN_OPERATOR = 7
+SCE_BAAN_IDENTIFIER = 8
+SCE_BAAN_STRINGEOL = 9
+SCE_BAAN_WORD2 = 10
+SCE_LISP_DEFAULT = 0
+SCE_LISP_COMMENT = 1
+SCE_LISP_NUMBER = 2
+SCE_LISP_KEYWORD = 3
+SCE_LISP_STRING = 6
+SCE_LISP_STRINGEOL = 8
+SCE_LISP_IDENTIFIER = 9
+SCE_LISP_OPERATOR = 10
+SCE_EIFFEL_DEFAULT = 0
+SCE_EIFFEL_COMMENTLINE = 1
+SCE_EIFFEL_NUMBER = 2
+SCE_EIFFEL_WORD = 3
+SCE_EIFFEL_STRING = 4
+SCE_EIFFEL_CHARACTER = 5
+SCE_EIFFEL_OPERATOR = 6
+SCE_EIFFEL_IDENTIFIER = 7
+SCE_EIFFEL_STRINGEOL = 8
+SCE_NNCRONTAB_DEFAULT = 0
+SCE_NNCRONTAB_COMMENT = 1
+SCE_NNCRONTAB_TASK = 2
+SCE_NNCRONTAB_SECTION = 3
+SCE_NNCRONTAB_KEYWORD = 4
+SCE_NNCRONTAB_MODIFIER = 5
+SCE_NNCRONTAB_ASTERISK = 6
+SCE_NNCRONTAB_NUMBER = 7
+SCE_NNCRONTAB_STRING = 8
+SCE_NNCRONTAB_ENVIRONMENT = 9
+SCE_NNCRONTAB_IDENTIFIER = 10
+SCE_FORTH_DEFAULT = 0
+SCE_FORTH_COMMENT = 1
+SCE_FORTH_COMMENT_ML = 2
+SCE_FORTH_IDENTIFIER = 3
+SCE_FORTH_CONTROL = 4
+SCE_FORTH_KEYWORD = 5
+SCE_FORTH_DEFWORD = 6
+SCE_FORTH_PREWORD1 = 7
+SCE_FORTH_PREWORD2 = 8
+SCE_FORTH_NUMBER = 9
+SCE_FORTH_STRING = 10
+SCE_FORTH_LOCALE = 11
+SCE_MATLAB_DEFAULT = 0
+SCE_MATLAB_COMMENT = 1
+SCE_MATLAB_COMMAND = 2
+SCE_MATLAB_NUMBER = 3
+SCE_MATLAB_KEYWORD = 4
+SCE_MATLAB_STRING = 5
+SCE_MATLAB_OPERATOR = 6
+SCE_MATLAB_IDENTIFIER = 7
+SCE_SCRIPTOL_DEFAULT = 0
+SCE_SCRIPTOL_WHITE = 1
+SCE_SCRIPTOL_COMMENTLINE = 2
+SCE_SCRIPTOL_PERSISTENT = 3
+SCE_SCRIPTOL_CSTYLE = 4
+SCE_SCRIPTOL_COMMENTBLOCK = 5
+SCE_SCRIPTOL_NUMBER = 6
+SCE_SCRIPTOL_STRING = 7
+SCE_SCRIPTOL_CHARACTER = 8
+SCE_SCRIPTOL_STRINGEOL = 9
+SCE_SCRIPTOL_KEYWORD = 10
+SCE_SCRIPTOL_OPERATOR = 11
+SCE_SCRIPTOL_IDENTIFIER = 12
+SCE_SCRIPTOL_TRIPLE = 13
+SCE_SCRIPTOL_CLASSNAME = 14
+SCE_SCRIPTOL_PREPROCESSOR = 15
+SCE_ASM_DEFAULT = 0
+SCE_ASM_COMMENT = 1
+SCE_ASM_NUMBER = 2
+SCE_ASM_STRING = 3
+SCE_ASM_OPERATOR = 4
+SCE_ASM_IDENTIFIER = 5
+SCE_ASM_CPUINSTRUCTION = 6
+SCE_ASM_MATHINSTRUCTION = 7
+SCE_ASM_REGISTER = 8
+SCE_ASM_DIRECTIVE = 9
+SCE_ASM_DIRECTIVEOPERAND = 10
+SCE_ASM_COMMENTBLOCK = 11
+SCE_ASM_CHARACTER = 12
+SCE_ASM_STRINGEOL = 13
+SCE_ASM_EXTINSTRUCTION = 14
+SCE_F_DEFAULT = 0
+SCE_F_COMMENT = 1
+SCE_F_NUMBER = 2
+SCE_F_STRING1 = 3
+SCE_F_STRING2 = 4
+SCE_F_STRINGEOL = 5
+SCE_F_OPERATOR = 6
+SCE_F_IDENTIFIER = 7
+SCE_F_WORD = 8
+SCE_F_WORD2 = 9
+SCE_F_WORD3 = 10
+SCE_F_PREPROCESSOR = 11
+SCE_F_OPERATOR2 = 12
+SCE_F_LABEL = 13
+SCE_F_CONTINUATION = 14
+SCE_CSS_DEFAULT = 0
+SCE_CSS_TAG = 1
+SCE_CSS_CLASS = 2
+SCE_CSS_PSEUDOCLASS = 3
+SCE_CSS_UNKNOWN_PSEUDOCLASS = 4
+SCE_CSS_OPERATOR = 5
+SCE_CSS_IDENTIFIER = 6
+SCE_CSS_UNKNOWN_IDENTIFIER = 7
+SCE_CSS_VALUE = 8
+SCE_CSS_COMMENT = 9
+SCE_CSS_ID = 10
+SCE_CSS_IMPORTANT = 11
+SCE_CSS_DIRECTIVE = 12
+SCE_CSS_DOUBLESTRING = 13
+SCE_CSS_SINGLESTRING = 14
+SCE_POV_DEFAULT = 0
+SCE_POV_COMMENT = 1
+SCE_POV_COMMENTLINE = 2
+SCE_POV_NUMBER = 3
+SCE_POV_OPERATOR = 4
+SCE_POV_IDENTIFIER = 5
+SCE_POV_STRING = 6
+SCE_POV_STRINGEOL = 7
+SCE_POV_DIRECTIVE = 8
+SCE_POV_BADDIRECTIVE = 9
+SCE_POV_WORD2 = 10
+SCE_POV_WORD3 = 11
+SCE_POV_WORD4 = 12
+SCE_POV_WORD5 = 13
+SCE_POV_WORD6 = 14
+SCE_POV_WORD7 = 15
+SCE_POV_WORD8 = 16
+SCE_LOUT_DEFAULT = 0
+SCE_LOUT_COMMENT = 1
+SCE_LOUT_NUMBER = 2
+SCE_LOUT_WORD = 3
+SCE_LOUT_WORD2 = 4
+SCE_LOUT_WORD3 = 5
+SCE_LOUT_WORD4 = 6
+SCE_LOUT_STRING = 7
+SCE_LOUT_OPERATOR = 8
+SCE_LOUT_IDENTIFIER = 9
+SCE_LOUT_STRINGEOL = 10
+SCE_ESCRIPT_DEFAULT = 0
+SCE_ESCRIPT_COMMENT = 1
+SCE_ESCRIPT_COMMENTLINE = 2
+SCE_ESCRIPT_COMMENTDOC = 3
+SCE_ESCRIPT_NUMBER = 4
+SCE_ESCRIPT_WORD = 5
+SCE_ESCRIPT_STRING = 6
+SCE_ESCRIPT_OPERATOR = 7
+SCE_ESCRIPT_IDENTIFIER = 8
+SCE_ESCRIPT_BRACE = 9
+SCE_ESCRIPT_WORD2 = 10
+SCE_ESCRIPT_WORD3 = 11
+SCE_PS_DEFAULT = 0
+SCE_PS_COMMENT = 1
+SCE_PS_DSC_COMMENT = 2
+SCE_PS_DSC_VALUE = 3
+SCE_PS_NUMBER = 4
+SCE_PS_NAME = 5
+SCE_PS_KEYWORD = 6
+SCE_PS_LITERAL = 7
+SCE_PS_IMMEVAL = 8
+SCE_PS_PAREN_ARRAY = 9
+SCE_PS_PAREN_DICT = 10
+SCE_PS_PAREN_PROC = 11
+SCE_PS_TEXT = 12
+SCE_PS_HEXSTRING = 13
+SCE_PS_BASE85STRING = 14
+SCE_PS_BADSTRINGCHAR = 15
+SCE_NSIS_DEFAULT = 0
+SCE_NSIS_COMMENT = 1
+SCE_NSIS_STRINGDQ = 2
+SCE_NSIS_STRINGLQ = 3
+SCE_NSIS_STRINGRQ = 4
+SCE_NSIS_FUNCTION = 5
+SCE_NSIS_VARIABLE = 6
+SCE_NSIS_LABEL = 7
+SCE_NSIS_USERDEFINED = 8
+SCE_NSIS_SECTIONDEF = 9
+SCE_NSIS_SUBSECTIONDEF = 10
+SCE_NSIS_IFDEFINEDEF = 11
+SCE_NSIS_MACRODEF = 12
+SCE_NSIS_STRINGVAR = 13
+SCE_MMIXAL_LEADWS = 0
+SCE_MMIXAL_COMMENT = 1
+SCE_MMIXAL_LABEL = 2
+SCE_MMIXAL_OPCODE = 3
+SCE_MMIXAL_OPCODE_PRE = 4
+SCE_MMIXAL_OPCODE_VALID = 5
+SCE_MMIXAL_OPCODE_UNKNOWN = 6
+SCE_MMIXAL_OPCODE_POST = 7
+SCE_MMIXAL_OPERANDS = 8
+SCE_MMIXAL_NUMBER = 9
+SCE_MMIXAL_REF = 10
+SCE_MMIXAL_CHAR = 11
+SCE_MMIXAL_STRING = 12
+SCE_MMIXAL_REGISTER = 13
+SCE_MMIXAL_HEX = 14
+SCE_MMIXAL_OPERATOR = 15
+SCE_MMIXAL_SYMBOL = 16
+SCE_MMIXAL_INCLUDE = 17
+SCE_CLW_DEFAULT = 0
+SCE_CLW_LABEL = 1
+SCE_CLW_COMMENT = 2
+SCE_CLW_STRING = 3
+SCE_CLW_USER_IDENTIFIER = 4
+SCE_CLW_INTEGER_CONSTANT = 5
+SCE_CLW_REAL_CONSTANT = 6
+SCE_CLW_PICTURE_STRING = 7
+SCE_CLW_KEYWORD = 8
+SCE_CLW_COMPILER_DIRECTIVE = 9
+SCE_CLW_BUILTIN_PROCEDURES_FUNCTION = 10
+SCE_CLW_STRUCTURE_DATA_TYPE = 11
+SCE_CLW_ATTRIBUTE = 12
+SCE_CLW_STANDARD_EQUATE = 13
+SCE_CLW_ERROR = 14
+SCE_LOT_DEFAULT = 0
+SCE_LOT_HEADER = 1
+SCE_LOT_BREAK = 2
+SCE_LOT_SET = 3
+SCE_LOT_PASS = 4
+SCE_LOT_FAIL = 5
+SCE_LOT_ABORT = 6
+SCE_YAML_DEFAULT = 0
+SCE_YAML_COMMENT = 1
+SCE_YAML_IDENTIFIER = 2
+SCE_YAML_KEYWORD = 3
+SCE_YAML_NUMBER = 4
+SCE_YAML_REFERENCE = 5
+SCE_YAML_DOCUMENT = 6
+SCE_YAML_TEXT = 7
+SCE_YAML_ERROR = 8
+SCE_TEX_DEFAULT = 0
+SCE_TEX_SPECIAL = 1
+SCE_TEX_GROUP = 2
+SCE_TEX_SYMBOL = 3
+SCE_TEX_COMMAND = 4
+SCE_TEX_TEXT = 5
+SCE_METAPOST_DEFAULT = 0
+SCE_METAPOST_SPECIAL = 1
+SCE_METAPOST_GROUP = 2
+SCE_METAPOST_SYMBOL = 3
+SCE_METAPOST_COMMAND = 4
+SCE_METAPOST_TEXT = 5
+SCE_METAPOST_EXTRA = 6
+SCE_ERLANG_DEFAULT = 0
+SCE_ERLANG_COMMENT = 1
+SCE_ERLANG_VARIABLE = 2
+SCE_ERLANG_NUMBER = 3
+SCE_ERLANG_KEYWORD = 4
+SCE_ERLANG_STRING = 5
+SCE_ERLANG_OPERATOR = 6
+SCE_ERLANG_ATOM = 7
+SCE_ERLANG_FUNCTION_NAME = 8
+SCE_ERLANG_CHARACTER = 9
+SCE_ERLANG_MACRO = 10
+SCE_ERLANG_RECORD = 11
+SCE_ERLANG_SEPARATOR = 12
+SCE_ERLANG_NODE_NAME = 13
+SCE_ERLANG_UNKNOWN = 31
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/view.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/view.py
new file mode 100644
index 0000000..1cfe107
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/scintilla/view.py
@@ -0,0 +1,689 @@
+# A general purpose MFC CCtrlView view that uses Scintilla.
+
+import control
+import IDLEenvironment # IDLE emulation.
+from pywin.mfc import docview
+from pywin.mfc import dialog
+from scintillacon import *
+import win32con
+import win32ui
+import afxres
+import string
+import array
+import sys
+import types
+import __main__ # for attribute lookup
+import bindings
+import keycodes
+import struct
+import re
+import os
+
+from pywin import is_platform_unicode
+
+PRINTDLGORD = 1538
+IDC_PRINT_MAG_EDIT = 1010
+EM_FORMATRANGE = win32con.WM_USER+57
+
+wordbreaks = "._" + string.uppercase + string.lowercase + string.digits
+
+patImport=re.compile('import (?P<name>.*)')
+
+_event_commands = [
+	# File menu
+	"win32ui.ID_FILE_LOCATE", "win32ui.ID_FILE_CHECK", "afxres.ID_FILE_CLOSE",
+	"afxres.ID_FILE_NEW", "afxres.ID_FILE_OPEN", "afxres.ID_FILE_SAVE",
+	"afxres.ID_FILE_SAVE_AS", "win32ui.ID_FILE_SAVE_ALL",
+	# Edit menu
+	"afxres.ID_EDIT_UNDO", "afxres.ID_EDIT_REDO", "afxres.ID_EDIT_CUT",
+	"afxres.ID_EDIT_COPY", "afxres.ID_EDIT_PASTE", "afxres.ID_EDIT_SELECT_ALL",
+	"afxres.ID_EDIT_FIND", "afxres.ID_EDIT_REPEAT", "afxres.ID_EDIT_REPLACE", 
+	# View menu
+	"win32ui.ID_VIEW_WHITESPACE", "win32ui.ID_VIEW_FIXED_FONT",
+	"win32ui.ID_VIEW_BROWSE", "win32ui.ID_VIEW_INTERACTIVE",
+	# Window menu
+	"afxres.ID_WINDOW_ARRANGE", "afxres.ID_WINDOW_CASCADE",
+	"afxres.ID_WINDOW_NEW", "afxres.ID_WINDOW_SPLIT",
+	"afxres.ID_WINDOW_TILE_HORZ", "afxres.ID_WINDOW_TILE_VERT",
+	# Others
+	"afxres.ID_APP_EXIT", "afxres.ID_APP_ABOUT",
+]
+
+_extra_event_commands = [
+	("EditDelete", afxres.ID_EDIT_CLEAR),
+	("LocateModule", win32ui.ID_FILE_LOCATE),
+	("GotoLine", win32ui.ID_EDIT_GOTO_LINE),
+	("DbgBreakpointToggle", win32ui.IDC_DBG_ADD),
+	("DbgGo", win32ui.IDC_DBG_GO),
+	("DbgStepOver", win32ui.IDC_DBG_STEPOVER),
+	("DbgStep", win32ui.IDC_DBG_STEP),
+	("DbgStepOut", win32ui.IDC_DBG_STEPOUT),
+	("DbgBreakpointClearAll", win32ui.IDC_DBG_CLEAR),
+	("DbgClose", win32ui.IDC_DBG_CLOSE),
+]
+
+event_commands = []
+def _CreateEvents():
+	for name in _event_commands:
+		val = eval(name)
+		name_parts = string.split(name, "_")[1:]
+		name_parts = map(string.capitalize, name_parts)
+		event  =string.join(name_parts,'')
+		event_commands.append((event, val))
+	for name, id in _extra_event_commands:
+		event_commands.append((name, id))
+
+_CreateEvents()
+del _event_commands; del _extra_event_commands
+
+command_reflectors = [
+	(win32ui.ID_EDIT_UNDO, win32con.WM_UNDO),
+	(win32ui.ID_EDIT_REDO, SCI_REDO),
+	(win32ui.ID_EDIT_CUT, win32con.WM_CUT),
+	(win32ui.ID_EDIT_COPY, win32con.WM_COPY),
+	(win32ui.ID_EDIT_PASTE, win32con.WM_PASTE),
+	(win32ui.ID_EDIT_CLEAR, win32con.WM_CLEAR),
+	(win32ui.ID_EDIT_SELECT_ALL, SCI_SELECTALL),
+]
+
+def DoBraceMatch(control):
+		curPos = control.SCIGetCurrentPos()
+		charBefore = ' '
+		if curPos: charBefore = control.SCIGetCharAt(curPos-1)
+		charAt = control.SCIGetCharAt(curPos)
+		braceAtPos = braceOpposite = -1
+		if charBefore in "[](){}": braceAtPos = curPos-1
+		if braceAtPos==-1:
+			if charAt in "[](){}": braceAtPos = curPos
+		if braceAtPos != -1:
+			braceOpposite = control.SCIBraceMatch(braceAtPos, 0)
+		if braceAtPos != -1 and braceOpposite==-1:
+			control.SCIBraceBadHighlight(braceAtPos)
+		else:
+			# either clear them both or set them both.
+			control.SCIBraceHighlight(braceAtPos, braceOpposite)
+
+def _get_class_attributes(ob):
+	# Recurse into base classes looking for attributes
+	items = []
+	try:
+		items = items + dir(ob)
+		for i in ob.__bases__:
+			for item in _get_class_attributes(i):
+				if item not in items:
+					items.append(item)
+	except AttributeError:
+		pass
+	return items
+
+# Supposed to look like an MFC CEditView, but 
+# also supports IDLE extensions and other source code generic features.
+class CScintillaView(docview.CtrlView, control.CScintillaColorEditInterface):
+	def __init__(self, doc):
+		docview.CtrlView.__init__(self, doc, "Scintilla", win32con.WS_CHILD | win32con.WS_VSCROLL | win32con.WS_HSCROLL | win32con.WS_CLIPCHILDREN | win32con.WS_VISIBLE)
+		self._tabWidth = 8 # Mirror of what we send to Scintilla - never change this directly
+		self.bAutoCompleteAttributes = 1
+		self.bShowCallTips = 1
+		self.bMatchBraces = 0 # Editor option will default this to true later!
+		self.bindings = bindings.BindingsManager(self)
+
+		self.idle = IDLEenvironment.IDLEEditorWindow(self)
+		self.idle.IDLEExtension("AutoExpand")
+		# SendScintilla is called so frequently it is worth optimizing.
+		self.SendScintilla = self._obj_.SendMessage
+
+	def OnDestroy(self, msg):
+		self.SendScintilla = None
+		return docview.CtrlView.OnDestroy(self, msg)
+
+	def _MakeColorizer(self):
+		ext = os.path.splitext(self.GetDocument().GetPathName())[1]
+		import formatter
+		return formatter.BuiltinPythonSourceFormatter(self, ext)
+
+	
+#	def SendScintilla(self, msg, w=0, l=0):
+#		return self._obj_.SendMessage(msg, w, l)
+
+	def SCISetTabWidth(self, width):
+		# I need to remember the tab-width for the AutoIndent extension.  This may go.
+		self._tabWidth = width
+		control.CScintillaEditInterface.SCISetTabWidth(self, width)
+
+	def GetTabWidth(self):
+		return self._tabWidth
+
+	def HookHandlers(self):
+		# Create events for all the menu names.
+		for name, val in event_commands:
+#			handler = lambda id, code, tosend=val, parent=parent: parent.OnCommand(tosend, 0) and 0
+			self.bindings.bind(name, None, cid=val)
+
+		# Hook commands that do nothing other than send Scintilla messages.
+		for command, reflection in command_reflectors:
+			handler = lambda id, code, ss=self.SendScintilla, tosend=reflection: ss(tosend) and 0
+			self.HookCommand(handler, command)
+
+		self.HookCommand(self.OnCmdViewWS, win32ui.ID_VIEW_WHITESPACE)
+		self.HookCommandUpdate(self.OnUpdateViewWS, win32ui.ID_VIEW_WHITESPACE)
+		self.HookCommand(self.OnCmdViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
+		self.HookCommandUpdate(self.OnUpdateViewIndentationGuides, win32ui.ID_VIEW_INDENTATIONGUIDES)
+		self.HookCommand(self.OnCmdViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
+		self.HookCommandUpdate(self.OnUpdateViewRightEdge, win32ui.ID_VIEW_RIGHT_EDGE)
+		self.HookCommand(self.OnCmdViewEOL, win32ui.ID_VIEW_EOL)
+		self.HookCommandUpdate(self.OnUpdateViewEOL, win32ui.ID_VIEW_EOL)
+		self.HookCommand(self.OnCmdViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
+		self.HookCommandUpdate(self.OnUpdateViewFixedFont, win32ui.ID_VIEW_FIXED_FONT)
+		self.HookCommand(self.OnCmdFileLocate, win32ui.ID_FILE_LOCATE)
+		self.HookCommand(self.OnCmdEditFind, win32ui.ID_EDIT_FIND)
+		self.HookCommand(self.OnCmdEditRepeat, win32ui.ID_EDIT_REPEAT)
+		self.HookCommand(self.OnCmdEditReplace, win32ui.ID_EDIT_REPLACE)
+		self.HookCommand(self.OnCmdGotoLine, win32ui.ID_EDIT_GOTO_LINE)
+		self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT)
+		self.HookCommand(self.OnFilePrint, afxres.ID_FILE_PRINT_DIRECT)
+		self.HookCommand(self.OnFilePrintPreview,
+			win32ui.ID_FILE_PRINT_PREVIEW)
+		# Key bindings.
+		self.HookMessage(self.OnKeyDown, win32con.WM_KEYDOWN)
+		self.HookMessage(self.OnKeyDown, win32con.WM_SYSKEYDOWN)
+		# Hook wheeley mouse events
+#		self.HookMessage(self.OnMouseWheel, win32con.WM_MOUSEWHEEL)
+		self.HookFormatter()
+
+	def OnInitialUpdate(self):
+		doc = self.GetDocument()
+
+		# Enable Unicode if we can
+		if is_platform_unicode:
+			self.SendScintilla(SCI_SETCODEPAGE, SC_CP_UTF8, 0)
+		# Create margins
+		self.SendScintilla(SCI_SETMARGINTYPEN, 1, SC_MARGIN_SYMBOL);
+		self.SendScintilla(SCI_SETMARGINMASKN, 1, 0xF);
+		self.SendScintilla(SCI_SETMARGINTYPEN, 2, SC_MARGIN_SYMBOL);
+		self.SendScintilla(SCI_SETMARGINMASKN, 2, SC_MASK_FOLDERS);
+		self.SendScintilla(SCI_SETMARGINSENSITIVEN, 2, 1);
+
+		self.GetDocument().HookViewNotifications(self) # is there an MFC way to grab this?
+		self.HookHandlers()
+
+		# Load the configuration information.
+		self.OnWinIniChange(None)
+
+		self.SetSel()
+
+		self.GetDocument().FinalizeViewCreation(self) # is there an MFC way to grab this?
+		
+
+	def _GetSubConfigNames(self):
+		return None # By default we use only sections without sub-sections.
+
+	def OnWinIniChange(self, section = None):
+		self.bindings.prepare_configure()
+		try:
+			self.DoConfigChange()
+		finally:
+			self.bindings.complete_configure()
+
+	def DoConfigChange(self):
+		# Bit of a hack I don't know what to do about - these should be "editor options"
+		from pywin.framework.editor import GetEditorOption
+		self.bAutoCompleteAttributes = GetEditorOption("Autocomplete Attributes", 1)
+		self.bShowCallTips = GetEditorOption("Show Call Tips", 1)
+		# Update the key map and extension data.
+		configManager.configure(self, self._GetSubConfigNames())
+		if configManager.last_error:
+			win32ui.MessageBox(configManager.last_error, "Configuration Error")
+		self.bMatchBraces = GetEditorOption("Match Braces", 1)
+		self.ApplyFormattingStyles(1)
+
+	def OnDestroy(self, msg):
+		self.bindings.close()
+		self.bindings = None
+		self.idle.close()
+		self.idle = None
+		control.CScintillaColorEditInterface.close(self)
+		return docview.CtrlView.OnDestroy(self, msg)
+
+	def OnMouseWheel(self, msg):
+		zDelta = msg[2] >> 16
+		vpos = self.GetScrollPos(win32con.SB_VERT)
+		vpos = vpos - zDelta/40 # 3 lines per notch
+		self.SetScrollPos(win32con.SB_VERT, vpos)
+		self.SendScintilla(win32con.WM_VSCROLL,
+						   (vpos<<16) | win32con.SB_THUMBPOSITION,
+						   0)
+
+	def OnBraceMatch(self, std, extra):
+		if not self.bMatchBraces: return
+		DoBraceMatch(self)
+
+	def OnNeedShown(self, std, extra):
+		notify = self.SCIUnpackNotifyMessage(extra)
+		# OnNeedShown is called before an edit operation when
+		# text is folded (as it is possible the text insertion will happen
+		# in a folded region.)  As this happens _before_ the insert,
+		# we ignore the length (if we are at EOF, pos + length may
+		# actually be beyond the end of buffer)
+		self.EnsureCharsVisible(notify.position)
+
+	def EnsureCharsVisible(self, start, end = None):
+		if end is None: end = start
+		lineStart = self.LineFromChar(min(start, end))
+		lineEnd = self.LineFromChar(max(start, end))
+		while lineStart <= lineEnd:
+			self.SCIEnsureVisible(lineStart)
+			lineStart = lineStart + 1
+
+	# Helper to add an event to a menu.
+	def AppendMenu(self, menu, text="", event=None, flags = None, checked=0):
+		if event is None:
+			assert flags is not None, "No event or custom flags!"
+			cmdid = 0
+		else:
+			cmdid = self.bindings.get_command_id(event)
+			if cmdid is None:
+				# No event of that name - no point displaying it.
+				print 'View.AppendMenu(): Unknown event "%s" specified for menu text "%s" - ignored' % (event, text)
+				return 
+			keyname = configManager.get_key_binding( event, self._GetSubConfigNames() )
+			if keyname is not None:
+				text = text + "\t" + keyname
+		if flags is None: flags = win32con.MF_STRING|win32con.MF_ENABLED
+		if checked: flags = flags | win32con.MF_CHECKED
+		menu.AppendMenu(flags, cmdid, text)
+
+	def OnKeyDown(self, msg):
+		return self.bindings.fire_key_event( msg )
+
+	def GotoEndOfFileEvent(self, event):
+		self.SetSel(-1)
+
+	def KeyDotEvent(self, event):
+		self.SCIAddText(".")
+		if self.bAutoCompleteAttributes:
+			self._AutoComplete()
+
+	# View Whitespace/EOL/Indentation UI.
+
+	def OnCmdViewWS(self, cmd, code): # Handle the menu command
+		viewWS = self.SCIGetViewWS()
+		self.SCISetViewWS(not viewWS)
+	def OnUpdateViewWS(self, cmdui): # Update the tick on the UI.
+		cmdui.SetCheck(self.SCIGetViewWS())
+		cmdui.Enable()
+	def OnCmdViewIndentationGuides(self, cmd, code): # Handle the menu command
+		viewIG = self.SCIGetIndentationGuides()
+		self.SCISetIndentationGuides(not viewIG)
+	def OnUpdateViewIndentationGuides(self, cmdui): # Update the tick on the UI.
+		cmdui.SetCheck(self.SCIGetIndentationGuides())
+		cmdui.Enable()
+	def OnCmdViewRightEdge(self, cmd, code): # Handle the menu command
+		if self.SCIGetEdgeMode() == EDGE_NONE:
+			mode = EDGE_BACKGROUND
+		else:
+			mode = EDGE_NONE
+		self.SCISetEdgeMode(mode)
+	def OnUpdateViewRightEdge(self, cmdui): # Update the tick on the UI.
+		cmdui.SetCheck(self.SCIGetEdgeMode() != EDGE_NONE)
+		cmdui.Enable()
+	def OnCmdViewEOL(self, cmd, code): # Handle the menu command
+		viewEOL = self.SCIGetViewEOL()
+		self.SCISetViewEOL(not viewEOL)
+	def OnUpdateViewEOL(self, cmdui): # Update the tick on the UI.
+		cmdui.SetCheck(self.SCIGetViewEOL())
+		cmdui.Enable()
+
+	def OnCmdViewFixedFont(self, cmd, code): # Handle the menu command
+		self._GetColorizer().bUseFixed = not self._GetColorizer().bUseFixed
+		self.ApplyFormattingStyles(0)
+		# Ensure the selection is visible!
+		self.ScrollCaret()
+
+	def OnUpdateViewFixedFont(self, cmdui): # Update the tick on the UI.
+		c = self._GetColorizer()
+		if c is not None: cmdui.SetCheck(c.bUseFixed)
+		cmdui.Enable(c is not None)
+
+	def OnCmdEditFind(self, cmd, code):
+		import find
+		find.ShowFindDialog()
+	def OnCmdEditRepeat(self, cmd, code):
+		import find
+		find.FindNext()
+	def OnCmdEditReplace(self, cmd, code):
+		import find
+		find.ShowReplaceDialog()
+
+	def OnCmdFileLocate(self, cmd, id):
+		line=string.strip(self.GetLine())
+		import pywin.framework.scriptutils
+		m = patImport.match(line)
+		if m:
+			# Module name on this line - locate that!
+			modName = m.group('name')
+			fileName = pywin.framework.scriptutils.LocatePythonFile(modName)
+			if fileName is None:
+				win32ui.SetStatusText("Can't locate module %s" % modName)
+				return 1 # Let the default get it.
+			else:
+				win32ui.GetApp().OpenDocumentFile(fileName)
+		else:
+			# Just do a "normal" locate - let the default handler get it.
+			return 1
+		return 0
+
+	def OnCmdGotoLine(self, cmd, id):
+		try:
+			lineNo = string.atoi(raw_input("Enter Line Number"))-1
+		except (ValueError, KeyboardInterrupt):
+			return 0
+		self.SCIEnsureVisible(lineNo)
+		self.SCIGotoLine(lineNo)
+		return 0
+
+	def SaveTextFile(self, filename):
+		doc = self.GetDocument()
+		s = self.GetTextRange()
+		if is_platform_unicode:
+			s = unicode(s,"utf-8").encode("mbcs")
+		f  = open(filename, 'wb')
+		f.write(s)
+		f.close()
+		doc.SetModifiedFlag(0)
+		return 1
+
+	def _AutoComplete(self):
+		def list2dict(l):
+			ret={}
+			for i in l:
+				ret[i] = None
+			return ret
+
+		self.SCIAutoCCancel() # Cancel old auto-complete lists.
+		# First try and get an object without evaluating calls
+		ob = self._GetObjectAtPos(bAllowCalls = 0)
+		# If that failed, try and process call or indexing to get the object.
+		if ob is None:
+			ob = self._GetObjectAtPos(bAllowCalls = 1)
+		items_dict = {}
+		if ob is not None:
+			try: # Catch unexpected errors when fetching attribute names from the object
+				try:
+					items_dict.update(list2dict(dir(ob)))
+				except AttributeError:
+					pass # object has no __dict__
+				if hasattr(ob, "__class__"):
+					items_dict.update(list2dict(_get_class_attributes(ob.__class__)))
+				# The object may be a COM object with typelib support - lets see if we can get its props.
+				# (contributed by Stefan Migowsky)
+				try:
+					# Get the automation attributes
+					items_dict.update(ob.__class__._prop_map_get_)
+					# See if there is a write-only property
+					# could be optimized
+					items_dict.update(ob.__class__._prop_map_put_)
+					# append to the already evaluated list
+				except AttributeError:
+					pass
+				# The object might be a pure COM dynamic dispatch with typelib support - lets see if we can get its props.
+				if hasattr(ob, "_oleobj_"):
+					try:
+						for iTI in xrange(0,ob._oleobj_.GetTypeInfoCount()):
+							typeInfo = ob._oleobj_.GetTypeInfo(iTI)
+							typeAttr = typeInfo.GetTypeAttr()
+							for iFun in xrange(0,typeAttr.cFuncs):
+								funDesc = typeInfo.GetFuncDesc(iFun)
+								funName = typeInfo.GetNames(funDesc.memid)[0]
+								if not items_dict.has_key(funName):
+									items_dict[funName] = None
+					except:
+						pass
+			except:
+				win32ui.SetStatusText("Error attempting to get object attributes - %s" % (`sys.exc_info()[0]`,))
+
+		# ensure all keys are strings.		
+		items = map(str, items_dict.keys())
+		# All names that start with "_" go!
+		items = filter(lambda word: word[0]!='_', items)
+
+		if not items:
+			# Heuristics a-la AutoExpand
+			# The idea is to find other usages of the current binding
+			# and assume, that it refers to the same object (or at least,
+			# to an object of the same type)
+			# Contributed by Vadim Chugunov [vadimch@yahoo.com]
+			left, right = self._GetWordSplit()
+			if left=="": # Ignore standalone dots
+				return None
+			# We limit our search to the current class, if that
+			# information is available
+			minline, maxline, curclass = self._GetClassInfoFromBrowser()
+			endpos = self.LineIndex(maxline)
+			text = self.GetTextRange(self.LineIndex(minline),endpos)
+			try:
+				list = re.findall(r"\b"+left+"\.\w+",text)
+			except re.error:
+				# parens etc may make an invalid RE, but this code wouldn't
+				# benefit even if the RE did work :-)
+				list = []
+			prefix = len(left)+1
+			unique = {}
+			for li in list:
+				unique[li[prefix:]] = 1
+			# Assuming traditional usage of self...
+			if curclass and left=="self":
+				self._UpdateWithClassMethods(unique,curclass)
+
+			items = filter(lambda word: word[:2]!='__' or word[-2:]!='__', unique.keys())
+			# Ignore the word currently to the right of the dot - probably a red-herring.
+			try:
+				items.remove(right[1:])
+			except ValueError:
+				pass
+		if items:
+			items.sort()
+			self.SCIAutoCSetAutoHide(0)
+			self.SCIAutoCShow(items)
+
+	# TODO: This is kinda slow. Probably need some kind of cache 
+	# here that is flushed upon file save
+	# Or maybe we don't need the superclass methods at all ?
+	def _UpdateWithClassMethods(self,dict,classinfo):
+		if not hasattr(classinfo,"methods"):
+			# No 'methods' - probably not what we think it is.
+			return
+		dict.update(classinfo.methods)
+		for super in classinfo.super:
+			if hasattr(super,"methods"):
+				self._UpdateWithClassMethods(dict,super)
+
+	# Find which class definition caret is currently in and return
+	# indexes of the first and the last lines of that class definition
+	# Data is obtained from module browser (if enabled)
+	def _GetClassInfoFromBrowser(self,pos=-1):
+		minline = 0
+		maxline = self.GetLineCount()-1
+		doc = self.GetParentFrame().GetActiveDocument()
+		browser = None
+		try:
+			if doc is not None:
+				browser = doc.GetAllViews()[1]
+		except IndexError:
+			pass
+		if browser is None:
+			return (minline,maxline,None) # Current window has no browser
+		if not browser.list: return (minline,maxline,None) # Not initialized
+		path = self.GetDocument().GetPathName()
+		if not path: return (minline,maxline,None) # No current path
+		
+		import pywin.framework.scriptutils
+		curmodule, path = pywin.framework.scriptutils.GetPackageModuleName(path)
+		try:
+			clbrdata = browser.list.root.clbrdata
+		except AttributeError:
+			return (minline,maxline,None) # No class data for this module.
+		curline = self.LineFromChar(pos)
+		curclass = None
+		# Find out which class we are in
+		for item in clbrdata.values():
+			if item.module==curmodule:
+				item_lineno = item.lineno - 1 # Scintilla counts lines from 0, whereas pyclbr - from 1
+				if minline < item_lineno <= curline:
+					minline = item_lineno
+					curclass = item
+				if curline < item_lineno < maxline:
+					maxline = item_lineno
+		return (minline,maxline,curclass)
+
+
+	def _GetObjectAtPos(self, pos = -1, bAllowCalls = 0):
+		left, right = self._GetWordSplit(pos, bAllowCalls)
+		if left: # It is an attribute lookup
+			# How is this for a hack!
+			namespace = sys.modules.copy()
+			namespace.update(__main__.__dict__)
+			# Get the debugger's context.
+			try:
+				from pywin.framework import interact
+				if interact.edit is not None and interact.edit.currentView is not None:
+					globs, locs = interact.edit.currentView.GetContext()[:2]
+					if globs: namespace.update(globs)
+					if locs: namespace.update(locs)
+			except ImportError:
+				pass
+			try:
+				return eval(left, namespace)
+			except:
+				pass
+		return None
+
+	def _GetWordSplit(self, pos = -1, bAllowCalls = 0):
+		if pos==-1: pos = self.GetSel()[0]-1 # Character before current one
+		limit = self.GetTextLength()
+		before = []
+		after = []
+		index = pos-1
+		wordbreaks_use = wordbreaks
+		if bAllowCalls: wordbreaks_use = wordbreaks_use + "()[]"
+		while index>=0:
+			char = self.SCIGetCharAt(index)
+			if char not in wordbreaks_use: break
+			before.insert(0, char)
+			index = index-1
+		index = pos
+		while index<=limit:
+			char = self.SCIGetCharAt(index)
+			if char not in wordbreaks_use: break
+			after.append(char)
+			index=index+1
+		return string.join(before,''), string.join(after,'')
+
+	def OnPrepareDC (self, dc, pInfo):
+#		print "OnPrepareDC for page", pInfo.GetCurPage(), "of", pInfo.GetFromPage(), "to", pInfo.GetToPage(), ", starts=", self.starts
+		if dc.IsPrinting():
+			# Check if we are beyond the end.
+			# (only do this when actually printing, else messes up print preview!)
+			if not pInfo.GetPreview() and self.starts is not None:
+				prevPage = pInfo.GetCurPage() - 1
+				if prevPage > 0 and self.starts[prevPage] >= self.GetTextLength():
+					# All finished.
+					pInfo.SetContinuePrinting(0)
+					return
+			dc.SetMapMode(win32con.MM_TEXT);
+
+	def OnPreparePrinting(self, pInfo):
+		flags = win32ui.PD_USEDEVMODECOPIES | \
+		        win32ui.PD_ALLPAGES | \
+		        win32ui.PD_NOSELECTION # Dont support printing just a selection.
+# NOTE: Custom print dialogs are stopping the user's values from coming back :-(
+#		self.prtDlg = PrintDialog(pInfo, PRINTDLGORD, flags)
+#		pInfo.SetPrintDialog(self.prtDlg)
+		pInfo.SetMinPage(1)
+		# max page remains undefined for now.
+		pInfo.SetFromPage(1)
+		pInfo.SetToPage(1)
+		ret = self.DoPreparePrinting(pInfo)
+		return ret
+
+	def OnBeginPrinting(self, dc, pInfo):
+		self.starts = None
+		return self._obj_.OnBeginPrinting(dc, pInfo)
+
+	def CalculatePageRanges(self, dc, pInfo):
+		# Calculate page ranges and max page
+		self.starts = {0:0}
+		metrics = dc.GetTextMetrics()
+		left, top, right, bottom = pInfo.GetDraw()
+		# Leave space at the top for the header.
+		rc = (left, top + (9*metrics['tmHeight'])/2, right, bottom)
+		pageStart = 0
+		maxPage = 0
+		textLen = self.GetTextLength()
+		while pageStart < textLen:
+			pageStart = self.FormatRange(dc, pageStart, textLen, rc, 0)
+			maxPage = maxPage + 1
+			self.starts[maxPage] = pageStart
+		# And a sentinel for one page past the end
+		self.starts[maxPage+1] = textLen
+		# When actually printing, maxPage doesn't have any effect at this late stage.
+		# but is needed to make the Print Preview work correctly.
+		pInfo.SetMaxPage(maxPage)
+
+	def OnFilePrintPreview(self, *arg):
+		self._obj_.OnFilePrintPreview()
+
+	def OnFilePrint(self, *arg):
+		self._obj_.OnFilePrint()
+
+	def FormatRange(self, dc, pageStart, lengthDoc, rc, draw):
+		hdcRender = dc.GetHandleOutput()
+		hdcFormat = dc.GetHandleAttrib()
+		fr = struct.pack('LLIIIIIIIIll', hdcRender, hdcFormat, rc[0], rc[1], rc[2], rc[3], rc[0], rc[1], rc[2], rc[3], pageStart, lengthDoc)
+		frBuff = array.array('c', fr)
+		addressFrBuff = frBuff.buffer_info()[0]
+		nextPageStart = self.SendScintilla(EM_FORMATRANGE, draw, addressFrBuff)
+		return nextPageStart
+
+	def OnPrint(self, dc, pInfo):
+		metrics = dc.GetTextMetrics()
+#		print "dev", w, h, l, metrics['tmAscent'], metrics['tmDescent']
+		if self.starts is None:
+			self.CalculatePageRanges(dc, pInfo)
+		pageNum = pInfo.GetCurPage() - 1
+		# Setup the header of the page - docname on left, pagenum on right.
+		doc = self.GetDocument()
+		cxChar = metrics['tmAveCharWidth']
+		cyChar = metrics['tmHeight']
+		left, top, right, bottom = pInfo.GetDraw()
+		dc.TextOut(0, 2*cyChar, doc.GetTitle())
+		pagenum_str = win32ui.LoadString(afxres.AFX_IDS_PRINTPAGENUM) % (pageNum+1,)
+		dc.SetTextAlign(win32con.TA_RIGHT)
+		dc.TextOut(right, 2*cyChar, pagenum_str)
+		dc.SetTextAlign(win32con.TA_LEFT)
+		top = top + (7*cyChar)/2
+		dc.MoveTo(left, top)
+		dc.LineTo(right, top)
+		top = top + cyChar
+		rc = (left, top, right, bottom)
+		nextPageStart = self.FormatRange(dc, self.starts[pageNum], self.starts[pageNum+1], rc, 1)
+
+def LoadConfiguration():
+	global configManager
+	# Bit of a hack I don't know what to do about?
+	from config import ConfigManager
+	configName = rc = win32ui.GetProfileVal("Editor", "Keyboard Config", "default")
+	configManager = ConfigManager(configName)
+	if configManager.last_error:
+		bTryDefault = 0
+		msg = "Error loading configuration '%s'\n\n%s" % (configName, configManager.last_error)
+		if configName != "default":
+			msg = msg + "\n\nThe default configuration will be loaded."
+			bTryDefault = 1
+		win32ui.MessageBox(msg)
+		if bTryDefault:
+			configManager = ConfigManager("default")
+			if configManager.last_error:
+				win32ui.MessageBox("Error loading configuration 'default'\n\n%s" % (configManager.last_error))
+
+configManager = None
+LoadConfiguration()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/TraceCollector.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/TraceCollector.py
new file mode 100644
index 0000000..3860f90d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/TraceCollector.py
@@ -0,0 +1,66 @@
+# win32traceutil like utility for Pythonwin
+import thread
+import win32trace, win32event, win32api
+from pywin.framework import winout
+
+outputWindow = None
+
+def CollectorThread(stopEvent, file):
+	"""Worker thread: pump win32trace output into 'file' until stopEvent fires.
+
+	'file' is any object with a write() method (here, the WindowOutput below).
+	"""
+	win32trace.InitRead()
+	handle = win32trace.GetHandle()
+	# Run this thread at a lower priority to the main message-loop (and printing output)
+	# thread can keep up
+	import win32process
+	win32process.SetThreadPriority(win32api.GetCurrentThread(), win32process.THREAD_PRIORITY_BELOW_NORMAL)
+
+	try:
+		while 1:
+			# Wait for either trace data (handle) or the stop event.
+			rc = win32event.WaitForMultipleObjects((handle, stopEvent), 0, win32event.INFINITE)
+			if rc == win32event.WAIT_OBJECT_0:
+				# About the only char we can't live with is \0!
+				file.write(win32trace.read().replace("\0", "<null>"))
+			else:
+				# Stop event
+				break
+	finally:
+		win32trace.TermRead()
+		print "Thread dieing"
+
+class WindowOutput(winout.WindowOutput):
+	def __init__(self, *args):
+		apply(winout.WindowOutput.__init__, (self,)+args)
+		self.hStopThread = win32event.CreateEvent(None, 0, 0, None)
+		thread.start_new(CollectorThread, (self.hStopThread, self))
+	def _StopThread(self):
+		win32event.SetEvent(self.hStopThread)
+		self.hStopThread = None
+	def Close(self):
+		self._StopThread()
+		winout.WindowOutput.Close(self)
+#	def OnViewDestroy(self, frame):
+#		return winout.WindowOutput.OnViewDestroy(self, frame)
+#	def Create(self, title=None, style = None):
+#		rc = winout.WindowOutput.Create(self, title, style)
+		return rc
+		
+
+def MakeOutputWindow():
+	"""Create (or reveal) the singleton trace-collector output window."""
+	# Note that it will not show until the first string written or
+	# you pass bShow = 1
+	global outputWindow
+	if outputWindow is None:
+		title = "Python Trace Collector"
+		# queueingFlag doesnt matter, as all output will come from new thread
+		outputWindow = WindowOutput(title, title)
+		# Let people know what this does!
+		msg = """\
+# This window will display output from any programs that import win32traceutil
+# win32com servers registered with '--debug' are in this category.
+"""
+		outputWindow.write(msg)
+	# force existing window open
+	outputWindow.write('')
+	return outputWindow
+
+if __name__=='__main__':
+	# Manual test entry point - just open the collector window.
+	MakeOutputWindow()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/__init__.py
new file mode 100644
index 0000000..139597f9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/__init__.py
@@ -0,0 +1,2 @@
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py
new file mode 100644
index 0000000..98dfb1f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/browseProjects.py
@@ -0,0 +1,257 @@
+import hierlist, string, regutil, os
+import win32con, win32ui, win32api
+import commctrl
+from pywin.mfc import dialog
+import glob
+import pyclbr
+import pywin.framework.scriptutils
+import afxres
+
+class HLIErrorItem(hierlist.HierListItem):
+	def __init__(self, text):
+		self.text = text
+		hierlist.HierListItem.__init__(self)
+	def GetText(self):
+		return self.text
+
+class HLICLBRItem(hierlist.HierListItem):
+	"""Base item for pyclbr-derived entries (classes, methods, functions).
+
+	NOTE(review): unlike sibling items, the base-class __init__ is not called
+	here; harmless today as HierListItem.__init__ is a no-op - confirm.
+	"""
+	def __init__(self, name, file, lineno, suffix = ""):
+		# If the 'name' object itself has a .name, use it.  Not sure
+		# how this happens, but seems pyclbr related. 
+		# See PyWin32 bug 817035
+		self.name = getattr(name, "name", name)
+		self.file = file
+		self.lineno = lineno
+		self.suffix = suffix
+	def __cmp__(self, other):
+		# Sort/compare by display name only.
+		return cmp(self.name, other.name)
+	def GetText(self):
+		return self.name + self.suffix
+	def TakeDefaultAction(self):
+		# Double-click/Enter: jump to the source line when known.
+		if self.file:
+			pywin.framework.scriptutils.JumpToDocument(self.file, self.lineno, bScrollToTop=1)
+		else:
+			win32ui.SetStatusText("The source of this object is unknown")
+	def PerformItemSelected(self):
+		if self.file is None:
+			msg = "%s - source can not be located." % (self.name, )
+		else:
+			msg = "%s defined at line %d of %s" % (self.name, self.lineno, self.file)
+		win32ui.SetStatusText(msg)
+
+class HLICLBRClass(HLICLBRItem):
+	"""Item representing a pyclbr Class (or a bare parent-class name)."""
+	def __init__(self, clbrclass, suffix = ""):
+		try:
+			name = clbrclass.name
+			file = clbrclass.file
+			lineno = clbrclass.lineno
+			self.super = clbrclass.super
+			self.methods = clbrclass.methods
+		except AttributeError:
+			# Parent classes may arrive as plain strings with no location.
+			name = clbrclass
+			file = lineno = None
+			self.super = []; self.methods = {}
+		HLICLBRItem.__init__(self, name, file, lineno, suffix)
+	def GetSubList(self):
+		 ret = []
+		 for c in self.super:
+			 ret.append(HLICLBRClass(c, " (Parent class)"))
+		 for meth, lineno in self.methods.items():
+			 ret.append(HLICLBRMethod(meth, self.file, lineno, " (method)"))
+		 return ret
+	def IsExpandable(self):
+		# Truthy child count - the tree only needs truthiness.
+		return len(self.methods) + len(self.super)
+	def GetBitmapColumn(self):
+		return 21
+
+class HLICLBRFunction(HLICLBRClass):
+	# A module-level function; differs from a class only by its icon.
+	def GetBitmapColumn(self):
+		return 22
+class HLICLBRMethod(HLICLBRItem):
+	# A class method; shares the function icon.
+	def GetBitmapColumn(self):
+		return 22
+
+class HLIModuleItem(hierlist.HierListItem):
+	"""A .py/.pyw file; expands (via pyclbr) to its classes and functions."""
+	def __init__(self, path):
+		hierlist.HierListItem.__init__(self)
+		self.path = path
+	def GetText(self):
+		return os.path.split(self.path)[1] + " (module)"
+	def IsExpandable(self):
+		return 1
+	def TakeDefaultAction(self):
+		win32ui.GetApp().OpenDocumentFile( self.path )
+	def GetBitmapColumn(self):
+		col = 4 # Default
+		try:
+			# Read-only files get a different icon.
+			if win32api.GetFileAttributes(self.path) & win32con.FILE_ATTRIBUTE_READONLY:
+				col = 5
+		except win32api.error:
+			pass
+		return col
+	def GetSubList(self):
+		mod, path = pywin.framework.scriptutils.GetPackageModuleName(self.path)
+		win32ui.SetStatusText("Building class list - please wait...", 1)
+		win32ui.DoWaitCursor(1)
+		try:
+			try:
+				reader = pyclbr.readmodule_ex # Post 1.5.2 interface.
+				extra_msg = " or functions"
+			except AttributeError:
+				reader = pyclbr.readmodule
+				extra_msg = ""
+			data = reader(mod, [path])
+			if data:
+				ret = []
+				for item in data.values():
+					if item.__class__ != pyclbr.Class: # ie, it is a pyclbr Function instance (only introduced post 1.5.2)
+						ret.append(HLICLBRFunction( item, " (function)" ) )
+					else:
+						ret.append(HLICLBRClass( item, " (class)") )
+				ret.sort()
+				return ret
+			else:
+				return [HLIErrorItem("No Python classes%s in module." % (extra_msg,))]
+		finally:
+			# Always restore the cursor and status bar text.
+			win32ui.DoWaitCursor(0)
+			win32ui.SetStatusText(win32ui.LoadString(afxres.AFX_IDS_IDLEMESSAGE))
+
+def MakePathSubList(path):
+	"""Return HLI items for 'path': packages as directories, .py/.pyw as modules."""
+	ret = []
+	for filename in glob.glob(os.path.join(path,'*')):
+		if os.path.isdir(filename) and os.path.isfile(os.path.join(filename, "__init__.py")):
+			ret.append(HLIDirectoryItem(filename, os.path.split(filename)[1]))
+		else:
+			if string.lower(os.path.splitext(filename)[1]) in ['.py', '.pyw']:
+				ret.append(HLIModuleItem(filename))
+	return ret
+
+class HLIDirectoryItem(hierlist.HierListItem):
+	"""A package directory; expands to its sub-packages and modules."""
+	def __init__(self, path, displayName = None, bSubDirs = 0):
+		hierlist.HierListItem.__init__(self)
+		self.path = path
+		self.bSubDirs = bSubDirs	# NOTE(review): stored but never read in this file.
+		if displayName:
+			self.displayName = displayName
+		else:
+			self.displayName = path
+	def IsExpandable(self):
+		return 1
+	def GetText(self):
+		return self.displayName
+
+	def GetSubList(self):
+		ret = MakePathSubList(self.path)
+		if os.path.split(self.path)[1] == "win32com": # Complete and utter hack for win32com.
+			try:
+				path = win32api.GetFullPathName(os.path.join(self.path, "..\\win32comext"))
+				ret = ret + MakePathSubList(path)
+			except win32ui.error:
+				pass
+		return ret
+
+class HLIProjectRoot(hierlist.HierListItem):
+	"""A registered Python path name; expands to the directories on that path."""
+	def __init__(self, projectName, displayName = None):
+		hierlist.HierListItem.__init__(self)
+		self.projectName = projectName
+		self.displayName = displayName or projectName
+	def GetText(self):
+		return self.displayName
+	def IsExpandable(self):
+		return 1
+	def GetSubList(self):
+		paths = regutil.GetRegisteredNamedPath(self.projectName)
+		pathList = string.split(paths,";")
+		if len(pathList)==1: # Single dir - dont bother putting the dir in
+			ret = MakePathSubList(pathList[0])
+		else:
+			ret = map( HLIDirectoryItem, pathList )
+		return ret
+
+class HLIRoot(hierlist.HierListItem):
+	"""Root item: the standard library plus each registered PythonPath subkey."""
+	def __init__(self):
+		hierlist.HierListItem.__init__(self)
+	def IsExpandable(self):
+		return 1
+	def GetSubList(self):
+		keyStr = regutil.BuildDefaultPythonKey() + "\\PythonPath"
+		hKey = win32api.RegOpenKey(regutil.GetRootKey(), keyStr)
+		try:
+			ret = []
+			ret.append(HLIProjectRoot("", "Standard Python Library")) # The core path.
+			index = 0
+			while 1:
+				try:
+					ret.append(HLIProjectRoot(win32api.RegEnumKey(hKey, index)))
+					index = index + 1
+				except win32api.error:
+					# RegEnumKey raises when there are no more subkeys.
+					break
+			return ret
+		finally:
+			win32api.RegCloseKey(hKey)
+
+class dynamic_browser (dialog.Dialog):
+    "Resizable dialog hosting the project tree (template built in code)."
+    style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE
+    cs = (
+        win32con.WS_CHILD           |
+        win32con.WS_VISIBLE         |
+        commctrl.TVS_HASLINES       |
+        commctrl.TVS_LINESATROOT    |
+        commctrl.TVS_HASBUTTONS
+        )
+
+    dt = [
+        ["Python Projects", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")],
+        ["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs]
+        ]
+
+    def __init__ (self, hli_root):
+        dialog.Dialog.__init__ (self, self.dt)
+        self.hier_list = hierlist.HierListWithItems (
+            hli_root,
+            win32ui.IDB_BROWSER_HIER
+            )
+        self.HookMessage (self.on_size, win32con.WM_SIZE)
+
+    def OnInitDialog (self):
+        self.hier_list.HierInit (self)
+        return dialog.Dialog.OnInitDialog (self)
+
+    def on_size (self, params):
+        # Keep the tree control filling the dialog's client area.
+        lparam = params[3]
+        w = win32api.LOWORD(lparam)
+        h = win32api.HIWORD(lparam)
+        self.GetDlgItem (win32ui.IDC_LIST1).MoveWindow((0,0,w,h))
+
+def BrowseDialog():
+    """Open the project browser in a modeless dialog."""
+    root = HLIRoot()
+    if not root.IsExpandable():
+        # NOTE(review): message copied from browser.Browse(); this function
+        # takes no argument and HLIRoot.IsExpandable() always returns 1, so
+        # this branch is effectively dead - confirm before relying on it.
+        raise TypeError, "Browse() argument must have __dict__ attribute, or be a Browser supported type"
+        
+    dlg = dynamic_browser (root)
+    dlg.CreateWindow()
+
+def DockableBrowserCreator(parent):
+	root = HLIRoot()
+	hl = hierlist.HierListWithItems (
+            root,
+            win32ui.IDB_BROWSER_HIER
+            )
+
+	style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS
+
+	control = win32ui.CreateTreeCtrl()
+	control.CreateWindow(style, (0, 0, 150, 300), parent, win32ui.IDC_LIST1)
+	list = hl.HierInit (parent, control)
+	return control
+
+def DockablePathBrowser():
+	"""Create and dock the 'Path Browser' docking bar on the main frame."""
+	import pywin.docking.DockingBar
+	bar = pywin.docking.DockingBar.DockingBar()
+	bar.CreateWindow(win32ui.GetMainFrame(), DockableBrowserCreator, "Path Browser", 0x8e0a)
+	bar.SetBarStyle( bar.GetBarStyle()|afxres.CBRS_TOOLTIPS|afxres.CBRS_FLYBY|afxres.CBRS_SIZE_DYNAMIC)
+	bar.EnableDocking(afxres.CBRS_ALIGN_ANY)
+	win32ui.GetMainFrame().DockControlBar(bar)
+
+# The "default" entry point - Browse() opens the dockable path browser.
+Browse = DockablePathBrowser
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/browser.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/browser.py
new file mode 100644
index 0000000..b464d04
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/browser.py
@@ -0,0 +1,424 @@
+# basic module browser.
+
+# usage:
+# >>> import browser
+# >>> browser.Browse()
+# or
+# >>> browser.Browse(your_module)
+import __main__
+import string
+import win32ui
+from pywin.mfc import dialog
+
+import hierlist
+from types import *
+
+            
+special_names = [ '__doc__', '__name__', '__self__' ]
+
+#
+# HierList items
+class HLIPythonObject(hierlist.HierListItem):
+	"""Generic HierList item wrapping an arbitrary Python object.
+
+	Children are derived from __dict__, __methods__ and __members__;
+	subclasses specialise GetHLIType/GetSubList for particular types.
+	"""
+	def __init__(self, myobject=None, name=None ):
+		hierlist.HierListItem.__init__(self)
+		self.myobject = myobject
+		self.knownExpandable = None	# cached CalculateIsExpandable() result
+		if name:
+			self.name=name
+		else:
+			# No explicit name - derive one from __name__ or a trimmed repr.
+			try:
+				self.name=str(myobject.__name__)
+			except (AttributeError, TypeError):
+				try:
+					r = repr(myobject)
+					if len(r)>20:
+						r = r[:20] + "..."
+					self.name=r
+				except (AttributeError, TypeError):
+					self.name="???"
+	def __cmp__(self, other):
+		# Order by name, then by the wrapped object.
+		rc = cmp(self.name, other.name)
+		if rc==0:
+			rc = cmp(self.myobject, other.myobject)
+		return rc
+	def __repr__(self):
+		try:
+			type = self.GetHLIType()
+		except:
+			type = "Generic"
+		return "HLIPythonObject("+type+") - name: "+ self.name + " object: " + repr(self.myobject)
+	def GetText(self):
+		try:
+			return str(self.name) + ' (' + self.GetHLIType() + ')'
+		except AttributeError:
+			# The base class has no GetHLIType - show the value instead.
+			return str(self.name) + ' = ' + repr(self.myobject)
+	def InsertDocString(self, lst):
+		# Prepend a DocString child when the object has a non-empty __doc__.
+		ob = None
+		try:
+			ob = self.myobject.__doc__
+		except (AttributeError, TypeError):
+			pass
+		if ob:
+			lst.insert(0, HLIDocString( ob, "Doc" ))
+
+	def GetSubList(self):
+		ret = []
+		try:
+			for (key, ob) in self.myobject.__dict__.items():
+				if key not in special_names:
+					ret.append(MakeHLI( ob, key ) )
+		except (AttributeError, TypeError):
+			pass
+		try:
+			for name in self.myobject.__methods__:
+				ret.append(HLIMethod( name ))	# no MakeHLI, as cant auto detect
+		except (AttributeError, TypeError):
+			pass
+		try:
+			for member in self.myobject.__members__:
+				if not member in special_names:
+					ret.append(MakeHLI(getattr(self.myobject, member), member))
+		except (AttributeError, TypeError):
+			pass
+		ret.sort()
+		self.InsertDocString(ret)
+		return ret
+	# if it has a dict, it is expandable.
+	def IsExpandable(self):
+		if self.knownExpandable is None:
+			self.knownExpandable = self.CalculateIsExpandable()
+		return self.knownExpandable
+
+	def CalculateIsExpandable(self):
+		# Expandable when a docstring, __dict__, __methods__ or __members__
+		# would yield at least one child.
+		if hasattr(self.myobject, '__doc__'):
+			return 1
+		try:
+			for key in self.myobject.__dict__.keys():
+				if key not in special_names:
+					return 1
+		except (AttributeError, TypeError):
+			pass
+		try:
+			self.myobject.__methods__
+			return 1
+		except (AttributeError, TypeError):
+			pass
+		try:
+			for item in self.myobject.__members__:
+				if item not in special_names:
+					return 1
+		except (AttributeError, TypeError):
+			pass
+		return 0
+	def GetBitmapColumn(self):
+		if self.IsExpandable():
+			return 0
+		else:
+			return 4
+	def TakeDefaultAction(self):
+		ShowObject(self.myobject, self.name)
+
+
+class HLIDocString(HLIPythonObject):
+	"""Leaf item displaying an object's docstring."""
+	def GetHLIType(self):
+		return "DocString"
+	def GetText(self):
+		return string.strip(self.myobject)
+	def IsExpandable(self):
+		return 0
+	def GetBitmapColumn(self):
+		return 6
+
+class HLIModule(HLIPythonObject):
+	# A module object; generic expansion is inherited.
+	def GetHLIType(self):
+		return "Module"
+
+class HLIFrame(HLIPythonObject):
+	# A stack-frame object; generic expansion is inherited.
+	def GetHLIType(self):
+		return "Stack Frame"
+
+class HLITraceback(HLIPythonObject):
+	# A traceback object; generic expansion is inherited.
+	def GetHLIType(self):
+		return "Traceback"
+
+class HLIClass(HLIPythonObject):
+	"""A class object; children include each base class."""
+	def GetHLIType(self):
+		return "Class"
+	def GetSubList(self):
+		ret = []
+		for base in self.myobject.__bases__:
+			ret.append( MakeHLI(base, 'Base class: ' + base.__name__ ) )
+		ret = ret + HLIPythonObject.GetSubList(self)
+		return ret
+
+class HLIMethod(HLIPythonObject):
+	# myobject is just a string for methods.
+	def GetHLIType(self):
+		return "Method"
+	def GetText(self):
+		return "Method: " + self.myobject + '()'
+
+class HLICode(HLIPythonObject):
+	"""A code object; expands to its interesting co_* attributes."""
+	def GetHLIType(self):
+		return "Code"
+	def IsExpandable(self):
+		# Truthiness of the code object itself.
+		return self.myobject
+	def GetSubList(self):
+		ret = []
+		ret.append( MakeHLI( self.myobject.co_consts, "Constants (co_consts)" ))
+		ret.append( MakeHLI( self.myobject.co_names, "Names (co_names)" ))
+		ret.append( MakeHLI( self.myobject.co_filename, "Filename (co_filename)" ))
+		ret.append( MakeHLI( self.myobject.co_argcount, "Number of args (co_argcount)"))
+		ret.append( MakeHLI( self.myobject.co_varnames, "Param names (co_varnames)"))
+		
+		return ret
+
+class HLIInstance(HLIPythonObject):
+	"""An instance; its class is prepended to the generic children."""
+	def GetHLIType(self):
+		return "Instance"
+	def GetText(self):
+		return str(self.name) + ' (Instance of class ' + str(self.myobject.__class__.__name__) + ')'
+	def IsExpandable(self):
+		return 1
+	def GetSubList(self):
+		ret = []
+		ret.append( MakeHLI( self.myobject.__class__) )
+		ret = ret + HLIPythonObject.GetSubList(self)
+		return ret
+
+
+class HLIBuiltinFunction(HLIPythonObject):
+	# A builtin function; generic expansion is inherited.
+	def GetHLIType(self):
+		return "Builtin Function"
+
+class HLIFunction(HLIPythonObject):
+	def GetHLIType(self):
+		return "Function"
+	def IsExpandable(self):
+		return 1
+	def GetSubList(self):
+		ret = []
+#		ret.append( MakeHLI( self.myobject.func_argcount, "Arg Count" ))
+		try:
+			ret.append( MakeHLI( self.myobject.func_argdefs, "Arg Defs" ))
+		except AttributeError:
+			pass
+		ret.append( MakeHLI( self.myobject.func_code, "Code" ))
+		ret.append( MakeHLI( self.myobject.func_globals, "Globals" ))
+		self.InsertDocString(ret)
+		return ret
+
+class HLISeq(HLIPythonObject):
+	"""Abstract sequence; children are the indexed elements."""
+	def GetHLIType(self):
+		return "Sequence (abstract!)"
+	def IsExpandable(self):
+		return len(self.myobject)>0
+	def GetSubList(self):
+		ret = []
+		pos=0
+		for item in self.myobject:
+			ret.append(MakeHLI( item, '['+str(pos)+']' ) )
+			pos=pos+1
+		self.InsertDocString(ret)
+		return ret
+
+class HLIList(HLISeq):
+	# List; inherits sequence expansion.
+	def GetHLIType(self):
+		return "List"
+
+class HLITuple(HLISeq):
+	# Tuple; inherits sequence expansion.
+	def GetHLIType(self):
+		return "Tuple"
+
+class HLIDict(HLIPythonObject):
+	"""A dictionary; children are its values, named by str(key), sorted."""
+	def GetHLIType(self):
+		return "Dict"
+	def IsExpandable(self):
+		try:
+			self.myobject.__doc__
+			return 1
+		except (AttributeError, TypeError):
+			return len(self.myobject) > 0
+	def GetSubList(self):
+		ret = []
+		keys = self.myobject.keys()
+		keys.sort()
+		for key in keys:
+			ob = self.myobject[key]
+			ret.append(MakeHLI( ob, str(key) ) )
+		self.InsertDocString(ret)
+		return ret
+
+# In Python 1.6, strings and Unicode have builtin methods, but we dont really want to see these
+class HLIString(HLIPythonObject):
+    # Strings are never expanded, despite having methods/attributes.
+    def IsExpandable(self):
+        return 0
+
+# Map builtin type objects to the HLI wrapper class used to display them.
+TypeMap = { ClassType : HLIClass, 
+            FunctionType: HLIFunction,
+            TupleType: HLITuple,
+            DictType: HLIDict,
+            ListType: HLIList,
+            ModuleType: HLIModule,
+            InstanceType : HLIInstance,
+            CodeType : HLICode,
+            BuiltinFunctionType : HLIBuiltinFunction,
+            FrameType : HLIFrame,
+            TracebackType : HLITraceback,
+            StringType : HLIString,
+           }
+try:
+    TypeMap[UnicodeType] = HLIString
+except NameError:
+    pass # Python 1.5 - no Unicode - no problem!
+
+def MakeHLI( ob, name=None ):
+	"""Factory: wrap 'ob' in the most specific HLI* class for its type."""
+	try:
+		cls = TypeMap[type(ob)]
+	except KeyError:
+		if hasattr(ob, '__class__'): # 'new style' class
+			cls = HLIInstance
+		else:
+			cls = HLIPythonObject
+	return cls( ob, name )
+
+#########################################
+#
+# Dialog related.
+
+
+class DialogShowObject(dialog.Dialog):
+	def __init__(self, object, title):
+		self.object = object
+		self.title = title
+		dialog.Dialog.__init__(self, win32ui.IDD_LARGE_EDIT)
+	def OnInitDialog(self):
+		import re
+		self.SetWindowText(self.title)
+		self.edit = self.GetDlgItem(win32ui.IDC_EDIT1)
+		try:
+			strval = str(self.object)
+		except:
+			t, v, tb = sys.exc_info()
+			strval = "Exception getting object value\n\n%s:%s" % (t, v)
+			tb = None
+		strval = re.sub('\n','\r\n', strval)
+		self.edit.ReplaceSel(strval)
+		
+def ShowObject(object, title):
+	"""Display str(object) in a modal dialog titled 'title'."""
+	dlg = DialogShowObject(object, title)
+	dlg.DoModal()
+
+# And some mods for a sizable dialog from Sam Rushing!
+import win32con
+import win32api
+import commctrl
+
+class dynamic_browser (dialog.Dialog):
+    "Resizable object-browser dialog (template built in code)."
+    style = win32con.WS_OVERLAPPEDWINDOW | win32con.WS_VISIBLE
+    cs = (
+        win32con.WS_CHILD           |
+        win32con.WS_VISIBLE         |
+        commctrl.TVS_HASLINES       |
+        commctrl.TVS_LINESATROOT    |
+        commctrl.TVS_HASBUTTONS
+        )
+
+    dt = [
+        ["Python Object Browser", (0, 0, 200, 200), style, None, (8, "MS Sans Serif")],
+        ["SysTreeView32", None, win32ui.IDC_LIST1, (0, 0, 200, 200), cs]
+        ]
+
+    def __init__ (self, hli_root):
+        dialog.Dialog.__init__ (self, self.dt)
+        self.hier_list = hierlist.HierListWithItems (
+            hli_root,
+            win32ui.IDB_BROWSER_HIER
+            )
+        self.HookMessage (self.on_size, win32con.WM_SIZE)
+
+    def OnInitDialog (self):
+        self.hier_list.HierInit (self)
+        return dialog.Dialog.OnInitDialog (self)
+
+    def OnOK(self):
+        # Tear down the hier list (unhooks notifications) before closing.
+        self.hier_list.HierTerm()
+        self.hier_list = None
+        return self._obj_.OnOK()
+    def OnCancel(self):
+        self.hier_list.HierTerm()
+        self.hier_list = None
+        return self._obj_.OnCancel()
+
+    def on_size (self, params):
+        # Keep the tree control filling the dialog's client area.
+        lparam = params[3]
+        w = win32api.LOWORD(lparam)
+        h = win32api.HIWORD(lparam)
+        self.GetDlgItem (win32ui.IDC_LIST1).MoveWindow((0,0,w,h))
+
+def Browse (ob=__main__):
+    " Browse the argument, or the main dictionary "
+    root = MakeHLI (ob, 'root')
+    if not root.IsExpandable():
+        raise TypeError, "Browse() argument must have __dict__ attribute, or be a Browser supported type"
+        
+    # Modeless - the dialog stays up after this returns.
+    dlg = dynamic_browser (root)
+    dlg.CreateWindow()
+
+#
+#
+# Classes for using the browser in an MDI window, rather than a dialog
+#
+from pywin.mfc import docview
+class BrowserTemplate(docview.DocTemplate):
+	"""Doc template for MDI browser windows - one document per root item."""
+	def __init__(self):
+		docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, BrowserDocument, None, BrowserView)
+
+	def OpenObject(self, root): # Use this instead of OpenDocumentFile.
+		# Look for existing open document
+		for doc in self.GetDocumentList():
+			if doc.root==root:
+				doc.GetFirstView().ActivateFrame()
+				return doc
+		# not found - new one.
+		doc = BrowserDocument(self, root)
+		frame = self.CreateNewFrame(doc)
+		doc.OnNewDocument()
+		self.InitialUpdateFrame(frame, doc, 1)
+		return doc
+
+class BrowserDocument (docview.Document):
+	"""MDI document holding the root HLI item being browsed."""
+	def __init__(self, template, root):
+		docview.Document.__init__(self, template)
+		self.root = root
+		self.SetTitle("Browser: " + root.name)
+	def OnOpenDocument (self, name):
+		raise TypeError, "This template can not open files"
+		return 0	# unreachable - follows the raise above
+
+class BrowserView(docview.TreeView):
+	"""Tree view bound to the document's root HLI item."""
+	def OnInitialUpdate(self):
+		import commctrl
+		rc = self._obj_.OnInitialUpdate()
+		list=hierlist.HierListWithItems( self.GetDocument().root, win32ui.IDB_BROWSER_HIER, win32ui.AFX_IDW_PANE_FIRST)
+		list.HierInit(self.GetParent())
+		list.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS)
+		return rc
+
+template = None	# lazily-created singleton BrowserTemplate
+def MakeTemplate():
+	"""Create the shared BrowserTemplate on first use."""
+	global template
+	if template is None:
+		template = BrowserTemplate() #win32ui.IDR_PYTHONTYPE, BrowserDocument, None, BrowserView)
+
+def BrowseMDI(ob=__main__):
+	"""Browse an object using an MDI window.
+	"""
+
+	MakeTemplate()
+	root = MakeHLI(ob, repr(ob))
+	if not root.IsExpandable():
+		raise TypeError, "Browse() argument must have __dict__ attribute, or be a Browser supported type"
+		
+	# Re-activates an existing window for the same root, else opens a new one.
+	template.OpenObject(root)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/hierlist.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/hierlist.py
new file mode 100644
index 0000000..82404201
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/hierlist.py
@@ -0,0 +1,296 @@
+# hierlist
+#
+# IMPORTANT - Please read before using.
+
+# This module exposes an API for a Hierarchical Tree Control.
+# Previously, a custom tree control was included in Pythonwin which
+# has an API very similar to this.
+
+# The current control used is the common "Tree Control".  This module exists now
+# to provide an API similar to the old control, but for the new Tree control.
+
+# If you need to use the Tree Control, you may still find this API a reasonable
+# choice.  However, you should investigate using the tree control directly
+# to provide maximum flexibility (but with extra work).
+
+
+import win32ui 
+import win32con
+import win32api
+from win32api import RGB
+
+from pywin.mfc import object, window, docview, dialog
+import commctrl
+
+# helper to get the text of an arbitrary item
+def GetItemText(item):
+	"""Return a display string for 'item'; tuples/lists use their first
+	element, non-strings fall back to repr().
+
+	NOTE(review): the fallback is repr(item), not repr(use) - a tuple whose
+	first element is not a string displays the whole tuple; confirm intended.
+	"""
+	if type(item)==type(()) or type(item)==type([]):
+		use = item[0]
+	else:
+		use = item
+	if type(use)==type(''):
+		return use
+	else:
+		return repr(item)
+
+
+class HierDialog(dialog.Dialog):
+	"""Dialog hosting a HierList in a standard tree dialog template."""
+	def __init__(self, title, hierList, bitmapID = win32ui.IDB_HIERFOLDERS, dlgID = win32ui.IDD_TREE, dll = None, childListBoxID = win32ui.IDC_LIST1):
+		dialog.Dialog.__init__(self, dlgID, dll )	# reuse this dialog.
+		self.hierList=hierList
+		self.dlgID = dlgID
+		self.title=title
+#		self.childListBoxID = childListBoxID
+	def OnInitDialog(self):
+		self.SetWindowText(self.title)
+		self.hierList.HierInit(self)
+		return dialog.Dialog.OnInitDialog(self)
+
+class HierList(object.Object):
+	def __init__(self, root, bitmapID = win32ui.IDB_HIERFOLDERS, listBoxId = None, bitmapMask = None): # used to create object.
+		self.list = None
+		self.bitmapID = bitmapID
+		self.root = root
+		self.listBoxId = listBoxId
+		self.itemHandleMap = {}
+		self.filledItemHandlesMap = {}
+		self.bitmapMask = bitmapMask
+	def __getattr__(self, attr):
+		l = self.__dict__['list']
+		if l is not None:
+			return getattr(l, attr)
+	def ItemFromHandle(self, handle):
+		return self.itemHandleMap[handle]
+	def SetStyle(self, newStyle):
+		hwnd = self.list.GetSafeHwnd()
+		style = win32api.GetWindowLong(hwnd, win32con.GWL_STYLE);
+		win32api.SetWindowLong(hwnd, win32con.GWL_STYLE, (style | newStyle) )
+
+	def HierInit(self, parent, listControl = None ):	# Used when window first exists.
+		# this also calls "Create" on the listbox.
+		# params - id of listbbox, ID of bitmap, size of bitmaps
+		if self.bitmapMask is None:
+			bitmapMask = RGB(0,0,255)
+		else:
+			bitmapMask = self.bitmapMask
+		self.imageList = win32ui.CreateImageList(self.bitmapID, 16, 0, bitmapMask)
+		if listControl is None:
+			if self.listBoxId is None: self.listBoxId = win32ui.IDC_LIST1
+			self.list = parent.GetDlgItem(self.listBoxId)
+		else:
+			self.list = listControl
+			lbid = listControl.GetDlgCtrlID()
+			assert self.listBoxId is None or self.listBoxId == lbid, "An invalid listbox control ID has been specified (specified as %s, but exists as %s)" % (self.listBoxId, lbid)
+			self.listBoxId = lbid
+		self.list.SetImageList(self.imageList, commctrl.LVSIL_NORMAL)
+#		self.list.AttachObject(self)
+		parent.HookNotify(self.OnTreeItemExpanding, commctrl.TVN_ITEMEXPANDING)
+		parent.HookNotify(self.OnTreeItemSelChanged, commctrl.TVN_SELCHANGED)
+		parent.HookNotify(self.OnTreeItemDoubleClick, commctrl.NM_DBLCLK)
+		self.notify_parent = parent
+
+		if self.root:
+			self.AcceptRoot(self.root)
+
+	def DeleteAllItems(self):
+		self.list.DeleteAllItems()
+		self.root = None
+		self.itemHandleMap = {}
+		self.filledItemHandlesMap = {}
+		
+	def HierTerm(self):
+		# Dont want notifies as we kill the list.
+		parent = self.notify_parent # GetParentFrame()
+		parent.HookNotify(None, commctrl.TVN_ITEMEXPANDING)
+		parent.HookNotify(None, commctrl.TVN_SELCHANGED)
+		parent.HookNotify(None, commctrl.NM_DBLCLK)
+		self.DeleteAllItems()
+		self.list = None
+		self.notify_parent = None # Break a possible cycle
+
+	def OnTreeItemDoubleClick(self,(hwndFrom, idFrom, code), extra):
+		if idFrom != self.listBoxId: return None
+		item = self.itemHandleMap[self.list.GetSelectedItem()]
+		self.TakeDefaultAction(item)
+		return 1
+
+	def OnTreeItemExpanding(self,(hwndFrom, idFrom, code), extra):
+		if idFrom != self.listBoxId: return None
+		action, itemOld, itemNew, pt = extra
+		itemHandle = itemNew[0]
+		if not self.filledItemHandlesMap.has_key(itemHandle):
+			item = self.itemHandleMap[itemHandle]
+			self.AddSubList(itemHandle, self.GetSubList(item))
+			self.filledItemHandlesMap[itemHandle] = None
+		return 0
+
+	def OnTreeItemSelChanged(self,(hwndFrom, idFrom, code), extra):
+		if idFrom != self.listBoxId: return None
+		action, itemOld, itemNew, pt = extra
+		itemHandle = itemNew[0]
+		item = self.itemHandleMap[itemHandle]
+		self.PerformItemSelected(item)
+		return 1
+
+	def AddSubList(self, parentHandle, subItems):
+		for item in subItems:
+			self.AddItem(parentHandle, item)
+
+	def AddItem(self, parentHandle, item, hInsertAfter = commctrl.TVI_LAST):
+		text = self.GetText(item)
+#			hitem = self.list.InsertItem(text, 0, 1)
+		if self.IsExpandable(item):
+			cItems = 1 # Trick it !!
+		else:
+			cItems = 0
+		bitmapCol = self.GetBitmapColumn(item)
+		bitmapSel = self.GetSelectedBitmapColumn(item)
+		if bitmapSel is None: bitmapSel = bitmapCol
+		hitem = self.list.InsertItem(parentHandle, hInsertAfter, (None, None, None, text, bitmapCol, bitmapSel, cItems, 0))
+		self.itemHandleMap[hitem] = item
+		return hitem
+
+	def _GetChildHandles(self, handle):
+		ret = []
+		try:
+			handle = self.list.GetChildItem(handle)
+			while 1:
+				ret.append(handle)
+				handle = self.list.GetNextItem(handle, commctrl.TVGN_NEXT)
+		except win32ui.error:
+			# out of children
+			pass
+		return ret
+	def ItemFromHandle(self, handle):
+		return self.itemHandleMap[handle]
+
+	def Refresh(self, hparent = None):
+		# Attempt to refresh the given item's sub-entries, but maintain the tree state
+		# (ie, the selected item, expanded items, etc)
+		if hparent is None: hparent = commctrl.TVI_ROOT
+		if not self.filledItemHandlesMap.has_key(hparent):
+			# This item has never been expanded, so no refresh can possibly be required.
+			return
+		root_item = self.itemHandleMap[hparent]
+		old_handles = self._GetChildHandles(hparent)
+		old_items = map( self.ItemFromHandle, old_handles )
+		new_items = self.GetSubList(root_item)
+		# Now an inefficient technique for synching the items.
+		inew = 0
+		hAfter = commctrl.TVI_FIRST
+		for iold in range(len(old_items)):
+			inewlook = inew
+			matched = 0
+			while inewlook < len(new_items):
+				if old_items[iold] == new_items[inewlook]:
+					matched = 1
+					break
+				inewlook = inewlook + 1
+			if matched:
+				# Insert the new items.
+#				print "Inserting after", old_items[iold], old_handles[iold]
+				for i in range(inew, inewlook):
+#					print "Inserting index %d (%s)" % (i, new_items[i])
+					hAfter = self.AddItem(hparent, new_items[i], hAfter)
+					
+				inew = inewlook + 1
+				# And recursively refresh iold
+				hold = old_handles[iold]
+				if self.filledItemHandlesMap.has_key(hold):
+					self.Refresh(hold)
+			else:
+				# Remove the deleted items.
+#				print "Deleting %d (%s)" % (iold, old_items[iold])
+				hdelete = old_handles[iold]
+				# First recurse and remove the children from the map.
+				for hchild in self._GetChildHandles(hdelete):
+					del self.itemHandleMap[hchild]
+					if self.filledItemHandlesMap.has_key(hchild):
+						del self.filledItemHandlesMap[hchild]
+				self.list.DeleteItem(hdelete)
+			hAfter = old_handles[iold]
+		# Fill any remaining new items:
+		for newItem in new_items[inew:]:
+#			print "Inserting new item", newItem
+			self.AddItem(hparent, newItem)
+	def AcceptRoot(self, root):
+		self.list.DeleteAllItems()
+		self.itemHandleMap = {commctrl.TVI_ROOT : root}
+		self.filledItemHandlesMap = {commctrl.TVI_ROOT : root}
+		subItems = self.GetSubList(root)
+		self.AddSubList(0, subItems)
+
+	def GetBitmapColumn(self, item):
+		if self.IsExpandable(item):
+			return 0
+		else:
+			return 4
+	def GetSelectedBitmapColumn(self, item):
+		return None # Use standard.
+
+	def GetSelectedBitmapColumn(self, item):
+		return 0
+
+	def CheckChangedChildren(self):
+		return self.list.CheckChangedChildren()
+	def GetText(self,item):
+		return GetItemText(item)
+	def PerformItemSelected(self, item):
+		try:
+			win32ui.SetStatusText('Selected ' + self.GetText(item))
+		except win32ui.error: # No status bar!
+			pass
+	def TakeDefaultAction(self, item):
+		win32ui.MessageBox('Got item ' + self.GetText(item))
+
+##########################################################################
+#
+# Classes for use with seperate HierListItems.
+#
+#
+class HierListWithItems(HierList):
+	"""HierList variant that delegates per-item queries to the items themselves."""
+	def __init__(self, root, bitmapID = win32ui.IDB_HIERFOLDERS, listBoxID = None, bitmapMask = None): # used to create object.
+		HierList.__init__(self, root, bitmapID, listBoxID, bitmapMask )
+	def DelegateCall( self, fn):
+		return fn()
+	def GetBitmapColumn(self, item):
+		rc = self.DelegateCall(item.GetBitmapColumn)
+		if rc is None:
+			# Item declined - fall back to the default column logic.
+			rc = HierList.GetBitmapColumn(self, item)
+		return rc
+	def GetSelectedBitmapColumn(self, item):
+		return self.DelegateCall(item.GetSelectedBitmapColumn)
+	def IsExpandable(self, item):
+		return self.DelegateCall( item.IsExpandable)
+	def GetText(self, item):
+		return self.DelegateCall( item.GetText )
+	def GetSubList(self, item):
+		return self.DelegateCall(item.GetSubList)
+	def PerformItemSelected(self, item):
+		func = getattr(item, "PerformItemSelected", None)
+		if func is None:
+			return HierList.PerformItemSelected( self, item )
+		else:
+			return self.DelegateCall(func)
+
+	def TakeDefaultAction(self, item):
+		func = getattr(item, "TakeDefaultAction", None)
+		if func is None:
+			return HierList.TakeDefaultAction( self, item )
+		else:
+			return self.DelegateCall(func)
+
+# A hier list item - for use with a HierListWithItems
+class HierListItem:
+	def __init__(self):
+		pass
+	def GetText(self):
+		pass
+	def GetSubList(self):
+		pass
+	def IsExpandable(self):
+		pass
+	def GetBitmapColumn(self):
+		return None	# indicate he should do it.
+	def GetSelectedBitmapColumn(self):
+		return None	# same as other
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/regedit.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/regedit.py
new file mode 100644
index 0000000..e14ff02
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/regedit.py
@@ -0,0 +1,329 @@
+# Regedit - a Registry Editor for Python
+import win32api, win32ui, win32con, commctrl
+from pywin.mfc import window, docview, dialog
+import hierlist
+import regutil
+import string
+
def SafeApply( fn, args, err_desc = "" ):
	# Apply fn to args, reporting any win32api.error to the user in a
	# message box instead of propagating it.
	# Returns 1 on success, 0 on failure.
	try:
		apply(fn, args)
		return 1
	except win32api.error, (rc, fn, msg):
		# Python 2-only tuple-unpacking except clause.  NOTE(review):
		# this rebinds the local 'fn' to the API name reported by
		# win32api - presumably intentional, but confirm.
		msg = "Error " + err_desc + "\r\n\r\n" + msg
		win32ui.MessageBox(msg)
		return 0
+
class SplitterFrame(window.MDIChildWnd):
	# MDI child frame split into a key tree (left) and a value list (right).
	def __init__(self):
		# call base CreateFrame
		self.images = None	# released in OnDestroy
		window.MDIChildWnd.__init__(self)

	def OnCreateClient(self, cp, context):
		# Build a 1x2 static splitter holding the tree and list views.
		splitter = win32ui.CreateSplitter()
		doc = context.doc
		frame_rect = self.GetWindowRect()
		size = ((frame_rect[2] - frame_rect[0]),
		        (frame_rect[3] - frame_rect[1])/2)
		sub_size = (size[0]/3, size[1])
		splitter.CreateStatic (self, 1, 2)
		# CTreeControl view
		self.keysview = RegistryTreeView(doc)
		# CListControl view
		self.valuesview = RegistryValueView(doc)

		splitter.CreatePane (self.keysview, 0, 0, (sub_size))
		splitter.CreatePane (self.valuesview, 0, 1, (0,0)) # size ignored.
		splitter.SetRowInfo(0, size[1] ,0)
		# Setup items in the imagelist

		return 1

	def OnItemDoubleClick(self,(hwndFrom, idFrom, code), extra):
		# NM_DBLCLK handler (Python 2 tuple-parameter syntax).
		if idFrom==win32ui.AFX_IDW_PANE_FIRST:
			# Tree control
			return None
		elif idFrom==win32ui.AFX_IDW_PANE_FIRST + 1:
			# Double-click in the value pane edits the selected value.
			item = self.keysview.SelectedItem()
			self.valuesview.EditValue(item)
			return 0
			# List control
		else:
			return None # Pass it on

	def PerformItemSelected(self,item):
		# Tree selection changed - refresh the value pane for the new key.
		return self.valuesview.UpdateForRegItem(item)

	def OnDestroy(self, msg):
		window.MDIChildWnd.OnDestroy(self, msg)
		if self.images:
			self.images.DeleteImageList()
			self.images = None
+
class RegistryTreeView(docview.TreeView):
	# Left pane: tree of registry keys with a context menu for
	# adding/deleting keys and values.
	def OnInitialUpdate(self):
		rc = self._obj_.OnInitialUpdate()
		self.frame = self.GetParent().GetParent()
		self.hierList = hierlist.HierListWithItems( self.GetHLIRoot(), win32ui.IDB_HIERFOLDERS, win32ui.AFX_IDW_PANE_FIRST)
		self.hierList.HierInit(self.frame, self.GetTreeCtrl())
		self.hierList.SetStyle(commctrl.TVS_HASLINES | commctrl.TVS_LINESATROOT | commctrl.TVS_HASBUTTONS)
		# Route selection events through this view (which forwards to the frame).
		self.hierList.PerformItemSelected = self.PerformItemSelected

		self.frame.HookNotify(self.frame.OnItemDoubleClick, commctrl.NM_DBLCLK)
		self.frame.HookNotify(self.OnItemRightClick, commctrl.NM_RCLICK)
#		self.HookMessage(self.OnItemRightClick, win32con.WM_RBUTTONUP)

	def GetHLIRoot(self):
		# Build the root hierarchy item from the document's root/subkey.
		doc = self.GetDocument()
		regroot = doc.root
		subkey = doc.subkey
		return HLIRegistryKey(regroot, subkey, "Root")

	def OnItemRightClick(self, notify_data, extra):
		# First select the item we right-clicked on.
		pt = self.ScreenToClient(win32api.GetCursorPos())
		flags, hItem = self.HitTest(pt)
		if hItem==0 or commctrl.TVHT_ONITEM & flags==0:
			return None
		self.Select(hItem, commctrl.TVGN_CARET)

		# Pop up the Add Key / Add Value / Delete Key menu.
		menu = win32ui.CreatePopupMenu()
		menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1000, "Add Key")
		menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1001, "Add Value")
		menu.AppendMenu(win32con.MF_STRING|win32con.MF_ENABLED,1002, "Delete Key")
		self.HookCommand(self.OnAddKey, 1000)
		self.HookCommand(self.OnAddValue, 1001)
		self.HookCommand(self.OnDeleteKey, 1002)
		menu.TrackPopupMenu(win32api.GetCursorPos()) # track at mouse position.
		return None

	def OnDeleteKey(self,command, code):
		# Confirm, delete the selected key, then refresh its parent node.
		hitem = self.hierList.GetSelectedItem()
		item = self.hierList.ItemFromHandle(hitem)
		msg = "Are you sure you wish to delete the key '%s'?" % (item.keyName,)
		id = win32ui.MessageBox(msg, None, win32con.MB_YESNO)
		if id != win32con.IDYES:
			return
		if SafeApply(win32api.RegDeleteKey, (item.keyRoot, item.keyName), "deleting registry key" ):
			# Get the items parent.
			try:
				hparent = self.GetParentItem(hitem)
			except win32ui.error:
				hparent = None	# no parent - refresh from the root.
			self.hierList.Refresh(hparent)

	def OnAddKey(self,command, code):
		from pywin.mfc import dialog
		val = dialog.GetSimpleInput("New key name", '', "Add new key")
		if val is None: return # cancelled.
		hitem = self.hierList.GetSelectedItem()
		item = self.hierList.ItemFromHandle(hitem)
		if SafeApply(win32api.RegCreateKey, (item.keyRoot, item.keyName + "\\" + val)):
			self.hierList.Refresh(hitem)

	def OnAddValue(self,command, code):
		from pywin.mfc import dialog
		val = dialog.GetSimpleInput("New value", "", "Add new value")
		if val is None: return # cancelled.
		hitem = self.hierList.GetSelectedItem()
		item = self.hierList.ItemFromHandle(hitem)
		if SafeApply(win32api.RegSetValue, (item.keyRoot, item.keyName, win32con.REG_SZ, val)):
			# Simply re-select the current item to refresh the right splitter.
			self.PerformItemSelected(item)
#			self.Select(hitem, commctrl.TVGN_CARET)

	def PerformItemSelected(self, item):
		# Forward to the frame, which updates the value pane.
		return self.frame.PerformItemSelected(item)

	def SelectedItem(self):
		return self.hierList.ItemFromHandle(self.hierList.GetSelectedItem())

	def SearchSelectedItem(self):
		# Linear scan of the top-level children for the selected item.
		# NOTE(review): loops forever if no sibling is selected -
		# presumably a selection always exists when called; confirm.
		handle = self.hierList.GetChildItem(0)
		while 1:
#			print "State is", self.hierList.GetItemState(handle, -1)
			if self.hierList.GetItemState(handle, commctrl.TVIS_SELECTED):
#				print "Item is ", self.hierList.ItemFromHandle(handle)
				return self.hierList.ItemFromHandle(handle)
			handle = self.hierList.GetNextSiblingItem(handle)
+
class RegistryValueView(docview.ListView):
	# Right pane: report-style list of the values under the selected key.
	def OnInitialUpdate(self):
		# Force the list control into report view and create the columns.
		hwnd = self._obj_.GetSafeHwnd()
		style = win32api.GetWindowLong(hwnd, win32con.GWL_STYLE);
		win32api.SetWindowLong(hwnd, win32con.GWL_STYLE, (style & ~commctrl.LVS_TYPEMASK) | commctrl.LVS_REPORT); 

		itemDetails = (commctrl.LVCFMT_LEFT, 100, "Name", 0)
		self.InsertColumn(0, itemDetails)
		itemDetails = (commctrl.LVCFMT_LEFT, 500, "Data", 0)
		self.InsertColumn(1, itemDetails)

	def UpdateForRegItem(self, item):
		# Repopulate the list with the values of the given key item.
		self.DeleteAllItems()
		hkey = win32api.RegOpenKey(item.keyRoot, item.keyName)
		try:
			valNum = 0
			ret = []
			while 1:
				try:
					res = win32api.RegEnumValue(hkey, valNum)
				except win32api.error:
					break	# no more values.
				name = res[0]
				if not name: name = "(Default)"
				self.InsertItem(valNum, name)
				self.SetItemText(valNum, 1, str(res[1]))
				valNum = valNum + 1
		finally:
			win32api.RegCloseKey(hkey)
	def EditValue(self, item):
		# Edit the current value
		class EditDialog(dialog.Dialog):
			# Large-edit dialog that captures the new text on close.
			def __init__(self, item):
				self.item = item
				dialog.Dialog.__init__(self, win32ui.IDD_LARGE_EDIT)
			def OnInitDialog(self):
				self.SetWindowText("Enter new value")
				self.GetDlgItem(win32con.IDCANCEL).ShowWindow(win32con.SW_SHOW)
				self.edit = self.GetDlgItem(win32ui.IDC_EDIT1)
				# Modify the edit windows style
				style = win32api.GetWindowLong(self.edit.GetSafeHwnd(), win32con.GWL_STYLE)
				style = style & (~win32con.ES_WANTRETURN)
				win32api.SetWindowLong(self.edit.GetSafeHwnd(), win32con.GWL_STYLE, style)
				self.edit.SetWindowText(str(self.item))
				self.edit.SetSel(-1)
				return dialog.Dialog.OnInitDialog(self)
			def OnDestroy(self,msg):
				# Grab the text before the control is destroyed.
				self.newvalue = self.edit.GetWindowText()
		
		try:
			index = self.GetNextItem(-1, commctrl.LVNI_SELECTED)
		except win32ui.error:
			return # No item selected.

		# Row 0 is the "(Default)" value, whose real registry name is "".
		if index==0:
			keyVal = ""
		else:
			keyVal = self.GetItemText(index,0)
		# Query for a new value.
		try:
			newVal = self.GetItemsCurrentValue(item, keyVal)
		except TypeError, details:
			win32ui.MessageBox(details)
			return
		
		d = EditDialog(newVal)
		if d.DoModal()==win32con.IDOK:
			try:
				self.SetItemsCurrentValue(item, keyVal, d.newvalue)
			except win32api.error, (rc, fn, desc):
				win32ui.MessageBox("Error setting value\r\n\n%s" % desc)
			self.UpdateForRegItem(item)

	def GetItemsCurrentValue(self, item, valueName):
		# Return the current string data for valueName under the item's key.
		# Raises TypeError for non-REG_SZ values, which cannot be edited here.
		hkey = win32api.RegOpenKey(item.keyRoot, item.keyName)
		try:
			val, type = win32api.RegQueryValueEx(hkey, valueName)
			if type != win32con.REG_SZ:
				raise TypeError, "Only strings can be edited"
			return val
		finally:
			win32api.RegCloseKey(hkey)
	
	def SetItemsCurrentValue(self, item, valueName, value):
		# ** Assumes already checked is a string.
		hkey = win32api.RegOpenKey(item.keyRoot, item.keyName , 0, win32con.KEY_SET_VALUE)
		try:
			win32api.RegSetValueEx(hkey, valueName, 0, win32con.REG_SZ, value)
		finally:
			win32api.RegCloseKey(hkey)
+
+
class RegTemplate(docview.DocTemplate):
	# Document template creating SplitterFrame windows for registry keys.
	def __init__(self):
		docview.DocTemplate.__init__(self, win32ui.IDR_PYTHONTYPE, None, SplitterFrame, None)

#	def InitialUpdateFrame(self, frame, doc, makeVisible=1):
#		self._obj_.InitialUpdateFrame(frame, doc, makeVisible) # call default handler.
#		frame.InitialUpdateFrame(doc, makeVisible)

	def OpenRegistryKey(self, root = None, subkey = None): # Use this instead of OpenDocumentFile.
		# Re-activate an existing window for this root/subkey if one is
		# open; otherwise create a new document and frame.
		# Look for existing open document
		if root is None: root = regutil.GetRootKey()
		if subkey is None: subkey = regutil.BuildDefaultPythonKey()
		for doc in self.GetDocumentList():
			if doc.root==root and doc.subkey==subkey:
				doc.GetFirstView().ActivateFrame()
				return doc
		# not found - new one.
		doc = RegDocument(self, root, subkey)
		frame = self.CreateNewFrame(doc)
		doc.OnNewDocument()
		self.InitialUpdateFrame(frame, doc, 1)
		return doc
+
+class RegDocument (docview.Document):
+	def __init__(self, template, root, subkey):
+		docview.Document.__init__(self, template)
+		self.root = root
+		self.subkey = subkey
+		self.SetTitle("Registry Editor: " + subkey)
+
+	def OnOpenDocument (self, name):
+		raise TypeError, "This template can not open files"
+		return 0
+		
+
class HLIRegistryKey(hierlist.HierListItem):
	# Hierarchy item representing a single registry key.
	def __init__( self, keyRoot, keyName, userName ):
		self.keyRoot = keyRoot		# root key handle (passed to RegOpenKey)
		self.keyName = keyName		# subkey path under keyRoot
		self.userName = userName	# display text for the tree
		hierlist.HierListItem.__init__(self)
	def __cmp__(self, other):
		# Order by root, then key path, then display name (Py2 protocol).
		rc = cmp(self.keyRoot, other.keyRoot)
		if rc==0:
			rc = cmp(self.keyName, other.keyName)
		if rc==0:
			rc = cmp(self.userName, other.userName)
		return rc
	def __repr__(self):
		return "<%s with root=%s, key=%s>" % (self.__class__.__name__, self.keyRoot, self.keyName)
	def GetText(self):
		return self.userName
	def IsExpandable(self):
		# All keys are expandable, even if they currently have zero children.
		return 1
##		hkey = win32api.RegOpenKey(self.keyRoot, self.keyName)
##		try:
##			keys, vals, dt = win32api.RegQueryInfoKey(hkey)
##			return (keys>0)
##		finally:
##			win32api.RegCloseKey(hkey)

	def GetSubList(self):
		# Enumerate the child keys, showing a wait cursor while we work.
		hkey = win32api.RegOpenKey(self.keyRoot, self.keyName)
		win32ui.DoWaitCursor(1)
		try:
			keyNum = 0
			ret = []
			while 1:
				try:
					key = win32api.RegEnumKey(hkey, keyNum)
				except win32api.error:
					break	# no more subkeys.
				ret.append(HLIRegistryKey(self.keyRoot, self.keyName + "\\" + key, key))
				keyNum = keyNum + 1
		finally:
			win32api.RegCloseKey(hkey)
			win32ui.DoWaitCursor(0)
		return ret
+
# The single shared document template for registry editor windows.
template = RegTemplate()

def EditRegistry(root = None, key = None):
	# Open (or re-activate) a registry editor window on the given key.
	# None defaults are resolved inside RegTemplate.OpenRegistryKey.
	doc=template.OpenRegistryKey(root, key)

if __name__=='__main__':
	EditRegistry()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/regpy.py b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/regpy.py
new file mode 100644
index 0000000..79b3f3e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/pywin/tools/regpy.py
@@ -0,0 +1,53 @@
+# (sort-of) Registry editor
+import win32ui
+import dialog
+import win32con
+import commctrl
+
class RegistryControl:
	"""Minimal holder associating a control with a registry key."""
	def __init__(self, key):
		# Remember the registry key this control operates on.
		self.key = key
+
class RegEditPropertyPage(dialog.PropertyPage):
	"""Property page hosting a single list view control."""
	IDC_LISTVIEW = 1000
	def GetTemplate(self):
		"Return the template used to create this dialog"
		dlg_w = 152  # Dialog width
		dlg_h = 122  # Dialog height
		child_std = win32con.WS_CHILD | win32con.WS_VISIBLE
		frame_std = win32con.WS_CAPTION | win32con.WS_SYSMENU
		dlg_style = frame_std | win32con.WS_VISIBLE | win32con.DS_SETFONT | win32con.WS_MINIMIZEBOX
		# First entry is the dialog itself; caption comes from the subclass.
		tmpl = [[self.caption, (0, 0, dlg_w, dlg_h), dlg_style, None, (8, 'Helv')], ]
		lv_style = child_std | commctrl.LVS_EDITLABELS | commctrl.LVS_REPORT | commctrl.LVS_AUTOARRANGE | commctrl.LVS_ALIGNLEFT | win32con.WS_BORDER | win32con.WS_TABSTOP
		tmpl.append(["SysListView32", "", self.IDC_LISTVIEW, (10, 10, 185, 100), lv_style])
		return tmpl
+
class RegistryPage(RegEditPropertyPage):
	# Concrete page showing App/Paths columns.
	def __init__(self):
		self.caption="Path"	# read by GetTemplate for the dialog caption.
		RegEditPropertyPage.__init__(self, self.GetTemplate())
	def OnInitDialog(self):
		self.listview = self.GetDlgItem(self.IDC_LISTVIEW)
		RegEditPropertyPage.OnInitDialog(self)
		# Setup the listview columns
		itemDetails = (commctrl.LVCFMT_LEFT, 100, "App", 0)
		self.listview.InsertColumn(0, itemDetails)
		itemDetails = (commctrl.LVCFMT_LEFT, 1024, "Paths", 0)
		self.listview.InsertColumn(1, itemDetails)

		# Placeholder row - no real registry data is populated here.
		index = self.listview.InsertItem(0,"App")
		self.listview.SetItemText(index, 1, "Path")
+
+
+class RegistrySheet(dialog.PropertySheet):
+	def __init__(self, title):
+		dialog.PropertySheet.__init__(self, title)
+		self.HookMessage(self.OnActivate, win32con.WM_ACTIVATE)
+	def OnActivate(self, msg):
+		print "OnAcivate"
+
def t():
	"""Quick manual test - show the registry property sheet modally."""
	sheet = RegistrySheet('Registry Settings')
	sheet.AddPage(RegistryPage())
	sheet.DoModal()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/scintilla.dll b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/scintilla.dll
new file mode 100644
index 0000000..15324b5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/scintilla.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/win32ui.pyd b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/win32ui.pyd
new file mode 100644
index 0000000..d04eca3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/win32ui.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/win32uiole.pyd b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/win32uiole.pyd
new file mode 100644
index 0000000..b756b71
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pythonwin/win32uiole.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/pywin32.pth b/depot_tools/release/win/python_24/Lib/site-packages/pywin32.pth
new file mode 100644
index 0000000..49ed1b4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/pywin32.pth
@@ -0,0 +1,4 @@
+# .pth file for the PyWin32 extensions

+win32

+win32\lib

+Pythonwin

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/__init__.py
new file mode 100644
index 0000000..9773a43
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/__init__.py
@@ -0,0 +1,20 @@
+#
+# __init__.py: defines this directory as the 'svn' package
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
# Submodules pulled in by 'from svn import *'.
__all__ = ['core', 'client', 'delta', 'fs', 'ra', 'repos', 'wc']

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/client.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/client.py
new file mode 100644
index 0000000..7017849
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/client.py
@@ -0,0 +1,23 @@
+#
+# client.py: public Python interface for client components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
from libsvn.client import *
from svn.core import _unprefix_names
# Strip the C naming prefixes (svn_client_foo -> foo, SVN_CLIENT_X -> X)
# so callers can write svn.client.foo.
_unprefix_names(locals(), 'svn_client_')
_unprefix_names(locals(), 'SVN_CLIENT_')
del _unprefix_names
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/core.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/core.py
new file mode 100644
index 0000000..7e61534
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/core.py
@@ -0,0 +1,288 @@
+#
+# core.py: public Python interface for core components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2003-2007 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
+from libsvn.core import *
+import libsvn.core as _libsvncore
+import atexit as _atexit
+
class SubversionException(Exception):
  """Exception mirroring a chain of C svn_error_t structures.

  Each instance may wrap a 'child' exception and records the APR
  status code plus the C source location where the error originated.
  """
  def __init__(self, message=None, apr_err=None, child=None,
               file=None, line=None):
    """Initialize a new Subversion exception object.

    Arguments:
    message     -- optional user-visible error message
    apr_err     -- optional integer error code (apr_status_t)
    child       -- optional SubversionException to wrap
    file        -- optional source file name where the error originated
    line        -- optional line number of the source file

    file and line are for C, not Python; they are redundant to the
    traceback information for exceptions raised in Python.
    """
    # Reproduce the pre-1.5 .args tuple exactly:
    #   ()                 when no message
    #   (message,)         when message only
    #   (message, apr_err) when both are supplied
    args = []
    if message is not None:
      args.append(message)
      if apr_err is not None:
        args.append(apr_err)
    Exception.__init__(self, *args)

    self.apr_err = apr_err
    self.message = message
    self.child = child
    self.file = file
    self.line = line

  def _new_from_err_list(cls, errors):
    """Return new Subversion exception object from list of svn_error_t data.

    This alternative constructor is for turning a chain of svn_error_t
    objects in C into a chain of SubversionException objects in Python.
    errors is a list of (apr_err, message, file, line) tuples, in order
    from outer-most child to inner-most.

    Use svn_swig_py_svn_exception rather than calling this directly.

    Note: this modifies the errors list provided by the caller by
    reversing it.
    """
    wrapped = None
    errors.reverse()  # documented: mutates the caller's list.
    for (apr_err, message, file, line) in errors:
      wrapped = cls(message, apr_err, wrapped, file, line)
    return wrapped
  # Don't use the @classmethod decorator syntax; we support Python 2.2.
  _new_from_err_list = classmethod(_new_from_err_list)
+
+
def _cleanup_application_pool():
  """Cleanup the application pool before exiting"""
  # application_pool comes from the libsvn.core star-import; it can
  # already be invalid during interpreter shutdown, hence the guard.
  if application_pool and application_pool.valid():
    application_pool.destroy()
# Run the cleanup automatically at interpreter exit.
_atexit.register(_cleanup_application_pool)
+
+def _unprefix_names(symbol_dict, from_prefix, to_prefix = ''):
+  for name, value in symbol_dict.items():
+    if name.startswith(from_prefix):
+      symbol_dict[to_prefix + name[len(from_prefix):]] = value
+
+
# Pool() is the Pythonic way to create an APR pool (svn_pool_create).
Pool = _libsvncore.svn_pool_create

# Setup consistent names for revnum constants
SVN_IGNORED_REVNUM = SWIG_SVN_IGNORED_REVNUM
SVN_INVALID_REVNUM = SWIG_SVN_INVALID_REVNUM
+
def svn_path_compare_paths(path1, path2):
  """Compare two paths in tree order.

  Returns a negative, zero or positive integer.  A directory sorts
  before its children, and children sort before greater siblings of
  their parent ('/' effectively sorts lowest).
  """
  # Identical paths compare equal immediately.
  if path1 == path2:
    return 0

  len1 = len(path1)
  len2 = len(path2)
  shorter = min(len1, len2)

  # Find the length of the common prefix.
  i = 0
  while i < shorter and path1[i] == path2[i]:
    i = i + 1

  # Character following the common prefix in each path; '\0' when a
  # path ends exactly at the prefix.
  char1 = '\0'
  char2 = '\0'
  if i < len1:
    char1 = path1[i]
  if i < len2:
    char2 = path2[i]

  # A path whose next character is '/' while the other path ends here
  # is a child of the other (children are greater than their parents).
  if char1 == '/' and i == len2:
    return 1
  if char2 == '/' and i == len1:
    return -1
  # Otherwise '/' sorts before any other character.
  if i < len1 and char1 == '/':
    return -1
  if i < len2 and char2 == '/':
    return 1

  # Plain character comparison decides the rest (cmp() equivalent,
  # spelled portably as sign arithmetic).
  return (char1 > char2) - (char1 < char2)
+
def svn_mergeinfo_merge(mergeinfo, changes):
  # Delegate to the SWIG helper in libsvn.core.
  return _libsvncore.svn_swig_mergeinfo_merge(mergeinfo, changes)

def svn_mergeinfo_sort(mergeinfo):
  # Delegate to the SWIG helper in libsvn.core.
  return _libsvncore.svn_swig_mergeinfo_sort(mergeinfo)

def svn_rangelist_merge(rangelist, changes):
  # Delegate to the SWIG helper in libsvn.core.
  return _libsvncore.svn_swig_rangelist_merge(rangelist, changes)

def svn_rangelist_reverse(rangelist):
  # Delegate to the SWIG helper in libsvn.core.
  return _libsvncore.svn_swig_rangelist_reverse(rangelist)
+
class Stream:
  """A file-object-like wrapper for Subversion svn_stream_t objects."""
  def __init__(self, stream):
    # The underlying svn_stream_t (SWIG object).
    self._stream = stream

  def read(self, amt=None):
    """Read amt bytes, or the entire remainder when amt is None."""
    if amt is not None:
      # read the amount specified
      return svn_stream_read(self._stream, int(amt))
    # Drain the stream chunk by chunk and join once at the end.
    pieces = [ ]
    while 1:
      piece = svn_stream_read(self._stream, SVN_STREAM_CHUNK_SIZE)
      if not piece:
        break
      pieces.append(piece)
    return ''.join(pieces)

  def write(self, buf):
    """Write buf to the stream; the byte count written is discarded."""
    svn_stream_write(self._stream, buf)
+
def secs_from_timestr(svn_datetime, pool=None):
  """Convert a Subversion datetime string into seconds since the Epoch."""
  aprtime = svn_time_from_cstring(svn_datetime, pool)

  # ### convert to a time_t; this requires intimate knowledge of
  # ### the apr_time_t type
  # ### aprtime is microseconds; turn it into seconds
  # NOTE: sub-second precision is discarded by the division.
  return aprtime / 1000000
+
+
+# ============================================================================
+# Variations on this code are used in other places:
+# - subversion/build/generator/gen_win.py
+# - cvs2svn/cvs2svn
+
+# Names that are not to be exported
+import sys as _sys
+
if _sys.platform == "win32":
  import re as _re
  # Backslash runs only need doubling when followed by a double quote
  # (or the end of the argument) under the MS C runtime parsing rules.
  _escape_shell_arg_re = _re.compile(r'(\\+)(\"|$)')

  def escape_shell_arg(arg):
    """Quote arg for the Windows shell / MS C runtime.

    The (very strange) parsing rules used by the C runtime library are
    described at:
    http://msdn.microsoft.com/library/en-us/vclang/html/_pluslang_Parsing_C.2b2b_.Command.2d.Line_Arguments.asp
    """
    # double up slashes, but only if they are followed by a quote character
    arg = _re.sub(_escape_shell_arg_re, r'\1\1\2', arg)
    # surround by quotes and escape quotes inside
    return '"' + arg.replace('"', '"^""') + '"'

  def argv_to_command_string(argv):
    """Flatten a list of command line arguments into a command string.

    The resulting command string is expected to be passed to the system
    shell which os functions like popen() and system() invoke internally.
    """
    # According cmd's usage notes (cmd /?), it parses the command line by
    # "seeing if the first character is a quote character and if so, stripping
    # the leading character and removing the last quote character."
    # So to prevent the argument string from being changed we add an extra set
    # of quotes around it here.
    return '"' + " ".join(map(escape_shell_arg, argv)) + '"'

else:
  def escape_shell_arg(str):
    """Quote str for a Bourne-style shell using single quotes."""
    return "'" + str.replace("'", "'\\''") + "'"

  def argv_to_command_string(argv):
    """Flatten a list of command line arguments into a command string.

    The resulting command string is expected to be passed to the system
    shell which os functions like popen() and system() invoke internally.
    """
    return " ".join(map(escape_shell_arg, argv))
+# Deprecated functions
+
def apr_initialize():
  """Deprecated. APR is now initialized automatically. This is
  a compatibility wrapper providing the interface of the
  Subversion 1.2.x and earlier bindings."""
  # Deliberately a no-op.
  pass
+
def apr_terminate():
  """Deprecated. APR is now terminated automatically. This is
  a compatibility wrapper providing the interface of the
  Subversion 1.2.x and earlier bindings."""
  # Deliberately a no-op.
  pass
+
def svn_pool_create(parent_pool=None):
  """Deprecated. Use Pool() instead. This is a compatibility
  wrapper providing the interface of the Subversion 1.2.x and
  earlier bindings."""
  # Pool is the module-level alias for _libsvncore.svn_pool_create.
  return Pool(parent_pool)
+
def svn_pool_destroy(pool):
  """Deprecated. Pools are now destroyed automatically. If you
  want to manually destroy a pool, use Pool.destroy. This is
  a compatibility wrapper providing the interface of the
  Subversion 1.2.x and earlier bindings."""

  assert pool is not None

  # New in 1.3.x: All pools are automatically destroyed when Python shuts
  # down. For compatibility with 1.2.x, we won't report an error if your
  # app tries to destroy a pool during the shutdown process. Instead, we
  # check to make sure the application_pool is still around before calling
  # pool.destroy().
  if application_pool and application_pool.valid():
    pool.destroy()
# Legacy alias kept for callers using the APR-style name.
apr_pool_destroy = svn_pool_destroy
+
def svn_pool_clear(pool):
  """Deprecated. Use Pool.clear instead. This is a compatibility
  wrapper providing the interface of the Subversion 1.2.x and
  earlier bindings."""
  assert pool is not None
  # Simply forward to the pool object itself.
  pool.clear()
# Legacy alias kept for callers using the APR-style name.
apr_pool_clear = svn_pool_clear
+
def run_app(func, *args, **kw):
  '''Deprecated: Application-level pools are now created
  automatically. APR is also initialized and terminated
  automatically. This is a compatibility wrapper providing the
  interface of the Subversion 1.2.x and earlier bindings.

  Run a function as an "APR application".

  APR is initialized, and an application pool is created. Cleanup is
  performed as the function exits (normally or via an exception).
  '''
  # Python 2 'apply': equivalent to func(application_pool, *args, **kw).
  return apply(func, (application_pool,) + args, kw)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/delta.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/delta.py
new file mode 100644
index 0000000..e3dfdc0b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/delta.py
@@ -0,0 +1,74 @@
+#
+# delta.py: public Python interface for delta components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
from libsvn.delta import *
from svn.core import _unprefix_names
# svn_delta_foo -> foo; svn_txdelta_foo -> tx_foo.
_unprefix_names(locals(), 'svn_delta_')
_unprefix_names(locals(), 'svn_txdelta_', 'tx_')
del _unprefix_names
+
+
class Editor:
  """No-op base implementation of the Subversion delta editor interface.

  Subclasses override just the callbacks they care about.  Methods that
  open or add a node return that node's baton (None by default); all
  other callbacks do nothing.
  """

  def set_target_revision(self, target_revision, pool=None):
    pass

  def open_root(self, base_revision, dir_pool=None):
    return None  # root directory baton.

  def delete_entry(self, path, revision, parent_baton, pool=None):
    pass

  def add_directory(self, path, parent_baton,
                    copyfrom_path, copyfrom_revision, dir_pool=None):
    return None  # new directory baton.

  def open_directory(self, path, parent_baton, base_revision, dir_pool=None):
    return None  # directory baton.

  def change_dir_prop(self, dir_baton, name, value, pool=None):
    pass

  def close_directory(self, dir_baton, pool=None):
    pass

  def add_file(self, path, parent_baton,
               copyfrom_path, copyfrom_revision, file_pool=None):
    return None  # new file baton.

  def open_file(self, path, parent_baton, base_revision, file_pool=None):
    return None  # file baton.

  def apply_textdelta(self, file_baton, base_checksum, pool=None):
    return None  # text-delta window handler (none here).

  def change_file_prop(self, file_baton, name, value, pool=None):
    pass

  def close_file(self, file_baton, text_checksum, pool=None):
    pass

  def close_edit(self, pool=None):
    pass

  def abort_edit(self, pool=None):
    pass
+
+
def make_editor(editor, pool=None):
  # Wrap a Python Editor instance for use by the C delta machinery.
  return svn_swig_py_make_editor(editor, pool)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/diff.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/diff.py
new file mode 100644
index 0000000..c6f4f8d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/diff.py
@@ -0,0 +1,22 @@
+#
+# diff.py: public Python interface for diff components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2007 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
from libsvn.diff import *
from svn.core import _unprefix_names
# svn_diff_foo -> foo.
_unprefix_names(locals(), 'svn_diff_')
del _unprefix_names
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/fs.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/fs.py
new file mode 100644
index 0000000..79def8d1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/fs.py
@@ -0,0 +1,126 @@
+#
+# fs.py: public Python interface for fs components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
+from libsvn.fs import *
+from svn.core import _unprefix_names, Pool
+_unprefix_names(locals(), 'svn_fs_')
+_unprefix_names(locals(), 'SVN_FS_')
+del _unprefix_names
+
+
+# Names that are not to be exported
+import sys as _sys, os as _os, popen2 as _popen2, tempfile as _tempfile
+import __builtin__
+import svn.core as _svncore
+
+
def entries(root, path, pool=None):
  """Call dir_entries, returning a dictionary mapping names to node IDs."""
  dirents = dir_entries(root, path, pool)
  return dict((name, dirent_t_id_get(dirent))
              for name, dirent in dirents.items())
+
+
class FileDiff:
  """Diff the contents of two filesystem path/root pairs.

  Either path may be None (representing an add or a delete); at least
  one path must be given.  File contents are dumped lazily into
  temporary files, and get_pipe() runs an external `diff` over them.
  """

  def __init__(self, root1, path1, root2, path2, pool=None, diffoptions=None):
    """root1/path1 and root2/path2 identify the two files to compare.

    diffoptions is an optional list of extra command-line arguments for
    the `diff` command (defaults to no extra options).
    """
    assert path1 or path2

    # Temp file names; created lazily by get_files().
    self.tempfile1 = None
    self.tempfile2 = None

    self.root1 = root1
    self.path1 = path1
    self.root2 = root2
    self.path2 = path2
    # Copy the options so a caller-mutated list cannot change this
    # object's behaviour later (the old default was a shared mutable []).
    if diffoptions is None:
      diffoptions = []
    self.diffoptions = list(diffoptions)

  def either_binary(self):
    "Return true if either of the files are binary."
    if self.path1 is not None:
      prop = node_prop(self.root1, self.path1, _svncore.SVN_PROP_MIME_TYPE)
      if prop and _svncore.svn_mime_type_is_binary(prop):
        return 1
    if self.path2 is not None:
      prop = node_prop(self.root2, self.path2, _svncore.SVN_PROP_MIME_TYPE)
      if prop and _svncore.svn_mime_type_is_binary(prop):
        return 1
    return 0

  def _dump_contents(self, file, root, path, pool=None):
    """Write the contents of root/path (nothing if path is None) into
    the local file named `file`."""
    fp = __builtin__.open(file, 'w+') # avoid namespace clash with
                                      # trimmed-down svn_fs_open()
    if path is not None:
      stream = file_contents(root, path, pool)
      try:
        while 1:
          chunk = _svncore.svn_stream_read(stream, _svncore.SVN_STREAM_CHUNK_SIZE)
          if not chunk:
            break
          fp.write(chunk)
      finally:
        _svncore.svn_stream_close(stream)
    fp.close()

  def get_files(self):
    """Return (tempfile1, tempfile2), dumping both contents on first use."""
    if self.tempfile1:
      # no need to do more. we ran this already.
      return self.tempfile1, self.tempfile2

    # mkstemp creates the files atomically, avoiding the name-grabbing
    # race of the mktemp() this code previously relied on.  We only need
    # the names here; close the descriptors and reopen by name below.
    fd1, self.tempfile1 = _tempfile.mkstemp()
    _os.close(fd1)
    fd2, self.tempfile2 = _tempfile.mkstemp()
    _os.close(fd2)

    self._dump_contents(self.tempfile1, self.root1, self.path1)
    self._dump_contents(self.tempfile2, self.root2, self.path2)

    return self.tempfile1, self.tempfile2

  def get_pipe(self):
    """Run `diff` over the two dumped files; return its stdout pipe."""
    self.get_files()

    # use an array for the command to avoid the shell and potential
    # security exposures
    cmd = ["diff"] \
          + self.diffoptions \
          + [self.tempfile1, self.tempfile2]

    # the windows implementation of popen2 requires a string
    if _sys.platform == "win32":
      cmd = _svncore.argv_to_command_string(cmd)

    # open the pipe, forget the end for writing to the child (we won't),
    # and then return the file object for reading from the child.
    fromchild, tochild = _popen2.popen2(cmd)
    tochild.close()
    return fromchild

  def __del__(self):
    # it seems that sometimes the files are deleted, so just ignore any
    # failures trying to remove them
    if self.tempfile1 is not None:
      try:
        _os.remove(self.tempfile1)
      except OSError:
        pass
    if self.tempfile2 is not None:
      try:
        _os.remove(self.tempfile2)
      except OSError:
        pass
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/ra.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/ra.py
new file mode 100644
index 0000000..8bdf3c6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/ra.py
@@ -0,0 +1,70 @@
+#
+# ra.py: public Python interface for ra components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
+from libsvn.ra import *
+from svn.core import _unprefix_names
+_unprefix_names(locals(), 'svn_ra_')
+_unprefix_names(locals(), 'SVN_RA_')
+del _unprefix_names
+
class Callbacks:
  """Base class for callbacks structure for svn.ra.open2.

  Ra users may pass an instance of this class as is to svn.ra.open2
  for some simple operations: as long as authentication is not
  required, auth_baton may be None, and some ra implementations do not
  use open_tmp_file at all.  These are not guarantees, however, and
  all but the simplest scripts should fill even these in.

  The wc_prop slots, on the other hand, are only necessary for commits
  and updates, and progress_func and cancel_func are always optional.

  A simple example:

  class Callbacks(svn.ra.Callbacks):
    def __init__(self, wc, username, password):
      self.wc = wc
      self.auth_baton = svn.core.svn_auth_open([
          svn.client.get_simple_provider(),
          svn.client.get_username_provider(),
          ])
      svn.core.svn_auth_set_parameter(self.auth_baton,
                                      svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME,
                                      username)
      svn.core.svn_auth_set_parameter(self.auth_baton,
                                      svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD,
                                      password)
    def open_tmp_file(self, pool):
      path = '/'.join([self.wc, svn.wc.get_adm_dir(pool), 'tmp'])
      (fd, fn) = tempfile.mkstemp(dir=path)
      os.close(fd)
      return fn
    def cancel_func(self):
      if some_condition():
        return svn.core.SVN_ERR_CANCELLED
      return 0
  """
  # Each slot defaults to None (= not provided); subclasses or callers
  # fill in the ones they need, per the docstring above.
  open_tmp_file = None          # open_tmp_file(pool) -> path of a temp file
  auth_baton = None             # authentication baton; None if no auth needed
  get_wc_prop = None            # wc-prop slots: only needed for
  set_wc_prop = None            #   commits and updates
  push_wc_prop = None
  invalidate_wc_props = None
  progress_func = None          # always optional
  cancel_func = None            # always optional
  get_client_string = None
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/repos.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/repos.py
new file mode 100644
index 0000000..56e9103
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/repos.py
@@ -0,0 +1,271 @@
+#
+# repos.py: public Python interface for repos components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
+from libsvn.repos import *
+from svn.core import _unprefix_names, Pool
+_unprefix_names(locals(), 'svn_repos_')
+_unprefix_names(locals(), 'SVN_REPOS_')
+del _unprefix_names
+
+
+# Names that are not to be exported
+import svn.core as _svncore, svn.fs as _svnfs, svn.delta as _svndelta
+
# Available change actions
CHANGE_ACTION_MODIFY  = 0
CHANGE_ACTION_ADD     = 1
CHANGE_ACTION_DELETE  = 2
CHANGE_ACTION_REPLACE = 3


class ChangedPath:
  """Record of a single changed path, as collected by ChangeCollector."""

  __slots__ = [ 'item_kind', 'prop_changes', 'text_changed',
                'base_path', 'base_rev', 'path', 'added', 'action',
                ]
  def __init__(self,
               item_kind, prop_changes, text_changed, base_path, base_rev,
               path, added, action=None):
    """item_kind is an svn_node_* kind; action, if given, must be one of
    the CHANGE_ACTION_* constants above, else Exception is raised."""
    self.item_kind = item_kind
    self.prop_changes = prop_changes
    self.text_changed = text_changed
    self.base_path = base_path
    self.base_rev = base_rev
    self.path = path
    if action not in [None, CHANGE_ACTION_MODIFY, CHANGE_ACTION_ADD,
                      CHANGE_ACTION_DELETE, CHANGE_ACTION_REPLACE]:
      # Call-style raise works on both Python 2 and 3 (the old
      # `raise Exception, "..."` form is a syntax error on Python 3).
      raise Exception("unsupported change type")
    self.action = action

    ### it would be nice to avoid this flag. however, without it, it would
    ### be quite difficult to distinguish between a change to the previous
    ### revision (which has a base_path/base_rev) and a copy from some
    ### other path/rev. a change in path is obviously add-with-history,
    ### but the same path could be a change to the previous rev or a restore
    ### of an older version. when it is "change to previous", I'm not sure
    ### if the rev is always repos.rev - 1, or whether it represents the
    ### created or time-of-checkout rev. so... we use a flag (for now)
    ### Note: This flag is also set for replaced paths unlike self.action
    ### which is either set to CHANGE_ACTION_ADD or CHANGE_ACTION_REPLACE
    self.added = added
+
+
class ChangeCollector(_svndelta.Editor):
  """Delta editor that records a ChangedPath for every path it is
  driven over (a revision root or a transaction root).

  The accumulated path -> ChangedPath mapping is available from
  get_changes(); notify_cb, if supplied, is invoked with each
  ChangedPath as it is completed.

  Available Since: 1.2.0
  """

  # BATON FORMAT: [path, base_path, base_rev]

  def __init__(self, fs_ptr, root, pool=None, notify_cb=None):
    """fs_ptr is the filesystem object; root is a revision or txn root."""
    self.fs_ptr = fs_ptr
    self.changes = { } # path -> ChangedPath
    self.roots = { } # revision -> revision root (cache for _get_root)
    self.notify_cb = notify_cb
    self.props = { }
    self.fs_root = root

    # Figure out the base revision and root properties.
    if _svnfs.is_revision_root(self.fs_root):
      rev = _svnfs.revision_root_revision(self.fs_root)
      self.base_rev = rev - 1
      self.props = _svnfs.revision_proplist(self.fs_ptr, rev)
    else:
      txn_name = _svnfs.txn_root_name(self.fs_root)
      txn_t = _svnfs.open_txn(self.fs_ptr, txn_name)
      self.base_rev = _svnfs.txn_base_revision(txn_t)
      self.props = _svnfs.txn_proplist(txn_t)

  def get_root_props(self):
    """Return the revision (or txn) property dict captured at init."""
    return self.props

  def get_changes(self):
    """Return the path -> ChangedPath mapping collected so far."""
    return self.changes

  def _send_change(self, path):
    # Forward the recorded change for `path` to notify_cb, if any.
    if self.notify_cb:
      change = self.changes.get(path)
      if change:
        self.notify_cb(change)

  def _make_base_path(self, parent_path, path):
    # Join parent_path with the final component of `path`.
    idx = path.rfind('/')
    if parent_path:
      parent_path = parent_path + '/'
    if idx == -1:
      return parent_path + path
    return parent_path + path[idx+1:]

  def _get_root(self, rev):
    # Return the root object for revision `rev`, caching it in self.roots.
    try:
      return self.roots[rev]
    except KeyError:
      pass
    root = self.roots[rev] = _svnfs.revision_root(self.fs_ptr, rev)
    return root

  def open_root(self, base_revision, dir_pool=None):
    return ('', '', self.base_rev)  # dir_baton

  def delete_entry(self, path, revision, parent_baton, pool=None):
    base_path = self._make_base_path(parent_baton[1], path)
    # The deleted node no longer exists in this root, so look up its
    # kind in the parent's base revision.
    if _svnfs.is_dir(self._get_root(parent_baton[2]), base_path):
      item_type = _svncore.svn_node_dir
    else:
      item_type = _svncore.svn_node_file
    self.changes[path] = ChangedPath(item_type,
                                     False,
                                     False,
                                     base_path,
                                     parent_baton[2], # base_rev
                                     None,            # (new) path
                                     False,           # added
                                     CHANGE_ACTION_DELETE,
                                     )
    self._send_change(path)

  def add_directory(self, path, parent_baton,
                    copyfrom_path, copyfrom_revision, dir_pool=None):
    # An add on a path already recorded (e.g. just deleted) is a replace.
    # (`in` replaces the Python-2-only dict.has_key here.)
    if path in self.changes:
      action = CHANGE_ACTION_REPLACE
    else:
      action = CHANGE_ACTION_ADD
    self.changes[path] = ChangedPath(_svncore.svn_node_dir,
                                     False,
                                     False,
                                     copyfrom_path,     # base_path
                                     copyfrom_revision, # base_rev
                                     path,              # path
                                     True,              # added
                                     action,
                                     )
    if copyfrom_path and (copyfrom_revision != -1):
      base_path = copyfrom_path
    else:
      base_path = path
    base_rev = copyfrom_revision
    return (path, base_path, base_rev)  # dir_baton

  def open_directory(self, path, parent_baton, base_revision, dir_pool=None):
    base_path = self._make_base_path(parent_baton[1], path)
    return (path, base_path, parent_baton[2])  # dir_baton

  def change_dir_prop(self, dir_baton, name, value, pool=None):
    dir_path = dir_baton[0]
    if dir_path in self.changes:
      self.changes[dir_path].prop_changes = True
    else:
      # can't be added or deleted, so this must be CHANGED
      self.changes[dir_path] = ChangedPath(_svncore.svn_node_dir,
                                           True,
                                           False,
                                           dir_baton[1], # base_path
                                           dir_baton[2], # base_rev
                                           dir_path,     # path
                                           False,        # added
                                           CHANGE_ACTION_MODIFY,
                                           )

  def add_file(self, path, parent_baton,
               copyfrom_path, copyfrom_revision, file_pool=None):
    # An add on a path already recorded (e.g. just deleted) is a replace.
    if path in self.changes:
      action = CHANGE_ACTION_REPLACE
    else:
      action = CHANGE_ACTION_ADD
    self.changes[path] = ChangedPath(_svncore.svn_node_file,
                                     False,
                                     False,
                                     copyfrom_path,     # base_path
                                     copyfrom_revision, # base_rev
                                     path,              # path
                                     True,              # added
                                     action,
                                     )
    if copyfrom_path and (copyfrom_revision != -1):
      base_path = copyfrom_path
    else:
      base_path = path
    base_rev = copyfrom_revision
    return (path, base_path, base_rev)  # file_baton

  def open_file(self, path, parent_baton, base_revision, file_pool=None):
    base_path = self._make_base_path(parent_baton[1], path)
    return (path, base_path, parent_baton[2])  # file_baton

  def apply_textdelta(self, file_baton, base_checksum):
    file_path = file_baton[0]
    if file_path in self.changes:
      self.changes[file_path].text_changed = True
    else:
      # an add would have inserted a change record already, and it can't
      # be a delete with a text delta, so this must be a normal change.
      self.changes[file_path] = ChangedPath(_svncore.svn_node_file,
                                            False,
                                            True,
                                            file_baton[1], # base_path
                                            file_baton[2], # base_rev
                                            file_path,     # path
                                            False,         # added
                                            CHANGE_ACTION_MODIFY,
                                            )

    # no handler
    return None

  def change_file_prop(self, file_baton, name, value, pool=None):
    file_path = file_baton[0]
    if file_path in self.changes:
      self.changes[file_path].prop_changes = True
    else:
      # an add would have inserted a change record already, and it can't
      # be a delete with a prop change, so this must be a normal change.
      self.changes[file_path] = ChangedPath(_svncore.svn_node_file,
                                            True,
                                            False,
                                            file_baton[1], # base_path
                                            file_baton[2], # base_rev
                                            file_path,     # path
                                            False,         # added
                                            CHANGE_ACTION_MODIFY,
                                            )

  def close_directory(self, dir_baton):
    self._send_change(dir_baton[0])

  def close_file(self, file_baton, text_checksum):
    self._send_change(file_baton[0])
+
+
class RevisionChangeCollector(ChangeCollector):
  """Deprecated: Use ChangeCollector.

  Compatibility wrapper exposing the interface of the Subversion 1.1.x
  and earlier bindings.

  Important difference: base_path members have a leading '/' character
  in this interface."""

  def __init__(self, fs_ptr, root, pool=None, notify_cb=None):
    # Here `root` is a revision number; resolve it to a root object
    # before delegating to the base class.
    rev_root = _svnfs.revision_root(fs_ptr, root)
    ChangeCollector.__init__(self, fs_ptr, rev_root, pool, notify_cb)

  def _make_base_path(self, parent_path, path):
    # Unlike the base class, keep a leading '/' on the final component.
    slash = path.rfind('/')
    if slash >= 0:
      tail = path[slash:]
    else:
      tail = '/' + path
    return parent_path + tail
+
+
# enable True/False in older vsns of Python: on pre-2.3 interpreters the
# builtin names do not exist, so merely referencing True raises NameError
# and we fall back to integer stand-ins.
try:
  True
except NameError:
  True = 1
  False = 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/svn/wc.py b/depot_tools/release/win/python_24/Lib/site-packages/svn/wc.py
new file mode 100644
index 0000000..90ddd379
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/svn/wc.py
@@ -0,0 +1,51 @@
+#
+# wc.py: public Python interface for wc components
+#
+# Subversion is a tool for revision control.
+# See http://subversion.tigris.org for more information.
+#
+######################################################################
+#
+# Copyright (c) 2000-2004 CollabNet.  All rights reserved.
+#
+# This software is licensed as described in the file COPYING, which
+# you should have received as part of this distribution.  The terms
+# are also available at http://subversion.tigris.org/license-1.html.
+# If newer versions of this license are posted there, you may use a
+# newer version instead, at your option.
+#
+######################################################################
+
+from libsvn.wc import *
+from svn.core import _unprefix_names
+_unprefix_names(locals(), 'svn_wc_')
+_unprefix_names(locals(), 'SVN_WC_')
+del _unprefix_names
+
+
class DiffCallbacks2:
    """Default diff-callback vtable: every event reports an unknown state.

    NOTE(review): presumably mirrors the C svn_wc_diff_callbacks2_t
    structure used by the wc diff machinery -- confirm against callers.
    Subclasses override the events they care about.
    """
    def file_changed(self, adm_access, path,
                     tmpfile1, tmpfile2, rev1, rev2,
                     mimetype1, mimetype2,
                     propchanges, originalprops):
        # Returns (content state, prop state).
        return (notify_state_unknown, notify_state_unknown)

    def file_added(self, adm_access, path,
                   tmpfile1, tmpfile2, rev1, rev2,
                   mimetype1, mimetype2,
                   propchanges, originalprops):
        # Returns (content state, prop state).
        return (notify_state_unknown, notify_state_unknown)

    def file_deleted(self, adm_access, path, tmpfile1, tmpfile2,
                     mimetype1, mimetype2, originalprops):
        return notify_state_unknown

    def dir_added(self, adm_access, path, rev):
        return notify_state_unknown

    def dir_deleted(self, adm_access, path):
        return notify_state_unknown

    def dir_props_changed(self, adm_access, path,
                          propchanges, original_props):
        return notify_state_unknown
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/BackupRead_BackupWrite.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/BackupRead_BackupWrite.py
new file mode 100644
index 0000000..63ca70c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/BackupRead_BackupWrite.py
@@ -0,0 +1,80 @@
## demonstrates using BackupRead and BackupWrite to copy all of a file's data streams

import win32file, win32api, win32con, win32security, ntsecuritycon
from win32com import storagecon
import pythoncom, pywintypes
import struct, traceback

## NOTE(review): DACL_SECURITY_INFORMATION appears twice below; possibly
## SACL_SECURITY_INFORMATION was intended for one of them -- confirm.
all_sd_info=win32security.DACL_SECURITY_INFORMATION|win32security.DACL_SECURITY_INFORMATION|   \
            win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION

# Work in the temp directory: one source file, one destination file.
tempdir=win32api.GetTempPath()
tempfile=win32api.GetTempFileName(tempdir,'bkr')[0]
outfile=win32api.GetTempFileName(tempdir,'out')[0]
print 'Filename:',tempfile,'Output file:',outfile

# Default (unnamed) data stream.
f=open(tempfile,'w')
f.write('some random junk'+'x'*100)
f.close()

## add a couple of alternate data streams
f=open(tempfile+':streamdata','w')
f.write('data written to alternate stream'+'y'*100)
f.close()

f=open(tempfile+':anotherstream','w')
f.write('z'*100)
f.close()

## add Summary Information, which is stored as a separate stream
m=storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE |storagecon.STGM_DIRECT
pss=pythoncom.StgOpenStorageEx(tempfile, m, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage,None)
ps=pss.Create(pythoncom.FMTID_SummaryInformation,pythoncom.IID_IPropertyStorage,0,storagecon.STGM_READWRITE|storagecon.STGM_SHARE_EXCLUSIVE)
ps.WriteMultiple((storagecon.PIDSI_KEYWORDS,storagecon.PIDSI_COMMENTS),('keywords','comments'))
ps=None
pss=None

## add a custom security descriptor to make sure we don't
##   get a default that would always be the same for both files in temp dir
new_sd=pywintypes.SECURITY_DESCRIPTOR()
sid=win32security.LookupAccountName('','EveryOne')[0]
acl=pywintypes.ACL()
acl.AddAccessAllowedAce(1, win32con.GENERIC_READ, sid)
acl.AddAccessAllowedAce(1, ntsecuritycon.FILE_APPEND_DATA, sid)
acl.AddAccessAllowedAce(1, win32con.GENERIC_WRITE, sid)
acl.AddAccessAllowedAce(1, ntsecuritycon.FILE_ALL_ACCESS, sid)

new_sd.SetSecurityDescriptorDacl(True, acl, False)
win32security.SetFileSecurity(tempfile,win32security.DACL_SECURITY_INFORMATION,new_sd)


sa=pywintypes.SECURITY_ATTRIBUTES()
sa.bInheritHandle=True
# FILE_FLAG_BACKUP_SEMANTICS is what makes BackupRead/BackupWrite usable
# on these handles.
h=win32file.CreateFile(tempfile, win32con.GENERIC_ALL ,win32con.FILE_SHARE_READ,
    sa, win32con.OPEN_EXISTING, win32file.FILE_FLAG_BACKUP_SEMANTICS , None)

outh=win32file.CreateFile(outfile, win32con.GENERIC_ALL ,win32con.FILE_SHARE_READ|win32con.FILE_SHARE_WRITE,
    sa, win32con.OPEN_EXISTING, win32file.FILE_FLAG_BACKUP_SEMANTICS , None)

ctxt=0
outctxt=0
buf=None
readsize=100

# Pump everything BackupRead returns (all streams) into BackupWrite.
while 1:
    bytes_read, buf, ctxt=win32file.BackupRead(h, readsize, buf, False, True, ctxt)
    if bytes_read==0:
        break
    bytes_written, outctxt=win32file.BackupWrite(outh, bytes_read, buf, False, True, outctxt)
    print 'Written:',bytes_written,'Context:',outctxt
# Final calls with the abort flag set release the backup contexts.
win32file.BackupRead(h, 0, buf, True, True, ctxt)
win32file.BackupWrite(outh, 0, '', True, True, outctxt)
win32file.CloseHandle(h)
win32file.CloseHandle(outh)

# Verify the default stream, both alternate streams, and the security
# descriptor survived the copy.
assert open(tempfile).read()==open(outfile).read(),"File contents differ !"
assert open(tempfile+':streamdata').read()==open(outfile+':streamdata').read(),"streamdata contents differ !"
assert open(tempfile+':anotherstream').read()==open(outfile+':anotherstream').read(),"anotherstream contents differ !"
assert buffer(win32security.GetFileSecurity(tempfile,all_sd_info))[:]== \
       buffer(win32security.GetFileSecurity(outfile, all_sd_info))[:], "Security descriptors are different !"
## also should check Summary Info programmatically
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/BackupSeek_streamheaders.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/BackupSeek_streamheaders.py
new file mode 100644
index 0000000..c915205
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/BackupSeek_streamheaders.py
@@ -0,0 +1,85 @@
## demonstrates using BackupSeek to enumerate data streams for a file
import win32file, win32api, win32con
from win32com import storagecon
import pythoncom, pywintypes
import struct, traceback

# Human-readable names for the dwStreamId values BackupRead can return.
stream_types={
    win32con.BACKUP_DATA:"Standard data", 
    win32con.BACKUP_EA_DATA:"Extended attribute data", 
    win32con.BACKUP_SECURITY_DATA:"Security descriptor data", 
    win32con.BACKUP_ALTERNATE_DATA:"Alternative data streams", 
    win32con.BACKUP_LINK:"Hard link information",
    win32con.BACKUP_PROPERTY_DATA:"Property data",
    win32con.BACKUP_OBJECT_ID:"Objects identifiers",
    win32con.BACKUP_REPARSE_DATA:"Reparse points",
    win32con.BACKUP_SPARSE_BLOCK:"Sparse file"
}

tempdir=win32api.GetTempPath()
tempfile=win32api.GetTempFileName(tempdir,'bkr')[0]
print 'Filename:',tempfile

# Default (unnamed) data stream.
f=open(tempfile,'w')
f.write('some random junk'+'x'*100)
f.close()

# Two named alternate data streams.
f=open(tempfile+':streamdata','w')
f.write('data written to alternate stream'+'y'*100)
f.close()

f=open(tempfile+':anotherstream','w')
f.write('z'*200)
f.close()

## add Summary Information, which is stored as a separate stream
m=storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE |storagecon.STGM_DIRECT
pss=pythoncom.StgOpenStorageEx(tempfile, m, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage,None)
ps=pss.Create(pythoncom.FMTID_SummaryInformation,pythoncom.IID_IPropertyStorage,0,storagecon.STGM_READWRITE|storagecon.STGM_SHARE_EXCLUSIVE)
ps.WriteMultiple((storagecon.PIDSI_KEYWORDS,storagecon.PIDSI_COMMENTS),('keywords','comments'))
ps=None
pss=None

sa=pywintypes.SECURITY_ATTRIBUTES()
sa.bInheritHandle=False
# FILE_FLAG_BACKUP_SEMANTICS is required for BackupRead/BackupSeek.
h=win32file.CreateFile(tempfile, win32con.GENERIC_ALL ,win32con.FILE_SHARE_READ,
    sa, win32con.OPEN_EXISTING, win32file.FILE_FLAG_BACKUP_SEMANTICS , None)


""" stream header:
typedef struct _WIN32_STREAM_ID {
    DWORD dwStreamId;  DWORD dwStreamAttributes;  LARGE_INTEGER Size;
    DWORD dwStreamNameSize;  WCHAR cStreamName[ANYSIZE_ARRAY];
}
"""

# struct format matching the fixed-size part of WIN32_STREAM_ID above.
win32_stream_id_format="LLQL"
win32_stream_id_size=struct.calcsize(win32_stream_id_format)

def parse_stream_header(h,ctxt,data):
    """Unpack a WIN32_STREAM_ID header from data (reading the stream
    name, if any, via BackupRead) and print/return its fields."""
    stream_type, stream_attributes, stream_size, stream_name_size=struct.unpack(win32_stream_id_format,data)
    print '\nType:',stream_type,stream_types[stream_type], 'Attributes:', stream_attributes, 'Size:', stream_size, 'Name len:',stream_name_size
    if stream_name_size>0:
        ## ??? sdk says this size is in characters, but it appears to be number of bytes ???
        bytes_read, stream_name_buf, ctxt=win32file.BackupRead(h, stream_name_size, None, False, True, ctxt)
        stream_name=pywintypes.UnicodeFromRaw(stream_name_buf[:])
    else:
        stream_name='Unnamed'
    print 'Name:'+stream_name
    return ctxt, stream_type, stream_attributes, stream_size, stream_name_size, stream_name

ctxt=0
win32_stream_id_buf=None  ## gets rebound to a writable buffer on first call and reused
# Read each stream header, then BackupSeek past the stream's data to the
# next header instead of reading the data itself.
while 1:
    bytes_read, win32_stream_id_buf, ctxt=win32file.BackupRead(h, win32_stream_id_size, win32_stream_id_buf, False, True, ctxt)
    if bytes_read==0:
        break
    ctxt, stream_type, stream_attributes, stream_size, stream_name_size, stream_name=\
        parse_stream_header(h, ctxt, win32_stream_id_buf[:])
    if stream_size>0:
        bytes_moved=win32file.BackupSeek(h, stream_size, ctxt)
        print 'Moved: ',bytes_moved

# Final call with the abort flag set releases the backup context.
win32file.BackupRead(h, win32_stream_id_size, win32_stream_id_buf, True, True, ctxt)
win32file.CloseHandle(h)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/FileSecurityTest.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/FileSecurityTest.py
new file mode 100644
index 0000000..1922d50
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/FileSecurityTest.py
@@ -0,0 +1,68 @@
# Contributed by Kelly Kranabetter.
# Dumps the owner, group and DACL of a file, decoding ACE types, flags
# and access-mask bits into their symbolic names.
import os, sys
import win32security, ntsecuritycon

# get security information
#name=r"c:\autoexec.bat"
#name= r"g:\!workgrp\lim"
# Default target: this script file itself.
name=sys.argv[0]

if not os.path.exists(name):
    print name, "does not exist!"
    sys.exit()

print "On file " , name, "\n"

# get owner SID
print "OWNER"
sd= win32security.GetFileSecurity(name, win32security.OWNER_SECURITY_INFORMATION)
sid= sd.GetSecurityDescriptorOwner()
print "  ", win32security.LookupAccountSid(None, sid)

# get group SID
print "GROUP"
sd= win32security.GetFileSecurity(name, win32security.GROUP_SECURITY_INFORMATION)
sid= sd.GetSecurityDescriptorGroup()
print "  ", win32security.LookupAccountSid(None, sid)

# get ACEs
sd= win32security.GetFileSecurity(name, win32security.DACL_SECURITY_INFORMATION)
dacl= sd.GetSecurityDescriptorDacl()
if dacl == None:
    print "No Discretionary ACL"
else:
    # Each ACE is structured ((type, flags), mask, sid), judging by the
    # accesses below.
    for ace_no in range(0, dacl.GetAceCount()):
        ace= dacl.GetAce(ace_no)
        print "ACE", ace_no

        print "  -Type"
        for i in ("ACCESS_ALLOWED_ACE_TYPE", "ACCESS_DENIED_ACE_TYPE", "SYSTEM_AUDIT_ACE_TYPE", "SYSTEM_ALARM_ACE_TYPE"):
            if getattr(ntsecuritycon, i) == ace[0][0]:
                print "    ", i

        print "  -Flags", hex(ace[0][1])
        for i in ("OBJECT_INHERIT_ACE", "CONTAINER_INHERIT_ACE", "NO_PROPAGATE_INHERIT_ACE", "INHERIT_ONLY_ACE", "SUCCESSFUL_ACCESS_ACE_FLAG", "FAILED_ACCESS_ACE_FLAG"):
            if getattr(ntsecuritycon, i) & ace[0][1] == getattr(ntsecuritycon, i):
                print "    ", i

        print "  -mask", hex(ace[1])

        # files and directories do permissions differently
        permissions_file= ("DELETE", "READ_CONTROL", "WRITE_DAC", "WRITE_OWNER", "SYNCHRONIZE", "FILE_GENERIC_READ", "FILE_GENERIC_WRITE", "FILE_GENERIC_EXECUTE", "FILE_DELETE_CHILD")
        permissions_dir= ("DELETE", "READ_CONTROL", "WRITE_DAC", "WRITE_OWNER", "SYNCHRONIZE", "FILE_ADD_SUBDIRECTORY", "FILE_ADD_FILE", "FILE_DELETE_CHILD", "FILE_LIST_DIRECTORY", "FILE_TRAVERSE", "FILE_READ_ATTRIBUTES", "FILE_WRITE_ATTRIBUTES", "FILE_READ_EA", "FILE_WRITE_EA")
        permissions_dir_inherit= ("DELETE", "READ_CONTROL", "WRITE_DAC", "WRITE_OWNER", "SYNCHRONIZE", "GENERIC_READ", "GENERIC_WRITE", "GENERIC_EXECUTE", "GENERIC_ALL")
        if os.path.isfile(name):
            permissions= permissions_file
        else:
            permissions= permissions_dir
            # directories also contain an ACE that is inherited by children (files) within them
            if ace[0][1] & ntsecuritycon.OBJECT_INHERIT_ACE == ntsecuritycon.OBJECT_INHERIT_ACE and ace[0][1] & ntsecuritycon.INHERIT_ONLY_ACE == ntsecuritycon.INHERIT_ONLY_ACE:
                permissions= permissions_dir_inherit

        calc_mask= 0  # calculate the mask so we can see if we are printing all of the permissions
        for i in permissions:
            if getattr(ntsecuritycon, i) & ace[1] == getattr(ntsecuritycon, i):
                calc_mask= calc_mask | getattr(ntsecuritycon, i)
                print "    ", i
        print "  ", "Calculated Check Mask=", hex(calc_mask)
        print "  -SID\n    ", win32security.LookupAccountSid(None, ace[2])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/GetSaveFileName.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/GetSaveFileName.py
new file mode 100644
index 0000000..c99fe15
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/GetSaveFileName.py
@@ -0,0 +1,36 @@
+import win32gui, win32con, os
+
+filter='Python Scripts\0*.py;*.pyw;*.pys\0Text files\0*.txt\0'
+customfilter='Other file types\0*.*\0'
+
+fname, customfilter, flags=win32gui.GetSaveFileNameW(
+    InitialDir=os.environ['temp'],
+    Flags=win32con.OFN_ALLOWMULTISELECT|win32con.OFN_EXPLORER,
+    File='somefilename', DefExt='py',
+    Title='GetSaveFileNameW',
+    Filter=filter,
+    CustomFilter=customfilter,
+    FilterIndex=1)
+
+print 'save file names:', repr(fname)
+print 'filter used:', repr(customfilter)
+print 'Flags:', flags
+for k,v in win32con.__dict__.items():
+    if k.startswith('OFN_') and flags & v:
+        print '\t'+k
+
+fname, customfilter, flags=win32gui.GetOpenFileNameW(
+    InitialDir=os.environ['temp'],
+    Flags=win32con.OFN_ALLOWMULTISELECT|win32con.OFN_EXPLORER,
+    File='somefilename', DefExt='py',
+    Title='GetOpenFileNameW',
+    Filter=filter,
+    CustomFilter=customfilter,
+    FilterIndex=0)
+
+print 'open file names:', repr(fname)
+print 'filter used:', repr(customfilter)
+print 'Flags:', flags
+for k,v in win32con.__dict__.items():
+    if k.startswith('OFN_') and flags & v:
+        print '\t'+k
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/SystemParametersInfo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/SystemParametersInfo.py
new file mode 100644
index 0000000..8a36721
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/SystemParametersInfo.py
@@ -0,0 +1,173 @@
+import win32gui, win32con, win32api, time, os, glob
+## some of these tests will fail for systems prior to XP
+
+## Every test below follows the same shape: read the current setting,
+## change it, read it back to verify the change took effect, then restore
+## the original value and verify the restore.
+## NOTE(review): if any assert fires part-way through, the setting it was
+## testing is left modified (the restore call never runs) — confirm this
+## is acceptable for a demo script.
+
+for pname in(
+    ## Set actions all take an unsigned int in pvParam
+    "SPI_GETMOUSESPEED", "SPI_GETACTIVEWNDTRKTIMEOUT", "SPI_GETCARETWIDTH",
+    "SPI_GETFOREGROUNDFLASHCOUNT", "SPI_GETFOREGROUNDLOCKTIMEOUT", 
+    ## Set actions all take an unsigned int in uiParam
+    "SPI_GETWHEELSCROLLLINES", "SPI_GETKEYBOARDDELAY",
+    "SPI_GETKEYBOARDSPEED", "SPI_GETMOUSEHOVERHEIGHT", "SPI_GETMOUSEHOVERWIDTH",
+    "SPI_GETMOUSEHOVERTIME", "SPI_GETSCREENSAVETIMEOUT", "SPI_GETMENUSHOWDELAY",
+    "SPI_GETLOWPOWERTIMEOUT", "SPI_GETPOWEROFFTIMEOUT",  "SPI_GETBORDER",
+    ## below are winxp only:
+    "SPI_GETFONTSMOOTHINGCONTRAST", "SPI_GETFONTSMOOTHINGTYPE", "SPI_GETFOCUSBORDERHEIGHT",
+    "SPI_GETFOCUSBORDERWIDTH",      "SPI_GETMOUSECLICKLOCKTIME"):
+    print pname
+    ## derive the matching SPI_SET* constant from the SPI_GET* name
+    cget=getattr(win32con,pname)
+    cset=getattr(win32con,pname.replace('_GET','_SET'))
+    orig_value=win32gui.SystemParametersInfo(cget)
+    print '\toriginal setting:',orig_value
+    win32gui.SystemParametersInfo(cset, orig_value+1)
+    new_value=win32gui.SystemParametersInfo(cget)
+    print '\tnew value:',new_value
+    assert new_value==orig_value+1
+    win32gui.SystemParametersInfo(cset, orig_value)
+    assert win32gui.SystemParametersInfo(cget)==orig_value
+
+
+
+# these take a boolean value in pvParam
+# change to opposite, check that it was changed and change back
+for pname in ("SPI_GETFLATMENU","SPI_GETDROPSHADOW","SPI_GETKEYBOARDCUES","SPI_GETMENUFADE",
+    "SPI_GETCOMBOBOXANIMATION", "SPI_GETCURSORSHADOW", "SPI_GETGRADIENTCAPTIONS", "SPI_GETHOTTRACKING",
+    "SPI_GETLISTBOXSMOOTHSCROLLING", "SPI_GETMENUANIMATION", "SPI_GETSELECTIONFADE",
+    "SPI_GETTOOLTIPANIMATION", "SPI_GETTOOLTIPFADE", "SPI_GETUIEFFECTS", "SPI_GETACTIVEWINDOWTRACKING",
+    "SPI_GETACTIVEWNDTRKZORDER"):
+    print pname
+    cget=getattr(win32con,pname)
+    cset=getattr(win32con,pname.replace('_GET','_SET'))
+    orig_value=win32gui.SystemParametersInfo(cget)
+    print orig_value
+    win32gui.SystemParametersInfo(cset, not orig_value)
+    new_value=win32gui.SystemParametersInfo(cget)
+    print new_value
+    assert orig_value!=new_value
+    win32gui.SystemParametersInfo(cset, orig_value)
+    assert win32gui.SystemParametersInfo(cget)==orig_value
+
+
+
+# these take a boolean in uiParam
+#  could combine with above section now that SystemParametersInfo only takes a single parameter
+for pname in ("SPI_GETFONTSMOOTHING","SPI_GETICONTITLEWRAP","SPI_GETBEEP","SPI_GETBLOCKSENDINPUTRESETS",
+    "SPI_GETKEYBOARDPREF","SPI_GETSCREENSAVEACTIVE","SPI_GETMENUDROPALIGNMENT",
+    "SPI_GETDRAGFULLWINDOWS", "SPI_GETSHOWIMEUI"):
+    print pname
+    cget=getattr(win32con,pname)
+    cset=getattr(win32con,pname.replace('_GET','_SET'))
+    orig_value=win32gui.SystemParametersInfo(cget)
+    print orig_value
+    win32gui.SystemParametersInfo(cset, not orig_value)
+    new_value=win32gui.SystemParametersInfo(cget)
+    print new_value
+    assert orig_value!=new_value
+    win32gui.SystemParametersInfo(cset, orig_value)
+    assert win32gui.SystemParametersInfo(cget)==orig_value
+
+
+
+## LOGFONT round-trip: mutate the icon-title font, verify, then restore it.
+print "SPI_GETICONTITLELOGFONT"
+lf=win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT)
+orig_height=lf.lfHeight
+orig_italic=lf.lfItalic
+print 'Height:', orig_height, 'Italic:',orig_italic
+lf.lfHeight+=2
+lf.lfItalic=not lf.lfItalic
+win32gui.SystemParametersInfo(win32con.SPI_SETICONTITLELOGFONT, lf)
+new_lf=win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT)
+print 'New Height:', new_lf.lfHeight, 'New Italic:',new_lf.lfItalic
+assert new_lf.lfHeight==orig_height+2
+assert new_lf.lfItalic!=orig_italic
+
+lf.lfHeight=orig_height
+lf.lfItalic=orig_italic
+win32gui.SystemParametersInfo(win32con.SPI_SETICONTITLELOGFONT, lf)
+new_lf=win32gui.SystemParametersInfo(win32con.SPI_GETICONTITLELOGFONT)
+assert new_lf.lfHeight==orig_height
+assert new_lf.lfItalic==orig_italic
+
+
+
+## Mouse-hover metrics round-trip.
+print "SPI_GETMOUSEHOVERWIDTH, SPI_GETMOUSEHOVERHEIGHT, SPI_GETMOUSEHOVERTIME"
+w=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH)
+h=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT)
+t=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)
+print 'w,h,t:', w,h,t
+
+win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERWIDTH,w+1)
+win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERHEIGHT,h+2)
+win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERTIME,t+3)
+new_w=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH)
+new_h=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT)
+new_t=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)
+print 'new w,h,t:', new_w, new_h, new_t
+assert new_w==w+1
+assert new_h==h+2
+assert new_t==t+3
+
+win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERWIDTH,w)
+win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERHEIGHT,h)
+win32gui.SystemParametersInfo(win32con.SPI_SETMOUSEHOVERTIME,t)
+new_w=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERWIDTH)
+new_h=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERHEIGHT)
+new_t=win32gui.SystemParametersInfo(win32con.SPI_GETMOUSEHOVERTIME)
+assert new_w==w
+assert new_h==h
+assert new_t==t
+
+
+
+## These SPI_SET* actions have no matching SPI_GET*; the current values are
+## read back through GetSystemMetrics instead.
+print "SPI_SETDOUBLECLKWIDTH, SPI_SETDOUBLECLKHEIGHT"
+x=win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK)
+y=win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK)
+print 'x,y:', x, y
+win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKWIDTH, x+1)
+win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKHEIGHT, y+2)
+new_x=win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK)
+new_y=win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK)
+print 'new x,y:', new_x, new_y
+assert new_x==x+1
+assert new_y==y+2
+win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKWIDTH, x)
+win32gui.SystemParametersInfo(win32con.SPI_SETDOUBLECLKHEIGHT, y)
+new_x=win32api.GetSystemMetrics(win32con.SM_CXDOUBLECLK)
+new_y=win32api.GetSystemMetrics(win32con.SM_CYDOUBLECLK)
+assert new_x==x
+assert new_y==y
+
+
+
+print "SPI_SETDRAGWIDTH, SPI_SETDRAGHEIGHT"
+dw=win32api.GetSystemMetrics(win32con.SM_CXDRAG)
+dh=win32api.GetSystemMetrics(win32con.SM_CYDRAG)
+print 'dw,dh:', dw, dh
+win32gui.SystemParametersInfo(win32con.SPI_SETDRAGWIDTH,dw+1)
+win32gui.SystemParametersInfo(win32con.SPI_SETDRAGHEIGHT,dh+2)
+new_dw=win32api.GetSystemMetrics(win32con.SM_CXDRAG)
+new_dh=win32api.GetSystemMetrics(win32con.SM_CYDRAG)
+print 'new dw,dh:', new_dw, new_dh
+assert new_dw==dw+1
+assert new_dh==dh+2
+win32gui.SystemParametersInfo(win32con.SPI_SETDRAGWIDTH,dw)
+win32gui.SystemParametersInfo(win32con.SPI_SETDRAGHEIGHT,dh)
+new_dw=win32api.GetSystemMetrics(win32con.SM_CXDRAG)
+new_dh=win32api.GetSystemMetrics(win32con.SM_CYDRAG)
+assert new_dw==dw
+assert new_dh==dh
+
+
+
+## Wallpaper slideshow: cycle through each .bmp in the Windows directory,
+## then restore the original wallpaper.
+orig_wallpaper=win32gui.SystemParametersInfo(Action=win32con.SPI_GETDESKWALLPAPER)
+print 'Original: ',orig_wallpaper
+for bmp in glob.glob(os.path.join(os.environ['windir'],'*.bmp')):
+    print bmp
+    win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER, Param=bmp)
+    print win32gui.SystemParametersInfo(Action=win32con.SPI_GETDESKWALLPAPER)
+    time.sleep(1)
+
+win32gui.SystemParametersInfo(win32con.SPI_SETDESKWALLPAPER, Param=orig_wallpaper)
+
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/README.txt b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/README.txt
new file mode 100644
index 0000000..0a9e38d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/README.txt
@@ -0,0 +1,6 @@
+This directory contains a sample Python extension module which 
+includes PyWinTypes.h and links against PyWinTypesXX.lib.  The same technique 
+can be used to include and extend pythoncom.
+
+The sample does nothing useful other than to show how to compile and link
+such an extension using a standard pywin32 installation.
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/setup.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/setup.py
new file mode 100644
index 0000000..be59df3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/setup.py
@@ -0,0 +1,23 @@
# A sample distutils script to show to build your own
# extension module which extends pywintypes or pythoncom.
#
# Use 'python setup.py build' to build this extension.
import os
from distutils.core import setup, Extension
from distutils.sysconfig import get_python_lib

# Directory where the PyWin32 .h and .lib files are installed.  For a
# win32com extension you would also need win32com\Include and win32com\Libs.
pywin32_root = os.path.join(get_python_lib(), "win32")

extension = Extension(
    "win32_extension",
    ["win32_extension.cpp"],
    include_dirs=[os.path.join(pywin32_root, "Include")],
    library_dirs=[os.path.join(pywin32_root, "Libs")],
)

setup(
    name="win32 extension sample",
    version="0.1",
    ext_modules=[extension],
)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/win32_extension.cpp b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/win32_extension.cpp
new file mode 100644
index 0000000..8fcf965
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/c_extension/win32_extension.cpp
@@ -0,0 +1,21 @@
+// Note: this sample does nothing useful other than to show you how
+// your own Python extension can link with and use the functions from
+// pywintypesxx.dll
+#include "Python.h"
+#include "PyWinTypes.h"
+
+// Module method table.  This module exposes no functions of its own, so
+// the table contains only the zeroed terminating sentinel entry.
+static struct PyMethodDef win32extension_functions[] = {
+    0
+};
+
+// Module init entry point, invoked by the interpreter on
+// 'import win32_extension' (Python 2 naming convention: init<module>).
+extern "C" __declspec(dllexport)
+void initwin32_extension(void)
+{
+  // Initialize PyWin32 globals (such as error objects etc)
+  PyWinGlobals_Ensure();
+
+  PyObject *module;
+  module = Py_InitModule("win32_extension", win32extension_functions);
+  if (!module)
+    return;
+}
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/cerapi.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/cerapi.py
new file mode 100644
index 0000000..640b222
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/cerapi.py
@@ -0,0 +1,212 @@
+# A demo of the Windows CE Remote API
+#
+# This connects to a CE device, and interacts with it.
+
+import wincerapi
+import win32event
+import win32api
+import win32con
+import os
+import sys
+import getopt
+from repr import repr
+
+
+def DumpPythonRegistry():
+    # Check whether Python is installed on the remote CE device by looking
+    # up its registered PythonPath under HKLM.  Returns 1 if found (and
+    # prints the path), 0 otherwise.
+    try:
+        h = wincerapi.CeRegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, "Software\\Python\\PythonCore\\%s\\PythonPath" % sys.winver)
+    except win32api.error:
+        print "The remote device does not appear to have Python installed"
+        return 0
+    path, typ = wincerapi.CeRegQueryValueEx(h, None)
+    print "The remote PythonPath is '%s'" % (str(path), )
+    h.Close()
+    return 1
+
+def DumpRegistry(root, level=0):
+    # A recursive dump of the remote registry to test most functions.
+    h = wincerapi.CeRegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE, None)
+    level_prefix = " " * level
+    index = 0
+    # Enumerate values.
+    while 1:
+        try:
+            name, data, typ = wincerapi.CeRegEnumValue(root, index)
+        except win32api.error:
+            break
+        print "%s%s=%s" % (level_prefix, name, repr(str(data)))
+        index = index+1
+    # Now enumerate all keys.
+    index=0
+    while 1:
+        try:
+            name, klass = wincerapi.CeRegEnumKeyEx(root, index)
+        except win32api.error:
+            break
+        print "%s%s\\" % (level_prefix, name)
+        subkey = wincerapi.CeRegOpenKeyEx(root, name)
+        DumpRegistry(subkey, level+1)
+        index = index+1
+
+def DemoCopyFile():
+    # Round-trip a small file on the remote device: create it, write a
+    # string, re-open it and verify the contents, then delete it.
+    # Create a file on the device, and write a string.
+    cefile = wincerapi.CeCreateFile("TestPython", win32con.GENERIC_WRITE, 0, None, win32con.OPEN_ALWAYS, 0, None)
+    wincerapi.CeWriteFile(cefile, "Hello from Python")
+    cefile.Close()
+    # reopen the file and check the data.
+    cefile = wincerapi.CeCreateFile("TestPython", win32con.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None)
+    if wincerapi.CeReadFile(cefile, 100) != "Hello from Python":
+        print "Couldnt read the data from the device!"
+    cefile.Close()
+    # Delete the test file
+    wincerapi.CeDeleteFile("TestPython")
+    print "Created, wrote to, read from and deleted a test file!"
+
+def DemoCreateProcess():
+    # Launch Python.exe on the remote device to prove process creation
+    # works; failures are reported rather than propagated.
+    try:
+        hp, ht, pid, tid = wincerapi.CeCreateProcess("Windows\\Python.exe", "", None, None, 0, 0, None, "", None)
+
+        # Not necessary, except to see if handle closing raises an exception
+        # (if auto-closed, the error is suppressed)
+        hp.Close()
+        ht.Close()
+        print "Python is running on the remote device!"
+    except win32api.error, (hr, fn, msg):
+        print "Couldnt execute remote process -", msg
+
+def DumpRemoteMachineStatus():
+    ACLineStatus, BatteryFlag, BatteryLifePercent, BatteryLifeTime, BatteryFullLifeTime, BackupBatteryFlag, BackupBatteryLifePercent, BackupBatteryLifeTime, BackupBatteryLifeTime = \
+            wincerapi.CeGetSystemPowerStatusEx()
+    if ACLineStatus:
+        power = "AC"
+    else:
+        power = "battery"
+    if BatteryLifePercent==255:
+        batPerc = "unknown"
+    else:
+        batPerc = BatteryLifePercent
+    print "The batteries are at %s%%, and is currently being powered by %s" % (batPerc, power)
+
+    memLoad, totalPhys, availPhys, totalPage, availPage, totalVirt, availVirt = \
+            wincerapi.CeGlobalMemoryStatus()
+
+    print "The memory is %d%% utilized." % (memLoad)
+    print "%-20s%-10s%-10s" % ("", "Total", "Avail")
+    print "%-20s%-10s%-10s" % ("Physical Memory", totalPhys, availPhys)
+    print "%-20s%-10s%-10s" % ("Virtual Memory", totalVirt, availVirt)
+    print "%-20s%-10s%-10s" % ("Paging file", totalPage, availPage)
+
+
+    storeSize, freeSize = wincerapi.CeGetStoreInformation()
+    print "%-20s%-10s%-10s" % ("File store", storeSize, freeSize)
+
+    print "The CE temp path is", wincerapi.CeGetTempPath()
+    print "The system info for the device is", wincerapi.CeGetSystemInfo()
+
+def DumpRemoteFolders():
+    # Print the location of every CSIDL_* special folder the device knows,
+    # then resolve and print each shortcut in the Start Menu.
+    # Dump all special folders possible.
+    for name, val in wincerapi.__dict__.items():
+        if name[:6]=="CSIDL_":
+            try:
+                loc = str(wincerapi.CeGetSpecialFolderPath(val))
+                print "Folder %s is at %s" % (name, loc)
+            except win32api.error, details:
+                # Folder not defined on this device - skip it silently.
+                pass
+
+    # Get the shortcut targets for the "Start Menu"
+    print "Dumping start menu shortcuts..."
+    try:
+        startMenu = str(wincerapi.CeGetSpecialFolderPath(wincerapi.CSIDL_STARTMENU))
+    except win32api.error, details:
+        print "This device has no start menu!", details
+        startMenu = None
+
+    if startMenu:
+        for fileAttr in wincerapi.CeFindFiles(os.path.join(startMenu, "*")):
+            # fileAttr is a WIN32_FIND_DATA-style tuple; index 8 holds the
+            # file name.
+            fileName = fileAttr[8]
+            fullPath = os.path.join(startMenu, str(fileName))
+            try:
+                resolved = wincerapi.CeSHGetShortcutTarget(fullPath)
+            except win32api.error, (rc, fn, msg):
+                resolved = "#Error - %s" % msg
+            print "%s->%s" % (fileName, resolved)
+
+    #       print "The start menu is at",
+    #       print wincerapi.CeSHGetShortcutTarget("\\Windows\\Start Menu\\Shortcut to Python.exe.lnk")
+
+def usage():
+    print "Options:"
+    print "-a - Execute all demos"
+    print "-p - Execute Python process on remote device"
+    print "-r - Dump the remote registry"
+    print "-f - Dump all remote special folder locations"
+    print "-s - Dont dump machine status"
+    print "-y - Perform asynch init of CE connection"
+
+def main():
+    async_init = bStartPython = bDumpRegistry = bDumpFolders = 0
+    bDumpStatus = 1
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], "apr")
+    except getopt.error, why:
+        print "Invalid usage:", why
+        usage()
+        return
+
+    for o, v in opts:
+        if o=="-a":
+            bStartPython = bDumpRegistry = bDumpStatus = bDumpFolders = asynch_init = 1
+        if o=="-p":
+            bStartPython=1
+        if o=="-r":
+            bDumpRegistry=1
+        if o=="-s":
+            bDumpStatus=0
+        if o=="-f":
+            bDumpFolders = 1
+        if o=="-y":
+            print "Doing asynch init of CE connection"
+            async_init = 1
+
+    if async_init:
+        event, rc = wincerapi.CeRapiInitEx()
+        while 1:
+            rc = win32event.WaitForSingleObject(event, 500)
+            if rc==win32event.WAIT_OBJECT_0:
+                # We connected.
+                break
+            else:
+                print "Waiting for Initialize to complete (picture a Cancel button here :)"
+    else:
+        wincerapi.CeRapiInit()
+    print "Connected to remote CE device."
+    try:
+        verinfo = wincerapi.CeGetVersionEx()
+        print "The device is running windows CE version %d.%d - %s" % (verinfo[0], verinfo[1], verinfo[4])
+
+        if bDumpStatus:
+            print "Dumping remote machine status"
+            DumpRemoteMachineStatus()
+
+        if bDumpRegistry:
+            print "Dumping remote registry..."
+            DumpRegistry(win32con.HKEY_LOCAL_MACHINE)
+
+        if bDumpFolders:
+            print "Dumping remote folder information"
+            DumpRemoteFolders()
+
+        DemoCopyFile()
+        if bStartPython:
+            print "Starting remote Python process"
+            if DumpPythonRegistry():
+                DemoCreateProcess()
+            else:
+                print "Not trying to start Python, as it's not installed"
+
+    finally:
+        wincerapi.CeRapiUninit()
+        print "Disconnected"
+
+if __name__=='__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/dde/ddeclient.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/dde/ddeclient.py
new file mode 100644
index 0000000..7ec40aa
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/dde/ddeclient.py
@@ -0,0 +1,19 @@
+# 'Request' example added jjk  11/20/98
+
+import win32ui
+import dde
+
+server = dde.CreateServer()
+server.Create("TestClient")
+
+conversation = dde.CreateConversation(server)
+
+conversation.ConnectTo("RunAny", "RunAnyCommand")
+conversation.Exec("DoSomething")
+conversation.Exec("DoSomethingElse")
+
+conversation.ConnectTo("RunAny", "ComputeStringLength")
+s = 'abcdefghi'
+sl = conversation.Request(s)
+print 'length of "%s" is %s'%(s,sl)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/dde/ddeserver.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/dde/ddeserver.py
new file mode 100644
index 0000000..2735da9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/dde/ddeserver.py
@@ -0,0 +1,38 @@
+# 'Request' example added jjk  11/20/98
+
+import win32ui
+from pywin.mfc import object
+import dde
+
+class MySystemTopic(object.Object):
+	# Wraps the DDE "System" topic; Exec requests are simply echoed.
+	def __init__(self):
+		object.Object.__init__(self, dde.CreateServerSystemTopic())
+
+	def Exec(self, cmd):
+		# Called for each DDE Exec transaction sent to the System topic.
+		print "System Topic asked to exec", cmd
+
+class MyOtherTopic(object.Object):
+	# A named topic that just echoes any Exec command it receives.
+	def __init__(self, topicName):
+		object.Object.__init__(self, dde.CreateTopic(topicName))
+
+	def Exec(self, cmd):
+		print "Other Topic asked to exec", cmd
+
+class MyRequestTopic(object.Object):
+	# A topic that answers DDE Request transactions.  A string item is
+	# registered so clients have an item to request against.
+	def __init__(self, topicName):
+		topic = dde.CreateTopic(topicName)
+		topic.AddItem(dde.CreateStringItem(""))
+		object.Object.__init__(self, topic)
+
+	def Request(self, aString):
+		# Reply with the length of the requested string (as a string).
+		print "Request Topic asked to compute length of:", aString
+		return(str(len(aString)))
+
+# Register the three demo topics under the service name 'RunAny', then
+# pump messages forever so DDE transactions can be serviced.
+server = dde.CreateServer()
+server.AddTopic(MySystemTopic())
+server.AddTopic(MyOtherTopic("RunAnyCommand"))
+server.AddTopic(MyRequestTopic("ComputeStringLength"))
+server.Create('RunAny')
+
+while 1:
+	win32ui.PumpWaitingMessages(0, -1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/eventLogDemo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/eventLogDemo.py
new file mode 100644
index 0000000..92bb485
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/eventLogDemo.py
@@ -0,0 +1,94 @@
+import win32evtlog, traceback
+import win32api, win32con
+import win32security # To translate NT Sids to account names.
+
+from win32evtlogutil import *
+
+def ReadLog(computer, logType="Application", dumpEachRecord = 0):
+    # read the entire log back.
+    h=win32evtlog.OpenEventLog(computer, logType)
+    numRecords = win32evtlog.GetNumberOfEventLogRecords(h)
+#       print "There are %d records" % numRecords
+
+    num=0
+    while 1:
+        objects = win32evtlog.ReadEventLog(h, win32evtlog.EVENTLOG_BACKWARDS_READ|win32evtlog.EVENTLOG_SEQUENTIAL_READ, 0)
+        if not objects:
+            break
+        for object in objects:
+            # get it for testing purposes, but dont print it.
+            msg = SafeFormatMessage(object, logType).encode("mbcs")
+            if object.Sid is not None:
+                try:
+                    domain, user, typ = win32security.LookupAccountSid(computer, object.Sid)
+                    sidDesc = "%s/%s" % (domain, user)
+                except win32security.error:
+                    sidDesc = str(object.Sid)
+                user_desc = "Event associated with user %s" % (sidDesc,)
+            else:
+                user_desc = None
+            if dumpEachRecord:
+                if user_desc:
+                    print user_desc
+                print msg
+        num = num + len(objects)
+
+    if numRecords == num:
+        print "Successfully read all", numRecords, "records"
+    else:
+        print "Couldn't get all records - reported %d, but found %d" % (numRecords, num)
+        print "(Note that some other app may have written records while we were running!)"
+    win32evtlog.CloseEventLog(h)
+
+def Usage():
+    print "Writes an event to the event log."
+    print "-w : Dont write any test records."
+    print "-r : Dont read the event log"
+    print "-c : computerName : Process the log on the specified computer"
+    print "-v : Verbose"
+    print "-t : LogType - Use the specified log - default = 'Application'"
+
+
+def test():
+    # check if running on Windows NT, if not, display notice and terminate
+    if win32api.GetVersion() & 0x80000000:
+        print "This sample only runs on NT"
+        return
+
+    import sys, getopt
+    opts, args = getopt.getopt(sys.argv[1:], "rwh?c:t:v")
+    computer = None
+    do_read = do_write = 1
+
+    logType = "Application"
+    verbose = 0
+
+    if len(args)>0:
+        print "Invalid args"
+        usage()
+        return 1
+    for opt, val in opts:
+        if opt == '-t':
+            logType = val
+        if opt == '-c':
+            computer = val
+        if opt in ['-h', '-?']:
+            Usage()
+            return
+        if opt=='-r':
+            do_read = 0
+        if opt=='-w':
+            do_write = 0
+        if opt=='-v':
+            verbose = verbose + 1
+    if do_write:
+        ReportEvent(logType, 2, strings=["The message text for event 2"], data = "Raw\0Data")
+        ReportEvent(logType, 1, eventType=win32evtlog.EVENTLOG_WARNING_TYPE, strings=["A warning"], data = "Raw\0Data")
+        ReportEvent(logType, 1, eventType=win32evtlog.EVENTLOG_INFORMATION_TYPE, strings=["An info"], data = "Raw\0Data")
+        print "Successfully wrote 3 records to the log"
+
+    if do_read:
+        ReadLog(computer, logType, verbose > 0)
+
+if __name__=='__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/getfilever.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/getfilever.py
new file mode 100644
index 0000000..1212419
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/getfilever.py
@@ -0,0 +1,24 @@
+import os, win32api
+
+ver_strings=('Comments','InternalName','ProductName', 
+    'CompanyName','LegalCopyright','ProductVersion', 
+    'FileDescription','LegalTrademarks','PrivateBuild', 
+    'FileVersion','OriginalFilename','SpecialBuild')
+fname = os.environ["comspec"]
+d=win32api.GetFileVersionInfo(fname, '\\')
+## backslash as parm returns dictionary of numeric info corresponding to VS_FIXEDFILEINFO struc
+for n, v in d.items():
+    print n, v
+
+pairs=win32api.GetFileVersionInfo(fname, '\\VarFileInfo\\Translation')
+## \VarFileInfo\Translation returns list of available (language, codepage) pairs that can be used to retreive string info
+## any other must be of the form \StringfileInfo\%04X%04X\parm_name, middle two are language/codepage pair returned from above
+for lang, codepage in pairs:
+    print 'lang: ', lang, 'codepage:', codepage
+    for ver_string in ver_strings:
+        str_info=u'\\StringFileInfo\\%04X%04X\\%s' %(lang,codepage,ver_string)
+        ## print str_info
+        print ver_string, win32api.GetFileVersionInfo(fname, str_info)
+
+        
+    
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/images/frowny.bmp b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/images/frowny.bmp
new file mode 100644
index 0000000..43e7621e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/images/frowny.bmp
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/images/smiley.bmp b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/images/smiley.bmp
new file mode 100644
index 0000000..12ed5de
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/images/smiley.bmp
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/pipes/cat.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/pipes/cat.py
new file mode 100644
index 0000000..be6f3ed0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/pipes/cat.py
@@ -0,0 +1,16 @@
'''cat.py
a version of unix cat, tweaked to show off runproc.py
'''

import sys

# Copy stdin to stdout one byte at a time, flushing after every write so
# the parent sees output immediately.  The terminating empty read is
# written too (a no-op), exactly matching the original's I/O sequence.
while 1:
    chunk = sys.stdin.read(1)
    sys.stdout.write(chunk)
    sys.stdout.flush()
    if not chunk:
        break

# Just here to have something to read from stderr.
sys.stderr.write("Blah...")

# end of cat.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/pipes/runproc.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/pipes/runproc.py
new file mode 100644
index 0000000..e63f3bd6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/pipes/runproc.py
@@ -0,0 +1,110 @@
+'''runproc.py
+
+start a process with three inherited pipes.
+Try to write to and read from those.
+'''
+
+import win32api
+import win32pipe
+import win32file
+import win32process
+import win32security
+import win32con
+import msvcrt
+import os
+
+class Process:
+    # Demonstrates spawning a child process with all three standard
+    # handles redirected through anonymous pipes, then talking to it.
+    def run(self, cmdline):
+        # Launch `cmdline` with stdin/stdout/stderr redirected to pipes,
+        # write a line to its stdin, then read and print its stdout and
+        # stderr until EOF.
+        # security attributes for pipes
+        sAttrs = win32security.SECURITY_ATTRIBUTES()
+        sAttrs.bInheritHandle = 1
+
+        # create pipes
+        hStdin_r,  self.hStdin_w  = win32pipe.CreatePipe(sAttrs, 0)
+        self.hStdout_r, hStdout_w = win32pipe.CreatePipe(sAttrs, 0)
+        self.hStderr_r, hStderr_w = win32pipe.CreatePipe(sAttrs, 0)
+
+        # set the info structure for the new process.
+        StartupInfo = win32process.STARTUPINFO()
+        StartupInfo.hStdInput  = hStdin_r
+        StartupInfo.hStdOutput = hStdout_w
+        StartupInfo.hStdError  = hStderr_w
+        StartupInfo.dwFlags = win32process.STARTF_USESTDHANDLES
+        # Mark doesn't support wShowWindow yet.
+        # StartupInfo.dwFlags = StartupInfo.dwFlags | win32process.STARTF_USESHOWWINDOW
+        # StartupInfo.wShowWindow = win32con.SW_HIDE
+        
+        # Create new output read handles and the input write handle. Set
+        # the inheritance properties to FALSE. Otherwise, the child inherits
+        # the these handles; resulting in non-closeable handles to the pipes
+        # being created.
+        # NOTE(review): only hStdin_w and hStdout_r are made
+        # non-inheritable below; hStderr_r is not, so the child inherits
+        # an unused handle to stderr's read end.  Appears harmless here
+        # (the child's copy closes when it exits) - confirm.
+        pid = win32api.GetCurrentProcess()  # actually a process *handle*
+
+        tmp = win32api.DuplicateHandle(
+            pid,
+            self.hStdin_w,
+            pid,
+            0,
+            0,     # non-inheritable!!
+            win32con.DUPLICATE_SAME_ACCESS)
+        # Close the inhertible version of the handle
+        win32file.CloseHandle(self.hStdin_w)
+        self.hStdin_w = tmp
+        tmp = win32api.DuplicateHandle(
+            pid,
+            self.hStdout_r,
+            pid,
+            0,
+            0,     # non-inheritable!
+            win32con.DUPLICATE_SAME_ACCESS)
+        # Close the inhertible version of the handle
+        win32file.CloseHandle(self.hStdout_r)
+        self.hStdout_r = tmp
+
+        # start the process.
+        hProcess, hThread, dwPid, dwTid = win32process.CreateProcess(
+                None,   # program
+                cmdline,# command line
+                None,   # process security attributes
+                None,   # thread attributes
+                1,      # inherit handles, or USESTDHANDLES won't work.
+                        # creation flags. Don't access the console.
+                0,      # Don't need anything here.
+                        # If you're in a GUI app, you should use
+                        # CREATE_NEW_CONSOLE here, or any subprocesses
+                        # might fall victim to the problem described in:
+                        # KB article: Q156755, cmd.exe requires
+                        # an NT console in order to perform redirection.. 
+                None,   # no new environment
+                None,   # current directory (stay where we are)
+                StartupInfo)
+        # normally, we would save the pid etc. here...
+
+        # Child is launched. Close the parents copy of those pipe handles
+        # that only the child should have open.
+        # You need to make sure that no handles to the write end of the
+        # output pipe are maintained in this process or else the pipe will
+        # not close when the child process exits and the ReadFile will hang.
+        win32file.CloseHandle(hStderr_w)
+        win32file.CloseHandle(hStdout_w)
+        win32file.CloseHandle(hStdin_r)
+
+        # Wrap the raw pipe handles in CRT-level file objects so normal
+        # Python file I/O can be used against the child.
+        self.stdin = os.fdopen(msvcrt.open_osfhandle(self.hStdin_w, 0), "wb")
+        self.stdin.write('hmmmmm\r\n')
+        self.stdin.flush()
+        self.stdin.close()
+
+        self.stdout = os.fdopen(msvcrt.open_osfhandle(self.hStdout_r, 0), "rb")
+        print "Read on stdout: ", repr(self.stdout.read())
+
+        self.stderr = os.fdopen(msvcrt.open_osfhandle(self.hStderr_r, 0), "rb")
+        print "Read on stderr: ", repr(self.stderr.read())
+
+if __name__ == '__main__':
+    p = Process()
+    # Run cat.py under the same interpreter executable as this script,
+    # exercising the pipe plumbing above end-to-end.
+    exe = win32api.GetModuleFileName(0)
+    p.run(exe + ' cat.py')
+
+# end of runproc.py
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/print_desktop.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/print_desktop.py
new file mode 100644
index 0000000..ae9dd4d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/print_desktop.py
@@ -0,0 +1,64 @@
+import win32print, pywintypes, win32con, win32gui, win32ui, win32api
+
+pname=win32print.GetDefaultPrinter()
+print pname
+p=win32print.OpenPrinter(pname)
+print 'Printer handle: ',p
+print_processor=win32print.GetPrinter(p,2)['pPrintProcessor']
+## call with last parm set to 0 to get total size needed for printer's DEVMODE
+dmsize=win32print.DocumentProperties(0, p, pname, None, None, 0)
+## dmDriverExtra should be total size - fixed size
+driverextra=dmsize - pywintypes.DEVMODEType().Size  ## need a better way to get DEVMODE.dmSize
+dm=pywintypes.DEVMODEType(driverextra)
+dm.Fields=dm.Fields|win32con.DM_ORIENTATION|win32con.DM_COPIES
+dm.Orientation=win32con.DMORIENT_LANDSCAPE
+dm.Copies=2
+win32print.DocumentProperties(0, p, pname, dm, dm, win32con.DM_IN_BUFFER|win32con.DM_OUT_BUFFER)
+
+pDC=win32gui.CreateDC(print_processor,pname,dm)
+printerwidth=win32ui.GetDeviceCaps(pDC, 110) ##PHYSICALWIDTH
+printerheight=win32ui.GetDeviceCaps(pDC, 111) ##PHYSICALHEIGHT
+
+hwnd=win32gui.GetDesktopWindow()
+l,t,r,b=win32gui.GetWindowRect(hwnd)
+desktopheight=b-t
+desktopwidth=r-l
+dDC = win32gui.GetWindowDC(hwnd)
+
+dcDC=win32gui.CreateCompatibleDC(dDC)
+dcBM = win32gui.CreateCompatibleBitmap(dDC, desktopwidth, desktopheight);
+win32gui.SelectObject(dcDC, dcBM)
+win32gui.StretchBlt(dcDC, 0, 0, desktopwidth, desktopheight, dDC, 0, 0, desktopwidth, desktopheight, win32con.SRCCOPY)
+
+pcDC=win32gui.CreateCompatibleDC(pDC)
+pcBM=win32gui.CreateCompatibleBitmap(pDC, printerwidth, printerheight)
+win32gui.SelectObject(pcDC, pcBM)
+win32gui.StretchBlt(pcDC, 0, 0, printerwidth, printerheight, dcDC, 0, 0, desktopwidth, desktopheight, win32con.SRCCOPY)
+
+win32print.StartDoc(pDC,('desktop.bmp',None,None,0))
+win32print.StartPage(pDC)
+win32gui.StretchBlt(pDC, 0, 0, int(printerwidth*.9), int(printerheight*.9), pcDC, 0, 0, printerwidth, printerheight, win32con.SRCCOPY)
+
+font=win32gui.LOGFONT()
+font.lfHeight=int(printerheight/20)
+font.lfWidth=font.lfHeight
+font.lfWeight=150
+font.lfItalic=1
+font.lfUnderline=1
+hf=win32gui.CreateFontIndirect(font)
+win32gui.SelectObject(pDC,hf)
+win32gui.SetBkMode(pDC, win32con.TRANSPARENT)
+win32gui.SetTextColor(pDC,win32api.RGB(0,255,0))
+win32gui.DrawText(pDC,'Printed by Python!', -1,
+    (0,0, int(printerwidth*.9), int(printerheight*.9)),
+    win32con.DT_RIGHT|win32con.DT_BOTTOM|win32con.DT_SINGLELINE)
+win32print.EndPage(pDC)
+win32print.EndDoc(pDC)
+
+win32print.ClosePrinter(p)
+win32gui.DeleteDC(dDC)
+win32gui.DeleteDC(dcDC)
+win32gui.DeleteDC(pDC)
+win32gui.DeleteDC(pcDC)
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/rastest.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/rastest.py
new file mode 100644
index 0000000..56fc3a9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/rastest.py
@@ -0,0 +1,136 @@
+# rastest.py - test/demonstrate the win32ras module.
+# Much of the code here contributed by Jethro Wright.
+
+import sys
+import string
+import os
+import win32ras
+
# Build a little dictionary of RAS states to decent strings.
# eg win32ras.RASCS_OpenPort -> "OpenPort"
stateMap = {}
for name, val in win32ras.__dict__.items():
    if name[:6]=="RASCS_":
        stateMap[val] = name[6:]

# Use a lock so the callback can tell the main thread when it is finished.
import win32event
# Auto-reset event, initially unsignalled; Callback() sets it when the
# dial attempt succeeds or fails.
callbackEvent = win32event.CreateEvent(None, 0, 0, None)
+
def Callback( hras, msg, state, error, exterror):
    """RAS status callback: print each connection-state change, and
    signal callbackEvent once the dial attempt finishes (connected,
    failed with an error, or disconnected)."""
#       print "Callback called with ", hras, msg, state, error, exterror
    stateName = stateMap.get(state, "Unknown state?")
    print "Status is %s (%04lx), error code is %d" % (stateName, state, error)
    finished = state in [win32ras.RASCS_Connected]
    if finished:
        win32event.SetEvent(callbackEvent)
    if error != 0 or int( state ) == win32ras.RASCS_Disconnected:
        #       we know for sure this is a good place to hangup....
        print "Detected call failure: %s" % win32ras.GetErrorString( error )
        HangUp( hras )
        win32event.SetEvent(callbackEvent)
+
+def ShowConnections():
+    print "All phone-book entries:"
+    for (name,) in win32ras.EnumEntries():
+        print " ", name
+    print "Current Connections:"
+    for con in win32ras.EnumConnections():
+        print " ", con
+
def EditEntry(entryName):
    """Open the system's phonebook-entry editor dialog for entryName,
    reporting (rather than raising) a failure to locate the entry."""
    try:
        win32ras.EditPhonebookEntry(0,None,entryName)
    except win32ras.error, (rc, function, msg):
        print "Can not edit/find the RAS entry -", msg
+
+def HangUp( hras ):
+    #       trap potential, irrelevant errors from win32ras....
+    try:
+        win32ras.HangUp( hras )
+    except:
+        print "Tried to hang up gracefully on error, but didn't work...."
+    return None
+
+def Connect(entryName, bUseCallback):
+    if bUseCallback:
+        theCallback = Callback
+        win32event.ResetEvent(callbackEvent)
+    else:
+        theCallback = None
+    #       in order to *use* the username/password of a particular dun entry, one must
+    #       explicitly get those params under win95....
+    try:
+        dp, b = win32ras.GetEntryDialParams( None, entryName )
+    except:
+        print "Couldn't find DUN entry: %s" % entryName
+    else:
+        hras, rc = win32ras.Dial(None, None, (entryName, "", "", dp[ 3 ], dp[ 4 ], ""),theCallback)
+    #       hras, rc = win32ras.Dial(None, None, (entryName, ),theCallback)
+    #       print hras, rc
+        if not bUseCallback and rc <> 0:
+            print "Could not dial the RAS connection:", win32ras.GetErrorString(rc)
+            hras = HangUp( hras )
+        #       don't wait here if there's no need to....
+        elif bUseCallback and win32event.WaitForSingleObject(callbackEvent, 60000)!=win32event.WAIT_OBJECT_0:
+            print "Gave up waiting for the process to complete!"
+            #       sdk docs state one must explcitly hangup, even if there's an error....
+            try:
+                cs = win32ras.GetConnectStatus( hras )
+            except:
+                #       on error, attempt a hang up anyway....
+                hras = HangUp( hras )
+            else:
+                if int( cs[ 0 ] ) == win32ras.RASCS_Disconnected:
+                    hras = HangUp( hras )
+    return hras, rc
+
+def Disconnect( rasEntry ):
+    # Need to find the entry
+    name = string.lower( rasEntry )
+    for hcon, entryName, devName, devType in win32ras.EnumConnections():
+        if string.lower( entryName ) == name:
+            win32ras.HangUp( hcon )
+            print "Disconnected from", rasEntry
+            break
+    else:
+        print "Could not find an open connection to", entryName
+
# Command-line help, formatted with the script's base name.
# NOTE(review): the Usage line omits the -e option documented below it.
usage = """
Usage: %s [-s] [-l] [-c connection] [-d connection]
-l : List phone-book entries and current connections.
-s : Show status while connecting/disconnecting (uses callbacks)
-c : Connect to the specified phonebook name.
-d : Disconnect from the specified phonebook name.
-e : Edit the specified phonebook entry.
"""

def main():
    """Parse command-line options and dispatch to the demo helpers.

    Options are processed in the order given, so -s must appear before
    -c for the status callback to be used.
    """
    import getopt
    try:
        opts, args = getopt.getopt(sys.argv[1:], "slc:d:e:")
    except getopt.error, why:
        print why
        print usage % (os.path.basename(sys.argv[0],))
        return

    bCallback = 0
    # No positional arguments are accepted; at least one option is required.
    if args or not opts:
        print usage % (os.path.basename(sys.argv[0],))
        return
    for opt, val in opts:
        if opt=="-s":
            bCallback = 1
        if opt=="-l":
            ShowConnections()
        if opt=="-c":
            hras, rc = Connect(val, bCallback)
            if hras != None:
                print "hras: 0x%8lx, rc: 0x%04x" % ( hras, rc )
        if opt=="-d":
            Disconnect(val)
        if opt=="-e":
            EditEntry(val)

if __name__=='__main__':
    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/account_rights.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/account_rights.py
new file mode 100644
index 0000000..3bae1010
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/account_rights.py
@@ -0,0 +1,31 @@
# Demo of the LSA account-rights APIs: grant the local 'tmp' account a
# set of privileges, list them, revoke two of them, and list again.
# NOTE(review): assumes a local account named 'tmp' exists - create one
# (or change the name) before running.
import win32security,win32file,win32api,ntsecuritycon,win32con
from security_enums import TRUSTEE_TYPE,TRUSTEE_FORM,ACE_FLAGS,ACCESS_MODE

# Privileges this process needs enabled before it may edit LSA policy.
new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
             (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ?
            )

ph = win32api.GetCurrentProcess()
th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS)  ##win32con.TOKEN_ADJUST_PRIVILEGES)
win32security.AdjustTokenPrivileges(th,0,new_privs)

# '' targets the local machine's LSA policy database.
policy_handle = win32security.GetPolicyHandle('',win32security.POLICY_ALL_ACCESS)
tmp_sid = win32security.LookupAccountName('','tmp')[0]

# Grant four privileges to 'tmp' and show everything it now holds.
privs=[ntsecuritycon.SE_DEBUG_NAME,ntsecuritycon.SE_TCB_NAME,ntsecuritycon.SE_RESTORE_NAME,ntsecuritycon.SE_REMOTE_SHUTDOWN_NAME]
win32security.LsaAddAccountRights(policy_handle,tmp_sid,privs)

privlist=win32security.LsaEnumerateAccountRights(policy_handle,tmp_sid)
for priv in privlist:
    print priv
    
# Revoke two of them (third arg 0 = don't delete the whole account entry).
privs=[ntsecuritycon.SE_DEBUG_NAME,ntsecuritycon.SE_TCB_NAME]
win32security.LsaRemoveAccountRights(policy_handle,tmp_sid,0,privs)

privlist=win32security.LsaEnumerateAccountRights(policy_handle,tmp_sid)
for priv in privlist:
    print priv

win32security.LsaClose(policy_handle)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/explicit_entries.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/explicit_entries.py
new file mode 100644
index 0000000..082f37b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/explicit_entries.py
@@ -0,0 +1,144 @@
+import os
+import win32security,win32file,win32api,ntsecuritycon,win32con
+from security_enums import TRUSTEE_TYPE,TRUSTEE_FORM,ACE_FLAGS,ACCESS_MODE
+
+fname = os.path.join(win32api.GetTempPath(), "win32security_test.txt")
+f=open(fname, "w")
+f.write("Hello from Python\n");
+f.close()
+print "Testing on file", fname
+
+new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ?
+            )
+
+ph = win32api.GetCurrentProcess()
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS)  ##win32con.TOKEN_ADJUST_PRIVILEGES)
+win32security.AdjustTokenPrivileges(th,0,new_privs)
+
+all_security_info = \
+    win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
+    win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
+
+sd=win32security.GetFileSecurity(fname,all_security_info)
+
+old_sacl=sd.GetSecurityDescriptorSacl()
+if old_sacl==None:
+    old_sacl=win32security.ACL()
+old_dacl=sd.GetSecurityDescriptorDacl()
+if old_dacl==None:
+    old_dacl=win32security.ACL()
+
+my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0]
+tmp_sid = win32security.LookupAccountName('','tmp')[0]
+pwr_sid = win32security.LookupAccountName('','Power Users')[0]
+
+
+## MultipleTrustee,MultipleTrusteeOperation,TrusteeForm,TrusteeType,Identifier
+## first two are ignored
+my_trustee = {}
+my_trustee['MultipleTrustee']=None
+my_trustee['MultipleTrusteeOperation']=0
+my_trustee['TrusteeForm']=TRUSTEE_FORM.TRUSTEE_IS_SID
+my_trustee['TrusteeType']=TRUSTEE_TYPE.TRUSTEE_IS_USER
+my_trustee['Identifier']=my_sid
+
+tmp_trustee = {}
+tmp_trustee['MultipleTrustee']=None
+tmp_trustee['MultipleTrusteeOperation']=0
+tmp_trustee['TrusteeForm']=TRUSTEE_FORM.TRUSTEE_IS_NAME
+tmp_trustee['TrusteeType']=TRUSTEE_TYPE.TRUSTEE_IS_USER
+tmp_trustee['Identifier']='rupole\\tmp'
+
+pwr_trustee = {}
+pwr_trustee['MultipleTrustee']=None
+pwr_trustee['MultipleTrusteeOperation']=0
+pwr_trustee['TrusteeForm']=TRUSTEE_FORM.TRUSTEE_IS_SID
+pwr_trustee['TrusteeType']=TRUSTEE_TYPE.TRUSTEE_IS_USER
+pwr_trustee['Identifier']=pwr_sid
+
+expl_list=[]
+expl_list.append(
+    {
+    'Trustee':my_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.SET_AUDIT_SUCCESS, ##|ACCESS_MODE.SET_AUDIT_FAILURE,
+    'AccessPermissions':win32con.GENERIC_ALL
+    }
+  )
+
+expl_list.append(
+    {
+    'Trustee':my_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.SET_AUDIT_FAILURE,
+    'AccessPermissions':win32con.GENERIC_ALL
+    }
+  )
+
+expl_list.append(
+    {
+    'Trustee':tmp_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.SET_AUDIT_SUCCESS,
+    'AccessPermissions':win32con.GENERIC_ALL
+    }
+  )
+
+expl_list.append(
+    {
+    'Trustee':tmp_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.SET_AUDIT_FAILURE,
+    'AccessPermissions':win32con.GENERIC_ALL
+    }
+  )
+old_sacl.SetEntriesInAcl(expl_list)
+
+expl_list=[]
+expl_list.append(
+    {
+    'Trustee':tmp_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.DENY_ACCESS,
+    'AccessPermissions':win32con.DELETE
+    }
+  )
+
+expl_list.append(
+    {
+    'Trustee':tmp_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.GRANT_ACCESS,
+    'AccessPermissions':win32con.WRITE_OWNER
+    }
+  )
+expl_list.append(
+    {
+    'Trustee':pwr_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.GRANT_ACCESS,
+    'AccessPermissions':win32con.GENERIC_READ
+    }
+  )
+expl_list.append(
+    {
+    'Trustee':my_trustee,
+    'Inheritance':ACE_FLAGS.NO_INHERITANCE,
+    'AccessMode':ACCESS_MODE.GRANT_ACCESS,
+    'AccessPermissions':win32con.GENERIC_ALL
+    }
+  )
+
+old_dacl.SetEntriesInAcl(expl_list)
+sd.SetSecurityDescriptorSacl(1,old_sacl,1)
+sd.SetSecurityDescriptorDacl(1,old_dacl,1)
+sd.SetSecurityDescriptorOwner(pwr_sid,1)
+
+win32security.SetFileSecurity(fname,
+            all_security_info,
+            sd)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/get_policy_info.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/get_policy_info.py
new file mode 100644
index 0000000..8ebd440
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/get_policy_info.py
@@ -0,0 +1,23 @@
# Query several LSA policy information classes and print the results.
import win32security,win32file,win32api,ntsecuritycon,win32con
# NOTE(review): 'rupole' is the demo author's machine name - change it
# (or pass '' for the local machine) before running.
policy_handle = win32security.GetPolicyHandle('rupole',win32security.POLICY_ALL_ACCESS)

## mod_nbr, mod_time = win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyModificationInformation)
## print mod_nbr, mod_time

# DNS domain information: name, DNS name, forest, GUID and SID.
domain_name,dns_domain_name, dns_forest_name, domain_guid, domain_sid = \
        win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyDnsDomainInformation)
print domain_name, dns_domain_name, dns_forest_name, domain_guid, domain_sid

event_audit_info=win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyAuditEventsInformation)
print event_audit_info

domain_name,sid =win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyPrimaryDomainInformation)
print domain_name, sid

domain_name,sid =win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyAccountDomainInformation)
print domain_name, sid

server_role = win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyLsaServerRoleInformation)
print 'server role: ',server_role

win32security.LsaClose(policy_handle)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/list_rights.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/list_rights.py
new file mode 100644
index 0000000..66c6d941
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/list_rights.py
@@ -0,0 +1,20 @@
+import win32security,win32file,win32api,ntsecuritycon,win32con
+from security_enums import TRUSTEE_TYPE,TRUSTEE_FORM,ACE_FLAGS,ACCESS_MODE
+
+new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ?
+            )
+
+ph = win32api.GetCurrentProcess()
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS)  ##win32con.TOKEN_ADJUST_PRIVILEGES)
+win32security.AdjustTokenPrivileges(th,0,new_privs)
+
+policy_handle = win32security.GetPolicyHandle('',win32security.POLICY_ALL_ACCESS)
+
+sidlist=win32security.LsaEnumerateAccountsWithUserRight(policy_handle,ntsecuritycon.SE_RESTORE_NAME)
+for sid in sidlist:
+    print win32security.LookupAccountSid('',sid)
+
+win32security.LsaClose(policy_handle)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/lsaregevent.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/lsaregevent.py
new file mode 100644
index 0000000..841f44a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/lsaregevent.py
@@ -0,0 +1,8 @@
+import win32security, win32event
+evt = win32event.CreateEvent(None,0,0,None)
+win32security.LsaRegisterPolicyChangeNotification(win32security.PolicyNotifyAuditEventsInformation, evt)
+print "Waiting for you change Audit policy in Management console ..."
+ret_code=win32event.WaitForSingleObject(evt,1000000000)
+## should come back when you change Audit policy in Management console ...
+print ret_code
+win32security.LsaUnregisterPolicyChangeNotification(win32security.PolicyNotifyAuditEventsInformation, evt)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/lsastore.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/lsastore.py
new file mode 100644
index 0000000..4c53c470
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/lsastore.py
@@ -0,0 +1,11 @@
import win32security

# Demonstrate storing, retrieving and deleting LSA private data.
handle = win32security.GetPolicyHandle('', win32security.POLICY_ALL_ACCESS)
secret = 'some sensitive data'
key = 'tmp'
win32security.LsaStorePrivateData(handle, key, secret)
fetched = win32security.LsaRetrievePrivateData(handle, key)
assert fetched == secret

## passing None deletes key
win32security.LsaStorePrivateData(handle, key, None)
win32security.LsaClose(handle)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/query_information.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/query_information.py
new file mode 100644
index 0000000..217c9b8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/query_information.py
@@ -0,0 +1,25 @@
+from ntsecuritycon import *
+import win32api, win32security, winerror
+
+# This is a Python implementation of win32api.GetDomainName()
+def GetDomainName():
+    try:
+        tok = win32security.OpenThreadToken(win32api.GetCurrentThread(),
+                                            TOKEN_QUERY, 1)
+    except win32api.error, details:
+        if details[0] != winerror.ERROR_NO_TOKEN:
+            raise
+        # attempt to open the process token, since no thread token
+        # exists
+        tok = win32security.OpenProcessToken(win32api.GetCurrentProcess(),
+                                             TOKEN_QUERY)
+    sid, attr = win32security.GetTokenInformation(tok, TokenUser)
+    win32api.CloseHandle(tok)
+
+    name, dom, typ = win32security.LookupAccountSid(None, sid)
+    return dom
+
+if __name__=='__main__':
+    print "Domain name is", GetDomainName()
+
+    
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/regsave_sa.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/regsave_sa.py
new file mode 100644
index 0000000..4b29c1f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/regsave_sa.py
@@ -0,0 +1,37 @@
import win32api, win32con, win32security, ntsecuritycon, pywintypes,os

# Save the HKLM\SYSTEM registry key to a file with RegSaveKey, passing
# explicit SECURITY_ATTRIBUTES for the created file.
# Fix: write into the user's temp directory instead of a hard-coded
# 'h:\tmp.reg' path (consistent with the other demos in this directory).
fname = os.path.join(win32api.GetTempPath(), 'tmp.reg')

## regsave will not overwrite a file
if os.path.isfile(fname):
    os.remove(fname)

# Privileges required for registry backup and security operations.
new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_BACKUP_NAME),win32con.SE_PRIVILEGE_ENABLED),
             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED)

            )
ph = win32api.GetCurrentProcess()
th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES)
win32security.AdjustTokenPrivileges(th,0,new_privs)
my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0]

hklm=win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE,None,0,win32con.KEY_ALL_ACCESS)
skey=win32api.RegOpenKey(hklm,'SYSTEM',0,win32con.KEY_ALL_ACCESS)

# Build SECURITY_ATTRIBUTES whose descriptor grants me read access and
# makes Power Users the owner of the saved file.
sa=pywintypes.SECURITY_ATTRIBUTES()
sd=pywintypes.SECURITY_DESCRIPTOR()
sa.SECURITY_DESCRIPTOR=sd
acl=pywintypes.ACL()

pwr_sid = win32security.LookupAccountName('','Power Users')[0]
acl.AddAccessAllowedAce(win32con.ACL_REVISION,win32con.GENERIC_READ|win32con.ACCESS_SYSTEM_SECURITY,my_sid)
sd.SetSecurityDescriptorDacl(1,acl,0)
sd.SetSecurityDescriptorOwner(pwr_sid,0)
sa.bInheritHandle=1
assert sa.SECURITY_DESCRIPTOR is sd

win32api.RegSaveKey(skey,fname,sa)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/regsecurity.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/regsecurity.py
new file mode 100644
index 0000000..8e1d3470
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/regsecurity.py
@@ -0,0 +1,20 @@
# Demo: create a registry key and attach a SACL that audits all access
# by the 'tmp' account (both successes and failures).
# NOTE(review): assumes a local account named 'tmp' exists.
import win32api, win32con, win32security, ntsecuritycon

# SeSecurityPrivilege/SeTcbPrivilege are needed to write SACLs.
new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED)
            )
ph = win32api.GetCurrentProcess()
th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES)

win32security.AdjustTokenPrivileges(th,0,new_privs)
hkey=win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE,None,0,win32con.KEY_ALL_ACCESS)
win32api.RegCreateKey(hkey,'SYSTEM\\NOTMP')
# Registry key names are case-insensitive, so 'notmp' reopens the key
# created above; ACCESS_SYSTEM_SECURITY is required to set its SACL.
notmpkey=win32api.RegOpenKey(hkey,'SYSTEM\\notmp',0,win32con.ACCESS_SYSTEM_SECURITY)

tmp_sid = win32security.LookupAccountName('','tmp')[0]
sacl=win32security.ACL()
# Last two args: audit successes (1) and failures (1).
sacl.AddAuditAccessAce(win32security.ACL_REVISION,win32con.GENERIC_ALL,tmp_sid,1,1)

sd=win32security.SECURITY_DESCRIPTOR()
sd.SetSecurityDescriptorSacl(1,sacl,1)
win32api.RegSetKeySecurity(notmpkey,win32con.SACL_SECURITY_INFORMATION,sd)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sa_inherit.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sa_inherit.py
new file mode 100644
index 0000000..71dc142
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sa_inherit.py
@@ -0,0 +1,7 @@
+import pywintypes, win32security
+sa=pywintypes.SECURITY_ATTRIBUTES()
+tmp_sid=win32security.LookupAccountName('','tmp')[0]
+sa.SetSecurityDescriptorOwner(tmp_sid,0)
+sid=sa.SECURITY_DESCRIPTOR.GetSecurityDescriptorOwner()
+print win32security.LookupAccountSid('',sid)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/security_enums.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/security_enums.py
new file mode 100644
index 0000000..6f8a801
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/security_enums.py
@@ -0,0 +1,228 @@
+import win32security
class Enum:
    """Attribute bag of named constants.

    Enum('A', 'B') looks up win32security.A and win32security.B and
    exposes them as attributes of the instance.  Pass module=<namespace>
    to resolve the names from a different namespace; win32security is
    only referenced when the default is used, which keeps the class
    usable (and testable) without pywin32.
    """
    def __init__(self,*consts,**kw):
        # Resolve the source namespace lazily so the win32security global
        # is untouched when an explicit module= is supplied.
        if 'module' in kw:
            source = kw['module']
        else:
            source = win32security
        for const in consts:
            setattr(self,const,getattr(source,const))
+
# Named groupings of the win32security constants.  Several of these are
# not true enumerations but collections of bit flags or access masks;
# those are marked below.

TOKEN_INFORMATION_CLASS = Enum(
    'TokenDefaultDacl', 'TokenGroups', 'TokenGroupsAndPrivileges',
    'TokenImpersonationLevel', 'TokenOwner', 'TokenPrimaryGroup',
    'TokenPrivileges', 'TokenRestrictedSids', 'TokenSandBoxInert',
    'TokenSessionId', 'TokenSource', 'TokenStatistics', 'TokenType',
    'TokenUser')

TOKEN_TYPE = Enum('TokenPrimary', 'TokenImpersonation')

POLICY_AUDIT_EVENT_TYPE = Enum(
    'AuditCategorySystem', 'AuditCategoryLogon',
    'AuditCategoryObjectAccess', 'AuditCategoryPrivilegeUse',
    'AuditCategoryDetailedTracking', 'AuditCategoryPolicyChange',
    'AuditCategoryAccountManagement',
    'AuditCategoryDirectoryServiceAccess', 'AuditCategoryAccountLogon')

POLICY_INFORMATION_CLASS = Enum(
    'PolicyAuditLogInformation', 'PolicyAuditEventsInformation',
    'PolicyPrimaryDomainInformation', 'PolicyPdAccountInformation',
    'PolicyAccountDomainInformation', 'PolicyLsaServerRoleInformation',
    'PolicyReplicaSourceInformation', 'PolicyDefaultQuotaInformation',
    'PolicyModificationInformation', 'PolicyAuditFullSetInformation',
    'PolicyAuditFullQueryInformation', 'PolicyDnsDomainInformation')

POLICY_LSA_SERVER_ROLE = Enum(
    'PolicyServerRoleBackup', 'PolicyServerRolePrimary')

## access modes for opening a policy handle - this is not a real enum
POLICY_ACCESS_MODES = Enum(
    'POLICY_VIEW_LOCAL_INFORMATION', 'POLICY_VIEW_AUDIT_INFORMATION',
    'POLICY_GET_PRIVATE_INFORMATION', 'POLICY_TRUST_ADMIN',
    'POLICY_CREATE_ACCOUNT', 'POLICY_CREATE_SECRET',
    'POLICY_CREATE_PRIVILEGE', 'POLICY_SET_DEFAULT_QUOTA_LIMITS',
    'POLICY_SET_AUDIT_REQUIREMENTS', 'POLICY_AUDIT_LOG_ADMIN',
    'POLICY_SERVER_ADMIN', 'POLICY_LOOKUP_NAMES',
    'POLICY_NOTIFICATION', 'POLICY_ALL_ACCESS',
    'POLICY_READ', 'POLICY_WRITE', 'POLICY_EXECUTE')

## EventAuditingOptions flags - not a real enum
POLICY_AUDIT_EVENT_OPTIONS_FLAGS = Enum(
    'POLICY_AUDIT_EVENT_UNCHANGED', 'POLICY_AUDIT_EVENT_SUCCESS',
    'POLICY_AUDIT_EVENT_FAILURE', 'POLICY_AUDIT_EVENT_NONE')

# AceType in ACE_HEADER - not a real enum
ACE_TYPE = Enum(
    'ACCESS_ALLOWED_ACE_TYPE', 'ACCESS_ALLOWED_OBJECT_ACE_TYPE',
    'ACCESS_DENIED_ACE_TYPE', 'ACCESS_DENIED_OBJECT_ACE_TYPE',
    'SYSTEM_AUDIT_ACE_TYPE', 'SYSTEM_AUDIT_OBJECT_ACE_TYPE')

# bit flags for AceFlags - not a real enum
ACE_FLAGS = Enum(
    'CONTAINER_INHERIT_ACE', 'FAILED_ACCESS_ACE_FLAG',
    'INHERIT_ONLY_ACE', 'INHERITED_ACE', 'NO_PROPAGATE_INHERIT_ACE',
    'OBJECT_INHERIT_ACE', 'SUCCESSFUL_ACCESS_ACE_FLAG',
    'NO_INHERITANCE', 'SUB_CONTAINERS_AND_OBJECTS_INHERIT',
    'SUB_CONTAINERS_ONLY_INHERIT', 'SUB_OBJECTS_ONLY_INHERIT')

# used in SetEntriesInAcl - very similar to ACE_TYPE
ACCESS_MODE = Enum(
    'NOT_USED_ACCESS', 'GRANT_ACCESS', 'SET_ACCESS', 'DENY_ACCESS',
    'REVOKE_ACCESS', 'SET_AUDIT_SUCCESS', 'SET_AUDIT_FAILURE')

# Bit flags in PSECURITY_DESCRIPTOR->Control - not a real enum
SECURITY_DESCRIPTOR_CONTROL_FLAGS = Enum(
    'SE_DACL_AUTO_INHERITED',   ## win2k and up
    'SE_SACL_AUTO_INHERITED',   ## win2k and up
    'SE_DACL_PROTECTED',        ## win2k and up
    'SE_SACL_PROTECTED',        ## win2k and up
    'SE_DACL_DEFAULTED', 'SE_DACL_PRESENT', 'SE_GROUP_DEFAULTED',
    'SE_OWNER_DEFAULTED', 'SE_SACL_PRESENT', 'SE_SELF_RELATIVE',
    'SE_SACL_DEFAULTED')

# types of SID
SID_NAME_USE = Enum(
    'SidTypeUser', 'SidTypeGroup', 'SidTypeDomain', 'SidTypeAlias',
    'SidTypeWellKnownGroup', 'SidTypeDeletedAccount', 'SidTypeInvalid',
    'SidTypeUnknown', 'SidTypeComputer')

## bit flags, not a real enum
TOKEN_ACCESS_PRIVILEGES = Enum(
    'TOKEN_ADJUST_DEFAULT', 'TOKEN_ADJUST_GROUPS',
    'TOKEN_ADJUST_PRIVILEGES', 'TOKEN_ALL_ACCESS',
    'TOKEN_ASSIGN_PRIMARY', 'TOKEN_DUPLICATE', 'TOKEN_EXECUTE',
    'TOKEN_IMPERSONATE', 'TOKEN_QUERY', 'TOKEN_QUERY_SOURCE',
    'TOKEN_READ', 'TOKEN_WRITE')

SECURITY_IMPERSONATION_LEVEL = Enum(
    'SecurityAnonymous', 'SecurityIdentification',
    'SecurityImpersonation', 'SecurityDelegation')

POLICY_SERVER_ENABLE_STATE = Enum(
    'PolicyServerEnabled', 'PolicyServerDisabled')

POLICY_NOTIFICATION_INFORMATION_CLASS = Enum(
    'PolicyNotifyAuditEventsInformation',
    'PolicyNotifyAccountDomainInformation',
    'PolicyNotifyServerRoleInformation',
    'PolicyNotifyDnsDomainInformation',
    'PolicyNotifyDomainEfsInformation',
    'PolicyNotifyDomainKerberosTicketInformation',
    'PolicyNotifyMachineAccountPasswordInformation')

TRUSTED_INFORMATION_CLASS = Enum(
    'TrustedDomainNameInformation', 'TrustedControllersInformation',
    'TrustedPosixOffsetInformation', 'TrustedPasswordInformation',
    'TrustedDomainInformationBasic', 'TrustedDomainInformationEx',
    'TrustedDomainAuthInformation', 'TrustedDomainFullInformation',
    'TrustedDomainAuthInformationInternal',
    'TrustedDomainFullInformationInternal',
    'TrustedDomainInformationEx2Internal',
    'TrustedDomainFullInformation2Internal')

TRUSTEE_FORM = Enum(
    'TRUSTEE_IS_SID', 'TRUSTEE_IS_NAME', 'TRUSTEE_BAD_FORM',
    'TRUSTEE_IS_OBJECTS_AND_SID', 'TRUSTEE_IS_OBJECTS_AND_NAME')

TRUSTEE_TYPE = Enum(
    'TRUSTEE_IS_UNKNOWN', 'TRUSTEE_IS_USER', 'TRUSTEE_IS_GROUP',
    'TRUSTEE_IS_DOMAIN', 'TRUSTEE_IS_ALIAS',
    'TRUSTEE_IS_WELL_KNOWN_GROUP', 'TRUSTEE_IS_DELETED',
    'TRUSTEE_IS_INVALID', 'TRUSTEE_IS_COMPUTER')

## SE_OBJECT_TYPE - securable objects
SE_OBJECT_TYPE = Enum(
    'SE_UNKNOWN_OBJECT_TYPE', 'SE_FILE_OBJECT', 'SE_SERVICE',
    'SE_PRINTER', 'SE_REGISTRY_KEY', 'SE_LMSHARE', 'SE_KERNEL_OBJECT',
    'SE_WINDOW_OBJECT', 'SE_DS_OBJECT', 'SE_DS_OBJECT_ALL',
    'SE_PROVIDER_DEFINED_OBJECT', 'SE_WMIGUID_OBJECT',
    'SE_REGISTRY_WOW64_32KEY')

PRIVILEGE_FLAGS = Enum(
    'SE_PRIVILEGE_ENABLED_BY_DEFAULT', 'SE_PRIVILEGE_ENABLED',
    'SE_PRIVILEGE_USED_FOR_ACCESS')
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_file_audit.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_file_audit.py
new file mode 100644
index 0000000..98afe2b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_file_audit.py
@@ -0,0 +1,47 @@
+fname=r'h:\tmp.txt'
+import win32security,win32file,win32api,ntsecuritycon,win32con
+
+new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ?
+            )
+
+ph = win32api.GetCurrentProcess()
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES)
+win32security.AdjustTokenPrivileges(th,0,new_privs)
+
+all_security_info = \
+    win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
+    win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
+
+sd=win32security.GetFileSecurity(fname,all_security_info)
+old_dacl=sd.GetSecurityDescriptorDacl()
+old_sacl=sd.GetSecurityDescriptorSacl()
+old_group=sd.GetSecurityDescriptorGroup()
+
+if old_dacl==None:
+    old_dacl=win32security.ACL()
+if old_sacl==None:
+    old_sacl=win32security.ACL()
+
+new_sd=win32security.SECURITY_DESCRIPTOR()
+
+my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0]
+tmp_sid = win32security.LookupAccountName('','tmp')[0]
+pwr_sid = win32security.LookupAccountName('','Power Users')[0]
+
+old_dacl.AddAccessDeniedAce(old_dacl.GetAclRevision(),win32con.GENERIC_ALL,pwr_sid)
+old_dacl.AddAccessAllowedAce(old_dacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid)
+old_dacl.AddAccessAllowedAce(old_dacl.GetAclRevision(),win32con.GENERIC_ALL,pwr_sid)
+old_sacl.AddAuditAccessAce(old_dacl.GetAclRevision(),win32con.GENERIC_ALL,tmp_sid,1,1)
+
+new_sd.SetSecurityDescriptorOwner(tmp_sid,0)
+new_sd.SetSecurityDescriptorGroup(old_group,0)
+
+new_sd.SetSecurityDescriptorSacl(1,old_sacl,1)
+new_sd.SetSecurityDescriptorDacl(1,old_dacl,1)
+
+win32security.SetFileSecurity(fname,all_security_info,new_sd)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_file_owner.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_file_owner.py
new file mode 100644
index 0000000..677643a9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_file_owner.py
@@ -0,0 +1,38 @@
+fname=r'h:\tmp.txt'
+
+import win32security,win32file,win32api,ntsecuritycon,win32con
+
+new_privs = ((win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
+            (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',ntsecuritycon.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('','SeEnableDelegationPrivilege'),win32con.SE_PRIVILEGE_ENABLED) ##doesn't seem to be in ntsecuritycon.py ?
+            )
+
+ph = win32api.GetCurrentProcess()
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS|win32con.TOKEN_ADJUST_PRIVILEGES)
+win32security.AdjustTokenPrivileges(th,0,new_privs)
+
+all_security_info = \
+    win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
+    win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
+
+sd=win32security.GetFileSecurity(fname,all_security_info)
+old_dacl=sd.GetSecurityDescriptorDacl()
+old_sacl=sd.GetSecurityDescriptorSacl()
+old_group=sd.GetSecurityDescriptorGroup()
+
+new_sd=win32security.SECURITY_DESCRIPTOR()
+print "relative, valid, size: ",new_sd.IsSelfRelative(), new_sd.IsValid(), new_sd.GetLength()
+
+my_sid = win32security.GetTokenInformation(th,ntsecuritycon.TokenUser)[0]
+tmp_sid = win32security.LookupAccountName('','tmp')[0]
+
+new_sd.SetSecurityDescriptorSacl(1,old_sacl,1)
+new_sd.SetSecurityDescriptorDacl(1,old_dacl,1)
+new_sd.SetSecurityDescriptorOwner(tmp_sid,0)
+new_sd.SetSecurityDescriptorGroup(old_group,0)
+
+win32security.SetFileSecurity(fname,all_security_info,new_sd)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_policy_info.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_policy_info.py
new file mode 100644
index 0000000..d26ab54f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/set_policy_info.py
@@ -0,0 +1,17 @@
+import win32security,win32file,win32api,ntsecuritycon,win32con
+policy_handle = win32security.GetPolicyHandle('rupole',win32security.POLICY_ALL_ACCESS)
+
+event_audit_info=win32security.LsaQueryInformationPolicy(policy_handle,win32security.PolicyAuditEventsInformation)
+print event_audit_info
+
+new_audit_info=list(event_audit_info[1])
+new_audit_info[win32security.AuditCategoryPolicyChange]= \
+            win32security.POLICY_AUDIT_EVENT_SUCCESS|win32security.POLICY_AUDIT_EVENT_FAILURE
+new_audit_info[win32security.AuditCategoryAccountLogon]= \
+            win32security.POLICY_AUDIT_EVENT_SUCCESS|win32security.POLICY_AUDIT_EVENT_FAILURE
+new_audit_info[win32security.AuditCategoryLogon]= \
+            win32security.POLICY_AUDIT_EVENT_SUCCESS|win32security.POLICY_AUDIT_EVENT_FAILURE
+
+win32security.LsaSetInformationPolicy(policy_handle, win32security.PolicyAuditEventsInformation, (1,new_audit_info))
+
+win32security.LsaClose(policy_handle)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py
new file mode 100644
index 0000000..c9eb24d3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setkernelobjectsecurity.py
@@ -0,0 +1,67 @@
+import win32security,win32api,win32con, win32process
+## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody
+## other than yourself or your primary group.  Most admin logins don't have it by default, so
+## enabling it may fail
+new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED)
+            )
+
+all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
+     win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
+
+pid=win32api.GetCurrentProcessId()
+ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS,0,pid)
+## PROCESS_ALL_ACCESS does not contain ACCESS_SYSTEM_SECURITY (necessary to work with SACLs)
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS)  ##win32con.TOKEN_ADJUST_PRIVILEGES)
+old_privs=win32security.AdjustTokenPrivileges(th,0,new_privs)
+my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0]
+pwr_sid=win32security.LookupAccountName('','Power Users')[0]
+## reopen process with ACCESS_SYSTEM_SECURITY now that sufficient privs are enabled
+ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS|win32con.ACCESS_SYSTEM_SECURITY,0,pid)
+
+sd=win32security.GetKernelObjectSecurity(ph,all_info)
+dacl=sd.GetSecurityDescriptorDacl()
+if dacl is None:
+    dacl=win32security.ACL()
+sacl=sd.GetSecurityDescriptorSacl()
+if sacl is None:
+    sacl=win32security.ACL()
+
+dacl_ace_cnt=dacl.GetAceCount()
+sacl_ace_cnt=sacl.GetAceCount()
+
+dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid)
+sacl.AddAuditAccessAce(sacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,1,1)
+sd.SetSecurityDescriptorDacl(1,dacl,0)
+sd.SetSecurityDescriptorSacl(1,sacl,0)
+sd.SetSecurityDescriptorGroup(pwr_sid,0)
+sd.SetSecurityDescriptorOwner(pwr_sid,0)
+
+win32security.SetKernelObjectSecurity(ph,all_info,sd)
+new_sd=win32security.GetKernelObjectSecurity(ph,all_info)
+
+if new_sd.GetSecurityDescriptorDacl().GetAceCount()!=dacl_ace_cnt+1:
+    print 'New dacl doesn''t contain extra ace ????'
+if new_sd.GetSecurityDescriptorSacl().GetAceCount()!=sacl_ace_cnt+1:
+    print 'New Sacl doesn''t contain extra ace ????'
+if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]!='Power Users':
+    print 'Owner not successfully set to Power Users !!!!!'
+if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]!='Power Users':
+    print 'Group not successfully set to Power Users !!!!!'
+
+sd.SetSecurityDescriptorSacl(0,None,0)
+win32security.SetKernelObjectSecurity(ph, win32security.SACL_SECURITY_INFORMATION, sd)
+new_sd_1=win32security.GetKernelObjectSecurity(ph,win32security.SACL_SECURITY_INFORMATION)
+if new_sd_1.GetSecurityDescriptorSacl() is not None:
+    print 'Unable to set Sacl to NULL !!!!!!!!'
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setnamedsecurityinfo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setnamedsecurityinfo.py
new file mode 100644
index 0000000..c5408ea
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setnamedsecurityinfo.py
@@ -0,0 +1,60 @@
+import win32security,win32api,win32con, win32process
+fname, tmp = win32api.GetTempFileName(win32api.GetTempPath(),'tmp')
+print fname
+## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody
+## other than yourself or your primary group.  Most admin logins don't have it by default, so
+## enabling it may fail
+new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED)
+            )
+
+all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
+     win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
+
+ph=win32process.GetCurrentProcess()
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS)  ##win32con.TOKEN_ADJUST_PRIVILEGES)
+win32security.AdjustTokenPrivileges(th,0,new_privs)
+my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0]
+pwr_sid=win32security.LookupAccountName('','Power Users')[0]
+
+sd=win32security.GetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,all_info)
+dacl=sd.GetSecurityDescriptorDacl()
+if dacl is None:
+    dacl=win32security.ACL()
+sacl=sd.GetSecurityDescriptorSacl()
+if sacl is None:
+    sacl=win32security.ACL()
+
+dacl_ace_cnt=dacl.GetAceCount()
+sacl_ace_cnt=sacl.GetAceCount()
+
+dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid)
+sacl.AddAuditAccessAce(sacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,1,1)
+
+win32security.SetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,all_info,pwr_sid, pwr_sid, dacl, sacl)
+new_sd=win32security.GetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,all_info)
+
+## could do additional checking to make sure added ACE contains expected info
+if new_sd.GetSecurityDescriptorDacl().GetAceCount()!=dacl_ace_cnt+1:
+    print 'New dacl doesn''t contain extra ace ????'
+if new_sd.GetSecurityDescriptorSacl().GetAceCount()!=sacl_ace_cnt+1:
+    print 'New Sacl doesn''t contain extra ace ????'
+if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]!='Power Users':
+    print 'Owner not successfully set to Power Users !!!!!'
+if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]!='Power Users':
+    print 'Group not successfully set to Power Users !!!!!'
+
+win32security.SetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,win32security.SACL_SECURITY_INFORMATION, None, None, None, None)
+new_sd_1=win32security.GetNamedSecurityInfo(fname,win32security.SE_FILE_OBJECT,win32security.SACL_SECURITY_INFORMATION)
+if new_sd_1.GetSecurityDescriptorSacl() is not None:
+    print 'Unable to set Sacl to NULL !!!!!!!!'
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setsecurityinfo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setsecurityinfo.py
new file mode 100644
index 0000000..0ab79a98
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setsecurityinfo.py
@@ -0,0 +1,61 @@
+import win32security,win32api,win32con, win32process
+## You need SE_RESTORE_NAME to be able to set the owner of a security descriptor to anybody
+## other than yourself or your primary group.  Most admin logins don't have it by default, so
+## enabling it may fail
+new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED)
+            )
+
+all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
+     win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
+
+pid=win32api.GetCurrentProcessId()
+ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS,0,pid)
+## PROCESS_ALL_ACCESS does not contain ACCESS_SYSTEM_SECURITY (necessary to work with SACLs)
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS)  ##win32con.TOKEN_ADJUST_PRIVILEGES)
+old_privs=win32security.AdjustTokenPrivileges(th,0,new_privs)
+my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0]
+pwr_sid=win32security.LookupAccountName('','Power Users')[0]
+## reopen process with ACCESS_SYSTEM_SECURITY now that sufficient privs are enabled
+ph=win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS|win32con.ACCESS_SYSTEM_SECURITY,0,pid)
+
+sd=win32security.GetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,all_info)
+dacl=sd.GetSecurityDescriptorDacl()
+if dacl is None:
+    dacl=win32security.ACL()
+sacl=sd.GetSecurityDescriptorSacl()
+if sacl is None:
+    sacl=win32security.ACL()
+
+dacl_ace_cnt=dacl.GetAceCount()
+sacl_ace_cnt=sacl.GetAceCount()
+
+dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid)
+sacl.AddAuditAccessAce(sacl.GetAclRevision(),win32con.GENERIC_ALL,my_sid,1,1)
+
+win32security.SetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,all_info,pwr_sid, pwr_sid, dacl, sacl)
+new_sd=win32security.GetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,all_info)
+
+if new_sd.GetSecurityDescriptorDacl().GetAceCount()!=dacl_ace_cnt+1:
+    print 'New dacl doesn''t contain extra ace ????'
+if new_sd.GetSecurityDescriptorSacl().GetAceCount()!=sacl_ace_cnt+1:
+    print 'New Sacl doesn''t contain extra ace ????'
+if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]!='Power Users':
+    print 'Owner not successfully set to Power Users !!!!!'
+if win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]!='Power Users':
+    print 'Group not successfully set to Power Users !!!!!'
+
+win32security.SetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,win32security.SACL_SECURITY_INFORMATION, None, None, None, None)
+new_sd_1=win32security.GetSecurityInfo(ph,win32security.SE_KERNEL_OBJECT,win32security.SACL_SECURITY_INFORMATION)
+if new_sd_1.GetSecurityDescriptorSacl() is not None:
+    print 'Unable to set Sacl to NULL !!!!!!!!'
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setuserobjectsecurity.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setuserobjectsecurity.py
new file mode 100644
index 0000000..5535541
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/setuserobjectsecurity.py
@@ -0,0 +1,42 @@
+import win32security,win32api,win32con, win32process
+new_privs = ((win32security.LookupPrivilegeValue('',win32security.SE_SECURITY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TCB_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SHUTDOWN_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_RESTORE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_TAKE_OWNERSHIP_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CREATE_PERMANENT_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_ENABLE_DELEGATION_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_CHANGE_NOTIFY_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_DEBUG_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_PROF_SINGLE_PROCESS_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_SYSTEM_PROFILE_NAME),win32con.SE_PRIVILEGE_ENABLED),
+             (win32security.LookupPrivilegeValue('',win32security.SE_LOCK_MEMORY_NAME),win32con.SE_PRIVILEGE_ENABLED)
+            )
+
+all_info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION| \
+     win32security.DACL_SECURITY_INFORMATION|win32security.SACL_SECURITY_INFORMATION
+info=win32security.OWNER_SECURITY_INFORMATION|win32security.GROUP_SECURITY_INFORMATION|win32security.DACL_SECURITY_INFORMATION
+
+ph=win32process.GetCurrentProcess()
+th = win32security.OpenProcessToken(ph,win32security.TOKEN_ALL_ACCESS)  ##win32con.TOKEN_ADJUST_PRIVILEGES)
+win32security.AdjustTokenPrivileges(th,0,new_privs)
+my_sid = win32security.GetTokenInformation(th,win32security.TokenUser)[0]
+pwr_sid=win32security.LookupAccountName('','Power Users')[0]
+
+h=win32process.GetProcessWindowStation()
+sd=win32security.GetUserObjectSecurity(h,info)
+dacl=sd.GetSecurityDescriptorDacl()
+ace_cnt=dacl.GetAceCount()
+
+dacl.AddAccessAllowedAce(dacl.GetAclRevision(),win32con.ACCESS_SYSTEM_SECURITY|win32con.WRITE_DAC,my_sid)
+sd.SetSecurityDescriptorDacl(1,dacl,0)
+sd.SetSecurityDescriptorGroup(pwr_sid,0)
+sd.SetSecurityDescriptorOwner(pwr_sid,0)
+
+win32security.SetUserObjectSecurity(h,info,sd)
+new_sd=win32security.GetUserObjectSecurity(h,info)
+assert new_sd.GetSecurityDescriptorDacl().GetAceCount()==ace_cnt+1,'Did not add an ace to the Dacl !!!!!!'
+assert win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorOwner())[0]=='Power Users','Owner not successfully set to Power Users !!!!!'
+assert win32security.LookupAccountSid('',new_sd.GetSecurityDescriptorGroup())[0]=='Power Users','Group not successfully set to Power Users !!!!!'
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/fetch_url.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/fetch_url.py
new file mode 100644
index 0000000..5f4f27baf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/fetch_url.py
@@ -0,0 +1,146 @@
+"""
+Fetches a URL from a web-server supporting NTLM authentication
+eg, IIS.
+
+If no arguments are specified, a default of http://localhost/localstart.asp
+is used.  This script does follow simple 302 redirections, so pointing at the
+root of an IIS server should work.
+"""
+
+import sys
+import urllib
+import httplib
+import urlparse
+from base64 import encodestring, decodestring
+
+from sspi import ClientAuth
+
+import optparse # sorry, this demo needs 2.3+
+
+options = None # set to optparse options object
+
+def open_url(host, url):
+    h = httplib.HTTPConnection(host)
+#    h.set_debuglevel(9)
+    h.putrequest('GET', url)
+    h.endheaders()
+    resp = h.getresponse()
+    print "Initial response is", resp.status, resp.reason
+    body = resp.read()
+    if resp.status == 302: # object moved
+        url = "/" + resp.msg["location"]
+        resp.close()
+        h.putrequest('GET', url)
+        h.endheaders()
+        resp = h.getresponse()
+        print "After redirect response is", resp.status, resp.reason
+    if options.show_headers:
+        print "Initial response headers:"
+        for name, val in resp.msg.items():
+            print " %s: %s" % (name, val)
+    if options.show_body:
+        print body
+    if resp.status == 401:
+        # 401: Unauthorized - here is where the real work starts
+        auth_info = None
+        if options.user or options.domain or options.password:
+            auth_info = options.user, options.domain, options.password
+        ca = ClientAuth("NTLM", auth_info=auth_info)
+        auth_scheme = ca.pkg_info['Name']
+        data = None
+        while 1:
+            err, out_buf = ca.authorize(data)
+            data = out_buf[0].Buffer
+            # Encode it as base64 as required by HTTP
+            auth = encodestring(data).replace("\012", "")
+            h.putrequest('GET', url)
+            h.putheader('Authorization', auth_scheme + ' ' + auth)
+            h.putheader('Content-Length', '0')
+            h.endheaders()
+            resp = h.getresponse()
+            if options.show_headers:
+                print "Token dance headers:"
+                for name, val in resp.msg.items():
+                    print " %s: %s" % (name, val)
+
+            if err==0:
+                break
+            else:
+                if resp.status != 401:
+                    print "Eeek - got response", resp.status
+                    cl = resp.msg.get("content-length")
+                    if cl:
+                        print repr(resp.read(int(cl)))
+                    else:
+                        print "no content!"
+
+                assert resp.status == 401, resp.status
+
+            assert not resp.will_close, "NTLM is per-connection - must not close"
+            schemes = [s.strip() for s in resp.msg.get("WWW-Authenticate", "").split(",")]
+            for scheme in schemes:
+                if scheme.startswith(auth_scheme):
+                    data = decodestring(scheme[len(auth_scheme)+1:])
+                    break
+            else:
+                print "Could not find scheme '%s' in schemes %r" % (auth_scheme, schemes)
+                break
+        
+            resp.read()
+    print "Final response status is", resp.status, resp.reason
+    if resp.status == 200:
+        # Worked!
+        # Check we can read it again without re-authenticating.
+        if resp.will_close:
+            print "EEEK - response will close, but NTLM is per connection - it must stay open"
+        body = resp.read()
+        if options.show_body:
+            print "Final response body:"
+            print body
+        h.putrequest('GET', url)
+        h.endheaders()
+        resp = h.getresponse()
+        print "Second fetch response is", resp.status, resp.reason
+        if options.show_headers:
+            print "Second response headers:"
+            for name, val in resp.msg.items():
+                print " %s: %s" % (name, val)
+        
+        resp.read(int(resp.msg.get("content-length", 0)))
+    elif resp.status == 500:
+        print "Error text"
+        print resp.read()
+    else:
+        if options.show_body:
+            cl = resp.msg.get("content-length")
+            print resp.read(int(cl))
+
+if __name__=='__main__':
+    parser = optparse.OptionParser(description=__doc__)
+    
+    parser.add_option("", "--show-body", action="store_true",
+                      help="print the body of each response as it is received")
+
+    parser.add_option("", "--show-headers", action="store_true",
+                      help="print the headers of each response as it is received")
+
+    parser.add_option("", "--user", action="store",
+                      help="The username to login with")
+
+    parser.add_option("", "--password", action="store",
+                      help="The password to login with")
+
+    parser.add_option("", "--domain", action="store",
+                      help="The domain to login to")
+
+    options, args = parser.parse_args()
+    if not args:
+        print "Run with --help for usage details"
+        args = ["http://localhost/localstart.asp"]
+    for url in args:
+        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
+        if (scheme != "http") or params or query or fragment:
+            parser.error("Scheme must be http, URL must be simple")
+    
+        print "Opening '%s' from '%s'" % (path, netloc)
+        r = open_url(netloc, path)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/simple_auth.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/simple_auth.py
new file mode 100644
index 0000000..ed1cf349
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/simple_auth.py
@@ -0,0 +1,71 @@
+# A demo of basic SSPI authentication.
+# There is a 'client' context and a 'server' context - typically these will
+# be on different machines (here they are in the same process, but the same
+# concepts apply)
+import sspi
+import win32security, sspicon, win32api
+
+def lookup_ret_code(err):
+    for k,v in sspicon.__dict__.items():
+        if k[0:6] in ('SEC_I_','SEC_E_') and v==err:
+            return k
+
+
+"""
+pkg_name='Kerberos'
+sspiclient=SSPIClient(pkg_name, win32api.GetUserName(),  ## target spn is ourself
+    None, None,   ## use none for client name and authentication information for current context
+    ## u'username', (u'username',u'domain.com',u'passwd'),
+    sspicon.ISC_REQ_INTEGRITY|sspicon.ISC_REQ_SEQUENCE_DETECT|sspicon.ISC_REQ_REPLAY_DETECT|    \
+        sspicon.ISC_REQ_DELEGATE|sspicon.ISC_REQ_CONFIDENTIALITY|sspicon.ISC_REQ_USE_SESSION_KEY)
+sspiserver=SSPIServer(pkg_name, None,
+    sspicon.ASC_REQ_INTEGRITY|sspicon.ASC_REQ_SEQUENCE_DETECT|sspicon.ASC_REQ_REPLAY_DETECT|   \
+        sspicon.ASC_REQ_DELEGATE|sspicon.ASC_REQ_CONFIDENTIALITY|sspicon.ASC_REQ_STREAM|sspicon.ASC_REQ_USE_SESSION_KEY)
+"""
+
+pkg_name='NTLM'
+
+# Setup the 2 contexts.
+sspiclient=sspi.ClientAuth(pkg_name)
+sspiserver=sspi.ServerAuth(pkg_name)
+
+# Perform the authentication dance, each loop exchanging more information
+# on the way to completing authentication.
+sec_buffer=None
+while 1:
+    err, sec_buffer = sspiclient.authorize(sec_buffer)
+    err, sec_buffer = sspiserver.authorize(sec_buffer)
+    if err==0:
+        break
+
+# The server can now impersonate the client.  In this demo the 2 users will
+# always be the same.
+sspiserver.ctxt.ImpersonateSecurityContext()
+print 'Impersonated user: ',win32api.GetUserNameEx(win32api.NameSamCompatible)
+sspiserver.ctxt.RevertSecurityContext()
+print 'Reverted to self: ',win32api.GetUserName()
+
+pkg_size_info=sspiclient.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES)
+# Now sign some data
+msg='some data to be encrypted ......'
+
+sigsize=pkg_size_info['MaxSignature']
+sigbuf=win32security.PySecBufferDescType()
+sigbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA))
+sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN))
+sigbuf[0].Buffer=msg
+sspiclient.ctxt.MakeSignature(0,sigbuf,1)
+sspiserver.ctxt.VerifySignature(sigbuf,1)
+
+# And finally encrypt some.
+trailersize=pkg_size_info['SecurityTrailer']
+encbuf=win32security.PySecBufferDescType()
+encbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA))
+encbuf.append(win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN))
+encbuf[0].Buffer=msg
+sspiclient.ctxt.EncryptMessage(0,encbuf,1)
+print 'Encrypted data:',repr(encbuf[0].Buffer)
+sspiserver.ctxt.DecryptMessage(encbuf,1)
+print 'Unencrypted data:',encbuf[0].Buffer
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/socket_server.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/socket_server.py
new file mode 100644
index 0000000..ab5c98e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/socket_server.py
@@ -0,0 +1,178 @@
+"""A sample socket server and client using SSPI authentication and encryption.
+
+You must run with either 'client' or 'server' as arguments.  A server must be
+running before a client can connect.
+
+To use with Kerberos you should include in the client options
+--target-spn=username, where 'username' is the user under which the server is
+being run.
+
+Running either the client or server as a different user can be informative.
+A command-line such as the following may be useful:
+`runas /user:{user} {fqp}\python.exe {fqp}\socket_server.py --wait client|server`
+
+{fqp} should specify the relevant fully-qualified path names.
+
+To use 'runas' with Kerberos, the client program will need to
+specify --target-spn with the username under which the *server* is running.
+
+See the SSPI documentation for more details.
+"""
+
+
+import sys
+import struct
+import SocketServer
+import win32api
+import httplib
+import traceback
+
+import win32security
+import sspi, sspicon
+
+import optparse # sorry, this demo needs 2.3+
+
+options = None # set to optparse object.
+
+def GetUserName():
+    try:
+        return win32api.GetUserName()
+    except win32api.error, details:
+        # Seeing 'access denied' errors here for non-local users (presumably
+        # without permission to login locally).  Get the fully-qualified
+        # username, although a side-effect of these permission-denied errors
+        # is a lack of Python codecs - so printing the Unicode value fails.
+        # So just return the repr(), and avoid codecs completely.
+        return repr(win32api.GetUserNameEx(win32api.NameSamCompatible))
+    
+# Send a simple "message" over a socket - send the number of bytes first,
+# then the string.  Ditto for receive.
def _send_msg(s, m):
    """Send one length-prefixed message over socket s.

    The 4-byte native-endian length header is written first, then the
    payload itself.  The receiving side is _get_msg.
    """
    header = struct.pack("i", len(m))
    s.send(header)
    s.send(m)
+
def _recv_exact(s, cb):
    """Read exactly cb bytes from socket s.

    socket.recv may return fewer bytes than requested, so loop until the
    full count arrives.  Returns None if the connection closes before cb
    bytes are received (including before the first byte).
    """
    if cb == 0:
        return ""
    data = s.recv(cb)
    if not data:
        return None
    while len(data) < cb:
        chunk = s.recv(cb - len(data))
        if not chunk:
            # Connection closed mid-message - treat a truncated message
            # as end-of-stream rather than returning partial data.
            return None
        data = data + chunk
    return data

def _get_msg(s):
    """Receive one length-prefixed message sent by _send_msg.

    Returns the payload, or None when the peer has closed the connection
    (or the stream was truncated mid-message).
    """
    size_data = _recv_exact(s, struct.calcsize("i"))
    if size_data is None:
        return None
    cb = struct.unpack("i", size_data)[0]
    return _recv_exact(s, cb)
+
class SSPISocketServer(SocketServer.TCPServer):
    """TCP server that authenticates each connection via SSPI before use.

    verify_request runs the SSPI handshake; process_request then
    impersonates the authenticated client while reading its messages.
    """
    def __init__(self, *args, **kw):
        SocketServer.TCPServer.__init__(self, *args, **kw)
        # A single ServerAuth is reused for every connection - it is
        # reset at the top of verify_request before each handshake.
        self.sa = sspi.ServerAuth(options.package)

    def verify_request(self, sock, ca):
        # Do the sspi auth dance
        self.sa.reset()
        while 1:
            data = _get_msg(sock)
            if data is None:
                # Peer closed before the handshake completed.
                return False
            try:
                err, sec_buffer = self.sa.authorize(data)
            except sspi.error, details:
                print "FAILED to authorize client:", details
                return False

            # err==0 means the security context is complete - there is no
            # further token to send back to the client.
            if err==0:
                break
            _send_msg(sock, sec_buffer[0].Buffer)
        return True

    def process_request(self, request, client_address):
        # An example using the connection once it is established.
        print "The server is running as user", GetUserName()
        self.sa.ctxt.ImpersonateSecurityContext()
        try:
            print "Having conversation with client as user", GetUserName()
            while 1:
                # we need to grab 2 bits of data - the encrypted data, and the
                # 'key'
                data = _get_msg(request)
                key = _get_msg(request)
                if data is None or key is None:
                    break
                data = self.sa.decrypt(data, key)
                print "Client sent:", repr(data)
        finally:
            # Always drop impersonation, even if decryption fails.
            self.sa.ctxt.RevertSecurityContext()
        self.close_request(request)
        print "The server is back to user", GetUserName()
+
+def serve():
+    s = SSPISocketServer(("localhost", options.port), None)
+    print "Running test server..."
+    s.serve_forever()
+
def sspi_client():
    """Connect to the demo server, authenticate, and send encrypted messages.

    httplib.HTTPConnection is used only as a convenient way to obtain a
    connected socket (c.sock) - no HTTP traffic is exchanged.
    """
    c = httplib.HTTPConnection("localhost", options.port)
    c.connect()
    # Do the auth dance.
    ca = sspi.ClientAuth(options.package, targetspn=options.target_spn)
    data = None
    while 1:
        # Produce the next token; err==0 means this is the final one.
        err, out_buf = ca.authorize(data)
        _send_msg(c.sock, out_buf[0].Buffer)
        if err==0:
            break
        data = _get_msg(c.sock)
    print "Auth dance complete - sending a few encryted messages"
    # Assume out data is sensitive - encrypt the message.
    for data in "Hello from the client".split():
        # encrypt returns the ciphertext plus a 'key' (signature/trailer)
        # blob; both are needed by the server's decrypt.
        blob, key = ca.encrypt(data)
        _send_msg(c.sock, blob)
        _send_msg(c.sock, key)
    c.sock.close()
    print "Client completed."
+
if __name__=='__main__':
    # Command-line handling: choose 'client' or 'server' mode plus the
    # SSPI package / port options documented in the module docstring.
    parser = optparse.OptionParser("%prog [options] client|server",
                                   description=__doc__)

    parser.add_option("", "--package", action="store", default="NTLM",
                      help="The SSPI package to use (eg, Kerberos) - default is NTLM")

    parser.add_option("", "--target-spn", action="store",
                      help="""The target security provider name to use. The
                      string contents are security-package specific.  For
                      example, 'Kerberos' or 'Negotiate' require the server
                      principal name (SPN) (ie, the username) of the remote
                      process.  For NTLM this must be blank.""")

    parser.add_option("", "--port", action="store", default="8181",
                      help="The port number to use (default=8181)")

    parser.add_option("", "--wait", action="store_true",
                      help="""Cause the program to wait for input just before
                              terminating. Useful when using via runas to see
                              any error messages before termination.
                           """)

    options, args = parser.parse_args()
    try:
        options.port = int(options.port)
    except (ValueError, TypeError):
        parser.error("--port must be an integer")

    try:
        try:
            if not args:
                args = ['']
            if args[0]=="client":
                sspi_client()
            elif args[0]=="server":
                serve()
            else:
                parser.error("You must supply 'client' or 'server' - " \
                             "use --help for details")
        except KeyboardInterrupt:
            pass
        except SystemExit:
            pass
        except:
            traceback.print_exc()
    finally:
        # --wait keeps the console visible when launched via 'runas'.
        if options.wait:
            raw_input("Press enter to continue")
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/validate_password.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/validate_password.py
new file mode 100644
index 0000000..13fa577
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/security/sspi/validate_password.py
@@ -0,0 +1,38 @@
+# Demonstrates how to validate a password.
+# See also MSKB article Q180548
+#
+# To use with Kerberos you need to jump through the 'targetspn' hoops.
+
+import win32security
+import sys
+from sspi import ClientAuth, ServerAuth
+
def validate(username, password, domain = ""):
    """Validate a username/password by running a local NTLM handshake.

    Raises win32security.error if the credentials are rejected; returns
    nothing on success.
    """
    ca = ClientAuth("NTLM", auth_info = (username, domain, password))
    sa = ServerAuth("NTLM")

    err = data = None
    while err != 0:
        # Exchange tokens until the server side reports completion.
        err, data = ca.authorize(data)
        err, data = sa.authorize(data)
    # If we get here without exception, we worked!
+
if __name__=='__main__':
    if len(sys.argv) not in [2,3,4]:
        print "Usage: %s username [password [domain]]" % (__file__,)
        sys.exit(1)

    # password and domain are optional!
    password = None
    if len(sys.argv)>=3:
        password = sys.argv[2]
    domain = ""
    if len(sys.argv)>=4:
        domain = sys.argv[3]
    try:
        validate(sys.argv[1], password, domain)
        print "Validated OK"
    except win32security.error, details:
        # win32security.error unpacks as (hr, function-name, message).
        hr, func, msg = details
        print "Validation failed: %s (%d)" % (msg, hr)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/perf_install.h b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/perf_install.h
new file mode 100644
index 0000000..d2ae56a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/perf_install.h
@@ -0,0 +1,9 @@
+# All these simply list the counters in order.
+# Important that you add these in this order in
+# the Python code.
+
+#define SERVICE 0
+#define HEARTBEAT 2
+#define READ 4
+#define WRITE 6
+#define THREAD 8
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/perf_install.ini b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/perf_install.ini
new file mode 100644
index 0000000..ba391e45
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/perf_install.ini
@@ -0,0 +1,18 @@
+[info]

+drivername=PipeServicePerf

+symbolfile=perf_install.h

+

+[languages]

+009=English

+

+[text]

+SERVICE_009_NAME=Python Demo Service

+SERVICE_009_HELP=Shows performance statistics for the sample Python service

+HEARTBEAT_009_NAME=Heartbeat pulse

+HEARTBEAT_009_HELP=Regular counter to indicate the service is alive.  Useful so system monitors can detect health even under low loads.

+READ_009_NAME=Bytes read/sec

+READ_009_HELP=Number of bytes read in total

+WRITE_009_NAME=Bytes written/sec

+WRITE_009_HELP=Number of bytes written in total

+THREAD_009_NAME=Number of threads

+THREAD_009_HELP=Number of threads currently servicing client connections

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/readme.txt b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/readme.txt
new file mode 100644
index 0000000..7afaad97
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/install/readme.txt
@@ -0,0 +1,12 @@
+This directory contains stuff needed to install the Performance 
+Monitoring counters for the test service.
+
+This stuff is only needed at install time, and not used at runtime.
+
+This sample code does attempt to install this information at runtime
+when the sample service is installed.  However, if you use a setup
+program for example this is not needed in the runtime environment.
+
+The NT APIs require a .h and a .ini.  These are not used by the Python
+runtime at all.  It is your program's responsibility to create counters
+in your Python code which correspond to the entries here.
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/pipeTestService.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/pipeTestService.py
new file mode 100644
index 0000000..43b8815
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/pipeTestService.py
@@ -0,0 +1,161 @@
+# A Demo of services and named pipes.
+
+# A multi-threaded service that simply echos back its input.
+
+# * Install as a service using "pipeTestService.py install"
+# * Use Control Panel to change the user name of the service
+#   to a real user name (ie, NOT the SystemAccount)
+# * Start the service.
+# * Run the "pipeTestServiceClient.py" program as the client pipe side.
+
+import win32serviceutil, win32service
+import pywintypes, win32con, winerror
+# Use "import *" to keep this looking as much as a "normal" service
+# as possible.  Real code shouldn't do this.
+from win32event import *
+from win32file import *
+from win32pipe import *
+from win32api import *
+from ntsecuritycon import *
+
+# Old versions of the service framework would not let you import this
+# module at the top-level.  Now you can, and can check 'Debugging()' and
+# 'RunningAsService()' to check your context.
+import servicemanager
+
+import traceback
+import thread
+
def ApplyIgnoreError(fn, args):
    """Call fn(*args), returning None if a win32api error is raised."""
    try:
        return fn(*args)
    except error: # Ignore win32api errors.
        return None
+
class TestPipeService(win32serviceutil.ServiceFramework):
    """Multi-threaded named-pipe echo service.

    SvcDoRun creates pipe instances and waits (overlapped) for clients;
    each connected client is handed to its own thread, which reads the
    message and echoes it back prefixed with the originating user name.
    """
    _svc_name_ = "PyPipeTestService"
    _svc_display_name_ = "Python Pipe Test Service"
    _svc_description_ = "Tests Python service framework by receiving and echoing messages over a named pipe"

    def __init__(self, args):
        win32serviceutil.ServiceFramework.__init__(self, args)
        # Signalled by SvcStop to tell the main loop to shut down.
        self.hWaitStop = CreateEvent(None, 0, 0, None)
        # Overlapped structure (and its event) used for ConnectNamedPipe.
        self.overlapped = pywintypes.OVERLAPPED()
        self.overlapped.hEvent = CreateEvent(None,0,0,None)
        # Duplicated handles of the threads currently servicing clients.
        self.thread_handles = []

    def CreatePipeSecurityObject(self):
        # Create a security object giving World read/write access,
        # but only "Owner" modify access.
        sa = pywintypes.SECURITY_ATTRIBUTES()
        sidEveryone = pywintypes.SID()
        sidEveryone.Initialize(SECURITY_WORLD_SID_AUTHORITY,1)
        sidEveryone.SetSubAuthority(0, SECURITY_WORLD_RID)
        sidCreator = pywintypes.SID()
        sidCreator.Initialize(SECURITY_CREATOR_SID_AUTHORITY,1)
        sidCreator.SetSubAuthority(0, SECURITY_CREATOR_OWNER_RID)

        acl = pywintypes.ACL()
        acl.AddAccessAllowedAce(FILE_GENERIC_READ|FILE_GENERIC_WRITE, sidEveryone)
        acl.AddAccessAllowedAce(FILE_ALL_ACCESS, sidCreator)

        sa.SetSecurityDescriptorDacl(1, acl, 0)
        return sa

    # The functions executed in their own thread to process a client request.
    def DoProcessClient(self, pipeHandle, tid):
        # Reads the client's (possibly multi-chunk) message, echoes it
        # back, then disconnects and closes the pipe instance.
        try:
            try:
                # Create a loop, reading large data.  If we knew the data stream was
                # was small, a simple ReadFile would do.
                d = ''
                hr = winerror.ERROR_MORE_DATA
                while hr==winerror.ERROR_MORE_DATA:
                    hr, thisd = ReadFile(pipeHandle, 256)
                    d = d + thisd
                print "Read", d
                ok = 1
            except error:
                # Client disconnection - do nothing
                ok = 0

            # A secure service would handle (and ignore!) errors writing to the
            # pipe, but for the sake of this demo we dont (if only to see what errors
            # we can get when our clients break at strange times :-)
            if ok: WriteFile(pipeHandle,"%s (on thread %d) sent me %s" % (GetNamedPipeHandleState(pipeHandle)[4],tid, d))
        finally:
            ApplyIgnoreError( DisconnectNamedPipe, (pipeHandle,) )
            ApplyIgnoreError( CloseHandle, (pipeHandle,) )

    def ProcessClient(self, pipeHandle):
        # Thread entry point - registers this thread's handle so SvcDoRun
        # can wait for outstanding clients at shutdown.
        try:
            procHandle = GetCurrentProcess()
            th = DuplicateHandle(procHandle, GetCurrentThread(), procHandle, 0, 0, win32con.DUPLICATE_SAME_ACCESS)
            try:
                self.thread_handles.append(th)
                try:
                    return self.DoProcessClient(pipeHandle, th)
                except:
                    traceback.print_exc()
            finally:
                self.thread_handles.remove(th)
        except:
            traceback.print_exc()

    def SvcStop(self):
        # Tell the SCM we are stopping, then wake the main loop.
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        SetEvent(self.hWaitStop)

    def SvcDoRun(self):
        # Write an event log record - in debug mode we will also
        # see this message printed.
        servicemanager.LogMsg(
                servicemanager.EVENTLOG_INFORMATION_TYPE,
                servicemanager.PYS_SERVICE_STARTED,
                (self._svc_name_, '')
                )

        num_connections = 0
        while 1:
            # A fresh pipe instance per client connection.
            pipeHandle = CreateNamedPipe("\\\\.\\pipe\\PyPipeTest",
                    PIPE_ACCESS_DUPLEX| FILE_FLAG_OVERLAPPED,
                    PIPE_TYPE_MESSAGE | PIPE_READMODE_BYTE,
                    PIPE_UNLIMITED_INSTANCES,       # max instances
                    0, 0, 6000,
                    self.CreatePipeSecurityObject())
            try:
                hr = ConnectNamedPipe(pipeHandle, self.overlapped)
            except error, details:
                print "Error connecting pipe!", details
                CloseHandle(pipeHandle)
                break
            if hr==winerror.ERROR_PIPE_CONNECTED:
                # Client is already connected - signal event
                SetEvent(self.overlapped.hEvent)
            # Wait for either a client connection or the stop request.
            rc = WaitForMultipleObjects((self.hWaitStop, self.overlapped.hEvent), 0, INFINITE)
            if rc==WAIT_OBJECT_0:
                # Stop event
                break
            else:
                # Pipe event - spawn thread to deal with it.
                thread.start_new_thread(self.ProcessClient, (pipeHandle,))
                num_connections = num_connections + 1

        # Sleep to ensure that any new threads are in the list, and then
        # wait for all current threads to finish.
        # What is a better way?
        Sleep(500)
        while self.thread_handles:
            self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING, 5000)
            print "Waiting for %d threads to finish..." % (len(self.thread_handles))
            WaitForMultipleObjects(self.thread_handles, 1, 3000)
        # Write another event log record.
        servicemanager.LogMsg(
                servicemanager.EVENTLOG_INFORMATION_TYPE,
                servicemanager.PYS_SERVICE_STOPPED,
                (self._svc_name_, " after processing %d connections" % (num_connections,))
                )
+
+
if __name__=='__main__':
    # Standard service command-line handling (install/remove/start/debug).
    win32serviceutil.HandleCommandLine(TestPipeService)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/pipeTestServiceClient.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/pipeTestServiceClient.py
new file mode 100644
index 0000000..19516979
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/service/pipeTestServiceClient.py
@@ -0,0 +1,121 @@
+# A Test Program for pipeTestService.py
+#
+# Install and start the Pipe Test service, then run this test
+# either from the same machine, or from another using the "-s" param.
+#
+# Eg: pipeTestServiceClient.py -s server_name Hi There
+# Should work.
+
+from win32pipe import *
+from win32file import *
+from win32event import *
+import pywintypes
+import win32api
+import winerror
+import sys, os, traceback
+
+verbose = 0
+
+#def ReadFromPipe(pipeName):
+        # Could (Should?) use CallNamedPipe, but this technique allows variable size
+        # messages (whereas you must supply a buffer size for CallNamedPipe!
+#       hPipe = CreateFile(pipeName, GENERIC_WRITE, 0, None, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, 0)
+#       more = 1
+#       while more:
+#               hr = ReadFile(hPipe, 256)
+#               if hr==0:
+#                       more = 0
+#               except win32api.error (hr, fn, desc):
+#                       if hr==winerror.ERROR_MORE_DATA:
+#                               data = dat
+#
+
+def CallPipe(fn, args):
+    ret = None
+    retryCount = 0
+    while retryCount < 8:   # Keep looping until user cancels.
+        retryCount = retryCount + 1
+        try:
+            return apply(fn, args)
+        except win32api.error, (rc, fnerr, msg):
+            if rc==winerror.ERROR_PIPE_BUSY:
+                win32api.Sleep(5000)
+                continue
+            else:
+                raise win32api.error, (rc, fnerr, msg)
+
+    raise RuntimeError, "Could not make a connection to the server"
+
+def testClient(server,msg):
+    if verbose:
+        print "Sending", msg
+    data = CallPipe(CallNamedPipe, ("\\\\%s\\pipe\\PyPipeTest" % server, msg, 256, NMPWAIT_WAIT_FOREVER))
+    if verbose:
+        print "Server sent back '%s'" % data
+    print "Sent and received a message!"
+
+def testLargeMessage(server, size = 4096):
+    if verbose:
+        print "Sending message of size %d" % (size)
+    msg = "*" * size
+    data = CallPipe(CallNamedPipe, ("\\\\%s\\pipe\\PyPipeTest" % server, msg, 512, NMPWAIT_WAIT_FOREVER))
+    if len(data)-size:
+        print "Sizes are all wrong - send %d, got back %d" % (size, len(data))
+
def stressThread(server, numMessages, wait):
    # Runs in its own thread - hammers the server with fixed-size
    # messages, then signals 'wait' so the spawner knows we finished.
    try:
        try:
            for i in xrange(numMessages):
                r = CallPipe(CallNamedPipe, ("\\\\%s\\pipe\\PyPipeTest" % server, "#" * 512, 1024, NMPWAIT_WAIT_FOREVER))
        except:
            traceback.print_exc()
            print "Failed after %d messages" % i
    finally:
        # Always signal completion, even on failure, so the spawner's
        # WaitForMultipleObjects cannot hang.
        SetEvent(wait)
+
def stressTestClient(server, numThreads, numMessages):
    # Spawn numThreads stress threads and block until all have signalled
    # their completion events.
    import thread
    thread_waits = []
    for t_num in xrange(numThreads):
        # Note I could just wait on thread handles (after calling DuplicateHandle)
        # See the service itself for an example of waiting for the clients...
        wait = CreateEvent(None, 0, 0, None)
        thread_waits.append(wait)
        thread.start_new_thread(stressThread, (server,numMessages, wait))
    # Wait for all threads to finish.
    # NOTE(review): WaitForMultipleObjects is limited to 64 handles - verify
    # numThreads stays below that when stress testing.
    WaitForMultipleObjects(thread_waits, 1, INFINITE)
+
def main():
    """Parse command-line options and run the requested client tests."""
    import sys, getopt, string
    server = "."
    thread_count = 0
    msg_count = 500
    try:
        opts, args = getopt.getopt(sys.argv[1:], 's:t:m:vl')
        for o,a in opts:
            if o=='-s':
                server = a
            if o=='-m':
                msg_count = string.atoi(a)
            if o=='-t':
                thread_count = string.atoi(a)
            if o=='-v':
                global verbose
                verbose = 1
            if o=='-l':
                testLargeMessage(server)
        msg = string.join(args)
    # NOTE(review): 'msg' is reused here as the exception value - on a
    # getopt error it holds the error message printed below.
    except getopt.error, msg:
        print msg
        my_name = os.path.split(sys.argv[0])[1]
        print "Usage: %s [-v] [-s server] [-t thread_count=0] [-m msg_count=500] msg ..." % my_name
        print "       -v = verbose"
        print "       Specifying a value for -t will stress test using that many threads."
        return
    testClient(server, msg)
    if thread_count > 0:
        print "Spawning %d threads each sending %d messages..." % (thread_count, msg_count)
        stressTestClient(server, thread_count, msg_count)
+
if __name__=='__main__':
    # Entry point: see main() for the supported command-line options.
    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/timer_demo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/timer_demo.py
new file mode 100644
index 0000000..457c99e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/timer_demo.py
@@ -0,0 +1,68 @@
+# -*- Mode: Python; tab-width: 4 -*-
+#
+
+# This module, and the timer.pyd core timer support, were written by
+# Sam Rushing (rushing@nightmare.com)
+
+import timer
+import time
+
+# Timers are based on Windows messages.  So we need
+# to do the event-loop thing!
+import win32event, win32gui
+
+# glork holds a simple counter for us.
+
class glork:
    """Counter driven by a win32 timer.

    Prints its count on every timer tick and signals self.event once the
    count exceeds 'max'.
    """

    def __init__ (self, delay=1000, max=10):
        self.x = 0
        self.max = max
        # increment() is called back every 'delay' milliseconds.
        self.id = timer.set_timer (delay, self.increment)
        # Could use the threading module, but this is
        # a win32 extension test after all! :-)
        self.event = win32event.CreateEvent(None, 0, 0, None)

    def increment (self, id, time):
        # Timer callback - prints the current value before incrementing,
        # so values 0 through max inclusive are printed.
        print 'x = %d' % self.x
        self.x = self.x + 1
        # if we've reached the max count,
        # kill off the timer.
        if self.x > self.max:
            # we could have used 'self.id' here, too
            timer.kill_timer (id)
            win32event.SetEvent(self.event)
+
+# create a counter that will count from '0' thru '10' (inclusive),
+# incrementing once a second, and then stop.
+
def demo (delay=1000, stop=10):
    """Run the glork counter, pumping messages until it signals completion.

    Raises RuntimeError if a WM_QUIT arrives or 30 seconds elapse first.
    """
    g = glork(delay, stop)
    # Timers are message based - so we need
    # To run a message loop while waiting for our timers
    # to expire.
    start_time = time.time()
    while 1:
        # We can't simply give a timeout of 30 seconds, as
        # we may continuously be receiving other input messages,
        # and therefore never expire.
        rc = win32event.MsgWaitForMultipleObjects(
                (g.event,), # list of objects
                0, # wait all
                500,  # timeout
                win32event.QS_ALLEVENTS, # type of input
                )
        if rc == win32event.WAIT_OBJECT_0:
            # Event signalled.
            break
        elif rc == win32event.WAIT_OBJECT_0+1:
            # Message waiting.
            if win32gui.PumpWaitingMessages():
                raise RuntimeError, "We got an unexpected WM_QUIT message!"
        else:
            # This wait timed-out.
            if time.time()-start_time > 30:
                raise RuntimeError, "We timed out waiting for the timers to expire!"
+
if __name__=='__main__':
    # Run with the default 1-second timer, counting to 10.
    demo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32clipboardDemo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32clipboardDemo.py
new file mode 100644
index 0000000..6fad3ff
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32clipboardDemo.py
@@ -0,0 +1,129 @@
+# win32clipboardDemo.py
+#
+# Demo/test of the win32clipboard module.
+from win32clipboard import *
+import win32con
+import types
+
if not __debug__:
    print "WARNING: The test code in this module uses assert"
    print "This instance of Python has asserts disabled, so many tests will be skipped"

# Map each CF_* constant value to its name, for readable output in
# TestClipboardEnum.
cf_names = {}
# Build map of CF_* constants to names.
for name, val in win32con.__dict__.items():
    if name[:3]=="CF_" and name != "CF_SCREENFONTS": # CF_SCREEN_FONTS==CF_TEXT!?!?
        cf_names[val] = name
+
def TestEmptyClipboard():
    """Empty the clipboard and verify it reports no available formats."""
    OpenClipboard()
    try:
        EmptyClipboard()
        remaining = EnumClipboardFormats(0)
        assert remaining==0, "Clipboard formats were available after emptying it!"
    finally:
        CloseClipboard()
+
def TestText():
    """Round-trip plain and Unicode text through the clipboard.

    The clipboard is closed and reopened between phases because format
    auto-conversion (CF_TEXT <-> CF_UNICODETEXT/CF_OEMTEXT) only kicks in
    across a close.
    """
    OpenClipboard()
    try:
        text = "Hello from Python"
        SetClipboardText(text)
        got = GetClipboardData(win32con.CF_TEXT)
        assert  got == text, "Didnt get the correct result back - '%r'." % (got,)
        # Win32 documentation says I can get the result back as CF_UNICODE  or CF_OEMTEXT.
        # But it appears I need to close the clipboard for this to kick-in.
        # but if I attempt to, it fails!
    finally:
        CloseClipboard()

    OpenClipboard()
    try:
        got = GetClipboardData(win32con.CF_UNICODETEXT)
        assert  got == text, "Didnt get the correct result back - '%r'." % (got,)
        assert type(got)==types.UnicodeType, "Didnt get the correct result back - '%r'." % (got,)

        got = GetClipboardData(win32con.CF_OEMTEXT)
        assert  got == text, "Didnt get the correct result back - '%r'." % (got,)

        # Unicode tests
        EmptyClipboard()
        text = u"Hello from Python unicode"
        # Now set the Unicode value
        SetClipboardData(win32con.CF_UNICODETEXT, text)
        # Get it in Unicode.
        got = GetClipboardData(win32con.CF_UNICODETEXT)
        assert  got == text, "Didnt get the correct result back - '%r'." % (got,)
        assert type(got)==types.UnicodeType, "Didnt get the correct result back - '%r'." % (got,)

        # Close and open the clipboard to ensure auto-conversions take place.
    finally:
        CloseClipboard()

    OpenClipboard()
    try:

        # Make sure I can still get the text.
        got = GetClipboardData(win32con.CF_TEXT)
        assert  got == text, "Didnt get the correct result back - '%r'." % (got,)
        # Make sure we get back the correct types.
        got = GetClipboardData(win32con.CF_UNICODETEXT)
        assert type(got)==types.UnicodeType, "Didnt get the correct result back - '%r'." % (got,)
        got = GetClipboardData(win32con.CF_OEMTEXT)
        assert  got == text, "Didnt get the correct result back - '%r'." % (got,)
        print "Clipboard text tests worked correctly"
    finally:
        CloseClipboard()
+
def TestClipboardEnum():
    """Enumerate all formats currently on the clipboard and print them."""
    OpenClipboard()
    try:
        # Enumerate over the clipboard types
        enum = 0
        while 1:
            # EnumClipboardFormats(prev) returns the next format after
            # 'prev'; 0 means the enumeration is done.
            enum = EnumClipboardFormats(enum)
            if enum==0:
                break
            assert IsClipboardFormatAvailable(enum), "Have format, but clipboard says it is not available!"
            n = cf_names.get(enum,"")
            if not n:
                # Not a predefined CF_* constant - ask the system for the
                # registered format's name.
                try:
                    n = GetClipboardFormatName(enum)
                except error:
                    n = "unknown (%s)" % (enum,)

            print "Have format", n
        print "Clipboard enumerator tests worked correctly"
    finally:
        CloseClipboard()
+
class Foo:
    """Trivial attribute bag used to round-trip a pickle via the clipboard."""
    def __init__(self, **kw):
        for key, value in kw.items():
            setattr(self, key, value)
    def __cmp__(self, other):
        # Two Foos compare equal when all their attributes match.
        return cmp(self.__dict__, other.__dict__)
+
def TestCustomFormat():
    """Round-trip a pickled Python object via a registered clipboard format.

    NOTE(review): unpickling clipboard data is only safe here because this
    demo wrote the data itself - never unpickle clipboard contents from an
    untrusted source.
    """
    OpenClipboard()
    try:
        # Just for the fun of it pickle Python objects through the clipboard
        fmt = RegisterClipboardFormat("Python Pickle Format")
        import cPickle
        pickled_object = Foo(a=1, b=2, Hi=3)
        SetClipboardData(fmt, cPickle.dumps( pickled_object ) )
        # Now read it back.
        data = GetClipboardData(fmt)
        loaded_object = cPickle.loads(data)
        assert cPickle.loads(data) == pickled_object, "Didnt get the correct data!"

        print "Clipboard custom format tests worked correctly"
    finally:
        CloseClipboard()
+
+
if __name__=='__main__':
    # Run all the clipboard tests in sequence.
    TestEmptyClipboard()
    TestText()
    TestCustomFormat()
    TestClipboardEnum()
    # And leave it empty at the end!
    TestEmptyClipboard()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32comport_demo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32comport_demo.py
new file mode 100644
index 0000000..110d4b5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32comport_demo.py
@@ -0,0 +1,133 @@
+# This is a simple serial port terminal demo.
+#
+# Its primary purpose is to demonstrate the native serial port access offered via
+# win32file.
+
+# It uses 3 threads:
+# - The main thread, which cranks up the other 2 threads, then simply waits for them to exit.
+# - The user-input thread - blocks waiting for a keyboard character, and when found sends it
+#   out the COM port.  If the character is Ctrl+C, it stops, signalling the COM port thread to stop.
+# - The COM port thread is simply listening for input on the COM port, and prints it to the screen.
+
+# This demo uses overlapped IO, so that none of the read or write operations actually block (however,
+# in this sample, the very next thing we do _is_ block - so it shows off the concepts even though it
+# doesn't exploit them).
+
+from win32file import * # The base COM port and file IO functions.
+from win32event import * # We use events and the WaitFor[Multiple]Objects functions.
+import win32con # constants.
+import msvcrt # For the getch() function.
+
+import threading
+import sys
+
+def FindModem():
+    # Snoop over the comports, seeing if it is likely we have a modem.
+    for i in range(1,5):
+        port = "COM%d" % (i,)
+        try:
+            handle = CreateFile(port,
+                                           win32con.GENERIC_READ | win32con.GENERIC_WRITE,
+                                           0, # exclusive access
+                                           None, # no security
+                                           win32con.OPEN_EXISTING,
+                                           win32con.FILE_ATTRIBUTE_NORMAL,
+                                           None)
+            # It appears that an available COM port will always succeed here,
+            # just return 0 for the status flags.  We only care that it has _any_ status
+            # flags (and therefore probably a real modem)
+            if GetCommModemStatus(handle) != 0:
+                return port
+        except error:
+            pass # No port, or modem status failed.
+    return None
+
+# A basic synchronous COM port file-like object
+class SerialTTY:
+    def __init__(self, port):
+        if type(port)==type(0):
+            port = "COM%d" % (port,)
+        self.handle = CreateFile(port,
+                                           win32con.GENERIC_READ | win32con.GENERIC_WRITE,
+                                           0, # exclusive access
+                                           None, # no security
+                                           win32con.OPEN_EXISTING,
+                                           win32con.FILE_ATTRIBUTE_NORMAL | win32con.FILE_FLAG_OVERLAPPED,
+                                           None)
+        # Tell the port we want a notification on each char.
+        SetCommMask(self.handle, EV_RXCHAR)
+        # Setup a 4k buffer
+        SetupComm(self.handle, 4096, 4096)
+        # Remove anything that was there
+        PurgeComm(self.handle, PURGE_TXABORT | PURGE_RXABORT | PURGE_TXCLEAR | PURGE_RXCLEAR )
+        # Setup for overlapped IO.
+        timeouts = 0xFFFFFFFF, 0, 1000, 0, 1000
+        SetCommTimeouts(self.handle, timeouts)
+        # Setup the connection info.
+        dcb = GetCommState( self.handle )
+        dcb.BaudRate = CBR_115200
+        dcb.ByteSize = 8
+        dcb.Parity = NOPARITY
+        dcb.StopBits = ONESTOPBIT
+        SetCommState(self.handle, dcb)
+        print "Connected to %s at %s baud" % (port, dcb.BaudRate)
+
+    def _UserInputReaderThread(self):
+        overlapped = OVERLAPPED()
+        overlapped.hEvent = CreateEvent(None, 1, 0, None)
+        try:
+            while 1:
+                ch = msvcrt.getch()
+                if ord(ch)==3:
+                    break
+                WriteFile(self.handle, ch, overlapped)
+                # Wait for the write to complete.
+                WaitForSingleObject(overlapped.hEvent, INFINITE)
+        finally:
+            SetEvent(self.eventStop)
+
+    def _ComPortThread(self):
+        overlapped = OVERLAPPED()
+        overlapped.hEvent = CreateEvent(None, 1, 0, None)
+        while 1:
+            # XXX - note we could _probably_ just use overlapped IO on the win32file.ReadFile() statement
+            # XXX but this tests the COM stuff!
+            rc, mask = WaitCommEvent(self.handle, overlapped)
+            if rc == 0: # Character already ready!
+                SetEvent(overlapped.hEvent)
+            rc = WaitForMultipleObjects([overlapped.hEvent, self.eventStop], 0, INFINITE)
+            if rc == WAIT_OBJECT_0:
+                # Some input - read and print it
+                flags, comstat = ClearCommError( self.handle )
+                rc, data = ReadFile(self.handle, comstat.cbInQue, overlapped)
+                WaitForSingleObject(overlapped.hEvent, INFINITE)
+                sys.stdout.write(data)
+            else:
+                # Stop the thread!
+                # Just in case the user input thread is still going, close it
+                sys.stdout.close()
+                break
+
+    def Run(self):
+        self.eventStop = CreateEvent(None, 0, 0, None)
+        # Start the reader and writer threads.
+        user_thread = threading.Thread(target = self._UserInputReaderThread)
+        user_thread.start()
+        com_thread = threading.Thread(target = self._ComPortThread)
+        com_thread.start()
+        user_thread.join()
+        com_thread.join()
+
+if __name__=='__main__':
+    print "Serial port terminal demo - press Ctrl+C to exit"
+    if len(sys.argv)<=1:
+        port = FindModem()
+        if port is None:
+            print "No COM port specified, and no modem could be found"
+            print "Please re-run this script with the name of a COM port (eg COM3)"
+            sys.exit(1)
+    else:
+        port = sys.argv[1]
+
+    tty = SerialTTY(port)
+    tty.Run()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32console_demo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32console_demo.py
new file mode 100644
index 0000000..787a4b6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32console_demo.py
@@ -0,0 +1,103 @@
+import win32console, win32con
+import traceback, time
+
+virtual_keys={}
+for k,v in win32con.__dict__.items():
+    if k.startswith('VK_'):
+        virtual_keys[v]=k 
+
+free_console=True
+try:
+    win32console.AllocConsole()
+except win32console.error, err_tuple:
+    if err_tuple[0]!=5:
+        raise
+    ## only free console if one was created successfully
+    free_console=False
+
+stdout=win32console.GetStdHandle(win32console.STD_OUTPUT_HANDLE)
+stdin=win32console.GetStdHandle(win32console.STD_INPUT_HANDLE)
+newbuffer=win32console.CreateConsoleScreenBuffer()
+newbuffer.SetConsoleActiveScreenBuffer()
+newbuffer.SetConsoleTextAttribute(win32console.FOREGROUND_RED|win32console.FOREGROUND_INTENSITY
+        |win32console.BACKGROUND_GREEN|win32console.BACKGROUND_INTENSITY)
+newbuffer.WriteConsole('This is a new screen buffer\n')
+
+## test setting screen buffer and window size
+## screen buffer size cannot be smaller than window size
+window_size=newbuffer.GetConsoleScreenBufferInfo()['Window']
+coord=win32console.PyCOORDType(X=window_size.Right+20, Y=window_size.Bottom+20)
+newbuffer.SetConsoleScreenBufferSize(coord)
+
+window_size.Right+=10
+window_size.Bottom+=10
+newbuffer.SetConsoleWindowInfo(Absolute=True,ConsoleWindow=window_size)
+
+## write some records to the input queue 
+x=win32console.PyINPUT_RECORDType(win32console.KEY_EVENT)
+x.Char=u'X'
+x.KeyDown=True
+x.RepeatCount=1
+x.VirtualKeyCode=0x58
+x.ControlKeyState=win32con.SHIFT_PRESSED
+
+z=win32console.PyINPUT_RECORDType(win32console.KEY_EVENT)
+z.Char=u'Z'
+z.KeyDown=True
+z.RepeatCount=1
+z.VirtualKeyCode=0x5a
+z.ControlKeyState=win32con.SHIFT_PRESSED
+
+stdin.WriteConsoleInput([x,z,x])
+
+newbuffer.SetConsoleTextAttribute(win32console.FOREGROUND_RED|win32console.FOREGROUND_INTENSITY
+        |win32console.BACKGROUND_GREEN|win32console.BACKGROUND_INTENSITY)
+newbuffer.WriteConsole('Press some keys, click some characters with the mouse\n')
+
+newbuffer.SetConsoleTextAttribute(win32console.FOREGROUND_BLUE|win32console.FOREGROUND_INTENSITY
+        |win32console.BACKGROUND_RED|win32console.BACKGROUND_INTENSITY)
+newbuffer.WriteConsole('Hit "End" key to quit\n')
+                    
+breakout=False
+while not breakout:
+    input_records=stdin.ReadConsoleInput(10)
+    for input_record in input_records:
+        if input_record.EventType==win32console.KEY_EVENT:
+            if input_record.KeyDown:
+                if input_record.Char=='\0':
+                    newbuffer.WriteConsole(virtual_keys.get(input_record.VirtualKeyCode, 'VirtualKeyCode: %s' %input_record.VirtualKeyCode))
+                else:
+                    newbuffer.WriteConsole(input_record.Char)
+                if input_record.VirtualKeyCode==win32con.VK_END:
+                    breakout=True
+                    break
+        elif input_record.EventType==win32console.MOUSE_EVENT:
+            if input_record.EventFlags==0:  ## 0 indicates a button event
+                if input_record.ButtonState!=0:   ## exclude button releases
+                    pos=input_record.MousePosition
+                    # switch the foreground and background colors of the character that was clicked
+                    attr=newbuffer.ReadConsoleOutputAttribute(Length=1, ReadCoord=pos)[0]
+                    new_attr=attr
+                    if attr&win32console.FOREGROUND_BLUE:
+                        new_attr=(new_attr&~win32console.FOREGROUND_BLUE)|win32console.BACKGROUND_BLUE
+                    if attr&win32console.FOREGROUND_RED:
+                        new_attr=(new_attr&~win32console.FOREGROUND_RED)|win32console.BACKGROUND_RED
+                    if attr&win32console.FOREGROUND_GREEN:
+                        new_attr=(new_attr&~win32console.FOREGROUND_GREEN)|win32console.BACKGROUND_GREEN
+
+                    if attr&win32console.BACKGROUND_BLUE:
+                        new_attr=(new_attr&~win32console.BACKGROUND_BLUE)|win32console.FOREGROUND_BLUE
+                    if attr&win32console.BACKGROUND_RED:
+                        new_attr=(new_attr&~win32console.BACKGROUND_RED)|win32console.FOREGROUND_RED
+                    if attr&win32console.BACKGROUND_GREEN:
+                        new_attr=(new_attr&~win32console.BACKGROUND_GREEN)|win32console.FOREGROUND_GREEN
+                    newbuffer.WriteConsoleOutputAttribute((new_attr,),pos)
+        else:
+            newbuffer.WriteConsole(str(input_record))
+    time.sleep(0.1)
+
+stdout.SetConsoleActiveScreenBuffer()
+newbuffer.Close()
+if free_console:
+     win32console.FreeConsole()
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32fileDemo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32fileDemo.py
new file mode 100644
index 0000000..5f895f5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32fileDemo.py
@@ -0,0 +1,35 @@
+# This is a "demo" of win32file - it used to be more a test case than a 
+# demo, so has been moved to the test directory.
+
+# Please contribute your favourite simple little demo.
+import win32file, win32api, win32con
+import os
+
+# A very simple demo - note that this does no more than you can do with 
+# builtin Python file objects, so for something as simple as this, you
+# generally *should* use builtin Python objects.  Only use win32file etc
+# when you need win32 specific features not available in Python.
+def SimpleFileDemo():
+    testName = os.path.join( win32api.GetTempPath(), "win32file_demo_test_file")
+    if os.path.exists(testName): os.unlink(testName)
+    # Open the file for writing.
+    handle = win32file.CreateFile(testName, 
+                                  win32file.GENERIC_WRITE, 
+                                  0, 
+                                  None, 
+                                  win32con.CREATE_NEW, 
+                                  0, 
+                                  None)
+    test_data = "Hello\0there"
+    win32file.WriteFile(handle, test_data)
+    handle.Close()
+    # Open it for reading.
+    handle = win32file.CreateFile(testName, win32file.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None)
+    rc, data = win32file.ReadFile(handle, 1024)
+    handle.Close()
+    if data == test_data:
+        print "Successfully wrote and read a file"
+    os.unlink(testName)
+
+if __name__=='__main__':
+    SimpleFileDemo()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_demo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_demo.py
new file mode 100644
index 0000000..e92fadf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_demo.py
@@ -0,0 +1,21 @@
+# The start of a win32gui generic demo.
+# Feel free to contribute more demos back ;-)
+
+import win32gui
+
+def _MyCallback( hwnd, extra ):
+    hwnds, classes = extra
+    hwnds.append(hwnd)
+    classes[win32gui.GetClassName(hwnd)] = 1
+
+def TestEnumWindows():
+    windows = []
+    classes = {}
+    win32gui.EnumWindows(_MyCallback, (windows, classes))
+    print "Enumerated a total of %d windows with %d classes" % (len(windows),len(classes))
+    if not classes.has_key("tooltips_class32"):
+        print "Hrmmmm - I'm very surprised to not find a 'tooltips_class32' class."
+
+print "Enumerating all windows..."
+TestEnumWindows()
+print "All tests done!"
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_dialog.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_dialog.py
new file mode 100644
index 0000000..c76e510
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_dialog.py
@@ -0,0 +1,377 @@
+# A demo of a fairly complex dialog.
+#
+# Features:
+# * Uses a "dynamic dialog resource" to build the dialog.
+# * Uses a ListView control.
+# * Dynamically resizes content.
+# * Uses a second worker thread to fill the list.
+# * Demonstrates support for Windows XP themes.
+
+# If you are on Windows XP, and specify a '--noxp' argument, you will see:
+# * alpha-blend issues with icons
+# * The buttons are "old" style, rather than based on the XP theme.
+# Hence, using:
+#   import winxpgui as win32gui
+# is recommended.
+# Please report any problems.
+import sys
+if "--noxp" in sys.argv:
+    import win32gui
+else:
+    import winxpgui as win32gui
+import win32api
+import win32con
+import struct, array
+import commctrl
+import Queue
+import os
+
+IDC_SEARCHTEXT = 1024
+IDC_BUTTON_SEARCH = 1025
+IDC_BUTTON_DISPLAY = 1026
+IDC_LISTBOX = 1027
+
+WM_SEARCH_RESULT = win32con.WM_USER + 512
+WM_SEARCH_FINISHED = win32con.WM_USER + 513
+
+g_registeredClass = 0
+
+g_iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "pyc.ico" ))
+if not os.path.isfile(g_iconPathName):
+    # Look in the source tree.
+    g_iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "..\\PC\\pyc.ico" ))
+    if not os.path.isfile(g_iconPathName):
+        print "Can't find the icon file"
+        g_iconPathName = None
+
+class _WIN32MASKEDSTRUCT:
+    def __init__(self, **kw):
+        full_fmt = ""
+        for name, fmt, default, mask in self._struct_items_:
+            self.__dict__[name] = None
+            if fmt == "z":
+                full_fmt += "pi"
+            else:
+                full_fmt += fmt
+        for name, val in kw.items():
+            if not self.__dict__.has_key(name):
+                raise ValueError, "LVITEM structures do not have an item '%s'" % (name,)
+            self.__dict__[name] = val
+
+    def __setattr__(self, attr, val):
+        if not attr.startswith("_") and not self.__dict__.has_key(attr):
+            raise AttributeError, attr
+        self.__dict__[attr] = val
+
+    def toparam(self):
+        self._buffs = []
+        full_fmt = ""
+        vals = []
+        mask = 0
+        # calc the mask
+        for name, fmt, default, this_mask in self._struct_items_:
+            if this_mask is not None and self.__dict__.get(name) is not None:
+                mask |= this_mask
+        self.mask = mask
+        for name, fmt, default, this_mask in self._struct_items_:
+            val = self.__dict__[name]
+            if fmt == "z":
+                fmt = "Pi"
+                if val is None:
+                    vals.append(0)
+                    vals.append(0)
+                else:
+                    str_buf = array.array("c", val+'\0')
+                    vals.append(str_buf.buffer_info()[0])
+                    vals.append(len(val))
+                    self._buffs.append(str_buf) # keep alive during the call.
+            else:
+                if val is None:
+                    val = default
+                vals.append(val)
+            full_fmt += fmt
+        return apply(struct.pack, (full_fmt,) + tuple(vals) )
+
+
+# NOTE: See the win32gui_struct module for an alternative way of dealing 
+# with these structures
+class LVITEM(_WIN32MASKEDSTRUCT):
+    _struct_items_ = [
+        ("mask", "I", 0, None),
+        ("iItem", "i", 0, None),
+        ("iSubItem", "i", 0, None),
+        ("state", "I", 0, commctrl.LVIF_STATE),
+        ("stateMask", "I", 0, None),
+        ("text", "z", None, commctrl.LVIF_TEXT),
+        ("iImage", "i", 0, commctrl.LVIF_IMAGE),
+        ("lParam", "i", 0, commctrl.LVIF_PARAM),
+        ("iIdent", "i", 0, None),
+    ]
+
+class LVCOLUMN(_WIN32MASKEDSTRUCT):
+    _struct_items_ = [
+        ("mask", "I", 0, None),
+        ("fmt", "i", 0, commctrl.LVCF_FMT),
+        ("cx", "i", 0, commctrl.LVCF_WIDTH),
+        ("text", "z", None, commctrl.LVCF_TEXT),
+        ("iSubItem", "i", 0, commctrl.LVCF_SUBITEM),
+        ("iImage", "i", 0, commctrl.LVCF_IMAGE),
+        ("iOrder", "i", 0, commctrl.LVCF_ORDER),
+    ]
+
+class DemoWindowBase:
+    def __init__(self):
+        win32gui.InitCommonControls()
+        self.hinst = win32gui.dllhandle
+        self.list_data = {}
+
+    def _RegisterWndClass(self):
+        className = "PythonDocSearch"
+        global g_registeredClass
+        if not g_registeredClass:
+            message_map = {}
+            wc = win32gui.WNDCLASS()
+            wc.SetDialogProc() # Make it a dialog class.
+            wc.hInstance = self.hinst
+            wc.lpszClassName = className
+            wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW
+            wc.hCursor = win32gui.LoadCursor( 0, win32con.IDC_ARROW )
+            wc.hbrBackground = win32con.COLOR_WINDOW + 1
+            wc.lpfnWndProc = message_map # could also specify a wndproc.
+            # C code: wc.cbWndExtra = DLGWINDOWEXTRA + sizeof(HBRUSH) + (sizeof(COLORREF));
+            wc.cbWndExtra = win32con.DLGWINDOWEXTRA + struct.calcsize("Pi")
+            icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
+            wc.hIcon = win32gui.LoadImage(self.hinst, g_iconPathName, win32con.IMAGE_ICON, 0, 0, icon_flags)
+            classAtom = win32gui.RegisterClass(wc)
+            g_registeredClass = 1
+        return className
+
+    def _GetDialogTemplate(self, dlgClassName):
+        style = win32con.WS_THICKFRAME | win32con.WS_POPUP | win32con.WS_VISIBLE | win32con.WS_CAPTION | win32con.WS_SYSMENU | win32con.DS_SETFONT | win32con.WS_MINIMIZEBOX
+        cs = win32con.WS_CHILD | win32con.WS_VISIBLE
+        title = "Dynamic Dialog Demo"
+
+        # Window frame and title
+        dlg = [ [title, (0, 0, 210, 250), style, None, (8, "MS Sans Serif"), None, dlgClassName], ]
+
+        # ID label and text box
+        dlg.append([130, "Enter something", -1, (5, 5, 200, 9), cs | win32con.SS_LEFT])
+        s = cs | win32con.WS_TABSTOP | win32con.WS_BORDER
+        dlg.append(['EDIT', None, IDC_SEARCHTEXT, (5, 15, 200, 12), s])
+
+        # Search/Display Buttons
+        # (x positions don't matter here)
+        s = cs | win32con.WS_TABSTOP
+        dlg.append([128, "Fill List", IDC_BUTTON_SEARCH, (5, 35, 50, 14), s | win32con.BS_DEFPUSHBUTTON])
+        s = win32con.BS_PUSHBUTTON | s
+        dlg.append([128, "Display", IDC_BUTTON_DISPLAY, (100, 35, 50, 14), s])
+
+        # List control.
+        # Can't make this work :(
+##        s = cs | win32con.WS_TABSTOP
+##        dlg.append(['SysListView32', "Title", IDC_LISTBOX, (5, 505, 200, 200), s])
+
+        return dlg
+
+    def _DoCreate(self, fn):
+        message_map = {
+            win32con.WM_SIZE: self.OnSize,
+            win32con.WM_COMMAND: self.OnCommand,
+            win32con.WM_NOTIFY: self.OnNotify,
+            win32con.WM_INITDIALOG: self.OnInitDialog,
+            win32con.WM_CLOSE: self.OnClose,
+            win32con.WM_DESTROY: self.OnDestroy,
+            WM_SEARCH_RESULT: self.OnSearchResult,
+            WM_SEARCH_FINISHED: self.OnSearchFinished,
+        }
+        dlgClassName = self._RegisterWndClass()
+        template = self._GetDialogTemplate(dlgClassName)
+        return fn(self.hinst, template, 0, message_map)
+
+    def _SetupList(self):
+        child_style = win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_BORDER | win32con.WS_HSCROLL | win32con.WS_VSCROLL
+        child_style |= commctrl.LVS_SINGLESEL | commctrl.LVS_SHOWSELALWAYS | commctrl.LVS_REPORT
+        self.hwndList = win32gui.CreateWindow("SysListView32", None, child_style, 0, 0, 100, 100, self.hwnd, IDC_LISTBOX, self.hinst, None)
+
+        child_ex_style = win32gui.SendMessage(self.hwndList, commctrl.LVM_GETEXTENDEDLISTVIEWSTYLE, 0, 0)
+        child_ex_style |= commctrl.LVS_EX_FULLROWSELECT
+        win32gui.SendMessage(self.hwndList, commctrl.LVM_SETEXTENDEDLISTVIEWSTYLE, 0, child_ex_style)
+
+        # Add an image list - use the builtin shell folder icon - this
+        # demonstrates the problem with alpha-blending of icons on XP if
+        # winxpgui is not used in place of win32gui.
+        il = win32gui.ImageList_Create(
+                    win32api.GetSystemMetrics(win32con.SM_CXSMICON),
+                    win32api.GetSystemMetrics(win32con.SM_CYSMICON),
+                    commctrl.ILC_COLOR32 | commctrl.ILC_MASK,
+                    1, # initial size
+                    0) # cGrow
+
+        shell_dll = os.path.join(win32api.GetSystemDirectory(), "shell32.dll")
+        large, small = win32gui.ExtractIconEx(shell_dll, 4, 1)
+        win32gui.ImageList_ReplaceIcon(il, -1, small[0])
+        win32gui.DestroyIcon(small[0])
+        win32gui.DestroyIcon(large[0])
+        win32gui.SendMessage(self.hwndList, commctrl.LVM_SETIMAGELIST,
+                             commctrl.LVSIL_SMALL, il)
+
+        # Setup the list control columns.
+        lvc = LVCOLUMN(mask = commctrl.LVCF_FMT | commctrl.LVCF_WIDTH | commctrl.LVCF_TEXT | commctrl.LVCF_SUBITEM)
+        lvc.fmt = commctrl.LVCFMT_LEFT
+        lvc.iSubItem = 1
+        lvc.text = "Title"
+        lvc.cx = 200
+        win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTCOLUMN, 0, lvc.toparam())
+        lvc.iSubItem = 0
+        lvc.text = "Order"
+        lvc.cx = 50
+        win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTCOLUMN, 0, lvc.toparam())
+
+        win32gui.UpdateWindow(self.hwnd)
+
+    def ClearListItems(self):
+        win32gui.SendMessage(self.hwndList, commctrl.LVM_DELETEALLITEMS)
+        self.list_data = {}
+
+    def AddListItem(self, data, *columns):
+        num_items = win32gui.SendMessage(self.hwndList, commctrl.LVM_GETITEMCOUNT)
+        item = LVITEM(text=columns[0], iItem = num_items)
+        new_index = win32gui.SendMessage(self.hwndList, commctrl.LVM_INSERTITEM, 0, item.toparam())
+        col_no = 1
+        for col in columns[1:]:
+            item = LVITEM(text=col, iItem = new_index, iSubItem = col_no)
+            win32gui.SendMessage(self.hwndList, commctrl.LVM_SETITEM, 0, item.toparam())
+            col_no += 1
+        self.list_data[new_index] = data
+
+    def OnInitDialog(self, hwnd, msg, wparam, lparam):
+        self.hwnd = hwnd
+        # centre the dialog
+        desktop = win32gui.GetDesktopWindow()
+        l,t,r,b = win32gui.GetWindowRect(self.hwnd)
+        dt_l, dt_t, dt_r, dt_b = win32gui.GetWindowRect(desktop)
+        centre_x, centre_y = win32gui.ClientToScreen( desktop, ( (dt_r-dt_l)/2, (dt_b-dt_t)/2) )
+        win32gui.MoveWindow(hwnd, centre_x-(r/2), centre_y-(b/2), r-l, b-t, 0)
+        self._SetupList()
+        l,t,r,b = win32gui.GetClientRect(self.hwnd)
+        self._DoSize(r-l,b-t, 1)
+
+    def _DoSize(self, cx, cy, repaint = 1):
+        # right-justify the textbox.
+        ctrl = win32gui.GetDlgItem(self.hwnd, IDC_SEARCHTEXT)
+        l, t, r, b = win32gui.GetWindowRect(ctrl)
+        l, t = win32gui.ScreenToClient(self.hwnd, (l,t) )
+        r, b = win32gui.ScreenToClient(self.hwnd, (r,b) )
+        win32gui.MoveWindow(ctrl, l, t, cx-l-5, b-t, repaint)
+        # The button.
+        ctrl = win32gui.GetDlgItem(self.hwnd, IDC_BUTTON_DISPLAY)
+        l, t, r, b = win32gui.GetWindowRect(ctrl)
+        l, t = win32gui.ScreenToClient(self.hwnd, (l,t) )
+        r, b = win32gui.ScreenToClient(self.hwnd, (r,b) )
+        list_y = b + 10
+        w = r - l
+        win32gui.MoveWindow(ctrl, cx - 5 - w, t, w, b-t, repaint)
+
+        # The list control
+        win32gui.MoveWindow(self.hwndList, 0, list_y, cx, cy-list_y, repaint)
+        # The last column of the list control.
+        new_width = cx - win32gui.SendMessage(self.hwndList, commctrl.LVM_GETCOLUMNWIDTH, 0)
+        win32gui.SendMessage(self.hwndList, commctrl.LVM_SETCOLUMNWIDTH, 1, new_width)
+
+    def OnSize(self, hwnd, msg, wparam, lparam):
+        x = win32api.LOWORD(lparam)
+        y = win32api.HIWORD(lparam)
+        self._DoSize(x,y)
+        return 1
+
+    def OnSearchResult(self, hwnd, msg, wparam, lparam):
+        try:
+            while 1:
+                params = self.result_queue.get(0)
+                apply(self.AddListItem, params)
+        except Queue.Empty:
+            pass
+
+    def OnSearchFinished(self, hwnd, msg, wparam, lparam):
+        print "OnSearchFinished"
+
+    def OnNotify(self, hwnd, msg, wparam, lparam):
+        format = "iiiiiiiiiii"
+        buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam)
+        hwndFrom, idFrom, code, iItem, iSubItem, uNewState, uOldState, uChanged, actionx, actiony, lParam \
+                  = struct.unpack(format, buf)
+        # *sigh* - work around a problem with old commctrl modules, which had a
+        # bad value for PY_0U, which therefore caused most "control notification"
+        # messages to be wrong.
+        # Code that needs to work with both pre and post pywin32-204 must do
+        # this too.
+        code += commctrl.PY_0U
+        if code == commctrl.NM_DBLCLK:
+            print "Double click on item", iItem+1
+        return 1
+
+    def OnCommand(self, hwnd, msg, wparam, lparam):
+        id = win32api.LOWORD(wparam)
+        if id == IDC_BUTTON_SEARCH:
+            self.ClearListItems()
+            def fill_slowly(q, hwnd):
+                import time
+                for i in range(20):
+                    q.put(("whatever", str(i+1), "Search result " + str(i) ))
+                    win32gui.PostMessage(hwnd, WM_SEARCH_RESULT, 0, 0)
+                    time.sleep(.25)
+                win32gui.PostMessage(hwnd, WM_SEARCH_FINISHED, 0, 0)
+
+            import threading
+            self.result_queue = Queue.Queue()
+            thread = threading.Thread(target = fill_slowly, args=(self.result_queue, self.hwnd) )
+            thread.start()
+        elif id == IDC_BUTTON_DISPLAY:
+            print "Display button selected"
+            sel = win32gui.SendMessage(self.hwndList, commctrl.LVM_GETNEXTITEM, -1, commctrl.LVNI_SELECTED)
+            print "The selected item is", sel+1
+
+    # These functions differ based on how the window is used, so may be overridden
+    def OnClose(self, hwnd, msg, wparam, lparam):
+        raise NotImplementedError
+
+    def OnDestroy(self, hwnd, msg, wparam, lparam):
+        pass
+
+# An implementation suitable for use with the Win32 Window functions (ie, not
+# a true dialog)
+class DemoWindow(DemoWindowBase):
+    def CreateWindow(self):
+        # Create the window via CreateDialogBoxIndirect - it can then
+        # work as a "normal" window, once a message loop is established.
+        self._DoCreate(win32gui.CreateDialogIndirect)
+
+    def OnClose(self, hwnd, msg, wparam, lparam):
+        win32gui.DestroyWindow(hwnd)
+
+    # We need to arrange for a WM_QUIT message to be sent to our
+    # PumpMessages() loop.
+    def OnDestroy(self, hwnd, msg, wparam, lparam):
+        win32gui.PostQuitMessage(0) # Terminate the app.
+
+# An implementation suitable for use with the Win32 Dialog functions.
+class DemoDialog(DemoWindowBase):
+    def DoModal(self):
+        return self._DoCreate(win32gui.DialogBoxIndirect)
+
+    def OnClose(self, hwnd, msg, wparam, lparam):
+        win32gui.EndDialog(hwnd, 0)
+
+def DemoModal():
+    w=DemoDialog()
+    w.DoModal()
+
+def DemoCreateWindow():
+    w=DemoWindow()
+    w.CreateWindow()
+    # PumpMessages runs until PostQuitMessage() is called by someone.
+    win32gui.PumpMessages()
+    
+if __name__=='__main__':
+    DemoModal()
+    DemoCreateWindow()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_menu.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_menu.py
new file mode 100644
index 0000000..23a55297
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_menu.py
@@ -0,0 +1,215 @@
+# Demonstrates some advanced menu concepts using win32gui.
+# This creates a taskbar icon which has some fancy menus (but note that
+# selecting the menu items does nothing useful - see win32gui_taskbar.py
+# for examples of this.
+
+# NOTE: This is a work in progress.  Todo:
+# * The "Checked" menu items don't work correctly - I'm not sure why.
+# * No support for GetMenuItemInfo.
+
+# Based on Andy McKay's demo code.
+from win32api import *
+# Try and use XP features, so we get alpha-blending etc.
+try:
+    from winxpgui import *
+except ImportError:
+    from win32gui import *
+from win32gui_struct import *
+import win32con
+import sys, os
+import struct
+import array
+
+this_dir = os.path.split(sys.argv[0])[0]
+
+class MainWindow:
+    def __init__(self):
+        message_map = {
+                win32con.WM_DESTROY: self.OnDestroy,
+                win32con.WM_COMMAND: self.OnCommand,
+                win32con.WM_USER+20 : self.OnTaskbarNotify,
+        }
+        # Register the Window class.
+        wc = WNDCLASS()
+        hinst = wc.hInstance = GetModuleHandle(None)
+        wc.lpszClassName = "PythonTaskbarDemo"
+        wc.lpfnWndProc = message_map # could also specify a wndproc.
+        classAtom = RegisterClass(wc)
+        # Create the Window.
+        style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU
+        self.hwnd = CreateWindow( classAtom, "Taskbar Demo", style, \
+                0, 0, win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT, \
+                0, 0, hinst, None)
+        UpdateWindow(self.hwnd)
+        iconPathName = os.path.abspath(os.path.join( sys.prefix, "pyc.ico" ))
+        if not os.path.isfile(iconPathName):
+            # Look in the source tree.
+            iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "..\\PC\\pyc.ico" ))
+        if os.path.isfile(iconPathName):
+            icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
+            hicon = LoadImage(hinst, iconPathName, win32con.IMAGE_ICON, 0, 0, icon_flags)
+        else:
+            iconPathName = None
+            print "Can't find a Python icon file - using default"
+            hicon = LoadIcon(0, win32con.IDI_APPLICATION)
+        self.iconPathName = iconPathName
+        
+        self.createMenu()
+
+        flags = NIF_ICON | NIF_MESSAGE | NIF_TIP
+        nid = (self.hwnd, 0, flags, win32con.WM_USER+20, hicon, "Python Demo")
+        Shell_NotifyIcon(NIM_ADD, nid)
+        print "Please right-click on the Python icon in the taskbar"
+
+    def createMenu(self):
+        self.hmenu = menu = CreatePopupMenu()
+        # Create our 'Exit' item with the standard, ugly 'close' icon.
+        item, extras = PackMENUITEMINFO(text = "Exit",
+                                        hbmpItem=win32con.HBMMENU_MBAR_CLOSE,
+                                        wID=1000)
+        InsertMenuItem(menu, 0, 1, item)
+        # Create a 'text only' menu via InsertMenuItem rather then
+        # AppendMenu, just to prove we can!
+        item, extras = PackMENUITEMINFO(text = "Text only item",
+                                        wID=1001)
+        InsertMenuItem(menu, 0, 1, item)
+
+        load_bmp_flags=win32con.LR_LOADFROMFILE | \
+                       win32con.LR_LOADTRANSPARENT
+        # These images are "over sized", so we load them scaled.
+        hbmp = LoadImage(0, os.path.join(this_dir, "images/smiley.bmp"),
+                         win32con.IMAGE_BITMAP, 20, 20, load_bmp_flags)
+
+        # Create a top-level menu with a bitmap
+        item, extras = PackMENUITEMINFO(text="Menu with bitmap",
+                                        hbmpItem=hbmp,
+                                        wID=1002)
+        InsertMenuItem(menu, 0, 1, item)
+        
+        # Create one with an icon - this is a fair bit more work, as we need
+        # to convert the icon to a bitmap.
+        # First load the icon.
+        ico_x = GetSystemMetrics(win32con.SM_CXSMICON)
+        ico_y = GetSystemMetrics(win32con.SM_CYSMICON)
+        if self.iconPathName:
+            hicon = LoadImage(0, self.iconPathName, win32con.IMAGE_ICON, ico_x, ico_y, win32con.LR_LOADFROMFILE)
+        else:
+            shell_dll = os.path.join(GetSystemDirectory(), "shell32.dll")
+            large, small = win32gui.ExtractIconEx(shell_dll, 4, 1)
+            hicon = small[0]
+            DestroyIcon(large[0])
+
+        hdcBitmap = CreateCompatibleDC(0)
+        hdcScreen = GetDC(0)
+        hbm = CreateCompatibleBitmap(hdcScreen, ico_x, ico_y)
+        hbmOld = SelectObject(hdcBitmap, hbm)
+        # Fill the background.
+        brush = GetSysColorBrush(win32con.COLOR_MENU)
+        FillRect(hdcBitmap, (0, 0, 16, 16), brush)
+        # unclear if brush needs to be feed.  Best clue I can find is:
+        # "GetSysColorBrush returns a cached brush instead of allocating a new
+        # one." - implies no DeleteObject
+        # draw the icon
+        DrawIconEx(hdcBitmap, 0, 0, hicon, ico_x, ico_y, 0, 0, win32con.DI_NORMAL)
+        SelectObject(hdcBitmap, hbmOld)
+        DeleteDC(hdcBitmap)
+
+        item, extras = PackMENUITEMINFO(text="Menu with icon",
+                                        hbmpItem=hbm,
+                                        wID=1009)
+        InsertMenuItem(menu, 0, 1, item)
+
+        # Create a sub-menu, and put a few funky ones there.
+        self.sub_menu = sub_menu = CreatePopupMenu()
+        # A 'checkbox' menu.
+        item, extras = PackMENUITEMINFO(fState=win32con.MFS_CHECKED,
+                                        text="Checkbox menu",
+                                        hbmpItem=hbmp,
+                                        wID=1003)
+        InsertMenuItem(sub_menu, 0, 1, item)
+        # A 'radio' menu.
+        InsertMenu(sub_menu, 0, win32con.MF_BYPOSITION, win32con.MF_SEPARATOR, None)
+        item, extras = PackMENUITEMINFO(fType=win32con.MFT_RADIOCHECK,
+                                        fState=win32con.MFS_CHECKED,
+                                        text="Checkbox menu - bullet 1",
+                                        hbmpItem=hbmp,
+                                        wID=1004)
+        InsertMenuItem(sub_menu, 0, 1, item)
+        item, extras = PackMENUITEMINFO(fType=win32con.MFT_RADIOCHECK,
+                                        fState=win32con.MFS_UNCHECKED,
+                                        text="Checkbox menu - bullet 2",
+                                        hbmpItem=hbmp,
+                                        wID=1005)
+        InsertMenuItem(sub_menu, 0, 1, item)
+        # And add the sub-menu to the top-level menu.
+        item, extras = PackMENUITEMINFO(text="Sub-Menu",
+                                        hSubMenu=sub_menu)
+        InsertMenuItem(menu, 0, 1, item)
+
+        # Set 'Exit' as the default option.
+        SetMenuDefaultItem(menu, 1000, 0)
+        
+    def OnDestroy(self, hwnd, msg, wparam, lparam):
+        nid = (self.hwnd, 0)
+        Shell_NotifyIcon(NIM_DELETE, nid)
+        PostQuitMessage(0) # Terminate the app.
+
+    def OnTaskbarNotify(self, hwnd, msg, wparam, lparam):
+        if lparam==win32con.WM_RBUTTONUP:
+            print "You right clicked me."
+            # display the menu at the cursor pos.
+            pos = GetCursorPos()
+            SetForegroundWindow(self.hwnd)
+            TrackPopupMenu(self.hmenu, win32con.TPM_LEFTALIGN, pos[0], pos[1], 0, self.hwnd, None)
+            PostMessage(self.hwnd, win32con.WM_NULL, 0, 0)
+        elif lparam==win32con.WM_LBUTTONDBLCLK:
+            print "You double-clicked me"
+            # find the default menu item and fire it.
+            cmd = GetMenuDefaultItem(self.hmenu, False, 0)
+            if cmd == -1:
+                print "Can't find a default!"
+            # and just pretend it came from the menu
+            self.OnCommand(hwnd, win32con.WM_COMMAND, cmd, 0)
+        return 1
+
+    def OnCommand(self, hwnd, msg, wparam, lparam):
+        id = LOWORD(wparam)
+        print "OnCommand for control ID", id
+    def OnCommand(self, hwnd, msg, wparam, lparam):
+        id = LOWORD(wparam)
+        if id == 1000:
+            print "Goodbye"
+            DestroyWindow(self.hwnd)
+        elif id in (1003, 1004, 1005):
+            # Our 'checkbox' and 'radio' items
+            state = GetMenuState(self.sub_menu, id, win32con.MF_BYCOMMAND)
+            if state==-1:
+                raise RuntimeError, "No item found"
+            if state & win32con.MF_CHECKED:
+                check_flags = win32con.MF_UNCHECKED
+                print "Menu was checked - unchecking"
+            else:
+                check_flags = win32con.MF_CHECKED
+                print "Menu was unchecked - checking"
+
+            if id == 1003:
+                # simple checkbox
+                rc = CheckMenuItem(self.sub_menu, id,
+                                   win32con.MF_BYCOMMAND | check_flags)
+            else:
+                # radio button - must pass the first and last IDs in the
+                # "group", and the ID in the group that is to be selected.
+                rc = CheckMenuRadioItem(self.sub_menu, 1004, 1005, id,
+                                        win32con.MF_BYCOMMAND)
+            new_state = GetMenuState(self.sub_menu, id, win32con.MF_BYCOMMAND)
+            if  new_state & win32con.MF_CHECKED != check_flags:
+                raise RuntimeError, "The new item didn't get the new checked state!"
+        else:
+            print "OnCommand for ID", id
+
+def main():
+    w=MainWindow()
+    PumpMessages()
+
+if __name__=='__main__':
+    # Run the taskbar-menu demo until the icon is destroyed.
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_taskbar.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_taskbar.py
new file mode 100644
index 0000000..344b5f7c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32gui_taskbar.py
@@ -0,0 +1,103 @@
+# Creates a task-bar icon.  Run from Python.exe to see the
+# messages printed.
+from win32api import *
+from win32gui import *
+import win32con
+import sys, os
+
+class MainWindow:
+    def __init__(self):
+        msg_TaskbarRestart = RegisterWindowMessage("TaskbarCreated");
+        message_map = {
+                msg_TaskbarRestart: self.OnRestart,
+                win32con.WM_DESTROY: self.OnDestroy,
+                win32con.WM_COMMAND: self.OnCommand,
+                win32con.WM_USER+20 : self.OnTaskbarNotify,
+        }
+        # Register the Window class.
+        wc = WNDCLASS()
+        hinst = wc.hInstance = GetModuleHandle(None)
+        wc.lpszClassName = "PythonTaskbarDemo"
+        wc.style = win32con.CS_VREDRAW | win32con.CS_HREDRAW;
+        wc.hCursor = LoadCursor( 0, win32con.IDC_ARROW )
+        wc.hbrBackground = win32con.COLOR_WINDOW
+        wc.lpfnWndProc = message_map # could also specify a wndproc.
+        classAtom = RegisterClass(wc)
+        # Create the Window.
+        style = win32con.WS_OVERLAPPED | win32con.WS_SYSMENU
+        self.hwnd = CreateWindow( classAtom, "Taskbar Demo", style, \
+                0, 0, win32con.CW_USEDEFAULT, win32con.CW_USEDEFAULT, \
+                0, 0, hinst, None)
+        UpdateWindow(self.hwnd)
+        self._DoCreateIcons()
+    def _DoCreateIcons(self):
+        # Try and find a custom icon
+        hinst =  GetModuleHandle(None)
+        iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "pyc.ico" ))
+        if not os.path.isfile(iconPathName):
+            # Look in the source tree.
+            iconPathName = os.path.abspath(os.path.join( os.path.split(sys.executable)[0], "..\\PC\\pyc.ico" ))
+        if os.path.isfile(iconPathName):
+            icon_flags = win32con.LR_LOADFROMFILE | win32con.LR_DEFAULTSIZE
+            hicon = LoadImage(hinst, iconPathName, win32con.IMAGE_ICON, 0, 0, icon_flags)
+        else:
+            print "Can't find a Python icon file - using default"
+            hicon = LoadIcon(0, win32con.IDI_APPLICATION)
+
+        flags = NIF_ICON | NIF_MESSAGE | NIF_TIP
+        nid = (self.hwnd, 0, flags, win32con.WM_USER+20, hicon, "Python Demo")
+        try:
+            Shell_NotifyIcon(NIM_ADD, nid)
+        except error:
+            # This is common when windows is starting, and this code is hit
+            # before the taskbar has been created.
+            print "Failed to add the taskbar icon - is explorer running?"
+            # but keep running anyway - when explorer starts, we get the
+            # TaskbarCreated message.
+
+    def OnRestart(self, hwnd, msg, wparam, lparam):
+        self._DoCreateIcons()
+
+    def OnDestroy(self, hwnd, msg, wparam, lparam):
+        nid = (self.hwnd, 0)
+        Shell_NotifyIcon(NIM_DELETE, nid)
+        PostQuitMessage(0) # Terminate the app.
+
+    def OnTaskbarNotify(self, hwnd, msg, wparam, lparam):
+        if lparam==win32con.WM_LBUTTONUP:
+            print "You clicked me."
+        elif lparam==win32con.WM_LBUTTONDBLCLK:
+            print "You double-clicked me - goodbye"
+            DestroyWindow(self.hwnd)
+        elif lparam==win32con.WM_RBUTTONUP:
+            print "You right clicked me."
+            menu = CreatePopupMenu()
+            AppendMenu( menu, win32con.MF_STRING, 1023, "Display Dialog")
+            AppendMenu( menu, win32con.MF_STRING, 1024, "Say Hello")
+            AppendMenu( menu, win32con.MF_STRING, 1025, "Exit program" )
+            pos = GetCursorPos()
+            # See http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/menus_0hdi.asp
+            SetForegroundWindow(self.hwnd)
+            TrackPopupMenu(menu, win32con.TPM_LEFTALIGN, pos[0], pos[1], 0, self.hwnd, None)
+            PostMessage(self.hwnd, win32con.WM_NULL, 0, 0)
+        return 1
+
+    def OnCommand(self, hwnd, msg, wparam, lparam):
+        id = LOWORD(wparam)
+        if id == 1023:
+            import win32gui_dialog
+            win32gui_dialog.DemoModal()
+        elif id == 1024:
+            print "Hello"
+        elif id == 1025:
+            print "Goodbye"
+            DestroyWindow(self.hwnd)
+        else:
+            print "Unknown command -", id
+
+def main():
+    w=MainWindow()
+    PumpMessages()
+
+if __name__=='__main__':
+    # Run the taskbar demo until the icon/window is destroyed.
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32netdemo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32netdemo.py
new file mode 100644
index 0000000..de10830
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32netdemo.py
@@ -0,0 +1,231 @@
+import sys
+import win32api
+import win32net
+import win32netcon
+import win32security
+import getopt
+import traceback
+
+verbose_level = 0
+
+server = None # Run on local machine.
+
+def verbose(msg):
+    if verbose_level:
+        print msg
+
+def CreateUser():
+    "Creates a new test user, then deletes the user"
+    testName = "PyNetTestUser"
+    try:
+        win32net.NetUserDel(server, testName)
+        print "Warning - deleted user before creating it!"
+    except win32net.error:
+        pass
+
+    d = {}
+    d['name'] = testName
+    d['password'] = 'deleteme'
+    d['priv'] = win32netcon.USER_PRIV_USER
+    d['comment'] = "Delete me - created by Python test code"
+    d['flags'] = win32netcon.UF_NORMAL_ACCOUNT | win32netcon.UF_SCRIPT
+    win32net.NetUserAdd(server, 1, d)
+    try:
+        try:
+            win32net.NetUserChangePassword(server, testName, "wrong", "new")
+            print "ERROR: NetUserChangePassword worked with a wrong password!"
+        except win32net.error:
+            pass
+        win32net.NetUserChangePassword(server, testName, "deleteme", "new")
+    finally:
+        win32net.NetUserDel(server, testName)
+    print "Created a user, changed their password, and deleted them!"
+
+def UserEnum():
+    "Enumerates all the local users"
+    resume = 0
+    nuser = 0
+    while 1:
+        data, total, resume = win32net.NetUserEnum(server, 3, win32netcon.FILTER_NORMAL_ACCOUNT, resume)
+        verbose("Call to NetUserEnum obtained %d entries of %d total" % (len(data), total))
+        for user in data:
+            verbose("Found user %s" % user['name'])
+            nuser = nuser + 1
+        if not resume:
+            break
+    assert nuser, "Could not find any users!"
+    print "Enumerated all the local users"
+
+def GroupEnum():
+    "Enumerates all the domain groups"
+    nmembers = 0
+    resume = 0
+    while 1:
+        data, total, resume = win32net.NetGroupEnum(server, 1, resume)
+#               print "Call to NetGroupEnum obtained %d entries of %d total" % (len(data), total)
+        for group in data:
+            verbose("Found group %(name)s:%(comment)s " % group)
+            memberresume = 0
+            while 1:
+                memberdata, total, memberresume = win32net.NetGroupGetUsers(server, group['name'], 0, resume)
+                for member in memberdata:
+                    verbose(" Member %(name)s" % member)
+                    nmembers = nmembers + 1
+                if memberresume==0:
+                    break
+        if not resume:
+            break
+    assert nmembers, "Couldnt find a single member in a single group!"
+    print "Enumerated all the groups"
+
+def LocalGroupEnum():
+    "Enumerates all the local groups"
+    resume = 0
+    nmembers = 0
+    while 1:
+        data, total, resume = win32net.NetLocalGroupEnum(server, 1, resume)
+        for group in data:
+            verbose("Found group %(name)s:%(comment)s " % group)
+            memberresume = 0
+            while 1:
+                memberdata, total, memberresume = win32net.NetLocalGroupGetMembers(server, group['name'], 2, resume)
+                for member in memberdata:
+                    # Just for the sake of it, we convert the SID to a username
+                    username, domain, type = win32security.LookupAccountSid(server, member['sid'])
+                    nmembers = nmembers + 1
+                    verbose(" Member %s (%s)" % (username, member['domainandname']))
+                if memberresume==0:
+                    break
+        if not resume:
+            break
+    assert nmembers, "Couldnt find a single member in a single group!"
+    print "Enumerated all the local groups"
+
+def ServerEnum():
+    "Enumerates all servers on the network"
+    resume = 0
+    while 1:
+        data, total, resume = win32net.NetServerEnum(server, 100, win32netcon.SV_TYPE_ALL, None, resume)
+        for s in data:
+            verbose("Found server %s" % s['name'])
+            # Now loop over the shares.
+            shareresume=0
+            while 1:
+                sharedata, total, shareresume = win32net.NetShareEnum(server, 2, shareresume)
+                for share in sharedata:
+                    verbose(" %(netname)s (%(path)s):%(remark)s - in use by %(current_uses)d users" % share)
+                if not shareresume:
+                    break
+        if not resume:
+            break
+    print "Enumerated all the servers on the network"
+
+def LocalGroup(uname=None):
+    "Creates a local group, adds some members, deletes them, then removes the group"
+    level = 3
+    if uname is None: uname=win32api.GetUserName()
+    if uname.find("\\")<0:
+        uname = win32api.GetDomainName() + "\\" + uname
+    group = 'python_test_group'
+    # delete the group if it already exists
+    try:
+        win32net.NetLocalGroupDel(server, group)
+        print "WARNING: existing local group '%s' has been deleted."
+    except win32net.error:
+        pass
+    group_data = {'name': group}
+    win32net.NetLocalGroupAdd(server, 1, group_data)
+    try:
+        u={'domainandname': uname}
+        win32net.NetLocalGroupAddMembers(server, group, level, [u])
+        mem, tot, res = win32net.NetLocalGroupGetMembers(server, group, level)
+        print "members are", mem
+        if mem[0]['domainandname'] != uname:
+            print "ERROR: LocalGroup just added %s, but members are %r" % (uname, mem)
+        # Convert the list of dicts to a list of strings.
+        win32net.NetLocalGroupDelMembers(server, group, [m['domainandname'] for m in mem])
+    finally:
+        win32net.NetLocalGroupDel(server, group)
+    print "Created a local group, added and removed members, then deleted the group"
+
+def GetInfo(userName=None):
+    "Dumps level 3 information about the current user"
+    if userName is None: userName=win32api.GetUserName()
+    print "Dumping level 3 information about user"
+    info = win32net.NetUserGetInfo(server, userName, 3)
+    for key, val in info.items():
+        verbose("%s=%s" % (key,val))
+
+def SetInfo(userName=None):
+    "Attempts to change the current users comment, then set it back"
+    if userName is None: userName=win32api.GetUserName()
+    oldData = win32net.NetUserGetInfo(server, userName, 3)
+    try:
+        d = oldData.copy()
+        d["usr_comment"] = "Test comment"
+        win32net.NetUserSetInfo(server, userName, 3, d)
+        new = win32net.NetUserGetInfo(server, userName, 3)['usr_comment']
+        if  str(new) != "Test comment":
+            raise RuntimeError, "Could not read the same comment back - got %s" % new
+        print "Changed the data for the user"
+    finally:
+        win32net.NetUserSetInfo(server, userName, 3, oldData)
+
+def SetComputerInfo():
+    "Doesn't actually change anything, just make sure we could ;-)"
+    # Fetch the level-502 workstation info; per the original author's note
+    # below, writing it back is not possible here.
+    info = win32net.NetWkstaGetInfo(None, 502)
+    # *sob* - but we can't!  Why not!!!
+    # win32net.NetWkstaSetInfo(None, 502, info)
+
+def usage(tests):
+    import os
+    print "Usage: %s [-s server ] [-v] [Test ...]" % os.path.basename(sys.argv[0])
+    print "  -v : Verbose - print more information"
+    print "  -s : server - execute the tests against the named server"
+    print "where Test is one of:"
+    for t in tests:
+        print t.__name__,":", t.__doc__
+    print
+    print "If not tests are specified, all tests are run"
+    sys.exit(1)
+
+def main():
+    tests = []
+    for ob in globals().values():
+        if type(ob)==type(main) and ob.__doc__:
+            tests.append(ob)
+    opts, args = getopt.getopt(sys.argv[1:], "s:hv")
+    for opt, val in opts:
+        if opt=="-s":
+            global server
+            server = val
+        if opt=="-h":
+            usage(tests)
+        if opt=="-v":
+            global verbose_level
+            verbose_level = verbose_level + 1
+
+    if len(args)==0:
+        print "Running all tests - use '-h' to see command-line options..."
+        dotests = tests
+    else:
+        dotests = []
+        for arg in args:
+            for t in tests:
+                if t.__name__==arg:
+                    dotests.append(t)
+                    break
+            else:
+                print "Test '%s' unknown - skipping" % arg
+    if not len(dotests):
+        print "Nothing to do!"
+        usage(tests)
+    for test in dotests:
+        try:
+            test()
+        except:
+            print "Test %s failed" % test.__name__
+            traceback.print_exc()
+
+if __name__=='__main__':
+    # Parse options and run the selected (or all) tests.
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32rcparser_demo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32rcparser_demo.py
new file mode 100644
index 0000000..6898a7a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32rcparser_demo.py
@@ -0,0 +1,83 @@
+# A demo of the win32rcparser module and using win32gui
+
+import win32gui
+import win32api
+import win32con
+import win32rcparser
+import commctrl
+import sys, os
+
+# We use the .rc file in our 'test' directory.
+# __file__ is not defined when run as a main script on pre-2.3 Pythons,
+# so fall back to sys.argv[0].
+try:
+    __file__
+except NameError: # pre 2.3
+    __file__ = sys.argv[0]
+
+this_dir = os.path.abspath(os.path.dirname(__file__))
+# Full path to the test .rc file, located relative to this demo.
+g_rcname = os.path.abspath(
+        os.path.join( this_dir, "..", "test", "win32rcparser", "test.rc"))
+
+# Fail early with a helpful message if the resource file is missing.
+if not os.path.isfile(g_rcname):
+    raise RuntimeError, "Can't locate test.rc (should be at '%s')" % (g_rcname,)
+
+class DemoWindow:
+    def __init__(self, dlg_template):
+        self.dlg_template = dlg_template
+
+    def CreateWindow(self):
+        self._DoCreate(win32gui.CreateDialogIndirect)
+
+    def DoModal(self):
+        return self._DoCreate(win32gui.DialogBoxIndirect)
+
+    def _DoCreate(self, fn):
+        message_map = {
+            win32con.WM_INITDIALOG: self.OnInitDialog,
+            win32con.WM_CLOSE: self.OnClose,
+            win32con.WM_DESTROY: self.OnDestroy,
+            win32con.WM_COMMAND: self.OnCommand,
+        }
+        return fn(0, self.dlg_template, 0, message_map)
+
+    def OnInitDialog(self, hwnd, msg, wparam, lparam):
+        self.hwnd = hwnd
+        # centre the dialog
+        desktop = win32gui.GetDesktopWindow()
+        l,t,r,b = win32gui.GetWindowRect(self.hwnd)
+        dt_l, dt_t, dt_r, dt_b = win32gui.GetWindowRect(desktop)
+        centre_x, centre_y = win32gui.ClientToScreen( desktop, ( (dt_r-dt_l)/2, (dt_b-dt_t)/2) )
+        win32gui.MoveWindow(hwnd, centre_x-(r/2), centre_y-(b/2), r-l, b-t, 0)
+
+    def OnCommand(self, hwnd, msg, wparam, lparam):
+        # Needed to make OK/Cancel work - no other controls are handled.
+        id = win32api.LOWORD(wparam)
+        if id in [win32con.IDOK, win32con.IDCANCEL]:
+            win32gui.EndDialog(hwnd, id)
+
+    def OnClose(self, hwnd, msg, wparam, lparam):
+        win32gui.EndDialog(hwnd, 0)
+
+    def OnDestroy(self, hwnd, msg, wparam, lparam):
+        pass
+
+def DemoModal():
+    # Load the .rc file.
+    resources = win32rcparser.Parse(g_rcname)
+    for id, ddef in resources.dialogs.items():
+        print "Displaying dialog", id
+        w=DemoWindow(ddef)
+        w.DoModal()
+
+if __name__=='__main__':
+    flags = 0
+    for flag in """ICC_DATE_CLASSES ICC_ANIMATE_CLASS ICC_ANIMATE_CLASS 
+                   ICC_BAR_CLASSES ICC_COOL_CLASSES ICC_DATE_CLASSES
+                   ICC_HOTKEY_CLASS ICC_INTERNET_CLASSES ICC_LISTVIEW_CLASSES
+                   ICC_PAGESCROLLER_CLASS ICC_PROGRESS_CLASS ICC_TAB_CLASSES
+                   ICC_TREEVIEW_CLASSES ICC_UPDOWN_CLASS ICC_USEREX_CLASSES
+                   ICC_WIN95_CLASSES  """.split():
+        flags |= getattr(commctrl, flag)
+    win32gui.InitCommonControlsEx(flags)
+    # Need to do this go get rich-edit working.
+    win32api.LoadLibrary("riched20.dll")
+    DemoModal()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32servicedemo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32servicedemo.py
new file mode 100644
index 0000000..39fdcc9e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32servicedemo.py
@@ -0,0 +1,23 @@
+import win32service
+import win32con
+
+
+def EnumServices():
+    resume = 0
+    accessSCM = win32con.GENERIC_READ
+    accessSrv = win32service.SC_MANAGER_ALL_ACCESS
+
+    #Open Service Control Manager
+    hscm = win32service.OpenSCManager(None, None, accessSCM)
+
+    #Enumerate Service Control Manager DB
+
+    typeFilter = win32service.SERVICE_WIN32
+    stateFilter = win32service.SERVICE_STATE_ALL
+
+    statuses = win32service.EnumServicesStatus(hscm, typeFilter, stateFilter)
+    for (short_name, desc, status) in statuses:
+        print short_name, desc, status
+
+
+EnumServices()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/netresource.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/netresource.htm
new file mode 100644
index 0000000..79d5702b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/netresource.htm
@@ -0,0 +1,238 @@
+<html>
+
+<head>
+<meta http-equiv="Content-Type"
+content="text/html; charset=iso-8859-2">
+<meta name="GENERATOR" content="Microsoft FrontPage Express 2.0">
+<title>WIN32WNET</title>
+</head>
+
+<body bgcolor="#FFFFFF">
+
+<h1 align="center">WIN32WNET</h1>
+
+<h4 align="left">PyNETRESOURCE</h4>
+
+<p><font size="2">The PyNETRESOURCE object is the Python
+encapsulation of the Win32 NETRESOURCE structure used with
+WNetXXX networking functions and other Win32 APIs. The
+implementation can be compiled for Ascii or Unicode and has been
+tested on Python 1.5.1.</font></p>
+
+<h5>PyNETRESOURCE Object Members:</h5>
+
+<ul>
+    <li><font size="2">NETRESOURCE m_nr -- the actual NETRESOURCE
+        data structure</font></li>
+    <li><font size="2">TCHAR szLName -- local storage for the
+        lpLocalName member of the NETRESOURCE structure.<br
+        clear="right">
+        The names are stored locally and are not stored as native
+        Python strings. String length is 256 TCHARs maximum.</font></li>
+</ul>
+
+<ul>
+    <li><font size="2">TCHAR szRName -- lpRemoteName storage. See
+        szLName.</font></li>
+    <li><font size="2">TCHAR szProv -- lpProvider storage. See
+        szLName.</font></li>
+    <li><font size="2">TCHAR szComment -- lpComment storage. See
+        szLName.</font></li>
+</ul>
+
+<h5><font size="2">Relevant Python Methods:</font></h5>
+
+<ul>
+    <li><font size="2">x = win32wnet.NETRESOURCE() will
+        instantiate a new PyNETRESOURCE Object</font></li>
+    <li><font size="2">x.dwScope, x.dwType, x.dwDisplayType,
+        x.dwUsage -- Integer attributes corresponding to the
+        NETRESOURCE structure members. Attributes are both
+        readable and writeable using standard Python attribute
+        semantics. </font></li>
+    <li><font size="2">X.lpLocalName, x.lpRemoteName,
+        x.lpComment, x.lpProvider -- String pointers in the
+        NETRESOURCE structure. They are wrapped to point to
+        szLName, etc.... These strings are not stored as Python
+        strings and are therefore both readable and writeable.
+        Using standard Python attribute semantics, the attributes
+        return Python strings upon query and take python strings
+        as input.</font></li>
+</ul>
+
+<h5>'C' Programming Interface:</h5>
+
+<ul>
+    <li><font size="2">BOOL PyWinObject_AsNETRESOURCE(PyObject
+        *ob, NETRESOURCE **ppNetresource, BOOL bNoneOK = TRUE);</font></li>
+    <li><font size="2">PyObject
+        *PyWinObject_FromNETRESOURCE(const NETRESOURCE
+        *pNetresource</font></li>
+</ul>
+
+<h4>WIN32WNET Functions</h4>
+
+<p><strong>win32wnet.py</strong> is the Python extension
+providing access to the WNetXXX Win32APIs. This module is fully
+compilable for both Ascii and Unicode and contains conditional,
+if untested as of this writing, compiles for Windows CE. This
+module has been tested with Python 1.5.1 and Windows NT
+workstation 4.0 SP3.</p>
+
+<dl>
+    <dt><strong>WNetAddConnection2</strong> (</dt>
+    <dd><dl>
+            <dd><dl>
+                    <dd><dl>
+                            <dd>INTEGER Type, -
+                                RESOURCETYPE_DISK,
+                                RESOURCETYPE_PRINT, or
+                                RESOURCETYPE_ANY <br>
+                                STRING LocalName, - String or
+                                None<br>
+                                STRING RemoteName, - String
+                                (required to be in network format)<br>
+                                STRING ProviderName, - String or
+                                None<br>
+                                STRING Username,<br>
+                                STRING Password</dd>
+                            <dd>)</dd>
+                            <dt>Returns: PyNone</dt>
+                        </dl>
+                    </dd>
+                </dl>
+            </dd>
+            <dd>The <strong>WNetAddConnection2</strong> function
+                makes a connection to a network resource. The
+                function can redirect a local device to the
+                network resource. This function was written
+                before the PyNETRESOURCE object was created and
+                thus does not use PyNETRESOURCE in this release
+                (this will be updated at a later date). The first
+                four parameters correspond to the NETRESOURCE
+                object that is being constructed for the Win32API
+                call. UserName and Password are obvious. Passing
+                None for UserName and Password will attempt a
+                NULL session connection. NOTE: when compiled for
+                Windows CE, this function is conditionally
+                compiled to call the <strong>WNetAddConnection3</strong>
+                API interface with a NULL hwnd parameter, making
+                it functionally equivalent to WNetAddConnection2.
+                WNetAddConnection3 is the only interface
+                supported by Windows CE.</dd>
+        </dl>
+    </dd>
+</dl>
+
+<dl>
+    <dt><strong>WNetCancelConnection2</strong> (</dt>
+    <dd><dl>
+            <dd><dl>
+                    <dd><dl>
+                            <dd>STRING Name,<br>
+                                INTEGER Flags,<br>
+                                INTEGER Force</dd>
+                            <dd>)</dd>
+                            <dt>Returns: PyNone</dt>
+                        </dl>
+                    </dd>
+                </dl>
+            </dd>
+            <dd>The <strong>WNetCancelConnection2 </strong>function
+                breaks an existing network connection. It can
+                also be used to remove remembered network
+                connections that are not currently connected. The
+                Python parameters correspond directly to the
+                Win32API call parameters. The Name string is the
+                name of the connection you wish to drop (i.e. O:,
+                <a href="\\bexar\support">\\bexar\support</a>,
+                etc...). The Flags parameter indicates whether the
+                permanent connection information should be
+                updated or not. A value of zero indicates that no
+                stored information about the connection should be
+                updated. A value of CONNECT_UPDATE_PROFILE
+                indicates that the persistent connection
+                information for this connection should be updated
+                (deleted). The Force parameter is a boolean
+                indicating whether connection termination should
+                be forced (i.e. open files and connects ignored).</dd>
+        </dl>
+    </dd>
+</dl>
+
+<dl>
+    <dt><strong>WNetOpenEnum</strong> (</dt>
+    <dd><dl>
+            <dd><dl>
+                    <dd>INTEGER Scope, - Specifies the scope of
+                        the enumeration.<br>
+                        INTEGER Type, - Specifies the resource
+                        types to enumerate.<br>
+                        INTEGER Usage, - Specifies the resource
+                        usage to be enumerated.<br>
+                        OBJECT NetResource - Python PyNETRESOURCE
+                        object.</dd>
+                    <dd>)<br>
+                        </dd>
+                    <dt>Returns: PyHANDLE</dt>
+                </dl>
+            </dd>
+            <dd>The <strong>WNetOpenEnum</strong> function starts
+                an enumeration of network resources or existing
+                connections. For full documentation, see the
+                Win32API references.</dd>
+        </dl>
+    </dd>
+</dl>
+
+<dl>
+    <dt><strong>WNetCloseEnum</strong> (</dt>
+    <dd><dl>
+            <dd><dl>
+                    <dd>OBJECT Handle - PyHANDLE object returned
+                        from a previous WNetOpenEnum call.<br>
+                        )</dd>
+                    <dt>Returns: PyNone</dt>
+                </dl>
+            </dd>
+            <dd>Closes a PyHANDLE that represents an Open
+                Enumeration (from WNetOpenEnum)</dd>
+        </dl>
+    </dd>
+</dl>
+
+<dl>
+    <dt><strong>WNetEnumResource</strong> (</dt>
+    <dd><dl>
+            <dd><dl>
+                    <dd>OBJECT Handle, - HANDLE (in form of
+                        PyHANDLE) to an open Enumeration Object
+                        (from WNetOpenEnum)<br>
+                        OPTIONAL INTEGER - how many items to try
+                        to receive---0 will default to 64.</dd>
+                    <dd>)</dd>
+                    <dt>Returns: PyList</dt>
+                    <dd>The list contains PyNETRESOURCE objects.
+                        The total number of PyNETRESOURCE objects
+                        will be &lt;= number requested (excepting
+                        the default behavior of requesting 0,
+                        which returns up to 64)</dd>
+                </dl>
+            </dd>
+            <dd>Implements the <strong>WNetEnumResource</strong>
+                Win32 API call. Successive calls to
+                WNetEnumResource will enumerate starting where
+                the previous call stopped. That is, the
+                enumeration is not reset on successive calls
+                UNLESS the enumeration handle is closed and
+                reopened. This lets you process an enumeration in
+                small chunks (as small as 1 item at a time) and
+                still fully enumerate a network object! Network
+                resources are not guaranteed to be returned in
+                any particular order.<br>
+                </dd>
+        </dl>
+    </dd>
+</dl>
+</body>
+</html>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/testwnet.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/testwnet.py
new file mode 100644
index 0000000..f29a34b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/testwnet.py
@@ -0,0 +1,74 @@
+import win32wnet
+import sys
+from winnetwk import *
+import os
+
+possible_shares = []
+
+def _doDumpHandle(handle, level = 0):
+		"""Recursively print every resource reachable under an open WNet
+		enumeration handle, indenting one space per nesting level.
+		Shares found along the way are appended to the module-level
+		possible_shares list."""
+		indent = " " * level
+		while 1:
+			items = win32wnet.WNetEnumResource(handle, 0)
+			if len(items)==0:
+				break
+			for item in items:
+				try:
+					if item.dwDisplayType == RESOURCEDISPLAYTYPE_SHARE:
+						print indent + "Have share with name:", item.lpRemoteName
+						possible_shares.append(item)
+					elif item.dwDisplayType == RESOURCEDISPLAYTYPE_GENERIC:
+						print indent + "Have generic resource with name:", item.lpRemoteName
+					else:
+						# Try generic!
+						print indent + "Enumerating " + item.lpRemoteName,
+						k = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET, RESOURCETYPE_ANY,0,item)
+						print
+						_doDumpHandle(k, level + 1)
+						win32wnet.WNetCloseEnum(k) # could do k.Close(), but this is a good test!
+				except win32wnet.error, details:
+					print indent + "Couldn't enumerate this resource: " + details[2]
+
+def TestOpenEnum():
+	"""Open a global-net enumeration rooted at the top of the network,
+	dump everything reachable via _doDumpHandle, and close the handle
+	even if the dump fails."""
+	print "Enumerating all resources on the network - this may take some time..."
+	handle = win32wnet.WNetOpenEnum(RESOURCE_GLOBALNET,RESOURCETYPE_ANY,0,None)
+
+	try:
+		_doDumpHandle(handle)
+	finally:
+		handle.Close()
+	print "Finished dumping all resources."
+
+def TestConnection():
+	"""Try to map a local drive to each share found by TestOpenEnum,
+	exercise WNetGetUniversalName/WNetGetUser against it, then cancel
+	the connection.  Stops after the first share that connects."""
+	if len(possible_shares)==0:
+		print "Couldn't find any potential shares to connect to"
+		return
+	localName = "Z:" # need better way! (assumes Z: is not already in use - TODO confirm)
+	for share in possible_shares:
+		print "Attempting connection of", localName, "to", share.lpRemoteName
+		try:
+			win32wnet.WNetAddConnection2(share.dwType, localName, share.lpRemoteName)
+		except win32wnet.error, details:
+			print "Couldn't connect: " + details[2]
+			continue
+		# Have a connection.
+		try:
+			fname = os.path.join(localName + "\\", os.listdir(localName + "\\")[0])
+			try:
+				print "Universal name of '%s' is '%s'" % (fname, win32wnet.WNetGetUniversalName(fname))
+			except win32wnet.error, details:
+				print "Couldn't get universal name of '%s': %s" % (fname, details[2])
+			print "User name for this connection is", win32wnet.WNetGetUser(localName)
+		finally:
+			win32wnet.WNetCancelConnection2(localName, 0, 0)
+			# Only do the first share that succeeds.
+			break
+
+def TestGetUser():
+	"""Check that WNetGetUser() called with no argument agrees with an
+	explicit WNetGetUser(None)."""
+	u = win32wnet.WNetGetUser()
+	print "Current global user is", `u`
+	if u != win32wnet.WNetGetUser(None):
+		raise RuntimeError, "Default value didnt seem to work!"
+
+# Run the demo: user query first, then the (potentially slow) network
+# enumeration, then a connection attempt against a discovered share.
+TestGetUser()
+TestOpenEnum()
+TestConnection()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/winnetwk.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/winnetwk.py
new file mode 100644
index 0000000..192538b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/win32wnet/winnetwk.py
@@ -0,0 +1,98 @@
+# Generated by h2py from d:\mssdk\include\winnetwk.h
+# Network-provider type codes.
+WNNC_NET_MSNET = 0x00010000
+WNNC_NET_LANMAN = 0x00020000
+WNNC_NET_NETWARE = 0x00030000
+WNNC_NET_VINES = 0x00040000
+WNNC_NET_10NET = 0x00050000
+WNNC_NET_LOCUS = 0x00060000
+WNNC_NET_SUN_PC_NFS = 0x00070000
+WNNC_NET_LANSTEP = 0x00080000
+WNNC_NET_9TILES = 0x00090000
+WNNC_NET_LANTASTIC = 0x000A0000
+WNNC_NET_AS400 = 0x000B0000
+WNNC_NET_FTP_NFS = 0x000C0000
+WNNC_NET_PATHWORKS = 0x000D0000
+WNNC_NET_LIFENET = 0x000E0000
+WNNC_NET_POWERLAN = 0x000F0000
+WNNC_NET_BWNFS = 0x00100000
+WNNC_NET_COGENT = 0x00110000
+WNNC_NET_FARALLON = 0x00120000
+WNNC_NET_APPLETALK = 0x00130000
+WNNC_NET_INTERGRAPH = 0x00140000
+WNNC_NET_SYMFONET = 0x00150000
+WNNC_NET_CLEARCASE = 0x00160000
+WNNC_NET_FRONTIER = 0x00170000
+WNNC_NET_BMC = 0x00180000
+WNNC_NET_DCE = 0x00190000
+WNNC_NET_DECORB = 0x00200000
+WNNC_NET_PROTSTOR = 0x00210000
+WNNC_NET_FJ_REDIR = 0x00220000
+WNNC_NET_DISTINCT = 0x00230000
+WNNC_NET_TWINS = 0x00240000
+WNNC_NET_RDR2SAMPLE = 0x00250000
+# Scope values (first argument to WNetOpenEnum - see testwnet.py).
+RESOURCE_CONNECTED = 0x00000001
+RESOURCE_GLOBALNET = 0x00000002
+RESOURCE_REMEMBERED = 0x00000003
+RESOURCE_RECENT = 0x00000004
+RESOURCE_CONTEXT = 0x00000005
+# Resource type values (dwType).
+RESOURCETYPE_ANY = 0x00000000
+RESOURCETYPE_DISK = 0x00000001
+RESOURCETYPE_PRINT = 0x00000002
+RESOURCETYPE_RESERVED = 0x00000008
+RESOURCETYPE_UNKNOWN = 0xFFFFFFFF
+# Resource usage flags (dwUsage).
+RESOURCEUSAGE_CONNECTABLE = 0x00000001
+RESOURCEUSAGE_CONTAINER = 0x00000002
+RESOURCEUSAGE_NOLOCALDEVICE = 0x00000004
+RESOURCEUSAGE_SIBLING = 0x00000008
+RESOURCEUSAGE_ATTACHED = 0x00000010
+RESOURCEUSAGE_ALL = (RESOURCEUSAGE_CONNECTABLE | RESOURCEUSAGE_CONTAINER | RESOURCEUSAGE_ATTACHED)
+RESOURCEUSAGE_RESERVED = 0x80000000
+# Display type values (dwDisplayType).
+RESOURCEDISPLAYTYPE_GENERIC = 0x00000000
+RESOURCEDISPLAYTYPE_DOMAIN = 0x00000001
+RESOURCEDISPLAYTYPE_SERVER = 0x00000002
+RESOURCEDISPLAYTYPE_SHARE = 0x00000003
+RESOURCEDISPLAYTYPE_FILE = 0x00000004
+RESOURCEDISPLAYTYPE_GROUP = 0x00000005
+RESOURCEDISPLAYTYPE_NETWORK = 0x00000006
+RESOURCEDISPLAYTYPE_ROOT = 0x00000007
+RESOURCEDISPLAYTYPE_SHAREADMIN = 0x00000008
+RESOURCEDISPLAYTYPE_DIRECTORY = 0x00000009
+RESOURCEDISPLAYTYPE_TREE = 0x0000000A
+RESOURCEDISPLAYTYPE_NDSCONTAINER = 0x0000000B
+NETPROPERTY_PERSISTENT = 1
+# Connection flags (WNetAddConnection2 / WNetCancelConnection2).
+CONNECT_UPDATE_PROFILE = 0x00000001
+CONNECT_UPDATE_RECENT = 0x00000002
+CONNECT_TEMPORARY = 0x00000004
+CONNECT_INTERACTIVE = 0x00000008
+CONNECT_PROMPT = 0x00000010
+CONNECT_NEED_DRIVE = 0x00000020
+CONNECT_REFCOUNT = 0x00000040
+CONNECT_REDIRECT = 0x00000080
+CONNECT_LOCALDRIVE = 0x00000100
+CONNECT_CURRENT_MEDIA = 0x00000200
+CONNECT_DEFERRED = 0x00000400
+CONNECT_RESERVED = 0xFF000000
+# Connection-dialog and disconnect-dialog flags.
+CONNDLG_RO_PATH = 0x00000001
+CONNDLG_CONN_POINT = 0x00000002
+CONNDLG_USE_MRU = 0x00000004
+CONNDLG_HIDE_BOX = 0x00000008
+CONNDLG_PERSIST = 0x00000010
+CONNDLG_NOT_PERSIST = 0x00000020
+DISC_UPDATE_PROFILE = 0x00000001
+DISC_NO_FORCE = 0x00000040
+# Info levels for WNetGetUniversalName.
+UNIVERSAL_NAME_INFO_LEVEL = 0x00000001
+REMOTE_NAME_INFO_LEVEL = 0x00000002
+# Name-formatting flags.
+WNFMT_MULTILINE = 0x01
+WNFMT_ABBREVIATED = 0x02
+WNFMT_INENUM = 0x10
+WNFMT_CONNECTION = 0x20
+# Miscellaneous provider/connection flags from winnetwk.h.
+NETINFO_DLL16 = 0x00000001
+NETINFO_DISKRED = 0x00000004
+NETINFO_PRINTERRED = 0x00000008
+RP_LOGON = 0x01
+RP_INIFILE = 0x02
+PP_DISPLAYERRORS = 0x01
+WNCON_FORNETCARD = 0x00000001
+WNCON_NOTROUTED = 0x00000002
+WNCON_SLOWLINK = 0x00000004
+WNCON_DYNAMIC = 0x00000008
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/winprocess.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/winprocess.py
new file mode 100644
index 0000000..12951422
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/Demos/winprocess.py
@@ -0,0 +1,195 @@
+"""
+Windows Process Control
+
+winprocess.run launches a child process and returns the exit code.
+Optionally, it can:
+  redirect stdin, stdout & stderr to files
+  run the command as another user
+  limit the process's running time
+  control the process window (location, size, window state, desktop)
+Works on Windows NT, 2000 & XP. Requires Mark Hammond's win32
+extensions.
+
+This code is free for any purpose, with no warranty of any kind.
+-- John B. Dell'Aquila <jbd@alum.mit.edu>
+"""
+
+import win32api, win32process, win32security
+import win32event, win32con, msvcrt, win32gui
+
+
+def logonUser(loginString):
+    """
+    Login as specified user and return handle.
+    loginString:  'Domain\nUser\nPassword'; for local
+        login use . or empty string as domain
+        e.g. '.\nadministrator\nsecret_password'
+    """
+    domain, user, passwd = loginString.split('\n')
+    # Interactive logon type, default provider; the returned handle is later
+    # passed to CreateProcessAsUser by Process.__init__.
+    return win32security.LogonUser(
+        user,
+        domain,
+        passwd,
+        win32con.LOGON32_LOGON_INTERACTIVE,
+        win32con.LOGON32_PROVIDER_DEFAULT
+        )
+
+
+class Process:
+    """
+    A Windows process.
+    """
+
+    def __init__(self, cmd, login=None,
+                 hStdin=None, hStdout=None, hStderr=None,
+                 show=1, xy=None, xySize=None,
+                 desktop=None):
+        """
+        Create a Windows process.
+        cmd:     command to run
+        login:   run as user 'Domain\nUser\nPassword'
+        hStdin, hStdout, hStderr:
+                 handles for process I/O; default is caller's stdin,
+                 stdout & stderr
+        show:    wShowWindow (0=SW_HIDE, 1=SW_NORMAL, ...)
+        xy:      window offset (x, y) of upper left corner in pixels
+        xySize:  window size (width, height) in pixels
+        desktop: lpDesktop - name of desktop e.g. 'winsta0\\default'
+                 None = inherit current desktop
+                 '' = create new desktop if necessary
+
+        User calling login requires additional privileges:
+          Act as part of the operating system [not needed on Windows XP]
+          Increase quotas
+          Replace a process level token
+        Login string must EITHER be an administrator's account
+        (ordinary user can't access current desktop - see Microsoft
+        Q165194) OR use desktop='' to run another desktop invisibly
+        (may be very slow to startup & finalize).
+        """
+        si = win32process.STARTUPINFO()
+        # NOTE(review): ^ (XOR) is used to accumulate flag bits here and
+        # below; it only behaves like | because each flag is a distinct bit
+        # that is set at most once.
+        si.dwFlags = (win32con.STARTF_USESTDHANDLES ^
+                      win32con.STARTF_USESHOWWINDOW)
+        # Fall back to the caller's standard handles for any stream the
+        # caller did not redirect.
+        if hStdin is None:
+            si.hStdInput = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE)
+        else:
+            si.hStdInput = hStdin
+        if hStdout is None:
+            si.hStdOutput = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE)
+        else:
+            si.hStdOutput = hStdout
+        if hStderr is None:
+            si.hStdError = win32api.GetStdHandle(win32api.STD_ERROR_HANDLE)
+        else:
+            si.hStdError = hStderr
+        si.wShowWindow = show
+        if xy is not None:
+            si.dwX, si.dwY = xy
+            si.dwFlags ^= win32con.STARTF_USEPOSITION
+        if xySize is not None:
+            si.dwXSize, si.dwYSize = xySize
+            si.dwFlags ^= win32con.STARTF_USESIZE
+        if desktop is not None:
+            si.lpDesktop = desktop
+        procArgs = (None,  # appName
+                    cmd,  # commandLine
+                    None,  # processAttributes
+                    None,  # threadAttributes
+                    1,  # bInheritHandles
+                    win32process.CREATE_NEW_CONSOLE,  # dwCreationFlags
+                    None,  # newEnvironment
+                    None,  # currentDirectory
+                    si)  # startupinfo
+        if login is not None:
+            # Launch under the supplied credentials: impersonate around the
+            # CreateProcessAsUser call and revert afterwards.
+            hUser = logonUser(login)
+            win32security.ImpersonateLoggedOnUser(hUser)
+            procHandles = win32process.CreateProcessAsUser(hUser, *procArgs)
+            win32security.RevertToSelf()
+        else:
+            procHandles = win32process.CreateProcess(*procArgs)
+        self.hProcess, self.hThread, self.PId, self.TId = procHandles
+
+    def wait(self, mSec=None):
+        """
+        Wait for process to finish or for specified number of
+        milliseconds to elapse.  Returns the WaitForSingleObject result
+        (win32event.WAIT_OBJECT_0 means the process has exited).
+        """
+        if mSec is None:
+            mSec = win32event.INFINITE
+        return win32event.WaitForSingleObject(self.hProcess, mSec)
+
+    def kill(self, gracePeriod=5000):
+        """
+        Kill process. Try for an orderly shutdown via WM_CLOSE.  If
+        still running after gracePeriod (5 sec. default), terminate.
+        """
+        win32gui.EnumWindows(self.__close__, 0)
+        if self.wait(gracePeriod) != win32event.WAIT_OBJECT_0:
+            win32process.TerminateProcess(self.hProcess, 0)
+            win32api.Sleep(100) # wait for resources to be released
+
+    def __close__(self, hwnd, dummy):
+        """
+        EnumWindows callback - sends WM_CLOSE to any window
+        owned by this process.
+        """
+        TId, PId = win32process.GetWindowThreadProcessId(hwnd)
+        if PId == self.PId:
+            win32gui.PostMessage(hwnd, win32con.WM_CLOSE, 0, 0)
+
+    def exitCode(self):
+        """
+        Return process exit code.
+        """
+        return win32process.GetExitCodeProcess(self.hProcess)
+
+
+def run(cmd, mSec=None, stdin=None, stdout=None, stderr=None, **kw):
+    """
+    Run cmd as a child process and return exit code.
+    mSec:  terminate cmd after specified number of milliseconds
+    stdin, stdout, stderr:
+           file objects for child I/O (use hStdin etc. to attach
+           handles instead of files); default is caller's stdin,
+           stdout & stderr;
+    kw:    see Process.__init__ for more keyword options
+    Raises WindowsError if the child does not finish within mSec
+    (the child is killed first).
+    """
+    # Convert file objects to inheritable OS handles for Process.
+    if stdin is not None:
+        kw['hStdin'] = msvcrt.get_osfhandle(stdin.fileno())
+    if stdout is not None:
+        kw['hStdout'] = msvcrt.get_osfhandle(stdout.fileno())
+    if stderr is not None:
+        kw['hStderr'] = msvcrt.get_osfhandle(stderr.fileno())
+    child = Process(cmd, **kw)
+    if child.wait(mSec) != win32event.WAIT_OBJECT_0:
+        child.kill()
+        raise WindowsError, 'process timeout exceeded'
+    return child.exitCode()
+
+
+if __name__ == '__main__':
+
+    # Pipe commands to a shell and display the output in notepad
+    print 'Testing winprocess.py...'
+
+    import tempfile
+
+    timeoutSeconds = 15
+    cmdString = """\
+REM      Test of winprocess.py piping commands to a shell.\r
+REM      This window will close in %d seconds.\r
+vol\r
+net user\r
+_this_is_a_test_of_stderr_\r
+""" % timeoutSeconds
+
+    # Feed the command script to cmd.exe via stdin and capture both stdout
+    # and stderr into the same temporary file.
+    cmd, out = tempfile.TemporaryFile(), tempfile.TemporaryFile()
+    cmd.write(cmdString)
+    cmd.seek(0)
+    print 'CMD.EXE exit code:', run('cmd.exe', show=0, stdin=cmd,
+                                    stdout=out, stderr=out)
+    cmd.close()
+    # NOTE(review): out.file.name relies on TemporaryFile exposing the
+    # underlying file object via .file - confirm this holds on the target
+    # Python version.
+    print 'NOTEPAD exit code:', run('notepad.exe %s' % out.file.name,
+                                    show=win32con.SW_MAXIMIZE,
+                                    mSec=timeoutSeconds*1000)
+    out.close()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/_win32sysloader.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/_win32sysloader.pyd
new file mode 100644
index 0000000..7b1ce9e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/_win32sysloader.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/_winxptheme.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/_winxptheme.pyd
new file mode 100644
index 0000000..239e4a9d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/_winxptheme.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/dbi.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/dbi.pyd
new file mode 100644
index 0000000..9907e71
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/dbi.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/include/PyWinTypes.h b/depot_tools/release/win/python_24/Lib/site-packages/win32/include/PyWinTypes.h
new file mode 100644
index 0000000..6857503
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/include/PyWinTypes.h
@@ -0,0 +1,630 @@
+
+#ifndef __PYWINTYPES_H__
+#define __PYWINTYPES_H__
+
+// If building under a GCC, tweak what we need.
+#if defined(__GNUC__) && defined(_POSIX_C_SOURCE)
+    // python.h complains if _POSIX_C_SOURCE is already defined
+#	undef _POSIX_C_SOURCE
+#endif
+
+// Python.h and Windows.h both protect themselves from multiple
+// includes - so it is safe to do here (and provides a handy
+// choke point for #include vagaries
+#include "Python.h"
+#include "windows.h"
+
+// Lars: for WAVEFORMATEX
+#include "mmsystem.h"
+
+// Do we want to use the builtin Unicode object?
+// If defined, we use the standard builtin type.
+// If not define, we have our own Unicode type
+// (but that doesnt work seamlessly with PyString objects)
+
+// For 1.6+ builds, this will be ON.
+// For 1.5 builds, this will be OFF
+#if (PY_VERSION_HEX >= 0x01060000)
+#define PYWIN_USE_PYUNICODE
+#endif
+
+// *** NOTE *** FREEZE_PYWINTYPES is deprecated.  It used to be used
+// by the 'freeze' tool, but now py2exe etc do a far better job, and 
+// don't require a custom built pywintypes DLL.
+#ifdef FREEZE_PYWINTYPES
+	/* The pywintypes module is being included in a frozen .EXE/.DLL */
+#	define PYWINTYPES_EXPORT
+#else
+#	ifdef BUILD_PYWINTYPES
+		/* We are building pywintypesxx.dll */
+#		define PYWINTYPES_EXPORT __declspec(dllexport)
+#else
+		/* This module uses pywintypesxx.dll */
+#		define PYWINTYPES_EXPORT __declspec(dllimport)
+#		if defined(_MSC_VER)
+#			if defined(DEBUG) || defined(_DEBUG)
+#				pragma comment(lib,"pywintypes_d.lib")
+#			else
+#				pragma comment(lib,"pywintypes.lib")
+#			endif // DEBUG/_DEBUG
+#		endif // _MSC_VER
+#	endif // BUILD_PYWINTYPES
+#endif // FREEZE_PYWINTYPES
+
+#include <tchar.h>
+#ifdef MS_WINCE
+// These macros caused grief on CE once (do they still?)
+#	ifndef IN
+#		define IN
+#	endif
+#	ifdef OUT
+#		undef OUT
+#	endif
+#	ifndef OUT
+#		define OUT
+#	endif
+// Having trouble making these work for Palm PCs??
+// NOTE: These are old - for Windows CE 1 devices, and well
+// before the PPC platform.  It is unlikely recent CE toolkits
+// still need all this magic.
+#	ifndef PYWIN_HPC /* Palm PC */
+#		define NO_PYWINTYPES_TIME
+#		define NO_PYWINTYPES_IID
+#		define NO_PYWINTYPES_BSTR
+#	endif
+#endif // MS_WINCE
+/*
+** Error/Exception handling
+*/
+extern PYWINTYPES_EXPORT PyObject *PyWinExc_ApiError;
+// Register a Windows DLL that contains the messages in the specified range.
+extern PYWINTYPES_EXPORT BOOL PyWin_RegisterErrorMessageModule(DWORD first, DWORD last, HINSTANCE hmod);
+// Get the previously registered hmodule for an error code.
+extern PYWINTYPES_EXPORT HINSTANCE PyWin_GetErrorMessageModule(DWORD err);
+
+
+/* A global function that sets an API style error (ie, (code, fn, errTest)) */
+PYWINTYPES_EXPORT PyObject *PyWin_SetAPIError(char *fnName, long err = 0);
+
+/* Basic COM Exception handling.  The main COM exception object
+   is actually defined here.  However, the most useful functions
+   for raising the exception are still in the COM package.  Therefore,
+   you can use the fn below to raise a basic COM exception - no fancy error
+   messages available, just the HRESULT.  It will, however, _be_ a COM
+   exception, and therefore trappable like any other COM exception
+*/
+extern PYWINTYPES_EXPORT PyObject *PyWinExc_COMError;
+PYWINTYPES_EXPORT PyObject *PyWin_SetBasicCOMError(HRESULT hr);
+
+/*
+** String/UniCode support
+*/
+#ifdef PYWIN_USE_PYUNICODE
+	/* Python has built-in Unicode String support */
+#define PyUnicodeType PyUnicode_Type
+// PyUnicode_Check is defined.
+
+#else
+
+/* If a Python Unicode object exists, disable it. */
+#ifdef PyUnicode_Check
+#undef PyUnicode_Check
+#define PyUnicode_Check(ob)	((ob)->ob_type == &PyUnicodeType)
+#endif /* PyUnicode_Check */
+
+	/* Need our custom Unicode object */
+extern PYWINTYPES_EXPORT PyTypeObject PyUnicodeType; // the Type for PyUnicode
+#define PyUnicode_Check(ob)	((ob)->ob_type == &PyUnicodeType)
+
+
+// PyUnicode_AsUnicode clashes with the standard Python name - 
+// so if we are not using Python Unicode objects, we hide the
+// name with a #define.
+#define PyUnicode_AsUnicode(op) (((PyUnicode *)op)->m_bstrValue)
+//extern PYWINTYPES_EXPORT WCHAR *PyUnicode_AsUnicode(PyObject *op);
+
+#endif /* PYWIN_USE_PYUNICODE */
+
+extern PYWINTYPES_EXPORT int PyUnicode_Size(PyObject *op);
+
+#ifndef NO_PYWINTYPES_BSTR
+// Given a PyObject (string, Unicode, etc) create a "BSTR" with the value
+PYWINTYPES_EXPORT BOOL PyWinObject_AsBstr(PyObject *stringObject, BSTR *pResult, BOOL bNoneOK = FALSE, DWORD *pResultLen = NULL);
+// And free it when finished.
+PYWINTYPES_EXPORT void PyWinObject_FreeBstr(BSTR pResult);
+
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromBstr(const BSTR bstr, BOOL takeOwnership=FALSE);
+
+// Convert a "char *" to a BSTR - free via ::SysFreeString()
+PYWINTYPES_EXPORT BSTR PyWin_String_AsBstr(const char *str);
+
+#endif // NO_PYWINTYPES_BSTR
+
+// Given a string or Unicode object, get WCHAR characters.
+PYWINTYPES_EXPORT BOOL PyWinObject_AsWCHAR(PyObject *stringObject, WCHAR **pResult, BOOL bNoneOK = FALSE, DWORD *pResultLen = NULL);
+// And free it when finished.
+PYWINTYPES_EXPORT void PyWinObject_FreeWCHAR(BSTR pResult);
+
+// Given a PyObject (string, Unicode, etc) create a "char *" with the value
+// if pResultLen != NULL, it will be set to the result size NOT INCLUDING 
+// TERMINATOR (to be in line with SysStringLen, PyString_*, etc)
+PYWINTYPES_EXPORT BOOL PyWinObject_AsString(PyObject *stringObject, char **pResult, BOOL bNoneOK = FALSE, DWORD *pResultLen = NULL);
+// And free it when finished.
+PYWINTYPES_EXPORT void PyWinObject_FreeString(char *pResult);
+PYWINTYPES_EXPORT void PyWinObject_FreeString(WCHAR *pResult);
+
+/* ANSI/Unicode Support */
+/* If UNICODE defined, will be a BSTR - otherwise a char *
+   Either way - PyWinObject_FreeTCHAR() must be called
+*/
+
+#ifdef UNICODE
+#define PyWinObject_AsTCHAR PyWinObject_AsWCHAR
+#define PyWinObject_FreeTCHAR PyWinObject_FreeWCHAR
+#define PyWinObject_FromTCHAR PyWinObject_FromOLECHAR
+#define PyString_FromTCHAR PyString_FromUnicode
+#else /* not UNICODE */
+#define PyWinObject_AsTCHAR PyWinObject_AsString
+#define PyWinObject_FreeTCHAR PyWinObject_FreeString
+inline PyObject *PyWinObject_FromTCHAR( TCHAR *str ) {return PyString_FromString(str);}
+inline PyObject *PyWinObject_FromTCHAR( TCHAR *str, int numChars ) {return PyString_FromStringAndSize(str, numChars);}
+#define PyString_FromTCHAR PyString_FromString
+#endif
+
+#define PyWinObject_FromWCHAR PyWinObject_FromOLECHAR
+
+PYWINTYPES_EXPORT PyObject *PyString_FromUnicode( const OLECHAR *str );
+PYWINTYPES_EXPORT PyObject *PyUnicodeObject_FromString(const char *string);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromOLECHAR(const OLECHAR * str);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromOLECHAR(const OLECHAR * str, int numChars);
+
+#ifndef MS_WINCE
+// String support for buffers allocated via a function of your choice.
+PYWINTYPES_EXPORT BOOL PyWinObject_AsPfnAllocatedWCHAR(PyObject *stringObject, 
+                                                  void *(*pfnAllocator)(ULONG), 
+                                                  WCHAR **ppResult, 
+                                                  BOOL bNoneOK = FALSE,
+                                                  DWORD *pResultLen = NULL);
+
+// String support for buffers allocated via CoTaskMemAlloc and CoTaskMemFree
+PYWINTYPES_EXPORT BOOL PyWinObject_AsTaskAllocatedWCHAR(PyObject *stringObject, WCHAR **ppResult, BOOL bNoneOK /*= FALSE*/,DWORD *pResultLen /*= NULL*/);
+PYWINTYPES_EXPORT void PyWinObject_FreeTaskAllocatedWCHAR(WCHAR * str);
+#endif // MS_WINCE
+// String conversion - These must also be freed with PyWinObject_FreeString
+PYWINTYPES_EXPORT BOOL PyWin_WCHAR_AsString(WCHAR *input, DWORD inLen, char **pResult);
+PYWINTYPES_EXPORT BOOL PyWin_Bstr_AsString(BSTR input, char **pResult);
+PYWINTYPES_EXPORT BOOL PyWin_String_AsWCHAR(char *input, DWORD inLen, WCHAR **pResult);
+
+PYWINTYPES_EXPORT void PyWinObject_FreeString(char *str);
+PYWINTYPES_EXPORT void PyWinObject_FreeString(WCHAR *str);
+
+/*
+** LARGE_INTEGER objects
+*/
+#ifdef LONG_LONG
+	// Python got its own support for 64 bit ints as of Python 1.5.2.
+	// However, for 1.5.2 we stick without it - we use it for 1.6 and on.
+#	if (PY_VERSION_HEX < 0x01060000)
+#		define PYWIN_NO_PYTHON_LONG_LONG
+#	endif
+#else
+	// If LONG_LONG is undefined, we are still building pre 1.5.2, so
+	// we have no choice but to define it.
+#	define PYWIN_NO_PYTHON_LONG_LONG
+#endif
+
+// These need to be renamed.  For now, the old names still appear in the DLL.
+PYWINTYPES_EXPORT BOOL PyLong_AsTwoInts(PyObject *ob, int *hiint, unsigned *loint);
+PYWINTYPES_EXPORT PyObject *PyLong_FromTwoInts(int hidword, unsigned lodword);
+
+// These seem (to MH anyway :) to be better names than using "int".
+inline BOOL PyLong_AsTwoI32(PyObject *ob, int *hiint, unsigned *loint) {return PyLong_AsTwoInts(ob, hiint, loint);}
+inline PyObject *PyLong_FromTwoI32(int hidword, unsigned lodword) {return PyLong_FromTwoInts(hidword, lodword);}
+
+//AsLARGE_INTEGER takes either PyInteger, PyLong, (PyInteger, PyInteger)
+PYWINTYPES_EXPORT BOOL PyWinObject_AsLARGE_INTEGER(PyObject *ob, LARGE_INTEGER *pResult);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsULARGE_INTEGER(PyObject *ob, ULARGE_INTEGER *pResult);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromLARGE_INTEGER(LARGE_INTEGER &val);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromULARGE_INTEGER(ULARGE_INTEGER &val);
+#define PyLong_FromLARGE_INTEGER PyWinObject_FromLARGE_INTEGER
+#define PyLong_FromULARGE_INTEGER PyWinObject_FromULARGE_INTEGER
+
+PyObject *PyLong_FromI64(__int64 ival);
+BOOL PyLong_AsI64(PyObject *val, __int64 *lval);
+
+// Some boolean helpers for Python 2.2 and earlier
+#if (PY_VERSION_HEX < 0x02030000 && !defined(PYWIN_NO_BOOL_FROM_LONG))
+// PyBool_FromLong only in 2.3 and later
+inline PyObject *PyBool_FromLong(long v)
+{
+	PyObject *ret= v ? Py_True : Py_False;
+	Py_INCREF(ret);
+    return ret;
+}
+#endif
+
+/*
+** OVERLAPPED Object and API
+*/
+class PyOVERLAPPED; // forward declare
+extern PYWINTYPES_EXPORT PyTypeObject PyOVERLAPPEDType; // the Type for PyOVERLAPPED
+#define PyOVERLAPPED_Check(ob)	((ob)->ob_type == &PyOVERLAPPEDType)
+PYWINTYPES_EXPORT BOOL PyWinObject_AsOVERLAPPED(PyObject *ob, OVERLAPPED **ppOverlapped, BOOL bNoneOK = TRUE);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsPyOVERLAPPED(PyObject *ob, PyOVERLAPPED **ppOverlapped, BOOL bNoneOK = TRUE);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromOVERLAPPED(const OVERLAPPED *pOverlapped);
+
+// A global function that can work as a module method for making an OVERLAPPED object.
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewOVERLAPPED(PyObject *self, PyObject *args);
+
+#ifndef NO_PYWINTYPES_IID
+/*
+** IID/GUID support
+*/
+
+extern PYWINTYPES_EXPORT PyTypeObject PyIIDType;		// the Type for PyIID
+#define PyIID_Check(ob)		((ob)->ob_type == &PyIIDType)
+
+// Given an object representing a CLSID (either PyIID or string), fill the CLSID.
+PYWINTYPES_EXPORT BOOL PyWinObject_AsIID(PyObject *obCLSID, CLSID *clsid);
+
+// return a native PyIID object representing an IID
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromIID(const IID &riid);
+
+// return a string/Unicode object representing an IID
+PYWINTYPES_EXPORT PyObject *PyWinStringObject_FromIID(const IID &riid);
+PYWINTYPES_EXPORT PyObject *PyWinUnicodeObject_FromIID(const IID &riid);
+
+// A global function that can work as a module method for making an IID object.
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewIID( PyObject *self, PyObject *args);
+#endif /*NO_PYWINTYPES_IID */
+
+/*
+** TIME support
+*/
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromSYSTEMTIME(const SYSTEMTIME &t);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromFILETIME(const FILETIME &t);
+
+// Converts a TimeStamp, which is in 100 nanosecond units like a FILETIME
+// TimeStamp is actually defined as a LARGE_INTEGER, so this function will also
+// accept Windows security "TimeStamp" objects directly - however, we use a
+// LARGE_INTEGER prototype to avoid pulling in the windows security headers.
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromTimeStamp(const LARGE_INTEGER &t);
+
+PYWINTYPES_EXPORT BOOL PyWinObject_AsDATE(PyObject *ob, DATE *pDate);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsFILETIME(PyObject *ob,	FILETIME *pDate);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsSYSTEMTIME(PyObject *ob, SYSTEMTIME *pDate);
+
+#ifndef NO_PYWINTYPES_TIME
+
+extern PYWINTYPES_EXPORT PyTypeObject PyTimeType;		// the Type for PyTime
+#define PyTime_Check(ob)		((ob)->ob_type == &PyTimeType)
+
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromDATE(DATE t);
+PYWINTYPES_EXPORT PyObject *PyWinTimeObject_FromLong(long t);
+
+// A global function that can work as a module method for making a time object.
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewTime( PyObject *self, PyObject *args);
+
+#endif // NO_PYWINTYPES_TIME
+
+/*
+** SECURITY_ATTRIBUTES support
+*/
+extern PYWINTYPES_EXPORT PyTypeObject PySECURITY_ATTRIBUTESType;
+#define PySECURITY_ATTRIBUTES_Check(ob)		((ob)->ob_type == &PySECURITY_ATTRIBUTESType)
+extern PYWINTYPES_EXPORT PyTypeObject PyDEVMODEType;
+
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSECURITY_ATTRIBUTES(PyObject *self, PyObject *args);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsSECURITY_ATTRIBUTES(PyObject *ob, SECURITY_ATTRIBUTES **ppSECURITY_ATTRIBUTES, BOOL bNoneOK = TRUE);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromSECURITY_ATTRIBUTES(const SECURITY_ATTRIBUTES &sa);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsDEVMODE(PyObject *ob, PDEVMODE * ppDEVMODE, BOOL bNoneOK = TRUE);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromDEVMODE(PDEVMODE);
+
+/*
+** WAVEFORMATEX support
+*/
+
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewWAVEFORMATEX(PyObject *self, PyObject *args);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromWAVEFROMATEX(const WAVEFORMATEX &wfx);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsWAVEFORMATEX(PyObject *ob, WAVEFORMATEX **ppWAVEFORMATEX, BOOL bNoneOK = TRUE);
+extern PYWINTYPES_EXPORT PyTypeObject PyWAVEFORMATEXType;
+#define PyWAVEFORMATEX_Check(ob)		((ob)->ob_type == &PyWAVEFORMATEXType)
+
+
+/*
+** SECURITY_DESCRIPTOR support
+*/
+#ifndef MS_WINCE /* These are not available on Windows CE */
+
+extern PYWINTYPES_EXPORT PyTypeObject PySECURITY_DESCRIPTORType;
+#define PySECURITY_DESCRIPTOR_Check(ob)		((ob)->ob_type == &PySECURITY_DESCRIPTORType)
+
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSECURITY_DESCRIPTOR(PyObject *self, PyObject *args);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsSECURITY_DESCRIPTOR(PyObject *ob, PSECURITY_DESCRIPTOR *ppSECURITY_DESCRIPTOR, BOOL bNoneOK = TRUE);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromSECURITY_DESCRIPTOR(PSECURITY_DESCRIPTOR psd);
+
+/*
+** SID support
+*/
+extern PYWINTYPES_EXPORT PyTypeObject PySIDType;
+#define PySID_Check(ob)		((ob)->ob_type == &PySIDType)
+
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewSID(PyObject *self, PyObject *args);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsSID(PyObject *ob, PSID *ppSID, BOOL bNoneOK = FALSE);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromSID(PSID pSID);
+
+/*
+** ACL support
+*/
+extern PYWINTYPES_EXPORT PyTypeObject PyACLType;
+#define PyACL_Check(ob)		((ob)->ob_type == &PyACLType)
+
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewACL(PyObject *self, PyObject *args);
+PYWINTYPES_EXPORT BOOL PyWinObject_AsACL(PyObject *ob, PACL *ppACL, BOOL bNoneOK = FALSE);
+
+#endif /* MS_WINCE */
+
+/*
+** Win32 HANDLE wrapper - any handle closable by "CloseHandle()"
+*/
+extern PYWINTYPES_EXPORT PyTypeObject PyHANDLEType; // the Type for PyHANDLE
+#define PyHANDLE_Check(ob)	((ob)->ob_type == &PyHANDLEType)
+
+PYWINTYPES_EXPORT BOOL PyWinObject_AsHANDLE(PyObject *ob, HANDLE *pRes, BOOL bNoneOK = FALSE);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromHANDLE(HANDLE h);
+
+// A global function that can work as a module method for making a HANDLE object.
+PYWINTYPES_EXPORT PyObject *PyWinMethod_NewHANDLE( PyObject *self, PyObject *args);
+
+// A global function that does the right thing wrt closing a "handle".
+// The object can be either a PyHANDLE or an integer.
+// If result is FALSE, a Python error is all setup (cf PyHANDLE::Close(), which doesn't set the Python error)
+PYWINTYPES_EXPORT BOOL PyWinObject_CloseHANDLE(PyObject *obHandle);
+
+PYWINTYPES_EXPORT BOOL PyWinObject_AsHKEY(PyObject *ob, HKEY *pRes, BOOL bNoneOK = FALSE);
+PYWINTYPES_EXPORT PyObject *PyWinObject_FromHKEY(HKEY h);
+PYWINTYPES_EXPORT BOOL PyWinObject_CloseHKEY(PyObject *obHandle);
+
+#include "winsock.h"
+/*
+** SOCKET support.
+*/
+PYWINTYPES_EXPORT
+BOOL PySocket_AsSOCKET
+//-------------------------------------------------------------------------
+// Helper function for dealing with socket arguments.
+(
+	PyObject *obSocket,
+	// [in] Python object being converted into a SOCKET handle.
+	SOCKET *ps
+	// [out] Returned socket handle
+);
+
+
+
+/*
+** Other Utilities
+*/
+#ifndef NO_PYWINTYPES_BSTR
+// ----------------------------------------------------------------------
+// WARNING - NEVER EVER USE new() ON THIS CLASS
+// This class can be used as a local variable, typically in a Python/C
+// function, and can be passed wherever a TCHAR/WCHAR is expected.
+// Typical Usage:
+// PyWin_AutoFreeBstr arg;
+// PyArg_ParseTuple("O", &obStr);
+// PyWinObject_AsAutoFreeBstr(obStr, &arg);
+// CallTheFunction(arg); // Will correctly pass BSTR/OLECHAR
+// -- when the function goes out of scope, the string owned by "arg" will
+// -- automatically be freed.
+// ----------------------------------------------------------------------
+class PYWINTYPES_EXPORT PyWin_AutoFreeBstr {
+public:
+	PyWin_AutoFreeBstr( BSTR bstr = NULL );
+	~PyWin_AutoFreeBstr();
+	void SetBstr( BSTR bstr );
+	operator BSTR() {return m_bstr;}
+private:
+	BSTR m_bstr;
+};
+
+inline BOOL PyWinObject_AsAutoFreeBstr(PyObject *stringObject, PyWin_AutoFreeBstr *pResult, BOOL bNoneOK = FALSE)
+{
+	if (bNoneOK && stringObject == Py_None) {
+		pResult->SetBstr(NULL);
+		return TRUE;
+	}
+	BSTR bs;
+	if (!PyWinObject_AsBstr(stringObject, &bs, bNoneOK))
+		return FALSE;
+	pResult->SetBstr(bs);
+	return TRUE;
+}
+#endif // NO_PYWINTYPES_BSTR
+
+// ----------------------------------------------------------------------
+//
+// THREAD MANAGEMENT
+//
+
+// ### need to rename the PYCOM_ stuff soon...
+
+// We have 2 discrete locks in use (when no free-threaded is used, anyway).
+// The first type of lock is the global Python lock.  This is the standard lock
+// in use by Python, and must be used as documented by Python.  Specifically, no
+// 2 threads may _ever_ call _any_ Python code (including INCREF/DECREF) without
+// first having this thread lock.
+//
+// The second type of lock is a "global framework lock".  This lock is simply a 
+// critical section, and used whenever 2 threads of C code need access to global
+// data.  This is different than the Python lock - this lock is used when no Python
+// code can ever be called by the threads, but the C code still needs thread-safety.
+
+// We also supply helper classes which make the usage of these locks a one-liner.
+
+// The "framework" lock, implemented as a critical section.
+PYWINTYPES_EXPORT void PyWin_AcquireGlobalLock(void);
+PYWINTYPES_EXPORT void PyWin_ReleaseGlobalLock(void);
+
+// Helper class for the DLL global lock.
+//
+// This class magically waits for the Win32/COM framework global lock, and releases it
+// when finished.  
+// NEVER new one of these objects - only use on the stack!
+class CEnterLeaveFramework {
+public:
+	CEnterLeaveFramework() {PyWin_AcquireGlobalLock();}
+	~CEnterLeaveFramework() {PyWin_ReleaseGlobalLock();}
+};
+
+// Python thread-lock stuff.  Free-threading patches use different semantics, but
+// these are abstracted away here...
+#ifndef FORCE_NO_FREE_THREAD
+# ifdef WITH_FREE_THREAD
+#  define PYCOM_USE_FREE_THREAD
+# endif
+#endif
+#ifdef PYCOM_USE_FREE_THREAD
+# include <threadstate.h>
+#else
+# include <pystate.h>
+#endif
+
+
+// Helper class for Enter/Leave Python
+//
+// This class magically waits for the Python global lock, and releases it
+// when finished.  
+
+// Nested invocations will deadlock, so be careful.
+
+// NEVER new one of these objects - only use on the stack!
+#ifndef PYCOM_USE_FREE_THREAD
+extern PYWINTYPES_EXPORT PyInterpreterState *PyWin_InterpreterState;
+extern PYWINTYPES_EXPORT BOOL PyWinThreadState_Ensure();
+extern PYWINTYPES_EXPORT void PyWinThreadState_Free();
+extern PYWINTYPES_EXPORT void PyWinThreadState_Clear();
+extern PYWINTYPES_EXPORT void PyWinInterpreterLock_Acquire();
+extern PYWINTYPES_EXPORT void PyWinInterpreterLock_Release();
+
+extern PYWINTYPES_EXPORT void PyWinGlobals_Ensure();
+extern PYWINTYPES_EXPORT void PyWinGlobals_Free();
+#else
+#define PyWinThreadState_Ensure PyThreadState_Ensure
+#define PyWinThreadState_Free PyThreadState_Free
+#define PyWinThreadState_Clear PyThreadState_ClearExc
+
+#endif
+
+extern PYWINTYPES_EXPORT void PyWin_MakePendingCalls();
+
+// For 2.3, use the PyGILState_ calls
+#if (PY_VERSION_HEX >= 0x02030000)
+#define PYWIN_USE_GILSTATE
+#endif
+
+#ifndef PYWIN_USE_GILSTATE
+
+class CEnterLeavePython {
+public:
+	CEnterLeavePython() {
+		acquired = FALSE;
+		acquire();
+	}
+	void acquire() {
+		if (acquired)
+			return;
+		created = PyWinThreadState_Ensure();
+#ifndef PYCOM_USE_FREE_THREAD
+		PyWinInterpreterLock_Acquire();
+#endif
+		if (created) {
+			// If pending python calls are waiting as we enter Python,
+			// it will generally mean an asynch signal handler, etc.
+			// We can either call it here, or wait for Python to call it
+			// as part of its "every 'n' opcodes" check.  If we wait for
+			// Python to check it and the pending call raises an exception,
+			// then it is _our_ code that will fail - this is unfair,
+			// as the signal was raised before we were entered - indeed,
+			// we may be directly responding to the signal!
+			// Thus, we flush all the pending calls here, and report any
+			// exceptions via our normal exception reporting mechanism.
+			// (of which we don't have, but not to worry... :)
+			// We can then execute our code in the knowledge that only
+			// signals raised _while_ we are executing will cause exceptions.
+			PyWin_MakePendingCalls();
+		}
+		acquired = TRUE;
+	}
+	~CEnterLeavePython() {
+		if (acquired)
+			release();
+	}
+	void release() {
+	// The interpreter state must be cleared
+	// _before_ we release the lock, as some of
+	// the sys. attributes cleared (eg, the current exception)
+	// may need the lock to invoke their destructors - 
+	// specifically, when exc_value is a class instance, and
+	// the exception holds the last reference!
+		if ( !acquired )
+			return;
+		if ( created )
+			PyWinThreadState_Clear();
+#ifndef PYCOM_USE_FREE_THREAD
+		PyWinInterpreterLock_Release();
+#endif
+		if ( created )
+			PyWinThreadState_Free();
+		acquired = FALSE;
+	}
+private:
+	BOOL created;
+	BOOL acquired;
+};
+
+#else // PYWIN_USE_GILSTATE
+
+class CEnterLeavePython {
+public:
+	CEnterLeavePython() {
+		acquire();
+	}
+	void acquire(void) {
+		state = PyGILState_Ensure();
+		released = FALSE;
+	}
+	~CEnterLeavePython() {
+		release();
+	}
+	void release(void) {
+		if (!released) {
+			PyGILState_Release(state);
+			released = TRUE;
+		}
+	}
+private:
+	PyGILState_STATE state;
+	BOOL released;
+};
+#endif // PYWIN_USE_GILSTATE
+
+// A helper for simple exception handling.
+// try/__try
+#ifdef MAINWIN
+#define PYWINTYPES_TRY try
+#else
+#define PYWINTYPES_TRY __try
+#endif /* MAINWIN */
+
+// catch/__except
+#if defined(__MINGW32__) || defined(MAINWIN)
+#define PYWINTYPES_EXCEPT catch(...)
+#else
+#define PYWINTYPES_EXCEPT __except( EXCEPTION_EXECUTE_HANDLER )
+#endif
+// End of exception helper macros.
+
+#endif // __PYWINTYPES_H__
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/afxres.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/afxres.py
new file mode 100644
index 0000000..e75b3f25
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/afxres.py
@@ -0,0 +1,497 @@
+# Generated by h2py from stdin
+TCS_MULTILINE = 0x0200
+CBRS_ALIGN_LEFT = 0x1000
+CBRS_ALIGN_TOP = 0x2000
+CBRS_ALIGN_RIGHT = 0x4000
+CBRS_ALIGN_BOTTOM = 0x8000
+CBRS_ALIGN_ANY = 0xF000
+CBRS_BORDER_LEFT = 0x0100
+CBRS_BORDER_TOP = 0x0200
+CBRS_BORDER_RIGHT = 0x0400
+CBRS_BORDER_BOTTOM = 0x0800
+CBRS_BORDER_ANY = 0x0F00
+CBRS_TOOLTIPS = 0x0010
+CBRS_FLYBY = 0x0020
+CBRS_FLOAT_MULTI = 0x0040
+CBRS_BORDER_3D = 0x0080
+CBRS_HIDE_INPLACE = 0x0008
+CBRS_SIZE_DYNAMIC = 0x0004
+CBRS_SIZE_FIXED = 0x0002
+CBRS_FLOATING = 0x0001
+CBRS_GRIPPER = 0x00400000
+CBRS_ORIENT_HORZ = (CBRS_ALIGN_TOP|CBRS_ALIGN_BOTTOM)
+CBRS_ORIENT_VERT = (CBRS_ALIGN_LEFT|CBRS_ALIGN_RIGHT)
+CBRS_ORIENT_ANY = (CBRS_ORIENT_HORZ|CBRS_ORIENT_VERT)
+CBRS_ALL = 0xFFFF
+CBRS_NOALIGN = 0x00000000
+CBRS_LEFT = (CBRS_ALIGN_LEFT|CBRS_BORDER_RIGHT)
+CBRS_TOP = (CBRS_ALIGN_TOP|CBRS_BORDER_BOTTOM)
+CBRS_RIGHT = (CBRS_ALIGN_RIGHT|CBRS_BORDER_LEFT)
+CBRS_BOTTOM = (CBRS_ALIGN_BOTTOM|CBRS_BORDER_TOP)
+SBPS_NORMAL = 0x0000
+SBPS_NOBORDERS = 0x0100
+SBPS_POPOUT = 0x0200
+SBPS_OWNERDRAW = 0x1000
+SBPS_DISABLED = 0x04000000
+SBPS_STRETCH = 0x08000000
+ID_INDICATOR_EXT = 0xE700
+ID_INDICATOR_CAPS = 0xE701
+ID_INDICATOR_NUM = 0xE702
+ID_INDICATOR_SCRL = 0xE703
+ID_INDICATOR_OVR = 0xE704
+ID_INDICATOR_REC = 0xE705
+ID_INDICATOR_KANA = 0xE706
+ID_SEPARATOR = 0
+AFX_IDW_CONTROLBAR_FIRST = 0xE800
+AFX_IDW_CONTROLBAR_LAST = 0xE8FF
+AFX_IDW_TOOLBAR = 0xE800
+AFX_IDW_STATUS_BAR = 0xE801
+AFX_IDW_PREVIEW_BAR = 0xE802
+AFX_IDW_RESIZE_BAR = 0xE803
+AFX_IDW_DOCKBAR_TOP = 0xE81B
+AFX_IDW_DOCKBAR_LEFT = 0xE81C
+AFX_IDW_DOCKBAR_RIGHT = 0xE81D
+AFX_IDW_DOCKBAR_BOTTOM = 0xE81E
+AFX_IDW_DOCKBAR_FLOAT = 0xE81F
+def AFX_CONTROLBAR_MASK(nIDC): return (1L << (nIDC - AFX_IDW_CONTROLBAR_FIRST))
+
+AFX_IDW_PANE_FIRST = 0xE900
+AFX_IDW_PANE_LAST = 0xE9ff
+AFX_IDW_HSCROLL_FIRST = 0xEA00
+AFX_IDW_VSCROLL_FIRST = 0xEA10
+AFX_IDW_SIZE_BOX = 0xEA20
+AFX_IDW_PANE_SAVE = 0xEA21
+AFX_IDS_APP_TITLE = 0xE000
+AFX_IDS_IDLEMESSAGE = 0xE001
+AFX_IDS_HELPMODEMESSAGE = 0xE002
+AFX_IDS_APP_TITLE_EMBEDDING = 0xE003
+AFX_IDS_COMPANY_NAME = 0xE004
+AFX_IDS_OBJ_TITLE_INPLACE = 0xE005
+ID_FILE_NEW = 0xE100
+ID_FILE_OPEN = 0xE101
+ID_FILE_CLOSE = 0xE102
+ID_FILE_SAVE = 0xE103
+ID_FILE_SAVE_AS = 0xE104
+ID_FILE_PAGE_SETUP = 0xE105
+ID_FILE_PRINT_SETUP = 0xE106
+ID_FILE_PRINT = 0xE107
+ID_FILE_PRINT_DIRECT = 0xE108
+ID_FILE_PRINT_PREVIEW = 0xE109
+ID_FILE_UPDATE = 0xE10A
+ID_FILE_SAVE_COPY_AS = 0xE10B
+ID_FILE_SEND_MAIL = 0xE10C
+ID_FILE_MRU_FIRST = 0xE110
+ID_FILE_MRU_FILE1 = 0xE110
+ID_FILE_MRU_FILE2 = 0xE111
+ID_FILE_MRU_FILE3 = 0xE112
+ID_FILE_MRU_FILE4 = 0xE113
+ID_FILE_MRU_FILE5 = 0xE114
+ID_FILE_MRU_FILE6 = 0xE115
+ID_FILE_MRU_FILE7 = 0xE116
+ID_FILE_MRU_FILE8 = 0xE117
+ID_FILE_MRU_FILE9 = 0xE118
+ID_FILE_MRU_FILE10 = 0xE119
+ID_FILE_MRU_FILE11 = 0xE11A
+ID_FILE_MRU_FILE12 = 0xE11B
+ID_FILE_MRU_FILE13 = 0xE11C
+ID_FILE_MRU_FILE14 = 0xE11D
+ID_FILE_MRU_FILE15 = 0xE11E
+ID_FILE_MRU_FILE16 = 0xE11F
+ID_FILE_MRU_LAST = 0xE11F
+ID_EDIT_CLEAR = 0xE120
+ID_EDIT_CLEAR_ALL = 0xE121
+ID_EDIT_COPY = 0xE122
+ID_EDIT_CUT = 0xE123
+ID_EDIT_FIND = 0xE124
+ID_EDIT_PASTE = 0xE125
+ID_EDIT_PASTE_LINK = 0xE126
+ID_EDIT_PASTE_SPECIAL = 0xE127
+ID_EDIT_REPEAT = 0xE128
+ID_EDIT_REPLACE = 0xE129
+ID_EDIT_SELECT_ALL = 0xE12A
+ID_EDIT_UNDO = 0xE12B
+ID_EDIT_REDO = 0xE12C
+ID_WINDOW_NEW = 0xE130
+ID_WINDOW_ARRANGE = 0xE131
+ID_WINDOW_CASCADE = 0xE132
+ID_WINDOW_TILE_HORZ = 0xE133
+ID_WINDOW_TILE_VERT = 0xE134
+ID_WINDOW_SPLIT = 0xE135
+AFX_IDM_WINDOW_FIRST = 0xE130
+AFX_IDM_WINDOW_LAST = 0xE13F
+AFX_IDM_FIRST_MDICHILD = 0xFF00
+ID_APP_ABOUT = 0xE140
+ID_APP_EXIT = 0xE141
+ID_HELP_INDEX = 0xE142
+ID_HELP_FINDER = 0xE143
+ID_HELP_USING = 0xE144
+ID_CONTEXT_HELP = 0xE145
+ID_HELP = 0xE146
+ID_DEFAULT_HELP = 0xE147
+ID_NEXT_PANE = 0xE150
+ID_PREV_PANE = 0xE151
+ID_FORMAT_FONT = 0xE160
+ID_OLE_INSERT_NEW = 0xE200
+ID_OLE_EDIT_LINKS = 0xE201
+ID_OLE_EDIT_CONVERT = 0xE202
+ID_OLE_EDIT_CHANGE_ICON = 0xE203
+ID_OLE_EDIT_PROPERTIES = 0xE204
+ID_OLE_VERB_FIRST = 0xE210
+ID_OLE_VERB_LAST = 0xE21F
+AFX_ID_PREVIEW_CLOSE = 0xE300
+AFX_ID_PREVIEW_NUMPAGE = 0xE301
+AFX_ID_PREVIEW_NEXT = 0xE302
+AFX_ID_PREVIEW_PREV = 0xE303
+AFX_ID_PREVIEW_PRINT = 0xE304
+AFX_ID_PREVIEW_ZOOMIN = 0xE305
+AFX_ID_PREVIEW_ZOOMOUT = 0xE306
+ID_VIEW_TOOLBAR = 0xE800
+ID_VIEW_STATUS_BAR = 0xE801
+ID_RECORD_FIRST = 0xE900
+ID_RECORD_LAST = 0xE901
+ID_RECORD_NEXT = 0xE902
+ID_RECORD_PREV = 0xE903
+IDC_STATIC = (-1)
+AFX_IDS_SCFIRST = 0xEF00
+AFX_IDS_SCSIZE = 0xEF00
+AFX_IDS_SCMOVE = 0xEF01
+AFX_IDS_SCMINIMIZE = 0xEF02
+AFX_IDS_SCMAXIMIZE = 0xEF03
+AFX_IDS_SCNEXTWINDOW = 0xEF04
+AFX_IDS_SCPREVWINDOW = 0xEF05
+AFX_IDS_SCCLOSE = 0xEF06
+AFX_IDS_SCRESTORE = 0xEF12
+AFX_IDS_SCTASKLIST = 0xEF13
+AFX_IDS_MDICHILD = 0xEF1F
+AFX_IDS_DESKACCESSORY = 0xEFDA
+AFX_IDS_OPENFILE = 0xF000
+AFX_IDS_SAVEFILE = 0xF001
+AFX_IDS_ALLFILTER = 0xF002
+AFX_IDS_UNTITLED = 0xF003
+AFX_IDS_SAVEFILECOPY = 0xF004
+AFX_IDS_PREVIEW_CLOSE = 0xF005
+AFX_IDS_UNNAMED_FILE = 0xF006
+AFX_IDS_ABOUT = 0xF010
+AFX_IDS_HIDE = 0xF011
+AFX_IDP_NO_ERROR_AVAILABLE = 0xF020
+AFX_IDS_NOT_SUPPORTED_EXCEPTION = 0xF021
+AFX_IDS_RESOURCE_EXCEPTION = 0xF022
+AFX_IDS_MEMORY_EXCEPTION = 0xF023
+AFX_IDS_USER_EXCEPTION = 0xF024
+AFX_IDS_PRINTONPORT = 0xF040
+AFX_IDS_ONEPAGE = 0xF041
+AFX_IDS_TWOPAGE = 0xF042
+AFX_IDS_PRINTPAGENUM = 0xF043
+AFX_IDS_PREVIEWPAGEDESC = 0xF044
+AFX_IDS_PRINTDEFAULTEXT = 0xF045
+AFX_IDS_PRINTDEFAULT = 0xF046
+AFX_IDS_PRINTFILTER = 0xF047
+AFX_IDS_PRINTCAPTION = 0xF048
+AFX_IDS_PRINTTOFILE = 0xF049
+AFX_IDS_OBJECT_MENUITEM = 0xF080
+AFX_IDS_EDIT_VERB = 0xF081
+AFX_IDS_ACTIVATE_VERB = 0xF082
+AFX_IDS_CHANGE_LINK = 0xF083
+AFX_IDS_AUTO = 0xF084
+AFX_IDS_MANUAL = 0xF085
+AFX_IDS_FROZEN = 0xF086
+AFX_IDS_ALL_FILES = 0xF087
+AFX_IDS_SAVE_MENU = 0xF088
+AFX_IDS_UPDATE_MENU = 0xF089
+AFX_IDS_SAVE_AS_MENU = 0xF08A
+AFX_IDS_SAVE_COPY_AS_MENU = 0xF08B
+AFX_IDS_EXIT_MENU = 0xF08C
+AFX_IDS_UPDATING_ITEMS = 0xF08D
+AFX_IDS_METAFILE_FORMAT = 0xF08E
+AFX_IDS_DIB_FORMAT = 0xF08F
+AFX_IDS_BITMAP_FORMAT = 0xF090
+AFX_IDS_LINKSOURCE_FORMAT = 0xF091
+AFX_IDS_EMBED_FORMAT = 0xF092
+AFX_IDS_PASTELINKEDTYPE = 0xF094
+AFX_IDS_UNKNOWNTYPE = 0xF095
+AFX_IDS_RTF_FORMAT = 0xF096
+AFX_IDS_TEXT_FORMAT = 0xF097
+AFX_IDS_INVALID_CURRENCY = 0xF098
+AFX_IDS_INVALID_DATETIME = 0xF099
+AFX_IDS_INVALID_DATETIMESPAN = 0xF09A
+AFX_IDP_INVALID_FILENAME = 0xF100
+AFX_IDP_FAILED_TO_OPEN_DOC = 0xF101
+AFX_IDP_FAILED_TO_SAVE_DOC = 0xF102
+AFX_IDP_ASK_TO_SAVE = 0xF103
+AFX_IDP_FAILED_TO_CREATE_DOC = 0xF104
+AFX_IDP_FILE_TOO_LARGE = 0xF105
+AFX_IDP_FAILED_TO_START_PRINT = 0xF106
+AFX_IDP_FAILED_TO_LAUNCH_HELP = 0xF107
+AFX_IDP_INTERNAL_FAILURE = 0xF108
+AFX_IDP_COMMAND_FAILURE = 0xF109
+AFX_IDP_FAILED_MEMORY_ALLOC = 0xF10A
+AFX_IDP_PARSE_INT = 0xF110
+AFX_IDP_PARSE_REAL = 0xF111
+AFX_IDP_PARSE_INT_RANGE = 0xF112
+AFX_IDP_PARSE_REAL_RANGE = 0xF113
+AFX_IDP_PARSE_STRING_SIZE = 0xF114
+AFX_IDP_PARSE_RADIO_BUTTON = 0xF115
+AFX_IDP_PARSE_BYTE = 0xF116
+AFX_IDP_PARSE_UINT = 0xF117
+AFX_IDP_PARSE_DATETIME = 0xF118
+AFX_IDP_PARSE_CURRENCY = 0xF119
+AFX_IDP_FAILED_INVALID_FORMAT = 0xF120
+AFX_IDP_FAILED_INVALID_PATH = 0xF121
+AFX_IDP_FAILED_DISK_FULL = 0xF122
+AFX_IDP_FAILED_ACCESS_READ = 0xF123
+AFX_IDP_FAILED_ACCESS_WRITE = 0xF124
+AFX_IDP_FAILED_IO_ERROR_READ = 0xF125
+AFX_IDP_FAILED_IO_ERROR_WRITE = 0xF126
+AFX_IDP_STATIC_OBJECT = 0xF180
+AFX_IDP_FAILED_TO_CONNECT = 0xF181
+AFX_IDP_SERVER_BUSY = 0xF182
+AFX_IDP_BAD_VERB = 0xF183
+AFX_IDP_FAILED_TO_NOTIFY = 0xF185
+AFX_IDP_FAILED_TO_LAUNCH = 0xF186
+AFX_IDP_ASK_TO_UPDATE = 0xF187
+AFX_IDP_FAILED_TO_UPDATE = 0xF188
+AFX_IDP_FAILED_TO_REGISTER = 0xF189
+AFX_IDP_FAILED_TO_AUTO_REGISTER = 0xF18A
+AFX_IDP_FAILED_TO_CONVERT = 0xF18B
+AFX_IDP_GET_NOT_SUPPORTED = 0xF18C
+AFX_IDP_SET_NOT_SUPPORTED = 0xF18D
+AFX_IDP_ASK_TO_DISCARD = 0xF18E
+AFX_IDP_FAILED_TO_CREATE = 0xF18F
+AFX_IDP_FAILED_MAPI_LOAD = 0xF190
+AFX_IDP_INVALID_MAPI_DLL = 0xF191
+AFX_IDP_FAILED_MAPI_SEND = 0xF192
+AFX_IDP_FILE_NONE = 0xF1A0
+AFX_IDP_FILE_GENERIC = 0xF1A1
+AFX_IDP_FILE_NOT_FOUND = 0xF1A2
+AFX_IDP_FILE_BAD_PATH = 0xF1A3
+AFX_IDP_FILE_TOO_MANY_OPEN = 0xF1A4
+AFX_IDP_FILE_ACCESS_DENIED = 0xF1A5
+AFX_IDP_FILE_INVALID_FILE = 0xF1A6
+AFX_IDP_FILE_REMOVE_CURRENT = 0xF1A7
+AFX_IDP_FILE_DIR_FULL = 0xF1A8
+AFX_IDP_FILE_BAD_SEEK = 0xF1A9
+AFX_IDP_FILE_HARD_IO = 0xF1AA
+AFX_IDP_FILE_SHARING = 0xF1AB
+AFX_IDP_FILE_LOCKING = 0xF1AC
+AFX_IDP_FILE_DISKFULL = 0xF1AD
+AFX_IDP_FILE_EOF = 0xF1AE
+AFX_IDP_ARCH_NONE = 0xF1B0
+AFX_IDP_ARCH_GENERIC = 0xF1B1
+AFX_IDP_ARCH_READONLY = 0xF1B2
+AFX_IDP_ARCH_ENDOFFILE = 0xF1B3
+AFX_IDP_ARCH_WRITEONLY = 0xF1B4
+AFX_IDP_ARCH_BADINDEX = 0xF1B5
+AFX_IDP_ARCH_BADCLASS = 0xF1B6
+AFX_IDP_ARCH_BADSCHEMA = 0xF1B7
+AFX_IDS_OCC_SCALEUNITS_PIXELS = 0xF1C0
+AFX_IDS_STATUS_FONT = 0xF230
+AFX_IDS_TOOLTIP_FONT = 0xF231
+AFX_IDS_UNICODE_FONT = 0xF232
+AFX_IDS_MINI_FONT = 0xF233
+AFX_IDP_SQL_FIRST = 0xF280
+AFX_IDP_SQL_CONNECT_FAIL = 0xF281
+AFX_IDP_SQL_RECORDSET_FORWARD_ONLY = 0xF282
+AFX_IDP_SQL_EMPTY_COLUMN_LIST = 0xF283
+AFX_IDP_SQL_FIELD_SCHEMA_MISMATCH = 0xF284
+AFX_IDP_SQL_ILLEGAL_MODE = 0xF285
+AFX_IDP_SQL_MULTIPLE_ROWS_AFFECTED = 0xF286
+AFX_IDP_SQL_NO_CURRENT_RECORD = 0xF287
+AFX_IDP_SQL_NO_ROWS_AFFECTED = 0xF288
+AFX_IDP_SQL_RECORDSET_READONLY = 0xF289
+AFX_IDP_SQL_SQL_NO_TOTAL = 0xF28A
+AFX_IDP_SQL_ODBC_LOAD_FAILED = 0xF28B
+AFX_IDP_SQL_DYNASET_NOT_SUPPORTED = 0xF28C
+AFX_IDP_SQL_SNAPSHOT_NOT_SUPPORTED = 0xF28D
+AFX_IDP_SQL_API_CONFORMANCE = 0xF28E
+AFX_IDP_SQL_SQL_CONFORMANCE = 0xF28F
+AFX_IDP_SQL_NO_DATA_FOUND = 0xF290
+AFX_IDP_SQL_ROW_UPDATE_NOT_SUPPORTED = 0xF291
+AFX_IDP_SQL_ODBC_V2_REQUIRED = 0xF292
+AFX_IDP_SQL_NO_POSITIONED_UPDATES = 0xF293
+AFX_IDP_SQL_LOCK_MODE_NOT_SUPPORTED = 0xF294
+AFX_IDP_SQL_DATA_TRUNCATED = 0xF295
+AFX_IDP_SQL_ROW_FETCH = 0xF296
+AFX_IDP_SQL_INCORRECT_ODBC = 0xF297
+AFX_IDP_SQL_UPDATE_DELETE_FAILED = 0xF298
+AFX_IDP_SQL_DYNAMIC_CURSOR_NOT_SUPPORTED = 0xF299
+AFX_IDP_DAO_FIRST = 0xF2A0
+AFX_IDP_DAO_ENGINE_INITIALIZATION = 0xF2A0
+AFX_IDP_DAO_DFX_BIND = 0xF2A1
+AFX_IDP_DAO_OBJECT_NOT_OPEN = 0xF2A2
+AFX_IDP_DAO_ROWTOOSHORT = 0xF2A3
+AFX_IDP_DAO_BADBINDINFO = 0xF2A4
+AFX_IDP_DAO_COLUMNUNAVAILABLE = 0xF2A5
+AFX_IDC_LISTBOX = 100
+AFX_IDC_CHANGE = 101
+AFX_IDC_PRINT_DOCNAME = 201
+AFX_IDC_PRINT_PRINTERNAME = 202
+AFX_IDC_PRINT_PORTNAME = 203
+AFX_IDC_PRINT_PAGENUM = 204
+ID_APPLY_NOW = 0x3021
+ID_WIZBACK = 0x3023
+ID_WIZNEXT = 0x3024
+ID_WIZFINISH = 0x3025
+AFX_IDC_TAB_CONTROL = 0x3020
+AFX_IDD_FILEOPEN = 28676
+AFX_IDD_FILESAVE = 28677
+AFX_IDD_FONT = 28678
+AFX_IDD_COLOR = 28679
+AFX_IDD_PRINT = 28680
+AFX_IDD_PRINTSETUP = 28681
+AFX_IDD_FIND = 28682
+AFX_IDD_REPLACE = 28683
+AFX_IDD_NEWTYPEDLG = 30721
+AFX_IDD_PRINTDLG = 30722
+AFX_IDD_PREVIEW_TOOLBAR = 30723
+AFX_IDD_PREVIEW_SHORTTOOLBAR = 30731
+AFX_IDD_INSERTOBJECT = 30724
+AFX_IDD_CHANGEICON = 30725
+AFX_IDD_CONVERT = 30726
+AFX_IDD_PASTESPECIAL = 30727
+AFX_IDD_EDITLINKS = 30728
+AFX_IDD_FILEBROWSE = 30729
+AFX_IDD_BUSY = 30730
+AFX_IDD_OBJECTPROPERTIES = 30732
+AFX_IDD_CHANGESOURCE = 30733
+AFX_IDC_CONTEXTHELP = 30977
+AFX_IDC_MAGNIFY = 30978
+AFX_IDC_SMALLARROWS = 30979
+AFX_IDC_HSPLITBAR = 30980
+AFX_IDC_VSPLITBAR = 30981
+AFX_IDC_NODROPCRSR = 30982
+AFX_IDC_TRACKNWSE = 30983
+AFX_IDC_TRACKNESW = 30984
+AFX_IDC_TRACKNS = 30985
+AFX_IDC_TRACKWE = 30986
+AFX_IDC_TRACK4WAY = 30987
+AFX_IDC_MOVE4WAY = 30988
+AFX_IDB_MINIFRAME_MENU = 30994
+AFX_IDB_CHECKLISTBOX_NT = 30995
+AFX_IDB_CHECKLISTBOX_95 = 30996
+AFX_IDR_PREVIEW_ACCEL = 30997
+AFX_IDI_STD_MDIFRAME = 31233
+AFX_IDI_STD_FRAME = 31234
+AFX_IDC_FONTPROP = 1000
+AFX_IDC_FONTNAMES = 1001
+AFX_IDC_FONTSTYLES = 1002
+AFX_IDC_FONTSIZES = 1003
+AFX_IDC_STRIKEOUT = 1004
+AFX_IDC_UNDERLINE = 1005
+AFX_IDC_SAMPLEBOX = 1006
+AFX_IDC_COLOR_BLACK = 1100
+AFX_IDC_COLOR_WHITE = 1101
+AFX_IDC_COLOR_RED = 1102
+AFX_IDC_COLOR_GREEN = 1103
+AFX_IDC_COLOR_BLUE = 1104
+AFX_IDC_COLOR_YELLOW = 1105
+AFX_IDC_COLOR_MAGENTA = 1106
+AFX_IDC_COLOR_CYAN = 1107
+AFX_IDC_COLOR_GRAY = 1108
+AFX_IDC_COLOR_LIGHTGRAY = 1109
+AFX_IDC_COLOR_DARKRED = 1110
+AFX_IDC_COLOR_DARKGREEN = 1111
+AFX_IDC_COLOR_DARKBLUE = 1112
+AFX_IDC_COLOR_LIGHTBROWN = 1113
+AFX_IDC_COLOR_DARKMAGENTA = 1114
+AFX_IDC_COLOR_DARKCYAN = 1115
+AFX_IDC_COLORPROP = 1116
+AFX_IDC_SYSTEMCOLORS = 1117
+AFX_IDC_PROPNAME = 1201
+AFX_IDC_PICTURE = 1202
+AFX_IDC_BROWSE = 1203
+AFX_IDC_CLEAR = 1204
+AFX_IDD_PROPPAGE_COLOR = 32257
+AFX_IDD_PROPPAGE_FONT = 32258
+AFX_IDD_PROPPAGE_PICTURE = 32259
+AFX_IDB_TRUETYPE = 32384
+AFX_IDS_PROPPAGE_UNKNOWN = 0xFE01
+AFX_IDS_COLOR_DESKTOP = 0xFE04
+AFX_IDS_COLOR_APPWORKSPACE = 0xFE05
+AFX_IDS_COLOR_WNDBACKGND = 0xFE06
+AFX_IDS_COLOR_WNDTEXT = 0xFE07
+AFX_IDS_COLOR_MENUBAR = 0xFE08
+AFX_IDS_COLOR_MENUTEXT = 0xFE09
+AFX_IDS_COLOR_ACTIVEBAR = 0xFE0A
+AFX_IDS_COLOR_INACTIVEBAR = 0xFE0B
+AFX_IDS_COLOR_ACTIVETEXT = 0xFE0C
+AFX_IDS_COLOR_INACTIVETEXT = 0xFE0D
+AFX_IDS_COLOR_ACTIVEBORDER = 0xFE0E
+AFX_IDS_COLOR_INACTIVEBORDER = 0xFE0F
+AFX_IDS_COLOR_WNDFRAME = 0xFE10
+AFX_IDS_COLOR_SCROLLBARS = 0xFE11
+AFX_IDS_COLOR_BTNFACE = 0xFE12
+AFX_IDS_COLOR_BTNSHADOW = 0xFE13
+AFX_IDS_COLOR_BTNTEXT = 0xFE14
+AFX_IDS_COLOR_BTNHIGHLIGHT = 0xFE15
+AFX_IDS_COLOR_DISABLEDTEXT = 0xFE16
+AFX_IDS_COLOR_HIGHLIGHT = 0xFE17
+AFX_IDS_COLOR_HIGHLIGHTTEXT = 0xFE18
+AFX_IDS_REGULAR = 0xFE19
+AFX_IDS_BOLD = 0xFE1A
+AFX_IDS_ITALIC = 0xFE1B
+AFX_IDS_BOLDITALIC = 0xFE1C
+AFX_IDS_SAMPLETEXT = 0xFE1D
+AFX_IDS_DISPLAYSTRING_FONT = 0xFE1E
+AFX_IDS_DISPLAYSTRING_COLOR = 0xFE1F
+AFX_IDS_DISPLAYSTRING_PICTURE = 0xFE20
+AFX_IDS_PICTUREFILTER = 0xFE21
+AFX_IDS_PICTYPE_UNKNOWN = 0xFE22
+AFX_IDS_PICTYPE_NONE = 0xFE23
+AFX_IDS_PICTYPE_BITMAP = 0xFE24
+AFX_IDS_PICTYPE_METAFILE = 0xFE25
+AFX_IDS_PICTYPE_ICON = 0xFE26
+AFX_IDS_COLOR_PPG = 0xFE28
+AFX_IDS_COLOR_PPG_CAPTION = 0xFE29
+AFX_IDS_FONT_PPG = 0xFE2A
+AFX_IDS_FONT_PPG_CAPTION = 0xFE2B
+AFX_IDS_PICTURE_PPG = 0xFE2C
+AFX_IDS_PICTURE_PPG_CAPTION = 0xFE2D
+AFX_IDS_PICTUREBROWSETITLE = 0xFE30
+AFX_IDS_BORDERSTYLE_0 = 0xFE31
+AFX_IDS_BORDERSTYLE_1 = 0xFE32
+AFX_IDS_VERB_EDIT = 0xFE40
+AFX_IDS_VERB_PROPERTIES = 0xFE41
+AFX_IDP_PICTURECANTOPEN = 0xFE83
+AFX_IDP_PICTURECANTLOAD = 0xFE84
+AFX_IDP_PICTURETOOLARGE = 0xFE85
+AFX_IDP_PICTUREREADFAILED = 0xFE86
+AFX_IDP_E_ILLEGALFUNCTIONCALL = 0xFEA0
+AFX_IDP_E_OVERFLOW = 0xFEA1
+AFX_IDP_E_OUTOFMEMORY = 0xFEA2
+AFX_IDP_E_DIVISIONBYZERO = 0xFEA3
+AFX_IDP_E_OUTOFSTRINGSPACE = 0xFEA4
+AFX_IDP_E_OUTOFSTACKSPACE = 0xFEA5
+AFX_IDP_E_BADFILENAMEORNUMBER = 0xFEA6
+AFX_IDP_E_FILENOTFOUND = 0xFEA7
+AFX_IDP_E_BADFILEMODE = 0xFEA8
+AFX_IDP_E_FILEALREADYOPEN = 0xFEA9
+AFX_IDP_E_DEVICEIOERROR = 0xFEAA
+AFX_IDP_E_FILEALREADYEXISTS = 0xFEAB
+AFX_IDP_E_BADRECORDLENGTH = 0xFEAC
+AFX_IDP_E_DISKFULL = 0xFEAD
+AFX_IDP_E_BADRECORDNUMBER = 0xFEAE
+AFX_IDP_E_BADFILENAME = 0xFEAF
+AFX_IDP_E_TOOMANYFILES = 0xFEB0
+AFX_IDP_E_DEVICEUNAVAILABLE = 0xFEB1
+AFX_IDP_E_PERMISSIONDENIED = 0xFEB2
+AFX_IDP_E_DISKNOTREADY = 0xFEB3
+AFX_IDP_E_PATHFILEACCESSERROR = 0xFEB4
+AFX_IDP_E_PATHNOTFOUND = 0xFEB5
+AFX_IDP_E_INVALIDPATTERNSTRING = 0xFEB6
+AFX_IDP_E_INVALIDUSEOFNULL = 0xFEB7
+AFX_IDP_E_INVALIDFILEFORMAT = 0xFEB8
+AFX_IDP_E_INVALIDPROPERTYVALUE = 0xFEB9
+AFX_IDP_E_INVALIDPROPERTYARRAYINDEX = 0xFEBA
+AFX_IDP_E_SETNOTSUPPORTEDATRUNTIME = 0xFEBB
+AFX_IDP_E_SETNOTSUPPORTED = 0xFEBC
+AFX_IDP_E_NEEDPROPERTYARRAYINDEX = 0xFEBD
+AFX_IDP_E_SETNOTPERMITTED = 0xFEBE
+AFX_IDP_E_GETNOTSUPPORTEDATRUNTIME = 0xFEBF
+AFX_IDP_E_GETNOTSUPPORTED = 0xFEC0
+AFX_IDP_E_PROPERTYNOTFOUND = 0xFEC1
+AFX_IDP_E_INVALIDCLIPBOARDFORMAT = 0xFEC2
+AFX_IDP_E_INVALIDPICTURE = 0xFEC3
+AFX_IDP_E_PRINTERERROR = 0xFEC4
+AFX_IDP_E_CANTSAVEFILETOTEMP = 0xFEC5
+AFX_IDP_E_SEARCHTEXTNOTFOUND = 0xFEC6
+AFX_IDP_E_REPLACEMENTSTOOLONG = 0xFEC7
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/commctrl.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/commctrl.py
new file mode 100644
index 0000000..c876e01
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/commctrl.py
@@ -0,0 +1,1549 @@
+# Generated by h2py from COMMCTRL.H
+WM_USER = 1024
+ICC_LISTVIEW_CLASSES = 1		# listview, header
+ICC_TREEVIEW_CLASSES = 2		# treeview, tooltips
+ICC_BAR_CLASSES = 4			# toolbar, statusbar, trackbar, tooltips
+ICC_TAB_CLASSES = 8			# tab, tooltips
+ICC_UPDOWN_CLASS = 16			# updown
+ICC_PROGRESS_CLASS = 32		# progress
+ICC_HOTKEY_CLASS = 64			# hotkey
+ICC_ANIMATE_CLASS = 128		# animate
+ICC_WIN95_CLASSES = 255
+ICC_DATE_CLASSES = 256			# month picker, date picker, time picker, updown
+ICC_USEREX_CLASSES = 512		# comboex
+ICC_COOL_CLASSES = 1024			# rebar (coolbar) control
+ICC_INTERNET_CLASSES = 2048
+ICC_PAGESCROLLER_CLASS = 4096		# page scroller
+ICC_NATIVEFNTCTL_CLASS = 8192		# native font control
+ODT_HEADER = 100
+ODT_TAB = 101
+ODT_LISTVIEW = 102
+PY_0U = 0
+NM_FIRST = (PY_0U)				# generic to all controls
+NM_LAST = (PY_0U- 99)
+LVN_FIRST = (PY_0U-100)			# listview
+LVN_LAST = (PY_0U-199)
+HDN_FIRST = (PY_0U-300)			# header
+HDN_LAST = (PY_0U-399)
+TVN_FIRST = (PY_0U-400)			# treeview
+TVN_LAST = (PY_0U-499)
+TTN_FIRST = (PY_0U-520)			# tooltips
+TTN_LAST = (PY_0U-549)
+TCN_FIRST = (PY_0U-550)			# tab control
+TCN_LAST = (PY_0U-580)
+CDN_FIRST = (PY_0U-601)			# common dialog (new)
+CDN_LAST = (PY_0U-699)
+TBN_FIRST = (PY_0U-700)			# toolbar
+TBN_LAST = (PY_0U-720)
+UDN_FIRST = (PY_0U-721)			# updown
+UDN_LAST = (PY_0U-740)
+MCN_FIRST = (PY_0U-750)			# monthcal
+MCN_LAST = (PY_0U-759)
+DTN_FIRST = (PY_0U-760)			# datetimepick
+DTN_LAST = (PY_0U-799)
+CBEN_FIRST = (PY_0U-800)		# combo box ex
+CBEN_LAST = (PY_0U-830)
+RBN_FIRST = (PY_0U-831)			# rebar
+RBN_LAST = (PY_0U-859)
+IPN_FIRST = (PY_0U-860)			# internet address
+IPN_LAST = (PY_0U-879)			# internet address
+SBN_FIRST = (PY_0U-880)			# status bar
+SBN_LAST = (PY_0U-899)
+PGN_FIRST = (PY_0U-900)			# Pager Control
+PGN_LAST = (PY_0U-950)
+LVM_FIRST = 4096				# ListView messages
+TV_FIRST = 4352					# TreeView messages
+HDM_FIRST = 4608				# Header messages
+TCM_FIRST = 4864				# Tab control messages
+PGM_FIRST = 5120				# Pager control messages
+CCM_FIRST = 8192				# Common control shared messages
+CCM_SETBKCOLOR = (CCM_FIRST + 1)		# lParam is bkColor
+CCM_SETCOLORSCHEME = (CCM_FIRST + 2)	# lParam is color scheme
+CCM_GETCOLORSCHEME = (CCM_FIRST + 3)	# fills in COLORSCHEME pointed to by lParam
+CCM_GETDROPTARGET = (CCM_FIRST + 4)
+CCM_SETUNICODEFORMAT = (CCM_FIRST + 5)
+CCM_GETUNICODEFORMAT = (CCM_FIRST + 6)
+INFOTIPSIZE = 1024
+NM_OUTOFMEMORY = (NM_FIRST-1)
+NM_CLICK = (NM_FIRST-2)				# uses NMCLICK struct
+NM_DBLCLK = (NM_FIRST-3)
+NM_RETURN = (NM_FIRST-4)
+NM_RCLICK = (NM_FIRST-5)			# uses NMCLICK struct
+NM_RDBLCLK = (NM_FIRST-6)
+NM_SETFOCUS = (NM_FIRST-7)
+NM_KILLFOCUS = (NM_FIRST-8)
+NM_CUSTOMDRAW = (NM_FIRST-12)
+NM_HOVER = (NM_FIRST-13)
+NM_NCHITTEST = (NM_FIRST-14)			# uses NMMOUSE struct
+NM_KEYDOWN = (NM_FIRST-15)			# uses NMKEY struct
+NM_RELEASEDCAPTURE = (NM_FIRST-16)
+NM_SETCURSOR = (NM_FIRST-17)			# uses NMMOUSE struct
+NM_CHAR = (NM_FIRST-18)				# uses NMCHAR struct
+MSGF_COMMCTRL_BEGINDRAG = 16896
+MSGF_COMMCTRL_SIZEHEADER = 16897
+MSGF_COMMCTRL_DRAGSELECT = 16898
+MSGF_COMMCTRL_TOOLBARCUST = 16899
+CDRF_DODEFAULT = 0
+CDRF_NEWFONT = 2
+CDRF_SKIPDEFAULT = 4
+CDRF_NOTIFYPOSTPAINT = 16
+CDRF_NOTIFYITEMDRAW = 32
+CDRF_NOTIFYSUBITEMDRAW = 32  # flags are the same, we can distinguish by context
+CDRF_NOTIFYPOSTERASE = 64
+CDDS_PREPAINT = 1
+CDDS_POSTPAINT = 2
+CDDS_PREERASE = 3
+CDDS_POSTERASE = 4
+CDDS_ITEM = 65536
+CDDS_ITEMPREPAINT = (CDDS_ITEM | CDDS_PREPAINT)
+CDDS_ITEMPOSTPAINT = (CDDS_ITEM | CDDS_POSTPAINT)
+CDDS_ITEMPREERASE = (CDDS_ITEM | CDDS_PREERASE)
+CDDS_ITEMPOSTERASE = (CDDS_ITEM | CDDS_POSTERASE)
+CDDS_SUBITEM = 131072
+CDIS_SELECTED = 1
+CDIS_GRAYED = 2
+CDIS_DISABLED = 4
+CDIS_CHECKED = 8
+CDIS_FOCUS = 16
+CDIS_DEFAULT = 32
+CDIS_HOT = 64
+CDIS_MARKED = 128
+CDIS_INDETERMINATE = 256
+CLR_NONE = 0xFFFFFFFFL
+CLR_DEFAULT = 0xFF000000L
+ILC_MASK = 1
+ILC_COLOR = 0
+ILC_COLORDDB = 254
+ILC_COLOR4 = 4
+ILC_COLOR8 = 8
+ILC_COLOR16 = 16
+ILC_COLOR24 = 24
+ILC_COLOR32 = 32
+ILC_PALETTE = 2048      # (not implemented)
+ILD_NORMAL = 0
+ILD_TRANSPARENT = 1
+ILD_MASK = 16
+ILD_IMAGE = 32
+ILD_ROP = 64
+ILD_BLEND25 = 2
+ILD_BLEND50 = 4
+ILD_OVERLAYMASK = 3840
+ILD_SELECTED = ILD_BLEND50
+ILD_FOCUS = ILD_BLEND25
+ILD_BLEND = ILD_BLEND50
+CLR_HILIGHT = CLR_DEFAULT
+ILCF_MOVE = (0)
+ILCF_SWAP = (1)
+WC_HEADERA = "SysHeader32"
+WC_HEADER = WC_HEADERA
+HDS_HORZ = 0
+HDS_BUTTONS = 2
+HDS_HOTTRACK = 4
+HDS_HIDDEN = 8
+HDS_DRAGDROP = 64
+HDS_FULLDRAG = 128
+HDI_WIDTH = 1
+HDI_HEIGHT = HDI_WIDTH
+HDI_TEXT = 2
+HDI_FORMAT = 4
+HDI_LPARAM = 8
+HDI_BITMAP = 16
+HDI_IMAGE = 32
+HDI_DI_SETITEM = 64
+HDI_ORDER = 128
+HDF_LEFT = 0
+HDF_RIGHT = 1
+HDF_CENTER = 2
+HDF_JUSTIFYMASK = 3
+HDF_RTLREADING = 4
+HDF_OWNERDRAW = 32768
+HDF_STRING = 16384
+HDF_BITMAP = 8192
+HDF_BITMAP_ON_RIGHT = 4096
+HDF_IMAGE = 2048
+HDM_GETITEMCOUNT = (HDM_FIRST + 0)
+HDM_INSERTITEMA = (HDM_FIRST + 1)
+HDM_INSERTITEMW = (HDM_FIRST + 10)
+HDM_INSERTITEM = HDM_INSERTITEMA
+HDM_DELETEITEM = (HDM_FIRST + 2)
+HDM_GETITEMA = (HDM_FIRST + 3)
+HDM_GETITEMW = (HDM_FIRST + 11)
+HDM_GETITEM = HDM_GETITEMA
+HDM_SETITEMA = (HDM_FIRST + 4)
+HDM_SETITEMW = (HDM_FIRST + 12)
+HDM_SETITEM = HDM_SETITEMA
+HDM_LAYOUT = (HDM_FIRST + 5)
+HHT_NOWHERE = 1
+HHT_ONHEADER = 2
+HHT_ONDIVIDER = 4
+HHT_ONDIVOPEN = 8
+HHT_ABOVE = 256
+HHT_BELOW = 512
+HHT_TORIGHT = 1024
+HHT_TOLEFT = 2048
+HDM_HITTEST = (HDM_FIRST + 6)
+HDM_GETITEMRECT = (HDM_FIRST + 7)
+HDM_SETIMAGELIST = (HDM_FIRST + 8)
+HDM_GETIMAGELIST = (HDM_FIRST + 9)
+HDM_ORDERTOINDEX = (HDM_FIRST + 15)
+HDM_CREATEDRAGIMAGE = (HDM_FIRST + 16)  # wparam = which item (by index)
+HDM_GETORDERARRAY = (HDM_FIRST + 17)
+HDM_SETORDERARRAY = (HDM_FIRST + 18)
+HDM_SETHOTDIVIDER = (HDM_FIRST + 19)
+HDM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+HDM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+HDN_ITEMCHANGINGA = (HDN_FIRST-0)
+HDN_ITEMCHANGINGW = (HDN_FIRST-20)
+HDN_ITEMCHANGEDA = (HDN_FIRST-1)
+HDN_ITEMCHANGEDW = (HDN_FIRST-21)
+HDN_ITEMCLICKA = (HDN_FIRST-2)
+HDN_ITEMCLICKW = (HDN_FIRST-22)
+HDN_ITEMDBLCLICKA = (HDN_FIRST-3)
+HDN_ITEMDBLCLICKW = (HDN_FIRST-23)
+HDN_DIVIDERDBLCLICKA = (HDN_FIRST-5)
+HDN_DIVIDERDBLCLICKW = (HDN_FIRST-25)
+HDN_BEGINTRACKA = (HDN_FIRST-6)
+HDN_BEGINTRACKW = (HDN_FIRST-26)
+HDN_ENDTRACKA = (HDN_FIRST-7)
+HDN_ENDTRACKW = (HDN_FIRST-27)
+HDN_TRACKA = (HDN_FIRST-8)
+HDN_TRACKW = (HDN_FIRST-28)
+HDN_GETDISPINFOA = (HDN_FIRST-9)
+HDN_GETDISPINFOW = (HDN_FIRST-29)
+HDN_BEGINDRAG = (HDN_FIRST-10)
+HDN_ENDDRAG = (HDN_FIRST-11)
+HDN_ITEMCHANGING = HDN_ITEMCHANGINGA
+HDN_ITEMCHANGED = HDN_ITEMCHANGEDA
+HDN_ITEMCLICK = HDN_ITEMCLICKA
+HDN_ITEMDBLCLICK = HDN_ITEMDBLCLICKA
+HDN_DIVIDERDBLCLICK = HDN_DIVIDERDBLCLICKA
+HDN_BEGINTRACK = HDN_BEGINTRACKA
+HDN_ENDTRACK = HDN_ENDTRACKA
+HDN_TRACK = HDN_TRACKA
+HDN_GETDISPINFO = HDN_GETDISPINFOA
+TOOLBARCLASSNAMEA = "ToolbarWindow32"
+TOOLBARCLASSNAME = TOOLBARCLASSNAMEA
+CMB_MASKED = 2
+TBSTATE_CHECKED = 1
+TBSTATE_PRESSED = 2
+TBSTATE_ENABLED = 4
+TBSTATE_HIDDEN = 8
+TBSTATE_INDETERMINATE = 16
+TBSTATE_WRAP = 32
+TBSTATE_ELLIPSES = 64
+TBSTATE_MARKED = 128
+TBSTYLE_BUTTON = 0
+TBSTYLE_SEP = 1
+TBSTYLE_CHECK = 2
+TBSTYLE_GROUP = 4
+TBSTYLE_CHECKGROUP = (TBSTYLE_GROUP | TBSTYLE_CHECK)
+TBSTYLE_DROPDOWN = 8
+TBSTYLE_AUTOSIZE = 16 # automatically calculate the cx of the button
+TBSTYLE_NOPREFIX = 32 # if this button should not have accel prefix
+TBSTYLE_TOOLTIPS = 256
+TBSTYLE_WRAPABLE = 512
+TBSTYLE_ALTDRAG = 1024
+TBSTYLE_FLAT = 2048
+TBSTYLE_LIST = 4096
+TBSTYLE_CUSTOMERASE = 8192
+TBSTYLE_REGISTERDROP = 16384
+TBSTYLE_TRANSPARENT = 32768
+TBSTYLE_EX_DRAWDDARROWS = 1
+BTNS_BUTTON = TBSTYLE_BUTTON
+BTNS_SEP = TBSTYLE_SEP         # 0x0001
+BTNS_CHECK = TBSTYLE_CHECK       # 0x0002
+BTNS_GROUP = TBSTYLE_GROUP       # 0x0004
+BTNS_CHECKGROUP = TBSTYLE_CHECKGROUP # (TBSTYLE_GROUP | TBSTYLE_CHECK)
+BTNS_DROPDOWN = TBSTYLE_DROPDOWN    # 0x0008
+BTNS_AUTOSIZE = TBSTYLE_AUTOSIZE    # 0x0010; automatically calculate the cx of the button
+BTNS_NOPREFIX = TBSTYLE_NOPREFIX    # 0x0020; this button should not have accel prefix
+BTNS_SHOWTEXT   = 64 # 0x0040              // ignored unless TBSTYLE_EX_MIXEDBUTTONS is set
+BTNS_WHOLEDROPDOWN  = 128 # 0x0080          // draw drop-down arrow, but without split arrow section
+TBCDRF_NOEDGES = 65536  # Don't draw button edges
+TBCDRF_HILITEHOTTRACK = 131072  # Use color of the button bk when hottracked
+TBCDRF_NOOFFSET = 262144  # Don't offset button if pressed
+TBCDRF_NOMARK = 524288  # Don't draw default highlight of image/text for TBSTATE_MARKED
+TBCDRF_NOETCHEDEFFECT = 1048576  # Don't draw etched effect for disabled items
+TB_ENABLEBUTTON = (WM_USER + 1)
+TB_CHECKBUTTON = (WM_USER + 2)
+TB_PRESSBUTTON = (WM_USER + 3)
+TB_HIDEBUTTON = (WM_USER + 4)
+TB_INDETERMINATE = (WM_USER + 5)
+TB_MARKBUTTON = (WM_USER + 6)
+TB_ISBUTTONENABLED = (WM_USER + 9)
+TB_ISBUTTONCHECKED = (WM_USER + 10)
+TB_ISBUTTONPRESSED = (WM_USER + 11)
+TB_ISBUTTONHIDDEN = (WM_USER + 12)
+TB_ISBUTTONINDETERMINATE = (WM_USER + 13)
+TB_ISBUTTONHIGHLIGHTED = (WM_USER + 14)
+TB_SETSTATE = (WM_USER + 17)
+TB_GETSTATE = (WM_USER + 18)
+TB_ADDBITMAP = (WM_USER + 19)
+HINST_COMMCTRL = -1
+IDB_STD_SMALL_COLOR = 0
+IDB_STD_LARGE_COLOR = 1
+IDB_VIEW_SMALL_COLOR = 4
+IDB_VIEW_LARGE_COLOR = 5
+IDB_HIST_SMALL_COLOR = 8
+IDB_HIST_LARGE_COLOR = 9
+STD_CUT = 0
+STD_COPY = 1
+STD_PASTE = 2
+STD_UNDO = 3
+STD_REDOW = 4
+STD_DELETE = 5
+STD_FILENEW = 6
+STD_FILEOPEN = 7
+STD_FILESAVE = 8
+STD_PRINTPRE = 9
+STD_PROPERTIES = 10
+STD_HELP = 11
+STD_FIND = 12
+STD_REPLACE = 13
+STD_PRINT = 14
+VIEW_LARGEICONS = 0
+VIEW_SMALLICONS = 1
+VIEW_LIST = 2
+VIEW_DETAILS = 3
+VIEW_SORTNAME = 4
+VIEW_SORTSIZE = 5
+VIEW_SORTDATE = 6
+VIEW_SORTTYPE = 7
+VIEW_PARENTFOLDER = 8
+VIEW_NETCONNECT = 9
+VIEW_NETDISCONNECT = 10
+VIEW_NEWFOLDER = 11
+VIEW_VIEWMENU = 12
+HIST_BACK = 0
+HIST_FORWARD = 1
+HIST_FAVORITES = 2
+HIST_ADDTOFAVORITES = 3
+HIST_VIEWTREE = 4
+TB_ADDBUTTONSA = (WM_USER + 20)
+TB_INSERTBUTTONA = (WM_USER + 21)
+TB_ADDBUTTONS = (WM_USER + 20)
+TB_INSERTBUTTON = (WM_USER + 21)
+TB_DELETEBUTTON = (WM_USER + 22)
+TB_GETBUTTON = (WM_USER + 23)
+TB_BUTTONCOUNT = (WM_USER + 24)
+TB_COMMANDTOINDEX = (WM_USER + 25)
+TB_SAVERESTOREA = (WM_USER + 26)
+TB_SAVERESTOREW = (WM_USER + 76)
+TB_CUSTOMIZE = (WM_USER + 27)
+TB_ADDSTRINGA = (WM_USER + 28)
+TB_ADDSTRINGW = (WM_USER + 77)
+TB_GETITEMRECT = (WM_USER + 29)
+TB_BUTTONSTRUCTSIZE = (WM_USER + 30)
+TB_SETBUTTONSIZE = (WM_USER + 31)
+TB_SETBITMAPSIZE = (WM_USER + 32)
+TB_AUTOSIZE = (WM_USER + 33)
+TB_GETTOOLTIPS = (WM_USER + 35)
+TB_SETTOOLTIPS = (WM_USER + 36)
+TB_SETPARENT = (WM_USER + 37)
+TB_SETROWS = (WM_USER + 39)
+TB_GETROWS = (WM_USER + 40)
+TB_SETCMDID = (WM_USER + 42)
+TB_CHANGEBITMAP = (WM_USER + 43)
+TB_GETBITMAP = (WM_USER + 44)
+TB_GETBUTTONTEXTA = (WM_USER + 45)
+TB_GETBUTTONTEXTW = (WM_USER + 75)
+TB_REPLACEBITMAP = (WM_USER + 46)
+TB_SETINDENT = (WM_USER + 47)
+TB_SETIMAGELIST = (WM_USER + 48)
+TB_GETIMAGELIST = (WM_USER + 49)
+TB_LOADIMAGES = (WM_USER + 50)
+TB_GETRECT = (WM_USER + 51) # wParam is the Cmd instead of index
+TB_SETHOTIMAGELIST = (WM_USER + 52)
+TB_GETHOTIMAGELIST = (WM_USER + 53)
+TB_SETDISABLEDIMAGELIST = (WM_USER + 54)
+TB_GETDISABLEDIMAGELIST = (WM_USER + 55)
+TB_SETSTYLE = (WM_USER + 56)
+TB_GETSTYLE = (WM_USER + 57)
+TB_GETBUTTONSIZE = (WM_USER + 58)
+TB_SETBUTTONWIDTH = (WM_USER + 59)
+TB_SETMAXTEXTROWS = (WM_USER + 60)
+TB_GETTEXTROWS = (WM_USER + 61)
+TB_GETBUTTONTEXT = TB_GETBUTTONTEXTW
+TB_SAVERESTORE = TB_SAVERESTOREW
+TB_ADDSTRING = TB_ADDSTRINGW
+TB_GETBUTTONTEXT = TB_GETBUTTONTEXTA
+TB_SAVERESTORE = TB_SAVERESTOREA
+TB_ADDSTRING = TB_ADDSTRINGA
+TB_GETOBJECT = (WM_USER + 62)  # wParam == IID, lParam void **ppv
+TB_GETHOTITEM = (WM_USER + 71)
+TB_SETHOTITEM = (WM_USER + 72)  # wParam == iHotItem
+TB_SETANCHORHIGHLIGHT = (WM_USER + 73)  # wParam == TRUE/FALSE
+TB_GETANCHORHIGHLIGHT = (WM_USER + 74)
+TB_MAPACCELERATORA = (WM_USER + 78)  # wParam == ch, lParam int * pidBtn
+TBIMHT_AFTER = 1 # TRUE = insert After iButton, otherwise before
+TBIMHT_BACKGROUND = 2 # TRUE iff missed buttons completely
+TB_GETINSERTMARK = (WM_USER + 79)  # lParam == LPTBINSERTMARK
+TB_SETINSERTMARK = (WM_USER + 80)  # lParam == LPTBINSERTMARK
+TB_INSERTMARKHITTEST = (WM_USER + 81)  # wParam == LPPOINT lParam == LPTBINSERTMARK
+TB_MOVEBUTTON = (WM_USER + 82)
+TB_GETMAXSIZE = (WM_USER + 83)  # lParam == LPSIZE
+TB_SETEXTENDEDSTYLE = (WM_USER + 84)  # For TBSTYLE_EX_*
+TB_GETEXTENDEDSTYLE = (WM_USER + 85)  # For TBSTYLE_EX_*
+TB_GETPADDING = (WM_USER + 86)
+TB_SETPADDING = (WM_USER + 87)
+TB_SETINSERTMARKCOLOR = (WM_USER + 88)
+TB_GETINSERTMARKCOLOR = (WM_USER + 89)
+TB_SETCOLORSCHEME = CCM_SETCOLORSCHEME  # lParam is color scheme
+TB_GETCOLORSCHEME = CCM_GETCOLORSCHEME	# fills in COLORSCHEME pointed to by lParam
+TB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+TB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+TB_MAPACCELERATORW = (WM_USER + 90)  # wParam == ch, lParam int * pidBtn
+TB_MAPACCELERATOR = TB_MAPACCELERATORW
+TB_MAPACCELERATOR = TB_MAPACCELERATORA
+TBBF_LARGE = 1
+TB_GETBITMAPFLAGS = (WM_USER + 41)
+TBIF_IMAGE = 1
+TBIF_TEXT = 2
+TBIF_STATE = 4
+TBIF_STYLE = 8
+TBIF_LPARAM = 16
+TBIF_COMMAND = 32
+TBIF_SIZE = 64
+TB_GETBUTTONINFOW = (WM_USER + 63)
+TB_SETBUTTONINFOW = (WM_USER + 64)
+TB_GETBUTTONINFOA = (WM_USER + 65)
+TB_SETBUTTONINFOA = (WM_USER + 66)
+TB_INSERTBUTTONW = (WM_USER + 67)
+TB_ADDBUTTONSW = (WM_USER + 68)
+TB_HITTEST = (WM_USER + 69)
+TB_SETDRAWTEXTFLAGS = (WM_USER + 70)  # wParam == mask lParam == bit values
+TBN_GETBUTTONINFOA = (TBN_FIRST-0)
+TBN_GETBUTTONINFOW = (TBN_FIRST-20)
+TBN_BEGINDRAG = (TBN_FIRST-1)
+TBN_ENDDRAG = (TBN_FIRST-2)
+TBN_BEGINADJUST = (TBN_FIRST-3)
+TBN_ENDADJUST = (TBN_FIRST-4)
+TBN_RESET = (TBN_FIRST-5)
+TBN_QUERYINSERT = (TBN_FIRST-6)
+TBN_QUERYDELETE = (TBN_FIRST-7)
+TBN_TOOLBARCHANGE = (TBN_FIRST-8)
+TBN_CUSTHELP = (TBN_FIRST-9)
+TBN_DROPDOWN = (TBN_FIRST - 10)
+TBN_GETOBJECT = (TBN_FIRST - 12)
+HICF_OTHER = 0
+HICF_MOUSE = 1          # Triggered by mouse
+HICF_ARROWKEYS = 2          # Triggered by arrow keys
+HICF_ACCELERATOR = 4          # Triggered by accelerator
+HICF_DUPACCEL = 8          # This accelerator is not unique
+HICF_ENTERING = 16          # idOld is invalid
+HICF_LEAVING = 32          # idNew is invalid
+HICF_RESELECT = 64          # hot item reselected
+TBN_HOTITEMCHANGE = (TBN_FIRST - 13)
+TBN_DRAGOUT = (TBN_FIRST - 14) # this is sent when the user clicks down on a button then drags off the button
+TBN_DELETINGBUTTON = (TBN_FIRST - 15) # uses TBNOTIFY
+TBN_GETDISPINFOA = (TBN_FIRST - 16) # Sent when the toolbar needs some display information
+TBN_GETDISPINFOW = (TBN_FIRST - 17) # Sent when the toolbar needs some display information
+TBN_GETINFOTIPA = (TBN_FIRST - 18)
+TBN_GETINFOTIPW = (TBN_FIRST - 19)
+TBN_GETINFOTIP = TBN_GETINFOTIPA
+TBNF_IMAGE = 1
+TBNF_TEXT = 2
+TBNF_DI_SETITEM = 268435456
+TBN_GETDISPINFO = TBN_GETDISPINFOA
+TBDDRET_DEFAULT = 0
+TBDDRET_NODEFAULT = 1
+TBDDRET_TREATPRESSED = 2       # Treat as a standard press button
+TBN_GETBUTTONINFO = TBN_GETBUTTONINFOA
+REBARCLASSNAMEA = "ReBarWindow32"
+REBARCLASSNAME = REBARCLASSNAMEA
+RBIM_IMAGELIST = 1
+RBS_TOOLTIPS = 256
+RBS_VARHEIGHT = 512
+RBS_BANDBORDERS = 1024
+RBS_FIXEDORDER = 2048
+RBS_REGISTERDROP = 4096
+RBS_AUTOSIZE = 8192
+RBS_VERTICALGRIPPER = 16384  # this always has the vertical gripper (default for horizontal mode)
+RBS_DBLCLKTOGGLE = 32768
+RBS_TOOLTIPS = 256
+RBS_VARHEIGHT = 512
+RBS_BANDBORDERS = 1024
+RBS_FIXEDORDER = 2048
+RBBS_BREAK = 1  # break to new line
+RBBS_FIXEDSIZE = 2  # band can't be sized
+RBBS_CHILDEDGE = 4  # edge around top & bottom of child window
+RBBS_HIDDEN = 8  # don't show
+RBBS_NOVERT = 16  # don't show when vertical
+RBBS_FIXEDBMP = 32  # bitmap doesn't move during band resize
+RBBS_VARIABLEHEIGHT = 64  # allow autosizing of this child vertically
+RBBS_GRIPPERALWAYS = 128  # always show the gripper
+RBBS_NOGRIPPER = 256  # never show the gripper
+RBBIM_STYLE = 1
+RBBIM_COLORS = 2
+RBBIM_TEXT = 4
+RBBIM_IMAGE = 8
+RBBIM_CHILD = 16
+RBBIM_CHILDSIZE = 32
+RBBIM_SIZE = 64
+RBBIM_BACKGROUND = 128
+RBBIM_ID = 256
+RBBIM_IDEALSIZE = 512
+RBBIM_LPARAM = 1024
+RB_INSERTBANDA = (WM_USER +  1)
+RB_DELETEBAND = (WM_USER +  2)
+RB_GETBARINFO = (WM_USER +  3)
+RB_SETBARINFO = (WM_USER +  4)
+RB_GETBANDINFO = (WM_USER +  5)
+RB_SETBANDINFOA = (WM_USER +  6)
+RB_SETPARENT = (WM_USER +  7)
+RB_HITTEST = (WM_USER +  8)
+RB_GETRECT = (WM_USER +  9)
+RB_INSERTBANDW = (WM_USER +  10)
+RB_SETBANDINFOW = (WM_USER +  11)
+RB_GETBANDCOUNT = (WM_USER +  12)
+RB_GETROWCOUNT = (WM_USER +  13)
+RB_GETROWHEIGHT = (WM_USER +  14)
+RB_IDTOINDEX = (WM_USER +  16) # wParam == id
+RB_GETTOOLTIPS = (WM_USER +  17)
+RB_SETTOOLTIPS = (WM_USER +  18)
+RB_SETBKCOLOR = (WM_USER +  19) # sets the default BK color
+RB_GETBKCOLOR = (WM_USER +  20) # defaults to CLR_NONE
+RB_SETTEXTCOLOR = (WM_USER +  21)
+RB_GETTEXTCOLOR = (WM_USER +  22) # defaults to 0x00000000
+RB_SIZETORECT = (WM_USER +  23) # resize the rebar/break bands and such to this rect (lparam)
+RB_SETCOLORSCHEME = CCM_SETCOLORSCHEME  # lParam is color scheme
+RB_GETCOLORSCHEME = CCM_GETCOLORSCHEME  # fills in COLORSCHEME pointed to by lParam
+RB_INSERTBAND = RB_INSERTBANDA
+RB_SETBANDINFO = RB_SETBANDINFOA
+RB_BEGINDRAG = (WM_USER + 24)
+RB_ENDDRAG = (WM_USER + 25)
+RB_DRAGMOVE = (WM_USER + 26)
+RB_GETBARHEIGHT = (WM_USER + 27)
+RB_GETBANDINFOW = (WM_USER + 28)
+RB_GETBANDINFOA = (WM_USER + 29)
+RB_GETBANDINFO = RB_GETBANDINFOA
+RB_MINIMIZEBAND = (WM_USER + 30)
+RB_MAXIMIZEBAND = (WM_USER + 31)
+RB_GETDROPTARGET = (CCM_GETDROPTARGET)
+RB_GETBANDBORDERS = (WM_USER + 34)  # returns in lparam = lprc the amount of edges added to band wparam
+RB_SHOWBAND = (WM_USER + 35)      # show/hide band
+RB_SETPALETTE = (WM_USER + 37)
+RB_GETPALETTE = (WM_USER + 38)
+RB_MOVEBAND = (WM_USER + 39)
+RB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+RB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+RBN_HEIGHTCHANGE = (RBN_FIRST - 0)
+RBN_GETOBJECT = (RBN_FIRST - 1)
+RBN_LAYOUTCHANGED = (RBN_FIRST - 2)
+RBN_AUTOSIZE = (RBN_FIRST - 3)
+RBN_BEGINDRAG = (RBN_FIRST - 4)
+RBN_ENDDRAG = (RBN_FIRST - 5)
+RBN_DELETINGBAND = (RBN_FIRST - 6)     # Uses NMREBAR
+RBN_DELETEDBAND = (RBN_FIRST - 7)     # Uses NMREBAR
+RBN_CHILDSIZE = (RBN_FIRST - 8)
+RBNM_ID = 1
+RBNM_STYLE = 2
+RBNM_LPARAM = 4
+RBHT_NOWHERE = 1
+RBHT_CAPTION = 2
+RBHT_CLIENT = 3
+RBHT_GRABBER = 4
+TOOLTIPS_CLASSA = "tooltips_class32"
+TOOLTIPS_CLASS = TOOLTIPS_CLASSA
+TTS_ALWAYSTIP = 1
+TTS_NOPREFIX = 2
+TTF_IDISHWND = 1
+TTF_CENTERTIP = 2
+TTF_RTLREADING = 4
+TTF_SUBCLASS = 16
+TTF_TRACK = 32
+TTF_ABSOLUTE = 128
+TTF_TRANSPARENT = 256
+TTF_DI_SETITEM = 32768       # valid only on the TTN_NEEDTEXT callback
+TTDT_AUTOMATIC = 0
+TTDT_RESHOW = 1
+TTDT_AUTOPOP = 2
+TTDT_INITIAL = 3
+TTM_ACTIVATE = (WM_USER + 1)
+TTM_SETDELAYTIME = (WM_USER + 3)
+TTM_ADDTOOLA = (WM_USER + 4)
+TTM_ADDTOOLW = (WM_USER + 50)
+TTM_DELTOOLA = (WM_USER + 5)
+TTM_DELTOOLW = (WM_USER + 51)
+TTM_NEWTOOLRECTA = (WM_USER + 6)
+TTM_NEWTOOLRECTW = (WM_USER + 52)
+TTM_RELAYEVENT = (WM_USER + 7)
+TTM_GETTOOLINFOA = (WM_USER + 8)
+TTM_GETTOOLINFOW = (WM_USER + 53)
+TTM_SETTOOLINFOA = (WM_USER + 9)
+TTM_SETTOOLINFOW = (WM_USER + 54)
+TTM_HITTESTA = (WM_USER +10)
+TTM_HITTESTW = (WM_USER +55)
+TTM_GETTEXTA = (WM_USER +11)
+TTM_GETTEXTW = (WM_USER +56)
+TTM_UPDATETIPTEXTA = (WM_USER +12)
+TTM_UPDATETIPTEXTW = (WM_USER +57)
+TTM_GETTOOLCOUNT = (WM_USER +13)
+TTM_ENUMTOOLSA = (WM_USER +14)
+TTM_ENUMTOOLSW = (WM_USER +58)
+TTM_GETCURRENTTOOLA = (WM_USER + 15)
+TTM_GETCURRENTTOOLW = (WM_USER + 59)
+TTM_WINDOWFROMPOINT = (WM_USER + 16)
+TTM_TRACKACTIVATE = (WM_USER + 17)  # wParam = TRUE (start) / FALSE (end); lParam = LPTOOLINFO
+TTM_TRACKPOSITION = (WM_USER + 18)  # lParam = dwPos
+TTM_SETTIPBKCOLOR = (WM_USER + 19)
+TTM_SETTIPTEXTCOLOR = (WM_USER + 20)
+TTM_GETDELAYTIME = (WM_USER + 21)
+TTM_GETTIPBKCOLOR = (WM_USER + 22)
+TTM_GETTIPTEXTCOLOR = (WM_USER + 23)
+TTM_SETMAXTIPWIDTH = (WM_USER + 24)
+TTM_GETMAXTIPWIDTH = (WM_USER + 25)
+TTM_SETMARGIN = (WM_USER + 26)  # lParam = lprc
+TTM_GETMARGIN = (WM_USER + 27)  # lParam = lprc
+TTM_POP = (WM_USER + 28)
+TTM_UPDATE = (WM_USER + 29)
+TTM_ADDTOOL = TTM_ADDTOOLA
+TTM_DELTOOL = TTM_DELTOOLA
+TTM_NEWTOOLRECT = TTM_NEWTOOLRECTA
+TTM_GETTOOLINFO = TTM_GETTOOLINFOA
+TTM_SETTOOLINFO = TTM_SETTOOLINFOA
+TTM_HITTEST = TTM_HITTESTA
+TTM_GETTEXT = TTM_GETTEXTA
+TTM_UPDATETIPTEXT = TTM_UPDATETIPTEXTA
+TTM_ENUMTOOLS = TTM_ENUMTOOLSA
+TTM_GETCURRENTTOOL = TTM_GETCURRENTTOOLA
+TTN_GETDISPINFOA = (TTN_FIRST - 0)
+TTN_GETDISPINFOW = (TTN_FIRST - 10)
+TTN_SHOW = (TTN_FIRST - 1)
+TTN_POP = (TTN_FIRST - 2)
+TTN_GETDISPINFO = TTN_GETDISPINFOA
+TTN_NEEDTEXT = TTN_GETDISPINFO
+TTN_NEEDTEXTA = TTN_GETDISPINFOA
+TTN_NEEDTEXTW = TTN_GETDISPINFOW
+SBARS_SIZEGRIP = 256
+SBARS_TOOLTIPS = 2048
+STATUSCLASSNAMEA = "msctls_statusbar32"
+STATUSCLASSNAME = STATUSCLASSNAMEA
+SB_SETTEXTA = (WM_USER+1)
+SB_SETTEXTW = (WM_USER+11)
+SB_GETTEXTA = (WM_USER+2)
+SB_GETTEXTW = (WM_USER+13)
+SB_GETTEXTLENGTHA = (WM_USER+3)
+SB_GETTEXTLENGTHW = (WM_USER+12)
+SB_GETTEXT = SB_GETTEXTA
+SB_SETTEXT = SB_SETTEXTA
+SB_GETTEXTLENGTH = SB_GETTEXTLENGTHA
+SB_SETPARTS = (WM_USER+4)
+SB_GETPARTS = (WM_USER+6)
+SB_GETBORDERS = (WM_USER+7)
+SB_SETMINHEIGHT = (WM_USER+8)
+SB_SIMPLE = (WM_USER+9)
+SB_GETRECT = (WM_USER+10)
+SB_ISSIMPLE = (WM_USER+14)
+SB_SETICON = (WM_USER+15)
+SB_SETTIPTEXTA = (WM_USER+16)
+SB_SETTIPTEXTW = (WM_USER+17)
+SB_GETTIPTEXTA = (WM_USER+18)
+SB_GETTIPTEXTW = (WM_USER+19)
+SB_GETICON = (WM_USER+20)
+SB_SETTIPTEXT = SB_SETTIPTEXTA
+SB_GETTIPTEXT = SB_GETTIPTEXTA
+SB_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+SB_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+SBT_OWNERDRAW = 4096
+SBT_NOBORDERS = 256
+SBT_POPOUT = 512
+SBT_RTLREADING = 1024
+SBT_NOTABPARSING = 2048
+SBT_TOOLTIPS = 2048
+SB_SETBKCOLOR = CCM_SETBKCOLOR      # lParam = bkColor
+SBN_SIMPLEMODECHANGE = (SBN_FIRST - 0)
+TRACKBAR_CLASSA = "msctls_trackbar32"
+TRACKBAR_CLASS = TRACKBAR_CLASSA
+TBS_AUTOTICKS = 1
+TBS_VERT = 2
+TBS_HORZ = 0
+TBS_TOP = 4
+TBS_BOTTOM = 0
+TBS_LEFT = 4
+TBS_RIGHT = 0
+TBS_BOTH = 8
+TBS_NOTICKS = 16
+TBS_ENABLESELRANGE = 32
+TBS_FIXEDLENGTH = 64
+TBS_NOTHUMB = 128
+TBS_TOOLTIPS = 256
+TBM_GETPOS = (WM_USER)
+TBM_GETRANGEMIN = (WM_USER+1)
+TBM_GETRANGEMAX = (WM_USER+2)
+TBM_GETTIC = (WM_USER+3)
+TBM_SETTIC = (WM_USER+4)
+TBM_SETPOS = (WM_USER+5)
+TBM_SETRANGE = (WM_USER+6)
+TBM_SETRANGEMIN = (WM_USER+7)
+TBM_SETRANGEMAX = (WM_USER+8)
+TBM_CLEARTICS = (WM_USER+9)
+TBM_SETSEL = (WM_USER+10)
+TBM_SETSELSTART = (WM_USER+11)
+TBM_SETSELEND = (WM_USER+12)
+TBM_GETPTICS = (WM_USER+14)
+TBM_GETTICPOS = (WM_USER+15)
+TBM_GETNUMTICS = (WM_USER+16)
+TBM_GETSELSTART = (WM_USER+17)
+TBM_GETSELEND = (WM_USER+18)
+TBM_CLEARSEL = (WM_USER+19)
+TBM_SETTICFREQ = (WM_USER+20)
+TBM_SETPAGESIZE = (WM_USER+21)
+TBM_GETPAGESIZE = (WM_USER+22)
+TBM_SETLINESIZE = (WM_USER+23)
+TBM_GETLINESIZE = (WM_USER+24)
+TBM_GETTHUMBRECT = (WM_USER+25)
+TBM_GETCHANNELRECT = (WM_USER+26)
+TBM_SETTHUMBLENGTH = (WM_USER+27)
+TBM_GETTHUMBLENGTH = (WM_USER+28)
+TBM_SETTOOLTIPS = (WM_USER+29)
+TBM_GETTOOLTIPS = (WM_USER+30)
+TBM_SETTIPSIDE = (WM_USER+31)
+TBTS_TOP = 0
+TBTS_LEFT = 1
+TBTS_BOTTOM = 2
+TBTS_RIGHT = 3
+TBM_SETBUDDY = (WM_USER+32) # wparam = BOOL fLeft; (or right)
+TBM_GETBUDDY = (WM_USER+33) # wparam = BOOL fLeft; (or right)
+TBM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+TBM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+TB_LINEUP = 0
+TB_LINEDOWN = 1
+TB_PAGEUP = 2
+TB_PAGEDOWN = 3
+TB_THUMBPOSITION = 4
+TB_THUMBTRACK = 5
+TB_TOP = 6
+TB_BOTTOM = 7
+TB_ENDTRACK = 8
+TBCD_TICS = 1
+TBCD_THUMB = 2
+TBCD_CHANNEL = 3
+DL_BEGINDRAG = (WM_USER+133)
+DL_DRAGGING = (WM_USER+134)
+DL_DROPPED = (WM_USER+135)
+DL_CANCELDRAG = (WM_USER+136)
+DL_CURSORSET = 0
+DL_STOPCURSOR = 1
+DL_COPYCURSOR = 2
+DL_MOVECURSOR = 3
+DRAGLISTMSGSTRING = "commctrl_DragListMsg"
+UPDOWN_CLASSA = "msctls_updown32"
+UPDOWN_CLASS = UPDOWN_CLASSA
+UD_MAXVAL = 32767
+UD_MINVAL = (-UD_MAXVAL)
+UDS_WRAP = 1
+UDS_SETBUDDYINT = 2
+UDS_ALIGNRIGHT = 4
+UDS_ALIGNLEFT = 8
+UDS_AUTOBUDDY = 16
+UDS_ARROWKEYS = 32
+UDS_HORZ = 64
+UDS_NOTHOUSANDS = 128
+UDS_HOTTRACK = 256
+UDM_SETRANGE = (WM_USER+101)
+UDM_GETRANGE = (WM_USER+102)
+UDM_SETPOS = (WM_USER+103)
+UDM_GETPOS = (WM_USER+104)
+UDM_SETBUDDY = (WM_USER+105)
+UDM_GETBUDDY = (WM_USER+106)
+UDM_SETACCEL = (WM_USER+107)
+UDM_GETACCEL = (WM_USER+108)
+UDM_SETBASE = (WM_USER+109)
+UDM_GETBASE = (WM_USER+110)
+UDM_SETRANGE32 = (WM_USER+111)
+UDM_GETRANGE32 = (WM_USER+112) # wParam & lParam are LPINT
+UDM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+UDM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+UDN_DELTAPOS = (UDN_FIRST - 1)
+PROGRESS_CLASSA = "msctls_progress32"
+PROGRESS_CLASS = PROGRESS_CLASSA
+PBS_SMOOTH = 1
+PBS_VERTICAL = 4
+PBM_SETRANGE = (WM_USER+1)
+PBM_SETPOS = (WM_USER+2)
+PBM_DELTAPOS = (WM_USER+3)
+PBM_SETSTEP = (WM_USER+4)
+PBM_STEPIT = (WM_USER+5)
+PBM_SETRANGE32 = (WM_USER+6)  # lParam = high, wParam = low
+PBM_GETRANGE = (WM_USER+7)  # wParam = return (TRUE ? low : high). lParam = PPBRANGE or NULL
+PBM_GETPOS = (WM_USER+8)
+PBM_SETBARCOLOR = (WM_USER+9)		# lParam = bar color
+PBM_SETBKCOLOR = CCM_SETBKCOLOR  # lParam = bkColor
+HOTKEYF_SHIFT = 1
+HOTKEYF_CONTROL = 2
+HOTKEYF_ALT = 4
+HOTKEYF_EXT = 128
+HOTKEYF_EXT = 8
+HKCOMB_NONE = 1
+HKCOMB_S = 2
+HKCOMB_C = 4
+HKCOMB_A = 8
+HKCOMB_SC = 16
+HKCOMB_SA = 32
+HKCOMB_CA = 64
+HKCOMB_SCA = 128
+HKM_SETHOTKEY = (WM_USER+1)
+HKM_GETHOTKEY = (WM_USER+2)
+HKM_SETRULES = (WM_USER+3)
+HOTKEY_CLASSA = "msctls_hotkey32"
+HOTKEY_CLASS = HOTKEY_CLASSA
+CCS_TOP = 0x00000001L
+CCS_NOMOVEY = 0x00000002L
+CCS_BOTTOM = 0x00000003L
+CCS_NORESIZE = 0x00000004L
+CCS_NOPARENTALIGN = 0x00000008L
+CCS_ADJUSTABLE = 0x00000020L
+CCS_NODIVIDER = 0x00000040L
+CCS_VERT = 0x00000080L
+CCS_LEFT = (CCS_VERT | CCS_TOP)
+CCS_RIGHT = (CCS_VERT | CCS_BOTTOM)
+CCS_NOMOVEX = (CCS_VERT | CCS_NOMOVEY)
+WC_LISTVIEWA = "SysListView32"
+WC_LISTVIEW = WC_LISTVIEWA
+LVS_ICON = 0
+LVS_REPORT = 1
+LVS_SMALLICON = 2
+LVS_LIST = 3
+LVS_TYPEMASK = 3
+LVS_SINGLESEL = 4
+LVS_SHOWSELALWAYS = 8
+LVS_SORTASCENDING = 16
+LVS_SORTDESCENDING = 32
+LVS_SHAREIMAGELISTS = 64
+LVS_NOLABELWRAP = 128
+LVS_AUTOARRANGE = 256
+LVS_EDITLABELS = 512
+LVS_OWNERDATA = 4096
+LVS_NOSCROLL = 8192
+LVS_TYPESTYLEMASK = 64512
+LVS_ALIGNTOP = 0
+LVS_ALIGNLEFT = 2048
+LVS_ALIGNMASK = 3072
+LVS_OWNERDRAWFIXED = 1024
+LVS_NOCOLUMNHEADER = 16384
+LVS_NOSORTHEADER = 32768
+LVM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+LVM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+LVM_GETBKCOLOR = (LVM_FIRST + 0)
+LVM_SETBKCOLOR = (LVM_FIRST + 1)
+LVM_GETIMAGELIST = (LVM_FIRST + 2)
+LVSIL_NORMAL = 0
+LVSIL_SMALL = 1
+LVSIL_STATE = 2
+LVM_SETIMAGELIST = (LVM_FIRST + 3)
+LVM_GETITEMCOUNT = (LVM_FIRST + 4)
+LVIF_TEXT = 1
+LVIF_IMAGE = 2
+LVIF_PARAM = 4
+LVIF_STATE = 8
+LVIF_INDENT = 16
+LVIF_NORECOMPUTE = 2048
+LVIS_FOCUSED = 1
+LVIS_SELECTED = 2
+LVIS_CUT = 4
+LVIS_DROPHILITED = 8
+LVIS_ACTIVATING = 32
+LVIS_OVERLAYMASK = 3840
+LVIS_STATEIMAGEMASK = 61440
+I_INDENTCALLBACK = (-1)
+LPSTR_TEXTCALLBACKA = -1
+LPSTR_TEXTCALLBACK = LPSTR_TEXTCALLBACKA
+I_IMAGECALLBACK = (-1)
+LVM_GETITEMA = (LVM_FIRST + 5)
+LVM_GETITEMW = (LVM_FIRST + 75)
+LVM_GETITEM = LVM_GETITEMW
+LVM_GETITEM = LVM_GETITEMA
+LVM_SETITEMA = (LVM_FIRST + 6)
+LVM_SETITEMW = (LVM_FIRST + 76)
+LVM_SETITEM = LVM_SETITEMW
+LVM_SETITEM = LVM_SETITEMA
+LVM_INSERTITEMA = (LVM_FIRST + 7)
+LVM_INSERTITEMW = (LVM_FIRST + 77)
+LVM_INSERTITEM = LVM_INSERTITEMA
+LVM_DELETEITEM = (LVM_FIRST + 8)
+LVM_DELETEALLITEMS = (LVM_FIRST + 9)
+LVM_GETCALLBACKMASK = (LVM_FIRST + 10)
+LVM_SETCALLBACKMASK = (LVM_FIRST + 11)
+LVNI_ALL = 0
+LVNI_FOCUSED = 1
+LVNI_SELECTED = 2
+LVNI_CUT = 4
+LVNI_DROPHILITED = 8
+LVNI_ABOVE = 256
+LVNI_BELOW = 512
+LVNI_TOLEFT = 1024
+LVNI_TORIGHT = 2048
+LVM_GETNEXTITEM = (LVM_FIRST + 12)
+LVFI_PARAM = 1
+LVFI_STRING = 2
+LVFI_PARTIAL = 8
+LVFI_WRAP = 32
+LVFI_NEARESTXY = 64
+LVM_FINDITEMA = (LVM_FIRST + 13)
+LVM_FINDITEMW = (LVM_FIRST + 83)
+LVM_FINDITEM = LVM_FINDITEMA
+LVIR_BOUNDS = 0
+LVIR_ICON = 1
+LVIR_LABEL = 2
+LVIR_SELECTBOUNDS = 3
+LVM_GETITEMRECT = (LVM_FIRST + 14)
+LVM_SETITEMPOSITION = (LVM_FIRST + 15)
+LVM_GETITEMPOSITION = (LVM_FIRST + 16)
+LVM_GETSTRINGWIDTHA = (LVM_FIRST + 17)
+LVM_GETSTRINGWIDTHW = (LVM_FIRST + 87)
+LVM_GETSTRINGWIDTH = LVM_GETSTRINGWIDTHA
+LVHT_NOWHERE = 1
+LVHT_ONITEMICON = 2
+LVHT_ONITEMLABEL = 4
+LVHT_ONITEMSTATEICON = 8
+LVHT_ONITEM = (LVHT_ONITEMICON | LVHT_ONITEMLABEL | LVHT_ONITEMSTATEICON)
+LVHT_ABOVE = 8
+LVHT_BELOW = 16
+LVHT_TORIGHT = 32
+LVHT_TOLEFT = 64
+LVM_HITTEST = (LVM_FIRST + 18)
+LVM_ENSUREVISIBLE = (LVM_FIRST + 19)
+LVM_SCROLL = (LVM_FIRST + 20)
+LVM_REDRAWITEMS = (LVM_FIRST + 21)
+LVA_DEFAULT = 0
+LVA_ALIGNLEFT = 1
+LVA_ALIGNTOP = 2
+LVA_SNAPTOGRID = 5
+LVM_ARRANGE = (LVM_FIRST + 22)
+LVM_EDITLABELA = (LVM_FIRST + 23)
+LVM_EDITLABELW = (LVM_FIRST + 118)
+LVM_EDITLABEL = LVM_EDITLABELW
+LVM_EDITLABEL = LVM_EDITLABELA
+LVM_GETEDITCONTROL = (LVM_FIRST + 24)
+LVCF_FMT = 1
+LVCF_WIDTH = 2
+LVCF_TEXT = 4
+LVCF_SUBITEM = 8
+LVCF_IMAGE = 16
+LVCF_ORDER = 32
+LVCFMT_LEFT = 0
+LVCFMT_RIGHT = 1
+LVCFMT_CENTER = 2
+LVCFMT_JUSTIFYMASK = 3
+LVCFMT_IMAGE = 2048
+LVCFMT_BITMAP_ON_RIGHT = 4096
+LVCFMT_COL_HAS_IMAGES = 32768
+LVM_GETCOLUMNA = (LVM_FIRST + 25)
+LVM_GETCOLUMNW = (LVM_FIRST + 95)
+LVM_GETCOLUMN = LVM_GETCOLUMNA
+LVM_SETCOLUMNA = (LVM_FIRST + 26)
+LVM_SETCOLUMNW = (LVM_FIRST + 96)
+LVM_SETCOLUMN = LVM_SETCOLUMNA
+LVM_INSERTCOLUMNA = (LVM_FIRST + 27)
+LVM_INSERTCOLUMNW = (LVM_FIRST + 97)
+LVM_INSERTCOLUMN = LVM_INSERTCOLUMNA
+LVM_DELETECOLUMN = (LVM_FIRST + 28)
+LVM_GETCOLUMNWIDTH = (LVM_FIRST + 29)
+LVSCW_AUTOSIZE = -1
+LVSCW_AUTOSIZE_USEHEADER = -2
+LVM_SETCOLUMNWIDTH = (LVM_FIRST + 30)
+LVM_GETHEADER = (LVM_FIRST + 31)
+LVM_CREATEDRAGIMAGE = (LVM_FIRST + 33)
+LVM_GETVIEWRECT = (LVM_FIRST + 34)
+LVM_GETTEXTCOLOR = (LVM_FIRST + 35)
+LVM_SETTEXTCOLOR = (LVM_FIRST + 36)
+LVM_GETTEXTBKCOLOR = (LVM_FIRST + 37)
+LVM_SETTEXTBKCOLOR = (LVM_FIRST + 38)
+LVM_GETTOPINDEX = (LVM_FIRST + 39)
+LVM_GETCOUNTPERPAGE = (LVM_FIRST + 40)
+LVM_GETORIGIN = (LVM_FIRST + 41)
+LVM_UPDATE = (LVM_FIRST + 42)
+LVM_SETITEMSTATE = (LVM_FIRST + 43)
+LVM_GETITEMSTATE = (LVM_FIRST + 44)
+LVM_GETITEMTEXTA = (LVM_FIRST + 45)
+LVM_GETITEMTEXTW = (LVM_FIRST + 115)
+LVM_GETITEMTEXT = LVM_GETITEMTEXTW
+LVM_GETITEMTEXT = LVM_GETITEMTEXTA
+LVM_SETITEMTEXTA = (LVM_FIRST + 46)
+LVM_SETITEMTEXTW = (LVM_FIRST + 116)
+LVM_SETITEMTEXT = LVM_SETITEMTEXTW
+LVM_SETITEMTEXT = LVM_SETITEMTEXTA
+LVSICF_NOINVALIDATEALL = 1
+LVSICF_NOSCROLL = 2
+LVM_SETITEMCOUNT = (LVM_FIRST + 47)
+LVM_SORTITEMS = (LVM_FIRST + 48)
+LVM_SETITEMPOSITION32 = (LVM_FIRST + 49)
+LVM_GETSELECTEDCOUNT = (LVM_FIRST + 50)
+LVM_GETITEMSPACING = (LVM_FIRST + 51)
+LVM_GETISEARCHSTRINGA = (LVM_FIRST + 52)
+LVM_GETISEARCHSTRINGW = (LVM_FIRST + 117)
+LVM_GETISEARCHSTRING = LVM_GETISEARCHSTRINGA
+LVM_SETICONSPACING = (LVM_FIRST + 53)
+LVM_SETEXTENDEDLISTVIEWSTYLE = (LVM_FIRST + 54)   # optional wParam == mask
+LVM_GETEXTENDEDLISTVIEWSTYLE = (LVM_FIRST + 55)
+LVS_EX_GRIDLINES = 1
+LVS_EX_SUBITEMIMAGES = 2
+LVS_EX_CHECKBOXES = 4
+LVS_EX_TRACKSELECT = 8
+LVS_EX_HEADERDRAGDROP = 16
+LVS_EX_FULLROWSELECT = 32 # applies to report mode only
+LVS_EX_ONECLICKACTIVATE = 64
+LVS_EX_TWOCLICKACTIVATE = 128
+LVS_EX_FLATSB = 256
+LVS_EX_REGIONAL = 512
+LVS_EX_INFOTIP = 1024 # listview does InfoTips for you
+LVS_EX_UNDERLINEHOT = 2048
+LVS_EX_UNDERLINECOLD = 4096
+LVS_EX_MULTIWORKAREAS = 8192
+LVM_GETSUBITEMRECT = (LVM_FIRST + 56)
+LVM_SUBITEMHITTEST = (LVM_FIRST + 57)
+LVM_SETCOLUMNORDERARRAY = (LVM_FIRST + 58)
+LVM_GETCOLUMNORDERARRAY = (LVM_FIRST + 59)
+LVM_SETHOTITEM = (LVM_FIRST + 60)
+LVM_GETHOTITEM = (LVM_FIRST + 61)
+LVM_SETHOTCURSOR = (LVM_FIRST + 62)
+LVM_GETHOTCURSOR = (LVM_FIRST + 63)
+LVM_APPROXIMATEVIEWRECT = (LVM_FIRST + 64)
+LV_MAX_WORKAREAS = 16
+LVM_SETWORKAREAS = (LVM_FIRST + 65)
+LVM_GETWORKAREAS = (LVM_FIRST + 70)
+LVM_GETNUMBEROFWORKAREAS = (LVM_FIRST + 73)
+LVM_GETSELECTIONMARK = (LVM_FIRST + 66)
+LVM_SETSELECTIONMARK = (LVM_FIRST + 67)
+LVM_SETHOVERTIME = (LVM_FIRST + 71)
+LVM_GETHOVERTIME = (LVM_FIRST + 72)
+LVM_SETTOOLTIPS = (LVM_FIRST + 74)
+LVM_GETTOOLTIPS = (LVM_FIRST + 78)
+LVBKIF_SOURCE_NONE = 0
+LVBKIF_SOURCE_HBITMAP = 1
+LVBKIF_SOURCE_URL = 2
+LVBKIF_SOURCE_MASK = 3
+LVBKIF_STYLE_NORMAL = 0
+LVBKIF_STYLE_TILE = 16
+LVBKIF_STYLE_MASK = 16
+LVM_SETBKIMAGEA = (LVM_FIRST + 68)
+LVM_SETBKIMAGEW = (LVM_FIRST + 138)
+LVM_GETBKIMAGEA = (LVM_FIRST + 69)
+LVM_GETBKIMAGEW = (LVM_FIRST + 139)
+LVKF_ALT = 1
+LVKF_CONTROL = 2
+LVKF_SHIFT = 4
+LVN_ITEMCHANGING = (LVN_FIRST-0)
+LVN_ITEMCHANGED = (LVN_FIRST-1)
+LVN_INSERTITEM = (LVN_FIRST-2)
+LVN_DELETEITEM = (LVN_FIRST-3)
+LVN_DELETEALLITEMS = (LVN_FIRST-4)
+LVN_BEGINLABELEDITA = (LVN_FIRST-5)
+LVN_BEGINLABELEDITW = (LVN_FIRST-75)
+LVN_ENDLABELEDITA = (LVN_FIRST-6)
+LVN_ENDLABELEDITW = (LVN_FIRST-76)
+LVN_COLUMNCLICK = (LVN_FIRST-8)
+LVN_BEGINDRAG = (LVN_FIRST-9)
+LVN_BEGINRDRAG = (LVN_FIRST-11)
+LVN_ODCACHEHINT = (LVN_FIRST-13)
+LVN_ODFINDITEMA = (LVN_FIRST-52)
+LVN_ODFINDITEMW = (LVN_FIRST-79)
+LVN_ITEMACTIVATE = (LVN_FIRST-14)
+LVN_ODSTATECHANGED = (LVN_FIRST-15)
+LVN_ODFINDITEM = LVN_ODFINDITEMA
+LVN_HOTTRACK = (LVN_FIRST-21)
+LVN_GETDISPINFOA = (LVN_FIRST-50)
+LVN_GETDISPINFOW = (LVN_FIRST-77)
+LVN_SETDISPINFOA = (LVN_FIRST-51)
+LVN_SETDISPINFOW = (LVN_FIRST-78)
+LVN_BEGINLABELEDIT = LVN_BEGINLABELEDITA
+LVN_ENDLABELEDIT = LVN_ENDLABELEDITA
+LVN_GETDISPINFO = LVN_GETDISPINFOA
+LVN_SETDISPINFO = LVN_SETDISPINFOA
+LVIF_DI_SETITEM = 4096
+LVN_KEYDOWN = (LVN_FIRST-55)
+LVN_MARQUEEBEGIN = (LVN_FIRST-56)
+LVGIT_UNFOLDED = 1
+LVN_GETINFOTIPA = (LVN_FIRST-57)
+LVN_GETINFOTIPW = (LVN_FIRST-58)
+LVN_GETINFOTIP = LVN_GETINFOTIPA
+WC_TREEVIEWA = "SysTreeView32"
+WC_TREEVIEW = WC_TREEVIEWA
+TVS_HASBUTTONS = 1
+TVS_HASLINES = 2
+TVS_LINESATROOT = 4
+TVS_EDITLABELS = 8
+TVS_DISABLEDRAGDROP = 16
+TVS_SHOWSELALWAYS = 32
+TVS_RTLREADING = 64
+TVS_NOTOOLTIPS = 128
+TVS_CHECKBOXES = 256
+TVS_TRACKSELECT = 512
+TVS_SINGLEEXPAND = 1024
+TVS_INFOTIP = 2048
+TVS_FULLROWSELECT = 4096
+TVS_NOSCROLL = 8192
+TVS_NONEVENHEIGHT = 16384
+TVIF_TEXT = 1
+TVIF_IMAGE = 2
+TVIF_PARAM = 4
+TVIF_STATE = 8
+TVIF_HANDLE = 16
+TVIF_SELECTEDIMAGE = 32
+TVIF_CHILDREN = 64
+TVIF_INTEGRAL = 128
+TVIS_SELECTED = 2
+TVIS_CUT = 4
+TVIS_DROPHILITED = 8
+TVIS_BOLD = 16
+TVIS_EXPANDED = 32
+TVIS_EXPANDEDONCE = 64
+TVIS_EXPANDPARTIAL = 128
+TVIS_OVERLAYMASK = 3840
+TVIS_STATEIMAGEMASK = 61440
+TVIS_USERMASK = 61440
+I_CHILDRENCALLBACK = (-1)
+TVI_ROOT = -65536
+TVI_FIRST = -65535
+TVI_LAST = -65534
+TVI_SORT = -65533
+TVM_INSERTITEMA = (TV_FIRST + 0)
+TVM_INSERTITEMW = (TV_FIRST + 50)
# h2py generated both branches of the UNICODE #ifdef; the later ANSI binding
# is the effective value of TVM_INSERTITEM in this module.
TVM_INSERTITEM = TVM_INSERTITEMW
TVM_INSERTITEM = TVM_INSERTITEMA
+TVM_DELETEITEM = (TV_FIRST + 1)
+TVM_EXPAND = (TV_FIRST + 2)
+TVE_COLLAPSE = 1
+TVE_EXPAND = 2
+TVE_TOGGLE = 3
+TVE_EXPANDPARTIAL = 16384
+TVE_COLLAPSERESET = 32768
+TVM_GETITEMRECT = (TV_FIRST + 4)
+TVM_GETCOUNT = (TV_FIRST + 5)
+TVM_GETINDENT = (TV_FIRST + 6)
+TVM_SETINDENT = (TV_FIRST + 7)
+TVM_GETIMAGELIST = (TV_FIRST + 8)
+TVSIL_NORMAL = 0
+TVSIL_STATE = 2
+TVM_SETIMAGELIST = (TV_FIRST + 9)
+TVM_GETNEXTITEM = (TV_FIRST + 10)
+TVGN_ROOT = 0
+TVGN_NEXT = 1
+TVGN_PREVIOUS = 2
+TVGN_PARENT = 3
+TVGN_CHILD = 4
+TVGN_FIRSTVISIBLE = 5
+TVGN_NEXTVISIBLE = 6
+TVGN_PREVIOUSVISIBLE = 7
+TVGN_DROPHILITE = 8
+TVGN_CARET = 9
+TVGN_LASTVISIBLE = 10
+TVM_SELECTITEM = (TV_FIRST + 11)
+TVM_GETITEMA = (TV_FIRST + 12)
+TVM_GETITEMW = (TV_FIRST + 62)
+TVM_GETITEM = TVM_GETITEMW
+TVM_GETITEM = TVM_GETITEMA
+TVM_SETITEMA = (TV_FIRST + 13)
+TVM_SETITEMW = (TV_FIRST + 63)
+TVM_SETITEM = TVM_SETITEMW
+TVM_SETITEM = TVM_SETITEMA
+TVM_EDITLABELA = (TV_FIRST + 14)
+TVM_EDITLABELW = (TV_FIRST + 65)
+TVM_EDITLABEL = TVM_EDITLABELW
+TVM_EDITLABEL = TVM_EDITLABELA
+TVM_GETEDITCONTROL = (TV_FIRST + 15)
+TVM_GETVISIBLECOUNT = (TV_FIRST + 16)
+TVM_HITTEST = (TV_FIRST + 17)
+TVHT_NOWHERE = 1
+TVHT_ONITEMICON = 2
+TVHT_ONITEMLABEL = 4
+TVHT_ONITEMINDENT = 8
+TVHT_ONITEMBUTTON = 16
+TVHT_ONITEMRIGHT = 32
+TVHT_ONITEMSTATEICON = 64
+TVHT_ABOVE = 256
+TVHT_BELOW = 512
+TVHT_TORIGHT = 1024
+TVHT_TOLEFT = 2048
+TVHT_ONITEM = (TVHT_ONITEMICON | TVHT_ONITEMLABEL | TVHT_ONITEMSTATEICON)
+TVM_CREATEDRAGIMAGE = (TV_FIRST + 18)
+TVM_SORTCHILDREN = (TV_FIRST + 19)
+TVM_ENSUREVISIBLE = (TV_FIRST + 20)
+TVM_SORTCHILDRENCB = (TV_FIRST + 21)
+TVM_ENDEDITLABELNOW = (TV_FIRST + 22)
+TVM_GETISEARCHSTRINGA = (TV_FIRST + 23)
+TVM_GETISEARCHSTRINGW = (TV_FIRST + 64)
+TVM_GETISEARCHSTRING = TVM_GETISEARCHSTRINGA
+TVM_SETTOOLTIPS = (TV_FIRST + 24)
+TVM_GETTOOLTIPS = (TV_FIRST + 25)
+TVM_SETINSERTMARK = (TV_FIRST + 26)
+TVM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+TVM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+TVM_SETITEMHEIGHT = (TV_FIRST + 27)
+TVM_GETITEMHEIGHT = (TV_FIRST + 28)
+TVM_SETBKCOLOR = (TV_FIRST + 29)
+TVM_SETTEXTCOLOR = (TV_FIRST + 30)
+TVM_GETBKCOLOR = (TV_FIRST + 31)
+TVM_GETTEXTCOLOR = (TV_FIRST + 32)
+TVM_SETSCROLLTIME = (TV_FIRST + 33)
+TVM_GETSCROLLTIME = (TV_FIRST + 34)
+TVM_SETINSERTMARKCOLOR = (TV_FIRST + 37)
+TVM_GETINSERTMARKCOLOR = (TV_FIRST + 38)
+TVN_SELCHANGINGA = (TVN_FIRST-1)
+TVN_SELCHANGINGW = (TVN_FIRST-50)
+TVN_SELCHANGEDA = (TVN_FIRST-2)
+TVN_SELCHANGEDW = (TVN_FIRST-51)
+TVC_UNKNOWN = 0
+TVC_BYMOUSE = 1
+TVC_BYKEYBOARD = 2
+TVN_GETDISPINFOA = (TVN_FIRST-3)
+TVN_GETDISPINFOW = (TVN_FIRST-52)
+TVN_SETDISPINFOA = (TVN_FIRST-4)
+TVN_SETDISPINFOW = (TVN_FIRST-53)
+TVIF_DI_SETITEM = 4096
+TVN_ITEMEXPANDINGA = (TVN_FIRST-5)
+TVN_ITEMEXPANDINGW = (TVN_FIRST-54)
+TVN_ITEMEXPANDEDA = (TVN_FIRST-6)
+TVN_ITEMEXPANDEDW = (TVN_FIRST-55)
+TVN_BEGINDRAGA = (TVN_FIRST-7)
+TVN_BEGINDRAGW = (TVN_FIRST-56)
+TVN_BEGINRDRAGA = (TVN_FIRST-8)
+TVN_BEGINRDRAGW = (TVN_FIRST-57)
+TVN_DELETEITEMA = (TVN_FIRST-9)
+TVN_DELETEITEMW = (TVN_FIRST-58)
+TVN_BEGINLABELEDITA = (TVN_FIRST-10)
+TVN_BEGINLABELEDITW = (TVN_FIRST-59)
+TVN_ENDLABELEDITA = (TVN_FIRST-11)
+TVN_ENDLABELEDITW = (TVN_FIRST-60)
+TVN_KEYDOWN = (TVN_FIRST-12)
+TVN_GETINFOTIPA = (TVN_FIRST-13)
+TVN_GETINFOTIPW = (TVN_FIRST-14)
+TVN_SINGLEEXPAND = (TVN_FIRST-15)
+TVN_SELCHANGING = TVN_SELCHANGINGA
+TVN_SELCHANGED = TVN_SELCHANGEDA
+TVN_GETDISPINFO = TVN_GETDISPINFOA
+TVN_SETDISPINFO = TVN_SETDISPINFOA
+TVN_ITEMEXPANDING = TVN_ITEMEXPANDINGA
+TVN_ITEMEXPANDED = TVN_ITEMEXPANDEDA
+TVN_BEGINDRAG = TVN_BEGINDRAGA
+TVN_BEGINRDRAG = TVN_BEGINRDRAGA
+TVN_DELETEITEM = TVN_DELETEITEMA
+TVN_BEGINLABELEDIT = TVN_BEGINLABELEDITA
+TVN_ENDLABELEDIT = TVN_ENDLABELEDITA
+TVN_GETINFOTIP = TVN_GETINFOTIPA
+TVCDRF_NOIMAGES = 65536
+WC_COMBOBOXEXA = "ComboBoxEx32"
+WC_COMBOBOXEX = WC_COMBOBOXEXA
+CBEIF_TEXT = 1
+CBEIF_IMAGE = 2
+CBEIF_SELECTEDIMAGE = 4
+CBEIF_OVERLAY = 8
+CBEIF_INDENT = 16
+CBEIF_LPARAM = 32
+CBEIF_DI_SETITEM = 268435456
+CBEM_INSERTITEMA = (WM_USER + 1)
+CBEM_SETIMAGELIST = (WM_USER + 2)
+CBEM_GETIMAGELIST = (WM_USER + 3)
+CBEM_GETITEMA = (WM_USER + 4)
+CBEM_SETITEMA = (WM_USER + 5)
+#CBEM_DELETEITEM = CB_DELETESTRING
+CBEM_GETCOMBOCONTROL = (WM_USER + 6)
+CBEM_GETEDITCONTROL = (WM_USER + 7)
+CBEM_SETEXSTYLE = (WM_USER + 8)  # use  SETEXTENDEDSTYLE instead
+CBEM_SETEXTENDEDSTYLE = (WM_USER + 14)   # lparam == new style, wParam (optional) == mask
+CBEM_GETEXSTYLE = (WM_USER + 9) # use GETEXTENDEDSTYLE instead
+CBEM_GETEXTENDEDSTYLE = (WM_USER + 9)
+CBEM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+CBEM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+CBEM_SETEXSTYLE = (WM_USER + 8)
+CBEM_GETEXSTYLE = (WM_USER + 9)
+CBEM_HASEDITCHANGED = (WM_USER + 10)
+CBEM_INSERTITEMW = (WM_USER + 11)
+CBEM_SETITEMW = (WM_USER + 12)
+CBEM_GETITEMW = (WM_USER + 13)
+CBEM_INSERTITEM = CBEM_INSERTITEMA
+CBEM_SETITEM = CBEM_SETITEMA
+CBEM_GETITEM = CBEM_GETITEMA
+CBES_EX_NOEDITIMAGE = 1
+CBES_EX_NOEDITIMAGEINDENT = 2
+CBES_EX_PATHWORDBREAKPROC = 4
+CBES_EX_NOSIZELIMIT = 8
+CBES_EX_CASESENSITIVE = 16
+CBEN_GETDISPINFO = (CBEN_FIRST - 0)
+CBEN_GETDISPINFOA = (CBEN_FIRST - 0)
+CBEN_INSERTITEM = (CBEN_FIRST - 1)
+CBEN_DELETEITEM = (CBEN_FIRST - 2)
+CBEN_BEGINEDIT = (CBEN_FIRST - 4)
+CBEN_ENDEDITA = (CBEN_FIRST - 5)
+CBEN_ENDEDITW = (CBEN_FIRST - 6)
+CBEN_GETDISPINFOW = (CBEN_FIRST - 7)
+CBEN_DRAGBEGINA = (CBEN_FIRST - 8)
+CBEN_DRAGBEGINW = (CBEN_FIRST - 9)
+CBEN_DRAGBEGIN = CBEN_DRAGBEGINA
+CBEN_ENDEDIT = CBEN_ENDEDITA
+CBENF_KILLFOCUS = 1
+CBENF_RETURN = 2
+CBENF_ESCAPE = 3
+CBENF_DROPDOWN = 4
+CBEMAXSTRLEN = 260
+WC_TABCONTROLA = "SysTabControl32"
+WC_TABCONTROL = WC_TABCONTROLA
+TCS_SCROLLOPPOSITE = 1   # assumes multiline tab
+TCS_BOTTOM = 2
+TCS_RIGHT = 2
+TCS_MULTISELECT = 4  # allow multi-select in button mode
+TCS_FLATBUTTONS = 8
+TCS_FORCEICONLEFT = 16
+TCS_FORCELABELLEFT = 32
+TCS_HOTTRACK = 64
+TCS_VERTICAL = 128
+TCS_TABS = 0
+TCS_BUTTONS = 256
+TCS_SINGLELINE = 0
+TCS_MULTILINE = 512
+TCS_RIGHTJUSTIFY = 0
+TCS_FIXEDWIDTH = 1024
+TCS_RAGGEDRIGHT = 2048
+TCS_FOCUSONBUTTONDOWN = 4096
+TCS_OWNERDRAWFIXED = 8192
+TCS_TOOLTIPS = 16384
+TCS_FOCUSNEVER = 32768
+TCS_EX_FLATSEPARATORS = 1
+TCS_EX_REGISTERDROP = 2
+TCM_GETIMAGELIST = (TCM_FIRST + 2)
+TCM_SETIMAGELIST = (TCM_FIRST + 3)
+TCM_GETITEMCOUNT = (TCM_FIRST + 4)
+TCIF_TEXT = 1
+TCIF_IMAGE = 2
+TCIF_RTLREADING = 4
+TCIF_PARAM = 8
+TCIF_STATE = 16
+TCIS_BUTTONPRESSED = 1
+TCIS_HIGHLIGHTED = 2
+TCM_GETITEMA = (TCM_FIRST + 5)
+TCM_GETITEMW = (TCM_FIRST + 60)
+TCM_GETITEM = TCM_GETITEMA
+TCM_SETITEMA = (TCM_FIRST + 6)
+TCM_SETITEMW = (TCM_FIRST + 61)
+TCM_SETITEM = TCM_SETITEMA
+TCM_INSERTITEMA = (TCM_FIRST + 7)
+TCM_INSERTITEMW = (TCM_FIRST + 62)
+TCM_INSERTITEM = TCM_INSERTITEMA
+TCM_DELETEITEM = (TCM_FIRST + 8)
+TCM_DELETEALLITEMS = (TCM_FIRST + 9)
+TCM_GETITEMRECT = (TCM_FIRST + 10)
+TCM_GETCURSEL = (TCM_FIRST + 11)
+TCM_SETCURSEL = (TCM_FIRST + 12)
+TCHT_NOWHERE = 1
+TCHT_ONITEMICON = 2
+TCHT_ONITEMLABEL = 4
+TCHT_ONITEM = (TCHT_ONITEMICON | TCHT_ONITEMLABEL)
+TCM_HITTEST = (TCM_FIRST + 13)
+TCM_SETITEMEXTRA = (TCM_FIRST + 14)
+TCM_ADJUSTRECT = (TCM_FIRST + 40)
+TCM_SETITEMSIZE = (TCM_FIRST + 41)
+TCM_REMOVEIMAGE = (TCM_FIRST + 42)
+TCM_SETPADDING = (TCM_FIRST + 43)
+TCM_GETROWCOUNT = (TCM_FIRST + 44)
+TCM_GETTOOLTIPS = (TCM_FIRST + 45)
+TCM_SETTOOLTIPS = (TCM_FIRST + 46)
+TCM_GETCURFOCUS = (TCM_FIRST + 47)
+TCM_SETCURFOCUS = (TCM_FIRST + 48)
+TCM_SETMINTABWIDTH = (TCM_FIRST + 49)
+TCM_DESELECTALL = (TCM_FIRST + 50)
+TCM_HIGHLIGHTITEM = (TCM_FIRST + 51)
+TCM_SETEXTENDEDSTYLE = (TCM_FIRST + 52)  # optional wParam == mask
+TCM_GETEXTENDEDSTYLE = (TCM_FIRST + 53)
+TCM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+TCM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+TCN_KEYDOWN = (TCN_FIRST - 0)
+ANIMATE_CLASSA = "SysAnimate32"
+ANIMATE_CLASS = ANIMATE_CLASSA
+ACS_CENTER = 1
+ACS_TRANSPARENT = 2
+ACS_AUTOPLAY = 4
+ACS_TIMER = 8  # don't use threads... use timers
+ACM_OPENA = (WM_USER+100)
+ACM_OPENW = (WM_USER+103)
# UNICODE/ANSI #ifdef flattened by h2py: the first assignment is dead and
# ACM_OPEN ends up as the ANSI message ACM_OPENA.
ACM_OPEN = ACM_OPENW
ACM_OPEN = ACM_OPENA
+ACM_PLAY = (WM_USER+101)
+ACM_STOP = (WM_USER+102)
+ACN_START = 1
+ACN_STOP = 2
+MONTHCAL_CLASSA = "SysMonthCal32"
+MONTHCAL_CLASS = MONTHCAL_CLASSA
+MCM_FIRST = 4096
+MCM_GETCURSEL = (MCM_FIRST + 1)
+MCM_SETCURSEL = (MCM_FIRST + 2)
+MCM_GETMAXSELCOUNT = (MCM_FIRST + 3)
+MCM_SETMAXSELCOUNT = (MCM_FIRST + 4)
+MCM_GETSELRANGE = (MCM_FIRST + 5)
+MCM_SETSELRANGE = (MCM_FIRST + 6)
+MCM_GETMONTHRANGE = (MCM_FIRST + 7)
+MCM_SETDAYSTATE = (MCM_FIRST + 8)
+MCM_GETMINREQRECT = (MCM_FIRST + 9)
+MCM_SETCOLOR = (MCM_FIRST + 10)
+MCM_GETCOLOR = (MCM_FIRST + 11)
+MCSC_BACKGROUND = 0   # the background color (between months)
+MCSC_TEXT = 1   # the dates
+MCSC_TITLEBK = 2   # background of the title
+MCSC_TITLETEXT = 3
+MCSC_MONTHBK = 4   # background within the month cal
+MCSC_TRAILINGTEXT = 5   # the text color of header & trailing days
+MCM_SETTODAY = (MCM_FIRST + 12)
+MCM_GETTODAY = (MCM_FIRST + 13)
+MCM_HITTEST = (MCM_FIRST + 14)
+MCHT_TITLE = 65536
+MCHT_CALENDAR = 131072
+MCHT_TODAYLINK = 196608
+MCHT_NEXT = 16777216   # these indicate that hitting
+MCHT_PREV = 33554432  # here will go to the next/prev month
+MCHT_NOWHERE = 0
+MCHT_TITLEBK = (MCHT_TITLE)
+MCHT_TITLEMONTH = (MCHT_TITLE | 1)
+MCHT_TITLEYEAR = (MCHT_TITLE | 2)
+MCHT_TITLEBTNNEXT = (MCHT_TITLE | MCHT_NEXT | 3)
+MCHT_TITLEBTNPREV = (MCHT_TITLE | MCHT_PREV | 3)
+MCHT_CALENDARBK = (MCHT_CALENDAR)
+MCHT_CALENDARDATE = (MCHT_CALENDAR | 1)
+MCHT_CALENDARDATENEXT = (MCHT_CALENDARDATE | MCHT_NEXT)
+MCHT_CALENDARDATEPREV = (MCHT_CALENDARDATE | MCHT_PREV)
+MCHT_CALENDARDAY = (MCHT_CALENDAR | 2)
+MCHT_CALENDARWEEKNUM = (MCHT_CALENDAR | 3)
+MCM_SETFIRSTDAYOFWEEK = (MCM_FIRST + 15)
+MCM_GETFIRSTDAYOFWEEK = (MCM_FIRST + 16)
+MCM_GETRANGE = (MCM_FIRST + 17)
+MCM_SETRANGE = (MCM_FIRST + 18)
+MCM_GETMONTHDELTA = (MCM_FIRST + 19)
+MCM_SETMONTHDELTA = (MCM_FIRST + 20)
+MCM_GETMAXTODAYWIDTH = (MCM_FIRST + 21)
+MCM_SETUNICODEFORMAT = CCM_SETUNICODEFORMAT
+MCM_GETUNICODEFORMAT = CCM_GETUNICODEFORMAT
+MCN_SELCHANGE = (MCN_FIRST + 1)
+MCN_GETDAYSTATE = (MCN_FIRST + 3)
+MCN_SELECT = (MCN_FIRST + 4)
+MCS_DAYSTATE = 1
+MCS_MULTISELECT = 2
+MCS_WEEKNUMBERS = 4
+MCS_NOTODAYCIRCLE = 8
MCS_NOTODAY = 16
# Rebinding from an alternate SDK #if branch: this makes MCS_NOTODAY equal
# to MCS_NOTODAYCIRCLE (8) rather than 16.  Kept byte-for-byte from the
# h2py output; NOTE(review): confirm which value the targeted comctl32
# version actually defines before relying on MCS_NOTODAY.
MCS_NOTODAY = 8
+GMR_VISIBLE = 0       # visible portion of display
+GMR_DAYSTATE = 1       # above plus the grayed out parts of
+DATETIMEPICK_CLASSA = "SysDateTimePick32"
+DATETIMEPICK_CLASS = DATETIMEPICK_CLASSA
+DTM_FIRST = 4096
+DTM_GETSYSTEMTIME = (DTM_FIRST + 1)
+DTM_SETSYSTEMTIME = (DTM_FIRST + 2)
+DTM_GETRANGE = (DTM_FIRST + 3)
+DTM_SETRANGE = (DTM_FIRST + 4)
+DTM_SETFORMATA = (DTM_FIRST + 5)
+DTM_SETFORMATW = (DTM_FIRST + 50)
# h2py emitted both UNICODE branches; the ANSI alias below overrides the
# Unicode one, so DTM_SETFORMAT is the ANSI message in this module.
DTM_SETFORMAT = DTM_SETFORMATW
DTM_SETFORMAT = DTM_SETFORMATA
+DTM_SETMCCOLOR = (DTM_FIRST + 6)
+DTM_GETMCCOLOR = (DTM_FIRST + 7)
+DTM_GETMONTHCAL = (DTM_FIRST + 8)
+DTM_SETMCFONT = (DTM_FIRST + 9)
+DTM_GETMCFONT = (DTM_FIRST + 10)
+DTS_UPDOWN = 1 # use UPDOWN instead of MONTHCAL
+DTS_SHOWNONE = 2 # allow a NONE selection
+DTS_SHORTDATEFORMAT = 0 # use the short date format (app must forward WM_WININICHANGE messages)
+DTS_LONGDATEFORMAT = 4 # use the long date format (app must forward WM_WININICHANGE messages)
+DTS_TIMEFORMAT = 9 # use the time format (app must forward WM_WININICHANGE messages)
+DTS_APPCANPARSE = 16 # allow user entered strings (app MUST respond to DTN_USERSTRING)
+DTS_RIGHTALIGN = 32 # right-align popup instead of left-align it
+DTN_DATETIMECHANGE = (DTN_FIRST + 1) # the systemtime has changed
+DTN_USERSTRINGA = (DTN_FIRST + 2) # the user has entered a string
+DTN_USERSTRINGW = (DTN_FIRST + 15)
+DTN_USERSTRING = DTN_USERSTRINGW
+DTN_WMKEYDOWNA = (DTN_FIRST + 3) # modify keydown on app format field (X)
+DTN_WMKEYDOWNW = (DTN_FIRST + 16)
+DTN_WMKEYDOWN = DTN_WMKEYDOWNA
+DTN_FORMATA = (DTN_FIRST + 4) # query display for app format field (X)
+DTN_FORMATW = (DTN_FIRST + 17)
+DTN_FORMAT = DTN_FORMATA
+DTN_FORMATQUERYA = (DTN_FIRST + 5) # query formatting info for app format field (X)
+DTN_FORMATQUERYW = (DTN_FIRST + 18)
+DTN_FORMATQUERY = DTN_FORMATQUERYA
+DTN_DROPDOWN = (DTN_FIRST + 6) # MonthCal has dropped down
+DTN_CLOSEUP = (DTN_FIRST + 7) # MonthCal is popping up
+GDTR_MIN = 1
+GDTR_MAX = 2
+GDT_ERROR = -1
+GDT_VALID = 0
+GDT_NONE = 1
+IPM_CLEARADDRESS = (WM_USER+100) # no parameters
+IPM_SETADDRESS = (WM_USER+101) # lparam = TCP/IP address
+IPM_GETADDRESS = (WM_USER+102) # lresult = # of non black fields.  lparam = LPDWORD for TCP/IP address
+IPM_SETRANGE = (WM_USER+103) # wparam = field, lparam = range
+IPM_SETFOCUS = (WM_USER+104) # wparam = field
+IPM_ISBLANK = (WM_USER+105) # no parameters
+WC_IPADDRESSA = "SysIPAddress32"
+WC_IPADDRESS = WC_IPADDRESSA
+IPN_FIELDCHANGED = (IPN_FIRST - 0)
+WC_PAGESCROLLERA = "SysPager"
+WC_PAGESCROLLER = WC_PAGESCROLLERA
+PGS_VERT = 0
+PGS_HORZ = 1
+PGS_AUTOSCROLL = 2
+PGS_DRAGNDROP = 4
+PGF_INVISIBLE = 0      # Scroll button is not visible
+PGF_NORMAL = 1      # Scroll button is in normal state
+PGF_GRAYED = 2      # Scroll button is in grayed state
+PGF_DEPRESSED = 4      # Scroll button is in depressed state
+PGF_HOT = 8      # Scroll button is in hot state
+PGB_TOPORLEFT = 0
+PGB_BOTTOMORRIGHT = 1
+PGM_SETCHILD = (PGM_FIRST + 1)  # lParam == hwnd
+PGM_RECALCSIZE = (PGM_FIRST + 2)
+PGM_FORWARDMOUSE = (PGM_FIRST + 3)
+PGM_SETBKCOLOR = (PGM_FIRST + 4)
+PGM_GETBKCOLOR = (PGM_FIRST + 5)
+PGM_SETBORDER = (PGM_FIRST + 6)
+PGM_GETBORDER = (PGM_FIRST + 7)
+PGM_SETPOS = (PGM_FIRST + 8)
+PGM_GETPOS = (PGM_FIRST + 9)
+PGM_SETBUTTONSIZE = (PGM_FIRST + 10)
+PGM_GETBUTTONSIZE = (PGM_FIRST + 11)
+PGM_GETBUTTONSTATE = (PGM_FIRST + 12)
+PGM_GETDROPTARGET = CCM_GETDROPTARGET
+PGN_SCROLL = (PGN_FIRST-1)
+PGF_SCROLLUP = 1
+PGF_SCROLLDOWN = 2
+PGF_SCROLLLEFT = 4
+PGF_SCROLLRIGHT = 8
+PGK_SHIFT = 1
+PGK_CONTROL = 2
+PGK_MENU = 4
+PGN_CALCSIZE = (PGN_FIRST-2)
+PGF_CALCWIDTH = 1
+PGF_CALCHEIGHT = 2
+WC_NATIVEFONTCTLA = "NativeFontCtl"
+WC_NATIVEFONTCTL = WC_NATIVEFONTCTLA
+NFS_EDIT = 1
+NFS_STATIC = 2
+NFS_LISTCOMBO = 4
+NFS_BUTTON = 8
+NFS_ALL = 16
+WM_MOUSEHOVER = 673
+WM_MOUSELEAVE = 675
+TME_HOVER = 1
+TME_LEAVE = 2
+TME_QUERY = 1073741824
+TME_CANCEL = -2147483648
+HOVER_DEFAULT = -1
+WSB_PROP_CYVSCROLL = 0x00000001L
+WSB_PROP_CXHSCROLL = 0x00000002L
+WSB_PROP_CYHSCROLL = 0x00000004L
+WSB_PROP_CXVSCROLL = 0x00000008L
+WSB_PROP_CXHTHUMB = 0x00000010L
+WSB_PROP_CYVTHUMB = 0x00000020L
+WSB_PROP_VBKGCOLOR = 0x00000040L
+WSB_PROP_HBKGCOLOR = 0x00000080L
+WSB_PROP_VSTYLE = 0x00000100L
+WSB_PROP_HSTYLE = 0x00000200L
+WSB_PROP_WINSTYLE = 0x00000400L
+WSB_PROP_PALETTE = 0x00000800L
+WSB_PROP_MASK = 0x00000FFFL
+FSB_FLAT_MODE = 2
+FSB_ENCARTA_MODE = 1
+FSB_REGULAR_MODE = 0
+
def INDEXTOOVERLAYMASK(i):
    """Shift an overlay-image index into the overlay bits of a list-view
    item state (bits 8-11), mirroring the INDEXTOOVERLAYMASK macro from
    commctrl.h."""
    return i * 256
+
def INDEXTOSTATEIMAGEMASK(i):
    """Shift a state-image index into the state-image bits of a list-view
    item state (bits 12-15), mirroring the INDEXTOSTATEIMAGEMASK macro
    from commctrl.h."""
    return i * 4096
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/mmsystem.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/mmsystem.py
new file mode 100644
index 0000000..fb0e3f0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/mmsystem.py
@@ -0,0 +1,867 @@
+# Generated by h2py from d:/msdev/include/mmsystem.h
+MAXPNAMELEN = 32
+MAXERRORLENGTH = 256
+MAX_JOYSTICKOEMVXDNAME = 260
+MM_MICROSOFT = 1
+MM_MIDI_MAPPER = 1
+MM_WAVE_MAPPER = 2
+MM_SNDBLST_MIDIOUT = 3
+MM_SNDBLST_MIDIIN = 4
+MM_SNDBLST_SYNTH = 5
+MM_SNDBLST_WAVEOUT = 6
+MM_SNDBLST_WAVEIN = 7
+MM_ADLIB = 9
+MM_MPU401_MIDIOUT = 10
+MM_MPU401_MIDIIN = 11
+MM_PC_JOYSTICK = 12
+TIME_MS = 0x0001
+TIME_SAMPLES = 0x0002
+TIME_BYTES = 0x0004
+TIME_SMPTE = 0x0008
+TIME_MIDI = 0x0010
+TIME_TICKS = 0x0020
+MM_JOY1MOVE = 0x3A0
+MM_JOY2MOVE = 0x3A1
+MM_JOY1ZMOVE = 0x3A2
+MM_JOY2ZMOVE = 0x3A3
+MM_JOY1BUTTONDOWN = 0x3B5
+MM_JOY2BUTTONDOWN = 0x3B6
+MM_JOY1BUTTONUP = 0x3B7
+MM_JOY2BUTTONUP = 0x3B8
+MM_MCINOTIFY = 0x3B9
+MM_WOM_OPEN = 0x3BB
+MM_WOM_CLOSE = 0x3BC
+MM_WOM_DONE = 0x3BD
+MM_WIM_OPEN = 0x3BE
+MM_WIM_CLOSE = 0x3BF
+MM_WIM_DATA = 0x3C0
+MM_MIM_OPEN = 0x3C1
+MM_MIM_CLOSE = 0x3C2
+MM_MIM_DATA = 0x3C3
+MM_MIM_LONGDATA = 0x3C4
+MM_MIM_ERROR = 0x3C5
+MM_MIM_LONGERROR = 0x3C6
+MM_MOM_OPEN = 0x3C7
+MM_MOM_CLOSE = 0x3C8
+MM_MOM_DONE = 0x3C9
+MM_STREAM_OPEN = 0x3D4
+MM_STREAM_CLOSE = 0x3D5
+MM_STREAM_DONE = 0x3D6
+MM_STREAM_ERROR = 0x3D7
+MM_MOM_POSITIONCB = 0x3CA
+MM_MIM_MOREDATA = 0x3CC
+MM_MIXM_LINE_CHANGE = 0x3D0
+MM_MIXM_CONTROL_CHANGE = 0x3D1
+MMSYSERR_BASE = 0
+WAVERR_BASE = 32
+MIDIERR_BASE = 64
+TIMERR_BASE = 96
+JOYERR_BASE = 160
+MCIERR_BASE = 256
+MIXERR_BASE = 1024
+MCI_STRING_OFFSET = 512
+MCI_VD_OFFSET = 1024
+MCI_CD_OFFSET = 1088
+MCI_WAVE_OFFSET = 1152
+MCI_SEQ_OFFSET = 1216
+MMSYSERR_NOERROR = 0
+MMSYSERR_ERROR = (MMSYSERR_BASE + 1)
+MMSYSERR_BADDEVICEID = (MMSYSERR_BASE + 2)
+MMSYSERR_NOTENABLED = (MMSYSERR_BASE + 3)
+MMSYSERR_ALLOCATED = (MMSYSERR_BASE + 4)
+MMSYSERR_INVALHANDLE = (MMSYSERR_BASE + 5)
+MMSYSERR_NODRIVER = (MMSYSERR_BASE + 6)
+MMSYSERR_NOMEM = (MMSYSERR_BASE + 7)
+MMSYSERR_NOTSUPPORTED = (MMSYSERR_BASE + 8)
+MMSYSERR_BADERRNUM = (MMSYSERR_BASE + 9)
+MMSYSERR_INVALFLAG = (MMSYSERR_BASE + 10)
+MMSYSERR_INVALPARAM = (MMSYSERR_BASE + 11)
+MMSYSERR_HANDLEBUSY = (MMSYSERR_BASE + 12)
+MMSYSERR_INVALIDALIAS = (MMSYSERR_BASE + 13)
+MMSYSERR_BADDB = (MMSYSERR_BASE + 14)
+MMSYSERR_KEYNOTFOUND = (MMSYSERR_BASE + 15)
+MMSYSERR_READERROR = (MMSYSERR_BASE + 16)
+MMSYSERR_WRITEERROR = (MMSYSERR_BASE + 17)
+MMSYSERR_DELETEERROR = (MMSYSERR_BASE + 18)
+MMSYSERR_VALNOTFOUND = (MMSYSERR_BASE + 19)
+MMSYSERR_NODRIVERCB = (MMSYSERR_BASE + 20)
+MMSYSERR_LASTERROR = (MMSYSERR_BASE + 20)
+DRV_LOAD = 0x0001
+DRV_ENABLE = 0x0002
+DRV_OPEN = 0x0003
+DRV_CLOSE = 0x0004
+DRV_DISABLE = 0x0005
+DRV_FREE = 0x0006
+DRV_CONFIGURE = 0x0007
+DRV_QUERYCONFIGURE = 0x0008
+DRV_INSTALL = 0x0009
+DRV_REMOVE = 0x000A
+DRV_EXITSESSION = 0x000B
+DRV_POWER = 0x000F
+DRV_RESERVED = 0x0800
+DRV_USER = 0x4000
+DRVCNF_CANCEL = 0x0000
+DRVCNF_OK = 0x0001
+DRVCNF_RESTART = 0x0002
+DRV_CANCEL = DRVCNF_CANCEL
+DRV_OK = DRVCNF_OK
+DRV_RESTART = DRVCNF_RESTART
+DRV_MCI_FIRST = DRV_RESERVED
+DRV_MCI_LAST = (DRV_RESERVED + 0xFFF)
+CALLBACK_TYPEMASK = 0x00070000l
+CALLBACK_NULL = 0x00000000l
+CALLBACK_WINDOW = 0x00010000l
+CALLBACK_TASK = 0x00020000l
+CALLBACK_FUNCTION = 0x00030000l
+CALLBACK_THREAD = (CALLBACK_TASK)
+CALLBACK_EVENT = 0x00050000l
+SND_SYNC = 0x0000
+SND_ASYNC = 0x0001
+SND_NODEFAULT = 0x0002
+SND_MEMORY = 0x0004
+SND_LOOP = 0x0008
+SND_NOSTOP = 0x0010
+SND_NOWAIT = 0x00002000L
+SND_ALIAS = 0x00010000L
+SND_ALIAS_ID = 0x00110000L
+SND_FILENAME = 0x00020000L
+SND_RESOURCE = 0x00040004L
+SND_PURGE = 0x0040
+SND_APPLICATION = 0x0080
+SND_ALIAS_START = 0
+WAVERR_BADFORMAT = (WAVERR_BASE + 0)
+WAVERR_STILLPLAYING = (WAVERR_BASE + 1)
+WAVERR_UNPREPARED = (WAVERR_BASE + 2)
+WAVERR_SYNC = (WAVERR_BASE + 3)
+WAVERR_LASTERROR = (WAVERR_BASE + 3)
+WOM_OPEN = MM_WOM_OPEN
+WOM_CLOSE = MM_WOM_CLOSE
+WOM_DONE = MM_WOM_DONE
+WIM_OPEN = MM_WIM_OPEN
+WIM_CLOSE = MM_WIM_CLOSE
+WIM_DATA = MM_WIM_DATA
+WAVE_MAPPER = 0xFFFFFFFFL
+WAVE_FORMAT_QUERY = 0x0001
+WAVE_ALLOWSYNC = 0x0002
+WAVE_MAPPED = 0x0004
+WAVE_FORMAT_DIRECT = 0x0008
+WAVE_FORMAT_DIRECT_QUERY = (WAVE_FORMAT_QUERY | WAVE_FORMAT_DIRECT)
+WHDR_DONE = 0x00000001
+WHDR_PREPARED = 0x00000002
+WHDR_BEGINLOOP = 0x00000004
+WHDR_ENDLOOP = 0x00000008
+WHDR_INQUEUE = 0x00000010
+WAVECAPS_PITCH = 0x0001
+WAVECAPS_PLAYBACKRATE = 0x0002
+WAVECAPS_VOLUME = 0x0004
+WAVECAPS_LRVOLUME = 0x0008
+WAVECAPS_SYNC = 0x0010
+WAVECAPS_SAMPLEACCURATE = 0x0020
+WAVECAPS_DIRECTSOUND = 0x0040
+WAVE_INVALIDFORMAT = 0x00000000
+WAVE_FORMAT_1M08 = 0x00000001
+WAVE_FORMAT_1S08 = 0x00000002
+WAVE_FORMAT_1M16 = 0x00000004
+WAVE_FORMAT_1S16 = 0x00000008
+WAVE_FORMAT_2M08 = 0x00000010
+WAVE_FORMAT_2S08 = 0x00000020
+WAVE_FORMAT_2M16 = 0x00000040
+WAVE_FORMAT_2S16 = 0x00000080
+WAVE_FORMAT_4M08 = 0x00000100
+WAVE_FORMAT_4S08 = 0x00000200
+WAVE_FORMAT_4M16 = 0x00000400
+WAVE_FORMAT_4S16 = 0x00000800
+WAVE_FORMAT_PCM = 1
+WAVE_FORMAT_IEEE_FLOAT = 3
+MIDIERR_UNPREPARED = (MIDIERR_BASE + 0)
+MIDIERR_STILLPLAYING = (MIDIERR_BASE + 1)
+MIDIERR_NOMAP = (MIDIERR_BASE + 2)
+MIDIERR_NOTREADY = (MIDIERR_BASE + 3)
+MIDIERR_NODEVICE = (MIDIERR_BASE + 4)
+MIDIERR_INVALIDSETUP = (MIDIERR_BASE + 5)
+MIDIERR_BADOPENMODE = (MIDIERR_BASE + 6)
+MIDIERR_DONT_CONTINUE = (MIDIERR_BASE + 7)
+MIDIERR_LASTERROR = (MIDIERR_BASE + 7)
+MIDIPATCHSIZE = 128
+MIM_OPEN = MM_MIM_OPEN
+MIM_CLOSE = MM_MIM_CLOSE
+MIM_DATA = MM_MIM_DATA
+MIM_LONGDATA = MM_MIM_LONGDATA
+MIM_ERROR = MM_MIM_ERROR
+MIM_LONGERROR = MM_MIM_LONGERROR
+MOM_OPEN = MM_MOM_OPEN
+MOM_CLOSE = MM_MOM_CLOSE
+MOM_DONE = MM_MOM_DONE
+MIM_MOREDATA = MM_MIM_MOREDATA
+MOM_POSITIONCB = MM_MOM_POSITIONCB
+MIDI_IO_STATUS = 0x00000020L
+MIDI_CACHE_ALL = 1
+MIDI_CACHE_BESTFIT = 2
+MIDI_CACHE_QUERY = 3
+MIDI_UNCACHE = 4
+MOD_MIDIPORT = 1
+MOD_SYNTH = 2
+MOD_SQSYNTH = 3
+MOD_FMSYNTH = 4
+MOD_MAPPER = 5
+MIDICAPS_VOLUME = 0x0001
+MIDICAPS_LRVOLUME = 0x0002
+MIDICAPS_CACHE = 0x0004
+MIDICAPS_STREAM = 0x0008
+MHDR_DONE = 0x00000001
+MHDR_PREPARED = 0x00000002
+MHDR_INQUEUE = 0x00000004
+MHDR_ISSTRM = 0x00000008
+MEVT_F_SHORT = 0x00000000L
+MEVT_F_LONG = 0x80000000L
+MEVT_F_CALLBACK = 0x40000000L
+def MEVT_EVENTTYPE(x): return ((BYTE)(((x)>>24)&0xFF))
+
+def MEVT_EVENTPARM(x): return ((DWORD)((x)&0x00FFFFFFL))
+
+MIDISTRM_ERROR = (-2)
+MIDIPROP_SET = 0x80000000L
+MIDIPROP_GET = 0x40000000L
+MIDIPROP_TIMEDIV = 0x00000001L
+MIDIPROP_TEMPO = 0x00000002L
+AUXCAPS_CDAUDIO = 1
+AUXCAPS_AUXIN = 2
+AUXCAPS_VOLUME = 0x0001
+AUXCAPS_LRVOLUME = 0x0002
+MIXER_SHORT_NAME_CHARS = 16
+MIXER_LONG_NAME_CHARS = 64
+MIXERR_INVALLINE = (MIXERR_BASE + 0)
+MIXERR_INVALCONTROL = (MIXERR_BASE + 1)
+MIXERR_INVALVALUE = (MIXERR_BASE + 2)
+MIXERR_LASTERROR = (MIXERR_BASE + 2)
+MIXER_OBJECTF_HANDLE = 0x80000000L
+MIXER_OBJECTF_MIXER = 0x00000000L
+MIXER_OBJECTF_HMIXER = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_MIXER)
+MIXER_OBJECTF_WAVEOUT = 0x10000000L
+MIXER_OBJECTF_HWAVEOUT = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_WAVEOUT)
+MIXER_OBJECTF_WAVEIN = 0x20000000L
+MIXER_OBJECTF_HWAVEIN = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_WAVEIN)
+MIXER_OBJECTF_MIDIOUT = 0x30000000L
+MIXER_OBJECTF_HMIDIOUT = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_MIDIOUT)
+MIXER_OBJECTF_MIDIIN = 0x40000000L
+MIXER_OBJECTF_HMIDIIN = (MIXER_OBJECTF_HANDLE|MIXER_OBJECTF_MIDIIN)
+MIXER_OBJECTF_AUX = 0x50000000L
+MIXERLINE_LINEF_ACTIVE = 0x00000001L
+MIXERLINE_LINEF_DISCONNECTED = 0x00008000L
+MIXERLINE_LINEF_SOURCE = 0x80000000L
+MIXERLINE_COMPONENTTYPE_DST_FIRST = 0x00000000L
+MIXERLINE_COMPONENTTYPE_DST_UNDEFINED = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 0)
+MIXERLINE_COMPONENTTYPE_DST_DIGITAL = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 1)
+MIXERLINE_COMPONENTTYPE_DST_LINE = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 2)
+MIXERLINE_COMPONENTTYPE_DST_MONITOR = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 3)
+MIXERLINE_COMPONENTTYPE_DST_SPEAKERS = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 4)
+MIXERLINE_COMPONENTTYPE_DST_HEADPHONES = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 5)
+MIXERLINE_COMPONENTTYPE_DST_TELEPHONE = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 6)
+MIXERLINE_COMPONENTTYPE_DST_WAVEIN = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 7)
+MIXERLINE_COMPONENTTYPE_DST_VOICEIN = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 8)
+MIXERLINE_COMPONENTTYPE_DST_LAST = (MIXERLINE_COMPONENTTYPE_DST_FIRST + 8)
+MIXERLINE_COMPONENTTYPE_SRC_FIRST = 0x00001000L
+MIXERLINE_COMPONENTTYPE_SRC_UNDEFINED = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 0)
+MIXERLINE_COMPONENTTYPE_SRC_DIGITAL = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 1)
+MIXERLINE_COMPONENTTYPE_SRC_LINE = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 2)
+MIXERLINE_COMPONENTTYPE_SRC_MICROPHONE = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 3)
+MIXERLINE_COMPONENTTYPE_SRC_SYNTHESIZER = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 4)
+MIXERLINE_COMPONENTTYPE_SRC_COMPACTDISC = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 5)
+MIXERLINE_COMPONENTTYPE_SRC_TELEPHONE = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 6)
+MIXERLINE_COMPONENTTYPE_SRC_PCSPEAKER = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 7)
+MIXERLINE_COMPONENTTYPE_SRC_WAVEOUT = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 8)
+MIXERLINE_COMPONENTTYPE_SRC_AUXILIARY = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 9)
+MIXERLINE_COMPONENTTYPE_SRC_ANALOG = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 10)
+MIXERLINE_COMPONENTTYPE_SRC_LAST = (MIXERLINE_COMPONENTTYPE_SRC_FIRST + 10)
+MIXERLINE_TARGETTYPE_UNDEFINED = 0
+MIXERLINE_TARGETTYPE_WAVEOUT = 1
+MIXERLINE_TARGETTYPE_WAVEIN = 2
+MIXERLINE_TARGETTYPE_MIDIOUT = 3
+MIXERLINE_TARGETTYPE_MIDIIN = 4
+MIXERLINE_TARGETTYPE_AUX = 5
+MIXER_GETLINEINFOF_DESTINATION = 0x00000000L
+MIXER_GETLINEINFOF_SOURCE = 0x00000001L
+MIXER_GETLINEINFOF_LINEID = 0x00000002L
+MIXER_GETLINEINFOF_COMPONENTTYPE = 0x00000003L
+MIXER_GETLINEINFOF_TARGETTYPE = 0x00000004L
+MIXER_GETLINEINFOF_QUERYMASK = 0x0000000FL
+MIXERCONTROL_CONTROLF_UNIFORM = 0x00000001L
+MIXERCONTROL_CONTROLF_MULTIPLE = 0x00000002L
+MIXERCONTROL_CONTROLF_DISABLED = 0x80000000L
+MIXERCONTROL_CT_CLASS_MASK = 0xF0000000L
+MIXERCONTROL_CT_CLASS_CUSTOM = 0x00000000L
+MIXERCONTROL_CT_CLASS_METER = 0x10000000L
+MIXERCONTROL_CT_CLASS_SWITCH = 0x20000000L
+MIXERCONTROL_CT_CLASS_NUMBER = 0x30000000L
+MIXERCONTROL_CT_CLASS_SLIDER = 0x40000000L
+MIXERCONTROL_CT_CLASS_FADER = 0x50000000L
+MIXERCONTROL_CT_CLASS_TIME = 0x60000000L
+MIXERCONTROL_CT_CLASS_LIST = 0x70000000L
+MIXERCONTROL_CT_SUBCLASS_MASK = 0x0F000000L
+MIXERCONTROL_CT_SC_SWITCH_BOOLEAN = 0x00000000L
+MIXERCONTROL_CT_SC_SWITCH_BUTTON = 0x01000000L
+MIXERCONTROL_CT_SC_METER_POLLED = 0x00000000L
+MIXERCONTROL_CT_SC_TIME_MICROSECS = 0x00000000L
+MIXERCONTROL_CT_SC_TIME_MILLISECS = 0x01000000L
+MIXERCONTROL_CT_SC_LIST_SINGLE = 0x00000000L
+MIXERCONTROL_CT_SC_LIST_MULTIPLE = 0x01000000L
+MIXERCONTROL_CT_UNITS_MASK = 0x00FF0000L
+MIXERCONTROL_CT_UNITS_CUSTOM = 0x00000000L
+MIXERCONTROL_CT_UNITS_BOOLEAN = 0x00010000L
+MIXERCONTROL_CT_UNITS_SIGNED = 0x00020000L
+MIXERCONTROL_CT_UNITS_UNSIGNED = 0x00030000L
+MIXERCONTROL_CT_UNITS_DECIBELS = 0x00040000L
+MIXERCONTROL_CT_UNITS_PERCENT = 0x00050000L
+MIXERCONTROL_CONTROLTYPE_CUSTOM = (MIXERCONTROL_CT_CLASS_CUSTOM | MIXERCONTROL_CT_UNITS_CUSTOM)
+MIXERCONTROL_CONTROLTYPE_BOOLEANMETER = (MIXERCONTROL_CT_CLASS_METER | MIXERCONTROL_CT_SC_METER_POLLED | MIXERCONTROL_CT_UNITS_BOOLEAN)
+MIXERCONTROL_CONTROLTYPE_SIGNEDMETER = (MIXERCONTROL_CT_CLASS_METER | MIXERCONTROL_CT_SC_METER_POLLED | MIXERCONTROL_CT_UNITS_SIGNED)
+MIXERCONTROL_CONTROLTYPE_PEAKMETER = (MIXERCONTROL_CONTROLTYPE_SIGNEDMETER + 1)
+MIXERCONTROL_CONTROLTYPE_UNSIGNEDMETER = (MIXERCONTROL_CT_CLASS_METER | MIXERCONTROL_CT_SC_METER_POLLED | MIXERCONTROL_CT_UNITS_UNSIGNED)
+MIXERCONTROL_CONTROLTYPE_BOOLEAN = (MIXERCONTROL_CT_CLASS_SWITCH | MIXERCONTROL_CT_SC_SWITCH_BOOLEAN | MIXERCONTROL_CT_UNITS_BOOLEAN)
+MIXERCONTROL_CONTROLTYPE_ONOFF = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 1)
+MIXERCONTROL_CONTROLTYPE_MUTE = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 2)
+MIXERCONTROL_CONTROLTYPE_MONO = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 3)
+MIXERCONTROL_CONTROLTYPE_LOUDNESS = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 4)
+MIXERCONTROL_CONTROLTYPE_STEREOENH = (MIXERCONTROL_CONTROLTYPE_BOOLEAN + 5)
+MIXERCONTROL_CONTROLTYPE_BUTTON = (MIXERCONTROL_CT_CLASS_SWITCH | MIXERCONTROL_CT_SC_SWITCH_BUTTON | MIXERCONTROL_CT_UNITS_BOOLEAN)
+MIXERCONTROL_CONTROLTYPE_DECIBELS = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_DECIBELS)
+MIXERCONTROL_CONTROLTYPE_SIGNED = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_SIGNED)
+MIXERCONTROL_CONTROLTYPE_UNSIGNED = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_UNSIGNED)
+MIXERCONTROL_CONTROLTYPE_PERCENT = (MIXERCONTROL_CT_CLASS_NUMBER | MIXERCONTROL_CT_UNITS_PERCENT)
+MIXERCONTROL_CONTROLTYPE_SLIDER = (MIXERCONTROL_CT_CLASS_SLIDER | MIXERCONTROL_CT_UNITS_SIGNED)
+MIXERCONTROL_CONTROLTYPE_PAN = (MIXERCONTROL_CONTROLTYPE_SLIDER + 1)
+MIXERCONTROL_CONTROLTYPE_QSOUNDPAN = (MIXERCONTROL_CONTROLTYPE_SLIDER + 2)
+MIXERCONTROL_CONTROLTYPE_FADER = (MIXERCONTROL_CT_CLASS_FADER | MIXERCONTROL_CT_UNITS_UNSIGNED)
+MIXERCONTROL_CONTROLTYPE_VOLUME = (MIXERCONTROL_CONTROLTYPE_FADER + 1)
+MIXERCONTROL_CONTROLTYPE_BASS = (MIXERCONTROL_CONTROLTYPE_FADER + 2)
+MIXERCONTROL_CONTROLTYPE_TREBLE = (MIXERCONTROL_CONTROLTYPE_FADER + 3)
+MIXERCONTROL_CONTROLTYPE_EQUALIZER = (MIXERCONTROL_CONTROLTYPE_FADER + 4)
+MIXERCONTROL_CONTROLTYPE_SINGLESELECT = (MIXERCONTROL_CT_CLASS_LIST | MIXERCONTROL_CT_SC_LIST_SINGLE | MIXERCONTROL_CT_UNITS_BOOLEAN)
+MIXERCONTROL_CONTROLTYPE_MUX = (MIXERCONTROL_CONTROLTYPE_SINGLESELECT + 1)
+MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT = (MIXERCONTROL_CT_CLASS_LIST | MIXERCONTROL_CT_SC_LIST_MULTIPLE | MIXERCONTROL_CT_UNITS_BOOLEAN)
+MIXERCONTROL_CONTROLTYPE_MIXER = (MIXERCONTROL_CONTROLTYPE_MULTIPLESELECT + 1)
+MIXERCONTROL_CONTROLTYPE_MICROTIME = (MIXERCONTROL_CT_CLASS_TIME | MIXERCONTROL_CT_SC_TIME_MICROSECS | MIXERCONTROL_CT_UNITS_UNSIGNED)
+MIXERCONTROL_CONTROLTYPE_MILLITIME = (MIXERCONTROL_CT_CLASS_TIME | MIXERCONTROL_CT_SC_TIME_MILLISECS | MIXERCONTROL_CT_UNITS_UNSIGNED)
+MIXER_GETLINECONTROLSF_ALL = 0x00000000L
+MIXER_GETLINECONTROLSF_ONEBYID = 0x00000001L
+MIXER_GETLINECONTROLSF_ONEBYTYPE = 0x00000002L
+MIXER_GETLINECONTROLSF_QUERYMASK = 0x0000000FL
+MIXER_GETCONTROLDETAILSF_VALUE = 0x00000000L
+MIXER_GETCONTROLDETAILSF_LISTTEXT = 0x00000001L
+MIXER_GETCONTROLDETAILSF_QUERYMASK = 0x0000000FL
+MIXER_SETCONTROLDETAILSF_VALUE = 0x00000000L
+MIXER_SETCONTROLDETAILSF_CUSTOM = 0x00000001L
+MIXER_SETCONTROLDETAILSF_QUERYMASK = 0x0000000FL
+TIMERR_NOERROR = (0)
+TIMERR_NOCANDO = (TIMERR_BASE+1)
+TIMERR_STRUCT = (TIMERR_BASE+33)
+TIME_ONESHOT = 0x0000
+TIME_PERIODIC = 0x0001
+TIME_CALLBACK_FUNCTION = 0x0000
+TIME_CALLBACK_EVENT_SET = 0x0010
+TIME_CALLBACK_EVENT_PULSE = 0x0020
+JOYERR_NOERROR = (0)
+JOYERR_PARMS = (JOYERR_BASE+5)
+JOYERR_NOCANDO = (JOYERR_BASE+6)
+JOYERR_UNPLUGGED = (JOYERR_BASE+7)
+JOY_BUTTON1 = 0x0001
+JOY_BUTTON2 = 0x0002
+JOY_BUTTON3 = 0x0004
+JOY_BUTTON4 = 0x0008
+JOY_BUTTON1CHG = 0x0100
+JOY_BUTTON2CHG = 0x0200
+JOY_BUTTON3CHG = 0x0400
+JOY_BUTTON4CHG = 0x0800
+JOY_BUTTON5 = 0x00000010l
+JOY_BUTTON6 = 0x00000020l
+JOY_BUTTON7 = 0x00000040l
+JOY_BUTTON8 = 0x00000080l
+JOY_BUTTON9 = 0x00000100l
+JOY_BUTTON10 = 0x00000200l
+JOY_BUTTON11 = 0x00000400l
+JOY_BUTTON12 = 0x00000800l
+JOY_BUTTON13 = 0x00001000l
+JOY_BUTTON14 = 0x00002000l
+JOY_BUTTON15 = 0x00004000l
+JOY_BUTTON16 = 0x00008000l
+JOY_BUTTON17 = 0x00010000l
+JOY_BUTTON18 = 0x00020000l
+JOY_BUTTON19 = 0x00040000l
+JOY_BUTTON20 = 0x00080000l
+JOY_BUTTON21 = 0x00100000l
+JOY_BUTTON22 = 0x00200000l
+JOY_BUTTON23 = 0x00400000l
+JOY_BUTTON24 = 0x00800000l
+JOY_BUTTON25 = 0x01000000l
+JOY_BUTTON26 = 0x02000000l
+JOY_BUTTON27 = 0x04000000l
+JOY_BUTTON28 = 0x08000000l
+JOY_BUTTON29 = 0x10000000l
+JOY_BUTTON30 = 0x20000000l
+JOY_BUTTON31 = 0x40000000l
+JOY_BUTTON32 = 0x80000000l
+JOY_POVFORWARD = 0
+JOY_POVRIGHT = 9000
+JOY_POVBACKWARD = 18000
+JOY_POVLEFT = 27000
+JOY_RETURNX = 0x00000001l
+JOY_RETURNY = 0x00000002l
+JOY_RETURNZ = 0x00000004l
+JOY_RETURNR = 0x00000008l
+JOY_RETURNU = 0x00000010l
+JOY_RETURNV = 0x00000020l
+JOY_RETURNPOV = 0x00000040l
+JOY_RETURNBUTTONS = 0x00000080l
+JOY_RETURNRAWDATA = 0x00000100l
+JOY_RETURNPOVCTS = 0x00000200l
+JOY_RETURNCENTERED = 0x00000400l
+JOY_USEDEADZONE = 0x00000800l
+JOY_RETURNALL = (JOY_RETURNX | JOY_RETURNY | JOY_RETURNZ | \
+				 JOY_RETURNR | JOY_RETURNU | JOY_RETURNV | \
+				 JOY_RETURNPOV | JOY_RETURNBUTTONS)
+JOY_CAL_READALWAYS = 0x00010000l
+JOY_CAL_READXYONLY = 0x00020000l
+JOY_CAL_READ3 = 0x00040000l
+JOY_CAL_READ4 = 0x00080000l
+JOY_CAL_READXONLY = 0x00100000l
+JOY_CAL_READYONLY = 0x00200000l
+JOY_CAL_READ5 = 0x00400000l
+JOY_CAL_READ6 = 0x00800000l
+JOY_CAL_READZONLY = 0x01000000l
+JOY_CAL_READRONLY = 0x02000000l
+JOY_CAL_READUONLY = 0x04000000l
+JOY_CAL_READVONLY = 0x08000000l
+JOYSTICKID1 = 0
+JOYSTICKID2 = 1
+JOYCAPS_HASZ = 0x0001
+JOYCAPS_HASR = 0x0002
+JOYCAPS_HASU = 0x0004
+JOYCAPS_HASV = 0x0008
+JOYCAPS_HASPOV = 0x0010
+JOYCAPS_POV4DIR = 0x0020
+JOYCAPS_POVCTS = 0x0040
+MMIOERR_BASE = 256
+MMIOERR_FILENOTFOUND = (MMIOERR_BASE + 1)
+MMIOERR_OUTOFMEMORY = (MMIOERR_BASE + 2)
+MMIOERR_CANNOTOPEN = (MMIOERR_BASE + 3)
+MMIOERR_CANNOTCLOSE = (MMIOERR_BASE + 4)
+MMIOERR_CANNOTREAD = (MMIOERR_BASE + 5)
+MMIOERR_CANNOTWRITE = (MMIOERR_BASE + 6)
+MMIOERR_CANNOTSEEK = (MMIOERR_BASE + 7)
+MMIOERR_CANNOTEXPAND = (MMIOERR_BASE + 8)
+MMIOERR_CHUNKNOTFOUND = (MMIOERR_BASE + 9)
+MMIOERR_UNBUFFERED = (MMIOERR_BASE + 10)
+MMIOERR_PATHNOTFOUND = (MMIOERR_BASE + 11)
+MMIOERR_ACCESSDENIED = (MMIOERR_BASE + 12)
+MMIOERR_SHARINGVIOLATION = (MMIOERR_BASE + 13)
+MMIOERR_NETWORKERROR = (MMIOERR_BASE + 14)
+MMIOERR_TOOMANYOPENFILES = (MMIOERR_BASE + 15)
+MMIOERR_INVALIDFILE = (MMIOERR_BASE + 16)
+CFSEPCHAR = ord('+')
+MMIO_RWMODE = 0x00000003
+MMIO_SHAREMODE = 0x00000070
+MMIO_CREATE = 0x00001000
+MMIO_PARSE = 0x00000100
+MMIO_DELETE = 0x00000200
+MMIO_EXIST = 0x00004000
+MMIO_ALLOCBUF = 0x00010000
+MMIO_GETTEMP = 0x00020000
+MMIO_DIRTY = 0x10000000
+MMIO_READ = 0x00000000
+MMIO_WRITE = 0x00000001
+MMIO_READWRITE = 0x00000002
+MMIO_COMPAT = 0x00000000
+MMIO_EXCLUSIVE = 0x00000010
+MMIO_DENYWRITE = 0x00000020
+MMIO_DENYREAD = 0x00000030
+MMIO_DENYNONE = 0x00000040
+MMIO_FHOPEN = 0x0010
+MMIO_EMPTYBUF = 0x0010
+MMIO_TOUPPER = 0x0010
+MMIO_INSTALLPROC = 0x00010000
+MMIO_GLOBALPROC = 0x10000000
+MMIO_REMOVEPROC = 0x00020000
+MMIO_UNICODEPROC = 0x01000000
+MMIO_FINDPROC = 0x00040000
+MMIO_FINDCHUNK = 0x0010
+MMIO_FINDRIFF = 0x0020
+MMIO_FINDLIST = 0x0040
+MMIO_CREATERIFF = 0x0020
+MMIO_CREATELIST = 0x0040
+MMIOM_READ = MMIO_READ
+MMIOM_WRITE = MMIO_WRITE
+MMIOM_SEEK = 2
+MMIOM_OPEN = 3
+MMIOM_CLOSE = 4
+MMIOM_WRITEFLUSH = 5
+MMIOM_RENAME = 6
+MMIOM_USER = 0x8000
+SEEK_SET = 0
+SEEK_CUR = 1
+SEEK_END = 2
+MMIO_DEFAULTBUFFER = 8192
+MCIERR_INVALID_DEVICE_ID = (MCIERR_BASE + 1)
+MCIERR_UNRECOGNIZED_KEYWORD = (MCIERR_BASE + 3)
+MCIERR_UNRECOGNIZED_COMMAND = (MCIERR_BASE + 5)
+MCIERR_HARDWARE = (MCIERR_BASE + 6)
+MCIERR_INVALID_DEVICE_NAME = (MCIERR_BASE + 7)
+MCIERR_OUT_OF_MEMORY = (MCIERR_BASE + 8)
+MCIERR_DEVICE_OPEN = (MCIERR_BASE + 9)
+MCIERR_CANNOT_LOAD_DRIVER = (MCIERR_BASE + 10)
+MCIERR_MISSING_COMMAND_STRING = (MCIERR_BASE + 11)
+MCIERR_PARAM_OVERFLOW = (MCIERR_BASE + 12)
+MCIERR_MISSING_STRING_ARGUMENT = (MCIERR_BASE + 13)
+MCIERR_BAD_INTEGER = (MCIERR_BASE + 14)
+MCIERR_PARSER_INTERNAL = (MCIERR_BASE + 15)
+MCIERR_DRIVER_INTERNAL = (MCIERR_BASE + 16)
+MCIERR_MISSING_PARAMETER = (MCIERR_BASE + 17)
+MCIERR_UNSUPPORTED_FUNCTION = (MCIERR_BASE + 18)
+MCIERR_FILE_NOT_FOUND = (MCIERR_BASE + 19)
+MCIERR_DEVICE_NOT_READY = (MCIERR_BASE + 20)
+MCIERR_INTERNAL = (MCIERR_BASE + 21)
+MCIERR_DRIVER = (MCIERR_BASE + 22)
+MCIERR_CANNOT_USE_ALL = (MCIERR_BASE + 23)
+MCIERR_MULTIPLE = (MCIERR_BASE + 24)
+MCIERR_EXTENSION_NOT_FOUND = (MCIERR_BASE + 25)
+MCIERR_OUTOFRANGE = (MCIERR_BASE + 26)
+MCIERR_FLAGS_NOT_COMPATIBLE = (MCIERR_BASE + 28)
+MCIERR_FILE_NOT_SAVED = (MCIERR_BASE + 30)
+MCIERR_DEVICE_TYPE_REQUIRED = (MCIERR_BASE + 31)
+MCIERR_DEVICE_LOCKED = (MCIERR_BASE + 32)
+MCIERR_DUPLICATE_ALIAS = (MCIERR_BASE + 33)
+MCIERR_BAD_CONSTANT = (MCIERR_BASE + 34)
+MCIERR_MUST_USE_SHAREABLE = (MCIERR_BASE + 35)
+MCIERR_MISSING_DEVICE_NAME = (MCIERR_BASE + 36)
+MCIERR_BAD_TIME_FORMAT = (MCIERR_BASE + 37)
+MCIERR_NO_CLOSING_QUOTE = (MCIERR_BASE + 38)
+MCIERR_DUPLICATE_FLAGS = (MCIERR_BASE + 39)
+MCIERR_INVALID_FILE = (MCIERR_BASE + 40)
+MCIERR_NULL_PARAMETER_BLOCK = (MCIERR_BASE + 41)
+MCIERR_UNNAMED_RESOURCE = (MCIERR_BASE + 42)
+MCIERR_NEW_REQUIRES_ALIAS = (MCIERR_BASE + 43)
+MCIERR_NOTIFY_ON_AUTO_OPEN = (MCIERR_BASE + 44)
+MCIERR_NO_ELEMENT_ALLOWED = (MCIERR_BASE + 45)
+MCIERR_NONAPPLICABLE_FUNCTION = (MCIERR_BASE + 46)
+MCIERR_ILLEGAL_FOR_AUTO_OPEN = (MCIERR_BASE + 47)
+MCIERR_FILENAME_REQUIRED = (MCIERR_BASE + 48)
+MCIERR_EXTRA_CHARACTERS = (MCIERR_BASE + 49)
+MCIERR_DEVICE_NOT_INSTALLED = (MCIERR_BASE + 50)
+MCIERR_GET_CD = (MCIERR_BASE + 51)
+MCIERR_SET_CD = (MCIERR_BASE + 52)
+MCIERR_SET_DRIVE = (MCIERR_BASE + 53)
+MCIERR_DEVICE_LENGTH = (MCIERR_BASE + 54)
+MCIERR_DEVICE_ORD_LENGTH = (MCIERR_BASE + 55)
+MCIERR_NO_INTEGER = (MCIERR_BASE + 56)
+MCIERR_WAVE_OUTPUTSINUSE = (MCIERR_BASE + 64)
+MCIERR_WAVE_SETOUTPUTINUSE = (MCIERR_BASE + 65)
+MCIERR_WAVE_INPUTSINUSE = (MCIERR_BASE + 66)
+MCIERR_WAVE_SETINPUTINUSE = (MCIERR_BASE + 67)
+MCIERR_WAVE_OUTPUTUNSPECIFIED = (MCIERR_BASE + 68)
+MCIERR_WAVE_INPUTUNSPECIFIED = (MCIERR_BASE + 69)
+MCIERR_WAVE_OUTPUTSUNSUITABLE = (MCIERR_BASE + 70)
+MCIERR_WAVE_SETOUTPUTUNSUITABLE = (MCIERR_BASE + 71)
+MCIERR_WAVE_INPUTSUNSUITABLE = (MCIERR_BASE + 72)
+MCIERR_WAVE_SETINPUTUNSUITABLE = (MCIERR_BASE + 73)
+MCIERR_SEQ_DIV_INCOMPATIBLE = (MCIERR_BASE + 80)
+MCIERR_SEQ_PORT_INUSE = (MCIERR_BASE + 81)
+MCIERR_SEQ_PORT_NONEXISTENT = (MCIERR_BASE + 82)
+MCIERR_SEQ_PORT_MAPNODEVICE = (MCIERR_BASE + 83)
+MCIERR_SEQ_PORT_MISCERROR = (MCIERR_BASE + 84)
+MCIERR_SEQ_TIMER = (MCIERR_BASE + 85)
+MCIERR_SEQ_PORTUNSPECIFIED = (MCIERR_BASE + 86)
+MCIERR_SEQ_NOMIDIPRESENT = (MCIERR_BASE + 87)
+MCIERR_NO_WINDOW = (MCIERR_BASE + 90)
+MCIERR_CREATEWINDOW = (MCIERR_BASE + 91)
+MCIERR_FILE_READ = (MCIERR_BASE + 92)
+MCIERR_FILE_WRITE = (MCIERR_BASE + 93)
+MCIERR_NO_IDENTITY = (MCIERR_BASE + 94)
+MCIERR_CUSTOM_DRIVER_BASE = (MCIERR_BASE + 256)
+MCI_FIRST = DRV_MCI_FIRST
+MCI_OPEN = 0x0803
+MCI_CLOSE = 0x0804
+MCI_ESCAPE = 0x0805
+MCI_PLAY = 0x0806
+MCI_SEEK = 0x0807
+MCI_STOP = 0x0808
+MCI_PAUSE = 0x0809
+MCI_INFO = 0x080A
+MCI_GETDEVCAPS = 0x080B
+MCI_SPIN = 0x080C
+MCI_SET = 0x080D
+MCI_STEP = 0x080E
+MCI_RECORD = 0x080F
+MCI_SYSINFO = 0x0810
+MCI_BREAK = 0x0811
+MCI_SAVE = 0x0813
+MCI_STATUS = 0x0814
+MCI_CUE = 0x0830
+MCI_REALIZE = 0x0840
+MCI_WINDOW = 0x0841
+MCI_PUT = 0x0842
+MCI_WHERE = 0x0843
+MCI_FREEZE = 0x0844
+MCI_UNFREEZE = 0x0845
+MCI_LOAD = 0x0850
+MCI_CUT = 0x0851
+MCI_COPY = 0x0852
+MCI_PASTE = 0x0853
+MCI_UPDATE = 0x0854
+MCI_RESUME = 0x0855
+MCI_DELETE = 0x0856
+MCI_USER_MESSAGES = (DRV_MCI_FIRST + 0x400)
+MCI_LAST = 0x0FFF
+MCI_DEVTYPE_VCR = 513
+MCI_DEVTYPE_VIDEODISC = 514
+MCI_DEVTYPE_OVERLAY = 515
+MCI_DEVTYPE_CD_AUDIO = 516
+MCI_DEVTYPE_DAT = 517
+MCI_DEVTYPE_SCANNER = 518
+MCI_DEVTYPE_ANIMATION = 519
+MCI_DEVTYPE_DIGITAL_VIDEO = 520
+MCI_DEVTYPE_OTHER = 521
+MCI_DEVTYPE_WAVEFORM_AUDIO = 522
+MCI_DEVTYPE_SEQUENCER = 523
+MCI_DEVTYPE_FIRST = MCI_DEVTYPE_VCR
+MCI_DEVTYPE_LAST = MCI_DEVTYPE_SEQUENCER
+MCI_DEVTYPE_FIRST_USER = 0x1000
+MCI_MODE_NOT_READY = (MCI_STRING_OFFSET + 12)
+MCI_MODE_STOP = (MCI_STRING_OFFSET + 13)
+MCI_MODE_PLAY = (MCI_STRING_OFFSET + 14)
+MCI_MODE_RECORD = (MCI_STRING_OFFSET + 15)
+MCI_MODE_SEEK = (MCI_STRING_OFFSET + 16)
+MCI_MODE_PAUSE = (MCI_STRING_OFFSET + 17)
+MCI_MODE_OPEN = (MCI_STRING_OFFSET + 18)
+MCI_FORMAT_MILLISECONDS = 0
+MCI_FORMAT_HMS = 1
+MCI_FORMAT_MSF = 2
+MCI_FORMAT_FRAMES = 3
+MCI_FORMAT_SMPTE_24 = 4
+MCI_FORMAT_SMPTE_25 = 5
+MCI_FORMAT_SMPTE_30 = 6
+MCI_FORMAT_SMPTE_30DROP = 7
+MCI_FORMAT_BYTES = 8
+MCI_FORMAT_SAMPLES = 9
+MCI_FORMAT_TMSF = 10
+def MCI_MSF_MINUTE(msf): return ((BYTE)(msf))
+
+def MCI_MSF_SECOND(msf): return ((BYTE)(((WORD)(msf)) >> 8))
+
+def MCI_MSF_FRAME(msf): return ((BYTE)((msf)>>16))
+
+def MCI_TMSF_TRACK(tmsf): return ((BYTE)(tmsf))
+
+def MCI_TMSF_MINUTE(tmsf): return ((BYTE)(((WORD)(tmsf)) >> 8))
+
+def MCI_TMSF_SECOND(tmsf): return ((BYTE)((tmsf)>>16))
+
+def MCI_TMSF_FRAME(tmsf): return ((BYTE)((tmsf)>>24))
+
+def MCI_HMS_HOUR(hms): return ((BYTE)(hms))
+
+def MCI_HMS_MINUTE(hms): return ((BYTE)(((WORD)(hms)) >> 8))
+
+def MCI_HMS_SECOND(hms): return ((BYTE)((hms)>>16))
+
+MCI_NOTIFY_SUCCESSFUL = 0x0001
+MCI_NOTIFY_SUPERSEDED = 0x0002
+MCI_NOTIFY_ABORTED = 0x0004
+MCI_NOTIFY_FAILURE = 0x0008
+MCI_NOTIFY = 0x00000001L
+MCI_WAIT = 0x00000002L
+MCI_FROM = 0x00000004L
+MCI_TO = 0x00000008L
+MCI_TRACK = 0x00000010L
+MCI_OPEN_SHAREABLE = 0x00000100L
+MCI_OPEN_ELEMENT = 0x00000200L
+MCI_OPEN_ALIAS = 0x00000400L
+MCI_OPEN_ELEMENT_ID = 0x00000800L
+MCI_OPEN_TYPE_ID = 0x00001000L
+MCI_OPEN_TYPE = 0x00002000L
+MCI_SEEK_TO_START = 0x00000100L
+MCI_SEEK_TO_END = 0x00000200L
+MCI_STATUS_ITEM = 0x00000100L
+MCI_STATUS_START = 0x00000200L
+MCI_STATUS_LENGTH = 0x00000001L
+MCI_STATUS_POSITION = 0x00000002L
+MCI_STATUS_NUMBER_OF_TRACKS = 0x00000003L
+MCI_STATUS_MODE = 0x00000004L
+MCI_STATUS_MEDIA_PRESENT = 0x00000005L
+MCI_STATUS_TIME_FORMAT = 0x00000006L
+MCI_STATUS_READY = 0x00000007L
+MCI_STATUS_CURRENT_TRACK = 0x00000008L
+MCI_INFO_PRODUCT = 0x00000100L
+MCI_INFO_FILE = 0x00000200L
+MCI_INFO_MEDIA_UPC = 0x00000400L
+MCI_INFO_MEDIA_IDENTITY = 0x00000800L
+MCI_INFO_NAME = 0x00001000L
+MCI_INFO_COPYRIGHT = 0x00002000L
+MCI_GETDEVCAPS_ITEM = 0x00000100L
+MCI_GETDEVCAPS_CAN_RECORD = 0x00000001L
+MCI_GETDEVCAPS_HAS_AUDIO = 0x00000002L
+MCI_GETDEVCAPS_HAS_VIDEO = 0x00000003L
+MCI_GETDEVCAPS_DEVICE_TYPE = 0x00000004L
+MCI_GETDEVCAPS_USES_FILES = 0x00000005L
+MCI_GETDEVCAPS_COMPOUND_DEVICE = 0x00000006L
+MCI_GETDEVCAPS_CAN_EJECT = 0x00000007L
+MCI_GETDEVCAPS_CAN_PLAY = 0x00000008L
+MCI_GETDEVCAPS_CAN_SAVE = 0x00000009L
+MCI_SYSINFO_QUANTITY = 0x00000100L
+MCI_SYSINFO_OPEN = 0x00000200L
+MCI_SYSINFO_NAME = 0x00000400L
+MCI_SYSINFO_INSTALLNAME = 0x00000800L
+MCI_SET_DOOR_OPEN = 0x00000100L
+MCI_SET_DOOR_CLOSED = 0x00000200L
+MCI_SET_TIME_FORMAT = 0x00000400L
+MCI_SET_AUDIO = 0x00000800L
+MCI_SET_VIDEO = 0x00001000L
+MCI_SET_ON = 0x00002000L
+MCI_SET_OFF = 0x00004000L
+MCI_SET_AUDIO_ALL = 0x00000000L
+MCI_SET_AUDIO_LEFT = 0x00000001L
+MCI_SET_AUDIO_RIGHT = 0x00000002L
+MCI_BREAK_KEY = 0x00000100L
+MCI_BREAK_HWND = 0x00000200L
+MCI_BREAK_OFF = 0x00000400L
+MCI_RECORD_INSERT = 0x00000100L
+MCI_RECORD_OVERWRITE = 0x00000200L
+MCI_SAVE_FILE = 0x00000100L
+MCI_LOAD_FILE = 0x00000100L
+MCI_VD_MODE_PARK = (MCI_VD_OFFSET + 1)
+MCI_VD_MEDIA_CLV = (MCI_VD_OFFSET + 2)
+MCI_VD_MEDIA_CAV = (MCI_VD_OFFSET + 3)
+MCI_VD_MEDIA_OTHER = (MCI_VD_OFFSET + 4)
+MCI_VD_FORMAT_TRACK = 0x4001
+MCI_VD_PLAY_REVERSE = 0x00010000L
+MCI_VD_PLAY_FAST = 0x00020000L
+MCI_VD_PLAY_SPEED = 0x00040000L
+MCI_VD_PLAY_SCAN = 0x00080000L
+MCI_VD_PLAY_SLOW = 0x00100000L
+MCI_VD_SEEK_REVERSE = 0x00010000L
+MCI_VD_STATUS_SPEED = 0x00004002L
+MCI_VD_STATUS_FORWARD = 0x00004003L
+MCI_VD_STATUS_MEDIA_TYPE = 0x00004004L
+MCI_VD_STATUS_SIDE = 0x00004005L
+MCI_VD_STATUS_DISC_SIZE = 0x00004006L
+MCI_VD_GETDEVCAPS_CLV = 0x00010000L
+MCI_VD_GETDEVCAPS_CAV = 0x00020000L
+MCI_VD_SPIN_UP = 0x00010000L
+MCI_VD_SPIN_DOWN = 0x00020000L
+MCI_VD_GETDEVCAPS_CAN_REVERSE = 0x00004002L
+MCI_VD_GETDEVCAPS_FAST_RATE = 0x00004003L
+MCI_VD_GETDEVCAPS_SLOW_RATE = 0x00004004L
+MCI_VD_GETDEVCAPS_NORMAL_RATE = 0x00004005L
+MCI_VD_STEP_FRAMES = 0x00010000L
+MCI_VD_STEP_REVERSE = 0x00020000L
+MCI_VD_ESCAPE_STRING = 0x00000100L
+MCI_CDA_STATUS_TYPE_TRACK = 0x00004001L
+MCI_CDA_TRACK_AUDIO = (MCI_CD_OFFSET + 0)
+MCI_CDA_TRACK_OTHER = (MCI_CD_OFFSET + 1)
+MCI_WAVE_PCM = (MCI_WAVE_OFFSET + 0)
+MCI_WAVE_MAPPER = (MCI_WAVE_OFFSET + 1)
+MCI_WAVE_OPEN_BUFFER = 0x00010000L
+MCI_WAVE_SET_FORMATTAG = 0x00010000L
+MCI_WAVE_SET_CHANNELS = 0x00020000L
+MCI_WAVE_SET_SAMPLESPERSEC = 0x00040000L
+MCI_WAVE_SET_AVGBYTESPERSEC = 0x00080000L
+MCI_WAVE_SET_BLOCKALIGN = 0x00100000L
+MCI_WAVE_SET_BITSPERSAMPLE = 0x00200000L
+MCI_WAVE_INPUT = 0x00400000L
+MCI_WAVE_OUTPUT = 0x00800000L
+MCI_WAVE_STATUS_FORMATTAG = 0x00004001L
+MCI_WAVE_STATUS_CHANNELS = 0x00004002L
+MCI_WAVE_STATUS_SAMPLESPERSEC = 0x00004003L
+MCI_WAVE_STATUS_AVGBYTESPERSEC = 0x00004004L
+MCI_WAVE_STATUS_BLOCKALIGN = 0x00004005L
+MCI_WAVE_STATUS_BITSPERSAMPLE = 0x00004006L
+MCI_WAVE_STATUS_LEVEL = 0x00004007L
+MCI_WAVE_SET_ANYINPUT = 0x04000000L
+MCI_WAVE_SET_ANYOUTPUT = 0x08000000L
+MCI_WAVE_GETDEVCAPS_INPUTS = 0x00004001L
+MCI_WAVE_GETDEVCAPS_OUTPUTS = 0x00004002L
+MCI_SEQ_DIV_PPQN = (0 + MCI_SEQ_OFFSET)
+MCI_SEQ_DIV_SMPTE_24 = (1 + MCI_SEQ_OFFSET)
+MCI_SEQ_DIV_SMPTE_25 = (2 + MCI_SEQ_OFFSET)
+MCI_SEQ_DIV_SMPTE_30DROP = (3 + MCI_SEQ_OFFSET)
+MCI_SEQ_DIV_SMPTE_30 = (4 + MCI_SEQ_OFFSET)
+MCI_SEQ_FORMAT_SONGPTR = 0x4001
+MCI_SEQ_FILE = 0x4002
+MCI_SEQ_MIDI = 0x4003
+MCI_SEQ_SMPTE = 0x4004
+MCI_SEQ_NONE = 65533
+MCI_SEQ_MAPPER = 65535
+MCI_SEQ_STATUS_TEMPO = 0x00004002L
+MCI_SEQ_STATUS_PORT = 0x00004003L
+MCI_SEQ_STATUS_SLAVE = 0x00004007L
+MCI_SEQ_STATUS_MASTER = 0x00004008L
+MCI_SEQ_STATUS_OFFSET = 0x00004009L
+MCI_SEQ_STATUS_DIVTYPE = 0x0000400AL
+MCI_SEQ_STATUS_NAME = 0x0000400BL
+MCI_SEQ_STATUS_COPYRIGHT = 0x0000400CL
+MCI_SEQ_SET_TEMPO = 0x00010000L
+MCI_SEQ_SET_PORT = 0x00020000L
+MCI_SEQ_SET_SLAVE = 0x00040000L
+MCI_SEQ_SET_MASTER = 0x00080000L
+MCI_SEQ_SET_OFFSET = 0x01000000L
+MCI_ANIM_OPEN_WS = 0x00010000L
+MCI_ANIM_OPEN_PARENT = 0x00020000L
+MCI_ANIM_OPEN_NOSTATIC = 0x00040000L
+MCI_ANIM_PLAY_SPEED = 0x00010000L
+MCI_ANIM_PLAY_REVERSE = 0x00020000L
+MCI_ANIM_PLAY_FAST = 0x00040000L
+MCI_ANIM_PLAY_SLOW = 0x00080000L
+MCI_ANIM_PLAY_SCAN = 0x00100000L
+MCI_ANIM_STEP_REVERSE = 0x00010000L
+MCI_ANIM_STEP_FRAMES = 0x00020000L
+MCI_ANIM_STATUS_SPEED = 0x00004001L
+MCI_ANIM_STATUS_FORWARD = 0x00004002L
+MCI_ANIM_STATUS_HWND = 0x00004003L
+MCI_ANIM_STATUS_HPAL = 0x00004004L
+MCI_ANIM_STATUS_STRETCH = 0x00004005L
+MCI_ANIM_INFO_TEXT = 0x00010000L
+MCI_ANIM_GETDEVCAPS_CAN_REVERSE = 0x00004001L
+MCI_ANIM_GETDEVCAPS_FAST_RATE = 0x00004002L
+MCI_ANIM_GETDEVCAPS_SLOW_RATE = 0x00004003L
+MCI_ANIM_GETDEVCAPS_NORMAL_RATE = 0x00004004L
+MCI_ANIM_GETDEVCAPS_PALETTES = 0x00004006L
+MCI_ANIM_GETDEVCAPS_CAN_STRETCH = 0x00004007L
+MCI_ANIM_GETDEVCAPS_MAX_WINDOWS = 0x00004008L
+MCI_ANIM_REALIZE_NORM = 0x00010000L
+MCI_ANIM_REALIZE_BKGD = 0x00020000L
+MCI_ANIM_WINDOW_HWND = 0x00010000L
+MCI_ANIM_WINDOW_STATE = 0x00040000L
+MCI_ANIM_WINDOW_TEXT = 0x00080000L
+MCI_ANIM_WINDOW_ENABLE_STRETCH = 0x00100000L
+MCI_ANIM_WINDOW_DISABLE_STRETCH = 0x00200000L
+MCI_ANIM_WINDOW_DEFAULT = 0x00000000L
+MCI_ANIM_RECT = 0x00010000L
+MCI_ANIM_PUT_SOURCE = 0x00020000L
+MCI_ANIM_PUT_DESTINATION = 0x00040000L
+MCI_ANIM_WHERE_SOURCE = 0x00020000L
+MCI_ANIM_WHERE_DESTINATION = 0x00040000L
+MCI_ANIM_UPDATE_HDC = 0x00020000L
+MCI_OVLY_OPEN_WS = 0x00010000L
+MCI_OVLY_OPEN_PARENT = 0x00020000L
+MCI_OVLY_STATUS_HWND = 0x00004001L
+MCI_OVLY_STATUS_STRETCH = 0x00004002L
+MCI_OVLY_INFO_TEXT = 0x00010000L
+MCI_OVLY_GETDEVCAPS_CAN_STRETCH = 0x00004001L
+MCI_OVLY_GETDEVCAPS_CAN_FREEZE = 0x00004002L
+MCI_OVLY_GETDEVCAPS_MAX_WINDOWS = 0x00004003L
+MCI_OVLY_WINDOW_HWND = 0x00010000L
+MCI_OVLY_WINDOW_STATE = 0x00040000L
+MCI_OVLY_WINDOW_TEXT = 0x00080000L
+MCI_OVLY_WINDOW_ENABLE_STRETCH = 0x00100000L
+MCI_OVLY_WINDOW_DISABLE_STRETCH = 0x00200000L
+MCI_OVLY_WINDOW_DEFAULT = 0x00000000L
+MCI_OVLY_RECT = 0x00010000L
+MCI_OVLY_PUT_SOURCE = 0x00020000L
+MCI_OVLY_PUT_DESTINATION = 0x00040000L
+MCI_OVLY_PUT_FRAME = 0x00080000L
+MCI_OVLY_PUT_VIDEO = 0x00100000L
+MCI_OVLY_WHERE_SOURCE = 0x00020000L
+MCI_OVLY_WHERE_DESTINATION = 0x00040000L
+MCI_OVLY_WHERE_FRAME = 0x00080000L
+MCI_OVLY_WHERE_VIDEO = 0x00100000L
+SELECTDIB = 41
+def DIBINDEX(n): return MAKELONG((n),0x10FF)  # palette-index DIB color spec; presumably MAKELONG is defined earlier in this h2py module -- TODO confirm
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/netbios.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/netbios.py
new file mode 100644
index 0000000..e8b698d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/netbios.py
@@ -0,0 +1,286 @@
+import win32wnet
+import struct
+import string
+
+# Constants generated by h2py from nb30.h
+NCBNAMSZ = 16  # fixed length of a NetBIOS name (padded with spaces)
+MAX_LANA = 254  # LANA numbers range 0..MAX_LANA inclusive
+NAME_FLAGS_MASK = 0x87
+GROUP_NAME = 0x80
+UNIQUE_NAME = 0x00
+REGISTERING = 0x00
+REGISTERED = 0x04
+DEREGISTERED = 0x05
+DUPLICATE = 0x06
+DUPLICATE_DEREG = 0x07
+LISTEN_OUTSTANDING = 0x01
+CALL_PENDING = 0x02
+SESSION_ESTABLISHED = 0x03
+HANGUP_PENDING = 0x04
+HANGUP_COMPLETE = 0x05
+SESSION_ABORTED = 0x06
+ALL_TRANSPORTS = "M\0\0\0"  # transport-id values for NCBACTION -- per nb30.h
+MS_NBF = "MNBF"
+NCBCALL = 0x10  # NCB command codes start here
+NCBLISTEN = 0x11
+NCBHANGUP = 0x12
+NCBSEND = 0x14
+NCBRECV = 0x15
+NCBRECVANY = 0x16
+NCBCHAINSEND = 0x17
+NCBDGSEND = 0x20
+NCBDGRECV = 0x21
+NCBDGSENDBC = 0x22
+NCBDGRECVBC = 0x23
+NCBADDNAME = 0x30
+NCBDELNAME = 0x31
+NCBRESET = 0x32
+NCBASTAT = 0x33
+NCBSSTAT = 0x34
+NCBCANCEL = 0x35
+NCBADDGRNAME = 0x36
+NCBENUM = 0x37
+NCBUNLINK = 0x70
+NCBSENDNA = 0x71
+NCBCHAINSENDNA = 0x72
+NCBLANSTALERT = 0x73
+NCBACTION = 0x77
+NCBFINDNAME = 0x78
+NCBTRACE = 0x79
+ASYNCH = 0x80  # OR into a command code for asynchronous execution -- per nb30.h
+NRC_GOODRET = 0x00  # NRC_* NetBIOS return codes; 0 means success
+NRC_BUFLEN = 0x01
+NRC_ILLCMD = 0x03
+NRC_CMDTMO = 0x05
+NRC_INCOMP = 0x06
+NRC_BADDR = 0x07
+NRC_SNUMOUT = 0x08
+NRC_NORES = 0x09
+NRC_SCLOSED = 0x0a
+NRC_CMDCAN = 0x0b
+NRC_DUPNAME = 0x0d
+NRC_NAMTFUL = 0x0e
+NRC_ACTSES = 0x0f
+NRC_LOCTFUL = 0x11
+NRC_REMTFUL = 0x12
+NRC_ILLNN = 0x13
+NRC_NOCALL = 0x14
+NRC_NOWILD = 0x15
+NRC_INUSE = 0x16
+NRC_NAMERR = 0x17
+NRC_SABORT = 0x18
+NRC_NAMCONF = 0x19
+NRC_IFBUSY = 0x21
+NRC_TOOMANY = 0x22
+NRC_BRIDGE = 0x23
+NRC_CANOCCR = 0x24
+NRC_CANCEL = 0x26
+NRC_DUPENV = 0x30
+NRC_ENVNOTDEF = 0x34
+NRC_OSRESNOTAV = 0x35
+NRC_MAXAPPS = 0x36
+NRC_NOSAPS = 0x37
+NRC_NORESOURCES = 0x38
+NRC_INVADDRESS = 0x39
+NRC_INVDDID = 0x3B
+NRC_LOCKFAIL = 0x3C
+NRC_OPENERR = 0x3f
+NRC_SYSTEM = 0x40
+NRC_PENDING = 0xff  # command accepted, still executing asynchronously
+
+
+UCHAR = "B"  # struct-module format codes for the corresponding Windows types
+WORD = "H"
+DWORD = "I"
+USHORT = "H"
+ULONG = "I"
+
+ADAPTER_STATUS_ITEMS = [  # (struct format, field name) pairs, in wire order
+    ("6s",        "adapter_address"),
+    (UCHAR,   "rev_major"),
+    (UCHAR,   "reserved0"),
+    (UCHAR,   "adapter_type"),
+    (UCHAR,   "rev_minor"),
+    (WORD,    "duration"),
+    (WORD,    "frmr_recv"),
+    (WORD,    "frmr_xmit"),
+
+    (WORD,    "iframe_recv_err"),
+
+    (WORD,    "xmit_aborts"),
+    (DWORD,   "xmit_success"),
+    (DWORD,   "recv_success"),
+
+    (WORD,    "iframe_xmit_err"),
+
+    (WORD,    "recv_buff_unavail"),
+    (WORD,    "t1_timeouts"),
+    (WORD,    "ti_timeouts"),
+    (DWORD,   "reserved1"),
+    (WORD,    "free_ncbs"),
+    (WORD,    "max_cfg_ncbs"),
+    (WORD,    "max_ncbs"),
+    (WORD,    "xmit_buf_unavail"),
+    (WORD,    "max_dgram_size"),
+    (WORD,    "pending_sess"),
+    (WORD,    "max_cfg_sess"),
+    (WORD,    "max_sess"),
+    (WORD,    "max_sess_pkt_size"),
+    (WORD,    "name_count"),
+]
+
+NAME_BUFFER_ITEMS = [
+    (str(NCBNAMSZ) + "s", "name"),
+    (UCHAR,   "name_num"),
+    (UCHAR,   "name_flags"),
+]
+
+SESSION_HEADER_ITEMS = [
+    (UCHAR,   "sess_name"),
+    (UCHAR,   "num_sess"),
+    (UCHAR,   "rcv_dg_outstanding"),
+    (UCHAR,   "rcv_any_outstanding"),
+]
+
+SESSION_BUFFER_ITEMS = [
+    (UCHAR,   "lsn"),
+    (UCHAR,   "state"),
+    (str(NCBNAMSZ)+"s",   "local_name"),
+    (str(NCBNAMSZ)+"s",   "remote_name"),
+    (UCHAR,   "rcvs_outstanding"),
+    (UCHAR,   "sends_outstanding"),
+]
+
+LANA_ENUM_ITEMS = [
+    ("B",   "length"),         # Number of valid entries in lana[]
+    (str(MAX_LANA+1) + "s", "lana"),
+]
+
+FIND_NAME_HEADER_ITEMS = [
+    (WORD,    "node_count"),
+    (UCHAR,   "reserved"),
+    (UCHAR,   "unique_group"),
+]
+
+FIND_NAME_BUFFER_ITEMS = [
+    (UCHAR,   "length"),
+    (UCHAR,   "access_control"),
+    (UCHAR,   "frame_control"),
+    ("6s",   "destination_addr"),
+    ("6s", "source_addr"), 
+    ("18s", "routing_info"),
+]
+
+ACTION_HEADER_ITEMS = [
+    (ULONG,   "transport_id"),
+    (USHORT,  "action_code"),
+    (USHORT,  "reserved"),
+]
+    
+del UCHAR, WORD, DWORD, USHORT, ULONG  # aliases were only needed to build the tables above
+
+NCB = win32wnet.NCB  # re-export the NCB type from the C extension module
+def Netbios(ncb):  # invoke win32wnet.Netbios, auto-(un)packing NCBStruct buffers
+    ob = ncb.Buffer
+    is_ours = hasattr(ob, "_pack")  # NCBStruct instances expose _pack/_unpack
+    if is_ours:
+        ob._pack()  # serialize attribute values into the raw buffer before the call
+    try:
+        return win32wnet.Netbios(ncb)
+    finally:
+        if is_ours:
+            ob._unpack()  # refresh attributes from the buffer even if the call raised
+        
+class NCBStruct:  # maps named fields onto a raw NCB buffer via the struct module
+    def __init__(self, items):
+        self._format = string.join(map(lambda item: item[0], items), "")  # Python 2 string.join: concatenate the format codes
+        self._items = items
+        self._buffer_ = win32wnet.NCBBuffer(struct.calcsize(self._format))  # one raw buffer sized for the whole struct
+
+        for format, name in self._items:  # seed default values: 0 for numbers, NULs for byte fields
+            if len(format)==1:
+                if format == 'c':
+                    val = '\0'
+                else:
+                    val = 0
+            else:
+                l = int(format[:-1])  # multi-byte formats look like "6s" -- length prefix
+                val = '\0' * l
+            self.__dict__[name] = val
+
+    def _pack(self):  # serialize current attribute values into the raw buffer
+        vals = []
+        for format, name in self._items:
+            try:
+                vals.append(self.__dict__[name])
+            except KeyError:
+                vals.append(None)
+        
+        self._buffer_[:] = apply( struct.pack, (self._format,) + tuple(vals) )  # Python 2 apply(): struct.pack(self._format, *vals)
+
+    def _unpack(self):  # refresh attribute values from the raw buffer
+        items = struct.unpack(self._format, self._buffer_)
+        assert len(items)==len(self._items), "unexpected number of items to unpack!"
+        for (format, name), val in map(None, self._items, items):  # Python 2 map(None, ...) zips the two lists
+            self.__dict__[name] = val
+
+    def __setattr__(self, attr, val):  # reject unknown public attribute names to catch typos
+        if not self.__dict__.has_key(attr) and attr[0]!='_':  # has_key: Python 2 dict API
+            for format, attr_name in self._items:
+                if attr==attr_name:
+                    break
+            else:
+                raise AttributeError, attr
+        self.__dict__[attr] = val
+
+def ADAPTER_STATUS():  # factory: NCBStruct for an NCBASTAT reply
+    return NCBStruct(ADAPTER_STATUS_ITEMS)
+
+def NAME_BUFFER():  # factory: NCBStruct for one registered-name entry
+    return NCBStruct(NAME_BUFFER_ITEMS)
+
+def SESSION_HEADER():  # factory: NCBStruct for an NCBSSTAT header
+    return NCBStruct(SESSION_HEADER_ITEMS)
+
+def SESSION_BUFFER():  # factory: NCBStruct for one session entry
+    return NCBStruct(SESSION_BUFFER_ITEMS)
+
+def LANA_ENUM():  # factory: NCBStruct for an NCBENUM reply
+    return NCBStruct(LANA_ENUM_ITEMS)
+
+def FIND_NAME_HEADER():  # factory: NCBStruct for an NCBFINDNAME header
+    return NCBStruct(FIND_NAME_HEADER_ITEMS)
+
+def FIND_NAME_BUFFER():  # factory: NCBStruct for one found-name entry
+    return NCBStruct(FIND_NAME_BUFFER_ITEMS)
+
+def ACTION_HEADER():  # factory: NCBStruct for an NCBACTION header
+    return NCBStruct(ACTION_HEADER_ITEMS)
+
+if __name__=='__main__':
+    # code ported from "HOWTO: Get the MAC Address for an Ethernet Adapter"
+    # MS KB ID: Q118623 
+    ncb = NCB()
+    ncb.Command = NCBENUM  # enumerate the available LANA numbers
+    la_enum = LANA_ENUM()
+    ncb.Buffer = la_enum
+    rc = Netbios(ncb)
+    if rc != 0: raise RuntimeError, "Unexpected result %d" % (rc,)
+    for i in range(la_enum.length):  # one adapter per valid lana[] entry
+        ncb.Reset()
+        ncb.Command = NCBRESET  # reset the adapter before querying it
+        ncb.Lana_num = ord(la_enum.lana[i])
+        rc = Netbios(ncb)
+        if rc != 0: raise RuntimeError, "Unexpected result %d" % (rc,)
+        ncb.Reset()
+        ncb.Command = NCBASTAT  # adapter-status query
+        ncb.Lana_num = ord(la_enum.lana[i])
+        ncb.Callname = "*               "  # "*" padded to NCBNAMSZ = local adapter
+        adapter = ADAPTER_STATUS()
+        ncb.Buffer = adapter
+        Netbios(ncb)
+        print "Adapter address:",
+        for ch in adapter.adapter_address:  # 6-byte MAC, printed hex byte by byte
+            print "%02x" % (ord(ch),) ,
+        print
+    
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/ntsecuritycon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/ntsecuritycon.py
new file mode 100644
index 0000000..21f1f30
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/ntsecuritycon.py
@@ -0,0 +1,609 @@
+# Hacked from winnt.h
+
+DELETE = (65536)
+READ_CONTROL = (131072)
+WRITE_DAC = (262144)
+WRITE_OWNER = (524288)
+SYNCHRONIZE = (1048576)
+STANDARD_RIGHTS_REQUIRED = (983040)
+STANDARD_RIGHTS_READ = (READ_CONTROL)
+STANDARD_RIGHTS_WRITE = (READ_CONTROL)
+STANDARD_RIGHTS_EXECUTE = (READ_CONTROL)
+STANDARD_RIGHTS_ALL = (2031616)
+SPECIFIC_RIGHTS_ALL = (65535)
+ACCESS_SYSTEM_SECURITY = (16777216)
+MAXIMUM_ALLOWED = (33554432)
+GENERIC_READ = (-2147483648)
+GENERIC_WRITE = (1073741824)
+GENERIC_EXECUTE = (536870912)
+GENERIC_ALL = (268435456)
+
+# file security permissions
+FILE_READ_DATA=            ( 1 )
+FILE_LIST_DIRECTORY=       ( 1 )
+FILE_WRITE_DATA=           ( 2 )
+FILE_ADD_FILE=             ( 2 )
+FILE_APPEND_DATA=          ( 4 )
+FILE_ADD_SUBDIRECTORY=     ( 4 )
+FILE_CREATE_PIPE_INSTANCE= ( 4 )
+FILE_READ_EA=              ( 8 )
+FILE_WRITE_EA=             ( 16 )
+FILE_EXECUTE=              ( 32 )
+FILE_TRAVERSE=             ( 32 )
+FILE_DELETE_CHILD=         ( 64 )
+FILE_READ_ATTRIBUTES=      ( 128 )
+FILE_WRITE_ATTRIBUTES=     ( 256 )
+FILE_ALL_ACCESS=           (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 1023)
+FILE_GENERIC_READ=         (STANDARD_RIGHTS_READ | FILE_READ_DATA | FILE_READ_ATTRIBUTES | FILE_READ_EA | SYNCHRONIZE)
+FILE_GENERIC_WRITE=        (STANDARD_RIGHTS_WRITE | FILE_WRITE_DATA | FILE_WRITE_ATTRIBUTES | FILE_WRITE_EA | FILE_APPEND_DATA | SYNCHRONIZE)
+FILE_GENERIC_EXECUTE=      (STANDARD_RIGHTS_EXECUTE | FILE_READ_ATTRIBUTES | FILE_EXECUTE | SYNCHRONIZE)
+
+
+SECURITY_NULL_SID_AUTHORITY = (0,0,0,0,0,0)
+SECURITY_WORLD_SID_AUTHORITY = (0,0,0,0,0,1)
+SECURITY_LOCAL_SID_AUTHORITY = (0,0,0,0,0,2)
+SECURITY_CREATOR_SID_AUTHORITY = (0,0,0,0,0,3)
+SECURITY_NON_UNIQUE_AUTHORITY = (0,0,0,0,0,4)
+
+SECURITY_NULL_RID                 = 0
+SECURITY_WORLD_RID                = 0
+SECURITY_LOCAL_RID                = 0X00000000
+
+SECURITY_CREATOR_OWNER_RID        = 0
+SECURITY_CREATOR_GROUP_RID        = 1
+
+SECURITY_CREATOR_OWNER_SERVER_RID = 2
+SECURITY_CREATOR_GROUP_SERVER_RID = 3
+
+
+# NT well-known SIDs
+SECURITY_NT_AUTHORITY = (0,0,0,0,0,5)
+
+SECURITY_DIALUP_RID             = 1
+SECURITY_NETWORK_RID            = 2
+SECURITY_BATCH_RID              = 3
+SECURITY_INTERACTIVE_RID        = 4
+SECURITY_SERVICE_RID            = 6
+SECURITY_ANONYMOUS_LOGON_RID    = 7
+SECURITY_PROXY_RID              = 8
+SECURITY_SERVER_LOGON_RID       = 9
+
+SECURITY_LOGON_IDS_RID          = 5
+SECURITY_LOGON_IDS_RID_COUNT    = 3
+
+SECURITY_LOCAL_SYSTEM_RID       = 18
+
+SECURITY_NT_NON_UNIQUE          = 21
+
+SECURITY_BUILTIN_DOMAIN_RID     = 32
+
+# well-known domain relative sub-authority values (RIDs)...
+DOMAIN_USER_RID_ADMIN          = 500
+DOMAIN_USER_RID_GUEST          = 501
+
+
+
+# well-known groups ...
+
+DOMAIN_GROUP_RID_ADMINS        = 512
+DOMAIN_GROUP_RID_USERS         = 513
+DOMAIN_GROUP_RID_GUESTS        = 514
+
+
+
+
+# well-known aliases ...
+
+DOMAIN_ALIAS_RID_ADMINS        = 544
+DOMAIN_ALIAS_RID_USERS         = 545
+DOMAIN_ALIAS_RID_GUESTS        = 546
+DOMAIN_ALIAS_RID_POWER_USERS   = 547
+
+DOMAIN_ALIAS_RID_ACCOUNT_OPS   = 548
+DOMAIN_ALIAS_RID_SYSTEM_OPS    = 549
+DOMAIN_ALIAS_RID_PRINT_OPS     = 550
+DOMAIN_ALIAS_RID_BACKUP_OPS    = 551
+
+DOMAIN_ALIAS_RID_REPLICATOR    = 552
+
+SYSTEM_LUID                    = (999, 0)
+
+# Group attributes
+
+SE_GROUP_MANDATORY              = 1
+SE_GROUP_ENABLED_BY_DEFAULT     = 2
+SE_GROUP_ENABLED                = 4
+SE_GROUP_OWNER                  = 8
+SE_GROUP_LOGON_ID               = -1073741824
+
+
+# User attributes
+# (None yet defined.)
+
+ACCESS_ALLOWED_ACE_TYPE          = 0
+ACCESS_DENIED_ACE_TYPE           = 1
+SYSTEM_AUDIT_ACE_TYPE            = 2
+SYSTEM_ALARM_ACE_TYPE            = 3
+
+
+#  The following are the inherit flags that go into the AceFlags field
+#  of an Ace header.
+
+OBJECT_INHERIT_ACE               = 1
+CONTAINER_INHERIT_ACE            = 2
+NO_PROPAGATE_INHERIT_ACE         = 4
+INHERIT_ONLY_ACE                 = 8
+VALID_INHERIT_FLAGS              = 15
+
+
+SUCCESSFUL_ACCESS_ACE_FLAG       = 64
+FAILED_ACCESS_ACE_FLAG           = 128
+
+SE_OWNER_DEFAULTED               = 1
+SE_GROUP_DEFAULTED               = 2
+SE_DACL_PRESENT                  = 4
+SE_DACL_DEFAULTED                = 8
+SE_SACL_PRESENT                  = 16
+SE_SACL_DEFAULTED                = 32
+SE_SELF_RELATIVE                 = 32768
+
+
+SE_PRIVILEGE_ENABLED_BY_DEFAULT = 1
+SE_PRIVILEGE_ENABLED            = 2
+SE_PRIVILEGE_USED_FOR_ACCESS    = -2147483648
+
+PRIVILEGE_SET_ALL_NECESSARY    = 1
+
+#               NT Defined Privileges
+
+SE_CREATE_TOKEN_NAME              = "SeCreateTokenPrivilege"
+SE_ASSIGNPRIMARYTOKEN_NAME        = "SeAssignPrimaryTokenPrivilege"
+SE_LOCK_MEMORY_NAME               = "SeLockMemoryPrivilege"
+SE_INCREASE_QUOTA_NAME            = "SeIncreaseQuotaPrivilege"
+SE_UNSOLICITED_INPUT_NAME         = "SeUnsolicitedInputPrivilege"
+SE_MACHINE_ACCOUNT_NAME           = "SeMachineAccountPrivilege"
+SE_TCB_NAME                       = "SeTcbPrivilege"
+SE_SECURITY_NAME                  = "SeSecurityPrivilege"
+SE_TAKE_OWNERSHIP_NAME            = "SeTakeOwnershipPrivilege"
+SE_LOAD_DRIVER_NAME               = "SeLoadDriverPrivilege"
+SE_SYSTEM_PROFILE_NAME            = "SeSystemProfilePrivilege"
+SE_SYSTEMTIME_NAME                = "SeSystemtimePrivilege"
+SE_PROF_SINGLE_PROCESS_NAME       = "SeProfileSingleProcessPrivilege"
+SE_INC_BASE_PRIORITY_NAME         = "SeIncreaseBasePriorityPrivilege"
+SE_CREATE_PAGEFILE_NAME           = "SeCreatePagefilePrivilege"
+SE_CREATE_PERMANENT_NAME          = "SeCreatePermanentPrivilege"
+SE_BACKUP_NAME                    = "SeBackupPrivilege"
+SE_RESTORE_NAME                   = "SeRestorePrivilege"
+SE_SHUTDOWN_NAME                  = "SeShutdownPrivilege"
+SE_DEBUG_NAME                     = "SeDebugPrivilege"
+SE_AUDIT_NAME                     = "SeAuditPrivilege"
+SE_SYSTEM_ENVIRONMENT_NAME        = "SeSystemEnvironmentPrivilege"
+SE_CHANGE_NOTIFY_NAME             = "SeChangeNotifyPrivilege"
+SE_REMOTE_SHUTDOWN_NAME           = "SeRemoteShutdownPrivilege"
+
+
+# Enum SECURITY_IMPERSONATION_LEVEL:
+SecurityAnonymous = 0
+SecurityIdentification = 1
+SecurityImpersonation = 2
+SecurityDelegation = 3
+
+SECURITY_MAX_IMPERSONATION_LEVEL = SecurityDelegation
+
+DEFAULT_IMPERSONATION_LEVEL = SecurityImpersonation
+
+TOKEN_ASSIGN_PRIMARY    = 1
+TOKEN_DUPLICATE         = 2
+TOKEN_IMPERSONATE       = 4
+TOKEN_QUERY             = 8
+TOKEN_QUERY_SOURCE      = 16
+TOKEN_ADJUST_PRIVILEGES = 32
+TOKEN_ADJUST_GROUPS     = 64
+TOKEN_ADJUST_DEFAULT    = 128
+
+TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED  |\
+                          TOKEN_ASSIGN_PRIMARY      |\
+                          TOKEN_DUPLICATE           |\
+                          TOKEN_IMPERSONATE         |\
+                          TOKEN_QUERY               |\
+                          TOKEN_QUERY_SOURCE        |\
+                          TOKEN_ADJUST_PRIVILEGES   |\
+                          TOKEN_ADJUST_GROUPS       |\
+                          TOKEN_ADJUST_DEFAULT)
+
+
+TOKEN_READ       = (STANDARD_RIGHTS_READ      |\
+                          TOKEN_QUERY)
+
+
+TOKEN_WRITE      = (STANDARD_RIGHTS_WRITE     |\
+                          TOKEN_ADJUST_PRIVILEGES   |\
+                          TOKEN_ADJUST_GROUPS       |\
+                          TOKEN_ADJUST_DEFAULT)
+
+TOKEN_EXECUTE    = (STANDARD_RIGHTS_EXECUTE)
+
+SidTypeUser = 1
+SidTypeGroup = 2
+SidTypeDomain =3
+SidTypeAlias = 4
+SidTypeWellKnownGroup = 5
+SidTypeDeletedAccount = 6
+SidTypeInvalid = 7
+SidTypeUnknown = 8
+
+# Token types
+TokenPrimary = 1
+TokenImpersonation = 2
+
+TokenUser = 1
+TokenGroups = 2
+TokenPrivileges = 3
+TokenOwner = 4
+TokenPrimaryGroup = 5
+TokenDefaultDacl = 6
+TokenSource = 7
+TokenType = 8
+TokenImpersonationLevel = 9
+TokenStatistics = 10
+
+# DirectoryService related constants.
+# Generated by h2py from NtDsAPI.h
+DS_BEHAVIOR_WIN2000 = 0
+DS_BEHAVIOR_WIN2003_WITH_MIXED_DOMAINS = 1
+DS_BEHAVIOR_WIN2003 = 2
+DS_SYNCED_EVENT_NAME = "NTDSInitialSyncsCompleted"
+ACTRL_DS_OPEN = 0x00000000
+ACTRL_DS_CREATE_CHILD = 0x00000001
+ACTRL_DS_DELETE_CHILD = 0x00000002
+ACTRL_DS_LIST = 0x00000004
+ACTRL_DS_SELF = 0x00000008
+ACTRL_DS_READ_PROP = 0x00000010
+ACTRL_DS_WRITE_PROP = 0x00000020
+ACTRL_DS_DELETE_TREE = 0x00000040
+ACTRL_DS_LIST_OBJECT = 0x00000080
+ACTRL_DS_CONTROL_ACCESS = 0x00000100
+NTDSAPI_BIND_ALLOW_DELEGATION = (0x00000001)
+DS_REPSYNC_ASYNCHRONOUS_OPERATION = 0x00000001
+DS_REPSYNC_WRITEABLE = 0x00000002
+DS_REPSYNC_PERIODIC = 0x00000004
+DS_REPSYNC_INTERSITE_MESSAGING = 0x00000008
+DS_REPSYNC_ALL_SOURCES = 0x00000010
+DS_REPSYNC_FULL = 0x00000020
+DS_REPSYNC_URGENT = 0x00000040
+DS_REPSYNC_NO_DISCARD = 0x00000080
+DS_REPSYNC_FORCE = 0x00000100
+DS_REPSYNC_ADD_REFERENCE = 0x00000200
+DS_REPSYNC_NEVER_COMPLETED = 0x00000400
+DS_REPSYNC_TWO_WAY = 0x00000800
+DS_REPSYNC_NEVER_NOTIFY = 0x00001000
+DS_REPSYNC_INITIAL = 0x00002000
+DS_REPSYNC_USE_COMPRESSION = 0x00004000
+DS_REPSYNC_ABANDONED = 0x00008000
+DS_REPSYNC_INITIAL_IN_PROGRESS = 0x00010000
+DS_REPSYNC_PARTIAL_ATTRIBUTE_SET = 0x00020000
+DS_REPSYNC_REQUEUE = 0x00040000
+DS_REPSYNC_NOTIFICATION = 0x00080000
+DS_REPSYNC_ASYNCHRONOUS_REPLICA = 0x00100000
+DS_REPSYNC_CRITICAL = 0x00200000
+DS_REPSYNC_FULL_IN_PROGRESS = 0x00400000
+DS_REPSYNC_PREEMPTED = 0x00800000
+DS_REPADD_ASYNCHRONOUS_OPERATION = 0x00000001
+DS_REPADD_WRITEABLE = 0x00000002
+DS_REPADD_INITIAL = 0x00000004
+DS_REPADD_PERIODIC = 0x00000008
+DS_REPADD_INTERSITE_MESSAGING = 0x00000010
+DS_REPADD_ASYNCHRONOUS_REPLICA = 0x00000020
+DS_REPADD_DISABLE_NOTIFICATION = 0x00000040
+DS_REPADD_DISABLE_PERIODIC = 0x00000080
+DS_REPADD_USE_COMPRESSION = 0x00000100
+DS_REPADD_NEVER_NOTIFY = 0x00000200
+DS_REPADD_TWO_WAY = 0x00000400
+DS_REPADD_CRITICAL = 0x00000800
+DS_REPDEL_ASYNCHRONOUS_OPERATION = 0x00000001
+DS_REPDEL_WRITEABLE = 0x00000002
+DS_REPDEL_INTERSITE_MESSAGING = 0x00000004
+DS_REPDEL_IGNORE_ERRORS = 0x00000008
+DS_REPDEL_LOCAL_ONLY = 0x00000010
+DS_REPDEL_NO_SOURCE = 0x00000020
+DS_REPDEL_REF_OK = 0x00000040
+DS_REPMOD_ASYNCHRONOUS_OPERATION = 0x00000001
+DS_REPMOD_WRITEABLE = 0x00000002
+DS_REPMOD_UPDATE_FLAGS = 0x00000001
+DS_REPMOD_UPDATE_ADDRESS = 0x00000002
+DS_REPMOD_UPDATE_SCHEDULE = 0x00000004
+DS_REPMOD_UPDATE_RESULT = 0x00000008
+DS_REPMOD_UPDATE_TRANSPORT = 0x00000010
+DS_REPUPD_ASYNCHRONOUS_OPERATION = 0x00000001
+DS_REPUPD_WRITEABLE = 0x00000002
+DS_REPUPD_ADD_REFERENCE = 0x00000004
+DS_REPUPD_DELETE_REFERENCE = 0x00000008
+DS_INSTANCETYPE_IS_NC_HEAD = 0x00000001
+DS_INSTANCETYPE_NC_IS_WRITEABLE = 0x00000004
+DS_INSTANCETYPE_NC_COMING = 0x00000010
+DS_INSTANCETYPE_NC_GOING = 0x00000020
+NTDSDSA_OPT_IS_GC = ( 1 << 0 )
+NTDSDSA_OPT_DISABLE_INBOUND_REPL = ( 1 << 1 )
+NTDSDSA_OPT_DISABLE_OUTBOUND_REPL = ( 1 << 2 )
+NTDSDSA_OPT_DISABLE_NTDSCONN_XLATE = ( 1 << 3 )
+NTDSCONN_OPT_IS_GENERATED = ( 1 << 0 )
+NTDSCONN_OPT_TWOWAY_SYNC = ( 1 << 1 )
+NTDSCONN_OPT_OVERRIDE_NOTIFY_DEFAULT = (1 << 2 )
+NTDSCONN_OPT_USE_NOTIFY = (1 << 3)
+NTDSCONN_OPT_DISABLE_INTERSITE_COMPRESSION = (1 << 4)
+NTDSCONN_OPT_USER_OWNED_SCHEDULE = (1 << 5)
+NTDSCONN_KCC_NO_REASON = ( 0 )
+NTDSCONN_KCC_GC_TOPOLOGY = ( 1 << 0 )
+NTDSCONN_KCC_RING_TOPOLOGY = ( 1 << 1 )
+NTDSCONN_KCC_MINIMIZE_HOPS_TOPOLOGY = ( 1 << 2 )
+NTDSCONN_KCC_STALE_SERVERS_TOPOLOGY = ( 1 << 3 )
+NTDSCONN_KCC_OSCILLATING_CONNECTION_TOPOLOGY = ( 1 << 4 )
+NTDSCONN_KCC_INTERSITE_GC_TOPOLOGY = (1 << 5)
+NTDSCONN_KCC_INTERSITE_TOPOLOGY = (1 << 6)
+NTDSCONN_KCC_SERVER_FAILOVER_TOPOLOGY = (1 << 7)
+NTDSCONN_KCC_SITE_FAILOVER_TOPOLOGY = (1 << 8)
+NTDSCONN_KCC_REDUNDANT_SERVER_TOPOLOGY = (1 << 9)
+FRSCONN_PRIORITY_MASK = 0x70000000
+FRSCONN_MAX_PRIORITY = 0x8
+NTDSCONN_OPT_IGNORE_SCHEDULE_MASK = (-2147483648)
+
+NTDSSETTINGS_OPT_IS_AUTO_TOPOLOGY_DISABLED = ( 1 << 0 )
+NTDSSETTINGS_OPT_IS_TOPL_CLEANUP_DISABLED = ( 1 << 1 )
+NTDSSETTINGS_OPT_IS_TOPL_MIN_HOPS_DISABLED = ( 1 << 2 )
+NTDSSETTINGS_OPT_IS_TOPL_DETECT_STALE_DISABLED = ( 1 << 3 )
+NTDSSETTINGS_OPT_IS_INTER_SITE_AUTO_TOPOLOGY_DISABLED = ( 1 << 4 )
+NTDSSETTINGS_OPT_IS_GROUP_CACHING_ENABLED = ( 1 << 5 )
+NTDSSETTINGS_OPT_FORCE_KCC_WHISTLER_BEHAVIOR = ( 1 << 6 )
+NTDSSETTINGS_OPT_FORCE_KCC_W2K_ELECTION = ( 1 << 7 )
+NTDSSETTINGS_OPT_IS_RAND_BH_SELECTION_DISABLED = ( 1 << 8 )
+NTDSSETTINGS_OPT_IS_SCHEDULE_HASHING_ENABLED = ( 1 << 9 )
+NTDSSETTINGS_OPT_IS_REDUNDANT_SERVER_TOPOLOGY_ENABLED = ( 1 << 10 )
+NTDSSETTINGS_DEFAULT_SERVER_REDUNDANCY = 2
+NTDSTRANSPORT_OPT_IGNORE_SCHEDULES = ( 1 << 0 )
+NTDSTRANSPORT_OPT_BRIDGES_REQUIRED = (1 << 1 )
+NTDSSITECONN_OPT_USE_NOTIFY = ( 1 << 0 )
+NTDSSITECONN_OPT_TWOWAY_SYNC = ( 1 << 1 )
+NTDSSITECONN_OPT_DISABLE_COMPRESSION = ( 1 << 2 )
+NTDSSITELINK_OPT_USE_NOTIFY = ( 1 << 0 )
+NTDSSITELINK_OPT_TWOWAY_SYNC = ( 1 << 1 )
+NTDSSITELINK_OPT_DISABLE_COMPRESSION = ( 1 << 2 )
+GUID_USERS_CONTAINER_A = "a9d1ca15768811d1aded00c04fd8d5cd"
+GUID_COMPUTRS_CONTAINER_A = "aa312825768811d1aded00c04fd8d5cd"
+GUID_SYSTEMS_CONTAINER_A = "ab1d30f3768811d1aded00c04fd8d5cd"
+GUID_DOMAIN_CONTROLLERS_CONTAINER_A = "a361b2ffffd211d1aa4b00c04fd7d83a"
+GUID_INFRASTRUCTURE_CONTAINER_A = "2fbac1870ade11d297c400c04fd8d5cd"
+GUID_DELETED_OBJECTS_CONTAINER_A = "18e2ea80684f11d2b9aa00c04f79f805"
+GUID_LOSTANDFOUND_CONTAINER_A = "ab8153b7768811d1aded00c04fd8d5cd"
+GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_A = "22b70c67d56e4efb91e9300fca3dc1aa"
+GUID_PROGRAM_DATA_CONTAINER_A = "09460c08ae1e4a4ea0f64aee7daa1e5a"
+GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_A = "f4be92a4c777485e878e9421d53087db"
+GUID_NTDS_QUOTAS_CONTAINER_A = "6227f0af1fc2410d8e3bb10615bb5b0f"
+GUID_USERS_CONTAINER_BYTE = "\xa9\xd1\xca\x15\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
+GUID_COMPUTRS_CONTAINER_BYTE = "\xaa\x31\x28\x25\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
+GUID_SYSTEMS_CONTAINER_BYTE = "\xab\x1d\x30\xf3\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
+GUID_DOMAIN_CONTROLLERS_CONTAINER_BYTE = "\xa3\x61\xb2\xff\xff\xd2\x11\xd1\xaa\x4b\x00\xc0\x4f\xd7\xd8\x3a"
+GUID_INFRASTRUCTURE_CONTAINER_BYTE = "\x2f\xba\xc1\x87\x0a\xde\x11\xd2\x97\xc4\x00\xc0\x4f\xd8\xd5\xcd"
+GUID_DELETED_OBJECTS_CONTAINER_BYTE = "\x18\xe2\xea\x80\x68\x4f\x11\xd2\xb9\xaa\x00\xc0\x4f\x79\xf8\x05"
+GUID_LOSTANDFOUND_CONTAINER_BYTE = "\xab\x81\x53\xb7\x76\x88\x11\xd1\xad\xed\x00\xc0\x4f\xd8\xd5\xcd"
+GUID_FOREIGNSECURITYPRINCIPALS_CONTAINER_BYTE = "\x22\xb7\x0c\x67\xd5\x6e\x4e\xfb\x91\xe9\x30\x0f\xca\x3d\xc1\xaa"
+GUID_PROGRAM_DATA_CONTAINER_BYTE = "\x09\x46\x0c\x08\xae\x1e\x4a\x4e\xa0\xf6\x4a\xee\x7d\xaa\x1e\x5a"
+GUID_MICROSOFT_PROGRAM_DATA_CONTAINER_BYTE = "\xf4\xbe\x92\xa4\xc7\x77\x48\x5e\x87\x8e\x94\x21\xd5\x30\x87\xdb"
+GUID_NTDS_QUOTAS_CONTAINER_BYTE = "\x62\x27\xf0\xaf\x1f\xc2\x41\x0d\x8e\x3b\xb1\x06\x15\xbb\x5b\x0f"
+DS_REPSYNCALL_NO_OPTIONS = 0x00000000
+DS_REPSYNCALL_ABORT_IF_SERVER_UNAVAILABLE = 0x00000001
+DS_REPSYNCALL_SYNC_ADJACENT_SERVERS_ONLY = 0x00000002
+DS_REPSYNCALL_ID_SERVERS_BY_DN = 0x00000004
+DS_REPSYNCALL_DO_NOT_SYNC = 0x00000008
+DS_REPSYNCALL_SKIP_INITIAL_CHECK = 0x00000010
+DS_REPSYNCALL_PUSH_CHANGES_OUTWARD = 0x00000020
+DS_REPSYNCALL_CROSS_SITE_BOUNDARIES = 0x00000040
+DS_LIST_DSA_OBJECT_FOR_SERVER = 0
+DS_LIST_DNS_HOST_NAME_FOR_SERVER = 1
+DS_LIST_ACCOUNT_OBJECT_FOR_SERVER = 2
+DS_ROLE_SCHEMA_OWNER = 0
+DS_ROLE_DOMAIN_OWNER = 1
+DS_ROLE_PDC_OWNER = 2
+DS_ROLE_RID_OWNER = 3
+DS_ROLE_INFRASTRUCTURE_OWNER = 4
+DS_SCHEMA_GUID_NOT_FOUND = 0
+DS_SCHEMA_GUID_ATTR = 1
+DS_SCHEMA_GUID_ATTR_SET = 2
+DS_SCHEMA_GUID_CLASS = 3
+DS_SCHEMA_GUID_CONTROL_RIGHT = 4
+DS_KCC_FLAG_ASYNC_OP = (1 << 0)
+DS_KCC_FLAG_DAMPED = (1 << 1)
+DS_EXIST_ADVISORY_MODE = (0x1)
+DS_REPL_INFO_FLAG_IMPROVE_LINKED_ATTRS = (0x00000001)
+DS_REPL_NBR_WRITEABLE = (0x00000010)
+DS_REPL_NBR_SYNC_ON_STARTUP = (0x00000020)
+DS_REPL_NBR_DO_SCHEDULED_SYNCS = (0x00000040)
+DS_REPL_NBR_USE_ASYNC_INTERSITE_TRANSPORT = (0x00000080)
+DS_REPL_NBR_TWO_WAY_SYNC = (0x00000200)
+DS_REPL_NBR_RETURN_OBJECT_PARENTS = (0x00000800)
+DS_REPL_NBR_FULL_SYNC_IN_PROGRESS = (0x00010000)
+DS_REPL_NBR_FULL_SYNC_NEXT_PACKET = (0x00020000)
+DS_REPL_NBR_NEVER_SYNCED = (0x00200000)
+DS_REPL_NBR_PREEMPTED = (0x01000000)
+DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS = (0x04000000)
+DS_REPL_NBR_DISABLE_SCHEDULED_SYNC = (0x08000000)
+DS_REPL_NBR_COMPRESS_CHANGES = (0x10000000)
+DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS = (0x20000000)
+DS_REPL_NBR_PARTIAL_ATTRIBUTE_SET = (0x40000000)
+DS_REPL_NBR_MODIFIABLE_MASK = \
+        ( \
+        DS_REPL_NBR_SYNC_ON_STARTUP | \
+        DS_REPL_NBR_DO_SCHEDULED_SYNCS | \
+        DS_REPL_NBR_TWO_WAY_SYNC | \
+        DS_REPL_NBR_IGNORE_CHANGE_NOTIFICATIONS | \
+        DS_REPL_NBR_DISABLE_SCHEDULED_SYNC | \
+        DS_REPL_NBR_COMPRESS_CHANGES | \
+        DS_REPL_NBR_NO_CHANGE_NOTIFICATIONS \
+        )
+
+# from enum DS_NAME_FORMAT
+DS_UNKNOWN_NAME = 0
+DS_FQDN_1779_NAME = 1
+DS_NT4_ACCOUNT_NAME = 2
+DS_DISPLAY_NAME = 3
+DS_UNIQUE_ID_NAME = 6
+DS_CANONICAL_NAME = 7
+DS_USER_PRINCIPAL_NAME = 8
+DS_CANONICAL_NAME_EX = 9
+DS_SERVICE_PRINCIPAL_NAME = 10
+DS_SID_OR_SID_HISTORY_NAME = 11
+DS_DNS_DOMAIN_NAME = 12
+
+DS_DOMAIN_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME
+DS_ENTERPRISE_SIMPLE_NAME = DS_USER_PRINCIPAL_NAME
+
+# from enum DS_NAME_FLAGS
+DS_NAME_NO_FLAGS = 0x0
+DS_NAME_FLAG_SYNTACTICAL_ONLY = 0x1
+DS_NAME_FLAG_EVAL_AT_DC = 0x2
+DS_NAME_FLAG_GCVERIFY = 0x4
+DS_NAME_FLAG_TRUST_REFERRAL = 0x8
+
+# from enum DS_NAME_ERROR
+DS_NAME_NO_ERROR = 0
+DS_NAME_ERROR_RESOLVING = 1
+DS_NAME_ERROR_NOT_FOUND = 2
+DS_NAME_ERROR_NOT_UNIQUE = 3
+DS_NAME_ERROR_NO_MAPPING = 4
+DS_NAME_ERROR_DOMAIN_ONLY = 5
+DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING = 6
+DS_NAME_ERROR_TRUST_REFERRAL = 7
+
+
+# from enum DS_SPN_NAME_TYPE
+DS_SPN_DNS_HOST = 0
+DS_SPN_DN_HOST = 1
+DS_SPN_NB_HOST = 2
+DS_SPN_DOMAIN = 3
+DS_SPN_NB_DOMAIN = 4
+DS_SPN_SERVICE = 5
+
+# from enum DS_SPN_WRITE_OP
+DS_SPN_ADD_SPN_OP = 0
+DS_SPN_REPLACE_SPN_OP = 1
+DS_SPN_DELETE_SPN_OP = 2  
+
+# Generated by h2py from DsGetDC.h
+DS_FORCE_REDISCOVERY = 0x00000001
+DS_DIRECTORY_SERVICE_REQUIRED = 0x00000010
+DS_DIRECTORY_SERVICE_PREFERRED = 0x00000020
+DS_GC_SERVER_REQUIRED = 0x00000040
+DS_PDC_REQUIRED = 0x00000080
+DS_BACKGROUND_ONLY = 0x00000100
+DS_IP_REQUIRED = 0x00000200
+DS_KDC_REQUIRED = 0x00000400
+DS_TIMESERV_REQUIRED = 0x00000800
+DS_WRITABLE_REQUIRED = 0x00001000
+DS_GOOD_TIMESERV_PREFERRED = 0x00002000
+DS_AVOID_SELF = 0x00004000
+DS_ONLY_LDAP_NEEDED = 0x00008000
+DS_IS_FLAT_NAME = 0x00010000
+DS_IS_DNS_NAME = 0x00020000
+DS_RETURN_DNS_NAME = 0x40000000
+DS_RETURN_FLAT_NAME = (-2147483648)
+DSGETDC_VALID_FLAGS = ( \
+            DS_FORCE_REDISCOVERY | \
+            DS_DIRECTORY_SERVICE_REQUIRED | \
+            DS_DIRECTORY_SERVICE_PREFERRED | \
+            DS_GC_SERVER_REQUIRED | \
+            DS_PDC_REQUIRED | \
+            DS_BACKGROUND_ONLY | \
+            DS_IP_REQUIRED | \
+            DS_KDC_REQUIRED | \
+            DS_TIMESERV_REQUIRED | \
+            DS_WRITABLE_REQUIRED | \
+            DS_GOOD_TIMESERV_PREFERRED | \
+            DS_AVOID_SELF | \
+            DS_ONLY_LDAP_NEEDED | \
+            DS_IS_FLAT_NAME | \
+            DS_IS_DNS_NAME | \
+            DS_RETURN_FLAT_NAME  | \
+            DS_RETURN_DNS_NAME )
+DS_INET_ADDRESS = 1
+DS_NETBIOS_ADDRESS = 2
+DS_PDC_FLAG = 0x00000001
+DS_GC_FLAG = 0x00000004
+DS_LDAP_FLAG = 0x00000008
+DS_DS_FLAG = 0x00000010
+DS_KDC_FLAG = 0x00000020
+DS_TIMESERV_FLAG = 0x00000040
+DS_CLOSEST_FLAG = 0x00000080
+DS_WRITABLE_FLAG = 0x00000100
+DS_GOOD_TIMESERV_FLAG = 0x00000200
+DS_NDNC_FLAG = 0x00000400
+DS_PING_FLAGS = 0x0000FFFF
+DS_DNS_CONTROLLER_FLAG = 0x20000000
+DS_DNS_DOMAIN_FLAG = 0x40000000
+DS_DNS_FOREST_FLAG = (-2147483648)
+DS_DOMAIN_IN_FOREST = 0x0001
+DS_DOMAIN_DIRECT_OUTBOUND = 0x0002
+DS_DOMAIN_TREE_ROOT = 0x0004
+DS_DOMAIN_PRIMARY = 0x0008
+DS_DOMAIN_NATIVE_MODE = 0x0010
+DS_DOMAIN_DIRECT_INBOUND = 0x0020
+DS_DOMAIN_VALID_FLAGS = (         \
+            DS_DOMAIN_IN_FOREST       | \
+            DS_DOMAIN_DIRECT_OUTBOUND | \
+            DS_DOMAIN_TREE_ROOT       | \
+            DS_DOMAIN_PRIMARY         | \
+            DS_DOMAIN_NATIVE_MODE     | \
+            DS_DOMAIN_DIRECT_INBOUND )
+DS_GFTI_UPDATE_TDO = 0x1
+DS_GFTI_VALID_FLAGS = 0x1
+DS_ONLY_DO_SITE_NAME = 0x01
+DS_NOTIFY_AFTER_SITE_RECORDS = 0x02
+DS_OPEN_VALID_OPTION_FLAGS = ( DS_ONLY_DO_SITE_NAME | DS_NOTIFY_AFTER_SITE_RECORDS )
+DS_OPEN_VALID_FLAGS = (       \
+            DS_FORCE_REDISCOVERY  | \
+            DS_ONLY_LDAP_NEEDED   | \
+            DS_KDC_REQUIRED       | \
+            DS_PDC_REQUIRED       | \
+            DS_GC_SERVER_REQUIRED | \
+            DS_WRITABLE_REQUIRED )
+
+## from aclui.h
+# SI_OBJECT_INFO.dwFlags
+SI_EDIT_PERMS = 0x00000000L
+SI_EDIT_OWNER = 0x00000001L
+SI_EDIT_AUDITS = 0x00000002L
+SI_CONTAINER = 0x00000004L
+SI_READONLY = 0x00000008L
+SI_ADVANCED = 0x00000010L
+SI_RESET = 0x00000020L
+SI_OWNER_READONLY = 0x00000040L
+SI_EDIT_PROPERTIES = 0x00000080L
+SI_OWNER_RECURSE = 0x00000100L
+SI_NO_ACL_PROTECT = 0x00000200L
+SI_NO_TREE_APPLY = 0x00000400L
+SI_PAGE_TITLE = 0x00000800L
+SI_SERVER_IS_DC = 0x00001000L
+SI_RESET_DACL_TREE = 0x00004000L
+SI_RESET_SACL_TREE = 0x00008000L
+SI_OBJECT_GUID = 0x00010000L
+SI_EDIT_EFFECTIVE = 0x00020000L
+SI_RESET_DACL = 0x00040000L
+SI_RESET_SACL = 0x00080000L
+SI_RESET_OWNER = 0x00100000L
+SI_NO_ADDITIONAL_PERMISSION = 0x00200000L
+SI_MAY_WRITE = 0x10000000L
+SI_EDIT_ALL = (SI_EDIT_PERMS | SI_EDIT_OWNER | SI_EDIT_AUDITS)
+
+# SI_ACCESS.dwFlags
+SI_ACCESS_SPECIFIC = 0x00010000L
+SI_ACCESS_GENERAL = 0x00020000L
+SI_ACCESS_CONTAINER = 0x00040000L
+SI_ACCESS_PROPERTY = 0x00080000L
+
+# SI_PAGE_TYPE enum
+SI_PAGE_PERM = 0
+SI_PAGE_ADVPERM = 1
+SI_PAGE_AUDIT = 2
+SI_PAGE_OWNER = 3
+SI_PAGE_EFFECTIVE =4
+
+CFSTR_ACLUI_SID_INFO_LIST = u"CFSTR_ACLUI_SID_INFO_LIST"
+PSPCB_SI_INITDIALOG = 1025 ## WM_USER+1
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/pywintypes.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/pywintypes.py
new file mode 100644
index 0000000..eee60a9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/pywintypes.py
@@ -0,0 +1,102 @@
+# Magic utility that "redirects" to pywintypesxx.dll
+
+def __import_pywin32_system_module__(modname, globs):
+    """Locate and load the pywin32 system DLL named *modname* (eg
+    'pywintypes') for this Python version, then copy its namespace into
+    the *globs* dict of the calling stub module.
+
+    modname -- base name of the extension module (no version/suffix).
+    globs -- the globals() dictionary of the stub module to populate.
+    Raises ImportError if the DLL cannot be located.
+    """
+    # This has been through a number of iterations.  The problem: how to 
+    # locate pywintypesXX.dll when it may be in a number of places, and how
+    # to avoid ever loading it twice.  This problem is compounded by the
+    # fact that the "right" way to do this requires win32api, but this
+    # itself requires pywintypesXX.
+    # And the killer problem is that someone may have done 'import win32api'
+    # before this code is called.  In that case Windows will have already
+    # loaded pywintypesXX as part of loading win32api - but by the time
+    # we get here, we may locate a different one.  This appears to work, but
+    # then starts raising bizarre TypeErrors complaining that something
+    # is not a pywintypes type when it clearly is!
+
+    # So in what we hope is the last major iteration of this, we now
+    # rely on a _win32sysloader module, implemented in C but not relying
+    # on pywintypesXX.dll.  It then can check if the DLL we are looking for
+    # lib is already loaded.
+    import imp, sys, os
+    if not sys.platform.startswith("win32"):
+        # These extensions can be built on Linux via the 'mainwin' toolkit.
+        # Look for a native 'lib{modname}.so'
+        # NOTE: The _win32sysloader module will probably build in this
+        # environment, so it may be better to use that here too.
+        for ext, mode, ext_type in imp.get_suffixes():
+            if ext_type==imp.C_EXTENSION:
+                for path in sys.path:
+                    look = os.path.join(path, "lib" + modname + ext)
+                    if os.path.isfile(look):
+                        mod = imp.load_module(modname, None, look,
+                                              (ext, mode, ext_type))
+                        # and fill our namespace with it.
+                        globs.update(mod.__dict__)
+                        return
+        raise ImportError, "No dynamic module " + modname
+    # See if this is a debug build.
+    for suffix_item in imp.get_suffixes():
+        if suffix_item[0]=='_d.pyd':
+            suffix = '_d'
+            break
+    else:
+        suffix = ""
+    filename = "%s%d%d%s.dll" % \
+               (modname, sys.version_info[0], sys.version_info[1], suffix)
+    if hasattr(sys, "frozen"):
+        # If we are running from a frozen program (py2exe, McMillan, freeze)
+        # then we try and load the DLL from our sys.path
+        # XXX - This path may also benefit from _win32sysloader?  However,
+        # MarkH has never seen the DLL load problem with py2exe programs...
+        for look in sys.path:
+            # If the sys.path entry is a (presumably) .zip file, use the
+            # directory 
+            if os.path.isfile(look):
+                look = os.path.dirname(look)            
+            found = os.path.join(look, filename)
+            if os.path.isfile(found):
+                break
+        else:
+            raise ImportError, \
+                  "Module '%s' isn't in frozen sys.path %s" % (modname, sys.path)
+    else:
+        # First see if it already in our process - if so, we must use that.
+        import _win32sysloader
+        found = _win32sysloader.GetModuleFilename(filename)
+        if found is None:
+            # We ask Windows to load it next.  This is in an attempt to 
+            # get the exact same module loaded should pywintypes be imported
+            # first (which is how we are here) or if, eg, win32api was imported
+            # first thereby implicitly loading the DLL.
+
+            # Sadly though, it doesn't quite work - if pywintypesxx.dll
+            # is in system32 *and* the executable's directory, on XP SP2, an
+            # import of win32api will cause Windows to load pywintypes
+            # from system32, where LoadLibrary for that name will
+            # load the one in the exe's dir.
+            # That shouldn't really matter though, so long as we only ever
+            # get one loaded.
+            found = _win32sysloader.LoadModule(filename)
+        if found is None:
+            # Windows can't find it - which although isn't relevent here, 
+            # means that we *must* be the first win32 import, as an attempt
+            # to import win32api etc would fail when Windows attempts to 
+            # locate the DLL.
+            # This is most likely to happen for "non-admin" installs, where
+            # we can't put the files anywhere else on the global path.
+
+            # If there is a version in our Python directory, use that
+            if os.path.isfile(os.path.join(sys.prefix, filename)):
+                found = os.path.join(sys.prefix, filename)
+        if found is None:
+            # give up in disgust.
+            raise ImportError, \
+                  "No system module '%s' (%s)" % (modname, filename)
+
+    # Python can load the module
+    mod = imp.load_module(modname, None, found, 
+                          ('.dll', 'rb', imp.C_EXTENSION))
+    # and fill our namespace with it.
+    globs.update(mod.__dict__)
+
+__import_pywin32_system_module__("pywintypes", globals())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/rasutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/rasutil.py
new file mode 100644
index 0000000..c923573
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/rasutil.py
@@ -0,0 +1,38 @@
+import win32ras
+
+stateStrings = {
+	win32ras.RASCS_OpenPort : "OpenPort",
+    win32ras.RASCS_PortOpened : "PortOpened",
+    win32ras.RASCS_ConnectDevice : "ConnectDevice",
+    win32ras.RASCS_DeviceConnected : "DeviceConnected",
+    win32ras.RASCS_AllDevicesConnected : "AllDevicesConnected",
+    win32ras.RASCS_Authenticate : "Authenticate",
+    win32ras.RASCS_AuthNotify : "AuthNotify",
+    win32ras.RASCS_AuthRetry : "AuthRetry",
+    win32ras.RASCS_AuthCallback : "AuthCallback",
+    win32ras.RASCS_AuthChangePassword : "AuthChangePassword",
+    win32ras.RASCS_AuthProject : "AuthProject",
+    win32ras.RASCS_AuthLinkSpeed : "AuthLinkSpeed",
+    win32ras.RASCS_AuthAck : "AuthAck",
+    win32ras.RASCS_ReAuthenticate : "ReAuthenticate",
+    win32ras.RASCS_Authenticated : "Authenticated",
+    win32ras.RASCS_PrepareForCallback : "PrepareForCallback",
+    win32ras.RASCS_WaitForModemReset : "WaitForModemReset",
+    win32ras.RASCS_WaitForCallback : "WaitForCallback",
+    win32ras.RASCS_Projected : "Projected",
+    win32ras.RASCS_StartAuthentication : "StartAuthentication",
+    win32ras.RASCS_CallbackComplete : "CallbackComplete",
+    win32ras.RASCS_LogonNetwork : "LogonNetwork",
+    win32ras.RASCS_Interactive : "Interactive",
+    win32ras.RASCS_RetryAuthentication : "RetryAuthentication",
+    win32ras.RASCS_CallbackSetByCaller : "CallbackSetByCaller",
+    win32ras.RASCS_PasswordExpired : "PasswordExpired",
+	win32ras.RASCS_Connected : "Connected",
+    win32ras.RASCS_Disconnected : "Disconnected"
+}
+
+def TestCallback( hras, msg, state, error, exterror):
+	# Demo RAS status callback: print each state-change notification,
+	# translating the numeric state via stateStrings.
+	print "Callback called with ", hras, msg, stateStrings[state], error, exterror
+
+def test(rasName = "_ Divert Off"):
+	# Start dialling the named RAS phonebook entry, reporting progress
+	# through TestCallback; returns the result of win32ras.Dial.
+	return win32ras.Dial(None, None, (rasName,),TestCallback)
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/regcheck.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/regcheck.py
new file mode 100644
index 0000000..93c7e572
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/regcheck.py
@@ -0,0 +1,117 @@
+import win32con
+import regutil
+import win32api
+import os
+import sys
+
+def CheckRegisteredExe(exename):
+	"""Check the App Paths registration for exename names an existing file."""
+	try:
+		os.stat(win32api.RegQueryValue(regutil.GetRootKey()  , regutil.GetAppPathsKey() + "\\" + exename))
+#	except SystemError:
+	except (os.error,win32api.error):
+		# Either the registry value is missing, or the file it names is gone.
+		print "Registration of %s - Not registered correctly" % exename
+
+def CheckPathString(pathString):
+	import string
+	for path in string.split(pathString, ";"):
+		if not os.path.isdir(path):
+			return "'%s' is not a valid directory!" % path
+	return None
+
+def CheckPythonPaths(verbose):
+	if verbose: print "Python Paths:"
+	# Check the core path
+	if verbose: print "\tCore Path:",
+	try:
+		appPath = win32api.RegQueryValue(regutil.GetRootKey(), regutil.BuildDefaultPythonKey() + "\\PythonPath")
+	except win32api.error, (code, fn, desc):
+		print "** does not exist - ", desc
+	problem = CheckPathString(appPath)
+	if problem:
+		print problem
+	else:
+		if verbose: print appPath
+	
+	key = win32api.RegOpenKey(regutil.GetRootKey(), regutil.BuildDefaultPythonKey() + "\\PythonPath", 0, win32con.KEY_READ)
+	try:
+		keyNo = 0
+		while 1:
+			try:
+				appName = win32api.RegEnumKey(key, keyNo)
+				appPath = win32api.RegQueryValue(key, appName)
+				if verbose: print "\t"+appName+":",
+				if appPath:
+					problem = CheckPathString(appPath)
+					if problem:
+						print problem
+					else:
+						if verbose: print appPath
+				else:
+					if verbose: print "(empty)"
+				keyNo = keyNo + 1
+			except win32api.error:
+				break
+	finally:
+		win32api.RegCloseKey(key)
+
+def CheckHelpFiles(verbose):
+	"""Check every registered help file exists on disk.
+
+	   verbose -- if true, also print the help files that check out OK.
+	"""
+	if verbose: print "Help Files:"
+	try:
+		key = win32api.RegOpenKey(regutil.GetRootKey(), regutil.BuildDefaultPythonKey() + "\\Help", 0, win32con.KEY_READ)
+	except win32api.error, (code, fn, details):
+		import winerror
+		# No Help key at all is fine - nothing to check.
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, details)
+		return
+		
+	try:
+		keyNo = 0
+		while 1:
+			try:
+				helpDesc = win32api.RegEnumKey(key, keyNo)
+				helpFile = win32api.RegQueryValue(key, helpDesc)
+				if verbose: print "\t"+helpDesc+":",
+				# query the os section.
+				try:
+					os.stat(helpFile )
+					if verbose: print helpFile
+				except os.error:
+					print "** Help file %s does not exist" % helpFile
+				keyNo = keyNo + 1
+			except win32api.error, (code, fn, desc):
+				import winerror
+				# ERROR_NO_MORE_ITEMS just terminates the enumeration.
+				if code!=winerror.ERROR_NO_MORE_ITEMS:
+					raise win32api.error, (code, fn, desc)
+				break
+	finally:
+		win32api.RegCloseKey(key)
+
+def CheckRegisteredModules(verbose):
+	# Check out all registered modules.
+	k=regutil.BuildDefaultPythonKey() + "\\Modules"
+	try:
+		keyhandle = win32api.RegOpenKey(regutil.GetRootKey(), k)
+		print "WARNING: 'Modules' registry entry is deprectated and evil!"
+	except win32api.error, (code, fn, details):
+		import winerror
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, details)
+		return
+
+def CheckRegistry(verbose=0):
+	# check the registered modules
+	if os.environ.has_key('pythonpath'):
+		print "Warning - PythonPath in environment - registry PythonPath will be ignored"
+	# Check out all paths on sys.path
+	
+	CheckPythonPaths(verbose)
+	CheckHelpFiles(verbose)
+	CheckRegisteredModules(verbose)
+	CheckRegisteredExe("Python.exe")
+
+if __name__=='__main__':
+	# Usage: regcheck.py [-q]    (-q = quiet: only report problems)
+	if len(sys.argv)>1 and sys.argv[1]=='-q':
+		verbose = 0
+	else:
+		verbose = 1
+	CheckRegistry(verbose)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/regutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/regutil.py
new file mode 100644
index 0000000..7cbd447
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/regutil.py
@@ -0,0 +1,284 @@
+# Some registry helpers.
+import win32api
+import win32con
+import sys
+import os
+
+# NOTE(review): a py2-era string exception, raised via "raise error, msg";
+# kept as-is so existing except clauses that name it still match.
+error = "Registry utility error"
+
+# A .py file has a CLSID associated with it (why? - dunno!)
+CLSIDPyFile = "{b51df050-06ae-11cf-ad3b-524153480001}"
+
+RegistryIDPyFile = "Python.File" # The registry "file type" of a .py file
+RegistryIDPycFile = "Python.CompiledFile" # The registry "file type" of a .pyc file
+
+def GetRootKey():
+	"""Retrieves the Registry root in use by Python.
+
+	   Always HKEY_LOCAL_MACHINE now - the HKEY_CLASSES_ROOT special
+	   case only applied to the long-dead Win32s.
+	"""
+# Win32s no longer supported/released.
+#	if win32ui.IsWin32s():
+#		return win32con.HKEY_CLASSES_ROOT
+#	else:
+	return win32con.HKEY_LOCAL_MACHINE
+
+def GetRegistryDefaultValue(subkey, rootkey = None):
+	"""A helper to return the default value for a key in the registry.
+        """
+	if rootkey is None: rootkey = GetRootKey()
+	return win32api.RegQueryValue(rootkey, subkey)
+
+def SetRegistryDefaultValue(subKey, value, rootkey = None):
+	"""A helper to set the default value for a key in the registry
+        """
+	import types
+	if rootkey is None: rootkey = GetRootKey()
+	if type(value)==types.StringType:
+		typeId = win32con.REG_SZ
+	elif type(value)==types.IntType:
+		typeId = win32con.REG_DWORD
+	else:
+		raise TypeError, "Value must be string or integer - was passed " + str(value)
+
+	win32api.RegSetValue(rootkey, subKey, typeId ,value)
+	
+def BuildDefaultPythonKey():
+	"""Builds a string containing the path to the current registry key.
+
+	   The Python registry key contains the Python version.  This function
+	   uses the version of the DLL used by the current process to get the
+	   registry key currently in use.
+        """
+
+	return "Software\\Python\\PythonCore\\" + sys.winver
+
+def GetAppPathsKey():
+	return "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths"
+
+def RegisterPythonExe(exeFullPath, exeAlias = None, exeAppPath = None):
+	"""Register a .exe file that uses Python.
+
+	   Registers the .exe with the OS.  This allows the specified .exe to
+	   be run from the command-line or start button without using the full path,
+	   and also to setup application specific path (ie, os.environ['PATH']).
+
+	   Currently the exeAppPath is not supported, so this function is general
+	   purpose, and not specific to Python at all.  Later, exeAppPath may provide
+	   a reasonable default that is used.
+
+	   exeFullPath -- The full path to the .exe
+	   exeAlias = None -- An alias for the exe - if none, the base portion
+	             of the filename is used.
+	   exeAppPath -- Not supported; passing a true value raises 'error'.
+	"""
+	# Note - Dont work on win32s (but we dont care anymore!)
+	if exeAppPath:
+		raise error, "Do not support exeAppPath argument currently"
+	if exeAlias is None:
+		exeAlias = os.path.basename(exeFullPath)
+	win32api.RegSetValue(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias, win32con.REG_SZ, exeFullPath)
+
+def GetRegisteredExe(exeAlias):
+	"""Get a registered .exe
+	"""
+	return win32api.RegQueryValue(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias)
+
+def UnregisterPythonExe(exeAlias):
+	"""Unregister a .exe file that uses Python.
+	"""
+	try:
+		win32api.RegDeleteKey(GetRootKey(), GetAppPathsKey() + "\\" + exeAlias)
+	except win32api.error, (code, fn, details):
+		import winerror
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, desc)
+		return
+
+def RegisterNamedPath(name, path):
+	"""Register a named path - ie, a named PythonPath entry.
+
+	   name -- the sub-key name; if empty/None the core PythonPath
+	           value itself is set.
+	   path -- the ';'-separated path string to store.
+	"""
+	keyStr = BuildDefaultPythonKey() + "\\PythonPath"
+	if name: keyStr = keyStr + "\\" + name
+	win32api.RegSetValue(GetRootKey(), keyStr, win32con.REG_SZ, path)
+
+def UnregisterNamedPath(name):
+	"""Unregister a named path - ie, a named PythonPath entry.
+	"""
+	keyStr = BuildDefaultPythonKey() + "\\PythonPath\\" + name
+	try:
+		win32api.RegDeleteKey(GetRootKey(), keyStr)
+	except win32api.error, (code, fn, details):
+		import winerror
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, desc)
+		return
+
+def GetRegisteredNamedPath(name):
+	"""Get a registered named path, or None if it doesn't exist.
+
+	   name -- the sub-key name; if empty/None the core PythonPath
+	           value itself is returned.
+	"""
+	keyStr = BuildDefaultPythonKey() + "\\PythonPath"
+	if name: keyStr = keyStr + "\\" + name
+	try:
+		return win32api.RegQueryValue(GetRootKey(), keyStr)
+	except win32api.error, (code, fn, details):
+		import winerror
+		# Only a missing key maps to None; anything else propagates.
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, details)
+		return None
+
+
+def RegisterModule(modName, modPath):
+	"""Register an explicit module in the registry.  This forces the Python import
+           mechanism to locate this module directly, without a sys.path search.  Thus
+           a registered module need not appear in sys.path at all.
+
+	   modName -- The name of the module, as used by import.
+	   modPath -- The full path and file name of the module.
+	"""
+	try:
+		import os
+		os.stat(modPath)
+	except os.error:
+		print "Warning: Registering non-existant module %s" % modPath
+	win32api.RegSetValue(GetRootKey(), 
+	                     BuildDefaultPythonKey() + "\\Modules\\%s" % modName,
+		win32con.REG_SZ, modPath)
+
+def UnregisterModule(modName):
+	"""Unregister an explicit module in the registry.
+
+	   modName -- The name of the module, as used by import.
+	"""
+	try:
+		win32api.RegDeleteKey(GetRootKey(), 
+		                     BuildDefaultPythonKey() + "\\Modules\\%s" % modName)
+	except win32api.error, (code, fn, desc):
+		import winerror
+		# ERROR_FILE_NOT_FOUND just means it was never registered.
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, desc)
+
+def GetRegisteredHelpFile(helpDesc):
+	"""Given a description, return the registered entry.
+
+	   Tries the default root key first, then falls back to an explicit
+	   lookup under HKEY_CURRENT_USER.  Returns None if the help file is
+	   not registered in either location.
+	"""
+	try:
+		return GetRegistryDefaultValue(BuildDefaultPythonKey() + "\\Help\\" + helpDesc)
+	except win32api.error:
+		try:
+			return GetRegistryDefaultValue(BuildDefaultPythonKey() + "\\Help\\" + helpDesc, win32con.HKEY_CURRENT_USER)
+		except win32api.error:
+			pass
+	return None
+
+def RegisterHelpFile(helpFile, helpPath, helpDesc = None, bCheckFile = 1):
+	"""Register a help file in the registry.
+	
+	     Note that this used to support writing to the Windows Help
+	     key, however this is no longer done, as it seems to be incompatible.
+
+           helpFile -- the base name of the help file.
+           helpPath -- the path to the help file
+           helpDesc -- A description for the help file.  If None, the helpFile param is used.
+           bCheckFile -- A flag indicating if the file existence should be checked.
+
+           Raises ValueError if bCheckFile is true and the file does not exist.
+	"""
+	if helpDesc is None: helpDesc = helpFile
+	fullHelpFile = os.path.join(helpPath, helpFile)
+	try:
+		if bCheckFile: os.stat(fullHelpFile)
+	except os.error:
+		raise ValueError, "Help file does not exist"
+	# Now register with Python itself.
+	win32api.RegSetValue(GetRootKey(), 
+	                     BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc, win32con.REG_SZ, fullHelpFile)
+
+def UnregisterHelpFile(helpFile, helpDesc = None):
+	"""Unregister a help file in the registry.
+
+           helpFile -- the base name of the help file.
+           helpDesc -- A description for the help file.  If None, the helpFile param is used.
+	"""
+	# Remove the value from the Windows Help key (named after the file).
+	key = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "Software\\Microsoft\\Windows\\Help", 0, win32con.KEY_ALL_ACCESS)
+	try:
+		try:
+			win32api.RegDeleteValue(key, helpFile)
+		except win32api.error, (code, fn, desc):
+			import winerror
+			# A value that was never registered is not an error.
+			if code!=winerror.ERROR_FILE_NOT_FOUND:
+				raise win32api.error, (code, fn, desc)
+	finally:
+		win32api.RegCloseKey(key)
+	
+	# Now de-register with Python itself.
+	if helpDesc is None: helpDesc = helpFile
+	try:
+		win32api.RegDeleteKey(GetRootKey(), 
+		                     BuildDefaultPythonKey() + "\\Help\\%s" % helpDesc)	
+	except win32api.error, (code, fn, desc):
+		import winerror
+		# As above, ignore a key that does not exist.
+		if code!=winerror.ERROR_FILE_NOT_FOUND:
+			raise win32api.error, (code, fn, desc)
+
+def RegisterCoreDLL(coredllName = None):
+	"""Registers the core DLL in the registry.
+
+        If no params are passed, the name of the Python DLL used in 
+        the current process is used and registered.
+	"""
+	if coredllName is None:
+		coredllName = win32api.GetModuleFileName(sys.dllhandle)
+		# must exist!
+	else:
+		try:
+			os.stat(coredllName)
+		except os.error:
+			# Warn, but register anyway - the DLL may be installed later.
+			print "Warning: Registering non-existant core DLL %s" % coredllName
+
+	# Write the DLL name under the version-specific PythonCore key.
+	hKey = win32api.RegCreateKey(GetRootKey() , BuildDefaultPythonKey())
+	try:
+		win32api.RegSetValue(hKey, "Dll", win32con.REG_SZ, coredllName)
+	finally:
+		win32api.RegCloseKey(hKey)
+	# Lastly, setup the current version to point to me.
+	win32api.RegSetValue(GetRootKey(), "Software\\Python\\PythonCore\\CurrentVersion", win32con.REG_SZ, sys.winver)
+
+def RegisterFileExtensions(defPyIcon, defPycIcon, runCommand):
+	"""Register the core Python file extensions.
+	
+	   defPyIcon -- The default icon to use for .py files, in 'fname,offset' format.
+	   defPycIcon -- The default icon to use for .pyc files, in 'fname,offset' format.
+	   runCommand -- The command line to use for running .py files
+	"""
+	# Register the file extensions.
+	pythonFileId = RegistryIDPyFile
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , ".py", win32con.REG_SZ, pythonFileId)
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , pythonFileId , win32con.REG_SZ, "Python File")
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\CLSID" % pythonFileId , win32con.REG_SZ, CLSIDPyFile)
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\DefaultIcon" % pythonFileId, win32con.REG_SZ, defPyIcon)
+	base = "%s\\Shell" % RegistryIDPyFile
+	# The default verb is "Open", displayed as "Run" in the context menu.
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open", win32con.REG_SZ, "Run")
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open\\Command", win32con.REG_SZ, runCommand)
+
+	# Register the .PYC.
+	pythonFileId = RegistryIDPycFile
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , ".pyc", win32con.REG_SZ, pythonFileId)
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , pythonFileId , win32con.REG_SZ, "Compiled Python File")
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , "%s\\DefaultIcon" % pythonFileId, win32con.REG_SZ, defPycIcon)
+	base = "%s\\Shell" % pythonFileId
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open", win32con.REG_SZ, "Run")
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\Open\\Command", win32con.REG_SZ, runCommand)
+
+def RegisterShellCommand(shellCommand, exeCommand, shellUserCommand = None):
+	"""Register a shell verb for the Python.File type.
+
+	   shellCommand -- the verb name, eg "Open".
+	   exeCommand -- the command line executed for that verb.
+	   shellUserCommand -- optional display name for the right-click menu.
+	"""
+	# Last param for "Open" - for a .py file to be executed by the command line
+	# or shell execute (eg, just entering "foo.py"), the Command must be "Open",
+	# but you may associate a different name for the right-click menu.
+	# In our case, normally we have "Open=Run"
+	base = "%s\\Shell" % RegistryIDPyFile
+	if shellUserCommand:
+		win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s" % (shellCommand), win32con.REG_SZ, shellUserCommand)
+
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\Command" % (shellCommand), win32con.REG_SZ, exeCommand)
+
+def RegisterDDECommand(shellCommand, ddeApp, ddeTopic, ddeCommand):
+	"""Register a DDE execute action for a shell verb of the Python.File type.
+
+	   shellCommand -- the verb name, eg "Open".
+	   ddeApp -- the DDE application name.
+	   ddeTopic -- the DDE topic.
+	   ddeCommand -- the DDE command string sent when the verb is invoked.
+	"""
+	base = "%s\\Shell" % RegistryIDPyFile
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec" % (shellCommand), win32con.REG_SZ, ddeCommand)
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec\\Application" % (shellCommand), win32con.REG_SZ, ddeApp)
+	win32api.RegSetValue(win32con.HKEY_CLASSES_ROOT , base + "\\%s\\ddeexec\\Topic" % (shellCommand), win32con.REG_SZ, ddeTopic)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/sspi.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/sspi.py
new file mode 100644
index 0000000..812546f5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/sspi.py
@@ -0,0 +1,231 @@
+"""
+Helper classes for SSPI authentication via the win32security module.
+
+SSPI authentication involves a token-exchange "dance", the exact details
+of which depends on the authentication provider used.  There are also
+a number of complex flags and constants that need to be used - in most
+cases, there are reasonable defaults.
+
+These classes attempt to hide these details from you until you really need
+to know.  They are not designed to handle all cases, just the common ones.
+If you need finer control than offered here, just use the win32security
+functions directly.
+"""
+# Based on Roger Upole's sspi demos.
+# $Id: sspi.py,v 1.3 2006/02/13 14:47:20 rupole Exp $
+import win32security, sspicon
+
+error = win32security.error
+
+try:
+    True, False
+except NameError:
+    False = 1==0
+    True = not False
+
+class _BaseAuth(object):
+    """Shared state and message-protection helpers for the client and
+    server sides of an SSPI handshake.  Subclasses establish self.ctxt
+    via their authorize() methods; the encrypt/decrypt/sign/verify
+    helpers here then operate on that context.
+    """
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        """Reset everything to an unauthorized state"""
+        self.ctxt = None
+        self.authenticated = False
+        # The next seq_num for an encrypt/sign operation
+        self.next_seq_num = 0
+
+    def _get_next_seq_num(self):
+        """Get the next sequence number for a transmission.  Default
+        implementation is to increment a counter
+        """
+        ret = self.next_seq_num
+        self.next_seq_num = self.next_seq_num + 1
+        return ret
+
+    def encrypt(self, data):
+        """Encrypt a string, returning a tuple of (encrypted_data, encryption_data).
+        These can be passed to decrypt to get back the original string.
+        """
+        # Ask the provider how much trailer space EncryptMessage needs.
+        pkg_size_info=self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES)
+        trailersize=pkg_size_info['SecurityTrailer']
+
+        # Buffer 0 holds the plaintext (encrypted in place); buffer 1
+        # receives the security trailer.
+        encbuf=win32security.PySecBufferDescType()
+        encbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA))
+        encbuf.append(win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN))
+        encbuf[0].Buffer=data
+        self.ctxt.EncryptMessage(0,encbuf,self._get_next_seq_num())
+        return encbuf[0].Buffer, encbuf[1].Buffer
+
+    def decrypt(self, data, trailer):
+        """Decrypt a previously encrypted string, returning the original data"""
+        encbuf=win32security.PySecBufferDescType()
+        encbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA))
+        encbuf.append(win32security.PySecBufferType(len(trailer), sspicon.SECBUFFER_TOKEN))
+        encbuf[0].Buffer=data
+        encbuf[1].Buffer=trailer
+        # Decrypts in place; buffer 0 holds the plaintext afterwards.
+        self.ctxt.DecryptMessage(encbuf,self._get_next_seq_num())
+        return encbuf[0].Buffer
+
+    def sign(self, data):
+        """sign a string suitable for transmission, returning the signature.
+        Passing the data and signature to verify will determine if the data
+        is unchanged.
+        """
+        pkg_size_info=self.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES)
+        sigsize=pkg_size_info['MaxSignature']
+        sigbuf=win32security.PySecBufferDescType()
+        sigbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA))
+        sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN))
+        sigbuf[0].Buffer=data
+
+        # The signature is written into the TOKEN buffer (index 1).
+        self.ctxt.MakeSignature(0,sigbuf,self._get_next_seq_num())
+        return sigbuf[1].Buffer
+
+    def verify(self, data, sig):
+        """Verifies data and its signature.  If verification fails, an sspi.error
+        will be raised.
+        """
+        sigbuf=win32security.PySecBufferDescType()
+        sigbuf.append(win32security.PySecBufferType(len(data), sspicon.SECBUFFER_DATA))
+        sigbuf.append(win32security.PySecBufferType(len(sig), sspicon.SECBUFFER_TOKEN))
+
+        sigbuf[0].Buffer=data
+        sigbuf[1].Buffer=sig
+        self.ctxt.VerifySignature(sigbuf,self._get_next_seq_num())
+
+class ClientAuth(_BaseAuth):
+    """Manages the client side of an SSPI authentication handshake
+    """
+    def __init__(self,
+                 pkg_name, # Name of the package to be used, eg "NTLM".
+                 client_name = None, # User for whom credentials are used.
+                 auth_info = None, # or a tuple of (username, domain, password)
+                 targetspn = None, # Target security context provider name.
+                 scflags=None, # security context flags
+                 datarep=sspicon.SECURITY_NETWORK_DREP):
+        if scflags is None:
+            # Default: integrity, replay/sequence detection and encryption.
+            scflags = sspicon.ISC_REQ_INTEGRITY|sspicon.ISC_REQ_SEQUENCE_DETECT|\
+                      sspicon.ISC_REQ_REPLAY_DETECT|sspicon.ISC_REQ_CONFIDENTIALITY
+        self.scflags=scflags
+        self.datarep=datarep
+        self.targetspn=targetspn
+        self.pkg_info=win32security.QuerySecurityPackageInfo(pkg_name)
+        # Acquire outbound (client) credentials up front.
+        self.credentials, \
+        self.credentials_expiry=win32security.AcquireCredentialsHandle(
+                client_name, self.pkg_info['Name'],
+                sspicon.SECPKG_CRED_OUTBOUND,
+                None, auth_info)
+        _BaseAuth.__init__(self)
+
+    # Perform *one* step of the client authentication process.
+    # Returns (err, sec_buffer_out); feed the server's reply back in on
+    # the next call, repeating until err is 0 (see self.authenticated).
+    def authorize(self, sec_buffer_in):
+        if sec_buffer_in is not None and type(sec_buffer_in) != win32security.PySecBufferDescType:
+            # User passed us the raw data - wrap it into a SecBufferDesc
+            sec_buffer_new=win32security.PySecBufferDescType()
+            tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'],
+                                                 sspicon.SECBUFFER_TOKEN)
+            tokenbuf.Buffer=sec_buffer_in
+            sec_buffer_new.append(tokenbuf)
+            sec_buffer_in = sec_buffer_new
+        sec_buffer_out=win32security.PySecBufferDescType()
+        tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'], sspicon.SECBUFFER_TOKEN)
+        sec_buffer_out.append(tokenbuf)
+        ## input context handle should be NULL on first call
+        ctxtin=self.ctxt
+        if self.ctxt is None:
+            self.ctxt=win32security.PyCtxtHandleType()
+        err, attr, exp=win32security.InitializeSecurityContext(
+            self.credentials,
+            ctxtin,
+            self.targetspn,
+            self.scflags,
+            self.datarep,
+            sec_buffer_in,
+            self.ctxt,
+            sec_buffer_out)
+        # Stash these away in case someone needs to know the state from the
+        # final call.
+        self.ctxt_attr = attr
+        self.ctxt_expiry = exp
+
+        if err in (sspicon.SEC_I_COMPLETE_NEEDED,sspicon.SEC_I_COMPLETE_AND_CONTINUE):
+            self.ctxt.CompleteAuthToken(sec_buffer_out)
+        self.authenticated = err == 0
+        return err, sec_buffer_out
+
+class ServerAuth(_BaseAuth):
+    """Manages the server side of an SSPI authentication handshake
+    """
+    def __init__(self,
+                 pkg_name,
+                 spn = None,
+                 scflags=None,
+                 datarep=sspicon.SECURITY_NETWORK_DREP):
+        self.spn=spn
+        self.datarep=datarep
+
+        if scflags is None:
+            # Default: integrity, replay/sequence detection and encryption.
+            scflags = sspicon.ASC_REQ_INTEGRITY|sspicon.ASC_REQ_SEQUENCE_DETECT|\
+                      sspicon.ASC_REQ_REPLAY_DETECT|sspicon.ASC_REQ_CONFIDENTIALITY
+        # Should we default to sspicon.KerbAddExtraCredentialsMessage
+        # if pkg_name=='Kerberos'?
+        self.scflags=scflags
+
+        self.pkg_info=win32security.QuerySecurityPackageInfo(pkg_name)
+
+        # Acquire inbound (server) credentials up front.
+        self.credentials, \
+        self.credentials_expiry=win32security.AcquireCredentialsHandle(spn,
+                self.pkg_info['Name'], sspicon.SECPKG_CRED_INBOUND, None, None)
+        _BaseAuth.__init__(self)
+
+    # Perform *one* step of the server authentication process.
+    # Returns (err, sec_buffer_out); feed the client's next token back in
+    # on the next call, repeating until err is 0 (see self.authenticated).
+    def authorize(self, sec_buffer_in):
+        if sec_buffer_in is not None and type(sec_buffer_in) != win32security.PySecBufferDescType:
+            # User passed us the raw data - wrap it into a SecBufferDesc
+            sec_buffer_new=win32security.PySecBufferDescType()
+            tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'],
+                                                 sspicon.SECBUFFER_TOKEN)
+            tokenbuf.Buffer=sec_buffer_in
+            sec_buffer_new.append(tokenbuf)
+            sec_buffer_in = sec_buffer_new
+
+        sec_buffer_out=win32security.PySecBufferDescType()
+        tokenbuf=win32security.PySecBufferType(self.pkg_info['MaxToken'], sspicon.SECBUFFER_TOKEN)
+        sec_buffer_out.append(tokenbuf)
+        ## input context handle is None initially, then handle returned from last call thereafter
+        ctxtin=self.ctxt
+        if self.ctxt is None:
+            self.ctxt=win32security.PyCtxtHandleType()
+        err, attr, exp = win32security.AcceptSecurityContext(self.credentials, ctxtin,
+            sec_buffer_in, self.scflags,
+            self.datarep, self.ctxt, sec_buffer_out)
+
+        # Stash these away in case someone needs to know the state from the
+        # final call.
+        self.ctxt_attr = attr
+        self.ctxt_expiry = exp
+
+        if err in (sspicon.SEC_I_COMPLETE_NEEDED,sspicon.SEC_I_COMPLETE_AND_CONTINUE):
+            self.ctxt.CompleteAuthToken(sec_buffer_out)
+        self.authenticated = err == 0
+        return err, sec_buffer_out
+
+if __name__=='__main__':
+    # Smoke-test: run a full NTLM handshake between an in-process client
+    # and server, then exercise sign/verify and encrypt/decrypt.
+    # Setup the 2 contexts.
+    sspiclient=ClientAuth("NTLM")
+    sspiserver=ServerAuth("NTLM")
+
+    # Perform the authentication dance, each loop exchanging more information
+    # on the way to completing authentication.
+    sec_buffer=None
+    while 1:
+        err, sec_buffer = sspiclient.authorize(sec_buffer)
+        err, sec_buffer = sspiserver.authorize(sec_buffer)
+        if err==0:
+            break
+    sig = sspiclient.sign("hello")
+    sspiserver.verify("hello", sig)
+
+    data, key = sspiclient.encrypt("hello")
+    assert sspiserver.decrypt(data, key) == "hello"
+    print "cool!"
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/sspicon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/sspicon.py
new file mode 100644
index 0000000..a3c56531
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/sspicon.py
@@ -0,0 +1,479 @@
+# Generated by h2py from c:\microsoft sdk\include\sspi.h
+ISSP_LEVEL = 32
+ISSP_MODE = 1
+ISSP_LEVEL = 32
+ISSP_MODE = 0
+ISSP_LEVEL = 32
+ISSP_MODE = 1
+# A SECURITY_STATUS is a signed HRESULT-style code; non-negative means success.
+def SEC_SUCCESS(Status): return ((Status) >= 0)
+
+SECPKG_FLAG_INTEGRITY = 1
+SECPKG_FLAG_PRIVACY = 2
+SECPKG_FLAG_TOKEN_ONLY = 4
+SECPKG_FLAG_DATAGRAM = 8
+SECPKG_FLAG_CONNECTION = 16
+SECPKG_FLAG_MULTI_REQUIRED = 32
+SECPKG_FLAG_CLIENT_ONLY = 64
+SECPKG_FLAG_EXTENDED_ERROR = 128
+SECPKG_FLAG_IMPERSONATION = 256
+SECPKG_FLAG_ACCEPT_WIN32_NAME = 512
+SECPKG_FLAG_STREAM = 1024
+SECPKG_FLAG_NEGOTIABLE = 2048
+SECPKG_FLAG_GSS_COMPATIBLE = 4096
+SECPKG_FLAG_LOGON = 8192
+SECPKG_FLAG_ASCII_BUFFERS = 16384
+SECPKG_FLAG_FRAGMENT = 32768
+SECPKG_FLAG_MUTUAL_AUTH = 65536
+SECPKG_FLAG_DELEGATION = 131072
+SECPKG_FLAG_READONLY_WITH_CHECKSUM = 262144
+SECPKG_ID_NONE = 65535
+
+SECBUFFER_VERSION = 0
+SECBUFFER_EMPTY = 0
+SECBUFFER_DATA = 1
+SECBUFFER_TOKEN = 2
+SECBUFFER_PKG_PARAMS = 3
+SECBUFFER_MISSING = 4
+SECBUFFER_EXTRA = 5
+SECBUFFER_STREAM_TRAILER = 6
+SECBUFFER_STREAM_HEADER = 7
+SECBUFFER_NEGOTIATION_INFO = 8
+SECBUFFER_PADDING = 9
+SECBUFFER_STREAM = 10
+SECBUFFER_MECHLIST = 11
+SECBUFFER_MECHLIST_SIGNATURE = 12
+SECBUFFER_TARGET = 13
+SECBUFFER_CHANNEL_BINDINGS = 14
+SECBUFFER_ATTRMASK = (-268435456)
+SECBUFFER_READONLY = (-2147483648)
+SECBUFFER_READONLY_WITH_CHECKSUM = 268435456
+SECBUFFER_RESERVED = 1610612736
+
+SECURITY_NATIVE_DREP = 16
+SECURITY_NETWORK_DREP = 0
+
+SECPKG_CRED_INBOUND = 1
+SECPKG_CRED_OUTBOUND = 2
+SECPKG_CRED_BOTH = 3
+SECPKG_CRED_DEFAULT = 4
+SECPKG_CRED_RESERVED = -268435456
+
+ISC_REQ_DELEGATE = 1
+ISC_REQ_MUTUAL_AUTH = 2
+ISC_REQ_REPLAY_DETECT = 4
+ISC_REQ_SEQUENCE_DETECT = 8
+ISC_REQ_CONFIDENTIALITY = 16
+ISC_REQ_USE_SESSION_KEY = 32
+ISC_REQ_PROMPT_FOR_CREDS = 64
+ISC_REQ_USE_SUPPLIED_CREDS = 128
+ISC_REQ_ALLOCATE_MEMORY = 256
+ISC_REQ_USE_DCE_STYLE = 512
+ISC_REQ_DATAGRAM = 1024
+ISC_REQ_CONNECTION = 2048
+ISC_REQ_CALL_LEVEL = 4096
+ISC_REQ_FRAGMENT_SUPPLIED = 8192
+ISC_REQ_EXTENDED_ERROR = 16384
+ISC_REQ_STREAM = 32768
+ISC_REQ_INTEGRITY = 65536
+ISC_REQ_IDENTIFY = 131072
+ISC_REQ_NULL_SESSION = 262144
+ISC_REQ_MANUAL_CRED_VALIDATION = 524288
+ISC_REQ_RESERVED1 = 1048576
+ISC_REQ_FRAGMENT_TO_FIT = 2097152
+ISC_REQ_HTTP = 0x10000000
+ISC_RET_DELEGATE = 1
+ISC_RET_MUTUAL_AUTH = 2
+ISC_RET_REPLAY_DETECT = 4
+ISC_RET_SEQUENCE_DETECT = 8
+ISC_RET_CONFIDENTIALITY = 16
+ISC_RET_USE_SESSION_KEY = 32
+ISC_RET_USED_COLLECTED_CREDS = 64
+ISC_RET_USED_SUPPLIED_CREDS = 128
+ISC_RET_ALLOCATED_MEMORY = 256
+ISC_RET_USED_DCE_STYLE = 512
+ISC_RET_DATAGRAM = 1024
+ISC_RET_CONNECTION = 2048
+ISC_RET_INTERMEDIATE_RETURN = 4096
+ISC_RET_CALL_LEVEL = 8192
+ISC_RET_EXTENDED_ERROR = 16384
+ISC_RET_STREAM = 32768
+ISC_RET_INTEGRITY = 65536
+ISC_RET_IDENTIFY = 131072
+ISC_RET_NULL_SESSION = 262144
+ISC_RET_MANUAL_CRED_VALIDATION = 524288
+ISC_RET_RESERVED1 = 1048576
+ISC_RET_FRAGMENT_ONLY = 2097152
+
+ASC_REQ_DELEGATE = 1
+ASC_REQ_MUTUAL_AUTH = 2
+ASC_REQ_REPLAY_DETECT = 4
+ASC_REQ_SEQUENCE_DETECT = 8
+ASC_REQ_CONFIDENTIALITY = 16
+ASC_REQ_USE_SESSION_KEY = 32
+ASC_REQ_ALLOCATE_MEMORY = 256
+ASC_REQ_USE_DCE_STYLE = 512
+ASC_REQ_DATAGRAM = 1024
+ASC_REQ_CONNECTION = 2048
+ASC_REQ_CALL_LEVEL = 4096
+ASC_REQ_EXTENDED_ERROR = 32768
+ASC_REQ_STREAM = 65536
+ASC_REQ_INTEGRITY = 131072
+ASC_REQ_LICENSING = 262144
+ASC_REQ_IDENTIFY = 524288
+ASC_REQ_ALLOW_NULL_SESSION = 1048576
+ASC_REQ_ALLOW_NON_USER_LOGONS = 2097152
+ASC_REQ_ALLOW_CONTEXT_REPLAY = 4194304
+ASC_REQ_FRAGMENT_TO_FIT = 8388608
+ASC_REQ_FRAGMENT_SUPPLIED = 8192
+ASC_REQ_NO_TOKEN = 16777216
+ASC_RET_DELEGATE = 1
+ASC_RET_MUTUAL_AUTH = 2
+ASC_RET_REPLAY_DETECT = 4
+ASC_RET_SEQUENCE_DETECT = 8
+ASC_RET_CONFIDENTIALITY = 16
+ASC_RET_USE_SESSION_KEY = 32
+ASC_RET_ALLOCATED_MEMORY = 256
+ASC_RET_USED_DCE_STYLE = 512
+ASC_RET_DATAGRAM = 1024
+ASC_RET_CONNECTION = 2048
+ASC_RET_CALL_LEVEL = 8192
+ASC_RET_THIRD_LEG_FAILED = 16384
+ASC_RET_EXTENDED_ERROR = 32768
+ASC_RET_STREAM = 65536
+ASC_RET_INTEGRITY = 131072
+ASC_RET_LICENSING = 262144
+ASC_RET_IDENTIFY = 524288
+ASC_RET_NULL_SESSION = 1048576
+ASC_RET_ALLOW_NON_USER_LOGONS = 2097152
+ASC_RET_ALLOW_CONTEXT_REPLAY = 4194304
+ASC_RET_FRAGMENT_ONLY = 8388608
+
+SECPKG_CRED_ATTR_NAMES = 1
+SECPKG_ATTR_SIZES = 0
+SECPKG_ATTR_NAMES = 1
+SECPKG_ATTR_LIFESPAN = 2
+SECPKG_ATTR_DCE_INFO = 3
+SECPKG_ATTR_STREAM_SIZES = 4
+SECPKG_ATTR_KEY_INFO = 5
+SECPKG_ATTR_AUTHORITY = 6
+SECPKG_ATTR_PROTO_INFO = 7
+SECPKG_ATTR_PASSWORD_EXPIRY = 8
+SECPKG_ATTR_SESSION_KEY = 9
+SECPKG_ATTR_PACKAGE_INFO = 10
+SECPKG_ATTR_USER_FLAGS = 11
+SECPKG_ATTR_NEGOTIATION_INFO = 12
+SECPKG_ATTR_NATIVE_NAMES = 13
+SECPKG_ATTR_FLAGS = 14
+SECPKG_ATTR_USE_VALIDATED = 15
+SECPKG_ATTR_CREDENTIAL_NAME = 16
+SECPKG_ATTR_TARGET_INFORMATION = 17
+SECPKG_ATTR_ACCESS_TOKEN = 18
+SECPKG_ATTR_TARGET = 19
+SECPKG_ATTR_AUTHENTICATION_ID = 20
+
+## attributes from schannel.h
+SECPKG_ATTR_REMOTE_CERT_CONTEXT = 83
+SECPKG_ATTR_LOCAL_CERT_CONTEXT = 84
+SECPKG_ATTR_ROOT_STORE = 85
+SECPKG_ATTR_SUPPORTED_ALGS = 86
+SECPKG_ATTR_CIPHER_STRENGTHS = 87
+SECPKG_ATTR_SUPPORTED_PROTOCOLS = 88
+SECPKG_ATTR_ISSUER_LIST_EX = 89
+SECPKG_ATTR_CONNECTION_INFO = 90
+SECPKG_ATTR_EAP_KEY_BLOCK = 91
+SECPKG_ATTR_MAPPED_CRED_ATTR = 92
+SECPKG_ATTR_SESSION_INFO = 93
+SECPKG_ATTR_APP_DATA = 94
+
+SECPKG_NEGOTIATION_COMPLETE = 0
+SECPKG_NEGOTIATION_OPTIMISTIC = 1
+SECPKG_NEGOTIATION_IN_PROGRESS = 2
+SECPKG_NEGOTIATION_DIRECT = 3
+SECPKG_NEGOTIATION_TRY_MULTICRED = 4
+SECPKG_CONTEXT_EXPORT_RESET_NEW = 1
+SECPKG_CONTEXT_EXPORT_DELETE_OLD = 2
+SECQOP_WRAP_NO_ENCRYPT = (-2147483647)
+SECURITY_ENTRYPOINT_ANSIW = "InitSecurityInterfaceW"
+SECURITY_ENTRYPOINT_ANSIA = "InitSecurityInterfaceA"
+SECURITY_ENTRYPOINT16 = "INITSECURITYINTERFACEA"
+SECURITY_ENTRYPOINT_ANSI = SECURITY_ENTRYPOINT_ANSIW
+SECURITY_ENTRYPOINT_ANSI = SECURITY_ENTRYPOINT_ANSIA
+SECURITY_ENTRYPOINT = SECURITY_ENTRYPOINT16
+SECURITY_ENTRYPOINT_ANSI = SECURITY_ENTRYPOINT16
+SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION = 1
+SECURITY_SUPPORT_PROVIDER_INTERFACE_VERSION_2 = 2
+SASL_OPTION_SEND_SIZE = 1
+SASL_OPTION_RECV_SIZE = 2
+SASL_OPTION_AUTHZ_STRING = 3
+SASL_OPTION_AUTHZ_PROCESSING = 4
+SEC_WINNT_AUTH_IDENTITY_ANSI = 1
+SEC_WINNT_AUTH_IDENTITY_UNICODE = 2
+SEC_WINNT_AUTH_IDENTITY_VERSION = 512
+SEC_WINNT_AUTH_IDENTITY_MARSHALLED = 4
+SEC_WINNT_AUTH_IDENTITY_ONLY = 8
+SECPKG_OPTIONS_TYPE_UNKNOWN = 0
+SECPKG_OPTIONS_TYPE_LSA = 1
+SECPKG_OPTIONS_TYPE_SSPI = 2
+SECPKG_OPTIONS_PERMANENT = 1
+
+SEC_E_INSUFFICIENT_MEMORY = -2146893056
+SEC_E_INVALID_HANDLE = -2146893055
+SEC_E_UNSUPPORTED_FUNCTION = -2146893054
+SEC_E_TARGET_UNKNOWN = -2146893053
+SEC_E_INTERNAL_ERROR = -2146893052
+SEC_E_SECPKG_NOT_FOUND = -2146893051
+SEC_E_NOT_OWNER = -2146893050
+SEC_E_CANNOT_INSTALL = -2146893049
+SEC_E_INVALID_TOKEN = -2146893048
+SEC_E_CANNOT_PACK = -2146893047
+SEC_E_QOP_NOT_SUPPORTED = -2146893046
+SEC_E_NO_IMPERSONATION = -2146893045
+SEC_E_LOGON_DENIED = -2146893044
+SEC_E_UNKNOWN_CREDENTIALS = -2146893043
+SEC_E_NO_CREDENTIALS = -2146893042
+SEC_E_MESSAGE_ALTERED = -2146893041
+SEC_E_OUT_OF_SEQUENCE = -2146893040
+SEC_E_NO_AUTHENTICATING_AUTHORITY = -2146893039
+SEC_I_CONTINUE_NEEDED = 590610
+SEC_I_COMPLETE_NEEDED = 590611
+SEC_I_COMPLETE_AND_CONTINUE = 590612
+SEC_I_LOCAL_LOGON = 590613
+SEC_E_BAD_PKGID = -2146893034
+SEC_E_CONTEXT_EXPIRED = -2146893033
+SEC_I_CONTEXT_EXPIRED = 590615
+SEC_E_INCOMPLETE_MESSAGE = -2146893032
+SEC_E_INCOMPLETE_CREDENTIALS = -2146893024
+SEC_E_BUFFER_TOO_SMALL = -2146893023
+SEC_I_INCOMPLETE_CREDENTIALS = 590624
+SEC_I_RENEGOTIATE = 590625
+SEC_E_WRONG_PRINCIPAL = -2146893022
+SEC_I_NO_LSA_CONTEXT = 590627
+SEC_E_TIME_SKEW = -2146893020
+SEC_E_UNTRUSTED_ROOT = -2146893019
+SEC_E_ILLEGAL_MESSAGE = -2146893018
+SEC_E_CERT_UNKNOWN = -2146893017
+SEC_E_CERT_EXPIRED = -2146893016
+SEC_E_ENCRYPT_FAILURE = -2146893015
+SEC_E_DECRYPT_FAILURE = -2146893008
+SEC_E_ALGORITHM_MISMATCH = -2146893007
+SEC_E_SECURITY_QOS_FAILED = -2146893006
+SEC_E_UNFINISHED_CONTEXT_DELETED = -2146893005
+SEC_E_NO_TGT_REPLY = -2146893004
+SEC_E_NO_IP_ADDRESSES = -2146893003
+SEC_E_WRONG_CREDENTIAL_HANDLE = -2146893002
+SEC_E_CRYPTO_SYSTEM_INVALID = -2146893001
+SEC_E_MAX_REFERRALS_EXCEEDED = -2146893000
+SEC_E_MUST_BE_KDC = -2146892999
+SEC_E_STRONG_CRYPTO_NOT_SUPPORTED = -2146892998
+SEC_E_TOO_MANY_PRINCIPALS = -2146892997
+SEC_E_NO_PA_DATA = -2146892996
+SEC_E_PKINIT_NAME_MISMATCH = -2146892995
+SEC_E_SMARTCARD_LOGON_REQUIRED = -2146892994
+SEC_E_SHUTDOWN_IN_PROGRESS = -2146892993
+SEC_E_KDC_INVALID_REQUEST = -2146892992
+SEC_E_KDC_UNABLE_TO_REFER = -2146892991
+SEC_E_KDC_UNKNOWN_ETYPE = -2146892990
+SEC_E_UNSUPPORTED_PREAUTH = -2146892989
+SEC_E_DELEGATION_REQUIRED = -2146892987
+SEC_E_BAD_BINDINGS = -2146892986
+SEC_E_MULTIPLE_ACCOUNTS = -2146892985
+SEC_E_NO_KERB_KEY = -2146892984
+
+ERROR_IPSEC_QM_POLICY_EXISTS = 13000L
+ERROR_IPSEC_QM_POLICY_NOT_FOUND = 13001L
+ERROR_IPSEC_QM_POLICY_IN_USE = 13002L
+ERROR_IPSEC_MM_POLICY_EXISTS = 13003L
+ERROR_IPSEC_MM_POLICY_NOT_FOUND = 13004L
+ERROR_IPSEC_MM_POLICY_IN_USE = 13005L
+ERROR_IPSEC_MM_FILTER_EXISTS = 13006L
+ERROR_IPSEC_MM_FILTER_NOT_FOUND = 13007L
+ERROR_IPSEC_TRANSPORT_FILTER_EXISTS = 13008L
+ERROR_IPSEC_TRANSPORT_FILTER_NOT_FOUND = 13009L
+ERROR_IPSEC_MM_AUTH_EXISTS = 13010L
+ERROR_IPSEC_MM_AUTH_NOT_FOUND = 13011L
+ERROR_IPSEC_MM_AUTH_IN_USE = 13012L
+ERROR_IPSEC_DEFAULT_MM_POLICY_NOT_FOUND = 13013L
+ERROR_IPSEC_DEFAULT_MM_AUTH_NOT_FOUND = 13014L
+ERROR_IPSEC_DEFAULT_QM_POLICY_NOT_FOUND = 13015L
+ERROR_IPSEC_TUNNEL_FILTER_EXISTS = 13016L
+ERROR_IPSEC_TUNNEL_FILTER_NOT_FOUND = 13017L
+ERROR_IPSEC_MM_FILTER_PENDING_DELETION = 13018L
+ERROR_IPSEC_TRANSPORT_FILTER_PENDING_DELETION = 13019L
+ERROR_IPSEC_TUNNEL_FILTER_PENDING_DELETION = 13020L
+ERROR_IPSEC_MM_POLICY_PENDING_DELETION = 13021L
+ERROR_IPSEC_MM_AUTH_PENDING_DELETION = 13022L
+ERROR_IPSEC_QM_POLICY_PENDING_DELETION = 13023L
+WARNING_IPSEC_MM_POLICY_PRUNED = 13024L
+WARNING_IPSEC_QM_POLICY_PRUNED = 13025L
+ERROR_IPSEC_IKE_NEG_STATUS_BEGIN = 13800L
+ERROR_IPSEC_IKE_AUTH_FAIL = 13801L
+ERROR_IPSEC_IKE_ATTRIB_FAIL = 13802L
+ERROR_IPSEC_IKE_NEGOTIATION_PENDING = 13803L
+ERROR_IPSEC_IKE_GENERAL_PROCESSING_ERROR = 13804L
+ERROR_IPSEC_IKE_TIMED_OUT = 13805L
+ERROR_IPSEC_IKE_NO_CERT = 13806L
+ERROR_IPSEC_IKE_SA_DELETED = 13807L
+ERROR_IPSEC_IKE_SA_REAPED = 13808L
+ERROR_IPSEC_IKE_MM_ACQUIRE_DROP = 13809L
+ERROR_IPSEC_IKE_QM_ACQUIRE_DROP = 13810L
+ERROR_IPSEC_IKE_QUEUE_DROP_MM = 13811L
+ERROR_IPSEC_IKE_QUEUE_DROP_NO_MM = 13812L
+ERROR_IPSEC_IKE_DROP_NO_RESPONSE = 13813L
+ERROR_IPSEC_IKE_MM_DELAY_DROP = 13814L
+ERROR_IPSEC_IKE_QM_DELAY_DROP = 13815L
+ERROR_IPSEC_IKE_ERROR = 13816L
+ERROR_IPSEC_IKE_CRL_FAILED = 13817L
+ERROR_IPSEC_IKE_INVALID_KEY_USAGE = 13818L
+ERROR_IPSEC_IKE_INVALID_CERT_TYPE = 13819L
+ERROR_IPSEC_IKE_NO_PRIVATE_KEY = 13820L
+ERROR_IPSEC_IKE_DH_FAIL = 13822L
+ERROR_IPSEC_IKE_INVALID_HEADER = 13824L
+ERROR_IPSEC_IKE_NO_POLICY = 13825L
+ERROR_IPSEC_IKE_INVALID_SIGNATURE = 13826L
+ERROR_IPSEC_IKE_KERBEROS_ERROR = 13827L
+ERROR_IPSEC_IKE_NO_PUBLIC_KEY = 13828L
+ERROR_IPSEC_IKE_PROCESS_ERR = 13829L
+ERROR_IPSEC_IKE_PROCESS_ERR_SA = 13830L
+ERROR_IPSEC_IKE_PROCESS_ERR_PROP = 13831L
+ERROR_IPSEC_IKE_PROCESS_ERR_TRANS = 13832L
+ERROR_IPSEC_IKE_PROCESS_ERR_KE = 13833L
+ERROR_IPSEC_IKE_PROCESS_ERR_ID = 13834L
+ERROR_IPSEC_IKE_PROCESS_ERR_CERT = 13835L
+ERROR_IPSEC_IKE_PROCESS_ERR_CERT_REQ = 13836L
+ERROR_IPSEC_IKE_PROCESS_ERR_HASH = 13837L
+ERROR_IPSEC_IKE_PROCESS_ERR_SIG = 13838L
+ERROR_IPSEC_IKE_PROCESS_ERR_NONCE = 13839L
+ERROR_IPSEC_IKE_PROCESS_ERR_NOTIFY = 13840L
+ERROR_IPSEC_IKE_PROCESS_ERR_DELETE = 13841L
+ERROR_IPSEC_IKE_PROCESS_ERR_VENDOR = 13842L
+ERROR_IPSEC_IKE_INVALID_PAYLOAD = 13843L
+ERROR_IPSEC_IKE_LOAD_SOFT_SA = 13844L
+ERROR_IPSEC_IKE_SOFT_SA_TORN_DOWN = 13845L
+ERROR_IPSEC_IKE_INVALID_COOKIE = 13846L
+ERROR_IPSEC_IKE_NO_PEER_CERT = 13847L
+ERROR_IPSEC_IKE_PEER_CRL_FAILED = 13848L
+ERROR_IPSEC_IKE_POLICY_CHANGE = 13849L
+ERROR_IPSEC_IKE_NO_MM_POLICY = 13850L
+ERROR_IPSEC_IKE_NOTCBPRIV = 13851L
+ERROR_IPSEC_IKE_SECLOADFAIL = 13852L
+ERROR_IPSEC_IKE_FAILSSPINIT = 13853L
+ERROR_IPSEC_IKE_FAILQUERYSSP = 13854L
+ERROR_IPSEC_IKE_SRVACQFAIL = 13855L
+ERROR_IPSEC_IKE_SRVQUERYCRED = 13856L
+ERROR_IPSEC_IKE_GETSPIFAIL = 13857L
+ERROR_IPSEC_IKE_INVALID_FILTER = 13858L
+ERROR_IPSEC_IKE_OUT_OF_MEMORY = 13859L
+ERROR_IPSEC_IKE_ADD_UPDATE_KEY_FAILED = 13860L
+ERROR_IPSEC_IKE_INVALID_POLICY = 13861L
+ERROR_IPSEC_IKE_UNKNOWN_DOI = 13862L
+ERROR_IPSEC_IKE_INVALID_SITUATION = 13863L
+ERROR_IPSEC_IKE_DH_FAILURE = 13864L
+ERROR_IPSEC_IKE_INVALID_GROUP = 13865L
+ERROR_IPSEC_IKE_ENCRYPT = 13866L
+ERROR_IPSEC_IKE_DECRYPT = 13867L
+ERROR_IPSEC_IKE_POLICY_MATCH = 13868L
+ERROR_IPSEC_IKE_UNSUPPORTED_ID = 13869L
+ERROR_IPSEC_IKE_INVALID_HASH = 13870L
+ERROR_IPSEC_IKE_INVALID_HASH_ALG = 13871L
+ERROR_IPSEC_IKE_INVALID_HASH_SIZE = 13872L
+ERROR_IPSEC_IKE_INVALID_ENCRYPT_ALG = 13873L
+ERROR_IPSEC_IKE_INVALID_AUTH_ALG = 13874L
+ERROR_IPSEC_IKE_INVALID_SIG = 13875L
+ERROR_IPSEC_IKE_LOAD_FAILED = 13876L
+ERROR_IPSEC_IKE_RPC_DELETE = 13877L
+ERROR_IPSEC_IKE_BENIGN_REINIT = 13878L
+ERROR_IPSEC_IKE_INVALID_RESPONDER_LIFETIME_NOTIFY = 13879L
+ERROR_IPSEC_IKE_INVALID_CERT_KEYLEN = 13881L
+ERROR_IPSEC_IKE_MM_LIMIT = 13882L
+ERROR_IPSEC_IKE_NEGOTIATION_DISABLED = 13883L
+ERROR_IPSEC_IKE_NEG_STATUS_END = 13884L
+CRYPT_E_MSG_ERROR = ((-2146889727))
+CRYPT_E_UNKNOWN_ALGO = ((-2146889726))
+CRYPT_E_OID_FORMAT = ((-2146889725))
+CRYPT_E_INVALID_MSG_TYPE = ((-2146889724))
+CRYPT_E_UNEXPECTED_ENCODING = ((-2146889723))
+CRYPT_E_AUTH_ATTR_MISSING = ((-2146889722))
+CRYPT_E_HASH_VALUE = ((-2146889721))
+CRYPT_E_INVALID_INDEX = ((-2146889720))
+CRYPT_E_ALREADY_DECRYPTED = ((-2146889719))
+CRYPT_E_NOT_DECRYPTED = ((-2146889718))
+CRYPT_E_RECIPIENT_NOT_FOUND = ((-2146889717))
+CRYPT_E_CONTROL_TYPE = ((-2146889716))
+CRYPT_E_ISSUER_SERIALNUMBER = ((-2146889715))
+CRYPT_E_SIGNER_NOT_FOUND = ((-2146889714))
+CRYPT_E_ATTRIBUTES_MISSING = ((-2146889713))
+CRYPT_E_STREAM_MSG_NOT_READY = ((-2146889712))
+CRYPT_E_STREAM_INSUFFICIENT_DATA = ((-2146889711))
+CRYPT_I_NEW_PROTECTION_REQUIRED = (593938)
+CRYPT_E_BAD_LEN = ((-2146885631))
+CRYPT_E_BAD_ENCODE = ((-2146885630))
+CRYPT_E_FILE_ERROR = ((-2146885629))
+CRYPT_E_NOT_FOUND = ((-2146885628))
+CRYPT_E_EXISTS = ((-2146885627))
+CRYPT_E_NO_PROVIDER = ((-2146885626))
+CRYPT_E_SELF_SIGNED = ((-2146885625))
+CRYPT_E_DELETED_PREV = ((-2146885624))
+CRYPT_E_NO_MATCH = ((-2146885623))
+CRYPT_E_UNEXPECTED_MSG_TYPE = ((-2146885622))
+CRYPT_E_NO_KEY_PROPERTY = ((-2146885621))
+CRYPT_E_NO_DECRYPT_CERT = ((-2146885620))
+CRYPT_E_BAD_MSG = ((-2146885619))
+CRYPT_E_NO_SIGNER = ((-2146885618))
+CRYPT_E_PENDING_CLOSE = ((-2146885617))
+CRYPT_E_REVOKED = ((-2146885616))
+CRYPT_E_NO_REVOCATION_DLL = ((-2146885615))
+CRYPT_E_NO_REVOCATION_CHECK = ((-2146885614))
+CRYPT_E_REVOCATION_OFFLINE = ((-2146885613))
+CRYPT_E_NOT_IN_REVOCATION_DATABASE = ((-2146885612))
+CRYPT_E_INVALID_NUMERIC_STRING = ((-2146885600))
+CRYPT_E_INVALID_PRINTABLE_STRING = ((-2146885599))
+CRYPT_E_INVALID_IA5_STRING = ((-2146885598))
+CRYPT_E_INVALID_X500_STRING = ((-2146885597))
+CRYPT_E_NOT_CHAR_STRING = ((-2146885596))
+CRYPT_E_FILERESIZED = ((-2146885595))
+CRYPT_E_SECURITY_SETTINGS = ((-2146885594))
+CRYPT_E_NO_VERIFY_USAGE_DLL = ((-2146885593))
+CRYPT_E_NO_VERIFY_USAGE_CHECK = ((-2146885592))
+CRYPT_E_VERIFY_USAGE_OFFLINE = ((-2146885591))
+CRYPT_E_NOT_IN_CTL = ((-2146885590))
+CRYPT_E_NO_TRUSTED_SIGNER = ((-2146885589))
+CRYPT_E_MISSING_PUBKEY_PARA = ((-2146885588))
+CRYPT_E_OSS_ERROR = ((-2146881536))
+
+## Kerberos message types for LsaCallAuthenticationPackage (from ntsecapi.h)
+KerbDebugRequestMessage = 0
+KerbQueryTicketCacheMessage = 1
+KerbChangeMachinePasswordMessage = 2
+KerbVerifyPacMessage = 3
+KerbRetrieveTicketMessage = 4
+KerbUpdateAddressesMessage = 5
+KerbPurgeTicketCacheMessage = 6
+KerbChangePasswordMessage = 7
+KerbRetrieveEncodedTicketMessage = 8
+KerbDecryptDataMessage = 9
+KerbAddBindingCacheEntryMessage = 10
+KerbSetPasswordMessage = 11
+KerbSetPasswordExMessage = 12
+KerbVerifyCredentialsMessage = 13
+KerbQueryTicketCacheExMessage = 14
+KerbPurgeTicketCacheExMessage = 15
+KerbRefreshSmartcardCredentialsMessage = 16
+KerbAddExtraCredentialsMessage = 17
+KerbQuerySupplementalCredentialsMessage = 18
+
+## messages used with msv1_0 from ntsecapi.h
+MsV1_0Lm20ChallengeRequest = 0
+MsV1_0Lm20GetChallengeResponse = 1
+MsV1_0EnumerateUsers = 2
+MsV1_0GetUserInfo = 3
+MsV1_0ReLogonUsers = 4
+MsV1_0ChangePassword = 5
+MsV1_0ChangeCachedPassword = 6
+MsV1_0GenericPassthrough = 7
+MsV1_0CacheLogon = 8
+MsV1_0SubAuth = 9
+MsV1_0DeriveCredential = 10
+MsV1_0CacheLookup = 11
+MsV1_0SetProcessOption = 12
+
+SEC_E_OK = 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32con.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32con.py
new file mode 100644
index 0000000..5929750
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32con.py
@@ -0,0 +1,4835 @@
+# Generated by h2py from commdlg.h (plus modifications 4jan98)
+WINVER = 1280
+WM_USER = 1024
+PY_0U = 0
+OFN_READONLY = 1
+OFN_OVERWRITEPROMPT = 2
+OFN_HIDEREADONLY = 4
+OFN_NOCHANGEDIR = 8
+OFN_SHOWHELP = 16
+OFN_ENABLEHOOK = 32
+OFN_ENABLETEMPLATE = 64
+OFN_ENABLETEMPLATEHANDLE = 128
+OFN_NOVALIDATE = 256
+OFN_ALLOWMULTISELECT = 512
+OFN_EXTENSIONDIFFERENT = 1024
+OFN_PATHMUSTEXIST = 2048
+OFN_FILEMUSTEXIST = 4096
+OFN_CREATEPROMPT = 8192
+OFN_SHAREAWARE = 16384
+OFN_NOREADONLYRETURN = 32768
+OFN_NOTESTFILECREATE = 65536
+OFN_NONETWORKBUTTON = 131072
+OFN_NOLONGNAMES = 262144
+OFN_EXPLORER = 524288            # new look commdlg
+OFN_NODEREFERENCELINKS = 1048576
+OFN_LONGNAMES = 2097152           # force long names for 3.x modules
+OFN_ENABLEINCLUDENOTIFY = 4194304 # send include message to callback
+OFN_ENABLESIZING = 8388608
+OFN_DONTADDTORECENT = 33554432
+OFN_FORCESHOWHIDDEN = 268435456 # Show All files including System and hidden files
+OFN_EX_NOPLACESBAR = 1
+OFN_SHAREFALLTHROUGH = 2
+OFN_SHARENOWARN = 1
+OFN_SHAREWARN = 0
+CDN_FIRST = (PY_0U-601)
+CDN_LAST = (PY_0U-699)
+CDN_INITDONE = (CDN_FIRST - 0)
+CDN_SELCHANGE = (CDN_FIRST - 1)
+CDN_FOLDERCHANGE = (CDN_FIRST - 2)
+CDN_SHAREVIOLATION = (CDN_FIRST - 3)
+CDN_HELP = (CDN_FIRST - 4)
+CDN_FILEOK = (CDN_FIRST - 5)
+CDN_TYPECHANGE = (CDN_FIRST - 6)
+CDN_INCLUDEITEM = (CDN_FIRST - 7)
+CDM_FIRST = (WM_USER + 100)
+CDM_LAST = (WM_USER + 200)
+CDM_GETSPEC = (CDM_FIRST + 0)
+CDM_GETFILEPATH = (CDM_FIRST + 1)
+CDM_GETFOLDERPATH = (CDM_FIRST + 2)
+CDM_GETFOLDERIDLIST = (CDM_FIRST + 3)
+CDM_SETCONTROLTEXT = (CDM_FIRST + 4)
+CDM_HIDECONTROL = (CDM_FIRST + 5)
+CDM_SETDEFEXT = (CDM_FIRST + 6)
+CC_RGBINIT = 1
+CC_FULLOPEN = 2
+CC_PREVENTFULLOPEN = 4
+CC_SHOWHELP = 8
+CC_ENABLEHOOK = 16
+CC_ENABLETEMPLATE = 32
+CC_ENABLETEMPLATEHANDLE = 64
+CC_SOLIDCOLOR = 128
+CC_ANYCOLOR = 256
+FR_DOWN = 1
+FR_WHOLEWORD = 2
+FR_MATCHCASE = 4
+FR_FINDNEXT = 8
+FR_REPLACE = 16
+FR_REPLACEALL = 32
+FR_DIALOGTERM = 64
+FR_SHOWHELP = 128
+FR_ENABLEHOOK = 256
+FR_ENABLETEMPLATE = 512
+FR_NOUPDOWN = 1024
+FR_NOMATCHCASE = 2048
+FR_NOWHOLEWORD = 4096
+FR_ENABLETEMPLATEHANDLE = 8192
+FR_HIDEUPDOWN = 16384
+FR_HIDEMATCHCASE = 32768
+FR_HIDEWHOLEWORD = 65536
+CF_SCREENFONTS = 1
+CF_PRINTERFONTS = 2
+CF_BOTH = (CF_SCREENFONTS | CF_PRINTERFONTS)
+CF_SHOWHELP = 4
+CF_ENABLEHOOK = 8
+CF_ENABLETEMPLATE = 16
+CF_ENABLETEMPLATEHANDLE = 32
+CF_INITTOLOGFONTSTRUCT = 64
+CF_USESTYLE = 128
+CF_EFFECTS = 256
+CF_APPLY = 512
+CF_ANSIONLY = 1024
+CF_SCRIPTSONLY = CF_ANSIONLY
+CF_NOVECTORFONTS = 2048
+CF_NOOEMFONTS = CF_NOVECTORFONTS
+CF_NOSIMULATIONS = 4096
+CF_LIMITSIZE = 8192
+CF_FIXEDPITCHONLY = 16384
+CF_WYSIWYG = 32768 # must also have CF_SCREENFONTS & CF_PRINTERFONTS
+CF_FORCEFONTEXIST = 65536
+CF_SCALABLEONLY = 131072
+CF_TTONLY = 262144
+CF_NOFACESEL = 524288
+CF_NOSTYLESEL = 1048576
+CF_NOSIZESEL = 2097152
+CF_SELECTSCRIPT = 4194304
+CF_NOSCRIPTSEL = 8388608
+CF_NOVERTFONTS = 16777216
+SIMULATED_FONTTYPE = 32768
+PRINTER_FONTTYPE = 16384
+SCREEN_FONTTYPE = 8192
+BOLD_FONTTYPE = 256
+ITALIC_FONTTYPE = 512
+REGULAR_FONTTYPE = 1024
+OPENTYPE_FONTTYPE = 65536
+TYPE1_FONTTYPE = 131072
+DSIG_FONTTYPE = 262144
+WM_CHOOSEFONT_GETLOGFONT = (WM_USER + 1)
+WM_CHOOSEFONT_SETLOGFONT = (WM_USER + 101)
+WM_CHOOSEFONT_SETFLAGS = (WM_USER + 102)
+LBSELCHSTRINGA = "commdlg_LBSelChangedNotify"
+SHAREVISTRINGA = "commdlg_ShareViolation"
+FILEOKSTRINGA = "commdlg_FileNameOK"
+COLOROKSTRINGA = "commdlg_ColorOK"
+SETRGBSTRINGA = "commdlg_SetRGBColor"
+HELPMSGSTRINGA = "commdlg_help"
+FINDMSGSTRINGA = "commdlg_FindReplace"
+LBSELCHSTRING = LBSELCHSTRINGA
+SHAREVISTRING = SHAREVISTRINGA
+FILEOKSTRING = FILEOKSTRINGA
+COLOROKSTRING = COLOROKSTRINGA
+SETRGBSTRING = SETRGBSTRINGA
+HELPMSGSTRING = HELPMSGSTRINGA
+FINDMSGSTRING = FINDMSGSTRINGA
+CD_LBSELNOITEMS = -1
+CD_LBSELCHANGE = 0
+CD_LBSELSUB = 1
+CD_LBSELADD = 2
+PD_ALLPAGES = 0
+PD_SELECTION = 1
+PD_PAGENUMS = 2
+PD_NOSELECTION = 4
+PD_NOPAGENUMS = 8
+PD_COLLATE = 16
+PD_PRINTTOFILE = 32
+PD_PRINTSETUP = 64
+PD_NOWARNING = 128
+PD_RETURNDC = 256
+PD_RETURNIC = 512
+PD_RETURNDEFAULT = 1024
+PD_SHOWHELP = 2048
+PD_ENABLEPRINTHOOK = 4096
+PD_ENABLESETUPHOOK = 8192
+PD_ENABLEPRINTTEMPLATE = 16384
+PD_ENABLESETUPTEMPLATE = 32768
+PD_ENABLEPRINTTEMPLATEHANDLE = 65536
+PD_ENABLESETUPTEMPLATEHANDLE = 131072
+PD_USEDEVMODECOPIES = 262144
+PD_DISABLEPRINTTOFILE = 524288
+PD_HIDEPRINTTOFILE = 1048576
+PD_NONETWORKBUTTON = 2097152
+DN_DEFAULTPRN = 1
+WM_PSD_PAGESETUPDLG = (WM_USER  )
+WM_PSD_FULLPAGERECT = (WM_USER+1)
+WM_PSD_MINMARGINRECT = (WM_USER+2)
+WM_PSD_MARGINRECT = (WM_USER+3)
+WM_PSD_GREEKTEXTRECT = (WM_USER+4)
+WM_PSD_ENVSTAMPRECT = (WM_USER+5)
+WM_PSD_YAFULLPAGERECT = (WM_USER+6)
+PSD_DEFAULTMINMARGINS = 0 # default (printer's)
+PSD_INWININIINTLMEASURE = 0 # 1st of 4 possible
+PSD_MINMARGINS = 1 # use caller's
+PSD_MARGINS = 2 # use caller's
+PSD_INTHOUSANDTHSOFINCHES = 4 # 2nd of 4 possible
+PSD_INHUNDREDTHSOFMILLIMETERS = 8 # 3rd of 4 possible
+PSD_DISABLEMARGINS = 16
+PSD_DISABLEPRINTER = 32
+PSD_NOWARNING = 128 # must be same as PD_*
+PSD_DISABLEORIENTATION = 256
+PSD_RETURNDEFAULT = 1024 # must be same as PD_*
+PSD_DISABLEPAPER = 512
+PSD_SHOWHELP = 2048 # must be same as PD_*
+PSD_ENABLEPAGESETUPHOOK = 8192 # must be same as PD_*
+PSD_ENABLEPAGESETUPTEMPLATE = 32768 # must be same as PD_*
+PSD_ENABLEPAGESETUPTEMPLATEHANDLE = 131072 # must be same as PD_*
+PSD_ENABLEPAGEPAINTHOOK = 262144
+PSD_DISABLEPAGEPAINTING = 524288
+PSD_NONETWORKBUTTON = 2097152 # must be same as PD_*
+
+# Generated by h2py from winreg.h
+HKEY_CLASSES_ROOT = -2147483648
+HKEY_CURRENT_USER = -2147483647
+HKEY_LOCAL_MACHINE = -2147483646
+HKEY_USERS = -2147483645
+HKEY_PERFORMANCE_DATA = -2147483644
+HKEY_CURRENT_CONFIG = -2147483643
+HKEY_DYN_DATA = -2147483642
+HKEY_PERFORMANCE_TEXT = -2147483568  # ?? 4Jan98
+HKEY_PERFORMANCE_NLSTEXT = -2147483552  # ?? 4Jan98
+
+# Generated by h2py from winuser.h
+HWND_BROADCAST = 65535
+HWND_DESKTOP = 0
+HWND_TOP = 0
+HWND_BOTTOM = 1
+HWND_TOPMOST = -1
+HWND_NOTOPMOST = -2
+HWND_MESSAGE = -3
+
+# winuser.h line 4601
+SM_CXSCREEN = 0
+SM_CYSCREEN = 1
+SM_CXVSCROLL = 2
+SM_CYHSCROLL = 3
+SM_CYCAPTION = 4
+SM_CXBORDER = 5
+SM_CYBORDER = 6
+SM_CXDLGFRAME = 7
+SM_CYDLGFRAME = 8
+SM_CYVTHUMB = 9
+SM_CXHTHUMB = 10
+SM_CXICON = 11
+SM_CYICON = 12
+SM_CXCURSOR = 13
+SM_CYCURSOR = 14
+SM_CYMENU = 15
+SM_CXFULLSCREEN = 16
+SM_CYFULLSCREEN = 17
+SM_CYKANJIWINDOW = 18
+SM_MOUSEPRESENT = 19
+SM_CYVSCROLL = 20
+SM_CXHSCROLL = 21
+SM_DEBUG = 22
+SM_SWAPBUTTON = 23
+SM_RESERVED1 = 24
+SM_RESERVED2 = 25
+SM_RESERVED3 = 26
+SM_RESERVED4 = 27
+SM_CXMIN = 28
+SM_CYMIN = 29
+SM_CXSIZE = 30
+SM_CYSIZE = 31
+SM_CXFRAME = 32
+SM_CYFRAME = 33
+SM_CXMINTRACK = 34
+SM_CYMINTRACK = 35
+SM_CXDOUBLECLK = 36
+SM_CYDOUBLECLK = 37
+SM_CXICONSPACING = 38
+SM_CYICONSPACING = 39
+SM_MENUDROPALIGNMENT = 40
+SM_PENWINDOWS = 41
+SM_DBCSENABLED = 42
+SM_CMOUSEBUTTONS = 43
+SM_CXFIXEDFRAME = SM_CXDLGFRAME
+SM_CYFIXEDFRAME = SM_CYDLGFRAME
+SM_CXSIZEFRAME = SM_CXFRAME
+SM_CYSIZEFRAME = SM_CYFRAME
+SM_SECURE = 44
+SM_CXEDGE = 45
+SM_CYEDGE = 46
+SM_CXMINSPACING = 47
+SM_CYMINSPACING = 48
+SM_CXSMICON = 49
+SM_CYSMICON = 50
+SM_CYSMCAPTION = 51
+SM_CXSMSIZE = 52
+SM_CYSMSIZE = 53
+SM_CXMENUSIZE = 54
+SM_CYMENUSIZE = 55
+SM_ARRANGE = 56
+SM_CXMINIMIZED = 57
+SM_CYMINIMIZED = 58
+SM_CXMAXTRACK = 59
+SM_CYMAXTRACK = 60
+SM_CXMAXIMIZED = 61
+SM_CYMAXIMIZED = 62
+SM_NETWORK = 63
+SM_CLEANBOOT = 67
+SM_CXDRAG = 68
+SM_CYDRAG = 69
+SM_SHOWSOUNDS = 70
+SM_CXMENUCHECK = 71
+SM_CYMENUCHECK = 72
+SM_SLOWMACHINE = 73
+SM_MIDEASTENABLED = 74
+SM_MOUSEWHEELPRESENT = 75
+SM_XVIRTUALSCREEN = 76
+SM_YVIRTUALSCREEN = 77
+SM_CXVIRTUALSCREEN = 78
+SM_CYVIRTUALSCREEN = 79
+SM_CMONITORS = 80
+SM_SAMEDISPLAYFORMAT = 81
+SM_CMETRICS = 83
+MNC_IGNORE = 0
+MNC_CLOSE = 1
+MNC_EXECUTE = 2
+MNC_SELECT = 3
+MNS_NOCHECK = -2147483648
+MNS_MODELESS = 1073741824
+MNS_DRAGDROP = 536870912
+MNS_AUTODISMISS = 268435456
+MNS_NOTIFYBYPOS = 134217728
+MNS_CHECKORBMP = 67108864
+MIM_MAXHEIGHT = 1
+MIM_BACKGROUND = 2
+MIM_HELPID = 4
+MIM_MENUDATA = 8
+MIM_STYLE = 16
+MIM_APPLYTOSUBMENUS = -2147483648
+MND_CONTINUE = 0
+MND_ENDMENU = 1
+MNGOF_GAP = 3
+MNGO_NOINTERFACE = 0
+MNGO_NOERROR = 1
+MIIM_STATE = 1
+MIIM_ID = 2
+MIIM_SUBMENU = 4
+MIIM_CHECKMARKS = 8
+MIIM_TYPE = 16
+MIIM_DATA = 32
+MIIM_STRING = 64
+MIIM_BITMAP = 128
+MIIM_FTYPE = 256
+HBMMENU_CALLBACK = -1
+HBMMENU_SYSTEM = 1
+HBMMENU_MBAR_RESTORE = 2
+HBMMENU_MBAR_MINIMIZE = 3
+HBMMENU_MBAR_CLOSE = 5
+HBMMENU_MBAR_CLOSE_D = 6
+HBMMENU_MBAR_MINIMIZE_D = 7
+HBMMENU_POPUP_CLOSE = 8
+HBMMENU_POPUP_RESTORE = 9
+HBMMENU_POPUP_MAXIMIZE = 10
+HBMMENU_POPUP_MINIMIZE = 11
+GMDI_USEDISABLED = 1
+GMDI_GOINTOPOPUPS = 2
+TPM_LEFTBUTTON = 0
+TPM_RIGHTBUTTON = 2
+TPM_LEFTALIGN = 0
+TPM_CENTERALIGN = 4
+TPM_RIGHTALIGN = 8
+TPM_TOPALIGN = 0
+TPM_VCENTERALIGN = 16
+TPM_BOTTOMALIGN = 32
+TPM_HORIZONTAL = 0
+TPM_VERTICAL = 64
+TPM_NONOTIFY = 128
+TPM_RETURNCMD = 256
+TPM_RECURSE = 1
+DOF_EXECUTABLE = 32769
+DOF_DOCUMENT = 32770
+DOF_DIRECTORY = 32771
+DOF_MULTIPLE = 32772
+DOF_PROGMAN = 1
+DOF_SHELLDATA = 2
+DO_DROPFILE = 1162627398
+DO_PRINTFILE = 1414419024
+DT_TOP = 0
+DT_LEFT = 0
+DT_CENTER = 1
+DT_RIGHT = 2
+DT_VCENTER = 4
+DT_BOTTOM = 8
+DT_WORDBREAK = 16
+DT_SINGLELINE = 32
+DT_EXPANDTABS = 64
+DT_TABSTOP = 128
+DT_NOCLIP = 256
+DT_EXTERNALLEADING = 512
+DT_CALCRECT = 1024
+DT_NOPREFIX = 2048
+DT_INTERNAL = 4096
+DT_EDITCONTROL = 8192
+DT_PATH_ELLIPSIS = 16384
+DT_END_ELLIPSIS = 32768
+DT_MODIFYSTRING = 65536
+DT_RTLREADING = 131072
+DT_WORD_ELLIPSIS = 262144
+DST_COMPLEX = 0
+DST_TEXT = 1
+DST_PREFIXTEXT = 2
+DST_ICON = 3
+DST_BITMAP = 4
+DSS_NORMAL = 0
+DSS_UNION = 16
+DSS_DISABLED = 32
+DSS_MONO = 128
+DSS_RIGHT = 32768
+DCX_WINDOW = 1
+DCX_CACHE = 2
+DCX_NORESETATTRS = 4
+DCX_CLIPCHILDREN = 8
+DCX_CLIPSIBLINGS = 16
+DCX_PARENTCLIP = 32
+DCX_EXCLUDERGN = 64
+DCX_INTERSECTRGN = 128
+DCX_EXCLUDEUPDATE = 256
+DCX_INTERSECTUPDATE = 512
+DCX_LOCKWINDOWUPDATE = 1024
+DCX_VALIDATE = 2097152
+CUDR_NORMAL = 0
+CUDR_NOSNAPTOGRID = 1
+CUDR_NORESOLVEPOSITIONS = 2
+CUDR_NOCLOSEGAPS = 4
+CUDR_NEGATIVECOORDS = 8
+CUDR_NOPRIMARY = 16
+RDW_INVALIDATE = 1
+RDW_INTERNALPAINT = 2
+RDW_ERASE = 4
+RDW_VALIDATE = 8
+RDW_NOINTERNALPAINT = 16
+RDW_NOERASE = 32
+RDW_NOCHILDREN = 64
+RDW_ALLCHILDREN = 128
+RDW_UPDATENOW = 256
+RDW_ERASENOW = 512
+RDW_FRAME = 1024
+RDW_NOFRAME = 2048
+SW_SCROLLCHILDREN = 1
+SW_INVALIDATE = 2
+SW_ERASE = 4
+SW_SMOOTHSCROLL = 16  # Use smooth scrolling
+ESB_ENABLE_BOTH = 0
+ESB_DISABLE_BOTH = 3
+ESB_DISABLE_LEFT = 1
+ESB_DISABLE_RIGHT = 2
+ESB_DISABLE_UP = 1
+ESB_DISABLE_DOWN = 2
+ESB_DISABLE_LTUP = ESB_DISABLE_LEFT
+ESB_DISABLE_RTDN = ESB_DISABLE_RIGHT
+HELPINFO_WINDOW = 1
+HELPINFO_MENUITEM = 2
+MB_OK = 0
+MB_OKCANCEL = 1
+MB_ABORTRETRYIGNORE = 2
+MB_YESNOCANCEL = 3
+MB_YESNO = 4
+MB_RETRYCANCEL = 5
+MB_ICONHAND = 16
+MB_ICONQUESTION = 32
+MB_ICONEXCLAMATION = 48
+MB_ICONASTERISK = 64
+MB_ICONWARNING = MB_ICONEXCLAMATION
+MB_ICONERROR = MB_ICONHAND
+MB_ICONINFORMATION = MB_ICONASTERISK
+MB_ICONSTOP = MB_ICONHAND
+MB_DEFBUTTON1 = 0
+MB_DEFBUTTON2 = 256
+MB_DEFBUTTON3 = 512
+MB_DEFBUTTON4 = 768
+MB_APPLMODAL = 0
+MB_SYSTEMMODAL = 4096
+MB_TASKMODAL = 8192
+MB_HELP = 16384
+MB_NOFOCUS = 32768
+MB_SETFOREGROUND = 65536
+MB_DEFAULT_DESKTOP_ONLY = 131072
+MB_TOPMOST = 262144L
+MB_RIGHT = 524288
+MB_RTLREADING = 1048576
+MB_SERVICE_NOTIFICATION = 2097152
+MB_TYPEMASK = 15
+MB_USERICON = 128
+MB_ICONMASK = 240
+MB_DEFMASK = 3840
+MB_MODEMASK = 12288
+MB_MISCMASK = 49152
+# winuser.h line 6373
+CWP_ALL = 0
+CWP_SKIPINVISIBLE = 1
+CWP_SKIPDISABLED = 2
+CWP_SKIPTRANSPARENT = 4
+CTLCOLOR_MSGBOX = 0
+CTLCOLOR_EDIT = 1
+CTLCOLOR_LISTBOX = 2
+CTLCOLOR_BTN = 3
+CTLCOLOR_DLG = 4
+CTLCOLOR_SCROLLBAR = 5
+CTLCOLOR_STATIC = 6
+CTLCOLOR_MAX = 7
+COLOR_SCROLLBAR = 0
+COLOR_BACKGROUND = 1
+COLOR_ACTIVECAPTION = 2
+COLOR_INACTIVECAPTION = 3
+COLOR_MENU = 4
+COLOR_WINDOW = 5
+COLOR_WINDOWFRAME = 6
+COLOR_MENUTEXT = 7
+COLOR_WINDOWTEXT = 8
+COLOR_CAPTIONTEXT = 9
+COLOR_ACTIVEBORDER = 10
+COLOR_INACTIVEBORDER = 11
+COLOR_APPWORKSPACE = 12
+COLOR_HIGHLIGHT = 13
+COLOR_HIGHLIGHTTEXT = 14
+COLOR_BTNFACE = 15
+COLOR_BTNSHADOW = 16
+COLOR_GRAYTEXT = 17
+COLOR_BTNTEXT = 18
+COLOR_INACTIVECAPTIONTEXT = 19
+COLOR_BTNHIGHLIGHT = 20
+COLOR_3DDKSHADOW = 21
+COLOR_3DLIGHT = 22
+COLOR_INFOTEXT = 23
+COLOR_INFOBK = 24
+COLOR_HOTLIGHT = 26
+COLOR_GRADIENTACTIVECAPTION = 27
+COLOR_GRADIENTINACTIVECAPTION = 28
+COLOR_DESKTOP = COLOR_BACKGROUND
+COLOR_3DFACE = COLOR_BTNFACE
+COLOR_3DSHADOW = COLOR_BTNSHADOW
+COLOR_3DHIGHLIGHT = COLOR_BTNHIGHLIGHT
+COLOR_3DHILIGHT = COLOR_BTNHIGHLIGHT
+COLOR_BTNHILIGHT = COLOR_BTNHIGHLIGHT
+GW_HWNDFIRST = 0
+GW_HWNDLAST = 1
+GW_HWNDNEXT = 2
+GW_HWNDPREV = 3
+GW_OWNER = 4
+GW_CHILD = 5
+GW_ENABLEDPOPUP = 6
+GW_MAX = 6
+MF_INSERT = 0
+MF_CHANGE = 128
+MF_APPEND = 256
+MF_DELETE = 512
+MF_REMOVE = 4096
+MF_BYCOMMAND = 0
+MF_BYPOSITION = 1024
+MF_SEPARATOR = 2048
+MF_ENABLED = 0
+MF_GRAYED = 1
+MF_DISABLED = 2
+MF_UNCHECKED = 0
+MF_CHECKED = 8
+MF_USECHECKBITMAPS = 512
+MF_STRING = 0
+MF_BITMAP = 4
+MF_OWNERDRAW = 256
+MF_POPUP = 16
+MF_MENUBARBREAK = 32
+MF_MENUBREAK = 64
+MF_UNHILITE = 0
+MF_HILITE = 128
+MF_DEFAULT = 4096
+MF_SYSMENU = 8192
+MF_HELP = 16384
+MF_RIGHTJUSTIFY = 16384
+MF_MOUSESELECT = 32768
+MF_END = 128
+MFT_STRING = MF_STRING
+MFT_BITMAP = MF_BITMAP
+MFT_MENUBARBREAK = MF_MENUBARBREAK
+MFT_MENUBREAK = MF_MENUBREAK
+MFT_OWNERDRAW = MF_OWNERDRAW
+MFT_RADIOCHECK = 512
+MFT_SEPARATOR = MF_SEPARATOR
+MFT_RIGHTORDER = 8192
+MFT_RIGHTJUSTIFY = MF_RIGHTJUSTIFY
+MFS_GRAYED = 3
+MFS_DISABLED = MFS_GRAYED
+MFS_CHECKED = MF_CHECKED
+MFS_HILITE = MF_HILITE
+MFS_ENABLED = MF_ENABLED
+MFS_UNCHECKED = MF_UNCHECKED
+MFS_UNHILITE = MF_UNHILITE
+MFS_DEFAULT = MF_DEFAULT
+MFS_MASK = 4235L
+MFS_HOTTRACKDRAWN = 268435456
+MFS_CACHEDBMP = 536870912
+MFS_BOTTOMGAPDROP = 1073741824
+MFS_TOPGAPDROP = -2147483648
+MFS_GAPDROP = -1073741824
+SC_SIZE = 61440
+SC_MOVE = 61456
+SC_MINIMIZE = 61472
+SC_MAXIMIZE = 61488
+SC_NEXTWINDOW = 61504
+SC_PREVWINDOW = 61520
+SC_CLOSE = 61536
+SC_VSCROLL = 61552
+SC_HSCROLL = 61568
+SC_MOUSEMENU = 61584
+SC_KEYMENU = 61696
+SC_ARRANGE = 61712
+SC_RESTORE = 61728
+SC_TASKLIST = 61744
+SC_SCREENSAVE = 61760
+SC_HOTKEY = 61776
+SC_DEFAULT = 61792
+SC_MONITORPOWER = 61808
+SC_CONTEXTHELP = 61824
+SC_SEPARATOR = 61455
+SC_ICON = SC_MINIMIZE
+SC_ZOOM = SC_MAXIMIZE
+IDC_ARROW = 32512
+IDC_IBEAM = 32513
+IDC_WAIT = 32514
+IDC_CROSS = 32515
+IDC_UPARROW = 32516
+IDC_SIZE = 32640  # OBSOLETE: use IDC_SIZEALL
+IDC_ICON = 32641  # OBSOLETE: use IDC_ARROW
+IDC_SIZENWSE = 32642
+IDC_SIZENESW = 32643
+IDC_SIZEWE = 32644
+IDC_SIZENS = 32645
+IDC_SIZEALL = 32646
+IDC_NO = 32648
+IDC_HAND = 32649
+IDC_APPSTARTING = 32650
+IDC_HELP = 32651
+IMAGE_BITMAP = 0
+IMAGE_ICON = 1
+IMAGE_CURSOR = 2
+IMAGE_ENHMETAFILE = 3
+LR_DEFAULTCOLOR = 0
+LR_MONOCHROME = 1
+LR_COLOR = 2
+LR_COPYRETURNORG = 4
+LR_COPYDELETEORG = 8
+LR_LOADFROMFILE = 16
+LR_LOADTRANSPARENT = 32
+LR_DEFAULTSIZE = 64
+LR_LOADREALSIZE = 128
+LR_LOADMAP3DCOLORS = 4096
+LR_CREATEDIBSECTION = 8192
+LR_COPYFROMRESOURCE = 16384
+LR_SHARED = 32768
+DI_MASK = 1
+DI_IMAGE = 2
+DI_NORMAL = 3
+DI_COMPAT = 4
+DI_DEFAULTSIZE = 8
+RES_ICON = 1
+RES_CURSOR = 2
+OBM_CLOSE = 32754
+OBM_UPARROW = 32753
+OBM_DNARROW = 32752
+OBM_RGARROW = 32751
+OBM_LFARROW = 32750
+OBM_REDUCE = 32749
+OBM_ZOOM = 32748
+OBM_RESTORE = 32747
+OBM_REDUCED = 32746
+OBM_ZOOMD = 32745
+OBM_RESTORED = 32744
+OBM_UPARROWD = 32743
+OBM_DNARROWD = 32742
+OBM_RGARROWD = 32741
+OBM_LFARROWD = 32740
+OBM_MNARROW = 32739
+OBM_COMBO = 32738
+OBM_UPARROWI = 32737
+OBM_DNARROWI = 32736
+OBM_RGARROWI = 32735
+OBM_LFARROWI = 32734
+OBM_OLD_CLOSE = 32767
+OBM_SIZE = 32766
+OBM_OLD_UPARROW = 32765
+OBM_OLD_DNARROW = 32764
+OBM_OLD_RGARROW = 32763
+OBM_OLD_LFARROW = 32762
+OBM_BTSIZE = 32761
+OBM_CHECK = 32760
+OBM_CHECKBOXES = 32759
+OBM_BTNCORNERS = 32758
+OBM_OLD_REDUCE = 32757
+OBM_OLD_ZOOM = 32756
+OBM_OLD_RESTORE = 32755
+OCR_NORMAL = 32512
+OCR_IBEAM = 32513
+OCR_WAIT = 32514
+OCR_CROSS = 32515
+OCR_UP = 32516
+OCR_SIZE = 32640
+OCR_ICON = 32641
+OCR_SIZENWSE = 32642
+OCR_SIZENESW = 32643
+OCR_SIZEWE = 32644
+OCR_SIZENS = 32645
+OCR_SIZEALL = 32646
+OCR_ICOCUR = 32647
+OCR_NO = 32648
+OCR_HAND = 32649
+OCR_APPSTARTING = 32650
+# winuser.h line 7455
+OIC_SAMPLE = 32512
+OIC_HAND = 32513
+OIC_QUES = 32514
+OIC_BANG = 32515
+OIC_NOTE = 32516
+OIC_WINLOGO = 32517
+OIC_WARNING = OIC_BANG
+OIC_ERROR = OIC_HAND
+OIC_INFORMATION = OIC_NOTE
+ORD_LANGDRIVER = 1
+IDI_APPLICATION = 32512
+IDI_HAND = 32513
+IDI_QUESTION = 32514
+IDI_EXCLAMATION = 32515
+IDI_ASTERISK = 32516
+IDI_WINLOGO = 32517
+IDI_WARNING = IDI_EXCLAMATION
+IDI_ERROR = IDI_HAND
+IDI_INFORMATION = IDI_ASTERISK
+IDOK = 1
+IDCANCEL = 2
+IDABORT = 3
+IDRETRY = 4
+IDIGNORE = 5
+IDYES = 6
+IDNO = 7
+IDCLOSE = 8
+IDHELP = 9
+ES_LEFT = 0
+ES_CENTER = 1
+ES_RIGHT = 2
+ES_MULTILINE = 4
+ES_UPPERCASE = 8
+ES_LOWERCASE = 16
+ES_PASSWORD = 32
+ES_AUTOVSCROLL = 64
+ES_AUTOHSCROLL = 128
+ES_NOHIDESEL = 256
+ES_OEMCONVERT = 1024
+ES_READONLY = 2048
+ES_WANTRETURN = 4096
+ES_NUMBER = 8192
+EN_SETFOCUS = 256
+EN_KILLFOCUS = 512
+EN_CHANGE = 768
+EN_UPDATE = 1024
+EN_ERRSPACE = 1280
+EN_MAXTEXT = 1281
+EN_HSCROLL = 1537
+EN_VSCROLL = 1538
+EC_LEFTMARGIN = 1
+EC_RIGHTMARGIN = 2
+EC_USEFONTINFO = 65535
+EMSIS_COMPOSITIONSTRING = 1
+EIMES_GETCOMPSTRATONCE = 1
+EIMES_CANCELCOMPSTRINFOCUS = 2
+EIMES_COMPLETECOMPSTRKILLFOCUS = 4
+EM_GETSEL = 176
+EM_SETSEL = 177
+EM_GETRECT = 178
+EM_SETRECT = 179
+EM_SETRECTNP = 180
+EM_SCROLL = 181
+EM_LINESCROLL = 182
+EM_SCROLLCARET = 183
+EM_GETMODIFY = 184
+EM_SETMODIFY = 185
+EM_GETLINECOUNT = 186
+EM_LINEINDEX = 187
+EM_SETHANDLE = 188
+EM_GETHANDLE = 189
+EM_GETTHUMB = 190
+EM_LINELENGTH = 193
+EM_REPLACESEL = 194
+EM_GETLINE = 196
+EM_LIMITTEXT = 197
+EM_CANUNDO = 198
+EM_UNDO = 199
+EM_FMTLINES = 200
+EM_LINEFROMCHAR = 201
+EM_SETTABSTOPS = 203
+EM_SETPASSWORDCHAR = 204
+EM_EMPTYUNDOBUFFER = 205
+EM_GETFIRSTVISIBLELINE = 206
+EM_SETREADONLY = 207
+EM_SETWORDBREAKPROC = 208
+EM_GETWORDBREAKPROC = 209
+EM_GETPASSWORDCHAR = 210
+EM_SETMARGINS = 211
+EM_GETMARGINS = 212
+EM_SETLIMITTEXT = EM_LIMITTEXT
+EM_GETLIMITTEXT = 213
+EM_POSFROMCHAR = 214
+EM_CHARFROMPOS = 215
+EM_SETIMESTATUS = 216
+EM_GETIMESTATUS = 217
+WB_LEFT = 0
+WB_RIGHT = 1
+WB_ISDELIMITER = 2
+BS_PUSHBUTTON = 0
+BS_DEFPUSHBUTTON = 1
+BS_CHECKBOX = 2
+BS_AUTOCHECKBOX = 3
+BS_RADIOBUTTON = 4
+BS_3STATE = 5
+BS_AUTO3STATE = 6
+BS_GROUPBOX = 7
+BS_USERBUTTON = 8
+BS_AUTORADIOBUTTON = 9
+BS_OWNERDRAW = 11L
+BS_LEFTTEXT = 32
+BS_TEXT = 0
+BS_ICON = 64
+BS_BITMAP = 128
+BS_LEFT = 256
+BS_RIGHT = 512
+BS_CENTER = 768
+BS_TOP = 1024
+BS_BOTTOM = 2048
+BS_VCENTER = 3072
+BS_PUSHLIKE = 4096
+BS_MULTILINE = 8192
+BS_NOTIFY = 16384
+BS_FLAT = 32768
+BS_RIGHTBUTTON = BS_LEFTTEXT
+BN_CLICKED = 0
+BN_PAINT = 1
+BN_HILITE = 2
+BN_UNHILITE = 3
+BN_DISABLE = 4
+BN_DOUBLECLICKED = 5
+BN_PUSHED = BN_HILITE
+BN_UNPUSHED = BN_UNHILITE
+BN_DBLCLK = BN_DOUBLECLICKED
+BN_SETFOCUS = 6
+BN_KILLFOCUS = 7
+BM_GETCHECK = 240
+BM_SETCHECK = 241
+BM_GETSTATE = 242
+BM_SETSTATE = 243
+BM_SETSTYLE = 244
+BM_CLICK = 245
+BM_GETIMAGE = 246
+BM_SETIMAGE = 247
+BST_UNCHECKED = 0
+BST_CHECKED = 1
+BST_INDETERMINATE = 2
+BST_PUSHED = 4
+BST_FOCUS = 8
+SS_LEFT = 0
+SS_CENTER = 1
+SS_RIGHT = 2
+SS_ICON = 3
+SS_BLACKRECT = 4
+SS_GRAYRECT = 5
+SS_WHITERECT = 6
+SS_BLACKFRAME = 7
+SS_GRAYFRAME = 8
+SS_WHITEFRAME = 9
+SS_USERITEM = 10
+SS_SIMPLE = 11
+SS_LEFTNOWORDWRAP = 12
+SS_BITMAP = 14
+SS_OWNERDRAW = 13
+SS_ENHMETAFILE = 15
+SS_ETCHEDHORZ = 16
+SS_ETCHEDVERT = 17
+SS_ETCHEDFRAME = 18
+SS_TYPEMASK = 31
+SS_NOPREFIX = 128
+SS_NOTIFY = 256
+SS_CENTERIMAGE = 512
+SS_RIGHTJUST = 1024
+SS_REALSIZEIMAGE = 2048
+SS_SUNKEN = 4096
+SS_ENDELLIPSIS = 16384
+SS_PATHELLIPSIS = 32768
+SS_WORDELLIPSIS = 49152
+SS_ELLIPSISMASK = 49152
+STM_SETICON = 368
+STM_GETICON = 369
+STM_SETIMAGE = 370
+STM_GETIMAGE = 371
+STN_CLICKED = 0
+STN_DBLCLK = 1
+STN_ENABLE = 2
+STN_DISABLE = 3
+STM_MSGMAX = 372
+DWL_MSGRESULT = 0
+DWL_DLGPROC = 4
+DWL_USER = 8
+DDL_READWRITE = 0
+DDL_READONLY = 1
+DDL_HIDDEN = 2
+DDL_SYSTEM = 4
+DDL_DIRECTORY = 16
+DDL_ARCHIVE = 32
+DDL_POSTMSGS = 8192
+DDL_DRIVES = 16384
+DDL_EXCLUSIVE = 32768
+
+#from winuser.h line 153
+RT_CURSOR = 1
+RT_BITMAP = 2
+RT_ICON = 3
+RT_MENU = 4
+RT_DIALOG = 5
+RT_STRING = 6
+RT_FONTDIR = 7
+RT_FONT = 8
+RT_ACCELERATOR = 9
+RT_RCDATA = 10
+RT_MESSAGETABLE = 11
+DIFFERENCE = 11
+RT_GROUP_CURSOR = (RT_CURSOR + DIFFERENCE)
+RT_GROUP_ICON = (RT_ICON + DIFFERENCE)
+RT_VERSION = 16
+RT_DLGINCLUDE = 17
+RT_PLUGPLAY = 19
+RT_VXD = 20
+RT_ANICURSOR = 21
+RT_ANIICON = 22
+RT_HTML = 23
+# from winuser.h line 218
+SB_HORZ = 0
+SB_VERT = 1
+SB_CTL = 2
+SB_BOTH = 3
+SB_LINEUP = 0
+SB_LINELEFT = 0
+SB_LINEDOWN = 1
+SB_LINERIGHT = 1
+SB_PAGEUP = 2
+SB_PAGELEFT = 2
+SB_PAGEDOWN = 3
+SB_PAGERIGHT = 3
+SB_THUMBPOSITION = 4
+SB_THUMBTRACK = 5
+SB_TOP = 6
+SB_LEFT = 6
+SB_BOTTOM = 7
+SB_RIGHT = 7
+SB_ENDSCROLL = 8
+SW_HIDE = 0
+SW_SHOWNORMAL = 1
+SW_NORMAL = 1
+SW_SHOWMINIMIZED = 2
+SW_SHOWMAXIMIZED = 3
+SW_MAXIMIZE = 3
+SW_SHOWNOACTIVATE = 4
+SW_SHOW = 5
+SW_MINIMIZE = 6
+SW_SHOWMINNOACTIVE = 7
+SW_SHOWNA = 8
+SW_RESTORE = 9
+SW_SHOWDEFAULT = 10
+SW_FORCEMINIMIZE = 11
+SW_MAX = 11
+HIDE_WINDOW = 0
+SHOW_OPENWINDOW = 1
+SHOW_ICONWINDOW = 2
+SHOW_FULLSCREEN = 3
+SHOW_OPENNOACTIVATE = 4
+SW_PARENTCLOSING = 1
+SW_OTHERZOOM = 2
+SW_PARENTOPENING = 3
+SW_OTHERUNZOOM = 4
+AW_HOR_POSITIVE = 1
+AW_HOR_NEGATIVE = 2
+AW_VER_POSITIVE = 4
+AW_VER_NEGATIVE = 8
+AW_CENTER = 16
+AW_HIDE = 65536
+AW_ACTIVATE = 131072
+AW_SLIDE = 262144
+AW_BLEND = 524288
+KF_EXTENDED = 256
+KF_DLGMODE = 2048
+KF_MENUMODE = 4096
+KF_ALTDOWN = 8192
+KF_REPEAT = 16384
+KF_UP = 32768
+VK_LBUTTON = 1
+VK_RBUTTON = 2
+VK_CANCEL = 3
+VK_MBUTTON = 4
+VK_BACK = 8
+VK_TAB = 9
+VK_CLEAR = 12
+VK_RETURN = 13
+VK_SHIFT = 16
+VK_CONTROL = 17
+VK_MENU = 18
+VK_PAUSE = 19
+VK_CAPITAL = 20
+VK_KANA = 21
+VK_HANGEUL = 21  # old name - should be here for compatibility
+VK_HANGUL = 21
+VK_JUNJA = 23
+VK_FINAL = 24
+VK_HANJA = 25
+VK_KANJI = 25
+VK_ESCAPE = 27
+VK_CONVERT = 28
+VK_NONCONVERT = 29
+VK_ACCEPT = 30
+VK_MODECHANGE = 31
+VK_SPACE = 32
+VK_PRIOR = 33
+VK_NEXT = 34
+VK_END = 35
+VK_HOME = 36
+VK_LEFT = 37
+VK_UP = 38
+VK_RIGHT = 39
+VK_DOWN = 40
+VK_SELECT = 41
+VK_PRINT = 42
+VK_EXECUTE = 43
+VK_SNAPSHOT = 44
+VK_INSERT = 45
+VK_DELETE = 46
+VK_HELP = 47
+VK_LWIN = 91
+VK_RWIN = 92
+VK_APPS = 93
+VK_NUMPAD0 = 96
+VK_NUMPAD1 = 97
+VK_NUMPAD2 = 98
+VK_NUMPAD3 = 99
+VK_NUMPAD4 = 100
+VK_NUMPAD5 = 101
+VK_NUMPAD6 = 102
+VK_NUMPAD7 = 103
+VK_NUMPAD8 = 104
+VK_NUMPAD9 = 105
+VK_MULTIPLY = 106
+VK_ADD = 107
+VK_SEPARATOR = 108
+VK_SUBTRACT = 109
+VK_DECIMAL = 110
+VK_DIVIDE = 111
+VK_F1 = 112
+VK_F2 = 113
+VK_F3 = 114
+VK_F4 = 115
+VK_F5 = 116
+VK_F6 = 117
+VK_F7 = 118
+VK_F8 = 119
+VK_F9 = 120
+VK_F10 = 121
+VK_F11 = 122
+VK_F12 = 123
+VK_F13 = 124
+VK_F14 = 125
+VK_F15 = 126
+VK_F16 = 127
+VK_F17 = 128
+VK_F18 = 129
+VK_F19 = 130
+VK_F20 = 131
+VK_F21 = 132
+VK_F22 = 133
+VK_F23 = 134
+VK_F24 = 135
+VK_NUMLOCK = 144
+VK_SCROLL = 145
+VK_LSHIFT = 160
+VK_RSHIFT = 161
+VK_LCONTROL = 162
+VK_RCONTROL = 163
+VK_LMENU = 164
+VK_RMENU = 165
+VK_PROCESSKEY = 229
+VK_ATTN = 246
+VK_CRSEL = 247
+VK_EXSEL = 248
+VK_EREOF = 249
+VK_PLAY = 250
+VK_ZOOM = 251
+VK_NONAME = 252
+VK_PA1 = 253
+VK_OEM_CLEAR = 254
+# multi-media related "keys"
+MOUSEEVENTF_XDOWN = 0x0080
+MOUSEEVENTF_XUP = 0x0100
+MOUSEEVENTF_WHEEL = 0x0800
+VK_XBUTTON1 = 0x05
+VK_XBUTTON2 = 0x06
+VK_VOLUME_MUTE = 0xAD
+VK_VOLUME_DOWN = 0xAE
+VK_VOLUME_UP = 0xAF
+VK_MEDIA_NEXT_TRACK = 0xB0
+VK_MEDIA_PREV_TRACK = 0xB1
+VK_MEDIA_PLAY_PAUSE = 0xB3
+VK_BROWSER_BACK = 0xA6
+VK_BROWSER_FORWARD = 0xA7
+WH_MIN = (-1)
+WH_MSGFILTER = (-1)
+WH_JOURNALRECORD = 0
+WH_JOURNALPLAYBACK = 1
+WH_KEYBOARD = 2
+WH_GETMESSAGE = 3
+WH_CALLWNDPROC = 4
+WH_CBT = 5
+WH_SYSMSGFILTER = 6
+WH_MOUSE = 7
+WH_HARDWARE = 8
+WH_DEBUG = 9
+WH_SHELL = 10
+WH_FOREGROUNDIDLE = 11
+WH_CALLWNDPROCRET = 12
+WH_KEYBOARD_LL = 13
+WH_MOUSE_LL = 14
+WH_MAX = 14
+WH_MINHOOK = WH_MIN
+WH_MAXHOOK = WH_MAX
+HC_ACTION = 0
+HC_GETNEXT = 1
+HC_SKIP = 2
+HC_NOREMOVE = 3
+HC_NOREM = HC_NOREMOVE
+HC_SYSMODALON = 4
+HC_SYSMODALOFF = 5
+HCBT_MOVESIZE = 0
+HCBT_MINMAX = 1
+HCBT_QS = 2
+HCBT_CREATEWND = 3
+HCBT_DESTROYWND = 4
+HCBT_ACTIVATE = 5
+HCBT_CLICKSKIPPED = 6
+HCBT_KEYSKIPPED = 7
+HCBT_SYSCOMMAND = 8
+HCBT_SETFOCUS = 9
+MSGF_DIALOGBOX = 0
+MSGF_MESSAGEBOX = 1
+MSGF_MENU = 2
+#MSGF_MOVE = 3
+#MSGF_SIZE = 4
+MSGF_SCROLLBAR = 5
+MSGF_NEXTWINDOW = 6
+#MSGF_MAINLOOP = 8
+MSGF_MAX = 8
+MSGF_USER = 4096
+HSHELL_WINDOWCREATED = 1
+HSHELL_WINDOWDESTROYED = 2
+HSHELL_ACTIVATESHELLWINDOW = 3
+HSHELL_WINDOWACTIVATED = 4
+HSHELL_GETMINRECT = 5
+HSHELL_REDRAW = 6
+HSHELL_TASKMAN = 7
+HSHELL_LANGUAGE = 8
+HSHELL_ACCESSIBILITYSTATE = 11
+ACCESS_STICKYKEYS = 1
+ACCESS_FILTERKEYS = 2
+ACCESS_MOUSEKEYS = 3
+# winuser.h line 624
+LLKHF_EXTENDED = 1
+LLKHF_INJECTED = 16
+LLKHF_ALTDOWN = 32
+LLKHF_UP = 128
+LLMHF_INJECTED = 1
+# line 692
+HKL_PREV = 0
+HKL_NEXT = 1
+KLF_ACTIVATE = 1
+KLF_SUBSTITUTE_OK = 2
+KLF_UNLOADPREVIOUS = 4
+KLF_REORDER = 8
+KLF_REPLACELANG = 16
+KLF_NOTELLSHELL = 128
+KLF_SETFORPROCESS = 256
+KL_NAMELENGTH = 9
+DESKTOP_READOBJECTS = 1
+DESKTOP_CREATEWINDOW = 2
+DESKTOP_CREATEMENU = 4
+DESKTOP_HOOKCONTROL = 8
+DESKTOP_JOURNALRECORD = 16
+DESKTOP_JOURNALPLAYBACK = 32
+DESKTOP_ENUMERATE = 64
+DESKTOP_WRITEOBJECTS = 128
+DESKTOP_SWITCHDESKTOP = 256
+DF_ALLOWOTHERACCOUNTHOOK = 1
+WINSTA_ENUMDESKTOPS = 1
+WINSTA_READATTRIBUTES = 2
+WINSTA_ACCESSCLIPBOARD = 4
+WINSTA_CREATEDESKTOP = 8
+WINSTA_WRITEATTRIBUTES = 16
+WINSTA_ACCESSGLOBALATOMS = 32
+WINSTA_EXITWINDOWS = 64
+WINSTA_ENUMERATE = 256
+WINSTA_READSCREEN = 512
+WSF_VISIBLE = 1
+UOI_FLAGS = 1
+UOI_NAME = 2
+UOI_TYPE = 3
+UOI_USER_SID = 4
+GWL_WNDPROC = (-4)
+GWL_HINSTANCE = (-6)
+GWL_HWNDPARENT = (-8)
+GWL_STYLE = (-16)
+GWL_EXSTYLE = (-20)
+GWL_USERDATA = (-21)
+GWL_ID = (-12)
+GCL_MENUNAME = (-8)
+GCL_HBRBACKGROUND = (-10)
+GCL_HCURSOR = (-12)
+GCL_HICON = (-14)
+GCL_HMODULE = (-16)
+GCL_CBWNDEXTRA = (-18)
+GCL_CBCLSEXTRA = (-20)
+GCL_WNDPROC = (-24)
+GCL_STYLE = (-26)
+GCW_ATOM = (-32)
+GCL_HICONSM = (-34)
+# line 1291
+WM_NULL = 0
+WM_CREATE = 1
+WM_DESTROY = 2
+WM_MOVE = 3
+WM_SIZE = 5
+WM_ACTIVATE = 6
+WA_INACTIVE = 0
+WA_ACTIVE = 1
+WA_CLICKACTIVE = 2
+WM_SETFOCUS = 7
+WM_KILLFOCUS = 8
+WM_ENABLE = 10
+WM_SETREDRAW = 11
+WM_SETTEXT = 12
+WM_GETTEXT = 13
+WM_GETTEXTLENGTH = 14
+WM_PAINT = 15
+WM_CLOSE = 16
+WM_QUERYENDSESSION = 17
+WM_QUIT = 18
+WM_QUERYOPEN = 19
+WM_ERASEBKGND = 20
+WM_SYSCOLORCHANGE = 21
+WM_ENDSESSION = 22
+WM_SHOWWINDOW = 24
+WM_WININICHANGE = 26
+WM_SETTINGCHANGE = WM_WININICHANGE
+WM_DEVMODECHANGE = 27
+WM_ACTIVATEAPP = 28
+WM_FONTCHANGE = 29
+WM_TIMECHANGE = 30
+WM_CANCELMODE = 31
+WM_SETCURSOR = 32
+WM_MOUSEACTIVATE = 33
+WM_CHILDACTIVATE = 34
+WM_QUEUESYNC = 35
+WM_GETMINMAXINFO = 36
+WM_PAINTICON = 38
+WM_ICONERASEBKGND = 39
+WM_NEXTDLGCTL = 40
+WM_SPOOLERSTATUS = 42
+WM_DRAWITEM = 43
+WM_MEASUREITEM = 44
+WM_DELETEITEM = 45
+WM_VKEYTOITEM = 46
+WM_CHARTOITEM = 47
+WM_SETFONT = 48
+WM_GETFONT = 49
+WM_SETHOTKEY = 50
+WM_GETHOTKEY = 51
+WM_QUERYDRAGICON = 55
+WM_COMPAREITEM = 57
+WM_GETOBJECT = 61
+WM_COMPACTING = 65
+WM_COMMNOTIFY = 68
+WM_WINDOWPOSCHANGING = 70
+WM_WINDOWPOSCHANGED = 71
+WM_POWER = 72
+PWR_OK = 1
+PWR_FAIL = (-1)
+PWR_SUSPENDREQUEST = 1
+PWR_SUSPENDRESUME = 2
+PWR_CRITICALRESUME = 3
+WM_COPYDATA = 74
+WM_CANCELJOURNAL = 75
+WM_NOTIFY = 78
+WM_INPUTLANGCHANGEREQUEST = 80
+WM_INPUTLANGCHANGE = 81
+WM_TCARD = 82
+WM_HELP = 83
+WM_USERCHANGED = 84
+WM_NOTIFYFORMAT = 85
+NFR_ANSI = 1
+NFR_UNICODE = 2
+NF_QUERY = 3
+NF_REQUERY = 4
+WM_CONTEXTMENU = 123
+WM_STYLECHANGING = 124
+WM_STYLECHANGED = 125
+WM_DISPLAYCHANGE = 126
+WM_GETICON = 127
+WM_SETICON = 128
+WM_NCCREATE = 129
+WM_NCDESTROY = 130
+WM_NCCALCSIZE = 131
+WM_NCHITTEST = 132
+WM_NCPAINT = 133
+WM_NCACTIVATE = 134
+WM_GETDLGCODE = 135
+WM_SYNCPAINT = 136
+WM_NCMOUSEMOVE = 160
+WM_NCLBUTTONDOWN = 161
+WM_NCLBUTTONUP = 162
+WM_NCLBUTTONDBLCLK = 163
+WM_NCRBUTTONDOWN = 164
+WM_NCRBUTTONUP = 165
+WM_NCRBUTTONDBLCLK = 166
+WM_NCMBUTTONDOWN = 167
+WM_NCMBUTTONUP = 168
+WM_NCMBUTTONDBLCLK = 169
+WM_KEYFIRST = 256
+WM_KEYDOWN = 256
+WM_KEYUP = 257
+WM_CHAR = 258
+WM_DEADCHAR = 259
+WM_SYSKEYDOWN = 260
+WM_SYSKEYUP = 261
+WM_SYSCHAR = 262
+WM_SYSDEADCHAR = 263
+WM_KEYLAST = 264
+WM_IME_STARTCOMPOSITION = 269
+WM_IME_ENDCOMPOSITION = 270
+WM_IME_COMPOSITION = 271
+WM_IME_KEYLAST = 271
+WM_INITDIALOG = 272
+WM_COMMAND = 273
+WM_SYSCOMMAND = 274
+WM_TIMER = 275
+WM_HSCROLL = 276
+WM_VSCROLL = 277
+WM_INITMENU = 278
+WM_INITMENUPOPUP = 279
+WM_MENUSELECT = 287
+WM_MENUCHAR = 288
+WM_ENTERIDLE = 289
+WM_MENURBUTTONUP = 290
+WM_MENUDRAG = 291
+WM_MENUGETOBJECT = 292
+WM_UNINITMENUPOPUP = 293
+WM_MENUCOMMAND = 294
+WM_CTLCOLORMSGBOX = 306
+WM_CTLCOLOREDIT = 307
+WM_CTLCOLORLISTBOX = 308
+WM_CTLCOLORBTN = 309
+WM_CTLCOLORDLG = 310
+WM_CTLCOLORSCROLLBAR = 311
+WM_CTLCOLORSTATIC = 312
+WM_MOUSEFIRST = 512
+WM_MOUSEMOVE = 512
+WM_LBUTTONDOWN = 513
+WM_LBUTTONUP = 514
+WM_LBUTTONDBLCLK = 515
+WM_RBUTTONDOWN = 516
+WM_RBUTTONUP = 517
+WM_RBUTTONDBLCLK = 518
+WM_MBUTTONDOWN = 519
+WM_MBUTTONUP = 520
+WM_MBUTTONDBLCLK = 521
+WM_MOUSEWHEEL = 522
+WM_MOUSELAST = 522
+WHEEL_DELTA = 120     # Value for rolling one detent
+WHEEL_PAGESCROLL = -1 # Scroll one page
+WM_PARENTNOTIFY = 528
+MENULOOP_WINDOW = 0
+MENULOOP_POPUP = 1
+WM_ENTERMENULOOP = 529
+WM_EXITMENULOOP = 530
+WM_NEXTMENU = 531
+WM_SIZING = 532
+WM_CAPTURECHANGED = 533
+WM_MOVING = 534
+WM_POWERBROADCAST = 536
+PBT_APMQUERYSUSPEND = 0
+PBT_APMQUERYSTANDBY = 1
+PBT_APMQUERYSUSPENDFAILED = 2
+PBT_APMQUERYSTANDBYFAILED = 3
+PBT_APMSUSPEND = 4
+PBT_APMSTANDBY = 5
+PBT_APMRESUMECRITICAL = 6
+PBT_APMRESUMESUSPEND = 7
+PBT_APMRESUMESTANDBY = 8
+PBTF_APMRESUMEFROMFAILURE = 1
+PBT_APMBATTERYLOW = 9
+PBT_APMPOWERSTATUSCHANGE = 10
+PBT_APMOEMEVENT = 11
+PBT_APMRESUMEAUTOMATIC = 18
+WM_DEVICECHANGE = 537
+WM_MDICREATE = 544
+WM_MDIDESTROY = 545
+WM_MDIACTIVATE = 546
+WM_MDIRESTORE = 547
+WM_MDINEXT = 548
+WM_MDIMAXIMIZE = 549
+WM_MDITILE = 550
+WM_MDICASCADE = 551
+WM_MDIICONARRANGE = 552
+WM_MDIGETACTIVE = 553
+WM_MDISETMENU = 560
+WM_ENTERSIZEMOVE = 561
+WM_EXITSIZEMOVE = 562
+WM_DROPFILES = 563
+WM_MDIREFRESHMENU = 564
+WM_IME_SETCONTEXT = 641
+WM_IME_NOTIFY = 642
+WM_IME_CONTROL = 643
+WM_IME_COMPOSITIONFULL = 644
+WM_IME_SELECT = 645
+WM_IME_CHAR = 646
+WM_IME_REQUEST = 648
+WM_IME_KEYDOWN = 656
+WM_IME_KEYUP = 657
+WM_MOUSEHOVER = 673
+WM_MOUSELEAVE = 675
+WM_CUT = 768
+WM_COPY = 769
+WM_PASTE = 770
+WM_CLEAR = 771
+WM_UNDO = 772
+WM_RENDERFORMAT = 773
+WM_RENDERALLFORMATS = 774
+WM_DESTROYCLIPBOARD = 775
+WM_DRAWCLIPBOARD = 776
+WM_PAINTCLIPBOARD = 777
+WM_VSCROLLCLIPBOARD = 778
+WM_SIZECLIPBOARD = 779
+WM_ASKCBFORMATNAME = 780
+WM_CHANGECBCHAIN = 781
+WM_HSCROLLCLIPBOARD = 782
+WM_QUERYNEWPALETTE = 783
+WM_PALETTEISCHANGING = 784
+WM_PALETTECHANGED = 785
+WM_HOTKEY = 786
+WM_PRINT = 791
+WM_PRINTCLIENT = 792
+WM_HANDHELDFIRST = 856
+WM_HANDHELDLAST = 863
+WM_AFXFIRST = 864
+WM_AFXLAST = 895
+WM_PENWINFIRST = 896
+WM_PENWINLAST = 911
+WM_APP = 32768
+WMSZ_LEFT = 1
+WMSZ_RIGHT = 2
+WMSZ_TOP = 3
+WMSZ_TOPLEFT = 4
+WMSZ_TOPRIGHT = 5
+WMSZ_BOTTOM = 6
+WMSZ_BOTTOMLEFT = 7
+WMSZ_BOTTOMRIGHT = 8
+#ST_BEGINSWP = 0
+#ST_ENDSWP = 1
+HTERROR = (-2)
+HTTRANSPARENT = (-1)
+HTNOWHERE = 0
+HTCLIENT = 1
+HTCAPTION = 2
+HTSYSMENU = 3
+HTGROWBOX = 4
+HTSIZE = HTGROWBOX
+HTMENU = 5
+HTHSCROLL = 6
+HTVSCROLL = 7
+HTMINBUTTON = 8
+HTMAXBUTTON = 9
+HTLEFT = 10
+HTRIGHT = 11
+HTTOP = 12
+HTTOPLEFT = 13
+HTTOPRIGHT = 14
+HTBOTTOM = 15
+HTBOTTOMLEFT = 16
+HTBOTTOMRIGHT = 17
+HTBORDER = 18
+HTREDUCE = HTMINBUTTON
+HTZOOM = HTMAXBUTTON
+HTSIZEFIRST = HTLEFT
+HTSIZELAST = HTBOTTOMRIGHT
+HTOBJECT = 19
+HTCLOSE = 20
+HTHELP = 21
+SMTO_NORMAL = 0
+SMTO_BLOCK = 1
+SMTO_ABORTIFHUNG = 2
+SMTO_NOTIMEOUTIFNOTHUNG = 8
+MA_ACTIVATE = 1
+MA_ACTIVATEANDEAT = 2
+MA_NOACTIVATE = 3
+MA_NOACTIVATEANDEAT = 4
+ICON_SMALL = 0
+ICON_BIG = 1
+SIZE_RESTORED = 0
+SIZE_MINIMIZED = 1
+SIZE_MAXIMIZED = 2
+SIZE_MAXSHOW = 3
+SIZE_MAXHIDE = 4
+SIZENORMAL = SIZE_RESTORED
+SIZEICONIC = SIZE_MINIMIZED
+SIZEFULLSCREEN = SIZE_MAXIMIZED
+SIZEZOOMSHOW = SIZE_MAXSHOW
+SIZEZOOMHIDE = SIZE_MAXHIDE
+WVR_ALIGNTOP = 16
+WVR_ALIGNLEFT = 32
+WVR_ALIGNBOTTOM = 64
+WVR_ALIGNRIGHT = 128
+WVR_HREDRAW = 256
+WVR_VREDRAW = 512
+WVR_REDRAW = (WVR_HREDRAW | WVR_VREDRAW)
+WVR_VALIDRECTS = 1024
+MK_LBUTTON = 1
+MK_RBUTTON = 2
+MK_SHIFT = 4
+MK_CONTROL = 8
+MK_MBUTTON = 16
+TME_HOVER = 1
+TME_LEAVE = 2
+TME_QUERY = 1073741824
+TME_CANCEL = -2147483648
+HOVER_DEFAULT = -1
+WS_OVERLAPPED = 0
+WS_POPUP = -2147483648
+WS_CHILD = 1073741824
+WS_MINIMIZE = 536870912
+WS_VISIBLE = 268435456
+WS_DISABLED = 134217728
+WS_CLIPSIBLINGS = 67108864
+WS_CLIPCHILDREN = 33554432
+WS_MAXIMIZE = 16777216
+WS_CAPTION = 12582912
+WS_BORDER = 8388608
+WS_DLGFRAME = 4194304
+WS_VSCROLL = 2097152
+WS_HSCROLL = 1048576
+WS_SYSMENU = 524288
+WS_THICKFRAME = 262144
+WS_GROUP = 131072
+WS_TABSTOP = 65536
+WS_MINIMIZEBOX = 131072
+WS_MAXIMIZEBOX = 65536
+WS_TILED = WS_OVERLAPPED
+WS_ICONIC = WS_MINIMIZE
+WS_SIZEBOX = WS_THICKFRAME
+WS_OVERLAPPEDWINDOW = (WS_OVERLAPPED     | \
+                             WS_CAPTION        | \
+                             WS_SYSMENU        | \
+                             WS_THICKFRAME     | \
+                             WS_MINIMIZEBOX    | \
+                             WS_MAXIMIZEBOX)
+WS_POPUPWINDOW = (WS_POPUP          | \
+                             WS_BORDER         | \
+                             WS_SYSMENU)
+WS_CHILDWINDOW = (WS_CHILD)
+WS_TILEDWINDOW = WS_OVERLAPPEDWINDOW
+WS_EX_DLGMODALFRAME = 1
+WS_EX_NOPARENTNOTIFY = 4
+WS_EX_TOPMOST = 8
+WS_EX_ACCEPTFILES = 16
+WS_EX_TRANSPARENT = 32
+WS_EX_MDICHILD = 64
+WS_EX_TOOLWINDOW = 128
+WS_EX_WINDOWEDGE = 256
+WS_EX_CLIENTEDGE = 512
+WS_EX_CONTEXTHELP = 1024
+WS_EX_RIGHT = 4096
+WS_EX_LEFT = 0
+WS_EX_RTLREADING = 8192
+WS_EX_LTRREADING = 0
+WS_EX_LEFTSCROLLBAR = 16384
+WS_EX_RIGHTSCROLLBAR = 0
+WS_EX_CONTROLPARENT = 65536
+WS_EX_STATICEDGE = 131072
+WS_EX_APPWINDOW = 262144
+WS_EX_OVERLAPPEDWINDOW = (WS_EX_WINDOWEDGE | WS_EX_CLIENTEDGE)
+WS_EX_PALETTEWINDOW = (WS_EX_WINDOWEDGE | WS_EX_TOOLWINDOW | WS_EX_TOPMOST)
+WS_EX_LAYERED = 0x00080000
+WS_EX_NOINHERITLAYOUT = 0x00100000
+WS_EX_LAYOUTRTL = 0x00400000
+WS_EX_COMPOSITED = 0x02000000
+WS_EX_NOACTIVATE = 0x08000000
+
+CS_VREDRAW = 1
+CS_HREDRAW = 2
+#CS_KEYCVTWINDOW = 0x0004
+CS_DBLCLKS = 8
+CS_OWNDC = 32
+CS_CLASSDC = 64
+CS_PARENTDC = 128
+#CS_NOKEYCVT = 0x0100
+CS_NOCLOSE = 512
+CS_SAVEBITS = 2048
+CS_BYTEALIGNCLIENT = 4096
+CS_BYTEALIGNWINDOW = 8192
+CS_GLOBALCLASS = 16384
+CS_IME = 65536
+PRF_CHECKVISIBLE = 1
+PRF_NONCLIENT = 2
+PRF_CLIENT = 4
+PRF_ERASEBKGND = 8
+PRF_CHILDREN = 16
+PRF_OWNED = 32
+BDR_RAISEDOUTER = 1
+BDR_SUNKENOUTER = 2
+BDR_RAISEDINNER = 4
+BDR_SUNKENINNER = 8
+BDR_OUTER = 3
+BDR_INNER = 12
+#BDR_RAISED = 0x0005
+#BDR_SUNKEN = 0x000a
+EDGE_RAISED = (BDR_RAISEDOUTER | BDR_RAISEDINNER)
+EDGE_SUNKEN = (BDR_SUNKENOUTER | BDR_SUNKENINNER)
+EDGE_ETCHED = (BDR_SUNKENOUTER | BDR_RAISEDINNER)
+EDGE_BUMP = (BDR_RAISEDOUTER | BDR_SUNKENINNER)
+
+# winuser.h line 2879
+ISMEX_NOSEND = 0
+ISMEX_SEND = 1
+ISMEX_NOTIFY = 2
+ISMEX_CALLBACK = 4
+ISMEX_REPLIED = 8
+CW_USEDEFAULT = -2147483648
+FLASHW_STOP = 0
+FLASHW_CAPTION = 1
+FLASHW_TRAY = 2
+FLASHW_ALL = (FLASHW_CAPTION | FLASHW_TRAY)
+FLASHW_TIMER = 4
+FLASHW_TIMERNOFG = 12
+
+# winuser.h line 7963
+DS_ABSALIGN = 1
+DS_SYSMODAL = 2
+DS_LOCALEDIT = 32
+DS_SETFONT = 64
+DS_MODALFRAME = 128
+DS_NOIDLEMSG = 256
+DS_SETFOREGROUND = 512
+DS_3DLOOK = 4
+DS_FIXEDSYS = 8
+DS_NOFAILCREATE = 16
+DS_CONTROL = 1024
+DS_CENTER = 2048
+DS_CENTERMOUSE = 4096
+DS_CONTEXTHELP = 8192
+DM_GETDEFID = (WM_USER+0)
+DM_SETDEFID = (WM_USER+1)
+DM_REPOSITION = (WM_USER+2)
+#PSM_PAGEINFO = (WM_USER+100)
+#PSM_SHEETINFO = (WM_USER+101)
+#PSI_SETACTIVE = 0x0001
+#PSI_KILLACTIVE = 0x0002
+#PSI_APPLY = 0x0003
+#PSI_RESET = 0x0004
+#PSI_HASHELP = 0x0005
+#PSI_HELP = 0x0006
+#PSI_CHANGED = 0x0001
+#PSI_GUISTART = 0x0002
+#PSI_REBOOT = 0x0003
+#PSI_GETSIBLINGS = 0x0004
+DC_HASDEFID = 21323
+DLGC_WANTARROWS = 1
+DLGC_WANTTAB = 2
+DLGC_WANTALLKEYS = 4
+DLGC_WANTMESSAGE = 4
+DLGC_HASSETSEL = 8
+DLGC_DEFPUSHBUTTON = 16
+DLGC_UNDEFPUSHBUTTON = 32
+DLGC_RADIOBUTTON = 64
+DLGC_WANTCHARS = 128
+DLGC_STATIC = 256
+DLGC_BUTTON = 8192
+LB_CTLCODE = 0
+LB_OKAY = 0
+LB_ERR = (-1)
+LB_ERRSPACE = (-2)
+LBN_ERRSPACE = (-2)
+LBN_SELCHANGE = 1
+LBN_DBLCLK = 2
+LBN_SELCANCEL = 3
+LBN_SETFOCUS = 4
+LBN_KILLFOCUS = 5
+LB_ADDSTRING = 384
+LB_INSERTSTRING = 385
+LB_DELETESTRING = 386
+LB_SELITEMRANGEEX = 387
+LB_RESETCONTENT = 388
+LB_SETSEL = 389
+LB_SETCURSEL = 390
+LB_GETSEL = 391
+LB_GETCURSEL = 392
+LB_GETTEXT = 393
+LB_GETTEXTLEN = 394
+LB_GETCOUNT = 395
+LB_SELECTSTRING = 396
+LB_DIR = 397
+LB_GETTOPINDEX = 398
+LB_FINDSTRING = 399
+LB_GETSELCOUNT = 400
+LB_GETSELITEMS = 401
+LB_SETTABSTOPS = 402
+LB_GETHORIZONTALEXTENT = 403
+LB_SETHORIZONTALEXTENT = 404
+LB_SETCOLUMNWIDTH = 405
+LB_ADDFILE = 406
+LB_SETTOPINDEX = 407
+LB_GETITEMRECT = 408
+LB_GETITEMDATA = 409
+LB_SETITEMDATA = 410
+LB_SELITEMRANGE = 411
+LB_SETANCHORINDEX = 412
+LB_GETANCHORINDEX = 413
+LB_SETCARETINDEX = 414
+LB_GETCARETINDEX = 415
+LB_SETITEMHEIGHT = 416
+LB_GETITEMHEIGHT = 417
+LB_FINDSTRINGEXACT = 418
+LB_SETLOCALE = 421
+LB_GETLOCALE = 422
+LB_SETCOUNT = 423
+LB_INITSTORAGE = 424
+LB_ITEMFROMPOINT = 425
+LB_MSGMAX = 432
+LBS_NOTIFY = 1
+LBS_SORT = 2
+LBS_NOREDRAW = 4
+LBS_MULTIPLESEL = 8
+LBS_OWNERDRAWFIXED = 16
+LBS_OWNERDRAWVARIABLE = 32
+LBS_HASSTRINGS = 64
+LBS_USETABSTOPS = 128
+LBS_NOINTEGRALHEIGHT = 256
+LBS_MULTICOLUMN = 512
+LBS_WANTKEYBOARDINPUT = 1024
+LBS_EXTENDEDSEL = 2048
+LBS_DISABLENOSCROLL = 4096
+LBS_NODATA = 8192
+LBS_NOSEL = 16384
+LBS_STANDARD = (LBS_NOTIFY | LBS_SORT | WS_VSCROLL | WS_BORDER)
+CB_OKAY = 0
+CB_ERR = (-1)
+CB_ERRSPACE = (-2)
+CBN_ERRSPACE = (-1)
+CBN_SELCHANGE = 1
+CBN_DBLCLK = 2
+CBN_SETFOCUS = 3
+CBN_KILLFOCUS = 4
+CBN_EDITCHANGE = 5
+CBN_EDITUPDATE = 6
+CBN_DROPDOWN = 7
+CBN_CLOSEUP = 8
+CBN_SELENDOK = 9
+CBN_SELENDCANCEL = 10
+CBS_SIMPLE = 1
+CBS_DROPDOWN = 2
+CBS_DROPDOWNLIST = 3
+CBS_OWNERDRAWFIXED = 16
+CBS_OWNERDRAWVARIABLE = 32
+CBS_AUTOHSCROLL = 64
+CBS_OEMCONVERT = 128
+CBS_SORT = 256
+CBS_HASSTRINGS = 512
+CBS_NOINTEGRALHEIGHT = 1024
+CBS_DISABLENOSCROLL = 2048
+CBS_UPPERCASE = 8192
+CBS_LOWERCASE = 16384
+CB_GETEDITSEL = 320
+CB_LIMITTEXT = 321
+CB_SETEDITSEL = 322
+CB_ADDSTRING = 323
+CB_DELETESTRING = 324
+CB_DIR = 325
+CB_GETCOUNT = 326
+CB_GETCURSEL = 327
+CB_GETLBTEXT = 328
+CB_GETLBTEXTLEN = 329
+CB_INSERTSTRING = 330
+CB_RESETCONTENT = 331
+CB_FINDSTRING = 332
+CB_SELECTSTRING = 333
+CB_SETCURSEL = 334
+CB_SHOWDROPDOWN = 335
+CB_GETITEMDATA = 336
+CB_SETITEMDATA = 337
+CB_GETDROPPEDCONTROLRECT = 338
+CB_SETITEMHEIGHT = 339
+CB_GETITEMHEIGHT = 340
+CB_SETEXTENDEDUI = 341
+CB_GETEXTENDEDUI = 342
+CB_GETDROPPEDSTATE = 343
+CB_FINDSTRINGEXACT = 344
+CB_SETLOCALE = 345
+CB_GETLOCALE = 346
+CB_GETTOPINDEX = 347
+CB_SETTOPINDEX = 348
+CB_GETHORIZONTALEXTENT = 349
+CB_SETHORIZONTALEXTENT = 350
+CB_GETDROPPEDWIDTH = 351
+CB_SETDROPPEDWIDTH = 352
+CB_INITSTORAGE = 353
+CB_MSGMAX = 354
+SBS_HORZ = 0
+SBS_VERT = 1
+SBS_TOPALIGN = 2
+SBS_LEFTALIGN = 2
+SBS_BOTTOMALIGN = 4
+SBS_RIGHTALIGN = 4
+SBS_SIZEBOXTOPLEFTALIGN = 2
+SBS_SIZEBOXBOTTOMRIGHTALIGN = 4
+SBS_SIZEBOX = 8
+SBS_SIZEGRIP = 16
+SBM_SETPOS = 224
+SBM_GETPOS = 225
+SBM_SETRANGE = 226
+SBM_SETRANGEREDRAW = 230
+SBM_GETRANGE = 227
+SBM_ENABLE_ARROWS = 228
+SBM_SETSCROLLINFO = 233
+SBM_GETSCROLLINFO = 234
+SIF_RANGE = 1
+SIF_PAGE = 2
+SIF_POS = 4
+SIF_DISABLENOSCROLL = 8
+SIF_TRACKPOS = 16
+SIF_ALL = (SIF_RANGE | SIF_PAGE | SIF_POS | SIF_TRACKPOS)
+MDIS_ALLCHILDSTYLES = 1
+MDITILE_VERTICAL = 0
+MDITILE_HORIZONTAL = 1
+MDITILE_SKIPDISABLED = 2
+
+IMC_GETCANDIDATEPOS = 7
+IMC_SETCANDIDATEPOS = 8
+IMC_GETCOMPOSITIONFONT = 9
+IMC_SETCOMPOSITIONFONT = 10
+IMC_GETCOMPOSITIONWINDOW = 11
+IMC_SETCOMPOSITIONWINDOW = 12
+IMC_GETSTATUSWINDOWPOS = 15
+IMC_SETSTATUSWINDOWPOS = 16
+IMC_CLOSESTATUSWINDOW = 33
+IMC_OPENSTATUSWINDOW = 34
+# Generated by h2py from \msvc20\include\winnt.h
+# hacked and split by mhammond.
+DELETE = (65536)
+READ_CONTROL = (131072)
+WRITE_DAC = (262144)
+WRITE_OWNER = (524288)
+SYNCHRONIZE = (1048576)
+STANDARD_RIGHTS_REQUIRED = (983040)
+STANDARD_RIGHTS_READ = (READ_CONTROL)
+STANDARD_RIGHTS_WRITE = (READ_CONTROL)
+STANDARD_RIGHTS_EXECUTE = (READ_CONTROL)
+STANDARD_RIGHTS_ALL = (2031616)
+SPECIFIC_RIGHTS_ALL = (65535)
+ACCESS_SYSTEM_SECURITY = (16777216)
+MAXIMUM_ALLOWED = (33554432)
+GENERIC_READ = (-2147483648)
+GENERIC_WRITE = (1073741824)
+GENERIC_EXECUTE = (536870912)
+GENERIC_ALL = (268435456)
+
+SERVICE_KERNEL_DRIVER = 1
+SERVICE_FILE_SYSTEM_DRIVER = 2
+SERVICE_ADAPTER = 4
+SERVICE_RECOGNIZER_DRIVER = 8
+SERVICE_DRIVER = (SERVICE_KERNEL_DRIVER | \
+                                        SERVICE_FILE_SYSTEM_DRIVER | \
+                                        SERVICE_RECOGNIZER_DRIVER)
+SERVICE_WIN32_OWN_PROCESS = 16
+SERVICE_WIN32_SHARE_PROCESS = 32
+SERVICE_WIN32 = (SERVICE_WIN32_OWN_PROCESS | \
+                                        SERVICE_WIN32_SHARE_PROCESS)
+SERVICE_INTERACTIVE_PROCESS = 256
+SERVICE_TYPE_ALL = (SERVICE_WIN32  | \
+                                        SERVICE_ADAPTER | \
+                                        SERVICE_DRIVER  | \
+                                        SERVICE_INTERACTIVE_PROCESS)
+SERVICE_BOOT_START = 0
+SERVICE_SYSTEM_START = 1
+SERVICE_AUTO_START = 2
+SERVICE_DEMAND_START = 3
+SERVICE_DISABLED = 4
+SERVICE_ERROR_IGNORE = 0
+SERVICE_ERROR_NORMAL = 1
+SERVICE_ERROR_SEVERE = 2
+SERVICE_ERROR_CRITICAL = 3
+TAPE_ERASE_SHORT = 0
+TAPE_ERASE_LONG = 1
+TAPE_LOAD = 0
+TAPE_UNLOAD = 1
+TAPE_TENSION = 2
+TAPE_LOCK = 3
+TAPE_UNLOCK = 4
+TAPE_FORMAT = 5
+TAPE_SETMARKS = 0
+TAPE_FILEMARKS = 1
+TAPE_SHORT_FILEMARKS = 2
+TAPE_LONG_FILEMARKS = 3
+TAPE_ABSOLUTE_POSITION = 0
+TAPE_LOGICAL_POSITION = 1
+TAPE_PSEUDO_LOGICAL_POSITION = 2
+TAPE_REWIND = 0
+TAPE_ABSOLUTE_BLOCK = 1
+TAPE_LOGICAL_BLOCK = 2
+TAPE_PSEUDO_LOGICAL_BLOCK = 3
+TAPE_SPACE_END_OF_DATA = 4
+TAPE_SPACE_RELATIVE_BLOCKS = 5
+TAPE_SPACE_FILEMARKS = 6
+TAPE_SPACE_SEQUENTIAL_FMKS = 7
+TAPE_SPACE_SETMARKS = 8
+TAPE_SPACE_SEQUENTIAL_SMKS = 9
+TAPE_DRIVE_FIXED = 1
+TAPE_DRIVE_SELECT = 2
+TAPE_DRIVE_INITIATOR = 4
+TAPE_DRIVE_ERASE_SHORT = 16
+TAPE_DRIVE_ERASE_LONG = 32
+TAPE_DRIVE_ERASE_BOP_ONLY = 64
+TAPE_DRIVE_ERASE_IMMEDIATE = 128
+TAPE_DRIVE_TAPE_CAPACITY = 256
+TAPE_DRIVE_TAPE_REMAINING = 512
+TAPE_DRIVE_FIXED_BLOCK = 1024
+TAPE_DRIVE_VARIABLE_BLOCK = 2048
+TAPE_DRIVE_WRITE_PROTECT = 4096
+TAPE_DRIVE_EOT_WZ_SIZE = 8192
+TAPE_DRIVE_ECC = 65536
+TAPE_DRIVE_COMPRESSION = 131072
+TAPE_DRIVE_PADDING = 262144
+TAPE_DRIVE_REPORT_SMKS = 524288
+TAPE_DRIVE_GET_ABSOLUTE_BLK = 1048576
+TAPE_DRIVE_GET_LOGICAL_BLK = 2097152
+TAPE_DRIVE_SET_EOT_WZ_SIZE = 4194304
+TAPE_DRIVE_LOAD_UNLOAD = -2147483647
+TAPE_DRIVE_TENSION = -2147483646
+TAPE_DRIVE_LOCK_UNLOCK = -2147483644
+TAPE_DRIVE_REWIND_IMMEDIATE = -2147483640
+TAPE_DRIVE_SET_BLOCK_SIZE = -2147483632
+TAPE_DRIVE_LOAD_UNLD_IMMED = -2147483616
+TAPE_DRIVE_TENSION_IMMED = -2147483584
+TAPE_DRIVE_LOCK_UNLK_IMMED = -2147483520
+TAPE_DRIVE_SET_ECC = -2147483392
+TAPE_DRIVE_SET_COMPRESSION = -2147483136
+TAPE_DRIVE_SET_PADDING = -2147482624
+TAPE_DRIVE_SET_REPORT_SMKS = -2147481600
+TAPE_DRIVE_ABSOLUTE_BLK = -2147479552
+TAPE_DRIVE_ABS_BLK_IMMED = -2147475456
+TAPE_DRIVE_LOGICAL_BLK = -2147467264
+TAPE_DRIVE_LOG_BLK_IMMED = -2147450880
+TAPE_DRIVE_END_OF_DATA = -2147418112
+TAPE_DRIVE_RELATIVE_BLKS = -2147352576
+TAPE_DRIVE_FILEMARKS = -2147221504
+TAPE_DRIVE_SEQUENTIAL_FMKS = -2146959360
+TAPE_DRIVE_SETMARKS = -2146435072
+TAPE_DRIVE_SEQUENTIAL_SMKS = -2145386496
+TAPE_DRIVE_REVERSE_POSITION = -2143289344
+TAPE_DRIVE_SPACE_IMMEDIATE = -2139095040
+TAPE_DRIVE_WRITE_SETMARKS = -2130706432
+TAPE_DRIVE_WRITE_FILEMARKS = -2113929216
+TAPE_DRIVE_WRITE_SHORT_FMKS = -2080374784
+TAPE_DRIVE_WRITE_LONG_FMKS = -2013265920
+TAPE_DRIVE_WRITE_MARK_IMMED = -1879048192
+TAPE_DRIVE_FORMAT = -1610612736
+TAPE_DRIVE_FORMAT_IMMEDIATE = -1073741824
+TAPE_FIXED_PARTITIONS = 0
+TAPE_SELECT_PARTITIONS = 1
+TAPE_INITIATOR_PARTITIONS = 2
+# Generated by h2py from \msvc20\include\winnt.h
+# hacked and split by mhammond.
+
+APPLICATION_ERROR_MASK = 536870912
+ERROR_SEVERITY_SUCCESS = 0
+ERROR_SEVERITY_INFORMATIONAL = 1073741824
+ERROR_SEVERITY_WARNING = -2147483648
+ERROR_SEVERITY_ERROR = -1073741824
+MINCHAR = 128
+MAXCHAR = 127
+MINSHORT = 32768
+MAXSHORT = 32767
+MINLONG = -2147483648
+MAXLONG = 2147483647
+MAXBYTE = 255
+MAXWORD = 65535
+MAXDWORD = -1
+LANG_NEUTRAL = 0
+LANG_BULGARIAN = 2
+LANG_CHINESE = 4
+LANG_CROATIAN = 26
+LANG_CZECH = 5
+LANG_DANISH = 6
+LANG_DUTCH = 19
+LANG_ENGLISH = 9
+LANG_FINNISH = 11
+LANG_FRENCH = 12
+LANG_GERMAN = 7
+LANG_GREEK = 8
+LANG_HUNGARIAN = 14
+LANG_ICELANDIC = 15
+LANG_ITALIAN = 16
+LANG_JAPANESE = 17
+LANG_KOREAN = 18
+LANG_NORWEGIAN = 20
+LANG_POLISH = 21
+LANG_PORTUGUESE = 22
+LANG_ROMANIAN = 24
+LANG_RUSSIAN = 25
+LANG_SLOVAK = 27
+LANG_SLOVENIAN = 36
+LANG_SPANISH = 10
+LANG_SWEDISH = 29
+LANG_TURKISH = 31
+SUBLANG_NEUTRAL = 0
+SUBLANG_DEFAULT = 1
+SUBLANG_SYS_DEFAULT = 2
+SUBLANG_CHINESE_TRADITIONAL = 1
+SUBLANG_CHINESE_SIMPLIFIED = 2
+SUBLANG_CHINESE_HONGKONG = 3
+SUBLANG_CHINESE_SINGAPORE = 4
+SUBLANG_DUTCH = 1
+SUBLANG_DUTCH_BELGIAN = 2
+SUBLANG_ENGLISH_US = 1
+SUBLANG_ENGLISH_UK = 2
+SUBLANG_ENGLISH_AUS = 3
+SUBLANG_ENGLISH_CAN = 4
+SUBLANG_ENGLISH_NZ = 5
+SUBLANG_ENGLISH_EIRE = 6
+SUBLANG_FRENCH = 1
+SUBLANG_FRENCH_BELGIAN = 2
+SUBLANG_FRENCH_CANADIAN = 3
+SUBLANG_FRENCH_SWISS = 4
+SUBLANG_GERMAN = 1
+SUBLANG_GERMAN_SWISS = 2
+SUBLANG_GERMAN_AUSTRIAN = 3
+SUBLANG_ITALIAN = 1
+SUBLANG_ITALIAN_SWISS = 2
+SUBLANG_NORWEGIAN_BOKMAL = 1
+SUBLANG_NORWEGIAN_NYNORSK = 2
+SUBLANG_PORTUGUESE = 2
+SUBLANG_PORTUGUESE_BRAZILIAN = 1
+SUBLANG_SPANISH = 1
+SUBLANG_SPANISH_MEXICAN = 2
+SUBLANG_SPANISH_MODERN = 3
+SORT_DEFAULT = 0
+SORT_JAPANESE_XJIS = 0
+SORT_JAPANESE_UNICODE = 1
+SORT_CHINESE_BIG5 = 0
+SORT_CHINESE_UNICODE = 1
+SORT_KOREAN_KSC = 0
+SORT_KOREAN_UNICODE = 1
def PRIMARYLANGID(lgid):
    """Return the primary language identifier (low 10 bits) of *lgid*."""
    return lgid & 0x3FF
+
def SUBLANGID(lgid):
    """Return the sublanguage identifier (bits 10 and above) of *lgid*."""
    return lgid >> 10
+
NLS_VALID_LOCALE_MASK = 1048575
# NOTE(review): the CONTEXT_* names below are assigned more than once.  The
# h2py conversion concatenated per-architecture sections of winnt.h (Alpha
# values first, then the plain 1/2/4 set), so each later assignment
# overwrites the earlier one and only the LAST values are visible at import
# time.  Do not reorder these lines.
CONTEXT_PORTABLE_32BIT = 1048576
CONTEXT_ALPHA = 131072
CONTEXT_CONTROL = (CONTEXT_ALPHA | 1)
CONTEXT_FLOATING_POINT = (CONTEXT_ALPHA | 2)
CONTEXT_INTEGER = (CONTEXT_ALPHA | 4)
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
SIZE_OF_80387_REGISTERS = 80
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
CONTEXT_CONTROL = 1
CONTEXT_FLOATING_POINT = 2
CONTEXT_INTEGER = 4
# Final effective value: CONTEXT_FULL == 1 | 2 | 4 == 7.
CONTEXT_FULL = (CONTEXT_CONTROL | CONTEXT_FLOATING_POINT | CONTEXT_INTEGER)
+PROCESS_TERMINATE = (1)
+PROCESS_CREATE_THREAD = (2)
+PROCESS_VM_OPERATION = (8)
+PROCESS_VM_READ = (16)
+PROCESS_VM_WRITE = (32)
+PROCESS_DUP_HANDLE = (64)
+PROCESS_CREATE_PROCESS = (128)
+PROCESS_SET_QUOTA = (256)
+PROCESS_SET_INFORMATION = (512)
+PROCESS_QUERY_INFORMATION = (1024)
+PROCESS_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED | SYNCHRONIZE | 4095)
+THREAD_TERMINATE = (1)
+THREAD_SUSPEND_RESUME = (2)
+THREAD_GET_CONTEXT = (8)
+THREAD_SET_CONTEXT = (16)
+THREAD_SET_INFORMATION = (32)
+THREAD_QUERY_INFORMATION = (64)
+THREAD_SET_THREAD_TOKEN = (128)
+THREAD_IMPERSONATE = (256)
+THREAD_DIRECT_IMPERSONATION = (512)
+TLS_MINIMUM_AVAILABLE = 64
+EVENT_MODIFY_STATE = 2
+MUTANT_QUERY_STATE = 1
+SEMAPHORE_MODIFY_STATE = 2
+TIME_ZONE_ID_UNKNOWN = 0
+TIME_ZONE_ID_STANDARD = 1
+TIME_ZONE_ID_DAYLIGHT = 2
+PROCESSOR_INTEL_386 = 386
+PROCESSOR_INTEL_486 = 486
+PROCESSOR_INTEL_PENTIUM = 586
+PROCESSOR_INTEL_860 = 860
+PROCESSOR_MIPS_R2000 = 2000
+PROCESSOR_MIPS_R3000 = 3000
+PROCESSOR_MIPS_R4000 = 4000
+PROCESSOR_ALPHA_21064 = 21064
+PROCESSOR_PPC_601 = 601
+PROCESSOR_PPC_603 = 603
+PROCESSOR_PPC_604 = 604
+PROCESSOR_PPC_620 = 620
+SECTION_QUERY = 1
+SECTION_MAP_WRITE = 2
+SECTION_MAP_READ = 4
+SECTION_MAP_EXECUTE = 8
+SECTION_EXTEND_SIZE = 16
+PAGE_NOACCESS = 1
+PAGE_READONLY = 2
+PAGE_READWRITE = 4
+PAGE_WRITECOPY = 8
+PAGE_EXECUTE = 16
+PAGE_EXECUTE_READ = 32
+PAGE_EXECUTE_READWRITE = 64
+PAGE_EXECUTE_WRITECOPY = 128
+PAGE_GUARD = 256
+PAGE_NOCACHE = 512
+MEM_COMMIT = 4096
+MEM_RESERVE = 8192
+MEM_DECOMMIT = 16384
+MEM_RELEASE = 32768
+MEM_FREE = 65536
+MEM_PRIVATE = 131072
+MEM_MAPPED = 262144
+MEM_TOP_DOWN = 1048576
+
+# Generated by h2py from \msvc20\include\winnt.h
+# hacked and split by mhammond.
+SEC_FILE = 8388608
+SEC_IMAGE = 16777216
+SEC_RESERVE = 67108864
+SEC_COMMIT = 134217728
+SEC_NOCACHE = 268435456
+MEM_IMAGE = SEC_IMAGE
+FILE_SHARE_READ = 1
+FILE_SHARE_WRITE = 2
+FILE_SHARE_DELETE = 4
+FILE_ATTRIBUTE_READONLY = 1
+FILE_ATTRIBUTE_HIDDEN = 2
+FILE_ATTRIBUTE_SYSTEM = 4
+FILE_ATTRIBUTE_DIRECTORY = 16
+FILE_ATTRIBUTE_ARCHIVE = 32
+FILE_ATTRIBUTE_NORMAL = 128
+FILE_ATTRIBUTE_TEMPORARY = 256
+FILE_ATTRIBUTE_ATOMIC_WRITE = 512
+FILE_ATTRIBUTE_XACTION_WRITE = 1024
+FILE_ATTRIBUTE_COMPRESSED = 2048
+FILE_NOTIFY_CHANGE_FILE_NAME = 1
+FILE_NOTIFY_CHANGE_DIR_NAME = 2
+FILE_NOTIFY_CHANGE_ATTRIBUTES = 4
+FILE_NOTIFY_CHANGE_SIZE = 8
+FILE_NOTIFY_CHANGE_LAST_WRITE = 16
+FILE_NOTIFY_CHANGE_SECURITY = 256
+FILE_CASE_SENSITIVE_SEARCH = 1
+FILE_CASE_PRESERVED_NAMES = 2
+FILE_UNICODE_ON_DISK = 4
+FILE_PERSISTENT_ACLS = 8
+FILE_FILE_COMPRESSION = 16
+FILE_VOLUME_IS_COMPRESSED = 32768
+IO_COMPLETION_MODIFY_STATE = 2
+DUPLICATE_CLOSE_SOURCE = 1
+DUPLICATE_SAME_ACCESS = 2
+SID_MAX_SUB_AUTHORITIES = (15)
+SECURITY_NULL_RID = (0)
+SECURITY_WORLD_RID = (0)
+SECURITY_LOCAL_RID = (0X00000000)
+SECURITY_CREATOR_OWNER_RID = (0)
+SECURITY_CREATOR_GROUP_RID = (1)
+SECURITY_DIALUP_RID = (1)
+SECURITY_NETWORK_RID = (2)
+SECURITY_BATCH_RID = (3)
+SECURITY_INTERACTIVE_RID = (4)
+SECURITY_SERVICE_RID = (6)
+SECURITY_ANONYMOUS_LOGON_RID = (7)
+SECURITY_LOGON_IDS_RID = (5)
+SECURITY_LOGON_IDS_RID_COUNT = (3)
+SECURITY_LOCAL_SYSTEM_RID = (18)
+SECURITY_NT_NON_UNIQUE = (21)
+SECURITY_BUILTIN_DOMAIN_RID = (32)
+DOMAIN_USER_RID_ADMIN = (500)
+DOMAIN_USER_RID_GUEST = (501)
+DOMAIN_GROUP_RID_ADMINS = (512)
+DOMAIN_GROUP_RID_USERS = (513)
+DOMAIN_GROUP_RID_GUESTS = (514)
+DOMAIN_ALIAS_RID_ADMINS = (544)
+DOMAIN_ALIAS_RID_USERS = (545)
+DOMAIN_ALIAS_RID_GUESTS = (546)
+DOMAIN_ALIAS_RID_POWER_USERS = (547)
+DOMAIN_ALIAS_RID_ACCOUNT_OPS = (548)
+DOMAIN_ALIAS_RID_SYSTEM_OPS = (549)
+DOMAIN_ALIAS_RID_PRINT_OPS = (550)
+DOMAIN_ALIAS_RID_BACKUP_OPS = (551)
+DOMAIN_ALIAS_RID_REPLICATOR = (552)
+SE_GROUP_MANDATORY = (1)
+SE_GROUP_ENABLED_BY_DEFAULT = (2)
+SE_GROUP_ENABLED = (4)
+SE_GROUP_OWNER = (8)
+SE_GROUP_LOGON_ID = (-1073741824)
+ACL_REVISION = (2)
+ACL_REVISION1 = (1)
+ACL_REVISION2 = (2)
+ACCESS_ALLOWED_ACE_TYPE = (0)
+ACCESS_DENIED_ACE_TYPE = (1)
+SYSTEM_AUDIT_ACE_TYPE = (2)
+SYSTEM_ALARM_ACE_TYPE = (3)
+OBJECT_INHERIT_ACE = (1)
+CONTAINER_INHERIT_ACE = (2)
+NO_PROPAGATE_INHERIT_ACE = (4)
+INHERIT_ONLY_ACE = (8)
+VALID_INHERIT_FLAGS = (15)
+SUCCESSFUL_ACCESS_ACE_FLAG = (64)
+FAILED_ACCESS_ACE_FLAG = (128)
+SECURITY_DESCRIPTOR_REVISION = (1)
+SECURITY_DESCRIPTOR_REVISION1 = (1)
+SECURITY_DESCRIPTOR_MIN_LENGTH = (20)
+SE_OWNER_DEFAULTED = (1)
+SE_GROUP_DEFAULTED = (2)
+SE_DACL_PRESENT = (4)
+SE_DACL_DEFAULTED = (8)
+SE_SACL_PRESENT = (16)
+SE_SACL_DEFAULTED = (32)
+SE_SELF_RELATIVE = (32768)
+SE_PRIVILEGE_ENABLED_BY_DEFAULT = (1)
+SE_PRIVILEGE_ENABLED = (2)
+SE_PRIVILEGE_USED_FOR_ACCESS = (-2147483648)
+PRIVILEGE_SET_ALL_NECESSARY = (1)
+SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege"
+SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege"
+SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege"
+SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege"
+SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege"
+SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege"
+SE_TCB_NAME = "SeTcbPrivilege"
+SE_SECURITY_NAME = "SeSecurityPrivilege"
+SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege"
+SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege"
+SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege"
+SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege"
+SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege"
+SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege"
+SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege"
+SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege"
+SE_BACKUP_NAME = "SeBackupPrivilege"
+SE_RESTORE_NAME = "SeRestorePrivilege"
+SE_SHUTDOWN_NAME = "SeShutdownPrivilege"
+SE_DEBUG_NAME = "SeDebugPrivilege"
+SE_AUDIT_NAME = "SeAuditPrivilege"
+SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege"
+SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege"
+SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege"
+
+TOKEN_ASSIGN_PRIMARY = (1)
+TOKEN_DUPLICATE = (2)
+TOKEN_IMPERSONATE = (4)
+TOKEN_QUERY = (8)
+TOKEN_QUERY_SOURCE = (16)
+TOKEN_ADJUST_PRIVILEGES = (32)
+TOKEN_ADJUST_GROUPS = (64)
+TOKEN_ADJUST_DEFAULT = (128)
+TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED  |\
+                          TOKEN_ASSIGN_PRIMARY      |\
+                          TOKEN_DUPLICATE           |\
+                          TOKEN_IMPERSONATE         |\
+                          TOKEN_QUERY               |\
+                          TOKEN_QUERY_SOURCE        |\
+                          TOKEN_ADJUST_PRIVILEGES   |\
+                          TOKEN_ADJUST_GROUPS       |\
+                          TOKEN_ADJUST_DEFAULT)
+TOKEN_READ = (STANDARD_RIGHTS_READ      |\
+                          TOKEN_QUERY)
+TOKEN_WRITE = (STANDARD_RIGHTS_WRITE     |\
+                          TOKEN_ADJUST_PRIVILEGES   |\
+                          TOKEN_ADJUST_GROUPS       |\
+                          TOKEN_ADJUST_DEFAULT)
+TOKEN_EXECUTE = (STANDARD_RIGHTS_EXECUTE)
+TOKEN_SOURCE_LENGTH = 8
+
+KEY_QUERY_VALUE = (1)
+KEY_SET_VALUE = (2)
+KEY_CREATE_SUB_KEY = (4)
+KEY_ENUMERATE_SUB_KEYS = (8)
+KEY_NOTIFY = (16)
+KEY_CREATE_LINK = (32)
+KEY_READ = ((STANDARD_RIGHTS_READ       |\
+                                  KEY_QUERY_VALUE            |\
+                                  KEY_ENUMERATE_SUB_KEYS     |\
+                                  KEY_NOTIFY)                 \
+                                  &                           \
+                                 (~SYNCHRONIZE))
+KEY_WRITE = ((STANDARD_RIGHTS_WRITE      |\
+                                  KEY_SET_VALUE              |\
+                                  KEY_CREATE_SUB_KEY)         \
+                                  &                           \
+                                 (~SYNCHRONIZE))
+KEY_EXECUTE = ((KEY_READ)                   \
+                                  &                           \
+                                 (~SYNCHRONIZE))
+KEY_ALL_ACCESS = ((STANDARD_RIGHTS_ALL        |\
+                                  KEY_QUERY_VALUE            |\
+                                  KEY_SET_VALUE              |\
+                                  KEY_CREATE_SUB_KEY         |\
+                                  KEY_ENUMERATE_SUB_KEYS     |\
+                                  KEY_NOTIFY                 |\
+                                  KEY_CREATE_LINK)            \
+                                  &                           \
+                                 (~SYNCHRONIZE))
REG_NOTIFY_CHANGE_ATTRIBUTES = (2)
REG_NOTIFY_CHANGE_SECURITY = (8)
# NOTE(review): REG_RESOURCE_REQUIREMENTS_LIST is defined twice (here and
# again after REG_FULL_RESOURCE_DESCRIPTOR below) with the same value 10;
# harmless duplicate left over from the h2py conversion.
REG_RESOURCE_REQUIREMENTS_LIST = ( 10 )
# Registry value type codes (winreg.h REG_* constants).
REG_NONE =                   ( 0 )   # No value type
REG_SZ =                     ( 1 )   # Unicode nul terminated string
REG_EXPAND_SZ =              ( 2 )   # Unicode nul terminated string
                                     # (with environment variable references)
REG_BINARY =                 ( 3 )   # Free form binary
REG_DWORD =                  ( 4 )   # 32-bit number
REG_DWORD_LITTLE_ENDIAN =    ( 4 )   # 32-bit number (same as REG_DWORD)
REG_DWORD_BIG_ENDIAN =       ( 5 )   # 32-bit number
REG_LINK =                   ( 6 )   # Symbolic Link (unicode)
REG_MULTI_SZ =               ( 7 )   # Multiple Unicode strings
REG_RESOURCE_LIST =          ( 8 )   # Resource list in the resource map
REG_FULL_RESOURCE_DESCRIPTOR =( 9 )  # Resource list in the hardware description
REG_RESOURCE_REQUIREMENTS_LIST = ( 10 )
REG_QWORD =                 ( 11 )  # 64-bit number
REG_QWORD_LITTLE_ENDIAN =   ( 11 )  # 64-bit number (same as REG_QWORD)
+
+
+# Generated by h2py from \msvc20\include\winnt.h
+# hacked and split by mhammond.
+# Included from string.h
+_NLSCMPERROR = 2147483647
+NULL = 0
+HEAP_NO_SERIALIZE = 1
+HEAP_GROWABLE = 2
+HEAP_GENERATE_EXCEPTIONS = 4
+HEAP_ZERO_MEMORY = 8
+HEAP_REALLOC_IN_PLACE_ONLY = 16
+HEAP_TAIL_CHECKING_ENABLED = 32
+HEAP_FREE_CHECKING_ENABLED = 64
+HEAP_DISABLE_COALESCE_ON_FREE = 128
+IS_TEXT_UNICODE_ASCII16 = 1
+IS_TEXT_UNICODE_REVERSE_ASCII16 = 16
+IS_TEXT_UNICODE_STATISTICS = 2
+IS_TEXT_UNICODE_REVERSE_STATISTICS = 32
+IS_TEXT_UNICODE_CONTROLS = 4
+IS_TEXT_UNICODE_REVERSE_CONTROLS = 64
+IS_TEXT_UNICODE_SIGNATURE = 8
+IS_TEXT_UNICODE_REVERSE_SIGNATURE = 128
+IS_TEXT_UNICODE_ILLEGAL_CHARS = 256
+IS_TEXT_UNICODE_ODD_LENGTH = 512
+IS_TEXT_UNICODE_DBCS_LEADBYTE = 1024
+IS_TEXT_UNICODE_NULL_BYTES = 4096
+IS_TEXT_UNICODE_UNICODE_MASK = 15
+IS_TEXT_UNICODE_REVERSE_MASK = 240
+IS_TEXT_UNICODE_NOT_UNICODE_MASK = 3840
+IS_TEXT_UNICODE_NOT_ASCII_MASK = 61440
+COMPRESSION_FORMAT_NONE = (0)
+COMPRESSION_FORMAT_DEFAULT = (1)
+COMPRESSION_FORMAT_LZNT1 = (2)
+COMPRESSION_ENGINE_STANDARD = (0)
+COMPRESSION_ENGINE_MAXIMUM = (256)
+MESSAGE_RESOURCE_UNICODE = 1
+RTL_CRITSECT_TYPE = 0
+RTL_RESOURCE_TYPE = 1
+DLL_PROCESS_ATTACH = 1
+DLL_THREAD_ATTACH = 2
+DLL_THREAD_DETACH = 3
+DLL_PROCESS_DETACH = 0
+EVENTLOG_SEQUENTIAL_READ = 0X0001
+EVENTLOG_SEEK_READ = 0X0002
+EVENTLOG_FORWARDS_READ = 0X0004
+EVENTLOG_BACKWARDS_READ = 0X0008
+EVENTLOG_SUCCESS = 0X0000
+EVENTLOG_ERROR_TYPE = 1
+EVENTLOG_WARNING_TYPE = 2
+EVENTLOG_INFORMATION_TYPE = 4
+EVENTLOG_AUDIT_SUCCESS = 8
+EVENTLOG_AUDIT_FAILURE = 16
+EVENTLOG_START_PAIRED_EVENT = 1
+EVENTLOG_END_PAIRED_EVENT = 2
+EVENTLOG_END_ALL_PAIRED_EVENTS = 4
+EVENTLOG_PAIRED_EVENT_ACTIVE = 8
+EVENTLOG_PAIRED_EVENT_INACTIVE = 16
+# Generated by h2py from \msvc20\include\winnt.h
+# hacked and split by mhammond.
+OWNER_SECURITY_INFORMATION = (0X00000001)
+GROUP_SECURITY_INFORMATION = (0X00000002)
+DACL_SECURITY_INFORMATION = (0X00000004)
+SACL_SECURITY_INFORMATION = (0X00000008)
+IMAGE_SIZEOF_FILE_HEADER = 20
+IMAGE_FILE_MACHINE_UNKNOWN = 0
+IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16
+IMAGE_SIZEOF_ROM_OPTIONAL_HEADER = 56
+IMAGE_SIZEOF_STD_OPTIONAL_HEADER = 28
+IMAGE_SIZEOF_NT_OPTIONAL_HEADER = 224
+IMAGE_NT_OPTIONAL_HDR_MAGIC = 267
+IMAGE_ROM_OPTIONAL_HDR_MAGIC = 263
+IMAGE_SIZEOF_SHORT_NAME = 8
+IMAGE_SIZEOF_SECTION_HEADER = 40
+IMAGE_SIZEOF_SYMBOL = 18
+IMAGE_SYM_CLASS_NULL = 0
+IMAGE_SYM_CLASS_AUTOMATIC = 1
+IMAGE_SYM_CLASS_EXTERNAL = 2
+IMAGE_SYM_CLASS_STATIC = 3
+IMAGE_SYM_CLASS_REGISTER = 4
+IMAGE_SYM_CLASS_EXTERNAL_DEF = 5
+IMAGE_SYM_CLASS_LABEL = 6
+IMAGE_SYM_CLASS_UNDEFINED_LABEL = 7
+IMAGE_SYM_CLASS_MEMBER_OF_STRUCT = 8
+IMAGE_SYM_CLASS_ARGUMENT = 9
+IMAGE_SYM_CLASS_STRUCT_TAG = 10
+IMAGE_SYM_CLASS_MEMBER_OF_UNION = 11
+IMAGE_SYM_CLASS_UNION_TAG = 12
+IMAGE_SYM_CLASS_TYPE_DEFINITION = 13
+IMAGE_SYM_CLASS_UNDEFINED_STATIC = 14
+IMAGE_SYM_CLASS_ENUM_TAG = 15
+IMAGE_SYM_CLASS_MEMBER_OF_ENUM = 16
+IMAGE_SYM_CLASS_REGISTER_PARAM = 17
+IMAGE_SYM_CLASS_BIT_FIELD = 18
+IMAGE_SYM_CLASS_BLOCK = 100
+IMAGE_SYM_CLASS_FUNCTION = 101
+IMAGE_SYM_CLASS_END_OF_STRUCT = 102
+IMAGE_SYM_CLASS_FILE = 103
+IMAGE_SYM_CLASS_SECTION = 104
+IMAGE_SYM_CLASS_WEAK_EXTERNAL = 105
# COFF symbol-type mask/shift constants (winnt.h).
# The raw h2py output used Python 2 octal literals (017, 060, 0300, 0360),
# which are a SyntaxError on Python 3; rewritten with the 0o prefix.
# Values are unchanged.
N_BTMASK = 0o17    # 15  - base type mask
N_TMASK = 0o60     # 48  - derived type mask
N_TMASK1 = 0o300   # 192
N_TMASK2 = 0o360   # 240
N_BTSHFT = 4       # shift to reach the base type bits
N_TSHIFT = 2       # shift per derived-type level
+IMAGE_SIZEOF_AUX_SYMBOL = 18
+IMAGE_COMDAT_SELECT_NODUPLICATES = 1
+IMAGE_COMDAT_SELECT_ANY = 2
+IMAGE_COMDAT_SELECT_SAME_SIZE = 3
+IMAGE_COMDAT_SELECT_EXACT_MATCH = 4
+IMAGE_COMDAT_SELECT_ASSOCIATIVE = 5
+IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY = 1
+IMAGE_WEAK_EXTERN_SEARCH_LIBRARY = 2
+IMAGE_WEAK_EXTERN_SEARCH_ALIAS = 3
+IMAGE_SIZEOF_RELOCATION = 10
# COFF relocation type codes for i386 and MIPS (winnt.h).
# The raw h2py output used Python 2 octal literals (012, 01, ..., 045),
# which are a SyntaxError on Python 3; rewritten with the 0o prefix.
# Values are unchanged (decimal equivalents in the trailing comments).
IMAGE_REL_I386_SECTION = 0o12     # 10
IMAGE_REL_I386_SECREL = 0o13      # 11
IMAGE_REL_MIPS_REFHALF = 0o1      # 1
IMAGE_REL_MIPS_REFWORD = 0o2      # 2
IMAGE_REL_MIPS_JMPADDR = 0o3      # 3
IMAGE_REL_MIPS_REFHI = 0o4        # 4
IMAGE_REL_MIPS_REFLO = 0o5        # 5
IMAGE_REL_MIPS_GPREL = 0o6        # 6
IMAGE_REL_MIPS_LITERAL = 0o7      # 7
IMAGE_REL_MIPS_SECTION = 0o12     # 10
IMAGE_REL_MIPS_SECREL = 0o13      # 11
IMAGE_REL_MIPS_REFWORDNB = 0o42   # 34
IMAGE_REL_MIPS_PAIR = 0o45        # 37
+IMAGE_REL_ALPHA_ABSOLUTE = 0
+IMAGE_REL_ALPHA_REFLONG = 1
+IMAGE_REL_ALPHA_REFQUAD = 2
+IMAGE_REL_ALPHA_GPREL32 = 3
+IMAGE_REL_ALPHA_LITERAL = 4
+IMAGE_REL_ALPHA_LITUSE = 5
+IMAGE_REL_ALPHA_GPDISP = 6
+IMAGE_REL_ALPHA_BRADDR = 7
+IMAGE_REL_ALPHA_HINT = 8
+IMAGE_REL_ALPHA_INLINE_REFLONG = 9
+IMAGE_REL_ALPHA_REFHI = 10
+IMAGE_REL_ALPHA_REFLO = 11
+IMAGE_REL_ALPHA_PAIR = 12
+IMAGE_REL_ALPHA_MATCH = 13
+IMAGE_REL_ALPHA_SECTION = 14
+IMAGE_REL_ALPHA_SECREL = 15
+IMAGE_REL_ALPHA_REFLONGNB = 16
+IMAGE_SIZEOF_BASE_RELOCATION = 8
+IMAGE_REL_BASED_ABSOLUTE = 0
+IMAGE_REL_BASED_HIGH = 1
+IMAGE_REL_BASED_LOW = 2
+IMAGE_REL_BASED_HIGHLOW = 3
+IMAGE_REL_BASED_HIGHADJ = 4
+IMAGE_REL_BASED_MIPS_JMPADDR = 5
+IMAGE_SIZEOF_LINENUMBER = 6
+IMAGE_ARCHIVE_START_SIZE = 8
+IMAGE_ARCHIVE_START = "!<arch>\n"
+IMAGE_ARCHIVE_END = "`\n"
+IMAGE_ARCHIVE_PAD = "\n"
+IMAGE_ARCHIVE_LINKER_MEMBER = "/               "
+IMAGE_ARCHIVE_LONGNAMES_MEMBER = "//              "
+IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR = 60
+IMAGE_ORDINAL_FLAG = -2147483648
def IMAGE_SNAP_BY_ORDINAL(Ordinal):
    """Return True when the import thunk value has the ordinal flag bit set."""
    return bool(Ordinal & IMAGE_ORDINAL_FLAG)
+
def IMAGE_ORDINAL(Ordinal):
    """Return the import ordinal number (low 16 bits) of a thunk value."""
    return Ordinal & 0xFFFF
+
+IMAGE_RESOURCE_NAME_IS_STRING = -2147483648
+IMAGE_RESOURCE_DATA_IS_DIRECTORY = -2147483648
+IMAGE_DEBUG_TYPE_UNKNOWN = 0
+IMAGE_DEBUG_TYPE_COFF = 1
+IMAGE_DEBUG_TYPE_CODEVIEW = 2
+IMAGE_DEBUG_TYPE_FPO = 3
+IMAGE_DEBUG_TYPE_MISC = 4
+IMAGE_DEBUG_TYPE_EXCEPTION = 5
+IMAGE_DEBUG_TYPE_FIXUP = 6
+IMAGE_DEBUG_TYPE_OMAP_TO_SRC = 7
+IMAGE_DEBUG_TYPE_OMAP_FROM_SRC = 8
+FRAME_FPO = 0
+FRAME_TRAP = 1
+FRAME_TSS = 2
+SIZEOF_RFPO_DATA = 16
+IMAGE_DEBUG_MISC_EXENAME = 1
+IMAGE_SEPARATE_DEBUG_SIGNATURE = 18756
+# Generated by h2py from \msvcnt\include\wingdi.h
+# hacked and split manually by mhammond.
+NEWFRAME = 1
+ABORTDOC = 2
+NEXTBAND = 3
+SETCOLORTABLE = 4
+GETCOLORTABLE = 5
+FLUSHOUTPUT = 6
+DRAFTMODE = 7
+QUERYESCSUPPORT = 8
+SETABORTPROC = 9
+STARTDOC = 10
+ENDDOC = 11
+GETPHYSPAGESIZE = 12
+GETPRINTINGOFFSET = 13
+GETSCALINGFACTOR = 14
+MFCOMMENT = 15
+GETPENWIDTH = 16
+SETCOPYCOUNT = 17
+SELECTPAPERSOURCE = 18
+DEVICEDATA = 19
+PASSTHROUGH = 19
+GETTECHNOLGY = 20
+GETTECHNOLOGY = 20
+SETLINECAP = 21
+SETLINEJOIN = 22
+SETMITERLIMIT = 23
+BANDINFO = 24
+DRAWPATTERNRECT = 25
+GETVECTORPENSIZE = 26
+GETVECTORBRUSHSIZE = 27
+ENABLEDUPLEX = 28
+GETSETPAPERBINS = 29
+GETSETPRINTORIENT = 30
+ENUMPAPERBINS = 31
+SETDIBSCALING = 32
+EPSPRINTING = 33
+ENUMPAPERMETRICS = 34
+GETSETPAPERMETRICS = 35
+POSTSCRIPT_DATA = 37
+POSTSCRIPT_IGNORE = 38
+MOUSETRAILS = 39
+GETDEVICEUNITS = 42
+GETEXTENDEDTEXTMETRICS = 256
+GETEXTENTTABLE = 257
+GETPAIRKERNTABLE = 258
+GETTRACKKERNTABLE = 259
+EXTTEXTOUT = 512
+GETFACENAME = 513
+DOWNLOADFACE = 514
+ENABLERELATIVEWIDTHS = 768
+ENABLEPAIRKERNING = 769
+SETKERNTRACK = 770
+SETALLJUSTVALUES = 771
+SETCHARSET = 772
+STRETCHBLT = 2048
+GETSETSCREENPARAMS = 3072
+BEGIN_PATH = 4096
+CLIP_TO_PATH = 4097
+END_PATH = 4098
+EXT_DEVICE_CAPS = 4099
+RESTORE_CTM = 4100
+SAVE_CTM = 4101
+SET_ARC_DIRECTION = 4102
+SET_BACKGROUND_COLOR = 4103
+SET_POLY_MODE = 4104
+SET_SCREEN_ANGLE = 4105
+SET_SPREAD = 4106
+TRANSFORM_CTM = 4107
+SET_CLIP_BOX = 4108
+SET_BOUNDS = 4109
+SET_MIRROR_MODE = 4110
+OPENCHANNEL = 4110
+DOWNLOADHEADER = 4111
+CLOSECHANNEL = 4112
+POSTSCRIPT_PASSTHROUGH = 4115
+ENCAPSULATED_POSTSCRIPT = 4116
+SP_NOTREPORTED = 16384
+SP_ERROR = (-1)
+SP_APPABORT = (-2)
+SP_USERABORT = (-3)
+SP_OUTOFDISK = (-4)
+SP_OUTOFMEMORY = (-5)
+PR_JOBSTATUS = 0
+OBJ_PEN = 1
+OBJ_BRUSH = 2
+OBJ_DC = 3
+OBJ_METADC = 4
+OBJ_PAL = 5
+OBJ_FONT = 6
+OBJ_BITMAP = 7
+OBJ_REGION = 8
+OBJ_METAFILE = 9
+OBJ_MEMDC = 10
+OBJ_EXTPEN = 11
+OBJ_ENHMETADC = 12
+OBJ_ENHMETAFILE = 13
+MWT_IDENTITY = 1
+MWT_LEFTMULTIPLY = 2
+MWT_RIGHTMULTIPLY = 3
+MWT_MIN = MWT_IDENTITY
+MWT_MAX = MWT_RIGHTMULTIPLY
+BI_RGB = 0
+BI_RLE8 = 1
+BI_RLE4 = 2
+BI_BITFIELDS = 3
+TMPF_FIXED_PITCH = 1
+TMPF_VECTOR = 2
+TMPF_DEVICE = 8
+TMPF_TRUETYPE = 4
+NTM_REGULAR = 64
+NTM_BOLD = 32
+NTM_ITALIC = 1
+LF_FACESIZE = 32
+LF_FULLFACESIZE = 64
+OUT_DEFAULT_PRECIS = 0
+OUT_STRING_PRECIS = 1
+OUT_CHARACTER_PRECIS = 2
+OUT_STROKE_PRECIS = 3
+OUT_TT_PRECIS = 4
+OUT_DEVICE_PRECIS = 5
+OUT_RASTER_PRECIS = 6
+OUT_TT_ONLY_PRECIS = 7
+OUT_OUTLINE_PRECIS = 8
+CLIP_DEFAULT_PRECIS = 0
+CLIP_CHARACTER_PRECIS = 1
+CLIP_STROKE_PRECIS = 2
+CLIP_MASK = 15
+CLIP_LH_ANGLES = (1<<4)
+CLIP_TT_ALWAYS = (2<<4)
+CLIP_EMBEDDED = (8<<4)
+DEFAULT_QUALITY = 0
+DRAFT_QUALITY = 1
+PROOF_QUALITY = 2
+NONANTIALIASED_QUALITY = 3
+ANTIALIASED_QUALITY = 4
+CLEARTYPE_QUALITY = 5
+CLEARTYPE_NATURAL_QUALITY = 6
+DEFAULT_PITCH = 0
+FIXED_PITCH = 1
+VARIABLE_PITCH = 2
+ANSI_CHARSET = 0
+DEFAULT_CHARSET = 1
+SYMBOL_CHARSET = 2
+SHIFTJIS_CHARSET = 128
+HANGEUL_CHARSET = 129
+CHINESEBIG5_CHARSET = 136
+OEM_CHARSET = 255
+JOHAB_CHARSET = 130
+HEBREW_CHARSET = 177
+ARABIC_CHARSET = 178
+GREEK_CHARSET = 161
+TURKISH_CHARSET = 162
+VIETNAMESE_CHARSET = 163
+THAI_CHARSET = 222
+EASTEUROPE_CHARSET = 238
+RUSSIAN_CHARSET = 204
+MAC_CHARSET = 77
+BALTIC_CHARSET = 186
+FF_DONTCARE = (0<<4)
+FF_ROMAN = (1<<4)
+FF_SWISS = (2<<4)
+FF_MODERN = (3<<4)
+FF_SCRIPT = (4<<4)
+FF_DECORATIVE = (5<<4)
+FW_DONTCARE = 0
+FW_THIN = 100
+FW_EXTRALIGHT = 200
+FW_LIGHT = 300
+FW_NORMAL = 400
+FW_MEDIUM = 500
+FW_SEMIBOLD = 600
+FW_BOLD = 700
+FW_EXTRABOLD = 800
+FW_HEAVY = 900
+FW_ULTRALIGHT = FW_EXTRALIGHT
+FW_REGULAR = FW_NORMAL
+FW_DEMIBOLD = FW_SEMIBOLD
+FW_ULTRABOLD = FW_EXTRABOLD
+FW_BLACK = FW_HEAVY
+# Generated by h2py from \msvcnt\include\wingdi.h
+# hacked and split manually by mhammond.
+BS_SOLID = 0
+BS_NULL = 1
+BS_HOLLOW = BS_NULL
+BS_HATCHED = 2
+BS_PATTERN = 3
+BS_INDEXED = 4
+BS_DIBPATTERN = 5
+BS_DIBPATTERNPT = 6
+BS_PATTERN8X8 = 7
+BS_DIBPATTERN8X8 = 8
+HS_HORIZONTAL = 0
+HS_VERTICAL = 1
+HS_FDIAGONAL = 2
+HS_BDIAGONAL = 3
+HS_CROSS = 4
+HS_DIAGCROSS = 5
+HS_FDIAGONAL1 = 6
+HS_BDIAGONAL1 = 7
+HS_SOLID = 8
+HS_DENSE1 = 9
+HS_DENSE2 = 10
+HS_DENSE3 = 11
+HS_DENSE4 = 12
+HS_DENSE5 = 13
+HS_DENSE6 = 14
+HS_DENSE7 = 15
+HS_DENSE8 = 16
+HS_NOSHADE = 17
+HS_HALFTONE = 18
+HS_SOLIDCLR = 19
+HS_DITHEREDCLR = 20
+HS_SOLIDTEXTCLR = 21
+HS_DITHEREDTEXTCLR = 22
+HS_SOLIDBKCLR = 23
+HS_DITHEREDBKCLR = 24
+HS_API_MAX = 25
+PS_SOLID = 0
+PS_DASH = 1
+PS_DOT = 2
+PS_DASHDOT = 3
+PS_DASHDOTDOT = 4
+PS_NULL = 5
+PS_INSIDEFRAME = 6
+PS_USERSTYLE = 7
+PS_ALTERNATE = 8
+PS_STYLE_MASK = 15
+PS_ENDCAP_ROUND = 0
+PS_ENDCAP_SQUARE = 256
+PS_ENDCAP_FLAT = 512
+PS_ENDCAP_MASK = 3840
+PS_JOIN_ROUND = 0
+PS_JOIN_BEVEL = 4096
+PS_JOIN_MITER = 8192
+PS_JOIN_MASK = 61440
+PS_COSMETIC = 0
+PS_GEOMETRIC = 65536
+PS_TYPE_MASK = 983040
+AD_COUNTERCLOCKWISE = 1
+AD_CLOCKWISE = 2
+DRIVERVERSION = 0
+TECHNOLOGY = 2
+HORZSIZE = 4
+VERTSIZE = 6
+HORZRES = 8
+VERTRES = 10
+BITSPIXEL = 12
+PLANES = 14
+NUMBRUSHES = 16
+NUMPENS = 18
+NUMMARKERS = 20
+NUMFONTS = 22
+NUMCOLORS = 24
+PDEVICESIZE = 26
+CURVECAPS = 28
+LINECAPS = 30
+POLYGONALCAPS = 32
+TEXTCAPS = 34
+CLIPCAPS = 36
+RASTERCAPS = 38
+ASPECTX = 40
+ASPECTY = 42
+ASPECTXY = 44
+LOGPIXELSX = 88
+LOGPIXELSY = 90
+SIZEPALETTE = 104
+NUMRESERVED = 106
+COLORRES = 108
+DT_PLOTTER = 0
+DT_RASDISPLAY = 1
+DT_RASPRINTER = 2
+DT_RASCAMERA = 3
+DT_CHARSTREAM = 4
+DT_METAFILE = 5
+DT_DISPFILE = 6
+CC_NONE = 0
+CC_CIRCLES = 1
+CC_PIE = 2
+CC_CHORD = 4
+CC_ELLIPSES = 8
+CC_WIDE = 16
+CC_STYLED = 32
+CC_WIDESTYLED = 64
+CC_INTERIORS = 128
+CC_ROUNDRECT = 256
+LC_NONE = 0
+LC_POLYLINE = 2
+LC_MARKER = 4
+LC_POLYMARKER = 8
+LC_WIDE = 16
+LC_STYLED = 32
+LC_WIDESTYLED = 64
+LC_INTERIORS = 128
+PC_NONE = 0
+PC_POLYGON = 1
+PC_RECTANGLE = 2
+PC_WINDPOLYGON = 4
+PC_TRAPEZOID = 4
+PC_SCANLINE = 8
+PC_WIDE = 16
+PC_STYLED = 32
+PC_WIDESTYLED = 64
+PC_INTERIORS = 128
+CP_NONE = 0
+CP_RECTANGLE = 1
+CP_REGION = 2
+TC_OP_CHARACTER = 1
+TC_OP_STROKE = 2
+TC_CP_STROKE = 4
+TC_CR_90 = 8
+TC_CR_ANY = 16
+TC_SF_X_YINDEP = 32
+TC_SA_DOUBLE = 64
+TC_SA_INTEGER = 128
+TC_SA_CONTIN = 256
+TC_EA_DOUBLE = 512
+TC_IA_ABLE = 1024
+TC_UA_ABLE = 2048
+TC_SO_ABLE = 4096
+TC_RA_ABLE = 8192
+TC_VA_ABLE = 16384
+TC_RESERVED = 32768
+TC_SCROLLBLT = 65536
+RC_BITBLT = 1
+RC_BANDING = 2
+RC_SCALING = 4
+RC_BITMAP64 = 8
+RC_GDI20_OUTPUT = 16
+RC_GDI20_STATE = 32
+RC_SAVEBITMAP = 64
+RC_DI_BITMAP = 128
+RC_PALETTE = 256
+RC_DIBTODEV = 512
+RC_BIGFONT = 1024
+RC_STRETCHBLT = 2048
+RC_FLOODFILL = 4096
+RC_STRETCHDIB = 8192
+RC_OP_DX_OUTPUT = 16384
+RC_DEVBITS = 32768
+DIB_RGB_COLORS = 0
+DIB_PAL_COLORS = 1
+DIB_PAL_INDICES = 2
+DIB_PAL_PHYSINDICES = 2
+DIB_PAL_LOGINDICES = 4
+SYSPAL_ERROR = 0
+SYSPAL_STATIC = 1
+SYSPAL_NOSTATIC = 2
+CBM_CREATEDIB = 2
+CBM_INIT = 4
+FLOODFILLBORDER = 0
+FLOODFILLSURFACE = 1
+CCHDEVICENAME = 32
+CCHFORMNAME = 32
+# Generated by h2py from \msvcnt\include\wingdi.h
+# hacked and split manually by mhammond.
+
+# DEVMODE.dmFields
+DM_SPECVERSION = 800
+DM_ORIENTATION = 1
+DM_PAPERSIZE = 2
+DM_PAPERLENGTH = 4
+DM_PAPERWIDTH = 8
+DM_SCALE = 16
+DM_COPIES = 256
+DM_DEFAULTSOURCE = 512
+DM_PRINTQUALITY = 1024
+DM_COLOR = 2048
+DM_DUPLEX = 4096
+DM_YRESOLUTION = 8192
+DM_TTOPTION = 16384
+DM_COLLATE = 32768
+DM_FORMNAME = 65536
+DM_LOGPIXELS = 131072
+DM_BITSPERPEL = 262144
+DM_PELSWIDTH = 524288
+DM_PELSHEIGHT = 1048576
+DM_DISPLAYFLAGS = 2097152
+DM_DISPLAYFREQUENCY = 4194304
+DM_ICMMETHOD = 8388608
+DM_ICMINTENT = 16777216
+DM_MEDIATYPE = 33554432
+DM_DITHERTYPE = 67108864
+DM_PANNINGWIDTH = 134217728
+DM_PANNINGHEIGHT = 268435456
+DM_DISPLAYFIXEDOUTPUT = 536870912
+
+# DEVMODE.dmOrientation
+DMORIENT_PORTRAIT = 1
+DMORIENT_LANDSCAPE = 2
+
+# DEVMODE.dmDisplayOrientation
+DMDO_DEFAULT = 0
+DMDO_90 = 1
+DMDO_180 = 2
+DMDO_270 = 3
+
+# DEVMODE.dmDisplayFixedOutput
+DMDFO_DEFAULT = 0
+DMDFO_STRETCH = 1
+DMDFO_CENTER = 2
+
+# DEVMODE.dmPaperSize
+DMPAPER_LETTER = 1
+DMPAPER_LETTERSMALL = 2
+DMPAPER_TABLOID = 3
+DMPAPER_LEDGER = 4
+DMPAPER_LEGAL = 5
+DMPAPER_STATEMENT = 6
+DMPAPER_EXECUTIVE = 7
+DMPAPER_A3 = 8
+DMPAPER_A4 = 9
+DMPAPER_A4SMALL = 10
+DMPAPER_A5 = 11
+DMPAPER_B4 = 12
+DMPAPER_B5 = 13
+DMPAPER_FOLIO = 14
+DMPAPER_QUARTO = 15
+DMPAPER_10X14 = 16
+DMPAPER_11X17 = 17
+DMPAPER_NOTE = 18
+DMPAPER_ENV_9 = 19
+DMPAPER_ENV_10 = 20
+DMPAPER_ENV_11 = 21
+DMPAPER_ENV_12 = 22
+DMPAPER_ENV_14 = 23
+DMPAPER_CSHEET = 24
+DMPAPER_DSHEET = 25
+DMPAPER_ESHEET = 26
+DMPAPER_ENV_DL = 27
+DMPAPER_ENV_C5 = 28
+DMPAPER_ENV_C3 = 29
+DMPAPER_ENV_C4 = 30
+DMPAPER_ENV_C6 = 31
+DMPAPER_ENV_C65 = 32
+DMPAPER_ENV_B4 = 33
+DMPAPER_ENV_B5 = 34
+DMPAPER_ENV_B6 = 35
+DMPAPER_ENV_ITALY = 36
+DMPAPER_ENV_MONARCH = 37
+DMPAPER_ENV_PERSONAL = 38
+DMPAPER_FANFOLD_US = 39
+DMPAPER_FANFOLD_STD_GERMAN = 40
+DMPAPER_FANFOLD_LGL_GERMAN = 41
+DMPAPER_ISO_B4 = 42
+DMPAPER_JAPANESE_POSTCARD = 43
+DMPAPER_9X11 = 44
+DMPAPER_10X11 = 45
+DMPAPER_15X11 = 46
+DMPAPER_ENV_INVITE = 47
+DMPAPER_RESERVED_48 = 48
+DMPAPER_RESERVED_49 = 49
+DMPAPER_LETTER_EXTRA = 50
+DMPAPER_LEGAL_EXTRA = 51
+DMPAPER_TABLOID_EXTRA = 52
+DMPAPER_A4_EXTRA = 53
+DMPAPER_LETTER_TRANSVERSE = 54
+DMPAPER_A4_TRANSVERSE = 55
+DMPAPER_LETTER_EXTRA_TRANSVERSE = 56
+DMPAPER_A_PLUS = 57
+DMPAPER_B_PLUS = 58
+DMPAPER_LETTER_PLUS = 59
+DMPAPER_A4_PLUS = 60
+DMPAPER_A5_TRANSVERSE = 61
+DMPAPER_B5_TRANSVERSE = 62
+DMPAPER_A3_EXTRA = 63
+DMPAPER_A5_EXTRA = 64
+DMPAPER_B5_EXTRA = 65
+DMPAPER_A2 = 66
+DMPAPER_A3_TRANSVERSE = 67
+DMPAPER_A3_EXTRA_TRANSVERSE = 68
+DMPAPER_DBL_JAPANESE_POSTCARD = 69
+DMPAPER_A6 = 70
+DMPAPER_JENV_KAKU2 = 71
+DMPAPER_JENV_KAKU3 = 72
+DMPAPER_JENV_CHOU3 = 73
+DMPAPER_JENV_CHOU4 = 74
+DMPAPER_LETTER_ROTATED = 75
+DMPAPER_A3_ROTATED = 76
+DMPAPER_A4_ROTATED = 77
+DMPAPER_A5_ROTATED = 78
+DMPAPER_B4_JIS_ROTATED = 79
+DMPAPER_B5_JIS_ROTATED = 80
+DMPAPER_JAPANESE_POSTCARD_ROTATED = 81
+DMPAPER_DBL_JAPANESE_POSTCARD_ROTATED = 82
+DMPAPER_A6_ROTATED = 83
+DMPAPER_JENV_KAKU2_ROTATED = 84
+DMPAPER_JENV_KAKU3_ROTATED = 85
+DMPAPER_JENV_CHOU3_ROTATED = 86
+DMPAPER_JENV_CHOU4_ROTATED = 87
+DMPAPER_B6_JIS = 88
+DMPAPER_B6_JIS_ROTATED = 89
+DMPAPER_12X11 = 90
+DMPAPER_JENV_YOU4 = 91
+DMPAPER_JENV_YOU4_ROTATED = 92
+DMPAPER_P16K = 93
+DMPAPER_P32K = 94
+DMPAPER_P32KBIG = 95
+DMPAPER_PENV_1 = 96
+DMPAPER_PENV_2 = 97
+DMPAPER_PENV_3 = 98
+DMPAPER_PENV_4 = 99
+DMPAPER_PENV_5 = 100
+DMPAPER_PENV_6 = 101
+DMPAPER_PENV_7 = 102
+DMPAPER_PENV_8 = 103
+DMPAPER_PENV_9 = 104
+DMPAPER_PENV_10 = 105
+DMPAPER_P16K_ROTATED = 106
+DMPAPER_P32K_ROTATED = 107
+DMPAPER_P32KBIG_ROTATED = 108
+DMPAPER_PENV_1_ROTATED = 109
+DMPAPER_PENV_2_ROTATED = 110
+DMPAPER_PENV_3_ROTATED = 111
+DMPAPER_PENV_4_ROTATED = 112
+DMPAPER_PENV_5_ROTATED = 113
+DMPAPER_PENV_6_ROTATED = 114
+DMPAPER_PENV_7_ROTATED = 115
+DMPAPER_PENV_8_ROTATED = 116
+DMPAPER_PENV_9_ROTATED = 117
+DMPAPER_PENV_10_ROTATED = 118
+DMPAPER_LAST = DMPAPER_PENV_10_ROTATED
+DMPAPER_USER = 256
+
+# DEVMODE.dmDefaultSource
+DMBIN_UPPER = 1
+DMBIN_ONLYONE = 1
+DMBIN_LOWER = 2
+DMBIN_MIDDLE = 3
+DMBIN_MANUAL = 4
+DMBIN_ENVELOPE = 5
+DMBIN_ENVMANUAL = 6
+DMBIN_AUTO = 7
+DMBIN_TRACTOR = 8
+DMBIN_SMALLFMT = 9
+DMBIN_LARGEFMT = 10
+DMBIN_LARGECAPACITY = 11
+DMBIN_CASSETTE = 14
+DMBIN_LAST = DMBIN_CASSETTE
+DMBIN_USER = 256
+
+# DEVMODE.dmPrintQuality
+DMRES_DRAFT = (-1)
+DMRES_LOW = (-2)
+DMRES_MEDIUM = (-3)
+DMRES_HIGH = (-4)
+
+# DEVMODE.dmColor
+DMCOLOR_MONOCHROME = 1
+DMCOLOR_COLOR = 2
+
+# DEVMODE.dmDuplex
+DMDUP_SIMPLEX = 1
+DMDUP_VERTICAL = 2
+DMDUP_HORIZONTAL = 3
+
+# DEVMODE.dmTTOption
+DMTT_BITMAP = 1
+DMTT_DOWNLOAD = 2
+DMTT_SUBDEV = 3
+DMTT_DOWNLOAD_OUTLINE = 4
+
+# DEVMODE.dmCollate
+DMCOLLATE_FALSE = 0
+DMCOLLATE_TRUE = 1
+
+# DEVMODE.dmDisplayFlags
+DM_GRAYSCALE = 1
+DM_INTERLACED = 2
+
+# DEVMODE.dmICMMethod
+DMICMMETHOD_NONE = 1
+DMICMMETHOD_SYSTEM = 2
+DMICMMETHOD_DRIVER = 3
+DMICMMETHOD_DEVICE = 4
+DMICMMETHOD_USER = 256
+
+# DEVMODE.dmICMIntent
+DMICM_SATURATE = 1
+DMICM_CONTRAST = 2
+DMICM_COLORIMETRIC = 3
+DMICM_ABS_COLORIMETRIC = 4
+DMICM_USER = 256
+
+# DEVMODE.dmMediaType
+DMMEDIA_STANDARD = 1
+DMMEDIA_TRANSPARENCY = 2
+DMMEDIA_GLOSSY = 3
+DMMEDIA_USER = 256
+
+RDH_RECTANGLES = 1
+GGO_METRICS = 0
+GGO_BITMAP = 1
+GGO_NATIVE = 2
+TT_POLYGON_TYPE = 24
+TT_PRIM_LINE = 1
+TT_PRIM_QSPLINE = 2
+TT_AVAILABLE = 1
+TT_ENABLED = 2
+DM_UPDATE = 1
+DM_COPY = 2
+DM_PROMPT = 4
+DM_MODIFY = 8
+DM_IN_BUFFER = DM_MODIFY
+DM_IN_PROMPT = DM_PROMPT
+DM_OUT_BUFFER = DM_COPY
+DM_OUT_DEFAULT = DM_UPDATE
+
+# DeviceCapabilities types
+DC_FIELDS = 1
+DC_PAPERS = 2
+DC_PAPERSIZE = 3
+DC_MINEXTENT = 4
+DC_MAXEXTENT = 5
+DC_BINS = 6
+DC_DUPLEX = 7
+DC_SIZE = 8
+DC_EXTRA = 9
+DC_VERSION = 10
+DC_DRIVER = 11
+DC_BINNAMES = 12
+DC_ENUMRESOLUTIONS = 13
+DC_FILEDEPENDENCIES = 14
+DC_TRUETYPE = 15
+DC_PAPERNAMES = 16
+DC_ORIENTATION = 17
+DC_COPIES = 18
+DC_BINADJUST = 19
+DC_EMF_COMPLIANT = 20
+DC_DATATYPE_PRODUCED = 21
+DC_COLLATE = 22
+DC_MANUFACTURER = 23
+DC_MODEL = 24
+DC_PERSONALITY = 25
+DC_PRINTRATE = 26
+DC_PRINTRATEUNIT = 27
+DC_PRINTERMEM = 28
+DC_MEDIAREADY = 29
+DC_STAPLE = 30
+DC_PRINTRATEPPM = 31
+DC_COLORDEVICE = 32
+DC_NUP = 33
+DC_MEDIATYPENAMES = 34
+DC_MEDIATYPES = 35
+
+PRINTRATEUNIT_PPM = 1
+PRINTRATEUNIT_CPS = 2
+PRINTRATEUNIT_LPM = 3
+PRINTRATEUNIT_IPM = 4
+
+# TrueType constants
+DCTT_BITMAP = 1
+DCTT_DOWNLOAD = 2
+DCTT_SUBDEV = 4
+DCTT_DOWNLOAD_OUTLINE = 8
+
+CA_NEGATIVE = 1
+CA_LOG_FILTER = 2
+ILLUMINANT_DEVICE_DEFAULT = 0
+ILLUMINANT_A = 1
+ILLUMINANT_B = 2
+ILLUMINANT_C = 3
+ILLUMINANT_D50 = 4
+ILLUMINANT_D55 = 5
+ILLUMINANT_D65 = 6
+ILLUMINANT_D75 = 7
+ILLUMINANT_F2 = 8
+ILLUMINANT_MAX_INDEX = ILLUMINANT_F2
+ILLUMINANT_TUNGSTEN = ILLUMINANT_A
+ILLUMINANT_DAYLIGHT = ILLUMINANT_C
+ILLUMINANT_FLUORESCENT = ILLUMINANT_F2
+ILLUMINANT_NTSC = ILLUMINANT_C
+
+# Generated by h2py from \msvcnt\include\wingdi.h
+# hacked and split manually by mhammond.
+FONTMAPPER_MAX = 10
+ENHMETA_SIGNATURE = 1179469088
+ENHMETA_STOCK_OBJECT = -2147483648
+EMR_HEADER = 1
+EMR_POLYBEZIER = 2
+EMR_POLYGON = 3
+EMR_POLYLINE = 4
+EMR_POLYBEZIERTO = 5
+EMR_POLYLINETO = 6
+EMR_POLYPOLYLINE = 7
+EMR_POLYPOLYGON = 8
+EMR_SETWINDOWEXTEX = 9
+EMR_SETWINDOWORGEX = 10
+EMR_SETVIEWPORTEXTEX = 11
+EMR_SETVIEWPORTORGEX = 12
+EMR_SETBRUSHORGEX = 13
+EMR_EOF = 14
+EMR_SETPIXELV = 15
+EMR_SETMAPPERFLAGS = 16
+EMR_SETMAPMODE = 17
+EMR_SETBKMODE = 18
+EMR_SETPOLYFILLMODE = 19
+EMR_SETROP2 = 20
+EMR_SETSTRETCHBLTMODE = 21
+EMR_SETTEXTALIGN = 22
+EMR_SETCOLORADJUSTMENT = 23
+EMR_SETTEXTCOLOR = 24
+EMR_SETBKCOLOR = 25
+EMR_OFFSETCLIPRGN = 26
+EMR_MOVETOEX = 27
+EMR_SETMETARGN = 28
+EMR_EXCLUDECLIPRECT = 29
+EMR_INTERSECTCLIPRECT = 30
+EMR_SCALEVIEWPORTEXTEX = 31
+EMR_SCALEWINDOWEXTEX = 32
+EMR_SAVEDC = 33
+EMR_RESTOREDC = 34
+EMR_SETWORLDTRANSFORM = 35
+EMR_MODIFYWORLDTRANSFORM = 36
+EMR_SELECTOBJECT = 37
+EMR_CREATEPEN = 38
+EMR_CREATEBRUSHINDIRECT = 39
+EMR_DELETEOBJECT = 40
+EMR_ANGLEARC = 41
+EMR_ELLIPSE = 42
+EMR_RECTANGLE = 43
+EMR_ROUNDRECT = 44
+EMR_ARC = 45
+EMR_CHORD = 46
+EMR_PIE = 47
+EMR_SELECTPALETTE = 48
+EMR_CREATEPALETTE = 49
+EMR_SETPALETTEENTRIES = 50
+EMR_RESIZEPALETTE = 51
+EMR_REALIZEPALETTE = 52
+EMR_EXTFLOODFILL = 53
+EMR_LINETO = 54
+EMR_ARCTO = 55
+EMR_POLYDRAW = 56
+EMR_SETARCDIRECTION = 57
+EMR_SETMITERLIMIT = 58
+EMR_BEGINPATH = 59
+EMR_ENDPATH = 60
+EMR_CLOSEFIGURE = 61
+EMR_FILLPATH = 62
+EMR_STROKEANDFILLPATH = 63
+EMR_STROKEPATH = 64
+EMR_FLATTENPATH = 65
+EMR_WIDENPATH = 66
+EMR_SELECTCLIPPATH = 67
+EMR_ABORTPATH = 68
+EMR_GDICOMMENT = 70
+EMR_FILLRGN = 71
+EMR_FRAMERGN = 72
+EMR_INVERTRGN = 73
+EMR_PAINTRGN = 74
+EMR_EXTSELECTCLIPRGN = 75
+EMR_BITBLT = 76
+EMR_STRETCHBLT = 77
+EMR_MASKBLT = 78
+EMR_PLGBLT = 79
+EMR_SETDIBITSTODEVICE = 80
+EMR_STRETCHDIBITS = 81
+EMR_EXTCREATEFONTINDIRECTW = 82
+EMR_EXTTEXTOUTA = 83
+EMR_EXTTEXTOUTW = 84
+EMR_POLYBEZIER16 = 85
+EMR_POLYGON16 = 86
+EMR_POLYLINE16 = 87
+EMR_POLYBEZIERTO16 = 88
+EMR_POLYLINETO16 = 89
+EMR_POLYPOLYLINE16 = 90
+EMR_POLYPOLYGON16 = 91
+EMR_POLYDRAW16 = 92
+EMR_CREATEMONOBRUSH = 93
+EMR_CREATEDIBPATTERNBRUSHPT = 94
+EMR_EXTCREATEPEN = 95
+EMR_POLYTEXTOUTA = 96
+EMR_POLYTEXTOUTW = 97
+EMR_MIN = 1
+EMR_MAX = 97
+# Generated by h2py from \msvcnt\include\wingdi.h
+# hacked and split manually by mhammond.
+PANOSE_COUNT = 10
+PAN_FAMILYTYPE_INDEX = 0
+PAN_SERIFSTYLE_INDEX = 1
+PAN_WEIGHT_INDEX = 2
+PAN_PROPORTION_INDEX = 3
+PAN_CONTRAST_INDEX = 4
+PAN_STROKEVARIATION_INDEX = 5
+PAN_ARMSTYLE_INDEX = 6
+PAN_LETTERFORM_INDEX = 7
+PAN_MIDLINE_INDEX = 8
+PAN_XHEIGHT_INDEX = 9
+PAN_CULTURE_LATIN = 0
+PAN_ANY = 0
+PAN_NO_FIT = 1
+PAN_FAMILY_TEXT_DISPLAY = 2
+PAN_FAMILY_SCRIPT = 3
+PAN_FAMILY_DECORATIVE = 4
+PAN_FAMILY_PICTORIAL = 5
+PAN_SERIF_COVE = 2
+PAN_SERIF_OBTUSE_COVE = 3
+PAN_SERIF_SQUARE_COVE = 4
+PAN_SERIF_OBTUSE_SQUARE_COVE = 5
+PAN_SERIF_SQUARE = 6
+PAN_SERIF_THIN = 7
+PAN_SERIF_BONE = 8
+PAN_SERIF_EXAGGERATED = 9
+PAN_SERIF_TRIANGLE = 10
+PAN_SERIF_NORMAL_SANS = 11
+PAN_SERIF_OBTUSE_SANS = 12
+PAN_SERIF_PERP_SANS = 13
+PAN_SERIF_FLARED = 14
+PAN_SERIF_ROUNDED = 15
+PAN_WEIGHT_VERY_LIGHT = 2
+PAN_WEIGHT_LIGHT = 3
+PAN_WEIGHT_THIN = 4
+PAN_WEIGHT_BOOK = 5
+PAN_WEIGHT_MEDIUM = 6
+PAN_WEIGHT_DEMI = 7
+PAN_WEIGHT_BOLD = 8
+PAN_WEIGHT_HEAVY = 9
+PAN_WEIGHT_BLACK = 10
+PAN_WEIGHT_NORD = 11
+PAN_PROP_OLD_STYLE = 2
+PAN_PROP_MODERN = 3
+PAN_PROP_EVEN_WIDTH = 4
+PAN_PROP_EXPANDED = 5
+PAN_PROP_CONDENSED = 6
+PAN_PROP_VERY_EXPANDED = 7
+PAN_PROP_VERY_CONDENSED = 8
+PAN_PROP_MONOSPACED = 9
+PAN_CONTRAST_NONE = 2
+PAN_CONTRAST_VERY_LOW = 3
+PAN_CONTRAST_LOW = 4
+PAN_CONTRAST_MEDIUM_LOW = 5
+PAN_CONTRAST_MEDIUM = 6
+PAN_CONTRAST_MEDIUM_HIGH = 7
+PAN_CONTRAST_HIGH = 8
+PAN_CONTRAST_VERY_HIGH = 9
+PAN_STROKE_GRADUAL_DIAG = 2
+PAN_STROKE_GRADUAL_TRAN = 3
+PAN_STROKE_GRADUAL_VERT = 4
+PAN_STROKE_GRADUAL_HORZ = 5
+PAN_STROKE_RAPID_VERT = 6
+PAN_STROKE_RAPID_HORZ = 7
+PAN_STROKE_INSTANT_VERT = 8
+PAN_STRAIGHT_ARMS_HORZ = 2
+PAN_STRAIGHT_ARMS_WEDGE = 3
+PAN_STRAIGHT_ARMS_VERT = 4
+PAN_STRAIGHT_ARMS_SINGLE_SERIF = 5
+PAN_STRAIGHT_ARMS_DOUBLE_SERIF = 6
+PAN_BENT_ARMS_HORZ = 7
+PAN_BENT_ARMS_WEDGE = 8
+PAN_BENT_ARMS_VERT = 9
+PAN_BENT_ARMS_SINGLE_SERIF = 10
+PAN_BENT_ARMS_DOUBLE_SERIF = 11
+PAN_LETT_NORMAL_CONTACT = 2
+PAN_LETT_NORMAL_WEIGHTED = 3
+PAN_LETT_NORMAL_BOXED = 4
+PAN_LETT_NORMAL_FLATTENED = 5
+PAN_LETT_NORMAL_ROUNDED = 6
+PAN_LETT_NORMAL_OFF_CENTER = 7
+PAN_LETT_NORMAL_SQUARE = 8
+PAN_LETT_OBLIQUE_CONTACT = 9
+PAN_LETT_OBLIQUE_WEIGHTED = 10
+PAN_LETT_OBLIQUE_BOXED = 11
+PAN_LETT_OBLIQUE_FLATTENED = 12
+PAN_LETT_OBLIQUE_ROUNDED = 13
+PAN_LETT_OBLIQUE_OFF_CENTER = 14
+PAN_LETT_OBLIQUE_SQUARE = 15
+PAN_MIDLINE_STANDARD_TRIMMED = 2
+PAN_MIDLINE_STANDARD_POINTED = 3
+PAN_MIDLINE_STANDARD_SERIFED = 4
+PAN_MIDLINE_HIGH_TRIMMED = 5
+PAN_MIDLINE_HIGH_POINTED = 6
+PAN_MIDLINE_HIGH_SERIFED = 7
+PAN_MIDLINE_CONSTANT_TRIMMED = 8
+PAN_MIDLINE_CONSTANT_POINTED = 9
+PAN_MIDLINE_CONSTANT_SERIFED = 10
+PAN_MIDLINE_LOW_TRIMMED = 11
+PAN_MIDLINE_LOW_POINTED = 12
+PAN_MIDLINE_LOW_SERIFED = 13
+PAN_XHEIGHT_CONSTANT_SMALL = 2
+PAN_XHEIGHT_CONSTANT_STD = 3
+PAN_XHEIGHT_CONSTANT_LARGE = 4
+PAN_XHEIGHT_DUCKING_SMALL = 5
+PAN_XHEIGHT_DUCKING_STD = 6
+PAN_XHEIGHT_DUCKING_LARGE = 7
+ELF_VENDOR_SIZE = 4
+ELF_VERSION = 0
+ELF_CULTURE_LATIN = 0
+RASTER_FONTTYPE = 1
+DEVICE_FONTTYPE = 2
+TRUETYPE_FONTTYPE = 4
def PALETTEINDEX(i):
    """Return a COLORREF referring to logical-palette entry *i*.

    Python equivalent of the PALETTEINDEX macro in wingdi.h: the
    0x01000000 flag bit tells GDI to interpret the low word as a
    palette index instead of an explicit RGB triple.
    """
    return 0x01000000 | i
+
+PC_RESERVED = 1
+PC_EXPLICIT = 2
+PC_NOCOLLAPSE = 4
def GetRValue(rgb):
    """Return the red channel (bits 0-7) of a COLORREF (0x00bbggrr) value."""
    return (rgb >> 0) & 0xFF
+
def GetGValue(rgb):
    """Return the green channel (bits 8-15) of a COLORREF (0x00bbggrr) value."""
    return (rgb >> 8) & 0xFF
+
def GetBValue(rgb):
    """Return the blue channel (bits 16-23) of a COLORREF (0x00bbggrr) value."""
    return (rgb >> 16) & 0xFF
+
+TRANSPARENT = 1
+OPAQUE = 2
+BKMODE_LAST = 2
+GM_COMPATIBLE = 1
+GM_ADVANCED = 2
+GM_LAST = 2
+PT_CLOSEFIGURE = 1
+PT_LINETO = 2
+PT_BEZIERTO = 4
+PT_MOVETO = 6
+MM_TEXT = 1
+MM_LOMETRIC = 2
+MM_HIMETRIC = 3
+MM_LOENGLISH = 4
+MM_HIENGLISH = 5
+MM_TWIPS = 6
+MM_ISOTROPIC = 7
+MM_ANISOTROPIC = 8
+MM_MIN = MM_TEXT
+MM_MAX = MM_ANISOTROPIC
+MM_MAX_FIXEDSCALE = MM_TWIPS
+ABSOLUTE = 1
+RELATIVE = 2
+WHITE_BRUSH = 0
+LTGRAY_BRUSH = 1
+GRAY_BRUSH = 2
+DKGRAY_BRUSH = 3
+BLACK_BRUSH = 4
+NULL_BRUSH = 5
+HOLLOW_BRUSH = NULL_BRUSH
+WHITE_PEN = 6
+BLACK_PEN = 7
+NULL_PEN = 8
+OEM_FIXED_FONT = 10
+ANSI_FIXED_FONT = 11
+ANSI_VAR_FONT = 12
+SYSTEM_FONT = 13
+DEVICE_DEFAULT_FONT = 14
+DEFAULT_PALETTE = 15
+SYSTEM_FIXED_FONT = 16
+STOCK_LAST = 16
+CLR_INVALID = -1
+
+# Exception/Status codes from winuser.h and winnt.h
+STATUS_WAIT_0                    = 0    
+STATUS_ABANDONED_WAIT_0          = 128    
+STATUS_USER_APC                  = 192    
+STATUS_TIMEOUT                   = 258    
+STATUS_PENDING                   = 259    
+STATUS_SEGMENT_NOTIFICATION      = 1073741829    
+STATUS_GUARD_PAGE_VIOLATION      = -2147483647    
+STATUS_DATATYPE_MISALIGNMENT     = -2147483646    
+STATUS_BREAKPOINT                = -2147483645    
+STATUS_SINGLE_STEP               = -2147483644    
+STATUS_ACCESS_VIOLATION          = -1073741819    
+STATUS_IN_PAGE_ERROR             = -1073741818    
+STATUS_INVALID_HANDLE            = -1073741816    
+STATUS_NO_MEMORY                 = -1073741801    
+STATUS_ILLEGAL_INSTRUCTION       = -1073741795    
+STATUS_NONCONTINUABLE_EXCEPTION  = -1073741787    
+STATUS_INVALID_DISPOSITION       = -1073741786    
+STATUS_ARRAY_BOUNDS_EXCEEDED     = -1073741684    
+STATUS_FLOAT_DENORMAL_OPERAND    = -1073741683    
+STATUS_FLOAT_DIVIDE_BY_ZERO      = -1073741682    
+STATUS_FLOAT_INEXACT_RESULT      = -1073741681    
+STATUS_FLOAT_INVALID_OPERATION   = -1073741680    
+STATUS_FLOAT_OVERFLOW            = -1073741679    
+STATUS_FLOAT_STACK_CHECK         = -1073741678    
+STATUS_FLOAT_UNDERFLOW           = -1073741677    
+STATUS_INTEGER_DIVIDE_BY_ZERO    = -1073741676    
+STATUS_INTEGER_OVERFLOW          = -1073741675    
+STATUS_PRIVILEGED_INSTRUCTION    = -1073741674    
+STATUS_STACK_OVERFLOW            = -1073741571    
+STATUS_CONTROL_C_EXIT            = -1073741510    
+
+
+WAIT_FAILED                      = -1
+WAIT_OBJECT_0                    = STATUS_WAIT_0 + 0
+
+WAIT_ABANDONED                      = STATUS_ABANDONED_WAIT_0 + 0 
+WAIT_ABANDONED_0                    = STATUS_ABANDONED_WAIT_0 + 0
+
+WAIT_TIMEOUT                        = STATUS_TIMEOUT
+WAIT_IO_COMPLETION                  = STATUS_USER_APC
+STILL_ACTIVE                        = STATUS_PENDING
+EXCEPTION_ACCESS_VIOLATION          = STATUS_ACCESS_VIOLATION
+EXCEPTION_DATATYPE_MISALIGNMENT     = STATUS_DATATYPE_MISALIGNMENT
+EXCEPTION_BREAKPOINT                = STATUS_BREAKPOINT
+EXCEPTION_SINGLE_STEP               = STATUS_SINGLE_STEP
+EXCEPTION_ARRAY_BOUNDS_EXCEEDED     = STATUS_ARRAY_BOUNDS_EXCEEDED
+EXCEPTION_FLT_DENORMAL_OPERAND      = STATUS_FLOAT_DENORMAL_OPERAND
+EXCEPTION_FLT_DIVIDE_BY_ZERO        = STATUS_FLOAT_DIVIDE_BY_ZERO
+EXCEPTION_FLT_INEXACT_RESULT        = STATUS_FLOAT_INEXACT_RESULT
+EXCEPTION_FLT_INVALID_OPERATION     = STATUS_FLOAT_INVALID_OPERATION
+EXCEPTION_FLT_OVERFLOW              = STATUS_FLOAT_OVERFLOW
+EXCEPTION_FLT_STACK_CHECK           = STATUS_FLOAT_STACK_CHECK
+EXCEPTION_FLT_UNDERFLOW             = STATUS_FLOAT_UNDERFLOW
+EXCEPTION_INT_DIVIDE_BY_ZERO        = STATUS_INTEGER_DIVIDE_BY_ZERO
+EXCEPTION_INT_OVERFLOW              = STATUS_INTEGER_OVERFLOW
+EXCEPTION_PRIV_INSTRUCTION          = STATUS_PRIVILEGED_INSTRUCTION
+EXCEPTION_IN_PAGE_ERROR             = STATUS_IN_PAGE_ERROR
+EXCEPTION_ILLEGAL_INSTRUCTION       = STATUS_ILLEGAL_INSTRUCTION
+EXCEPTION_NONCONTINUABLE_EXCEPTION  = STATUS_NONCONTINUABLE_EXCEPTION
+EXCEPTION_STACK_OVERFLOW            = STATUS_STACK_OVERFLOW
+EXCEPTION_INVALID_DISPOSITION       = STATUS_INVALID_DISPOSITION
+EXCEPTION_GUARD_PAGE                = STATUS_GUARD_PAGE_VIOLATION
+EXCEPTION_INVALID_HANDLE            = STATUS_INVALID_HANDLE
+CONTROL_C_EXIT                      = STATUS_CONTROL_C_EXIT
+
+# winuser.h line 8594
+# constants used with SystemParametersInfo
+SPI_GETBEEP = 1
+SPI_SETBEEP = 2
+SPI_GETMOUSE = 3
+SPI_SETMOUSE = 4
+SPI_GETBORDER = 5
+SPI_SETBORDER = 6
+SPI_GETKEYBOARDSPEED = 10
+SPI_SETKEYBOARDSPEED = 11
+SPI_LANGDRIVER = 12
+SPI_ICONHORIZONTALSPACING = 13
+SPI_GETSCREENSAVETIMEOUT = 14
+SPI_SETSCREENSAVETIMEOUT = 15
+SPI_GETSCREENSAVEACTIVE = 16
+SPI_SETSCREENSAVEACTIVE = 17
+SPI_GETGRIDGRANULARITY = 18
+SPI_SETGRIDGRANULARITY = 19
+SPI_SETDESKWALLPAPER = 20
+SPI_SETDESKPATTERN = 21
+SPI_GETKEYBOARDDELAY = 22
+SPI_SETKEYBOARDDELAY = 23
+SPI_ICONVERTICALSPACING = 24
+SPI_GETICONTITLEWRAP = 25
+SPI_SETICONTITLEWRAP = 26
+SPI_GETMENUDROPALIGNMENT = 27
+SPI_SETMENUDROPALIGNMENT = 28
+SPI_SETDOUBLECLKWIDTH = 29
+SPI_SETDOUBLECLKHEIGHT = 30
+SPI_GETICONTITLELOGFONT = 31
+SPI_SETDOUBLECLICKTIME = 32
+SPI_SETMOUSEBUTTONSWAP = 33
+SPI_SETICONTITLELOGFONT = 34
+SPI_GETFASTTASKSWITCH = 35
+SPI_SETFASTTASKSWITCH = 36
+SPI_SETDRAGFULLWINDOWS = 37
+SPI_GETDRAGFULLWINDOWS = 38
+SPI_GETNONCLIENTMETRICS = 41
+SPI_SETNONCLIENTMETRICS = 42
+SPI_GETMINIMIZEDMETRICS = 43
+SPI_SETMINIMIZEDMETRICS = 44
+SPI_GETICONMETRICS = 45
+SPI_SETICONMETRICS = 46
+SPI_SETWORKAREA = 47
+SPI_GETWORKAREA = 48
+SPI_SETPENWINDOWS = 49
+SPI_GETFILTERKEYS = 50
+SPI_SETFILTERKEYS = 51
+SPI_GETTOGGLEKEYS = 52
+SPI_SETTOGGLEKEYS = 53
+SPI_GETMOUSEKEYS = 54
+SPI_SETMOUSEKEYS = 55
+SPI_GETSHOWSOUNDS = 56
+SPI_SETSHOWSOUNDS = 57
+SPI_GETSTICKYKEYS = 58
+SPI_SETSTICKYKEYS = 59
+SPI_GETACCESSTIMEOUT = 60
+SPI_SETACCESSTIMEOUT = 61
+SPI_GETSERIALKEYS = 62
+SPI_SETSERIALKEYS = 63
+SPI_GETSOUNDSENTRY = 64
+SPI_SETSOUNDSENTRY = 65
+SPI_GETHIGHCONTRAST = 66
+SPI_SETHIGHCONTRAST = 67
+SPI_GETKEYBOARDPREF = 68
+SPI_SETKEYBOARDPREF = 69
+SPI_GETSCREENREADER = 70
+SPI_SETSCREENREADER = 71
+SPI_GETANIMATION = 72
+SPI_SETANIMATION = 73
+SPI_GETFONTSMOOTHING = 74
+SPI_SETFONTSMOOTHING = 75
+SPI_SETDRAGWIDTH = 76
+SPI_SETDRAGHEIGHT = 77
+SPI_SETHANDHELD = 78
+SPI_GETLOWPOWERTIMEOUT = 79
+SPI_GETPOWEROFFTIMEOUT = 80
+SPI_SETLOWPOWERTIMEOUT = 81
+SPI_SETPOWEROFFTIMEOUT = 82
+SPI_GETLOWPOWERACTIVE = 83
+SPI_GETPOWEROFFACTIVE = 84
+SPI_SETLOWPOWERACTIVE = 85
+SPI_SETPOWEROFFACTIVE = 86
+SPI_SETCURSORS = 87
+SPI_SETICONS = 88
+SPI_GETDEFAULTINPUTLANG = 89
+SPI_SETDEFAULTINPUTLANG = 90
+SPI_SETLANGTOGGLE = 91
+SPI_GETWINDOWSEXTENSION = 92
+SPI_SETMOUSETRAILS = 93
+SPI_GETMOUSETRAILS = 94
+SPI_GETSNAPTODEFBUTTON = 95
+SPI_SETSNAPTODEFBUTTON = 96
+SPI_SETSCREENSAVERRUNNING = 97
+SPI_SCREENSAVERRUNNING = SPI_SETSCREENSAVERRUNNING
+SPI_GETMOUSEHOVERWIDTH = 98
+SPI_SETMOUSEHOVERWIDTH = 99
+SPI_GETMOUSEHOVERHEIGHT = 100
+SPI_SETMOUSEHOVERHEIGHT = 101
+SPI_GETMOUSEHOVERTIME = 102
+SPI_SETMOUSEHOVERTIME = 103
+SPI_GETWHEELSCROLLLINES = 104
+SPI_SETWHEELSCROLLLINES = 105
+SPI_GETMENUSHOWDELAY = 106
+SPI_SETMENUSHOWDELAY = 107
+
+SPI_GETSHOWIMEUI = 110
+SPI_SETSHOWIMEUI = 111
+SPI_GETMOUSESPEED = 112
+SPI_SETMOUSESPEED = 113
+SPI_GETSCREENSAVERRUNNING = 114
+SPI_GETDESKWALLPAPER = 115
+
+SPI_GETACTIVEWINDOWTRACKING = 4096
+SPI_SETACTIVEWINDOWTRACKING = 4097
+SPI_GETMENUANIMATION = 4098
+SPI_SETMENUANIMATION = 4099
+SPI_GETCOMBOBOXANIMATION = 4100
+SPI_SETCOMBOBOXANIMATION = 4101
+SPI_GETLISTBOXSMOOTHSCROLLING = 4102
+SPI_SETLISTBOXSMOOTHSCROLLING = 4103
+SPI_GETGRADIENTCAPTIONS = 4104
+SPI_SETGRADIENTCAPTIONS = 4105
+SPI_GETKEYBOARDCUES = 4106
+SPI_SETKEYBOARDCUES = 4107
SPI_GETMENUUNDERLINES = 4106  # alias of SPI_GETKEYBOARDCUES (same value in winuser.h)
SPI_SETMENUUNDERLINES = 4107  # alias of SPI_SETKEYBOARDCUES (same value in winuser.h)
+SPI_GETACTIVEWNDTRKZORDER = 4108
+SPI_SETACTIVEWNDTRKZORDER = 4109
+SPI_GETHOTTRACKING = 4110
+SPI_SETHOTTRACKING = 4111
+
+SPI_GETMENUFADE = 4114
+SPI_SETMENUFADE = 4115
+SPI_GETSELECTIONFADE = 4116
+SPI_SETSELECTIONFADE = 4117
+SPI_GETTOOLTIPANIMATION = 4118
+SPI_SETTOOLTIPANIMATION = 4119
+SPI_GETTOOLTIPFADE = 4120
+SPI_SETTOOLTIPFADE = 4121
+SPI_GETCURSORSHADOW = 4122
+SPI_SETCURSORSHADOW = 4123
+SPI_GETMOUSESONAR = 4124
+SPI_SETMOUSESONAR = 4125
+SPI_GETMOUSECLICKLOCK = 4126
+SPI_SETMOUSECLICKLOCK = 4127
+SPI_GETMOUSEVANISH = 4128
+SPI_SETMOUSEVANISH = 4129
+SPI_GETFLATMENU = 4130
+SPI_SETFLATMENU = 4131
+SPI_GETDROPSHADOW = 4132
+SPI_SETDROPSHADOW = 4133
+SPI_GETBLOCKSENDINPUTRESETS = 4134
+SPI_SETBLOCKSENDINPUTRESETS = 4135
+SPI_GETUIEFFECTS = 4158
+SPI_SETUIEFFECTS = 4159
+
+SPI_GETFOREGROUNDLOCKTIMEOUT = 8192
+SPI_SETFOREGROUNDLOCKTIMEOUT = 8193
+SPI_GETACTIVEWNDTRKTIMEOUT = 8194
+SPI_SETACTIVEWNDTRKTIMEOUT = 8195
+SPI_GETFOREGROUNDFLASHCOUNT = 8196
+SPI_SETFOREGROUNDFLASHCOUNT = 8197
+SPI_GETCARETWIDTH = 8198
+SPI_SETCARETWIDTH = 8199
+SPI_GETMOUSECLICKLOCKTIME = 8200
+SPI_SETMOUSECLICKLOCKTIME = 8201
+SPI_GETFONTSMOOTHINGTYPE = 8202
+SPI_SETFONTSMOOTHINGTYPE = 8203
+SPI_GETFONTSMOOTHINGCONTRAST = 8204
+SPI_SETFONTSMOOTHINGCONTRAST = 8205
+SPI_GETFOCUSBORDERWIDTH = 8206
+SPI_SETFOCUSBORDERWIDTH = 8207
+SPI_GETFOCUSBORDERHEIGHT = 8208
+SPI_SETFOCUSBORDERHEIGHT = 8209
+SPI_GETFONTSMOOTHINGORIENTATION = 8210
+SPI_SETFONTSMOOTHINGORIENTATION = 8211
+
+# fWinIni flags for SystemParametersInfo
+SPIF_UPDATEINIFILE = 1
+SPIF_SENDWININICHANGE = 2
+SPIF_SENDCHANGE = SPIF_SENDWININICHANGE
+
+# used with SystemParametersInfo and SPI_GETFONTSMOOTHINGTYPE/SPI_SETFONTSMOOTHINGTYPE
+FE_FONTSMOOTHINGSTANDARD = 1
+FE_FONTSMOOTHINGCLEARTYPE = 2
+FE_FONTSMOOTHINGDOCKING = 32768
+
+METRICS_USEDEFAULT = -1
+ARW_BOTTOMLEFT = 0
+ARW_BOTTOMRIGHT = 1
+ARW_TOPLEFT = 2
+ARW_TOPRIGHT = 3
+ARW_STARTMASK = 3
+ARW_STARTRIGHT = 1
+ARW_STARTTOP = 2
+ARW_LEFT = 0
+ARW_RIGHT = 0
+ARW_UP = 4
+ARW_DOWN = 4
+ARW_HIDE = 8
+#ARW_VALID = 0x000F
+SERKF_SERIALKEYSON = 1
+SERKF_AVAILABLE = 2
+SERKF_INDICATOR = 4
+HCF_HIGHCONTRASTON = 1
+HCF_AVAILABLE = 2
+HCF_HOTKEYACTIVE = 4
+HCF_CONFIRMHOTKEY = 8
+HCF_HOTKEYSOUND = 16
+HCF_INDICATOR = 32
+HCF_HOTKEYAVAILABLE = 64
+CDS_UPDATEREGISTRY = 1
+CDS_TEST = 2
+CDS_FULLSCREEN = 4
+CDS_GLOBAL = 8
+CDS_SET_PRIMARY = 16
+CDS_RESET = 1073741824
+CDS_SETRECT = 536870912
+CDS_NORESET = 268435456
+DISP_CHANGE_SUCCESSFUL = 0
+DISP_CHANGE_RESTART = 1
+DISP_CHANGE_FAILED = -1
+DISP_CHANGE_BADMODE = -2
+DISP_CHANGE_NOTUPDATED = -3
+DISP_CHANGE_BADFLAGS = -4
+DISP_CHANGE_BADPARAM = -5
+ENUM_CURRENT_SETTINGS = -1
+ENUM_REGISTRY_SETTINGS = -2
+FKF_FILTERKEYSON = 1
+FKF_AVAILABLE = 2
+FKF_HOTKEYACTIVE = 4
+FKF_CONFIRMHOTKEY = 8
+FKF_HOTKEYSOUND = 16
+FKF_INDICATOR = 32
+FKF_CLICKON = 64
+SKF_STICKYKEYSON = 1
+SKF_AVAILABLE = 2
+SKF_HOTKEYACTIVE = 4
+SKF_CONFIRMHOTKEY = 8
+SKF_HOTKEYSOUND = 16
+SKF_INDICATOR = 32
+SKF_AUDIBLEFEEDBACK = 64
+SKF_TRISTATE = 128
+SKF_TWOKEYSOFF = 256
+SKF_LALTLATCHED = 268435456
+SKF_LCTLLATCHED = 67108864
+SKF_LSHIFTLATCHED = 16777216
+SKF_RALTLATCHED = 536870912
+SKF_RCTLLATCHED = 134217728
+SKF_RSHIFTLATCHED = 33554432
+SKF_LWINLATCHED = 1073741824
+SKF_RWINLATCHED = -2147483648
+SKF_LALTLOCKED = 1048576
+SKF_LCTLLOCKED = 262144
+SKF_LSHIFTLOCKED = 65536
+SKF_RALTLOCKED = 2097152
+SKF_RCTLLOCKED = 524288
+SKF_RSHIFTLOCKED = 131072
+SKF_LWINLOCKED = 4194304
+SKF_RWINLOCKED = 8388608
+MKF_MOUSEKEYSON = 1
+MKF_AVAILABLE = 2
+MKF_HOTKEYACTIVE = 4
+MKF_CONFIRMHOTKEY = 8
+MKF_HOTKEYSOUND = 16
+MKF_INDICATOR = 32
+MKF_MODIFIERS = 64
+MKF_REPLACENUMBERS = 128
+MKF_LEFTBUTTONSEL = 268435456
+MKF_RIGHTBUTTONSEL = 536870912
+MKF_LEFTBUTTONDOWN = 16777216
+MKF_RIGHTBUTTONDOWN = 33554432
+MKF_MOUSEMODE = -2147483648
+ATF_TIMEOUTON = 1
+ATF_ONOFFFEEDBACK = 2
+SSGF_NONE = 0
+SSGF_DISPLAY = 3
+SSTF_NONE = 0
+SSTF_CHARS = 1
+SSTF_BORDER = 2
+SSTF_DISPLAY = 3
+SSWF_NONE = 0
+SSWF_TITLE = 1
+SSWF_WINDOW = 2
+SSWF_DISPLAY = 3
+SSWF_CUSTOM = 4
+SSF_SOUNDSENTRYON = 1
+SSF_AVAILABLE = 2
+SSF_INDICATOR = 4
+TKF_TOGGLEKEYSON = 1
+TKF_AVAILABLE = 2
+TKF_HOTKEYACTIVE = 4
+TKF_CONFIRMHOTKEY = 8
+TKF_HOTKEYSOUND = 16
+TKF_INDICATOR = 32
+SLE_ERROR = 1
+SLE_MINORERROR = 2
+SLE_WARNING = 3
+MONITOR_DEFAULTTONULL = 0
+MONITOR_DEFAULTTOPRIMARY = 1
+MONITOR_DEFAULTTONEAREST = 2
+MONITORINFOF_PRIMARY = 1
CCHDEVICENAME = 32  # re-declared here by the winuser.h section; same value as the earlier wingdi.h definition
+CHILDID_SELF = 0
+INDEXID_OBJECT = 0
+INDEXID_CONTAINER = 0
+OBJID_WINDOW = 0
+OBJID_SYSMENU = -1
+OBJID_TITLEBAR = -2
+OBJID_MENU = -3
+OBJID_CLIENT = -4
+OBJID_VSCROLL = -5
+OBJID_HSCROLL = -6
+OBJID_SIZEGRIP = -7
+OBJID_CARET = -8
+OBJID_CURSOR = -9
+OBJID_ALERT = -10
+OBJID_SOUND = -11
+EVENT_MIN = 1
+EVENT_MAX = 2147483647
+EVENT_SYSTEM_SOUND = 1
+EVENT_SYSTEM_ALERT = 2
+EVENT_SYSTEM_FOREGROUND = 3
+EVENT_SYSTEM_MENUSTART = 4
+EVENT_SYSTEM_MENUEND = 5
+EVENT_SYSTEM_MENUPOPUPSTART = 6
+EVENT_SYSTEM_MENUPOPUPEND = 7
+EVENT_SYSTEM_CAPTURESTART = 8
+EVENT_SYSTEM_CAPTUREEND = 9
+EVENT_SYSTEM_MOVESIZESTART = 10
+EVENT_SYSTEM_MOVESIZEEND = 11
+EVENT_SYSTEM_CONTEXTHELPSTART = 12
+EVENT_SYSTEM_CONTEXTHELPEND = 13
+EVENT_SYSTEM_DRAGDROPSTART = 14
+EVENT_SYSTEM_DRAGDROPEND = 15
+EVENT_SYSTEM_DIALOGSTART = 16
+EVENT_SYSTEM_DIALOGEND = 17
+EVENT_SYSTEM_SCROLLINGSTART = 18
+EVENT_SYSTEM_SCROLLINGEND = 19
+EVENT_SYSTEM_SWITCHSTART = 20
+EVENT_SYSTEM_SWITCHEND = 21
+EVENT_SYSTEM_MINIMIZESTART = 22
+EVENT_SYSTEM_MINIMIZEEND = 23
+EVENT_OBJECT_CREATE = 32768
+EVENT_OBJECT_DESTROY = 32769
+EVENT_OBJECT_SHOW = 32770
+EVENT_OBJECT_HIDE = 32771
+EVENT_OBJECT_REORDER = 32772
+EVENT_OBJECT_FOCUS = 32773
+EVENT_OBJECT_SELECTION = 32774
+EVENT_OBJECT_SELECTIONADD = 32775
+EVENT_OBJECT_SELECTIONREMOVE = 32776
+EVENT_OBJECT_SELECTIONWITHIN = 32777
+EVENT_OBJECT_STATECHANGE = 32778
+EVENT_OBJECT_LOCATIONCHANGE = 32779
+EVENT_OBJECT_NAMECHANGE = 32780
+EVENT_OBJECT_DESCRIPTIONCHANGE = 32781
+EVENT_OBJECT_VALUECHANGE = 32782
+EVENT_OBJECT_PARENTCHANGE = 32783
+EVENT_OBJECT_HELPCHANGE = 32784
+EVENT_OBJECT_DEFACTIONCHANGE = 32785
+EVENT_OBJECT_ACCELERATORCHANGE = 32786
+SOUND_SYSTEM_STARTUP = 1
+SOUND_SYSTEM_SHUTDOWN = 2
+SOUND_SYSTEM_BEEP = 3
+SOUND_SYSTEM_ERROR = 4
+SOUND_SYSTEM_QUESTION = 5
+SOUND_SYSTEM_WARNING = 6
+SOUND_SYSTEM_INFORMATION = 7
+SOUND_SYSTEM_MAXIMIZE = 8
+SOUND_SYSTEM_MINIMIZE = 9
+SOUND_SYSTEM_RESTOREUP = 10
+SOUND_SYSTEM_RESTOREDOWN = 11
+SOUND_SYSTEM_APPSTART = 12
+SOUND_SYSTEM_FAULT = 13
+SOUND_SYSTEM_APPEND = 14
+SOUND_SYSTEM_MENUCOMMAND = 15
+SOUND_SYSTEM_MENUPOPUP = 16
+CSOUND_SYSTEM = 16
+ALERT_SYSTEM_INFORMATIONAL = 1
+ALERT_SYSTEM_WARNING = 2
+ALERT_SYSTEM_ERROR = 3
+ALERT_SYSTEM_QUERY = 4
+ALERT_SYSTEM_CRITICAL = 5
+CALERT_SYSTEM = 6
+WINEVENT_OUTOFCONTEXT = 0
+WINEVENT_SKIPOWNTHREAD = 1
+WINEVENT_SKIPOWNPROCESS = 2
+WINEVENT_INCONTEXT = 4
+GUI_CARETBLINKING = 1
+GUI_INMOVESIZE = 2
+GUI_INMENUMODE = 4
+GUI_SYSTEMMENUMODE = 8
+GUI_POPUPMENUMODE = 16
+STATE_SYSTEM_UNAVAILABLE = 1
+STATE_SYSTEM_SELECTED = 2
+STATE_SYSTEM_FOCUSED = 4
+STATE_SYSTEM_PRESSED = 8
+STATE_SYSTEM_CHECKED = 16
+STATE_SYSTEM_MIXED = 32
+STATE_SYSTEM_READONLY = 64
+STATE_SYSTEM_HOTTRACKED = 128
+STATE_SYSTEM_DEFAULT = 256
+STATE_SYSTEM_EXPANDED = 512
+STATE_SYSTEM_COLLAPSED = 1024
+STATE_SYSTEM_BUSY = 2048
+STATE_SYSTEM_FLOATING = 4096
+STATE_SYSTEM_MARQUEED = 8192
+STATE_SYSTEM_ANIMATED = 16384
+STATE_SYSTEM_INVISIBLE = 32768
+STATE_SYSTEM_OFFSCREEN = 65536
+STATE_SYSTEM_SIZEABLE = 131072
+STATE_SYSTEM_MOVEABLE = 262144
+STATE_SYSTEM_SELFVOICING = 524288
+STATE_SYSTEM_FOCUSABLE = 1048576
+STATE_SYSTEM_SELECTABLE = 2097152
+STATE_SYSTEM_LINKED = 4194304
+STATE_SYSTEM_TRAVERSED = 8388608
+STATE_SYSTEM_MULTISELECTABLE = 16777216
+STATE_SYSTEM_EXTSELECTABLE = 33554432
+STATE_SYSTEM_ALERT_LOW = 67108864
+STATE_SYSTEM_ALERT_MEDIUM = 134217728
+STATE_SYSTEM_ALERT_HIGH = 268435456
+STATE_SYSTEM_VALID = 536870911
+CCHILDREN_TITLEBAR = 5
+CCHILDREN_SCROLLBAR = 5
+CURSOR_SHOWING = 1
+WS_ACTIVECAPTION = 1
+GA_MIC = 1
+GA_PARENT = 1
+GA_ROOT = 2
+GA_ROOTOWNER = 3
+GA_MAC = 4
+
+# winuser.h line 1979
+BF_LEFT = 1
+BF_TOP = 2
+BF_RIGHT = 4
+BF_BOTTOM = 8
+BF_TOPLEFT = (BF_TOP | BF_LEFT)
+BF_TOPRIGHT = (BF_TOP | BF_RIGHT)
+BF_BOTTOMLEFT = (BF_BOTTOM | BF_LEFT)
+BF_BOTTOMRIGHT = (BF_BOTTOM | BF_RIGHT)
+BF_RECT = (BF_LEFT | BF_TOP | BF_RIGHT | BF_BOTTOM)
+BF_DIAGONAL = 16
+BF_DIAGONAL_ENDTOPRIGHT = (BF_DIAGONAL | BF_TOP | BF_RIGHT)
+BF_DIAGONAL_ENDTOPLEFT = (BF_DIAGONAL | BF_TOP | BF_LEFT)
+BF_DIAGONAL_ENDBOTTOMLEFT = (BF_DIAGONAL | BF_BOTTOM | BF_LEFT)
+BF_DIAGONAL_ENDBOTTOMRIGHT = (BF_DIAGONAL | BF_BOTTOM | BF_RIGHT)
+BF_MIDDLE = 2048
+BF_SOFT = 4096
+BF_ADJUST = 8192
+BF_FLAT = 16384
+BF_MONO = 32768
+DFC_CAPTION = 1
+DFC_MENU = 2
+DFC_SCROLL = 3
+DFC_BUTTON = 4
+DFC_POPUPMENU = 5
+DFCS_CAPTIONCLOSE = 0
+DFCS_CAPTIONMIN = 1
+DFCS_CAPTIONMAX = 2
+DFCS_CAPTIONRESTORE = 3
+DFCS_CAPTIONHELP = 4
+DFCS_MENUARROW = 0
+DFCS_MENUCHECK = 1
+DFCS_MENUBULLET = 2
+DFCS_MENUARROWRIGHT = 4
+DFCS_SCROLLUP = 0
+DFCS_SCROLLDOWN = 1
+DFCS_SCROLLLEFT = 2
+DFCS_SCROLLRIGHT = 3
+DFCS_SCROLLCOMBOBOX = 5
+DFCS_SCROLLSIZEGRIP = 8
+DFCS_SCROLLSIZEGRIPRIGHT = 16
+DFCS_BUTTONCHECK = 0
+DFCS_BUTTONRADIOIMAGE = 1
+DFCS_BUTTONRADIOMASK = 2
+DFCS_BUTTONRADIO = 4
+DFCS_BUTTON3STATE = 8
+DFCS_BUTTONPUSH = 16
+DFCS_INACTIVE = 256
+DFCS_PUSHED = 512
+DFCS_CHECKED = 1024
+DFCS_TRANSPARENT = 2048
+DFCS_HOT = 4096
+DFCS_ADJUSTRECT = 8192
+DFCS_FLAT = 16384
+DFCS_MONO = 32768
+DC_ACTIVE = 1
+DC_SMALLCAP = 2
+DC_ICON = 4
+DC_TEXT = 8
+DC_INBUTTON = 16
+DC_GRADIENT = 32
+IDANI_OPEN = 1
+IDANI_CLOSE = 2
+IDANI_CAPTION = 3
+CF_TEXT = 1
+CF_BITMAP = 2
+CF_METAFILEPICT = 3
+CF_SYLK = 4
+CF_DIF = 5
+CF_TIFF = 6
+CF_OEMTEXT = 7
+CF_DIB = 8
+CF_PALETTE = 9
+CF_PENDATA = 10
+CF_RIFF = 11
+CF_WAVE = 12
+CF_UNICODETEXT = 13
+CF_ENHMETAFILE = 14
+CF_HDROP = 15
+CF_LOCALE = 16
+CF_MAX = 17
+CF_OWNERDISPLAY = 128
+CF_DSPTEXT = 129
+CF_DSPBITMAP = 130
+CF_DSPMETAFILEPICT = 131
+CF_DSPENHMETAFILE = 142
+CF_PRIVATEFIRST = 512
+CF_PRIVATELAST = 767
+CF_GDIOBJFIRST = 768
+CF_GDIOBJLAST = 1023
+FVIRTKEY =1
+FNOINVERT = 2
+FSHIFT = 4
+FCONTROL = 8
+FALT = 16
+WPF_SETMINPOSITION = 1
+WPF_RESTORETOMAXIMIZED = 2
+ODT_MENU = 1
+ODT_LISTBOX = 2
+ODT_COMBOBOX = 3
+ODT_BUTTON = 4
+ODT_STATIC = 5
+ODA_DRAWENTIRE = 1
+ODA_SELECT = 2
+ODA_FOCUS = 4
+ODS_SELECTED = 1
+ODS_GRAYED = 2
+ODS_DISABLED = 4
+ODS_CHECKED = 8
+ODS_FOCUS = 16
+ODS_DEFAULT = 32
+ODS_COMBOBOXEDIT = 4096
+ODS_HOTLIGHT = 64
+ODS_INACTIVE = 128
+PM_NOREMOVE = 0
+PM_REMOVE = 1
+PM_NOYIELD = 2
+MOD_ALT = 1
+MOD_CONTROL = 2
+MOD_SHIFT = 4
+MOD_WIN = 8
+IDHOT_SNAPWINDOW = (-1)
+IDHOT_SNAPDESKTOP = (-2)
+#EW_RESTARTWINDOWS = 0x0042
+#EW_REBOOTSYSTEM = 0x0043
+#EW_EXITANDEXECAPP = 0x0044
+ENDSESSION_LOGOFF = -2147483648
+EWX_LOGOFF = 0
+EWX_SHUTDOWN = 1
+EWX_REBOOT = 2
+EWX_FORCE = 4
+EWX_POWEROFF = 8
+EWX_FORCEIFHUNG = 16
+BSM_ALLCOMPONENTS = 0
+BSM_VXDS = 1
+BSM_NETDRIVER = 2
+BSM_INSTALLABLEDRIVERS = 4
+BSM_APPLICATIONS = 8
+BSM_ALLDESKTOPS = 16
+BSF_QUERY = 1
+BSF_IGNORECURRENTTASK = 2
+BSF_FLUSHDISK = 4
+BSF_NOHANG = 8
+BSF_POSTMESSAGE = 16
+BSF_FORCEIFHUNG = 32
+BSF_NOTIMEOUTIFNOTHUNG = 64
+BROADCAST_QUERY_DENY = 1112363332  # Return this value to deny a query.
+
+DBWF_LPARAMPOINTER = 32768
+
+# winuser.h line 3232
+SWP_NOSIZE = 1
+SWP_NOMOVE = 2
+SWP_NOZORDER = 4
+SWP_NOREDRAW = 8
+SWP_NOACTIVATE = 16
+SWP_FRAMECHANGED = 32
+SWP_SHOWWINDOW = 64
+SWP_HIDEWINDOW = 128
+SWP_NOCOPYBITS = 256
+SWP_NOOWNERZORDER = 512
+SWP_NOSENDCHANGING = 1024
+SWP_DRAWFRAME = SWP_FRAMECHANGED
+SWP_NOREPOSITION = SWP_NOOWNERZORDER
+SWP_DEFERERASE = 8192
+SWP_ASYNCWINDOWPOS = 16384
+
+DLGWINDOWEXTRA = 30
+# winuser.h line 4249
+KEYEVENTF_EXTENDEDKEY = 1
+KEYEVENTF_KEYUP = 2
+MOUSEEVENTF_MOVE = 1
+MOUSEEVENTF_LEFTDOWN = 2
+MOUSEEVENTF_LEFTUP = 4
+MOUSEEVENTF_RIGHTDOWN = 8
+MOUSEEVENTF_RIGHTUP = 16
+MOUSEEVENTF_MIDDLEDOWN = 32
+MOUSEEVENTF_MIDDLEUP = 64
+MOUSEEVENTF_ABSOLUTE = 32768
+INPUT_MOUSE = 0
+INPUT_KEYBOARD = 1
+INPUT_HARDWARE = 2
+MWMO_WAITALL = 1
+MWMO_ALERTABLE = 2
+MWMO_INPUTAVAILABLE = 4
+QS_KEY = 1
+QS_MOUSEMOVE = 2
+QS_MOUSEBUTTON = 4
+QS_POSTMESSAGE = 8
+QS_TIMER = 16
+QS_PAINT = 32
+QS_SENDMESSAGE = 64
+QS_HOTKEY = 128
+QS_MOUSE = (QS_MOUSEMOVE     | \
+                            QS_MOUSEBUTTON)
+QS_INPUT = (QS_MOUSE         | \
+                            QS_KEY)
+QS_ALLEVENTS = (QS_INPUT         | \
+                            QS_POSTMESSAGE   | \
+                            QS_TIMER         | \
+                            QS_PAINT         | \
+                            QS_HOTKEY)
+QS_ALLINPUT = (QS_INPUT         | \
+                            QS_POSTMESSAGE   | \
+                            QS_TIMER         | \
+                            QS_PAINT         | \
+                            QS_HOTKEY        | \
+                            QS_SENDMESSAGE)
+
+
+IMN_CLOSESTATUSWINDOW = 1
+IMN_OPENSTATUSWINDOW = 2
+IMN_CHANGECANDIDATE = 3
+IMN_CLOSECANDIDATE = 4
+IMN_OPENCANDIDATE = 5
+IMN_SETCONVERSIONMODE = 6
+IMN_SETSENTENCEMODE = 7
+IMN_SETOPENSTATUS = 8
+IMN_SETCANDIDATEPOS = 9
+IMN_SETCOMPOSITIONFONT = 10
+IMN_SETCOMPOSITIONWINDOW = 11
+IMN_SETSTATUSWINDOWPOS = 12
+IMN_GUIDELINE = 13
+IMN_PRIVATE = 14
+
+# winuser.h line 8518
+HELP_CONTEXT = 1
+HELP_QUIT = 2
+HELP_INDEX = 3
+HELP_CONTENTS = 3
+HELP_HELPONHELP = 4
+HELP_SETINDEX = 5
+HELP_SETCONTENTS = 5
+HELP_CONTEXTPOPUP = 8
+HELP_FORCEFILE = 9
+HELP_KEY = 257
+HELP_COMMAND = 258
+HELP_PARTIALKEY = 261
+HELP_MULTIKEY = 513
+HELP_SETWINPOS = 515
+HELP_CONTEXTMENU = 10
+HELP_FINDER = 11
+HELP_WM_HELP = 12
+HELP_SETPOPUP_POS = 13
+HELP_TCARD = 32768
+HELP_TCARD_DATA = 16
+HELP_TCARD_OTHER_CALLER = 17
+IDH_NO_HELP = 28440
+IDH_MISSING_CONTEXT = 28441 # Control doesn't have matching help context
+IDH_GENERIC_HELP_BUTTON = 28442 # Property sheet help button
+IDH_OK = 28443
+IDH_CANCEL = 28444
+IDH_HELP = 28445
+GR_GDIOBJECTS = 0      # Count of GDI objects
+GR_USEROBJECTS = 1     # Count of USER objects
+# Generated by h2py from \msvcnt\include\wingdi.h
+# manually added (missed by generation somehow!)
+SRCCOPY		= 13369376	# dest = source
+SRCPAINT	= 15597702	# dest = source OR dest
+SRCAND		= 8913094	# dest = source AND dest
+SRCINVERT	= 6684742	# dest = source XOR dest
+SRCERASE	= 4457256	# dest = source AND (NOT dest )
+NOTSRCCOPY	= 3342344	# dest = (NOT source)
+NOTSRCERASE	= 1114278	# dest = (NOT src) AND (NOT dest)
+MERGECOPY	= 12583114	# dest = (source AND pattern)
+MERGEPAINT	= 12255782	# dest = (NOT source) OR dest
+PATCOPY		= 15728673	# dest = pattern
+PATPAINT	= 16452105	# dest = DPSnoo
+PATINVERT	= 5898313	# dest = pattern XOR dest
+DSTINVERT	= 5570569	# dest = (NOT dest)
+BLACKNESS	= 66	# dest = BLACK
+WHITENESS	= 16711778	# dest = WHITE
+
+# hacked and split manually by mhammond.
+R2_BLACK = 1
+R2_NOTMERGEPEN = 2
+R2_MASKNOTPEN = 3
+R2_NOTCOPYPEN = 4
+R2_MASKPENNOT = 5
+R2_NOT = 6
+R2_XORPEN = 7
+R2_NOTMASKPEN = 8
+R2_MASKPEN = 9
+R2_NOTXORPEN = 10
+R2_NOP = 11
+R2_MERGENOTPEN = 12
+R2_COPYPEN = 13
+R2_MERGEPENNOT = 14
+R2_MERGEPEN = 15
+R2_WHITE = 16
+R2_LAST = 16
+GDI_ERROR = (-1)
+ERROR = 0
+NULLREGION = 1
+SIMPLEREGION = 2
+COMPLEXREGION = 3
+RGN_ERROR = ERROR
+RGN_AND = 1
+RGN_OR = 2
+RGN_XOR = 3
+RGN_DIFF = 4
+RGN_COPY = 5
+RGN_MIN = RGN_AND
+RGN_MAX = RGN_COPY
+BLACKONWHITE = 1
+WHITEONBLACK = 2
+COLORONCOLOR = 3
+HALFTONE = 4
+MAXSTRETCHBLTMODE = 4
+ALTERNATE = 1
+WINDING = 2
+POLYFILL_LAST = 2
+TA_NOUPDATECP = 0
+TA_UPDATECP = 1
+TA_LEFT = 0
+TA_RIGHT = 2
+TA_CENTER = 6
+TA_TOP = 0
+TA_BOTTOM = 8
+TA_BASELINE = 24
+TA_MASK = (TA_BASELINE+TA_CENTER+TA_UPDATECP)
+VTA_BASELINE = TA_BASELINE
+VTA_LEFT = TA_BOTTOM
+VTA_RIGHT = TA_TOP
+VTA_CENTER = TA_CENTER
+VTA_BOTTOM = TA_RIGHT
+VTA_TOP = TA_LEFT
+ETO_GRAYED = 1
+ETO_OPAQUE = 2
+ETO_CLIPPED = 4
+ASPECT_FILTERING = 1
+DCB_RESET = 1
+DCB_ACCUMULATE = 2
+DCB_DIRTY = DCB_ACCUMULATE
+DCB_SET = (DCB_RESET | DCB_ACCUMULATE)
+DCB_ENABLE = 4
+DCB_DISABLE = 8
+META_SETBKCOLOR = 513
+META_SETBKMODE = 258
+META_SETMAPMODE = 259
+META_SETROP2 = 260
+META_SETRELABS = 261
+META_SETPOLYFILLMODE = 262
+META_SETSTRETCHBLTMODE = 263
+META_SETTEXTCHAREXTRA = 264
+META_SETTEXTCOLOR = 521
+META_SETTEXTJUSTIFICATION = 522
+META_SETWINDOWORG = 523
+META_SETWINDOWEXT = 524
+META_SETVIEWPORTORG = 525
+META_SETVIEWPORTEXT = 526
+META_OFFSETWINDOWORG = 527
+META_SCALEWINDOWEXT = 1040
+META_OFFSETVIEWPORTORG = 529
+META_SCALEVIEWPORTEXT = 1042
+META_LINETO = 531
+META_MOVETO = 532
+META_EXCLUDECLIPRECT = 1045
+META_INTERSECTCLIPRECT = 1046
+META_ARC = 2071
+META_ELLIPSE = 1048
+META_FLOODFILL = 1049
+META_PIE = 2074
+META_RECTANGLE = 1051
+META_ROUNDRECT = 1564
+META_PATBLT = 1565
+META_SAVEDC = 30
+META_SETPIXEL = 1055
+META_OFFSETCLIPRGN = 544
+META_TEXTOUT = 1313
+META_BITBLT = 2338
+META_STRETCHBLT = 2851
+META_POLYGON = 804
+META_POLYLINE = 805
+META_ESCAPE = 1574
+META_RESTOREDC = 295
+META_FILLREGION = 552
+META_FRAMEREGION = 1065
+META_INVERTREGION = 298
+META_PAINTREGION = 299
+META_SELECTCLIPREGION = 300
+META_SELECTOBJECT = 301
+META_SETTEXTALIGN = 302
+META_CHORD = 2096
+META_SETMAPPERFLAGS = 561
+META_EXTTEXTOUT = 2610
+META_SETDIBTODEV = 3379
+META_SELECTPALETTE = 564
+META_REALIZEPALETTE = 53
+META_ANIMATEPALETTE = 1078
+META_SETPALENTRIES = 55
+META_POLYPOLYGON = 1336
+META_RESIZEPALETTE = 313
+META_DIBBITBLT = 2368
+META_DIBSTRETCHBLT = 2881
+META_DIBCREATEPATTERNBRUSH = 322
+META_STRETCHDIB = 3907
+META_EXTFLOODFILL = 1352
+META_DELETEOBJECT = 496
+META_CREATEPALETTE = 247
+META_CREATEPATTERNBRUSH = 505
+META_CREATEPENINDIRECT = 762
+META_CREATEFONTINDIRECT = 763
+META_CREATEBRUSHINDIRECT = 764
+META_CREATEREGION = 1791
+FILE_BEGIN = 0
+FILE_CURRENT = 1
+FILE_END = 2
+FILE_FLAG_WRITE_THROUGH = -2147483648
+FILE_FLAG_OVERLAPPED = 1073741824
+FILE_FLAG_NO_BUFFERING = 536870912
+FILE_FLAG_RANDOM_ACCESS = 268435456
+FILE_FLAG_SEQUENTIAL_SCAN = 134217728
+FILE_FLAG_DELETE_ON_CLOSE = 67108864
+FILE_FLAG_BACKUP_SEMANTICS = 33554432
+FILE_FLAG_POSIX_SEMANTICS = 16777216
+CREATE_NEW = 1
+CREATE_ALWAYS = 2
+OPEN_EXISTING = 3
+OPEN_ALWAYS = 4
+TRUNCATE_EXISTING = 5
+PIPE_ACCESS_INBOUND = 1
+PIPE_ACCESS_OUTBOUND = 2
+PIPE_ACCESS_DUPLEX = 3
+PIPE_CLIENT_END = 0
+PIPE_SERVER_END = 1
+PIPE_WAIT = 0
+PIPE_NOWAIT = 1
+PIPE_READMODE_BYTE = 0
+PIPE_READMODE_MESSAGE = 2
+PIPE_TYPE_BYTE = 0
+PIPE_TYPE_MESSAGE = 4
+PIPE_UNLIMITED_INSTANCES = 255
+SECURITY_CONTEXT_TRACKING = 262144
+SECURITY_EFFECTIVE_ONLY = 524288
+SECURITY_SQOS_PRESENT = 1048576
+SECURITY_VALID_SQOS_FLAGS = 2031616
+DTR_CONTROL_DISABLE = 0
+DTR_CONTROL_ENABLE = 1
+DTR_CONTROL_HANDSHAKE = 2
+RTS_CONTROL_DISABLE = 0
+RTS_CONTROL_ENABLE = 1
+RTS_CONTROL_HANDSHAKE = 2
+RTS_CONTROL_TOGGLE = 3
+GMEM_FIXED = 0
+GMEM_MOVEABLE = 2
+GMEM_NOCOMPACT = 16
+GMEM_NODISCARD = 32
+GMEM_ZEROINIT = 64
+GMEM_MODIFY = 128
+GMEM_DISCARDABLE = 256
+GMEM_NOT_BANKED = 4096
+GMEM_SHARE = 8192
+GMEM_DDESHARE = 8192
+GMEM_NOTIFY = 16384
+GMEM_LOWER = GMEM_NOT_BANKED
+GMEM_VALID_FLAGS = 32626
+GMEM_INVALID_HANDLE = 32768
+GHND = (GMEM_MOVEABLE | GMEM_ZEROINIT)
+GPTR = (GMEM_FIXED | GMEM_ZEROINIT)
+GMEM_DISCARDED = 16384
+GMEM_LOCKCOUNT = 255
+LMEM_FIXED = 0
+LMEM_MOVEABLE = 2
+LMEM_NOCOMPACT = 16
+LMEM_NODISCARD = 32
+LMEM_ZEROINIT = 64
+LMEM_MODIFY = 128
+LMEM_DISCARDABLE = 3840
+LMEM_VALID_FLAGS = 3954
+LMEM_INVALID_HANDLE = 32768
+LHND = (LMEM_MOVEABLE | LMEM_ZEROINIT)
+LPTR = (LMEM_FIXED | LMEM_ZEROINIT)
+NONZEROLHND = (LMEM_MOVEABLE)
+NONZEROLPTR = (LMEM_FIXED)
+LMEM_DISCARDED = 16384
+LMEM_LOCKCOUNT = 255
+DEBUG_PROCESS = 1
+DEBUG_ONLY_THIS_PROCESS = 2
+CREATE_SUSPENDED = 4
+DETACHED_PROCESS = 8
+CREATE_NEW_CONSOLE = 16
+NORMAL_PRIORITY_CLASS = 32
+IDLE_PRIORITY_CLASS = 64
+HIGH_PRIORITY_CLASS = 128
+REALTIME_PRIORITY_CLASS = 256
+CREATE_NEW_PROCESS_GROUP = 512
+CREATE_UNICODE_ENVIRONMENT = 1024
+CREATE_SEPARATE_WOW_VDM = 2048
+CREATE_SHARED_WOW_VDM = 4096
+CREATE_DEFAULT_ERROR_MODE = 67108864
+CREATE_NO_WINDOW = 134217728
+PROFILE_USER = 268435456
+PROFILE_KERNEL = 536870912
+PROFILE_SERVER = 1073741824
+THREAD_BASE_PRIORITY_LOWRT  = 15
+THREAD_BASE_PRIORITY_MAX =   2
+THREAD_BASE_PRIORITY_MIN =  -2
+THREAD_BASE_PRIORITY_IDLE = -15
+THREAD_PRIORITY_LOWEST = THREAD_BASE_PRIORITY_MIN
+THREAD_PRIORITY_BELOW_NORMAL = THREAD_PRIORITY_LOWEST+1
+THREAD_PRIORITY_HIGHEST =         THREAD_BASE_PRIORITY_MAX
+THREAD_PRIORITY_ABOVE_NORMAL =   THREAD_PRIORITY_HIGHEST-1
+THREAD_PRIORITY_ERROR_RETURN =   MAXLONG
+THREAD_PRIORITY_TIME_CRITICAL   = THREAD_BASE_PRIORITY_LOWRT
+THREAD_PRIORITY_IDLE            = THREAD_BASE_PRIORITY_IDLE
+THREAD_PRIORITY_NORMAL = 0
+EXCEPTION_DEBUG_EVENT = 1
+CREATE_THREAD_DEBUG_EVENT = 2
+CREATE_PROCESS_DEBUG_EVENT = 3
+EXIT_THREAD_DEBUG_EVENT = 4
+EXIT_PROCESS_DEBUG_EVENT = 5
+LOAD_DLL_DEBUG_EVENT = 6
+UNLOAD_DLL_DEBUG_EVENT = 7
+OUTPUT_DEBUG_STRING_EVENT = 8
+RIP_EVENT = 9
+DRIVE_UNKNOWN = 0
+DRIVE_NO_ROOT_DIR = 1
+DRIVE_REMOVABLE = 2
+DRIVE_FIXED = 3
+DRIVE_REMOTE = 4
+DRIVE_CDROM = 5
+DRIVE_RAMDISK = 6
+FILE_TYPE_UNKNOWN = 0
+FILE_TYPE_DISK = 1
+FILE_TYPE_CHAR = 2
+FILE_TYPE_PIPE = 3
+FILE_TYPE_REMOTE = 32768
+NOPARITY = 0
+ODDPARITY = 1
+EVENPARITY = 2
+MARKPARITY = 3
+SPACEPARITY = 4
+ONESTOPBIT = 0
+ONE5STOPBITS = 1
+TWOSTOPBITS = 2
+CBR_110 = 110
+CBR_300 = 300
+CBR_600 = 600
+CBR_1200 = 1200
+CBR_2400 = 2400
+CBR_4800 = 4800
+CBR_9600 = 9600
+CBR_14400 = 14400
+CBR_19200 = 19200
+CBR_38400 = 38400
+CBR_56000 = 56000
+CBR_57600 = 57600
+CBR_115200 = 115200
+CBR_128000 = 128000
+CBR_256000 = 256000
+S_QUEUEEMPTY = 0
+S_THRESHOLD = 1
+S_ALLTHRESHOLD = 2
+S_NORMAL = 0
+S_LEGATO = 1
+S_STACCATO = 2
+NMPWAIT_WAIT_FOREVER = -1
+NMPWAIT_NOWAIT = 1
+NMPWAIT_USE_DEFAULT_WAIT = 0
+OF_READ = 0
+OF_WRITE = 1
+OF_READWRITE = 2
+OF_SHARE_COMPAT = 0
+OF_SHARE_EXCLUSIVE = 16
+OF_SHARE_DENY_WRITE = 32
+OF_SHARE_DENY_READ = 48
+OF_SHARE_DENY_NONE = 64
+OF_PARSE = 256
+OF_DELETE = 512
+OF_VERIFY = 1024
+OF_CANCEL = 2048
+OF_CREATE = 4096
+OF_PROMPT = 8192
+OF_EXIST = 16384
+OF_REOPEN = 32768
+OFS_MAXPATHNAME = 128
+MAXINTATOM = 49152
+
+# winbase.h
+PROCESS_HEAP_REGION = 1
+PROCESS_HEAP_UNCOMMITTED_RANGE = 2
+PROCESS_HEAP_ENTRY_BUSY = 4
+PROCESS_HEAP_ENTRY_MOVEABLE = 16
+PROCESS_HEAP_ENTRY_DDESHARE = 32
+SCS_32BIT_BINARY = 0
+SCS_DOS_BINARY = 1
+SCS_WOW_BINARY = 2
+SCS_PIF_BINARY = 3
+SCS_POSIX_BINARY = 4
+SCS_OS216_BINARY = 5
+SEM_FAILCRITICALERRORS = 1
+SEM_NOGPFAULTERRORBOX = 2
+SEM_NOALIGNMENTFAULTEXCEPT = 4
+SEM_NOOPENFILEERRORBOX = 32768
+LOCKFILE_FAIL_IMMEDIATELY = 1
+LOCKFILE_EXCLUSIVE_LOCK = 2
+HANDLE_FLAG_INHERIT = 1
+HANDLE_FLAG_PROTECT_FROM_CLOSE = 2
+HINSTANCE_ERROR = 32
+GET_TAPE_MEDIA_INFORMATION = 0
+GET_TAPE_DRIVE_INFORMATION = 1
+SET_TAPE_MEDIA_INFORMATION = 0
+SET_TAPE_DRIVE_INFORMATION = 1
+FORMAT_MESSAGE_ALLOCATE_BUFFER = 256
+FORMAT_MESSAGE_IGNORE_INSERTS = 512
+FORMAT_MESSAGE_FROM_STRING = 1024
+FORMAT_MESSAGE_FROM_HMODULE = 2048
+FORMAT_MESSAGE_FROM_SYSTEM = 4096
+FORMAT_MESSAGE_ARGUMENT_ARRAY = 8192
+FORMAT_MESSAGE_MAX_WIDTH_MASK = 255
+BACKUP_INVALID = 0
+BACKUP_DATA = 1
+BACKUP_EA_DATA = 2
+BACKUP_SECURITY_DATA = 3
+BACKUP_ALTERNATE_DATA = 4
+BACKUP_LINK = 5
+BACKUP_PROPERTY_DATA = 6
+BACKUP_OBJECT_ID = 7
+BACKUP_REPARSE_DATA = 8
+BACKUP_SPARSE_BLOCK = 9
+
+STREAM_NORMAL_ATTRIBUTE = 0
+STREAM_MODIFIED_WHEN_READ = 1
+STREAM_CONTAINS_SECURITY = 2
+STREAM_CONTAINS_PROPERTIES = 4
+STARTF_USESHOWWINDOW = 1
+STARTF_USESIZE = 2
+STARTF_USEPOSITION = 4
+STARTF_USECOUNTCHARS = 8
+STARTF_USEFILLATTRIBUTE = 16
+STARTF_FORCEONFEEDBACK = 64
+STARTF_FORCEOFFFEEDBACK = 128
+STARTF_USESTDHANDLES = 256
+STARTF_USEHOTKEY = 512
+SHUTDOWN_NORETRY = 1
+DONT_RESOLVE_DLL_REFERENCES = 1
+LOAD_LIBRARY_AS_DATAFILE = 2
+LOAD_WITH_ALTERED_SEARCH_PATH = 8
+DDD_RAW_TARGET_PATH = 1
+DDD_REMOVE_DEFINITION = 2
+DDD_EXACT_MATCH_ON_REMOVE = 4
+MOVEFILE_REPLACE_EXISTING = 1
+MOVEFILE_COPY_ALLOWED = 2
+MOVEFILE_DELAY_UNTIL_REBOOT = 4
+MAX_COMPUTERNAME_LENGTH = 15
+LOGON32_LOGON_INTERACTIVE = 2
+LOGON32_LOGON_BATCH = 4
+LOGON32_LOGON_SERVICE = 5
+LOGON32_PROVIDER_DEFAULT = 0
+LOGON32_PROVIDER_WINNT35 = 1
+VER_PLATFORM_WIN32s = 0
+VER_PLATFORM_WIN32_WINDOWS = 1
+VER_PLATFORM_WIN32_NT = 2
+TC_NORMAL = 0
+TC_HARDERR = 1
+TC_GP_TRAP = 2
+TC_SIGNAL = 3
+AC_LINE_OFFLINE = 0
+AC_LINE_ONLINE = 1
+AC_LINE_BACKUP_POWER = 2
+AC_LINE_UNKNOWN = 255
+BATTERY_FLAG_HIGH = 1
+BATTERY_FLAG_LOW = 2
+BATTERY_FLAG_CRITICAL = 4
+BATTERY_FLAG_CHARGING = 8
+BATTERY_FLAG_NO_BATTERY = 128
+BATTERY_FLAG_UNKNOWN = 255
+BATTERY_PERCENTAGE_UNKNOWN = 255
+BATTERY_LIFE_UNKNOWN = -1
+
+# Generated by h2py from d:\msdev\include\richedit.h
+cchTextLimitDefault = 32767
+WM_CONTEXTMENU = 123
+WM_PRINTCLIENT = 792
+EN_MSGFILTER = 1792
+EN_REQUESTRESIZE = 1793
+EN_SELCHANGE = 1794
+EN_DROPFILES = 1795
+EN_PROTECTED = 1796
+EN_CORRECTTEXT = 1797
+EN_STOPNOUNDO = 1798
+EN_IMECHANGE = 1799
+EN_SAVECLIPBOARD = 1800
+EN_OLEOPFAILED = 1801
+ENM_NONE = 0
+ENM_CHANGE = 1
+ENM_UPDATE = 2
+ENM_SCROLL = 4
+ENM_KEYEVENTS = 65536
+ENM_MOUSEEVENTS = 131072
+ENM_REQUESTRESIZE = 262144
+ENM_SELCHANGE = 524288
+ENM_DROPFILES = 1048576
+ENM_PROTECTED = 2097152
+ENM_CORRECTTEXT = 4194304
+ENM_IMECHANGE = 8388608
+ES_SAVESEL = 32768
+ES_SUNKEN = 16384
+ES_DISABLENOSCROLL = 8192
+ES_SELECTIONBAR = 16777216
+ES_EX_NOCALLOLEINIT = 16777216
+ES_VERTICAL = 4194304
+ES_NOIME = 524288
+ES_SELFIME = 262144
+ECO_AUTOWORDSELECTION = 1
+ECO_AUTOVSCROLL = 64
+ECO_AUTOHSCROLL = 128
+ECO_NOHIDESEL = 256
+ECO_READONLY = 2048
+ECO_WANTRETURN = 4096
+ECO_SAVESEL = 32768
+ECO_SELECTIONBAR = 16777216
+ECO_VERTICAL = 4194304
+ECOOP_SET = 1
+ECOOP_OR = 2
+ECOOP_AND = 3
+ECOOP_XOR = 4
+WB_CLASSIFY = 3
+WB_MOVEWORDLEFT = 4
+WB_MOVEWORDRIGHT = 5
+WB_LEFTBREAK = 6
+WB_RIGHTBREAK = 7
+WB_MOVEWORDPREV = 4
+WB_MOVEWORDNEXT = 5
+WB_PREVBREAK = 6
+WB_NEXTBREAK = 7
+PC_FOLLOWING = 1
+PC_LEADING = 2
+PC_OVERFLOW = 3
+PC_DELIMITER = 4
+WBF_WORDWRAP = 16
+WBF_WORDBREAK = 32
+WBF_OVERFLOW = 64
+WBF_LEVEL1 = 128
+WBF_LEVEL2 = 256
+WBF_CUSTOM = 512
+CFM_BOLD = 1
+CFM_ITALIC = 2
+CFM_UNDERLINE = 4
+CFM_STRIKEOUT = 8
+CFM_PROTECTED = 16
+CFM_SIZE = -2147483648
+CFM_COLOR = 1073741824
+CFM_FACE = 536870912
+CFM_OFFSET = 268435456
+CFM_CHARSET = 134217728
+CFE_BOLD = 1
+CFE_ITALIC = 2
+CFE_UNDERLINE = 4
+CFE_STRIKEOUT = 8
+CFE_PROTECTED = 16
+CFE_AUTOCOLOR = 1073741824
+yHeightCharPtsMost = 1638
+SCF_SELECTION = 1
+SCF_WORD = 2
+SF_TEXT = 1
+SF_RTF = 2
+SF_RTFNOOBJS = 3
+SF_TEXTIZED = 4
+SFF_SELECTION = 32768
+SFF_PLAINRTF = 16384
+MAX_TAB_STOPS = 32
+lDefaultTab = 720
+PFM_STARTINDENT = 1
+PFM_RIGHTINDENT = 2
+PFM_OFFSET = 4
+PFM_ALIGNMENT = 8
+PFM_TABSTOPS = 16
+PFM_NUMBERING = 32
+PFM_OFFSETINDENT = -2147483648
+PFN_BULLET = 1
+PFA_LEFT = 1
+PFA_RIGHT = 2
+PFA_CENTER = 3
+WM_NOTIFY = 78
+SEL_EMPTY = 0
+SEL_TEXT = 1
+SEL_OBJECT = 2
+SEL_MULTICHAR = 4
+SEL_MULTIOBJECT = 8
+OLEOP_DOVERB = 1
+CF_RTF = "Rich Text Format"
+CF_RTFNOOBJS = "Rich Text Format Without Objects"
+CF_RETEXTOBJ = "RichEdit Text and Objects"
+
+# From wincon.h
+RIGHT_ALT_PRESSED     = 1 # the right alt key is pressed.
+LEFT_ALT_PRESSED      = 2 # the left alt key is pressed.
+RIGHT_CTRL_PRESSED    = 4 # the right ctrl key is pressed.
+LEFT_CTRL_PRESSED     = 8 # the left ctrl key is pressed.
+SHIFT_PRESSED         = 16 # the shift key is pressed.
+NUMLOCK_ON            = 32 # the numlock light is on.
+SCROLLLOCK_ON         = 64 # the scrolllock light is on.
+CAPSLOCK_ON           = 128 # the capslock light is on.
+ENHANCED_KEY          = 256 # the key is enhanced.
+NLS_DBCSCHAR          = 65536 # DBCS for JPN: SBCS/DBCS mode.
+NLS_ALPHANUMERIC      = 0 # DBCS for JPN: Alphanumeric mode.
+NLS_KATAKANA          = 131072 # DBCS for JPN: Katakana mode.
+NLS_HIRAGANA          = 262144 # DBCS for JPN: Hiragana mode.
+NLS_ROMAN             = 4194304 # DBCS for JPN: Roman/Noroman mode.
+NLS_IME_CONVERSION    = 8388608 # DBCS for JPN: IME conversion.
+NLS_IME_DISABLE       = 536870912 # DBCS for JPN: IME enable/disable.
+
+FROM_LEFT_1ST_BUTTON_PRESSED    = 1
+RIGHTMOST_BUTTON_PRESSED        = 2
+FROM_LEFT_2ND_BUTTON_PRESSED    = 4
+FROM_LEFT_3RD_BUTTON_PRESSED    = 8
+FROM_LEFT_4TH_BUTTON_PRESSED    = 16
+
+CTRL_C_EVENT        = 0
+CTRL_BREAK_EVENT    = 1
+CTRL_CLOSE_EVENT    = 2
+CTRL_LOGOFF_EVENT   = 5
+CTRL_SHUTDOWN_EVENT = 6
+
+MOUSE_MOVED   = 1
+DOUBLE_CLICK  = 2
+MOUSE_WHEELED = 4
+
+#property sheet window messages from prsht.h
+PSM_SETCURSEL = (WM_USER + 101)
+PSM_REMOVEPAGE = (WM_USER + 102)
+PSM_ADDPAGE = (WM_USER + 103)
+PSM_CHANGED = (WM_USER + 104)
+PSM_RESTARTWINDOWS = (WM_USER + 105)
+PSM_REBOOTSYSTEM = (WM_USER + 106)
+PSM_CANCELTOCLOSE = (WM_USER + 107)
+PSM_QUERYSIBLINGS = (WM_USER + 108)
+PSM_UNCHANGED = (WM_USER + 109)
+PSM_APPLY = (WM_USER + 110)
+PSM_SETTITLEA = (WM_USER + 111)
+PSM_SETTITLEW = (WM_USER + 120)
+PSM_SETWIZBUTTONS = (WM_USER + 112)
+PSM_PRESSBUTTON = (WM_USER + 113)
+PSM_SETCURSELID = (WM_USER + 114)
+PSM_SETFINISHTEXTA = (WM_USER + 115)
+PSM_SETFINISHTEXTW = (WM_USER + 121)
+PSM_GETTABCONTROL = (WM_USER + 116)
+PSM_ISDIALOGMESSAGE = (WM_USER + 117)
+PSM_GETCURRENTPAGEHWND = (WM_USER + 118)
+PSM_INSERTPAGE = (WM_USER + 119)
+PSM_SETHEADERTITLEA = (WM_USER + 125)
+PSM_SETHEADERTITLEW = (WM_USER + 126)
+PSM_SETHEADERSUBTITLEA = (WM_USER + 127)
+PSM_SETHEADERSUBTITLEW = (WM_USER + 128)
+PSM_HWNDTOINDEX = (WM_USER + 129)
+PSM_INDEXTOHWND = (WM_USER + 130)
+PSM_PAGETOINDEX = (WM_USER + 131)
+PSM_INDEXTOPAGE = (WM_USER + 132)
+PSM_IDTOINDEX = (WM_USER + 133)
+PSM_INDEXTOID = (WM_USER + 134)
+PSM_GETRESULT = (WM_USER + 135)
+PSM_RECALCPAGESIZES = (WM_USER + 136)
+
+# GetUserNameEx/GetComputerNameEx
+NameUnknown = 0
+NameFullyQualifiedDN = 1
+NameSamCompatible = 2
+NameDisplay = 3
+NameUniqueId = 6
+NameCanonical = 7
+NameUserPrincipal = 8
+NameCanonicalEx = 9
+NameServicePrincipal = 10
+NameDnsDomain = 12
+
+ComputerNameNetBIOS = 0
+ComputerNameDnsHostname = 1
+ComputerNameDnsDomain = 2
+ComputerNameDnsFullyQualified = 3
+ComputerNamePhysicalNetBIOS = 4
+ComputerNamePhysicalDnsHostname = 5
+ComputerNamePhysicalDnsDomain = 6
+ComputerNamePhysicalDnsFullyQualified = 7
+
+LWA_COLORKEY = 0x00000001
+LWA_ALPHA = 0x00000002
+ULW_COLORKEY = 0x00000001
+ULW_ALPHA = 0x00000002
+ULW_OPAQUE = 0x00000004
+
+# WinDef.h
+TRUE = 1
+FALSE = 0
+MAX_PATH = 260
+# WinGDI.h
+AC_SRC_OVER = 0
+AC_SRC_ALPHA = 1
+GRADIENT_FILL_RECT_H = 0
+GRADIENT_FILL_RECT_V = 1
+GRADIENT_FILL_TRIANGLE = 2
+GRADIENT_FILL_OP_FLAG = 255
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32cryptcon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32cryptcon.py
new file mode 100644
index 0000000..e8dc3034
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32cryptcon.py
@@ -0,0 +1,1671 @@
+# Generated by h2py from WinCrypt.h
+def GET_ALG_CLASS(x): return (x & (7 << 13))
+
+def GET_ALG_TYPE(x): return (x & (15 << 9))
+
+def GET_ALG_SID(x): return (x & (511))
+
+ALG_CLASS_ANY = (0)
+ALG_CLASS_SIGNATURE = (1 << 13)
+ALG_CLASS_MSG_ENCRYPT = (2 << 13)
+ALG_CLASS_DATA_ENCRYPT = (3 << 13)
+ALG_CLASS_HASH = (4 << 13)
+ALG_CLASS_KEY_EXCHANGE = (5 << 13)
+ALG_CLASS_ALL = (7 << 13)
+ALG_TYPE_ANY = (0)
+ALG_TYPE_DSS = (1 << 9)
+ALG_TYPE_RSA = (2 << 9)
+ALG_TYPE_BLOCK = (3 << 9)
+ALG_TYPE_STREAM = (4 << 9)
+ALG_TYPE_DH = (5 << 9)
+ALG_TYPE_SECURECHANNEL = (6 << 9)
+ALG_SID_ANY = (0)
+ALG_SID_RSA_ANY = 0
+ALG_SID_RSA_PKCS = 1
+ALG_SID_RSA_MSATWORK = 2
+ALG_SID_RSA_ENTRUST = 3
+ALG_SID_RSA_PGP = 4
+ALG_SID_DSS_ANY = 0
+ALG_SID_DSS_PKCS = 1
+ALG_SID_DSS_DMS = 2
+ALG_SID_DES = 1
+ALG_SID_3DES = 3
+ALG_SID_DESX = 4
+ALG_SID_IDEA = 5
+ALG_SID_CAST = 6
+ALG_SID_SAFERSK64 = 7
+ALG_SID_SAFERSK128 = 8
+ALG_SID_3DES_112 = 9
+ALG_SID_CYLINK_MEK = 12
+ALG_SID_RC5 = 13
+ALG_SID_AES_128 = 14
+ALG_SID_AES_192 = 15
+ALG_SID_AES_256 = 16
+ALG_SID_AES = 17
+ALG_SID_SKIPJACK = 10
+ALG_SID_TEK = 11
+CRYPT_MODE_CBCI = 6
+CRYPT_MODE_CFBP = 7
+CRYPT_MODE_OFBP = 8
+CRYPT_MODE_CBCOFM = 9
+CRYPT_MODE_CBCOFMI = 10
+ALG_SID_RC2 = 2
+ALG_SID_RC4 = 1
+ALG_SID_SEAL = 2
+ALG_SID_DH_SANDF = 1
+ALG_SID_DH_EPHEM = 2
+ALG_SID_AGREED_KEY_ANY = 3
+ALG_SID_KEA = 4
+ALG_SID_MD2 = 1
+ALG_SID_MD4 = 2
+ALG_SID_MD5 = 3
+ALG_SID_SHA = 4
+ALG_SID_SHA1 = 4
+ALG_SID_MAC = 5
+ALG_SID_RIPEMD = 6
+ALG_SID_RIPEMD160 = 7
+ALG_SID_SSL3SHAMD5 = 8
+ALG_SID_HMAC = 9
+ALG_SID_TLS1PRF = 10
+ALG_SID_HASH_REPLACE_OWF = 11
+ALG_SID_SHA_256 = 12
+ALG_SID_SHA_384 = 13
+ALG_SID_SHA_512 = 14
+ALG_SID_SSL3_MASTER = 1
+ALG_SID_SCHANNEL_MASTER_HASH = 2
+ALG_SID_SCHANNEL_MAC_KEY = 3
+ALG_SID_PCT1_MASTER = 4
+ALG_SID_SSL2_MASTER = 5
+ALG_SID_TLS1_MASTER = 6
+ALG_SID_SCHANNEL_ENC_KEY = 7
+ALG_SID_EXAMPLE = 80
+CALG_MD2 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD2)
+CALG_MD4 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD4)
+CALG_MD5 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MD5)
+CALG_SHA = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA)
+CALG_SHA1 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA1)
+CALG_MAC = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_MAC)
+CALG_RSA_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_RSA | ALG_SID_RSA_ANY)
+CALG_DSS_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_DSS | ALG_SID_DSS_ANY)
+CALG_NO_SIGN = (ALG_CLASS_SIGNATURE | ALG_TYPE_ANY | ALG_SID_ANY)
+CALG_RSA_KEYX = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_RSA|ALG_SID_RSA_ANY)
+CALG_DES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_DES)
+CALG_3DES_112 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_3DES_112)
+CALG_3DES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_3DES)
+CALG_DESX = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_DESX)
+CALG_RC2 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_RC2)
+CALG_RC4 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_STREAM|ALG_SID_RC4)
+CALG_SEAL = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_STREAM|ALG_SID_SEAL)
+CALG_DH_SF = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_DH_SANDF)
+CALG_DH_EPHEM = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_DH_EPHEM)
+CALG_AGREEDKEY_ANY = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_AGREED_KEY_ANY)
+CALG_KEA_KEYX = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_DH|ALG_SID_KEA)
+CALG_HUGHES_MD5 = (ALG_CLASS_KEY_EXCHANGE|ALG_TYPE_ANY|ALG_SID_MD5)
+CALG_SKIPJACK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_SKIPJACK)
+CALG_TEK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_TEK)
+CALG_CYLINK_MEK = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_CYLINK_MEK)
+CALG_SSL3_SHAMD5 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SSL3SHAMD5)
+CALG_SSL3_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SSL3_MASTER)
+CALG_SCHANNEL_MASTER_HASH = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_MASTER_HASH)
+CALG_SCHANNEL_MAC_KEY = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_MAC_KEY)
+CALG_SCHANNEL_ENC_KEY = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SCHANNEL_ENC_KEY)
+CALG_PCT1_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_PCT1_MASTER)
+CALG_SSL2_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_SSL2_MASTER)
+CALG_TLS1_MASTER = (ALG_CLASS_MSG_ENCRYPT|ALG_TYPE_SECURECHANNEL|ALG_SID_TLS1_MASTER)
+CALG_RC5 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_RC5)
+CALG_HMAC = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HMAC)
+CALG_TLS1PRF = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_TLS1PRF)
+CALG_HASH_REPLACE_OWF = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_HASH_REPLACE_OWF)
+CALG_AES_128 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_128)
+CALG_AES_192 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_192)
+CALG_AES_256 = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES_256)
+CALG_AES = (ALG_CLASS_DATA_ENCRYPT|ALG_TYPE_BLOCK|ALG_SID_AES)
+CALG_SHA_256 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_256)
+CALG_SHA_384 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_384)
+CALG_SHA_512 = (ALG_CLASS_HASH | ALG_TYPE_ANY | ALG_SID_SHA_512)
+CRYPT_VERIFYCONTEXT = (-268435456)
+CRYPT_NEWKEYSET = 0x00000008
+CRYPT_DELETEKEYSET = 0x00000010
+CRYPT_MACHINE_KEYSET = 0x00000020
+CRYPT_SILENT = 0x00000040
+CRYPT_EXPORTABLE = 0x00000001
+CRYPT_USER_PROTECTED = 0x00000002
+CRYPT_CREATE_SALT = 0x00000004
+CRYPT_UPDATE_KEY = 0x00000008
+CRYPT_NO_SALT = 0x00000010
+CRYPT_PREGEN = 0x00000040
+CRYPT_RECIPIENT = 0x00000010
+CRYPT_INITIATOR = 0x00000040
+CRYPT_ONLINE = 0x00000080
+CRYPT_SF = 0x00000100
+CRYPT_CREATE_IV = 0x00000200
+CRYPT_KEK = 0x00000400
+CRYPT_DATA_KEY = 0x00000800
+CRYPT_VOLATILE = 0x00001000
+CRYPT_SGCKEY = 0x00002000
+CRYPT_ARCHIVABLE = 0x00004000
+RSA1024BIT_KEY = 0x04000000
+CRYPT_SERVER = 0x00000400
+KEY_LENGTH_MASK = (-65536)
+CRYPT_Y_ONLY = 0x00000001
+CRYPT_SSL2_FALLBACK = 0x00000002
+CRYPT_DESTROYKEY = 0x00000004
+CRYPT_OAEP = 0x00000040
+CRYPT_BLOB_VER3 = 0x00000080
+CRYPT_IPSEC_HMAC_KEY = 0x00000100
+CRYPT_DECRYPT_RSA_NO_PADDING_CHECK = 0x00000020
+CRYPT_SECRETDIGEST = 0x00000001
+CRYPT_OWF_REPL_LM_HASH = 0x00000001
+CRYPT_LITTLE_ENDIAN = 0x00000001
+CRYPT_NOHASHOID = 0x00000001
+CRYPT_TYPE2_FORMAT = 0x00000002
+CRYPT_X931_FORMAT = 0x00000004
+CRYPT_MACHINE_DEFAULT = 0x00000001
+CRYPT_USER_DEFAULT = 0x00000002
+CRYPT_DELETE_DEFAULT = 0x00000004
+SIMPLEBLOB = 0x1
+PUBLICKEYBLOB = 0x6
+PRIVATEKEYBLOB = 0x7
+PLAINTEXTKEYBLOB = 0x8
+OPAQUEKEYBLOB = 0x9
+PUBLICKEYBLOBEX = 0xA
+SYMMETRICWRAPKEYBLOB = 0xB
+AT_KEYEXCHANGE = 1
+AT_SIGNATURE = 2
+CRYPT_USERDATA = 1
+KP_IV = 1
+KP_SALT = 2
+KP_PADDING = 3
+KP_MODE = 4
+KP_MODE_BITS = 5
+KP_PERMISSIONS = 6
+KP_ALGID = 7
+KP_BLOCKLEN = 8
+KP_KEYLEN = 9
+KP_SALT_EX = 10
+KP_P = 11
+KP_G = 12
+KP_Q = 13
+KP_X = 14
+KP_Y = 15
+KP_RA = 16
+KP_RB = 17
+KP_INFO = 18
+KP_EFFECTIVE_KEYLEN = 19
+KP_SCHANNEL_ALG = 20
+KP_CLIENT_RANDOM = 21
+KP_SERVER_RANDOM = 22
+KP_RP = 23
+KP_PRECOMP_MD5 = 24
+KP_PRECOMP_SHA = 25
+KP_CERTIFICATE = 26
+KP_CLEAR_KEY = 27
+KP_PUB_EX_LEN = 28
+KP_PUB_EX_VAL = 29
+KP_KEYVAL = 30
+KP_ADMIN_PIN = 31
+KP_KEYEXCHANGE_PIN = 32
+KP_SIGNATURE_PIN = 33
+KP_PREHASH = 34
+KP_ROUNDS = 35
+KP_OAEP_PARAMS = 36
+KP_CMS_KEY_INFO = 37
+KP_CMS_DH_KEY_INFO = 38
+KP_PUB_PARAMS = 39
+KP_VERIFY_PARAMS = 40
+KP_HIGHEST_VERSION = 41
+KP_GET_USE_COUNT = 42
+PKCS5_PADDING = 1
+RANDOM_PADDING = 2
+ZERO_PADDING = 3
+CRYPT_MODE_CBC = 1
+CRYPT_MODE_ECB = 2
+CRYPT_MODE_OFB = 3
+CRYPT_MODE_CFB = 4
+CRYPT_MODE_CTS = 5
+CRYPT_ENCRYPT = 0x0001
+CRYPT_DECRYPT = 0x0002
+CRYPT_EXPORT = 0x0004
+CRYPT_READ = 0x0008
+CRYPT_WRITE = 0x0010
+CRYPT_MAC = 0x0020
+CRYPT_EXPORT_KEY = 0x0040
+CRYPT_IMPORT_KEY = 0x0080
+CRYPT_ARCHIVE = 0x0100
+HP_ALGID = 0x0001
+HP_HASHVAL = 0x0002
+HP_HASHSIZE = 0x0004
+HP_HMAC_INFO = 0x0005
+HP_TLS1PRF_LABEL = 0x0006
+HP_TLS1PRF_SEED = 0x0007
+def RCRYPT_SUCCEEDED(rt): return ((rt) == CRYPT_SUCCEED)
+
+def RCRYPT_FAILED(rt): return ((rt) == CRYPT_FAILED)
+
# PP_* provider-parameter identifiers (note the gaps at 11-15 and 23-24,
# which are defined further below with the PP_CLIENT_HWND group).
PP_ENUMALGS = 1
PP_ENUMCONTAINERS = 2
PP_IMPTYPE = 3
PP_NAME = 4
PP_VERSION = 5
PP_CONTAINER = 6
PP_CHANGE_PASSWORD = 7
PP_KEYSET_SEC_DESCR = 8
PP_CERTCHAIN = 9
PP_KEY_TYPE_SUBTYPE = 10
PP_PROVTYPE = 16
PP_KEYSTORAGE = 17
PP_APPLI_CERT = 18
PP_SYM_KEYSIZE = 19
PP_SESSION_KEYSIZE = 20
PP_UI_PROMPT = 21
PP_ENUMALGS_EX = 22
PP_ENUMMANDROOTS = 25
PP_ENUMELECTROOTS = 26
PP_KEYSET_TYPE = 27
PP_ADMIN_PIN = 31
PP_KEYEXCHANGE_PIN = 32
PP_SIGNATURE_PIN = 33
PP_SIG_KEYSIZE_INC = 34
PP_KEYX_KEYSIZE_INC = 35
PP_UNIQUE_CONTAINER = 36
PP_SGC_INFO = 37
PP_USE_HARDWARE_RNG = 38
PP_KEYSPEC = 39
PP_ENUMEX_SIGNING_PROT = 40
PP_CRYPT_COUNT_KEY_USE = 41
# Enumeration control flags (used with the PP_ENUM* parameters).
CRYPT_FIRST = 1
CRYPT_NEXT = 2
CRYPT_SGC_ENUM = 4
# Provider implementation types (PP_IMPTYPE values).
CRYPT_IMPL_HARDWARE = 1
CRYPT_IMPL_SOFTWARE = 2
CRYPT_IMPL_MIXED = 3
CRYPT_IMPL_UNKNOWN = 4
CRYPT_IMPL_REMOVABLE = 8
CRYPT_SEC_DESCR = 0x00000001
CRYPT_PSTORE = 0x00000002
CRYPT_UI_PROMPT = 0x00000004
# Protocol-support bit flags (CRYPT_FLAG_*).
CRYPT_FLAG_PCT1 = 0x0001
CRYPT_FLAG_SSL2 = 0x0002
CRYPT_FLAG_SSL3 = 0x0004
CRYPT_FLAG_TLS1 = 0x0008
CRYPT_FLAG_IPSEC = 0x0010
CRYPT_FLAG_SIGNING = 0x0020
CRYPT_SGC = 0x0001
CRYPT_FASTSGC = 0x0002
# Remaining PP_* identifiers (fill the 11-15 and 24 gaps above).
PP_CLIENT_HWND = 1
PP_CONTEXT_INFO = 11
PP_KEYEXCHANGE_KEYSIZE = 12
PP_SIGNATURE_KEYSIZE = 13
PP_KEYEXCHANGE_ALG = 14
PP_SIGNATURE_ALG = 15
PP_DELETEKEY = 24
# PROV_* cryptographic provider types.
PROV_RSA_FULL = 1
PROV_RSA_SIG = 2
PROV_DSS = 3
PROV_FORTEZZA = 4
PROV_MS_EXCHANGE = 5
PROV_SSL = 6
PROV_RSA_SCHANNEL = 12
PROV_DSS_DH = 13
PROV_EC_ECDSA_SIG = 14
PROV_EC_ECNRA_SIG = 15
PROV_EC_ECDSA_FULL = 16
PROV_EC_ECNRA_FULL = 17
PROV_DH_SCHANNEL = 18
PROV_SPYRUS_LYNKS = 20
PROV_RNG = 21
PROV_INTEL_SEC = 22
PROV_REPLACE_OWF = 23
PROV_RSA_AES = 24
# Well-known Microsoft provider display names.  The *_A variants mirror the
# ANSI #defines; the unsuffixed names alias them for convenience.
MS_DEF_PROV_A = "Microsoft Base Cryptographic Provider v1.0"
MS_DEF_PROV = MS_DEF_PROV_A
MS_ENHANCED_PROV_A = "Microsoft Enhanced Cryptographic Provider v1.0"
MS_ENHANCED_PROV = MS_ENHANCED_PROV_A
MS_STRONG_PROV_A = "Microsoft Strong Cryptographic Provider"
MS_STRONG_PROV = MS_STRONG_PROV_A
MS_DEF_RSA_SIG_PROV_A = "Microsoft RSA Signature Cryptographic Provider"
MS_DEF_RSA_SIG_PROV = MS_DEF_RSA_SIG_PROV_A
MS_DEF_RSA_SCHANNEL_PROV_A = "Microsoft RSA SChannel Cryptographic Provider"
MS_DEF_RSA_SCHANNEL_PROV = MS_DEF_RSA_SCHANNEL_PROV_A
MS_DEF_DSS_PROV_A = "Microsoft Base DSS Cryptographic Provider"
MS_DEF_DSS_PROV = MS_DEF_DSS_PROV_A
MS_DEF_DSS_DH_PROV_A = "Microsoft Base DSS and Diffie-Hellman Cryptographic Provider"
MS_DEF_DSS_DH_PROV = MS_DEF_DSS_DH_PROV_A
MS_ENH_DSS_DH_PROV_A = "Microsoft Enhanced DSS and Diffie-Hellman Cryptographic Provider"
MS_ENH_DSS_DH_PROV = MS_ENH_DSS_DH_PROV_A
MS_DEF_DH_SCHANNEL_PROV_A = "Microsoft DH SChannel Cryptographic Provider"
MS_DEF_DH_SCHANNEL_PROV = MS_DEF_DH_SCHANNEL_PROV_A
MS_SCARD_PROV_A = "Microsoft Base Smart Card Crypto Provider"
MS_SCARD_PROV = MS_SCARD_PROV_A
MS_ENH_RSA_AES_PROV_A = "Microsoft Enhanced RSA and AES Cryptographic Provider"
MS_ENH_RSA_AES_PROV = MS_ENH_RSA_AES_PROV_A
MAXUIDLEN = 64
EXPO_OFFLOAD_REG_VALUE = "ExpoOffload"
EXPO_OFFLOAD_FUNC_NAME = "OffloadModExpo"
# Registry locations/values for machine-wide private-key policy.
szKEY_CRYPTOAPI_PRIVATE_KEY_OPTIONS = \
    "Software\\Policies\\Microsoft\\Cryptography"
szFORCE_KEY_PROTECTION = "ForceKeyProtection"
dwFORCE_KEY_PROTECTION_DISABLED = 0x0
dwFORCE_KEY_PROTECTION_USER_SELECT = 0x1
dwFORCE_KEY_PROTECTION_HIGH = 0x2
szKEY_CACHE_ENABLED = "CachePrivateKeys"
szKEY_CACHE_SECONDS = "PrivateKeyLifetimeSeconds"
CUR_BLOB_VERSION = 2
SCHANNEL_MAC_KEY = 0x00000000
SCHANNEL_ENC_KEY = 0x00000001
INTERNATIONAL_USAGE = 0x00000001
+szOID_RSA = "1.2.840.113549"
+szOID_PKCS = "1.2.840.113549.1"
+szOID_RSA_HASH = "1.2.840.113549.2"
+szOID_RSA_ENCRYPT = "1.2.840.113549.3"
+szOID_PKCS_1 = "1.2.840.113549.1.1"
+szOID_PKCS_2 = "1.2.840.113549.1.2"
+szOID_PKCS_3 = "1.2.840.113549.1.3"
+szOID_PKCS_4 = "1.2.840.113549.1.4"
+szOID_PKCS_5 = "1.2.840.113549.1.5"
+szOID_PKCS_6 = "1.2.840.113549.1.6"
+szOID_PKCS_7 = "1.2.840.113549.1.7"
+szOID_PKCS_8 = "1.2.840.113549.1.8"
+szOID_PKCS_9 = "1.2.840.113549.1.9"
+szOID_PKCS_10 = "1.2.840.113549.1.10"
+szOID_PKCS_12 = "1.2.840.113549.1.12"
+szOID_RSA_RSA = "1.2.840.113549.1.1.1"
+szOID_RSA_MD2RSA = "1.2.840.113549.1.1.2"
+szOID_RSA_MD4RSA = "1.2.840.113549.1.1.3"
+szOID_RSA_MD5RSA = "1.2.840.113549.1.1.4"
+szOID_RSA_SHA1RSA = "1.2.840.113549.1.1.5"
+szOID_RSA_SETOAEP_RSA = "1.2.840.113549.1.1.6"
+szOID_RSA_DH = "1.2.840.113549.1.3.1"
+szOID_RSA_data = "1.2.840.113549.1.7.1"
+szOID_RSA_signedData = "1.2.840.113549.1.7.2"
+szOID_RSA_envelopedData = "1.2.840.113549.1.7.3"
+szOID_RSA_signEnvData = "1.2.840.113549.1.7.4"
+szOID_RSA_digestedData = "1.2.840.113549.1.7.5"
+szOID_RSA_hashedData = "1.2.840.113549.1.7.5"
+szOID_RSA_encryptedData = "1.2.840.113549.1.7.6"
+szOID_RSA_emailAddr = "1.2.840.113549.1.9.1"
+szOID_RSA_unstructName = "1.2.840.113549.1.9.2"
+szOID_RSA_contentType = "1.2.840.113549.1.9.3"
+szOID_RSA_messageDigest = "1.2.840.113549.1.9.4"
+szOID_RSA_signingTime = "1.2.840.113549.1.9.5"
+szOID_RSA_counterSign = "1.2.840.113549.1.9.6"
+szOID_RSA_challengePwd = "1.2.840.113549.1.9.7"
+szOID_RSA_unstructAddr = "1.2.840.113549.1.9.8"
+szOID_RSA_extCertAttrs = "1.2.840.113549.1.9.9"
+szOID_RSA_certExtensions = "1.2.840.113549.1.9.14"
+szOID_RSA_SMIMECapabilities = "1.2.840.113549.1.9.15"
+szOID_RSA_preferSignedData = "1.2.840.113549.1.9.15.1"
+szOID_RSA_SMIMEalg = "1.2.840.113549.1.9.16.3"
+szOID_RSA_SMIMEalgESDH = "1.2.840.113549.1.9.16.3.5"
+szOID_RSA_SMIMEalgCMS3DESwrap = "1.2.840.113549.1.9.16.3.6"
+szOID_RSA_SMIMEalgCMSRC2wrap = "1.2.840.113549.1.9.16.3.7"
+szOID_RSA_MD2 = "1.2.840.113549.2.2"
+szOID_RSA_MD4 = "1.2.840.113549.2.4"
+szOID_RSA_MD5 = "1.2.840.113549.2.5"
+szOID_RSA_RC2CBC = "1.2.840.113549.3.2"
+szOID_RSA_RC4 = "1.2.840.113549.3.4"
+szOID_RSA_DES_EDE3_CBC = "1.2.840.113549.3.7"
+szOID_RSA_RC5_CBCPad = "1.2.840.113549.3.9"
+szOID_ANSI_X942 = "1.2.840.10046"
+szOID_ANSI_X942_DH = "1.2.840.10046.2.1"
+szOID_X957 = "1.2.840.10040"
+szOID_X957_DSA = "1.2.840.10040.4.1"
+szOID_X957_SHA1DSA = "1.2.840.10040.4.3"
+szOID_DS = "2.5"
+szOID_DSALG = "2.5.8"
+szOID_DSALG_CRPT = "2.5.8.1"
+szOID_DSALG_HASH = "2.5.8.2"
+szOID_DSALG_SIGN = "2.5.8.3"
+szOID_DSALG_RSA = "2.5.8.1.1"
+szOID_OIW = "1.3.14"
+szOID_OIWSEC = "1.3.14.3.2"
+szOID_OIWSEC_md4RSA = "1.3.14.3.2.2"
+szOID_OIWSEC_md5RSA = "1.3.14.3.2.3"
+szOID_OIWSEC_md4RSA2 = "1.3.14.3.2.4"
+szOID_OIWSEC_desECB = "1.3.14.3.2.6"
+szOID_OIWSEC_desCBC = "1.3.14.3.2.7"
+szOID_OIWSEC_desOFB = "1.3.14.3.2.8"
+szOID_OIWSEC_desCFB = "1.3.14.3.2.9"
+szOID_OIWSEC_desMAC = "1.3.14.3.2.10"
+szOID_OIWSEC_rsaSign = "1.3.14.3.2.11"
+szOID_OIWSEC_dsa = "1.3.14.3.2.12"
+szOID_OIWSEC_shaDSA = "1.3.14.3.2.13"
+szOID_OIWSEC_mdc2RSA = "1.3.14.3.2.14"
+szOID_OIWSEC_shaRSA = "1.3.14.3.2.15"
+szOID_OIWSEC_dhCommMod = "1.3.14.3.2.16"
+szOID_OIWSEC_desEDE = "1.3.14.3.2.17"
+szOID_OIWSEC_sha = "1.3.14.3.2.18"
+szOID_OIWSEC_mdc2 = "1.3.14.3.2.19"
+szOID_OIWSEC_dsaComm = "1.3.14.3.2.20"
+szOID_OIWSEC_dsaCommSHA = "1.3.14.3.2.21"
+szOID_OIWSEC_rsaXchg = "1.3.14.3.2.22"
+szOID_OIWSEC_keyHashSeal = "1.3.14.3.2.23"
+szOID_OIWSEC_md2RSASign = "1.3.14.3.2.24"
+szOID_OIWSEC_md5RSASign = "1.3.14.3.2.25"
+szOID_OIWSEC_sha1 = "1.3.14.3.2.26"
+szOID_OIWSEC_dsaSHA1 = "1.3.14.3.2.27"
+szOID_OIWSEC_dsaCommSHA1 = "1.3.14.3.2.28"
+szOID_OIWSEC_sha1RSASign = "1.3.14.3.2.29"
+szOID_OIWDIR = "1.3.14.7.2"
+szOID_OIWDIR_CRPT = "1.3.14.7.2.1"
+szOID_OIWDIR_HASH = "1.3.14.7.2.2"
+szOID_OIWDIR_SIGN = "1.3.14.7.2.3"
+szOID_OIWDIR_md2 = "1.3.14.7.2.2.1"
+szOID_OIWDIR_md2RSA = "1.3.14.7.2.3.1"
+szOID_INFOSEC = "2.16.840.1.101.2.1"
+szOID_INFOSEC_sdnsSignature = "2.16.840.1.101.2.1.1.1"
+szOID_INFOSEC_mosaicSignature = "2.16.840.1.101.2.1.1.2"
+szOID_INFOSEC_sdnsConfidentiality = "2.16.840.1.101.2.1.1.3"
+szOID_INFOSEC_mosaicConfidentiality = "2.16.840.1.101.2.1.1.4"
+szOID_INFOSEC_sdnsIntegrity = "2.16.840.1.101.2.1.1.5"
+szOID_INFOSEC_mosaicIntegrity = "2.16.840.1.101.2.1.1.6"
+szOID_INFOSEC_sdnsTokenProtection = "2.16.840.1.101.2.1.1.7"
+szOID_INFOSEC_mosaicTokenProtection = "2.16.840.1.101.2.1.1.8"
+szOID_INFOSEC_sdnsKeyManagement = "2.16.840.1.101.2.1.1.9"
+szOID_INFOSEC_mosaicKeyManagement = "2.16.840.1.101.2.1.1.10"
+szOID_INFOSEC_sdnsKMandSig = "2.16.840.1.101.2.1.1.11"
+szOID_INFOSEC_mosaicKMandSig = "2.16.840.1.101.2.1.1.12"
+szOID_INFOSEC_SuiteASignature = "2.16.840.1.101.2.1.1.13"
+szOID_INFOSEC_SuiteAConfidentiality = "2.16.840.1.101.2.1.1.14"
+szOID_INFOSEC_SuiteAIntegrity = "2.16.840.1.101.2.1.1.15"
+szOID_INFOSEC_SuiteATokenProtection = "2.16.840.1.101.2.1.1.16"
+szOID_INFOSEC_SuiteAKeyManagement = "2.16.840.1.101.2.1.1.17"
+szOID_INFOSEC_SuiteAKMandSig = "2.16.840.1.101.2.1.1.18"
+szOID_INFOSEC_mosaicUpdatedSig = "2.16.840.1.101.2.1.1.19"
+szOID_INFOSEC_mosaicKMandUpdSig = "2.16.840.1.101.2.1.1.20"
+szOID_INFOSEC_mosaicUpdatedInteg = "2.16.840.1.101.2.1.1.21"
+szOID_COMMON_NAME = "2.5.4.3"
+szOID_SUR_NAME = "2.5.4.4"
+szOID_DEVICE_SERIAL_NUMBER = "2.5.4.5"
+szOID_COUNTRY_NAME = "2.5.4.6"
+szOID_LOCALITY_NAME = "2.5.4.7"
+szOID_STATE_OR_PROVINCE_NAME = "2.5.4.8"
+szOID_STREET_ADDRESS = "2.5.4.9"
+szOID_ORGANIZATION_NAME = "2.5.4.10"
+szOID_ORGANIZATIONAL_UNIT_NAME = "2.5.4.11"
+szOID_TITLE = "2.5.4.12"
+szOID_DESCRIPTION = "2.5.4.13"
+szOID_SEARCH_GUIDE = "2.5.4.14"
+szOID_BUSINESS_CATEGORY = "2.5.4.15"
+szOID_POSTAL_ADDRESS = "2.5.4.16"
+szOID_POSTAL_CODE = "2.5.4.17"
+szOID_POST_OFFICE_BOX = "2.5.4.18"
+szOID_PHYSICAL_DELIVERY_OFFICE_NAME = "2.5.4.19"
+szOID_TELEPHONE_NUMBER = "2.5.4.20"
+szOID_TELEX_NUMBER = "2.5.4.21"
+szOID_TELETEXT_TERMINAL_IDENTIFIER = "2.5.4.22"
+szOID_FACSIMILE_TELEPHONE_NUMBER = "2.5.4.23"
+szOID_X21_ADDRESS = "2.5.4.24"
+szOID_INTERNATIONAL_ISDN_NUMBER = "2.5.4.25"
+szOID_REGISTERED_ADDRESS = "2.5.4.26"
+szOID_DESTINATION_INDICATOR = "2.5.4.27"
+szOID_PREFERRED_DELIVERY_METHOD = "2.5.4.28"
+szOID_PRESENTATION_ADDRESS = "2.5.4.29"
+szOID_SUPPORTED_APPLICATION_CONTEXT = "2.5.4.30"
+szOID_MEMBER = "2.5.4.31"
+szOID_OWNER = "2.5.4.32"
+szOID_ROLE_OCCUPANT = "2.5.4.33"
+szOID_SEE_ALSO = "2.5.4.34"
+szOID_USER_PASSWORD = "2.5.4.35"
+szOID_USER_CERTIFICATE = "2.5.4.36"
+szOID_CA_CERTIFICATE = "2.5.4.37"
+szOID_AUTHORITY_REVOCATION_LIST = "2.5.4.38"
+szOID_CERTIFICATE_REVOCATION_LIST = "2.5.4.39"
+szOID_CROSS_CERTIFICATE_PAIR = "2.5.4.40"
+szOID_GIVEN_NAME = "2.5.4.42"
+szOID_INITIALS = "2.5.4.43"
+szOID_DN_QUALIFIER = "2.5.4.46"
+szOID_DOMAIN_COMPONENT = "0.9.2342.19200300.100.1.25"
+szOID_PKCS_12_FRIENDLY_NAME_ATTR = "1.2.840.113549.1.9.20"
+szOID_PKCS_12_LOCAL_KEY_ID = "1.2.840.113549.1.9.21"
+szOID_PKCS_12_KEY_PROVIDER_NAME_ATTR = "1.3.6.1.4.1.311.17.1"
+szOID_LOCAL_MACHINE_KEYSET = "1.3.6.1.4.1.311.17.2"
+szOID_KEYID_RDN = "1.3.6.1.4.1.311.10.7.1"
+CERT_RDN_ANY_TYPE = 0
+CERT_RDN_ENCODED_BLOB = 1
+CERT_RDN_OCTET_STRING = 2
+CERT_RDN_NUMERIC_STRING = 3
+CERT_RDN_PRINTABLE_STRING = 4
+CERT_RDN_TELETEX_STRING = 5
+CERT_RDN_T61_STRING = 5
+CERT_RDN_VIDEOTEX_STRING = 6
+CERT_RDN_IA5_STRING = 7
+CERT_RDN_GRAPHIC_STRING = 8
+CERT_RDN_VISIBLE_STRING = 9
+CERT_RDN_ISO646_STRING = 9
+CERT_RDN_GENERAL_STRING = 10
+CERT_RDN_UNIVERSAL_STRING = 11
+CERT_RDN_INT4_STRING = 11
+CERT_RDN_BMP_STRING = 12
+CERT_RDN_UNICODE_STRING = 12
+CERT_RDN_UTF8_STRING = 13
+CERT_RDN_TYPE_MASK = 0x000000FF
+CERT_RDN_FLAGS_MASK = (-16777216)
+CERT_RDN_ENABLE_T61_UNICODE_FLAG = (-2147483648)
+CERT_RDN_ENABLE_UTF8_UNICODE_FLAG = 0x20000000
+CERT_RDN_DISABLE_CHECK_TYPE_FLAG = 0x40000000
+CERT_RDN_DISABLE_IE4_UTF8_FLAG = 0x01000000
+CERT_RSA_PUBLIC_KEY_OBJID = szOID_RSA_RSA
+CERT_DEFAULT_OID_PUBLIC_KEY_SIGN = szOID_RSA_RSA
+CERT_DEFAULT_OID_PUBLIC_KEY_XCHG = szOID_RSA_RSA
+CERT_V1 = 0
+CERT_V2 = 1
+CERT_V3 = 2
+CERT_INFO_VERSION_FLAG = 1
+CERT_INFO_SERIAL_NUMBER_FLAG = 2
+CERT_INFO_SIGNATURE_ALGORITHM_FLAG = 3
+CERT_INFO_ISSUER_FLAG = 4
+CERT_INFO_NOT_BEFORE_FLAG = 5
+CERT_INFO_NOT_AFTER_FLAG = 6
+CERT_INFO_SUBJECT_FLAG = 7
+CERT_INFO_SUBJECT_PUBLIC_KEY_INFO_FLAG = 8
+CERT_INFO_ISSUER_UNIQUE_ID_FLAG = 9
+CERT_INFO_SUBJECT_UNIQUE_ID_FLAG = 10
+CERT_INFO_EXTENSION_FLAG = 11
+CRL_V1 = 0
+CRL_V2 = 1
+CERT_REQUEST_V1 = 0
+CERT_KEYGEN_REQUEST_V1 = 0
+CTL_V1 = 0
# An encoding-type DWORD packs the certificate encoding in the low 16 bits
# and the message encoding in the high 16 bits; these masks split it apart.
CERT_ENCODING_TYPE_MASK = 0x0000FFFF
CMSG_ENCODING_TYPE_MASK = (-65536)

def GET_CERT_ENCODING_TYPE(X):
    """Extract the certificate-encoding bits (low word) of *X*."""
    return X & CERT_ENCODING_TYPE_MASK

def GET_CMSG_ENCODING_TYPE(X):
    """Extract the message-encoding bits (high word) of *X*."""
    return X & CMSG_ENCODING_TYPE_MASK
+
# Encoding-type values; X509_* alias CRYPT_* and PKCS_7_* occupy the high word.
CRYPT_ASN_ENCODING = 0x00000001
CRYPT_NDR_ENCODING = 0x00000002
X509_ASN_ENCODING = 0x00000001
X509_NDR_ENCODING = 0x00000002
PKCS_7_ASN_ENCODING = 0x00010000
PKCS_7_NDR_ENCODING = 0x00020000
# CRYPT_FORMAT_* string-formatting flags.
CRYPT_FORMAT_STR_MULTI_LINE = 0x0001
CRYPT_FORMAT_STR_NO_HEX = 0x0010
CRYPT_FORMAT_SIMPLE = 0x0001
CRYPT_FORMAT_X509 = 0x0002
CRYPT_FORMAT_OID = 0x0004
CRYPT_FORMAT_RDN_SEMICOLON = 0x0100
CRYPT_FORMAT_RDN_CRLF = 0x0200
CRYPT_FORMAT_RDN_UNQUOTE = 0x0400
CRYPT_FORMAT_RDN_REVERSE = 0x0800
CRYPT_FORMAT_COMMA = 0x1000
CRYPT_FORMAT_SEMICOLON = CRYPT_FORMAT_RDN_SEMICOLON
CRYPT_FORMAT_CRLF = CRYPT_FORMAT_RDN_CRLF
# Encode/decode option flags; the CRYPT_UNICODE_NAME_* names alias the
# CERT_RDN_* flags defined earlier.
CRYPT_ENCODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8
CRYPT_ENCODE_ALLOC_FLAG = 0x8000
CRYPT_UNICODE_NAME_ENCODE_ENABLE_T61_UNICODE_FLAG = \
            CERT_RDN_ENABLE_T61_UNICODE_FLAG
CRYPT_UNICODE_NAME_ENCODE_ENABLE_UTF8_UNICODE_FLAG = \
            CERT_RDN_ENABLE_UTF8_UNICODE_FLAG
CRYPT_UNICODE_NAME_ENCODE_DISABLE_CHECK_TYPE_FLAG = \
            CERT_RDN_DISABLE_CHECK_TYPE_FLAG
CRYPT_SORTED_CTL_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x10000
CRYPT_DECODE_NOCOPY_FLAG = 0x1
CRYPT_DECODE_TO_BE_SIGNED_FLAG = 0x2
CRYPT_DECODE_SHARE_OID_STRING_FLAG = 0x4
CRYPT_DECODE_NO_SIGNATURE_BYTE_REVERSAL_FLAG = 0x8
CRYPT_DECODE_ALLOC_FLAG = 0x8000
CRYPT_UNICODE_NAME_DECODE_DISABLE_IE4_UTF8_FLAG = \
            CERT_RDN_DISABLE_IE4_UTF8_FLAG
CRYPT_ENCODE_DECODE_NONE = 0
# Certificate-extension OIDs (2.5.29.*) and Microsoft (1.3.6.1.4.1.311.*) arcs.
szOID_AUTHORITY_KEY_IDENTIFIER = "2.5.29.1"
szOID_KEY_ATTRIBUTES = "2.5.29.2"
szOID_CERT_POLICIES_95 = "2.5.29.3"
szOID_KEY_USAGE_RESTRICTION = "2.5.29.4"
szOID_SUBJECT_ALT_NAME = "2.5.29.7"
szOID_ISSUER_ALT_NAME = "2.5.29.8"
szOID_BASIC_CONSTRAINTS = "2.5.29.10"
szOID_KEY_USAGE = "2.5.29.15"
szOID_PRIVATEKEY_USAGE_PERIOD = "2.5.29.16"
szOID_BASIC_CONSTRAINTS2 = "2.5.29.19"
szOID_CERT_POLICIES = "2.5.29.32"
szOID_ANY_CERT_POLICY = "2.5.29.32.0"
szOID_AUTHORITY_KEY_IDENTIFIER2 = "2.5.29.35"
szOID_SUBJECT_KEY_IDENTIFIER = "2.5.29.14"
szOID_SUBJECT_ALT_NAME2 = "2.5.29.17"
szOID_ISSUER_ALT_NAME2 = "2.5.29.18"
szOID_CRL_REASON_CODE = "2.5.29.21"
szOID_REASON_CODE_HOLD = "2.5.29.23"
szOID_CRL_DIST_POINTS = "2.5.29.31"
szOID_ENHANCED_KEY_USAGE = "2.5.29.37"
szOID_CRL_NUMBER = "2.5.29.20"
szOID_DELTA_CRL_INDICATOR = "2.5.29.27"
szOID_ISSUING_DIST_POINT = "2.5.29.28"
szOID_FRESHEST_CRL = "2.5.29.46"
szOID_NAME_CONSTRAINTS = "2.5.29.30"
szOID_POLICY_MAPPINGS = "2.5.29.33"
szOID_LEGACY_POLICY_MAPPINGS = "2.5.29.5"
szOID_POLICY_CONSTRAINTS = "2.5.29.36"
szOID_RENEWAL_CERTIFICATE = "1.3.6.1.4.1.311.13.1"
szOID_ENROLLMENT_NAME_VALUE_PAIR = "1.3.6.1.4.1.311.13.2.1"
szOID_ENROLLMENT_CSP_PROVIDER = "1.3.6.1.4.1.311.13.2.2"
szOID_OS_VERSION = "1.3.6.1.4.1.311.13.2.3"
szOID_ENROLLMENT_AGENT = "1.3.6.1.4.1.311.20.2.1"
szOID_PKIX = "1.3.6.1.5.5.7"
szOID_PKIX_PE = "1.3.6.1.5.5.7.1"
szOID_AUTHORITY_INFO_ACCESS = "1.3.6.1.5.5.7.1.1"
szOID_CERT_EXTENSIONS = "1.3.6.1.4.1.311.2.1.14"
szOID_NEXT_UPDATE_LOCATION = "1.3.6.1.4.1.311.10.2"
szOID_REMOVE_CERTIFICATE = "1.3.6.1.4.1.311.10.8.1"
szOID_CROSS_CERT_DIST_POINTS = "1.3.6.1.4.1.311.10.9.1"
szOID_CTL = "1.3.6.1.4.1.311.10.1"
szOID_SORTED_CTL = "1.3.6.1.4.1.311.10.1.1"
szOID_SERIALIZED = "1.3.6.1.4.1.311.10.3.3.1"
szOID_NT_PRINCIPAL_NAME = "1.3.6.1.4.1.311.20.2.3"
szOID_PRODUCT_UPDATE = "1.3.6.1.4.1.311.31.1"
szOID_ANY_APPLICATION_POLICY = "1.3.6.1.4.1.311.10.12.1"
szOID_AUTO_ENROLL_CTL_USAGE = "1.3.6.1.4.1.311.20.1"
szOID_ENROLL_CERTTYPE_EXTENSION = "1.3.6.1.4.1.311.20.2"
szOID_CERT_MANIFOLD = "1.3.6.1.4.1.311.20.3"
szOID_CERTSRV_CA_VERSION = "1.3.6.1.4.1.311.21.1"
szOID_CERTSRV_PREVIOUS_CERT_HASH = "1.3.6.1.4.1.311.21.2"
szOID_CRL_VIRTUAL_BASE = "1.3.6.1.4.1.311.21.3"
szOID_CRL_NEXT_PUBLISH = "1.3.6.1.4.1.311.21.4"
szOID_KP_CA_EXCHANGE = "1.3.6.1.4.1.311.21.5"
szOID_KP_KEY_RECOVERY_AGENT = "1.3.6.1.4.1.311.21.6"
szOID_CERTIFICATE_TEMPLATE = "1.3.6.1.4.1.311.21.7"
szOID_ENTERPRISE_OID_ROOT = "1.3.6.1.4.1.311.21.8"
szOID_RDN_DUMMY_SIGNER = "1.3.6.1.4.1.311.21.9"
szOID_APPLICATION_CERT_POLICIES = "1.3.6.1.4.1.311.21.10"
szOID_APPLICATION_POLICY_MAPPINGS = "1.3.6.1.4.1.311.21.11"
szOID_APPLICATION_POLICY_CONSTRAINTS = "1.3.6.1.4.1.311.21.12"
szOID_ARCHIVED_KEY_ATTR = "1.3.6.1.4.1.311.21.13"
szOID_CRL_SELF_CDP = "1.3.6.1.4.1.311.21.14"
szOID_REQUIRE_CERT_CHAIN_POLICY = "1.3.6.1.4.1.311.21.15"
szOID_ARCHIVED_KEY_CERT_HASH = "1.3.6.1.4.1.311.21.16"
szOID_ISSUED_CERT_HASH = "1.3.6.1.4.1.311.21.17"
szOID_DS_EMAIL_REPLICATION = "1.3.6.1.4.1.311.21.19"
szOID_REQUEST_CLIENT_INFO = "1.3.6.1.4.1.311.21.20"
szOID_ENCRYPTED_KEY_HASH = "1.3.6.1.4.1.311.21.21"
szOID_CERTSRV_CROSSCA_VERSION = "1.3.6.1.4.1.311.21.22"
szOID_NTDS_REPLICATION = "1.3.6.1.4.1.311.25.1"
szOID_SUBJECT_DIR_ATTRS = "2.5.29.9"
# Enhanced-key-usage (purpose) OIDs.
szOID_PKIX_KP = "1.3.6.1.5.5.7.3"
szOID_PKIX_KP_SERVER_AUTH = "1.3.6.1.5.5.7.3.1"
szOID_PKIX_KP_CLIENT_AUTH = "1.3.6.1.5.5.7.3.2"
szOID_PKIX_KP_CODE_SIGNING = "1.3.6.1.5.5.7.3.3"
szOID_PKIX_KP_EMAIL_PROTECTION = "1.3.6.1.5.5.7.3.4"
szOID_PKIX_KP_IPSEC_END_SYSTEM = "1.3.6.1.5.5.7.3.5"
szOID_PKIX_KP_IPSEC_TUNNEL = "1.3.6.1.5.5.7.3.6"
szOID_PKIX_KP_IPSEC_USER = "1.3.6.1.5.5.7.3.7"
szOID_PKIX_KP_TIMESTAMP_SIGNING = "1.3.6.1.5.5.7.3.8"
szOID_IPSEC_KP_IKE_INTERMEDIATE = "1.3.6.1.5.5.8.2.2"
szOID_KP_CTL_USAGE_SIGNING = "1.3.6.1.4.1.311.10.3.1"
szOID_KP_TIME_STAMP_SIGNING = "1.3.6.1.4.1.311.10.3.2"
szOID_SERVER_GATED_CRYPTO = "1.3.6.1.4.1.311.10.3.3"
szOID_SGC_NETSCAPE = "2.16.840.1.113730.4.1"
szOID_KP_EFS = "1.3.6.1.4.1.311.10.3.4"
szOID_EFS_RECOVERY = "1.3.6.1.4.1.311.10.3.4.1"
szOID_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.5"
szOID_NT5_CRYPTO = "1.3.6.1.4.1.311.10.3.6"
szOID_OEM_WHQL_CRYPTO = "1.3.6.1.4.1.311.10.3.7"
szOID_EMBEDDED_NT_CRYPTO = "1.3.6.1.4.1.311.10.3.8"
szOID_ROOT_LIST_SIGNER = "1.3.6.1.4.1.311.10.3.9"
szOID_KP_QUALIFIED_SUBORDINATION = "1.3.6.1.4.1.311.10.3.10"
szOID_KP_KEY_RECOVERY = "1.3.6.1.4.1.311.10.3.11"
szOID_KP_DOCUMENT_SIGNING = "1.3.6.1.4.1.311.10.3.12"
szOID_KP_LIFETIME_SIGNING = "1.3.6.1.4.1.311.10.3.13"
szOID_KP_MOBILE_DEVICE_SOFTWARE = "1.3.6.1.4.1.311.10.3.14"
szOID_DRM = "1.3.6.1.4.1.311.10.5.1"
szOID_DRM_INDIVIDUALIZATION = "1.3.6.1.4.1.311.10.5.2"
szOID_LICENSES = "1.3.6.1.4.1.311.10.6.1"
szOID_LICENSE_SERVER = "1.3.6.1.4.1.311.10.6.2"
szOID_KP_SMARTCARD_LOGON = "1.3.6.1.4.1.311.20.2.2"
szOID_YESNO_TRUST_ATTR = "1.3.6.1.4.1.311.10.4.1"
szOID_PKIX_POLICY_QUALIFIER_CPS = "1.3.6.1.5.5.7.2.1"
szOID_PKIX_POLICY_QUALIFIER_USERNOTICE = "1.3.6.1.5.5.7.2.2"
szOID_CERT_POLICIES_95_QUALIFIER1 = "2.16.840.1.113733.1.7.1.1"
# Error-index masks/shifts for unicode-name encode/decode failures.
CERT_UNICODE_RDN_ERR_INDEX_MASK = 0x3FF
CERT_UNICODE_RDN_ERR_INDEX_SHIFT = 22
CERT_UNICODE_ATTR_ERR_INDEX_MASK = 0x003F
CERT_UNICODE_ATTR_ERR_INDEX_SHIFT = 16
CERT_UNICODE_VALUE_ERR_INDEX_MASK = 0x0000FFFF
CERT_UNICODE_VALUE_ERR_INDEX_SHIFT = 0
# Key-usage extension bit flags; OFFLINE_CRL_SIGN and CRL_SIGN share 0x02.
CERT_DIGITAL_SIGNATURE_KEY_USAGE = 0x80
CERT_NON_REPUDIATION_KEY_USAGE = 0x40
CERT_KEY_ENCIPHERMENT_KEY_USAGE = 0x20
CERT_DATA_ENCIPHERMENT_KEY_USAGE = 0x10
CERT_KEY_AGREEMENT_KEY_USAGE = 0x08
CERT_KEY_CERT_SIGN_KEY_USAGE = 0x04
CERT_OFFLINE_CRL_SIGN_KEY_USAGE = 0x02
CERT_CRL_SIGN_KEY_USAGE = 0x02
CERT_ENCIPHER_ONLY_KEY_USAGE = 0x01
CERT_DECIPHER_ONLY_KEY_USAGE = 0x80
# Alternative-name entry types.
CERT_ALT_NAME_OTHER_NAME = 1
CERT_ALT_NAME_RFC822_NAME = 2
CERT_ALT_NAME_DNS_NAME = 3
CERT_ALT_NAME_X400_ADDRESS = 4
CERT_ALT_NAME_DIRECTORY_NAME = 5
CERT_ALT_NAME_EDI_PARTY_NAME = 6
CERT_ALT_NAME_URL = 7
CERT_ALT_NAME_IP_ADDRESS = 8
CERT_ALT_NAME_REGISTERED_ID = 9
CERT_ALT_NAME_ENTRY_ERR_INDEX_MASK = 0xFF
CERT_ALT_NAME_ENTRY_ERR_INDEX_SHIFT = 16
CERT_ALT_NAME_VALUE_ERR_INDEX_MASK = 0x0000FFFF
CERT_ALT_NAME_VALUE_ERR_INDEX_SHIFT = 0
CERT_CA_SUBJECT_FLAG = 0x80
CERT_END_ENTITY_SUBJECT_FLAG = 0x40
szOID_PKIX_ACC_DESCR = "1.3.6.1.5.5.7.48"
szOID_PKIX_OCSP = "1.3.6.1.5.5.7.48.1"
szOID_PKIX_CA_ISSUERS = "1.3.6.1.5.5.7.48.2"
# CRL revocation reason codes (7 is intentionally unassigned, as in the header).
CRL_REASON_UNSPECIFIED = 0
CRL_REASON_KEY_COMPROMISE = 1
CRL_REASON_CA_COMPROMISE = 2
CRL_REASON_AFFILIATION_CHANGED = 3
CRL_REASON_SUPERSEDED = 4
CRL_REASON_CESSATION_OF_OPERATION = 5
CRL_REASON_CERTIFICATE_HOLD = 6
CRL_REASON_REMOVE_FROM_CRL = 8
CRL_DIST_POINT_NO_NAME = 0
CRL_DIST_POINT_FULL_NAME = 1
CRL_DIST_POINT_ISSUER_RDN_NAME = 2
CRL_REASON_UNUSED_FLAG = 0x80
CRL_REASON_KEY_COMPROMISE_FLAG = 0x40
CRL_REASON_CA_COMPROMISE_FLAG = 0x20
CRL_REASON_AFFILIATION_CHANGED_FLAG = 0x10
CRL_REASON_SUPERSEDED_FLAG = 0x08
CRL_REASON_CESSATION_OF_OPERATION_FLAG = 0x04
CRL_REASON_CERTIFICATE_HOLD_FLAG = 0x02
CRL_DIST_POINT_ERR_INDEX_MASK = 0x7F
CRL_DIST_POINT_ERR_INDEX_SHIFT = 24

# Signed-DWORD form of 0x80000000.
CRL_DIST_POINT_ERR_CRL_ISSUER_BIT = (-2147483648)

CROSS_CERT_DIST_POINT_ERR_INDEX_MASK = 0xFF
CROSS_CERT_DIST_POINT_ERR_INDEX_SHIFT = 24

# Signed-DWORD form of 0x80000000.
CERT_EXCLUDED_SUBTREE_BIT = (-2147483648)

# Byte offsets (DWORD-sized fields) within a sorted-CTL extension.
SORTED_CTL_EXT_FLAGS_OFFSET = (0*4)
SORTED_CTL_EXT_COUNT_OFFSET = (1*4)
SORTED_CTL_EXT_MAX_COLLISION_OFFSET = (2*4)
SORTED_CTL_EXT_HASH_BUCKET_OFFSET = (3*4)
SORTED_CTL_EXT_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x1
# DSS signature component lengths (bytes).
CERT_DSS_R_LEN = 20
CERT_DSS_S_LEN = 20
CERT_DSS_SIGNATURE_LEN = (CERT_DSS_R_LEN + CERT_DSS_S_LEN)
CERT_MAX_ASN_ENCODED_DSS_SIGNATURE_LEN = (2 + 2*(2 + 20 +1))
# X9.42 key-derivation field lengths in bytes.
CRYPT_X942_COUNTER_BYTE_LENGTH = 4
CRYPT_X942_KEY_LENGTH_BYTE_LENGTH = 4
# Floor division keeps the value the integer 64 under Python 3's true
# division; plain (512/8) would become the float 64.0, unlike the C #define
# this mirrors.  The value is unchanged under Python 2.
CRYPT_X942_PUB_INFO_BYTE_LENGTH = (512 // 8)
# RC2 effective-key-length version markers.
CRYPT_RC2_40BIT_VERSION = 160
CRYPT_RC2_56BIT_VERSION = 52
CRYPT_RC2_64BIT_VERSION = 120
CRYPT_RC2_128BIT_VERSION = 58
# Verisign and Netscape extension OIDs.
szOID_VERISIGN_PRIVATE_6_9 = "2.16.840.1.113733.1.6.9"
szOID_VERISIGN_ONSITE_JURISDICTION_HASH = "2.16.840.1.113733.1.6.11"
szOID_VERISIGN_BITSTRING_6_13 = "2.16.840.1.113733.1.6.13"
szOID_VERISIGN_ISS_STRONG_CRYPTO = "2.16.840.1.113733.1.8.1"
szOID_NETSCAPE = "2.16.840.1.113730"
szOID_NETSCAPE_CERT_EXTENSION = "2.16.840.1.113730.1"
szOID_NETSCAPE_CERT_TYPE = "2.16.840.1.113730.1.1"
szOID_NETSCAPE_BASE_URL = "2.16.840.1.113730.1.2"
szOID_NETSCAPE_REVOCATION_URL = "2.16.840.1.113730.1.3"
szOID_NETSCAPE_CA_REVOCATION_URL = "2.16.840.1.113730.1.4"
szOID_NETSCAPE_CERT_RENEWAL_URL = "2.16.840.1.113730.1.7"
szOID_NETSCAPE_CA_POLICY_URL = "2.16.840.1.113730.1.8"
szOID_NETSCAPE_SSL_SERVER_NAME = "2.16.840.1.113730.1.12"
szOID_NETSCAPE_COMMENT = "2.16.840.1.113730.1.13"
szOID_NETSCAPE_DATA_TYPE = "2.16.840.1.113730.2"
szOID_NETSCAPE_CERT_SEQUENCE = "2.16.840.1.113730.2.5"
# Netscape cert-type extension bit flags.
NETSCAPE_SSL_CLIENT_AUTH_CERT_TYPE = 0x80
NETSCAPE_SSL_SERVER_AUTH_CERT_TYPE = 0x40
NETSCAPE_SMIME_CERT_TYPE = 0x20
NETSCAPE_SIGN_CERT_TYPE = 0x10
NETSCAPE_SSL_CA_CERT_TYPE = 0x04
NETSCAPE_SMIME_CA_CERT_TYPE = 0x02
NETSCAPE_SIGN_CA_CERT_TYPE = 0x01
# CMC (certificate management) OIDs (1.3.6.1.5.5.7.7.*) and status codes.
szOID_CT_PKI_DATA = "1.3.6.1.5.5.7.12.2"
szOID_CT_PKI_RESPONSE = "1.3.6.1.5.5.7.12.3"
szOID_PKIX_NO_SIGNATURE = "1.3.6.1.5.5.7.6.2"
szOID_CMC = "1.3.6.1.5.5.7.7"
szOID_CMC_STATUS_INFO = "1.3.6.1.5.5.7.7.1"
szOID_CMC_IDENTIFICATION = "1.3.6.1.5.5.7.7.2"
szOID_CMC_IDENTITY_PROOF = "1.3.6.1.5.5.7.7.3"
szOID_CMC_DATA_RETURN = "1.3.6.1.5.5.7.7.4"
szOID_CMC_TRANSACTION_ID = "1.3.6.1.5.5.7.7.5"
szOID_CMC_SENDER_NONCE = "1.3.6.1.5.5.7.7.6"
szOID_CMC_RECIPIENT_NONCE = "1.3.6.1.5.5.7.7.7"
szOID_CMC_ADD_EXTENSIONS = "1.3.6.1.5.5.7.7.8"
szOID_CMC_ENCRYPTED_POP = "1.3.6.1.5.5.7.7.9"
szOID_CMC_DECRYPTED_POP = "1.3.6.1.5.5.7.7.10"
szOID_CMC_LRA_POP_WITNESS = "1.3.6.1.5.5.7.7.11"
szOID_CMC_GET_CERT = "1.3.6.1.5.5.7.7.15"
szOID_CMC_GET_CRL = "1.3.6.1.5.5.7.7.16"
szOID_CMC_REVOKE_REQUEST = "1.3.6.1.5.5.7.7.17"
szOID_CMC_REG_INFO = "1.3.6.1.5.5.7.7.18"
szOID_CMC_RESPONSE_INFO = "1.3.6.1.5.5.7.7.19"
szOID_CMC_QUERY_PENDING = "1.3.6.1.5.5.7.7.21"
szOID_CMC_ID_POP_LINK_RANDOM = "1.3.6.1.5.5.7.7.22"
szOID_CMC_ID_POP_LINK_WITNESS = "1.3.6.1.5.5.7.7.23"
szOID_CMC_ID_CONFIRM_CERT_ACCEPTANCE = "1.3.6.1.5.5.7.7.24"
szOID_CMC_ADD_ATTRIBUTES = "1.3.6.1.4.1.311.10.10.1"
CMC_TAGGED_CERT_REQUEST_CHOICE = 1
CMC_OTHER_INFO_NO_CHOICE = 0
CMC_OTHER_INFO_FAIL_CHOICE = 1
CMC_OTHER_INFO_PEND_CHOICE = 2
CMC_STATUS_SUCCESS = 0
CMC_STATUS_FAILED = 2
CMC_STATUS_PENDING = 3
CMC_STATUS_NO_SUPPORT = 4
CMC_STATUS_CONFIRM_REQUIRED = 5
# CMC failure reason codes; UNSUPORTED spelling matches the C header.
CMC_FAIL_BAD_ALG = 0
CMC_FAIL_BAD_MESSAGE_CHECK = 1
CMC_FAIL_BAD_REQUEST = 2
CMC_FAIL_BAD_TIME = 3
CMC_FAIL_BAD_CERT_ID = 4
CMC_FAIL_UNSUPORTED_EXT = 5
CMC_FAIL_MUST_ARCHIVE_KEYS = 6
CMC_FAIL_BAD_IDENTITY = 7
CMC_FAIL_POP_REQUIRED = 8
CMC_FAIL_POP_FAILED = 9
CMC_FAIL_NO_KEY_REUSE = 10
CMC_FAIL_INTERNAL_CA_ERROR = 11
CMC_FAIL_TRY_LATER = 12
# Exported-function names used by the CryptoAPI OID registration machinery.
CRYPT_OID_ENCODE_OBJECT_FUNC = "CryptDllEncodeObject"
CRYPT_OID_DECODE_OBJECT_FUNC = "CryptDllDecodeObject"
CRYPT_OID_ENCODE_OBJECT_EX_FUNC = "CryptDllEncodeObjectEx"
CRYPT_OID_DECODE_OBJECT_EX_FUNC = "CryptDllDecodeObjectEx"
CRYPT_OID_CREATE_COM_OBJECT_FUNC = "CryptDllCreateCOMObject"
CRYPT_OID_VERIFY_REVOCATION_FUNC = "CertDllVerifyRevocation"
CRYPT_OID_VERIFY_CTL_USAGE_FUNC = "CertDllVerifyCTLUsage"
CRYPT_OID_FORMAT_OBJECT_FUNC = "CryptDllFormatObject"
CRYPT_OID_FIND_OID_INFO_FUNC = "CryptDllFindOIDInfo"
CRYPT_OID_FIND_LOCALIZED_NAME_FUNC = "CryptDllFindLocalizedName"
CRYPT_OID_REGPATH = "Software\\Microsoft\\Cryptography\\OID"
CRYPT_OID_REG_ENCODING_TYPE_PREFIX = "EncodingType "
CRYPT_OID_REG_FUNC_NAME_VALUE_NAME_A = "FuncName"
CRYPT_DEFAULT_OID = "DEFAULT"
CRYPT_INSTALL_OID_FUNC_BEFORE_FLAG = 1
CRYPT_GET_INSTALLED_OID_FUNC_FLAG = 0x1
CRYPT_REGISTER_FIRST_INDEX = 0
CRYPT_REGISTER_LAST_INDEX = (-1)
CRYPT_MATCH_ANY_ENCODING_TYPE = (-1)
# OID group identifiers.
CRYPT_HASH_ALG_OID_GROUP_ID = 1
CRYPT_ENCRYPT_ALG_OID_GROUP_ID = 2
CRYPT_PUBKEY_ALG_OID_GROUP_ID = 3
CRYPT_SIGN_ALG_OID_GROUP_ID = 4
CRYPT_RDN_ATTR_OID_GROUP_ID = 5
CRYPT_EXT_OR_ATTR_OID_GROUP_ID = 6
CRYPT_ENHKEY_USAGE_OID_GROUP_ID = 7
CRYPT_POLICY_OID_GROUP_ID = 8
CRYPT_TEMPLATE_OID_GROUP_ID = 9
CRYPT_LAST_OID_GROUP_ID = 9
CRYPT_FIRST_ALG_OID_GROUP_ID = CRYPT_HASH_ALG_OID_GROUP_ID
CRYPT_LAST_ALG_OID_GROUP_ID = CRYPT_SIGN_ALG_OID_GROUP_ID
CRYPT_OID_INHIBIT_SIGNATURE_FORMAT_FLAG = 0x1
CRYPT_OID_USE_PUBKEY_PARA_FOR_PKCS7_FLAG = 0x2
CRYPT_OID_NO_NULL_ALGORITHM_PARA_FLAG = 0x4
CRYPT_OID_INFO_OID_KEY = 1
CRYPT_OID_INFO_NAME_KEY = 2
CRYPT_OID_INFO_ALGID_KEY = 3
CRYPT_OID_INFO_SIGN_KEY = 4
CRYPT_INSTALL_OID_INFO_BEFORE_FLAG = 1
CRYPT_LOCALIZED_NAME_ENCODING_TYPE = 0
CRYPT_LOCALIZED_NAME_OID = "LocalizedNames"
# PKCS#7 content-type and attribute OIDs (same values as szOID_RSA_* above).
szOID_PKCS_7_DATA = "1.2.840.113549.1.7.1"
szOID_PKCS_7_SIGNED = "1.2.840.113549.1.7.2"
szOID_PKCS_7_ENVELOPED = "1.2.840.113549.1.7.3"
szOID_PKCS_7_SIGNEDANDENVELOPED = "1.2.840.113549.1.7.4"
szOID_PKCS_7_DIGESTED = "1.2.840.113549.1.7.5"
szOID_PKCS_7_ENCRYPTED = "1.2.840.113549.1.7.6"
szOID_PKCS_9_CONTENT_TYPE = "1.2.840.113549.1.9.3"
szOID_PKCS_9_MESSAGE_DIGEST = "1.2.840.113549.1.9.4"
# Low-level message (CMSG_*) types; the *_FLAG values are the matching bits.
CMSG_DATA = 1
CMSG_SIGNED = 2
CMSG_ENVELOPED = 3
CMSG_SIGNED_AND_ENVELOPED = 4
CMSG_HASHED = 5
CMSG_ENCRYPTED = 6
CMSG_DATA_FLAG = (1 << CMSG_DATA)
CMSG_SIGNED_FLAG = (1 << CMSG_SIGNED)
CMSG_ENVELOPED_FLAG = (1 << CMSG_ENVELOPED)
CMSG_SIGNED_AND_ENVELOPED_FLAG = (1 << CMSG_SIGNED_AND_ENVELOPED)
CMSG_HASHED_FLAG = (1 << CMSG_HASHED)
CMSG_ENCRYPTED_FLAG = (1 << CMSG_ENCRYPTED)
CERT_ID_ISSUER_SERIAL_NUMBER = 1
CERT_ID_KEY_IDENTIFIER = 2
CERT_ID_SHA1_HASH = 3
CMSG_KEY_AGREE_EPHEMERAL_KEY_CHOICE = 1
CMSG_KEY_AGREE_STATIC_KEY_CHOICE = 2
CMSG_MAIL_LIST_HANDLE_KEY_CHOICE = 1
CMSG_KEY_TRANS_RECIPIENT = 1
CMSG_KEY_AGREE_RECIPIENT = 2
CMSG_MAIL_LIST_RECIPIENT = 3
# -2147483648 is the signed-DWORD form of 0x80000000.
CMSG_SP3_COMPATIBLE_ENCRYPT_FLAG = (-2147483648)
CMSG_RC4_NO_SALT_FLAG = 0x40000000
CMSG_INDEFINITE_LENGTH = ((-1))
CMSG_BARE_CONTENT_FLAG = 0x00000001
CMSG_LENGTH_ONLY_FLAG = 0x00000002
CMSG_DETACHED_FLAG = 0x00000004
CMSG_AUTHENTICATED_ATTRIBUTES_FLAG = 0x00000008
CMSG_CONTENTS_OCTETS_FLAG = 0x00000010
CMSG_MAX_LENGTH_FLAG = 0x00000020
CMSG_CMS_ENCAPSULATED_CONTENT_FLAG = 0x00000040
CMSG_CRYPT_RELEASE_CONTEXT_FLAG = 0x00008000
# CMSG_* parameter identifiers (note the gaps at 16 and 23-25).
CMSG_TYPE_PARAM = 1
CMSG_CONTENT_PARAM = 2
CMSG_BARE_CONTENT_PARAM = 3
CMSG_INNER_CONTENT_TYPE_PARAM = 4
CMSG_SIGNER_COUNT_PARAM = 5
CMSG_SIGNER_INFO_PARAM = 6
CMSG_SIGNER_CERT_INFO_PARAM = 7
CMSG_SIGNER_HASH_ALGORITHM_PARAM = 8
CMSG_SIGNER_AUTH_ATTR_PARAM = 9
CMSG_SIGNER_UNAUTH_ATTR_PARAM = 10
CMSG_CERT_COUNT_PARAM = 11
CMSG_CERT_PARAM = 12
CMSG_CRL_COUNT_PARAM = 13
CMSG_CRL_PARAM = 14
CMSG_ENVELOPE_ALGORITHM_PARAM = 15
CMSG_RECIPIENT_COUNT_PARAM = 17
CMSG_RECIPIENT_INDEX_PARAM = 18
CMSG_RECIPIENT_INFO_PARAM = 19
CMSG_HASH_ALGORITHM_PARAM = 20
CMSG_HASH_DATA_PARAM = 21
CMSG_COMPUTED_HASH_PARAM = 22
CMSG_ENCRYPT_PARAM = 26
CMSG_ENCRYPTED_DIGEST = 27
CMSG_ENCODED_SIGNER = 28
CMSG_ENCODED_MESSAGE = 29
CMSG_VERSION_PARAM = 30
CMSG_ATTR_CERT_COUNT_PARAM = 31
CMSG_ATTR_CERT_PARAM = 32
CMSG_CMS_RECIPIENT_COUNT_PARAM = 33
CMSG_CMS_RECIPIENT_INDEX_PARAM = 34
CMSG_CMS_RECIPIENT_ENCRYPTED_KEY_INDEX_PARAM = 35
CMSG_CMS_RECIPIENT_INFO_PARAM = 36
CMSG_UNPROTECTED_ATTR_PARAM = 37
CMSG_SIGNER_CERT_ID_PARAM = 38
CMSG_CMS_SIGNER_INFO_PARAM = 39
# Structure versions for the various CMSG payloads.
CMSG_SIGNED_DATA_V1 = 1
CMSG_SIGNED_DATA_V3 = 3
CMSG_SIGNED_DATA_PKCS_1_5_VERSION = CMSG_SIGNED_DATA_V1
CMSG_SIGNED_DATA_CMS_VERSION = CMSG_SIGNED_DATA_V3
CMSG_SIGNER_INFO_V1 = 1
CMSG_SIGNER_INFO_V3 = 3
CMSG_SIGNER_INFO_PKCS_1_5_VERSION = CMSG_SIGNER_INFO_V1
CMSG_SIGNER_INFO_CMS_VERSION = CMSG_SIGNER_INFO_V3
CMSG_HASHED_DATA_V0 = 0
CMSG_HASHED_DATA_V2 = 2
CMSG_HASHED_DATA_PKCS_1_5_VERSION = CMSG_HASHED_DATA_V0
CMSG_HASHED_DATA_CMS_VERSION = CMSG_HASHED_DATA_V2
CMSG_ENVELOPED_DATA_V0 = 0
CMSG_ENVELOPED_DATA_V2 = 2
CMSG_ENVELOPED_DATA_PKCS_1_5_VERSION = CMSG_ENVELOPED_DATA_V0
CMSG_ENVELOPED_DATA_CMS_VERSION = CMSG_ENVELOPED_DATA_V2
CMSG_KEY_AGREE_ORIGINATOR_CERT = 1
CMSG_KEY_AGREE_ORIGINATOR_PUBLIC_KEY = 2
CMSG_ENVELOPED_RECIPIENT_V0 = 0
CMSG_ENVELOPED_RECIPIENT_V2 = 2
CMSG_ENVELOPED_RECIPIENT_V3 = 3
CMSG_ENVELOPED_RECIPIENT_V4 = 4
CMSG_KEY_TRANS_PKCS_1_5_VERSION = CMSG_ENVELOPED_RECIPIENT_V0
CMSG_KEY_TRANS_CMS_VERSION = CMSG_ENVELOPED_RECIPIENT_V2
CMSG_KEY_AGREE_VERSION = CMSG_ENVELOPED_RECIPIENT_V3
CMSG_MAIL_LIST_VERSION = CMSG_ENVELOPED_RECIPIENT_V4
# CMSG control operation identifiers.
CMSG_CTRL_VERIFY_SIGNATURE = 1
CMSG_CTRL_DECRYPT = 2
CMSG_CTRL_VERIFY_HASH = 5
CMSG_CTRL_ADD_SIGNER = 6
CMSG_CTRL_DEL_SIGNER = 7
CMSG_CTRL_ADD_SIGNER_UNAUTH_ATTR = 8
CMSG_CTRL_DEL_SIGNER_UNAUTH_ATTR = 9
CMSG_CTRL_ADD_CERT = 10
CMSG_CTRL_DEL_CERT = 11
CMSG_CTRL_ADD_CRL = 12
CMSG_CTRL_DEL_CRL = 13
CMSG_CTRL_ADD_ATTR_CERT = 14
CMSG_CTRL_DEL_ATTR_CERT = 15
CMSG_CTRL_KEY_TRANS_DECRYPT = 16
CMSG_CTRL_KEY_AGREE_DECRYPT = 17
CMSG_CTRL_MAIL_LIST_DECRYPT = 18
CMSG_CTRL_VERIFY_SIGNATURE_EX = 19
CMSG_CTRL_ADD_CMS_SIGNER_INFO = 20
CMSG_VERIFY_SIGNER_PUBKEY = 1
CMSG_VERIFY_SIGNER_CERT = 2
CMSG_VERIFY_SIGNER_CHAIN = 3
CMSG_VERIFY_SIGNER_NULL = 4
# Exported-function names for pluggable CMSG OID handlers.
CMSG_OID_GEN_ENCRYPT_KEY_FUNC = "CryptMsgDllGenEncryptKey"
CMSG_OID_EXPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllExportEncryptKey"
CMSG_OID_IMPORT_ENCRYPT_KEY_FUNC = "CryptMsgDllImportEncryptKey"
CMSG_CONTENT_ENCRYPT_PAD_ENCODED_LEN_FLAG = 0x00000001
CMSG_CONTENT_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_CONTENT_ENCRYPT_RELEASE_CONTEXT_FLAG = 0x00008000
CMSG_OID_GEN_CONTENT_ENCRYPT_KEY_FUNC = "CryptMsgDllGenContentEncryptKey"
CMSG_KEY_TRANS_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_OID_EXPORT_KEY_TRANS_FUNC = "CryptMsgDllExportKeyTrans"
CMSG_KEY_AGREE_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_KEY_AGREE_ENCRYPT_FREE_MATERIAL_FLAG = 0x00000002
CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_ALG_FLAG = 0x00000004
CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_PARA_FLAG = 0x00000008
CMSG_KEY_AGREE_ENCRYPT_FREE_PUBKEY_BITS_FLAG = 0x00000010
CMSG_OID_EXPORT_KEY_AGREE_FUNC = "CryptMsgDllExportKeyAgree"
CMSG_MAIL_LIST_ENCRYPT_FREE_PARA_FLAG = 0x00000001
CMSG_OID_EXPORT_MAIL_LIST_FUNC = "CryptMsgDllExportMailList"
CMSG_OID_IMPORT_KEY_TRANS_FUNC = "CryptMsgDllImportKeyTrans"
CMSG_OID_IMPORT_KEY_AGREE_FUNC = "CryptMsgDllImportKeyAgree"
CMSG_OID_IMPORT_MAIL_LIST_FUNC = "CryptMsgDllImportMailList"
# Certificate-context property identifiers (CERT_*_PROP_ID).
CERT_KEY_PROV_HANDLE_PROP_ID = 1
CERT_KEY_PROV_INFO_PROP_ID = 2
CERT_SHA1_HASH_PROP_ID = 3
CERT_MD5_HASH_PROP_ID = 4
CERT_HASH_PROP_ID = CERT_SHA1_HASH_PROP_ID
CERT_KEY_CONTEXT_PROP_ID = 5
CERT_KEY_SPEC_PROP_ID = 6
CERT_IE30_RESERVED_PROP_ID = 7
CERT_PUBKEY_HASH_RESERVED_PROP_ID = 8
CERT_ENHKEY_USAGE_PROP_ID = 9
CERT_CTL_USAGE_PROP_ID = CERT_ENHKEY_USAGE_PROP_ID
CERT_NEXT_UPDATE_LOCATION_PROP_ID = 10
CERT_FRIENDLY_NAME_PROP_ID = 11
CERT_PVK_FILE_PROP_ID = 12
CERT_DESCRIPTION_PROP_ID = 13
CERT_ACCESS_STATE_PROP_ID = 14
CERT_SIGNATURE_HASH_PROP_ID = 15
CERT_SMART_CARD_DATA_PROP_ID = 16
CERT_EFS_PROP_ID = 17
CERT_FORTEZZA_DATA_PROP_ID = 18
CERT_ARCHIVED_PROP_ID = 19
CERT_KEY_IDENTIFIER_PROP_ID = 20
CERT_AUTO_ENROLL_PROP_ID = 21
CERT_PUBKEY_ALG_PARA_PROP_ID = 22
CERT_CROSS_CERT_DIST_POINTS_PROP_ID = 23
CERT_ISSUER_PUBLIC_KEY_MD5_HASH_PROP_ID = 24
CERT_SUBJECT_PUBLIC_KEY_MD5_HASH_PROP_ID = 25
CERT_ENROLLMENT_PROP_ID = 26
CERT_DATE_STAMP_PROP_ID = 27
CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = 28
CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = 29
CERT_EXTENDED_ERROR_INFO_PROP_ID = 30
CERT_RENEWAL_PROP_ID = 64
CERT_ARCHIVED_KEY_HASH_PROP_ID = 65
CERT_AUTO_ENROLL_RETRY_PROP_ID = 66
CERT_AIA_URL_RETRIEVED_PROP_ID = 67
CERT_FIRST_RESERVED_PROP_ID = 68
CERT_LAST_RESERVED_PROP_ID = 0x00007FFF
CERT_FIRST_USER_PROP_ID = 0x00008000
CERT_LAST_USER_PROP_ID = 0x0000FFFF
# OID prefix under which property IDs are also addressable.
szOID_CERT_PROP_ID_PREFIX = "1.3.6.1.4.1.311.10.11."
szOID_CERT_KEY_IDENTIFIER_PROP_ID = "1.3.6.1.4.1.311.10.11.20"
szOID_CERT_ISSUER_SERIAL_NUMBER_MD5_HASH_PROP_ID = \
                                            "1.3.6.1.4.1.311.10.11.28"
szOID_CERT_SUBJECT_NAME_MD5_HASH_PROP_ID = \
                                            "1.3.6.1.4.1.311.10.11.29"
CERT_ACCESS_STATE_WRITE_PERSIST_FLAG = 0x1
CERT_ACCESS_STATE_SYSTEM_STORE_FLAG = 0x2
CERT_ACCESS_STATE_LM_SYSTEM_STORE_FLAG = 0x4
CERT_SET_KEY_PROV_HANDLE_PROP_ID = 0x00000001
CERT_SET_KEY_CONTEXT_PROP_ID = 0x00000001
# Certificate-store provider names; unsuffixed names alias the *_W variants.
sz_CERT_STORE_PROV_MEMORY = "Memory"
sz_CERT_STORE_PROV_FILENAME_W = "File"
sz_CERT_STORE_PROV_FILENAME = sz_CERT_STORE_PROV_FILENAME_W
sz_CERT_STORE_PROV_SYSTEM_W = "System"
sz_CERT_STORE_PROV_SYSTEM = sz_CERT_STORE_PROV_SYSTEM_W
sz_CERT_STORE_PROV_PKCS7 = "PKCS7"
sz_CERT_STORE_PROV_SERIALIZED = "Serialized"
sz_CERT_STORE_PROV_COLLECTION = "Collection"
sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W = "SystemRegistry"
sz_CERT_STORE_PROV_SYSTEM_REGISTRY = sz_CERT_STORE_PROV_SYSTEM_REGISTRY_W
sz_CERT_STORE_PROV_PHYSICAL_W = "Physical"
sz_CERT_STORE_PROV_PHYSICAL = sz_CERT_STORE_PROV_PHYSICAL_W
sz_CERT_STORE_PROV_SMART_CARD_W = "SmartCard"
sz_CERT_STORE_PROV_SMART_CARD = sz_CERT_STORE_PROV_SMART_CARD_W
+sz_CERT_STORE_PROV_LDAP_W = "Ldap"
+sz_CERT_STORE_PROV_LDAP = sz_CERT_STORE_PROV_LDAP_W
+CERT_STORE_SIGNATURE_FLAG = 0x00000001
+CERT_STORE_TIME_VALIDITY_FLAG = 0x00000002
+CERT_STORE_REVOCATION_FLAG = 0x00000004
+CERT_STORE_NO_CRL_FLAG = 0x00010000
+CERT_STORE_NO_ISSUER_FLAG = 0x00020000
+CERT_STORE_BASE_CRL_FLAG = 0x00000100
+CERT_STORE_DELTA_CRL_FLAG = 0x00000200
+CERT_STORE_NO_CRYPT_RELEASE_FLAG = 0x00000001
+CERT_STORE_SET_LOCALIZED_NAME_FLAG = 0x00000002
+CERT_STORE_DEFER_CLOSE_UNTIL_LAST_FREE_FLAG = 0x00000004
+CERT_STORE_DELETE_FLAG = 0x00000010
+CERT_STORE_UNSAFE_PHYSICAL_FLAG = 0x00000020
+CERT_STORE_SHARE_STORE_FLAG = 0x00000040
+CERT_STORE_SHARE_CONTEXT_FLAG = 0x00000080
+CERT_STORE_MANIFOLD_FLAG = 0x00000100
+CERT_STORE_ENUM_ARCHIVED_FLAG = 0x00000200
+CERT_STORE_UPDATE_KEYID_FLAG = 0x00000400
+CERT_STORE_BACKUP_RESTORE_FLAG = 0x00000800
+CERT_STORE_READONLY_FLAG = 0x00008000
+CERT_STORE_OPEN_EXISTING_FLAG = 0x00004000
+CERT_STORE_CREATE_NEW_FLAG = 0x00002000
+CERT_STORE_MAXIMUM_ALLOWED_FLAG = 0x00001000
+CERT_SYSTEM_STORE_MASK = (-65536)
+CERT_SYSTEM_STORE_RELOCATE_FLAG = (-2147483648)
+CERT_SYSTEM_STORE_UNPROTECTED_FLAG = 0x40000000
+CERT_SYSTEM_STORE_LOCATION_MASK = 0x00FF0000
+CERT_SYSTEM_STORE_LOCATION_SHIFT = 16
+CERT_SYSTEM_STORE_CURRENT_USER_ID = 1
+CERT_SYSTEM_STORE_LOCAL_MACHINE_ID = 2
+CERT_SYSTEM_STORE_CURRENT_SERVICE_ID = 4
+CERT_SYSTEM_STORE_SERVICES_ID = 5
+CERT_SYSTEM_STORE_USERS_ID = 6
+CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID = 7
+CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID = 8
+CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID = 9
+CERT_SYSTEM_STORE_CURRENT_USER = \
+    (CERT_SYSTEM_STORE_CURRENT_USER_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_SYSTEM_STORE_LOCAL_MACHINE = \
+    (CERT_SYSTEM_STORE_LOCAL_MACHINE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_SYSTEM_STORE_CURRENT_SERVICE = \
+    (CERT_SYSTEM_STORE_CURRENT_SERVICE_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_SYSTEM_STORE_SERVICES = \
+    (CERT_SYSTEM_STORE_SERVICES_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_SYSTEM_STORE_USERS = \
+    (CERT_SYSTEM_STORE_USERS_ID << CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY = \
+    (CERT_SYSTEM_STORE_CURRENT_USER_GROUP_POLICY_ID << \
+        CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY = \
+    (CERT_SYSTEM_STORE_LOCAL_MACHINE_GROUP_POLICY_ID << \
+        CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE = \
+    (CERT_SYSTEM_STORE_LOCAL_MACHINE_ENTERPRISE_ID << \
+        CERT_SYSTEM_STORE_LOCATION_SHIFT)
+CERT_PROT_ROOT_DISABLE_CURRENT_USER_FLAG = 0x1
+CERT_PROT_ROOT_INHIBIT_ADD_AT_INIT_FLAG = 0x2
+CERT_PROT_ROOT_INHIBIT_PURGE_LM_FLAG = 0x4
+CERT_PROT_ROOT_DISABLE_LM_AUTH_FLAG = 0x8
+CERT_PROT_ROOT_ONLY_LM_GPT_FLAG = 0x8
+CERT_PROT_ROOT_DISABLE_NT_AUTH_REQUIRED_FLAG = 0x10
+CERT_PROT_ROOT_DISABLE_NOT_DEFINED_NAME_CONSTRAINT_FLAG = 0x20
+CERT_TRUST_PUB_ALLOW_TRUST_MASK = 0x00000003
+CERT_TRUST_PUB_ALLOW_END_USER_TRUST = 0x00000000
+CERT_TRUST_PUB_ALLOW_MACHINE_ADMIN_TRUST = 0x00000001
+CERT_TRUST_PUB_ALLOW_ENTERPRISE_ADMIN_TRUST = 0x00000002
+CERT_TRUST_PUB_CHECK_PUBLISHER_REV_FLAG = 0x00000100
+CERT_TRUST_PUB_CHECK_TIMESTAMP_REV_FLAG = 0x00000200
+CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_UNTRUSTED_ROOT_LOGGING_FLAG = 0x1
+CERT_AUTH_ROOT_AUTO_UPDATE_DISABLE_PARTIAL_CHAIN_LOGGING_FLAG = 0x2
+CERT_AUTH_ROOT_CTL_FILENAME_A = "authroot.stl"
+CERT_REGISTRY_STORE_REMOTE_FLAG = 0x10000
+CERT_REGISTRY_STORE_SERIALIZED_FLAG = 0x20000
+CERT_REGISTRY_STORE_CLIENT_GPT_FLAG = (-2147483648)
+CERT_REGISTRY_STORE_LM_GPT_FLAG = 0x01000000
+CERT_REGISTRY_STORE_ROAMING_FLAG = 0x40000
+CERT_REGISTRY_STORE_MY_IE_DIRTY_FLAG = 0x80000
+CERT_FILE_STORE_COMMIT_ENABLE_FLAG = 0x10000
+CERT_LDAP_STORE_SIGN_FLAG = 0x10000
+CERT_LDAP_STORE_AREC_EXCLUSIVE_FLAG = 0x20000
+CERT_LDAP_STORE_OPENED_FLAG = 0x40000
+CERT_LDAP_STORE_UNBIND_FLAG = 0x80000
+CRYPT_OID_OPEN_STORE_PROV_FUNC = "CertDllOpenStoreProv"
+CERT_STORE_PROV_EXTERNAL_FLAG = 0x1
+CERT_STORE_PROV_DELETED_FLAG = 0x2
+CERT_STORE_PROV_NO_PERSIST_FLAG = 0x4
+CERT_STORE_PROV_SYSTEM_STORE_FLAG = 0x8
+CERT_STORE_PROV_LM_SYSTEM_STORE_FLAG = 0x10
+CERT_STORE_PROV_CLOSE_FUNC = 0
+CERT_STORE_PROV_READ_CERT_FUNC = 1
+CERT_STORE_PROV_WRITE_CERT_FUNC = 2
+CERT_STORE_PROV_DELETE_CERT_FUNC = 3
+CERT_STORE_PROV_SET_CERT_PROPERTY_FUNC = 4
+CERT_STORE_PROV_READ_CRL_FUNC = 5
+CERT_STORE_PROV_WRITE_CRL_FUNC = 6
+CERT_STORE_PROV_DELETE_CRL_FUNC = 7
+CERT_STORE_PROV_SET_CRL_PROPERTY_FUNC = 8
+CERT_STORE_PROV_READ_CTL_FUNC = 9
+CERT_STORE_PROV_WRITE_CTL_FUNC = 10
+CERT_STORE_PROV_DELETE_CTL_FUNC = 11
+CERT_STORE_PROV_SET_CTL_PROPERTY_FUNC = 12
+CERT_STORE_PROV_CONTROL_FUNC = 13
+CERT_STORE_PROV_FIND_CERT_FUNC = 14
+CERT_STORE_PROV_FREE_FIND_CERT_FUNC = 15
+CERT_STORE_PROV_GET_CERT_PROPERTY_FUNC = 16
+CERT_STORE_PROV_FIND_CRL_FUNC = 17
+CERT_STORE_PROV_FREE_FIND_CRL_FUNC = 18
+CERT_STORE_PROV_GET_CRL_PROPERTY_FUNC = 19
+CERT_STORE_PROV_FIND_CTL_FUNC = 20
+CERT_STORE_PROV_FREE_FIND_CTL_FUNC = 21
+CERT_STORE_PROV_GET_CTL_PROPERTY_FUNC = 22
+CERT_STORE_PROV_WRITE_ADD_FLAG = 0x1
+CERT_STORE_SAVE_AS_STORE = 1
+CERT_STORE_SAVE_AS_PKCS7 = 2
+CERT_STORE_SAVE_TO_FILE = 1
+CERT_STORE_SAVE_TO_MEMORY = 2
+CERT_STORE_SAVE_TO_FILENAME_A = 3
+CERT_STORE_SAVE_TO_FILENAME_W = 4
+CERT_STORE_SAVE_TO_FILENAME = CERT_STORE_SAVE_TO_FILENAME_W
+CERT_CLOSE_STORE_FORCE_FLAG = 0x00000001
+CERT_CLOSE_STORE_CHECK_FLAG = 0x00000002
+CERT_COMPARE_MASK = 0xFFFF
+CERT_COMPARE_SHIFT = 16
+CERT_COMPARE_ANY = 0
+CERT_COMPARE_SHA1_HASH = 1
+CERT_COMPARE_NAME = 2
+CERT_COMPARE_ATTR = 3
+CERT_COMPARE_MD5_HASH = 4
+CERT_COMPARE_PROPERTY = 5
+CERT_COMPARE_PUBLIC_KEY = 6
+CERT_COMPARE_HASH = CERT_COMPARE_SHA1_HASH
+CERT_COMPARE_NAME_STR_A = 7
+CERT_COMPARE_NAME_STR_W = 8
+CERT_COMPARE_KEY_SPEC = 9
+CERT_COMPARE_ENHKEY_USAGE = 10
+CERT_COMPARE_CTL_USAGE = CERT_COMPARE_ENHKEY_USAGE
+CERT_COMPARE_SUBJECT_CERT = 11
+CERT_COMPARE_ISSUER_OF = 12
+CERT_COMPARE_EXISTING = 13
+CERT_COMPARE_SIGNATURE_HASH = 14
+CERT_COMPARE_KEY_IDENTIFIER = 15
+CERT_COMPARE_CERT_ID = 16
+CERT_COMPARE_CROSS_CERT_DIST_POINTS = 17
+CERT_COMPARE_PUBKEY_MD5_HASH = 18
+CERT_FIND_ANY = (CERT_COMPARE_ANY << CERT_COMPARE_SHIFT)
+CERT_FIND_SHA1_HASH = (CERT_COMPARE_SHA1_HASH << CERT_COMPARE_SHIFT)
+CERT_FIND_MD5_HASH = (CERT_COMPARE_MD5_HASH << CERT_COMPARE_SHIFT)
+CERT_FIND_SIGNATURE_HASH = (CERT_COMPARE_SIGNATURE_HASH << CERT_COMPARE_SHIFT)
+CERT_FIND_KEY_IDENTIFIER = (CERT_COMPARE_KEY_IDENTIFIER << CERT_COMPARE_SHIFT)
+CERT_FIND_HASH = CERT_FIND_SHA1_HASH
+CERT_FIND_PROPERTY = (CERT_COMPARE_PROPERTY << CERT_COMPARE_SHIFT)
+CERT_FIND_PUBLIC_KEY = (CERT_COMPARE_PUBLIC_KEY << CERT_COMPARE_SHIFT)
+CERT_FIND_SUBJECT_NAME = (CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_SUBJECT_FLAG)
+CERT_FIND_SUBJECT_ATTR = (CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_SUBJECT_FLAG)
+CERT_FIND_ISSUER_NAME = (CERT_COMPARE_NAME << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_ISSUER_FLAG)
+CERT_FIND_ISSUER_ATTR = (CERT_COMPARE_ATTR << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_ISSUER_FLAG)
+CERT_FIND_SUBJECT_STR_A = (CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_SUBJECT_FLAG)
+CERT_FIND_SUBJECT_STR_W = (CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_SUBJECT_FLAG)
+CERT_FIND_SUBJECT_STR = CERT_FIND_SUBJECT_STR_W
+CERT_FIND_ISSUER_STR_A = (CERT_COMPARE_NAME_STR_A << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_ISSUER_FLAG)
+CERT_FIND_ISSUER_STR_W = (CERT_COMPARE_NAME_STR_W << CERT_COMPARE_SHIFT | \
+                                 CERT_INFO_ISSUER_FLAG)
+CERT_FIND_ISSUER_STR = CERT_FIND_ISSUER_STR_W
+CERT_FIND_KEY_SPEC = (CERT_COMPARE_KEY_SPEC << CERT_COMPARE_SHIFT)
+CERT_FIND_ENHKEY_USAGE = (CERT_COMPARE_ENHKEY_USAGE << CERT_COMPARE_SHIFT)
+CERT_FIND_CTL_USAGE = CERT_FIND_ENHKEY_USAGE
+CERT_FIND_SUBJECT_CERT = (CERT_COMPARE_SUBJECT_CERT << CERT_COMPARE_SHIFT)
+CERT_FIND_ISSUER_OF = (CERT_COMPARE_ISSUER_OF << CERT_COMPARE_SHIFT)
+CERT_FIND_EXISTING = (CERT_COMPARE_EXISTING << CERT_COMPARE_SHIFT)
+CERT_FIND_CERT_ID = (CERT_COMPARE_CERT_ID << CERT_COMPARE_SHIFT)
+CERT_FIND_CROSS_CERT_DIST_POINTS = \
+                    (CERT_COMPARE_CROSS_CERT_DIST_POINTS << CERT_COMPARE_SHIFT)
+CERT_FIND_PUBKEY_MD5_HASH = \
+                    (CERT_COMPARE_PUBKEY_MD5_HASH << CERT_COMPARE_SHIFT)
+CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG = 0x1
+CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG = 0x2
+CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG = 0x4
+CERT_FIND_NO_ENHKEY_USAGE_FLAG = 0x8
+CERT_FIND_OR_ENHKEY_USAGE_FLAG = 0x10
+CERT_FIND_VALID_ENHKEY_USAGE_FLAG = 0x20
+CERT_FIND_OPTIONAL_CTL_USAGE_FLAG = CERT_FIND_OPTIONAL_ENHKEY_USAGE_FLAG
+CERT_FIND_EXT_ONLY_CTL_USAGE_FLAG = \
+        CERT_FIND_EXT_ONLY_ENHKEY_USAGE_FLAG
+CERT_FIND_PROP_ONLY_CTL_USAGE_FLAG = \
+        CERT_FIND_PROP_ONLY_ENHKEY_USAGE_FLAG
+CERT_FIND_NO_CTL_USAGE_FLAG = CERT_FIND_NO_ENHKEY_USAGE_FLAG
+CERT_FIND_OR_CTL_USAGE_FLAG = CERT_FIND_OR_ENHKEY_USAGE_FLAG
+CERT_FIND_VALID_CTL_USAGE_FLAG = CERT_FIND_VALID_ENHKEY_USAGE_FLAG
+CERT_SET_PROPERTY_IGNORE_PERSIST_ERROR_FLAG = (-2147483648)
+CERT_SET_PROPERTY_INHIBIT_PERSIST_FLAG = 0x40000000
+CTL_ENTRY_FROM_PROP_CHAIN_FLAG = 0x1
+CRL_FIND_ANY = 0
+CRL_FIND_ISSUED_BY = 1
+CRL_FIND_EXISTING = 2
+CRL_FIND_ISSUED_FOR = 3
+CRL_FIND_ISSUED_BY_AKI_FLAG = 0x1
+CRL_FIND_ISSUED_BY_SIGNATURE_FLAG = 0x2
+CRL_FIND_ISSUED_BY_DELTA_FLAG = 0x4
+CRL_FIND_ISSUED_BY_BASE_FLAG = 0x8
+CERT_STORE_ADD_NEW = 1
+CERT_STORE_ADD_USE_EXISTING = 2
+CERT_STORE_ADD_REPLACE_EXISTING = 3
+CERT_STORE_ADD_ALWAYS = 4
+CERT_STORE_ADD_REPLACE_EXISTING_INHERIT_PROPERTIES = 5
+CERT_STORE_ADD_NEWER = 6
+CERT_STORE_ADD_NEWER_INHERIT_PROPERTIES = 7
+CERT_STORE_CERTIFICATE_CONTEXT = 1
+CERT_STORE_CRL_CONTEXT = 2
+CERT_STORE_CTL_CONTEXT = 3
+CERT_STORE_CERTIFICATE_CONTEXT_FLAG = \
+                (1 << CERT_STORE_CERTIFICATE_CONTEXT)
+CERT_STORE_CRL_CONTEXT_FLAG = \
+                (1 << CERT_STORE_CRL_CONTEXT)
+CERT_STORE_CTL_CONTEXT_FLAG = \
+                (1 << CERT_STORE_CTL_CONTEXT)
+CTL_ANY_SUBJECT_TYPE = 1
+CTL_CERT_SUBJECT_TYPE = 2
+CTL_FIND_ANY = 0
+CTL_FIND_SHA1_HASH = 1
+CTL_FIND_MD5_HASH = 2
+CTL_FIND_USAGE = 3
+CTL_FIND_SUBJECT = 4
+CTL_FIND_EXISTING = 5
+CTL_FIND_NO_LIST_ID_CBDATA = (-1)
+CTL_FIND_SAME_USAGE_FLAG = 0x1
+CERT_STORE_CTRL_RESYNC = 1
+CERT_STORE_CTRL_NOTIFY_CHANGE = 2
+CERT_STORE_CTRL_COMMIT = 3
+CERT_STORE_CTRL_AUTO_RESYNC = 4
+CERT_STORE_CTRL_CANCEL_NOTIFY = 5
+CERT_STORE_CTRL_INHIBIT_DUPLICATE_HANDLE_FLAG = 0x1
+CERT_STORE_CTRL_COMMIT_FORCE_FLAG = 0x1
+CERT_STORE_CTRL_COMMIT_CLEAR_FLAG = 0x2
+CERT_STORE_LOCALIZED_NAME_PROP_ID = 0x1000
+CERT_CREATE_CONTEXT_NOCOPY_FLAG = 0x1
+CERT_CREATE_CONTEXT_SORTED_FLAG = 0x2
+CERT_CREATE_CONTEXT_NO_HCRYPTMSG_FLAG = 0x4
+CERT_CREATE_CONTEXT_NO_ENTRY_FLAG = 0x8
+CERT_PHYSICAL_STORE_ADD_ENABLE_FLAG = 0x1
+CERT_PHYSICAL_STORE_OPEN_DISABLE_FLAG = 0x2
+CERT_PHYSICAL_STORE_REMOTE_OPEN_DISABLE_FLAG = 0x4
+CERT_PHYSICAL_STORE_INSERT_COMPUTER_NAME_ENABLE_FLAG = 0x8
+CERT_PHYSICAL_STORE_PREDEFINED_ENUM_FLAG = 0x1
+CRYPT_OID_OPEN_SYSTEM_STORE_PROV_FUNC = "CertDllOpenSystemStoreProv"
+CRYPT_OID_REGISTER_SYSTEM_STORE_FUNC = "CertDllRegisterSystemStore"
+CRYPT_OID_UNREGISTER_SYSTEM_STORE_FUNC = "CertDllUnregisterSystemStore"
+CRYPT_OID_ENUM_SYSTEM_STORE_FUNC = "CertDllEnumSystemStore"
+CRYPT_OID_REGISTER_PHYSICAL_STORE_FUNC = "CertDllRegisterPhysicalStore"
+CRYPT_OID_UNREGISTER_PHYSICAL_STORE_FUNC = "CertDllUnregisterPhysicalStore"
+CRYPT_OID_ENUM_PHYSICAL_STORE_FUNC = "CertDllEnumPhysicalStore"
+CMSG_TRUSTED_SIGNER_FLAG = 0x1
+CMSG_SIGNER_ONLY_FLAG = 0x2
+CMSG_USE_SIGNER_INDEX_FLAG = 0x4
+CMSG_CMS_ENCAPSULATED_CTL_FLAG = 0x00008000
+CMSG_ENCODE_SORTED_CTL_FLAG = 0x1
+CMSG_ENCODE_HASHED_SUBJECT_IDENTIFIER_FLAG = 0x2
+CERT_VERIFY_INHIBIT_CTL_UPDATE_FLAG = 0x1
+CERT_VERIFY_TRUSTED_SIGNERS_FLAG = 0x2
+CERT_VERIFY_NO_TIME_CHECK_FLAG = 0x4
+CERT_VERIFY_ALLOW_MORE_USAGE_FLAG = 0x8
+CERT_VERIFY_UPDATED_CTL_FLAG = 0x1
+CERT_CONTEXT_REVOCATION_TYPE = 1
+CERT_VERIFY_REV_CHAIN_FLAG = 0x00000001
+CERT_VERIFY_CACHE_ONLY_BASED_REVOCATION = 0x00000002
+CERT_VERIFY_REV_ACCUMULATIVE_TIMEOUT_FLAG = 0x00000004
+CERT_UNICODE_IS_RDN_ATTRS_FLAG = 0x1
+CERT_CASE_INSENSITIVE_IS_RDN_ATTRS_FLAG = 0x2
+CRYPT_VERIFY_CERT_SIGN_SUBJECT_BLOB = 1
+CRYPT_VERIFY_CERT_SIGN_SUBJECT_CERT = 2
+CRYPT_VERIFY_CERT_SIGN_SUBJECT_CRL = 3
+CRYPT_VERIFY_CERT_SIGN_ISSUER_PUBKEY = 1
+CRYPT_VERIFY_CERT_SIGN_ISSUER_CERT = 2
+CRYPT_VERIFY_CERT_SIGN_ISSUER_CHAIN = 3
+CRYPT_VERIFY_CERT_SIGN_ISSUER_NULL = 4
+CRYPT_DEFAULT_CONTEXT_AUTO_RELEASE_FLAG = 0x00000001
+CRYPT_DEFAULT_CONTEXT_PROCESS_FLAG = 0x00000002
+CRYPT_DEFAULT_CONTEXT_CERT_SIGN_OID = 1
+CRYPT_DEFAULT_CONTEXT_MULTI_CERT_SIGN_OID = 2
+CRYPT_OID_EXPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllExportPublicKeyInfoEx"
+CRYPT_OID_IMPORT_PUBLIC_KEY_INFO_FUNC = "CryptDllImportPublicKeyInfoEx"
+CRYPT_ACQUIRE_CACHE_FLAG = 0x00000001
+CRYPT_ACQUIRE_USE_PROV_INFO_FLAG = 0x00000002
+CRYPT_ACQUIRE_COMPARE_KEY_FLAG = 0x00000004
+CRYPT_ACQUIRE_SILENT_FLAG = 0x00000040
+CRYPT_FIND_USER_KEYSET_FLAG = 0x00000001
+CRYPT_FIND_MACHINE_KEYSET_FLAG = 0x00000002
+CRYPT_FIND_SILENT_KEYSET_FLAG = 0x00000040
+CRYPT_OID_IMPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllImportPrivateKeyInfoEx"
+CRYPT_OID_EXPORT_PRIVATE_KEY_INFO_FUNC = "CryptDllExportPrivateKeyInfoEx"
+CRYPT_DELETE_KEYSET = 0x0001
+CERT_SIMPLE_NAME_STR = 1
+CERT_OID_NAME_STR = 2
+CERT_X500_NAME_STR = 3
+CERT_NAME_STR_SEMICOLON_FLAG = 0x40000000
+CERT_NAME_STR_NO_PLUS_FLAG = 0x20000000
+CERT_NAME_STR_NO_QUOTING_FLAG = 0x10000000
+CERT_NAME_STR_CRLF_FLAG = 0x08000000
+CERT_NAME_STR_COMMA_FLAG = 0x04000000
+CERT_NAME_STR_REVERSE_FLAG = 0x02000000
+CERT_NAME_STR_DISABLE_IE4_UTF8_FLAG = 0x00010000
+CERT_NAME_STR_ENABLE_T61_UNICODE_FLAG = 0x00020000
+CERT_NAME_STR_ENABLE_UTF8_UNICODE_FLAG = 0x00040000
+CERT_NAME_EMAIL_TYPE = 1
+CERT_NAME_RDN_TYPE = 2
+CERT_NAME_ATTR_TYPE = 3
+CERT_NAME_SIMPLE_DISPLAY_TYPE = 4
+CERT_NAME_FRIENDLY_DISPLAY_TYPE = 5
+CERT_NAME_DNS_TYPE = 6
+CERT_NAME_URL_TYPE = 7
+CERT_NAME_UPN_TYPE = 8
+CERT_NAME_ISSUER_FLAG = 0x1
+CERT_NAME_DISABLE_IE4_UTF8_FLAG = 0x00010000
+CRYPT_MESSAGE_BARE_CONTENT_OUT_FLAG = 0x00000001
+CRYPT_MESSAGE_ENCAPSULATED_CONTENT_OUT_FLAG = 0x00000002
+CRYPT_MESSAGE_KEYID_SIGNER_FLAG = 0x00000004
+CRYPT_MESSAGE_SILENT_KEYSET_FLAG = 0x00000040
+CRYPT_MESSAGE_KEYID_RECIPIENT_FLAG = 0x4
+CERT_QUERY_OBJECT_FILE = 0x00000001
+CERT_QUERY_OBJECT_BLOB = 0x00000002
+CERT_QUERY_CONTENT_CERT = 1
+CERT_QUERY_CONTENT_CTL = 2
+CERT_QUERY_CONTENT_CRL = 3
+CERT_QUERY_CONTENT_SERIALIZED_STORE = 4
+CERT_QUERY_CONTENT_SERIALIZED_CERT = 5
+CERT_QUERY_CONTENT_SERIALIZED_CTL = 6
+CERT_QUERY_CONTENT_SERIALIZED_CRL = 7
+CERT_QUERY_CONTENT_PKCS7_SIGNED = 8
+CERT_QUERY_CONTENT_PKCS7_UNSIGNED = 9
+CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED = 10
+CERT_QUERY_CONTENT_PKCS10 = 11
+CERT_QUERY_CONTENT_PFX = 12
+CERT_QUERY_CONTENT_CERT_PAIR = 13
+CERT_QUERY_CONTENT_FLAG_CERT = \
+                ( 1 << CERT_QUERY_CONTENT_CERT)
+CERT_QUERY_CONTENT_FLAG_CTL = \
+                ( 1 << CERT_QUERY_CONTENT_CTL)
+CERT_QUERY_CONTENT_FLAG_CRL = \
+                ( 1 << CERT_QUERY_CONTENT_CRL)
+CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE = \
+                ( 1 << CERT_QUERY_CONTENT_SERIALIZED_STORE)
+CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT = \
+                ( 1 << CERT_QUERY_CONTENT_SERIALIZED_CERT)
+CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL = \
+                ( 1 << CERT_QUERY_CONTENT_SERIALIZED_CTL)
+CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL = \
+                ( 1 << CERT_QUERY_CONTENT_SERIALIZED_CRL)
+CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED = \
+                ( 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED)
+CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED = \
+                ( 1 << CERT_QUERY_CONTENT_PKCS7_UNSIGNED)
+CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED = \
+                ( 1 << CERT_QUERY_CONTENT_PKCS7_SIGNED_EMBED)
+CERT_QUERY_CONTENT_FLAG_PKCS10 = \
+                ( 1 << CERT_QUERY_CONTENT_PKCS10)
+CERT_QUERY_CONTENT_FLAG_PFX = \
+                ( 1 << CERT_QUERY_CONTENT_PFX)
+CERT_QUERY_CONTENT_FLAG_CERT_PAIR = \
+                ( 1 << CERT_QUERY_CONTENT_CERT_PAIR)
+CERT_QUERY_CONTENT_FLAG_ALL = \
+                CERT_QUERY_CONTENT_FLAG_CERT |                  \
+                CERT_QUERY_CONTENT_FLAG_CTL  |                  \
+                CERT_QUERY_CONTENT_FLAG_CRL  |                  \
+                CERT_QUERY_CONTENT_FLAG_SERIALIZED_STORE |      \
+                CERT_QUERY_CONTENT_FLAG_SERIALIZED_CERT  |      \
+                CERT_QUERY_CONTENT_FLAG_SERIALIZED_CTL   |      \
+                CERT_QUERY_CONTENT_FLAG_SERIALIZED_CRL   |      \
+                CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED     |      \
+                CERT_QUERY_CONTENT_FLAG_PKCS7_UNSIGNED   |      \
+                CERT_QUERY_CONTENT_FLAG_PKCS7_SIGNED_EMBED |    \
+                CERT_QUERY_CONTENT_FLAG_PKCS10                     |    \
+                CERT_QUERY_CONTENT_FLAG_PFX                |    \
+                CERT_QUERY_CONTENT_FLAG_CERT_PAIR
+CERT_QUERY_FORMAT_BINARY = 1
+CERT_QUERY_FORMAT_BASE64_ENCODED = 2
+CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED = 3
+CERT_QUERY_FORMAT_FLAG_BINARY = \
+                ( 1 << CERT_QUERY_FORMAT_BINARY)
+CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED = \
+                ( 1 << CERT_QUERY_FORMAT_BASE64_ENCODED)
+CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED = \
+                ( 1 << CERT_QUERY_FORMAT_ASN_ASCII_HEX_ENCODED)
+CERT_QUERY_FORMAT_FLAG_ALL = \
+            CERT_QUERY_FORMAT_FLAG_BINARY   |       \
+            CERT_QUERY_FORMAT_FLAG_BASE64_ENCODED | \
+            CERT_QUERY_FORMAT_FLAG_ASN_ASCII_HEX_ENCODED
+SCHEME_OID_RETRIEVE_ENCODED_OBJECT_FUNC = "SchemeDllRetrieveEncodedObject"
+SCHEME_OID_RETRIEVE_ENCODED_OBJECTW_FUNC = "SchemeDllRetrieveEncodedObjectW"
+CONTEXT_OID_CREATE_OBJECT_CONTEXT_FUNC = "ContextDllCreateObjectContext"
+CRYPT_RETRIEVE_MULTIPLE_OBJECTS = 0x00000001
+CRYPT_CACHE_ONLY_RETRIEVAL = 0x00000002
+CRYPT_WIRE_ONLY_RETRIEVAL = 0x00000004
+CRYPT_DONT_CACHE_RESULT = 0x00000008
+CRYPT_ASYNC_RETRIEVAL = 0x00000010
+CRYPT_STICKY_CACHE_RETRIEVAL = 0x00001000
+CRYPT_LDAP_SCOPE_BASE_ONLY_RETRIEVAL = 0x00002000
+CRYPT_OFFLINE_CHECK_RETRIEVAL = 0x00004000
+CRYPT_LDAP_INSERT_ENTRY_ATTRIBUTE = 0x00008000
+CRYPT_LDAP_SIGN_RETRIEVAL = 0x00010000
+CRYPT_NO_AUTH_RETRIEVAL = 0x00020000
+CRYPT_LDAP_AREC_EXCLUSIVE_RETRIEVAL = 0x00040000
+CRYPT_AIA_RETRIEVAL = 0x00080000
+CRYPT_VERIFY_CONTEXT_SIGNATURE = 0x00000020
+CRYPT_VERIFY_DATA_HASH = 0x00000040
+CRYPT_KEEP_TIME_VALID = 0x00000080
+CRYPT_DONT_VERIFY_SIGNATURE = 0x00000100
+CRYPT_DONT_CHECK_TIME_VALIDITY = 0x00000200
+CRYPT_CHECK_FRESHNESS_TIME_VALIDITY = 0x00000400
+CRYPT_ACCUMULATIVE_TIMEOUT = 0x00000800
+CRYPT_GET_URL_FROM_PROPERTY = 0x00000001
+CRYPT_GET_URL_FROM_EXTENSION = 0x00000002
+CRYPT_GET_URL_FROM_UNAUTH_ATTRIBUTE = 0x00000004
+CRYPT_GET_URL_FROM_AUTH_ATTRIBUTE = 0x00000008
+URL_OID_GET_OBJECT_URL_FUNC = "UrlDllGetObjectUrl"
+TIME_VALID_OID_GET_OBJECT_FUNC = "TimeValidDllGetObject"
+TIME_VALID_OID_FLUSH_OBJECT_FUNC = "TimeValidDllFlushObject"
+CRYPTPROTECT_PROMPT_ON_UNPROTECT = 0x1
+CRYPTPROTECT_PROMPT_ON_PROTECT = 0x2
+CRYPTPROTECT_PROMPT_RESERVED = 0x04
+CRYPTPROTECT_PROMPT_STRONG = 0x08
+CRYPTPROTECT_PROMPT_REQUIRE_STRONG = 0x10
+CRYPTPROTECT_UI_FORBIDDEN = 0x1
+CRYPTPROTECT_LOCAL_MACHINE = 0x4
+CRYPTPROTECT_CRED_SYNC = 0x8
+CRYPTPROTECT_AUDIT = 0x10
+CRYPTPROTECT_NO_RECOVERY = 0x20
+CRYPTPROTECT_VERIFY_PROTECTION = 0x40
+CRYPTPROTECT_CRED_REGENERATE = 0x80
+CRYPTPROTECT_FIRST_RESERVED_FLAGVAL = 0x0FFFFFFF
+CRYPTPROTECT_LAST_RESERVED_FLAGVAL = (-1)
+CRYPTPROTECTMEMORY_BLOCK_SIZE = 16
+CRYPTPROTECTMEMORY_SAME_PROCESS = 0x00
+CRYPTPROTECTMEMORY_CROSS_PROCESS = 0x01
+CRYPTPROTECTMEMORY_SAME_LOGON = 0x02
+CERT_CREATE_SELFSIGN_NO_SIGN = 1
+CERT_CREATE_SELFSIGN_NO_KEY_INFO = 2
+CRYPT_KEYID_MACHINE_FLAG = 0x00000020
+CRYPT_KEYID_ALLOC_FLAG = 0x00008000
+CRYPT_KEYID_DELETE_FLAG = 0x00000010
+CRYPT_KEYID_SET_NEW_FLAG = 0x00002000
+CERT_CHAIN_MAX_AIA_URL_COUNT_IN_CERT_DEFAULT = 5
+CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_COUNT_PER_CHAIN_DEFAULT = 10
+CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_BYTE_COUNT_DEFAULT = 100000
+CERT_CHAIN_MAX_AIA_URL_RETRIEVAL_CERT_COUNT_DEFAULT = 10
+CERT_CHAIN_CACHE_END_CERT = 0x00000001
+CERT_CHAIN_THREAD_STORE_SYNC = 0x00000002
+CERT_CHAIN_CACHE_ONLY_URL_RETRIEVAL = 0x00000004
+CERT_CHAIN_USE_LOCAL_MACHINE_STORE = 0x00000008
+CERT_CHAIN_ENABLE_CACHE_AUTO_UPDATE = 0x00000010
+CERT_CHAIN_ENABLE_SHARE_STORE = 0x00000020
+CERT_TRUST_NO_ERROR = 0x00000000
+CERT_TRUST_IS_NOT_TIME_VALID = 0x00000001
+CERT_TRUST_IS_NOT_TIME_NESTED = 0x00000002
+CERT_TRUST_IS_REVOKED = 0x00000004
+CERT_TRUST_IS_NOT_SIGNATURE_VALID = 0x00000008
+CERT_TRUST_IS_NOT_VALID_FOR_USAGE = 0x00000010
+CERT_TRUST_IS_UNTRUSTED_ROOT = 0x00000020
+CERT_TRUST_REVOCATION_STATUS_UNKNOWN = 0x00000040
+CERT_TRUST_IS_CYCLIC = 0x00000080
+CERT_TRUST_INVALID_EXTENSION = 0x00000100
+CERT_TRUST_INVALID_POLICY_CONSTRAINTS = 0x00000200
+CERT_TRUST_INVALID_BASIC_CONSTRAINTS = 0x00000400
+CERT_TRUST_INVALID_NAME_CONSTRAINTS = 0x00000800
+CERT_TRUST_HAS_NOT_SUPPORTED_NAME_CONSTRAINT = 0x00001000
+CERT_TRUST_HAS_NOT_DEFINED_NAME_CONSTRAINT = 0x00002000
+CERT_TRUST_HAS_NOT_PERMITTED_NAME_CONSTRAINT = 0x00004000
+CERT_TRUST_HAS_EXCLUDED_NAME_CONSTRAINT = 0x00008000
+CERT_TRUST_IS_OFFLINE_REVOCATION = 0x01000000
+CERT_TRUST_NO_ISSUANCE_CHAIN_POLICY = 0x02000000
+CERT_TRUST_IS_PARTIAL_CHAIN = 0x00010000
+CERT_TRUST_CTL_IS_NOT_TIME_VALID = 0x00020000
+CERT_TRUST_CTL_IS_NOT_SIGNATURE_VALID = 0x00040000
+CERT_TRUST_CTL_IS_NOT_VALID_FOR_USAGE = 0x00080000
+CERT_TRUST_HAS_EXACT_MATCH_ISSUER = 0x00000001
+CERT_TRUST_HAS_KEY_MATCH_ISSUER = 0x00000002
+CERT_TRUST_HAS_NAME_MATCH_ISSUER = 0x00000004
+CERT_TRUST_IS_SELF_SIGNED = 0x00000008
+CERT_TRUST_HAS_PREFERRED_ISSUER = 0x00000100
+CERT_TRUST_HAS_ISSUANCE_CHAIN_POLICY = 0x00000200
+CERT_TRUST_HAS_VALID_NAME_CONSTRAINTS = 0x00000400
+CERT_TRUST_IS_COMPLEX_CHAIN = 0x00010000
+USAGE_MATCH_TYPE_AND = 0x00000000
+USAGE_MATCH_TYPE_OR = 0x00000001
+CERT_CHAIN_REVOCATION_CHECK_END_CERT = 0x10000000
+CERT_CHAIN_REVOCATION_CHECK_CHAIN = 0x20000000
+CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT = 0x40000000
+CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY = (-2147483648)
+CERT_CHAIN_REVOCATION_ACCUMULATIVE_TIMEOUT = 0x08000000
+CERT_CHAIN_DISABLE_PASS1_QUALITY_FILTERING = 0x00000040
+CERT_CHAIN_RETURN_LOWER_QUALITY_CONTEXTS = 0x00000080
+CERT_CHAIN_DISABLE_AUTH_ROOT_AUTO_UPDATE = 0x00000100
+CERT_CHAIN_TIMESTAMP_TIME = 0x00000200
+CERT_CHAIN_FIND_BY_ISSUER = 1
+CERT_CHAIN_FIND_BY_ISSUER_COMPARE_KEY_FLAG = 0x0001
+CERT_CHAIN_FIND_BY_ISSUER_COMPLEX_CHAIN_FLAG = 0x0002
+CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_URL_FLAG = 0x0004
+CERT_CHAIN_FIND_BY_ISSUER_LOCAL_MACHINE_FLAG = 0x0008
+CERT_CHAIN_FIND_BY_ISSUER_NO_KEY_FLAG = 0x4000
+CERT_CHAIN_FIND_BY_ISSUER_CACHE_ONLY_FLAG = 0x8000
+CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG = 0x00000001
+CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG = 0x00000002
+CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG = 0x00000004
+CERT_CHAIN_POLICY_IGNORE_INVALID_BASIC_CONSTRAINTS_FLAG = 0x00000008
+CERT_CHAIN_POLICY_IGNORE_ALL_NOT_TIME_VALID_FLAGS = ( \
+    CERT_CHAIN_POLICY_IGNORE_NOT_TIME_VALID_FLAG                | \
+    CERT_CHAIN_POLICY_IGNORE_CTL_NOT_TIME_VALID_FLAG            | \
+    CERT_CHAIN_POLICY_IGNORE_NOT_TIME_NESTED_FLAG                 \
+    )
+CERT_CHAIN_POLICY_ALLOW_UNKNOWN_CA_FLAG = 0x00000010
+CERT_CHAIN_POLICY_IGNORE_WRONG_USAGE_FLAG = 0x00000020
+CERT_CHAIN_POLICY_IGNORE_INVALID_NAME_FLAG = 0x00000040
+CERT_CHAIN_POLICY_IGNORE_INVALID_POLICY_FLAG = 0x00000080
+CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG = 0x00000100
+CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG = 0x00000200
+CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG = 0x00000400
+CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG = 0x00000800
+CERT_CHAIN_POLICY_IGNORE_ALL_REV_UNKNOWN_FLAGS = ( \
+    CERT_CHAIN_POLICY_IGNORE_END_REV_UNKNOWN_FLAG         | \
+    CERT_CHAIN_POLICY_IGNORE_CTL_SIGNER_REV_UNKNOWN_FLAG  | \
+    CERT_CHAIN_POLICY_IGNORE_CA_REV_UNKNOWN_FLAG          | \
+    CERT_CHAIN_POLICY_IGNORE_ROOT_REV_UNKNOWN_FLAG          \
+    )
+CERT_CHAIN_POLICY_ALLOW_TESTROOT_FLAG = 0x00008000
+CERT_CHAIN_POLICY_TRUST_TESTROOT_FLAG = 0x00004000
+CRYPT_OID_VERIFY_CERTIFICATE_CHAIN_POLICY_FUNC = \
+    "CertDllVerifyCertificateChainPolicy"
+AUTHTYPE_CLIENT = 1
+AUTHTYPE_SERVER = 2
+BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_CA_FLAG = (-2147483648)
+BASIC_CONSTRAINTS_CERT_CHAIN_POLICY_END_ENTITY_FLAG = 0x40000000
+MICROSOFT_ROOT_CERT_CHAIN_POLICY_ENABLE_TEST_ROOT_FLAG = 0x00010000
+CRYPT_STRING_BASE64HEADER = 0x00000000
+CRYPT_STRING_BASE64 = 0x00000001
+CRYPT_STRING_BINARY = 0x00000002
+CRYPT_STRING_BASE64REQUESTHEADER = 0x00000003
+CRYPT_STRING_HEX = 0x00000004
+CRYPT_STRING_HEXASCII = 0x00000005
+CRYPT_STRING_BASE64_ANY = 0x00000006
+CRYPT_STRING_ANY = 0x00000007
+CRYPT_STRING_HEX_ANY = 0x00000008
+CRYPT_STRING_BASE64X509CRLHEADER = 0x00000009
+CRYPT_STRING_HEXADDR = 0x0000000a
+CRYPT_STRING_HEXASCIIADDR = 0x0000000b
+CRYPT_STRING_NOCR = (-2147483648)
+CRYPT_USER_KEYSET = 0x00001000
+PKCS12_IMPORT_RESERVED_MASK = (-65536)
+REPORT_NO_PRIVATE_KEY = 0x0001
+REPORT_NOT_ABLE_TO_EXPORT_PRIVATE_KEY = 0x0002
+EXPORT_PRIVATE_KEYS = 0x0004
+PKCS12_EXPORT_RESERVED_MASK = (-65536)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32evtlogutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32evtlogutil.py
new file mode 100644
index 0000000..fb3ff0b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32evtlogutil.py
@@ -0,0 +1,152 @@
+"""Event Log Utilities - helper for win32evtlog.pyd
+"""
+
+import win32api, win32con, winerror, win32evtlog, string
+
+error = win32api.error # The error the evtlog module raises.
+
+langid = win32api.MAKELANGID(win32con.LANG_NEUTRAL, win32con.SUBLANG_NEUTRAL)
+
def AddSourceToRegistry(appName, msgDLL = None, eventLogType = "Application", eventLogFlags = None):
    """Add a source of messages to the event log.

    Allows a Python program to register a custom source of messages in the
    registry.  You must also provide the DLL name that has the message
    table, so the full message text appears in the event log.

    Note that the win32evtlog.pyd file has a number of string entries with
    just "%1" built in, so many Python programs can simply use this DLL.
    Disadvantages are that you do not get language translation, and the
    full text is stored in the event log, blowing the size of the log up.
    """
    # When an application uses RegisterEventSource or OpenEventLog to get
    # a handle of an event log, the event logging service searches for the
    # specified source name in the registry - so "registering" a source
    # just means creating a subkey under the EventLog key and filling in a
    # couple of values.
    if msgDLL is None:
        # Default to win32evtlog.pyd itself, which carries "%1" entries.
        msgDLL = win32evtlog.__file__

    # Create a new key for our application.
    keyPath = "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" \
              % (eventLogType, appName)
    hkey = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, keyPath)

    # Record which DLL holds the Event-ID message table for this source.
    win32api.RegSetValueEx(hkey,
                           "EventMessageFile",     # value name
                           0,                      # reserved
                           win32con.REG_EXPAND_SZ, # value type
                           msgDLL)

    # Record which event types this source is allowed to report.
    if eventLogFlags is None:
        eventLogFlags = (win32evtlog.EVENTLOG_ERROR_TYPE
                         | win32evtlog.EVENTLOG_WARNING_TYPE
                         | win32evtlog.EVENTLOG_INFORMATION_TYPE)
    win32api.RegSetValueEx(hkey,               # subkey handle
                           "TypesSupported",   # value name
                           0,                  # reserved
                           win32con.REG_DWORD, # value type
                           eventLogFlags)
    win32api.RegCloseKey(hkey)
+
+def RemoveSourceFromRegistry(appName, eventLogType = "Application"):
+    """Removes a source of messages from the event log.
+    """
+
+    # Delete our key
+    try:
+        win32api.RegDeleteKey(win32con.HKEY_LOCAL_MACHINE, \
+                     "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" % (eventLogType, appName))
+    except win32api.error, (hr, fn, desc):
+        if hr != winerror.ERROR_FILE_NOT_FOUND:
+            raise
+
+
def ReportEvent(appName, eventID, eventCategory = 0, eventType=win32evtlog.EVENTLOG_ERROR_TYPE, strings = None, data = None, sid=None):
    """Report an event for a previously added event source.

    appName -- the source name, as registered via AddSourceToRegistry.
    eventID -- the integer ID of the event to report.
    eventCategory, eventType -- category and EVENTLOG_* type of the event.
    strings -- optional sequence of insertion strings for the message.
    data -- optional raw event data.
    sid -- optional security identifier to record with the event
        (passed straight through to win32evtlog.ReportEvent).
    """
    # Get a handle to the event log for this source.
    hAppLog = win32evtlog.RegisterEventSource(None, appName)
    try:
        # Now report the event, which will add this event to the event log.
        win32evtlog.ReportEvent(hAppLog, # event-log handle
            eventType,
            eventCategory,
            eventID,
            sid,
            strings,
            data)
    finally:
        # Always release the source handle, even if ReportEvent raises -
        # the original leaked the handle on failure (and had a stray ';').
        win32evtlog.DeregisterEventSource(hAppLog)
+
def FormatMessage( eventLogRecord, logType="Application" ):
    """Given a tuple from ReadEventLog, and optionally where the event
    record came from, load the message, and process message inserts.

    Note that this function may raise win32api.error.  See also the
    function SafeFormatMessage which will return None if the message can
    not be processed.
    """
    # The record's source name tells us which registry key to look under
    # for the name of the message DLL that contains the messages we need
    # to extract with FormatMessage.
    keyName = "SYSTEM\\CurrentControlSet\\Services\\EventLog\\%s\\%s" % (logType, eventLogRecord.SourceName)

    # Open that key and read the EventMessageFile value - the name of the
    # message DLL.  Win2k etc appear to allow multiple, ';'-separated DLLs.
    regHandle = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, keyName)
    try:
        rawNames = win32api.RegQueryValueEx(regHandle, "EventMessageFile")[0]
        result = None
        for candidate in rawNames.split(";"):
            try:
                # The stored path may contain environment variable
                # references - expand them before loading.
                candidate = win32api.ExpandEnvironmentStrings(candidate)

                hModule = win32api.LoadLibraryEx(candidate, 0, win32con.DONT_RESOLVE_DLL_REFERENCES)
                try:
                    result = win32api.FormatMessageW(win32con.FORMAT_MESSAGE_FROM_HMODULE,
                                    hModule, eventLogRecord.EventID, langid, eventLogRecord.StringInserts)
                finally:
                    win32api.FreeLibrary(hModule)
            except win32api.error:
                pass # Not in this DLL - try the next
            if result is not None:
                break
    finally:
        win32api.RegCloseKey(regHandle)
    return result or u'' # Don't want "None" ever being returned.
+
def SafeFormatMessage( eventLogRecord, logType=None ):
    """As for FormatMessage, except returns an error message if
    the message can not be processed.
    """
    if logType is None:
        logType = "Application"
    try:
        return FormatMessage(eventLogRecord, logType)
    except win32api.error:
        # Could not load or format the message - synthesize a description
        # in the same style the Windows event viewer itself uses.
        inserts = eventLogRecord.StringInserts
        if inserts is None:
            desc = ""
        else:
            desc = u", ".join(inserts)
        return u"<The description for Event ID ( %d ) in Source ( %r ) could not be found. It contains the following insertion string(s):%r.>" % (winerror.HRESULT_CODE(eventLogRecord.EventID), eventLogRecord.SourceName, desc)
+
def FeedEventLogRecords(feeder, machineName = None, logName = "Application", readFlags = None):
    """Read every record from an event log, passing each to 'feeder'.

    feeder -- a callable invoked once per event log record.
    machineName -- machine whose log to open, or None for the local machine.
    logName -- name of the log to read.
    readFlags -- EVENTLOG_* read flags; defaults to a sequential
        backwards read.
    """
    if readFlags is None:
        readFlags = win32evtlog.EVENTLOG_BACKWARDS_READ|win32evtlog.EVENTLOG_SEQUENTIAL_READ

    h=win32evtlog.OpenEventLog(machineName, logName)
    try:
        while 1:
            objects = win32evtlog.ReadEventLog(h, readFlags, 0)
            if not objects:
                break
            # Plain iteration - the original used map() with apply() (a
            # builtin deprecated since Python 2.3) purely for the side
            # effect, which also built a useless list of results.
            for item in objects:
                feeder(item)
    finally:
        win32evtlog.CloseEventLog(h)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32gui_struct.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32gui_struct.py
new file mode 100644
index 0000000..ec5cb81
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32gui_struct.py
@@ -0,0 +1,547 @@
+# This is a work in progress - see Demos/win32gui_menu.py
+
+# win32gui_struct.py - helpers for working with various win32gui structures.
+# As win32gui is "light-weight", it does not define objects for all possible
+# win32 structures - in general, "buffer" objects are passed around - it is
+# the callers responsibility to pack the buffer in the correct format.
+#
+# This module defines some helpers for the commonly used structures.
+#
+# In general, each structure has 3 functions:
+#
+# buffer, extras = PackSTRUCTURE(items, ...)
+# item, ... = UnpackSTRUCTURE(buffer)
+# buffer, extras = EmptySTRUCTURE(...)
+#
+# 'extras' is always items that must be held along with the buffer, as the
+# buffer refers to these object's memory.
+# For structures that support a 'mask', this mask is hidden from the user - if
+# 'None' is passed, the mask flag will not be set, or on return, None will
+# be returned for the value if the mask is not set.
+#
+# NOTE: I considered making these structures look like real classes, and
+# support 'attributes' etc - however, ctypes already has a good structure
+# mechanism - I think it makes more sense to support ctype structures
+# at the win32gui level, then there will be no need for this module at all.
+
+import win32gui
+import win32con
+import struct
+import array
+import commctrl
+
+# Generic WM_NOTIFY unpacking
+def UnpackWMNOTIFY(lparam):
+    """Unpack the NMHDR pointed to by a WM_NOTIFY lparam.
+
+    Returns (hwndFrom, idFrom, code).
+    """
+    format = "iii"
+    # lparam is the address of the NMHDR - wrap it in a buffer to unpack.
+    buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam)
+    hwndFrom, idFrom, code = struct.unpack(format, buf)
+    return hwndFrom, idFrom, code
+    
+# MENUITEMINFO struct
+# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winui/WinUI/WindowsUserInterface/Resources/Menus/MenuReference/MenuStructures/MENUITEMINFO.asp
+# We use the struct module to pack and unpack strings as MENUITEMINFO
+# structures.  We also have special handling for the 'fMask' item in that
+# structure to avoid the caller needing to explicitly check validity
+# (None is used if the mask excludes/should exclude the value)
+menuitem_fmt = '9iP2i'
+
+def PackMENUITEMINFO(fType=None, fState=None, wID=None, hSubMenu=None,
+                     hbmpChecked=None, hbmpUnchecked=None, dwTypeData=None,
+                     text=None, hbmpItem=None):
+    """Pack a MENUITEMINFO structure; returns (buffer, extras).
+
+    Each argument left as None is excluded from fMask, so Windows
+    ignores that field.  hbmpChecked/hbmpUnchecked must be given (or
+    omitted) together.
+    """
+    # 'extras' are objects the caller must keep a reference to (as their
+    # memory is used) for the lifetime of the INFO item.
+    extras = []
+    fMask = 0
+    if fType is None: fType = 0
+    else: fMask |= win32con.MIIM_FTYPE
+    if fState is None: fState = 0
+    else: fMask |= win32con.MIIM_STATE
+    if wID is None: wID = 0
+    else: fMask |= win32con.MIIM_ID
+    if hSubMenu is None: hSubMenu = 0
+    else: fMask |= win32con.MIIM_SUBMENU
+    if hbmpChecked is None:
+        assert hbmpUnchecked is None, \
+                "neither or both checkmark bmps must be given"
+        hbmpChecked = hbmpUnchecked = 0
+    else:
+        assert hbmpUnchecked is not None, \
+                "neither or both checkmark bmps must be given"
+        fMask |= win32con.MIIM_CHECKMARKS
+    if dwTypeData is None: dwTypeData = 0
+    else: fMask |= win32con.MIIM_DATA
+    if hbmpItem is None: hbmpItem = 0
+    else: fMask |= win32con.MIIM_BITMAP
+    if text is not None:
+        fMask |= win32con.MIIM_STRING
+        if isinstance(text, unicode):
+            text = text.encode("mbcs")
+        str_buf = array.array("c", text+'\0')
+        cch = len(str_buf)
+        # We are taking address of strbuf - it must not die until windows
+        # has finished with our structure.
+        lptext = str_buf.buffer_info()[0]
+        extras.append(str_buf)
+    else:
+        lptext = 0
+        cch = 0
+    # Create the struct.
+    dwItemData = 0
+    item = struct.pack(
+                menuitem_fmt,
+                struct.calcsize(menuitem_fmt), # cbSize
+                fMask,
+                fType,
+                fState,
+                wID,
+                hSubMenu,
+                hbmpChecked,
+                hbmpUnchecked,
+                dwItemData,
+                lptext,
+                cch,
+                hbmpItem
+                )
+    # Now copy the string to a writable buffer, so that the result
+    # could be passed to a 'Get' function
+    return array.array("c", item), extras
+
+def UnpackMENUITEMINFO(s):
+    """Unpack a MENUITEMINFO buffer.
+
+    Fields whose bit is not set in fMask are returned as None.
+    Returns (fType, fState, wID, hSubMenu, hbmpChecked, hbmpUnchecked,
+    dwItemData, text, hbmpItem).
+    """
+    (cb,
+    fMask,
+    fType,
+    fState,
+    wID,
+    hSubMenu,
+    hbmpChecked,
+    hbmpUnchecked,
+    dwItemData,
+    lptext,
+    cch,
+    hbmpItem) = struct.unpack(menuitem_fmt, s)
+    assert cb==len(s)
+    if fMask & win32con.MIIM_FTYPE==0: fType = None
+    if fMask & win32con.MIIM_STATE==0: fState = None
+    if fMask & win32con.MIIM_ID==0: wID = None
+    if fMask & win32con.MIIM_SUBMENU==0: hSubMenu = None
+    if fMask & win32con.MIIM_CHECKMARKS==0: hbmpChecked = hbmpUnchecked = None
+    if fMask & win32con.MIIM_DATA==0: dwItemData = None
+    if fMask & win32con.MIIM_BITMAP==0: hbmpItem = None
+    if fMask & win32con.MIIM_STRING:
+        # lptext is the address filled in by a Get call - read it back.
+        text = win32gui.PyGetString(lptext, cch)
+    else:
+        text = None
+    return fType, fState, wID, hSubMenu, hbmpChecked, hbmpUnchecked, \
+           dwItemData, text, hbmpItem
+
+def EmptyMENUITEMINFO(mask = None, text_buf_size=512):
+    """Build a writable MENUITEMINFO suitable for a 'Get' call.
+
+    Returns (buffer, extra) - 'extra' holds the text buffer whose
+    address is embedded in the struct, and must outlive the call.
+    """
+    extra = []
+    if mask is None:
+        mask = win32con.MIIM_BITMAP | win32con.MIIM_CHECKMARKS | \
+               win32con.MIIM_DATA | win32con.MIIM_FTYPE | \
+               win32con.MIIM_ID | win32con.MIIM_STATE | \
+               win32con.MIIM_STRING | win32con.MIIM_SUBMENU
+               # Note: No MIIM_TYPE - this screws win2k/98.
+ 
+    if mask & win32con.MIIM_STRING:
+        text_buffer = array.array("c", "\0" * text_buf_size)
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+    else:
+        text_addr = text_len = 0
+
+    buf = struct.pack(
+                menuitem_fmt,
+                struct.calcsize(menuitem_fmt), # cbSize
+                mask,
+                0, #fType,
+                0, #fState,
+                0, #wID,
+                0, #hSubMenu,
+                0, #hbmpChecked,
+                0, #hbmpUnchecked,
+                0, #dwItemData,
+                text_addr,
+                text_len,
+                0, #hbmpItem
+                )
+    return array.array("c", buf), extra
+
+# MENUINFO struct
+menuinfo_fmt = '7i'
+
+def PackMENUINFO(dwStyle = None, cyMax = None,
+                 hbrBack = None, dwContextHelpID = None, dwMenuData = None,
+                 fMask = 0):
+    """Pack a MENUINFO structure; arguments left as None are excluded
+    from fMask.  Returns a writable buffer.
+    """
+    if dwStyle is None: dwStyle = 0
+    else: fMask |= win32con.MIM_STYLE
+    if cyMax is None: cyMax = 0
+    else: fMask |= win32con.MIM_MAXHEIGHT
+    if hbrBack is None: hbrBack = 0
+    else: fMask |= win32con.MIM_BACKGROUND
+    if dwContextHelpID is None: dwContextHelpID = 0
+    else: fMask |= win32con.MIM_HELPID
+    if dwMenuData is None: dwMenuData = 0
+    else: fMask |= win32con.MIM_MENUDATA
+    # Create the struct.
+    item = struct.pack(
+                menuinfo_fmt,
+                struct.calcsize(menuinfo_fmt), # cbSize
+                fMask,
+                dwStyle,
+                cyMax,
+                hbrBack,
+                dwContextHelpID,
+                dwMenuData)
+    return array.array("c", item)
+
+def UnpackMENUINFO(s):
+    """Unpack a MENUINFO buffer; fields not set in fMask come back
+    as None.  Returns (dwStyle, cyMax, hbrBack, dwContextHelpID,
+    dwMenuData).
+    """
+    (cb,
+    fMask,
+    dwStyle,
+    cyMax,
+    hbrBack,
+    dwContextHelpID,
+    dwMenuData) = struct.unpack(menuinfo_fmt, s)
+    assert cb==len(s)
+    if fMask & win32con.MIM_STYLE==0: dwStyle = None
+    if fMask & win32con.MIM_MAXHEIGHT==0: cyMax = None
+    if fMask & win32con.MIM_BACKGROUND==0: hbrBack = None
+    if fMask & win32con.MIM_HELPID==0: dwContextHelpID = None
+    if fMask & win32con.MIM_MENUDATA==0: dwMenuData = None
+    return dwStyle, cyMax, hbrBack, dwContextHelpID, dwMenuData
+
+def EmptyMENUINFO(mask = None):
+    """Build a writable MENUINFO buffer suitable for a 'Get' call."""
+    if mask is None:
+        mask = win32con.MIM_STYLE | win32con.MIM_MAXHEIGHT| \
+               win32con.MIM_BACKGROUND | win32con.MIM_HELPID | \
+               win32con.MIM_MENUDATA
+ 
+    buf = struct.pack(
+                menuinfo_fmt,
+                struct.calcsize(menuinfo_fmt), # cbSize
+                mask,
+                0, #dwStyle
+                0, #cyMax
+                0, #hbrBack,
+                0, #dwContextHelpID,
+                0, #dwMenuData,
+                )
+    return array.array("c", buf)
+
+##########################################################################
+#
+# Tree View structure support - TVITEM, TVINSERTSTRUCT and TVDISPINFO
+# 
+##########################################################################
+
+# XXX - Note that the following implementation of TreeView structures is ripped
+# XXX - from the SpamBayes project.  It may not quite work correctly yet - I
+# XXX - intend checking them later - but having them is better than not at all!
+
+# Helpers for the ugly win32 structure packing/unpacking
+# XXX - Note that functions using _GetMaskAndVal run 3x faster if they are
+# 'inlined' into the function - see PackLVITEM.  If the profiler points at
+# _GetMaskAndVal(), you should nuke it (patches welcome once they have been
+# tested)
+def _GetMaskAndVal(val, default, mask, flag):
+    if val is None:
+        return mask, default
+    else:
+        if flag is not None:
+            mask |= flag
+        return mask, val
+
+def PackTVINSERTSTRUCT(parent, insertAfter, tvitem):
+    """Pack a TVINSERTSTRUCT - 'tvitem' is a tuple of PackTVITEM args.
+    Returns (buffer, extra)."""
+    tvitem_buf, extra = PackTVITEM(*tvitem)
+    tvitem_buf = tvitem_buf.tostring()
+    # Two ints (hParent, hInsertAfter) followed by the packed TVITEM.
+    format = "ii%ds" % len(tvitem_buf)
+    return struct.pack(format, parent, insertAfter, tvitem_buf), extra
+
+def PackTVITEM(hitem, state, stateMask, text, image, selimage, citems, param):
+    """Pack a TVITEM; None arguments are excluded from the mask.
+    Returns (buffer, extra) - 'extra' must outlive the buffer."""
+    extra = [] # objects we must keep references to
+    mask = 0
+    mask, hitem = _GetMaskAndVal(hitem, 0, mask, commctrl.TVIF_HANDLE)
+    mask, state = _GetMaskAndVal(state, 0, mask, commctrl.TVIF_STATE)
+    if not mask & commctrl.TVIF_STATE:
+        stateMask = 0
+    mask, text = _GetMaskAndVal(text, None, mask, commctrl.TVIF_TEXT)
+    mask, image = _GetMaskAndVal(image, 0, mask, commctrl.TVIF_IMAGE)
+    mask, selimage = _GetMaskAndVal(selimage, 0, mask, commctrl.TVIF_SELECTEDIMAGE)
+    mask, citems = _GetMaskAndVal(citems, 0, mask, commctrl.TVIF_CHILDREN)
+    mask, param = _GetMaskAndVal(param, 0, mask, commctrl.TVIF_PARAM)
+    if text is None:
+        text_addr = text_len = 0
+    else:
+        if isinstance(text, unicode):
+            text = text.encode("mbcs")
+        text_buffer = array.array("c", text+"\0")
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+    format = "iiiiiiiiii"
+    buf = struct.pack(format,
+                      mask, hitem,
+                      state, stateMask,
+                      text_addr, text_len, # text
+                      image, selimage,
+                      citems, param)
+    return array.array("c", buf), extra
+
+# Make a new buffer suitable for querying hitem's attributes.
+def EmptyTVITEM(hitem, mask = None, text_buf_size=512):
+    """Make a writable TVITEM for querying hitem's attributes.
+    Returns (buffer, extra)."""
+    extra = [] # objects we must keep references to
+    if mask is None:
+        mask = commctrl.TVIF_HANDLE | commctrl.TVIF_STATE | commctrl.TVIF_TEXT | \
+               commctrl.TVIF_IMAGE | commctrl.TVIF_SELECTEDIMAGE | \
+               commctrl.TVIF_CHILDREN | commctrl.TVIF_PARAM
+    if mask & commctrl.TVIF_TEXT:
+        text_buffer = array.array("c", "\0" * text_buf_size)
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+    else:
+        text_addr = text_len = 0
+    format = "iiiiiiiiii"
+    buf = struct.pack(format,
+                      mask, hitem,
+                      0, 0,
+                      text_addr, text_len, # text
+                      0, 0,
+                      0, 0)
+    return array.array("c", buf), extra
+    
+def UnpackTVITEM(buffer):
+    """Unpack a TVITEM buffer; fields not in the mask are None.
+    Returns (hitem, state, stateMask, text, image, selimage, cChildren,
+    param)."""
+    item_mask, item_hItem, item_state, item_stateMask, \
+        item_textptr, item_cchText, item_image, item_selimage, \
+        item_cChildren, item_param = struct.unpack("10i", buffer)
+    # ensure only items listed by the mask are valid (except we assume the
+    # handle is always valid - some notifications (eg, TVN_ENDLABELEDIT) set a
+    # mask that doesn't include the handle, but the docs explicitly say it is.)
+    if not (item_mask & commctrl.TVIF_TEXT): item_textptr = item_cchText = None
+    if not (item_mask & commctrl.TVIF_CHILDREN): item_cChildren = None
+    if not (item_mask & commctrl.TVIF_IMAGE): item_image = None
+    if not (item_mask & commctrl.TVIF_PARAM): item_param = None
+    if not (item_mask & commctrl.TVIF_SELECTEDIMAGE): item_selimage = None
+    if not (item_mask & commctrl.TVIF_STATE): item_state = item_stateMask = None
+    
+    if item_textptr:
+        text = win32gui.PyGetString(item_textptr)
+    else:
+        text = None
+    return item_hItem, item_state, item_stateMask, \
+        text, item_image, item_selimage, \
+        item_cChildren, item_param
+
+# Unpack the lparm from a "TVNOTIFY" message
+def UnpackTVNOTIFY(lparam):
+    """Unpack an NMTREEVIEW from a WM_NOTIFY lparam.
+    Returns (hwndFrom, id, code, action, item_old, item_new)."""
+    # 40s == 10 ints, the size of our packed TVITEM.
+    format = "iiii40s40s"
+    buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam)
+    hwndFrom, id, code, action, buf_old, buf_new \
+          = struct.unpack(format, buf)
+    item_old = UnpackTVITEM(buf_old)
+    item_new = UnpackTVITEM(buf_new)
+    return hwndFrom, id, code, action, item_old, item_new
+
+def UnpackTVDISPINFO(lparam):
+    """Unpack an NMTVDISPINFO from a WM_NOTIFY lparam.
+    Returns (hwndFrom, id, code, item) - item as per UnpackTVITEM."""
+    format = "iii40s"
+    buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam)
+    hwndFrom, id, code, buf_item = struct.unpack(format, buf)
+    item = UnpackTVITEM(buf_item)
+    return hwndFrom, id, code, item
+
+#
+# List view items
+def PackLVITEM(item=None, subItem=None, state=None, stateMask=None, text=None, image=None, param=None, indent=None):
+    """Pack an LVITEM; None arguments are excluded from the mask.
+    Returns (buffer, extra) - 'extra' must outlive the buffer."""
+    extra = [] # objects we must keep references to
+    mask = 0
+    # _GetMaskAndVal adds quite a bit of overhead to this function.
+    if item is None: item = 0 # No mask for item
+    if subItem is None: subItem = 0 # No mask for subItem
+    if state is None:
+        state = 0
+        stateMask = 0
+    else:
+        mask |= commctrl.LVIF_STATE
+        if stateMask is None: stateMask = state
+    
+    if image is None: image = 0
+    else: mask |= commctrl.LVIF_IMAGE
+    if param is None: param = 0
+    else: mask |= commctrl.LVIF_PARAM
+    if indent is None: indent = 0
+    else: mask |= commctrl.LVIF_INDENT
+
+    if text is None:
+        text_addr = text_len = 0
+    else:
+        mask |= commctrl.LVIF_TEXT
+        if isinstance(text, unicode):
+            text = text.encode("mbcs")
+        text_buffer = array.array("c", text+"\0")
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+    format = "iiiiiiiiii"
+    buf = struct.pack(format,
+                      mask, item, subItem,
+                      state, stateMask,
+                      text_addr, text_len, # text
+                      image, param, indent)
+    return array.array("c", buf), extra
+
+def UnpackLVITEM(buffer):
+    """Unpack an LVITEM buffer; fields not in the mask are None.
+    Returns (item, subItem, state, stateMask, text, image, param,
+    indent)."""
+    item_mask, item_item, item_subItem, \
+        item_state, item_stateMask, \
+        item_textptr, item_cchText, item_image, \
+        item_param, item_indent = struct.unpack("10i", buffer)
+    # ensure only items listed by the mask are valid
+    if not (item_mask & commctrl.LVIF_TEXT): item_textptr = item_cchText = None
+    if not (item_mask & commctrl.LVIF_IMAGE): item_image = None
+    if not (item_mask & commctrl.LVIF_PARAM): item_param = None
+    if not (item_mask & commctrl.LVIF_INDENT): item_indent = None
+    if not (item_mask & commctrl.LVIF_STATE): item_state = item_stateMask = None
+    
+    if item_textptr:
+        text = win32gui.PyGetString(item_textptr)
+    else:
+        text = None
+    return item_item, item_subItem, item_state, item_stateMask, \
+        text, item_image, item_param, item_indent
+
+# Unpack an "LVNOTIFY" message
+def UnpackLVDISPINFO(lparam):
+    """Unpack an NMLVDISPINFO from a WM_NOTIFY lparam.
+    Returns (hwndFrom, id, code, item) - item as per UnpackLVITEM."""
+    format = "iii40s"
+    buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam)
+    hwndFrom, id, code, buf_item = struct.unpack(format, buf)
+    item = UnpackLVITEM(buf_item)
+    return hwndFrom, id, code, item
+
+def UnpackLVNOTIFY(lparam):
+    """Unpack an NMLISTVIEW from a WM_NOTIFY lparam.
+    Returns (hwndFrom, id, code, item, subitem, newstate, oldstate,
+    changed, (pt_x, pt_y), lparam)."""
+    format = "3i8i"
+    buf = win32gui.PyMakeBuffer(struct.calcsize(format), lparam)
+    hwndFrom, id, code, item, subitem, newstate, oldstate, \
+        changed, pt_x, pt_y, lparam = struct.unpack(format, buf)
+    return hwndFrom, id, code, item, subitem, newstate, oldstate, \
+        changed, (pt_x, pt_y), lparam
+
+
+# Make a new buffer suitable for querying an items attributes.
+def EmptyLVITEM(item, subitem, mask = None, text_buf_size=512):
+    """Make a writable LVITEM for querying an item's attributes.
+    Returns (buffer, extra)."""
+    extra = [] # objects we must keep references to
+    if mask is None:
+        mask = commctrl.LVIF_IMAGE | commctrl.LVIF_INDENT | commctrl.LVIF_TEXT | \
+               commctrl.LVIF_PARAM | commctrl.LVIF_STATE
+    if mask & commctrl.LVIF_TEXT:
+        text_buffer = array.array("c", "\0" * text_buf_size)
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+    else:
+        text_addr = text_len = 0
+    format = "iiiiiiiiii"
+    buf = struct.pack(format,
+                      mask, item, subitem, 
+                      0, 0,
+                      text_addr, text_len, # text
+                      0, 0, 0)
+    return array.array("c", buf), extra
+
+
+# List view column structure
+def PackLVCOLUMN(fmt=None, cx=None, text=None, subItem=None, image=None, order=None):
+    """Pack an LVCOLUMN; None arguments are excluded from the mask.
+    Returns (buffer, extra) - 'extra' must outlive the buffer."""
+    extra = [] # objects we must keep references to
+    mask = 0
+    mask, fmt = _GetMaskAndVal(fmt, 0, mask, commctrl.LVCF_FMT)
+    mask, cx = _GetMaskAndVal(cx, 0, mask, commctrl.LVCF_WIDTH)
+    mask, text = _GetMaskAndVal(text, None, mask, commctrl.LVCF_TEXT)
+    mask, subItem = _GetMaskAndVal(subItem, 0, mask, commctrl.LVCF_SUBITEM)
+    mask, image = _GetMaskAndVal(image, 0, mask, commctrl.LVCF_IMAGE)
+    mask, order= _GetMaskAndVal(order, 0, mask, commctrl.LVCF_ORDER)
+    if text is None:
+        text_addr = text_len = 0
+    else:
+        if isinstance(text, unicode):
+            text = text.encode("mbcs")
+        text_buffer = array.array("c", text+"\0")
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+    format = "iiiiiiii"
+    buf = struct.pack(format,
+                      mask, fmt, cx,
+                      text_addr, text_len, # text
+                      subItem, image, order)
+    return array.array("c", buf), extra
+
+def UnpackLVCOLUMN(lparam):
+    """Unpack an LVCOLUMN buffer; fields not in the mask are None.
+    Returns (fmt, cx, text, subItem, image, order)."""
+    format = "iiiiiiii"
+    mask, fmt, cx, text_addr, text_size, subItem, image, order = \
+            struct.unpack(format, lparam)
+    # ensure only items listed by the mask are valid
+    if not (mask & commctrl.LVCF_FMT): fmt = None
+    if not (mask & commctrl.LVCF_WIDTH): cx = None
+    if not (mask & commctrl.LVCF_TEXT): text_addr = text_size = None
+    if not (mask & commctrl.LVCF_SUBITEM): subItem = None
+    if not (mask & commctrl.LVCF_IMAGE): image = None
+    if not (mask & commctrl.LVCF_ORDER): order = None
+    if text_addr:
+        text = win32gui.PyGetString(text_addr)
+    else:
+        text = None
+    return fmt, cx, text, subItem, image, order
+
+
+# Make a new buffer suitable for querying an items attributes.
+def EmptyLVCOLUMN(mask = None, text_buf_size=512):
+    """Make a writable LVCOLUMN for querying a column's attributes.
+    Returns (buffer, extra)."""
+    extra = [] # objects we must keep references to
+    if mask is None:
+        mask = commctrl.LVCF_FMT | commctrl.LVCF_WIDTH | commctrl.LVCF_TEXT | \
+               commctrl.LVCF_SUBITEM | commctrl.LVCF_IMAGE | commctrl.LVCF_ORDER
+    if mask & commctrl.LVCF_TEXT:
+        text_buffer = array.array("c", "\0" * text_buf_size)
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+    else:
+        text_addr = text_len = 0
+    format = "iiiiiiii"
+    buf = struct.pack(format,
+                      mask, 0, 0,
+                      text_addr, text_len, # text
+                      0, 0, 0)
+    return array.array("c", buf), extra
+
+# List view hit-test.
+def PackLVHITTEST(pt):
+    """Pack an LVHITTESTINFO for the point (x, y).
+    Returns (buffer, None) - no extra objects need keeping alive."""
+    format = "iiiii"
+    buf = struct.pack(format,
+                      pt[0], pt[1],
+                      0, 0, 0)
+    return array.array("c", buf), None
+
+def UnpackLVHITTEST(buf):
+    format = "iiiii"
+    x, y, flags, item, subitem = struct.unpack(format, buf)
+    return (x,y), flags, item, subitem
+
+def PackHDITEM(cxy = None, text = None, hbm = None, fmt = None,
+               param = None, image = None, order = None):
+    extra = [] # objects we must keep references to
+    mask = 0
+    mask, cxy = _GetMaskAndVal(cxy, 0, mask, commctrl.HDI_HEIGHT)
+    mask, text = _GetMaskAndVal(text, None, mask, commctrl.LVCF_TEXT)
+    mask, hbm = _GetMaskAndVal(hbm, 0, mask, commctrl.HDI_BITMAP)
+    mask, fmt = _GetMaskAndVal(fmt, 0, mask, commctrl.HDI_FORMAT)
+    mask, param = _GetMaskAndVal(param, 0, mask, commctrl.HDI_LPARAM)
+    mask, image = _GetMaskAndVal(image, 0, mask, commctrl.HDI_IMAGE)
+    mask, order = _GetMaskAndVal(order, 0, mask, commctrl.HDI_ORDER)
+
+    if text is None:
+        text_addr = text_len = 0
+    else:
+        if isinstance(text, unicode):
+            text = text.encode("mbcs")
+        text_buffer = array.array("c", text+"\0")
+        extra.append(text_buffer)
+        text_addr, text_len = text_buffer.buffer_info()
+
+    format = "iiiiiiiiiii"
+    buf = struct.pack(format,
+                      mask, cxy, text_addr, hbm, text_len,
+                      fmt, param, image, order, 0, 0)
+    return array.array("c", buf), extra
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32inetcon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32inetcon.py
new file mode 100644
index 0000000..4907af9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32inetcon.py
@@ -0,0 +1,727 @@
+# Generated by h2py from \mssdk\include\WinInet.h
+
+# Included from pshpack8.h
+
+# Included from pshpack4.h
+INTERNET_INVALID_PORT_NUMBER = 0
+INTERNET_DEFAULT_FTP_PORT = 21
+INTERNET_DEFAULT_GOPHER_PORT = 70
+INTERNET_DEFAULT_HTTP_PORT = 80
+INTERNET_DEFAULT_HTTPS_PORT = 443
+INTERNET_DEFAULT_SOCKS_PORT = 1080
+INTERNET_MAX_HOST_NAME_LENGTH = 256
+INTERNET_MAX_USER_NAME_LENGTH = 128
+INTERNET_MAX_PASSWORD_LENGTH = 128
+INTERNET_MAX_PORT_NUMBER_LENGTH = 5
+INTERNET_MAX_PORT_NUMBER_VALUE = 65535
+INTERNET_MAX_PATH_LENGTH = 2048
+INTERNET_MAX_SCHEME_LENGTH = 32
+INTERNET_KEEP_ALIVE_ENABLED = 1
+INTERNET_KEEP_ALIVE_DISABLED = 0
+INTERNET_REQFLAG_FROM_CACHE = 0x00000001
+INTERNET_REQFLAG_ASYNC = 0x00000002
+INTERNET_REQFLAG_VIA_PROXY = 0x00000004
+INTERNET_REQFLAG_NO_HEADERS = 0x00000008
+INTERNET_REQFLAG_PASSIVE = 0x00000010
+INTERNET_REQFLAG_CACHE_WRITE_DISABLED = 0x00000040
+INTERNET_REQFLAG_NET_TIMEOUT = 0x00000080
+INTERNET_FLAG_RELOAD = (-2147483648)
+INTERNET_FLAG_RAW_DATA = 0x40000000
+INTERNET_FLAG_EXISTING_CONNECT = 0x20000000
+INTERNET_FLAG_ASYNC = 0x10000000
+INTERNET_FLAG_PASSIVE = 0x08000000
+INTERNET_FLAG_NO_CACHE_WRITE = 0x04000000
+INTERNET_FLAG_DONT_CACHE = INTERNET_FLAG_NO_CACHE_WRITE
+INTERNET_FLAG_MAKE_PERSISTENT = 0x02000000
+INTERNET_FLAG_FROM_CACHE = 0x01000000
+INTERNET_FLAG_OFFLINE = INTERNET_FLAG_FROM_CACHE
+INTERNET_FLAG_SECURE = 0x00800000
+INTERNET_FLAG_KEEP_CONNECTION = 0x00400000
+INTERNET_FLAG_NO_AUTO_REDIRECT = 0x00200000
+INTERNET_FLAG_READ_PREFETCH = 0x00100000
+INTERNET_FLAG_NO_COOKIES = 0x00080000
+INTERNET_FLAG_NO_AUTH = 0x00040000
+INTERNET_FLAG_RESTRICTED_ZONE = 0x00020000
+INTERNET_FLAG_CACHE_IF_NET_FAIL = 0x00010000
+INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP = 0x00008000
+INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS = 0x00004000
+INTERNET_FLAG_IGNORE_CERT_DATE_INVALID = 0x00002000
+INTERNET_FLAG_IGNORE_CERT_CN_INVALID = 0x00001000
+INTERNET_FLAG_RESYNCHRONIZE = 0x00000800
+INTERNET_FLAG_HYPERLINK = 0x00000400
+INTERNET_FLAG_NO_UI = 0x00000200
+INTERNET_FLAG_PRAGMA_NOCACHE = 0x00000100
+INTERNET_FLAG_CACHE_ASYNC = 0x00000080
+INTERNET_FLAG_FORMS_SUBMIT = 0x00000040
+INTERNET_FLAG_FWD_BACK = 0x00000020
+INTERNET_FLAG_NEED_FILE = 0x00000010
+INTERNET_FLAG_MUST_CACHE_REQUEST = INTERNET_FLAG_NEED_FILE
+SECURITY_INTERNET_MASK = (INTERNET_FLAG_IGNORE_CERT_CN_INVALID    |  \
+                                 INTERNET_FLAG_IGNORE_CERT_DATE_INVALID  |  \
+                                 INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS  |  \
+                                 INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP   )
+INTERNET_ERROR_MASK_INSERT_CDROM = 0x1
+INTERNET_ERROR_MASK_COMBINED_SEC_CERT = 0x2
+INTERNET_ERROR_MASK_NEED_MSN_SSPI_PKG = 0X4
+INTERNET_ERROR_MASK_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = 0x8
+WININET_API_FLAG_ASYNC = 0x00000001
+WININET_API_FLAG_SYNC = 0x00000004
+WININET_API_FLAG_USE_CONTEXT = 0x00000008
+INTERNET_NO_CALLBACK = 0
+IDSI_FLAG_KEEP_ALIVE = 0x00000001
+IDSI_FLAG_SECURE = 0x00000002
+IDSI_FLAG_PROXY = 0x00000004
+IDSI_FLAG_TUNNEL = 0x00000008
+INTERNET_PER_CONN_FLAGS = 1
+INTERNET_PER_CONN_PROXY_SERVER = 2
+INTERNET_PER_CONN_PROXY_BYPASS = 3
+INTERNET_PER_CONN_AUTOCONFIG_URL = 4
+INTERNET_PER_CONN_AUTODISCOVERY_FLAGS = 5
+INTERNET_PER_CONN_AUTOCONFIG_SECONDARY_URL = 6
+INTERNET_PER_CONN_AUTOCONFIG_RELOAD_DELAY_MINS = 7
+INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_TIME = 8
+INTERNET_PER_CONN_AUTOCONFIG_LAST_DETECT_URL = 9
+PROXY_TYPE_DIRECT = 0x00000001
+PROXY_TYPE_PROXY = 0x00000002
+PROXY_TYPE_AUTO_PROXY_URL = 0x00000004
+PROXY_TYPE_AUTO_DETECT = 0x00000008
+AUTO_PROXY_FLAG_USER_SET = 0x00000001
+AUTO_PROXY_FLAG_ALWAYS_DETECT = 0x00000002
+AUTO_PROXY_FLAG_DETECTION_RUN = 0x00000004
+AUTO_PROXY_FLAG_MIGRATED = 0x00000008
+AUTO_PROXY_FLAG_DONT_CACHE_PROXY_RESULT = 0x00000010
+AUTO_PROXY_FLAG_CACHE_INIT_RUN = 0x00000020
+AUTO_PROXY_FLAG_DETECTION_SUSPECT = 0x00000040
+ISO_FORCE_DISCONNECTED = 0x00000001
+INTERNET_RFC1123_FORMAT = 0
+INTERNET_RFC1123_BUFSIZE = 30
+ICU_ESCAPE = (-2147483648)
+ICU_USERNAME = 0x40000000
+ICU_NO_ENCODE = 0x20000000
+ICU_DECODE = 0x10000000
+ICU_NO_META = 0x08000000
+ICU_ENCODE_SPACES_ONLY = 0x04000000
+ICU_BROWSER_MODE = 0x02000000
+ICU_ENCODE_PERCENT = 0x00001000
+INTERNET_OPEN_TYPE_PRECONFIG = 0
+INTERNET_OPEN_TYPE_DIRECT = 1
+INTERNET_OPEN_TYPE_PROXY = 3
+INTERNET_OPEN_TYPE_PRECONFIG_WITH_NO_AUTOPROXY = 4
+PRE_CONFIG_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PRECONFIG
+LOCAL_INTERNET_ACCESS = INTERNET_OPEN_TYPE_DIRECT
+CERN_PROXY_INTERNET_ACCESS = INTERNET_OPEN_TYPE_PROXY
+INTERNET_SERVICE_FTP = 1
+INTERNET_SERVICE_GOPHER = 2
+INTERNET_SERVICE_HTTP = 3
+IRF_ASYNC = WININET_API_FLAG_ASYNC
+IRF_SYNC = WININET_API_FLAG_SYNC
+IRF_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT
+IRF_NO_WAIT = 0x00000008
+ISO_GLOBAL = 0x00000001
+ISO_REGISTRY = 0x00000002
+ISO_VALID_FLAGS = (ISO_GLOBAL | ISO_REGISTRY)
+INTERNET_OPTION_CALLBACK = 1
+INTERNET_OPTION_CONNECT_TIMEOUT = 2
+INTERNET_OPTION_CONNECT_RETRIES = 3
+INTERNET_OPTION_CONNECT_BACKOFF = 4
+INTERNET_OPTION_SEND_TIMEOUT = 5
+INTERNET_OPTION_CONTROL_SEND_TIMEOUT = INTERNET_OPTION_SEND_TIMEOUT
+INTERNET_OPTION_RECEIVE_TIMEOUT = 6
+INTERNET_OPTION_CONTROL_RECEIVE_TIMEOUT = INTERNET_OPTION_RECEIVE_TIMEOUT
+INTERNET_OPTION_DATA_SEND_TIMEOUT = 7
+INTERNET_OPTION_DATA_RECEIVE_TIMEOUT = 8
+INTERNET_OPTION_HANDLE_TYPE = 9
+INTERNET_OPTION_LISTEN_TIMEOUT = 11
+INTERNET_OPTION_READ_BUFFER_SIZE = 12
+INTERNET_OPTION_WRITE_BUFFER_SIZE = 13
+INTERNET_OPTION_ASYNC_ID = 15
+INTERNET_OPTION_ASYNC_PRIORITY = 16
+INTERNET_OPTION_PARENT_HANDLE = 21
+INTERNET_OPTION_KEEP_CONNECTION = 22
+INTERNET_OPTION_REQUEST_FLAGS = 23
+INTERNET_OPTION_EXTENDED_ERROR = 24
+INTERNET_OPTION_OFFLINE_MODE = 26
+INTERNET_OPTION_CACHE_STREAM_HANDLE = 27
+INTERNET_OPTION_USERNAME = 28
+INTERNET_OPTION_PASSWORD = 29
+INTERNET_OPTION_ASYNC = 30
+INTERNET_OPTION_SECURITY_FLAGS = 31
+INTERNET_OPTION_SECURITY_CERTIFICATE_STRUCT = 32
+INTERNET_OPTION_DATAFILE_NAME = 33
+INTERNET_OPTION_URL = 34
+INTERNET_OPTION_SECURITY_CERTIFICATE = 35
+INTERNET_OPTION_SECURITY_KEY_BITNESS = 36
+INTERNET_OPTION_REFRESH = 37
+INTERNET_OPTION_PROXY = 38
+INTERNET_OPTION_SETTINGS_CHANGED = 39
+INTERNET_OPTION_VERSION = 40
+INTERNET_OPTION_USER_AGENT = 41
+INTERNET_OPTION_END_BROWSER_SESSION = 42
+INTERNET_OPTION_PROXY_USERNAME = 43
+INTERNET_OPTION_PROXY_PASSWORD = 44
+INTERNET_OPTION_CONTEXT_VALUE = 45
+INTERNET_OPTION_CONNECT_LIMIT = 46
+INTERNET_OPTION_SECURITY_SELECT_CLIENT_CERT = 47
+INTERNET_OPTION_POLICY = 48
+INTERNET_OPTION_DISCONNECTED_TIMEOUT = 49
+INTERNET_OPTION_CONNECTED_STATE = 50
+INTERNET_OPTION_IDLE_STATE = 51
+INTERNET_OPTION_OFFLINE_SEMANTICS = 52
+INTERNET_OPTION_SECONDARY_CACHE_KEY = 53
+INTERNET_OPTION_CALLBACK_FILTER = 54
+INTERNET_OPTION_CONNECT_TIME = 55
+INTERNET_OPTION_SEND_THROUGHPUT = 56
+INTERNET_OPTION_RECEIVE_THROUGHPUT = 57
+INTERNET_OPTION_REQUEST_PRIORITY = 58
+INTERNET_OPTION_HTTP_VERSION = 59
+INTERNET_OPTION_RESET_URLCACHE_SESSION = 60
+INTERNET_OPTION_ERROR_MASK = 62
+INTERNET_OPTION_FROM_CACHE_TIMEOUT = 63
+INTERNET_OPTION_BYPASS_EDITED_ENTRY = 64
+INTERNET_OPTION_DIAGNOSTIC_SOCKET_INFO = 67
+INTERNET_OPTION_CODEPAGE = 68
+INTERNET_OPTION_CACHE_TIMESTAMPS = 69
+INTERNET_OPTION_DISABLE_AUTODIAL = 70
+INTERNET_OPTION_MAX_CONNS_PER_SERVER = 73
+INTERNET_OPTION_MAX_CONNS_PER_1_0_SERVER = 74
+INTERNET_OPTION_PER_CONNECTION_OPTION = 75
+INTERNET_OPTION_DIGEST_AUTH_UNLOAD = 76
+INTERNET_OPTION_IGNORE_OFFLINE = 77
+INTERNET_OPTION_IDENTITY = 78
+INTERNET_OPTION_REMOVE_IDENTITY = 79
+INTERNET_OPTION_ALTER_IDENTITY = 80
+INTERNET_OPTION_SUPPRESS_BEHAVIOR = 81
+INTERNET_OPTION_AUTODIAL_MODE = 82
+INTERNET_OPTION_AUTODIAL_CONNECTION = 83
+INTERNET_OPTION_CLIENT_CERT_CONTEXT = 84
+INTERNET_OPTION_AUTH_FLAGS = 85
+INTERNET_OPTION_COOKIES_3RD_PARTY = 86
+INTERNET_OPTION_DISABLE_PASSPORT_AUTH = 87
+INTERNET_OPTION_SEND_UTF8_SERVERNAME_TO_PROXY = 88
+INTERNET_OPTION_EXEMPT_CONNECTION_LIMIT = 89
+INTERNET_OPTION_ENABLE_PASSPORT_AUTH = 90
+INTERNET_OPTION_HIBERNATE_INACTIVE_WORKER_THREADS = 91
+INTERNET_OPTION_ACTIVATE_WORKER_THREADS = 92
+INTERNET_OPTION_RESTORE_WORKER_THREAD_DEFAULTS = 93
+INTERNET_OPTION_SOCKET_SEND_BUFFER_LENGTH = 94
+INTERNET_OPTION_PROXY_SETTINGS_CHANGED = 95
+INTERNET_FIRST_OPTION = INTERNET_OPTION_CALLBACK
+INTERNET_LAST_OPTION = INTERNET_OPTION_PROXY_SETTINGS_CHANGED
+INTERNET_PRIORITY_FOREGROUND = 1000
+INTERNET_HANDLE_TYPE_INTERNET = 1
+INTERNET_HANDLE_TYPE_CONNECT_FTP = 2
+INTERNET_HANDLE_TYPE_CONNECT_GOPHER = 3
+INTERNET_HANDLE_TYPE_CONNECT_HTTP = 4
+INTERNET_HANDLE_TYPE_FTP_FIND = 5
+INTERNET_HANDLE_TYPE_FTP_FIND_HTML = 6
+INTERNET_HANDLE_TYPE_FTP_FILE = 7
+INTERNET_HANDLE_TYPE_FTP_FILE_HTML = 8
+INTERNET_HANDLE_TYPE_GOPHER_FIND = 9
+INTERNET_HANDLE_TYPE_GOPHER_FIND_HTML = 10
+INTERNET_HANDLE_TYPE_GOPHER_FILE = 11
+INTERNET_HANDLE_TYPE_GOPHER_FILE_HTML = 12
+INTERNET_HANDLE_TYPE_HTTP_REQUEST = 13
+INTERNET_HANDLE_TYPE_FILE_REQUEST = 14
+AUTH_FLAG_DISABLE_NEGOTIATE = 0x00000001
+AUTH_FLAG_ENABLE_NEGOTIATE = 0x00000002
+SECURITY_FLAG_SECURE = 0x00000001
+SECURITY_FLAG_STRENGTH_WEAK = 0x10000000
+SECURITY_FLAG_STRENGTH_MEDIUM = 0x40000000
+SECURITY_FLAG_STRENGTH_STRONG = 0x20000000
+SECURITY_FLAG_UNKNOWNBIT = (-2147483648)
+SECURITY_FLAG_FORTEZZA = 0x08000000
+SECURITY_FLAG_NORMALBITNESS = SECURITY_FLAG_STRENGTH_WEAK
+SECURITY_FLAG_SSL = 0x00000002
+SECURITY_FLAG_SSL3 = 0x00000004
+SECURITY_FLAG_PCT = 0x00000008
+SECURITY_FLAG_PCT4 = 0x00000010
+SECURITY_FLAG_IETFSSL4 = 0x00000020
+SECURITY_FLAG_40BIT = SECURITY_FLAG_STRENGTH_WEAK
+SECURITY_FLAG_128BIT = SECURITY_FLAG_STRENGTH_STRONG
+SECURITY_FLAG_56BIT = SECURITY_FLAG_STRENGTH_MEDIUM
+SECURITY_FLAG_IGNORE_REVOCATION = 0x00000080
+SECURITY_FLAG_IGNORE_UNKNOWN_CA = 0x00000100
+SECURITY_FLAG_IGNORE_WRONG_USAGE = 0x00000200
+SECURITY_FLAG_IGNORE_CERT_CN_INVALID = INTERNET_FLAG_IGNORE_CERT_CN_INVALID
+SECURITY_FLAG_IGNORE_CERT_DATE_INVALID = INTERNET_FLAG_IGNORE_CERT_DATE_INVALID
+SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTPS = INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS
+SECURITY_FLAG_IGNORE_REDIRECT_TO_HTTP = INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP
+SECURITY_SET_MASK = (SECURITY_FLAG_IGNORE_REVOCATION |\
+                                 SECURITY_FLAG_IGNORE_UNKNOWN_CA |\
+                                 SECURITY_FLAG_IGNORE_CERT_CN_INVALID |\
+                                 SECURITY_FLAG_IGNORE_CERT_DATE_INVALID |\
+                                 SECURITY_FLAG_IGNORE_WRONG_USAGE)
+AUTODIAL_MODE_NEVER = 1
+AUTODIAL_MODE_ALWAYS = 2
+AUTODIAL_MODE_NO_NETWORK_PRESENT = 4
+INTERNET_STATUS_RESOLVING_NAME = 10
+INTERNET_STATUS_NAME_RESOLVED = 11
+INTERNET_STATUS_CONNECTING_TO_SERVER = 20
+INTERNET_STATUS_CONNECTED_TO_SERVER = 21
+INTERNET_STATUS_SENDING_REQUEST = 30
+INTERNET_STATUS_REQUEST_SENT = 31
+INTERNET_STATUS_RECEIVING_RESPONSE = 40
+INTERNET_STATUS_RESPONSE_RECEIVED = 41
+INTERNET_STATUS_CTL_RESPONSE_RECEIVED = 42
+INTERNET_STATUS_PREFETCH = 43
+INTERNET_STATUS_CLOSING_CONNECTION = 50
+INTERNET_STATUS_CONNECTION_CLOSED = 51
+INTERNET_STATUS_HANDLE_CREATED = 60
+INTERNET_STATUS_HANDLE_CLOSING = 70
+INTERNET_STATUS_DETECTING_PROXY = 80
+INTERNET_STATUS_REQUEST_COMPLETE = 100
+INTERNET_STATUS_REDIRECT = 110
+INTERNET_STATUS_INTERMEDIATE_RESPONSE = 120
+INTERNET_STATUS_USER_INPUT_REQUIRED = 140
+INTERNET_STATUS_STATE_CHANGE = 200
+INTERNET_STATUS_COOKIE_SENT = 320
+INTERNET_STATUS_COOKIE_RECEIVED = 321
+INTERNET_STATUS_PRIVACY_IMPACTED = 324
+INTERNET_STATUS_P3P_HEADER = 325
+INTERNET_STATUS_P3P_POLICYREF = 326
+INTERNET_STATUS_COOKIE_HISTORY = 327
+INTERNET_STATE_CONNECTED = 0x00000001
+INTERNET_STATE_DISCONNECTED = 0x00000002
+INTERNET_STATE_DISCONNECTED_BY_USER = 0x00000010
+INTERNET_STATE_IDLE = 0x00000100
+INTERNET_STATE_BUSY = 0x00000200
+FTP_TRANSFER_TYPE_UNKNOWN = 0x00000000
+FTP_TRANSFER_TYPE_ASCII = 0x00000001
+FTP_TRANSFER_TYPE_BINARY = 0x00000002
+FTP_TRANSFER_TYPE_MASK = (FTP_TRANSFER_TYPE_ASCII | FTP_TRANSFER_TYPE_BINARY)
+MAX_GOPHER_DISPLAY_TEXT = 128
+MAX_GOPHER_SELECTOR_TEXT = 256
+MAX_GOPHER_HOST_NAME = INTERNET_MAX_HOST_NAME_LENGTH
+MAX_GOPHER_LOCATOR_LENGTH = (1                                  \
+                                    + MAX_GOPHER_DISPLAY_TEXT           \
+                                    + 1                                 \
+                                    + MAX_GOPHER_SELECTOR_TEXT          \
+                                    + 1                                 \
+                                    + MAX_GOPHER_HOST_NAME              \
+                                    + 1                                 \
+                                    + INTERNET_MAX_PORT_NUMBER_LENGTH   \
+                                    + 1                                 \
+                                    + 1                                 \
+                                    + 2                                 \
+                                    )
+GOPHER_TYPE_TEXT_FILE = 0x00000001
+GOPHER_TYPE_DIRECTORY = 0x00000002
+GOPHER_TYPE_CSO = 0x00000004
+GOPHER_TYPE_ERROR = 0x00000008
+GOPHER_TYPE_MAC_BINHEX = 0x00000010
+GOPHER_TYPE_DOS_ARCHIVE = 0x00000020
+GOPHER_TYPE_UNIX_UUENCODED = 0x00000040
+GOPHER_TYPE_INDEX_SERVER = 0x00000080
+GOPHER_TYPE_TELNET = 0x00000100
+GOPHER_TYPE_BINARY = 0x00000200
+GOPHER_TYPE_REDUNDANT = 0x00000400
+GOPHER_TYPE_TN3270 = 0x00000800
+GOPHER_TYPE_GIF = 0x00001000
+GOPHER_TYPE_IMAGE = 0x00002000
+GOPHER_TYPE_BITMAP = 0x00004000
+GOPHER_TYPE_MOVIE = 0x00008000
+GOPHER_TYPE_SOUND = 0x00010000
+GOPHER_TYPE_HTML = 0x00020000
+GOPHER_TYPE_PDF = 0x00040000
+GOPHER_TYPE_CALENDAR = 0x00080000
+GOPHER_TYPE_INLINE = 0x00100000
+GOPHER_TYPE_UNKNOWN = 0x20000000
+GOPHER_TYPE_ASK = 0x40000000
+GOPHER_TYPE_GOPHER_PLUS = (-2147483648)
+GOPHER_TYPE_FILE_MASK = (GOPHER_TYPE_TEXT_FILE          \
+                                    | GOPHER_TYPE_MAC_BINHEX        \
+                                    | GOPHER_TYPE_DOS_ARCHIVE       \
+                                    | GOPHER_TYPE_UNIX_UUENCODED    \
+                                    | GOPHER_TYPE_BINARY            \
+                                    | GOPHER_TYPE_GIF               \
+                                    | GOPHER_TYPE_IMAGE             \
+                                    | GOPHER_TYPE_BITMAP            \
+                                    | GOPHER_TYPE_MOVIE             \
+                                    | GOPHER_TYPE_SOUND             \
+                                    | GOPHER_TYPE_HTML              \
+                                    | GOPHER_TYPE_PDF               \
+                                    | GOPHER_TYPE_CALENDAR          \
+                                    | GOPHER_TYPE_INLINE            \
+                                    )
+MAX_GOPHER_CATEGORY_NAME = 128
+MAX_GOPHER_ATTRIBUTE_NAME = 128
+MIN_GOPHER_ATTRIBUTE_LENGTH = 256
+GOPHER_ATTRIBUTE_ID_BASE = (-1412641792)
+GOPHER_CATEGORY_ID_ALL = (GOPHER_ATTRIBUTE_ID_BASE + 1)
+GOPHER_CATEGORY_ID_INFO = (GOPHER_ATTRIBUTE_ID_BASE + 2)
+GOPHER_CATEGORY_ID_ADMIN = (GOPHER_ATTRIBUTE_ID_BASE + 3)
+GOPHER_CATEGORY_ID_VIEWS = (GOPHER_ATTRIBUTE_ID_BASE + 4)
+GOPHER_CATEGORY_ID_ABSTRACT = (GOPHER_ATTRIBUTE_ID_BASE + 5)
+GOPHER_CATEGORY_ID_VERONICA = (GOPHER_ATTRIBUTE_ID_BASE + 6)
+GOPHER_CATEGORY_ID_ASK = (GOPHER_ATTRIBUTE_ID_BASE + 7)
+GOPHER_CATEGORY_ID_UNKNOWN = (GOPHER_ATTRIBUTE_ID_BASE + 8)
+GOPHER_ATTRIBUTE_ID_ALL = (GOPHER_ATTRIBUTE_ID_BASE + 9)
+GOPHER_ATTRIBUTE_ID_ADMIN = (GOPHER_ATTRIBUTE_ID_BASE + 10)
+GOPHER_ATTRIBUTE_ID_MOD_DATE = (GOPHER_ATTRIBUTE_ID_BASE + 11)
+GOPHER_ATTRIBUTE_ID_TTL = (GOPHER_ATTRIBUTE_ID_BASE + 12)
+GOPHER_ATTRIBUTE_ID_SCORE = (GOPHER_ATTRIBUTE_ID_BASE + 13)
+GOPHER_ATTRIBUTE_ID_RANGE = (GOPHER_ATTRIBUTE_ID_BASE + 14)
+GOPHER_ATTRIBUTE_ID_SITE = (GOPHER_ATTRIBUTE_ID_BASE + 15)
+GOPHER_ATTRIBUTE_ID_ORG = (GOPHER_ATTRIBUTE_ID_BASE + 16)
+GOPHER_ATTRIBUTE_ID_LOCATION = (GOPHER_ATTRIBUTE_ID_BASE + 17)
+GOPHER_ATTRIBUTE_ID_GEOG = (GOPHER_ATTRIBUTE_ID_BASE + 18)
+GOPHER_ATTRIBUTE_ID_TIMEZONE = (GOPHER_ATTRIBUTE_ID_BASE + 19)
+GOPHER_ATTRIBUTE_ID_PROVIDER = (GOPHER_ATTRIBUTE_ID_BASE + 20)
+GOPHER_ATTRIBUTE_ID_VERSION = (GOPHER_ATTRIBUTE_ID_BASE + 21)
+GOPHER_ATTRIBUTE_ID_ABSTRACT = (GOPHER_ATTRIBUTE_ID_BASE + 22)
+GOPHER_ATTRIBUTE_ID_VIEW = (GOPHER_ATTRIBUTE_ID_BASE + 23)
+GOPHER_ATTRIBUTE_ID_TREEWALK = (GOPHER_ATTRIBUTE_ID_BASE + 24)
+GOPHER_ATTRIBUTE_ID_UNKNOWN = (GOPHER_ATTRIBUTE_ID_BASE + 25)
+HTTP_MAJOR_VERSION = 1
+HTTP_MINOR_VERSION = 0
+HTTP_VERSIONA = "HTTP/1.0"
+HTTP_VERSION = HTTP_VERSIONA
+HTTP_QUERY_MIME_VERSION = 0
+HTTP_QUERY_CONTENT_TYPE = 1
+HTTP_QUERY_CONTENT_TRANSFER_ENCODING = 2
+HTTP_QUERY_CONTENT_ID = 3
+HTTP_QUERY_CONTENT_DESCRIPTION = 4
+HTTP_QUERY_CONTENT_LENGTH = 5
+HTTP_QUERY_CONTENT_LANGUAGE = 6
+HTTP_QUERY_ALLOW = 7
+HTTP_QUERY_PUBLIC = 8
+HTTP_QUERY_DATE = 9
+HTTP_QUERY_EXPIRES = 10
+HTTP_QUERY_LAST_MODIFIED = 11
+HTTP_QUERY_MESSAGE_ID = 12
+HTTP_QUERY_URI = 13
+HTTP_QUERY_DERIVED_FROM = 14
+HTTP_QUERY_COST = 15
+HTTP_QUERY_LINK = 16
+HTTP_QUERY_PRAGMA = 17
+HTTP_QUERY_VERSION = 18
+HTTP_QUERY_STATUS_CODE = 19
+HTTP_QUERY_STATUS_TEXT = 20
+HTTP_QUERY_RAW_HEADERS = 21
+HTTP_QUERY_RAW_HEADERS_CRLF = 22
+HTTP_QUERY_CONNECTION = 23
+HTTP_QUERY_ACCEPT = 24
+HTTP_QUERY_ACCEPT_CHARSET = 25
+HTTP_QUERY_ACCEPT_ENCODING = 26
+HTTP_QUERY_ACCEPT_LANGUAGE = 27
+HTTP_QUERY_AUTHORIZATION = 28
+HTTP_QUERY_CONTENT_ENCODING = 29
+HTTP_QUERY_FORWARDED = 30
+HTTP_QUERY_FROM = 31
+HTTP_QUERY_IF_MODIFIED_SINCE = 32
+HTTP_QUERY_LOCATION = 33
+HTTP_QUERY_ORIG_URI = 34
+HTTP_QUERY_REFERER = 35
+HTTP_QUERY_RETRY_AFTER = 36
+HTTP_QUERY_SERVER = 37
+HTTP_QUERY_TITLE = 38
+HTTP_QUERY_USER_AGENT = 39
+HTTP_QUERY_WWW_AUTHENTICATE = 40
+HTTP_QUERY_PROXY_AUTHENTICATE = 41
+HTTP_QUERY_ACCEPT_RANGES = 42
+HTTP_QUERY_SET_COOKIE = 43
+HTTP_QUERY_COOKIE = 44
+HTTP_QUERY_REQUEST_METHOD = 45
+HTTP_QUERY_REFRESH = 46
+HTTP_QUERY_CONTENT_DISPOSITION = 47
+HTTP_QUERY_AGE = 48
+HTTP_QUERY_CACHE_CONTROL = 49
+HTTP_QUERY_CONTENT_BASE = 50
+HTTP_QUERY_CONTENT_LOCATION = 51
+HTTP_QUERY_CONTENT_MD5 = 52
+HTTP_QUERY_CONTENT_RANGE = 53
+HTTP_QUERY_ETAG = 54
+HTTP_QUERY_HOST = 55
+HTTP_QUERY_IF_MATCH = 56
+HTTP_QUERY_IF_NONE_MATCH = 57
+HTTP_QUERY_IF_RANGE = 58
+HTTP_QUERY_IF_UNMODIFIED_SINCE = 59
+HTTP_QUERY_MAX_FORWARDS = 60
+HTTP_QUERY_PROXY_AUTHORIZATION = 61
+HTTP_QUERY_RANGE = 62
+HTTP_QUERY_TRANSFER_ENCODING = 63
+HTTP_QUERY_UPGRADE = 64
+HTTP_QUERY_VARY = 65
+HTTP_QUERY_VIA = 66
+HTTP_QUERY_WARNING = 67
+HTTP_QUERY_EXPECT = 68
+HTTP_QUERY_PROXY_CONNECTION = 69
+HTTP_QUERY_UNLESS_MODIFIED_SINCE = 70
+HTTP_QUERY_ECHO_REQUEST = 71
+HTTP_QUERY_ECHO_REPLY = 72
+HTTP_QUERY_ECHO_HEADERS = 73
+HTTP_QUERY_ECHO_HEADERS_CRLF = 74
+HTTP_QUERY_PROXY_SUPPORT = 75
+HTTP_QUERY_AUTHENTICATION_INFO = 76
+HTTP_QUERY_PASSPORT_URLS = 77
+HTTP_QUERY_PASSPORT_CONFIG = 78
+HTTP_QUERY_MAX = 78
+HTTP_QUERY_CUSTOM = 65535
+HTTP_QUERY_FLAG_REQUEST_HEADERS = (-2147483648)
+HTTP_QUERY_FLAG_SYSTEMTIME = 0x40000000
+HTTP_QUERY_FLAG_NUMBER = 0x20000000
+HTTP_QUERY_FLAG_COALESCE = 0x10000000
+HTTP_QUERY_MODIFIER_FLAGS_MASK = (HTTP_QUERY_FLAG_REQUEST_HEADERS    \
+                                                | HTTP_QUERY_FLAG_SYSTEMTIME        \
+                                                | HTTP_QUERY_FLAG_NUMBER            \
+                                                | HTTP_QUERY_FLAG_COALESCE          \
+                                                )
+HTTP_QUERY_HEADER_MASK = (~HTTP_QUERY_MODIFIER_FLAGS_MASK)
+HTTP_STATUS_CONTINUE = 100
+HTTP_STATUS_SWITCH_PROTOCOLS = 101
+HTTP_STATUS_OK = 200
+HTTP_STATUS_CREATED = 201
+HTTP_STATUS_ACCEPTED = 202
+HTTP_STATUS_PARTIAL = 203
+HTTP_STATUS_NO_CONTENT = 204
+HTTP_STATUS_RESET_CONTENT = 205
+HTTP_STATUS_PARTIAL_CONTENT = 206
+HTTP_STATUS_AMBIGUOUS = 300
+HTTP_STATUS_MOVED = 301
+HTTP_STATUS_REDIRECT = 302
+HTTP_STATUS_REDIRECT_METHOD = 303
+HTTP_STATUS_NOT_MODIFIED = 304
+HTTP_STATUS_USE_PROXY = 305
+HTTP_STATUS_REDIRECT_KEEP_VERB = 307
+HTTP_STATUS_BAD_REQUEST = 400
+HTTP_STATUS_DENIED = 401
+HTTP_STATUS_PAYMENT_REQ = 402
+HTTP_STATUS_FORBIDDEN = 403
+HTTP_STATUS_NOT_FOUND = 404
+HTTP_STATUS_BAD_METHOD = 405
+HTTP_STATUS_NONE_ACCEPTABLE = 406
+HTTP_STATUS_PROXY_AUTH_REQ = 407
+HTTP_STATUS_REQUEST_TIMEOUT = 408
+HTTP_STATUS_CONFLICT = 409
+HTTP_STATUS_GONE = 410
+HTTP_STATUS_LENGTH_REQUIRED = 411
+HTTP_STATUS_PRECOND_FAILED = 412
+HTTP_STATUS_REQUEST_TOO_LARGE = 413
+HTTP_STATUS_URI_TOO_LONG = 414
+HTTP_STATUS_UNSUPPORTED_MEDIA = 415
+HTTP_STATUS_RETRY_WITH = 449
+HTTP_STATUS_SERVER_ERROR = 500
+HTTP_STATUS_NOT_SUPPORTED = 501
+HTTP_STATUS_BAD_GATEWAY = 502
+HTTP_STATUS_SERVICE_UNAVAIL = 503
+HTTP_STATUS_GATEWAY_TIMEOUT = 504
+HTTP_STATUS_VERSION_NOT_SUP = 505
+HTTP_STATUS_FIRST = HTTP_STATUS_CONTINUE
+HTTP_STATUS_LAST = HTTP_STATUS_VERSION_NOT_SUP
+HTTP_ADDREQ_INDEX_MASK = 0x0000FFFF
+HTTP_ADDREQ_FLAGS_MASK = (-65536)
+HTTP_ADDREQ_FLAG_ADD_IF_NEW = 0x10000000
+HTTP_ADDREQ_FLAG_ADD = 0x20000000
+HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA = 0x40000000
+HTTP_ADDREQ_FLAG_COALESCE_WITH_SEMICOLON = 0x01000000
+HTTP_ADDREQ_FLAG_COALESCE = HTTP_ADDREQ_FLAG_COALESCE_WITH_COMMA
+HTTP_ADDREQ_FLAG_REPLACE = (-2147483648)
+HSR_ASYNC = WININET_API_FLAG_ASYNC
+HSR_SYNC = WININET_API_FLAG_SYNC
+HSR_USE_CONTEXT = WININET_API_FLAG_USE_CONTEXT
+HSR_INITIATE = 0x00000008
+HSR_DOWNLOAD = 0x00000010
+HSR_CHUNKED = 0x00000020
+INTERNET_COOKIE_IS_SECURE = 0x01
+INTERNET_COOKIE_IS_SESSION = 0x02
+INTERNET_COOKIE_THIRD_PARTY = 0x10
+INTERNET_COOKIE_PROMPT_REQUIRED = 0x20
+INTERNET_COOKIE_EVALUATE_P3P = 0x40
+INTERNET_COOKIE_APPLY_P3P = 0x80
+INTERNET_COOKIE_P3P_ENABLED = 0x100
+INTERNET_COOKIE_IS_RESTRICTED = 0x200
+INTERNET_COOKIE_IE6 = 0x400
+INTERNET_COOKIE_IS_LEGACY = 0x800
+FLAG_ICC_FORCE_CONNECTION = 0x00000001
+FLAGS_ERROR_UI_FILTER_FOR_ERRORS = 0x01
+FLAGS_ERROR_UI_FLAGS_CHANGE_OPTIONS = 0x02
+FLAGS_ERROR_UI_FLAGS_GENERATE_DATA = 0x04
+FLAGS_ERROR_UI_FLAGS_NO_UI = 0x08
+FLAGS_ERROR_UI_SERIALIZE_DIALOGS = 0x10
+INTERNET_ERROR_BASE = 12000
+ERROR_INTERNET_OUT_OF_HANDLES = (INTERNET_ERROR_BASE + 1)
+ERROR_INTERNET_TIMEOUT = (INTERNET_ERROR_BASE + 2)
+ERROR_INTERNET_EXTENDED_ERROR = (INTERNET_ERROR_BASE + 3)
+ERROR_INTERNET_INTERNAL_ERROR = (INTERNET_ERROR_BASE + 4)
+ERROR_INTERNET_INVALID_URL = (INTERNET_ERROR_BASE + 5)
+ERROR_INTERNET_UNRECOGNIZED_SCHEME = (INTERNET_ERROR_BASE + 6)
+ERROR_INTERNET_NAME_NOT_RESOLVED = (INTERNET_ERROR_BASE + 7)
+ERROR_INTERNET_PROTOCOL_NOT_FOUND = (INTERNET_ERROR_BASE + 8)
+ERROR_INTERNET_INVALID_OPTION = (INTERNET_ERROR_BASE + 9)
+ERROR_INTERNET_BAD_OPTION_LENGTH = (INTERNET_ERROR_BASE + 10)
+ERROR_INTERNET_OPTION_NOT_SETTABLE = (INTERNET_ERROR_BASE + 11)
+ERROR_INTERNET_SHUTDOWN = (INTERNET_ERROR_BASE + 12)
+ERROR_INTERNET_INCORRECT_USER_NAME = (INTERNET_ERROR_BASE + 13)
+ERROR_INTERNET_INCORRECT_PASSWORD = (INTERNET_ERROR_BASE + 14)
+ERROR_INTERNET_LOGIN_FAILURE = (INTERNET_ERROR_BASE + 15)
+ERROR_INTERNET_INVALID_OPERATION = (INTERNET_ERROR_BASE + 16)
+ERROR_INTERNET_OPERATION_CANCELLED = (INTERNET_ERROR_BASE + 17)
+ERROR_INTERNET_INCORRECT_HANDLE_TYPE = (INTERNET_ERROR_BASE + 18)
+ERROR_INTERNET_INCORRECT_HANDLE_STATE = (INTERNET_ERROR_BASE + 19)
+ERROR_INTERNET_NOT_PROXY_REQUEST = (INTERNET_ERROR_BASE + 20)
+ERROR_INTERNET_REGISTRY_VALUE_NOT_FOUND = (INTERNET_ERROR_BASE + 21)
+ERROR_INTERNET_BAD_REGISTRY_PARAMETER = (INTERNET_ERROR_BASE + 22)
+ERROR_INTERNET_NO_DIRECT_ACCESS = (INTERNET_ERROR_BASE + 23)
+ERROR_INTERNET_NO_CONTEXT = (INTERNET_ERROR_BASE + 24)
+ERROR_INTERNET_NO_CALLBACK = (INTERNET_ERROR_BASE + 25)
+ERROR_INTERNET_REQUEST_PENDING = (INTERNET_ERROR_BASE + 26)
+ERROR_INTERNET_INCORRECT_FORMAT = (INTERNET_ERROR_BASE + 27)
+ERROR_INTERNET_ITEM_NOT_FOUND = (INTERNET_ERROR_BASE + 28)
+ERROR_INTERNET_CANNOT_CONNECT = (INTERNET_ERROR_BASE + 29)
+ERROR_INTERNET_CONNECTION_ABORTED = (INTERNET_ERROR_BASE + 30)
+ERROR_INTERNET_CONNECTION_RESET = (INTERNET_ERROR_BASE + 31)
+ERROR_INTERNET_FORCE_RETRY = (INTERNET_ERROR_BASE + 32)
+ERROR_INTERNET_INVALID_PROXY_REQUEST = (INTERNET_ERROR_BASE + 33)
+ERROR_INTERNET_NEED_UI = (INTERNET_ERROR_BASE + 34)
+ERROR_INTERNET_HANDLE_EXISTS = (INTERNET_ERROR_BASE + 36)
+ERROR_INTERNET_SEC_CERT_DATE_INVALID = (INTERNET_ERROR_BASE + 37)
+ERROR_INTERNET_SEC_CERT_CN_INVALID = (INTERNET_ERROR_BASE + 38)
+ERROR_INTERNET_HTTP_TO_HTTPS_ON_REDIR = (INTERNET_ERROR_BASE + 39)
+ERROR_INTERNET_HTTPS_TO_HTTP_ON_REDIR = (INTERNET_ERROR_BASE + 40)
+ERROR_INTERNET_MIXED_SECURITY = (INTERNET_ERROR_BASE + 41)
+ERROR_INTERNET_CHG_POST_IS_NON_SECURE = (INTERNET_ERROR_BASE + 42)
+ERROR_INTERNET_POST_IS_NON_SECURE = (INTERNET_ERROR_BASE + 43)
+ERROR_INTERNET_CLIENT_AUTH_CERT_NEEDED = (INTERNET_ERROR_BASE + 44)
+ERROR_INTERNET_INVALID_CA = (INTERNET_ERROR_BASE + 45)
+ERROR_INTERNET_CLIENT_AUTH_NOT_SETUP = (INTERNET_ERROR_BASE + 46)
+ERROR_INTERNET_ASYNC_THREAD_FAILED = (INTERNET_ERROR_BASE + 47)
+ERROR_INTERNET_REDIRECT_SCHEME_CHANGE = (INTERNET_ERROR_BASE + 48)
+ERROR_INTERNET_DIALOG_PENDING = (INTERNET_ERROR_BASE + 49)
+ERROR_INTERNET_RETRY_DIALOG = (INTERNET_ERROR_BASE + 50)
+ERROR_INTERNET_HTTPS_HTTP_SUBMIT_REDIR = (INTERNET_ERROR_BASE + 52)
+ERROR_INTERNET_INSERT_CDROM = (INTERNET_ERROR_BASE + 53)
+ERROR_INTERNET_FORTEZZA_LOGIN_NEEDED = (INTERNET_ERROR_BASE + 54)
+ERROR_INTERNET_SEC_CERT_ERRORS = (INTERNET_ERROR_BASE + 55)
+ERROR_INTERNET_SEC_CERT_NO_REV = (INTERNET_ERROR_BASE + 56)
+ERROR_INTERNET_SEC_CERT_REV_FAILED = (INTERNET_ERROR_BASE + 57)
+ERROR_FTP_TRANSFER_IN_PROGRESS = (INTERNET_ERROR_BASE + 110)
+ERROR_FTP_DROPPED = (INTERNET_ERROR_BASE + 111)
+ERROR_FTP_NO_PASSIVE_MODE = (INTERNET_ERROR_BASE + 112)
+ERROR_GOPHER_PROTOCOL_ERROR = (INTERNET_ERROR_BASE + 130)
+ERROR_GOPHER_NOT_FILE = (INTERNET_ERROR_BASE + 131)
+ERROR_GOPHER_DATA_ERROR = (INTERNET_ERROR_BASE + 132)
+ERROR_GOPHER_END_OF_DATA = (INTERNET_ERROR_BASE + 133)
+ERROR_GOPHER_INVALID_LOCATOR = (INTERNET_ERROR_BASE + 134)
+ERROR_GOPHER_INCORRECT_LOCATOR_TYPE = (INTERNET_ERROR_BASE + 135)
+ERROR_GOPHER_NOT_GOPHER_PLUS = (INTERNET_ERROR_BASE + 136)
+ERROR_GOPHER_ATTRIBUTE_NOT_FOUND = (INTERNET_ERROR_BASE + 137)
+ERROR_GOPHER_UNKNOWN_LOCATOR = (INTERNET_ERROR_BASE + 138)
+ERROR_HTTP_HEADER_NOT_FOUND = (INTERNET_ERROR_BASE + 150)
+ERROR_HTTP_DOWNLEVEL_SERVER = (INTERNET_ERROR_BASE + 151)
+ERROR_HTTP_INVALID_SERVER_RESPONSE = (INTERNET_ERROR_BASE + 152)
+ERROR_HTTP_INVALID_HEADER = (INTERNET_ERROR_BASE + 153)
+ERROR_HTTP_INVALID_QUERY_REQUEST = (INTERNET_ERROR_BASE + 154)
+ERROR_HTTP_HEADER_ALREADY_EXISTS = (INTERNET_ERROR_BASE + 155)
+ERROR_HTTP_REDIRECT_FAILED = (INTERNET_ERROR_BASE + 156)
+ERROR_HTTP_NOT_REDIRECTED = (INTERNET_ERROR_BASE + 160)
+ERROR_HTTP_COOKIE_NEEDS_CONFIRMATION = (INTERNET_ERROR_BASE + 161)
+ERROR_HTTP_COOKIE_DECLINED = (INTERNET_ERROR_BASE + 162)
+ERROR_HTTP_REDIRECT_NEEDS_CONFIRMATION = (INTERNET_ERROR_BASE + 168)
+ERROR_INTERNET_SECURITY_CHANNEL_ERROR = (INTERNET_ERROR_BASE + 157)
+ERROR_INTERNET_UNABLE_TO_CACHE_FILE = (INTERNET_ERROR_BASE + 158)
+ERROR_INTERNET_TCPIP_NOT_INSTALLED = (INTERNET_ERROR_BASE + 159)
+ERROR_INTERNET_DISCONNECTED = (INTERNET_ERROR_BASE + 163)
+ERROR_INTERNET_SERVER_UNREACHABLE = (INTERNET_ERROR_BASE + 164)
+ERROR_INTERNET_PROXY_SERVER_UNREACHABLE = (INTERNET_ERROR_BASE + 165)
+ERROR_INTERNET_BAD_AUTO_PROXY_SCRIPT = (INTERNET_ERROR_BASE + 166)
+ERROR_INTERNET_UNABLE_TO_DOWNLOAD_SCRIPT = (INTERNET_ERROR_BASE + 167)
+ERROR_INTERNET_SEC_INVALID_CERT = (INTERNET_ERROR_BASE + 169)
+ERROR_INTERNET_SEC_CERT_REVOKED = (INTERNET_ERROR_BASE + 170)
+ERROR_INTERNET_FAILED_DUETOSECURITYCHECK = (INTERNET_ERROR_BASE + 171)
+ERROR_INTERNET_NOT_INITIALIZED = (INTERNET_ERROR_BASE + 172)
+ERROR_INTERNET_NEED_MSN_SSPI_PKG = (INTERNET_ERROR_BASE + 173)
+ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY = (INTERNET_ERROR_BASE + 174)
+INTERNET_ERROR_LAST = ERROR_INTERNET_LOGIN_FAILURE_DISPLAY_ENTITY_BODY
+NORMAL_CACHE_ENTRY = 0x00000001
+STICKY_CACHE_ENTRY = 0x00000004
+EDITED_CACHE_ENTRY = 0x00000008
+TRACK_OFFLINE_CACHE_ENTRY = 0x00000010
+TRACK_ONLINE_CACHE_ENTRY = 0x00000020
+SPARSE_CACHE_ENTRY = 0x00010000
+COOKIE_CACHE_ENTRY = 0x00100000
+URLHISTORY_CACHE_ENTRY = 0x00200000
+URLCACHE_FIND_DEFAULT_FILTER = NORMAL_CACHE_ENTRY             \
+                                    |   COOKIE_CACHE_ENTRY             \
+                                    |   URLHISTORY_CACHE_ENTRY         \
+                                    |   TRACK_OFFLINE_CACHE_ENTRY      \
+                                    |   TRACK_ONLINE_CACHE_ENTRY       \
+                                    |   STICKY_CACHE_ENTRY
+CACHEGROUP_ATTRIBUTE_GET_ALL = (-1)
+CACHEGROUP_ATTRIBUTE_BASIC = 0x00000001
+CACHEGROUP_ATTRIBUTE_FLAG = 0x00000002
+CACHEGROUP_ATTRIBUTE_TYPE = 0x00000004
+CACHEGROUP_ATTRIBUTE_QUOTA = 0x00000008
+CACHEGROUP_ATTRIBUTE_GROUPNAME = 0x00000010
+CACHEGROUP_ATTRIBUTE_STORAGE = 0x00000020
+CACHEGROUP_FLAG_NONPURGEABLE = 0x00000001
+CACHEGROUP_FLAG_GIDONLY = 0x00000004
+CACHEGROUP_FLAG_FLUSHURL_ONDELETE = 0x00000002
+CACHEGROUP_SEARCH_ALL = 0x00000000
+CACHEGROUP_SEARCH_BYURL = 0x00000001
+CACHEGROUP_TYPE_INVALID = 0x00000001
+CACHEGROUP_READWRITE_MASK = \
+            CACHEGROUP_ATTRIBUTE_TYPE               \
+        |   CACHEGROUP_ATTRIBUTE_QUOTA              \
+        |   CACHEGROUP_ATTRIBUTE_GROUPNAME          \
+        |   CACHEGROUP_ATTRIBUTE_STORAGE
+GROUPNAME_MAX_LENGTH = 120
+GROUP_OWNER_STORAGE_SIZE = 4
+CACHE_ENTRY_ATTRIBUTE_FC = 0x00000004
+CACHE_ENTRY_HITRATE_FC = 0x00000010
+CACHE_ENTRY_MODTIME_FC = 0x00000040
+CACHE_ENTRY_EXPTIME_FC = 0x00000080
+CACHE_ENTRY_ACCTIME_FC = 0x00000100
+CACHE_ENTRY_SYNCTIME_FC = 0x00000200
+CACHE_ENTRY_HEADERINFO_FC = 0x00000400
+CACHE_ENTRY_EXEMPT_DELTA_FC = 0x00000800
+INTERNET_CACHE_GROUP_ADD = 0
+INTERNET_CACHE_GROUP_REMOVE = 1
+INTERNET_DIAL_FORCE_PROMPT = 0x2000
+INTERNET_DIAL_SHOW_OFFLINE = 0x4000
+INTERNET_DIAL_UNATTENDED = 0x8000
+INTERENT_GOONLINE_REFRESH = 0x00000001
+INTERENT_GOONLINE_MASK = 0x00000001
+INTERNET_AUTODIAL_FORCE_ONLINE = 1
+INTERNET_AUTODIAL_FORCE_UNATTENDED = 2
+INTERNET_AUTODIAL_FAILIFSECURITYCHECK = 4
+INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT = 8
+INTERNET_AUTODIAL_FLAGS_MASK = (INTERNET_AUTODIAL_FORCE_ONLINE | INTERNET_AUTODIAL_FORCE_UNATTENDED | INTERNET_AUTODIAL_FAILIFSECURITYCHECK | INTERNET_AUTODIAL_OVERRIDE_NET_PRESENT)
+PROXY_AUTO_DETECT_TYPE_DHCP = 1
+PROXY_AUTO_DETECT_TYPE_DNS_A = 2
+INTERNET_CONNECTION_MODEM = 0x01
+INTERNET_CONNECTION_LAN = 0x02
+INTERNET_CONNECTION_PROXY = 0x04
+INTERNET_CONNECTION_MODEM_BUSY = 0x08
+INTERNET_RAS_INSTALLED = 0x10
+INTERNET_CONNECTION_OFFLINE = 0x20
+INTERNET_CONNECTION_CONFIGURED = 0x40
+INTERNET_CUSTOMDIAL_CONNECT = 0
+INTERNET_CUSTOMDIAL_UNATTENDED = 1
+INTERNET_CUSTOMDIAL_DISCONNECT = 2
+INTERNET_CUSTOMDIAL_SHOWOFFLINE = 4
+INTERNET_CUSTOMDIAL_SAFE_FOR_UNATTENDED = 1
+INTERNET_CUSTOMDIAL_WILL_SUPPLY_STATE = 2
+INTERNET_CUSTOMDIAL_CAN_HANGUP = 4
+INTERNET_DIALSTATE_DISCONNECTED = 1
+INTERNET_IDENTITY_FLAG_PRIVATE_CACHE = 0x01
+INTERNET_IDENTITY_FLAG_SHARED_CACHE = 0x02
+INTERNET_IDENTITY_FLAG_CLEAR_DATA = 0x04
+INTERNET_IDENTITY_FLAG_CLEAR_COOKIES = 0x08
+INTERNET_IDENTITY_FLAG_CLEAR_HISTORY = 0x10
+INTERNET_IDENTITY_FLAG_CLEAR_CONTENT = 0x20
+INTERNET_SUPPRESS_RESET_ALL = 0x00
+INTERNET_SUPPRESS_COOKIE_POLICY = 0x01
+INTERNET_SUPPRESS_COOKIE_POLICY_RESET = 0x02
+PRIVACY_TEMPLATE_NO_COOKIES = 0
+PRIVACY_TEMPLATE_HIGH = 1
+PRIVACY_TEMPLATE_MEDIUM_HIGH = 2
+PRIVACY_TEMPLATE_MEDIUM = 3
+PRIVACY_TEMPLATE_MEDIUM_LOW = 4
+PRIVACY_TEMPLATE_LOW = 5
+PRIVACY_TEMPLATE_CUSTOM = 100
+PRIVACY_TEMPLATE_ADVANCED = 101
+PRIVACY_TEMPLATE_MAX = PRIVACY_TEMPLATE_LOW
+PRIVACY_TYPE_FIRST_PARTY = 0
+PRIVACY_TYPE_THIRD_PARTY = 1
+
+# Included from poppack.h
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32netcon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32netcon.py
new file mode 100644
index 0000000..eaf717be
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32netcon.py
@@ -0,0 +1,652 @@
+# Generated by h2py from lmaccess.h
+
+# Included from lmcons.h
+CNLEN = 15
+LM20_CNLEN = 15
+DNLEN = CNLEN
+LM20_DNLEN = LM20_CNLEN
+UNCLEN = (CNLEN+2)
+LM20_UNCLEN = (LM20_CNLEN+2)
+NNLEN = 80
+LM20_NNLEN = 12
+RMLEN = (UNCLEN+1+NNLEN)
+LM20_RMLEN = (LM20_UNCLEN+1+LM20_NNLEN)
+SNLEN = 80
+LM20_SNLEN = 15
+STXTLEN = 256
+LM20_STXTLEN = 63
+PATHLEN = 256
+LM20_PATHLEN = 256
+DEVLEN = 80
+LM20_DEVLEN = 8
+EVLEN = 16
+UNLEN = 256
+LM20_UNLEN = 20
+GNLEN = UNLEN
+LM20_GNLEN = LM20_UNLEN
+PWLEN = 256
+LM20_PWLEN = 14
+SHPWLEN = 8
+CLTYPE_LEN = 12
+MAXCOMMENTSZ = 256
+LM20_MAXCOMMENTSZ = 48
+QNLEN = NNLEN
+LM20_QNLEN = LM20_NNLEN
+ALERTSZ = 128
+NETBIOS_NAME_LEN = 16
+CRYPT_KEY_LEN = 7
+CRYPT_TXT_LEN = 8
+ENCRYPTED_PWLEN = 16
+SESSION_PWLEN = 24
+SESSION_CRYPT_KLEN = 21
+PARMNUM_ALL = 0
+PARM_ERROR_NONE = 0
+PARMNUM_BASE_INFOLEVEL = 1000
+NULL = 0
+PLATFORM_ID_DOS = 300
+PLATFORM_ID_OS2 = 400
+PLATFORM_ID_NT = 500
+PLATFORM_ID_OSF = 600
+PLATFORM_ID_VMS = 700
+MAX_LANMAN_MESSAGE_ID = 5799
+UF_SCRIPT = 1
+UF_ACCOUNTDISABLE = 2
+UF_HOMEDIR_REQUIRED = 8
+UF_LOCKOUT = 16
+UF_PASSWD_NOTREQD = 32
+UF_PASSWD_CANT_CHANGE = 64
+UF_TEMP_DUPLICATE_ACCOUNT = 256
+UF_NORMAL_ACCOUNT = 512
+UF_INTERDOMAIN_TRUST_ACCOUNT = 2048
+UF_WORKSTATION_TRUST_ACCOUNT = 4096
+UF_SERVER_TRUST_ACCOUNT = 8192
+UF_MACHINE_ACCOUNT_MASK = ( UF_INTERDOMAIN_TRUST_ACCOUNT | \
+                                  UF_WORKSTATION_TRUST_ACCOUNT | \
+                                  UF_SERVER_TRUST_ACCOUNT )
+UF_ACCOUNT_TYPE_MASK = ( \
+                    UF_TEMP_DUPLICATE_ACCOUNT | \
+                    UF_NORMAL_ACCOUNT | \
+                    UF_INTERDOMAIN_TRUST_ACCOUNT | \
+                    UF_WORKSTATION_TRUST_ACCOUNT | \
+                    UF_SERVER_TRUST_ACCOUNT \
+                )
+UF_DONT_EXPIRE_PASSWD = 65536
+UF_MNS_LOGON_ACCOUNT = 131072
+UF_SETTABLE_BITS = ( \
+                    UF_SCRIPT | \
+                    UF_ACCOUNTDISABLE | \
+                    UF_LOCKOUT | \
+                    UF_HOMEDIR_REQUIRED  | \
+                    UF_PASSWD_NOTREQD | \
+                    UF_PASSWD_CANT_CHANGE | \
+                    UF_ACCOUNT_TYPE_MASK | \
+                    UF_DONT_EXPIRE_PASSWD | \
+                    UF_MNS_LOGON_ACCOUNT \
+                )
+FILTER_TEMP_DUPLICATE_ACCOUNT = (1)
+FILTER_NORMAL_ACCOUNT = (2)
+FILTER_INTERDOMAIN_TRUST_ACCOUNT = (8)
+FILTER_WORKSTATION_TRUST_ACCOUNT = (16)
+FILTER_SERVER_TRUST_ACCOUNT = (32)
+LG_INCLUDE_INDIRECT = (1)
+AF_OP_PRINT = 1
+AF_OP_COMM = 2
+AF_OP_SERVER = 4
+AF_OP_ACCOUNTS = 8
+AF_SETTABLE_BITS = (AF_OP_PRINT | AF_OP_COMM | \
+                                AF_OP_SERVER | AF_OP_ACCOUNTS)
+UAS_ROLE_STANDALONE = 0
+UAS_ROLE_MEMBER = 1
+UAS_ROLE_BACKUP = 2
+UAS_ROLE_PRIMARY = 3
+USER_NAME_PARMNUM = 1
+USER_PASSWORD_PARMNUM = 3
+USER_PASSWORD_AGE_PARMNUM = 4
+USER_PRIV_PARMNUM = 5
+USER_HOME_DIR_PARMNUM = 6
+USER_COMMENT_PARMNUM = 7
+USER_FLAGS_PARMNUM = 8
+USER_SCRIPT_PATH_PARMNUM = 9
+USER_AUTH_FLAGS_PARMNUM = 10
+USER_FULL_NAME_PARMNUM = 11
+USER_USR_COMMENT_PARMNUM = 12
+USER_PARMS_PARMNUM = 13
+USER_WORKSTATIONS_PARMNUM = 14
+USER_LAST_LOGON_PARMNUM = 15
+USER_LAST_LOGOFF_PARMNUM = 16
+USER_ACCT_EXPIRES_PARMNUM = 17
+USER_MAX_STORAGE_PARMNUM = 18
+USER_UNITS_PER_WEEK_PARMNUM = 19
+USER_LOGON_HOURS_PARMNUM = 20
+USER_PAD_PW_COUNT_PARMNUM = 21
+USER_NUM_LOGONS_PARMNUM = 22
+USER_LOGON_SERVER_PARMNUM = 23
+USER_COUNTRY_CODE_PARMNUM = 24
+USER_CODE_PAGE_PARMNUM = 25
+USER_PRIMARY_GROUP_PARMNUM = 51
+USER_PROFILE = 52
+USER_PROFILE_PARMNUM = 52
+USER_HOME_DIR_DRIVE_PARMNUM = 53
+USER_NAME_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_NAME_PARMNUM)
+USER_PASSWORD_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_PASSWORD_PARMNUM)
+USER_PASSWORD_AGE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_PASSWORD_AGE_PARMNUM)
+USER_PRIV_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_PRIV_PARMNUM)
+USER_HOME_DIR_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_HOME_DIR_PARMNUM)
+USER_COMMENT_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_COMMENT_PARMNUM)
+USER_FLAGS_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_FLAGS_PARMNUM)
+USER_SCRIPT_PATH_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_SCRIPT_PATH_PARMNUM)
+USER_AUTH_FLAGS_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_AUTH_FLAGS_PARMNUM)
+USER_FULL_NAME_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_FULL_NAME_PARMNUM)
+USER_USR_COMMENT_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_USR_COMMENT_PARMNUM)
+USER_PARMS_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_PARMS_PARMNUM)
+USER_WORKSTATIONS_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_WORKSTATIONS_PARMNUM)
+USER_LAST_LOGON_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_LAST_LOGON_PARMNUM)
+USER_LAST_LOGOFF_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_LAST_LOGOFF_PARMNUM)
+USER_ACCT_EXPIRES_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_ACCT_EXPIRES_PARMNUM)
+USER_MAX_STORAGE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_MAX_STORAGE_PARMNUM)
+USER_UNITS_PER_WEEK_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_UNITS_PER_WEEK_PARMNUM)
+USER_LOGON_HOURS_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_LOGON_HOURS_PARMNUM)
+USER_PAD_PW_COUNT_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_PAD_PW_COUNT_PARMNUM)
+USER_NUM_LOGONS_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_NUM_LOGONS_PARMNUM)
+USER_LOGON_SERVER_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_LOGON_SERVER_PARMNUM)
+USER_COUNTRY_CODE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_COUNTRY_CODE_PARMNUM)
+USER_CODE_PAGE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_CODE_PAGE_PARMNUM)
+USER_PRIMARY_GROUP_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_PRIMARY_GROUP_PARMNUM)
+USER_HOME_DIR_DRIVE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + USER_HOME_DIR_DRIVE_PARMNUM)
+NULL_USERSETINFO_PASSWD = "              "
+UNITS_PER_DAY = 24
+UNITS_PER_WEEK = UNITS_PER_DAY * 7
+USER_PRIV_MASK = 3
+USER_PRIV_GUEST = 0
+USER_PRIV_USER = 1
+USER_PRIV_ADMIN = 2
+MAX_PASSWD_LEN = PWLEN
+DEF_MIN_PWLEN = 6
+DEF_PWUNIQUENESS = 5
+DEF_MAX_PWHIST = 8
+DEF_MAX_BADPW = 0
+VALIDATED_LOGON = 0
+PASSWORD_EXPIRED = 2
+NON_VALIDATED_LOGON = 3
+VALID_LOGOFF = 1
+MODALS_MIN_PASSWD_LEN_PARMNUM = 1
+MODALS_MAX_PASSWD_AGE_PARMNUM = 2
+MODALS_MIN_PASSWD_AGE_PARMNUM = 3
+MODALS_FORCE_LOGOFF_PARMNUM = 4
+MODALS_PASSWD_HIST_LEN_PARMNUM = 5
+MODALS_ROLE_PARMNUM = 6
+MODALS_PRIMARY_PARMNUM = 7
+MODALS_DOMAIN_NAME_PARMNUM = 8
+MODALS_DOMAIN_ID_PARMNUM = 9
+MODALS_LOCKOUT_DURATION_PARMNUM = 10
+MODALS_LOCKOUT_OBSERVATION_WINDOW_PARMNUM = 11
+MODALS_LOCKOUT_THRESHOLD_PARMNUM = 12
+MODALS_MIN_PASSWD_LEN_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_MIN_PASSWD_LEN_PARMNUM)
+MODALS_MAX_PASSWD_AGE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_MAX_PASSWD_AGE_PARMNUM)
+MODALS_MIN_PASSWD_AGE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_MIN_PASSWD_AGE_PARMNUM)
+MODALS_FORCE_LOGOFF_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_FORCE_LOGOFF_PARMNUM)
+MODALS_PASSWD_HIST_LEN_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_PASSWD_HIST_LEN_PARMNUM)
+MODALS_ROLE_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_ROLE_PARMNUM)
+MODALS_PRIMARY_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_PRIMARY_PARMNUM)
+MODALS_DOMAIN_NAME_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_DOMAIN_NAME_PARMNUM)
+MODALS_DOMAIN_ID_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + MODALS_DOMAIN_ID_PARMNUM)
+GROUPIDMASK = 32768
+GROUP_ALL_PARMNUM = 0
+GROUP_NAME_PARMNUM = 1
+GROUP_COMMENT_PARMNUM = 2
+GROUP_ATTRIBUTES_PARMNUM = 3
+GROUP_ALL_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + GROUP_ALL_PARMNUM)
+GROUP_NAME_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + GROUP_NAME_PARMNUM)
+GROUP_COMMENT_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + GROUP_COMMENT_PARMNUM)
+GROUP_ATTRIBUTES_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + GROUP_ATTRIBUTES_PARMNUM)
+LOCALGROUP_NAME_PARMNUM = 1
+LOCALGROUP_COMMENT_PARMNUM = 2
+MAXPERMENTRIES = 64
+ACCESS_NONE = 0
+ACCESS_READ = 1
+ACCESS_WRITE = 2
+ACCESS_CREATE = 4
+ACCESS_EXEC = 8
+ACCESS_DELETE = 16
+ACCESS_ATRIB = 32
+ACCESS_PERM = 64
+ACCESS_GROUP = 32768
+ACCESS_AUDIT = 1
+ACCESS_SUCCESS_OPEN = 16
+ACCESS_SUCCESS_WRITE = 32
+ACCESS_SUCCESS_DELETE = 64
+ACCESS_SUCCESS_ACL = 128
+ACCESS_SUCCESS_MASK = 240
+ACCESS_FAIL_OPEN = 256
+ACCESS_FAIL_WRITE = 512
+ACCESS_FAIL_DELETE = 1024
+ACCESS_FAIL_ACL = 2048
+ACCESS_FAIL_MASK = 3840
+ACCESS_FAIL_SHIFT = 4
+ACCESS_RESOURCE_NAME_PARMNUM = 1
+ACCESS_ATTR_PARMNUM = 2
+ACCESS_COUNT_PARMNUM = 3
+ACCESS_ACCESS_LIST_PARMNUM = 4
+ACCESS_RESOURCE_NAME_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + ACCESS_RESOURCE_NAME_PARMNUM)
+ACCESS_ATTR_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + ACCESS_ATTR_PARMNUM)
+ACCESS_COUNT_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + ACCESS_COUNT_PARMNUM)
+ACCESS_ACCESS_LIST_INFOLEVEL = \
+            (PARMNUM_BASE_INFOLEVEL + ACCESS_ACCESS_LIST_PARMNUM)
+ACCESS_LETTERS = "RWCXDAP         "
+NETLOGON_CONTROL_QUERY = 1
+NETLOGON_CONTROL_REPLICATE = 2
+NETLOGON_CONTROL_SYNCHRONIZE = 3
+NETLOGON_CONTROL_PDC_REPLICATE = 4
+NETLOGON_CONTROL_REDISCOVER = 5
+NETLOGON_CONTROL_TC_QUERY = 6
+NETLOGON_CONTROL_TRANSPORT_NOTIFY = 7
+NETLOGON_CONTROL_FIND_USER = 8
+NETLOGON_CONTROL_UNLOAD_NETLOGON_DLL = 65531
+NETLOGON_CONTROL_BACKUP_CHANGE_LOG = 65532
+NETLOGON_CONTROL_TRUNCATE_LOG = 65533
+NETLOGON_CONTROL_SET_DBFLAG = 65534
+NETLOGON_CONTROL_BREAKPOINT = 65535
+NETLOGON_REPLICATION_NEEDED = 1
+NETLOGON_REPLICATION_IN_PROGRESS = 2
+NETLOGON_FULL_SYNC_REPLICATION = 4
+NETLOGON_REDO_NEEDED = 8
+
######################
# Manual stuff

# TEXT mirrors the Win32 TEXT() macro: a no-op for Python strings.
TEXT = lambda x: x

MAX_PREFERRED_LENGTH = -1
PARM_ERROR_UNKNOWN = -1
MESSAGE_FILENAME = TEXT("NETMSG")
OS2MSG_FILENAME = TEXT("BASE")
HELP_MSG_FILENAME = TEXT("NETH")
BACKUP_MSG_FILENAME = TEXT("BAK.MSG")
# -1 acts as the "unlimited / forever" sentinel for the values below.
TIMEQ_FOREVER = -1
USER_MAXSTORAGE_UNLIMITED = -1
USER_NO_LOGOFF = -1
DEF_MAX_PWAGE = TIMEQ_FOREVER
DEF_MIN_PWAGE = 0
DEF_FORCE_LOGOFF = -1
# Seconds in one day.  Was "01*24*3600": the leading zero made "01" an octal
# literal (still 1 in Python 2, but a SyntaxError in Python 3); decimal 1
# keeps the identical value.
ONE_DAY = 1*24*3600
GROUP_SPECIALGRP_USERS = "USERS"
GROUP_SPECIALGRP_ADMINS = "ADMINS"
GROUP_SPECIALGRP_GUESTS = "GUESTS"
GROUP_SPECIALGRP_LOCAL = "LOCAL"
# Convenience mask: every ACCESS_* permission bit defined above.
ACCESS_ALL = ( ACCESS_READ | ACCESS_WRITE | ACCESS_CREATE | ACCESS_EXEC | ACCESS_DELETE | ACCESS_ATRIB | ACCESS_PERM )
+
# From lmserver.h
SV_PLATFORM_ID_OS2 = 400
SV_PLATFORM_ID_NT = 500
MAJOR_VERSION_MASK = 15
# SV_TYPE_* are bit flags describing the roles a server advertises.
SV_TYPE_WORKSTATION = 1
SV_TYPE_SERVER = 2
SV_TYPE_SQLSERVER = 4
SV_TYPE_DOMAIN_CTRL = 8
SV_TYPE_DOMAIN_BAKCTRL = 16
SV_TYPE_TIME_SOURCE = 32
SV_TYPE_AFP = 64
SV_TYPE_NOVELL = 128
SV_TYPE_DOMAIN_MEMBER = 256
SV_TYPE_PRINTQ_SERVER = 512
SV_TYPE_DIALIN_SERVER = 1024
SV_TYPE_XENIX_SERVER = 2048
SV_TYPE_SERVER_UNIX = SV_TYPE_XENIX_SERVER
SV_TYPE_NT = 4096
SV_TYPE_WFW = 8192
SV_TYPE_SERVER_MFPN = 16384
SV_TYPE_SERVER_NT = 32768
SV_TYPE_POTENTIAL_BROWSER = 65536
SV_TYPE_BACKUP_BROWSER = 131072
SV_TYPE_MASTER_BROWSER = 262144
SV_TYPE_DOMAIN_MASTER = 524288
SV_TYPE_SERVER_OSF = 1048576
SV_TYPE_SERVER_VMS = 2097152
SV_TYPE_WINDOWS = 4194304
SV_TYPE_DFS = 8388608
SV_TYPE_CLUSTER_NT = 16777216
SV_TYPE_DCE = 268435456
SV_TYPE_ALTERNATE_XPORT = 536870912
SV_TYPE_LOCAL_LIST_ONLY = 1073741824
SV_TYPE_DOMAIN_ENUM = -2147483648
SV_TYPE_ALL = -1
# Was "(-1L)": the Python 2-only long-literal suffix is a SyntaxError in
# Python 3; plain -1 has the identical value.
SV_NODISC = -1
SV_USERSECURITY = 1
SV_SHARESECURITY = 0
SV_HIDDEN = 1
SV_VISIBLE = 0
SV_PLATFORM_ID_PARMNUM = 101
SV_NAME_PARMNUM = 102
SV_VERSION_MAJOR_PARMNUM = 103
SV_VERSION_MINOR_PARMNUM = 104
SV_TYPE_PARMNUM = 105
SV_COMMENT_PARMNUM = 5
SV_USERS_PARMNUM = 107
SV_DISC_PARMNUM = 10
SV_HIDDEN_PARMNUM = 16
SV_ANNOUNCE_PARMNUM = 17
SV_ANNDELTA_PARMNUM = 18
SV_USERPATH_PARMNUM = 112
SV_ULIST_MTIME_PARMNUM = 401
SV_GLIST_MTIME_PARMNUM = 402
SV_ALIST_MTIME_PARMNUM = 403
SV_ALERTS_PARMNUM = 11
SV_SECURITY_PARMNUM = 405
SV_NUMADMIN_PARMNUM = 406
SV_LANMASK_PARMNUM = 407
SV_GUESTACC_PARMNUM = 408
SV_CHDEVQ_PARMNUM = 410
SV_CHDEVJOBS_PARMNUM = 411
SV_CONNECTIONS_PARMNUM = 412
SV_SHARES_PARMNUM = 413
SV_OPENFILES_PARMNUM = 414
SV_SESSREQS_PARMNUM = 417
SV_ACTIVELOCKS_PARMNUM = 419
SV_NUMREQBUF_PARMNUM = 420
SV_NUMBIGBUF_PARMNUM = 422
SV_NUMFILETASKS_PARMNUM = 423
SV_ALERTSCHED_PARMNUM = 37
SV_ERRORALERT_PARMNUM = 38
SV_LOGONALERT_PARMNUM = 39
SV_ACCESSALERT_PARMNUM = 40
SV_DISKALERT_PARMNUM = 41
SV_NETIOALERT_PARMNUM = 42
SV_MAXAUDITSZ_PARMNUM = 43
SV_SRVHEURISTICS_PARMNUM = 431
SV_SESSOPENS_PARMNUM = 501
SV_SESSVCS_PARMNUM = 502
SV_OPENSEARCH_PARMNUM = 503
SV_SIZREQBUF_PARMNUM = 504
SV_INITWORKITEMS_PARMNUM = 505
SV_MAXWORKITEMS_PARMNUM = 506
SV_RAWWORKITEMS_PARMNUM = 507
SV_IRPSTACKSIZE_PARMNUM = 508
SV_MAXRAWBUFLEN_PARMNUM = 509
SV_SESSUSERS_PARMNUM = 510
SV_SESSCONNS_PARMNUM = 511
SV_MAXNONPAGEDMEMORYUSAGE_PARMNUM = 512
SV_MAXPAGEDMEMORYUSAGE_PARMNUM = 513
SV_ENABLESOFTCOMPAT_PARMNUM = 514
SV_ENABLEFORCEDLOGOFF_PARMNUM = 515
SV_TIMESOURCE_PARMNUM = 516
SV_ACCEPTDOWNLEVELAPIS_PARMNUM = 517
SV_LMANNOUNCE_PARMNUM = 518
SV_DOMAIN_PARMNUM = 519
SV_MAXCOPYREADLEN_PARMNUM = 520
SV_MAXCOPYWRITELEN_PARMNUM = 521
SV_MINKEEPSEARCH_PARMNUM = 522
SV_MAXKEEPSEARCH_PARMNUM = 523
SV_MINKEEPCOMPLSEARCH_PARMNUM = 524
SV_MAXKEEPCOMPLSEARCH_PARMNUM = 525
SV_THREADCOUNTADD_PARMNUM = 526
SV_NUMBLOCKTHREADS_PARMNUM = 527
SV_SCAVTIMEOUT_PARMNUM = 528
SV_MINRCVQUEUE_PARMNUM = 529
SV_MINFREEWORKITEMS_PARMNUM = 530
SV_XACTMEMSIZE_PARMNUM = 531
SV_THREADPRIORITY_PARMNUM = 532
SV_MAXMPXCT_PARMNUM = 533
SV_OPLOCKBREAKWAIT_PARMNUM = 534
SV_OPLOCKBREAKRESPONSEWAIT_PARMNUM = 535
SV_ENABLEOPLOCKS_PARMNUM = 536
SV_ENABLEOPLOCKFORCECLOSE_PARMNUM = 537
SV_ENABLEFCBOPENS_PARMNUM = 538
SV_ENABLERAW_PARMNUM = 539
SV_ENABLESHAREDNETDRIVES_PARMNUM = 540
SV_MINFREECONNECTIONS_PARMNUM = 541
SV_MAXFREECONNECTIONS_PARMNUM = 542
SV_INITSESSTABLE_PARMNUM = 543
SV_INITCONNTABLE_PARMNUM = 544
SV_INITFILETABLE_PARMNUM = 545
SV_INITSEARCHTABLE_PARMNUM = 546
SV_ALERTSCHEDULE_PARMNUM = 547
SV_ERRORTHRESHOLD_PARMNUM = 548
SV_NETWORKERRORTHRESHOLD_PARMNUM = 549
SV_DISKSPACETHRESHOLD_PARMNUM = 550
SV_MAXLINKDELAY_PARMNUM = 552
SV_MINLINKTHROUGHPUT_PARMNUM = 553
SV_LINKINFOVALIDTIME_PARMNUM = 554
SV_SCAVQOSINFOUPDATETIME_PARMNUM = 555
SV_MAXWORKITEMIDLETIME_PARMNUM = 556
SV_MAXRAWWORKITEMS_PARMNUM = 557
SV_PRODUCTTYPE_PARMNUM = 560
SV_SERVERSIZE_PARMNUM = 561
SV_CONNECTIONLESSAUTODISC_PARMNUM = 562
SV_SHARINGVIOLATIONRETRIES_PARMNUM = 563
SV_SHARINGVIOLATIONDELAY_PARMNUM = 564
SV_MAXGLOBALOPENSEARCH_PARMNUM = 565
SV_REMOVEDUPLICATESEARCHES_PARMNUM = 566
SV_LOCKVIOLATIONRETRIES_PARMNUM = 567
SV_LOCKVIOLATIONOFFSET_PARMNUM = 568
SV_LOCKVIOLATIONDELAY_PARMNUM = 569
SV_MDLREADSWITCHOVER_PARMNUM = 570
SV_CACHEDOPENLIMIT_PARMNUM = 571
SV_CRITICALTHREADS_PARMNUM = 572
SV_RESTRICTNULLSESSACCESS_PARMNUM = 573
SV_ENABLEWFW311DIRECTIPX_PARMNUM = 574
SV_OTHERQUEUEAFFINITY_PARMNUM = 575
SV_QUEUESAMPLESECS_PARMNUM = 576
SV_BALANCECOUNT_PARMNUM = 577
SV_PREFERREDAFFINITY_PARMNUM = 578
SV_MAXFREERFCBS_PARMNUM = 579
SV_MAXFREEMFCBS_PARMNUM = 580
SV_MAXFREELFCBS_PARMNUM = 581
SV_MAXFREEPAGEDPOOLCHUNKS_PARMNUM = 582
SV_MINPAGEDPOOLCHUNKSIZE_PARMNUM = 583
SV_MAXPAGEDPOOLCHUNKSIZE_PARMNUM = 584
SV_SENDSFROMPREFERREDPROCESSOR_PARMNUM = 585
SV_MAXTHREADSPERQUEUE_PARMNUM = 586
SV_CACHEDDIRECTORYLIMIT_PARMNUM = 587
SV_MAXCOPYLENGTH_PARMNUM = 588
SV_ENABLEBULKTRANSFER_PARMNUM = 589
SV_ENABLECOMPRESSION_PARMNUM = 590
SV_AUTOSHAREWKS_PARMNUM = 591
SV_AUTOSHARESERVER_PARMNUM = 592
SV_ENABLESECURITYSIGNATURE_PARMNUM = 593
SV_REQUIRESECURITYSIGNATURE_PARMNUM = 594
SV_MINCLIENTBUFFERSIZE_PARMNUM = 595
SV_CONNECTIONNOSESSIONSTIMEOUT_PARMNUM = 596
SVI1_NUM_ELEMENTS = 5
SVI2_NUM_ELEMENTS = 40
SVI3_NUM_ELEMENTS = 44
SW_AUTOPROF_LOAD_MASK = 1
SW_AUTOPROF_SAVE_MASK = 2
SV_MAX_SRV_HEUR_LEN = 32
SV_USERS_PER_LICENSE = 5
SVTI2_REMAP_PIPE_NAMES = 2

# Generated by h2py from lmshare.h
SHARE_NETNAME_PARMNUM = 1
SHARE_TYPE_PARMNUM = 3
SHARE_REMARK_PARMNUM = 4
SHARE_PERMISSIONS_PARMNUM = 5
SHARE_MAX_USES_PARMNUM = 6
SHARE_CURRENT_USES_PARMNUM = 7
SHARE_PATH_PARMNUM = 8
SHARE_PASSWD_PARMNUM = 9
SHARE_FILE_SD_PARMNUM = 501
SHI1_NUM_ELEMENTS = 4
SHI2_NUM_ELEMENTS = 10
STYPE_DISKTREE = 0
STYPE_PRINTQ = 1
STYPE_DEVICE = 2
STYPE_IPC = 3
STYPE_SPECIAL = -2147483648
SHI1005_FLAGS_DFS = 1
SHI1005_FLAGS_DFS_ROOT = 2
COW_PERMACHINE = 4
COW_PERUSER = 8
CSC_CACHEABLE = 16
CSC_NOFLOWOPS = 32
CSC_AUTO_INWARD = 64
CSC_AUTO_OUTWARD = 128
SHI1005_VALID_FLAGS_SET = (   CSC_CACHEABLE   | \
                                    CSC_NOFLOWOPS   | \
                                    CSC_AUTO_INWARD | \
                                    CSC_AUTO_OUTWARD| \
                                    COW_PERMACHINE  | \
                                    COW_PERUSER    )
SHI1007_VALID_FLAGS_SET = SHI1005_VALID_FLAGS_SET
SESS_GUEST = 1
SESS_NOENCRYPTION = 2
SESI1_NUM_ELEMENTS = 8
SESI2_NUM_ELEMENTS = 9
PERM_FILE_READ = 1
PERM_FILE_WRITE = 2
PERM_FILE_CREATE = 4

# Generated by h2py from d:\mssdk\include\winnetwk.h
WNNC_NET_MSNET = 65536
WNNC_NET_LANMAN = 131072
WNNC_NET_NETWARE = 196608
WNNC_NET_VINES = 262144
WNNC_NET_10NET = 327680
WNNC_NET_LOCUS = 393216
WNNC_NET_SUN_PC_NFS = 458752
WNNC_NET_LANSTEP = 524288
WNNC_NET_9TILES = 589824
WNNC_NET_LANTASTIC = 655360
WNNC_NET_AS400 = 720896
WNNC_NET_FTP_NFS = 786432
WNNC_NET_PATHWORKS = 851968
WNNC_NET_LIFENET = 917504
WNNC_NET_POWERLAN = 983040
WNNC_NET_BWNFS = 1048576
WNNC_NET_COGENT = 1114112
WNNC_NET_FARALLON = 1179648
WNNC_NET_APPLETALK = 1245184
WNNC_NET_INTERGRAPH = 1310720
WNNC_NET_SYMFONET = 1376256
WNNC_NET_CLEARCASE = 1441792
WNNC_NET_FRONTIER = 1507328
WNNC_NET_BMC = 1572864
WNNC_NET_DCE = 1638400
WNNC_NET_DECORB = 2097152
WNNC_NET_PROTSTOR = 2162688
WNNC_NET_FJ_REDIR = 2228224
WNNC_NET_DISTINCT = 2293760
WNNC_NET_TWINS = 2359296
WNNC_NET_RDR2SAMPLE = 2424832
RESOURCE_CONNECTED = 1
RESOURCE_GLOBALNET = 2
RESOURCE_REMEMBERED = 3
RESOURCE_RECENT = 4
RESOURCE_CONTEXT = 5
RESOURCETYPE_ANY = 0
RESOURCETYPE_DISK = 1
RESOURCETYPE_PRINT = 2
RESOURCETYPE_RESERVED = 8
RESOURCETYPE_UNKNOWN = -1
RESOURCEUSAGE_CONNECTABLE = 1
RESOURCEUSAGE_CONTAINER = 2
RESOURCEUSAGE_NOLOCALDEVICE = 4
RESOURCEUSAGE_SIBLING = 8
RESOURCEUSAGE_ATTACHED = 16
RESOURCEUSAGE_ALL = (RESOURCEUSAGE_CONNECTABLE | RESOURCEUSAGE_CONTAINER | RESOURCEUSAGE_ATTACHED)
RESOURCEUSAGE_RESERVED = -2147483648
RESOURCEDISPLAYTYPE_GENERIC = 0
RESOURCEDISPLAYTYPE_DOMAIN = 1
RESOURCEDISPLAYTYPE_SERVER = 2
RESOURCEDISPLAYTYPE_SHARE = 3
RESOURCEDISPLAYTYPE_FILE = 4
RESOURCEDISPLAYTYPE_GROUP = 5
RESOURCEDISPLAYTYPE_NETWORK = 6
RESOURCEDISPLAYTYPE_ROOT = 7
RESOURCEDISPLAYTYPE_SHAREADMIN = 8
RESOURCEDISPLAYTYPE_DIRECTORY = 9
RESOURCEDISPLAYTYPE_TREE = 10
RESOURCEDISPLAYTYPE_NDSCONTAINER = 11
NETPROPERTY_PERSISTENT = 1
CONNECT_UPDATE_PROFILE = 1
CONNECT_UPDATE_RECENT = 2
CONNECT_TEMPORARY = 4
CONNECT_INTERACTIVE = 8
CONNECT_PROMPT = 16
CONNECT_NEED_DRIVE = 32
CONNECT_REFCOUNT = 64
CONNECT_REDIRECT = 128
CONNECT_LOCALDRIVE = 256
CONNECT_CURRENT_MEDIA = 512
CONNECT_DEFERRED = 1024
CONNECT_RESERVED = -16777216
CONNDLG_RO_PATH = 1
CONNDLG_CONN_POINT = 2
CONNDLG_USE_MRU = 4
CONNDLG_HIDE_BOX = 8
CONNDLG_PERSIST = 16
CONNDLG_NOT_PERSIST = 32
DISC_UPDATE_PROFILE = 1
DISC_NO_FORCE = 64
UNIVERSAL_NAME_INFO_LEVEL = 1
REMOTE_NAME_INFO_LEVEL = 2
WNFMT_MULTILINE = 1
WNFMT_ABBREVIATED = 2
WNFMT_INENUM = 16
WNFMT_CONNECTION = 32
NETINFO_DLL16 = 1
NETINFO_DISKRED = 4
NETINFO_PRINTERRED = 8
RP_LOGON = 1
RP_INIFILE = 2
PP_DISPLAYERRORS = 1
WNCON_FORNETCARD = 1
WNCON_NOTROUTED = 2
WNCON_SLOWLINK = 4
WNCON_DYNAMIC = 8

## NETSETUP_NAME_TYPE, used with NetValidateName
NetSetupUnknown = 0
NetSetupMachine = 1
NetSetupWorkgroup = 2
NetSetupDomain = 3
NetSetupNonExistentDomain = 4
NetSetupDnsMachine = 5

## NETSETUP_JOIN_STATUS, use with NetGetJoinInformation
NetSetupUnknownStatus = 0
NetSetupUnjoined = 1
NetSetupWorkgroupName = 2
NetSetupDomainName = 3
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32pdhquery.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32pdhquery.py
new file mode 100644
index 0000000..0a3385b6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32pdhquery.py
@@ -0,0 +1,515 @@
+'''
+Performance Data Helper (PDH) Query Classes
+
+Wrapper classes for end-users and high-level access to the PDH query
+mechanisms.  PDH is a win32-specific mechanism for accessing the
+performance data made available by the system.  The Python for Windows
+PDH module does not implement the "Registry" interface, implementing
+the more straightforward Query-based mechanism.
+
+The basic idea of a PDH Query is an object which can query the system
+about the status of any number of "counters."  The counters are paths
+to a particular piece of performance data.  For instance, the path 
+'\\Memory\\Available Bytes' describes just about exactly what it says
+it does, the amount of free memory on the default computer expressed 
+in Bytes.  These paths can be considerably more complex than this, 
+but part of the point of this wrapper module is to hide that
+complexity from the end-user/programmer.
+
+EXAMPLE: A more complex Path
+	'\\\\RAISTLIN\\PhysicalDisk(_Total)\\Avg. Disk Bytes/Read'
+	Raistlin --> Computer Name
+	PhysicalDisk --> Object Name
+	_Total --> The particular Instance (in this case, all instances, i.e. all drives)
+	Avg. Disk Bytes/Read --> The piece of data being monitored.
+
+EXAMPLE: Collecting Data with a Query
+	As an example, the following code implements a logger which allows the
+	user to choose what counters they would like to log, and logs those
+	counters for 30 seconds, at two-second intervals.
+	
+	query = Query()
+	query.addcounterbybrowsing()
+	query.collectdatafor(30,2)
+	
+	The data is now stored in a list of lists as:
+	query.curresults
+	
+	The counters(paths) which were used to collect the data are:
+	query.curpaths
+	
+	You can use the win32pdh.ParseCounterPath(path) utility function
+	to turn the paths into more easily read values for your task, or
+	write the data to a file, or do whatever you want with it.
+
+OTHER NOTABLE METHODS:
+	query.collectdatawhile(period) # start a logging thread for collecting data
+	query.collectdatawhile_stop() # signal the logging thread to stop logging
+	query.collectdata() # run the query only once
+	query.addperfcounter(object, counter, machine=None) # add a standard performance counter
+	query.addinstcounter(object, counter,machine=None,objtype = 'Process',volatile=1,format = win32pdh.PDH_FMT_LONG) # add a possibly volatile counter
+
+### Known bugs and limitations ###
+Due to a problem with threading under the PythonWin interpreter, there
+will be no data logged if the PythonWin window is not the foreground
+application.  Workaround: scripts using threading should be run in the
+python.exe interpreter.
+
+The volatile-counter handlers are possibly buggy, they haven't been
+tested to any extent.  The wrapper Query makes it safe to pass invalid
+paths (a -1 will be returned, or the Query will be totally ignored,
+depending on the missing element), so you should be able to work around
+the error by including all possible paths and filtering out the -1's.
+
+There is no way I know of to stop a thread which is currently sleeping,
+so you have to wait until the thread in collectdatawhile is activated
+again.  This might become a problem in situations where the collection
+period is multiple minutes (or hours, or whatever).
+
+Should make the win32pdh.ParseCounter function available to the Query
+classes as a method or something similar, so that it can be accessed
+by programmes that have just picked up an instance from somewhere.
+
+Should explicitly mention where QueryErrors can be raised, and create a
+full test set to see if there are any uncaught win32api.error's still
+hanging around.
+
+When using the python.exe interpreter, the addcounterbybrowsing-
+generated browser window is often hidden behind other windows.  No known
+workaround other than Alt-tabing to reach the browser window.
+
+### Other References ###
+The win32pdhutil module (which should be in the %pythonroot%/win32/lib 
+directory) provides quick-and-dirty utilities for one-off access to
+variables from the PDH.  Almost everything in that module can be done
+with a Query object, but it provides task-oriented functions for a
+number of common one-off tasks.
+
+If you can access the MS Developers Network Library, you can find
+information about the PDH API as MS describes it.  For a background article,
+try:
+http://msdn.microsoft.com/library/en-us/dnperfmo/html/msdn_pdhlib.asp
+
+The reference guide for the PDH API was last spotted at:
+http://msdn.microsoft.com/library/en-us/perfmon/base/using_the_pdh_interface.asp
+
+
+In general the Python version of the API is just a wrapper around the
+Query-based version of this API (as far as I can see), so you can learn what
+you need to from there.  From what I understand, the MSDN Online 
+resources are available for the price of signing up for them.  I can't
+guarantee how long that's supposed to last. (Or anything for that
+matter).
+http://premium.microsoft.com/isapi/devonly/prodinfo/msdnprod/msdnlib.idc?theURL=/msdn/library/sdkdoc/perfdata_4982.htm
+
+The eventual plan is for my (Mike Fletcher's) Starship account to include
+a section on NT Administration, and the Query is the first project
+in this plan.  There should be an article describing the creation of
+a simple logger there, but the example above is 90% of the work of
+that project, so don't sweat it if you don't find anything there.
+(currently the account hasn't been set up).
+http://starship.skyport.net/crew/mcfletch/
+
+If you need to contact me immediately, (why I can't imagine), you can
+email me at mcfletch@golden.net, or just post your question to the
+Python newsgroup with a catchy subject line.
+news:comp.lang.python
+
+### Other Stuff ###
+The Query classes are by Mike Fletcher, with the working code
+being corruptions of Mark Hammonds win32pdhutil module.
+
+Use at your own risk, no warranties, no guarantees, no assurances,
+if you use it, you accept the risk of using it, etceteras.
+
+'''
+# Feb 12, 98 - MH added "rawaddcounter" so caller can get exception details.
+
+import win32pdh, win32api,time, thread,copy
+
class BaseQuery:
	'''
	Provides wrapped access to the Performance Data Helper query
	objects, generally you should use the child class Query
	unless you have need of doing weird things :)

	This class supports two major working paradigms.  In the first,
	you open the query, and run it as many times as you need, closing
	the query when you're done with it.  This is suitable for static
	queries (ones where processes being monitored don't disappear).

	In the second, you allow the query to be opened each time and
	closed afterward.  This causes the base query object to be
	destroyed after each call.  Suitable for dynamic queries (ones
	which watch processes which might be closed while watching.)
	'''
	def __init__(self,paths=None):
		'''
		The PDH Query object is initialised with a single, optional
		list argument, that must be properly formatted PDH Counter
		paths.  Generally this list will only be provided by the class
		when it is being unpickled (removed from storage).  Normal
		use is to call the class with no arguments and use the various
		addcounter functions (particularly, for end user's, the use of
		addcounterbybrowsing is the most common approach)  You might
		want to provide the list directly if you want to hard-code the
		elements with which your query deals (and thereby avoid the
		overhead of unpickling the class).
		'''
		# counter handles returned by win32pdh.AddCounter; 0 marks a path
		# that failed to add (see open/collectdataslave)
		self.counters = []
		if paths:
			self.paths = paths
		else:
			self.paths = []
		# the underlying win32pdh query handle (None while closed)
		self._base = None
		# 1 while the query is open; open() refuses to double-open
		self.active = 0
		# snapshot of the paths used by the currently/last opened query
		self.curpaths = []
	def addcounterbybrowsing(self, flags = win32pdh.PERF_DETAIL_WIZARD, windowtitle="Python Browser"):
		'''
		Adds possibly multiple paths to the paths attribute of the query,
		does this by calling the standard counter browsing dialogue.  Within
		this dialogue, find the counter you want to log, and click: Add,
		repeat for every path you want to log, then click on close.  The
		paths are appended to the non-volatile paths list for this class,
		subclasses may create a function which parses the paths and decides
		(via heuristics) whether to add the path to the volatile or non-volatile
		path list.
		e.g.:
			query.addcounter()
		'''
		win32pdh.BrowseCounters(None,0, self.paths.append, flags, windowtitle)
	def rawaddcounter(self,object, counter, instance = None, inum=-1, machine=None):
		'''
		Adds a single counter path, without catching any exceptions.
		
		See addcounter for details.
		'''
		path = win32pdh.MakeCounterPath( (machine,object,instance, None, inum,counter) )
		self.paths.append(path)
	
	def addcounter(self,object, counter, instance = None, inum=-1, machine=None):
		'''
		Adds a single counter path to the paths attribute.  Normally
		this will be called by a child class' speciality functions,
		rather than being called directly by the user. (Though it isn't
		hard to call manually, since almost everything is given a default)
		This method is only functional when the query is closed (or hasn't
		yet been opened).  This is to prevent conflict in multi-threaded
		query applications).
		e.g.:
			query.addcounter('Memory','Available Bytes')

		Returns 0 on success, -1 if the path was rejected or the query
		is currently open.
		'''
		if not self.active:
			try:
				self.rawaddcounter(object, counter, instance, inum, machine)
				return 0
			except win32api.error:
				return -1
		else:
			return -1
		
	def open(self):
		'''
		Build the base query object for this wrapper,
		then add all of the counters required for the query.
		Raise a QueryError if we can't complete the functions.
		If we are already open, then do nothing.
		'''
		if not self.active: # to prevent having multiple open queries
			# curpaths are made accessible here because of the possibility of volatile paths
			# which may be dynamically altered by subclasses.
			self.curpaths = copy.copy(self.paths)
			try:
				base = win32pdh.OpenQuery()
				for path in self.paths:
					try:
						self.counters.append(win32pdh.AddCounter(base, path))
					except win32api.error: # we passed a bad path
						# append 0 so counters stays aligned with curpaths;
						# collectdataslave reports -1 for these slots
						self.counters.append(0)
						pass
				self._base = base
				self.active = 1
				return 0 # open succeeded
			except: # if we encounter any errors, kill the Query
				try:
					self.killbase(base)
				except NameError: # failed in creating query
					pass
				self.active = 0
				self.curpaths = []
				raise QueryError(self)
		return 1 # already open
		
	def killbase(self,base=None):
		'''
		### This is not a public method
		Mission critical function to kill the win32pdh objects held
		by this object.  User's should generally use the close method
		instead of this method, in case a sub-class has overridden
		close to provide some special functionality.
		'''
		# Kill Pythonic references to the objects in this object's namespace
		self._base = None
		counters = self.counters
		self.counters = []
		# we don't kill the curpaths for convenience, this allows the
		# user to close a query and still access the last paths
		self.active = 0
		# Now call the delete functions on all of the objects
		# (best-effort: errors are deliberately swallowed so that partially
		# constructed or already-closed handles don't abort the cleanup)
		try:
			map(win32pdh.RemoveCounter,counters)
		except:
			pass
		try:
			win32pdh.CloseQuery(base)
		except:
			pass
		del(counters)
		del(base)
	def close(self):
		'''
		Makes certain that the underlying query object has been closed,
		and that all counters have been removed from it.  This is
		important for reference counting.
		You should only need to call close if you have previously called
		open.  The collectdata methods all can handle opening and
		closing the query.  Calling close multiple times is acceptable.
		'''
		try:
			self.killbase(self._base)
		except AttributeError:
			self.killbase()
	# release the PDH handles when the wrapper is garbage-collected
	__del__ = close
	def collectdata(self,format = win32pdh.PDH_FMT_LONG):
		'''
		Returns the formatted current values for the Query
		'''
		if self._base: # we are currently open, don't change this
			return self.collectdataslave(format)
		else: # need to open and then close the _base, should be used by one-offs and elements tracking application instances
			self.open() # will raise QueryError if couldn't open the query
			temp = self.collectdataslave(format)
			self.close() # will always close
			return temp
	def collectdataslave(self,format = win32pdh.PDH_FMT_LONG):
		'''
		### Not a public method
		Called only when the Query is known to be open, runs over
		the whole set of counters, appending results to the temp,
		returns the values as a list.
		'''
		try:
			win32pdh.CollectQueryData(self._base)
			temp = []
			for counter in self.counters:
				ok = 0
				try:
					if counter:
						temp.append(win32pdh.GetFormattedCounterValue(counter, format)[1])
						ok = 1
				except win32api.error:
					pass
				if not ok:
					temp.append(-1) # a better way to signal failure???
			return temp
		except win32api.error: # will happen if, for instance, no counters are part of the query and we attempt to collect data for it.
			return [-1] * len(self.counters)
	# pickle functions
	def __getinitargs__(self):
		'''
		### Not a public method
		'''
		return (self.paths,)
+		
class Query(BaseQuery):
	'''
	Performance Data Helper(PDH) Query object:
	
	Provides a wrapper around the native PDH query object which
	allows for query reuse, query storage, and general maintenance
	functions (adding counter paths in various ways being the most
	obvious ones).
	'''
	def __init__(self,*args,**namedargs):
		'''
		The PDH Query object is initialised with a single, optional
		list argument, that must be properly formatted PDH Counter
		paths.  Generally this list will only be provided by the class
		when it is being unpickled (removed from storage).  Normal
		use is to call the class with no arguments and use the various
		addcounter functions (particularly, for end user's, the use of
		addcounterbybrowsing is the most common approach)  You might
		want to provide the list directly if you want to hard-code the
		elements with which your query deals (and thereby avoid the
		overhead of unpickling the class).
		'''
		# (object, counter, machine, objtype, format) tuples whose instance
		# paths are recalculated on every open() -- see addinstcounter
		self.volatilecounters = []
		apply(BaseQuery.__init__, (self,)+args, namedargs)
	def addperfcounter(self, object, counter, machine=None):
		'''
		A "Performance Counter" is a stable, known, common counter,
		such as Memory, or Processor.  The use of addperfcounter by 
		end-users is deprecated, since the use of 
		addcounterbybrowsing is considerably more flexible and general.
		It is provided here to allow the easy development of scripts
		which need to access variables so common we know them by name
		(such as Memory|Available Bytes), and to provide symmetry with
		the add inst counter method.
		usage:
			query.addperfcounter('Memory', 'Available Bytes')
		It is just as easy to access addcounter directly, the following
		has an identical effect.
			query.addcounter('Memory', 'Available Bytes')
		'''
		BaseQuery.addcounter(self, object=object, counter=counter, machine=machine)
	def addinstcounter(self, object, counter,machine=None,objtype = 'Process',volatile=1,format = win32pdh.PDH_FMT_LONG):
		'''
		The purpose of using an instcounter is to track particular
		instances of a counter object (e.g. a single processor, a single
		running copy of a process).  For instance, to track all python.exe
		instances, you would need merely to ask:
			query.addinstcounter('python','Virtual Bytes')
		You can find the names of the objects and their available counters 
		by doing an addcounterbybrowsing() call on a query object (or by
		looking in performance monitor's add dialog.)
		
		Beyond merely rearranging the call arguments to make more sense,
		if the volatile flag is true, the instcounters also recalculate
		the paths of the available instances on every call to open the
		query.
		'''
		if volatile:
			self.volatilecounters.append((object,counter,machine,objtype,format))
		else:
			self.paths[len(self.paths):] = self.getinstpaths(object,counter,machine,objtype,format)
				
	def getinstpaths(self,object,counter,machine=None,objtype='Process',format = win32pdh.PDH_FMT_LONG):
		'''
		### Not an end-user function
		Calculate the paths for an instance object. Should alter
		to allow processing for lists of object-counter pairs.
		'''
		items, instances = win32pdh.EnumObjectItems(None,None,objtype, -1)
		# find out how many instances of this element we have...
		instances.sort()
		try:
			cur = instances.index(object)
		except ValueError:
			return [] # no instances of this object
		temp = [object]
		try:
			while instances[cur+1] == object:
				temp.append(object)
				cur = cur+1
		except IndexError: # if we went over the end
			pass
		paths = []
		for ind in range(len(temp)):
			# can this raise an error?
			# BUGFIX: use the requested objtype as the path's object element.
			# Previously the literal 'Process' was hardcoded here, so any other
			# objtype (e.g. 'Thread') enumerated instances of that object but
			# still built 'Process' counter paths.
			paths.append(win32pdh.MakeCounterPath( (machine,objtype,object,None,ind,counter) ) )
		return paths # should also return the number of elements for naming purposes

	def open(self,*args,**namedargs):
		'''
		Explicitly open a query:
		When you are needing to make multiple calls to the same query,
		it is most efficient to open the query, run all of the calls,
		then close the query, instead of having the collectdata method
		automatically open and close the query each time it runs.
		There are currently no arguments to open.
		'''
		# do all the normal opening stuff, self._base is now the query object
		apply(BaseQuery.open,(self,)+args, namedargs)
		# should rewrite getinstpaths to take a single tuple
		paths = []
		for tup in self.volatilecounters:
			paths[len(paths):] = apply(self.getinstpaths, tup)
		for path in paths:
			try:
				self.counters.append(win32pdh.AddCounter(self._base, path))
				self.curpaths.append(path) # if we fail on the line above, this path won't be in the table or the counters
			except win32api.error:
				pass # again, what to do with a malformed path???
	def collectdatafor(self, totalperiod, period=1):
		'''
		Non-threaded collection of performance data:
		This method allows you to specify the total period for which you would
		like to run the Query, and the time interval between individual
		runs.  The collected data is stored in query.curresults at the
		_end_ of the run.  The pathnames for the query are stored in
		query.curpaths.
		e.g.:
			query.collectdatafor(30,2)
		Will collect data for 30seconds at 2 second intervals
		'''
		tempresults = []
		try:
			self.open()
			for ind in xrange(totalperiod/period):
				tempresults.append(self.collectdata())
				time.sleep(period)
			self.curresults = tempresults
		finally:
			# always release the PDH handles, even if collection failed
			self.close()
	def collectdatawhile(self, period=1):
		'''
		Threaded collection of performance data:
		This method sets up a simple semaphore system for signalling 
		when you would like to start and stop a threaded data collection
		method.  The collection runs every period seconds until the
		semaphore attribute is set to a non-true value (which normally
		should be done by calling query.collectdatawhile_stop() .)
		e.g.:
			query.collectdatawhile(2)
			# starts the query running, returns control to the caller immediately
			# is collecting data every two seconds.
			# do whatever you want to do while the thread runs, then call:
			query.collectdatawhile_stop()
			# when you want to deal with the data.  It is generally a good idea
			# to sleep for period seconds yourself, since the query will not copy
			# the required data until the next iteration:
			time.sleep(2)
			# now you can access the data from the attributes of the query
			query.curresults
			query.curpaths
		'''
		self.collectdatawhile_active = 1
		thread.start_new_thread(self.collectdatawhile_slave,(period,))
	def collectdatawhile_stop(self):
		'''
		Signals the collectdatawhile slave thread to stop collecting data
		on the next logging iteration.
		'''
		self.collectdatawhile_active = 0
	def collectdatawhile_slave(self, period):
		'''
		### Not a public function
		Does the threaded work of collecting the data and storing it
		in an attribute of the class.
		'''
		tempresults = []
		try:
			self.open() # also sets active, so can't be changed.
			while self.collectdatawhile_active:
				tempresults.append(self.collectdata())
				time.sleep(period)
			self.curresults = tempresults
		finally:
			self.close()
		
	# pickle functions
	def __getinitargs__(self):
		return (self.paths,)
	def __getstate__(self):
		return self.volatilecounters
	def __setstate__(self, volatilecounters):
		self.volatilecounters = volatilecounters
+
+
class QueryError:
	'''Raised (as an instance) when a Query cannot be opened or completed.

	The offending query object is kept in the ``query`` attribute for
	diagnostic purposes.
	'''
	def __init__(self, query):
		# Hold on to the failing query so callers can inspect it.
		self.query = query
	def __repr__(self):
		return '<Query Error in %s>' % (repr(self.query),)
	def __str__(self):
		# str() and repr() intentionally render identically.
		return self.__repr__()
+	
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32pdhutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32pdhutil.py
new file mode 100644
index 0000000..4e9fb98
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32pdhutil.py
@@ -0,0 +1,145 @@
+"""Utilities for the win32 Performance Data Helper module
+
+Example:
+  To get a single bit of data:
+  >>> import win32pdhutil
+  >>> win32pdhutil.GetPerformanceAttributes("Memory", "Available Bytes")
+  6053888
+  >>> win32pdhutil.FindPerformanceAttributesByName("python", counter="Virtual Bytes")
+  [22278144]
+  
+  First example returns data which is not associated with any specific instance.
+  
+  The second example reads data for a specific instance - hence the list return - 
+  it would return one result for each instance of Python running.
+
+  In general, it can be tricky finding exactly the "name" of the data you wish to query.  
+  Although you can use <om win32pdh.EnumObjectItems>(None,None,(eg)"Memory", -1) to do this, 
+  the easiest way is often to simply use PerfMon to find out the names.
+"""
+
+import win32pdh, string, time
+
+error = win32pdh.error
+
+# Handle some localization issues.
+# see http://support.microsoft.com/default.aspx?scid=http://support.microsoft.com:80/support/kb/articles/Q287/1/59.asp&NoWebContent=1
+# Build a map of english_counter_name: counter_id
+counter_english_map = {}
+
def find_pdh_counter_localized_name(english_name, machine_name = None):
	'''
	Map an English PDH counter name to the locally installed language.

	The english-name -> counter-id table is built lazily, once, from the
	registry's "Counter 009" (English) performance data and cached in the
	module-level counter_english_map.  Raises KeyError for unknown names.
	'''
	if not counter_english_map:
		import win32api, win32con
		raw = win32api.RegQueryValueEx(win32con.HKEY_PERFORMANCE_DATA, "Counter 009")[0]
		# raw is a flat sequence alternating counter-id, counter-name.
		idx = 0
		while idx < len(raw) - 1:
			try:
				cid = int(raw[idx])
			except ValueError:
				# Skip malformed id entries.
				pass
			else:
				counter_english_map[raw[idx+1].lower()] = cid
			idx = idx + 2
	return win32pdh.LookupPerfNameByIndex(machine_name, counter_english_map[english_name.lower()])
+
def GetPerformanceAttributes(object, counter, instance = None, inum=-1, format = win32pdh.PDH_FMT_LONG, machine=None):
	'''
	Fetch a single formatted value for one performance counter.

	Opens a query, samples it once, and returns the formatted value.
	NOTE: Many counters require 2 samples to give accurate results,
	including "% Processor Time" (as by definition, at any instant, a
	thread's CPU usage is either 0 or 100).  To read counters like this,
	copy this function, but keep the counter open, and call
	CollectQueryData() each time you need to know.
	See http://support.microsoft.com/default.aspx?scid=kb;EN-US;q262938
	and http://msdn.microsoft.com/library/en-us/dnperfmo/html/perfmonpt2.asp
	'''
	counter_path = win32pdh.MakeCounterPath( (machine,object,instance, None, inum,counter) )
	query_handle = win32pdh.OpenQuery()
	try:
		counter_handle = win32pdh.AddCounter(query_handle, counter_path)
		try:
			win32pdh.CollectQueryData(query_handle)
			# GetFormattedCounterValue returns (type, value); only the
			# value is interesting here.
			value = win32pdh.GetFormattedCounterValue(counter_handle, format)[1]
			return value
		finally:
			win32pdh.RemoveCounter(counter_handle)
	finally:
		win32pdh.CloseQuery(query_handle)
+
def FindPerformanceAttributesByName(instanceName, object = None, counter = None, format = win32pdh.PDH_FMT_LONG, machine = None, bRefresh=0):
	"""Find performance attributes by (case insensitive) instance name.

	Given a process name, return a list with the requested attributes.
	Most useful for returning a tuple of PIDs given a process name.
	One list entry is returned per running instance with that name.
	"""
	if object is None: object = find_pdh_counter_localized_name("Process", machine)
	if counter is None: counter = find_pdh_counter_localized_name("ID Process", machine)
	if bRefresh: # PDH docs say this is how you do a refresh.
		win32pdh.EnumObjects(None, machine, 0, 1)
	instanceName = string.lower(instanceName)
	items, instances = win32pdh.EnumObjectItems(None,None,object, -1)
	# Count duplicate instance names: the stored value is the highest
	# per-name instance index seen, so each copy can be addressed by inum.
	instance_dict = {}
	for instance in instances:
		try:
			instance_dict[instance] = instance_dict[instance] + 1
		except KeyError:
			instance_dict[instance] = 0

	ret = []
	for instance, max_instances in instance_dict.items():
		# The name test does not depend on inum - do it once per name
		# instead of once per (name, inum) pair.
		if string.lower(instance) != instanceName:
			continue
		for inum in xrange(max_instances+1):
			ret.append(GetPerformanceAttributes(object, counter, instance, inum, format, machine))
	return ret
+
def ShowAllProcesses():
	"""Print a table of every running process instance with its PID and
	the first few "Process" object counters."""
	object = "Process"
	items, instances = win32pdh.EnumObjectItems(None,None,object, win32pdh.PERF_DETAIL_WIZARD)
	# Need to track multiple instances of the same name.
	# The stored value is the highest per-name instance index seen.
	instance_dict = {}
	for instance in instances:
		try:
			instance_dict[instance] = instance_dict[instance] + 1
		except KeyError:
			instance_dict[instance] = 0
		
	# Bit of a hack to get useful info.
	items = ["ID Process"] + items[:5]
	print "Process Name", string.join(items,",")
	for instance, max_instances in instance_dict.items():
		for inum in xrange(max_instances+1):
			hq = win32pdh.OpenQuery()
			hcs = []
			for item in items:
				path = win32pdh.MakeCounterPath( (None,object,instance, None, inum, item) )
				hcs.append(win32pdh.AddCounter(hq, path))
			win32pdh.CollectQueryData(hq)
			# as per http://support.microsoft.com/default.aspx?scid=kb;EN-US;q262938, some "%" based
			# counters need two collections
			time.sleep(0.01)
			win32pdh.CollectQueryData(hq)
			print "%-15s\t" % (instance[:15]),
			for hc in hcs:
				type, val = win32pdh.GetFormattedCounterValue(hc, win32pdh.PDH_FMT_LONG)
				print "%5d" % (val),
				win32pdh.RemoveCounter(hc)
			print
			win32pdh.CloseQuery(hq)
+
def BrowseCallBackDemo(counter):
	"""Demo callback for browse(): print details and the current value of
	the counter path the user selected in the browse dialog."""
	machine, object, instance, parentInstance, index, counterName = \
		win32pdh.ParseCounterPath(counter)

	result = GetPerformanceAttributes(object, counterName, instance, index, win32pdh.PDH_FMT_DOUBLE, machine)
	print "Value of '%s' is" % counter, result
	print "Added '%s' on object '%s' (machine %s), instance %s(%d)-parent of %s" % (counterName, object, machine, instance, index, parentInstance)
+
def browse( callback = BrowseCallBackDemo, title="Python Browser", level=win32pdh.PERF_DETAIL_WIZARD):
	# Show the system's counter-browser dialog; `callback` is invoked with
	# the full counter path of each counter the user adds.
	win32pdh.BrowseCounters(None,0, callback, level, title)
+
if __name__=='__main__':
	# Ad-hoc smoke test / demo when run as a script.
	ShowAllProcesses()
	# Show how to get a couple of attributes by name.
	print "Virtual Bytes = ", FindPerformanceAttributesByName("python", counter="Virtual Bytes")
	print "Available Bytes = ", GetPerformanceAttributes("Memory", "Available Bytes")
	# And a browser.
	print "Browsing for counters..."
	browse()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32rcparser.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32rcparser.py
new file mode 100644
index 0000000..45492e5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32rcparser.py
@@ -0,0 +1,582 @@
+# Windows dialog .RC file parser, by Adam Walker.
+
+# This module was adapted from the spambayes project, and is Copyright
+# 2003/2004 The Python Software Foundation and is covered by the Python
+# Software Foundation license.
+"""
+This is a parser for Windows .rc files, which are text files which define
+dialogs and other Windows UI resources.
+"""
+__author__="Adam Walker"
+__version__="0.11"
+
+import sys, os, shlex, stat
+import pprint
+import win32con
+import commctrl
+
# Map .rc control keywords to the predefined dialog-template window-class
# values used by the dialog template (0x80 Button, 0x81 Edit, 0x82 Static,
# 0x83 ListBox, 0x84 ScrollBar, 0x85 ComboBox).  RICHEDIT maps to its
# registered window class name instead of an atom.
_controlMap = {"DEFPUSHBUTTON":0x80,
               "PUSHBUTTON":0x80,
               "Button":0x80,
               "GROUPBOX":0x80,
               "Static":0x82,
               "CTEXT":0x82,
               "RTEXT":0x82,
               "LTEXT":0x82,
               "LISTBOX":0x83,
               "SCROLLBAR":0x84,
               "COMBOBOX":0x85,
               "EDITTEXT":0x81,
               "ICON":0x82,
               "RICHEDIT":"RichEdit20A"
               }

# Extra window styles implied by each .rc statement keyword; OR'd into the
# control's style on top of whatever styles the .rc source specifies.
_addDefaults = {"EDITTEXT":win32con.WS_BORDER | win32con.WS_TABSTOP,
                "GROUPBOX":win32con.BS_GROUPBOX,
                "LTEXT":win32con.SS_LEFT,
                "DEFPUSHBUTTON":win32con.BS_DEFPUSHBUTTON | win32con.WS_TABSTOP,
                "PUSHBUTTON": win32con.WS_TABSTOP,
                "CTEXT":win32con.SS_CENTER,
                "RTEXT":win32con.SS_RIGHT,
                "ICON":win32con.SS_ICON}

# Every dialog child control is created as a visible child window.
defaultControlStyle = win32con.WS_CHILD | win32con.WS_VISIBLE
defaultControlStyleEx = 0
+
class DialogDef:
    """Everything parsed for one DIALOG/DIALOGEX resource.

    Class attributes act as defaults for instances; createDialogTemplate()
    converts the collected data into the nested-list dialog template format
    expected by win32 dialog creation.
    """
    name = ""
    id = 0
    style = 0
    styleEx = None
    caption = ""
    font = "MS Sans Serif"
    fontSize = 8
    x = 0
    y = 0
    w = 0
    h = 0
    template = None

    def __init__(self, n, i):
        # n is the symbolic resource name, i its numeric id.
        self.name = n
        self.id = i
        self.styles = []
        self.stylesEx = []
        self.controls = []

    def createDialogTemplate(self):
        # Header entry first, then one entry per child control.
        header = [self.caption,
                  (self.x, self.y, self.w, self.h),
                  self.style, self.styleEx,
                  (self.fontSize, self.font)]
        self.template = [header]
        for child in self.controls:
            self.template.append(child.createDialogTemplate())
        return self.template
+
class ControlDef:
    """One child control of a dialog, as parsed from the .rc source.

    Class attributes are per-instance defaults; createDialogTemplate()
    renders the control as a dialog-template item list.
    """
    id = ""
    controlType = ""
    subType = ""
    idNum = 0
    style = defaultControlStyle
    styleEx = defaultControlStyleEx
    label = ""
    x = 0
    y = 0
    w = 0
    h = 0

    def __init__(self):
        self.styles = []
        self.stylesEx = []

    def toString(self):
        # Debug helper - a human readable dump of all fields.
        s = ("<Control id:" + self.id
             + " controlType:" + self.controlType
             + " subType:" + self.subType
             + " idNum:" + str(self.idNum)
             + " style:" + str(self.style)
             + " styles:" + str(self.styles)
             + " label:" + self.label
             + " x:" + str(self.x)
             + " y:" + str(self.y)
             + " w:" + str(self.w)
             + " h:" + str(self.h) + ">")
        return s

    def createDialogTemplate(self):
        kind = self.controlType
        # Generic CONTROL statements carry the real window class in subType.
        if "CONTROL" == kind:
            kind = self.subType
        # Translate well-known keywords to window-class values.
        kind = _controlMap.get(kind, kind)
        return [kind, self.label, self.idNum,
                (self.x, self.y, self.w, self.h), self.style, self.styleEx]
+
class StringDef:
    """One STRINGTABLE entry: symbolic id, numeric id and the text value."""
    def __init__(self, id, idNum, value):
        self.id = id
        self.idNum = idNum
        self.value = value
+
class RCParser:
    """Tokenizing parser for Windows .rc resource scripts.

    Feed an open .rc stream to load() (and optionally a resource.h stream
    to parseH() first).  Results are exposed via:
      dialogs     - resource name -> win32 dialog template list
      ids         - symbolic name -> numeric id
      names       - numeric id -> symbolic name
      bitmaps, icons - resource name -> file name
      stringTable - string id -> StringDef

    NOTE(review): next_id, dialogs and _dialogs are class attributes, so
    the dictionaries are shared between RCParser instances - confirm
    before creating more than one parser per process.
    """
    next_id = 1001
    dialogs = {}
    _dialogs = {}
    debugEnabled = False
    token = ""

    def __init__(self):
        self.ungot = False
        self.ids = {"IDC_STATIC": -1}
        self.names = {-1:"IDC_STATIC"}
        self.bitmaps = {}
        self.stringTable = {}
        self.icons = {}

    def debug(self, *args):
        # Trace output, only when debugEnabled is set on the class/instance.
        if self.debugEnabled:
            print args

    def getToken(self):
        # Return the next token, honouring a pushed-back token from
        # ungetToken().  shlex returns "" at EOF - normalize that to None.
        if self.ungot:
            self.ungot = False
            self.debug("getToken returns (ungot):", self.token)
            return self.token
        self.token = self.lex.get_token()
        self.debug("getToken returns:", self.token)
        if self.token=="":
            self.token = None
        return self.token

    def ungetToken(self):
        # Push the current token back; the next getToken() re-returns it.
        self.ungot = True

    def getCheckToken(self, expected):
        # Consume one token and assert it is exactly `expected`.
        tok = self.getToken()
        assert tok == expected, "Expected token '%s', but got token '%s'!" % (expected, tok)
        return tok

    def getCommaToken(self):
        return self.getCheckToken(",")

    # Return the *current* token as a number, only consuming a token
    # if it is the negative-sign.
    def currentNumberToken(self):
        mult = 1
        if self.token=='-':
            mult = -1
            self.getToken()
        return int(self.token) * mult

    # Return the *current* token as a string literal (ie, self.token will be a
    # quote.  consumes all tokens until the end of the string
    def currentQuotedString(self):
        # Handle quoted strings - pity shlex doesn't handle it.
        assert self.token.startswith('"'), self.token
        bits = [self.token]
        while 1:
            tok = self.getToken()
            if not tok.startswith('"'):
                self.ungetToken()
                break
            bits.append(tok)
        sval = "".join(bits)[1:-1] # Remove end quotes.
        # Fixup quotes in the body, and all (some?) quoted characters back
        # to their raw value.
        for i, o in ('""', '"'), ("\\r", "\r"), ("\\n", "\n"), ("\\t", "\t"):
            sval = sval.replace(i, o)
        return sval

    def load(self, rcstream):
        """
        RCParser.load(rcstream) -> None
        Load the dialog information into the parser.  Dialog definitions can
        then be accessed using the "dialogs" dictionary member
        (name->dialog template).  The "ids" member maps name->id and the
        "names" member maps id->name.
        """
        self.open(rcstream)
        self.getToken()
        while self.token!=None:
            self.parse()
            self.getToken()

    def open(self, rcstream):
        # Create the tokenizer; .rc files use both // and # comments.
        self.lex = shlex.shlex(rcstream)
        self.lex.commenters = "//#"

    def parseH(self, file):
        # Scan a resource.h style header for "#define NAME number" pairs,
        # populating self.ids / self.names and advancing next_id past the
        # largest id seen.
        lex = shlex.shlex(file)
        lex.commenters = "//"
        token = " "
        while token is not None:
            token = lex.get_token()
            if token == "" or token is None:
                token = None
            else:
                if token=='define':
                    n = lex.get_token()
                    i = int(lex.get_token())
                    self.ids[n] = i
                    if self.names.has_key(i):
                        # Dupe ID really isn't a problem - most consumers
                        # want to go from name->id, and this is OK.
                        # It means you can't go from id->name though.
                        pass
                        # ignore AppStudio special ones
                        #if not n.startswith("_APS_"):
                        #    print "Duplicate id",i,"for",n,"is", self.names[i]
                    else:
                        self.names[i] = n
                    if self.next_id<=i:
                        self.next_id = i+1

    def parse(self):
        # Dispatch on the current top-level token: either a bare statement
        # (STRINGTABLE) or an id-prefixed resource (DIALOG, BITMAP, ICON);
        # unknown BEGIN/END structures are skipped wholesale.
        noid_parsers = {
            "STRINGTABLE":      self.parse_stringtable,
        }

        id_parsers = {
            "DIALOG" :          self.parse_dialog,
            "DIALOGEX":         self.parse_dialog,
#            "TEXTINCLUDE":      self.parse_textinclude,
            "BITMAP":           self.parse_bitmap,
            "ICON":             self.parse_icon,
        }
        deep = 0
        base_token = self.token
        rp = noid_parsers.get(base_token)
        if rp is not None:
            rp()
        else:
            # Not something we parse that isn't prefixed by an ID
            # See if it is an ID prefixed item - if it is, our token
            # is the resource ID.
            resource_id = self.token
            self.getToken()
            if self.token is None:
                return

            if "BEGIN" == self.token:
                # A 'BEGIN' for a structure we don't understand - skip to the
                # matching 'END'
                deep = 1
                while deep!=0 and self.token is not None:
                    self.getToken()
                    self.debug("Zooming over", self.token)
                    if "BEGIN" == self.token:
                        deep += 1
                    elif "END" == self.token:
                        deep -= 1
            else:
                rp = id_parsers.get(self.token)
                if rp is not None:
                    self.debug("Dispatching '%s'" % (self.token,))
                    rp(resource_id)
                else:
                    # We don't know what the resource type is, but we
                    # have already consumed the next, which can cause problems,
                    # so push it back.
                    self.debug("Skipping top-level '%s'" % base_token)
                    self.ungetToken()

    def addId(self, id_name):
        # Resolve a symbolic id to a number, allocating a fresh id from
        # next_id for names not seen before.
        if id_name in self.ids:
            id = self.ids[id_name]
        else:
            # IDOK, IDCANCEL etc are special - if a real resource has this value
            for n in ["IDOK","IDCANCEL","IDYES","IDNO", "IDABORT"]:
                if id_name == n:
                    v = getattr(win32con, n)
                    self.ids[n] = v
                    self.names[v] = n
                    return v
            id = self.next_id
            self.next_id += 1
            self.ids[id_name] = id
            self.names[id] = id_name
        return id

    def lang(self):
        # Skip over LANGUAGE/SUBLANG specifications.
        while self.token[0:4]=="LANG" or self.token[0:7]=="SUBLANG" or self.token==',':
            self.getToken();

    def parse_textinclude(self, res_id):
        # Currently unused (see the commented-out entry in parse()):
        # consumes a TEXTINCLUDE block without storing anything.
        while self.getToken() != "BEGIN":
            pass
        while 1:
            if self.token == "END":
                break
            s = self.getToken()

    def parse_stringtable(self):
        # Read "id "value"" pairs between BEGIN and END into stringTable.
        while self.getToken() != "BEGIN":
            pass
        while 1:
            self.getToken()
            if self.token == "END":
                break
            sid = self.token
            self.getToken()
            sd = StringDef(sid, self.addId(sid), self.currentQuotedString())
            self.stringTable[sid] = sd

    def parse_bitmap(self, name):
        return self.parse_bitmap_or_icon(name, self.bitmaps)

    def parse_icon(self, name):
        return self.parse_bitmap_or_icon(name, self.icons)

    def parse_bitmap_or_icon(self, name, dic):
        # The resource statement ends with a quoted file name - record it.
        self.getToken()
        while not self.token.startswith('"'):
            self.getToken()
        bmf = self.token[1:-1] # quotes
        dic[name] = bmf

    def parse_dialog(self, name):
        # Parse one DIALOG/DIALOGEX resource: position/size, then optional
        # STYLE/EXSTYLE/CAPTION/FONT clauses, then the BEGIN...END control
        # list.  The finished template is stored in self.dialogs[name].
        dlg = DialogDef(name,self.addId(name))
        assert len(dlg.controls)==0
        self._dialogs[name] = dlg
        extras = []
        self.getToken()
        while not self.token.isdigit():
            self.debug("extra", self.token)
            extras.append(self.token)
            self.getToken()
        dlg.x = int(self.token)
        self.getCommaToken()
        self.getToken() # number
        dlg.y = int(self.token)
        self.getCommaToken()
        self.getToken() # number
        dlg.w = int(self.token)
        self.getCommaToken()
        self.getToken() # number
        dlg.h = int(self.token)
        self.getToken()
        while not (self.token==None or self.token=="" or self.token=="END"):
            if self.token=="STYLE":
                self.dialogStyle(dlg)
            elif self.token=="EXSTYLE":
                self.dialogExStyle(dlg)
            elif self.token=="CAPTION":
                self.dialogCaption(dlg)
            elif self.token=="FONT":
                self.dialogFont(dlg)
            elif self.token=="BEGIN":
                self.controls(dlg)
            else:
                break
        self.dialogs[name] = dlg.createDialogTemplate()

    def dialogStyle(self, dlg):
        dlg.style, dlg.styles = self.styles( [], win32con.DS_SETFONT)
    def dialogExStyle(self, dlg):
        self.getToken()
        dlg.styleEx, dlg.stylesEx = self.styles( [], 0)

    def styles(self, defaults, defaultStyle):
        # Parse a "A | NOT B | ..." style expression.  Style names are
        # looked up in win32con then commctrl (unknown names count as 0).
        # Returns (combined numeric style, list of style-name strings).
        list = defaults
        style = defaultStyle

        if "STYLE"==self.token:
            self.getToken()
        i = 0
        Not = False
        # Tokens alternate between style names (even i) and separators
        # (odd i, "|" or "NOT").
        while ((i%2==1 and ("|"==self.token or "NOT"==self.token)) or (i%2==0)) and not self.token==None:
            Not = False;
            if "NOT"==self.token:
                Not = True
                self.getToken()
            i += 1
            if self.token!="|":
                if self.token in win32con.__dict__:
                    value = getattr(win32con,self.token)
                else:
                    if self.token in commctrl.__dict__:
                        value = getattr(commctrl,self.token)
                    else:
                        value = 0
                if Not:
                    list.append("NOT "+self.token)
                    self.debug("styles add Not",self.token, value)
                    style &= ~value
                else:
                    list.append(self.token)
                    self.debug("styles add", self.token, value)
                    style |= value
            self.getToken()
        self.debug("style is ",style)

        return style, list

    def dialogCaption(self, dlg):
        # CAPTION "text" - the token carries its quotes; strip them.
        if "CAPTION"==self.token:
            self.getToken()
        self.token = self.token[1:-1]
        self.debug("Caption is:",self.token)
        dlg.caption = self.token
        self.getToken()
    def dialogFont(self, dlg):
        # FONT size, "name" [, extra DIALOGEX fields skipped up to BEGIN]
        if "FONT"==self.token:
            self.getToken()
        dlg.fontSize = int(self.token)
        self.getCommaToken()
        self.getToken() # Font name
        dlg.font = self.token[1:-1] # it's quoted
        self.getToken()
        while "BEGIN"!=self.token:
            self.getToken()
    def controls(self, dlg):
        # Parse the BEGIN...END control list into dlg.controls.
        if self.token=="BEGIN": self.getToken()
        # All controls look vaguely like:
        # TYPE [text, ] Control_id, l, t, r, b [, style]
        # .rc parser documents all control types as:
        # CHECKBOX, COMBOBOX, CONTROL, CTEXT, DEFPUSHBUTTON, EDITTEXT, GROUPBOX,
        # ICON, LISTBOX, LTEXT, PUSHBUTTON, RADIOBUTTON, RTEXT, SCROLLBAR
        without_text = ["EDITTEXT", "COMBOBOX", "LISTBOX", "SCROLLBAR"]
        while self.token!="END":
            control = ControlDef()
            control.controlType = self.token;
            self.getToken()
            if control.controlType not in without_text:
                if self.token[0:1]=='"':
                    control.label = self.currentQuotedString()
                # Some funny controls, like icons and picture controls use
                # the "window text" as extra resource ID (ie, the ID of the
                # icon itself).  This may be either a literal, or an ID string.
                elif self.token=="-" or self.token.isdigit():
                    control.label = str(self.currentNumberToken())
                else:
                    # An ID - use the numeric equiv.
                    control.label = str(self.addId(self.token))
                self.getCommaToken()
                self.getToken()
            # Control IDs may be "names" or literal ints
            if self.token=="-" or self.token.isdigit():
                control.id = self.currentNumberToken()
                control.idNum = control.id
            else:
                # name of an ID
                control.id = self.token
                control.idNum = self.addId(control.id)
            self.getCommaToken()

            if control.controlType == "CONTROL":
                self.getToken()
                control.subType = self.token[1:-1]
                thisDefaultStyle = defaultControlStyle | \
                                   _addDefaults.get(control.subType, 0)
                # Styles
                self.getCommaToken()
                self.getToken()
                control.style, control.styles = self.styles([], thisDefaultStyle)
            else:
                thisDefaultStyle = defaultControlStyle | \
                                   _addDefaults.get(control.controlType, 0)
                # incase no style is specified.
                control.style = thisDefaultStyle
            # Rect
            control.x = int(self.getToken())
            self.getCommaToken()
            control.y = int(self.getToken())
            self.getCommaToken()
            control.w = int(self.getToken())
            self.getCommaToken()
            self.getToken()
            control.h = int(self.token)
            self.getToken()
            if self.token==",":
                self.getToken()
                control.style, control.styles = self.styles([], thisDefaultStyle)
            if self.token==",":
                self.getToken()
                control.styleEx, control.stylesEx = self.styles([], defaultControlStyleEx)
            #print control.toString()
            dlg.controls.append(control)
+
+def ParseStreams(rc_file, h_file):
+    rcp = RCParser()
+    rcp.parseH(h_file)
+    try:
+        rcp.load(rc_file)
+    except:
+        lex = getattr(rcp, "lex", None)
+        if lex:
+            print "ERROR parsing dialogs at line", lex.lineno
+            print "Next 10 tokens are:"
+            for i in range(10):
+                print lex.get_token(),
+            print
+        raise
+    return rcp
+    
def Parse(rc_name, h_name = None):
    """Parse an .rc file by name, returning the populated RCParser.

    The companion header is located as: the explicit h_name if given, else
    "<rc basename>.h", else "resource.h" beside the .rc file.  A header is
    optional - parsing proceeds without one.  Raises IOError if the .rc
    file itself (or an explicitly named header) cannot be opened.
    """
    if h_name:
        h_file = open(h_name, "rU")
    else:
        # See if same basename as the .rc
        h_name = rc_name[:-2]+"h"
        try:
            h_file = open(h_name, "rU")
        except IOError:
            # See if MSVC default of 'resource.h' in the same dir.
            h_name = os.path.join(os.path.dirname(rc_name), "resource.h")
            try:
                h_file = open(h_name, "rU")
            except IOError:
                # .h files are optional anyway
                h_file = None
    rc_file = open(rc_name, "rU")
    try:
        return ParseStreams(rc_file, h_file)
    finally:
        if h_file is not None:
            h_file.close()
        rc_file.close()
    # (The unreachable trailing "return rcp" - which referenced an
    # undefined name - has been removed.)
+
def GenerateFrozenResource(rc_name, output_name, h_name = None):
    """Freeze an .rc file into an importable Python module.

    Parses rc_name (with optional header h_name) and writes output_name, a
    generated .py file exposing a Parse() function whose FakeParser result
    carries the same public dictionaries as a live RCParser.  Useful for
    py2exe or other 'freeze' type solutions where no real .rc file ships.
    """
    parser = Parse(rc_name, h_name)
    rc_stat = os.stat(rc_name)

    dest = open(output_name, "wt")
    dest.write("#%s\n" % output_name)
    dest.write("#This is a generated file. Please edit %s instead.\n" % rc_name)
    dest.write("__version__=%r\n" % __version__)
    # Record source size/mtime so consumers can detect a stale freeze.
    dest.write("_rc_size_=%d\n_rc_mtime_=%d\n" % (rc_stat[stat.ST_SIZE], rc_stat[stat.ST_MTIME]))
    dest.write("class FakeParser:\n")

    # Dump each public dictionary as a class attribute literal.
    for attr in ("dialogs", "ids", "names", "bitmaps", "icons", "stringTable"):
        dest.write("\t%s = \\\n" % (attr,))
        pprint.pprint(getattr(parser, attr), dest)
        dest.write("\n")

    dest.write("def Parse(s):\n")
    dest.write("\treturn FakeParser()\n")
    dest.close()
+
if __name__=='__main__':
    # Command-line mode: dump all resources of the .rc file named on the
    # command line ("-v" enables parser tracing); otherwise print help.
    if len(sys.argv) <= 1:
        print __doc__
        print
        print "See test_win32rcparser.py, and the win32rcparser directory (both"
        print "in the test suite) for an example of this module's usage."
    else:
        import pprint
        filename = sys.argv[1]
        if "-v" in sys.argv:
            RCParser.debugEnabled = 1
        print "Dumping all resources in '%s'" % filename
        resources = Parse(filename)
        for id, ddef in resources.dialogs.items():
            print "Dialog %s (%d controls)" % (id, len(ddef))
            pprint.pprint(ddef)
            print
        for id, sdef in resources.stringTable.items():
            print "String %s=%r" % (id, sdef.value)
            print
        for id, sdef in resources.bitmaps.items():
            print "Bitmap %s=%r" % (id, sdef)
            print
        for id, sdef in resources.icons.items():
            print "Icon %s=%r" % (id, sdef)
            print
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32serviceutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32serviceutil.py
new file mode 100644
index 0000000..be875286
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32serviceutil.py
@@ -0,0 +1,789 @@
+# General purpose service utilities, both for standard Python scripts,
+# and for for Python programs which run as services...
+#
+# Note that most utility functions here will raise win32api.error's
+# (which is == win32service.error, pywintypes.error, etc)
+# when things go wrong - eg, not enough permissions to hit the
+# registry etc.
+
+import win32service, win32api, win32con, winerror
+import sys, string, pywintypes, os
+
+error = "Python Service Utility Error"
+
+def LocatePythonServiceExe(exeName = None):
+    """Locate the EXE that hosts Python services and return its full path.
+
+    Search order: sys.executable for frozen apps, the name as given, the
+    per-EXE registration under HKLM\Software\Python, sys.prefix + sys.path,
+    then the global PATH.  Raises the module-level 'error' string when the
+    EXE cannot be found anywhere.
+    """
+    if not exeName and hasattr(sys, "frozen"):
+        # If py2exe etc calls this with no exeName, default is current exe.
+        return sys.executable
+
+    # Try and find the specified EXE somewhere.  If specifically registered,
+    # use it.  Otherwise look down sys.path, and the global PATH environment.
+    if exeName is None:
+        if os.path.splitext(win32service.__file__)[0].endswith("_d"):
+            # Debug build of pywin32 - use the debug host EXE.
+            exeName = "PythonService_d.exe"
+        else:
+            exeName = "PythonService.exe"
+    # See if it exists as specified
+    if os.path.isfile(exeName): return win32api.GetFullPathName(exeName)
+    baseName = os.path.splitext(os.path.basename(exeName))[0]
+    try:
+        exeName = win32api.RegQueryValue(win32con.HKEY_LOCAL_MACHINE,
+                                         "Software\\Python\\%s\\%s" % (baseName, sys.winver))
+        if os.path.isfile(exeName):
+            return exeName
+        # A stale registration is a hard error - don't silently fall back.
+        raise RuntimeError, "The executable '%s' is registered as the Python " \
+                            "service exe, but it does not exist as specified" \
+                            % exeName
+    except win32api.error:
+        # OK - not there - lets go a-searchin'
+        for path in [sys.prefix] + sys.path:
+            look = os.path.join(path, exeName)
+            if os.path.isfile(look):
+                return win32api.GetFullPathName(look)
+        # Try the global Path.
+        try:
+            return win32api.SearchPath(None, exeName)[0]
+        except win32api.error:
+            msg = "%s is not correctly registered\nPlease locate and run %s, and it will self-register\nThen run this service registration process again." % (exeName, exeName)
+            raise error, msg
+
+def _GetServiceShortName(longName):
+    # looks up a services name
+    # from the display name
+    # Thanks to Andy McKay for this code.
+    # Returns the short (registry key) name, or None if no installed
+    # service has the given display name (match is case-insensitive).
+    access = win32con.KEY_READ | win32con.KEY_ENUMERATE_SUB_KEYS | win32con.KEY_QUERY_VALUE
+    hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services", 0, access)
+    num = win32api.RegQueryInfoKey(hkey)[0]
+    longName = longName.lower()
+    # loop through number of subkeys
+    for x in range(0, num):
+    # find service name, open subkey
+        svc = win32api.RegEnumKey(hkey, x)
+        skey = win32api.RegOpenKey(hkey, svc, 0, access)
+        try:
+            # find display name
+            thisName = str(win32api.RegQueryValueEx(skey, "DisplayName")[0])
+            if thisName.lower() == longName:
+                # The subkey name is the short (internal) service name.
+                return svc
+        except win32api.error:
+            # in case there is no key called DisplayName
+            pass
+    return None
+
+# Open a service given either it's long or short name.
+def SmartOpenService(hscm, name, access):
+    # First try the name as the short (key) name; on a "no such service"
+    # style error, retry treating it as a display name.
+    try:
+        return win32service.OpenService(hscm, name, access)
+    except win32api.error, details:
+        if details[0] not in [winerror.ERROR_SERVICE_DOES_NOT_EXIST,
+                              winerror.ERROR_INVALID_NAME]:
+            raise
+        name = _GetServiceShortName(name)
+        if name is None:
+            # Not a display name either - re-raise the original error.
+            raise
+        return win32service.OpenService(hscm, name, access)
+
+def LocateSpecificServiceExe(serviceName):
+    # Given the name of a specific service, return the .EXE name _it_ uses
+    # (which may or may not be the Python Service EXE
+    hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\%s" % (serviceName), 0, win32con.KEY_ALL_ACCESS)
+    try:
+        return win32api.RegQueryValueEx(hkey, "ImagePath")[0]
+    finally:
+        hkey.Close()
+
+def InstallPerfmonForService(serviceName, iniName, dllName = None):
+    # If no DLL name, look it up in the INI file name
+    if not dllName: # May be empty string!
+        dllName = win32api.GetProfileVal("Python", "dll", "", iniName)
+    # Still not found - look for the standard one in the same dir as win32service.pyd
+    if not dllName:
+        try:
+            tryName = os.path.join(os.path.split(win32service.__file__)[0], "perfmondata.dll")
+            if os.path.isfile(tryName):
+                dllName = tryName
+        except AttributeError:
+            # Frozen app? - anyway, can't find it!
+            pass
+    if not dllName:
+        raise ValueError, "The name of the performance DLL must be available"
+    dllName = win32api.GetFullPathName(dllName)
+    # Now setup all the required "Performance" entries.
+    hkey = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services\\%s" % (serviceName), 0, win32con.KEY_ALL_ACCESS)
+    try:
+        subKey = win32api.RegCreateKey(hkey, "Performance")
+        try:
+            win32api.RegSetValueEx(subKey, "Library", 0, win32con.REG_SZ, dllName)
+            win32api.RegSetValueEx(subKey, "Open", 0, win32con.REG_SZ, "OpenPerformanceData")
+            win32api.RegSetValueEx(subKey, "Close", 0, win32con.REG_SZ, "ClosePerformanceData")
+            win32api.RegSetValueEx(subKey, "Collect", 0, win32con.REG_SZ, "CollectPerformanceData")
+        finally:
+            win32api.RegCloseKey(subKey)
+    finally:
+        win32api.RegCloseKey(hkey)
+    # Now do the "Lodctr" thang...
+
+    try:
+        import perfmon
+        path, fname = os.path.split(iniName)
+        oldPath = os.getcwd()
+        if path:
+            os.chdir(path)
+        try:
+            perfmon.LoadPerfCounterTextStrings("python.exe " + fname)
+        finally:
+            os.chdir(oldPath)
+    except win32api.error, details:
+        print "The service was installed OK, but the performance monitor"
+        print "data could not be loaded.", details
+
+def _GetCommandLine(exeName, exeArgs):
+    if exeArgs is not None:
+        return exeName + " " + exeArgs
+    else:
+        return exeName
+
+def InstallService(pythonClassString, serviceName, displayName, startType = None, errorControl = None, bRunInteractive = 0, serviceDeps = None, userName = None, password = None, exeName = None, perfMonIni = None, perfMonDll = None, exeArgs = None, description = None):
+    """Create the named service in the SCM and register its Python class.
+
+    Also writes the optional description and, when perfMonIni is given,
+    installs the perfmon counter data.  Raises win32service.error on
+    failure (eg, service already exists, access denied).
+    """
+    # Handle the default arguments.
+    if startType is None:
+        startType = win32service.SERVICE_DEMAND_START
+    serviceType = win32service.SERVICE_WIN32_OWN_PROCESS
+    if bRunInteractive:
+        serviceType = serviceType | win32service.SERVICE_INTERACTIVE_PROCESS
+    if errorControl is None:
+        errorControl = win32service.SERVICE_ERROR_NORMAL
+
+    exeName = '"%s"' % LocatePythonServiceExe(exeName) # None here means use default PythonService.exe
+    commandLine = _GetCommandLine(exeName, exeArgs)
+    hscm = win32service.OpenSCManager(None,None,win32service.SC_MANAGER_ALL_ACCESS)
+    try:
+        hs = win32service.CreateService(hscm,
+                                serviceName,
+                                displayName,
+                                win32service.SERVICE_ALL_ACCESS,         # desired access
+                    serviceType,        # service type
+                    startType,
+                    errorControl,       # error control type
+                    commandLine,
+                    None,
+                    0,
+                    serviceDeps,
+                    userName,
+                    password)
+        if description is not None:
+            try:
+                win32service.ChangeServiceConfig2(hs,win32service.SERVICE_CONFIG_DESCRIPTION,description)
+            except NotImplementedError:
+                pass    ## ChangeServiceConfig2 and description do not exist on NT
+        win32service.CloseServiceHandle(hs)
+    finally:
+        win32service.CloseServiceHandle(hscm)
+    InstallPythonClassString(pythonClassString, serviceName)
+    # If I have performance monitor info to install, do that.
+    if perfMonIni is not None:
+        InstallPerfmonForService(serviceName, perfMonIni, perfMonDll)
+
+def ChangeServiceConfig(pythonClassString, serviceName, startType = None, errorControl = None, bRunInteractive = 0, serviceDeps = None, userName = None, password = None, exeName = None, displayName = None, perfMonIni = None, perfMonDll = None, exeArgs = None, description = None):
+    """Update the configuration of an existing service.
+
+    startType/errorControl left as None mean "no change"; the hosting EXE
+    and command line are always re-located and re-written.  Perfmon
+    counters are unloaded first and re-installed when perfMonIni is given.
+    """
+    # Before doing anything, remove any perfmon counters.
+    try:
+        import perfmon
+        perfmon.UnloadPerfCounterTextStrings("python.exe "+serviceName)
+    except (ImportError, win32api.error):
+        pass
+
+    # The EXE location may have changed
+    exeName = '"%s"' % LocatePythonServiceExe(exeName)
+
+    # Handle the default arguments.
+    if startType is None: startType = win32service.SERVICE_NO_CHANGE
+    if errorControl is None: errorControl = win32service.SERVICE_NO_CHANGE
+
+    hscm = win32service.OpenSCManager(None,None,win32service.SC_MANAGER_ALL_ACCESS)
+    serviceType = win32service.SERVICE_WIN32_OWN_PROCESS
+    if bRunInteractive:
+        serviceType = serviceType | win32service.SERVICE_INTERACTIVE_PROCESS
+    commandLine = _GetCommandLine(exeName, exeArgs)
+    try:
+        hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS)
+        try:
+
+            win32service.ChangeServiceConfig(hs,
+                serviceType,  # service type
+                startType,
+                errorControl,       # error control type
+                commandLine,
+                None,
+                0,
+                serviceDeps,
+                userName,
+                password,
+                displayName)
+            if description is not None:
+                try:
+                    win32service.ChangeServiceConfig2(hs,win32service.SERVICE_CONFIG_DESCRIPTION,description)
+                except NotImplementedError:
+                    pass    ## ChangeServiceConfig2 and description do not exist on NT
+
+        finally:
+            win32service.CloseServiceHandle(hs)
+    finally:
+        win32service.CloseServiceHandle(hscm)
+    InstallPythonClassString(pythonClassString, serviceName)
+    # If I have performance monitor info to install, do that.
+    if perfMonIni is not None:
+        InstallPerfmonForService(serviceName, perfMonIni, perfMonDll)
+
+def InstallPythonClassString(pythonClassString, serviceName):
+    # Now setup our Python specific entries.
+    if pythonClassString:
+        key = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\%s\\PythonClass" % serviceName)
+        try:
+            win32api.RegSetValue(key, None, win32con.REG_SZ, pythonClassString);
+        finally:
+            win32api.RegCloseKey(key)
+
+# Utility functions for Services, to allow persistant properties.
+def SetServiceCustomOption(serviceName, option, value):
+    # First param may also be a service class/instance - this allows
+    # services to pass "self" (matching GetServiceCustomOption).
+    try:
+        serviceName = serviceName._svc_name_
+    except AttributeError:
+        pass
+    key = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\%s\\Parameters" % serviceName)
+    try:
+        # Integers are stored as REG_DWORD, everything else as REG_SZ.
+        if type(value)==type(0):
+            win32api.RegSetValueEx(key, option, 0, win32con.REG_DWORD, value);
+        else:
+            win32api.RegSetValueEx(key, option, 0, win32con.REG_SZ, value);
+    finally:
+        win32api.RegCloseKey(key)
+
+def GetServiceCustomOption(serviceName, option, defaultValue = None):
+    # First param may also be a service class/instance.
+    # This allows services to pass "self"
+    try:
+        serviceName = serviceName._svc_name_
+    except AttributeError:
+        pass
+    key = win32api.RegCreateKey(win32con.HKEY_LOCAL_MACHINE, "System\\CurrentControlSet\\Services\\%s\\Parameters" % serviceName)
+    try:
+        try:
+            return win32api.RegQueryValueEx(key, option)[0]
+        except win32api.error:  # No value.
+            # Option was never set - hand back the caller's default.
+            return defaultValue
+    finally:
+        win32api.RegCloseKey(key)
+
+
+def RemoveService(serviceName):
+    """Delete the named service, also unloading its perfmon counters and
+    event-log source registration on a best-effort basis."""
+    try:
+        import perfmon
+        perfmon.UnloadPerfCounterTextStrings("python.exe "+serviceName)
+    except (ImportError, win32api.error):
+        pass
+
+    hscm = win32service.OpenSCManager(None,None,win32service.SC_MANAGER_ALL_ACCESS)
+    try:
+        hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS)
+        win32service.DeleteService(hs)
+        win32service.CloseServiceHandle(hs)
+    finally:
+        win32service.CloseServiceHandle(hscm)
+
+    import win32evtlogutil
+    try:
+        win32evtlogutil.RemoveSourceFromRegistry(serviceName)
+    except win32api.error:
+        # Source was never registered - nothing to clean up.
+        pass
+
+def ControlService(serviceName, code, machine = None):
+    # Send the given SERVICE_CONTROL_* code to the service and return the
+    # resulting service status tuple.
+    hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_ALL_ACCESS)
+    try:
+
+        hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS)
+        try:
+            status = win32service.ControlService(hs, code)
+        finally:
+            win32service.CloseServiceHandle(hs)
+    finally:
+        win32service.CloseServiceHandle(hscm)
+    return status
+
+def __FindSvcDeps(findName):
+    if type(findName) is pywintypes.UnicodeType: findName = str(findName)
+    dict = {}
+    k = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Services")
+    num = 0
+    while 1:
+        try:
+            svc = win32api.RegEnumKey(k, num)
+        except win32api.error:
+            break
+        num = num + 1
+        sk = win32api.RegOpenKey(k, svc)
+        try:
+            deps, typ = win32api.RegQueryValueEx(sk, "DependOnService")
+        except win32api.error:
+            deps = ()
+        for dep in deps:
+            dep = string.lower(dep)
+            dep_on = dict.get(dep, [])
+            dep_on.append(svc)
+            dict[dep]=dep_on
+
+    return __ResolveDeps(findName, dict)
+
+
+def __ResolveDeps(findName, dict):
+    items = dict.get(string.lower(findName), [])
+    retList = []
+    for svc in items:
+        retList.insert(0, svc)
+        retList = __ResolveDeps(svc, dict) + retList
+    return retList
+
+def WaitForServiceStatus(serviceName, status, waitSecs, machine=None):
+    """Waits for the service to return the specified status.  You
+    should have already requested the service to enter that state"""
+    # Poll every 250ms for up to waitSecs seconds; raise
+    # ERROR_SERVICE_REQUEST_TIMEOUT if the state is never reached.
+    for i in range(waitSecs*4):
+        now_status = QueryServiceStatus(serviceName, machine)[1]
+        if now_status == status:
+            break
+        win32api.Sleep(250)
+    else:
+        raise pywintypes.error, (winerror.ERROR_SERVICE_REQUEST_TIMEOUT, "QueryServiceStatus", win32api.FormatMessage(winerror.ERROR_SERVICE_REQUEST_TIMEOUT)[:-2])
+    
+def __StopServiceWithTimeout(hs, waitSecs = 30):
+    # Request a stop on the open service handle and poll (1s intervals) for
+    # up to waitSecs seconds until it reports SERVICE_STOPPED.  An
+    # already-stopped service is not an error; a timeout raises
+    # ERROR_SERVICE_REQUEST_TIMEOUT.
+    try:
+        status = win32service.ControlService(hs, win32service.SERVICE_CONTROL_STOP)
+    except pywintypes.error, (hr, name, msg):
+        if hr!=winerror.ERROR_SERVICE_NOT_ACTIVE:
+            raise win32service.error, (hr, name, msg)
+    for i in range(waitSecs):
+        status = win32service.QueryServiceStatus(hs)
+        if status[1] == win32service.SERVICE_STOPPED:
+            break
+        win32api.Sleep(1000)
+    else:
+        raise pywintypes.error, (winerror.ERROR_SERVICE_REQUEST_TIMEOUT, "ControlService", win32api.FormatMessage(winerror.ERROR_SERVICE_REQUEST_TIMEOUT)[:-2])
+
+
+def StopServiceWithDeps(serviceName, machine = None, waitSecs = 30):
+    # Stop a service, recursively stopping dependent services first so the
+    # target service can actually stop.
+    hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_ALL_ACCESS)
+    try:
+        deps = __FindSvcDeps(serviceName)
+        for dep in deps:
+            hs = win32service.OpenService(hscm, dep, win32service.SERVICE_ALL_ACCESS)
+            try:
+                __StopServiceWithTimeout(hs, waitSecs)
+            finally:
+                win32service.CloseServiceHandle(hs)
+        # Now my service!
+        hs = win32service.OpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS)
+        try:
+            __StopServiceWithTimeout(hs, waitSecs)
+        finally:
+            win32service.CloseServiceHandle(hs)
+
+    finally:
+        win32service.CloseServiceHandle(hscm)
+
+
+def StopService(serviceName, machine = None):
+    # Ask the SCM to stop the service; returns the resulting status tuple.
+    return ControlService(serviceName, win32service.SERVICE_CONTROL_STOP, machine)
+
+def StartService(serviceName, args = None, machine = None):
+    # Start the named service, passing 'args' through to the service's run
+    # method.  Raises win32service.error on failure.
+    hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_ALL_ACCESS)
+    try:
+
+        hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_ALL_ACCESS)
+        try:
+            win32service.StartService(hs, args)
+        finally:
+            win32service.CloseServiceHandle(hs)
+    finally:
+        win32service.CloseServiceHandle(hscm)
+
+def RestartService(serviceName, args = None, waitSeconds = 30, machine = None):
+    "Stop the service, and then start it again (with some tolerance for allowing it to stop.)"
+    try:
+        StopService(serviceName, machine)
+    except pywintypes.error, (hr, name, msg):
+        # Allow only "service not running" error
+        if hr!=winerror.ERROR_SERVICE_NOT_ACTIVE:
+            raise win32service.error, (hr, name, msg)
+    # Give it a few goes, as the service may take time to stop
+    for i in range(waitSeconds):
+        try:
+            StartService(serviceName, args, machine)
+            break
+        except pywintypes.error, (hr, name, msg):
+            # "Already running" here means the old instance hasn't finished
+            # stopping yet - sleep and retry; anything else is a real error.
+            if hr!=winerror.ERROR_SERVICE_ALREADY_RUNNING:
+                raise
+            win32api.Sleep(1000)
+    else:
+        print "Gave up waiting for the old service to stop!"
+
+def _DebugCtrlHandler(evt):
+    # Console control handler used while debugging a service: Ctrl+C or
+    # Ctrl+Break stops the global debug service; other events are declined
+    # (returning False passes them to the next handler).
+    if evt in (win32con.CTRL_C_EVENT, win32con.CTRL_BREAK_EVENT):
+        assert g_debugService
+        print "Stopping debug service."
+        g_debugService.SvcStop()
+        return True
+    return False
+
+def DebugService(cls, argv = []):
+    # Run a service in "debug" mode.  Re-implements what pythonservice.exe
+    # does when it sees a "-debug" param.
+    # Currently only used by "frozen" (ie, py2exe) programs (but later may
+    # end up being used for all services should we ever remove
+    # pythonservice.exe)
+    import servicemanager
+    global g_debugService
+
+    print "Debugging service %s - press Ctrl+C to stop." % (cls._svc_name_,)
+    servicemanager.Debugging(True)
+    servicemanager.PrepareToHostSingle(cls)
+    g_debugService = cls(argv)
+    # Setup a ctrl+c handler to simulate a "stop"
+    win32api.SetConsoleCtrlHandler(_DebugCtrlHandler, True)
+    try:
+        g_debugService.SvcRun()
+    finally:
+        # Always restore console state and debugging flags, even if the
+        # service's run method raised.
+        win32api.SetConsoleCtrlHandler(_DebugCtrlHandler, False)
+        servicemanager.Debugging(False)
+        g_debugService = None
+
+def GetServiceClassString(cls, argv = None):
+    """Return the "module.ClassName" string used to register the service.
+
+    For classes defined in __main__, the module part becomes the full path
+    of the script (minus extension) so PythonService.exe can import it.
+    """
+    if argv is None:
+        argv = sys.argv
+    import pickle, os
+    modName = pickle.whichmodule(cls, cls.__name__)
+    if modName == '__main__':
+        try:
+            fname = win32api.GetFullPathName(argv[0])
+            path = os.path.split(fname)[0]
+            # Eaaaahhhh - sometimes this will be a short filename, which causes
+            # problems with 1.5.1 and the silly filename case rule.
+            # Get the long name
+            fname = os.path.join(path, win32api.FindFiles(fname)[0][8])
+        except win32api.error:
+            raise error, "Could not resolve the path name '%s' to a full path" % (argv[0])
+        modName = os.path.splitext(fname)[0]
+    return modName + "." + cls.__name__
+
+def QueryServiceStatus(serviceName, machine=None):
+    # Return the status tuple for the named service; only CONNECT and
+    # QUERY_STATUS rights are requested, so this works for non-admin users.
+    hscm = win32service.OpenSCManager(machine,None,win32service.SC_MANAGER_CONNECT)
+    try:
+
+        hs = SmartOpenService(hscm, serviceName, win32service.SERVICE_QUERY_STATUS)
+        try:
+            status = win32service.QueryServiceStatus(hs)
+        finally:
+            win32service.CloseServiceHandle(hs)
+    finally:
+        win32service.CloseServiceHandle(hscm)
+    return status
+
+def usage():
+    try:
+        fname = os.path.split(sys.argv[0])[1]
+    except:
+        fname = sys.argv[0]
+    print "Usage: '%s [options] install|update|remove|start [...]|stop|restart [...]|debug [...]'" % fname
+    print "Options for 'install' and 'update' commands only:"
+    print " --username domain\username : The Username the service is to run under"
+    print " --password password : The password for the username"
+    print " --startup [manual|auto|disabled] : How the service starts, default = manual"
+    print " --interactive : Allow the service to interact with the desktop."
+    print " --perfmonini file: .ini file to use for registering performance monitor data"
+    print " --perfmondll file: .dll file to use when querying the service for"
+    print "   performance data, default = perfmondata.dll"
+    print "Options for 'start' and 'stop' commands only:"
+    print " --wait seconds: Wait for the service to actually start or stop."
+    print "                 If you specify --wait with the 'stop' option, the service"
+    print "                 and all dependent services will be stopped, each waiting"
+    print "                 the specified period."
+    sys.exit(1)
+
+def HandleCommandLine(cls, serviceClassString = None, argv = None, customInstallOptions = "", customOptionHandler = None):
+    """Utility function allowing services to process the command line.
+
+    Allows standard commands such as 'start', 'stop', 'debug', 'install' etc.
+
+    Install supports 'standard' command line options prefixed with '--', such as
+    --username, --password, etc.  In addition,
+    the function allows custom command line options to be handled by the calling function.
+
+    Returns 0 on success, a win32 error code or -1 on failure.
+    """
+    err = 0
+
+    if argv is None: argv = sys.argv
+
+    if len(argv)<=1:
+        usage()
+
+    serviceName = cls._svc_name_
+    serviceDisplayName = cls._svc_display_name_
+    if serviceClassString is None:
+        serviceClassString = GetServiceClassString(cls)
+
+    # Pull apart the command line
+    import getopt
+    try:
+        opts, args = getopt.getopt(argv[1:], customInstallOptions,["password=","username=","startup=","perfmonini=", "perfmondll=", "interactive", "wait="])
+    except getopt.error, details:
+        print details
+        usage()
+    userName = None
+    password = None
+    perfMonIni = perfMonDll = None
+    startup = None
+    interactive = None
+    waitSecs = 0
+    for opt, val in opts:
+        if opt=='--username':
+            userName = val
+        elif opt=='--password':
+            password = val
+        elif opt=='--perfmonini':
+            perfMonIni = val
+        elif opt=='--perfmondll':
+            perfMonDll = val
+        elif opt=='--interactive':
+            interactive = 1
+        elif opt=='--startup':
+            map = {"manual": win32service.SERVICE_DEMAND_START, "auto" : win32service.SERVICE_AUTO_START, "disabled": win32service.SERVICE_DISABLED}
+            try:
+                startup = map[string.lower(val)]
+            except KeyError:
+                print "'%s' is not a valid startup option" % val
+        elif opt=='--wait':
+            try:
+                waitSecs = int(val)
+            except ValueError:
+                print "--wait must specify an integer number of seconds."
+                usage()
+
+    # NOTE(review): raises IndexError when only options and no command are
+    # given - presumably acceptable for a command-line tool; confirm.
+    arg=args[0]
+    knownArg = 0
+    # First we process all arguments which pass additional args on
+    if arg=="start":
+        knownArg = 1
+        print "Starting service %s" % (serviceName)
+        try:
+            StartService(serviceName, args[1:])
+            if waitSecs:
+                WaitForServiceStatus(serviceName, win32service.SERVICE_RUNNING, waitSecs)
+        except win32service.error, (hr, fn, msg):
+            print "Error starting service: %s" % msg
+
+    elif arg=="restart":
+        knownArg = 1
+        print "Restarting service %s" % (serviceName)
+        RestartService(serviceName, args[1:])
+        if waitSecs:
+            WaitForServiceStatus(serviceName, win32service.SERVICE_RUNNING, waitSecs)
+
+    elif arg=="debug":
+        knownArg = 1
+        if not hasattr(sys, "frozen"):
+            # non-frozen services use pythonservice.exe which handles a
+            # -debug option
+            svcArgs = string.join(args[1:])
+            exeName = LocateSpecificServiceExe(serviceName)
+            try:
+                os.system("%s -debug %s %s" % (exeName, serviceName, svcArgs))
+            # ^C is used to kill the debug service.  Sometimes Python also gets
+            # interrupted - ignore it...
+            except KeyboardInterrupt:
+                pass
+        else:
+            # py2exe services don't use pythonservice - so we simulate
+            # debugging here.
+            DebugService(cls, args)
+
+    if not knownArg and len(args)<>1:
+        usage() # the rest of the cmds don't take addn args
+
+    if arg=="install":
+        knownArg = 1
+        try:
+            serviceDeps = cls._svc_deps_
+        except AttributeError:
+            serviceDeps = None
+        try:
+            exeName = cls._exe_name_
+        except AttributeError:
+            exeName = None # Default to PythonService.exe
+        try:
+            exeArgs = cls._exe_args_
+        except AttributeError:
+            exeArgs = None
+        try:
+            description = cls._svc_description_
+        except AttributeError:
+            description = None
+        print "Installing service %s" % (serviceName,)
+        # Note that we install the service before calling the custom option
+        # handler, so if the custom handler fails, we have an installed service (from NT's POV)
+        # but is unlikely to work, as the Python code controlling it failed.  Therefore
+        # we remove the service if the first bit works, but the second doesnt!
+        try:
+            InstallService(serviceClassString, serviceName, serviceDisplayName, serviceDeps = serviceDeps, startType=startup, bRunInteractive=interactive, userName=userName,password=password, exeName=exeName, perfMonIni=perfMonIni,perfMonDll=perfMonDll,exeArgs=exeArgs,description=description)
+            if customOptionHandler:
+                apply( customOptionHandler, (opts,) )
+            print "Service installed"
+        except win32service.error, (hr, fn, msg):
+            if hr==winerror.ERROR_SERVICE_EXISTS:
+                arg = "update" # Fall through to the "update" param!
+            else:
+                print "Error installing service: %s (%d)" % (msg, hr)
+                err = hr
+        except ValueError, msg: # Can be raised by custom option handler.
+            print "Error installing service: %s" % str(msg)
+            err = -1
+            # xxx - maybe I should remove after _any_ failed install - however,
+            # xxx - it may be useful to help debug to leave the service as it failed.
+            # xxx - We really _must_ remove as per the comments above...
+            # As we failed here, remove the service, so the next installation
+            # attempt works.
+            try:
+                RemoveService(serviceName)
+            except win32api.error:
+                print "Warning - could not remove the partially installed service."
+
+    # NB: deliberately 'if' (not 'elif') so a failed install with
+    # ERROR_SERVICE_EXISTS above falls through to the update path.
+    if arg == "update":
+        knownArg = 1
+        try:
+            serviceDeps = cls._svc_deps_
+        except AttributeError:
+            serviceDeps = None
+        try:
+            exeName = cls._exe_name_
+        except AttributeError:
+            exeName = None # Default to PythonService.exe
+        try:
+            exeArgs = cls._exe_args_
+        except AttributeError:
+            exeArgs = None
+        try:
+            description=cls._svc_description_
+        except AttributeError:
+            description=None
+        print "Changing service configuration"
+        try:
+            ChangeServiceConfig(serviceClassString, serviceName, serviceDeps = serviceDeps, startType=startup, bRunInteractive=interactive, userName=userName,password=password, exeName=exeName, displayName = serviceDisplayName, perfMonIni=perfMonIni,perfMonDll=perfMonDll,exeArgs=exeArgs,description=description)
+            if customOptionHandler:
+                apply( customOptionHandler, (opts,) )
+            print "Service updated"
+        except win32service.error, (hr, fn, msg):
+            print "Error changing service configuration: %s (%d)" % (msg,hr)
+            err = hr
+
+    elif arg=="remove":
+        knownArg = 1
+        print "Removing service %s" % (serviceName)
+        try:
+            RemoveService(serviceName)
+            print "Service removed"
+        except win32service.error, (hr, fn, msg):
+            print "Error removing service: %s (%d)" % (msg,hr)
+            err = hr
+    elif arg=="stop":
+        knownArg = 1
+        print "Stopping service %s" % (serviceName)
+        try:
+            if waitSecs:
+                StopServiceWithDeps(serviceName, waitSecs = waitSecs)
+            else:
+                StopService(serviceName)
+        except win32service.error, (hr, fn, msg):
+            print "Error stopping service: %s (%d)" % (msg,hr)
+            err = hr
+    if not knownArg:
+        err = -1
+        print "Unknown command - '%s'" % arg
+        usage()
+    return err
+
+#
+# Useful base class to build services from.
+#
+class ServiceFramework:
+    """Base class for Python services hosted by PythonService.exe.
+
+    Subclasses must provide _svc_name_ and _svc_display_name_, and
+    implement SvcDoRun (and usually SvcStop).
+    """
+    # Required Attributes:
+    # _svc_name_ = The service name
+    # _svc_display_name_ = The service display name
+
+    # Optional Attributes:
+    _svc_deps_ = None        # sequence of service names on which this depends
+    _exe_name_ = None        # Default to PythonService.exe
+    _exe_args_ = None        # Default to no arguments
+    _svc_description_ = None # Only exists on Windows 2000 or later, ignored on windows NT
+
+    def __init__(self, args):
+        import servicemanager
+        # Register our control handler with the hosting EXE; args[0] is the
+        # service name as passed by the SCM.
+        self.ssh = servicemanager.RegisterServiceCtrlHandler(args[0], self.ServiceCtrlHandler)
+        servicemanager.SetEventSourceName(self._svc_name_)
+        self.checkPoint = 0
+
+    def GetAcceptedControls(self):
+        # Setup the service controls we accept based on our attributes
+        accepted = 0
+        if hasattr(self, "SvcStop"): accepted = accepted | win32service.SERVICE_ACCEPT_STOP
+        if hasattr(self, "SvcPause") and hasattr(self, "SvcContinue"):
+            accepted = accepted | win32service.SERVICE_ACCEPT_PAUSE_CONTINUE
+        if hasattr(self, "SvcShutdown"): accepted = accepted | win32service.SERVICE_ACCEPT_SHUTDOWN
+        return accepted
+
+    def ReportServiceStatus(self, serviceStatus, waitHint = 5000, win32ExitCode = 0, svcExitCode = 0):
+        """Report the given SERVICE_* state to the service control manager."""
+        if self.ssh is None: # Debugging!
+            return
+        if serviceStatus == win32service.SERVICE_START_PENDING:
+            accepted = 0
+        else:
+            accepted = self.GetAcceptedControls()
+
+        if serviceStatus in [win32service.SERVICE_RUNNING,  win32service.SERVICE_STOPPED]:
+            checkPoint = 0
+        else:
+            # Pending states must report an incrementing checkpoint so the
+            # SCM knows we are making progress.
+            self.checkPoint = self.checkPoint + 1
+            checkPoint = self.checkPoint
+
+        # Now report the status to the control manager
+        status = (win32service.SERVICE_WIN32_OWN_PROCESS,
+                 serviceStatus,
+                 accepted, # dwControlsAccepted,
+                 win32ExitCode, # dwWin32ExitCode;
+                 svcExitCode, # dwServiceSpecificExitCode;
+                 checkPoint, # dwCheckPoint;
+                 waitHint)
+        win32service.SetServiceStatus( self.ssh, status)
+
+    def SvcInterrogate(self):
+        # Assume we are running, and everyone is happy.
+        self.ReportServiceStatus(win32service.SERVICE_RUNNING)
+
+    def SvcOther(self, control):
+        # Fallback for control codes with no dedicated Svc* handler.
+        print "Unknown control status - %d" % control
+
+    def ServiceCtrlHandler(self, control):
+        # Dispatch an SCM control code to the matching Svc* method.
+        if control==win32service.SERVICE_CONTROL_STOP:
+            self.SvcStop()
+        elif control==win32service.SERVICE_CONTROL_PAUSE:
+            self.SvcPause()
+        elif control==win32service.SERVICE_CONTROL_CONTINUE:
+            self.SvcContinue()
+        elif control==win32service.SERVICE_CONTROL_INTERROGATE:
+            self.SvcInterrogate()
+        elif control==win32service.SERVICE_CONTROL_SHUTDOWN:
+            self.SvcShutdown()
+        else:
+            self.SvcOther(control)
+
+    def SvcRun(self):
+        self.ReportServiceStatus(win32service.SERVICE_RUNNING)
+        self.SvcDoRun()
+        # Once SvcDoRun terminates, the service has stopped.
+        # We tell the SCM the service is still stopping - the C framework
+        # will automatically tell the SCM it has stopped when this returns.
+        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32timezone.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32timezone.py
new file mode 100644
index 0000000..3bd2f8c3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32timezone.py
@@ -0,0 +1,293 @@
+# -*- coding: UTF-8 -*-
+
+"""
+win32timezone:
+	Module for handling datetime.tzinfo time zones using the windows
+registry for time zone information.  The time zone names are dependent
+on the registry entries defined by the operating system.
+
+	Currently, this module only supports the Windows NT line of products
+and not Windows 95/98/Me.
+
+	This module may be tested using the doctest module.
+
+	Written by Jason R. Coombs (jaraco@jaraco.com).
+	Copyright © 2003.
+	All Rights Reserved.	
+
+	To use this time zone module with the datetime module, simply pass
+the TimeZoneInfo object to the datetime constructor.  For example,
+
+>>> import win32timezone, datetime
+>>> assert 'Mountain Standard Time' in win32timezone.GetTimeZoneNames()
+>>> tzi = TimeZoneInfo( 'Mountain Standard Time' )
+>>> now = datetime.datetime.now( tzi )
+
+	The now object is now a time-zone aware object, and daylight savings-
+aware methods may be called on it.
+
+>>> now.utcoffset() in ( datetime.timedelta(-1, 61200), datetime.timedelta(-1, 64800) )
+True
+
+(note that the result of utcoffset call will be different based on when now was
+generated, unless standard time is always used)
+
+>>> now = datetime.datetime.now( TimeZoneInfo( 'Mountain Standard Time', True ) )
+>>> now.utcoffset()
+datetime.timedelta(-1, 61200)
+
+>>> aug2 = datetime.datetime( 2003, 8, 2, tzinfo = tzi )
+>>> aug2.utctimetuple()
+(2003, 8, 2, 6, 0, 0, 5, 214, 0)
+>>> nov2 = datetime.datetime( 2003, 11, 2, tzinfo = tzi )
+>>> nov2.utctimetuple()
+(2003, 11, 2, 7, 0, 0, 6, 306, 0)
+
+To convert from one timezone to another, just use the astimezone method.
+
+>>> aug2.isoformat()
+'2003-08-02T00:00:00-06:00'
+>>> aug2est = aug2.astimezone( win32timezone.TimeZoneInfo( 'Eastern Standard Time' ) )
+>>> aug2est.isoformat()
+'2003-08-02T02:00:00-04:00'
+
+calling the displayName member will return the display name as set in the
+registry.
+
+>>> est = win32timezone.TimeZoneInfo( 'Eastern Standard Time' )
+>>> est.displayName
+u'(GMT-05:00) Eastern Time (US & Canada)'
+
+>>> gmt = win32timezone.TimeZoneInfo( 'GMT Standard Time', True )
+>>> gmt.displayName
+u'(GMT) Greenwich Mean Time : Dublin, Edinburgh, Lisbon, London'
+
+TimeZoneInfo now supports being pickled and comparison
+>>> import pickle
+>>> tz = win32timezone.TimeZoneInfo( 'China Standard Time' )
+>>> tz == pickle.loads( pickle.dumps( tz ) )
+True
+"""
+from __future__ import generators
+
+__author__ = 'Jason R. Coombs <jaraco@jaraco.com>'
+__version__ = '$Revision: 1.5 $'[11:-2]
+__vssauthor__ = '$Author: mhammond $'[9:-2]
+__date__ = '$Modtime: 04-04-14 10:52 $'[10:-2]
+
+import os, _winreg, struct, datetime
+
class TimeZoneInfo( datetime.tzinfo ):
	"""
	Main class for handling win32 time zones.
	Usage:
		TimeZoneInfo( <Time Zone Standard Name>, [<Fix Standard Time>] )
	If <Fix Standard Time> evaluates to True, daylight savings time is calculated in the same
		way as standard time.
	"""

	# this key works for WinNT+, but not for the Win95 line.
	tzRegKey = r'SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones'

	def __init__( self, timeZoneName, fixedStandardTime=False ):
		self.timeZoneName = timeZoneName
		key = self._FindTimeZoneKey()
		self._LoadInfoFromKey( key )
		self.fixedStandardTime = fixedStandardTime

	def _FindTimeZoneKey( self ):
		"""Find and open the registry key for the time zone name (self.timeZoneName).

		Raises ValueError if no matching key exists."""
		# for multi-language compatibility, match the time zone name in the
		# "Std" key of the time zone key.
		zoneNames = dict( GetIndexedTimeZoneNames( 'Std' ) )
		# Also match the time zone key name itself, to be compatible with
		# English-based hard-coded time zones.
		timeZoneName = zoneNames.get( self.timeZoneName, self.timeZoneName )
		tzRegKeyPath = os.path.join( self.tzRegKey, timeZoneName )
		try:
			key = _winreg.OpenKeyEx( _winreg.HKEY_LOCAL_MACHINE, tzRegKeyPath )
		except EnvironmentError:
			# narrowed from a bare except: a missing key raises WindowsError,
			# which is an EnvironmentError subclass.
			raise ValueError( 'Timezone Name %s not found.' % timeZoneName )
		return key

	def __getinitargs__( self ):
		# BUG FIX: include fixedStandardTime so pickling round-trips the
		# complete state.  Previously only the name was saved, so an
		# unpickled fixed-standard-time zone silently reverted to
		# fixedStandardTime=False (and compared unequal via __cmp__).
		return ( self.timeZoneName, self.fixedStandardTime )

	def _LoadInfoFromKey( self, key ):
		"""Loads the information from an opened time zone registry key
		into relevant fields of this TZI object"""
		self.displayName = _winreg.QueryValueEx( key, "Display" )[0]
		self.standardName = _winreg.QueryValueEx( key, "Std" )[0]
		self.daylightName = _winreg.QueryValueEx( key, "Dlt" )[0]
		# TZI contains a structure of time zone information and is similar to
		#  TIME_ZONE_INFORMATION described in the Windows Platform SDK
		winTZI, type = _winreg.QueryValueEx( key, "TZI" )
		winTZI = struct.unpack( '3l8h8h', winTZI )
		makeMinuteTimeDelta = lambda x: datetime.timedelta( minutes = x )
		self.bias, self.standardBiasOffset, self.daylightBiasOffset = \
				   map( makeMinuteTimeDelta, winTZI[:3] )
		# daylightEnd and daylightStart are 8-tuples representing a Win32 SYSTEMTIME structure
		self.daylightEnd, self.daylightStart = winTZI[3:11], winTZI[11:19]

	def __repr__( self ):
		result = '%s( %s' % ( self.__class__.__name__, repr( self.timeZoneName ) )
		if self.fixedStandardTime:
			result += ', True'
		result += ' )'
		return result

	def __str__( self ):
		return self.displayName

	def tzname( self, dt ):
		"Returns the daylight or standard zone name, as appropriate for dt."
		if self.dst( dt ) == self.daylightBiasOffset:
			return self.daylightName
		# BUG FIX: default to the standard name.  The previous version only
		# assigned a result when dst() equalled one of the two offsets,
		# raising UnboundLocalError otherwise.
		return self.standardName

	def _getStandardBias( self ):
		return self.bias + self.standardBiasOffset
	standardBias = property( _getStandardBias )

	def _getDaylightBias( self ):
		return self.bias + self.daylightBiasOffset
	daylightBias = property( _getDaylightBias )

	def utcoffset( self, dt ):
		"Calculates the utcoffset according to the datetime.tzinfo spec"
		if dt is None: return
		return -( self.bias + self.dst( dt ) )

	def dst( self, dt ):
		"Calculates the daylight savings offset according to the datetime.tzinfo spec"
		if dt is None: return
		assert dt.tzinfo is self
		result = self.standardBiasOffset

		# NOTE(review): this assumes daylightStart < daylightEnd within the
		# calendar year (northern-hemisphere style rules) -- confirm before
		# relying on it for southern-hemisphere zones.
		try:
			dstStart = self.GetDSTStartTime( dt.year )
			dstEnd = self.GetDSTEndTime( dt.year )

			if dstStart <= dt.replace( tzinfo=None ) < dstEnd and not self.fixedStandardTime:
				result = self.daylightBiasOffset
		except ValueError:
			# there was an error parsing the time zone, which is normal when a
			#  start and end time are not specified.
			pass

		return result

	def GetDSTStartTime( self, year ):
		"Given a year, determines the time when daylight savings time starts"
		return self._LocateDay( year, self.daylightStart )

	def GetDSTEndTime( self, year ):
		"Given a year, determines the time when daylight savings ends."
		return self._LocateDay( year, self.daylightEnd )

	def _LocateDay( self, year, win32SystemTime ):
		"""
		Takes a SYSTEMTIME structure as retrieved from a TIME_ZONE_INFORMATION
		structure and interprets it based on the given year to identify the actual day.

		This method is necessary because the SYSTEMTIME structure refers to a day by its
		day of the week or week of the month (e.g. 4th saturday in April).

		Refer to the Windows Platform SDK for more information on the SYSTEMTIME
		and TIME_ZONE_INFORMATION structures.
		"""
		month = win32SystemTime[ 1 ]
		# MS stores Sunday as 0, Python datetime stores Monday as zero
		targetWeekday = ( win32SystemTime[ 2 ] + 6 ) % 7
		# win32SystemTime[3] is the week of the month, so the following
		#  is the first day of that week
		day = ( win32SystemTime[ 3 ] - 1 ) * 7 + 1
		hour, min, sec, msec = win32SystemTime[4:]
		result = datetime.datetime( year, month, day, hour, min, sec, msec )
		# now the result is the correct week, but not necessarily the correct day of the week
		daysToGo = targetWeekday - result.weekday()
		result += datetime.timedelta( daysToGo )
		# if we selected a day in the month following the target month,
		#  move back a week or two.
		# This is necessary because Microsoft defines the fifth week in a month
		#  to be the last week in a month and adding the time delta might have
		#  pushed the result into the next month.
		# NOTE(review): a December rule spilling into January is not pulled
		# back (January is month 1, not month + 1 == 13); no known Windows
		# zone uses such a rule, but confirm if one appears.
		while result.month == month + 1:
			result -= datetime.timedelta( weeks = 1 )
		return result

	def __cmp__( self, other ):
		return cmp( self.__dict__, other.__dict__ )
+
def _RegKeyEnumerator( key ):
	"""Generates the subkey names of an open registry key (see _RegEnumerator)."""
	return _RegEnumerator( key, _winreg.EnumKey )
+
def _RegValueEnumerator( key ):
	"""Generates the (name, value, type) value tuples of an open registry key."""
	return _RegEnumerator( key, _winreg.EnumValue )
+
+def _RegEnumerator( key, func ):
+	"Enumerates an open registry key as an iterable generator"
+	index = 0
+	try:
+		while 1:
+			yield func( key, index )
+			index += 1
+	except WindowsError: pass
+	
def _RegKeyDict( key ):
	"""Return a dictionary mapping value names to value data for the
	given open registry key (the value type is discarded)."""
	result = {}
	for name, value, valueType in _RegValueEnumerator( key ):
		result[ name ] = value
	return result
+
def GetTimeZoneNames( ):
	"""Returns the names of the time zones as defined in the registry.

	The result is a generator over the subkey names of
	TimeZoneInfo.tzRegKey; the registry key stays open while iterating.
	"""
	key = _winreg.OpenKeyEx( _winreg.HKEY_LOCAL_MACHINE, TimeZoneInfo.tzRegKey )
	return _RegKeyEnumerator( key )
+
def GetIndexedTimeZoneNames( index_key = 'Index' ):
	"""Returns the names of the time zones as defined in the registry, but
	includes an index by which they may be sorted.  Default index is "Index"
	by which they may be sorted longitudinally.

	Yields ( index value, time zone key name ) pairs, where the index value
	is whatever the registry stores under index_key for each zone.
	"""
	for timeZoneName in GetTimeZoneNames():
		tzRegKeyPath = os.path.join( TimeZoneInfo.tzRegKey, timeZoneName )
		key = _winreg.OpenKeyEx( _winreg.HKEY_LOCAL_MACHINE, tzRegKeyPath )
		# each zone's key carries its own sort index as a registry value
		tzIndex, type = _winreg.QueryValueEx( key, index_key )
		yield ( tzIndex, timeZoneName )
+
def GetSortedTimeZoneNames( ):
	"""Uses GetIndexedTimeZoneNames to return a tuple of the time zone
	names, sorted longitudinally by their registry index."""
	orderedPairs = sorted( GetIndexedTimeZoneNames() )
	# transpose the (index, name) pairs and keep the names column
	return zip( *orderedPairs )[1]
+
def GetLocalTimeZone( ):
	"""Returns the local time zone as defined by the operating system in the
	registry.
	Note that this will only work if the TimeZone in the registry has not been
	customized.  It should have been selected from the Windows interface.
	>>> localTZ = GetLocalTimeZone()
	>>> nowLoc = datetime.datetime.now( localTZ )
	>>> nowUTC = datetime.datetime.utcnow( )
	>>> ( nowUTC - nowLoc ) < datetime.timedelta( seconds = 5 )
	Traceback (most recent call last):
	  ...
	TypeError: can't subtract offset-naive and offset-aware datetimes

	>>> nowUTC = nowUTC.replace( tzinfo = TimeZoneInfo( 'GMT Standard Time', True ) )

	Now one can compare the results of the two offset aware values	
	>>> ( nowUTC - nowLoc ) < datetime.timedelta( seconds = 5 )
	True
	"""
	tzRegKey = r'SYSTEM\CurrentControlSet\Control\TimeZoneInformation'
	key = _winreg.OpenKeyEx( _winreg.HKEY_LOCAL_MACHINE, tzRegKey )
	info = _RegKeyDict( key )
	# If the user has not checked "Automatically adjust clock for daylight
	# saving changes" in the Date and Time Properties control, Windows
	# reports identical standard and daylight values; in that case pin the
	# zone to standard time.
	sameName = info['StandardName'] == info['DaylightName']
	sameBias = info['StandardBias'] == info['DaylightBias']
	return TimeZoneInfo( info['StandardName'], sameName and sameBias )
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32traceutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32traceutil.py
new file mode 100644
index 0000000..ce52584
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32traceutil.py
@@ -0,0 +1,53 @@
+# This is a helper for the win32trace module
+
+# If imported from a normal Python program, it sets up sys.stdout and sys.stderr
+# so output goes to the collector.
+
+# If run from the command line, it creates a collector loop.
+
+# Eg:
+# C:>start win32traceutil.py (or python.exe win32traceutil.py)
+# will start a process with a (pretty much) blank screen.
+#
+# then, switch to a DOS prompt, and type:
+# C:>python.exe
+# Python 1.4 etc...
+# >>> import win32traceutil
+# Redirecting output to win32trace remote collector
+# >>> print "Hello"
+# >>>
+# And the output will appear in the first collector process.
+
+# Note - the client or the collector can be started first.
+# There is a 64k buffer.  If this gets full, it is reset, and new
+# output appended from the start.
+
+import win32trace
+
def RunAsCollector():
	"""Run forever as the collector console, copying everything written to
	the win32trace buffer to this process's stdout."""
	import sys
	try:
		# best effort only - the console title is purely cosmetic
		import win32api
		win32api.SetConsoleTitle("Python Trace Collector")
	except:
		pass # Oh well!
	win32trace.InitRead()
	print "Collecting Python Trace Output..."
#	import win32api;win32api.DebugBreak()
	while 1:
		# blockingread() blocks until trace output is available
#		print win32trace.blockingread()
		sys.stdout.write(win32trace.blockingread())
+
+
def SetupForPrint():
	"""Redirect this process's print output to the win32trace collector."""
	win32trace.InitWrite()
	try:	# Under certain servers, sys.stdout may be invalid.
		print "Redirecting output to win32trace remote collector"
	except:
		pass
	win32trace.setprint() # this works in an rexec environment.
+
# Run as a script: act as the collector console.
# Imported as a module: redirect this process's output to the collector.
if __name__=='__main__':
	RunAsCollector()
else:
	SetupForPrint()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32verstamp.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32verstamp.py
new file mode 100644
index 0000000..3bb26e4b2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/win32verstamp.py
@@ -0,0 +1,202 @@
+""" Stamp a Win32 binary with version information.
+"""
+
+from win32api import BeginUpdateResource, UpdateResource, EndUpdateResource, Unicode
+U = Unicode
+
+import os
+import struct
+import glob
+
+import optparse
+
# Constants from the Platform SDK VERSIONINFO / VS_FIXEDFILEINFO definitions.
VS_FFI_SIGNATURE = -17890115 # 0xFEEF04BD
VS_FFI_STRUCVERSION = 0x00010000
VS_FFI_FILEFLAGSMASK = 0x0000003f
VOS_NT_WINDOWS32 = 0x00040004
+
def file_flags(debug):
  """Return the dwFileFlags value for the stamped binary.

  Debug builds are flagged VS_FF_DEBUG | VS_FF_PRERELEASE (3); release
  builds carry no flags (0).
  """
  flags = 0
  if debug:
    flags = 3  # VS_FF_DEBUG | VS_FF_PRERELEASE
  return flags
+
def file_type(is_dll):
  """Return the dwFileType value: VFT_DLL (2) for DLLs, VFT_APP (1) otherwise."""
  kind = 1  # VFT_APP
  if is_dll:
    kind = 2  # VFT_DLL
  return kind
+
def VS_FIXEDFILEINFO(maj, min, sub, build, debug=0, is_dll=1):
  """Pack a VS_FIXEDFILEINFO structure (thirteen native longs).

  The file version and the product version are both set to
  maj.min.sub.build.
  """
  fields = (
    VS_FFI_SIGNATURE,        # dwSignature
    VS_FFI_STRUCVERSION,     # dwStrucVersion
    (maj << 16) | min,       # dwFileVersionMS
    (sub << 16) | build,     # dwFileVersionLS
    (maj << 16) | min,       # dwProductVersionMS
    (sub << 16) | build,     # dwProductVersionLS
    VS_FFI_FILEFLAGSMASK,    # dwFileFlagsMask
    file_flags(debug),       # dwFileFlags
    VOS_NT_WINDOWS32,        # dwFileOS
    file_type(is_dll),       # dwFileType
    0x00000000,              # dwFileSubtype
    0x00000000,              # dwFileDateMS
    0x00000000,              # dwFileDateLS
  )
  # '13l' is the repeat-count spelling of thirteen consecutive 'l' codes.
  return struct.pack('13l', *fields)
+
def nullterm(s):
  """Return s as a wide-character byte string with a two-byte NUL terminator.

  Uses the raw buffer of a unicode object when the unicode builtin exists;
  otherwise falls back to win32api's Unicode type (U).
  """
  try:
    return buffer(unicode(s)) + "\0\0"
  except NameError: # No unicode builtin
    # presumably a build without the unicode type -- use win32api instead
    return U(s).raw + '\0\0'
+
def pad32(s, extra=2):
  """Pad s with NUL bytes so that len(s) + extra is 32-bit aligned.

  extra is normally 2 to account for the wLength field that addlen()
  will prepend.
  """
  remainder = (len(s) + extra) % 4
  if remainder:
    return s + '\0' * (4 - remainder)
  return s
+
def addlen(s):
  """Prepend the 16-bit wLength field: the length of s plus the two
  bytes of the length field itself."""
  total = len(s) + 2
  return struct.pack('h', total) + s
+
def String(key, value):
  """Pack a version-resource String entry: header (wValueLength, wType=1),
  NUL-terminated key, then the padded NUL-terminated value."""
  key = nullterm(key)
  value = nullterm(value)
  # wValueLength counts wide characters, hence the division by two.
  header = struct.pack('hh', len(value)/2, 1)  # wValueLength, wType
  return addlen(pad32(header + key) + value)
+
def StringTable(key, data):
  """Pack a StringTable block tagged with key, containing one String
  entry per item of the data dictionary."""
  result = struct.pack('hh', 0, 1)  # wValueLength, wType
  result = result + nullterm(key)
  for name, value in data.items():
    # each child entry is 32-bit aligned within the table
    result = pad32(result + String(name, value))
  return addlen(result)
+
def StringFileInfo(data):
  """Pack the StringFileInfo block wrapping a single StringTable."""
  header = struct.pack('hh', 0, 1)  # wValueLength, wType
  header = header + nullterm('StringFileInfo')
  # The table is tagged langID 0409 (US English), charset 04E4 (Windows
  # multilingual); 04B0 (Unicode) is the other common choice.
  return addlen(pad32(header) + StringTable('040904E4', data))
+
def Var(key, value):
  """Pack a VarFileInfo Var entry: a binary value tagged with key."""
  header = struct.pack('hh', len(value), 0)  # wValueLength, wType (binary)
  return addlen(pad32(header + nullterm(key)) + value)
+
def VarFileInfo(data):
  """Pack the VarFileInfo block containing one Var entry per item of
  the data dictionary."""
  result = pad32(struct.pack('hh', 0, 1) + nullterm('VarFileInfo'))
  for name, value in data.items():
    result = result + Var(name, value)
  return addlen(result)
+
def VS_VERSION_INFO(maj, min, sub, build, sdata, vdata, debug=0, is_dll=1):
  """Pack the root VS_VERSION_INFO resource: the fixed file info followed
  by the StringFileInfo (from sdata) and VarFileInfo (from vdata) blocks."""
  fixed = VS_FIXEDFILEINFO(maj, min, sub, build, debug, is_dll)
  header = struct.pack('hh', len(fixed), 0)  # wValueLength, wType
  header = header + nullterm('VS_VERSION_INFO')
  body = pad32(pad32(header) + fixed)
  body = body + StringFileInfo(sdata) + VarFileInfo(vdata)
  return addlen(body)
+
def stamp(pathname, options):
  """Stamp the binary at pathname with a VS_VERSION_INFO resource built
  from options (an optparse Values object; see the option definitions in
  __main__).

  Raises ValueError if options.version is not four dot-separated integers.
  """
  # For some reason, the API functions report success if the file is open
  # but doesnt work!  Try and open the file for writing, just to see if it is
  # likely the stamp will work!
  try:
    f = open(pathname, "a+b")
    f.close()
  except IOError, why:
    print "WARNING: File %s could not be opened - %s" % (pathname, why)

  ver = options.version
  try:
    bits = [int(i) for i in ver.split(".")]
    vmaj, vmin, vsub, vbuild = bits
  except (IndexError, TypeError, ValueError):
    # int() raises ValueError on non-digits; unpacking raises ValueError
    # when there are not exactly four parts.
    raise ValueError, "--version must be a.b.c.d (all integers) - got %r" % ver

  # Fall back to the file's base name for the internal/original names.
  ifn = options.internal_name
  if not ifn:
    ifn = os.path.basename(pathname)
  ofn = options.original_filename
  if not ofn:
    ofn = os.path.basename(pathname)

  sdata = {
    'Comments' : options.comments,
    'CompanyName' : options.company,
    'FileDescription' : options.description,
    'FileVersion' : ver,
    'InternalName' : ifn,
    'LegalCopyright' : options.copyright,
    'LegalTrademarks' : options.trademarks,
    'OriginalFilename' : ofn,
    'ProductName' : options.product,
    'ProductVersion' : ver,
    }
  vdata = {
    # langID 0x409 (US English), codepage 1252
    'Translation' : struct.pack('hh', 0x409,1252),
    }
  # Guess DLL-ness and debug-ness from the filename when not given explicitly.
  is_dll = options.dll
  if is_dll is None:
    is_dll = os.path.splitext(pathname)[1].lower() in '.dll .pyd'.split()
  is_debug = options.debug
  if is_debug is None:
    is_debug = os.path.splitext(pathname)[0].lower().endswith("_d")
  # convert None to blank strings
  for k, v in sdata.items():
    if v is None:
      sdata[k] = ""
  vs = VS_VERSION_INFO(vmaj, vmin, vsub, vbuild, sdata, vdata, is_debug, is_dll)

  # Resource type 16 is RT_VERSION; the version resource always has id 1.
  h = BeginUpdateResource(pathname, 0)
  UpdateResource(h, 16, 1, vs)
  EndUpdateResource(h, 0)

  if options.verbose:
    print "Stamped:", pathname
+
if __name__ == '__main__':
  # Command-line entry point: stamp every file matching the given globs.
  parser = optparse.OptionParser("%prog [options] filespec ...",
                                 description=__doc__)

  parser.add_option("-q", "--quiet",
                    action="store_false", dest="verbose", default=True,
                    help="don't print status messages to stdout")
  parser.add_option("", "--version", default="0.0.0.0",
                    help="The version number as m.n.s.b")
  parser.add_option("", "--dll",
                    help="""Stamp the file as a DLL.  Default is to look at the
                            file extension for .dll or .pyd.""")
  parser.add_option("", "--debug", help="""Stamp the file as a debug binary.""")
  parser.add_option("", "--product", help="""The product name to embed.""")
  parser.add_option("", "--company", help="""The company name to embed.""")
  parser.add_option("", "--trademarks", help="The trademark string to embed.")
  parser.add_option("", "--comments", help="The comments string to embed.")
  parser.add_option("", "--copyright",
                    help="""The copyright message string to embed.""")
  parser.add_option("", "--description", metavar="DESC",
                    help="The description to embed.")
  parser.add_option("", "--internal-name", metavar="NAME",
                    help="""The internal filename to embed. If not specified
                         the base filename is used.""")
  parser.add_option("", "--original-filename",
                    help="""The original filename to embed. If not specified
                            the base filename is used.""")

  options, args = parser.parse_args()
  if not args:
    parser.error("You must supply a file to stamp.  Use --help for details.")

  # each argument may be a glob pattern; stamp every match
  for g in args:
    for f in glob.glob(g):
      stamp(f, options)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winerror.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winerror.py
new file mode 100644
index 0000000..77c4567
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winerror.py
@@ -0,0 +1,1916 @@
+"""Error related constants for win32
+
+Generated by h2py from winerror.h
+"""
+# Few extras added manually...
+TRUST_E_PROVIDER_UNKNOWN = -2146762751
+TRUST_E_ACTION_UNKNOWN = -2146762750
+TRUST_E_SUBJECT_FORM_UNKNOWN = -2146762749
+TRUST_E_SUBJECT_NOT_TRUSTED = -2146762748
+# up to here...
+
+FACILITY_WINDOWS_CE = 24
+FACILITY_WINDOWS = 8
+FACILITY_URT = 19
+FACILITY_UMI = 22
+FACILITY_SXS = 23
+FACILITY_STORAGE = 3
+FACILITY_STATE_MANAGEMENT = 34
+FACILITY_SSPI = 9
+FACILITY_SCARD = 16
+FACILITY_SETUPAPI = 15
+FACILITY_SECURITY = 9
+FACILITY_RPC = 1
+FACILITY_WIN32 = 7
+FACILITY_CONTROL = 10
+FACILITY_NULL = 0
+FACILITY_MSMQ = 14
+FACILITY_MEDIASERVER = 13
+FACILITY_INTERNET = 12
+FACILITY_ITF = 4
+FACILITY_DISPATCH = 2
+FACILITY_CERT = 11
+ERROR_SUCCESS = 0
+NO_ERROR = 0
+S_OK = (0L)
+S_FALSE = (1L)
+ERROR_INVALID_FUNCTION = 1
+ERROR_FILE_NOT_FOUND = 2
+ERROR_PATH_NOT_FOUND = 3
+ERROR_TOO_MANY_OPEN_FILES = 4
+ERROR_ACCESS_DENIED = 5
+ERROR_INVALID_HANDLE = 6
+ERROR_ARENA_TRASHED = 7
+ERROR_NOT_ENOUGH_MEMORY = 8
+ERROR_INVALID_BLOCK = 9
+ERROR_BAD_ENVIRONMENT = 10
+ERROR_BAD_FORMAT = 11
+ERROR_INVALID_ACCESS = 12
+ERROR_INVALID_DATA = 13
+ERROR_OUTOFMEMORY = 14
+ERROR_INVALID_DRIVE = 15
+ERROR_CURRENT_DIRECTORY = 16
+ERROR_NOT_SAME_DEVICE = 17
+ERROR_NO_MORE_FILES = 18
+ERROR_WRITE_PROTECT = 19
+ERROR_BAD_UNIT = 20
+ERROR_NOT_READY = 21
+ERROR_BAD_COMMAND = 22
+ERROR_CRC = 23
+ERROR_BAD_LENGTH = 24
+ERROR_SEEK = 25
+ERROR_NOT_DOS_DISK = 26
+ERROR_SECTOR_NOT_FOUND = 27
+ERROR_OUT_OF_PAPER = 28
+ERROR_WRITE_FAULT = 29
+ERROR_READ_FAULT = 30
+ERROR_GEN_FAILURE = 31
+ERROR_SHARING_VIOLATION = 32
+ERROR_LOCK_VIOLATION = 33
+ERROR_WRONG_DISK = 34
+ERROR_SHARING_BUFFER_EXCEEDED = 36
+ERROR_HANDLE_EOF = 38
+ERROR_HANDLE_DISK_FULL = 39
+ERROR_NOT_SUPPORTED = 50
+ERROR_REM_NOT_LIST = 51
+ERROR_DUP_NAME = 52
+ERROR_BAD_NETPATH = 53
+ERROR_NETWORK_BUSY = 54
+ERROR_DEV_NOT_EXIST = 55
+ERROR_TOO_MANY_CMDS = 56
+ERROR_ADAP_HDW_ERR = 57
+ERROR_BAD_NET_RESP = 58
+ERROR_UNEXP_NET_ERR = 59
+ERROR_BAD_REM_ADAP = 60
+ERROR_PRINTQ_FULL = 61
+ERROR_NO_SPOOL_SPACE = 62
+ERROR_PRINT_CANCELLED = 63
+ERROR_NETNAME_DELETED = 64
+ERROR_NETWORK_ACCESS_DENIED = 65
+ERROR_BAD_DEV_TYPE = 66
+ERROR_BAD_NET_NAME = 67
+ERROR_TOO_MANY_NAMES = 68
+ERROR_TOO_MANY_SESS = 69
+ERROR_SHARING_PAUSED = 70
+ERROR_REQ_NOT_ACCEP = 71
+ERROR_REDIR_PAUSED = 72
+ERROR_FILE_EXISTS = 80
+ERROR_CANNOT_MAKE = 82
+ERROR_FAIL_I24 = 83
+ERROR_OUT_OF_STRUCTURES = 84
+ERROR_ALREADY_ASSIGNED = 85
+ERROR_INVALID_PASSWORD = 86
+ERROR_INVALID_PARAMETER = 87
+ERROR_NET_WRITE_FAULT = 88
+ERROR_NO_PROC_SLOTS = 89
+ERROR_TOO_MANY_SEMAPHORES = 100
+ERROR_EXCL_SEM_ALREADY_OWNED = 101
+ERROR_SEM_IS_SET = 102
+ERROR_TOO_MANY_SEM_REQUESTS = 103
+ERROR_INVALID_AT_INTERRUPT_TIME = 104
+ERROR_SEM_OWNER_DIED = 105
+ERROR_SEM_USER_LIMIT = 106
+ERROR_DISK_CHANGE = 107
+ERROR_DRIVE_LOCKED = 108
+ERROR_BROKEN_PIPE = 109
+ERROR_OPEN_FAILED = 110
+ERROR_BUFFER_OVERFLOW = 111
+ERROR_DISK_FULL = 112
+ERROR_NO_MORE_SEARCH_HANDLES = 113
+ERROR_INVALID_TARGET_HANDLE = 114
+ERROR_INVALID_CATEGORY = 117
+ERROR_INVALID_VERIFY_SWITCH = 118
+ERROR_BAD_DRIVER_LEVEL = 119
+ERROR_CALL_NOT_IMPLEMENTED = 120
+ERROR_SEM_TIMEOUT = 121
+ERROR_INSUFFICIENT_BUFFER = 122
+ERROR_INVALID_NAME = 123
+ERROR_INVALID_LEVEL = 124
+ERROR_NO_VOLUME_LABEL = 125
+ERROR_MOD_NOT_FOUND = 126
+ERROR_PROC_NOT_FOUND = 127
+ERROR_WAIT_NO_CHILDREN = 128
+ERROR_CHILD_NOT_COMPLETE = 129
+ERROR_DIRECT_ACCESS_HANDLE = 130
+ERROR_NEGATIVE_SEEK = 131
+ERROR_SEEK_ON_DEVICE = 132
+ERROR_IS_JOIN_TARGET = 133
+ERROR_IS_JOINED = 134
+ERROR_IS_SUBSTED = 135
+ERROR_NOT_JOINED = 136
+ERROR_NOT_SUBSTED = 137
+ERROR_JOIN_TO_JOIN = 138
+ERROR_SUBST_TO_SUBST = 139
+ERROR_JOIN_TO_SUBST = 140
+ERROR_SUBST_TO_JOIN = 141
+ERROR_BUSY_DRIVE = 142
+ERROR_SAME_DRIVE = 143
+ERROR_DIR_NOT_ROOT = 144
+ERROR_DIR_NOT_EMPTY = 145
+ERROR_IS_SUBST_PATH = 146
+ERROR_IS_JOIN_PATH = 147
+ERROR_PATH_BUSY = 148
+ERROR_IS_SUBST_TARGET = 149
+ERROR_SYSTEM_TRACE = 150
+ERROR_INVALID_EVENT_COUNT = 151
+ERROR_TOO_MANY_MUXWAITERS = 152
+ERROR_INVALID_LIST_FORMAT = 153
+ERROR_LABEL_TOO_LONG = 154
+ERROR_TOO_MANY_TCBS = 155
+ERROR_SIGNAL_REFUSED = 156
+ERROR_DISCARDED = 157
+ERROR_NOT_LOCKED = 158
+ERROR_BAD_THREADID_ADDR = 159
+ERROR_BAD_ARGUMENTS = 160
+ERROR_BAD_PATHNAME = 161
+ERROR_SIGNAL_PENDING = 162
+ERROR_MAX_THRDS_REACHED = 164
+ERROR_LOCK_FAILED = 167
+ERROR_BUSY = 170
+ERROR_CANCEL_VIOLATION = 173
+ERROR_ATOMIC_LOCKS_NOT_SUPPORTED = 174
+ERROR_INVALID_SEGMENT_NUMBER = 180
+ERROR_INVALID_ORDINAL = 182
+ERROR_ALREADY_EXISTS = 183
+ERROR_INVALID_FLAG_NUMBER = 186
+ERROR_SEM_NOT_FOUND = 187
+ERROR_INVALID_STARTING_CODESEG = 188
+ERROR_INVALID_STACKSEG = 189
+ERROR_INVALID_MODULETYPE = 190
+ERROR_INVALID_EXE_SIGNATURE = 191
+ERROR_EXE_MARKED_INVALID = 192
+ERROR_BAD_EXE_FORMAT = 193
+ERROR_ITERATED_DATA_EXCEEDS_64k = 194
+ERROR_INVALID_MINALLOCSIZE = 195
+ERROR_DYNLINK_FROM_INVALID_RING = 196
+ERROR_IOPL_NOT_ENABLED = 197
+ERROR_INVALID_SEGDPL = 198
+ERROR_AUTODATASEG_EXCEEDS_64k = 199
+ERROR_RING2SEG_MUST_BE_MOVABLE = 200
+ERROR_RELOC_CHAIN_XEEDS_SEGLIM = 201
+ERROR_INFLOOP_IN_RELOC_CHAIN = 202
+ERROR_ENVVAR_NOT_FOUND = 203
+ERROR_NO_SIGNAL_SENT = 205
+ERROR_FILENAME_EXCED_RANGE = 206
+ERROR_RING2_STACK_IN_USE = 207
+ERROR_META_EXPANSION_TOO_LONG = 208
+ERROR_INVALID_SIGNAL_NUMBER = 209
+ERROR_THREAD_1_INACTIVE = 210
+ERROR_LOCKED = 212
+ERROR_TOO_MANY_MODULES = 214
+ERROR_NESTING_NOT_ALLOWED = 215
+ERROR_EXE_MACHINE_TYPE_MISMATCH = 216
+ERROR_BAD_PIPE = 230
+ERROR_PIPE_BUSY = 231
+ERROR_NO_DATA = 232
+ERROR_PIPE_NOT_CONNECTED = 233
+ERROR_MORE_DATA = 234
+ERROR_VC_DISCONNECTED = 240
+ERROR_INVALID_EA_NAME = 254
+ERROR_EA_LIST_INCONSISTENT = 255
+ERROR_NO_MORE_ITEMS = 259
+ERROR_CANNOT_COPY = 266
+ERROR_DIRECTORY = 267
+ERROR_EAS_DIDNT_FIT = 275
+ERROR_EA_FILE_CORRUPT = 276
+ERROR_EA_TABLE_FULL = 277
+ERROR_INVALID_EA_HANDLE = 278
+ERROR_EAS_NOT_SUPPORTED = 282
+ERROR_NOT_OWNER = 288
+ERROR_TOO_MANY_POSTS = 298
+ERROR_PARTIAL_COPY = 299
+ERROR_OPLOCK_NOT_GRANTED = 300
+ERROR_INVALID_OPLOCK_PROTOCOL = 301
+ERROR_MR_MID_NOT_FOUND = 317
+ERROR_INVALID_ADDRESS = 487
+ERROR_ARITHMETIC_OVERFLOW = 534
+ERROR_PIPE_CONNECTED = 535
+ERROR_PIPE_LISTENING = 536
+ERROR_EA_ACCESS_DENIED = 994
+ERROR_OPERATION_ABORTED = 995
+ERROR_IO_INCOMPLETE = 996
+ERROR_IO_PENDING = 997
+ERROR_NOACCESS = 998
+ERROR_SWAPERROR = 999
+ERROR_STACK_OVERFLOW = 1001
+ERROR_INVALID_MESSAGE = 1002
+ERROR_CAN_NOT_COMPLETE = 1003
+ERROR_INVALID_FLAGS = 1004
+ERROR_UNRECOGNIZED_VOLUME = 1005
+ERROR_FILE_INVALID = 1006
+ERROR_FULLSCREEN_MODE = 1007
+ERROR_NO_TOKEN = 1008
+ERROR_BADDB = 1009
+ERROR_BADKEY = 1010
+ERROR_CANTOPEN = 1011
+ERROR_CANTREAD = 1012
+ERROR_CANTWRITE = 1013
+ERROR_REGISTRY_RECOVERED = 1014
+ERROR_REGISTRY_CORRUPT = 1015
+ERROR_REGISTRY_IO_FAILED = 1016
+ERROR_NOT_REGISTRY_FILE = 1017
+ERROR_KEY_DELETED = 1018
+ERROR_NO_LOG_SPACE = 1019
+ERROR_KEY_HAS_CHILDREN = 1020
+ERROR_CHILD_MUST_BE_VOLATILE = 1021
+ERROR_NOTIFY_ENUM_DIR = 1022
+ERROR_DEPENDENT_SERVICES_RUNNING = 1051
+ERROR_INVALID_SERVICE_CONTROL = 1052
+ERROR_SERVICE_REQUEST_TIMEOUT = 1053
+ERROR_SERVICE_NO_THREAD = 1054
+ERROR_SERVICE_DATABASE_LOCKED = 1055
+ERROR_SERVICE_ALREADY_RUNNING = 1056
+ERROR_INVALID_SERVICE_ACCOUNT = 1057
+ERROR_SERVICE_DISABLED = 1058
+ERROR_CIRCULAR_DEPENDENCY = 1059
+ERROR_SERVICE_DOES_NOT_EXIST = 1060
+ERROR_SERVICE_CANNOT_ACCEPT_CTRL = 1061
+ERROR_SERVICE_NOT_ACTIVE = 1062
+ERROR_FAILED_SERVICE_CONTROLLER_CONNECT = 1063
+ERROR_EXCEPTION_IN_SERVICE = 1064
+ERROR_DATABASE_DOES_NOT_EXIST = 1065
+ERROR_SERVICE_SPECIFIC_ERROR = 1066
+ERROR_PROCESS_ABORTED = 1067
+ERROR_SERVICE_DEPENDENCY_FAIL = 1068
+ERROR_SERVICE_LOGON_FAILED = 1069
+ERROR_SERVICE_START_HANG = 1070
+ERROR_INVALID_SERVICE_LOCK = 1071
+ERROR_SERVICE_MARKED_FOR_DELETE = 1072
+ERROR_SERVICE_EXISTS = 1073
+ERROR_ALREADY_RUNNING_LKG = 1074
+ERROR_SERVICE_DEPENDENCY_DELETED = 1075
+ERROR_BOOT_ALREADY_ACCEPTED = 1076
+ERROR_SERVICE_NEVER_STARTED = 1077
+ERROR_DUPLICATE_SERVICE_NAME = 1078
+ERROR_DIFFERENT_SERVICE_ACCOUNT = 1079
+ERROR_CANNOT_DETECT_DRIVER_FAILURE = 1080
+ERROR_CANNOT_DETECT_PROCESS_ABORT = 1081
+ERROR_NO_RECOVERY_PROGRAM = 1082
+ERROR_END_OF_MEDIA = 1100
+ERROR_FILEMARK_DETECTED = 1101
+ERROR_BEGINNING_OF_MEDIA = 1102
+ERROR_SETMARK_DETECTED = 1103
+ERROR_NO_DATA_DETECTED = 1104
+ERROR_PARTITION_FAILURE = 1105
+ERROR_INVALID_BLOCK_LENGTH = 1106
+ERROR_DEVICE_NOT_PARTITIONED = 1107
+ERROR_UNABLE_TO_LOCK_MEDIA = 1108
+ERROR_UNABLE_TO_UNLOAD_MEDIA = 1109
+ERROR_MEDIA_CHANGED = 1110
+ERROR_BUS_RESET = 1111
+ERROR_NO_MEDIA_IN_DRIVE = 1112
+ERROR_NO_UNICODE_TRANSLATION = 1113
+ERROR_DLL_INIT_FAILED = 1114
+ERROR_SHUTDOWN_IN_PROGRESS = 1115
+ERROR_NO_SHUTDOWN_IN_PROGRESS = 1116
+ERROR_IO_DEVICE = 1117
+ERROR_SERIAL_NO_DEVICE = 1118
+ERROR_IRQ_BUSY = 1119
+ERROR_MORE_WRITES = 1120
+ERROR_COUNTER_TIMEOUT = 1121
+ERROR_FLOPPY_ID_MARK_NOT_FOUND = 1122
+ERROR_FLOPPY_WRONG_CYLINDER = 1123
+ERROR_FLOPPY_UNKNOWN_ERROR = 1124
+ERROR_FLOPPY_BAD_REGISTERS = 1125
+ERROR_DISK_RECALIBRATE_FAILED = 1126
+ERROR_DISK_OPERATION_FAILED = 1127
+ERROR_DISK_RESET_FAILED = 1128
+ERROR_EOM_OVERFLOW = 1129
+ERROR_NOT_ENOUGH_SERVER_MEMORY = 1130
+ERROR_POSSIBLE_DEADLOCK = 1131
+ERROR_MAPPED_ALIGNMENT = 1132
+ERROR_SET_POWER_STATE_VETOED = 1140
+ERROR_SET_POWER_STATE_FAILED = 1141
+ERROR_TOO_MANY_LINKS = 1142
+ERROR_OLD_WIN_VERSION = 1150
+ERROR_APP_WRONG_OS = 1151
+ERROR_SINGLE_INSTANCE_APP = 1152
+ERROR_RMODE_APP = 1153
+ERROR_INVALID_DLL = 1154
+ERROR_NO_ASSOCIATION = 1155
+ERROR_DDE_FAIL = 1156
+ERROR_DLL_NOT_FOUND = 1157
+ERROR_NO_MORE_USER_HANDLES = 1158
+ERROR_MESSAGE_SYNC_ONLY = 1159
+ERROR_SOURCE_ELEMENT_EMPTY = 1160
+ERROR_DESTINATION_ELEMENT_FULL = 1161
+ERROR_ILLEGAL_ELEMENT_ADDRESS = 1162
+ERROR_MAGAZINE_NOT_PRESENT = 1163
+ERROR_DEVICE_REINITIALIZATION_NEEDED = 1164
+ERROR_DEVICE_REQUIRES_CLEANING = 1165
+ERROR_DEVICE_DOOR_OPEN = 1166
+ERROR_DEVICE_NOT_CONNECTED = 1167
+ERROR_NOT_FOUND = 1168
+ERROR_NO_MATCH = 1169
+ERROR_SET_NOT_FOUND = 1170
+ERROR_POINT_NOT_FOUND = 1171
+ERROR_NO_TRACKING_SERVICE = 1172
+ERROR_NO_VOLUME_ID = 1173
+ERROR_CONNECTED_OTHER_PASSWORD = 2108
+ERROR_BAD_USERNAME = 2202
+ERROR_NOT_CONNECTED = 2250
+ERROR_OPEN_FILES = 2401
+ERROR_ACTIVE_CONNECTIONS = 2402
+ERROR_DEVICE_IN_USE = 2404
+ERROR_BAD_DEVICE = 1200
+ERROR_CONNECTION_UNAVAIL = 1201
+ERROR_DEVICE_ALREADY_REMEMBERED = 1202
+ERROR_NO_NET_OR_BAD_PATH = 1203
+ERROR_BAD_PROVIDER = 1204
+ERROR_CANNOT_OPEN_PROFILE = 1205
+ERROR_BAD_PROFILE = 1206
+ERROR_NOT_CONTAINER = 1207
+ERROR_EXTENDED_ERROR = 1208
+ERROR_INVALID_GROUPNAME = 1209
+ERROR_INVALID_COMPUTERNAME = 1210
+ERROR_INVALID_EVENTNAME = 1211
+ERROR_INVALID_DOMAINNAME = 1212
+ERROR_INVALID_SERVICENAME = 1213
+ERROR_INVALID_NETNAME = 1214
+ERROR_INVALID_SHARENAME = 1215
+ERROR_INVALID_PASSWORDNAME = 1216
+ERROR_INVALID_MESSAGENAME = 1217
+ERROR_INVALID_MESSAGEDEST = 1218
+ERROR_SESSION_CREDENTIAL_CONFLICT = 1219
+ERROR_REMOTE_SESSION_LIMIT_EXCEEDED = 1220
+ERROR_DUP_DOMAINNAME = 1221
+ERROR_NO_NETWORK = 1222
+ERROR_CANCELLED = 1223
+ERROR_USER_MAPPED_FILE = 1224
+ERROR_CONNECTION_REFUSED = 1225
+ERROR_GRACEFUL_DISCONNECT = 1226
+ERROR_ADDRESS_ALREADY_ASSOCIATED = 1227
+ERROR_ADDRESS_NOT_ASSOCIATED = 1228
+ERROR_CONNECTION_INVALID = 1229
+ERROR_CONNECTION_ACTIVE = 1230
+ERROR_NETWORK_UNREACHABLE = 1231
+ERROR_HOST_UNREACHABLE = 1232
+ERROR_PROTOCOL_UNREACHABLE = 1233
+ERROR_PORT_UNREACHABLE = 1234
+ERROR_REQUEST_ABORTED = 1235
+ERROR_CONNECTION_ABORTED = 1236
+ERROR_RETRY = 1237
+ERROR_CONNECTION_COUNT_LIMIT = 1238
+ERROR_LOGIN_TIME_RESTRICTION = 1239
+ERROR_LOGIN_WKSTA_RESTRICTION = 1240
+ERROR_INCORRECT_ADDRESS = 1241
+ERROR_ALREADY_REGISTERED = 1242
+ERROR_SERVICE_NOT_FOUND = 1243
+ERROR_NOT_AUTHENTICATED = 1244
+ERROR_NOT_LOGGED_ON = 1245
+ERROR_CONTINUE = 1246
+ERROR_ALREADY_INITIALIZED = 1247
+ERROR_NO_MORE_DEVICES = 1248
+ERROR_NO_SUCH_SITE = 1249
+ERROR_DOMAIN_CONTROLLER_EXISTS = 1250
+ERROR_DS_NOT_INSTALLED = 1251
+ERROR_NOT_ALL_ASSIGNED = 1300
+ERROR_SOME_NOT_MAPPED = 1301
+ERROR_NO_QUOTAS_FOR_ACCOUNT = 1302
+ERROR_LOCAL_USER_SESSION_KEY = 1303
+ERROR_NULL_LM_PASSWORD = 1304
+ERROR_UNKNOWN_REVISION = 1305
+ERROR_REVISION_MISMATCH = 1306
+ERROR_INVALID_OWNER = 1307
+ERROR_INVALID_PRIMARY_GROUP = 1308
+ERROR_NO_IMPERSONATION_TOKEN = 1309
+ERROR_CANT_DISABLE_MANDATORY = 1310
+ERROR_NO_LOGON_SERVERS = 1311
+ERROR_NO_SUCH_LOGON_SESSION = 1312
+ERROR_NO_SUCH_PRIVILEGE = 1313
+ERROR_PRIVILEGE_NOT_HELD = 1314
+ERROR_INVALID_ACCOUNT_NAME = 1315
+ERROR_USER_EXISTS = 1316
+ERROR_NO_SUCH_USER = 1317
+ERROR_GROUP_EXISTS = 1318
+ERROR_NO_SUCH_GROUP = 1319
+ERROR_MEMBER_IN_GROUP = 1320
+ERROR_MEMBER_NOT_IN_GROUP = 1321
+ERROR_LAST_ADMIN = 1322
+ERROR_WRONG_PASSWORD = 1323
+ERROR_ILL_FORMED_PASSWORD = 1324
+ERROR_PASSWORD_RESTRICTION = 1325
+ERROR_LOGON_FAILURE = 1326
+ERROR_ACCOUNT_RESTRICTION = 1327
+ERROR_INVALID_LOGON_HOURS = 1328
+ERROR_INVALID_WORKSTATION = 1329
+ERROR_PASSWORD_EXPIRED = 1330
+ERROR_ACCOUNT_DISABLED = 1331
+ERROR_NONE_MAPPED = 1332
+ERROR_TOO_MANY_LUIDS_REQUESTED = 1333
+ERROR_LUIDS_EXHAUSTED = 1334
+ERROR_INVALID_SUB_AUTHORITY = 1335
+ERROR_INVALID_ACL = 1336
+ERROR_INVALID_SID = 1337
+ERROR_INVALID_SECURITY_DESCR = 1338
+ERROR_BAD_INHERITANCE_ACL = 1340
+ERROR_SERVER_DISABLED = 1341
+ERROR_SERVER_NOT_DISABLED = 1342
+ERROR_INVALID_ID_AUTHORITY = 1343
+ERROR_ALLOTTED_SPACE_EXCEEDED = 1344
+ERROR_INVALID_GROUP_ATTRIBUTES = 1345
+ERROR_BAD_IMPERSONATION_LEVEL = 1346
+ERROR_CANT_OPEN_ANONYMOUS = 1347
+ERROR_BAD_VALIDATION_CLASS = 1348
+ERROR_BAD_TOKEN_TYPE = 1349
+ERROR_NO_SECURITY_ON_OBJECT = 1350
+ERROR_CANT_ACCESS_DOMAIN_INFO = 1351
+ERROR_INVALID_SERVER_STATE = 1352
+ERROR_INVALID_DOMAIN_STATE = 1353
+ERROR_INVALID_DOMAIN_ROLE = 1354
+ERROR_NO_SUCH_DOMAIN = 1355
+ERROR_DOMAIN_EXISTS = 1356
+ERROR_DOMAIN_LIMIT_EXCEEDED = 1357
+ERROR_INTERNAL_DB_CORRUPTION = 1358
+ERROR_INTERNAL_ERROR = 1359
+ERROR_GENERIC_NOT_MAPPED = 1360
+ERROR_BAD_DESCRIPTOR_FORMAT = 1361
+ERROR_NOT_LOGON_PROCESS = 1362
+ERROR_LOGON_SESSION_EXISTS = 1363
+ERROR_NO_SUCH_PACKAGE = 1364
+ERROR_BAD_LOGON_SESSION_STATE = 1365
+ERROR_LOGON_SESSION_COLLISION = 1366
+ERROR_INVALID_LOGON_TYPE = 1367
+ERROR_CANNOT_IMPERSONATE = 1368
+ERROR_RXACT_INVALID_STATE = 1369
+ERROR_RXACT_COMMIT_FAILURE = 1370
+ERROR_SPECIAL_ACCOUNT = 1371
+ERROR_SPECIAL_GROUP = 1372
+ERROR_SPECIAL_USER = 1373
+ERROR_MEMBERS_PRIMARY_GROUP = 1374
+ERROR_TOKEN_ALREADY_IN_USE = 1375
+ERROR_NO_SUCH_ALIAS = 1376
+ERROR_MEMBER_NOT_IN_ALIAS = 1377
+ERROR_MEMBER_IN_ALIAS = 1378
+ERROR_ALIAS_EXISTS = 1379
+ERROR_LOGON_NOT_GRANTED = 1380
+ERROR_TOO_MANY_SECRETS = 1381
+ERROR_SECRET_TOO_LONG = 1382
+ERROR_INTERNAL_DB_ERROR = 1383
+ERROR_TOO_MANY_CONTEXT_IDS = 1384
+ERROR_LOGON_TYPE_NOT_GRANTED = 1385
+ERROR_NT_CROSS_ENCRYPTION_REQUIRED = 1386
+ERROR_NO_SUCH_MEMBER = 1387
+ERROR_INVALID_MEMBER = 1388
+ERROR_TOO_MANY_SIDS = 1389
+ERROR_LM_CROSS_ENCRYPTION_REQUIRED = 1390
+ERROR_NO_INHERITANCE = 1391
+ERROR_FILE_CORRUPT = 1392
+ERROR_DISK_CORRUPT = 1393
+ERROR_NO_USER_SESSION_KEY = 1394
+ERROR_LICENSE_QUOTA_EXCEEDED = 1395
+ERROR_INVALID_WINDOW_HANDLE = 1400
+ERROR_INVALID_MENU_HANDLE = 1401
+ERROR_INVALID_CURSOR_HANDLE = 1402
+ERROR_INVALID_ACCEL_HANDLE = 1403
+ERROR_INVALID_HOOK_HANDLE = 1404
+ERROR_INVALID_DWP_HANDLE = 1405
+ERROR_TLW_WITH_WSCHILD = 1406
+ERROR_CANNOT_FIND_WND_CLASS = 1407
+ERROR_WINDOW_OF_OTHER_THREAD = 1408
+ERROR_HOTKEY_ALREADY_REGISTERED = 1409
+ERROR_CLASS_ALREADY_EXISTS = 1410
+ERROR_CLASS_DOES_NOT_EXIST = 1411
+ERROR_CLASS_HAS_WINDOWS = 1412
+ERROR_INVALID_INDEX = 1413
+ERROR_INVALID_ICON_HANDLE = 1414
+ERROR_PRIVATE_DIALOG_INDEX = 1415
+ERROR_LISTBOX_ID_NOT_FOUND = 1416
+ERROR_NO_WILDCARD_CHARACTERS = 1417
+ERROR_CLIPBOARD_NOT_OPEN = 1418
+ERROR_HOTKEY_NOT_REGISTERED = 1419
+ERROR_WINDOW_NOT_DIALOG = 1420
+ERROR_CONTROL_ID_NOT_FOUND = 1421
+ERROR_INVALID_COMBOBOX_MESSAGE = 1422
+ERROR_WINDOW_NOT_COMBOBOX = 1423
+ERROR_INVALID_EDIT_HEIGHT = 1424
+ERROR_DC_NOT_FOUND = 1425
+ERROR_INVALID_HOOK_FILTER = 1426
+ERROR_INVALID_FILTER_PROC = 1427
+ERROR_HOOK_NEEDS_HMOD = 1428
+ERROR_GLOBAL_ONLY_HOOK = 1429
+ERROR_JOURNAL_HOOK_SET = 1430
+ERROR_HOOK_NOT_INSTALLED = 1431
+ERROR_INVALID_LB_MESSAGE = 1432
+ERROR_SETCOUNT_ON_BAD_LB = 1433
+ERROR_LB_WITHOUT_TABSTOPS = 1434
+ERROR_DESTROY_OBJECT_OF_OTHER_THREAD = 1435
+ERROR_CHILD_WINDOW_MENU = 1436
+ERROR_NO_SYSTEM_MENU = 1437
+ERROR_INVALID_MSGBOX_STYLE = 1438
+ERROR_INVALID_SPI_VALUE = 1439
+ERROR_SCREEN_ALREADY_LOCKED = 1440
+ERROR_HWNDS_HAVE_DIFF_PARENT = 1441
+ERROR_NOT_CHILD_WINDOW = 1442
+ERROR_INVALID_GW_COMMAND = 1443
+ERROR_INVALID_THREAD_ID = 1444
+ERROR_NON_MDICHILD_WINDOW = 1445
+ERROR_POPUP_ALREADY_ACTIVE = 1446
+ERROR_NO_SCROLLBARS = 1447
+ERROR_INVALID_SCROLLBAR_RANGE = 1448
+ERROR_INVALID_SHOWWIN_COMMAND = 1449
+ERROR_NO_SYSTEM_RESOURCES = 1450
+ERROR_NONPAGED_SYSTEM_RESOURCES = 1451
+ERROR_PAGED_SYSTEM_RESOURCES = 1452
+ERROR_WORKING_SET_QUOTA = 1453
+ERROR_PAGEFILE_QUOTA = 1454
+ERROR_COMMITMENT_LIMIT = 1455
+ERROR_MENU_ITEM_NOT_FOUND = 1456
+ERROR_INVALID_KEYBOARD_HANDLE = 1457
+ERROR_HOOK_TYPE_NOT_ALLOWED = 1458
+ERROR_REQUIRES_INTERACTIVE_WINDOWSTATION = 1459
+ERROR_TIMEOUT = 1460
+ERROR_INVALID_MONITOR_HANDLE = 1461
+ERROR_EVENTLOG_FILE_CORRUPT = 1500
+ERROR_EVENTLOG_CANT_START = 1501
+ERROR_LOG_FILE_FULL = 1502
+ERROR_EVENTLOG_FILE_CHANGED = 1503
+ERROR_INSTALL_SERVICE = 1601
+ERROR_INSTALL_USEREXIT = 1602
+ERROR_INSTALL_FAILURE = 1603
+ERROR_INSTALL_SUSPEND = 1604
+ERROR_UNKNOWN_PRODUCT = 1605
+ERROR_UNKNOWN_FEATURE = 1606
+ERROR_UNKNOWN_COMPONENT = 1607
+ERROR_UNKNOWN_PROPERTY = 1608
+ERROR_INVALID_HANDLE_STATE = 1609
+ERROR_BAD_CONFIGURATION = 1610
+ERROR_INDEX_ABSENT = 1611
+ERROR_INSTALL_SOURCE_ABSENT = 1612
+ERROR_BAD_DATABASE_VERSION = 1613
+ERROR_PRODUCT_UNINSTALLED = 1614
+ERROR_BAD_QUERY_SYNTAX = 1615
+ERROR_INVALID_FIELD = 1616
+RPC_S_INVALID_STRING_BINDING = 1700
+RPC_S_WRONG_KIND_OF_BINDING = 1701
+RPC_S_INVALID_BINDING = 1702
+RPC_S_PROTSEQ_NOT_SUPPORTED = 1703
+RPC_S_INVALID_RPC_PROTSEQ = 1704
+RPC_S_INVALID_STRING_UUID = 1705
+RPC_S_INVALID_ENDPOINT_FORMAT = 1706
+RPC_S_INVALID_NET_ADDR = 1707
+RPC_S_NO_ENDPOINT_FOUND = 1708
+RPC_S_INVALID_TIMEOUT = 1709
+RPC_S_OBJECT_NOT_FOUND = 1710
+RPC_S_ALREADY_REGISTERED = 1711
+RPC_S_TYPE_ALREADY_REGISTERED = 1712
+RPC_S_ALREADY_LISTENING = 1713
+RPC_S_NO_PROTSEQS_REGISTERED = 1714
+RPC_S_NOT_LISTENING = 1715
+RPC_S_UNKNOWN_MGR_TYPE = 1716
+RPC_S_UNKNOWN_IF = 1717
+RPC_S_NO_BINDINGS = 1718
+RPC_S_NO_PROTSEQS = 1719
+RPC_S_CANT_CREATE_ENDPOINT = 1720
+RPC_S_OUT_OF_RESOURCES = 1721
+RPC_S_SERVER_UNAVAILABLE = 1722
+RPC_S_SERVER_TOO_BUSY = 1723
+RPC_S_INVALID_NETWORK_OPTIONS = 1724
+RPC_S_NO_CALL_ACTIVE = 1725
+RPC_S_CALL_FAILED = 1726
+RPC_S_CALL_FAILED_DNE = 1727
+RPC_S_PROTOCOL_ERROR = 1728
+RPC_S_UNSUPPORTED_TRANS_SYN = 1730
+RPC_S_UNSUPPORTED_TYPE = 1732
+RPC_S_INVALID_TAG = 1733
+RPC_S_INVALID_BOUND = 1734
+RPC_S_NO_ENTRY_NAME = 1735
+RPC_S_INVALID_NAME_SYNTAX = 1736
+RPC_S_UNSUPPORTED_NAME_SYNTAX = 1737
+RPC_S_UUID_NO_ADDRESS = 1739
+RPC_S_DUPLICATE_ENDPOINT = 1740
+RPC_S_UNKNOWN_AUTHN_TYPE = 1741
+RPC_S_MAX_CALLS_TOO_SMALL = 1742
+RPC_S_STRING_TOO_LONG = 1743
+RPC_S_PROTSEQ_NOT_FOUND = 1744
+RPC_S_PROCNUM_OUT_OF_RANGE = 1745
+RPC_S_BINDING_HAS_NO_AUTH = 1746
+RPC_S_UNKNOWN_AUTHN_SERVICE = 1747
+RPC_S_UNKNOWN_AUTHN_LEVEL = 1748
+RPC_S_INVALID_AUTH_IDENTITY = 1749
+RPC_S_UNKNOWN_AUTHZ_SERVICE = 1750
+EPT_S_INVALID_ENTRY = 1751
+EPT_S_CANT_PERFORM_OP = 1752
+EPT_S_NOT_REGISTERED = 1753
+RPC_S_NOTHING_TO_EXPORT = 1754
+RPC_S_INCOMPLETE_NAME = 1755
+RPC_S_INVALID_VERS_OPTION = 1756
+RPC_S_NO_MORE_MEMBERS = 1757
+RPC_S_NOT_ALL_OBJS_UNEXPORTED = 1758
+RPC_S_INTERFACE_NOT_FOUND = 1759
+RPC_S_ENTRY_ALREADY_EXISTS = 1760
+RPC_S_ENTRY_NOT_FOUND = 1761
+RPC_S_NAME_SERVICE_UNAVAILABLE = 1762
+RPC_S_INVALID_NAF_ID = 1763
+RPC_S_CANNOT_SUPPORT = 1764
+RPC_S_NO_CONTEXT_AVAILABLE = 1765
+RPC_S_INTERNAL_ERROR = 1766
+RPC_S_ZERO_DIVIDE = 1767
+RPC_S_ADDRESS_ERROR = 1768
+RPC_S_FP_DIV_ZERO = 1769
+RPC_S_FP_UNDERFLOW = 1770
+RPC_S_FP_OVERFLOW = 1771
+RPC_X_NO_MORE_ENTRIES = 1772
+RPC_X_SS_CHAR_TRANS_OPEN_FAIL = 1773
+RPC_X_SS_CHAR_TRANS_SHORT_FILE = 1774
+RPC_X_SS_IN_NULL_CONTEXT = 1775
+RPC_X_SS_CONTEXT_DAMAGED = 1777
+RPC_X_SS_HANDLES_MISMATCH = 1778
+RPC_X_SS_CANNOT_GET_CALL_HANDLE = 1779
+RPC_X_NULL_REF_POINTER = 1780
+RPC_X_ENUM_VALUE_OUT_OF_RANGE = 1781
+RPC_X_BYTE_COUNT_TOO_SMALL = 1782
+RPC_X_BAD_STUB_DATA = 1783
+ERROR_INVALID_USER_BUFFER = 1784
+ERROR_UNRECOGNIZED_MEDIA = 1785
+ERROR_NO_TRUST_LSA_SECRET = 1786
+ERROR_NO_TRUST_SAM_ACCOUNT = 1787
+ERROR_TRUSTED_DOMAIN_FAILURE = 1788
+ERROR_TRUSTED_RELATIONSHIP_FAILURE = 1789
+ERROR_TRUST_FAILURE = 1790
+RPC_S_CALL_IN_PROGRESS = 1791
+ERROR_NETLOGON_NOT_STARTED = 1792
+ERROR_ACCOUNT_EXPIRED = 1793
+ERROR_REDIRECTOR_HAS_OPEN_HANDLES = 1794
+ERROR_PRINTER_DRIVER_ALREADY_INSTALLED = 1795
+ERROR_UNKNOWN_PORT = 1796
+ERROR_UNKNOWN_PRINTER_DRIVER = 1797
+ERROR_UNKNOWN_PRINTPROCESSOR = 1798
+ERROR_INVALID_SEPARATOR_FILE = 1799
+ERROR_INVALID_PRIORITY = 1800
+ERROR_INVALID_PRINTER_NAME = 1801
+ERROR_PRINTER_ALREADY_EXISTS = 1802
+ERROR_INVALID_PRINTER_COMMAND = 1803
+ERROR_INVALID_DATATYPE = 1804
+ERROR_INVALID_ENVIRONMENT = 1805
+RPC_S_NO_MORE_BINDINGS = 1806
+ERROR_NOLOGON_INTERDOMAIN_TRUST_ACCOUNT = 1807
+ERROR_NOLOGON_WORKSTATION_TRUST_ACCOUNT = 1808
+ERROR_NOLOGON_SERVER_TRUST_ACCOUNT = 1809
+ERROR_DOMAIN_TRUST_INCONSISTENT = 1810
+ERROR_SERVER_HAS_OPEN_HANDLES = 1811
+ERROR_RESOURCE_DATA_NOT_FOUND = 1812
+ERROR_RESOURCE_TYPE_NOT_FOUND = 1813
+ERROR_RESOURCE_NAME_NOT_FOUND = 1814
+ERROR_RESOURCE_LANG_NOT_FOUND = 1815
+ERROR_NOT_ENOUGH_QUOTA = 1816
+RPC_S_NO_INTERFACES = 1817
+RPC_S_CALL_CANCELLED = 1818
+RPC_S_BINDING_INCOMPLETE = 1819
+RPC_S_COMM_FAILURE = 1820
+RPC_S_UNSUPPORTED_AUTHN_LEVEL = 1821
+RPC_S_NO_PRINC_NAME = 1822
+RPC_S_NOT_RPC_ERROR = 1823
+RPC_S_UUID_LOCAL_ONLY = 1824
+RPC_S_SEC_PKG_ERROR = 1825
+RPC_S_NOT_CANCELLED = 1826
+RPC_X_INVALID_ES_ACTION = 1827
+RPC_X_WRONG_ES_VERSION = 1828
+RPC_X_WRONG_STUB_VERSION = 1829
+RPC_X_INVALID_PIPE_OBJECT = 1830
+RPC_X_WRONG_PIPE_ORDER = 1831
+RPC_X_WRONG_PIPE_VERSION = 1832
+RPC_S_GROUP_MEMBER_NOT_FOUND = 1898
+EPT_S_CANT_CREATE = 1899
+RPC_S_INVALID_OBJECT = 1900
+ERROR_INVALID_TIME = 1901
+ERROR_INVALID_FORM_NAME = 1902
+ERROR_INVALID_FORM_SIZE = 1903
+ERROR_ALREADY_WAITING = 1904
+ERROR_PRINTER_DELETED = 1905
+ERROR_INVALID_PRINTER_STATE = 1906
+ERROR_PASSWORD_MUST_CHANGE = 1907
+ERROR_DOMAIN_CONTROLLER_NOT_FOUND = 1908
+ERROR_ACCOUNT_LOCKED_OUT = 1909
+OR_INVALID_OXID = 1910
+OR_INVALID_OID = 1911
+OR_INVALID_SET = 1912
+RPC_S_SEND_INCOMPLETE = 1913
+RPC_S_INVALID_ASYNC_HANDLE = 1914
+RPC_S_INVALID_ASYNC_CALL = 1915
+RPC_X_PIPE_CLOSED = 1916
+RPC_X_PIPE_DISCIPLINE_ERROR = 1917
+RPC_X_PIPE_EMPTY = 1918
+ERROR_NO_SITENAME = 1919
+ERROR_CANT_ACCESS_FILE = 1920
+ERROR_CANT_RESOLVE_FILENAME = 1921
+ERROR_DS_MEMBERSHIP_EVALUATED_LOCALLY = 1922
+ERROR_DS_NO_ATTRIBUTE_OR_VALUE = 1923
+ERROR_DS_INVALID_ATTRIBUTE_SYNTAX = 1924
+ERROR_DS_ATTRIBUTE_TYPE_UNDEFINED = 1925
+ERROR_DS_ATTRIBUTE_OR_VALUE_EXISTS = 1926
+ERROR_DS_BUSY = 1927
+ERROR_DS_UNAVAILABLE = 1928
+ERROR_DS_NO_RIDS_ALLOCATED = 1929
+ERROR_DS_NO_MORE_RIDS = 1930
+ERROR_DS_INCORRECT_ROLE_OWNER = 1931
+ERROR_DS_RIDMGR_INIT_ERROR = 1932
+ERROR_DS_OBJ_CLASS_VIOLATION = 1933
+ERROR_DS_CANT_ON_NON_LEAF = 1934
+ERROR_DS_CANT_ON_RDN = 1935
+ERROR_DS_CANT_MOD_OBJ_CLASS = 1936
+ERROR_DS_CROSS_DOM_MOVE_ERROR = 1937
+ERROR_DS_GC_NOT_AVAILABLE = 1938
+ERROR_NO_BROWSER_SERVERS_FOUND = 6118
+ERROR_INVALID_PIXEL_FORMAT = 2000
+ERROR_BAD_DRIVER = 2001
+ERROR_INVALID_WINDOW_STYLE = 2002
+ERROR_METAFILE_NOT_SUPPORTED = 2003
+ERROR_TRANSFORM_NOT_SUPPORTED = 2004
+ERROR_CLIPPING_NOT_SUPPORTED = 2005
+ERROR_INVALID_CMM = 2300
+ERROR_INVALID_PROFILE = 2301
+ERROR_TAG_NOT_FOUND = 2302
+ERROR_TAG_NOT_PRESENT = 2303
+ERROR_DUPLICATE_TAG = 2304
+ERROR_PROFILE_NOT_ASSOCIATED_WITH_DEVICE = 2305
+ERROR_PROFILE_NOT_FOUND = 2306
+ERROR_INVALID_COLORSPACE = 2307
+ERROR_ICM_NOT_ENABLED = 2308
+ERROR_DELETING_ICM_XFORM = 2309
+ERROR_INVALID_TRANSFORM = 2310
+ERROR_UNKNOWN_PRINT_MONITOR = 3000
+ERROR_PRINTER_DRIVER_IN_USE = 3001
+ERROR_SPOOL_FILE_NOT_FOUND = 3002
+ERROR_SPL_NO_STARTDOC = 3003
+ERROR_SPL_NO_ADDJOB = 3004
+ERROR_PRINT_PROCESSOR_ALREADY_INSTALLED = 3005
+ERROR_PRINT_MONITOR_ALREADY_INSTALLED = 3006
+ERROR_INVALID_PRINT_MONITOR = 3007
+ERROR_PRINT_MONITOR_IN_USE = 3008
+ERROR_PRINTER_HAS_JOBS_QUEUED = 3009
+ERROR_SUCCESS_REBOOT_REQUIRED = 3010
+ERROR_SUCCESS_RESTART_REQUIRED = 3011
+ERROR_WINS_INTERNAL = 4000
+ERROR_CAN_NOT_DEL_LOCAL_WINS = 4001
+ERROR_STATIC_INIT = 4002
+ERROR_INC_BACKUP = 4003
+ERROR_FULL_BACKUP = 4004
+ERROR_REC_NON_EXISTENT = 4005
+ERROR_RPL_NOT_ALLOWED = 4006
+ERROR_DHCP_ADDRESS_CONFLICT = 4100
+ERROR_WMI_GUID_NOT_FOUND = 4200
+ERROR_WMI_INSTANCE_NOT_FOUND = 4201
+ERROR_WMI_ITEMID_NOT_FOUND = 4202
+ERROR_WMI_TRY_AGAIN = 4203
+ERROR_WMI_DP_NOT_FOUND = 4204
+ERROR_WMI_UNRESOLVED_INSTANCE_REF = 4205
+ERROR_WMI_ALREADY_ENABLED = 4206
+ERROR_WMI_GUID_DISCONNECTED = 4207
+ERROR_WMI_SERVER_UNAVAILABLE = 4208
+ERROR_WMI_DP_FAILED = 4209
+ERROR_WMI_INVALID_MOF = 4210
+ERROR_WMI_INVALID_REGINFO = 4211
+ERROR_INVALID_MEDIA = 4300
+ERROR_INVALID_LIBRARY = 4301
+ERROR_INVALID_MEDIA_POOL = 4302
+ERROR_DRIVE_MEDIA_MISMATCH = 4303
+ERROR_MEDIA_OFFLINE = 4304
+ERROR_LIBRARY_OFFLINE = 4305
+ERROR_EMPTY = 4306
+ERROR_NOT_EMPTY = 4307
+ERROR_MEDIA_UNAVAILABLE = 4308
+ERROR_RESOURCE_DISABLED = 4309
+ERROR_INVALID_CLEANER = 4310
+ERROR_UNABLE_TO_CLEAN = 4311
+ERROR_OBJECT_NOT_FOUND = 4312
+ERROR_DATABASE_FAILURE = 4313
+ERROR_DATABASE_FULL = 4314
+ERROR_MEDIA_INCOMPATIBLE = 4315
+ERROR_RESOURCE_NOT_PRESENT = 4316
+ERROR_INVALID_OPERATION = 4317
+ERROR_MEDIA_NOT_AVAILABLE = 4318
+ERROR_DEVICE_NOT_AVAILABLE = 4319
+ERROR_REQUEST_REFUSED = 4320
+ERROR_FILE_OFFLINE = 4350
+ERROR_REMOTE_STORAGE_NOT_ACTIVE = 4351
+ERROR_REMOTE_STORAGE_MEDIA_ERROR = 4352
+ERROR_NOT_A_REPARSE_POINT = 4390
+ERROR_REPARSE_ATTRIBUTE_CONFLICT = 4391
+ERROR_DEPENDENT_RESOURCE_EXISTS = 5001
+ERROR_DEPENDENCY_NOT_FOUND = 5002
+ERROR_DEPENDENCY_ALREADY_EXISTS = 5003
+ERROR_RESOURCE_NOT_ONLINE = 5004
+ERROR_HOST_NODE_NOT_AVAILABLE = 5005
+ERROR_RESOURCE_NOT_AVAILABLE = 5006
+ERROR_RESOURCE_NOT_FOUND = 5007
+ERROR_SHUTDOWN_CLUSTER = 5008
+ERROR_CANT_EVICT_ACTIVE_NODE = 5009
+ERROR_OBJECT_ALREADY_EXISTS = 5010
+ERROR_OBJECT_IN_LIST = 5011
+ERROR_GROUP_NOT_AVAILABLE = 5012
+ERROR_GROUP_NOT_FOUND = 5013
+ERROR_GROUP_NOT_ONLINE = 5014
+ERROR_HOST_NODE_NOT_RESOURCE_OWNER = 5015
+ERROR_HOST_NODE_NOT_GROUP_OWNER = 5016
+ERROR_RESMON_CREATE_FAILED = 5017
+ERROR_RESMON_ONLINE_FAILED = 5018
+ERROR_RESOURCE_ONLINE = 5019
+ERROR_QUORUM_RESOURCE = 5020
+ERROR_NOT_QUORUM_CAPABLE = 5021
+ERROR_CLUSTER_SHUTTING_DOWN = 5022
+ERROR_INVALID_STATE = 5023
+ERROR_RESOURCE_PROPERTIES_STORED = 5024
+ERROR_NOT_QUORUM_CLASS = 5025
+ERROR_CORE_RESOURCE = 5026
+ERROR_QUORUM_RESOURCE_ONLINE_FAILED = 5027
+ERROR_QUORUMLOG_OPEN_FAILED = 5028
+ERROR_CLUSTERLOG_CORRUPT = 5029
+ERROR_CLUSTERLOG_RECORD_EXCEEDS_MAXSIZE = 5030
+ERROR_CLUSTERLOG_EXCEEDS_MAXSIZE = 5031
+ERROR_CLUSTERLOG_CHKPOINT_NOT_FOUND = 5032
+ERROR_CLUSTERLOG_NOT_ENOUGH_SPACE = 5033
+ERROR_ENCRYPTION_FAILED = 6000
+ERROR_DECRYPTION_FAILED = 6001
+ERROR_FILE_ENCRYPTED = 6002
+ERROR_NO_RECOVERY_POLICY = 6003
+ERROR_NO_EFS = 6004
+ERROR_WRONG_EFS = 6005
+ERROR_NO_USER_KEYS = 6006
+ERROR_FILE_NOT_ENCRYPTED = 6007
+ERROR_NOT_EXPORT_FORMAT = 6008
+SEVERITY_SUCCESS = 0
+SEVERITY_ERROR = 1
+
+def HRESULT_FROM_WIN32(scode): return -2147024896 | (scode & 65535)
+
+def SUCCEEDED(Status): return ((Status) >= 0)
+
+def FAILED(Status): return (Status<0)
+
+def HRESULT_CODE(hr): return ((hr) & 65535)
+
+def SCODE_CODE(sc): return ((sc) & 65535)
+
+def HRESULT_FACILITY(hr): return (((hr) >> 16) & 8191)
+
+def SCODE_FACILITY(sc): return (((sc) >> 16) & 8191)
+
+def HRESULT_SEVERITY(hr): return (((hr) >> 31) & 1)
+
+def SCODE_SEVERITY(sc): return (((sc) >> 31) & 1)
+
+FACILITY_NT_BIT = 268435456
+def HRESULT_FROM_NT(x): return x | FACILITY_NT_BIT
+
+def GetScode(hr): return hr
+
+def ResultFromScode(sc): return sc
+
+NOERROR = 0
+E_UNEXPECTED = -2147418113
+E_NOTIMPL = -2147467263
+E_OUTOFMEMORY = -2147024882
+E_INVALIDARG = -2147024809
+E_NOINTERFACE = -2147467262
+E_POINTER = -2147467261
+E_HANDLE = -2147024890
+E_ABORT = -2147467260
+E_FAIL = -2147467259
+E_ACCESSDENIED = -2147024891
+win16_E_NOTIMPL = -2147483647
+win16_E_OUTOFMEMORY = -2147483646
+win16_E_INVALIDARG = -2147483645
+win16_E_NOINTERFACE = -2147483644
+win16_E_POINTER = -2147483643
+win16_E_HANDLE = -2147483642
+win16_E_ABORT = -2147483641
+win16_E_FAIL = -2147483640
+win16_E_ACCESSDENIED = -2147483639
+E_PENDING = -2147483638
+CO_E_INIT_TLS = -2147467258
+CO_E_INIT_SHARED_ALLOCATOR = -2147467257
+CO_E_INIT_MEMORY_ALLOCATOR = -2147467256
+CO_E_INIT_CLASS_CACHE = -2147467255
+CO_E_INIT_RPC_CHANNEL = -2147467254
+CO_E_INIT_TLS_SET_CHANNEL_CONTROL = -2147467253
+CO_E_INIT_TLS_CHANNEL_CONTROL = -2147467252
+CO_E_INIT_UNACCEPTED_USER_ALLOCATOR = -2147467251
+CO_E_INIT_SCM_MUTEX_EXISTS = -2147467250
+CO_E_INIT_SCM_FILE_MAPPING_EXISTS = -2147467249
+CO_E_INIT_SCM_MAP_VIEW_OF_FILE = -2147467248
+CO_E_INIT_SCM_EXEC_FAILURE = -2147467247
+CO_E_INIT_ONLY_SINGLE_THREADED = -2147467246
+CO_E_CANT_REMOTE = -2147467245
+CO_E_BAD_SERVER_NAME = -2147467244
+CO_E_WRONG_SERVER_IDENTITY = -2147467243
+CO_E_OLE1DDE_DISABLED = -2147467242
+CO_E_RUNAS_SYNTAX = -2147467241
+CO_E_CREATEPROCESS_FAILURE = -2147467240
+CO_E_RUNAS_CREATEPROCESS_FAILURE = -2147467239
+CO_E_RUNAS_LOGON_FAILURE = -2147467238
+CO_E_LAUNCH_PERMSSION_DENIED = -2147467237
+CO_E_START_SERVICE_FAILURE = -2147467236
+CO_E_REMOTE_COMMUNICATION_FAILURE = -2147467235
+CO_E_SERVER_START_TIMEOUT = -2147467234
+CO_E_CLSREG_INCONSISTENT = -2147467233
+CO_E_IIDREG_INCONSISTENT = -2147467232
+CO_E_NOT_SUPPORTED = -2147467231
+CO_E_RELOAD_DLL = -2147467230
+CO_E_MSI_ERROR = -2147467229
+
+OLE_E_FIRST = -2147221504
+OLE_E_LAST = -2147221249
+OLE_S_FIRST = 262144
+OLE_S_LAST = 262399
+
+OLE_E_OLEVERB = -2147221504
+OLE_E_ADVF = -2147221503
+OLE_E_ENUM_NOMORE = -2147221502
+OLE_E_ADVISENOTSUPPORTED = -2147221501
+OLE_E_NOCONNECTION = -2147221500
+OLE_E_NOTRUNNING = -2147221499
+OLE_E_NOCACHE = -2147221498
+OLE_E_BLANK = -2147221497
+OLE_E_CLASSDIFF = -2147221496
+OLE_E_CANT_GETMONIKER = -2147221495
+OLE_E_CANT_BINDTOSOURCE = -2147221494
+OLE_E_STATIC = -2147221493
+OLE_E_PROMPTSAVECANCELLED = -2147221492
+OLE_E_INVALIDRECT = -2147221491
+OLE_E_WRONGCOMPOBJ = -2147221490
+OLE_E_INVALIDHWND = -2147221489
+OLE_E_NOT_INPLACEACTIVE = -2147221488
+OLE_E_CANTCONVERT = -2147221487
+OLE_E_NOSTORAGE = -2147221486
+DV_E_FORMATETC = -2147221404
+DV_E_DVTARGETDEVICE = -2147221403
+DV_E_STGMEDIUM = -2147221402
+DV_E_STATDATA = -2147221401
+DV_E_LINDEX = -2147221400
+DV_E_TYMED = -2147221399
+DV_E_CLIPFORMAT = -2147221398
+DV_E_DVASPECT = -2147221397
+DV_E_DVTARGETDEVICE_SIZE = -2147221396
+DV_E_NOIVIEWOBJECT = -2147221395
+DRAGDROP_E_FIRST = -2147221248
+DRAGDROP_E_LAST = -2147221233
+DRAGDROP_S_FIRST = 262400
+DRAGDROP_S_LAST = 262415
+DRAGDROP_E_NOTREGISTERED = -2147221248
+DRAGDROP_E_ALREADYREGISTERED = -2147221247
+DRAGDROP_E_INVALIDHWND = -2147221246
+CLASSFACTORY_E_FIRST = -2147221232
+CLASSFACTORY_E_LAST = -2147221217
+CLASSFACTORY_S_FIRST = 262416
+CLASSFACTORY_S_LAST = 262431
+CLASS_E_NOAGGREGATION = -2147221232
+CLASS_E_CLASSNOTAVAILABLE = -2147221231
+CLASS_E_NOTLICENSED = -2147221230
+MARSHAL_E_FIRST = -2147221216
+MARSHAL_E_LAST = -2147221201
+MARSHAL_S_FIRST = 262432
+MARSHAL_S_LAST = 262447
+DATA_E_FIRST = -2147221200
+DATA_E_LAST = -2147221185
+DATA_S_FIRST = 262448
+DATA_S_LAST = 262463
+VIEW_E_FIRST = -2147221184
+VIEW_E_LAST = -2147221169
+VIEW_S_FIRST = 262464
+VIEW_S_LAST = 262479
+VIEW_E_DRAW = -2147221184
+REGDB_E_FIRST = -2147221168
+REGDB_E_LAST = -2147221153
+REGDB_S_FIRST = 262480
+REGDB_S_LAST = 262495
+REGDB_E_READREGDB = -2147221168
+REGDB_E_WRITEREGDB = -2147221167
+REGDB_E_KEYMISSING = -2147221166
+REGDB_E_INVALIDVALUE = -2147221165
+REGDB_E_CLASSNOTREG = -2147221164
+REGDB_E_IIDNOTREG = -2147221163
+CAT_E_FIRST = -2147221152
+CAT_E_LAST = -2147221151
+CAT_E_CATIDNOEXIST = -2147221152
+CAT_E_NODESCRIPTION = -2147221151
+CS_E_FIRST = -2147221148
+CS_E_LAST = -2147221144
+CS_E_PACKAGE_NOTFOUND = -2147221148
+CS_E_NOT_DELETABLE = -2147221147
+CS_E_CLASS_NOTFOUND = -2147221146
+CS_E_INVALID_VERSION = -2147221145
+CS_E_NO_CLASSSTORE = -2147221144
+CACHE_E_FIRST = -2147221136
+CACHE_E_LAST = -2147221121
+CACHE_S_FIRST = 262512
+CACHE_S_LAST = 262527
+CACHE_E_NOCACHE_UPDATED = -2147221136
+OLEOBJ_E_FIRST = -2147221120
+OLEOBJ_E_LAST = -2147221105
+OLEOBJ_S_FIRST = 262528
+OLEOBJ_S_LAST = 262543
+OLEOBJ_E_NOVERBS = -2147221120
+OLEOBJ_E_INVALIDVERB = -2147221119
+CLIENTSITE_E_FIRST = -2147221104
+CLIENTSITE_E_LAST = -2147221089
+CLIENTSITE_S_FIRST = 262544
+CLIENTSITE_S_LAST = 262559
+INPLACE_E_NOTUNDOABLE = -2147221088
+INPLACE_E_NOTOOLSPACE = -2147221087
+INPLACE_E_FIRST = -2147221088
+INPLACE_E_LAST = -2147221073
+INPLACE_S_FIRST = 262560
+INPLACE_S_LAST = 262575
+ENUM_E_FIRST = -2147221072
+ENUM_E_LAST = -2147221057
+ENUM_S_FIRST = 262576
+ENUM_S_LAST = 262591
+CONVERT10_E_FIRST = -2147221056
+CONVERT10_E_LAST = -2147221041
+CONVERT10_S_FIRST = 262592
+CONVERT10_S_LAST = 262607
+CONVERT10_E_OLESTREAM_GET = -2147221056
+CONVERT10_E_OLESTREAM_PUT = -2147221055
+CONVERT10_E_OLESTREAM_FMT = -2147221054
+CONVERT10_E_OLESTREAM_BITMAP_TO_DIB = -2147221053
+CONVERT10_E_STG_FMT = -2147221052
+CONVERT10_E_STG_NO_STD_STREAM = -2147221051
+CONVERT10_E_STG_DIB_TO_BITMAP = -2147221050
+CLIPBRD_E_FIRST = -2147221040
+CLIPBRD_E_LAST = -2147221025
+CLIPBRD_S_FIRST = 262608
+CLIPBRD_S_LAST = 262623
+CLIPBRD_E_CANT_OPEN = -2147221040
+CLIPBRD_E_CANT_EMPTY = -2147221039
+CLIPBRD_E_CANT_SET = -2147221038
+CLIPBRD_E_BAD_DATA = -2147221037
+CLIPBRD_E_CANT_CLOSE = -2147221036
+MK_E_FIRST = -2147221024
+MK_E_LAST = -2147221009
+MK_S_FIRST = 262624
+MK_S_LAST = 262639
+MK_E_CONNECTMANUALLY = -2147221024
+MK_E_EXCEEDEDDEADLINE = -2147221023
+MK_E_NEEDGENERIC = -2147221022
+MK_E_UNAVAILABLE = -2147221021
+MK_E_SYNTAX = -2147221020
+MK_E_NOOBJECT = -2147221019
+MK_E_INVALIDEXTENSION = -2147221018
+MK_E_INTERMEDIATEINTERFACENOTSUPPORTED = -2147221017
+MK_E_NOTBINDABLE = -2147221016
+MK_E_NOTBOUND = -2147221015
+MK_E_CANTOPENFILE = -2147221014
+MK_E_MUSTBOTHERUSER = -2147221013
+MK_E_NOINVERSE = -2147221012
+MK_E_NOSTORAGE = -2147221011
+MK_E_NOPREFIX = -2147221010
+MK_E_ENUMERATION_FAILED = -2147221009
+CO_E_FIRST = -2147221008
+CO_E_LAST = -2147220993
+CO_S_FIRST = 262640
+CO_S_LAST = 262655
+CO_E_NOTINITIALIZED = -2147221008
+CO_E_ALREADYINITIALIZED = -2147221007
+CO_E_CANTDETERMINECLASS = -2147221006
+CO_E_CLASSSTRING = -2147221005
+CO_E_IIDSTRING = -2147221004
+CO_E_APPNOTFOUND = -2147221003
+CO_E_APPSINGLEUSE = -2147221002
+CO_E_ERRORINAPP = -2147221001
+CO_E_DLLNOTFOUND = -2147221000
+CO_E_ERRORINDLL = -2147220999
+CO_E_WRONGOSFORAPP = -2147220998
+CO_E_OBJNOTREG = -2147220997
+CO_E_OBJISREG = -2147220996
+CO_E_OBJNOTCONNECTED = -2147220995
+CO_E_APPDIDNTREG = -2147220994
+CO_E_RELEASED = -2147220993
+CO_E_FAILEDTOIMPERSONATE = -2147220992
+CO_E_FAILEDTOGETSECCTX = -2147220991
+CO_E_FAILEDTOOPENTHREADTOKEN = -2147220990
+CO_E_FAILEDTOGETTOKENINFO = -2147220989
+CO_E_TRUSTEEDOESNTMATCHCLIENT = -2147220988
+CO_E_FAILEDTOQUERYCLIENTBLANKET = -2147220987
+CO_E_FAILEDTOSETDACL = -2147220986
+CO_E_ACCESSCHECKFAILED = -2147220985
+CO_E_NETACCESSAPIFAILED = -2147220984
+CO_E_WRONGTRUSTEENAMESYNTAX = -2147220983
+CO_E_INVALIDSID = -2147220982
+CO_E_CONVERSIONFAILED = -2147220981
+CO_E_NOMATCHINGSIDFOUND = -2147220980
+CO_E_LOOKUPACCSIDFAILED = -2147220979
+CO_E_NOMATCHINGNAMEFOUND = -2147220978
+CO_E_LOOKUPACCNAMEFAILED = -2147220977
+CO_E_SETSERLHNDLFAILED = -2147220976
+CO_E_FAILEDTOGETWINDIR = -2147220975
+CO_E_PATHTOOLONG = -2147220974
+CO_E_FAILEDTOGENUUID = -2147220973
+CO_E_FAILEDTOCREATEFILE = -2147220972
+CO_E_FAILEDTOCLOSEHANDLE = -2147220971
+CO_E_EXCEEDSYSACLLIMIT = -2147220970
+CO_E_ACESINWRONGORDER = -2147220969
+CO_E_INCOMPATIBLESTREAMVERSION = -2147220968
+CO_E_FAILEDTOOPENPROCESSTOKEN = -2147220967
+CO_E_DECODEFAILED = -2147220966
+CO_E_ACNOTINITIALIZED = -2147220965
+OLE_S_USEREG = 262144
+OLE_S_STATIC = 262145
+OLE_S_MAC_CLIPFORMAT = 262146
+DRAGDROP_S_DROP = 262400
+DRAGDROP_S_CANCEL = 262401
+DRAGDROP_S_USEDEFAULTCURSORS = 262402
+DATA_S_SAMEFORMATETC = 262448
+VIEW_S_ALREADY_FROZEN = 262464
+CACHE_S_FORMATETC_NOTSUPPORTED = 262512
+CACHE_S_SAMECACHE = 262513
+CACHE_S_SOMECACHES_NOTUPDATED = 262514
+OLEOBJ_S_INVALIDVERB = 262528
+OLEOBJ_S_CANNOT_DOVERB_NOW = 262529
+OLEOBJ_S_INVALIDHWND = 262530
+INPLACE_S_TRUNCATED = 262560
+CONVERT10_S_NO_PRESENTATION = 262592
+MK_S_REDUCED_TO_SELF = 262626
+MK_S_ME = 262628
+MK_S_HIM = 262629
+MK_S_US = 262630
+MK_S_MONIKERALREADYREGISTERED = 262631
+CO_E_CLASS_CREATE_FAILED = -2146959359
+CO_E_SCM_ERROR = -2146959358
+CO_E_SCM_RPC_FAILURE = -2146959357
+CO_E_BAD_PATH = -2146959356
+CO_E_SERVER_EXEC_FAILURE = -2146959355
+CO_E_OBJSRV_RPC_FAILURE = -2146959354
+MK_E_NO_NORMALIZED = -2146959353
+CO_E_SERVER_STOPPING = -2146959352
+MEM_E_INVALID_ROOT = -2146959351
+MEM_E_INVALID_LINK = -2146959344
+MEM_E_INVALID_SIZE = -2146959343
+CO_S_NOTALLINTERFACES = 524306
+DISP_E_UNKNOWNINTERFACE = -2147352575
+DISP_E_MEMBERNOTFOUND = -2147352573
+DISP_E_PARAMNOTFOUND = -2147352572
+DISP_E_TYPEMISMATCH = -2147352571
+DISP_E_UNKNOWNNAME = -2147352570
+DISP_E_NONAMEDARGS = -2147352569
+DISP_E_BADVARTYPE = -2147352568
+DISP_E_EXCEPTION = -2147352567
+DISP_E_OVERFLOW = -2147352566
+DISP_E_BADINDEX = -2147352565
+DISP_E_UNKNOWNLCID = -2147352564
+DISP_E_ARRAYISLOCKED = -2147352563
+DISP_E_BADPARAMCOUNT = -2147352562
+DISP_E_PARAMNOTOPTIONAL = -2147352561
+DISP_E_BADCALLEE = -2147352560
+DISP_E_NOTACOLLECTION = -2147352559
+DISP_E_DIVBYZERO = -2147352558
+TYPE_E_BUFFERTOOSMALL = -2147319786
+TYPE_E_FIELDNOTFOUND = -2147319785
+TYPE_E_INVDATAREAD = -2147319784
+TYPE_E_UNSUPFORMAT = -2147319783
+TYPE_E_REGISTRYACCESS = -2147319780
+TYPE_E_LIBNOTREGISTERED = -2147319779
+TYPE_E_UNDEFINEDTYPE = -2147319769
+TYPE_E_QUALIFIEDNAMEDISALLOWED = -2147319768
+TYPE_E_INVALIDSTATE = -2147319767
+TYPE_E_WRONGTYPEKIND = -2147319766
+TYPE_E_ELEMENTNOTFOUND = -2147319765
+TYPE_E_AMBIGUOUSNAME = -2147319764
+TYPE_E_NAMECONFLICT = -2147319763
+TYPE_E_UNKNOWNLCID = -2147319762
+TYPE_E_DLLFUNCTIONNOTFOUND = -2147319761
+TYPE_E_BADMODULEKIND = -2147317571
+TYPE_E_SIZETOOBIG = -2147317563
+TYPE_E_DUPLICATEID = -2147317562
+TYPE_E_INVALIDID = -2147317553
+TYPE_E_TYPEMISMATCH = -2147316576
+TYPE_E_OUTOFBOUNDS = -2147316575
+TYPE_E_IOERROR = -2147316574
+TYPE_E_CANTCREATETMPFILE = -2147316573
+TYPE_E_CANTLOADLIBRARY = -2147312566
+TYPE_E_INCONSISTENTPROPFUNCS = -2147312509
+TYPE_E_CIRCULARTYPE = -2147312508
+STG_E_INVALIDFUNCTION = -2147287039
+STG_E_FILENOTFOUND = -2147287038
+STG_E_PATHNOTFOUND = -2147287037
+STG_E_TOOMANYOPENFILES = -2147287036
+STG_E_ACCESSDENIED = -2147287035
+STG_E_INVALIDHANDLE = -2147287034
+STG_E_INSUFFICIENTMEMORY = -2147287032
+STG_E_INVALIDPOINTER = -2147287031
+STG_E_NOMOREFILES = -2147287022
+STG_E_DISKISWRITEPROTECTED = -2147287021
+STG_E_SEEKERROR = -2147287015
+STG_E_WRITEFAULT = -2147287011
+STG_E_READFAULT = -2147287010
+STG_E_SHAREVIOLATION = -2147287008
+STG_E_LOCKVIOLATION = -2147287007
+STG_E_FILEALREADYEXISTS = -2147286960
+STG_E_INVALIDPARAMETER = -2147286953
+STG_E_MEDIUMFULL = -2147286928
+STG_E_PROPSETMISMATCHED = -2147286800
+STG_E_ABNORMALAPIEXIT = -2147286790
+STG_E_INVALIDHEADER = -2147286789
+STG_E_INVALIDNAME = -2147286788
+STG_E_UNKNOWN = -2147286787
+STG_E_UNIMPLEMENTEDFUNCTION = -2147286786
+STG_E_INVALIDFLAG = -2147286785
+STG_E_INUSE = -2147286784
+STG_E_NOTCURRENT = -2147286783
+STG_E_REVERTED = -2147286782
+STG_E_CANTSAVE = -2147286781
+STG_E_OLDFORMAT = -2147286780
+STG_E_OLDDLL = -2147286779
+STG_E_SHAREREQUIRED = -2147286778
+STG_E_NOTFILEBASEDSTORAGE = -2147286777
+STG_E_EXTANTMARSHALLINGS = -2147286776
+STG_E_DOCFILECORRUPT = -2147286775
+STG_E_BADBASEADDRESS = -2147286768
+STG_E_INCOMPLETE = -2147286527
+STG_E_TERMINATED = -2147286526
+STG_S_CONVERTED = 197120
+STG_S_BLOCK = 197121
+STG_S_RETRYNOW = 197122
+STG_S_MONITORING = 197123
+STG_S_MULTIPLEOPENS = 197124
+STG_S_CONSOLIDATIONFAILED = 197125
+STG_S_CANNOTCONSOLIDATE = 197126
+RPC_E_CALL_REJECTED = -2147418111
+RPC_E_CALL_CANCELED = -2147418110
+RPC_E_CANTPOST_INSENDCALL = -2147418109
+RPC_E_CANTCALLOUT_INASYNCCALL = -2147418108
+RPC_E_CANTCALLOUT_INEXTERNALCALL = -2147418107
+RPC_E_CONNECTION_TERMINATED = -2147418106
+RPC_E_SERVER_DIED = -2147418105
+RPC_E_CLIENT_DIED = -2147418104
+RPC_E_INVALID_DATAPACKET = -2147418103
+RPC_E_CANTTRANSMIT_CALL = -2147418102
+RPC_E_CLIENT_CANTMARSHAL_DATA = -2147418101
+RPC_E_CLIENT_CANTUNMARSHAL_DATA = -2147418100
+RPC_E_SERVER_CANTMARSHAL_DATA = -2147418099
+RPC_E_SERVER_CANTUNMARSHAL_DATA = -2147418098
+RPC_E_INVALID_DATA = -2147418097
+RPC_E_INVALID_PARAMETER = -2147418096
+RPC_E_CANTCALLOUT_AGAIN = -2147418095
+RPC_E_SERVER_DIED_DNE = -2147418094
+RPC_E_SYS_CALL_FAILED = -2147417856
+RPC_E_OUT_OF_RESOURCES = -2147417855
+RPC_E_ATTEMPTED_MULTITHREAD = -2147417854
+RPC_E_NOT_REGISTERED = -2147417853
+RPC_E_FAULT = -2147417852
+RPC_E_SERVERFAULT = -2147417851
+RPC_E_CHANGED_MODE = -2147417850
+RPC_E_INVALIDMETHOD = -2147417849
+RPC_E_DISCONNECTED = -2147417848
+RPC_E_RETRY = -2147417847
+RPC_E_SERVERCALL_RETRYLATER = -2147417846
+RPC_E_SERVERCALL_REJECTED = -2147417845
+RPC_E_INVALID_CALLDATA = -2147417844
+RPC_E_CANTCALLOUT_ININPUTSYNCCALL = -2147417843
+RPC_E_WRONG_THREAD = -2147417842
+RPC_E_THREAD_NOT_INIT = -2147417841
+RPC_E_VERSION_MISMATCH = -2147417840
+RPC_E_INVALID_HEADER = -2147417839
+RPC_E_INVALID_EXTENSION = -2147417838
+RPC_E_INVALID_IPID = -2147417837
+RPC_E_INVALID_OBJECT = -2147417836
+RPC_S_CALLPENDING = -2147417835
+RPC_S_WAITONTIMER = -2147417834
+RPC_E_CALL_COMPLETE = -2147417833
+RPC_E_UNSECURE_CALL = -2147417832
+RPC_E_TOO_LATE = -2147417831
+RPC_E_NO_GOOD_SECURITY_PACKAGES = -2147417830
+RPC_E_ACCESS_DENIED = -2147417829
+RPC_E_REMOTE_DISABLED = -2147417828
+RPC_E_INVALID_OBJREF = -2147417827
+RPC_E_NO_CONTEXT = -2147417826
+RPC_E_TIMEOUT = -2147417825
+RPC_E_NO_SYNC = -2147417824
+RPC_E_UNEXPECTED = -2147352577
+NTE_BAD_UID = -2146893823
+NTE_BAD_HASH = -2146893822
+NTE_BAD_KEY = -2146893821
+NTE_BAD_LEN = -2146893820
+NTE_BAD_DATA = -2146893819
+NTE_BAD_SIGNATURE = -2146893818
+NTE_BAD_VER = -2146893817
+NTE_BAD_ALGID = -2146893816
+NTE_BAD_FLAGS = -2146893815
+NTE_BAD_TYPE = -2146893814
+NTE_BAD_KEY_STATE = -2146893813
+NTE_BAD_HASH_STATE = -2146893812
+NTE_NO_KEY = -2146893811
+NTE_NO_MEMORY = -2146893810
+NTE_EXISTS = -2146893809
+NTE_PERM = -2146893808
+NTE_NOT_FOUND = -2146893807
+NTE_DOUBLE_ENCRYPT = -2146893806
+NTE_BAD_PROVIDER = -2146893805
+NTE_BAD_PROV_TYPE = -2146893804
+NTE_BAD_PUBLIC_KEY = -2146893803
+NTE_BAD_KEYSET = -2146893802
+NTE_PROV_TYPE_NOT_DEF = -2146893801
+NTE_PROV_TYPE_ENTRY_BAD = -2146893800
+NTE_KEYSET_NOT_DEF = -2146893799
+NTE_KEYSET_ENTRY_BAD = -2146893798
+NTE_PROV_TYPE_NO_MATCH = -2146893797
+NTE_SIGNATURE_FILE_BAD = -2146893796
+NTE_PROVIDER_DLL_FAIL = -2146893795
+NTE_PROV_DLL_NOT_FOUND = -2146893794
+NTE_BAD_KEYSET_PARAM = -2146893793
+NTE_FAIL = -2146893792
+NTE_SYS_ERR = -2146893791
+CRYPT_E_MSG_ERROR = -2146889727
+CRYPT_E_UNKNOWN_ALGO = -2146889726
+CRYPT_E_OID_FORMAT = -2146889725
+CRYPT_E_INVALID_MSG_TYPE = -2146889724
+CRYPT_E_UNEXPECTED_ENCODING = -2146889723
+CRYPT_E_AUTH_ATTR_MISSING = -2146889722
+CRYPT_E_HASH_VALUE = -2146889721
+CRYPT_E_INVALID_INDEX = -2146889720
+CRYPT_E_ALREADY_DECRYPTED = -2146889719
+CRYPT_E_NOT_DECRYPTED = -2146889718
+CRYPT_E_RECIPIENT_NOT_FOUND = -2146889717
+CRYPT_E_CONTROL_TYPE = -2146889716
+CRYPT_E_ISSUER_SERIALNUMBER = -2146889715
+CRYPT_E_SIGNER_NOT_FOUND = -2146889714
+CRYPT_E_ATTRIBUTES_MISSING = -2146889713
+CRYPT_E_STREAM_MSG_NOT_READY = -2146889712
+CRYPT_E_STREAM_INSUFFICIENT_DATA = -2146889711
+CRYPT_E_BAD_LEN = -2146885631
+CRYPT_E_BAD_ENCODE = -2146885630
+CRYPT_E_FILE_ERROR = -2146885629
+CRYPT_E_NOT_FOUND = -2146885628
+CRYPT_E_EXISTS = -2146885627
+CRYPT_E_NO_PROVIDER = -2146885626
+CRYPT_E_SELF_SIGNED = -2146885625
+CRYPT_E_DELETED_PREV = -2146885624
+CRYPT_E_NO_MATCH = -2146885623
+CRYPT_E_UNEXPECTED_MSG_TYPE = -2146885622
+CRYPT_E_NO_KEY_PROPERTY = -2146885621
+CRYPT_E_NO_DECRYPT_CERT = -2146885620
+CRYPT_E_BAD_MSG = -2146885619
+CRYPT_E_NO_SIGNER = -2146885618
+CRYPT_E_PENDING_CLOSE = -2146885617
+CRYPT_E_REVOKED = -2146885616
+CRYPT_E_NO_REVOCATION_DLL = -2146885615
+CRYPT_E_NO_REVOCATION_CHECK = -2146885614
+CRYPT_E_REVOCATION_OFFLINE = -2146885613
+CRYPT_E_NOT_IN_REVOCATION_DATABASE = -2146885612
+CRYPT_E_INVALID_NUMERIC_STRING = -2146885600
+CRYPT_E_INVALID_PRINTABLE_STRING = -2146885599
+CRYPT_E_INVALID_IA5_STRING = -2146885598
+CRYPT_E_INVALID_X500_STRING = -2146885597
+CRYPT_E_NOT_CHAR_STRING = -2146885596
+CRYPT_E_FILERESIZED = -2146885595
+CRYPT_E_SECURITY_SETTINGS = -2146885594
+CRYPT_E_NO_VERIFY_USAGE_DLL = -2146885593
+CRYPT_E_NO_VERIFY_USAGE_CHECK = -2146885592
+CRYPT_E_VERIFY_USAGE_OFFLINE = -2146885591
+CRYPT_E_NOT_IN_CTL = -2146885590
+CRYPT_E_NO_TRUSTED_SIGNER = -2146885589
+CRYPT_E_OSS_ERROR = -2146881536
+CERTSRV_E_BAD_REQUESTSUBJECT = -2146877439
+CERTSRV_E_NO_REQUEST = -2146877438
+CERTSRV_E_BAD_REQUESTSTATUS = -2146877437
+CERTSRV_E_PROPERTY_EMPTY = -2146877436
+CERTDB_E_JET_ERROR = -2146873344
+TRUST_E_SYSTEM_ERROR = -2146869247
+TRUST_E_NO_SIGNER_CERT = -2146869246
+TRUST_E_COUNTER_SIGNER = -2146869245
+TRUST_E_CERT_SIGNATURE = -2146869244
+TRUST_E_TIME_STAMP = -2146869243
+TRUST_E_BAD_DIGEST = -2146869232
+TRUST_E_BASIC_CONSTRAINTS = -2146869223
+TRUST_E_FINANCIAL_CRITERIA = -2146869218
+NTE_OP_OK = 0
+TRUST_E_PROVIDER_UNKNOWN = -2146762751
+TRUST_E_ACTION_UNKNOWN = -2146762750
+TRUST_E_SUBJECT_FORM_UNKNOWN = -2146762749
+TRUST_E_SUBJECT_NOT_TRUSTED = -2146762748
+DIGSIG_E_ENCODE = -2146762747
+DIGSIG_E_DECODE = -2146762746
+DIGSIG_E_EXTENSIBILITY = -2146762745
+DIGSIG_E_CRYPTO = -2146762744
+PERSIST_E_SIZEDEFINITE = -2146762743
+PERSIST_E_SIZEINDEFINITE = -2146762742
+PERSIST_E_NOTSELFSIZING = -2146762741
+TRUST_E_NOSIGNATURE = -2146762496
+CERT_E_EXPIRED = -2146762495
+CERT_E_VALIDITYPERIODNESTING = -2146762494
+CERT_E_ROLE = -2146762493
+CERT_E_PATHLENCONST = -2146762492
+CERT_E_CRITICAL = -2146762491
+CERT_E_PURPOSE = -2146762490
+CERT_E_ISSUERCHAINING = -2146762489
+CERT_E_MALFORMED = -2146762488
+CERT_E_UNTRUSTEDROOT = -2146762487
+CERT_E_CHAINING = -2146762486
+TRUST_E_FAIL = -2146762485
+CERT_E_REVOKED = -2146762484
+CERT_E_UNTRUSTEDTESTROOT = -2146762483
+CERT_E_REVOCATION_FAILURE = -2146762482
+CERT_E_CN_NO_MATCH = -2146762481
+CERT_E_WRONG_USAGE = -2146762480
+SPAPI_E_EXPECTED_SECTION_NAME = -2146500608
+SPAPI_E_BAD_SECTION_NAME_LINE = -2146500607
+SPAPI_E_SECTION_NAME_TOO_LONG = -2146500606
+SPAPI_E_GENERAL_SYNTAX = -2146500605
+SPAPI_E_WRONG_INF_STYLE = -2146500352
+SPAPI_E_SECTION_NOT_FOUND = -2146500351
+SPAPI_E_LINE_NOT_FOUND = -2146500350
+SPAPI_E_NO_ASSOCIATED_CLASS = -2146500096
+SPAPI_E_CLASS_MISMATCH = -2146500095
+SPAPI_E_DUPLICATE_FOUND = -2146500094
+SPAPI_E_NO_DRIVER_SELECTED = -2146500093
+SPAPI_E_KEY_DOES_NOT_EXIST = -2146500092
+SPAPI_E_INVALID_DEVINST_NAME = -2146500091
+SPAPI_E_INVALID_CLASS = -2146500090
+SPAPI_E_DEVINST_ALREADY_EXISTS = -2146500089
+SPAPI_E_DEVINFO_NOT_REGISTERED = -2146500088
+SPAPI_E_INVALID_REG_PROPERTY = -2146500087
+SPAPI_E_NO_INF = -2146500086
+SPAPI_E_NO_SUCH_DEVINST = -2146500085
+SPAPI_E_CANT_LOAD_CLASS_ICON = -2146500084
+SPAPI_E_INVALID_CLASS_INSTALLER = -2146500083
+SPAPI_E_DI_DO_DEFAULT = -2146500082
+SPAPI_E_DI_NOFILECOPY = -2146500081
+SPAPI_E_INVALID_HWPROFILE = -2146500080
+SPAPI_E_NO_DEVICE_SELECTED = -2146500079
+SPAPI_E_DEVINFO_LIST_LOCKED = -2146500078
+SPAPI_E_DEVINFO_DATA_LOCKED = -2146500077
+SPAPI_E_DI_BAD_PATH = -2146500076
+SPAPI_E_NO_CLASSINSTALL_PARAMS = -2146500075
+SPAPI_E_FILEQUEUE_LOCKED = -2146500074
+SPAPI_E_BAD_SERVICE_INSTALLSECT = -2146500073
+SPAPI_E_NO_CLASS_DRIVER_LIST = -2146500072
+SPAPI_E_NO_ASSOCIATED_SERVICE = -2146500071
+SPAPI_E_NO_DEFAULT_DEVICE_INTERFACE = -2146500070
+SPAPI_E_DEVICE_INTERFACE_ACTIVE = -2146500069
+SPAPI_E_DEVICE_INTERFACE_REMOVED = -2146500068
+SPAPI_E_BAD_INTERFACE_INSTALLSECT = -2146500067
+SPAPI_E_NO_SUCH_INTERFACE_CLASS = -2146500066
+SPAPI_E_INVALID_REFERENCE_STRING = -2146500065
+SPAPI_E_INVALID_MACHINENAME = -2146500064
+SPAPI_E_REMOTE_COMM_FAILURE = -2146500063
+SPAPI_E_MACHINE_UNAVAILABLE = -2146500062
+SPAPI_E_NO_CONFIGMGR_SERVICES = -2146500061
+SPAPI_E_INVALID_PROPPAGE_PROVIDER = -2146500060
+SPAPI_E_NO_SUCH_DEVICE_INTERFACE = -2146500059
+SPAPI_E_DI_POSTPROCESSING_REQUIRED = -2146500058
+SPAPI_E_INVALID_COINSTALLER = -2146500057
+SPAPI_E_NO_COMPAT_DRIVERS = -2146500056
+SPAPI_E_NO_DEVICE_ICON = -2146500055
+SPAPI_E_INVALID_INF_LOGCONFIG = -2146500054
+SPAPI_E_DI_DONT_INSTALL = -2146500053
+SPAPI_E_INVALID_FILTER_DRIVER = -2146500052
+SPAPI_E_ERROR_NOT_INSTALLED = -2146496512
+
+# Directory storage
+ERROR_DS_NOT_INSTALLED = 8200
+ERROR_DS_MEMBERSHIP_EVALUATED_LOCALLY = 8201
+ERROR_DS_NO_ATTRIBUTE_OR_VALUE = 8202
+ERROR_DS_INVALID_ATTRIBUTE_SYNTAX = 8203
+ERROR_DS_ATTRIBUTE_TYPE_UNDEFINED = 8204
+ERROR_DS_ATTRIBUTE_OR_VALUE_EXISTS = 8205
+ERROR_DS_BUSY = 8206
+ERROR_DS_UNAVAILABLE = 8207
+ERROR_DS_NO_RIDS_ALLOCATED = 8208
+ERROR_DS_NO_MORE_RIDS = 8209
+ERROR_DS_INCORRECT_ROLE_OWNER = 8210
+ERROR_DS_RIDMGR_INIT_ERROR = 8211
+ERROR_DS_OBJ_CLASS_VIOLATION = 8212
+ERROR_DS_CANT_ON_NON_LEAF = 8213
+ERROR_DS_CANT_ON_RDN = 8214
+ERROR_DS_CANT_MOD_OBJ_CLASS = 8215
+ERROR_DS_CROSS_DOM_MOVE_ERROR = 8216
+ERROR_DS_GC_NOT_AVAILABLE = 8217
+ERROR_SHARED_POLICY = 8218
+ERROR_POLICY_OBJECT_NOT_FOUND = 8219
+ERROR_POLICY_ONLY_IN_DS = 8220
+ERROR_PROMOTION_ACTIVE = 8221
+ERROR_NO_PROMOTION_ACTIVE = 8222
+ERROR_DS_OPERATIONS_ERROR = 8224
+ERROR_DS_PROTOCOL_ERROR = 8225
+ERROR_DS_TIMELIMIT_EXCEEDED = 8226
+ERROR_DS_SIZELIMIT_EXCEEDED = 8227
+ERROR_DS_ADMIN_LIMIT_EXCEEDED = 8228
+ERROR_DS_COMPARE_FALSE = 8229
+ERROR_DS_COMPARE_TRUE = 8230
+ERROR_DS_AUTH_METHOD_NOT_SUPPORTED = 8231
+ERROR_DS_STRONG_AUTH_REQUIRED = 8232
+ERROR_DS_INAPPROPRIATE_AUTH = 8233
+ERROR_DS_AUTH_UNKNOWN = 8234
+ERROR_DS_REFERRAL = 8235
+ERROR_DS_UNAVAILABLE_CRIT_EXTENSION = 8236
+ERROR_DS_CONFIDENTIALITY_REQUIRED = 8237
+ERROR_DS_INAPPROPRIATE_MATCHING = 8238
+ERROR_DS_CONSTRAINT_VIOLATION = 8239
+ERROR_DS_NO_SUCH_OBJECT = 8240
+ERROR_DS_ALIAS_PROBLEM = 8241
+ERROR_DS_INVALID_DN_SYNTAX = 8242
+ERROR_DS_IS_LEAF = 8243
+ERROR_DS_ALIAS_DEREF_PROBLEM = 8244
+ERROR_DS_UNWILLING_TO_PERFORM = 8245
+ERROR_DS_LOOP_DETECT = 8246
+ERROR_DS_NAMING_VIOLATION = 8247
+ERROR_DS_OBJECT_RESULTS_TOO_LARGE = 8248
+ERROR_DS_AFFECTS_MULTIPLE_DSAS = 8249
+ERROR_DS_SERVER_DOWN = 8250
+ERROR_DS_LOCAL_ERROR = 8251
+ERROR_DS_ENCODING_ERROR = 8252
+ERROR_DS_DECODING_ERROR = 8253
+ERROR_DS_FILTER_UNKNOWN = 8254
+ERROR_DS_PARAM_ERROR = 8255
+ERROR_DS_NOT_SUPPORTED = 8256
+ERROR_DS_NO_RESULTS_RETURNED = 8257
+ERROR_DS_CONTROL_NOT_FOUND = 8258
+ERROR_DS_CLIENT_LOOP = 8259
+ERROR_DS_REFERRAL_LIMIT_EXCEEDED = 8260
+ERROR_DS_SORT_CONTROL_MISSING = 8261
+ERROR_DS_OFFSET_RANGE_ERROR = 8262
+ERROR_DS_ROOT_MUST_BE_NC = 8301
+ERROR_DS_ADD_REPLICA_INHIBITED = 8302
+ERROR_DS_ATT_NOT_DEF_IN_SCHEMA = 8303
+ERROR_DS_MAX_OBJ_SIZE_EXCEEDED = 8304
+ERROR_DS_OBJ_STRING_NAME_EXISTS = 8305
+ERROR_DS_NO_RDN_DEFINED_IN_SCHEMA = 8306
+ERROR_DS_RDN_DOESNT_MATCH_SCHEMA = 8307
+ERROR_DS_NO_REQUESTED_ATTS_FOUND = 8308
+ERROR_DS_USER_BUFFER_TO_SMALL = 8309
+ERROR_DS_ATT_IS_NOT_ON_OBJ = 8310
+ERROR_DS_ILLEGAL_MOD_OPERATION = 8311
+ERROR_DS_OBJ_TOO_LARGE = 8312
+ERROR_DS_BAD_INSTANCE_TYPE = 8313
+ERROR_DS_MASTERDSA_REQUIRED = 8314
+ERROR_DS_OBJECT_CLASS_REQUIRED = 8315
+ERROR_DS_MISSING_REQUIRED_ATT = 8316
+ERROR_DS_ATT_NOT_DEF_FOR_CLASS = 8317
+ERROR_DS_ATT_ALREADY_EXISTS = 8318
+ERROR_DS_CANT_ADD_ATT_VALUES = 8320
+ERROR_DS_SINGLE_VALUE_CONSTRAINT = 8321
+ERROR_DS_RANGE_CONSTRAINT = 8322
+ERROR_DS_ATT_VAL_ALREADY_EXISTS = 8323
+ERROR_DS_CANT_REM_MISSING_ATT = 8324
+ERROR_DS_CANT_REM_MISSING_ATT_VAL = 8325
+ERROR_DS_ROOT_CANT_BE_SUBREF = 8326
+ERROR_DS_NO_CHAINING = 8327
+ERROR_DS_NO_CHAINED_EVAL = 8328
+ERROR_DS_NO_PARENT_OBJECT = 8329
+ERROR_DS_PARENT_IS_AN_ALIAS = 8330
+ERROR_DS_CANT_MIX_MASTER_AND_REPS = 8331
+ERROR_DS_CHILDREN_EXIST = 8332
+ERROR_DS_OBJ_NOT_FOUND = 8333
+ERROR_DS_ALIASED_OBJ_MISSING = 8334
+ERROR_DS_BAD_NAME_SYNTAX = 8335
+ERROR_DS_ALIAS_POINTS_TO_ALIAS = 8336
+ERROR_DS_CANT_DEREF_ALIAS = 8337
+ERROR_DS_OUT_OF_SCOPE = 8338
+ERROR_DS_OBJECT_BEING_REMOVED = 8339
+ERROR_DS_CANT_DELETE_DSA_OBJ = 8340
+ERROR_DS_GENERIC_ERROR = 8341
+ERROR_DS_DSA_MUST_BE_INT_MASTER = 8342
+ERROR_DS_CLASS_NOT_DSA = 8343
+ERROR_DS_INSUFF_ACCESS_RIGHTS = 8344
+ERROR_DS_ILLEGAL_SUPERIOR = 8345
+ERROR_DS_ATTRIBUTE_OWNED_BY_SAM = 8346
+ERROR_DS_NAME_TOO_MANY_PARTS = 8347
+ERROR_DS_NAME_TOO_LONG = 8348
+ERROR_DS_NAME_VALUE_TOO_LONG = 8349
+ERROR_DS_NAME_UNPARSEABLE = 8350
+ERROR_DS_NAME_TYPE_UNKNOWN = 8351
+ERROR_DS_NOT_AN_OBJECT = 8352
+ERROR_DS_SEC_DESC_TOO_SHORT = 8353
+ERROR_DS_SEC_DESC_INVALID = 8354
+ERROR_DS_NO_DELETED_NAME = 8355
+ERROR_DS_SUBREF_MUST_HAVE_PARENT = 8356
+ERROR_DS_NCNAME_MUST_BE_NC = 8357
+ERROR_DS_CANT_ADD_SYSTEM_ONLY = 8358
+ERROR_DS_CLASS_MUST_BE_CONCRETE = 8359
+ERROR_DS_INVALID_DMD = 8360
+ERROR_DS_OBJ_GUID_EXISTS = 8361
+ERROR_DS_NOT_ON_BACKLINK = 8362
+ERROR_DS_NO_CROSSREF_FOR_NC = 8363
+ERROR_DS_SHUTTING_DOWN = 8364
+ERROR_DS_UNKNOWN_OPERATION = 8365
+ERROR_DS_INVALID_ROLE_OWNER = 8366
+ERROR_DS_COULDNT_CONTACT_FSMO = 8367
+ERROR_DS_CROSS_NC_DN_RENAME = 8368
+ERROR_DS_CANT_MOD_SYSTEM_ONLY = 8369
+ERROR_DS_REPLICATOR_ONLY = 8370
+ERROR_DS_OBJ_CLASS_NOT_DEFINED = 8371
+ERROR_DS_OBJ_CLASS_NOT_SUBCLASS = 8372
+ERROR_DS_NAME_REFERENCE_INVALID = 8373
+ERROR_DS_CROSS_REF_EXISTS = 8374
+ERROR_DS_CANT_DEL_MASTER_CROSSREF = 8375
+ERROR_DS_SUBTREE_NOTIFY_NOT_NC_HEAD = 8376
+ERROR_DS_NOTIFY_FILTER_TOO_COMPLEX = 8377
+ERROR_DS_DUP_RDN = 8378
+ERROR_DS_DUP_OID = 8379
+ERROR_DS_DUP_MAPI_ID = 8380
+ERROR_DS_DUP_SCHEMA_ID_GUID = 8381
+ERROR_DS_DUP_LDAP_DISPLAY_NAME = 8382
+ERROR_DS_SEMANTIC_ATT_TEST = 8383
+ERROR_DS_SYNTAX_MISMATCH = 8384
+ERROR_DS_EXISTS_IN_MUST_HAVE = 8385
+ERROR_DS_EXISTS_IN_MAY_HAVE = 8386
+ERROR_DS_NONEXISTENT_MAY_HAVE = 8387
+ERROR_DS_NONEXISTENT_MUST_HAVE = 8388
+ERROR_DS_AUX_CLS_TEST_FAIL = 8389
+ERROR_DS_NONEXISTENT_POSS_SUP = 8390
+ERROR_DS_SUB_CLS_TEST_FAIL = 8391
+ERROR_DS_BAD_RDN_ATT_ID_SYNTAX = 8392
+ERROR_DS_EXISTS_IN_AUX_CLS = 8393
+ERROR_DS_EXISTS_IN_SUB_CLS = 8394
+ERROR_DS_EXISTS_IN_POSS_SUP = 8395
+ERROR_DS_RECALCSCHEMA_FAILED = 8396
+ERROR_DS_TREE_DELETE_NOT_FINISHED = 8397
+ERROR_DS_CANT_DELETE = 8398
+ERROR_DS_ATT_SCHEMA_REQ_ID = 8399
+ERROR_DS_BAD_ATT_SCHEMA_SYNTAX = 8400
+ERROR_DS_CANT_CACHE_ATT = 8401
+ERROR_DS_CANT_CACHE_CLASS = 8402
+ERROR_DS_CANT_REMOVE_ATT_CACHE = 8403
+ERROR_DS_CANT_REMOVE_CLASS_CACHE = 8404
+ERROR_DS_CANT_RETRIEVE_DN = 8405
+ERROR_DS_MISSING_SUPREF = 8406
+ERROR_DS_CANT_RETRIEVE_INSTANCE = 8407
+ERROR_DS_CODE_INCONSISTENCY = 8408
+ERROR_DS_DATABASE_ERROR = 8409
+ERROR_DS_GOVERNSID_MISSING = 8410
+ERROR_DS_MISSING_EXPECTED_ATT = 8411
+ERROR_DS_NCNAME_MISSING_CR_REF = 8412
+ERROR_DS_SECURITY_CHECKING_ERROR = 8413
+ERROR_DS_SCHEMA_NOT_LOADED = 8414
+ERROR_DS_SCHEMA_ALLOC_FAILED = 8415
+ERROR_DS_ATT_SCHEMA_REQ_SYNTAX = 8416
+ERROR_DS_GCVERIFY_ERROR = 8417
+ERROR_DS_DRA_SCHEMA_MISMATCH = 8418
+ERROR_DS_CANT_FIND_DSA_OBJ = 8419
+ERROR_DS_CANT_FIND_EXPECTED_NC = 8420
+ERROR_DS_CANT_FIND_NC_IN_CACHE = 8421
+ERROR_DS_CANT_RETRIEVE_CHILD = 8422
+ERROR_DS_SECURITY_ILLEGAL_MODIFY = 8423
+ERROR_DS_CANT_REPLACE_HIDDEN_REC = 8424
+ERROR_DS_BAD_HIERARCHY_FILE = 8425
+ERROR_DS_BUILD_HIERARCHY_TABLE_FAILED = 8426
+ERROR_DS_CONFIG_PARAM_MISSING = 8427
+ERROR_DS_COUNTING_AB_INDICES_FAILED = 8428
+ERROR_DS_HIERARCHY_TABLE_MALLOC_FAILED = 8429
+ERROR_DS_INTERNAL_FAILURE = 8430
+ERROR_DS_UNKNOWN_ERROR = 8431
+ERROR_DS_ROOT_REQUIRES_CLASS_TOP = 8432
+ERROR_DS_REFUSING_FSMO_ROLES = 8433
+ERROR_DS_MISSING_FSMO_SETTINGS = 8434
+ERROR_DS_UNABLE_TO_SURRENDER_ROLES = 8435
+ERROR_DS_DRA_GENERIC = 8436
+ERROR_DS_DRA_INVALID_PARAMETER = 8437
+ERROR_DS_DRA_BUSY = 8438
+ERROR_DS_DRA_BAD_DN = 8439
+ERROR_DS_DRA_BAD_NC = 8440
+ERROR_DS_DRA_DN_EXISTS = 8441
+ERROR_DS_DRA_INTERNAL_ERROR = 8442
+ERROR_DS_DRA_INCONSISTENT_DIT = 8443
+ERROR_DS_DRA_CONNECTION_FAILED = 8444
+ERROR_DS_DRA_BAD_INSTANCE_TYPE = 8445
+ERROR_DS_DRA_OUT_OF_MEM = 8446
+ERROR_DS_DRA_MAIL_PROBLEM = 8447
+ERROR_DS_DRA_REF_ALREADY_EXISTS = 8448
+ERROR_DS_DRA_REF_NOT_FOUND = 8449
+ERROR_DS_DRA_OBJ_IS_REP_SOURCE = 8450
+ERROR_DS_DRA_DB_ERROR = 8451
+ERROR_DS_DRA_NO_REPLICA = 8452
+ERROR_DS_DRA_ACCESS_DENIED = 8453
+ERROR_DS_DRA_NOT_SUPPORTED = 8454
+ERROR_DS_DRA_RPC_CANCELLED = 8455
+ERROR_DS_DRA_SOURCE_DISABLED = 8456
+ERROR_DS_DRA_SINK_DISABLED = 8457
+ERROR_DS_DRA_NAME_COLLISION = 8458
+ERROR_DS_DRA_SOURCE_REINSTALLED = 8459
+ERROR_DS_DRA_MISSING_PARENT = 8460
+ERROR_DS_DRA_PREEMPTED = 8461
+ERROR_DS_DRA_ABANDON_SYNC = 8462
+ERROR_DS_DRA_SHUTDOWN = 8463
+ERROR_DS_DRA_INCOMPATIBLE_PARTIAL_SET = 8464
+ERROR_DS_DRA_SOURCE_IS_PARTIAL_REPLICA = 8465
+ERROR_DS_DRA_EXTN_CONNECTION_FAILED = 8466
+ERROR_DS_INSTALL_SCHEMA_MISMATCH = 8467
+ERROR_DS_DUP_LINK_ID = 8468
+ERROR_DS_NAME_ERROR_RESOLVING = 8469
+ERROR_DS_NAME_ERROR_NOT_FOUND = 8470
+ERROR_DS_NAME_ERROR_NOT_UNIQUE = 8471
+ERROR_DS_NAME_ERROR_NO_MAPPING = 8472
+ERROR_DS_NAME_ERROR_DOMAIN_ONLY = 8473
+ERROR_DS_NAME_ERROR_NO_SYNTACTICAL_MAPPING = 8474
+ERROR_DS_CONSTRUCTED_ATT_MOD = 8475
+ERROR_DS_WRONG_OM_OBJ_CLASS = 8476
+ERROR_DS_DRA_REPL_PENDING = 8477
+ERROR_DS_DS_REQUIRED = 8478
+ERROR_DS_INVALID_LDAP_DISPLAY_NAME = 8479
+ERROR_DS_NON_BASE_SEARCH = 8480
+ERROR_DS_CANT_RETRIEVE_ATTS = 8481
+ERROR_DS_BACKLINK_WITHOUT_LINK = 8482
+ERROR_DS_EPOCH_MISMATCH = 8483
+ERROR_DS_SRC_NAME_MISMATCH = 8484
+ERROR_DS_SRC_AND_DST_NC_IDENTICAL = 8485
+ERROR_DS_DST_NC_MISMATCH = 8486
+ERROR_DS_NOT_AUTHORITIVE_FOR_DST_NC = 8487
+ERROR_DS_SRC_GUID_MISMATCH = 8488
+ERROR_DS_CANT_MOVE_DELETED_OBJECT = 8489
+ERROR_DS_PDC_OPERATION_IN_PROGRESS = 8490
+ERROR_DS_CROSS_DOMAIN_CLEANUP_REQD = 8491
+ERROR_DS_ILLEGAL_XDOM_MOVE_OPERATION = 8492
+ERROR_DS_CANT_WITH_ACCT_GROUP_MEMBERSHPS = 8493
+ERROR_DS_NC_MUST_HAVE_NC_PARENT = 8494
+ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE = 8495
+ERROR_DS_DST_DOMAIN_NOT_NATIVE = 8496
+ERROR_DS_MISSING_INFRASTRUCTURE_CONTAINER = 8497
+ERROR_DS_CANT_MOVE_ACCOUNT_GROUP = 8498
+ERROR_DS_CANT_MOVE_RESOURCE_GROUP = 8499
+ERROR_DS_INVALID_SEARCH_FLAG = 8500
+ERROR_DS_NO_TREE_DELETE_ABOVE_NC = 8501
+ERROR_DS_COULDNT_LOCK_TREE_FOR_DELETE = 8502
+ERROR_DS_COULDNT_IDENTIFY_OBJECTS_FOR_TREE_DELETE = 8503
+ERROR_DS_SAM_INIT_FAILURE = 8504
+ERROR_DS_SENSITIVE_GROUP_VIOLATION = 8505
+ERROR_DS_CANT_MOD_PRIMARYGROUPID = 8506
+ERROR_DS_ILLEGAL_BASE_SCHEMA_MOD = 8507
+ERROR_DS_NONSAFE_SCHEMA_CHANGE = 8508
+ERROR_DS_SCHEMA_UPDATE_DISALLOWED = 8509
+ERROR_DS_CANT_CREATE_UNDER_SCHEMA = 8510
+ERROR_DS_INSTALL_NO_SRC_SCH_VERSION = 8511
+ERROR_DS_INSTALL_NO_SCH_VERSION_IN_INIFILE = 8512
+ERROR_DS_INVALID_GROUP_TYPE = 8513
+ERROR_DS_NO_NEST_GLOBALGROUP_IN_MIXEDDOMAIN = 8514
+ERROR_DS_NO_NEST_LOCALGROUP_IN_MIXEDDOMAIN = 8515
+ERROR_DS_GLOBAL_CANT_HAVE_LOCAL_MEMBER = 8516
+ERROR_DS_GLOBAL_CANT_HAVE_UNIVERSAL_MEMBER = 8517
+ERROR_DS_UNIVERSAL_CANT_HAVE_LOCAL_MEMBER = 8518
+ERROR_DS_GLOBAL_CANT_HAVE_CROSSDOMAIN_MEMBER = 8519
+ERROR_DS_LOCAL_CANT_HAVE_CROSSDOMAIN_LOCAL_MEMBER = 8520
+ERROR_DS_HAVE_PRIMARY_MEMBERS = 8521
+ERROR_DS_STRING_SD_CONVERSION_FAILED = 8522
+ERROR_DS_NAMING_MASTER_GC = 8523
+ERROR_DS_DNS_LOOKUP_FAILURE = 8524
+ERROR_DS_COULDNT_UPDATE_SPNS = 8525
+ERROR_DS_CANT_RETRIEVE_SD = 8526
+ERROR_DS_KEY_NOT_UNIQUE = 8527
+ERROR_DS_WRONG_LINKED_ATT_SYNTAX = 8528
+ERROR_DS_SAM_NEED_BOOTKEY_PASSWORD = 8529
+ERROR_DS_SAM_NEED_BOOTKEY_FLOPPY = 8530
+ERROR_DS_CANT_START = 8531
+ERROR_DS_INIT_FAILURE = 8532
+ERROR_DS_NO_PKT_PRIVACY_ON_CONNECTION = 8533
+ERROR_DS_SOURCE_DOMAIN_IN_FOREST = 8534
+ERROR_DS_DESTINATION_DOMAIN_NOT_IN_FOREST = 8535
+ERROR_DS_DESTINATION_AUDITING_NOT_ENABLED = 8536
+ERROR_DS_CANT_FIND_DC_FOR_SRC_DOMAIN = 8537
+ERROR_DS_SRC_OBJ_NOT_GROUP_OR_USER = 8538
+ERROR_DS_SRC_SID_EXISTS_IN_FOREST = 8539
+ERROR_DS_SRC_AND_DST_OBJECT_CLASS_MISMATCH = 8540
+ERROR_SAM_INIT_FAILURE = 8541
+ERROR_DS_DRA_SCHEMA_INFO_SHIP = 8542
+ERROR_DS_DRA_SCHEMA_CONFLICT = 8543
+ERROR_DS_DRA_EARLIER_SCHEMA_CONFLICT = 8544
+ERROR_DS_DRA_OBJ_NC_MISMATCH = 8545
+ERROR_DS_NC_STILL_HAS_DSAS = 8546
+ERROR_DS_GC_REQUIRED = 8547
+ERROR_DS_LOCAL_MEMBER_OF_LOCAL_ONLY = 8548
+ERROR_DS_NO_FPO_IN_UNIVERSAL_GROUPS = 8549
+ERROR_DS_CANT_ADD_TO_GC = 8550
+ERROR_DS_NO_CHECKPOINT_WITH_PDC = 8551
+ERROR_DS_SOURCE_AUDITING_NOT_ENABLED = 8552
+ERROR_DS_CANT_CREATE_IN_NONDOMAIN_NC = 8553
+ERROR_DS_INVALID_NAME_FOR_SPN = 8554
+ERROR_DS_FILTER_USES_CONTRUCTED_ATTRS = 8555
+ERROR_DS_UNICODEPWD_NOT_IN_QUOTES = 8556
+ERROR_DS_MACHINE_ACCOUNT_QUOTA_EXCEEDED = 8557
+ERROR_DS_MUST_BE_RUN_ON_DST_DC = 8558
+ERROR_DS_SRC_DC_MUST_BE_SP4_OR_GREATER = 8559
+ERROR_DS_CANT_TREE_DELETE_CRITICAL_OBJ = 8560
+ERROR_DS_INIT_FAILURE_CONSOLE = 8561
+ERROR_DS_SAM_INIT_FAILURE_CONSOLE = 8562
+ERROR_DS_FOREST_VERSION_TOO_HIGH = 8563
+ERROR_DS_DOMAIN_VERSION_TOO_HIGH = 8564
+ERROR_DS_FOREST_VERSION_TOO_LOW = 8565
+ERROR_DS_DOMAIN_VERSION_TOO_LOW = 8566
+ERROR_DS_INCOMPATIBLE_VERSION = 8567
+ERROR_DS_LOW_DSA_VERSION = 8568
+ERROR_DS_NO_BEHAVIOR_VERSION_IN_MIXEDDOMAIN = 8569
+ERROR_DS_NOT_SUPPORTED_SORT_ORDER = 8570
+ERROR_DS_NAME_NOT_UNIQUE = 8571
+ERROR_DS_MACHINE_ACCOUNT_CREATED_PRENT4 = 8572
+ERROR_DS_OUT_OF_VERSION_STORE = 8573
+ERROR_DS_INCOMPATIBLE_CONTROLS_USED = 8574
+ERROR_DS_NO_REF_DOMAIN = 8575
+ERROR_DS_RESERVED_LINK_ID = 8576
+ERROR_DS_LINK_ID_NOT_AVAILABLE = 8577
+ERROR_DS_AG_CANT_HAVE_UNIVERSAL_MEMBER = 8578
+ERROR_DS_MODIFYDN_DISALLOWED_BY_INSTANCE_TYPE = 8579
+ERROR_DS_NO_OBJECT_MOVE_IN_SCHEMA_NC = 8580
+ERROR_DS_MODIFYDN_DISALLOWED_BY_FLAG = 8581
+ERROR_DS_MODIFYDN_WRONG_GRANDPARENT = 8582
+ERROR_DS_NAME_ERROR_TRUST_REFERRAL = 8583
+ERROR_NOT_SUPPORTED_ON_STANDARD_SERVER = 8584
+ERROR_DS_CANT_ACCESS_REMOTE_PART_OF_AD = 8585
+ERROR_DS_CR_IMPOSSIBLE_TO_VALIDATE_V2 = 8586
+ERROR_DS_THREAD_LIMIT_EXCEEDED = 8587
+ERROR_DS_NOT_CLOSEST = 8588
+ERROR_DS_CANT_DERIVE_SPN_WITHOUT_SERVER_REF = 8589
+ERROR_DS_SINGLE_USER_MODE_FAILED = 8590
+ERROR_DS_NTDSCRIPT_SYNTAX_ERROR = 8591
+ERROR_DS_NTDSCRIPT_PROCESS_ERROR = 8592
+ERROR_DS_DIFFERENT_REPL_EPOCHS = 8593
+ERROR_DS_DRS_EXTENSIONS_CHANGED = 8594
+ERROR_DS_REPLICA_SET_CHANGE_NOT_ALLOWED_ON_DISABLED_CR = 8595
+ERROR_DS_NO_MSDS_INTID = 8596
+ERROR_DS_DUP_MSDS_INTID = 8597
+ERROR_DS_EXISTS_IN_RDNATTID = 8598
+ERROR_DS_AUTHORIZATION_FAILED = 8599
+ERROR_DS_INVALID_SCRIPT = 8600
+ERROR_DS_REMOTE_CROSSREF_OP_FAILED = 8601
+ERROR_DS_CROSS_REF_BUSY = 8602
+ERROR_DS_CANT_DERIVE_SPN_FOR_DELETED_DOMAIN = 8603
+ERROR_DS_CANT_DEMOTE_WITH_WRITEABLE_NC = 8604
+ERROR_DS_DUPLICATE_ID_FOUND = 8605
+ERROR_DS_INSUFFICIENT_ATTR_TO_CREATE_OBJECT = 8606
+ERROR_DS_GROUP_CONVERSION_ERROR = 8607
+ERROR_DS_CANT_MOVE_APP_BASIC_GROUP = 8608
+ERROR_DS_CANT_MOVE_APP_QUERY_GROUP = 8609
+ERROR_DS_ROLE_NOT_VERIFIED = 8610
+ERROR_DS_WKO_CONTAINER_CANNOT_BE_SPECIAL = 8611
+ERROR_DS_DOMAIN_RENAME_IN_PROGRESS = 8612
+ERROR_DS_EXISTING_AD_CHILD_NC = 8613
+ERROR_DS_REPL_LIFETIME_EXCEEDED = 8614
+ERROR_DS_DISALLOWED_IN_SYSTEM_CONTAINER = 8615
+ERROR_DS_LDAP_SEND_QUEUE_FULL = 8616
+ERROR_DS_DRA_OUT_SCHEDULE_WINDOW = 8617
+
+# Common dialog box error codes from cderr.h
+CDERR_DIALOGFAILURE = 65535
+CDERR_GENERALCODES = 0
+CDERR_STRUCTSIZE = 1
+CDERR_INITIALIZATION = 2
+CDERR_NOTEMPLATE = 3
+CDERR_NOHINSTANCE = 4
+CDERR_LOADSTRFAILURE = 5
+CDERR_FINDRESFAILURE = 6
+CDERR_LOADRESFAILURE = 7
+CDERR_LOCKRESFAILURE = 8
+CDERR_MEMALLOCFAILURE = 9
+CDERR_MEMLOCKFAILURE = 10
+CDERR_NOHOOK = 11
+CDERR_REGISTERMSGFAIL = 12
+PDERR_PRINTERCODES = 4096
+PDERR_SETUPFAILURE = 4097
+PDERR_PARSEFAILURE = 4098
+PDERR_RETDEFFAILURE = 4099
+PDERR_LOADDRVFAILURE = 4100
+PDERR_GETDEVMODEFAIL = 4101
+PDERR_INITFAILURE = 4102
+PDERR_NODEVICES = 4103
+PDERR_NODEFAULTPRN = 4104
+PDERR_DNDMMISMATCH = 4105
+PDERR_CREATEICFAILURE = 4106
+PDERR_PRINTERNOTFOUND = 4107
+PDERR_DEFAULTDIFFERENT = 4108
+CFERR_CHOOSEFONTCODES = 8192
+CFERR_NOFONTS = 8193
+CFERR_MAXLESSTHANMIN = 8194
+FNERR_FILENAMECODES = 12288
+FNERR_SUBCLASSFAILURE = 12289
+FNERR_INVALIDFILENAME = 12290
+FNERR_BUFFERTOOSMALL = 12291
+FRERR_FINDREPLACECODES = 16384
+FRERR_BUFFERLENGTHZERO = 16385
+CCERR_CHOOSECOLORCODES = 20480
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winioctlcon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winioctlcon.py
new file mode 100644
index 0000000..e4be5b7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winioctlcon.py
@@ -0,0 +1,663 @@
+## flags, enums, guids used with DeviceIoControl from WinIoCtl.h
+
+import pywintypes
+from ntsecuritycon import FILE_READ_DATA,FILE_WRITE_DATA
+def CTL_CODE(DeviceType, Function, Method, Access):
+    return (DeviceType << 16) | (Access << 14) | (Function << 2) | Method
+
+def DEVICE_TYPE_FROM_CTL_CODE(ctrlCode):
+    return (ctrlCode & 0xffff0000L) >> 16
+
+FILE_DEVICE_BEEP = 0x00000001
+FILE_DEVICE_CD_ROM = 0x00000002
+FILE_DEVICE_CD_ROM_FILE_SYSTEM = 0x00000003
+FILE_DEVICE_CONTROLLER = 0x00000004
+FILE_DEVICE_DATALINK = 0x00000005
+FILE_DEVICE_DFS = 0x00000006
+FILE_DEVICE_DISK = 0x00000007
+FILE_DEVICE_DISK_FILE_SYSTEM = 0x00000008
+FILE_DEVICE_FILE_SYSTEM = 0x00000009
+FILE_DEVICE_INPORT_PORT = 0x0000000a
+FILE_DEVICE_KEYBOARD = 0x0000000b
+FILE_DEVICE_MAILSLOT = 0x0000000c
+FILE_DEVICE_MIDI_IN = 0x0000000d
+FILE_DEVICE_MIDI_OUT = 0x0000000e
+FILE_DEVICE_MOUSE = 0x0000000f
+FILE_DEVICE_MULTI_UNC_PROVIDER = 0x00000010
+FILE_DEVICE_NAMED_PIPE = 0x00000011
+FILE_DEVICE_NETWORK = 0x00000012
+FILE_DEVICE_NETWORK_BROWSER = 0x00000013
+FILE_DEVICE_NETWORK_FILE_SYSTEM = 0x00000014
+FILE_DEVICE_NULL = 0x00000015
+FILE_DEVICE_PARALLEL_PORT = 0x00000016
+FILE_DEVICE_PHYSICAL_NETCARD = 0x00000017
+FILE_DEVICE_PRINTER = 0x00000018
+FILE_DEVICE_SCANNER = 0x00000019
+FILE_DEVICE_SERIAL_MOUSE_PORT = 0x0000001a
+FILE_DEVICE_SERIAL_PORT = 0x0000001b
+FILE_DEVICE_SCREEN = 0x0000001c
+FILE_DEVICE_SOUND = 0x0000001d
+FILE_DEVICE_STREAMS = 0x0000001e
+FILE_DEVICE_TAPE = 0x0000001f
+FILE_DEVICE_TAPE_FILE_SYSTEM = 0x00000020
+FILE_DEVICE_TRANSPORT = 0x00000021
+FILE_DEVICE_UNKNOWN = 0x00000022
+FILE_DEVICE_VIDEO = 0x00000023
+FILE_DEVICE_VIRTUAL_DISK = 0x00000024
+FILE_DEVICE_WAVE_IN = 0x00000025
+FILE_DEVICE_WAVE_OUT = 0x00000026
+FILE_DEVICE_8042_PORT = 0x00000027
+FILE_DEVICE_NETWORK_REDIRECTOR = 0x00000028
+FILE_DEVICE_BATTERY = 0x00000029
+FILE_DEVICE_BUS_EXTENDER = 0x0000002a
+FILE_DEVICE_MODEM = 0x0000002b
+FILE_DEVICE_VDM = 0x0000002c
+FILE_DEVICE_MASS_STORAGE = 0x0000002d
+FILE_DEVICE_SMB = 0x0000002e
+FILE_DEVICE_KS = 0x0000002f
+FILE_DEVICE_CHANGER = 0x00000030
+FILE_DEVICE_SMARTCARD = 0x00000031
+FILE_DEVICE_ACPI = 0x00000032
+FILE_DEVICE_DVD = 0x00000033
+FILE_DEVICE_FULLSCREEN_VIDEO = 0x00000034
+FILE_DEVICE_DFS_FILE_SYSTEM = 0x00000035
+FILE_DEVICE_DFS_VOLUME = 0x00000036
+FILE_DEVICE_SERENUM = 0x00000037
+FILE_DEVICE_TERMSRV = 0x00000038
+FILE_DEVICE_KSEC = 0x00000039
+FILE_DEVICE_FIPS = 0x0000003A
+FILE_DEVICE_INFINIBAND = 0x0000003B
+
+METHOD_BUFFERED = 0
+METHOD_IN_DIRECT = 1
+METHOD_OUT_DIRECT = 2
+METHOD_NEITHER = 3
+METHOD_DIRECT_TO_HARDWARE = METHOD_IN_DIRECT
+METHOD_DIRECT_FROM_HARDWARE = METHOD_OUT_DIRECT
+FILE_ANY_ACCESS = 0
+FILE_SPECIAL_ACCESS = FILE_ANY_ACCESS
+FILE_READ_ACCESS = 0x0001
+FILE_WRITE_ACCESS = 0x0002
+IOCTL_STORAGE_BASE = FILE_DEVICE_MASS_STORAGE
+RECOVERED_WRITES_VALID = 0x00000001
+UNRECOVERED_WRITES_VALID = 0x00000002
+RECOVERED_READS_VALID = 0x00000004
+UNRECOVERED_READS_VALID = 0x00000008
+WRITE_COMPRESSION_INFO_VALID = 0x00000010
+READ_COMPRESSION_INFO_VALID = 0x00000020
+TAPE_RETURN_STATISTICS = 0L
+TAPE_RETURN_ENV_INFO = 1L
+TAPE_RESET_STATISTICS = 2L
+MEDIA_ERASEABLE = 0x00000001
+MEDIA_WRITE_ONCE = 0x00000002
+MEDIA_READ_ONLY = 0x00000004
+MEDIA_READ_WRITE = 0x00000008
+MEDIA_WRITE_PROTECTED = 0x00000100
+MEDIA_CURRENTLY_MOUNTED = 0x80000000L
+IOCTL_DISK_BASE = FILE_DEVICE_DISK
+PARTITION_ENTRY_UNUSED = 0x00
+PARTITION_FAT_12 = 0x01
+PARTITION_XENIX_1 = 0x02
+PARTITION_XENIX_2 = 0x03
+PARTITION_FAT_16 = 0x04
+PARTITION_EXTENDED = 0x05
+PARTITION_HUGE = 0x06
+PARTITION_IFS = 0x07
+PARTITION_OS2BOOTMGR = 0x0A
+PARTITION_FAT32 = 0x0B
+PARTITION_FAT32_XINT13 = 0x0C
+PARTITION_XINT13 = 0x0E
+PARTITION_XINT13_EXTENDED = 0x0F
+PARTITION_PREP = 0x41
+PARTITION_LDM = 0x42
+PARTITION_UNIX = 0x63
+VALID_NTFT = 0xC0
+PARTITION_NTFT = 0x80
+
+GPT_ATTRIBUTE_PLATFORM_REQUIRED = 0x0000000000000001
+GPT_BASIC_DATA_ATTRIBUTE_NO_DRIVE_LETTER = 0x8000000000000000
+GPT_BASIC_DATA_ATTRIBUTE_HIDDEN = 0x4000000000000000
+GPT_BASIC_DATA_ATTRIBUTE_SHADOW_COPY = 0x2000000000000000
+GPT_BASIC_DATA_ATTRIBUTE_READ_ONLY = 0x1000000000000000
+
+HIST_NO_OF_BUCKETS = 24
+DISK_LOGGING_START = 0
+DISK_LOGGING_STOP = 1
+DISK_LOGGING_DUMP = 2
+DISK_BINNING = 3
+CAP_ATA_ID_CMD = 1
+CAP_ATAPI_ID_CMD = 2
+CAP_SMART_CMD = 4
+ATAPI_ID_CMD = 0xA1
+ID_CMD = 0xEC
+SMART_CMD = 0xB0
+SMART_CYL_LOW = 0x4F
+SMART_CYL_HI = 0xC2
+SMART_NO_ERROR = 0
+SMART_IDE_ERROR = 1
+SMART_INVALID_FLAG = 2
+SMART_INVALID_COMMAND = 3
+SMART_INVALID_BUFFER = 4
+SMART_INVALID_DRIVE = 5
+SMART_INVALID_IOCTL = 6
+SMART_ERROR_NO_MEM = 7
+SMART_INVALID_REGISTER = 8
+SMART_NOT_SUPPORTED = 9
+SMART_NO_IDE_DEVICE = 10
+SMART_OFFLINE_ROUTINE_OFFLINE = 0
+SMART_SHORT_SELFTEST_OFFLINE = 1
+SMART_EXTENDED_SELFTEST_OFFLINE = 2
+SMART_ABORT_OFFLINE_SELFTEST = 127
+SMART_SHORT_SELFTEST_CAPTIVE = 129
+SMART_EXTENDED_SELFTEST_CAPTIVE = 130
+READ_ATTRIBUTE_BUFFER_SIZE = 512
+IDENTIFY_BUFFER_SIZE = 512
+READ_THRESHOLD_BUFFER_SIZE = 512
+SMART_LOG_SECTOR_SIZE = 512
+READ_ATTRIBUTES = 0xD0
+READ_THRESHOLDS = 0xD1
+ENABLE_DISABLE_AUTOSAVE = 0xD2
+SAVE_ATTRIBUTE_VALUES = 0xD3
+EXECUTE_OFFLINE_DIAGS = 0xD4
+SMART_READ_LOG = 0xD5
+SMART_WRITE_LOG = 0xd6
+ENABLE_SMART = 0xD8
+DISABLE_SMART = 0xD9
+RETURN_SMART_STATUS = 0xDA
+ENABLE_DISABLE_AUTO_OFFLINE = 0xDB
+IOCTL_CHANGER_BASE = FILE_DEVICE_CHANGER
+MAX_VOLUME_ID_SIZE = 36
+MAX_VOLUME_TEMPLATE_SIZE = 40
+VENDOR_ID_LENGTH = 8
+PRODUCT_ID_LENGTH = 16
+REVISION_LENGTH = 4
+SERIAL_NUMBER_LENGTH = 32
+CHANGER_BAR_CODE_SCANNER_INSTALLED = 0x00000001
+CHANGER_INIT_ELEM_STAT_WITH_RANGE = 0x00000002
+CHANGER_CLOSE_IEPORT = 0x00000004
+CHANGER_OPEN_IEPORT = 0x00000008
+CHANGER_STATUS_NON_VOLATILE = 0x00000010
+CHANGER_EXCHANGE_MEDIA = 0x00000020
+CHANGER_CLEANER_SLOT = 0x00000040
+CHANGER_LOCK_UNLOCK = 0x00000080
+CHANGER_CARTRIDGE_MAGAZINE = 0x00000100
+CHANGER_MEDIUM_FLIP = 0x00000200
+CHANGER_POSITION_TO_ELEMENT = 0x00000400
+CHANGER_REPORT_IEPORT_STATE = 0x00000800
+CHANGER_STORAGE_DRIVE = 0x00001000
+CHANGER_STORAGE_IEPORT = 0x00002000
+CHANGER_STORAGE_SLOT = 0x00004000
+CHANGER_STORAGE_TRANSPORT = 0x00008000
+CHANGER_DRIVE_CLEANING_REQUIRED = 0x00010000
+CHANGER_PREDISMOUNT_EJECT_REQUIRED = 0x00020000
+CHANGER_CLEANER_ACCESS_NOT_VALID = 0x00040000
+CHANGER_PREMOUNT_EJECT_REQUIRED = 0x00080000
+CHANGER_VOLUME_IDENTIFICATION = 0x00100000
+CHANGER_VOLUME_SEARCH = 0x00200000
+CHANGER_VOLUME_ASSERT = 0x00400000
+CHANGER_VOLUME_REPLACE = 0x00800000
+CHANGER_VOLUME_UNDEFINE = 0x01000000
+CHANGER_SERIAL_NUMBER_VALID = 0x04000000
+CHANGER_DEVICE_REINITIALIZE_CAPABLE = 0x08000000
+CHANGER_KEYPAD_ENABLE_DISABLE = 0x10000000
+CHANGER_DRIVE_EMPTY_ON_DOOR_ACCESS = 0x20000000
+
+CHANGER_RESERVED_BIT = 0x80000000L
+CHANGER_PREDISMOUNT_ALIGN_TO_SLOT = 0x80000001L
+CHANGER_PREDISMOUNT_ALIGN_TO_DRIVE = 0x80000002L
+CHANGER_CLEANER_AUTODISMOUNT = 0x80000004L
+CHANGER_TRUE_EXCHANGE_CAPABLE = 0x80000008L
+CHANGER_SLOTS_USE_TRAYS = 0x80000010L
+CHANGER_RTN_MEDIA_TO_ORIGINAL_ADDR = 0x80000020L
+CHANGER_CLEANER_OPS_NOT_SUPPORTED = 0x80000040L
+CHANGER_IEPORT_USER_CONTROL_OPEN = 0x80000080L
+CHANGER_IEPORT_USER_CONTROL_CLOSE = 0x80000100L
+CHANGER_MOVE_EXTENDS_IEPORT = 0x80000200L
+CHANGER_MOVE_RETRACTS_IEPORT = 0x80000400L
+
+
+CHANGER_TO_TRANSPORT = 0x01
+CHANGER_TO_SLOT = 0x02
+CHANGER_TO_IEPORT = 0x04
+CHANGER_TO_DRIVE = 0x08
+LOCK_UNLOCK_IEPORT = 0x01
+LOCK_UNLOCK_DOOR = 0x02
+LOCK_UNLOCK_KEYPAD = 0x04
+LOCK_ELEMENT = 0
+UNLOCK_ELEMENT = 1
+EXTEND_IEPORT = 2
+RETRACT_IEPORT = 3
+ELEMENT_STATUS_FULL = 0x00000001
+ELEMENT_STATUS_IMPEXP = 0x00000002
+ELEMENT_STATUS_EXCEPT = 0x00000004
+ELEMENT_STATUS_ACCESS = 0x00000008
+ELEMENT_STATUS_EXENAB = 0x00000010
+ELEMENT_STATUS_INENAB = 0x00000020
+ELEMENT_STATUS_PRODUCT_DATA = 0x00000040
+ELEMENT_STATUS_LUN_VALID = 0x00001000
+ELEMENT_STATUS_ID_VALID = 0x00002000
+ELEMENT_STATUS_NOT_BUS = 0x00008000
+ELEMENT_STATUS_INVERT = 0x00400000
+ELEMENT_STATUS_SVALID = 0x00800000
+ELEMENT_STATUS_PVOLTAG = 0x10000000
+ELEMENT_STATUS_AVOLTAG = 0x20000000
+ERROR_LABEL_UNREADABLE = 0x00000001
+ERROR_LABEL_QUESTIONABLE = 0x00000002
+ERROR_SLOT_NOT_PRESENT = 0x00000004
+ERROR_DRIVE_NOT_INSTALLED = 0x00000008
+ERROR_TRAY_MALFUNCTION = 0x00000010
+ERROR_INIT_STATUS_NEEDED = 0x00000011
+ERROR_UNHANDLED_ERROR = 0xFFFFFFFFL
+SEARCH_ALL = 0x0
+SEARCH_PRIMARY = 0x1
+SEARCH_ALTERNATE = 0x2
+SEARCH_ALL_NO_SEQ = 0x4
+SEARCH_PRI_NO_SEQ = 0x5
+SEARCH_ALT_NO_SEQ = 0x6
+ASSERT_PRIMARY = 0x8
+ASSERT_ALTERNATE = 0x9
+REPLACE_PRIMARY = 0xA
+REPLACE_ALTERNATE = 0xB
+UNDEFINE_PRIMARY = 0xC
+UNDEFINE_ALTERNATE = 0xD
+USN_PAGE_SIZE = 0x1000
+USN_REASON_DATA_OVERWRITE = 0x00000001
+USN_REASON_DATA_EXTEND = 0x00000002
+USN_REASON_DATA_TRUNCATION = 0x00000004
+USN_REASON_NAMED_DATA_OVERWRITE = 0x00000010
+USN_REASON_NAMED_DATA_EXTEND = 0x00000020
+USN_REASON_NAMED_DATA_TRUNCATION = 0x00000040
+USN_REASON_FILE_CREATE = 0x00000100
+USN_REASON_FILE_DELETE = 0x00000200
+USN_REASON_EA_CHANGE = 0x00000400
+USN_REASON_SECURITY_CHANGE = 0x00000800
+USN_REASON_RENAME_OLD_NAME = 0x00001000
+USN_REASON_RENAME_NEW_NAME = 0x00002000
+USN_REASON_INDEXABLE_CHANGE = 0x00004000
+USN_REASON_BASIC_INFO_CHANGE = 0x00008000
+USN_REASON_HARD_LINK_CHANGE = 0x00010000
+USN_REASON_COMPRESSION_CHANGE = 0x00020000
+USN_REASON_ENCRYPTION_CHANGE = 0x00040000
+USN_REASON_OBJECT_ID_CHANGE = 0x00080000
+USN_REASON_REPARSE_POINT_CHANGE = 0x00100000
+USN_REASON_STREAM_CHANGE = 0x00200000
+USN_REASON_CLOSE = 0x80000000L
+USN_DELETE_FLAG_DELETE = 0x00000001
+USN_DELETE_FLAG_NOTIFY = 0x00000002
+USN_DELETE_VALID_FLAGS = 0x00000003
+USN_SOURCE_DATA_MANAGEMENT = 0x00000001
+USN_SOURCE_AUXILIARY_DATA = 0x00000002
+USN_SOURCE_REPLICATION_MANAGEMENT = 0x00000004
+
+MARK_HANDLE_PROTECT_CLUSTERS = 1
+MARK_HANDLE_TXF_SYSTEM_LOG = 4
+MARK_HANDLE_NOT_TXF_SYSTEM_LOG = 8
+
+VOLUME_IS_DIRTY = 0x00000001
+VOLUME_UPGRADE_SCHEDULED = 0x00000002
+FILE_PREFETCH_TYPE_FOR_CREATE = 0x1
+FILESYSTEM_STATISTICS_TYPE_NTFS = 1
+FILESYSTEM_STATISTICS_TYPE_FAT = 2
+FILE_SET_ENCRYPTION = 0x00000001
+FILE_CLEAR_ENCRYPTION = 0x00000002
+STREAM_SET_ENCRYPTION = 0x00000003
+STREAM_CLEAR_ENCRYPTION = 0x00000004
+MAXIMUM_ENCRYPTION_VALUE = 0x00000004
+ENCRYPTION_FORMAT_DEFAULT = 0x01
+COMPRESSION_FORMAT_SPARSE = 0x4000
+COPYFILE_SIS_LINK = 0x0001
+COPYFILE_SIS_REPLACE = 0x0002
+COPYFILE_SIS_FLAGS = 0x0003
+
+WMI_DISK_GEOMETRY_GUID = pywintypes.IID("{25007F51-57C2-11D1-A528-00A0C9062910}")
+GUID_DEVINTERFACE_CDROM = pywintypes.IID("{53F56308-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_FLOPPY = pywintypes.IID("{53F56311-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_SERENUM_BUS_ENUMERATOR = pywintypes.IID("{4D36E978-E325-11CE-BFC1-08002BE10318}")
+GUID_DEVINTERFACE_COMPORT = pywintypes.IID("{86E0D1E0-8089-11D0-9CE4-08003E301F73}")
+GUID_DEVINTERFACE_DISK = pywintypes.IID("{53F56307-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_STORAGEPORT = pywintypes.IID("{2ACCFE60-C130-11D2-B082-00A0C91EFB8B}")
+GUID_DEVINTERFACE_CDCHANGER = pywintypes.IID("{53F56312-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_PARTITION = pywintypes.IID("{53F5630A-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_VOLUME = pywintypes.IID("{53F5630D-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_WRITEONCEDISK = pywintypes.IID("{53F5630C-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_TAPE = pywintypes.IID("{53F5630B-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_DEVINTERFACE_MEDIUMCHANGER = pywintypes.IID("{53F56310-B6BF-11D0-94F2-00A0C91EFB8B}")
+GUID_SERENUM_BUS_ENUMERATOR = GUID_DEVINTERFACE_SERENUM_BUS_ENUMERATOR
+GUID_CLASS_COMPORT = GUID_DEVINTERFACE_COMPORT
+
+DiskClassGuid = GUID_DEVINTERFACE_DISK
+CdRomClassGuid = GUID_DEVINTERFACE_CDROM
+PartitionClassGuid = GUID_DEVINTERFACE_PARTITION
+TapeClassGuid = GUID_DEVINTERFACE_TAPE
+WriteOnceDiskClassGuid = GUID_DEVINTERFACE_WRITEONCEDISK
+VolumeClassGuid = GUID_DEVINTERFACE_VOLUME
+MediumChangerClassGuid = GUID_DEVINTERFACE_MEDIUMCHANGER
+FloppyClassGuid = GUID_DEVINTERFACE_FLOPPY
+CdChangerClassGuid = GUID_DEVINTERFACE_CDCHANGER
+StoragePortClassGuid = GUID_DEVINTERFACE_STORAGEPORT
+
+
+IOCTL_STORAGE_CHECK_VERIFY = CTL_CODE(IOCTL_STORAGE_BASE, 0x0200, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_CHECK_VERIFY2 = CTL_CODE(IOCTL_STORAGE_BASE, 0x0200, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_MEDIA_REMOVAL = CTL_CODE(IOCTL_STORAGE_BASE, 0x0201, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_EJECT_MEDIA = CTL_CODE(IOCTL_STORAGE_BASE, 0x0202, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_LOAD_MEDIA = CTL_CODE(IOCTL_STORAGE_BASE, 0x0203, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_LOAD_MEDIA2 = CTL_CODE(IOCTL_STORAGE_BASE, 0x0203, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_RESERVE = CTL_CODE(IOCTL_STORAGE_BASE, 0x0204, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_RELEASE = CTL_CODE(IOCTL_STORAGE_BASE, 0x0205, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_FIND_NEW_DEVICES = CTL_CODE(IOCTL_STORAGE_BASE, 0x0206, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_EJECTION_CONTROL = CTL_CODE(IOCTL_STORAGE_BASE, 0x0250, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_MCN_CONTROL = CTL_CODE(IOCTL_STORAGE_BASE, 0x0251, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_GET_MEDIA_TYPES = CTL_CODE(IOCTL_STORAGE_BASE, 0x0300, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_GET_MEDIA_TYPES_EX = CTL_CODE(IOCTL_STORAGE_BASE, 0x0301, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_GET_MEDIA_SERIAL_NUMBER = CTL_CODE(IOCTL_STORAGE_BASE, 0x0304, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_GET_HOTPLUG_INFO = CTL_CODE(IOCTL_STORAGE_BASE, 0x0305, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_SET_HOTPLUG_INFO = CTL_CODE(IOCTL_STORAGE_BASE, 0x0306, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_STORAGE_RESET_BUS = CTL_CODE(IOCTL_STORAGE_BASE, 0x0400, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_RESET_DEVICE = CTL_CODE(IOCTL_STORAGE_BASE, 0x0401, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_BREAK_RESERVATION = CTL_CODE(IOCTL_STORAGE_BASE, 0x0405, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_STORAGE_GET_DEVICE_NUMBER = CTL_CODE(IOCTL_STORAGE_BASE, 0x0420, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_STORAGE_PREDICT_FAILURE = CTL_CODE(IOCTL_STORAGE_BASE, 0x0440, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_GET_DRIVE_GEOMETRY = CTL_CODE(IOCTL_DISK_BASE, 0x0000, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_GET_PARTITION_INFO = CTL_CODE(IOCTL_DISK_BASE, 0x0001, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_SET_PARTITION_INFO = CTL_CODE(IOCTL_DISK_BASE, 0x0002, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_GET_DRIVE_LAYOUT = CTL_CODE(IOCTL_DISK_BASE, 0x0003, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_SET_DRIVE_LAYOUT = CTL_CODE(IOCTL_DISK_BASE, 0x0004, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_VERIFY = CTL_CODE(IOCTL_DISK_BASE, 0x0005, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_FORMAT_TRACKS = CTL_CODE(IOCTL_DISK_BASE, 0x0006, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_REASSIGN_BLOCKS = CTL_CODE(IOCTL_DISK_BASE, 0x0007, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_PERFORMANCE = CTL_CODE(IOCTL_DISK_BASE, 0x0008, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_IS_WRITABLE = CTL_CODE(IOCTL_DISK_BASE, 0x0009, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_LOGGING = CTL_CODE(IOCTL_DISK_BASE, 0x000a, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_FORMAT_TRACKS_EX = CTL_CODE(IOCTL_DISK_BASE, 0x000b, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_HISTOGRAM_STRUCTURE = CTL_CODE(IOCTL_DISK_BASE, 0x000c, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_HISTOGRAM_DATA = CTL_CODE(IOCTL_DISK_BASE, 0x000d, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_HISTOGRAM_RESET = CTL_CODE(IOCTL_DISK_BASE, 0x000e, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_REQUEST_STRUCTURE = CTL_CODE(IOCTL_DISK_BASE, 0x000f, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_REQUEST_DATA = CTL_CODE(IOCTL_DISK_BASE, 0x0010, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_PERFORMANCE_OFF = CTL_CODE(IOCTL_DISK_BASE, 0x0018, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_CONTROLLER_NUMBER = CTL_CODE(IOCTL_DISK_BASE, 0x0011, METHOD_BUFFERED, FILE_ANY_ACCESS)
+SMART_GET_VERSION = CTL_CODE(IOCTL_DISK_BASE, 0x0020, METHOD_BUFFERED, FILE_READ_ACCESS)
+SMART_SEND_DRIVE_COMMAND = CTL_CODE(IOCTL_DISK_BASE, 0x0021, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+SMART_RCV_DRIVE_DATA = CTL_CODE(IOCTL_DISK_BASE, 0x0022, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_GET_PARTITION_INFO_EX = CTL_CODE(IOCTL_DISK_BASE, 0x0012, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_SET_PARTITION_INFO_EX = CTL_CODE(IOCTL_DISK_BASE, 0x0013, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_GET_DRIVE_LAYOUT_EX = CTL_CODE(IOCTL_DISK_BASE, 0x0014, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_SET_DRIVE_LAYOUT_EX = CTL_CODE(IOCTL_DISK_BASE, 0x0015, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_CREATE_DISK = CTL_CODE(IOCTL_DISK_BASE, 0x0016, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_GET_LENGTH_INFO = CTL_CODE(IOCTL_DISK_BASE, 0x0017, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_GET_DRIVE_GEOMETRY_EX = CTL_CODE(IOCTL_DISK_BASE, 0x0028, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_REASSIGN_BLOCKS_EX = CTL_CODE(IOCTL_DISK_BASE, 0x0029, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+
+IOCTL_DISK_UPDATE_DRIVE_SIZE = CTL_CODE(IOCTL_DISK_BASE, 0x0032, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_GROW_PARTITION = CTL_CODE(IOCTL_DISK_BASE, 0x0034, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_GET_CACHE_INFORMATION = CTL_CODE(IOCTL_DISK_BASE, 0x0035, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_SET_CACHE_INFORMATION = CTL_CODE(IOCTL_DISK_BASE, 0x0036, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+
+OBSOLETE_IOCTL_STORAGE_RESET_BUS = CTL_CODE(IOCTL_STORAGE_BASE, 0x0400, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+OBSOLETE_IOCTL_STORAGE_RESET_DEVICE = CTL_CODE(IOCTL_STORAGE_BASE, 0x0401, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+## the original define no longer exists in winioctl.h
+OBSOLETE_DISK_GET_WRITE_CACHE_STATE = CTL_CODE(IOCTL_DISK_BASE, 0x0037, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_GET_WRITE_CACHE_STATE=OBSOLETE_DISK_GET_WRITE_CACHE_STATE
+
+
+IOCTL_DISK_DELETE_DRIVE_LAYOUT = CTL_CODE(IOCTL_DISK_BASE, 0x0040, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_UPDATE_PROPERTIES = CTL_CODE(IOCTL_DISK_BASE, 0x0050, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_FORMAT_DRIVE = CTL_CODE(IOCTL_DISK_BASE, 0x00f3, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_DISK_SENSE_DEVICE = CTL_CODE(IOCTL_DISK_BASE, 0x00f8, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_DISK_CHECK_VERIFY = CTL_CODE(IOCTL_DISK_BASE, 0x0200, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_MEDIA_REMOVAL = CTL_CODE(IOCTL_DISK_BASE, 0x0201, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_EJECT_MEDIA = CTL_CODE(IOCTL_DISK_BASE, 0x0202, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_LOAD_MEDIA = CTL_CODE(IOCTL_DISK_BASE, 0x0203, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_RESERVE = CTL_CODE(IOCTL_DISK_BASE, 0x0204, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_RELEASE = CTL_CODE(IOCTL_DISK_BASE, 0x0205, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_FIND_NEW_DEVICES = CTL_CODE(IOCTL_DISK_BASE, 0x0206, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_DISK_GET_MEDIA_TYPES = CTL_CODE(IOCTL_DISK_BASE, 0x0300, METHOD_BUFFERED, FILE_ANY_ACCESS)
+
+DISK_HISTOGRAM_SIZE = 72
+HISTOGRAM_BUCKET_SIZE = 8
+
+IOCTL_CHANGER_GET_PARAMETERS = CTL_CODE(IOCTL_CHANGER_BASE, 0x0000, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_GET_STATUS = CTL_CODE(IOCTL_CHANGER_BASE, 0x0001, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_GET_PRODUCT_DATA = CTL_CODE(IOCTL_CHANGER_BASE, 0x0002, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_SET_ACCESS = CTL_CODE(IOCTL_CHANGER_BASE, 0x0004, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_CHANGER_GET_ELEMENT_STATUS = CTL_CODE(IOCTL_CHANGER_BASE, 0x0005, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_CHANGER_INITIALIZE_ELEMENT_STATUS = CTL_CODE(IOCTL_CHANGER_BASE, 0x0006, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_SET_POSITION = CTL_CODE(IOCTL_CHANGER_BASE, 0x0007, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_EXCHANGE_MEDIUM = CTL_CODE(IOCTL_CHANGER_BASE, 0x0008, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_MOVE_MEDIUM = CTL_CODE(IOCTL_CHANGER_BASE, 0x0009, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_REINITIALIZE_TRANSPORT = CTL_CODE(IOCTL_CHANGER_BASE, 0x000A, METHOD_BUFFERED, FILE_READ_ACCESS)
+IOCTL_CHANGER_QUERY_VOLUME_TAGS = CTL_CODE(IOCTL_CHANGER_BASE, 0x000B, METHOD_BUFFERED, FILE_READ_ACCESS | FILE_WRITE_ACCESS)
+IOCTL_SERIAL_LSRMST_INSERT = CTL_CODE(FILE_DEVICE_SERIAL_PORT,31,METHOD_BUFFERED,FILE_ANY_ACCESS)
+IOCTL_SERENUM_EXPOSE_HARDWARE = CTL_CODE(FILE_DEVICE_SERENUM,128,METHOD_BUFFERED,FILE_ANY_ACCESS)
+IOCTL_SERENUM_REMOVE_HARDWARE = CTL_CODE(FILE_DEVICE_SERENUM,129,METHOD_BUFFERED,FILE_ANY_ACCESS)
+IOCTL_SERENUM_PORT_DESC = CTL_CODE(FILE_DEVICE_SERENUM,130,METHOD_BUFFERED,FILE_ANY_ACCESS)
+IOCTL_SERENUM_GET_PORT_NAME = CTL_CODE(FILE_DEVICE_SERENUM,131,METHOD_BUFFERED,FILE_ANY_ACCESS)
+SERIAL_LSRMST_ESCAPE = 0x00
+SERIAL_LSRMST_LSR_DATA = 0x01
+SERIAL_LSRMST_LSR_NODATA = 0x02
+SERIAL_LSRMST_MST = 0x03
+SERIAL_IOC_FCR_FIFO_ENABLE = 0x00000001
+SERIAL_IOC_FCR_RCVR_RESET = 0x00000002
+SERIAL_IOC_FCR_XMIT_RESET = 0x00000004
+SERIAL_IOC_FCR_DMA_MODE = 0x00000008
+SERIAL_IOC_FCR_RES1 = 0x00000010
+SERIAL_IOC_FCR_RES2 = 0x00000020
+SERIAL_IOC_FCR_RCVR_TRIGGER_LSB = 0x00000040
+SERIAL_IOC_FCR_RCVR_TRIGGER_MSB = 0x00000080
+SERIAL_IOC_MCR_DTR = 0x00000001
+SERIAL_IOC_MCR_RTS = 0x00000002
+SERIAL_IOC_MCR_OUT1 = 0x00000004
+SERIAL_IOC_MCR_OUT2 = 0x00000008
+SERIAL_IOC_MCR_LOOP = 0x00000010
+FSCTL_REQUEST_OPLOCK_LEVEL_1 = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  0, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_REQUEST_OPLOCK_LEVEL_2 = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  1, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_REQUEST_BATCH_OPLOCK = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  2, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_OPLOCK_BREAK_ACKNOWLEDGE = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  3, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_OPBATCH_ACK_CLOSE_PENDING = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  4, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_OPLOCK_BREAK_NOTIFY = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  5, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_LOCK_VOLUME = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  6, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_UNLOCK_VOLUME = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  7, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_DISMOUNT_VOLUME = CTL_CODE(FILE_DEVICE_FILE_SYSTEM,  8, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_IS_VOLUME_MOUNTED = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 10, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_IS_PATHNAME_VALID = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 11, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_MARK_VOLUME_DIRTY = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 12, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_QUERY_RETRIEVAL_POINTERS = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 14,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_GET_COMPRESSION = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 15, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_SET_COMPRESSION = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 16, METHOD_BUFFERED, FILE_READ_DATA | FILE_WRITE_DATA)
+FSCTL_MARK_AS_SYSTEM_HIVE = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 19,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_OPLOCK_BREAK_ACK_NO_2 = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 20, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_INVALIDATE_VOLUMES = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 21, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_QUERY_FAT_BPB = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 22, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_REQUEST_FILTER_OPLOCK = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 23, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_FILESYSTEM_GET_STATISTICS = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 24, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_GET_NTFS_VOLUME_DATA = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 25, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_GET_NTFS_FILE_RECORD = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 26, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_GET_VOLUME_BITMAP = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 27,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_GET_RETRIEVAL_POINTERS = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 28,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_MOVE_FILE = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 29, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+FSCTL_IS_VOLUME_DIRTY = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 30, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_ALLOW_EXTENDED_DASD_IO = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 32, METHOD_NEITHER,  FILE_ANY_ACCESS)
+FSCTL_FIND_FILES_BY_SID = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 35, METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_SET_OBJECT_ID = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 38, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+FSCTL_GET_OBJECT_ID = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 39, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_DELETE_OBJECT_ID = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 40, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+FSCTL_SET_REPARSE_POINT = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 41, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+FSCTL_GET_REPARSE_POINT = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 42, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_DELETE_REPARSE_POINT = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 43, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+FSCTL_ENUM_USN_DATA = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 44,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_SECURITY_ID_CHECK = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 45,  METHOD_NEITHER, FILE_READ_DATA)
+FSCTL_READ_USN_JOURNAL = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 46,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_SET_OBJECT_ID_EXTENDED = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 47, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+FSCTL_CREATE_OR_GET_OBJECT_ID = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 48, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_SET_SPARSE = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 49, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+FSCTL_SET_ZERO_DATA = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 50, METHOD_BUFFERED, FILE_WRITE_DATA)
+FSCTL_QUERY_ALLOCATED_RANGES = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 51,  METHOD_NEITHER, FILE_READ_DATA)
+FSCTL_SET_ENCRYPTION = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 53,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_ENCRYPTION_FSCTL_IO = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 54,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_WRITE_RAW_ENCRYPTED = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 55,  METHOD_NEITHER, FILE_SPECIAL_ACCESS)
+FSCTL_READ_RAW_ENCRYPTED = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 56,  METHOD_NEITHER, FILE_SPECIAL_ACCESS)
+FSCTL_CREATE_USN_JOURNAL = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 57,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_READ_FILE_USN_DATA = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 58,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_WRITE_USN_CLOSE_RECORD = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 59,  METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_EXTEND_VOLUME = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 60, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_QUERY_USN_JOURNAL = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 61, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_DELETE_USN_JOURNAL = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 62, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_MARK_HANDLE = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 63, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_SIS_COPYFILE = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 64, METHOD_BUFFERED, FILE_ANY_ACCESS)
+FSCTL_SIS_LINK_FILES = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 65, METHOD_BUFFERED, FILE_READ_DATA | FILE_WRITE_DATA)
+FSCTL_HSM_MSG = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 66, METHOD_BUFFERED, FILE_READ_DATA | FILE_WRITE_DATA)
+FSCTL_HSM_DATA = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 68, METHOD_NEITHER, FILE_READ_DATA | FILE_WRITE_DATA)
+FSCTL_RECALL_FILE = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 69, METHOD_NEITHER, FILE_ANY_ACCESS)
+FSCTL_READ_FROM_PLEX = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 71, METHOD_OUT_DIRECT, FILE_READ_DATA)
+FSCTL_FILE_PREFETCH = CTL_CODE(FILE_DEVICE_FILE_SYSTEM, 72, METHOD_BUFFERED, FILE_SPECIAL_ACCESS)
+IOCTL_VOLUME_BASE =  ord('V')
+IOCTL_VOLUME_GET_VOLUME_DISK_EXTENTS = CTL_CODE(IOCTL_VOLUME_BASE, 0, METHOD_BUFFERED, FILE_ANY_ACCESS)
+IOCTL_VOLUME_IS_CLUSTERED = CTL_CODE(IOCTL_VOLUME_BASE, 12, METHOD_BUFFERED, FILE_ANY_ACCESS)
+
+## enums
+## STORAGE_MEDIA_TYPE
+DDS_4mm = 32
+MiniQic = 33
+Travan = 34
+QIC = 35
+MP_8mm = 36
+AME_8mm = 37
+AIT1_8mm = 38
+DLT = 39
+NCTP = 40
+IBM_3480 = 41
+IBM_3490E = 42
+IBM_Magstar_3590 = 43
+IBM_Magstar_MP = 44
+STK_DATA_D3 = 45
+SONY_DTF = 46
+DV_6mm = 47
+DMI = 48
+SONY_D2 = 49
+CLEANER_CARTRIDGE = 50
+CD_ROM = 51
+CD_R = 52
+CD_RW = 53
+DVD_ROM = 54
+DVD_R = 55
+DVD_RW = 56
+MO_3_RW = 57
+MO_5_WO = 58
+MO_5_RW = 59
+MO_5_LIMDOW = 60
+PC_5_WO = 61
+PC_5_RW = 62
+PD_5_RW = 63
+ABL_5_WO = 64
+PINNACLE_APEX_5_RW = 65
+SONY_12_WO = 66
+PHILIPS_12_WO = 67
+HITACHI_12_WO = 68
+CYGNET_12_WO = 69
+KODAK_14_WO = 70
+MO_NFR_525 = 71
+NIKON_12_RW = 72
+IOMEGA_ZIP = 73
+IOMEGA_JAZ = 74
+SYQUEST_EZ135 = 75
+SYQUEST_EZFLYER = 76
+SYQUEST_SYJET = 77
+AVATAR_F2 = 78
+MP2_8mm = 79
+DST_S = 80
+DST_M = 81
+DST_L = 82
+VXATape_1 = 83
+VXATape_2 = 84
+STK_9840 = 85
+LTO_Ultrium = 86
+LTO_Accelis = 87
+DVD_RAM = 88
+AIT_8mm = 89
+ADR_1 = 90
+ADR_2 = 91
+STK_9940 = 92
+
+## STORAGE_BUS_TYPE
+BusTypeUnknown = 0
+BusTypeScsi = 1
+BusTypeAtapi = 2
+BusTypeAta = 3
+BusType1394 = 4
+BusTypeSsa = 5
+BusTypeFibre = 6
+BusTypeUsb = 7
+BusTypeRAID = 8
+BusTypeiScsi = 9
+BusTypeSas = 10
+BusTypeSata = 11
+BusTypeMaxReserved = 127
+
+## MEDIA_TYPE
+Unknown = 0
+F5_1Pt2_512 = 1
+F3_1Pt44_512 = 2
+F3_2Pt88_512 = 3
+F3_20Pt8_512 = 4
+F3_720_512 = 5
+F5_360_512 = 6
+F5_320_512 = 7
+F5_320_1024 = 8
+F5_180_512 = 9
+F5_160_512 = 10
+RemovableMedia = 11
+FixedMedia = 12
+F3_120M_512 = 13
+F3_640_512 = 14
+F5_640_512 = 15
+F5_720_512 = 16
+F3_1Pt2_512 = 17
+F3_1Pt23_1024 = 18
+F5_1Pt23_1024 = 19
+F3_128Mb_512 = 20
+F3_230Mb_512 = 21
+F8_256_128 = 22
+F3_200Mb_512 = 23
+F3_240M_512 = 24
+F3_32M_512 = 25
+
+## PARTITION_STYLE
+PARTITION_STYLE_MBR = 0
+PARTITION_STYLE_GPT = 1
+PARTITION_STYLE_RAW = 2
+
+## DETECTION_TYPE
+DetectNone = 0
+DetectInt13 = 1
+DetectExInt13 = 2
+
+## DISK_CACHE_RETENTION_PRIORITY
+EqualPriority = 0
+KeepPrefetchedData = 1
+KeepReadData = 2
+
+## DISK_WRITE_CACHE_STATE - ?????? this enum has disappeared from winioctl.h in windows 2003 SP1 sdk ??????
+DiskWriteCacheNormal = 0
+DiskWriteCacheForceDisable = 1
+DiskWriteCacheDisableNotSupported = 2
+
+## BIN_TYPES
+RequestSize = 0
+RequestLocation = 1
+
+## CHANGER_DEVICE_PROBLEM_TYPE
+DeviceProblemNone = 0
+DeviceProblemHardware = 1
+DeviceProblemCHMError = 2
+DeviceProblemDoorOpen = 3
+DeviceProblemCalibrationError = 4
+DeviceProblemTargetFailure = 5
+DeviceProblemCHMMoveError = 6
+DeviceProblemCHMZeroError = 7
+DeviceProblemCartridgeInsertError = 8
+DeviceProblemPositionError = 9
+DeviceProblemSensorError = 10
+DeviceProblemCartridgeEjectError = 11
+DeviceProblemGripperError = 12
+DeviceProblemDriveError = 13
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winnt.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winnt.py
new file mode 100644
index 0000000..f3e7ef4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winnt.py
@@ -0,0 +1,1098 @@
+# Generated by h2py from \mssdk\include\winnt.h
+
+APPLICATION_ERROR_MASK = 536870912
+ERROR_SEVERITY_SUCCESS = 0
+ERROR_SEVERITY_INFORMATIONAL = 1073741824
+ERROR_SEVERITY_WARNING = -2147483648
+ERROR_SEVERITY_ERROR = -1073741824
+MINCHAR = 128
+MAXCHAR = 127
+MINSHORT = 32768
+MAXSHORT = 32767
+MINLONG = -2147483648
+MAXLONG = 2147483647
+MAXBYTE = 255
+MAXWORD = 65535
+MAXDWORD = -1
+LANG_NEUTRAL = 0
+LANG_AFRIKAANS = 54
+LANG_ALBANIAN = 28
+LANG_ARABIC = 1
+LANG_BASQUE = 45
+LANG_BELARUSIAN = 35
+LANG_BULGARIAN = 2
+LANG_CATALAN = 3
+LANG_CHINESE = 4
+LANG_CROATIAN = 26
+LANG_CZECH = 5
+LANG_DANISH = 6
+LANG_DUTCH = 19
+LANG_ENGLISH = 9
+LANG_ESTONIAN = 37
+LANG_FAEROESE = 56
+LANG_FARSI = 41
+LANG_FINNISH = 11
+LANG_FRENCH = 12
+LANG_GERMAN = 7
+LANG_GREEK = 8
+LANG_HEBREW = 13
+LANG_HINDI = 57
+LANG_HUNGARIAN = 14
+LANG_ICELANDIC = 15
+LANG_INDONESIAN = 33
+LANG_ITALIAN = 16
+LANG_JAPANESE = 17
+LANG_KOREAN = 18
+LANG_LATVIAN = 38
+LANG_LITHUANIAN = 39
+LANG_MACEDONIAN = 47
+LANG_MALAY = 62
+LANG_NORWEGIAN = 20
+LANG_POLISH = 21
+LANG_PORTUGUESE = 22
+LANG_ROMANIAN = 24
+LANG_RUSSIAN = 25
+LANG_SERBIAN = 26
+LANG_SLOVAK = 27
+LANG_SLOVENIAN = 36
+LANG_SPANISH = 10
+LANG_SWAHILI = 65
+LANG_SWEDISH = 29
+LANG_THAI = 30
+LANG_TURKISH = 31
+LANG_UKRAINIAN = 34
+LANG_VIETNAMESE = 42
+SUBLANG_NEUTRAL = 0
+SUBLANG_DEFAULT = 1
+SUBLANG_SYS_DEFAULT = 2
+SUBLANG_ARABIC_SAUDI_ARABIA = 1
+SUBLANG_ARABIC_IRAQ = 2
+SUBLANG_ARABIC_EGYPT = 3
+SUBLANG_ARABIC_LIBYA = 4
+SUBLANG_ARABIC_ALGERIA = 5
+SUBLANG_ARABIC_MOROCCO = 6
+SUBLANG_ARABIC_TUNISIA = 7
+SUBLANG_ARABIC_OMAN = 8
+SUBLANG_ARABIC_YEMEN = 9
+SUBLANG_ARABIC_SYRIA = 10
+SUBLANG_ARABIC_JORDAN = 11
+SUBLANG_ARABIC_LEBANON = 12
+SUBLANG_ARABIC_KUWAIT = 13
+SUBLANG_ARABIC_UAE = 14
+SUBLANG_ARABIC_BAHRAIN = 15
+SUBLANG_ARABIC_QATAR = 16
+SUBLANG_CHINESE_TRADITIONAL = 1
+SUBLANG_CHINESE_SIMPLIFIED = 2
+SUBLANG_CHINESE_HONGKONG = 3
+SUBLANG_CHINESE_SINGAPORE = 4
+SUBLANG_CHINESE_MACAU = 5
+SUBLANG_DUTCH = 1
+SUBLANG_DUTCH_BELGIAN = 2
+SUBLANG_ENGLISH_US = 1
+SUBLANG_ENGLISH_UK = 2
+SUBLANG_ENGLISH_AUS = 3
+SUBLANG_ENGLISH_CAN = 4
+SUBLANG_ENGLISH_NZ = 5
+SUBLANG_ENGLISH_EIRE = 6
+SUBLANG_ENGLISH_SOUTH_AFRICA = 7
+SUBLANG_ENGLISH_JAMAICA = 8
+SUBLANG_ENGLISH_CARIBBEAN = 9
+SUBLANG_ENGLISH_BELIZE = 10
+SUBLANG_ENGLISH_TRINIDAD = 11
+SUBLANG_ENGLISH_ZIMBABWE = 12
+SUBLANG_ENGLISH_PHILIPPINES = 13
+SUBLANG_FRENCH = 1
+SUBLANG_FRENCH_BELGIAN = 2
+SUBLANG_FRENCH_CANADIAN = 3
+SUBLANG_FRENCH_SWISS = 4
+SUBLANG_FRENCH_LUXEMBOURG = 5
+SUBLANG_FRENCH_MONACO = 6
+SUBLANG_GERMAN = 1
+SUBLANG_GERMAN_SWISS = 2
+SUBLANG_GERMAN_AUSTRIAN = 3
+SUBLANG_GERMAN_LUXEMBOURG = 4
+SUBLANG_GERMAN_LIECHTENSTEIN = 5
+SUBLANG_ITALIAN = 1
+SUBLANG_ITALIAN_SWISS = 2
+SUBLANG_KOREAN = 1
+SUBLANG_KOREAN_JOHAB = 2
+SUBLANG_LITHUANIAN = 1
+SUBLANG_LITHUANIAN_CLASSIC = 2
+SUBLANG_MALAY_MALAYSIA = 1
+SUBLANG_MALAY_BRUNEI_DARUSSALAM = 2
+SUBLANG_NORWEGIAN_BOKMAL = 1
+SUBLANG_NORWEGIAN_NYNORSK = 2
+SUBLANG_PORTUGUESE = 2
+SUBLANG_PORTUGUESE_BRAZILIAN = 1
+SUBLANG_SERBIAN_LATIN = 2
+SUBLANG_SERBIAN_CYRILLIC = 3
+SUBLANG_SPANISH = 1
+SUBLANG_SPANISH_MEXICAN = 2
+SUBLANG_SPANISH_MODERN = 3
+SUBLANG_SPANISH_GUATEMALA = 4
+SUBLANG_SPANISH_COSTA_RICA = 5
+SUBLANG_SPANISH_PANAMA = 6
+SUBLANG_SPANISH_DOMINICAN_REPUBLIC = 7
+SUBLANG_SPANISH_VENEZUELA = 8
+SUBLANG_SPANISH_COLOMBIA = 9
+SUBLANG_SPANISH_PERU = 10
+SUBLANG_SPANISH_ARGENTINA = 11
+SUBLANG_SPANISH_ECUADOR = 12
+SUBLANG_SPANISH_CHILE = 13
+SUBLANG_SPANISH_URUGUAY = 14
+SUBLANG_SPANISH_PARAGUAY = 15
+SUBLANG_SPANISH_BOLIVIA = 16
+SUBLANG_SPANISH_EL_SALVADOR = 17
+SUBLANG_SPANISH_HONDURAS = 18
+SUBLANG_SPANISH_NICARAGUA = 19
+SUBLANG_SPANISH_PUERTO_RICO = 20
+SUBLANG_SWEDISH = 1
+SUBLANG_SWEDISH_FINLAND = 2
+SORT_DEFAULT = 0
+SORT_JAPANESE_XJIS = 0
+SORT_JAPANESE_UNICODE = 1
+SORT_CHINESE_BIG5 = 0
+SORT_CHINESE_PRCP = 0
+SORT_CHINESE_UNICODE = 1
+SORT_CHINESE_PRC = 2
+SORT_KOREAN_KSC = 0
+SORT_KOREAN_UNICODE = 1
+SORT_GERMAN_PHONE_BOOK = 1
+def PRIMARYLANGID(lgid): return ((WORD  )(lgid) & 1023)
+
+def SUBLANGID(lgid): return ((WORD  )(lgid) >> 10)
+
+NLS_VALID_LOCALE_MASK = 1048575
+def LANGIDFROMLCID(lcid): return ((WORD  )(lcid))
+
+def SORTIDFROMLCID(lcid): return ((WORD  )((((DWORD)(lcid)) & NLS_VALID_LOCALE_MASK) >> 16))
+
+def UNREFERENCED_PARAMETER(P): return (P)
+
+def DBG_UNREFERENCED_PARAMETER(P): return (P)
+
+def DBG_UNREFERENCED_LOCAL_VARIABLE(V): return (V)
+
+def UNREFERENCED_PARAMETER(P): return \
+
+def DBG_UNREFERENCED_PARAMETER(P): return \
+
+def DBG_UNREFERENCED_LOCAL_VARIABLE(V): return \
+
+MAXIMUM_WAIT_OBJECTS = 64
+MAXIMUM_SUSPEND_COUNT = MAXCHAR
+
+EXCEPTION_NONCONTINUABLE = 1
+EXCEPTION_MAXIMUM_PARAMETERS = 15
+PROCESS_TERMINATE = (1)
+PROCESS_CREATE_THREAD = (2)
+PROCESS_VM_OPERATION = (8)
+PROCESS_VM_READ = (16)
+PROCESS_VM_WRITE = (32)
+PROCESS_DUP_HANDLE = (64)
+PROCESS_CREATE_PROCESS = (128)
+PROCESS_SET_QUOTA = (256)
+PROCESS_SET_INFORMATION = (512)
+PROCESS_QUERY_INFORMATION = (1024)
+MAXIMUM_PROCESSORS = 32
+THREAD_TERMINATE = (1)
+THREAD_SUSPEND_RESUME = (2)
+THREAD_GET_CONTEXT = (8)
+THREAD_SET_CONTEXT = (16)
+THREAD_SET_INFORMATION = (32)
+THREAD_QUERY_INFORMATION = (64)
+THREAD_SET_THREAD_TOKEN = (128)
+THREAD_IMPERSONATE = (256)
+THREAD_DIRECT_IMPERSONATION = (512)
+JOB_OBJECT_ASSIGN_PROCESS = (1)
+JOB_OBJECT_SET_ATTRIBUTES = (2)
+JOB_OBJECT_QUERY = (4)
+JOB_OBJECT_TERMINATE = (8)
+TLS_MINIMUM_AVAILABLE = 64
+THREAD_BASE_PRIORITY_LOWRT = 15
+THREAD_BASE_PRIORITY_MAX = 2
+THREAD_BASE_PRIORITY_MIN = -2
+THREAD_BASE_PRIORITY_IDLE = -15
+JOB_OBJECT_LIMIT_WORKINGSET = 1
+JOB_OBJECT_LIMIT_PROCESS_TIME = 2
+JOB_OBJECT_LIMIT_JOB_TIME = 4
+JOB_OBJECT_LIMIT_ACTIVE_PROCESS = 8
+JOB_OBJECT_LIMIT_AFFINITY = 16
+JOB_OBJECT_LIMIT_PRIORITY_CLASS = 32
+JOB_OBJECT_LIMIT_VALID_FLAGS = 63
+EVENT_MODIFY_STATE = 2
+MUTANT_QUERY_STATE = 1
+SEMAPHORE_MODIFY_STATE = 2
+TIME_ZONE_ID_UNKNOWN = 0
+TIME_ZONE_ID_STANDARD = 1
+TIME_ZONE_ID_DAYLIGHT = 2
+PROCESSOR_INTEL_386 = 386
+PROCESSOR_INTEL_486 = 486
+PROCESSOR_INTEL_PENTIUM = 586
+PROCESSOR_MIPS_R4000 = 4000
+PROCESSOR_ALPHA_21064 = 21064
+PROCESSOR_HITACHI_SH3 = 10003
+PROCESSOR_HITACHI_SH3E = 10004
+PROCESSOR_HITACHI_SH4 = 10005
+PROCESSOR_MOTOROLA_821 = 821
+PROCESSOR_ARM_7TDMI = 70001
+PROCESSOR_ARCHITECTURE_INTEL = 0
+PROCESSOR_ARCHITECTURE_MIPS = 1
+PROCESSOR_ARCHITECTURE_ALPHA = 2
+PROCESSOR_ARCHITECTURE_PPC = 3
+PROCESSOR_ARCHITECTURE_SH = 4
+PROCESSOR_ARCHITECTURE_ARM = 5
+PROCESSOR_ARCHITECTURE_UNKNOWN = 65535
+PF_FLOATING_POINT_PRECISION_ERRATA = 0
+PF_FLOATING_POINT_EMULATED = 1
+PF_COMPARE_EXCHANGE_DOUBLE = 2
+PF_MMX_INSTRUCTIONS_AVAILABLE = 3
+PF_PPC_MOVEMEM_64BIT_OK = 4
+PF_ALPHA_BYTE_INSTRUCTIONS = 5
+SECTION_QUERY = 1
+SECTION_MAP_WRITE = 2
+SECTION_MAP_READ = 4
+SECTION_MAP_EXECUTE = 8
+SECTION_EXTEND_SIZE = 16
+PAGE_NOACCESS = 1
+PAGE_READONLY = 2
+PAGE_READWRITE = 4
+PAGE_WRITECOPY = 8
+PAGE_EXECUTE = 16
+PAGE_EXECUTE_READ = 32
+PAGE_EXECUTE_READWRITE = 64
+PAGE_EXECUTE_WRITECOPY = 128
+PAGE_GUARD = 256
+PAGE_NOCACHE = 512
+MEM_COMMIT = 4096
+MEM_RESERVE = 8192
+MEM_DECOMMIT = 16384
+MEM_RELEASE = 32768
+MEM_FREE = 65536
+MEM_PRIVATE = 131072
+MEM_MAPPED = 262144
+MEM_RESET = 524288
+MEM_TOP_DOWN = 1048576
+MEM_4MB_PAGES = -2147483648
+SEC_FILE = 8388608
+SEC_IMAGE = 16777216
+SEC_VLM = 33554432
+SEC_RESERVE = 67108864
+SEC_COMMIT = 134217728
+SEC_NOCACHE = 268435456
+MEM_IMAGE = SEC_IMAGE
+FILE_READ_DATA = ( 1 )
+FILE_LIST_DIRECTORY = ( 1 )
+FILE_WRITE_DATA = ( 2 )
+FILE_ADD_FILE = ( 2 )
+FILE_APPEND_DATA = ( 4 )
+FILE_ADD_SUBDIRECTORY = ( 4 )
+FILE_CREATE_PIPE_INSTANCE = ( 4 )
+FILE_READ_EA = ( 8 )
+FILE_WRITE_EA = ( 16 )
+FILE_EXECUTE = ( 32 )
+FILE_TRAVERSE = ( 32 )
+FILE_DELETE_CHILD = ( 64 )
+FILE_READ_ATTRIBUTES = ( 128 )
+FILE_WRITE_ATTRIBUTES = ( 256 )
+FILE_SHARE_READ = 1
+FILE_SHARE_WRITE = 2
+FILE_SHARE_DELETE = 4
+FILE_ATTRIBUTE_READONLY = 1
+FILE_ATTRIBUTE_HIDDEN = 2
+FILE_ATTRIBUTE_SYSTEM = 4
+FILE_ATTRIBUTE_DIRECTORY = 16
+FILE_ATTRIBUTE_ARCHIVE = 32
+FILE_ATTRIBUTE_ENCRYPTED = 64
+FILE_ATTRIBUTE_NORMAL = 128
+FILE_ATTRIBUTE_TEMPORARY = 256
+FILE_ATTRIBUTE_SPARSE_FILE = 512
+FILE_ATTRIBUTE_REPARSE_POINT = 1024
+FILE_ATTRIBUTE_COMPRESSED = 2048
+FILE_ATTRIBUTE_OFFLINE = 4096
+FILE_NOTIFY_CHANGE_FILE_NAME = 1
+FILE_NOTIFY_CHANGE_DIR_NAME = 2
+FILE_NOTIFY_CHANGE_ATTRIBUTES = 4
+FILE_NOTIFY_CHANGE_SIZE = 8
+FILE_NOTIFY_CHANGE_LAST_WRITE = 16
+FILE_NOTIFY_CHANGE_LAST_ACCESS = 32
+FILE_NOTIFY_CHANGE_CREATION = 64
+FILE_NOTIFY_CHANGE_SECURITY = 256
+FILE_ACTION_ADDED = 1
+FILE_ACTION_REMOVED = 2
+FILE_ACTION_MODIFIED = 3
+FILE_ACTION_RENAMED_OLD_NAME = 4
+FILE_ACTION_RENAMED_NEW_NAME = 5
+FILE_CASE_SENSITIVE_SEARCH = 1
+FILE_CASE_PRESERVED_NAMES = 2
+FILE_UNICODE_ON_DISK = 4
+FILE_PERSISTENT_ACLS = 8
+FILE_FILE_COMPRESSION = 16
+FILE_VOLUME_QUOTAS = 32
+FILE_SUPPORTS_SPARSE_FILES = 64
+FILE_SUPPORTS_REPARSE_POINTS = 128
+FILE_SUPPORTS_REMOTE_STORAGE = 256
+FILE_VOLUME_IS_COMPRESSED = 32768
+FILE_SUPPORTS_OBJECT_IDS = 65536
+FILE_SUPPORTS_ENCRYPTION = 131072
+
+MAXIMUM_REPARSE_DATA_BUFFER_SIZE = ( 16 * 1024 )
+IO_REPARSE_TAG_RESERVED_ZERO = (0)
+IO_REPARSE_TAG_RESERVED_ONE = (1)
+IO_REPARSE_TAG_SYMBOLIC_LINK = (2)
+IO_REPARSE_TAG_NSS = (5)
+IO_REPARSE_TAG_FILTER_MANAGER = -2147483637
+IO_REPARSE_TAG_DFS = -2147483638
+IO_REPARSE_TAG_SIS = -2147483641
+IO_REPARSE_TAG_MOUNT_POINT = -1610612733
+IO_REPARSE_TAG_HSM = -1073741820
+IO_REPARSE_TAG_NSSRECOVER = (8)
+IO_REPARSE_TAG_RESERVED_MS_RANGE = (256)
+IO_REPARSE_TAG_RESERVED_RANGE = IO_REPARSE_TAG_RESERVED_ONE
+IO_COMPLETION_MODIFY_STATE = 2
+
+DUPLICATE_CLOSE_SOURCE = 1
+DUPLICATE_SAME_ACCESS = 2
+DELETE = (65536)
+READ_CONTROL = (131072)
+WRITE_DAC = (262144)
+WRITE_OWNER = (524288)
+SYNCHRONIZE = (1048576)
+STANDARD_RIGHTS_REQUIRED = (983040)
+STANDARD_RIGHTS_READ = (READ_CONTROL)
+STANDARD_RIGHTS_WRITE = (READ_CONTROL)
+STANDARD_RIGHTS_EXECUTE = (READ_CONTROL)
+STANDARD_RIGHTS_ALL = (2031616)
+SPECIFIC_RIGHTS_ALL = (65535)
+IO_COMPLETION_ALL_ACCESS = STANDARD_RIGHTS_REQUIRED|SYNCHRONIZE|0x3
+ACCESS_SYSTEM_SECURITY = (16777216)
+MAXIMUM_ALLOWED = (33554432)
+GENERIC_READ = (-2147483648)
+GENERIC_WRITE = (1073741824)
+GENERIC_EXECUTE = (536870912)
+GENERIC_ALL = (268435456)
+
+# Included from pshpack4.h
+
+# Included from poppack.h
+SID_REVISION = (1)
+SID_MAX_SUB_AUTHORITIES = (15)
+SID_RECOMMENDED_SUB_AUTHORITIES = (1)
+
+SidTypeUser = 1
+SidTypeGroup = 2
+SidTypeDomain =3
+SidTypeAlias = 4
+SidTypeWellKnownGroup = 5
+SidTypeDeletedAccount = 6
+SidTypeInvalid = 7
+SidTypeUnknown = 8
+
+SECURITY_NULL_RID = (0)
+SECURITY_WORLD_RID = (0)
+SECURITY_LOCAL_RID = (0X00000000)
+SECURITY_CREATOR_OWNER_RID = (0)
+SECURITY_CREATOR_GROUP_RID = (1)
+SECURITY_CREATOR_OWNER_SERVER_RID = (2)
+SECURITY_CREATOR_GROUP_SERVER_RID = (3)
+SECURITY_DIALUP_RID = (1)
+SECURITY_NETWORK_RID = (2)
+SECURITY_BATCH_RID = (3)
+SECURITY_INTERACTIVE_RID = (4)
+SECURITY_SERVICE_RID = (6)
+SECURITY_ANONYMOUS_LOGON_RID = (7)
+SECURITY_PROXY_RID = (8)
+SECURITY_SERVER_LOGON_RID = (9)
+SECURITY_PRINCIPAL_SELF_RID = (10)
+SECURITY_AUTHENTICATED_USER_RID = (11)
+SECURITY_LOGON_IDS_RID = (5)
+SECURITY_LOGON_IDS_RID_COUNT = (3)
+SECURITY_LOCAL_SYSTEM_RID = (18)
+SECURITY_NT_NON_UNIQUE = (21)
+SECURITY_BUILTIN_DOMAIN_RID = (32)
+DOMAIN_USER_RID_ADMIN = (500)
+DOMAIN_USER_RID_GUEST = (501)
+DOMAIN_GROUP_RID_ADMINS = (512)
+DOMAIN_GROUP_RID_USERS = (513)
+DOMAIN_GROUP_RID_GUESTS = (514)
+DOMAIN_ALIAS_RID_ADMINS = (544)
+DOMAIN_ALIAS_RID_USERS = (545)
+DOMAIN_ALIAS_RID_GUESTS = (546)
+DOMAIN_ALIAS_RID_POWER_USERS = (547)
+DOMAIN_ALIAS_RID_ACCOUNT_OPS = (548)
+DOMAIN_ALIAS_RID_SYSTEM_OPS = (549)
+DOMAIN_ALIAS_RID_PRINT_OPS = (550)
+DOMAIN_ALIAS_RID_BACKUP_OPS = (551)
+DOMAIN_ALIAS_RID_REPLICATOR = (552)
+SE_GROUP_MANDATORY = (1)
+SE_GROUP_ENABLED_BY_DEFAULT = (2)
+SE_GROUP_ENABLED = (4)
+SE_GROUP_OWNER = (8)
+SE_GROUP_LOGON_ID = (-1073741824)
+ACL_REVISION = (2)
+ACL_REVISION_DS = (4)
+ACL_REVISION1 = (1)
+ACL_REVISION2 = (2)
+ACL_REVISION3 = (3)
+ACL_REVISION4 = (4)
+MAX_ACL_REVISION = ACL_REVISION4
+ACCESS_MIN_MS_ACE_TYPE = (0)
+ACCESS_ALLOWED_ACE_TYPE = (0)
+ACCESS_DENIED_ACE_TYPE = (1)
+SYSTEM_AUDIT_ACE_TYPE = (2)
+SYSTEM_ALARM_ACE_TYPE = (3)
+ACCESS_MAX_MS_V2_ACE_TYPE = (3)
+ACCESS_ALLOWED_COMPOUND_ACE_TYPE = (4)
+ACCESS_MAX_MS_V3_ACE_TYPE = (4)
+ACCESS_MIN_MS_OBJECT_ACE_TYPE = (5)
+ACCESS_ALLOWED_OBJECT_ACE_TYPE = (5)
+ACCESS_DENIED_OBJECT_ACE_TYPE = (6)
+SYSTEM_AUDIT_OBJECT_ACE_TYPE = (7)
+SYSTEM_ALARM_OBJECT_ACE_TYPE = (8)
+ACCESS_MAX_MS_OBJECT_ACE_TYPE = (8)
+ACCESS_MAX_MS_V4_ACE_TYPE = (8)
+ACCESS_MAX_MS_ACE_TYPE = (8)
+OBJECT_INHERIT_ACE = (1)
+CONTAINER_INHERIT_ACE = (2)
+NO_PROPAGATE_INHERIT_ACE = (4)
+INHERIT_ONLY_ACE = (8)
+INHERITED_ACE = (16)
+VALID_INHERIT_FLAGS = (31)
+SUCCESSFUL_ACCESS_ACE_FLAG = (64)
+FAILED_ACCESS_ACE_FLAG = (128)
+ACE_OBJECT_TYPE_PRESENT = 1
+ACE_INHERITED_OBJECT_TYPE_PRESENT = 2
+SECURITY_DESCRIPTOR_REVISION = (1)
+SECURITY_DESCRIPTOR_REVISION1 = (1)
+SECURITY_DESCRIPTOR_MIN_LENGTH = (20)
+SE_OWNER_DEFAULTED = (1)
+SE_GROUP_DEFAULTED = (2)
+SE_DACL_PRESENT = (4)
+SE_DACL_DEFAULTED = (8)
+SE_SACL_PRESENT = (16)
+SE_SACL_DEFAULTED = (32)
+SE_DACL_AUTO_INHERIT_REQ = (256)
+SE_SACL_AUTO_INHERIT_REQ = (512)
+SE_DACL_AUTO_INHERITED = (1024)
+SE_SACL_AUTO_INHERITED = (2048)
+SE_DACL_PROTECTED = (4096)
+SE_SACL_PROTECTED = (8192)
+SE_SELF_RELATIVE = (32768)
+ACCESS_OBJECT_GUID = 0
+ACCESS_PROPERTY_SET_GUID = 1
+ACCESS_PROPERTY_GUID = 2
+ACCESS_MAX_LEVEL = 4
+AUDIT_ALLOW_NO_PRIVILEGE = 1
+ACCESS_DS_SOURCE_A = "Directory Service"
+ACCESS_DS_OBJECT_TYPE_NAME_A = "Directory Service Object"
+SE_PRIVILEGE_ENABLED_BY_DEFAULT = (1)
+SE_PRIVILEGE_ENABLED = (2)
+SE_PRIVILEGE_USED_FOR_ACCESS = (-2147483648)
+PRIVILEGE_SET_ALL_NECESSARY = (1)
+
+SE_CREATE_TOKEN_NAME = "SeCreateTokenPrivilege"
+SE_ASSIGNPRIMARYTOKEN_NAME = "SeAssignPrimaryTokenPrivilege"
+SE_LOCK_MEMORY_NAME = "SeLockMemoryPrivilege"
+SE_INCREASE_QUOTA_NAME = "SeIncreaseQuotaPrivilege"
+SE_UNSOLICITED_INPUT_NAME = "SeUnsolicitedInputPrivilege"
+SE_MACHINE_ACCOUNT_NAME = "SeMachineAccountPrivilege"
+SE_TCB_NAME = "SeTcbPrivilege"
+SE_SECURITY_NAME = "SeSecurityPrivilege"
+SE_TAKE_OWNERSHIP_NAME = "SeTakeOwnershipPrivilege"
+SE_LOAD_DRIVER_NAME = "SeLoadDriverPrivilege"
+SE_SYSTEM_PROFILE_NAME = "SeSystemProfilePrivilege"
+SE_SYSTEMTIME_NAME = "SeSystemtimePrivilege"
+SE_PROF_SINGLE_PROCESS_NAME = "SeProfileSingleProcessPrivilege"
+SE_INC_BASE_PRIORITY_NAME = "SeIncreaseBasePriorityPrivilege"
+SE_CREATE_PAGEFILE_NAME = "SeCreatePagefilePrivilege"
+SE_CREATE_PERMANENT_NAME = "SeCreatePermanentPrivilege"
+SE_BACKUP_NAME = "SeBackupPrivilege"
+SE_RESTORE_NAME = "SeRestorePrivilege"
+SE_SHUTDOWN_NAME = "SeShutdownPrivilege"
+SE_DEBUG_NAME = "SeDebugPrivilege"
+SE_AUDIT_NAME = "SeAuditPrivilege"
+SE_SYSTEM_ENVIRONMENT_NAME = "SeSystemEnvironmentPrivilege"
+SE_CHANGE_NOTIFY_NAME = "SeChangeNotifyPrivilege"
+SE_REMOTE_SHUTDOWN_NAME = "SeRemoteShutdownPrivilege"
+TOKEN_ASSIGN_PRIMARY = (1)
+TOKEN_DUPLICATE = (2)
+TOKEN_IMPERSONATE = (4)
+TOKEN_QUERY = (8)
+TOKEN_QUERY_SOURCE = (16)
+TOKEN_ADJUST_PRIVILEGES = (32)
+TOKEN_ADJUST_GROUPS = (64)
+TOKEN_ADJUST_DEFAULT = (128)
+TOKEN_ALL_ACCESS = (STANDARD_RIGHTS_REQUIRED  |\
+                          TOKEN_ASSIGN_PRIMARY      |\
+                          TOKEN_DUPLICATE           |\
+                          TOKEN_IMPERSONATE         |\
+                          TOKEN_QUERY               |\
+                          TOKEN_QUERY_SOURCE        |\
+                          TOKEN_ADJUST_PRIVILEGES   |\
+                          TOKEN_ADJUST_GROUPS       |\
+                          TOKEN_ADJUST_DEFAULT)
+TOKEN_READ = (STANDARD_RIGHTS_READ      |\
+                          TOKEN_QUERY)
+TOKEN_WRITE = (STANDARD_RIGHTS_WRITE     |\
+                          TOKEN_ADJUST_PRIVILEGES   |\
+                          TOKEN_ADJUST_GROUPS       |\
+                          TOKEN_ADJUST_DEFAULT)
+TOKEN_EXECUTE = (STANDARD_RIGHTS_EXECUTE)
+TOKEN_SOURCE_LENGTH = 8
+# Token types
+TokenPrimary = 1
+TokenImpersonation = 2
+
+TokenUser = 1
+TokenGroups = 2
+TokenPrivileges = 3
+TokenOwner = 4
+TokenPrimaryGroup = 5
+TokenDefaultDacl = 6
+TokenSource = 7
+TokenType = 8
+TokenImpersonationLevel = 9
+TokenStatistics = 10
+
+
+OWNER_SECURITY_INFORMATION = (0X00000001)
+GROUP_SECURITY_INFORMATION = (0X00000002)
+DACL_SECURITY_INFORMATION = (0X00000004)
+SACL_SECURITY_INFORMATION = (0X00000008)
+IMAGE_DOS_SIGNATURE = 23117
+IMAGE_OS2_SIGNATURE = 17742
+IMAGE_OS2_SIGNATURE_LE = 17740
+IMAGE_VXD_SIGNATURE = 17740
+IMAGE_NT_SIGNATURE = 17744
+IMAGE_SIZEOF_FILE_HEADER = 20
+IMAGE_FILE_RELOCS_STRIPPED = 1
+IMAGE_FILE_EXECUTABLE_IMAGE = 2
+IMAGE_FILE_LINE_NUMS_STRIPPED = 4
+IMAGE_FILE_LOCAL_SYMS_STRIPPED = 8
+IMAGE_FILE_AGGRESIVE_WS_TRIM = 16
+IMAGE_FILE_LARGE_ADDRESS_AWARE = 32
+IMAGE_FILE_BYTES_REVERSED_LO = 128
+IMAGE_FILE_32BIT_MACHINE = 256
+IMAGE_FILE_DEBUG_STRIPPED = 512
+IMAGE_FILE_REMOVABLE_RUN_FROM_SWAP = 1024
+IMAGE_FILE_NET_RUN_FROM_SWAP = 2048
+IMAGE_FILE_SYSTEM = 4096
+IMAGE_FILE_DLL = 8192
+IMAGE_FILE_UP_SYSTEM_ONLY = 16384
+IMAGE_FILE_BYTES_REVERSED_HI = 32768
+IMAGE_FILE_MACHINE_UNKNOWN = 0
+IMAGE_FILE_MACHINE_I386 = 332
+IMAGE_FILE_MACHINE_R3000 = 354
+IMAGE_FILE_MACHINE_R4000 = 358
+IMAGE_FILE_MACHINE_R10000 = 360
+IMAGE_FILE_MACHINE_WCEMIPSV2 = 361
+IMAGE_FILE_MACHINE_ALPHA = 388
+IMAGE_FILE_MACHINE_POWERPC = 496
+IMAGE_FILE_MACHINE_SH3 = 418
+IMAGE_FILE_MACHINE_SH3E = 420
+IMAGE_FILE_MACHINE_SH4 = 422
+IMAGE_FILE_MACHINE_ARM = 448
+IMAGE_NUMBEROF_DIRECTORY_ENTRIES = 16
+IMAGE_SIZEOF_ROM_OPTIONAL_HEADER = 56
+IMAGE_SIZEOF_STD_OPTIONAL_HEADER = 28
+IMAGE_SIZEOF_NT_OPTIONAL_HEADER = 224
+IMAGE_NT_OPTIONAL_HDR_MAGIC = 267
+IMAGE_ROM_OPTIONAL_HDR_MAGIC = 263
+IMAGE_SUBSYSTEM_UNKNOWN = 0
+IMAGE_SUBSYSTEM_NATIVE = 1
+IMAGE_SUBSYSTEM_WINDOWS_GUI = 2
+IMAGE_SUBSYSTEM_WINDOWS_CUI = 3
+IMAGE_SUBSYSTEM_WINDOWS_CE_GUI = 4
+IMAGE_SUBSYSTEM_OS2_CUI = 5
+IMAGE_SUBSYSTEM_POSIX_CUI = 7
+IMAGE_SUBSYSTEM_RESERVED8 = 8
+IMAGE_DLLCHARACTERISTICS_WDM_DRIVER = 8192
+IMAGE_DIRECTORY_ENTRY_EXPORT = 0
+IMAGE_DIRECTORY_ENTRY_IMPORT = 1
+IMAGE_DIRECTORY_ENTRY_RESOURCE = 2
+IMAGE_DIRECTORY_ENTRY_EXCEPTION = 3
+IMAGE_DIRECTORY_ENTRY_SECURITY = 4
+IMAGE_DIRECTORY_ENTRY_BASERELOC = 5
+IMAGE_DIRECTORY_ENTRY_DEBUG = 6
+IMAGE_DIRECTORY_ENTRY_COPYRIGHT = 7
+IMAGE_DIRECTORY_ENTRY_GLOBALPTR = 8
+IMAGE_DIRECTORY_ENTRY_TLS = 9
+IMAGE_DIRECTORY_ENTRY_LOAD_CONFIG = 10
+IMAGE_DIRECTORY_ENTRY_BOUND_IMPORT = 11
+IMAGE_DIRECTORY_ENTRY_IAT = 12
+IMAGE_SIZEOF_SHORT_NAME = 8
+IMAGE_SIZEOF_SECTION_HEADER = 40
+IMAGE_SCN_TYPE_NO_PAD = 8
+IMAGE_SCN_CNT_CODE = 32
+IMAGE_SCN_CNT_INITIALIZED_DATA = 64
+IMAGE_SCN_CNT_UNINITIALIZED_DATA = 128
+IMAGE_SCN_LNK_OTHER = 256
+IMAGE_SCN_LNK_INFO = 512
+IMAGE_SCN_LNK_REMOVE = 2048
+IMAGE_SCN_LNK_COMDAT = 4096
+IMAGE_SCN_MEM_FARDATA = 32768
+IMAGE_SCN_MEM_PURGEABLE = 131072
+IMAGE_SCN_MEM_16BIT = 131072
+IMAGE_SCN_MEM_LOCKED = 262144
+IMAGE_SCN_MEM_PRELOAD = 524288
+IMAGE_SCN_ALIGN_1BYTES = 1048576
+IMAGE_SCN_ALIGN_2BYTES = 2097152
+IMAGE_SCN_ALIGN_4BYTES = 3145728
+IMAGE_SCN_ALIGN_8BYTES = 4194304
+IMAGE_SCN_ALIGN_16BYTES = 5242880
+IMAGE_SCN_ALIGN_32BYTES = 6291456
+IMAGE_SCN_ALIGN_64BYTES = 7340032
+IMAGE_SCN_LNK_NRELOC_OVFL = 16777216
+IMAGE_SCN_MEM_DISCARDABLE = 33554432
+IMAGE_SCN_MEM_NOT_CACHED = 67108864
+IMAGE_SCN_MEM_NOT_PAGED = 134217728
+IMAGE_SCN_MEM_SHARED = 268435456
+IMAGE_SCN_MEM_EXECUTE = 536870912
+IMAGE_SCN_MEM_READ = 1073741824
+IMAGE_SCN_MEM_WRITE = -2147483648
+IMAGE_SCN_SCALE_INDEX = 1
+IMAGE_SIZEOF_SYMBOL = 18
+IMAGE_SYM_TYPE_NULL = 0
+IMAGE_SYM_TYPE_VOID = 1
+IMAGE_SYM_TYPE_CHAR = 2
+IMAGE_SYM_TYPE_SHORT = 3
+IMAGE_SYM_TYPE_INT = 4
+IMAGE_SYM_TYPE_LONG = 5
+IMAGE_SYM_TYPE_FLOAT = 6
+IMAGE_SYM_TYPE_DOUBLE = 7
+IMAGE_SYM_TYPE_STRUCT = 8
+IMAGE_SYM_TYPE_UNION = 9
+IMAGE_SYM_TYPE_ENUM = 10
+IMAGE_SYM_TYPE_MOE = 11
+IMAGE_SYM_TYPE_BYTE = 12
+IMAGE_SYM_TYPE_WORD = 13
+IMAGE_SYM_TYPE_UINT = 14
+IMAGE_SYM_TYPE_DWORD = 15
+IMAGE_SYM_TYPE_PCODE = 32768
# COFF symbol-table codes from winnt.h (h2py-generated; used when decoding
# a PE/COFF symbol record).

# Derived-type codes (IMAGE_SYM_DTYPE_*) -- the complex-type bits of a
# symbol's 16-bit type word.
IMAGE_SYM_DTYPE_NULL = 0
IMAGE_SYM_DTYPE_POINTER = 1
IMAGE_SYM_DTYPE_FUNCTION = 2
IMAGE_SYM_DTYPE_ARRAY = 3
# Storage-class codes (IMAGE_SYM_CLASS_*).
IMAGE_SYM_CLASS_NULL = 0
IMAGE_SYM_CLASS_AUTOMATIC = 1
IMAGE_SYM_CLASS_EXTERNAL = 2
IMAGE_SYM_CLASS_STATIC = 3
IMAGE_SYM_CLASS_REGISTER = 4
IMAGE_SYM_CLASS_EXTERNAL_DEF = 5
IMAGE_SYM_CLASS_LABEL = 6
IMAGE_SYM_CLASS_UNDEFINED_LABEL = 7
IMAGE_SYM_CLASS_MEMBER_OF_STRUCT = 8
IMAGE_SYM_CLASS_ARGUMENT = 9
IMAGE_SYM_CLASS_STRUCT_TAG = 10
IMAGE_SYM_CLASS_MEMBER_OF_UNION = 11
IMAGE_SYM_CLASS_UNION_TAG = 12
IMAGE_SYM_CLASS_TYPE_DEFINITION = 13
IMAGE_SYM_CLASS_UNDEFINED_STATIC = 14
IMAGE_SYM_CLASS_ENUM_TAG = 15
IMAGE_SYM_CLASS_MEMBER_OF_ENUM = 16
IMAGE_SYM_CLASS_REGISTER_PARAM = 17
IMAGE_SYM_CLASS_BIT_FIELD = 18
IMAGE_SYM_CLASS_FAR_EXTERNAL = 68
IMAGE_SYM_CLASS_BLOCK = 100
IMAGE_SYM_CLASS_FUNCTION = 101
IMAGE_SYM_CLASS_END_OF_STRUCT = 102
IMAGE_SYM_CLASS_FILE = 103
IMAGE_SYM_CLASS_SECTION = 104
IMAGE_SYM_CLASS_WEAK_EXTERNAL = 105
# Bit masks / shifts for decoding a COFF symbol's type word (winnt.h).
N_BTMASK = 15
N_TMASK = 48
N_TMASK1 = 192
N_TMASK2 = 240
N_BTSHFT = 4
N_TSHIFT = 2


def BTYPE(x):
    """Return the base-type bits of COFF symbol type word *x*."""
    return x & N_BTMASK


def ISPTR(x):
    """Return True if COFF symbol type word *x* has the pointer derived type."""
    return (x & N_TMASK) == (IMAGE_SYM_DTYPE_POINTER << N_BTSHFT)


def ISFCN(x):
    """Return True if COFF symbol type word *x* has the function derived type."""
    return (x & N_TMASK) == (IMAGE_SYM_DTYPE_FUNCTION << N_BTSHFT)


def ISARY(x):
    """Return True if COFF symbol type word *x* has the array derived type."""
    return (x & N_TMASK) == (IMAGE_SYM_DTYPE_ARRAY << N_BTSHFT)


def INCREF(x):
    """Add one level of pointer indirection to COFF symbol type word *x*."""
    return ((x & ~N_BTMASK) << N_TSHIFT) \
        | (IMAGE_SYM_DTYPE_POINTER << N_BTSHFT) \
        | (x & N_BTMASK)


def DECREF(x):
    """Remove one level of derived-type indirection from COFF symbol type word *x*."""
    return ((x >> N_TSHIFT) & ~N_BTMASK) | (x & N_BTMASK)
+
+IMAGE_SIZEOF_AUX_SYMBOL = 18
+IMAGE_COMDAT_SELECT_NODUPLICATES = 1
+IMAGE_COMDAT_SELECT_ANY = 2
+IMAGE_COMDAT_SELECT_SAME_SIZE = 3
+IMAGE_COMDAT_SELECT_EXACT_MATCH = 4
+IMAGE_COMDAT_SELECT_ASSOCIATIVE = 5
+IMAGE_COMDAT_SELECT_LARGEST = 6
+IMAGE_COMDAT_SELECT_NEWEST = 7
+IMAGE_WEAK_EXTERN_SEARCH_NOLIBRARY = 1
+IMAGE_WEAK_EXTERN_SEARCH_LIBRARY = 2
+IMAGE_WEAK_EXTERN_SEARCH_ALIAS = 3
+IMAGE_SIZEOF_RELOCATION = 10
+IMAGE_REL_I386_ABSOLUTE = 0
+IMAGE_REL_I386_DIR16 = 1
+IMAGE_REL_I386_REL16 = 2
+IMAGE_REL_I386_DIR32 = 6
+IMAGE_REL_I386_DIR32NB = 7
+IMAGE_REL_I386_SEG12 = 9
+IMAGE_REL_I386_SECTION = 10
+IMAGE_REL_I386_SECREL = 11
+IMAGE_REL_I386_REL32 = 20
+IMAGE_REL_MIPS_ABSOLUTE = 0
+IMAGE_REL_MIPS_REFHALF = 1
+IMAGE_REL_MIPS_REFWORD = 2
+IMAGE_REL_MIPS_JMPADDR = 3
+IMAGE_REL_MIPS_REFHI = 4
+IMAGE_REL_MIPS_REFLO = 5
+IMAGE_REL_MIPS_GPREL = 6
+IMAGE_REL_MIPS_LITERAL = 7
+IMAGE_REL_MIPS_SECTION = 10
+IMAGE_REL_MIPS_SECREL = 11
+IMAGE_REL_MIPS_SECRELLO = 12
+IMAGE_REL_MIPS_SECRELHI = 13
+IMAGE_REL_MIPS_REFWORDNB = 34
+IMAGE_REL_MIPS_PAIR = 37
+IMAGE_REL_ALPHA_ABSOLUTE = 0
+IMAGE_REL_ALPHA_REFLONG = 1
+IMAGE_REL_ALPHA_REFQUAD = 2
+IMAGE_REL_ALPHA_GPREL32 = 3
+IMAGE_REL_ALPHA_LITERAL = 4
+IMAGE_REL_ALPHA_LITUSE = 5
+IMAGE_REL_ALPHA_GPDISP = 6
+IMAGE_REL_ALPHA_BRADDR = 7
+IMAGE_REL_ALPHA_HINT = 8
+IMAGE_REL_ALPHA_INLINE_REFLONG = 9
+IMAGE_REL_ALPHA_REFHI = 10
+IMAGE_REL_ALPHA_REFLO = 11
+IMAGE_REL_ALPHA_PAIR = 12
+IMAGE_REL_ALPHA_MATCH = 13
+IMAGE_REL_ALPHA_SECTION = 14
+IMAGE_REL_ALPHA_SECREL = 15
+IMAGE_REL_ALPHA_REFLONGNB = 16
+IMAGE_REL_ALPHA_SECRELLO = 17
+IMAGE_REL_ALPHA_SECRELHI = 18
+IMAGE_REL_PPC_ABSOLUTE = 0
+IMAGE_REL_PPC_ADDR64 = 1
+IMAGE_REL_PPC_ADDR32 = 2
+IMAGE_REL_PPC_ADDR24 = 3
+IMAGE_REL_PPC_ADDR16 = 4
+IMAGE_REL_PPC_ADDR14 = 5
+IMAGE_REL_PPC_REL24 = 6
+IMAGE_REL_PPC_REL14 = 7
+IMAGE_REL_PPC_TOCREL16 = 8
+IMAGE_REL_PPC_TOCREL14 = 9
+IMAGE_REL_PPC_ADDR32NB = 10
+IMAGE_REL_PPC_SECREL = 11
+IMAGE_REL_PPC_SECTION = 12
+IMAGE_REL_PPC_IFGLUE = 13
+IMAGE_REL_PPC_IMGLUE = 14
+IMAGE_REL_PPC_SECREL16 = 15
+IMAGE_REL_PPC_REFHI = 16
+IMAGE_REL_PPC_REFLO = 17
+IMAGE_REL_PPC_PAIR = 18
+IMAGE_REL_PPC_SECRELLO = 19
+IMAGE_REL_PPC_SECRELHI = 20
+IMAGE_REL_PPC_TYPEMASK = 255
+IMAGE_REL_PPC_NEG = 256
+IMAGE_REL_PPC_BRTAKEN = 512
+IMAGE_REL_PPC_BRNTAKEN = 1024
+IMAGE_REL_PPC_TOCDEFN = 2048
+IMAGE_REL_SH3_ABSOLUTE = 0
+IMAGE_REL_SH3_DIRECT16 = 1
+IMAGE_REL_SH3_DIRECT32 = 2
+IMAGE_REL_SH3_DIRECT8 = 3
+IMAGE_REL_SH3_DIRECT8_WORD = 4
+IMAGE_REL_SH3_DIRECT8_LONG = 5
+IMAGE_REL_SH3_DIRECT4 = 6
+IMAGE_REL_SH3_DIRECT4_WORD = 7
+IMAGE_REL_SH3_DIRECT4_LONG = 8
+IMAGE_REL_SH3_PCREL8_WORD = 9
+IMAGE_REL_SH3_PCREL8_LONG = 10
+IMAGE_REL_SH3_PCREL12_WORD = 11
+IMAGE_REL_SH3_STARTOF_SECTION = 12
+IMAGE_REL_SH3_SIZEOF_SECTION = 13
+IMAGE_REL_SH3_SECTION = 14
+IMAGE_REL_SH3_SECREL = 15
+IMAGE_REL_SH3_DIRECT32_NB = 16
+IMAGE_SIZEOF_LINENUMBER = 6
+IMAGE_SIZEOF_BASE_RELOCATION = 8
+IMAGE_REL_BASED_ABSOLUTE = 0
+IMAGE_REL_BASED_HIGH = 1
+IMAGE_REL_BASED_LOW = 2
+IMAGE_REL_BASED_HIGHLOW = 3
+IMAGE_REL_BASED_HIGHADJ = 4
+IMAGE_REL_BASED_MIPS_JMPADDR = 5
+IMAGE_REL_BASED_SECTION = 6
+IMAGE_REL_BASED_REL32 = 7
+IMAGE_ARCHIVE_START_SIZE = 8
+IMAGE_ARCHIVE_START = "!<arch>\n"
+IMAGE_ARCHIVE_END = "`\n"
+IMAGE_ARCHIVE_PAD = "\n"
+IMAGE_ARCHIVE_LINKER_MEMBER = "/               "
+IMAGE_SIZEOF_ARCHIVE_MEMBER_HDR = 60
IMAGE_ORDINAL_FLAG = -2147483648  # 0x80000000 as a signed 32-bit value: import-by-ordinal bit

def IMAGE_SNAP_BY_ORDINAL(Ordinal):
    """Return True if import thunk value *Ordinal* is an import-by-ordinal entry.

    Mirrors the IMAGE_SNAP_BY_ORDINAL macro from winnt.h.  Fixes the h2py
    generation artifact that named the parameter ``Ordina`` while the body
    referenced ``Ordinal``, which raised NameError on every call.
    """
    return (Ordinal & IMAGE_ORDINAL_FLAG) != 0
+
def IMAGE_ORDINAL(Ordinal):
    """Return the 16-bit ordinal number from import thunk value *Ordinal*.

    Mirrors the IMAGE_ORDINAL macro from winnt.h.  Fixes the h2py
    generation artifact that named the parameter ``Ordina`` while the body
    referenced ``Ordinal``, which raised NameError on every call.
    """
    return Ordinal & 65535
+
+IMAGE_RESOURCE_NAME_IS_STRING = -2147483648
+IMAGE_RESOURCE_DATA_IS_DIRECTORY = -2147483648
+IMAGE_DEBUG_TYPE_UNKNOWN = 0
+IMAGE_DEBUG_TYPE_COFF = 1
+IMAGE_DEBUG_TYPE_CODEVIEW = 2
+IMAGE_DEBUG_TYPE_FPO = 3
+IMAGE_DEBUG_TYPE_MISC = 4
+IMAGE_DEBUG_TYPE_EXCEPTION = 5
+IMAGE_DEBUG_TYPE_FIXUP = 6
+IMAGE_DEBUG_TYPE_OMAP_TO_SRC = 7
+IMAGE_DEBUG_TYPE_OMAP_FROM_SRC = 8
+IMAGE_DEBUG_TYPE_BORLAND = 9
+FRAME_FPO = 0
+FRAME_TRAP = 1
+FRAME_TSS = 2
+FRAME_NONFPO = 3
+SIZEOF_RFPO_DATA = 16
+IMAGE_DEBUG_MISC_EXENAME = 1
+IMAGE_SEPARATE_DEBUG_SIGNATURE = 18756
+IMAGE_SEPARATE_DEBUG_FLAGS_MASK = 32768
+IMAGE_SEPARATE_DEBUG_MISMATCH = 32768
+
+# Included from string.h
+_NLSCMPERROR = 2147483647
+NULL = 0
+HEAP_NO_SERIALIZE = 1
+HEAP_GROWABLE = 2
+HEAP_GENERATE_EXCEPTIONS = 4
+HEAP_ZERO_MEMORY = 8
+HEAP_REALLOC_IN_PLACE_ONLY = 16
+HEAP_TAIL_CHECKING_ENABLED = 32
+HEAP_FREE_CHECKING_ENABLED = 64
+HEAP_DISABLE_COALESCE_ON_FREE = 128
+HEAP_CREATE_ALIGN_16 = 65536
+HEAP_CREATE_ENABLE_TRACING = 131072
+HEAP_MAXIMUM_TAG = 4095
+HEAP_PSEUDO_TAG_FLAG = 32768
+HEAP_TAG_SHIFT = 16
+IS_TEXT_UNICODE_ASCII16 = 1
+IS_TEXT_UNICODE_REVERSE_ASCII16 = 16
+IS_TEXT_UNICODE_STATISTICS = 2
+IS_TEXT_UNICODE_REVERSE_STATISTICS = 32
+IS_TEXT_UNICODE_CONTROLS = 4
+IS_TEXT_UNICODE_REVERSE_CONTROLS = 64
+IS_TEXT_UNICODE_SIGNATURE = 8
+IS_TEXT_UNICODE_REVERSE_SIGNATURE = 128
+IS_TEXT_UNICODE_ILLEGAL_CHARS = 256
+IS_TEXT_UNICODE_ODD_LENGTH = 512
+IS_TEXT_UNICODE_DBCS_LEADBYTE = 1024
+IS_TEXT_UNICODE_NULL_BYTES = 4096
+IS_TEXT_UNICODE_UNICODE_MASK = 15
+IS_TEXT_UNICODE_REVERSE_MASK = 240
+IS_TEXT_UNICODE_NOT_UNICODE_MASK = 3840
+IS_TEXT_UNICODE_NOT_ASCII_MASK = 61440
+COMPRESSION_FORMAT_NONE = (0)
+COMPRESSION_FORMAT_DEFAULT = (1)
+COMPRESSION_FORMAT_LZNT1 = (2)
+COMPRESSION_ENGINE_STANDARD = (0)
+COMPRESSION_ENGINE_MAXIMUM = (256)
+MESSAGE_RESOURCE_UNICODE = 1
+RTL_CRITSECT_TYPE = 0
+RTL_RESOURCE_TYPE = 1
+SEF_DACL_AUTO_INHERIT = 1
+SEF_SACL_AUTO_INHERIT = 2
+SEF_DEFAULT_DESCRIPTOR_FOR_OBJECT = 4
+SEF_AVOID_PRIVILEGE_CHECK = 8
+DLL_PROCESS_ATTACH = 1
+DLL_THREAD_ATTACH = 2
+DLL_THREAD_DETACH = 3
+DLL_PROCESS_DETACH = 0
+EVENTLOG_SEQUENTIAL_READ = 0X0001
+EVENTLOG_SEEK_READ = 0X0002
+EVENTLOG_FORWARDS_READ = 0X0004
+EVENTLOG_BACKWARDS_READ = 0X0008
+EVENTLOG_SUCCESS = 0X0000
+EVENTLOG_ERROR_TYPE = 1
+EVENTLOG_WARNING_TYPE = 2
+EVENTLOG_INFORMATION_TYPE = 4
+EVENTLOG_AUDIT_SUCCESS = 8
+EVENTLOG_AUDIT_FAILURE = 16
+EVENTLOG_START_PAIRED_EVENT = 1
+EVENTLOG_END_PAIRED_EVENT = 2
+EVENTLOG_END_ALL_PAIRED_EVENTS = 4
+EVENTLOG_PAIRED_EVENT_ACTIVE = 8
+EVENTLOG_PAIRED_EVENT_INACTIVE = 16
+KEY_QUERY_VALUE = (1)
+KEY_SET_VALUE = (2)
+KEY_CREATE_SUB_KEY = (4)
+KEY_ENUMERATE_SUB_KEYS = (8)
+KEY_NOTIFY = (16)
+KEY_CREATE_LINK = (32)
+KEY_READ = ((STANDARD_RIGHTS_READ       |\
+                                  KEY_QUERY_VALUE            |\
+                                  KEY_ENUMERATE_SUB_KEYS     |\
+                                  KEY_NOTIFY)                 \
+                                  &                           \
+                                 (~SYNCHRONIZE))
+KEY_WRITE = ((STANDARD_RIGHTS_WRITE      |\
+                                  KEY_SET_VALUE              |\
+                                  KEY_CREATE_SUB_KEY)         \
+                                  &                           \
+                                 (~SYNCHRONIZE))
+KEY_EXECUTE = ((KEY_READ)                   \
+                                  &                           \
+                                 (~SYNCHRONIZE))
+KEY_ALL_ACCESS = ((STANDARD_RIGHTS_ALL        |\
+                                  KEY_QUERY_VALUE            |\
+                                  KEY_SET_VALUE              |\
+                                  KEY_CREATE_SUB_KEY         |\
+                                  KEY_ENUMERATE_SUB_KEYS     |\
+                                  KEY_NOTIFY                 |\
+                                  KEY_CREATE_LINK)            \
+                                  &                           \
+                                 (~SYNCHRONIZE))
+REG_OPTION_RESERVED = (0)
+REG_OPTION_NON_VOLATILE = (0)
+REG_OPTION_VOLATILE = (1)
+REG_OPTION_CREATE_LINK = (2)
+REG_OPTION_BACKUP_RESTORE = (4)
+REG_OPTION_OPEN_LINK = (8)
+REG_LEGAL_OPTION = \
+                (REG_OPTION_RESERVED            |\
+                 REG_OPTION_NON_VOLATILE        |\
+                 REG_OPTION_VOLATILE            |\
+                 REG_OPTION_CREATE_LINK         |\
+                 REG_OPTION_BACKUP_RESTORE      |\
+                 REG_OPTION_OPEN_LINK)
+REG_CREATED_NEW_KEY = (1)
+REG_OPENED_EXISTING_KEY = (2)
+REG_WHOLE_HIVE_VOLATILE = (1)
+REG_REFRESH_HIVE = (2)
+REG_NO_LAZY_FLUSH = (4)
+REG_NOTIFY_CHANGE_NAME = (1)
+REG_NOTIFY_CHANGE_ATTRIBUTES = (2)
+REG_NOTIFY_CHANGE_LAST_SET = (4)
+REG_NOTIFY_CHANGE_SECURITY = (8)
+REG_LEGAL_CHANGE_FILTER = \
+                (REG_NOTIFY_CHANGE_NAME          |\
+                 REG_NOTIFY_CHANGE_ATTRIBUTES    |\
+                 REG_NOTIFY_CHANGE_LAST_SET      |\
+                 REG_NOTIFY_CHANGE_SECURITY)
+REG_NONE = ( 0 )
+REG_SZ = ( 1 )
+REG_EXPAND_SZ = ( 2 )
+REG_BINARY = ( 3 )
+REG_DWORD = ( 4 )
+REG_DWORD_LITTLE_ENDIAN = ( 4 )
+REG_DWORD_BIG_ENDIAN = ( 5 )
+REG_LINK = ( 6 )
+REG_MULTI_SZ = ( 7 )
+REG_RESOURCE_LIST = ( 8 )
+REG_FULL_RESOURCE_DESCRIPTOR = ( 9 )
+REG_RESOURCE_REQUIREMENTS_LIST = ( 10 )
+SERVICE_KERNEL_DRIVER = 1
+SERVICE_FILE_SYSTEM_DRIVER = 2
+SERVICE_ADAPTER = 4
+SERVICE_RECOGNIZER_DRIVER = 8
+SERVICE_DRIVER = (SERVICE_KERNEL_DRIVER | \
+                                        SERVICE_FILE_SYSTEM_DRIVER | \
+                                        SERVICE_RECOGNIZER_DRIVER)
+SERVICE_WIN32_OWN_PROCESS = 16
+SERVICE_WIN32_SHARE_PROCESS = 32
+SERVICE_WIN32 = (SERVICE_WIN32_OWN_PROCESS | \
+                                        SERVICE_WIN32_SHARE_PROCESS)
+SERVICE_INTERACTIVE_PROCESS = 256
+SERVICE_TYPE_ALL = (SERVICE_WIN32  | \
+                                        SERVICE_ADAPTER | \
+                                        SERVICE_DRIVER  | \
+                                        SERVICE_INTERACTIVE_PROCESS)
+SERVICE_BOOT_START = 0
+SERVICE_SYSTEM_START = 1
+SERVICE_AUTO_START = 2
+SERVICE_DEMAND_START = 3
+SERVICE_DISABLED = 4
+SERVICE_ERROR_IGNORE = 0
+SERVICE_ERROR_NORMAL = 1
+SERVICE_ERROR_SEVERE = 2
+SERVICE_ERROR_CRITICAL = 3
+TAPE_ERASE_SHORT = 0
+TAPE_ERASE_LONG = 1
+TAPE_LOAD = 0
+TAPE_UNLOAD = 1
+TAPE_TENSION = 2
+TAPE_LOCK = 3
+TAPE_UNLOCK = 4
+TAPE_FORMAT = 5
+TAPE_SETMARKS = 0
+TAPE_FILEMARKS = 1
+TAPE_SHORT_FILEMARKS = 2
+TAPE_LONG_FILEMARKS = 3
+TAPE_ABSOLUTE_POSITION = 0
+TAPE_LOGICAL_POSITION = 1
+TAPE_PSEUDO_LOGICAL_POSITION = 2
+TAPE_REWIND = 0
+TAPE_ABSOLUTE_BLOCK = 1
+TAPE_LOGICAL_BLOCK = 2
+TAPE_PSEUDO_LOGICAL_BLOCK = 3
+TAPE_SPACE_END_OF_DATA = 4
+TAPE_SPACE_RELATIVE_BLOCKS = 5
+TAPE_SPACE_FILEMARKS = 6
+TAPE_SPACE_SEQUENTIAL_FMKS = 7
+TAPE_SPACE_SETMARKS = 8
+TAPE_SPACE_SEQUENTIAL_SMKS = 9
+TAPE_DRIVE_FIXED = 1
+TAPE_DRIVE_SELECT = 2
+TAPE_DRIVE_INITIATOR = 4
+TAPE_DRIVE_ERASE_SHORT = 16
+TAPE_DRIVE_ERASE_LONG = 32
+TAPE_DRIVE_ERASE_BOP_ONLY = 64
+TAPE_DRIVE_ERASE_IMMEDIATE = 128
+TAPE_DRIVE_TAPE_CAPACITY = 256
+TAPE_DRIVE_TAPE_REMAINING = 512
+TAPE_DRIVE_FIXED_BLOCK = 1024
+TAPE_DRIVE_VARIABLE_BLOCK = 2048
+TAPE_DRIVE_WRITE_PROTECT = 4096
+TAPE_DRIVE_EOT_WZ_SIZE = 8192
+TAPE_DRIVE_ECC = 65536
+TAPE_DRIVE_COMPRESSION = 131072
+TAPE_DRIVE_PADDING = 262144
+TAPE_DRIVE_REPORT_SMKS = 524288
+TAPE_DRIVE_GET_ABSOLUTE_BLK = 1048576
+TAPE_DRIVE_GET_LOGICAL_BLK = 2097152
+TAPE_DRIVE_SET_EOT_WZ_SIZE = 4194304
+TAPE_DRIVE_EJECT_MEDIA = 16777216
+TAPE_DRIVE_RESERVED_BIT = -2147483648
+TAPE_DRIVE_LOAD_UNLOAD = -2147483647
+TAPE_DRIVE_TENSION = -2147483646
+TAPE_DRIVE_LOCK_UNLOCK = -2147483644
+TAPE_DRIVE_REWIND_IMMEDIATE = -2147483640
+TAPE_DRIVE_SET_BLOCK_SIZE = -2147483632
+TAPE_DRIVE_LOAD_UNLD_IMMED = -2147483616
+TAPE_DRIVE_TENSION_IMMED = -2147483584
+TAPE_DRIVE_LOCK_UNLK_IMMED = -2147483520
+TAPE_DRIVE_SET_ECC = -2147483392
+TAPE_DRIVE_SET_COMPRESSION = -2147483136
+TAPE_DRIVE_SET_PADDING = -2147482624
+TAPE_DRIVE_SET_REPORT_SMKS = -2147481600
+TAPE_DRIVE_ABSOLUTE_BLK = -2147479552
+TAPE_DRIVE_ABS_BLK_IMMED = -2147475456
+TAPE_DRIVE_LOGICAL_BLK = -2147467264
+TAPE_DRIVE_LOG_BLK_IMMED = -2147450880
+TAPE_DRIVE_END_OF_DATA = -2147418112
+TAPE_DRIVE_RELATIVE_BLKS = -2147352576
+TAPE_DRIVE_FILEMARKS = -2147221504
+TAPE_DRIVE_SEQUENTIAL_FMKS = -2146959360
+TAPE_DRIVE_SETMARKS = -2146435072
+TAPE_DRIVE_SEQUENTIAL_SMKS = -2145386496
+TAPE_DRIVE_REVERSE_POSITION = -2143289344
+TAPE_DRIVE_SPACE_IMMEDIATE = -2139095040
+TAPE_DRIVE_WRITE_SETMARKS = -2130706432
+TAPE_DRIVE_WRITE_FILEMARKS = -2113929216
+TAPE_DRIVE_WRITE_SHORT_FMKS = -2080374784
+TAPE_DRIVE_WRITE_LONG_FMKS = -2013265920
+TAPE_DRIVE_WRITE_MARK_IMMED = -1879048192
+TAPE_DRIVE_FORMAT = -1610612736
+TAPE_DRIVE_FORMAT_IMMEDIATE = -1073741824
+TAPE_DRIVE_HIGH_FEATURES = -2147483648
+TAPE_FIXED_PARTITIONS = 0
+TAPE_SELECT_PARTITIONS = 1
+TAPE_INITIATOR_PARTITIONS = 2
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winperf.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winperf.py
new file mode 100644
index 0000000..6724d537
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winperf.py
@@ -0,0 +1,140 @@
+# Generated by h2py from winperf.h
+PERF_DATA_VERSION = 1
+PERF_DATA_REVISION = 1
+PERF_NO_INSTANCES = -1
+PERF_SIZE_DWORD = 0x00000000
+PERF_SIZE_LARGE = 0x00000100
+PERF_SIZE_ZERO = 0x00000200
+PERF_SIZE_VARIABLE_LEN = 0x00000300
+PERF_TYPE_NUMBER = 0x00000000
+PERF_TYPE_COUNTER = 0x00000400
+PERF_TYPE_TEXT = 0x00000800
+PERF_TYPE_ZERO = 0x00000C00
+PERF_NUMBER_HEX = 0x00000000
+PERF_NUMBER_DECIMAL = 0x00010000
+PERF_NUMBER_DEC_1000 = 0x00020000
+PERF_COUNTER_VALUE = 0x00000000
+PERF_COUNTER_RATE = 0x00010000
+PERF_COUNTER_FRACTION = 0x00020000
+PERF_COUNTER_BASE = 0x00030000
+PERF_COUNTER_ELAPSED = 0x00040000
+PERF_COUNTER_QUEUELEN = 0x00050000
+PERF_COUNTER_HISTOGRAM = 0x00060000
+PERF_TEXT_UNICODE = 0x00000000
+PERF_TEXT_ASCII = 0x00010000
+PERF_TIMER_TICK = 0x00000000
+PERF_TIMER_100NS = 0x00100000
+PERF_OBJECT_TIMER = 0x00200000
+PERF_DELTA_COUNTER = 0x00400000
+PERF_DELTA_BASE = 0x00800000
+PERF_INVERSE_COUNTER = 0x01000000
+PERF_MULTI_COUNTER = 0x02000000
+PERF_DISPLAY_NO_SUFFIX = 0x00000000
+PERF_DISPLAY_PER_SEC = 0x10000000
+PERF_DISPLAY_PERCENT = 0x20000000
+PERF_DISPLAY_SECONDS = 0x30000000
+PERF_DISPLAY_NOSHOW = 0x40000000
+PERF_COUNTER_COUNTER = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_TIMER_TICK | PERF_DELTA_COUNTER | PERF_DISPLAY_PER_SEC)
+PERF_COUNTER_TIMER = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_TIMER_TICK | PERF_DELTA_COUNTER | PERF_DISPLAY_PERCENT)
+PERF_COUNTER_QUEUELEN_TYPE = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_QUEUELEN |\
+            PERF_TIMER_TICK | PERF_DELTA_COUNTER | PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_LARGE_QUEUELEN_TYPE = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_QUEUELEN |\
+            PERF_TIMER_TICK | PERF_DELTA_COUNTER | PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_BULK_COUNT = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_TIMER_TICK | PERF_DELTA_COUNTER | PERF_DISPLAY_PER_SEC)
+PERF_COUNTER_TEXT = \
+            (PERF_SIZE_VARIABLE_LEN | PERF_TYPE_TEXT | PERF_TEXT_UNICODE |\
+            PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_RAWCOUNT = \
+            (PERF_SIZE_DWORD | PERF_TYPE_NUMBER | PERF_NUMBER_DECIMAL |\
+            PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_LARGE_RAWCOUNT = \
+            (PERF_SIZE_LARGE | PERF_TYPE_NUMBER | PERF_NUMBER_DECIMAL |\
+            PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_RAWCOUNT_HEX = \
+            (PERF_SIZE_DWORD | PERF_TYPE_NUMBER | PERF_NUMBER_HEX |\
+            PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_LARGE_RAWCOUNT_HEX = \
+            (PERF_SIZE_LARGE | PERF_TYPE_NUMBER | PERF_NUMBER_HEX |\
+            PERF_DISPLAY_NO_SUFFIX)
+PERF_SAMPLE_FRACTION = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_FRACTION |\
+            PERF_DELTA_COUNTER | PERF_DELTA_BASE | PERF_DISPLAY_PERCENT)
+PERF_SAMPLE_COUNTER = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_TIMER_TICK | PERF_DELTA_COUNTER | PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_NODATA = \
+            (PERF_SIZE_ZERO | PERF_DISPLAY_NOSHOW)
+PERF_COUNTER_TIMER_INV = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_TIMER_TICK | PERF_DELTA_COUNTER | PERF_INVERSE_COUNTER | \
+            PERF_DISPLAY_PERCENT)
+PERF_SAMPLE_BASE = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_BASE |\
+            PERF_DISPLAY_NOSHOW |\
+            0x00000001)
+PERF_AVERAGE_TIMER = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_FRACTION |\
+            PERF_DISPLAY_SECONDS)
+PERF_AVERAGE_BASE = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_BASE |\
+            PERF_DISPLAY_NOSHOW |\
+            0x00000002)
+PERF_AVERAGE_BULK = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_FRACTION  |\
+            PERF_DISPLAY_NOSHOW)
+PERF_100NSEC_TIMER = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_TIMER_100NS | PERF_DELTA_COUNTER | PERF_DISPLAY_PERCENT)
+PERF_100NSEC_TIMER_INV = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_TIMER_100NS | PERF_DELTA_COUNTER | PERF_INVERSE_COUNTER  |\
+            PERF_DISPLAY_PERCENT)
+PERF_COUNTER_MULTI_TIMER = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_DELTA_COUNTER | PERF_TIMER_TICK | PERF_MULTI_COUNTER |\
+            PERF_DISPLAY_PERCENT)
+PERF_COUNTER_MULTI_TIMER_INV = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_RATE |\
+            PERF_DELTA_COUNTER | PERF_MULTI_COUNTER | PERF_TIMER_TICK |\
+            PERF_INVERSE_COUNTER | PERF_DISPLAY_PERCENT)
+PERF_COUNTER_MULTI_BASE = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_BASE |\
+            PERF_MULTI_COUNTER | PERF_DISPLAY_NOSHOW)
+PERF_100NSEC_MULTI_TIMER = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_DELTA_COUNTER  |\
+            PERF_COUNTER_RATE | PERF_TIMER_100NS | PERF_MULTI_COUNTER |\
+            PERF_DISPLAY_PERCENT)
+PERF_100NSEC_MULTI_TIMER_INV = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_DELTA_COUNTER  |\
+            PERF_COUNTER_RATE | PERF_TIMER_100NS | PERF_MULTI_COUNTER |\
+            PERF_INVERSE_COUNTER | PERF_DISPLAY_PERCENT)
+PERF_RAW_FRACTION = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_FRACTION |\
+            PERF_DISPLAY_PERCENT)
+PERF_RAW_BASE = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_BASE |\
+            PERF_DISPLAY_NOSHOW |\
+            0x00000003)
+PERF_ELAPSED_TIME = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_ELAPSED |\
+            PERF_OBJECT_TIMER | PERF_DISPLAY_SECONDS)
+PERF_COUNTER_HISTOGRAM_TYPE = -2147483648 # 0x80000000
+PERF_COUNTER_DELTA = \
+            (PERF_SIZE_DWORD | PERF_TYPE_COUNTER | PERF_COUNTER_VALUE |\
+            PERF_DELTA_COUNTER | PERF_DISPLAY_NO_SUFFIX)
+PERF_COUNTER_LARGE_DELTA = \
+            (PERF_SIZE_LARGE | PERF_TYPE_COUNTER | PERF_COUNTER_VALUE |\
+            PERF_DELTA_COUNTER | PERF_DISPLAY_NO_SUFFIX)
+PERF_DETAIL_NOVICE = 100
+PERF_DETAIL_ADVANCED = 200
+PERF_DETAIL_EXPERT = 300
+PERF_DETAIL_WIZARD = 400
+PERF_NO_UNIQUE_ID = -1
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winxptheme.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winxptheme.py
new file mode 100644
index 0000000..6ff7e41
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/lib/winxptheme.py
@@ -0,0 +1,23 @@
"""A useful wrapper around the "_winxptheme" module.

Unlike _winxptheme, this module will load on any version of Windows.

If _winxptheme is not available, then this module will have only 2 functions -
IsAppThemed() and IsThemeActive(), which will both always return False.

If _winxptheme is available, this module will have all methods in that module,
including real implementations of IsAppThemed() and IsThemeActive().
"""

import win32api
# Probe for the XP theme engine: if Uxtheme.dll loads, the real _winxptheme
# extension can be used; the probe handle is freed again immediately.
try:
    win32api.FreeLibrary(win32api.LoadLibrary("Uxtheme.dll"))
    # Life is good, everything is available.
    from _winxptheme import *
except win32api.error:
    # Probably not running XP - fall back to stubs that report "no themes".
    def IsAppThemed():
        # Stub: without Uxtheme.dll an app can never be themed.
        return False
    def IsThemeActive():
        # Stub: without Uxtheme.dll themes can never be active.
        return False

# win32api was needed only for the probe above; keep the namespace clean.
del win32api
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/libs/pywintypes.lib b/depot_tools/release/win/python_24/Lib/site-packages/win32/libs/pywintypes.lib
new file mode 100644
index 0000000..7e29071f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/libs/pywintypes.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/license.txt b/depot_tools/release/win/python_24/Lib/site-packages/win32/license.txt
new file mode 100644
index 0000000..dd05084
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/license.txt
@@ -0,0 +1,30 @@
+Unless stated in the specfic source file, this work is
+Copyright (c) 1994-2001, Mark Hammond 
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without 
+modification, are permitted provided that the following conditions 
+are met:
+
+Redistributions of source code must retain the above copyright notice, 
+this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright 
+notice, this list of conditions and the following disclaimer in 
+the documentation and/or other materials provided with the distribution.
+
+Neither name of Mark Hammond nor the name of contributors may be used 
+to endorse or promote products derived from this software without 
+specific prior written permission. 
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS
+IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/mmapfile.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/mmapfile.pyd
new file mode 100644
index 0000000..9d97d0ec
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/mmapfile.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/odbc.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/odbc.pyd
new file mode 100644
index 0000000..7c872be
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/odbc.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/perfmon.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/perfmon.pyd
new file mode 100644
index 0000000..03724bc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/perfmon.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/perfmondata.dll b/depot_tools/release/win/python_24/Lib/site-packages/win32/perfmondata.dll
new file mode 100644
index 0000000..c08bc482
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/perfmondata.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/pythonservice.exe b/depot_tools/release/win/python_24/Lib/site-packages/win32/pythonservice.exe
new file mode 100644
index 0000000..fb551686
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/pythonservice.exe
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/ControlService.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/ControlService.py
new file mode 100644
index 0000000..4ec4d2e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/ControlService.py
@@ -0,0 +1,255 @@
+# ControlService.py
+#
+# A simple app which duplicates some of the functionality in the
+# Services applet of the control panel.
+#
+# Suggested enhancements (in no particular order):
+#
+# 1. When changing the service status, continue to query the status
+# of the service until the status change is complete.  Use this
+# information to put up some kind of a progress dialog like the CP
+# applet does.  Unlike the CP, allow canceling out in the event that
+# the status change hangs.
+# 2. When starting or stopping a service with dependencies, alert
+# the user about the dependent services, then start (or stop) all
+# dependent services as appropriate.
+# 3. Allow toggling between service view and device view
+# 4. Allow configuration of other service parameters such as startup
+# name and password.
+# 5. Allow connection to remote SCMs.  This is just a matter of
+# reconnecting to the SCM on the remote machine; the rest of the
+# code should still work the same.
+# 6. Either implement the startup parameters or get rid of the editbox.
+# 7. Either implement or get rid of "H/W Profiles".
+# 8. Either implement or get rid of "Help".
+# 9. Improve error handling.  Ideally, this would also include falling
+# back to lower levels of functionality for users with less rights.
+# Right now, we always try to get all the rights and fail when we can't
+
+
+from pywin.mfc import dialog
+import win32ui
+import win32con
+import win32service
+
+class StartupDlg(dialog.Dialog):
+	# Dialog for viewing and changing the startup type (Boot/System/
+	# Automatic/Manual/Disabled) of a single service or device driver.
+
+	IDC_LABEL = 127
+	IDC_DEVICE = 128
+	IDC_BOOT = 129
+	IDC_SYSTEM = 130
+	IDC_AUTOMATIC = 131
+	IDC_MANUAL = 132
+	IDC_DISABLED = 133
+
+	def __init__(self, displayname, service):
+		# `service` is an open service handle; this dialog takes ownership
+		# and closes it in __del__.
+		dialog.Dialog.__init__(self, self.GetResource())
+		self.name = displayname
+		self.service = service
+
+	def __del__(self):
+		win32service.CloseServiceHandle(self.service)
+
+	def OnInitDialog(self):
+		cfg = win32service.QueryServiceConfig(self.service)
+		# cfg[1] is the start type (0=Boot .. 4=Disabled); the radio-button
+		# IDs are allocated contiguously from IDC_BOOT in the same order.
+		self.GetDlgItem(self.IDC_BOOT + cfg[1]).SetCheck(1)
+
+		status = win32service.QueryServiceStatus(self.service)
+		if ((status[0] & win32service.SERVICE_KERNEL_DRIVER) or
+				(status[0] & win32service.SERVICE_FILE_SYSTEM_DRIVER)):
+			# driver
+			self.GetDlgItem(self.IDC_LABEL).SetWindowText('Device:')
+		else:
+			# service
+			self.GetDlgItem(self.IDC_LABEL).SetWindowText('Service:')
+			# Boot/System start types only make sense for drivers.
+			self.GetDlgItem(self.IDC_BOOT).EnableWindow(0)
+			self.GetDlgItem(self.IDC_SYSTEM).EnableWindow(0)
+		self.GetDlgItem(self.IDC_DEVICE).SetWindowText(str(self.name))
+
+		return dialog.Dialog.OnInitDialog(self)
+
+	def OnOK(self):
+		self.BeginWaitCursor()
+		# Map the checked radio button back to the SCM start-type constant.
+		starttype = self.GetCheckedRadioButton(self.IDC_BOOT, self.IDC_DISABLED) - self.IDC_BOOT
+		try:
+			win32service.ChangeServiceConfig(self.service, win32service.SERVICE_NO_CHANGE, starttype,
+				win32service.SERVICE_NO_CHANGE, None, None, 0, None, None, None, None)
+		except:
+			self.MessageBox('Unable to change startup configuration', None,
+				win32con.MB_ICONEXCLAMATION)
+		self.EndWaitCursor()
+		return dialog.Dialog.OnOK(self)
+
+	def GetResource(self):
+		# Build the dialog template in memory (no .rc resource required).
+		style = win32con.WS_POPUP | win32con.DS_SETFONT | win32con.WS_SYSMENU | win32con.WS_CAPTION | win32con.WS_VISIBLE | win32con.DS_MODALFRAME
+		exstyle = None
+		t = [["Service Startup", (6, 18, 188, 107), style, exstyle, (8, 'MS Shell Dlg')], ]
+		t.append([130, "Device:", self.IDC_LABEL, (6, 7, 40, 8), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.SS_LEFT])
+		t.append([130, "", self.IDC_DEVICE, (48, 7, 134, 8), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.SS_LEFT])
+		t.append([128, "Startup Type", -1, (6, 21, 130, 80), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_GROUP | win32con.BS_GROUPBOX])
+		t.append([128, "&Boot", self.IDC_BOOT, (12, 33, 39, 10), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_AUTORADIOBUTTON])
+		t.append([128, "&System", self.IDC_SYSTEM, (12, 46, 39, 10), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_AUTORADIOBUTTON])
+		t.append([128, "&Automatic", self.IDC_AUTOMATIC, (12, 59, 118, 10), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_AUTORADIOBUTTON])
+		t.append([128, "&Manual", self.IDC_MANUAL, (12, 72, 118, 10), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_AUTORADIOBUTTON])
+		t.append([128, "&Disabled", self.IDC_DISABLED, (12, 85, 118, 10), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_AUTORADIOBUTTON])
+		t.append([128, "OK", win32con.IDOK, (142, 25, 40, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.WS_GROUP | win32con.BS_DEFPUSHBUTTON])
+		t.append([128, "Cancel", win32con.IDCANCEL, (142, 43, 40, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([128, "&Help", win32con.IDHELP, (142, 61, 40, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		return t
+
+class ServiceDlg(dialog.Dialog):
+
+	IDC_LIST = 128
+	IDC_START = 129
+	IDC_STOP = 130
+	IDC_PAUSE = 131
+	IDC_CONTINUE = 132
+	IDC_STARTUP = 133
+	IDC_PROFILES = 134
+	IDC_PARAMS = 135
+
+	def __init__(self, machineName = ''):
+		dialog.Dialog.__init__(self, self.GetResource())
+		self.HookCommand(self.OnListEvent, self.IDC_LIST)
+		self.HookCommand(self.OnStartCmd, self.IDC_START)
+		self.HookCommand(self.OnStopCmd, self.IDC_STOP)
+		self.HookCommand(self.OnPauseCmd, self.IDC_PAUSE)
+		self.HookCommand(self.OnContinueCmd, self.IDC_CONTINUE)
+		self.HookCommand(self.OnStartupCmd, self.IDC_STARTUP)
+		self.machineName = machineName
+		self.scm = win32service.OpenSCManager(self.machineName, None, win32service.SC_MANAGER_ALL_ACCESS)
+
+	def __del__(self):
+		win32service.CloseServiceHandle(self.scm)
+
+	def OnInitDialog(self):
+		self.listCtrl = self.GetDlgItem(self.IDC_LIST)
+		self.listCtrl.SetTabStops([158, 200])
+		if self.machineName:
+			self.SetWindowText("Services on %s" % self.machineName)
+		self.ReloadData()
+		return dialog.Dialog.OnInitDialog(self)
+
+	def ReloadData(self):
+		service = self.GetSelService()
+		self.listCtrl.SetRedraw(0)
+		self.listCtrl.ResetContent()
+		svcs = win32service.EnumServicesStatus(self.scm)
+		i = 0
+		self.data = []
+		for svc in svcs:
+			try:
+				status = ('Unknown', 'Stopped', 'Starting', 'Stopping', 'Running',
+					'Continuing', 'Pausing', 'Paused')[svc[2][1]]
+			except:
+				status = 'Unknown'
+			s = win32service.OpenService(self.scm, svc[0], win32service.SERVICE_ALL_ACCESS)
+			cfg = win32service.QueryServiceConfig(s)
+			try:
+				startup = ('Boot', 'System', 'Automatic', 'Manual', 'Disabled')[cfg[1]]
+			except:
+				startup = 'Unknown'
+			win32service.CloseServiceHandle(s)
+
+			# svc[2][2] control buttons
+			pos = self.listCtrl.AddString(str(svc[1]) + '\t' + status + '\t' + startup)
+			self.listCtrl.SetItemData(pos, i)
+			self.data.append(tuple(svc[2]) + (svc[1], svc[0], ))
+			i = i + 1
+
+			if service and service[1] == svc[0]:
+				self.listCtrl.SetCurSel(pos)
+		self.OnListEvent(self.IDC_LIST, win32con.LBN_SELCHANGE)
+		self.listCtrl.SetRedraw(1)
+
+ 	def OnListEvent(self, id, code):
+		if code == win32con.LBN_SELCHANGE or code == win32con.LBN_SELCANCEL:
+			pos = self.listCtrl.GetCurSel()
+			if pos >= 0:
+				data = self.data[self.listCtrl.GetItemData(pos)][2]
+				canstart = (self.data[self.listCtrl.GetItemData(pos)][1] == win32service.SERVICE_STOPPED)
+			else:
+				data = 0
+				canstart = 0
+			self.GetDlgItem(self.IDC_START).EnableWindow(canstart)
+			self.GetDlgItem(self.IDC_STOP).EnableWindow(data & win32service.SERVICE_ACCEPT_STOP)
+			self.GetDlgItem(self.IDC_PAUSE).EnableWindow(data & win32service.SERVICE_ACCEPT_PAUSE_CONTINUE)
+			self.GetDlgItem(self.IDC_CONTINUE).EnableWindow(data & win32service.SERVICE_ACCEPT_PAUSE_CONTINUE)
+
+	def GetSelService(self):
+		pos = self.listCtrl.GetCurSel()
+		if pos < 0:
+			return None
+		pos = self.listCtrl.GetItemData(pos)
+		return self.data[pos][-2:]
+
+ 	def OnStartCmd(self, id, code):
+		service = self.GetSelService()
+		if not service:
+			return
+		s = win32service.OpenService(self.scm, service[1], win32service.SERVICE_ALL_ACCESS)
+		win32service.StartService(s, None)
+		win32service.CloseServiceHandle(s)
+		self.ReloadData()
+
+ 	def OnStopCmd(self, id, code):
+		service = self.GetSelService()
+		if not service:
+			return
+		s = win32service.OpenService(self.scm, service[1], win32service.SERVICE_ALL_ACCESS)
+		win32service.ControlService(s, win32service.SERVICE_CONTROL_STOP)
+		win32service.CloseServiceHandle(s)
+		self.ReloadData()
+
+ 	def OnPauseCmd(self, id, code):
+		service = self.GetSelService()
+		if not service:
+			return
+		s = win32service.OpenService(self.scm, service[1], win32service.SERVICE_ALL_ACCESS)
+		win32service.ControlService(s, win32service.SERVICE_CONTROL_PAUSE)
+		win32service.CloseServiceHandle(s)
+		self.ReloadData()
+
+ 	def OnContinueCmd(self, id, code):
+		service = self.GetSelService()
+		if not service:
+			return
+		s = win32service.OpenService(self.scm, service[1], win32service.SERVICE_ALL_ACCESS)
+		win32service.ControlService(s, win32service.SERVICE_CONTROL_CONTINUE)
+		win32service.CloseServiceHandle(s)
+		self.ReloadData()
+
+ 	def OnStartupCmd(self, id, code):
+		service = self.GetSelService()
+		if not service:
+			return
+		s = win32service.OpenService(self.scm, service[1], win32service.SERVICE_ALL_ACCESS)
+		if StartupDlg(service[0], s).DoModal() == win32con.IDOK:
+			self.ReloadData()
+
+	def GetResource(self):
+		style = win32con.WS_POPUP | win32con.DS_SETFONT | win32con.WS_SYSMENU | win32con.WS_CAPTION | win32con.WS_VISIBLE | win32con.DS_MODALFRAME
+		exstyle = None
+		t = [["Services", (16, 16, 333, 157), style, exstyle, (8, 'MS Shell Dlg')], ]
+		t.append([130, "Ser&vice", -1, (6, 6, 70, 8), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.SS_LEFT])
+		t.append([130, "Status", -1, (164, 6, 42, 8), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.SS_LEFT])
+		t.append([130, "Startup", -1, (206, 6, 50, 8), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.SS_LEFT])
+		t.append([131, "", self.IDC_LIST, (6, 16, 255, 106), win32con.LBS_USETABSTOPS | win32con.LBS_SORT | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_BORDER | win32con.WS_CHILD | win32con.WS_VISIBLE | win32con.WS_TABSTOP | win32con.LBS_NOTIFY | win32con.WS_VSCROLL])
+		t.append([128, "Close", win32con.IDOK, (267, 6, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_GROUP | win32con.WS_TABSTOP | win32con.BS_DEFPUSHBUTTON])
+		t.append([128, "&Start", self.IDC_START, (267, 27, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([128, "S&top", self.IDC_STOP, (267, 44, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([128, "&Pause", self.IDC_PAUSE, (267, 61, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([128, "&Continue", self.IDC_CONTINUE, (267, 78, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([128, "Sta&rtup...", self.IDC_STARTUP, (267, 99, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([128, "H&W Profiles...", self.IDC_PROFILES, (267, 116, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([128, "&Help", win32con.IDHELP, (267, 137, 60, 14), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_TABSTOP | win32con.BS_PUSHBUTTON])
+		t.append([130, "St&artup Parameters:", -1, (6, 128, 70, 8), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.SS_LEFT])
+		t.append([129, "", self.IDC_PARAMS, (6, 139, 247, 12), win32con.WS_VISIBLE | win32con.WS_CHILD | win32con.WS_GROUP | win32con.WS_BORDER | win32con.ES_AUTOHSCROLL])
+		return t
+
+if __name__=='__main__':
+	import sys
+	# Optional argument: the name of a remote machine whose SCM to manage.
+	machine = ''
+	if len(sys.argv)>1:
+		machine = sys.argv[1]
+	ServiceDlg(machine).DoModal()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/BrandProject.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/BrandProject.py
new file mode 100644
index 0000000..506e459
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/BrandProject.py
@@ -0,0 +1,81 @@
+# BrandProject.py
+#
+# Brand a VSS project with a "build number", then optionally
+# stamp DLL/EXE files with version information.
+
+import win32api, os, string, sys
+import vssutil
+import bulkstamp
+
+
+def BrandProject(vssProjectName, descFile, stampPath, filesToSubstitute, buildDesc = None, auto=0, bRebrand = 0):
+	# vssProjectName -- The name of the VSS project to brand.
+	# descFile -- A test file containing descriptions of the files in the release.
+	# stampPath -- The full path to where the files referenced in descFile can be found.
+	path=win32api.GetFullPathName(stampPath)
+	
+	build = vssutil.MakeNewBuildNo(vssProjectName, buildDesc, auto, bRebrand)
+	if build is None:
+		print "Cancelled"
+		return
+
+	bulkstamp.scan( build, stampPath, descFile )
+	for infile, outfile in filesToSubstitute:
+		SubstituteVSSInFile(vssProjectName, infile, outfile)
+	return 1
+
+def usage(msg):
+	# Print an error message followed by the command-line help text, then
+	# exit with a non-zero status.
+	print msg
+	print """\
+%s Usage:
+%s [options] vssProject descFile stampPath
+
+Automatically brand a VSS project with an automatically incremented
+build number, and stamp DLL/EXE files with the build number.
+
+Checks that no files are checked out in the project, and finds the last
+build number, and suggests the next number.
+
+Options:
+-a     - Auto increment the build number, and brand (otherwise prompt
+         for the build number after looking for the previous)
+-r     - Restamp the files with the existing build number.
+-d     - A description for the VSS Label.
+-f infile=outfile - Substitute special VSS labels in the specified text
+                    file with the text extracted from VSS.
+""" % (os.path.basename(sys.argv[0]), os.path.basename(sys.argv[0]))
+	sys.exit(1)
+
+if __name__=='__main__':
+	try:
+		import getopt
+		opts, args = getopt.getopt(sys.argv[1:], "af:d:r")
+	except getopts.error, msg:
+		usage(msg)
+	bAuto = bRebrand = 0
+	stampFiles = []
+	desc = None
+	for opt, val in opts:
+		if opt == '-a':
+			bAuto = 1
+		if opt == '-f':
+			infile, outfile = string.split(val, "=", 2)
+			stampFiles.append((infile, outfile))
+		if opt == '-d':
+			desc = val
+		if opt == '-r':
+			bRebrand = 1
+	if len(args)<3:
+		usage("You must specify the required arguments")
+	vssProjectName = "$\\" + args[0]
+	descFile = args[1]
+	path = args[2]
+	try:
+		os.stat(descFile)
+	except IOError:
+		usage("The description file '%s' can not be found" % (descFile))
+	if not os.path.isdir(path):
+		usage("The path to the files to stamp '%s' does not exist" % (path))
+
+	BrandProject(vssProjectName, descFile, path, stampFiles, desc, bAuto, bRebrand)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/bulkstamp.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/bulkstamp.py
new file mode 100644
index 0000000..999f8374
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/bulkstamp.py
@@ -0,0 +1,137 @@
+#
+# bulkstamp.py:
+#    Stamp versions on all files that can be found in a given tree.
+#
+# USAGE: python bulkstamp.py <version> <root directory> <descriptions>
+#
+# Example: python bulkstamp.py 103 ..\win32\Build\ desc.txt
+#
+# <version> corresponds to the build number. It will be concatenated with
+# the major and minor version numbers found in the description file.
+#
+# Description information is pulled from an input text file with lines of
+# the form:
+#
+#    <basename> <white space> <description>
+#
+# For example:
+#
+#    PyWinTypes.dll Common types for Python on Win32
+#    etc
+#
+# The product's name, major, and minor versions are specified as:
+#
+#    name <white space> <value>
+#    major <white space> <value>
+#    minor <white space> <value>
+#
+# The tags are case-sensitive.
+#
+# Any line beginning with "#" will be ignored. Empty lines are okay.
+#
+
+import sys
+import os
+import verstamp
+import fnmatch
+import string
+import win32api
+
+numStamped = 0
+
+g_patterns = [
+  '*.dll',
+  '*.pyd',
+  '*.exe',
+  '*.ocx',
+  ]
+
+
+def walk(arg, dirname, names):
+  # os.path.walk callback: version-stamp every file in `names` that matches
+  # one of g_patterns and has an entry in the descriptions map.
+  # arg is the (vars, debug, descriptions) tuple built by scan().
+  global numStamped
+  vars, debug, descriptions = arg
+  for name in names:
+    for pat in g_patterns:
+      if fnmatch.fnmatch(name, pat):
+        # Handle the "_d" thing.
+        # Debug builds are named "foo_d.pyd" etc: look them up in the
+        # descriptions under the release name, but stamp the original
+        # (pathname) file.
+        pathname = os.path.join(dirname, name)
+        base, ext = os.path.splitext(name)
+        if base[-2:]=='_d':
+          name = base[:-2] + ext
+        is_dll = ext.lower() != ".exe"
+        if descriptions.has_key(os.path.normcase(name)):
+          desc = descriptions[os.path.normcase(name)]
+          try:
+            verstamp.stamp(vars, pathname, desc, is_dll=is_dll)
+            numStamped = numStamped + 1
+          except win32api.error, (hr, func, desc):
+            print "Could not stamp", pathname, "Error", hr, "-", desc
+        else:
+          print 'WARNING: description not provided for:', name
+          # skip branding this - assume already branded or handled elsewhere
+#        print "Stamped", pathname
+
+def load_descriptions(fname, vars):
+  retvars = {}
+  descriptions = { }
+
+  lines = open(fname, 'r').readlines()
+
+  for i in range(len(lines)):
+    line = string.strip(lines[i])
+    if line != '' and line[0] != '#':
+      idx1 = string.find(line, ' ')
+      idx2 = string.find(line, '\t')
+      if idx1 == -1 or idx2 < idx1:
+        idx1 = idx2
+      if idx1 == -1:
+        print 'ERROR: bad syntax in description file at line %d.' % (i+1)
+        sys.exit(1)
+
+      key = line[:idx1]
+      val = string.strip(line[idx1:])
+      if key in vars:
+        retvars[key] = val
+      else:
+        descriptions[key] = val
+
+  if not retvars.has_key('product'):
+    print 'ERROR: description file is missing the product name.'
+    sys.exit(1)
+  if not retvars.has_key('major'):
+    print 'ERROR: description file is missing the major version number.'
+    sys.exit(1)
+  if not retvars.has_key('minor'):
+    print 'ERROR: description file is missing the minor version number.'
+    sys.exit(1)
+
+  return retvars, descriptions
+
+def scan(build, root, desc, **custom_vars ):
+  # Stamp all matching files under `root` with version info built from the
+  # description file `desc` and the given build number (a decimal string).
+  # Extra keyword arguments override/extend the version variables.
+  global numStamped
+  numStamped = 0
+  try:
+    build = string.atoi(build)
+  except ValueError:
+    print 'ERROR: build number is not a number: %s' % build
+    sys.exit(1)
+
+  debug = 0	### maybe fix this one day
+
+  varList = ['major', 'minor', 'sub', 'company', 'copyright', 'trademarks', 'product']
+
+  vars, descriptions = load_descriptions(desc, varList)
+  vars['build'] = build
+  vars.update(custom_vars)
+
+  arg = vars, debug, descriptions
+  os.path.walk(root, walk, arg)
+
+  print "Stamped %d files." % (numStamped)
+
+if __name__ == '__main__':
+  if len(sys.argv) != 4:
+    print "ERROR: incorrect invocation. See script's header comments."
+    sys.exit(1)
+
+  # Spread (version, root, descfile) onto scan's positional parameters.
+  apply(scan, tuple(sys.argv[1:]))
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/vssutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/vssutil.py
new file mode 100644
index 0000000..e823f911
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/VersionStamp/vssutil.py
@@ -0,0 +1,170 @@
+import win32con, string, traceback
+import win32com.client, win32com.client.gencache
+import pythoncom
+import time
+import os
+
+constants = win32com.client.constants
+
+win32com.client.gencache.EnsureModule('{783CD4E0-9D54-11CF-B8EE-00608CC9A71F}', 0, 5, 0)
+
+error = "vssutil error"
+
+def GetSS():
+	# Open and return a connection to the SourceSafe COM automation object.
+	ss=win32com.client.Dispatch("SourceSafe")
+	# SS seems a bit weird.  It defaults the arguments as empty strings, but
+	# then complains when they are used - so we pass "Missing"
+	ss.Open(pythoncom.Missing, pythoncom.Missing, pythoncom.Missing)
+	return ss
+
+def test(projectName):
+	# Debug helper - dump every recursive version entry for the project.
+	ss=GetSS()
+	project = ss.VSSItem(projectName)
+
+	for item in project.GetVersions(constants.VSSFLAG_RECURSYES):
+		print item.VSSItem.Name, item.VersionNumber, item.Action
+		
+
+#	item=i.Versions[0].VSSItem
+#	for h in i.Versions:
+#		print `h.Comment`, h.Action, h.VSSItem.Name
+	
+
+def SubstituteInString(inString, evalEnv):
+	# Replace $expression$ fields in inString with the result of eval'ing
+	# the expression in evalEnv (a (globals, locals) pair).  Fields that
+	# fail to evaluate are kept verbatim after printing the traceback.
+	# NOTE(review): uses eval on template content - only run on trusted
+	# input files.
+	substChar = "$"
+	fields = string.split(inString, substChar)
+	newFields = []
+	for i in range(len(fields)):
+		didSubst = 0
+		strVal = fields[i]
+		# Odd-numbered fields are the text between $...$ pairs.
+		if i%2!=0:
+			try:
+				strVal = eval(strVal,evalEnv[0], evalEnv[1])
+				newFields.append(strVal)
+				didSubst = 1
+			except:
+				traceback.print_exc()
+				print "Could not substitute", strVal
+		if not didSubst:
+			newFields.append(strVal)
+	return string.join(map(str, newFields), "")
+
+def SubstituteInFile(inName, outName, evalEnv):
+	# Run SubstituteInString over the contents of inName, writing the
+	# result to outName.
+	inFile = open(inName, "r")
+	try:
+		outFile = open(outName, "w")
+		try:
+			while 1:
+				# NOTE: read() with no size returns the entire file, so this
+				# loop executes once; the name "line" is historical.
+				line = inFile.read()
+				if not line: break
+				outFile.write(SubstituteInString(line, evalEnv))
+		finally:
+			outFile.close()
+	finally:
+		inFile.close()
+
+def VssLog(project, linePrefix = "", noLabels = 5, maxItems=150):
+	# Build a change-log string from the project's recursive version
+	# history, stopping after noLabels label entries or maxItems total
+	# entries.  Each line gets linePrefix; comments follow on an indented
+	# continuation line.
+	lines = []
+	num = 0
+	labelNum = 0
+	for i in project.GetVersions(constants.VSSFLAG_RECURSYES):
+		num = num + 1
+		if num > maxItems : break
+		commentDesc = itemDesc = ""
+		# Skip "Added ..." entries - they are just file-creation noise.
+		if i.Action[:5]=="Added":
+			continue
+		if len(i.Label):
+			labelNum = labelNum + 1
+			itemDesc = i.Action
+		else:
+			itemDesc = i.VSSItem.Name
+			# Ignore MSVC project (.dsp) file churn.
+			if str(itemDesc[-4:])==".dsp":
+				continue
+		if i.Comment:
+			commentDesc ="\n%s\t%s" % (linePrefix, i.Comment)
+		lines.append("%s%s\t%s%s" % (linePrefix, time.asctime(time.localtime(int(i.Date))), itemDesc, commentDesc))
+		if labelNum > noLabels:
+			break
+	return string.join(lines,"\n")
+	
+def SubstituteVSSInFile(projectName, inName, outName):
+	# Substitute $...$ expressions in inName using details from the last
+	# labelled version of the VSS project, writing the result to outName.
+	import win32api
+	if win32api.GetFullPathName(inName)==win32api.GetFullPathName(outName):
+		raise RuntimeError, "The input and output filenames can not be the same"
+	sourceSafe=GetSS()
+	project = sourceSafe.VSSItem(projectName)
+	# Find the last label
+	label = None
+	for version in project.Versions:
+		if version.Label:
+			break
+	else:
+		print "Couldnt find a label in the sourcesafe project!"
+		return
+	# Setup some local helpers for the conversion strings.
+	# These locals look unused, but templates reference them by name via
+	# the (locals(), globals()) environment handed to SubstituteInFile.
+	vss_label = version.Label
+	vss_date = time.asctime(time.localtime(int(version.Date)))
+	now = time.asctime(time.localtime(time.time()))
+	SubstituteInFile(inName, outName, (locals(),globals()))
+	
+			
+def CountCheckouts(item):
+	# Recursively count the checked-out files under a VSS item.
+	num = 0
+	if item.Type==constants.VSSITEM_PROJECT:
+		for sub in item.Items:
+			num = num + CountCheckouts(sub)
+	else:
+		if item.IsCheckedOut:
+			num = num + 1
+	return num
+
+def GetLastBuildNo(project):
+	# Return the most recent label on the project, or None if unlabelled.
+	i = GetSS().VSSItem(project)
+	# Find the last label
+	lab = None
+	for version in i.Versions:
+		lab = str(version.Label)
+		if lab: return lab
+	return None
+
+def MakeNewBuildNo(project, buildDesc = None, auto=0, bRebrand = 0):
+	# Label the VSS project with the next build number.  Warns via a
+	# message box if any files are checked out.  Unless `auto`, prompts the
+	# user to confirm/edit the number; with `bRebrand`, re-applies the
+	# existing number instead of incrementing.  Returns the new build
+	# number string, or None if the user cancelled.
+	if buildDesc is None: buildDesc = "Created by Python"
+	ss = GetSS()
+	i = ss.VSSItem(project)
+	num = CountCheckouts(i)
+	if num > 0:
+		msg = "This project has %d items checked out\r\n\r\nDo you still want to continue?" % num
+		import win32ui
+		if win32ui.MessageBox(msg, project, win32con.MB_YESNO) != win32con.IDYES:
+			return
+
+
+	oldBuild = buildNo = GetLastBuildNo(project)
+	if buildNo is None:
+		buildNo = "1"
+		oldBuild = "<None>"
+	else:
+		try:
+			buildNo = string.atoi(buildNo)
+			if not bRebrand: buildNo = buildNo + 1
+			buildNo = str(buildNo)
+		except ValueError:
+			raise error, "The previous label could not be incremented: %s" % (oldBuild)
+
+	if not auto:
+		from pywin.mfc import dialog
+		buildNo = dialog.GetSimpleInput("Enter new build number", buildNo, "%s - Prev: %s" % (project, oldBuild))
+		if buildNo is None: return
+	i.Label(buildNo, "Build %s: %s" % (buildNo,buildDesc))
+	if auto:
+		print "Branded project %s with label %s" % (project, buildNo)
+	return buildNo
+
+if __name__=='__main__':
+	# Ad-hoc test harness with hard-coded developer paths.
+#	UpdateWiseExeName("PyWiseTest.wse", "PyWiseTest-10.exe")
+
+#	MakeVersion()
+#	test(tp)
+#	MakeNewBuildNo(tp)
+	tp = "\\Python\\Python Win32 Extensions"
+	SubstituteVSSInFile(tp, "d:\\src\\pythonex\\win32\\win32.txt", "d:\\temp\\win32.txt")
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/backupEventLog.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/backupEventLog.py
new file mode 100644
index 0000000..a15626a9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/backupEventLog.py
@@ -0,0 +1,40 @@
+
+# Generate a base file name
+import time, os
+import win32api
+import win32evtlog
+
+def BackupClearLog(logType):
+	datePrefix = time.strftime("%Y%m%d", time.localtime(time.time()))
+	fileExists = 1
+	retry = 0
+	while fileExists:
+		if retry == 0:
+			index = ""
+		else:
+			index = "-%d" % retry
+		try:
+			fname = os.path.join(win32api.GetTempPath(), "%s%s-%s" % (datePrefix, index, logType) + ".evt")
+			os.stat(fname)
+		except os.error:
+			fileExists = 0
+		retry = retry + 1
+	# OK - have unique file name.
+	try:
+		hlog = win32evtlog.OpenEventLog(None, logType)
+	except win32evtlogutil.error, details:
+		print "Could not open the event log", details
+		return
+	try:
+		if win32evtlog.GetNumberOfEventLogRecords(hlog)==0:
+			print "No records in event log %s - not backed up" % logType
+			return
+		win32evtlog.ClearEventLog(hlog, fname)
+		print "Backed up %s log to %s" % (logType, fname)
+	finally:
+		win32evtlog.CloseEventLog(hlog)
+		
+if __name__=='__main__':
+	# Back up and clear the three standard NT event logs.
+	BackupClearLog("Application")
+	BackupClearLog("System")
+	BackupClearLog("Security")
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/ce/pysynch.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/ce/pysynch.py
new file mode 100644
index 0000000..2aa6001
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/ce/pysynch.py
@@ -0,0 +1,247 @@
+# Simple CE synchronisation utility with Python features.
+
+import wincerapi
+import win32api
+import win32file
+import getopt
+import sys
+import os
+import string
+import win32con
+import fnmatch
+
# Raised for bad command-line arguments; main() catches it and prints
# the failing command's docstring.
class InvalidUsage(Exception): pass

def print_error(api_exc, msg):
    # Report a win32api.error tuple (hr, function-name, message) on stdout.
    hr, fn, errmsg = api_exc
    print "%s - %s(%d)" % (msg, errmsg, hr)

def GetFileAttributes(file, local=1):
    # Attribute query against either the local PC (default) or the CE device.
    if local: return win32api.GetFileAttributes(file)
    else: return wincerapi.CeGetFileAttributes(file)

def FindFiles(spec, local=1):
    # Wildcard directory scan against either the local PC or the CE device.
    if local: return win32api.FindFiles(spec)
    else: return wincerapi.CeFindFiles(spec)

def isdir(name, local=1):
    # True if "name" exists and is a directory (locally or on the device).
    # Any API error (eg, path does not exist) is treated as "not a directory".
    try:
        attr = GetFileAttributes(name, local)
        return attr & win32con.FILE_ATTRIBUTE_DIRECTORY
    except win32api.error:
        return 0
+
def CopyFileToCe(src_name, dest_name, progress = None):
    """Copy a local file to the CE device, returning the number of bytes copied.

    progress, if not None, is called after each 2048 byte chunk with the
    total number of bytes written so far.
    """
    sh = win32file.CreateFile(src_name, win32con.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None)
    bytes=0
    try:
        dh = wincerapi.CeCreateFile(dest_name, win32con.GENERIC_WRITE, 0, None, win32con.OPEN_ALWAYS, 0, None)
        try:
            while 1:
                hr, data = win32file.ReadFile(sh, 2048)
                if not data:
                    break
                wincerapi.CeWriteFile(dh, data)
                bytes = bytes + len(data)
                if progress is not None: progress(bytes)
        finally:
            # (Removed a stray dead "pass" statement that sat before the close.)
            dh.Close()
    finally:
        sh.Close()
    return bytes
+
def BuildFileList(spec, local, recurse, filter, filter_args, recursed_path = ""):
    """Expand a file spec into a list of (full_name, info, filter_data) tuples.

    spec is either a directory (implying "*") or a path ending in a
    wildcard pattern; local selects the PC vs the CE device APIs.  Every
    match is passed through filter(full_name, rel_name, info, local,
    filter_args); matches for which the filter returns None are dropped.
    With recurse true, sub-directories are walked, and recursed_path
    accumulates the relative path from the original root.
    """
    files = []
    if isdir(spec, local):
        path = spec
        raw_spec = "*"
    else:
        path, raw_spec = os.path.split(spec)
    if recurse:
        # Need full scan, to get sub-directories.
        infos = FindFiles(os.path.join(path, "*"), local)
    else:
        infos = FindFiles(os.path.join(path, raw_spec), local)
    for info in infos:
        # info is a WIN32_FIND_DATA-style tuple; item 8 is the file name.
        src_name = str(info[8])
        full_src_name = os.path.join(path, src_name)
        if local: # Can't do this for CE!
            full_src_name = win32api.GetFullPathName(full_src_name)
        if isdir(full_src_name, local) :
            if recurse and src_name not in ['.','..']:
                new_spec = os.path.join(full_src_name, raw_spec)
                files = files + BuildFileList(new_spec, local, 1, filter, filter_args, os.path.join(recursed_path, src_name))
        if fnmatch.fnmatch(src_name, raw_spec):
            rel_name = os.path.join(recursed_path, src_name)
            filter_data = filter( full_src_name, rel_name, info, local, filter_args )
            if filter_data is not None:
                files.append( (full_src_name, info, filter_data) )
    return files
+
def _copyfilter(full_name, rel_name, info, local, bMaintainDir):
    # BuildFileList filter for "copy": skip directories, and choose the
    # destination name - relative path, or flattened base name.
    if isdir(full_name, local):
        return None
    if not bMaintainDir:
        return os.path.split(rel_name)[1]
    return rel_name
+
import pywin.dialogs.status, win32ui

# Progress dialog whose callback updates a KB-scaled progress bar as
# CopyFileToCe reports bytes written (copy() sets the range via Set(0, size/1024)).
class FileCopyProgressDialog(pywin.dialogs.status.CStatusProgressDialog):
    def CopyProgress(self, bytes):
        # bytes is the running total written so far; the bar is in KB.
        self.Set(bytes/1024)
+
+def copy( args ):
+    """copy src [src ...],  dest
+    Copy files to/from the CE device
+    """
+    bRecurse = bVerbose = 0
+    bMaintainDir = 1
+    try:
+        opts, args = getopt.getopt(args, "rv")
+    except getopt.error, details:
+        raise InvalidUsage(details)
+    for o, v in opts:
+        if o=="-r":
+            bRecuse=1
+        elif o=='-v':
+            bVerbose=1
+
+    if len(args)<2:
+        raise InvalidUsage("Must specify a source and destination")
+
+    src = args[:-1]
+    dest = args[-1]
+    # See if WCE: leading anywhere indicates a direction.
+    if string.find(src[0], "WCE:")==0:
+        bToDevice = 0
+    elif string.find(dest, "WCE:")==0:
+        bToDevice = 1
+    else:
+        # Assume copy to device.
+        bToDevice = 1
+
+    if not isdir(dest, not bToDevice):
+        print "%s does not indicate a directory"
+
+    files = [] # List of FQ (from_name, to_name)
+    num_files = 0
+    num_bytes = 0
+    dialog = FileCopyProgressDialog("Copying files")
+    dialog.CreateWindow(win32ui.GetMainFrame())
+    if bToDevice:
+        for spec in src:
+            new = BuildFileList(spec, 1, bRecurse, _copyfilter, bMaintainDir)
+            if not new:
+                print "Warning: '%s' did not match any files" % (spec)
+            files = files + new
+
+        for full_src, src_info, dest_info in files:
+            dest_name = os.path.join(dest, dest_info)
+            size = src_info[5]
+            print "Size=", size
+            if bVerbose:
+                print full_src, "->", dest_name,"- ",
+            dialog.SetText(dest_name)
+            dialog.Set(0, size/1024)
+            bytes = CopyFileToCe(full_src, dest_name, dialog.CopyProgress)
+            num_bytes = num_bytes + bytes
+            if bVerbose:
+                print bytes, "bytes"
+            num_files = num_files + 1
+    dialog.Close()
+    print "%d files copied (%d bytes)" % (num_files, num_bytes)
+
+def _dirfilter(*args):
+    return args[1]
+
def dir(args):
    """dir directory_name ...
    Perform a directory listing on the remote device
    """
    # -r lists recursively.
    bRecurse = 0
    try:
        opts, args = getopt.getopt(args, "r")
    except getopt.error, details:
        raise InvalidUsage(details)
    for o, v in opts:
        if o=="-r":
            bRecurse=1
    for arg in args:
        print "Directory of WCE:%s" % arg
        files = BuildFileList(arg, 0, bRecurse, _dirfilter, None)
        total_size=0
        for full_name, info, rel_name in files:
            # info[3] is the last-write time; info[0] attributes; info[5] size.
            date_str = info[3].Format("%d-%b-%Y %H:%M")
            attr_string = "     "
            if info[0] & win32con.FILE_ATTRIBUTE_DIRECTORY: attr_string = "<DIR>"
            print "%s  %s %10d %s" % (date_str, attr_string, info[5], rel_name)
            total_size = total_size + info[5]
        print " " * 14 + "%3d files, %10d bytes" % (len(files), total_size)
+
def run(args):
    """run program [args]
    Starts the specified program on the remote device.
    """
    # Quote any argument containing a space, then join into one command line.
    quoted = []
    for arg in args:
        if " " in arg:
            quoted.append('"%s"' % arg)
        else:
            quoted.append(arg)
    cmd_line = string.join(quoted, " ")
    wincerapi.CeCreateProcess(cmd_line, "", None, None, 0, 0, None, "", None)
+
+def delete(args):
+    """delete file, ...
+    Delete one or more remote files
+    """
+    for arg in args:
+        try:
+            wincerapi.CeDeleteFile(arg)
+            print "Deleted: %s" % arg
+        except win32api.error, details:
+            print_error(details, "Error deleting '%s'" % arg)
+
def DumpCommands():
    # Print a table of available commands: every function in this module
    # with a docstring, using the docstring's first line as the summary
    # and indenting the remaining lines beneath it.
    print "%-10s - %s" % ("Command", "Description")
    print "%-10s - %s" % ("-------", "-----------")
    for name, item in globals().items():
        if type(item)==type(DumpCommands):
            doc = getattr(item, "__doc__", "")
            if doc:
                lines = string.split(doc, "\n")
                print "%-10s - %s" % (name, lines[0])
                for line in lines[1:]:
                    if line:
                        print " " * 8, line
+
def main():
    # Dispatch sys.argv[1] to the same-named function in this module,
    # bracketed by RAPI init/uninit so the device connection is always
    # released, even if the command fails.
    if len(sys.argv)<2:
        print "You must specify a command!"
        DumpCommands()
        return
    command = sys.argv[1]
    fn = globals().get(command)
    if fn is None:
        print "Unknown command:", command
        DumpCommands()
        return

    wincerapi.CeRapiInit()
    try:
        verinfo = wincerapi.CeGetVersionEx()
        print "Connected to device, CE version %d.%d %s" % (verinfo[0], verinfo[1], verinfo[4])
        try:
            fn(sys.argv[2:])
        except InvalidUsage, msg:
            # Bad arguments - show the command's own usage docstring.
            print "Invalid syntax -", msg
            print fn.__doc__

    finally:
        try:
            wincerapi.CeRapiUninit()
        except win32api.error, details:
            print_error(details, "Error disconnecting")

if __name__=='__main__':
    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/killProcName.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/killProcName.py
new file mode 100644
index 0000000..ef3ffb5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/killProcName.py
@@ -0,0 +1,56 @@
+# Kills a process by process name
+#
+# Uses the Performance Data Helper to locate the PID, then kills it.
+# Will only kill the process if there is only one process of that name
+# (eg, attempting to kill "Python.exe" will only work if there is only
+# one Python.exe running.  (Note that the current process does not
+# count - ie, if Python.exe is hosting this script, you can still kill
+# another Python.exe (as long as there is only one other Python.exe)
+
+# Really just a demo for the win32pdh(util) module, which allows you
+# to get all sorts of information about a running process and many
+# other aspects of your system.
+
+import win32api, win32pdhutil, win32con, sys
+
def killProcName(procname):
	"""Kill the single running process named procname.

	Returns "" on success, or a message describing why nothing was
	killed (process not found, or more than one match).  The current
	process is never a candidate.
	"""
	# Change suggested by Dan Knierim, who found that this performed a
	# "refresh", allowing us to kill processes created since this was run
	# for the first time.
	try:
		win32pdhutil.GetPerformanceAttributes('Process','ID Process',procname)
	except:
		# Deliberate best-effort: the call is only for its refresh side
		# effect, so any failure here is ignored.
		pass

	pids = win32pdhutil.FindPerformanceAttributesByName(procname)

	# If _my_ pid in there, remove it!
	try:
		pids.remove(win32api.GetCurrentProcessId())
	except ValueError:
		pass

	if len(pids)==0:
		result = "Can't find %s" % procname
	elif len(pids)>1:
		result = "Found too many %s's - pids=`%s`" % (procname,pids)
	else:
		# Exactly one match - terminate it and release the handle.
		handle = win32api.OpenProcess(win32con.PROCESS_TERMINATE, 0,pids[0])
		win32api.TerminateProcess(handle,0)
		win32api.CloseHandle(handle)
		result = ""

	return result

if __name__ == '__main__':
	# Kill each process named on the command line; on failure, dump the
	# process list to help the user find the right name.
	if len(sys.argv)>1:
		for procname in sys.argv[1:]:
			result = killProcName(procname)
			if result:
				print result
				print "Dumping all processes..."
				win32pdhutil.ShowAllProcesses()
			else:
				print "Killed %s" % procname
	else:
		print "Usage: killProcName.py procname ..."
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/rasutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/rasutil.py
new file mode 100644
index 0000000..60d57f5c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/rasutil.py
@@ -0,0 +1,88 @@
+# A demo of using the RAS API from Python
+import sys
+import string
+import win32ras
+
+# The error raised if we can not
+class ConnectionError(Exception):
+	pass
+	
+def Connect(rasEntryName, numRetries = 5):
+	"""Make a connection to the specified RAS entry.
+	
+	Returns a tuple of (bool, handle) on success.
+	- bool is 1 if a new connection was established, or 0 is a connection already existed.
+	- handle is a RAS HANDLE that can be passed to Disconnect() to end the connection.
+	
+	Raises a ConnectionError if the connection could not be established.
+	"""
+	assert numRetries > 0
+	for info in win32ras.EnumConnections():
+		if string.lower(info[1])==string.lower(rasEntryName):
+			print "Already connected to", rasEntryName
+			return 0, info[0]
+
+	dial_params, have_pw = win32ras.GetEntryDialParams(None, rasEntryName)
+	if not have_pw:
+		print "Error: The password is not saved for this connection"
+		print "Please connect manually selecting the 'save password' option and try again"
+		sys.exit(1)
+
+	print "Connecting to", rasEntryName, "..."
+	retryCount = numRetries
+	while retryCount > 0:
+		rasHandle, errCode = win32ras.Dial(None, None, dial_params, None)
+		if win32ras.IsHandleValid(rasHandle):
+			bValid = 1
+			break
+		print "Retrying..."
+		win32api.Sleep(5000)
+		retryCount = retryCount - 1
+	
+	if errCode:
+		raise ConnectionError(errCode, win32ras.GetErrorString(errCode))
+	return 1, rasHandle
+
def Disconnect(handle):
	# Accept either a RAS connection handle, or a phonebook entry name
	# (a string), which is resolved to the matching live connection.
	if type(handle)==type(''):
		resolved = None
		for info in win32ras.EnumConnections():
			if string.lower(info[1])==string.lower(handle):
				resolved = info[0]
				break
		if resolved is None:
			raise ConnectionError(0, "Not connected to entry '%s'" % handle)
		handle = resolved

	win32ras.HangUp(handle)
+
+usage="""rasutil.py - Utilities for using RAS
+
+Usage:
+  rasutil [-r retryCount] [-c rasname] [-d rasname]
+  
+  -r retryCount - Number of times to retry the RAS connection
+  -c rasname - Connect to the phonebook entry specified by rasname
+  -d rasname - Disconnect from the phonebook entry specified by rasname
+"""
+
def Usage(why):
	# Print the failure reason plus the usage banner, then abort.
	print why
	print usage
	sys.exit(1)
+	
if __name__=='__main__':
	import getopt
	try:
		opts, args = getopt.getopt(sys.argv[1:], "r:c:d:")
	except getopt.error, why:
		Usage(why)
	retries = 5
	# "<>" is the legacy Python 2 spelling of "!=".  Positional args
	# are not accepted - everything is option-driven.
	if len(args) <> 0:
		Usage("Invalid argument")

	# NOTE(review): options are processed in command-line order, so a
	# "-r" given after "-c" does not affect that connect attempt.
	for opt, val in opts:
		if opt=='-c':
			Connect(val, retries)
		if opt=='-d':
			Disconnect(val)
		if opt=='-r':
			retries = int(val)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/regsetup.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/regsetup.py
new file mode 100644
index 0000000..3506ce3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/scripts/regsetup.py
@@ -0,0 +1,510 @@
+# A tool to setup the Python registry.
+
+error = "Registry Setup Error"
+
+import sys # at least we can count on this!
+
def FileExists(fname):
    """Check if a file exists.  Returns true or false.
    """
    import os
    # os.stat raises os.error for a missing (or inaccessible) path.
    try:
        os.stat(fname)
    except os.error:
        return 0
    return 1
+
def IsPackageDir(path, packageName, knownFileName):
    """Given a path, a ni package name, and possibly a known file name in
       the root of the package, see if this path is good.
    """
    import os
    # With no known file, probe the package directory itself (".").
    if knownFileName is None:
        knownFileName = "."
    packageDir = os.path.join(path, packageName)
    return FileExists(os.path.join(packageDir, knownFileName))
+
def IsDebug():
    """Return "_d" if we're running a debug version.

    This is to be used within DLL names when locating them.
    """
    import imp
    # A debug interpreter registers "_d.pyd" as an extension suffix.
    suffixes = [info[0] for info in imp.get_suffixes()]
    if '_d.pyd' in suffixes:
        return '_d'
    return ''
+
def FindPackagePath(packageName, knownFileName, searchPaths):
    """Find a package.

       Given a ni style package name, check the package is registered.

       First place looked is the registry for an existing entry.  Then
       the searchPaths are searched.

       Returns (pathFound, pathToRegister); pathToRegister is None when
       the existing registry entry is already valid.
    """
    import regutil, os
    pathLook = regutil.GetRegisteredNamedPath(packageName)
    if pathLook and IsPackageDir(pathLook, packageName, knownFileName):
        return pathLook, None # The currently registered one is good.
    # Search down the search paths.
    for pathLook in searchPaths:
        if IsPackageDir(pathLook, packageName, knownFileName):
            # Found it
            ret = os.path.abspath(pathLook)
            return ret, ret
    raise error, "The package %s can not be located" % packageName
+
def FindHelpPath(helpFile, helpDesc, searchPaths):
    """Return the directory containing helpFile.

    The registered Windows Help location for helpDesc is tried first,
    then each search path and its "Help" sub-directory.  Raises the
    module's "error" string if the file can not be found anywhere.
    """
    # See if the current registry entry is OK
    import os, win32api, win32con
    try:
        key = win32api.RegOpenKey(win32con.HKEY_LOCAL_MACHINE, "Software\\Microsoft\\Windows\\Help", 0, win32con.KEY_ALL_ACCESS)
        try:
            try:
                path = win32api.RegQueryValueEx(key, helpDesc)[0]
                if FileExists(os.path.join(path, helpFile)):
                    return os.path.abspath(path)
            except win32api.error:
                pass # no registry entry.
        finally:
            key.Close()
    except win32api.error:
        pass
    for pathLook in searchPaths:
        if FileExists(os.path.join(pathLook, helpFile)):
            return os.path.abspath(pathLook)
        # Also probe a "Help" sub-directory of each search path.
        pathLook = os.path.join(pathLook, "Help")
        if FileExists(os.path.join( pathLook, helpFile)):
            return os.path.abspath(pathLook)
    raise error, "The help file %s can not be located" % helpFile
+
def FindAppPath(appName, knownFileName, searchPaths):
    """Find an application.

     First place looked is the registry for an existing entry.  Then
     the searchPaths are searched.

     Returns None when the currently registered path is already valid,
     otherwise the absolute path that should be registered.
    """
    # Look in the first path.
    import regutil, string, os
    regPath = regutil.GetRegisteredNamedPath(appName)
    if regPath:
        # Only the first ";"-separated component is validated.
        pathLook = string.split(regPath,";")[0]
    if regPath and FileExists(os.path.join(pathLook, knownFileName)):
        return None # The currently registered one is good.
    # Search down the search paths.
    for pathLook in searchPaths:
        if FileExists(os.path.join(pathLook, knownFileName)):
            # Found it
            return os.path.abspath(pathLook)
    raise error, "The file %s can not be located for application %s" % (knownFileName, appName)
+
def FindPythonExe(exeAlias, possibleRealNames, searchPaths):
    """Find an exe.

       Returns the full path to the .exe, and a boolean indicating if the current
       registered entry is OK.  We don't trust the already registered version even
       if it exists - it may be wrong (ie, for a different Python version)
    """
    import win32api, regutil, string, os, sys
    if possibleRealNames is None:
        possibleRealNames = exeAlias
    # Look first in Python's home.
    found = os.path.join(sys.prefix,  possibleRealNames)
    if not FileExists(found): # for developers
        found = os.path.join(sys.prefix,  "PCBuild", possibleRealNames)
    if not FileExists(found):
        # May prompt the user via a dialog (see LocateFileName).
        found = LocateFileName(possibleRealNames, searchPaths)

    registered_ok = 0
    try:
        # Compare against the App Paths registration for this alias.
        registered = win32api.RegQueryValue(regutil.GetRootKey(), regutil.GetAppPathsKey() + "\\" + exeAlias)
        registered_ok = found==registered
    except win32api.error:
        pass
    return found, registered_ok
+
def QuotedFileName(fname):
    """Given a filename, return a quoted version if necessary.

    A name containing a space is wrapped in double quotes (so it
    survives use on a command line); otherwise it is returned unchanged.
    """
    # Fix: dropped the unused "import regutil" (never referenced here) and
    # replaced the string.index()/ValueError dance with a containment test.
    if " " in fname:
        return '"%s"' % fname
    return fname
+
def LocateFileName(fileNamesString, searchPaths):
    """Locate a file name, anywhere on the search path.

       If the file can not be located, prompt the user to find it for us
       (using a common OpenFile dialog)

       Raises KeyboardInterrupt if the user cancels.
    """
    import regutil, string, os
    # fileNamesString is a ";"-separated list of acceptable file names.
    fileNames = string.split(fileNamesString,";")
    for path in searchPaths:
        for fileName in fileNames:
            try:
                retPath = os.path.join(path, fileName)
                os.stat(retPath)
                break
            except os.error:
                retPath = None
        if retPath:
            break
    else:
        # Not found on any search path - fall back to asking the user.
        fileName = fileNames[0]
        try:
            import win32ui, win32con
        except ImportError:
            raise error, "Need to locate the file %s, but the win32ui module is not available\nPlease run the program again, passing as a parameter the path to this file." % fileName
        # Display a common dialog to locate the file.
        flags=win32con.OFN_FILEMUSTEXIST
        ext = os.path.splitext(fileName)[1]
        filter = "Files of requested type (*%s)|*%s||" % (ext,ext)
        dlg = win32ui.CreateFileDialog(1,None,fileName,flags,filter,None)
        dlg.SetOFNTitle("Locate " + fileName)
        if dlg.DoModal() <> win32con.IDOK:
            raise KeyboardInterrupt, "User cancelled the process"
        retPath = dlg.GetPathName()
    return os.path.abspath(retPath)
+
def LocatePath(fileName, searchPaths):
    """Like LocateFileName, but returns a directory only.
    """
    import os
    located = LocateFileName(fileName, searchPaths)
    directory, _ignored = os.path.split(located)
    return os.path.abspath(directory)

def LocateOptionalPath(fileName, searchPaths):
    """Like LocatePath, but returns None if the user cancels.
    """
    try:
        return LocatePath(fileName, searchPaths)
    except KeyboardInterrupt:
        return None


def LocateOptionalFileName(fileName, searchPaths = None):
    """Like LocateFileName, but returns None if the user cancels.
    """
    try:
        return LocateFileName(fileName, searchPaths)
    except KeyboardInterrupt:
        return None
+
def LocatePythonCore(searchPaths):
    """Locate and validate the core Python directories.  Returns a list
     of paths that should be used as the core (ie, un-named) portion of
     the Python path.

     Actually returns (installPath, [libPath, corePath]).
    """
    import string, os, regutil
    # Seed the search with any currently registered core path.
    currentPath = regutil.GetRegisteredNamedPath(None)
    if currentPath:
        presearchPaths = string.split(currentPath, ";")
    else:
        presearchPaths = [os.path.abspath(".")]
    # The standard library is identified by the presence of os.py.
    libPath = None
    for path in presearchPaths:
        if FileExists(os.path.join(path, "os.py")):
            libPath = path
            break
    if libPath is None and searchPaths is not None:
        libPath = LocatePath("os.py", searchPaths)
    if libPath is None:
        raise error, "The core Python library could not be located."

    # The binary extensions directory is identified by unicodedata.pyd
    # (with a "_d" suffix for debug builds).
    corePath = None
    suffix = IsDebug()
    for path in presearchPaths:
        if FileExists(os.path.join(path, "unicodedata%s.pyd" % suffix)):
            corePath = path
            break
    if corePath is None and searchPaths is not None:
        corePath = LocatePath("unicodedata%s.pyd" % suffix, searchPaths)
    if corePath is None:
        raise error, "The core Python path could not be located."

    installPath = os.path.abspath(os.path.join(libPath, ".."))
    return installPath, [libPath, corePath]
+
def FindRegisterPackage(packageName, knownFile, searchPaths, registryAppName = None):
    """Find and Register a package.

       Assumes the core registry setup correctly.

       In addition, if the location located by the package is already
       in the **core** path, then an entry is registered, but no path.
       (no other paths are checked, as the application whose path was used
       may later be uninstalled.  This should not happen with the core)

       Returns the located path, or None (with messages printed) on failure.
    """
    import regutil, string
    if not packageName: raise error, "A package name must be supplied"
    corePaths = string.split(regutil.GetRegisteredNamedPath(None),";")
    if not searchPaths: searchPaths = corePaths
    registryAppName = registryAppName or packageName
    try:
        pathLook, pathAdd = FindPackagePath(packageName, knownFile, searchPaths)
        if pathAdd is not None:
            if pathAdd in corePaths:
                # Already on the core path - register the name with no path.
                pathAdd = ""
            regutil.RegisterNamedPath(registryAppName, pathAdd)
        return pathLook
    except error, details:
        print "*** The %s package could not be registered - %s" % (packageName, details)
        print "*** Please ensure you have passed the correct paths on the command line."
        print "*** - For packages, you should pass a path to the packages parent directory,"
        print "*** - and not the package directory itself..."
+
+
def FindRegisterApp(appName, knownFiles, searchPaths):
    """Find and Register a package.

       Assumes the core registry setup correctly.

    """
    import regutil, string
    # Accept a single file name as well as a list of them.
    if type(knownFiles)==type(''):
        knownFiles = [knownFiles]
    paths=[]
    try:
        for knownFile in knownFiles:
            pathLook = FindAppPath(appName, knownFile, searchPaths)
            if pathLook:
                paths.append(pathLook)
    except error, details:
        print "*** ", details
        return

    # Register all located paths as one ";"-separated named path.
    regutil.RegisterNamedPath(appName, string.join(paths,";"))
+
def FindRegisterPythonExe(exeAlias, searchPaths, actualFileNames = None):
    """Find and Register a Python exe (not necessarily *the* python.exe)

       Assumes the core registry setup correctly.
    """
    import regutil, string
    # Locate the exe; only (re-)register it if the existing App Paths
    # entry does not already point at the located file.
    fname, registered_ok = FindPythonExe(exeAlias, actualFileNames, searchPaths)
    if not registered_ok:
        regutil.RegisterPythonExe(fname, exeAlias)
    return fname
+
+
def FindRegisterHelpFile(helpFile, searchPaths, helpDesc = None ):
    # Locate helpFile via FindHelpPath and register it with Windows Help;
    # on failure, print the error and return silently.
    import regutil

    try:
        pathLook = FindHelpPath(helpFile, helpDesc, searchPaths)
    except error, details:
        print "*** ", details
        return
#       print "%s found at %s" % (helpFile, pathLook)
    regutil.RegisterHelpFile(helpFile, pathLook, helpDesc)
+
def SetupCore(searchPaths):
    """Setup the core Python information in the registry.

       This function makes no assumptions about the current state of sys.path.

       After this function has completed, you should have access to the standard
       Python library, and the standard Win32 extensions
    """

    import sys
    # Extend sys.path first so the imports below can succeed even on a
    # broken installation.
    for path in searchPaths:
        sys.path.append(path)

    import string, os
    import regutil, win32api,win32con

    installPath, corePaths = LocatePythonCore(searchPaths)
    # Register the core Pythonpath.
    # NOTE(review): this bare print of corePaths looks like leftover
    # debug output - confirm before removing.
    print corePaths
    regutil.RegisterNamedPath(None, string.join(corePaths,";"))

    # Register the install path.
    hKey = win32api.RegCreateKey(regutil.GetRootKey() , regutil.BuildDefaultPythonKey())
    try:
        # Core Paths.
        win32api.RegSetValue(hKey, "InstallPath", win32con.REG_SZ, installPath)
    finally:
        win32api.RegCloseKey(hKey)

    # Register the win32 core paths.
    win32paths = os.path.abspath( os.path.split(win32api.__file__)[0]) + ";" + \
                 os.path.abspath( os.path.split(LocateFileName("win32con.py;win32con.pyc", sys.path ) )[0] )

    # Python has builtin support for finding a "DLLs" directory, but
    # not a PCBuild.  Having it in the core paths means it is ignored when
    # an EXE not in the Python dir is hosting us - so we add it as a named
    # value
    check = os.path.join(sys.prefix, "PCBuild")
    if os.path.isdir(check):
        regutil.RegisterNamedPath("PCBuild",check)
+
def RegisterShellInfo(searchPaths):
    """Registers key parts of the Python installation with the Windows Shell.

       Assumes a valid, minimal Python installation exists
       (ie, SetupCore() has been previously successfully run)
    """
    import regutil, win32con
    suffix = IsDebug()
    # Set up a pointer to the .exe's
    exePath = FindRegisterPythonExe("Python%s.exe" % suffix, searchPaths)
    # Associate .py files with the interpreter and give them an icon.
    regutil.SetRegistryDefaultValue(".py", "Python.File", win32con.HKEY_CLASSES_ROOT)
    regutil.RegisterShellCommand("Open", QuotedFileName(exePath)+" \"%1\" %*", "&Run")
    regutil.SetRegistryDefaultValue("Python.File\\DefaultIcon", "%s,0" % exePath, win32con.HKEY_CLASSES_ROOT)

    FindRegisterHelpFile("Python.hlp", searchPaths, "Main Python Documentation")
    FindRegisterHelpFile("ActivePython.chm", searchPaths, "Main Python Documentation")

    # We consider the win32 core, as it contains all the win32 api type
    # stuff we need.
#       FindRegisterApp("win32", ["win32con.pyc", "win32api%s.pyd" % suffix], searchPaths)
+
+usage = """\
+regsetup.py - Setup/maintain the registry for Python apps.
+
+Run without options, (but possibly search paths) to repair a totally broken
+python registry setup.  This should allow other options to work.
+
+Usage:   %s [options ...] paths ...
+-p packageName  -- Find and register a package.  Looks in the paths for
+                   a sub-directory with the name of the package, and
+                   adds a path entry for the package.
+-a appName      -- Unconditionally add an application name to the path.
+                   A new path entry is create with the app name, and the
+                   paths specified are added to the registry.
+-c              -- Add the specified paths to the core Pythonpath.
+                   If a path appears on the core path, and a package also
+                   needs that same path, the package will not bother
+                   registering it.  Therefore, By adding paths to the
+                   core path, you can avoid packages re-registering the same path.
+-m filename     -- Find and register the specific file name as a module.
+                   Do not include a path on the filename!
+--shell         -- Register everything with the Win95/NT shell.
+--upackage name -- Unregister the package
+--uapp name     -- Unregister the app (identical to --upackage)
+--umodule name  -- Unregister the module
+
+--description   -- Print a description of the usage.
+--examples      -- Print examples of usage.
+""" % sys.argv[0]
+
+description="""\
+If no options are processed, the program attempts to validate and set
+the standard Python path to the point where the standard library is
+available.  This can be handy if you move Python to a new drive/sub-directory,
+in which case most of the options would fail (as they need at least string.py,
+os.py etc to function.)
+Running without options should repair Python well enough to run with
+the other options.
+
+paths are search paths that the program will use to seek out a file.
+For example, when registering the core Python, you may wish to
+provide paths to non-standard places to look for the Python help files,
+library files, etc.
+
+See also the "regcheck.py" utility which will check and dump the contents
+of the registry.
+"""
+
+examples="""\
+Examples:
+"regsetup c:\\wierd\\spot\\1 c:\\wierd\\spot\\2"
+Attempts to setup the core Python.  Looks in some standard places,
+as well as the 2 wierd spots to locate the core Python files (eg, Python.exe,
+python14.dll, the standard library and Win32 Extensions.
+
+"regsetup -a myappname . .\subdir"
+Registers a new Pythonpath entry named myappname, with "C:\\I\\AM\\HERE" and
+"C:\\I\\AM\\HERE\subdir" added to the path (ie, all args are converted to
+absolute paths)
+
+"regsetup -c c:\\my\\python\\files"
+Unconditionally add "c:\\my\\python\\files" to the 'core' Python path.
+
+"regsetup -m some.pyd \\windows\\system"
+Register the module some.pyd in \\windows\\system as a registered
+module.  This will allow some.pyd to be imported, even though the
+windows system directory is not (usually!) on the Python Path.
+
+"regsetup --umodule some"
+Unregister the module "some".  This means normal import rules then apply
+for that module.
+"""
+
if __name__=='__main__':
    if len(sys.argv)>1 and sys.argv[1] in ['/?','-?','-help','-h']:
        print usage
    elif len(sys.argv)==1 or not sys.argv[1][0] in ['/','-']:
        # No args, or useful args.
        searchPath = sys.path[:]
        for arg in sys.argv[1:]:
            searchPath.append(arg)
        # Good chance we are being run from the "regsetup.py" directory.
        # Typically this will be "\somewhere\win32\Scripts" and the
        # "somewhere" and "..\Lib" should also be searched.
        searchPath.append("..\\Build")
        searchPath.append("..\\Lib")
        searchPath.append("..")
        searchPath.append("..\\..")

        # for developers:
        # also search somewhere\lib, ..\build, and ..\..\build
        searchPath.append("..\\..\\lib")
        searchPath.append("..\\build")
        searchPath.append("..\\..\\pcbuild")

        print "Attempting to setup/repair the Python core"

        SetupCore(searchPath)
        RegisterShellInfo(searchPath)
        FindRegisterHelpFile("PyWin32.chm", searchPath, "Pythonwin Reference")
        # Check the registry.
        print "Registration complete - checking the registry..."
        import regcheck
        regcheck.CheckRegistry()
    else:
        # Option-driven mode - every non-option argument is a search path.
        searchPaths = []
        import getopt, string
        opts, args = getopt.getopt(sys.argv[1:], 'p:a:m:c',
                ['shell','upackage=','uapp=','umodule=','description','examples'])
        for arg in args:
            searchPaths.append(arg)
        # Note: options are handled with independent "if"s (not elif), in
        # command-line order.
        for o,a in opts:
            if o=='--description':
                print description
            if o=='--examples':
                print examples
            if o=='--shell':
                print "Registering the Python core."
                RegisterShellInfo(searchPaths)
            if o=='-p':
                print "Registering package", a
                FindRegisterPackage(a,None,searchPaths)
            if o in ['--upackage', '--uapp']:
                import regutil
                print "Unregistering application/package", a
                regutil.UnregisterNamedPath(a)
            if o=='-a':
                import regutil
                path = string.join(searchPaths,";")
                print "Registering application", a,"to path",path
                regutil.RegisterNamedPath(a,path)
            if o=='-c':
                if not len(searchPaths):
                    raise error, "-c option must provide at least one additional path"
                import win32api, regutil
                # Merge the new paths into the existing core path,
                # skipping any already present ("<>" is legacy "!=").
                currentPaths = string.split(regutil.GetRegisteredNamedPath(None),";")
                oldLen = len(currentPaths)
                for newPath in searchPaths:
                    if newPath not in currentPaths:
                        currentPaths.append(newPath)
                if len(currentPaths)<>oldLen:
                    print "Registering %d new core paths" % (len(currentPaths)-oldLen)
                    regutil.RegisterNamedPath(None,string.join(currentPaths,";"))
                else:
                    print "All specified paths are already registered."
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/servicemanager.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/servicemanager.pyd
new file mode 100644
index 0000000..755ca312
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/servicemanager.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/handles.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/handles.py
new file mode 100644
index 0000000..87ea307
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/handles.py
@@ -0,0 +1,87 @@
+import unittest
+import pywintypes
+import win32api
+
+# A class that will never die via refcounting, but will die via GC.
+class Cycle:
+    def __init__(self, handle):
+        self.cycle = self
+        self.handle = handle
+
+class PyHandleTestCase(unittest.TestCase):
+    def testCleanup1(self):
+        # We used to clobber all outstanding exceptions.
+        def f1(invalidate):
+            import win32event
+            h = win32event.CreateEvent(None, 0, 0, None)
+            if invalidate:
+                win32api.CloseHandle(int(h))
+            1/0
+            # If we invalidated, then the object destruction code will attempt 
+            # to close an invalid handle.  We don't want an exception in 
+            # this case
+
+        def f2(invalidate):
+            """ This function should throw an IOError. """
+            try:
+                f1(invalidate)
+            except ZeroDivisionError, exc:
+                raise IOError("raise 2")
+
+        self.assertRaises(IOError, f2, False)
+        # Now do it again, but so the auto object destruction
+        # actually fails.
+        self.assertRaises(IOError, f2, True)
+
+    def testCleanup2(self):
+        # Cause an exception during object destruction.
+        # The worst this does is cause an ".XXX undetected error (why=3)" 
+        # So avoiding that is the goal
+        import win32event
+        h = win32event.CreateEvent(None, 0, 0, None)
+        # Close the handle underneath the object.
+        win32api.CloseHandle(int(h))
+        # Object destructor runs with the implicit close failing
+        h = None
+
+    def testCleanup3(self):
+        # And again with a class - no __del__
+        import win32event
+        class Test:
+            def __init__(self):
+                self.h = win32event.CreateEvent(None, 0, 0, None)
+                win32api.CloseHandle(int(self.h))
+        t=Test()
+        t = None
+
+    def testCleanupGood(self):
+        # And check that normal error semantics *do* work.
+        import win32event
+        h = win32event.CreateEvent(None, 0, 0, None)
+        win32api.CloseHandle(int(h))
+        self.assertRaises(win32api.error, h.Close)
+        # A following Close is documented as working
+        h.Close()
+
+    def testInvalid(self):
+        h=pywintypes.HANDLE(-2)
+        self.assertRaises(win32api.error, h.Close)
+
+    def testGC(self):
+        # This used to provoke:
+        # Fatal Python error: unexpected exception during garbage collection
+        def make():
+            h=pywintypes.HANDLE(-2)
+            c = Cycle(h)
+        import gc
+        make()
+        gc.collect()
+
+    def testTypes(self):
+        self.assertRaises(TypeError, pywintypes.HANDLE, "foo")
+        self.assertRaises(TypeError, pywintypes.HANDLE, ())
+        # should be able to get a long!
+        pywintypes.HANDLE(0L)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_clipboard.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_clipboard.py
new file mode 100644
index 0000000..0d5ebfe
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_clipboard.py
@@ -0,0 +1,96 @@
+# General test module for win32clipboard - please add some :)
+import sys, os
+import unittest
+
+from win32clipboard import *
+import win32gui, win32con
+import pywintypes
+import array
+
+custom_format_name = "PythonClipboardTestFormat"
+
+class CrashingTestCase(unittest.TestCase):
+    def test_722082(self):
+        class crasher(object):
+            pass
+
+        obj = crasher()
+        OpenClipboard()
+        try:
+            EmptyClipboard()
+            # This used to crash - now correctly raises type error.
+            self.assertRaises(TypeError, SetClipboardData, 0, obj )
+        finally:
+            CloseClipboard()
+
+class TestBitmap(unittest.TestCase):
+    def setUp(self):
+        self.bmp_handle = None
+        try:
+            this_file = __file__
+        except NameError:
+            this_file = sys.argv[0]
+        this_dir = os.path.dirname(__file__)
+        self.bmp_name = os.path.join(os.path.abspath(this_dir),
+                                     "..", "Demos", "images", "smiley.bmp")
+        self.failUnless(os.path.isfile(self.bmp_name))
+        flags = win32con.LR_DEFAULTSIZE | win32con.LR_LOADFROMFILE
+        self.bmp_handle = win32gui.LoadImage(0, self.bmp_name,
+                                             win32con.IMAGE_BITMAP,
+                                             0, 0, flags)
+        self.failUnless(self.bmp_handle, "Failed to get a bitmap handle")
+
+    def tearDown(self):
+        if self.bmp_handle:
+            win32gui.DeleteObject(self.bmp_handle)
+
+    def test_bitmap_roundtrip(self):
+        OpenClipboard()
+        try:
+            SetClipboardData(win32con.CF_BITMAP, self.bmp_handle)
+            got_handle = GetClipboardDataHandle(win32con.CF_BITMAP)
+            self.failUnlessEqual(got_handle, self.bmp_handle)
+        finally:
+            CloseClipboard()
+
+class TestStrings(unittest.TestCase):
+    def setUp(self):
+        OpenClipboard()
+    def tearDown(self):
+        CloseClipboard()
+    def test_unicode(self):
+        val = unicode("test-\xe0\xf2", "mbcs")
+        SetClipboardData(win32con.CF_UNICODETEXT, val)
+        self.failUnlessEqual(GetClipboardData(win32con.CF_UNICODETEXT), val)
+    def test_string(self):
+        val = "test"
+        SetClipboardData(win32con.CF_TEXT, val)
+        self.failUnlessEqual(GetClipboardData(win32con.CF_TEXT), val)
+
+class TestGlobalMemory(unittest.TestCase):
+    def setUp(self):
+        OpenClipboard()
+    def tearDown(self):
+        CloseClipboard()
+    def test_mem(self):
+        val = "test"
+        SetClipboardData(win32con.CF_TEXT, val)
+        # Get the raw data - this will include the '\0'
+        raw_data = GetGlobalMemory(GetClipboardDataHandle(win32con.CF_TEXT))
+        self.failUnlessEqual(val + '\0', raw_data)
+    def test_bad_mem(self):
+        self.failUnlessRaises(pywintypes.error, GetGlobalMemory, 0)
+        self.failUnlessRaises(pywintypes.error, GetGlobalMemory, 1)
+        self.failUnlessRaises(pywintypes.error, GetGlobalMemory, -1)
+    def test_custom_mem(self):
+        test_data = "hello\x00\xff"
+        test_buffer = array.array("c", test_data)
+        cf = RegisterClipboardFormat(custom_format_name)
+        self.failUnlessEqual(custom_format_name, GetClipboardFormatName(cf))
+        SetClipboardData(cf, test_buffer)
+        hglobal = GetClipboardDataHandle(cf)
+        data = GetGlobalMemory(hglobal)
+        self.failUnlessEqual(data, test_data)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_odbc.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_odbc.py
new file mode 100644
index 0000000..2a5b0eb3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_odbc.py
@@ -0,0 +1,112 @@
+# odbc test suite kindly contributed by Frank Millman.
+import sys
+import os
+import unittest
+import odbc
+import tempfile
+
+# We use the DAO ODBC driver
+from win32com.client.gencache import EnsureDispatch
+from win32com.client import constants
+
+class TestStuff(unittest.TestCase):
+    def setUp(self):
+        self.conn = self.cur = None
+        self.db_filename = os.path.join(tempfile.gettempdir(), "test_odbc.mdb")
+        if os.path.isfile(self.db_filename):
+            os.unlink(self.db_filename)
+
+        # Create a brand-new database - what is the story with these?
+        for suffix in (".36", ".35", ".30"):
+            try:
+                dbe = EnsureDispatch("DAO.DBEngine" + suffix)
+                break
+            except pythoncom.com_error:
+                pass
+        else:
+            raise RuntimeError, "Can't find a DB engine"
+
+        workspace = dbe.Workspaces(0)
+
+        newdb = workspace.CreateDatabase(self.db_filename, 
+                                         constants.dbLangGeneral,
+                                         constants.dbEncrypt)
+
+        newdb.Close()
+
+        conn_str = "Driver={Microsoft Access Driver (*.mdb)};dbq=%s;Uid=;Pwd=;" \
+                   % (self.db_filename,)
+        self.conn = odbc.odbc(conn_str)
+        # And we expect a 'users' table for these tests.
+        self.cur = self.conn.cursor()
+        self.assertEqual(self.cur.execute(
+            """create table users (
+                    userid varchar(5),  username varchar(25),
+                    bitfield bit,       intfield integer,
+                    floatfield float
+                )"""),-1)
+
+    def tearDown(self):
+        if self.cur is not None:
+            self.cur.close()
+            self.cur = None
+        if self.conn is not None:
+            self.conn.close()
+            self.conn = None
+        os.unlink(self.db_filename)
+
+    def test_insert_select(self, userid='Frank', username='Frank Millman'):
+        self.assertEqual(self.cur.execute("insert into users (userid, username) \
+            values (?,?)", [userid, username]),1)
+        self.assertEqual(self.cur.execute("select * from users \
+            where userid = ?", [userid.lower()]),0)
+        self.assertEqual(self.cur.execute("select * from users \
+            where username = ?", [username.lower()]),0)
+
+    def test_insert_select_large(self):
+        # hard-coded 256 limit in ODBC to trigger large value support
+        self.test_insert_select(userid='Frank' * 200, username='Frank Millman' * 200)
+
+    def test_insert_select_unicode(self, userid=u'Frank', username=u"Frank Millman"):
+        self.assertEqual(self.cur.execute("insert into users  (userid, username)\
+            values (?,?)", [userid, username]),1)
+        self.assertEqual(self.cur.execute("select * from users \
+            where userid = ?", [userid.lower()]),0)
+        self.assertEqual(self.cur.execute("select * from users \
+            where username = ?", [username.lower()]),0)
+
+    def test_insert_select_unicode_ext(self):
+        userid = unicode("test-\xe0\xf2", "mbcs")
+        username = unicode("test-\xe0\xf2 name", "mbcs")
+        self.test_insert_select_unicode(userid, username)
+
+    def _test_val(self, fieldName, value):
+        self.cur.execute("delete from users where userid='Frank'")
+        self.assertEqual(self.cur.execute(
+            "insert into users (userid, %s) values (?,?)" % fieldName,
+            ["Frank", value]), 1)
+        self.cur.execute("select %s from users where userid = ?" % fieldName,
+                         ["Frank"])
+        rows = self.cur.fetchmany()
+        self.failUnlessEqual(1, len(rows))
+        row = rows[0]
+        self.failUnlessEqual(row[0], value)
+
+    def testBit(self):
+        self._test_val('bitfield', 1)
+        self._test_val('bitfield', 0)
+
+    def testInt(self):
+        self._test_val('intfield', 1)
+        self._test_val('intfield', 0)
+        self._test_val('intfield', sys.maxint)
+
+    def testFloat(self):
+        self._test_val('floatfield', 1.01)
+        self._test_val('floatfield', 0)
+
+    def testVarchar(self, ):
+        self._test_val('username', 'foo')
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_pywintypes.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_pywintypes.py
new file mode 100644
index 0000000..23d6dda
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_pywintypes.py
@@ -0,0 +1,34 @@
+import unittest
+import pywintypes
+import time
+
+class TestCase(unittest.TestCase):
+    def testPyTimeFormat(self):
+        struct_current = time.localtime()
+        pytime_current = pywintypes.Time(struct_current)
+        # try and test all the standard parts of the format
+        format_string = "%a %A %b %B %c %d %H %I %j %m %M %p %S %U %w %W %x %X %y %Y %Z"
+        self.assertEquals(pytime_current.Format(format_string), time.strftime(format_string, struct_current))
+
+    def testPyTimePrint(self):
+        # This used to crash with an invalid, or too early time.
+        # We don't really want to check that it does cause a ValueError
+        # (as hopefully this won't be true forever).  So either working, or 
+        # ValueError is OK.
+        t = pywintypes.Time(-2)
+        try:
+            t.Format()
+        except ValueError:
+            return
+
+    def testGUID(self):
+        s = "{00020400-0000-0000-C000-000000000046}"
+        iid = pywintypes.IID(s)
+        iid2 = pywintypes.IID(buffer(iid), True)
+        self.assertEquals(iid, iid2)
+        self.assertRaises(ValueError, pywintypes.IID, '00', True) # too short
+        self.assertRaises(TypeError, pywintypes.IID, 0, True) # no buffer
+
+if __name__ == '__main__':
+    unittest.main()
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_security.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_security.py
new file mode 100644
index 0000000..7ce77c9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_security.py
@@ -0,0 +1,87 @@
+# Tests for the win32security module.
+import sys, os
+import unittest
+
+import win32api, win32con, win32security, ntsecuritycon
+
+class SecurityTests(unittest.TestCase):
+    def setUp(self):
+        self.pwr_sid=win32security.LookupAccountName('','Power Users')[0]
+        self.admin_sid=win32security.LookupAccountName('','Administrator')[0]
+
+    def tearDown(self):
+        pass
+
+    def testMemory(self):
+        pwr_sid = self.pwr_sid
+        admin_sid = self.admin_sid
+        sd1=win32security.SECURITY_DESCRIPTOR()
+        sd2=win32security.SECURITY_DESCRIPTOR()
+        sd3=win32security.SECURITY_DESCRIPTOR()
+        dacl=win32security.ACL()
+        dacl.AddAccessAllowedAce(win32security.ACL_REVISION,win32con.GENERIC_READ,pwr_sid)
+        dacl.AddAccessAllowedAce(win32security.ACL_REVISION,win32con.GENERIC_ALL,admin_sid)
+        sd4=win32security.SECURITY_DESCRIPTOR()
+        sacl=win32security.ACL()
+        sacl.AddAuditAccessAce(win32security.ACL_REVISION,win32con.DELETE,admin_sid,1,1)
+        sacl.AddAuditAccessAce(win32security.ACL_REVISION,win32con.GENERIC_ALL,pwr_sid,1,1)
+        for x in xrange(0,200000):
+            sd1.SetSecurityDescriptorOwner(admin_sid,0)
+            sd2.SetSecurityDescriptorGroup(pwr_sid,0)
+            sd3.SetSecurityDescriptorDacl(1,dacl,0)
+            sd4.SetSecurityDescriptorSacl(1,sacl,0)
+
+class TestDS(unittest.TestCase):
+    def testDsGetDcName(self):
+        # Not sure what we can actually test here!  At least calling it
+        # does something :)
+        win32security.DsGetDcName()
+
+    def testDsListServerInfo(self):
+        # again, not checking much, just exercising the code.
+        h=win32security.DsBind()
+        for (status, ignore, site) in win32security.DsListSites(h):
+            for (status, ignore, server) in win32security.DsListServersInSite(h, site):
+                info = win32security.DsListInfoForServer(h, server)
+            for (status, ignore, domain) in win32security.DsListDomainsInSite(h, site):
+                pass
+
+    def testDsCrackNames(self):
+        h = win32security.DsBind()
+        fmt_offered = ntsecuritycon.DS_FQDN_1779_NAME
+        name = win32api.GetUserNameEx(fmt_offered)
+        result = win32security.DsCrackNames(h, 0, fmt_offered, fmt_offered, (name,))
+        self.failUnlessEqual(name, result[0][2])
+
+    def testDsCrackNamesSyntax(self):
+        # Do a syntax check only - that allows us to avoid binding.
+        # But must use DS_CANONICAL_NAME (or _EX)
+        expected = win32api.GetUserNameEx(win32api.NameCanonical)
+        fmt_offered = ntsecuritycon.DS_FQDN_1779_NAME
+        name = win32api.GetUserNameEx(fmt_offered)
+        result = win32security.DsCrackNames(None, ntsecuritycon.DS_NAME_FLAG_SYNTACTICAL_ONLY,
+                                            fmt_offered, ntsecuritycon.DS_CANONICAL_NAME,
+                                            (name,))
+        self.failUnlessEqual(expected, result[0][2])
+
+class TestTranslate(unittest.TestCase):
+    def _testTranslate(self, fmt_from, fmt_to):
+        name = win32api.GetUserNameEx(fmt_from)
+        expected = win32api.GetUserNameEx(fmt_to)
+        got = win32security.TranslateName(name, fmt_from, fmt_to)
+        self.failUnlessEqual(got, expected)
+
+    def testTranslate1(self):
+        self._testTranslate(win32api.NameFullyQualifiedDN, win32api.NameSamCompatible)
+
+    def testTranslate2(self):
+        self._testTranslate(win32api.NameSamCompatible, win32api.NameFullyQualifiedDN)
+
+    def testTranslate3(self):
+        self._testTranslate(win32api.NameFullyQualifiedDN, win32api.NameUniqueId)
+
+    def testTranslate4(self):
+        self._testTranslate(win32api.NameUniqueId, win32api.NameFullyQualifiedDN)
+
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_sspi.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_sspi.py
new file mode 100644
index 0000000..e62d0528
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_sspi.py
@@ -0,0 +1,120 @@
+# Some tests of the win32security sspi functions.
+# Stolen from Roger's original test_sspi.c, a version of which is in "Demos"
+# See also the other SSPI demos.
+import win32security, sspi, sspicon, win32api
+import unittest
+
+class TestSSPI(unittest.TestCase):
+
+    def assertRaisesHRESULT(self, hr, func, *args):
+        try:
+            return func(*args)
+            raise RuntimeError, "expecting %s failure" % (hr,)
+        except win32security.error, (hr_got, func, msg):
+            self.failUnlessEqual(hr_got, hr)
+
+    def _doAuth(self, pkg_name):
+        sspiclient=sspi.ClientAuth(pkg_name,targetspn=win32api.GetUserName())
+        sspiserver=sspi.ServerAuth(pkg_name)
+
+        sec_buffer=None
+        err = 1
+        while err != 0:
+            err, sec_buffer = sspiclient.authorize(sec_buffer)
+            err, sec_buffer = sspiserver.authorize(sec_buffer)
+        return sspiclient, sspiserver
+
+    def _doTestImpersonate(self, pkg_name):
+        # Just for the sake of code exercising!
+        sspiclient, sspiserver = self._doAuth(pkg_name)
+        sspiserver.ctxt.ImpersonateSecurityContext()
+        sspiserver.ctxt.RevertSecurityContext()
+
+    def testImpersonateKerberos(self):
+        self._doTestImpersonate("Kerberos")
+
+    def testImpersonateNTLM(self):
+        self._doTestImpersonate("NTLM")
+
+    def _doTestEncrypt(self, pkg_name):
+
+        sspiclient, sspiserver = self._doAuth(pkg_name)
+
+        pkg_size_info=sspiclient.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES)
+        msg='some data to be encrypted ......'
+
+        trailersize=pkg_size_info['SecurityTrailer']
+        encbuf=win32security.PySecBufferDescType()
+        encbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA))
+        encbuf.append(win32security.PySecBufferType(trailersize, sspicon.SECBUFFER_TOKEN))
+        encbuf[0].Buffer=msg
+        sspiclient.ctxt.EncryptMessage(0,encbuf,1)
+        sspiserver.ctxt.DecryptMessage(encbuf,1)
+        self.failUnlessEqual(msg, encbuf[0].Buffer)
+        # and test the higher-level functions
+        data, sig = sspiclient.encrypt("hello")
+        self.assertEqual(sspiserver.decrypt(data, sig), "hello")
+
+        data, sig = sspiserver.encrypt("hello")
+        self.assertEqual(sspiclient.decrypt(data, sig), "hello")
+
+    def testEncryptNTLM(self):
+        self._doTestEncrypt("NTLM")
+    
+    def testEncryptKerberos(self):
+        self._doTestEncrypt("Kerberos")
+
+    def _doTestSign(self, pkg_name):
+
+        sspiclient, sspiserver = self._doAuth(pkg_name)
+
+        pkg_size_info=sspiclient.ctxt.QueryContextAttributes(sspicon.SECPKG_ATTR_SIZES)
+        msg='some data to be encrypted ......'
+        
+        sigsize=pkg_size_info['MaxSignature']
+        sigbuf=win32security.PySecBufferDescType()
+        sigbuf.append(win32security.PySecBufferType(len(msg), sspicon.SECBUFFER_DATA))
+        sigbuf.append(win32security.PySecBufferType(sigsize, sspicon.SECBUFFER_TOKEN))
+        sigbuf[0].Buffer=msg
+        sspiclient.ctxt.MakeSignature(0,sigbuf,0)
+        sspiserver.ctxt.VerifySignature(sigbuf,0)
+        # and test the higher-level functions
+        sspiclient.next_seq_num = 1
+        sspiserver.next_seq_num = 1
+        key = sspiclient.sign("hello")
+        sspiserver.verify("hello", key)
+        key = sspiclient.sign("hello")
+        self.assertRaisesHRESULT(sspicon.SEC_E_MESSAGE_ALTERED,
+                                 sspiserver.verify, "hellox", key)
+
+        # and the other way
+        key = sspiserver.sign("hello")
+        sspiclient.verify("hello", key)
+        key = sspiserver.sign("hello")
+        self.assertRaisesHRESULT(sspicon.SEC_E_MESSAGE_ALTERED,
+                                 sspiclient.verify, "hellox", key)
+
+    def testSignNTLM(self):
+        self._doTestSign("NTLM")
+    
+    def testSignKerberos(self):
+        self._doTestSign("Kerberos")
+
+    def testSequenceSign(self):
+        # Only Kerberos supports sequence detection.
+        sspiclient, sspiserver = self._doAuth("Kerberos")
+        key = sspiclient.sign("hello")
+        sspiclient.sign("hello")
+        self.assertRaisesHRESULT(sspicon.SEC_E_OUT_OF_SEQUENCE,
+                                 sspiserver.verify, 'hello', key)
+
+    def testSequenceEncrypt(self):
+        # Only Kerberos supports sequence detection.
+        sspiclient, sspiserver = self._doAuth("Kerberos")
+        blob, key = sspiclient.encrypt("hello",)
+        blob, key = sspiclient.encrypt("hello")
+        self.assertRaisesHRESULT(sspicon.SEC_E_OUT_OF_SEQUENCE,
+                                 sspiserver.decrypt, blob, key)
+
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32api.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32api.py
new file mode 100644
index 0000000..084ee8c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32api.py
@@ -0,0 +1,137 @@
+# General test module for win32api - please add some :)
+
+import unittest
+
+import win32api, win32con, win32event, winerror
+import sys, os
+
+class CurrentUserTestCase(unittest.TestCase):
+    def testGetCurrentUser(self):
+        name = "%s\\%s" % (win32api.GetDomainName(), win32api.GetUserName())
+        self.failUnless(name == win32api.GetUserNameEx(win32api.NameSamCompatible))
+
+class TestTime(unittest.TestCase):
+    def testTimezone(self):
+        # GetTimeZoneInformation
+        rc, tzinfo = win32api.GetTimeZoneInformation()
+        if rc == win32con.TIME_ZONE_ID_DAYLIGHT:
+            tz_str = tzinfo[4]
+            tz_time = tzinfo[5]
+        else:
+            tz_str = tzinfo[1]
+            tz_time = tzinfo[2]
+        # for the sake of code exercise but don't output
+        tz_str.encode()
+        tz_time.Format()
+    def TestDateFormat(self):
+        DATE_LONGDATE = 2
+        date_flags = DATE_LONGDATE
+        win32api.GetDateFormat(0, date_flags, None)
+        win32api.GetDateFormat(0, date_flags, 0)
+        win32api.GetDateFormat(0, date_flags, datetime.datetime.now())
+        win32api.GetDateFormat(0, date_flags, time.time())
+    def TestTimeFormat(self):
+        win32api.GetTimeFormat(0, 0, None)
+        win32api.GetTimeFormat(0, 0, 0)
+        win32api.GetTimeFormat(0, 0, datetime.datetime.now())
+        win32api.GetTimeFormat(0, 0, time.time())
+
+
+class Registry(unittest.TestCase):
+    key_name = r'PythonTestHarness\Whatever'
+    def test1(self):
+        # This used to leave a stale exception behind.
+        def reg_operation():
+            hkey = win32api.RegCreateKey(win32con.HKEY_CURRENT_USER, self.key_name)
+            x = 3/0 # or a statement like: raise 'error'
+        # do the test
+        try:
+            try:
+                try:
+                    reg_operation()
+                except:
+                    1/0 # Force exception
+            finally:
+                win32api.RegDeleteKey(win32con.HKEY_CURRENT_USER, self.key_name)
+        except ZeroDivisionError:
+            pass
+    def testNotifyChange(self):
+        def change():
+            hkey = win32api.RegCreateKey(win32con.HKEY_CURRENT_USER, self.key_name)
+            try:
+                win32api.RegSetValue(hkey, None, win32con.REG_SZ, "foo")
+            finally:
+                win32api.RegDeleteKey(win32con.HKEY_CURRENT_USER, self.key_name)
+
+        evt = win32event.CreateEvent(None,0,0,None)
+        ## REG_NOTIFY_CHANGE_LAST_SET - values
+        ## REG_CHANGE_NOTIFY_NAME - keys
+        ## REG_NOTIFY_CHANGE_SECURITY - security descriptor
+        ## REG_NOTIFY_CHANGE_ATTRIBUTES
+        win32api.RegNotifyChangeKeyValue(win32con.HKEY_CURRENT_USER,1,win32api.REG_NOTIFY_CHANGE_LAST_SET,evt,True)
+        ret_code=win32event.WaitForSingleObject(evt,0)
+        # Should be no change.
+        self.failUnless(ret_code==win32con.WAIT_TIMEOUT)
+        change()
+        # Our event should now be in a signalled state.
+        ret_code=win32event.WaitForSingleObject(evt,0)
+        self.failUnless(ret_code==win32con.WAIT_OBJECT_0)
+
+class FileNames(unittest.TestCase):
+    def testShortLongPathNames(self):
+        try:
+            me = __file__
+        except NameError:
+            me = sys.argv[0]
+        fname = os.path.abspath(me)
+        short_name = win32api.GetShortPathName(fname)
+        long_name = win32api.GetLongPathName(short_name)
+        self.failUnless(long_name==fname, \
+                        "Expected long name ('%s') to be original name ('%s')" % (long_name, fname))
+        self.failUnlessEqual(long_name, win32api.GetLongPathNameW(short_name))
+        long_name = win32api.GetLongPathNameW(short_name)
+        self.failUnless(type(long_name)==unicode, "GetLongPathNameW returned type '%s'" % (type(long_name),))
+        self.failUnless(long_name==fname, \
+                        "Expected long name ('%s') to be original name ('%s')" % (long_name, fname))
+
+    def testLongLongPathNames(self):
+        # We need filename where the FQN is > 256 - simplest way is to create a
+        # 250 character directory in the cwd.
+        import win32file
+        basename = "a" * 250
+        fname = "\\\\?\\" + os.path.join(os.getcwd(), basename)
+        try:
+            win32file.CreateDirectoryW(fname, None)
+        except win32api.error, details:
+            if details[0]!=winerror.ERROR_ALREADY_EXISTS:
+                raise
+        try:
+            # GetFileAttributes automatically calls GetFileAttributesW when
+            # passed unicode
+            try:
+                attr = win32api.GetFileAttributes(fname)
+            except win32api.error, details:
+                if details[0] != winerror.ERROR_FILENAME_EXCED_RANGE:
+                    raise
+        
+            attr = win32api.GetFileAttributes(unicode(fname))
+            self.failUnless(attr & win32con.FILE_ATTRIBUTE_DIRECTORY, attr)
+
+            long_name = win32api.GetLongPathNameW(fname)
+            self.failUnlessEqual(long_name, fname)
+        finally:
+            win32file.RemoveDirectory(fname)
+
+class FormatMessage(unittest.TestCase):
+    def test_FromString(self):
+        msg = "Hello %1, how are you %2?"
+        inserts = ["Mark", "today"]
+        result = win32api.FormatMessage(win32con.FORMAT_MESSAGE_FROM_STRING,
+                               msg, # source
+                               0, # ID
+                               0, # LangID
+                               inserts)
+        self.assertEqual(result, "Hello Mark, how are you today?")
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32crypt.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32crypt.py
new file mode 100644
index 0000000..c072a96
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32crypt.py
@@ -0,0 +1,30 @@
+# Test module for win32crypt
+
+import unittest
+import win32crypt
+
+class Crypt(unittest.TestCase):
+    def testSimple(self):
+        data = "My test data"
+        entropy = None
+        desc = "My description"
+        flags = 0
+        ps = None
+        blob = win32crypt.CryptProtectData(data, desc, entropy, None, ps, flags)
+        got_desc, got_data = win32crypt.CryptUnprotectData(blob, entropy, None, ps, flags)
+        self.failUnlessEqual(data, got_data)
+        self.failUnlessEqual(desc, got_desc)
+
+    def testEntropy(self):
+        data = "My test data"
+        entropy = "My test entropy"
+        desc = "My description"
+        flags = 0
+        ps = None
+        blob = win32crypt.CryptProtectData(data, desc, entropy, None, ps, flags)
+        got_desc, got_data = win32crypt.CryptUnprotectData(blob, entropy, None, ps, flags)
+        self.failUnlessEqual(data, got_data)
+        self.failUnlessEqual(desc, got_desc)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32event.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32event.py
new file mode 100644
index 0000000..74732c3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32event.py
@@ -0,0 +1,24 @@
+import unittest
+import win32event
+import time
+import os
+import sys
+
+class TestWaitableTimer(unittest.TestCase):
+    def testWaitableFire(self):
+        h = win32event.CreateWaitableTimer(None, 0, None)
+        dt = -160L # 160 ns.
+        win32event.SetWaitableTimer(h, dt, 0, None, None, 0)
+        rc = win32event.WaitForSingleObject(h, 1000)
+        self.failUnlessEqual(rc, win32event.WAIT_OBJECT_0)
+
+    def testWaitableTrigger(self):
+        h = win32event.CreateWaitableTimer(None, 0, None)
+        # for the sake of this, pass a long that doesn't fit in an int.
+        dt = -2000000000L
+        win32event.SetWaitableTimer(h, dt, 0, None, None, 0)
+        rc = win32event.WaitForSingleObject(h, 10) # 10 ms.
+        self.failUnlessEqual(rc, win32event.WAIT_TIMEOUT)
+
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32file.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32file.py
new file mode 100644
index 0000000..d9fbc130
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32file.py
@@ -0,0 +1,428 @@
+import unittest
+import win32api, win32file, win32pipe, pywintypes, winerror, win32event
+import win32con, ntsecuritycon
+import sys
+import os
+import tempfile
+import sets
+import threading
+import time
+import shutil
+
+class TestSimpleOps(unittest.TestCase):
+    # Basic synchronous win32file operations: create, write, read, truncate,
+    # file-pointer positioning, and the GetFileAttributes* family.
+
+    def testSimpleFiles(self):
+        # Write then re-read a small file via win32file handles; the data
+        # contains an embedded NUL to prove it is treated as binary.
+        try:
+            fd, filename = tempfile.mkstemp()
+        except AttributeError:
+            self.fail("This test requires Python 2.3 or later")
+        os.close(fd)
+        os.unlink(filename)
+        handle = win32file.CreateFile(filename, win32file.GENERIC_WRITE, 0, None, win32con.CREATE_NEW, 0, None)
+        test_data = "Hello\0there"
+        try:
+            win32file.WriteFile(handle, test_data)
+            handle.Close()
+            # Try and open for read
+            handle = win32file.CreateFile(filename, win32file.GENERIC_READ, 0, None, win32con.OPEN_EXISTING, 0, None)
+            rc, data = win32file.ReadFile(handle, 1024)
+            self.assertEquals(data, test_data)
+        finally:
+            handle.Close()
+            try:
+                os.unlink(filename)
+            except os.error:
+                pass
+
+    # A simple test using normal read/write operations.
+    def testMoreFiles(self):
+        # Create a file in the %TEMP% directory.
+        testName = os.path.join( win32api.GetTempPath(), "win32filetest.dat" )
+        desiredAccess = win32file.GENERIC_READ | win32file.GENERIC_WRITE
+        # Set a flag to delete the file automatically when it is closed.
+        fileFlags = win32file.FILE_FLAG_DELETE_ON_CLOSE
+        h = win32file.CreateFile( testName, desiredAccess, win32file.FILE_SHARE_READ, None, win32file.CREATE_ALWAYS, fileFlags, 0)
+    
+        # Write a known number of bytes to the file.
+        data = "z" * 1025
+    
+        win32file.WriteFile(h, data)
+    
+        self.failUnless(win32file.GetFileSize(h) == len(data), "WARNING: Written file does not have the same size as the length of the data in it!")
+    
+        # Ensure we can read the data back.
+        win32file.SetFilePointer(h, 0, win32file.FILE_BEGIN)
+        hr, read_data = win32file.ReadFile(h, len(data)+10) # + 10 to get anything extra
+        self.failUnless(hr==0, "Readfile returned %d" % hr)
+
+        self.failUnless(read_data == data, "Read data is not what we wrote!")
+    
+        # Now truncate the file at 1/2 its existing size.
+        newSize = len(data)/2
+        win32file.SetFilePointer(h, newSize, win32file.FILE_BEGIN)
+        win32file.SetEndOfFile(h)
+        self.failUnless(win32file.GetFileSize(h) == newSize, "Truncated file does not have the expected size!")
+    
+        # GetFileAttributesEx/GetFileAttributesExW tests.
+        self.failUnless(win32file.GetFileAttributesEx(testName) == win32file.GetFileAttributesExW(testName),
+                        "ERROR: Expected GetFileAttributesEx and GetFileAttributesExW to return the same data")
+    
+        attr, ct, at, wt, size = win32file.GetFileAttributesEx(testName)
+        self.failUnless(size==newSize, 
+                        "Expected GetFileAttributesEx to return the same size as GetFileSize()")
+        self.failUnless(attr==win32file.GetFileAttributes(testName), 
+                        "Expected GetFileAttributesEx to return the same attributes as GetFileAttributes")
+    
+        h = None # Close the file by removing the last reference to the handle!
+
+        # FILE_FLAG_DELETE_ON_CLOSE means the file should now be gone.
+        self.failUnless(not os.path.isfile(testName), "After closing the file, it still exists!")
+
+    def testFilePointer(self):
+        # via [ 979270 ] SetFilePointer fails with negative offset
+
+        # Create a file in the %TEMP% directory.
+        filename = os.path.join( win32api.GetTempPath(), "win32filetest.dat" )
+
+        f = win32file.CreateFile(filename,
+                                win32file.GENERIC_READ|win32file.GENERIC_WRITE,
+                                0,
+                                None,
+                                win32file.CREATE_ALWAYS,
+                                win32file.FILE_ATTRIBUTE_NORMAL,
+                                0)
+        try:
+            #Write some data
+            data = 'Some data'
+            (res, written) = win32file.WriteFile(f, data)
+            
+            self.failIf(res)
+            self.assertEqual(written, len(data))
+            
+            #Move at the beginning and read the data
+            win32file.SetFilePointer(f, 0, win32file.FILE_BEGIN)
+            (res, s) = win32file.ReadFile(f, len(data))
+            
+            self.failIf(res)
+            self.assertEqual(s, data)
+            
+            #Move at the end and read the data
+            # (negative offset relative to FILE_END was the reported bug)
+            win32file.SetFilePointer(f, -len(data), win32file.FILE_END)
+            (res, s) = win32file.ReadFile(f, len(data))
+            
+            self.failIf(res)
+            self.failUnlessEqual(s, data)
+        finally:
+            f.Close()
+            os.unlink(filename)
+
+class TestOverlapped(unittest.TestCase):
+    # Overlapped (asynchronous) I/O and I/O-completion-port tests.
+
+    def testSimpleOverlapped(self):
+        # Write a large file with overlapped WriteFile calls (waiting on the
+        # OVERLAPPED event after each), then read it back overlapped.
+        # Create a file in the %TEMP% directory.
+        import win32event
+        testName = os.path.join( win32api.GetTempPath(), "win32filetest.dat" )
+        desiredAccess = win32file.GENERIC_WRITE
+        overlapped = pywintypes.OVERLAPPED()
+        evt = win32event.CreateEvent(None, 0, 0, None)
+        overlapped.hEvent = evt
+        # Create the file and write shit-loads of data to it.
+        h = win32file.CreateFile( testName, desiredAccess, 0, None, win32file.CREATE_ALWAYS, 0, 0)
+        chunk_data = "z" * 0x8000
+        num_loops = 512
+        expected_size = num_loops * len(chunk_data)
+        for i in range(num_loops):
+            win32file.WriteFile(h, chunk_data, overlapped)
+            win32event.WaitForSingleObject(overlapped.hEvent, win32event.INFINITE)
+            overlapped.Offset = overlapped.Offset + len(chunk_data)
+        h.Close()
+        # Now read the data back overlapped
+        overlapped = pywintypes.OVERLAPPED()
+        evt = win32event.CreateEvent(None, 0, 0, None)
+        overlapped.hEvent = evt
+        desiredAccess = win32file.GENERIC_READ
+        h = win32file.CreateFile( testName, desiredAccess, 0, None, win32file.OPEN_EXISTING, 0, 0)
+        buffer = win32file.AllocateReadBuffer(0xFFFF)
+        while 1:
+            try:
+                hr, data = win32file.ReadFile(h, buffer, overlapped)
+                win32event.WaitForSingleObject(overlapped.hEvent, win32event.INFINITE)
+                overlapped.Offset = overlapped.Offset + len(data)
+                if not data is buffer:
+                    self.fail("Unexpected result from ReadFile - should be the same buffer we passed it")
+            except win32api.error:
+                # Reading past end-of-file (or any other error) ends the loop.
+                break
+        h.Close()
+
+    def testCompletionPortsMultiple(self):
+        # Mainly checking that we can "associate" an existing handle.  This
+        # failed in build 203.
+        import socket
+
+        ioport = win32file.CreateIoCompletionPort(win32file.INVALID_HANDLE_VALUE,
+                                                  0, 0, 0)
+        socks = []
+        for PORT in range(9123, 9125):
+            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+            sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            sock.bind(('', PORT))
+            sock.listen(1)
+            socks.append(sock)
+            # Associating an existing handle must return the same port object.
+            new = win32file.CreateIoCompletionPort(sock.fileno(), ioport, PORT, 0)
+            assert new is ioport
+        for s in socks:
+            s.close()
+        hv = int(ioport)
+        ioport = new = None
+        # The handle itself should be closed now (unless we leak references!)
+        # Check that.
+        try:
+            win32file.CloseHandle(hv)
+            raise RuntimeError, "Expected close to fail!"
+        except win32file.error, (hr, func, msg):
+            self.failUnlessEqual(hr, winerror.ERROR_INVALID_HANDLE)
+
+    def testCompletionPortsQueued(self):
+        # A manually-posted completion packet must come back with the same
+        # OVERLAPPED object (and its attached Python payload).
+        class Foo: pass
+        io_req_port = win32file.CreateIoCompletionPort(-1, None, 0, 0)
+        overlapped = pywintypes.OVERLAPPED()
+        overlapped.object = Foo()
+        win32file.PostQueuedCompletionStatus(io_req_port, 0, 99, overlapped)
+        errCode, bytes, key, overlapped = \
+                win32file.GetQueuedCompletionStatus(io_req_port, win32event.INFINITE)
+        self.failUnlessEqual(errCode, 0)
+        self.failUnless(isinstance(overlapped.object, Foo))
+
+    def _IOCPServerThread(self, handle, port, drop_overlapped_reference):
+        # Server side of the named-pipe IOCP test: accept a connection and
+        # echo the client's message back.
+        overlapped = pywintypes.OVERLAPPED()
+        win32pipe.ConnectNamedPipe(handle, overlapped)
+        if drop_overlapped_reference:
+            # Be naughty - the overlapped object is now dead, but
+            # GetQueuedCompletionStatus will still find it.  Our check of
+            # reference counting should catch that error.
+            overlapped = None
+            self.failUnlessRaises(RuntimeError,
+                                  win32file.GetQueuedCompletionStatus, port, -1)
+            handle.Close()
+            return
+
+        result = win32file.GetQueuedCompletionStatus(port, -1)
+        ol2 = result[-1]
+        self.failUnless(ol2 is overlapped)
+        data = win32file.ReadFile(handle, 512)[1]
+        win32file.WriteFile(handle, data)
+
+    def testCompletionPortsNonQueued(self, test_overlapped_death = 0):
+        # In 204 we had a reference count bug when OVERLAPPED objects were
+        # associated with a completion port other than via
+        # PostQueuedCompletionStatus.  This test is based on the reproduction
+        # reported with that bug.
+        # Create the pipe.
+        BUFSIZE = 512
+        pipe_name = r"\\.\pipe\pywin32_test_pipe"
+        handle = win32pipe.CreateNamedPipe(pipe_name,
+                          win32pipe.PIPE_ACCESS_DUPLEX|
+                          win32file.FILE_FLAG_OVERLAPPED,
+                          win32pipe.PIPE_TYPE_MESSAGE|
+                          win32pipe.PIPE_READMODE_MESSAGE|
+                          win32pipe.PIPE_WAIT,
+                          1, BUFSIZE, BUFSIZE,
+                          win32pipe.NMPWAIT_WAIT_FOREVER,
+                          None)
+        # Create an IOCP and associate it with the handle.        
+        port = win32file.CreateIoCompletionPort(-1, 0, 0, 0)
+        win32file.CreateIoCompletionPort(handle, port, 1, 0)
+
+        thread = threading.Thread(target=self._IOCPServerThread, args=(handle,port, test_overlapped_death))
+        thread.start()
+        try:
+            time.sleep(0.1) # let thread do its thing.
+            try:
+                win32pipe.CallNamedPipe(r"\\.\pipe\pywin32_test_pipe", "Hello there", BUFSIZE, 0)
+            except win32pipe.error:
+                # Testing for overlapped death causes this
+                if not test_overlapped_death:
+                    raise
+        finally:
+            handle.Close()
+            thread.join()
+
+    def testCompletionPortsNonQueuedBadReference(self):
+        # Same as above, but deliberately drop the OVERLAPPED reference.
+        self.testCompletionPortsNonQueued(True)
+
+
+class TestFindFiles(unittest.TestCase):
+    # FindFilesW / FindFilesIterator tests.
+
+    def testIter(self):
+        # The iterator must yield exactly the same entries as the one-shot
+        # FindFilesW call (order may differ, hence sets).
+        dir = os.path.join(os.getcwd(), "*")
+        files = win32file.FindFilesW(dir)
+        set1 = sets.Set()
+        set1.update(files)
+        set2 = sets.Set()
+        for file in win32file.FindFilesIterator(dir):
+            set2.add(file)
+        assert len(set2) > 5, "This directory has less than 5 files!?"
+        self.failUnlessEqual(set1, set2)
+
+    def testBadDir(self):
+        # A non-existent directory should raise immediately on iterator
+        # construction.
+        dir = os.path.join(os.getcwd(), "a dir that doesnt exist", "*")
+        self.assertRaises(win32file.error, win32file.FindFilesIterator, dir)
+
+    def testEmptySpec(self):
+        # A wildcard matching nothing yields an empty (but valid) iterator.
+        spec = os.path.join(os.getcwd(), "*.foo_bar")
+        num = 0
+        for i in win32file.FindFilesIterator(spec):
+            num += 1
+        self.failUnlessEqual(0, num)
+
+    def testEmptyDir(self):
+        test_path = os.path.join(win32api.GetTempPath(), "win32file_test_directory")
+        try:
+            # Note: previously used shutil.rmtree, but when looking for
+            # reference count leaks, that function showed leaks!  os.rmdir
+            # doesn't have that problem.
+            os.rmdir(test_path)
+        except os.error:
+            pass
+        os.mkdir(test_path)
+        try:
+            num = 0
+            for i in win32file.FindFilesIterator(os.path.join(test_path, "*")):
+                num += 1
+            # Expecting "." and ".." only
+            self.failUnlessEqual(2, num)
+        finally:
+            os.rmdir(test_path)
+
+class TestDirectoryChanges(unittest.TestCase):
+    # ReadDirectoryChangesW tests: one watcher thread per temp directory,
+    # using overlapped I/O so the handle can be closed to kill the thread.
+    num_test_dirs = 1
+    def setUp(self):
+        self.watcher_threads = []
+        self.watcher_thread_changes = []
+        self.dir_names = []
+        self.dir_handles = []
+        for i in range(self.num_test_dirs):
+            td = tempfile.mktemp("-test-directory-changes-%d" % i)
+            os.mkdir(td)
+            self.dir_names.append(td)
+            # FILE_FLAG_BACKUP_SEMANTICS is required to open a directory;
+            # FILE_FLAG_OVERLAPPED enables the async watcher below.
+            hdir = win32file.CreateFile(td, 
+                                        ntsecuritycon.FILE_LIST_DIRECTORY,
+                                        win32con.FILE_SHARE_READ,
+                                        None, # security desc
+                                        win32con.OPEN_EXISTING,
+                                        win32con.FILE_FLAG_BACKUP_SEMANTICS |
+                                        win32con.FILE_FLAG_OVERLAPPED,
+                                        None)
+            self.dir_handles.append(hdir)
+
+            changes = []
+            t = threading.Thread(target=self._watcherThreadOverlapped,
+                                 args=(td, hdir, changes))
+            t.start()
+            self.watcher_threads.append(t)
+            self.watcher_thread_changes.append(changes)
+
+    def _watcherThread(self, dn, dh, changes):
+        # A synchronous version:
+        # XXX - not used - I was having a whole lot of problems trying to
+        # get this to work.  Specifically:
+        # * ReadDirectoryChangesW without an OVERLAPPED blocks infinitely.
+        # * If another thread attempts to close the handle while
+        #   ReadDirectoryChangesW is waiting on it, the ::CloseHandle() method
+        #   blocks (which has nothing to do with the GIL - it is correctly
+        #   managed)
+        # Which ends up with no way to kill the thread!
+        # NOTE(review): dead code - the string-exception handler and the
+        # self-extending 'changes' below are debugging leftovers.
+        flags = win32con.FILE_NOTIFY_CHANGE_FILE_NAME
+        while 1:
+            try:
+                print "waiting", dh
+                changes = win32file.ReadDirectoryChangesW(dh,
+                                                          8192,
+                                                          False, #sub-tree
+                                                          flags)
+                print "got", changes
+            except 'xx':
+                xx
+            changes.extend(changes)
+
+    def _watcherThreadOverlapped(self, dn, dh, changes):
+        # Asynchronous watcher: issues overlapped ReadDirectoryChangesW calls
+        # and appends (action, filename) tuples to 'changes'.  Exits cleanly
+        # when the directory handle is closed by tearDown.
+        flags = win32con.FILE_NOTIFY_CHANGE_FILE_NAME
+        buf = win32file.AllocateReadBuffer(8192)
+        overlapped = pywintypes.OVERLAPPED()
+        overlapped.hEvent = win32event.CreateEvent(None, 0, 0, None)
+        while 1:
+            win32file.ReadDirectoryChangesW(dh,
+                                            buf,
+                                            False, #sub-tree
+                                            flags,
+                                            overlapped)
+            # Wait for our event, or for 5 seconds.
+            rc = win32event.WaitForSingleObject(overlapped.hEvent, 5000)
+            if rc == win32event.WAIT_OBJECT_0:
+                # got some data!  Must use GetOverlappedResult to find out
+                # how much is valid!  0 generally means the handle has
+                # been closed.  Blocking is OK here, as the event has
+                # already been set.
+                nbytes = win32file.GetOverlappedResult(dh, overlapped, True)
+                if nbytes:
+                    bits = win32file.FILE_NOTIFY_INFORMATION(buf, 8192)
+                    changes.extend(bits)
+                else:
+                    # This is "normal" exit - our 'tearDown' closes the
+                    # handle.
+                    # print "looks like dir handle was closed!"
+                    return
+            else:
+                print "ERROR: Watcher thread timed-out!"
+                return # kill the thread!
+
+    def tearDown(self):
+        # be careful about raising errors at teardown!
+        for h in self.dir_handles:
+            # See comments in _watcherThread above - this appears to
+            # deadlock if a synchronous ReadDirectoryChangesW is waiting...
+            # (No such problems with an asynch ReadDirectoryChangesW)
+            h.Close()
+        for dn in self.dir_names:
+            try:
+                shutil.rmtree(dn)
+            except OSError:
+                print "FAILED to remove directory", dn
+
+        for t in self.watcher_threads:
+            # closing dir handle should have killed threads!
+            t.join(5)
+            if t.isAlive():
+                print "FAILED to wait for thread termination"
+
+    def stablize(self):
+        # Give the watcher threads a moment to catch up.
+        time.sleep(0.5)
+
+    def testSimple(self):
+        self.stablize()
+        for dn in self.dir_names:
+            fn = os.path.join(dn, "test_file")
+            open(fn, "w").close()
+
+        self.stablize()
+        changes = self.watcher_thread_changes[0]
+        # Action 1 == FILE_ACTION_ADDED.
+        self.failUnlessEqual(changes, [(1, "test_file")])
+
+class TestEncrypt(unittest.TestCase):
+    # EncryptFile/DecryptFile round-trip; tolerates non-NTFS volumes where
+    # encryption is unsupported (ERROR_ACCESS_DENIED).
+    def testEncrypt(self):
+        fname = tempfile.mktemp("win32file_test")
+        f = open(fname, "wb")
+        f.write("hello")
+        f.close()
+        f = None
+        try:
+            try:
+                win32file.EncryptFile(fname)
+            except win32file.error, details:
+                if details[0] != winerror.ERROR_ACCESS_DENIED:
+                    raise
+                print "It appears this is not NTFS - cant encrypt/decrypt"
+            win32file.DecryptFile(fname)
+        finally:
+            if f is not None:
+                f.close()
+            os.unlink(fname)
+
+# Run the tests when executed as a script.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32inet.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32inet.py
new file mode 100644
index 0000000..a4bb744
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32inet.py
@@ -0,0 +1,56 @@
+from win32inet import *
+from win32inetcon import *
+
+import unittest
+
+class CookieTests(unittest.TestCase):
+    # InternetSetCookie/InternetGetCookie round-trip.
+    def testCookies(self):
+        data = "TestData=Test"
+        InternetSetCookie("http://www.python.org", None, data)
+        got = InternetGetCookie("http://www.python.org", None)
+        self.assertEqual(got, data)
+
+class UrlTests(unittest.TestCase):
+    # InternetCanonicalizeUrl tests - no network access needed.
+    def testSimpleCanonicalize(self):
+        # Spaces are percent-encoded.
+        ret = InternetCanonicalizeUrl("foo bar")
+        self.assertEqual(ret, "foo%20bar")
+
+    def testLongCanonicalize(self):
+        # a 4k URL causes the underlying API to request a bigger buffer"
+        big = "x" * 2048
+        ret = InternetCanonicalizeUrl(big + " " + big)
+        self.assertEqual(ret, big + "%20" + big)
+
+class TestNetwork(unittest.TestCase):
+    # Tests requiring live network access (python.org HTTP and FTP).
+    def setUp(self):
+        self.hi = InternetOpen("test", INTERNET_OPEN_TYPE_DIRECT, None, None, 0)
+    def tearDown(self):
+        self.hi.Close()
+    def testPythonDotOrg(self):
+        # Fetch the python.org front page in 1k chunks.
+        hdl = InternetOpenUrl(self.hi, "http://www.python.org", None,
+                              INTERNET_FLAG_EXISTING_CONNECT)
+        chunks = []
+        while 1:
+            chunk = InternetReadFile(hdl, 1024)
+            if not chunk:
+                break
+            chunks.append(chunk)
+        data = ''.join(chunks)
+        assert data.find("Python")>0, repr(data) # This must appear somewhere on the main page!
+
+    def testFtpCommand(self):
+        hcon = InternetConnect(self.hi, "ftp.python.org", INTERNET_INVALID_PORT_NUMBER,
+                               None, None, # username/password
+                               INTERNET_SERVICE_FTP, 0, 0)
+        try:
+            try:
+                hftp = FtpCommand(hcon, True, FTP_TRANSFER_TYPE_ASCII, 'NLST', 0)
+            except error:
+                print "Error info is", InternetGetLastResponseInfo()
+            # NOTE(review): if FtpCommand raised above, hftp is unbound here
+            # and the next line raises NameError rather than a clean failure
+            # - confirm intended behaviour.
+            InternetReadFile(hftp, 2048)
+            hftp.Close()
+        finally:
+            hcon.Close()
+
+# Run the tests when executed as a script.
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32net.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32net.py
new file mode 100644
index 0000000..ac008bf1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32net.py
@@ -0,0 +1,21 @@
+import unittest
+import win32net, win32netcon
+
+class TestCase(unittest.TestCase):
+    # NetGroupEnum resume-handle tests against the local machine
+    # (server=None).
+    def testGroupsGoodResume(self, server=None):
+        # Enumerate all groups, following the resume handle until exhausted.
+        res=0
+        level=0 #setting it to 1 will provide more detailed info
+        while True:
+            (user_list,total,res)=win32net.NetGroupEnum(server,level,res)
+            for i in user_list:
+                pass
+            if not res:
+                break
+
+    def testGroupsBadResume(self, server=None):
+        res=1 # Can't pass this first time round.
+        self.assertRaises(win32net.error, win32net.NetGroupEnum, server,0,res)
+
+
+# Run the tests when executed as a script.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32pipe.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32pipe.py
new file mode 100644
index 0000000..e3acad02
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32pipe.py
@@ -0,0 +1,45 @@
+import unittest
+
+
+import win32pipe
+import win32file
+import pywintypes
+import winerror
+import threading
+
+class CurrentUserTestCase(unittest.TestCase):
+    # Named-pipe round-trip: a worker thread calls CallNamedPipe while the
+    # main thread serves the pipe and echoes a swapped message back.
+    pipename = "\\\\.\\pipe\\python_test_pipe"
+    def _workerThread(self, e):
+        # Client side: send "foo\0bar", expect "bar\0foo" back.
+        data = win32pipe.CallNamedPipe(self.pipename,"foo\0bar", 1024, win32pipe.NMPWAIT_WAIT_FOREVER)
+        e.set()
+        self.failUnless(data == "bar\0foo")
+
+    def testCallNamedPipe(self):
+        openMode = win32pipe.PIPE_ACCESS_DUPLEX
+        pipeMode = win32pipe.PIPE_TYPE_MESSAGE  | win32pipe.PIPE_WAIT
+    
+        # NULL DACL - grant everyone access to the pipe.
+        sa = pywintypes.SECURITY_ATTRIBUTES()
+        sa.SetSecurityDescriptorDacl ( 1, None, 0 )
+    
+        pipeHandle = win32pipe.CreateNamedPipe(self.pipename,
+                                                openMode,
+                                                pipeMode,
+                                                win32pipe.PIPE_UNLIMITED_INSTANCES,
+                                                0,
+                                                0,
+                                                2000,
+                                                sa)
+    
+        event = threading.Event()
+        threading.Thread(target=self._workerThread, args=(event,)).start()
+    
+        # Server side: write first (the reply), then read the request.
+        hr = win32pipe.ConnectNamedPipe(pipeHandle)
+        win32file.WriteFile(pipeHandle, "bar\0foo")
+        hr, got = win32file.ReadFile(pipeHandle, 100)
+        self.failUnless(got == "foo\0bar")
+        event.wait(5)
+        if not event.isSet():
+            self.fail("Failed to wait for event!")
+
+# Run the tests when executed as a script.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32rcparser.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32rcparser.py
new file mode 100644
index 0000000..2af1217d2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32rcparser.py
@@ -0,0 +1,49 @@
+import sys, os
+import unittest
+import win32rcparser
+import win32con
+
+class TestParser(unittest.TestCase):
+    # win32rcparser tests against the checked-in win32rcparser/test.rc.
+    def setUp(self):
+        rc_file = os.path.join(os.path.dirname(__file__), "win32rcparser", "test.rc")
+        self.resources = win32rcparser.Parse(rc_file)
+
+    def testStrings(self):
+        # Quoting/escaping in the STRINGTABLE must be parsed correctly.
+        for sid, expected in [
+            ("IDS_TEST_STRING4", "Test 'single quoted' string"),
+            ("IDS_TEST_STRING1", 'Test "quoted" string'),
+            ("IDS_TEST_STRING3", 'String with single " quote'),
+            ("IDS_TEST_STRING2", 'Test string'),
+                             ]:
+            got = self.resources.stringTable[sid].value
+            self.assertEqual(got, expected)
+
+    def testStandardIds(self):
+        # Well-known control IDs map both ways (id <-> name).
+        for idc in "IDOK IDCANCEL".split():
+            correct = getattr(win32con, idc)
+            self.assertEqual(self.resources.names[correct], idc)
+            self.assertEqual(self.resources.ids[idc], correct)
+
+    def testTabStop(self):
+        d = self.resources.dialogs["IDD_TEST_DIALOG2"]
+        tabstop_names = ["IDC_EDIT1", "IDOK"] # should have WS_TABSTOP
+        tabstop_ids = [self.resources.ids[name] for name in tabstop_names]
+        notabstop_names = ["IDC_EDIT2"] # should NOT have WS_TABSTOP
+        notabstop_ids = [self.resources.ids[name] for name in notabstop_names]
+        num_ok = 0
+        for cdef in d[1:]: # skip dlgdef
+            #print cdef
+            cid = cdef[2]
+            style = cdef[-2]
+            styleex = cdef[-1]
+            if cid in tabstop_ids:
+                self.failUnlessEqual(style & win32con.WS_TABSTOP, win32con.WS_TABSTOP)
+                num_ok += 1
+            elif cid in notabstop_ids:
+                self.failUnlessEqual(style & win32con.WS_TABSTOP, 0)
+                num_ok += 1
+        # Every control we care about must have been seen exactly once.
+        self.failUnlessEqual(num_ok, len(tabstop_ids) + len(notabstop_ids))
+
+# Run the tests when executed as a script.
+if __name__=='__main__':
+    unittest.main()
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32timezone.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32timezone.py
new file mode 100644
index 0000000..edb44d3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32timezone.py
@@ -0,0 +1,11 @@
+# Test module for win32timezone
+
+import unittest, win32timezone, doctest
+
+class Win32TimeZoneTest(unittest.TestCase):
+    # Run win32timezone's embedded doctests and fail if any doctest fails.
+    def testWin32TZ(self):
+        failed, total = doctest.testmod( win32timezone, verbose = False )
+        self.failIf( failed )
+
+# Run the tests when executed as a script.
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32trace.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32trace.py
new file mode 100644
index 0000000..f7ab2adf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32trace.py
@@ -0,0 +1,331 @@
+import unittest
+import win32trace
+import threading
+import time
+import os
+import sys
+
+if __name__=='__main__':
+    this_file = sys.argv[0]
+else:
+    this_file = __file__
+
+def CheckNoOtherReaders():
+    win32trace.write("Hi")
+    time.sleep(0.05)
+    if win32trace.read() != "Hi":
+        # Reset everything so the following tests also fail with this error!
+        win32trace.TermRead()
+        win32trace.TermWrite()
+        raise RuntimeError, "An existing win32trace reader appears to be " \
+                            "running - please stop this process and try again"
+
+class TestInitOps(unittest.TestCase):
+    def setUp(self):
+        # clear old data
+        win32trace.InitRead()
+        win32trace.read()
+        win32trace.TermRead()
+
+    def tearDown(self):
+        try:
+            win32trace.TermRead()
+        except win32trace.error:
+            pass
+        try:
+            win32trace.TermWrite()
+        except win32trace.error:
+            pass
+        
+    def testInitTermRead(self):
+        self.assertRaises(win32trace.error, win32trace.read)
+        win32trace.InitRead()
+        result = win32trace.read()
+        self.assertEquals(result, '')
+        win32trace.TermRead()
+        self.assertRaises(win32trace.error, win32trace.read)
+
+        win32trace.InitRead()
+        self.assertRaises(win32trace.error, win32trace.InitRead)
+        win32trace.InitWrite()
+        self.assertRaises(win32trace.error, win32trace.InitWrite)
+        win32trace.TermWrite()
+        win32trace.TermRead()
+
+    def testInitTermWrite(self):
+        self.assertRaises(win32trace.error, win32trace.write, 'Hei')
+        win32trace.InitWrite()
+        win32trace.write('Johan Galtung')
+        win32trace.TermWrite()
+        self.assertRaises(win32trace.error, win32trace.write, 'Hei')
+
+    def testTermSematics(self):
+        win32trace.InitWrite()
+        win32trace.write('Ta da')
+
+        # if both Write and Read are terminated at the same time,
+        # we lose the data as the win32 object is closed.  Note that
+        # if another writer is running, we do *not* lose the data - so
+        # test for either the correct data or an empty string
+        win32trace.TermWrite()
+        win32trace.InitRead()
+        self.failUnless(win32trace.read() in ['Ta da', ''])
+        win32trace.TermRead()
+
+        # we keep the data because we init read before terminating write
+        win32trace.InitWrite()
+        win32trace.write('Ta da')
+        win32trace.InitRead()
+        win32trace.TermWrite()
+        self.assertEquals('Ta da', win32trace.read())
+        win32trace.TermRead()
+
+
+class BasicSetupTearDown(unittest.TestCase):
+    def setUp(self):
+        win32trace.InitRead()
+        # If any other writers are running (even if not actively writing),
+        # terminating the module will *not* close the handle, meaning old data
+        # will remain. This can cause other tests to fail.
+        win32trace.read()
+        win32trace.InitWrite()
+
+    def tearDown(self):
+        win32trace.TermWrite()
+        win32trace.TermRead()
+    
+
+class TestModuleOps(BasicSetupTearDown):        
+    def testRoundTrip(self):
+        win32trace.write('Syver Enstad')
+        syverEnstad = win32trace.read()
+        self.assertEquals('Syver Enstad', syverEnstad)
+
+    def testBlockingRead(self):
+        win32trace.write('Syver Enstad')
+        self.assertEquals('Syver Enstad', win32trace.blockingread())
+
+    def testFlush(self):
+        win32trace.flush()
+
+
+class TestTraceObjectOps(BasicSetupTearDown):
+    def testInit(self):
+        win32trace.TermRead()
+        win32trace.TermWrite()
+        traceObject = win32trace.GetTracer()
+        self.assertRaises(win32trace.error, traceObject.read)
+        self.assertRaises(win32trace.error, traceObject.write, '')
+        win32trace.InitRead()
+        win32trace.InitWrite()
+        self.assertEquals('', traceObject.read())
+        traceObject.write('Syver')
+
+    def testFlush(self):
+        traceObject = win32trace.GetTracer()
+        traceObject.flush()
+
+    def testIsatty(self):
+        tracer = win32trace.GetTracer()
+        assert tracer.isatty() == False
+        
+
+    def testRoundTrip(self):
+        traceObject = win32trace.GetTracer()
+        traceObject.write('Syver Enstad')
+        self.assertEquals('Syver Enstad', traceObject.read())
+
+class WriterThread(threading.Thread):
+    def run(self):
+        self.writeCount = 0
+        for each in range(self.BucketCount):
+            win32trace.write(str(each))
+        self.writeCount = self.BucketCount
+
+    def verifyWritten(self):
+        return self.writeCount == self.BucketCount
+
+class TestMultipleThreadsWriting(unittest.TestCase):
+    # FullBucket is the thread count
+    FullBucket = 50
+    BucketCount = 9 # buckets must be a single digit number (i.e. less than 10)
+    def setUp(self):
+        WriterThread.BucketCount = self.BucketCount        
+        win32trace.InitRead()
+        win32trace.read() # clear any old data.
+        win32trace.InitWrite()
+        CheckNoOtherReaders()
+        self.threads = [WriterThread() for each in range(self.FullBucket)]
+        self.buckets = range(self.BucketCount)
+        for each in self.buckets:
+            self.buckets[each] = 0
+
+    def tearDown(self):
+        win32trace.TermRead()
+        win32trace.TermWrite()
+
+    def areBucketsFull(self):
+        bucketsAreFull = True
+        for each in self.buckets:
+            assert each <= self.FullBucket, each
+            if each != self.FullBucket:
+                bucketsAreFull = False
+                break
+        return bucketsAreFull
+        
+
+    def read(self):
+        while 1:
+            readString = win32trace.blockingread()
+            for ch in readString:
+                integer = int(ch)
+                count = self.buckets[integer]
+                assert count != -1
+                self.buckets[integer] = count + 1
+                if self.buckets[integer] == self.FullBucket:
+                    if self.areBucketsFull():
+                        return
+                        
+    def testThreads(self):
+        for each in self.threads:
+            each.start()
+        self.read()
+        for each in self.threads:
+            each.join()
+        for each in self.threads:
+            assert each.verifyWritten()
+        assert self.areBucketsFull()
+
+class TestHugeChunks(unittest.TestCase):
+    # BiggestChunk is the size where we stop stressing the writer
+    BiggestChunk = 2**16 # 64k should do it.
+    def setUp(self):
+        win32trace.InitRead()
+        win32trace.read() # clear any old data
+        win32trace.InitWrite()
+    def testHugeChunks(self):
+        data = "*" * 1023 + "\n"
+        while len(data) <= self.BiggestChunk:
+            win32trace.write(data)
+            data = data + data
+        # If we made it here, we passed.
+
+    def tearDown(self):
+        win32trace.TermRead()
+        win32trace.TermWrite()
+
+import win32event
+import win32process
+
+class TraceWriteProcess:
+    def __init__(self, threadCount):
+        self.exitCode = -1
+        self.threadCount = threadCount
+        
+    def start(self):
+        procHandle, threadHandle, procId, threadId  = win32process.CreateProcess(
+            None, # appName
+            'python.exe "%s" /run_test_process %s %s' % (this_file,
+                                                         self.BucketCount,
+                                                         self.threadCount),
+            None, # process security
+            None, # thread security
+            0, # inherit handles
+            win32process.NORMAL_PRIORITY_CLASS,
+            None, # new environment
+            None, # Current directory
+            win32process.STARTUPINFO(), # startup info
+            )
+        self.processHandle = procHandle
+        
+    def join(self):
+        win32event.WaitForSingleObject(self.processHandle,
+                                       win32event.INFINITE)
+        self.exitCode = win32process.GetExitCodeProcess(self.processHandle)
+
+    def verifyWritten(self):
+        return self.exitCode == 0
+
+
+class TestOutofProcess(unittest.TestCase):
+    BucketCount = 9
+    FullBucket = 50
+    def setUp(self):
+        win32trace.InitRead()
+        TraceWriteProcess.BucketCount = self.BucketCount
+        self.setUpWriters()
+        self.buckets = range(self.BucketCount)
+        for each in self.buckets:
+            self.buckets[each] = 0
+
+    def tearDown(self):
+        win32trace.TermRead()
+
+
+    def setUpWriters(self):
+        self.processes = []
+        # 5 processes, quot threads in each process
+        quot, remainder = divmod(self.FullBucket, 5)
+        for each in range(5):
+            self.processes.append(TraceWriteProcess(quot))
+        if remainder:
+            self.processes.append(TraceWriteProcess(remainder))
+            
+    def areBucketsFull(self):
+        bucketsAreFull = True
+        for each in self.buckets:
+            assert each <= self.FullBucket, each
+            if each != self.FullBucket:
+                bucketsAreFull = False
+                break
+        return bucketsAreFull
+        
+    def read(self):
+        while 1:
+            readString = win32trace.blockingread()
+            for ch in readString:
+                integer = int(ch)
+                count = self.buckets[integer]
+                assert count != -1
+                self.buckets[integer] = count + 1
+                if self.buckets[integer] == self.FullBucket:
+                    if self.areBucketsFull():
+                        return
+                        
+    def testProcesses(self):
+        for each in self.processes:
+            each.start()
+        self.read()
+        for each in self.processes:
+            each.join()
+        for each in self.processes:
+            assert each.verifyWritten()
+        assert self.areBucketsFull()    
+
+def _RunAsTestProcess():
+    # Run as an external process by the main tests.
+    WriterThread.BucketCount = int(sys.argv[2])
+    threadCount = int(sys.argv[3])
+    threads = [WriterThread() for each in range(threadCount)]
+    win32trace.InitWrite()
+    for thread in threads:
+        thread.start()
+    for thread in threads:
+        thread.join()
+    for thread in threads:
+        if not thread.verifyWritten():
+            sys.exit(-1)
+    
+if __name__ == '__main__':
+    if sys.argv[1:2]==["/run_test_process"]:
+        _RunAsTestProcess()
+        sys.exit(0)
+    # If some other win32traceutil reader is running, these tests fail
+    # badly (as the other reader sometimes sees the output!)
+    win32trace.InitRead()
+    win32trace.InitWrite()
+    CheckNoOtherReaders()
+    # reset state so test env is back to normal
+    win32trace.TermRead()
+    win32trace.TermWrite()
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32wnet.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32wnet.py
new file mode 100644
index 0000000..2e78ae5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/test_win32wnet.py
@@ -0,0 +1,10 @@
+import unittest
+import win32wnet, win32api
+
+class TestCase(unittest.TestCase):
+    def testGetUser(self):
+        self.assertEquals(win32api.GetUserName(), win32wnet.WNetGetUser())
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/testall.py b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/testall.py
new file mode 100644
index 0000000..787ed0a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/testall.py
@@ -0,0 +1,51 @@
+import sys, os
+import unittest
+
+def import_all():
+    # Some hacks for import order - dde depends on win32ui
+    import win32ui
+    
+    import win32api
+    dir = os.path.dirname(win32api.__file__)
+    num = 0
+    is_debug = os.path.basename(win32api.__file__).endswith("_d")
+    for name in os.listdir(dir):
+        base, ext = os.path.splitext(name)
+        if (ext==".pyd") and \
+           name != "_winxptheme.pyd" and \
+           (is_debug and base.endswith("_d") or \
+           not is_debug and not base.endswith("_d")):
+            try:
+                __import__(base)
+            except ImportError:
+                print "FAILED to import", name
+                raise
+            num += 1
+
+def suite():
+    # Loop over all .py files here, except me :)
+    try:
+        me = __file__
+    except NameError:
+        me = sys.argv[0]
+    me = os.path.abspath(me)
+    files = os.listdir(os.path.dirname(me))
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.FunctionTestCase(import_all))
+    for file in files:
+        base, ext = os.path.splitext(file)
+        if ext=='.py' and os.path.basename(me) != file:
+            mod = __import__(base)
+            if hasattr(mod, "suite"):
+                test = mod.suite()
+            else:
+                test = unittest.defaultTestLoader.loadTestsFromModule(mod)
+            suite.addTest(test)
+    return suite
+
+class CustomLoader(unittest.TestLoader):
+    def loadTestsFromModule(self, module):
+        return suite()
+
+if __name__=='__main__':
+    unittest.TestProgram(testLoader=CustomLoader())(argv=sys.argv)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/.cvsignore b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/.cvsignore
new file mode 100644
index 0000000..138fcda2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/.cvsignore
@@ -0,0 +1 @@
+*.aps

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/python.bmp b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/python.bmp
new file mode 100644
index 0000000..425d315
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/python.bmp
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/python.ico b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/python.ico
new file mode 100644
index 0000000..59104e7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/python.ico
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/test.h b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/test.h
new file mode 100644
index 0000000..f93becb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/test.h
@@ -0,0 +1,46 @@
+//{{NO_DEPENDENCIES}}
+// Microsoft Developer Studio generated include file.
+// Used by test.rc
+//
+#define IDS_TEST_STRING1                51
+#define IDS_TEST_STRING2                52
+#define IDS_TEST_STRING3                53
+#define IDS_TEST_STRING4                54
+#define IDS_TEST_STRING5                55
+#define IDS_TEST_STRING6                56
+#define IDS_TEST_STRING7                57
+#define IDD_TEST_DIALOG1                101
+#define IDD_TEST_DIALOG2                102
+#define IDB_PYTHON                      103
+#define IDI_PYTHON                      105
+#define IDD_TEST_DIALOG3                105
+#define IDC_EDIT1                       1000
+#define IDC_CHECK1                      1001
+#define IDC_EDIT2                       1001
+#define IDC_COMBO1                      1002
+#define IDC_SPIN1                       1003
+#define IDC_PROGRESS1                   1004
+#define IDC_SLIDER1                     1005
+#define IDC_LIST1                       1006
+#define IDC_TREE1                       1007
+#define IDC_TAB1                        1008
+#define IDC_ANIMATE1                    1009
+#define IDC_RICHEDIT1                   1010
+#define IDC_DATETIMEPICKER1             1011
+#define IDC_MONTHCALENDAR1              1012
+#define IDC_SCROLLBAR1                  1013
+#define IDC_SCROLLBAR2                  1014
+#define IDC_LIST2                       1015
+#define IDC_HELLO                       1016
+#define IDC_HELLO2                      1017
+
+// Next default values for new objects
+// 
+#ifdef APSTUDIO_INVOKED
+#ifndef APSTUDIO_READONLY_SYMBOLS
+#define _APS_NEXT_RESOURCE_VALUE        107
+#define _APS_NEXT_COMMAND_VALUE         40002
+#define _APS_NEXT_CONTROL_VALUE         1018
+#define _APS_NEXT_SYMED_VALUE           101
+#endif
+#endif
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/test.rc b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/test.rc
new file mode 100644
index 0000000..a406074
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/test/win32rcparser/test.rc
@@ -0,0 +1,216 @@
+//Microsoft Developer Studio generated resource script.

+//

+#include "test.h"

+

+#define APSTUDIO_READONLY_SYMBOLS

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 2 resource.

+//

+#include "afxres.h"

+

+/////////////////////////////////////////////////////////////////////////////

+#undef APSTUDIO_READONLY_SYMBOLS

+

+/////////////////////////////////////////////////////////////////////////////

+// English (Australia) resources

+

+#if !defined(AFX_RESOURCE_DLL) || defined(AFX_TARG_ENA)

+#ifdef _WIN32

+LANGUAGE LANG_ENGLISH, SUBLANG_ENGLISH_AUS

+#pragma code_page(1252)

+#endif //_WIN32

+

+#ifdef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// TEXTINCLUDE

+//

+

+1 TEXTINCLUDE DISCARDABLE 

+BEGIN

+    "test.h\0"

+END

+

+2 TEXTINCLUDE DISCARDABLE 

+BEGIN

+    "#include ""afxres.h""\r\n"

+    "\0"

+END

+

+3 TEXTINCLUDE DISCARDABLE 

+BEGIN

+    "\r\n"

+    "\0"

+END

+

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog

+//

+

+IDD_TEST_DIALOG1 DIALOG DISCARDABLE  0, 0, 186, 95

+STYLE DS_MODALFRAME | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Test Dialog"

+FONT 8, "MS Sans Serif"

+BEGIN

+    DEFPUSHBUTTON   "OK",IDOK,129,7,50,14

+    PUSHBUTTON      "Cancel",IDCANCEL,129,24,50,14

+    ICON            IDI_PYTHON,IDC_STATIC,142,47,21,20

+    LTEXT           "An icon",IDC_STATIC,140,70,34,9

+END

+

+IDD_TEST_DIALOG2 DIALOG DISCARDABLE  0, 0, 186, 95

+STYLE DS_MODALFRAME | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Test Dialog"

+FONT 8, "MS Sans Serif"

+BEGIN

+    DEFPUSHBUTTON   "OK",IDOK,129,7,50,14

+    PUSHBUTTON      "Cancel",IDCANCEL,129,24,50,14,NOT WS_TABSTOP

+    CONTROL         103,IDC_STATIC,"Static",SS_BITMAP,139,49,32,32

+    LTEXT           "A bitmap",IDC_STATIC,135,72,34,9

+    EDITTEXT        IDC_EDIT1,59,7,59,14,ES_AUTOHSCROLL

+    EDITTEXT        IDC_EDIT2,59,31,60,15,ES_AUTOHSCROLL | NOT WS_TABSTOP

+    LTEXT           "Tabstop",IDC_STATIC,7,9,43,10

+    LTEXT           "Not Tabstop",IDC_STATIC,7,33,43,10

+END

+

+IDD_TEST_DIALOG3 DIALOGEX 0, 0, 232, 310

+STYLE DS_MODALFRAME | WS_POPUP | WS_CAPTION | WS_SYSMENU

+CAPTION "Dialog"

+FONT 8, "MS Sans Serif", 0, 0, 0x1

+BEGIN

+    GROUPBOX        "Frame",IDC_STATIC,7,7,218,41

+    LTEXT           "Left Static",IDC_STATIC,16,17,73,11

+    EDITTEXT        IDC_EDIT1,103,15,112,12,ES_AUTOHSCROLL

+    LTEXT           "Right Static",IDC_STATIC,16,30,73,11,0,WS_EX_RIGHT

+    CONTROL         "",IDC_RICHEDIT1,"RICHEDIT",ES_AUTOHSCROLL | WS_BORDER | 

+                    WS_TABSTOP,103,31,113,14

+    CONTROL         "Check1",IDC_CHECK1,"Button",BS_AUTOCHECKBOX | 

+                    WS_TABSTOP,7,52,68,12

+    COMBOBOX        IDC_COMBO1,85,52,82,35,CBS_DROPDOWNLIST | CBS_SORT | 

+                    WS_VSCROLL | WS_TABSTOP

+    CONTROL         "Spin1",IDC_SPIN1,"msctls_updown32",UDS_ARROWKEYS,7,71,

+                    14,22

+    CONTROL         "Progress1",IDC_PROGRESS1,"msctls_progress32",WS_BORDER,

+                    39,72,153,13

+    SCROLLBAR       IDC_SCROLLBAR2,207,55,13,57,SBS_VERT

+    CONTROL         "Slider1",IDC_SLIDER1,"msctls_trackbar32",TBS_BOTH | 

+                    TBS_NOTICKS | WS_TABSTOP,35,91,159,7

+    SCROLLBAR       IDC_SCROLLBAR1,37,102,155,11

+    CONTROL         "Tab1",IDC_TAB1,"SysTabControl32",0x0,7,120,217,43

+    CONTROL         "Animate1",IDC_ANIMATE1,"SysAnimate32",WS_BORDER | 

+                    WS_TABSTOP,7,171,46,42

+    CONTROL         "List1",IDC_LIST1,"SysListView32",WS_BORDER | WS_TABSTOP,

+                    63,171,53,43

+    CONTROL         "Tree1",IDC_TREE1,"SysTreeView32",WS_BORDER | WS_TABSTOP,

+                    126,171,50,43

+    CONTROL         "MonthCalendar1",IDC_MONTHCALENDAR1,"SysMonthCal32",

+                    MCS_NOTODAY | WS_TABSTOP,7,219,140,84

+    CONTROL         "DateTimePicker1",IDC_DATETIMEPICKER1,"SysDateTimePick32",

+                    DTS_RIGHTALIGN | WS_TABSTOP,174,221,51,15

+    DEFPUSHBUTTON   "OK",IDOK,175,289,50,14

+    PUSHBUTTON      "Hello",IDC_HELLO,175,271,50,14

+    PUSHBUTTON      "Hello",IDC_HELLO2,175,240,50,26,BS_ICON

+    LISTBOX         IDC_LIST2,184,171,40,45,LBS_SORT | LBS_NOINTEGRALHEIGHT | 

+                    WS_VSCROLL | WS_TABSTOP

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// DESIGNINFO

+//

+

+#ifdef APSTUDIO_INVOKED

+GUIDELINES DESIGNINFO DISCARDABLE 

+BEGIN

+    IDD_TEST_DIALOG1, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 179

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 88

+    END

+

+    IDD_TEST_DIALOG2, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 179

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 88

+    END

+

+    IDD_TEST_DIALOG3, DIALOG

+    BEGIN

+        LEFTMARGIN, 7

+        RIGHTMARGIN, 225

+        TOPMARGIN, 7

+        BOTTOMMARGIN, 303

+    END

+END

+#endif    // APSTUDIO_INVOKED

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Icon

+//

+

+// Icon with lowest ID value placed first to ensure application icon

+// remains consistent on all systems.

+IDI_PYTHON              ICON    DISCARDABLE     "python.ico"

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Bitmap

+//

+

+IDB_PYTHON              BITMAP  DISCARDABLE     "python.bmp"

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// Dialog Info

+//

+

+IDD_TEST_DIALOG3 DLGINIT

+BEGIN

+    IDC_COMBO1, 0x403, 6, 0

+0x7449, 0x6d65, 0x0031, 

+    IDC_COMBO1, 0x403, 6, 0

+0x7449, 0x6d65, 0x0032, 

+    0

+END

+

+

+/////////////////////////////////////////////////////////////////////////////

+//

+// String Table

+//

+

+STRINGTABLE DISCARDABLE 

+BEGIN

+    IDS_TEST_STRING1        "Test ""quoted"" string"

+    IDS_TEST_STRING2        "Test string"

+    IDS_TEST_STRING3        "String with single "" quote"

+    IDS_TEST_STRING4        "Test 'single quoted' string"

+END

+

+#endif    // English (Australia) resources

+/////////////////////////////////////////////////////////////////////////////

+

+

+

+#ifndef APSTUDIO_INVOKED

+/////////////////////////////////////////////////////////////////////////////

+//

+// Generated from the TEXTINCLUDE 3 resource.

+//

+

+

+/////////////////////////////////////////////////////////////////////////////

+#endif    // not APSTUDIO_INVOKED

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/timer.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/timer.pyd
new file mode 100644
index 0000000..0c4e09b9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/timer.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win2kras.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win2kras.pyd
new file mode 100644
index 0000000..d9ef1f9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win2kras.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32api.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32api.pyd
new file mode 100644
index 0000000..3482aeeb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32api.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32clipboard.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32clipboard.pyd
new file mode 100644
index 0000000..318d1e2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32clipboard.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32console.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32console.pyd
new file mode 100644
index 0000000..e0c9e50
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32console.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32crypt.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32crypt.pyd
new file mode 100644
index 0000000..8aa8cdb5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32crypt.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32event.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32event.pyd
new file mode 100644
index 0000000..5cfaed5b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32event.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32evtlog.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32evtlog.pyd
new file mode 100644
index 0000000..3cfc82b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32evtlog.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32file.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32file.pyd
new file mode 100644
index 0000000..9ae8ae2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32file.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32gui.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32gui.pyd
new file mode 100644
index 0000000..8d727db
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32gui.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32help.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32help.pyd
new file mode 100644
index 0000000..9a25221a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32help.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32inet.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32inet.pyd
new file mode 100644
index 0000000..def9974
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32inet.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32job.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32job.pyd
new file mode 100644
index 0000000..325fa38
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32job.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32lz.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32lz.pyd
new file mode 100644
index 0000000..246bb9e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32lz.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32net.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32net.pyd
new file mode 100644
index 0000000..4de6f4e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32net.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32pdh.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32pdh.pyd
new file mode 100644
index 0000000..a1cb9fe
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32pdh.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32pipe.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32pipe.pyd
new file mode 100644
index 0000000..19d25ba
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32pipe.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32popenWin9x.exe b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32popenWin9x.exe
new file mode 100644
index 0000000..0f78643
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32popenWin9x.exe
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32print.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32print.pyd
new file mode 100644
index 0000000..0cae6cbb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32print.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32process.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32process.pyd
new file mode 100644
index 0000000..bca5ef7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32process.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32ras.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32ras.pyd
new file mode 100644
index 0000000..e2d7c65
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32ras.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32security.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32security.pyd
new file mode 100644
index 0000000..d65be3a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32security.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32service.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32service.pyd
new file mode 100644
index 0000000..89b0db1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32service.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32trace.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32trace.pyd
new file mode 100644
index 0000000..5e78328
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32trace.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/win32wnet.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32wnet.pyd
new file mode 100644
index 0000000..f2049b7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/win32wnet.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32/winxpgui.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32/winxpgui.pyd
new file mode 100644
index 0000000..d28a94c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32/winxpgui.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/License.txt b/depot_tools/release/win/python_24/Lib/site-packages/win32com/License.txt
new file mode 100644
index 0000000..92ab4e8f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/License.txt
@@ -0,0 +1,30 @@
+Unless stated in the specfic source file, this work is
+Copyright (c) 1996-2001, Greg Stein and Mark Hammond.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without 
+modification, are permitted provided that the following conditions 
+are met:
+
+Redistributions of source code must retain the above copyright notice, 
+this list of conditions and the following disclaimer.
+
+Redistributions in binary form must reproduce the above copyright 
+notice, this list of conditions and the following disclaimer in 
+the documentation and/or other materials provided with the distribution.
+
+Neither names of Greg Stein, Mark Hammond nor the name of contributors may be used 
+to endorse or promote products derived from this software without 
+specific prior written permission. 
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS
+IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
+TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/__init__.py
new file mode 100644
index 0000000..46f9a0c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/__init__.py
@@ -0,0 +1,117 @@
+#
+# Initialization for the win32com package
+#
+
+import win32api, sys, os
+import pythoncom
+
+# flag if we are in a "frozen" build.
+_frozen = getattr(sys, "frozen", 1==0)
+# pythoncom dumbly defaults this to zero - we believe sys.frozen over it.
+if _frozen and not getattr(pythoncom, "frozen", 0):
+	pythoncom.frozen = sys.frozen
+
+# Add support for an external "COM Extensions" path.
+#  Concept is that you can register a seperate path to be used for
+#  COM extensions, outside of the win32com directory.  These modules, however,
+#  look identical to win32com built-in modules.
+#  This is the technique that we use for the "standard" COM extensions.
+#  eg "win32com.mapi" or "win32com.axscript" both work, even though they do not
+#  live under the main win32com directory.
+__gen_path__ = ''
+__build_path__ = None
+### TODO - Load _all_ \\Extensions subkeys - for now, we only read the default
+### Modules will work if loaded into "win32comext" path.
+
+def SetupEnvironment():
+	HKEY_LOCAL_MACHINE = -2147483646 # Avoid pulling in win32con for just these...
+	KEY_QUERY_VALUE = 0x1
+	# Open the root key once, as this is quite slow on NT.
+	try:
+		keyName = "SOFTWARE\\Python\\PythonCore\\%s\\PythonPath\\win32com" % sys.winver
+		key = win32api.RegOpenKey(HKEY_LOCAL_MACHINE , keyName, 0, KEY_QUERY_VALUE)
+	except (win32api.error, AttributeError):
+		key = None
+		
+	try:
+		found = 0
+		if key is not None:
+			try:
+				__path__.append( win32api.RegQueryValue(key, "Extensions" ))
+				found = 1
+			except win32api.error:
+				# Nothing registered
+				pass
+		if not found:
+			try:
+				__path__.append( win32api.GetFullPathName( __path__[0] + "\\..\\win32comext") )
+			except win32api.error:
+				# Give up in disgust!
+				pass
+	
+		# For the sake of developers, we also look up a "BuildPath" key
+		# If extension modules add support, we can load their .pyd's from a completely
+		# different directory (see the comments below)
+		try:
+			if key is not None:
+				global __build_path__
+				__build_path__ = win32api.RegQueryValue(key, "BuildPath")
+				__path__.append(__build_path__)
+		except win32api.error:
+			# __build_path__ neednt be defined.
+			pass
+		global __gen_path__
+		if key is not None:
+			try:
+				__gen_path__ = win32api.RegQueryValue(key, "GenPath")
+			except win32api.error:
+				pass
+	finally:
+		if key is not None:
+			key.Close()
+
+# A Helper for developers.  A sub-package's __init__ can call this help function,
+# which allows the .pyd files for the extension to live in a special "Build" directory
+# (which the win32com developers do!)
+def __PackageSupportBuildPath__(package_path):
+	# See if we have a special directory for the binaries (for developers)
+	if not _frozen and __build_path__:
+		package_path.append(__build_path__)
+
+if not _frozen:
+	SetupEnvironment()
+
+# If we don't have a special __gen_path__, see if we have a gen_py as a
+# normal module and use that (ie, "win32com.gen_py" may already exist as
+# a package.
+if not __gen_path__:
+	try:
+		import win32com.gen_py
+		__gen_path__ = sys.modules["win32com.gen_py"].__path__[0]
+	except ImportError:
+		# If a win32com\gen_py directory already exists, then we use it
+		# (gencache doesn't insist it have an __init__, but our __import__ 
+		# above does!
+		__gen_path__ = os.path.abspath(os.path.join(__path__[0], "gen_py"))
+		if not os.path.isdir(__gen_path__):
+			# We used to dynamically create a directory under win32com -
+			# but this sucks.  If the dir doesn't already exist, we we 
+			# create a version specific directory under the user temp 
+			# directory.
+			__gen_path__ = os.path.join(
+								win32api.GetTempPath(), "gen_py",
+								"%d.%d" % (sys.version_info[0], sys.version_info[1]))
+
+# we must have a __gen_path__, but may not have a gen_py module -
+# set that up.
+if not sys.modules.has_key("win32com.gen_py"):
+	# Create a "win32com.gen_py", but with a custom __path__
+	import new
+	gen_py = new.module("win32com.gen_py")
+	gen_py.__path__ = [ __gen_path__ ]
+	sys.modules[gen_py.__name__]=gen_py
+	del new
+gen_py = sys.modules["win32com.gen_py"]
+
+# get rid of these for module users
+del os, sys, win32api, pythoncom
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/CLSIDToClass.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/CLSIDToClass.py
new file mode 100644
index 0000000..f5e463f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/CLSIDToClass.py
@@ -0,0 +1,57 @@
+"""Manages a dictionary of CLSID strings to Python classes.
+
+Primary use of this module is to allow modules generated by
+makepy.py to share classes.  @makepy@ automatically generates code
+which interacts with this module.  You should never need to reference
+this module directly.
+
+This module only provides support for modules which have been previously
+been imported.  The gencache module provides some support for loading modules
+on demand - once done, this module supports it...
+
+As an example, the MSACCESS.TLB type library makes reference to the
+CLSID of the Database object, as defined in DAO3032.DLL.  This
+allows code using the MSAccess wrapper to natively use Databases.
+
+This obviously applies to all cooperating objects, not just DAO and
+Access.
+"""
+mapCLSIDToClass = {}
+
+def RegisterCLSID( clsid, pythonClass ):
+	"""Register a class that wraps a CLSID
+	
+	This function allows a CLSID to be globally associated with a class.
+	Certain module will automatically convert an IDispatch object to an
+	instance of the associated class.
+	"""
+	
+	mapCLSIDToClass[str(clsid)] = pythonClass
+	
+def RegisterCLSIDsFromDict( dict ):
+	"""Register a dictionary of CLSID's and classes.
+	
+	This module performs the same function as @RegisterCLSID@, but for
+	an entire dictionary of associations.
+	
+	Typically called by makepy generated modules at import time.
+	"""
+	try:
+		mapCLSIDToClass.update(dict)
+	except AttributeError: # Python 1.4?
+		for clsid, pythonClass in dict.items():
+			mapCLSIDToClass[clsid] = pythonClass
+		
+def GetClass(clsid):
+	"""Given a CLSID, return the globally associated class.
+	
+	clsid -- a string CLSID representation to check.
+	"""
+	return mapCLSIDToClass[clsid]
+	
+def HasClass(clsid):
+	"""Determines if the CLSID has an associated class.
+	
+	clsid -- the string CLSID to check
+	"""
+	return mapCLSIDToClass.has_key(clsid)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/__init__.py
new file mode 100644
index 0000000..3e7a8762
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/__init__.py
@@ -0,0 +1,507 @@
+# This module exists to create the "best" dispatch object for a given
+# object.  If "makepy" support for a given object is detected, it is
+# used, otherwise a dynamic dispatch object.
+
+# Note that if the unknown dispatch object then returns a known
+# dispatch object, the known class will be used.  This contrasts
+# with dynamic.Dispatch behaviour, where dynamic objects are always used.
+import __builtin__
+# For some bizarre reason, __builtins__ fails with attribute error on __dict__ here?
+NeedUnicodeConversions = not hasattr(__builtin__, "unicode")
+
+import dynamic, gencache, pythoncom
+import sys
+import pywintypes
+from types import TupleType
+from pywintypes import UnicodeType
+_PyIDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch]
+
+
+def __WrapDispatch(dispatch, userName = None, resultCLSID = None, typeinfo = None, \
+                  UnicodeToString = NeedUnicodeConversions, clsctx = pythoncom.CLSCTX_SERVER,
+                  WrapperClass = None):
+  """
+    Helper function to return a makepy generated class for a CLSID if it exists,
+    otherwise cope by using CDispatch.
+  """
+  if resultCLSID is None:
+    try:
+      typeinfo = dispatch.GetTypeInfo()
+      if typeinfo is not None: # Some objects return NULL, some raise exceptions...
+        resultCLSID = str(typeinfo.GetTypeAttr()[0])
+    except (pythoncom.com_error, AttributeError):
+      pass
+  if resultCLSID is not None:
+    import gencache
+    # Attempt to load generated module support
+    # This may load the module, and make it available
+    klass = gencache.GetClassForCLSID(resultCLSID)
+    if klass is not None:
+      return klass(dispatch)
+
+  # Return a "dynamic" object - best we can do!
+  if WrapperClass is None: WrapperClass = CDispatch
+  return dynamic.Dispatch(dispatch, userName, WrapperClass, typeinfo, UnicodeToString=UnicodeToString,clsctx=clsctx)
+
+
+def GetObject(Pathname = None, Class = None, clsctx = None):
+  """
+    Mimic VB's GetObject() function.
+
+    ob = GetObject(Class = "ProgID") or GetObject(Class = clsid) will
+    connect to an already running instance of the COM object.
+    
+    ob = GetObject(r"c:\blah\blah\foo.xls") (aka the COM moniker syntax)
+    will return a ready to use Python wrapping of the required COM object.
+
+    Note: You must specifiy one or the other of these arguments. I know
+    this isn't pretty, but it is what VB does. Blech. If you don't
+    I'll throw ValueError at you. :)
+    
+    This will most likely throw pythoncom.com_error if anything fails.
+  """
+  if clsctx is None:
+    clsctx = pythoncom.CLSCTX_ALL
+    
+  if (Pathname is None and Class is None) or \
+     (Pathname is not None and Class is not None):
+    raise ValueError, "You must specify a value for Pathname or Class, but not both."
+
+  if Class is not None:
+    return GetActiveObject(Class, clsctx)
+  else:
+    return Moniker(Pathname, clsctx)    
+
+def GetActiveObject(Class, clsctx = pythoncom.CLSCTX_ALL):
+  """
+    Python friendly version of GetObject's ProgID/CLSID functionality.
+  """  
+  resultCLSID = pywintypes.IID(Class)
+  dispatch = pythoncom.GetActiveObject(resultCLSID)
+  dispatch = dispatch.QueryInterface(pythoncom.IID_IDispatch)
+  return __WrapDispatch(dispatch, Class, resultCLSID = resultCLSID, clsctx = clsctx)
+
+def Moniker(Pathname, clsctx = pythoncom.CLSCTX_ALL):
+  """
+    Python friendly version of GetObject's moniker functionality.
+  """
+  moniker, i, bindCtx = pythoncom.MkParseDisplayName(Pathname)
+  dispatch = moniker.BindToObject(bindCtx, None, pythoncom.IID_IDispatch)
+  return __WrapDispatch(dispatch, Pathname, clsctx = clsctx)
+  
+def Dispatch(dispatch, userName = None, resultCLSID = None, typeinfo = None, UnicodeToString=NeedUnicodeConversions, clsctx = pythoncom.CLSCTX_SERVER):
+  """Creates a Dispatch based COM object.
+  """
+  dispatch, userName = dynamic._GetGoodDispatchAndUserName(dispatch,userName,clsctx)
+  return __WrapDispatch(dispatch, userName, resultCLSID, typeinfo, UnicodeToString, clsctx)
+
+def DispatchEx(clsid, machine=None, userName = None, resultCLSID = None, typeinfo = None, UnicodeToString=NeedUnicodeConversions, clsctx = None):
+  """Creates a Dispatch based COM object on a specific machine.
+  """
+  # If InProc is registered, DCOM will use it regardless of the machine name 
+  # (and regardless of the DCOM config for the object.)  So unless the user
+  # specifies otherwise, we exclude inproc apps when a remote machine is used.
+  if clsctx is None:
+    clsctx = pythoncom.CLSCTX_SERVER
+    if machine is not None: clsctx = clsctx & ~pythoncom.CLSCTX_INPROC
+  if machine is None:
+    serverInfo = None
+  else:
+    serverInfo = (machine,)          
+  if userName is None: userName = clsid
+  dispatch = pythoncom.CoCreateInstanceEx(clsid, None, clsctx, serverInfo, (pythoncom.IID_IDispatch,))[0]
+  return Dispatch(dispatch, userName, resultCLSID, typeinfo, UnicodeToString=UnicodeToString, clsctx=clsctx)
+
+class CDispatch(dynamic.CDispatch):
+  """
+    The dynamic class used as a last resort.
+    The purpose of this overriding of dynamic.CDispatch is to perpetuate the policy
+    of using the makepy generated wrapper Python class instead of dynamic.CDispatch
+    if/when possible.
+  """
+  def _wrap_dispatch_(self, ob, userName = None, returnCLSID = None, UnicodeToString = NeedUnicodeConversions):
+    return Dispatch(ob, userName, returnCLSID,None,UnicodeToString)
+
+def CastTo(ob, target):
+    """'Cast' a COM object to another interface"""
+    # todo - should support target being an IID
+    if hasattr(target, "index"): # string like
+    # for now, we assume makepy for this to work.
+        if not ob.__class__.__dict__.has_key("CLSID"):
+            # Eeek - no makepy support - try and build it.
+            ob = gencache.EnsureDispatch(ob)
+        if not ob.__class__.__dict__.has_key("CLSID"):
+            raise ValueError, "Must be a makepy-able object for this to work"
+        clsid = ob.CLSID
+        # Lots of hoops to support "demand-build" - ie, generating
+        # code for an interface first time it is used.  We assume the
+        # interface name exists in the same library as the object.
+        # This is generally the case - only referenced typelibs may be
+        # a problem, and we can handle that later.  Maybe <wink>
+        # So get the generated module for the library itself, then
+        # find the interface CLSID there.
+        mod = gencache.GetModuleForCLSID(clsid)
+        # Get the 'root' module.
+        mod = gencache.GetModuleForTypelib(mod.CLSID, mod.LCID,
+                                           mod.MajorVersion, mod.MinorVersion)
+        # Find the CLSID of the target
+        target_clsid = mod.NamesToIIDMap.get(target)
+        if target_clsid is None:
+            raise ValueError, "The interface name '%s' does not appear in the " \
+                              "same library as object '%r'" % (target, ob)
+        mod = gencache.GetModuleForCLSID(target_clsid)
+        target_class = getattr(mod, target)
+        # resolve coclass to interface
+        target_class = getattr(target_class, "default_interface", target_class)
+        return target_class(ob) # auto QI magic happens
+    raise ValueError, "This object can not be cast"
+
+class Constants:
+  """A container for generated COM constants.
+  """
+  def __init__(self):
+    self.__dicts__ = [] # A list of dictionaries
+  def __getattr__(self, a):
+    for d in self.__dicts__:
+      if d.has_key(a):
+        return d[a]
+    raise AttributeError, a
+
+# And create an instance.
+constants = Constants()
+
+# A helpers for DispatchWithEvents - this becomes __setattr__ for the
+# temporary class.
+def _event_setattr_(self, attr, val):
+  try:
+    # Does the COM object have an attribute of this name?
+    self.__class__.__bases__[0].__setattr__(self, attr, val)
+  except AttributeError:
+    # Otherwise just stash it away in the instance.
+    self.__dict__[attr] = val
+
+# An instance of this "proxy" is created to break the COM circular references
+# that exist (ie, when we connect to the COM events, COM keeps a reference
+# to the object.  Thus, the Event connection must be manually broken before
+# our object can die.  This solves the problem by manually breaking the connection
+# to the real object as the proxy dies.
+class EventsProxy:
+  def __init__(self, ob):
+    self.__dict__['_obj_'] = ob
+  def __del__(self):
+    try:
+      # If there is a COM error on disconnection we should
+      # just ignore it - object probably already shut down...
+      self._obj_.close()
+    except pythoncom.com_error:
+      pass
+  def __getattr__(self, attr):
+    return getattr(self._obj_, attr)
+  def __setattr__(self, attr, val):
+    setattr(self._obj_, attr, val)
+
+def DispatchWithEvents(clsid, user_event_class):
+  """Create a COM object that can fire events to a user defined class.
+  clsid -- The ProgID or CLSID of the object to create.
+  user_event_class -- A Python class object that responds to the events.
+
+  This requires makepy support for the COM object being created.  If
+  this support does not exist it will be automatically generated by
+  this function.  If the object does not support makepy, a TypeError
+  exception will be raised.
+
+  The result is a class instance that both represents the COM object
+  and handles events from the COM object.
+
+  It is important to note that the returned instance is not a direct
+  instance of the user_event_class, but an instance of a temporary
+  class object that derives from three classes:
+  * The makepy generated class for the COM object
+  * The makepy generated class for the COM events
+  * The user_event_class as passed to this function.
+
+  If this is not suitable, see the getevents function for an alternative
+  technique of handling events.
+
+  Object Lifetimes:  Whenever the object returned from this function is
+  cleaned-up by Python, the events will be disconnected from
+  the COM object.  This is almost always what should happen,
+  but see the documentation for getevents() for more details.
+
+  Example:
+
+  >>> class IEEvents:
+  ...    def OnVisible(self, visible):
+  ...       print "Visible changed:", visible
+  ...
+  >>> ie = DispatchWithEvents("InternetExplorer.Application", IEEvents)
+  >>> ie.Visible = 1
+  Visible changed: 1
+  >>> 
+  """
+  # Create/Get the object.
+  disp = Dispatch(clsid)
+  if not disp.__class__.__dict__.get("CLSID"): # Eeek - no makepy support - try and build it.
+    try:
+      ti = disp._oleobj_.GetTypeInfo()
+      disp_clsid = ti.GetTypeAttr()[0]
+      tlb, index = ti.GetContainingTypeLib()
+      tla = tlb.GetLibAttr()
+      gencache.EnsureModule(tla[0], tla[1], tla[3], tla[4], bValidateFile=0)
+      # Get the class from the module.
+      disp_class = gencache.GetClassForProgID(str(disp_clsid))
+    except pythoncom.com_error:
+      raise TypeError, "This COM object can not automate the makepy process - please run makepy manually for this object"
+  else:
+    disp_class = disp.__class__
+  # If the clsid was an object, get the clsid
+  clsid = disp_class.CLSID
+  # Create a new class that derives from 3 classes - the dispatch class, the event sink class and the user class.
+  import new
+  events_class = getevents(clsid)
+  if events_class is None:
+    raise ValueError, "This COM object does not support events."
+  result_class = new.classobj("COMEventClass", (disp_class, events_class, user_event_class), {"__setattr__" : _event_setattr_})
+  instance = result_class(disp._oleobj_) # This only calls the first base class __init__.
+  events_class.__init__(instance, instance)
+  if hasattr(user_event_class, "__init__"):
+    user_event_class.__init__(instance)
+  return EventsProxy(instance)
+
+def WithEvents(disp, user_event_class):
+  """Similar to DispatchWithEvents - except that the returned
+  object is *not* also usable as the original Dispatch object - that is
+  the returned object is not dispatchable.
+
+  The difference is best summarised by example.
+
+  >>> class IEEvents:
+  ...    def OnVisible(self, visible):
+  ...       print "Visible changed:", visible
+  ...
+  >>> ie = Dispatch("InternetExplorer.Application")
+  >>> ie_events = WithEvents(ie, IEEvents)
+  >>> ie.Visible = 1
+  Visible changed: 1
+
+  Compare with the code sample for DispatchWithEvents, where you get a
+  single object that is both the interface and the event handler.  Note that
+  the event handler instance will *not* be able to use 'self.' to refer to
+  IE's methods and properties.
+
+  This is mainly useful where using DispatchWithEvents causes
+  circular reference problems that the simple proxy doesn't deal with
+  """
+  disp = Dispatch(disp)
+  if not disp.__class__.__dict__.get("CLSID"): # Eeek - no makepy support - try and build it.
+    try:
+      ti = disp._oleobj_.GetTypeInfo()
+      disp_clsid = ti.GetTypeAttr()[0]
+      tlb, index = ti.GetContainingTypeLib()
+      tla = tlb.GetLibAttr()
+      gencache.EnsureModule(tla[0], tla[1], tla[3], tla[4], bValidateFile=0)
+      # Get the class from the module.
+      disp_class = gencache.GetClassForProgID(str(disp_clsid))
+    except pythoncom.com_error:
+      raise TypeError, "This COM object can not automate the makepy process - please run makepy manually for this object"
+  else:
+    disp_class = disp.__class__
+  # Get the clsid
+  clsid = disp_class.CLSID
+  # Create a new class that derives from 2 classes - the event sink
+  # class and the user class.
+  import new
+  events_class = getevents(clsid)
+  if events_class is None:
+    raise ValueError, "This COM object does not support events."
+  result_class = new.classobj("COMEventClass", (events_class, user_event_class), {})
+  instance = result_class(disp) # This only calls the first base class __init__.
+  if hasattr(user_event_class, "__init__"):
+    user_event_class.__init__(instance)
+  return instance
+
+def getevents(clsid):
+    """Determine the default outgoing interface for a class, given
+    either a clsid or progid. It returns a class - you can
+    conveniently derive your own handler from this class and implement
+    the appropriate methods.
+
+    This method relies on the classes produced by makepy. You must use
+    either makepy or the gencache module to ensure that the
+    appropriate support classes have been generated for the com server
+    that you will be handling events from.
+
+    Beware of COM circular references.  When the Events class is connected
+    to the COM object, the COM object itself keeps a reference to the Python
+    events class.  Thus, neither the Events instance or the COM object will
+    ever die by themselves.  The 'close' method on the events instance
+    must be called to break this chain and allow standard Python collection
+    rules to manage object lifetimes.  Note that DispatchWithEvents() does
+    work around this problem by the use of a proxy object, but if you use
+    the getevents() function yourself, you must make your own arrangements
+    to manage this circular reference issue.
+
+    Beware of creating Python circular references: this will happen if your
+    handler has a reference to an object that has a reference back to
+    the event source. Call the 'close' method to break the chain.
+    
+    Example:
+
+    >>>win32com.client.gencache.EnsureModule('{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}',0,1,1)
+    <module 'win32com.gen_py.....
+    >>>
+    >>> class InternetExplorerEvents(win32com.client.getevents("InternetExplorer.Application.1")):
+    ...    def OnVisible(self, Visible):
+    ...        print "Visibility changed: ", Visible
+    ...
+    >>>
+    >>> ie=win32com.client.Dispatch("InternetExplorer.Application.1")
+    >>> events=InternetExplorerEvents(ie) 
+    >>> ie.Visible=1
+    Visibility changed:  1
+    >>>
+    """
+
+    # find clsid given progid or clsid
+    clsid=str(pywintypes.IID(clsid))
+    # return default outgoing interface for that class
+    klass = gencache.GetClassForCLSID(clsid)
+    try:
+      return klass.default_source
+    except AttributeError:
+      # See if we have a coclass for the interfaces.
+      try:
+        return gencache.GetClassForCLSID(klass.coclass_clsid).default_source
+      except AttributeError:
+        return None
+
+# A Record object, as used by the COM struct support
+def Record(name, object):
+  """Creates a new record object, given the name of the record,
+  and an object from the same type library.
+
+  Example usage would be:
+    app = win32com.client.Dispatch("Some.Application")
+    point = win32com.client.Record("SomeAppPoint", app)
+    point.x = 0
+    point.y = 0
+    app.MoveTo(point)
+  """
+  # XXX - to do - probably should allow "object" to already be a module object.
+  import gencache
+  object = gencache.EnsureDispatch(object)
+  module = sys.modules[object.__class__.__module__]
+  # to allow us to work correctly with "demand generated" code,
+  # we must use the typelib CLSID to obtain the module
+  # (otherwise we get the sub-module for the object, which
+  # does not hold the records)
+  # thus, package may be module, or may be module's parent if demand generated.
+  package = gencache.GetModuleForTypelib(module.CLSID, module.LCID, module.MajorVersion, module.MinorVersion)
+  try:
+    struct_guid = package.RecordMap[name]
+  except KeyError:
+    raise ValueError, "The structure '%s' is not defined in module '%s'" % (name, package)
+
+  return pythoncom.GetRecordFromGuids(module.CLSID, module.MajorVersion, module.MinorVersion, module.LCID, struct_guid)
+
+
+############################################
+# The base of all makepy generated classes
+############################################
+class DispatchBaseClass:
+	def __init__(self, oobj=None):
+		if oobj is None:
+			oobj = pythoncom.new(self.CLSID)
+		elif type(self) == type(oobj): # An instance
+			try:
+				oobj = oobj._oleobj_.QueryInterface(self.CLSID, pythoncom.IID_IDispatch) # Must be a valid COM instance
+			except pythoncom.com_error, details:
+				import winerror
+				# Some stupid objects fail here, even tho it is _already_ IDispatch!!??
+				# Eg, Lotus notes.
+				# So just let it use the existing object if E_NOINTERFACE
+				if details[0] != winerror.E_NOINTERFACE:
+					raise
+				oobj = oobj._oleobj_
+		self.__dict__["_oleobj_"] = oobj # so we dont call __setattr__
+	# Provide a prettier name than the CLSID
+	def __repr__(self):
+		# Need to get the docstring for the module for this class.
+		try:
+			mod_doc = sys.modules[self.__class__.__module__].__doc__
+			if mod_doc:
+				mod_name = "win32com.gen_py." + mod_doc
+			else:
+				mod_name = sys.modules[self.__class__.__module__].__name__
+		except KeyError:
+		  mod_name = "win32com.gen_py.unknown"
+		return "<%s.%s instance at 0x%s>" % (mod_name, self.__class__.__name__, id(self))
+	# Delegate comparison to the oleobjs, as they know how to do identity.
+	def __cmp__(self, other):
+		other = getattr(other, "_oleobj_", other)
+		return cmp(self._oleobj_, other)
+
+	def _ApplyTypes_(self, dispid, wFlags, retType, argTypes, user,
+                     resultCLSID, *args):
+		return self._get_good_object_(
+                    self._oleobj_.InvokeTypes(
+                              dispid, 0, wFlags, retType, argTypes, *args),
+                    user, resultCLSID)
+
+	def __getattr__(self, attr):
+		args=self._prop_map_get_.get(attr)
+		if args is None:
+			raise AttributeError, "'%s' object has no attribute '%s'" % (repr(self), attr)
+		return self._ApplyTypes_(*args)
+
+	def __setattr__(self, attr, value):
+		if self.__dict__.has_key(attr): self.__dict__[attr] = value; return
+		try:
+			args, defArgs=self._prop_map_put_[attr]
+		except KeyError:
+			raise AttributeError, "'%s' object has no attribute '%s'" % (repr(self), attr)
+		self._oleobj_.Invoke(*(args + (value,) + defArgs))
+	def _get_good_single_object_(self, obj, obUserName=None, resultCLSID=None):
+		return _get_good_single_object_(obj, obUserName, resultCLSID)
+	def _get_good_object_(self, obj, obUserName=None, resultCLSID=None):
+		return _get_good_object_(obj, obUserName, resultCLSID)
+
+# XXX - These should be consolidated with dynamic.py versions.
+def _get_good_single_object_(obj, obUserName=None, resultCLSID=None):
+	if _PyIDispatchType==type(obj):
+		return Dispatch(obj, obUserName, resultCLSID, UnicodeToString=NeedUnicodeConversions)
+	elif NeedUnicodeConversions and UnicodeType==type(obj):
+		return str(obj)
+	return obj
+
+def _get_good_object_(obj, obUserName=None, resultCLSID=None):
+	if obj is None:
+		return None
+	elif type(obj)==TupleType:
+		obUserNameTuple = (obUserName,) * len(obj)
+		resultCLSIDTuple = (resultCLSID,) * len(obj)
+		return tuple(map(_get_good_object_, obj, obUserNameTuple, resultCLSIDTuple))
+	else:
+		return _get_good_single_object_(obj, obUserName, resultCLSID)
+
+class CoClassBaseClass:
+	def __init__(self, oobj=None):
+		if oobj is None: oobj = pythoncom.new(self.CLSID)
+		self.__dict__["_dispobj_"] = self.default_interface(oobj)
+	def __repr__(self):
+		return "<win32com.gen_py.%s.%s>" % (__doc__, self.__class__.__name__)
+
+	def __getattr__(self, attr):
+		d=self.__dict__["_dispobj_"]
+		if d is not None: return getattr(d, attr)
+		raise AttributeError, attr
+	def __setattr__(self, attr, value):
+		if self.__dict__.has_key(attr): self.__dict__[attr] = value; return
+		try:
+			d=self.__dict__["_dispobj_"]
+			if d is not None:
+				d.__setattr__(attr, value)
+				return
+		except AttributeError:
+			pass
+		self.__dict__[attr] = value
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/build.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/build.py
new file mode 100644
index 0000000..6530273
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/build.py
@@ -0,0 +1,615 @@
+"""Contains knowledge to build a COM object definition.
+
+This module is used by both the @dynamic@ and @makepy@ modules to build
+all knowledge of a COM object.
+
+This module contains classes which contain the actual knowledge of the object.
+This includes parameter and return type information, the COM dispid and CLSID, etc.
+
+Other modules may use this information to generate .py files, use the information
+dynamically, or possibly even generate .html documentation for objects.
+"""
+
+#
+# NOTES: DispatchItem and MapEntry used by dynamic.py.
+#        the rest is used by makepy.py
+#
+#        OleItem, DispatchItem, MapEntry, BuildCallList() is used by makepy
+
+import sys
+import string
+import types
+from keyword import iskeyword
+from win32com.client import NeedUnicodeConversions
+
+import pythoncom
+from pywintypes import UnicodeType, TimeType
+
+# A string ending with a quote can not be safely triple-quoted.
+def _safeQuotedString(s):
+	"""Return s rendered as a triple-quoted literal, escaping a trailing quote."""
+	if s[-1]=='"': s = s[:-1]+'\\"'
+	return '"""%s"""' % s
+
+error = "PythonCOM.Client.Build error" # Legacy string "exception" identifier.
+class NotSupportedException(Exception): pass # Raised when we cant support a param type.
+DropIndirection="DropIndirection"
+
+# Variant types whose values need no runtime translation when returned
+# from Invoke - results of these types allow the generated code to skip
+# the (expensive) _ApplyTypes_ path (see MakeDispatchFuncMethod).
+# NOTE(review): pythoncom.VT_NULL appears twice in this list; harmless,
+# since it is only used to populate NoTranslateMap below.
+NoTranslateTypes = [
+	pythoncom.VT_BOOL,          pythoncom.VT_CLSID,        pythoncom.VT_CY,
+	pythoncom.VT_DATE,          pythoncom.VT_DECIMAL,       pythoncom.VT_EMPTY,
+	pythoncom.VT_ERROR,         pythoncom.VT_FILETIME,     pythoncom.VT_HRESULT,
+	pythoncom.VT_I1,            pythoncom.VT_I2,           pythoncom.VT_I4,
+	pythoncom.VT_I8,            pythoncom.VT_INT,          pythoncom.VT_NULL,
+	pythoncom.VT_R4,            pythoncom.VT_R8,           pythoncom.VT_NULL,
+	pythoncom.VT_STREAM,
+	pythoncom.VT_UI1,            pythoncom.VT_UI2,           pythoncom.VT_UI4,
+	pythoncom.VT_UI8,            pythoncom.VT_UINT,          pythoncom.VT_VOID,
+]
+
+# Same types as a map, for fast has_key() membership tests.
+NoTranslateMap = {}
+for v in NoTranslateTypes:
+	NoTranslateMap[v] = None
+
+class MapEntry:
+	"Simple holder for named attributes - items in a map."
+	def __init__(self, desc_or_id, names=None, doc=None, resultCLSID=pythoncom.IID_NULL, resultDoc = None, hidden=0):
+		# desc_or_id may be either a bare dispid (an int) or a full
+		# FUNCDESC/VARDESC tuple whose first element is the dispid.
+		if type(desc_or_id)==type(0):
+			self.dispid = desc_or_id
+			self.desc = None
+		else:
+			self.dispid = desc_or_id[0]
+			self.desc = desc_or_id
+
+		self.names = names
+		self.doc = doc
+		self.resultCLSID = resultCLSID
+		self.resultDocumentation = resultDoc
+		self.wasProperty = 0 # Have I been transformed into a function so I can pass args?
+		self.hidden = hidden
+	def GetResultCLSID(self):
+		# IID_NULL is the "no result CLSID" sentinel.
+		rc = self.resultCLSID
+		if rc == pythoncom.IID_NULL: return None
+		return rc
+	# Return a string, suitable for output - either "'{...}'" or "None"
+	def GetResultCLSIDStr(self):
+		rc = self.GetResultCLSID()
+		if rc is None: return "None"
+		return repr(str(rc)) # Convert the IID object to a string, then to a string in a string.
+
+	def GetResultName(self):
+		# First element of the documentation tuple is the name.
+		if self.resultDocumentation is None:
+			return None
+		return self.resultDocumentation[0]
+
+class OleItem:
+  """Base class for an item extracted from a type library.
+
+  Holds the documentation tuple, a Python-safe name derived from it,
+  and bookkeeping flags used by makepy when writing generated code.
+  """
+  typename = "OleItem"
+
+  def __init__(self, doc=None):
+    self.doc = doc
+    if self.doc:
+        self.python_name = MakePublicAttributeName(self.doc[0])
+    else:
+        self.python_name = None
+    self.bWritten = 0     # has makepy emitted this item yet?
+    self.bIsDispatch = 0
+    self.bIsSink = 0      # is this an event sink interface?
+    self.clsid = None
+    self.co_class = None
+
+class DispatchItem(OleItem):
+	"""Describes a dispatch interface - all its methods and properties.
+
+	Built from COM type information.  Used by dynamic.py at runtime and
+	by makepy.py when generating Python source for the interface.
+	"""
+	typename = "DispatchItem"
+
+	def __init__(self, typeinfo=None, attr=None, doc=None, bForUser=1):
+		OleItem.__init__(self,doc)
+		# Each map is: attribute name -> MapEntry.
+		self.propMap = {}     # plain properties (from VARDESCs)
+		self.propMapGet = {}  # propget accessor functions
+		self.propMapPut = {}  # propput/propputref accessor functions
+		self.mapFuncs = {}    # ordinary methods
+		self.defaultDispatchName = None
+		self.hidden = 0
+
+		if typeinfo:
+			self.Build(typeinfo, attr, bForUser)
+
+	def _propMapPutCheck_(self,key,item):
+		"""If a 'put' property takes extra args, expose it as a SetXXX method."""
+		ins, outs, opts = self.CountInOutOptArgs(item.desc[2])
+		if ins>1: # if a Put property takes more than 1 arg:
+			if opts+1==ins or ins==item.desc[6]+1:
+				newKey = "Set" + key
+				deleteExisting = 0 # This one is still OK
+			else:
+				deleteExisting = 1 # No good to us
+				if self.mapFuncs.has_key(key) or self.propMapGet.has_key(key):
+					newKey = "Set" + key
+				else:
+					newKey = key
+			item.wasProperty = 1
+			self.mapFuncs[newKey] = item
+			if deleteExisting:
+				del self.propMapPut[key]
+
+	def _propMapGetCheck_(self,key,item):
+		"""If a 'get' property takes any in-args, expose it as a GetXXX method."""
+		ins, outs, opts = self.CountInOutOptArgs(item.desc[2])
+		if ins > 0: # if a Get property takes _any_ in args:
+			if item.desc[6]==ins or ins==opts:
+				newKey = "Get" + key
+				deleteExisting = 0 # This one is still OK
+			else:
+				deleteExisting = 1 # No good to us
+				if self.mapFuncs.has_key(key):
+					newKey = "Get" + key
+				else:
+					newKey = key
+			item.wasProperty = 1
+			self.mapFuncs[newKey] = item
+			if deleteExisting:
+				del self.propMapGet[key]
+
+	def	_AddFunc_(self,typeinfo,fdesc,bForUser):
+		"""Add one FUNCDESC from the typeinfo into the appropriate map.
+
+		Resolves aliases/enums in the return and argument types, then
+		files the entry under mapFuncs/propMapGet/propMapPut according
+		to the invoke kind.  Returns (name, map) for functions reachable
+		via IDispatch, else None.
+		"""
+		id = fdesc.memid
+		funcflags = fdesc.wFuncFlags
+		try:
+			names = typeinfo.GetNames(id)
+			name=names[0]
+		except pythoncom.ole_error:
+			name = ""
+			names = None
+
+		doc = None
+		try:
+			if bForUser:
+				doc = typeinfo.GetDocumentation(id)
+		except pythoncom.ole_error:
+			pass
+
+		# dispid 0 is DISPID_VALUE - the object's default member.
+		if id==0 and name:
+			self.defaultDispatchName = name
+
+		invkind = fdesc.invkind
+
+		# We need to translate any Alias', Enums, structs etc in result and args
+		typerepr, flag, defval = fdesc.rettype
+#		sys.stderr.write("%s result - %s -> " % (name, typerepr))
+		typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo)
+#		sys.stderr.write("%s\n" % (typerepr,))
+		fdesc.rettype = typerepr, flag, defval, resultCLSID
+		# Translate any Alias or Enums in argument list.
+		argList = []
+		for argDesc in fdesc.args:
+			typerepr, flag, defval = argDesc
+#			sys.stderr.write("%s arg - %s -> " % (name, typerepr))
+			arg_type, arg_clsid, arg_doc = _ResolveType(typerepr, typeinfo)
+			argDesc = arg_type, flag, defval, arg_clsid
+#			sys.stderr.write("%s\n" % (argDesc[0],))
+			argList.append(argDesc)
+		fdesc.args = tuple(argList)
+
+		hidden = (funcflags & pythoncom.FUNCFLAG_FHIDDEN) != 0
+		if invkind == pythoncom.INVOKE_PROPERTYGET:
+			map = self.propMapGet
+		# This is not the best solution, but I dont think there is
+		# one without specific "set" syntax.
+		# If there is a single PUT or PUTREF, it will function as a property.
+		# If there are both, then the PUT remains a property, and the PUTREF
+		# gets transformed into a function.
+		# (in vb, PUT=="obj=other_obj", PUTREF="set obj=other_obj
+		elif invkind in (pythoncom.INVOKE_PROPERTYPUT, pythoncom.INVOKE_PROPERTYPUTREF):
+			# Special case
+			existing = self.propMapPut.get(name, None)
+			if existing is not None:
+				if existing.desc[4]==pythoncom.INVOKE_PROPERTYPUT: # Keep this one
+					map = self.mapFuncs
+					name = "Set"+name
+				else: # Existing becomes a func.
+					existing.wasProperty = 1
+					self.mapFuncs["Set"+name]=existing
+					map = self.propMapPut # existing gets overwritten below.
+			else:
+				map = self.propMapPut # first time weve seen it.
+
+		elif invkind == pythoncom.INVOKE_FUNC:
+			map = self.mapFuncs
+		else:
+			map = None
+		if not map is None: 
+#				if map.has_key(name):
+#					sys.stderr.write("Warning - overwriting existing method/attribute %s\n" % name)
+			map[name] = MapEntry(tuple(fdesc), names, doc, resultCLSID, resultDoc, hidden)
+			# any methods that can't be reached via DISPATCH we return None
+			# for, so dynamic dispatch doesnt see it.
+			if fdesc.funckind != pythoncom.FUNC_DISPATCH:
+				return None
+			return (name,map)
+		return None
+
+	def _AddVar_(self,typeinfo,fdesc,bForUser):
+		"""Add one VARDESC (dispatch property) to propMap.
+
+		Returns (name, map) for VAR_DISPATCH entries, else None.
+		"""
+		### need pythoncom.VARFLAG_FRESTRICTED ...
+		### then check it
+
+		if fdesc.varkind == pythoncom.VAR_DISPATCH:
+			id = fdesc.memid
+			names = typeinfo.GetNames(id)
+			# Translate any Alias or Enums in result.
+			typerepr, flags, defval = fdesc.elemdescVar
+			typerepr, resultCLSID, resultDoc = _ResolveType(typerepr, typeinfo)
+			fdesc.elemdescVar = typerepr, flags, defval
+			doc = None
+			try:
+				if bForUser: doc = typeinfo.GetDocumentation(id)
+			except pythoncom.ole_error:
+				pass
+
+			# handle the enumerator specially
+			map = self.propMap
+			# Check if the element is hidden.
+			hidden = 0
+			if hasattr(fdesc,"wVarFlags"):
+				hidden = (fdesc.wVarFlags & 0x40) != 0 # VARFLAG_FHIDDEN
+			map[names[0]] = MapEntry(tuple(fdesc), names, doc, resultCLSID, resultDoc, hidden)
+			return (names[0],map)
+		else:
+			return None
+
+	def Build(self, typeinfo, attr, bForUser = 1):
+		"""Populate all the maps from a typeinfo and its TYPEATTR tuple.
+
+		attr[6] is the function count, attr[7] the variable count.
+		"""
+		self.clsid = attr[0]
+		self.bIsDispatch = (attr.wTypeFlags & pythoncom.TYPEFLAG_FDISPATCHABLE) != 0
+		if typeinfo is None: return
+		# Loop over all methods
+		for j in xrange(attr[6]):
+			fdesc = typeinfo.GetFuncDesc(j)
+			self._AddFunc_(typeinfo,fdesc,bForUser)
+
+		# Loop over all variables (ie, properties)
+		for j in xrange(attr[7]):
+			fdesc = typeinfo.GetVarDesc(j)
+			self._AddVar_(typeinfo,fdesc,bForUser)
+		
+		# Now post-process the maps.  For any "Get" or "Set" properties
+		# that have arguments, we must turn them into methods.  If a method
+		# of the same name already exists, change the name.
+		for key, item in self.propMapGet.items():
+			self._propMapGetCheck_(key,item)
+					
+		for key, item in self.propMapPut.items():
+			self._propMapPutCheck_(key,item)
+
+	def CountInOutOptArgs(self, argTuple):
+		"Return tuple counting in/outs/OPTS.  Sum of result may not be len(argTuple), as some args may be in/out."
+		ins = out = opts = 0
+		for argCheck in argTuple:
+			inOut = argCheck[1]
+			# A zero flag word means direction unspecified - count both ways.
+			if inOut==0:
+				ins = ins + 1
+				out = out + 1
+			else:
+				if inOut & pythoncom.PARAMFLAG_FIN:
+					ins = ins + 1
+				if inOut & pythoncom.PARAMFLAG_FOPT:
+					opts = opts + 1
+				if inOut & pythoncom.PARAMFLAG_FOUT:
+					out = out + 1
+		return ins, out, opts
+
+	def MakeFuncMethod(self, entry, name, bMakeClass = 1):
+		"""Return source lines for one method - typed or varargs flavour."""
+		# If we have a type description, and not varargs...
+		if entry.desc is not None and (len(entry.desc) < 6 or entry.desc[6]!=-1):
+			return self.MakeDispatchFuncMethod(entry, name, bMakeClass)
+		else:
+			return self.MakeVarArgsFuncMethod(entry, name, bMakeClass)
+
+	def MakeDispatchFuncMethod(self, entry, name, bMakeClass = 1):
+		"""Generate source lines for a fully-typed method (via InvokeTypes).
+
+		Returns a list of source-code line strings.  When the return type
+		is known to need no translation, the generated code calls
+		InvokeTypes directly instead of the slower _ApplyTypes_ path.
+		"""
+		fdesc = entry.desc
+		doc = entry.doc
+		names = entry.names
+		ret = []
+		if bMakeClass:
+			linePrefix = "\t"
+			defNamedOptArg = "defaultNamedOptArg"
+			defNamedNotOptArg = "defaultNamedNotOptArg"
+			defUnnamedArg = "defaultUnnamedArg"
+		else:
+			linePrefix = ""
+			defNamedOptArg = "pythoncom.Missing"
+			defNamedNotOptArg = "pythoncom.Missing"
+			defUnnamedArg = "pythoncom.Missing"
+		defOutArg = "pythoncom.Missing"
+		id = fdesc[0]
+		s = linePrefix + 'def ' + name + '(self' + BuildCallList(fdesc, names, defNamedOptArg, defNamedNotOptArg, defUnnamedArg, defOutArg) + '):'
+		ret.append(s)
+		if doc and doc[1]:
+			ret.append(linePrefix + '\t' + _safeQuotedString(doc[1]))
+
+#		print "fdesc is ", fdesc
+
+		resclsid = entry.GetResultCLSID()
+		if resclsid:
+			resclsid = "'%s'" % resclsid
+		else:
+			resclsid = 'None'
+		# Strip the default values from the arg desc
+		retDesc = fdesc[8][:2]
+		argsDesc = tuple(map(lambda what: what[:2], fdesc[2]))
+		# The runtime translation of the return types is expensive, so when we know the
+		# return type of the function, there is no need to check the type at runtime.
+		# To qualify, this function must return a "simple" type, and have no byref args.
+		# Check if we have byrefs or anything in the args which mean we still need a translate.
+		param_flags = map(lambda what: what[1], fdesc[2])
+		bad_params = filter(lambda flag: flag & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FRETVAL)!=0, param_flags)
+		s = None
+		if len(bad_params)==0 and len(retDesc)==2 and retDesc[1]==0:
+			rd = retDesc[0]
+			if NoTranslateMap.has_key(rd):
+				# Simple return type - call InvokeTypes and return directly.
+				s = '%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)' % (linePrefix, id, fdesc[4], retDesc, argsDesc, _BuildArgList(fdesc, names))
+			elif rd in [pythoncom.VT_DISPATCH, pythoncom.VT_UNKNOWN]:
+				# Object return - wrap in a Dispatch (QI'ing IUnknown first).
+				s = '%s\tret = self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)\n' % (linePrefix, id, fdesc[4], retDesc, `argsDesc`, _BuildArgList(fdesc, names))
+				s = s + '%s\tif ret is not None:\n' % (linePrefix,)
+				if rd == pythoncom.VT_UNKNOWN:
+					s = s + "%s\t\t# See if this IUnknown is really an IDispatch\n" % (linePrefix,)
+					s = s + "%s\t\ttry:\n" % (linePrefix,)
+					s = s + "%s\t\t\tret = ret.QueryInterface(pythoncom.IID_IDispatch)\n" % (linePrefix,)
+					s = s + "%s\t\texcept pythoncom.error:\n" % (linePrefix,)
+					s = s + "%s\t\t\treturn ret\n" % (linePrefix,)
+				s = s + '%s\t\tret = Dispatch(ret, %s, %s, UnicodeToString=%d)\n' % (linePrefix,`name`, resclsid, NeedUnicodeConversions) 
+				s = s + '%s\treturn ret' % (linePrefix)
+			elif rd == pythoncom.VT_BSTR:
+				if NeedUnicodeConversions:
+					s = "%s\t# Result is a Unicode object - perform automatic string conversion\n" % (linePrefix,)
+					s = s + '%s\treturn str(self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s))' % (linePrefix, id, fdesc[4], retDesc, `argsDesc`, _BuildArgList(fdesc, names))
+				else:
+					s = "%s\t# Result is a Unicode object - return as-is for this version of Python\n" % (linePrefix,)
+					s = s + '%s\treturn self._oleobj_.InvokeTypes(%d, LCID, %s, %s, %s%s)' % (linePrefix, id, fdesc[4], retDesc, `argsDesc`, _BuildArgList(fdesc, names))
+			# else s remains None
+		if s is None:
+			# Fall back to the general (runtime-translated) path.
+			s = '%s\treturn self._ApplyTypes_(%d, %s, %s, %s, %s, %s%s)' % (linePrefix, id, fdesc[4], retDesc, argsDesc, `name`, resclsid, _BuildArgList(fdesc, names))
+
+		ret.append(s)
+		ret.append("")
+		return ret
+
+	def MakeVarArgsFuncMethod(self, entry, name, bMakeClass = 1):
+		"""Generate source lines for a varargs method (raw Invoke call).
+
+		Returns a list of source-code line strings.
+		"""
+		fdesc = entry.desc
+		names = entry.names
+		doc = entry.doc
+		ret = []
+		argPrefix = "self"
+		if bMakeClass:
+			linePrefix = "\t"
+		else:
+			linePrefix = ""
+		ret.append(linePrefix + 'def ' + name + '(' + argPrefix + ', *args):')
+		if doc and doc[1]: ret.append(linePrefix + '\t' + _safeQuotedString(doc[1]))
+		if fdesc:
+			invoketype = fdesc[4]
+		else:
+			invoketype = pythoncom.DISPATCH_METHOD
+		s = linePrefix + '\treturn self._get_good_object_(self._oleobj_.Invoke(*(('
+		ret.append(s + str(entry.dispid) + ",0,%d,1)+args)),'%s')" % (invoketype, names[0]))
+		ret.append("")
+		return ret
+
+# Note - "DispatchItem" poorly named - need a new intermediate class.
+class VTableItem(DispatchItem):
+	"""A DispatchItem that also records the interface's vtable layout."""
+	def Build(self, typeinfo, attr, bForUser = 1):
+		DispatchItem.Build(self, typeinfo, attr, bForUser)
+		assert typeinfo is not None, "Cant build vtables without type info!"
+
+		# Order methods by their vtable offset (desc[7]).
+		def cmp_vtable_off(m1, m2):
+			return cmp(m1.desc[7], m2.desc[7])
+
+		meth_list = self.mapFuncs.values() + self.propMapGet.values() + self.propMapPut.values()
+
+		meth_list.sort( cmp_vtable_off )
+		# Now turn this list into the run-time representation
+		# (ready for immediate use or writing to gencache)
+		self.vtableFuncs = []
+		for entry in meth_list:
+			self.vtableFuncs.append( (entry.names, entry.dispid, entry.desc) )
+
+# A Lazy dispatch item - builds an item on request using info from
+# an ITypeComp.  The dynamic module makes the called to build each item,
+# and also holds the references to the typeinfo and typecomp.
+class LazyDispatchItem(DispatchItem):
+	"""DispatchItem built lazily - no typeinfo is processed up-front."""
+	typename = "LazyDispatchItem"
+	def __init__(self, attr, doc):
+		self.clsid = attr[0]
+		# Pass typeinfo=None so the base class skips Build().
+		DispatchItem.__init__(self, None, attr, doc, 0)
+
+# Variant types substituted with a simpler equivalent when resolved
+# (all of these are marshalled as 32-bit integers).
+typeSubstMap = {
+	pythoncom.VT_INT: pythoncom.VT_I4,
+	pythoncom.VT_UINT: pythoncom.VT_I4,
+	pythoncom.VT_HRESULT: pythoncom.VT_I4,
+}
+
+def _ResolveType(typerepr, itypeinfo):
+	"""Resolve a type description to a concrete variant type.
+
+	typerepr is either a plain VT_* constant or a nested tuple such as
+	(VT_PTR, sub) / (VT_SAFEARRAY, sub) / (VT_USERDEFINED, hreftype).
+	Returns a (vt, resultCLSID, resultDoc) tuple; the latter two are
+	None unless the type resolves to a COM object.
+	Raises NotSupportedException for unresolvable user-defined types.
+	"""
+	# Resolve VT_USERDEFINED (often aliases or typed IDispatches)
+
+	if type(typerepr)==types.TupleType:
+		indir_vt, subrepr = typerepr
+		if indir_vt == pythoncom.VT_PTR:
+			# If it is a VT_PTR to a VT_USERDEFINED that is an IDispatch/IUnknown,
+			# then it resolves to simply the object.
+			# Otherwise, it becomes a ByRef of the resolved type
+			# We need to drop an indirection level on pointer to user defined interfaces.
+			# eg, (VT_PTR, (VT_USERDEFINED, somehandle)) needs to become VT_DISPATCH
+			# only when "somehandle" is an object.
+			# but (VT_PTR, (VT_USERDEFINED, otherhandle)) doesnt get the indirection dropped.
+			was_user = type(subrepr)==types.TupleType and subrepr[0]==pythoncom.VT_USERDEFINED
+			subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo)
+			if was_user and subrepr in [pythoncom.VT_DISPATCH, pythoncom.VT_UNKNOWN, pythoncom.VT_RECORD]:
+				# Drop the VT_PTR indirection
+				return subrepr, sub_clsid, sub_doc
+			# Change PTR indirection to byref
+			return subrepr | pythoncom.VT_BYREF, sub_clsid, sub_doc
+		if indir_vt == pythoncom.VT_SAFEARRAY:
+			# resolve the array element, and convert to VT_ARRAY
+			subrepr, sub_clsid, sub_doc = _ResolveType(subrepr, itypeinfo)
+			return pythoncom.VT_ARRAY | subrepr, sub_clsid, sub_doc
+		if indir_vt == pythoncom.VT_CARRAY: # runtime has no support for this yet.
+			# resolve the array element, and convert to VT_CARRAY
+			# sheesh - return _something_
+			return pythoncom.VT_CARRAY, None, None
+		if indir_vt == pythoncom.VT_USERDEFINED:
+			resultTypeInfo = itypeinfo.GetRefTypeInfo(subrepr)
+			resultAttr = resultTypeInfo.GetTypeAttr()
+			typeKind = resultAttr.typekind
+			if typeKind == pythoncom.TKIND_ALIAS:
+				# Recurse into the aliased type.
+				tdesc = resultAttr.tdescAlias
+				return _ResolveType(tdesc, resultTypeInfo)
+			elif typeKind in [pythoncom.TKIND_ENUM, pythoncom.TKIND_MODULE]:
+				# For now, assume Long
+				return pythoncom.VT_I4, None, None
+
+			elif typeKind == pythoncom.TKIND_DISPATCH:
+				clsid = resultTypeInfo.GetTypeAttr()[0]
+				retdoc = resultTypeInfo.GetDocumentation(-1)
+				return pythoncom.VT_DISPATCH, clsid, retdoc
+
+			elif typeKind in [pythoncom.TKIND_INTERFACE,
+							  pythoncom.TKIND_COCLASS]:
+				# XXX - should probably get default interface for CO_CLASS???
+				clsid = resultTypeInfo.GetTypeAttr()[0]
+				retdoc = resultTypeInfo.GetDocumentation(-1)
+				return pythoncom.VT_UNKNOWN, clsid, retdoc
+
+			elif typeKind == pythoncom.TKIND_RECORD:
+				return pythoncom.VT_RECORD, None, None
+			raise NotSupportedException("Can not resolve alias or user-defined type")
+	return typeSubstMap.get(typerepr,typerepr), None, None
+
+def _BuildArgList(fdesc, names):
+    "Builds list of args to the underlying Invoke method."
+    # Word has TypeInfo for Insert() method, but says "no args"
+    numArgs = max(fdesc[6], len(fdesc[2]))
+    names = list(names)
+    # Replace missing names with generated argN placeholders.
+    while None in names:
+    	i = names.index(None)
+    	names[i] = "arg%d" % (i,)
+    # names[0] is the method name itself - skip it, and sanitize the rest.
+    names = map(MakePublicAttributeName, names[1:])
+    name_num = 0
+    while len(names) < numArgs:
+        names.append("arg%d" % (len(names),))
+    # As per BuildCallList(), avoid huge lines.
+    # Hack a "\n" at the end of every 5th name - "strides" would be handy
+    # here but don't exist in 2.2
+    for i in range(0, len(names), 5):
+        names[i] = names[i] + "\n\t\t\t"
+    return "," + string.join(names, ", ")
+
+# Characters permitted in the Python identifiers we generate.
+valid_identifier_chars = string.letters + string.digits + "_"
+
+def demunge_leading_underscores(className):
+	"""Rotate leading underscores to the end of the name.
+
+	Names starting with '__' would be name-mangled inside generated
+	classes, so eg '__Name' becomes '_Name_'.
+	"""
+	i = 0
+	while className[i] == "_":
+		i += 1
+	assert i >= 2, "Should only be here with names starting with '__'"
+	return className[i-1:] + className[:i-1]
+
+# Given a "public name" (eg, the name of a class, function, etc)
+# make sure it is a legal (and reasonable!) Python name.
+def MakePublicAttributeName(className, is_global = False):
+	"""Return a legal, reasonable Python name for a COM attribute name."""
+	# Given a class attribute that needs to be public, convert it to a
+	# reasonable name.
+	# Also need to be careful that the munging doesnt
+	# create duplicates - eg, just removing a leading "_" is likely to cause
+	# a clash.
+	# if is_global is True, then the name is a global variable that may
+	# overwrite a builtin - eg, "None"
+	if className[:2]=='__':
+		return demunge_leading_underscores(className)
+	elif iskeyword(className): # all keywords are lower case
+		return string.capitalize(className)
+	elif className == 'None':
+		# assign to None is evil (and SyntaxError in 2.4) - note
+		# that if it was a global it would get picked up below
+		className = 'NONE'
+	elif is_global and __builtins__.has_key(className):
+		# builtins may be mixed case.  If capitalizing it doesn't change it,
+		# force to all uppercase (eg, "None", "True" become "NONE", "TRUE"
+		ret = className.capitalize()
+		if ret==className: # didn't change - force all uppercase.
+			ret = ret.upper()
+		return ret
+	# Strip non printable chars
+	return filter( lambda char: char in valid_identifier_chars, className)
+
+# Given a default value passed by a type library, return a string with
+# an appropriate repr() for the type.
+# Takes a raw ELEMDESC and returns a repr string, or None
+# (NOTE: The string itself may be '"None"', which is valid, and different to None.
+# XXX - To do: Dates are probably screwed, but can they come in?
+def MakeDefaultArgRepr(defArgVal):
+  """Return a repr string for a parameter's IDL default value, or None.
+
+  defArgVal is a raw ELEMDESC-style tuple; element 1 holds the PARAMFLAG
+  bits and element 2 the default value (present only with FHASDEFAULT).
+  """
+  try:
+    inOut = defArgVal[1]
+  except IndexError:
+    # something strange - assume is in param.
+    inOut = pythoncom.PARAMFLAG_FIN
+
+  if inOut & pythoncom.PARAMFLAG_FHASDEFAULT:
+    # hack for Unicode until it repr's better.
+    val = defArgVal[2]
+    if type(val) is UnicodeType:
+      return repr(str(val))
+    elif type(val) is TimeType:
+      year=val.year; month=val.month; day=val.day; hour=val.hour; minute=val.minute; second=val.second; msec=val.msec
+      return "pythoncom.MakeTime((%(year)d, %(month)d, %(day)d, %(hour)d, %(minute)d, %(second)d,0,0,0,%(msec)d))" % locals()
+    else:
+      return repr(val)
+  return None
+
+def BuildCallList(fdesc, names, defNamedOptArg, defNamedNotOptArg, defUnnamedArg, defOutArg, is_comment = False):
+  "Builds a Python declaration for a method."
+  # Names[0] is the func name - param names are from 1.
+  # The def* args are the source-text default values to emit for named
+  # optional, named required, unnamed, and pure-out parameters.
+  numArgs = len(fdesc[2])
+  numOptArgs = fdesc[6]
+  strval = ''
+  if numOptArgs==-1:	# Special value that says "var args after here"
+    firstOptArg = numArgs
+    numArgs = numArgs - 1
+  else:
+    firstOptArg = numArgs - numOptArgs
+  for arg in xrange(numArgs):
+    try:
+      argName = names[arg+1] 
+      namedArg = argName is not None
+    except IndexError:
+      namedArg = 0
+    if not namedArg: argName = "arg%d" % (arg)
+    thisdesc = fdesc[2][arg]
+    # See if the IDL specified a default value
+    defArgVal = MakeDefaultArgRepr(thisdesc)
+    if defArgVal is None:
+      # Out params always get their special default
+      if thisdesc[1] & (pythoncom.PARAMFLAG_FOUT | pythoncom.PARAMFLAG_FIN) == pythoncom.PARAMFLAG_FOUT:
+        defArgVal = defOutArg
+      else:          
+        # Unnamed arg - always allow default values.
+        if namedArg:
+          # Is a named argument
+          if arg >= firstOptArg:
+            defArgVal = defNamedOptArg
+          else:
+            defArgVal = defNamedNotOptArg
+        else:
+          defArgVal = defUnnamedArg
+
+    argName = MakePublicAttributeName(argName)
+    # insanely long lines with an 'encoding' flag crashes python 2.4.0
+    # keep 5 args per line
+    # This may still fail if the arg names are insane, but that seems
+    # unlikely.  See also _BuildArgList()
+    if (arg+1) % 5 == 0:
+        strval = strval + "\n"
+        if is_comment:
+            strval = strval + "#"
+        strval = strval + "\t\t\t"
+    strval = strval + ", " + argName
+    if defArgVal:
+      strval = strval + "=" + defArgVal
+  if numOptArgs==-1:
+    # Trailing varargs parameter takes the last declared name.
+    strval = strval + ", *" + names[-1]
+
+  return strval
+
+
+if __name__=='__main__':
+  # This module is a library - point the user at the real entry point.
+  print "Use 'makepy.py' to generate Python code - this module is just a helper"
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/combrowse.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/combrowse.py
new file mode 100644
index 0000000..4928bef
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/combrowse.py
@@ -0,0 +1,541 @@
+"""A utility for browsing COM objects.
+
+ Usage:
+
+  Command Prompt
+
+    Use the command *"python.exe combrowse.py"*.  This will display
+    a fairly small, modal dialog.
+
+  Pythonwin
+
+    Use the "Run Script" menu item, and this will create the browser in an
+    MDI window.  This window can be fully resized.
+
+ Details
+
+   This module allows browsing of registered Type Libraries, COM categories, 
+   and running COM objects.  The display is similar to the Pythonwin object
+   browser, and displays the objects in a hierarchical window.
+
+   Note that this module requires the win32ui (ie, Pythonwin) distribution to
+   work.
+
+"""
+import win32con
+import win32api, win32ui
+import string, sys
+import pythoncom
+from win32com.client import util
+from pywin.tools import browser
+
+class HLIRoot(browser.HLIPythonObject):
+	"""Root of the browser tree - categories, running objects, type libs."""
+	def __init__(self, title):
+		self.name = title
+	def GetSubList(self):
+		return [HLIHeadingCategory(), HLI_IEnumMoniker(pythoncom.GetRunningObjectTable().EnumRunning(), "Running Objects"), HLIHeadingRegisterdTypeLibs()]
+	def __cmp__(self, other):
+		# Sort root items by name.
+		return cmp(self.name, other.name)
+
+class HLICOM(browser.HLIPythonObject):
+	"""Base hierarchy-list item for COM objects; expandable by default."""
+	def GetText(self):
+		return self.name
+	def CalculateIsExpandable(self):
+		return 1
+
+class HLICLSID(HLICOM):
+	"""Leaf item displaying a CLSID (shown via its ProgID when available)."""
+	def __init__(self, myobject, name=None ):
+		# Accept either an IID object or its string form.
+		if type(myobject)==type(''):
+			myobject = pythoncom.MakeIID(myobject)
+		if name is None:
+			try:
+				name = pythoncom.ProgIDFromCLSID(myobject)
+			except pythoncom.com_error: 
+				name = str(myobject)
+			name = "IID: " + name
+		HLICOM.__init__(self, myobject, name)
+	def CalculateIsExpandable(self):
+		return 0
+	def GetSubList(self):
+		return []
+
+class HLI_Interface(HLICOM):
+	"""Marker base class for items wrapping a COM interface pointer."""
+	pass
+
+class HLI_Enum(HLI_Interface):
+	"""Item wrapping a COM enumerator interface."""
+	def GetBitmapColumn(self):
+		return 0 # Always a folder.
+	def CalculateIsExpandable(self):
+		# Expandable only if the enumerator yields at least one item;
+		# Reset() so a later full enumeration starts from the beginning.
+		if self.myobject is not None:
+			rc = len(self.myobject.Next(1))>0
+			self.myobject.Reset()
+		else:
+			rc = 0
+		return rc
+	pass  # NOTE(review): dead statement - harmless leftover.
+
+class HLI_IEnumMoniker(HLI_Enum):
+	"""Expands an IEnumMoniker into one child item per moniker."""
+	def GetSubList(self):
+		ctx = pythoncom.CreateBindCtx()
+		ret = []
+		for mon in util.Enumerator(self.myobject):
+			ret.append(HLI_IMoniker(mon, mon.GetDisplayName(ctx, None)))
+		return ret
+
<br/>
+class HLI_IMoniker(HLI_Interface):
+	"""Item for a single moniker - shows its hash and sub-monikers."""
+	def GetSubList(self):
+		ret = []
+		ret.append(browser.MakeHLI(self.myobject.Hash(), "Hash Value"))
+		subenum = self.myobject.Enum(1)
+		ret.append(HLI_IEnumMoniker(subenum, "Sub Monikers"))
+		return ret
+
+class HLIHeadingCategory(HLICOM):
+	"A tree heading for registered categories"
+	def GetText(self):
+		return "Registered Categories"
+	def GetSubList(self):
+		catinf=pythoncom.CoCreateInstance(pythoncom.CLSID_StdComponentCategoriesMgr,None,pythoncom.CLSCTX_INPROC,pythoncom.IID_ICatInformation)
+		enum=util.Enumerator(catinf.EnumCategories())
+		ret = []
+		try:
+			for catid, lcid, desc in enum:
+				ret.append(HLICategory((catid, lcid, desc)))
+		except pythoncom.com_error:
+			# Registered categories occasionally seem to give spurious errors.
+			pass # Use what we already have.
+		return ret
+
+class HLICategory(HLICOM):
+	"An actual Registered Category"
+	def GetText(self):
+		# myobject is the (catid, lcid, desc) tuple.
+		desc =  self.myobject[2]
+		if not desc: desc = "(unnamed category)"
+		return desc
+	def GetSubList(self):
+		# Enumerating the classes of a category can be slow - show a wait cursor.
+		win32ui.DoWaitCursor(1)
+		catid, lcid, desc = self.myobject
+		catinf=pythoncom.CoCreateInstance(pythoncom.CLSID_StdComponentCategoriesMgr,None,pythoncom.CLSCTX_INPROC,pythoncom.IID_ICatInformation)
+		ret = []
+		for clsid in util.Enumerator(catinf.EnumClassesOfCategories((catid,),())):
+			ret.append(HLICLSID(clsid))
+		win32ui.DoWaitCursor(0)
+
+		return ret
+
+class HLIHelpFile(HLICOM):
+	"""Leaf item for a help-file reference; double-click opens WinHelp."""
+	def CalculateIsExpandable(self):
+		return 0
+	def GetText(self):
+		import os
+		fname, ctx = self.myobject
+		base = os.path.split(fname)[1]
+		return "Help reference in %s" %( base)
+
+	def TakeDefaultAction(self):
+		# With a help context, jump straight there; otherwise open the finder.
+		fname, ctx = self.myobject
+		if ctx:
+			cmd = win32con.HELP_CONTEXT
+		else:
+			cmd = win32con.HELP_FINDER
+		win32api.WinHelp(win32ui.GetMainFrame().GetSafeHwnd(), fname, cmd, ctx)
+	def GetBitmapColumn(self):
+		return 6
+
+class HLIRegisteredTypeLibrary(HLICOM):
+	"""Item for one registered type library version.
+
+	myobject is a (clsid-string, version-string) pair; children are built
+	by walking the HKCR\\TypeLib\\{clsid}\\{version} registry subtree for
+	each locale/platform registration.
+	"""
+	def GetSubList(self):
+		import os
+		clsidstr, versionStr = self.myobject
+		collected = []
+		helpPath = ""
+		key = win32api.RegOpenKey(win32con.HKEY_CLASSES_ROOT, "TypeLib\\%s\\%s" % (clsidstr, versionStr))
+		win32ui.DoWaitCursor(1)
+		try:
+			num = 0
+			while 1:
+				try:
+					subKey = win32api.RegEnumKey(key, num)
+				except win32api.error:
+					break
+				hSubKey = win32api.RegOpenKey(key, subKey)
+				try:
+					value, typ = win32api.RegQueryValueEx(hSubKey, None)
+					if typ == win32con.REG_EXPAND_SZ:
+						value = win32api.ExpandEnvironmentStrings(value)
+				except win32api.error:
+					value = ""
+				if subKey=="HELPDIR":
+					helpPath = value
+				elif subKey=="Flags":
+					flags = value  # NOTE(review): collected but never used below.
+				else:
+					try:
+						# Any other numeric subkey is a locale id (LCID).
+						# NOTE(review): atof parses it as a float; LCIDs are
+						# integers - presumably works as only used for display.
+						lcid = string.atof(subKey)
+						lcidkey = win32api.RegOpenKey(key, subKey)
+						# Enumerate the platforms
+						lcidnum = 0
+						while 1:
+							try:
+								platform = win32api.RegEnumKey(lcidkey, lcidnum)
+							except win32api.error:
+								break
+							try:
+								hplatform = win32api.RegOpenKey(lcidkey, platform)
+								fname, typ = win32api.RegQueryValueEx(hplatform, None)
+								if typ == win32con.REG_EXPAND_SZ:
+									fname = win32api.ExpandEnvironmentStrings(fname)
+							except win32api.error:
+								fname = ""
+							collected.append((lcid, platform, fname))
+							lcidnum = lcidnum + 1
+						win32api.RegCloseKey(lcidkey)
+					except ValueError:
+						pass
+				num = num + 1
+		finally:
+			win32ui.DoWaitCursor(0)
+			win32api.RegCloseKey(key)
+		# Now, loop over my collected objects, adding a TypeLib and a HelpFile
+		ret = []
+#		if helpPath: ret.append(browser.MakeHLI(helpPath, "Help Path"))
+		ret.append(HLICLSID(clsidstr))
+		for lcid, platform, fname in collected:
+			extraDescs = []
+			if platform!="win32":
+				extraDescs.append(platform)
+			if lcid:
+				extraDescs.append("locale=%s"%lcid)
+			extraDesc = ""
+			if extraDescs: extraDesc = " (%s)" % string.join(extraDescs, ", ")
+			ret.append(HLITypeLib(fname, "Type Library" + extraDesc))
+		ret.sort()
+		return ret
+
+class HLITypeLibEntry(HLICOM):
+	"""Item for one entry in a type library; myobject is (typelib, index)."""
+	def GetText(self):
+		tlb, index = self.myobject
+		name, doc, ctx, helpFile = tlb.GetDocumentation(index)
+		try:
+			typedesc = HLITypeKinds[tlb.GetTypeInfoType(index)][1]
+		except KeyError:
+			typedesc = "Unknown!"
+		return name + " - " + typedesc
+	def GetSubList(self):
+		# Common children: the doc string and any help-file reference.
+		tlb, index = self.myobject
+		name, doc, ctx, helpFile = tlb.GetDocumentation(index)
+		ret = []
+		if doc: ret.append(browser.HLIDocString(doc, "Doc"))
+		if helpFile: ret.append(HLIHelpFile(	(helpFile, ctx) ))
+		return ret
+
+class HLICoClass(HLITypeLibEntry):
+	"""Type library coclass entry - children are its implemented interfaces."""
+	def GetSubList(self):
+		ret = HLITypeLibEntry.GetSubList(self)
+		tlb, index = self.myobject
+		typeinfo = tlb.GetTypeInfo(index)
+		attr = typeinfo.GetTypeAttr()
+		# attr[8] is the number of implemented interfaces.
+		for j in range(attr[8]):
+			flags = typeinfo.GetImplTypeFlags(j)
+			refType = typeinfo.GetRefTypeInfo(typeinfo.GetRefTypeOfImplType(j))
+			refAttr = refType.GetTypeAttr()
+			ret.append(browser.MakeHLI(refAttr[0], "Name=%s, Flags = %d" % (refAttr[0], flags)))
+		return ret
+
+
+class HLITypeLibMethod(HLITypeLibEntry):
+	"""Interface/dispinterface entry - children are its properties and functions."""
+	def __init__(self, ob, name = None):
+		self.entry_type = "Method"
+		HLITypeLibEntry.__init__(self, ob, name)
+	def GetSubList(self):
+		ret = HLITypeLibEntry.GetSubList(self)
+		tlb, index = self.myobject
+		typeinfo = tlb.GetTypeInfo(index)
+		attr = typeinfo.GetTypeAttr()
+		# attr[7] is the variable count, attr[6] the function count.
+		for i in range(attr[7]):
+			ret.append(HLITypeLibProperty((typeinfo, i)))
+		for i in range(attr[6]):
+			ret.append(HLITypeLibFunction((typeinfo, i)))
+		return ret
+
+class HLITypeLibEnum(HLITypeLibEntry):
+	"""Enum/module entry - children are its named constant values."""
+	def __init__(self, myitem):
+		typelib, index = myitem
+		typeinfo = typelib.GetTypeInfo(index)
+		self.id = typeinfo.GetVarDesc(index)[0]
+		name = typeinfo.GetNames(self.id)[0]
+		HLITypeLibEntry.__init__(self, myitem, name)
+	def GetText(self):
+		return self.name + " - Enum/Module"
+	def GetSubList(self):
+		ret = []
+		typelib, index = self.myobject
+		typeinfo = typelib.GetTypeInfo(index)
+		attr = typeinfo.GetTypeAttr()
+		# One child per variable: vdesc[0] is the memid, vdesc[1] the value.
+		for j in range(attr[7]):
+			vdesc = typeinfo.GetVarDesc(j)
+			name = typeinfo.GetNames(vdesc[0])[0]
+			ret.append(browser.MakeHLI(vdesc[1], name))
+		return ret
+
class HLITypeLibProperty(HLICOM):
	"""HLI node for a single property (VARDESC) of a type info."""
	def __init__(self, myitem):
		# myitem is a (typeinfo, variable index) tuple.
		typeinfo, index = myitem
		# First element of the VARDESC is the member (dispatch) id.
		self.id = typeinfo.GetVarDesc(index)[0]
		name = typeinfo.GetNames(self.id)[0]
		HLICOM.__init__(self, myitem, name)
	def GetText(self):
		return self.name + " - Property"
	def GetSubList(self):
		# Expose the interesting fields of the VARDESC tuple as children.
		ret = []
		typeinfo, index = self.myobject
		names = typeinfo.GetNames(self.id)
		if len(names)>1:
			ret.append(browser.MakeHLI(names[1:], "Named Params"))
		vd = typeinfo.GetVarDesc(index)
		ret.append(browser.MakeHLI(self.id, "Dispatch ID"))
		ret.append(browser.MakeHLI(vd[1], "Value"))
		ret.append(browser.MakeHLI(vd[2], "Elem Desc"))
		ret.append(browser.MakeHLI(vd[3], "Var Flags"))
		ret.append(browser.MakeHLI(vd[4], "Var Kind"))
		return ret
+
class HLITypeLibFunction(HLICOM):
	"""HLI node for a single function (FUNCDESC) of a type info.

	The class attributes below map pythoncom FUNC_*, INVOKE_*, FUNCFLAG_*
	and VT_* constants to the strings displayed in the browser.
	"""
	funckinds = {pythoncom.FUNC_VIRTUAL : "Virtual",
	             pythoncom.FUNC_PUREVIRTUAL : "Pure Virtual",
	             pythoncom.FUNC_STATIC : "Static",
	             pythoncom.FUNC_DISPATCH : "Dispatch",
		}
	invokekinds = {pythoncom.INVOKE_FUNC: "Function",
	             pythoncom.INVOKE_PROPERTYGET : "Property Get",
	             pythoncom.INVOKE_PROPERTYPUT : "Property Put",
	             pythoncom.INVOKE_PROPERTYPUTREF : "Property Put by reference",
		}
	funcflags = [(pythoncom.FUNCFLAG_FRESTRICTED, "Restricted"),
                   (pythoncom.FUNCFLAG_FSOURCE, "Source"),
                   (pythoncom.FUNCFLAG_FBINDABLE, "Bindable"),
                   (pythoncom.FUNCFLAG_FREQUESTEDIT, "Request Edit"),
                   (pythoncom.FUNCFLAG_FDISPLAYBIND, "Display Bind"),
                   (pythoncom.FUNCFLAG_FDEFAULTBIND, "Default Bind"),
                   (pythoncom.FUNCFLAG_FHIDDEN, "Hidden"),
                   (pythoncom.FUNCFLAG_FUSESGETLASTERROR, "Uses GetLastError"),
                   ]

	vartypes = {pythoncom.VT_EMPTY: "Empty",
                pythoncom.VT_NULL: "NULL",
                pythoncom.VT_I2: "Integer 2",
                pythoncom.VT_I4: "Integer 4",
                pythoncom.VT_R4: "Real 4",
                pythoncom.VT_R8: "Real 8",
                pythoncom.VT_CY: "CY",
                pythoncom.VT_DATE: "Date",
                pythoncom.VT_BSTR: "String",
                pythoncom.VT_DISPATCH: "IDispatch",
                pythoncom.VT_ERROR: "Error",
                pythoncom.VT_BOOL: "BOOL",
                pythoncom.VT_VARIANT: "Variant",
                pythoncom.VT_UNKNOWN: "IUnknown",
                pythoncom.VT_DECIMAL: "Decimal",
                pythoncom.VT_I1: "Integer 1",
                pythoncom.VT_UI1: "Unsigned integer 1",
                pythoncom.VT_UI2: "Unsigned integer 2",
                pythoncom.VT_UI4: "Unsigned integer 4",
                pythoncom.VT_I8: "Integer 8",
                pythoncom.VT_UI8: "Unsigned integer 8",
                pythoncom.VT_INT: "Integer",
                pythoncom.VT_UINT: "Unsigned integer",
                pythoncom.VT_VOID: "Void",
                pythoncom.VT_HRESULT: "HRESULT",
                pythoncom.VT_PTR: "Pointer",
                pythoncom.VT_SAFEARRAY: "SafeArray",
                pythoncom.VT_CARRAY: "C Array",
                pythoncom.VT_USERDEFINED: "User Defined",
                pythoncom.VT_LPSTR: "Pointer to string",
                pythoncom.VT_LPWSTR: "Pointer to Wide String",
                pythoncom.VT_FILETIME: "File time",
                pythoncom.VT_BLOB: "Blob",
                pythoncom.VT_STREAM: "IStream",
                pythoncom.VT_STORAGE: "IStorage",
                pythoncom.VT_STORED_OBJECT: "Stored object",
                pythoncom.VT_STREAMED_OBJECT: "Streamed object",
                pythoncom.VT_BLOB_OBJECT: "Blob object",
                pythoncom.VT_CF: "CF",
                pythoncom.VT_CLSID: "CLSID",
        }

	# Modifier bits which may be OR'd with the base VT_ types above.
	type_flags = [ (pythoncom.VT_VECTOR, "Vector"),
                   (pythoncom.VT_ARRAY, "Array"),
                   (pythoncom.VT_BYREF, "ByRef"),
                   (pythoncom.VT_RESERVED, "Reserved"),
        ]

	def __init__(self, myitem):
		# myitem is a (typeinfo, function index) tuple.
		typeinfo, index = myitem
		# First element of the FUNCDESC is the member (dispatch) id.
		self.id = typeinfo.GetFuncDesc(index)[0]
		name = typeinfo.GetNames(self.id)[0]
		HLICOM.__init__(self, myitem, name)
	def GetText(self):
		return self.name + " - Function"
	def MakeReturnTypeName(self, typ):
		# Map a VARTYPE value to a display name, wrapping it in any
		# modifier names (ByRef, Array, ...) whose bits are set.
		justtyp = typ & pythoncom.VT_TYPEMASK
		try:
			typname = self.vartypes[justtyp]
		except KeyError:
			typname = "?Bad type?"
		for (flag, desc) in self.type_flags:
			if flag & typ:
				typname = "%s(%s)" % (desc, typname)
		return typname
	def MakeReturnType(self, returnTypeDesc):
		# A type description may be a nested tuple (eg, a pointer to some
		# other type) - recurse to build the full display name.
		if type(returnTypeDesc)==type(()):
			first = returnTypeDesc[0]
			result = self.MakeReturnType(first)
			if first != pythoncom.VT_USERDEFINED:
				result = result + " " + self.MakeReturnType(returnTypeDesc[1])
			return result
		else:
			return self.MakeReturnTypeName(returnTypeDesc)

	def GetSubList(self):
		# Decode the FUNCDESC tuple into child display items.
		ret = []
		typeinfo, index = self.myobject
		names = typeinfo.GetNames(self.id)
		ret.append(browser.MakeHLI(self.id, "Dispatch ID"))
		# GetNames returns the function name followed by its parameter names.
		if len(names)>1:
			ret.append(browser.MakeHLI(string.join(names[1:], ", "), "Named Params"))
		fd = typeinfo.GetFuncDesc(index)
		if fd[1]:
			ret.append(browser.MakeHLI(fd[1], "Possible result values"))
		if fd[8]:
			# fd[8] is the return value's (type, flags, default) element desc.
			typ, flags, default = fd[8]
			val = self.MakeReturnType(typ)
			if flags:
				val = "%s (Flags=%d, default=%s)" % (val, flags, default)
			ret.append(browser.MakeHLI(val, "Return Type"))

		# fd[2] is the list of argument element descs.
		for argDesc in fd[2]:
			typ, flags, default = argDesc
			val = self.MakeReturnType(typ)
			if flags:
				val = "%s (Flags=%d)" % (val, flags)
			if default is not None:
				val = "%s (Default=%s)" % (val, default)
			ret.append(browser.MakeHLI(val, "Argument"))

		try:
			fkind = self.funckinds[fd[3]]
		except KeyError:
			fkind = "Unknown"
		ret.append(browser.MakeHLI(fkind, "Function Kind"))
		try:
			ikind = self.invokekinds[fd[4]]
		except KeyError:
			ikind = "Unknown"
		ret.append(browser.MakeHLI(ikind, "Invoke Kind"))
		# 5 = call conv
		# 5 = offset vtbl
		ret.append(browser.MakeHLI(fd[6], "Number Optional Params"))
		# fd[9] is the function flags bitmask.
		flagDescs = []
		for flag, desc in self.funcflags:
			if flag & fd[9]:
				flagDescs.append(desc)
		if flagDescs:
			ret.append(browser.MakeHLI(string.join(flagDescs, ", "), "Function Flags"))
		return ret
+	
# Maps a pythoncom TKIND_* constant to the (HLI class used to display that
# kind of type info, human-readable kind description) pair.
HLITypeKinds = {
		pythoncom.TKIND_ENUM : (HLITypeLibEnum, 'Enumeration'),
		pythoncom.TKIND_RECORD : (HLITypeLibEntry, 'Record'),
		pythoncom.TKIND_MODULE : (HLITypeLibEnum, 'Module'),
		pythoncom.TKIND_INTERFACE : (HLITypeLibMethod, 'Interface'),
		pythoncom.TKIND_DISPATCH : (HLITypeLibMethod, 'Dispatch'),
		pythoncom.TKIND_COCLASS : (HLICoClass, 'CoClass'),
		pythoncom.TKIND_ALIAS : (HLITypeLibEntry, 'Alias'),
		pythoncom.TKIND_UNION : (HLITypeLibEntry, 'Union')
	}
+
class HLITypeLib(HLICOM):
	"""HLI node for a whole type library; myobject is the library's filename."""
	def GetSubList(self):
		ret = []
		ret.append(browser.MakeHLI(self.myobject, "Filename"))
		try:
			tlb = pythoncom.LoadTypeLib(self.myobject)
		except pythoncom.com_error:
			# Report the failure as the single child rather than raising.
			return [browser.MakeHLI("%s can not be loaded" % self.myobject)]

		# One child per type info, using the HLI class matching its kind.
		for i in range(tlb.GetTypeInfoCount()):
			try:
				ret.append(HLITypeKinds[tlb.GetTypeInfoType(i)][0]( (tlb, i) ) )
			except pythoncom.com_error:
				ret.append(browser.MakeHLI("The type info can not be loaded!"))
		ret.sort()
		return ret
+		
class HLIHeadingRegisterdTypeLibs(HLICOM):
	# (The misspelled class name is kept - callers reference it by name.)
	"A tree heading for registered type libraries"
	def GetText(self):
		return "Registered Type Libraries"
	def GetSubList(self):
		"""Return an HLIRegisteredTypeLibrary child for each type library
		registered under HKCR\\TypeLib, using each library's highest
		registered version.
		"""
		# Explicit lookup in the registry.
		ret = []
		key = win32api.RegOpenKey(win32con.HKEY_CLASSES_ROOT, "TypeLib")
		win32ui.DoWaitCursor(1)
		try:
			num = 0
			while 1:
				try:
					keyName = win32api.RegEnumKey(key, num)
				except win32api.error:
					break
				# Enumerate all version info, remembering the highest version.
				subKey = win32api.RegOpenKey(key, keyName)
				name = None
				# BUGFIX: track the version string belonging to the best
				# version.  The old code appended the *last* enumerated
				# version string, which need not match 'name'/'bestVersion'
				# (and could even be stale from a previous outer iteration).
				bestVersionStr = None
				try:
					subNum = 0
					bestVersion = 0.0
					while 1:
						try:
							versionStr = win32api.RegEnumKey(subKey, subNum)
						except win32api.error:
							break
						try:
							versionFlt = string.atof(versionStr)
						except ValueError:
							versionFlt = 0 # ????
						if versionFlt > bestVersion:
							bestVersion = versionFlt
							bestVersionStr = versionStr
							name = win32api.RegQueryValue(subKey, versionStr)
						subNum = subNum + 1
				finally:
					win32api.RegCloseKey(subKey)
				if name is not None:
					ret.append(HLIRegisteredTypeLibrary((keyName, bestVersionStr), name))
				num = num + 1
		finally:
			win32api.RegCloseKey(key)
			win32ui.DoWaitCursor(0)
		ret.sort()
		return ret
+
def main():
	"""Run the COM browser - in an MDI window when hosted inside
	Pythonwin (the 'app' module is loaded), else as a modal dialog.
	"""
	from pywin.tools import hierlist # kept for its import side-effects
	root = HLIRoot("COM Browser")
	if "app" not in sys.modules:
		# Standalone - present the browser as a modal dialog.
		dlg = browser.dynamic_browser(root)
		dlg.DoModal()
	else:
		# Hosted in Pythonwin - open the browser in a document window.
		browser.MakeTemplate()
		browser.template.OpenObject(root)
+
+
+
if __name__=='__main__':
	main()

	# Debugging aid: report COM interface/gateway objects still alive at
	# exit - a non-zero count usually indicates a reference leak.
	ni = pythoncom._GetInterfaceCount()
	ng = pythoncom._GetGatewayCount()
	if ni or ng:
		print "Warning - exiting with %d/%d objects alive" % (ni,ng)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/connect.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/connect.py
new file mode 100644
index 0000000..11ad6de
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/connect.py
@@ -0,0 +1,43 @@
+"""Utilities for working with Connections"""
+import win32com.server.util, pythoncom
+
class SimpleConnection:
	"""A simple, single connection object.

	Manages one advisory connection between a source COM object and a
	Python event-handler instance, disconnecting automatically on
	destruction.
	"""
	def __init__(self, coInstance = None, eventInstance = None, eventCLSID = None, debug = 0):
		self.cp = None      # IConnectionPoint once connected.
		self.cookie = None  # Advise() cookie once connected.
		self.debug = debug
		if coInstance is not None:
			self.Connect(coInstance, eventInstance, eventCLSID)

	def __del__(self):
		# Best-effort teardown; COM errors during destruction are ignored.
		try:
			self.Disconnect()
		except pythoncom.error:
			pass

	def _wrap(self, obj):
		"""Wrap the Python event instance as a COM gateway object."""
		disp = None
		if self.debug:
			from win32com.server import dispatcher
			disp = dispatcher.DefaultDebugDispatcher
		return win32com.server.util.wrap(obj, useDispatcher=disp)

	def Connect(self, coInstance, eventInstance, eventCLSID = None):
		"""Advise eventInstance on coInstance's connection point for
		eventCLSID (defaulting to eventInstance.CLSID)."""
		oleobj = getattr(coInstance, "_oleobj_", coInstance)
		cpc = oleobj.QueryInterface(pythoncom.IID_IConnectionPointContainer)
		if eventCLSID is None:
			eventCLSID = eventInstance.CLSID
		comEventInstance = self._wrap(eventInstance)
		self.cp = cpc.FindConnectionPoint(eventCLSID)
		self.cookie = self.cp.Advise(comEventInstance)

	def Disconnect(self):
		"""Unadvise and drop the connection point; a no-op when not connected."""
		if self.cp is None:
			return
		if self.cookie:
			self.cp.Unadvise(self.cookie)
			self.cookie = None
		self.cp = None
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/dynamic.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/dynamic.py
new file mode 100644
index 0000000..fe33f96
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/dynamic.py
@@ -0,0 +1,550 @@
+"""Support for dynamic COM client support.
+
+Introduction
+ Dynamic COM client support is the ability to use a COM server without
+ prior knowledge of the server.  This can be used to talk to almost all
+ COM servers, including much of MS Office.
+ 
+ In general, you should not use this module directly - see below.
+ 
+Example
+ >>> import win32com.client
+ >>> xl = win32com.client.Dispatch("Excel.Application")
+ # The line above invokes the functionality of this class.
+ # xl is now an object we can use to talk to Excel.
+ >>> xl.Visible = 1 # The Excel window becomes visible.
+
+"""
+import traceback
+import string
+import new
+
+import pythoncom
+import winerror
+import build
+
+from types import StringType, IntType, TupleType, ListType
+from pywintypes import UnicodeType, IIDType
+
+import win32com.client # Needed as code we eval() references it.
+from win32com.client import NeedUnicodeConversions
+
debugging=0			# General debugging
debugging_attr=0	# Debugging dynamic attribute lookups.

# Locale ID passed to every Invoke call - 0 selects the default locale.
LCID = 0x0

# These errors generally mean the property or method exists,
# but can't be used in this context - eg, property instead of a method, etc.
# Used to determine if we have a real error or not.
ERRORS_BAD_CONTEXT = [
	winerror.DISP_E_MEMBERNOTFOUND,
	winerror.DISP_E_BADPARAMCOUNT,
	winerror.DISP_E_PARAMNOTOPTIONAL,
	winerror.DISP_E_TYPEMISMATCH,
	winerror.E_INVALIDARG,
]

# Each INVOKE_* kind is tried in turn when lazily binding an attribute
# against the type-comp - see CDispatch._LazyAddAttr_.
ALL_INVOKE_TYPES = [
	pythoncom.INVOKE_PROPERTYGET,
	pythoncom.INVOKE_PROPERTYPUT,
	pythoncom.INVOKE_PROPERTYPUTREF,
	pythoncom.INVOKE_FUNC
]
+
def debug_print(*args):
	# Print the args on one line, but only when general debugging is enabled.
	if debugging:
		for arg in args:
			print arg,
		print
+
def debug_attr_print(*args):
	# As debug_print, but gated on the attribute-lookup debugging flag.
	if debugging_attr:
		for arg in args:
			print arg,
		print
+
# get the type objects for IDispatch and IUnknown - used for fast type
# comparisons when deciding how to wrap returned values.
dispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch]
iunkType = pythoncom.TypeIIDs[pythoncom.IID_IUnknown]
# Argument types _GetGoodDispatch will resolve itself (ProgID/CLSID strings
# and IID objects); anything else is assumed to already be dispatchable.
_GoodDispatchType=[StringType,IIDType,UnicodeType]
_defaultDispatchItem=build.DispatchItem
+
def _GetGoodDispatch(IDispatch, clsctx = pythoncom.CLSCTX_SERVER):
	"""Coerce the argument into a raw IDispatch object.

	Strings and IIDs are resolved to a running instance when possible,
	otherwise a new instance is created; already-wrapped objects have
	their underlying _oleobj_ extracted.
	"""
	if type(IDispatch) not in _GoodDispatchType:
		# may already be a wrapped class - use its underlying object.
		return getattr(IDispatch, "_oleobj_", IDispatch)
	try:
		# Prefer attaching to a running object...
		return pythoncom.connect(IDispatch)
	except pythoncom.ole_error:
		# ...falling back to creating a fresh instance.
		return pythoncom.CoCreateInstance(IDispatch, None, clsctx, pythoncom.IID_IDispatch)
+
def _GetGoodDispatchAndUserName(IDispatch, userName, clsctx):
	# Get a dispatch object, and a 'user name' (ie, the name as
	# displayed to the user in repr() etc.
	# Note: if userName is None and IDispatch is not a string, userName
	# stays None here - CDispatch substitutes "<unknown>" in that case.
	if userName is None:
		if type(IDispatch) == StringType:
			userName = IDispatch
		elif type(IDispatch) == UnicodeType:
			# We always want the displayed name to be a real string
			userName = IDispatch.encode("ascii", "replace")
	elif type(userName) == UnicodeType:
		# As above - always a string...
		userName = userName.encode("ascii", "replace")
	else:
		userName = str(userName)
	return (_GetGoodDispatch(IDispatch, clsctx), userName)
+
+def _GetDescInvokeType(entry, default_invoke_type):
+	if not entry or not entry.desc: return default_invoke_type
+	return entry.desc[4]
+
def Dispatch(IDispatch, userName = None, createClass = None, typeinfo = None, UnicodeToString=NeedUnicodeConversions, clsctx = pythoncom.CLSCTX_SERVER):
	"""Create a dynamic dispatch wrapper (default CDispatch) for an object.

	IDispatch may be a ProgID/CLSID string, an IID, a raw IDispatch, or an
	already-wrapped object.  Type information is gathered when the object
	provides it; if a type-comp is also available, it is passed through as
	'lazydata' to enable lazy attribute resolution.
	"""
	IDispatch, userName = _GetGoodDispatchAndUserName(IDispatch,userName,clsctx)
	if createClass is None:
		createClass = CDispatch
	lazydata = None
	try:
		if typeinfo is None:
			typeinfo = IDispatch.GetTypeInfo()
		try:
			#try for a typecomp
			typecomp = typeinfo.GetTypeComp()
			lazydata = typeinfo, typecomp
		except pythoncom.com_error:
			pass
	except pythoncom.com_error:
		# Object provides no type information - proceed without it.
		typeinfo = None
	olerepr = MakeOleRepr(IDispatch, typeinfo, lazydata)
	return createClass(IDispatch, olerepr, userName,UnicodeToString, lazydata)
+
def MakeOleRepr(IDispatch, typeinfo, typecomp):
	"""Build the 'olerepr' (a build.DispatchItem) describing the object.

	NOTE(review): callers actually pass the (typeinfo, typecomp) 'lazydata'
	tuple as the third argument - it is only tested against None here, to
	choose between an eagerly-built DispatchItem and a LazyDispatchItem.
	Falls back to an empty DispatchItem when no usable type info exists.
	"""
	olerepr = None
	if typeinfo is not None:
		try:
			attr = typeinfo.GetTypeAttr()
			# If the type info is a special DUAL interface, magically turn it into
			# a DISPATCH typeinfo.
			if attr[5] == pythoncom.TKIND_INTERFACE and attr[11] & pythoncom.TYPEFLAG_FDUAL:
				# Get corresponding Disp interface;
				# -1 is a special value which does this for us.
				href = typeinfo.GetRefTypeOfImplType(-1);
				typeinfo = typeinfo.GetRefTypeInfo(href)
				attr = typeinfo.GetTypeAttr()
			if typecomp is None:
				olerepr = build.DispatchItem(typeinfo, attr, None, 0)
			else:
				olerepr = build.LazyDispatchItem(attr, None)
		except pythoncom.ole_error:
			pass
	if olerepr is None: olerepr = build.DispatchItem()
	return olerepr
+
def DumbDispatch(IDispatch, userName = None, createClass = None,UnicodeToString=NeedUnicodeConversions, clsctx=pythoncom.CLSCTX_SERVER):
	"Dispatch with no type info"
	# As Dispatch(), but deliberately skips type information - every
	# attribute is resolved at runtime via GetIDsOfNames.
	IDispatch, userName = _GetGoodDispatchAndUserName(IDispatch,userName,clsctx)
	if createClass is None:
		createClass = CDispatch
	return createClass(IDispatch, build.DispatchItem(), userName,UnicodeToString)
+
class CDispatch:
	"""Dynamic (late-bound) wrapper around a raw IDispatch object.

	Attribute access, item access and calls are translated into
	IDispatch::Invoke calls at runtime, helped by whatever type information
	the object provides.  All instance state is stored via self.__dict__
	directly because __setattr__ below is overridden to perform COM
	property-puts.
	"""
	def __init__(self, IDispatch, olerepr, userName =  None, UnicodeToString=NeedUnicodeConversions, lazydata = None):
		if userName is None: userName = "<unknown>"
		# Assign via __dict__ to avoid triggering our own __setattr__.
		self.__dict__['_oleobj_'] = IDispatch
		self.__dict__['_username_'] = userName
		self.__dict__['_olerepr_'] = olerepr
		self.__dict__['_mapCachedItems_'] = {}
		self.__dict__['_builtMethods_'] = {}
		self.__dict__['_enum_'] = None
		self.__dict__['_unicode_to_string_'] = UnicodeToString
		self.__dict__['_lazydata_'] = lazydata

	def __call__(self, *args):
		"Provide 'default dispatch' COM functionality - allow instance to be called"
		if self._olerepr_.defaultDispatchName:
			invkind, dispid = self._find_dispatch_type_(self._olerepr_.defaultDispatchName)
		else:
			invkind, dispid = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET, pythoncom.DISPID_VALUE
		if invkind is not None:
			allArgs = (dispid,LCID,invkind,1) + args
			return self._get_good_object_(self._oleobj_.Invoke(*allArgs),self._olerepr_.defaultDispatchName,None)
		raise TypeError, "This dispatch object does not define a default method"

	def __nonzero__(self):
		return 1 # ie "if object:" should always be "true" - without this, __len__ is tried.
		# _Possibly_ want to defer to __len__ if available, but Im not sure this is
		# desirable???

	def __repr__(self):
		return "<COMObject %s>" % (self._username_)

	def __str__(self):
		# __str__ is used when the user does "print object", so we gracefully
		# fall back to the __repr__ if the object has no default method.
		try:
			return str(self.__call__())
		except pythoncom.com_error, details:
			if details[0] not in ERRORS_BAD_CONTEXT:
				raise
			return self.__repr__()

	# Delegate comparison to the oleobjs, as they know how to do identity.
	def __cmp__(self, other):
		other = getattr(other, "_oleobj_", other)
		return cmp(self._oleobj_, other)

	def __int__(self):
		return int(self.__call__())

	def __len__(self):
		# Map len() to the object's "Count" property/method, if any.
		invkind, dispid = self._find_dispatch_type_("Count")
		if invkind:
			return self._oleobj_.Invoke(dispid, LCID, invkind, 1)
		raise TypeError, "This dispatch object does not define a Count method"

	def _NewEnum(self):
		# Ask the object for its standard enumerator (DISPID_NEWENUM),
		# wrapped for Python iteration; None if it has no enumerator.
		try:
			invkind = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET
			enum = self._oleobj_.InvokeTypes(pythoncom.DISPID_NEWENUM,LCID,invkind,(13, 10),())
		except pythoncom.com_error:
			return None # no enumerator for this object.
		import util
		return util.WrapEnum(enum, None)

	def __getitem__(self, index): # syver modified
		# Improved __getitem__ courtesy Syver Enstad
		# Must check _NewEnum before Item, to ensure b/w compat.
		if isinstance(index, IntType):
			if self.__dict__['_enum_'] is None:
				self.__dict__['_enum_'] = self._NewEnum()
			if self.__dict__['_enum_'] is not None:
				return self._get_good_object_(self._enum_.__getitem__(index))
		# See if we have an "Item" method/property we can use (goes hand in hand with Count() above!)
		invkind, dispid = self._find_dispatch_type_("Item")
		if invkind is not None:
			return self._get_good_object_(self._oleobj_.Invoke(dispid, LCID, invkind, 1, index))
		raise TypeError, "This object does not support enumeration"

	def __setitem__(self, index, *args):
		# XXX - todo - We should support calling Item() here too!
#		print "__setitem__ with", index, args
		if self._olerepr_.defaultDispatchName:
			invkind, dispid = self._find_dispatch_type_(self._olerepr_.defaultDispatchName)
		else:
			invkind, dispid = pythoncom.DISPATCH_PROPERTYPUT | pythoncom.DISPATCH_PROPERTYPUTREF, pythoncom.DISPID_VALUE
		if invkind is not None:
			allArgs = (dispid,LCID,invkind,0,index) + args
			return self._get_good_object_(self._oleobj_.Invoke(*allArgs),self._olerepr_.defaultDispatchName,None)
		raise TypeError, "This dispatch object does not define a default method"

	def _find_dispatch_type_(self, methodName):
		# Return an (invkind, dispid) pair for the named member, consulting
		# the known function map, then the property-get map, and finally
		# asking the object itself; (None, None) when nothing matches.
		if self._olerepr_.mapFuncs.has_key(methodName):
			item = self._olerepr_.mapFuncs[methodName]
			return item.desc[4], item.dispid

		if self._olerepr_.propMapGet.has_key(methodName):
			item = self._olerepr_.propMapGet[methodName]
			return item.desc[4], item.dispid

		try:
			dispid = self._oleobj_.GetIDsOfNames(0,methodName)
		except:	### what error?
			return None, None
		return pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET, dispid

	def _ApplyTypes_(self, dispid, wFlags, retType, argTypes, user, resultCLSID, *args):
		# Typed Invoke helper used by generated method code.
		result = self._oleobj_.InvokeTypes(*(dispid, LCID, wFlags, retType, argTypes) + args)
		return self._get_good_object_(result, user, resultCLSID)

	def _wrap_dispatch_(self, ob, userName = None, returnCLSID = None, UnicodeToString = NeedUnicodeConversions):
		# Given a dispatch object, wrap it in a class
		return Dispatch(ob, userName, UnicodeToString=UnicodeToString)

	def _get_good_single_object_(self,ob,userName = None, ReturnCLSID=None):
		# Convert one returned value to a friendly Python object - COM
		# objects are wrapped, unicode optionally converted, rest passed through.
		if iunkType==type(ob):
			try:
				ob = ob.QueryInterface(pythoncom.IID_IDispatch)
				# If this works, we then enter the "is dispatch" test below.
			except pythoncom.com_error:
				# It is an IUnknown, but not an IDispatch, so just let it through.
				pass
		if dispatchType==type(ob):
			# make a new instance of (probably this) class.
			return self._wrap_dispatch_(ob, userName, ReturnCLSID)
		elif self._unicode_to_string_ and UnicodeType==type(ob):
			return str(ob)
		else:
			return ob

	def _get_good_object_(self,ob,userName = None, ReturnCLSID=None):
		"""Given an object (usually the retval from a method), make it a good object to return.
		   Basically checks if it is a COM object, and wraps it up.
		   Also handles the fact that a retval may be a tuple of retvals"""
		if ob is None: # Quick exit!
			return None
		elif type(ob)==TupleType:
			return tuple(map(lambda o, s=self, oun=userName, rc=ReturnCLSID: s._get_good_single_object_(o, oun, rc),  ob))
		else:
			return self._get_good_single_object_(ob)

	def _make_method_(self, name):
		"Make a method object - Assumes in olerepr funcmap"
		# The method source is generated by the olerepr, compiled, exec'd,
		# then bound to this instance and cached in _builtMethods_.
		methodName = build.MakePublicAttributeName(name) # translate keywords etc.
		methodCodeList = self._olerepr_.MakeFuncMethod(self._olerepr_.mapFuncs[name], methodName,0)
		methodCode = string.join(methodCodeList,"\n")
		try:
#			print "Method code for %s is:\n" % self._username_, methodCode
#			self._print_details_()
			codeObject = compile(methodCode, "<COMObject %s>" % self._username_,"exec")
			# Exec the code object
			tempNameSpace = {}
			# "Dispatch" in the exec'd code is win32com.client.Dispatch, not ours.
			globNameSpace = globals().copy()
			globNameSpace["Dispatch"] = win32com.client.Dispatch
			exec codeObject in globNameSpace, tempNameSpace # self.__dict__, self.__dict__
			name = methodName
			# Save the function in map.
			fn = self._builtMethods_[name] = tempNameSpace[name]
			newMeth = new.instancemethod(fn, self, self.__class__)
			return newMeth
		except:
			debug_print("Error building OLE definition for code ", methodCode)
			traceback.print_exc()
		return None

	def _Release_(self):
		"""Cleanup object - like a close - to force cleanup when you dont 
		   want to rely on Python's reference counting."""
		for childCont in self._mapCachedItems_.values():
			childCont._Release_()
		self._mapCachedItems_ = {}
		if self._oleobj_:
			self._oleobj_.Release()
			self.__dict__['_oleobj_'] = None
		if self._olerepr_:
			self.__dict__['_olerepr_'] = None
		self._enum_ = None

	def _proc_(self, name, *args):
		"""Call the named method as a procedure, rather than function.
		   Mainly used by Word.Basic, which whinges about such things."""
		try:
			item = self._olerepr_.mapFuncs[name]
			dispId = item.dispid
			return self._get_good_object_(self._oleobj_.Invoke(*(dispId, LCID, item.desc[4], 0) + (args) ))
		except KeyError:
			raise AttributeError, name

	def _print_details_(self):
		"Debug routine - dumps what it knows about an object."
		print "AxDispatch container",self._username_
		try:
			print "Methods:"
			for method in self._olerepr_.mapFuncs.keys():
				print "\t", method
			print "Props:"
			for prop, entry in self._olerepr_.propMap.items():
				print "\t%s = 0x%x - %s" % (prop, entry.dispid, `entry`)
			print "Get Props:"
			for prop, entry in self._olerepr_.propMapGet.items():
				print "\t%s = 0x%x - %s" % (prop, entry.dispid, `entry`)
			print "Put Props:"
			for prop, entry in self._olerepr_.propMapPut.items():
				print "\t%s = 0x%x - %s" % (prop, entry.dispid, `entry`)
		except:
			traceback.print_exc()

	def __LazyMap__(self, attr):
		# Try to resolve 'attr' via the type-comp; truthy when something
		# was added to the olerepr maps, falsy otherwise.
		try:
			if self._LazyAddAttr_(attr):
				debug_attr_print("%s.__LazyMap__(%s) added something" % (self._username_,attr))
				return 1
		except AttributeError:
			return 0

	# Using the typecomp, lazily create a new attribute definition.
	def _LazyAddAttr_(self,attr):
		if self._lazydata_ is None: return 0
		res = 0
		typeinfo, typecomp = self._lazydata_
		olerepr = self._olerepr_
		# We need to explicitly check each invoke type individually - simply
		# specifying '0' will bind to "any member", which may not be the one
		# we are actually after (ie, we may be after prop_get, but returned
		# the info for the prop_put.)
		for i in ALL_INVOKE_TYPES:
			try:
				x,t = typecomp.Bind(attr,i)
				if x==1:	#it's a FUNCDESC
					r = olerepr._AddFunc_(typeinfo,t,0)
				elif x==2:	#it's a VARDESC
					r = olerepr._AddVar_(typeinfo,t,0)
				else:		#not found or TYPEDESC/IMPLICITAPP
					r=None
				if not r is None:
					key, map = r[0],r[1]
					item = map[key]
					if map==olerepr.propMapPut:
						olerepr._propMapPutCheck_(key,item)
					elif map==olerepr.propMapGet:
						olerepr._propMapGetCheck_(key,item)
					res = 1
			except:
				pass
		return res

	def _FlagAsMethod(self, *methodNames):
		"""Flag these attribute names as being methods.
		Some objects do not correctly differentiate methods and
		properties, leading to problems when calling these methods.

		Specifically, trying to say: ob.SomeFunc()
		may yield an exception "None object is not callable"
		In this case, an attempt to fetch the *property* has worked
		and returned None, rather than indicating it is really a method.
		Calling: ob._FlagAsMethod("SomeFunc")
		should then allow this to work.
		"""
		for name in methodNames:
			details = build.MapEntry(self.__AttrToID__(name), (name,))
			self._olerepr_.mapFuncs[name] = details

	def __AttrToID__(self,attr):
			# Ask the object itself for the dispid of the named attribute.
			debug_attr_print("Calling GetIDsOfNames for property %s in Dispatch container %s" % (attr, self._username_))
			return self._oleobj_.GetIDsOfNames(0,attr)

	def __getattr__(self, attr):
		# Resolution order: __iter__ special case, built-method cache,
		# known function map, property maps, lazy type-comp binding, and
		# finally GetIDsOfNames on the object itself.
		if attr=='__iter__':
			# We can't handle this as a normal method, as if the attribute
			# exists, then it must return an iterable object.
			try:
				invkind = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET
				enum = self._oleobj_.InvokeTypes(pythoncom.DISPID_NEWENUM,LCID,invkind,(13, 10),())
			except pythoncom.com_error:
				raise AttributeError, "This object can not function as an iterator"
			# We must return a callable object.
			class Factory:
				def __init__(self, ob):
					self.ob = ob
				def __call__(self):
					import win32com.client.util
					return win32com.client.util.Iterator(self.ob)
			return Factory(enum)

		if attr[0]=='_' and attr[-1]=='_': # Fast-track.
			raise AttributeError, attr
		# If a known method, create new instance and return.
		try:
			return new.instancemethod(self._builtMethods_[attr], self, self.__class__)
		except KeyError:
			pass
		# XXX - Note that we current are case sensitive in the method.
		#debug_attr_print("GetAttr called for %s on DispatchContainer %s" % (attr,self._username_))
		# First check if it is in the method map.  Note that an actual method
		# must not yet exist, (otherwise we would not be here).  This
		# means we create the actual method object - which also means
		# this code will never be asked for that method name again.
		if self._olerepr_.mapFuncs.has_key(attr):
			return self._make_method_(attr)

		# Delegate to property maps/cached items
		retEntry = None
		if self._olerepr_ and self._oleobj_:
			# first check general property map, then specific "put" map.
			retEntry = self._olerepr_.propMap.get(attr)
			if retEntry is None:
				retEntry = self._olerepr_.propMapGet.get(attr)
			# Not found so far - See what COM says.
			if retEntry is None:
				try:
					if self.__LazyMap__(attr):
						if self._olerepr_.mapFuncs.has_key(attr): return self._make_method_(attr)
						retEntry = self._olerepr_.propMap.get(attr)
						if retEntry is None:
							retEntry = self._olerepr_.propMapGet.get(attr)
					if retEntry is None:
						retEntry = build.MapEntry(self.__AttrToID__(attr), (attr,))
				except pythoncom.ole_error:
					pass # No prop by that name - retEntry remains None.

		if not retEntry is None: # see if in my cache
			try:
				ret = self._mapCachedItems_[retEntry.dispid]
				debug_attr_print ("Cached items has attribute!", ret)
				return ret
			except (KeyError, AttributeError):
				debug_attr_print("Attribute %s not in cache" % attr)

		# If we are still here, and have a retEntry, get the OLE item
		if not retEntry is None:
			invoke_type = _GetDescInvokeType(retEntry, pythoncom.INVOKE_PROPERTYGET)
			debug_attr_print("Getting property Id 0x%x from OLE object" % retEntry.dispid)
			try:
				ret = self._oleobj_.Invoke(retEntry.dispid,0,invoke_type,1)
			except pythoncom.com_error, details:
				if details[0] in ERRORS_BAD_CONTEXT:
					# May be a method.
					self._olerepr_.mapFuncs[attr] = retEntry
					return self._make_method_(attr)
				raise pythoncom.com_error, details
			debug_attr_print("OLE returned ", ret)
			return self._get_good_object_(ret)

		# no where else to look.
		raise AttributeError, "%s.%s" % (self._username_, attr)

	def __setattr__(self, attr, value):
		if self.__dict__.has_key(attr): # Fast-track - if already in our dict, just make the assignment.
			# XXX - should maybe check method map - if someone assigns to a method,
			# it could mean something special (not sure what, tho!)
			self.__dict__[attr] = value
			return
		# Allow property assignment.
		debug_attr_print("SetAttr called for %s.%s=%s on DispatchContainer" % (self._username_, attr, `value`))

		if self._olerepr_:
			# Check the "general" property map.
			if self._olerepr_.propMap.has_key(attr):
				entry = self._olerepr_.propMap[attr]
				invoke_type = _GetDescInvokeType(entry, pythoncom.INVOKE_PROPERTYPUT)
				self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value)
				return
			# Check the specific "put" map.
			if self._olerepr_.propMapPut.has_key(attr):
				entry = self._olerepr_.propMapPut[attr]
				invoke_type = _GetDescInvokeType(entry, pythoncom.INVOKE_PROPERTYPUT)
				self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value)
				return

		# Try the OLE Object
		if self._oleobj_:
			# Maps may have been extended by lazy type-comp binding.
			if self.__LazyMap__(attr):
				# Check the "general" property map.
				if self._olerepr_.propMap.has_key(attr):
					entry = self._olerepr_.propMap[attr]
					invoke_type = _GetDescInvokeType(entry, pythoncom.INVOKE_PROPERTYPUT)
					self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value)
					return
				# Check the specific "put" map.
				if self._olerepr_.propMapPut.has_key(attr):
					entry = self._olerepr_.propMapPut[attr]
					invoke_type = _GetDescInvokeType(entry, pythoncom.INVOKE_PROPERTYPUT)
					self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value)
					return
			try:
				entry = build.MapEntry(self.__AttrToID__(attr),(attr,))
			except pythoncom.com_error:
				# No attribute of that name
				entry = None
			if entry is not None:
				try:
					invoke_type = _GetDescInvokeType(entry, pythoncom.INVOKE_PROPERTYPUT)
					self._oleobj_.Invoke(entry.dispid, 0, invoke_type, 0, value)
					# Success - remember the entry for subsequent sets.
					self._olerepr_.propMap[attr] = entry
					debug_attr_print("__setattr__ property %s (id=0x%x) in Dispatch container %s" % (attr, entry.dispid, self._username_))
					return
				except pythoncom.com_error:
					pass
		raise AttributeError, "Property '%s.%s' can not be set." % (self._username_, attr)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/gencache.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/gencache.py
new file mode 100644
index 0000000..7f77857f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/gencache.py
@@ -0,0 +1,694 @@
+"""Manages the cache of generated Python code.
+
+Description
+  This file manages the cache of generated Python code.  When run from the 
+  command line, it also provides a number of options for managing that cache.
+  
+Implementation
+  Each typelib is generated into a filename of format "{guid}x{lcid}x{major}x{minor}.py"
+  
+  An external persistant dictionary maps from all known IIDs in all known type libraries
+  to the type library itself.
+  
+  Thus, whenever Python code knows the IID of an object, it can find the IID, LCID and version of
+  the type library which supports it.  Given this information, it can find the Python module
+  with the support.
+  
+  If necessary, this support can be generated on the fly.
+  
+Hacks, to do, etc
+  Currently just uses a pickled dictionary, but should used some sort of indexed file.
+  Maybe an OLE2 compound file, or a bsddb file?
+"""
+import pywintypes, os, string, sys
+import pythoncom
+import win32com, win32com.client
+import glob
+import traceback
+import CLSIDToClass
+import operator
+
+bForDemandDefault = 0 # Default value of bForDemand - toggle this to change the world - see also makepy.py
+
+# The global dictionary
+clsidToTypelib = {}
+
+# If we have a different version of the typelib generated, this
+# maps the "requested version" to the "generated version".
+versionRedirectMap = {}
+
+# There is no reason we *must* be readonly in a .zip, but we are now,
+# Rather than check for ".zip" or other tricks, PEP302 defines
+# a "__loader__" attribute, so we use that.
+# (Later, it may become necessary to check if the __loader__ can update files,
+# as a .zip loader potentially could - but punt all that until a need arises)
+is_readonly = hasattr(win32com, "__loader__")
+
+# A dictionary of ITypeLibrary objects for demand generation explicitly handed to us
+# Keyed by usual clsid, lcid, major, minor
+demandGeneratedTypeLibraries = {}
+
+def __init__():
+	"""Load the persisted cache indexes, rebuilding from disk on failure."""
+	# Initialize the module.  Called once explicitly at module import below.
+	try:
+		_LoadDicts()
+	except IOError:
+		# No (or unreadable) "dicts.dat" - recreate the index by scanning
+		# the generated files already on disk.
+		Rebuild()
+
+pickleVersion = 1  # Version stamp written at the head of "dicts.dat".
+def _SaveDicts():
+	"""Persist the clsid->typelib index to "dicts.dat" in the generate path.
+
+	Raises RuntimeError if the cache lives in a readonly (.zip) location.
+	"""
+	if is_readonly:
+		raise RuntimeError, "Trying to write to a readonly gencache ('%s')!" \
+		                    % win32com.__gen_path__
+	import cPickle
+	f = open(os.path.join(GetGeneratePath(), "dicts.dat"), "wb")
+	try:
+		p = cPickle.Pickler(f)
+		p.dump(pickleVersion)
+		p.dump(clsidToTypelib)
+	finally:
+		f.close()
+
+def _LoadDicts():
+	"""Load the pickled cache indexes from "dicts.dat".
+
+	Handles both a normal file-system cache and a cache packaged inside
+	a .zip (PEP302 __loader__).  IOError from a file-system open is
+	deliberately left for the caller (__init__) to catch.
+	"""
+	import cPickle
+	# Load the dictionary from a .zip file if that is where we live.
+	if hasattr(win32com, "__loader__"):
+		import cStringIO
+		loader = win32com.__loader__
+		arc_path = loader.archive
+		dicts_path = os.path.join(win32com.__gen_path__, "dicts.dat")
+		if dicts_path.startswith(arc_path):
+			# Convert to an archive-relative path for get_data().
+			dicts_path = dicts_path[len(arc_path)+1:]
+		else:
+			# Hm. See below.
+			return
+		try:
+			data = loader.get_data(dicts_path)
+		except AttributeError:
+			# The __loader__ has no get_data method.  See below.
+			return
+		except IOError:
+			# Our gencache is in a .zip file (and almost certainly readonly)
+			# but no dicts file.  That actually needn't be fatal for a frozen
+			# application.  Assuming they call "EnsureModule" with the same
+			# typelib IDs they have been frozen with, that EnsureModule will
+			# correctly re-build the dicts on the fly.  However, objects that
+			# rely on the gencache but have not done an EnsureModule will
+			# fail (but their apps are likely to fail running from source
+			# with a clean gencache anyway, as then they would be getting
+			# Dynamic objects until the cache is built - so the best answer
+			# for these apps is to call EnsureModule, rather than freezing
+			# the dict)
+			return
+		f = cStringIO.StringIO(data)
+	else:
+		# NOTE: IOError on file open must be caught by caller.
+		f = open(os.path.join(win32com.__gen_path__, "dicts.dat"), "rb")
+	try:
+		p = cPickle.Unpickler(f)
+		version = p.load()
+		global clsidToTypelib
+		clsidToTypelib = p.load()
+		# Redirects are derived state - start afresh.
+		versionRedirectMap.clear()
+	finally:
+		f.close()
+
+def GetGeneratedFileName(clsid, lcid, major, minor):
+	"""Given the clsid, lcid, major and  minor for a type lib, return
+	the file name (no extension) providing this support.
+	"""
+	# [1:-1] strips the "{" and "}" from the IID's string form.
+	return string.upper(str(clsid))[1:-1] + "x%sx%sx%s" % (lcid, major, minor)
+
+def SplitGeneratedFileName(fname):
+	"""Reverse of GetGeneratedFileName()
+
+	Note all elements come back as strings - callers convert as needed.
+	"""
+	return tuple(string.split(fname,'x',4))
+	
+def GetGeneratePath():
+	"""Returns the name of the path to generate to.
+	Checks the directory is OK.
+	"""
+	assert not is_readonly, "Why do you want the genpath for a readonly store?"
+	try:
+		# makedirs creates intermediate directories too; failure (eg, it
+		# already exists) is ignored.
+		os.makedirs(win32com.__gen_path__)
+		#os.mkdir(win32com.__gen_path__)
+	except os.error:
+		pass
+	try:
+		fname = os.path.join(win32com.__gen_path__, "__init__.py")
+		os.stat(fname)
+	except os.error:
+		# No package __init__.py - create one that hooks __path__.
+		# NOTE(review): the generated "__path__[:-1]" test compares a list
+		# slice to a string, so it is always true - looks like it meant
+		# "__path__[-1]"; confirm against upstream before changing.
+		f = open(fname,"w")
+		f.write('# Generated file - this directory may be deleted to reset the COM cache...\n')
+		f.write('import win32com\n')
+		f.write('if __path__[:-1] != win32com.__gen_path__: __path__.append(win32com.__gen_path__)\n')
+		f.close()
+	
+	return win32com.__gen_path__
+
+#
+# The helpers for win32com.client.Dispatch and OCX clients.
+#
+def GetClassForProgID(progid):
+	"""Get a Python class for a Program ID
+	
+	Given a Program ID, return a Python class which wraps the COM object
+	
+	Returns the Python class, or None if no module is available.
+	
+	Params
+	progid -- A COM ProgramID or IID (eg, "Word.Application")
+	"""
+	clsid = pywintypes.IID(progid) # This auto-converts named to IDs.
+	return GetClassForCLSID(clsid)
+
+def GetClassForCLSID(clsid):
+	"""Get a Python class for a CLSID
+	
+	Given a CLSID, return a Python class which wraps the COM object
+	
+	Returns the Python class, or None if no module is available.
+	
+	Params
+	clsid -- A COM CLSID (or string repr of one)
+	"""
+	# first, take a short-cut - we may already have generated support ready-to-roll.
+	clsid = str(clsid)
+	if CLSIDToClass.HasClass(clsid):
+		return CLSIDToClass.GetClass(clsid)
+	# Importing the module registers its classes with CLSIDToClass.
+	mod = GetModuleForCLSID(clsid)
+	if mod is None:
+		return None
+	try:
+		return CLSIDToClass.GetClass(clsid)
+	except KeyError:
+		# Module imported, but the class still isn't registered.
+		return None
+
+def GetModuleForProgID(progid):
+	"""Get a Python module for a Program ID
+	
+	Given a Program ID, return a Python module which contains the
+	class which wraps the COM object.
+	
+	Returns the Python module, or None if no module is available.
+	
+	Params
+	progid -- A COM ProgramID or IID (eg, "Word.Application")
+	"""
+	try:
+		iid = pywintypes.IID(progid)
+	except pywintypes.com_error:
+		# ProgID is not registered.
+		return None
+	return GetModuleForCLSID(iid)
+	
+def GetModuleForCLSID(clsid):
+	"""Get a Python module for a CLSID
+	
+	Given a CLSID, return a Python module which contains the
+	class which wraps the COM object.
+	
+	Returns the Python module, or None if no module is available.
+	
+	Params
+	clsid -- A COM CLSID (ie, not the description)
+	"""
+	clsid_str = str(clsid)
+	try:
+		typelibCLSID, lcid, major, minor = clsidToTypelib[clsid_str]
+	except KeyError:
+		# CLSID not in our cache index.
+		return None
+
+	try:
+		mod = GetModuleForTypelib(typelibCLSID, lcid, major, minor)
+	except ImportError:
+		mod = None
+	if mod is not None:
+		# For "demand" generated packages, the class may live in a child
+		# module which may not have been generated yet.
+		sub_mod = mod.CLSIDToPackageMap.get(clsid_str)
+		if sub_mod is None:
+			sub_mod = mod.VTablesToPackageMap.get(clsid_str)
+		if sub_mod is not None:
+			sub_mod_name = mod.__name__ + "." + sub_mod
+			try:
+				__import__(sub_mod_name)
+			except ImportError:
+				info = typelibCLSID, lcid, major, minor
+				# Force the generation.  If this typelibrary has explicitly been added,
+				# use it (it may not be registered, causing a lookup by clsid to fail)
+				if demandGeneratedTypeLibraries.has_key(info):
+					info = demandGeneratedTypeLibraries[info]
+				import makepy
+				makepy.GenerateChildFromTypeLibSpec(sub_mod, info)
+				# Generate does an import...
+			mod = sys.modules[sub_mod_name]
+	return mod
+
+def GetModuleForTypelib(typelibCLSID, lcid, major, minor):
+	"""Get a Python module for a type library ID
+	
+	Given the CLSID of a typelibrary, return an imported Python module, 
+	else None
+	
+	Raises ImportError if no generated module exists.
+	
+	Params
+	typelibCLSID -- IID of the type library.
+	major -- Integer major version.
+	minor -- Integer minor version
+	lcid -- Integer LCID for the library.
+	"""
+	modName = GetGeneratedFileName(typelibCLSID, lcid, major, minor)
+	mod = _GetModule(modName)
+	# If the import worked, it doesn't mean we have actually added this
+	# module to our cache though - check that here.
+	if not mod.__dict__.has_key("_in_gencache_"):
+		AddModuleToCache(typelibCLSID, lcid, major, minor)
+		assert mod.__dict__.has_key("_in_gencache_")
+	return mod
+
+def MakeModuleForTypelib(typelibCLSID, lcid, major, minor, progressInstance = None, bGUIProgress = None, bForDemand = bForDemandDefault, bBuildHidden = 1):
+	"""Generate support for a type library.
+	
+	Given the IID, LCID and version information for a type library, generate
+	and import the necessary support files.
+	
+	Returns the Python module, or None if generation fails with a COM error.
+
+	Params
+	typelibCLSID -- IID of the type library.
+	major -- Integer major version.
+	minor -- Integer minor version.
+	lcid -- Integer LCID for the library.
+	progressInstance -- Instance to use as progress indicator, or None to
+	                    use the GUI progress bar.
+	bGUIProgress -- Obsolete; ignored (a warning is printed if supplied).
+	bForDemand -- Generate a "demand" package (children built on first use)?
+	bBuildHidden -- Should hidden members/attributes etc be generated?
+	"""
+	if bGUIProgress is not None:
+		print "The 'bGuiProgress' param to 'MakeModuleForTypelib' is obsolete."
+
+	import makepy
+	try:
+		makepy.GenerateFromTypeLibSpec( (typelibCLSID, lcid, major, minor), progressInstance=progressInstance, bForDemand = bForDemand, bBuildHidden = bBuildHidden)
+	except pywintypes.com_error:
+		return None
+	return GetModuleForTypelib(typelibCLSID, lcid, major, minor)
+
+def MakeModuleForTypelibInterface(typelib_ob, progressInstance = None, bForDemand = bForDemandDefault, bBuildHidden = 1):
+	"""Generate support for a type library.
+	
+	Given a PyITypeLib interface generate and import the necessary support files.  This is useful
+	for getting makepy support for a typelibrary that is not registered - the caller can locate
+	and load the type library itself, rather than relying on COM to find it.
+	
+	Returns the Python module.
+
+	Params
+	typelib_ob -- The type library itself
+	progressInstance -- Instance to use as progress indicator, or None to
+	                    use the GUI progress bar.
+	"""
+	import makepy
+	try:
+		makepy.GenerateFromTypeLibSpec( typelib_ob, progressInstance=progressInstance, bForDemand = bForDemandDefault, bBuildHidden = bBuildHidden)
+	except pywintypes.com_error:
+		return None
+	tla = typelib_ob.GetLibAttr()
+	guid = tla[0]
+	lcid = tla[1]
+	major = tla[3]
+	minor = tla[4]
+	return GetModuleForTypelib(guid, lcid, major, minor)
+
+def EnsureModuleForTypelibInterface(typelib_ob, progressInstance = None, bForDemand = bForDemandDefault, bBuildHidden = 1):
+	"""Check we have support for a type library, generating if not.
+	
+	Given a PyITypeLib interface generate and import the necessary
+	support files if necessary. This is useful for getting makepy support
+	for a typelibrary that is not registered - the caller can locate and
+	load the type library itself, rather than relying on COM to find it.
+	
+	Returns the Python module.
+
+	Params
+	typelib_ob -- The type library itself
+	progressInstance -- Instance to use as progress indicator, or None to
+	                    use the GUI progress bar.
+	"""
+	# GetLibAttr() returns (guid, lcid, syskind, major, minor, flags).
+	tla = typelib_ob.GetLibAttr()
+	guid = tla[0]
+	lcid = tla[1]
+	major = tla[3]
+	minor = tla[4]
+
+	#If demand generated, save the typelib interface away for later use
+	if bForDemand:
+		demandGeneratedTypeLibraries[(str(guid), lcid, major, minor)] = typelib_ob
+
+	try:
+		return GetModuleForTypelib(guid, lcid, major, minor)
+	except ImportError:
+		pass
+	# Generate it.
+	return MakeModuleForTypelibInterface(typelib_ob, progressInstance, bForDemand, bBuildHidden)
+
+def ForgetAboutTypelibInterface(typelib_ob):
+	"""Drop any references to a typelib previously added with EnsureModuleForTypelibInterface and forDemand"""
+	# GetLibAttr() returns (guid, lcid, syskind, major, minor, flags).
+	tla = typelib_ob.GetLibAttr()
+	guid = tla[0]
+	lcid = tla[1]
+	major = tla[3]
+	minor = tla[4]
+	info = str(guid), lcid, major, minor
+	try:
+		del demandGeneratedTypeLibraries[info]
+	except KeyError:
+		# Not worth raising an exception - maybe they dont know we only remember for demand generated, etc.
+		print "ForgetAboutTypelibInterface:: Warning - type library with info %s is not being remembered!" % (info,)
+	# and drop any version redirects to it
+	for key, val in versionRedirectMap.items():
+		if val==info:
+			del versionRedirectMap[key]
+
+def EnsureModule(typelibCLSID, lcid, major, minor, progressInstance = None, bValidateFile=not is_readonly, bForDemand = bForDemandDefault, bBuildHidden = 1):
+	"""Ensure Python support is loaded for a type library, generating if necessary.
+	
+	Given the IID, LCID and version information for a type library, check and if
+	necessary (re)generate, then import the necessary support files. If we regenerate the file, there
+	is no way to totally snuff out all instances of the old module in Python, and thus we will regenerate the file more than necessary,
+	unless makepy/genpy is modified accordingly.
+	
+	
+	Returns the Python module.  No exceptions are caught during the generate process.
+
+	Params
+	typelibCLSID -- IID of the type library.
+	major -- Integer major version.
+	minor -- Integer minor version
+	lcid -- Integer LCID for the library.
+	progressInstance -- Instance to use as progress indicator, or None to
+	                    use the GUI progress bar.
+	bValidateFile -- Whether or not to perform cache validation or not
+	bForDemand -- Should a complete generation happen now, or on demand?
+	bBuildHidden -- Should hidden members/attributes etc be generated?
+	"""
+	bReloadNeeded = 0
+	try:
+		try:
+			module = GetModuleForTypelib(typelibCLSID, lcid, major, minor)
+		except ImportError:
+			# If we get an ImportError
+			# We may still find a valid cache file under a different MinorVersion #
+			# (which windows will search out for us)
+			#print "Loading reg typelib", typelibCLSID, major, minor, lcid
+			module = None
+			try:
+				tlbAttr = pythoncom.LoadRegTypeLib(typelibCLSID, major, minor, lcid).GetLibAttr()
+				# if the above line doesn't throw a pythoncom.com_error, check if
+				# it is actually a different lib than we requested, and if so, suck it in
+				if tlbAttr[1] != lcid or tlbAttr[4]!=minor:
+					#print "Trying 2nd minor #", tlbAttr[1], tlbAttr[3], tlbAttr[4]
+					try:
+						module = GetModuleForTypelib(typelibCLSID, tlbAttr[1], tlbAttr[3], tlbAttr[4])
+					except ImportError:
+						# We don't have a module, but we do have a better minor
+						# version - remember that.
+						minor = tlbAttr[4]
+				# else module remains None
+			except pythoncom.com_error:
+				# couldn't load any typelib - mod remains None
+				pass
+		# First validation pass: locate the registered typelib on disk.
+		if module is not None and bValidateFile:
+			assert not is_readonly, "Can't validate in a read-only gencache"
+			try:
+				typLibPath = pythoncom.QueryPathOfRegTypeLib(typelibCLSID, major, minor, lcid)
+				# windows seems to add an extra \0 (via the underlying BSTR)
+				# The mainwin toolkit does not add this erroneous \0
+				if typLibPath[-1]=='\0':
+					typLibPath=typLibPath[:-1]
+				suf = getattr(os.path, "supports_unicode_filenames", 0)
+				if not suf:
+					# can't pass unicode filenames directly - convert
+					try:
+						typLibPath=typLibPath.encode(sys.getfilesystemencoding())
+					except AttributeError: # no sys.getfilesystemencoding
+						typLibPath=str(typLibPath)
+				tlbAttributes = pythoncom.LoadRegTypeLib(typelibCLSID, major, minor, lcid).GetLibAttr()
+			except pythoncom.com_error:
+				# We have a module, but no type lib - we should still
+				# run with what we have though - the typelib may not be
+				# deployed here.
+				bValidateFile = 0
+		# Second validation pass: compare the module against the typelib.
+		if module is not None and bValidateFile:
+			assert not is_readonly, "Can't validate in a read-only gencache"
+			filePathPrefix  = "%s\\%s" % (GetGeneratePath(), GetGeneratedFileName(typelibCLSID, lcid, major, minor))
+			filePath = filePathPrefix + ".py"
+			filePathPyc = filePathPrefix + ".py"
+			# ".py" is completed to ".pyc" or ".pyo" below.
+			if __debug__:
+				filePathPyc = filePathPyc + "c"
+			else:
+				filePathPyc = filePathPyc + "o"
+			# Verify that type library is up to date.
+			# If we have a differing MinorVersion or genpy has bumped versions, update the file
+			import genpy
+			if module.MinorVersion != tlbAttributes[4] or genpy.makepy_version != module.makepy_version:
+				#print "Version skew: %d, %d" % (module.MinorVersion, tlbAttributes[4])
+				# try to erase the bad file from the cache
+				try:
+					os.unlink(filePath)
+				except os.error:
+					pass
+				try:
+					os.unlink(filePathPyc)
+				except os.error:
+					pass
+				# A "demand" package is a directory - remove it wholesale.
+				if os.path.isdir(filePathPrefix):
+					import shutil
+					shutil.rmtree(filePathPrefix)
+				minor = tlbAttributes[4]
+				module = None
+				bReloadNeeded = 1
+			else:
+				minor = module.MinorVersion
+				filePathPrefix  = "%s\\%s" % (GetGeneratePath(), GetGeneratedFileName(typelibCLSID, lcid, major, minor))
+				filePath = filePathPrefix + ".py"
+				filePathPyc = filePathPrefix + ".pyc"
+				#print "Trying py stat: ", filePath
+				fModTimeSet = 0
+				try:
+					# stat tuple index 8 is st_mtime.
+					pyModTime = os.stat(filePath)[8]
+					fModTimeSet = 1
+				except os.error, e:
+					# If .py file fails, try .pyc file
+					#print "Trying pyc stat", filePathPyc
+					try:
+						pyModTime = os.stat(filePathPyc)[8]
+						fModTimeSet = 1
+					except os.error, e:
+						pass
+				#print "Trying stat typelib", pyModTime
+				#print str(typLibPath)
+				typLibModTime = os.stat(typLibPath)[8]
+				# Regenerate if the typelib is newer than our generated file.
+				if fModTimeSet and (typLibModTime > pyModTime):
+					bReloadNeeded = 1
+					module = None
+	except (ImportError, os.error):	
+		module = None
+	if module is None:
+		# We need to build an item.  If we are in a read-only cache, we
+		# can't/don't want to do this - so before giving up, check for
+		# a different minor version in our cache - according to COM, this is OK
+		if is_readonly:
+			key = str(typelibCLSID), lcid, major, minor
+			# If we have been asked before, get last result.
+			try:
+				return versionRedirectMap[key]
+			except KeyError:
+				pass
+			# Find other candidates.
+			items = []
+			for desc in GetGeneratedInfos():
+				if key[0]==desc[0] and key[1]==desc[1] and key[2]==desc[2]:
+					items.append(desc)
+			if items:
+				# Items are all identical, except for last tuple element
+				# We want the latest minor version we have - so just sort and grab last
+				items.sort()
+				new_minor = items[-1][3]
+				ret = GetModuleForTypelib(typelibCLSID, lcid, major, new_minor)
+			else:
+				ret = None
+			# remember and return
+			versionRedirectMap[key] = ret
+			return ret
+		#print "Rebuilding: ", major, minor
+		module = MakeModuleForTypelib(typelibCLSID, lcid, major, minor, progressInstance, bForDemand = bForDemand, bBuildHidden = bBuildHidden)
+		# If we replaced something, reload it
+		if bReloadNeeded:
+			module = reload(module)
+			AddModuleToCache(typelibCLSID, lcid, major, minor)
+	return module
+
+def EnsureDispatch(prog_id, bForDemand = 1): # New fn, so we default the new demand feature to on!
+	"""Given a COM prog_id, return an object that is using makepy support, building if necessary"""
+	disp = win32com.client.Dispatch(prog_id)
+	if not disp.__dict__.get("CLSID"): # Eeek - no makepy support - try and build it.
+		try:
+			# Locate the containing typelib via the object's type info.
+			ti = disp._oleobj_.GetTypeInfo()
+			disp_clsid = ti.GetTypeAttr()[0]
+			tlb, index = ti.GetContainingTypeLib()
+			tla = tlb.GetLibAttr()
+			mod = EnsureModule(tla[0], tla[1], tla[3], tla[4], bForDemand=bForDemand)
+			GetModuleForCLSID(disp_clsid)
+			# Get the class from the module.
+			import CLSIDToClass
+			disp_class = CLSIDToClass.GetClass(str(disp_clsid))
+			# Re-wrap the existing COM object with the generated class.
+			disp = disp_class(disp._oleobj_)
+		except pythoncom.com_error:
+			raise TypeError, "This COM object can not automate the makepy process - please run makepy manually for this object"
+	return disp
+
+def AddModuleToCache(typelibclsid, lcid, major, minor, verbose = 1, bFlushNow = not is_readonly):
+	"""Add a newly generated file to the cache dictionary.
+
+	Imports the generated module and records every CLSID it declares
+	(classes, packages and vtable interfaces) in clsidToTypelib.
+	bFlushNow controls whether the index is written back to disk.
+	"""
+	fname = GetGeneratedFileName(typelibclsid, lcid, major, minor)
+	mod = _GetModule(fname)
+	# if mod._in_gencache_ is already true, then we are reloading this
+	# module - this doesn't mean anything special though!
+	mod._in_gencache_ = 1
+	dict = mod.CLSIDToClassMap
+	info = str(typelibclsid), lcid, major, minor
+	for clsid, cls in dict.items():
+		clsidToTypelib[clsid] = info
+
+	dict = mod.CLSIDToPackageMap
+	for clsid, name in dict.items():
+		clsidToTypelib[clsid] = info
+
+	dict = mod.VTablesToClassMap
+	for clsid, cls in dict.items():
+		clsidToTypelib[clsid] = info
+
+	dict = mod.VTablesToPackageMap
+	for clsid, cls in dict.items():
+		clsidToTypelib[clsid] = info
+
+	# If this lib was previously redirected, drop it
+	if versionRedirectMap.has_key(info):
+		del versionRedirectMap[info]
+	if bFlushNow:
+		_SaveDicts()
+
+def GetGeneratedInfos():
+	zip_pos = win32com.__gen_path__.find(".zip\\")
+	if zip_pos >= 0:
+		import zipfile, cStringIO
+		zip_file = win32com.__gen_path__[:zip_pos+4]
+		zip_path = win32com.__gen_path__[zip_pos+5:].replace("\\", "/")
+		zf = zipfile.ZipFile(zip_file)
+		infos = {}
+		for n in zf.namelist():
+			if not n.startswith(zip_path):
+				continue
+			base = n[len(zip_path)+1:].split("/")[0]
+			try:
+				iid, lcid, major, minor = base.split("x")
+				lcid = int(lcid)
+				major = int(major)
+				minor = int(minor)
+				iid = pywintypes.IID("{" + iid + "}")
+			except ValueError:
+				continue
+			except pywintypes.com_error:
+				# invalid IID
+				continue
+			infos[(iid, lcid, major, minor)] = 1
+		zf.close()
+		return infos.keys()
+	else:
+		# on the file system
+		files = glob.glob(win32com.__gen_path__+ "\\*")
+		ret = []
+		for file in files:
+			if not os.path.isdir(file) and not os.path.splitext(file)==".py":
+				continue
+			name = os.path.splitext(os.path.split(file)[1])[0]
+			try:
+				iid, lcid, major, minor = string.split(name, "x")
+				iid = pywintypes.IID("{" + iid + "}")
+				lcid = int(lcid)
+				major = int(major)
+				minor = int(minor)
+			except ValueError:
+				continue
+			except pywintypes.com_error:
+				# invalid IID
+				continue
+			ret.append((iid, lcid, major, minor))
+		return ret
+
+def _GetModule(fname):
+	"""Given the name of a module in the gen_py directory, import and return it.
+	"""
+	mod_name = "win32com.gen_py.%s" % fname
+	mod = __import__(mod_name)
+	return sys.modules[mod_name]
+
+def Rebuild(verbose = 1):
+	"""Rebuild the cache indexes from the file system.
+
+	Scans the generate path for support modules, re-registers each one,
+	and persists the rebuilt index via _SaveDicts().
+	"""
+	clsidToTypelib.clear()
+	infos = GetGeneratedInfos()
+	if verbose and len(infos): # Dont bother reporting this when directory is empty!
+		print "Rebuilding cache of generated files for COM support..."
+	for info in infos:
+		iid, lcid, major, minor = info
+		if verbose:
+			print "Checking", GetGeneratedFileName(*info)
+		try:
+			# bFlushNow=0 - the index is saved once, after the loop.
+			AddModuleToCache(iid, lcid, major, minor, verbose, 0)
+		except:
+			print "Could not add module %s - %s: %s" % (info, sys.exc_info()[0],sys.exc_info()[1])
+	if verbose and len(infos): # Dont bother reporting this when directory is empty!
+		print "Done."
+	_SaveDicts()
+
+def _Dump():
+	"""Print each generated typelib module (doc string and CLSID) in the cache."""
+	print "Cache is in directory", win32com.__gen_path__
+	# Build a unique dir
+	d = {}
+	for clsid, (typelibCLSID, lcid, major, minor) in clsidToTypelib.items():
+		d[typelibCLSID, lcid, major, minor] = None
+	for typelibCLSID, lcid, major, minor in d.keys():
+		mod = GetModuleForTypelib(typelibCLSID, lcid, major, minor)
+		print "%s - %s" % (mod.__doc__, typelibCLSID)
+
+# Boot up - load (or rebuild) the cache indexes at import time.
+__init__()
+
+def usage():
+	"""Print command-line usage and exit with status 1 (does not return)."""
+	usageString = """\
+	  Usage: gencache [-q] [-d] [-r]
+	  
+			 -q         - Quiet
+			 -d         - Dump the cache (typelibrary description and filename).
+			 -r         - Rebuild the cache dictionary from the existing .py files
+	"""
+	print usageString
+	sys.exit(1)
+
+if __name__=='__main__':
+	import getopt
+	try:
+		opts, args = getopt.getopt(sys.argv[1:], "qrd")
+	except getopt.error, message:
+		print message
+		usage()
+
+	# we only have options - complain about real args, or none at all!
+	if len(sys.argv)==1 or args:
+		print usage()
+		
+	verbose = 1
+	for opt, val in opts:
+		if opt=='-d': # Dump
+			_Dump()
+		if opt=='-r':
+			Rebuild(verbose)
+		if opt=='-q':
+			verbose = 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/genpy.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/genpy.py
new file mode 100644
index 0000000..d2b64a0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/genpy.py
@@ -0,0 +1,1025 @@
+"""genpy.py - The worker for makepy.  See makepy.py for more details
+
+This code was moved simply to speed Python in normal circumstances.  As the makepy.py
+is normally run from the command line, it reparses the code each time.  Now makepy
+is nothing more than the command line handler and public interface.
+
+The makepy command line etc handling is also getting large enough in its own right!
+"""
+
+# NOTE - now supports a "demand" mechanism - the top-level is a package, and
+# each class etc can be made individually.
+# This should eventually become the default.
+# Then the old non-package technique should be removed.
+# There should be no b/w compat issues, and will just help clean the code.
+# This will be done once the new "demand" mechanism gets a good workout.
+import os
+import sys
+import string
+import time
+import win32com
+
+import pythoncom
+import build
+
+error = "makepy.error"
+makepy_version = "0.4.95" # Written to generated file.
+
+GEN_FULL="full"
+GEN_DEMAND_BASE = "demand(base)"
+GEN_DEMAND_CHILD = "demand(child)"
+
+try:
+    TrueRepr = repr(True)
+    FalseRepr = repr(False)
+except NameError:
+    TrueRepr = "1"
+    FalseRepr = "0"
+
+# This map is used purely for the users benefit -it shows the
+# raw, underlying type of Alias/Enums, etc.  The COM implementation
+# does not use this map at runtime - all Alias/Enum have already
+# been translated.
+mapVTToTypeString = {
+    pythoncom.VT_I2: 'types.IntType',
+    pythoncom.VT_I4: 'types.IntType',
+    pythoncom.VT_R4: 'types.FloatType',
+    pythoncom.VT_R8: 'types.FloatType',
+    pythoncom.VT_BSTR: 'types.StringType',
+    pythoncom.VT_BOOL: 'types.IntType',
+    pythoncom.VT_VARIANT: 'types.TypeType',
+    pythoncom.VT_I1: 'types.IntType',
+    pythoncom.VT_UI1: 'types.IntType',
+    pythoncom.VT_UI2: 'types.IntType',
+    pythoncom.VT_UI4: 'types.IntType',
+    pythoncom.VT_I8: 'types.LongType',
+    pythoncom.VT_UI8: 'types.LongType',
+    pythoncom.VT_INT: 'types.IntType',
+    pythoncom.VT_DATE: 'pythoncom.PyTimeType',
+    pythoncom.VT_UINT: 'types.IntType',
+}
+
+# Given a propget function's arg desc, return the default parameters for all
+# params bar the first.  Eg, then Python does a:
+# object.Property = "foo"
+# Python can only pass the "foo" value.  If the property has
+# multiple args, and the rest have default values, this allows
+# Python to correctly pass those defaults.
+def MakeDefaultArgsForPropertyPut(argsDesc):
+    ret = []
+    for desc in argsDesc[1:]:
+        default = build.MakeDefaultArgRepr(desc)
+        if default is None:
+            break
+        ret.append(default)
+    return tuple(ret)
+                            
+
+def MakeMapLineEntry(dispid, wFlags, retType, argTypes, user, resultCLSID):
+    # Strip the default value
+    argTypes = tuple(map(lambda what: what[:2], argTypes))
+    return '(%s, %d, %s, %s, "%s", %s)' % \
+        (dispid, wFlags, retType[:2], argTypes, user, resultCLSID)
+
+def MakeEventMethodName(eventName):
+    if eventName[:2]=="On":
+        return eventName
+    else:
+        return "On"+eventName
+
+def WriteSinkEventMap(obj, stream):
+    print >> stream, '\t_dispid_to_func_ = {'
+    for name, entry in obj.propMapGet.items() + obj.propMapPut.items() + obj.mapFuncs.items():
+        fdesc = entry.desc
+        print >> stream, '\t\t%9d : "%s",' % (entry.desc[0], MakeEventMethodName(entry.names[0]))
+    print >> stream, '\t\t}'
+    
+
+# MI is used to join my writable helpers, and the OLE
+# classes.
+class WritableItem:
+    # Mixin giving OLE items an ordering (by .order then doc name) and a repr.
+    def __cmp__(self, other):
+        "Compare for sorting"   
+        ret = cmp(self.order, other.order)
+        if ret==0 and self.doc: ret = cmp(self.doc[0], other.doc[0])
+        return ret
+    def __repr__(self):
+        return "OleItem: doc=%s, order=%d" % (`self.doc`, self.order)
+
+
+class RecordItem(build.OleItem, WritableItem):
+  """A COM record (struct) definition - no Python class is generated for it."""
+  order = 9
+  typename = "RECORD"
+
+  def __init__(self, typeInfo, typeAttr, doc=None, bForUser=1):
+##    sys.stderr.write("Record %s: size %s\n" % (doc,typeAttr.cbSizeInstance))
+##    sys.stderr.write(" cVars = %s\n" % (typeAttr.cVars,))
+##    for i in range(typeAttr.cVars):
+##        vdesc = typeInfo.GetVarDesc(i)
+##        sys.stderr.write(" Var %d has value %s, type %d, desc=%s\n" % (i, vdesc.value, vdesc.varkind, vdesc.elemdescVar))
+##        sys.stderr.write(" Doc is %s\n" % (typeInfo.GetDocumentation(vdesc.memid),))
+
+    build.OleItem.__init__(self, doc)
+    # typeAttr[0] is the record's GUID.
+    self.clsid = typeAttr[0]
+
+  def WriteClass(self, generator):
+    # Records generate no class code.
+    pass
+
+# Given an enum, write all aliases for it.
+# (no longer necessary for new style code, but still used for old code.
+def WriteAliasesForItem(item, aliasItems, stream):
+  # Write every alias whose aliased doc name matches this item's doc name.
+  for alias in aliasItems.values():
+    if item.doc and alias.aliasDoc and (alias.aliasDoc[0]==item.doc[0]):
+      alias.WriteAliasItem(aliasItems, stream)
+      
+class AliasItem(build.OleItem, WritableItem):
+  """A typedef/alias from the type library, written as a simple assignment."""
+  order = 2
+  typename = "ALIAS"
+
+  def __init__(self, typeinfo, attr, doc=None, bForUser = 1):
+    build.OleItem.__init__(self, doc)
+
+    # attr[14] is tdescAlias - the description of the aliased type.
+    ai = attr[14]
+    self.attr = attr
+    if type(ai) == type(()) and \
+      type(ai[1])==type(0): # XXX - This is a hack - why tuples?  Need to resolve?
+      href = ai[1]
+      alinfo = typeinfo.GetRefTypeInfo(href)
+      self.aliasDoc = alinfo.GetDocumentation(-1)
+      self.aliasAttr = alinfo.GetTypeAttr()
+    else:
+      self.aliasDoc = None
+      self.aliasAttr = None
+
+  def WriteAliasItem(self, aliasDict, stream):
+    # we could have been written as part of an alias dependency
+    if self.bWritten:
+      return
+
+    if self.aliasDoc:
+      # Alias of another named item - write the dependency first.
+      depName = self.aliasDoc[0]
+      if aliasDict.has_key(depName):
+        aliasDict[depName].WriteAliasItem(aliasDict, stream)
+      print >> stream, self.doc[0] + " = " + depName
+    else:
+      # Alias of a raw VT_* type - emit a descriptive comment only.
+      ai = self.attr[14]
+      if type(ai) == type(0):
+        try:
+          typeStr = mapVTToTypeString[ai]
+          print >> stream, "# %s=%s" % (self.doc[0], typeStr)
+        except KeyError:
+          print >> stream, self.doc[0] + " = None # Can't convert alias info " + str(ai)
+    print >> stream
+    self.bWritten = 1
+
+class EnumerationItem(build.OleItem, WritableItem):
+  """An enum from the type library, written as constant assignments."""
+  order = 1
+  typename = "ENUMERATION"
+
+  def __init__(self, typeinfo, attr, doc=None, bForUser=1):
+    build.OleItem.__init__(self, doc)
+
+    self.clsid = attr[0]
+    self.mapVars = {}
+    # attr[11] is wTypeFlags.
+    typeFlags = attr[11]
+    self.hidden = typeFlags & pythoncom.TYPEFLAG_FHIDDEN or \
+                  typeFlags & pythoncom.TYPEFLAG_FRESTRICTED
+
+    # attr[7] is cVars - the number of enum members.
+    for j in range(attr[7]):
+      vdesc = typeinfo.GetVarDesc(j)
+      name = typeinfo.GetNames(vdesc[0])[0]
+      self.mapVars[name] = build.MapEntry(vdesc)
+
+##  def WriteEnumerationHeaders(self, aliasItems, stream):
+##    enumName = self.doc[0]
+##    print >> stream "%s=constants # Compatibility with previous versions." % (enumName)
+##    WriteAliasesForItem(self, aliasItems)
+    
+  def WriteEnumerationItems(self, stream):
+    enumName = self.doc[0]
+    # Write in name alpha order
+    names = self.mapVars.keys()
+    names.sort()
+    for name in names:
+      entry = self.mapVars[name]
+      vdesc = entry.desc
+      # vdesc[4] is varkind; vdesc[1] the constant's value.
+      if vdesc[4] == pythoncom.VAR_CONST:
+        val = vdesc[1]
+        if type(val)==type(0):
+          if val==0x80000000L: # special case
+            use = "0x80000000L" # 'L' for future warning
+          elif val > 0x80000000L or val < 0: # avoid a FutureWarning
+            use = long(val)
+          else:
+            use = hex(val)
+        else:
+          use = repr(str(val))
+        print >> stream, "\t%-30s=%-10s # from enum %s" % \
+                      (build.MakePublicAttributeName(name, True), use, enumName)
+
class VTableItem(build.VTableItem, WritableItem):
    """A vtable (early-bound) interface - written only as a '_vtables_' map."""
    order = 4

    def WriteClass(self, generator):
        """Write this item.  A vtable item has no class body - only the map."""
        self.WriteVTableMap(generator)
        self.bWritten = 1

    def WriteVTableMap(self, generator):
        """Write the '<name>_vtables_' list describing every vtable method.

        Entries are built from self.vtableFuncs with trailing-comma prints;
        the item_num bookkeeping only breaks overly long generated lines.
        """
        stream = generator.file
        print >> stream, "%s_vtables_dispatch_ = %d" % (self.python_name, self.bIsDispatch)
        print >> stream, "%s_vtables_ = [" % (self.python_name, ) 
        for v in self.vtableFuncs:
            names, dispid, desc = v
            arg_desc = desc[2]

            arg_reprs = []
            # more hoops so we don't generate huge lines.
            item_num = 0
            print >> stream, "\t((",
            for name in names:
                print >> stream, repr(name), ",",
                item_num = item_num + 1
                if item_num % 5 == 0:
                    print >> stream, "\n\t\t\t",
            print >> stream, "), %d, (%r, %r, [" % (dispid, desc[0], desc[1]),
            for arg in arg_desc:
                item_num = item_num + 1
                if item_num % 5 == 0:
                    print >> stream, "\n\t\t\t",
                defval = build.MakeDefaultArgRepr(arg)
                if arg[3] is None:
                    arg3_repr = None
                else:
                    arg3_repr = repr(arg[3])
                print >> stream, repr((arg[0], arg[1], defval, arg3_repr)), ",",
            print >> stream, "],",
            for d in desc[3:]:
                print >> stream, repr(d), ",", 
            print >> stream, ")),"
        print >> stream, "]"
        print >> stream
+
class DispatchItem(build.DispatchItem, WritableItem):
    """An IDispatch-based interface, written as a Python class in the module."""
    order = 3

    def __init__(self, typeinfo, attr, doc=None):
        """See build.DispatchItem; also keeps the raw TYPEATTR tuple."""
        build.DispatchItem.__init__(self, typeinfo, attr, doc)
        self.type_attr = attr
        self.coclass_clsid = None  # set later if this interface belongs to a coclass

    def WriteClass(self, generator):
      """Write the class (or, for sinks, the event handler class)."""
      if not self.bIsDispatch and not self.type_attr.typekind == pythoncom.TKIND_DISPATCH:
          return
      # This is pretty screwey - now we have vtable support we
      # should probably rethink this (ie, maybe write both sides for sinks, etc)
      if self.bIsSink:
          self.WriteEventSinkClassHeader(generator)
          self.WriteCallbackClassBody(generator)
      else:
          self.WriteClassHeader(generator)
          self.WriteClassBody(generator)
      print >> generator.file
      self.bWritten = 1

    def WriteClassHeader(self, generator):
        """Write the 'class X(DispatchBaseClass):' line and CLSID attributes."""
        generator.checkWriteDispatchBaseClass()
        doc = self.doc
        stream = generator.file
        print >> stream, 'class ' + self.python_name + '(DispatchBaseClass):'
        if doc[1]: print >> stream, '\t' + build._safeQuotedString(doc[1])
        try:
            # ProgIDFromCLSID fails (com_error) when no ProgID is registered.
            progId = pythoncom.ProgIDFromCLSID(self.clsid)
            print >> stream, "\t# This class is creatable by the name '%s'" % (progId)
        except pythoncom.com_error:
            pass
        print >> stream, "\tCLSID = " + repr(self.clsid)
        if self.coclass_clsid is None:
            print >> stream, "\tcoclass_clsid = None"
        else:
            print >> stream, "\tcoclass_clsid = " + repr(self.coclass_clsid)
        print >> stream
        self.bWritten = 1

    def WriteEventSinkClassHeader(self, generator):
        """Write the event sink class: header, event map and the boilerplate
        __init__/__del__/close/_query_interface_ that manages the connection
        point advise/unadvise lifecycle."""
        generator.checkWriteEventBaseClass()
        doc = self.doc
        stream = generator.file
        print >> stream, 'class ' + self.python_name + ':'
        if doc[1]: print >> stream, '\t' + build._safeQuotedString(doc[1])
        try:
            progId = pythoncom.ProgIDFromCLSID(self.clsid)
            print >> stream, "\t# This class is creatable by the name '%s'" % (progId)
        except pythoncom.com_error:
            pass
        print >> stream, '\tCLSID = CLSID_Sink = ' + repr(self.clsid)
        if self.coclass_clsid is None:
            print >> stream, "\tcoclass_clsid = None"
        else:
            print >> stream, "\tcoclass_clsid = " + repr(self.coclass_clsid)
        print >> stream, '\t_public_methods_ = [] # For COM Server support'
        WriteSinkEventMap(self, stream)
        print >> stream
        print >> stream, '\tdef __init__(self, oobj = None):'
        print >> stream, "\t\tif oobj is None:"
        print >> stream, "\t\t\tself._olecp = None"
        print >> stream, "\t\telse:"
        print >> stream, '\t\t\timport win32com.server.util'
        print >> stream, '\t\t\tfrom win32com.server.policy import EventHandlerPolicy'
        print >> stream, '\t\t\tcpc=oobj._oleobj_.QueryInterface(pythoncom.IID_IConnectionPointContainer)'
        print >> stream, '\t\t\tcp=cpc.FindConnectionPoint(self.CLSID_Sink)'
        print >> stream, '\t\t\tcookie=cp.Advise(win32com.server.util.wrap(self, usePolicy=EventHandlerPolicy))'
        print >> stream, '\t\t\tself._olecp,self._olecp_cookie = cp,cookie'
        print >> stream, '\tdef __del__(self):'
        print >> stream, '\t\ttry:'
        print >> stream, '\t\t\tself.close()'
        print >> stream, '\t\texcept pythoncom.com_error:'
        print >> stream, '\t\t\tpass'
        print >> stream, '\tdef close(self):'
        print >> stream, '\t\tif self._olecp is not None:'
        print >> stream, '\t\t\tcp,cookie,self._olecp,self._olecp_cookie = self._olecp,self._olecp_cookie,None,None'
        print >> stream, '\t\t\tcp.Unadvise(cookie)'
        print >> stream, '\tdef _query_interface_(self, iid):'
        print >> stream, '\t\timport win32com.server.util'
        print >> stream, '\t\tif iid==self.CLSID_Sink: return win32com.server.util.wrap(self)'
        print >> stream
        self.bWritten = 1

    def WriteCallbackClassBody(self, generator):
        """Write commented-out prototypes for each event handler method."""
        stream = generator.file
        print >> stream, "\t# Event Handlers"
        print >> stream, "\t# If you create handlers, they should have the following prototypes:"
        for name, entry in self.propMapGet.items() + self.propMapPut.items() + self.mapFuncs.items():
            fdesc = entry.desc
            methName = MakeEventMethodName(entry.names[0])
            print >> stream, '#\tdef ' + methName + '(self' + build.BuildCallList(fdesc, entry.names, "defaultNamedOptArg", "defaultNamedNotOptArg","defaultUnnamedArg", "pythoncom.Missing", is_comment = True) + '):'
            if entry.doc and entry.doc[1]:
                print >> stream, '#\t\t' + build._safeQuotedString(entry.doc[1])
        print >> stream
        self.bWritten = 1

    def WriteClassBody(self, generator):
        """Write the methods, _prop_map_get_/_prop_map_put_ dictionaries and
        the special members (__call__, __len__, iteration support) for the
        class, tracking DISPID_VALUE/DISPID_NEWENUM/item/count specially."""
        stream = generator.file
        # Write in alpha order.
        names = self.mapFuncs.keys()
        names.sort()
        specialItems = {"count":None, "item":None,"value":None,"_newenum":None} # If found, will end up with (entry, invoke_tupe)
        itemCount = None
        for name in names:
            entry=self.mapFuncs[name]
            # skip [restricted] methods, unless it is the
            # enumerator (which, being part of the "system",
            # we know about and can use)
            dispid = entry.desc[0]
            if entry.desc[9] & pythoncom.FUNCFLAG_FRESTRICTED and \
                dispid != pythoncom.DISPID_NEWENUM:
                continue
            # If not accessible via IDispatch, then we can't use it here.
            if entry.desc[3] != pythoncom.FUNC_DISPATCH:
                continue
            if dispid==pythoncom.DISPID_VALUE:
                lkey = "value"
            elif dispid==pythoncom.DISPID_NEWENUM:
                specialItems["_newenum"] = (entry, entry.desc[4], None)
                continue # Dont build this one now!
            else:
                lkey = string.lower(name)
            if specialItems.has_key(lkey) and specialItems[lkey] is None: # remember if a special one.
                specialItems[lkey] = (entry, entry.desc[4], None)
            if generator.bBuildHidden or not entry.hidden:
                if entry.GetResultName():
                    print >> stream, '\t# Result is of type ' + entry.GetResultName()
                if entry.wasProperty:
                    print >> stream, '\t# The method %s is actually a property, but must be used as a method to correctly pass the arguments' % name
                ret = self.MakeFuncMethod(entry,build.MakePublicAttributeName(name))
                for line in ret:
                    print >> stream, line
        # Properties reachable via DISPATCH_PROPERTYGET.
        print >> stream, "\t_prop_map_get_ = {"
        names = self.propMap.keys(); names.sort()
        for key in names:
            entry = self.propMap[key]
            if generator.bBuildHidden or not entry.hidden:
                resultName = entry.GetResultName()
                if resultName:
                    print >> stream, "\t\t# Property '%s' is an object of type '%s'" % (key, resultName)
                lkey = string.lower(key)
                details = entry.desc
                resultDesc = details[2]
                argDesc = ()
                mapEntry = MakeMapLineEntry(details[0], pythoncom.DISPATCH_PROPERTYGET, resultDesc, argDesc, key, entry.GetResultCLSIDStr())
            
                if entry.desc[0]==pythoncom.DISPID_VALUE:
                    lkey = "value"
                elif entry.desc[0]==pythoncom.DISPID_NEWENUM:
                    lkey = "_newenum"
                else:
                    lkey = string.lower(key)
                if specialItems.has_key(lkey) and specialItems[lkey] is None: # remember if a special one.
                    specialItems[lkey] = (entry, pythoncom.DISPATCH_PROPERTYGET, mapEntry)
                    # All special methods, except _newenum, are written
                    # "normally".  This is a mess!
                    if entry.desc[0]==pythoncom.DISPID_NEWENUM:
                        continue 

                print >> stream, '\t\t"%s": %s,' % (build.MakePublicAttributeName(key), mapEntry)
        names = self.propMapGet.keys(); names.sort()
        for key in names:
            entry = self.propMapGet[key]
            if generator.bBuildHidden or not entry.hidden:
                if entry.GetResultName():
                    print >> stream, "\t\t# Method '%s' returns object of type '%s'" % (key, entry.GetResultName())
                details = entry.desc
                lkey = string.lower(key)
                argDesc = details[2]
                resultDesc = details[8]
                mapEntry = MakeMapLineEntry(details[0], pythoncom.DISPATCH_PROPERTYGET, resultDesc, argDesc, key, entry.GetResultCLSIDStr())
                if entry.desc[0]==pythoncom.DISPID_VALUE:
                    lkey = "value"
                elif entry.desc[0]==pythoncom.DISPID_NEWENUM:
                    lkey = "_newenum"
                else:
                    lkey = string.lower(key)
                if specialItems.has_key(lkey) and specialItems[lkey] is None: # remember if a special one.
                    specialItems[lkey]=(entry, pythoncom.DISPATCH_PROPERTYGET, mapEntry)
                    # All special methods, except _newenum, are written
                    # "normally".  This is a mess!
                    if entry.desc[0]==pythoncom.DISPID_NEWENUM:
                        continue 
                print >> stream, '\t\t"%s": %s,' % (build.MakePublicAttributeName(key), mapEntry)

        print >> stream, "\t}"

        # Properties settable via DISPATCH_PROPERTYPUT (with "Invoke" args).
        print >> stream, "\t_prop_map_put_ = {"
        # These are "Invoke" args
        names = self.propMap.keys(); names.sort()
        for key in names:
            entry = self.propMap[key]
            if generator.bBuildHidden or not entry.hidden:
                lkey=string.lower(key)
                details = entry.desc
                # If default arg is None, write an empty tuple
                defArgDesc = build.MakeDefaultArgRepr(details[2])
                if defArgDesc is None:
                    defArgDesc = ""
                else:
                    defArgDesc = defArgDesc + ","
                print >> stream, '\t\t"%s" : ((%s, LCID, %d, 0),(%s)),' % (build.MakePublicAttributeName(key), details[0], pythoncom.DISPATCH_PROPERTYPUT, defArgDesc)

        names = self.propMapPut.keys(); names.sort()
        for key in names:
            entry = self.propMapPut[key]
            if generator.bBuildHidden or not entry.hidden:
                details = entry.desc
                defArgDesc = MakeDefaultArgsForPropertyPut(details[2])
                print >> stream, '\t\t"%s": ((%s, LCID, %d, 0),%s),' % (build.MakePublicAttributeName(key), details[0], details[4], defArgDesc)
        print >> stream, "\t}"
        
        # The default member (DISPID_VALUE) becomes __call__, plus string
        # and integer conversions that delegate to it.
        if specialItems["value"]:
            entry, invoketype, propArgs = specialItems["value"]
            if propArgs is None:
                typename = "method"
                ret = self.MakeFuncMethod(entry,'__call__')
            else:
                typename = "property"
                ret = [ "\tdef __call__(self):\n\t\treturn self._ApplyTypes_(*%s)" % propArgs]
            print >> stream, "\t# Default %s for this class is '%s'" % (typename, entry.names[0])
            for line in ret:
                print >> stream, line
            print >> stream, "\t# str(ob) and int(ob) will use __call__"
            print >> stream, "\tdef __unicode__(self, *args):"
            print >> stream, "\t\ttry:"
            print >> stream, "\t\t\treturn unicode(self.__call__(*args))"
            print >> stream, "\t\texcept pythoncom.com_error:"
            print >> stream, "\t\t\treturn repr(self)"
            print >> stream, "\tdef __str__(self, *args):"
            print >> stream, "\t\treturn str(self.__unicode__(*args))"
            print >> stream, "\tdef __int__(self, *args):"
            print >> stream, "\t\treturn int(self.__call__(*args))"
            

        # DISPID_NEWENUM becomes __iter__/_NewEnum/__getitem__ support.
        if specialItems["_newenum"]:
            enumEntry, invoketype, propArgs = specialItems["_newenum"]
            resultCLSID = enumEntry.GetResultCLSIDStr()
            # If we dont have a good CLSID for the enum result, assume it is the same as the Item() method.
            if resultCLSID == "None" and self.mapFuncs.has_key("Item"):
                resultCLSID = self.mapFuncs["Item"].GetResultCLSIDStr()
            # "Native" Python iterator support
            print >> stream, '\tdef __iter__(self):'
            print >> stream, '\t\t"Return a Python iterator for this object"'
            print >> stream, '\t\tob = self._oleobj_.InvokeTypes(%d,LCID,%d,(13, 10),())' % (pythoncom.DISPID_NEWENUM, enumEntry.desc[4])
            print >> stream, '\t\treturn win32com.client.util.Iterator(ob)'
            # And 'old style' iterator support - magically used to simulate iterators
            # before Python grew them
            print >> stream, '\tdef _NewEnum(self):'
            print >> stream, '\t\t"Create an enumerator from this object"'
            print >> stream, '\t\treturn win32com.client.util.WrapEnum(self._oleobj_.InvokeTypes(%d,LCID,%d,(13, 10),()),%s)' % (pythoncom.DISPID_NEWENUM, enumEntry.desc[4], resultCLSID)
            print >> stream, '\tdef __getitem__(self, index):'
            print >> stream, '\t\t"Allow this class to be accessed as a collection"'
            print >> stream, "\t\tif not self.__dict__.has_key('_enum_'):"
            print >> stream, "\t\t\tself.__dict__['_enum_'] = self._NewEnum()"
            print >> stream, "\t\treturn self._enum_.__getitem__(index)"
        else: # Not an Enumerator, but may be an "Item/Count" based collection
            if specialItems["item"]:
                entry, invoketype, propArgs = specialItems["item"]
                print >> stream, '\t#This class has Item property/method which may take args - allow indexed access'
                print >> stream, '\tdef __getitem__(self, item):'
                print >> stream, '\t\treturn self._get_good_object_(self._oleobj_.Invoke(*(%d, LCID, %d, 1, item)), "Item")' % (entry.desc[0], invoketype)
        # A Count member becomes __len__ (and therefore __nonzero__).
        if specialItems["count"]:
            entry, invoketype, propArgs = specialItems["count"]
            if propArgs is None:
                typename = "method"
                ret = self.MakeFuncMethod(entry,'__len__')
            else:
                typename = "property"
                ret = [ "\tdef __len__(self):\n\t\treturn self._ApplyTypes_(*%s)" % propArgs]
            print >> stream, "\t#This class has Count() %s - allow len(ob) to provide this" % (typename)
            for line in ret:
                print >> stream, line
            # Also include a __nonzero__
            print >> stream, "\t#This class has a __len__ - this is needed so 'if object:' always returns TRUE."
            print >> stream, "\tdef __nonzero__(self):"
            print >> stream, "\t\treturn %s" % (TrueRepr,)
+
class CoClassItem(build.OleItem, WritableItem):
  """A CoClass from the type library, linking its source (event) and
  implemented interfaces to the generated Python class."""
  order = 5
  typename = "COCLASS"

  def __init__(self, typeinfo, attr, doc=None, sources = None, interfaces = None, bForUser=1):
    """attr is the TYPEATTR tuple (attr[0] is the CLSID).

    sources/interfaces are sequences of (item, impltype-flags) tuples; they
    default to fresh empty lists.
    """
    build.OleItem.__init__(self, doc)
    self.clsid = attr[0]
    # Use None sentinels rather than mutable default arguments, which
    # would be shared between all instances created without these args.
    if sources is None:
      sources = []
    if interfaces is None:
      interfaces = []
    self.sources = sources
    self.interfaces = interfaces
    self.bIsDispatch = 1 # Pretend it is so it is written to the class map.

  def WriteClass(self, generator):
    """Write the 'class X(CoClassBaseClass)' definition, including the
    coclass_sources/coclass_interfaces lists and their defaults."""
    generator.checkWriteCoClassBaseClass()
    doc = self.doc
    stream = generator.file
    if generator.generate_type == GEN_DEMAND_CHILD:
      # Demand-generated children live in separate modules - import the
      # modules of every referenced interface and bind their names here.
      referenced_items = []
      for ref, flag in self.sources:
        referenced_items.append(ref)
      for ref, flag in self.interfaces:
        referenced_items.append(ref)
      print >> stream, "import sys"
      for ref in referenced_items:
        print >> stream, "__import__('%s.%s')" % (generator.base_mod_name, ref.python_name)
        print >> stream, "%s = sys.modules['%s.%s'].%s" % (ref.python_name, generator.base_mod_name, ref.python_name, ref.python_name)
        # And pretend we have written it - the name is now available as if we had!
        ref.bWritten = 1
    try:
      # com_error here simply means no ProgID is registered for the CLSID.
      progId = pythoncom.ProgIDFromCLSID(self.clsid)
      print >> stream, "# This CoClass is known by the name '%s'" % (progId)
    except pythoncom.com_error:
      pass
    print >> stream, 'class %s(CoClassBaseClass): # A CoClass' % (self.python_name)
    if doc and doc[1]: print >> stream, '\t# ' + doc[1]
    print >> stream, '\tCLSID = %r' % (self.clsid,)
    print >> stream, '\tcoclass_sources = ['
    defItem = None
    for item, flag in self.sources:
      if flag & pythoncom.IMPLTYPEFLAG_FDEFAULT:
        defItem = item
      # If we have written a Python class, reference the name - 
      # otherwise just the IID.
      if item.bWritten: key = item.python_name
      else: key = repr(str(item.clsid)) # really the iid.
      print >> stream, '\t\t%s,' % (key)
    print >> stream, '\t]'
    if defItem:
      if defItem.bWritten: defName = defItem.python_name
      else: defName = repr(str(defItem.clsid)) # really the iid.
      print >> stream, '\tdefault_source = %s' % (defName,)
    print >> stream, '\tcoclass_interfaces = ['
    defItem = None
    for item, flag in self.interfaces:
      if flag & pythoncom.IMPLTYPEFLAG_FDEFAULT: # and dual:
        defItem = item
      # If we have written a class, reference its name, otherwise the IID
      if item.bWritten: key = item.python_name
      else: key = repr(str(item.clsid)) # really the iid.
      print >> stream, '\t\t%s,' % (key,)
    print >> stream, '\t]'
    if defItem:
      if defItem.bWritten: defName = defItem.python_name
      else: defName = repr(str(defItem.clsid)) # really the iid.
      print >> stream, '\tdefault_interface = %s' % (defName,)
    self.bWritten = 1
    print >> stream
+
class GeneratorProgress:
    """Abstract progress reporter for the generation process.

    Subclasses override whichever notification methods they care about;
    the base implementations are no-ops so a silent default is available.
    """
    def __init__(self):
        pass

    def Starting(self, tlb_desc):
        """Called when the process starts; remembers the typelib description."""
        self.tlb_desc = tlb_desc

    def Finished(self):
        """Called when the process is complete."""

    def SetDescription(self, desc, maxticks = None):
        """Announce a major step.  If given, maxticks is how many ticks
        to expect before the step finishes."""

    def Tick(self, desc = None):
        """Minor progress step; desc optionally updates the description."""

    def VerboseProgress(self, desc):
        """Verbose/debugging output."""

    def LogWarning(self, desc):
        """Called when a warning is generated."""

    def LogBeginGenerate(self, filename):
        """Called as generation of an output file begins."""

    def Close(self):
        """Release anything held by the reporter."""
+
+class Generator:
+  def __init__(self, typelib, sourceFilename, progressObject, bBuildHidden=1, bUnicodeToString=0):
+    self.bHaveWrittenDispatchBaseClass = 0
+    self.bHaveWrittenCoClassBaseClass = 0
+    self.bHaveWrittenEventBaseClass = 0
+    self.typelib = typelib
+    self.sourceFilename = sourceFilename
+    self.bBuildHidden = bBuildHidden
+    self.bUnicodeToString = bUnicodeToString
+    self.progress = progressObject
+    # These 2 are later additions and most of the code still 'print's...
+    self.file = None
+
+  def CollectOleItemInfosFromType(self):
+    ret = []
+    for i in xrange(self.typelib.GetTypeInfoCount()):
+      info = self.typelib.GetTypeInfo(i)
+      infotype = self.typelib.GetTypeInfoType(i)
+      doc = self.typelib.GetDocumentation(i)
+      attr = info.GetTypeAttr()
+      ret.append((info, infotype, doc, attr))
+    return ret
+
+  def _Build_CoClass(self, type_info_tuple):
+    info, infotype, doc, attr = type_info_tuple
+    # find the source and dispinterfaces for the coclass
+    child_infos = []
+    for j in range(attr[8]):
+      flags = info.GetImplTypeFlags(j)
+      try:
+        refType = info.GetRefTypeInfo(info.GetRefTypeOfImplType(j))
+      except pythoncom.com_error:
+        # Can't load a dependent typelib?
+        continue
+      refAttr = refType.GetTypeAttr()
+      child_infos.append( (info, refAttr.typekind, refType, refType.GetDocumentation(-1), refAttr, flags) )
+      
+    # Done generating children - now the CoClass itself.
+    newItem = CoClassItem(info, attr, doc)
+    return newItem, child_infos
+
  def _Build_CoClassChildren(self, coclass, coclass_info, oleItems, vtableItems):
    """Build DispatchItem/VTableItem children for a coclass.

    Adds any new items to 'oleItems' and 'vtableItems' (keyed by CLSID),
    then sets coclass.sources and coclass.interfaces from the children.
    """
    sources = {}
    interfaces = {}
    for info, info_type, refType, doc, refAttr, flags in coclass_info:
#          sys.stderr.write("Attr typeflags for coclass referenced object %s=%d (%d), typekind=%d\n" % (name, refAttr.wTypeFlags, refAttr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL,refAttr.typekind))
        # Only dispatch (or dispatchable) interfaces get a Python class.
        if refAttr.typekind == pythoncom.TKIND_DISPATCH or \
           (refAttr.typekind == pythoncom.TKIND_INTERFACE and refAttr[11] & pythoncom.TYPEFLAG_FDISPATCHABLE):
          clsid = refAttr[0]
          # Re-use an existing item for this CLSID if one was already built.
          if oleItems.has_key(clsid):
            dispItem = oleItems[clsid]
          else:
            dispItem = DispatchItem(refType, refAttr, doc)
            oleItems[dispItem.clsid] = dispItem
          dispItem.coclass_clsid = coclass.clsid
          if flags & pythoncom.IMPLTYPEFLAG_FSOURCE:
            dispItem.bIsSink = 1
            sources[dispItem.clsid] = (dispItem, flags)
          else:
            interfaces[dispItem.clsid] = (dispItem, flags)
          # If dual interface, build the vtable side of it too.
          if not vtableItems.has_key(clsid) and refAttr[11] & pythoncom.TYPEFLAG_FDUAL:
            refType = refType.GetRefTypeInfo(refType.GetRefTypeOfImplType(-1))
            refAttr = refType.GetTypeAttr()
            assert refAttr.typekind == pythoncom.TKIND_INTERFACE, "must be interface bynow!"
            vtableItem = VTableItem(refType, refAttr, doc)
            vtableItems[clsid] = vtableItem
    coclass.sources = sources.values()
    coclass.interfaces = interfaces.values()
+
  def _Build_Interface(self, type_info_tuple):
    """Build the DispatchItem and/or VTableItem for a single interface.

    Returns (oleItem, vtableItem) - either may be None.
    """
    info, infotype, doc, attr = type_info_tuple
    oleItem = vtableItem = None
    if infotype == pythoncom.TKIND_DISPATCH or \
       (infotype == pythoncom.TKIND_INTERFACE and attr[11] & pythoncom.TYPEFLAG_FDISPATCHABLE):
        oleItem = DispatchItem(info, attr, doc)
        # If this DISPATCH interface is dual, then build the vtable side too.
        if (attr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL):
            # Get the vtable interface
            refhtype = info.GetRefTypeOfImplType(-1)
            info = info.GetRefTypeInfo(refhtype)
            attr = info.GetTypeAttr()
            infotype = pythoncom.TKIND_INTERFACE
        else:
            infotype = None
    assert infotype in [None, pythoncom.TKIND_INTERFACE], "Must be a real interface at this point"
    if infotype == pythoncom.TKIND_INTERFACE:
        vtableItem = VTableItem(info, attr, doc)
    return oleItem, vtableItem
+
  def BuildOleItemsFromType(self):
    """Walk the type library, building an item for each type.

    Returns (oleItems, enumItems, recordItems, vtableItems) - dictionaries
    keyed by CLSID (enumItems by name).
    """
    assert self.bBuildHidden, "This code doesnt look at the hidden flag - I thought everyone set it true!?!?!"
    oleItems = {}
    enumItems = {}
    recordItems = {}
    vtableItems = {}
    
    for type_info_tuple in self.CollectOleItemInfosFromType():
      info, infotype, doc, attr = type_info_tuple
      clsid = attr[0]
      if infotype == pythoncom.TKIND_ENUM or infotype == pythoncom.TKIND_MODULE:
        newItem = EnumerationItem(info, attr, doc)
        enumItems[newItem.doc[0]] = newItem
      # We never hide interfaces (MSAccess, for example, nominates interfaces as
      # hidden, assuming that you only ever use them via the CoClass)
      elif infotype in [pythoncom.TKIND_DISPATCH, pythoncom.TKIND_INTERFACE]:
        if not oleItems.has_key(clsid):
          oleItem, vtableItem = self._Build_Interface(type_info_tuple)
          oleItems[clsid] = oleItem # Even "None" goes in here.
          if vtableItem is not None:
              vtableItems[clsid] = vtableItem
      elif infotype == pythoncom.TKIND_RECORD or infotype == pythoncom.TKIND_UNION:
        newItem = RecordItem(info, attr, doc)
        recordItems[newItem.clsid] = newItem
      elif infotype == pythoncom.TKIND_ALIAS:
        # We don't care about aliases - handled intrinsically.
        continue
      elif infotype == pythoncom.TKIND_COCLASS:
        newItem, child_infos = self._Build_CoClass(type_info_tuple)
        self._Build_CoClassChildren(newItem, child_infos, oleItems, vtableItems)
        oleItems[newItem.clsid] = newItem
      else:
        self.progress.LogWarning("Unknown TKIND found: %d" % infotype)
  
    return oleItems, enumItems, recordItems, vtableItems
+
+  def generate(self, file, is_for_demand = 0):
+    if is_for_demand:
+      self.generate_type = GEN_DEMAND_BASE
+    else:
+      self.generate_type = GEN_FULL
+    self.file = file
+    self.do_generate()
+    self.file = None
+    self.progress.Finished()
+
  def do_gen_file_header(self):
    """Write the module header: version comments, docstring, imports,
    default-arg sentinels, and the library's CLSID/version/LCID constants."""
    la = self.typelib.GetLibAttr()
    moduleDoc = self.typelib.GetDocumentation(-1)
    docDesc = ""
    if moduleDoc[1]:
      docDesc = moduleDoc[1]

    # Reset all the 'per file' state
    self.bHaveWrittenDispatchBaseClass = 0
    self.bHaveWrittenCoClassBaseClass = 0
    self.bHaveWrittenEventBaseClass = 0

    print >> self.file, '# -*- coding: mbcs -*-' # Is this always correct?
    print >> self.file, '# Created by makepy.py version %s' % (makepy_version,)
    print >> self.file, '# By python version %s' % \
                        (sys.version.replace("\n", "-"),)
    if self.sourceFilename:
        print >> self.file, "# From type library '%s'" % (os.path.split(self.sourceFilename)[1],)
    print >> self.file, '# On %s' % time.ctime(time.time())

    print >> self.file, '"""' + docDesc + '"""'

    print >> self.file, 'makepy_version =', `makepy_version`
    try:
        # sys.hexversion only exists on 1.5.2 and later.
        print >> self.file, 'python_version = 0x%x' % (sys.hexversion,)
    except AttributeError:
        print >> self.file, 'python_version = 0x0 # Presumably Python 1.5.2 - 0x0 is not a problem'
    print >> self.file
    print >> self.file, 'import win32com.client.CLSIDToClass, pythoncom'
    print >> self.file, 'import win32com.client.util'
    print >> self.file, 'from pywintypes import IID'
    print >> self.file, 'from win32com.client import Dispatch'
    print >> self.file
    print >> self.file, '# The following 3 lines may need tweaking for the particular server'
    print >> self.file, '# Candidates are pythoncom.Missing, .Empty and .ArgNotFound'
    print >> self.file, 'defaultNamedOptArg=pythoncom.Empty'
    print >> self.file, 'defaultNamedNotOptArg=pythoncom.Empty'
    print >> self.file, 'defaultUnnamedArg=pythoncom.Empty'
    print >> self.file
    # la is the TLIBATTR tuple: (guid, lcid, syskind, major, minor, flags).
    print >> self.file, 'CLSID = ' + repr(la[0])
    print >> self.file, 'MajorVersion = ' + str(la[3])
    print >> self.file, 'MinorVersion = ' + str(la[4])
    print >> self.file, 'LibraryFlags = ' + str(la[5])
    print >> self.file, 'LCID = ' + hex(la[1])
    print >> self.file
+
  def do_generate(self):
    """Generate the whole module to self.file: header, constants,
    classes/vtables (full generation only), RecordMap, the CLSID/VTable
    maps, and NamesToIIDMap."""
    moduleDoc = self.typelib.GetDocumentation(-1)
    stream = self.file
    docDesc = ""
    if moduleDoc[1]:
      docDesc = moduleDoc[1]
    self.progress.Starting(docDesc)
    self.progress.SetDescription("Building definitions from type library...")

    self.do_gen_file_header()

    oleItems, enumItems, recordItems, vtableItems = self.BuildOleItemsFromType()

    self.progress.SetDescription("Generating...", len(oleItems)+len(enumItems)+len(vtableItems))

    # Generate the constants and their support.
    if enumItems:
        print >> stream, "class constants:"
        list = enumItems.values()
        list.sort()
        for oleitem in list:
            oleitem.WriteEnumerationItems(stream)
            self.progress.Tick()
        print >> stream

    if self.generate_type == GEN_FULL:
      # Full generation - write every class and vtable now; demand
      # generation defers them to child modules.
      list = oleItems.values()
      list = filter(lambda l: l is not None, list)
      list.sort()
      for oleitem in list:
        self.progress.Tick()
        oleitem.WriteClass(self)

      list = vtableItems.values()
      list.sort()
      for oleitem in list:
        self.progress.Tick()
        oleitem.WriteClass(self)
    else:
        self.progress.Tick(len(oleItems)+len(vtableItems))

    print >> stream, 'RecordMap = {'
    list = recordItems.values()
    for record in list:
        # NOTE(review): this compares a str to the PyIID object - presumably
        # PyIID supports comparison with its string form; verify.
        if str(record.clsid) == pythoncom.IID_NULL:
            print >> stream, "\t###%s: %s, # Typedef disabled because it doesn't have a non-null GUID" % (`record.doc[0]`, `str(record.clsid)`)
        else:
            print >> stream, "\t%s: %s," % (`record.doc[0]`, `str(record.clsid)`)
    print >> stream, "}"
    print >> stream

    # Write out _all_ my generated CLSID's in the map
    if self.generate_type == GEN_FULL:
      print >> stream, 'CLSIDToClassMap = {'
      for item in oleItems.values():
          if item is not None and item.bWritten:
              print >> stream, "\t'%s' : %s," % (str(item.clsid), item.python_name)
      print >> stream, '}'
      print >> stream, 'CLSIDToPackageMap = {}'
      print >> stream, 'win32com.client.CLSIDToClass.RegisterCLSIDsFromDict( CLSIDToClassMap )'
      print >> stream, "VTablesToPackageMap = {}"
      print >> stream, "VTablesToClassMap = {"
      for item in vtableItems.values():
        print >> stream, "\t'%s' : '%s'," % (item.clsid,item.python_name)
      print >> stream, '}'
      print >> stream

    else:
      # Demand generation - classes live in child modules, so only the
      # package maps are populated here.
      print >> stream, 'CLSIDToClassMap = {}'
      print >> stream, 'CLSIDToPackageMap = {'
      for item in oleItems.values():
        if item is not None:
          print >> stream, "\t'%s' : %s," % (str(item.clsid), `item.python_name`)
      print >> stream, '}'
      print >> stream, "VTablesToClassMap = {}"
      print >> stream, "VTablesToPackageMap = {"
      for item in vtableItems.values():
        print >> stream, "\t'%s' : '%s'," % (item.clsid,item.python_name)
      print >> stream, '}'
      print >> stream

    print >> stream
    # Bit of a hack - build a temp map of iteItems + vtableItems - coClasses
    map = {}
    for item in oleItems.values():
        if item is not None and not isinstance(item, CoClassItem):
            map[item.python_name] = item.clsid
    for item in vtableItems.values(): # No nones or CoClasses in this map
        map[item.python_name] = item.clsid
            
    print >> stream, "NamesToIIDMap = {"
    for name, iid in map.items():
        print >> stream, "\t'%s' : '%s'," % (name, iid)
    print >> stream, '}'
    print >> stream

    if enumItems:
      print >> stream, 'win32com.client.constants.__dicts__.append(constants.__dict__)'
    print >> stream
+
+  def generate_child(self, child, dir):
+    """Generate a single child module for a demand-built package.
+
+    May force a few children to be built as we generate deps.
+
+    child - the public (MakePublicAttributeName'd) name to locate in the
+            typelib's coclasses or interfaces.
+    dir   - directory the generated <child>.py file is written into.
+    """
+    self.generate_type = GEN_DEMAND_CHILD
+
+    # Build the base module name from the typelib's identity so the child
+    # can reference its parent package (win32com.gen_py.<guid>x<lcid>x<maj>x<min>).
+    la = self.typelib.GetLibAttr()
+    lcid = la[1]
+    clsid = la[0]
+    major=la[3]
+    minor=la[4]
+    self.base_mod_name = "win32com.gen_py." + str(clsid)[1:-1] + "x%sx%sx%s" % (lcid, major, minor)
+    try:
+      # Process the type library's CoClass objects, looking for the
+      # specified name, or where a child has the specified name.
+      # This ensures that all interesting things (including event interfaces)
+      # are generated correctly.
+      oleItems = {}
+      vtableItems = {}
+      infos = self.CollectOleItemInfosFromType()
+      found = 0
+      for type_info_tuple in infos:
+        info, infotype, doc, attr = type_info_tuple
+        if infotype == pythoncom.TKIND_COCLASS:
+            coClassItem, child_infos = self._Build_CoClass(type_info_tuple)
+            found = build.MakePublicAttributeName(doc[0])==child
+            if not found:
+                # OK, check the child interfaces
+                for info, info_type, refType, doc, refAttr, flags in child_infos:
+                    if build.MakePublicAttributeName(doc[0]) == child:
+                        found = 1
+                        break
+            if found:
+                oleItems[coClassItem.clsid] = coClassItem
+                self._Build_CoClassChildren(coClassItem, child_infos, oleItems, vtableItems)
+                break
+      if not found:
+        # Doesn't appear in a class defn - look in the interface objects for it
+        for type_info_tuple in infos:
+          info, infotype, doc, attr = type_info_tuple
+          if infotype in [pythoncom.TKIND_INTERFACE, pythoncom.TKIND_DISPATCH]:
+            if build.MakePublicAttributeName(doc[0]) == child:
+              found = 1
+              oleItem, vtableItem = self._Build_Interface(type_info_tuple)
+              # NOTE(review): keyed by the *typelib* clsid from GetLibAttr,
+              # not oleItem.clsid - looks intentional (only one interface is
+              # ever stored on this path) but confirm before changing.
+              oleItems[clsid] = oleItem # Even "None" goes in here.
+              if vtableItem is not None:
+                vtableItems[clsid] = vtableItem
+
+      assert found, "Cant find the '%s' interface in the CoClasses, or the interfaces" % (child,)
+      # Make a map of iid: (dispitem, vtableitem) so both flavours of a
+      # given interface end up in the same generated file.
+      items = {}
+      for key, value in oleItems.items():
+          items[key] = (value,None)
+      for key, value in vtableItems.items():
+          existing = items.get(key, None)
+          if existing is not None:
+              new_val = existing[0], value
+          else:
+              new_val = None, value
+          items[key] = new_val
+
+      self.progress.SetDescription("Generating...", len(items))
+      for oleitem, vtableitem in items.values():
+        an_item = oleitem or vtableitem
+        assert not self.file, "already have a file?"
+        # One output file per item; self.file is the implicit output stream
+        # used by do_gen_child_item/WriteClass, so it must be reset each pass.
+        self.file = open(os.path.join(dir, an_item.python_name) + ".py", "w")
+        try:
+          if oleitem is not None:
+            self.do_gen_child_item(oleitem)
+          if vtableitem is not None:
+            self.do_gen_child_item(vtableitem)
+          self.progress.Tick()
+        finally:
+          self.file.close()
+          self.file = None
+    finally:
+      self.progress.Finished()
+
+  def do_gen_child_item(self, oleitem):
+    """Write one generated item (dispatch or vtable) to self.file.
+
+    Emits the standard file header, the item's class definition, and - if
+    the class was actually written - the CLSID registration line so the
+    runtime can map the CLSID back to the generated class.
+    """
+    moduleDoc = self.typelib.GetDocumentation(-1)
+    docDesc = ""
+    if moduleDoc[1]:
+      docDesc = moduleDoc[1]
+    self.progress.Starting(docDesc)
+    self.progress.SetDescription("Building definitions from type library...")
+    self.do_gen_file_header()
+    oleitem.WriteClass(self)
+    # bWritten is False for suppressed items (e.g. hidden); skip registration.
+    if oleitem.bWritten:
+        print >> self.file, 'win32com.client.CLSIDToClass.RegisterCLSID( "%s", %s )' % (oleitem.clsid, oleitem.python_name)
+
+  def checkWriteDispatchBaseClass(self):
+    if not self.bHaveWrittenDispatchBaseClass:
+      print >> self.file, "from win32com.client import DispatchBaseClass"
+      self.bHaveWrittenDispatchBaseClass = 1
+
+  def checkWriteCoClassBaseClass(self):
+    if not self.bHaveWrittenCoClassBaseClass:
+      print >> self.file, "from win32com.client import CoClassBaseClass"
+      self.bHaveWrittenCoClassBaseClass = 1
+
+  def checkWriteEventBaseClass(self):
+    # Not a base class as such...
+      if not self.bHaveWrittenEventBaseClass:
+        # Nothing to do any more!
+        self.bHaveWrittenEventBaseClass = 1
+
+if __name__=='__main__':
+  # genpy is a worker module with no command line of its own; point users
+  # at makepy, which is the public entry point.
+  print "This is a worker module.  Please use makepy to generate Python files."
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/makepy.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/makepy.py
new file mode 100644
index 0000000..c6ba059
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/makepy.py
@@ -0,0 +1,349 @@
+# Originally written by Curt Hagenlocher, and various bits
+# and pieces by Mark Hammond (and now Greg Stein has had
+# a go too :-)
+
+# Note that the main worker code has been moved to genpy.py
+# As this is normally run from the command line, it reparses the code each time.  
+# Now this is nothing more than the command line handler and public interface.
+
+# XXX - TO DO
+# XXX - Greg and Mark have some ideas for a revamp - just no
+#       time - if you want to help, contact us for details.
+#       Main idea is to drop the classes exported and move to a more
+#       traditional data driven model.
+
+"""Generate a .py file from an OLE TypeLibrary file.
+
+
+ This module is concerned only with the actual writing of
+ a .py file.  It draws on the @build@ module, which builds 
+ the knowledge of a COM interface.
+ 
+"""
+usageHelp = """ \
+Usage:
+
+  makepy.py [-h] [-x0|1] [-u] [-o filename] [-d] [typelib, ...]
+  
+  typelib -- A TLB, DLL, OCX, Description, or possibly something else.
+  -h    -- Do not generate hidden methods.
+  -u    -- Python 1.5 and earlier: Do not convert all Unicode objects to strings.
+           Python 1.6 and later: Do convert all Unicode objects to strings.
+  -o outputFile -- Generate to named file - dont generate to standard directory.
+  -i [typelib] -- Show info for specified typelib, or select the typelib if not specified.
+  -v    -- Verbose output
+  -q    -- Quiet output
+  -d    -- Generate the base code now and classes code on demand
+  
+Examples:
+  makepy.py
+    Present a list of type libraries.
+    
+  makepy.py "Microsoft Excel 8.0 Object Library"
+    Generate support for the typelibrary with the specified description
+    (in this case, MS Excel object model)
+
+"""
+
+import genpy, string, sys, os, types, pythoncom
+import selecttlb
+import gencache
+from win32com.client import NeedUnicodeConversions
+
+bForDemandDefault = 0 # Default value of bForDemand - toggle this to change the world - see also gencache.py
+
+error = "makepy.error" # Legacy string "exception" raised/caught alongside getopt.error in main().
+
+def usage():
+	sys.stderr.write (usageHelp)
+	sys.exit(2)
+
+def ShowInfo(spec):
+	"""Print identity and gencache usage hints for a typelib to stdout.
+
+	spec - a typelib description string, or None/"" to prompt the user with
+	       the typelib chooser dialog.
+	"""
+	if not spec:
+		# No spec given - let the user pick one interactively.
+		tlbSpec = selecttlb.SelectTlb(excludeFlags=selecttlb.FLAG_HIDDEN)
+		if tlbSpec is None:
+			return
+		try:
+			tlb = pythoncom.LoadRegTypeLib(tlbSpec.clsid, tlbSpec.major, tlbSpec.minor, tlbSpec.lcid)
+		except pythoncom.com_error: # May be badly registered.
+			sys.stderr.write("Warning - could not load registered typelib '%s'\n" % (tlbSpec.clsid))
+			tlb = None
+
+		infos = [(tlb, tlbSpec)]
+	else:
+		infos = GetTypeLibsForSpec(spec)
+	for (tlb, tlbSpec) in infos:
+		desc = tlbSpec.desc
+		if desc is None:
+			# Registry had no description - fall back to the typelib's own doc,
+			# or a placeholder when the lib itself could not be loaded.
+			if tlb is None:
+				desc = "<Could not load typelib %s>" % (tlbSpec.dll)
+			else:
+				desc = tlb.GetDocumentation(-1)[0]
+		print desc
+		print " %s, lcid=%s, major=%s, minor=%s" % (tlbSpec.clsid, tlbSpec.lcid, tlbSpec.major, tlbSpec.minor)
+		print " >>> # Use these commands in Python code to auto generate .py support"
+		print " >>> from win32com.client import gencache"
+		print " >>> gencache.EnsureModule('%s', %s, %s, %s)" % (tlbSpec.clsid, tlbSpec.lcid, tlbSpec.major, tlbSpec.minor)
+
+class SimpleProgress(genpy.GeneratorProgress):
+	"""A simple progress class prints its output to stderr
+	"""
+	def __init__(self, verboseLevel):
+		self.verboseLevel = verboseLevel
+	def Close(self):
+		pass
+	def Finished(self):
+		if self.verboseLevel>1:
+			sys.stderr.write("Generation complete..\n")
+	def SetDescription(self, desc, maxticks = None):
+		if self.verboseLevel:
+			sys.stderr.write(desc + "\n")
+	def Tick(self, desc = None):
+		pass
+
+	def VerboseProgress(self, desc, verboseLevel = 2):
+		if self.verboseLevel >= verboseLevel:
+			sys.stderr.write(desc + "\n")
+
+	def LogBeginGenerate(self, filename):
+		self.VerboseProgress("Generating to %s" % filename, 1)
+	
+	def LogWarning(self, desc):
+		self.VerboseProgress("WARNING: " + desc, 1)
+
+class GUIProgress(SimpleProgress):
+	"""Progress reporter that shows a Pythonwin threaded status dialog.
+
+	Falls back on SimpleProgress behaviour for the logging methods; only
+	usable inside an environment where win32ui/pywin are importable.
+	"""
+	def __init__(self, verboseLevel):
+		# Import some modules we need to we can trap failure now.
+		import win32ui, pywin
+		SimpleProgress.__init__(self, verboseLevel)
+		self.dialog = None
+
+	def Close(self):
+		if self.dialog is not None:
+			self.dialog.Close()
+			self.dialog = None
+
+	def Starting(self, tlb_desc):
+		# NOTE(review): Starting() is presumably defined on the
+		# genpy.GeneratorProgress base - it is not visible in this file.
+		SimpleProgress.Starting(self, tlb_desc)
+		if self.dialog is None:
+			from pywin.dialogs import status
+			self.dialog=status.ThreadedStatusProgressDialog(tlb_desc)
+		else:
+			self.dialog.SetTitle(tlb_desc)
+
+	def SetDescription(self, desc, maxticks = None):
+		self.dialog.SetText(desc)
+		if maxticks:
+			self.dialog.SetMaxTicks(maxticks)
+
+	def Tick(self, desc = None):
+		self.dialog.Tick()
+		if desc is not None:
+			self.dialog.SetText(desc)
+
+def GetTypeLibsForSpec(arg):
+	"""Given an argument on the command line (either a file name or a library description)
+	return a list of actual typelibs to use.
+
+	Returns a list of (PyITypeLib, TypelibSpec) tuples; exits the process
+	with status 1 if the argument cannot be resolved to any typelib.
+	"""
+	typelibs = []
+	try:
+		try:
+			# First assume it is a file name we can load directly.
+			tlb = pythoncom.LoadTypeLib(arg)
+			spec = selecttlb.TypelibSpec(None, 0,0,0)
+			spec.FromTypelib(tlb, arg)
+			typelibs.append((tlb, spec))
+		except pythoncom.com_error:
+			# See if it is a description
+			tlbs = selecttlb.FindTlbsWithDescription(arg)
+			if len(tlbs)==0:
+				print "Could not locate a type library matching '%s'" % (arg)
+			for spec in tlbs:
+				# Version numbers not always reliable if enumerated from registry.
+				# (as some libs use hex, other's dont.  Both examples from MS, of course.)
+				if spec.dll is None:
+					tlb = pythoncom.LoadRegTypeLib(spec.clsid, spec.major, spec.minor, spec.lcid)
+				else:
+					tlb = pythoncom.LoadTypeLib(spec.dll)
+
+				# We have a typelib, but it may not be exactly what we specified
+				# (due to automatic version matching of COM).  So we query what we really have!
+				attr = tlb.GetLibAttr()
+				spec.major = attr[3]
+				spec.minor = attr[4]
+				spec.lcid = attr[1]
+				typelibs.append((tlb, spec))
+		return typelibs
+	except pythoncom.com_error:
+		t,v,tb=sys.exc_info()
+		sys.stderr.write ("Unable to load type library from '%s' - %s\n" % (arg, v))
+		tb = None # Storing tb in a local is a cycle!
+		sys.exit(1)
+
+def GenerateFromTypeLibSpec(typelibInfo, file = None, verboseLevel = None, progressInstance = None, bUnicodeToString=NeedUnicodeConversions, bForDemand = bForDemandDefault, bBuildHidden = 1):
+	"""Generate Python support for one or more type libraries.
+
+	typelibInfo - a (clsid, lcid, major, minor) tuple, a TypelibSpec-like
+	              instance, a loaded typelib object, or a string spec
+	              (file name / description).
+	file        - an open file to write to; None writes to the gen_py dir.
+	bForDemand  - build a package whose children are generated on demand
+	              (incompatible with an explicit output file).
+	"""
+	if verboseLevel is None:
+		verboseLevel = 0 # By default, we use no gui and no verbose level!
+
+	if bForDemand and file is not None:
+		raise RuntimeError, "You can only perform a demand-build when the output goes to the gen_py directory"
+	if type(typelibInfo)==type(()):
+		# Tuple
+		typelibCLSID, lcid, major, minor  = typelibInfo
+		tlb = pythoncom.LoadRegTypeLib(typelibCLSID, major, minor, lcid)
+		spec = selecttlb.TypelibSpec(typelibCLSID, lcid, major, minor)
+		spec.FromTypelib(tlb, str(typelibCLSID))
+		typelibs = [(tlb, spec)]
+	elif type(typelibInfo)==types.InstanceType:
+		if typelibInfo.dll is None:
+			# Version numbers not always reliable if enumerated from registry.
+			tlb = pythoncom.LoadRegTypeLib(typelibInfo.clsid, typelibInfo.major, typelibInfo.minor, typelibInfo.lcid)
+		else:
+			tlb = pythoncom.LoadTypeLib(typelibInfo.dll)
+		typelibs = [(tlb, typelibInfo)]
+	elif hasattr(typelibInfo, "GetLibAttr"):
+		# A real typelib object!
+		tla = typelibInfo.GetLibAttr()
+		guid = tla[0]
+		lcid = tla[1]
+		major = tla[3]
+		minor = tla[4]
+		spec = selecttlb.TypelibSpec(guid, lcid, major, minor)
+		typelibs = [(typelibInfo, spec)]
+	else:
+		typelibs = GetTypeLibsForSpec(typelibInfo)
+
+	if progressInstance is None:
+		progressInstance = SimpleProgress(verboseLevel)
+	progress = progressInstance
+
+	bToGenDir = (file is None)
+
+	for typelib, info in typelibs:
+		if file is None:
+			this_name = gencache.GetGeneratedFileName(info.clsid, info.lcid, info.major, info.minor)
+			full_name = os.path.join(gencache.GetGeneratePath(), this_name)
+			if bForDemand:
+				# Demand builds write a package dir; remove any stale flat
+				# module of the same name (and its byte-compiled forms).
+				try: os.unlink(full_name + ".py")
+				except os.error: pass
+				try: os.unlink(full_name + ".pyc")
+				except os.error: pass
+				try: os.unlink(full_name + ".pyo")
+				except os.error: pass
+				if not os.path.isdir(full_name):
+					os.mkdir(full_name)
+				outputName = os.path.join(full_name, "__init__.py")
+			else:
+				outputName = full_name + ".py"
+			fileUse = open(outputName, "wt")
+			progress.LogBeginGenerate(outputName)
+		else:
+			fileUse = file
+
+		gen = genpy.Generator(typelib, info.dll, progress, bUnicodeToString=bUnicodeToString, bBuildHidden=bBuildHidden)
+
+		gen.generate(fileUse, bForDemand)
+
+		# Only close files we opened ourselves - the caller owns 'file'.
+		if file is None:
+			fileUse.close()
+
+		if bToGenDir:
+			progress.SetDescription("Importing module")
+			gencache.AddModuleToCache(info.clsid, info.lcid, info.major, info.minor)
+
+	progress.Close()
+
+def GenerateChildFromTypeLibSpec(child, typelibInfo, verboseLevel = None, progressInstance = None, bUnicodeToString=NeedUnicodeConversions):
+	"""Generate a single child module of a demand-built gen_py package.
+
+	child       - the public name of the coclass/interface to generate.
+	typelibInfo - a (clsid, lcid, major, minor) tuple or a loaded typelib.
+	The generated module is imported at the end to validate it.
+	"""
+	if verboseLevel is None:
+		verboseLevel = 0 # By default, we use no gui, and no verbose level for the children.
+	if type(typelibInfo)==type(()):
+		typelibCLSID, lcid, major, minor  = typelibInfo
+		tlb = pythoncom.LoadRegTypeLib(typelibCLSID, major, minor, lcid)
+	else:
+		tlb = typelibInfo
+		tla = typelibInfo.GetLibAttr()
+		typelibCLSID = tla[0]
+		lcid = tla[1]
+		major = tla[3]
+		minor = tla[4]
+	spec = selecttlb.TypelibSpec(typelibCLSID, lcid, major, minor)
+	spec.FromTypelib(tlb, str(typelibCLSID))
+	typelibs = [(tlb, spec)]
+
+	if progressInstance is None:
+		progressInstance = SimpleProgress(verboseLevel)
+	progress = progressInstance
+
+	for typelib, info in typelibs:
+		# Children live inside the package directory created by the parent
+		# demand-build (gen_py/<guid>x<lcid>x<major>x<minor>/).
+		dir_name = gencache.GetGeneratedFileName(info.clsid, info.lcid, info.major, info.minor)
+		dir_path_name = os.path.join(gencache.GetGeneratePath(), dir_name)
+		progress.LogBeginGenerate(dir_path_name)
+
+		gen = genpy.Generator(typelib, info.dll, progress, bUnicodeToString=bUnicodeToString)
+		gen.generate_child(child, dir_path_name)
+		progress.SetDescription("Importing module")
+		__import__("win32com.gen_py." + dir_name + "." + child)
+	progress.Close()
+
+def main():
+	"""Command-line entry point - parse options and generate support files.
+
+	Returns 0 (or falls off the end returning None) on success; may call
+	sys.exit directly for usage errors or a cancelled typelib selection.
+	"""
+	import getopt
+	hiddenSpec = 1
+	bUnicodeToString = NeedUnicodeConversions
+	outputName = None
+	verboseLevel = 1
+	doit = 1
+	bForDemand = bForDemandDefault
+	try:
+		opts, args = getopt.getopt(sys.argv[1:], 'vo:huiqd')
+		for o,v in opts:
+			if o=='-h':
+				hiddenSpec = 0
+			elif o=='-u':
+				bUnicodeToString = not NeedUnicodeConversions
+			elif o=='-o':
+				outputName = v
+			elif o=='-v':
+				verboseLevel = verboseLevel + 1
+			elif o=='-q':
+				verboseLevel = verboseLevel - 1
+			elif o=='-i':
+				# Info-only mode: show details for the named (or selected)
+				# typelibs, then skip generation entirely.
+				if len(args)==0:
+					ShowInfo(None)
+				else:
+					for arg in args:
+						ShowInfo(arg)
+				doit = 0
+			elif o=='-d':
+				bForDemand = not bForDemand
+
+	except (getopt.error, error), msg:
+		sys.stderr.write (str(msg) + "\n")
+		usage()
+
+	if bForDemand and outputName is not None:
+		sys.stderr.write("Can not use -d and -o together\n")
+		usage()
+
+	if not doit:
+		return 0
+	if len(args)==0:
+		# No typelib named - prompt with the selection dialog.
+		rc = selecttlb.SelectTlb()
+		if rc is None:
+			sys.exit(1)
+		args = [ rc ]
+
+	if outputName is not None:
+		f = open(outputName, "w")
+	else:
+		f = None
+
+	for arg in args:
+		GenerateFromTypeLibSpec(arg, f, verboseLevel = verboseLevel, bForDemand = bForDemand, bBuildHidden = hiddenSpec)
+
+	if f:
+		f.close()
+
+
+if __name__=='__main__':
+	# Propagate a truthy return code from main(); otherwise exit cleanly.
+	rc = main()
+	if rc:
+		sys.exit(rc)
+	sys.exit(0)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/selecttlb.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/selecttlb.py
new file mode 100644
index 0000000..1263a12
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/selecttlb.py
@@ -0,0 +1,150 @@
+"""Utilities for selecting and enumerating the Type Libraries installed on the system
+"""
+
+import win32api, win32con, string, pythoncom
+
+class TypelibSpec:
+	def __init__(self, clsid, lcid, major, minor, flags=0):
+		self.clsid = str(clsid)
+		self.lcid = int(lcid)
+		self.major = int(major)
+		self.minor = int(minor)
+		self.dll = None
+		self.desc = None
+		self.ver_desc = None
+		self.flags = flags
+	# For the SelectList
+	def __getitem__(self, item):
+		if item==0:
+			return self.ver_desc
+		raise IndexError, "Cant index me!"
+	def __cmp__(self, other):
+		rc = cmp(string.lower(self.ver_desc or ""), string.lower(other.ver_desc or ""))
+		if rc==0:
+			rc = cmp(string.lower(self.desc), string.lower(other.desc))
+		if rc==0:
+			rc = cmp(self.major, other.major)
+		if rc==0:
+			rc = cmp(self.major, other.minor)
+		return rc
+
+	def Resolve(self):
+		if self.dll is None:
+			return 0
+		tlb = pythoncom.LoadTypeLib(self.dll)
+		self.FromTypelib(tlb, None)
+		return 1
+
+	def FromTypelib(self, typelib, dllName = None):
+		la = typelib.GetLibAttr()
+		self.clsid = str(la[0])
+		self.lcid = la[1]
+		self.major = la[3]
+		self.minor = la[4]
+		if dllName:
+			self.dll = dllName
+
+def EnumKeys(root):
+	index = 0
+	ret = []
+	while 1:
+		try:
+			item = win32api.RegEnumKey(root, index)
+		except win32api.error:
+			break
+		try:
+			# Note this doesn't handle REG_EXPAND_SZ, but the implementation
+			# here doesn't need to - that is handled as the data is read.
+			val = win32api.RegQueryValue(root, item)
+		except win32api.error:
+			val = "" # code using this assumes a string.
+			
+		ret.append((item, val))
+		index = index + 1
+	return ret
+
+# LIBFLAG_* bits stored under a typelib's FLAGS registry value; used as
+# excludeFlags masks by EnumTlbs/SelectTlb.
+FLAG_RESTRICTED=1
+FLAG_CONTROL=2
+FLAG_HIDDEN=4
+
+def EnumTlbs(excludeFlags = 0):
+	"""Return a list of TypelibSpec objects, one for each registered library.
+
+	Walks HKCR\\Typelib\\{iid}\\{version}\\{lcid}\\win32; libraries whose
+	FLAGS value matches any bit in excludeFlags are skipped.
+	"""
+	key = win32api.RegOpenKey(win32con.HKEY_CLASSES_ROOT, "Typelib")
+	iids = EnumKeys(key)
+	results = []
+	for iid, crap in iids:
+		try:
+			key2 = win32api.RegOpenKey(key, str(iid))
+		except win32api.error:
+			# A few good reasons for this, including "access denied".
+			continue
+		for version, tlbdesc in EnumKeys(key2):
+			# Version keys are "major.minor"; tolerate a bare "major".
+			major_minor = string.split(version, '.', 1)
+			if len(major_minor) < 2:
+				major_minor.append('0')
+			try:
+				# For some reason, this code used to assume the values were hex.
+				# This seems to not be true - particularly for CDO 1.21
+				# *sigh* - it appears there are no rules here at all, so when we need
+				# to know the info, we must load the tlb by filename and request it.
+				# The Resolve() method on the TypelibSpec does this.
+				major = int(major_minor[0])
+				minor = int(major_minor[1])
+			except ValueError: # crap in the registry!
+				continue
+
+			key3 = win32api.RegOpenKey(key2, str(version))
+			try:
+				# The "FLAGS" are at this point
+				flags = int(win32api.RegQueryValue(key3, "FLAGS"))
+			except (win32api.error, ValueError):
+				flags = 0
+			# Parses as (flags & excludeFlags) == 0 - i.e. include only libs
+			# with none of the excluded flag bits set.
+			if flags & excludeFlags==0:
+				for lcid, crap in EnumKeys(key3):
+					try:
+						lcid = int(lcid)
+					except ValueError: # not an LCID entry
+						continue
+					# Only care about "{lcid}\win32" key - jump straight there.
+					try:
+						key4 = win32api.RegOpenKey(key3, "%s\\win32" % (lcid,))
+					except win32api.error:
+						continue
+					try:
+						dll, typ = win32api.RegQueryValueEx(key4, None)
+						if typ==win32con.REG_EXPAND_SZ:
+							dll = win32api.ExpandEnvironmentStrings(dll)
+					except win32api.error:
+						dll = None
+					spec = TypelibSpec(iid, lcid, major, minor, flags)
+					spec.dll = dll
+					spec.desc = tlbdesc
+					spec.ver_desc = tlbdesc + " (" + version + ")"
+					results.append(spec)
+	return results
+
+def FindTlbsWithDescription(desc):
+	"""Find all installed type libraries with the specified description
+	"""
+	ret = []
+	items = EnumTlbs()
+	for item in items:
+		if item.desc==desc:
+			ret.append(item)
+	return ret
+
+def SelectTlb(title="Select Library", excludeFlags = 0):
+	"""Display a list of all the type libraries, and select one.   Returns None if cancelled
+	"""
+	# Imported lazily - only usable inside a Pythonwin environment.
+	import pywin.dialogs.list
+	items = EnumTlbs(excludeFlags)
+	items.sort()
+	rc = pywin.dialogs.list.SelectFromLists(title, items, ["Type Library"])
+	if rc is None:
+		return None
+	# rc is the selected index into the sorted list.
+	return items[rc]
+
+# Test code - interactively pick a typelib and dump the chosen spec.
+if __name__=='__main__':
+	print SelectTlb().__dict__
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/tlbrowse.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/tlbrowse.py
new file mode 100644
index 0000000..66448e8c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/tlbrowse.py
@@ -0,0 +1,242 @@
+import win32ui
+import win32con
+import win32api
+import string
+import commctrl
+import pythoncom
+from pywin.mfc import dialog
+
+error = "TypeLib browser internal error" # String "exception" raised by _GetRealMemberPos.
+
+# Common Win32 style combinations used when building the dialog template.
+FRAMEDLG_STD = win32con.WS_CAPTION | win32con.WS_SYSMENU
+SS_STD = win32con.WS_CHILD | win32con.WS_VISIBLE
+BS_STD = SS_STD  | win32con.WS_TABSTOP
+ES_STD = BS_STD | win32con.WS_BORDER
+LBS_STD = ES_STD | win32con.LBS_NOTIFY | win32con.LBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL
+CBS_STD = ES_STD | win32con.CBS_NOINTEGRALHEIGHT | win32con.WS_VSCROLL
+
+# Human-readable names for the pythoncom TKIND_* type-info kinds.
+typekindmap = {
+	pythoncom.TKIND_ENUM : 'Enumeration',
+	pythoncom.TKIND_RECORD : 'Record',
+	pythoncom.TKIND_MODULE : 'Module',
+	pythoncom.TKIND_INTERFACE : 'Interface',
+	pythoncom.TKIND_DISPATCH : 'Dispatch',
+	pythoncom.TKIND_COCLASS : 'CoClass',
+	pythoncom.TKIND_ALIAS : 'Alias',
+	pythoncom.TKIND_UNION : 'Union'
+}
+
+# Indirection so the base class can be swapped in one place.
+TypeBrowseDialog_Parent=dialog.Dialog
+class TypeBrowseDialog(TypeBrowseDialog_Parent):
+	"""Browse a type library.
+
+	An MFC dialog with three list boxes (types, members, parameters) and a
+	details list view.  Selecting a type fills the member list; selecting a
+	member fills the parameter list and the details view.
+	"""
+
+	# Dialog control IDs used by GetTemplate and the GetDlgItem calls below.
+	IDC_TYPELIST = 1000
+	IDC_MEMBERLIST = 1001
+	IDC_PARAMLIST = 1002
+	IDC_LISTVIEW = 1003
+
+	def __init__(self, typefile = None):
+		TypeBrowseDialog_Parent.__init__(self, self.GetTemplate())
+		try:
+			if typefile:
+				self.tlb = pythoncom.LoadTypeLib(typefile)
+			else:
+				self.tlb = None
+		except pythoncom.ole_error:
+			self.MessageBox("The file does not contain type information")
+			self.tlb = None
+		self.HookCommand(self.CmdTypeListbox, self.IDC_TYPELIST)
+		self.HookCommand(self.CmdMemberListbox, self.IDC_MEMBERLIST)
+
+	def OnAttachedObjectDeath(self):
+		# Drop all COM references when the underlying window dies.
+		self.tlb = None
+		self.typeinfo = None
+		self.attr = None
+		return TypeBrowseDialog_Parent.OnAttachedObjectDeath(self)
+
+	def _SetupMenu(self):
+		# Build a File menu with Open/Close and hook the Open handler.
+		menu = win32ui.CreateMenu()
+		flags=win32con.MF_STRING|win32con.MF_ENABLED
+		menu.AppendMenu(flags, win32ui.ID_FILE_OPEN, "&Open...")
+		menu.AppendMenu(flags, win32con.IDCANCEL, "&Close")
+		mainMenu = win32ui.CreateMenu()
+		mainMenu.AppendMenu(flags|win32con.MF_POPUP, menu.GetHandle(), "&File")
+		self.SetMenu(mainMenu)
+		self.HookCommand(self.OnFileOpen,win32ui.ID_FILE_OPEN)
+
+	def OnFileOpen(self, id, code):
+		"""Prompt for a typelib file and (re)load the browser from it."""
+		openFlags = win32con.OFN_OVERWRITEPROMPT | win32con.OFN_FILEMUSTEXIST
+		fspec = "Type Libraries (*.tlb, *.olb)|*.tlb;*.olb|OCX Files (*.ocx)|*.ocx|DLL's (*.dll)|*.dll|All Files (*.*)|*.*||"
+		dlg = win32ui.CreateFileDialog(1, None, None, openFlags, fspec)
+		if dlg.DoModal() == win32con.IDOK:
+			try:
+				self.tlb = pythoncom.LoadTypeLib(dlg.GetPathName())
+			except pythoncom.ole_error:
+				self.MessageBox("The file does not contain type information")
+				self.tlb = None
+			self._SetupTLB()
+
+	def OnInitDialog(self):
+		self._SetupMenu()
+		self.typelb = self.GetDlgItem(self.IDC_TYPELIST)
+		self.memberlb = self.GetDlgItem(self.IDC_MEMBERLIST)
+		self.paramlb = self.GetDlgItem(self.IDC_PARAMLIST)
+		self.listview = self.GetDlgItem(self.IDC_LISTVIEW)
+
+		# Setup the listview columns
+		itemDetails = (commctrl.LVCFMT_LEFT, 100, "Item", 0)
+		self.listview.InsertColumn(0, itemDetails)
+		itemDetails = (commctrl.LVCFMT_LEFT, 1024, "Details", 0)
+		self.listview.InsertColumn(1, itemDetails)
+
+		if self.tlb is None:
+			# No typelib supplied at construction - ask for one now.
+			self.OnFileOpen(None,None)
+		else:
+			self._SetupTLB()
+		return TypeBrowseDialog_Parent.OnInitDialog(self)
+
+	def _SetupTLB(self):
+		"""Reset all lists and fill the type list from the loaded typelib."""
+		self.typelb.ResetContent()
+		self.memberlb.ResetContent()
+		self.paramlb.ResetContent()
+		self.typeinfo = None
+		self.attr = None
+		if self.tlb is None: return
+		n = self.tlb.GetTypeInfoCount()
+		for i in range(n):
+			self.typelb.AddString(self.tlb.GetDocumentation(i)[0])
+
+	def _SetListviewTextItems(self, items):
+		# items is a sequence of (name, detail-or-None) pairs.
+		self.listview.DeleteAllItems()
+		index = -1
+		for item in items:
+			index = self.listview.InsertItem(index+1,item[0])
+			data = item[1]
+			if data is None: data = ""
+			self.listview.SetItemText(index, 1, data)
+
+	def SetupAllInfoTypes(self):
+		infos = self._GetMainInfoTypes() + self._GetMethodInfoTypes()
+		self._SetListviewTextItems(infos)
+
+	def _GetMainInfoTypes(self):
+		"""Build the (name, value) detail rows for the selected type."""
+		pos = self.typelb.GetCurSel()
+		if pos<0: return []
+		docinfo = self.tlb.GetDocumentation(pos)
+		infos = [('GUID', str(self.attr[0]))]
+		infos.append(('Help File', docinfo[3]))
+		infos.append(('Help Context', str(docinfo[2])))
+		try:
+			infos.append(('Type Kind', typekindmap[self.tlb.GetTypeInfoType(pos)]))
+		except:
+			pass
+
+		info = self.tlb.GetTypeInfo(pos)
+		attr = info.GetTypeAttr()
+		infos.append(('Attributes', str(attr)))
+
+		# attr[8] is the count of implemented types; list each one.
+		for j in range(attr[8]):
+			flags = info.GetImplTypeFlags(j)
+			refInfo = info.GetRefTypeInfo(info.GetRefTypeOfImplType(j))
+			doc = refInfo.GetDocumentation(-1)
+			attr = refInfo.GetTypeAttr()
+			typeKind = attr[5]
+			typeFlags = attr[11]
+
+			desc = doc[0]
+			desc = desc + ", Flags=0x%x, typeKind=0x%x, typeFlags=0x%x" % (flags, typeKind, typeFlags)
+			if flags & pythoncom.IMPLTYPEFLAG_FSOURCE:
+				desc = desc + "(Source)"
+			infos.append( ('Implements', desc))
+
+		return infos
+
+	def _GetMethodInfoTypes(self):
+		"""Build the (name, value) detail rows for the selected member."""
+		pos = self.memberlb.GetCurSel()
+		if pos<0: return []
+
+		realPos, isMethod = self._GetRealMemberPos(pos)
+		ret = []
+		if isMethod:
+			funcDesc = self.typeinfo.GetFuncDesc(realPos)
+			id = funcDesc[0]
+			ret.append(("Func Desc", str(funcDesc)))
+		else:
+			id = self.typeinfo.GetVarDesc(realPos)[0]
+
+		docinfo = self.typeinfo.GetDocumentation(id)
+		ret.append(('Help String', docinfo[1]))
+		ret.append(('Help Context', str(docinfo[2])))
+		return ret
+
+	def CmdTypeListbox(self, id, code):
+		"""Selection changed in the type list - refill the member list."""
+		if code == win32con.LBN_SELCHANGE:
+			pos = self.typelb.GetCurSel()
+			if pos >= 0:
+				self.memberlb.ResetContent()
+				self.typeinfo = self.tlb.GetTypeInfo(pos)
+				self.attr = self.typeinfo.GetTypeAttr()
+				# Variables first (attr[7] of them), then functions (attr[6]).
+				for i in range(self.attr[7]):
+					id = self.typeinfo.GetVarDesc(i)[0]
+					self.memberlb.AddString(self.typeinfo.GetNames(id)[0])
+				for i in range(self.attr[6]):
+					id = self.typeinfo.GetFuncDesc(i)[0]
+					self.memberlb.AddString(self.typeinfo.GetNames(id)[0])
+				self.SetupAllInfoTypes()
+			return 1
+
+	def _GetRealMemberPos(self, pos):
+		"""Map a member-list index to (index, is_method).
+
+		Variables occupy the first attr[7] slots, functions the rest.
+		"""
+		pos = self.memberlb.GetCurSel()
+		if pos >= self.attr[7]:
+			return pos - self.attr[7], 1
+		elif pos >= 0:
+			return pos, 0
+		else:
+			raise error, "The position is not valid"
+
+	def CmdMemberListbox(self, id, code):
+		"""Selection changed in the member list - refill the parameter list."""
+		if code == win32con.LBN_SELCHANGE:
+			self.paramlb.ResetContent()
+			pos = self.memberlb.GetCurSel()
+			realPos, isMethod = self._GetRealMemberPos(pos)
+			if isMethod:
+				id = self.typeinfo.GetFuncDesc(realPos)[0]
+				names = self.typeinfo.GetNames(id)
+				# names[0] is the method name itself; the rest are parameters.
+				for i in range(len(names)):
+					if i > 0:
+						self.paramlb.AddString(names[i])
+			self.SetupAllInfoTypes()
+			return 1
+
+	def GetTemplate(self):
+		"Return the template used to create this dialog"
+
+		w = 272  # Dialog width
+		h = 192  # Dialog height
+		style = FRAMEDLG_STD | win32con.WS_VISIBLE | win32con.DS_SETFONT | win32con.WS_MINIMIZEBOX
+		template = [['Type Library Browser', (0, 0, w, h), style, None, (8, 'Helv')], ]
+		template.append([130, "&Type", -1, (10, 10, 62, 9), SS_STD | win32con.SS_LEFT])
+		template.append([131, None, self.IDC_TYPELIST, (10, 20, 80, 80), LBS_STD])
+		template.append([130, "&Members", -1, (100, 10, 62, 9), SS_STD | win32con.SS_LEFT])
+		template.append([131, None, self.IDC_MEMBERLIST, (100, 20, 80, 80), LBS_STD])
+		template.append([130, "&Parameters", -1, (190, 10, 62, 9), SS_STD | win32con.SS_LEFT])
+		template.append([131, None, self.IDC_PARAMLIST, (190, 20, 75, 80), LBS_STD])
+
+		lvStyle = SS_STD | commctrl.LVS_REPORT | commctrl.LVS_AUTOARRANGE | commctrl.LVS_ALIGNLEFT | win32con.WS_BORDER | win32con.WS_TABSTOP
+		template.append(["SysListView32", "", self.IDC_LISTVIEW, (10, 110, 255, 65), lvStyle])
+
+		return template
+
+if __name__=='__main__':
+	import sys
+	fname = None
+	try:
+		fname = sys.argv[1]
+	except:
+		pass
+	dlg = TypeBrowseDialog(fname)
+	try:
+		# GetConsoleTitle succeeds only when run from a console - use a
+		# modal dialog there; inside Pythonwin create a modeless window.
+		win32api.GetConsoleTitle()
+		dlg.DoModal()
+	except:
+		dlg.CreateWindow(win32ui.GetMainFrame())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/util.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/util.py
new file mode 100644
index 0000000..3e1b92a1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/client/util.py
@@ -0,0 +1,83 @@
+"""General client side utilities.
+
+This module contains utility functions, used primarily by advanced COM
+programmers, or other COM modules.
+"""
+import pythoncom
+from win32com.client import Dispatch, _get_good_object_
+
+PyIDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch]
+
def WrapEnum(ob, resultCLSID = None):
	"""Wrap an object in a VARIANT enumerator.

	Any VT_DISPATCH value produced by the enumerator is converted to a
	wrapper object (either a generated class instance or a
	dynamic.Dispatch object).
	"""
	# Accept either a PyIEnumVARIANT directly, or anything that can be
	# QueryInterface'd for one.
	if type(ob) is not pythoncom.TypeIIDs[pythoncom.IID_IEnumVARIANT]:
		ob = ob.QueryInterface(pythoncom.IID_IEnumVARIANT)
	return EnumVARIANT(ob, resultCLSID)
+
class Enumerator:
	"""A class that provides indexed access into an Enumerator

	By wrapping a PyIEnum* object in this class, you can perform
	natural looping and indexing into the Enumerator.

	Looping is very efficient, but it should be noted that although random
	access is supported, the underlying object is still an enumerator, so
	this will force many reset-and-seek operations to find the requested index.
	"""
	def __init__(self, enum):
		self._oleobj_ = enum # a PyIEnumVARIANT
		self.index = -1 # last index fetched in sequence; -1 means "at start"
	def __getitem__(self, index):
		return self.__GetIndex(index)
	def __call__(self, index):
		return self.__GetIndex(index)

	def __GetIndex(self, index):
		# Fetch the item at 'index', seeking only when out of sequence.
		if not isinstance(index, int):
			raise TypeError("Only integer indexes are supported for enumerators")
		# NOTE
		# In this context, self.index is used purely as a flag to say
		# "am I still in sequence".  The user may call Next() or Reset() if they
		# so choose, in which case self.index will not be correct (although we
		# still want to stay in sequence)
		if index != self.index + 1:
			# Index requested out of sequence.
			self._oleobj_.Reset()
			if index: self._oleobj_.Skip(index) # if asked for item 1, must skip 1, Python always zero based.
		self.index = index
		result = self._oleobj_.Next(1)
		if len(result):
			return self._make_retval_(result[0])
		raise IndexError("list index out of range")
	def Next(self, count=1):
		"""Fetch up to 'count' items, converting each via _make_retval_."""
		return tuple([self._make_retval_(r) for r in self._oleobj_.Next(count)])
	def Reset(self):
		return self._oleobj_.Reset()
	def Clone(self):
		# Subclasses such as EnumVARIANT take (enum, resultCLSID); the base
		# class constructor takes only the enumerator and has no resultCLSID
		# attribute, so previously Clone() raised AttributeError/TypeError
		# on the base class - handle both shapes.
		new_enum = self._oleobj_.Clone()
		if hasattr(self, 'resultCLSID'):
			return self.__class__(new_enum, self.resultCLSID)
		return self.__class__(new_enum)
	def _make_retval_(self, result):
		# Hook for subclasses to convert raw enumerator values.
		return result
+
class EnumVARIANT(Enumerator):
	"""An Enumerator over VT_VARIANT values, wrapping each result object."""
	def __init__(self, enum, resultCLSID = None):
		Enumerator.__init__(self, enum)
		self.resultCLSID = resultCLSID
	def _make_retval_(self, result):
		# Convert the raw VARIANT into a friendly (possibly wrapped) object.
		return _get_good_object_(result, resultCLSID = self.resultCLSID)
+
class Iterator:
	# Adapts a COM enumerable object to the Python 2.x iterator protocol,
	# so a COM collection can be used directly in a 'for' loop.
	def __init__(self, enum):
		# QI for IEnumVARIANT; iter() on the PyIEnumVARIANT gives the
		# native pythoncom iterator over its VARIANTs.
		self._iter_ = iter(enum.QueryInterface(pythoncom.IID_IEnumVARIANT))
	def __iter__(self):
		return self
	def next(self):
		# Python 2 iterator protocol ('next', not '__next__').  Each raw
		# value is converted to a friendly wrapped object; StopIteration
		# propagates from the underlying iterator.
		return _get_good_object_(self._iter_.next())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/decimal_23.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/decimal_23.py
new file mode 100644
index 0000000..c31411b2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/decimal_23.py
@@ -0,0 +1,3047 @@
+# <win32com>
+# This is a clone of Python 2.4's 'decimal' module.  It will only be used when
+# 'import decimal' fails - so is likely to be used in Python 2.3.
+# </win32com>
+# Copyright (c) 2004 Python Software Foundation.
+# All rights reserved.
+
+# Written by Eric Price <eprice at tjhsst.edu>
+#    and Facundo Batista <facundo at taniquetil.com.ar>
+#    and Raymond Hettinger <python at rcn.com>
+#    and Aahz <aahz at pobox.com>
+#    and Tim Peters
+
+# This module is currently Py2.3 compatible and should be kept that way
+# unless a major compelling advantage arises.  IOW, 2.3 compatibility is
+# strongly preferred, but not guaranteed.
+
+# Also, this module should be kept in sync with the latest updates of
+# the IBM specification as it evolves.  Those updates will be treated
+# as bug fixes (deviation from the spec is a compatibility, usability
+# bug) and will be backported.  At this point the spec is stabilizing
+# and the updates are becoming fewer, smaller, and less significant.
+
+"""
+This is a Py2.3 implementation of decimal floating point arithmetic based on
+the General Decimal Arithmetic Specification:
+
+    www2.hursley.ibm.com/decimal/decarith.html
+
+and IEEE standard 854-1987:
+
+    www.cs.berkeley.edu/~ejr/projects/754/private/drafts/854-1987/dir.html
+
+Decimal floating point has finite precision with arbitrarily large bounds.
+
+The purpose of the module is to support arithmetic using familiar
"schoolhouse" rules and to avoid some of the tricky representation
+issues associated with binary floating point.  The package is especially
+useful for financial applications or for contexts where users have
+expectations that are at odds with binary floating point (for instance,
+in binary floating point, 1.00 % 0.1 gives 0.09999999999999995 instead
+of the expected Decimal("0.00") returned by decimal floating point).
+
+Here are some examples of using the decimal module:
+
+>>> from decimal import *
+>>> setcontext(ExtendedContext)
+>>> Decimal(0)
+Decimal("0")
+>>> Decimal("1")
+Decimal("1")
+>>> Decimal("-.0123")
+Decimal("-0.0123")
+>>> Decimal(123456)
+Decimal("123456")
+>>> Decimal("123.45e12345678901234567890")
+Decimal("1.2345E+12345678901234567892")
+>>> Decimal("1.33") + Decimal("1.27")
+Decimal("2.60")
+>>> Decimal("12.34") + Decimal("3.87") - Decimal("18.41")
+Decimal("-2.20")
+>>> dig = Decimal(1)
+>>> print dig / Decimal(3)
+0.333333333
+>>> getcontext().prec = 18
+>>> print dig / Decimal(3)
+0.333333333333333333
+>>> print dig.sqrt()
+1
+>>> print Decimal(3).sqrt()
+1.73205080756887729
+>>> print Decimal(3) ** 123
+4.85192780976896427E+58
+>>> inf = Decimal(1) / Decimal(0)
+>>> print inf
+Infinity
+>>> neginf = Decimal(-1) / Decimal(0)
+>>> print neginf
+-Infinity
+>>> print neginf + inf
+NaN
+>>> print neginf * inf
+-Infinity
+>>> print dig / 0
+Infinity
+>>> getcontext().traps[DivisionByZero] = 1
+>>> print dig / 0
+Traceback (most recent call last):
+  ...
+  ...
+  ...
+DivisionByZero: x / 0
+>>> c = Context()
+>>> c.traps[InvalidOperation] = 0
+>>> print c.flags[InvalidOperation]
+0
+>>> c.divide(Decimal(0), Decimal(0))
+Decimal("NaN")
+>>> c.traps[InvalidOperation] = 1
+>>> print c.flags[InvalidOperation]
+1
+>>> c.flags[InvalidOperation] = 0
+>>> print c.flags[InvalidOperation]
+0
+>>> print c.divide(Decimal(0), Decimal(0))
+Traceback (most recent call last):
+  ...
+  ...
+  ...
+InvalidOperation: 0 / 0
+>>> print c.flags[InvalidOperation]
+1
+>>> c.flags[InvalidOperation] = 0
+>>> c.traps[InvalidOperation] = 0
+>>> print c.divide(Decimal(0), Decimal(0))
+NaN
+>>> print c.flags[InvalidOperation]
+1
+>>>
+"""
+
__all__ = [
    # Two major classes
    'Decimal', 'Context',

    # Contexts
    'DefaultContext', 'BasicContext', 'ExtendedContext',

    # Exceptions
    'DecimalException', 'Clamped', 'InvalidOperation', 'DivisionByZero',
    'Inexact', 'Rounded', 'Subnormal', 'Overflow', 'Underflow',

    # Constants for use in setting up contexts
    'ROUND_DOWN', 'ROUND_HALF_UP', 'ROUND_HALF_EVEN', 'ROUND_CEILING',
    'ROUND_FLOOR', 'ROUND_UP', 'ROUND_HALF_DOWN',

    # Functions for manipulating contexts
    'setcontext', 'getcontext'
]

import copy

# Rounding modes (string constants; each value is its own name, so reprs
# and error messages stay readable)
ROUND_DOWN = 'ROUND_DOWN'
ROUND_HALF_UP = 'ROUND_HALF_UP'
ROUND_HALF_EVEN = 'ROUND_HALF_EVEN'
ROUND_CEILING = 'ROUND_CEILING'
ROUND_FLOOR = 'ROUND_FLOOR'
ROUND_UP = 'ROUND_UP'
ROUND_HALF_DOWN = 'ROUND_HALF_DOWN'

# Rounding decision (not part of the public API)
NEVER_ROUND = 'NEVER_ROUND'    # Round in division (non-divmod), sqrt ONLY
ALWAYS_ROUND = 'ALWAYS_ROUND'  # Every operation rounds at end.
+
+#Errors
+
class DecimalException(ArithmeticError):
    """Root of the decimal-module exception hierarchy.

    All signal/condition classes in this module derive from this class.
    A class deriving from several other signals (e.g. Underflow, which
    derives from Inexact, Rounded and Subnormal) indicates it is raised
    only when all of those conditions apply; nothing actually relies on
    that, though.

    handle  -- invoked by context._raise_error when the corresponding
               trap enabler is set.  The first argument is the exception
               instance, the second the context; any extra arguments are
               those given after the explanation string in _raise_error
               (e.g. context._raise_error(NewError, '(-x)!', self._sign)
               results in NewError().handle(context, self._sign)).

    Defining a new condition should only require deriving from
    DecimalException.
    """
    def handle(self, context, *args):
        # Default: no replacement result is supplied.
        return None
+
+
class Clamped(DecimalException):
    """Exponent of a 0 changed to fit bounds.

    Signalled when the exponent of a result has been altered to fit the
    constraints of a concrete representation: either the exponent of a
    zero result would fall outside the representable bounds, or a large
    normal number's encoded exponent cannot be represented.  In the
    latter case the exponent is reduced to fit and the corresponding
    number of zero digits is appended to the coefficient ("fold-down").
    """
+
+
class InvalidOperation(DecimalException):
    """An invalid operation was performed.

    Raised for the various nonsensical operations, among them:

      * creation of a signaling NaN
      * -INF + INF
      * 0 * (+-)INF
      * (+-)INF / (+-)INF
      * x % 0   and   (+-)INF % x
      * x._rescale(non-integer)
      * sqrt(-x) for x > 0
      * 0 ** 0,  x ** non-integer,  x ** (+-)INF
      * an operand is otherwise invalid
    """
    def handle(self, context, *args):
        # args == (1, some_snan) means: quiet the sNaN but keep its
        # diagnostic information.
        if args and args[0] == 1:
            return Decimal((args[1]._sign, args[1]._int, 'n'))
        return NaN
+
class ConversionSyntax(InvalidOperation):
    """Trying to convert a badly formed string.

    Signalled (as invalid-operation) when a string being converted to a
    number does not conform to the numeric string syntax.  The result
    is [0,qNaN].
    """

    def handle(self, context, *args):
        # Returned as a raw (sign, digits, exp) triple rather than a
        # Decimal, because callers unpack it directly.
        return (0, (0,), 'n')
+
class DivisionByZero(DecimalException, ZeroDivisionError):
    """Division by 0.

    Signalled when a finite, non-zero dividend is divided by zero
    (during a divide or divide-integer operation, or a power operation
    with a negative right-hand operand).

    The result of the operation is [sign,inf]: sign is the exclusive-or
    of the operand signs for divide, or 1 for an odd power of -0.
    """

    def handle(self, context, sign, double = None, *args):
        ans = Infsign[sign]
        if double is not None:
            # divmod-style caller: both quotient and remainder overflow.
            return (ans, ans)
        return ans
+
class DivisionImpossible(InvalidOperation):
    """Cannot perform the division adequately.

    Signalled (as invalid-operation) when the integer result of a
    divide-integer or remainder operation has more digits than the
    current precision allows.  The result is [0,qNaN].
    """

    def handle(self, context, *args):
        # divmod result: both quotient and remainder become quiet NaNs.
        return (NaN, NaN)
+
class DivisionUndefined(InvalidOperation, ZeroDivisionError):
    """Undefined result of division.

    Signalled (as invalid-operation) when zero is divided by zero during
    a divide-integer, divide, or remainder operation.  The result is
    [0,qNaN].
    """

    def handle(self, context, tup=None, *args):
        if tup is None:
            return NaN
        return (NaN, NaN) # 0 % 0 and 0 // 0 need a (quotient, remainder) pair
+
class Inexact(DecimalException):
    """Had to round, losing information.

    Signalled whenever the result of an operation is not exact (it
    needed rounding and the discarded digits were non-zero), or when an
    overflow or underflow condition occurs.  The result itself is
    unchanged in all cases.

    Test or trap this signal to discover whether a given operation (or
    sequence of operations) was inexact.
    """
+
class InvalidContext(InvalidOperation):
    """Invalid context.  Unknown rounding, for example.

    Signalled (as invalid-operation) when an invalid context is detected
    during an operation - for example, an unchecked context whose
    precision exceeds the capability of the underlying representation,
    or whose rounding mode is unknown or unsupported.  These aspects
    need only be checked when the values are actually used.  The result
    is [0,qNaN].
    """

    def handle(self, context, *args):
        # Substitute a quiet NaN for the result.
        return NaN
+
class Rounded(DecimalException):
    """Number got rounded (not necessarily changed during rounding).

    Signalled whenever a result is rounded (zero or non-zero digits were
    discarded from the coefficient), or when an overflow or underflow
    condition occurs.  The result itself is unchanged in all cases.

    Test or trap this signal to discover whether a given operation (or
    sequence of operations) caused a loss of precision.
    """
+
class Subnormal(DecimalException):
    """Exponent < Emin before rounding.

    Signalled whenever the result of a conversion or operation is
    subnormal, i.e. its adjusted exponent is less than Emin before any
    rounding.  The result itself is unchanged in all cases.

    Test or trap this signal to discover whether a given operation (or
    sequence of operations) yielded a subnormal result.
    """
+
class Overflow(Inexact, Rounded):
    """Numerical overflow.

    This occurs and signals overflow if the adjusted exponent of a result
    (from a conversion or from an operation that is not an attempt to divide
    by zero), after rounding, would be greater than the largest value that
    can be handled by the implementation (the value Emax).

    The result depends on the rounding mode:

    For round-half-up and round-half-even (and for round-half-down and
    round-up, if implemented), the result of the operation is [sign,inf],
    where sign is the sign of the intermediate result. For round-down, the
    result is the largest finite number that can be represented in the
    current precision, with the sign of the intermediate result. For
    round-ceiling, the result is the same as for round-down if the sign of
    the intermediate result is 1, or is [0,inf] otherwise. For round-floor,
    the result is the same as for round-down if the sign of the intermediate
    result is 0, or is [1,inf] otherwise. In all cases, Inexact and Rounded
    will also be raised.
    """

    def handle(self, context, sign, *args):
        # Modes that round away from (or symmetrically around) zero
        # overflow straight to a signed infinity.
        if context.rounding in (ROUND_HALF_UP, ROUND_HALF_EVEN,
                                     ROUND_HALF_DOWN, ROUND_UP):
            return Infsign[sign]
        if sign == 0:
            if context.rounding == ROUND_CEILING:
                return Infsign[sign]
            # Clamp to the largest representable positive finite number:
            # prec nines at the maximum exponent.
            return Decimal((sign, (9,)*context.prec,
                            context.Emax-context.prec+1))
        if sign == 1:
            if context.rounding == ROUND_FLOOR:
                return Infsign[sign]
            # Clamp to the most negative representable finite number.
            return Decimal( (sign, (9,)*context.prec,
                             context.Emax-context.prec+1))
+
+
class Underflow(Inexact, Rounded, Subnormal):
    """Numerical underflow with result rounded to 0.

    Signalled when a result is both inexact and subnormal: its adjusted
    exponent would be smaller (more negative) than the smallest value
    the implementation can handle (Emin).

    The result after an underflow is a subnormal number rounded, if
    necessary, so that its exponent is not less than Etiny.  This may be
    0 with the sign of the intermediate result and an exponent of Etiny.

    Inexact, Rounded, and Subnormal are always signalled as well.
    """
+
# List of public traps and flags; the order is the order in which a
# Context's 'flags' and 'traps' dictionaries are built.
_signals = [Clamped, DivisionByZero, Inexact, Overflow, Rounded,
           Underflow, InvalidOperation, Subnormal]

# Map conditions (per the spec) to signals: the internal condition
# classes all surface to users as InvalidOperation.
_condition_map = {ConversionSyntax:InvalidOperation,
                  DivisionImpossible:InvalidOperation,
                  DivisionUndefined:InvalidOperation,
                  InvalidContext:InvalidOperation}
+
##### Context Functions #######################################

# The getcontext() and setcontext() function manage access to a thread-local
# current context.  Py2.4 offers direct support for thread locals.  If that
# is not available, use threading.currentThread() which is slower but will
# work for older Pythons.  If threads are not part of the build, create a
# mock threading object with threading.local() returning the module namespace.

try:
    import threading
except ImportError:
    # Python was compiled without threads; create a mock object instead
    import sys
    class MockThreading:
        def local(self, sys=sys):
            # Per-"thread" storage degenerates to the module namespace.
            return sys.modules[__name__]
    threading = MockThreading()
    del sys, MockThreading

try:
    threading.local

except AttributeError:

    # Pre-2.4 path: no threading.local; store the context as an
    # attribute on the current Thread object instead.
    #To fix reloading, force it to create a new context
    #Old contexts have different exceptions in their dicts, making problems.
    if hasattr(threading.currentThread(), '__decimal_context__'):
        del threading.currentThread().__decimal_context__

    def setcontext(context):
        """Set this thread's context to context."""
        # The three template contexts are copied (with flags cleared) so
        # the shared templates are never mutated by later operations.
        if context in (DefaultContext, BasicContext, ExtendedContext):
            context = context.copy()
            context.clear_flags()
        threading.currentThread().__decimal_context__ = context

    def getcontext():
        """Returns this thread's context.

        If this thread does not yet have a context, returns
        a new context and sets this thread's context.
        New contexts are copies of DefaultContext.
        """
        try:
            return threading.currentThread().__decimal_context__
        except AttributeError:
            context = Context()
            threading.currentThread().__decimal_context__ = context
            return context

else:

    # Py2.4+ path: real thread-local storage.
    local = threading.local()
    if hasattr(local, '__decimal_context__'):
        del local.__decimal_context__

    # NOTE: _local is bound at definition time so the storage object
    # survives the 'del local' cleanup below.
    def getcontext(_local=local):
        """Returns this thread's context.

        If this thread does not yet have a context, returns
        a new context and sets this thread's context.
        New contexts are copies of DefaultContext.
        """
        try:
            return _local.__decimal_context__
        except AttributeError:
            context = Context()
            _local.__decimal_context__ = context
            return context

    def setcontext(context, _local=local):
        """Set this thread's context to context."""
        # Copy the shared template contexts so they are never mutated.
        if context in (DefaultContext, BasicContext, ExtendedContext):
            context = context.copy()
            context.clear_flags()
        _local.__decimal_context__ = context

    del threading, local        # Don't contaminate the namespace
+
+
+##### Decimal class ###########################################
+
+class Decimal(object):
+    """Floating point class for decimal arithmetic."""
+
+    __slots__ = ('_exp','_int','_sign', '_is_special')
+    # Generally, the value of the Decimal instance is given by
+    #  (-1)**_sign * _int * 10**_exp
+    # Special values are signified by _is_special == True
+
+    # We're immutable, so use __new__ not __init__
    # We're immutable, so use __new__ not __init__
    def __new__(cls, value="0", context=None):
        """Create a decimal point instance.

        >>> Decimal('3.14')              # string input
        Decimal("3.14")
        >>> Decimal((0, (3, 1, 4), -2))  # tuple input (sign, digit_tuple, exponent)
        Decimal("3.14")
        >>> Decimal(314)                 # int or long
        Decimal("314")
        >>> Decimal(Decimal(314))        # another decimal instance
        Decimal("314")
        """

        self = object.__new__(cls)
        self._is_special = False

        # From an internal working value
        if isinstance(value, _WorkRep):
            self._sign = value.sign
            self._int = tuple(map(int, str(value.int)))
            self._exp = int(value.exp)
            return self

        # From another decimal (share the immutable digit tuple directly)
        if isinstance(value, Decimal):
            self._exp  = value._exp
            self._sign = value._sign
            self._int  = value._int
            self._is_special  = value._is_special
            return self

        # From an integer
        if isinstance(value, (int,long)):
            if value >= 0:
                self._sign = 0
            else:
                self._sign = 1
            self._exp = 0
            self._int = tuple(map(int, str(abs(value))))
            return self

        # tuple/list conversion (possibly from as_tuple())
        if isinstance(value, (list,tuple)):
            if len(value) != 3:
                raise ValueError, 'Invalid arguments'
            if value[0] not in (0,1):
                raise ValueError, 'Invalid sign'
            for digit in value[1]:
                if not isinstance(digit, (int,long)) or digit < 0:
                    raise ValueError, "The second value in the tuple must be composed of non negative integer elements."

            self._sign = value[0]
            self._int  = tuple(value[1])
            # 'F' (infinity), 'n' (qNaN) and 'N' (sNaN) are the special
            # exponent markers; anything else must be an integer exponent.
            if value[2] in ('F','n','N'):
                self._exp = value[2]
                self._is_special = True
            else:
                self._exp  = int(value[2])
            return self

        if isinstance(value, float):
            raise TypeError("Cannot convert float to Decimal.  " +
                            "First convert the float to a string")

        # Other argument types may require the context during interpretation
        if context is None:
            context = getcontext()

        # From a string
        # REs insist on real strings, so we can too.
        if isinstance(value, basestring):
            if _isinfinity(value):
                self._exp = 'F'
                self._int = (0,)
                self._is_special = True
                if _isinfinity(value) == 1:
                    self._sign = 0
                else:
                    self._sign = 1
                return self
            if _isnan(value):
                sig, sign, diag = _isnan(value)
                self._is_special = True
                if len(diag) > context.prec: #Diagnostic info too long
                    self._sign, self._int, self._exp = \
                                context._raise_error(ConversionSyntax)
                    return self
                if sig == 1:
                    self._exp = 'n' #qNaN
                else: #sig == 2
                    self._exp = 'N' #sNaN
                self._sign = sign
                self._int = tuple(map(int, diag)) #Diagnostic info
                return self
            # Ordinary numeric string; a malformed one signals
            # ConversionSyntax via the context.
            try:
                self._sign, self._int, self._exp = _string2exact(value)
            except ValueError:
                self._is_special = True
                self._sign, self._int, self._exp = context._raise_error(ConversionSyntax)
            return self

        raise TypeError("Cannot convert %r to Decimal" % value)
+
+    def _isnan(self):
+        """Returns whether the number is not actually one.
+
+        0 if a number
+        1 if NaN
+        2 if sNaN
+        """
+        if self._is_special:
+            exp = self._exp
+            if exp == 'n':
+                return 1
+            elif exp == 'N':
+                return 2
+        return 0
+
+    def _isinfinity(self):
+        """Returns whether the number is infinite
+
+        0 if finite or not a number
+        1 if +INF
+        -1 if -INF
+        """
+        if self._exp == 'F':
+            if self._sign:
+                return -1
+            return 1
+        return 0
+
+    def _check_nans(self, other = None, context=None):
+        """Returns whether the number is not actually one.
+
+        if self, other are sNaN, signal
+        if self, other are NaN return nan
+        return 0
+
+        Done before operations.
+        """
+
+        self_is_nan = self._isnan()
+        if other is None:
+            other_is_nan = False
+        else:
+            other_is_nan = other._isnan()
+
+        if self_is_nan or other_is_nan:
+            if context is None:
+                context = getcontext()
+
+            if self_is_nan == 2:
+                return context._raise_error(InvalidOperation, 'sNaN',
+                                        1, self)
+            if other_is_nan == 2:
+                return context._raise_error(InvalidOperation, 'sNaN',
+                                        1, other)
+            if self_is_nan:
+                return self
+
+            return other
+        return 0
+
+    def __nonzero__(self):
+        """Is the number non-zero?
+
+        0 if self == 0
+        1 if self != 0
+        """
+        if self._is_special:
+            return 1
+        return sum(self._int) != 0
+
    def __cmp__(self, other, context=None):
        # Three-way comparison (Py2 protocol): -1, 0 or 1.
        # NOTE(review): any comparison involving a NaN reports self > other.
        other = _convert_other(other)

        if self._is_special or other._is_special:
            ans = self._check_nans(other, context)
            if ans:
                return 1 # Comparison involving NaN's always reports self > other

            # INF = INF
            return cmp(self._isinfinity(), other._isinfinity())

        if not self and not other:
            return 0 #If both 0, sign comparison isn't certain.

        #If different signs, neg one is less
        if other._sign < self._sign:
            return -1
        if self._sign < other._sign:
            return 1

        # Same sign: first try to decide from the adjusted exponents and
        # zero-padded coefficients, which needs no rounding.
        self_adjusted = self.adjusted()
        other_adjusted = other.adjusted()
        if self_adjusted == other_adjusted and \
           self._int + (0,)*(self._exp - other._exp) == \
           other._int + (0,)*(other._exp - self._exp):
            return 0 #equal, except in precision. ([0]*(-x) = [])
        elif self_adjusted > other_adjusted and self._int[0] != 0:
            return (-1)**self._sign
        elif self_adjusted < other_adjusted and other._int[0] != 0:
            return -((-1)**self._sign)

        # Need to round, so make sure we have a valid context
        if context is None:
            context = getcontext()

        # Compare via subtraction on a scratch context: round away from
        # zero and ignore all flags so the comparison has no side effects.
        context = context._shallow_copy()
        rounding = context._set_rounding(ROUND_UP) #round away from 0

        flags = context._ignore_all_flags()
        res = self.__sub__(other, context=context)

        context._regard_flags(*flags)

        context.rounding = rounding

        if not res:
            return 0
        elif res._sign:
            return -1
        return 1
+
+    def __eq__(self, other):
+        if not isinstance(other, (Decimal, int, long)):
+            return False
+        return self.__cmp__(other) == 0
+
+    def __ne__(self, other):
+        if not isinstance(other, (Decimal, int, long)):
+            return True
+        return self.__cmp__(other) != 0
+
+    def compare(self, other, context=None):
+        """Compares one to another.
+
+        -1 => a < b
+        0  => a = b
+        1  => a > b
+        NaN => one is NaN
+        Like __cmp__, but returns Decimal instances.
+        """
+        other = _convert_other(other)
+
+        #compare(NaN, NaN) = NaN
+        if (self._is_special or other and other._is_special):
+            ans = self._check_nans(other, context)
+            if ans:
+                return ans
+
+        return Decimal(self.__cmp__(other, context))
+
    def __hash__(self):
        """x.__hash__() <==> hash(x)"""
        # Decimal integers must hash the same as the ints
        # Non-integer decimals are normalized and hashed as strings
        # Normalization assures that hash(100E-1) == hash(10)
        if self._is_special:
            if self._isnan():
                raise TypeError('Cannot hash a NaN value.')
            # Infinities hash by their string form ('Infinity'/'-Infinity').
            return hash(str(self))
        i = int(self)
        if self == Decimal(i):
            return hash(i)
        assert self.__nonzero__()   # '-0' handled by integer case
        return hash(str(self.normalize()))
+
+    def as_tuple(self):
+        """Represents the number as a triple tuple.
+
+        To show the internals exactly as they are.
+        """
+        return (self._sign, self._int, self._exp)
+
+    def __repr__(self):
+        """Represents the number as an instance of Decimal."""
+        # Invariant:  eval(repr(d)) == d
+        return 'Decimal("%s")' % str(self)
+
    def __str__(self, eng = 0, context=None):
        """Return string representation of the number in scientific notation.

        Captures all of the information in the underlying representation.
        With eng=1, uses engineering notation (exponent a multiple of 3).
        """

        # Special values first: NaNs carry optional diagnostic digits.
        if self._isnan():
            minus = '-'*self._sign
            if self._int == (0,):
                info = ''
            else:
                info = ''.join(map(str, self._int))
            if self._isnan() == 2:
                return minus + 'sNaN' + info
            return minus + 'NaN' + info
        if self._isinfinity():
            minus = '-'*self._sign
            return minus + 'Infinity'

        # Context only controls 'e' vs 'E' (capitals) here.
        if context is None:
            context = getcontext()

        tmp = map(str, self._int)
        numdigits = len(self._int)
        leftdigits = self._exp + numdigits
        if eng and not self: #self = 0eX wants 0[.0[0]]eY, not [[0]0]0eY
            if self._exp < 0 and self._exp >= -6: #short, no need for e/E
                s = '-'*self._sign + '0.' + '0'*(abs(self._exp))
                return s
            #exp is closest mult. of 3 >= self._exp
            exp = ((self._exp - 1)// 3 + 1) * 3
            if exp != self._exp:
                s = '0.'+'0'*(exp - self._exp)
            else:
                s = '0'
            if exp != 0:
                if context.capitals:
                    s += 'E'
                else:
                    s += 'e'
                if exp > 0:
                    s += '+' #0.0e+3, not 0.0e3
                s += str(exp)
            s = '-'*self._sign + s
            return s
        # dotplace: how many digits go left of the '.'; adjexp: the
        # printed exponent.
        if eng:
            dotplace = (leftdigits-1)%3+1
            adjexp = leftdigits -1 - (leftdigits-1)%3
        else:
            adjexp = leftdigits-1
            dotplace = 1
        if self._exp == 0:
            pass
        elif self._exp < 0 and adjexp >= 0:
            tmp.insert(leftdigits, '.')
        elif self._exp < 0 and adjexp >= -6:
            tmp[0:0] = ['0'] * int(-leftdigits)
            tmp.insert(0, '0.')
        else:
            if numdigits > dotplace:
                tmp.insert(dotplace, '.')
            elif numdigits < dotplace:
                tmp.extend(['0']*(dotplace-numdigits))
            if adjexp:
                if not context.capitals:
                    tmp.append('e')
                else:
                    tmp.append('E')
                    if adjexp > 0:
                        tmp.append('+')
                tmp.append(str(adjexp))
        if eng:
            # Strip leading zeros, but never strip down to nothing.
            while tmp[0:1] == ['0']:
                tmp[0:1] = []
            if len(tmp) == 0 or tmp[0] == '.' or tmp[0].lower() == 'e':
                tmp[0:0] = ['0']
        if self._sign:
            tmp.insert(0, '-')

        return ''.join(tmp)
+
+    def to_eng_string(self, context=None):
+        """Convert to engineering-type string.
+
+        Engineering notation has an exponent which is a multiple of 3, so there
+        are up to 3 digits left of the decimal place.
+
+        Same rules for when in exponential and when as a value as in __str__.
+        """
+        return self.__str__(eng=1, context=context)
+
+    def __neg__(self, context=None):
+        """Returns a copy with the sign switched.
+
+        Rounds, if it has reason.
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        if not self:
+            # -Decimal('0') is Decimal('0'), not Decimal('-0')
+            sign = 0
+        elif self._sign:
+            sign = 0
+        else:
+            sign = 1
+
+        if context is None:
+            context = getcontext()
+        if context._rounding_decision == ALWAYS_ROUND:
+            return Decimal((sign, self._int, self._exp))._fix(context)
+        return Decimal( (sign, self._int, self._exp))
+
+    def __pos__(self, context=None):
+        """Returns a copy, unless it is a sNaN.
+
+        Rounds the number (if more then precision digits)
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        sign = self._sign
+        if not self:
+            # + (-0) = 0
+            sign = 0
+
+        if context is None:
+            context = getcontext()
+
+        if context._rounding_decision == ALWAYS_ROUND:
+            ans = self._fix(context)
+        else:
+            ans = Decimal(self)
+        ans._sign = sign
+        return ans
+
+    def __abs__(self, round=1, context=None):
+        """Returns the absolute value of self.
+
+        If the second argument is 0, do not round.
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        if not round:
+            if context is None:
+                context = getcontext()
+            context = context._shallow_copy()
+            context._set_rounding_decision(NEVER_ROUND)
+
+        if self._sign:
+            ans = self.__neg__(context=context)
+        else:
+            ans = self.__pos__(context=context)
+
+        return ans
+
    def __add__(self, other, context=None):
        """Returns self + other.

        -INF + INF (or the reverse) cause InvalidOperation errors.
        """
        other = _convert_other(other)

        if context is None:
            context = getcontext()

        if self._is_special or other._is_special:
            ans = self._check_nans(other, context)
            if ans:
                return ans

            if self._isinfinity():
                #If both INF, same sign => same as both, opposite => error.
                if self._sign != other._sign and other._isinfinity():
                    return context._raise_error(InvalidOperation, '-INF + INF')
                return Decimal(self)
            if other._isinfinity():
                return Decimal(other)  #Can't both be infinity here

        shouldround = context._rounding_decision == ALWAYS_ROUND

        # The ideal exponent of the sum is the smaller operand exponent.
        exp = min(self._exp, other._exp)
        negativezero = 0
        if context.rounding == ROUND_FLOOR and self._sign != other._sign:
            #If the answer is 0, the sign should be negative, in this case.
            negativezero = 1

        # Both operands zero: result is a zero with the ideal exponent.
        if not self and not other:
            sign = min(self._sign, other._sign)
            if negativezero:
                sign = 1
            return Decimal( (sign, (0,), exp))
        # Exactly one operand zero: the sum is the other operand, rescaled
        # toward the ideal exponent (bounded so rounding stays correct).
        if not self:
            exp = max(exp, other._exp - context.prec-1)
            ans = other._rescale(exp, watchexp=0, context=context)
            if shouldround:
                ans = ans._fix(context)
            return ans
        if not other:
            exp = max(exp, self._exp - context.prec-1)
            ans = self._rescale(exp, watchexp=0, context=context)
            if shouldround:
                ans = ans._fix(context)
            return ans

        # Align the operands' exponents, then add/subtract coefficients.
        op1 = _WorkRep(self)
        op2 = _WorkRep(other)
        op1, op2 = _normalize(op1, op2, shouldround, context.prec)

        result = _WorkRep()
        if op1.sign != op2.sign:
            # Equal and opposite
            if op1.int == op2.int:
                if exp < context.Etiny():
                    exp = context.Etiny()
                    context._raise_error(Clamped)
                return Decimal((negativezero, (0,), exp))
            # Arrange for op1 to carry the larger magnitude.
            if op1.int < op2.int:
                op1, op2 = op2, op1
                #OK, now abs(op1) > abs(op2)
            if op1.sign == 1:
                result.sign = 1
                op1.sign, op2.sign = op2.sign, op1.sign
            else:
                result.sign = 0
                #So we know the sign, and op1 > 0.
        elif op1.sign == 1:
            result.sign = 1
            op1.sign, op2.sign = (0, 0)
        else:
            result.sign = 0
        #Now, op1 > abs(op2) > 0

        if op2.sign == 0:
            result.int = op1.int + op2.int
        else:
            result.int = op1.int - op2.int

        result.exp = op1.exp
        ans = Decimal(result)
        if shouldround:
            ans = ans._fix(context)
        return ans

    __radd__ = __add__
+
+    def __sub__(self, other, context=None):
+        """Return self + (-other)"""
+        other = _convert_other(other)
+
+        if self._is_special or other._is_special:
+            ans = self._check_nans(other, context=context)
+            if ans:
+                return ans
+
+        # -Decimal(0) = Decimal(0), which we don't want since
+        # (-0 - 0 = -0 + (-0) = -0, but -0 + 0 = 0.)
+        # so we change the sign directly to a copy
+        tmp = Decimal(other)
+        tmp._sign = 1-tmp._sign
+
+        return self.__add__(tmp, context=context)
+
+    def __rsub__(self, other, context=None):
+        """Return other + (-self)"""
+        other = _convert_other(other)
+
+        tmp = Decimal(self)
+        tmp._sign = 1 - tmp._sign
+        return other.__add__(tmp, context=context)
+
+    def _increment(self, round=1, context=None):
+        """Special case of add, adding 1eExponent
+
+        Since it is common, (rounding, for example) this adds
+        (sign)*one E self._exp to the number more efficiently than add.
+
+        For example:
+        Decimal('5.624e10')._increment() == Decimal('5.625e10')
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+            return Decimal(self) # Must be infinite, and incrementing makes no difference
+
+        L = list(self._int)
+        L[-1] += 1
+        spot = len(L)-1
+        while L[spot] == 10:
+            L[spot] = 0
+            if spot == 0:
+                L[0:0] = [1]
+                break
+            L[spot-1] += 1
+            spot -= 1
+        ans = Decimal((self._sign, L, self._exp))
+
+        if context is None:
+            context = getcontext()
+        if round and context._rounding_decision == ALWAYS_ROUND:
+            ans = ans._fix(context)
+        return ans
+
    def __mul__(self, other, context=None):
        """Return self * other.

        (+-) INF * 0 (or its reverse) raise InvalidOperation.
        """
        other = _convert_other(other)

        if context is None:
            context = getcontext()

        # XOR of the operand signs gives the sign of the product.
        resultsign = self._sign ^ other._sign

        if self._is_special or other._is_special:
            ans = self._check_nans(other, context)
            if ans:
                return ans

            if self._isinfinity():
                if not other:
                    return context._raise_error(InvalidOperation, '(+-)INF * 0')
                return Infsign[resultsign]

            if other._isinfinity():
                if not self:
                    return context._raise_error(InvalidOperation, '0 * (+-)INF')
                return Infsign[resultsign]

        # Exponents add; coefficients multiply as plain integers below.
        resultexp = self._exp + other._exp
        shouldround = context._rounding_decision == ALWAYS_ROUND

        # Special case for multiplying by zero
        if not self or not other:
            ans = Decimal((resultsign, (0,), resultexp))
            if shouldround:
                #Fixing in case the exponent is out of bounds
                ans = ans._fix(context)
            return ans

        # Special case for multiplying by power of 10
        if self._int == (1,):
            ans = Decimal((resultsign, other._int, resultexp))
            if shouldround:
                ans = ans._fix(context)
            return ans
        if other._int == (1,):
            ans = Decimal((resultsign, self._int, resultexp))
            if shouldround:
                ans = ans._fix(context)
            return ans

        op1 = _WorkRep(self)
        op2 = _WorkRep(other)

        # Multiply the coefficients as ints, then split the product back
        # into a digit tuple via its string form.
        ans = Decimal( (resultsign, map(int, str(op1.int * op2.int)), resultexp))
        if shouldround:
            ans = ans._fix(context)

        return ans
    __rmul__ = __mul__
+
    def __div__(self, other, context=None):
        """Return self / other (true division, to context.prec digits)."""
        return self._divide(other, context=context)
    # Under "from __future__ import division" Python 2 calls __truediv__;
    # both names share the same implementation.
    __truediv__ = __div__
+
    def _divide(self, other, divmod = 0, context=None):
        """Return a / b, to context.prec precision.

        divmod:
        0 => true division
        1 => (a //b, a%b)
        2 => a //b
        3 => a%b

        Actually, if divmod is 2 or 3 a tuple is returned, but errors for
        computing the other value are not raised.
        """
        other = _convert_other(other)

        if context is None:
            context = getcontext()

        # Sign of the quotient is the XOR of the operand signs.
        sign = self._sign ^ other._sign

        if self._is_special or other._is_special:
            ans = self._check_nans(other, context)
            if ans:
                # divmod callers expect a pair even on a NaN result.
                if divmod:
                    return (ans, ans)
                return ans

            if self._isinfinity() and other._isinfinity():
                if divmod:
                    return (context._raise_error(InvalidOperation,
                                            '(+-)INF // (+-)INF'),
                            context._raise_error(InvalidOperation,
                                            '(+-)INF % (+-)INF'))
                return context._raise_error(InvalidOperation, '(+-)INF/(+-)INF')

            if self._isinfinity():
                if divmod == 1:
                    return (Infsign[sign],
                            context._raise_error(InvalidOperation, 'INF % x'))
                elif divmod == 2:
                    # // alone: errors for the unused % part are suppressed.
                    return (Infsign[sign], NaN)
                elif divmod == 3:
                    return (Infsign[sign],
                            context._raise_error(InvalidOperation, 'INF % x'))
                return Infsign[sign]

            if other._isinfinity():
                if divmod:
                    return (Decimal((sign, (0,), 0)), Decimal(self))
                # finite / INF: a signed zero at the tiniest exponent.
                context._raise_error(Clamped, 'Division by infinity')
                return Decimal((sign, (0,), context.Etiny()))

        # Special cases for zeroes
        if not self and not other:
            if divmod:
                return context._raise_error(DivisionUndefined, '0 / 0', 1)
            return context._raise_error(DivisionUndefined, '0 / 0')

        if not self:
            if divmod:
                otherside = Decimal(self)
                otherside._exp = min(self._exp, other._exp)
                return (Decimal((sign, (0,), 0)),  otherside)
            # 0 / y: a zero whose exponent is clamped into context bounds.
            exp = self._exp - other._exp
            if exp < context.Etiny():
                exp = context.Etiny()
                context._raise_error(Clamped, '0e-x / y')
            if exp > context.Emax:
                exp = context.Emax
                context._raise_error(Clamped, '0e+x / y')
            return Decimal( (sign, (0,), exp) )

        if not other:
            if divmod:
                return context._raise_error(DivisionByZero, 'divmod(x,0)',
                                           sign, 1)
            return context._raise_error(DivisionByZero, 'x / 0', sign)

        #OK, so neither = 0, INF or NaN

        shouldround = context._rounding_decision == ALWAYS_ROUND

        #If we're dividing into ints, and self < other, stop.
        #self.__abs__(0) does not round.
        if divmod and (self.__abs__(0, context) < other.__abs__(0, context)):

            if divmod == 1 or divmod == 3:
                # Quotient is 0; remainder is self rescaled to the common
                # ideal exponent.
                exp = min(self._exp, other._exp)
                ans2 = self._rescale(exp, context=context, watchexp=0)
                if shouldround:
                    ans2 = ans2._fix(context)
                return (Decimal( (sign, (0,), 0) ),
                        ans2)

            elif divmod == 2:
                #Don't round the mod part, if we don't need it.
                return (Decimal( (sign, (0,), 0) ), Decimal(self))

        # Long division on the integer coefficients.
        op1 = _WorkRep(self)
        op2 = _WorkRep(other)
        op1, op2, adjust = _adjust_coefficients(op1, op2)
        res = _WorkRep( (sign, 0, (op1.exp - op2.exp)) )
        if divmod and res.exp > context.prec + 1:
            return context._raise_error(DivisionImpossible)

        # Quotient coefficients at or beyond this bound exceed prec digits.
        prec_limit = 10 ** context.prec
        while 1:
            # Repeated subtraction yields the next quotient digit.
            while op2.int <= op1.int:
                res.int += 1
                op1.int -= op2.int
            if res.exp == 0 and divmod:
                if res.int >= prec_limit and shouldround:
                    return context._raise_error(DivisionImpossible)
                otherside = Decimal(op1)
                # Freeze flags while rescaling the remainder so internal
                # signals don't leak into the caller's context.
                frozen = context._ignore_all_flags()

                exp = min(self._exp, other._exp)
                otherside = otherside._rescale(exp, context=context, watchexp=0)
                context._regard_flags(*frozen)
                if shouldround:
                    otherside = otherside._fix(context)
                return (Decimal(res), otherside)

            if op1.int == 0 and adjust >= 0 and not divmod:
                break
            if res.int >= prec_limit and shouldround:
                if divmod:
                    return context._raise_error(DivisionImpossible)
                shouldround=1
                # Really, the answer is a bit higher, so adding a one to
                # the end will make sure the rounding is right.
                if op1.int != 0:
                    res.int *= 10
                    res.int += 1
                    res.exp -= 1

                break
            # Shift to produce the next (smaller) quotient digit.
            res.int *= 10
            res.exp -= 1
            adjust += 1
            op1.int *= 10
            op1.exp -= 1

            if res.exp == 0 and divmod and op2.int > op1.int:
                #Solves an error in precision.  Same as a previous block.

                if res.int >= prec_limit and shouldround:
                    return context._raise_error(DivisionImpossible)
                otherside = Decimal(op1)
                frozen = context._ignore_all_flags()

                exp = min(self._exp, other._exp)
                otherside = otherside._rescale(exp, context=context)

                context._regard_flags(*frozen)

                return (Decimal(res), otherside)

        ans = Decimal(res)
        if shouldround:
            ans = ans._fix(context)
        return ans
+
+    def __rdiv__(self, other, context=None):
+        """Swaps self/other and returns __div__."""
+        other = _convert_other(other)
+        return other.__div__(self, context=context)
+    __rtruediv__ = __rdiv__
+
+    def __divmod__(self, other, context=None):
+        """
+        (self // other, self % other)
+        """
+        return self._divide(other, 1, context)
+
+    def __rdivmod__(self, other, context=None):
+        """Swaps self/other and returns __divmod__."""
+        other = _convert_other(other)
+        return other.__divmod__(self, context=context)
+
+    def __mod__(self, other, context=None):
+        """
+        self % other
+        """
+        other = _convert_other(other)
+
+        if self._is_special or other._is_special:
+            ans = self._check_nans(other, context)
+            if ans:
+                return ans
+
+        if self and not other:
+            return context._raise_error(InvalidOperation, 'x % 0')
+
+        return self._divide(other, 3, context)[1]
+
+    def __rmod__(self, other, context=None):
+        """Swaps self/other and returns __mod__."""
+        other = _convert_other(other)
+        return other.__mod__(self, context=context)
+
+    def remainder_near(self, other, context=None):
+        """
+        Remainder nearest to 0-  abs(remainder-near) <= other/2
+        """
+        other = _convert_other(other)
+
+        if self._is_special or other._is_special:
+            ans = self._check_nans(other, context)
+            if ans:
+                return ans
+        if self and not other:
+            return context._raise_error(InvalidOperation, 'x % 0')
+
+        if context is None:
+            context = getcontext()
+        # If DivisionImpossible causes an error, do not leave Rounded/Inexact
+        # ignored in the calling function.
+        context = context._shallow_copy()
+        flags = context._ignore_flags(Rounded, Inexact)
+        #keep DivisionImpossible flags
+        (side, r) = self.__divmod__(other, context=context)
+
+        if r._isnan():
+            context._regard_flags(*flags)
+            return r
+
+        context = context._shallow_copy()
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+
+        if other._sign:
+            comparison = other.__div__(Decimal(-2), context=context)
+        else:
+            comparison = other.__div__(Decimal(2), context=context)
+
+        context._set_rounding_decision(rounding)
+        context._regard_flags(*flags)
+
+        s1, s2 = r._sign, comparison._sign
+        r._sign, comparison._sign = 0, 0
+
+        if r < comparison:
+            r._sign, comparison._sign = s1, s2
+            #Get flags now
+            self.__divmod__(other, context=context)
+            return r._fix(context)
+        r._sign, comparison._sign = s1, s2
+
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+
+        (side, r) = self.__divmod__(other, context=context)
+        context._set_rounding_decision(rounding)
+        if r._isnan():
+            return r
+
+        decrease = not side._iseven()
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+        side = side.__abs__(context=context)
+        context._set_rounding_decision(rounding)
+
+        s1, s2 = r._sign, comparison._sign
+        r._sign, comparison._sign = 0, 0
+        if r > comparison or decrease and r == comparison:
+            r._sign, comparison._sign = s1, s2
+            context.prec += 1
+            if len(side.__add__(Decimal(1), context=context)._int) >= context.prec:
+                context.prec -= 1
+                return context._raise_error(DivisionImpossible)[1]
+            context.prec -= 1
+            if self._sign == other._sign:
+                r = r.__sub__(other, context=context)
+            else:
+                r = r.__add__(other, context=context)
+        else:
+            r._sign, comparison._sign = s1, s2
+
+        return r._fix(context)
+
+    def __floordiv__(self, other, context=None):
+        """self // other"""
+        return self._divide(other, 2, context)[0]
+
+    def __rfloordiv__(self, other, context=None):
+        """Swaps self/other and returns __floordiv__."""
+        other = _convert_other(other)
+        return other.__floordiv__(self, context=context)
+
+    def __float__(self):
+        """Float representation."""
+        return float(str(self))
+
    def __int__(self):
        """Converts self to an int, truncating if necessary."""
        if self._is_special:
            if self._isnan():
                context = getcontext()
                return context._raise_error(InvalidContext)
            elif self._isinfinity():
                # NOTE(review): message says "long" — presumably because a
                # huge int() result falls back to long in Python 2; confirm
                # before changing the wording.
                raise OverflowError, "Cannot convert infinity to long"
        # Non-negative exponent: pad the digits with trailing zeros.
        # Negative exponent: slice off the fractional digits from the end.
        if self._exp >= 0:
            s = ''.join(map(str, self._int)) + '0'*self._exp
        else:
            s = ''.join(map(str, self._int))[:self._exp]
        if s == '':
            # Every digit was fractional, e.g. Decimal('0.5') -> 0.
            s = '0'
        sign = '-'*self._sign
        return int(sign + s)
+
+    def __long__(self):
+        """Converts to a long.
+
+        Equivalent to long(int(self))
+        """
+        return long(self.__int__())
+
+    def _fix(self, context):
+        """Round if it is necessary to keep self within prec precision.
+
+        Rounds and fixes the exponent.  Does not raise on a sNaN.
+
+        Arguments:
+        self - Decimal instance
+        context - context used.
+        """
+        if self._is_special:
+            return self
+        if context is None:
+            context = getcontext()
+        prec = context.prec
+        ans = self._fixexponents(context)
+        if len(ans._int) > prec:
+            ans = ans._round(prec, context=context)
+            ans = ans._fixexponents(context)
+        return ans
+
    def _fixexponents(self, context):
        """Fix the exponents and return a copy with the exponent in bounds.
        Only call if known to not be a special value.
        """
        folddown = context._clamp
        Emin = context.Emin
        ans = self
        ans_adjusted = ans.adjusted()
        if ans_adjusted < Emin:
            # Below the normal range: subnormal, underflow, or clamped zero.
            Etiny = context.Etiny()
            if ans._exp < Etiny:
                if not ans:
                    # A zero is simply clamped up to Etiny.
                    ans = Decimal(self)
                    ans._exp = Etiny
                    context._raise_error(Clamped)
                    return ans
                ans = ans._rescale(Etiny, context=context)
                #It isn't zero, and exp < Emin => subnormal
                context._raise_error(Subnormal)
                # Underflow is signalled only if the rescale was inexact.
                if context.flags[Inexact]:
                    context._raise_error(Underflow)
            else:
                if ans:
                    #Only raise subnormal if non-zero.
                    context._raise_error(Subnormal)
        else:
            Etop = context.Etop()
            if folddown and ans._exp > Etop:
                # With _clamp set, exponents above Etop get folded down by
                # padding the coefficient with zeros.
                context._raise_error(Clamped)
                ans = ans._rescale(Etop, context=context)
            else:
                Emax = context.Emax
                if ans_adjusted > Emax:
                    if not ans:
                        # A zero is clamped down to Emax; it cannot overflow.
                        ans = Decimal(self)
                        ans._exp = Emax
                        context._raise_error(Clamped)
                        return ans
                    # Non-zero above Emax: signal Inexact and Rounded, then
                    # let Overflow produce the result (e.g. Infinity).
                    context._raise_error(Inexact)
                    context._raise_error(Rounded)
                    return context._raise_error(Overflow, 'above Emax', ans._sign)
        return ans
+
    def _round(self, prec=None, rounding=None, context=None):
        """Returns a rounded version of self.

        You can specify the precision or rounding method.  Otherwise, the
        context determines it.
        """

        if self._is_special:
            ans = self._check_nans(context=context)
            if ans:
                return ans

            if self._isinfinity():
                return Decimal(self)

        if context is None:
            context = getcontext()

        if rounding is None:
            rounding = context.rounding
        if prec is None:
            prec = context.prec

        # A zero rounds to a zero with an appropriately adjusted exponent.
        if not self:
            if prec <= 0:
                dig = (0,)
                exp = len(self._int) - prec + self._exp
            else:
                dig = (0,) * prec
                exp = len(self._int) + self._exp - prec
            ans = Decimal((self._sign, dig, exp))
            context._raise_error(Rounded)
            return ans

        # Normalize prec <= 0 by padding the coefficient so that from here
        # on we always round to at least one digit.
        if prec == 0:
            temp = Decimal(self)
            temp._int = (0,)+temp._int
            prec = 1
        elif prec < 0:
            exp = self._exp + len(self._int) - prec - 1
            temp = Decimal( (self._sign, (0, 1), exp))
            prec = 1
        else:
            temp = Decimal(self)

        numdigits = len(temp._int)
        if prec == numdigits:
            return temp

        # See if we need to extend precision
        expdiff = prec - numdigits
        if expdiff > 0:
            tmp = list(temp._int)
            tmp.extend([0] * expdiff)
            ans =  Decimal( (temp._sign, tmp, temp._exp - expdiff))
            return ans

        #OK, but maybe all the lost digits are 0.
        lostdigits = self._int[expdiff:]
        if lostdigits == (0,) * len(lostdigits):
            ans = Decimal( (temp._sign, temp._int[:prec], temp._exp - expdiff))
            #Rounded, but not Inexact
            context._raise_error(Rounded)
            return ans

        # Okay, let's round and lose data

        # _pick_rounding_function maps the rounding constant to the name of
        # the bound _round_* method that implements it.
        this_function = getattr(temp, self._pick_rounding_function[rounding])
        #Now we've got the rounding function

        if prec != context.prec:
            context = context._shallow_copy()
            context.prec = prec
        ans = this_function(prec, expdiff, context)
        context._raise_error(Rounded)
        context._raise_error(Inexact, 'Changed in rounding')

        return ans
+
    # Lookup table: rounding-mode constant -> name of the _round_* method
    # implementing it (used via getattr in _round).  NOTE(review): declared
    # empty here; presumably populated elsewhere in the module — confirm.
    _pick_rounding_function = {}
+
+    def _round_down(self, prec, expdiff, context):
+        """Also known as round-towards-0, truncate."""
+        return Decimal( (self._sign, self._int[:prec], self._exp - expdiff) )
+
+    def _round_half_up(self, prec, expdiff, context, tmp = None):
+        """Rounds 5 up (away from 0)"""
+
+        if tmp is None:
+            tmp = Decimal( (self._sign,self._int[:prec], self._exp - expdiff))
+        if self._int[prec] >= 5:
+            tmp = tmp._increment(round=0, context=context)
+            if len(tmp._int) > prec:
+                return Decimal( (tmp._sign, tmp._int[:-1], tmp._exp + 1))
+        return tmp
+
+    def _round_half_even(self, prec, expdiff, context):
+        """Round 5 to even, rest to nearest."""
+
+        tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff))
+        half = (self._int[prec] == 5)
+        if half:
+            for digit in self._int[prec+1:]:
+                if digit != 0:
+                    half = 0
+                    break
+        if half:
+            if self._int[prec-1] & 1 == 0:
+                return tmp
+        return self._round_half_up(prec, expdiff, context, tmp)
+
+    def _round_half_down(self, prec, expdiff, context):
+        """Round 5 down"""
+
+        tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff))
+        half = (self._int[prec] == 5)
+        if half:
+            for digit in self._int[prec+1:]:
+                if digit != 0:
+                    half = 0
+                    break
+        if half:
+            return tmp
+        return self._round_half_up(prec, expdiff, context, tmp)
+
+    def _round_up(self, prec, expdiff, context):
+        """Rounds away from 0."""
+        tmp = Decimal( (self._sign, self._int[:prec], self._exp - expdiff) )
+        for digit in self._int[prec:]:
+            if digit != 0:
+                tmp = tmp._increment(round=1, context=context)
+                if len(tmp._int) > prec:
+                    return Decimal( (tmp._sign, tmp._int[:-1], tmp._exp + 1))
+                else:
+                    return tmp
+        return tmp
+
+    def _round_ceiling(self, prec, expdiff, context):
+        """Rounds up (not away from 0 if negative.)"""
+        if self._sign:
+            return self._round_down(prec, expdiff, context)
+        else:
+            return self._round_up(prec, expdiff, context)
+
+    def _round_floor(self, prec, expdiff, context):
+        """Rounds down (not towards 0 if negative)"""
+        if not self._sign:
+            return self._round_down(prec, expdiff, context)
+        else:
+            return self._round_up(prec, expdiff, context)
+
    def __pow__(self, n, modulo = None, context=None):
        """Return self ** n (mod modulo)

        If modulo is None (default), don't take it mod modulo.
        """
        n = _convert_other(n)

        if context is None:
            context = getcontext()

        if self._is_special or n._is_special or n.adjusted() > 8:
            #Because the spot << doesn't work with really big exponents
            if n._isinfinity() or n.adjusted() > 8:
                return context._raise_error(InvalidOperation, 'x ** INF')

            ans = self._check_nans(n, context)
            if ans:
                return ans

        if not n._isinteger():
            return context._raise_error(InvalidOperation, 'x ** (non-integer)')

        if not self and not n:
            return context._raise_error(InvalidOperation, '0 ** 0')

        if not n:
            return Decimal(1)

        if self == Decimal(1):
            return Decimal(1)

        # Result is negative only for a negative base and an odd exponent.
        sign = self._sign and not n._iseven()
        n = int(n)

        if self._isinfinity():
            if modulo:
                return context._raise_error(InvalidOperation, 'INF % x')
            if n > 0:
                return Infsign[sign]
            return Decimal( (sign, (0,), 0) )

        #with ludicrously large exponent, just raise an overflow and return inf.
        if not modulo and n > 0 and (self._exp + len(self._int) - 1) * n > context.Emax \
           and self:

            tmp = Decimal('inf')
            tmp._sign = sign
            context._raise_error(Rounded)
            context._raise_error(Inexact)
            context._raise_error(Overflow, 'Big power', sign)
            return tmp

        # Extra working precision so repeated multiplication below does
        # not lose correctly-rounded digits of the final result.
        elength = len(str(abs(n)))
        firstprec = context.prec

        if not modulo and firstprec + elength + 1 > DefaultContext.Emax:
            return context._raise_error(Overflow, 'Too much precision.', sign)

        mul = Decimal(self)
        val = Decimal(1)
        context = context._shallow_copy()
        context.prec = firstprec + elength + 1
        if n < 0:
            #n is a long now, not Decimal instance
            n = -n
            mul = Decimal(1).__div__(mul, context=context)

        # Binary exponentiation (square-and-multiply), scanning the bits
        # of n from most significant to least.
        spot = 1
        while spot <= n:
            spot <<= 1

        spot >>= 1
        #Spot is the highest power of 2 less than n
        while spot:
            val = val.__mul__(val, context=context)
            if val._isinfinity():
                val = Infsign[sign]
                break
            if spot & n:
                val = val.__mul__(mul, context=context)
            if modulo is not None:
                val = val.__mod__(modulo, context=context)
            spot >>= 1
        # Restore the caller-visible precision before the final rounding.
        context.prec = firstprec

        if context._rounding_decision == ALWAYS_ROUND:
            return val._fix(context)
        return val
+
+    def __rpow__(self, other, context=None):
+        """Swaps self/other and returns __pow__."""
+        other = _convert_other(other)
+        return other.__pow__(self, context=context)
+
+    def normalize(self, context=None):
+        """Normalize- strip trailing 0s, change anything equal to 0 to 0e0"""
+
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        dup = self._fix(context)
+        if dup._isinfinity():
+            return dup
+
+        if not dup:
+            return Decimal( (dup._sign, (0,), 0) )
+        end = len(dup._int)
+        exp = dup._exp
+        while dup._int[end-1] == 0:
+            exp += 1
+            end -= 1
+        return Decimal( (dup._sign, dup._int[:end], exp) )
+
+
+    def quantize(self, exp, rounding=None, context=None, watchexp=1):
+        """Quantize self so its exponent is the same as that of exp.
+
+        Similar to self._rescale(exp._exp) but with error checking.
+        """
+        if self._is_special or exp._is_special:
+            ans = self._check_nans(exp, context)
+            if ans:
+                return ans
+
+            if exp._isinfinity() or self._isinfinity():
+                if exp._isinfinity() and self._isinfinity():
+                    return self  #if both are inf, it is OK
+                if context is None:
+                    context = getcontext()
+                return context._raise_error(InvalidOperation,
+                                        'quantize with one INF')
+        return self._rescale(exp._exp, rounding, context, watchexp)
+
+    def same_quantum(self, other):
+        """Test whether self and other have the same exponent.
+
+        same as self._exp == other._exp, except NaN == sNaN
+        """
+        if self._is_special or other._is_special:
+            # Any two NaNs (quiet or signaling) share a quantum, as do any
+            # two infinities.  The trailing 'and True' collapses a truthy
+            # result to True while letting a falsy _isnan()/_isinfinity()
+            # result pass through unchanged.
+            if self._isnan() or other._isnan():
+                return self._isnan() and other._isnan() and True
+            if self._isinfinity() or other._isinfinity():
+                return self._isinfinity() and other._isinfinity() and True
+        return self._exp == other._exp
+
+    def _rescale(self, exp, rounding=None, context=None, watchexp=1):
+        """Rescales so that the exponent is exp.
+
+        exp = exp to scale to (an integer)
+        rounding = rounding version
+        watchexp: if set (default) an error is returned if exp is greater
+        than Emax or less than Etiny.
+        """
+        if context is None:
+            context = getcontext()
+
+        if self._is_special:
+            if self._isinfinity():
+                return context._raise_error(InvalidOperation, 'rescale with an INF')
+
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+        # Target exponent outside [Etiny, Emax] is invalid unless watching
+        # is disabled.
+        if watchexp and (context.Emax  < exp or context.Etiny() > exp):
+            return context._raise_error(InvalidOperation, 'rescale(a, INF)')
+
+        # A zero simply takes the requested exponent.
+        if not self:
+            ans = Decimal(self)
+            ans._int = (0,)
+            ans._exp = exp
+            return ans
+
+        # Number of coefficient digits the result would need.
+        diff = self._exp - exp
+        digits = len(self._int) + diff
+
+        if watchexp and digits > context.prec:
+            return context._raise_error(InvalidOperation, 'Rescale > prec')
+
+        # Prepend a guard zero so rounding can carry into a new leading digit.
+        tmp = Decimal(self)
+        tmp._int = (0,) + tmp._int
+        digits += 1
+
+        if digits < 0:
+            # Result shorter than one digit: round from a two-digit stub
+            # whose exponent absorbs the deficit.
+            tmp._exp = -digits + tmp._exp
+            tmp._int = (0,1)
+            digits = 1
+        tmp = tmp._round(digits, rounding, context=context)
+
+        # Drop the guard zero if rounding never carried into it.
+        if tmp._int[0] == 0 and len(tmp._int) > 1:
+            tmp._int = tmp._int[1:]
+        tmp._exp = exp
+
+        # Signal Subnormal/InvalidOperation if the result left the
+        # context's exponent range.
+        tmp_adjusted = tmp.adjusted()
+        if tmp and tmp_adjusted < context.Emin:
+            context._raise_error(Subnormal)
+        elif tmp and tmp_adjusted > context.Emax:
+            return context._raise_error(InvalidOperation, 'rescale(a, INF)')
+        return tmp
+
+    def to_integral(self, rounding=None, context=None):
+        """Rounds to the nearest integer, without raising inexact, rounded."""
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+        if self._exp >= 0:
+            return self
+        if context is None:
+            context = getcontext()
+        flags = context._ignore_flags(Rounded, Inexact)
+        ans = self._rescale(0, rounding, context=context)
+        context._regard_flags(flags)
+        return ans
+
+    def sqrt(self, context=None):
+        """Return the square root of self.
+
+        Uses a converging algorithm (Xn+1 = 0.5*(Xn + self / Xn))
+        Should quadratically approach the right answer.
+        """
+        if self._is_special:
+            ans = self._check_nans(context=context)
+            if ans:
+                return ans
+
+            # sqrt(+Inf) is +Inf; -Inf falls through to the sqrt(-x)
+            # error below.
+            if self._isinfinity() and self._sign == 0:
+                return Decimal(self)
+
+        if not self:
+            #exponent = self._exp / 2, using round_down.
+            #if self._exp < 0:
+            #    exp = (self._exp+1) // 2
+            #else:
+            exp = (self._exp) // 2
+            if self._sign == 1:
+                #sqrt(-0) = -0
+                return Decimal( (1, (0,), exp))
+            else:
+                return Decimal( (0, (0,), exp))
+
+        if context is None:
+            context = getcontext()
+
+        if self._sign == 1:
+            return context._raise_error(InvalidOperation, 'sqrt(-x), x > 0')
+
+        tmp = Decimal(self)
+
+        # Factor the exponent's even half out of tmp; it is re-added to the
+        # result at the end (sqrt(m * 10**2k) == sqrt(m) * 10**k).
+        expadd = tmp._exp // 2
+        if tmp._exp & 1:
+            tmp._int += (0,)
+            tmp._exp = 0
+        else:
+            tmp._exp = 0
+
+        # Work on a private context with all signals ignored while iterating.
+        context = context._shallow_copy()
+        flags = context._ignore_all_flags()
+        firstprec = context.prec
+        context.prec = 3
+        # Seed with a 3-digit linear approximation; which of the two constant
+        # pairs (819/259) is used depends on the adjusted-exponent parity.
+        if tmp.adjusted() & 1 == 0:
+            ans = Decimal( (0, (8,1,9), tmp.adjusted()  - 2) )
+            ans = ans.__add__(tmp.__mul__(Decimal((0, (2,5,9), -2)),
+                                          context=context), context=context)
+            ans._exp -= 1 + tmp.adjusted() // 2
+        else:
+            ans = Decimal( (0, (2,5,9), tmp._exp + len(tmp._int)- 3) )
+            ans = ans.__add__(tmp.__mul__(Decimal((0, (8,1,9), -3)),
+                                          context=context), context=context)
+            ans._exp -= 1 + tmp.adjusted()  // 2
+
+        #ans is now a linear approximation.
+
+        # Widen the exponent range so intermediates cannot overflow/underflow.
+        Emax, Emin = context.Emax, context.Emin
+        context.Emax, context.Emin = DefaultContext.Emax, DefaultContext.Emin
+
+        half = Decimal('0.5')
+
+        # Newton iteration, roughly doubling the working precision each pass.
+        maxp = firstprec + 2
+        rounding = context._set_rounding(ROUND_HALF_EVEN)
+        while 1:
+            context.prec = min(2*context.prec - 2, maxp)
+            ans = half.__mul__(ans.__add__(tmp.__div__(ans, context=context),
+                                           context=context), context=context)
+            if context.prec == maxp:
+                break
+
+        #round to the answer's precision-- the only error can be 1 ulp.
+        context.prec = firstprec
+        prevexp = ans.adjusted()
+        ans = ans._round(context=context)
+
+        #Now, check if the other last digits are better.
+        context.prec = firstprec + 1
+        # In case we rounded up another digit and we should actually go lower.
+        if prevexp != ans.adjusted():
+            ans._int += (0,)
+            ans._exp -= 1
+
+
+        # Nudge down one ulp if (ans - 0.5ulp)**2 already exceeds tmp,
+        # otherwise nudge up if (ans + 0.5ulp)**2 is still below tmp.
+        lower = ans.__sub__(Decimal((0, (5,), ans._exp-1)), context=context)
+        context._set_rounding(ROUND_UP)
+        if lower.__mul__(lower, context=context) > (tmp):
+            ans = ans.__sub__(Decimal((0, (1,), ans._exp)), context=context)
+
+        else:
+            upper = ans.__add__(Decimal((0, (5,), ans._exp-1)),context=context)
+            context._set_rounding(ROUND_DOWN)
+            if upper.__mul__(upper, context=context) < tmp:
+                ans = ans.__add__(Decimal((0, (1,), ans._exp)),context=context)
+
+        # Restore the exponent half that was factored out up front.
+        ans._exp += expadd
+
+        context.prec = firstprec
+        context.rounding = rounding
+        ans = ans._fix(context)
+
+        # Decide whether the root was exact by squaring it back.
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+        if not ans.__mul__(ans, context=context) == self:
+            # Only rounded/inexact if here.
+            context._regard_flags(flags)
+            context._raise_error(Rounded)
+            context._raise_error(Inexact)
+        else:
+            #Exact answer, so let's set the exponent right.
+            #if self._exp < 0:
+            #    exp = (self._exp +1)// 2
+            #else:
+            exp = self._exp // 2
+            context.prec += ans._exp - exp
+            ans = ans._rescale(exp, context=context)
+            context.prec = firstprec
+            context._regard_flags(flags)
+        context.Emax, context.Emin = Emax, Emin
+
+        return ans._fix(context)
+
+    def max(self, other, context=None):
+        """Returns the larger value.
+
+        like max(self, other) except if one is not a number, returns
+        NaN (and signals if one is sNaN).  Also rounds.
+        """
+        other = _convert_other(other)
+
+        if self._is_special or other._is_special:
+            # if one operand is a quiet NaN and the other is number, then the
+            # number is always returned
+            sn = self._isnan()
+            on = other._isnan()
+            if sn or on:
+                if on == 1 and sn != 2:
+                    return self
+                if sn == 1 and on != 2:
+                    return other
+                return self._check_nans(other, context)
+
+        ans = self
+        c = self.__cmp__(other)
+        if c == 0:
+            # if both operands are finite and equal in numerical value
+            # then an ordering is applied:
+            #
+            # if the signs differ then max returns the operand with the
+            # positive sign and min returns the operand with the negative sign
+            #
+            # if the signs are the same then the exponent is used to select
+            # the result.
+            if self._sign != other._sign:
+                if self._sign:
+                    ans = other
+            elif self._exp < other._exp and not self._sign:
+                ans = other
+            elif self._exp > other._exp and self._sign:
+                ans = other
+        elif c == -1:
+            ans = other
+
+        if context is None:
+            context = getcontext()
+        if context._rounding_decision == ALWAYS_ROUND:
+            return ans._fix(context)
+        return ans
+
+    def min(self, other, context=None):
+        """Returns the smaller value.
+
+        like min(self, other) except if one is not a number, returns
+        NaN (and signals if one is sNaN).  Also rounds.
+        """
+        other = _convert_other(other)
+
+        if self._is_special or other._is_special:
+            # if one operand is a quiet NaN and the other is number, then the
+            # number is always returned
+            sn = self._isnan()
+            on = other._isnan()
+            if sn or on:
+                if on == 1 and sn != 2:
+                    return self
+                if sn == 1 and on != 2:
+                    return other
+                return self._check_nans(other, context)
+
+        ans = self
+        c = self.__cmp__(other)
+        if c == 0:
+            # if both operands are finite and equal in numerical value
+            # then an ordering is applied:
+            #
+            # if the signs differ then max returns the operand with the
+            # positive sign and min returns the operand with the negative sign
+            #
+            # if the signs are the same then the exponent is used to select
+            # the result.
+            if self._sign != other._sign:
+                if other._sign:
+                    ans = other
+            elif self._exp > other._exp and not self._sign:
+                ans = other
+            elif self._exp < other._exp and self._sign:
+                ans = other
+        elif c == 1:
+            ans = other
+
+        if context is None:
+            context = getcontext()
+        if context._rounding_decision == ALWAYS_ROUND:
+            return ans._fix(context)
+        return ans
+
+    def _isinteger(self):
+        """Returns whether self is an integer"""
+        if self._exp >= 0:
+            return True
+        rest = self._int[self._exp:]
+        return rest == (0,)*len(rest)
+
+    def _iseven(self):
+        """Returns 1 if self is even.  Assumes self is an integer."""
+        if self._exp > 0:
+            return 1
+        return self._int[-1+self._exp] & 1 == 0
+
+    def adjusted(self):
+        """Return the adjusted exponent of self"""
+        try:
+            return self._exp + len(self._int) - 1
+        #If NaN or Infinity, self._exp is string
+        except TypeError:
+            return 0
+
+    # support for pickling, copy, and deepcopy
+    def __reduce__(self):
+        """Pickle support: rebuild via the constructor from str(self)."""
+        return (self.__class__, (str(self),))
+
+    def __copy__(self):
+        if type(self) == Decimal:
+            return self     # I'm immutable; therefore I am my own clone
+        return self.__class__(str(self))
+
+    def __deepcopy__(self, memo):
+        if type(self) == Decimal:
+            return self     # My components are also immutable
+        return self.__class__(str(self))
+
+##### Context class ###########################################
+
+
+# get rounding method function:
+rounding_functions = [name for name in Decimal.__dict__.keys() if name.startswith('_round_')]
+for name in rounding_functions:
+    #name is like _round_half_even, goes to the global ROUND_HALF_EVEN value.
+    globalname = name[1:].upper()
+    val = globals()[globalname]
+    Decimal._pick_rounding_function[val] = name
+
+del name, val, globalname, rounding_functions
+
+class Context(object):
+    """Contains the context for a Decimal instance.
+
+    Contains:
+    prec - precision (for use in rounding, division, square roots..)
+    rounding - rounding type. (how you round)
+    _rounding_decision - ALWAYS_ROUND, NEVER_ROUND -- do you round?
+    traps - If traps[exception] = 1, then the exception is
+                    raised when it is caused.  Otherwise, a value is
+                    substituted in.
+    flags  - When an exception is caused, flags[exception] is incremented.
+             (Whether or not the trap_enabler is set)
+             Should be reset by user of Decimal instance.
+    Emin -   Minimum exponent
+    Emax -   Maximum exponent
+    capitals -      If 1, 1*10^1 is printed as 1E+1.
+                    If 0, printed as 1e1
+    _clamp - If 1, change exponents if too high (Default 0)
+    """
+
+    def __init__(self, prec=None, rounding=None,
+                 traps=None, flags=None,
+                 _rounding_decision=None,
+                 Emin=None, Emax=None,
+                 capitals=None, _clamp=0,
+                 _ignored_flags=None):
+        # Any argument left as None is filled in from DefaultContext below.
+        if flags is None:
+            flags = []
+        if _ignored_flags is None:
+            _ignored_flags = []
+        # Accept a sequence of signals and expand it to a {signal: bool} map.
+        if not isinstance(flags, dict):
+            flags = dict([(s,s in flags) for s in _signals])
+            # (py2: the list-comp variable 's' leaks into this scope)
+            del s
+        if traps is not None and not isinstance(traps, dict):
+            traps = dict([(s,s in traps) for s in _signals])
+            del s
+        # Assign every parameter onto self, defaulting None values from
+        # DefaultContext; relies on locals() naming exactly the parameters.
+        for name, val in locals().items():
+            if val is None:
+                setattr(self, name, copy.copy(getattr(DefaultContext, name)))
+            else:
+                setattr(self, name, val)
+        # locals() included 'self' itself; drop the accidental attribute.
+        del self.self
+
+    def __repr__(self):
+        """Show the current context."""
+        s = []
+        s.append('Context(prec=%(prec)d, rounding=%(rounding)s, Emin=%(Emin)d, Emax=%(Emax)d, capitals=%(capitals)d' % vars(self))
+        s.append('flags=[' + ', '.join([f.__name__ for f, v in self.flags.items() if v]) + ']')
+        s.append('traps=[' + ', '.join([t.__name__ for t, v in self.traps.items() if v]) + ']')
+        return ', '.join(s) + ')'
+
+    def clear_flags(self):
+        """Reset all flags to zero"""
+        for flag in self.flags:
+            self.flags[flag] = 0
+
+    def _shallow_copy(self):
+        """Returns a shallow copy from self."""
+        nc = Context(self.prec, self.rounding, self.traps, self.flags,
+                         self._rounding_decision, self.Emin, self.Emax,
+                         self.capitals, self._clamp, self._ignored_flags)
+        return nc
+
+    def copy(self):
+        """Returns a deep copy from self."""
+        nc = Context(self.prec, self.rounding, self.traps.copy(), self.flags.copy(),
+                         self._rounding_decision, self.Emin, self.Emax,
+                         self.capitals, self._clamp, self._ignored_flags)
+        return nc
+    __copy__ = copy
+
+    def _raise_error(self, condition, explanation = None, *args):
+        """Handles an error
+
+        If the flag is in _ignored_flags, returns the default response.
+        Otherwise, it increments the flag, then, if the corresponding
+        trap_enabler is set, it raises the exception.  Otherwise, it returns
+        the default value after incrementing the flag.
+        """
+        # Map compound conditions onto the signal that is actually
+        # flagged/trapped for them.
+        error = _condition_map.get(condition, condition)
+        if error in self._ignored_flags:
+            #Don't touch the flag
+            return error().handle(self, *args)
+
+        self.flags[error] += 1
+        if not self.traps[error]:
+            #The errors define how to handle themselves.
+            return condition().handle(self, *args)
+
+        # Errors should only be risked on copies of the context
+        #self._ignored_flags = []
+        raise error, explanation
+
+    def _ignore_all_flags(self):
+        """Ignore all flags, if they are raised"""
+        # Convenience wrapper: start ignoring every defined signal at once.
+        return self._ignore_flags(*_signals)
+
+    def _ignore_flags(self, *flags):
+        """Ignore the flags, if they are raised"""
+        # Do not mutate-- This way, copies of a context leave the original
+        # alone.
+        self._ignored_flags = (self._ignored_flags + list(flags))
+        return list(flags)
+
+    def _regard_flags(self, *flags):
+        """Stop ignoring the flags, if they are raised"""
+        if flags and isinstance(flags[0], (tuple,list)):
+            flags = flags[0]
+        for flag in flags:
+            self._ignored_flags.remove(flag)
+
+    def __hash__(self):
+        """A Context cannot be hashed."""
+        # We inherit object.__hash__, so we must deny this explicitly
+        # (a mutable context would make a hazardous dict key).
+        raise TypeError, "Cannot hash a Context."
+
+    def Etiny(self):
+        """Returns Etiny (= Emin - prec + 1)"""
+        return int(self.Emin - self.prec + 1)
+
+    def Etop(self):
+        """Returns maximum exponent (= Emax - prec + 1)"""
+        return int(self.Emax - self.prec + 1)
+
+    def _set_rounding_decision(self, type):
+        """Sets the rounding decision.
+
+        Sets the rounding decision, and returns the current (previous)
+        rounding decision.  Often used like:
+
+        context = context._shallow_copy()
+        # That so you don't change the calling context
+        # if an error occurs in the middle (say DivisionImpossible is raised).
+
+        rounding = context._set_rounding_decision(NEVER_ROUND)
+        instance = instance / Decimal(2)
+        context._set_rounding_decision(rounding)
+
+        This will make it not round for that operation.
+        """
+
+        rounding = self._rounding_decision
+        self._rounding_decision = type
+        return rounding
+
+    def _set_rounding(self, type):
+        """Sets the rounding type.
+
+        Sets the rounding type, and returns the current (previous)
+        rounding type.  Often used like:
+
+        context = context.copy()
+        # so you don't change the calling context
+        # if an error occurs in the middle.
+        rounding = context._set_rounding(ROUND_UP)
+        val = self.__sub__(other, context=context)
+        context._set_rounding(rounding)
+
+        This will make it round up for that operation.
+        """
+        rounding = self.rounding
+        self.rounding= type
+        return rounding
+
+    def create_decimal(self, num='0'):
+        """Creates a new Decimal instance but using self as context."""
+        d = Decimal(num, context=self)
+        return d._fix(self)
+
+    #Methods
+    def abs(self, a):
+        """Returns the absolute value of the operand.
+
+        If the operand is negative, the result is the same as using the minus
+        operation on the operand. Otherwise, the result is the same as using
+        the plus operation on the operand.
+
+        >>> ExtendedContext.abs(Decimal('2.1'))
+        Decimal("2.1")
+        >>> ExtendedContext.abs(Decimal('-100'))
+        Decimal("100")
+        >>> ExtendedContext.abs(Decimal('101.5'))
+        Decimal("101.5")
+        >>> ExtendedContext.abs(Decimal('-101.5'))
+        Decimal("101.5")
+        """
+        return a.__abs__(context=self)
+
+    def add(self, a, b):
+        """Return the sum of the two operands.
+
+        >>> ExtendedContext.add(Decimal('12'), Decimal('7.00'))
+        Decimal("19.00")
+        >>> ExtendedContext.add(Decimal('1E+2'), Decimal('1.01E+4'))
+        Decimal("1.02E+4")
+        """
+        return a.__add__(b, context=self)
+
+    def _apply(self, a):
+        # Round/fix a into this context and return its string form.
+        return str(a._fix(self))
+
+    def compare(self, a, b):
+        """Compares values numerically.
+
+        If the signs of the operands differ, a value representing each operand
+        ('-1' if the operand is less than zero, '0' if the operand is zero or
+        negative zero, or '1' if the operand is greater than zero) is used in
+        place of that operand for the comparison instead of the actual
+        operand.
+
+        The comparison is then effected by subtracting the second operand from
+        the first and then returning a value according to the result of the
+        subtraction: '-1' if the result is less than zero, '0' if the result is
+        zero or negative zero, or '1' if the result is greater than zero.
+
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('3'))
+        Decimal("-1")
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.1'))
+        Decimal("0")
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('2.10'))
+        Decimal("0")
+        >>> ExtendedContext.compare(Decimal('3'), Decimal('2.1'))
+        Decimal("1")
+        >>> ExtendedContext.compare(Decimal('2.1'), Decimal('-3'))
+        Decimal("1")
+        >>> ExtendedContext.compare(Decimal('-3'), Decimal('2.1'))
+        Decimal("-1")
+        """
+        return a.compare(b, context=self)
+
+    def divide(self, a, b):
+        """Decimal division in a specified context.
+
+        >>> ExtendedContext.divide(Decimal('1'), Decimal('3'))
+        Decimal("0.333333333")
+        >>> ExtendedContext.divide(Decimal('2'), Decimal('3'))
+        Decimal("0.666666667")
+        >>> ExtendedContext.divide(Decimal('5'), Decimal('2'))
+        Decimal("2.5")
+        >>> ExtendedContext.divide(Decimal('1'), Decimal('10'))
+        Decimal("0.1")
+        >>> ExtendedContext.divide(Decimal('12'), Decimal('12'))
+        Decimal("1")
+        >>> ExtendedContext.divide(Decimal('8.00'), Decimal('2'))
+        Decimal("4.00")
+        >>> ExtendedContext.divide(Decimal('2.400'), Decimal('2.0'))
+        Decimal("1.20")
+        >>> ExtendedContext.divide(Decimal('1000'), Decimal('100'))
+        Decimal("10")
+        >>> ExtendedContext.divide(Decimal('1000'), Decimal('1'))
+        Decimal("1000")
+        >>> ExtendedContext.divide(Decimal('2.40E+6'), Decimal('2'))
+        Decimal("1.20E+6")
+        """
+        return a.__div__(b, context=self)
+
+    def divide_int(self, a, b):
+        """Divides two numbers and returns the integer part of the result.
+
+        >>> ExtendedContext.divide_int(Decimal('2'), Decimal('3'))
+        Decimal("0")
+        >>> ExtendedContext.divide_int(Decimal('10'), Decimal('3'))
+        Decimal("3")
+        >>> ExtendedContext.divide_int(Decimal('1'), Decimal('0.3'))
+        Decimal("3")
+        """
+        return a.__floordiv__(b, context=self)
+
+    def divmod(self, a, b):
+        """Delegate to a.__divmod__(b) under this context."""
+        return a.__divmod__(b, context=self)
+
+    def max(self, a,b):
+        """max compares two values numerically and returns the maximum.
+
+        If either operand is a NaN then the general rules apply.
+        Otherwise, the operands are compared as as though by the compare
+        operation. If they are numerically equal then the left-hand operand
+        is chosen as the result. Otherwise the maximum (closer to positive
+        infinity) of the two operands is chosen as the result.
+
+        >>> ExtendedContext.max(Decimal('3'), Decimal('2'))
+        Decimal("3")
+        >>> ExtendedContext.max(Decimal('-10'), Decimal('3'))
+        Decimal("3")
+        >>> ExtendedContext.max(Decimal('1.0'), Decimal('1'))
+        Decimal("1")
+        >>> ExtendedContext.max(Decimal('7'), Decimal('NaN'))
+        Decimal("7")
+        """
+        return a.max(b, context=self)
+
+    def min(self, a,b):
+        """min compares two values numerically and returns the minimum.
+
+        If either operand is a NaN then the general rules apply.
+        Otherwise, the operands are compared as as though by the compare
+        operation. If they are numerically equal then the left-hand operand
+        is chosen as the result. Otherwise the minimum (closer to negative
+        infinity) of the two operands is chosen as the result.
+
+        >>> ExtendedContext.min(Decimal('3'), Decimal('2'))
+        Decimal("2")
+        >>> ExtendedContext.min(Decimal('-10'), Decimal('3'))
+        Decimal("-10")
+        >>> ExtendedContext.min(Decimal('1.0'), Decimal('1'))
+        Decimal("1.0")
+        >>> ExtendedContext.min(Decimal('7'), Decimal('NaN'))
+        Decimal("7")
+        """
+        return a.min(b, context=self)
+
+    def minus(self, a):
+        """Minus corresponds to unary prefix minus in Python.
+
+        The operation is evaluated using the same rules as subtract; the
+        operation minus(a) is calculated as subtract('0', a) where the '0'
+        has the same exponent as the operand.
+
+        >>> ExtendedContext.minus(Decimal('1.3'))
+        Decimal("-1.3")
+        >>> ExtendedContext.minus(Decimal('-1.3'))
+        Decimal("1.3")
+        """
+        return a.__neg__(context=self)
+
+    def multiply(self, a, b):
+        """multiply multiplies two operands.
+
+        If either operand is a special value then the general rules apply.
+        Otherwise, the operands are multiplied together ('long multiplication'),
+        resulting in a number which may be as long as the sum of the lengths
+        of the two operands.
+
+        >>> ExtendedContext.multiply(Decimal('1.20'), Decimal('3'))
+        Decimal("3.60")
+        >>> ExtendedContext.multiply(Decimal('7'), Decimal('3'))
+        Decimal("21")
+        >>> ExtendedContext.multiply(Decimal('0.9'), Decimal('0.8'))
+        Decimal("0.72")
+        >>> ExtendedContext.multiply(Decimal('0.9'), Decimal('-0'))
+        Decimal("-0.0")
+        >>> ExtendedContext.multiply(Decimal('654321'), Decimal('654321'))
+        Decimal("4.28135971E+11")
+        """
+        return a.__mul__(b, context=self)
+
+    def normalize(self, a):
+        """normalize reduces an operand to its simplest form.
+
+        Essentially a plus operation with all trailing zeros removed from the
+        result.
+
+        >>> ExtendedContext.normalize(Decimal('2.1'))
+        Decimal("2.1")
+        >>> ExtendedContext.normalize(Decimal('-2.0'))
+        Decimal("-2")
+        >>> ExtendedContext.normalize(Decimal('1.200'))
+        Decimal("1.2")
+        >>> ExtendedContext.normalize(Decimal('-120'))
+        Decimal("-1.2E+2")
+        >>> ExtendedContext.normalize(Decimal('120.00'))
+        Decimal("1.2E+2")
+        >>> ExtendedContext.normalize(Decimal('0.00'))
+        Decimal("0")
+        """
+        return a.normalize(context=self)
+
+    def plus(self, a):
+        """Plus corresponds to unary prefix plus in Python.
+
+        The operation is evaluated using the same rules as add; the
+        operation plus(a) is calculated as add('0', a) where the '0'
+        has the same exponent as the operand.
+
+        >>> ExtendedContext.plus(Decimal('1.3'))
+        Decimal("1.3")
+        >>> ExtendedContext.plus(Decimal('-1.3'))
+        Decimal("-1.3")
+        """
+        return a.__pos__(context=self)
+
+    def power(self, a, b, modulo=None):
+        """Raises a to the power of b, to modulo if given.
+
+        The right-hand operand must be a whole number whose integer part (after
+        any exponent has been applied) has no more than 9 digits and whose
+        fractional part (if any) is all zeros before any rounding. The operand
+        may be positive, negative, or zero; if negative, the absolute value of
+        the power is used, and the left-hand operand is inverted (divided into
+        1) before use.
+
+        If the increased precision needed for the intermediate calculations
+        exceeds the capabilities of the implementation then an Invalid operation
+        condition is raised.
+
+        If, when raising to a negative power, an underflow occurs during the
+        division into 1, the operation is not halted at that point but
+        continues.
+
+        >>> ExtendedContext.power(Decimal('2'), Decimal('3'))
+        Decimal("8")
+        >>> ExtendedContext.power(Decimal('2'), Decimal('-3'))
+        Decimal("0.125")
+        >>> ExtendedContext.power(Decimal('1.7'), Decimal('8'))
+        Decimal("69.7575744")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('-2'))
+        Decimal("0")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('-1'))
+        Decimal("0")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('0'))
+        Decimal("1")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('1'))
+        Decimal("Infinity")
+        >>> ExtendedContext.power(Decimal('Infinity'), Decimal('2'))
+        Decimal("Infinity")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('-2'))
+        Decimal("0")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('-1'))
+        Decimal("-0")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('0'))
+        Decimal("1")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('1'))
+        Decimal("-Infinity")
+        >>> ExtendedContext.power(Decimal('-Infinity'), Decimal('2'))
+        Decimal("Infinity")
+        >>> ExtendedContext.power(Decimal('0'), Decimal('0'))
+        Decimal("NaN")
+        """
+        return a.__pow__(b, modulo, context=self)
+
+    def quantize(self, a, b):
+        """Returns a value equal to 'a' (rounded) and having the exponent of 'b'.
+
+        The coefficient of the result is derived from that of the left-hand
+        operand. It may be rounded using the current rounding setting (if the
+        exponent is being increased), multiplied by a positive power of ten (if
+        the exponent is being decreased), or is unchanged (if the exponent is
+        already equal to that of the right-hand operand).
+
+        Unlike other operations, if the length of the coefficient after the
+        quantize operation would be greater than precision then an Invalid
+        operation condition is raised. This guarantees that, unless there is an
+        error condition, the exponent of the result of a quantize is always
+        equal to that of the right-hand operand.
+
+        Also unlike other operations, quantize will never raise Underflow, even
+        if the result is subnormal and inexact.
+
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.001'))
+        Decimal("2.170")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.01'))
+        Decimal("2.17")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('0.1'))
+        Decimal("2.2")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('1e+0'))
+        Decimal("2")
+        >>> ExtendedContext.quantize(Decimal('2.17'), Decimal('1e+1'))
+        Decimal("0E+1")
+        >>> ExtendedContext.quantize(Decimal('-Inf'), Decimal('Infinity'))
+        Decimal("-Infinity")
+        >>> ExtendedContext.quantize(Decimal('2'), Decimal('Infinity'))
+        Decimal("NaN")
+        >>> ExtendedContext.quantize(Decimal('-0.1'), Decimal('1'))
+        Decimal("-0")
+        >>> ExtendedContext.quantize(Decimal('-0'), Decimal('1e+5'))
+        Decimal("-0E+5")
+        >>> ExtendedContext.quantize(Decimal('+35236450.6'), Decimal('1e-2'))
+        Decimal("NaN")
+        >>> ExtendedContext.quantize(Decimal('-35236450.6'), Decimal('1e-2'))
+        Decimal("NaN")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e-1'))
+        Decimal("217.0")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e-0'))
+        Decimal("217")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e+1'))
+        Decimal("2.2E+2")
+        >>> ExtendedContext.quantize(Decimal('217'), Decimal('1e+2'))
+        Decimal("2E+2")
+        """
+        return a.quantize(b, context=self)
+
+    def remainder(self, a, b):
+        """Returns the remainder from integer division.
+
+        The result is the residue of the dividend after the operation of
+        calculating integer division as described for divide-integer, rounded to
+        precision digits if necessary. The sign of the result, if non-zero, is
+        the same as that of the original dividend.
+
+        This operation will fail under the same conditions as integer division
+        (that is, if integer division on the same two operands would fail, the
+        remainder cannot be calculated).
+
+        >>> ExtendedContext.remainder(Decimal('2.1'), Decimal('3'))
+        Decimal("2.1")
+        >>> ExtendedContext.remainder(Decimal('10'), Decimal('3'))
+        Decimal("1")
+        >>> ExtendedContext.remainder(Decimal('-10'), Decimal('3'))
+        Decimal("-1")
+        >>> ExtendedContext.remainder(Decimal('10.2'), Decimal('1'))
+        Decimal("0.2")
+        >>> ExtendedContext.remainder(Decimal('10'), Decimal('0.3'))
+        Decimal("0.1")
+        >>> ExtendedContext.remainder(Decimal('3.6'), Decimal('1.3'))
+        Decimal("1.0")
+        """
+        return a.__mod__(b, context=self)
+
+    def remainder_near(self, a, b):
+        """Returns "a - b * n", where n is the integer nearest the exact
+        value of "a / b" (if two integers are equally near then the even one
+        is chosen). If the result is equal to 0 then its sign will be the
+        sign of a.
+
+        This operation will fail under the same conditions as integer division
+        (that is, if integer division on the same two operands would fail, the
+        remainder cannot be calculated).
+
+        >>> ExtendedContext.remainder_near(Decimal('2.1'), Decimal('3'))
+        Decimal("-0.9")
+        >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('6'))
+        Decimal("-2")
+        >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('3'))
+        Decimal("1")
+        >>> ExtendedContext.remainder_near(Decimal('-10'), Decimal('3'))
+        Decimal("-1")
+        >>> ExtendedContext.remainder_near(Decimal('10.2'), Decimal('1'))
+        Decimal("0.2")
+        >>> ExtendedContext.remainder_near(Decimal('10'), Decimal('0.3'))
+        Decimal("0.1")
+        >>> ExtendedContext.remainder_near(Decimal('3.6'), Decimal('1.3'))
+        Decimal("-0.3")
+        """
+        return a.remainder_near(b, context=self)
+
+    def same_quantum(self, a, b):
+        """Returns True if the two operands have the same exponent.
+
+        The result is never affected by either the sign or the coefficient of
+        either operand.
+
+        >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('0.001'))
+        False
+        >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('0.01'))
+        True
+        >>> ExtendedContext.same_quantum(Decimal('2.17'), Decimal('1'))
+        False
+        >>> ExtendedContext.same_quantum(Decimal('Inf'), Decimal('-Inf'))
+        True
+        """
+        return a.same_quantum(b)
+
+    def sqrt(self, a):
+        """Returns the square root of a non-negative number to context precision.
+
+        If the result must be inexact, it is rounded using the round-half-even
+        algorithm.
+
+        >>> ExtendedContext.sqrt(Decimal('0'))
+        Decimal("0")
+        >>> ExtendedContext.sqrt(Decimal('-0'))
+        Decimal("-0")
+        >>> ExtendedContext.sqrt(Decimal('0.39'))
+        Decimal("0.624499800")
+        >>> ExtendedContext.sqrt(Decimal('100'))
+        Decimal("10")
+        >>> ExtendedContext.sqrt(Decimal('1'))
+        Decimal("1")
+        >>> ExtendedContext.sqrt(Decimal('1.0'))
+        Decimal("1.0")
+        >>> ExtendedContext.sqrt(Decimal('1.00'))
+        Decimal("1.0")
+        >>> ExtendedContext.sqrt(Decimal('7'))
+        Decimal("2.64575131")
+        >>> ExtendedContext.sqrt(Decimal('10'))
+        Decimal("3.16227766")
+        >>> ExtendedContext.prec
+        9
+        """
+        return a.sqrt(context=self)
+
+    def subtract(self, a, b):
+        """Return the difference of the two operands.
+
+        >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.07'))
+        Decimal("0.23")
+        >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('1.30'))
+        Decimal("0.00")
+        >>> ExtendedContext.subtract(Decimal('1.3'), Decimal('2.07'))
+        Decimal("-0.77")
+        """
+        return a.__sub__(b, context=self)
+
+    def to_eng_string(self, a):
+        """Converts a number to a string, using scientific notation.
+
+        The operation is not affected by the context.
+        """
+        return a.to_eng_string(context=self)
+
+    def to_sci_string(self, a):
+        """Converts a number to a string, using scientific notation.
+
+        The operation is not affected by the context.
+        """
+        return a.__str__(context=self)
+
+    def to_integral(self, a):
+        """Rounds to an integer.
+
+        When the operand has a negative exponent, the result is the same
+        as using the quantize() operation using the given operand as the
+        left-hand-operand, 1E+0 as the right-hand-operand, and the precision
+        of the operand as the precision setting, except that no flags will
+        be set. The rounding mode is taken from the context.
+
+        >>> ExtendedContext.to_integral(Decimal('2.1'))
+        Decimal("2")
+        >>> ExtendedContext.to_integral(Decimal('100'))
+        Decimal("100")
+        >>> ExtendedContext.to_integral(Decimal('100.0'))
+        Decimal("100")
+        >>> ExtendedContext.to_integral(Decimal('101.5'))
+        Decimal("102")
+        >>> ExtendedContext.to_integral(Decimal('-101.5'))
+        Decimal("-102")
+        >>> ExtendedContext.to_integral(Decimal('10E+5'))
+        Decimal("1.0E+6")
+        >>> ExtendedContext.to_integral(Decimal('7.89E+77'))
+        Decimal("7.89E+77")
+        >>> ExtendedContext.to_integral(Decimal('-Inf'))
+        Decimal("-Infinity")
+        """
+        return a.to_integral(context=self)
+
+class _WorkRep(object):
+    __slots__ = ('sign','int','exp')
+    # sign: 0 or 1
+    # int:  int or long
+    # exp:  None, int, or string
+
+    def __init__(self, value=None):
+        if value is None:
+            self.sign = None
+            self.int = 0
+            self.exp = None
+        elif isinstance(value, Decimal):
+            self.sign = value._sign
+            cum = 0
+            for digit  in value._int:
+                cum = cum * 10 + digit
+            self.int = cum
+            self.exp = value._exp
+        else:
+            # assert isinstance(value, tuple)
+            self.sign = value[0]
+            self.int = value[1]
+            self.exp = value[2]
+
+    def __repr__(self):
+        return "(%r, %r, %r)" % (self.sign, self.int, self.exp)
+
+    __str__ = __repr__
+
+
+
+def _normalize(op1, op2, shouldround = 0, prec = 0):
+    """Normalizes op1, op2 to have the same exp and length of coefficient.
+
+    Done during addition.
+    """
+    # Yes, the exponent is a long, but the difference between exponents
+    # must be an int-- otherwise you'd get a big memory problem.
+    numdigits = int(op1.exp - op2.exp)
+    if numdigits < 0:
+        numdigits = -numdigits
+        tmp = op2
+        other = op1
+    else:
+        tmp = op1
+        other = op2
+
+
+    if shouldround and numdigits > prec + 1:
+        # Big difference in exponents - check the adjusted exponents
+        tmp_len = len(str(tmp.int))
+        other_len = len(str(other.int))
+        if numdigits > (other_len + prec + 1 - tmp_len):
+            # If the difference in adjusted exps is > prec+1, we know
+            # other is insignificant, so might as well put a 1 after the precision.
+            # (since this is only for addition.)  Also stops use of massive longs.
+
+            extend = prec + 2 - tmp_len
+            if extend <= 0:
+                extend = 1
+            tmp.int *= 10 ** extend
+            tmp.exp -= extend
+            other.int = 1
+            other.exp = tmp.exp
+            return op1, op2
+
+    tmp.int *= 10 ** numdigits
+    tmp.exp -= numdigits
+    return op1, op2
+
+def _adjust_coefficients(op1, op2):
+    """Adjust op1, op2 so that op2.int * 10 > op1.int >= op2.int.
+
+    Returns the adjusted op1, op2 as well as the change in op1.exp-op2.exp.
+
+    Used on _WorkRep instances during division.
+    """
+    adjust = 0
+    #If op1 is smaller, make it larger
+    while op2.int > op1.int:
+        op1.int *= 10
+        op1.exp -= 1
+        adjust += 1
+
+    #If op2 is too small, make it larger
+    while op1.int >= (10 * op2.int):
+        op2.int *= 10
+        op2.exp -= 1
+        adjust -= 1
+
+    return op1, op2, adjust
+
+##### Helper Functions ########################################
+
+def _convert_other(other):
+    """Convert other to Decimal.
+
+    Verifies that it's ok to use in an implicit construction.
+    """
+    if isinstance(other, Decimal):
+        return other
+    if isinstance(other, (int, long)):
+        return Decimal(other)
+
+    raise TypeError, "You can interact Decimal only with int, long or Decimal data types."
+
+_infinity_map = {
+    'inf' : 1,
+    'infinity' : 1,
+    '+inf' : 1,
+    '+infinity' : 1,
+    '-inf' : -1,
+    '-infinity' : -1
+}
+
+def _isinfinity(num):
+    """Determines whether a string or float is infinity.
+
+    -1 for negative infinity; 0 for finite; +1 for positive infinity
+    """
+    num = str(num).lower()
+    return _infinity_map.get(num, 0)
+
+def _isnan(num):
+    """Determines whether a string or float is NaN
+
+    (1, sign, diagnostic info as string) => NaN
+    (2, sign, diagnostic info as string) => sNaN
+    0 => not a NaN
+    """
+    num = str(num).lower()
+    if not num:
+        return 0
+
+    #get the sign, get rid of trailing [+-]
+    sign = 0
+    if num[0] == '+':
+        num = num[1:]
+    elif num[0] == '-':  #elif avoids '+-nan'
+        num = num[1:]
+        sign = 1
+
+    if num.startswith('nan'):
+        if len(num) > 3 and not num[3:].isdigit(): #diagnostic info
+            return 0
+        return (1, sign, num[3:].lstrip('0'))
+    if num.startswith('snan'):
+        if len(num) > 4 and not num[4:].isdigit():
+            return 0
+        return (2, sign, num[4:].lstrip('0'))
+    return 0
+
+
+##### Setup Specific Contexts ################################
+
+# The default context prototype used by Context()
+# Is mutable, so that new contexts can have different default values
+
+DefaultContext = Context(
+        prec=28, rounding=ROUND_HALF_EVEN,
+        traps=[DivisionByZero, Overflow, InvalidOperation],
+        flags=[],
+        _rounding_decision=ALWAYS_ROUND,
+        Emax=999999999,
+        Emin=-999999999,
+        capitals=1
+)
+
+# Pre-made alternate contexts offered by the specification
+# Don't change these; the user should be able to select these
+# contexts and be able to reproduce results from other implementations
+# of the spec.
+
+BasicContext = Context(
+        prec=9, rounding=ROUND_HALF_UP,
+        traps=[DivisionByZero, Overflow, InvalidOperation, Clamped, Underflow],
+        flags=[],
+)
+
+ExtendedContext = Context(
+        prec=9, rounding=ROUND_HALF_EVEN,
+        traps=[],
+        flags=[],
+)
+
+
+##### Useful Constants (internal use only) ####################
+
+#Reusable defaults
+Inf = Decimal('Inf')
+negInf = Decimal('-Inf')
+
+#Infsign[sign] is infinity w/ that sign
+Infsign = (Inf, negInf)
+
+NaN = Decimal('NaN')
+
+
+##### crud for parsing strings #################################
+import re
+
+# There's an optional sign at the start, and an optional exponent
+# at the end.  The exponent has an optional sign and at least one
+# digit.  In between, must have either at least one digit followed
+# by an optional fraction, or a decimal point followed by at least
+# one digit.  Yuck.
+
+_parser = re.compile(r"""
+#    \s*
+    (?P<sign>[-+])?
+    (
+        (?P<int>\d+) (\. (?P<frac>\d*))?
+    |
+        \. (?P<onlyfrac>\d+)
+    )
+    ([eE](?P<exp>[-+]? \d+))?
+#    \s*
+    $
+""", re.VERBOSE).match #Uncomment the \s* to allow leading or trailing spaces.
+
+del re
+
+# return sign, n, p s.t. float string value == -1**sign * n * 10**p exactly
+
+def _string2exact(s):
+    m = _parser(s)
+    if m is None:
+        raise ValueError("invalid literal for Decimal: %r" % s)
+
+    if m.group('sign') == "-":
+        sign = 1
+    else:
+        sign = 0
+
+    exp = m.group('exp')
+    if exp is None:
+        exp = 0
+    else:
+        exp = int(exp)
+
+    intpart = m.group('int')
+    if intpart is None:
+        intpart = ""
+        fracpart = m.group('onlyfrac')
+    else:
+        fracpart = m.group('frac')
+        if fracpart is None:
+            fracpart = ""
+
+    exp -= len(fracpart)
+
+    mantissa = intpart + fracpart
+    tmp = map(int, mantissa)
+    backup = tmp
+    while tmp and tmp[0] == 0:
+        del tmp[0]
+
+    # It's a zero
+    if not tmp:
+        if backup:
+            return (sign, tuple(backup), exp)
+        return (sign, (0,), exp)
+    mantissa = tuple(tmp)
+
+    return (sign, mantissa, exp)
+
+
+if __name__ == '__main__':
+    import doctest, sys
+    doctest.testmod(sys.modules[__name__])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/connect.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/connect.py
new file mode 100644
index 0000000..d91a1c4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/connect.py
@@ -0,0 +1,82 @@
+# Implements _both_ a connectable client, and a connectable server.
+#
+# Note that we cheat just a little - the Server in this demo is not created
+# via Normal COM - this means we can avoid registering the server.
+# However, the server _is_ accessed as a COM object - just the creation
+# is cheated on - so this is still working as a fully-fledged server.
+
+import pythoncom
+import win32com.server.util
+import win32com.server.connect
+from win32com.server.exception import Exception
+
+# This is the IID of the Events interface both Client and Server support.
+IID_IConnectDemoEvents = pythoncom.MakeIID("{A4988850-49C3-11d0-AE5D-52342E000000}")
+
+# The server which implements
+# Create a connectable class, that has a single public method
+# 'DoIt', which echos to a single sink 'DoneIt'
+
+class ConnectableServer(win32com.server.connect.ConnectableServer):
+	_public_methods_ = ["DoIt"] + win32com.server.connect.ConnectableServer._public_methods_
+	_connect_interfaces_ = [IID_IConnectDemoEvents]
+	# The single public method that the client can call on us
	# (ie, as a normal COM server, this exposes just this single method.)
+	def DoIt(self,arg):
+		# Simply broadcast a notification.
+		self._BroadcastNotify(self.NotifyDoneIt, (arg,))
+
+	def NotifyDoneIt(self, interface, arg):
+		interface.Invoke(1000, 0, pythoncom.DISPATCH_METHOD, 1, arg)
+
+# Here is the client side of the connection world.
+# Define a COM object which implements the methods defined by the
+# IConnectDemoEvents interface.								
+class ConnectableClient:
+	# This is another cheat - I _know_ the server defines the "DoneIt" event
+	# as DISPID==1000 - I also know from the implementation details of COM
+	# that the first method in _public_methods_ gets 1000.
+	# Normally some explicit DISPID->Method mapping is required.
+	_public_methods_ = ["OnDoneIt"]
+	def __init__(self):
+		self.last_event_arg = None
+	# A client must implement QI, and respond to a query for the Event interface.
	# In addition, it must provide a COM object (which server.util.wrap does).
+	def _query_interface_(self, iid):
+		import win32com.server.util
+		# Note that this seems like a necessary hack.  I am responding to IID_IConnectDemoEvents
+		# but only creating an IDispatch gateway object.
+		if iid==IID_IConnectDemoEvents: return win32com.server.util.wrap(self)
+	# And here is our event method which gets called.
+	def OnDoneIt(self, arg):
+		self.last_event_arg = arg
+
+def CheckEvent(server, client, val, verbose):
+	client.last_event_arg = None
+	server.DoIt(val)
+	if client.last_event_arg != val:
+		raise RuntimeError, "Sent %r, but got back %r" % (val, client.last_event_arg)
+	if verbose:
+		print "Sent and received %r" % val
+
+# A simple test script for all this.
+# In the real world, it is likely that the code controlling the server
+# will be in the same class as that getting the notifications.
+def test(verbose=0):
+	import win32com.client.dynamic, win32com.client.connect
+	import win32com.server.policy
+	server = win32com.client.dynamic.Dispatch(win32com.server.util.wrap(ConnectableServer()))
+	connection = win32com.client.connect.SimpleConnection()
+	client = ConnectableClient()
+	connection.Connect(server, client, IID_IConnectDemoEvents)
+	CheckEvent(server, client, "Hello", verbose)
+	CheckEvent(server, client, "Here is a null>"+chr(0)+"<", verbose)
+	CheckEvent(server, client, u"Here is a null>"+unichr(0)+"<", verbose)
+	val = unicode("test-\xe0\xf2", "latin-1") # 2 latin characters.
+	CheckEvent(server, client, val, verbose)
+	if verbose:
+		print "Everything seemed to work!"
+	# Aggressive memory leak checking (ie, do nothing!) :-)  All should cleanup OK???
+
+if __name__=='__main__':
+	test(1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/dump_clipboard.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/dump_clipboard.py
new file mode 100644
index 0000000..af1a1c9f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/dump_clipboard.py
@@ -0,0 +1,64 @@
+import pythoncom
+import win32con
+
+formats = """CF_TEXT CF_BITMAP CF_METAFILEPICT CF_SYLK CF_DIF CF_TIFF
+            CF_OEMTEXT CF_DIB CF_PALETTE CF_PENDATA CF_RIFF CF_WAVE
+            CF_UNICODETEXT CF_ENHMETAFILE CF_HDROP CF_LOCALE CF_MAX
+            CF_OWNERDISPLAY CF_DSPTEXT CF_DSPBITMAP CF_DSPMETAFILEPICT
+            CF_DSPENHMETAFILE""".split()
+format_name_map = {}
+for f in formats:
+    val = getattr(win32con, f)
+    format_name_map[val]=f
+
+tymeds = [attr for attr in pythoncom.__dict__.keys() if attr.startswith("TYMED_")]
+
+def DumpClipboard():
+    do = pythoncom.OleGetClipboard()
+    print "Dumping all clipboard formats..."
+    for fe in do.EnumFormatEtc():
+        fmt, td, aspect, index, tymed = fe
+        tymeds_this = [getattr(pythoncom, t) for t in tymeds if tymed & getattr(pythoncom, t)]
+        print "Clipboard format", format_name_map.get(fmt,str(fmt))
+        for t_this in tymeds_this:
+            # As we are enumerating there should be no need to call
+            # QueryGetData, but we do anyway!
+            fetc_query = fmt, td, aspect, index, t_this
+            try:
+                do.QueryGetData(fetc_query)
+            except pythoncom.com_error:
+                print "Eeek - QGD indicated failure for tymed", t_this
+            # now actually get it.
+            medium = do.GetData(fetc_query)
+            if medium.tymed==pythoncom.TYMED_GDI:
+                data = "GDI handle %d" % medium.data
+            elif medium.tymed==pythoncom.TYMED_MFPICT:
+                data = "METAFILE handle %d" % medium.data
+            elif medium.tymed==pythoncom.TYMED_ENHMF:
+                data = "ENHMETAFILE handle %d" % medium.data
+            elif medium.tymed==pythoncom.TYMED_HGLOBAL:
+                data = "%d bytes via HGLOBAL" % len(medium.data)
+            elif medium.tymed==pythoncom.TYMED_FILE:
+                data = "filename '%s'" % data
+            elif medium.tymed==pythoncom.TYMED_ISTREAM:
+                stream = medium.data
+                stream.Seek(0,0)
+                bytes = 0
+                while 1:
+                    chunk = stream.Read(4096)
+                    if not chunk:
+                        break
+                    bytes += len(chunk)
+                data = "%d bytes via IStream" % bytes
+            elif medium.tymed==pythoncom.TYMED_ISTORAGE:
+                data = "a IStorage"
+            else:
+                data = "*** unknown tymed!"
+            print " -> got", data
+    do = None
+
+if __name__=='__main__':
+    DumpClipboard()
+    if pythoncom._GetInterfaceCount()+pythoncom._GetGatewayCount():
+        print "XXX - Leaving with %d/%d COM objects alive" % \
+              (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/eventsApartmentThreaded.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/eventsApartmentThreaded.py
new file mode 100644
index 0000000..3a6a926
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/eventsApartmentThreaded.py
@@ -0,0 +1,94 @@
+# A sample originally provided by Richard Bell, and modified by Mark Hammond.
+
+# This sample demonstrates how to use COM events in an apartment-threaded
+# world.  In this world, COM itself ensures that all calls to and events
+# from an object happen on the same thread that created the object, even
+# if they originated from different threads.  For this cross-thread
+# marshalling to work, this main thread *must* run a "message-loop" (ie,
+# a loop fetching and dispatching Windows messages).  Without such message
+# processing, dead-locks can occur.
+
+# See also eventsFreeThreaded.py for how to do this in a free-threaded 
+# world where these marshalling considerations do not exist.
+
+# NOTE: This example uses Internet Explorer, but it should not be considered
+# a "best-practices" for writing against IE events, but for working with
+# events in general. For example:
+# * The first OnDocumentComplete event is not a reliable indicator that the
+#   URL has completed loading
+# * As we are demonstrating the most efficient way of handling events, when
+#   running this sample you will see an IE Windows briefly appear, but
+#   vanish without ever being repainted.
+
+import sys
+import os
+import win32com.client
+import win32api
+import win32event
+# sys.coinit_flags not set, so pythoncom initializes apartment-threaded.
+import pythoncom
+import time
+
+class ExplorerEvents:
+    def __init__(self):
+        self.event = win32event.CreateEvent(None, 0, 0, None)
+    def OnDocumentComplete(self,
+                           pDisp=pythoncom.Empty,
+                           URL=pythoncom.Empty):
+        thread = win32api.GetCurrentThreadId()
+        print "OnDocumentComplete event processed on thread %d"%thread
+        # Set the event our main thread is waiting on.
+        win32event.SetEvent(self.event)
+    def OnQuit(self):
+        thread = win32api.GetCurrentThreadId()
+        print "OnQuit event processed on thread %d"%thread
+        win32event.SetEvent(self.event)
+
+def WaitWhileProcessingMessages(event, timeout = 2):
+    start = time.clock()
+    while True:
+        # Wake 4 times a second - we can't just specify the
+        # full timeout here, as then it would reset for every
+        # message we process.
+        rc = win32event.MsgWaitForMultipleObjects( (event,), 0,
+                                250,
+                                win32event.QS_ALLEVENTS)
+        if rc == win32event.WAIT_OBJECT_0:
+            # event signalled - stop now!
+            return True
+        if (time.clock() - start) > timeout:
+            # Timeout expired.
+            return False
+        # must be a message.
+        pythoncom.PumpWaitingMessages()
+
+def TestExplorerEvents():
+    iexplore = win32com.client.DispatchWithEvents(
+        "InternetExplorer.Application", ExplorerEvents)
+
+    thread = win32api.GetCurrentThreadId()
+    print 'TestExplorerEvents created IE object on thread %d'%thread
+
+    iexplore.Visible = 1
+    try:
+        iexplore.Navigate(win32api.GetFullPathName('..\\readme.htm'))
+    except pythoncom.com_error, details:
+        print "Warning - could not open the test HTML file", details
+
+    # Wait for the event to be signalled while pumping messages.
+    if not WaitWhileProcessingMessages(iexplore.event):
+        print "Document load event FAILED to fire!!!"
+
+    iexplore.Quit()
+    #
+    # Give IE a chance to shutdown, else it can get upset on fast machines.
+    # Note, Quit generates events.  Although this test does NOT catch them
+    # it is NECESSARY to pump messages here instead of a sleep so that the Quit
+    # happens properly!
+    if not WaitWhileProcessingMessages(iexplore.event):
+        print "OnQuit event FAILED to fire!!!"
+
+    iexplore = None
+
+if __name__=='__main__':
+    TestExplorerEvents()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/eventsFreeThreaded.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/eventsFreeThreaded.py
new file mode 100644
index 0000000..9c06c1a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/eventsFreeThreaded.py
@@ -0,0 +1,88 @@
+# A sample originally provided by Richard Bell, and modified by Mark Hammond.
+
+# This sample demonstrates how to use COM events in a free-threaded world.
+# In this world, there is no need to marshall calls across threads, so
+# no message loops are needed at all.  This means regular cross-thread 
+# synchronization can be used.  In this sample we just wait on win32 event
+# objects.
+
+# See also ieEventsApartmentThreaded.py for how to do this in an
+# apartment-threaded world, where thread-marshalling complicates things.
+
+# NOTE: This example uses Internet Explorer, but it should not be considerd
+# a "best-practices" for writing against IE events, but for working with
+# events in general. For example:
+# * The first OnDocumentComplete event is not a reliable indicator that the
+#   URL has completed loading
+# * As we are demonstrating the most efficient way of handling events, when
+#   running this sample you will see an IE Windows briefly appear, but
+#   vanish without ever being repainted.
+
+import sys
+sys.coinit_flags=0          # specify free threading
+
+import os
+import win32api
+import win32event
+import win32com.client
+import pythoncom
+import time
+
+# The print statements indicate that COM has actually started another thread
+# and will deliver the events to that thread (ie, the events do not actually
+# fire on our main thread).
+class ExplorerEvents:
+    def __init__(self):
+        # We reuse this event for all events.
+        self.event = win32event.CreateEvent(None, 0, 0, None)
+    def OnDocumentComplete(self,
+                           pDisp=pythoncom.Empty,
+                           URL=pythoncom.Empty):
+        #
+        # Caution:  Since the main thread and events thread(s) are different
+        # it may be necessary to serialize access to shared data.  Because
+        # this is a simple test case, that is not required here.  Your
+        # situation may be different.   Caveat programmer.
+        #
+        thread = win32api.GetCurrentThreadId()
+        print "OnDocumentComplete event processed on thread %d"%thread
+        # Set the event our main thread is waiting on.
+        win32event.SetEvent(self.event)
+    def OnQuit(self):
+        thread = win32api.GetCurrentThreadId()
+        print "OnQuit event processed on thread %d"%thread
+        win32event.SetEvent(self.event)
+
+def TestExplorerEvents():
+    iexplore = win32com.client.DispatchWithEvents(
+        "InternetExplorer.Application", ExplorerEvents)
+
+    thread = win32api.GetCurrentThreadId()
+    print 'TestExplorerEvents created IE object on thread %d'%thread
+
+    iexplore.Visible = 1
+    try:
+        iexplore.Navigate(win32api.GetFullPathName('..\\readme.htm'))
+    except pythoncom.com_error, details:
+        print "Warning - could not open the test HTML file", details
+
+    # In this free-threaded example, we can simply wait until an event has 
+    # been set - we will give it 2 seconds before giving up.
+    rc = win32event.WaitForSingleObject(iexplore.event, 2000)
+    if rc != win32event.WAIT_OBJECT_0:
+        print "Document load event FAILED to fire!!!"
+
+    iexplore.Quit()
+    # Now we can do the same thing to wait for exit!
+    # Although Quit generates events, in this free-threaded world we
+    # do *not* need to run any message pumps.
+
+    rc = win32event.WaitForSingleObject(iexplore.event, 2000)
+    if rc != win32event.WAIT_OBJECT_0:
+        print "OnQuit event FAILED to fire!!!"
+
+    iexplore = None
+    print "Finished the IE event sample!"
+
+if __name__=='__main__':
+    TestExplorerEvents()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/excelAddin.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/excelAddin.py
new file mode 100644
index 0000000..f9a4cb3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/excelAddin.py
@@ -0,0 +1,138 @@
+# A demo plugin for Microsoft Excel
+#
+# This addin simply adds a new button to the main Excel toolbar,
+# and displays a message box when clicked.  Thus, it demonstrates
+# how to plug in to Excel itself, and hook Excel events.
+#
+#
+# To register the addin, simply execute:
+#   excelAddin.py
+# This will install the COM server, and write the necessary
+# AddIn key to Excel
+#
+# To unregister completely:
+#   excelAddin.py --unregister
+#
+# To debug, execute:
+#   excelAddin.py --debug
+#
+# Then open Pythonwin, and select "Tools->Trace Collector Debugging Tool"
+# Restart excel, and you should see some output generated.
+#
+# NOTE: If the AddIn fails with an error, Excel will re-register
+# the addin to not automatically load next time Excel starts.  To
+# correct this, simply re-register the addin (see above)
+#
+# Author <ekoome@yahoo.com> Eric Koome
+# Copyright (c) 2003 Wavecom Inc.  All rights reserved
+#
+# Redistribution and use in source and binary forms, with or without
+#modification, are permitted provided that the following conditions
+#are met:
+#
+#1. Redistributions of source code must retain the above copyright
+#    notice, this list of conditions and the following disclaimer.
+#
+# THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED
+# WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED.  IN NO EVENT SHALL ERIC KOOME OR
+# ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
+# USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
+# OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+# SUCH DAMAGE.
+
+from win32com import universal
+from win32com.server.exception import COMException
+from win32com.client import gencache, DispatchWithEvents
+import winerror
+import pythoncom
+from win32com.client import constants, Dispatch
+import sys
+
+# Support for COM objects we use.
+gencache.EnsureModule('{00020813-0000-0000-C000-000000000046}', 0, 1, 3, bForDemand=True) # Excel 9
+gencache.EnsureModule('{2DF8D04C-5BFA-101B-BDE5-00AA0044DE52}', 0, 2, 1, bForDemand=True) # Office 9
+
+# The TLB defining the interfaces we implement
+universal.RegisterInterfaces('{AC0714F2-3D04-11D1-AE7D-00A0C90F26F4}', 0, 1, 0, ["_IDTExtensibility2"])
+class ButtonEvent:
+    def OnClick(self, button, cancel):
+        import win32ui # Possible, but not necessary, to use a Pythonwin GUI
+        import win32con
+        win32ui.MessageBox("Hello from Python", "Python Test",win32con.MB_OKCANCEL)
+        return cancel
+
+class ExcelAddin:
+    _com_interfaces_ = ['_IDTExtensibility2']
+    _public_methods_ = []
+    _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER
+    _reg_clsid_ = "{C5482ECA-F559-45A0-B078-B2036E6F011A}"
+    _reg_progid_ = "Python.Test.ExcelAddin"
+    _reg_policy_spec_ = "win32com.server.policy.EventHandlerPolicy"
+
+    def __init__(self):
+        self.appHostApp = None    
+    
+    def OnConnection(self, application, connectMode, addin, custom):
+        print "OnConnection", application, connectMode, addin, custom
+        try:
+            self.appHostApp = application
+            cbcMyBar = self.appHostApp.CommandBars.Add(Name="PythonBar", Position=constants.msoBarTop, MenuBar=constants.msoBarTypeNormal, Temporary=True)
+            btnMyButton = cbcMyBar.Controls.Add(Type=constants.msoControlButton, Parameter="Greetings")
+            btnMyButton=self.toolbarButton = DispatchWithEvents(btnMyButton, ButtonEvent)
+            btnMyButton.Style = constants.msoButtonCaption
+            btnMyButton.BeginGroup = True
+            btnMyButton.Caption = "&Python"
+            btnMyButton.TooltipText = "Python rules the World"
+            btnMyButton.Width = "34"
+            cbcMyBar.Visible = True
+        except pythoncom.com_error, (hr, msg, exc, arg):
+            print "The Excel call failed with code %d: %s" % (hr, msg)
+            if exc is None:
+                print "There is no extended error information"
+            else:
+                wcode, source, text, helpFile, helpId, scode = exc
+                print "The source of the error is", source
+                print "The error message is", text
+                print "More info can be found in %s (id=%d)" % (helpFile, helpId)
+
+    def OnDisconnection(self, mode, custom):
+        print "OnDisconnection"
+        self.appHostApp.CommandBars("PythonBar").Delete
+        self.appHostApp=None
+        
+    def OnAddInsUpdate(self, custom):
+        print "OnAddInsUpdate", custom
+    def OnStartupComplete(self, custom):
+        print "OnStartupComplete", custom
+    def OnBeginShutdown(self, custom):
+        print "OnBeginShutdown", custom
+
+def RegisterAddin(klass):
+    import _winreg
+    key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Excel\\Addins")
+    subkey = _winreg.CreateKey(key, klass._reg_progid_)
+    _winreg.SetValueEx(subkey, "CommandLineSafe", 0, _winreg.REG_DWORD, 0)
+    _winreg.SetValueEx(subkey, "LoadBehavior", 0, _winreg.REG_DWORD, 3)
+    _winreg.SetValueEx(subkey, "Description", 0, _winreg.REG_SZ, "Excel Addin")
+    _winreg.SetValueEx(subkey, "FriendlyName", 0, _winreg.REG_SZ, "A Simple Excel Addin")
+
+def UnregisterAddin(klass):
+    import _winreg
+    try:
+        _winreg.DeleteKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Excel\\Addins\\" + klass._reg_progid_)
+    except WindowsError:
+        pass
+
+if __name__ == '__main__':
+    import win32com.server.register
+    win32com.server.register.UseCommandLine(ExcelAddin)
+    if "--unregister" in sys.argv:
+        UnregisterAddin(ExcelAddin)
+    else:
+        RegisterAddin(ExcelAddin)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/excelRTDServer.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/excelRTDServer.py
new file mode 100644
index 0000000..bfaab58f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/excelRTDServer.py
@@ -0,0 +1,409 @@
+"""Excel IRTDServer implementation.
+
+This module is a functional example of how to implement the IRTDServer interface
+in python, using the pywin32 extensions. Further details, about this interface
+and it can be found at:
+     http://msdn.microsoft.com/library/default.asp?url=/library/en-us/dnexcl2k2/html/odc_xlrtdfaq.asp
+"""
+
+# Copyright (c) 2003-2004 by Chris Nilsson <chris@slort.org>
+#
+# By obtaining, using, and/or copying this software and/or its
+# associated documentation, you agree that you have read, understood,
+# and will comply with the following terms and conditions:
+#
+# Permission to use, copy, modify, and distribute this software and
+# its associated documentation for any purpose and without fee is
+# hereby granted, provided that the above copyright notice appears in
+# all copies, and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of
+# Christopher Nilsson (the author) not be used in advertising or publicity
+# pertaining to distribution of the software without specific, written
+# prior permission.
+#
+# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
+# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
+# ABILITY AND FITNESS.  IN NO EVENT SHALL THE AUTHOR
+# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
+# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+# OF THIS SOFTWARE.
+
+import pythoncom
+import win32com.client
+from win32com import universal
+from win32com.client import gencache
+from win32com.server.exception import COMException
+
+import threading
+import datetime # For the example classes...
+
+# Typelib info for version 10 - aka Excel XP.
+# This is the minimum version of excel that we can work with as this is when
+# Microsoft introduced these interfaces.
+EXCEL_TLB_GUID = '{00020813-0000-0000-C000-000000000046}'
+EXCEL_TLB_LCID = 0
+EXCEL_TLB_MAJOR = 1
+EXCEL_TLB_MINOR = 4
+
+# Import the excel typelib to make sure we've got early-binding going on. 
+# The "ByRef" parameters we use later won't work without this.
+gencache.EnsureModule(EXCEL_TLB_GUID, EXCEL_TLB_LCID, \
+                      EXCEL_TLB_MAJOR, EXCEL_TLB_MINOR)
+
+# Tell pywin to import these extra interfaces.
+# --
+# QUESTION: Why? The interfaces seem to descend from IDispatch, so
+# I'd have thought, for example, calling callback.UpdateNotify() (on the
+# IRTDUpdateEvent callback excel gives us) would work without molestation.
+# But the callback needs to be cast to a "real" IRTDUpdateEvent type. Hmm...
+# This is where my small knowledge of the pywin framework / COM gets hazy.
+# --
+# Again, we feed in the Excel typelib as the source of these interfaces.
+universal.RegisterInterfaces(EXCEL_TLB_GUID, 
+                             EXCEL_TLB_LCID, EXCEL_TLB_MAJOR, EXCEL_TLB_MINOR,
+                             ['IRtdServer','IRTDUpdateEvent'])
+
+class ExcelRTDServer(object):
+  """Base RTDServer class.
+  
+  Provides most of the features needed to implement the IRtdServer interface.
+  Manages topic adding, removal, and packing up the values for excel.
+  
+  Shouldn't be instantiated directly.
+  
+  Instead, descendant classes should override the CreateTopic() method.
+  Topic objects only need to provide a GetValue() function to play nice here.
+  The values given need to be atomic (eg. string, int, float... etc).
+  
+  Also note: nothing has been done within this class to ensure that we get
+  time to check our topics for updates. I've left that up to the subclass 
+  since the ways, and needs, of refreshing your topics will vary greatly. For 
+  example, the sample implementation uses a timer thread to wake itself up. 
+  Whichever way you choose to do it, your class needs to be able to wake up 
+  occasionally, since excel will never call your class without being asked to 
+  first.
+  
+  Excel will communicate with our object in this order:
+    1. Excel instantiates our object and calls ServerStart, providing us with
+       an IRTDUpdateEvent callback object.
+    2. Excel calls ConnectData when it wants to subscribe to a new "topic".
+    3. When we have new data to provide, we call the UpdateNotify method of the 
+       callback object we were given.
+    4. Excel calls our RefreshData method, and receives a 2d SafeArray (row-major)
+       containing the Topic ids in the 1st dim, and the topic values in the
+       2nd dim.
+    5. When not needed anymore, Excel will call our DisconnectData to 
+       unsubscribe from a topic.
+    6. When there are no more topics left, Excel will call our ServerTerminate
+       method to kill us.
+
+  Throughout, at undetermined periods, Excel will call our Heartbeat
+  method to see if we're still alive. It must return a non-zero value, or 
+  we'll be killed.
+  
+  NOTE: By default, excel will at most call RefreshData once every 2 seconds. 
+        This is a setting that needs to be changed excel-side. To change this, 
+        you can set the throttle interval like this in the excel VBA object model:
+          Application.RTD.ThrottleInterval = 1000 ' milliseconds
+  """
+  _com_interfaces_ = ['IRtdServer']
+  _public_methods_ = ['ConnectData','DisconnectData','Heartbeat',
+                      'RefreshData','ServerStart','ServerTerminate']
+  _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER 
+  #_reg_clsid_ = "# subclass must provide this class attribute"
+  #_reg_desc_ = "# subclass should provide this description"
+  #_reg_progid_ = "# subclass must provide this class attribute"
+
+  ALIVE = 1
+  NOT_ALIVE = 0
+
+  def __init__(self):
+    """Constructor"""
+    super(ExcelRTDServer, self).__init__()
+    self.IsAlive = self.ALIVE
+    self.__callback = None
+    self.topics = {}
+    
+  def SignalExcel(self):
+    """Use the callback we were given to tell excel new data is available."""
+    if self.__callback is None:
+      raise COMException(desc="Callback excel provided is Null")
+    self.__callback.UpdateNotify()
+    
+  def ConnectData(self, TopicID, Strings, GetNewValues):
+    """Creates a new topic out of the Strings excel gives us."""
+    try:
+      self.topics[TopicID] = self.CreateTopic(Strings)
+    except Exception, why:
+      raise COMException(desc=str(why))
+    GetNewValues = True
+    result = self.topics[TopicID]
+    if result is None:
+      result = "# %s: Waiting for update" % self.__class__.__name__
+    else:
+      result = result.GetValue()
+      
+    # fire our internal event...
+    self.OnConnectData(TopicID)
+      
+    # GetNewValues as per interface is ByRef, so we need to pass it back too.
+    return result, GetNewValues
+
+  def DisconnectData(self, TopicID):
+    """Deletes the given topic."""
+    self.OnDisconnectData(TopicID)
+    
+    if self.topics.has_key(TopicID):
+      self.topics[TopicID] = None
+      del self.topics[TopicID]
+
+  def Heartbeat(self):
+    """Called by excel to see if we're still here."""
+    return self.IsAlive
+
+  def RefreshData(self, TopicCount):
+    """Packs up the topic values. Called by excel when it's ready for an update.
+    
+    Needs to:
+      * Return the current number of topics, via the "ByRef" TopicCount
+      * Return a 2d SafeArray of the topic data.
+        - 1st dim: topic numbers
+        - 2nd dim: topic values
+      
+    We could do some caching, instead of repacking every time...
+    But this works for demonstration purposes."""
+    TopicCount = len(self.topics)
+    self.OnRefreshData()
+    
+    # Grow the lists, so we don't need a heap of calls to append()
+    results = [[None] * TopicCount, [None] * TopicCount]
+    
+    # Excel expects a 2-dimensional array. The first dim contains the 
+    # topic numbers, and the second contains the values for the topics.
+    # In true VBA style (yuck), we need to pack the array in row-major format,
+    # which looks like:
+    #   ( (topic_num1, topic_num2, ..., topic_numN), \
+    #     (topic_val1, topic_val2, ..., topic_valN) )
+    for idx, topicdata in enumerate(self.topics.iteritems()):
+      topicNum, topic = topicdata
+      results[0][idx] = topicNum
+      results[1][idx] = topic.GetValue()
+      
+    # TopicCount is meant to be passed to us ByRef, so return it as well, as per
+    # the way pywin32 handles ByRef arguments.
+    return tuple(results), TopicCount
+
+  def ServerStart(self, CallbackObject):
+    """Excel has just created us... We take its callback for later, and set up shop."""
+    self.IsAlive = self.ALIVE
+
+    if CallbackObject is None:
+      raise COMException(desc='Excel did not provide a callback')
+      
+    # Need to "cast" the raw PyIDispatch object to the IRTDUpdateEvent interface
+    IRTDUpdateEventKlass = win32com.client.CLSIDToClass.GetClass('{A43788C1-D91B-11D3-8F39-00C04F3651B8}')
+    self.__callback = IRTDUpdateEventKlass(CallbackObject)
+    
+    self.OnServerStart()
+
+    return self.IsAlive
+    
+  def ServerTerminate(self):
+    """Called when excel no longer wants us."""
+    self.IsAlive = self.NOT_ALIVE # On next heartbeat, excel will free us
+    self.OnServerTerminate()
+
+  def CreateTopic(self, TopicStrings=None):
+    """Topic factory method. Subclass must override. 
+    
+    Topic objects need to provide:
+      * GetValue() method which returns an atomic value.
+      
+    Will raise NotImplemented if not overridden.
+    """
+    raise NotImplemented, 'Subclass must implement'
+    
+  # Overridable class events...  
+  def OnConnectData(self, TopicID):
+    """Called when a new topic has been created, at excel's request."""
+    pass
+  def OnDisconnectData(self, TopicID):
+    """Called when a topic is about to be deleted, at excel's request."""
+    pass
+  def OnRefreshData(self):
+    """Called when excel has requested all current topic data."""
+    pass
+  def OnServerStart(self):
+    """Called when excel has instanciated us."""
+    pass
+  def OnServerTerminate(self):
+    """Called when excel is about to destroy us."""
+    pass
+  
+class RTDTopic(object):
+  """Base RTD Topic. 
+  Only method required by our RTDServer implementation is GetValue(). 
+  The others are more for convenience."""
+  def __init__(self, TopicStrings):
+    super(RTDTopic, self).__init__()
+    self.TopicStrings = TopicStrings
+    self.__currentValue = None
+    self.__dirty = False
+    
+  def Update(self, sender):
+    """Called by the RTD Server. 
+    Gives us a chance to check if our topic data needs to be 
+    changed (eg. check a file, quiz a database, etc)."""
+    raise NotImplemented, 'subclass must implement'
+    
+  def Reset(self):
+    """Call when this topic isn't considered "dirty" anymore."""
+    self.__dirty = False
+    
+  def GetValue(self):
+    return self.__currentValue
+    
+  def SetValue(self, value):
+    self.__dirty = True
+    self.__currentValue = value
+    
+  def HasChanged(self):
+    return self.__dirty
+    
+# -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
+
+######################################
+# Example classes 
+######################################
+
+class TimeServer(ExcelRTDServer):
+  """Example Time RTD server.
+  
+  Sends time updates back to excel.
+  
+  example of use, in an excel sheet:
+    =RTD("Python.RTD.TimeServer","","seconds","5")
+    
+  This will cause a timestamp string to fill the cell, and update its value
+  every 5 seconds (or as close as possible depending on how busy excel is).
+  
+  The empty string parameter denotes the com server is running on the local
+  machine. Otherwise, put in the hostname to look on. For more info
+  on this, lookup the Excel help for its "RTD" worksheet function.
+  
+  Obviously, you'd want to wrap this kind of thing in a friendlier VBA 
+  function.
+  
+  Also, remember that the RTD function accepts a maximum of 28 arguments!
+  If you want to pass more, you may need to concatenate arguments into one 
+  string, and have your topic parse them appropriately.
+  """
+  # win32com.server setup attributes...
+  # Never copy the _reg_clsid_ value in your own classes!
+  _reg_clsid_ = '{EA7F2CF1-11A2-45E4-B2D5-68E240DB8CB1}'
+  _reg_progid_ = 'Python.RTD.TimeServer' 
+  _reg_desc_ = "Python class implementing Excel IRTDServer -- feeds time"
+  
+  # other class attributes...
+  INTERVAL = 0.5 # secs. Threaded timer will wake us up at this interval.
+  
+  def __init__(self):
+    super(TimeServer, self).__init__()
+    
+    # Simple timer thread to ensure we get to update our topics, and 
+    # tell excel about any changes. This is a pretty basic and dirty way to 
+    # do this. Ideally, there should be some sort of waitable (eg. either win32
+    # event, socket data event...) and be kicked off by that event triggering.
+    # As soon as we set up shop here, we _must_ return control back to excel.
+    # (ie. we can't block and do our own thing...)
+    self.ticker = threading.Timer(self.INTERVAL, self.Update)
+    
+  def OnServerStart(self):
+    self.ticker.start()
+        
+  def OnServerTerminate(self):
+    if not self.ticker.finished.isSet():
+      self.ticker.cancel() # Cancel our wake-up thread. Excel has killed us.
+    
+  def Update(self):
+    # Get our wake-up thread ready...
+    self.ticker = threading.Timer(self.INTERVAL, self.Update)
+    try:
+      # Check if any of our topics have new info to pass on
+      if len(self.topics):     
+        refresh = False
+        for topic in self.topics.itervalues():
+          topic.Update(self)
+          if topic.HasChanged():
+            refresh = True
+          topic.Reset()
+            
+        if refresh:
+          self.SignalExcel()
+    finally:
+      self.ticker.start() # Make sure we get to run again
+    
+  def CreateTopic(self, TopicStrings=None):
+    """Topic factory. Builds a TimeTopic object out of the given TopicStrings."""
+    return TimeTopic(TopicStrings)
+     
+class TimeTopic(RTDTopic):
+  """Example topic for example RTD server.
+  
+  Will accept some simple commands to alter how long to delay value updates.
+  
+  Commands:
+    * seconds, delay_in_seconds
+    * minutes, delay_in_minutes
+    * hours, delay_in_hours
+  """
+  def __init__(self, TopicStrings):
+    super(TimeTopic, self).__init__(TopicStrings)
+    try:
+      self.cmd, self.delay = self.TopicStrings
+    except Exception, E:
+      # We could simply return a "# ERROR" type string as the 
+      # topic value, but explosions like this should be able to get handled by 
+      # the VBA-side "On Error" stuff. 
+      raise ValueError, "Invalid topic strings: %s" % str(TopicStrings)
+    
+    #self.cmd = str(self.cmd)
+    self.delay = float(self.delay)
+    
+    # setup our initial value
+    self.checkpoint = self.timestamp()
+    self.SetValue(str(self.checkpoint))
+      
+  def timestamp(self):
+    return datetime.datetime.now()
+    
+  def Update(self, sender):
+    now = self.timestamp()
+    delta = now - self.checkpoint
+    refresh = False
+    if self.cmd == "seconds":
+      if delta.seconds >= self.delay:
+        refresh = True
+    elif self.cmd == "minutes":
+      if delta.minutes >= self.delay:
+        refresh = True
+    elif self.cmd == "hours":
+      if delta.hours >= self.delay:
+        refresh = True
+    else:
+      self.SetValue("#Unknown command: " + self.cmd)
+    
+    if refresh:
+      self.SetValue(str(now))
+      self.checkpoint = now
+          
+if __name__ == "__main__":    
+  import win32com.server.register
+  
+  # Register/Unregister TimeServer example
+  # eg. at the command line: excelrtd.py --register
+  # Then type in an excel cell something like:
+  # =RTD("Python.RTD.TimeServer","","seconds","5")
+  win32com.server.register.UseCommandLine(TimeServer)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/iebutton.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/iebutton.py
new file mode 100644
index 0000000..5c636a0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/iebutton.py
@@ -0,0 +1,188 @@
+# encoding: latin-1
+
+# PyWin32 Internet Explorer Button
+#
+# written by Leonard Ritter (paniq@gmx.net)
+# and Robert Förtsch (info@robert-foertsch.com)
+
+
+"""
+This sample implements a simple IE Button COM server
+with access to the IWebBrowser2 interface.
+
+To demonstrate:
+* Execute this script to register the server.
+* Open Pythonwin's Tools -> Trace Collector Debugging Tool, so you can
+  see the output of 'print' statements in this demo.
+* Open a new IE instance.  The toolbar should have a new "scissors" icon,
+  with tooltip text "IE Button" - this is our new button - click it.
+* Switch back to the Pythonwin window - you should see:
+   IOleCommandTarget::Exec called.
+  This is the button being clicked.  Extending this to do something more
+  useful is left as an exercise.
+
+Contributions to this sample to make it a little "friendlier" welcome!
+"""
+
+# imports section
+import sys, os
+from win32com import universal
+from win32com.client import gencache, DispatchWithEvents, Dispatch
+from win32com.client import constants, getevents
+import win32com.server.register
+import win32com
+import pythoncom
+import win32api
+
+# This demo uses 'print' - use win32traceutil to see it if we have no
+# console.
+try:
+    win32api.GetConsoleTitle()
+except win32api.error:
+    import win32traceutil
+
+from win32com.axcontrol import axcontrol
+
+import array, struct
+
+# ensure we know the ms internet controls typelib so we have access to IWebBrowser2 later on
+win32com.client.gencache.EnsureModule('{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}',0,1,1)
+
+
+# 
+IObjectWithSite_methods = ['SetSite','GetSite']
+IOleCommandTarget_methods = ['Exec','QueryStatus']
+
+_iebutton_methods_ = IOleCommandTarget_methods + IObjectWithSite_methods
+_iebutton_com_interfaces_ = [
+    axcontrol.IID_IOleCommandTarget,
+    axcontrol.IID_IObjectWithSite, # IObjectWithSite
+]
+
+class Stub:
+    """
+    this class serves as a method stub,
+    outputting debug info whenever the object
+    is being called.
+    """
+    
+    def __init__(self,name):
+        self.name = name
+        
+    def __call__(self,*args):
+        print 'STUB: ',self.name,args
+        
+class IEButton:
+    """
+    The actual COM server class
+    """
+    _com_interfaces_ = _iebutton_com_interfaces_
+    _public_methods_ = _iebutton_methods_
+    _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER
+    _button_text_ = 'IE Button'
+    _tool_tip_ = 'An example implementation for an IE Button.'
+    _icon_ = ''
+    _hot_icon_ = ''
+
+    def __init__( self ):
+        # put stubs for non-implemented methods
+        for method in self._public_methods_:
+            if not hasattr(self,method):
+                print 'providing default stub for %s' % method
+                setattr(self,method,Stub(method))
+                
+    def QueryStatus (self, pguidCmdGroup, prgCmds, cmdtextf):
+        # 'cmdtextf' is the 'cmdtextf' element from the OLECMDTEXT structure,
+        # or None if a NULL pointer was passed.
+        result = []
+        for id, flags in prgCmds:
+            flags |= axcontrol.OLECMDF_SUPPORTED  | axcontrol.OLECMDF_ENABLED 
+            result.append((id, flags))
+        if cmdtextf is None:
+            cmdtext = None # must return None if nothing requested.
+        # IE never seems to want any text - this code is here for
+        # demo purposes only
+        elif cmdtextf == axcontrol.OLECMDTEXTF_NAME:
+            cmdtext = "IEButton Name"
+        else:
+            cmdtext = "IEButton State"
+        return result, cmdtext
+
+    def Exec(self, pguidCmdGroup, nCmdID, nCmdExecOpt, pvaIn):
+        print pguidCmdGroup, nCmdID, nCmdExecOpt, pvaIn
+        print "IOleCommandTarget::Exec called."
+        #self.webbrowser.ShowBrowserBar(GUID_IETOOLBAR, not is_ietoolbar_visible())
+
+    def SetSite(self,unknown):
+        if unknown:
+            # first get a command target
+            cmdtarget = unknown.QueryInterface(axcontrol.IID_IOleCommandTarget)
+            # then travel over to a service provider
+            serviceprovider = cmdtarget.QueryInterface(pythoncom.IID_IServiceProvider)
+            # finally ask for the internet explorer application, returned as a dispatch object
+            self.webbrowser = win32com.client.Dispatch(serviceprovider.QueryService('{0002DF05-0000-0000-C000-000000000046}',pythoncom.IID_IDispatch))
+        else:
+            # lose all references
+            self.webbrowser = None
+
+    def GetClassID(self):
+        return self._reg_clsid_
+
+def register(classobj):
+    import _winreg
+    subKeyCLSID = "SOFTWARE\\Microsoft\\Internet Explorer\\Extensions\\%38s" % classobj._reg_clsid_
+    try:
+        hKey = _winreg.CreateKey( _winreg.HKEY_LOCAL_MACHINE, subKeyCLSID )
+        subKey = _winreg.SetValueEx( hKey, "ButtonText", 0, _winreg.REG_SZ, classobj._button_text_ )
+        _winreg.SetValueEx( hKey, "ClsidExtension", 0, _winreg.REG_SZ, classobj._reg_clsid_ ) # reg value for calling COM object
+        _winreg.SetValueEx( hKey, "CLSID", 0, _winreg.REG_SZ, "{1FBA04EE-3024-11D2-8F1F-0000F87ABD16}" ) # CLSID for button that sends command to COM object
+        _winreg.SetValueEx( hKey, "Default Visible", 0, _winreg.REG_SZ, "Yes" )
+        _winreg.SetValueEx( hKey, "ToolTip", 0, _winreg.REG_SZ, classobj._tool_tip_ )
+        _winreg.SetValueEx( hKey, "Icon", 0, _winreg.REG_SZ, classobj._icon_)
+        _winreg.SetValueEx( hKey, "HotIcon", 0, _winreg.REG_SZ, classobj._hot_icon_)
+    except WindowsError:
+        print "Couldn't set standard toolbar reg keys."
+    else:
+        print "Set standard toolbar reg keys."
+
+def unregister(classobj):
+    import _winreg
+    subKeyCLSID = "SOFTWARE\\Microsoft\\Internet Explorer\\Extensions\\%38s" % classobj._reg_clsid_
+    try:
+        hKey = _winreg.CreateKey( _winreg.HKEY_LOCAL_MACHINE, subKeyCLSID )
+        subKey = _winreg.DeleteValue( hKey, "ButtonText" )
+        _winreg.DeleteValue( hKey, "ClsidExtension" ) # for calling COM object
+        _winreg.DeleteValue( hKey, "CLSID" )
+        _winreg.DeleteValue( hKey, "Default Visible" )
+        _winreg.DeleteValue( hKey, "ToolTip" )
+        _winreg.DeleteValue( hKey, "Icon" )
+        _winreg.DeleteValue( hKey, "HotIcon" )
+        _winreg.DeleteKey( _winreg.HKEY_LOCAL_MACHINE, subKeyCLSID )
+    except WindowsError:
+        print "Couldn't delete Standard toolbar regkey."
+    else:
+        print "Deleted Standard toolbar regkey."
+
+#
+# test implementation
+#
+
+class PyWin32InternetExplorerButton(IEButton):
+    _reg_clsid_ = "{104B66A9-9E68-49D1-A3F5-94754BE9E0E6}"
+    _reg_progid_ = "PyWin32.IEButton"
+    _reg_desc_ = 'Test Button'
+    _button_text_ = 'IE Button'
+    _tool_tip_ = 'An example implementation for an IE Button.'
+    _icon_ = ''
+    _hot_icon_ = _icon_    
+
+def DllRegisterServer():
+    register(PyWin32InternetExplorerButton)
+    
+def DllUnregisterServer():
+    unregister(PyWin32InternetExplorerButton)
+
+if __name__ == '__main__':
+    win32com.server.register.UseCommandLine(PyWin32InternetExplorerButton,
+                                        finalize_register = DllRegisterServer,
+                                        finalize_unregister = DllUnregisterServer)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/ietoolbar.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/ietoolbar.py
new file mode 100644
index 0000000..504d2eb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/ietoolbar.py
@@ -0,0 +1,320 @@
+# encoding: latin-1
+
+# PyWin32 Internet Explorer Toolbar
+#
+# written by Leonard Ritter (paniq@gmx.net)
+# and Robert Förtsch (info@robert-foertsch.com)
+
+
+"""
+This sample implements a simple IE Toolbar COM server
+supporting Windows XP styles and access to
+the IWebBrowser2 interface.
+
+It also demonstrates how to hijack the parent window
+to catch WM_COMMAND messages.
+"""
+
+# imports section
+import sys, os
+from win32com import universal
+from win32com.client import gencache, DispatchWithEvents, Dispatch
+from win32com.client import constants, getevents
+import win32com
+import pythoncom
+import _winreg
+
+from win32com.shell import shell
+from win32com.shell.shellcon import *
+from win32com.axcontrol import axcontrol
+
+try:
+    # try to get styles (winxp)
+    import winxpgui as win32gui
+except:
+    # import default module (win2k and lower)
+    import win32gui
+import win32ui
+import win32con
+import commctrl
+
+import array, struct
+
+# ensure we know the ms internet controls typelib so we have access to IWebBrowser2 later on
+win32com.client.gencache.EnsureModule('{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}',0,1,1)
+
+# 
+IDeskBand_methods = ['GetBandInfo']
+IDockingWindow_methods = ['ShowDW','CloseDW','ResizeBorderDW']
+IOleWindow_methods = ['GetWindow','ContextSensitiveHelp']
+IInputObject_methods = ['UIActivateIO','HasFocusIO','TranslateAcceleratorIO']
+IObjectWithSite_methods = ['SetSite','GetSite']
+IPersistStream_methods = ['GetClassID','IsDirty','Load','Save','GetSizeMax']
+
+_ietoolbar_methods_ = IDeskBand_methods + IDockingWindow_methods + \
+                      IOleWindow_methods + IInputObject_methods + \
+                      IObjectWithSite_methods + IPersistStream_methods
+_ietoolbar_com_interfaces_ = [
+    shell.IID_IDeskBand, # IDeskBand
+    axcontrol.IID_IObjectWithSite, # IObjectWithSite
+    pythoncom.IID_IPersistStream,
+    axcontrol.IID_IOleCommandTarget,
+]
+
+class WIN32STRUCT:
+    def __init__(self, **kw):
+        full_fmt = ""
+        for name, fmt, default in self._struct_items_:
+            self.__dict__[name] = None
+            if fmt == "z":
+                full_fmt += "pi"
+            else:
+                full_fmt += fmt
+        for name, val in kw.items():
+            self.__dict__[name] = val
+
+    def __setattr__(self, attr, val):
+        if not attr.startswith("_") and not self.__dict__.has_key(attr):
+            raise AttributeError, attr
+        self.__dict__[attr] = val
+
+    def toparam(self):
+        self._buffs = []
+        full_fmt = ""
+        vals = []        
+        for name, fmt, default in self._struct_items_:
+            val = self.__dict__[name]
+            if fmt == "z":
+                fmt = "Pi"
+                if val is None:
+                    vals.append(0)
+                    vals.append(0)
+                else:
+                    str_buf = array.array("c", val+'\0')
+                    vals.append(str_buf.buffer_info()[0])
+                    vals.append(len(val))
+                    self._buffs.append(str_buf) # keep alive during the call.
+            else:
+                if val is None:
+                    val = default
+                vals.append(val)
+            full_fmt += fmt		
+        return apply(struct.pack, (full_fmt,) + tuple(vals) )
+
+class TBBUTTON(WIN32STRUCT):
+    _struct_items_ = [
+        ("iBitmap", "i", 0),
+        ("idCommand", "i", 0),
+        ("fsState", "B", 0),
+        ("fsStyle", "B", 0),
+        ("bReserved", "H", 0),
+        ("dwData", "I", 0),
+        ("iString", "z", None),
+    ]
+
+class Stub:
+    """
+    this class serves as a method stub,
+    outputting debug info whenever the object
+    is being called.
+    """
+
+    def __init__(self,name):
+        self.name = name
+        
+    def __call__(self,*args):
+        print 'STUB: ',self.name,args
+
+class IEToolbarCtrl:
+    """
+    a tiny wrapper for our winapi-based
+    toolbar control implementation.
+    """
+    def __init__(self,hwndparent):
+        styles = win32con.WS_CHILD \
+                | win32con.WS_VISIBLE \
+                | win32con.WS_CLIPSIBLINGS \
+                | win32con.WS_CLIPCHILDREN \
+                | commctrl.TBSTYLE_LIST \
+                | commctrl.TBSTYLE_FLAT \
+                | commctrl.TBSTYLE_TRANSPARENT \
+                | commctrl.CCS_TOP \
+                | commctrl.CCS_NODIVIDER \
+                | commctrl.CCS_NORESIZE \
+                | commctrl.CCS_NOPARENTALIGN
+        self.hwnd = win32gui.CreateWindow('ToolbarWindow32', None, styles,
+                                          0, 0, 100, 100,
+                                          hwndparent, 0, win32gui.dllhandle,
+                                          None)
+        win32gui.SendMessage(self.hwnd, commctrl.TB_BUTTONSTRUCTSIZE, 20, 0)
+
+    def ShowWindow(self,mode):
+        win32gui.ShowWindow(self.hwnd,mode)
+
+    def AddButtons(self,*buttons):
+        tbbuttons = ''
+        for button in buttons:
+            tbbuttons += button.toparam()
+        return win32gui.SendMessage(self.hwnd, commctrl.TB_ADDBUTTONS,
+                                    len(buttons), tbbuttons)
+
+    def GetSafeHwnd(self):
+        return self.hwnd
+
+class IEToolbar:
+    """
+    The actual COM server class
+    """
+    _com_interfaces_ = _ietoolbar_com_interfaces_
+    _public_methods_ = _ietoolbar_methods_
+    _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER
+    # if you copy and modify this example, be sure to change the clsid below
+    _reg_clsid_ = "{F21202A2-959A-4149-B1C3-68B9013F3335}"
+    _reg_progid_ = "PyWin32.IEToolbar"
+    _reg_desc_ = 'PyWin32 IE Toolbar'
+
+    def __init__( self ):
+        # put stubs for non-implemented methods
+        for method in self._public_methods_:
+            if not hasattr(self,method):
+                print 'providing default stub for %s' % method
+                setattr(self,method,Stub(method))
+
+    def GetWindow(self):
+        return self.toolbar.GetSafeHwnd()
+
+    def Load(self, stream):
+        # called when the toolbar is loaded
+        pass
+
+    def Save(self, pStream, fClearDirty):
+        # called when the toolbar shall save its information
+        pass
+
+    def CloseDW(self, dwReserved):
+        del self.toolbar
+        
+    def ShowDW(self, bShow):
+        if bShow:
+            self.toolbar.ShowWindow(win32con.SW_SHOW)
+        else:
+            self.toolbar.ShowWindow(win32con.SW_HIDE)
+
+    def on_first_button(self):
+        print "first!"
+        self.webbrowser.Navigate2('http://starship.python.net/crew/mhammond/')
+
+    def on_second_button(self):
+        print "second!"
+
+    def on_third_button(self):
+        print "third!"
+
+    def toolbar_command_handler(self,args):
+        hwnd,message,wparam,lparam,time,point = args
+        if lparam == self.toolbar.GetSafeHwnd():
+            self._command_map[wparam]()
+
+    def SetSite(self,unknown):
+        if unknown:
+            # retrieve the parent window interface for this site
+            olewindow = unknown.QueryInterface(pythoncom.IID_IOleWindow)
+            # ask the window for its handle
+            hwndparent = olewindow.GetWindow()
+
+            # first get a command target
+            cmdtarget = unknown.QueryInterface(axcontrol.IID_IOleCommandTarget)
+            # then travel over to a service provider
+            serviceprovider = cmdtarget.QueryInterface(pythoncom.IID_IServiceProvider)
+            # finally ask for the internet explorer application, returned as a dispatch object
+            self.webbrowser = win32com.client.Dispatch(serviceprovider.QueryService('{0002DF05-0000-0000-C000-000000000046}',pythoncom.IID_IDispatch))
+
+            # now create and set up the toolbar
+            self.toolbar = IEToolbarCtrl(hwndparent)
+
+            buttons = [
+                ('Visit PyWin32 Homepage',self.on_first_button),
+                ('Another Button', self.on_second_button),
+                ('Yet Another Button', self.on_third_button),
+            ]
+
+            self._command_map = {}
+            # wrap our parent window so we can hook message handlers
+            window = win32ui.CreateWindowFromHandle(hwndparent)
+
+            # add the buttons
+            for i in range(len(buttons)):
+                button = TBBUTTON()
+                name,func = buttons[i]
+                id = 0x4444+i
+                button.iBitmap = -2
+                button.idCommand = id
+                button.fsState = commctrl.TBSTATE_ENABLED
+                button.fsStyle = commctrl.TBSTYLE_BUTTON
+                button.iString = name
+                self._command_map[0x4444+i] = func
+                self.toolbar.AddButtons(button)
+                window.HookMessage(self.toolbar_command_handler,win32con.WM_COMMAND)
+        else:
+            # lose all references
+            self.webbrowser = None
+
+    def GetClassID(self):
+        return self._reg_clsid_
+
+    def GetBandInfo(self, dwBandId, dwViewMode, dwMask):
+        ptMinSize = (0,24)
+        ptMaxSize = (2000,24)
+        ptIntegral = (0,0)
+        ptActual = (2000,24)
+        wszTitle = 'PyWin32 IE Toolbar'
+        dwModeFlags = DBIMF_VARIABLEHEIGHT
+        crBkgnd = 0
+        return (ptMinSize,ptMaxSize,ptIntegral,ptActual,wszTitle,dwModeFlags,crBkgnd)
+
+# used for HKLM install
+def DllInstall( bInstall, cmdLine ):
+    comclass = IEToolbar
+
+# register plugin
+def DllRegisterServer():
+    comclass = IEToolbar
+
+    # register toolbar with IE
+    try:
+        print "Trying to register Toolbar.\n"
+        hkey = _winreg.CreateKey( _winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Internet Explorer\\Toolbar" )
+        subKey = _winreg.SetValueEx( hkey, comclass._reg_clsid_, 0, _winreg.REG_BINARY, "\0" )
+    except WindowsError:
+        print "Couldn't set registry value.\nhkey: %d\tCLSID: %s\n" % ( hkey, comclass._reg_clsid_ )
+    else:
+        print "Set registry value.\nhkey: %d\tCLSID: %s\n" % ( hkey, comclass._reg_clsid_ )
+    # TODO: implement reg settings for standard toolbar button
+
+# unregister plugin
+def DllUnregisterServer():
+    comclass = IEToolbar
+
+    # unregister toolbar from internet explorer
+    try:
+        print "Trying to unregister Toolbar.\n"
+        hkey = _winreg.CreateKey( _winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Internet Explorer\\Toolbar" )
+        _winreg.DeleteValue( hkey, comclass._reg_clsid_ )
+    except WindowsError:
+        print "Couldn't delete registry value.\nhkey: %d\tCLSID: %s\n" % ( hkey, comclass._reg_clsid_ )
+    else:
+        print "Deleting reg key succeeded.\n"
+
+# entry point
+if __name__ == '__main__':
+    import win32com.server.register
+    win32com.server.register.UseCommandLine( IEToolbar )
+    
+    # parse actual command line option
+    if "--unregister" in sys.argv:
+        DllUnregisterServer()
+    else:
+        DllRegisterServer()
+else:
+    # import trace utility for remote debugging
+    import win32traceutil
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/outlookAddin.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/outlookAddin.py
new file mode 100644
index 0000000..8e4ced1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/outlookAddin.py
@@ -0,0 +1,114 @@
+# A demo plugin for Microsoft Outlook (NOT Outlook Express)
+#
+# This addin simply adds a new button to the main Outlook toolbar,
+# and displays a message box when clicked.  Thus, it demonstrates
+# how to plug in to Outlook itself, and hook outlook events.
+#
+# Additionally, each time a new message arrives in the Inbox, a message
+# is printed with the subject of the message.
+#
+# To register the addin, simply execute:
+#   outlookAddin.py
+# This will install the COM server, and write the necessary
+# AddIn key to Outlook
+#
+# To unregister completely:
+#   outlookAddin.py --unregister
+#
+# To debug, execute:
+#   outlookAddin.py --debug
+#
+# Then open Pythonwin, and select "Tools->Trace Collector Debugging Tool"
+# Restart Outlook, and you should see some output generated.
+#
+# NOTE: If the AddIn fails with an error, Outlook will re-register
+# the addin to not automatically load next time Outlook starts.  To
+# correct this, simply re-register the addin (see above)
+
+from win32com import universal
+from win32com.server.exception import COMException
+from win32com.client import gencache, DispatchWithEvents
+import winerror
+import pythoncom
+from win32com.client import constants
+import sys
+
+# Support for COM objects we use.
+gencache.EnsureModule('{00062FFF-0000-0000-C000-000000000046}', 0, 9, 0, bForDemand=True) # Outlook 9
+gencache.EnsureModule('{2DF8D04C-5BFA-101B-BDE5-00AA0044DE52}', 0, 2, 1, bForDemand=True) # Office 9
+
+# The TLB defining the interfaces we implement
+universal.RegisterInterfaces('{AC0714F2-3D04-11D1-AE7D-00A0C90F26F4}', 0, 1, 0, ["_IDTExtensibility2"])
+
+class ButtonEvent:
+    def OnClick(self, button, cancel):
+        import win32ui # Possible, but not necessary, to use a Pythonwin GUI
+        win32ui.MessageBox("Hello from Python")
+        return cancel
+
+class FolderEvent:
+    def OnItemAdd(self, item):
+        try:
+            print "An item was added to the inbox with subject:", item.Subject
+        except AttributeError:
+            print "An item was added to the inbox, but it has no subject! - ", repr(item)
+
+
+
+class OutlookAddin:
+    _com_interfaces_ = ['_IDTExtensibility2']
+    _public_methods_ = []
+    _reg_clsctx_ = pythoncom.CLSCTX_INPROC_SERVER
+    _reg_clsid_ = "{0F47D9F3-598B-4d24-B7E3-92AC15ED27E2}"
+    _reg_progid_ = "Python.Test.OutlookAddin"
+    _reg_policy_spec_ = "win32com.server.policy.EventHandlerPolicy"
+    def OnConnection(self, application, connectMode, addin, custom):
+        print "OnConnection", application, connectMode, addin, custom
+        # ActiveExplorer may be none when started without a UI (eg, WinCE synchronisation)
+        activeExplorer = application.ActiveExplorer()
+        if activeExplorer is not None:
+            bars = activeExplorer.CommandBars
+            toolbar = bars.Item("Standard")
+            item = toolbar.Controls.Add(Type=constants.msoControlButton, Temporary=True)
+            # Hook events for the item
+            item = self.toolbarButton = DispatchWithEvents(item, ButtonEvent)
+            item.Caption="Python"
+            item.TooltipText = "Click for Python"
+            item.Enabled = True
+
+        # And now, for the sake of demonstration, setup a hook for all new messages
+        inbox = application.Session.GetDefaultFolder(constants.olFolderInbox)
+        self.inboxItems = DispatchWithEvents(inbox.Items, FolderEvent)
+
+    def OnDisconnection(self, mode, custom):
+        print "OnDisconnection"
+    def OnAddInsUpdate(self, custom):
+        print "OnAddInsUpdate", custom
+    def OnStartupComplete(self, custom):
+        print "OnStartupComplete", custom
+    def OnBeginShutdown(self, custom):
+        print "OnBeginShutdown", custom
+
+def RegisterAddin(klass):
+    import _winreg
+    key = _winreg.CreateKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Outlook\\Addins")
+    subkey = _winreg.CreateKey(key, klass._reg_progid_)
+    _winreg.SetValueEx(subkey, "CommandLineSafe", 0, _winreg.REG_DWORD, 0)
+    _winreg.SetValueEx(subkey, "LoadBehavior", 0, _winreg.REG_DWORD, 3)
+    _winreg.SetValueEx(subkey, "Description", 0, _winreg.REG_SZ, klass._reg_progid_)
+    _winreg.SetValueEx(subkey, "FriendlyName", 0, _winreg.REG_SZ, klass._reg_progid_)
+
+def UnregisterAddin(klass):
+    import _winreg
+    try:
+        _winreg.DeleteKey(_winreg.HKEY_CURRENT_USER, "Software\\Microsoft\\Office\\Outlook\\Addins\\" + klass._reg_progid_)
+    except WindowsError:
+        pass
+
+if __name__ == '__main__':
+    import win32com.server.register
+    win32com.server.register.UseCommandLine(OutlookAddin)
+    if "--unregister" in sys.argv:
+        UnregisterAddin(OutlookAddin)
+    else:
+        RegisterAddin(OutlookAddin)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/trybag.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/trybag.py
new file mode 100644
index 0000000..b6b4342
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/demos/trybag.py
@@ -0,0 +1,74 @@
+import pythoncom
+from win32com.server import util
+from win32com.server import exception
+
+VT_EMPTY = pythoncom.VT_EMPTY
+
+class Bag:
+  _public_methods_ = [ 'Read', 'Write' ]
+  _com_interfaces_ = [ pythoncom.IID_IPropertyBag ]
+
+  def __init__(self):
+    self.data = { }
+
+  def Read(self, propName, varType, errorLog):
+    print "read: name=", propName, "type=", varType
+    if not self.data.has_key(propName):
+      if errorLog:
+        hr = 0x80070057
+        errorLog.AddError(propName, (0, "Bag.Read", "no such item", None, 0, hr))
+      raise exception.Exception(scode=hr)
+    return self.data[propName]
+
+  def Write(self, propName, value):
+    print "write: name=", propName, "value=", value
+    self.data[propName] = value
+
+
+class Target:
+  _public_methods_ = [ 'GetClassID', 'InitNew', 'Load', 'Save' ]
+  _com_interfaces_ = [ pythoncom.IID_IPersist,
+                       pythoncom.IID_IPersistPropertyBag ]
+
+  def GetClassID(self):
+    raise exception.Exception(scode=0x80004005)	# E_FAIL
+
+  def InitNew(self):
+    pass
+
+  def Load(self, bag, log):
+    print bag.Read('prop1', VT_EMPTY, log)
+    print bag.Read('prop2', VT_EMPTY, log)
+    try:
+      print bag.Read('prop3', VT_EMPTY, log)
+    except exception.Exception:
+      pass
+
+  def Save(self, bag, clearDirty, saveAllProps):
+    bag.Write('prop1', 'prop1.hello')
+    bag.Write('prop2', 'prop2.there')
+
+class Log:
+  _public_methods_ = [ 'AddError' ]
+  _com_interfaces_ = [ pythoncom.IID_IErrorLog ]
+
+  def AddError(self, propName, excepInfo):
+    print "error: propName=", propName, "error=", excepInfo
+
+def test():
+  bag = Bag()
+  target = Target()
+  log = Log()
+
+  target.Save(bag, 1, 1)
+  target.Load(bag, log)
+
+  comBag = util.wrap(bag, pythoncom.IID_IPropertyBag)
+  comTarget = util.wrap(target, pythoncom.IID_IPersistPropertyBag)
+  comLog = util.wrap(log, pythoncom.IID_IErrorLog)
+
+  comTarget.Save(comBag, 1, 1)
+  comTarget.Load(comBag, comLog)
+
+if __name__ == '__main__':
+  test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/gen_py/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/gen_py/__init__.py
new file mode 100644
index 0000000..3eb8d18
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/gen_py/__init__.py
@@ -0,0 +1,3 @@
+# Generated file - this directory may be deleted to reset the COM cache...
+import win32com
+if __path__[:-1] != win32com.__gen_path__: __path__.append(win32com.__gen_path__)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/gen_py/dicts.dat b/depot_tools/release/win/python_24/Lib/site-packages/win32com/gen_py/dicts.dat
new file mode 100644
index 0000000..e569f56
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/gen_py/dicts.dat
@@ -0,0 +1,3 @@
+I1
+.(dp1
+.
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOM.h b/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOM.h
new file mode 100644
index 0000000..c5fd3a1a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOM.h
@@ -0,0 +1,722 @@
+/* PythonCOM.h
+
+ Main header for Python COM support.
+
+ This file is involved mainly with client side COM support for
+ Python.
+
+ Most COM work put together by Greg Stein and Mark Hammond, with a
+ few others starting to come out of the closet.
+
+
+ --------------------------------------------------------------------
+ Thread State Rules
+ ------------------
+ These rules apply to PythonCOM in general, and not just to
+ the client side.
+
+ The rules are quite simple, but it is critical they be followed.
+ In general, errors here will be picked up quite quickly, as Python
+ will raise a Fatal Error.  However, the Release() issue in particular
+ may keep a number of problems well hidden.
+
+ Interfaces:
+ -----------
+ Before making ANY call out to COM, you MUST release the Python lock.
+ This is true for ANY call whatsoever, including the COM call in question,
+ but also any calls to "->Release();"
+
+ This is normally achieved with the calls
+ PY_INTERFACE_PRECALL and PY_INTERFACE_POSTCALL, which release
+ and acquire the Python lock.
+
+ Gateways:
+ ---------
+ Before doing anything related to Python, gateways MUST acquire the
+ Python lock, and must release it before returning.
+
+ This is normally achieved with PY_GATEWAY_METHOD at the top of a 
+ gateway method.  This macro resolves to a class, which automatically does
+ the right thing.
+
+ Release:
+ --------
+ As mentioned above for Interfaces, EVERY call to Release() must be done
+ with the Python lock released.  This is expanded here.
+ 
+ This is very important, but an error may not be noticed.  The problem will 
+ only be seen when the Release() is on a Python object and the Release() is the
+ final one for the object.  In this case, the Python object will attempt to 
+ acquire the Python lock before destroying itself, and Python will raise a
+ fatal error.
+
+ In many many cases, you will not notice this error, but someday, someone will
+ implement the other side in Python, and suddenly FatalErrors will start 
+ appearing.  Make sure you get this right.
+
+ Eg, this code is correct:
+   PY_INTERFACE_PRECALL;
+   pSomeObj->SomeFunction(pSomeOtherObject);
+   pSomeOtherObject->Release();
+   PY_INTERFACE_POSTCALL;
+
+ However, this code is WRONG, but will RARELY FAIL.
+   PY_INTERFACE_PRECALL;
+   pSomeObj->SomeFunction(pSomeOtherObject);
+   PY_INTERFACE_POSTCALL;
+   pSomeOtherObject->Release();
+--------------------------------------------------------------------
+*/
+#ifndef __PYTHONCOM_H__
+#define __PYTHONCOM_H__
+
+// #define _DEBUG_LIFETIMES // Trace COM object lifetimes.
+
+#ifdef FREEZE_PYTHONCOM
+	/* The pythoncom module is being included in a frozen .EXE/.DLL */
+#	define PYCOM_EXPORT
+#else
+#	ifdef BUILD_PYTHONCOM
+		/* We are building pythoncomxx.dll */
+#		define PYCOM_EXPORT __declspec(dllexport)
+#	else
+		/* This module uses pythoncomxx.dll */
+#		define PYCOM_EXPORT __declspec(dllimport)
+#		ifndef _DEBUG
+#			pragma comment(lib, "pythoncom.lib")
+#		else
+#			pragma comment(lib, "pythoncom_d.lib")
+#		endif
+#	endif
+#endif
+
+#ifdef MS_WINCE
+// List of interfaces not supported by CE.
+#define NO_PYCOM_IDISPATCHEX
+#define NO_PYCOM_IPROVIDECLASSINFO
+#define NO_PYCOM_IENUMGUID
+#define NO_PYCOM_IENUMCATEGORYINFO
+#define NO_PYCOM_ICATINFORMATION
+#define NO_PYCOM_ICATREGISTER
+#define NO_PYCOM_ISERVICEPROVIDER
+#define NO_PYCOM_IPROPERTYSTORAGE
+#define NO_PYCOM_IPROPERTYSETSTORAGE
+#define NO_PYCOM_ENUMSTATPROPSTG
+
+#include "ocidl.h"
+#include "oleauto.h"
+
+#endif // MS_WINCE
+
+#ifdef __MINGW32__
+// Special Mingw32 considerations.
+#define NO_PYCOM_IDISPATCHEX
+#define NO_PYCOM_IPROVIDECLASSINFO
+#define NO_PYCOM_ISERVICEPROVIDER
+#define NO_PYCOM_ENUMSTATPROPSTG
+#define NO_PYCOM_IPROPERTYSTORAGE
+#define __try try
+#define __except catch
+#include <olectl.h>
+
+#endif // __MINGW32__
+
+#include <PyWinTypes.h> // Standard Win32 Types
+
+#ifndef NO_PYCOM_IDISPATCHEX
+#include <dispex.h> // New header for IDispatchEx interface.
+#endif // NO_PYCOM_IDISPATCHEX
+
+#if defined(MAINWIN)
+// Mainwin seems to have 1/2 the VT_RECORD infrastructure in place
+#	if !defined(VT_RECORD)
+#		define VT_RECORD 36
+#		define V_RECORDINFO(X) ((X)->brecVal.pRecInfo)
+#		define V_RECORD(X)     ((X)->brecVal.pvRecord)
+#	else
+#		pragma message("MAINWIN appears to have grown correct VT_RECORD " \
+		                "support. Please update PythonCOM.h accordingly")
+#	endif //VT_RECORD
+#endif // MAINWIN
+
+class PyIUnknown;
+// To make life interesting/complicated, I use C++ classes for
+// all Python objects.  The main advantage is that I can derive
+// a PyIDispatch object from a PyIUnknown, etc.  This provides a 
+// clean C++ interface, and "automatically" provides all base
+// Python methods to "derived" Python types.
+//
+// Main disadvantage is that any extension DLLs will need to include
+// these headers, and link with this .lib
+// 
+// Base class for (most of) the type objects.
+
+class PYCOM_EXPORT PyComTypeObject : public PyTypeObject {
+public:
+	PyComTypeObject( const char *name, PyComTypeObject *pBaseType, int typeSize, struct PyMethodDef* methodList, PyIUnknown* (* thector)(IUnknown *)  );
+	~PyComTypeObject();
+
+	// is the given object an interface type object? (e.g. PyIUnknown)
+	static BOOL is_interface_type(const PyObject *ob);
+
+public:
+	PyMethodChain chain;
+	PyComTypeObject *baseType;
+	PyIUnknown * (* ctor)(IUnknown *);
+};
+
+// A type used for interfaces that can automatically provide enumerators
+// (ie, they themselves aren't enumerable, but do have a suitable default 
+// method that returns a PyIEnum object)
+class PYCOM_EXPORT PyComEnumProviderTypeObject : public PyComTypeObject {
+public:
+	PyComEnumProviderTypeObject( const char *name, 
+	                             PyComTypeObject *pBaseType, 
+	                             int typeSize, 
+	                             struct PyMethodDef* methodList, 
+	                             PyIUnknown* (* thector)(IUnknown *),
+	                             const char *enum_method_name);
+	static PyObject *iter(PyObject *self);
+	const char *enum_method_name;
+};
+
+// A type used for PyIEnum interfaces
+class PYCOM_EXPORT PyComEnumTypeObject : public PyComTypeObject {
+public:
+	static PyObject *iter(PyObject *self);
+	static PyObject *iternext(PyObject *self);
+	PyComEnumTypeObject( const char *name, PyComTypeObject *pBaseType, int typeSize, struct PyMethodDef* methodList, PyIUnknown* (* thector)(IUnknown *)  );
+};
+
+// Very very base class - not COM specific - Should exist in the
+// Python core somewhere, IMO.
+class PYCOM_EXPORT PyIBase : 
+		public PyObject 
+{
+public:
+	// virtuals for Python support
+	virtual PyObject *getattr(char *name);
+	virtual int setattr(char *name, PyObject *v);
+	virtual PyObject *repr();
+	virtual int compare(PyObject *other) {return (int)this-int(other);}
+	// These iter are a little special, in that returning NULL means
+	// use the implementation in the type
+	virtual PyObject *iter() {return NULL;}
+	virtual PyObject *iternext() {return NULL;}
+	static struct PyMethodDef PyIBase::empty_methods[];
+protected:
+	PyIBase();
+	virtual ~PyIBase();
+
+public:
+	static BOOL is_object( const PyObject *, PyComTypeObject *which);
+	BOOL is_object(PyComTypeObject *which);
+	static void dealloc(PyObject *ob);
+	static PyObject *repr(PyObject *ob);
+	static PyObject *getattro(PyObject *self, PyObject *name);
+	static int setattr(PyObject *op, char *name, PyObject *v);
+	static int cmp(PyObject *ob1, PyObject *ob2);
+};
+
+/* Special Type objects */
+extern PYCOM_EXPORT PyTypeObject PyOleEmptyType;     // equivalent to VT_EMPTY
+extern PYCOM_EXPORT PyTypeObject PyOleMissingType;     // special Python handling.
+extern PYCOM_EXPORT PyTypeObject PyOleArgNotFoundType;     // special VT_ERROR value
+
+// ALL of these set an appropriate Python error on bad return.
+
+// Given a Python object that is a registered COM type, return a given
+// interface pointer on its underlying object, with a new reference added.
+PYCOM_EXPORT BOOL PyCom_InterfaceFromPyObject(
+	PyObject *ob,
+	REFIID iid,
+	LPVOID *ppv,
+	BOOL bNoneOK=TRUE
+	);
+
+// As above, but allows instance with "_oleobj_" attribute.
+PYCOM_EXPORT BOOL PyCom_InterfaceFromPyInstanceOrObject(
+	PyObject *ob,
+	REFIID iid,
+	LPVOID *ppv,
+	BOOL bNoneOK=TRUE
+	);
+
+// Given an IUnknown and an Interface ID, create and return an object
+// of the appropriate type. eg IID_Unknown->PyIUnknown,
+// IID_IDispatch->PyIDispatch, etc.
+// Uses a map that external extension DLLs can populate with their IID/type.
+// Under the principle of least surprise, this will return Py_None if punk is NULL.
+//  Otherwise, a valid PyI*, but with NULL m_obj (and therefore totally useless)
+//  object would be created.
+// BOOL bAddRef indicates if a COM reference count should be added to the IUnknown.
+//  This depends purely on the context in which it is called.  If the IUnknown is obtained
+//  from a function that creates a new ref (eg, CoCreateInstance()) then you should use
+//  FALSE.  If you receive the pointer as (eg) a param to a gateway function, then
+//  you normally need to pass TRUE, as this is truly a new reference.
+//  *** ALWAYS take the time to get this right. ***
+PYCOM_EXPORT PyObject *PyCom_PyObjectFromIUnknown(IUnknown *punk, REFIID riid, BOOL bAddRef = FALSE);
+
+// VARIANT <-> PyObject conversion utilities.
+PYCOM_EXPORT BOOL PyCom_VariantFromPyObject(PyObject *obj, VARIANT *var);
+PYCOM_EXPORT PyObject *PyCom_PyObjectFromVariant(const VARIANT *var);
+
+// PROPVARIANT
+PYCOM_EXPORT PyObject *PyObject_FromPROPVARIANT( PROPVARIANT *pVar );
+PYCOM_EXPORT PyObject *PyObject_FromPROPVARIANTs( PROPVARIANT *pVars, ULONG cVars );
+PYCOM_EXPORT BOOL PyObject_AsPROPVARIANT(PyObject *ob, PROPVARIANT *pVar);
+
+// Other conversion helpers...
+PYCOM_EXPORT PyObject *PyCom_PyObjectFromSTATSTG(STATSTG *pStat);
+PYCOM_EXPORT BOOL PyCom_PyObjectAsSTATSTG(PyObject *ob, STATSTG *pStat, DWORD flags = 0);
+PYCOM_EXPORT BOOL PyCom_SAFEARRAYFromPyObject(PyObject *obj, SAFEARRAY **ppSA, VARENUM vt = VT_VARIANT);
+PYCOM_EXPORT PyObject *PyCom_PyObjectFromSAFEARRAY(SAFEARRAY *psa, VARENUM vt = VT_VARIANT );
+#ifndef NO_PYCOM_STGOPTIONS
+PYCOM_EXPORT BOOL PyCom_PyObjectAsSTGOPTIONS(PyObject *obstgoptions, STGOPTIONS **ppstgoptions);
+#endif
+PYCOM_EXPORT PyObject *PyCom_PyObjectFromSTATPROPSETSTG(STATPROPSETSTG *pStat);
+
+// Currency support.
+PYCOM_EXPORT PyObject *PyObject_FromCurrency(CURRENCY &cy);
+PYCOM_EXPORT BOOL PyObject_AsCurrency(PyObject *ob, CURRENCY *pcy);
+
+/* Functions for Initializing COM, and also letting the core know about it!
+*/
+PYCOM_EXPORT HRESULT PyCom_CoInitializeEx(LPVOID reserved, DWORD dwInit);
+PYCOM_EXPORT HRESULT PyCom_CoInitialize(LPVOID reserved);
+PYCOM_EXPORT void PyCom_CoUninitialize();
+
+///////////////////////////////////////////////////////////////////
+// Error related functions
+
+// Client related functions - generally called by interfaces before
+// they return NULL back to Python to indicate the error.
+// All these functions return NULL so interfaces can generally
+// just "return PyCom_BuildPyException(hr, punk, IID_IWhatever)"
+
+// Uses the HRESULT, and IErrorInfo interfaces if available to
+// create and set a pythoncom.com_error.
+PYCOM_EXPORT PyObject *PyCom_BuildPyException(HRESULT hr, IUnknown *pUnk=NULL, REFIID iid=IID_NULL);
+
+// Uses the HRESULT and an EXCEPINFO structure to create and
+// set a pythoncom.com_error.
+PYCOM_EXPORT PyObject* PyCom_BuildPyExceptionFromEXCEPINFO(HRESULT hr, EXCEPINFO *pexcepInfo, UINT nArgErr = (UINT)-1);
+
+// Sets a pythoncom.internal_error - no one should ever see these!
+PYCOM_EXPORT PyObject* PyCom_BuildInternalPyException(char *msg);
+
+// Log an error to a Python logger object if one can be found, or
+// to stderr if no log available.
+// If logProvider is not NULL, we will call a "_GetLogger_()" method on it.
+// If logProvider is NULL, we attempt to fetch "win32com.logger".
+// If they do not exist, return None, or raise an error fetching them
+// (or even writing to them once fetched), the message still goes to stderr.
+// NOTE: By default, win32com does *not* provide a logger, so default is that
+// all errors are written to stderr.
+PYCOM_EXPORT void PyCom_LoggerNonServerException(PyObject *logProvider,
+											     const char *fmt, ...);
+
+PYCOM_EXPORT void PyCom_LoggerException(PyObject *logProvider, const char *fmt, ...);
+
+// Server related error functions
+// These are supplied so that any Python errors we detect can be
+// converted into COM error information.  The HRESULT returned should
+// be returned by the COM function, and these functions also set the
+// IErrorInfo interfaces, so the caller can extract more detailed
+// information about the Python exception.
+
+// Set a COM exception, logging the exception if not an explicitly raised 'server' exception
+PYCOM_EXPORT HRESULT PyCom_SetAndLogCOMErrorFromPyException(const char *methodName, REFIID riid /* = IID_NULL */);
+PYCOM_EXPORT HRESULT PyCom_SetAndLogCOMErrorFromPyExceptionEx(PyObject *provider, const char *methodName, REFIID riid /* = IID_NULL */);
+
+// Used in gateways to SetErrorInfo() with a simple HRESULT, then return it.
+// The description is generally only useful for debugging purposes,
+// and if you are debugging via a server that supports IErrorInfo (like Python :-)
+// NOTE: this function is usable from outside the Python context
+PYCOM_EXPORT HRESULT PyCom_SetCOMErrorFromSimple(HRESULT hr, REFIID riid = IID_NULL, const char *description = NULL);
+
+// Used in gateways to SetErrorInfo() the current Python exception, and
+// (assuming not a server error explicitly raised) also logs an error
+// to stdout/win32com.logger.
+// NOTE: this function assumes GIL held
+PYCOM_EXPORT HRESULT PyCom_SetCOMErrorFromPyException(REFIID riid = IID_NULL);
+
+// A couple of EXCEPINFO helpers - could be private to IDispatch
+// if it wasn't for the AXScript support (and ITypeInfo if we get around to that :-)
+// These functions do not set any error states to either Python or
+// COM - they simply convert to/from PyObjects and EXCEPINFOs
+
+// Use the current Python exception to fill an EXCEPINFO structure.
+PYCOM_EXPORT void PyCom_ExcepInfoFromPyException(EXCEPINFO *pExcepInfo);
+
+// Fill in an EXCEPINFO structure from a Python instance or tuple object.
+// (ie, similar to the above, except the Python exception object is specified,
+// rather than using the "current" exception).
+PYCOM_EXPORT BOOL PyCom_ExcepInfoFromPyObject(PyObject *obExcepInfo, EXCEPINFO *pexcepInfo, HRESULT *phresult = NULL);
+
+// Create a Python object holding the exception information.  The exception
+// information is *not* freed by this function.  Python exceptions are
+// raised and NULL is returned if an error occurs.
+PYCOM_EXPORT PyObject *PyCom_PyObjectFromExcepInfo(const EXCEPINFO *pexcepInfo);
+
+///////////////////////////////////////////////////////////////////
+//
+// External C++ helpers - these helpers are for other DLLs which
+// may need similar functionality, but dont want to duplicate all
+
+// This helper is for an application that has an IDispatch, and COM arguments
+// and wants to call a Python function.  It is assumed the caller can map the IDispatch
+// to a Python object, so the Python handler is passed.
+// Args:
+//   handler : A Python callable object.
+//   dispparms : the COM arguments.
+//   pVarResult : The variant for the return value of the Python call.
+//   pexcepinfo : Exception info the helper may fill out.
+//   puArgErr : Argument error the helper may fill out on exception
+//   addnArgs : Any additional arguments to the Python function.  May be NULL.
+// If addnArgs is NULL, then it is assumed the Python call should be native -
+// ie, the COM args are packed as normal Python args to the call.
+// If addnArgs is NOT NULL, it is assumed the Python function itself is
+// a helper.  This Python function will be called with 2 arguments - both
+// tuples - first one is the COM args, second is the addn args.
+PYCOM_EXPORT BOOL PyCom_MakeOlePythonCall(PyObject *handler, DISPPARAMS FAR* params, VARIANT FAR* pVarResult,
+	EXCEPINFO FAR* pexcepinfo, UINT FAR* puArgErr, PyObject *addnlArgs);
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyOleEmpty
+class PYCOM_EXPORT PyOleEmpty : public PyObject
+{
+public:
+	PyOleEmpty();
+};
+
+class PYCOM_EXPORT PyOleMissing : public PyObject
+{
+public:
+	PyOleMissing();
+};
+
+class PYCOM_EXPORT PyOleArgNotFound : public PyObject
+{
+public:
+	PyOleArgNotFound();
+};
+
+// We need to dynamically create C++ Python objects
+// These helpers allow each type object to create it.
+#define MAKE_PYCOM_CTOR(classname) static PyIUnknown * classname::PyObConstruct(IUnknown *pInitObj) {return new classname(pInitObj);}
+#define MAKE_PYCOM_CTOR_ERRORINFO(classname, iid) \
+         static PyIUnknown * classname::PyObConstruct(IUnknown *pInitObj) {return new classname(pInitObj);} \
+		 static PyObject *SetPythonCOMError(PyObject *self, HRESULT hr) {return PyCom_BuildPyException(hr, GetI(self), iid);}
+#define GET_PYCOM_CTOR(classname) classname::PyObConstruct
+
+// Macros that interfaces should use.  PY_INTERFACE_METHOD at the top of the method
+// The other 2 wrap directly around the underlying method call.
+#define PY_INTERFACE_METHOD
+// Identical to Py_BEGIN_ALLOW_THREADS except no { !!!
+#define PY_INTERFACE_PRECALL PyThreadState *_save = PyEval_SaveThread();
+#define PY_INTERFACE_POSTCALL PyEval_RestoreThread(_save);
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyIUnknown
+class PYCOM_EXPORT PyIUnknown : public PyIBase
+{
+public:
+	MAKE_PYCOM_CTOR(PyIUnknown);
+	virtual PyObject *repr();
+	virtual int compare(PyObject *other);
+
+	static IUnknown *GetI(PyObject *self);
+	IUnknown *m_obj;
+	static char *szErrMsgObjectReleased;
+	static void SafeRelease(PyIUnknown *ob);
+	static PyComTypeObject type;
+
+	// The Python methods
+	static PyObject *QueryInterface(PyObject *self, PyObject *args);
+	static PyObject *SafeRelease(PyObject *self, PyObject *args);
+
+protected:
+	PyIUnknown(IUnknown *punk);
+	~PyIUnknown();
+};
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyIDispatch
+
+class PYCOM_EXPORT PyIDispatch : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR(PyIDispatch);
+	static IDispatch *GetI(PyObject *self);
+	static PyComTypeObject type;
+
+	// The Python methods
+	static PyObject *Invoke(PyObject *self, PyObject *args);
+	static PyObject *InvokeTypes(PyObject *self, PyObject *args);
+	static PyObject *GetIDsOfNames(PyObject *self, PyObject *args);
+	static PyObject *GetTypeInfo(PyObject *self, PyObject *args);
+	static PyObject *GetTypeInfoCount(PyObject *self, PyObject *args);
+protected:
+	PyIDispatch(IUnknown *pdisp);
+	~PyIDispatch();
+};
+
+#ifndef NO_PYCOM_IDISPATCHEX
+/////////////////////////////////////////////////////////////////////////////
+// class PyIDispatchEx
+
+class PYCOM_EXPORT PyIDispatchEx : public PyIDispatch
+{
+public:
+	MAKE_PYCOM_CTOR_ERRORINFO(PyIDispatchEx, IID_IDispatchEx);
+	static IDispatchEx *GetI(PyObject *self);
+	static PyComTypeObject type;
+
+	// The Python methods
+	static PyObject *GetDispID(PyObject *self, PyObject *args);
+	static PyObject *InvokeEx(PyObject *self, PyObject *args);
+	static PyObject *DeleteMemberByName(PyObject *self, PyObject *args);
+	static PyObject *DeleteMemberByDispID(PyObject *self, PyObject *args);
+	static PyObject *GetMemberProperties(PyObject *self, PyObject *args);
+	static PyObject *GetMemberName(PyObject *self, PyObject *args);
+	static PyObject *GetNextDispID(PyObject *self, PyObject *args);
+
+protected:
+	PyIDispatchEx(IUnknown *pdisp);
+	~PyIDispatchEx();
+};
+#endif // NO_PYCOM_IDISPATCHEX
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyIClassFactory
+
+class PYCOM_EXPORT PyIClassFactory : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR(PyIClassFactory);
+	static IClassFactory *GetI(PyObject *self);
+	static PyComTypeObject type;
+
+	// The Python methods
+	static PyObject *CreateInstance(PyObject *self, PyObject *args);
+	static PyObject *LockServer(PyObject *self, PyObject *args);
+protected:
+	PyIClassFactory(IUnknown *pdisp);
+	~PyIClassFactory();
+};
+
+#ifndef NO_PYCOM_IPROVIDECLASSINFO
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyIProvideTypeInfo
+
+class PYCOM_EXPORT PyIProvideClassInfo : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR(PyIProvideClassInfo);
+	static IProvideClassInfo *GetI(PyObject *self);
+	static PyComTypeObject type;
+
+	// The Python methods
+	static PyObject *GetClassInfo(PyObject *self, PyObject *args);
+protected:
+	PyIProvideClassInfo(IUnknown *pdisp);
+	~PyIProvideClassInfo();
+};
+
+class PYCOM_EXPORT PyIProvideClassInfo2 : public PyIProvideClassInfo
+{
+public:
+	MAKE_PYCOM_CTOR(PyIProvideClassInfo2);
+	static IProvideClassInfo2 *GetI(PyObject *self);
+	static PyComTypeObject type;
+
+	// The Python methods
+	static PyObject *GetGUID(PyObject *self, PyObject *args);
+protected:
+	PyIProvideClassInfo2(IUnknown *pdisp);
+	~PyIProvideClassInfo2();
+};
+#endif //NO_PYCOM_IPROVIDECLASSINFO
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyITypeInfo
+class PYCOM_EXPORT PyITypeInfo : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR(PyITypeInfo);
+	static PyComTypeObject type;
+	static ITypeInfo *GetI(PyObject *self);
+	
+	PyObject *GetContainingTypeLib();
+	PyObject *GetDocumentation(MEMBERID);
+	PyObject *GetRefTypeInfo(HREFTYPE href);
+	PyObject *GetRefTypeOfImplType(int index);
+	PyObject *GetFuncDesc(int pos);
+	PyObject *GetIDsOfNames(OLECHAR FAR* FAR*, int);
+	PyObject *GetNames(MEMBERID);
+	PyObject *GetTypeAttr();
+	PyObject *GetVarDesc(int pos);
+	PyObject *GetImplTypeFlags(int index);
+	PyObject *GetTypeComp();
+
+protected:
+	PyITypeInfo(IUnknown *);
+	~PyITypeInfo();
+};
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyITypeComp
+class PYCOM_EXPORT PyITypeComp : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR(PyITypeComp);
+	static PyComTypeObject type;
+	static ITypeComp *GetI(PyObject *self);
+	
+	PyObject *Bind(OLECHAR* szName, unsigned short wflags);
+	PyObject *BindType(OLECHAR* szName);
+
+protected:
+	PyITypeComp(IUnknown *);
+	~PyITypeComp();
+};
+
+
+/////////////////////////////////////////////////////////////////////////////
+// class CPyTypeLib
+
+class PYCOM_EXPORT PyITypeLib : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR(PyITypeLib);
+	static PyComTypeObject type;
+	static ITypeLib *GetI(PyObject *self);
+
+	PyObject *GetLibAttr();
+	PyObject *GetDocumentation(int pos);
+	PyObject *GetTypeInfo(int pos);
+	PyObject *GetTypeInfoCount();
+	PyObject *GetTypeInfoOfGuid(REFGUID guid);
+	PyObject *GetTypeInfoType(int pos);
+	PyObject *GetTypeComp();
+
+protected:
+	PyITypeLib(IUnknown *);
+	~PyITypeLib();
+};
+
+/////////////////////////////////////////////////////////////////////////////
+// class PyIConnectionPoint
+
+class PYCOM_EXPORT PyIConnectionPoint : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR_ERRORINFO(PyIConnectionPoint,IID_IConnectionPoint);
+	static PyComTypeObject type;
+	static IConnectionPoint *GetI(PyObject *self);
+
+	static PyObject *GetConnectionInterface(PyObject *self, PyObject *args);
+	static PyObject *GetConnectionPointContainer(PyObject *self, PyObject *args);
+	static PyObject *Advise(PyObject *self, PyObject *args);
+	static PyObject *Unadvise(PyObject *self, PyObject *args);
+	static PyObject *EnumConnections(PyObject *self, PyObject *args);
+
+protected:
+	PyIConnectionPoint(IUnknown *);
+	~PyIConnectionPoint();
+};
+
+class PYCOM_EXPORT PyIConnectionPointContainer : public PyIUnknown
+{
+public:
+	MAKE_PYCOM_CTOR_ERRORINFO(PyIConnectionPointContainer, IID_IConnectionPointContainer);
+	static PyComTypeObject type;
+	static IConnectionPointContainer *GetI(PyObject *self);
+
+	static PyObject *EnumConnectionPoints(PyObject *self, PyObject *args);
+	static PyObject *FindConnectionPoint(PyObject *self, PyObject *args);
+
+protected:
+	PyIConnectionPointContainer(IUnknown *);
+	~PyIConnectionPointContainer();
+};
+
+
+/////////////////////////////////////////////////////////////////////////////
+// class PythonOleArgHelper
+//
+// A PythonOleArgHelper is used primarily to help out Python helpers
+// which need to convert from a Python object when the specific OLE 
+// type is known - eg, when a TypeInfo is available.
+//
+// The type of conversion determines who owns what buffers etc.  I wish BYREF didnt exist :-)
+typedef enum {
+	// We dont know what sort of conversion it is yet.
+	POAH_CONVERT_UNKNOWN,
+	// A PyObject is given, we convert to a VARIANT, make the COM call, then BYREFs back to a PyObject
+	// ie, this is typically a "normal" COM call, where Python initiates the call
+	POAH_CONVERT_FROM_PYOBJECT, 
+	// A VARIANT is given, we convert to a PyObject, make the Python call, then BYREFs back to a VARIANT.
+	// ie, this is typically handling a COM event, where COM itself initiates the call.
+	POAH_CONVERT_FROM_VARIANT, 
+} POAH_CONVERT_DIRECTION;
+
+class PYCOM_EXPORT PythonOleArgHelper
+{
+public:
+	PythonOleArgHelper();
+	~PythonOleArgHelper();
+	BOOL ParseTypeInformation(PyObject *reqdObjectTuple);
+
+	// Using this call with reqdObject != NULL will check the existing 
+	// VT_ of the variant.  If not VT_EMPTY, then the result will be coerced to
+	// that type.  This contrasts with PyCom_PyObjectToVariant which just
+	// uses the Python type to determine the variant type.
+	BOOL MakeObjToVariant(PyObject *obj, VARIANT *var, PyObject *reqdObjectTuple = NULL);
+	PyObject *MakeVariantToObj(VARIANT *var);
+
+	VARTYPE m_reqdType;
+	BOOL m_bParsedTypeInfo;
+	BOOL m_bIsOut;
+	POAH_CONVERT_DIRECTION m_convertDirection;
+	union {
+		void *m_pValueHolder;
+		short m_sBuf;
+		long m_lBuf;
+		VARIANT_BOOL m_boolBuf;
+		double m_dBuf;
+		float m_fBuf;
+		IDispatch *m_dispBuf;
+		IUnknown *m_unkBuf;
+		SAFEARRAY *m_arrayBuf;
+		VARIANT *m_varBuf;
+		DATE m_dateBuf;
+	};
+};
+
+
+/////////////////////////////////////////////////////////////////////////////
+// global functions and variables
+PYCOM_EXPORT BOOL MakePythonArgumentTuples(PyObject **pArgs, PythonOleArgHelper **ppHelpers, 
+                     PyObject **pNamedArgs, PythonOleArgHelper **ppNamedHelpers,
+                     DISPPARAMS FAR* params);
+
+
+// Convert a Python object to a BSTR - allow embedded NULLs, None, etc.
+PYCOM_EXPORT BOOL PyCom_BstrFromPyObject(PyObject *stringObject, BSTR *pResult, BOOL bNoneOK = FALSE);
+
+// MakeBstrToObj - convert a BSTR into a Python string.
+//
+// ONLY USE THIS FOR TRUE BSTR's - Use the fn below for OLECHAR *'s.
+// NOTE - does not use standard macros, so NULLs get through!
+PYCOM_EXPORT PyObject *MakeBstrToObj(const BSTR bstr);
+
+// Size info is available (eg, a fn returns a string and also fills in a size variable)
+PYCOM_EXPORT PyObject *MakeOLECHARToObj(const OLECHAR * str, int numChars);
+
+// No size info avail.
+PYCOM_EXPORT PyObject *MakeOLECHARToObj(const OLECHAR * str);
+
+#endif // __PYTHONCOM_H__
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOMRegister.h b/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOMRegister.h
new file mode 100644
index 0000000..23c31a56
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOMRegister.h
@@ -0,0 +1,45 @@
+// Support for PythonCOM and its extensions to register the interfaces,
+// gateways and IIDs it supports.
+//
+// The module can simply declare an array of type PyCom_InterfaceSupportInfo, then
+// use the macros to populate it.
+//
+// See Register.cpp and AXScript.cpp for examples on its use.
+
+#ifndef __PYTHONCOMREGISTER_H__
+#define  __PYTHONCOMREGISTER_H__
+
+#include "PythonCOMServer.h" // Need defns in this file...
+
+typedef struct
+{
+	const GUID *pGUID; // The supported IID - required
+	const char *interfaceName; // Name of the interface - required
+	const char *iidName; // Name of the IID that goes into the dict. - required
+	PyTypeObject *pTypeOb; // the type object for client PyI* side - NULL for server only support.
+	pfnPyGatewayConstructor ctor; // Gateway (PyG*) interface constructor - NULL for client only support
+
+} PyCom_InterfaceSupportInfo;
+
+#define PYCOM_INTERFACE_IID_ONLY(ifc)	{ &IID_I##ifc, "I" #ifc, "IID_I" #ifc, NULL, NULL  }
+#define PYCOM_INTERFACE_CLSID_ONLY(ifc)	{ &CLSID_##ifc, "CLSID_" #ifc,  "CLSID_" #ifc, NULL, NULL  }
+#define PYCOM_INTERFACE_CATID_ONLY(ifc)	{ &CATID_##ifc, "CATID_" #ifc,  "CATID_" #ifc, NULL, NULL  }
+#define PYCOM_INTERFACE_CLIENT_ONLY(ifc)	{ &IID_I##ifc, "I" #ifc, "IID_I" #ifc, &PyI##ifc::type, NULL }
+#define PYCOM_INTERFACE_SERVER_ONLY(ifc)	{ &IID_I##ifc, "I" #ifc, "IID_I" #ifc, NULL, GET_PYGATEWAY_CTOR(PyG##ifc)}
+#define PYCOM_INTERFACE_FULL(ifc)	{ &IID_I##ifc, "I" #ifc, "IID_I" #ifc, &PyI##ifc::type, GET_PYGATEWAY_CTOR(PyG##ifc)}
+
+// Prototypes for the register functions
+
+// Register a PythonCOM extension module
+PYCOM_EXPORT int PyCom_RegisterExtensionSupport( PyObject *dict, const PyCom_InterfaceSupportInfo *pInterfaces, int numEntries);
+
+// THESE SHOULD NO LONGER BE USED.  Instead, use the functions above passing an
+// array of PyCom_InterfaceSupportInfo objects.
+
+PYCOM_EXPORT int PyCom_RegisterClientType(PyTypeObject *typeOb, const GUID *guid);
+
+HRESULT PYCOM_EXPORT PyCom_RegisterGatewayObject(REFIID iid, pfnPyGatewayConstructor ctor, const char *interfaceName);
+PYCOM_EXPORT int PyCom_IsGatewayRegistered(REFIID iid);
+
+
+#endif /* __PYTHONCOMREGISTER_H__ */
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOMServer.h b/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOMServer.h
new file mode 100644
index 0000000..166d004b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/include/PythonCOMServer.h
@@ -0,0 +1,162 @@
+#ifndef __PYTHONCOMSERVER_H__
+#define __PYTHONCOMSERVER_H__
+
+// PythonCOMServer.h :Server side COM support
+
+#include <Python.h>
+
+#define DLLAcquireGlobalLock	PyWin_AcquireGlobalLock
+#define DLLReleaseGlobalLock	PyWin_ReleaseGlobalLock
+
+void PYCOM_EXPORT PyCom_DLLAddRef(void);
+void PYCOM_EXPORT PyCom_DLLReleaseRef(void);
+
+// Use this macro at the start of all gateway methods.
+#define PY_GATEWAY_METHOD CEnterLeavePython _celp
+
+class PyGatewayBase;
+// Gateway constructors.
+// Each gateway must be able to be created from a "gateway constructor".  This
+// is simply a function that takes a Python instance as an argument, and returns
+// a gateway object of the correct type.  The MAKE_PYGATEWAY_CTOR is a helper that
+// will embed such a constructor in the class - however, this is not necessary - 
+// _any_ function of the correct signature can be used.
+
+typedef HRESULT (* pfnPyGatewayConstructor)(PyObject *PythonInstance, PyGatewayBase *, void **ppResult, REFIID iid);
+HRESULT PyCom_MakeRegisteredGatewayObject(REFIID iid, PyObject *instance, PyGatewayBase *base, void **ppv);
+
+// A version of the above which support classes being derived from
+// other than IUnknown
+#define PYGATEWAY_MAKE_SUPPORT2(classname, IInterface, theIID, gatewaybaseclass) \
+	public: \
+		static HRESULT classname::PyGatewayConstruct(PyObject *pPyInstance, PyGatewayBase *unkBase, void **ppResult, REFIID iid) { \
+			if (ppResult==NULL) return E_INVALIDARG; \
+			classname *newob = new classname(pPyInstance); \
+			newob->m_pBaseObject = unkBase; \
+			if (unkBase) unkBase->AddRef(); \
+			*ppResult = newob->ThisAsIID(iid);  \
+			return *ppResult ? S_OK : E_OUTOFMEMORY; } \
+	protected: \
+		virtual IID GetIID(void) { return theIID; } \
+		virtual void *ThisAsIID(IID iid) {if (this==NULL) return NULL;if (iid==theIID) return (IInterface *)this; else return gatewaybaseclass::ThisAsIID(iid);} \
+		STDMETHOD_(ULONG,AddRef)(void) {return gatewaybaseclass::AddRef();} \
+		STDMETHOD_(ULONG,Release)(void) {return gatewaybaseclass::Release();} \
+		STDMETHOD(QueryInterface)(REFIID iid, void ** obj) {return gatewaybaseclass::QueryInterface(iid, obj);};
+
+// This is the "old" version to use, or use it if you derive
+// directly from PyGatewayBase
+#define PYGATEWAY_MAKE_SUPPORT(classname, IInterface, theIID) \
+	PYGATEWAY_MAKE_SUPPORT2(classname, IInterface, theIID, PyGatewayBase)
+
+
+#define GET_PYGATEWAY_CTOR(classname) classname::PyGatewayConstruct
+
+#ifdef _MSC_VER
+// Disable an OK warning...
+#pragma warning( disable : 4275 )
+// warning C4275: non dll-interface struct 'IDispatch' used as base for dll-interface class 'PyGatewayBase'
+#endif // _MSC_VER
+
+// Helper interface for fetching a Python object from a gateway
+
+extern const GUID IID_IInternalUnwrapPythonObject;
+
+interface IInternalUnwrapPythonObject : public IUnknown
+{
+public:
+    STDMETHOD(Unwrap)( PyObject **ppPyObject ) = 0;
+};
+
+/////////////////////////////////////////////////////////////////////////////
+// PyGatewayBase
+//
+// Base class for all gateways.
+//
+class PYCOM_EXPORT PyGatewayBase : 
+#ifndef NO_PYCOM_IDISPATCHEX
+	public IDispatchEx, // IDispatch comes along for the ride!
+#else
+	public IDispatch,   // No IDispatchEx - must explicitly use IDispatch
+#endif
+	public ISupportErrorInfo,
+	public IInternalUnwrapPythonObject
+{
+protected:
+	PyGatewayBase(PyObject *instance);
+	virtual ~PyGatewayBase();
+
+	// Invoke the Python method (via the policy object)
+	STDMETHOD(InvokeViaPolicy)(
+		const char *szMethodName,
+		PyObject **ppResult = NULL,
+		const char *szFormat = NULL,
+		...);
+
+public:
+	// IUnknown
+	STDMETHOD_(ULONG,AddRef)(void);
+	STDMETHOD_(ULONG,Release)(void);
+	STDMETHOD(QueryInterface)(REFIID iid, void ** obj);
+
+	// IDispatch
+	STDMETHOD(GetTypeInfoCount)(UINT FAR* pctInfo);
+	STDMETHOD(GetTypeInfo)(UINT itinfo, LCID lcid, ITypeInfo FAR* FAR* pptInfo);
+	STDMETHOD(GetIDsOfNames)(REFIID refiid,	OLECHAR FAR* FAR* rgszNames, UINT cNames, LCID lcid, DISPID FAR* rgdispid);
+	STDMETHOD(Invoke)(DISPID dispid, REFIID riid, LCID lcid, WORD wFlags, DISPPARAMS FAR* params, VARIANT FAR* pVarResult, EXCEPINFO FAR* pexcepinfo, UINT FAR* puArgErr);
+
+	// IDispatchEx
+#ifndef NO_PYCOM_IDISPATCHEX
+	STDMETHOD(GetDispID)(BSTR bstrName, DWORD grfdex, DISPID *pid);
+	STDMETHOD(InvokeEx)(DISPID id, LCID lcid, WORD wFlags, DISPPARAMS *pdp, VARIANT *pvarRes, EXCEPINFO *pei, IServiceProvider *pspCaller);
+	STDMETHOD(DeleteMemberByName)(BSTR bstr, DWORD grfdex);
+	STDMETHOD(DeleteMemberByDispID)(DISPID id);
+	STDMETHOD(GetMemberProperties)(DISPID id, DWORD grfdexFetch, DWORD *pgrfdex);
+	STDMETHOD(GetMemberName)(DISPID id, BSTR *pbstrName);
+	STDMETHOD(GetNextDispID)(DWORD grfdex, DISPID id, DISPID *pid);
+	STDMETHOD(GetNameSpaceParent)(IUnknown **ppunk);
+#endif // NO_PYCOM_IDISPATCHEX
+	// ISupportErrorInfo
+	STDMETHOD(InterfaceSupportsErrorInfo)(REFIID riid);
+
+	// IInternalUnwrapPythonObject
+	STDMETHOD(Unwrap)(PyObject **ppPyObject);
+
+	// Basically just PYGATEWAY_MAKE_SUPPORT(PyGatewayBase, IDispatch, IID_IDispatch);
+	// but with special handling as its the base class.
+	static HRESULT PyGatewayBase::PyGatewayConstruct(PyObject *pPyInstance, PyGatewayBase *gatewayBase, void **ppResult, REFIID iid)
+	{
+		if (ppResult==NULL) return E_INVALIDARG;
+		PyGatewayBase *obNew = new PyGatewayBase(pPyInstance);
+		obNew->m_pBaseObject = gatewayBase;
+		if (gatewayBase) gatewayBase->AddRef();
+		*ppResult = (IDispatch *)obNew;
+		return *ppResult ? S_OK : E_OUTOFMEMORY;
+	}
+	// Currently this is used only for ISupportErrorInfo,
+	// so hopefully this will never be called in this base class.
+	// (however, this is not a rule, so we won't assert or anything!)
+	virtual IID GetIID(void) { return IID_IUnknown; }
+	virtual void *ThisAsIID(IID iid);
+	// End of PYGATEWAY_MAKE_SUPPORT
+	PyObject * m_pPyObject;
+	PyGatewayBase *m_pBaseObject;
+private:
+	LONG m_cRef;
+};
+
+#ifdef _MSC_VER
+#pragma warning(default : 4275 )
+#endif // _MSC_VER
+
+// B/W compat hack for gateways.
+#define PyCom_HandlePythonFailureToCOM() \
+	PyCom_SetAndLogCOMErrorFromPyExceptionEx(this->m_pPyObject, "<unknown>", GetIID())
+
+// F/W compat hack for gateways!  Must be careful about updating
+// PyGatewayBase vtable, so a slightly older pythoncomXX.dll will work
+// with slightly later extensions.  So use a #define.
+#define MAKE_PYCOM_GATEWAY_FAILURE_CODE(method_name) \
+	PyCom_SetAndLogCOMErrorFromPyExceptionEx(this->m_pPyObject, method_name, GetIID())
+
+
+#endif /* __PYTHONCOMSERVER_H__ */
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/libs/pythoncom.lib b/depot_tools/release/win/python_24/Lib/site-packages/win32com/libs/pythoncom.lib
new file mode 100644
index 0000000..64574a3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/libs/pythoncom.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/__init__.py
new file mode 100644
index 0000000..6178c97c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/__init__.py
@@ -0,0 +1 @@
+# indicates a python package.
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegw.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegw.py
new file mode 100644
index 0000000..d018c22
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegw.py
@@ -0,0 +1,452 @@
+"""Utility functions for writing out gateway C++ files
+
+  This module will generate a C++/Python binding for a specific COM
+  interface.
+  
+  At this stage, no command line interface exists.  You must start Python, 
+  import this module,  change to the directory where the generated code should
+  be written, and run the public function.
+  
+  This module is capable of generating both 'Interfaces' (ie, Python
+  client side support for the interface) and 'Gateways' (ie, Python
+  server side support for the interface).  Many COM interfaces are useful
+  both as Client and Server.  Other interfaces, however, really only make
+  sense to implement one side or the other.  For example, it would be pointless
+  for Python to implement Server side for 'IRunningObjectTable', unless we were
+  implementing core COM for an operating system in Python (hey - now there's an idea!)
+  
+  Most COM interface code is totally boiler-plate - it consists of
+  converting arguments, dispatching the call to Python, and processing
+  any result values.
+  
+  This module automates the generation of such code.  It has the ability to
+  parse a .H file generated by the MIDL tool (ie, almost all COM .h files)
+  and build almost totally complete C++ code.
+  
+  The module understands some of the well known data types, and how to
+  convert them.  There are only a couple of places where hand-editing is
+  necessary, as detailed below:
+
+  unsupported types -- If a type is not known, the generator will
+  pretty much ignore it, but write a comment to the generated code.  You
+  may want to add custom support for this type.  In some cases, C++ compile errors
+  will result.  These are intentional - generating code to remove these errors would
+  imply a false sense of security that the generator has done the right thing.
+
+  other return policies -- By default, Python never sees the return SCODE from
+  a COM function.  The interface usually returns None if OK, else a COM exception
+  if "FAILED(scode)" is TRUE.  You may need to change this if:
+  * EXCEPINFO is passed to the COM function.  This is not detected and handled
+  * For some reason Python should always see the result SCODE, even if it
+    did fail or succeed.  For example, some functions return a BOOLEAN result
+    in the SCODE, meaning Python should always see it.
+  * FAILED(scode) for the interface still has valid data to return (by default,
+    the code generated does not process the return values, and raise an exception
+    to Python/COM
+  
+"""
+
+import regsub
+import string
+import makegwparse
+
+def make_framework_support(header_file_name, interface_name, bMakeInterface = 1, bMakeGateway = 1):
+  """Generate C++ code for a Python Interface and Gateway
+  
+  header_file_name -- The full path to the .H file which defines the interface.
+  interface_name -- The name of the interface to search for, and to generate.
+  bMakeInterface = 1 -- Should interface (ie, client) support be generated.
+  bMakeGateway = 1 -- Should gateway (ie, server) support be generated.
+  
+  This method will write a .cpp and .h file into the current directory,
+  (using the name of the interface to build the file name.
+  
+  """
+  fin=open(header_file_name)
+  try:
+    interface = makegwparse.parse_interface_info(interface_name, fin)
+  finally:
+    fin.close()
+    
+  if bMakeInterface and bMakeGateway:
+  	desc = "Interface and Gateway"
+  elif bMakeInterface and not bMakeGateway:
+  	desc = "Interface"
+  else:
+  	desc = "Gateway"
+  if interface.name[:5]=="IEnum": # IEnum - use my really simple template-based one
+    import win32com.makegw.makegwenum
+    ifc_cpp_writer = win32com.makegw.makegwenum._write_enumifc_cpp
+    gw_cpp_writer = win32com.makegw.makegwenum._write_enumgw_cpp
+  else: # Use my harder working ones.
+    ifc_cpp_writer = _write_ifc_cpp
+    gw_cpp_writer = _write_gw_cpp
+  	
+  fout=open("Py%s.cpp" % interface.name, "w")
+  try:
+    fout.write(\
+'''\
+// This file implements the %s %s for Python.
+// Generated by makegw.py
+
+#include "shell_pch.h"
+''' % (interface.name, desc))
+#    if bMakeGateway:
+#      fout.write('#include "PythonCOMServer.h"\n')
+#    if interface.base not in ["IUnknown", "IDispatch"]:
+#      fout.write('#include "Py%s.h"\n' % interface.base)
+    fout.write('#include "Py%s.h"\n\n// @doc - This file contains autoduck documentation\n' % interface.name)
+    if bMakeInterface: ifc_cpp_writer(fout, interface)
+    if bMakeGateway: gw_cpp_writer(fout, interface)
+  finally:
+    fout.close()
+  fout=open("Py%s.h" % interface.name, "w")
+  try:
+    fout.write(\
+'''\
+// This file declares the %s %s for Python.
+// Generated by makegw.py
+''' % (interface.name, desc))
+
+    if bMakeInterface: _write_ifc_h(fout, interface)
+    if bMakeGateway: _write_gw_h(fout, interface)
+  finally:
+    fout.close()
+
+###########################################################################
+#
+# INTERNAL FUNCTIONS
+#
+#
+
+def _write_ifc_h(f, interface):
+  f.write(\
+'''\
+// ---------------------------------------------------
+//
+// Interface Declaration
+
+class Py%s : public Py%s
+{
+public:
+	MAKE_PYCOM_CTOR(Py%s);
+	static %s *GetI(PyObject *self);
+	static PyComTypeObject type;
+
+	// The Python methods
+''' % (interface.name, interface.base, interface.name, interface.name))
+  for method in interface.methods:
+    f.write('\tstatic PyObject *%s(PyObject *self, PyObject *args);\n' % method.name)
+  f.write(\
+'''\
+
+protected:
+	Py%s(IUnknown *pdisp);
+	~Py%s();
+};
+''' % (interface.name, interface.name))
+
+def _write_ifc_cpp(f, interface):
+  """Write the C++ client-side implementation of the Py<interface> class.
+
+  For each COM method this emits a PyObject* wrapper that parses the
+  Python args, calls the underlying COM method, and builds the Python
+  result from any [out] parameters.  Arguments whose types have no
+  converter are emitted as commented placeholders for hand-editing.
+  """
+  name = interface.name
+  f.write(\
+'''\
+// ---------------------------------------------------
+//
+// Interface Implementation
+
+Py%(name)s::Py%(name)s(IUnknown *pdisp):
+	Py%(base)s(pdisp)
+{
+	ob_type = &type;
+}
+
+Py%(name)s::~Py%(name)s()
+{
+}
+
+/* static */ %(name)s *Py%(name)s::GetI(PyObject *self)
+{
+	return (%(name)s *)Py%(base)s::GetI(self);
+}
+
+''' % (interface.__dict__))
+
+  # 'ptr' is the interface name with lowercase letters stripped (eg
+  # "IStorage" -> "IS"), used to name the C++ interface pointer variable.
+  # NOTE(review): the 'regsub' module is long deprecated (removed from
+  # modern Pythons); re.sub('[a-z]', '', interface.name) is the equivalent.
+  ptr = regsub.gsub('[a-z]', '', interface.name)
+  strdict = {'interfacename':interface.name, 'ptr': ptr}
+  for method in interface.methods:
+    strdict['method'] = method.name
+    f.write(\
+'''\
+// @pymethod |Py%(interfacename)s|%(method)s|Description of %(method)s.
+PyObject *Py%(interfacename)s::%(method)s(PyObject *self, PyObject *args)
+{
+	%(interfacename)s *p%(ptr)s = GetI(self);
+	if ( p%(ptr)s == NULL )
+		return NULL;
+''' % strdict)
+    argsParseTuple = argsCOM = formatChars = codePost = \
+                     codePobjects = codeCobjects = cleanup = cleanup_gil = ""
+    needConversion = 0
+#    if method.name=="Stat": import win32dbg;win32dbg.brk()
+    # Pass 1: [in] arguments - accumulate the PyArg_ParseTuple format
+    # string, the parse targets, any post-parse conversion code, and the
+    # cleanup code to run after the COM call.
+    for arg in method.args:
+      try:
+        argCvt = makegwparse.make_arg_converter(arg)
+        if arg.HasAttribute("in"):
+          val = argCvt.GetFormatChar()
+          if val:
+            f.write ('\t' + argCvt.GetAutoduckString() + "\n")
+            formatChars = formatChars + val
+            argsParseTuple = argsParseTuple + ", " + argCvt.GetParseTupleArg()
+            codePobjects = codePobjects + argCvt.DeclareParseArgTupleInputConverter()
+            codePost = codePost + argCvt.GetParsePostCode()
+            needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
+            cleanup = cleanup + argCvt.GetInterfaceArgCleanup()
+            cleanup_gil = cleanup_gil + argCvt.GetInterfaceArgCleanupGIL()
+        comArgName, comArgDeclString = argCvt.GetInterfaceCppObjectInfo()
+        if comArgDeclString: # If we should declare a variable
+          codeCobjects = codeCobjects + "\t%s;\n" % (comArgDeclString)
+        argsCOM = argsCOM + ", " + comArgName
+      except makegwparse.error_not_supported, why:
+        # No converter available - emit a commented-out skeleton the
+        # developer must finish by hand.
+        f.write('// *** The input argument %s of type "%s" was not processed ***\n//     Please check the conversion function is appropriate and exists!\n' % (arg.name, arg.raw_type))
+
+        f.write('\t%s %s;\n\tPyObject *ob%s;\n' % (arg.type, arg.name, arg.name))
+        f.write('\t// @pyparm <o Py%s>|%s||Description for %s\n' % (arg.type, arg.name, arg.name))
+        codePost = codePost + '\tif (bPythonIsHappy && !PyObject_As%s( ob%s, &%s )) bPythonIsHappy = FALSE;\n' % (arg.type, arg.name, arg.name)
+
+        formatChars = formatChars + "O"
+        argsParseTuple = argsParseTuple + ", &ob%s" % (arg.name)
+
+        argsCOM = argsCOM + ", " + arg.name
+        cleanup = cleanup + "\tPyObject_Free%s(%s);\n" % (arg.type, arg.name)
+
+    if needConversion: f.write("\tUSES_CONVERSION;\n")
+    f.write(codePobjects);
+    f.write(codeCobjects);
+    f.write('\tif ( !PyArg_ParseTuple(args, "%s:%s"%s) )\n\t\treturn NULL;\n' % (formatChars, method.name, argsParseTuple))
+    if codePost:
+      f.write('\tBOOL bPythonIsHappy = TRUE;\n')
+      f.write(codePost);
+      f.write('\tif (!bPythonIsHappy) return NULL;\n')
+    strdict['argsCOM'] = argsCOM[1:]
+    strdict['cleanup'] = cleanup
+    strdict['cleanup_gil'] = cleanup_gil
+    # Emit the COM call itself, bracketed by the GIL release/reacquire
+    # macros, with the accumulated cleanup code.
+    f.write(\
+'''	HRESULT hr;
+	PY_INTERFACE_PRECALL;
+	hr = p%(ptr)s->%(method)s(%(argsCOM)s );
+%(cleanup)s
+	PY_INTERFACE_POSTCALL;
+%(cleanup_gil)s
+	if ( FAILED(hr) )
+		return PyCom_BuildPyException(hr, p%(ptr)s, IID_%(interfacename)s );
+''' % strdict)
+    # Pass 2: [out] arguments - build the Py_BuildValue call for the result.
+    codePre = codePost = formatChars = codeVarsPass = codeDecl = ""
+    for arg in method.args:
+      if not arg.HasAttribute("out"):
+        continue
+      try:
+        argCvt =  makegwparse.make_arg_converter(arg)
+        formatChar = argCvt.GetFormatChar()
+        if formatChar:
+          formatChars = formatChars + formatChar
+          codePre = codePre + argCvt.GetBuildForInterfacePreCode()
+          codePost = codePost + argCvt.GetBuildForInterfacePostCode()
+          codeVarsPass = codeVarsPass + ", " + argCvt.GetBuildValueArg()
+          codeDecl = codeDecl + argCvt.DeclareParseArgTupleInputConverter()
+      except makegwparse.error_not_supported, why:
+        f.write('// *** The output argument %s of type "%s" was not processed ***\n//     %s\n' % (arg.name, arg.raw_type, why))
+        continue
+    if formatChars:
+      f.write('%s\n%s\tPyObject *pyretval = Py_BuildValue("%s"%s);\n%s\treturn pyretval;' % (codeDecl, codePre, formatChars, codeVarsPass, codePost))
+    else:
+      f.write('\tPy_INCREF(Py_None);\n\treturn Py_None;\n')
+    f.write('\n}\n\n')
+
+  # Finally, the method table and the PyComTypeObject for the class.
+  f.write ('// @object Py%s|Description of the interface\n' % (name))
+  f.write('static struct PyMethodDef Py%s_methods[] =\n{\n' % name)
+  for method in interface.methods:
+    f.write('\t{ "%s", Py%s::%s, 1 }, // @pymeth %s|Description of %s\n' % (method.name, interface.name, method.name, method.name, method.name))
+
+  interfacebase = interface.base
+  f.write('''\
+	{ NULL }
+};
+
+PyComTypeObject Py%(name)s::type("Py%(name)s",
+		&Py%(interfacebase)s::type,
+		sizeof(Py%(name)s),
+		Py%(name)s_methods,
+		GET_PYCOM_CTOR(Py%(name)s));
+''' % locals())
+
+def _write_gw_h(f, interface):
+  """Write the C++ header declaration of the gateway (PyG...) class.
+
+  The gateway class lets a Python object implement the COM interface;
+  this emits its class declaration with one STDMETHOD per COM method.
+  """
+  # Gateway class name: "IFoo" -> "PyGFoo".
+  if interface.name[0] == "I":
+    gname = 'PyG' + interface.name[1:]
+  else:
+    gname = 'PyG' + interface.name
+  name = interface.name
+  # Gateways over IUnknown/IDispatch derive directly from PyGatewayBase.
+  if interface.base == "IUnknown" or interface.base == "IDispatch":
+    base_name = "PyGatewayBase"
+  else:
+    if interface.base[0] == "I":
+      base_name = 'PyG' + interface.base[1:]
+    else:
+      base_name = 'PyG' + interface.base
+  f.write(\
+'''\
+// ---------------------------------------------------
+//
+// Gateway Declaration
+
+class %s : public %s, public %s
+{
+protected:
+	%s(PyObject *instance) : %s(instance) { ; }
+	PYGATEWAY_MAKE_SUPPORT2(%s, %s, IID_%s, %s)
+
+''' % (gname, base_name, name, gname, base_name, gname, name, name, base_name))
+  if interface.base != "IUnknown":
+    f.write("\t// %s\n\t// *** Manually add %s method decls here\n\n" % (interface.base, interface.base))
+  else:
+    f.write('\n\n')
+
+  f.write("\t// %s\n" % name)
+
+  for method in interface.methods:
+    f.write('\tSTDMETHOD(%s)(\n' % method.name)
+    if method.args:
+      for arg in method.args[:-1]:
+        f.write("\t\t%s,\n" % (arg.GetRawDeclaration()))
+      arg = method.args[-1]
+      f.write("\t\t%s);\n\n" % (arg.GetRawDeclaration()))
+    else:
+      f.write('\t\tvoid);\n\n')
+
+  f.write('};\n')
+  # NOTE(review): this closes the caller's file object, unlike the other
+  # _write_* helpers which leave closing to the caller - confirm intended.
+  f.close()
+
+def _write_gw_cpp(f, interface):
+  """Write the C++ implementation of the gateway (PyG...) class.
+
+  For each COM method a STDMETHODIMP is generated that converts the C++
+  [in] arguments to Python objects, calls the Python implementation via
+  InvokeViaPolicy, and converts any [out] results back to C++.
+  """
+  # Gateway class name: "IFoo" -> "PyGFoo".
+  if interface.name[0] == "I":
+    gname = 'PyG' + interface.name[1:]
+  else:
+    gname = 'PyG' + interface.name
+  name = interface.name
+  # Gateways over IUnknown/IDispatch derive directly from PyGatewayBase.
+  if interface.base == "IUnknown" or interface.base == "IDispatch":
+    base_name = "PyGatewayBase"
+  else:
+    if interface.base[0] == "I":
+      base_name = 'PyG' + interface.base[1:]
+    else:
+      base_name = 'PyG' + interface.base
+  # (The '%' mapping below is a no-op - the template contains no format
+  # specifiers - but is preserved as written.)
+  f.write('''\
+// ---------------------------------------------------
+//
+// Gateway Implementation
+''' % {'name':name, 'gname':gname, 'base_name':base_name})
+
+  for method in interface.methods:
+    # Emit the STDMETHODIMP signature, annotating each argument with its
+    # [in]/[out] attributes.
+    f.write(\
+'''\
+STDMETHODIMP %s::%s(
+''' % (gname, method.name))
+
+    if method.args:
+      for arg in method.args[:-1]:
+        inoutstr = string.join(arg.inout, '][')
+        f.write("\t\t/* [%s] */ %s,\n" % (inoutstr, arg.GetRawDeclaration()))
+
+      arg = method.args[-1]
+      inoutstr = string.join(arg.inout, '][')
+      f.write("\t\t/* [%s] */ %s)\n" % (inoutstr, arg.GetRawDeclaration()))
+    else:
+      f.write('\t\tvoid)\n')
+
+    f.write("{\n\tPY_GATEWAY_METHOD;\n")
+    cout = 0  # count of [out] arguments; determines if a result is expected
+    codePre = codePost = codeVars = ""
+    argStr = ""
+    needConversion = 0
+    formatChars = ""
+    if method.args:
+      # Pass 1: convert each [in] argument to a Python object for the call.
+      for arg in method.args:
+        if arg.HasAttribute("out"):
+          cout = cout + 1
+          if arg.indirectionLevel ==2 :
+            f.write("\tif (%s==NULL) return E_POINTER;\n" % arg.name)
+        if arg.HasAttribute("in"):
+          try:
+            argCvt = makegwparse.make_arg_converter(arg)
+            argCvt.SetGatewayMode()
+            formatchar = argCvt.GetFormatChar();
+            needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
+
+            if formatchar:
+              formatChars = formatChars + formatchar
+              codeVars = codeVars + argCvt.DeclareParseArgTupleInputConverter()
+              argStr = argStr + ", " + argCvt.GetBuildValueArg()
+            codePre = codePre + argCvt.GetBuildForGatewayPreCode()
+            codePost = codePost + argCvt.GetBuildForGatewayPostCode()
+          except makegwparse.error_not_supported, why:
+            # No converter - emit a hand-editable skeleton conversion.
+            f.write('// *** The input argument %s of type "%s" was not processed ***\n//   - Please ensure this conversion function exists, and is appropriate\n//   - %s\n' % (arg.name, arg.raw_type, why))
+            f.write('\tPyObject *ob%s = PyObject_From%s(%s);\n' % (arg.name, arg.type, arg.name))
+            f.write('\tif (ob%s==NULL) return MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % (arg.name, method.name))
+            codePost = codePost + "\tPy_DECREF(ob%s);\n" % arg.name
+            formatChars = formatChars + "O"
+            argStr = argStr + ", ob%s" % (arg.name)
+
+    if needConversion: f.write('\tUSES_CONVERSION;\n')
+    f.write(codeVars)
+    f.write(codePre)
+    if cout:
+      f.write("\tPyObject *result;\n")
+      resStr = "&result"
+    else:
+      resStr = "NULL"
+
+    if formatChars:
+      fullArgStr = '%s, "%s"%s' % (resStr, formatChars, argStr)
+    else:
+      fullArgStr = resStr
+
+    f.write('\tHRESULT hr=InvokeViaPolicy("%s", %s);\n' % (method.name, fullArgStr))
+    f.write(codePost)
+    if cout:
+      f.write("\tif (FAILED(hr)) return hr;\n")
+      f.write("\t// Process the Python results, and convert back to the real params\n")
+      # process the output arguments.
+      formatChars = codePobjects = codePost = argsParseTuple = ""
+      needConversion = 0
+      for arg in method.args:
+        if not arg.HasAttribute("out"):
+          continue
+        try:
+          argCvt = makegwparse.make_arg_converter(arg)
+          argCvt.SetGatewayMode()
+          val = argCvt.GetFormatChar()
+          if val:
+            formatChars = formatChars + val
+            argsParseTuple = argsParseTuple + ", " + argCvt.GetParseTupleArg()
+            codePobjects = codePobjects + argCvt.DeclareParseArgTupleInputConverter()
+            codePost = codePost + argCvt.GetParsePostCode()
+            needConversion = needConversion or argCvt.NeedUSES_CONVERSION()
+        except makegwparse.error_not_supported, why:
+          f.write('// *** The output argument %s of type "%s" was not processed ***\n//     %s\n' % (arg.name, arg.raw_type, why))
+
+      if formatChars: # If I have any to actually process.
+        # A single result is parsed with PyArg_Parse; a tuple of results
+        # needs PyArg_ParseTuple.
+        if len(formatChars)==1:
+          parseFn = "PyArg_Parse"
+        else:
+          parseFn = "PyArg_ParseTuple"
+        if codePobjects: f.write(codePobjects)
+        f.write('\tif (!%s(result, "%s" %s))\n\t\treturn MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % (parseFn, formatChars, argsParseTuple, method.name))
+      if codePost:
+        f.write('\tBOOL bPythonIsHappy = TRUE;\n')
+        f.write(codePost)
+        f.write('\tif (!bPythonIsHappy) hr = MAKE_PYCOM_GATEWAY_FAILURE_CODE("%s");\n' % method.name)
+      f.write('\tPy_DECREF(result);\n');
+    f.write('\treturn hr;\n}\n\n')
+  
+def test():
+	"""Ad-hoc developer smoke test: generate framework support for IStorage.
+
+	The hard-coded MSDEV include path means this only runs on a machine
+	laid out like the original author's.
+	"""
+#	make_framework_support("d:\\msdev\\include\\objidl.h", "ILockBytes")
+	make_framework_support("d:\\msdev\\include\\objidl.h", "IStorage")
+#	make_framework_support("d:\\msdev\\include\\objidl.h", "IEnumSTATSTG")
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegwenum.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegwenum.py
new file mode 100644
index 0000000..d4f4084
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegwenum.py
@@ -0,0 +1,317 @@
+"""Utility file for generating PyIEnum support.
+
+This is almost a 'template' file.  It simply contains almost full
+C++ source code for PyIEnum* support, and the Python code simply
+substitutes the appropriate interface name.
+
+This module is normally not used directly - the @makegw@ module
+automatically calls this.
+"""
+#
+# INTERNAL FUNCTIONS
+#
+#
+import string
+
+def is_interface_enum(enumtype):
+  """Return true if 'enumtype' (the text after "IEnum") names a COM
+  interface rather than a plain structure.
+
+  Heuristic: names whose first and third characters are both uppercase
+  (eg "STATSTG") are assumed to be structures; mixed-case names (eg
+  "String" from IEnumString) are assumed to be interfaces.
+  NOTE: string.uppercase is Python 2 only (string.ascii_uppercase today).
+  """
+  return not (enumtype[0] in string.uppercase and enumtype[2] in string.uppercase)
+
+
+def _write_enumifc_cpp(f, interface):
+  """Write the C++ client-side implementation of a PyIEnumXXXX interface.
+
+  Emits Next/Skip/Reset/Clone plus the method table and type object for
+  the given IEnumXXXX interface by substituting the enumerated type into
+  a fixed C++ template.
+  """
+  # Text after the "IEnum" prefix names the enumerated type.
+  enumtype = interface.name[5:]
+  if is_interface_enum(enumtype):
+    # Assume an interface.
+    enum_interface = "I" + enumtype[:-1]
+    converter = "PyObject *ob = PyCom_PyObjectFromIUnknown(rgVar[i], IID_%(enum_interface)s, FALSE);" % locals()
+    arraydeclare = "%(enum_interface)s **rgVar = new %(enum_interface)s *[celt];" % locals()
+  else:
+    # Enum of a simple structure
+    converter = "PyObject *ob = PyCom_PyObjectFrom%(enumtype)s(&rgVar[i]);" % locals()
+    arraydeclare = "%(enumtype)s *rgVar = new %(enumtype)s[celt];" % locals()
+
+  # NOTE(review): the template below builds exceptions with
+  # "IID_IE%(enumtype)s" (eg IID_IEString) in several places but uses
+  # "IID_IEnum%(enumtype)s" in Clone's return - the former looks like a
+  # typo for IID_IEnum...; confirm against the generated code's compile.
+  f.write(\
+'''
+// ---------------------------------------------------
+//
+// Interface Implementation
+
+PyIEnum%(enumtype)s::PyIEnum%(enumtype)s(IUnknown *pdisp):
+	PyIUnknown(pdisp)
+{
+	ob_type = &type;
+}
+
+PyIEnum%(enumtype)s::~PyIEnum%(enumtype)s()
+{
+}
+
+/* static */ IEnum%(enumtype)s *PyIEnum%(enumtype)s::GetI(PyObject *self)
+{
+	return (IEnum%(enumtype)s *)PyIUnknown::GetI(self);
+}
+
+// @pymethod object|PyIEnum%(enumtype)s|Next|Retrieves a specified number of items in the enumeration sequence.
+PyObject *PyIEnum%(enumtype)s::Next(PyObject *self, PyObject *args)
+{
+	long celt = 1;
+	// @pyparm int|num|1|Number of items to retrieve.
+	if ( !PyArg_ParseTuple(args, "|l:Next", &celt) )
+		return NULL;
+
+	IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self);
+	if ( pIE%(enumtype)s == NULL )
+		return NULL;
+
+	%(arraydeclare)s
+	if ( rgVar == NULL ) {
+		PyErr_SetString(PyExc_MemoryError, "allocating result %(enumtype)ss");
+		return NULL;
+	}
+
+	int i;
+/*	for ( i = celt; i--; )
+		// *** possibly init each structure element???
+*/
+
+	ULONG celtFetched = 0;
+	PY_INTERFACE_PRECALL;
+	HRESULT hr = pIE%(enumtype)s->Next(celt, rgVar, &celtFetched);
+	PY_INTERFACE_POSTCALL;
+	if (  HRESULT_CODE(hr) != ERROR_NO_MORE_ITEMS && FAILED(hr) )
+	{
+		delete [] rgVar;
+		return PyCom_BuildPyException(hr,pIE%(enumtype)s, IID_IE%(enumtype)s);
+	}
+
+	PyObject *result = PyTuple_New(celtFetched);
+	if ( result != NULL )
+	{
+		for ( i = celtFetched; i--; )
+		{
+			%(converter)s
+			if ( ob == NULL )
+			{
+				Py_DECREF(result);
+				result = NULL;
+				break;
+			}
+			PyTuple_SET_ITEM(result, i, ob);
+		}
+	}
+
+/*	for ( i = celtFetched; i--; )
+		// *** possibly cleanup each structure element???
+*/
+	delete [] rgVar;
+	return result;
+}
+
+// @pymethod |PyIEnum%(enumtype)s|Skip|Skips over the next specified elementes.
+PyObject *PyIEnum%(enumtype)s::Skip(PyObject *self, PyObject *args)
+{
+	long celt;
+	if ( !PyArg_ParseTuple(args, "l:Skip", &celt) )
+		return NULL;
+
+	IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self);
+	if ( pIE%(enumtype)s == NULL )
+		return NULL;
+
+	PY_INTERFACE_PRECALL;
+	HRESULT hr = pIE%(enumtype)s->Skip(celt);
+	PY_INTERFACE_POSTCALL;
+	if ( FAILED(hr) )
+		return PyCom_BuildPyException(hr, pIE%(enumtype)s, IID_IE%(enumtype)s);
+
+	Py_INCREF(Py_None);
+	return Py_None;
+}
+
+// @pymethod |PyIEnum%(enumtype)s|Reset|Resets the enumeration sequence to the beginning.
+PyObject *PyIEnum%(enumtype)s::Reset(PyObject *self, PyObject *args)
+{
+	if ( !PyArg_ParseTuple(args, ":Reset") )
+		return NULL;
+
+	IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self);
+	if ( pIE%(enumtype)s == NULL )
+		return NULL;
+
+	PY_INTERFACE_PRECALL;
+	HRESULT hr = pIE%(enumtype)s->Reset();
+	PY_INTERFACE_POSTCALL;
+	if ( FAILED(hr) )
+		return PyCom_BuildPyException(hr, pIE%(enumtype)s, IID_IE%(enumtype)s);
+
+	Py_INCREF(Py_None);
+	return Py_None;
+}
+
+// @pymethod <o PyIEnum%(enumtype)s>|PyIEnum%(enumtype)s|Clone|Creates another enumerator that contains the same enumeration state as the current one
+PyObject *PyIEnum%(enumtype)s::Clone(PyObject *self, PyObject *args)
+{
+	if ( !PyArg_ParseTuple(args, ":Clone") )
+		return NULL;
+
+	IEnum%(enumtype)s *pIE%(enumtype)s = GetI(self);
+	if ( pIE%(enumtype)s == NULL )
+		return NULL;
+
+	IEnum%(enumtype)s *pClone;
+	PY_INTERFACE_PRECALL;
+	HRESULT hr = pIE%(enumtype)s->Clone(&pClone);
+	PY_INTERFACE_POSTCALL;
+	if ( FAILED(hr) )
+		return PyCom_BuildPyException(hr, pIE%(enumtype)s, IID_IE%(enumtype)s);
+
+	return PyCom_PyObjectFromIUnknown(pClone, IID_IEnum%(enumtype)s, FALSE);
+}
+
+// @object PyIEnum%(enumtype)s|A Python interface to IEnum%(enumtype)s
+static struct PyMethodDef PyIEnum%(enumtype)s_methods[] =
+{
+	{ "Next", PyIEnum%(enumtype)s::Next, 1 },    // @pymeth Next|Retrieves a specified number of items in the enumeration sequence.
+	{ "Skip", PyIEnum%(enumtype)s::Skip, 1 },	// @pymeth Skip|Skips over the next specified elementes.
+	{ "Reset", PyIEnum%(enumtype)s::Reset, 1 },	// @pymeth Reset|Resets the enumeration sequence to the beginning.
+	{ "Clone", PyIEnum%(enumtype)s::Clone, 1 },	// @pymeth Clone|Creates another enumerator that contains the same enumeration state as the current one.
+	{ NULL }
+};
+
+PyComTypeObject PyIEnum%(enumtype)s::type("PyIEnum%(enumtype)s",
+		&PyIUnknown::type,
+		sizeof(PyIEnum%(enumtype)s),
+		PyIEnum%(enumtype)s_methods,
+		GET_PYCOM_CTOR(PyIEnum%(enumtype)s));
+''' % locals() )
+
+
+
+def _write_enumgw_cpp(f, interface):
+  """Write the C++ gateway (PyGEnumXXXX) implementation of an enumerator.
+
+  Emits Next/Skip/Reset/Clone (plus standard IUnknown/IDispatch
+  delegation) so a Python object can implement IEnumXXXX, again by
+  substituting the enumerated type into a fixed C++ template.
+  """
+  # Text after the "IEnum" prefix names the enumerated type.
+  enumtype = interface.name[5:]
+  if is_interface_enum(enumtype):
+    # Assume an interface.
+    enum_interface = "I" + enumtype[:-1]
+    converter = "if ( !PyCom_InterfaceFromPyObject(ob, IID_%(enum_interface)s, (void **)&rgVar[i], FALSE) )" % locals()
+    argdeclare="%(enum_interface)s __RPC_FAR * __RPC_FAR *rgVar" % locals()
+  else:
+    argdeclare="%(enumtype)s __RPC_FAR *rgVar" % locals()
+    converter="if ( !PyCom_PyObjectAs%(enumtype)s(ob, &rgVar[i]) )" % locals()
+  f.write(
+'''
+// ---------------------------------------------------
+//
+// Gateway Implementation
+
+// Std delegation
+STDMETHODIMP_(ULONG) PyGEnum%(enumtype)s::AddRef(void) {return PyGatewayBase::AddRef();}
+STDMETHODIMP_(ULONG) PyGEnum%(enumtype)s::Release(void) {return PyGatewayBase::Release();}
+STDMETHODIMP PyGEnum%(enumtype)s::QueryInterface(REFIID iid, void ** obj) {return PyGatewayBase::QueryInterface(iid, obj);}
+STDMETHODIMP PyGEnum%(enumtype)s::GetTypeInfoCount(UINT FAR* pctInfo) {return PyGatewayBase::GetTypeInfoCount(pctInfo);}
+STDMETHODIMP PyGEnum%(enumtype)s::GetTypeInfo(UINT itinfo, LCID lcid, ITypeInfo FAR* FAR* pptInfo) {return PyGatewayBase::GetTypeInfo(itinfo, lcid, pptInfo);}
+STDMETHODIMP PyGEnum%(enumtype)s::GetIDsOfNames(REFIID refiid, OLECHAR FAR* FAR* rgszNames, UINT cNames, LCID lcid, DISPID FAR* rgdispid) {return PyGatewayBase::GetIDsOfNames( refiid, rgszNames, cNames, lcid, rgdispid);}
+STDMETHODIMP PyGEnum%(enumtype)s::Invoke(DISPID dispid, REFIID riid, LCID lcid, WORD wFlags, DISPPARAMS FAR* params, VARIANT FAR* pVarResult, EXCEPINFO FAR* pexcepinfo, UINT FAR* puArgErr) {return PyGatewayBase::Invoke( dispid, riid, lcid, wFlags, params, pVarResult, pexcepinfo, puArgErr);}
+
+STDMETHODIMP PyGEnum%(enumtype)s::Next( 
+            /* [in] */ ULONG celt,
+            /* [length_is][size_is][out] */ %(argdeclare)s,
+            /* [out] */ ULONG __RPC_FAR *pCeltFetched)
+{
+	PY_GATEWAY_METHOD;
+	PyObject *result;
+	HRESULT hr = InvokeViaPolicy("Next", &result, "i", celt);
+	if ( FAILED(hr) )
+		return hr;
+
+	if ( !PySequence_Check(result) )
+		goto error;
+	int len;
+	len = PyObject_Length(result);
+	if ( len == -1 )
+		goto error;
+	if ( len > (int)celt)
+		len = celt;
+
+	if ( pCeltFetched )
+		*pCeltFetched = len;
+
+	int i;
+	for ( i = 0; i < len; ++i )
+	{
+		PyObject *ob = PySequence_GetItem(result, i);
+		if ( ob == NULL )
+			goto error;
+
+		%(converter)s
+		{
+			Py_DECREF(result);
+			return PyCom_SetCOMErrorFromPyException(IID_IEnum%(enumtype)s);
+		}
+	}
+
+	Py_DECREF(result);
+
+	return len < (int)celt ? S_FALSE : S_OK;
+
+  error:
+	PyErr_Clear();	// just in case
+	Py_DECREF(result);
+	return PyCom_SetCOMErrorFromSimple(E_FAIL, IID_IEnum%(enumtype)s, "Next() did not return a sequence of objects");
+}
+
+STDMETHODIMP PyGEnum%(enumtype)s::Skip( 
+            /* [in] */ ULONG celt)
+{
+	PY_GATEWAY_METHOD;
+	return InvokeViaPolicy("Skip", NULL, "i", celt);
+}
+
+STDMETHODIMP PyGEnum%(enumtype)s::Reset(void)
+{
+	PY_GATEWAY_METHOD;
+	return InvokeViaPolicy("Reset");
+}
+
+STDMETHODIMP PyGEnum%(enumtype)s::Clone( 
+            /* [out] */ IEnum%(enumtype)s __RPC_FAR *__RPC_FAR *ppEnum)
+{
+	PY_GATEWAY_METHOD;
+	PyObject * result;
+	HRESULT hr = InvokeViaPolicy("Clone", &result);
+	if ( FAILED(hr) )
+		return hr;
+
+	/*
+	** Make sure we have the right kind of object: we should have some kind
+	** of IUnknown subclass wrapped into a PyIUnknown instance.
+	*/
+	if ( !PyIBase::is_object(result, &PyIUnknown::type) )
+	{
+		/* the wrong kind of object was returned to us */
+		Py_DECREF(result);
+		return PyCom_SetCOMErrorFromSimple(E_FAIL, IID_IEnum%(enumtype)s);
+	}
+
+	/*
+	** Get the IUnknown out of the thing. note that the Python ob maintains
+	** a reference, so we don't have to explicitly AddRef() here.
+	*/
+	IUnknown *punk = ((PyIUnknown *)result)->m_obj;
+	if ( !punk )
+	{
+		/* damn. the object was released. */
+		Py_DECREF(result);
+		return PyCom_SetCOMErrorFromSimple(E_FAIL, IID_IEnum%(enumtype)s);
+	}
+
+	/*
+	** Get the interface we want. note it is returned with a refcount.
+	** This QI is actually going to instantiate a PyGEnum%(enumtype)s.
+	*/
+	hr = punk->QueryInterface(IID_IEnum%(enumtype)s, (LPVOID *)ppEnum);
+
+	/* done with the result; this DECREF is also for <punk> */
+	Py_DECREF(result);
+
+	return PyCom_SetCOMErrorFromSimple(hr, IID_IEnum%(enumtype)s, "Python could not convert the result from Next() into the required COM interface");
+}
+''' % locals())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegwparse.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegwparse.py
new file mode 100644
index 0000000..903c350
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/makegw/makegwparse.py
@@ -0,0 +1,747 @@
+"""Utilities for makegw - Parse a header file to build an interface
+
+ This module contains the core code for parsing a header file describing a
+ COM interface, and building it into an "Interface" structure.
+
+ Each Interface has methods, and each method has arguments.
+
+ Each argument knows how to use Py_BuildValue or Py_ParseTuple to
+ exchange itself with Python.
+ 
+ See the @win32com.makegw@ module for information on building a COM
+ interface
+"""
+import regex
+import traceback
+import string
+
+error_not_found = "The requested item could not be found"
+error_not_supported = "The required functionality is not supported"
+
+VERBOSE=0
+DEBUG=0
+
+## NOTE : For interfaces as params to work correctly, you must
+## make sure any PythonCOM extensions which expose the interface are loaded
+## before generating.
+
+
+class ArgFormatter:
+	"""An instance for a specific type of argument.	 Knows how to convert itself"""
+	def __init__(self, arg, builtinIndirection, declaredIndirection = 0):
+		#print 'init:', arg.name, builtinIndirection, declaredIndirection, arg.indirectionLevel
+		self.arg = arg
+		self.builtinIndirection = builtinIndirection
+		self.declaredIndirection = declaredIndirection
+		self.gatewayMode = 0
+	def _IndirectPrefix(self, indirectionFrom, indirectionTo):
+		"""Given the indirection level I was declared at (0=Normal, 1=*, 2=**)
+		return a string prefix so I can pass to a function with the
+		required indirection (where the default is the indirection of the method's param.
+		
+		eg, assuming my arg has indirection level of 2, if this function was passed 1
+		it would return "&", so that a variable declared with indirection of 1
+		can be prefixed with this to turn it into the indirection level required of 2
+		"""
+		dif = indirectionFrom - indirectionTo
+		if dif==0:
+		  return ""
+		elif dif==-1:
+		  return "&"
+		elif dif==1:
+		  return "*"
+		else:
+		  return "??"
+		  raise error_not_supported, "Can't indirect this far - please fix me :-)"
+	def GetIndirectedArgName(self, indirectFrom, indirectionTo):
+		#print 'get:',self.arg.name, indirectFrom,self._GetDeclaredIndirection() + self.builtinIndirection, indirectionTo, self.arg.indirectionLevel
+
+		if indirectFrom is None:
+			### ACK! this does not account for [in][out] variables.
+			### when this method is called, we need to know which
+			indirectFrom = self._GetDeclaredIndirection() + self.builtinIndirection
+
+		return self._IndirectPrefix(indirectFrom, indirectionTo) + self.arg.name
+	def GetBuildValueArg(self):
+		"Get the argument to be passes to Py_BuildValue"
+		return self.arg.name
+	def GetParseTupleArg(self):
+		"Get the argument to be passed to PyArg_ParseTuple"
+		if self.gatewayMode:
+			# use whatever they were declared with
+			return self.GetIndirectedArgName(None, 1)
+		# local declarations have just their builtin indirection
+		return self.GetIndirectedArgName(self.builtinIndirection, 1)
+	def GetInterfaceCppObjectInfo(self):
+		"""Provide information about the C++ object used.
+				
+		Simple variables (such as integers) can declare their type (eg an integer)
+		and use it as the target of both PyArg_ParseTuple and the COM function itself.
+		
+		More complex types require a PyObject * declared as the target of PyArg_ParseTuple,
+		then some conversion routine to the C++ object which is actually passed to COM.
+		
+		This method provides the name, and optionally the type of that C++ variable.  
+		If the type if provided, the caller will likely generate a variable declaration.
+		The name must always be returned.
+		
+		Result is a tuple of (variableName, [DeclareType|None|""])
+		"""
+
+		# the first return element is the variable to be passed as
+		#	 an argument to an interface method. the variable was
+		#	 declared with only its builtin indirection level. when
+		#	 we pass it, we'll need to pass in whatever amount of
+		#	 indirection was applied (plus the builtin amount)
+		# the second return element is the variable declaration; it
+		#	 should simply be builtin indirection
+		return self.GetIndirectedArgName(self.builtinIndirection, self.arg.indirectionLevel + self.builtinIndirection), \
+					   "%s %s" % (self.GetUnconstType(), self.arg.name)
+
+	def GetInterfaceArgCleanup(self):
+		"Return cleanup code for C++ args passed to the interface method."
+		if DEBUG:
+			return "/* GetInterfaceArgCleanup output goes here: %s */\n" % self.arg.name
+		else:
+			return ""
+
+	def GetInterfaceArgCleanupGIL(self):
+		"""Return cleanup code for C++ args passed to the interface
+		method that must be executed with the GIL held"""
+		if DEBUG:
+			return "/* GetInterfaceArgCleanup (GIL held) output goes here: %s */\n" % self.arg.name
+		else:
+			return ""
+
+	def GetUnconstType(self):
+		return self.arg.unc_type
+	
+	def SetGatewayMode(self):
+		self.gatewayMode = 1
+	def _GetDeclaredIndirection(self):
+		return self.arg.indirectionLevel
+		print 'declared:', self.arg.name, self.gatewayMode
+		if self.gatewayMode:
+			return self.arg.indirectionLevel
+		else:
+			return self.declaredIndirection
+	def DeclareParseArgTupleInputConverter(self):
+		"Declare the variable used as the PyArg_ParseTuple param for a gateway"
+		# Only declare it??
+		#if self.arg.indirectionLevel==0:
+		#	return "\t%s %s;\n" % (self.arg.type, self.arg.name)
+		#else:
+		if DEBUG:
+			return "/* Declare ParseArgTupleInputConverter goes here: %s */\n" % self.arg.name
+		else:
+			return ""
+	def GetParsePostCode(self):
+		"Get a string of C++ code to be executed after (ie, to finalise) the PyArg_ParseTuple conversion"
+		if DEBUG:
+			return "/* GetParsePostCode code goes here: %s */\n" % self.arg.name
+		else:
+			return ""
+	def GetBuildForInterfacePreCode(self):
+		"Get a string of C++ code to be executed before (ie, to initialise) the Py_BuildValue conversion for Interfaces"
+		if DEBUG:
+			return "/* GetBuildForInterfacePreCode goes here: %s */\n" % self.arg.name
+		else:
+			return ""
+	def GetBuildForGatewayPreCode(self):
+		"Get a string of C++ code to be executed before (ie, to initialise) the Py_BuildValue conversion for Gateways"
+		s = self.GetBuildForInterfacePreCode() # Usually the same
+		if DEBUG:
+			if s[:4] == "/* G":
+				s = "/* GetBuildForGatewayPreCode goes here: %s */\n" % self.arg.name
+		return s
+	def GetBuildForInterfacePostCode(self):
+		"Get a string of C++ code to be executed after (ie, to finalise) the Py_BuildValue conversion for Interfaces"
+		if DEBUG:
+			return "/* GetBuildForInterfacePostCode goes here: %s */\n" % self.arg.name
+		return ""
+	def GetBuildForGatewayPostCode(self):
+		"Get a string of C++ code to be executed after (ie, to finalise) the Py_BuildValue conversion for Gateways"
+		s = self.GetBuildForInterfacePostCode() # Usually the same
+		if DEBUG:
+			if s[:4] == "/* G":
+				s = "/* GetBuildForGatewayPostCode goes here: %s */\n" % self.arg.name
+		return s
+	def GetAutoduckString(self):
+		return '// @pyparm %s|%s||Description for %s' % (self._GetPythonTypeDesc(), self.arg.name, self.arg.name)
+	def _GetPythonTypeDesc(self):
+		"Returns a string with the description of the type.	 Used for doco purposes"
+		return None
+	def NeedUSES_CONVERSION(self):
+		"Determines if this arg forces a USES_CONVERSION macro"
+		return 0
+
+# Special formatter for floats since they're smaller than Python floats.
+class ArgFormatterFloat(ArgFormatter):
+	"""Formatter for C++ 'float' args.
+
+	Python exchanges C doubles, so a temporary 'double dbl<name>' is
+	declared and copied to/from the real float argument.
+	"""
+	def GetFormatChar(self):
+		return "f"
+	def DeclareParseArgTupleInputConverter(self):
+		# Declare a double variable
+		return "\tdouble dbl%s;\n" % self.arg.name
+	def GetParseTupleArg(self):
+		return "&dbl" + self.arg.name
+	def _GetPythonTypeDesc(self):
+		return "float"
+	def GetBuildValueArg(self):
+		return "&dbl" + self.arg.name
+	def GetBuildForInterfacePreCode(self):
+		# Copy the float into the temporary double before building the value.
+		return "\tdbl" + self.arg.name + " = " + self.arg.name + ";\n"
+	def GetBuildForGatewayPreCode(self):
+		return "\tdbl%s = " % self.arg.name + self._IndirectPrefix( \
+			self._GetDeclaredIndirection(),
+			0) + self.arg.name + ";\n"
+	def GetParsePostCode(self):
+		# Copy the parsed double back into the (possibly indirected) float.
+		s = "\t"
+		if self.gatewayMode:
+			s = s + self._IndirectPrefix( 
+				self._GetDeclaredIndirection(),
+				0)
+		s = s + self.arg.name
+		s = s + " = (float)dbl%s;\n" % self.arg.name
+		return s
+
+# Special formatter for Shorts because they're
+# a different size than Python ints!
+class ArgFormatterShort(ArgFormatter):
+	"""Formatter for C++ 'short' args.
+
+	Python exchanges full-size ints, so a temporary 'INT i<name>' is
+	declared and copied to/from the real short argument.
+	"""
+	def GetFormatChar(self):
+		return "i"
+	def DeclareParseArgTupleInputConverter(self):
+		# Declare an INT variable to receive the full-size Python int.
+		return "\tINT i%s;\n" % self.arg.name
+	def GetParseTupleArg(self):
+		return "&i" + self.arg.name
+	def _GetPythonTypeDesc(self):
+		return "int"
+	def GetBuildValueArg(self):
+		return "&i" + self.arg.name
+	def GetBuildForInterfacePreCode(self):
+		# Copy the short into the temporary INT before building the value.
+		return "\ti" + self.arg.name + " = " + self.arg.name + ";\n"
+	def GetBuildForGatewayPreCode(self):
+		return "\ti%s = " % self.arg.name + self._IndirectPrefix( \
+			self._GetDeclaredIndirection(),
+			0) + self.arg.name + ";\n"
+	def GetParsePostCode(self):
+		# Copy the parsed INT back into the (possibly indirected) short.
+		s = "\t"
+		if self.gatewayMode:
+			s = s + self._IndirectPrefix( 
+				self._GetDeclaredIndirection(),
+				0)
+		s = s + self.arg.name
+		s = s + " = i%s;\n" % self.arg.name
+		return s
+
+class ArgFormatterPythonCOM(ArgFormatter):
+	"""An arg formatter for types exposed in the PythonCOM module"""
+	def GetFormatChar(self):
+		return "O"
+	#def GetInterfaceCppObjectInfo(self):
+	#	return ArgFormatter.GetInterfaceCppObjectInfo(self)[0], \
+	#		"%s %s%s" % (self.arg.unc_type, "*" * self._GetDeclaredIndirection(), self.arg.name)
+	def DeclareParseArgTupleInputConverter(self):
+		# Declare a PyObject variable
+		return "\tPyObject *ob%s;\n" % self.arg.name
+	def GetParseTupleArg(self):
+		return "&ob"+self.arg.name
+	def _GetPythonTypeDesc(self):
+		return "<o Py%s>" % self.arg.type
+	def GetBuildValueArg(self):
+		return "ob" + self.arg.name
+	def GetBuildForInterfacePostCode(self):
+		return "\tPy_XDECREF(ob%s);\n" % self.arg.name
+	def DeclareParseArgTupleInputConverter(self):
+		# Declare a PyObject variable
+		return "\tPyObject *ob%s;\n" % self.arg.name
+
+class ArgFormatterBSTR(ArgFormatterPythonCOM):
+	"""Formatter for BSTR args, exchanged as Python unicode objects."""
+	def _GetPythonTypeDesc(self):
+		return "<o unicode>"
+	def GetParsePostCode(self):
+		return "\tif (bPythonIsHappy && !PyWinObject_AsBstr(ob%s, %s)) bPythonIsHappy = FALSE;\n" % (self.arg.name, self.GetIndirectedArgName(None, 2))
+	def GetBuildForInterfacePreCode(self):
+		notdirected = self.GetIndirectedArgName(None, 1)
+		return "\tob%s = MakeBstrToObj(%s);\n" % \
+			   (self.arg.name, notdirected)
+	def GetBuildForInterfacePostCode(self):
+		# A returned BSTR must be freed once converted to a Python object.
+		return "\tSysFreeString(%s);\n" % (self.arg.name,) + \
+		       ArgFormatterPythonCOM.GetBuildForInterfacePostCode(self)
+	def GetBuildForGatewayPostCode(self):
+		return "\tPy_XDECREF(ob%s);\n" % self.arg.name
+
class ArgFormatterOLECHAR(ArgFormatterPythonCOM):
	"""Arg formatter for OLECHAR / wide-string arguments."""
	def _GetPythonTypeDesc(self):
		return "<o unicode>"
	def GetUnconstType(self):
		# Map LPCxxx to LPxxx by dropping the "C" (const) marker.
		if self.arg.type[:3]=="LPC":
			return self.arg.type[:2] + self.arg.type[3:]
		else:
			return self.arg.unc_type
	def GetParsePostCode(self):
		return "\tif (bPythonIsHappy && !PyWinObject_AsBstr(ob%s, %s)) bPythonIsHappy = FALSE;\n" % (self.arg.name, self.GetIndirectedArgName(None, 2))
	def GetInterfaceArgCleanup(self):
		# Free the string allocated by the parse post code.
		return "\tSysFreeString(%s);\n" % self.GetIndirectedArgName(None, 1)
	def GetBuildForInterfacePreCode(self):
		# the variable was declared with just its builtin indirection
		notdirected = self.GetIndirectedArgName(self.builtinIndirection, 1)
		return "\tob%s = MakeOLECHARToObj(%s);\n" % \
			   (self.arg.name, notdirected)
	def GetBuildForInterfacePostCode(self):
		# memory returned into an OLECHAR should be freed
		return "\tCoTaskMemFree(%s);\n" % (self.arg.name,) + \
		       ArgFormatterPythonCOM.GetBuildForInterfacePostCode(self)
	def GetBuildForGatewayPostCode(self):
		return "\tPy_XDECREF(ob%s);\n" % self.arg.name
+
class ArgFormatterTCHAR(ArgFormatterPythonCOM):
	"""Arg formatter for TCHAR strings (LPTSTR/LPCTSTR)."""
	def _GetPythonTypeDesc(self):
		return "string/<o unicode>"
	def GetUnconstType(self):
		# Map LPCxxx to LPxxx by dropping the "C" (const) marker.
		if self.arg.type[:3]=="LPC":
			return self.arg.type[:2] + self.arg.type[3:]
		else:
			return self.arg.unc_type
	def GetParsePostCode(self):
		return "\tif (bPythonIsHappy && !PyWinObject_AsTCHAR(ob%s, %s)) bPythonIsHappy = FALSE;\n" % (self.arg.name, self.GetIndirectedArgName(None, 2))
	def GetInterfaceArgCleanup(self):
		return "\tPyWinObject_FreeTCHAR(%s);\n" % self.GetIndirectedArgName(None, 1)
	def GetBuildForInterfacePreCode(self):
		# the variable was declared with just its builtin indirection
		notdirected = self.GetIndirectedArgName(self.builtinIndirection, 1)
		return "\tob%s = PyWinObject_FromTCHAR(%s);\n" % \
			   (self.arg.name, notdirected)
	def GetBuildForInterfacePostCode(self):
		# NOTE(review): only a C comment is emitted here - no cleanup is
		# generated for TCHAR post code; confirm whether a free is needed.
		return "// ??? - TCHAR post code\n"
	def GetBuildForGatewayPostCode(self):
		return "\tPy_XDECREF(ob%s);\n" % self.arg.name
+
class ArgFormatterIID(ArgFormatterPythonCOM):
	"""Arg formatter for IID/CLSID/GUID arguments (PyIID objects)."""
	def _GetPythonTypeDesc(self):
		return "<o PyIID>"
	def GetParsePostCode(self):
		return "\tif (!PyWinObject_AsIID(ob%s, &%s)) bPythonIsHappy = FALSE;\n" % (self.arg.name, self.arg.name)
	def GetBuildForInterfacePreCode(self):
#		notdirected = self.GetIndirectedArgName(self.arg.indirectionLevel, 0)
		notdirected = self.GetIndirectedArgName(None, 0)
		return "\tob%s = PyWinObject_FromIID(%s);\n" % (self.arg.name, notdirected)
	def GetInterfaceCppObjectInfo(self):
		# Pass the IID by value; declare it as a plain "IID <name>".
		return self.arg.name, "IID %s" % (self.arg.name)
+
class ArgFormatterTime(ArgFormatterPythonCOM):
	"""Arg formatter for FILETIME/SYSTEMTIME arguments (PyTime objects)."""
	def __init__(self, arg, builtinIndirection, declaredIndirection = 0):
		# we don't want to declare LPSYSTEMTIME / LPFILETIME objects
		if arg.indirectionLevel == 0 and arg.unc_type[:2] == "LP":
			arg.unc_type = arg.unc_type[2:]
			# reduce the builtin and increment the declaration
			arg.indirectionLevel = arg.indirectionLevel + 1
			builtinIndirection = 0
		ArgFormatterPythonCOM.__init__(self, arg, builtinIndirection, declaredIndirection)

	def _GetPythonTypeDesc(self):
		return "<o PyTime>"
	def GetParsePostCode(self):
		# variable was declared with only the builtinIndirection
		### NOTE: this is an [in] ... so use only builtin
		return '\tif (!PyTime_Check(ob%s)) {\n\t\tPyErr_SetString(PyExc_TypeError, "The argument must be a PyTime object");\n\t\tbPythonIsHappy = FALSE;\n\t}\n\tif (!((PyTime *)ob%s)->GetTime(%s)) bPythonIsHappy = FALSE;\n' % (self.arg.name, self.arg.name, self.GetIndirectedArgName(self.builtinIndirection, 1))
	def GetBuildForInterfacePreCode(self):
		### use just the builtinIndirection again...
		notdirected = self.GetIndirectedArgName(self.builtinIndirection,0)
		return "\tob%s = new PyTime(%s);\n" % (self.arg.name, notdirected)
	def GetBuildForInterfacePostCode(self):
		### hack to determine if we need to free stuff
		ret = ''
		if self.builtinIndirection + self.arg.indirectionLevel > 1:
			# memory returned into an OLECHAR should be freed
			ret = "\tCoTaskMemFree(%s);\n" % self.arg.name
		return ret + ArgFormatterPythonCOM.GetBuildForInterfacePostCode(self)
+
class ArgFormatterSTATSTG(ArgFormatterPythonCOM):
	"""Arg formatter for STATSTG structure arguments."""
	def _GetPythonTypeDesc(self):
		return "<o STATSTG>"
	def GetParsePostCode(self):
		return '\tif (!PyCom_PyObjectAsSTATSTG(ob%s, %s, 0/*flags*/)) bPythonIsHappy = FALSE;\n' % (self.arg.name, self.GetIndirectedArgName(None, 1))
	def GetBuildForInterfacePreCode(self):
		notdirected = self.GetIndirectedArgName(None, 1)
		# Generated code frees pwcsName - per the STATSTG docs it is the
		# caller's responsibility.
		return "\tob%s = PyCom_PyObjectFromSTATSTG(%s);\n\t// STATSTG doco says our responsibility to free\n\tif ((%s).pwcsName) CoTaskMemFree((%s).pwcsName);\n" % (self.arg.name, self.GetIndirectedArgName(None, 1),notdirected,notdirected)
+
class ArgFormatterGeneric(ArgFormatterPythonCOM):
	"""Generic arg formatter.

	Relies on PyObject_As<Type> / PyObject_From<Type> / PyObject_Free<Type>
	helper functions existing for the argument's type.
	"""
	def _GetPythonTypeDesc(self):
		return "<o %s>" % self.arg.type
	def GetParsePostCode(self):
		# BUGFIX: the generated "if" was missing its closing parenthesis
		# ("...&%s) bPythonIsHappy"), producing uncompilable C code.
		return '\tif (!PyObject_As%s(ob%s, &%s)) bPythonIsHappy = FALSE;\n' % (self.arg.type, self.arg.name, self.GetIndirectedArgName(None, 1))
	def GetInterfaceArgCleanup(self):
		return '\tPyObject_Free%s(%s);\n' % (self.arg.type, self.arg.name)
	def GetBuildForInterfacePreCode(self):
		notdirected = self.GetIndirectedArgName(None, 1)
		return "\tob%s = PyObject_From%s(%s);\n" % (self.arg.name, self.arg.type, notdirected)
+
class ArgFormatterIDLIST(ArgFormatterPythonCOM):
	"""Arg formatter for ITEMIDLIST (shell PIDL) arguments."""
	def _GetPythonTypeDesc(self):
		return "<o PyIDL>"
	def GetParsePostCode(self):
		target = self.GetIndirectedArgName(None, 1)
		return '\tif (bPythonIsHappy && !PyObject_AsPIDL(ob%s, &%s)) bPythonIsHappy = FALSE;\n' % (self.arg.name, target)
	def GetInterfaceArgCleanup(self):
		return '\tPyObject_FreePIDL(%s);\n' % (self.arg.name,)
	def GetBuildForInterfacePreCode(self):
		source = self.GetIndirectedArgName(None, 1)
		return "\tob%s = PyObject_FromPIDL(%s);\n" % (self.arg.name, source)
+
class ArgFormatterHANDLE(ArgFormatterPythonCOM):
	"""Arg formatter for HANDLE arguments (PyHANDLE objects)."""
	def _GetPythonTypeDesc(self):
		return "<o PyHANDLE>"
	def GetParsePostCode(self):
		# BUGFIX: the generated "if" was missing its closing parenthesis
		# ("...FALSE) bPythonIsHappy"), producing uncompilable C code.
		return '\tif (!PyWinObject_AsHANDLE(ob%s, &%s, FALSE)) bPythonIsHappy = FALSE;\n' % (self.arg.name, self.GetIndirectedArgName(None, 1))
	def GetBuildForInterfacePreCode(self):
		# The previous code computed an unused level-1 name; level 0 is
		# what was actually used to build the object.
		notdirected = self.GetIndirectedArgName(None, 0)
		return "\tob%s = PyWinObject_FromHANDLE(%s);\n" % (self.arg.name, notdirected)
+
class ArgFormatterLARGE_INTEGER(ArgFormatterPythonCOM):
	"""Arg formatter for LARGE_INTEGER arguments.

	Subclasses override GetKeyName() to reuse this logic for related types
	(see ArgFormatterULARGE_INTEGER).
	"""
	def GetKeyName(self):
		return "LARGE_INTEGER"
	def _GetPythonTypeDesc(self):
		return "<o %s>" % self.GetKeyName()
	def GetParsePostCode(self):
		key = self.GetKeyName()
		target = self.GetIndirectedArgName(None, 1)
		return '\tif (!PyWinObject_As%s(ob%s, %s)) bPythonIsHappy = FALSE;\n' % (key, self.arg.name, target)
	def GetBuildForInterfacePreCode(self):
		source = self.GetIndirectedArgName(None, 0)
		return "\tob%s = PyWinObject_From%s(%s);\n" % (self.arg.name, self.GetKeyName(), source)
+
class ArgFormatterULARGE_INTEGER(ArgFormatterLARGE_INTEGER):
	# Identical handling to LARGE_INTEGER, keyed on the unsigned type name.
	def GetKeyName(self):
		return "ULARGE_INTEGER"
+
class ArgFormatterInterface(ArgFormatterPythonCOM):
	"""Arg formatter for COM interface pointer arguments.

	Handles both directions: converting Python objects to interface
	pointers (with QueryInterface), and wrapping raw pointers back as
	PythonCOM objects.
	"""
	def GetInterfaceCppObjectInfo(self):
		# Declare "<Type> * <name>" and pass its address.
		return self.GetIndirectedArgName(1, self.arg.indirectionLevel), \
			   "%s * %s" % (self.GetUnconstType(), self.arg.name)

	def GetParsePostCode(self):
		# This gets called for out params in gateway mode
		if self.gatewayMode:
			sArg = self.GetIndirectedArgName(None, 2)
		else:
		# vs. in params for interface mode.
			sArg = self.GetIndirectedArgName(1, 2)
		return "\tif (bPythonIsHappy && !PyCom_InterfaceFromPyInstanceOrObject(ob%s, IID_%s, (void **)%s, TRUE /* bNoneOK */))\n\t\t bPythonIsHappy = FALSE;\n" % (self.arg.name, self.arg.type, sArg)

	def GetBuildForInterfacePreCode(self):
		return "\tob%s = PyCom_PyObjectFromIUnknown(%s, IID_%s, FALSE);\n" % (self.arg.name, self.arg.name, self.arg.type)

	def GetBuildForGatewayPreCode(self):
		sPrefix = self._IndirectPrefix(self._GetDeclaredIndirection(), 1)
		return "\tob%s = PyCom_PyObjectFromIUnknown(%s%s, IID_%s, TRUE);\n" % (self.arg.name, sPrefix, self.arg.name, self.arg.type)

	def GetInterfaceArgCleanup(self):
		# Release the reference taken when converting from Python.
		return "\tif (%s) %s->Release();\n" % (self.arg.name, self.arg.name)
+
class ArgFormatterVARIANT(ArgFormatterPythonCOM):
	"""Arg formatter for VARIANT arguments."""
	def GetParsePostCode(self):
		return "\tif ( !PyCom_VariantFromPyObject(ob%s, %s) )\n\t\tbPythonIsHappy = FALSE;\n" % (self.arg.name, self.GetIndirectedArgName(None, 1))

	def GetBuildForGatewayPreCode(self):
		notdirected = self.GetIndirectedArgName(None, 1)
		return "\tob%s = PyCom_PyObjectFromVariant(%s);\n" % (self.arg.name, notdirected)
	def GetBuildForGatewayPostCode(self):
		return "\tPy_XDECREF(ob%s);\n" % self.arg.name
+
					  # Maps type name -> (C type used in generated code,
					  # Python type description, ParseTuple format char).
ConvertSimpleTypes = {"BOOL":("BOOL", "int", "i"),
					  "UINT":("UINT", "int", "i"),
					  "BYTE": ("BYTE", "int", "i"),
					  "INT": ("INT", "int", "i"),
					  "DWORD": ("DWORD", "int", "l"),
					  "HRESULT":("HRESULT", "int", "l"),
					  "ULONG": ("ULONG", "int", "l"),
					  "LONG": ("LONG", "int", "l"),
					  "int": ("int", "int", "i"),
					  "long": ("long", "int", "l"),
					  "HWND": ("HWND", "HWND", "l"),
					  "HDC": ("HDC", "HDC", "l"),
					  "LPARAM" : ("LPARAM", "long", "l"),
					  "LRESULT" : ("LRESULT", "long", "l"),
					  "WPARAM" : ("LPARAM", "int", "i"),  # NOTE(review): C type is LPARAM for a WPARAM key - looks like a copy/paste slip; confirm.
					  "DISPID": ("DISPID", "long", "l"),
					  "APPBREAKFLAGS": ("int", "int", "i"),
					  "BREAKRESUMEACTION": ("int", "int", "i"),
					  "ERRORRESUMEACTION": ("int", "int", "i"),
					  "BREAKREASON": ("int", "int", "i"),
					  "BREAKPOINT_STATE": ("int", "int", "i"),
					  "BREAKRESUME_ACTION": ("int", "int", "i"),
					  "SOURCE_TEXT_ATTR": ("int", "int", "i"),
					  "TEXT_DOC_ATTR": ("int", "int", "i"),
					  "QUERYOPTION": ("int", "int", "i"),
					  "PARSEACTION": ("int", "int", "i"),
}
+	
class ArgFormatterSimple(ArgFormatter):
	"""An arg formatter for simple integer etc types"""
	def GetFormatChar(self):
		# Format char comes straight from the ConvertSimpleTypes table.
		return ConvertSimpleTypes[self.arg.type][2]
	def _GetPythonTypeDesc(self):
		return ConvertSimpleTypes[self.arg.type][1]
+
# Map of type name -> (formatter class, builtin indirection
# [, declared indirection]).  Consumed by make_arg_converter.
# BUGFIX: a duplicate "const IID" entry (identical to the one earlier in
# the literal) has been removed - duplicate dict keys silently override.
AllConverters = {"const OLECHAR":	(ArgFormatterOLECHAR, 0, 1),
				 "WCHAR":			(ArgFormatterOLECHAR, 0, 1),
				 "OLECHAR":			(ArgFormatterOLECHAR, 0, 1),
				 "LPCOLESTR":		(ArgFormatterOLECHAR, 1, 1),
				 "LPOLESTR":		(ArgFormatterOLECHAR, 1, 1),
				 "LPCWSTR":			(ArgFormatterOLECHAR, 1, 1),
				 "LPWSTR":			(ArgFormatterOLECHAR, 1, 1),
				 "LPCSTR":			(ArgFormatterOLECHAR, 1, 1),
				 "LPTSTR":			(ArgFormatterTCHAR, 1, 1),
				 "LPCTSTR":         (ArgFormatterTCHAR, 1, 1),
				 "HANDLE":			(ArgFormatterHANDLE, 0),
				 "BSTR":			(ArgFormatterBSTR, 1, 0),
				 "const IID":		(ArgFormatterIID, 0),
				 "CLSID":			(ArgFormatterIID, 0),
				 "IID":				(ArgFormatterIID, 0),
				 "GUID":			(ArgFormatterIID, 0),
				 "const GUID":		(ArgFormatterIID, 0),
				 "REFCLSID":		(ArgFormatterIID, 0),
				 "REFIID":			(ArgFormatterIID, 0),
				 "REFGUID":			(ArgFormatterIID, 0),
				 "const FILETIME":	(ArgFormatterTime, 0),
				 "const SYSTEMTIME":(ArgFormatterTime, 0),
				 "const LPSYSTEMTIME":(ArgFormatterTime, 1, 1),
				 "LPSYSTEMTIME":	(ArgFormatterTime, 1, 1),
				 "FILETIME":		(ArgFormatterTime, 0),
				 "SYSTEMTIME":		(ArgFormatterTime, 0),
				 "STATSTG":			(ArgFormatterSTATSTG, 0),
				 "LARGE_INTEGER":	(ArgFormatterLARGE_INTEGER, 0),
				 "ULARGE_INTEGER":	(ArgFormatterULARGE_INTEGER, 0),
				 "VARIANT":			(ArgFormatterVARIANT, 0),
				 "float":			(ArgFormatterFloat, 0),
				 "single":			(ArgFormatterFloat, 0),
				 "short":			(ArgFormatterShort, 0),
				 "WORD":			(ArgFormatterShort, 0),
				 "VARIANT_BOOL":	(ArgFormatterShort, 0),
				 "HWND":			(ArgFormatterShort, 0),
				 "HMENU":			(ArgFormatterShort, 0),
				 "HOLEMENU":			(ArgFormatterShort, 0),
				 "HICON":			(ArgFormatterShort, 0),
				 "UINT":			(ArgFormatterShort, 0),
				 "SVSIF":			(ArgFormatterShort, 0),
				 "Control":			(ArgFormatterInterface, 0, 1),
				 "DataObject":		(ArgFormatterInterface, 0, 1),
				 "_PropertyBag":	(ArgFormatterInterface, 0, 1),
				 "AsyncProp":		(ArgFormatterInterface, 0, 1),
				 "DataSource":		(ArgFormatterInterface, 0, 1),
				 "DataFormat":		(ArgFormatterInterface, 0, 1),
				 "void **":			(ArgFormatterInterface, 2, 2),
				 "ITEMIDLIST":		(ArgFormatterIDLIST, 0, 0),
				 "LPITEMIDLIST":		(ArgFormatterIDLIST, 0, 1),
				 "LPCITEMIDLIST":		(ArgFormatterIDLIST, 0, 1),
				 "const ITEMIDLIST":		(ArgFormatterIDLIST, 0, 1),
}
+
# Auto-add all the simple types
# NOTE(review): this runs after the explicit AllConverters literal, so keys
# present in both (e.g. "HWND", "UINT") end up mapped to ArgFormatterSimple,
# overriding their ArgFormatterShort entries above - presumably intended;
# confirm.
for key in ConvertSimpleTypes.keys():
	AllConverters[key] = ArgFormatterSimple, 0
+
+def make_arg_converter(arg):
+	try:
+		clz = AllConverters[arg.type][0]
+		bin = AllConverters[arg.type][1]
+		decl = 0
+		if len(AllConverters[arg.type])>2:
+			decl = AllConverters[arg.type][2]
+		return clz(arg,bin, decl)
+	except KeyError:
+		if arg.type[0]=="I":
+			return ArgFormatterInterface(arg, 0, 1)
+
+		raise error_not_supported, "The type '%s' (%s) is unknown." % (arg.type, arg.name)
+
+		
+#############################################################
+#
+# The instances that represent the args, methods and interface
class Argument:
	"""A representation of an argument to a COM method
	
	This class contains information about a specific argument to a method.
	In addition, methods exist so that an argument knows how to convert itself
	to/from Python arguments.
	"""
	# NOTE: uses the long-removed stateful "regex" module - a successful
	# search() leaves the match groups on the pattern object itself.
#									  in,out					  type			  name			 [	]
#								   --------------				--------	  ------------		------
	regex = regex.compile('/\\* \\[\\([^\\]]*.*\\)] \\*/[ \t]\\(.*[\\* ]\\)\\([a-zA-Z0-9]+\\)\\(\\[ *]\\)?[),]')
	def __init__(self, good_interface_names):
		self.good_interface_names = good_interface_names
		# Parse results - filled in by BuildFromFile.
		self.inout = self.name = self.type = None
		self.const = 0
		self.arrayDecl = 0
	def BuildFromFile(self, file):
		"""Parse and build my data from a file
		
		Reads the next line in the file, and matches it as an argument
		description.  If not a valid argument line, an error_not_found exception
		is raised.
		"""
		line = file.readline()
		# Old "regex" module semantics: search() returns -1 on no match.
		if self.regex.search(line)<0:
			raise error_not_found
		self.name = self.regex.group(3)
		# "[in][out]" etc. -> ["in", "out"]
		self.inout = string.split(self.regex.group(1),'][')
		typ = string.strip(self.regex.group(2))
		self.raw_type = typ
		self.indirectionLevel = 0
		if self.regex.group(4): # Has "[ ]" decl
			self.arrayDecl = 1
			try:
				# An array decl counts as one extra level of indirection.
				pos = string.rindex(typ, "__RPC_FAR")
				self.indirectionLevel = self.indirectionLevel + 1
				typ = string.strip(typ[:pos])
			except ValueError:
				pass
		
		# Strip trailing "__RPC_FAR *" markers, counting each as one level
		# of indirection.
		while 1:
			try:
				pos = string.rindex(typ, "__RPC_FAR *")
				self.indirectionLevel = self.indirectionLevel + 1
				typ = string.strip(typ[:pos])
			except ValueError:
				break
		self.type = typ
		# unc_type is the type with any leading "const " removed.
		if self.type[:6]=="const ":
			self.unc_type = self.type[6:]
		else:
			self.unc_type = self.type
		
		if VERBOSE:
			print "	   Arg %s of type %s%s (%s)" % (self.name, self.type, "*" * self.indirectionLevel, self.inout)

	def HasAttribute(self, typ):
		"""Determines if the argument has the specific attribute.
		
		Argument attributes are specified in the header file, such as
		"[in][out][retval]" etc.  You can pass a specific string (eg "out")
		to find if this attribute was specified for the argument
		"""
		return typ in self.inout

	def GetRawDeclaration(self):
		# Reconstruct the argument roughly as it appeared in the header.
		ret = "%s %s" % (self.raw_type, self.name)
		if self.arrayDecl:
			ret = ret + "[]"
		return ret
+
+class Method:
+	"""A representation of a C++ method on a COM interface
+	
+	This class contains information about a specific method, as well as 
+	a list of all @Argument@s
+	"""
+#										 options	 ret type callconv	 name
+#								   ----------------- -------- -------- --------
+	regex = regex.compile('virtual \\(/\\*.*\\*/ \\)?\\(.*\\) \\(.*\\) \\(.*\\)(\w?')
+	def __init__(self, good_interface_names ):
+		self.good_interface_names = good_interface_names
+		self.name = self.result = self.callconv = None
+		self.args = []
+	def BuildFromFile(self, file):
+		"""Parse and build my data from a file
+		
+		Reads the next line in the file, and matches it as a method
+		description.  If not a valid method line, an error_not_found exception
+		is raised.
+		"""
+		str = file.readline()
+		if self.regex.search(str, 0) == -1:
+			raise error_not_found
+		self.name = self.regex.group(4)
+		self.result = self.regex.group(2)
+		if self.result != "HRESULT":
+			if self.result=="DWORD": # DWORD is for old old stuff?
+				print "Warning: Old style interface detected - compilation errors likely!"
+			else:
+				print "Method %s - Only HRESULT return types are supported." % self.name
+#				raise error_not_supported,		if VERBOSE:
+			print "	 Method %s %s(" % (self.result, self.name)
+		while 1:
+			arg = Argument(self.good_interface_names)
+			try:
+				arg.BuildFromFile(file)
+				self.args.append(arg)
+			except error_not_found:
+				break
+
class Interface:
	"""A representation of a C++ COM Interface
	
	This class contains information about a specific interface, as well as 
	a list of all @Method@s
	"""
#									  name				 base
#									 --------		   --------
	regex = regex.compile("\\(interface\\|\\) \\([^ ]*\\) : public \\(.*\\)$")
	def __init__(self):
		# NOTE: relies on the stateful old "regex" module - the groups read
		# here come from the search() that find_interface() just performed
		# on this same class-level pattern.
		self.methods = []
		self.name = self.regex.group(2)
		self.base = self.regex.group(3)
		if VERBOSE:
			print "Interface %s : public %s" % (self.name, self.base)

	def BuildMethods(self, file):
		"""Build all sub-methods for this interface"""
		# skip the next 2 lines.
		file.readline();file.readline();
		# Parse methods until one fails to match.
		while 1:
			try:
				method = Method([self.name])
				method.BuildFromFile(file)
				self.methods.append(method)
			except error_not_found:
				break
+	
def find_interface(interfaceName, file):
	"""Find and return an interface in a file
	
	Given an interface name and file, search for the specified interface.
	
	Upon return, the interface itself has been built, 
	but not the methods.

	Raises error_not_found if the interface is not in the file.
	"""

	line = file.readline()
	while line:
		# Old "regex" module semantics: search() returns -1 on no match.
		if Interface.regex.search(line, 0) >=0:
			name = Interface.regex.group(2)
			# NOTE(review): unconditional print - presumably leftover debug
			# tracing; consider guarding with VERBOSE.
			print name
			if name==interfaceName:
				# Interface.__init__ reads the groups from the search above.
				return Interface()
		line = file.readline()
	raise error_not_found
+
+	
def parse_interface_info(interfaceName, file):
	"""Find, parse and return an interface in a file
	
	Given an interface name and file, search for the specified interface.
	
	Upon return, the interface itself is fully built.
	If the regular expression machinery fails, a traceback is printed and
	None is (implicitly) returned.
	"""
	try:
		interface = find_interface(interfaceName, file)
		interface.BuildMethods(file)
		return interface
	except regex.error:
		# Report and fall through - callers receive None.
		traceback.print_exc()
		print "The interface could not be built, as the regular expression failed!"
def test():
	"""Quick manual smoke test - parse IPersistStream from the MSVC objidl.h."""
	header = open("d:\\msdev\\include\\objidl.h")
	try:
		parse_interface_info("IPersistStream", header)
	finally:
		header.close()
+
+def test_regex(r,text):
+	res=r.search(text,0)
+	if res==-1:
+		print "** Not found"
+	else:
+		print "%d\n%s\n%s\n%s\n%s" % (res, r.group(1), r.group(2), r.group(3), r.group(4))
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/olectl.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/olectl.py
new file mode 100644
index 0000000..28252a6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/olectl.py
@@ -0,0 +1,68 @@
+"""Constants used by COM Controls
+
+  Hand created version of OLECTL.H constants.
+"""
+
+import winerror
+
# Facility code used to build the control SCODEs below.
FACILITY_CONTROL = 0xa
+
def MAKE_SCODE(sev, fac, code):
	"""Build an SCODE/HRESULT from severity, facility and error code.

	Mirrors the MAKE_SCODE macro from the Win32 SDK: the severity lands
	in the sign bit, the facility in bits 16-30 and the code in the low
	16 bits, yielding the signed 32-bit value used for HRESULTs.
	"""
	# The long() wrapper was redundant - Python auto-promotes ints on
	# shift overflow (PEP 237) - so it has been removed; behaviour is
	# unchanged.
	return int((-sev << 31) | (fac << 16) | code)
+
def STD_CTL_SCODE(n):
	# Build a standard control error SCODE (severity = error,
	# facility = FACILITY_CONTROL) from error number n.
	return MAKE_SCODE(winerror.SEVERITY_ERROR, FACILITY_CONTROL, n)
+
# Standard control error SCODEs (CTL_E_*), built from their OLECTL.H
# error numbers.
CTL_E_ILLEGALFUNCTIONCALL       = STD_CTL_SCODE(5)
CTL_E_OVERFLOW                  = STD_CTL_SCODE(6)
CTL_E_OUTOFMEMORY               = STD_CTL_SCODE(7)
CTL_E_DIVISIONBYZERO            = STD_CTL_SCODE(11)
CTL_E_OUTOFSTRINGSPACE          = STD_CTL_SCODE(14)
CTL_E_OUTOFSTACKSPACE           = STD_CTL_SCODE(28)
CTL_E_BADFILENAMEORNUMBER       = STD_CTL_SCODE(52)
CTL_E_FILENOTFOUND              = STD_CTL_SCODE(53)
CTL_E_BADFILEMODE               = STD_CTL_SCODE(54)
CTL_E_FILEALREADYOPEN           = STD_CTL_SCODE(55)
CTL_E_DEVICEIOERROR             = STD_CTL_SCODE(57)
CTL_E_FILEALREADYEXISTS         = STD_CTL_SCODE(58)
CTL_E_BADRECORDLENGTH           = STD_CTL_SCODE(59)
CTL_E_DISKFULL                  = STD_CTL_SCODE(61)
CTL_E_BADRECORDNUMBER           = STD_CTL_SCODE(63)
CTL_E_BADFILENAME               = STD_CTL_SCODE(64)
CTL_E_TOOMANYFILES              = STD_CTL_SCODE(67)
CTL_E_DEVICEUNAVAILABLE         = STD_CTL_SCODE(68)
CTL_E_PERMISSIONDENIED          = STD_CTL_SCODE(70)
CTL_E_DISKNOTREADY              = STD_CTL_SCODE(71)
CTL_E_PATHFILEACCESSERROR       = STD_CTL_SCODE(75)
CTL_E_PATHNOTFOUND              = STD_CTL_SCODE(76)
CTL_E_INVALIDPATTERNSTRING      = STD_CTL_SCODE(93)
CTL_E_INVALIDUSEOFNULL          = STD_CTL_SCODE(94)
CTL_E_INVALIDFILEFORMAT         = STD_CTL_SCODE(321)
CTL_E_INVALIDPROPERTYVALUE      = STD_CTL_SCODE(380)
CTL_E_INVALIDPROPERTYARRAYINDEX = STD_CTL_SCODE(381)
CTL_E_SETNOTSUPPORTEDATRUNTIME  = STD_CTL_SCODE(382)
CTL_E_SETNOTSUPPORTED           = STD_CTL_SCODE(383)
CTL_E_NEEDPROPERTYARRAYINDEX    = STD_CTL_SCODE(385)
CTL_E_SETNOTPERMITTED           = STD_CTL_SCODE(387)
CTL_E_GETNOTSUPPORTEDATRUNTIME  = STD_CTL_SCODE(393)
CTL_E_GETNOTSUPPORTED           = STD_CTL_SCODE(394)
CTL_E_PROPERTYNOTFOUND          = STD_CTL_SCODE(422)
CTL_E_INVALIDCLIPBOARDFORMAT    = STD_CTL_SCODE(460)
CTL_E_INVALIDPICTURE            = STD_CTL_SCODE(481)
CTL_E_PRINTERERROR              = STD_CTL_SCODE(482)
CTL_E_CANTSAVEFILETOTEMP        = STD_CTL_SCODE(735)
CTL_E_SEARCHTEXTNOTFOUND        = STD_CTL_SCODE(744)
CTL_E_REPLACEMENTSTOOLONG       = STD_CTL_SCODE(746)
+
# Connection-point error/success code ranges (FACILITY_ITF), and the
# specific connection-point errors used by win32com.server.connect.
CONNECT_E_FIRST    = MAKE_SCODE(winerror.SEVERITY_ERROR,   winerror.FACILITY_ITF, 0x0200)
CONNECT_E_LAST     = MAKE_SCODE(winerror.SEVERITY_ERROR,   winerror.FACILITY_ITF, 0x020F)
CONNECT_S_FIRST    = MAKE_SCODE(winerror.SEVERITY_SUCCESS, winerror.FACILITY_ITF, 0x0200)
CONNECT_S_LAST     = MAKE_SCODE(winerror.SEVERITY_SUCCESS, winerror.FACILITY_ITF, 0x020F)

CONNECT_E_NOCONNECTION = CONNECT_E_FIRST+0
CONNECT_E_ADVISELIMIT  = CONNECT_E_FIRST+1
CONNECT_E_CANNOTCONNECT = CONNECT_E_FIRST+2
CONNECT_E_OVERRIDDEN = CONNECT_E_FIRST+3

CLASS_E_NOTLICENSED = winerror.CLASSFACTORY_E_FIRST+2
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/__init__.py
new file mode 100644
index 0000000..143e1ba9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/__init__.py
@@ -0,0 +1 @@
+# Empty __init__ file to designate a sub-package.
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/connect.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/connect.py
new file mode 100644
index 0000000..7190885
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/connect.py
@@ -0,0 +1,65 @@
+"""Utilities for Server Side connections.
+
+  A collection of helpers for server side connection points.
+"""
+import pythoncom
+from exception import Exception
+import winerror
+from win32com import olectl
+import win32com.server.util
+
# Methods implemented by the connection-point interfaces; these become the
# _public_methods_ of ConnectableServer below.
IConnectionPointContainer_methods = ["EnumConnectionPoints","FindConnectionPoint"]
IConnectionPoint_methods = ["EnumConnections","Unadvise","Advise","GetConnectionPointContainer","GetConnectionInterface"]
+
class ConnectableServer:
	"""Base class for COM servers that support connection points.

	Implements both IConnectionPoint and IConnectionPointContainer on the
	same object.  Connections are kept in a cookie -> interface map.
	"""
	_public_methods_ = IConnectionPointContainer_methods + IConnectionPoint_methods
	_com_interfaces_ = [pythoncom.IID_IConnectionPoint, pythoncom.IID_IConnectionPointContainer]
	# Clients must set _connect_interfaces_ = [...]
	def __init__(self):
		# Monotonically increasing cookie allocator.
		self.cookieNo = 0
		# Map of cookie -> connected client interface.
		self.connections = {}
	# IConnectionPoint interfaces
	def EnumConnections(self):
		raise Exception(winerror.E_NOTIMPL)
	def GetConnectionInterface(self):
		raise Exception(winerror.E_NOTIMPL)
	def GetConnectionPointContainer(self):
		return win32com.server.util.wrap(self)
	def Advise(self, pUnk):
		# Creates a connection to the client.  Simply allocate a new cookie,
		# find the clients interface, and store it in a dictionary.
		try:
			interface = pUnk.QueryInterface(self._connect_interfaces_[0],pythoncom.IID_IDispatch)
		except pythoncom.com_error:
			raise Exception(scode=olectl.CONNECT_E_NOCONNECTION)
		self.cookieNo = self.cookieNo + 1
		self.connections[self.cookieNo] = interface
		# The cookie is returned to the client for later Unadvise.
		return self.cookieNo
	def Unadvise(self, cookie):
		# Destroy a connection - simply delete interface from the map.
		try:
			del self.connections[cookie]
		except KeyError:
			raise Exception(scode=winerror.E_UNEXPECTED)
	# IConnectionPointContainer interfaces
	def EnumConnectionPoints(self):
		raise Exception(winerror.E_NOTIMPL)
	def FindConnectionPoint(self, iid):
		# Find a connection we support.  Only support the single event interface.
		# NOTE(review): returns None (falls through) when the iid is not
		# supported, rather than raising E_NOINTERFACE - confirm intended.
		if iid in self._connect_interfaces_:
			return win32com.server.util.wrap(self)

	def _BroadcastNotify(self, broadcaster, extraArgs):
		# Broadcasts a notification to all connections.
		# Ignores clients that fail.
		for interface in self.connections.values():
			try:
				apply(broadcaster, (interface,)+extraArgs)
			except pythoncom.com_error, details:
				self._OnNotifyFail(interface, details)

	def _OnNotifyFail(self, interface, details):
		# Hook point - subclasses may override to handle broadcast failures.
		print "Ignoring COM error to connection - %s" % (`details`)
+		
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/dispatcher.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/dispatcher.py
new file mode 100644
index 0000000..da67b0ed8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/dispatcher.py
@@ -0,0 +1,283 @@
+"""Dispatcher
+
+Please see policy.py for a discussion on dispatchers and policies
+"""
+import pythoncom, traceback, win32api
+from sys import exc_info
+
+#
+from win32com.server.exception import IsCOMServerException
+from win32com.util import IIDToInterfaceName
+import win32com
+
class DispatcherBase:
  """ The base class for all Dispatchers.  

      This dispatcher supports wrapping all operations in exception handlers, 
      and all the necessary delegation to the policy.

      This base class supports the printing of "unexpected" exceptions.  Note, however,
      that exactly where the output of print goes may not be useful!  A derived class may
      provide additional semantics for this.
  """
  def __init__(self, policyClass, object):
    self.policy = policyClass(object)
    # The logger we should dump to.  If None, we should send to the
    # default location (typically 'print')
    self.logger = getattr(win32com, "logger", None)

  # Note the "return self._HandleException_()" is purely to stop pychecker
  # complaining - _HandleException_ will itself raise an exception for the
  # pythoncom framework, so the result will never be seen.
  def _CreateInstance_(self, clsid, reqIID):
    try:
      self.policy._CreateInstance_(clsid, reqIID)
      return pythoncom.WrapObject(self, reqIID)
    except:
      return self._HandleException_()

  def _QueryInterface_(self, iid):
    try:
      return self.policy._QueryInterface_(iid)
    except:
      return self._HandleException_()

  def _Invoke_(self, dispid, lcid, wFlags, args):
    try:
      return self.policy._Invoke_(dispid, lcid, wFlags, args)
    except:
      return self._HandleException_()

  def _GetIDsOfNames_(self, names, lcid):
    try:
      return self.policy._GetIDsOfNames_(names, lcid)
    except:
      return self._HandleException_()

  def _GetTypeInfo_(self, index, lcid):
    try:
      return self.policy._GetTypeInfo_(index, lcid)
    except:
      return self._HandleException_()

  def _GetTypeInfoCount_(self):
    try:
      return self.policy._GetTypeInfoCount_()
    except:
      return self._HandleException_()

  def _GetDispID_(self, name, fdex):
    try:
      return self.policy._GetDispID_(name, fdex)
    except:
      return self._HandleException_()

  def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider):
    try:
      return self.policy._InvokeEx_(dispid, lcid, wFlags, args, kwargs, serviceProvider)
    except:
      return self._HandleException_()

  def _DeleteMemberByName_(self, name, fdex):
    try:
      return self.policy._DeleteMemberByName_(name, fdex)
    except:
      return self._HandleException_()

  def _DeleteMemberByDispID_(self, id):
    try:
      return self.policy._DeleteMemberByDispID_(id)
    except:
      return self._HandleException_()

  def _GetMemberProperties_(self, id, fdex):
    try:
      return self.policy._GetMemberProperties_(id, fdex)
    except:
      return self._HandleException_()

  def _GetMemberName_(self, dispid):
    try:
      return self.policy._GetMemberName_(dispid)
    except:
      return self._HandleException_()

  def _GetNextDispID_(self, fdex, flags):
    try:
      return self.policy._GetNextDispID_(fdex, flags)
    except:
      return self._HandleException_()

  def _GetNameSpaceParent_(self):
    try:
      return self.policy._GetNameSpaceParent_()
    except:
      return self._HandleException_()

  def _HandleException_(self):
    """Called whenever an exception is raised.
 
       Default behaviour is to print the exception.
    """
    # If not a COM exception, print it for the developer.
    if not IsCOMServerException():
      if self.logger is not None:
        self.logger.exception("pythoncom server error")
      else:
        traceback.print_exc()
    # But still raise it for the framework.
    # NOTE(review): reraise is not defined in this class - it is expected
    # to be provided elsewhere in this module; confirm it exists.
    reraise()

  def _trace_(self, *args):
    # Route trace output to the configured logger, else fall back to print.
    if self.logger is not None:
      record = " ".join(map(str, args))
      self.logger.debug(record)
    else:
      for arg in args[:-1]:
        print arg,
      print args[-1]
+
class DispatcherTrace(DispatcherBase):
  """A dispatcher, which causes a 'print' line for each COM function called.

  Each override traces the call via _trace_, then delegates to
  DispatcherBase for the actual policy dispatch and exception handling.
  """
  def _QueryInterface_(self, iid):
    rc = DispatcherBase._QueryInterface_(self, iid)
    # Only trace QI failures - successful QIs are too noisy.
    if not rc:
      self._trace_("in %s._QueryInterface_ with unsupported IID %s (%s)" % (`self.policy._obj_`, IIDToInterfaceName(iid),iid))
    return rc

  def _GetIDsOfNames_(self, names, lcid):
    self._trace_("in _GetIDsOfNames_ with '%s' and '%d'\n" % (names, lcid))
    return DispatcherBase._GetIDsOfNames_(self, names, lcid)

  def _GetTypeInfo_(self, index, lcid):
    self._trace_("in _GetTypeInfo_ with index=%d, lcid=%d\n" % (index, lcid))
    return DispatcherBase._GetTypeInfo_(self, index, lcid)

  def _GetTypeInfoCount_(self):
    self._trace_("in _GetTypeInfoCount_\n")
    return DispatcherBase._GetTypeInfoCount_(self)

  def _Invoke_(self, dispid, lcid, wFlags, args):
    self._trace_("in _Invoke_ with", dispid, lcid, wFlags, args)
    return DispatcherBase._Invoke_(self, dispid, lcid, wFlags, args)

  def _GetDispID_(self, name, fdex):
    self._trace_("in _GetDispID_ with", name, fdex)
    return DispatcherBase._GetDispID_(self, name, fdex)

  def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider):
    self._trace_("in %r._InvokeEx_-%s%r [%x,%s,%r]" % (self.policy._obj_, dispid, args, wFlags, lcid, serviceProvider))
    return DispatcherBase._InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider)

  def _DeleteMemberByName_(self, name, fdex):
    self._trace_("in _DeleteMemberByName_ with", name, fdex)
    return DispatcherBase._DeleteMemberByName_(self, name, fdex)

  def _DeleteMemberByDispID_(self, id):
    self._trace_("in _DeleteMemberByDispID_ with", id)
    return DispatcherBase._DeleteMemberByDispID_(self, id)

  def _GetMemberProperties_(self, id, fdex):
    self._trace_("in _GetMemberProperties_ with", id, fdex)
    return DispatcherBase._GetMemberProperties_(self, id, fdex)

  def _GetMemberName_(self, dispid):
    self._trace_("in _GetMemberName_ with", dispid)
    return DispatcherBase._GetMemberName_(self, dispid)

  def _GetNextDispID_(self, fdex, flags):
    self._trace_("in _GetNextDispID_ with", fdex, flags)
    return DispatcherBase._GetNextDispID_(self, fdex, flags)

  def _GetNameSpaceParent_(self):
    self._trace_("in _GetNameSpaceParent_")
    return DispatcherBase._GetNameSpaceParent_(self)
+
+
+class DispatcherWin32trace(DispatcherTrace):
+  """A tracing dispatcher that sends its output to the win32trace remote collector.
+
+  Useful for servers running where no console is visible (eg, under COM).
+  """
+  def __init__(self, policyClass, object):
+    DispatcherTrace.__init__(self, policyClass, object)
+    if self.logger is None:
+      # If we have no logger, setup our output.
+      # Importing win32traceutil redirects stdout to the trace collector.
+      import win32traceutil # Sets up everything.
+    self._trace_("Object with win32trace dispatcher created (object=%s)" % `object`)
+
+
+class DispatcherOutputDebugString(DispatcherTrace):
+  """A tracing dispatcher that sends its output to win32api.OutputDebugString
+  
+  """
+  def _trace_(self, *args):
+    for arg in args[:-1]:
+      win32api.OutputDebugString(str(arg)+" ")
+    win32api.OutputDebugString(str(args[-1])+"\n")
+
+
+class DispatcherWin32dbg(DispatcherBase):
+  """A source-level debugger dispatcher
+
+  A dispatcher which invokes the debugger as an object is instantiated, or 
+  when an unexpected exception occurs.
+
+  Requires Pythonwin.
+  """
+  def __init__(self, policyClass, ob):
+    # No one uses this, and it just causes py2exe to drag all of
+    # pythonwin in.
+    #import pywin.debugger 
+    # NOTE(review): with the import above commented out, the call below
+    # raises NameError - this dispatcher is deliberately disabled.
+    pywin.debugger.brk()
+    print "The DispatcherWin32dbg dispatcher is deprecated!"
+    print "Please let me know if this is a problem."
+    print "Uncomment the relevant lines in dispatcher.py to re-enable"
+    # DEBUGGER Note - You can either:
+    # * Hit Run and wait for a (non Exception class) exception to occur!
+    # * Set a breakpoint and hit run.
+    # * Step into the object creation (a few steps away!)
+    DispatcherBase.__init__(self, policyClass, ob)
+
+  def _HandleException_(self):
+    """ Invoke the debugger post mortem capability """
+    # Save details away.
+    typ, val, tb = exc_info()
+    #import pywin.debugger, pywin.debugger.dbgcon
+    debug = 0
+    try:
+      # Re-raise to classify the exception: the 'except Exception' clause
+      # catches class-based exceptions, the bare except catches everything
+      # else (eg, old-style string exceptions).
+      raise typ, val
+    except Exception: # AARG - What is this Exception???
+      # Use some inside knowledge to borrow a Debugger option which dictates if we
+      # stop at "expected" exceptions.
+      debug = pywin.debugger.GetDebugger().get_option(pywin.debugger.dbgcon.OPT_STOP_EXCEPTIONS)
+    except:
+      debug = 1
+    if debug:
+      try:
+        pywin.debugger.post_mortem(tb, typ, val) # The original exception
+      except:
+        # Never let a debugger failure mask the real error.
+        traceback.print_exc()
+
+    # But still raise it.
+    # Drop our traceback reference to avoid a reference cycle.
+    del tb
+    reraise()
+
+def reraise():
+  """Handy function for re-raising errors.
+
+  Note: storing a traceback in a local variable can introduce reference
+  loops if you aren't careful.  Specifically, that local variable should
+  not be within an execution context contained with the traceback.
+
+  By using a utility function, we ensure that our local variable holding
+  the traceback is not referenced by the traceback itself.
+  """
+  t, v, tb = exc_info()
+  # Python 2 three-argument raise: re-raise with the original traceback.
+  raise t, v, tb
+
+# Select the default debug dispatcher: prefer the win32trace collector
+# when the win32trace extension is available, else a plain 'print' tracer.
+try:
+  import win32trace
+  DefaultDebugDispatcher = DispatcherWin32trace
+except ImportError: # no win32trace module - just use a print based one.
+  DefaultDebugDispatcher = DispatcherTrace
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/exception.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/exception.py
new file mode 100644
index 0000000..d5023b2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/exception.py
@@ -0,0 +1,91 @@
+"""Exception Handling
+
+ Exceptions
+
+	 To better support COM exceptions, the framework allows for an instance to be
+	 raised.  This instance may have a certain number of known attributes, which are
+	 translated into COM exception details.
+	
+	 This means, for example, that Python could raise a COM exception that includes details
+	 on a Help file and location, and a description for the user.
+	
+	 This module provides a class which provides the necessary attributes.
+
+"""
+import sys, pythoncom
+
+# Note that we derive from com_error, which derives from exceptions.Exception
+# Also note that we dont support "self.args", as we dont support tuple-unpacking
+class COMException(pythoncom.com_error):
+	"""An Exception object that is understood by the framework.
+	
+	If the framework is presented with an exception of type class,
+	it looks for certain known attributes on this class to provide rich
+	error information to the caller.
+
+	It should be noted that the framework supports providing this error
+	information via COM Exceptions, or via the ISupportErrorInfo interface.
+
+	By using this class, you automatically provide rich error information to the
+	server.
+	"""
+	def __init__(self, description = None, scode = None,
+				 source = None, helpfile = None, helpContext = None,
+				 desc = None, hresult = None):
+		"""Initialize an exception
+		**Params**
+
+		description -- A string description for the exception.
+		scode -- An integer scode to be returned to the server, if necessary.
+		The pythoncom framework defaults this to be DISP_E_EXCEPTION if not specified otherwise.
+		source -- A string which identifies the source of the error.
+		helpfile -- A string which points to a help file which contains details on the error.
+		helpContext -- An integer context in the help file.
+		desc -- A short-cut for description.
+		hresult -- A short-cut for scode.
+		"""
+
+		# convert a WIN32 error into an HRESULT
+		scode = scode or hresult
+		if scode and scode != 1: # We dont want S_FALSE mapped!
+			if scode >= -32768 and scode < 32768:
+				# this is HRESULT_FROM_WIN32()
+				# (-2147024896 is 0x80070000, the FACILITY_WIN32 error prefix)
+				scode = -2147024896 | (scode & 0x0000FFFF)
+		self.scode = scode
+
+		# Default the description from the scode where possible.
+		self.description = description or desc
+		if scode==1 and not self.description:
+			self.description = "S_FALSE"
+		elif scode and not self.description:
+			self.description = pythoncom.GetScodeString(scode)
+
+		self.source = source
+		self.helpfile = helpfile
+		self.helpcontext = helpContext
+
+		# todo - fill in the exception value
+		pythoncom.com_error.__init__(self, scode, self.description, None, -1)
+
+	def __repr__(self):
+		return "<COM Exception - scode=%s, desc=%s>" % (self.scode, self.description)
+
+# Old name for the COMException class.
+# Do NOT use the name Exception, as it is now a built-in
+# COMException is the new, official name.
+Exception = COMException
+
+def IsCOMException(t = None):
+	if t is None:
+		t = sys.exc_info()[0]
+	try:
+		return issubclass(t, pythoncom.com_error)
+	except TypeError: # 1.5 in -X mode?
+		return t is pythoncon.com_error
+
+def IsCOMServerException(t = None):
+	if t is None:
+		t = sys.exc_info()[0]
+	try:
+		return issubclass(t, COMException)
+	except TypeError: # String exception
+		return 0
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/factory.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/factory.py
new file mode 100644
index 0000000..f9e7aa9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/factory.py
@@ -0,0 +1,22 @@
+# Class factory utilities.
+import pythoncom
+
+def RegisterClassFactories(clsids, flags = None, clsctx = None):
+	"""Given a list of CLSID, create and register class factories.
+	
+	   Returns a list, which should be passed to RevokeClassFactories
+	"""
+	if flags is None:	flags = pythoncom.REGCLS_MULTIPLEUSE|pythoncom.REGCLS_SUSPENDED
+	if clsctx is None: clsctx = pythoncom.CLSCTX_LOCAL_SERVER
+	ret = []
+	for clsid in clsids:
+		# Some server append '-Embedding' etc
+		if clsid[0] not in ['-', '/']:
+			factory = pythoncom.MakePyFactory(clsid)
+			regId = pythoncom.CoRegisterClassObject(clsid, factory, clsctx, flags)
+			ret.append((factory, regId))
+	return ret
+
+def RevokeClassFactories(infos):
+	for factory, revokeId in infos:
+		pythoncom.CoRevokeClassObject(revokeId)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/localserver.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/localserver.py
new file mode 100644
index 0000000..bb2b26f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/localserver.py
@@ -0,0 +1,49 @@
+# LocalServer .EXE support for Python.
+#
+# This is designed to be used as a _script_ file by pythonw.exe
+#
+# In some cases, you could also use Python.exe, which will create
+# a console window useful for debugging.
+#
+# NOTE: When NOT running in any sort of debugging mode,
+# 'print' statements may fail, as sys.stdout is not valid!!!
+
+#
+# Usage:
+#  wpython.exe LocalServer.py clsid [, clsid]
+import sys
+sys.coinit_flags = 2
+import pythoncom
+import win32api
+from win32com.server import factory
+
+# Message shown (via MessageBox) when no CLSIDs are given on the command line.
+usage = """\
+Invalid command line arguments
+
+This program provides LocalServer COM support
+for Python COM objects.
+
+It is typically run automatically by COM, passing as arguments
+The ProgID or CLSID of the Python Server(s) to be hosted
+"""
+
+def serve(clsids):
+	infos = factory.RegisterClassFactories(clsids)
+
+	pythoncom.EnableQuitMessage(win32api.GetCurrentThreadId())	
+	pythoncom.CoResumeClassObjects()
+
+	pythoncom.PumpMessages()
+	
+	factory.RevokeClassFactories( infos )
+	
+	pythoncom.CoUninitialize()
+	
+def main():
+	if len(sys.argv)==1:
+		win32api.MessageBox(0, usage, "Python COM Server")
+		sys.exit(1)
+	serve(sys.argv[1:])
+
+# Run as a script - typically launched by COM via pythonw.exe.
+if __name__=='__main__':
+	main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/policy.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/policy.py
new file mode 100644
index 0000000..a3bae2dc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/policy.py
@@ -0,0 +1,756 @@
+"""Policies 
+
+Note that Dispatchers are now implemented in "dispatcher.py", but
+are still documented here.
+
+Policies
+
+ A policy is an object which manages the interaction between a public 
+ Python object, and COM .  In simple terms, the policy object is the 
+ object which is actually called by COM, and it invokes the requested 
+ method, fetches/sets the requested property, etc.  See the 
+ @win32com.server.policy.CreateInstance@ method for a description of
+ how a policy is specified or created.
+
+ Exactly how a policy determines which underlying object method/property 
+ is obtained is up to the policy.  A few policies are provided, but you 
+ can build your own.  See each policy class for a description of how it 
+ implements its policy.
+
+ There is a policy that allows the object to specify exactly which 
+ methods and properties will be exposed.  There is also a policy that 
+ will dynamically expose all Python methods and properties - even those 
+ added after the object has been instantiated.
+
+Dispatchers
+
+ A Dispatcher is a level in front of a Policy.  A dispatcher is the 
+ thing which actually receives the COM calls, and passes them to the 
+ policy object (which in turn somehow does something with the wrapped 
+ object).
+
+ It is important to note that a policy does not need to have a dispatcher.
+ A dispatcher has the same interface as a policy, and simply steps in its 
+ place, delegating to the real policy.  The primary use for a Dispatcher 
+ is to support debugging when necessary, but without imposing overheads 
+ when not (ie, by not using a dispatcher at all).
+
+ There are a few dispatchers provided - "tracing" dispatchers which simply 
+ prints calls and args (including a variation which uses 
+ win32api.OutputDebugString), and a "debugger" dispatcher, which can 
+ invoke the debugger when necessary.
+
+Error Handling
+
+ It is important to realise that the caller of these interfaces may
+ not be Python.  Therefore, general Python exceptions and tracebacks aren't 
+ much use.
+
+ In general, there is an Exception class that should be raised, to allow 
+ the framework to extract rich COM type error information.
+
+ The general rule is that the **only** exception returned from Python COM 
+ Server code should be an Exception instance.  Any other Python exception 
+ should be considered an implementation bug in the server (if not, it 
+ should be handled, and an appropriate Exception instance raised).  Any 
+ other exception is considered "unexpected", and a dispatcher may take 
+ special action (see Dispatchers above)
+
+ Occasionally, the implementation will raise the policy.error error.  
+ This usually means there is a problem in the implementation that the 
+ Python programmer should fix.
+
+ For example, if policy is asked to wrap an object which it can not 
+ support (because, eg, it does not provide _public_methods_ or _dynamic_) 
+ then policy.error will be raised, indicating it is a Python programmers 
+ problem, rather than a COM error.
+ 
+"""
+__author__ = "Greg Stein and Mark Hammond"
+
+import win32api
+import winerror
+import string
+import sys
+import types
+import win32con, pythoncom
+
+#Import a few important constants to speed lookups.
+from pythoncom import \
+	DISPATCH_METHOD, DISPATCH_PROPERTYGET, DISPATCH_PROPERTYPUT, DISPATCH_PROPERTYPUTREF, \
+	DISPID_UNKNOWN, DISPID_VALUE, DISPID_PROPERTYPUT, DISPID_NEWENUM, \
+	DISPID_EVALUATE, DISPID_CONSTRUCTOR, DISPID_DESTRUCTOR, DISPID_COLLECT,DISPID_STARTENUM
+
+S_OK = 0
+
+# Few more globals to speed things.
+from pywintypes import UnicodeType
+IDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch]
+IUnknownType = pythoncom.TypeIIDs[pythoncom.IID_IUnknown]
+core_has_unicode = hasattr(__builtins__, "unicode")
+
+from exception import COMException
+# Module "string exception" - raised for Python-programmer problems
+# (as opposed to COM errors, which are reported via COMException).
+error = __name__ + " error"
+
+# HKEY_CLASSES_ROOT value-name templates, filled in with the object's CLSID.
+regSpec = 'CLSID\\%s\\PythonCOM'
+regPolicy = 'CLSID\\%s\\PythonCOMPolicy'
+regDispatcher = 'CLSID\\%s\\PythonCOMDispatcher'
+regAddnPath = 'CLSID\\%s\\PythonCOMPath'
+
+# exc_info doesnt appear 'till Python 1.5, but we now have other 1.5 deps!
+from sys import exc_info
+
+def CreateInstance(clsid, reqIID):
+  """Create a new instance of the specified IID
+
+  The COM framework **always** calls this function to create a new 
+  instance for the specified CLSID.  This function looks up the
+  registry for the name of a policy, creates the policy, and asks the
+  policy to create the specified object by calling the _CreateInstance_ method.
+  
+  Exactly how the policy creates the instance is up to the policy.  See the
+  specific policy documentation for more details.
+  """
+  # First see if sys.path should have something on it.
+  try:
+    addnPaths = string.split(win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT,
+                                      regAddnPath % clsid),';')
+    for newPath in addnPaths:
+      if newPath not in sys.path:
+        sys.path.insert(0, newPath)
+  except win32api.error:
+    pass
+  # Resolve the policy for this CLSID (falling back to DefaultPolicy).
+  try:
+    policy = win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT,
+                                      regPolicy % clsid)
+    policy = resolve_func(policy)
+  except win32api.error:
+    policy = DefaultPolicy
+
+  # Resolve an optional dispatcher which wraps the policy (eg, for tracing).
+  try:
+    dispatcher = win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT,
+                                      regDispatcher % clsid)
+    if dispatcher: dispatcher = resolve_func(dispatcher)
+  except win32api.error:
+    dispatcher = None
+    
+  # clear exception information
+  sys.exc_type = sys.exc_value = sys.exc_traceback = None # sys.clearexc() appears in 1.5?
+
+  if dispatcher:
+    retObj = dispatcher(policy, None)
+  else:
+    retObj = policy(None)
+  return retObj._CreateInstance_(clsid, reqIID)
+
+class BasicWrapPolicy:
+  """The base class of policies.
+
+     Normally not used directly (use a child class, instead)
+
+     This policy assumes we are wrapping another object
+     as the COM server.  This supports the delegation of the core COM entry points
+     to either the wrapped object, or to a child class.
+
+     This policy supports the following special attributes on the wrapped object
+
+     _query_interface_ -- A handler which can respond to the COM 'QueryInterface' call.
+     _com_interfaces_ -- An optional list of IIDs which the interface will assume are
+         valid for the object.
+     _invoke_ -- A handler which can respond to the COM 'Invoke' call.  If this attribute
+         is not provided, then the default policy implementation is used.  If this attribute
+         does exist, it is responsible for providing all required functionality - ie, the
+         policy _invoke_ method is not invoked at all (and nor are you able to call it!)
+     _getidsofnames_ -- A handler which can respond to the COM 'GetIDsOfNames' call.  If this attribute
+         is not provided, then the default policy implementation is used.  If this attribute
+         does exist, it is responsible for providing all required functionality - ie, the
+         policy _getidsofnames_ method is not invoked at all (and nor are you able to call it!)
+
+     IDispatchEx functionality:
+
+     _invokeex_ -- Very similar to _invoke_, except slightly different arguments are used.
+         And the result is just the _real_ result (rather than the (hresult, argErr, realResult)
+         tuple that _invoke_ uses.	
+         This is the new, prefered handler (the default _invoke_ handler simply called _invokeex_)
+     _getdispid_ -- Very similar to _getidsofnames_, except slightly different arguments are used,
+         and only 1 property at a time can be fetched (which is all we support in getidsofnames anyway!)
+         This is the new, prefered handler (the default _invoke_ handler simply called _invokeex_)
+     _getnextdispid_- uses self._name_to_dispid_ to enumerate the DISPIDs
+  """
+  def __init__(self, object):
+    """Initialise the policy object
+
+       Params:
+
+       object -- The object to wrap.  May be None *iff* @BasicWrapPolicy._CreateInstance_@ will be
+       called immediately after this to setup a brand new object
+    """
+    if object is not None:
+      self._wrap_(object)
+
+  def _CreateInstance_(self, clsid, reqIID):
+    """Creates a new instance of a **wrapped** object
+
+       This method looks up a "@win32com.server.policy.regSpec@" % clsid entry
+       in the registry (using @DefaultPolicy@)
+    """
+    try:
+      classSpec = win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT,
+                                       regSpec % clsid)
+    except win32api.error:
+      raise error, "The object is not correctly registered - %s key can not be read" % (regSpec % clsid)
+    myob = call_func(classSpec)
+    self._wrap_(myob)
+    try:
+      return pythoncom.WrapObject(self, reqIID)
+    except pythoncom.com_error, (hr, desc, exc, arg):
+      from win32com.util import IIDToInterfaceName
+      desc = "The object '%r' was created, but does not support the " \
+             "interface '%s'(%s): %s" \
+             % (myob, IIDToInterfaceName(reqIID), reqIID, desc)
+      raise pythoncom.com_error, (hr, desc, exc, arg)
+
+
+  def _wrap_(self, object):
+    """Wraps up the specified object.
+
+       This function keeps a reference to the passed
+       object, and may interogate it to determine how to respond to COM requests, etc.
+    """
+    # We "clobber" certain of our own methods with ones
+    # provided by the wrapped object, iff they exist.
+    self._name_to_dispid_ = { }
+    ob = self._obj_ = object
+    if hasattr(ob, '_query_interface_'):
+      self._query_interface_ = ob._query_interface_
+
+    if hasattr(ob, '_invoke_'):
+      self._invoke_ = ob._invoke_
+
+    if hasattr(ob, '_invokeex_'):
+      self._invokeex_ = ob._invokeex_
+
+    if hasattr(ob, '_getidsofnames_'):
+      self._getidsofnames_ = ob._getidsofnames_
+
+    if hasattr(ob, '_getdispid_'):
+      self._getdispid_ = ob._getdispid_
+
+    # Allow for override of certain special attributes.
+    if hasattr(ob, '_com_interfaces_'):
+      self._com_interfaces_ = []
+      # Allow interfaces to be specified by name.
+      for i in ob._com_interfaces_:
+        if type(i) != pythoncom.PyIIDType:
+          # Prolly a string!
+          if i[0] != "{":
+            i = pythoncom.InterfaceNames[i]
+          else:
+            i = pythoncom.MakeIID(i)
+        self._com_interfaces_.append(i)
+    else:
+      self._com_interfaces_ = [ ]
+
+  # "QueryInterface" handling.
+  def _QueryInterface_(self, iid):
+    """The main COM entry-point for QueryInterface. 
+
+       This checks the _com_interfaces_ attribute and if the interface is not specified 
+       there, it calls the derived helper _query_interface_
+    """
+    if iid in self._com_interfaces_:
+      return 1
+    return self._query_interface_(iid)
+
+  def _query_interface_(self, iid):
+    """Called if the object does not provide the requested interface in _com_interfaces,
+       and does not provide a _query_interface_ handler.
+
+       Returns a result to the COM framework indicating the interface is not supported.
+    """
+    return 0
+
+  # "Invoke" handling.
+  def _Invoke_(self, dispid, lcid, wFlags, args):
+    """The main COM entry-point for Invoke.  
+
+       This calls the _invoke_ helper.
+    """
+    #Translate a possible string dispid to real dispid.
+    if type(dispid) == type(""):
+      try:
+        dispid = self._name_to_dispid_[string.lower(dispid)]
+      except KeyError:
+        raise COMException(scode = winerror.DISP_E_MEMBERNOTFOUND, desc="Member not found")
+    return self._invoke_(dispid, lcid, wFlags, args)
+ 
+  def _invoke_(self, dispid, lcid, wFlags, args):
+    # Delegates to the _invokeex_ implementation.  This allows
+    # a custom policy to define _invokeex_, and automatically get _invoke_ too.
+    return S_OK, -1, self._invokeex_(dispid, lcid, wFlags, args, None, None)
+
+  # "GetIDsOfNames" handling.
+  def _GetIDsOfNames_(self, names, lcid):
+    """The main COM entry-point for GetIDsOfNames.
+
+       This checks the validity of the arguments, and calls the _getidsofnames_ helper.
+    """
+    if len(names) > 1:
+      raise COMException(scode = winerror.DISP_E_INVALID, desc="Cannot support member argument names")
+    return self._getidsofnames_(names, lcid)
+
+  def _getidsofnames_(self, names, lcid):
+    ### note: lcid is being ignored...
+    return (self._getdispid_(names[0], 0), )
+
+  # IDispatchEx support for policies.  Most of the IDispathEx functionality
+  # by default will raise E_NOTIMPL.  Thus it is not necessary for derived
+  # policies to explicitely implement all this functionality just to not implement it!
+
+  def _GetDispID_(self, name, fdex):
+    return self._getdispid_(name, fdex)
+
+  def _getdispid_(self, name, fdex):
+    try:
+      ### TODO - look at the fdex flags!!!
+      return self._name_to_dispid_[string.lower(str(name))]
+    except KeyError:
+      raise COMException(scode = winerror.DISP_E_UNKNOWNNAME)
+
+  # "InvokeEx" handling.
+  def _InvokeEx_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider):
+    """The main COM entry-point for InvokeEx.  
+
+       This calls the _invokeex_ helper.
+    """
+    #Translate a possible string dispid to real dispid.
+    if type(dispid) == type(""):
+      try:
+        dispid = self._name_to_dispid_[string.lower(dispid)]
+      except KeyError:
+        raise COMException(scode = winerror.DISP_E_MEMBERNOTFOUND, desc="Member not found")
+    return self._invokeex_(dispid, lcid, wFlags, args, kwargs, serviceProvider)
+ 
+  def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider):
+    """A stub for _invokeex_ - should never be called.  
+ 
+       Simply raises an exception.
+    """
+    # Base classes should override this method (and not call the base)
+    raise error, "This class does not provide _invokeex_ semantics"
+
+  def _DeleteMemberByName_(self, name, fdex):
+    return self._deletememberbyname_(name, fdex)
+  def _deletememberbyname_(self, name, fdex):
+    raise COMException(scode = winerror.E_NOTIMPL)
+
+  def _DeleteMemberByDispID_(self, id):
+    return self._deletememberbydispid(id)
+  def _deletememberbydispid_(self, id):
+    raise COMException(scode = winerror.E_NOTIMPL)
+
+  def _GetMemberProperties_(self, id, fdex):
+    return self._getmemberproperties_(id, fdex)
+  def _getmemberproperties_(self, id, fdex):
+    raise COMException(scode = winerror.E_NOTIMPL)
+
+  def _GetMemberName_(self, dispid):
+    return self._getmembername_(dispid)
+  def _getmembername_(self, dispid):
+    raise COMException(scode = winerror.E_NOTIMPL)
+
+  def _GetNextDispID_(self, fdex, dispid):
+    return self._getnextdispid_(fdex, dispid)
+  def _getnextdispid_(self, fdex, dispid):
+    ids = self._name_to_dispid_.values()
+    ids.sort()
+    if DISPID_STARTENUM in ids: ids.remove(DISPID_STARTENUM)
+    if dispid==DISPID_STARTENUM:
+      return ids[0]
+    else:
+      try:
+        return ids[ids.index(dispid)+1]
+      except ValueError: # dispid not in list?
+        raise COMException(scode = winerror.E_UNEXPECTED)
+      except IndexError: # No more items
+        raise COMException(scode = winerror.S_FALSE)
+
+  def _GetNameSpaceParent_(self):
+    return self._getnamespaceparent()
+  def _getnamespaceparent_(self):
+    raise COMException(scode = winerror.E_NOTIMPL)
+
+
+class MappedWrapPolicy(BasicWrapPolicy):
+  """Wraps an object using maps to do its magic
+
+     This policy wraps up a Python object, using a number of maps
+     which translate from a Dispatch ID and flags, into an object to call/getattr, etc.
+
+     It is the responsibility of derived classes to determine exactly how the
+     maps are filled (ie, the derived classes determine the map filling policy.
+
+     This policy supports the following special attributes on the wrapped object
+
+     _dispid_to_func_/_dispid_to_get_/_dispid_to_put_ -- These are dictionaries
+       (keyed by integer dispid, values are string attribute names) which the COM
+       implementation uses when it is processing COM requests.  Note that the implementation
+       uses this dictionary for its own purposes - not a copy - which means the contents of 
+       these dictionaries will change as the object is used.
+
+  """
+  def _wrap_(self, object):
+    BasicWrapPolicy._wrap_(self, object)
+    ob = self._obj_
+    if hasattr(ob, '_dispid_to_func_'):
+      self._dispid_to_func_ = ob._dispid_to_func_
+    else:
+      self._dispid_to_func_ = { }
+    if hasattr(ob, '_dispid_to_get_'):
+      self._dispid_to_get_ = ob._dispid_to_get_
+    else:
+      self._dispid_to_get_ = { }
+    if hasattr(ob, '_dispid_to_put_'):
+      self._dispid_to_put_ = ob._dispid_to_put_
+    else:
+      self._dispid_to_put_ = { }
+
+  def _getmembername_(self, dispid):
+    if self._dispid_to_func_.has_key(dispid):
+      return self._dispid_to_func_[dispid]
+    elif self._dispid_to_get_.has_key(dispid):
+      return self._dispid_to_get_[dispid]
+    elif self._dispid_to_put_.has_key(dispid):
+      return self._dispid_to_put_[dispid]
+    else:
+      raise COMException(scode = winerror.DISP_E_MEMBERNOTFOUND)
+
+class DesignatedWrapPolicy(MappedWrapPolicy):
+  """A policy which uses a mapping to link functions and dispid
+     
+     A MappedWrappedPolicy which allows the wrapped object to specify, via certain
+     special named attributes, exactly which methods and properties are exposed.
+
+     All a wrapped object need do is provide the special attributes, and the policy
+     will handle everything else.
+
+     Attributes:
+
+     _public_methods_ -- Required, unless a typelib GUID is given -- A list
+                  of strings, which must be the names of methods the object
+                  provides.  These methods will be exposed and callable
+                  from other COM hosts.
+     _public_attrs_ A list of strings, which must be the names of attributes on the object.
+                  These attributes will be exposed and readable and possibly writeable from other COM hosts.
+     _readonly_attrs_ -- A list of strings, which must also appear in _public_attrs.  These
+                  attributes will be readable, but not writable, by other COM hosts.
+     _value_ -- A method that will be called if the COM host requests the "default" method
+                  (ie, calls Invoke with dispid==DISPID_VALUE)
+     _NewEnum -- A method that will be called if the COM host requests an enumerator on the
+                  object (ie, calls Invoke with dispid==DISPID_NEWENUM.)
+                  It is the responsibility of the method to ensure the returned
+                  object conforms to the required Enum interface.
+
+    _typelib_guid_ -- The GUID of the typelibrary with interface definitions we use.
+    _typelib_version_ -- A tuple of (major, minor) with a default of 1,1
+    _typelib_lcid_ -- The LCID of the typelib, default = LOCALE_USER_DEFAULT
+
+     _Evaluate -- Dunno what this means, except the host has called Invoke with dispid==DISPID_EVALUATE!
+                  See the COM documentation for details.
+  """
  def _wrap_(self, ob):
    """Wrap 'ob', building the dispid<->name maps from its declared attributes.

    Population order (later entries never overwrite earlier dispids):
      1. "universal" (typelib-described) interfaces named in _com_interfaces_,
      2. the _dispid_to_*_ maps set up by MappedWrapPolicy._wrap_,
      3. the reserved _value_/_NewEnum/_Evaluate methods,
      4. _public_attrs_ then _public_methods_ (funcs take precedence over
         attrs, so attrs are installed first - see comment below).

    Raises 'error' if the object supplies neither _public_methods_ nor
    _typelib_guid_.
    """
    # If we have nominated universal interfaces to support, load them now
    tlb_guid = getattr(ob, '_typelib_guid_', None)
    if tlb_guid is not None:
      tlb_major, tlb_minor = getattr(ob, '_typelib_version_', (1,0))
      tlb_lcid = getattr(ob, '_typelib_lcid_', 0)
      from win32com import universal
      # XXX - what if the user wants to implement interfaces from multiple
      # typelibs?
      # Filter out all 'normal' IIDs (ie, IID objects and strings starting with {
      interfaces = [i for i in getattr(ob, '_com_interfaces_', [])
                    if type(i) != pythoncom.PyIIDType and not i.startswith("{")]
      universal_data = universal.RegisterInterfaces(tlb_guid, tlb_lcid,
                                       tlb_major, tlb_minor, interfaces)
    else:
      universal_data = []
    MappedWrapPolicy._wrap_(self, ob)
    if not hasattr(ob, '_public_methods_') and not hasattr(ob, "_typelib_guid_"):
      raise error, "Object does not support DesignatedWrapPolicy, as it does not have either _public_methods_ or _typelib_guid_ attributes."

    # Copy existing _dispid_to_func_ entries to _name_to_dispid_
    for dispid, name in self._dispid_to_func_.items():
      self._name_to_dispid_[string.lower(name)]=dispid
    for dispid, name in self._dispid_to_get_.items():
      self._name_to_dispid_[string.lower(name)]=dispid
    for dispid, name in self._dispid_to_put_.items():
      self._name_to_dispid_[string.lower(name)]=dispid

    # Patch up the universal stuff.
    for dispid, invkind, name in universal_data:
      self._name_to_dispid_[string.lower(name)]=dispid
      if invkind == DISPATCH_METHOD:
          self._dispid_to_func_[dispid] = name
      elif invkind == DISPATCH_PROPERTYPUT:
          self._dispid_to_put_[dispid] = name
      elif invkind == DISPATCH_PROPERTYGET:
          self._dispid_to_get_[dispid] = name
      else:
        raise ValueError, "unexpected invkind: %d (%s)" % (invkind,name)

    # look for reserved methods
    if hasattr(ob, '_value_'):
      # _value_ is both the default property get and the default property put.
      self._dispid_to_get_[DISPID_VALUE] = '_value_'
      self._dispid_to_put_[DISPID_PROPERTYPUT] = '_value_'
    if hasattr(ob, '_NewEnum'):
      self._name_to_dispid_['_newenum'] = DISPID_NEWENUM
      self._dispid_to_func_[DISPID_NEWENUM] = '_NewEnum'
    if hasattr(ob, '_Evaluate'):
      self._name_to_dispid_['_evaluate'] = DISPID_EVALUATE
      self._dispid_to_func_[DISPID_EVALUATE] = '_Evaluate'

    # Allocate user dispids starting above 999, skipping any already taken.
    dispid = self._allocnextdispid(999)
    # note: funcs have precedence over attrs (install attrs first)
    if hasattr(ob, '_public_attrs_'):
      if hasattr(ob, '_readonly_attrs_'):
        readonly = ob._readonly_attrs_
      else:
        readonly = [ ]
      for name in ob._public_attrs_:
        self._name_to_dispid_[string.lower(name)] = dispid
        self._dispid_to_get_[dispid] = name
        if name not in readonly:
          self._dispid_to_put_[dispid] = name
        dispid = self._allocnextdispid(dispid)
    for name in getattr(ob, "_public_methods_", []):
      if not self._name_to_dispid_.has_key(string.lower(name)):
         self._name_to_dispid_[string.lower(name)] = dispid
         self._dispid_to_func_[dispid] = name
         dispid = self._allocnextdispid(dispid)
    self._typeinfos_ = None # load these on demand.
+
+  def _build_typeinfos_(self):
+    # Can only ever be one for now.
+    tlb_guid = getattr(self._obj_, '_typelib_guid_', None)
+    if tlb_guid is None:
+      return []
+    tlb_major, tlb_minor = getattr(self._obj_, '_typelib_version_', (1,0))
+    tlb = pythoncom.LoadRegTypeLib(tlb_guid, tlb_major, tlb_minor)
+    typecomp = tlb.GetTypeComp()
+    # Not 100% sure what semantics we should use for the default interface.
+    # Look for the first name in _com_interfaces_ that exists in the typelib.
+    for iname in self._obj_._com_interfaces_:
+      try:
+        type_info, type_comp = typecomp.BindType(iname)
+        if type_info is not None:
+          return [type_info]
+      except pythoncom.com_error:
+        pass
+    return []
+
+  def _GetTypeInfoCount_(self):
+    if self._typeinfos_ is None:
+      self._typeinfos_ = self._build_typeinfos_()
+    return len(self._typeinfos_)
+  
+  def _GetTypeInfo_(self, index, lcid):
+    if self._typeinfos_ is None:
+      self._typeinfos_ = self._build_typeinfos_()
+    if index < 0 or index >= len(self._typeinfos_):
+      raise COMException(scode=winerror.DISP_E_BADINDEX)
+    return 0, self._typeinfos_[index]
+
+  def _allocnextdispid(self, last_dispid):
+      while 1:
+        last_dispid = last_dispid + 1
+        if not self._dispid_to_func_.has_key(last_dispid) and \
+           not self._dispid_to_get_.has_key(last_dispid) and \
+           not self._dispid_to_put_.has_key(last_dispid):
+              return last_dispid
+
  def _invokeex_(self, dispid, lcid, wFlags, args, kwArgs, serviceProvider):
    """Dispatch an invoke to the wrapped object.

    Methods are looked up in _dispid_to_func_, property gets in
    _dispid_to_get_, property puts in _dispid_to_put_.  Raises
    COMException(DISP_E_MEMBERNOTFOUND) when the dispid is unknown for the
    requested operation, and E_INVALIDARG for unrecognised wFlags.
    """
    ### note: lcid is being ignored...

    if wFlags & DISPATCH_METHOD:
      try:
        funcname = self._dispid_to_func_[dispid]
      except KeyError:
        # Not a method - fall through to the PROPERTYGET handling below
        # when that flag is also set; otherwise it is an error.
        if not wFlags & DISPATCH_PROPERTYGET:
          raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)	# not found
      else:
        try:
          func = getattr(self._obj_, funcname)
        except AttributeError:
          # May have a dispid, but that doesnt mean we have the function!
          raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)
        # Should check callable here
        try:
            return func(*args)
        except TypeError, v:
            # Particularly nasty is "wrong number of args" type error
            # This helps you see what 'func' and 'args' actually is
            if str(v).find("arguments")>=0:
                print "** TypeError %s calling function %r(%r)" % (v, func, args)
            raise

    if wFlags & DISPATCH_PROPERTYGET:
      try:
        name = self._dispid_to_get_[dispid]
      except KeyError:
          raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)	# not found
      retob = getattr(self._obj_, name)
      if type(retob)==types.MethodType: # a method as a property - call it.
        retob = retob(*args)
      return retob

    if wFlags & (DISPATCH_PROPERTYPUT | DISPATCH_PROPERTYPUTREF): ### correct?
      try:
        name = self._dispid_to_put_[dispid]
      except KeyError:
        raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)	# read-only
      # If we have a method of that name (ie, a property get function), and
      # we have an equiv. property set function, use that instead.
      if type(getattr(self._obj_, name, None)) == types.MethodType and \
         type(getattr(self._obj_, "Set" + name, None)) == types.MethodType:
        fn = getattr(self._obj_, "Set" + name)
        fn( *args )
      else:
        # just set the attribute
        setattr(self._obj_, name, args[0])
      return

    raise COMException(scode=winerror.E_INVALIDARG, desc="invalid wFlags")
+
class EventHandlerPolicy(DesignatedWrapPolicy):
    """The default policy used by event handlers in the win32com.client package.

    In addition to the base policy, this provides argument conversion semantics for
    params
      * dispatch params are converted to dispatch objects.
      * Unicode objects are converted to strings (1.5.2 and earlier)

    NOTE: Later, we may allow the object to override this process??
    """
    def _transform_args_(self, args, kwArgs, dispid, lcid, wFlags, serviceProvider):
        # Convert the raw COM arguments into friendlier Python equivalents
        # before they reach the user's event handler.
        ret = []
        for arg in args:
            arg_type = type(arg)
            if arg_type == IDispatchType:
                import win32com.client
                arg = win32com.client.Dispatch(arg)
            elif arg_type == IUnknownType:
                # Attempt to upgrade an IUnknown to IDispatch so the handler
                # gets a usable object.
                try:
                    import win32com.client
                    arg = win32com.client.Dispatch(arg.QueryInterface(pythoncom.IID_IDispatch))
                except pythoncom.error:
                    pass # Keep it as IUnknown
            elif not core_has_unicode and arg_type==UnicodeType:
                # Older cores have no native unicode support - downgrade to str.
                arg = str(arg)
            ret.append(arg)
        return tuple(ret), kwArgs
    def _invokeex_(self, dispid, lcid, wFlags, args, kwArgs, serviceProvider):
        # transform the args, then delegate to the standard dispatch logic.
        args, kwArgs = self._transform_args_(args, kwArgs, dispid, lcid, wFlags, serviceProvider)
        return DesignatedWrapPolicy._invokeex_( self, dispid, lcid, wFlags, args, kwArgs, serviceProvider)
+
class DynamicPolicy(BasicWrapPolicy):
  """A policy which dynamically (ie, at run-time) determines public interfaces.

     A dynamic policy is used to dynamically dispatch methods and properties to the
     wrapped object.  The list of objects and properties does not need to be known in
     advance, and methods or properties added to the wrapped object after construction
     are also handled.

     The wrapped object must provide the following attributes:

     _dynamic_ -- A method that will be called whenever an invoke on the object
            is called.  The method is called with the name of the underlying method/property
            (ie, the mapping of dispid to/from name has been resolved.)  This name property
            may also be '_value_' to indicate the default, and '_NewEnum' to indicate a new
            enumerator is requested.

  """
  def _wrap_(self, object):
    """Wrap the object, which must expose a _dynamic_ method."""
    BasicWrapPolicy._wrap_(self, object)
    if not hasattr(self._obj_, '_dynamic_'):
      raise error, "Object does not support Dynamic COM Policy"
    # Dispids below _min_dynamic_ are reserved; new names get sequential
    # ids allocated from here.
    self._next_dynamic_ = self._min_dynamic_ = 1000
    self._dyn_dispid_to_name_ = {DISPID_VALUE:'_value_', DISPID_NEWENUM:'_NewEnum' }

  def _getdispid_(self, name, fdex):
    """Return the dispid for 'name', allocating a new one on first use."""
    # TODO - Look at fdex flags.
    # TODO - Remove str() of Unicode name param.
    lname = string.lower(str(name))
    try:
      return self._name_to_dispid_[lname]
    except KeyError:
      dispid = self._next_dynamic_ = self._next_dynamic_ + 1
      self._name_to_dispid_[lname] = dispid
      self._dyn_dispid_to_name_[dispid] = name # Keep case in this map...
      return dispid

  def _invoke_(self, dispid, lcid, wFlags, args):
    # Delegate to _invokeex_; -1 indicates no specific argument was in error.
    return S_OK, -1, self._invokeex_(dispid, lcid, wFlags, args, None, None)

  def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider):
    """Resolve dispid back to a name and hand off to the object's _dynamic_."""
    ### note: lcid is being ignored...
    ### note: kwargs is being ignored...
    ### note: serviceProvider is being ignored...
    ### there might be assigned DISPID values to properties, too...
    ### TODO - Remove the str() of the Unicode argument
    try:
      name = str(self._dyn_dispid_to_name_[dispid])
    except KeyError:
      raise COMException(scode = winerror.DISP_E_MEMBERNOTFOUND, desc="Member not found")
    return self._obj_._dynamic_(name, lcid, wFlags, args)
+
+
+DefaultPolicy = DesignatedWrapPolicy
+
def resolve_func(spec):
  """Resolve a function by name.

  spec -- either "module.function" (the module part may be a dotted
          package path), or a bare name looked up in this module's globals.

  Returns the callable object (ie, the function itself).  Raises KeyError
  for an unknown bare name, or ImportError/AttributeError when the module
  or attribute can not be resolved.
  """
  try:
    # str.rindex raises ValueError when spec contains no "." at all,
    # which triggers the bare-name fallback below.  (The deprecated
    # string.rindex module function did the same.)
    idx = spec.rindex(".")
    mname = spec[:idx]
    fname = spec[idx+1:]
    # Dont attempt to optimize by looking in sys.modules,
    # as another thread may also be performing the import - this
    # way we take advantage of the built-in import lock.
    module = _import_module(mname)
    return getattr(module, fname)
  except ValueError: # No "." in name - assume in this module
    return globals()[spec]
+
def call_func(spec, *args):
  """Call a function specified by name.

  'spec' names the function as "module.function" (see resolve_func);
  returns whatever the function returns.
  """
  fn = resolve_func(spec)
  return fn(*args)
+
def _import_module(mname):
  """Import and return the module named 'mname', like the 'import' statement.

  Having this function is much nicer for importing arbitrary modules than
  using the 'exec' keyword.  It is more efficient and obvious to the reader.
  """
  # __import__ returns the top-level package (eg "win32com" for
  # "win32com.a.b.c"), so fetch the fully-dotted module from sys.modules.
  __import__(mname)
  return sys.modules[mname]
+
+#######
+#
+# Temporary hacks until all old code moves.
+#
+# These have been moved to a new source file, but some code may
+# still reference them here.  These will end up being removed.
+try:
+  from dispatcher import DispatcherTrace, DispatcherWin32trace
+except ImportError: # Quite likely a frozen executable that doesnt need dispatchers
+  pass
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/register.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/register.py
new file mode 100644
index 0000000..bc74d9ebd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/register.py
@@ -0,0 +1,503 @@
+"""Utilities for registering objects.
+
+This module contains utility functions to register Python objects as
+valid COM Servers.  The RegisterServer function provides all information
+necessary to allow the COM framework to respond to a request for a COM object,
+construct the necessary Python object, and dispatch COM events.
+
+"""
+import sys
+import win32api
+import win32con
+import pythoncom
+import winerror
+import os
+
+CATID_PythonCOMServer = "{B3EF80D0-68E2-11D0-A689-00C04FD658FF}"
+
def _set_subkeys(keyName, valueDict, base=win32con.HKEY_CLASSES_ROOT):
  """Create 'keyName' under 'base' and write each (name, value) pair in
  valueDict as a REG_SZ named value, always closing the key."""
  hkey = win32api.RegCreateKey(base, keyName)
  try:
    for valueName, value in valueDict.items():
      win32api.RegSetValueEx(hkey, valueName, None, win32con.REG_SZ, value)
  finally:
    win32api.RegCloseKey(hkey)
+			
def _set_string(path, value, base=win32con.HKEY_CLASSES_ROOT):
  "Set the default (unnamed) string value of a registry key."
  win32api.RegSetValue(base, path, win32con.REG_SZ, value)
+
def _get_string(path, base=win32con.HKEY_CLASSES_ROOT):
  "Return the default string value of a registry key, or None if missing."
  try:
    result = win32api.RegQueryValue(base, path)
  except win32api.error:
    result = None
  return result
+
def _remove_key(path, base=win32con.HKEY_CLASSES_ROOT):
  "Remove a key from the registry, ignoring a key that does not exist."

  try:
    win32api.RegDeleteKey(base, path)
  except win32api.error, (code, fn, msg):
    # A missing key is fine; any other registry error is re-raised.
    if code != winerror.ERROR_FILE_NOT_FOUND:
      raise win32api.error, (code, fn, msg)
+
def recurse_delete_key(path, base=win32con.HKEY_CLASSES_ROOT):
  """Recursively delete registry keys.

  This is needed since you can't blast a key when subkeys exist.
  A missing key (at any level) is silently ignored.
  """
  try:
    h = win32api.RegOpenKey(base, path)
  except win32api.error, (code, fn, msg):
    if code != winerror.ERROR_FILE_NOT_FOUND:
      raise win32api.error, (code, fn, msg)
  else:
    # parent key found and opened successfully. do some work, making sure
    # to always close the thing (error or no).
    try:
      # remove all of the subkeys.  Always enumerate index 0 - deleting a
      # subkey shifts the remaining ones down.
      while 1:
        try:
          subkeyname = win32api.RegEnumKey(h, 0)
        except win32api.error, (code, fn, msg):
          if code != winerror.ERROR_NO_MORE_ITEMS:
            raise win32api.error, (code, fn, msg)
          break
        recurse_delete_key(path + '\\' + subkeyname, base)

      # remove the parent key
      _remove_key(path, base)
    finally:
      win32api.RegCloseKey(h)
+
def _cat_registrar():
  """Return the standard component-categories manager as an ICatRegister."""
  return pythoncom.CoCreateInstance(pythoncom.CLSID_StdComponentCategoriesMgr,
                                    None,
                                    pythoncom.CLSCTX_INPROC_SERVER,
                                    pythoncom.IID_ICatRegister)
+    
+def _find_localserver_exe(mustfind):
+  if not sys.platform.startswith("win32"):
+    return sys.executable
+  if pythoncom.__file__.find("_d") < 0:
+    exeBaseName = "pythonw.exe"
+  else:
+    exeBaseName = "pythonw_d.exe"
+  # First see if in the same directory as this .EXE
+  exeName = os.path.join( os.path.split(sys.executable)[0], exeBaseName )
+  if not os.path.exists(exeName):
+    # See if in our sys.prefix directory
+    exeName = os.path.join( sys.prefix, exeBaseName )
+  if not os.path.exists(exeName):
+    # See if in our sys.prefix/pcbuild directory (for developers)
+    exeName = os.path.join( sys.prefix, "PCbuild",  exeBaseName )
+  if not os.path.exists(exeName):
+    # See if the registry has some info.
+    try:
+      key = "SOFTWARE\\Python\\PythonCore\\%s\\InstallPath" % sys.winver
+      path = win32api.RegQueryValue( win32con.HKEY_LOCAL_MACHINE, key )
+      exeName = os.path.join( path, exeBaseName )
+    except (AttributeError,win32api.error):
+      pass
+  if not os.path.exists(exeName):
+    if mustfind:
+      raise RuntimeError, "Can not locate the program '%s'" % exeBaseName
+    return None
+  return exeName
+
+def _find_localserver_module():
+  import win32com.server
+  path = win32com.server.__path__[0]
+  baseName = "localserver"
+  pyfile = os.path.join(path, baseName + ".py")
+  try:
+    os.stat(pyfile)
+  except os.error:
+    # See if we have a compiled extension
+    if __debug__:
+      ext = ".pyc"
+    else:
+      ext = ".pyo"
+    pyfile = os.path.join(path, baseName + ext)
+    try:
+      os.stat(pyfile)
+    except os.error:
+      raise RuntimeError, "Can not locate the Python module 'win32com.server.%s'" % baseName
+  return pyfile
+
def RegisterServer(clsid, 
                   pythonInstString=None, 
                   desc=None,
                   progID=None, verProgID=None,
                   defIcon=None,
                   threadingModel="both",
                   policy=None,
                   catids=[], other={},
                   addPyComCat=None,
                   dispatcher = None,
                   clsctx = None,
                   addnPath = None,
                  ):
  """Registers a Python object as a COM Server.  This enters almost all necessary
     information in the system registry, allowing COM to use the object.

     clsid -- The (unique) CLSID of the server.
     pythonInstString -- A string holding the instance name that will be created
                   whenever COM requests a new object.
     desc -- The description of the COM object.
     progID -- The user name of this object (eg, Word.Document)
     verProgId -- The user name of this version's implementation (eg Word.6.Document)
     defIcon -- The default icon for the object.
     threadingModel -- The threading model this object supports.
     policy -- The policy to use when creating this object.
     catids -- A list of category ID's this object belongs in.
     other -- A dictionary of extra items to be registered.
     addPyComCat -- A flag indicating if the object should be added to the list
              of Python servers installed on the machine.  If None (the default)
              then it will be registered when running from python source, but
              not registered if running in a frozen environment.
     dispatcher -- The dispatcher to use when creating this object.
     clsctx -- One of the CLSCTX_* constants.
     addnPath -- An additional path the COM framework will add to sys.path
                 before attempting to create the object.
  """
  # NOTE(review): 'catids' and 'other' use mutable default arguments.  They
  # are never mutated below (catids is rebound, other only read), so this is
  # currently harmless - but worth keeping in mind if the body changes.

  ### backwards-compat check
  ### Certain policies do not require a "class name", just the policy itself.
  if not pythonInstString and not policy:
    raise TypeError, 'You must specify either the Python Class or Python Policy which implement the COM object.'

  keyNameRoot = "CLSID\\%s" % str(clsid)
  _set_string(keyNameRoot, desc)

  # Also register as an "Application" so DCOM etc all see us.
  _set_string("AppID\\%s" % clsid, progID)
  # Depending on contexts requested, register the specified server type.
  # Set default clsctx.
  if not clsctx:
    clsctx = pythoncom.CLSCTX_INPROC_SERVER | pythoncom.CLSCTX_LOCAL_SERVER
  # And if we are frozen, ignore the ones that don't make sense in this
  # context.
  if pythoncom.frozen:
    assert sys.frozen, "pythoncom is frozen, but sys.frozen is not set - don't know the context!"
    if sys.frozen == "dll":
      clsctx = clsctx & pythoncom.CLSCTX_INPROC_SERVER
    else:
      clsctx = clsctx & pythoncom.CLSCTX_LOCAL_SERVER
  # Now setup based on the clsctx left over.
  if clsctx & pythoncom.CLSCTX_INPROC_SERVER:
    # get the module to use for registration.
    # nod to Gordon's installer - if sys.frozen and sys.frozendllhandle
    # exist, then we are being registered via a DLL - use this DLL as the
    # file name.
    if pythoncom.frozen:
      if hasattr(sys, "frozendllhandle"):
        dllName = win32api.GetModuleFileName(sys.frozendllhandle)
      else:
        raise RuntimeError, "We appear to have a frozen DLL, but I don't know the DLL to use"
    else:
      # Normal case - running from .py file, so register pythoncom's DLL.
      dllName = os.path.basename(pythoncom.__file__)

    _set_subkeys(keyNameRoot + "\\InprocServer32",
                 { None : dllName,
                   "ThreadingModel" : threadingModel,
                   })
  else: # Remove any old InProcServer32 registrations
    _remove_key(keyNameRoot + "\\InprocServer32")

  if clsctx & pythoncom.CLSCTX_LOCAL_SERVER:
    if pythoncom.frozen:
      # If we are frozen, we write "{exe} /Automate", just
      # like "normal" .EXEs do
      exeName = win32api.GetShortPathName(sys.executable)
      command = '%s /Automate' % (exeName,)
    else:
      # Running from .py sources - we need to write
      # 'python.exe win32com\server\localserver.py {clsid}"
      exeName = _find_localserver_exe(1)
      exeName = win32api.GetShortPathName(exeName)
      pyfile = _find_localserver_module()
      command = '%s "%s" %s' % (exeName, pyfile, str(clsid))
    _set_string(keyNameRoot + '\\LocalServer32', command)
  else: # Remove any old LocalServer32 registrations
    _remove_key(keyNameRoot + "\\LocalServer32")

  # Record (or clear) the Python-specific keys: the class to instantiate,
  # the policy, the dispatcher, and any extra sys.path entry.
  if pythonInstString:
    _set_string(keyNameRoot + '\\PythonCOM', pythonInstString)
  else:
    _remove_key(keyNameRoot + '\\PythonCOM')
  if policy:
    _set_string(keyNameRoot + '\\PythonCOMPolicy', policy)
  else:
    _remove_key(keyNameRoot + '\\PythonCOMPolicy')

  if dispatcher:
    _set_string(keyNameRoot + '\\PythonCOMDispatcher', dispatcher)
  else:
    _remove_key(keyNameRoot + '\\PythonCOMDispatcher')

  if defIcon:
    _set_string(keyNameRoot + '\\DefaultIcon', defIcon)

  if addnPath:
    _set_string(keyNameRoot + "\\PythonCOMPath", addnPath)
  else:
    _remove_key(keyNameRoot + "\\PythonCOMPath")

  if addPyComCat is None:
    addPyComCat = pythoncom.frozen == 0
  if addPyComCat:
    catids = catids + [ CATID_PythonCOMServer ]

  # Set up the implemented categories
  if catids:
    regCat = _cat_registrar()
    regCat.RegisterClassImplCategories(clsid, catids)

  # set up any other reg values they might have
  if other:
    for key, value in other.items():
      _set_string(keyNameRoot + '\\' + key, value)

  if progID:
    # set the progID as the most specific that was given to us
    if verProgID:
      _set_string(keyNameRoot + '\\ProgID', verProgID)
    else:
      _set_string(keyNameRoot + '\\ProgID', progID)

    # Set up the root entries - version independent.
    if desc:
      _set_string(progID, desc)
    _set_string(progID + '\\CLSID', str(clsid))

    # Set up the root entries - version dependent.
    if verProgID:
      # point from independent to the current version
      _set_string(progID + '\\CurVer', verProgID)

      # point to the version-independent one
      _set_string(keyNameRoot + '\\VersionIndependentProgID', progID)

      # set up the versioned progID
      if desc:
        _set_string(verProgID, desc)
      _set_string(verProgID + '\\CLSID', str(clsid))
+
def GetUnregisterServerKeys(clsid, progID=None, verProgID=None, customKeys = None):
  """Return the list of (key, root) pairs for a server - keys which are
  recursively and unconditionally deleted at unregister or uninstall time.
  """
  # The main CLSID registration.
  keys = [("CLSID\\%s" % str(clsid), win32con.HKEY_CLASSES_ROOT)]
  # The versioned ProgID registration.
  if verProgID:
    keys.append((verProgID, win32con.HKEY_CLASSES_ROOT))
  # The version-independent ProgID must go too - the class it points at
  # is being removed.
  ### could potentially check the CLSID... ?
  if progID:
    keys.append((progID, win32con.HKEY_CLASSES_ROOT))
  # The DCOM config tool may write settings to the AppID key for our CLSID
  keys.append(("AppID\\%s" % str(clsid), win32con.HKEY_CLASSES_ROOT))
  # Any custom keys?
  if customKeys:
    keys = keys + customKeys
  return keys
+  
+
def UnregisterServer(clsid, progID=None, verProgID=None, customKeys = None):
  """Unregisters a Python COM server."""

  for keyPath, root in GetUnregisterServerKeys(clsid, progID, verProgID, customKeys ):
    recurse_delete_key(keyPath, root)

  ### it might be nice at some point to "roll back" the independent ProgID
  ### to an earlier version if one exists, and just blowing away the
  ### specified version of the ProgID (and its corresponding CLSID)
  ### another time, though...

  ### NOTE: ATL simply blows away the above three keys without the
  ### potential checks that I describe.  Assuming that defines the
  ### "standard" then we have no additional changes necessary.
+
def GetRegisteredServerOption(clsid, optionName):
  """Return the value of 'optionName' stored under the server's CLSID key,
  or None when the option has not been set."""
  optionPath = "CLSID\\%s\\%s" % (str(clsid), str(optionName))
  return _get_string(optionPath)
+
+
def _get(ob, attr, default=None):
  """Return ob.attr, recursively searching base classes; 'default' if absent.

  Non-None values found on a base class win over 'default'; a None value
  on a base is treated the same as "not found".
  """
  try:
    return getattr(ob, attr)
  except AttributeError:
    pass
  # Not found directly - if ob is a class, walk its base classes.
  bases = getattr(ob, '__bases__', None)
  if bases is None:
    # ob is not a class - no probs.
    return default
  for base in bases:
    found = _get(base, attr, None)
    if found is not None:
      return found
  return default
+
def RegisterClasses(*classes, **flags):
  """Register each class as a COM server, driven by its _reg_* attributes.

  Recognised flags: 'quiet' (suppress printing), 'debug' (install a
  debugging dispatcher), 'finalize_register' (callable invoked at the end).
  """
  quiet = flags.has_key('quiet') and flags['quiet']
  debugging = flags.has_key('debug') and flags['debug']
  for cls in classes:
    clsid = cls._reg_clsid_
    progID = _get(cls, '_reg_progid_')
    desc = _get(cls, '_reg_desc_', progID)
    spec = _get(cls, '_reg_class_spec_')
    verProgID = _get(cls, '_reg_verprogid_')
    defIcon = _get(cls, '_reg_icon_')
    threadingModel = _get(cls, '_reg_threading_', 'both')
    catids = _get(cls, '_reg_catids_', [])
    options = _get(cls, '_reg_options_', {})
    policySpec = _get(cls, '_reg_policy_spec_')
    clsctx = _get(cls, '_reg_clsctx_')
    tlb_filename = _get(cls, '_reg_typelib_filename_')
    # default to being a COM category only when not frozen.
    addPyComCat = not _get(cls, '_reg_disable_pycomcat_', pythoncom.frozen!=0)
    addnPath = None
    if debugging:
      # If the class has a debugging dispatcher specified, use it, otherwise
      # use our default dispatcher.
      dispatcherSpec = _get(cls, '_reg_debug_dispatcher_spec_')
      if dispatcherSpec is None:
        dispatcherSpec = "win32com.server.dispatcher.DefaultDebugDispatcher"
      # And remember the debugging flag as servers may wish to use it at runtime.
      debuggingDesc = "(for debugging)"
      options['Debugging'] = "1"
    else:
      dispatcherSpec = _get(cls, '_reg_dispatcher_spec_')
      debuggingDesc = ""
      options['Debugging'] = "0"

    if spec is None:
      # No explicit class spec - derive "module.ClassName" from the class.
      moduleName = cls.__module__
      if moduleName == '__main__':
        # Use argv[0] to determine the module name.
        try:
          # Use the win32api to find the case-sensitive name
          moduleName = os.path.splitext(win32api.FindFiles(sys.argv[0])[0][8])[0]
        except (IndexError, win32api.error):
          # Can't find the script file - the user must explicitely set the _reg_... attribute.
          raise TypeError, "Can't locate the script hosting the COM object - please set _reg_class_spec_ in your object"

      spec = moduleName + "." + cls.__name__
      # Frozen apps don't need their directory on sys.path
      if not pythoncom.frozen:
        scriptDir = os.path.split(sys.argv[0])[0]
        if not scriptDir: scriptDir = "."
        addnPath = win32api.GetFullPathName(scriptDir)

    RegisterServer(clsid, spec, desc, progID, verProgID, defIcon,
                   threadingModel, policySpec, catids, options,
                   addPyComCat, dispatcherSpec, clsctx, addnPath)
    if not quiet:
      print 'Registered:', progID or spec, debuggingDesc
    # Register the typelibrary
    if tlb_filename:
      tlb_filename = os.path.abspath(tlb_filename)
      typelib = pythoncom.LoadTypeLib(tlb_filename)
      pythoncom.RegisterTypeLib(typelib, tlb_filename)
      if not quiet:
        print 'Registered type library:', tlb_filename
  extra = flags.get('finalize_register')
  if extra:
    extra()
+
def UnregisterClasses(*classes, **flags):
  """Unregister each class's COM server registration (and its typelib, if
  the class declares one).

  Recognised flags: 'quiet' (suppress printing), 'finalize_unregister'
  (callable invoked at the end).
  """
  quiet = flags.has_key('quiet') and flags['quiet']
  for cls in classes:
    clsid = cls._reg_clsid_
    progID = _get(cls, '_reg_progid_')
    verProgID = _get(cls, '_reg_verprogid_')
    customKeys = _get(cls, '_reg_remove_keys_')
    unregister_typelib = _get(cls, '_reg_typelib_filename_') is not None

    UnregisterServer(clsid, progID, verProgID, customKeys)
    if not quiet:
      print 'Unregistered:', progID or str(clsid)
    if unregister_typelib:
      tlb_guid = _get(cls, "_typelib_guid_")
      if tlb_guid is None:
        # I guess I could load the typelib, but they need the GUID anyway.
        print "Have typelib filename, but no GUID - can't unregister"
      else:
        major, minor = _get(cls, "_typelib_version_", (1,0))
        lcid = _get(cls, "_typelib_lcid_", 0)
        try:
          pythoncom.UnRegisterTypeLib(tlb_guid, major, minor, lcid)
          if not quiet:
            print 'Unregistered type library'
        except pythoncom.com_error:
          # Typelib may already be gone - best-effort cleanup.
          pass

  extra = flags.get('finalize_unregister')
  if extra:
    extra()
#
# Unregister info is for installers or external uninstallers.
# The WISE installer, for example, first registers the COM server,
# then queries for the Unregister info, appending it to its
# install log.  Uninstalling the package will then uninstall the server.
def UnregisterInfoClasses(*classes, **flags):
  """Return the (key, root) registry pairs an external uninstaller must
  delete for the given classes - see GetUnregisterServerKeys."""
  ret = []
  for cls in classes:
    ret.extend(GetUnregisterServerKeys(cls._reg_clsid_,
                                       _get(cls, '_reg_progid_'),
                                       _get(cls, '_reg_verprogid_'),
                                       _get(cls, '_reg_remove_keys_')))
  return ret
+
def UseCommandLine(*classes, **flags):
  """Register or unregister classes based on sys.argv.

  Supported arguments: --unregister, --unregister_info (returns the keys
  an uninstaller must delete), --quiet and --debug.
  """
  argv = sys.argv
  flags['quiet'] = flags.get('quiet',0) or '--quiet' in argv
  flags['debug'] = flags.get('debug',0) or '--debug' in argv
  if '--unregister_info' in argv:
    return UnregisterInfoClasses(*classes, **flags)
  if '--unregister' in argv:
    UnregisterClasses(*classes, **flags)
  else:
    RegisterClasses(*classes, **flags)
+
+
def RegisterPyComCategory():
  """Register the Python COM Server component category with the
  component-categories manager (US English locale description)."""
  _cat_registrar().RegisterCategories(
      [ (CATID_PythonCOMServer, 0x0409, "Python COM Server") ])
+
# On import (except when frozen), make sure the Python COM Server component
# category exists in the registry, creating it on first use.  Failures from
# the COM category manager are deliberately ignored - this is best-effort.
if not pythoncom.frozen:
  try:
    win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT,
                           'Component Categories\\%s' % CATID_PythonCOMServer)
  except win32api.error:
    try:
      RegisterPyComCategory()
    except pythoncom.error: # Error with the COM category manager - oh well.
      pass
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/util.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/util.py
new file mode 100644
index 0000000..0465563
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/server/util.py
@@ -0,0 +1,215 @@
+""" General Server side utilities 
+"""
+import pythoncom
+import policy
+import winerror
+from exception import COMException
+
def wrap(ob, iid=None, usePolicy=None, useDispatcher=None):
  """Wrap a Python object in a PyGDispatch gateway.

  Returns a client side PyI{iid} interface.

  Interface and gateway support must exist for the specified IID, as
  the QueryInterface() method is used.
  """
  if usePolicy is None:
    usePolicy = policy.DefaultPolicy
  # A value of 1 (or True) selects the default debug dispatcher.
  if useDispatcher == 1:
    import win32com.server.dispatcher
    useDispatcher = win32com.server.dispatcher.DefaultDebugDispatcher
  if useDispatcher is None or useDispatcher == 0:
    wrapped = usePolicy(ob)
  else:
    wrapped = useDispatcher(usePolicy, ob)
  # Obtain a PyIDispatch which interfaces to the PyGDispatch gateway.
  dispatch = pythoncom.WrapObject(wrapped)
  if iid is None:
    return dispatch
  # Ask the object itself whether it supports the requested interface.
  return dispatch.QueryInterface(iid)
+
def unwrap(ob):
  """Return the Python object hidden behind a wrapped gateway interface.

  Inverse of wrap(): unwraps the gateway, steps through an optional
  dispatcher layer, and returns the policy's underlying object.
  """
  inner = pythoncom.UnwrapObject(ob)
  # A dispatcher (if one was used) holds the policy in its 'policy' attr.
  if hasattr(inner, 'policy'):
    inner = inner.policy
  return inner._obj_
+
+
class ListEnumerator:
  """A class to expose a Python sequence as an EnumVARIANT.

     Create an instance of this class passing a sequence (list, tuple, or
     any sequence protocol supporting object) and it will automatically
     support the EnumVARIANT interface for the object.

     See also the @NewEnum@ function, which can be used to turn the
     instance into an actual COM server.
  """
  _public_methods_ = [ 'Next', 'Skip', 'Reset', 'Clone' ]

  def __init__(self, data, index=0, iid = pythoncom.IID_IEnumVARIANT):
    self._list_ = data
    self.index = index
    self._iid_ = iid

  def _query_interface_(self, iid):
    # Only advertise the enum interface we were created for.
    if iid == self._iid_:
      return 1

  def Next(self, count):
    # Hand back up to 'count' items and advance past them.
    items = self._list_[self.index : self.index + count]
    self.Skip(count)
    return items

  def Skip(self, count):
    # Advance, clamping at the end of the sequence.
    self.index = min(self.index + count, len(self._list_))

  def Reset(self):
    self.index = 0

  def Clone(self):
    # The clone starts at this enumerator's current position.
    return self._wrap(self.__class__(self._list_, self.index))

  def _wrap(self, ob):
    return wrap(ob)
+
+
class ListEnumeratorGateway(ListEnumerator):
  """A List Enumerator which wraps a sequence's items in gateways.

  If a sequence contains items (objects) that have not been wrapped for
  return through the COM layers, then a ListEnumeratorGateway can be
  used to wrap those items before returning them (from the Next() method).

  See also the @ListEnumerator@ class and the @NewEnum@ function.
  """

  def Next(self, count):
    items = self._list_[self.index : self.index + count]
    self.Skip(count)
    # Wrap each raw item in a COM gateway before handing it back.
    return [self._wrap(item) for item in items]
+
+
def NewEnum(seq,
            cls=ListEnumerator,
            iid=pythoncom.IID_IEnumVARIANT,
            usePolicy=None,
            useDispatcher=None):
  """Create and wrap a new enumerator COM server over *seq*.

  A COM server implementing the requested IEnum interface (default
  IID_IEnumVARIANT) is built around the sequence using *cls* (default
  @ListEnumerator@) and returned wrapped for the COM framework.
  """
  enumerator = cls(seq, iid=iid)
  return wrap(enumerator, iid, usePolicy=usePolicy, useDispatcher=useDispatcher)
+
+
class Collection:
  "A collection of VARIANT values."

  _public_methods_ = [ 'Item', 'Count', 'Add', 'Remove', 'Insert' ]

  def __init__(self, data=None, readOnly=0):
    if data is None:
      data = [ ]
    self.data = data

    # disable Add/Remove if read-only. note that we adjust _public_methods_
    # on this instance only.
    if readOnly:
      self._public_methods_ = [ 'Item', 'Count' ]

  # This method is also used as the "default" method.
  # Thus "print ob" will cause this to be called with zero
  # params.  Handle this slightly more elegantly here.
  # Ideally the  policy should handle this.
  def Item(self, *args):
    # Exactly one index argument is required; map any other count to a
    # COM parameter-count error rather than a Python TypeError.
    if len(args) != 1:
      raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT)

    try:
      return self.data[args[0]]
    except IndexError, desc:
      # Out-of-range indexes become DISP_E_BADINDEX for the COM client.
      raise COMException(scode=winerror.DISP_E_BADINDEX, desc=str(desc))


  _value_ = Item

  def Count(self):
    return len(self.data)

  def Add(self, value):
    self.data.append(value)

  def Remove(self, index):
    try:
      del self.data[index]
    except IndexError, desc:
      raise COMException(scode=winerror.DISP_E_BADINDEX, desc=str(desc))

  def Insert(self, index, value):
    # COM clients may pass the index as a float/string; coerce, mapping
    # failures to DISP_E_TYPEMISMATCH.
    try:
      index = int(index)
    except (ValueError, TypeError):
      raise COMException(scode=winerror.DISP_E_TYPEMISMATCH)
    self.data.insert(index, value)

  def _NewEnum(self):
    # Standard COM enumeration support over the underlying list.
    return NewEnum(self.data)
+
def NewCollection(seq, cls=Collection):
  """Creates a new COM collection object

  This function creates a new COM Server that implements the 
  common collection protocols, including enumeration. (_NewEnum)

  A COM server that can enumerate the passed in sequence will be
  created, then wrapped up for return through the COM framework.
  Optionally, a custom COM server for enumeration can be passed
  (the default is @Collection@).
  """
  # Wrapped directly with the default policy (rather than via wrap()),
  # always exposed as IDispatch.
  return pythoncom.WrapObject(policy.DefaultPolicy(cls(seq)),
                              pythoncom.IID_IDispatch,
                              pythoncom.IID_IDispatch)
+
class FileStream:
  """Expose a Python file object through the COM IStream interface.

  Read/Write/Seek delegate straight to the underlying file object; Clone
  returns a new wrapped FileStream sharing the same file object (and
  therefore its current position).
  """
  _public_methods_ = [ 'Read', 'Write', 'Clone', 'CopyTo', 'Seek' ]
  _com_interfaces_ = [ pythoncom.IID_IStream ]

  def __init__(self, file):
    self.file = file

  def Read(self, amount):
    return self.file.read(amount)

  def Write(self, data):
    self.file.write(data)

  def Clone(self):
    return self._wrap(self.__class__(self.file))

  def CopyTo(self, dest, cb):
    # Bug fix: previously read from the 'file' *builtin* ("file.read(cb)")
    # instead of the wrapped file object, raising TypeError at runtime.
    dest.Write(self.file.read(cb))

  def Seek(self, offset, origin):
    # how convenient that the 'origin' values are the same as the CRT :)
    self.file.seek(offset, origin)

  def _wrap(self, ob):
    return wrap(ob)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/PythonTools.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/PythonTools.py
new file mode 100644
index 0000000..281d4d2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/PythonTools.py
@@ -0,0 +1,42 @@
+
+import sys
+import time
+
class Tools:
  """Utility COM object exposed by the Python.Tools sample server."""
  _public_methods_ = [ 'reload', 'adddir', 'echo', 'sleep' ]

  def reload(self, module):
    """Reload the named module if it has already been imported."""
    # Idiom fix: 'in' instead of the deprecated dict.has_key().
    if module in sys.modules:
      reload(sys.modules[module])
      return "reload succeeded."
    return "no reload performed."

  def adddir(self, dir):
    """Append a directory (plain string only) to sys.path.

    Returns the resulting sys.path as a string either way.
    """
    if type(dir) == type(''):
      sys.path.append(dir)
    return str(sys.path)

  def echo(self, arg):
    """Return repr() of the argument - handy for round-trip testing."""
    # Idiom fix: repr() instead of the deprecated backquote syntax.
    return repr(arg)

  def sleep(self, t):
    """Block the server thread for t seconds."""
    time.sleep(t)
+
+
# Script entry point: register the Python.Tools COM server, or unregister
# it when --unregister is passed on the command line.
if __name__=='__main__':
	from win32com.server.register import RegisterServer, UnregisterServer
	clsid = "{06ce7630-1d81-11d0-ae37-c2fa70000000}"
	progid = "Python.Tools"
	verprogid = "Python.Tools.1"
	if "--unregister" in sys.argv:
		print "Unregistering..."
		UnregisterServer(clsid, progid, verprogid)
		print "Unregistered OK"
	else:
		print "Registering COM server..."
		RegisterServer(clsid,
                       "win32com.servers.PythonTools.Tools",
                       "Python Tools",
                       progid,
                       verprogid)
		print "Class registered."
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/dictionary.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/dictionary.py
new file mode 100644
index 0000000..1ca58e6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/dictionary.py
@@ -0,0 +1,140 @@
+"""Python.Dictionary COM Server.
+
+This module implements a simple COM server that acts much like a Python
+dictionary or as a standard string-keyed VB Collection.  The keys of
+the dictionary are strings and are case-insensitive.
+
+It uses a highly customized policy to fine-tune the behavior exposed to
+the COM client.
+
+The object exposes the following properties:
+
+    int Count                       (readonly)
+    VARIANT Item(BSTR key)          (propget for Item)
+    Item(BSTR key, VARIANT value)   (propput for Item)
+
+    Note that 'Item' is the default property, so the following forms of
+    VB code are acceptable:
+
+        set ob = CreateObject("Python.Dictionary")
+        ob("hello") = "there"
+        ob.Item("hi") = ob("HELLO")
+
+All keys are defined, returning VT_NULL (None) if a value has not been
+stored.  To delete a key, simply assign VT_NULL to the key.
+
+The object responds to the _NewEnum method by returning an enumerator over
+the dictionary's keys. This allows for the following type of VB code:
+
+    for each name in ob
+        debug.print name, ob(name)
+    next
+"""
+
+import string
+import pythoncom
+from win32com.server import util, policy
+from win32com.server.exception import COMException
+import winerror
+import types
+import pywintypes
+
+from pythoncom import DISPATCH_METHOD, DISPATCH_PROPERTYGET
+from winerror import S_OK
+
+UnicodeType = pywintypes.UnicodeType
+StringType = types.StringType
+
+
class DictionaryPolicy(policy.BasicWrapPolicy):
  """Custom policy exposing a case-insensitive, string-keyed dictionary.

  Dispatch is implemented directly in _invokeex_ rather than via normal
  method-name lookup, so only the dispids in _name_to_dispid_ exist.
  """
  ### BasicWrapPolicy looks for this
  _com_interfaces_ = [ ]

  ### BasicWrapPolicy looks for this
  _name_to_dispid_ = {
    'item' : pythoncom.DISPID_VALUE,
    '_newenum' : pythoncom.DISPID_NEWENUM,
    'count' : 1,
    }

  ### Auto-Registration process looks for these...
  _reg_desc_ = 'Python Dictionary'
  _reg_clsid_ = '{39b61048-c755-11d0-86fa-00c04fc2e03e}'
  _reg_progid_ = 'Python.Dictionary'
  _reg_verprogid_ = 'Python.Dictionary.1'
  _reg_policy_spec_ = 'win32com.servers.dictionary.DictionaryPolicy'

  def _CreateInstance_(self, clsid, reqIID):
    # Every COM instance starts with a fresh, empty Python dict.
    self._wrap_({ })
    return pythoncom.WrapObject(self, reqIID)

  def _wrap_(self, ob):
    self._obj_ = ob	# ob should be a dictionary

  def _invokeex_(self, dispid, lcid, wFlags, args, kwargs, serviceProvider):
    if dispid == 0:	# item
      l = len(args)
      if l < 1:
        raise COMException(desc="not enough parameters", scode=winerror.DISP_E_BADPARAMCOUNT)

      # Keys must be strings; normalize to lower-cased Unicode so lookups
      # are case-insensitive.
      key = args[0]
      if type(key) == UnicodeType:
        pass
      elif type(key) == StringType:
        key = pywintypes.Unicode(key)
      else:
        ### the nArgErr thing should be 0-based, not reversed... sigh
        raise COMException(desc="Key must be a string", scode=winerror.DISP_E_TYPEMISMATCH)

      key = key.lower()

      if wFlags & (DISPATCH_METHOD | DISPATCH_PROPERTYGET):
        if l > 1:
            raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT)
        try:
          return self._obj_[key]
        except KeyError:
          return None	# unknown keys return None (VT_NULL)

      # Property-put path: exactly (key, value) is expected.
      if l <> 2:
        raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT)
      if args[1] is None:
        # delete a key when None is assigned to it
        try:
          del self._obj_[key]
        except KeyError:
          pass
      else:
        self._obj_[key] = args[1]
      return S_OK

    if dispid == 1:	# count
      if not wFlags & DISPATCH_PROPERTYGET:
        raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)	# not found
      if len(args) != 0:
        raise COMException(scode=winerror.DISP_E_BADPARAMCOUNT)
      return len(self._obj_)

    if dispid == pythoncom.DISPID_NEWENUM:
      # Enumerate the (already-normalized) keys, as VB's "for each" expects.
      return util.NewEnum(self._obj_.keys())

    raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)

  def _getidsofnames_(self, names, lcid):
    ### this is a copy of MappedWrapPolicy._getidsofnames_ ...

    # Note: these names will always be StringType
    name = string.lower(names[0])
    try:
      return (self._name_to_dispid_[name],)
    except KeyError:
      raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND,
                                desc="Member not found")
+
+
def Register():
  """Register (or unregister, per command-line flags) the dictionary server."""
  import win32com.server.register
  return win32com.server.register.UseCommandLine(DictionaryPolicy)

if __name__ == '__main__':
  Register()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/interp.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/interp.py
new file mode 100644
index 0000000..dd13605
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/interp.py
@@ -0,0 +1,53 @@
+"""Python.Interpreter COM Server
+
+  This module implements a very very simple COM server which
+  exposes the Python interpreter.
+
+  This is designed more as a demonstration than a full blown COM server.
+  General functionality and Error handling are both limited.
+
+  To use this object, ensure it is registered by running this module
+  from Python.exe.  Then, from Visual Basic, use "CreateObject('Python.Interpreter')",
+  and call its methods!
+"""
+
+from win32com.server.exception import Exception
+from pywintypes import UnicodeType
+import winerror
+
+# Expose the Python interpreter.
class Interpreter:
    """The interpreter object exposed via COM
    """
    _public_methods_ = [ 'Exec', 'Eval' ]
    # All registration stuff to support fully automatic register/unregister
    _reg_verprogid_ = "Python.Interpreter.2"
    _reg_progid_ = "Python.Interpreter"
    _reg_desc_ = "Python Interpreter"
    _reg_clsid_ = "{30BD3490-2632-11cf-AD5B-524153480001}"
    _reg_class_spec_ = "win32com.servers.interp.Interpreter"

    def __init__(self):
        # One namespace shared by all Eval/Exec calls on this instance.
        self.dict = {}

    def Eval(self, exp):
        """Evaluate an expression.
        """
        if type(exp) not in [type(''),UnicodeType]:
            raise Exception(desc="Must be a string",scode=winerror.DISP_E_TYPEMISMATCH)

        # SECURITY NOTE: eval() of caller-supplied text - any COM client can
        # run arbitrary Python with this server's privileges (demo code only).
        return eval(str(exp), self.dict)
    def Exec(self, exp):
        """Execute a statement.
        """
        if type(exp) not in [type(''), UnicodeType]:
            raise Exception(desc="Must be a string",scode=winerror.DISP_E_TYPEMISMATCH)
        # SECURITY NOTE: executes arbitrary caller-supplied code (see Eval).
        exec str(exp) in self.dict
+
def Register():
    """Register the Interpreter server per win32com command-line flags."""
    import win32com.server.register
    return win32com.server.register.UseCommandLine(Interpreter)

if __name__=='__main__':
    print "Registering COM server..."
    Register()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/perfmon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/perfmon.py
new file mode 100644
index 0000000..9b62f36
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/perfmon.py
@@ -0,0 +1,27 @@
+"""A COM Server which exposes the NT Performance monitor in a very rudimentary way
+
+Usage from VB:
+	set ob = CreateObject("Python.PerfmonQuery")
+	freeBytes = ob.Query("Memory", "Available Bytes")
+"""
+from win32com.server import exception, register
+import pythoncom, win32pdhutil, winerror
+
class PerfMonQuery:
	"""COM object giving scripting clients one-shot perfmon counter queries."""
	_reg_verprogid_ = "Python.PerfmonQuery.1"
	_reg_progid_ = "Python.PerfmonQuery"
	_reg_desc_ = "Python Performance Monitor query object"
	_reg_clsid_ = "{64cef7a0-8ece-11d1-a65a-00aa00125a98}"
	_reg_class_spec_ = "win32com.servers.perfmon.PerfMonQuery"
	_public_methods_ = [ 'Query' ]
	def Query(self, object, counter, instance = None, machine = None):
		"""Return the current value of the named performance counter.

		PDH failures are mapped to COM exceptions; a TypeError (eg, a
		non-string argument) is reported as DISP_E_TYPEMISMATCH.
		"""
		try:
			return win32pdhutil.GetPerformanceAttributes(object, counter, instance, machine=machine)
		except win32pdhutil.error, (rc, fn, desc):
			raise exception.Exception(desc=desc)
		except TypeError, desc:
			raise exception.Exception(desc=desc,scode=winerror.DISP_E_TYPEMISMATCH)
+				
# Script entry point - register the server.
if __name__=='__main__':
	print "Registering COM server..."
	register.UseCommandLine(PerfMonQuery)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/test_pycomtest.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/test_pycomtest.py
new file mode 100644
index 0000000..7a26e359
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/servers/test_pycomtest.py
@@ -0,0 +1,156 @@
+# This is part of the Python test suite.
+# The object is registered when you first run the test suite.
+# (and hopefully unregistered once done ;-)
+
+# Ensure the vtables in the tlb are known.
+from win32com import universal
+from win32com.server.exception import COMException
+from win32com.client import gencache
+import winerror
+from win32com.client import constants
+from win32com.server.util import wrap
+
+import pythoncom
# Opt in to the future currency handling (decimal-based) for this process.
pythoncom.__future_currency__ = True
# We use the constants from the module, so must insist on a gencache.
# Otherwise, use of gencache is not necessary (tho still advised)
gencache.EnsureModule('{6BCDCB60-5605-11D0-AE5F-CADD4C000000}', 0, 1, 1)
+
class PyCOMTest:
	"""Python implementation of the IPyCOMTest vtable interface.

	Part of the win32com test suite: only the methods the tests exercise
	are implemented; the rest simply raise E_NOTIMPL.

	NOTE(review): win32com registration reads '_typelib_version_' (with a
	trailing underscore); '_typelib_version' below looks misspelled, so
	the declared version may be ignored - confirm against register.py.
	"""
	_typelib_guid_ = "{6BCDCB60-5605-11D0-AE5F-CADD4C000000}"
	_typelib_version = 1,0
	_com_interfaces_ = ['IPyCOMTest']
	_reg_clsid_ = "{e743d9cd-cb03-4b04-b516-11d3a81c1597}"
	_reg_progid_ = "Python.Test.PyCOMTest"

	def DoubleString(self, str):
		return str*2
	def DoubleInOutString(self, str):
		return str*2

	def Fire(self, nID):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def GetLastVarArgs(self):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def GetMultipleInterfaces(self, outinterface1, outinterface2):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def GetSafeArrays(self, attrs, attrs2, ints):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def GetSetDispatch(self, indisp):
		raise COMException(hresult=winerror.E_NOTIMPL)

	# Result is of type IPyCOMTest
	def GetSetInterface(self, ininterface):
		return wrap(self)

	def GetSetVariant(self, indisp):
		return indisp

	def TestByRefVariant(self, v):
		return v * 2

	def TestByRefString(self, v):
		return v * 2

	# Result is of type IPyCOMTest
	def GetSetInterfaceArray(self, ininterface):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def GetSetUnknown(self, inunk):
		raise COMException(hresult=winerror.E_NOTIMPL)

	# Result is of type ISimpleCounter
	def GetSimpleCounter(self):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def GetSimpleSafeArray(self, ints):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def GetStruct(self):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def SetIntSafeArray(self, ints):
		return len(ints)

	def SetVarArgs(self, *args):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def SetVariantSafeArray(self, vars):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def Start(self):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def Stop(self, nID):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def StopAll(self):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def TakeByRefDispatch(self, inout):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def TakeByRefTypedDispatch(self, inout):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def Test(self, key, inval):
		return not inval

	def Test2(self, inval):
		return inval

	def Test3(self, inval):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def Test4(self, inval):
		raise COMException(hresult=winerror.E_NOTIMPL)

	# Swaps between the two TestAttr1 constants; anything else yields -1.
	def Test5(self, inout):
		if inout == constants.TestAttr1:
			return constants.TestAttr1_1
		elif inout == constants.TestAttr1_1:
			return constants.TestAttr1
		else:
			return -1

	def TestOptionals(self, strArg='def', sval=0, lval=1, dval=3.1400001049041748):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def TestOptionals2(self, dval, strval='', sval=1):
		raise COMException(hresult=winerror.E_NOTIMPL)

	def CheckVariantSafeArray(self, data):
		return 1

	# Simple property get/set pairs backed by instance attributes.
	def LongProp(self):
		return self.longval
	def SetLongProp(self, val):
		self.longval = val
	def ULongProp(self):
		return self.ulongval
	def SetULongProp(self, val):
		self.ulongval = val
	def IntProp(self):
		return self.intval
	def SetIntProp(self, val):
		self.intval = val
+
class PyCOMTestMI(PyCOMTest):
	"""Multiple-interface variant of PyCOMTest.

	NOTE(review): as on the base class, '_typelib_version' lacks the
	trailing underscore win32com registration looks for - confirm.
	"""
	_typelib_guid_ = "{6BCDCB60-5605-11D0-AE5F-CADD4C000000}"
	_typelib_version = 1,0
	# Interfaces with a interface name, a real IID, and an IID as a string
	_com_interfaces_ = ['IPyCOMTest',
						pythoncom.IID_IStream,
						str(pythoncom.IID_IStorage),
						]
	_reg_clsid_ = "{F506E9A1-FB46-4238-A597-FA4EB69787CA}"
	_reg_progid_ = "Python.Test.PyCOMTestMI"
+
# Script entry point - register both test servers.
if __name__ == '__main__':
	import win32com.server.register
	win32com.server.register.UseCommandLine(PyCOMTest)
	win32com.server.register.UseCommandLine(PyCOMTestMI)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/storagecon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/storagecon.py
new file mode 100644
index 0000000..58459db
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/storagecon.py
@@ -0,0 +1,131 @@
+"""Constants related to IStorage and related interfaces
+
+This file was generated by h2py from d:\msdev\include\objbase.h
+then hand edited, a few extra constants added, etc.
+"""
+
## STGC_* - flags for IStorage/IStream Commit().
STGC_DEFAULT        = 0
STGC_OVERWRITE      = 1
STGC_ONLYIFCURRENT  = 2
STGC_DANGEROUSLYCOMMITMERELYTODISKCACHE = 4
STGC_CONSOLIDATE    = 8

## STGTY_* - STATSTG element types; STREAM_SEEK_* - IStream Seek origins.
STGTY_STORAGE	= 1
STGTY_STREAM	= 2
STGTY_LOCKBYTES	= 3
STGTY_PROPERTY	= 4
STREAM_SEEK_SET	= 0
STREAM_SEEK_CUR	= 1
STREAM_SEEK_END	= 2

## LOCK_* - IStream/ILockBytes LockRegion flags.
LOCK_WRITE	= 1
LOCK_EXCLUSIVE	= 2
LOCK_ONLYONCE	= 4

# Generated as from here.

CWCSTORAGENAME = 32
STGM_DIRECT = 0x00000000L
STGM_TRANSACTED = 0x00010000L
STGM_SIMPLE = 0x08000000L
STGM_READ = 0x00000000L
STGM_WRITE = 0x00000001L
STGM_READWRITE = 0x00000002L
STGM_SHARE_DENY_NONE = 0x00000040L
STGM_SHARE_DENY_READ = 0x00000030L
STGM_SHARE_DENY_WRITE = 0x00000020L
STGM_SHARE_EXCLUSIVE = 0x00000010L
STGM_PRIORITY = 0x00040000L
STGM_DELETEONRELEASE = 0x04000000L
STGM_NOSCRATCH = 0x00100000L
STGM_CREATE = 0x00001000L
STGM_CONVERT = 0x00020000L
STGM_FAILIFTHERE = 0x00000000L
STGM_NOSNAPSHOT = 0x00200000L
ASYNC_MODE_COMPATIBILITY = 0x00000001L
ASYNC_MODE_DEFAULT = 0x00000000L
STGTY_REPEAT = 0x00000100L
STG_TOEND = 0xFFFFFFFFL
STG_LAYOUT_SEQUENTIAL = 0x00000000L
STG_LAYOUT_INTERLEAVED = 0x00000001L
COM_RIGHTS_EXECUTE = 1

## STGFMT_* - storage formats for StgCreateStorageEx/StgOpenStorageEx.
STGFMT_DOCUMENT = 0
STGFMT_STORAGE = 0
STGFMT_NATIVE = 1
STGFMT_FILE = 3
STGFMT_ANY = 4
STGFMT_DOCFILE = 5

## Property-set property IDs (PID_*) and reserved ranges.
PID_DICTIONARY = 0
PID_CODEPAGE = 1
PID_FIRST_USABLE = 2
PID_FIRST_NAME_DEFAULT = 4095

PID_LOCALE = -2147483648
PID_MODIFY_TIME = -2147483647
PID_SECURITY = -2147483646
PID_BEHAVIOR = -2147483645
PID_ILLEGAL = -1
PID_MIN_READONLY = -2147483648
PID_MAX_READONLY = -1073741825

## DiscardableInformation
PIDDI_THUMBNAIL = 0x00000002L

## SummaryInformation
PIDSI_TITLE = 2
PIDSI_SUBJECT = 3
PIDSI_AUTHOR = 4
PIDSI_KEYWORDS = 5
PIDSI_COMMENTS = 6
PIDSI_TEMPLATE = 7
PIDSI_LASTAUTHOR = 8
PIDSI_REVNUMBER = 9
PIDSI_EDITTIME = 10
PIDSI_LASTPRINTED = 11
PIDSI_CREATE_DTM = 12
PIDSI_LASTSAVE_DTM = 13
PIDSI_PAGECOUNT = 14
PIDSI_WORDCOUNT = 15
PIDSI_CHARCOUNT = 16
PIDSI_THUMBNAIL = 17
PIDSI_APPNAME = 18
PIDSI_DOC_SECURITY = 19

## DocSummaryInformation
PIDDSI_CATEGORY = 2
PIDDSI_PRESFORMAT = 3
PIDDSI_BYTECOUNT = 4
PIDDSI_LINECOUNT = 5
PIDDSI_PARCOUNT = 6
PIDDSI_SLIDECOUNT = 7
PIDDSI_NOTECOUNT = 8
PIDDSI_HIDDENCOUNT = 9
PIDDSI_MMCLIPCOUNT = 10
PIDDSI_SCALE = 11
PIDDSI_HEADINGPAIR = 12
PIDDSI_DOCPARTS = 13
PIDDSI_MANAGER = 14
PIDDSI_COMPANY = 15
PIDDSI_LINKSDIRTY = 16


## MediaFileSummaryInfo
PIDMSI_EDITOR = 2
PIDMSI_SUPPLIER = 3
PIDMSI_SOURCE = 4
PIDMSI_SEQUENCE_NO = 5
PIDMSI_PROJECT = 6
PIDMSI_STATUS = 7
PIDMSI_OWNER = 8
PIDMSI_RATING = 9
PIDMSI_PRODUCTION = 10
PIDMSI_COPYRIGHT = 11

## PROPSETFLAG enum
PROPSETFLAG_DEFAULT         = 0
PROPSETFLAG_NONSIMPLE       = 1
PROPSETFLAG_ANSI            = 2
PROPSETFLAG_UNBUFFERED      = 4
PROPSETFLAG_CASE_SENSITIVE  = 8
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/.cvsignore b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/.cvsignore
new file mode 100644
index 0000000..8f63690
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/.cvsignore
@@ -0,0 +1 @@
+pippo.tlb

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/GenTestScripts.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/GenTestScripts.py
new file mode 100644
index 0000000..cf3f2ce
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/GenTestScripts.py
@@ -0,0 +1,84 @@
+#
+# Generate scripts needed for serious testing!
+#
+import win32com, win32com.client.makepy
+import win32com.test
+import pythoncom
+import sys, os
+
# (module name, typelib CLSID, lcid, major, minor) for each type library
# to generate makepy support for.
genList = [
("msword8", "{00020905-0000-0000-C000-000000000046}",1033,8,0),
]

# Sub-directory (under win32com.test) the generated scripts are written to.
genDir = "Generated4Test"
def GetGenPath():
    """Return the absolute directory into which test scripts are generated."""
    import win32api
    base = win32api.GetFullPathName(win32com.test.__path__[0])
    return os.path.join(base, genDir)
+
def GenerateFromRegistered(fname, *loadArgs):
    """Generate makepy support for one registered type library.

    loadArgs is the (clsid, lcid, major, minor) tuple for the library; the
    generated module is written under GetGenPath(), imported, and injected
    into sys.modules under the short name 'fname'.
    """
#       tlb = apply(pythoncom.LoadRegTypeLib, loadArgs)
    genPath = GetGenPath()
    # Create the output package directory on first use.
    try:
        os.stat(genPath)
    except os.error:
        os.mkdir(genPath)
    # Ensure an __init__ exists.
    open(os.path.join(genPath, "__init__.py"), "w").close()
    print fname, ": generating -",
    f = open(os.path.join(genPath, fname + ".py"), "w")
    # NOTE(review): NeedUnicodeConversions is not defined anywhere in this
    # module, so this call will raise NameError at runtime - confirm where
    # the flag was meant to come from.
    win32com.client.makepy.GenerateFromTypeLibSpec(loadArgs, f, bQuiet = 1, bGUIProgress = 1, bUnicodeToString=NeedUnicodeConversions)
    f.close()
    print "compiling -",
    fullModName = "win32com.test.%s.%s" % (genDir, fname)
    exec "import " + fullModName
    # Inject the generated module as a top level module.
    sys.modules[fname] = sys.modules[fullModName]
    print "done"
+
+
+def GenerateAll():
+    for args in genList:
+        try:
+            apply(GenerateFromRegistered, args)
+        except KeyboardInterrupt:
+            print "** Interrupted ***"
+            break
+        except pythoncom.com_error:
+            print "** Could not generate test code for ", args[0]
+
+def CleanAll():
+    print "Cleaning generated test scripts..."
+    try: # Clear exceptions!
+        1/0
+    except:
+        pass
+    genPath = GetGenPath()
+    for args in genList:
+        try:
+            name = args[0]+".py"
+            os.unlink(os.path.join(genPath, name))
+        except os.error, details:
+            if type(details)==type(()) and details[0]!=2:
+                print "Could not deleted generated", name, details
+        try:
+            name = args[0]+".pyc"
+            os.unlink(os.path.join(genPath, name))
+        except os.error, details:
+            if type(details)==type(()) and details[0]!=2:
+                print "Could not deleted generated", name, details
+        try:
+            os.unlink(os.path.join(genPath, "__init__.py"))
+        except:
+            pass
+        try:
+            os.unlink(os.path.join(genPath, "__init__.pyc"))
+        except:
+            pass
+    try:
+        os.rmdir(genPath)
+    except os.error, details:
+        print "Could not delete test directory -", details
+
# Script entry point: generate everything, then clean it all up again.
if __name__=='__main__':
    GenerateAll()
    CleanAll()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/Testpys.sct b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/Testpys.sct
new file mode 100644
index 0000000..3bc7ea9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/Testpys.sct
@@ -0,0 +1,64 @@
+<scriptlet>

+

+<Registration

+  Description="TestPys"

+  ProgID="TestPys.Scriptlet"

+  Version="1"

+  ClassID="{2eeb6080-cd58-11d1-b81e-00a0240b2fef}">

+

+  <SCRIPT LANGUAGE="VBScript">      

+	Function Register()

+		Msgbox "Scriptlet 'Test' registered."      

+	End Function

+    

+	Function Unregister()

+		Msgbox "Scriptlet 'Test' unregistered."      

+	End Function

+   </SCRIPT>

+</Registration>

+

+<implements id=Automation type=Automation>

+  <property name=PyProp1>

+    <get/>

+    <put/>

+  </property>

+  <property name=PyProp2>

+    <get/>

+    <put/>

+  </property>

+  <method name=PyMethod1>

+  </method>

+

+  <method name=PyMethod2>

+  </method>

+</implements>

+

+<script language=python>

+

+PyProp1 = "PyScript Property1";

+PyProp2 = "PyScript Property2";

+

+def get_PyProp1():

+  return PyProp1

+

+def put_PyProp1(newValue):

+  global PyProp1

+  PyProp1 = newValue

+

+def get_PyProp2():

+  return PyProp2

+

+def put_PyProp2(newValue):

+  global PyProp2

+  PyProp2 = newValue

+

+def PyMethod1():

+  return "PyMethod1 called"

+

+def PyMethod2():

+  return "PyMethod2 called"

+

+</script>

+

+</scriptlet>

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/__init__.py
new file mode 100644
index 0000000..cb6d7f4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/__init__.py
@@ -0,0 +1 @@
+# Empty file to designate a Python package
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/daodump.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/daodump.py
new file mode 100644
index 0000000..b4a3a7a3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/daodump.py
@@ -0,0 +1,64 @@
+# import dao3032
+# No longer imported here - callers responsibility to load
+#
+import win32com.client
+
def DumpDB(db, bDeep = 1):
    """Dump the tables, relations and containers of a DAO database object."""
    # MUST be a DB object.
    for dumper in (DumpTables, DumpRelations, DumpAllContainers):
        dumper(db, bDeep)
+
def DumpTables(db, bDeep = 1):
    """Print a summary line per table; with bDeep, also dump its fields."""
    for tab in db.TableDefs:
        tab = db.TableDefs(tab.Name) # Redundant lookup for testing purposes.
        print "Table %s - Fields: %d, Attributes:%d" % (tab.Name, len(tab.Fields), tab.Attributes)
        if bDeep: DumpFields(tab.Fields)
+
def DumpFields(fields):
    """Print one detail line per field in a DAO Fields collection."""
    for field in fields:
        print "  %s, size=%d, reqd=%d, type=%d, defVal=%s" % (field.Name, field.Size, field.Required, field.Type, str(field.DefaultValue))
+
def DumpRelations(db, bDeep = 1):
    """Print each relation; bDeep is accepted but currently unused here."""
    for relation in db.Relations:
        print "Relation %s - %s->%s" % (relation.Name, relation.Table, relation.ForeignTable)
+
+#### This doesn't work.  The TLB says it is a Fields collection, but apparently not!
+####            if bDeep: DumpFields(relation.Fields)
+
def DumpAllContainers(db, bDeep = 1):
    """Print each container; with bDeep, also dump its documents."""
    for cont in db.Containers:
        print "Container %s - %d documents" % (cont.Name, len(cont.Documents))
        if bDeep: DumpContainerDocuments(cont)
+
+def DumpContainerDocuments(container):
+    for doc in container.Documents:
+        import time
+        timeStr = time.ctime(int(doc.LastUpdated))
+        print "  %s - updated %s (" % (doc.Name, timeStr),
+        print doc.LastUpdated,")" # test the _print_ method?
+
def TestEngine(engine):
    """Open the database named on the command line (or a hard-coded test
    database) with the given DAO engine, and dump it."""
    import sys
    if len(sys.argv)>1:
        dbName = sys.argv[1]
    else:
        dbName = "e:\\temp\\TestPython.mdb"
    db = engine.OpenDatabase(dbName)
    DumpDB(db)
+
def test():
    """Run the dump against DAO 3.5 and DAO 3.0, whichever are installed
    with makepy support."""
    import win32com.client.gencache
    if win32com.client.gencache.GetModuleForProgID("DAO.DBEngine.35") is None:
        print "DAO 3.5 does not seem to be installed or have makepy support"
    else:
        TestEngine(win32com.client.Dispatch("DAO.DBEngine.35"))

    if win32com.client.gencache.GetModuleForProgID("DAO.DBEngine.30") is None:
        print "DAO 3.0 does not seem to be installed or have makepy support"
    else:
        TestEngine(win32com.client.Dispatch("DAO.DBEngine.30"))
+
+
# Script entry point.
if __name__=='__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/errorSemantics.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/errorSemantics.py
new file mode 100644
index 0000000..d84392a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/errorSemantics.py
@@ -0,0 +1,148 @@
+# errorSemantics.py
+
+# Test the Python error handling semantics.  Specifically:
+#
+# * When a Python COM object is called via IDispatch, the nominated
+#   scode is placed in the exception tuple, and the HRESULT is
+#   DISP_E_EXCEPTION
+# * When the same interface is called via IWhatever, the
+#   nominated  scode is returned directly (with the scode also
+#   reflected in the exception tuple)
+# * In all cases, the description etc end up in the exception tuple
+# * "Normal" Python exceptions resolve to an E_FAIL "internal error"
+
+import pythoncom
+from win32com.server.exception import COMException
+from win32com.server.util import wrap
+from win32com.client import Dispatch
+import winerror
+from win32com.test.util import CaptureWriter
+
+class error(Exception):
+    """Test-failure exception that also records the triggering com_error."""
+    def __init__(self, msg, com_exception=None):
+        Exception.__init__(self, msg, str(com_exception))
+
+# Our COM server.
+class TestServer:
+    """COM server whose methods fail in two distinct ways:
+    Clone() raises a COMException with a nominated scode, while
+    Commit() raises a plain (non-COM) Python exception, which the
+    framework should surface as an E_FAIL internal error."""
+    _public_methods_ = [ 'Clone', 'Commit', 'LockRegion', 'Read']
+    _com_interfaces_ = [ pythoncom.IID_IStream ]
+
+    def Clone(self):
+        raise COMException("Not today", scode=winerror.E_UNEXPECTED)
+
+    def Commit(self, flags):
+        # Deliberately a Python 2 string exception - i.e. NOT a COMException.
+        raise "foo"
+
+def test():
+    """Check COM error semantics for a Python COM server.
+
+    Calls the server first through a native (IStream) interface, then
+    through IDispatch, verifying in each case the HRESULT, the scode in
+    the exception tuple, the description text, and that internal Python
+    errors are reported with a traceback on stderr.
+    """
+    # Call via a native interface.
+    com_server = wrap(TestServer(), pythoncom.IID_IStream)
+    try:
+        com_server.Clone()
+    except pythoncom.com_error, com_exc:
+        hr, desc, exc, argErr = com_exc
+        if hr != winerror.E_UNEXPECTED:
+            raise error("Calling the object natively did not yield the correct scode", com_exc)
+        if not exc or exc[-1] != winerror.E_UNEXPECTED:
+            raise error("The scode element of the exception tuple did not yield the correct scode", com_exc)
+        if exc[2] != "Not today":
+            raise error("The description in the exception tuple did not yield the correct string", com_exc)
+    # NOTE(review): if Clone() unexpectedly does NOT raise, this test
+    # passes silently - all the checks live inside the except clause.
+    cap = CaptureWriter()
+    try:
+        cap.capture()
+        try:
+            com_server.Commit(0)
+        finally:
+            cap.release()
+    except pythoncom.com_error, com_exc:
+        hr, desc, exc, argErr = com_exc
+        if hr != winerror.E_FAIL:
+            raise error("The hresult was not E_FAIL for an internal error", com_exc)
+        if exc[1] != "Python COM Server Internal Error":
+            raise error("The description in the exception tuple did not yield the correct string", com_exc)
+    # Check we saw a traceback in stderr
+    if cap.get_captured().find("Traceback")<0:
+        raise error("Could not find a traceback in stderr: %r" % (cap.get_captured(),))
+
+    # Now do it all again, but using IDispatch
+    com_server = Dispatch(wrap(TestServer()))
+    try:
+        com_server.Clone()
+    except pythoncom.com_error, com_exc:
+        hr, desc, exc, argErr = com_exc
+        # Via IDispatch the HRESULT is DISP_E_EXCEPTION; the nominated
+        # scode appears only inside the exception tuple.
+        if hr != winerror.DISP_E_EXCEPTION:
+            raise error("Calling the object via IDispatch did not yield the correct scode", com_exc)
+        if not exc or exc[-1] != winerror.E_UNEXPECTED:
+            raise error("The scode element of the exception tuple did not yield the correct scode", com_exc)
+        if exc[2] != "Not today":
+            raise error("The description in the exception tuple did not yield the correct string", com_exc)
+
+    cap.clear()
+    try:
+        cap.capture()
+        try:
+            com_server.Commit(0)
+        finally:
+            cap.release()
+    except pythoncom.com_error, com_exc:
+        hr, desc, exc, argErr = com_exc
+        if hr != winerror.DISP_E_EXCEPTION:
+            raise error("Calling the object via IDispatch did not yield the correct scode", com_exc)
+        if not exc or exc[-1] != winerror.E_FAIL:
+            raise error("The scode element of the exception tuple did not yield the correct scode", com_exc)
+        if exc[1] != "Python COM Server Internal Error":
+            raise error("The description in the exception tuple did not yield the correct string", com_exc)
+    # Check we saw a traceback in stderr
+    if cap.get_captured().find("Traceback")<0:
+        raise error("Could not find a traceback in stderr: %r" % (cap.get_captured(),))
+
+try:
+    import logging
+except ImportError:
+    logging = None # pre-2.3 Python has no logging module; skip those tests.
+if logging is not None:
+    import win32com
+    class TestLogHandler(logging.Handler):
+        # Counts emitted records so the tests can assert exactly one
+        # message was logged per induced failure.
+        def __init__(self):
+            self.num_emits = 0
+            logging.Handler.__init__(self)
+        def emit(self, record):
+            self.num_emits += 1
+            return
+            # Dead code below: debugging output, re-enabled by deleting
+            # the "return" above.
+            print "--- record start"
+            print self.format(record)
+            print "--- record end"
+    
+    def testLogger():
+        """Check server exceptions are routed to win32com.logger when set."""
+        assert not hasattr(win32com, "logger")
+        handler = TestLogHandler()
+        formatter = logging.Formatter('%(message)s')
+        handler.setFormatter(formatter)
+        log = logging.getLogger("win32com_test")
+        log.addHandler(handler)
+        win32com.logger = log
+        # Now throw some exceptions!
+        # Native interfaces
+        com_server = wrap(TestServer(), pythoncom.IID_IStream)
+        try:
+            com_server.Commit(0)
+            raise RuntimeError, "should have failed"
+        except pythoncom.error:
+            pass
+        assert handler.num_emits == 1, handler.num_emits
+        handler.num_emits = 0 # reset
+
+        com_server = Dispatch(wrap(TestServer()))
+        try:
+            com_server.Commit(0)
+            raise RuntimeError, "should have failed"
+        except pythoncom.error:
+            pass
+        assert handler.num_emits == 1, handler.num_emits
+    
+if __name__=='__main__':
+    test()
+    if logging is not None:
+        testLogger()
+    # CheckClean reports any COM objects still alive at exit.
+    from util import CheckClean
+    CheckClean()
+    print "error semantic tests worked"
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/pippo.idl b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/pippo.idl
new file mode 100644
index 0000000..21e9b26
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/pippo.idl
@@ -0,0 +1,62 @@
+// TestServer.idl : IDL source for TestServer.dll
+//
+
+// This file will be processed by the MIDL tool to
+// produce the type library (TestServer.tlb) and marshalling code.
+
+import "oaidl.idl";
+import "ocidl.idl";
+	// Dual interface exposed by the test server's main object.
+	[
+		object,
+		uuid(50086EE8-F535-464B-806E-365ADBB727CF),
+		dual,
+		helpstring("ITestServerApp Interface"),
+		pointer_default(unique)
+	]
+	interface ITestServerApp : IDispatch
+	{
+		[id(1), helpstring("method Test1")] HRESULT Test1([out, retval] ITestServerApp **pVal);
+		[id(2), helpstring("method Test2")] HRESULT Test2([out, retval] VARIANT *pVar);
+		[propget, id(3), helpstring("property MyProp1")] HRESULT MyProp1([out, retval] long *pVal);
+	};
+	// Dual interface implemented by pippo_server.py's CPippo test object.
+	[
+		object,
+		uuid(F1A3CC2E-4B2A-4A81-992D-67862076949B),
+		dual,
+		helpstring("IPippo Interface"),
+		pointer_default(unique)
+	]
+	interface IPippo : IDispatch
+	{		
+		[id(1), helpstring("method Method1")] HRESULT Method1([out, retval] IPippo **val);
+		[propget, id(2), helpstring("property MyProp1")] HRESULT MyProp1([out, retval] long *pVal);
+	};
+
+// Type library containing both test coclasses.
+[
+	uuid(41059C57-975F-4B36-8FF3-C5117426647A),
+	version(1.0),
+	helpstring("TestServer 1.0 Type Library")
+]
+library TESTSERVERLib
+{
+	importlib("stdole32.tlb");
+	importlib("stdole2.tlb");
+	importlib("msado15.dll");
+
+	[
+		uuid(49E44E89-5A72-4456-B1D5-68268A19E798),
+		helpstring("TestServerApp Class")
+	]
+	coclass TestServerApp
+	{
+		[default] interface ITestServerApp;
+	};
+	[
+		uuid(E19C0A68-F61C-450B-A974-A7BA6957829C),
+		helpstring("Pippo Class")
+	]
+	coclass Pippo
+	{
+		[default] interface IPippo;
+	};
+};
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/pippo_server.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/pippo_server.py
new file mode 100644
index 0000000..391c779
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/pippo_server.py
@@ -0,0 +1,82 @@
+# A little test server, complete with typelib, we can use for testing.
+# Originally submitted with bug:
+# [ 753154 ] memory leak wrapping object having _typelib_guid_ attribute
+# but modified by mhammond for use as part of the test suite.
+import sys, os
+import pythoncom
+import win32com
+import winerror
+from win32com.server.util import wrap
+
+try:
+    __file__ # 2.3 only for __main__; fall back to argv[0] on older Pythons.
+except NameError:
+    __file__ = sys.argv[0]
+
+class CPippo:
+    """Minimal registered COM server used by the test suite to exercise
+    wrapping of objects that carry a _typelib_guid_ attribute."""
+    #
+    # COM declarations    
+    #
+    _reg_clsid_ = "{05AC1CCE-3F9B-4d9a-B0B5-DFE8BE45AFA8}"
+    _reg_desc_ = "Pippo Python test object"
+    _reg_progid_ = "Python.Test.Pippo"
+    #_reg_clsctx_ = pythoncom.CLSCTX_LOCAL_SERVER    
+    ###
+    ### Link to typelib
+    _typelib_guid_ = '{41059C57-975F-4B36-8FF3-C5117426647A}'
+    _typelib_version_ = 1, 0
+    _com_interfaces_ = ['IPippo']
+
+    def __init__(self):
+        self.MyProp1 = 10
+
+    def Method1(self):
+        # Returns a new wrapped CPippo, per IPippo::Method1 in pippo.idl.
+        return wrap(CPippo())
+
+def BuildTypelib():
+    """Compile pippo.idl with MIDL (only if out of date) and register the typelib."""
+    from distutils.dep_util import newer
+    this_dir = os.path.dirname(__file__)
+    idl = os.path.abspath(os.path.join(this_dir, "pippo.idl"))
+    tlb=os.path.splitext(idl)[0] + '.tlb'
+    if newer(idl, tlb):
+        print "Compiling %s" % (idl,)
+        rc = os.system ('midl "%s"' % (idl,))
+        if rc:
+            raise RuntimeError, "Compiling MIDL failed!"
+        # Can't work out how to prevent MIDL from generating the stubs.
+        # just nuke them
+        for fname in "dlldata.c pippo_i.c pippo_p.c pippo.h".split():
+            os.remove(os.path.join(this_dir, fname))
+    
+    print "Registering %s" % (tlb,)
+    tli=pythoncom.LoadTypeLib(tlb)
+    pythoncom.RegisterTypeLib(tli,tlb)
+
+def UnregisterTypelib():
+    """Unregister the pippo typelib, ignoring 'was never registered' errors."""
+    k = CPippo
+    try:
+        pythoncom.UnRegisterTypeLib(k._typelib_guid_, 
+                                    k._typelib_version_[0], 
+                                    k._typelib_version_[1], 
+                                    0, 
+                                    pythoncom.SYS_WIN32)
+        print "Unregistered typelib"
+    except pythoncom.error, details:
+        if details[0]==winerror.TYPE_E_REGISTRYACCESS:
+            # Not registered in the first place - nothing to do.
+            pass
+        else:
+            raise
+
+def main(argv=None):
+    """Register (default) or unregister (--unregister) the CPippo server."""
+    if argv is None: argv = sys.argv[1:]
+    if '--unregister' in argv:
+        # Unregister the type-libraries.
+        UnregisterTypelib()
+    else:
+        # Build and register the type-libraries.
+        BuildTypelib()
+    import win32com.server.register 
+    win32com.server.register.UseCommandLine(CPippo)
+
+if __name__=='__main__':
+    # Note: passes the full sys.argv (script name included); main() only
+    # tests membership of "--unregister", so the extra element is harmless.
+    main(sys.argv)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/policySemantics.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/policySemantics.py
new file mode 100644
index 0000000..316a517
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/policySemantics.py
@@ -0,0 +1,89 @@
+import win32com.server.util
+import win32com.client
+import pythoncom
+import winerror
+import win32com.test.util
+
+import unittest
+
+class Error(Exception):
+    """Raised when a policy-semantics check fails."""
+    pass
+
+# An object representing a list of numbers
+class PythonSemanticClass:
+    """COM server mixing allocated and explicit DISPIDs.
+
+    "In" gets an allocated DISPID (the tests expect 1000); Add/Remove
+    carry fixed DISPIDs 10/11 via _dispid_to_func_; _NewEnum and
+    _Evaluate are surfaced under the standard DISPID_NEWENUM and
+    DISPID_EVALUATE (see DispExTest).
+    """
+    _public_methods_ = ["In"]  # DISPIDs are allocated.
+    _dispid_to_func_ = { 10: 'Add', 11:'Remove'} # DISPIDs specified by the object.
+    def __init__(self):
+        self.list = []
+    def _NewEnum(self):
+        return win32com.server.util.NewEnum(self.list)
+    def _value_(self):
+        # should return an array.
+        return self.list
+    def _Evaluate(self):
+        # return the sum
+        return reduce(lambda a,b: a+b, self.list, 0)
+    def In(self, value):
+        return value in self.list
+    def Add(self, value):
+        self.list.append(value)
+    def Remove(self, value):
+        self.list.remove(value)
+
+def DispExTest(ob):
+    """Verify DISPID allocation/lookup semantics via IDispatchEx on *ob*."""
+    if not __debug__: print "WARNING: Tests dressed up as assertions are being skipped!"
+    assert ob.GetDispID("Add", 0)==10, "Policy did not honour the dispid"
+# Not impl
+#       assert ob.GetMemberName(10, 0)=="add", "Policy did not give me the correct function for the dispid"
+    assert ob.GetDispID("Remove", 0)==11, "Policy did not honour the dispid"
+    assert ob.GetDispID("In", 0)==1000, "Allocated dispid unexpected value"
+    assert ob.GetDispID("_NewEnum", 0)==pythoncom.DISPID_NEWENUM, "_NewEnum() got unexpected DISPID"
+    # Enumerate every DISPID; GetNextDispID signals the end with S_FALSE.
+    dispids = []
+    dispid = -1
+    while 1:
+        try:
+            dispid = ob.GetNextDispID(0, dispid)
+            dispids.append(dispid)
+        except pythoncom.com_error, (hr, desc, exc, arg):
+            assert hr==winerror.S_FALSE, "Bad result at end of enum"
+            break
+    dispids.sort()
+    if dispids <> [pythoncom.DISPID_EVALUATE, pythoncom.DISPID_NEWENUM, 10, 11, 1000]:
+        raise Error, "Got back the wrong dispids: %s" % dispids
+
+def SemanticTest(ob):
+    # First just check our object "generally" as expected.
+    ob.Add(1)
+    ob.Add(2)
+    ob.Add(3)
+    # invoke _value_
+    if ob() != (1,2,3):
+        raise Error, "Bad result - got %s" % (`ob()`)
+
+    dispob = ob._oleobj_
+
+    rc = dispob.Invoke(pythoncom.DISPID_EVALUATE, 0, pythoncom.DISPATCH_METHOD|pythoncom.DISPATCH_PROPERTYGET, 1)
+    if rc != 6:
+        raise Error, "Evaluate returned", rc
+
+
+class Tester(win32com.test.util.TestCase):
+    """unittest wrapper driving SemanticTest and DispExTest against a
+    freshly wrapped PythonSemanticClass."""
+    def setUp(self):
+        # Flip "debug" to 1 to route calls through the debug dispatcher.
+        debug=0
+        import win32com.server.dispatcher
+        if debug:
+            dispatcher=win32com.server.dispatcher.DefaultDebugDispatcher
+        else:
+            dispatcher=None
+        disp = win32com.server.util.wrap(PythonSemanticClass(), useDispatcher=dispatcher)
+        self.ob = win32com.client.Dispatch(disp)
+    def tearDown(self):
+        self.ob = None
+    def testSemantics(self):
+        SemanticTest(self.ob)
+    def testIDispatchEx(self):
+        dispexob = self.ob._oleobj_.QueryInterface(pythoncom.IID_IDispatchEx)
+        DispExTest(dispexob)
+
+if __name__=='__main__':
+    # Run the Tester TestCase via unittest's CLI entry point.
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/readme.txt b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/readme.txt
new file mode 100644
index 0000000..02edbd5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/readme.txt
@@ -0,0 +1,18 @@
+COM Test Suite Readme
+---------------------
+
+Running the test suite:
+-----------------------
+* Open a command prompt
+* Change to the "win32com\test" directory.
+* run "testall.py".  This will perform level 1 testing.
+  You may specify 1, 2, or 3 on the command line ("testall.py 3")
+  to execute more tests.
+
+In general, this should just run the best it can, utilizing what is available
+on the machine.  It is likely some tests will refuse to run due to objects not
+being locally available - this is normal.
+
+The win32com source tree has source code to a C++ and VB component used purely
+for testing.  You may like to build and register these, particularly if you 
+are doing anything related to argument/result handling.
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testADOEvents.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testADOEvents.py
new file mode 100644
index 0000000..3131b74
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testADOEvents.py
@@ -0,0 +1,74 @@
+from win32com.client import Dispatch, DispatchWithEvents, constants
+import pythoncom
+import os
+import time
+
+finished = 0 # Flag for the wait loop from (3) to test
+
+class ADOEvents: # event handler class
+    """Event sink for the ADODB.Connection; only OnWillConnect and
+    OnConnectComplete do real work, the rest are deliberate no-ops."""
+    def OnWillConnect(self, str, user, pw, opt, sts, cn):
+        # Must have this event, as if it is not handled, ADO assumes the
+        # operation is cancelled, and raises an error (Operation cancelled
+        # by the user)
+        pass
+    def OnConnectComplete(self, error, status, connection):
+        # Assume no errors, until we have the basic stuff
+        # working. Now, "connection" should be an open
+        # connection to my data source
+        # Do the "something" from (2). For now, just
+        # print the connection data source
+        print "connection is", connection
+        print "Connected to", connection.Properties("Data Source")
+        # OK, our work is done. Let the main loop know
+        global finished
+        finished = 1
+    def OnCommitTransComplete(self, pError, adStatus, pConnection):
+        pass
+    def OnInfoMessage(self, pError, adStatus, pConnection):
+        pass
+    def OnDisconnect(self, adStatus, pConnection):
+        pass
+    def OnBeginTransComplete(self, TransactionLevel, pError, adStatus, pConnection):
+        pass
+    def OnRollbackTransComplete(self, pError, adStatus, pConnection):
+        pass
+    def OnExecuteComplete(self, RecordsAffected, pError, adStatus, pCommand, pRecordset, pConnection):
+        pass
+    def OnWillExecute(self, Source, CursorType, LockType, Options, adStatus, pCommand, pRecordset, pConnection):
+        pass
+
+def TestConnection(dbname):
+    # Create the ADO connection object, and link the event
+    # handlers into it
+    c = DispatchWithEvents("ADODB.Connection", ADOEvents)
+
+    # Initiate the asynchronous open
+    dsn = "Driver={Microsoft Access Driver (*.mdb)};Dbq=%s" % dbname
+    user = "system"
+    pw = "manager"
+    c.Open(dsn, user, pw, constants.adAsyncConnect)
+
+    # Sit in a loop, until our event handler (above) sets the
+    # "finished" flag or we time out.
+    end_time = time.clock() + 10
+    while time.clock() < end_time:
+        # Pump messages so that COM gets a look in
+        pythoncom.PumpWaitingMessages()
+    if not finished:
+        print "XXX - Failed to connect!"
+
+def Test():
+    """Create a temporary Access database, test an async ADO connection to
+    it, and always delete the database afterwards."""
+    import testAccess
+    try:
+        testAccess.GenerateSupport()
+    except pythoncom.com_error:
+        print "*** Can not import the MSAccess type libraries - tests skipped"
+        return
+    dbname = testAccess.CreateTestAccessDatabase()
+    try:
+        TestConnection(dbname)
+    finally:
+        os.unlink(dbname)
+
+if __name__=='__main__':
+    # Run the ADO event test stand-alone.
+    Test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testAXScript.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testAXScript.py
new file mode 100644
index 0000000..af184ee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testAXScript.py
@@ -0,0 +1,34 @@
+# Test AXScripting the best we can in an automated fashion...
+import win32api, os, sys
+
+import win32com.axscript
+import win32com.axscript.client
+
+import unittest
+import win32com.test.util
+
+verbose = "-v" in sys.argv
+
+class AXScript(win32com.test.util.TestCase):
+    """Automated ActiveX Scripting checks: registers pyscript.py, then
+    runs the script test host and a WSH .pys script as subprocesses."""
+    def setUp(self):
+        file = win32api.GetFullPathName(os.path.join(win32com.axscript.client.__path__[0], "pyscript.py"))
+        from util import RegisterPythonServer
+        self.verbose = verbose
+        RegisterPythonServer(file, self.verbose)
+
+    def testHost(self):
+        # Run testHost.py under the current Python interpreter.
+        file = win32api.GetFullPathName(os.path.join(win32com.axscript.__path__[0], "test\\testHost.py"))
+        cmd = '%s "%s"' % (win32api.GetModuleFileName(0), file)
+        if verbose:
+            print "Testing Python Scripting host"
+        win32com.test.util.ExecuteShellCommand(cmd, self)
+
+    def testCScript(self):
+        # Run a demo .pys script via the Windows Script Host.
+        file = win32api.GetFullPathName(os.path.join(win32com.axscript.__path__[0], "Demos\\Client\\wsh\\test.pys"))
+        cmd = 'cscript.exe "%s"' % (file)
+        if verbose:
+            print "Testing Windows Scripting host with Python script"
+        win32com.test.util.ExecuteShellCommand(cmd, self)
+
+if __name__=='__main__':
+    # Run the AXScript TestCase via unittest's CLI entry point.
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testAccess.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testAccess.py
new file mode 100644
index 0000000..7bbd077
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testAccess.py
@@ -0,0 +1,169 @@
+#
+# This assumes that you have MSAccess and DAO installed.
+#  You need to run makepy.py over "msaccess.tlb" and
+#  "dao3032.dll", and ensure the generated files are on the
+#  path.
+
+# You can run this with no args, and a test database will be generated.
+# You can optionally pass a dbname on the command line, in which case it will be dumped.
+
+import pythoncom
+from win32com.client import gencache, constants, Dispatch
+import win32api
+import os, sys
+
+def CreateTestAccessDatabase(dbname = None):
+    # Creates a test access database - returns the filename.
+    """Create a small Access database with two tables, an index, a
+    relationship and two records; returns the database filename.
+
+    Also exercises the VT_SAFEARRAY|VT_UI1 (bookmark) round-trip and
+    raises RuntimeError if the bookmark checks fail.
+    """
+    if dbname is None:
+        dbname = os.path.join( win32api.GetTempPath(), "COMTestSuiteTempDatabase.mdb" )
+
+    access = Dispatch("Access.Application")
+    dbEngine = access.DBEngine
+    workspace = dbEngine.Workspaces(0)
+
+    try:
+        os.unlink(dbname)
+    except os.error:
+        print "WARNING - Unable to delete old test database - expect a COM exception RSN!"
+
+    newdb = workspace.CreateDatabase( dbname, constants.dbLangGeneral, constants.dbEncrypt )
+
+    # Create one test table.
+    table = newdb.CreateTableDef("Test Table 1")
+    table.Fields.Append( table.CreateField("First Name", constants.dbText ) )
+    table.Fields.Append( table.CreateField("Last Name", constants.dbText ) )
+
+    index = table.CreateIndex("UniqueIndex")
+    index.Fields.Append( index.CreateField("First Name") )
+    index.Fields.Append( index.CreateField("Last Name") )
+    index.Unique = -1
+    table.Indexes.Append(index)
+
+    newdb.TableDefs.Append( table )
+
+    # Create a second test table.
+    table = newdb.CreateTableDef("Test Table 2")
+    table.Fields.Append( table.CreateField("First Name", constants.dbText ) )
+    table.Fields.Append( table.CreateField("Last Name", constants.dbText ) )
+
+    newdb.TableDefs.Append( table )
+
+    # Create a relationship between them
+    relation = newdb.CreateRelation("TestRelationship")
+    relation.Table = "Test Table 1"
+    relation.ForeignTable = "Test Table 2"
+
+    field = relation.CreateField("First Name")
+    field.ForeignName = "First Name"
+    relation.Fields.Append( field )
+
+    field = relation.CreateField("Last Name")
+    field.ForeignName = "Last Name"
+    relation.Fields.Append( field )
+
+    relation.Attributes = constants.dbRelationDeleteCascade + constants.dbRelationUpdateCascade
+
+    newdb.Relations.Append(relation)
+
+    # Finally we can add some data to the table.
+    tab1 = newdb.OpenRecordset("Test Table 1")
+    tab1.AddNew()
+    tab1.Fields("First Name").Value = "Mark"
+    tab1.Fields("Last Name").Value = "Hammond"
+    tab1.Update()
+
+    tab1.MoveFirst()
+    # We do a simple bookmark test which tests our optimized VT_SAFEARRAY|VT_UI1 support.
+    # The bookmark will be a buffer object - remember it for later.
+    bk = tab1.Bookmark
+
+    # Add a second record.
+    tab1.AddNew()
+    tab1.Fields("First Name").Value = "Second"
+    tab1.Fields("Last Name").Value = "Person"
+    tab1.Update()
+
+    # Reset the bookmark to the one we saved.
+    # But first check the test is actually doing something!
+    tab1.MoveLast()
+    if tab1.Fields("First Name").Value != "Second":
+        raise RuntimeError, "Unexpected record is last - makes bookmark test pointless!"
+
+    tab1.Bookmark = bk
+    if tab1.Bookmark != bk:
+        raise RuntimeError, "The bookmark data is not the same"
+
+    if tab1.Fields("First Name").Value != "Mark":
+        raise RuntimeError, "The bookmark did not reset the record pointer correctly"
+
+    return dbname
+
+
+def DoDumpAccessInfo(dbname):
+    """Open *dbname* in an Access Application and dump its structure
+    via daodump; always attempts to close the database on the way out."""
+    import daodump
+    a = forms = None
+    try:
+        sys.stderr.write("Creating Access Application...\n")
+        a=Dispatch("Access.Application")
+        print "Opening database %s" % dbname
+        a.OpenCurrentDatabase(dbname)
+        db = a.CurrentDb()
+        daodump.DumpDB(db,1)
+        forms = a.Forms
+        print "There are %d forms open." % (len(forms))
+# Uncommenting these lines means Access remains open.
+#               for form in forms:
+#                       print " %s" % form.Name
+        reports = a.Reports
+        print "There are %d reports open" % (len(reports))
+    finally:
+        if not a is None:
+            sys.stderr.write("Closing database\n")
+            try:
+                a.CloseCurrentDatabase()
+            except pythoncom.com_error:
+                pass
+
+# Generate all the support we can.
+def GenerateSupport():
+    """Ensure makepy support exists for DAO and the Access application."""
+    # dao
+    gencache.EnsureModule("{00025E01-0000-0000-C000-000000000046}", 0, 4, 0)
+    # Access
+#       gencache.EnsureModule("{4AFFC9A0-5F99-101B-AF4E-00AA003F0F07}", 0, 8, 0)
+    gencache.EnsureDispatch("Access.Application")
+
+def DumpAccessInfo(dbname):
+    """Dump *dbname* dynamically first (only when no makepy modules exist
+    yet), generate makepy support, then dump again with generated code."""
+    amod = gencache.GetModuleForProgID("Access.Application")
+    dmod = gencache.GetModuleForProgID("DAO.DBEngine.35")
+    if amod is None and dmod is None:
+        DoDumpAccessInfo(dbname)
+        # Now generate all the support we can.
+        GenerateSupport()
+    else:
+        sys.stderr.write("testAccess not doing dynamic test, as generated code already exists\n")
+    # Now a generated version.
+    DoDumpAccessInfo(dbname)
+
+def test(dbname = None):
+    """Dump *dbname*; when None, first create a temporary test database."""
+    if dbname is None:
+        # We need makepy support to create a database (just for the constants!)
+        try:
+            GenerateSupport()
+        except pythoncom.com_error:
+            print "*** Can not import the MSAccess type libraries - tests skipped"
+            return
+        dbname = CreateTestAccessDatabase()
+        print "A test database at '%s' was created" % dbname
+
+    DumpAccessInfo(dbname)
+
+if __name__=='__main__':
+    # Optional argv[1] names an existing database to dump; otherwise a
+    # temporary one is created.  CheckClean reports leaked COM objects.
+    import sys
+    from util import CheckClean
+    dbname = None
+    if len(sys.argv)>1:
+        dbname = sys.argv[1]
+
+    test(dbname)
+
+    CheckClean()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testArrays.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testArrays.py
new file mode 100644
index 0000000..c9b4e7f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testArrays.py
@@ -0,0 +1,124 @@
+# Originally contributed by Stefan Schukat as part of this arbitrary-sized
+# arrays patch.
+from win32com.client import gencache
+import util
+import unittest
+
+# Test fixtures of increasing rank; the names describe the nominal
+# dimensionality each test case feeds to the SAFEARRAY converter.
+ZeroD = 0
+OneDEmpty = []
+OneD  = [1,2,3]
+TwoD = [
+        [1,2,3],
+        [1,2,3],
+        [1,2,3]
+       ]
+
+# The fixtures below are deliberately ragged (non-rectangular) nested
+# lists, exercising arbitrary-shaped array conversion.
+TwoD1 = [
+          [
+            [1,2,3,5],
+            [1,2,3],
+            [1,2,3]
+          ],
+          [
+            [1,2,3],
+            [1,2,3],
+            [1,2,3]
+          ]
+       ]
+
+OneD1 = [
+          [
+            [1,2,3],
+            [1,2,3],
+            [1,2,3]
+          ],
+          [
+             [1,2,3],
+             [1,2,3]
+          ]
+         ]
+
+OneD2 = [
+          [1,2,3],
+          [1,2,3,4,5],
+          [
+             [1,2,3,4,5],
+             [1,2,3,4,5],
+             [1,2,3,4,5]
+          ]
+         ]
+
+
+ThreeD = [
+          [
+            [1,2,3],
+            [1,2,3],
+            [1,2,3]
+          ],
+          [
+              [1,2,3],
+              [1,2,3],
+              [1,2,3]
+          ]
+          ]
+
+FourD = [
+          [
+            [[1,2,3],[1,2,3],[1,2,3]],
+            [[1,2,3],[1,2,3],[1,2,3]],
+            [[1,2,3],[1,2,3],[1,2,3]]
+          ],
+          [
+              [[1,2,3],[1,2,3],[1,2,3]],
+              [[1,2,3],[1,2,3],[1,2,3]],
+              [[1,2,3],[1,2,3],[1,2,3]]
+          ]
+          ]
+
+# A large fixture to stress conversion of big arrays.
+LargeD = [
+    [ [range(10)] * 10],
+] * 512
+
+def _normalize_array(a):
+    # COM returns SAFEARRAYs as (nested) tuples; recursively convert them
+    # back to lists so they compare equal to the original list fixtures.
+    if type(a) != type(()):
+        return a
+    ret = []
+    for i in a:
+        ret.append(_normalize_array(i))
+    return ret
+
+class ArrayTest(util.TestCase):
+    """Round-trip each fixture through the PyCOMTest.ArrayTest server's
+    Array property and check the value survives unchanged."""
+    def setUp(self):
+        self.arr = gencache.EnsureDispatch("PyCOMTest.ArrayTest")
+    def tearDown(self):
+        self.arr = None
+    def _doTest(self, array):
+        # Set then read back; normalize the returned tuples to lists
+        # before comparing with the original fixture.
+        self.arr.Array = array
+        self.failUnlessEqual(_normalize_array(self.arr.Array), array)
+    def testZeroD(self):
+        self._doTest(ZeroD)
+    def testOneDEmpty(self):
+        self._doTest(OneDEmpty)
+    def testOneD(self):
+        self._doTest(OneD)
+    def testTwoD(self):
+        self._doTest(TwoD)
+    def testThreeD(self):
+        self._doTest(ThreeD)
+    def testFourD(self):
+        self._doTest(FourD)
+    def testTwoD1(self):
+        self._doTest(TwoD1)
+    def testOneD1(self):
+        self._doTest(OneD1)
+    def testOneD2(self):
+        self._doTest(OneD2)
+    def testLargeD(self):
+        self._doTest(LargeD)
+
+if __name__ == "__main__":
+    try:
+        util.testmain()
+    except SystemExit, rc:
+        # NOTE(review): "rc" is the SystemExit *instance*, which is always
+        # truthy, so "not rc" is always false and every exit is swallowed.
+        # The intent was probably to re-raise on a non-zero exit code
+        # (rc.code) - confirm before changing.
+        if not rc:
+            raise
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testClipboard.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testClipboard.py
new file mode 100644
index 0000000..f634a038
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testClipboard.py
@@ -0,0 +1,155 @@
+# testClipboard.py
+import unittest
+import pythoncom
+import win32con
+import winerror
+import win32clipboard
+
+from win32com.server.util import NewEnum, wrap
+from win32com.server.exception import COMException
+
+# Names of all IDataObject methods, exposed as _public_methods_ below.
+IDataObject_Methods = """GetData GetDataHere QueryGetData
+                         GetCanonicalFormatEtc SetData EnumFormatEtc
+                         DAdvise DUnadvise EnumDAdvise""".split()
+
+# A COM object implementing IDataObject used for basic testing.
+# Live-instance counter, used to verify clipboard release semantics.
+num_do_objects = 0
+
+def WrapCOMObject(ob, iid=None):
+    # Wrap a Python instance as a COM object (no debug dispatcher).
+    return wrap(ob, iid=iid, useDispatcher = 0)
+
+class TestDataObject:
+    _com_interfaces_ = [pythoncom.IID_IDataObject]
+    _public_methods_ = IDataObject_Methods
+    def __init__(self, strval):
+        global num_do_objects
+        num_do_objects += 1
+        self.strval = strval
+        self.supported_fe = []
+        for cf in (win32con.CF_TEXT, win32con.CF_UNICODETEXT):
+            fe = cf, None, pythoncom.DVASPECT_CONTENT, -1, pythoncom.TYMED_HGLOBAL
+            self.supported_fe.append(fe)
+
+    def __del__(self):
+        global num_do_objects
+        num_do_objects -= 1
+
+    def _query_interface_(self, iid):
+        if iid==pythoncom.IID_IEnumFORMATETC:
+            return NewEnum(self.supported_fe, iid=iid)
+
+    def GetData(self, fe):
+        ret_stg = None
+        cf, target, aspect, index, tymed  = fe
+        if aspect & pythoncom.DVASPECT_CONTENT and \
+           tymed==pythoncom.TYMED_HGLOBAL:
+            if cf == win32con.CF_TEXT:
+                ret_stg = pythoncom.STGMEDIUM()
+                ret_stg.set(pythoncom.TYMED_HGLOBAL, self.strval)
+            elif cf == win32con.CF_UNICODETEXT:
+                ret_stg = pythoncom.STGMEDIUM()
+                ret_stg.set(pythoncom.TYMED_HGLOBAL, unicode(self.strval))
+
+        if ret_stg is None:
+            raise COMException(hresult=winerror.E_NOTIMPL)
+        return ret_stg
+
+    def GetDataHere(self, fe):
+        raise COMException(hresult=winerror.E_NOTIMPL)
+
+    def QueryGetData(self, fe):
+        cf, target, aspect, index, tymed  = fe
+        if aspect & pythoncom.DVASPECT_CONTENT == 0:
+            raise COMException(hresult=winerror.DV_E_DVASPECT)
+        if tymed!=pythoncom.TYMED_HGLOBAL:
+            raise COMException(hresult=winerror.DV_E_TYMED)
+        return None # should check better
+
+    def GetCanonicalFormatEtc(self, fe):
+        RaiseCOMException(winerror.DATA_S_SAMEFORMATETC)
+        # return fe
+
+    def SetData(self, fe, medium):
+        raise COMException(hresult=winerror.E_NOTIMPL)
+
+    def EnumFormatEtc(self, direction):
+        if direction != pythoncom.DATADIR_GET:
+            raise COMException(hresult=winerror.E_NOTIMPL)
+        return NewEnum(self.supported_fe, iid=pythoncom.IID_IEnumFORMATETC)
+
+    def DAdvise(self, fe, flags, sink):
+        raise COMException(hresult=winerror.E_NOTIMPL)
+
+    def DUnadvise(self, connection):
+        raise COMException(hresult=winerror.E_NOTIMPL)
+
+    def EnumDAdvise(self):
+        raise COMException(hresult=winerror.E_NOTIMPL)
+
+class ClipboardTester(unittest.TestCase):
+    """Exercises OLE clipboard interop both ways: a Python IDataObject
+    read via win32clipboard, and win32clipboard data read via
+    OleGetClipboard; also checks object-release semantics."""
+    def setUp(self):
+        pythoncom.OleInitialize()
+    def tearDown(self):
+        try:
+            pythoncom.OleFlushClipboard()
+        except pythoncom.com_error:
+            # We never set anything!
+            pass
+    def testIsCurrentClipboard(self):
+        do = TestDataObject("Hello from Python")
+        do = WrapCOMObject(do, iid=pythoncom.IID_IDataObject)
+        pythoncom.OleSetClipboard(do)
+        self.failUnless(pythoncom.OleIsCurrentClipboard(do))
+
+    def testComToWin32(self):
+        # Set the data via our DataObject
+        do = TestDataObject("Hello from Python")
+        do = WrapCOMObject(do, iid=pythoncom.IID_IDataObject)
+        pythoncom.OleSetClipboard(do)
+        # Then get it back via the standard win32 clipboard functions.
+        win32clipboard.OpenClipboard()
+        got = win32clipboard.GetClipboardData(win32con.CF_TEXT)
+        self.assertEqual(got, "Hello from Python")
+        # Now check unicode
+        got = win32clipboard.GetClipboardData(win32con.CF_UNICODETEXT)
+        self.assertEqual(got, u"Hello from Python")
+        win32clipboard.CloseClipboard()
+
+    def testWin32ToCom(self):
+        # Set the data via the std win32 clipboard functions.
+        val = "Hello again!"
+        win32clipboard.OpenClipboard()
+        win32clipboard.SetClipboardData(win32con.CF_TEXT, val)
+        win32clipboard.CloseClipboard()
+        # and get it via an IDataObject provided by COM
+        do = pythoncom.OleGetClipboard()
+        cf = win32con.CF_TEXT, None, pythoncom.DVASPECT_CONTENT, -1, pythoncom.TYMED_HGLOBAL
+        stg = do.GetData(cf)
+        got = stg.data
+        # The data we get back has the \0, as our STGMEDIUM has no way of 
+        # knowing if it meant to be a string, or a binary buffer, so
+        # it must return it too.
+        self.failUnlessEqual(got, val+"\0")
+        
+    def testDataObjectFlush(self):
+        # Flushing renders the data and must release our object.
+        do = TestDataObject("Hello from Python")
+        do = WrapCOMObject(do, iid=pythoncom.IID_IDataObject)
+        pythoncom.OleSetClipboard(do)
+        self.assertEqual(num_do_objects, 1)
+
+        do = None # clear my ref!
+        pythoncom.OleFlushClipboard()
+        self.assertEqual(num_do_objects, 0)
+
+    def testDataObjectReset(self):
+        # Clearing the clipboard must release the object as well.
+        do = TestDataObject("Hello from Python")
+        do = WrapCOMObject(do)
+        pythoncom.OleSetClipboard(do)
+        do = None # clear my ref!
+        self.assertEqual(num_do_objects, 1)
+        pythoncom.OleSetClipboard(None)
+        self.assertEqual(num_do_objects, 0)
+
+if __name__=='__main__':
+    # Run through the shared test harness (also checks COM object leaks).
+    import util
+    util.testmain()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testCollections.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testCollections.py
new file mode 100644
index 0000000..8a7129fd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testCollections.py
@@ -0,0 +1,143 @@
+# testCollections.py
+#
+# This code tests both the client and server side of collections
+# and enumerators.
+#
+# Also has the side effect of testing some of the PythonCOM error semantics.
+import sys
+import win32com.server.util
+import win32com.test.util
+import win32com.client
+import traceback
+import pythoncom
+import pywintypes
+import winerror
+L=pywintypes.Unicode  # legacy shorthand for constructing Unicode strings
+
+import unittest
+
+# Python-2 style string "exception" raised by the tests below on failure.
+error = "collection test error"
+
+def MakeEmptyEnum():
+    # create the Python enumerator object as a real COM object
+    # (an empty Collection wrapped and re-dispatched through COM).
+    o = win32com.server.util.wrap( win32com.server.util.Collection() )
+    return win32com.client.Dispatch(o)
+
+def MakeTestEnum():
+    # create a sub-collection, just to make sure it works :-)
+    sub = win32com.server.util.wrap( win32com.server.util.Collection( ['Sub1', 2, 'Sub3']) )
+    # create the Python enumerator object as a real COM object;
+    # the wrapped sub-collection is its fourth element.
+    o = win32com.server.util.wrap( win32com.server.util.Collection( [1,'Two',3, sub]))
+    return win32com.client.Dispatch(o)
+
+def TestEnumAgainst(o,check):
+    for i in range(len(check)):
+        if o(i) != check[i]:
+            raise error, "Using default method gave the incorrect value - %s/%s" % (`o(i)`, `check[i]`)
+
+    for i in range(len(check)):
+        if o.Item(i) != check[i]:
+            raise error, "Using Item method gave the incorrect value - %s/%s" % (`o(i)`, `check[i]`)
+
+    # First try looping.
+    cmp = []
+    for s in o:
+        cmp.append(s)
+
+    if cmp[:len(check)] != check:
+        raise error, "Result after looping isnt correct - %s/%s" % (`cmp[:len(check)]`, `check`)
+
+    for i in range(len(check)):
+        if o[i] != check[i]:
+            raise error, "Using indexing gave the incorrect value"
+
+
+def TestEnum(quiet=None):
+    # End-to-end collection test: contents, sub-collection, Remove/Add/
+    # Insert, expected COM error codes, and an empty collection.
+    # NOTE: the steps are order-dependent - `check` mirrors the mutations.
+    if quiet is None:
+        quiet = not "-v" in sys.argv
+    if not quiet: print "Simple enum test"
+    o = MakeTestEnum()
+    check = [1,'Two',3]
+    TestEnumAgainst(o, check)
+
+    if not quiet: print "sub-collection test"
+    sub = o[3]
+    TestEnumAgainst(sub ,['Sub1', 2, 'Sub3'])
+
+    # Remove the sublist for this test!
+    o.Remove(o.Count()-1)
+
+    if not quiet: print "Remove item test"
+    del check[1]
+    o.Remove(1)
+    TestEnumAgainst(o, check)
+
+    if not quiet: print "Add item test"
+    o.Add('New Item')
+    check.append('New Item')
+    TestEnumAgainst(o, check)
+
+    if not quiet: print "Insert item test"
+    o.Insert(2, -1)
+    check.insert(2, -1)
+    TestEnumAgainst(o, check)
+
+### This does not work!
+#       if not quiet: print "Indexed replace item test"
+#       o[2] = 'Replaced Item'
+#       check[2] = 'Replaced Item'
+#       TestEnumAgainst(o, check)
+
+    # Calling the default method with no args must fail with
+    # DISP_E_BADPARAMCOUNT.
+    try:
+        o()
+        raise error, "default method with no args worked when it shouldnt have!"
+    except pythoncom.com_error, (hr, desc, exc, argErr):
+        if hr != winerror.DISP_E_BADPARAMCOUNT:
+            raise error, "Expected DISP_E_BADPARAMCOUNT - got %d (%s)" % (hr, desc)
+
+    # A non-integer index must fail with DISP_E_TYPEMISMATCH.
+    try:
+        o.Insert("foo", 2)
+        raise error, "Insert worked when it shouldnt have!"
+    except pythoncom.com_error, (hr, desc, exc, argErr):
+        if hr != winerror.DISP_E_TYPEMISMATCH:
+            raise error, "Expected DISP_E_TYPEMISMATCH - got %d (%s)" % (hr, desc)
+
+    # Remove the sublist for this test!
+    try:
+        o.Remove(o.Count())
+        raise error, "Remove worked when it shouldnt have!"
+    except pythoncom.com_error, (hr, desc, exc, argErr):
+        if hr != winerror.DISP_E_BADINDEX:
+            raise error, "Expected DISP_E_BADINDEX - got %d (%s)" % (hr, desc)
+
+    # Test an empty collection
+    if not quiet: print "Empty collection test"
+    o = MakeEmptyEnum()
+    for item in o:
+        raise error, "Empty list performed an iteration"
+
+    try:
+        ob = o[1]
+        raise error, "Empty list could be indexed"
+    except IndexError:
+        pass
+
+    try:
+        ob = o[0]
+        raise error, "Empty list could be indexed"
+    except IndexError:
+        pass
+
+    try:
+        ob = o(0)
+        raise error, "Empty list could be indexed"
+    except pythoncom.com_error, (hr, fn, desc, arg):
+        if hr != winerror.DISP_E_BADINDEX:
+            raise error, "Expected DISP_E_BADINDEX - got %d (%s)" % (hr, desc)
+
+class TestCase(win32com.test.util.TestCase):
+    """unittest wrapper around the procedural TestEnum() driver."""
+    def testEnum(self):
+        TestEnum()
+
+if __name__=='__main__':
+    # Discover and run TestCase above.
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDCOM.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDCOM.py
new file mode 100644
index 0000000..ac8bace
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDCOM.py
@@ -0,0 +1,40 @@
+# testDCOM
+# Printed when the script is invoked without exactly one argument.
+usage="""\
+testDCOM.py - Simple DCOM test
+Usage: testDCOM.py serverName
+
+Attempts to start the Python.Interpreter object on the named machine,
+and checks that the object is indeed running remotely.
+
+Requires the named server be configured to run DCOM (using dcomcnfg.exe),
+and the Python.Interpreter object installed and registered on that machine.
+
+The Python.Interpreter object must be installed on the local machine,
+but no special DCOM configuration should be necessary.
+"""
+# NOTE: If you configured the object locally using dcomcnfg, you could
+# simple use Dispatch rather than DispatchEx.
+import pythoncom, win32com.client, win32api, string, sys
+
+def test(serverName):
+    # Create Python.Interpreter remotely on serverName and verify the
+    # object really runs there by asking it for its computer name.
+    if string.lower(serverName)==string.lower(win32api.GetComputerName()):
+        print "You must specify a remote server name, not the local machine!"
+        return
+
+    # Hack to overcome a DCOM limitation.  As the Python.Interpreter object
+    # is probably installed locally as an InProc object, DCOM seems to ignore
+    # all settings, and use the local object.
+    clsctx = pythoncom.CLSCTX_SERVER & ~pythoncom.CLSCTX_INPROC_SERVER
+    ob = win32com.client.DispatchEx("Python.Interpreter", serverName, clsctx=clsctx)
+    ob.Exec("import win32api")
+    actualName = ob.Eval("win32api.GetComputerName()")
+    if string.lower(serverName) != string.lower(actualName):
+        print "Error: The object created on server '%s' reported its name as '%s'" % (serverName, actualName)
+    else:
+        print "Object created and tested OK on server '%s'" % serverName
+
+if __name__=='__main__':
+    # Requires exactly one argument: the remote server name.
+    if len(sys.argv) == 2:
+        test(sys.argv[1])
+    else:
+        print usage
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDictionary.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDictionary.py
new file mode 100644
index 0000000..73633722
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDictionary.py
@@ -0,0 +1,80 @@
+# testDictionary.py
+#
+import sys
+import win32com.server.util
+import win32com.test.util
+import win32com.client
+import traceback
+import pythoncom
+import pywintypes
+import winerror
+L=pywintypes.Unicode  # legacy shorthand for constructing Unicode strings
+
+import unittest
+
+# Python-2 style string "exception" raised by the tests below on failure.
+error = "dictionary test error"
+
+def MakeTestDictionary():
+    # Create the registered Python.Dictionary COM server via late binding.
+    return win32com.client.Dispatch("Python.Dictionary")
+
+def TestDictAgainst(dict,check):
+    # Verify every key/value in `check` is returned by the COM dictionary's
+    # default (call) accessor.
+    # NOTE(review): the failure message uses dict[key] while the test uses
+    # dict(key) - presumably equivalent for this object, but confirm.
+    for key, value in check.items():
+        if dict(key) != value:
+            raise error, "Indexing for '%s' gave the incorrect value - %s/%s" % (`key`, `dict[key]`, `check[key]`)
+
+# Ensure we have the correct version registered.
+# Ensure we have the correct version registered.
+def Register(quiet):
+    # (Re-)register the Python.Dictionary COM server before testing.
+    import win32com.server.register
+    from win32com.servers.dictionary import DictionaryPolicy
+    win32com.server.register.RegisterClasses(DictionaryPolicy, quiet=quiet)
+
+def TestDict(quiet=None):
+    # Full Python.Dictionary test: set/get/delete via item access, then
+    # verify the COM error codes for bad calls.
+    if quiet is None:
+        quiet = not "-v" in sys.argv
+    Register(quiet)
+
+    if not quiet: print "Simple enum test"
+    dict = MakeTestDictionary()
+    checkDict = {}
+    TestDictAgainst(dict, checkDict)
+
+    dict["NewKey"] = "NewValue"
+    checkDict["NewKey"] = "NewValue"
+    TestDictAgainst(dict, checkDict)
+
+    # Assigning None deletes the key from the COM dictionary.
+    dict["NewKey"] = None
+    del checkDict["NewKey"]
+    TestDictAgainst(dict, checkDict)
+
+    if not quiet:
+        print "Failure tests"
+    # No-arg call must fail with DISP_E_BADPARAMCOUNT.
+    try:
+        dict()
+        raise error, "default method with no args worked when it shouldnt have!"
+    except pythoncom.com_error, (hr, desc, exc, argErr):
+        if hr != winerror.DISP_E_BADPARAMCOUNT:
+            raise error, "Expected DISP_E_BADPARAMCOUNT - got %d (%s)" % (hr, desc)
+
+    # Multi-arg call must also fail with DISP_E_BADPARAMCOUNT.
+    try:
+        dict("hi", "there")
+        raise error, "multiple args worked when it shouldnt have!"
+    except pythoncom.com_error, (hr, desc, exc, argErr):
+        if hr != winerror.DISP_E_BADPARAMCOUNT:
+            raise error, "Expected DISP_E_BADPARAMCOUNT - got %d (%s)" % (hr, desc)
+
+    # Non-string keys must fail with DISP_E_TYPEMISMATCH.
+    try:
+        dict(0)
+        raise error, "int key worked when it shouldnt have!"
+    except pythoncom.com_error, (hr, desc, exc, argErr):
+        if hr != winerror.DISP_E_TYPEMISMATCH:
+            raise error, "Expected DISP_E_TYPEMISMATCH - got %d (%s)" % (hr, desc)
+
+    if not quiet:
+        print "Python.Dictionary tests complete."
+
+class TestCase(win32com.test.util.TestCase):
+    """unittest wrapper around the procedural TestDict() driver."""
+    def testDict(self):
+        TestDict()
+
+if __name__=='__main__':
+    # Discover and run TestCase above.
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDictionary.vbs b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDictionary.vbs
new file mode 100644
index 0000000..9c13053
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDictionary.vbs
@@ -0,0 +1,26 @@
+' Test Python.Dictionary using VBScript - this uses

+' IDispatchEx, so is an interesting test.

+

+set ob = CreateObject("Python.Dictionary")

+ob("hello") = "there"

+' Our keys are case insensitive.

+ob.Item("hi") = ob("HELLO")

+

+dim ok

+ok = true

+

+if ob("hello") <> "there" then

+    WScript.Echo "**** The dictionary value was wrong!!"

+    ok = false

+end if

+

+if ob("hi") <> "there" then

+        WScript.Echo "**** The other dictionary value was wrong!!"

+        ok = false

+end if

+

+if ok then

+    WScript.Echo "VBScript has successfully tested Python.Dictionary"

+end if

+

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDynamic.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDynamic.py
new file mode 100644
index 0000000..38d02b2d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testDynamic.py
@@ -0,0 +1,75 @@
+# Test dynamic policy, and running object table.
+
+import pythoncom
+import winerror
+
+from win32com.server.exception import Exception
+
+# Python-2 style string "exception" raised by this test on failure.
+error = "testDynamic error"
+
+# CLSID under which the test object is registered in the Running Object Table.
+iid = pythoncom.MakeIID("{b48969a0-784b-11d0-ae71-d23f56000000}")
+
+class VeryPermissive:
+    """A dynamic-policy COM object: methods dispatch to Python methods,
+    property get/set map directly onto the instance __dict__."""
+    def _dynamic_(self, name, lcid, wFlags, args):
+        if wFlags & pythoncom.DISPATCH_METHOD:
+            return apply(getattr(self,name),args)
+
+        if wFlags & pythoncom.DISPATCH_PROPERTYGET:
+            try:
+                # to avoid problems with byref param handling, tuple results are converted to lists.
+                ret = self.__dict__[name]
+                if type(ret)==type(()):
+                    ret = list(ret)
+                return ret
+            except KeyError: # Probably a method request.
+                raise Exception(scode=winerror.DISP_E_MEMBERNOTFOUND)
+
+        if wFlags & (pythoncom.DISPATCH_PROPERTYPUT | pythoncom.DISPATCH_PROPERTYPUTREF):
+            # Property put: store the first argument as an instance attribute.
+            setattr(self, name, args[0])
+            return
+
+        raise Exception(scode=winerror.E_INVALIDARG, desc="invalid wFlags")
+
+    def write(self, *args):
+        # Print all args on one line; at least one arg is required so a
+        # zero-arg call (really a PROPGET) fails loudly.
+        if len(args)==0:
+            raise Exception(scode=winerror.DISP_E_BADPARAMCOUNT) # Probably call as PROPGET.
+
+        for arg in args[:-1]:
+            print str(arg),
+        print str(args[-1])
+
+def Test():
+    import win32com.server.util, win32com.server.policy
+#       import win32dbg;win32dbg.brk()
+    ob = win32com.server.util.wrap(VeryPermissive(),usePolicy=win32com.server.policy.DynamicPolicy)
+    try:
+        handle = pythoncom.RegisterActiveObject(ob, iid, 0)
+    except pythoncom.com_error, details:
+        print "Warning - could not register the object in the ROT:", details
+        handle = None
+    try:
+        import win32com.client.dynamic
+        client = win32com.client.dynamic.Dispatch(iid)
+        client.ANewAttr = "Hello"
+        if client.ANewAttr != "Hello":
+            raise error, "Could not set dynamic property"
+
+        v = ["Hello","From","Python",1.4]
+        client.TestSequence = v
+        if v != list(client.TestSequence):
+            raise error, "Dynamic sequences not working! %r/%r" % (repr(v), repr(client.testSequence))
+
+        client.write("This","output","has","come","via","COM")
+        # Check our new "_FlagAsMethod" works (kinda!)
+        client._FlagAsMethod("NotReallyAMethod")
+        if not callable(client.NotReallyAMethod):
+            raise error, "Method I flagged as callable isn't!"
+
+
+        client = None
+    finally:
+        if handle is not None:
+            pythoncom.RevokeActiveObject(handle)
+
+if __name__=='__main__':
+    Test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testExchange.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testExchange.py
new file mode 100644
index 0000000..288c7d2c6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testExchange.py
@@ -0,0 +1,102 @@
+# TestExchange = Exchange Server Dump
+# Note that this code uses "CDO", which is unlikely to get the best choice.
+# You should use the Outlook object model, or
+# the win32com.mapi examples for a low-level interface.
+
+from win32com.client import gencache, constants
+import pythoncom
+import os
+
+# Typelib module for the CDO object model; presumably None when CDO/MAPI
+# is not installed (test() checks for that) - confirm EnsureModule behavior.
+ammodule = gencache.EnsureModule('{3FA7DEA7-6438-101B-ACC1-00AA00423326}', 0, 1, 1)
+
+def GetDefaultProfileName():
+    # Read the default MAPI profile name from the registry, or return
+    # None if the key or value is missing.
+    import win32api, win32con
+    try:
+        key = win32api.RegOpenKey(win32con.HKEY_CURRENT_USER, "Software\\Microsoft\\Windows NT\\CurrentVersion\\Windows Messaging Subsystem\\Profiles")
+        try:
+            return win32api.RegQueryValueEx(key, "DefaultProfile")[0]
+        finally:
+            key.Close()
+    except win32api.error:
+        return None
+
+#
+# Recursive dump of folders.
+#
+def DumpFolder(folder, indent = 0):
+    # Recursively print the folder name and all child folders, indenting
+    # one space per nesting level.
+    print " " * indent, folder.Name
+    folders = folder.Folders
+    folder = folders.GetFirst()
+    while folder:
+        DumpFolder(folder, indent+1)
+        folder = folders.GetNext()
+
+def DumpFolders(session):
+    infostores = session.InfoStores
+    print infostores
+    print "There are %d infostores" % infostores.Count
+    for i in range(infostores.Count):
+        infostore = infostores[i+1]
+        print "Infostore = ", infostore.Name
+        try:
+            folder = infostore.RootFolder
+        except pythoncom.com_error, details:
+            hr, msg, exc, arg = details
+            # -2147221219 == MAPI_E_FAILONEPROVIDER - a single provider temporarily not available.
+            if exc and exc[-1]==-2147221219:
+                print "This info store is currently not available"
+                continue
+        DumpFolder(folder)
+
+# Build a dictionary of property tags, so I can reverse look-up
+# numeric property IDs back to their symbolic constant names.
+PropTagsById={}
+if ammodule:
+    for name, val in ammodule.constants.__dict__.items():
+        PropTagsById[val] = name
+
+
+def TestAddress(session):
+    # Address-book lookup test, currently disabled.
+#       entry = session.GetAddressEntry("Skip")
+#       print entry
+    pass
+
+
+def TestUser(session):
+    # Print every field of the current user's address entry, using
+    # PropTagsById to show symbolic names where known.
+    ae = session.CurrentUser
+    fields = ae.Fields
+    print "User has %d fields" % fields.Count
+    for f in range(len(fields)):
+        field = fields[f+1]
+        try:
+            id = PropTagsById[field.ID]
+        except KeyError:
+            id = field.ID
+        print "%s/%s=%s" % (field.Name, id, field.Value)
+
+def test():
+    # Entry point: log on to the default MAPI profile and run the user,
+    # address and folder dumps; always log off and restore the cwd.
+    if not ammodule:
+        print "MAPI does not appear to be installed on this machine - skipping."
+        return
+
+    import win32com.client
+    oldcwd = os.getcwd()
+    session = win32com.client.Dispatch("MAPI.Session")
+    try:
+        session.Logon(GetDefaultProfileName())
+    except pythoncom.com_error, details:
+        print "Could not log on to MAPI:", details
+        return
+    try:
+        TestUser(session)
+        TestAddress(session)
+        DumpFolders(session)
+    finally:
+        session.Logoff()
+        # It appears Exchange will change the cwd on us :(
+        os.chdir(oldcwd)
+
+if __name__=='__main__':
+    # Run the test, then verify no COM objects leaked.
+    from util import CheckClean
+    test()
+    CheckClean()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testExplorer.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testExplorer.py
new file mode 100644
index 0000000..5013cd5c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testExplorer.py
@@ -0,0 +1,84 @@
+# testExplorer -
+
+import string
+import sys
+import os
+import win32com.client.dynamic
+import win32api
+import glob
+import pythoncom
+import time
+from util import CheckClean
+
+# Set by ExplorerEvents.OnVisible; reset between the two event tests below.
+bVisibleEventFired = 0
+
+class ExplorerEvents:
+    """IE event sink: records that the OnVisible event fired."""
+    def OnVisible(self, visible):
+        global bVisibleEventFired
+        bVisibleEventFired = 1
+
+def TestExplorerEvents():
+    # Exercise IE events two ways: DispatchWithEvents creating the object,
+    # and DispatchWithEvents attaching to an existing object.
+    global bVisibleEventFired
+    iexplore = win32com.client.DispatchWithEvents("InternetExplorer.Application", ExplorerEvents)
+    iexplore.Visible = 1
+    if not bVisibleEventFired:
+        raise RuntimeError, "The IE event did not appear to fire!"
+    iexplore.Quit()
+    iexplore = None
+
+    bVisibleEventFired = 0
+    ie = win32com.client.Dispatch("InternetExplorer.Application")
+    ie_events = win32com.client.DispatchWithEvents(ie, ExplorerEvents)
+    ie.Visible = 1
+    if not bVisibleEventFired:
+        raise RuntimeError, "The IE event did not appear to fire!"
+    ie.Quit()
+    ie = None
+    print "IE Event tests worked."
+
+
+def TestExplorer(iexplore):
+    # Show the browser, navigate to the test page, wait briefly, then quit.
+    if not iexplore.Visible: iexplore.Visible = -1
+    try:
+        iexplore.Navigate(win32api.GetFullPathName('..\\readme.htm'))
+    except pythoncom.com_error, details:
+        print "Warning - could not open the test HTML file", details
+#       for fname in glob.glob("..\\html\\*.html"):
+#               print "Navigating to", fname
+#               while iexplore.Busy:
+#                       win32api.Sleep(100)
+#               iexplore.Navigate(win32api.GetFullPathName(fname))
+    win32api.Sleep(4000)
+    try:
+        iexplore.Quit()
+    except (AttributeError, pythoncom.com_error):
+        # User got sick of waiting :)
+        pass
+
+def TestAll():
+    # Run the explorer test late-bound, then the event tests, then the
+    # same test early-bound (via makepy/gencache).
+    try:
+        iexplore = win32com.client.dynamic.Dispatch("InternetExplorer.Application")
+        TestExplorer(iexplore)
+
+        win32api.Sleep(1000)
+        iexplore = None
+
+        # Test IE events.
+        TestExplorerEvents()
+        # Give IE a chance to shutdown, else it can get upset on fast machines.
+        time.sleep(2)
+
+        # Note that the TextExplorerEvents will force makepy - hence
+        # this gencache is really no longer needed.
+
+        from win32com.client import gencache
+        gencache.EnsureModule("{EAB22AC0-30C1-11CF-A7EB-0000C05BAE0B}", 0, 1, 1)
+        iexplore = win32com.client.Dispatch("InternetExplorer.Application")
+        TestExplorer(iexplore)
+
+    finally:
+        # Always release our browser reference.
+        iexplore = None
+
+if __name__=='__main__':
+    # Run all explorer tests, then verify no COM objects leaked.
+    TestAll()
+    CheckClean()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testGIT.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testGIT.py
new file mode 100644
index 0000000..dcc6b7b7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testGIT.py
@@ -0,0 +1,119 @@
+"""Testing pasing object between multiple COM threads
+
+Uses standard COM marshalling to pass objects between threads.  Even 
+though Python generally seems to work when you just pass COM objects
+between threads, it shouldnt.
+
+This shows the "correct" way to do it.
+
+It shows that although we create new threads to use the Python.Interpreter,
+COM marshalls back all calls to that object to the main Python thread,
+which must be running a message loop (as this sample does).
+
+When this test is run in "free threaded" mode (at this stage, you must 
+manually mark the COM objects as "ThreadingModel=Free", or run from a 
+service which has marked itself as free-threaded), then no marshalling
+is done, and the Python.Interpreter object start doing the "expected" thing
+- ie, it reports being on the same thread as its caller!
+
+Python.exe needs a good way to mark itself as FreeThreaded - at the moment
+this is a pain in the butt!
+
+"""
+
+import thread, traceback
+import win32com.client
+import win32event, win32api
+import pythoncom
+
+def TestInterp(interp):
+    # Sanity-check a Python.Interpreter object: correct Eval result, and
+    # the expected COM error for a non-string Eval argument.
+    if interp.Eval("1+1") <> 2:
+        raise ValueError, "The interpreter returned the wrong result."
+    try:
+        interp.Eval(1+1)
+        raise ValueError, "The interpreter did not raise an exception"
+    except pythoncom.com_error, details:
+        import winerror
+        if details[0]!=winerror.DISP_E_TYPEMISMATCH:
+            raise ValueError, "The interpreter exception was not winerror.DISP_E_TYPEMISMATCH."
+
+
+def TestInterpInThread(stopEvent, cookie):
+    # Thread entry point: run the worker test, then signal completion
+    # even if the test raised.
+    try:
+        DoTestInterpInThread(cookie)
+    finally:
+        win32event.SetEvent(stopEvent)
+
+def CreateGIT():
+    # Create the COM Global Interface Table, used to marshal interfaces
+    # between apartments/threads.
+    return pythoncom.CoCreateInstance(pythoncom.CLSID_StdGlobalInterfaceTable,
+                                      None,
+                                      pythoncom.CLSCTX_INPROC,
+                                      pythoncom.IID_IGlobalInterfaceTable)
+
+def DoTestInterpInThread(cookie):
+        # Runs in a worker thread: fetch the interpreter interface from the
+        # GIT via its cookie and exercise it.  In apartment mode, COM
+        # marshals the calls back to the main thread's message loop.
+        try:
+            pythoncom.CoInitialize()
+            myThread = win32api.GetCurrentThreadId()
+            GIT = CreateGIT()
+
+            interp = GIT.GetInterfaceFromGlobal(cookie, pythoncom.IID_IDispatch)
+            interp = win32com.client.Dispatch(interp)
+
+            TestInterp(interp)
+            interp.Exec("import win32api")
+            print "The test thread id is %d, Python.Interpreter's thread ID is %d" % (myThread, interp.Eval("win32api.GetCurrentThreadId()"))
+            interp = None
+            pythoncom.CoUninitialize()
+        except:
+            # NOTE(review): bare except keeps a thread failure from killing
+            # the process; errors are only printed, not propagated.
+            traceback.print_exc()
+
+def BeginThreadsSimpleMarshal(numThreads, cookie):
+    """Creates multiple threads using simple (but slower) marshalling.
+    
+    Single interpreter object, but a new stream is created per thread.
+    
+    Returns the handles the threads will set when complete.
+    """
+    ret = []
+    for i in range(numThreads):
+        # Auto-reset event, signalled by each thread when it finishes.
+        hEvent = win32event.CreateEvent(None, 0, 0, None)
+        thread.start_new(TestInterpInThread, (hEvent, cookie))
+        ret.append(hEvent)
+    return ret
+
+
+def test(fn):
+    # Register a Python.Interpreter in the GIT, start worker threads via
+    # `fn`, and pump messages until all threads signal completion.
+    print "The main thread is %d" % (win32api.GetCurrentThreadId())
+    GIT    = CreateGIT()
+    interp = win32com.client.Dispatch("Python.Interpreter")
+    cookie = GIT.RegisterInterfaceInGlobal(interp._oleobj_, pythoncom.IID_IDispatch)
+    
+    events = fn(4, cookie)
+    numFinished = 0
+    while 1:
+        try:
+            rc = win32event.MsgWaitForMultipleObjects(events, 0, 2000, win32event.QS_ALLINPUT)
+            if rc >= win32event.WAIT_OBJECT_0 and rc < win32event.WAIT_OBJECT_0+len(events):
+                numFinished = numFinished + 1
+                if numFinished >= len(events):
+                    break
+            elif rc==win32event.WAIT_OBJECT_0 + len(events): # a message
+                # This is critical - whole apartment model demo will hang.
+                pythoncom.PumpWaitingMessages()
+            else: # Timeout
+                print "Waiting for thread to stop with interfaces=%d, gateways=%d" % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount())
+        except KeyboardInterrupt:
+            break
+    GIT.RevokeInterfaceFromGlobal(cookie)
+    del interp
+    del GIT
+
+if __name__=='__main__':
+    # Run the apartment-marshalling test, then check for leaked objects.
+    test(BeginThreadsSimpleMarshal)
+    win32api.Sleep(500)
+    # Doing CoUninit here stop Pythoncom.dll hanging when DLLMain shuts-down the process
+    pythoncom.CoUninitialize()
+    if pythoncom._GetInterfaceCount()!=0 or pythoncom._GetGatewayCount()!=0:
+        print "Done with interfaces=%d, gateways=%d" % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount())
+    else:
+        print "Done."
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testGatewayAddresses.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testGatewayAddresses.py
new file mode 100644
index 0000000..10ac8be
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testGatewayAddresses.py
@@ -0,0 +1,134 @@
+# The purpose of this test is to ensure that the gateways objects
+# do the right thing WRT COM rules about object identity etc.
+
+# Also includes a basic test that we support inheritance correctly in
+# gateway interfaces.
+
+# For our test, we create an object of type IID_IPersistStorage
+# This interface derives from IPersist.
+# Therefore, QI's for IID_IDispatch, IID_IUnknown, IID_IPersist and
+# IID_IPersistStorage should all return the same gateway object.
+#
+# In addition, the interface should only need to declare itself as
+# using the IPersistStorage interface, and as the gateway derives
+# from IPersist, it should automatically be available without declaration.
+#
+# We also create an object of type IID_I??, and perform a QI for it.
+# We then jump through a number of hoops, ensuring that the objects
+# returned by the QIs follow all the rules.
+#
+# Here is Greg's summary of the rules:
+# 1) the set of supported interfaces is static and unchanging
+# 2) symmetric: if you QI an interface for that interface, it succeeds
+# 3) reflexive: if you QI against A for B, the new pointer must succeed
+#   for a QI for A
+# 4) transitive: if you QI for B, then QI that for C, then QI'ing A for C
+#   must succeed
+#
+#
+# Note that 1) requires cooperation of the Python programmer.  The rule to keep is:
+# "whenever you return an _object_ from _query_interface_(), you must return the
+# same object each time for a given IID.  Note that you must return the same
+# _wrapped_ object each time - not merely the same underlying Python instance -
+# or the COM identity rules will be broken."
+# The rest are tested here.
+
+
+from win32com.server.util import wrap
+import pythoncom
+import string
+from util import CheckClean
+
+numErrors = 0
+
+# Check that the 2 objects both have identical COM pointers.
+def CheckSameCOMObject(ob1, ob2):
+    addr1 = string.split(repr(ob1))[6][:-1]
+    addr2 = string.split(repr(ob2))[6][:-1]
+    return addr1==addr2
+
+# Check that the objects conform to COM identity rules.
+def CheckObjectIdentity(ob1, ob2):
+    u1 = ob1.QueryInterface(pythoncom.IID_IUnknown)
+    u2 = ob2.QueryInterface(pythoncom.IID_IUnknown)
+    return CheckSameCOMObject(u1, u2)
+
+def FailObjectIdentity(ob1, ob2, when):
+    if not CheckObjectIdentity(ob1, ob2):
+        global numErrors
+        numErrors = numErrors + 1
+        print when, "are not identical (%s, %s)" % (`ob1`, `ob2`)
+
+
+class Dummy:
+    _public_methods_ = [] # We never attempt to make a call on this object.
+    _com_interfaces_ = [pythoncom.IID_IPersistStorage]
+
+class Dummy2:
+    _public_methods_ = [] # We never attempt to make a call on this object.
+    _com_interfaces_ = [pythoncom.IID_IPersistStorage, pythoncom.IID_IExternalConnection]
+
+class DelegatedDummy: # Fixed typo ("DeletgatedDummy"): Dummy3._query_interface_ below instantiates DelegatedDummy().
+    _public_methods_ = [] # Never invoked directly - only wrapped (incorrectly, by design of the test) in Dummy3.
+
+class Dummy3:
+    _public_methods_ = [] # We never attempt to make a call on this object.
+    _com_interfaces_ = [pythoncom.IID_IPersistStorage]
+    def _query_interface_(self, iid):
+        if iid==pythoncom.IID_IExternalConnection:
+            # This will NEVER work - can only wrap the object once!
+            return wrap(DelegatedDummy())
+
+def TestGatewayInheritance():
+    # By default, wrap() creates and discards a temporary object.
+    # This is not necessary, but just the current implementation of wrap.
+    # As the object is correctly discarded, it doesnt affect this test.
+    o = wrap(Dummy(), pythoncom.IID_IPersistStorage)
+    o2 = o.QueryInterface(pythoncom.IID_IUnknown)
+    FailObjectIdentity(o, o2, "IID_IPersistStorage->IID_IUnknown")
+
+    o3 = o2.QueryInterface(pythoncom.IID_IDispatch)
+
+    FailObjectIdentity(o2, o3, "IID_IUnknown->IID_IDispatch")
+    FailObjectIdentity(o, o3, "IID_IPersistStorage->IID_IDispatch")
+
+    o4 = o3.QueryInterface(pythoncom.IID_IPersistStorage)
+    FailObjectIdentity(o, o4, "IID_IPersistStorage->IID_IPersistStorage(2)")
+    FailObjectIdentity(o2, o4, "IID_IUnknown->IID_IPersistStorage(2)")
+    FailObjectIdentity(o3, o4, "IID_IDispatch->IID_IPersistStorage(2)")
+
+
+    o5 = o4.QueryInterface(pythoncom.IID_IPersist)
+    FailObjectIdentity(o, o5, "IID_IPersistStorage->IID_IPersist")
+    FailObjectIdentity(o2, o5, "IID_IUnknown->IID_IPersist")
+    FailObjectIdentity(o3, o5, "IID_IDispatch->IID_IPersist")
+    FailObjectIdentity(o4, o5, "IID_IPersistStorage(2)->IID_IPersist")
+
+def TestMultiInterface():
+    o = wrap(Dummy2(), pythoncom.IID_IPersistStorage)
+    o2 = o.QueryInterface(pythoncom.IID_IExternalConnection)
+
+    FailObjectIdentity(o, o2, "IID_IPersistStorage->IID_IExternalConnection")
+
+    # Make the same QI again, to make sure it is stable.
+    o22 = o.QueryInterface(pythoncom.IID_IExternalConnection)
+    FailObjectIdentity(o, o22, "IID_IPersistStorage->IID_IExternalConnection")
+    FailObjectIdentity(o2, o22, "IID_IPersistStorage->IID_IExternalConnection (stability)")
+
+    o3 = o2.QueryInterface(pythoncom.IID_IPersistStorage)
+    FailObjectIdentity(o2, o3, "IID_IExternalConnection->IID_IPersistStorage")
+    FailObjectIdentity(o, o3, "IID_IPersistStorage->IID_IExternalConnection->IID_IPersistStorage")
+
+
+def test():
+    TestGatewayInheritance()
+    TestMultiInterface()
+    if numErrors==0:
+        print "Worked ok"
+    else:
+        print "There were", numErrors, "errors."
+
+
+if __name__=='__main__':
+    test()
+    CheckClean()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testInterp.vbs b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testInterp.vbs
new file mode 100644
index 0000000..3060cee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testInterp.vbs
@@ -0,0 +1,12 @@
+set o = CreateObject("Python.Interpreter")

+if o.Eval("1+1") <> 2 Then

+	WScript.Echo "Eval('1+1') failed"

+	bFailed = True

+end if

+

+if bFailed then

+	WScript.Echo "*********** VBScript tests failed *********"

+else

+	WScript.Echo "VBScript test worked OK"

+end if

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testIterators.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testIterators.py
new file mode 100644
index 0000000..3631573
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testIterators.py
@@ -0,0 +1,132 @@
+from __future__ import generators
+
+# Some raw iter tests.  Some "high-level" iterator tests can be found in
+# testvb.py and testOutlook.py
+import sys
+import unittest
+
+from win32com.client.gencache import EnsureDispatch
+from win32com.client import Dispatch
+import win32com.server.util
+import win32com.test.util
+import pythoncom
+
+def yield_iter(iter):
+    while 1:
+        yield iter.next()
+
+class _BaseTestCase(win32com.test.util.TestCase):
+    def test_enumvariant_vb(self):
+        ob, iter = self.iter_factory()
+        got=[]
+        for v in iter:
+            got.append(v)
+        self.assertEquals(got, self.expected_data)
+    def test_yield(self):
+        ob, i = self.iter_factory()
+        got=[]
+        for v in yield_iter(iter(i)):
+            got.append(v)
+        self.assertEquals(got, self.expected_data)
+
+    def _do_test_nonenum(self, object):
+        try:
+            for i in object:
+                pass
+            self.fail("Could iterate over a non-iterable object")
+        except TypeError:
+            pass # this is expected.
+        self.assertRaises(TypeError, iter, object)
+        self.assertRaises(AttributeError, getattr, object, "next")
+
+    def test_nonenum_wrapper(self):
+        # Check our raw PyIDispatch
+        ob = self.object._oleobj_
+        try:
+            for i in ob:
+                pass
+            self.fail("Could iterate over a non-iterable object")
+        except TypeError:
+            pass # this is expected.
+        self.assertRaises(TypeError, iter, ob)
+        self.assertRaises(AttributeError, getattr, ob, "next")
+
+        # And our Dispatch wrapper
+        ob = self.object
+        try:
+            for i in ob:
+                pass
+            self.fail("Could iterate over a non-iterable object")
+        except TypeError:
+            pass # this is expected.
+        # Note that as our object may be dynamic, we *do* have a __getitem__
+        # method, meaning we *can* call iter() on the object.  In this case
+        # actual iteration is what fails.
+        # So either the 'iter(); will raise a type error, or an attempt to
+        # fetch it
+        try:
+            iter(ob).next()
+            self.fail("Expected a TypeError fetching this iterator")
+        except TypeError:
+            pass
+        # And it should never have a 'next' method
+        self.assertRaises(AttributeError, getattr, ob, "next")
+
+class VBTestCase(_BaseTestCase):
+    def setUp(self):
+        def factory():
+            # Our VB test harness exposes a property with IEnumVariant.
+            ob = self.object.EnumerableCollectionProperty
+            for i in self.expected_data:
+                ob.Add(i)
+            # Get the raw IEnumVARIANT.
+            invkind = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET
+            iter = ob._oleobj_.InvokeTypes(pythoncom.DISPID_NEWENUM,0,invkind,(13, 10),())
+            return ob, iter.QueryInterface(pythoncom.IID_IEnumVARIANT)
+        # We *need* generated dispatch semantics, so dynamic __getitem__ etc
+        # don't get in the way of our tests.
+        self.object = EnsureDispatch("PyCOMVBTest.Tester")
+        self.expected_data = [1, "Two", "3"]
+        self.iter_factory = factory
+
+    def tearDown(self):
+        self.object = None
+
+# Test our client semantics, but using a wrapped Python list object.
+# This has the effect of re-using our client specific tests, but in this
+# case is exercising the server side.
+class SomeObject:
+    _public_methods_ = ["GetCollection"]
+    def __init__(self, data):
+        self.data = data
+    def GetCollection(self):
+        return win32com.server.util.NewCollection(self.data)
+
+class WrappedPythonCOMServerTestCase(_BaseTestCase):
+    def setUp(self):
+        def factory():
+            ob = self.object.GetCollection()
+            flags = pythoncom.DISPATCH_METHOD | pythoncom.DISPATCH_PROPERTYGET
+            enum = ob._oleobj_.Invoke(pythoncom.DISPID_NEWENUM, 0, flags, 1)
+            return ob, enum.QueryInterface(pythoncom.IID_IEnumVARIANT)
+
+        self.expected_data = [1,'Two',3]
+        sv = win32com.server.util.wrap(SomeObject(self.expected_data))
+        self.object = Dispatch(sv)
+        self.iter_factory = factory
+
+    def tearDown(self):
+        self.object = None
+
+def suite():
+    # We dont want our base class run
+    suite = unittest.TestSuite()
+    for item in globals().values():
+        if type(item)==type(unittest.TestCase) and \
+           issubclass(item, unittest.TestCase) and \
+           item != _BaseTestCase:
+            suite.addTest(unittest.makeSuite(item))
+    return suite
+
+if __name__=='__main__':
+    unittest.main(argv=sys.argv + ['suite'])
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMSOffice.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMSOffice.py
new file mode 100644
index 0000000..418dd03
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMSOffice.py
@@ -0,0 +1,169 @@
+# Test MSOffice
+#
+# Main purpose of test is to ensure that Dynamic COM objects
+# work as expected.
+
+# Assumes Word and Excel installed on your machine.
+
+import win32com, sys, string, win32api, traceback
+import win32com.client.dynamic
+from win32com.test.util import CheckClean
+import pythoncom
+from win32com.client import gencache
+from pywintypes import Unicode
+
+error = "MSOffice test error"
+
+# Test a few of the MSOffice components.
+def TestWord():
+    # Try and load the object exposed by Word 8
+    # Office 97 - _totally_ different object model!
+    try:
+        # NOTE - using "client.Dispatch" would return an msword8.py instance!
+        print "Starting Word 8 for dynamic test"
+        word = win32com.client.dynamic.Dispatch("Word.Application")
+        TestWord8(word)
+
+        word = None
+        # Now we will test Dispatch without the new "lazy" capabilities
+        print "Starting Word 8 for non-lazy dynamic test"
+        dispatch = win32com.client.dynamic._GetGoodDispatch("Word.Application")
+        typeinfo = dispatch.GetTypeInfo()
+        attr = typeinfo.GetTypeAttr()
+        olerepr = win32com.client.build.DispatchItem(typeinfo, attr, None, 0)
+        word = win32com.client.dynamic.CDispatch(dispatch, olerepr)
+        dispatch = typeinfo = attr = olerepr = None
+        TestWord8(word)
+
+    except pythoncom.com_error:
+        print "Starting Word 7 for dynamic test"
+        word = win32com.client.Dispatch("Word.Basic")
+        TestWord7(word)
+
+    print "Starting MSWord for generated test"
+    from win32com.client import gencache
+    word = gencache.EnsureDispatch("Word.Application.8")
+    TestWord8(word)
+
+def TestWord7(word):
+    word.FileNew()
+    # If not shown, show the app.
+    if not word.AppShow(): word._proc_("AppShow")
+
+    for i in xrange(12):
+        word.FormatFont(Color=i+1, Points=i+12)
+        word.Insert("Hello from Python %d\n" % i)
+
+    word.FileClose(2)
+
+def TestWord8(word):
+    word.Visible = 1
+    doc = word.Documents.Add()
+    wrange = doc.Range()
+    for i in range(10):
+        wrange.InsertAfter("Hello from Python %d\n" % i)
+    paras = doc.Paragraphs
+    for i in range(len(paras)):
+        p = paras[i]()
+        p.Font.ColorIndex = i+1
+        p.Font.Size = 12 + (4 * i)
+    # XXX - note that
+    # for para in paras:
+    #       para().Font...
+    # doesnt seem to work - no error, just doesnt work
+    # Should check if it works for VB!
+    doc.Close(SaveChanges = 0)
+    word.Quit()
+    win32api.Sleep(1000) # Wait for word to close, else we
+    # may get OA error.
+
+def TestWord8OldStyle():
+    try:
+        import win32com.test.Generated4Test.msword8
+    except ImportError:
+        print "Can not do old style test"
+
+
+def TextExcel(xl):
+    xl.Visible = 0
+    if xl.Visible: raise error, "Visible property is true."
+    xl.Visible = 1
+    if not xl.Visible: raise error, "Visible property not true."
+
+    if int(xl.Version[0])>=8:
+        xl.Workbooks.Add()
+    else:
+        xl.Workbooks().Add()
+
+
+    xl.Range("A1:C1").Value = (1,2,3)
+    xl.Range("A2:C2").Value = ('x','y','z')
+    xl.Range("A3:C3").Value = ('3','2','1')
+
+    for i in xrange(20):
+        xl.Cells(i+1,i+1).Value = "Hi %d" % i
+
+    if xl.Range("A1").Value <> "Hi 0":
+        raise error, "Single cell range failed"
+
+    if xl.Range("A1:B1").Value <> ((Unicode("Hi 0"),2),):
+        raise error, "flat-horizontal cell range failed"
+
+    if xl.Range("A1:A2").Value <> ((Unicode("Hi 0"),),(Unicode("x"),)):
+        raise error, "flat-vertical cell range failed"
+
+    if xl.Range("A1:C3").Value <> ((Unicode("Hi 0"),2,3),(Unicode("x"),Unicode("Hi 1"),Unicode("z")),(3,2,Unicode("Hi 2"))):
+        raise error, "square cell range failed"
+
+    xl.Range("A1:C3").Value =((3,2,1),("x","y","z"),(1,2,3))
+
+    if xl.Range("A1:C3").Value  <> ((3,2,1),(Unicode("x"),Unicode("y"),Unicode("z")),(1,2,3)):
+        raise error, "Range was not what I set it to!"
+
+    # test dates out with Excel
+    xl.Cells(5,1).Value = "Excel time"
+    xl.Cells(5,2).Formula = "=Now()"
+
+    import time
+    xl.Cells(6,1).Value = "Python time"
+    xl.Cells(6,2).Value = pythoncom.MakeTime(time.time())
+    xl.Cells(6,2).NumberFormat = "d/mm/yy h:mm"
+    xl.Columns("A:B").EntireColumn.AutoFit()
+
+    xl.Workbooks(1).Close(0)
+    xl.Quit()
+
+def TestAll():
+    try:
+        TestWord()
+
+        print "Starting Excel for Dynamic test..."
+        xl = win32com.client.dynamic.Dispatch("Excel.Application")
+        TextExcel(xl)
+
+        try:
+            print "Starting Excel 8 for generated excel8.py test..."
+            mod = gencache.EnsureModule("{00020813-0000-0000-C000-000000000046}", 0, 1, 2, bForDemand=1)
+            xl = win32com.client.Dispatch("Excel.Application")
+            TextExcel(xl)
+        except ImportError:
+            print "Could not import the generated Excel 97 wrapper"
+
+        try:
+            import xl5en32
+            mod = gencache.EnsureModule("{00020813-0000-0000-C000-000000000046}", 9, 1, 0)
+            xl = win32com.client.Dispatch("Excel.Application.5")
+            print "Starting Excel 95 for makepy test..."
+            TextExcel(xl)
+        except ImportError:
+            print "Could not import the generated Excel 95 wrapper"
+
+    except KeyboardInterrupt:
+        print "*** Interrupted MSOffice test ***"
+    except:
+        traceback.print_exc()
+
+if __name__=='__main__':
+    TestAll()
+    CheckClean()
+    pythoncom.CoUninitialize()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMSOfficeEvents.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMSOfficeEvents.py
new file mode 100644
index 0000000..2455f43
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMSOfficeEvents.py
@@ -0,0 +1,115 @@
+# OfficeEvents - test/demonstrate events with Word and Excel.
+from win32com.client import DispatchWithEvents, Dispatch
+import msvcrt, pythoncom
+import time, sys
+import types
+
+import threading
+stopEvent = threading.Event()
+
+def TestExcel():
+    class ExcelEvents:
+        def OnNewWorkbook(self, wb):
+            if type(wb) != types.InstanceType:
+                raise RuntimeError, "The transformer doesnt appear to have translated this for us!"
+            self.seen_events["OnNewWorkbook"] = None
+        def OnWindowActivate(self, wb, wn):
+            if type(wb) != types.InstanceType or type(wn) != types.InstanceType:
+                raise RuntimeError, "The transformer doesnt appear to have translated this for us!"
+            self.seen_events["OnWindowActivate"] = None
+        def OnWindowDeactivate(self, wb, wn):
+            self.seen_events["OnWindowDeactivate"] = None
+        def OnSheetDeactivate(self, sh):
+            self.seen_events["OnSheetDeactivate"] = None
+        def OnSheetBeforeDoubleClick(self, Sh, Target, Cancel):
+            if Target.Column % 2 == 0:
+                print "You can double-click there..."
+            else:
+                print "You can not double-click there..."
+            # This function is a void, so the result ends up in
+            # the only ByRef - Cancel.
+                return 1
+
+    class WorkbookEvents:
+        def OnActivate(self):
+            print "workbook OnActivate"
+        def OnBeforeRightClick(self, Target, Cancel):
+            print "It's a Worksheet Event"
+
+    e = DispatchWithEvents("Excel.Application", ExcelEvents)
+    e.seen_events = {}
+    e.Visible=1
+    book = e.Workbooks.Add()
+    book = DispatchWithEvents(book, WorkbookEvents)
+    print "Have book", book
+#    sheet = e.Worksheets(1)
+#    sheet = DispatchWithEvents(sheet, WorksheetEvents)
+    
+    print "Double-click in a few of the Excel cells..."
+    print "Press any key when finished with Excel, or wait 10 seconds..."
+    if not _WaitForFinish(e, 10):
+        e.Quit()
+    if not _CheckSeenEvents(e, ["OnNewWorkbook", "OnWindowActivate"]):
+        sys.exit(1)
+
+def TestWord():
+    class WordEvents:
+        def OnDocumentChange(self):
+            self.seen_events["OnDocumentChange"] = None
+        def OnWindowActivate(self, doc, wn):
+            self.seen_events["OnWindowActivate"] = None
+        def OnQuit(self):
+            self.seen_events["OnQuit"] = None
+            stopEvent.set()
+
+    w = DispatchWithEvents("Word.Application", WordEvents)
+    w.seen_events = {}
+    w.Visible = 1
+    w.Documents.Add()
+    print "Press any key when finished with Word, or wait 10 seconds..."
+    if not _WaitForFinish(w, 10):
+        w.Quit()
+    if not _CheckSeenEvents(w, ["OnDocumentChange", "OnWindowActivate"]):
+        sys.exit(1)
+
+def _WaitForFinish(ob, timeout):
+    end = time.time() + timeout
+    while 1:
+        if msvcrt.kbhit():
+            msvcrt.getch()
+            break
+        pythoncom.PumpWaitingMessages()
+        stopEvent.wait(.2)
+        if stopEvent.isSet():
+            stopEvent.clear()
+            break
+        try:
+            if not ob.Visible:
+                # Gone invisible - we need to pretend we timed
+                # out, so the app is quit.
+                return 0
+        except pythoncom.com_error:
+            # Excel is busy (eg, editing the cell) - ignore
+            pass
+        if time.time() > end:
+            return 0
+    return 1
+
+def _CheckSeenEvents(o, events):
+    rc = 1
+    for e in events:
+        if not o.seen_events.has_key(e):
+            print "ERROR: Expected event did not trigger", e
+            rc = 0
+    return rc
+
+def test():
+    import sys
+    if "noword" not in sys.argv[1:]:
+        TestWord()
+    if "noexcel" not in sys.argv[1:]:
+        TestExcel()
+    print "Word and Excel event tests passed."
+
+if __name__=='__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMarshal.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMarshal.py
new file mode 100644
index 0000000..8ee0e76
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testMarshal.py
@@ -0,0 +1,140 @@
+"""Testing passing objects between multiple COM threads
+
+Uses standard COM marshalling to pass objects between threads.  Even
+though Python generally seems to work when you just pass COM objects
+between threads, it shouldnt.
+
+This shows the "correct" way to do it.
+
+It shows that although we create new threads to use the Python.Interpreter,
+COM marshalls back all calls to that object to the main Python thread,
+which must be running a message loop (as this sample does).
+
+When this test is run in "free threaded" mode (at this stage, you must
+manually mark the COM objects as "ThreadingModel=Free", or run from a
+service which has marked itself as free-threaded), then no marshalling
+is done, and the Python.Interpreter object start doing the "expected" thing
+- ie, it reports being on the same thread as its caller!
+
+Python.exe needs a good way to mark itself as FreeThreaded - at the moment
+this is a pain in the butt!
+
+"""
+
+import threading, traceback
+import win32com.client
+import win32event, win32api
+import pythoncom
+import unittest
+
+from testServers import InterpCase
+
+freeThreaded = 1
+
+class ThreadInterpCase(InterpCase):
+    def _testInterpInThread(self, stopEvent, interp):
+        try:
+            self._doTestInThread(interp)
+        finally:
+            win32event.SetEvent(stopEvent)
+
+    def _doTestInThread(self, interp):
+        pythoncom.CoInitialize()
+        myThread = win32api.GetCurrentThreadId()
+
+        if freeThreaded:
+            interp = pythoncom.CoGetInterfaceAndReleaseStream(interp, pythoncom.IID_IDispatch)
+            interp = win32com.client.Dispatch(interp)
+
+        interp.Exec("import win32api")
+        #print "The test thread id is %d, Python.Interpreter's thread ID is %d" % (myThread, interp.Eval("win32api.GetCurrentThreadId()"))
+        pythoncom.CoUninitialize()
+
+    def BeginThreadsSimpleMarshal(self, numThreads):
+        """Creates multiple threads using simple (but slower) marshalling.
+    
+        Single interpreter object, but a new stream is created per thread.
+    
+        Returns the handles the threads will set when complete.
+        """
+        interp = win32com.client.Dispatch("Python.Interpreter")
+        events = []
+        threads = []
+        for i in range(numThreads):
+            hEvent = win32event.CreateEvent(None, 0, 0, None)
+            events.append(hEvent)
+            interpStream = pythoncom.CoMarshalInterThreadInterfaceInStream(pythoncom.IID_IDispatch, interp._oleobj_)
+            t = threading.Thread(target=self._testInterpInThread, args=(hEvent, interpStream))
+            t.setDaemon(1) # so errors dont cause shutdown hang
+            t.start()
+            threads.append(t)
+        interp = None
+        return threads, events
+
+    #
+    # NOTE - this doesnt quite work - Im not even sure it should, but Greg reckons
+    # you should be able to avoid the marshal per thread!
+    # I think that refers to CoMarshalInterface though...
+    def BeginThreadsFastMarshal(self, numThreads):
+        """Creates multiple threads using fast (but complex) marshalling.
+    
+        The marshal stream is created once, and each thread uses the same stream
+    
+        Returns the handles the threads will set when complete.
+        """
+        interp = win32com.client.Dispatch("Python.Interpreter")
+        if freeThreaded:
+            interp = pythoncom.CoMarshalInterThreadInterfaceInStream(pythoncom.IID_IDispatch, interp._oleobj_)
+        events = []
+        threads = []
+        for i in range(numThreads):
+            hEvent = win32event.CreateEvent(None, 0, 0, None)
+            t = threading.Thread(target=self._testInterpInThread, args=(hEvent, interp))
+            t.setDaemon(1) # so errors dont cause shutdown hang
+            t.start()
+            events.append(hEvent)
+            threads.append(t)
+        return threads, events
+
+    def _DoTestMarshal(self, fn, bCoWait = 0):
+        #print "The main thread is %d" % (win32api.GetCurrentThreadId())
+        threads, events = fn(2)
+        numFinished = 0
+        while 1:
+            try:
+                if bCoWait:
+                    rc = pythoncom.CoWaitForMultipleHandles(0, 2000, events)
+                else:
+                    # Specifying "bWaitAll" here will wait for messages *and* all events
+                    # (which is pretty useless)
+                    rc = win32event.MsgWaitForMultipleObjects(events, 0, 2000, win32event.QS_ALLINPUT)
+                if rc >= win32event.WAIT_OBJECT_0 and rc < win32event.WAIT_OBJECT_0+len(events):
+                    numFinished = numFinished + 1
+                    if numFinished >= len(events):
+                        break
+                elif rc==win32event.WAIT_OBJECT_0 + len(events): # a message
+                    # This is critical - whole apartment model demo will hang.
+                    pythoncom.PumpWaitingMessages()
+                else: # Timeout
+                    print "Waiting for thread to stop with interfaces=%d, gateways=%d" % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount())
+            except KeyboardInterrupt:
+                break
+        for t in threads:
+            t.join(2)
+            self.failIf(t.isAlive(), "thread failed to stop!?")
+        threads = None # threads hold references to args
+        # Seems to be a leak here I can't locate :(
+        #self.failUnlessEqual(pythoncom._GetInterfaceCount(), 0)
+        #self.failUnlessEqual(pythoncom._GetGatewayCount(), 0)
+
+    def testSimpleMarshal(self):
+        self._DoTestMarshal(self.BeginThreadsSimpleMarshal)
+
+    def testSimpleMarshalCoWait(self):
+        self._DoTestMarshal(self.BeginThreadsSimpleMarshal, 1)
+
+#    def testFastMarshal(self):
+#        self._DoTestMarshal(self.BeginThreadsFastMarshal)
+
+if __name__=='__main__':
+    unittest.main('testMarshal')
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testNetscape.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testNetscape.py
new file mode 100644
index 0000000..c3208d0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testNetscape.py
@@ -0,0 +1,25 @@
+## AHH - I cant make this work!!!
+
+# But this is the general idea.
+
+import netscape
+import sys
+
+error = "Netscape Test Error"
+
+if __name__=='__main__':
+    n=netscape.CNetworkCX()
+    rc = n.Open("http://d|/temp/apyext.html", 0, None, 0, None)
+    if not rc: raise error, "Open method of Netscape failed"
+    while 1:
+        num, str = n.Read(None, 0)
+        print "Got ", num, str
+        if num==0:
+            break # used to be continue - no idea!!
+        if num==-1:
+            break
+#               sys.stdout.write(str)
+    n.Close()
+    print "Done!"
+    del n
+    sys.last_type = sys.last_value = sys.last_traceback = None
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPersist.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPersist.py
new file mode 100644
index 0000000..95d333c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPersist.py
@@ -0,0 +1,205 @@
+import pythoncom
+import win32com.server.util
+import util
+import time
+
+import win32com, sys, string, win32api, traceback
+import win32com.client.dynamic
+import win32com.client
+import pythoncom
+from win32com.axcontrol import axcontrol
+from pywintypes import Unicode
+from win32com import storagecon
+from win32com.test.util import CheckClean
+
+import pywintypes
+import win32ui
+import win32api, os
+
+S_OK = 0
+
+class LockBytes:
+    _public_methods_ = [ 'ReadAt', 'WriteAt', 'Flush', 'SetSize', 'LockRegion', 'UnlockRegion', 'Stat' ]
+    _com_interfaces_ = [ pythoncom.IID_ILockBytes ]
+
+    def __init__(self, data = ""):
+        self.data = data
+        now = pywintypes.Time(time.time())
+        self.ctime = now
+        self.mtime = now
+        self.atime = now
+
+    def ReadAt(self, offset, cb):
+        print "ReadAt"
+        result = self.data[offset:offset + cb]
+        return result
+
+    def WriteAt(self, offset, data):
+        print "WriteAt " +str(offset)
+        print "len " + str(len(data))
+        print "data:"
+        #print data
+        if len(self.data) >= offset:
+            newdata = self.data[0:offset] + data
+        print len(newdata)
+        if len(self.data) >= offset + len(data):
+            newdata = newdata + self.data[offset +  len(data):]
+        print len(newdata)
+        self.data = newdata
+        return len(data)
+
+    def Flush(self, whatsthis=0):
+        print "Flush" + str(whatsthis)
+        fname = os.path.join(win32api.GetTempPath(), "persist.doc")
+        open(fname, "wb").write(self.data)
+        return S_OK
+
+    def SetSize(self, size):
+        print "Set Size" + str(size)
+        if size > len(self.data):
+            self.data = self.data +  "\000" * (size - len(self.data))
+        else:
+            self.data = self.data[0:size]
+        return S_OK
+
+    def LockRegion(self, offset, size, locktype):
+        print "LockRegion"
+        pass
+
+    def UnlockRegion(self, offset, size, locktype):
+        print "UnlockRegion"
+        pass
+
+    def Stat(self, statflag):
+        print "returning Stat " + str(statflag)
+        return (
+          "PyMemBytes",
+          storagecon.STGTY_LOCKBYTES,
+          len(self.data),
+          self.mtime,
+          self.ctime,
+          self.atime,
+          storagecon.STGM_DIRECT|storagecon.STGM_READWRITE|storagecon.STGM_CREATE ,
+          storagecon.STGM_SHARE_EXCLUSIVE,
+          "{00020905-0000-0000-C000-000000000046}",
+          0,   # statebits ?
+          0
+          )
+
+
+class OleClientSite:
+    _public_methods_ = [ 'SaveObject', 'GetMoniker', 'GetContainer', 'ShowObject', 'OnShowWindow', 'RequestNewObjectLayout' ]
+    _com_interfaces_ = [ axcontrol.IID_IOleClientSite ]
+
+    def __init__(self, data = ""):
+        self.IPersistStorage = None
+        self.IStorage = None
+
+    def SetIPersistStorage(self, IPersistStorage):
+        self.IPersistStorage = IPersistStorage
+
+    def SetIStorage(self, IStorage):
+        self.IStorage = IStorage
+
+    def SaveObject(self):
+        print "SaveObject"
+        if self.IPersistStorage != None and self.IStorage != None:
+            self.IPersistStorage.Save(self.IStorage,1)
+            self.IStorage.Commit(0)
+        return S_OK
+
+    def GetMoniker(self, dwAssign, dwWhichMoniker):
+        print "GetMoniker " + str(dwAssign) + " " + str(dwWhichMoniker)
+
+    def GetContainer(self):
+        print "GetContainer"
+
+    def ShowObject(self):
+        print "ShowObject"
+
+    def OnShowWindow(self, fShow):
+        print "ShowObject" + str(fShow)
+
+    def RequestNewObjectLayout(self):
+        print "RequestNewObjectLayout"
+
+
+def test():
+    # create a LockBytes object and
+    #wrap it as a COM object
+#       import win32com.server.dispatcher
+    lbcom = win32com.server.util.wrap(LockBytes(), pythoncom.IID_ILockBytes) #, useDispatcher=win32com.server.dispatcher.DispatcherWin32trace)
+
+    # create a structured storage on the ILockBytes object
+    stcom = pythoncom.StgCreateDocfileOnILockBytes(lbcom, storagecon.STGM_DIRECT| storagecon.STGM_CREATE | storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE, 0)
+
+    # create our ClientSite
+    ocs = OleClientSite()
+    # wrap it as a COM object
+    ocscom = win32com.server.util.wrap(ocs, axcontrol.IID_IOleClientSite)
+
+    # create a Word OLE Document, connect it to our site and our storage
+    oocom=axcontrol.OleCreate("{00020906-0000-0000-C000-000000000046}",
+            axcontrol.IID_IOleObject,
+            0,
+            (0,),
+            ocscom,
+            stcom,
+            )
+
+    mf=win32ui.GetMainFrame()
+    hwnd=mf.GetSafeHwnd()
+
+    # Set the host and document name
+    # for unknown reason document name becomes hostname, and document name
+    # is not set, debugged it, but don't know where the problem is?
+    oocom.SetHostNames("OTPython", "This is Cool")
+
+    # activate the OLE document
+    oocom.DoVerb( -1, ocscom, 0, hwnd, mf.GetWindowRect())
+
+    # set the hostnames again
+    oocom.SetHostNames("OTPython2", "ThisisCool2")
+
+    # get IDispatch of Word
+    doc=win32com.client.Dispatch(oocom.QueryInterface(pythoncom.IID_IDispatch))
+
+    # get IPersistStorage of Word
+    dpcom=oocom.QueryInterface(pythoncom.IID_IPersistStorage)
+
+    # let our ClientSite know the interfaces
+    ocs.SetIPersistStorage(dpcom)
+    ocs.SetIStorage(stcom)
+
+    # use IDispatch to do the Office Word test
+    # pasted from TestOffice.py
+
+    wrange = doc.Range()
+    for i in range(10):
+        wrange.InsertAfter("Hello from Python %d\n" % i)
+    paras = doc.Paragraphs
+    for i in range(len(paras)):
+        paras[i]().Font.ColorIndex = i+1
+        paras[i]().Font.Size = 12 + (4 * i)
+    # XXX - note that
+    # for para in paras:
+    #       para().Font...
+    # doesnt seem to work - no error, just doesnt work
+    # Should check if it works for VB!
+
+
+    dpcom.Save(stcom, 0)
+    dpcom.HandsOffStorage()
+#       oocom.Close(axcontrol.OLECLOSE_NOSAVE) # or OLECLOSE_SAVEIFDIRTY, but it fails???
+
+    #Save the ILockBytes data to "persist2.doc"
+    lbcom.Flush()
+
+    #exiting Winword will automatically update the ILockBytes data
+    #and flush it to "%TEMP%\persist.doc"
+    doc.Application.Quit()
+
+if __name__=='__main__':
+    test()
+    pythoncom.CoUninitialize()
+    CheckClean()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPippo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPippo.py
new file mode 100644
index 0000000..880aad85
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPippo.py
@@ -0,0 +1,50 @@
+import sys
+import unittest
+import pythoncom
+from win32com.client import Dispatch
+from win32com.client.gencache import EnsureDispatch
+
+class PippoTester(unittest.TestCase):
+    """Reference-leak tests against the sample "Pippo" Python COM server."""
+    def setUp(self):
+        # register the server
+        import pippo_server
+        pippo_server.main([pippo_server.__file__])
+        # create it.
+        self.object = Dispatch("Python.Test.Pippo")
+
+    def testLeaks(self):
+        """Late-bound dispatch must not leak refs over 1000 create/call cycles."""
+        try:
+            gtrc = sys.gettotalrefcount
+        except AttributeError:
+            # sys.gettotalrefcount only exists in debug (python_d) builds;
+            # fall back to a constant so the code is still exercised.
+            print "Please run this with python_d for leak tests"
+            gtrc = lambda: 0
+        # note creating self.object() should have consumed our "one time" leaks
+        self.object.Method1()
+        start = gtrc()
+        for i in range(1000):
+            object = Dispatch("Python.Test.Pippo")
+            object.Method1()
+        object = None
+        end = gtrc()
+        # Allow a little slop for interpreter-level caching.
+        if end-start > 5:
+            self.fail("We lost %d references!" % (end-start,))
+
+    def testLeaksGencache(self):
+        """Early-bound (gencache) dispatch must not leak references either."""
+        try:
+            gtrc = sys.gettotalrefcount
+        except AttributeError:
+            print "Please run this with python_d for leak tests"
+            gtrc = lambda: 0
+        # note creating self.object() should have consumed our "one time" leaks
+        object = EnsureDispatch("Python.Test.Pippo")
+        start = gtrc()
+        for i in range(1000):
+            object = EnsureDispatch("Python.Test.Pippo")
+            object.Method1()
+        object = None
+        end = gtrc()
+        # The generated wrapper is allowed slightly more slop than testLeaks.
+        if end-start > 10:
+            self.fail("We lost %d references!" % (end-start,))
+
+# Run the leak tests when executed as a script.
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPyComTest.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPyComTest.py
new file mode 100644
index 0000000..05c51c8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPyComTest.py
@@ -0,0 +1,449 @@
+# NOTE - Still seems to be a leak here somewhere
+# gateway count doesnt hit zero.  Hence the print statements!
+
+import sys; sys.coinit_flags=0 # Must be free-threaded!
+import win32api, types, pythoncom, time
+import sys, os, win32com, win32com.client.connect
+from win32com.test.util import CheckClean
+from win32com.client import constants
+import win32com
+from util import RegisterPythonServer
+
+# Long explanation shown when the PyCOMTest C++ test server is missing.
+importMsg = "**** PyCOMTest is not installed ***\n  PyCOMTest is a Python test specific COM client and server.\n  It is likely this server is not installed on this machine\n  To install the server, you must get the win32com sources\n  and build it using MS Visual C++"
+
+# String exception raised by the checks throughout this module.
+error = "testPyCOMTest error"
+
+# This test uses a Python implemented COM server - ensure correctly registered.
+RegisterPythonServer(os.path.join(os.path.dirname(__file__), '..', "servers", "test_pycomtest.py"))
+
+from win32com.client import gencache
+try:
+    gencache.EnsureModule('{6BCDCB60-5605-11D0-AE5F-CADD4C000000}', 0, 1, 1)
+except pythoncom.com_error:
+    print "The PyCOMTest module can not be located or generated."
+    print importMsg
+    raise RuntimeError, importMsg
+
+# We had a bug where RegisterInterfaces would fail if gencache had
+# already been run - exercise that here
+from win32com import universal
+universal.RegisterInterfaces('{6BCDCB60-5605-11D0-AE5F-CADD4C000000}', 0, 1, 1)
+
+# Verbosity flag; set to 1 by "-v" on the command line (see __main__ below).
+verbose = 0
+
+def progress(*args):
+    """Print *args space-separated, but only when the module-level
+    `verbose` flag is set."""
+    if verbose:
+        for arg in args:
+            print arg,
+        print
+
+def TestApplyResult(fn, args, result):
+    """Call fn(*args) and raise `error` unless the return value equals
+    `result` (or the call itself raises)."""
+    try:
+        import string
+        fnName = string.split(str(fn))[1]
+    except:
+        # NOTE(review): bare except - any failure while extracting a pretty
+        # name silently falls back to str(fn).
+        fnName = str(fn)
+    progress("Testing ", fnName)
+    pref = "function " + fnName
+    try:
+        rc  = apply(fn, args)
+        if rc != result:
+            raise error, "%s failed - result not %d but %d" % (pref, result, rc)
+    except:
+        t, v, tb = sys.exc_info()
+        # Drop the traceback reference (avoids a frame cycle); note the
+        # original traceback is lost when re-raising as `error` below.
+        tb = None
+        raise error, "%s caused exception %s,%s" % (pref, t, v)
+
+# Simple handler class.  This demo only fires one event.
+class RandomEventHandler:
+    def _Init(self):
+        # Map of event id -> number of times seen (but see NOTE in OnFire).
+        self.fireds = {}
+    def OnFire(self, no):
+        # NOTE(review): the first firing of an id is recorded as 0, not 1,
+        # so the per-id counts are off by one; harmless here since they are
+        # only used for the progress output in _DumpFireds.
+        try:
+            self.fireds[no] = self.fireds[no] + 1
+        except KeyError:
+            self.fireds[no] = 0
+    def _DumpFireds(self):
+        # Report what arrived; an empty map means the event never fired.
+        if not self.fireds:
+            print "ERROR: Nothing was recieved!"
+        for firedId, no in self.fireds.items():
+            progress("ID %d fired %d times" % (firedId, no))
+
+def TestDynamic():
+    """Exercise the PyCOMTest server via late-bound (dynamic) dispatch:
+    counter collection, optional args, structs, safe-arrays, and int/long/
+    currency properties."""
+    progress("Testing Dynamic")
+    import win32com.client.dynamic
+    o = win32com.client.dynamic.DumbDispatch("PyCOMTest.PyCOMTest")
+
+    progress("Getting counter")
+    counter = o.GetSimpleCounter()
+    TestCounter(counter, 0)
+
+    progress("Checking default args")
+    rc = o.TestOptionals()
+    if  rc[:-1] != ("def", 0, 1) or abs(rc[-1]-3.14)>.01:
+        print rc
+        raise error, "Did not get the optional values correctly"
+    rc = o.TestOptionals("Hi", 2, 3, 1.1)
+    if  rc[:-1] != ("Hi", 2, 3) or abs(rc[-1]-1.1)>.01:
+        print rc
+        raise error, "Did not get the specified optional values correctly"
+    rc = o.TestOptionals2(0)
+    if  rc != (0, "", 1):
+        print rc
+        raise error, "Did not get the optional2 values correctly"
+    rc = o.TestOptionals2(1.1, "Hi", 2)
+    if  rc[1:] != ("Hi", 2) or abs(rc[0]-1.1)>.01:
+        print rc
+        raise error, "Did not get the specified optional2 values correctly"
+
+#       if verbose: print "Testing structs"
+    r = o.GetStruct()
+    assert r.int_value == 99 and str(r.str_value)=="Hello from C++"
+    counter = win32com.client.dynamic.DumbDispatch("PyCOMTest.SimpleCounter")
+    TestCounter(counter, 0)
+    assert o.DoubleString("foo") == "foofoo"
+
+    l=[]
+    TestApplyResult(o.SetVariantSafeArray, (l,), len(l))
+    l=[1,2,3,4]
+    TestApplyResult(o.SetVariantSafeArray, (l,), len(l))
+#       TestApplyResult(o.SetIntSafeArray, (l,), len(l))       Still fails, and probably always will.
+    TestApplyResult(o.CheckVariantSafeArray, ((1,2,3,4,),), 1)
+    o.LongProp = 3
+    if o.LongProp != 3 or o.IntProp != 3:
+        raise error, "Property value wrong - got %d/%d" % (o.LongProp,o.IntProp)
+    o.LongProp = o.IntProp = -3
+    if o.LongProp != -3 or o.IntProp != -3:
+        raise error, "Property value wrong - got %d/%d" % (o.LongProp,o.IntProp)
+    # This number fits in an unsigned long.  Attempting to set it to a normal
+    # long will involve overflow, which is to be expected. But we do
+    # expect it to work in a property explicitly a VT_UI4.
+    check = 3 *10 **9
+    o.ULongProp = check
+    if o.ULongProp != check:
+        raise error, "Property value wrong - got %d (expected %d)" % (o.ULongProp, check)
+    # currency.
+    pythoncom.__future_currency__ = 1
+    if o.CurrencyProp != 0:
+        raise error, "Expecting 0, got %r" % (o.CurrencyProp,)
+    try:
+        import decimal
+    except ImportError:
+        # Pre-2.4 Pythons use the bundled backport.
+        import win32com.decimal_23 as decimal
+    o.CurrencyProp = decimal.Decimal("1234.5678")
+    if o.CurrencyProp != decimal.Decimal("1234.5678"):
+        raise error, "got %r" % (o.CurrencyProp,)
+
+def TestGenerated():
+    """Exercise the PyCOMTest server via the makepy-generated (early-bound)
+    wrapper: counters, optional/var args, interface round-trips, by-ref
+    params, safe-arrays, properties, currency, and connection-point events."""
+    # Create an instance of the server.
+    from win32com.client.gencache import EnsureDispatch
+    o = EnsureDispatch("PyCOMTest.PyCOMTest")
+    counter = o.GetSimpleCounter()
+    TestCounter(counter, 1)
+
+    counter = EnsureDispatch("PyCOMTest.SimpleCounter")
+    TestCounter(counter, 1)
+
+    i1, i2 = o.GetMultipleInterfaces()
+    if type(i1) != types.InstanceType or type(i2) != types.InstanceType:
+        # Yay - is now an instance returned!
+        raise error,  "GetMultipleInterfaces did not return instances - got '%s', '%s'" % (i1, i2)
+    del i1
+    del i2
+
+    progress("Checking default args")
+    rc = o.TestOptionals()
+    if  rc[:-1] != ("def", 0, 1) or abs(rc[-1]-3.14)>.01:
+        print rc
+        raise error, "Did not get the optional values correctly"
+    rc = o.TestOptionals("Hi", 2, 3, 1.1)
+    if  rc[:-1] != ("Hi", 2, 3) or abs(rc[-1]-1.1)>.01:
+        print rc
+        raise error, "Did not get the specified optional values correctly"
+    rc = o.TestOptionals2(0)
+    if  rc != (0, "", 1):
+        print rc
+        raise error, "Did not get the optional2 values correctly"
+    rc = o.TestOptionals2(1.1, "Hi", 2)
+    if  rc[1:] != ("Hi", 2) or abs(rc[0]-1.1)>.01:
+        print rc
+        raise error, "Did not get the specified optional2 values correctly"
+
+    progress("Checking var args")
+    o.SetVarArgs("Hi", "There", "From", "Python", 1)
+    if o.GetLastVarArgs() != ("Hi", "There", "From", "Python", 1):
+        raise error, "VarArgs failed -" + str(o.GetLastVarArgs())
+    progress("Checking getting/passing IUnknown")
+    if o.GetSetUnknown(o) != o:
+        raise error, "GetSetUnknown failed"
+    progress("Checking getting/passing IDispatch")
+    if type(o.GetSetDispatch(o)) !=types.InstanceType:
+        raise error, "GetSetDispatch failed"
+    progress("Checking getting/passing IDispatch of known type")
+    if o.GetSetInterface(o).__class__ != o.__class__:
+        raise error, "GetSetDispatch failed"
+    if o.GetSetVariant(4) != 4:
+        raise error, "GetSetVariant (int) failed"
+    if o.GetSetVariant("foo") != "foo":
+        raise error, "GetSetVariant (str) failed"
+    if o.GetSetVariant(o) != o:
+        raise error, "GetSetVariant (dispatch) failed"
+    if o.TestByRefVariant(2) != 4:
+        raise error, "TestByRefVariant failed"
+    if o.TestByRefString("Foo") != "FooFoo":
+        raise error, "TestByRefString failed"
+
+    # Pass some non-sequence objects to our array decoder, and watch it fail.
+    try:
+        o.SetVariantSafeArray("foo")
+        raise error, "Expected a type error"
+    except TypeError:
+        pass
+    try:
+        o.SetVariantSafeArray(666)
+        raise error, "Expected a type error"
+    except TypeError:
+        pass
+
+    o.GetSimpleSafeArray(None)
+    TestApplyResult(o.GetSimpleSafeArray, (None,), tuple(range(10)))
+    resultCheck = tuple(range(5)), tuple(range(10)), tuple(range(20))
+    TestApplyResult(o.GetSafeArrays, (None, None, None), resultCheck)
+
+    l=[1,2,3,4]
+    TestApplyResult(o.SetVariantSafeArray, (l,), len(l))
+    TestApplyResult(o.SetIntSafeArray, (l,), len(l))
+    l=[]
+    TestApplyResult(o.SetVariantSafeArray, (l,), len(l))
+    TestApplyResult(o.SetIntSafeArray, (l,), len(l))
+    # Tell the server to do what it does!
+    TestApplyResult(o.Test, ("Unused", 99), 1) # A bool function
+    TestApplyResult(o.Test, ("Unused", -1), 1) # A bool function
+    TestApplyResult(o.Test, ("Unused", 1==1), 1) # A bool function
+    TestApplyResult(o.Test, ("Unused", 0), 0)
+    TestApplyResult(o.Test, ("Unused", 1==0), 0)
+    TestApplyResult(o.Test2, (constants.Attr2,), constants.Attr2)
+    TestApplyResult(o.Test3, (constants.Attr2,), constants.Attr2)
+    TestApplyResult(o.Test4, (constants.Attr2,), constants.Attr2)
+    TestApplyResult(o.Test5, (constants.Attr2,), constants.Attr2)
+
+    # Date round-trip: the earlier of two COM times must come back unchanged.
+    now = pythoncom.MakeTime(time.gmtime(time.time()))
+    later = pythoncom.MakeTime(time.gmtime(time.time()+1))
+    TestApplyResult(o.EarliestDate, (now, later), now)
+
+    assert o.DoubleString("foo") == "foofoo"
+    assert o.DoubleInOutString("foo") == "foofoo"
+
+    o.LongProp = 3
+    if o.LongProp != 3 or o.IntProp != 3:
+        raise error, "Property value wrong - got %d/%d" % (o.LongProp,o.IntProp)
+
+    o.LongProp = o.IntProp = -3
+    if o.LongProp != -3 or o.IntProp != -3:
+        raise error, "Property value wrong - got %d/%d" % (o.LongProp,o.IntProp)
+
+    # Value too big for a signed 32-bit long - must survive via VT_UI4.
+    check = 3 *10 **9
+    o.ULongProp = check
+    if o.ULongProp != check:
+        raise error, "Property value wrong - got %d (expected %d)" % (o.ULongProp, check)
+
+    # currency.
+    pythoncom.__future_currency__ = 1
+    if o.CurrencyProp != 0:
+        raise error, "Expecting 0, got %r" % (o.CurrencyProp,)
+    try:
+        import decimal
+    except ImportError:
+        # Pre-2.4 Pythons use the bundled backport.
+        import win32com.decimal_23 as decimal
+    for val in ("1234.5678", "1234.56", "1234"):
+        o.CurrencyProp = decimal.Decimal(val)
+        if o.CurrencyProp != decimal.Decimal(val):
+            raise error, "%s got %r" % (val, o.CurrencyProp)
+
+    # Do the connection point thing...
+    # Create a connection object.
+    progress("Testing connection points")
+    sessions = []
+    o = win32com.client.DispatchWithEvents( o, RandomEventHandler)
+    o._Init()
+
+    try:
+        for i in range(3):
+            session = o.Start()
+            sessions.append(session)
+        time.sleep(.5)
+    finally:
+        # Stop the servers
+        for session in sessions:
+            o.Stop(session)
+        o._DumpFireds()
+    progress("Finished generated .py test.")
+
+def TestCounter(counter, bIsGenerated):
+    """Exercise the collection protocol of a PyCOMTest counter object:
+    random indexing, LBound/UBound properties, iteration, and the
+    enumerator's Clone()/Reset().  `bIsGenerated` is true when `counter`
+    came from the makepy-generated wrapper, which exposes extra
+    property-set helpers."""
+    # Test random access into container
+    progress("Testing counter", `counter`)
+    import random
+    for i in xrange(50):
+        num = int(random.random() * len(counter))
+        try:
+            ret = counter[num]
+            if ret != num+1:
+                raise error, "Random access into element %d failed - return was %s" % (num,`ret`)
+        except IndexError:
+            raise error, "** IndexError accessing collection element %d" % num
+
+    num = 0
+    if bIsGenerated:
+        counter.SetTestProperty(1)
+        counter.TestProperty = 1 # Note this has a second, default arg.
+        counter.SetTestProperty(1,2)
+        if counter.TestPropertyWithDef != 0:
+            raise error, "Unexpected property set value!"
+        if counter.TestPropertyNoDef(1) != 1:
+            raise error, "Unexpected property set value!"
+    else:
+        pass
+        # counter.TestProperty = 1
+
+    counter.LBound=1
+    counter.UBound=10
+    if counter.LBound <> 1 or counter.UBound<>10:
+        print "** Error - counter did not keep its properties"
+
+    if bIsGenerated:
+        bounds = counter.GetBounds()
+        if bounds[0]<>1 or bounds[1]<>10:
+            raise error, "** Error - counter did not give the same properties back"
+        counter.SetBounds(bounds[0], bounds[1])
+
+    # Iteration must visit exactly len(counter) == 10 items.
+    for item in counter:
+        num = num + 1
+    if num <> len(counter):
+        raise error, "*** Length of counter and loop iterations dont match ***"
+    if num <> 10:
+        raise error, "*** Unexpected number of loop iterations ***"
+
+    counter = counter._enum_.Clone() # Test Clone() and enum directly
+    counter.Reset()
+    num = 0
+    for item in counter:
+        num = num + 1
+    if num <> 10:
+        raise error, "*** Unexpected number of loop iterations - got %d ***" % num
+    progress("Finished testing counter")
+
+def TestLocalVTable(ob):
+    """Sanity-check a locally-dispatched object before the vtable tests."""
+    # Python doesn't fully implement this interface.
+    if ob.DoubleString("foo") != "foofoo":
+        # NOTE(review): `error` is a string, so calling error(...) raises
+        # TypeError rather than the intended test failure; this should
+        # probably be `raise error, "couldn't foofoo"` like the rest of
+        # the module.
+        raise error("couldn't foofoo")
+
+###############################
+##
+## Some vtable tests of the interface
+##
+def TestVTable(clsctx=pythoncom.CLSCTX_ALL):
+    """Create the Python-implemented test object and hand it to the C++
+    tester, which drives it through its vtable (custom) interface."""
+    # Any vtable interfaces marked as dual *should* be able to be
+    # correctly implemented as IDispatch.
+    ob = win32com.client.Dispatch("Python.Test.PyCOMTest")
+    TestLocalVTable(ob)
+    # Now test it via vtable - use some C++ code to help here as Python can't do it directly yet.
+    tester = win32com.client.Dispatch("PyCOMTest.PyCOMTest")
+    testee = pythoncom.CoCreateInstance("Python.Test.PyCOMTest", None, clsctx, pythoncom.IID_IUnknown)
+    # check we fail gracefully with None passed.
+    try:
+        tester.TestMyInterface(None)
+    except pythoncom.com_error, details:
+        pass
+    # and a real object.
+    tester.TestMyInterface(testee)
+
+def TestVTable2():
+    """Regression test: creating the object with the native (custom)
+    interface as the first requested IID used to crash."""
+    # We once crashed creating our object with the native interface as
+    # the first IID specified.  We must do it _after_ the tests, so that
+    # Python has already had the gateway registered from last run.
+    ob = win32com.client.Dispatch("Python.Test.PyCOMTest")
+    iid = pythoncom.InterfaceNames["IPyCOMTest"]
+    clsid = "Python.Test.PyCOMTest"
+    clsctx = pythoncom.CLSCTX_SERVER
+    try:
+        testee = pythoncom.CoCreateInstance(clsid, None, clsctx, iid)
+    except TypeError:
+        # Python can't actually _use_ this interface yet, so this is
+        # "expected".  Any COM error is not.
+        pass
+
+def TestVTableMI():
+    """QueryInterface checks against the multiple-inheritance test object."""
+    clsctx = pythoncom.CLSCTX_SERVER
+    ob = pythoncom.CoCreateInstance("Python.Test.PyCOMTestMI", None, clsctx, pythoncom.IID_IUnknown)
+    # This inherits from IStream.
+    ob.QueryInterface(pythoncom.IID_IStream)
+    # This implements IStorage, specifying the IID as a string
+    ob.QueryInterface(pythoncom.IID_IStorage)
+    # IDispatch should always work
+    ob.QueryInterface(pythoncom.IID_IDispatch)
+
+    iid = pythoncom.InterfaceNames["IPyCOMTest"]
+    try:
+        ob.QueryInterface(iid)
+    except TypeError:
+        # Python can't actually _use_ this interface yet, so this is
+        # "expected".  Any COM error is not.
+        pass
+
+def TestQueryInterface(long_lived_server = 0, iterations=5):
+    """Repeatedly QueryInterface for the custom interface, optionally while
+    a second, long-lived local-server instance stays alive (see the
+    E_NOINTERFACE note in Tester.testMultiQueryInterface)."""
+    tester = win32com.client.Dispatch("PyCOMTest.PyCOMTest")
+    if long_lived_server:
+        # Create a local server
+        t0 = win32com.client.Dispatch("Python.Test.PyCOMTest", clsctx=pythoncom.CLSCTX_LOCAL_SERVER)
+    # Request custom interfaces a number of times
+    prompt = [
+            "Testing QueryInterface without long-lived local-server #%d of %d...",
+            "Testing QueryInterface with long-lived local-server #%d of %d..."
+    ]
+
+    for i in range(iterations):
+        progress(prompt[long_lived_server!=0] % (i+1, iterations))
+        tester.TestQueryInterface()
+
+class Tester(win32com.test.util.TestCase):
+    """unittest wrapper driving the module's free-standing test functions."""
+    def testVTableInProc(self):
+        # We used to crash running this the second time - do it a few times
+        for i in range(3):
+            progress("Testing VTables in-process #%d..." % (i+1))
+            TestVTable(pythoncom.CLSCTX_INPROC_SERVER)
+    def testVTableLocalServer(self):
+        for i in range(3):
+            progress("Testing VTables out-of-process #%d..." % (i+1))
+            TestVTable(pythoncom.CLSCTX_LOCAL_SERVER)
+    def testVTable2(self):
+        for i in range(3):
+            TestVTable2()
+    def testVTableMI(self):
+        for i in range(3):
+            TestVTableMI()
+    def testMultiQueryInterface(self):
+        TestQueryInterface(0,6)
+        # When we use the custom interface in the presence of a long-lived
+        # local server, i.e. a local server that is already running when
+        # we request an instance of our COM object, and remains afterwards,
+        # then after repeated requests to create an instance of our object
+        # the custom interface disappears -- i.e. QueryInterface fails with
+        # E_NOINTERFACE. Set the upper range of the following test to 2 to
+        # pass this test, i.e. TestQueryInterface(1,2)
+        TestQueryInterface(1,6)
+    def testDynamic(self):
+        TestDynamic()
+    def testGenerated(self):
+        TestGenerated()
+
+# Entry point: spin up a dummy thread (see XXX below), honor "-v", then run
+# the unittest suite via the shared test harness.
+if __name__=='__main__':
+    # XXX - todo - Complete hack to crank threading support.
+    # Should NOT be necessary
+    def NullThreadFunc():
+        pass
+    import thread
+    thread.start_new( NullThreadFunc, () )
+
+    if "-v" in sys.argv: verbose = 1
+
+    win32com.test.util.testmain()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPyScriptlet.js b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPyScriptlet.js
new file mode 100644
index 0000000..fd9822b9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testPyScriptlet.js
@@ -0,0 +1,33 @@
+// Echo a message via the Windows Script Host.
+function print(msg)
+{
+  WScript.Echo(msg) ;
+}
+
+// Report a test failure when `condition` is false; execution continues so
+// later checks still run.
+function check(condition, msg)
+{
+  if (!condition) {
+    print("***** testPyScriptlet.js failed *****");
+    print(msg);
+  }
+}
+
+// Report the script engine name/version, then drive the TestPys.Scriptlet
+// COM object: each property must round-trip a write, each method must
+// return its canned string.
+var thisScriptEngine = ScriptEngine() ;
+
+var majorVersion = ScriptEngineMajorVersion() ;
+var minorVersion = ScriptEngineMinorVersion() ;
+var buildVersion = ScriptEngineBuildVersion() ;
+
+WScript.Echo(thisScriptEngine + " Version " + majorVersion + "." + minorVersion + " Build " + buildVersion) ;
+
+var scriptlet = new  ActiveXObject("TestPys.Scriptlet") ;
+
+// Properties: check initial value, then write and re-read.
+check(scriptlet.PyProp1=="PyScript Property1", "PyProp1 wasn't correct initial value");
+scriptlet.PyProp1 = "New Value";
+check(scriptlet.PyProp1=="New Value", "PyProp1 wasn't correct new value");
+
+check(scriptlet.PyProp2=="PyScript Property2", "PyProp2 wasn't correct initial value");
+scriptlet.PyProp2 = "Another New Value";
+check(scriptlet.PyProp2=="Another New Value", "PyProp2 wasn't correct new value");
+
+// Methods just return fixed strings.
+check(scriptlet.PyMethod1()=="PyMethod1 called", "Method1 wrong value");
+check(scriptlet.PyMethod2()=="PyMethod2 called", "Method2 wrong value");
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testROT.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testROT.py
new file mode 100644
index 0000000..9b31db72
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testROT.py
@@ -0,0 +1,21 @@
+import pythoncom
+import unittest
+import win32com.test.util
+
+class TestROT(win32com.test.util.TestCase):
+    def testit(self):
+        """Enumerate every moniker in the Running Object Table (and each
+        moniker's own contents) to prove the enumerators work."""
+        ctx = pythoncom.CreateBindCtx()
+        rot = pythoncom.GetRunningObjectTable()
+        num = 0
+        for mk in rot:
+            name = mk.GetDisplayName(ctx, None)
+            num += 1
+            # Monikers themselves can iterate their contents
+            for sub in mk:
+                num += 1
+
+        #if num < 2:
+        #    print "Only", num, "objects in the ROT - this is unusual"
+
+# Run the ROT test when executed as a script.
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testServers.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testServers.py
new file mode 100644
index 0000000..830a0413
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testServers.py
@@ -0,0 +1,39 @@
+import pythoncom, win32com.client.dynamic, sys
+import winerror
+import win32com.test.util
+import unittest
+
+def TestConnections():
+    """Run the win32com connection-point demo as a smoke test."""
+    import win32com.demos.connect
+    win32com.demos.connect.test()
+
+class InterpCase(win32com.test.util.TestCase):
+    """Drive the Python.Interpreter sample server in each server context."""
+    def setUp(self):
+        # Ensure the correct version registered.
+        from win32com.servers.interp import Interpreter
+        import win32com.server.register
+        win32com.server.register.RegisterClasses(Interpreter, quiet=1)
+
+    def _testInterp(self, interp):
+        # Eval of a string works; a non-string argument must fail with
+        # DISP_E_TYPEMISMATCH.
+        self.assertEquals(interp.Eval("1+1"), 2)
+        win32com.test.util.assertRaisesCOM_HRESULT(self, winerror.DISP_E_TYPEMISMATCH,
+                                     interp.Eval, 2)
+
+    def testInproc(self):
+        interp = win32com.client.dynamic.Dispatch("Python.Interpreter", clsctx = pythoncom.CLSCTX_INPROC)
+        self._testInterp(interp)
+
+    def testLocalServer(self):
+        interp = win32com.client.dynamic.Dispatch("Python.Interpreter", clsctx = pythoncom.CLSCTX_LOCAL_SERVER)
+        self._testInterp(interp)
+
+    def testAny(self):
+        # Let COM pick the server context.
+        interp = win32com.client.dynamic.Dispatch("Python.Interpreter")
+        self._testInterp(interp)
+
+class ConnectionsTestCase(win32com.test.util.TestCase):
+    """unittest wrapper around the connection-point demo."""
+    def testConnections(self):
+        TestConnections()
+    
+# Run only the InterpCase suite when executed as a script.
+if __name__=='__main__':
+    unittest.main('testServers')
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testShell.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testShell.py
new file mode 100644
index 0000000..e8afca6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testShell.py
@@ -0,0 +1,191 @@
+import sys, os
+import struct
+import unittest
+import copy
+
+import win32con
+import pythoncom
+from win32com.shell import shell
+from win32com.shell.shellcon import *
+from win32com.storagecon import *
+
+import win32com.test.util
+
+class ShellTester(win32com.test.util.TestCase):
+    """Smoke tests for IShellLink and IShellFolder."""
+    def testShellLink(self):
+        """Load every shell-link file on the desktop via IPersistFile and
+        read its target path."""
+        desktop = str(shell.SHGetSpecialFolderPath(0, CSIDL_DESKTOP))
+        num = 0
+        shellLink = pythoncom.CoCreateInstance(shell.CLSID_ShellLink, None, pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink)
+        persistFile = shellLink.QueryInterface(pythoncom.IID_IPersistFile)
+        for base_name in os.listdir(desktop):
+            name = os.path.join(desktop, base_name)
+            try:
+                persistFile.Load(name,STGM_READ)
+            except pythoncom.com_error:
+                # Not a shell link - skip it.
+                continue
+            # Resolve is slow - avoid it for our tests.
+            #shellLink.Resolve(0, shell.SLR_ANY_MATCH | shell.SLR_NO_UI)
+            fname, findData = shellLink.GetPath(0)
+            unc = shellLink.GetPath(shell.SLGP_UNCPRIORITY)[0]
+            num += 1
+        if num == 0:
+            # This isn't a fatal error, but is unlikely.
+            print "Could not find any links on your desktop, which is unusual"
+
+    def testShellFolder(self):
+        """Implicit (for ... in folder) and explicit EnumObjects enumeration
+        of the desktop must yield the same names."""
+        sf = shell.SHGetDesktopFolder()
+        names_1 = []
+        for i in sf: # Magically calls EnumObjects
+            name = sf.GetDisplayNameOf(i, SHGDN_NORMAL)
+            names_1.append(name)
+
+        # And get the enumerator manually
+        enum = sf.EnumObjects(0, SHCONTF_FOLDERS | SHCONTF_NONFOLDERS | SHCONTF_INCLUDEHIDDEN)
+        names_2 = []
+        for i in enum:
+            name = sf.GetDisplayNameOf(i, SHGDN_NORMAL)
+            names_2.append(name)
+        names_1.sort()
+        names_2.sort()
+        self.assertEqual(names_1, names_2)
+
+class PIDLTester(win32com.test.util.TestCase):
+    """Round-trip tests for PIDL and CIDA <-> string conversions."""
+    def _rtPIDL(self, pidl):
+        # PIDL -> string -> PIDL -> string must be stable.
+        pidl_str = shell.PIDLAsString(pidl)
+        pidl_rt = shell.StringAsPIDL(pidl_str)
+        self.assertEqual(pidl_rt, pidl)
+        pidl_str_rt = shell.PIDLAsString(pidl_rt)
+        self.assertEqual(pidl_str_rt, pidl_str)
+
+    def _rtCIDA(self, parent, kids):
+        # CIDA (parent pidl + child pidls) round-trips the same way.
+        cida = parent, kids
+        cida_str = shell.CIDAAsString(cida)
+        cida_rt = shell.StringAsCIDA(cida_str)
+        self.assertEqual(cida, cida_rt)
+        cida_str_rt = shell.CIDAAsString(cida_rt)
+        self.assertEqual(cida_str_rt, cida_str)
+
+    def testPIDL(self):
+        # A PIDL of "\1" is:   cb    pidl   cb
+        expect =            "\03\00" "\1"  "\0\0"
+        self.assertEqual(shell.PIDLAsString(["\1"]), expect)
+        self._rtPIDL(["\0"])
+        self._rtPIDL(["\1", "\2", "\3"])
+        self._rtPIDL(["\0" * 2048] * 2048)
+        # PIDL must be a list
+        self.assertRaises(TypeError, shell.PIDLAsString, "foo")
+
+    def testCIDA(self):
+        self._rtCIDA(["\0"], [ ["\0"] ])
+        self._rtCIDA(["\1"], [ ["\2"] ])
+        self._rtCIDA(["\0"], [ ["\0"], ["\1"], ["\2"] ])
+
+class FILEGROUPDESCRIPTORTester(win32com.test.util.TestCase):
+    """Round-trip tests for FILEGROUPDESCRIPTOR <-> string conversions."""
+    def _testRT(self, fd):
+        # Round-trip a single FILEDESCRIPTOR dict through the packed string
+        # form and back, then compare.
+        fgd_string = shell.FILEGROUPDESCRIPTORAsString([fd])
+        fd2 = shell.StringAsFILEGROUPDESCRIPTOR(fgd_string)[0]
+
+        fd = fd.copy()
+        fd2 = fd2.copy()
+
+        # The returned objects *always* have dwFlags and cFileName.
+        if not fd.has_key('dwFlags'):
+            del fd2['dwFlags']
+        if not fd.has_key('cFileName'):
+            self.assertEqual(fd2['cFileName'], '')
+            del fd2['cFileName']
+
+        self.assertEqual(fd, fd2)
+
+    def testSimple(self):
+        # Empty descriptor list: header must record zero entries; a single
+        # empty dict must record one.
+        fgd = shell.FILEGROUPDESCRIPTORAsString([])
+        header = struct.pack("i", 0)
+        self.assertEqual(header, fgd[:len(header)])
+        self._testRT(dict())
+        d = dict()
+        fgd = shell.FILEGROUPDESCRIPTORAsString([d])
+        header = struct.pack("i", 1)
+        self.assertEqual(header, fgd[:len(header)])
+        self._testRT(d)
+
+    def testComplex(self):
+        if sys.hexversion < 0x2030000:
+            # no kw-args to dict in 2.2 - not worth converting!
+            return
+        clsid = pythoncom.MakeIID("{CD637886-DB8B-4b04-98B5-25731E1495BE}")
+        d = dict(cFileName="foo.txt",
+                 clsid=clsid,
+                 sizel=(1,2),
+                 pointl=(3,4),
+                 dwFileAttributes = win32con.FILE_ATTRIBUTE_NORMAL,
+                 ftCreationTime=pythoncom.MakeTime(10),
+                 ftLastAccessTime=pythoncom.MakeTime(11),
+                 ftLastWriteTime=pythoncom.MakeTime(12),
+                 nFileSize=sys.maxint + 1)
+        self._testRT(d)
+
+class FileOperationTester(win32com.test.util.TestCase):
+    """Exercise shell.SHFileOperation (copy/rename/move/delete) against a
+    scratch file in the temp directory."""
+    def setUp(self):
+        # Create the source file and ensure the destination doesn't exist.
+        import tempfile
+        self.src_name = os.path.join(tempfile.gettempdir(), "pywin32_testshell")
+        self.dest_name = os.path.join(tempfile.gettempdir(), "pywin32_testshell_dest")
+        self.test_data = "Hello from\0Python"
+        f=file(self.src_name, "wb")
+        f.write(self.test_data)
+        f.close()
+        try:
+            os.unlink(self.dest_name)
+        except os.error:
+            pass
+
+    def tearDown(self):
+        # Remove whatever the individual test left behind.
+        for fname in (self.src_name, self.dest_name):
+            if os.path.isfile(fname):
+                os.unlink(fname)
+
+    def testCopy(self):
+        # FO_COPY leaves both source and destination in place.
+        s = (0, # hwnd,
+             FO_COPY, #operation
+             self.src_name,
+             self.dest_name)
+
+        rc, aborted = shell.SHFileOperation(s)
+        self.failUnless(not aborted)
+        self.failUnlessEqual(0, rc)
+        self.failUnless(os.path.isfile(self.src_name))
+        self.failUnless(os.path.isfile(self.dest_name))
+
+    def testRename(self):
+        # FO_RENAME removes the source.
+        s = (0, # hwnd,
+             FO_RENAME, #operation
+             self.src_name,
+             self.dest_name)
+        rc, aborted = shell.SHFileOperation(s)
+        self.failUnless(not aborted)
+        self.failUnlessEqual(0, rc)
+        self.failUnless(os.path.isfile(self.dest_name))
+        self.failUnless(not os.path.isfile(self.src_name))
+
+    def testMove(self):
+        # FO_MOVE also removes the source.
+        s = (0, # hwnd,
+             FO_MOVE, #operation
+             self.src_name,
+             self.dest_name)
+        rc, aborted = shell.SHFileOperation(s)
+        self.failUnless(not aborted)
+        self.failUnlessEqual(0, rc)
+        self.failUnless(os.path.isfile(self.dest_name))
+        self.failUnless(not os.path.isfile(self.src_name))
+
+    def testDelete(self):
+        # FOF_NOCONFIRMATION suppresses the UI prompt.
+        s = (0, # hwnd,
+             FO_DELETE, #operation
+             self.src_name, None,
+             FOF_NOCONFIRMATION)
+        rc, aborted = shell.SHFileOperation(s)
+        self.failUnless(not aborted)
+        self.failUnlessEqual(0, rc)
+        self.failUnless(not os.path.isfile(self.src_name))
+
+# Run the shell tests via the shared pywin32 test harness.
+if __name__=='__main__':
+    win32com.test.util.testmain()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testStorage.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testStorage.py
new file mode 100644
index 0000000..c5aaa1a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testStorage.py
@@ -0,0 +1,62 @@
+from win32com import storagecon
+import pythoncom, os, win32api
+import win32com.test.util
+
+import unittest
+
+class TestEnum(win32com.test.util.TestCase):
+    def testit(self):
+        # Write two property sets (user-defined + summary info) into a temp
+        # structured-storage file, then re-open it read-only and verify that
+        # enumerating the sets yields exactly what was written.
+        fname, tmp = win32api.GetTempFileName(win32api.GetTempPath(),'stg')
+        m=storagecon.STGM_READWRITE | storagecon.STGM_SHARE_EXCLUSIVE
+        ##  file, mode, format, attrs (always 0), IID (IStorage or IPropertySetStorage, storage options(only used with STGFMT_DOCFILE)
+        pss=pythoncom.StgOpenStorageEx(fname, m, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage)
+        ###                               {"Version":2,"reserved":0,"SectorSize":512,"TemplateFile":u'somefilename'})
+        
+        ## FMTID_SummaryInformation FMTID_DocSummaryInformation FMTID_UserDefinedProperties 
+        psuser=pss.Create(pythoncom.FMTID_UserDefinedProperties,
+                      pythoncom.IID_IPropertySetStorage,
+                      storagecon.PROPSETFLAG_DEFAULT,
+                      storagecon.STGM_READWRITE|storagecon.STGM_CREATE|storagecon.STGM_SHARE_EXCLUSIVE) ## its very picky about flag combinations!
+        psuser.WriteMultiple((3,4),('hey','bubba'))
+        psuser.WritePropertyNames((3,4),('property3','property4'))
+        expected_summaries = []
+        expected_summaries.append( ('property3', 3, pythoncom.VT_BSTR))
+        expected_summaries.append( ('property4', 4, pythoncom.VT_BSTR))
+        psuser=None
+        
+        pssum=pss.Create(pythoncom.FMTID_SummaryInformation,
+                      pythoncom.IID_IPropertySetStorage,
+                      storagecon.PROPSETFLAG_DEFAULT,
+                      storagecon.STGM_READWRITE|storagecon.STGM_CREATE|storagecon.STGM_SHARE_EXCLUSIVE)
+        pssum.WriteMultiple((storagecon.PIDSI_AUTHOR,storagecon.PIDSI_COMMENTS),('me', 'comment'))
+        
+        pssum=None
+        pss=None    ## doesn't seem to be a close or release method, and you can't even reopen it from the same process until previous object is gone 
+        
+        pssread=pythoncom.StgOpenStorageEx(fname, storagecon.STGM_READ|storagecon.STGM_SHARE_EXCLUSIVE, storagecon.STGFMT_FILE, 0 , pythoncom.IID_IPropertySetStorage)
+        found_summaries = []
+        for psstat in pssread:
+            ps=pssread.Open(psstat[0],storagecon.STGM_READ|storagecon.STGM_SHARE_EXCLUSIVE)
+            for p in ps:
+                p_val = ps.ReadMultiple((p[1],))[0]
+                if (p[1]==storagecon.PIDSI_AUTHOR and p_val=='me') or \
+                   (p[1]==storagecon.PIDSI_COMMENTS and p_val=='comment'):
+                    pass
+                else:
+                    self.fail("Unexpected property %s/%s" % (p, p_val))
+            ps=None
+            ## FMTID_UserDefinedProperties can't exist without FMTID_DocSummaryInformation, and isn't returned independently from Enum
+            ## also can't be open at same time
+            if psstat[0]==pythoncom.FMTID_DocSummaryInformation:
+                ps=pssread.Open(pythoncom.FMTID_UserDefinedProperties,storagecon.STGM_READ|storagecon.STGM_SHARE_EXCLUSIVE)
+                for p in ps:
+                    found_summaries.append(p)
+                ps=None
+        # Bug fix: this previously assigned to a NEW name 'psread', so the
+        # open 'pssread' storage (and its exclusive lock on fname) leaked.
+        pssread=None
+        expected_summaries.sort()
+        found_summaries.sort()
+        self.assertEqual(expected_summaries, found_summaries)
+
+if __name__=='__main__':
+    # Plain unittest entry point (win32com.test.util.TestCase handles leak checks).
+    unittest.main()
+   
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testStreams.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testStreams.py
new file mode 100644
index 0000000..fdb02990
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testStreams.py
@@ -0,0 +1,86 @@
+import pythoncom
+import win32com.server.util
+import win32com.test.util
+
+import unittest
+
+class Persists:
+    # Minimal local-Python IPersistStreamInit implementation used to exercise
+    # the COM stream/persist gateways in both directions.
+    _public_methods_ = [ 'GetClassID', 'IsDirty', 'Load', 'Save',
+                         'GetSizeMax', 'InitNew' ]
+    _com_interfaces_ = [ pythoncom.IID_IPersistStreamInit ]
+    def __init__(self):
+        self.data = "abcdefg"
+        self.dirty = 1
+    def GetClassID(self):
+        return pythoncom.IID_NULL
+    def IsDirty(self):
+        return self.dirty
+    def Load(self, stream):
+        # 26 == len of the alphabet test string used by StreamTest below.
+        self.data = stream.Read(26)
+    def Save(self, stream, clearDirty):
+        stream.Write(self.data)
+        if clearDirty:
+            self.dirty = 0
+    def GetSizeMax(self):
+        return 1024
+
+    def InitNew(self):
+        pass
+
+
+class Stream:
+    # Toy IStream: Read consumes forward from the current index, while Write
+    # deliberately REPLACES the whole buffer and rewinds - just enough
+    # behaviour for these round-trip tests, not a general stream.
+    _public_methods_ = [ 'Read', 'Write' ]
+    _com_interfaces_ = [ pythoncom.IID_IStream ]
+
+    def __init__(self, data):
+        self.data = data
+        self.index = 0
+
+    def Read(self, amount):
+        result = self.data[self.index : self.index + amount]
+        self.index = self.index + amount
+        return result
+
+    def Write(self, data):
+        self.data = data
+        self.index = 0
+        return len(data)
+
+
+class StreamTest(win32com.test.util.TestCase):
+    def _readWrite(self, data, write_stream, read_stream = None):
+        # Write 'data' then read it back, via either the raw Python objects
+        # or their COM-wrapped counterparts (any mix of the two).
+        if read_stream is None: read_stream = write_stream
+        write_stream.Write(data)
+        got = read_stream.Read(len(data))
+        self.assertEqual(data, got)
+
+    def testit(self):
+        """Round-trip data through Stream/Persists as Python and as COM objects."""
+        mydata = 'abcdefghijklmnopqrstuvwxyz'
+    
+        # First test the objects just as Python objects...
+        s = Stream(mydata)
+        p = Persists()
+    
+        p.Load(s)
+        p.Save(s, 0)
+        self.assertEqual(s.data, mydata)
+    
+        # Wrap the Python objects as COM objects, and make the calls as if
+        # they were non-Python COM objects.
+        s2 = win32com.server.util.wrap(s, pythoncom.IID_IStream)
+        p2 = win32com.server.util.wrap(p, pythoncom.IID_IPersistStreamInit)
+
+        self._readWrite(mydata, s, s)
+        self._readWrite(mydata, s, s2)
+        self._readWrite(mydata, s2, s)
+        self._readWrite(mydata, s2, s2)
+
+        # Embedded NUL must survive the COM marshalling layer.
+        self._readWrite("string with\0a NULL", s2, s2)
+        # reset the stream
+        s.Write(mydata)
+        p2.Load(s2)
+        p2.Save(s2, 0)
+        self.assertEqual(s.data, mydata)
+
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testWMI.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testWMI.py
new file mode 100644
index 0000000..0690629
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testWMI.py
@@ -0,0 +1,17 @@
+from win32com.client import GetObject
+import win32com.test.util
+
+import unittest
+
+class Simple(win32com.test.util.TestCase):
+    def testit(self):
+        """Smoke-test WMI: enumerating Win32_Process should yield several rows."""
+        cses = GetObject("WinMgMts:").InstancesOf("Win32_Process")
+        vals = []
+        for cs in cses:
+            val = cs.Properties_("Caption").Value
+            vals.append(val)
+        # Any live Windows box runs well over 5 processes.
+        self.failIf(len(vals)<5, "We only found %d processes!" % len(vals))
+
+if __name__=='__main__':
+    unittest.main()
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testall.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testall.py
new file mode 100644
index 0000000..d84a8202
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testall.py
@@ -0,0 +1,219 @@
+import sys, os, string, re
+import pythoncom
+import win32com.client
+from util import CheckClean, TestCase, CapturingFunctionTestCase, TestLoader
+import win32com.test.util
+import traceback
+import getopt
+
+import unittest
+
+verbosity = 1 # default unittest verbosity.
+
+try:
+    this_file = __file__
+except NameError:
+    this_file = sys.argv[0]
+
+def GenerateAndRunOldStyle():
+    # Generate all makepy test scripts, then clean them up again.  The try
+    # body is an intentional placeholder - the "run" step is currently disabled.
+    import GenTestScripts
+    GenTestScripts.GenerateAll()
+    try:
+        pass #
+    finally:
+        GenTestScripts.CleanAll()
+
+def CleanGenerated():
+    # Delete the makepy gen_py cache directory and reset gencache's in-memory
+    # state so later tests start from a clean slate.
+    import win32com, shutil
+    if os.path.isdir(win32com.__gen_path__):
+        if verbosity > 1:
+            print "Deleting files from %s" % (win32com.__gen_path__)
+        shutil.rmtree(win32com.__gen_path__)
+    import win32com.client.gencache
+    win32com.client.gencache.__init__() # Reset
+
+def RemoveRefCountOutput(data):
+    # Strip trailing "[NNN refs]" lines that debug ("_d") Python builds
+    # append to process output, so output comparison works on both builds.
+    while 1:
+        last_line_pos = data.rfind("\n")
+        if not re.match("\[\d+ refs\]", data[last_line_pos+1:]):
+            break
+        if last_line_pos < 0:
+            # All the output
+            return ''
+        data = data[:last_line_pos]
+        
+    return data
+
+def ExecuteSilentlyIfOK(cmd, testcase):
+    # Run 'cmd' via a pipe; swallow its output on success, but dump it and
+    # fail 'testcase' if the command exits non-zero.  Returns the output.
+    f = os.popen(cmd)
+    data = f.read().strip()
+    rc = f.close()
+    if rc:
+        print data
+        testcase.fail("Executing '%s' failed (%d)" % (cmd, rc))
+    # for "_d" builds, strip the '[xxx refs]' line
+    return RemoveRefCountOutput(data)
+
+class PyCOMTest(TestCase):
+    # Runs testPyComTest in a child process - it manipulates the Python
+    # thread state, so it must not run inside this interpreter.
+    no_leak_tests = True # done by the test itself
+    def testit(self):
+        # Execute testPyComTest in its own process so it can play
+        # with the Python thread state
+        fname = os.path.join(os.path.dirname(this_file), "testPyComTest.py")
+        cmd = '%s "%s" -q 2>&1' % (sys.executable, fname)
+        data = ExecuteSilentlyIfOK(cmd, self)
+
+class PippoTest(TestCase):
+    # Runs the Pippo server test in a child process and checks its exit code.
+    def testit(self):
+        python = sys.executable
+        fname = os.path.join(os.path.dirname(this_file), "testPippo.py")
+        cmd = '%s "%s" 2>&1' % (python, fname)
+        ExecuteSilentlyIfOK(cmd, self)
+
+# Data tables driving the suite.  Each outer list is indexed by test level
+# (1-3): running at level N executes the entries of sub-lists 0..N-1.
+unittest_modules = [
+        # Level 1 tests.
+        """testIterators testvbscript_regexp testStorage 
+          testStreams testWMI policySemantics testShell testROT
+          testAXScript testxslt testDictionary testCollections
+          testServers errorSemantics.test testvb testArrays
+          testClipboard testMarshal
+        """.split(),
+        # Level 2 tests.
+        """testMSOffice.TestAll testMSOfficeEvents.test testAccess.test
+           testExplorer.TestAll testExchange.test
+        """.split(),
+        # Level 3 tests.
+        """testmakepy.TestAll
+        """.split()
+]
+
+# (command-line, expected-substring) pairs run via ShellTestCase.
+output_checked_programs = [
+        # Level 1 tests.
+        [
+            ("cscript.exe /nologo testInterp.vbs", "VBScript test worked OK"),
+            ("cscript.exe /nologo testDictionary.vbs",
+                         "VBScript has successfully tested Python.Dictionary"),
+        ],
+        # Level 2 tests.
+        [
+        ],
+        # Level 3 tests
+        [
+        ],
+]
+
+# TestCase classes added directly to the suite.
+custom_test_cases = [
+        # Level 1 tests.
+        [
+            PyCOMTest,
+            PippoTest,
+        ],
+        # Level 2 tests.
+        [
+        ],
+        # Level 3 tests
+        [
+        ],
+]
+
+def get_test_mod_and_func(test_name, import_failures):
+    # Resolve "module" or "module.function" to (module, function-or-None).
+    # Import failures are recorded in 'import_failures' and (None, None) is
+    # returned so the caller can skip that test.
+    if test_name.find(".")>0:
+        mod_name, func_name = test_name.split(".")
+    else:
+        mod_name = test_name
+        func_name = None
+    try:
+        mod = __import__(mod_name)
+    except:
+        # NOTE(review): bare except is deliberate - any import failure should
+        # be reported at the end of the run rather than aborting it.
+        import_failures.append((mod_name, sys.exc_info()[:2]))
+        return None, None
+    if func_name is None:
+        func = None
+    else:
+        func = getattr(mod, func_name)
+    return mod, func
+
+# Return a test suite all loaded with the tests we want to run
+def make_test_suite(test_level = 1):
+    # Builds a unittest.TestSuite from the module/program/class tables above,
+    # including everything at levels 1..test_level.  Returns
+    # (suite, import_failures) where import_failures is a list of
+    # (module_name, (exc_type, exc_value)) for modules that failed to import.
+    suite = unittest.TestSuite()
+    import_failures = []
+    loader = TestLoader()
+    # Bug fix: this previously iterated range(testLevel) - a global that only
+    # exists when run as a script - silently ignoring the test_level param.
+    for i in range(test_level):
+        for mod_name in unittest_modules[i]:
+            mod, func = get_test_mod_and_func(mod_name, import_failures)
+            if mod is None:
+                continue
+            if func is not None:
+                test = win32com.test.util.CapturingFunctionTestCase(func,
+                                                     description=mod_name)
+            else:
+                if hasattr(mod, "suite"):
+                    test = mod.suite()
+                else:
+                    test = loader.loadTestsFromModule(mod)
+            assert test.countTestCases() > 0, "No tests loaded from %r" % mod
+            suite.addTest(test)
+        for cmd, output in output_checked_programs[i]:
+            suite.addTest(win32com.test.util.ShellTestCase(cmd, output))
+
+        for test_class in custom_test_cases[i]:
+            suite.addTest(unittest.defaultTestLoader.loadTestsFromTestCase(test_class))
+    return suite, import_failures
+
+def usage(why):
+    # Print the failure reason plus usage text, then exit with status 1.
+    print why
+    print
+    print "win32com test suite"
+    print "usage: testall [-v] test_level"
+    print "  where test_level is an integer 1-3.  Level 1 tests are quick,"
+    print "  level 2 tests invoke Word, IE etc, level 3 take ages!"
+    sys.exit(1)
+    
+    
+if __name__=='__main__':
+    # Parse [-v] and an optional integer test level (1-3), build the suite,
+    # run it, then report import failures and leaked COM object counts.
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], "v")
+    except getopt.error, why:
+        usage(why)
+    for opt, val in opts:
+        if opt=='-v':
+            verbosity += 1
+    testLevel = 1 # default to quick test
+    test_names = []
+    for arg in args:
+        try:
+            testLevel = int(arg)
+            if testLevel < 0 or testLevel > 3:
+                raise ValueError, "Only levels 1-3 are supported"
+        except ValueError:
+            test_names.append(arg)
+    if test_names:
+        usage("Test names are not supported yet")
+    CleanGenerated()
+    
+    suite, import_failures = make_test_suite(testLevel)
+    if verbosity:
+        if hasattr(sys, "gettotalrefcount"):
+            print "This is a debug build - memory leak tests will also be run."
+            print "These tests may take *many* minutes to run - be patient!"
+            print "(running from python.exe will avoid these leak tests)"
+        print "Executing level %d tests - %d test cases will be run" \
+                % (testLevel, suite.countTestCases())
+        if verbosity==1 and suite.countTestCases() < 70:
+            # A little row of markers so the dots show how close to finished
+            print '|' * suite.countTestCases()
+    testRunner = unittest.TextTestRunner(verbosity=verbosity)
+    testResult = testRunner.run(suite)
+    # Report any modules that could not even be imported, so their absence
+    # from the run is visible rather than silent.
+    if import_failures:
+        testResult.stream.writeln("*** The following test modules could not be imported ***")
+        for mod_name, (exc_type, exc_val) in import_failures:
+            desc = testResult._exc_info_to_string( (exc_type, exc_val, None) )
+            testResult.stream.write("%s: %s" % (mod_name, desc))
+        testResult.stream.writeln("*** %d test(s) could not be run ***" % len(import_failures))
+    
+    # re-print unit-test error here so it is noticed
+    if not testResult.wasSuccessful():
+        print "*" * 20, "- unittest tests FAILED"
+    
+    CheckClean()
+    pythoncom.CoUninitialize()
+    CleanGenerated()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testmakepy.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testmakepy.py
new file mode 100644
index 0000000..4543596
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testmakepy.py
@@ -0,0 +1,54 @@
+# Test makepy - try and run it over every OCX in the windows system directory.
+
+import sys
+import win32api
+import traceback
+import glob
+import os
+import string
+import traceback
+
+import win32com.test.util
+from win32com.client import makepy, selecttlb, gencache
+import pythoncom
+import winerror
+
+def TestBuildAll(verbose = 1):
+    # Run makepy over every registered type library, counting successes.
+    # COM errors for unloadable/unregistered libraries are expected and
+    # suppressed; anything else is printed with a traceback but not fatal.
+    num = 0
+    tlbInfos = selecttlb.EnumTlbs()
+    for info in tlbInfos:
+        if verbose:
+            print "%s (%s)" % (info.desc, info.dll)
+        try:
+            makepy.GenerateFromTypeLibSpec(info)
+#          sys.stderr.write("Attr typeflags for coclass referenced object %s=%d (%d), typekind=%d\n" % (name, refAttr.wTypeFlags, refAttr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL,refAttr.typekind))
+            num += 1
+        except pythoncom.com_error, details:
+            # Ignore these 2 errors, as they are very common and can obscure
+            # useful warnings.
+            if details[0] not in [winerror.TYPE_E_CANTLOADLIBRARY,
+                              winerror.TYPE_E_LIBNOTREGISTERED]:
+                print "** COM error on", info.desc
+                print details
+        except KeyboardInterrupt:
+            print "Interrupted!"
+            raise KeyboardInterrupt
+        except:
+            print "Failed:", info.desc
+            traceback.print_exc()
+        if makepy.bForDemandDefault:
+            # This only builds enums etc by default - build each
+            # interface manually
+            tinfo = (info.clsid, info.lcid, info.major, info.minor)
+            mod = gencache.EnsureModule(info.clsid, info.lcid, info.major, info.minor)
+            for name in mod.NamesToIIDMap.keys():
+                makepy.GenerateChildFromTypeLibSpec(name, tinfo)
+    return num
+
+def TestAll(verbose = 0):
+    # Build everything, report the count, then verify nothing leaked.
+    num = TestBuildAll(verbose)
+    print "Generated and imported", num, "modules"
+    win32com.test.util.CheckClean()
+
+if __name__=='__main__':
+    TestAll("-q" not in sys.argv)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testvb.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testvb.py
new file mode 100644
index 0000000..7c635e2e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testvb.py
@@ -0,0 +1,457 @@
+# Test code for a VB Program.
+#
+# This requires the PythonCOM VB Test Harness.
+#
+
+import winerror
+import pythoncom, win32com.client, win32com.client.dynamic, win32com.client.gencache
+from win32com.server.util import NewCollection, wrap
+import string
+import util
+
+importMsg = """\
+**** VB Test harness is not installed ***
+  This test requires a VB test program to be built and installed
+  on this PC.
+"""
+
+### NOTE: VB SUCKS!
+### If you delete the DLL built by VB, then reopen VB
+### to rebuild the DLL, it loses the IID of the object!!!
+### So I will try to avoid this in the future :-)
+
+# Import the type library for the test module.
+try:
+    win32com.client.gencache.EnsureDispatch("PyCOMVBTest.Tester")
+except pythoncom.com_error:
+    raise RuntimeError, importMsg
+
+import traceback
+
+# for debugging
+useDispatcher = None
+##  import win32com.server.dispatcher
+##  useDispatcher = win32com.server.dispatcher.DefaultDebugDispatcher
+
+error = "VB Test Error"
+
+# Set up a COM object that VB will do some callbacks on.  This is used
+# to test byref params for gateway IDispatch.
+class TestObject:
+    # COM server the VB test harness calls back into, exercising byref
+    # parameter marshalling through the gateway IDispatch.  Tuple returns map
+    # to (byref out-values...); a list return is a single SAFEARRAY result.
+    _public_methods_ = ["CallbackVoidOneByRef","CallbackResultOneByRef", "CallbackVoidTwoByRef",
+                                        "CallbackString","CallbackResultOneByRefButReturnNone",
+                                        "CallbackVoidOneByRefButReturnNone",
+                                        "CallbackArrayResult", "CallbackArrayResultOneArrayByRef",
+                                        "CallbackArrayResultWrongSize"
+                                       ]
+    def CallbackVoidOneByRef(self, intVal):
+        return intVal + 1
+    def CallbackResultOneByRef(self, intVal):
+        return intVal, intVal + 1
+    def CallbackVoidTwoByRef(self, int1, int2):
+        return int1+int2, int1-int2
+    def CallbackString(self, strVal):
+        return 0, strVal + " has visited Python"
+    def CallbackArrayResult(self, arrayVal):
+        ret = []
+        for i in arrayVal:
+            ret.append(i+1)
+        # returning as a list forces it be processed as a single result
+        # (rather than a tuple, where it may be interpreted as
+        # multiple results for byref unpacking)
+        return ret
+    def CallbackArrayResultWrongSize(self, arrayVal):
+        # Deliberately returns one element too few - the harness checks the
+        # resulting marshalling error.
+        return list(arrayVal[:-1])
+    def CallbackArrayResultOneArrayByRef(self, arrayVal):
+        ret = []
+        for i in arrayVal:
+            ret.append(i+1)
+        # See above for list processing.
+        return list(arrayVal), ret
+
+    def CallbackResultOneByRefButReturnNone(self, intVal):
+        return
+    def CallbackVoidOneByRefButReturnNone(self, intVal):
+        return
+
+def TestVB( vbtest, bUseGenerated ):
+    # Drive the VB test harness object through property, array, struct,
+    # collection and callback tests.  bUseGenerated selects the makepy
+    # (early-bound) path, which additionally supports byref/PUTREF features.
+    vbtest.LongProperty = -1
+    if vbtest.LongProperty != -1:
+        raise error, "Could not set the long property correctly."
+    vbtest.IntProperty = 10
+    if vbtest.IntProperty != 10:
+        raise error, "Could not set the integer property correctly."
+    vbtest.VariantProperty = 10
+    if vbtest.VariantProperty != 10:
+        raise error, "Could not set the variant integer property correctly."
+    vbtest.StringProperty = "Hello from Python"
+    if vbtest.StringProperty != "Hello from Python":
+        raise error, "Could not set the string property correctly."
+    vbtest.VariantProperty = "Hello from Python"
+    if vbtest.VariantProperty != "Hello from Python":
+        raise error, "Could not set the variant string property correctly."
+    vbtest.VariantProperty = (1.0, 2.0, 3.0)
+    if vbtest.VariantProperty != (1.0, 2.0, 3.0):
+        raise error, "Could not set the variant property to an array of floats correctly - '%s'." % (vbtest.VariantProperty,)
+
+    TestArrays(vbtest, bUseGenerated)
+    TestStructs(vbtest)
+    TestCollections(vbtest)
+
+    assert vbtest.TakeByValObject(vbtest)==vbtest
+
+    # Python doesnt support PUTREF properties without a typeref
+    # (although we could)
+    if bUseGenerated:
+        ob = vbtest.TakeByRefObject(vbtest)
+        assert ob[0]==vbtest and ob[1]==vbtest
+
+        # A property that only has PUTREF defined.
+        vbtest.VariantPutref = vbtest
+        if vbtest.VariantPutref._oleobj_!= vbtest._oleobj_:
+            raise error, "Could not set the VariantPutref property correctly."
+        # Cant test further types for this VariantPutref, as only
+        # COM objects can be stored ByRef.
+
+        # A "set" type property - only works for generated.
+        # VB recognizes a collection via a few "private" interfaces that we
+        # could later build support in for.
+#               vbtest.CollectionProperty = NewCollection((1,2,"3", "Four"))
+#               if vbtest.CollectionProperty != (1,2,"3", "Four"):
+#                       raise error, "Could not set the Collection property correctly - got back " + str(vbtest.CollectionProperty)
+
+        # These are sub's that have a single byref param
+        # Result should be just the byref.
+        if vbtest.IncrementIntegerParam(1) != 2:
+            raise error, "Could not pass an integer byref"
+
+# Sigh - we cant have *both* "omitted byref" and optional args
+# We really have to opt that args nominated as optional work as optional
+# rather than simply all byrefs working as optional.
+#               if vbtest.IncrementIntegerParam() != 1:
+#                       raise error, "Could not pass an omitted integer byref"
+
+        if vbtest.IncrementVariantParam(1) != 2:
+            raise error, "Could not pass an int VARIANT byref:"+str(vbtest.IncrementVariantParam(1))
+
+        if vbtest.IncrementVariantParam(1.5) != 2.5:
+            raise error, "Could not pass a float VARIANT byref"
+
+        # Can't test IncrementVariantParam with the param omitted as it
+        # it not declared in the VB code as "Optional"
+        callback_ob = wrap(TestObject(), useDispatcher = useDispatcher)
+        vbtest.DoSomeCallbacks(callback_ob)
+
+    ret = vbtest.PassIntByVal(1)
+    if ret != 2:
+        raise error, "Could not increment the integer - "+str(ret)
+
+    TestVBInterface(vbtest)
+    # Python doesnt support byrefs without some sort of generated support.
+    if bUseGenerated:
+        # This is a VB function that takes a single byref
+        # Hence 2 return values - function and byref.
+        ret = vbtest.PassIntByRef(1)
+        if ret != (1,2):
+            raise error, "Could not increment the integer - "+str(ret)
+        # Check you can leave a byref arg blank.
+# see above
+#               ret = vbtest.PassIntByRef()
+#               if ret != (0,1):
+#                       raise error, "Could not increment the integer with default arg- "+str(ret)
+
+def _DoTestCollection(vbtest, col_name, expected):
+    # Exercise collection 'col_name' on vbtest every way Python can consume
+    # it: direct iteration (twice, proving restartability), iter(), len()
+    # vs Count, zero-based indexing, and the raw _NewEnum Next() protocol.
+    # It sucks that some objects allow "Count()", but others "Count"
+    def _getcount(ob):
+        r = getattr(ob, "Count")
+        if type(r)!=type(0):
+            return r()
+        return r
+    c = getattr(vbtest, col_name)
+    check = []
+    for item in c:
+        check.append(item)
+    if check != list(expected):
+        raise error, "Collection %s didn't have %r (had %r)" % (col_name, expected, check)
+    # Just looping over the collection again works (ie, is restartable)
+    check = []
+    for item in c:
+        check.append(item)
+    if check != list(expected):
+        raise error, "Collection 2nd time around %s didn't have %r (had %r)" % (col_name, expected, check)
+    # Check we can get it via iter()
+    i = iter(getattr(vbtest, col_name))
+    check = []
+    for item in i:
+        check.append(item)
+    if check != list(expected):
+        raise error, "Collection iterator %s didn't have %r 2nd time around (had %r)" % (col_name, expected, check)
+    # but an iterator is not restartable
+    check = []
+    for item in i:
+        check.append(item)
+    if check != []:
+        raise error, "2nd time around Collection iterator %s wasn't empty (had %r)" % (col_name, check)
+
+    # Check len()==Count()
+    c = getattr(vbtest, col_name)
+    if len(c) != _getcount(c):
+        raise error, "Collection %s __len__(%r) wasn't==Count(%r)" % (col_name, len(c), _getcount(c))
+    # Check we can do it with zero based indexing.
+    c = getattr(vbtest, col_name)
+    check = []
+    for i in range(_getcount(c)):
+        check.append(c[i])
+    if check != list(expected):
+        raise error, "Collection %s didn't have %r (had %r)" % (col_name, expected, check)
+
+    # Check we can do it with our old "Skip/Next" methods.
+    c = getattr(vbtest, col_name)._NewEnum()
+    check = []
+    while 1:
+        n = c.Next()
+        if not n:
+            break
+        check.append(n[0])
+    if check != list(expected):
+        raise error, "Collection %s didn't have %r (had %r)" % (col_name, expected, check)
+
+def TestCollections(vbtest):
+    # Test both a pre-populated VB collection and one built up via Add().
+    _DoTestCollection(vbtest, "CollectionProperty", [1,"Two", "3"])
+    # zero based indexing works for simple VB collections.
+    if vbtest.CollectionProperty[0] != 1:
+        raise error, "The CollectionProperty[0] element was not the default value"
+
+    _DoTestCollection(vbtest, "EnumerableCollectionProperty", [])
+    vbtest.EnumerableCollectionProperty.Add(1)
+    vbtest.EnumerableCollectionProperty.Add("Two")
+    vbtest.EnumerableCollectionProperty.Add("3")
+    _DoTestCollection(vbtest, "EnumerableCollectionProperty", [1,"Two", "3"])
+
+def _DoTestArray(vbtest, data, expected_exception = None):
+    # Round-trip 'data' through the ArrayProperty.  If expected_exception is
+    # given, setting the property must raise it; otherwise the value read
+    # back must compare equal to what was written.
+    try:
+        vbtest.ArrayProperty = data
+        if expected_exception is not None:
+            raise error, "Expected '%s'" % expected_exception
+    except expected_exception:
+        return
+    got = vbtest.ArrayProperty
+    if got != data:
+        raise error, \
+              "Could not set the array data correctly - got %r, expected %r" \
+              % (got, data)
+
+def TestArrays(vbtest, bUseGenerated):
+    # SAFEARRAY marshalling tests: empty, numeric, string, object, mixed,
+    # nested and multi-dimensional arrays, plus byref array params and a
+    # deliberately-failing callback (generated/early-bound path only).
+    # Try and use a safe array (note that the VB code has this declared as a VARIANT
+    # and I cant work out how to force it to use native arrays!
+    # (NOTE Python will convert incoming arrays to tuples, so we pass a tuple, even tho
+    # a list works fine - just makes it easier for us to compare the result!
+    # Empty array
+    _DoTestArray(vbtest, ())
+    # Empty child array
+    _DoTestArray(vbtest, ((), ()))
+    # ints
+    _DoTestArray(vbtest, tuple(range(1,100)))
+    # Floats
+    _DoTestArray(vbtest, (1.0, 2.0, 3.0))
+    # Strings.
+    _DoTestArray(vbtest, tuple(string.split("Hello from Python")))
+    # Date and Time?
+    # COM objects.
+    _DoTestArray(vbtest, (vbtest, vbtest))
+    # Mixed
+    _DoTestArray(vbtest, (1, 2.0, "3"))
+    # Array elements containing other arrays
+    _DoTestArray(vbtest, (1,(vbtest, vbtest),("3","4")))
+    # Multi-dimensional
+    _DoTestArray(vbtest, (( (1,2,3), (4,5,6) )))
+    _DoTestArray(vbtest, (( (vbtest,vbtest,vbtest), (vbtest,vbtest,vbtest) )))
+    # Another dimension!
+    arrayData = ( ((1,2),(3,4),(5,6)), ((7,8),(9,10),(11,12)) )
+    arrayData = ( ((vbtest,vbtest),(vbtest,vbtest),(vbtest,vbtest)),
+                  ((vbtest,vbtest),(vbtest,vbtest),(vbtest,vbtest)) )
+    _DoTestArray(vbtest, arrayData)
+
+    # Check that when a '__getitem__ that fails' object is the first item
+    # in the structure, we don't mistake it for a sequence.
+    _DoTestArray(vbtest, (vbtest, 2.0, "3"))
+    _DoTestArray(vbtest, (1, 2.0, vbtest))
+
+    # Pass arbitrarily sized arrays - these used to fail, but thanks to 
+    # Stefan Schukat, they now work!
+    expected_exception = None
+    arrayData = ( ((1,2,1),(3,4),(5,6)), ((7,8),(9,10),(11,12)) )
+    _DoTestArray(vbtest, arrayData, expected_exception)
+    arrayData = ( ((vbtest,vbtest),), ((vbtest,),))
+    _DoTestArray(vbtest, arrayData, expected_exception)
+    # Pass bad data - last item wrong size
+    arrayData = ( ((1,2),(3,4),(5,6,8)), ((7,8),(9,10),(11,12)) )
+    _DoTestArray(vbtest, arrayData, expected_exception)
+    
+    # byref safearray results with incorrect size.
+    callback_ob = wrap(TestObject(), useDispatcher = useDispatcher)
+    print "** Expecting a 'ValueError' exception to be printed next:"
+    try:
+        vbtest.DoCallbackSafeArraySizeFail(callback_ob)
+    except pythoncom.com_error, (hr, msg, exc, arg):
+        assert exc[1] == "Python COM Server Internal Error", "Didnt get the correct exception - '%s'" % (exc,)
+        
+    if bUseGenerated:
+        # This one is a bit strange!  The array param is "ByRef", as VB insists.
+        # The function itself also _returns_ the array param.
+        # Therefore, Python sees _2_ result values - one for the result,
+        # and one for the byref.
+        testData = string.split("Mark was here")
+        resultData, byRefParam = vbtest.PassSAFEARRAY(testData)
+        # Un-unicode everything (only 1.5.2)
+        try:
+            unicode
+        except NameError : # No builtin named Unicode!
+            resultData = map(str, resultData)
+            byRefParam = map(str, byRefParam)
+        if testData != list(resultData):
+            raise error, "The safe array data was not what we expected - got " + str(resultData)
+        if testData != list(byRefParam):
+            raise error, "The safe array data was not what we expected - got " + str(byRefParam)
+        testData = [1.0, 2.0, 3.0]
+        resultData, byRefParam = vbtest.PassSAFEARRAYVariant(testData)
+        assert testData == list(byRefParam)
+        assert testData == list(resultData)
+        testData = ["hi", "from", "Python"]
+        resultData, byRefParam = vbtest.PassSAFEARRAYVariant(testData)
+        assert testData == list(byRefParam), "Expected '%s', got '%s'" % (testData, list(byRefParam))
+        assert testData == list(resultData), "Expected '%s', got '%s'" % (testData, list(resultData))
+        # This time, instead of an explicit str() for 1.5, we just
+        # pass Unicode, so the result should compare equal
+        testData = [1, 2.0, pythoncom.Unicode("3")]
+        resultData, byRefParam = vbtest.PassSAFEARRAYVariant(testData)
+        assert testData == list(byRefParam)
+        assert testData == list(resultData)
+    print "Array tests passed"
+
+def TestStructs(vbtest):
+    """Exercise VB structure (UDT/Record) support on the test object.
+
+    vbtest -- a PyCOMVBTest.Tester dispatch object.
+    Raises the module-level 'error' on any unexpected behaviour.
+    """
+    # Assigning a non-numeric string to an int property must fail with
+    # DISP_E_TYPEMISMATCH - anything else is a bug.
+    try:
+        vbtest.IntProperty = "One"
+        raise error, "Should have failed by now"
+    except pythoncom.com_error, (hr, desc, exc, argErr):
+        if hr != winerror.DISP_E_TYPEMISMATCH:
+            raise error, "Expected DISP_E_TYPEMISMATCH"
+
+    # Check initial struct property values, then mutate and re-check.
+    s = vbtest.StructProperty
+    if s.int_val != 99 or str(s.str_val) != "hello":
+        raise error, "The struct value was not correct"
+    s.str_val = "Hi from Python"
+    s.int_val = 11
+    if s.int_val != 11 or str(s.str_val) != "Hi from Python":
+        raise error, "The struct value didnt persist!"
+
+    # Nested (sub) structures must behave the same way.
+    if s.sub_val.int_val != 66 or str(s.sub_val.str_val) != "sub hello":
+        raise error, "The sub-struct value was not correct"
+    sub = s.sub_val
+    sub.int_val = 22
+    if sub.int_val != 22:
+        print sub.int_val
+        raise error, "The sub-struct value didnt persist!"
+
+    if s.sub_val.int_val != 22:
+        print s.sub_val.int_val
+        raise error, "The sub-struct value (re-fetched) didnt persist!"
+
+    # Arrays of structures nested inside a structure.
+    if s.sub_val.array_val[0].int_val != 0 or str(s.sub_val.array_val[0].str_val) != "zero":
+        print s.sub_val.array_val[0].int_val
+        raise error, "The array element wasnt correct"
+    s.sub_val.array_val[0].int_val = 99
+    s.sub_val.array_val[1].int_val = 66
+    if s.sub_val.array_val[0].int_val != 99 or \
+       s.sub_val.array_val[1].int_val != 66:
+        print s.sub_val.array_val[0].int_val
+        raise error, "The array element didnt persist."
+    # Now pass the struct back to VB
+    vbtest.StructProperty = s
+    # And get it back again
+    s = vbtest.StructProperty
+    if s.int_val != 11 or str(s.str_val) != "Hi from Python":
+        raise error, "After sending to VB, the struct value didnt persist!"
+    if s.sub_val.array_val[0].int_val != 99:
+        raise error, "After sending to VB, the struct array value didnt persist!"
+
+    # Now do some object equality tests.
+    assert s==s
+    assert s != s.sub_val
+    import copy
+    s2 = copy.copy(s)
+    assert s is not s2
+    assert s == s2
+    s2.int_val = 123
+    assert s != s2
+    # Make sure everything works with functions
+    s2 = vbtest.GetStructFunc()
+    assert s==s2
+    vbtest.SetStructSub(s2)
+
+    # Create a new structure, and set its elements.
+    s = win32com.client.Record("VBStruct", vbtest)
+    assert s.int_val == 0, "new struct inst initialized correctly!"
+    s.int_val = -1
+    vbtest.SetStructSub(s)
+    assert vbtest.GetStructFunc().int_val == -1, "new struct didnt make the round trip!"
+    # Finally, test stand-alone structure arrays.
+    s_array = vbtest.StructArrayProperty
+    assert s_array is None, "Expected None from the uninitialized VB array"
+    vbtest.MakeStructArrayProperty(3)
+    s_array = vbtest.StructArrayProperty
+    assert len(s_array)==3
+    for i in range(len(s_array)):
+        assert s_array[i].int_val == i
+        assert s_array[i].sub_val.int_val == i
+        assert s_array[i].sub_val.array_val[0].int_val == i
+        assert s_array[i].sub_val.array_val[1].int_val == i+1
+        assert s_array[i].sub_val.array_val[2].int_val == i+2
+
+    # Some error type checks.
+    try:
+        s.bad_attribute
+        raise RuntimeError, "Could get a bad attribute"
+    except AttributeError:
+        pass
+    # __members__ reflects the record's field names, in declaration order.
+    m = s.__members__
+    assert m[0]=="int_val" and m[1]=="str_val" and m[2]=="ob_val" and m[3]=="sub_val"
+
+    # NOTE - a COM error is _not_ acceptable here!
+    print "Struct/Record tests passed"
+
+def TestVBInterface(ob):
+    # Check a simple get/set round-trip on the VB interface tester object.
+    t = ob.GetInterfaceTester(2)
+    if t.getn() != 2:
+        raise error, "Initial value wrong"
+    t.setn(3)
+    if t.getn() != 3:
+        raise error, "New value wrong"
+
+def DoTestAll():
+    # Run the full suite once via normal (makepy-capable) dispatch...
+    o = win32com.client.Dispatch("PyCOMVBTest.Tester")
+    TestVB(o,1)
+
+    # ...and once via purely dynamic (late-bound) dispatch.
+    o = win32com.client.dynamic.DumbDispatch("PyCOMVBTest.Tester")
+    TestVB(o,0)
+
+def TestAll():
+    # The tests rely on assert statements, which -O strips - refuse to run.
+    if not __debug__:
+        raise RuntimeError, "This must be run in debug mode - we use assert!"
+    try:
+        DoTestAll()
+        print "All tests appear to have worked!"
+    except:
+        print "TestAll() failed!!"
+        traceback.print_exc()
+
+# Make this test run under our test suite to leak tests etc work
+def suite():
+    # Wrap TestAll so the standard test runner (with leak checks) can use it.
+    import unittest
+    test = util.CapturingFunctionTestCase(TestAll, description="VB tests")
+    suite = unittest.TestSuite()
+    suite.addTest(test)
+    return suite
+
+if __name__=='__main__':
+    util.testmain()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testvbscript_regexp.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testvbscript_regexp.py
new file mode 100644
index 0000000..c5cdfb7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testvbscript_regexp.py
@@ -0,0 +1,37 @@
+import unittest
+from win32com.client.gencache import EnsureDispatch
+from win32com.client.dynamic import DumbDispatch
+import win32com.test.util
+
+class RegexTest(win32com.test.util.TestCase):
+    """Exercise the VBScript.Regexp COM object via both dynamic and
+    makepy-generated dispatch.
+    """
+    def _CheckMatches(self, match, expected):
+        # Compare the FirstIndex of each match against the expected offsets.
+        found = []
+        for imatch in match:
+            found.append(imatch.FirstIndex)
+        self.assertEquals(list(found), list(expected))
+
+    def _TestVBScriptRegex(self, re):
+        # 'Python' appears at offsets 0, 7, 14 and 21 (case varies).
+        StringToSearch = "Python python pYthon Python"
+        re.Pattern = "Python"
+        re.Global = True
+        
+        re.IgnoreCase = True
+        match = re.Execute(StringToSearch)
+        expected = 0, 7, 14, 21
+        self._CheckMatches(match, expected)
+    
+        re.IgnoreCase = False
+        match = re.Execute(StringToSearch)
+        expected = 0, 21
+        self._CheckMatches(match, expected)
+    
+    def testDynamic(self):
+        re = DumbDispatch("VBScript.Regexp")
+        self._TestVBScriptRegex(re)
+
+    def testGenerated(self):
+        re = EnsureDispatch("VBScript.Regexp")
+        self._TestVBScriptRegex(re)
+
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.js b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.js
new file mode 100644
index 0000000..347401c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.js
@@ -0,0 +1,12 @@
+//Args:  input-file style-file output-file
+// Apply an XSL stylesheet to an XML document and write the result to a
+// text file, for the win32com XSLT test (driven via cscript).
+var xml  = WScript.CreateObject("Microsoft.XMLDOM");          //input
+xml.validateOnParse=false;
+xml.load(WScript.Arguments(0));
+var xsl  = WScript.CreateObject("Microsoft.XMLDOM");          //style
+xsl.validateOnParse=false;
+xsl.load(WScript.Arguments(1));
+var out = WScript.CreateObject("Scripting.FileSystemObject"); //output
+var replace = true; var unicode = false; //output file properties
+var hdl = out.CreateTextFile( WScript.Arguments(2), replace, unicode )
+hdl.write( xml.transformNode( xsl.documentElement ));
+//eof
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.py
new file mode 100644
index 0000000..5706973
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.py
@@ -0,0 +1,29 @@
+import os
+import tempfile
+import unittest
+import win32com.test.util
+
+expected_output = "The jscript test worked.\nThe Python test worked"
+
+class XSLT(win32com.test.util.TestCase):
+    """Run testxslt.js via cscript and compare the file it produces
+    against expected_output, always cleaning up the temp file.
+    """
+    def testAll(self):
+        output_name = tempfile.mktemp("-pycom-test")
+        cmd = "cscript //nologo testxslt.js doesnt_matter.xml testxslt.xsl " + output_name
+        win32com.test.util.ExecuteShellCommand(cmd, self)
+        try:
+            f=open(output_name)
+            try:
+                got = f.read()
+                if got != expected_output:
+                    print "ERROR: XSLT expected output of %r" % (expected_output,)
+                    print "but got %r" % (got,)
+            finally:
+                f.close()
+        finally:
+            # Best-effort cleanup of the temp output file.
+            try:
+                os.unlink(output_name)
+            except os.error:
+                pass
+
+if __name__=='__main__':
+    unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.xsl b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.xsl
new file mode 100644
index 0000000..05e960dd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/testxslt.xsl
@@ -0,0 +1,55 @@
+<!-- include in the xsl:stylesheet element:

+     (a) the version attribute as usual

+     (b) the XSLT namespace declaration as usual

+     (c) the MSXSL namespace declaration

+     (d) a namespace declaration to identify your functions

+     (e) the 'extension-element-prefixes' attribute to give the

+         namespace prefixes that indicate extension elements

+         (i.e. 'msxsl')

+     (f) the 'exclude-result-prefixes' attribute to indicate the

+         namespaces that aren't supposed to be part of the result

+         tree (i.e. 'foo') -->

+<xsl:stylesheet version="1.0"

+                xmlns:xsl="http://www.w3.org/1999/XSL/Transform"

+                xmlns:msxsl="urn:schemas-microsoft-com:xslt"

+                xmlns:foo="http://www.pythoncom-test.com/foo"

+                xmlns:bar="http://www.pythoncom-test.com/bar"

+                extension-element-prefixes="msxsl"

+                exclude-result-prefixes="foo bar">

+

+<!-- do whatever output you want - you can use full XSLT functionality

+     -->

+<xsl:output method="html" />

+

+<!-- define the Javascript functions that you want to include within

+     a msxsl:script element.

+     - language indicates the scripting language

+     - implements-prefix gives the namespace prefix that you declared

+       for your function (i.e. foo) -->

+<msxsl:script language="javascript"

+              implements-prefix="foo">

+   function worked() {

+      return "The jscript test worked";

+    }

+</msxsl:script>

+

+<!-- ditto for Python, using the 'bar' namespace

+-->

+<msxsl:script language="python"

+              implements-prefix="bar">

+def worked():

+  return "The Python test worked"

+</msxsl:script>

+

+<xsl:template match="/">

+<!-- The output template.  Keep whitespace down as our test matches text exactly -->

+<!-- call your functions using the prefix that you've used (i.e.

+     foo) anywhere you can normally use an XPath function, but

+     make sure it's returning the right kind of object -->

+<xsl:value-of select="foo:worked()" />.

+<xsl:value-of select="bar:worked()" />

+

+</xsl:template>

+

+</xsl:stylesheet>

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/util.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/util.py
new file mode 100644
index 0000000..00eb906
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/test/util.py
@@ -0,0 +1,223 @@
+import sys, os
+import win32api
+import tempfile
+import unittest
+import gc
+import pythoncom
+import winerror
+import cStringIO as StringIO
+from pythoncom import _GetInterfaceCount, _GetGatewayCount
+
+def CheckClean():
+    """Warn if any COM interface/gateway objects are still alive - the
+    process should be 'clean' when a test finishes.
+    """
+    # Ensure no lingering exceptions - Python should have zero outstanding
+    # COM objects
+    sys.exc_traceback = sys.exc_value = sys.exc_type = None
+    c = _GetInterfaceCount()
+    if c:
+        print "Warning - %d com interface objects still alive" % c
+    c = _GetGatewayCount()
+    if c:
+        print "Warning - %d com gateway objects still alive" % c
+
+def RegisterPythonServer(filename, verbose=0):
+    """Register a Python COM server by executing its script with the
+    current interpreter; raises RuntimeError on a non-zero exit code.
+    """
+    cmd = '%s "%s" > nul 2>&1' % (win32api.GetModuleFileName(0), filename)
+    if verbose:
+        print "Registering engine", filename
+#       print cmd
+    rc = os.system(cmd)
+    if rc:
+        raise RuntimeError, "Registration of engine '%s' failed" % filename
+
+def ExecuteShellCommand(cmd, testcase,
+                        expected_output = None, # Set to '' to check for nothing
+                        tracebacks_ok = 0, # OK if the output contains a t/b?
+                        ):
+    """Run a shell command, capturing stdout+stderr to a temp file.
+
+    Fails 'testcase' if the exit code is non-zero, if the output doesn't
+    match 'expected_output' (when given), or (unless tracebacks_ok) if a
+    traceback appears in the output.  Returns the output on success.
+    """
+    output_name = tempfile.mktemp('win32com_test')
+    cmd = cmd + ' > "%s" 2>&1' % output_name
+    rc = os.system(cmd)
+    output = open(output_name, "r").read().strip()
+    # Local exception type funnels all failure modes through one
+    # diagnostic path below.
+    class Failed(Exception): pass
+    try:
+        if rc:
+            raise Failed, "exit code was " + str(rc)
+        if expected_output is not None and output != expected_output:
+            raise Failed, \
+                  "Expected output %r (got %r)" % (expected_output, output)
+        if not tracebacks_ok and \
+           output.find("Traceback (most recent call last)")>=0:
+            raise Failed, "traceback in program output"
+        return output
+    except Failed, why:
+        print "Failed to exec command '%r'" % cmd
+        print "Failed as", why
+        print "** start of program output **"
+        print output
+        print "** end of program output **"
+        testcase.fail("Executing '%s' failed as %s" % (cmd, why))
+
+def assertRaisesCOM_HRESULT(testcase, hresult, func, *args, **kw):
+    try:
+        func(*args, **kw)
+    except pythoncom.com_error, details:
+        if details[0]==hresult:
+            return
+    testcase.fail("Excepected COM exception with HRESULT 0x%x" % hresult)
+
+class CaptureWriter:
+    """File-like object that captures sys.stdout/sys.stderr writes so
+    test output can be inspected programmatically.
+    """
+    def __init__(self):
+        self.old_err = self.old_out = None
+        self.clear()
+    def capture(self):
+        # Redirect both stdout and stderr to ourselves.
+        self.clear()
+        self.old_out = sys.stdout
+        self.old_err = sys.stderr
+        sys.stdout = sys.stderr = self
+    def release(self):
+        # Restore the original streams (safe to call more than once).
+        if self.old_out:
+            sys.stdout = self.old_out
+            self.old_out = None
+        if self.old_err:
+            sys.stderr = self.old_err
+            self.old_err = None
+    def clear(self):
+        self.captured = []
+    def write(self, msg):
+        self.captured.append(msg)
+    def get_captured(self):
+        return "".join(self.captured)
+    def get_num_lines_captured(self):
+        return len("".join(self.captured).split("\n"))
+
+class LeakTestCase(unittest.TestCase):
+    def __init__(self, real_test):
+        unittest.TestCase.__init__(self)
+        self.real_test = real_test
+        self.num_test_cases = 1
+        self.num_leak_iters = 2 # seems to be enough!
+        if hasattr(sys, "gettotalrefcount"):
+            self.num_test_cases = self.num_test_cases + self.num_leak_iters
+    def countTestCases(self):
+        return self.num_test_cases
+    def runTest(self):
+        assert 0, "not used"
+    def __call__(self, result = None):
+        # Always ensure we don't leak gateways/interfaces
+        gc.collect()
+        ni = _GetInterfaceCount()
+        ng = _GetGatewayCount()
+        self.real_test(result)
+        # Failed - no point checking anything else
+        if result.shouldStop or not result.wasSuccessful():
+            return
+        self._do_leak_tests(result)
+        gc.collect()
+        lost_i = _GetInterfaceCount() - ni
+        lost_g = _GetGatewayCount() - ng
+        if lost_i or lost_g:
+            msg = "%d interface objects and %d gateway objects leaked" \
+                                                        % (lost_i, lost_g)
+            result.addFailure(self.real_test, (AssertionError, msg, None))
+    def _do_leak_tests(self, result = None):
+        try:
+            gtrc = sys.gettotalrefcount
+        except AttributeError:
+            return # can't do leak tests in this build
+            def gtrc():
+                return 0
+        # Assume already called once, to prime any caches etc
+        trc = gtrc()
+        for i in range(self.num_leak_iters):
+            self.real_test(result)
+            if result.shouldStop:
+                break
+        del i # created after we remembered the refcount!
+        # int division here means one or 2 stray references won't force 
+        # failure, but one per loop 
+        lost = (gtrc() - trc) // self.num_leak_iters
+        if lost < 0:
+            msg = "LeakTest: %s appeared to gain %d references!!" % (self.real_test, -lost)
+            result.addFailure(self.real_test, (AssertionError, msg, None))
+        if lost > 0:
+            msg = "LeakTest: %s lost %d references" % (self.real_test, lost)
+            result.addFailure(self.real_test, (AssertionError, msg, None))
+
+class TestLoader(unittest.TestLoader):
+    """TestLoader that wraps each test case in a LeakTestCase, and honours
+    a module-level suite() function when loading a module.
+    """
+    def loadTestsFromTestCase(self, testCaseClass):
+        """Return a suite of all tests cases contained in testCaseClass"""
+        leak_tests = []
+        for name in self.getTestCaseNames(testCaseClass):
+            real_test = testCaseClass(name)
+            leak_test = self._getTestWrapper(real_test)
+            leak_tests.append(leak_test)
+        return self.suiteClass(leak_tests)
+    def _getTestWrapper(self, test):
+        # Tests may opt out of leak checking via a 'no_leak_tests' attribute.
+        no_leak_tests = getattr(test, "no_leak_tests", False)
+        if no_leak_tests:
+            print "Test says it doesn't want leak tests!"
+            return test
+        return LeakTestCase(test)
+    def loadTestsFromModule(self, mod):
+        if hasattr(mod, "suite"):
+            return mod.suite()
+        else:
+            return unittest.TestLoader.loadTestsFromModule(self, mod)
+    def loadTestsFromName(self, name, module=None):
+        test = unittest.TestLoader.loadTestsFromName(self, name, module)
+        if isinstance(test, unittest.TestSuite):
+            pass # hmmm? print "Don't wrap suites yet!", test._tests
+        elif isinstance(test, unittest.TestCase):
+            test = self._getTestWrapper(test)
+        else:
+            print "XXX - what is", test
+        return test
+
+# We used to override some of this (and may later!)
+TestCase = unittest.TestCase
+
+def CapturingFunctionTestCase(*args, **kw):
+    # Factory: build the capturing test case and wrap it in leak checks.
+    real_test = _CapturingFunctionTestCase(*args, **kw)
+    return LeakTestCase(real_test)
+
+class _CapturingFunctionTestCase(unittest.FunctionTestCase):#, TestCaseMixin):
+    """FunctionTestCase that captures all output while the test runs and
+    records any captured traceback as a test error.
+    """
+    def __call__(self, result=None):
+        if result is None: result = self.defaultTestResult()
+        writer = CaptureWriter()
+        #self._preTest()
+        writer.capture()
+        try:
+            unittest.FunctionTestCase.__call__(self, result)
+            if getattr(self, "do_leak_tests", 0) and hasattr(sys, "gettotalrefcount"):
+                self.run_leak_tests(result)
+        finally:
+            writer.release()
+            #self._postTest(result)
+        output = writer.get_captured()
+        self.checkOutput(output, result)
+        if result.showAll:
+            print output
+    def checkOutput(self, output, result):
+        # A traceback in the captured output is treated as a test error.
+        if output.find("Traceback")>=0:
+            msg = "Test output contained a traceback\n---\n%s\n---" % output
+            result.errors.append((self, msg))
+
+class ShellTestCase(unittest.TestCase):
+    """A test case that runs a shell command and checks its output."""
+    def __init__(self, cmd, expected_output):
+        self.__cmd = cmd
+        self.__eo = expected_output
+        unittest.TestCase.__init__(self)
+    def runTest(self):
+        ExecuteShellCommand(self.__cmd, self, self.__eo)
+    def __str__(self):
+        # Truncate long command lines for readable test names.
+        max = 30
+        if len(self.__cmd)>max:
+            cmd_repr = self.__cmd[:max] + "..."
+        else:
+            cmd_repr = self.__cmd
+        return "exec: " + cmd_repr
+
+def testmain(*args, **kw):
+    """unittest.main() wrapper that installs our leak-checking TestLoader
+    unless the caller supplied their own.
+    """
+    new_kw = kw.copy()
+    if not new_kw.has_key('testLoader'):
+        new_kw['testLoader'] = TestLoader()
+    unittest.main(*args, **new_kw)
+    
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/universal.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/universal.py
new file mode 100644
index 0000000..ba290a3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/universal.py
@@ -0,0 +1,194 @@
+# Code that packs and unpacks the Univgw structures.
+
+# See if we have a special directory for the binaries (for developers)
+import types
+import pythoncom
+from win32com.client import gencache
+
+com_error = pythoncom.com_error
+_univgw = pythoncom._univgw
+
+def RegisterInterfaces(typelibGUID, lcid, major, minor, interface_names = None):
+    """Register vtable (universal gateway) support for interfaces in a typelib.
+
+    If interface_names is None, all suitable interfaces in the typelib are
+    registered.  Returns a list of (dispid, invkind, name) tuples for the
+    methods of the registered interfaces.
+    """
+    ret = [] # return a list of (dispid, invkind, funcname) for our policy's benefit
+    # First see if we have makepy support.  If so, we can probably satisfy the request without loading the typelib.
+    try:
+        mod = gencache.GetModuleForTypelib(typelibGUID, lcid, major, minor)
+    except ImportError:
+        mod = None
+    if mod is None:
+        import win32com.client.build
+        # Load up the typelib and build (but don't cache) it now
+        tlb = pythoncom.LoadRegTypeLib(typelibGUID, major, minor, lcid)
+        typecomp_lib = tlb.GetTypeComp()
+        if interface_names is None:
+          interface_names = []
+          for i in range(tlb.GetTypeInfoCount()):
+            info = tlb.GetTypeInfo(i)
+            doc = tlb.GetDocumentation(i)
+            attr = info.GetTypeAttr()
+            # Collect plain interfaces, plus dual dispatch interfaces.
+            if attr.typekind == pythoncom.TKIND_INTERFACE or \
+               (attr.typekind == pythoncom.TKIND_DISPATCH and attr.wTypeFlags & pythoncom.TYPEFLAG_FDUAL):
+                interface_names.append(doc[0])
+        for name in interface_names:
+            type_info, type_comp = typecomp_lib.BindType(name, )
+            # Not sure why we don't get an exception here - BindType's C
+            # impl looks correct..
+            if type_info is None:
+                raise ValueError, "The interface '%s' can not be located" % (name,)
+            # If we got back a Dispatch interface, convert to the real interface.
+            attr = type_info.GetTypeAttr()
+            if attr.typekind == pythoncom.TKIND_DISPATCH:
+                refhtype = type_info.GetRefTypeOfImplType(-1)
+                type_info = type_info.GetRefTypeInfo(refhtype)
+                attr = type_info.GetTypeAttr()
+            item = win32com.client.build.VTableItem(type_info, attr, type_info.GetDocumentation(-1))
+            _doCreateVTable(item.clsid, item.python_name, item.bIsDispatch, item.vtableFuncs)
+            for info in item.vtableFuncs:
+                names, dispid, desc = info
+                invkind = desc[4]
+                ret.append((dispid, invkind, names[0]))
+    else:
+        # Cool - can use cached makepy info.
+        if not interface_names:
+            interface_names = mod.VTablesToClassMap.values()
+        for name in interface_names:
+            try:
+                iid = mod.NamesToIIDMap[name]
+            except KeyError:
+                raise ValueError, "Interface '%s' does not exist in this cached typelib" % (name,)
+#            print "Processing interface", name
+            sub_mod = gencache.GetModuleForCLSID(iid)
+            is_dispatch = getattr(sub_mod, name + "_vtables_dispatch_", None)
+            method_defs = getattr(sub_mod, name + "_vtables_", None)
+            if is_dispatch is None or method_defs is None:
+                raise ValueError, "Interface '%s' is IDispatch only" % (name,)
+
+            # And create the univgw defn
+            _doCreateVTable(iid, name, is_dispatch, method_defs)
+            for info in method_defs:
+                names, dispid, desc = info
+                invkind = desc[4]
+                ret.append((dispid, invkind, names[0]))
+    return ret
+
+def _doCreateVTable(iid, interface_name, is_dispatch, method_defs):
+    # Build a vtable definition and register it with the universal gateway.
+    defn = Definition(iid, is_dispatch, method_defs)
+    vtbl = _univgw.CreateVTable(defn, is_dispatch)
+    _univgw.RegisterVTable(vtbl, iid, interface_name)
+
+def _CalcTypeSize(typeTuple):
+    """Return the stack size in bytes consumed by an argument described
+    by the given (vt, ...) type tuple.
+    """
+    t = typeTuple[0]
+    if t & (pythoncom.VT_BYREF | pythoncom.VT_ARRAY):
+        # Its a pointer.
+        cb = _univgw.SizeOfVT(pythoncom.VT_PTR)[1]
+    elif t == pythoncom.VT_RECORD:
+        # Just because a type library uses records doesn't mean the user
+        # is trying to.  We need a better place to warn about this, but it
+        # isn't here.
+        #try:
+        #    import warnings
+        #    warnings.warn("warning: records are known to not work for vtable interfaces")
+        #except ImportError:
+        #    print "warning: records are known to not work for vtable interfaces"
+        cb = _univgw.SizeOfVT(pythoncom.VT_PTR)[1]
+        #cb = typeInfo.GetTypeAttr().cbSizeInstance
+    else:
+        cb = _univgw.SizeOfVT(t)[1]
+    return cb
+
+class Arg:
+    """Describes a single argument of a vtable method."""
+    def __init__(self, arg_info, name = None):
+        self.name = name
+        self.vt, self.inOut, self.default, self.clsid = arg_info
+        self.size = _CalcTypeSize(arg_info)
+        # Offset from the beginning of the arguments of the stack.
+        self.offset = 0
+
+class Method:
+    """Describes a single vtable method - its dispid, invoke kind, and the
+    packed size/offset of each argument on the stack.
+    """
+    def __init__(self, method_info, isEventSink=0):
+        all_names, dispid, desc = method_info
+        name = all_names[0]
+        names = all_names[1:]
+        invkind = desc[4]
+        arg_defs = desc[2]
+        ret_def = desc[8]
+        
+        self.dispid = dispid
+        self.invkind = invkind
+        # We dont use this ATM.
+#        self.ret = Arg(ret_def)
+        # Event sink methods conventionally get an "On" prefix.
+        if isEventSink and name[:2] != "On":
+            name = "On%s" % name
+        self.name = name
+        cbArgs = 0
+        self.args = []
+        # Walk the args, assigning each its byte offset into the arg block.
+        for argDesc in arg_defs:
+            arg = Arg(argDesc)
+            arg.offset = cbArgs
+            cbArgs = cbArgs + arg.size
+            self.args.append(arg)
+        self.cbArgs = cbArgs
+        self._gw_in_args = self._GenerateInArgTuple()
+        self._gw_out_args = self._GenerateOutArgTuple()
+
+    def _GenerateInArgTuple(self):
+        # Given a method, generate the in argument tuple
+        l = []
+        for arg in self.args:
+            if arg.inOut & pythoncom.PARAMFLAG_FIN or \
+                 arg.inOut == 0:
+                l.append((arg.vt, arg.offset, arg.size))
+        return tuple(l)
+
+    def _GenerateOutArgTuple(self):
+        # Given a method, generate the out argument tuple
+        l = []
+        for arg in self.args:
+            if arg.inOut & pythoncom.PARAMFLAG_FOUT or \
+               arg.inOut & pythoncom.PARAMFLAG_FRETVAL or \
+               arg.inOut == 0:
+                l.append((arg.vt, arg.offset, arg.size, arg.clsid))
+        return tuple(l)
+
+class Definition:
+    """Describes a vtable interface - its IID and methods - and dispatches
+    incoming vtable calls to a Python policy object.
+    """
+    def __init__(self, iid, is_dispatch, method_defs):
+        self._iid = iid
+        self._methods = []
+        self._is_dispatch = is_dispatch
+        for info in method_defs:
+            entry = Method(info)
+            self._methods.append(entry)
+    def iid(self):
+        return self._iid
+    def vtbl_argsizes(self):
+        return map(lambda m: m.cbArgs, self._methods)
+    def dispatch(self, ob, index, argPtr,
+                 ReadFromInTuple=_univgw.ReadFromInTuple,
+                 WriteFromOutTuple=_univgw.WriteFromOutTuple):
+        "Dispatch a call to an interface method."
+        meth = self._methods[index]
+        # Infer S_OK if they don't return anything bizarre.
+        hr = 0 
+        args = ReadFromInTuple(meth._gw_in_args, argPtr)
+        # If ob is a dispatcher, ensure a policy
+        ob = getattr(ob, "policy", ob)
+        # Ensure the correct dispid is setup
+        ob._dispid_to_func_[meth.dispid] = meth.name
+        retVal = ob._InvokeEx_(meth.dispid, 0, meth.invkind, args, None, None)
+        # None is an allowed return value stating that
+        # the code doesn't want to touch any output arguments.
+        if type(retVal) == types.TupleType: # Like pythoncom, we special case a tuple.
+            # However, if they want to return a specific HRESULT,
+            # then they have to return all of the out arguments
+            # AND the HRESULT.
+            if len(retVal) == len(meth._gw_out_args) + 1:
+                hr = retVal[0]
+                retVal = retVal[1:]
+            else:
+                raise TypeError, "Expected %s return values, got: %s" % (len(meth._gw_out_args) + 1, len(retVal))
+        else:
+            retVal = [retVal]
+            retVal.extend([None] * (len(meth._gw_out_args)-1))
+            retVal = tuple(retVal)
+        WriteFromOutTuple(retVal, meth._gw_out_args, argPtr)
+        return hr
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32com/util.py b/depot_tools/release/win/python_24/Lib/site-packages/win32com/util.py
new file mode 100644
index 0000000..0dca29ac
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32com/util.py
@@ -0,0 +1,31 @@
+"""General utility functions common to client and server.
+
+  This module contains a collection of general purpose utility functions.
+"""
+import pythoncom
+import win32api, win32con
+
+def IIDToInterfaceName(iid):
+	"""Converts an IID to a string interface name.  
+	
+	Used primarily for debugging purposes, this allows a cryptic IID to
+	be converted to a useful string name.  This will firstly look for interfaces
+	known (ie, registered) by pythoncom.  If not known, it will look in the
+	registry for a registered interface.
+
+	iid -- An IID object.
+
+	Result -- Always a string - either an interface name, or '<Unregistered interface>'
+	"""
+	try:
+		return pythoncom.ServerInterfaces[iid]
+	except KeyError:
+		try:
+			# NOTE(review): the ImportError guard looks vestigial - win32api
+			# is imported at module level, so it should never fire here.
+			try:
+				return win32api.RegQueryValue(win32con.HKEY_CLASSES_ROOT, "Interface\\%s" % iid)
+			except win32api.error:
+				pass
+		except ImportError:
+			pass
+		return str(iid)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/__init__.py
new file mode 100644
index 0000000..da41ea7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/__init__.py
@@ -0,0 +1,101 @@
+import win32com
+import win32com.client
+
+# When frozen, __path__ is a plain string, so the binary 'adsi' module must
+# be imported and aliased into sys.modules manually.
+if type(__path__)==type(''):
+	# For freeze to work!
+	import sys
+	try:
+		import adsi
+		sys.modules['win32com.adsi.adsi'] = adsi
+	except ImportError:
+		pass
+else:
+	# See if we have a special directory for the binaries (for developers)
+	win32com.__PackageSupportBuildPath__(__path__)
+
+
+# Some helpers
+# We want to _look_ like the ADSI module, but provide some additional
+# helpers.
+
+# Of specific note - most of the interfaces supported by ADSI
+# derive from IDispatch - thus, you get the custome methods from the
+# interface, as well as via IDispatch.
+import pythoncom
+from adsi import *
+
+LCID = 0
+
+IDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch]
+IADsContainerType = pythoncom.TypeIIDs[adsi.IID_IADsContainer]
+
+def _get_good_ret(ob,
+				  # Named arguments used internally
+				  resultCLSID = None):
+	# Wrap raw COM objects that support IDispatch in an ADSIDispatch.
+	assert resultCLSID is None, "Now have type info for ADSI objects - fix me!"
+	# See if the object supports IDispatch
+	if hasattr(ob, "Invoke"):
+		import win32com.client.dynamic
+		name = "Dispatch wrapper around %r" %  ob
+		return win32com.client.dynamic.Dispatch(ob, name, ADSIDispatch)
+	return ob
+
+
+class ADSIEnumerator:
+	"""Sequential enumerator over an ADSI container (IADsContainer)."""
+	def __init__(self, ob):
+		# Query the object for the container interface.
+		self._cont_ = ob.QueryInterface(IID_IADsContainer)
+		self._oleobj_ = ADsBuildEnumerator(self._cont_) # a PyIADsEnumVARIANT
+		self.index = -1
+	def __getitem__(self, index):
+		return self.__GetIndex(index)
+	def __call__(self, index):
+		return self.__GetIndex(index)
+	def __GetIndex(self, index):
+		# Only strictly sequential access works (old-style iteration protocol).
+		if type(index)!=type(0): raise TypeError, "Only integer indexes are supported for enumerators"
+		if index != self.index + 1:
+			# Index requested out of sequence.
+			raise ValueError, "You must index this object sequentially"
+		self.index = index
+		result = ADsEnumerateNext(self._oleobj_, 1)
+		if len(result):
+			return _get_good_ret(result[0])
+		# Failed - reset for next time around.
+		self.index = -1
+		self._oleobj_ = ADsBuildEnumerator(self._cont_) # a PyIADsEnumVARIANT
+		raise IndexError, "list index out of range"
+
+class ADSIDispatch(win32com.client.CDispatch):
+	"""CDispatch specialization that prefers the native ADSI interfaces."""
+	def _wrap_dispatch_(self, ob, userName = None, returnCLSID = None, UnicodeToString = win32com.client.NeedUnicodeConversions):
+		if not userName:
+			userName = "ADSI-object"
+		olerepr = win32com.client.dynamic.MakeOleRepr(ob, None, None)
+		return ADSIDispatch(ob, olerepr, userName, UnicodeToString = UnicodeToString)
+
+	def _NewEnum(self):
+		try:
+			return ADSIEnumerator(self)
+		except pythoncom.com_error:
+			# doesnt support it - let our base try!
+			return win32com.client.CDispatch._NewEnum(self)
+
+	def __getattr__(self, attr):
+		# Try the native (custom interface) attribute first, then IDispatch.
+		try:
+			return getattr(self._oleobj_, attr)
+		except AttributeError:
+			return win32com.client.CDispatch.__getattr__(self, attr)
+
+	def QueryInterface(self, iid):
+		ret = self._oleobj_.QueryInterface(iid)
+		return _get_good_ret(ret)
+
+# We override the global methods to do the right thing.
+_ADsGetObject = ADsGetObject # The one in the .pyd
+def ADsGetObject(path, iid = pythoncom.IID_IDispatch):
+	# Delegate to the native function, then wrap IDispatch results.
+	ret = _ADsGetObject(path, iid)
+	return _get_good_ret(ret)
+
+_ADsOpenObject = ADsOpenObject
+def ADsOpenObject(path, username, password, reserved = 0, iid = pythoncom.IID_IDispatch):
+	# As ADsGetObject, but binding with explicit credentials.
+	ret = _ADsOpenObject(path, username, password, reserved, iid)
+	return _get_good_ret(ret)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/adsi.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/adsi.pyd
new file mode 100644
index 0000000..2fb425e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/adsi.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/adsicon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/adsicon.py
new file mode 100644
index 0000000..fdb083d9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/adsicon.py
@@ -0,0 +1,336 @@
+# Attribute-modification control codes (used as the second element of the
+# attribute tuples passed to IDirectoryObject updates - see the scp demo).
+ADS_ATTR_CLEAR = ( 1 )
+ADS_ATTR_UPDATE = ( 2 )
+ADS_ATTR_APPEND = ( 3 )
+ADS_ATTR_DELETE = ( 4 )
+# ADSI extension dispatch-ID range and initialization flags.
+ADS_EXT_MINEXTDISPID = ( 1 )
+ADS_EXT_MAXEXTDISPID = ( 16777215 )
+ADS_EXT_INITCREDENTIALS = ( 1 )
+ADS_EXT_INITIALIZE_COMPLETE = ( 2 )
+
+# Search preference identifiers (see IDirectorySearch.SetSearchPreference
+# usage in the search demo).
+ADS_SEARCHPREF_ASYNCHRONOUS     = 0
+ADS_SEARCHPREF_DEREF_ALIASES    = 1
+ADS_SEARCHPREF_SIZE_LIMIT       = 2
+ADS_SEARCHPREF_TIME_LIMIT       = 3
+ADS_SEARCHPREF_ATTRIBTYPES_ONLY = 4
+ADS_SEARCHPREF_SEARCH_SCOPE     = 5
+ADS_SEARCHPREF_TIMEOUT          = 6
+ADS_SEARCHPREF_PAGESIZE         = 7
+ADS_SEARCHPREF_PAGED_TIME_LIMIT = 8
+ADS_SEARCHPREF_CHASE_REFERRALS  = 9
+ADS_SEARCHPREF_SORT_ON          = 10
+ADS_SEARCHPREF_CACHE_RESULTS    = 11
+ADS_SEARCHPREF_DIRSYNC          = 12
+ADS_SEARCHPREF_TOMBSTONE        = 13
+
+# Search scope values for the ADS_SEARCHPREF_SEARCH_SCOPE preference.
+ADS_SCOPE_BASE        = 0
+ADS_SCOPE_ONELEVEL    = 1
+ADS_SCOPE_SUBTREE     = 2
+
+ADS_SECURE_AUTHENTICATION  = 0x1
+ADS_USE_ENCRYPTION         = 0x2
+ADS_USE_SSL                = 0x2
+ADS_READONLY_SERVER        = 0x4
+ADS_PROMPT_CREDENTIALS     = 0x8
+ADS_NO_AUTHENTICATION      = 0x10
+ADS_FAST_BIND              = 0x20
+ADS_USE_SIGNING            = 0x40
+ADS_USE_SEALING            = 0x80
+ADS_USE_DELEGATION         = 0x100
+ADS_SERVER_BIND            = 0x200
+
+ADSTYPE_INVALID	= 0
+ADSTYPE_DN_STRING	= ADSTYPE_INVALID + 1
+ADSTYPE_CASE_EXACT_STRING	= ADSTYPE_DN_STRING + 1
+ADSTYPE_CASE_IGNORE_STRING	= ADSTYPE_CASE_EXACT_STRING + 1
+ADSTYPE_PRINTABLE_STRING	= ADSTYPE_CASE_IGNORE_STRING + 1
+ADSTYPE_NUMERIC_STRING	= ADSTYPE_PRINTABLE_STRING + 1
+ADSTYPE_BOOLEAN	= ADSTYPE_NUMERIC_STRING + 1
+ADSTYPE_INTEGER	= ADSTYPE_BOOLEAN + 1
+ADSTYPE_OCTET_STRING	= ADSTYPE_INTEGER + 1
+ADSTYPE_UTC_TIME	= ADSTYPE_OCTET_STRING + 1
+ADSTYPE_LARGE_INTEGER	= ADSTYPE_UTC_TIME + 1
+ADSTYPE_PROV_SPECIFIC	= ADSTYPE_LARGE_INTEGER + 1
+ADSTYPE_OBJECT_CLASS	= ADSTYPE_PROV_SPECIFIC + 1
+ADSTYPE_CASEIGNORE_LIST	= ADSTYPE_OBJECT_CLASS + 1
+ADSTYPE_OCTET_LIST	= ADSTYPE_CASEIGNORE_LIST + 1
+ADSTYPE_PATH	= ADSTYPE_OCTET_LIST + 1
+ADSTYPE_POSTALADDRESS	= ADSTYPE_PATH + 1
+ADSTYPE_TIMESTAMP	= ADSTYPE_POSTALADDRESS + 1
+ADSTYPE_BACKLINK	= ADSTYPE_TIMESTAMP + 1
+ADSTYPE_TYPEDNAME	= ADSTYPE_BACKLINK + 1
+ADSTYPE_HOLD	= ADSTYPE_TYPEDNAME + 1
+ADSTYPE_NETADDRESS	= ADSTYPE_HOLD + 1
+ADSTYPE_REPLICAPOINTER	= ADSTYPE_NETADDRESS + 1
+ADSTYPE_FAXNUMBER	= ADSTYPE_REPLICAPOINTER + 1
+ADSTYPE_EMAIL	= ADSTYPE_FAXNUMBER + 1
+ADSTYPE_NT_SECURITY_DESCRIPTOR	= ADSTYPE_EMAIL + 1
+ADSTYPE_UNKNOWN	= ADSTYPE_NT_SECURITY_DESCRIPTOR + 1
+ADSTYPE_DN_WITH_BINARY	= ADSTYPE_UNKNOWN + 1
+ADSTYPE_DN_WITH_STRING	= ADSTYPE_DN_WITH_BINARY + 1
+
+ADS_PROPERTY_CLEAR	= 1
+ADS_PROPERTY_UPDATE	= 2
+ADS_PROPERTY_APPEND	= 3
+ADS_PROPERTY_DELETE	= 4
+ADS_SYSTEMFLAG_DISALLOW_DELETE	= -2147483648
+ADS_SYSTEMFLAG_CONFIG_ALLOW_RENAME	= 0x40000000
+ADS_SYSTEMFLAG_CONFIG_ALLOW_MOVE	= 0x20000000
+ADS_SYSTEMFLAG_CONFIG_ALLOW_LIMITED_MOVE	= 0x10000000
+ADS_SYSTEMFLAG_DOMAIN_DISALLOW_RENAME	= -2147483648
+ADS_SYSTEMFLAG_DOMAIN_DISALLOW_MOVE	= 0x4000000
+ADS_SYSTEMFLAG_CR_NTDS_NC	= 0x1
+ADS_SYSTEMFLAG_CR_NTDS_DOMAIN	= 0x2
+ADS_SYSTEMFLAG_ATTR_NOT_REPLICATED	= 0x1
+ADS_SYSTEMFLAG_ATTR_IS_CONSTRUCTED	= 0x4
+ADS_GROUP_TYPE_GLOBAL_GROUP	= 0x2
+ADS_GROUP_TYPE_DOMAIN_LOCAL_GROUP	= 0x4
+ADS_GROUP_TYPE_LOCAL_GROUP	= 0x4
+ADS_GROUP_TYPE_UNIVERSAL_GROUP	= 0x8
+ADS_GROUP_TYPE_SECURITY_ENABLED	= -2147483648
+ADS_UF_SCRIPT	= 0x1
+ADS_UF_ACCOUNTDISABLE	= 0x2
+ADS_UF_HOMEDIR_REQUIRED	= 0x8
+ADS_UF_LOCKOUT	= 0x10
+ADS_UF_PASSWD_NOTREQD	= 0x20
+ADS_UF_PASSWD_CANT_CHANGE	= 0x40
+ADS_UF_ENCRYPTED_TEXT_PASSWORD_ALLOWED	= 0x80
+ADS_UF_TEMP_DUPLICATE_ACCOUNT	= 0x100
+ADS_UF_NORMAL_ACCOUNT	= 0x200
+ADS_UF_INTERDOMAIN_TRUST_ACCOUNT	= 0x800
+ADS_UF_WORKSTATION_TRUST_ACCOUNT	= 0x1000
+ADS_UF_SERVER_TRUST_ACCOUNT	= 0x2000
+ADS_UF_DONT_EXPIRE_PASSWD	= 0x10000
+ADS_UF_MNS_LOGON_ACCOUNT	= 0x20000
+ADS_UF_SMARTCARD_REQUIRED	= 0x40000
+ADS_UF_TRUSTED_FOR_DELEGATION	= 0x80000
+ADS_UF_NOT_DELEGATED	= 0x100000
+ADS_UF_USE_DES_KEY_ONLY	= 0x200000
+ADS_UF_DONT_REQUIRE_PREAUTH	= 0x400000
+ADS_UF_PASSWORD_EXPIRED	= 0x800000
+ADS_UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION	= 0x1000000
+ADS_RIGHT_DELETE	= 0x10000
+ADS_RIGHT_READ_CONTROL	= 0x20000
+ADS_RIGHT_WRITE_DAC	= 0x40000
+ADS_RIGHT_WRITE_OWNER	= 0x80000
+ADS_RIGHT_SYNCHRONIZE	= 0x100000
+ADS_RIGHT_ACCESS_SYSTEM_SECURITY	= 0x1000000
+ADS_RIGHT_GENERIC_READ	= -2147483648
+ADS_RIGHT_GENERIC_WRITE	= 0x40000000
+ADS_RIGHT_GENERIC_EXECUTE	= 0x20000000
+ADS_RIGHT_GENERIC_ALL	= 0x10000000
+ADS_RIGHT_DS_CREATE_CHILD	= 0x1
+ADS_RIGHT_DS_DELETE_CHILD	= 0x2
+ADS_RIGHT_ACTRL_DS_LIST	= 0x4
+ADS_RIGHT_DS_SELF	= 0x8
+ADS_RIGHT_DS_READ_PROP	= 0x10
+ADS_RIGHT_DS_WRITE_PROP	= 0x20
+ADS_RIGHT_DS_DELETE_TREE	= 0x40
+ADS_RIGHT_DS_LIST_OBJECT	= 0x80
+ADS_RIGHT_DS_CONTROL_ACCESS	= 0x100
+ADS_ACETYPE_ACCESS_ALLOWED	= 0
+ADS_ACETYPE_ACCESS_DENIED	= 0x1
+ADS_ACETYPE_SYSTEM_AUDIT	= 0x2
+ADS_ACETYPE_ACCESS_ALLOWED_OBJECT	= 0x5
+ADS_ACETYPE_ACCESS_DENIED_OBJECT	= 0x6
+ADS_ACETYPE_SYSTEM_AUDIT_OBJECT	= 0x7
+ADS_ACETYPE_SYSTEM_ALARM_OBJECT	= 0x8
+ADS_ACETYPE_ACCESS_ALLOWED_CALLBACK	= 0x9
+ADS_ACETYPE_ACCESS_DENIED_CALLBACK	= 0xa
+ADS_ACETYPE_ACCESS_ALLOWED_CALLBACK_OBJECT	= 0xb
+ADS_ACETYPE_ACCESS_DENIED_CALLBACK_OBJECT	= 0xc
+ADS_ACETYPE_SYSTEM_AUDIT_CALLBACK	= 0xd
+ADS_ACETYPE_SYSTEM_ALARM_CALLBACK	= 0xe
+ADS_ACETYPE_SYSTEM_AUDIT_CALLBACK_OBJECT	= 0xf
+ADS_ACETYPE_SYSTEM_ALARM_CALLBACK_OBJECT	= 0x10
+ADS_ACEFLAG_INHERIT_ACE	= 0x2
+ADS_ACEFLAG_NO_PROPAGATE_INHERIT_ACE	= 0x4
+ADS_ACEFLAG_INHERIT_ONLY_ACE	= 0x8
+ADS_ACEFLAG_INHERITED_ACE	= 0x10
+ADS_ACEFLAG_VALID_INHERIT_FLAGS	= 0x1f
+ADS_ACEFLAG_SUCCESSFUL_ACCESS	= 0x40
+ADS_ACEFLAG_FAILED_ACCESS	= 0x80
+ADS_FLAG_OBJECT_TYPE_PRESENT	= 0x1
+ADS_FLAG_INHERITED_OBJECT_TYPE_PRESENT	= 0x2
+ADS_SD_CONTROL_SE_OWNER_DEFAULTED	= 0x1
+ADS_SD_CONTROL_SE_GROUP_DEFAULTED	= 0x2
+ADS_SD_CONTROL_SE_DACL_PRESENT	= 0x4
+ADS_SD_CONTROL_SE_DACL_DEFAULTED	= 0x8
+ADS_SD_CONTROL_SE_SACL_PRESENT	= 0x10
+ADS_SD_CONTROL_SE_SACL_DEFAULTED	= 0x20
+ADS_SD_CONTROL_SE_DACL_AUTO_INHERIT_REQ	= 0x100
+ADS_SD_CONTROL_SE_SACL_AUTO_INHERIT_REQ	= 0x200
+ADS_SD_CONTROL_SE_DACL_AUTO_INHERITED	= 0x400
+ADS_SD_CONTROL_SE_SACL_AUTO_INHERITED	= 0x800
+ADS_SD_CONTROL_SE_DACL_PROTECTED	= 0x1000
+ADS_SD_CONTROL_SE_SACL_PROTECTED	= 0x2000
+ADS_SD_CONTROL_SE_SELF_RELATIVE	= 0x8000
+ADS_SD_REVISION_DS	= 4
+ADS_NAME_TYPE_1779	= 1
+ADS_NAME_TYPE_CANONICAL	= 2
+ADS_NAME_TYPE_NT4	= 3
+ADS_NAME_TYPE_DISPLAY	= 4
+ADS_NAME_TYPE_DOMAIN_SIMPLE	= 5
+ADS_NAME_TYPE_ENTERPRISE_SIMPLE	= 6
+ADS_NAME_TYPE_GUID	= 7
+ADS_NAME_TYPE_UNKNOWN	= 8
+ADS_NAME_TYPE_USER_PRINCIPAL_NAME	= 9
+ADS_NAME_TYPE_CANONICAL_EX	= 10
+ADS_NAME_TYPE_SERVICE_PRINCIPAL_NAME	= 11
+ADS_NAME_TYPE_SID_OR_SID_HISTORY_NAME	= 12
+ADS_NAME_INITTYPE_DOMAIN	= 1
+ADS_NAME_INITTYPE_SERVER	= 2
+ADS_NAME_INITTYPE_GC	= 3
+ADS_OPTION_SERVERNAME	= 0
+ADS_OPTION_REFERRALS	= ADS_OPTION_SERVERNAME + 1
+ADS_OPTION_PAGE_SIZE	= ADS_OPTION_REFERRALS + 1
+ADS_OPTION_SECURITY_MASK	= ADS_OPTION_PAGE_SIZE + 1
+ADS_OPTION_MUTUAL_AUTH_STATUS	= ADS_OPTION_SECURITY_MASK + 1
+ADS_OPTION_QUOTA	= ADS_OPTION_MUTUAL_AUTH_STATUS + 1
+ADS_OPTION_PASSWORD_PORTNUMBER	= ADS_OPTION_QUOTA + 1
+ADS_OPTION_PASSWORD_METHOD	= ADS_OPTION_PASSWORD_PORTNUMBER + 1
+ADS_SECURITY_INFO_OWNER	= 0x1
+ADS_SECURITY_INFO_GROUP	= 0x2
+ADS_SECURITY_INFO_DACL	= 0x4
+ADS_SECURITY_INFO_SACL	= 0x8
+ADS_SETTYPE_FULL	= 1
+ADS_SETTYPE_PROVIDER	= 2
+ADS_SETTYPE_SERVER	= 3
+ADS_SETTYPE_DN	= 4
+ADS_FORMAT_WINDOWS	= 1
+ADS_FORMAT_WINDOWS_NO_SERVER	= 2
+ADS_FORMAT_WINDOWS_DN	= 3
+ADS_FORMAT_WINDOWS_PARENT	= 4
+ADS_FORMAT_X500	= 5
+ADS_FORMAT_X500_NO_SERVER	= 6
+ADS_FORMAT_X500_DN	= 7
+ADS_FORMAT_X500_PARENT	= 8
+ADS_FORMAT_SERVER	= 9
+ADS_FORMAT_PROVIDER	= 10
+ADS_FORMAT_LEAF	= 11
+ADS_DISPLAY_FULL	= 1
+ADS_DISPLAY_VALUE_ONLY	= 2
+ADS_ESCAPEDMODE_DEFAULT	= 1
+ADS_ESCAPEDMODE_ON	= 2
+ADS_ESCAPEDMODE_OFF	= 3
+ADS_ESCAPEDMODE_OFF_EX	= 4
+ADS_PATH_FILE	= 1
+ADS_PATH_FILESHARE	= 2
+ADS_PATH_REGISTRY	= 3
+ADS_SD_FORMAT_IID	= 1
+ADS_SD_FORMAT_RAW	= 2
+ADS_SD_FORMAT_HEXSTRING	= 3
+
+
+# Generated by h2py from AdsErr.h
+# Identity helper mirroring the SDK's _HRESULT_TYPEDEF_ macro.
+def _HRESULT_TYPEDEF_(_sc): return _sc
+
+# ADSI HRESULT codes.  E_ADS_* failures appear as negative numbers because
+# the 32-bit HRESULTs are expressed as signed ints here.
+E_ADS_BAD_PATHNAME = _HRESULT_TYPEDEF_((-2147463168))
+E_ADS_INVALID_DOMAIN_OBJECT = _HRESULT_TYPEDEF_((-2147463167))
+E_ADS_INVALID_USER_OBJECT = _HRESULT_TYPEDEF_((-2147463166))
+E_ADS_INVALID_COMPUTER_OBJECT = _HRESULT_TYPEDEF_((-2147463165))
+E_ADS_UNKNOWN_OBJECT = _HRESULT_TYPEDEF_((-2147463164))
+E_ADS_PROPERTY_NOT_SET = _HRESULT_TYPEDEF_((-2147463163))
+E_ADS_PROPERTY_NOT_SUPPORTED = _HRESULT_TYPEDEF_((-2147463162))
+E_ADS_PROPERTY_INVALID = _HRESULT_TYPEDEF_((-2147463161))
+E_ADS_BAD_PARAMETER = _HRESULT_TYPEDEF_((-2147463160))
+E_ADS_OBJECT_UNBOUND = _HRESULT_TYPEDEF_((-2147463159))
+E_ADS_PROPERTY_NOT_MODIFIED = _HRESULT_TYPEDEF_((-2147463158))
+E_ADS_PROPERTY_MODIFIED = _HRESULT_TYPEDEF_((-2147463157))
+E_ADS_CANT_CONVERT_DATATYPE = _HRESULT_TYPEDEF_((-2147463156))
+E_ADS_PROPERTY_NOT_FOUND = _HRESULT_TYPEDEF_((-2147463155))
+E_ADS_OBJECT_EXISTS = _HRESULT_TYPEDEF_((-2147463154))
+E_ADS_SCHEMA_VIOLATION = _HRESULT_TYPEDEF_((-2147463153))
+E_ADS_COLUMN_NOT_SET = _HRESULT_TYPEDEF_((-2147463152))
+S_ADS_ERRORSOCCURRED = _HRESULT_TYPEDEF_(0x00005011L)
+S_ADS_NOMORE_ROWS = _HRESULT_TYPEDEF_(0x00005012L)
+S_ADS_NOMORE_COLUMNS = _HRESULT_TYPEDEF_(0x00005013L)
+E_ADS_INVALID_FILTER = _HRESULT_TYPEDEF_((-2147463148))
+
+# ADS_DEREFENUM enum
+ADS_DEREF_NEVER	= 0
+ADS_DEREF_SEARCHING	= 1
+ADS_DEREF_FINDING	= 2
+ADS_DEREF_ALWAYS	= 3
+
+# ADS_PREFERENCES_ENUM
+ADSIPROP_ASYNCHRONOUS	= 0
+ADSIPROP_DEREF_ALIASES	= 0x1
+ADSIPROP_SIZE_LIMIT	= 0x2
+ADSIPROP_TIME_LIMIT	= 0x3
+ADSIPROP_ATTRIBTYPES_ONLY	= 0x4
+ADSIPROP_SEARCH_SCOPE	= 0x5
+ADSIPROP_TIMEOUT	= 0x6
+ADSIPROP_PAGESIZE	= 0x7
+ADSIPROP_PAGED_TIME_LIMIT	= 0x8
+ADSIPROP_CHASE_REFERRALS	= 0x9
+ADSIPROP_SORT_ON	= 0xa
+ADSIPROP_CACHE_RESULTS	= 0xb
+ADSIPROP_ADSIFLAG	= 0xc
+
+# ADSI_DIALECT_ENUM
+ADSI_DIALECT_LDAP	= 0
+ADSI_DIALECT_SQL	= 0x1
+
+# ADS_CHASE_REFERRALS_ENUM
+ADS_CHASE_REFERRALS_NEVER	= 0
+ADS_CHASE_REFERRALS_SUBORDINATE	= 0x20
+ADS_CHASE_REFERRALS_EXTERNAL	= 0x40
+ADS_CHASE_REFERRALS_ALWAYS	= ADS_CHASE_REFERRALS_SUBORDINATE | ADS_CHASE_REFERRALS_EXTERNAL
+
+# Generated by h2py from ObjSel.h
+DSOP_SCOPE_TYPE_TARGET_COMPUTER = 0x00000001
+DSOP_SCOPE_TYPE_UPLEVEL_JOINED_DOMAIN = 0x00000002
+DSOP_SCOPE_TYPE_DOWNLEVEL_JOINED_DOMAIN = 0x00000004
+DSOP_SCOPE_TYPE_ENTERPRISE_DOMAIN = 0x00000008
+DSOP_SCOPE_TYPE_GLOBAL_CATALOG = 0x00000010
+DSOP_SCOPE_TYPE_EXTERNAL_UPLEVEL_DOMAIN = 0x00000020
+DSOP_SCOPE_TYPE_EXTERNAL_DOWNLEVEL_DOMAIN = 0x00000040
+DSOP_SCOPE_TYPE_WORKGROUP = 0x00000080
+DSOP_SCOPE_TYPE_USER_ENTERED_UPLEVEL_SCOPE = 0x00000100
+DSOP_SCOPE_TYPE_USER_ENTERED_DOWNLEVEL_SCOPE = 0x00000200
+DSOP_SCOPE_FLAG_STARTING_SCOPE = 0x00000001
+DSOP_SCOPE_FLAG_WANT_PROVIDER_WINNT = 0x00000002
+DSOP_SCOPE_FLAG_WANT_PROVIDER_LDAP = 0x00000004
+DSOP_SCOPE_FLAG_WANT_PROVIDER_GC = 0x00000008
+DSOP_SCOPE_FLAG_WANT_SID_PATH = 0x00000010
+DSOP_SCOPE_FLAG_WANT_DOWNLEVEL_BUILTIN_PATH = 0x00000020
+DSOP_SCOPE_FLAG_DEFAULT_FILTER_USERS = 0x00000040
+DSOP_SCOPE_FLAG_DEFAULT_FILTER_GROUPS = 0x00000080
+DSOP_SCOPE_FLAG_DEFAULT_FILTER_COMPUTERS = 0x00000100
+DSOP_SCOPE_FLAG_DEFAULT_FILTER_CONTACTS = 0x00000200
+DSOP_FILTER_INCLUDE_ADVANCED_VIEW = 0x00000001
+DSOP_FILTER_USERS = 0x00000002
+DSOP_FILTER_BUILTIN_GROUPS = 0x00000004
+DSOP_FILTER_WELL_KNOWN_PRINCIPALS = 0x00000008
+DSOP_FILTER_UNIVERSAL_GROUPS_DL = 0x00000010
+DSOP_FILTER_UNIVERSAL_GROUPS_SE = 0x00000020
+DSOP_FILTER_GLOBAL_GROUPS_DL = 0x00000040
+DSOP_FILTER_GLOBAL_GROUPS_SE = 0x00000080
+DSOP_FILTER_DOMAIN_LOCAL_GROUPS_DL = 0x00000100
+DSOP_FILTER_DOMAIN_LOCAL_GROUPS_SE = 0x00000200
+DSOP_FILTER_CONTACTS = 0x00000400
+DSOP_FILTER_COMPUTERS = 0x00000800
+DSOP_DOWNLEVEL_FILTER_USERS = (-2147483647)
+DSOP_DOWNLEVEL_FILTER_LOCAL_GROUPS = (-2147483646)
+DSOP_DOWNLEVEL_FILTER_GLOBAL_GROUPS = (-2147483644)
+DSOP_DOWNLEVEL_FILTER_COMPUTERS = (-2147483640)
+DSOP_DOWNLEVEL_FILTER_WORLD = (-2147483632)
+DSOP_DOWNLEVEL_FILTER_AUTHENTICATED_USER = (-2147483616)
+DSOP_DOWNLEVEL_FILTER_ANONYMOUS = (-2147483584)
+DSOP_DOWNLEVEL_FILTER_BATCH = (-2147483520)
+DSOP_DOWNLEVEL_FILTER_CREATOR_OWNER = (-2147483392)
+DSOP_DOWNLEVEL_FILTER_CREATOR_GROUP = (-2147483136)
+DSOP_DOWNLEVEL_FILTER_DIALUP = (-2147482624)
+DSOP_DOWNLEVEL_FILTER_INTERACTIVE = (-2147481600)
+DSOP_DOWNLEVEL_FILTER_NETWORK = (-2147479552)
+DSOP_DOWNLEVEL_FILTER_SERVICE = (-2147475456)
+DSOP_DOWNLEVEL_FILTER_SYSTEM = (-2147467264)
+DSOP_DOWNLEVEL_FILTER_EXCLUDE_BUILTIN_GROUPS = (-2147450880)
+DSOP_DOWNLEVEL_FILTER_TERMINAL_SERVER = (-2147418112)
+DSOP_DOWNLEVEL_FILTER_ALL_WELLKNOWN_SIDS = (-2147352576)
+DSOP_DOWNLEVEL_FILTER_LOCAL_SERVICE = (-2147221504)
+DSOP_DOWNLEVEL_FILTER_NETWORK_SERVICE = (-2146959360)
+DSOP_DOWNLEVEL_FILTER_REMOTE_LOGON = (-2146435072)
+DSOP_FLAG_MULTISELECT = 0x00000001
+DSOP_FLAG_SKIP_TARGET_COMPUTER_DC_CHECK = 0x00000002
+CFSTR_DSOP_DS_SELECTION_LIST = "CFSTR_DSOP_DS_SELECTION_LIST"
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/objectPicker.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/objectPicker.py
new file mode 100644
index 0000000..df75787
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/objectPicker.py
@@ -0,0 +1,58 @@
+# A demo for the IDsObjectPicker interface.
+import win32clipboard
+import pythoncom
+from win32com.adsi import adsi
+from win32com.adsi.adsicon import *
+
+cf_objectpicker = win32clipboard.RegisterClipboardFormat(CFSTR_DSOP_DS_SELECTION_LIST)
+
+def main(): 
+    """Show the directory object-picker dialog and print what was selected."""
+    hwnd = 0
+ 
+    # Create an instance of the object picker. 
+    picker = pythoncom.CoCreateInstance(adsi.CLSID_DsObjectPicker,
+                                None,
+                                pythoncom.CLSCTX_INPROC_SERVER,
+                                adsi.IID_IDsObjectPicker)
+
+    # Create our scope init info.
+    siis = adsi.DSOP_SCOPE_INIT_INFOs(1)
+    sii = siis[0]
+ 
+    # Combine multiple scope types in a single array entry.
+ 
+    sii.type = DSOP_SCOPE_TYPE_UPLEVEL_JOINED_DOMAIN | \
+               DSOP_SCOPE_TYPE_DOWNLEVEL_JOINED_DOMAIN
+ 
+    # Set uplevel and downlevel filters to include only computer objects.
+    # Uplevel filters apply to both mixed and native modes.
+    # Notice that the uplevel and downlevel flags are different.
+ 
+    sii.filterFlags.uplevel.bothModes = DSOP_FILTER_COMPUTERS
+    sii.filterFlags.downlevel = DSOP_DOWNLEVEL_FILTER_COMPUTERS
+    
+    # Initialize the interface.
+    picker.Initialize(
+        None, # Target is the local computer.
+        siis, # scope infos
+        DSOP_FLAG_MULTISELECT, # options
+        ('objectGUID','displayName') ) # attributes to fetch
+ 
+    do = picker.InvokeDialog(hwnd)
+    # Extract the data from the IDataObject.
+    format_etc = (cf_objectpicker, None,
+                  pythoncom.DVASPECT_CONTENT, -1,
+                  pythoncom.TYMED_HGLOBAL)
+    medium = do.GetData(format_etc)
+    data = adsi.StringAsDS_SELECTION_LIST(medium.data)
+    # Each selection item unpacks to 6 fields; print them all.
+    for item in data:
+        name, klass, adspath, upn, attrs, flags = item
+        print "Item", name
+        print " Class:", klass
+        print " AdsPath:", adspath
+        print " UPN:", upn
+        print " Attrs:", attrs
+        print " Flags:", flags
+ 
+# Run the demo when executed directly.
+if __name__=='__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/scp.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/scp.py
new file mode 100644
index 0000000..b37be76
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/scp.py
@@ -0,0 +1,491 @@
+"""A re-implementation of the MS DirectoryService samples related to services.
+
+* Adds and removes an ActiveDirectory "Service Connection Point",
+  including managing the security on the object.
+* Creates and registers Service Principal Names.
+* Changes the username for a domain user.
+
+Some of these functions are likely to be moved to a module - but there
+is also a little command-line interface to try these functions out.
+
+For example:
+
+scp.py --account-name=domain\user --service-class=PythonScpTest \\
+       --keyword=foo --keyword=bar --binding-string=bind_info \\
+       ScpCreate SpnCreate SpnRegister
+
+would:
+* Attempt to delete a Service Connection Point for the service class
+  'PythonScpTest'
+* Attempt to create a Service Connection Point for that class, with 2
+  keywords and a binding string of 'bind_info'
+* Create a Service Principal Name for the service and register it
+
+to undo those changes, you could execute:
+
+scp.py --account-name=domain\user --service-class=PythonScpTest \\
+       SpnCreate SpnUnregister ScpDelete
+
+which will:
+* Create a SPN
+* Unregister that SPN from the Active Directory.
+* Delete the Service Connection Point
+
+Executing with --test will create and remove one of everything.
+"""
+
+from win32com.adsi.adsicon import *
+from win32com.adsi import adsi
+import win32api, win32con, winerror
+from win32com.client import Dispatch
+import ntsecuritycon as dscon
+import win32security
+import optparse, textwrap
+import traceback
+import types
+
+# Global verbosity for log(); overwritten from the parsed options in main().
+verbose = 1
+# Objects created during this run, remembered so later commands
+# (SpnCreate / SpnRegister / SpnUnregister) can refer back to them.
+g_createdSCP = None
+g_createdSPNs = []
+g_createdSPNLast = None
+
+import logging
+
+logger = logging # use logging module global methods for now.
+
+# still a bit confused about log(n, ...) vs logger.info/debug()
+
+# Returns distinguished name of SCP.
+def ScpCreate(
+    service_binding_info, 
+    service_class_name,      # Service class string to store in SCP.
+    account_name = None,    # Logon account that needs access to SCP.
+    container_name = None,
+    keywords = None,
+    object_class = "serviceConnectionPoint",
+    dns_name_type = "A",
+    dn = None,
+    dns_name = None,
+             ):
+    """Create a Service Connection Point under a computer object.
+
+    Returns the new SCP wrapped as an IDispatch object.  Read/write access
+    to the SCP's properties is granted to account_name (or, when that is
+    None, to the host's computer account).
+    """
+    container_name = container_name or service_class_name
+    if not dns_name:
+        # Get the DNS name of the local computer
+        dns_name = win32api.GetComputerNameEx(win32con.ComputerNameDnsFullyQualified)
+    # Get the distinguished name of the computer object for the local computer
+    if dn is None:
+        dn = win32api.GetComputerObjectName(win32con.NameFullyQualifiedDN)
+    
+    # Compose the ADSpath and bind to the computer object for the local computer
+    comp = adsi.ADsGetObject("LDAP://" + dn, adsi.IID_IDirectoryObject)
+    
+    # Publish the SCP as a child of the computer object
+    keywords = keywords or []
+    # Fill in the attribute values to be stored in the SCP.
+    attrs = [
+        ("cn", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (container_name,)),
+        ("objectClass", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (object_class,)),
+        ("keywords", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, keywords),
+        ("serviceDnsName", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (dns_name,)),
+        ("serviceDnsNameType", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (dns_name_type,)),
+        ("serviceClassName", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (service_class_name,)),
+        ("serviceBindingInformation", ADS_ATTR_UPDATE, ADSTYPE_CASE_IGNORE_STRING, (service_binding_info,)),
+    ]
+    new = comp.CreateDSObject("cn=" + container_name, attrs)
+    logger.info("New connection point is at %s", container_name)
+    # Wrap in a usable IDispatch object.
+    new = Dispatch(new)
+    # And allow access to the SCP for the specified account name
+    AllowAccessToScpProperties(account_name, new)
+    return new
+
+def ScpDelete(container_name, dn = None):
+    if dn is None:
+        dn = win32api.GetComputerObjectName(win32con.NameFullyQualifiedDN)
+    logger.debug("Removing connection point '%s' from %s", container_name, dn)
+    
+    # Compose the ADSpath and bind to the computer object for the local computer
+    comp = adsi.ADsGetObject("LDAP://" + dn, adsi.IID_IDirectoryObject)
+    comp.DeleteDSObject("cn=" + container_name)
+    logger.info("Deleted service connection point '%s'", container_name)
+
+# This function is described in detail in the MSDN article titled
+# "Enabling Service Account to Access SCP Properties"
+# From that article:
+# The following sample code sets a pair of ACEs on a service connection point
+# (SCP) object. The ACEs grant read/write access to the user or computer account
+# under which the service instance will be running. Your service installation
+# program calls this code to ensure that the service will be allowed to update
+# its properties at run time. If you don't set ACEs like these, your service
+# will get access-denied errors if it tries to modify the SCP's properties.
+#
+# The code uses the IADsSecurityDescriptor, IADsAccessControlList, and
+# IADsAccessControlEntry interfaces to do the following: 
+# * Get the SCP object's security descriptor. 
+# * Set ACEs in the DACL of the security descriptor. 
+# * Set the security descriptor back on the SCP object. 
+
+def AllowAccessToScpProperties(
+    accountSAM, #Service account to allow access.
+    scpObject, # The IADs SCP object.
+    schemaIDGUIDs = # Attributes to allow write-access to.
+        ("{28630eb8-41d5-11d1-a9c1-0000f80367c1}", # serviceDNSName
+         "{b7b1311c-b82e-11d0-afee-0000f80367c1}",  # serviceBindingInformation
+        )
+    ):
+    """Add ACEs to the SCP's DACL granting accountSAM read/write access
+    to the attributes named by schemaIDGUIDs.  When accountSAM is empty,
+    the host's computer account is used instead (LocalSystem case)."""
+    # If no service account is specified, service runs under LocalSystem.
+    # So allow access to the computer account of the service's host.
+    if accountSAM:
+        trustee = accountSAM
+    else:
+        # Get the SAM account name of the computer object for the server.
+        trustee = win32api.GetComputerObjectName(win32con.NameSamCompatible)
+    
+    # Get the nTSecurityDescriptor attribute
+    attribute = "nTSecurityDescriptor"
+    sd = getattr(scpObject, attribute)
+    acl = sd.DiscretionaryAcl
+
+    for sguid in schemaIDGUIDs:
+        ace = Dispatch(adsi.CLSID_AccessControlEntry)
+
+        # Set the properties of the ACE.
+        # Allow read and write access to the property.
+        ace.AccessMask = ADS_RIGHT_DS_READ_PROP | ADS_RIGHT_DS_WRITE_PROP
+
+        # Set the trustee, which is either the service account or the
+        # host computer account.
+        ace.Trustee = trustee
+
+        # Set the ACE type.
+        ace.AceType = ADS_ACETYPE_ACCESS_ALLOWED_OBJECT
+
+        # Set AceFlags to zero because ACE is not inheritable.
+        ace.AceFlags = 0
+
+        # Set Flags to indicate an ACE that protects a specified object.
+        ace.Flags = ADS_FLAG_OBJECT_TYPE_PRESENT
+
+        # Set ObjectType to the schemaIDGUID of the attribute.
+        ace.ObjectType = sguid
+
+        # Add the ACEs to the DACL.
+        acl.AddAce(ace)
+
+    # Write the modified DACL back to the security descriptor.
+    sd.DiscretionaryAcl = acl
+    # Write the ntSecurityDescriptor property to the property cache.
+    setattr(scpObject, attribute, sd)
+    # SetInfo updates the SCP object in the directory.
+    scpObject.SetInfo()
+    logger.info("Set security on object for account '%s'" % (trustee,))
+
+# Service Principal Names functions from the same sample.
+# The example calls the DsWriteAccountSpn function, which stores the SPNs in
+# Microsoft Active Directory under the servicePrincipalName attribute of the
+# account object specified by the serviceAcctDN parameter. The account object
+# corresponds to the logon account specified in the CreateService call for this
+# service instance. If the logon account is a domain user account,
+# serviceAcctDN must be the distinguished name of the account object in
+# Active Directory for that user account. If the service's logon account is the
+# LocalSystem account, serviceAcctDN must be the distinguished name of the
+# computer account object for the host computer on which the service is
+# installed. win32api.TranslateNames and win32security.DsCrackNames can
+# be used to convert a domain\account format name to a distinguished name.
+def SpnRegister(
+        serviceAcctDN,    # DN of the service's logon account
+        spns,             # List of SPNs to register
+        operation,         # Add, replace, or delete SPNs
+           ):
+    assert spns not in types.StringTypes and hasattr(spns, "__iter__"), \
+           "spns must be a sequence of strings (got %r)" % spns
+    # Bind to a domain controller. 
+    # Get the domain for the current user.
+    samName = win32api.GetUserNameEx(win32api.NameSamCompatible)
+    samName = samName.split('\\', 1)[0]
+
+    if not serviceAcctDN:
+        # Get the SAM account name of the computer object for the server.
+        serviceAcctDN = win32api.GetComputerObjectName(win32con.NameFullyQualifiedDN)
+    logger.debug("SpnRegister using DN '%s'", serviceAcctDN)
+
+    # Get the name of a domain controller in that domain.
+    info = win32security.DsGetDcName(
+                domainName=samName,
+                flags=dscon.DS_IS_FLAT_NAME |
+                      dscon.DS_RETURN_DNS_NAME |
+                      dscon.DS_DIRECTORY_SERVICE_REQUIRED)
+    # Bind to the domain controller.
+    handle = win32security.DsBind( info['DomainControllerName'] )
+
+    # Write the SPNs to the service account or computer account.
+    logger.debug("DsWriteAccountSpn with spns %s")
+    win32security.DsWriteAccountSpn(
+            handle,         # handle to the directory
+            operation,   # Add or remove SPN from account's existing SPNs
+            serviceAcctDN,        # DN of service account or computer account
+            spns) # names
+
+    # Unbind the DS in any case (but Python would do it anyway)
+    handle.Close()
+
+def UserChangePassword(username_dn, new_password):
+    # set the password on the account.
+    # Use the distinguished name to bind to the account object.
+    accountPath = "LDAP://" + username_dn
+    user = adsi.ADsGetObject(accountPath, adsi.IID_IADsUser)
+ 
+    # Set the password on the account. 
+    user.SetPassword(new_password)
+
+# functions related to the command-line interface
+def log(level, msg, *args):
+    # Print a status message when the global verbosity is at least 'level'.
+    if verbose >= level:
+        print msg % args
+
+class _NoDefault: pass  # sentinel: distinguishes "no default supplied" from None
+
+def _get_option(po, opt_name, default = _NoDefault):
+    parser, options = po
+    ret = getattr(options, opt_name, default)
+    if not ret and default is _NoDefault:
+        parser.error("The '%s' option must be specified for this operation" % opt_name)
+    if not ret:
+        ret = default
+    return ret
+
+def _option_error(po, why):
+    parser = po[0]
+    parser.error(why)
+
+def do_ScpCreate(po):
+    """Create a Service Connection Point"""
+    global g_createdSCP
+    scp = ScpCreate(_get_option(po, "binding_string"),
+                    _get_option(po, "service_class"),
+                    _get_option(po, "account_name_sam", None),
+                    keywords=_get_option(po, "keywords", None))
+    # Remember the SCP so later Spn* commands can use its DN.
+    g_createdSCP = scp
+    return scp.distinguishedName
+
+def do_ScpDelete(po):
+    """Delete a Service Connection Point"""
+    sc = _get_option(po, "service_class")
+    try:
+        ScpDelete(sc)
+    except adsi.error, details:
+        # Deletion is best-effort: an SCP that doesn't exist is fine.
+        if details[0] != winerror.ERROR_DS_OBJ_NOT_FOUND:
+            raise
+        log(2, "ScpDelete ignoring ERROR_DS_OBJ_NOT_FOUND for service-class '%s'",
+            sc)
+    return sc
+
+def do_SpnCreate(po):
+    """Create a Service Principal Name"""
+    # The 'service name' is the dn of our scp.
+    if g_createdSCP is None:
+        # Could accept an arg to avoid this?
+        _option_error(po, "ScpCreate must have been specified before SpnCreate")
+    # Create a Service Principal Name"
+    spns = win32security.DsGetSpn(dscon.DS_SPN_SERVICE,
+                                _get_option(po, "service_class"),
+                                g_createdSCP.distinguishedName,
+                                _get_option(po, "port", 0),
+                                None, None)
+    spn = spns[0]
+    log(2, "Created SPN: %s", spn)
+    # Remember the SPN for the SpnRegister/SpnUnregister commands.
+    global g_createdSPNLast
+    g_createdSPNLast = spn
+    g_createdSPNs.append(spn)
+    return spn
+
+def do_SpnRegister(po):
+    """Register a previously created Service Principal Name"""
+    if not g_createdSPNLast:
+        _option_error(po, "SpnCreate must appear before SpnRegister")
+
+    SpnRegister(_get_option(po, "account_name_dn", None),
+               (g_createdSPNLast,),
+                dscon.DS_SPN_ADD_SPN_OP)
+    return g_createdSPNLast
+
+def do_SpnUnregister(po):
+    """Unregister a previously created Service Principal Name"""
+    if not g_createdSPNLast:
+        _option_error(po, "SpnCreate must appear before SpnUnregister")
+    SpnRegister(_get_option(po, "account_name_dn", None),
+                (g_createdSPNLast,),
+                dscon.DS_SPN_DELETE_SPN_OP)
+    return g_createdSPNLast
+
+def do_UserChangePassword(po):
+    """Change the password for a specified user"""
+    UserChangePassword(_get_option(po, "account_name_dn"),
+                       _get_option(po, "password"))
+    return "Password changed OK"
+
+# Maps command-line command names to their handler functions; commands are
+# matched case-insensitively in main().
+handlers = (
+    ('ScpCreate', do_ScpCreate),
+    ('ScpDelete', do_ScpDelete),
+    ('SpnCreate', do_SpnCreate),
+    ('SpnRegister', do_SpnRegister),
+    ('SpnUnregister', do_SpnUnregister),
+    ('UserChangePassword', do_UserChangePassword),
+           )
+
+class HelpFormatter(optparse.IndentedHelpFormatter):
+    # Leave the (already laid-out) description text untouched instead of
+    # letting optparse re-wrap it.
+    def format_description(self, description):
+        return description
+
+def main():
+    global verbose
+    _handlers_dict = {}
+
+    arg_descs = []
+    for arg, func in handlers:
+        this_desc = "\n".join(textwrap.wrap(func.__doc__,
+                                            subsequent_indent = " " * 8))
+        arg_descs.append("  %s: %s" % (arg, this_desc))
+        _handlers_dict[arg.lower()] = func
+
+    description = __doc__ + "\ncommands:\n" + "\n".join(arg_descs) + "\n"
+
+    parser = optparse.OptionParser(usage = "%prog [options] command ...",
+                                   description=description,
+                                   formatter=HelpFormatter())
+
+    parser.add_option("-v", action="count",
+                      dest="verbose", default=1,
+                      help="increase the verbosity of status messages")
+
+    parser.add_option("-q", "--quiet", action="store_true",
+                      help="Don't print any status messages")
+
+    parser.add_option("-t", "--test", action="store_true",
+                      help="Execute a mini-test suite, providing defaults for most options and args"),
+
+    parser.add_option("", "--show-tracebacks", action="store_true",
+                      help="Show the tracebacks for any exceptions")
+
+    parser.add_option("", "--service-class",
+                      help="The service class name to use")
+
+    parser.add_option("", "--port", default=0,
+                      help="The port number to associate with the SPN")
+
+    parser.add_option("", "--binding-string",
+                      help="The binding string to use for SCP creation")
+
+    parser.add_option("", "--account-name",
+                      help="The account name to use (default is LocalSystem)")
+
+    parser.add_option("", "--password",
+                      help="The password to set.")
+
+    parser.add_option("", "--keyword", action="append", dest="keywords",
+                      help="""A keyword to add to the SCP.  May be specified
+                              multiple times""")
+
+    parser.add_option("", "--log-level",
+                      help="""The log-level to use - may be a number or a logging
+                             module constant""", default=str(logging.WARNING))
+
+    options, args = parser.parse_args()
+    po = (parser, options)
+    # fixup misc
+    try:
+        options.port = int(options.port)
+    except (TypeError, ValueError):
+        parser.error("--port must be numeric")
+    # fixup log-level
+    try:
+        log_level = int(options.log_level)
+    except (TypeError, ValueError):
+        try:
+            log_level = int(getattr(logging, options.log_level.upper()))
+        except (ValueError, TypeError, AttributeError):
+            parser.error("Invalid --log-level value")
+    try:
+        sl = logger.setLevel
+        # logger is a real logger
+    except AttributeError:
+        # logger is logging module
+        sl = logging.getLogger().setLevel
+    sl(log_level)
+    # Check -q/-v
+    if options.quiet and options.verbose:
+        parser.error("Can't specify --quiet and --verbose")
+    if options.quiet:
+        options.verbose -= 1
+    verbose = options.verbose
+    # --test
+    if options.test:
+        if args:
+            parser.error("Can't specify args with --test")
+    
+        args = "ScpDelete ScpCreate SpnCreate SpnRegister SpnUnregister ScpDelete"
+        log(1, "--test - pretending args are:\n %s", args)
+        args = args.split()
+        if not options.service_class:
+            options.service_class = "PythonScpTest"
+            log(2, "--test: --service-class=%s", options.service_class)
+        if not options.keywords:
+            options.keywords = "Python Powered".split()
+            log(2, "--test: --keyword=%s", options.keywords)
+        if not options.binding_string:
+            options.binding_string = "test binding string"
+            log(2, "--test: --binding-string=%s", options.binding_string)
+
+    # check args
+    if not args:
+        parser.error("No command specified (use --help for valid commands)")
+    for arg in args:
+        if arg.lower() not in _handlers_dict:
+            parser.error("Invalid command '%s' (use --help for valid commands)" % arg)
+
+    # Patch up account-name.
+    if options.account_name:
+        log(2, "Translating account name '%s'", options.account_name)
+        options.account_name_sam = win32security.TranslateName(options.account_name,
+                                                               win32api.NameUnknown,
+                                                               win32api.NameSamCompatible)
+        log(2, "NameSamCompatible is '%s'",options.account_name_sam)
+        options.account_name_dn = win32security.TranslateName(options.account_name,
+                                                               win32api.NameUnknown,
+                                                               win32api.NameFullyQualifiedDN)
+        log(2, "NameFullyQualifiedDNis '%s'",options.account_name_dn)
+
+    # do it.
+    for arg in args:
+        handler = _handlers_dict[arg.lower()] # already been validated
+        if handler is None:
+            parser.error("Invalid command '%s'" % arg)
+        err_msg = None
+        try:
+            try:
+                log(2, "Executing '%s'...", arg)
+                result = handler(po)
+                log(1, "%s: %s", arg, result)
+            except:
+                if options.show_tracebacks:
+                    print "--show-tracebacks specified - dumping exception"
+                    traceback.print_exc()
+                raise
+        except adsi.error, (hr, desc, exc, argerr):
+            if exc:
+                extra_desc = exc[2]
+            else:
+                extra_desc = ""
+            err_msg = desc
+            if extra_desc:
+                err_msg += "\n\t" + extra_desc
+        except win32api.error, (hr, func, msg):
+            err_msg = msg
+        if err_msg:
+            log(1, "Command '%s' failed: %s", arg, err_msg)
+
+if __name__=='__main__':
+    # Allow Ctrl+C to exit without a traceback.
+    try:
+        main()
+    except KeyboardInterrupt:
+        print "*** Interrupted"
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/search.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/search.py
new file mode 100644
index 0000000..5c2dcef
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/search.py
@@ -0,0 +1,132 @@
+from win32com.adsi import adsi
+from win32com.adsi.adsicon import *
+from win32com.adsi import adsicon
+import pythoncom, pywintypes, win32security
+
+options = None # set to optparse options object
+
+ADsTypeNameMap = {}
+
+def getADsTypeName(type_val):
+    """Map an ADSTYPE_* integer code to its symbolic constant name.
+
+    The module-level ADsTypeNameMap cache is built lazily on first call by
+    scanning adsicon's module dict for ADSTYPE_* constants.  Unknown codes
+    fall back to their hex representation.
+    """
+    # convert integer type to the 'typename' as known in the headerfiles.
+    if not ADsTypeNameMap:
+        for n, v in adsicon.__dict__.items():
+            if n.startswith("ADSTYPE_"):
+                ADsTypeNameMap[v] = n
+    return ADsTypeNameMap.get(type_val, hex(type_val))
+
+def _guid_from_buffer(b):
+    # Convert a raw objectGUID byte buffer into a pywintypes IID (GUID) object.
+    return pywintypes.IID(b, True)
+
+def _sid_from_buffer(b):
+    # Render a raw objectSid byte buffer in its string ("S-1-...") form.
+    return str(pywintypes.SID(b))
+
+_null_converter = lambda x: x
+
+converters = {
+    'objectGUID' : _guid_from_buffer,
+    'objectSid' : _sid_from_buffer,
+    'instanceType' : getADsTypeName,
+}
+
+def log(level, msg, *args):
+    # Print "msg % args", but only when the global verbosity (set from the
+    # command line in main()) is at least `level`.
+    if options.verbose >= level:
+        print "log:", msg % args
+
+def getGC():
+    """Return an IDirectorySearch interface on the Global Catalog's single
+    child, or None if the GC container is empty.
+
+    Credentials are taken from the module-level `options` object.
+    """
+    cont = adsi.ADsOpenObject("GC:", options.user, options.password, 0, adsi.IID_IADsContainer)
+    enum = adsi.ADsBuildEnumerator(cont)
+    # Only 1 child of the global catalog.
+    for e in enum:
+        gc = e.QueryInterface(adsi.IID_IDirectorySearch)
+        return gc
+    return None
+
+def print_attribute(col_data):
+    """Pretty-print one column returned by IDirectorySearch.GetColumn.
+
+    col_data is a (name, type, values) tuple; `values` is a sequence of
+    (value, type) pairs, or None when the attribute is not set.
+    """
+    prop_name, prop_type, values = col_data
+    if values is not None:
+        log(2, "property '%s' has type '%s'", prop_name, getADsTypeName(prop_type))
+        # Apply a per-attribute converter (GUID/SID decoding etc.) if one
+        # is registered, else pass the raw value through.
+        value = [converters.get(prop_name, _null_converter)(v[0]) for v in values]
+        # Collapse single-valued attributes for nicer display.
+        if len(value) == 1:
+            value = value[0]
+        print " %s=%r" % (prop_name, value)
+    else:
+        print " %s is None" % (prop_name,)
+
+def search():
+    """Run the LDAP search described by the command-line options against
+    the Global Catalog and print every row found.
+    """
+    gc = getGC()
+    if gc is None:
+        log(0, "Can't find the global catalog")
+        return
+
+    # Search the whole subtree, not just one level.
+    prefs = [(ADS_SEARCHPREF_SEARCH_SCOPE, (ADS_SCOPE_SUBTREE,))]
+    hr, statuses = gc.SetSearchPreference(prefs)
+    log(3, "SetSearchPreference returned %d/%r", hr, statuses)
+    
+    if options.attributes:
+        attributes = options.attributes.split(",")
+    else:
+        attributes = None
+
+    h = gc.ExecuteSearch(options.filter, attributes)
+    hr = gc.GetNextRow(h)
+    while hr != S_ADS_NOMORE_ROWS:
+        print "-- new row --"
+        if attributes is None:
+            # Loop over all columns returned
+            while 1:
+                col_name = gc.GetNextColumnName(h)
+                if col_name is None:
+                    break
+                data = gc.GetColumn(h, col_name)
+                print_attribute(data)
+        else:
+            # loop over attributes specified.
+            for a in attributes:
+                try:
+                    data = gc.GetColumn(h, a)
+                    print_attribute(data)
+                except adsi.error, details:
+                    # E_ADS_COLUMN_NOT_SET just means this row has no such
+                    # attribute - report it as unset rather than failing.
+                    if details[0] != E_ADS_COLUMN_NOT_SET:
+                        raise
+                    print_attribute( (a, None, None) )
+        hr = gc.GetNextRow(h)
+    gc.CloseSearchHandle(h)
+   
+def main():
+    """Parse command-line options into the module-global `options` object
+    and run the search.  No positional arguments are accepted.
+    """
+    global options
+    from optparse import OptionParser
+
+    parser = OptionParser()
+    # NOTE(review): --file is parsed but never read anywhere in this
+    # script - presumably a leftover from the optparse example; confirm
+    # before relying on it.
+    parser.add_option("-f", "--file", dest="filename",
+                      help="write report to FILE", metavar="FILE")
+    parser.add_option("-v", "--verbose",
+                      action="count", default=1,
+                      help="increase verbosity of output")
+    parser.add_option("-q", "--quiet",
+                      action="store_true",
+                      help="suppress output messages")
+
+    parser.add_option("-U", "--user",
+                      help="specify the username used to connect")
+    parser.add_option("-P", "--password",
+                      help="specify the password used to connect")
+    parser.add_option("", "--filter",
+                      default = "(&(objectCategory=person)(objectClass=User))",
+                      help="specify the search filter")
+    parser.add_option("", "--attributes",
+                      help="comma sep'd list of attribute names to print")
+    
+    options, args = parser.parse_args()
+    # --quiet forces verbosity 0 and conflicts with any explicit --verbose.
+    if options.quiet:
+        if options.verbose != 1:
+            parser.error("Can not use '--verbose' and '--quiet'")
+        options.verbose = 0
+
+    if args:
+        parser.error("You need not specify args")
+
+    search()
+
+# Standard script entry point.
+if __name__=='__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/test.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/test.py
new file mode 100644
index 0000000..8c05e4c3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/adsi/demos/test.py
@@ -0,0 +1,247 @@
+import sys, string
+import pythoncom
+
+import win32api
+from win32com.adsi import *
+
+verbose_level = 0
+
+server = '' # Must have trailing /
+local_name = win32api.GetComputerName()
+
+def DumpRoot():
+	"Dumps the root DSE"
+	path = "LDAP://%srootDSE" % server
+	rootdse = ADsGetObject(path)
+
+	# SupportedLDAPVersion is multi-valued - report each supported version.
+	for item in rootdse.Get("SupportedLDAPVersion"):
+		print "%s supports ldap version %s" % (path, item)
+	
+	attributes = ["CurrentTime", "defaultNamingContext"]
+	for attr in attributes:
+		val = rootdse.Get(attr)
+		print " %s=%s" % (attr, val)
+
+###############################################
+#
+# Code taken from article titled:
+# Reading attributeSchema and classSchema Objects
+def _DumpClass(child):
+	# Dump the attributes of interest for a classSchema object.
+	# (Deliberately no docstring: main() treats docstring'd functions as tests.)
+	attrs = "Abstract lDAPDisplayName schemaIDGUID schemaNamingContext attributeSyntax oMSyntax"
+	_DumpTheseAttributes(child, string.split(attrs))
+	
+def _DumpAttribute(child):
+	# Dump the attributes of interest for an attributeSchema object.
+	# (Deliberately no docstring: main() treats docstring'd functions as tests.)
+	attrs = "lDAPDisplayName schemaIDGUID adminDescription adminDisplayName rDNAttID defaultHidingValue defaultObjectCategory systemOnly defaultSecurityDescriptor"
+	_DumpTheseAttributes(child, string.split(attrs))
+
+def _DumpTheseAttributes(child, attrs):
+	for attr in attrs:
+		try:
+			val = child.Get(attr)
+		except pythoncom.com_error, details:
+			continue
+			# ###
+			(hr, msg, exc, arg) = details
+			if exc and exc[2]: msg = exc[2]
+			val = "<Error: %s>" % (msg,)
+		if verbose_level >= 2:
+			print " %s: %s=%s" % (child.Class, attr, val)
+
+def DumpSchema():
+	"Dumps the default DSE schema"
+	# Bind to rootDSE to get the schemaNamingContext property.
+	path = "LDAP://%srootDSE" % server
+	rootdse = ADsGetObject(path)
+	name = rootdse.Get("schemaNamingContext")
+	
+	# Bind to the actual schema container.
+	path= "LDAP://" + server + name
+	print "Binding to", path
+	ob = ADsGetObject(path)
+	nclasses = nattr = nsub = nunk = 0
+
+	# Enumerate the attribute and class objects in the schema container.
+	for child in ob:
+		# Find out if this is a class, attribute, or subSchema object.
+		class_name = child.Class
+		if class_name == "classSchema":
+			_DumpClass(child)
+			nclasses = nclasses + 1
+		elif class_name == "attributeSchema":
+			_DumpAttribute(child)
+			nattr = nattr + 1
+		elif class_name == "subSchema":
+			nsub = nsub + 1
+		else:
+			print "Unknown class:", class_name
+			nunk = nunk + 1
+	# Summary counts are only printed when running verbosely.
+	if verbose_level:
+		print "Processed", nclasses, "classes"
+		print "Processed", nattr, "attributes"
+		print "Processed", nsub, "sub-schema's"
+		print "Processed", nunk, "unknown types"
+
+def _DumpObject(ob, level = 0):
+	# Recursively print `ob` and its children, indenting 2 spaces per level.
+	# (Deliberately no docstring: main() treats docstring'd functions as tests.)
+	prefix = "  " * level
+	print "%s%s object: %s" % (prefix, ob.Class, ob.Name)
+	# Do the directory object thing
+	try:
+		dir_ob = ADsGetObject(ob.ADsPath, IID_IDirectoryObject)
+	except pythoncom.com_error:
+		# Not every object exposes IDirectoryObject - just skip that part.
+		dir_ob = None
+	if dir_ob is not None:
+		info = dir_ob.GetObjectInformation()
+		print "%s RDN='%s', ObjectDN='%s'" % (prefix, info.RDN, info.ObjectDN)
+		# Create a list of names to fetch
+		names = ["distinguishedName"]
+		attrs = dir_ob.GetObjectAttributes(names)
+		for attr in attrs:
+			for val, typ in attr.Values:
+				print "%s Attribute '%s' = %s" % (prefix, attr.AttrName, val)
+
+	for child in ob:
+		_DumpObject(child, level+1)
+	
+def DumpAllObjects():
+	"Recursively dump the entire directory!"
+	# Bind to rootDSE to find the default naming context.
+	path = "LDAP://%srootDSE" % server
+	rootdse = ADsGetObject(path)
+	name = rootdse.Get("defaultNamingContext")
+	
+	# Bind to the default naming context itself.  (The original comment
+	# said "schema container" - a copy/paste from DumpSchema.)
+	path= "LDAP://" + server + name
+	print "Binding to", path
+	ob = ADsGetObject(path)
+
+	# Recursively dump everything below the naming context.
+	_DumpObject(ob)
+	
+##########################################################
+#
+# Code taken from article:
+# Example Code for Enumerating Schema Classes, Attributes, and Syntaxes
+
+# Fill a map with VT_ datatypes, to give us better names:
+vt_map = {}
+for name, val in pythoncom.__dict__.items():
+	if name[:3] == "VT_":
+		vt_map[val] = name
+
+def DumpSchema2():
+	"Dumps the schema using an alternative technique"
+	# Bind the abstract schema container and classify each item by its
+	# Class property: "class", "property" or "syntax".
+	path = "LDAP://%sschema" % (server,)
+	schema = ADsGetObject(path, IID_IADsContainer)
+	nclass = nprop = nsyntax = 0
+	for item in schema:
+		item_class = string.lower(item.Class)
+		if item_class == "class":
+			items = []
+			if item.Abstract: items.append("Abstract")
+			if item.Auxiliary: items.append("Auxiliary")
+#			if item.Structural: items.append("Structural")
+			desc = string.join(items, ", ")
+			import win32com.util
+			iid_name = win32com.util.IIDToInterfaceName(item.PrimaryInterface)
+			if verbose_level >= 2:
+				print "Class: Name=%s, Flags=%s, Primary Interface=%s" % (item.Name, desc, iid_name)
+			nclass = nclass + 1
+		elif item_class == "property":
+			if item.MultiValued:
+				val_type = "Multi-Valued"
+			else:
+				val_type = "Single-Valued"
+			if verbose_level >= 2:
+				print "Property: Name=%s, %s" % (item.Name, val_type)
+			nprop = nprop + 1
+		elif item_class == "syntax":
+			# Map the OLE automation VT_ code to a readable name.
+			data_type = vt_map.get(item.OleAutoDataType, "<unknown type>")
+			if verbose_level >= 2:
+				print "Syntax: Name=%s, Datatype = %s" % (item.Name, data_type)
+			nsyntax = nsyntax + 1
+	if verbose_level >= 1:
+		print "Processed", nclass, "classes"
+		print "Processed", nprop, "properties"
+		print "Processed", nsyntax, "syntax items"
+
+def DumpGC():
+	"Dumps the GC: object (whatever that is!)"
+	# Enumerate the children of the Global Catalog moniker.
+	ob = ADsGetObject("GC:", IID_IADsContainer)
+	for sub_ob in ob:
+		print "GC ob: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)
+
+def DumpLocalUsers():
+	"Dumps the local machine users"
+	# Bind the local computer via the WinNT provider, filtered to User
+	# and Group objects.
+	path = "WinNT://%s,computer" % (local_name,)
+	ob = ADsGetObject(path, IID_IADsContainer)
+	ob.put_Filter(["User", "Group"])
+	for sub_ob in ob:
+		print "User/Group: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)
+
+def DumpLocalGroups():
+	"Dumps the local machine groups"
+	# Bind the local computer via the WinNT provider, filtered to groups.
+	path = "WinNT://%s,computer" % (local_name,)
+	ob = ADsGetObject(path, IID_IADsContainer)
+
+	ob.put_Filter(["Group"])
+	for sub_ob in ob:
+		print "Group: %s (%s)" % (sub_ob.Name, sub_ob.ADsPath)
+		# get the members
+		members = sub_ob.Members()
+		for member in members:
+			print "  Group member: %s (%s)" % (member.Name, member.ADsPath)
+
+def usage(tests):
+	import os
+	print "Usage: %s [-s server ] [-v] [Test ...]" % os.path.basename(sys.argv[0])
+	print "  -v : Verbose - print more information"
+	print "  -s : server - execute the tests against the named server"
+	print "where Test is one of:"
+	for t in tests:
+		print t.__name__,":", t.__doc__
+	print
+	print "If not tests are specified, all tests are run"
+	sys.exit(1)
+
+def main():
+	# Parse arguments and run the selected tests.
+	# NB: deliberately no docstring - the loop below would otherwise pick
+	# main() itself up as a test and recurse.
+	import getopt, traceback
+	tests = []
+	# Any module-level function with a docstring is a runnable test; the
+	# private _Dump* helpers are excluded only because they lack one.
+	for ob in globals().values():
+		if type(ob)==type(main) and ob.__doc__:
+			tests.append(ob)
+	opts, args = getopt.getopt(sys.argv[1:], "s:hv")
+	for opt, val in opts:
+		if opt=="-s":
+			# LDAP paths are built by plain concatenation, so the server
+			# name must end with a separator.
+			if val[-1] not in "\\/":
+				val = val + "/"
+			global server
+			server = val
+		if opt=="-h":
+			usage(tests)
+		if opt=="-v":
+			global verbose_level
+			verbose_level = verbose_level + 1
+
+	if len(args)==0:
+		print "Running all tests - use '-h' to see command-line options..."
+		dotests = tests
+	else:
+		# Map each requested name to its test function, warning on unknowns.
+		dotests = []
+		for arg in args:
+			for t in tests:
+				if t.__name__==arg:
+					dotests.append(t)
+					break
+			else:
+				print "Test '%s' unknown - skipping" % arg
+	if not len(dotests):
+		print "Nothing to do!"
+		usage(tests)
+	# Run each test, reporting (but not propagating) any failure.
+	for test in dotests:
+		try:
+			test()
+		except:
+			print "Test %s failed" % test.__name__
+			traceback.print_exc()
+
+# Standard script entry point.
+if __name__=='__main__':
+	main()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/authorization/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/authorization/__init__.py
new file mode 100644
index 0000000..cb9037ee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/authorization/__init__.py
@@ -0,0 +1,6 @@
+# This is a python package
+# __PackageSupportBuildPath__ not needed for distutil based builds,
+# but not everyone is there yet.
+import win32com
+win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/authorization/authorization.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/authorization/authorization.pyd
new file mode 100644
index 0000000..1e99d576
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/authorization/authorization.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axcontrol/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axcontrol/__init__.py
new file mode 100644
index 0000000..d882689
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axcontrol/__init__.py
@@ -0,0 +1,4 @@
+# See if we have a special directory for the binaries (for developers)
+import win32com
+win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axcontrol/axcontrol.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axcontrol/axcontrol.pyd
new file mode 100644
index 0000000..e8da4b66
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axcontrol/axcontrol.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/__init__.py
new file mode 100644
index 0000000..d882689
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/__init__.py
@@ -0,0 +1,4 @@
+# See if we have a special directory for the binaries (for developers)
+import win32com
+win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/adb.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/adb.py
new file mode 100644
index 0000000..0d3fe64
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/adb.py
@@ -0,0 +1,416 @@
+"""The glue between the Python debugger interface and the Active Debugger interface
+"""
+from win32com.axdebug.util import trace, _wrap, _wrap_remove
+from win32com.server.util import unwrap
+import win32com.client.connect
+import gateways
+import sys, bdb, traceback
+import axdebug, stackframe
+import win32api, pythoncom
+import thread, os
+
+def fnull(*args):
+	# No-op trace sink, used when DEBUG_AXDEBUG is not set.
+	pass
+
+try:
+	os.environ["DEBUG_AXDEBUG"]
+	debugging = 1
+except KeyError:
+	debugging = 0
+
+traceenter = fnull # trace enter of functions
+tracev = fnull # verbose trace
+
+if debugging:
+	traceenter = trace # trace enter of functions
+	tracev = trace # verbose trace
+
+class OutputReflector:
+	"""File-like wrapper that tees write() calls to `writefunc` (the
+	attached debugger) while delegating everything else to the wrapped
+	file object."""
+	def __init__(self, file, writefunc):
+		self.writefunc = writefunc
+		self.file = file
+	def __getattr__(self,name):
+		# Delegate any other attribute (flush, close, ...) to the real file.
+		return getattr(self.file, name)
+	def write(self,message):
+		# Reflect to the debugger first, then to the original stream.
+		self.writefunc(message)
+		self.file.write(message)
+
+def _dumpf(frame):
+	# Return a short human-readable description of a frame for trace output,
+	# noting whether it currently has a local trace function installed.
+	if frame is None:
+		return "<None>"
+	else:
+		addn = "(with trace!)"
+		if frame.f_trace is None:
+			addn = " **No Trace Set **"
+		return "Frame at %d, file %s, line: %d%s" % (id(frame), frame.f_code.co_filename, frame.f_lineno, addn)
+
+g_adb = None
+
+def OnSetBreakPoint(codeContext, breakPointState, lineNo):
+	# Callback invoked when the host sets/clears a breakpoint on a code
+	# context; forwards to the global Adb instance.
+	try:
+		fileName = codeContext.codeContainer.GetFileName()
+		# inject the code into linecache.
+		import linecache
+		linecache.cache[fileName] = 0, 0, codeContext.codeContainer.GetText(), fileName
+		# NOTE(review): the +1 suggests the host passes 0-based line
+		# numbers while bdb breakpoints are 1-based - confirm.
+		g_adb._OnSetBreakPoint(fileName, codeContext, breakPointState, lineNo+1)
+	except:
+		traceback.print_exc()
+
+class Adb(bdb.Bdb,gateways.RemoteDebugApplicationEvents):
+	def __init__(self):
+		# Debugger state: all debug-application/COM plumbing starts unset
+		# and is populated by AttachApp()/SetupAXDebugging().
+		self.debugApplication = None
+		self.debuggingThread = None
+		self.debuggingThreadStateHandle = None
+		self.stackSnifferCookie = self.stackSniffer = None
+		self.codeContainerProvider = None
+		# NOTE(review): debuggingThread is assigned twice - harmless but
+		# redundant.
+		self.debuggingThread = None
+		self.breakFlags = None
+		self.breakReason = None
+		self.appDebugger = None
+		self.appEventConnection = None
+		self.logicalbotframe = None # Anything at this level or below does not exist!
+		self.currentframe = None # The frame we are currently in.
+		self.recursiveData = [] # Data saved for each reentery on this thread.
+		bdb.Bdb.__init__(self)
+		# Guards debuggingThread/recursiveData in SetupAXDebugging().
+		self._threadprotectlock = thread.allocate_lock()
+		self.reset()
+
+	def canonic(self, fname):
+		if fname[0]=='<':
+			return fname
+		return bdb.Bdb.canonic(self, fname)
+
+	def reset(self):
+		traceenter("adb.reset")
+		bdb.Bdb.reset(self)
+		
+	def __xxxxx__set_break(self, filename, lineno, cond = None):
+		# As per standard one, except no linecache checking!
+		if not self.breaks.has_key(filename):
+			self.breaks[filename] = []
+		list = self.breaks[filename]
+		if lineno in list:
+			return 'There is already a breakpoint there!'
+		list.append(lineno)
+		if cond is not None: self.cbreaks[filename, lineno]=cond
+		
+	def stop_here(self, frame):
+		traceenter("stop_here", _dumpf(frame), _dumpf(self.stopframe))
+		# As per bdb.stop_here, except for logicalbotframe
+##		if self.stopframe is None:
+##			return 1
+		if frame is self.stopframe:
+			return 1
+		
+		tracev("stop_here said 'No'!")
+		return 0
+		
+	def break_here(self, frame):
+		# Decide whether to stop at `frame`: explicit debugger halt/block
+		# or step requests win; otherwise defer to bdb's breakpoint table.
+		traceenter("break_here", self.breakFlags, _dumpf(frame))
+		self.breakReason = None
+		if self.breakFlags==axdebug.APPBREAKFLAG_DEBUGGER_HALT:
+			self.breakReason = axdebug.BREAKREASON_DEBUGGER_HALT
+		elif self.breakFlags==axdebug.APPBREAKFLAG_DEBUGGER_BLOCK:
+			self.breakReason = axdebug.BREAKREASON_DEBUGGER_BLOCK
+		elif self.breakFlags==axdebug.APPBREAKFLAG_STEP:
+			self.breakReason = axdebug.BREAKREASON_STEP
+		else:
+			print "Calling base 'break_here' with", self.breaks
+			if bdb.Bdb.break_here(self, frame):
+				self.breakReason = axdebug.BREAKREASON_BREAKPOINT
+		# The recorded break reason doubles as the "should we break" flag.
+		return self.breakReason is not None
+	
+	def break_anywhere(self, frame):
+		traceenter("break_anywhere", _dumpf(frame))
+		if self.breakFlags==axdebug.APPBREAKFLAG_DEBUGGER_HALT:
+			self.breakReason = axdebug.BREAKREASON_DEBUGGER_HALT
+			return 1
+		rc = bdb.Bdb.break_anywhere(self, frame)
+		tracev("break_anywhere",_dumpf(frame),"returning",rc)
+		return rc
+
+	def dispatch_return(self, frame, arg):
+		traceenter("dispatch_return", _dumpf(frame), arg)
+		if self.logicalbotframe is frame:
+			# We dont want to debug parent frames.
+			tracev("dispatch_return resetting sys.trace")
+			sys.settrace(None)
+			return
+#			self.bSetTrace = 0
+		self.currentframe = frame.f_back
+		return bdb.Bdb.dispatch_return(self, frame, arg)
+
+	def dispatch_line(self, frame):
+		traceenter("dispatch_line", _dumpf(frame), _dumpf(self.botframe))
+#		trace("logbotframe is", _dumpf(self.logicalbotframe), "botframe is", self.botframe)
+		if frame is self.logicalbotframe:
+			trace("dispatch_line", _dumpf(frame), "for bottom frame returing tracer")
+			# The next code executed in the frame above may be a builtin (eg, apply())
+			# in which sys.trace needs to be set.
+			sys.settrace(self.trace_dispatch)
+			# And return the tracer incase we are about to execute Python code,
+			# in which case sys tracer is ignored!
+			return self.trace_dispatch
+	
+		if self.codeContainerProvider.FromFileName(frame.f_code.co_filename) is None:
+			trace("dispatch_line has no document for", _dumpf(frame), "- skipping trace!")
+			return None
+		self.currentframe = frame # So the stack sniffer knows our most recent, debuggable code.
+		return bdb.Bdb.dispatch_line(self, frame)
+	
+	def dispatch_call(self, frame, arg):
+		traceenter("dispatch_call",_dumpf(frame))
+		frame.f_locals['__axstack_address__'] = axdebug.GetStackAddress()
+		if frame is self.botframe:
+			trace("dispatch_call is self.botframe - returning tracer")
+			return self.trace_dispatch
+		# Not our bottom frame.  If we have a document for it,
+		# then trace it, otherwise run at full speed.
+		if self.codeContainerProvider.FromFileName(frame.f_code.co_filename) is None:
+			trace("dispatch_call has no document for", _dumpf(frame), "- skipping trace!")
+##			sys.settrace(None)
+			return None
+		return self.trace_dispatch
+		
+#		rc =  bdb.Bdb.dispatch_call(self, frame, arg)
+#		trace("dispatch_call", _dumpf(frame),"returned",rc)
+#		return rc
+
+	def trace_dispatch(self, frame, event, arg):
+		traceenter("trace_dispatch", _dumpf(frame), event, arg)
+		if self.debugApplication is None:
+			trace("trace_dispatch has no application!")
+			return # None
+		return bdb.Bdb.trace_dispatch(self, frame, event, arg)
+
+
+	#
+	# The user functions do bugger all!
+	#
+#	def user_call(self, frame, argument_list):
+#		traceenter("user_call",_dumpf(frame))
+
+	def user_line(self, frame):
+		traceenter("user_line",_dumpf(frame))
+		# Traces at line zero
+		if frame.f_lineno!=0:
+			breakReason = self.breakReason
+			if breakReason is None:
+				breakReason = axdebug.BREAKREASON_STEP
+			self._HandleBreakPoint(frame, None, breakReason)
+
+	def user_return(self, frame, return_value):
+#		traceenter("user_return",_dumpf(frame),return_value)
+		bdb.Bdb.user_return(self, frame, return_value)
+	
+	def user_exception(self, frame, (exc_type, exc_value, exc_traceback)):
+#		traceenter("user_exception")
+		bdb.Bdb.user_exception(self, frame, (exc_type, exc_value, exc_traceback))
+	
+
+	def _HandleBreakPoint(self, frame, tb, reason):
+		traceenter("Calling HandleBreakPoint with reason", reason,"at frame", _dumpf(frame))
+		traceenter(" Current frame is", _dumpf(self.currentframe))
+		try:
+			resumeAction = self.debugApplication.HandleBreakPoint(reason)
+			tracev("HandleBreakPoint returned with ", resumeAction)
+		except pythoncom.com_error, details:
+			# Eeek - the debugger is dead, or something serious is happening.
+			# Assume we should continue
+			resumeAction = axdebug.BREAKRESUMEACTION_CONTINUE
+			trace("HandleBreakPoint FAILED with", details)
+		
+		self.stack = []
+		self.curindex = 0
+		if resumeAction == axdebug.BREAKRESUMEACTION_ABORT:
+			self.set_quit()
+		elif resumeAction == axdebug.BREAKRESUMEACTION_CONTINUE:
+			tracev("resume action is continue")
+			self.set_continue()
+		elif resumeAction == axdebug.BREAKRESUMEACTION_STEP_INTO:
+			tracev("resume action is step")
+			self.set_step()
+		elif resumeAction == axdebug.BREAKRESUMEACTION_STEP_OVER:
+			tracev("resume action is next")
+			self.set_next(frame)
+		elif resumeAction == axdebug.BREAKRESUMEACTION_STEP_OUT:
+			tracev("resume action is stop out")
+			self.set_return(frame)
+		else:
+			assert(0, "unknown resume action flags")
+		self.breakReason = None
+
+	def set_trace(self):
+		self.breakReason = axdebug.BREAKREASON_LANGUAGE_INITIATED
+		bdb.Bdb.set_trace(self)
+
+	def CloseApp(self):
+		traceenter("ClosingApp")
+		self.reset()
+		self.logicalbotframe = None
+		if self.stackSnifferCookie is not None:
+			try:
+				self.debugApplication.RemoveStackFrameSniffer(self.stackSnifferCookie)
+
+			except pythoncom.com_error:
+				trace("*** Could not RemoveStackFrameSniffer %d" % (self.stackSnifferCookie))
+		if self.stackSniffer:
+			_wrap_remove(self.stackSniffer)
+		self.stackSnifferCookie = self.stackSniffer = None
+
+		if self.appEventConnection is not None:
+			self.appEventConnection.Disconnect()
+			self.appEventConnection = None
+		self.debugApplication = None
+		self.appDebugger = None
+		if self.codeContainerProvider is not None:
+			self.codeContainerProvider.Close()
+			self.codeContainerProvider = None
+		
+	def AttachApp(self, debugApplication, codeContainerProvider):
+#		traceenter("AttachApp", debugApplication, codeContainerProvider)
+		self.codeContainerProvider = codeContainerProvider
+		self.debugApplication = debugApplication
+		self.stackSniffer = _wrap(stackframe.DebugStackFrameSniffer(self), axdebug.IID_IDebugStackFrameSniffer)
+		self.stackSnifferCookie = debugApplication.AddStackFrameSniffer(self.stackSniffer)
+#		trace("StackFrameSniffer added (%d)" % self.stackSnifferCookie)
+		
+		# Connect to the application events.
+		self.appEventConnection = win32com.client.connect.SimpleConnection(self.debugApplication, self, axdebug.IID_IRemoteDebugApplicationEvents)
+		
+	def ResetAXDebugging(self):
+		traceenter("ResetAXDebugging", self, "with refcount", len(self.recursiveData))
+		if win32api.GetCurrentThreadId()!=self.debuggingThread:
+			trace("ResetAXDebugging called on other thread")
+			return
+
+		if len(self.recursiveData)==0:
+#			print "ResetAXDebugging called for final time."
+			self.logicalbotframe = None
+			self.debuggingThread = None
+			self.currentframe = None
+			self.debuggingThreadStateHandle = None
+			return
+
+		self.logbotframe, self.stopframe, self.currentframe, self.debuggingThreadStateHandle = self.recursiveData[0]
+		self.recursiveData = self.recursiveData[1:]
+		
+	def SetupAXDebugging(self, baseFrame = None, userFrame = None):
+		"""Get ready for potential debugging.  Must be called on the thread
+		that is being debugged.
+		"""
+		# userFrame is for non AXScript debugging.  This is the first frame of the
+		# users code.
+		if userFrame is None: 
+			userFrame = baseFrame
+		else:
+			# We have missed the "dispatch_call" function, so set this up now!
+			userFrame.f_locals['__axstack_address__'] = axdebug.GetStackAddress()
+
+		traceenter("SetupAXDebugging", self)
+		self._threadprotectlock.acquire()
+		try:
+			thisThread = win32api.GetCurrentThreadId()
+			if self.debuggingThread is None:
+				self.debuggingThread = thisThread
+			else:
+				if self.debuggingThread!=thisThread:
+					trace("SetupAXDebugging called on other thread - ignored!")
+					return
+				# push our context.
+				self.recursiveData.insert(0, (self.logicalbotframe,self.stopframe, self.currentframe,self.debuggingThreadStateHandle))
+		finally:
+			self._threadprotectlock.release()
+
+		trace("SetupAXDebugging has base frame as", _dumpf(baseFrame))
+		self.botframe = baseFrame
+		self.stopframe = userFrame
+		self.logicalbotframe = baseFrame
+		self.currentframe = None
+		self.debuggingThreadStateHandle = axdebug.GetThreadStateHandle()
+
+		self._BreakFlagsChanged()
+		
+	# RemoteDebugApplicationEvents
+	def OnConnectDebugger(self, appDebugger):
+		traceenter("OnConnectDebugger", appDebugger)
+		self.appDebugger = appDebugger
+		# Reflect output to appDebugger
+		writefunc = lambda s: appDebugger.onDebugOutput(s)
+		sys.stdout = OutputReflector(sys.stdout, writefunc)
+		sys.stderr = OutputReflector(sys.stderr, writefunc)
+		
+	def OnDisconnectDebugger(self):
+		traceenter("OnDisconnectDebugger")
+		# Stop reflecting output
+		if isinstance(sys.stdout, OutputReflector):
+			sys.stdout = sys.stdout.file
+		if isinstance(sys.stderr, OutputReflector):
+			sys.stderr = sys.stderr.file
+		self.appDebugger = None
+		self.set_quit()
+
+	def OnSetName(self, name):
+		traceenter("OnSetName", name)
+	def OnDebugOutput(self, string):
+		traceenter("OnDebugOutput", string)
+	def OnClose(self):
+		traceenter("OnClose")
+	def OnEnterBreakPoint(self, rdat):
+		traceenter("OnEnterBreakPoint", rdat)
+	def OnLeaveBreakPoint(self, rdat):
+		traceenter("OnLeaveBreakPoint", rdat)
+	def OnCreateThread(self, rdat):
+		traceenter("OnCreateThread", rdat)
+	def OnDestroyThread(self, rdat):
+		traceenter("OnDestroyThread", rdat)
+	def OnBreakFlagChange(self, abf, rdat):
+		traceenter("Debugger OnBreakFlagChange", abf, rdat)
+		self.breakFlags = abf
+		self._BreakFlagsChanged()
+		
+	def _BreakFlagsChanged(self):
+		traceenter("_BreakFlagsChanged to %s with our thread = %s, and debugging thread = %s" % (self.breakFlags, self.debuggingThread, win32api.GetCurrentThreadId()))
+		trace("_BreakFlagsChanged has breaks", self.breaks)
+		# If a request comes on our debugging thread, then do it now!
+#		if self.debuggingThread!=win32api.GetCurrentThreadId():
+#			return
+
+		if len(self.breaks) or self.breakFlags:
+
+			if self.logicalbotframe:
+				trace("BreakFlagsChange with bot frame", _dumpf(self.logicalbotframe))
+				# We have frames not to be debugged (eg, Scripting engine frames
+				# (sys.settrace will be set when out logicalbotframe is hit - 
+				#  this may not be the right thing to do, as it may not cause the
+				#  immediate break we desire.)
+				self.logicalbotframe.f_trace = self.trace_dispatch
+			else:
+				trace("BreakFlagsChanged, but no bottom frame")
+				if self.stopframe is not None:
+					self.stopframe.f_trace = self.trace_dispatch
+			# If we have the thread-state for the thread being debugged, then
+			# we dynamically set its trace function - it is possible that the thread
+			# being debugged is in a blocked call (eg, a message box) and we
+			# want to hit the debugger the instant we return
+		if self.debuggingThreadStateHandle is not None and \
+			self.breakFlags and \
+			self.debuggingThread != win32api.GetCurrentThreadId():
+				axdebug.SetThreadStateTrace(self.debuggingThreadStateHandle, self.trace_dispatch)
+	def _OnSetBreakPoint(self, key, codeContext, bps, lineNo):
+		traceenter("_OnSetBreakPoint", self, key, codeContext, bps, lineNo)
+		if bps==axdebug.BREAKPOINT_ENABLED:
+			problem = self.set_break(key, lineNo)
+			if problem:
+				print "*** set_break failed -", problem
+			trace("_OnSetBreakPoint just set BP and has breaks", self.breaks)
+		else:
+			self.clear_break(key, lineNo)
+		self._BreakFlagsChanged()
+		trace("_OnSetBreakPoint leaving with breaks", self.breaks)
+
+def Debugger():
+	# Return the process-wide Adb singleton, creating it on first use.
+	global g_adb
+	if g_adb is None:
+		g_adb = Adb()
+	return g_adb
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/axdebug.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/axdebug.pyd
new file mode 100644
index 0000000..59e06a9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/axdebug.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/codecontainer.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/codecontainer.py
new file mode 100644
index 0000000..f3caa46e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/codecontainer.py
@@ -0,0 +1,253 @@
+"""A utility class for a code container.
+
+A code container is a class which holds source code for a debugger.  It knows how
+to color the text, and also how to translate lines into offsets, and back.
+"""
+
+import string, sys
+from win32com.axdebug import axdebug
+import tokenize
+from util import RaiseNotImpl, _wrap
+
+from win32com.server.exception import Exception
+import win32api, winerror
+import contexts
+
+_keywords = {}				# set of Python keywords
+for name in string.split("""
+ and assert break class continue def del elif else except exec
+ finally for from global if import in is lambda not
+ or pass print raise return try while
+ """):
+    _keywords[name] = 1
+
+class SourceCodeContainer:
+	"""Holds the source text for one debug document, along with the
+	line/character-offset mapping, syntax-color attributes, and the
+	DebugCodeContext objects created for it."""
+	def __init__(self, text, fileName = "<Remove Me!>", sourceContext = 0, startLineNumber = 0, site = None, debugDocument = None):
+		self.sourceContext = sourceContext # The source context added by a smart host.
+		self.text = None
+		if text: 
+			self.text = str(text)# Convert from Unicode if necessary.
+			self._buildlines()
+		self.nextLineNo = 0 # cursor for the GetNextLine/tokenize readline protocol.
+		self.fileName = fileName
+		self.codeContexts = {} # map of line-start char offset -> wrapped code context.
+		self.site = site
+		self.startLineNumber = startLineNumber
+		# NOTE(review): the 'debugDocument' parameter is ignored here; callers
+		# (eg documents.py) assign .debugDocument directly - confirm intended.
+		self.debugDocument = None
+	def _Close(self):
+		# Drop all references so COM gateways/documents can be released.
+		self.text = self.lines = self.lineOffsets = None
+		self.codeContexts = None
+		self.debugDocument = None
+		self.site = None
+		self.sourceContext = None
+
+	def GetText(self):
+		# Subclasses may override to load lazily (see SourceModuleContainer).
+		return self.text
+	def GetName(self, dnt):
+		assert 0, "You must subclass this"
+	def GetFileName(self):
+		return self.fileName
+
+	def GetPositionOfLine(self, cLineNumber):
+		"""Return the character offset at which the (zero-based) line starts."""
+		self.GetText() # Prime us.
+		try:
+			return self.lineOffsets[cLineNumber]
+		except IndexError:
+			raise Exception(scode=winerror.S_FALSE)
+	def GetLineOfPosition(self, charPos):
+		"""Return (lineNo, offset-within-line) for a character position."""
+		self.GetText() # Prime us.
+		lastOffset = 0
+		lineNo = 0
+		for lineOffset in self.lineOffsets[1:]:
+			if lineOffset > charPos:
+				break
+			lastOffset = lineOffset
+			lineNo = lineNo + 1
+		else: # for not broken - charPos is beyond the last line start.
+			raise Exception(scode=winerror.S_FALSE)
+		return lineNo, 	(charPos-lastOffset)
+
+	def GetNextLine(self):
+		"""readline-style callback for tokenize - returns "" at the end,
+		resetting the cursor so the text can be tokenized again."""
+		if self.nextLineNo>=len(self.lines):
+			self.nextLineNo = 0 # auto-reset.
+			return ""
+		rc = self.lines[self.nextLineNo]
+		self.nextLineNo = self.nextLineNo + 1
+		return rc
+		
+	def GetLine(self, num):
+		self.GetText() # Prime us.
+		return self.lines[num]
+		
+	def GetNumChars(self):
+		return len(self.GetText())
+		
+	def GetNumLines(self):
+		self.GetText() # Prime us.
+		return len(self.lines)
+
+	def _buildline(self, pos):
+		# Return (line-including-newline, offset-just-past-it) starting at pos.
+		i = string.find(self.text, '\n', pos)
+		if i < 0:
+			newpos = len(self.text)
+		else:
+			newpos = i+1
+		r = self.text[pos:newpos]
+		return r, newpos
+		
+	def _buildlines(self):
+		# Build self.lines and self.lineOffsets (start offset of each line).
+		self.lines = []
+		self.lineOffsets = [0]
+		line, pos = self._buildline(0)
+		while line:
+			self.lines.append(line)
+			self.lineOffsets.append(pos)
+			line, pos = self._buildline(pos)
+
+	def _ProcessToken(self, type, token, (srow, scol), (erow, ecol), line):
+		# tokenize 'tokeneater' callback: append a color attribute (or an
+		# (attr, run-length) tuple) for this token, padding any characters
+		# skipped since the previous token as COMMENT.
+		self.GetText() # Prime us.
+		linenum = srow - 1 # Lines zero based for us too.
+		realCharPos = self.lineOffsets[linenum] + scol
+		numskipped = realCharPos - self.lastPos
+		if numskipped==0:
+			pass
+		elif numskipped==1:
+			self.attrs.append(axdebug.SOURCETEXT_ATTR_COMMENT)
+		else:
+			self.attrs.append((axdebug.SOURCETEXT_ATTR_COMMENT, numskipped))
+		kwSize = len(token)
+		self.lastPos = realCharPos + kwSize
+		attr = 0
+
+		if type==tokenize.NAME:
+			if _keywords.has_key(token):
+				attr = axdebug.SOURCETEXT_ATTR_KEYWORD
+		elif type==tokenize.STRING:
+			attr = axdebug.SOURCETEXT_ATTR_STRING
+		elif type==tokenize.NUMBER:
+			attr = axdebug.SOURCETEXT_ATTR_NUMBER
+		elif type==tokenize.OP:
+			attr = axdebug.SOURCETEXT_ATTR_OPERATOR
+		elif type==tokenize.COMMENT:
+			attr = axdebug.SOURCETEXT_ATTR_COMMENT
+		# else attr remains zero...
+		if kwSize==0:
+			pass
+		elif kwSize==1:
+			self.attrs.append(attr)
+		else:
+			self.attrs.append((attr, kwSize))
+
+	def GetSyntaxColorAttributes(self):
+		"""Tokenize the text and return the attribute list; each entry is
+		either a single attribute or an (attribute, run-length) tuple whose
+		lengths together cover the whole text."""
+		self.lastPos = 0
+		self.attrs = []
+		try:
+			tokenize.tokenize(self.GetNextLine, self._ProcessToken)
+		except tokenize.TokenError:
+			pass # Ignore - will cause all subsequent text to be commented.
+		numAtEnd = len(self.GetText()) - self.lastPos
+		if numAtEnd:
+			self.attrs.append((axdebug.SOURCETEXT_ATTR_COMMENT, numAtEnd))
+		return self.attrs
+
+	# We also provide and manage DebugDocumentContext objects
+	def _MakeDebugCodeContext(self, lineNo, charPos, len):
+		# NOTE: 'len' shadows the builtin, but is part of the established signature.
+		return _wrap(contexts.DebugCodeContext(lineNo, charPos, len, self, self.site), axdebug.IID_IDebugCodeContext)
+	# Make a context at the given position.  It should take up the entire context.
+	def _MakeContextAtPosition(self, charPos):
+		lineNo, offset = self.GetLineOfPosition(charPos)
+		try:
+			endPos = self.GetPositionOfLine(lineNo+1)
+		except:
+			endPos = charPos # last line - zero-length context.
+		codecontext = self._MakeDebugCodeContext(lineNo, charPos, endPos-charPos)
+		return codecontext
+
+	# Returns a DebugCodeContext.  debugDocument can be None for smart hosts.
+	def GetCodeContextAtPosition(self, charPos):
+		# Contexts are cached per line-start offset.
+		lineNo, offset = self.GetLineOfPosition(charPos)
+		charPos = self.GetPositionOfLine(lineNo)
+		try:
+			cc = self.codeContexts[charPos]
+		except KeyError:
+			cc = self._MakeContextAtPosition(charPos)
+			self.codeContexts[charPos] = cc
+		return cc
+
+class SourceModuleContainer(SourceCodeContainer):
+	def __init__(self, module):
+		self.module = module
+		if hasattr(module, '__file__'):
+			fname = self.module.__file__
+			# Check for .pyc or .pyo or even .pys!
+			if fname[-1] in ['O','o','C','c', 'S', 's']: fname = fname[:-1]
+			try:
+				fname = win32api.GetFullPathName(fname)
+			except win32api.error:
+				pass
+		else:
+			if module.__name__=='__main__' and len(sys.argv)>0:
+				fname = sys.argv[0]
+			else:
+				fname = "<Unknown!>"
+		SourceCodeContainer.__init__(self, None, fname)
+
+	def GetText(self):
+		if self.text is None:
+			fname = self.GetFileName()
+			if fname:
+				try:
+					self.text = open(fname, "r").read()
+				except IOError, details:
+					self.text = "# Exception opening file\n# %s" % (`details`)
+			else:
+				self.text = "# No file available for module '%s'" % (self.module)
+			self._buildlines()
+		return self.text
+
+	def GetName(self, dnt):
+		name = self.module.__name__
+		try:
+			fname = win32api.GetFullPathName(self.module.__file__)
+		except win32api.error:
+			fname = self.module.__file__
+		except AttributeError:
+			fname = name
+		if dnt==axdebug.DOCUMENTNAMETYPE_APPNODE:
+			return string.split(name, ".")[-1]
+		elif dnt==axdebug.DOCUMENTNAMETYPE_TITLE:
+			return fname
+		elif dnt==axdebug.DOCUMENTNAMETYPE_FILE_TAIL:
+			return os.path.split(fname)[1]
+		elif dnt==axdebug.DOCUMENTNAMETYPE_URL:
+			return "file:%s" % fname
+		else:
+			raise Exception(scode=winerror.E_UNEXPECTED)
+
+if __name__=='__main__':
+	# Ad-hoc self-test: colorize the 'ttest' module and check that the
+	# attribute run-lengths exactly cover the whole source text.
+	import sys
+	sys.path.append(".")
+	import ttest
+	sc = SourceModuleContainer(ttest)
+#	sc = SourceCodeContainer(open(sys.argv[1], "rb").read(), sys.argv[1])
+	attrs = sc.GetSyntaxColorAttributes()
+	attrlen = 0
+	for attr in attrs:
+		if type(attr)==type(()):
+			attrlen = attrlen + attr[1] # (attr, run-length) tuple
+		else:
+			attrlen = attrlen + 1 # single-character attribute
+	text = sc.GetText()
+	if attrlen!=len(text):
+		print "Lengths dont match!!! (%d/%d)" % (attrlen, len(text))
+	
+#	print "Attributes:"
+#	print attrs
+	print "GetLineOfPos=", sc.GetLineOfPosition(0)
+	print "GetLineOfPos=", sc.GetLineOfPosition(4)
+	print "GetLineOfPos=", sc.GetLineOfPosition(10)
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/contexts.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/contexts.py
new file mode 100644
index 0000000..5286166
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/contexts.py
@@ -0,0 +1,56 @@
+""" A module for managing the AXDebug I*Contexts
+
+"""
+import gateways, axdebug
+import pythoncom, win32com.server.util
+
+# Utility function for wrapping object created by this module.
+from util import _wrap, _wrap_remove, trace
+import adb
+
+class DebugCodeContext(gateways.DebugCodeContext, gateways.DebugDocumentContext):
+	"""A code context covering one span of source ('offset' chars into the
+	container, 'length' chars long, at line 'lineno')."""
+	# NOTE: We also implement the IDebugDocumentContext interface for Simple Hosts.
+	# Thus, debugDocument may be NULL when we have smart hosts - but in that case, we
+	# wont be called upon to provide it.
+	_public_methods_ = gateways.DebugCodeContext._public_methods_ + \
+	                   gateways.DebugDocumentContext._public_methods_
+	_com_interfaces_ = gateways.DebugCodeContext._com_interfaces_ + \
+	                   gateways.DebugDocumentContext._com_interfaces_
+
+	def __init__(self, lineNo, charPos, len, codeContainer, debugSite):
+		# NOTE(review): 'len' shadows the builtin but is the established
+		# parameter name - callers pass it positionally.
+		self.debugSite = debugSite
+		self.offset = charPos
+		self.length = len
+		self.breakPointState = 0
+		self.lineno = lineNo
+		gateways.DebugCodeContext.__init__(self)
+		self.codeContainer = codeContainer
+
+	def _Close(self):
+		self.debugSite = None
+
+	def GetDocumentContext(self):
+		if self.debugSite is not None:
+			# We have a smart host - let him give it to us.
+			return self.debugSite.GetDocumentContextFromPosition(
+				self.codeContainer.sourceContext,
+				self.offset, 
+				self.length)
+		else:
+			# Simple host - Fine - Ill do it myself!
+			return _wrap(self, axdebug.IID_IDebugDocumentContext)
+
+	def SetBreakPoint(self, bps):
+		# Record the state and forward to the Python debugger (adb).
+		self.breakPointState = bps
+		adb.OnSetBreakPoint(self, bps, self.lineno)
+		
+	# The DebugDocumentContext methods for simple hosts.
+	def GetDocument(self):
+		return self.codeContainer.debugDocument
+
+	def EnumCodeContexts(self):
+		return _wrap(EnumDebugCodeContexts([self]), axdebug.IID_IEnumDebugCodeContexts)
+
+class EnumDebugCodeContexts(gateways.EnumDebugCodeContexts):
+	# Per-item hook called by the enumerator base class.  The method name
+	# shadows the module-level _wrap imported above; the call below still
+	# resolves to that module-level function (normal Python scoping).
+	def _wrap(self, obj):
+		return _wrap(obj, axdebug.IID_IDebugCodeContext)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/debugger.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/debugger.py
new file mode 100644
index 0000000..913b3c2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/debugger.py
@@ -0,0 +1,202 @@
+import sys, traceback, string
+
+from win32com.axscript import axscript
+from win32com.axdebug import codecontainer, axdebug, gateways, documents, contexts, adb, expressions
+from win32com.axdebug.util import trace, _wrap, _wrap_remove
+
+import pythoncom
+import win32api, winerror
+import os
+
+currentDebugger = None
+
+class ModuleTreeNode:
+	"""Helper class for building a module tree
+	"""
+	def __init__(self, module):
+		modName = module.__name__
+		self.moduleName = modName
+		self.module = module
+		self.realNode = None
+		self.cont = codecontainer.SourceModuleContainer(module)
+	def __repr__(self):
+		return "<ModuleTreeNode wrapping %s>" % (self.module)
+	def Attach(self, parentRealNode):
+		self.realNode.Attach(parentRealNode)
+		
+	def Close(self):
+		self.module = None
+		self.cont = None
+		self.realNode = None
+
+def BuildModule(module, built_nodes, rootNode, create_node_fn, create_node_args ):
+	if module:
+		keep = module.__name__
+		keep = keep and (built_nodes.get(module) is None)
+		if keep and hasattr(module, '__file__'):
+			keep = string.lower(os.path.splitext(module.__file__)[1]) not in [".pyd", ".dll"]
+#		keep = keep and module.__name__=='__main__'
+	if module and keep:
+#		print "keeping", module.__name__
+		node = ModuleTreeNode(module)
+		built_nodes[module] = node
+		realNode = apply(create_node_fn, (node,)+create_node_args)
+		node.realNode = realNode
+		
+		# Split into parent nodes.
+		parts = string.split(module.__name__, '.')
+		if parts[-1][:8]=='__init__': parts = parts[:-1]
+		parent = string.join(parts[:-1], '.')
+		parentNode = rootNode
+		if parent:
+			parentModule = sys.modules[parent]
+			BuildModule(parentModule, built_nodes, rootNode, create_node_fn, create_node_args)
+			parentNode = built_nodes[parentModule].realNode
+		node.Attach(parentNode)
+
+def RefreshAllModules(builtItems, rootNode, create_node, create_node_args):
+	for module in sys.modules.values():
+		BuildModule(module, builtItems, rootNode, create_node, create_node_args)
+		
+# realNode = pdm.CreateDebugDocumentHelper(None) # DebugDocumentHelper node?
+# app.CreateApplicationNode() # doc provider node.
+
+class CodeContainerProvider(documents.CodeContainerProvider):
+	def __init__(self, axdebugger):
+		self.axdebugger = axdebugger
+		documents.CodeContainerProvider.__init__(self)
+		self.currentNumModules = len(sys.modules)
+		self.nodes = {}
+		self.axdebugger.RefreshAllModules(self.nodes, self)
+		
+	def FromFileName(self, fname):
+### It appears we cant add modules during a debug session!
+#		if self.currentNumModules != len(sys.modules):
+#			self.axdebugger.RefreshAllModules(self.nodes, self)
+#			self.currentNumModules = len(sys.modules)
+#		for key in self.ccsAndNodes.keys():
+#			print "File:", key
+		return documents.CodeContainerProvider.FromFileName(self, fname)
+
+	def Close(self):
+		documents.CodeContainerProvider.Close(self)
+		self.axdebugger = None
+		print "Closing %d nodes" % (len(self.nodes))
+		for node in self.nodes.values():
+			node.Close()
+		self.nodes = {}
+
+class OriginalInterfaceMaker:
+	def MakeInterfaces(self, pdm):
+		app = self.pdm.CreateApplication()
+		self.cookie = pdm.AddApplication(app)
+		root = app.GetRootNode()
+		return app, root
+
+	def CloseInterfaces(self, pdm):
+		pdm.RemoveApplication(self.cookie)
+
+class SimpleHostStyleInterfaceMaker:
+	"""Uses the PDM's default application rather than creating a new one."""
+	def MakeInterfaces(self, pdm):
+		app = pdm.GetDefaultApplication()
+		root = app.GetRootNode()
+		return app, root
+	
+	def CloseInterfaces(self, pdm):
+		# Nothing to tear down - we never registered anything.
+		pass
+	
+
+class AXDebugger:
+	"""Ties the Python debugger (adb) to the ActiveX Process Debug Manager:
+	creates/finds the debug application, registers an expression context
+	provider and a code-container provider, and attaches the debugger."""
+	def __init__(self, interfaceMaker = None, processName = None):
+		if processName is None: processName = "Python Process"
+		if interfaceMaker is None: interfaceMaker = SimpleHostStyleInterfaceMaker()
+
+		self.pydebugger = adb.Debugger()
+		
+		self.pdm=pythoncom.CoCreateInstance(axdebug.CLSID_ProcessDebugManager,None,pythoncom.CLSCTX_ALL, axdebug.IID_IProcessDebugManager)
+
+		self.app, self.root = interfaceMaker.MakeInterfaces(self.pdm)
+		self.app.SetName(processName)
+		self.interfaceMaker = interfaceMaker
+
+		expressionProvider = _wrap(expressions.ProvideExpressionContexts(), axdebug.IID_IProvideExpressionContexts)
+		self.expressionCookie = self.app.AddGlobalExpressionContextProvider(expressionProvider)
+		
+		contProvider = CodeContainerProvider(self)
+		self.pydebugger.AttachApp(self.app, contProvider)
+
+	def Break(self):
+		# Get the frame we start debugging from - this is the frame 1 level up.
+		# Deliberately raise (and catch) a TypeError purely so we can pull the
+		# caller's frame out of the traceback object.
+		try:
+			1 + ''
+		except:
+			frame = sys.exc_info()[2].tb_frame.f_back
+
+		# Get/create the debugger, and tell it to break.
+		self.app.StartDebugSession()
+#		self.app.CauseBreak()
+
+		self.pydebugger.SetupAXDebugging(None, frame)
+		self.pydebugger.set_trace()
+
+	def Close(self):
+		# Detach tracing, release the COM interfaces, then drop references.
+		self.pydebugger.ResetAXDebugging()
+		self.interfaceMaker.CloseInterfaces(self.pdm)
+		self.pydebugger.CloseApp()
+		self.app.RemoveGlobalExpressionContextProvider(self.expressionCookie)
+		self.expressionCookie = None
+
+		self.pdm = None
+		self.app = None
+		self.pydebugger = None
+		self.root = None
+
+	def RefreshAllModules(self, nodes, containerProvider):
+		RefreshAllModules(nodes, self.root, self.CreateApplicationNode, (containerProvider,))
+
+	def CreateApplicationNode(self, node, containerProvider):
+		# Create the real (COM) application node and give it a document
+		# provider wrapping the module's code container.
+		realNode = self.app.CreateApplicationNode()
+		
+		document = documents.DebugDocumentText(node.cont)
+		document = _wrap(document, axdebug.IID_IDebugDocument)
+
+		node.cont.debugDocument = document
+
+		provider = documents.DebugDocumentProvider(document)
+		provider = _wrap(provider, axdebug.IID_IDebugDocumentProvider)
+		realNode.SetDocumentProvider(provider)
+		
+		containerProvider.AddCodeContainer(node.cont, realNode)
+		return realNode
+
+def _GetCurrentDebugger():
+	"""Return the process-wide AXDebugger, creating it on first use."""
+	global currentDebugger
+	if currentDebugger is None:
+		currentDebugger = AXDebugger()
+	return currentDebugger
+	
+def Break():
+	"""Break into the (single, process-wide) debugger."""
+	_GetCurrentDebugger().Break()
+	
+# Convenient aliases, mirroring pdb-style entry points.
+brk = Break
+set_trace = Break
+		
+def dosomethingelse():
+	# Debugger demo payload - exists purely to have lines to step through.
+	a=2
+	b = "Hi there"
+	
+def dosomething():
+	# Debugger demo payload - exists purely to have lines to step through.
+	a=1
+	b=2
+	dosomethingelse()
+
+def test():
+	# Break into the debugger, wait for the user (to attach a host-side
+	# debugger UI), then run the demo payload.
+	Break()
+	raw_input("Waiting...")
+	dosomething()
+	print "Done"
+
+if __name__=='__main__':
+	print "About to test the debugging interfaces!"
+	test()
+	# If cleanup worked, these counts should be at (or near) zero.
+	print " %d/%d com objects still alive" % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/documents.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/documents.py
new file mode 100644
index 0000000..799c7a7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/documents.py
@@ -0,0 +1,124 @@
+""" Management of documents for AXDebugging.
+"""
+
+import axdebug, gateways
+import pythoncom
+from util import _wrap, _wrap_remove, RaiseNotImpl, trace
+from win32com.server.util import unwrap
+import codecontainer
+import contexts
+from win32com.server.exception import Exception
+import win32api, winerror, os, string, sys
+
+#def trace(*args):
+#	pass
+
+def GetGoodFileName(fname):
+	if fname[0] <> "<":
+		return win32api.GetFullPathName(fname)
+	return fname
+
+class DebugDocumentProvider(gateways.DebugDocumentProvider):
+	"""A trivial document provider that hands out a pre-existing document."""
+	def __init__(self, doc):
+		self.doc = doc
+
+	def GetName(self, dnt):
+		return self.doc.GetName(dnt)
+
+	def GetDocumentClassId(self):
+		return self.doc.GetDocumentClassId()
+	
+	def GetDocument(self):
+		return self.doc
+
+class DebugDocumentText(gateways.DebugDocumentInfo, gateways.DebugDocumentText, gateways.DebugDocument):
+	_com_interfaces_ = gateways.DebugDocumentInfo._com_interfaces_ + \
+	                   gateways.DebugDocumentText._com_interfaces_ + \
+	                   gateways.DebugDocument._com_interfaces_
+	_public_methods_ = gateways.DebugDocumentInfo._public_methods_ + \
+	                   gateways.DebugDocumentText._public_methods_ + \
+	                   gateways.DebugDocument._public_methods_
+	# A class which implements a DebugDocumentText, using the functionality
+	# provided by a codeContainer
+	def __init__(self, codeContainer):
+		gateways.DebugDocumentText.__init__(self)
+		gateways.DebugDocumentInfo.__init__(self)
+		gateways.DebugDocument.__init__(self)
+		self.codeContainer = codeContainer
+		
+	def _Close(self):
+		self.docContexts = None
+#		self.codeContainer._Close()
+		self.codeContainer = None
+	# IDebugDocumentInfo
+	def GetName(self, dnt):
+		return self.codeContainer.GetName(dnt)
+
+	def GetDocumentClassId(self):
+		# CLSID identifying this document class to the host.
+		return "{DF630910-1C1D-11d0-AE36-8C0F5E000000}"
+
+	# IDebugDocument has no methods!
+	#
+
+	# IDebugDocumentText methods.
+	# def GetDocumentAttributes
+	def GetSize(self):
+		# Returns (number of lines, number of characters).
+		return self.codeContainer.GetNumLines(), self.codeContainer.GetNumChars()
+	def GetPositionOfLine(self, cLineNumber):
+		return self.codeContainer.GetPositionOfLine(cLineNumber)
+	def GetLineOfPosition(self, charPos):
+		return self.codeContainer.GetLineOfPosition(charPos)
+	def GetText(self, charPos, maxChars, wantAttr):
+		# Get all the attributes, else the tokenizer will get upset.
+		# XXX - not yet!
+		cont = self.codeContainer
+		attr = cont.GetSyntaxColorAttributes()
+		return cont.GetText(), attr
+
+	def GetPositionOfContext(self, context):
+		trace("GetPositionOfContext", context)
+		context = unwrap(context)
+		return context.offset, context.length
+
+	# Return a DebugDocumentContext.
+	def GetContextOfPosition(self, charPos, maxChars):
+		# NOTE(review): 'doc' below is created but never used - possibly kept
+		# only for a wrapping side-effect; confirm before removing.
+		doc = _wrap(self, axdebug.IID_IDebugDocument)
+		rc = self.codeContainer.GetCodeContextAtPosition(charPos)
+		return rc.QueryInterface(axdebug.IID_IDebugDocumentContext)
+
+class CodeContainerProvider:
+	"""An abstract Python class which provides code containers!
+	
+	Given a Python file name (as the debugger knows it by) this will
+	return a CodeContainer interface suitable for use.
+	
+	This provides a simple base implementation that simply supports
+	a dictionary of nodes and providers.
+	"""
+	def __init__(self):
+		# Map of canonical file name -> (codeContainer, applicationNode).
+		self.ccsAndNodes = {}
+	
+	def AddCodeContainer(self, cc, node = None):
+		fname = GetGoodFileName(cc.fileName)
+		self.ccsAndNodes[fname] = cc, node
+
+	def FromFileName(self, fname):
+		# Returns the container for 'fname', or None if unknown.
+		cc, node = self.ccsAndNodes.get(GetGoodFileName(fname), (None, None))
+		return cc
+
+	def Close(self):
+		for cc, node in self.ccsAndNodes.values():
+			try:
+				# Must close the node before closing the provider
+				# as node may make calls on provider (eg Reset breakpoints etc)
+				if node is not None:
+					node.Close()
+				cc._Close()
+			except pythoncom.com_error:
+				# Best effort - keep closing the remaining containers.
+				pass
+		self.ccsAndNodes = {}
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/dump.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/dump.py
new file mode 100644
index 0000000..2d6a10c9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/dump.py
@@ -0,0 +1,49 @@
+import sys, string
+import traceback
+from win32com.axdebug import axdebug
+from win32com.client.util import Enumerator
+import pythoncom
+
+def DumpDebugApplicationNode(node, level = 0):
+	"""Recursively dump a DebugApplicationNode - name info, document text,
+	then all children - indented by 'level'."""
+	spacer = " " * level
+	for desc, attr in [("Node Name", axdebug.DOCUMENTNAMETYPE_APPNODE),
+	                   ("Title", axdebug.DOCUMENTNAMETYPE_TITLE),
+	                   ("Filename", axdebug.DOCUMENTNAMETYPE_FILE_TAIL),
+	                   ("URL", axdebug.DOCUMENTNAMETYPE_URL),
+	                   ]:
+		try:
+			info = node.GetName(attr)
+		except pythoncom.com_error:
+			info = "<N/A>"
+		print "%s%s: %s" % (spacer, desc, info)
+	try:
+		doc = node.GetDocument()
+	except pythoncom.com_error:
+		doc = None
+	if doc:
+		doctext = doc.QueryInterface(axdebug.IID_IDebugDocumentText)
+		numLines, numChars = doctext.GetSize()
+#		text, attr = doctext.GetText(0, 20, 1)
+		text, attr = doctext.GetText(0, numChars, 1)
+		print "%sText is %s, %d bytes long" % (spacer, repr(text[:40]+"..."), len(text))
+	else:
+		print "%s%s" % (spacer, "<No document available>")
+	
+	for child in Enumerator(node.EnumChildren()):
+		DumpDebugApplicationNode(child, level+1)
+	
+def dumpall():
+	dm=pythoncom.CoCreateInstance(axdebug.CLSID_MachineDebugManager,None,pythoncom.CLSCTX_ALL, axdebug.IID_IMachineDebugManager)
+	e=Enumerator(dm.EnumApplications())
+	for app in e:
+		print "Application: %s" % app.GetName()
+		node = app.GetRootNode() # of type PyIDebugApplicationNode->PyIDebugDocumentProvider->PyIDebugDocumentInfo
+		DumpDebugApplicationNode(node)
+
+if __name__=='__main__':
+	try:
+		dumpall()
+	except:
+		# Diagnostic tool - show the error rather than dying silently.
+		traceback.print_exc()
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/expressions.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/expressions.py
new file mode 100644
index 0000000..9632b98
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/expressions.py
@@ -0,0 +1,156 @@
+import axdebug, gateways
+from util import _wrap, _wrap_remove, RaiseNotImpl
+import cStringIO, traceback
+from pprint import pprint
+from win32com.server.exception import COMException
+import winerror
+import string
+import sys
+
+# Given an object, return a nice string
+def MakeNiceString(ob):
+	stream = cStringIO.StringIO()
+	pprint(ob, stream)
+	return string.strip(stream.getvalue())
+
+class ProvideExpressionContexts(gateways.ProvideExpressionContexts):
+	# The gateway base class provides everything required here.
+	pass
+
+class ExpressionContext(gateways.DebugExpressionContext):
+	"""An expression context bound to a specific Python stack frame."""
+	def __init__(self, frame):
+		self.frame = frame
+	def ParseLanguageText(self, code, radix, delim, flags):
+		# Wrap a new (not yet started) Expression for the host.
+		return _wrap(Expression(self.frame, code, radix, delim, flags), axdebug.IID_IDebugExpression)
+	def GetLanguageInfo(self):
+		# Language name and engine CLSID.
+		return "Python", "{DF630910-1C1D-11d0-AE36-8C0F5E000000}"
+	
+class Expression(gateways.DebugExpression):
+	"""One expression evaluation, run in the namespaces of a stack frame."""
+	def __init__(self, frame, code, radix, delim, flags):
+		self.callback = None
+		self.frame = frame
+		self.code = str(code)
+		self.radix = radix
+		self.delim = delim
+		self.flags = flags
+		self.isComplete = 0
+		self.result=None
+		self.hresult = winerror.E_UNEXPECTED # until Start() runs.
+	def Start(self, callback):
+		# Evaluate (or, for statements, exec) the code in the frame's
+		# namespaces.  Errors are captured into self.result/self.hresult
+		# rather than propagated - this runs on behalf of the host.
+		try:
+			try:
+				try:
+					self.result = eval(self.code, self.frame.f_globals, self.frame.f_locals)
+				except SyntaxError:
+					# Not an expression - try it as a statement.
+					exec self.code in self.frame.f_globals, self.frame.f_locals
+					self.result = ""
+				self.hresult = 0
+			except:
+				l = traceback.format_exception_only(sys.exc_info()[0], sys.exc_info()[1])
+				# l is a list of strings with trailing "\n"
+				self.result = string.join(map(lambda s:s[:-1], l), "\n")
+				self.hresult = winerror.E_FAIL
+		finally:
+			self.isComplete = 1
+			callback.onComplete()
+	def Abort(self):
+		print "** ABORT **"
+		
+	def QueryIsComplete(self):
+		return self.isComplete
+		
+	def GetResultAsString(self):
+		# Returns (hresult, pretty-printed result).
+		return self.hresult, MakeNiceString(self.result)
+	
+	def GetResultAsDebugProperty(self):
+		result = _wrap(DebugProperty(self.code, self.result, None, self.hresult), axdebug.IID_IDebugProperty)
+		return self.hresult, result
+
+def MakeEnumDebugProperty(object, dwFieldSpec, nRadix, iid, stackFrame = None):
+	name_vals = []
+	if hasattr(object, "has_key"): # If it is a dict.
+		name_vals = object.items()
+		dictionary = object
+	elif hasattr(object, "__dict__"):  #object with dictionary, module
+		name_vals = object.__dict__.items()
+		dictionary = object.__dict__
+	infos = []
+	for name, val in name_vals:
+		infos.append(GetPropertyInfo(name, val, dwFieldSpec, nRadix, 0, dictionary, stackFrame))
+	return _wrap(EnumDebugPropertyInfo(infos), axdebug.IID_IEnumDebugPropertyInfo)
+
+def GetPropertyInfo(obname, obvalue, dwFieldSpec, nRadix, hresult=0, dictionary = None, stackFrame = None):
+	"""Build the (name, type, value, fullname, attributes, dbgprop) tuple
+	for one name/value pair, filling in only the fields requested by the
+	DBGPROP_INFO_* bits in dwFieldSpec (others stay None)."""
+	name = typ = value = fullname = attrib = dbgprop = None
+	if dwFieldSpec & axdebug.DBGPROP_INFO_VALUE:
+		value = MakeNiceString(obvalue)
+	if dwFieldSpec & axdebug.DBGPROP_INFO_NAME:
+		name = obname
+	if dwFieldSpec & axdebug.DBGPROP_INFO_TYPE:
+		if hresult:
+			typ = "Error"
+		else:
+			try:
+				typ = type(obvalue).__name__
+			except AttributeError:
+				typ = str(type(obvalue))
+	if dwFieldSpec & axdebug.DBGPROP_INFO_FULLNAME:
+		fullname = obname
+	if dwFieldSpec & axdebug.DBGPROP_INFO_ATTRIBUTES:
+		if hasattr(obvalue, "has_key") or hasattr(obvalue, "__dict__"): # If it is a dict or object
+			attrib = axdebug.DBGPROP_ATTRIB_VALUE_IS_EXPANDABLE
+		else:
+			attrib = 0
+	if dwFieldSpec & axdebug.DBGPROP_INFO_DEBUGPROP:
+		dbgprop = _wrap(DebugProperty(name, obvalue, None, hresult, dictionary, stackFrame), axdebug.IID_IDebugProperty)
+	return name, typ, value, fullname, attrib, dbgprop
+
+from win32com.server.util import ListEnumeratorGateway
+class EnumDebugPropertyInfo(ListEnumeratorGateway):
+	"""A class to expose a Python sequence as an EnumDebugPropertyInfo
+
+	Create an instance of this class passing a sequence (list, tuple, or
+	any sequence protocol supporting object) and it will automatically
+	support the EnumDebugPropertyInfo interface for the object.
+
+	"""
+	_public_methods_ = ListEnumeratorGateway._public_methods_ + ["GetCount"]
+	_com_interfaces_ = [ axdebug.IID_IEnumDebugPropertyInfo]
+	def GetCount(self):
+		return len(self._list_)
+	def _wrap(self, ob):
+		# Items are already plain info tuples - return them as-is.
+		return ob
+
+class DebugProperty:
+	_com_interfaces_ = [axdebug.IID_IDebugProperty]
+	_public_methods_ = ['GetPropertyInfo', 'GetExtendedInfo', 'SetValueAsString', 
+	                    'EnumMembers', 'GetParent'
+	]
+	def __init__(self, name, value, parent = None, hresult = 0, dictionary = None, stackFrame = None):
+		self.name = name
+		self.value = value
+		self.parent = parent
+		self.hresult = hresult
+		self.dictionary = dictionary
+		self.stackFrame = stackFrame
+
+	def GetPropertyInfo(self, dwFieldSpec, nRadix):
+		return GetPropertyInfo(self.name, self.value, dwFieldSpec, nRadix, self.hresult, dictionary, stackFrame)
+
+	def GetExtendedInfo(self): ### Note - not in the framework.
+		RaiseNotImpl("DebugProperty::GetExtendedInfo")
+
+	def SetValueAsString(self, value, radix):
+		if self.stackFrame and self.dictionary:
+			self.dictionary[self.name]= eval(value,self.stackFrame.f_globals, self.stackFrame.f_locals)
+		else:
+			RaiseNotImpl("DebugProperty::SetValueAsString")
+
+	def EnumMembers(self, dwFieldSpec, nRadix, iid):
+		# Returns IEnumDebugPropertyInfo
+		return MakeEnumDebugProperty(self.value, dwFieldSpec, nRadix, iid, self.stackFrame)
+
+	def GetParent(self):
+		# return IDebugProperty
+		RaiseNotImpl("DebugProperty::GetParent")
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/gateways.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/gateways.py
new file mode 100644
index 0000000..0cf26ba4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/gateways.py
@@ -0,0 +1,453 @@
+# Classes which describe interfaces.
+
+from win32com.server.exception import Exception
+from win32com.server.util import ListEnumeratorGateway
+from win32com.axdebug import axdebug
+from win32com.axdebug.util import RaiseNotImpl, _wrap
+import pythoncom
+import win32com.server.connect
+import winerror
+import string
+
class EnumDebugCodeContexts(ListEnumeratorGateway):
	"""COM enumerator exposing a Python sequence as IEnumDebugCodeContexts.

	Construct with any object supporting the sequence protocol; the shared
	ListEnumeratorGateway machinery supplies the enumeration methods.
	"""
	_com_interfaces_ = [axdebug.IID_IEnumDebugCodeContexts]
+
class EnumDebugStackFrames(ListEnumeratorGateway):
	"""COM enumerator exposing a Python sequence as IEnumDebugStackFrames.

	Construct with any object supporting the sequence protocol; the shared
	ListEnumeratorGateway machinery supplies the enumeration methods.
	"""
	_com_interfaces_ = [axdebug.IID_IEnumDebugStackFrames]
+
class EnumDebugApplicationNodes(ListEnumeratorGateway):
	"""A class to expose a Python sequence as an EnumDebugApplicationNodes

	Create an instance of this class passing a sequence (list, tuple, or
	any sequence protocol supporting object) and it will automatically
	support the EnumDebugApplicationNodes interface for the object.

	"""
	# Fixed: the docstring summary previously said "EnumDebugStackFrames",
	# an obvious copy-paste slip from the class above.
	_com_interfaces_ = [ axdebug.IID_IEnumDebugApplicationNodes ]
+
class EnumRemoteDebugApplications(ListEnumeratorGateway):
	"""COM enumerator exposing a Python sequence as IEnumRemoteDebugApplications."""
	_com_interfaces_ = [axdebug.IID_IEnumRemoteDebugApplications]
+
class EnumRemoteDebugApplicationThreads(ListEnumeratorGateway):
	"""COM enumerator exposing a Python sequence as IEnumRemoteDebugApplicationThreads."""
	_com_interfaces_ = [axdebug.IID_IEnumRemoteDebugApplicationThreads]
+
class DebugDocumentInfo:
	"""Base gateway for IDebugDocumentInfo - names and classifies a document.

	All methods raise E_NOTIMPL; subclasses override what they support.
	"""
	_public_methods_ = ["GetName", "GetDocumentClassId"]
	_com_interfaces_ = [axdebug.IID_IDebugDocumentInfo]
	def __init__(self):
		pass
	def GetName(self, dnt):
		"""Return one of the names of the document.

		dnt -- int, a DOCUMENTNAMETYPE constant selecting which name.
		"""
		RaiseNotImpl("GetName")
	def GetDocumentClassId(self):
		"""Return the document class as an IID object (or a string representing one)."""
		RaiseNotImpl("GetDocumentClassId")
+		
class DebugDocumentProvider(DebugDocumentInfo):
	"""Gateway for IDebugDocumentProvider - can instantiate the document it describes."""
	_public_methods_ = DebugDocumentInfo._public_methods_ + ["GetDocument"]
	_com_interfaces_ = DebugDocumentInfo._com_interfaces_ + [axdebug.IID_IDebugDocumentProvider]
	def GetDocument(self):
		# Result must be the IDebugDocument this provider describes.
		RaiseNotImpl("GetDocument")
+
class DebugApplicationNode(DebugDocumentProvider):
	"""IDebugDocumentProvider functionality plus a position within a project tree."""
	_public_methods_ = ["EnumChildren", "GetParent", "SetDocumentProvider",
	                    "Close", "Attach", "Detach"] + \
		DebugDocumentProvider._public_methods_
	_com_interfaces_ = [axdebug.IID_IDebugDocumentProvider] + \
		DebugDocumentProvider._com_interfaces_
	def __init__(self):
		DebugDocumentProvider.__init__(self)
	def EnumChildren(self):
		# Result is type PyIEnumDebugApplicationNodes
		RaiseNotImpl("EnumChildren")
	def GetParent(self):
		# Result is type PyIDebugApplicationNode
		RaiseNotImpl("GetParent")
	def SetDocumentProvider(self, pddp): # PyIDebugDocumentProvider pddp
		# void result.
		RaiseNotImpl("SetDocumentProvider")
	def Close(self):
		# void result.
		RaiseNotImpl("Close")
	def Attach(self, parent): # PyIDebugApplicationNode
		# void result.
		RaiseNotImpl("Attach")
	def Detach(self):
		# void result.
		RaiseNotImpl("Detach")
+
class DebugApplicationNodeEvents:
	"""Event interface for DebugApplicationNode object.
	"""
	# Fixed: onAttach is implemented below but was missing from the exposed
	# method list, so COM clients could never invoke it.
	_public_methods_ = ["onAddChild", "onRemoveChild", "onDetach", "onAttach"]
	_com_interfaces_ = [axdebug.IID_IDebugApplicationNodeEvents]
	def __init__(self):
		pass
	def onAddChild(self, child): # PyIDebugApplicationNode
		# void result.
		RaiseNotImpl("onAddChild")
	def onRemoveChild(self, child): # PyIDebugApplicationNode
		# void result.
		RaiseNotImpl("onRemoveChild")
	def onDetach(self):
		# void result.
		RaiseNotImpl("onDetach")
	def onAttach(self, parent): # PyIDebugApplicationNode
		# void result.
		RaiseNotImpl("onAttach")
+
class DebugDocument(DebugDocumentInfo):
	"""The base interface shared by all debug documents."""
	_public_methods_ = DebugDocumentInfo._public_methods_
	_com_interfaces_ = [axdebug.IID_IDebugDocument] + DebugDocumentInfo._com_interfaces_
+
class DebugDocumentText(DebugDocument):
	"""The interface to a text only debug document.
	"""
	_com_interfaces_ = [axdebug.IID_IDebugDocumentText] + \
		             DebugDocument._com_interfaces_
	_public_methods_ = ["GetDocumentAttributes", "GetSize",
	                    "GetPositionOfLine", "GetLineOfPosition", "GetText",
	                    "GetPositionOfContext", "GetContextOfPosition"] + \
		             DebugDocument._public_methods_
	def __init__(self):
		pass
	# IDebugDocumentText
	def GetDocumentAttributes(self):
		# Result is int (TEXT_DOC_ATTR)
		RaiseNotImpl("GetDocumentAttributes")
	def GetSize(self):
		# Result is (numLines, numChars)
		RaiseNotImpl("GetSize")
	def GetPositionOfLine(self, cLineNumber):
		# Result is int char position
		RaiseNotImpl("GetPositionOfLine")
	def GetLineOfPosition(self, charPos):
		# Result is int, int (lineNo, offset)
		RaiseNotImpl("GetLineOfPosition")
	def GetText(self, charPos, maxChars, wantAttr):
		"""Params
		charPos -- integer
		maxChars -- integer
		wantAttr -- Should the function compute attributes.

		Return value must be (string, attributes).  attributes may be
		None if(not wantAttr)
		"""
		RaiseNotImpl("GetText")
	def GetPositionOfContext(self, debugDocumentContext):
		"""Params
		debugDocumentContext -- a PyIDebugDocumentContext object.

		Return value must be (charPos, numChars)
		"""
		RaiseNotImpl("GetPositionOfContext")
	def GetContextOfPosition(self, charPos, maxChars):
		"""Params are integers.
		Return value must be PyIDebugDocumentContext object
		"""
		# Fixed: removed a stray debugging statement ("print self") that had
		# been left in this method.
		RaiseNotImpl("GetContextOfPosition")
+
class DebugDocumentTextExternalAuthor:
	"""Allows external editors to edit file-based debugger documents, and to
	notify the document when its source file has been changed.
	"""
	_public_methods_ = ["GetPathName", "GetFileName", "NotifyChanged"]
	_com_interfaces_ = [axdebug.IID_IDebugDocumentTextExternalAuthor]
	def __init__(self):
		pass
	def GetPathName(self):
		"""Return the full path (including file name) to the document's source file.

		Result must be (filename, fIsOriginalPath), where fIsOriginalPath is
		TRUE if the path refers to the original file for the document, and
		FALSE if it refers to a newly created temporary file.

		raise Exception(winerror.E_FAIL) if no source file can be created/determined.
		"""
		RaiseNotImpl("GetPathName")

	def GetFileName(self):
		"""Return just the name of the document, with no path information.  (Used for "Save As...")

		Result is a string
		"""
		RaiseNotImpl("GetFileName")

	def NotifyChanged(self):
		"""Notify the host that the document's source file has been saved and
		that its contents should be refreshed.
		"""
		RaiseNotImpl("NotifyChanged")
+
+
class DebugDocumentTextEvents:
	"""Event sink gateway for IDebugDocumentTextEvents - document change notifications."""
	_public_methods_ = ["onDestroy", "onInsertText", "onRemoveText",
	                    "onReplaceText", "onUpdateTextAttributes",
	                    "onUpdateDocumentAttributes"]
	_com_interfaces_ = [axdebug.IID_IDebugDocumentTextEvents]
	def __init__(self):
		pass
	def onDestroy(self):
		# Result is void.
		RaiseNotImpl("onDestroy")
	def onInsertText(self, cCharacterPosition, cNumToInsert):
		# Result is void.
		RaiseNotImpl("onInsertText")
	def onRemoveText(self, cCharacterPosition, cNumToRemove):
		# Result is void.
		RaiseNotImpl("onRemoveText")
	def onReplaceText(self, cCharacterPosition, cNumToReplace):
		# Result is void.
		RaiseNotImpl("onReplaceText")
	def onUpdateTextAttributes(self, cCharacterPosition, cNumToUpdate):
		# Result is void.
		RaiseNotImpl("onUpdateTextAttributes")
	def onUpdateDocumentAttributes(self,textdocattr): # TEXT_DOC_ATTR
		# Result is void.
		RaiseNotImpl("onUpdateDocumentAttributes")
+
class DebugDocumentContext:
	"""Gateway for IDebugDocumentContext - a span of a document's content."""
	_public_methods_ = ['GetDocument', 'EnumCodeContexts']
	_com_interfaces_ = [axdebug.IID_IDebugDocumentContext]
	def __init__(self):
		pass
	def GetDocument(self):
		"""Return value must be a PyIDebugDocument object"""
		RaiseNotImpl("GetDocument")
	def EnumCodeContexts(self):
		"""Return value must be a PyIEnumDebugCodeContexts object"""
		RaiseNotImpl("EnumCodeContexts")
+
+
class DebugCodeContext:
	"""Gateway for IDebugCodeContext - a position in executable code."""
	_public_methods_ = ['GetDocumentContext', 'SetBreakPoint']
	_com_interfaces_ = [axdebug.IID_IDebugCodeContext]
	def __init__(self):
		pass
	def GetDocumentContext(self):
		"""Return value must be a PyIDebugDocumentContext object"""
		RaiseNotImpl("GetDocumentContext")
	def SetBreakPoint(self, bps):
		"""bps -- an integer with BREAKPOINT_STATE flags."""
		RaiseNotImpl("SetBreakPoint")
+
+
class DebugStackFrame:
	"""Abstraction representing a logical stack frame on the stack of a thread."""
	_public_methods_ = [ 'GetCodeContext', 'GetDescriptionString', 'GetLanguageString', 'GetThread', 'GetDebugProperty']
	_com_interfaces_ = [ axdebug.IID_IDebugStackFrame ]
	def __init__(self):
		pass
	def GetCodeContext(self):
		"""Returns the current code context associated with the stack frame.

		Return value must be a IDebugCodeContext object
		"""
		RaiseNotImpl("GetCodeContext")
	def GetDescriptionString(self, fLong):
		"""Returns a textual description of the stack frame.

		fLong -- A flag indicating if the long name is requested.
		"""
		RaiseNotImpl("GetDescriptionString")
	def GetLanguageString(self, fLong = 0):
		"""Returns a short or long textual description of the language.

		fLong -- A flag indicating if the long name is requested.
		"""
		# Fixed: the fLong flag was documented above (and is accepted by the
		# concrete subclass in stackframe.py) but was missing from this
		# signature; added with a default so existing callers still work.
		RaiseNotImpl("GetLanguageString")
	def GetThread(self):
		""" Returns the thread associated with this stack frame.

		Result must be a IDebugApplicationThread
		"""
		RaiseNotImpl("GetThread")
	def GetDebugProperty(self):
		RaiseNotImpl("GetDebugProperty")
+
+
class DebugDocumentHost:
	"""The interface from the IDebugDocumentHelper back to the smart host or
	language engine, exposing host-specific functionality such as syntax
	coloring.
	"""
	_public_methods_ = ["GetDeferredText", "GetScriptTextAttributes",
	                    "OnCreateDocumentContext", "GetPathName",
	                    "GetFileName", "NotifyChanged"]
	_com_interfaces_ = [axdebug.IID_IDebugDocumentHost]
	def __init__(self):
		pass
	def GetDeferredText(self, dwTextStartCookie, maxChars, bWantAttr):
		RaiseNotImpl("GetDeferredText")

	def GetScriptTextAttributes(self, codeText, delimterText, flags):
		# Result must be an attribute sequence of the same "length" as the code.
		RaiseNotImpl("GetScriptTextAttributes")

	def OnCreateDocumentContext(self):
		# Result must be a PyIUnknown
		RaiseNotImpl("OnCreateDocumentContext")

	def GetPathName(self):
		# Result must be (string, int) where the int is a BOOL:
		# - TRUE if the path refers to the original file for the document.
		# - FALSE if the path refers to a newly created temporary file.
		# - raise Exception(scode=E_FAIL) if no source file can be created/determined.
		RaiseNotImpl("GetPathName")

	def GetFileName(self):
		# Result is a string with just the name of the document, no path information.
		RaiseNotImpl("GetFileName")

	def NotifyChanged(self):
		RaiseNotImpl("NotifyChanged")
+
+# Additional gateway related functions.
+
class DebugDocumentTextConnectServer:
	"""Connection-point plumbing so clients can subscribe to
	IDebugDocumentTextEvents.  Only that single event interface is supported.
	"""
	_public_methods_ = win32com.server.connect.IConnectionPointContainer_methods + win32com.server.connect.IConnectionPoint_methods
	_com_interfaces_ = [pythoncom.IID_IConnectionPoint, pythoncom.IID_IConnectionPointContainer]
	# IConnectionPoint interfaces
	def __init__(self):
		self.cookieNo = -1
		self.connections = {}
	def EnumConnections(self):
		RaiseNotImpl("EnumConnections")
	def GetConnectionInterface(self):
		RaiseNotImpl("GetConnectionInterface")
	def GetConnectionPointContainer(self):
		return _wrap(self)
	def Advise(self, pUnk):
		# Creates a connection to the client.  Simply allocate a new cookie,
		# find the clients interface, and store it in a dictionary.
		interface = pUnk.QueryInterface(axdebug.IID_IDebugDocumentTextEvents,1)
		self.cookieNo = self.cookieNo + 1
		self.connections[self.cookieNo] = interface
		return self.cookieNo
	def Unadvise(self, cookie):
		# Destroy a connection - simply delete interface from the map.
		try:
			del self.connections[cookie]
		except KeyError:
			# Fixed: the COM exception was previously *returned* instead of
			# raised, so callers never saw the failure.
			raise Exception(scode=winerror.E_UNEXPECTED)
	# IConnectionPointContainer interfaces
	def EnumConnectionPoints(self):
		RaiseNotImpl("EnumConnectionPoints")
	def FindConnectionPoint(self, iid):
		# Find a connection we support.  Only support the single event interface.
		if iid==axdebug.IID_IDebugDocumentTextEvents:
			return _wrap(self)
		raise Exception(scode=winerror.E_NOINTERFACE) # ??
+
class RemoteDebugApplicationEvents:
	"""Event sink gateway for IRemoteDebugApplicationEvents."""
	_public_methods_ = ["OnConnectDebugger","OnDisconnectDebugger","OnSetName","OnDebugOutput","OnClose","OnEnterBreakPoint","OnLeaveBreakPoint","OnCreateThread","OnDestroyThread","OnBreakFlagChange"]
	_com_interfaces_ = [axdebug.IID_IRemoteDebugApplicationEvents]
	def OnConnectDebugger(self, appDebugger):
		"""appDebugger -- a PyIApplicationDebugger
		"""
		RaiseNotImpl("OnConnectDebugger")
	def OnDisconnectDebugger(self):
		RaiseNotImpl("OnDisconnectDebugger")
	def OnSetName(self, name):
		RaiseNotImpl("OnSetName")
	def OnDebugOutput(self, string):
		RaiseNotImpl("OnDebugOutput")
	def OnClose(self):
		RaiseNotImpl("OnClose")
	def OnEnterBreakPoint(self, rdat):
		"""rdat -- PyIRemoteDebugApplicationThread
		"""
		RaiseNotImpl("OnEnterBreakPoint")
	def OnLeaveBreakPoint(self, rdat):
		"""rdat -- PyIRemoteDebugApplicationThread
		"""
		RaiseNotImpl("OnLeaveBreakPoint")
	def OnCreateThread(self, rdat):
		"""rdat -- PyIRemoteDebugApplicationThread
		"""
		RaiseNotImpl("OnCreateThread")
	def OnDestroyThread(self, rdat):
		"""rdat -- PyIRemoteDebugApplicationThread
		"""
		RaiseNotImpl("OnDestroyThread")
	def OnBreakFlagChange(self, abf, rdat):
		"""abf -- int - one of the axdebug.APPBREAKFLAGS constants
		rdat -- PyIRemoteDebugApplicationThread
		"""
		# Fixed: the RaiseNotImpl call was accidentally trapped inside the
		# docstring above, which made this handler a silent no-op unlike
		# every sibling method.
		RaiseNotImpl("OnBreakFlagChange")
class DebugExpressionContext:
	"""Gateway for IDebugExpressionContext - parses expressions for later evaluation."""
	_public_methods_ = ["ParseLanguageText", "GetLanguageInfo"]
	_com_interfaces_ = [axdebug.IID_IDebugExpressionContext]
	def __init__(self):
		pass
	def ParseLanguageText(self, code, radix, delim, flags):
		"""result is IDebugExpression for the given source text."""
		RaiseNotImpl("ParseLanguageText")
	def GetLanguageInfo(self):
		"""result is (string langName, iid langId)"""
		RaiseNotImpl("GetLanguageInfo")
+
class DebugExpression:
	"""Gateway for IDebugExpression - an asynchronously evaluated expression."""
	_public_methods_ = ["Start", "Abort", "QueryIsComplete", "GetResultAsString", "GetResultAsDebugProperty"]
	_com_interfaces_ = [axdebug.IID_IDebugExpression]
	def Start(self, callback):
		"""Begin evaluating the expression.

		callback -- an IDebugExpressionCallback notified on completion.
		result - void
		"""
		RaiseNotImpl("Start")
	def Abort(self):
		"""Cancel an in-progress evaluation.

		no params
		result -- void
		"""
		RaiseNotImpl("Abort")
	def QueryIsComplete(self):
		"""Check whether evaluation has finished.

		no params
		result -- void
		"""
		RaiseNotImpl("QueryIsComplete")
	def GetResultAsString(self):
		RaiseNotImpl("GetResultAsString")
	def GetResultAsDebugProperty(self):
		RaiseNotImpl("GetResultAsDebugProperty")
+
class ProvideExpressionContexts:
	"""Gateway for IProvideExpressionContexts."""
	_public_methods_ = ["EnumExpressionContexts"]
	_com_interfaces_ = [axdebug.IID_IProvideExpressionContexts]
	def EnumExpressionContexts(self):
		RaiseNotImpl("EnumExpressionContexts")
+		
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/stackframe.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/stackframe.py
new file mode 100644
index 0000000..1623608
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/stackframe.py
@@ -0,0 +1,147 @@
+"""Support for stack-frames.
+
+Implements a nearly complete wrapper for a stack frame.
+"""
+import sys
+from util import _wrap, RaiseNotImpl
+import expressions, gateways, axdebug, winerror
+import pythoncom
+from win32com.server.exception import COMException
+import repr, string
+
+from util import trace
+#def trace(*args):
+#	pass
+
class EnumDebugStackFrames(gateways.EnumDebugStackFrames):
	"""A class that given a debugger object, can return an enumerator
	of DebugStackFrame objects.

	Each enumerated item is the 5-tuple IEnumDebugStackFrames::Next
	expects: (stack frame, address min, address lim, fFinal, final object).
	"""
	def __init__(self, debugger):
		# debugger -- object exposing .currentframe and .codeContainerProvider
		infos = []
		frame = debugger.currentframe
#		print "Stack check"		
		while frame:
#			print " Checking frame", frame.f_code.co_filename, frame.f_lineno-1, frame.f_trace,
			# Get a DebugCodeContext for the stack frame.  If we fail, then it
			# is not debuggable, and therefore not worth displaying.
			cc = debugger.codeContainerProvider.FromFileName(frame.f_code.co_filename)
			if cc is not None:
				try:
					# '__axstack_address__' is presumably planted in the
					# frame's locals elsewhere in the package - TODO confirm.
					address = frame.f_locals['__axstack_address__']
				except KeyError:
#					print "Couldnt find stack address for",frame.f_code.co_filename, frame.f_lineno-1
					# Use this one, even tho it is wrong :-(
					address = axdebug.GetStackAddress()
				# f_lineno is 1-based; the code containers use 0-based lines.
				frameInfo = DebugStackFrame(frame, frame.f_lineno-1, cc), address, address+1, 0, None
				infos.append(frameInfo)
#				print "- Kept!"
#			else:
#				print "- rejected"
			frame = frame.f_back

		gateways.EnumDebugStackFrames.__init__(self, infos, 0)
#	def __del__(self):
#		print "EnumDebugStackFrames dieing"

	def Next(self, count):
		# Redundant pass-through to the base class; kept as a debugging hook.
		return gateways.EnumDebugStackFrames.Next(self, count)
	
#	def _query_interface_(self, iid):
#		from win32com.util import IIDToInterfaceName
#		print "EnumDebugStackFrames QI with %s (%s)" % (IIDToInterfaceName(iid), str(iid))
#		return 0
	def _wrap(self, obj):
		# This enum returns a tuple, with 2 com objects in it - wrap each of
		# those for COM, leaving the plain integer members untouched.
		obFrame, min, lim, fFinal, obFinal = obj
		obFrame = _wrap(obFrame, axdebug.IID_IDebugStackFrame)
		if obFinal:
			obFinal = _wrap(obFinal, pythoncom.IID_IUnknown)
		return obFrame, min, lim, fFinal, obFinal
+
class DebugStackFrame(gateways.DebugStackFrame):
	"""Concrete IDebugStackFrame gateway wrapping a live Python frame object."""
	def __init__(self, frame, lineno, codeContainer):
		self.frame = frame
		self.lineno = lineno
		self.codeContainer = codeContainer
		self.expressionContext = None
	def _query_interface_(self, iid):
		# Serve IDebugExpressionContext lazily, caching the wrapped object so
		# repeated QI calls hand back the identical COM pointer.
		if iid==axdebug.IID_IDebugExpressionContext:
			if self.expressionContext is None:
				self.expressionContext = _wrap(expressions.ExpressionContext(self.frame), axdebug.IID_IDebugExpressionContext)
			return self.expressionContext
		# Anything else - let the framework handle (or reject) the QI.
		return 0
	#
	# The following need implementation
	def GetThread(self):
		""" Returns the thread associated with this stack frame.

		Result must be a IDebugApplicationThread
		"""
		RaiseNotImpl("GetThread")

	#
	# The following are usefully implemented
	def GetCodeContext(self):
		# Translate our line number into a character offset within the code
		# container, then hand back the code context at that offset.
		charPos = self.codeContainer.GetPositionOfLine(self.lineno)
		return self.codeContainer.GetCodeContextAtPosition(charPos)
	def GetDescriptionString(self, fLong):
		# The long form would prepend the filename, but that is currently
		# disabled - only the code object's name is ever returned.
		funcName = self.frame.f_code.co_name
		if funcName:
			return funcName
		return "<lambda>"
	def GetLanguageString(self, fLong):
		if not fLong:
			return "Python"
		return "Python ActiveX Scripting Engine"
	def GetDebugProperty(self):
		return _wrap(StackFrameDebugProperty(self.frame), axdebug.IID_IDebugProperty)
+
class DebugStackFrameSniffer:
	"""Exposes IDebugStackFrameSniffer by enumerating the debugger's frames."""
	_public_methods_ = ["EnumStackFrames"]
	_com_interfaces_ = [axdebug.IID_IDebugStackFrameSniffer]
	def __init__(self, debugger):
		self.debugger = debugger
		trace("DebugStackFrameSniffer instantiated")
	def EnumStackFrames(self):
		# Build a fresh enumerator over the debugger's current call stack.
		trace("DebugStackFrameSniffer.EnumStackFrames called")
		return _wrap(EnumDebugStackFrames(self.debugger), axdebug.IID_IEnumDebugStackFrames)
+
+# A DebugProperty for a stack frame.
+class StackFrameDebugProperty:
+	_com_interfaces_ = [axdebug.IID_IDebugProperty]
+	_public_methods_ = ['GetPropertyInfo', 'GetExtendedInfo', 'SetValueAsString', 
+	                    'EnumMembers', 'GetParent'
+	]
+	def __init__(self, frame):
+		self.frame = frame
+		
+	def GetPropertyInfo(self, dwFieldSpec, nRadix):
+		RaiseNotImpl("StackFrameDebugProperty::GetPropertyInfo")
+	def GetExtendedInfo(self): ### Note - not in the framework.
+		RaiseNotImpl("StackFrameDebugProperty::GetExtendedInfo")
+
+	def SetValueAsString(self, value, radix):
+		#
+		RaiseNotImpl("DebugProperty::SetValueAsString")
+		
+	def EnumMembers(self, dwFieldSpec, nRadix, iid):
+		print "EnumMembers", dwFieldSpec, nRadix, iid
+		import expressions
+		return expressions.MakeEnumDebugProperty(self.frame.f_locals, dwFieldSpec, nRadix, iid, self.frame)
+
+	def GetParent(self):
+		# return IDebugProperty
+		RaiseNotImpl("DebugProperty::GetParent")
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/util.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/util.py
new file mode 100644
index 0000000..666c772
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axdebug/util.py
@@ -0,0 +1,115 @@
+# Utility function for wrapping objects.  Centralising allows me to turn
+# debugging on and off for the entire package in a single spot.
+
+import sys
+import win32com.server.util
+from win32com.server.exception import Exception
+import winerror
+import win32api
+import os
+
# Debugging for the whole package is controlled by the mere presence of the
# DEBUG_AXDEBUG environment variable (its value is irrelevant).
debugging = 0
if "DEBUG_AXDEBUG" in os.environ:
	debugging = 1
+
+def trace(*args):
+	if not debugging: return
+	print str(win32api.GetCurrentThreadId()) + ":",
+	for arg in args:
+		print arg,
+	print
+
+# The AXDebugging implementation assumes that the returned COM pointers are in
+# some cases identical.  Eg, from a C++ perspective:
+# p->GetSomeInterface( &p1 );
+# p->GetSomeInterface( &p2 );
+# p1==p2
+# By default, this is _not_ true for Python.
+# (Now this is only true for Document objects, and Python
+# now does ensure this.
+
# Map of every object we have wrapped (debugging aid - see _dump_wrapped).
all_wrapped = {} 

def _wrap_nodebug(object, iid):
	# Plain wrap - no tracing dispatcher.
	return win32com.server.util.wrap(object, iid)

def _wrap_debug(object, iid):
	# Wrap with the win32trace dispatcher so every COM call is logged.
	import win32com.server.policy
	dispatcher = win32com.server.policy.DispatcherWin32trace
	return win32com.server.util.wrap(object, iid, useDispatcher = dispatcher)

# Select the package-wide wrap helper once, at import time.
_wrap = _wrap_nodebug
if debugging:
	_wrap = _wrap_debug
+
+def _wrap_remove(object, iid = None):
+	# Old - no longer used or necessary!
+	return
+
+def _dump_wrapped():
+	from win32com.server.util import unwrap
+	print "Wrapped items:"
+	for key, items in all_wrapped.items():
+		print key,
+		try:
+			ob = unwrap(key)
+			print ob, sys.getrefcount(ob)
+		except:
+			print "<error>"
+
+
+def RaiseNotImpl(who = None):
+	if who is not None:
+		print "********* Function %s Raising E_NOTIMPL  ************" % (who)
+
+	# Print a sort-of "traceback", dumping all the frames leading to here.		
+	try:
+		1/0
+	except:
+		frame = sys.exc_info()[2].tb_frame
+	while frame:
+		print "File: %s, Line: %d" % (frame.f_code.co_filename, frame.f_lineno)
+		frame = frame.f_back
+	
+	# and raise the exception for COM
+	raise Exception(scode=winerror.E_NOTIMPL)
+		
+
+import win32com.server.policy
class Dispatcher(win32com.server.policy.DispatcherWin32trace):
	"""Debug dispatcher which prints every Invoke (and any failure) via win32trace."""
	def __init__(self, policyClass, object):
		# NOTE(review): calls DispatcherTrace.__init__ directly, bypassing
		# the immediate base DispatcherWin32trace.__init__ - presumably
		# because win32traceutil (imported below) performs the win32trace
		# setup itself.  Confirm before "fixing" this.
		win32com.server.policy.DispatcherTrace.__init__(self, policyClass, object)
		import win32traceutil # Sets up everything.
#		print "Object with win32trace dispatcher created (object=%s)" % `object`

	def _QueryInterface_(self, iid):
		# Straight delegation; re-enable the commented trace to log
		# unsupported IIDs while debugging.
		rc = win32com.server.policy.DispatcherBase._QueryInterface_(self, iid)
#		if not rc:
#			self._trace_("in _QueryInterface_ with unsupported IID %s (%s)\n" % (IIDToInterfaceName(iid),iid))
		return rc

	def _Invoke_(self, dispid, lcid, wFlags, args):
		print "In Invoke with", dispid, lcid, wFlags, args, "with object",self.policy._obj_
		try:
			rc = win32com.server.policy.DispatcherBase._Invoke_(self, dispid, lcid, wFlags, args)
#			print "Invoke of", dispid, "returning", rc
			return rc
		except Exception:
			# This is win32com.server.exception.Exception (imported at the
			# top of this module), i.e. a deliberate COM error - report it
			# and swallow (returning None to the caller).
			t, v, tb = sys.exc_info()
			tb = None # A cycle
			scode = v.scode
			try:
				desc = " (" + str(v.description) + ")"
			except AttributeError:
				desc = ""
			print "*** Invoke of %s raised COM exception 0x%x%s" % (dispid, scode, desc)
		except:
			# Any non-COM error: dump the traceback and re-raise unchanged.
			print "*** Invoke of %s failed:" % dispid
			typ, val, tb = sys.exc_info()
			import traceback
			traceback.print_exception(typ, val, tb)
			raise typ, val, tb
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/CreateObject.asp b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/CreateObject.asp
new file mode 100644
index 0000000..e773ea9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/CreateObject.asp
@@ -0,0 +1,19 @@
+<HTML>

+

+<SCRIPT Language="Python" RUNAT=Server>

+

+# Just for the sake of the demo, our Python script engine

+# will create a Python.Interpreter COM object, and call that.

+

+# This is completely useless, as the Python Script Engine is

+# completely normal Python, and ASP does not impose restrictions, so

+# there is nothing the COM object can do that we can not do natively.

+

+o = Server.CreateObject("Python.Interpreter")

+

+Response.Write("Python says 1+1=" + str(o.Eval("1+1")))

+

+</SCRIPT>

+

+</HTML>

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/caps.asp b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/caps.asp
new file mode 100644
index 0000000..042d7bf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/caps.asp
@@ -0,0 +1,52 @@
+<%@ Language=Python %>

+<HTML>

+

+<HEAD>

+

+<BODY BACKGROUND="/samples/images/backgrnd.gif">

+

+<TITLE>Python test</TITLE>

+

+</HEAD>

+

+<BODY BGCOLOR="FFFFFF">

+

+<SCRIPT Language="Python" RUNAT=Server>

+# NOTE that the <% tags below execute _before_ these tags!

+Response.Write("Hello from Python<P>")

+Response.Write("Browser is "+bc.browser)

+import win32api # Should be no problem using win32api in ASP pages.

+Response.Write("<p>Win32 username is "+win32api.GetUserName())

+</SCRIPT>

+

+<BODY BGCOLOR="FFFFFF">

+

+<% 

+import sys

+print sys.path

+from win32com.axscript.asputil import *

+print "Hello"

+print "There"

+print "How are you"

+%>

+

+<%bc = Server.CreateObject("MSWC.BrowserType")%>

+<BODY BGCOLOR="FFFFFF">

+<table border=1> 

+<tr><td>Browser</td><td> <%=bc.browser %> 

+<tr><td>Version</td><td> <%=bc.version %> </td></TR> 

+<tr><td>Frames</td><td> 

+<%Response.Write( iif(bc.frames, "TRUE", "FALSE")) %></td></TR> 

+<tr><td>Tables</td><td> 

+<%Response.Write( iif (bc.tables, "TRUE", "FALSE")) %></td></TR> 

+<tr><td>BackgroundSounds</td><td> 

+<%Response.Write( iif(bc.BackgroundSounds, "TRUE", "FALSE"))%></td></TR> 

+<tr><td>VBScript</td><td> 

+<%Response.Write( iif(bc.vbscript, "TRUE", "FALSE"))%></td></TR> 

+<tr><td>JavaScript</td><td> 

+<%Response.Write( iif(bc.javascript, "TRUE", "FALSE"))%></td></TR> 

+

+</table> 

+

+</body>

+</html>

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test.asp b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test.asp
new file mode 100644
index 0000000..8ef256a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test.asp
@@ -0,0 +1,4 @@
+<%@ language=python%>

+<html>

+<%Response.Redirect("test1.html")%>

+</html>

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test.html b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test.html
new file mode 100644
index 0000000..0b7f0559
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test.html
@@ -0,0 +1,10 @@
+<html>
+<head>
+<body>
+GOT There
+<script language=javascript>
+location.href ="http://192.168.0.1/Python/interrupt/test.asp"
+</script>
+</body>
+</head>
+</html>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test1.asp b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test1.asp
new file mode 100644
index 0000000..218422f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test1.asp
@@ -0,0 +1,6 @@
+<%@ language =Python%>

+<html>

+<head>

+<%Response.Redirect("test.html")%>

+</head>

+</html>

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test1.html b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test1.html
new file mode 100644
index 0000000..6f1d9b2f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/interrupt/test1.html
@@ -0,0 +1,11 @@
+<html>
+<head>
+<body>
+GOT HERE
+<script language=javascript>
+location.href ="http://192.168.0.1/Python/interrupt/test1.asp"
+</script>
+</body>
+</head>
+</html>
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/tut1.asp b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/tut1.asp
new file mode 100644
index 0000000..4d7dc9d1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/asp/tut1.asp
@@ -0,0 +1,11 @@
+<HTML>

+

+<SCRIPT Language="Python" RUNAT=Server>

+

+for i in range(3,8):

+  Response.Write("<FONT SIZE=%d>Hello World!!<BR>" % i)

+

+</SCRIPT>

+

+</HTML>

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/CHARTPY.HTM b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/CHARTPY.HTM
new file mode 100644
index 0000000..4bba12c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/CHARTPY.HTM
@@ -0,0 +1,180 @@
+<!--
+original Author:	Bill Rollins
+adapted for python by aaron watters
+Purpose: This HTML example shows how to use Microsoft's ActiveX Chart control.
+	Properties of the control are changed using sub procedures called
+	by radio buttons and check boxes. The AboutBox method is called from
+	a command button.
+Created: 3/16/96
+Edited:	3/19/96 - VBScript code edited so there is only one procedure for each property.
+	3/23/96 - Font sizes changed in title. Added text to the Notes section.
+            6/01/96 - Change OBJECT tag to match E3 Beta requirements
+-->
+
+
+<HTML>
+<HEAD>
+<TITLE>ActiveX / VBScript Chart Example</TITLE>
+</HEAD>
+
+<BODY BGCOLOR=#FFFFCC TEXT=#000000>
+
+<B><FONT SIZE=6>Chart Example</FONT></B><BR>
+<FONT SIZE=2>You must be running Microsoft Internet Explorer 3.0 and have the <A HREF ="http://microsoft.saltmine.com/isapi/activexisv/prmgallery/gallery-activex-info.idc?ID=162">Microsoft ActiveX Chart control</A> installed to view this page.</FONT><BR>
+<P>
+The chart control enables you to draw charts. The chart's types and styles are properties of the control. The chart has one method, AboutBox. The chart generates no events.
+
+<HR>
+
+<OBJECT
+	classid="clsid:FC25B780-75BE-11CF-8B01-444553540000"
+	CODEBASE="http://activex.microsoft.com/controls/iexplorer/iechart.ocx#Version=4,70,0,1161"
+    	TYPE="application/x-oleobject"
+	id=Chart1
+	width=200
+	height=200
+        align=left
+	hspace=0
+	vspace=0
+>
+
+<param name="_extentX" value="300">
+<param name="_extentY" value="150">
+<param name="ChartStyle" value="0">
+<param name="ChartType" value="4">
+<param name="hgridStyle" value="0">
+<param name="vgridStyle" value="0">
+<param name="colorscheme" value="0">
+<param name="rows" value="2">
+<param name="columns" value="4">
+<param name="data[0][0]" value="30">
+<param name="data[0][1]" value="2">
+<param name="data[0][2]" value="20">
+<param name="data[0][3]" value="40">
+<param name="data[1][0]" value="15">
+<param name="data[1][1]" value="33">
+<param name="data[1][2]" value="21">
+<param name="data[1][3]" value="45">
+<param name="BackStyle" value="1">
+
+
+</object>
+
+
+<SCRIPT LANGUAGE="Python">
+<!-- ' This prevents script from being displayed in browsers that don't support the SCRIPT tag
+# OPTION EXPLICIT (?)
+	
+# Calls the AboutBox Method. This displays the Chart Object About Box
+def DoChartAboutBox():
+		ax.Chart1.AboutBox()
+	
+# Changes the type of chart. WhatType is passed as a value (0-5) when one of the Chart Type radio buttons is selected
+def DoChartType(WhatType):
+		ax.Chart1.ChartType = WhatType
+	
+# Turns horizontal gridlines on or off depending on value of chkHorizontal checkbox
+def DoHorizontalGrid():
+                print ax.chkHorizontal.Checked
+		if ax.chkHorizontal.Checked:
+			ax.Chart1.HGridStyle = 1
+		else:
+			ax.Chart1.HGridStyle = 0
+	
+# Sets the background of the chart to Opaque or Transparent
+def DoBackground(intBackGround):
+		ax.Chart1.BackStyle = intBackGround
+
+# Turns vertical gridlines on or off depending on value of chkVertical checkbox
+def DoVerticalGrid():
+		if ax.chkVertical.Checked:	
+			ax.Chart1.VGridStyle = 1
+		else:
+			ax.Chart1.VGridStyle = 0
+-->
+</SCRIPT>
+
+
+
+<TABLE BORDER = 0 XBORDER=5 BGCOLOR="#FFFFCC" WIDTH=300 ALIGN=LEFT>
+<TR><TD colspan=2 BGCOLOR=NAVY ALIGN=CENTER><FONT COLOR=FFFFCC>Chart Type</TR>
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(0)"> Simple Pie </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(11)"> Simple Column </TD></TR>
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(1)"> Pie with Wedge Out </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(12)"> Stacked Column </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(2)"> Simple Point </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(13)"> Full Column </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(3)"> Stacked Point </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(14)"> Simple Bar </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(4)"> Full Point </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(15)"> Stacked Bar </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(5)"> Simple Line </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(16)"> Full Bar </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(6)"> Stacked Line </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(17)"> HLC Stock </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(7)"> Full Line </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(18)"> HLC Stock WSJ</TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(8)"> Simple Area </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(19)"> OHLC Stock </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(9)"> Stack Area </TD>
+<TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(20)"> OHLC Stock WSJ </TD></TR>
+
+<TR><TD><INPUT TYPE=RADIO NAME=ChartType onClick="DoChartType(10)"> Full Area </TD></TR>
+</TABLE>
+
+
+
+
+
+<TABLE XBORDER=5 WIDTH=125 BGCOLOR="#FFFFCC" ALIGN=LEFT>
+<TR><TD BGCOLOR=NAVY ALIGN=CENTER><FONT COLOR=FFFFCC>Gridlines</TR>
+<TR><TD><INPUT TYPE=CHECKBOX NAME="chkHorizontal" onClick="DoHorizontalGrid()">Horizontal</TD></TR>
+<TR><TD><INPUT TYPE=CHECKBOX NAME="chkVertical" onClick="DoVerticalGrid()">Vertical</TD></TR>
+<TR><TD BGCOLOR=NAVY ALIGN=CENTER><FONT COLOR=FFFFCC>Background</TR>
+<TR><TD><INPUT TYPE=RADIO NAME=BackStyle onClick="DoBackground(1)">Opaque</TD></TR>
+<TR><TD><INPUT TYPE=RADIO NAME=BackStyle onClick="DoBackground(0)">Transparent</TD></TR>
+
+</TABLE>
+
+<BR CLEAR=ALL>
+<BR>
+
+<INPUT TYPE=BUTTON VALUE="About Chart Control" NAME="cdmChartAboutBox" onClick="DoChartAboutBox()">
+
+<BR CLEAR=ALL>
+<BR><BR>
+
+<HR>
+<B><FONT SIZE=4>Notes</FONT></B><BR>
+The chart's properties on this page are changed by selecting the various radio buttons and check boxes. The OnClick event of these intrinsic controls calls VBScript procedures that change the chart properties.<BR>
+<P>
+The About Chart Control command button calls the chart's AboutBox method.<BR>
+<P>
+To view the source code for this page, select <SAMP>Source</SAMP> from the <SAMP>View</SAMP> menu.<BR>
+<P>
+If you have any questions or comments about this example, please send them to <A HREF ="mailto:billr@smart.net">billr@smart.net</A>. We would appreciate feedback and would like to hear about other developers' experiences with these new tools.<BR>
+
+
+<HR>
+
+</BODY>
+
+
+
+<ADDRESS>
+<FONT SIZE=2>
+&copy; 1996 Rollins & Associates, Inc.<BR>
+Page last updated 08/28/96<BR>
+Please send comments to <A HREF ="mailto:billr@smart.net">billr@smart.net</A><BR>
+</FONT>
+</ADDRESS>
+
+</HTML>
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/FOO.HTM b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/FOO.HTM
new file mode 100644
index 0000000..840672f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/FOO.HTM
@@ -0,0 +1,46 @@
+<HTML>
+<BODY>
+Lets try this out:
+
+<FORM>
+<INPUT NAME="Button1" TYPE="Button" OnClick="foo1" LANGUAGE="VBScript">
+<FORM METHOD="GET" NAME="MyForm">
+<INPUT TYPE="TEXT" NAME="Text1" SIZE=25>
+<INPUT TYPE="SUBMIT">
+<INPUT NAME="Button2" TYPE="Button" VALUE="Hi" OnClick="text2.value=window.document.location" LANGUAGE="VBScript">
+<INPUT TYPE="TEXT" SIZE=25 NAME="Text2">
+</FORM>
+And here is a second form
+<P>
+<FORM NAME="Form2" METHOD="GET">
+<INPUT NAME="Button2" TYPE="Button" VALUE="Hi" OnClick="button2.value=window.document.location" LANGUAGE="VBScript">
+</FORM><BR>
+
+<SCRIPT LANGUAGE="JScript">
+y=15
+</SCRIPT>
+
+<SCRIPT LANGUAGE="VBScript">
+x = 13
+Sub foo1
+   Dim y
+   y = 14
+   alert "Hello"
+End Sub
+
+Sub Window_OnLoad
+   foo1
+   MyForm.button2.value = "Loaded"
+   Form2.button2.value = "Loaded"
+End Sub
+Sub Link_MouseMove(b, s, x, y)
+   MsgBox b
+End Sub
+</SCRIPT>
+
+<SCRIPT LANGUAGE="Python">
+print "Python loaded"
+</SCRIPT>
+
+</BODY>
+</HTML>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/MarqueeText1.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/MarqueeText1.htm
new file mode 100644
index 0000000..85a676f8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/MarqueeText1.htm
@@ -0,0 +1,25 @@
+<HTML>
+<HEAD>
+<base target="text">
+<TITLE> Internet Workshop </TITLE>
+</HEAD>
+<BODY leftmargin=8 bgcolor="#FFFFFF" VLINK="#666666" LINK="#FF0000">
+<FONT FACE="ARIAL,HELVETICA" SIZE="2">
+
+<P>
+<BR>
+<P><FONT FACE="ARIAL,HELVETICA" SIZE="5"><B>Python AX Script Engine</B></FONT>
+<BR>Demo using the Marquee Control
+<BR>Mark Hammond.
+
+<P>This is really quite a boring demo, as the Marquee control does everything.  However, there is Python code behind the buttons that change the speed.  This code is all of 2 lines per button!!!
+
+<P>For more information on Python as an ActiveX scripting language, see
+
+<P><B>Python</B>
+<BR><A HREF="http://www.python.org">http://www.python.org</A>
+
+</FONT>
+</BODY>
+</HTML>
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/calc.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/calc.htm
new file mode 100644
index 0000000..7c7fb48
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/calc.htm
@@ -0,0 +1,116 @@
+<HTML>
+<HEAD><TITLE>Python Script sample:  Calculator</TITLE></HEAD>
+<BODY><FONT FACE=ARIAL SIZE=3> <!-- global default -->
+<SCRIPT LANGUAGE="Python">
+# globals
+Accum = 0.0	  # Previous number (operand) awaiting operation
+FlagNewNum = 1    # Flag to indicate a new number (operand) is being entered
+NullOp = lambda x,y: y
+PendingOp = NullOp# Pending operation waiting for completion of second operand
+numberButNames = ['Zero','One','Two','Three','Four','Five','Six','Seven','Eight','Nine']
+
+def NumPressed(Num):
+	print "NumPressed", Num
+	global FlagNewNum
+	if FlagNewNum:
+		ax.document.Keypad.ReadOut.Value = Num
+		FlagNewNum = None
+	else:
+		if ax.document.Keypad.ReadOut.Value == "0":
+			ax.document.Keypad.ReadOut.Value = str(Num)
+		else:
+			ax.document.Keypad.ReadOut.Value= ax.document.Keypad.ReadOut.Value + str(Num)
+
+# Dynamically create handlers for all the decimal buttons.
+# (ie, this will dynamically create "One_OnClick()"... etc handlers
+for i in range(len(numberButNames)):
+	exec "def %s_OnClick():\tNumPressed(%d)\n" % (numberButNames[i],i)
+
+def Decimal_OnClick():
+	global curReadOut, FlagNewNum
+	curReadOut = ax.document.Keypad.ReadOut.Value
+	if FlagNewNum:
+		curReadOut = "0."
+		FlagNewNum = None
+	else:
+		if not ("." in curReadOut):
+			curReadOut = curReadOut + "."
+	ax.document.Keypad.ReadOut.Value = curReadOut
+
+import sys, string
+
+def Operation(Op, fn):
+	global FlagNewNum, PendingOp, Accum
+	ReadOut = ax.document.Keypad.ReadOut.Value
+	print "Operation", Op, ReadOut, PendingOp, Accum
+	if FlagNewNum:
+		# User is hitting op keys repeatedly, so don't do anything
+		PendingOp = NullOp
+	else:
+		FlagNewNum = 1
+		Accum = PendingOp( Accum, string.atof(ReadOut) )
+	ax.document.Keypad.ReadOut.Value = str(Accum)
+	PendingOp = fn
+
+def ClearEntry_OnClick():
+	# Remove current number and reset state
+	global FlagNewNum
+	ax.document.Keypad.ReadOut.Value = "0"
+	FlagNewNum = 1
+
+def Clear_OnClick():
+	global Accum, PendingOp
+	Accum = 0
+	PendingOp = NullOp
+	ClearEntry_OnClick()
+
+def Neg_OnClick():
+	ax.document.Keypad.ReadOut.Value = str(-string.atof(ax.document.Keypad.ReadOut.Value))
+</SCRIPT>
+
+
+<form action="" Name="Keypad">
+<TABLE>
+<B>
+<TABLE BORDER=2 WIDTH=50 HEIGHT=60 CELLPADDING=1 CELLSPACING=5>
+<CAPTION ALIGN=top> <b>Calculator</b><p> </CAPTION>
+<TR>
+ <TD COLSPAN=3 ALIGN=MIDDLE><INPUT NAME="ReadOut" TYPE="Text" SIZE=24 VALUE="0" WIDTH=100%></TD>
+ <TD></TD>
+ <TD><INPUT NAME="Clear" TYPE="Button" VALUE="  C  " ></TD>
+ <TD><INPUT NAME="ClearEntry" TYPE="Button" VALUE="  CE " ></TD>
+</TR>
+<TR>
+ <TD><INPUT NAME="Seven" TYPE="Button" VALUE="  7  " ></TD>
+ <TD><INPUT NAME="Eight" TYPE="Button" VALUE="  8  " ></TD>
+ <TD><INPUT NAME="Nine" TYPE="Button" VALUE="  9  " ></TD>
+ <TD></TD>
+ <TD><INPUT NAME="Neg" TYPE="Button" VALUE=" +/- " ></TD>
+ <TD><INPUT NAME="Percent" TYPE="Button" VALUE="  % " OnClick="Operation('%', lambda x,y: x*y/100.0)"></TD>
+</TR>
+
+<TR>
+ <TD><INPUT NAME="Four" TYPE="Button" VALUE="  4  " ></TD>
+ <TD><INPUT NAME="Five" TYPE="Button" VALUE="  5  " ></TD>
+ <TD><INPUT NAME="Six" TYPE="Button" VALUE="  6  " ></TD>
+ <TD></TD>
+ <TD ALIGN=MIDDLE><INPUT NAME="Plus" TYPE="Button" VALUE="  +  " OnClick="Operation('+', lambda x,y: x+y)"></TD>
+ <TD ALIGN=MIDDLE><INPUT NAME="Minus" TYPE="Button" VALUE="   -   " OnClick="Operation('-', lambda x,y: x-y)"></TD>
+</TR>
+<TR>
+ <TD><INPUT NAME="One" TYPE="Button" VALUE="  1  " ></TD>
+ <TD><INPUT NAME="Two" TYPE="Button" VALUE="  2  " ></TD>
+ <TD><INPUT NAME="Three" TYPE="Button" VALUE="  3  " ></TD>
+ <TD></TD>
+ <TD ALIGN=MIDDLE><INPUT NAME="Multiply" TYPE="Button" VALUE="  *  " OnClick="Operation('*', lambda x,y: x*y)" ></TD>
+ <TD ALIGN=MIDDLE><INPUT NAME="Divide" TYPE="Button" VALUE="   /   " OnClick="Operation('/', lambda x,y: x/y)" ></TD>
+</TR>
+<TR>
+ <TD><INPUT NAME="Zero" TYPE="Button" VALUE="  0  " ></TD>
+ <TD><INPUT NAME="Decimal" TYPE="Button" VALUE="   .  " ></TD>
+ <TD COLSPAN=3></TD>
+ <TD><INPUT NAME="Equals" TYPE="Button" VALUE="  =  " OnClick="Operation('=', lambda x,y: x)"></TD>
+</TR></TABLE></TABLE></B>
+</FORM>
+</FONT></BODY></HTML>
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/dbgtest.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/dbgtest.htm
new file mode 100644
index 0000000..7ee9468d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/dbgtest.htm
@@ -0,0 +1,16 @@
+<HTML>
+<BODY>
+
+<SCRIPT>
+b="Hello There, how are you"
+</SCRIPT>
+
+<SCRIPT LANGUAGE="Python">
+print "Hello"
+a="Hi there"
+document.write("Hello<P>")
+alert("Hi there")
+</SCRIPT>
+
+</BODY>
+</HTML>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo.htm
new file mode 100644
index 0000000..a828bac2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo.htm
@@ -0,0 +1,26 @@
+<HTML>
+<HEAD>
+<TITLE>Python AXScript Demos</TITLE>
+</HEAD>
+
+
+<SCRIPT LANGUAGE="Python">
+def Window_OnLoad():
+	pass
+#	import win32traceutil
+#	print "Frames are", ax.window.frames._print_details_()
+#	print "Frame 0 href = ", ax.frames.Item(0).location.href
+
+def Name_OnLoad():
+	print "Frame loading"
+
+</SCRIPT>
+
+<FRAMESET FRAMEBORDER=1 COLS = "250, *">
+	<FRAME SRC="demo_menu.htm">
+	<FRAME SRC="demo_check.htm" NAME="Body">
+</FRAMESET>
+
+
+</HTML>
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_check.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_check.htm
new file mode 100644
index 0000000..f0e09f9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_check.htm
@@ -0,0 +1,42 @@
+<HTML>
+<H1>Engine Registration</H1>
+
+<BODY>
+
+<p>The Python ActiveX Scripting Engine is not currently registered.<p>
+
+<p>Due to a <a href="http://starship.python.net/crew/mhammond/win32/PrivacyProblem.html">privacy
+concern</a> discovered in the engine, the use of Python inside IE has been disabled.</p>
+
+Before any of the supplied demos will work, the engine must be successfully registered.
+
+<P>To install a version of the engine, that does work with IE, you can execute the Python program 
+<CODE>win32com\axscript\client\pyscript_rexec.py</CODE> must be run.  You can either do this manually, or follow the instructions below.</p>
+
+<H2>Register the engine now!</H2>
+
+<p>If you have read about the <a href="http://starship.python.net/crew/mhammond/win32/PrivacyProblem.html">privacy
+concern</a> and still wish to register the engine, just follow the process outlined below:</p>
+<OL>
+  <LI>Click on the link below
+  <LI><B>A dialog will be presented asking if the file should be opened or saved to disk.  Select "Open it".</B>
+  <LI>A Console program will briefly open, while the server is registered.
+</OL>
+
+<P><A HREF="..\..\..\client\pyscript_rexec.py">Register the engine now</A>
+
+<H2>Checking the registration</H2>
+After the registration is complete, simply hit the Reload button.  If the 
+registration was successful, the page will change to the Python/AvtiveX Demo Page.
+
+
+<SCRIPT LANGUAGE="Python">
+try:
+	window.open("demo_intro.htm", "Body")
+except:
+	history.back()
+</SCRIPT>
+</BODY></HTML>
+
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_intro.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_intro.htm
new file mode 100644
index 0000000..b8c811d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_intro.htm
@@ -0,0 +1,38 @@
+<HTML>
+<BODY>
+
+<H1>
+  <MARQUEE NAME="Marquee1" DIRECTION=LEFT BEHAVIOR=SCROLL SCROLLAMOUNT=10 SCROLLDELAY=200
+  >Python ActiveX Scripting Demonstation
+  </MARQUEE> 
+</H1>
+
+<p>Congratulations on installing the Python ActiveX Scripting Engine</p>
+
+<p>Be warned that there is a <a href="http://starship.python.net/crew/mhammond/win32/PrivacyProblem.html">privacy
+concern</a> with this engine.  Please read this information, including how to disable the feature.</p>
+
+
+<H3>Object model</H3>
+<P>Except as described below, the object module exposed should be similar to that exposed
+by Visual Basic, etc.  Due to the nature of ActiveX Scripting, the details for each
+host are different, but Python should work "correctly".
+
+<P>The object model exposed via Python for MSIE is not as seamless as VB.  The biggest limitation is 
+the concept of a "local" namespace.  For example, in VB, you can
+code <code>text="Hi there"</code>, but in Python, you must code 
+<code>MyForm.ThisButton.Text="Hi There"</code>.  See the <A HREF="foo2.htm">foo2</A> sample 
+for futher details.
+
+<H3>Known bugs and problems</H3>
+<UL>
+<LI><P>This release seems to have broken Aaron's mouse-trace sample.  No idea why, and Im supposed to be looking into it.
+<LI><P>Builtin objects such as MARQUEE are giving me grief.  Objects accessed via forms are generally
+no problem.
+<LI><P>If you are trying to use Python with the Windows Scripting Host, note that
+.pys files are not correct registered - you will need to explicitely
+specify either cscript.exe or wscript.exe on the command line.
+</UL>
+
+</BODY></HTML>
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_menu.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_menu.htm
new file mode 100644
index 0000000..ba23a434
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/demo_menu.htm
@@ -0,0 +1,16 @@
+<HTML>
+<BODY>
+<H1>Scripting Demos</H1>
+<P>An <A HREF="demo_check.htm" TARGET=Body>Introduction</A> to the 
+scripting engine.
+
+<P>The <A HREF="calc.htm" TARGET=Body>Calculator Demo</A> is a very 
+cool sample written by Aaron Watters.
+
+<P><A HREF="mouseTrack.htm" TARGET=Body>Mouse track</A> is another of 
+Aaron's samples, and shows how fast the Python engine is!
+
+<P>The <A HREF="foo2.htm" TARGET=Body>foo2 sample</A> is mainly used 
+for debugging and testing, but does show some forms in action.
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/docwrite.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/docwrite.htm
new file mode 100644
index 0000000..c95b790
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/docwrite.htm
@@ -0,0 +1,25 @@
+<HTML>
+<BODY>
+A page generated by Python
+
+<SCRIPT LANGUAGE="XXXVBScript">
+document.open()
+document.writeLn "<P>Hello from VBScript"
+document.close()
+</SCRIPT>
+
+<SCRIPT LANGUAGE="Python">
+ax.document.write("<P>Hello from Python")
+ax.document.close()
+ax.document.open()
+ax.document.write("<P>Hello again from Python")
+ax.document.close()
+
+def Window_OnLoad():
+	pass
+#	ax.document.write("<P>Hello from Load from Python")
+#	ax.document.close()
+</SCRIPT>
+
+</BODY>
+</HTML>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/foo2.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/foo2.htm
new file mode 100644
index 0000000..d5e0c4a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/foo2.htm
@@ -0,0 +1,105 @@
+<HTML>
+<BODY>
+
+<SCRIPT>
+b="Hello"
+</SCRIPT>
+
+<SCRIPT LANGUAGE="Python">
+import win32traceutil
+import sys
+print "Hello"
+a="Hi there"
+print "Location is", document.location
+document.write("Hello", " from version ", 2, " of the Python AXScript Engine","<P>")
+document.writeln("This is Python", sys.version)
+
+</SCRIPT>
+
+<P>The caption on the first button is set by the Window Load code.  Clicking
+that button changes the text in the first edit box.
+
+<P>The second button changes its own text when clicked.
+
+<P>The fourth button calls a global function, defined in the global 'script' scope,
+rather than the 'MyForm' scope.
+
+<FORM NAME="MyForm" METHOD="GET">
+   <SCRIPT LANGUAGE="Python">
+print "Hello from in the form"
+   </SCRIPT>
+   <INPUT NAME="Button1" TYPE="Button" OnClick="MyForm.Text1.value='Hi'" LANGUAGE="Python">
+   <INPUT TYPE="TEXT" SIZE=25 NAME="Text1">
+   <INPUT NAME="Button2" TYPE="Button" VALUE="Click for 'Hi'" OnClick="a='Howdy'; MyForm.Button2.value='Hi'" LANGUAGE="Python">
+   <INPUT NAME="Button3" TYPE="Button" VALUE="Click for URL" OnClick="MyForm.Text2.value=document.location" LANGUAGE="Python">
+   <INPUT TYPE="TEXT" SIZE=25 NAME="Text2">
+   <INPUT NAME="Button4" TYPE="Button" VALUE="Call global fn" OnClick="foo1()" LANGUAGE="Python">
+   <INPUT NAME="Button5" TYPE="Button" VALUE="Script for... Test">
+<script for="Button5" event="onClick" language="Python">
+print "HelloThere";
+window.alert("Hello")
+def ATest():
+	print "Hello from ATEst"
+
+ATest()
+</script>
+   <INPUT NAME="Button6" TYPE="Button" VALUE="Set Other" OnClick="Form2.Text1.Value='Hi from other'" LANGUAGE="Python">
+
+</FORM><BR>
+<P>
+And here is a second form
+<P>
+<FORM NAME="Form2" METHOD="GET">
+   <INPUT NAME="Button1" TYPE="Button" OnClick="Form2.Text1.Value='Hi'" LANGUAGE="Python">
+   <INPUT NAME="Button2" TYPE="Button" VALUE="Set Other" OnClick="MyForm.Text1.Value='Hi from other'" LANGUAGE="Python">
+   <INPUT TYPE="TEXT" SIZE=25 NAME="Text1">
+   <INPUT NAME="ButRExec" TYPE="Button" VALUE="RExec fail" OnClick="import win32api;win32api.MessageBox(0,'Oops')" LANGUAGE="Python">
+   <INPUT NAME="ButRExec2" TYPE="Button" VALUE="RExec fail 2" OnClick="import sys,win32traceutil;print sys.modules;from win32com.client import dynamic;import win32com.client.dynamic, pythoncom, win32com.client;o=win32com.client.Dispatch('Word.Application')" LANGUAGE="Python">
+   <INPUT NAME="ButVB" TYPE="Button" VALUE="VBScript Button" OnClick='alert("Hi from VBScript")'>
+   <INPUT NAME="ButCallChain" TYPE="Button" VALUE="Multi-Language call" OnClick='CallPython()'>
+</FORM><BR>
+
+<SCRIPT LANGUAGE="VBScript">
+function CallPython()
+	alert("Hello from VB - Im about to call Python!")
+	PythonGlobalFunction()
+end function
+</SCRIPT>
+
+<SCRIPT LANGUAGE="JScript">
+function JScriptFunction()
+{
+  alert("Hello from JScript");
+}
+</SCRIPT>
+
+<SCRIPT LANGUAGE="Python">
+x=13
+
+def foo1():
+   y = 14
+   for name, item in globals().items():
+        print name, `item`
+   alert ("Hello from AXCode")
+   print "Y is ",y
+
+def PythonGlobalFunction():
+	window.alert("Hello from Python - Im about to call JScript!")
+	window.JScriptFunction()
+
+def Window_OnLoad():
+	print "X is", x
+	print "a is", a
+#	print "------ GLOBALS ----------"
+#	for n,v in globals().items():
+#		print n,'=',v
+	print "MyForm is", MyForm
+	print "MyForm is repr", `MyForm`
+	print "MyForm.Button1 is", `MyForm.Button1`
+	MyForm.Button1.Value = "Python Rules!"
+	Form2.Button1.value = "Form2!"
+	MyForm.Text1.value = document.location
+</SCRIPT>
+
+</BODY>
+</HTML>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/form.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/form.htm
new file mode 100644
index 0000000..97a239f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/form.htm
@@ -0,0 +1,25 @@
+<HTML>
+<BODY>
+
+<FORM NAME="TestForm" METHOD="POST" >
+   <INPUT TYPE="TEXT" SIZE=25 NAME="Name">Name<br>
+   <INPUT TYPE="TEXT" SIZE=25 NAME="Address">Address<br>
+   <INPUT TYPE=SUBMIT
+</FORM>
+
+<SCRIPT LANGUAGE="Python" for="TestForm" Event="onSubmit">
+return Validate()
+</SCRIPT>
+
+<SCRIPT LANGUAGE="Python">
+
+def Validate():
+	if not TestForm.Name.Value or not TestForm.Address.Value:
+		ax.alert("You must enter a name and address.")
+		return 1
+	return 0
+
+</SCRIPT>
+
+</BODY>
+</HTML>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/marqueeDemo.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/marqueeDemo.htm
new file mode 100644
index 0000000..33847c1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/marqueeDemo.htm
@@ -0,0 +1,60 @@
+<HTML>
+<HEAD>
+<base target="text">
+<TITLE> Internet Workshop </TITLE>
+</HEAD>
+<BODY leftmargin=8 bgcolor="#FFFFFF" VLINK="#666666" LINK="#FF0000">
+<FONT FACE="ARIAL,HELVETICA" SIZE="2">
+
+<P>
+<BR>
+<P><FONT FACE="ARIAL,HELVETICA" SIZE="5"><B>Marquee Demo</B></FONT>
+
+<P>
+
+
+<OBJECT 
+	ID="Marquee1" 
+ 	CLASSID="CLSID:1A4DA620-6217-11CF-BE62-0080C72EDD2D"
+	CODEBASE="/workshop/activex/gallery/ms/marquee/other/marquee.ocx#Version=4,70,0,1112"
+	TYPE="application/x-oleobject"
+	WIDTH=100% 
+	HEIGHT=80
+>
+	<PARAM NAME="szURL" VALUE="marqueeText1.htm">
+	<PARAM NAME="ScrollPixelsX" VALUE="0">
+    	<PARAM NAME="ScrollPixelsY" VALUE="-5">
+    	<PARAM NAME="ScrollDelay" VALUE="100">
+    	<PARAM NAME="Whitespace" VALUE="0">
+</OBJECT>
+
+<br> <br>
+
+<INPUT TYPE="Button" NAME="btnFaster" VALUE="Faster">
+<INPUT TYPE="Button" NAME="btnNormal" VALUE="Normal">
+<INPUT TYPE="Button" NAME="btnSlower" VALUE="Slower">
+
+<SCRIPT Language="Python">
+
+def btnFaster_Onclick():
+	ax.Marquee1.ScrollDelay = 0
+
+def btnNormal_Onclick():
+	ax.Marquee1.ScrollDelay = 50
+
+def btnSlower_Onclick():
+	ax.Marquee1.ScrollDelay = 300
+
+</SCRIPT> 
+
+
+<P>&nbsp;
+<HR>
+<B>Notes:</B>
+<P>
+
+
+</FONT>
+</BODY>
+</HTML>
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/mousetrack.htm b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/mousetrack.htm
new file mode 100644
index 0000000..d307a4a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/mousetrack.htm
@@ -0,0 +1,83 @@
+<HTML>
+
+<HEAD><TITLE>Python Scripting sample:  Mouse tracking</TITLE></HEAD>
+<BODY BGCOLOR="#FFFFFF" TOPMARGIN=8>
+<FONT SIZE=5>
+<TABLE Border=0><TR VALIGN=MIDDLE><TD>
+<A ID="Image"> <IMG 
+SRC="file:..\..\..\..\..\win32com\html\image\pycom_blowing.gif" 
+ALT="Clickable Map Image" HEIGHT=113 WIDTH=624 BORDER=0></A>
+
+</TD></TR>
+ <TR><TD>&nbsp;</TD></TR>
+ <TR VALIGN=MIDDLE><TD VALIGN=MIDDLE ALIGN=CENTER><FONT SIZE=5><INPUT 
+TYPE="text" NAME="TxtLinkDescription" SIZE=50></FONT></TD></TR></TABLE>
+</FONT>
+
+<P>
+A mouse tracking demo.  Move the mouse over the image above...
+
+<SCRIPT Language="Python">
+<!-- 
+# Remember the last location clicked
+#print "here we go", 1
+mx = my = 0
+
+# class for rectangle testing
+class rect:
+   def __init__(self, lowx, lowy, upx, upy, desc, url):
+       self.lowx, self.lowy, self.upx, self.upy, self.desc, self.url = \
+         lowx, lowy, upx, upy, desc, url
+   def inside(self, x, y):
+#       print (x,y), "inside", self.desc,
+       result = self.lowx <= x <= self.upx and self.lowy <= y <= self.upy
+#       print result
+       return result
+   def mouse_move(self):
+#       print "move", self.desc
+       ax.TxtLinkDescription.Value = coords + " - " + self.desc
+   def onclick(self):
+#       print "click", self.desc
+       ax.TxtLinkDescription.Value = coords +" click! " + `self.url`
+       if self.url: ax.location = self.url
+
+blows = "Blows away "
+rects =[rect(12,48,59,101,blows+"Visual Basic", ""),
+	rect(107,0,172,58,blows+"Internet Explorer", ""),
+	rect(193,0,261,56,blows+"Microsoft Access", ""),
+	rect(332,43,392,93,blows+"Microsoft Word", ""),
+	rect(457,52,521,99,blows+"Microsoft Excel", ""),
+	rect(537,12,613,85,"Python blows them all away!", "http://www.python.org"),
+]
+
+default = rect(0,0,0,0,"Click on an icon","")
+
+def Image_MouseMove(s, b, x, y):
+    global mx, my, coords
+    coords =`(x,y)`
+#    print coords,
+    mx, my = x, y
+    for r in rects:
+        if r.inside(x,y):
+#           print r.desc
+           r.mouse_move()
+           break
+    else:
+#        print default.desc
+        default.mouse_move()
+
+def Image_OnClick():
+    for r in rects:
+        if r.inside(mx,my):
+           r.onclick()
+           break
+-->
+</SCRIPT>
+
+<P>
+
+</FONT>
+
+</BODY>
+</HTML>
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/pycom_blowing.gif b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/pycom_blowing.gif
new file mode 100644
index 0000000..0d65a292
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/ie/pycom_blowing.gif
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/blank.pys b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/blank.pys
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/blank.pys
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/excel.pys b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/excel.pys
new file mode 100644
index 0000000..42a94b5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/excel.pys
@@ -0,0 +1,34 @@
+# WSH sample: create an Excel.Application COM object and write the WScript
+# host properties into a worksheet via the Show() helper.
+
+#app=WScript.Application

+#app._print_details_() # Use this to see what Python knows about a COM object.

+

+g_index = 1

+# A procedure, using a global.

+def Show(desc, value = None):

+	global g_index # Need global for g_index, as I locally assign.

+	# No global needed for the "xl" object, as it is only referenced.

+	# Also note "xl" is assigned later in the script - ie, Python is very late bound.

+	xl.Cells(g_index, 1).Value = desc

+	if value: xl.Cells(g_index, 2).Value = value

+	g_index = g_index + 1

+

+xl = WScript.CreateObject("Excel.Application")

+import sys

+

+xl.Visible = 1

+#xl.Workbooks().Add() # Excel versions before 98

+xl.Workbooks.Add()

+

+# Show the WScript properties.

+Show("Application Friendly Name", WScript.Name)

+Show("Application Version", WScript.Version)

+Show("Application Context: Fully Qualified Name", WScript.FullName)

+Show("Application Context: Path Only", WScript.Path)

+Show("State of Interactive Mode", WScript.Interactive)

+

+Show("All script arguments:")

+args = WScript.Arguments

+

+for i in xrange(0,args.Count()):

+	Show("Arg %d" % i, args(i))

+

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/registry.pys b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/registry.pys
new file mode 100644
index 0000000..0108d73
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/registry.pys
@@ -0,0 +1,45 @@
+""" Windows Script Host Sample Script
+' Demonstrates the WshShell registry methods (RegWrite / RegDelete),
+' creating and then removing a small key tree under HKCU\Foo.

+' Ported to Python

+'

+' ------------------------------------------------------------------------

+'               Copyright (C) 1996 Microsoft Corporation

+'

+' You have a royalty-free right to use, modify, reproduce and distribute

+' the Sample Application Files (and/or any modified version) in any way

+' you find useful, provided that you agree that Microsoft has no warranty,

+' obligations or liability for any Sample Application Files.

+' ------------------------------------------------------------------------

+'

+' This sample demonstrates how to write/delete from the registry. 

+"""

+

+WshShell = WScript.CreateObject("WScript.Shell")

+

+WshShell.Popup("This script shows how to use registry related methods.", 2)

+

+WshShell.Popup("Create key HKCU\\Foo with value 'Top level key'")

+WshShell.RegWrite("HKCU\\Foo\\", "Top level key")

+

+WshShell.Popup("Create key HKCU\\Foo\\Bar with value 'Second level key'")

+WshShell.RegWrite( "HKCU\\Foo\\Bar\\", "Second level key")

+

+WshShell.Popup ("Set value HKCU\\Foo\\Value to REG_SZ 1")

+WshShell.RegWrite( "HKCU\\Foo\\Value", 1)

+

+WshShell.Popup ("Set value HKCU\\Foo\\Bar to REG_DWORD 2")

+WshShell.RegWrite ("HKCU\\Foo\\Bar", 2, "REG_DWORD")

+

+# NOTE(review): the popup below claims REG_EXPAND_SZ on HKCU\Foo\Bar, but
+# the write actually targets HKCU\Foo\Bar\Baz with no explicit type (so it
+# defaults to REG_SZ).  Looks like a bug inherited from the original
+# sample -- confirm against the VBScript original before changing.
+WshShell.Popup ("Set value HKCU\\Foo\\Bar to REG_EXPAND_SZ '3'")

+WshShell.RegWrite ("HKCU\\Foo\\Bar\\Baz", "%SystemRoot%\\Foo")

+

+WshShell.Popup ("Delete value HKCU\\Foo\\Bar\\Baz")

+WshShell.RegDelete ("HKCU\\Foo\\Bar\\Baz")

+

+WshShell.Popup ("Delete key HKCU\\Foo\\Bar")

+WshShell.RegDelete ("HKCU\\Foo\\Bar\\")

+

+WshShell.Popup ("Delete key HKCU\\Foo")

+WshShell.RegDelete ("HKCU\\Foo\\")

+

+WScript.Echo ("Done")

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/test.pys b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/test.pys
new file mode 100644
index 0000000..e4e79e8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/Demos/client/wsh/test.pys
@@ -0,0 +1,15 @@
+# Testall - test core AX support.

+

+# Test "Restricted Execution" (ie, IObjectSafety).

+# This will fail if in a "restricted execution" environment, but

+# will silently do nothing if not restricted.  This same line in an MSIE

+# script would cause an exception.

+print "Importing win32api..."

+import win32api

+# Trivial statement/indentation smoke test for the engine.
+if 1==1:

+	print "Hi"

+

+WScript.Echo("Hello from WScript")

+

+#fail

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/__init__.py
new file mode 100644
index 0000000..d882689
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/__init__.py
@@ -0,0 +1,4 @@
+# Package init for win32comext.axscript.
+# See if we have a special directory for the binaries (for developers)
+import win32com
+# Presumably extends __path__ with the developer build directory so
+# freshly-built .pyd files are importable -- see win32com for details.
+win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/asputil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/asputil.py
new file mode 100644
index 0000000..4b07573
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/asputil.py
@@ -0,0 +1,12 @@
+"""A utility module for ASP (Active Server Pages on MS Internet Information Server).
+
+Contains:
+	iif -- A utility function to avoid using "if" statements in ASP <% tags
+
+"""
+
+def iif(cond, t, f):
+	"""Return t if cond is true, otherwise f (VBScript-style IIf).
+
+	Note that, unlike a real conditional, both t and f are evaluated
+	by the caller before this function is entered.
+	"""
+	if cond:
+		return t
+	else:
+		return f
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/axscript.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/axscript.pyd
new file mode 100644
index 0000000..16a5f222
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/axscript.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/__init__.py
new file mode 100644
index 0000000..7858ad6
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/__init__.py
@@ -0,0 +1 @@
+# This is a Python package
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/debug.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/debug.py
new file mode 100644
index 0000000..07bcacb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/debug.py
@@ -0,0 +1,209 @@
+import traceback, sys, string
+
+import win32com.server.util
+from win32com.util import IIDToInterfaceName
+from win32com.client.util import Enumerator
+from win32com.server.exception import COMException
+import pythoncom
+from framework import trace
+from win32com.axdebug import axdebug, gateways, contexts, stackframe, documents, adb
+from win32com.axdebug.codecontainer import SourceCodeContainer
+from win32com.axdebug.util import _wrap, _wrap_remove
+import win32com.client.connect
+import win32api, winerror
+import os
+
+# Debug tracing is opt-in: set the DEBUG_AXDEBUG environment variable
+# (any value) before import to have trace() actually print.
+try:
+	os.environ["DEBUG_AXDEBUG"]
+	debuggingTrace = 1		# Should we print "trace" output?
+except KeyError:
+	debuggingTrace = 0
+
+# NOTE(review): this deliberately(?) replaces the "trace" imported from
+# framework above, adding the DEBUG_AXDEBUG gate and a thread-id prefix.
+def trace(*args):
+	"""A function used instead of "print" for debugging output.
+
+	No-op unless DEBUG_AXDEBUG was set at import time; otherwise prints
+	the current thread id followed by all args on one line.
+	"""
+	if not debuggingTrace:
+		return 
+	print win32api.GetCurrentThreadId(),
+	for arg in args:
+		print arg,
+	print
+
+# Note that the DebugManager is not a COM gateway class for the 
+# debugger - but it does create and manage them.
+class DebugManager:
+	"""Owns the debugging machinery for one script engine.
+
+	Locates (or creates) the IDebugApplication for the host, attaches
+	the adb debugger to it, and hands out the IActiveScriptDebug
+	gateway via _query_interface_for_debugger_.
+	"""
+	_debugger_interfaces_ = [axdebug.IID_IActiveScriptDebug]
+	def __init__(self, scriptEngine):
+		self.scriptEngine = scriptEngine
+		self.adb = adb.Debugger()
+		self.rootNode = None
+		self.debugApplication = None
+		self.ccProvider = documents.CodeContainerProvider()
+		# A "smart host" exposes IActiveScriptSiteDebug; a dumb host doesn't.
+		try:
+			self.scriptSiteDebug = scriptEngine.GetScriptSite(axdebug.IID_IActiveScriptSiteDebug)
+		except pythoncom.com_error:
+			# No debugger interface (ie, dumb host).  Do the extra work.
+			trace("Scripting site has no debugger interface")
+			self.scriptSiteDebug = None
+		# Get the debug application object.
+		self.debugApplication = None
+		if self.scriptSiteDebug is not None:
+			# Spec says that we should test for this, and if it fails revert to
+			# PDM application.
+			try:
+				self.debugApplication = self.scriptSiteDebug.GetApplication()
+				self.rootNode = self.scriptSiteDebug.GetRootApplicationNode()
+			except pythoncom.com_error:
+				self.debugApplication = None
+				
+		if self.debugApplication is None:
+			# Try to get/create the default one
+			# NOTE - Dont catch exceptions here - let the parent do it,
+			# so it knows debug support is available.
+			pdm=pythoncom.CoCreateInstance(axdebug.CLSID_ProcessDebugManager,None,pythoncom.CLSCTX_ALL, axdebug.IID_IProcessDebugManager)
+			self.debugApplication = pdm.GetDefaultApplication()
+			self.rootNode = self.debugApplication.GetRootNode()
+			
+		assert self.debugApplication is not None, "Need to have a DebugApplication object by now!"
+		self.activeScriptDebug = None
+
+		if self.debugApplication is not None:
+			self.adb.AttachApp(self.debugApplication, self.ccProvider)
+		# Maps sourceContext cookie -> DebugCodeBlockContainer (smart hosts).
+		self.codeContainers = {}
+		self.activeScriptDebug = _wrap(ActiveScriptDebug(self, self.codeContainers), axdebug.IID_IActiveScriptDebug)
+
+	def Close(self):
+		# Called by the language engine when it receives a close request.
+		# Drops every reference acquired in __init__ so COM objects release.
+		if self.activeScriptDebug is not None:
+			_wrap_remove(self.activeScriptDebug)
+			self.activeScriptDebug = None
+		self.scriptEngine = None
+		self.rootNode = None
+		self.debugApplication = None
+		self.scriptSiteDebug = None
+		if self.ccProvider is not None:
+			self.ccProvider.Close()
+			self.ccProvider = None
+		self.codeContainers = {}
+		if self.adb:
+			self.adb.CloseApp()
+			self.adb = None
+#		print "Close complete"
+
+	def IsAnyHost(self):
+		"Do we have _any_ debugging interfaces installed?"
+		return self.debugApplication is not None
+
+	def IsSimpleHost(self):
+		# True for a "dumb" host that lacks IActiveScriptSiteDebug.
+		return self.scriptSiteDebug is None
+
+	def HandleRuntimeError( self ):
+		"""Called by the engine when a runtime error occurs.  If we have a debugger,
+		we let it know.
+		
+		The result is a boolean which indicates if the error handler should call
+		IActiveScriptSite::OnScriptError()
+		"""
+#		if self.IsAnyHost:
+#			site = _wrap(self, axdebug.IID_IActiveScriptSite)
+#			breakResume, errorResume, fCallOnError = self.debugApplication(activeScriptErrorDebug, site)
+			# Do something with these!
+#		else:
+		trace("HandleRuntimeError")
+		fCallOnError = 1
+		return fCallOnError
+
+	def _query_interface_for_debugger_(self, iid):
+		# Serve the IActiveScriptDebug gateway; 0 means "not supported".
+		if iid in self._debugger_interfaces_:
+			return self.activeScriptDebug
+		trace("DebugManager QI - unknown IID", iid)
+		return 0
+		
+		
+	def OnEnterScript(self):
+		trace("OnEnterScript")
+		# Deliberately raise/catch to obtain the caller's frame, which is
+		# where script execution begins.
+		try:
+			1/0
+		except:
+			# Bit of a hack - reach into engine.
+			baseFrame = sys.exc_info()[2].tb_frame.f_back
+		self.adb.SetupAXDebugging(baseFrame)
+
+	def OnLeaveScript(self):
+		trace("OnLeaveScript")
+		self.adb.ResetAXDebugging()
+
+	def AddScriptBlock(self, codeBlock):
+		"""Register a new script code block with the debugger.
+
+		For a simple host we must build and attach the debug document
+		tree ourselves; a smart host manages the document nodes, so we
+		just remember the container by its source context cookie.
+		"""
+		# If we dont have debugging support, dont bother.
+		cc = DebugCodeBlockContainer(codeBlock, self.scriptSiteDebug)
+		if self.IsSimpleHost():
+			document = documents.DebugDocumentText(cc)
+			document = _wrap(document, axdebug.IID_IDebugDocument)
+			provider = documents.DebugDocumentProvider(document)
+			provider = _wrap(provider, axdebug.IID_IDebugDocumentProvider)
+			cc.debugDocument = document
+			newNode = self.debugApplication.CreateApplicationNode()
+			newNode.SetDocumentProvider(provider)
+			newNode.Attach(self.rootNode)
+		else:
+			newNode = None # Managed by smart host.
+			self.codeContainers[cc.sourceContext] = cc
+		self.ccProvider.AddCodeContainer(cc, newNode)
+
+class DebugCodeBlockContainer(SourceCodeContainer):
+	"""A SourceCodeContainer wrapping one engine AXScriptCodeBlock."""
+	def __init__(self, codeBlock, site):
+		self.codeBlock = codeBlock
+		SourceCodeContainer.__init__(self, codeBlock.codeText, codeBlock.GetFileName(), codeBlock.sourceContextCookie, codeBlock.startLineNumber, site)
+
+	def GetName(self, dnt):
+		# Map a DOCUMENTNAMETYPE to a display string; S_FALSE for the
+		# name types we cannot supply.
+		if dnt==axdebug.DOCUMENTNAMETYPE_APPNODE:
+			return self.codeBlock.GetDisplayName()
+		elif dnt==axdebug.DOCUMENTNAMETYPE_TITLE:
+			return self.codeBlock.GetDisplayName()
+#		elif dnt==axdebug.DOCUMENTNAMETYPE_FILE_TAIL:
+#		elif dnt==axdebug.DOCUMENTNAMETYPE_URL:
+		else:
+			raise COMException(scode=winerror.S_FALSE)
+
+
+class EnumDebugCodeContexts(gateways.EnumDebugCodeContexts):
+	# Items handed to this enumerator are already wrapped, so the
+	# gateway's per-item _wrap becomes a no-op here.
+	def _wrap(self, ob):
+		return ob
+
+class ActiveScriptDebug:
+	"""The class which implements the IActiveScriptDebug interface for the Active Script engine.
+
+	   Only ever used by smart hosts.
+	"""
+	_public_methods_ = ["GetScriptTextAttributes", "GetScriptletTextAttributes", "EnumCodeContextsOfPosition"]
+	_com_interfaces_ = [axdebug.IID_IActiveScriptDebug]
+	def __init__(self, debugMgr, codeContainers):
+		# codeContainers is DebugManager's cookie->container map, shared
+		# by reference so later AddScriptBlock calls are visible here.
+		self.debugMgr = debugMgr
+		self.scriptSiteDebug = debugMgr.scriptSiteDebug
+		self.codeContainers = codeContainers
+
+	def _Close(self):
+		# Break the reference cycles back to the manager.
+		self.debugMgr = None
+		self.scriptSiteDebug = None
+		self.codeContainers = {}
+
+	def _query_interface_(self, iid):
+		# Delegate any other interface back to the script engine itself.
+		trace("DebuggerQI with", iid)
+		return _wrap(self.debugMgr.scriptEngine, iid)
+
+	def GetScriptTextAttributes(self, code, delim, flags):
+		# Syntax-coloring info for arbitrary text: use a throwaway container.
+		container = SourceCodeContainer(code, "<Temp Code Block>")
+		return container.GetSyntaxColorAttributes()
+	def GetScriptletTextAttributes(self, code, delim, flags):
+		trace ("GetScriptletTextAttributes", code, delim, flags)
+		container = SourceCodeContainer(code, "<Temp Code Block>")
+		return container.GetSyntaxColorAttributes()
+
+	def EnumCodeContextsOfPosition(self, context, charOffset, numChars):
+		"""Return an enumerator of code contexts at a source position.
+
+		Raises E_UNEXPECTED if the sourceContext cookie is unknown.
+		"""
+		trace("EnumCodeContextsOfPosition", context, charOffset, numChars)
+		try:
+			context = self.codeContainers[context].GetCodeContextAtPosition(charOffset)
+		except KeyError:
+			raise COMException(scode=winerror.E_UNEXPECTED)
+		enum = EnumDebugCodeContexts([context])
+		return _wrap(enum, axdebug.IID_IEnumDebugCodeContexts)
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/error.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/error.py
new file mode 100644
index 0000000..20e73636
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/error.py
@@ -0,0 +1,219 @@
+"""Exception and error handling.
+
+ This contains the core exceptions that the implementations should raise
+ as well as the IActiveScriptError interface code.
+ 
+"""
+
+import sys, string, traceback
+from win32com.axscript import axscript
+import winerror
+import win32com.server.exception
+import win32com.server.util
+import pythoncom
+import re
+
+# When true, AXScriptException carries full Python tracebacks and the
+# linecache is refreshed on each error (see AXScriptException below).
+debugging = 0
+
+def FormatForAX(text):
+	"""Format a string suitable for an AX Host
+	"""
+	# Expand tabs and convert \n to \r\n for the host's display.
+	# (An older comment here mentioned quote replacement; the code
+	# does not do that.)
+	return ExpandTabs(AddCR(text))
+
+def ExpandTabs(text):
+	# Replace each tab with four spaces.
+	return re.sub('\t','    ', text)
+
+def AddCR(text):
+	# Convert bare \n line endings to \r\n.
+	return re.sub('\n','\r\n',text)
+#	return string.join(string.split(text,'\n'),'\r\n')
+
+class IActiveScriptError:
+	"""An implementation of IActiveScriptError
+	
+	The ActiveX Scripting host calls this client whenever we report
+	an exception to it.  This interface provides the exception details
+	for the host to report to the user.
+	"""
+	_com_interfaces_ = [axscript.IID_IActiveScriptError]
+	_public_methods_ = ["GetSourceLineText","GetSourcePosition","GetExceptionInfo"]
+	def _query_interface_(self, iid):
+		# No extra interfaces supported; 0 means E_NOINTERFACE.
+		print "IActiveScriptError QI - unknown IID", iid
+		return 0
+	def _SetExceptionInfo(self, exc):
+		# Attach the AXScriptException this gateway reports on
+		# (called by ProcessAXScriptException below).
+		self.exception = exc
+	def GetSourceLineText(self):
+		return self.exception.linetext
+	def GetSourcePosition(self):
+		ctx = self.exception.sourceContext
+		# Zero based in the debugger (but our columns are too!)
+		return ctx, self.exception.lineno + self.exception.startLineNo-1, self.exception.colno
+	def GetExceptionInfo(self):
+		return self.exception
+
+class AXScriptException(win32com.server.exception.COMException):
+	"""A class used as a COM exception.
+	
+	Note this has attributes which conform to the standard attributes 
+	for COM exceptions, plus a few others specific to our IActiveScriptError
+	object.
+	"""
+	def __init__(self, site, codeBlock, exc_type, exc_value, exc_traceback):
+		# set properties base class shares via base ctor...
+		win32com.server.exception.COMException.__init__( self, \
+			description = "Unknown Exception", \
+			scode = winerror.DISP_E_EXCEPTION, \
+			source = "Python ActiveX Scripting Engine",
+			)
+			
+		# And my other values...
+		# sourceContext/startLineNo locate the failing code block in the
+		# host document; zero when no block is known.
+		if codeBlock is None:
+			self.sourceContext = 0
+			self.startLineNo = 0
+		else:
+			self.sourceContext = codeBlock.sourceContextCookie
+			self.startLineNo = codeBlock.startLineNumber
+		self.linetext = ""
+
+		self.__BuildFromException(site, exc_type, exc_value, exc_traceback)
+
+	def __BuildFromException(self, site, type , value, tb):
+		# Fill in description/lineno/colno/linetext from the Python
+		# exception.  Syntax errors carry their own location info;
+		# everything else needs the traceback walked.
+		if debugging:
+			import linecache
+			linecache.clearcache()
+		try:
+			if issubclass(type, SyntaxError):
+				self._BuildFromSyntaxError(site, value, tb)
+			else:
+				self._BuildFromOther(site, type, value, tb)
+		except: # Error extracting traceback info!!!
+			traceback.print_exc()
+			# re-raise.
+			raise sys.exc_info()
+
+	def _BuildFromSyntaxError(self, site, value, tb):
+		# SyntaxError values unpack to (msg, (filename, lineno, offset, line));
+		# fall back to placeholders for anything that doesn't.
+		try:
+			msg, (filename, lineno, offset, line) = value
+		except:
+			msg = "Unknown"
+			lineno = 0
+			offset = 0
+			line = "Unknown"
+		self.description=FormatForAX(msg)
+		self.lineno = lineno
+		self.colno = offset - 1
+		self.linetext = ExpandTabs(string.rstrip(line))
+
+	def _BuildFromOther(self, site, type, value, tb):
+		self.colno = -1
+		self.lineno = 0
+		if debugging: # Full traceback if debugging.
+			list=traceback.format_exception(type, value, tb)
+			self.description = ExpandTabs(string.join(list,""))
+			print "Script Traceback is", self.description
+			return
+		# Run down the traceback list, looking for the first "<Script..>"
+		# Hide traceback above this.  In addition, keep going down
+		# looking for a "_*_" attribute, and below hide these also.
+		hide_names = ["r_import","r_reload","r_open"] # hide from these functions down in the traceback.
+		depth = None
+		tb_top = tb
+		while tb_top:
+			filename, lineno, name, line = self.ExtractTracebackInfo(tb_top, site)
+			if filename[:7]=="<Script":
+				break
+			tb_top = tb_top.tb_next
+		format_items = []
+		if tb_top: # found one.
+			depth = 0
+			tb_look = tb_top
+			# Look down for our bottom
+			while tb_look:
+				filename, lineno, name, line = self.ExtractTracebackInfo(tb_look, site)
+				if name in hide_names:
+					break
+				# We can report a line-number, but not a filename.  Therefore,
+				# we return the last line-number we find in one of our script
+				# blocks.
+				if filename.startswith("<Script"):
+					self.lineno = lineno
+					self.linetext = line
+				format_items.append((filename, lineno, name, line))
+				depth = depth + 1
+				tb_look = tb_look.tb_next
+		else:
+			depth = None
+			tb_top = tb
+			
+		list = ['Traceback (most recent call last):\n']
+		list = list + traceback.format_list(format_items)
+		if type==pythoncom.com_error:
+			# COM errors: prefer the server-supplied description from the
+			# EXCEPINFO (value[2][2]) over the generic hresult message.
+			desc = "%s (0x%x)" % (value[1], value[0])
+			if value[0]==winerror.DISP_E_EXCEPTION and value[2] and value[2][2]:
+				desc = value[2][2]
+			list.append("COM Error: "+desc)
+		else:
+			list = list + traceback.format_exception_only(type, value)
+#		list=traceback.format_exception(type, value, tb_top, depth)
+		self.description = ExpandTabs(string.join(list,""))
+		# Clear tracebacks etc.
+		tb = tb_top = tb_look = None
+#		sys.exc_type = sys.exc_value = sys.exc_traceback = None
+
+	def ExtractTracebackInfo(self, tb, site):
+		"""Return (filename, lineno, name, line-text) for one traceback frame.
+
+		When linecache has no source (script blocks are "<Script...>"
+		pseudo-files), the text is recovered from the engine's code block
+		via site.scriptCodeBlocks.
+		"""
+		import linecache
+		f = tb.tb_frame
+		lineno = tb.tb_lineno
+		co = f.f_code
+		filename = co.co_filename
+		name = co.co_name
+		line = linecache.getline(filename, lineno)
+		if not line:
+			try:
+				codeBlock = site.scriptCodeBlocks[filename]
+			except KeyError:
+				codeBlock = None
+			if codeBlock:
+				# Despite the name, GetLineNo returns the line's text
+				# (see framework.AXScriptCodeBlock.GetLineNo).
+				line = codeBlock.GetLineNo(lineno)
+		if line: 
+			line = string.strip(line)
+		else: 
+			line = None
+		return filename, lineno, name, line
+	def __repr__(self):
+		return "AXScriptException Object with description:" + self.description
+
+def ProcessAXScriptException(scriptingSite, debugManager, exceptionInstance):
+	"""General function to handle any exception in AX code
+	
+	This function creates an instance of our IActiveScriptError interface, and
+	gives it to the host, along with our exception class.  The host will
+	likely call back on the IActiveScriptError interface to get the source text
+	and other information not normally in COM exceptions.
+
+	Returns a COMException(SCRIPT_E_REPORTED) when the host reported the
+	error itself, otherwise the original exceptionInstance to propagate.
+	"""
+#	traceback.print_exc()
+	instance = IActiveScriptError()
+	instance._SetExceptionInfo(exceptionInstance)
+	gateway = win32com.server.util.wrap(instance, axscript.IID_IActiveScriptError)
+	if debugManager:
+		fCallOnError = debugManager.HandleRuntimeError()
+		if not fCallOnError:
+			return None
+		
+	try:
+		result = scriptingSite.OnScriptError(gateway)
+	except pythoncom.com_error, details:
+		# Host's error handler itself failed - report and treat as unhandled.
+		print "**OnScriptError failed:", details
+		print "Exception description:'%s'" % (`exceptionInstance.description`)
+		print "Exception text:'%s'" % (`exceptionInstance.linetext`)
+		result = winerror.S_FALSE
+
+	if result==winerror.S_OK:
+		# If the above  returns NOERROR, it is assumed the error has been 
+		# correctly registered and the value SCRIPT_E_REPORTED is returned.
+		ret = win32com.server.exception.COMException(scode=axscript.SCRIPT_E_REPORTED)
+		return ret
+	else:
+		# The error is taken to be unreported and is propagated up the call stack 
+		# via the IDispatch::Invoke's EXCEPINFO parameter (hr returned is DISP_E_EXCEPTION. 
+		return exceptionInstance
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/framework.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/framework.py
new file mode 100644
index 0000000..803182a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/framework.py
@@ -0,0 +1,1017 @@
+"""AXScript Client Framework
+
+  This module provides a core framework for an ActiveX Scripting client.
+  Derived classes actually implement the AX Client itself, including the
+  scoping rules, etc.
+
+  There are classes defined for the engine itself, and for ScriptItems
+"""
+import sys, string
+from win32com.axscript import axscript
+import win32com.server.util
+
+import win32com.client.connect # Need simple connection point support
+
+import win32api, winerror
+import pythoncom
+import types
+import re
+
+def RemoveCR(text):
+# No longer just "RemoveCR" - should be renamed to
+# FixNewlines, or something.  Idea is to fix arbitrary newlines into
+# something Python can compile...
+	# Normalize \r\n, \r and \n\r to plain \n.
+	return re.sub('(\r\n)|\r|(\n\r)','\n',text)
+
+# Active Scripting SCRIPTTEXT_* flags (presumably mirroring the values in
+# the activscp.h platform header -- confirm against the SDK).
+SCRIPTTEXT_FORCEEXECUTION = -2147483648 # 0x80000000
+SCRIPTTEXT_ISEXPRESSION   = 0x00000020
+SCRIPTTEXT_ISPERSISTENT = 0x00000040
+
+from win32com.server.exception import Exception, IsCOMServerException
+import error # ax.client.error
+
+# Map engine state constants to readable names, for trace/debug output.
+state_map = {
+	axscript.SCRIPTSTATE_UNINITIALIZED: "SCRIPTSTATE_UNINITIALIZED",
+	axscript.SCRIPTSTATE_INITIALIZED: "SCRIPTSTATE_INITIALIZED",
+	axscript.SCRIPTSTATE_STARTED: "SCRIPTSTATE_STARTED",
+	axscript.SCRIPTSTATE_CONNECTED: "SCRIPTSTATE_CONNECTED",
+	axscript.SCRIPTSTATE_DISCONNECTED: "SCRIPTSTATE_DISCONNECTED",
+	axscript.SCRIPTSTATE_CLOSED: "SCRIPTSTATE_CLOSED",
+}
+
+def profile(fn, *args):
+	"""Run fn(*args) under the profiler, printing time-sorted stats.
+
+	Returns whatever fn returns.  Note the local name deliberately
+	shadows the stdlib "profile" module imported just below.
+	"""
+	import profile
+	prof = profile.Profile()
+	try:
+# roll on 1.6 :-)		
+#		return prof.runcall(fn, *args)
+		return apply(prof.runcall, (fn,) + args)
+	finally:
+		import pstats
+		# Damn - really want to send this to Excel!
+		#      width, list = pstats.Stats(prof).strip_dirs().get_print_list([])
+		pstats.Stats(prof).strip_dirs().sort_stats("time").print_stats()
+
+class SafeOutput:
+	"""A file-like wrapper whose write() never raises.
+
+	Writes are forwarded to the wrapped stream (default sys.stdout);
+	on any failure the message goes to OutputDebugString instead.
+	"""
+	softspace=1
+	def __init__(self, redir=None):
+		if redir is None: redir = sys.stdout
+		self.redir=redir
+	def write(self,message):
+		try:
+			self.redir.write(message)
+		except:
+			# Host may have no usable stdout - fall back to the debugger.
+			win32api.OutputDebugString(message)
+	def flush(self):
+		pass
+	def close(self):
+		pass
+
+# Make sure we have a valid sys.stdout/stderr, otherwise our
+# print and trace statements may raise an exception
+def MakeValidSysOuts():
+	# Idempotent: only wraps once (checks for an existing SafeOutput).
+	if not isinstance(sys.stdout, SafeOutput):
+		sys.stdout = sys.stderr = SafeOutput()
+		# and for the sake of working around something I can't understand...
+		# prevent keyboard interrupts from killing IIS
+		import signal
+		def noOp(a,b):
+			# it would be nice to get to the bottom of this, so a warning to
+			# the debug console can't hurt.
+			print "WARNING: Ignoring keyboard interrupt from ActiveScripting engine"
+		# If someone else has already redirected, then assume they know what they are doing!
+		if signal.getsignal(signal.SIGINT) == signal.default_int_handler:
+			try:
+				signal.signal(signal.SIGINT, noOp)
+			except ValueError:
+				# Not the main thread - can't do much.
+				pass
+
+def trace(*args):
+	"""A function used instead of "print" for debugging output.
+
+	Prints all args space-separated on one line, unconditionally.
+	"""
+	for arg in args:
+		print arg,
+	print
+
+def RaiseAssert(scode, desc):
+	"""A debugging function that raises an exception considered an "Assertion".
+
+	Prints a loud banner plus desc, then raises Exception(scode, desc).
+	"""
+	print "**************** ASSERTION FAILED *******************"
+	print desc
+	raise Exception(scode, desc)
+
+class AXScriptCodeBlock:
+	"""An object which represents a chunk of code in an AX Script
+	"""
+	def __init__(self, name, codeText, sourceContextCookie, startLineNumber, flags):
+		self.name = name
+		self.codeText = codeText
+		self.codeObject = None		# compiled code, filled in later by the engine
+		self.sourceContextCookie = sourceContextCookie
+		self.startLineNumber = startLineNumber
+		self.flags = flags
+		self.beenExecuted = 0
+	def GetFileName(self):
+		# Gets the "file name" for Python - uses <...> so Python doesnt think
+		# it is a real file.
+		return "<%s>" % self.name
+	def GetDisplayName(self):
+		return self.name
+	def GetLineNo(self, no):
+		# Despite the name, returns the stripped TEXT of 1-based line
+		# "no" of codeText (callers in error.py rely on this).
+		pos = -1
+		for i in range(no-1):
+			pos = string.find(self.codeText, '\n', pos+1)
+			if pos==-1: pos=len(self.codeText)
+		epos = string.find(self.codeText, '\n', pos+1)
+		if epos==-1:
+			epos=len(self.codeText)
+		return string.strip(self.codeText[pos+1:epos])
+
+class Event:
+	"""A single event for a ActiveX named object.
+	"""
+	def __init__(self):
+		self.name = "<None>"
+	def __repr__(self):
+		return "<%s at %d: %s>" % (self.__class__.__name__, id(self), self.name)
+	def Reset(self):
+		pass
+	def Close(self):
+		pass
+	def Build(self, typeinfo, funcdesc):
+		# Take the dispid from the FUNCDESC and resolve its name via
+		# the type info.
+		self.dispid = funcdesc[0]
+		self.name = typeinfo.GetNames(self.dispid)[0]
+#		print "Event.Build() - Event Name is ", self.name
+
+class EventSink:
+	"""A set of events against an item.  Note this is a COM client for connection points.
+	"""
+	_public_methods_ = []
+	def __init__(self, myItem, coDispatch):
+		self.events = {}		# dispid -> Event, built by BuildEvents
+		self.connection = None
+		self.coDispatch = coDispatch
+		self.myScriptItem = myItem
+		self.myInvokeMethod = myItem.GetEngine().ProcessScriptItemEvent
+		self.iid = None			# IID of the source interface, set by BuildEvents
+	def Reset(self):
+		self.Disconnect()
+	def Close(self):
+		# Drop all references and disconnect the connection point.
+		self.iid = None
+		self.myScriptItem = None
+		self.myInvokeMethod = None
+		self.coDispatch = None
+		for event in self.events.values():
+			event.Reset()
+		self.events = {}
+		self.Disconnect()
+	# COM Connection point methods.
+	def _query_interface_(self, iid):
+		if iid==self.iid:
+			return win32com.server.util.wrap(self)
+	def _invoke_(self, dispid, lcid, wFlags, args):
+		# Route a fired event to the engine's event dispatcher.
+		try:
+			event = self.events[dispid]
+		except:
+			raise Exception(scode=winerror.DISP_E_MEMBERNOTFOUND)
+		#print "Invoke for ", event, "on", self.myScriptItem, " - calling",  self.myInvokeMethod
+		return self.myInvokeMethod(self.myScriptItem, event, lcid, wFlags, args)
+
+	def GetSourceTypeInfo(self, typeinfo):
+		"""Gets the typeinfo for the Source Events for the passed typeinfo"""
+		attr = typeinfo.GetTypeAttr()
+		cFuncs = attr[6]
+		typeKind = attr[5]
+		if typeKind not in [pythoncom.TKIND_COCLASS, pythoncom.TKIND_INTERFACE]:
+			RaiseAssert(winerror.E_UNEXPECTED, "The typeKind of the object is unexpected")
+		cImplType = attr[8]
+		for i in xrange(cImplType):
+			# Look for the [source, default] interface on the coclass
+			# that isn't marked as restricted.
+			flags = typeinfo.GetImplTypeFlags(i)
+			flagsNeeded = pythoncom.IMPLTYPEFLAG_FDEFAULT | pythoncom.IMPLTYPEFLAG_FSOURCE
+			if (flags & ( flagsNeeded | pythoncom.IMPLTYPEFLAG_FRESTRICTED))==(flagsNeeded):
+				# Get the handle to the implemented interface.
+				href = typeinfo.GetRefTypeOfImplType(i)
+				return typeinfo.GetRefTypeInfo(href)
+
+	def BuildEvents(self):
+		"""Populate self.events/self.iid from the object's type info.
+
+		Handles both IProvideMultipleClassInfo (extender objects) and
+		plain IProvideClassInfo; silently builds nothing if neither
+		interface is available.
+		"""
+		# See if it is an extender object.
+		try:
+			mainTypeInfo = self.coDispatch.QueryInterface(axscript.IID_IProvideMultipleClassInfo)
+			isMulti = 1
+			numTypeInfos = mainTypeInfo.GetMultiTypeInfoCount()
+		except pythoncom.com_error:
+			isMulti = 0
+			numTypeInfos = 1
+			try:
+				mainTypeInfo = self.coDispatch.QueryInterface(pythoncom.IID_IProvideClassInfo)
+			except pythoncom.com_error:
+				numTypeInfos = 0
+		# Create an event handler for the item.
+		for item in xrange(numTypeInfos):
+			if isMulti:
+				typeinfo, flags = mainTypeInfo.GetInfoOfIndex(item, axscript.MULTICLASSINFO_GETTYPEINFO)
+			else:
+				typeinfo = mainTypeInfo.GetClassInfo()
+			sourceType = self.GetSourceTypeInfo(typeinfo)
+			cFuncs = 0
+			if sourceType:
+				attr = sourceType.GetTypeAttr()
+				self.iid = attr[0]
+				cFuncs = attr[6]
+				for i in xrange(cFuncs):
+					funcdesc = sourceType.GetFuncDesc(i)
+					event = Event()
+					event.Build(sourceType, funcdesc)
+					self.events[event.dispid] = event
+
+	def Connect(self):
+		# No-op when already connected or no source interface was found.
+		if self.connection is not None or self.iid is None: return
+#		trace("Connect for sink item", self.myScriptItem.name, "with IID",str(self.iid))
+		self.connection = win32com.client.connect.SimpleConnection(self.coDispatch, self, self.iid)
+	def Disconnect(self):
+		if self.connection:
+			try:
+				self.connection.Disconnect()
+			except pythoncom.com_error:
+				pass # Ignore disconnection errors.
+			self.connection = None
+
+class ScriptItem:
+	"""An item (or subitem) that is exposed to the ActiveX script
+	"""
+	def __init__(self, parentItem, name, dispatch, flags):
+		self.parentItem = parentItem
+		self.dispatch = dispatch
+		self.name = name
+		self.flags = flags
+		self.eventSink = None
+		self.subItems = {}
+		self.createdConnections = 0
+		self.isRegistered = 0
+#		trace("Creating ScriptItem", name, "of parent", parentItem,"with dispatch", dispatch)
+
+	def __repr__(self):
+		flagsDesc=""
+		if self.flags is not None and self.flags & axscript.SCRIPTITEM_GLOBALMEMBERS:
+			flagsDesc = "/Global"
+		return "<%s at %d: %s%s>" % (self.__class__.__name__, id(self), self.name,flagsDesc)
+
+	def _dump_(self, level):
+		flagDescs = []
+		if self.flags is not None and self.flags & axscript.SCRIPTITEM_GLOBALMEMBERS:
+			flagDescs.append("GLOBAL!")
+		if self.flags is None or self.flags & axscript.SCRIPTITEM_ISVISIBLE == 0:
+			flagDescs.append("NOT VISIBLE")
+		if self.flags is not None and self.flags & axscript.SCRIPTITEM_ISSOURCE:
+			flagDescs.append("EVENT SINK")
+		if self.flags is not None and self.flags & axscript.SCRIPTITEM_CODEONLY:
+			flagDescs.append("CODE ONLY")
+		print " " * level, "Name=", self.name, ", flags=", "/".join(flagDescs), self
+		for subItem in self.subItems.values():
+			subItem._dump_(level+1)
+
+	def Reset(self):
+		self.Disconnect()
+		if self.eventSink:
+			self.eventSink.Reset()
+		self.isRegistered = 0
+		for subItem in self.subItems.values():
+			subItem.Reset()
+
+	def Close(self):
+		self.Reset()
+		self.dispatch = None
+		self.parentItem = None
+		if self.eventSink:
+			self.eventSink.Close()
+			self.eventSink = None
+		for subItem in self.subItems.values():
+			subItem.Close()
+		self.subItems = []
+		self.createdConnections = 0
+
+	def Register(self):
+		# Mark this item (and recursively any sub-items) as registered
+		# with the engine.  Idempotent - a no-op when already registered.
+		if self.isRegistered: return
+		# Get the type info to use to build this item.
+#		if not self.dispatch:
+#			id = self.parentItem.dispatch.GetIDsOfNames(self.name)
+#			print "DispID of me is", id
+#			result = self.parentItem.dispatch.Invoke(id, 0, pythoncom.DISPATCH_PROPERTYGET,1)
+#			if type(result)==pythoncom.TypeIIDs[pythoncom.IID_IDispatch]:
+#				self.dispatch = result
+#			else:
+#				print "*** No dispatch"
+#				return
+#			print "**** Made dispatch"
+		self.isRegistered = 1
+		# Register the sub-items.
+		for item in self.subItems.values():
+			if not item.isRegistered:
+				item.Register()
+
+	def IsGlobal(self):
+		# True (non-zero bitmask) if the item's members are global names.
+		# NOTE(review): assumes self.flags is not None here - confirm
+		# callers only use this after real flags are set (AddNamedItem does).
+		return self.flags & axscript.SCRIPTITEM_GLOBALMEMBERS
+
+	def IsVisible(self):
+		# True if the item should be visible to script code, either
+		# directly (ISVISIBLE) or as an event source (ISSOURCE).
+		# NOTE(review): assumes self.flags is not None - confirm callers.
+		return (self.flags & (axscript.SCRIPTITEM_ISVISIBLE | axscript.SCRIPTITEM_ISSOURCE)) != 0
+
+	def GetEngine(self):
+		# Walk the parent chain until the parent is no longer a ScriptItem
+		# of our class - that parent is the owning script engine.
+		item = self
+		while item.parentItem.__class__==self.__class__:
+			item = item.parentItem
+		return item.parentItem
+
+	def _GetFullItemName(self):
+		# Return the dotted name of this item, eg "window.document".
+		# The AttributeError guard stops the walk at the engine object,
+		# which has no _GetFullItemName method.
+		ret = self.name
+		if self.parentItem:
+			try:
+				ret = self.parentItem._GetFullItemName() + "." + ret
+			except AttributeError:
+				pass
+		return ret
+	
+	def GetSubItemClass(self):
+		# Factory hook - subclasses may override to create specialized
+		# sub-item objects.  Default: same class as self.
+		return self.__class__
+
+	def GetSubItem(self, name):
+		# Case-insensitive sub-item lookup; raises KeyError if not found.
+		return self.subItems[string.lower(name)]
+
+	def GetCreateSubItem(self, parentItem, name, dispatch, flags):
+		# Return the existing sub-item of the given name (case-insensitive),
+		# filling in its flags/dispatch if previously unknown, or create a
+		# new one.  E_INVALIDARG if an attempt is made to change existing
+		# flags or to replace an existing dispatch.
+		keyName = string.lower(name)
+		try:
+			rc = self.subItems[keyName]
+			# No changes allowed to existing flags.
+			if not rc.flags is None and not flags is None and rc.flags != flags:
+				raise Exception(scode=winerror.E_INVALIDARG)
+			# Existing item must not have a dispatch.
+			if not rc.dispatch is None and not dispatch is None:
+				raise Exception(scode=winerror.E_INVALIDARG)
+			rc.flags = flags # Setup the real flags.
+			rc.dispatch = dispatch
+		except KeyError:
+			rc = self.subItems[keyName] = self.GetSubItemClass()(parentItem, name, dispatch, flags)
+		return rc
+#		if self.dispatch is None: 
+#			RaiseAssert(winerror.E_UNEXPECTED, "??")
+
+	def CreateConnections(self):
+		# Create (but do not connect to) the connection points.
+		# Idempotent - a no-op on second and later calls.
+		if self.createdConnections: return
+		self.createdConnections = 1
+		# Nothing to do unless this is an event source
+		# This flag means self, _and_ children, are connectable.
+		if self.flags & axscript.SCRIPTITEM_ISSOURCE:
+			self.BuildEvents()
+			self.FindBuildSubItemEvents()
+
+	def Connect(self):
+		# Connect to the already created connection points.
+		# Recurses into all sub-items.
+		if self.eventSink:
+			self.eventSink.Connect()
+		for subItem in self.subItems.values():
+			subItem.Connect()
+			
+	def Disconnect(self):
+		# Disconnect from the connection points.
+		# Recurses into all sub-items; the connection points themselves
+		# are kept, so Connect() can be called again later.
+		if self.eventSink:
+			self.eventSink.Disconnect()
+		for subItem in self.subItems.values():
+			subItem.Disconnect()
+
+
+	def BuildEvents(self):
+		# Create the EventSink for this item's dispatch and populate it.
+		# Asserts that events have not already been built and that a
+		# dispatch is available.
+		if self.eventSink is not None or self.dispatch is None:
+			RaiseAssert(winerror.E_UNEXPECTED, "Item already has built events, or no dispatch available?")
+		
+#		trace("BuildEvents for named item", self._GetFullItemName())
+		self.eventSink = EventSink(self, self.dispatch)
+		self.eventSink.BuildEvents()
+
+	def FindBuildSubItemEvents(self):
+		# Called during connection to event source.  Seeks out and connects to
+		# all children.  As per the AX spec, this is not recursive
+		# (ie, children sub-items are not sought).
+		# Best-effort: objects without IProvideMultipleClassInfo are skipped.
+		try:
+			multiTypeInfo = self.dispatch.QueryInterface(axscript.IID_IProvideMultipleClassInfo)
+			numTypeInfos = multiTypeInfo.GetMultiTypeInfoCount()
+		except pythoncom.com_error:
+			return
+		for item in xrange(numTypeInfos):
+			typeinfo, flags = multiTypeInfo.GetInfoOfIndex(item, axscript.MULTICLASSINFO_GETTYPEINFO)
+			defaultType = self.GetDefaultSourceTypeInfo(typeinfo)
+			index = 0
+			# Walk every function description in the default interface.
+			while 1:
+				try:
+					fdesc = defaultType.GetFuncDesc(index)
+				except pythoncom.com_error:
+					break # No more funcs
+				index = index + 1
+				dispid = fdesc[0]
+				funckind = fdesc[3]
+				invkind = fdesc[4]
+				elemdesc = fdesc[8]
+				funcflags = fdesc[9]
+				# A "sub-object" is a non-restricted dispatch property-get
+				# returning a pointer to a user-defined (ie, object) type.
+				try:
+					isSubObject = not (funcflags & pythoncom.FUNCFLAG_FRESTRICTED) and \
+						funckind == pythoncom.FUNC_DISPATCH and \
+						invkind  == pythoncom.INVOKE_PROPERTYGET and \
+						elemdesc[0][0] == pythoncom.VT_PTR and \
+						elemdesc[0][1][0] == pythoncom.VT_USERDEFINED
+				except:
+					isSubObject = 0
+				if isSubObject:
+						try:
+							# We found a sub-object.
+							names = typeinfo.GetNames(dispid);
+							result = self.dispatch.Invoke(dispid, 0x0, pythoncom.DISPATCH_PROPERTYGET, 1)
+							# IE has an interesting problem - there are lots of synonyms for the same object.  Eg
+							# in a simple form, "window.top", "window.window", "window.parent", "window.self"
+							# all refer to the same object.  Our event implementation code does not differentiate
+							# eg, "window_onload" will fire for *all* objects named "window".  Thus,
+							# "window" and "window.window" will fire the same event handler :(
+							# One option would be to check if the sub-object is indeed the
+							# parent object - however, this would stop "top_onload" from firing,
+							# as no event handler for "top" would work.
+							# I think we simply need to connect to a *single* event handler.
+							# As use in IE is deprecated, I am not solving this now.
+							if type(result)==pythoncom.TypeIIDs[pythoncom.IID_IDispatch]:
+								name = names[0]
+								subObj = self.GetCreateSubItem(self, name, result, axscript.SCRIPTITEM_ISVISIBLE)
+								#print "subobj", name, "flags are", subObj.flags, "mydisp=", self.dispatch, "result disp=", result, "compare=", self.dispatch==result
+								subObj.BuildEvents()
+								subObj.Register()
+						except pythoncom.com_error:
+							pass
+
+	def GetDefaultSourceTypeInfo(self, typeinfo):
+		"""Gets the typeinfo for the Default Dispatch for the passed typeinfo.
+
+		Returns None (implicitly) if no suitable default interface is found.
+		"""
+		attr = typeinfo.GetTypeAttr()
+		cFuncs = attr[6]
+		typeKind = attr[5]
+		if typeKind not in [pythoncom.TKIND_COCLASS, pythoncom.TKIND_INTERFACE]:
+			RaiseAssert(winerror.E_UNEXPECTED, "The typeKind of the object is unexpected")
+		cImplType = attr[8]
+		for i in xrange(cImplType):
+			# Look for the [source, default] interface on the coclass
+			# that isn't marked as restricted.
+			flags = typeinfo.GetImplTypeFlags(i)
+			if (flags & ( pythoncom.IMPLTYPEFLAG_FDEFAULT | pythoncom.IMPLTYPEFLAG_FSOURCE | pythoncom.IMPLTYPEFLAG_FRESTRICTED))==pythoncom.IMPLTYPEFLAG_FDEFAULT:
+				# Get the handle to the implemented interface.
+				href = typeinfo.GetRefTypeOfImplType(i)
+				defTypeInfo = typeinfo.GetRefTypeInfo(href)
+				attr = defTypeInfo.GetTypeAttr()
+				typeKind = attr[5]
+				typeFlags = attr[11]
+				if typeKind == pythoncom.TKIND_INTERFACE and typeFlags & pythoncom.TYPEFLAG_FDUAL:
+					# Get corresponding Disp interface
+					# -1 is a special value which does this for us.
+					href = typeinfo.GetRefTypeOfImplType(-1)
+					return defTypeInfo.GetRefTypeInfo(href)
+				else:
+					return defTypeInfo
+
+
+# Method names exposed by COMScript for each COM interface it implements.
+# These are combined into COMScript._public_methods_ below.
+IActiveScriptMethods = [
+   "SetScriptSite", "GetScriptSite", "SetScriptState", "GetScriptState",
+   "Close", "AddNamedItem", "AddTypeLib", "GetScriptDispatch",
+   "GetCurrentScriptThreadID", "GetScriptThreadID", "GetScriptThreadState",
+   "InterruptScriptThread", "Clone" ]
+IActiveScriptParseMethods = [
+   "InitNew", "AddScriptlet", "ParseScriptText" ]
+IObjectSafetyMethods = [
+   "GetInterfaceSafetyOptions", "SetInterfaceSafetyOptions"]
+
+# ActiveScriptParseProcedure is a new interface with IIS4/IE4.
+IActiveScriptParseProcedureMethods = ['ParseProcedureText']
+class COMScript:
+	"""An ActiveX Scripting engine base class.
+
+	This class implements the required COM interfaces for ActiveX scripting.
+	"""
+	_public_methods_ = IActiveScriptMethods + IActiveScriptParseMethods + IObjectSafetyMethods + IActiveScriptParseProcedureMethods
+	_com_interfaces_ = [axscript.IID_IActiveScript, axscript.IID_IActiveScriptParse, axscript.IID_IObjectSafety] #, axscript.IID_IActiveScriptParseProcedure]
+
+	def __init__(self):
+		# Make sure we can print/trace without an exception!
+		MakeValidSysOuts()
+#		trace("AXScriptEngine object created", self)
+		self.baseThreadId = -1	# win32 thread id the engine is bound to; -1 = none
+		self.debugManager = None
+		self.threadState = axscript.SCRIPTTHREADSTATE_NOTINSCRIPT
+		self.scriptState = axscript.SCRIPTSTATE_UNINITIALIZED
+		self.scriptSite = None	# the host's IActiveScriptSite
+		self.safetyOptions = 0
+		self.lcid = 0
+		self.subItems = {}	# top-level named items, keyed by name
+		self.scriptCodeBlocks = {}	# code blocks, keyed by file name
+
+	def _query_interface_(self, iid):
+		# Delegate QI for debugger interfaces to the debug manager, if any.
+		if self.debugManager:
+			return self.debugManager._query_interface_for_debugger_(iid)
+#		trace("ScriptEngine QI - unknown IID", iid)
+		return 0
+
+	# IActiveScriptParse
+	def InitNew(self):
+		# Transition to INITIALIZED, but only if a site has been set.
+		if self.scriptSite is not None:
+			self.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED)
+
+	def AddScriptlet(self, defaultName, code, itemName, subItemName, eventName, delimiter, sourceContextCookie, startLineNumber):
+#		trace ("AddScriptlet", defaultName, code, itemName, subItemName, eventName, delimiter, sourceContextCookie, startLineNumber)
+		# Forward to the language-specific implementation, converting the
+		# (possibly Unicode) code to a string.
+		self.DoAddScriptlet(defaultName, str(code), itemName, subItemName, eventName, delimiter,sourceContextCookie, startLineNumber)
+
+	def ParseScriptText(self, code, itemName, context, delimiter, sourceContextCookie, startLineNumber, flags, bWantResult):
+#		trace ("ParseScriptText", code[:20],"...", itemName, context, delimiter, sourceContextCookie, startLineNumber, flags, bWantResult)
+		# Force immediate execution when the caller wants a result or the
+		# engine is already running; otherwise execution is deferred.
+		if bWantResult or self.scriptState == axscript.SCRIPTSTATE_STARTED \
+				or self.scriptState == axscript.SCRIPTSTATE_CONNECTED \
+				or self.scriptState == axscript.SCRIPTSTATE_DISCONNECTED :
+			flags = flags | SCRIPTTEXT_FORCEEXECUTION
+		else:
+			flags = flags & (~SCRIPTTEXT_FORCEEXECUTION)
+
+		if flags & SCRIPTTEXT_FORCEEXECUTION:
+			# About to execute the code.
+			self.RegisterNewNamedItems()
+		return self.DoParseScriptText(str(code), sourceContextCookie, startLineNumber, bWantResult, flags)
+
+	#
+	# IActiveScriptParseProcedure
+	def ParseProcedureText( self, code, formalParams, procName, itemName, unkContext, delimiter, contextCookie, startingLineNumber, flags):
+		trace("ParseProcedureText", code, formalParams, procName, itemName, unkContext, delimiter, contextCookie, startingLineNumber, flags)
+		# NOTE - this is never called, as we have disabled this interface.
+		# Problem is, once enabled all event code comes via here, rather than AddScriptlet.
+		# However, the "procName" is always an empty string - ie, itemName is the object whose event we are handling,
+		# but no idea what the specific event is!?
+		# Problem with disabling this block is that AddScriptlet is _not_ passed
+		# <SCRIPT for="whatever" event="onClick" language="Python">
+		# (but even for those blocks, the "onClick" information is still missing!?!?!?)
+
+#		self.DoAddScriptlet(None, str(code), itemName, subItemName, eventName, delimiter,sourceContextCookie, startLineNumber)
+		return None
+	#
+	# IActiveScript
+	def SetScriptSite(self, site):
+		# We should still work with an existing site (or so MSXML believes :)
+		self.scriptSite = site
+		if self.debugManager is not None:
+			self.debugManager.Close()
+		import traceback
+		# Debugging support is strictly optional - any failure to set it
+		# up simply disables debugging rather than failing the engine.
+		try:
+			import win32com.axdebug.axdebug # see if the core exists.
+			import debug
+			self.debugManager = debug.DebugManager(self)
+		except pythoncom.com_error:
+			# COM errors will occur if the debugger interface has never been
+			# seen on the target system
+			trace("Debugging interfaces not available - debugging is disabled..")
+			self.debugManager = None
+		except ImportError:
+			trace("Debugging extensions (axdebug) module does not exist - debugging is disabled..")
+			self.debugManager = None
+		except:
+			traceback.print_exc()
+			trace("*** Debugger Manager could not initialize - %s: %s" % (sys.exc_info()[0],sys.exc_info()[1]))
+			self.debugManager = None
+
+		# Fall back to the user's default locale if the site has none.
+		try:
+			self.lcid = site.GetLCID() 
+		except pythoncom.com_error:
+			self.lcid = win32api.GetUserDefaultLCID()
+		self.Reset()
+
+	def GetScriptSite(self, iid):
+		# Return the site's interface of the requested IID; S_FALSE if no site.
+		if self.scriptSite is None: raise Exception(scode=winerror.S_FALSE)
+		return self.scriptSite.QueryInterface(iid)
+
+	def SetScriptState(self, state):
+		# Drive the engine state machine through the transitions required
+		# to reach the requested SCRIPTSTATE_* value.
+		#print "SetScriptState with %s - currentstate = %s" % (state_map.get(state),state_map.get(self.scriptState))
+		if state == self.scriptState: return
+		# If closed, allow no other state transitions
+		if self.scriptState==axscript.SCRIPTSTATE_CLOSED:
+			raise Exception(scode=winerror.E_INVALIDARG)
+
+		if state==axscript.SCRIPTSTATE_INITIALIZED:
+			# Re-initialize - shutdown then reset.
+			if self.scriptState in [axscript.SCRIPTSTATE_CONNECTED, axscript.SCRIPTSTATE_STARTED]:
+				self.Stop()
+		elif state==axscript.SCRIPTSTATE_STARTED:
+			if self.scriptState == axscript.SCRIPTSTATE_CONNECTED:
+				self.Disconnect()
+			if self.scriptState == axscript.SCRIPTSTATE_DISCONNECTED:
+				self.Reset()
+			self.Run()
+			self.ChangeScriptState(axscript.SCRIPTSTATE_STARTED)
+		elif state==axscript.SCRIPTSTATE_CONNECTED:
+			if self.scriptState in [axscript.SCRIPTSTATE_UNINITIALIZED,axscript.SCRIPTSTATE_INITIALIZED]:
+				self.ChangeScriptState(axscript.SCRIPTSTATE_STARTED) # report transition through started
+				self.Run()
+			if self.scriptState == axscript.SCRIPTSTATE_STARTED:
+				self.Connect()
+				self.ChangeScriptState(state)
+		elif state==axscript.SCRIPTSTATE_DISCONNECTED:
+			if self.scriptState == axscript.SCRIPTSTATE_CONNECTED:
+				self.Disconnect()
+		elif state==axscript.SCRIPTSTATE_CLOSED:
+			self.Close()
+		elif state==axscript.SCRIPTSTATE_UNINITIALIZED:
+			if self.scriptState == axscript.SCRIPTSTATE_STARTED:
+				self.Stop()
+			if self.scriptState == axscript.SCRIPTSTATE_CONNECTED:
+				self.Disconnect()
+			if self.scriptState == axscript.SCRIPTSTATE_DISCONNECTED:
+				self.Reset()
+			self.ChangeScriptState(state)
+		else:
+			raise Exception(scode=winerror.E_INVALIDARG)
+
+	def GetScriptState(self):
+		# Return the current SCRIPTSTATE_* value.
+		return self.scriptState
+
+	def Close(self):
+		# Shut the engine down completely: stop scripts, close all named
+		# items, close the debugger and release the site.
+#		trace("Close")
+		if self.scriptState in [axscript.SCRIPTSTATE_CONNECTED, axscript.SCRIPTSTATE_DISCONNECTED]:
+			self.Stop()
+		if self.scriptState in [axscript.SCRIPTSTATE_CONNECTED, axscript.SCRIPTSTATE_DISCONNECTED, axscript.SCRIPTSTATE_INITIALIZED, axscript.SCRIPTSTATE_STARTED]:
+			pass # engine.close??
+		if self.scriptState in [axscript.SCRIPTSTATE_UNINITIALIZED, axscript.SCRIPTSTATE_CONNECTED, axscript.SCRIPTSTATE_DISCONNECTED, axscript.SCRIPTSTATE_INITIALIZED, axscript.SCRIPTSTATE_STARTED]:
+			self.ChangeScriptState(axscript.SCRIPTSTATE_CLOSED)
+			# Completely reset all named items (including persistent)
+			for item in self.subItems.values():
+				item.Close()
+			self.subItems = {}
+			self.baseThreadId = -1
+		if self.debugManager:
+			self.debugManager.Close()
+			self.debugManager = None
+		self.scriptSite = None
+		self.scriptCodeBlocks = {}
+		# NOTE(review): persistLoaded is not initialized in __init__ -
+		# presumably set by persistence support elsewhere; confirm.
+		self.persistLoaded = 0
+
+	def AddNamedItem(self, name, flags):
+		# Add a host-supplied named item.  The item's IDispatch is obtained
+		# from the site; E_NOINTERFACE if the object has no dispatch.
+		if self.scriptSite is None: raise Exception(scode=winerror.E_INVALIDARG)
+		name = str(name) # Convert from Unicode.
+		try:
+			unknown = self.scriptSite.GetItemInfo(name, axscript.SCRIPTINFO_IUNKNOWN)[0]
+			dispatch = unknown.QueryInterface(pythoncom.IID_IDispatch)
+		except pythoncom.com_error:
+			raise Exception(scode=winerror.E_NOINTERFACE, desc="Object has no dispatch interface available.")
+		newItem = self.subItems[name] = self.GetNamedItemClass()(self, name, dispatch, flags)
+		if newItem.IsGlobal():
+			newItem.CreateConnections()
+
+	def GetScriptDispatch(self, name):
+		# Base classes should override.
+		raise Exception(scode=winerror.E_NOTIMPL)
+
+	def GetCurrentScriptThreadID(self):
+		return self.baseThreadId
+
+	def GetScriptThreadID(self, win32ThreadId):
+		# Map a win32 thread id to a script thread id.  This engine only
+		# supports the single base thread.
+		if self.baseThreadId == -1:
+			raise Exception(scode=winerror.E_UNEXPECTED)
+		if self.baseThreadId != win32ThreadId:
+			raise Exception(scode=winerror.E_INVALIDARG)
+		return self.baseThreadId
+
+	def GetScriptThreadState(self, scriptThreadId):
+		# Return the SCRIPTTHREADSTATE_* for the (single) script thread.
+		if self.baseThreadId == -1:
+			raise Exception(scode=winerror.E_UNEXPECTED)
+		if scriptThreadId != self.baseThreadId:
+			raise Exception(scode=winerror.E_INVALIDARG)
+		return self.threadState
+
+	def AddTypeLib(self, uuid, major, minor, flags):
+		# Get the win32com gencache to register this library.
+		from win32com.client import gencache
+		gencache.EnsureModule(uuid, self.lcid, major, minor, bForDemand = 1)
+
+	def InterruptScriptThread(self, state, flags):
+		raise Exception("Not Implemented", scode=winerror.E_NOTIMPL)
+
+	def Clone(self):
+		raise Exception("Not Implemented", scode=winerror.E_NOTIMPL)
+	#
+	# IObjectSafety
+
+	# Note that IE seems to insist we say we support all the flags, even tho
+	# we dont accept them all.  If unknown flags come in, they are ignored, and never
+	# reflected in GetInterfaceSafetyOptions and the QIs obviously fail, but still IE
+	# allows our engine to initialize.
+	def SetInterfaceSafetyOptions(self, iid, optionsMask, enabledOptions):
+#		trace ("SetInterfaceSafetyOptions", iid, optionsMask, enabledOptions)
+		if optionsMask & enabledOptions == 0:
+			return
+
+# See comments above.
+#		if (optionsMask & enabledOptions & \
+#			~(axscript.INTERFACESAFE_FOR_UNTRUSTED_DATA | axscript.INTERFACESAFE_FOR_UNTRUSTED_CALLER)):
+#			# request for options we don't understand
+#			RaiseAssert(scode=winerror.E_FAIL, desc="Unknown safety options")
+
+		# Only record options for interfaces we actually implement.
+		if iid in [pythoncom.IID_IPersist, pythoncom.IID_IPersistStream, pythoncom.IID_IPersistStreamInit,
+		            axscript.IID_IActiveScript, axscript.IID_IActiveScriptParse]:
+			supported = self._GetSupportedInterfaceSafetyOptions()
+			self.safetyOptions = supported & optionsMask & enabledOptions
+		else:
+			raise Exception(scode=winerror.E_NOINTERFACE)
+
+	def _GetSupportedInterfaceSafetyOptions(self):
+		# Overridden by subclasses that support safety options.
+		return 0 
+
+	def GetInterfaceSafetyOptions(self, iid):
+		if iid in [pythoncom.IID_IPersist, pythoncom.IID_IPersistStream, pythoncom.IID_IPersistStreamInit,
+		            axscript.IID_IActiveScript, axscript.IID_IActiveScriptParse]:
+			supported = self._GetSupportedInterfaceSafetyOptions()
+			return supported, self.safetyOptions
+		else:
+			raise Exception(scode=winerror.E_NOINTERFACE)
+	#
+	# Other helpers.
+	def ExecutePendingScripts(self):
+		# Register anything new, then run deferred script blocks.
+		self.RegisterNewNamedItems()
+		self.DoExecutePendingScripts()
+
+	def ProcessScriptItemEvent(self, item, event, lcid, wFlags, args):
+#		trace("ProcessScriptItemEvent", item, event, lcid, wFlags, args)
+		self.RegisterNewNamedItems()
+		return self.DoProcessScriptItemEvent(item, event, lcid, wFlags, args)
+
+	def _DumpNamedItems_(self):
+		# Debugging aid - dump the whole named-item tree.
+		for item in self.subItems.values():
+			item._dump_(0)
+
+	def ResetNamedItems(self):
+		# Due to the way we work, we re-create persistent ones.
+		si = self.subItems.items()
+		self.subItems = {}
+		for name, item in si:
+			item.Close()
+			if item.flags & axscript.SCRIPTITEM_ISPERSISTENT:
+				self.AddNamedItem(item.name, item.flags)
+
+	def GetCurrentSafetyOptions(self):
+		return self.safetyOptions
+	def ProcessNewNamedItemsConnections(self):
+		# Process all sub-items.
+		for item in self.subItems.values():
+			if not item.createdConnections: # Fast-track!
+				item.CreateConnections()
+
+	def RegisterNewNamedItems(self):
+		# Register all sub-items.
+		for item in self.subItems.values():
+			if not item.isRegistered: # Fast-track!
+				self.RegisterNamedItem(item)
+
+	def RegisterNamedItem(self, item):
+		# Hook point for subclasses; default simply registers the item.
+		item.Register()
+
+	def CheckConnectedOrDisconnected(self):
+		# Assert the engine is in one of the two "running" states.
+		if self.scriptState in [axscript.SCRIPTSTATE_CONNECTED, axscript.SCRIPTSTATE_DISCONNECTED]:
+			return
+		RaiseAssert(winerror.E_UNEXPECTED, "Not connected or disconnected - %d" % self.scriptState)
+
+	def Connect(self):
+		# Connect all named items' event handlers to their sources.
+		self.ProcessNewNamedItemsConnections()
+		self.RegisterNewNamedItems()
+		self.ConnectEventHandlers()
+
+	def Run(self):
+#		trace("AXScript running...")
+		if self.scriptState != axscript.SCRIPTSTATE_INITIALIZED and self.scriptState != axscript.SCRIPTSTATE_STARTED:
+			raise Exception(scode=winerror.E_UNEXPECTED)
+#		self._DumpNamedItems_()
+		self.ExecutePendingScripts()
+		self.DoRun()
+
+	def Stop(self):
+		# Stop all executing scripts, and disconnect.
+		if self.scriptState == axscript.SCRIPTSTATE_CONNECTED:
+			self.Disconnect()
+		# Reset back to initialized.
+		self.Reset()
+
+	def Disconnect(self):
+		self.CheckConnectedOrDisconnected()
+		try:
+			self.DisconnectEventHandlers()
+		except pythoncom.com_error:
+			# Ignore errors when disconnecting.
+			pass 
+
+		self.ChangeScriptState(axscript.SCRIPTSTATE_DISCONNECTED)
+
+	def ConnectEventHandlers(self):
+#		trace ("Connecting to event handlers")
+		for item in self.subItems.values():
+			item.Connect()
+		self.ChangeScriptState(axscript.SCRIPTSTATE_CONNECTED);
+
+	def DisconnectEventHandlers(self):
+#		trace ("Disconnecting from event handlers")
+		for item in self.subItems.values():
+			item.Disconnect()
+
+	def Reset(self):
+		# Keeping persistent engine state, reset back to an initialized state
+		self.ResetNamedItems()
+		self.ChangeScriptState(axscript.SCRIPTSTATE_INITIALIZED)
+
+	def ChangeScriptState(self, state):
+		# Record the new state and notify the site, with interrupts
+		# disabled for the duration of the notification.
+		#print "  ChangeScriptState with %s - currentstate = %s" % (state_map.get(state),state_map.get(self.scriptState))
+		self.DisableInterrupts()
+		try:
+			self.scriptState = state
+			try:
+				if self.scriptSite: self.scriptSite.OnStateChange(state)
+			except pythoncom.com_error, (hr, desc, exc, arg):
+				pass # Ignore all errors here - E_NOTIMPL likely from scriptlets.
+		finally:
+			self.EnableInterrupts()
+
+	# This stack frame is debugged - therefore we do as little as possible in it.
+	def _ApplyInScriptedSection(self, fn, args):
+		if self.debugManager:
+			self.debugManager.OnEnterScript()
+			if self.debugManager.adb.appDebugger:
+				return self.debugManager.adb.runcall(fn, *args)
+			else:
+				return apply(fn, args)
+		else:
+			return apply(fn, args)
+	
+	def ApplyInScriptedSection(self, codeBlock, fn, args):
+		# Call fn(*args) between OnEnterScript/OnLeaveScript notifications,
+		# routing any exception through HandleException.
+		self.BeginScriptedSection()
+		try:
+			try:
+#				print "ApplyInSS", codeBlock, fn, args
+				return self._ApplyInScriptedSection(fn, args)
+			finally:
+				if self.debugManager: self.debugManager.OnLeaveScript()
+				self.EndScriptedSection()
+		except:
+			self.HandleException(codeBlock)
+			
+	# This stack frame is debugged - therefore we do as little as possible in it.
+	def _CompileInScriptedSection(self, code, name, type):
+		if self.debugManager: self.debugManager.OnEnterScript()
+		return compile(code, name, type)
+
+	def CompileInScriptedSection(self, codeBlock, type, realCode = None):
+		# Compile the code block's text (or realCode if given), caching the
+		# code object on the block.  Returns 1 on success; exceptions go
+		# through HandleException.
+		if codeBlock.codeObject is not None: # already compiled
+			return 1
+		if realCode is None:
+			code = codeBlock.codeText
+		else:
+			code = realCode
+		name = codeBlock.GetFileName()
+		self.BeginScriptedSection()
+		try:
+			try:
+				codeObject = self._CompileInScriptedSection(RemoveCR(code), name, type)
+				codeBlock.codeObject = codeObject
+				return 1
+			finally:
+				if self.debugManager: self.debugManager.OnLeaveScript()
+				self.EndScriptedSection()
+		except:
+			self.HandleException(codeBlock)
+	
+	# This stack frame is debugged - therefore we do as little as possible in it.
+	def _ExecInScriptedSection(self, codeObject, globals, locals = None):
+		if self.debugManager:
+			self.debugManager.OnEnterScript()
+			if self.debugManager.adb.appDebugger:
+				return self.debugManager.adb.run(codeObject, globals, locals)
+			else:
+				exec codeObject in globals, locals
+		else:
+			exec codeObject in globals, locals
+
+	def ExecInScriptedSection(self, codeBlock, globals, locals = None):
+		# Exec the block's compiled code object inside a scripted section.
+		if locals is None: locals = globals
+		assert not codeBlock.beenExecuted, "This code block should not have been executed"
+		codeBlock.beenExecuted = 1
+		self.BeginScriptedSection()
+		try:
+			try:
+				self._ExecInScriptedSection(codeBlock.codeObject, globals, locals)
+			finally:
+				if self.debugManager: self.debugManager.OnLeaveScript()
+				self.EndScriptedSection()
+		except:
+			self.HandleException(codeBlock)
+
+	def _EvalInScriptedSection(self, codeBlock, globals, locals = None):
+		if self.debugManager:
+			self.debugManager.OnEnterScript()
+			if self.debugManager.adb.appDebugger:
+				return self.debugManager.adb.runeval(codeBlock, globals, locals)
+			else:
+				return eval(codeBlock, globals, locals)
+		else:
+			return eval(codeBlock, globals, locals)
+		
+	def EvalInScriptedSection(self, codeBlock, globals, locals = None):
+		# Eval the block's compiled code object inside a scripted section,
+		# returning the result.
+		if locals is None: locals = globals
+		assert not codeBlock.beenExecuted, "This code block should not have been executed"
+		codeBlock.beenExecuted = 1
+		self.BeginScriptedSection()
+		try:
+			try:
+				return self._EvalInScriptedSection(codeBlock.codeObject, globals, locals)
+			finally:
+				if self.debugManager: self.debugManager.OnLeaveScript()
+				self.EndScriptedSection()
+		except:
+			self.HandleException(codeBlock)
+
+	def HandleException(self, codeBlock):
+		# NOTE - Never returns - raises a ComException
+		exc_type, exc_value, exc_traceback = sys.exc_info()
+		# If a SERVER exception, re-raise it.  If a client side COM error, it is
+		# likely to have originated from the script code itself, and therefore
+		# needs to be reported like any other exception.
+		if IsCOMServerException(exc_type):
+			# Ensure the traceback doesn't cause a cycle.
+			exc_traceback = None
+			raise
+		# It could be an error by another script.
+		if issubclass(pythoncom.com_error, exc_type) and exc_value[0]==axscript.SCRIPT_E_REPORTED:
+			# Ensure the traceback doesn't cause a cycle.
+			exc_traceback = None
+			raise Exception(scode=exc_value[0])
+		
+		exception = error.AXScriptException(self, \
+		                       codeBlock, exc_type, exc_value, exc_traceback)
+
+		# Ensure the traceback doesn't cause a cycle.
+		exc_traceback = None
+		result_exception = error.ProcessAXScriptException(self.scriptSite, self.debugManager, exception)
+		if result_exception is not None:
+			try:
+				self.scriptSite.OnScriptTerminate(None, result_exception)
+			except pythoncom.com_error:
+				pass # Ignore errors telling engine we stopped.
+			# reset ourselves to 'connected' so further events continue to fire.
+			self.SetScriptState(axscript.SCRIPTSTATE_CONNECTED)
+			raise result_exception
+		# I think that in some cases this should just return - but the code
+		# that could return None above is disabled, so it never happens.
+		RaiseAssert(winerror.E_UNEXPECTED, "Don't have an exception to raise to the caller!")
+
+	def BeginScriptedSection(self):
+		# NOTE(review): E_UNEXPECTED here is a bare name, not
+		# winerror.E_UNEXPECTED - would raise NameError if hit; confirm.
+		if self.scriptSite is None:
+			raise Exception(E_UNEXPECTED)
+		self.scriptSite.OnEnterScript()
+	def EndScriptedSection(self):
+		# NOTE(review): same bare E_UNEXPECTED name as above - confirm.
+		if self.scriptSite is None:
+			raise Exception(E_UNEXPECTED)
+		self.scriptSite.OnLeaveScript()
+	
+	def DisableInterrupts(self):
+		# Hook for subclasses; no-op here.
+		pass
+	def EnableInterrupts(self):
+		# Hook for subclasses; no-op here.
+		pass
+	def GetNamedItem(self, name):
+		# Look up a top-level named item; E_INVALIDARG if unknown.
+		try:
+			return self.subItems[name]
+		except KeyError:
+			raise Exception(scode=winerror.E_INVALIDARG)
+
+	def GetNamedItemClass(self):
+		# Factory hook - subclasses may return a specialized ScriptItem.
+		return ScriptItem
+
+	def _AddScriptCodeBlock(self, codeBlock):
+		# Record the code block (keyed by its file name) and tell the
+		# debugger about it, if debugging is enabled.
+		self.scriptCodeBlocks[codeBlock.GetFileName()] = codeBlock
+		if self.debugManager:
+			self.debugManager.AddScriptBlock(codeBlock)
+
+if __name__=='__main__':
+	# This module is a base framework only - there is nothing to run.
+	print "This is a framework class - please use pyscript.py etc"
+
+def dumptypeinfo(typeinfo):
+		return
+		attr = typeinfo.GetTypeAttr()
+		# Loop over all methods
+		print "Methods"
+		for j in xrange(attr[6]):
+			fdesc = list(typeinfo.GetFuncDesc(j))
+			id = fdesc[0]
+			try:
+				names = typeinfo.GetNames(id)
+			except pythoncom.ole_error:
+				names = None
+			doc = typeinfo.GetDocumentation(id)
+
+			print " ", names, "has attr", fdesc
+
+		# Loop over all variables (ie, properties)
+		print "Variables"
+		for j in xrange(attr[7]):
+			fdesc = list(typeinfo.GetVarDesc(j))
+			names = typeinfo.GetNames(id)
+			print " ", names, "has attr", fdesc
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pydumper.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pydumper.py
new file mode 100644
index 0000000..1db24f3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pydumper.py
@@ -0,0 +1,70 @@
+# pydumper.py
+#
+# This is being worked on - it does not yet work at all, in any way
+# shape or form :-)
+#
+# A new script engine, derived from the standard scripting engine,
+# which dumps information.
+
+# This generally can be used to grab all sorts of useful details about
+# an engine - expose bugs in it or Python, dump the object model, etc.
+
+# As it is derived from the standard engine, it fully supports Python
+# as a scripting language - meaning the dumps produced can be quite dynamic,
+# and based on the script code you execute.
+
+import pyscript
+from win32com.axscript import axscript
+
+from pyscript import RaiseAssert, trace, Exception, SCRIPTTEXT_FORCEEXECUTION
+
+PyDump_CLSID = '{ac527e60-c693-11d0-9c25-00aa00125a98}'
+
+class AXScriptAttribute(pyscript.AXScriptAttribute):
+	# Unchanged subclass - a hook for this engine to specialize later.
+	pass
+
+class NamedScriptAttribute(pyscript.NamedScriptAttribute):
+	# Unchanged subclass - a hook for this engine to specialize later.
+	pass
+
+
+class PyScript(pyscript.PyScript):
+	# The engine class registered below - currently identical to the
+	# standard pyscript engine.
+	pass
+
+
+def Register():
+	import sys
+	if '-d' in sys.argv:
+		dispatcher = "DispatcherWin32trace"
+		debug_desc = " ("+dispatcher+")"
+		debug_option = "Yes"
+	else:
+		dispatcher = None
+		debug_desc = ""
+		debug_option = ""
+
+	categories = [axscript.CATID_ActiveScript,axscript.CATID_ActiveScriptParse]
+	clsid = PyDump_CLSID
+	lcid = 0x0409 # // english
+	policy = None # "win32com.axscript.client.axspolicy.AXScriptPolicy"
+
+	print "Registering COM server%s..." % debug_desc
+	from win32com.server.register import RegisterServer
+
+	languageName = "PyDump"
+	verProgId = "Python.Dumper.1"
+	RegisterServer(clsid = clsid, pythonInstString = "win32com.axscript.client.pyscript.PyDumper", 
+                       className = "Python Debugging/Dumping ActiveX Scripting Engine",
+	                   progID = languageName, verProgID = verProgId,
+                       catids = categories, 
+                       policy=policy, dispatcher = dispatcher)
+
+	CreateRegKey(languageName + "\\OLEScript")
+	# Basic Registration for wsh.
+	win32com.server.register._set_string(".pysDump", "pysDumpFile")
+	win32com.server.register._set_string("pysDumpFile\\ScriptEngine", languageName)
+	print "Dumping Server registered."
+	
+if __name__=='__main__':
+	# Running this file directly registers the engine.
+	Register()
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pyscript.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pyscript.py
new file mode 100644
index 0000000..d95c61c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pyscript.py
@@ -0,0 +1,425 @@
+"""Python ActiveX Scripting Implementation
+
+This module implements the Python ActiveX Scripting client.
+
+To register the implementation, simply "run" this Python program - ie
+either double-click on it, or run "python.exe pyscript.py" from the
+command line.
+"""
+
+import framework
+import winerror
+import win32com
+import win32api
+import pythoncom
+from win32com.axscript import axscript
+import win32com.server.register
+import sys
+import string
+import traceback
+import scriptdispatch
+import re
+import win32com.client.dynamic
+
+from framework import RaiseAssert, trace, Exception, SCRIPTTEXT_FORCEEXECUTION, SCRIPTTEXT_ISEXPRESSION, SCRIPTTEXT_ISPERSISTENT
+
+PyScript_CLSID = "{DF630910-1C1D-11d0-AE36-8C0F5E000000}"
+
+debugging_attr = 0
+
+def debug_attr_print(*args):
	# Trace helper for the attribute-lookup machinery; emits only when the
	# module-level debugging_attr flag is non-zero.
	if debugging_attr:
		apply(trace, args)
+
+def ExpandTabs(text):
	# Replace each tab with four spaces.
	return re.sub('\t','    ', text)
+
+def AddCR(text):
	# Convert LF line endings to CRLF.
	return re.sub('\n','\r\n',text)
#	return string.join(string.split(text,'\n'),'\r\n')
+
+class AXScriptCodeBlock(framework.AXScriptCodeBlock):
	# Code block whose display name (used in error reporting) is prefixed
	# with the engine name.
	def GetDisplayName(self):
		return "PyScript - " + framework.AXScriptCodeBlock.GetDisplayName(self)
+
+#
+# Restricted execution model.
+#
+import rexec
+class AXRExec(rexec.RExec):
	"""Restricted-execution environment variant that also allows win32trace.

	NOTE(review): per the comment in RegisterNamedItem below, rexec is not
	available on Python 2.2+, so this path is effectively dead code unless
	the rexec variant of the engine is explicitly registered.
	"""
	ok_builtin_modules = rexec.RExec.ok_builtin_modules + ('win32trace',)

	def __init__(self, pretendMain, hooks = None, verbose = 0):
		self.pretendMain = pretendMain
		rexec.RExec.__init__(self, hooks, verbose)
#		mods = list(self.ok_dynamic_modules)
#		mods.append("win32trace")
#		mods = tuple(mods)
#		self.ok_dynamic_modules = mods
	def make_main(self):
		# Substitute the engine's global-namespace module for __main__.
		if not self.modules.has_key('__main__'):
			self.modules['__main__'] = self.pretendMain
			self.pretendMain.__builtins__ = self.modules['__builtin__']
			m = self.add_module('__main__')
+
+# Classes that looks and behaves like RExec, but isnt really!
+import ihooks
+class AXNotRHooks(ihooks.Hooks):
	# Plain (non-restricting) import hooks used by AXNotRExec below.
	pass
+
+class AXNotRExec:
	"""Looks and behaves like RExec but applies no restriction at all."""
	def __init__(self, pretendMain, hooks = None, verbose = 0):
		self.pretendMain = pretendMain
		self.hooks = hooks or AXNotRHooks(verbose)
		self.modules = {'__main__': self.pretendMain}

	def add_module(self, mname):
		# Return the cached module, creating an empty one on first request.
		if self.modules.has_key(mname):
			return self.modules[mname]
		self.modules[mname] = m = self.hooks.new_module(mname)
#		m.__builtins__ = self.modules['__builtin__']
		return m
+
+# There is only ever _one_ ax object - it exists in the global namespace
+# for all script items.
+# It performs a search from all global/visible objects
+# down.
+# This means that if 2 sub-objects of the same name are used
+# then only one is ever reachable using the ax shortcut.
+class AXScriptAttribute:
	"An attribute in a script's namespace."
	def __init__(self, engine):
		# Stored via __dict__ so as not to trigger our own __getattr__.
		self.__dict__['_scriptEngine_'] = engine
	def __getattr__(self, attr):
		# NOTE(review): this test is only true for the exact 2-char name "__";
		# it looks like it was meant to reject _xxx_ style names (i.e.
		# attr[0]/attr[-1]) - confirm against upstream pywin32 before changing.
		if attr[1]=="_" and attr[:-1]=="_":
			raise AttributeError, attr
		rc = self._FindAttribute_(attr)
		if rc is None:
			raise AttributeError, attr
		return rc
	def _Close_(self):
		# Drop the engine reference so the cycle can be collected.
		self.__dict__['_scriptEngine_'] = None

	def _DoFindAttribute_(self, obj, attr):
		# Case-insensitive lookup in this item, then depth-first in sub-items.
		try:
			return obj.subItems[string.lower(attr)].attributeObject
		except KeyError:
			pass
		# Check out the sub-items
		for item in obj.subItems.values():
			try:
				return self._DoFindAttribute_(item, attr)
			except AttributeError:
				pass
		raise AttributeError, attr

	def _FindAttribute_(self, attr):
		for item in self._scriptEngine_.subItems.values():
			try:
				return self._DoFindAttribute_(item, attr)
			except AttributeError:
				pass
		# All else fails, see if it is a global
		# (mainly b/w compat)
		return getattr(self._scriptEngine_.globalNameSpaceModule, attr)
#		raise AttributeError, attr
+
+class NamedScriptAttribute:
	"An explicitly named object in an object's namespace"
	# Each named object holds a reference to one of these.
	# Whenever a sub-item appears in a namespace, it is really one of these
	# objects.  Has a circular reference back to the item itself, which is
	# closed via _Close_()
	def __init__(self, scriptItem):
		# Stored via __dict__ so as not to trigger __setattr__ below.
		self.__dict__['_scriptItem_'] = scriptItem
	def __repr__(self):
		return "<NamedItemAttribute" + repr(self._scriptItem_) + ">"
	def __getattr__(self, attr):
		# If a known subitem, return it.
		try:
			return self._scriptItem_.subItems[string.lower(attr)].attributeObject
		except KeyError:
			# Otherwise see if the dispatch can give it to us
			if self._scriptItem_.dispatchContainer:
				return getattr(self._scriptItem_.dispatchContainer,attr)
		raise AttributeError, attr
	def __setattr__(self, attr, value):
		# XXX - todo - if a known item, then should call its default
		# dispatch method.
		attr=string.lower(attr)
		if self._scriptItem_.dispatchContainer:
			try:
				return setattr(self._scriptItem_.dispatchContainer,attr, value)
			except AttributeError:
				pass
		raise AttributeError, attr
	def _Close_(self):
		# Break the documented circular reference.
		self.__dict__['_scriptItem_'] = None
+
+	
+class ScriptItem(framework.ScriptItem):
	"""A named item extended with per-item event scriptlets and an attribute wrapper."""
	def __init__(self, parentItem, name, dispatch, flags):
		framework.ScriptItem.__init__(self, parentItem, name, dispatch, flags)
		self.scriptlets = {}
		self.attributeObject = None
	def Reset(self):
		framework.ScriptItem.Reset(self)
		if self.attributeObject:
			self.attributeObject._Close_()
		self.attributeObject = None
	def Close(self):
		framework.ScriptItem.Close(self) # calls reset.
		self.dispatchContainer = None
		self.scriptlets = {}

	def Register(self):
		framework.ScriptItem.Register(self)
		self.attributeObject = NamedScriptAttribute(self)
		if self.dispatch:
			# Need to avoid the new Python "lazy" dispatch behaviour.
			try:
				engine = self.GetEngine()
				olerepr = clsid = None
				typeinfo = self.dispatch.GetTypeInfo()
				clsid = typeinfo.GetTypeAttr()[0]
				try:
					# Reuse a cached OLE type description for this CLSID.
					olerepr = engine.mapKnownCOMTypes[clsid]
				except KeyError:
					pass
			except pythoncom.com_error:
				typeinfo = None
			# NOTE(review): if GetEngine() itself raised com_error above,
			# 'olerepr'/'engine' were never bound and the lines below would
			# raise NameError - confirm GetEngine cannot raise here.
			if olerepr is None:
				olerepr = win32com.client.dynamic.MakeOleRepr(self.dispatch, typeinfo, None)
				if clsid is not None:
					engine.mapKnownCOMTypes[clsid] = olerepr
			self.dispatchContainer = win32com.client.dynamic.CDispatch(self.dispatch, olerepr, self.name)
#			self.dispatchContainer = win32com.client.dynamic.Dispatch(self.dispatch, userName = self.name)
#			self.dispatchContainer = win32com.client.dynamic.DumbDispatch(self.dispatch, userName = self.name)

#	def Connect(self):
#		framework.ScriptItem.Connect(self)
#	def Disconnect(self):
#		framework.ScriptItem.Disconnect(self)
+		
+class PyScript(framework.COMScript):
	"""The Python ActiveX scripting engine COM server."""
	# Setup the auto-registration stuff...
	_reg_verprogid_ = "Python.AXScript.2"
	_reg_progid_ = "Python"
#	_reg_policy_spec_ = default
	_reg_catids_ = [axscript.CATID_ActiveScript,axscript.CATID_ActiveScriptParse]
	_reg_desc_ = "Python ActiveX Scripting Engine"
	_reg_clsid_ = PyScript_CLSID
	_reg_class_spec_ = "win32com.axscript.client.pyscript.PyScript"
	_reg_remove_keys_ = [(".pys",), ("pysFile",)]
	_reg_threading_ = "Apartment"
	
	def __init__(self):
		framework.COMScript.__init__(self)
		self.globalNameSpaceModule = None
		self.codeBlocks = []
		self.scriptDispatch = None

	def InitNew(self):
		# Build a fresh "__ax_main__" module that serves as the script's
		# global namespace, with the 'ax' search object pre-installed.
		framework.COMScript.InitNew(self)
		import imp
		self.scriptDispatch = None
		self.globalNameSpaceModule = imp.new_module("__ax_main__")
		self.globalNameSpaceModule.__dict__['ax'] = AXScriptAttribute(self)
		self.rexec_env = None # will be created first time around.
		
		self.codeBlocks = []
		self.persistedCodeBlocks = []
		self.mapKnownCOMTypes = {} # Map of known CLSID to typereprs
		self.codeBlockCounter = 0

	def Stop(self):
		# Flag every pending script as already done
		for b in self.codeBlocks:
			b.beenExecuted = 1
		return framework.COMScript.Stop(self)

	def Reset(self):
		# Reset all code-blocks that are persistent, and discard the rest
		oldCodeBlocks = self.codeBlocks[:]
		self.codeBlocks = []
		for b in oldCodeBlocks:
			if b.flags & SCRIPTTEXT_ISPERSISTENT:
				b.beenExecuted = 0
				self.codeBlocks.append(b)
		return framework.COMScript.Reset(self)

	def _GetNextCodeBlockNumber(self):
		# Monotonic counter used to name anonymous code blocks.
		self.codeBlockCounter = self.codeBlockCounter + 1
		return self.codeBlockCounter
		
	def RegisterNamedItem(self, item):
		# Lazily create the (non-restricting) execution environment, then
		# inject the item - and, for global items, its sub-items and visible
		# methods - into the script's global namespace.
		if self.rexec_env is None:
			# RExec is not available in 2.2+.  If we get here for IE, the
			# user has explicitly run axscript_rexec, so is choosing to
			# take this risk.
#			if self.safetyOptions & (axscript.INTERFACESAFE_FOR_UNTRUSTED_DATA | axscript.INTERFACESAFE_FOR_UNTRUSTED_CALLER):
#				# Use RExec.
#				self.rexec_env = AXRExec(self.globalNameSpaceModule)
#			else:
				# DONT use RExec.
				self.rexec_env = AXNotRExec(self.globalNameSpaceModule)

		wasReg = item.isRegistered
		framework.COMScript.RegisterNamedItem(self, item)
		if not wasReg:
			# Insert into our namespace.
			# Add every item by name
			if item.IsVisible():
				self.globalNameSpaceModule.__dict__[item.name] = item.attributeObject
			if item.IsGlobal():
				# Global items means sub-items are also added...
				for subitem in item.subItems.values():
					self.globalNameSpaceModule.__dict__[subitem.name] = subitem.attributeObject
				# Also add all methods
				for name, entry in item.dispatchContainer._olerepr_.mapFuncs.items():
					if not entry.hidden:
						self.globalNameSpaceModule.__dict__[name] = getattr(item.dispatchContainer,name)

	def DoExecutePendingScripts(self):
		# Compile and execute every queued code block that has not yet run.
		try:
			globs = self.globalNameSpaceModule.__dict__
			for codeBlock in self.codeBlocks:
				if not codeBlock.beenExecuted:
					if self.CompileInScriptedSection(codeBlock, "exec"):
						self.ExecInScriptedSection(codeBlock, globs)
		finally:
			pass
	
	def DoRun(self):
		pass

	def Close(self):
		self.ResetNamespace()
		self.globalNameSpaceModule = None
		self.codeBlocks = []
		self.scriptDispatch = None
		framework.COMScript.Close(self)

	def GetScriptDispatch(self, name):
		# Lazily create the IDispatch wrapper around the script namespace.
#		trace("GetScriptDispatch with", name)
#		if name is not None: return None
		if self.scriptDispatch is None:
			self.scriptDispatch = scriptdispatch.MakeScriptDispatch(self, self.globalNameSpaceModule)
		return self.scriptDispatch

	def MakeEventMethodName(self, subItemName, eventName):
		# e.g. ("form", "onSubmit") -> "Form_OnSubmit"
		return string.upper(subItemName[0])+subItemName[1:] + "_" + string.upper(eventName[0])+eventName[1:]

	def DoAddScriptlet(self, defaultName, code, itemName, subItemName, eventName, delimiter,sourceContextCookie, startLineNumber):
		# Just store the code away - compile when called.  (JIT :-)
		item = self.GetNamedItem(itemName)
		if itemName==subItemName: # Explicit handlers - eg <SCRIPT LANGUAGE="Python" for="TestForm" Event="onSubmit">
			subItem = item
		else:
			subItem = item.GetCreateSubItem(item, subItemName, None, None)
		funcName = self.MakeEventMethodName(subItemName, eventName)
		
		codeBlock = AXScriptCodeBlock("Script Event %s" %funcName, code, sourceContextCookie, startLineNumber, 0)
		self._AddScriptCodeBlock(codeBlock)
		subItem.scriptlets[funcName] = codeBlock

	def DoProcessScriptItemEvent(self, item, event, lcid, wFlags, args):
		# Locate (possibly lazily compiling) the handler for this event,
		# then invoke it with the supplied arguments.
#		trace("ScriptItemEvent", self, item, event, event.name, lcid, wFlags, args)
		funcName = self.MakeEventMethodName(item.name, event.name)
		codeBlock = function = None
		try:
			function = item.scriptlets[funcName]
			if type(function)==type(self): # ie, is a CodeBlock instance
				codeBlock = function
				function = None
		except KeyError:
			pass
		if codeBlock is not None:
			# First call: wrap the raw scriptlet body in a def and compile it.
			realCode = "def %s():\n" % funcName
			for line in string.split(framework.RemoveCR(codeBlock.codeText),"\n"):
				realCode = realCode + '\t' + line + '\n'
			realCode = realCode + '\n'
			if not self.CompileInScriptedSection(codeBlock, "exec", realCode):
				return
			dict = {}
			self.ExecInScriptedSection(codeBlock, self.globalNameSpaceModule.__dict__, dict)
			function = dict[funcName]
			# cache back in scriptlets as a function.
			item.scriptlets[funcName] = function
		if function is None:
			# still no function - see if in the global namespace.
			try:
				function = self.globalNameSpaceModule.__dict__[funcName]
			except KeyError:
				# Not there _exactly_ - do case ins search.
				funcNameLook = string.lower(funcName)
				for attr in self.globalNameSpaceModule.__dict__.keys():
					if funcNameLook==string.lower(attr):
						function = self.globalNameSpaceModule.__dict__[attr]
						# cache back in scriptlets, to avoid this overhead next time
						item.scriptlets[funcName] = function

		if function is None:
			raise Exception(scode=winerror.DISP_E_MEMBERNOTFOUND)
		return self.ApplyInScriptedSection(codeBlock, function, args)

	def DoParseScriptText(self, code, sourceContextCookie, startLineNumber, bWantResult, flags):
		# Wrap incoming source in a code block and either run it now
		# (expression / forced execution) or queue it for later.
		code = framework.RemoveCR(code) + "\n"
		if flags & SCRIPTTEXT_ISEXPRESSION:
			name = "Script Expression"
			exec_type = "eval"
		else:
			name = "Script Block"
			exec_type = "exec"
		num = self._GetNextCodeBlockNumber()
		if num==1: num=""
		name = "%s %s" % (name, num)
		codeBlock = AXScriptCodeBlock(name, code, sourceContextCookie, startLineNumber, flags)
		self._AddScriptCodeBlock(codeBlock)
		globs = self.globalNameSpaceModule.__dict__
		if bWantResult: # always immediate.
			if self.CompileInScriptedSection(codeBlock, exec_type):
				if flags & SCRIPTTEXT_ISEXPRESSION:
					return self.EvalInScriptedSection(codeBlock, globs)
				else:
					return self.ExecInScriptedSection(codeBlock, globs)
				
			# else compile failed, but user chose to keep running...
		else:
			if flags & SCRIPTTEXT_FORCEEXECUTION:
				if self.CompileInScriptedSection(codeBlock, exec_type):
					self.ExecInScriptedSection(codeBlock, globs)
			else:
				self.codeBlocks.append(codeBlock)

	def GetNamedItemClass(self):
		return ScriptItem

	def ResetNamespace(self):
		# NOTE(review): two apparent latent bugs here - AXScriptAttribute
		# defines _Close_, not _Reset_, so the call below always falls into
		# the AttributeError handler; and the assignment binds a *local*
		# 'globalNameSpaceModule', leaving self.globalNameSpaceModule
		# untouched.  Confirm against upstream pywin32 before changing.
		if self.globalNameSpaceModule is not None:
			try:
				self.globalNameSpaceModule.ax._Reset_()
			except AttributeError:
				pass # ???
			globalNameSpaceModule = None
			
		self.rexec_env = None
+
+def DllRegisterServer():
	# Extra registry entries beyond standard COM registration: the OLEScript
	# marker key, plus Windows Script Host bindings for .pys files.
	klass=PyScript
	win32com.server.register._set_subkeys(klass._reg_progid_ + "\\OLEScript", {}) # Just a CreateKey
	# Basic Registration for wsh.
	win32com.server.register._set_string(".pys", "pysFile")
	win32com.server.register._set_string("pysFile\\ScriptEngine", klass._reg_progid_)
	guid_wsh_shellex = "{60254CA5-953B-11CF-8C96-00AA00B8708C}"
	win32com.server.register._set_string("pysFile\\ShellEx\\DropHandler", guid_wsh_shellex)
	win32com.server.register._set_string("pysFile\\ShellEx\\PropertySheetHandlers\\WSHProps", guid_wsh_shellex)
+
+def Register(klass=PyScript):
	# Standard command-line COM registration, with the WSH extras above
	# appended via finalize_register.
	import sys
	ret = win32com.server.register.UseCommandLine(klass, 
	                     finalize_register=DllRegisterServer)
	return ret
+
+if __name__=='__main__':
+	Register()
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pyscript_rexec.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pyscript_rexec.py
new file mode 100644
index 0000000..334906ad
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/pyscript_rexec.py
@@ -0,0 +1,43 @@
+# A version of the ActiveScripting engine that enables rexec support
+# This version supports hosting by IE - however, due to Python's
+# rexec module being neither completely trusted nor private, it is
+# *not* enabled by default.
+# As of Python 2.2, rexec is simply not available - thus, if you use this,
+# a HTML page can do almost *anything* at all on your machine.
+
+# You almost certainly do NOT want to use this!
+
+import pythoncom
+from win32com.axscript import axscript
+import winerror
+import pyscript
+
+INTERFACE_USES_DISPEX = 0x00000004	# Object knows to use IDispatchEx
+INTERFACE_USES_SECURITY_MANAGER = 0x00000008 # Object knows to use IInternetHostSecurityManager
+
+class PyScriptRExec(pyscript.PyScript):
	"""Engine variant that reports itself safe for untrusted callers/data.

	Per the module header: no actual restriction is applied on Python 2.2+,
	so registering this lets hosted HTML run arbitrary code.
	"""
	# Setup the auto-registration stuff...
	_reg_verprogid_ = "Python.AXScript-rexec.2"
	_reg_progid_ = "Python" # Same ProgID as the standard engine.
#	_reg_policy_spec_ = default
	_reg_catids_ = [axscript.CATID_ActiveScript,axscript.CATID_ActiveScriptParse]
	_reg_desc_ = "Python ActiveX Scripting Engine (with rexec support)"
	_reg_clsid_ = "{69c2454b-efa2-455b-988c-c3651c4a2f69}"
	_reg_class_spec_ = "win32com.axscript.client.pyscript_rexec.PyScriptRExec"
	_reg_remove_keys_ = [(".pys",), ("pysFile",)]
	_reg_threading_ = "Apartment"

	def _GetSupportedInterfaceSafetyOptions(self):
		# Claim IDispatchEx / host-security-manager support plus both
		# "safe for untrusted" flags - this is what allows IE to host us.
		# print "**** calling", pyscript.PyScript._GetSupportedInterfaceSafetyOptions, "**->", pyscript.PyScript._GetSupportedInterfaceSafetyOptions(self)
		return INTERFACE_USES_DISPEX | \
		       INTERFACE_USES_SECURITY_MANAGER | \
		       axscript.INTERFACESAFE_FOR_UNTRUSTED_DATA | \
		       axscript.INTERFACESAFE_FOR_UNTRUSTED_CALLER
+
+if __name__=='__main__':
+	print "WARNING: By registering this engine, you are giving remote HTML code"
+	print "the ability to execute *any* code on your system."
+	print
+	print "You almost certainly do NOT want to do this."
+	print "You have been warned, and are doing this at your own (significant) risk"
+	pyscript.Register(PyScriptRExec)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/scriptdispatch.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/scriptdispatch.py
new file mode 100644
index 0000000..9ab5b5a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/client/scriptdispatch.py
@@ -0,0 +1,104 @@
+"""dynamic dispatch objects for AX Script.
+
+ This is an IDispatch object that a scripting host may use to
+ query and invoke methods on the main script.  Not many hosts use
+ this yet, so it is not well tested!
+"""
+
+import winerror
+import types
+from win32com.server.exception import COMException
+import win32com.server.policy
+import win32com.server.util
+from win32com.client import Dispatch
+import pythoncom
+from win32com.axscript import axscript
+
+debugging = 0
+
+PyIDispatchType = pythoncom.TypeIIDs[pythoncom.IID_IDispatch]
+
+def _is_callable(obj):
	# Deliberately narrow: only plain functions and methods count.
	return type(obj) in [types.FunctionType, types.MethodType]
	# ignore hasattr(obj, "__call__") as this means all COM objects!
+
+class ScriptDispatch:
	"""IDispatch-style gateway exposing the script namespace to the host."""
	_public_methods_ = []
	def __init__(self, engine, scriptNamespace):
		self.engine = engine
		self.scriptNamespace = scriptNamespace

	def _dynamic_(self, name, lcid, wFlags, args):
		# Resolve 'name' in the script namespace as a function call and/or
		# property get, per wFlags.
		# Ensure any newly added items are available.
		self.engine.RegisterNewNamedItems()
		self.engine.ProcessNewNamedItemsConnections()
		if wFlags & pythoncom.INVOKE_FUNC:
			# attempt to call a function
			try:
				func = getattr(self.scriptNamespace, name)
				if not _is_callable(func):
					raise AttributeError, name # Not a function.
				# Re-wrap raw IDispatch arguments so script code gets the
				# friendly dynamic-dispatch objects.
				realArgs = []
				for arg in args:
					if type(arg)==PyIDispatchType:
						realArgs.append(Dispatch(arg))
					else:
						realArgs.append(arg)
				try:
					# xxx - todo - work out what code block to pass???
					return self.engine.ApplyInScriptedSection(None, func, tuple(realArgs))
				except COMException, (hr, msg, exc, arg):
					raise

			except AttributeError:
				# Fall through to the property-get path if it was requested too.
				if not wFlags & pythoncom.DISPATCH_PROPERTYGET:
					raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)
		if wFlags & pythoncom.DISPATCH_PROPERTYGET:
			# attempt to get a property
			try:
				ret =  getattr(self.scriptNamespace, name)
				if _is_callable(ret):
					raise AttributeError, name # Not a property.
			except AttributeError:
				raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)
			except COMException, instance:
				raise
			except:
				ret = self.engine.HandleException()
			return ret

		raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)
+
+class StrictDynamicPolicy(win32com.server.policy.DynamicPolicy):
	"""DynamicPolicy that pre-assigns a DISPID to every name in the script
	namespace at wrap time, so members can be looked up by dispid later."""
	def _wrap_(self, object):
		win32com.server.policy.DynamicPolicy._wrap_(self, object)
		if hasattr(self._obj_, 'scriptNamespace'):
			for name in dir(self._obj_.scriptNamespace):
				self._dyn_dispid_to_name_[self._getdispid_(name,0)] = name    

	def _getmembername_(self, dispid):
		try:
			return str(self._dyn_dispid_to_name_[dispid])
		except KeyError:
			raise COMException(scode=winerror.DISP_E_UNKNOWNNAME, desc="Name not found")	

	def _getdispid_(self, name, fdex):
		# Only names that actually exist in the script namespace get a dispid.
		try:
			func = getattr(self._obj_.scriptNamespace, str(name))
		except AttributeError:
			raise COMException(scode=winerror.DISP_E_MEMBERNOTFOUND)
#		if not _is_callable(func):
		return win32com.server.policy.DynamicPolicy._getdispid_(self, name, fdex)
+
+# COM-wrap helpers; the debug variant routes calls through a Win32trace
+# dispatcher for diagnosis.
+def _wrap_debug(obj):
	return win32com.server.util.wrap(obj, usePolicy=StrictDynamicPolicy, useDispatcher=win32com.server.policy.DispatcherWin32trace)
+def _wrap_nodebug(obj):
	return win32com.server.util.wrap(obj, usePolicy=StrictDynamicPolicy)
+
+if debugging:
+	_wrap = _wrap_debug
+else:
+	_wrap = _wrap_nodebug
+
+def MakeScriptDispatch(engine, namespace):
	# Factory: wrap a ScriptDispatch for the engine's namespace as a COM object.
	return _wrap(ScriptDispatch(engine, namespace))
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/__init__.py
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/axsite.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/axsite.py
new file mode 100644
index 0000000..33a5e13
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/axsite.py
@@ -0,0 +1,129 @@
+import string
+import win32com.axscript.axscript
+import winerror
+from win32com.axscript import axscript
+from win32com.server import exception, util
+import pythoncom
+
+class AXEngine:
  """Wraps an ActiveX script engine: holds its IActiveScript,
  IActiveScriptParse and IObjectSafety interfaces and offers helpers."""
  def __init__(self, site, engine):
    # 'engine' may be a ProgID/CLSID string (CoCreateInstance'd here) or an
    # already-created COM object.
    self.eScript = self.eParse = self.eSafety = None
    if type(engine) == type(''):
      engine = pythoncom.CoCreateInstance(engine,
                                          None,
                                          pythoncom.CLSCTX_SERVER,
                                          pythoncom.IID_IUnknown)

    self.eScript = engine.QueryInterface(axscript.IID_IActiveScript)
    self.eParse = engine.QueryInterface(axscript.IID_IActiveScriptParse)
    self.eSafety = engine.QueryInterface(axscript.IID_IObjectSafety)

    self.eScript.SetScriptSite(site)
    self.eParse.InitNew()
  def __del__(self):
    self.Close()
  def GetScriptDispatch(self, name = None):
    return self.eScript.GetScriptDispatch(name)
  def AddNamedItem(self, item, flags):
    return self.eScript.AddNamedItem(item, flags)
  # Some helpers.  
  def AddCode(self, code, flags=0):
    # Parse (and, depending on flags, queue or run) a block of source.
    self.eParse.ParseScriptText(code, None, None, None, 0, 0, flags)
  def EvalCode(self, code):
    # Evaluate 'code' as an expression and return its value.
    return self.eParse.ParseScriptText(code, None, None, None, 0, 0, axscript.SCRIPTTEXT_ISEXPRESSION)
  def Start(self):
    # Should maybe check state?
    # Do I need to transition through?
    self.eScript.SetScriptState(axscript.SCRIPTSTATE_STARTED) 
#    self.eScript.SetScriptState(axscript.SCRIPTSTATE_CONNECTED)

  def Close(self):
    # Release all engine interfaces (safe to call more than once).
    if self.eScript:
      self.eScript.Close()
    self.eScript = self.eParse = self.eSafety = None
  def SetScriptState(self, state):
    self.eScript.SetScriptState(state)
+		
+IActiveScriptSite_methods = [
+  'GetLCID',
+  'GetItemInfo',
+  'GetDocVersionString',
+  'OnScriptTerminate',
+  'OnStateChange',
+  'OnScriptError',
+  'OnEnterScript',
+  'OnLeaveScript',
+  ]
+
+class AXSite:
  """An Active Scripting site.  A Site can have exactly one engine.
  """
  _public_methods_ = IActiveScriptSite_methods
  _com_interfaces_ = [ axscript.IID_IActiveScriptSite ]

  def __init__(self, objModel={}, engine = None, lcid=0):
    self.lcid = lcid

    self.objModel = { }
    for name, object in objModel.items():
      # Gregs code did string.lower this - I think that is callers job if he wants!
      self.objModel[name] = object

    self.engine = None
    if engine:
      self._AddEngine(engine)
    
  def AddEngine(self, engine):
    """Adds a new engine to the site.
    engine can be a string, or a fully wrapped engine object.
    """
    if type(engine)==type(''):
      newEngine = AXEngine(util.wrap(self), engine)
    else:
      newEngine = engine
    self.engine = newEngine
    flags = axscript.SCRIPTITEM_ISVISIBLE | axscript.SCRIPTITEM_NOCODE | axscript.SCRIPTITEM_GLOBALMEMBERS | axscript.SCRIPTITEM_ISPERSISTENT
    # NOTE(review): SetScriptState is invoked once per named item here; it
    # looks like it was intended to run once, after the loop - confirm.
    for name in self.objModel.keys():
      newEngine.AddNamedItem(name, flags)
      newEngine.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED)
    return newEngine

  # B/W compat
  _AddEngine = AddEngine

  def _Close(self):
    self.engine.Close()
    self.objModel = {}

  def GetLCID(self):
    return self.lcid

  def GetItemInfo(self, name, returnMask):
    # Hand the engine the COM object registered under 'name'.
#    name = string.lower(name)
    if not self.objModel.has_key(name):
      raise exception.Exception(scode=winerror.TYPE_E_ELEMENTNOTFOUND, desc='item not found')

    ### for now, we don't have any type information

    if returnMask & axscript.SCRIPTINFO_IUNKNOWN:
      return (self.objModel[name], None)

    return (None, None)

  def GetDocVersionString(self):
    return 'Python AXHost version 1.0'

  def OnScriptTerminate(self, result, excepInfo):
    pass

  def OnStateChange(self, state):
    pass

  def OnScriptError(self, errorInterface):
    # S_FALSE tells the engine we did not handle the error.
    return winerror.S_FALSE  

  def OnEnterScript(self):
    pass

  def OnLeaveScript(self):
    pass
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/error.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/error.py
new file mode 100644
index 0000000..fd3d96e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/server/error.py
@@ -0,0 +1,18 @@
+"""Exception instance for AXScript servers.
+
+This module implements an exception instance that is raised by the core 
+server scripting support.
+
+When a script error occurs, it wraps the COM object that describes the
+exception in a Python instance, which can then be raised and caught.
+"""
+
+class Exception:
	"""Wraps the COM object describing a script error so it can be raised and
	caught; attribute access is delegated to the wrapped object."""
	def __init__(self, activeScriptError):
		self.activeScriptError = activeScriptError
	def __getattr__(self, attr):
		try:
			return getattr(self.activeScriptError, attr)
		except AttributeError:
			raise AttributeError, attr
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/TEST.BAT b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/TEST.BAT
new file mode 100644
index 0000000..d1a4194
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/TEST.BAT
@@ -0,0 +1 @@
+start pythonwin /nodde /run \src\pythonex\pythonwin\pywin\demos\webbrowser.py test.html 
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/debugTest.pys b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/debugTest.pys
new file mode 100644
index 0000000..5dafb89
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/debugTest.pys
@@ -0,0 +1,17 @@
+def Function(i):

+	Test.Echo(i)

+

+print dir()

+

+a=1

+b=a

+c=b # And here is a comment

+d="A string"

+print a

+Test.echo("Hello from Python")

+for i in range(2):

+	Function(i)

+a = """\

+A multi-line string!

+"""

+

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/debugTest.vbs b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/debugTest.vbs
new file mode 100644
index 0000000..d5ee3faf7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/debugTest.vbs
@@ -0,0 +1,7 @@
+a=1

+b=a

+Test.Echo "Hello from VBScript"

+' Here is a comment

+for i = 1 to 10

+  

+next

diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/leakTest.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/leakTest.py
new file mode 100644
index 0000000..362d480
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/leakTest.py
@@ -0,0 +1,163 @@
+import string, sys
+from win32com.axscript.server.error import Exception
+from win32com.axscript import axscript
+from win32com.axscript.server import axsite
+import pythoncom
+from win32com.server import util, connect
+import win32com.server.policy
+
+class MySite(axsite.AXSite):
  # Site that pretty-prints script errors, with a caret under the column
  # reported by the engine.
  def OnScriptError(self, error):
    exc = error.GetExceptionInfo()
    context, line, char = error.GetSourcePosition()
    print " >Exception:", exc[1]
    try:
      st = error.GetSourceLineText()
    except pythoncom.com_error:
      st = None
    if st is None: st = ""
    text = st + "\n" + (" " * (char-1)) + "^" + "\n" + exc[2]
    for line in string.split(text,"\n"):
      print "  >" + line
+
+class MyCollection(util.Collection):
	def _NewEnum(self):
		# Announce enumerator creation so leaked enumerators show up in output.
		print "Making new Enumerator"
		return util.Collection._NewEnum(self)
+
+class Test:
  # The object model exposed to scripts as 'test'.
  _public_methods_ = [ 'echo' ]
  _public_attrs_ = ['collection', 'verbose']
  def __init__(self):
    self.verbose = 0
    self.collection = util.wrap( MyCollection( [1,'Two',3] ))
    self.last = ""
#    self._connect_server_ = TestConnectServer(self)

  def echo(self, *args):
    # Record the last message (checked by doTestEngine) and optionally print.
    self.last = string.join(map(str, args))
    if self.verbose:
      for arg in args:
        print arg,
      print
#    self._connect_server_.Broadcast(last)
+
+
+#### Connections currently wont work, as there is no way for the engine to
+#### know what events we support.  We need typeinfo support.
+
+IID_ITestEvents = pythoncom.MakeIID("{8EB72F90-0D44-11d1-9C4B-00AA00125A98}")
+
+class TestConnectServer(connect.ConnectableServer):
	# Currently unused (see comment above): engines have no typeinfo for our
	# events, so connections cannot be established.
	_connect_interfaces_ = [IID_ITestEvents]
	# The single public method that the client can call on us
	# (ie, as a normal COM server, this exposes just this single method.
	def __init__(self, object):
		self.object = object
		
	def Broadcast(self,arg):
		# Simply broadcast a notification.
		self._BroadcastNotify(self.NotifyDoneIt, (arg,))

	def NotifyDoneIt(self, interface, arg):
		interface.Invoke(1000, 0, pythoncom.DISPATCH_METHOD, 1, arg)
+
+VBScript = """\
+prop = "Property Value"
+
+sub hello(arg1)
+   test.echo arg1
+end sub
+  
+sub testcollection
+   test.verbose = 1
+   for each item in test.collection
+     test.echo "Collection item is", item
+   next
+end sub
+"""
+PyScript = """\
+print "PyScript is being parsed..."
+prop = "Property Value"
+def hello(arg1):
+   test.echo(arg1)
+   pass
+   
+def testcollection():
+   test.verbose = 1
+#   test.collection[1] = "New one"
+   for item in test.collection:
+     test.echo("Collection item is", item)
+   pass
+"""
+
+ErrScript = """\
+bad code for everyone!
+"""
+
+
+def TestEngine(engineName, code, bShouldWork = 1):
  # Build a site exposing a Test object as 'test', load 'code' into the named
  # engine, run it, then exercise an ASP-style uninit/reinit cycle.
  echoer = Test()
  model = {
    'test' : util.wrap(echoer),
    }

  site = MySite(model)
  engine = site._AddEngine(engineName)
  engine.AddCode(code, axscript.SCRIPTTEXT_ISPERSISTENT)
  try:
    engine.Start()
  finally:
    if not bShouldWork:
      engine.Close()
      return
  doTestEngine(engine, echoer)
  # re-transition the engine back to the UNINITIALIZED state, a-la ASP.
  engine.eScript.SetScriptState(axscript.SCRIPTSTATE_UNINITIALIZED)
  engine.eScript.SetScriptSite(util.wrap(site))
  print "restarting"
  engine.Start()
  # all done!
  engine.Close()
+
+def doTestEngine(engine, echoer):    
  # Drive the loaded script through its IDispatch: call hello(), read the
  # 'prop' property, iterate the collection, and evaluate an expression.
  # Now call into the scripts IDispatch
  from win32com.client.dynamic import Dispatch
  ob = Dispatch(engine.GetScriptDispatch())
  try:
    ob.hello("Goober")
  except pythoncom.com_error, exc:
    print "***** Calling 'hello' failed", exc
    return
  if echoer.last != "Goober":
    print "***** Function call didnt set value correctly", `echoer.last`
    
  if str(ob.prop) != "Property Value":
    print "***** Property Value not correct - ", `ob.prop`

  ob.testcollection()

  # Now make sure my engines can evaluate stuff.
  result = engine.eParse.ParseScriptText("1+1", None, None, None, 0, 0, axscript.SCRIPTTEXT_ISEXPRESSION)
  if result != 2:
    print "Engine could not evaluate '1+1' - said the result was", result
+
+def dotestall():
  # Leak check: run the Python engine repeatedly and print the interpreter's
  # total refcount after each run (requires a debug build for gettotalrefcount).
  for i in xrange(10):
    TestEngine("Python", PyScript)
    print sys.gettotalrefcount()
##  print "Testing Exceptions"
##  try:
##    TestEngine("Python", ErrScript, 0)
##  except pythoncom.com_error:
##    pass
+   
+
+def testall():
  # Run the leak test then report how many COM interface/gateway objects
  # remain alive after CoUninitialize.
  dotestall()
  pythoncom.CoUninitialize()
  print "AXScript Host worked correctly - %d/%d COM objects left alive." % (pythoncom._GetInterfaceCount(), pythoncom._GetGatewayCount())
+
+if __name__ == '__main__':
+	testall()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/test.html b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/test.html
new file mode 100644
index 0000000..c52eab6d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/test.html
@@ -0,0 +1,88 @@
+<HTML>
+<BODY>
+A multi-language Active Debugging demo.
+
+<FORM>
+  <INPUT NAME="Button1" 
+       VALUE="Click for VB to call JScript!" 
+       TYPE="Button" 
+       OnClick="JScriptEntryPoint" 
+       LANGUAGE="VBScript"
+  >
+  <INPUT NAME="Button2" 
+       VALUE="Click for VB to call ForthScript!" 
+       TYPE="Button" 
+       OnClick="ForthEntryPoint2"
+       LANGUAGE="VBScript"
+  >
+</FORM>
+<BR>
+
+<!-- We start with JScript code calling Python... -->
+<SCRIPT LANGUAGE="JScript">
+
+function JScriptEntryPoint()
+{
+    PythonEntryPoint2();
+}
+
+</SCRIPT>
+
+<!-- Python calling Perl -->
+<SCRIPT LANGUAGE="Python">
+
+def PythonEntryPoint():
+	# Perl works as a property.
+	result = window.PerlEntryPoint
+
+</SCRIPT>
+
+<SCRIPT LANGUAGE="Python">
+def PythonEntryPoint2():
+	a = 1
+	b = 2
+	DoTheCall()
+
+def DoTheCall():
+	a = 2
+	b = "Hi there"
+	window.PythonEntryPoint()
+	c = "Done it!"
+
+</SCRIPT>
+
+<!-- And some Perl code to call VBScript -->
+<SCRIPT LANGUAGE="PerlScript">
+
+sub PerlEntryPoint {
+	$window->VBScriptEntryPoint();
+	$window->alert("Perl just called VBScript");
+}
+
+</SCRIPT>
+
+
+<!-- VBscript finally calls our Forth Sample -->
+<SCRIPT LANGUAGE="VBScript">
+
+Sub VBScriptEntryPoint
+   call ForthEntryPoint
+End Sub
+
+</SCRIPT>
+
+
+<SCRIPT LANGUAGE="ForthScript">
+
+: ForthEntryPoint
+    1 0 /
+    "Forth has no one to call" 1 window.alert call
+;
+
+: ForthEntryPoint2
+	0 window.JScriptEntryPoint call
+;
+</SCRIPT>
+
+</BODY>
+</HTML>
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/testHost.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/testHost.py
new file mode 100644
index 0000000..e74637c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/testHost.py
@@ -0,0 +1,213 @@
+import string, sys
+from win32com.axscript.server.error import Exception
+from win32com.axscript import axscript
+from win32com.axscript.server import axsite
+import pythoncom
+from win32com.server import util, connect
+import win32com.server.policy
+
+import unittest
+import win32com.test.util
+
+verbose = "-v" in sys.argv
+
+class MySite(axsite.AXSite):
+  def __init__(self, *args):
+    self.seen_exception = 0
+    axsite.AXSite.__init__(self, *args)
+
+  def OnScriptError(self, error):
+    exc = error.GetExceptionInfo()
+    context, line, char = error.GetSourcePosition()
+    self.seen_exception = 1
+    if not verbose:
+      return
+    print " >Exception:", exc[1]
+    try:
+      st = error.GetSourceLineText()
+    except pythoncom.com_error:
+      st = None
+    if st is None: st = ""
+    text = st + "\n" + (" " * (char-1)) + "^" + "\n" + exc[2]
+    for line in string.split(text,"\n"):
+      print "  >" + line
+
+class MyCollection(util.Collection):
+	def _NewEnum(self):
+		return util.Collection._NewEnum(self)
+
+class Test:
+  _public_methods_ = [ 'echo', 'fail' ]
+  _public_attrs_ = ['collection']
+  def __init__(self):
+    self.verbose = verbose
+    self.collection = util.wrap( MyCollection( [1,'Two',3] ))
+    self.last = ""
+    self.fail_called = 0
+#    self._connect_server_ = TestConnectServer(self)
+
+  def echo(self, *args):
+    self.last = string.join(map(str, args))
+    if self.verbose:
+      for arg in args:
+        print arg,
+      print
+
+  def fail(self, *args):
+    print "**** fail() called ***"
+    for arg in args:
+      print arg,
+    print
+    self.fail_called = 1
+#    self._connect_server_.Broadcast(last)
+
+
+#### Connections currently wont work, as there is no way for the engine to
+#### know what events we support.  We need typeinfo support.
+
+IID_ITestEvents = pythoncom.MakeIID("{8EB72F90-0D44-11d1-9C4B-00AA00125A98}")
+
+class TestConnectServer(connect.ConnectableServer):
+	_connect_interfaces_ = [IID_ITestEvents]
+	# The single public method that the client can call on us
+	# (ie, as a normal COM server, this exposes just this single method.
+	def __init__(self, object):
+		self.object = object
+		
+	def Broadcast(self,arg):
+		# Simply broadcast a notification.
+		self._BroadcastNotify(self.NotifyDoneIt, (arg,))
+
+	def NotifyDoneIt(self, interface, arg):
+		interface.Invoke(1000, 0, pythoncom.DISPATCH_METHOD, 1, arg)
+
+VBScript = """\
+prop = "Property Value"
+
+sub hello(arg1)
+   test.echo arg1
+end sub
+  
+sub testcollection
+   if test.collection.Item(0) <> 1 then
+     test.fail("Index 0 was wrong")
+   end if
+   if test.collection.Item(1) <> "Two" then
+     test.fail("Index 1 was wrong")
+   end if
+   if test.collection.Item(2) <> 3 then
+     test.fail("Index 2 was wrong")
+   end if
+   num = 0
+   for each item in test.collection
+     num = num + 1
+   next
+   if num <> 3 then
+     test.fail("Collection didn't have 3 items")
+   end if
+end sub
+"""
+PyScript = """\
+prop = "Property Value"
+def hello(arg1):
+   test.echo(arg1)
+   
+def testcollection():
+#   test.collection[1] = "New one"
+   got = []
+   for item in test.collection:
+     got.append(item)
+   if got != [1, "Two", 3]:
+     test.fail("Didn't get the collection")
+   pass
+"""
+
+ErrScript = """\
+bad code for everyone!
+"""
+
+state_map = {
+	axscript.SCRIPTSTATE_UNINITIALIZED: "SCRIPTSTATE_UNINITIALIZED",
+	axscript.SCRIPTSTATE_INITIALIZED: "SCRIPTSTATE_INITIALIZED",
+	axscript.SCRIPTSTATE_STARTED: "SCRIPTSTATE_STARTED",
+	axscript.SCRIPTSTATE_CONNECTED: "SCRIPTSTATE_CONNECTED",
+	axscript.SCRIPTSTATE_DISCONNECTED: "SCRIPTSTATE_DISCONNECTED",
+	axscript.SCRIPTSTATE_CLOSED: "SCRIPTSTATE_CLOSED",
+}
+
+def _CheckEngineState(engine, name, state):
+  got = engine.engine.eScript.GetScriptState()
+  if got != state:
+    got_name = state_map.get(got, str(got))
+    state_name = state_map.get(state, str(state))
+    raise RuntimeError, "Warning - engine %s has state %s, but expected %s" % (name, got_name, state_name)
+
+class EngineTester(win32com.test.util.TestCase):
+  def _TestEngine(self, engineName, code, bShouldWork = 1):
+    echoer = Test()
+    model = {
+      'test' : util.wrap(echoer),
+      }
+    try:
+      try:
+        site = MySite(model)
+        engine = site._AddEngine(engineName)
+        _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_INITIALIZED)
+        engine.AddCode(code)
+        engine.Start()
+        _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_STARTED)
+      finally:
+        if bShouldWork:
+          self.failUnless(not site.seen_exception, "Script site should not have seen an exception")
+        else:
+          self.failUnless(site.seen_exception, "Script site should have seen an exception")
+      self.failUnless(not echoer.fail_called, "Fail should not have been called")
+      # Now call into the scripts IDispatch
+      from win32com.client.dynamic import Dispatch
+      ob = Dispatch(engine.GetScriptDispatch())
+      ob.hello("Goober")
+      self.assertEqual(echoer.last, "Goober")
+  
+      self.assertEqual(str(ob.prop), "Property Value")
+      ob.testcollection()
+      self.failUnless(not echoer.fail_called, "Fail should not have been called")
+  
+      # Now make sure my engines can evaluate stuff.
+      result = engine.eParse.ParseScriptText("1+1", None, None, None, 0, 0, axscript.SCRIPTTEXT_ISEXPRESSION)
+      self.assertEqual(result, 2)
+      # re-initialize to make sure it transitions back to initialized again.
+      engine.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED)
+      _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_INITIALIZED)
+      engine.Start()
+      _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_STARTED)
+      
+      # Transition back to initialized, then through connected too.
+      engine.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED)
+      _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_INITIALIZED)
+      engine.SetScriptState(axscript.SCRIPTSTATE_CONNECTED)
+      _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_CONNECTED)
+      engine.SetScriptState(axscript.SCRIPTSTATE_INITIALIZED)
+      _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_INITIALIZED)
+    
+      engine.SetScriptState(axscript.SCRIPTSTATE_CONNECTED)
+      _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_CONNECTED)
+      engine.SetScriptState(axscript.SCRIPTSTATE_DISCONNECTED)
+      _CheckEngineState(site, engineName, axscript.SCRIPTSTATE_DISCONNECTED)
+    finally:
+      engine.Close()
+      engine = None
+      site = None
+
+  def testVB(self):
+    self._TestEngine("VBScript", VBScript)
+  def testPython(self):
+    self._TestEngine("Python", PyScript)
+  def testVBExceptions(self):
+    self.assertRaises(pythoncom.com_error,
+                      self._TestEngine, "VBScript", ErrScript, 0)
+  def testPythonExceptions(self):
+    self.assertRaises(pythoncom.com_error,
+                      self._TestEngine, "Python", ErrScript, 0)
+
+if __name__ == '__main__':
+  unittest.main()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/testHost4Dbg.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/testHost4Dbg.py
new file mode 100644
index 0000000..d920959dc
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/axscript/test/testHost4Dbg.py
@@ -0,0 +1,76 @@
+import string, os, sys, traceback
+from win32com.axscript import axscript
+from win32com.axscript.server import axsite
+from win32com.axscript.server.error import Exception
+import pythoncom
+from win32com.server import util
+import win32ui
+
+version = "0.0.1"
+
+class MySite(axsite.AXSite):
+
+  def OnScriptError(self, error):
+    print "An error occurred in the Script Code"
+    exc = error.GetExceptionInfo()
+    try:
+      text = error.GetSourceLineText()
+    except:
+      text = "<unknown>"
+    context, line, char = error.GetSourcePosition()
+    print "Exception: %s (line %d)\n%s\n%s^\n%s" % (exc[1], line, text, " " * (char-1), exc[2])
+
+class ObjectModel:
+  _public_methods_ = [ 'echo', 'msgbox' ]
+  def echo(self, *args):
+    print string.join(map(str, args))
+  def msgbox(self, *args):
+    msg = string.join(map(str, args))
+    win32ui.MessageBox(msg)
+
+def TestEngine():
+  model = {'Test' : util.wrap(ObjectModel()) }
+  scriptDir = "."
+  site = MySite(model)
+  pyEngine = site._AddEngine("Python")
+#  pyEngine2 = site._AddEngine("Python")
+  vbEngine = site._AddEngine("VBScript")
+#  forthEngine = site._AddEngine("ForthScript")
+  try:
+#    code = open(os.path.join(scriptDir, "debugTest.4ths"),"rb").read()
+#    forthEngine.AddCode(code)
+    code = open(os.path.join(scriptDir, "debugTest.pys"),"rb").read()
+    pyEngine.AddCode(code)
+    code = open(os.path.join(scriptDir, "debugTest.vbs"),"rb").read()
+    vbEngine.AddCode(code)
+#    code = open(os.path.join(scriptDir, "debugTestFail.pys"),"rb").read()
+#    pyEngine2.AddCode(code)
+
+#    from win32com.axdebug import axdebug
+#    sessionProvider=pythoncom.CoCreateInstance(axdebug.CLSID_DefaultDebugSessionProvider,None,pythoncom.CLSCTX_ALL, axdebug.IID_IDebugSessionProvider)
+#    sessionProvider.StartDebugSession(None)
+    
+    raw_input("Press enter to continue")
+ #   forthEngine.Start()
+    pyEngine.Start() # Actually run the Python code
+    vbEngine.Start() # Actually run the VB code
+  except pythoncom.com_error, details:
+    print "Script failed: %s (0x%x)" % (details[1], details[0])
+  # Now run the code expected to fail!
+#  try:
+#    pyEngine2.Start() # Actually run the Python code that fails!
+#    print "Script code worked when it should have failed."
+#  except pythoncom.com_error:
+#    pass
+
+  site._Close()
+
+if __name__ == '__main__':
+  import win32com.axdebug.util
+  try:
+    TestEngine()
+  except:
+    traceback.print_exc()
+  win32com.axdebug.util._dump_wrapped()
+  sys.exc_type = sys.exc_value = sys.exc_traceback = None
+  print pythoncom._GetInterfaceCount(),"com objects still alive"
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/directsound/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/directsound/__init__.py
new file mode 100644
index 0000000..c14b39f58
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/directsound/__init__.py
@@ -0,0 +1,3 @@
+# See if we have a special directory for the binaries (for developers)
+import win32com
+win32com.__PackageSupportBuildPath__(__path__)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/directsound/directsound.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/directsound/directsound.pyd
new file mode 100644
index 0000000..aca2ea66
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/directsound/directsound.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/__init__.py
new file mode 100644
index 0000000..cc577a2
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/__init__.py
@@ -0,0 +1 @@
+# empty file to designate as a package.
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/demo/filterDemo.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/demo/filterDemo.py
new file mode 100644
index 0000000..1d542c9d
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/demo/filterDemo.py
@@ -0,0 +1,253 @@
+import pythoncom
+import pywintypes
+
+from win32com.ifilter import ifilter
+from win32com.ifilter.ifiltercon import *
+
+from win32com import storagecon
+
+class FileParser:
+    # Property IDs for the Storage Property Set
+    PIDS_BODY                = 0x00000013
+
+    # property IDs for HTML Storage Property Set
+    PIDH_DESCRIPTION         = "DESCRIPTION"
+    PIDH_HREF                = "A.HREF"
+    PIDH_IMGSRC              = "IMG.SRC"
+    
+    # conversion map to convert ifilter properties to more user friendly names
+    propertyToName = {PSGUID_STORAGE            : {PIDS_BODY      : 'body'},
+                      
+                      PSGUID_SUMMARYINFORMATION : {PIDSI_TITLE    : 'title',
+                                                   PIDSI_SUBJECT  : 'description',
+                                                   PIDSI_AUTHOR   : 'author',
+                                                   PIDSI_KEYWORDS : 'keywords',
+                                                   PIDSI_COMMENTS : 'comments'},
+                      
+                      PSGUID_HTMLINFORMATION    : {PIDH_DESCRIPTION : 'description'},
+
+                      PSGUID_HTML2_INFORMATION  : {PIDH_HREF : 'href',
+                                                   PIDH_IMGSRC : 'img'}      
+                      }
+    
+    def __init__(self, verbose=False):
+        self.f = None
+        self.stg = None
+        self.verbose = verbose
+
+    def Close(self):
+        self.f = None
+        self.stg = None
+        
+    def Parse(self, fileName, maxErrors=10):        
+        properties = {}
+        
+        try:
+            self._bind_to_filter(fileName)
+            try:
+                flags = self.f.Init(IFILTER_INIT_APPLY_INDEX_ATTRIBUTES | IFILTER_INIT_APPLY_OTHER_ATTRIBUTES)
+                if flags == IFILTER_FLAGS_OLE_PROPERTIES and self.stg is not None:
+                    self._trace('filter requires to get properities via ole')
+                    self._get_properties(properties)
+
+                errCnt = 0
+                while True:
+                    try:
+                        # each chunk returns a tuple with the following:-
+                        # idChunk       = The chunk identifier. each chunk has a unique identifier
+                        # breakType     = The type of break that separates the previous chunk from the current chunk. Values are:-
+                        #                 CHUNK_NO_BREAK=0,CHUNK_EOW=1,CHUNK_EOS= 2,CHUNK_EOP= 3,CHUNK_EOC= 4
+                        # flags         = Flags indicate whether this chunk contains a text-type or a value-type property
+                        #                 locale = The language and sublanguage associated with a chunk of text
+                        # attr          = A tuple containing the property to be applied to the chunk. Tuple is (propertyset GUID, property ID)
+                        #                 Property ID can be a number or string
+                        # idChunkSource = The ID of the source of a chunk. The value of the idChunkSource member depends on the nature of the chunk
+                        # startSource   = The offset from which the source text for a derived chunk starts in the source chunk
+                        # lenSource     = The length in characters of the source text from which the current chunk was derived.
+                        #                 A zero value signifies character-by-character correspondence between the source text and the derived text.
+                        
+                        idChunk, breakType, flags, locale, attr, idChunkSource,	startSource,lenSource = self.f.GetChunk()
+                        self._trace("Chunk details:", idChunk, breakType, flags, locale, attr, idChunkSource, startSource,lenSource)
+
+                        # attempt to map each property to a more user friendly name. If we don't know what it is just return
+                        # the set guid and property id. (note: the id can be a number or a string.
+                        propSet = self.propertyToName.get(attr[0])
+                        if propSet:
+                            propName = propSet.get(attr[1], '%s:%s' % attr)
+                        else:
+                            propName = '%s:%s' % attr
+                            
+                    except pythoncom.com_error, e:
+                        if e[0] == FILTER_E_END_OF_CHUNKS:
+                            # we have read all the chunks
+                            break 
+                        elif e[0] in [FILTER_E_EMBEDDING_UNAVAILABLE, FILTER_E_LINK_UNAVAILABLE]:
+                            # the next chunk can't be read. Also keep track of the number of times we
+                            # fail as some filters (ie. the Msoft office ones can get stuck here)
+                            errCnt += 1
+                            if errCnt > maxErrors:
+                                raise
+                            else:
+                                continue
+                        elif e[0] == FILTER_E_ACCESS:
+                            self._trace('Access denied')
+                            raise
+                        elif e[0] == FILTER_E_PASSWORD:
+                            self._trace('Password required')
+                            raise
+                        else:
+                            # any other type of error really can't be recovered from
+                            raise
+                            
+                    # reset consecutive errors (some filters may get stuck in a lopp if embedding or link failures occurs    
+                    errCnt = 0 
+                    
+                    if flags == CHUNK_TEXT:
+                        # its a text segment - get all available text for this chunk.
+                        body_chunks = properties.setdefault(propName, [])
+                        self._get_text(body_chunks)
+                    elif flags == CHUNK_VALUE:
+                        # its a data segment - get the value 
+                        properties[propName] =  self.f.GetValue()  
+                    else:
+                        self._trace("Unknown flag returned by GetChunk:", flags)
+            finally:
+                self.Close()
+                
+        except pythoncom.com_error, e:
+            self._trace("ERROR processing file", e)
+            raise
+
+        return properties    
+
+    def _bind_to_filter(self, fileName):
+        """
+        See if the file is a structured storage file or a normal file
+        and then return an ifilter interface by calling the appropriate bind/load function
+        """
+        if pythoncom.StgIsStorageFile(fileName):
+            self.stg  = pythoncom.StgOpenStorage(fileName, None, storagecon.STGM_READ | storagecon.STGM_SHARE_DENY_WRITE)
+            try:
+                self.f = ifilter.BindIFilterFromStorage(self.stg)
+            except pythoncom.com_error, e:
+                if e[0] == -2147467262: # 0x80004002: # no interface, try the load interface (this happens for some MSoft files)
+                    self.f = ifilter.LoadIFilter(fileName)
+                else:
+                    raise
+        else:
+            self.f = ifilter.LoadIFilter(fileName)
+            self.stg = None
+
+    def _get_text(self, body_chunks):
+        """
+        Gets all the text for a particular chunk. We need to keep calling get text till all the
+        segments for this chunk are retrieved
+        """
+        while True:
+            try:
+               body_chunks.append(self.f.GetText())
+            except pythoncom.com_error, e:
+                if e[0] in [FILTER_E_NO_MORE_TEXT, FILTER_E_NO_MORE_TEXT, FILTER_E_NO_TEXT]:
+                     break
+                else:
+                    raise # not one of the values we were expecting
+                    
+
+    def _get_properties(self, properties):
+        """
+        Use OLE property sets to get base properties
+        """
+        try:
+            pss = self.stg.QueryInterface(pythoncom.IID_IPropertySetStorage )
+        except pythoncom.com_error, e:
+            self._trace('No Property information could be retrieved', e)
+            return
+
+        ps = pss.Open(PSGUID_SUMMARYINFORMATION)
+
+        props = (PIDSI_TITLE, PIDSI_SUBJECT, PIDSI_AUTHOR,  PIDSI_KEYWORDS, PIDSI_COMMENTS)
+
+        title, subject, author, keywords, comments = ps.ReadMultiple(props)
+        if title is not None:
+            properties['title'] = title
+        if subject is not None:
+            properties['description'] = subject
+        if author is not None:    
+            properties['author'] = author
+        if keywords is not None:    
+            properties['keywords'] = keywords
+        if comments is not None:
+            properties['comments'] = comments
+                
+    def _trace(self, *args):
+        
+        if self.verbose:
+            ret = ' '.join([str(arg) for arg in args])
+            try:
+                print ret
+            except IOError:
+                pass
+
+def _usage():
+    import os
+    print "Usage: %s filename [verbose [dumpbody]]" % (os.path.basename(sys.argv[0]),)
+    print
+    print "Where:-"
+    print "filename = name of the file to extract text & properties from"
+    print "verbose = 1=debug output, 0=no debug output (default=0)"
+    print "dumpbody = 1=print text content, 0=don't print content (default=1)"
+    print
+    print "e.g. to dump a word file called spam.doc go:- filterDemo.py spam.doc"
+    print
+    print "by default .htm, .txt, .doc, .dot, .xls, .xlt, .ppt are supported"
+    print "you can filter .pdf's by downloading adobes ifilter component. "
+    print "(currently found at http://download.adobe.com/pub/adobe/acrobat/win/all/ifilter50.exe)."
+    print "ifilters for other filetypes are also available."
+    print
+    print "This extension is only supported on win2000 & winXP - because thats the only"
+    print "place the ifilter stuff is supported. For more info on the API check out "
+    print "MSDN under ifilters"
+    
+        
+if __name__ == "__main__":
+    import sys
+    import operator
+    fName = ''
+    verbose = False
+    bDumpBody =True
+
+    if len(sys.argv) < 2:
+        _usage()
+        sys.exit(1)
+        
+    try:
+        fName = sys.argv[1]
+        verbose = (sys.argv[2]!="0")
+        bDumpBody = (sys.argv[3]!="0")
+    except:
+        pass
+
+    p = FileParser(verbose) 
+    propMap = p.Parse(fName)
+
+    if bDumpBody:
+        print "Body"
+        ch = ' '.join(propMap.get('body', []))
+        try:
+            print ch
+        except UnicodeError:
+            print ch.encode('iso8859-1','ignore')
+
+    print "Properties"
+    for propName, propValue in propMap.items():            
+        print propName,":",
+        if propName == 'body':
+            print "<%s length: %d>" % (propName, reduce(operator.add, [len(p) for p in propValue]),)
+        elif type(propValue) == type([]):
+            print
+            for pv in propValue:
+                print pv
+        else:
+            print propValue
+        print         
+        
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/ifilter.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/ifilter.pyd
new file mode 100644
index 0000000..4b1d76e4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/ifilter.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/ifiltercon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/ifiltercon.py
new file mode 100644
index 0000000..c71b850
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/ifilter/ifiltercon.py
@@ -0,0 +1,109 @@
+# manual stuff
+from pywintypes import IID
+PSGUID_STORAGE             = IID('{B725F130-47EF-101A-A5F1-02608C9EEBAC}')
+PSGUID_SUMMARYINFORMATION  = IID('{F29F85E0-4FF9-1068-AB91-08002B27B3D9}')
+PSGUID_HTMLINFORMATION     = IID('{D1B5D3F0-C0B3-11CF-9A92-00A0C908DBF1}')
+PSGUID_HTML2_INFORMATION   = IID('{C82BF597-B831-11D0-B733-00AA00A1EBD2}')
+
+IFILTER_INIT_CANON_PARAGRAPHS	= 1
+IFILTER_INIT_HARD_LINE_BREAKS	= 2
+IFILTER_INIT_CANON_HYPHENS	= 4
+IFILTER_INIT_CANON_SPACES	= 8
+IFILTER_INIT_APPLY_INDEX_ATTRIBUTES	= 16
+IFILTER_INIT_APPLY_CRAWL_ATTRIBUTES  = 256
+IFILTER_INIT_APPLY_OTHER_ATTRIBUTES	= 32
+IFILTER_INIT_INDEXING_ONLY	= 64
+IFILTER_INIT_SEARCH_LINKS	= 128
+IFILTER_INIT_FILTER_OWNED_VALUE_OK = 512
+
+IFILTER_FLAGS_OLE_PROPERTIES	= 1
+
+CHUNK_TEXT	= 0x1
+CHUNK_VALUE	= 0x2
+CHUNK_NO_BREAK	= 0
+CHUNK_EOW	= 1
+CHUNK_EOS	= 2
+CHUNK_EOP	= 3
+CHUNK_EOC	= 4
+
+NOT_AN_ERROR = 0x00080000L
+FILTER_E_END_OF_CHUNKS = -2147215616
+FILTER_E_NO_MORE_TEXT = -2147215615
+FILTER_E_NO_MORE_VALUES = -2147215614
+FILTER_E_ACCESS = -2147215613
+FILTER_W_MONIKER_CLIPPED = 0x00041704
+FILTER_E_NO_TEXT = -2147215611
+FILTER_E_NO_VALUES = -2147215610
+FILTER_E_EMBEDDING_UNAVAILABLE = -2147215609
+FILTER_E_LINK_UNAVAILABLE = -2147215608
+FILTER_S_LAST_TEXT = 0x00041709
+FILTER_S_LAST_VALUES = 0x0004170A
+FILTER_E_PASSWORD = -2147215605
+FILTER_E_UNKNOWNFORMAT = -2147215604
+
+# Generated by h2py from PropIdl.h
+PROPSETFLAG_DEFAULT = ( 0 )
+PROPSETFLAG_NONSIMPLE = ( 1 )
+PROPSETFLAG_ANSI = ( 2 )
+PROPSETFLAG_UNBUFFERED = ( 4 )
+PROPSETFLAG_CASE_SENSITIVE = ( 8 )
+PROPSET_BEHAVIOR_CASE_SENSITIVE = ( 1 )
+PID_DICTIONARY = ( 0 )
+PID_CODEPAGE = ( 0x1 )
+PID_FIRST_USABLE = ( 0x2 )
+PID_FIRST_NAME_DEFAULT = ( 0xfff )
+PID_LOCALE = ( (-2147483648) )
+PID_MODIFY_TIME = ( (-2147483647) )
+PID_SECURITY = ( (-2147483646) )
+PID_BEHAVIOR = ( (-2147483645) )
+PID_ILLEGAL = ( (-1) )
+PID_MIN_READONLY = ( (-2147483648) )
+PID_MAX_READONLY = ( (-1073741825) )
+PIDDI_THUMBNAIL = 0x00000002
+PIDSI_TITLE = 0x00000002
+PIDSI_SUBJECT = 0x00000003
+PIDSI_AUTHOR = 0x00000004
+PIDSI_KEYWORDS = 0x00000005
+PIDSI_COMMENTS = 0x00000006
+PIDSI_TEMPLATE = 0x00000007
+PIDSI_LASTAUTHOR = 0x00000008
+PIDSI_REVNUMBER = 0x00000009
+PIDSI_EDITTIME = 0x0000000a
+PIDSI_LASTPRINTED = 0x0000000b
+PIDSI_CREATE_DTM = 0x0000000c
+PIDSI_LASTSAVE_DTM = 0x0000000d
+PIDSI_PAGECOUNT = 0x0000000e
+PIDSI_WORDCOUNT = 0x0000000f
+PIDSI_CHARCOUNT = 0x00000010
+PIDSI_THUMBNAIL = 0x00000011
+PIDSI_APPNAME = 0x00000012
+PIDSI_DOC_SECURITY = 0x00000013
+PIDDSI_CATEGORY = 0x00000002
+PIDDSI_PRESFORMAT = 0x00000003
+PIDDSI_BYTECOUNT = 0x00000004
+PIDDSI_LINECOUNT = 0x00000005
+PIDDSI_PARCOUNT = 0x00000006
+PIDDSI_SLIDECOUNT = 0x00000007
+PIDDSI_NOTECOUNT = 0x00000008
+PIDDSI_HIDDENCOUNT = 0x00000009
+PIDDSI_MMCLIPCOUNT = 0x0000000A
+PIDDSI_SCALE = 0x0000000B
+PIDDSI_HEADINGPAIR = 0x0000000C
+PIDDSI_DOCPARTS = 0x0000000D
+PIDDSI_MANAGER = 0x0000000E
+PIDDSI_COMPANY = 0x0000000F
+PIDDSI_LINKSDIRTY = 0x00000010
+PIDMSI_EDITOR = 0x00000002
+PIDMSI_SUPPLIER = 0x00000003
+PIDMSI_SOURCE = 0x00000004
+PIDMSI_SEQUENCE_NO = 0x00000005
+PIDMSI_PROJECT = 0x00000006
+PIDMSI_STATUS = 0x00000007
+PIDMSI_OWNER = 0x00000008
+PIDMSI_RATING = 0x00000009
+PIDMSI_PRODUCTION = 0x0000000A
+PIDMSI_COPYRIGHT = 0x0000000B
+PRSPEC_INVALID = -1
+PRSPEC_LPWSTR = 0
+PRSPEC_PROPID = 1
+CCH_MAX_PROPSTG_NAME = 31
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/internet/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/internet/__init__.py
new file mode 100644
index 0000000..d882689
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/internet/__init__.py
@@ -0,0 +1,4 @@
+# See if we have a special directory for the binaries (for developers)
+import win32com
+win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/internet/internet.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/internet/internet.pyd
new file mode 100644
index 0000000..d3162ff0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/internet/internet.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/__init__.py
new file mode 100644
index 0000000..0ffa2dc9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/__init__.py
@@ -0,0 +1,23 @@
+if type(__path__)==type(''):
+	# For freeze to work!
+	import sys
+	try:
+		import mapi
+		sys.modules['win32com.mapi.mapi'] = mapi
+	except ImportError:
+		pass
+	try:
+		import exchange
+		sys.modules['win32com.mapi.exchange'] = exchange
+	except ImportError:
+		pass
+	try:
+		import exchdapi
+		sys.modules['win32com.mapi.exchdapi'] = exchdapi
+	except ImportError:
+		pass
+else:
+	import win32com
+	# See if we have a special directory for the binaries (for developers)
+	win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/demos/mapisend.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/demos/mapisend.py
new file mode 100644
index 0000000..7f6f3a0
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/demos/mapisend.py
@@ -0,0 +1,85 @@
+#!/usr/bin/env python
+
+"""module to send mail with Extended MAPI using the pywin32 mapi wrappers..."""
+
+# this was based on Jason Hattingh's C++ code at http://www.codeproject.com/internet/mapadmin.asp
+# written by David Fraser <davidf at sjsoft.com> and Stephen Emslie <stephene at sjsoft.com>
+# you can test this by changing the variables at the bottom and running from the command line
+
+from win32com.mapi import mapi
+from win32com.mapi import mapitags
+
+# Pre 2.2.1 compat.
+try: True, False
+except NameError: True = 1==1; False = 1==0
+
+def SendEMAPIMail(Subject="", Message="", SendTo=None, SendCC=None, SendBCC=None, MAPIProfile=None):
+    """Sends an email to the recipient using the extended MAPI interface
+    Subject and Message are strings
+    Send{To,CC,BCC} are comma-separated address lists
+    MAPIProfile is the name of the MAPI profile"""
+
+    # initialize and log on
+    mapi.MAPIInitialize(None)
+    session = mapi.MAPILogonEx(0, MAPIProfile, None, mapi.MAPI_EXTENDED | mapi.MAPI_USE_DEFAULT)
+    messagestorestable = session.GetMsgStoresTable(0)
+    messagestorestable.SetColumns((mapitags.PR_ENTRYID, mapitags.PR_DISPLAY_NAME_A, mapitags.PR_DEFAULT_STORE),0)
+
+    while True:
+        rows = messagestorestable.QueryRows(1, 0)
+        #if this is the last row then stop
+        if len(rows) != 1:
+            break
+        row = rows[0]
+        #if this is the default store then stop
+        if ((mapitags.PR_DEFAULT_STORE,True) in row):
+            break
+
+    # unpack the row and open the message store
+    (eid_tag, eid), (name_tag, name), (def_store_tag, def_store) = row
+    msgstore = session.OpenMsgStore(0,eid,None,mapi.MDB_NO_DIALOG | mapi.MAPI_BEST_ACCESS)
+
+    # get the outbox
+    hr, props = msgstore.GetProps((mapitags.PR_IPM_OUTBOX_ENTRYID), 0)
+    (tag, eid) = props[0]
+    #check for errors
+    if mapitags.PROP_TYPE(tag) == mapitags.PT_ERROR:
+        raise TypeError,'got PT_ERROR instead of PT_BINARY: %s'%eid
+    outboxfolder = msgstore.OpenEntry(eid,None,mapi.MAPI_BEST_ACCESS)
+
+    # create the message and the addrlist
+    message = outboxfolder.CreateMessage(None,0)
+    # note: you can use the resolveaddress functions for this. but you may get headaches
+    pal = []
+    def makeentry(recipient, recipienttype):
+      return ((mapitags.PR_RECIPIENT_TYPE, recipienttype),
+              (mapitags.PR_SEND_RICH_INFO, False),
+              (mapitags.PR_DISPLAY_TYPE, 0),
+              (mapitags.PR_OBJECT_TYPE, 6),
+              (mapitags.PR_EMAIL_ADDRESS_A, recipient),
+              (mapitags.PR_ADDRTYPE_A, 'SMTP'),
+              (mapitags.PR_DISPLAY_NAME_A, recipient))
+    
+    if SendTo:
+      pal.extend([makeentry(recipient, mapi.MAPI_TO) for recipient in SendTo.split(",")])
+    if SendCC:
+      pal.extend([makeentry(recipient, mapi.MAPI_CC) for recipient in SendCC.split(",")])
+    if SendBCC:
+      pal.extend([makeentry(recipient, mapi.MAPI_BCC) for recipient in SendBCC.split(",")])
+
+    # add the resolved recipients to the message
+    message.ModifyRecipients(mapi.MODRECIP_ADD,pal)
+    message.SetProps([(mapitags.PR_BODY_A,Message),
+                      (mapitags.PR_SUBJECT_A,Subject)])
+
+    # save changes and submit
+    outboxfolder.SaveChanges(0)
+    message.SubmitMessage(0)
+
+if __name__ == '__main__':
+   MAPIProfile = ""
+   # Change this to a valid email address to test
+   SendTo = "an.invalid at address"
+   SendMessage = "testing one two three"
+   SendSubject = "Testing Extended MAPI!!"
+   SendEMAPIMail(SendSubject, SendMessage, SendTo, MAPIProfile=MAPIProfile)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/emsabtags.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/emsabtags.py
new file mode 100644
index 0000000..bbbbe2dd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/emsabtags.py
@@ -0,0 +1,845 @@
+# Converted "manually" from EMSABTAG.H
+from mapitags import PT_UNSPECIFIED, PT_NULL, PT_I2, PT_LONG, PT_R4, \
+                     PT_DOUBLE, PT_CURRENCY, PT_APPTIME, PT_ERROR, \
+                     PT_BOOLEAN, PT_OBJECT, PT_I8, PT_STRING8, PT_UNICODE, \
+                     PT_SYSTIME, PT_CLSID, PT_BINARY, PT_SHORT, PT_I4, \
+                     PT_FLOAT, PT_DOUBLE, PT_LONGLONG, PT_TSTRING, \
+                     PT_MV_I2, PT_MV_LONG, PT_MV_R4, PT_MV_DOUBLE, \
+                     PT_MV_CURRENCY, PT_MV_APPTIME, PT_MV_SYSTIME, \
+                     PT_MV_STRING8, PT_MV_BINARY, PT_MV_UNICODE, \
+                     PT_MV_CLSID, PT_MV_I8, PT_MV_SHORT, PT_MV_I4, \
+                     PT_MV_FLOAT, PT_MV_R8, PT_MV_LONGLONG, PT_MV_TSTRING, \
+                     PROP_TAG
+
+
+AB_SHOW_PHANTOMS                      = 2
+AB_SHOW_OTHERS                        = 4
+
+# Flags for ulFlag on ResolveNames
+EMS_AB_ADDRESS_LOOKUP                 = 1
+
+
+# Constructed, but externally visible.
+PR_EMS_AB_SERVER                     = PROP_TAG( PT_TSTRING, 65534)
+PR_EMS_AB_SERVER_A                   = PROP_TAG( PT_STRING8, 65534)
+PR_EMS_AB_SERVER_W                   = PROP_TAG( PT_UNICODE, 65534)
+PR_EMS_AB_CONTAINERID                = PROP_TAG( PT_LONG,    65533)
+PR_EMS_AB_DOS_ENTRYID                = PR_EMS_AB_CONTAINERID
+PR_EMS_AB_PARENT_ENTRYID             = PROP_TAG( PT_BINARY,  65532)
+PR_EMS_AB_IS_MASTER                  = PROP_TAG(PT_BOOLEAN,  65531)
+PR_EMS_AB_OBJECT_OID                 = PROP_TAG(PT_BINARY,   65530)
+PR_EMS_AB_HIERARCHY_PATH             = PROP_TAG(PT_TSTRING,  65529)
+PR_EMS_AB_HIERARCHY_PATH_A           = PROP_TAG(PT_STRING8,  65529)
+PR_EMS_AB_HIERARCHY_PATH_W           = PROP_TAG(PT_UNICODE,  65529)
+PR_EMS_AB_CHILD_RDNS                  = PROP_TAG(PT_MV_STRING8,   65528)
+
+MIN_EMS_AB_CONSTRUCTED_PROP_ID        = 65528
+
+PR_EMS_AB_OTHER_RECIPS                = PROP_TAG(PT_OBJECT,       61440)
+
+# Prop tags defined in the schema.
+PR_EMS_AB_DISPLAY_NAME_PRINTABLE      = PROP_TAG(PT_TSTRING,      14847)
+PR_EMS_AB_DISPLAY_NAME_PRINTABLE_A    = PROP_TAG(PT_STRING8,      14847)
+PR_EMS_AB_DISPLAY_NAME_PRINTABLE_W    = PROP_TAG(PT_UNICODE,      14847)
+
+PR_EMS_AB_ACCESS_CATEGORY            = PROP_TAG( PT_LONG,          32836)
+PR_EMS_AB_ACTIVATION_SCHEDULE        = PROP_TAG( PT_BINARY,        32837)
+PR_EMS_AB_ACTIVATION_STYLE           = PROP_TAG( PT_LONG,          32838)
+PR_EMS_AB_ADDRESS_ENTRY_DISPLAY_TABLE = PROP_TAG( PT_BINARY,        32791)
+PR_EMS_AB_ADDRESS_ENTRY_DISPLAY_TABLE_MSDOS = PROP_TAG( PT_BINARY,        32839)
+PR_EMS_AB_ADDRESS_SYNTAX             = PROP_TAG( PT_BINARY,        32792)
+PR_EMS_AB_ADDRESS_TYPE               = PROP_TAG( PT_TSTRING,       32840)
+PR_EMS_AB_ADDRESS_TYPE_A             = PROP_TAG( PT_STRING8,       32840)
+PR_EMS_AB_ADDRESS_TYPE_W             = PROP_TAG( PT_UNICODE,       32840)
+PR_EMS_AB_ADMD                       = PROP_TAG( PT_TSTRING,       32841)
+PR_EMS_AB_ADMD_A                     = PROP_TAG( PT_STRING8,       32841)
+PR_EMS_AB_ADMD_W                     = PROP_TAG( PT_UNICODE,       32841)
+PR_EMS_AB_ADMIN_DESCRIPTION          = PROP_TAG( PT_TSTRING,       32842)
+PR_EMS_AB_ADMIN_DESCRIPTION_A        = PROP_TAG( PT_STRING8,       32842)
+PR_EMS_AB_ADMIN_DESCRIPTION_W        = PROP_TAG( PT_UNICODE,       32842)
+PR_EMS_AB_ADMIN_DISPLAY_NAME         = PROP_TAG( PT_TSTRING,       32843)
+PR_EMS_AB_ADMIN_DISPLAY_NAME_A       = PROP_TAG( PT_STRING8,       32843)
+PR_EMS_AB_ADMIN_DISPLAY_NAME_W       = PROP_TAG( PT_UNICODE,       32843)
+PR_EMS_AB_ADMIN_EXTENSION_DLL        = PROP_TAG( PT_TSTRING,       32844)
+PR_EMS_AB_ADMIN_EXTENSION_DLL_A      = PROP_TAG( PT_STRING8,       32844)
+PR_EMS_AB_ADMIN_EXTENSION_DLL_W      = PROP_TAG( PT_UNICODE,       32844)
+PR_EMS_AB_ALIASED_OBJECT_NAME        = PROP_TAG( PT_TSTRING,       32845)
+PR_EMS_AB_ALIASED_OBJECT_NAME_A      = PROP_TAG( PT_STRING8,       32845)
+PR_EMS_AB_ALIASED_OBJECT_NAME_W      = PROP_TAG( PT_UNICODE,       32845)
+PR_EMS_AB_ALIASED_OBJECT_NAME_O      = PROP_TAG( PT_OBJECT,        32845)
+PR_EMS_AB_ALIASED_OBJECT_NAME_T      = PROP_TAG( PT_TSTRING,       32845)
+PR_EMS_AB_ALT_RECIPIENT              = PROP_TAG( PT_TSTRING,       32846)
+PR_EMS_AB_ALT_RECIPIENT_A            = PROP_TAG( PT_STRING8,       32846)
+PR_EMS_AB_ALT_RECIPIENT_W            = PROP_TAG( PT_UNICODE,       32846)
+PR_EMS_AB_ALT_RECIPIENT_O            = PROP_TAG( PT_OBJECT,        32846)
+PR_EMS_AB_ALT_RECIPIENT_T            = PROP_TAG( PT_TSTRING,       32846)
+PR_EMS_AB_ALT_RECIPIENT_BL           = PROP_TAG( PT_MV_TSTRING,    32847)
+PR_EMS_AB_ALT_RECIPIENT_BL_A         = PROP_TAG( PT_MV_STRING8,    32847)
+PR_EMS_AB_ALT_RECIPIENT_BL_W         = PROP_TAG( PT_MV_UNICODE,    32847)
+PR_EMS_AB_ALT_RECIPIENT_BL_O         = PROP_TAG( PT_OBJECT,        32847)
+PR_EMS_AB_ALT_RECIPIENT_BL_T         = PROP_TAG( PT_MV_TSTRING,    32847)
+PR_EMS_AB_ANCESTOR_ID                = PROP_TAG( PT_BINARY,        32848)
+PR_EMS_AB_ASSOC_NT_ACCOUNT           = PROP_TAG( PT_BINARY,        32807)
+PR_EMS_AB_ASSOC_REMOTE_DXA           = PROP_TAG( PT_MV_TSTRING,    32849)
+PR_EMS_AB_ASSOC_REMOTE_DXA_A         = PROP_TAG( PT_MV_STRING8,    32849)
+PR_EMS_AB_ASSOC_REMOTE_DXA_W         = PROP_TAG( PT_MV_UNICODE,    32849)
+PR_EMS_AB_ASSOC_REMOTE_DXA_O         = PROP_TAG( PT_OBJECT,        32849)
+PR_EMS_AB_ASSOC_REMOTE_DXA_T         = PROP_TAG( PT_MV_TSTRING,    32849)
+PR_EMS_AB_ASSOCIATION_LIFETIME       = PROP_TAG( PT_LONG,          32850)
+PR_EMS_AB_AUTH_ORIG_BL               = PROP_TAG( PT_MV_TSTRING,    32851)
+PR_EMS_AB_AUTH_ORIG_BL_A             = PROP_TAG( PT_MV_STRING8,    32851)
+PR_EMS_AB_AUTH_ORIG_BL_W             = PROP_TAG( PT_MV_UNICODE,    32851)
+PR_EMS_AB_AUTH_ORIG_BL_O             = PROP_TAG( PT_OBJECT,        32851)
+PR_EMS_AB_AUTH_ORIG_BL_T             = PROP_TAG( PT_MV_TSTRING,    32851)
+PR_EMS_AB_AUTHORITY_REVOCATION_LIST  = PROP_TAG( PT_MV_BINARY,     32806)
+PR_EMS_AB_AUTHORIZED_DOMAIN          = PROP_TAG( PT_TSTRING,       32852)
+PR_EMS_AB_AUTHORIZED_DOMAIN_A        = PROP_TAG( PT_STRING8,       32852)
+PR_EMS_AB_AUTHORIZED_DOMAIN_W        = PROP_TAG( PT_UNICODE,       32852)
+PR_EMS_AB_AUTHORIZED_PASSWORD        = PROP_TAG( PT_BINARY,        32853)
+PR_EMS_AB_AUTHORIZED_USER            = PROP_TAG( PT_TSTRING,       32854)
+PR_EMS_AB_AUTHORIZED_USER_A          = PROP_TAG( PT_STRING8,       32854)
+PR_EMS_AB_AUTHORIZED_USER_W          = PROP_TAG( PT_UNICODE,       32854)
+PR_EMS_AB_AUTOREPLY                  = PROP_TAG( PT_BOOLEAN,       32779)
+PR_EMS_AB_AUTOREPLY_MESSAGE          = PROP_TAG( PT_TSTRING,       32778)
+PR_EMS_AB_AUTOREPLY_MESSAGE_A        = PROP_TAG( PT_STRING8,       32778)
+PR_EMS_AB_AUTOREPLY_MESSAGE_W        = PROP_TAG( PT_UNICODE,       32778)
+PR_EMS_AB_AUTOREPLY_SUBJECT          = PROP_TAG( PT_TSTRING,       32830)
+PR_EMS_AB_AUTOREPLY_SUBJECT_A        = PROP_TAG( PT_STRING8,       32830)
+PR_EMS_AB_AUTOREPLY_SUBJECT_W        = PROP_TAG( PT_UNICODE,       32830)
+PR_EMS_AB_BRIDGEHEAD_SERVERS         = PROP_TAG( PT_MV_TSTRING,    33140)
+PR_EMS_AB_BRIDGEHEAD_SERVERS_A       = PROP_TAG( PT_MV_STRING8,    33140)
+PR_EMS_AB_BRIDGEHEAD_SERVERS_W       = PROP_TAG( PT_MV_UNICODE,    33140)
+PR_EMS_AB_BRIDGEHEAD_SERVERS_O       = PROP_TAG( PT_OBJECT,        33140)
+PR_EMS_AB_BRIDGEHEAD_SERVERS_T       = PROP_TAG( PT_MV_TSTRING,    33140)
+PR_EMS_AB_BUSINESS_CATEGORY          = PROP_TAG( PT_MV_TSTRING,    32855)
+PR_EMS_AB_BUSINESS_CATEGORY_A        = PROP_TAG( PT_MV_STRING8,    32855)
+PR_EMS_AB_BUSINESS_CATEGORY_W        = PROP_TAG( PT_MV_UNICODE,    32855)
+PR_EMS_AB_BUSINESS_ROLES             = PROP_TAG( PT_BINARY,        32803)
+PR_EMS_AB_CA_CERTIFICATE             = PROP_TAG( PT_MV_BINARY,     32771)
+PR_EMS_AB_CAN_CREATE_PF              = PROP_TAG( PT_MV_TSTRING,    32856)
+PR_EMS_AB_CAN_CREATE_PF_A            = PROP_TAG( PT_MV_STRING8,    32856)
+PR_EMS_AB_CAN_CREATE_PF_W            = PROP_TAG( PT_MV_UNICODE,    32856)
+PR_EMS_AB_CAN_CREATE_PF_O            = PROP_TAG( PT_OBJECT,        32856)
+PR_EMS_AB_CAN_CREATE_PF_T            = PROP_TAG( PT_MV_TSTRING,    32856)
+PR_EMS_AB_CAN_CREATE_PF_BL           = PROP_TAG( PT_MV_TSTRING,    32857)
+PR_EMS_AB_CAN_CREATE_PF_BL_A         = PROP_TAG( PT_MV_STRING8,    32857)
+PR_EMS_AB_CAN_CREATE_PF_BL_W         = PROP_TAG( PT_MV_UNICODE,    32857)
+PR_EMS_AB_CAN_CREATE_PF_BL_O         = PROP_TAG( PT_OBJECT,        32857)
+PR_EMS_AB_CAN_CREATE_PF_BL_T         = PROP_TAG( PT_MV_TSTRING,    32857)
+PR_EMS_AB_CAN_CREATE_PF_DL           = PROP_TAG( PT_MV_TSTRING,    32858)
+PR_EMS_AB_CAN_CREATE_PF_DL_A         = PROP_TAG( PT_MV_STRING8,    32858)
+PR_EMS_AB_CAN_CREATE_PF_DL_W         = PROP_TAG( PT_MV_UNICODE,    32858)
+PR_EMS_AB_CAN_CREATE_PF_DL_O         = PROP_TAG( PT_OBJECT,        32858)
+PR_EMS_AB_CAN_CREATE_PF_DL_T         = PROP_TAG( PT_MV_TSTRING,    32858)
+PR_EMS_AB_CAN_CREATE_PF_DL_BL        = PROP_TAG( PT_MV_TSTRING,    32859)
+PR_EMS_AB_CAN_CREATE_PF_DL_BL_A      = PROP_TAG( PT_MV_STRING8,    32859)
+PR_EMS_AB_CAN_CREATE_PF_DL_BL_W      = PROP_TAG( PT_MV_UNICODE,    32859)
+PR_EMS_AB_CAN_CREATE_PF_DL_BL_O      = PROP_TAG( PT_OBJECT,        32859)
+PR_EMS_AB_CAN_CREATE_PF_DL_BL_T      = PROP_TAG( PT_MV_TSTRING,    32859)
+PR_EMS_AB_CAN_NOT_CREATE_PF          = PROP_TAG( PT_MV_TSTRING,    32860)
+PR_EMS_AB_CAN_NOT_CREATE_PF_A        = PROP_TAG( PT_MV_STRING8,    32860)
+PR_EMS_AB_CAN_NOT_CREATE_PF_W        = PROP_TAG( PT_MV_UNICODE,    32860)
+PR_EMS_AB_CAN_NOT_CREATE_PF_O        = PROP_TAG( PT_OBJECT,        32860)
+PR_EMS_AB_CAN_NOT_CREATE_PF_T        = PROP_TAG( PT_MV_TSTRING,    32860)
+PR_EMS_AB_CAN_NOT_CREATE_PF_BL       = PROP_TAG( PT_MV_TSTRING,    32861)
+PR_EMS_AB_CAN_NOT_CREATE_PF_BL_A     = PROP_TAG( PT_MV_STRING8,    32861)
+PR_EMS_AB_CAN_NOT_CREATE_PF_BL_W     = PROP_TAG( PT_MV_UNICODE,    32861)
+PR_EMS_AB_CAN_NOT_CREATE_PF_BL_O     = PROP_TAG( PT_OBJECT,        32861)
+PR_EMS_AB_CAN_NOT_CREATE_PF_BL_T     = PROP_TAG( PT_MV_TSTRING,    32861)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL       = PROP_TAG( PT_MV_TSTRING,    32862)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_A     = PROP_TAG( PT_MV_STRING8,    32862)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_W     = PROP_TAG( PT_MV_UNICODE,    32862)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_O     = PROP_TAG( PT_OBJECT,        32862)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_T     = PROP_TAG( PT_MV_TSTRING,    32862)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL    = PROP_TAG( PT_MV_TSTRING,    32863)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_A  = PROP_TAG( PT_MV_STRING8,    32863)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_W  = PROP_TAG( PT_MV_UNICODE,    32863)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_O  = PROP_TAG( PT_OBJECT,        32863)
+PR_EMS_AB_CAN_NOT_CREATE_PF_DL_BL_T  = PROP_TAG( PT_MV_TSTRING,    32863)
+PR_EMS_AB_CAN_PRESERVE_DNS           = PROP_TAG( PT_BOOLEAN,       32864)
+PR_EMS_AB_CERTIFICATE_REVOCATION_LIST = PROP_TAG( PT_BINARY,        32790)
+PR_EMS_AB_CLOCK_ALERT_OFFSET         = PROP_TAG( PT_LONG,          32865)
+PR_EMS_AB_CLOCK_ALERT_REPAIR         = PROP_TAG( PT_BOOLEAN,       32866)
+PR_EMS_AB_CLOCK_WARNING_OFFSET       = PROP_TAG( PT_LONG,          32867)
+PR_EMS_AB_CLOCK_WARNING_REPAIR       = PROP_TAG( PT_BOOLEAN,       32868)
+PR_EMS_AB_COMPUTER_NAME              = PROP_TAG( PT_TSTRING,       32869)
+PR_EMS_AB_COMPUTER_NAME_A            = PROP_TAG( PT_STRING8,       32869)
+PR_EMS_AB_COMPUTER_NAME_W            = PROP_TAG( PT_UNICODE,       32869)
+PR_EMS_AB_CONNECTED_DOMAINS          = PROP_TAG( PT_MV_TSTRING,    32870)
+PR_EMS_AB_CONNECTED_DOMAINS_A        = PROP_TAG( PT_MV_STRING8,    32870)
+PR_EMS_AB_CONNECTED_DOMAINS_W        = PROP_TAG( PT_MV_UNICODE,    32870)
+PR_EMS_AB_CONTAINER_INFO             = PROP_TAG( PT_LONG,          32871)
+PR_EMS_AB_COST                       = PROP_TAG( PT_LONG,          32872)
+PR_EMS_AB_COUNTRY_NAME               = PROP_TAG( PT_TSTRING,       32873)
+PR_EMS_AB_COUNTRY_NAME_A             = PROP_TAG( PT_STRING8,       32873)
+PR_EMS_AB_COUNTRY_NAME_W             = PROP_TAG( PT_UNICODE,       32873)
+PR_EMS_AB_CROSS_CERTIFICATE_PAIR     = PROP_TAG( PT_MV_BINARY,     32805)
+PR_EMS_AB_DELIV_CONT_LENGTH          = PROP_TAG( PT_LONG,          32874)
+PR_EMS_AB_DELIV_EITS                 = PROP_TAG( PT_MV_BINARY,     32875)
+PR_EMS_AB_DELIV_EXT_CONT_TYPES       = PROP_TAG( PT_MV_BINARY,     32876)
+PR_EMS_AB_DELIVER_AND_REDIRECT       = PROP_TAG( PT_BOOLEAN,       32877)
+PR_EMS_AB_DELIVERY_MECHANISM         = PROP_TAG( PT_LONG,          32878)
+PR_EMS_AB_DESCRIPTION                = PROP_TAG( PT_MV_TSTRING,    32879)
+PR_EMS_AB_DESCRIPTION_A              = PROP_TAG( PT_MV_STRING8,    32879)
+PR_EMS_AB_DESCRIPTION_W              = PROP_TAG( PT_MV_UNICODE,    32879)
+PR_EMS_AB_DESTINATION_INDICATOR      = PROP_TAG( PT_MV_TSTRING,    32880)
+PR_EMS_AB_DESTINATION_INDICATOR_A    = PROP_TAG( PT_MV_STRING8,    32880)
+PR_EMS_AB_DESTINATION_INDICATOR_W    = PROP_TAG( PT_MV_UNICODE,    32880)
+PR_EMS_AB_DIAGNOSTIC_REG_KEY         = PROP_TAG( PT_TSTRING,       32881)
+PR_EMS_AB_DIAGNOSTIC_REG_KEY_A       = PROP_TAG( PT_STRING8,       32881)
+PR_EMS_AB_DIAGNOSTIC_REG_KEY_W       = PROP_TAG( PT_UNICODE,       32881)
+PR_EMS_AB_DISPLAY_NAME_OVERRIDE      = PROP_TAG( PT_BOOLEAN,       32769)
+PR_EMS_AB_DL_MEM_REJECT_PERMS_BL     = PROP_TAG( PT_MV_TSTRING,    32882)
+PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_A   = PROP_TAG( PT_MV_STRING8,    32882)
+PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_W   = PROP_TAG( PT_MV_UNICODE,    32882)
+PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_O   = PROP_TAG( PT_OBJECT,        32882)
+PR_EMS_AB_DL_MEM_REJECT_PERMS_BL_T   = PROP_TAG( PT_MV_TSTRING,    32882)
+PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL     = PROP_TAG( PT_MV_TSTRING,    32883)
+PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_A   = PROP_TAG( PT_MV_STRING8,    32883)
+PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_W   = PROP_TAG( PT_MV_UNICODE,    32883)
+PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_O   = PROP_TAG( PT_OBJECT,        32883)
+PR_EMS_AB_DL_MEM_SUBMIT_PERMS_BL_T   = PROP_TAG( PT_MV_TSTRING,    32883)
+PR_EMS_AB_DL_MEMBER_RULE             = PROP_TAG( PT_MV_BINARY,     32884)
+PR_EMS_AB_DOMAIN_DEF_ALT_RECIP       = PROP_TAG( PT_TSTRING,       32885)
+PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_A     = PROP_TAG( PT_STRING8,       32885)
+PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_W     = PROP_TAG( PT_UNICODE,       32885)
+PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_O     = PROP_TAG( PT_OBJECT,        32885)
+PR_EMS_AB_DOMAIN_DEF_ALT_RECIP_T     = PROP_TAG( PT_TSTRING,       32885)
+PR_EMS_AB_DOMAIN_NAME                = PROP_TAG( PT_TSTRING,       32886)
+PR_EMS_AB_DOMAIN_NAME_A              = PROP_TAG( PT_STRING8,       32886)
+PR_EMS_AB_DOMAIN_NAME_W              = PROP_TAG( PT_UNICODE,       32886)
+PR_EMS_AB_DSA_SIGNATURE              = PROP_TAG( PT_BINARY,        32887)
+PR_EMS_AB_DXA_ADMIN_COPY             = PROP_TAG( PT_BOOLEAN,       32888)
+PR_EMS_AB_DXA_ADMIN_FORWARD          = PROP_TAG( PT_BOOLEAN,       32889)
+PR_EMS_AB_DXA_ADMIN_UPDATE           = PROP_TAG( PT_LONG,          32890)
+PR_EMS_AB_DXA_APPEND_REQCN           = PROP_TAG( PT_BOOLEAN,       32891)
+PR_EMS_AB_DXA_CONF_CONTAINER_LIST    = PROP_TAG( PT_MV_TSTRING,    32892)
+PR_EMS_AB_DXA_CONF_CONTAINER_LIST_A  = PROP_TAG( PT_MV_STRING8,    32892)
+PR_EMS_AB_DXA_CONF_CONTAINER_LIST_W  = PROP_TAG( PT_MV_UNICODE,    32892)
+PR_EMS_AB_DXA_CONF_CONTAINER_LIST_O  = PROP_TAG( PT_OBJECT,        32892)
+PR_EMS_AB_DXA_CONF_CONTAINER_LIST_T  = PROP_TAG( PT_MV_TSTRING,    32892)
+PR_EMS_AB_DXA_CONF_REQ_TIME          = PROP_TAG( PT_SYSTIME,       32893)
+PR_EMS_AB_DXA_CONF_SEQ               = PROP_TAG( PT_TSTRING,       32894)
+PR_EMS_AB_DXA_CONF_SEQ_A             = PROP_TAG( PT_STRING8,       32894)
+PR_EMS_AB_DXA_CONF_SEQ_W             = PROP_TAG( PT_UNICODE,       32894)
+PR_EMS_AB_DXA_CONF_SEQ_USN           = PROP_TAG( PT_LONG,          32895)
+PR_EMS_AB_DXA_EXCHANGE_OPTIONS       = PROP_TAG( PT_LONG,          32896)
+PR_EMS_AB_DXA_EXPORT_NOW             = PROP_TAG( PT_BOOLEAN,       32897)
+PR_EMS_AB_DXA_FLAGS                  = PROP_TAG( PT_LONG,          32898)
+PR_EMS_AB_DXA_IMP_SEQ                = PROP_TAG( PT_TSTRING,       32899)
+PR_EMS_AB_DXA_IMP_SEQ_A              = PROP_TAG( PT_STRING8,       32899)
+PR_EMS_AB_DXA_IMP_SEQ_W              = PROP_TAG( PT_UNICODE,       32899)
+PR_EMS_AB_DXA_IMP_SEQ_TIME           = PROP_TAG( PT_SYSTIME,       32900)
+PR_EMS_AB_DXA_IMP_SEQ_USN            = PROP_TAG( PT_LONG,          32901)
+PR_EMS_AB_DXA_IMPORT_NOW             = PROP_TAG( PT_BOOLEAN,       32902)
+PR_EMS_AB_DXA_IN_TEMPLATE_MAP        = PROP_TAG( PT_MV_TSTRING,    32903)
+PR_EMS_AB_DXA_IN_TEMPLATE_MAP_A      = PROP_TAG( PT_MV_STRING8,    32903)
+PR_EMS_AB_DXA_IN_TEMPLATE_MAP_W      = PROP_TAG( PT_MV_UNICODE,    32903)
+PR_EMS_AB_DXA_LOCAL_ADMIN            = PROP_TAG( PT_TSTRING,       32904)
+PR_EMS_AB_DXA_LOCAL_ADMIN_A          = PROP_TAG( PT_STRING8,       32904)
+PR_EMS_AB_DXA_LOCAL_ADMIN_W          = PROP_TAG( PT_UNICODE,       32904)
+PR_EMS_AB_DXA_LOCAL_ADMIN_O          = PROP_TAG( PT_OBJECT,        32904)
+PR_EMS_AB_DXA_LOCAL_ADMIN_T          = PROP_TAG( PT_TSTRING,       32904)
+PR_EMS_AB_DXA_LOGGING_LEVEL          = PROP_TAG( PT_LONG,          32905)
+PR_EMS_AB_DXA_NATIVE_ADDRESS_TYPE    = PROP_TAG( PT_TSTRING,       32906)
+PR_EMS_AB_DXA_NATIVE_ADDRESS_TYPE_A  = PROP_TAG( PT_STRING8,       32906)
+PR_EMS_AB_DXA_NATIVE_ADDRESS_TYPE_W  = PROP_TAG( PT_UNICODE,       32906)
+PR_EMS_AB_DXA_OUT_TEMPLATE_MAP       = PROP_TAG( PT_MV_TSTRING,    32907)
+PR_EMS_AB_DXA_OUT_TEMPLATE_MAP_A     = PROP_TAG( PT_MV_STRING8,    32907)
+PR_EMS_AB_DXA_OUT_TEMPLATE_MAP_W     = PROP_TAG( PT_MV_UNICODE,    32907)
+PR_EMS_AB_DXA_PASSWORD               = PROP_TAG( PT_TSTRING,       32908)
+PR_EMS_AB_DXA_PASSWORD_A             = PROP_TAG( PT_STRING8,       32908)
+PR_EMS_AB_DXA_PASSWORD_W             = PROP_TAG( PT_UNICODE,       32908)
+PR_EMS_AB_DXA_PREV_EXCHANGE_OPTIONS  = PROP_TAG( PT_LONG,          32909)
+PR_EMS_AB_DXA_PREV_EXPORT_NATIVE_ONLY = PROP_TAG( PT_BOOLEAN,       32910)
+PR_EMS_AB_DXA_PREV_IN_EXCHANGE_SENSITIVITY = PROP_TAG( PT_LONG,          32911)
+PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES    = PROP_TAG( PT_TSTRING,       32912)
+PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_A  = PROP_TAG( PT_STRING8,       32912)
+PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_W  = PROP_TAG( PT_UNICODE,       32912)
+PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_O  = PROP_TAG( PT_OBJECT,        32912)
+PR_EMS_AB_DXA_PREV_REMOTE_ENTRIES_T  = PROP_TAG( PT_TSTRING,       32912)
+PR_EMS_AB_DXA_PREV_REPLICATION_SENSITIVITY = PROP_TAG( PT_LONG,          32913)
+PR_EMS_AB_DXA_PREV_TEMPLATE_OPTIONS  = PROP_TAG( PT_LONG,          32914)
+PR_EMS_AB_DXA_PREV_TYPES             = PROP_TAG( PT_LONG,          32915)
+PR_EMS_AB_DXA_RECIPIENT_CP           = PROP_TAG( PT_TSTRING,       32916)
+PR_EMS_AB_DXA_RECIPIENT_CP_A         = PROP_TAG( PT_STRING8,       32916)
+PR_EMS_AB_DXA_RECIPIENT_CP_W         = PROP_TAG( PT_UNICODE,       32916)
+PR_EMS_AB_DXA_REMOTE_CLIENT          = PROP_TAG( PT_TSTRING,       32917)
+PR_EMS_AB_DXA_REMOTE_CLIENT_A        = PROP_TAG( PT_STRING8,       32917)
+PR_EMS_AB_DXA_REMOTE_CLIENT_W        = PROP_TAG( PT_UNICODE,       32917)
+PR_EMS_AB_DXA_REMOTE_CLIENT_O        = PROP_TAG( PT_OBJECT,        32917)
+PR_EMS_AB_DXA_REMOTE_CLIENT_T        = PROP_TAG( PT_TSTRING,       32917)
+PR_EMS_AB_DXA_REQ_SEQ                = PROP_TAG( PT_TSTRING,       32918)
+PR_EMS_AB_DXA_REQ_SEQ_A              = PROP_TAG( PT_STRING8,       32918)
+PR_EMS_AB_DXA_REQ_SEQ_W              = PROP_TAG( PT_UNICODE,       32918)
+PR_EMS_AB_DXA_REQ_SEQ_TIME           = PROP_TAG( PT_SYSTIME,       32919)
+PR_EMS_AB_DXA_REQ_SEQ_USN            = PROP_TAG( PT_LONG,          32920)
+PR_EMS_AB_DXA_REQNAME                = PROP_TAG( PT_TSTRING,       32921)
+PR_EMS_AB_DXA_REQNAME_A              = PROP_TAG( PT_STRING8,       32921)
+PR_EMS_AB_DXA_REQNAME_W              = PROP_TAG( PT_UNICODE,       32921)
+PR_EMS_AB_DXA_SVR_SEQ                = PROP_TAG( PT_TSTRING,       32922)
+PR_EMS_AB_DXA_SVR_SEQ_A              = PROP_TAG( PT_STRING8,       32922)
+PR_EMS_AB_DXA_SVR_SEQ_W              = PROP_TAG( PT_UNICODE,       32922)
+PR_EMS_AB_DXA_SVR_SEQ_TIME           = PROP_TAG( PT_SYSTIME,       32923)
+PR_EMS_AB_DXA_SVR_SEQ_USN            = PROP_TAG( PT_LONG,          32924)
+PR_EMS_AB_DXA_TASK                   = PROP_TAG( PT_LONG,          32925)
+PR_EMS_AB_DXA_TEMPLATE_OPTIONS       = PROP_TAG( PT_LONG,          32926)
+PR_EMS_AB_DXA_TEMPLATE_TIMESTAMP     = PROP_TAG( PT_SYSTIME,       32927)
+PR_EMS_AB_DXA_TYPES                  = PROP_TAG( PT_LONG,          32928)
+PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST  = PROP_TAG( PT_MV_TSTRING,    32929)
+PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_A = PROP_TAG( PT_MV_STRING8,    32929)
+PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_W = PROP_TAG( PT_MV_UNICODE,    32929)
+PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_O = PROP_TAG( PT_OBJECT,        32929)
+PR_EMS_AB_DXA_UNCONF_CONTAINER_LIST_T = PROP_TAG( PT_MV_TSTRING,    32929)
+PR_EMS_AB_ENABLED_PROTOCOLS          = PROP_TAG( PT_LONG,          33151)
+PR_EMS_AB_ENCAPSULATION_METHOD       = PROP_TAG( PT_LONG,          32930)
+PR_EMS_AB_ENCRYPT                    = PROP_TAG( PT_BOOLEAN,       32931)
+PR_EMS_AB_ENCRYPT_ALG_LIST_NA        = PROP_TAG( PT_MV_TSTRING,    32832)
+PR_EMS_AB_ENCRYPT_ALG_LIST_NA_A      = PROP_TAG( PT_MV_STRING8,    32832)
+PR_EMS_AB_ENCRYPT_ALG_LIST_NA_W      = PROP_TAG( PT_MV_UNICODE,    32832)
+PR_EMS_AB_ENCRYPT_ALG_LIST_OTHER     = PROP_TAG( PT_MV_TSTRING,    32833)
+PR_EMS_AB_ENCRYPT_ALG_LIST_OTHER_A   = PROP_TAG( PT_MV_STRING8,    32833)
+PR_EMS_AB_ENCRYPT_ALG_LIST_OTHER_W   = PROP_TAG( PT_MV_UNICODE,    32833)
+PR_EMS_AB_ENCRYPT_ALG_SELECTED_NA    = PROP_TAG( PT_TSTRING,       32835)
+PR_EMS_AB_ENCRYPT_ALG_SELECTED_NA_A  = PROP_TAG( PT_STRING8,       32835)
+PR_EMS_AB_ENCRYPT_ALG_SELECTED_NA_W  = PROP_TAG( PT_UNICODE,       32835)
+PR_EMS_AB_ENCRYPT_ALG_SELECTED_OTHER = PROP_TAG( PT_TSTRING,       32829)
+PR_EMS_AB_ENCRYPT_ALG_SELECTED_OTHER_A = PROP_TAG( PT_STRING8,       32829)
+PR_EMS_AB_ENCRYPT_ALG_SELECTED_OTHER_W = PROP_TAG( PT_UNICODE,       32829)
+PR_EMS_AB_EXPAND_DLS_LOCALLY         = PROP_TAG( PT_BOOLEAN,       32932)
+PR_EMS_AB_EXPIRATION_TIME            = PROP_TAG( PT_SYSTIME,       32808)
+PR_EMS_AB_EXPORT_CONTAINERS          = PROP_TAG( PT_MV_TSTRING,    32933)
+PR_EMS_AB_EXPORT_CONTAINERS_A        = PROP_TAG( PT_MV_STRING8,    32933)
+PR_EMS_AB_EXPORT_CONTAINERS_W        = PROP_TAG( PT_MV_UNICODE,    32933)
+PR_EMS_AB_EXPORT_CONTAINERS_O        = PROP_TAG( PT_OBJECT,        32933)
+PR_EMS_AB_EXPORT_CONTAINERS_T        = PROP_TAG( PT_MV_TSTRING,    32933)
+PR_EMS_AB_EXPORT_CUSTOM_RECIPIENTS   = PROP_TAG( PT_BOOLEAN,       32934)
+PR_EMS_AB_EXTENDED_CHARS_ALLOWED     = PROP_TAG( PT_BOOLEAN,       32935)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_1      = PROP_TAG( PT_TSTRING,       32813)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_1_A    = PROP_TAG( PT_STRING8,       32813)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_1_W    = PROP_TAG( PT_UNICODE,       32813)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_10     = PROP_TAG( PT_TSTRING,       32822)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_10_A   = PROP_TAG( PT_STRING8,       32822)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_10_W   = PROP_TAG( PT_UNICODE,       32822)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_2      = PROP_TAG( PT_TSTRING,       32814)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_2_A    = PROP_TAG( PT_STRING8,       32814)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_2_W    = PROP_TAG( PT_UNICODE,       32814)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_3      = PROP_TAG( PT_TSTRING,       32815)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_3_A    = PROP_TAG( PT_STRING8,       32815)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_3_W    = PROP_TAG( PT_UNICODE,       32815)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_4      = PROP_TAG( PT_TSTRING,       32816)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_4_A    = PROP_TAG( PT_STRING8,       32816)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_4_W    = PROP_TAG( PT_UNICODE,       32816)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_5      = PROP_TAG( PT_TSTRING,       32817)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_5_A    = PROP_TAG( PT_STRING8,       32817)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_5_W    = PROP_TAG( PT_UNICODE,       32817)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_6      = PROP_TAG( PT_TSTRING,       32818)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_6_A    = PROP_TAG( PT_STRING8,       32818)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_6_W    = PROP_TAG( PT_UNICODE,       32818)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_7      = PROP_TAG( PT_TSTRING,       32819)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_7_A    = PROP_TAG( PT_STRING8,       32819)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_7_W    = PROP_TAG( PT_UNICODE,       32819)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_8      = PROP_TAG( PT_TSTRING,       32820)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_8_A    = PROP_TAG( PT_STRING8,       32820)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_8_W    = PROP_TAG( PT_UNICODE,       32820)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_9      = PROP_TAG( PT_TSTRING,       32821)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_9_A    = PROP_TAG( PT_STRING8,       32821)
+PR_EMS_AB_EXTENSION_ATTRIBUTE_9_W    = PROP_TAG( PT_UNICODE,       32821)
+PR_EMS_AB_EXTENSION_DATA             = PROP_TAG( PT_MV_BINARY,     32936)
+PR_EMS_AB_EXTENSION_NAME             = PROP_TAG( PT_MV_TSTRING,    32937)
+PR_EMS_AB_EXTENSION_NAME_A           = PROP_TAG( PT_MV_STRING8,    32937)
+PR_EMS_AB_EXTENSION_NAME_W           = PROP_TAG( PT_MV_UNICODE,    32937)
+PR_EMS_AB_EXTENSION_NAME_INHERITED   = PROP_TAG( PT_MV_TSTRING,    32938)
+PR_EMS_AB_EXTENSION_NAME_INHERITED_A = PROP_TAG( PT_MV_STRING8,    32938)
+PR_EMS_AB_EXTENSION_NAME_INHERITED_W = PROP_TAG( PT_MV_UNICODE,    32938)
+PR_EMS_AB_FACSIMILE_TELEPHONE_NUMBER = PROP_TAG( PT_MV_BINARY,     32939)
+PR_EMS_AB_FILE_VERSION               = PROP_TAG( PT_BINARY,        32940)
+PR_EMS_AB_FILTER_LOCAL_ADDRESSES     = PROP_TAG( PT_BOOLEAN,       32941)
+PR_EMS_AB_FOLDER_PATHNAME            = PROP_TAG( PT_TSTRING,       32772)
+PR_EMS_AB_FOLDER_PATHNAME_A          = PROP_TAG( PT_STRING8,       32772)
+PR_EMS_AB_FOLDER_PATHNAME_W          = PROP_TAG( PT_UNICODE,       32772)
+PR_EMS_AB_FOLDERS_CONTAINER          = PROP_TAG( PT_TSTRING,       32942)
+PR_EMS_AB_FOLDERS_CONTAINER_A        = PROP_TAG( PT_STRING8,       32942)
+PR_EMS_AB_FOLDERS_CONTAINER_W        = PROP_TAG( PT_UNICODE,       32942)
+PR_EMS_AB_FOLDERS_CONTAINER_O        = PROP_TAG( PT_OBJECT,        32942)
+PR_EMS_AB_FOLDERS_CONTAINER_T        = PROP_TAG( PT_TSTRING,       32942)
+PR_EMS_AB_GARBAGE_COLL_PERIOD        = PROP_TAG( PT_LONG,          32943)
+PR_EMS_AB_GATEWAY_LOCAL_CRED         = PROP_TAG( PT_TSTRING,       32944)
+PR_EMS_AB_GATEWAY_LOCAL_CRED_A       = PROP_TAG( PT_STRING8,       32944)
+PR_EMS_AB_GATEWAY_LOCAL_CRED_W       = PROP_TAG( PT_UNICODE,       32944)
+PR_EMS_AB_GATEWAY_LOCAL_DESIG        = PROP_TAG( PT_TSTRING,       32945)
+PR_EMS_AB_GATEWAY_LOCAL_DESIG_A      = PROP_TAG( PT_STRING8,       32945)
+PR_EMS_AB_GATEWAY_LOCAL_DESIG_W      = PROP_TAG( PT_UNICODE,       32945)
+PR_EMS_AB_GATEWAY_PROXY              = PROP_TAG( PT_MV_TSTRING,    32946)
+PR_EMS_AB_GATEWAY_PROXY_A            = PROP_TAG( PT_MV_STRING8,    32946)
+PR_EMS_AB_GATEWAY_PROXY_W            = PROP_TAG( PT_MV_UNICODE,    32946)
+PR_EMS_AB_GATEWAY_ROUTING_TREE       = PROP_TAG( PT_BINARY,        32947)
+PR_EMS_AB_GWART_LAST_MODIFIED        = PROP_TAG( PT_SYSTIME,       32948)
+PR_EMS_AB_HAS_FULL_REPLICA_NCS       = PROP_TAG( PT_MV_TSTRING,    32949)
+PR_EMS_AB_HAS_FULL_REPLICA_NCS_A     = PROP_TAG( PT_MV_STRING8,    32949)
+PR_EMS_AB_HAS_FULL_REPLICA_NCS_W     = PROP_TAG( PT_MV_UNICODE,    32949)
+PR_EMS_AB_HAS_FULL_REPLICA_NCS_O     = PROP_TAG( PT_OBJECT,        32949)
+PR_EMS_AB_HAS_FULL_REPLICA_NCS_T     = PROP_TAG( PT_MV_TSTRING,    32949)
+PR_EMS_AB_HAS_MASTER_NCS             = PROP_TAG( PT_MV_TSTRING,    32950)
+PR_EMS_AB_HAS_MASTER_NCS_A           = PROP_TAG( PT_MV_STRING8,    32950)
+PR_EMS_AB_HAS_MASTER_NCS_W           = PROP_TAG( PT_MV_UNICODE,    32950)
+PR_EMS_AB_HAS_MASTER_NCS_O           = PROP_TAG( PT_OBJECT,        32950)
+PR_EMS_AB_HAS_MASTER_NCS_T           = PROP_TAG( PT_MV_TSTRING,    32950)
+PR_EMS_AB_HELP_DATA16                = PROP_TAG( PT_BINARY,        32826)
+PR_EMS_AB_HELP_DATA32                = PROP_TAG( PT_BINARY,        32784)
+PR_EMS_AB_HELP_FILE_NAME             = PROP_TAG( PT_TSTRING,       32827)
+PR_EMS_AB_HELP_FILE_NAME_A           = PROP_TAG( PT_STRING8,       32827)
+PR_EMS_AB_HELP_FILE_NAME_W           = PROP_TAG( PT_UNICODE,       32827)
+PR_EMS_AB_HEURISTICS                 = PROP_TAG( PT_LONG,          32951)
+PR_EMS_AB_HIDE_DL_MEMBERSHIP         = PROP_TAG( PT_BOOLEAN,       32952)
+PR_EMS_AB_HIDE_FROM_ADDRESS_BOOK     = PROP_TAG( PT_BOOLEAN,       32953)
+PR_EMS_AB_HOME_MDB                   = PROP_TAG( PT_TSTRING,       32774)
+PR_EMS_AB_HOME_MDB_A                 = PROP_TAG( PT_STRING8,       32774)
+PR_EMS_AB_HOME_MDB_W                 = PROP_TAG( PT_UNICODE,       32774)
+PR_EMS_AB_HOME_MDB_O                 = PROP_TAG( PT_OBJECT,        32774)
+PR_EMS_AB_HOME_MDB_T                 = PROP_TAG( PT_TSTRING,       32774)
+PR_EMS_AB_HOME_MDB_BL                = PROP_TAG( PT_MV_TSTRING,    32788)
+PR_EMS_AB_HOME_MDB_BL_A              = PROP_TAG( PT_MV_STRING8,    32788)
+PR_EMS_AB_HOME_MDB_BL_W              = PROP_TAG( PT_MV_UNICODE,    32788)
+PR_EMS_AB_HOME_MDB_BL_O              = PROP_TAG( PT_OBJECT,        32788)
+PR_EMS_AB_HOME_MDB_BL_T              = PROP_TAG( PT_MV_TSTRING,    32788)
+PR_EMS_AB_HOME_MTA                   = PROP_TAG( PT_TSTRING,       32775)
+PR_EMS_AB_HOME_MTA_A                 = PROP_TAG( PT_STRING8,       32775)
+PR_EMS_AB_HOME_MTA_W                 = PROP_TAG( PT_UNICODE,       32775)
+PR_EMS_AB_HOME_MTA_O                 = PROP_TAG( PT_OBJECT,        32775)
+PR_EMS_AB_HOME_MTA_T                 = PROP_TAG( PT_TSTRING,       32775)
+PR_EMS_AB_HOME_PUBLIC_SERVER         = PROP_TAG( PT_TSTRING,       32831)
+PR_EMS_AB_HOME_PUBLIC_SERVER_A       = PROP_TAG( PT_STRING8,       32831)
+PR_EMS_AB_HOME_PUBLIC_SERVER_W       = PROP_TAG( PT_UNICODE,       32831)
+PR_EMS_AB_HOME_PUBLIC_SERVER_O       = PROP_TAG( PT_OBJECT,        32831)
+PR_EMS_AB_HOME_PUBLIC_SERVER_T       = PROP_TAG( PT_TSTRING,       32831)
+PR_EMS_AB_IMPORT_CONTAINER           = PROP_TAG( PT_TSTRING,       32954)
+PR_EMS_AB_IMPORT_CONTAINER_A         = PROP_TAG( PT_STRING8,       32954)
+PR_EMS_AB_IMPORT_CONTAINER_W         = PROP_TAG( PT_UNICODE,       32954)
+PR_EMS_AB_IMPORT_CONTAINER_O         = PROP_TAG( PT_OBJECT,        32954)
+PR_EMS_AB_IMPORT_CONTAINER_T         = PROP_TAG( PT_TSTRING,       32954)
+PR_EMS_AB_IMPORT_SENSITIVITY         = PROP_TAG( PT_LONG,          32955)
+PR_EMS_AB_IMPORTED_FROM              = PROP_TAG( PT_TSTRING,       32834)
+PR_EMS_AB_IMPORTED_FROM_A            = PROP_TAG( PT_STRING8,       32834)
+PR_EMS_AB_IMPORTED_FROM_W            = PROP_TAG( PT_UNICODE,       32834)
+PR_EMS_AB_INBOUND_SITES              = PROP_TAG( PT_MV_TSTRING,    32956)
+PR_EMS_AB_INBOUND_SITES_A            = PROP_TAG( PT_MV_STRING8,    32956)
+PR_EMS_AB_INBOUND_SITES_W            = PROP_TAG( PT_MV_UNICODE,    32956)
+PR_EMS_AB_INBOUND_SITES_O            = PROP_TAG( PT_OBJECT,        32956)
+PR_EMS_AB_INBOUND_SITES_T            = PROP_TAG( PT_MV_TSTRING,    32956)
+PR_EMS_AB_INSTANCE_TYPE              = PROP_TAG( PT_LONG,          32957)
+PR_EMS_AB_INTERNATIONAL_ISDN_NUMBER  = PROP_TAG( PT_MV_TSTRING,    32958)
+PR_EMS_AB_INTERNATIONAL_ISDN_NUMBER_A = PROP_TAG( PT_MV_STRING8,    32958)
+PR_EMS_AB_INTERNATIONAL_ISDN_NUMBER_W = PROP_TAG( PT_MV_UNICODE,    32958)
+PR_EMS_AB_INVOCATION_ID              = PROP_TAG( PT_BINARY,        32959)
+PR_EMS_AB_IS_DELETED                 = PROP_TAG( PT_BOOLEAN,       32960)
+PR_EMS_AB_IS_MEMBER_OF_DL            = PROP_TAG( PT_OBJECT,	 32776)
+PR_EMS_AB_IS_MEMBER_OF_DL_A          = PROP_TAG( PT_MV_STRING8,    32776)
+PR_EMS_AB_IS_MEMBER_OF_DL_W          = PROP_TAG( PT_MV_UNICODE,    32776)
+PR_EMS_AB_IS_MEMBER_OF_DL_O          = PROP_TAG( PT_OBJECT,        32776)
+PR_EMS_AB_IS_MEMBER_OF_DL_T          = PROP_TAG( PT_MV_TSTRING,    32776)
+PR_EMS_AB_IS_SINGLE_VALUED           = PROP_TAG( PT_BOOLEAN,       32961)
+PR_EMS_AB_KCC_STATUS                 = PROP_TAG( PT_MV_BINARY,     32962)
+PR_EMS_AB_KM_SERVER                  = PROP_TAG( PT_TSTRING,       32781)
+PR_EMS_AB_KM_SERVER_A                = PROP_TAG( PT_STRING8,       32781)
+PR_EMS_AB_KM_SERVER_W                = PROP_TAG( PT_UNICODE,       32781)
+PR_EMS_AB_KM_SERVER_O                = PROP_TAG( PT_OBJECT,        32781)
+PR_EMS_AB_KM_SERVER_T                = PROP_TAG( PT_TSTRING,       32781)
+PR_EMS_AB_KNOWLEDGE_INFORMATION      = PROP_TAG( PT_MV_TSTRING,    32963)
+PR_EMS_AB_KNOWLEDGE_INFORMATION_A    = PROP_TAG( PT_MV_STRING8,    32963)
+PR_EMS_AB_KNOWLEDGE_INFORMATION_W    = PROP_TAG( PT_MV_UNICODE,    32963)
+PR_EMS_AB_LANGUAGE                   = PROP_TAG( PT_LONG,          33144)
+PR_EMS_AB_LDAP_DISPLAY_NAME          = PROP_TAG( PT_MV_TSTRING,    33137)
+PR_EMS_AB_LDAP_DISPLAY_NAME_A        = PROP_TAG( PT_MV_STRING8,    33137)
+PR_EMS_AB_LDAP_DISPLAY_NAME_W        = PROP_TAG( PT_MV_UNICODE,    33137)
+PR_EMS_AB_LINE_WRAP                  = PROP_TAG( PT_LONG,          32964)
+PR_EMS_AB_LINK_ID                    = PROP_TAG( PT_LONG,          32965)
+PR_EMS_AB_LOCAL_BRIDGE_HEAD          = PROP_TAG( PT_TSTRING,       32966)
+PR_EMS_AB_LOCAL_BRIDGE_HEAD_A        = PROP_TAG( PT_STRING8,       32966)
+PR_EMS_AB_LOCAL_BRIDGE_HEAD_W        = PROP_TAG( PT_UNICODE,       32966)
+PR_EMS_AB_LOCAL_BRIDGE_HEAD_ADDRESS  = PROP_TAG( PT_TSTRING,       32967)
+PR_EMS_AB_LOCAL_BRIDGE_HEAD_ADDRESS_A = PROP_TAG( PT_STRING8,       32967)
+PR_EMS_AB_LOCAL_BRIDGE_HEAD_ADDRESS_W = PROP_TAG( PT_UNICODE,       32967)
+PR_EMS_AB_LOCAL_INITIAL_TURN         = PROP_TAG( PT_BOOLEAN,       32968)
+PR_EMS_AB_LOCAL_SCOPE                = PROP_TAG( PT_MV_TSTRING,    32969)
+PR_EMS_AB_LOCAL_SCOPE_A              = PROP_TAG( PT_MV_STRING8,    32969)
+PR_EMS_AB_LOCAL_SCOPE_W              = PROP_TAG( PT_MV_UNICODE,    32969)
+PR_EMS_AB_LOCAL_SCOPE_O              = PROP_TAG( PT_OBJECT,        32969)
+PR_EMS_AB_LOCAL_SCOPE_T              = PROP_TAG( PT_MV_TSTRING,    32969)
+PR_EMS_AB_LOG_FILENAME               = PROP_TAG( PT_TSTRING,       32970)
+PR_EMS_AB_LOG_FILENAME_A             = PROP_TAG( PT_STRING8,       32970)
+PR_EMS_AB_LOG_FILENAME_W             = PROP_TAG( PT_UNICODE,       32970)
+PR_EMS_AB_LOG_ROLLOVER_INTERVAL      = PROP_TAG( PT_LONG,          32971)
+PR_EMS_AB_MAINTAIN_AUTOREPLY_HISTORY = PROP_TAG( PT_BOOLEAN,       32972)
+PR_EMS_AB_MANAGER                    = PROP_TAG( PT_OBJECT,        32773)
+PR_EMS_AB_MANAGER_A                  = PROP_TAG( PT_STRING8,       32773)
+PR_EMS_AB_MANAGER_W                  = PROP_TAG( PT_UNICODE,       32773)
+PR_EMS_AB_MANAGER_O                  = PROP_TAG( PT_OBJECT,        32773)
+PR_EMS_AB_MANAGER_T                  = PROP_TAG( PT_TSTRING,       32773)
+PR_EMS_AB_MAPI_DISPLAY_TYPE          = PROP_TAG( PT_LONG,          32973)
+PR_EMS_AB_MAPI_ID                    = PROP_TAG( PT_LONG,          32974)
+PR_EMS_AB_MAXIMUM_OBJECT_ID          = PROP_TAG( PT_BINARY,        33129)
+PR_EMS_AB_MDB_BACKOFF_INTERVAL       = PROP_TAG( PT_LONG,          32975)
+PR_EMS_AB_MDB_MSG_TIME_OUT_PERIOD    = PROP_TAG( PT_LONG,          32976)
+PR_EMS_AB_MDB_OVER_QUOTA_LIMIT       = PROP_TAG( PT_LONG,          32977)
+PR_EMS_AB_MDB_STORAGE_QUOTA          = PROP_TAG( PT_LONG,          32978)
+PR_EMS_AB_MDB_UNREAD_LIMIT           = PROP_TAG( PT_LONG,          32979)
+PR_EMS_AB_MDB_USE_DEFAULTS           = PROP_TAG( PT_BOOLEAN,       32980)
+PR_EMS_AB_MEMBER                     = PROP_TAG( PT_OBJECT,        32777)
+PR_EMS_AB_MEMBER_A                   = PROP_TAG( PT_MV_STRING8,    32777)
+PR_EMS_AB_MEMBER_W                   = PROP_TAG( PT_MV_UNICODE,    32777)
+PR_EMS_AB_MEMBER_O                   = PROP_TAG( PT_OBJECT,        32777)
+PR_EMS_AB_MEMBER_T                   = PROP_TAG( PT_MV_TSTRING,    32777)
+PR_EMS_AB_MESSAGE_TRACKING_ENABLED   = PROP_TAG( PT_BOOLEAN,       32981)
+PR_EMS_AB_MONITOR_CLOCK              = PROP_TAG( PT_BOOLEAN,       32982)
+PR_EMS_AB_MONITOR_SERVERS            = PROP_TAG( PT_BOOLEAN,       32983)
+PR_EMS_AB_MONITOR_SERVICES           = PROP_TAG( PT_BOOLEAN,       32984)
+PR_EMS_AB_MONITORED_CONFIGURATIONS   = PROP_TAG( PT_MV_TSTRING,    32985)
+PR_EMS_AB_MONITORED_CONFIGURATIONS_A = PROP_TAG( PT_MV_STRING8,    32985)
+PR_EMS_AB_MONITORED_CONFIGURATIONS_W = PROP_TAG( PT_MV_UNICODE,    32985)
+PR_EMS_AB_MONITORED_CONFIGURATIONS_O = PROP_TAG( PT_OBJECT,        32985)
+PR_EMS_AB_MONITORED_CONFIGURATIONS_T = PROP_TAG( PT_MV_TSTRING,    32985)
+PR_EMS_AB_MONITORED_SERVERS          = PROP_TAG( PT_MV_TSTRING,    32986)
+PR_EMS_AB_MONITORED_SERVERS_A        = PROP_TAG( PT_MV_STRING8,    32986)
+PR_EMS_AB_MONITORED_SERVERS_W        = PROP_TAG( PT_MV_UNICODE,    32986)
+PR_EMS_AB_MONITORED_SERVERS_O        = PROP_TAG( PT_OBJECT,        32986)
+PR_EMS_AB_MONITORED_SERVERS_T        = PROP_TAG( PT_MV_TSTRING,    32986)
+PR_EMS_AB_MONITORED_SERVICES         = PROP_TAG( PT_MV_TSTRING,    32987)
+PR_EMS_AB_MONITORED_SERVICES_A       = PROP_TAG( PT_MV_STRING8,    32987)
+PR_EMS_AB_MONITORED_SERVICES_W       = PROP_TAG( PT_MV_UNICODE,    32987)
+PR_EMS_AB_MONITORING_ALERT_DELAY     = PROP_TAG( PT_LONG,          32988)
+PR_EMS_AB_MONITORING_ALERT_UNITS     = PROP_TAG( PT_LONG,          32989)
+PR_EMS_AB_MONITORING_AVAILABILITY_STYLE = PROP_TAG( PT_LONG,          32990)
+PR_EMS_AB_MONITORING_AVAILABILITY_WINDOW = PROP_TAG( PT_BINARY,        32991)
+PR_EMS_AB_MONITORING_CACHED_VIA_MAIL = PROP_TAG( PT_MV_TSTRING,    32992)
+PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_A = PROP_TAG( PT_MV_STRING8,    32992)
+PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_W = PROP_TAG( PT_MV_UNICODE,    32992)
+PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_O = PROP_TAG( PT_OBJECT,        32992)
+PR_EMS_AB_MONITORING_CACHED_VIA_MAIL_T = PROP_TAG( PT_MV_TSTRING,    32992)
+PR_EMS_AB_MONITORING_CACHED_VIA_RPC  = PROP_TAG( PT_MV_TSTRING,    32993)
+PR_EMS_AB_MONITORING_CACHED_VIA_RPC_A = PROP_TAG( PT_MV_STRING8,    32993)
+PR_EMS_AB_MONITORING_CACHED_VIA_RPC_W = PROP_TAG( PT_MV_UNICODE,    32993)
+PR_EMS_AB_MONITORING_CACHED_VIA_RPC_O = PROP_TAG( PT_OBJECT,        32993)
+PR_EMS_AB_MONITORING_CACHED_VIA_RPC_T = PROP_TAG( PT_MV_TSTRING,    32993)
+PR_EMS_AB_MONITORING_ESCALATION_PROCEDURE = PROP_TAG( PT_MV_BINARY,     32994)
+PR_EMS_AB_MONITORING_HOTSITE_POLL_INTERVAL = PROP_TAG( PT_LONG,          32995)
+PR_EMS_AB_MONITORING_HOTSITE_POLL_UNITS = PROP_TAG( PT_LONG,          32996)
+PR_EMS_AB_MONITORING_MAIL_UPDATE_INTERVAL = PROP_TAG( PT_LONG,          32997)
+PR_EMS_AB_MONITORING_MAIL_UPDATE_UNITS = PROP_TAG( PT_LONG,          32998)
+PR_EMS_AB_MONITORING_NORMAL_POLL_INTERVAL = PROP_TAG( PT_LONG,          32999)
+PR_EMS_AB_MONITORING_NORMAL_POLL_UNITS = PROP_TAG( PT_LONG,          33000)
+PR_EMS_AB_MONITORING_RECIPIENTS      = PROP_TAG( PT_MV_TSTRING,    33001)
+PR_EMS_AB_MONITORING_RECIPIENTS_A    = PROP_TAG( PT_MV_STRING8,    33001)
+PR_EMS_AB_MONITORING_RECIPIENTS_W    = PROP_TAG( PT_MV_UNICODE,    33001)
+PR_EMS_AB_MONITORING_RECIPIENTS_O    = PROP_TAG( PT_OBJECT,        33001)
+PR_EMS_AB_MONITORING_RECIPIENTS_T    = PROP_TAG( PT_MV_TSTRING,    33001)
+PR_EMS_AB_MONITORING_RECIPIENTS_NDR  = PROP_TAG( PT_MV_TSTRING,    33002)
+PR_EMS_AB_MONITORING_RECIPIENTS_NDR_A = PROP_TAG( PT_MV_STRING8,    33002)
+PR_EMS_AB_MONITORING_RECIPIENTS_NDR_W = PROP_TAG( PT_MV_UNICODE,    33002)
+PR_EMS_AB_MONITORING_RECIPIENTS_NDR_O = PROP_TAG( PT_OBJECT,        33002)
+PR_EMS_AB_MONITORING_RECIPIENTS_NDR_T = PROP_TAG( PT_MV_TSTRING,    33002)
+PR_EMS_AB_MONITORING_RPC_UPDATE_INTERVAL = PROP_TAG( PT_LONG,          33003)
+PR_EMS_AB_MONITORING_RPC_UPDATE_UNITS = PROP_TAG( PT_LONG,          33004)
+PR_EMS_AB_MONITORING_WARNING_DELAY   = PROP_TAG( PT_LONG,          33005)
+PR_EMS_AB_MONITORING_WARNING_UNITS   = PROP_TAG( PT_LONG,          33006)
+PR_EMS_AB_MTA_LOCAL_CRED             = PROP_TAG( PT_TSTRING,       33007)
+PR_EMS_AB_MTA_LOCAL_CRED_A           = PROP_TAG( PT_STRING8,       33007)
+PR_EMS_AB_MTA_LOCAL_CRED_W           = PROP_TAG( PT_UNICODE,       33007)
+PR_EMS_AB_MTA_LOCAL_DESIG            = PROP_TAG( PT_TSTRING,       33008)
+PR_EMS_AB_MTA_LOCAL_DESIG_A          = PROP_TAG( PT_STRING8,       33008)
+PR_EMS_AB_MTA_LOCAL_DESIG_W          = PROP_TAG( PT_UNICODE,       33008)
+PR_EMS_AB_N_ADDRESS                  = PROP_TAG( PT_BINARY,        33009)
+PR_EMS_AB_N_ADDRESS_TYPE             = PROP_TAG( PT_LONG,          33010)
+PR_EMS_AB_NETWORK_ADDRESS            = PROP_TAG( PT_MV_TSTRING,    33136)
+PR_EMS_AB_NETWORK_ADDRESS_A          = PROP_TAG( PT_MV_STRING8,    33136)
+PR_EMS_AB_NETWORK_ADDRESS_W          = PROP_TAG( PT_MV_UNICODE,    33136)
+PR_EMS_AB_NNTP_CHARACTER_SET         = PROP_TAG( PT_TSTRING,       33149)
+PR_EMS_AB_NNTP_CHARACTER_SET_A       = PROP_TAG( PT_STRING8,       33149)
+PR_EMS_AB_NNTP_CHARACTER_SET_W       = PROP_TAG( PT_UNICODE,       33149)
+PR_EMS_AB_NNTP_CONTENT_FORMAT        = PROP_TAG( PT_TSTRING,       33142)
+PR_EMS_AB_NNTP_CONTENT_FORMAT_A      = PROP_TAG( PT_STRING8,       33142)
+PR_EMS_AB_NNTP_CONTENT_FORMAT_W      = PROP_TAG( PT_UNICODE,       33142)
+PR_EMS_AB_NT_MACHINE_NAME            = PROP_TAG( PT_TSTRING,       33011)
+PR_EMS_AB_NT_MACHINE_NAME_A          = PROP_TAG( PT_STRING8,       33011)
+PR_EMS_AB_NT_MACHINE_NAME_W          = PROP_TAG( PT_UNICODE,       33011)
+PR_EMS_AB_NT_SECURITY_DESCRIPTOR     = PROP_TAG( PT_BINARY,        32787)
+PR_EMS_AB_NUM_OF_OPEN_RETRIES        = PROP_TAG( PT_LONG,          33012)
+PR_EMS_AB_NUM_OF_TRANSFER_RETRIES    = PROP_TAG( PT_LONG,          33013)
+PR_EMS_AB_OBJ_DIST_NAME              = PROP_TAG( PT_TSTRING,       32828)
+PR_EMS_AB_OBJ_DIST_NAME_A            = PROP_TAG( PT_STRING8,       32828)
+PR_EMS_AB_OBJ_DIST_NAME_W            = PROP_TAG( PT_UNICODE,       32828)
+PR_EMS_AB_OBJ_DIST_NAME_O            = PROP_TAG( PT_OBJECT,        32828)
+PR_EMS_AB_OBJ_DIST_NAME_T            = PROP_TAG( PT_TSTRING,       32828)
+PR_EMS_AB_OBJECT_CLASS_CATEGORY      = PROP_TAG( PT_LONG,          33014)
+PR_EMS_AB_OBJECT_VERSION             = PROP_TAG( PT_LONG,          33015)
+PR_EMS_AB_OFF_LINE_AB_CONTAINERS     = PROP_TAG( PT_MV_TSTRING,    33016)
+PR_EMS_AB_OFF_LINE_AB_CONTAINERS_A   = PROP_TAG( PT_MV_STRING8,    33016)
+PR_EMS_AB_OFF_LINE_AB_CONTAINERS_W   = PROP_TAG( PT_MV_UNICODE,    33016)
+PR_EMS_AB_OFF_LINE_AB_CONTAINERS_O   = PROP_TAG( PT_OBJECT,        33016)
+PR_EMS_AB_OFF_LINE_AB_CONTAINERS_T   = PROP_TAG( PT_MV_TSTRING,    33016)
+PR_EMS_AB_OFF_LINE_AB_SCHEDULE       = PROP_TAG( PT_BINARY,        33017)
+PR_EMS_AB_OFF_LINE_AB_SERVER         = PROP_TAG( PT_TSTRING,       33018)
+PR_EMS_AB_OFF_LINE_AB_SERVER_A       = PROP_TAG( PT_STRING8,       33018)
+PR_EMS_AB_OFF_LINE_AB_SERVER_W       = PROP_TAG( PT_UNICODE,       33018)
+PR_EMS_AB_OFF_LINE_AB_SERVER_O       = PROP_TAG( PT_OBJECT,        33018)
+PR_EMS_AB_OFF_LINE_AB_SERVER_T       = PROP_TAG( PT_TSTRING,       33018)
+PR_EMS_AB_OFF_LINE_AB_STYLE          = PROP_TAG( PT_LONG,          33019)
+PR_EMS_AB_OID_TYPE                   = PROP_TAG( PT_LONG,          33020)
+PR_EMS_AB_OM_OBJECT_CLASS            = PROP_TAG( PT_BINARY,        33021)
+PR_EMS_AB_OM_SYNTAX                  = PROP_TAG( PT_LONG,          33022)
+PR_EMS_AB_OOF_REPLY_TO_ORIGINATOR    = PROP_TAG( PT_BOOLEAN,       33023)
+PR_EMS_AB_OPEN_RETRY_INTERVAL        = PROP_TAG( PT_LONG,          33024)
+PR_EMS_AB_ORGANIZATION_NAME          = PROP_TAG( PT_MV_TSTRING,    33025)
+PR_EMS_AB_ORGANIZATION_NAME_A        = PROP_TAG( PT_MV_STRING8,    33025)
+PR_EMS_AB_ORGANIZATION_NAME_W        = PROP_TAG( PT_MV_UNICODE,    33025)
+PR_EMS_AB_ORGANIZATIONAL_UNIT_NAME   = PROP_TAG( PT_MV_TSTRING,    33026)
+PR_EMS_AB_ORGANIZATIONAL_UNIT_NAME_A = PROP_TAG( PT_MV_STRING8,    33026)
+PR_EMS_AB_ORGANIZATIONAL_UNIT_NAME_W = PROP_TAG( PT_MV_UNICODE,    33026)
+PR_EMS_AB_ORIGINAL_DISPLAY_TABLE     = PROP_TAG( PT_BINARY,        33027)
+PR_EMS_AB_ORIGINAL_DISPLAY_TABLE_MSDOS = PROP_TAG( PT_BINARY,        33028)
+PR_EMS_AB_OUTBOUND_SITES             = PROP_TAG( PT_MV_TSTRING,    33029)
+PR_EMS_AB_OUTBOUND_SITES_A           = PROP_TAG( PT_MV_STRING8,    33029)
+PR_EMS_AB_OUTBOUND_SITES_W           = PROP_TAG( PT_MV_UNICODE,    33029)
+PR_EMS_AB_OUTBOUND_SITES_O           = PROP_TAG( PT_OBJECT,        33029)
+PR_EMS_AB_OUTBOUND_SITES_T           = PROP_TAG( PT_MV_TSTRING,    33029)
+PR_EMS_AB_OWNER                      = PROP_TAG( PT_TSTRING,       32780)
+PR_EMS_AB_OWNER_A                    = PROP_TAG( PT_STRING8,       32780)
+PR_EMS_AB_OWNER_W                    = PROP_TAG( PT_UNICODE,       32780)
+PR_EMS_AB_OWNER_O                    = PROP_TAG( PT_OBJECT,        32780)
+PR_EMS_AB_OWNER_T                    = PROP_TAG( PT_TSTRING,       32780)
+PR_EMS_AB_OWNER_BL                   = PROP_TAG( PT_TSTRING,       32804)
+PR_EMS_AB_OWNER_BL_A                 = PROP_TAG( PT_STRING8,       32804)
+PR_EMS_AB_OWNER_BL_W                 = PROP_TAG( PT_UNICODE,       32804)
+PR_EMS_AB_OWNER_BL_O                 = PROP_TAG( PT_OBJECT,        32804)
+PR_EMS_AB_OWNER_BL_T                 = PROP_TAG( PT_TSTRING,       32804)
+PR_EMS_AB_P_SELECTOR                 = PROP_TAG( PT_BINARY,        33030)
+PR_EMS_AB_P_SELECTOR_INBOUND         = PROP_TAG( PT_BINARY,        33031)
+PR_EMS_AB_PER_MSG_DIALOG_DISPLAY_TABLE = PROP_TAG( PT_BINARY,        33032)
+PR_EMS_AB_PER_RECIP_DIALOG_DISPLAY_TABLE = PROP_TAG( PT_BINARY,        33033)
+PR_EMS_AB_PERIOD_REP_SYNC_TIMES      = PROP_TAG( PT_BINARY,        33034)
+PR_EMS_AB_PERIOD_REPL_STAGGER        = PROP_TAG( PT_LONG,          33035)
+PR_EMS_AB_PF_CONTACTS                = PROP_TAG( PT_MV_TSTRING,    32824)
+PR_EMS_AB_PF_CONTACTS_A              = PROP_TAG( PT_MV_STRING8,    32824)
+PR_EMS_AB_PF_CONTACTS_W              = PROP_TAG( PT_MV_UNICODE,    32824)
+PR_EMS_AB_PF_CONTACTS_O              = PROP_TAG( PT_OBJECT,        32824)
+PR_EMS_AB_PF_CONTACTS_T              = PROP_TAG( PT_MV_TSTRING,    32824)
+PR_EMS_AB_POP_CHARACTER_SET          = PROP_TAG( PT_TSTRING,       33145)
+PR_EMS_AB_POP_CHARACTER_SET_A        = PROP_TAG( PT_STRING8,       33145)
+PR_EMS_AB_POP_CHARACTER_SET_W        = PROP_TAG( PT_UNICODE,       33145)
+PR_EMS_AB_POP_CONTENT_FORMAT         = PROP_TAG( PT_TSTRING,       33143)
+PR_EMS_AB_POP_CONTENT_FORMAT_A       = PROP_TAG( PT_STRING8,       33143)
+PR_EMS_AB_POP_CONTENT_FORMAT_W       = PROP_TAG( PT_UNICODE,       33143)
+PR_EMS_AB_POSTAL_ADDRESS             = PROP_TAG( PT_MV_BINARY,     33036)
+PR_EMS_AB_PREFERRED_DELIVERY_METHOD  = PROP_TAG( PT_MV_LONG,       33037)
+PR_EMS_AB_PRMD                       = PROP_TAG( PT_TSTRING,       33038)
+PR_EMS_AB_PRMD_A                     = PROP_TAG( PT_STRING8,       33038)
+PR_EMS_AB_PRMD_W                     = PROP_TAG( PT_UNICODE,       33038)
+PR_EMS_AB_PROXY_ADDRESSES            = PROP_TAG( PT_MV_TSTRING,    32783)
+PR_EMS_AB_PROXY_ADDRESSES_A          = PROP_TAG( PT_MV_STRING8,    32783)
+PR_EMS_AB_PROXY_ADDRESSES_W          = PROP_TAG( PT_MV_UNICODE,    32783)
+PR_EMS_AB_PROXY_GENERATOR_DLL        = PROP_TAG( PT_TSTRING,       33039)
+PR_EMS_AB_PROXY_GENERATOR_DLL_A      = PROP_TAG( PT_STRING8,       33039)
+PR_EMS_AB_PROXY_GENERATOR_DLL_W      = PROP_TAG( PT_UNICODE,       33039)
+PR_EMS_AB_PUBLIC_DELEGATES           = PROP_TAG( PT_OBJECT,        32789)
+PR_EMS_AB_PUBLIC_DELEGATES_A         = PROP_TAG( PT_MV_STRING8,    32789)
+PR_EMS_AB_PUBLIC_DELEGATES_W         = PROP_TAG( PT_MV_UNICODE,    32789)
+PR_EMS_AB_PUBLIC_DELEGATES_O         = PROP_TAG( PT_OBJECT,        32789)
+PR_EMS_AB_PUBLIC_DELEGATES_T         = PROP_TAG( PT_MV_TSTRING,    32789)
+PR_EMS_AB_PUBLIC_DELEGATES_BL        = PROP_TAG( PT_MV_TSTRING,    33040)
+PR_EMS_AB_PUBLIC_DELEGATES_BL_A      = PROP_TAG( PT_MV_STRING8,    33040)
+PR_EMS_AB_PUBLIC_DELEGATES_BL_W      = PROP_TAG( PT_MV_UNICODE,    33040)
+PR_EMS_AB_PUBLIC_DELEGATES_BL_O      = PROP_TAG( PT_OBJECT,        33040)
+PR_EMS_AB_PUBLIC_DELEGATES_BL_T      = PROP_TAG( PT_MV_TSTRING,    33040)
+PR_EMS_AB_QUOTA_NOTIFICATION_SCHEDULE = PROP_TAG( PT_BINARY,        33041)
+PR_EMS_AB_QUOTA_NOTIFICATION_STYLE   = PROP_TAG( PT_LONG,          33042)
+PR_EMS_AB_RANGE_LOWER                = PROP_TAG( PT_LONG,          33043)
+PR_EMS_AB_RANGE_UPPER                = PROP_TAG( PT_LONG,          33044)
+PR_EMS_AB_RAS_CALLBACK_NUMBER        = PROP_TAG( PT_TSTRING,       33045)
+PR_EMS_AB_RAS_CALLBACK_NUMBER_A      = PROP_TAG( PT_STRING8,       33045)
+PR_EMS_AB_RAS_CALLBACK_NUMBER_W      = PROP_TAG( PT_UNICODE,       33045)
+PR_EMS_AB_RAS_PHONE_NUMBER           = PROP_TAG( PT_TSTRING,       33046)
+PR_EMS_AB_RAS_PHONE_NUMBER_A         = PROP_TAG( PT_STRING8,       33046)
+PR_EMS_AB_RAS_PHONE_NUMBER_W         = PROP_TAG( PT_UNICODE,       33046)
+PR_EMS_AB_RAS_PHONEBOOK_ENTRY_NAME   = PROP_TAG( PT_TSTRING,       33047)
+PR_EMS_AB_RAS_PHONEBOOK_ENTRY_NAME_A = PROP_TAG( PT_STRING8,       33047)
+PR_EMS_AB_RAS_PHONEBOOK_ENTRY_NAME_W = PROP_TAG( PT_UNICODE,       33047)
+PR_EMS_AB_RAS_REMOTE_SRVR_NAME       = PROP_TAG( PT_TSTRING,       33048)
+PR_EMS_AB_RAS_REMOTE_SRVR_NAME_A     = PROP_TAG( PT_STRING8,       33048)
+PR_EMS_AB_RAS_REMOTE_SRVR_NAME_W     = PROP_TAG( PT_UNICODE,       33048)
+PR_EMS_AB_REGISTERED_ADDRESS         = PROP_TAG( PT_MV_BINARY,     33049)
+PR_EMS_AB_REMOTE_BRIDGE_HEAD         = PROP_TAG( PT_TSTRING,       33050)
+PR_EMS_AB_REMOTE_BRIDGE_HEAD_A       = PROP_TAG( PT_STRING8,       33050)
+PR_EMS_AB_REMOTE_BRIDGE_HEAD_W       = PROP_TAG( PT_UNICODE,       33050)
+PR_EMS_AB_REMOTE_BRIDGE_HEAD_ADDRESS = PROP_TAG( PT_TSTRING,       33051)
+PR_EMS_AB_REMOTE_BRIDGE_HEAD_ADDRESS_A = PROP_TAG( PT_STRING8,       33051)
+PR_EMS_AB_REMOTE_BRIDGE_HEAD_ADDRESS_W = PROP_TAG( PT_UNICODE,       33051)
+PR_EMS_AB_REMOTE_OUT_BH_SERVER       = PROP_TAG( PT_TSTRING,       33052)
+PR_EMS_AB_REMOTE_OUT_BH_SERVER_A     = PROP_TAG( PT_STRING8,       33052)
+PR_EMS_AB_REMOTE_OUT_BH_SERVER_W     = PROP_TAG( PT_UNICODE,       33052)
+PR_EMS_AB_REMOTE_OUT_BH_SERVER_O     = PROP_TAG( PT_OBJECT,        33052)
+PR_EMS_AB_REMOTE_OUT_BH_SERVER_T     = PROP_TAG( PT_TSTRING,       33052)
+PR_EMS_AB_REMOTE_SITE                = PROP_TAG( PT_TSTRING,       33053)
+PR_EMS_AB_REMOTE_SITE_A              = PROP_TAG( PT_STRING8,       33053)
+PR_EMS_AB_REMOTE_SITE_W              = PROP_TAG( PT_UNICODE,       33053)
+PR_EMS_AB_REMOTE_SITE_O              = PROP_TAG( PT_OBJECT,        33053)
+PR_EMS_AB_REMOTE_SITE_T              = PROP_TAG( PT_TSTRING,       33053)
+PR_EMS_AB_REPLICATION_MAIL_MSG_SIZE  = PROP_TAG( PT_LONG,          33128)
+PR_EMS_AB_REPLICATION_SENSITIVITY    = PROP_TAG( PT_LONG,          33054)
+PR_EMS_AB_REPLICATION_STAGGER        = PROP_TAG( PT_LONG,          33055)
+PR_EMS_AB_REPORT_TO_ORIGINATOR       = PROP_TAG( PT_BOOLEAN,       33056)
+PR_EMS_AB_REPORT_TO_OWNER            = PROP_TAG( PT_BOOLEAN,       33057)
+PR_EMS_AB_REPORTS                    = PROP_TAG( PT_OBJECT,        32782)
+PR_EMS_AB_REPORTS_A                  = PROP_TAG( PT_MV_STRING8,    32782)
+PR_EMS_AB_REPORTS_W                  = PROP_TAG( PT_MV_UNICODE,    32782)
+PR_EMS_AB_REPORTS_O                  = PROP_TAG( PT_OBJECT,        32782)
+PR_EMS_AB_REPORTS_T                  = PROP_TAG( PT_MV_TSTRING,    32782)
+PR_EMS_AB_REQ_SEQ                    = PROP_TAG( PT_LONG,          33058)
+PR_EMS_AB_RESPONSIBLE_LOCAL_DXA      = PROP_TAG( PT_TSTRING,       33059)
+PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_A    = PROP_TAG( PT_STRING8,       33059)
+PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_W    = PROP_TAG( PT_UNICODE,       33059)
+PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_O    = PROP_TAG( PT_OBJECT,        33059)
+PR_EMS_AB_RESPONSIBLE_LOCAL_DXA_T    = PROP_TAG( PT_TSTRING,       33059)
+PR_EMS_AB_RID_SERVER                 = PROP_TAG( PT_TSTRING,       33060)
+PR_EMS_AB_RID_SERVER_A               = PROP_TAG( PT_STRING8,       33060)
+PR_EMS_AB_RID_SERVER_W               = PROP_TAG( PT_UNICODE,       33060)
+PR_EMS_AB_RID_SERVER_O               = PROP_TAG( PT_OBJECT,        33060)
+PR_EMS_AB_RID_SERVER_T               = PROP_TAG( PT_TSTRING,       33060)
+PR_EMS_AB_ROLE_OCCUPANT              = PROP_TAG( PT_MV_TSTRING,    33061)
+PR_EMS_AB_ROLE_OCCUPANT_A            = PROP_TAG( PT_MV_STRING8,    33061)
+PR_EMS_AB_ROLE_OCCUPANT_W            = PROP_TAG( PT_MV_UNICODE,    33061)
+PR_EMS_AB_ROLE_OCCUPANT_O            = PROP_TAG( PT_OBJECT,        33061)
+PR_EMS_AB_ROLE_OCCUPANT_T            = PROP_TAG( PT_MV_TSTRING,    33061)
+PR_EMS_AB_ROUTING_LIST               = PROP_TAG( PT_MV_TSTRING,    33062)
+PR_EMS_AB_ROUTING_LIST_A             = PROP_TAG( PT_MV_STRING8,    33062)
+PR_EMS_AB_ROUTING_LIST_W             = PROP_TAG( PT_MV_UNICODE,    33062)
+PR_EMS_AB_RTS_CHECKPOINT_SIZE        = PROP_TAG( PT_LONG,          33063)
+PR_EMS_AB_RTS_RECOVERY_TIMEOUT       = PROP_TAG( PT_LONG,          33064)
+PR_EMS_AB_RTS_WINDOW_SIZE            = PROP_TAG( PT_LONG,          33065)
+PR_EMS_AB_RUNS_ON                    = PROP_TAG( PT_MV_TSTRING,    33066)
+PR_EMS_AB_RUNS_ON_A                  = PROP_TAG( PT_MV_STRING8,    33066)
+PR_EMS_AB_RUNS_ON_W                  = PROP_TAG( PT_MV_UNICODE,    33066)
+PR_EMS_AB_RUNS_ON_O                  = PROP_TAG( PT_OBJECT,        33066)
+PR_EMS_AB_RUNS_ON_T                  = PROP_TAG( PT_MV_TSTRING,    33066)
+PR_EMS_AB_S_SELECTOR                 = PROP_TAG( PT_BINARY,        33067)
+PR_EMS_AB_S_SELECTOR_INBOUND         = PROP_TAG( PT_BINARY,        33068)
+PR_EMS_AB_SCHEMA_FLAGS               = PROP_TAG( PT_LONG,          33139)
+PR_EMS_AB_SCHEMA_VERSION             = PROP_TAG( PT_MV_LONG,       33148)
+PR_EMS_AB_SEARCH_FLAGS               = PROP_TAG( PT_LONG,          33069)
+PR_EMS_AB_SEARCH_GUIDE               = PROP_TAG( PT_MV_BINARY,     33070)
+PR_EMS_AB_SECURITY_PROTOCOL          = PROP_TAG( PT_MV_BINARY,     32823)
+PR_EMS_AB_SEE_ALSO                   = PROP_TAG( PT_MV_TSTRING,    33071)
+PR_EMS_AB_SEE_ALSO_A                 = PROP_TAG( PT_MV_STRING8,    33071)
+PR_EMS_AB_SEE_ALSO_W                 = PROP_TAG( PT_MV_UNICODE,    33071)
+PR_EMS_AB_SEE_ALSO_O                 = PROP_TAG( PT_OBJECT,        33071)
+PR_EMS_AB_SEE_ALSO_T                 = PROP_TAG( PT_MV_TSTRING,    33071)
+PR_EMS_AB_SERIAL_NUMBER              = PROP_TAG( PT_MV_TSTRING,    33072)
+PR_EMS_AB_SERIAL_NUMBER_A            = PROP_TAG( PT_MV_STRING8,    33072)
+PR_EMS_AB_SERIAL_NUMBER_W            = PROP_TAG( PT_MV_UNICODE,    33072)
+PR_EMS_AB_SERVICE_ACTION_FIRST       = PROP_TAG( PT_LONG,          33073)
+PR_EMS_AB_SERVICE_ACTION_OTHER       = PROP_TAG( PT_LONG,          33074)
+PR_EMS_AB_SERVICE_ACTION_SECOND      = PROP_TAG( PT_LONG,          33075)
+PR_EMS_AB_SERVICE_RESTART_DELAY      = PROP_TAG( PT_LONG,          33076)
+PR_EMS_AB_SERVICE_RESTART_MESSAGE    = PROP_TAG( PT_TSTRING,       33077)
+PR_EMS_AB_SERVICE_RESTART_MESSAGE_A  = PROP_TAG( PT_STRING8,       33077)
+PR_EMS_AB_SERVICE_RESTART_MESSAGE_W  = PROP_TAG( PT_UNICODE,       33077)
+PR_EMS_AB_SESSION_DISCONNECT_TIMER   = PROP_TAG( PT_LONG,          33078)
+PR_EMS_AB_SITE_AFFINITY              = PROP_TAG( PT_MV_TSTRING,    33079)
+PR_EMS_AB_SITE_AFFINITY_A            = PROP_TAG( PT_MV_STRING8,    33079)
+PR_EMS_AB_SITE_AFFINITY_W            = PROP_TAG( PT_MV_UNICODE,    33079)
+PR_EMS_AB_SITE_FOLDER_GUID           = PROP_TAG( PT_BINARY,        33126)
+PR_EMS_AB_SITE_FOLDER_SERVER         = PROP_TAG( PT_TSTRING,       33127)
+PR_EMS_AB_SITE_FOLDER_SERVER_A       = PROP_TAG( PT_STRING8,       33127)
+PR_EMS_AB_SITE_FOLDER_SERVER_W       = PROP_TAG( PT_UNICODE,       33127)
+PR_EMS_AB_SITE_FOLDER_SERVER_O       = PROP_TAG( PT_OBJECT,        33127)
+PR_EMS_AB_SITE_FOLDER_SERVER_T       = PROP_TAG( PT_TSTRING,       33127)
+PR_EMS_AB_SITE_PROXY_SPACE           = PROP_TAG( PT_MV_TSTRING,    33080)
+PR_EMS_AB_SITE_PROXY_SPACE_A         = PROP_TAG( PT_MV_STRING8,    33080)
+PR_EMS_AB_SITE_PROXY_SPACE_W         = PROP_TAG( PT_MV_UNICODE,    33080)
+PR_EMS_AB_SPACE_LAST_COMPUTED        = PROP_TAG( PT_SYSTIME,       33081)
+PR_EMS_AB_STREET_ADDRESS             = PROP_TAG( PT_TSTRING,       33082)
+PR_EMS_AB_STREET_ADDRESS_A           = PROP_TAG( PT_STRING8,       33082)
+PR_EMS_AB_STREET_ADDRESS_W           = PROP_TAG( PT_UNICODE,       33082)
+PR_EMS_AB_SUB_REFS                   = PROP_TAG( PT_MV_TSTRING,    33083)
+PR_EMS_AB_SUB_REFS_A                 = PROP_TAG( PT_MV_STRING8,    33083)
+PR_EMS_AB_SUB_REFS_W                 = PROP_TAG( PT_MV_UNICODE,    33083)
+PR_EMS_AB_SUB_REFS_O                 = PROP_TAG( PT_OBJECT,        33083)
+PR_EMS_AB_SUB_REFS_T                 = PROP_TAG( PT_MV_TSTRING,    33083)
+PR_EMS_AB_SUB_SITE                   = PROP_TAG( PT_TSTRING,       33147)
+PR_EMS_AB_SUB_SITE_A                 = PROP_TAG( PT_STRING8,       33147)
+PR_EMS_AB_SUB_SITE_W                 = PROP_TAG( PT_UNICODE,       33147)
+PR_EMS_AB_SUBMISSION_CONT_LENGTH     = PROP_TAG( PT_LONG,          33084)
+PR_EMS_AB_SUPPORTED_APPLICATION_CONTEXT = PROP_TAG( PT_MV_BINARY,     33085)
+PR_EMS_AB_SUPPORTING_STACK           = PROP_TAG( PT_MV_TSTRING,    33086)
+PR_EMS_AB_SUPPORTING_STACK_A         = PROP_TAG( PT_MV_STRING8,    33086)
+PR_EMS_AB_SUPPORTING_STACK_W         = PROP_TAG( PT_MV_UNICODE,    33086)
+PR_EMS_AB_SUPPORTING_STACK_O         = PROP_TAG( PT_OBJECT,        33086)
+PR_EMS_AB_SUPPORTING_STACK_T         = PROP_TAG( PT_MV_TSTRING,    33086)
+PR_EMS_AB_SUPPORTING_STACK_BL        = PROP_TAG( PT_MV_TSTRING,    33087)
+PR_EMS_AB_SUPPORTING_STACK_BL_A      = PROP_TAG( PT_MV_STRING8,    33087)
+PR_EMS_AB_SUPPORTING_STACK_BL_W      = PROP_TAG( PT_MV_UNICODE,    33087)
+PR_EMS_AB_SUPPORTING_STACK_BL_O      = PROP_TAG( PT_OBJECT,        33087)
+PR_EMS_AB_SUPPORTING_STACK_BL_T      = PROP_TAG( PT_MV_TSTRING,    33087)
+PR_EMS_AB_T_SELECTOR                 = PROP_TAG( PT_BINARY,        33088)
+PR_EMS_AB_T_SELECTOR_INBOUND         = PROP_TAG( PT_BINARY,        33089)
+PR_EMS_AB_TARGET_ADDRESS             = PROP_TAG( PT_TSTRING,       32785)
+PR_EMS_AB_TARGET_ADDRESS_A           = PROP_TAG( PT_STRING8,       32785)
+PR_EMS_AB_TARGET_ADDRESS_W           = PROP_TAG( PT_UNICODE,       32785)
+PR_EMS_AB_TARGET_MTAS                = PROP_TAG( PT_MV_TSTRING,    33090)
+PR_EMS_AB_TARGET_MTAS_A              = PROP_TAG( PT_MV_STRING8,    33090)
+PR_EMS_AB_TARGET_MTAS_W              = PROP_TAG( PT_MV_UNICODE,    33090)
+PR_EMS_AB_TELEPHONE_NUMBER           = PROP_TAG( PT_MV_TSTRING,    32786)
+PR_EMS_AB_TELEPHONE_NUMBER_A         = PROP_TAG( PT_MV_STRING8,    32786)
+PR_EMS_AB_TELEPHONE_NUMBER_W         = PROP_TAG( PT_MV_UNICODE,    32786)
+PR_EMS_AB_TELETEX_TERMINAL_IDENTIFIER = PROP_TAG( PT_MV_BINARY,     33091)
+PR_EMS_AB_TEMP_ASSOC_THRESHOLD       = PROP_TAG( PT_LONG,          33092)
+PR_EMS_AB_TOMBSTONE_LIFETIME         = PROP_TAG( PT_LONG,          33093)
+PR_EMS_AB_TRACKING_LOG_PATH_NAME     = PROP_TAG( PT_TSTRING,       33094)
+PR_EMS_AB_TRACKING_LOG_PATH_NAME_A   = PROP_TAG( PT_STRING8,       33094)
+PR_EMS_AB_TRACKING_LOG_PATH_NAME_W   = PROP_TAG( PT_UNICODE,       33094)
+PR_EMS_AB_TRANS_RETRY_MINS           = PROP_TAG( PT_LONG,          33095)
+PR_EMS_AB_TRANS_TIMEOUT_MINS         = PROP_TAG( PT_LONG,          33096)
+PR_EMS_AB_TRANSFER_RETRY_INTERVAL    = PROP_TAG( PT_LONG,          33097)
+PR_EMS_AB_TRANSFER_TIMEOUT_NON_URGENT = PROP_TAG( PT_LONG,          33098)
+PR_EMS_AB_TRANSFER_TIMEOUT_NORMAL    = PROP_TAG( PT_LONG,          33099)
+PR_EMS_AB_TRANSFER_TIMEOUT_URGENT    = PROP_TAG( PT_LONG,          33100)
+PR_EMS_AB_TRANSLATION_TABLE_USED     = PROP_TAG( PT_LONG,          33101)
+PR_EMS_AB_TRANSPORT_EXPEDITED_DATA   = PROP_TAG( PT_BOOLEAN,       33102)
+PR_EMS_AB_TRUST_LEVEL                = PROP_TAG( PT_LONG,          33103)
+PR_EMS_AB_TURN_REQUEST_THRESHOLD     = PROP_TAG( PT_LONG,          33104)
+PR_EMS_AB_TWO_WAY_ALTERNATE_FACILITY = PROP_TAG( PT_BOOLEAN,       33105)
+PR_EMS_AB_UNAUTH_ORIG_BL             = PROP_TAG( PT_MV_TSTRING,    33106)
+PR_EMS_AB_UNAUTH_ORIG_BL_A           = PROP_TAG( PT_MV_STRING8,    33106)
+PR_EMS_AB_UNAUTH_ORIG_BL_W           = PROP_TAG( PT_MV_UNICODE,    33106)
+PR_EMS_AB_UNAUTH_ORIG_BL_O           = PROP_TAG( PT_OBJECT,        33106)
+PR_EMS_AB_UNAUTH_ORIG_BL_T           = PROP_TAG( PT_MV_TSTRING,    33106)
+PR_EMS_AB_USE_SERVER_VALUES          = PROP_TAG( PT_BOOLEAN,       33150)
+PR_EMS_AB_USER_PASSWORD              = PROP_TAG( PT_MV_BINARY,     33107)
+PR_EMS_AB_USN_CHANGED                = PROP_TAG( PT_LONG,          32809)
+PR_EMS_AB_USN_CREATED                = PROP_TAG( PT_LONG,          33108)
+PR_EMS_AB_USN_DSA_LAST_OBJ_REMOVED   = PROP_TAG( PT_LONG,          33109)
+PR_EMS_AB_USN_INTERSITE              = PROP_TAG( PT_LONG,          33146)
+PR_EMS_AB_USN_LAST_OBJ_REM           = PROP_TAG( PT_LONG,          33110)
+PR_EMS_AB_USN_SOURCE                 = PROP_TAG( PT_LONG,          33111)
+PR_EMS_AB_WWW_HOME_PAGE              = PROP_TAG( PT_TSTRING,       33141)
+PR_EMS_AB_WWW_HOME_PAGE_A            = PROP_TAG( PT_STRING8,       33141)
+PR_EMS_AB_WWW_HOME_PAGE_W            = PROP_TAG( PT_UNICODE,       33141)
+PR_EMS_AB_X121_ADDRESS               = PROP_TAG( PT_MV_TSTRING,    33112)
+PR_EMS_AB_X121_ADDRESS_A             = PROP_TAG( PT_MV_STRING8,    33112)
+PR_EMS_AB_X121_ADDRESS_W             = PROP_TAG( PT_MV_UNICODE,    33112)
+PR_EMS_AB_X25_CALL_USER_DATA_INCOMING = PROP_TAG( PT_BINARY,        33113)
+PR_EMS_AB_X25_CALL_USER_DATA_OUTGOING = PROP_TAG( PT_BINARY,        33114)
+PR_EMS_AB_X25_FACILITIES_DATA_INCOMING = PROP_TAG( PT_BINARY,        33115)
+PR_EMS_AB_X25_FACILITIES_DATA_OUTGOING = PROP_TAG( PT_BINARY,        33116)
+PR_EMS_AB_X25_LEASED_LINE_PORT       = PROP_TAG( PT_BINARY,        33117)
+PR_EMS_AB_X25_LEASED_OR_SWITCHED     = PROP_TAG( PT_BOOLEAN,       33118)
+PR_EMS_AB_X25_REMOTE_MTA_PHONE       = PROP_TAG( PT_TSTRING,       33119)
+PR_EMS_AB_X25_REMOTE_MTA_PHONE_A     = PROP_TAG( PT_STRING8,       33119)
+PR_EMS_AB_X25_REMOTE_MTA_PHONE_W     = PROP_TAG( PT_UNICODE,       33119)
+PR_EMS_AB_X400_ATTACHMENT_TYPE       = PROP_TAG( PT_BINARY,        33120)
+PR_EMS_AB_X400_SELECTOR_SYNTAX       = PROP_TAG( PT_LONG,          33121)
+PR_EMS_AB_X500_ACCESS_CONTROL_LIST   = PROP_TAG( PT_BINARY,        33122)
+PR_EMS_AB_XMIT_TIMEOUT_NON_URGENT    = PROP_TAG( PT_LONG,          33123)
+PR_EMS_AB_XMIT_TIMEOUT_NORMAL        = PROP_TAG( PT_LONG,          33124)
+PR_EMS_AB_XMIT_TIMEOUT_URGENT        = PROP_TAG( PT_LONG,          33125)
+
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapi.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapi.pyd
new file mode 100644
index 0000000..e90cff00
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapi.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapitags.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapitags.py
new file mode 100644
index 0000000..196039f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapitags.py
@@ -0,0 +1,1003 @@
+MV_FLAG         = 4096          # Multi-value flag 
+
+# MAPI property-type codes (PT_*).  A full property tag packs one of these
+# 16-bit type codes into its low word and a property id into its high word
+# (see PROP_TAG below).
+PT_UNSPECIFIED  = 0
+PT_NULL         = 1
+PT_I2           = 2
+PT_LONG         = 3
+PT_R4           = 4
+PT_DOUBLE       = 5
+PT_CURRENCY     = 6
+PT_APPTIME      = 7
+PT_ERROR        = 10
+PT_BOOLEAN      = 11
+PT_OBJECT       = 13
+PT_I8           = 20
+PT_STRING8      = 30
+PT_UNICODE      = 31
+PT_SYSTIME      = 64
+PT_CLSID        = 72
+PT_BINARY       = 258
+
+# Alternate names for the same underlying type codes.
+PT_SHORT    = PT_I2
+PT_I4       = PT_LONG
+PT_FLOAT    = PT_R4
+PT_R8       = PT_DOUBLE
+PT_LONGLONG = PT_I8
+
+# Multi-valued variants: the base type code with the multi-value bit set.
+PT_MV_I2        = (MV_FLAG|PT_I2)
+PT_MV_LONG      = (MV_FLAG|PT_LONG)
+PT_MV_R4        = (MV_FLAG|PT_R4)
+PT_MV_DOUBLE    = (MV_FLAG|PT_DOUBLE)
+PT_MV_CURRENCY  = (MV_FLAG|PT_CURRENCY)
+PT_MV_APPTIME   = (MV_FLAG|PT_APPTIME)
+PT_MV_SYSTIME   = (MV_FLAG|PT_SYSTIME)
+PT_MV_STRING8   = (MV_FLAG|PT_STRING8)
+PT_MV_BINARY    = (MV_FLAG|PT_BINARY)
+PT_MV_UNICODE   = (MV_FLAG|PT_UNICODE)
+PT_MV_CLSID     = (MV_FLAG|PT_CLSID)
+PT_MV_I8        = (MV_FLAG|PT_I8)
+
+# Aliases for the multi-valued variants.
+PT_MV_SHORT     = PT_MV_I2
+PT_MV_I4        = PT_MV_LONG
+PT_MV_FLOAT     = PT_MV_R4
+PT_MV_R8        = PT_MV_DOUBLE
+PT_MV_LONGLONG  = PT_MV_I8
+
+# NOTE(review): TSTRING is mapped to the Unicode variant here — this assumes
+# the Unicode build of the MAPI headers; confirm against the native mapi module.
+PT_TSTRING = PT_UNICODE
+PT_MV_TSTRING =  (MV_FLAG|PT_UNICODE)
+
+
+# Low 16 bits of a property tag hold the type; the high bits hold the id.
+PROP_TYPE_MASK          = 65535 # Mask for Property type
+
def PROP_TYPE(ulPropTag):
	"""Extract the PT_* property-type code (low 16 bits) from *ulPropTag*."""
	return PROP_TYPE_MASK & ulPropTag
+
def PROP_ID(ulPropTag):
	"""Extract the property id (the bits above the low 16 type bits) from *ulPropTag*."""
	id_part = ulPropTag >> 16
	return id_part
+
def PROP_TAG(ulPropType, ulPropID):
	"""Build a property tag by packing *ulPropID* into the high word and the
	PT_* code *ulPropType* into the low word."""
	tag = ulPropID << 16
	return tag | ulPropType
+
+# Sentinel property ids, and the canonical "null" property tag built from them.
+PROP_ID_NULL            = 0
+PROP_ID_INVALID         = 65535          # no valid property uses this id
+PR_NULL                 = PROP_TAG( PT_NULL, PROP_ID_NULL)
+
+
+PR_ACKNOWLEDGEMENT_MODE                     = PROP_TAG( PT_LONG,      1)
+PR_ACKNOWLEDGEMENT_MODE                     = PROP_TAG( PT_LONG,      1)
+PR_ALTERNATE_RECIPIENT_ALLOWED              = PROP_TAG( PT_BOOLEAN,   2)
+PR_AUTHORIZING_USERS                        = PROP_TAG( PT_BINARY,    3)
+PR_AUTO_FORWARD_COMMENT                     = PROP_TAG( PT_TSTRING,   4)
+PR_AUTO_FORWARD_COMMENT_W                   = PROP_TAG( PT_UNICODE,   4)
+PR_AUTO_FORWARD_COMMENT_W                   = PROP_TAG( PT_UNICODE,   4)
+PR_AUTO_FORWARD_COMMENT_A                   = PROP_TAG( PT_STRING8,   4)
+PR_AUTO_FORWARDED                           = PROP_TAG( PT_BOOLEAN,   5)
+PR_CONTENT_CONFIDENTIALITY_ALGORITHM_ID     = PROP_TAG( PT_BINARY,    6)
+PR_CONTENT_CORRELATOR                       = PROP_TAG( PT_BINARY,    7)
+PR_CONTENT_IDENTIFIER                       = PROP_TAG( PT_TSTRING,   8)
+PR_CONTENT_IDENTIFIER_W                     = PROP_TAG( PT_UNICODE,   8)
+PR_CONTENT_IDENTIFIER_A                     = PROP_TAG( PT_STRING8,   8)
+PR_CONTENT_LENGTH                           = PROP_TAG( PT_LONG,      9)
+PR_CONTENT_RETURN_REQUESTED                 = PROP_TAG( PT_BOOLEAN,   10)
+PR_CONVERSATION_KEY                         = PROP_TAG( PT_BINARY,    11)
+PR_CONVERSION_EITS                          = PROP_TAG( PT_BINARY,    12)
+PR_CONVERSION_WITH_LOSS_PROHIBITED          = PROP_TAG( PT_BOOLEAN,   13)
+PR_CONVERTED_EITS                           = PROP_TAG( PT_BINARY,    14)
+PR_DEFERRED_DELIVERY_TIME                   = PROP_TAG( PT_SYSTIME,   15)
+PR_DELIVER_TIME                             = PROP_TAG( PT_SYSTIME,   16)
+PR_DISCARD_REASON                           = PROP_TAG( PT_LONG,      17)
+PR_DISCLOSURE_OF_RECIPIENTS                 = PROP_TAG( PT_BOOLEAN,   18)
+PR_DL_EXPANSION_HISTORY                     = PROP_TAG( PT_BINARY,    19)
+PR_DL_EXPANSION_PROHIBITED                  = PROP_TAG( PT_BOOLEAN,   20)
+PR_EXPIRY_TIME                              = PROP_TAG( PT_SYSTIME,   21)
+PR_IMPLICIT_CONVERSION_PROHIBITED           = PROP_TAG( PT_BOOLEAN,   22)
+PR_IMPORTANCE                               = PROP_TAG( PT_LONG,      23)
+PR_IPM_ID                                   = PROP_TAG( PT_BINARY,    24)
+PR_LATEST_DELIVERY_TIME                     = PROP_TAG( PT_SYSTIME,   25)
+PR_MESSAGE_CLASS                            = PROP_TAG( PT_TSTRING,   26)
+PR_MESSAGE_CLASS_W                          = PROP_TAG( PT_UNICODE,   26)
+PR_MESSAGE_CLASS_A                          = PROP_TAG( PT_STRING8,   26)
+PR_MESSAGE_DELIVERY_ID                      = PROP_TAG( PT_BINARY,    27)
+PR_MESSAGE_SECURITY_LABEL                   = PROP_TAG( PT_BINARY,    30)
+PR_OBSOLETED_IPMS                           = PROP_TAG( PT_BINARY,    31)
+PR_ORIGINALLY_INTENDED_RECIPIENT_NAME       = PROP_TAG( PT_BINARY,    32)
+PR_ORIGINAL_EITS                            = PROP_TAG( PT_BINARY,    33)
+PR_ORIGINATOR_CERTIFICATE                   = PROP_TAG( PT_BINARY,    34)
+PR_ORIGINATOR_DELIVERY_REPORT_REQUESTED     = PROP_TAG( PT_BOOLEAN,   35)
+PR_ORIGINATOR_RETURN_ADDRESS                = PROP_TAG( PT_BINARY,    36)
+PR_PARENT_KEY                               = PROP_TAG( PT_BINARY,    37)
+PR_PRIORITY                                 = PROP_TAG( PT_LONG,      38)
+PR_ORIGIN_CHECK                             = PROP_TAG( PT_BINARY,    39)
+PR_PROOF_OF_SUBMISSION_REQUESTED            = PROP_TAG( PT_BOOLEAN,   40)
+PR_READ_RECEIPT_REQUESTED                   = PROP_TAG( PT_BOOLEAN,   41)
+PR_RECEIPT_TIME                             = PROP_TAG( PT_SYSTIME,   42)
+PR_RECIPIENT_REASSIGNMENT_PROHIBITED        = PROP_TAG( PT_BOOLEAN,   43)
+PR_REDIRECTION_HISTORY                      = PROP_TAG( PT_BINARY,    44)
+PR_RELATED_IPMS                             = PROP_TAG( PT_BINARY,    45)
+PR_ORIGINAL_SENSITIVITY                     = PROP_TAG( PT_LONG,      46)
+PR_LANGUAGES                                = PROP_TAG( PT_TSTRING,   47)
+PR_LANGUAGES_W                              = PROP_TAG( PT_UNICODE,   47)
+PR_LANGUAGES_A                              = PROP_TAG( PT_STRING8,   47)
+PR_REPLY_TIME                               = PROP_TAG( PT_SYSTIME,   48)
+PR_REPORT_TAG                               = PROP_TAG( PT_BINARY,    49)
+PR_REPORT_TIME                              = PROP_TAG( PT_SYSTIME,   50)
+PR_RETURNED_IPM                             = PROP_TAG( PT_BOOLEAN,   51)
+PR_SECURITY                                 = PROP_TAG( PT_LONG,      52)
+PR_INCOMPLETE_COPY                          = PROP_TAG( PT_BOOLEAN,   53)
+PR_SENSITIVITY                              = PROP_TAG( PT_LONG,      54)
+PR_SUBJECT                                  = PROP_TAG( PT_TSTRING,   55)
+PR_SUBJECT_W                                = PROP_TAG( PT_UNICODE,   55)
+PR_SUBJECT_A                                = PROP_TAG( PT_STRING8,   55)
+PR_SUBJECT_IPM                              = PROP_TAG( PT_BINARY,    56)
+PR_CLIENT_SUBMIT_TIME                       = PROP_TAG( PT_SYSTIME,   57)
+PR_REPORT_NAME                              = PROP_TAG( PT_TSTRING,   58)
+PR_REPORT_NAME_W                            = PROP_TAG( PT_UNICODE,   58)
+PR_REPORT_NAME_A                            = PROP_TAG( PT_STRING8,   58)
+PR_SENT_REPRESENTING_SEARCH_KEY             = PROP_TAG( PT_BINARY,    59)
+PR_X400_CONTENT_TYPE                        = PROP_TAG( PT_BINARY,    60)
+PR_SUBJECT_PREFIX                           = PROP_TAG( PT_TSTRING,   61)
+PR_SUBJECT_PREFIX_W                         = PROP_TAG( PT_UNICODE,   61)
+PR_SUBJECT_PREFIX_A                         = PROP_TAG( PT_STRING8,   61)
+PR_NON_RECEIPT_REASON                       = PROP_TAG( PT_LONG,      62)
+PR_RECEIVED_BY_ENTRYID                      = PROP_TAG( PT_BINARY,    63)
+PR_RECEIVED_BY_NAME                         = PROP_TAG( PT_TSTRING,   64)
+PR_RECEIVED_BY_NAME_W                       = PROP_TAG( PT_UNICODE,   64)
+PR_RECEIVED_BY_NAME_A                       = PROP_TAG( PT_STRING8,   64)
+PR_SENT_REPRESENTING_ENTRYID                = PROP_TAG( PT_BINARY,    65)
+PR_SENT_REPRESENTING_NAME                   = PROP_TAG( PT_TSTRING,   66)
+PR_SENT_REPRESENTING_NAME_W                 = PROP_TAG( PT_UNICODE,   66)
+PR_SENT_REPRESENTING_NAME_A                 = PROP_TAG( PT_STRING8,   66)
+PR_RCVD_REPRESENTING_ENTRYID                = PROP_TAG( PT_BINARY,    67)
+PR_RCVD_REPRESENTING_NAME                   = PROP_TAG( PT_TSTRING,   68)
+PR_RCVD_REPRESENTING_NAME_W                 = PROP_TAG( PT_UNICODE,   68)
+PR_RCVD_REPRESENTING_NAME_A                 = PROP_TAG( PT_STRING8,   68)
+PR_REPORT_ENTRYID                           = PROP_TAG( PT_BINARY,    69)
+PR_READ_RECEIPT_ENTRYID                     = PROP_TAG( PT_BINARY,    70)
+PR_MESSAGE_SUBMISSION_ID                    = PROP_TAG( PT_BINARY,    71)
+PR_PROVIDER_SUBMIT_TIME                     = PROP_TAG( PT_SYSTIME,   72)
+PR_ORIGINAL_SUBJECT                         = PROP_TAG( PT_TSTRING,   73)
+PR_ORIGINAL_SUBJECT_W                       = PROP_TAG( PT_UNICODE,   73)
+PR_ORIGINAL_SUBJECT_A                       = PROP_TAG( PT_STRING8,   73)
+PR_DISC_VAL                                 = PROP_TAG( PT_BOOLEAN,   74)
+PR_ORIG_MESSAGE_CLASS                       = PROP_TAG( PT_TSTRING,   75)
+PR_ORIG_MESSAGE_CLASS_W                     = PROP_TAG( PT_UNICODE,   75)
+PR_ORIG_MESSAGE_CLASS_A                     = PROP_TAG( PT_STRING8,   75)
+PR_ORIGINAL_AUTHOR_ENTRYID                  = PROP_TAG( PT_BINARY,    76)
+PR_ORIGINAL_AUTHOR_NAME                     = PROP_TAG( PT_TSTRING,   77)
+PR_ORIGINAL_AUTHOR_NAME_W                   = PROP_TAG( PT_UNICODE,   77)
+PR_ORIGINAL_AUTHOR_NAME_A                   = PROP_TAG( PT_STRING8,   77)
+PR_ORIGINAL_SUBMIT_TIME                     = PROP_TAG( PT_SYSTIME,   78)
+PR_REPLY_RECIPIENT_ENTRIES                  = PROP_TAG( PT_BINARY,    79)
+PR_REPLY_RECIPIENT_NAMES                    = PROP_TAG( PT_TSTRING,   80)
+PR_REPLY_RECIPIENT_NAMES_W                  = PROP_TAG( PT_UNICODE,   80)
+PR_REPLY_RECIPIENT_NAMES_A                  = PROP_TAG( PT_STRING8,   80)
+PR_RECEIVED_BY_SEARCH_KEY                   = PROP_TAG( PT_BINARY,    81)
+PR_RCVD_REPRESENTING_SEARCH_KEY             = PROP_TAG( PT_BINARY,    82)
+PR_READ_RECEIPT_SEARCH_KEY                  = PROP_TAG( PT_BINARY,    83)
+PR_REPORT_SEARCH_KEY                        = PROP_TAG( PT_BINARY,    84)
+PR_ORIGINAL_DELIVERY_TIME                   = PROP_TAG( PT_SYSTIME,   85)
+PR_ORIGINAL_AUTHOR_SEARCH_KEY               = PROP_TAG( PT_BINARY,    86)
+PR_MESSAGE_TO_ME                            = PROP_TAG( PT_BOOLEAN,   87)
+PR_MESSAGE_CC_ME                            = PROP_TAG( PT_BOOLEAN,   88)
+PR_MESSAGE_RECIP_ME                         = PROP_TAG( PT_BOOLEAN,   89)
+PR_ORIGINAL_SENDER_NAME                     = PROP_TAG( PT_TSTRING,   90)
+PR_ORIGINAL_SENDER_NAME_W                   = PROP_TAG( PT_UNICODE,   90)
+PR_ORIGINAL_SENDER_NAME_A                   = PROP_TAG( PT_STRING8,   90)
+PR_ORIGINAL_SENDER_ENTRYID                  = PROP_TAG( PT_BINARY,    91)
+PR_ORIGINAL_SENDER_SEARCH_KEY               = PROP_TAG( PT_BINARY,    92)
+PR_ORIGINAL_SENT_REPRESENTING_NAME          = PROP_TAG( PT_TSTRING,   93)
+PR_ORIGINAL_SENT_REPRESENTING_NAME_W        = PROP_TAG( PT_UNICODE,   93)
+PR_ORIGINAL_SENT_REPRESENTING_NAME_A        = PROP_TAG( PT_STRING8,   93)
+PR_ORIGINAL_SENT_REPRESENTING_ENTRYID       = PROP_TAG( PT_BINARY,    94)
+PR_ORIGINAL_SENT_REPRESENTING_SEARCH_KEY    = PROP_TAG( PT_BINARY,    95)
+PR_START_DATE                               = PROP_TAG( PT_SYSTIME,   96)
+PR_END_DATE                                 = PROP_TAG( PT_SYSTIME,   97)
+PR_OWNER_APPT_ID                            = PROP_TAG( PT_LONG,      98)
+PR_RESPONSE_REQUESTED                       = PROP_TAG( PT_BOOLEAN,   99)
+PR_SENT_REPRESENTING_ADDRTYPE               = PROP_TAG( PT_TSTRING,   100)
+PR_SENT_REPRESENTING_ADDRTYPE_W             = PROP_TAG( PT_UNICODE,   100)
+PR_SENT_REPRESENTING_ADDRTYPE_A             = PROP_TAG( PT_STRING8,   100)
+PR_SENT_REPRESENTING_EMAIL_ADDRESS          = PROP_TAG( PT_TSTRING,   101)
+PR_SENT_REPRESENTING_EMAIL_ADDRESS_W        = PROP_TAG( PT_UNICODE,   101)
+PR_SENT_REPRESENTING_EMAIL_ADDRESS_A        = PROP_TAG( PT_STRING8,   101)
+PR_ORIGINAL_SENDER_ADDRTYPE                 = PROP_TAG( PT_TSTRING,   102)
+PR_ORIGINAL_SENDER_ADDRTYPE_W               = PROP_TAG( PT_UNICODE,   102)
+PR_ORIGINAL_SENDER_ADDRTYPE_A               = PROP_TAG( PT_STRING8,   102)
+PR_ORIGINAL_SENDER_EMAIL_ADDRESS            = PROP_TAG( PT_TSTRING,   103)
+PR_ORIGINAL_SENDER_EMAIL_ADDRESS_W          = PROP_TAG( PT_UNICODE,   103)
+PR_ORIGINAL_SENDER_EMAIL_ADDRESS_A          = PROP_TAG( PT_STRING8,   103)
+PR_ORIGINAL_SENT_REPRESENTING_ADDRTYPE      = PROP_TAG( PT_TSTRING,   104)
+PR_ORIGINAL_SENT_REPRESENTING_ADDRTYPE_W    = PROP_TAG( PT_UNICODE,   104)
+PR_ORIGINAL_SENT_REPRESENTING_ADDRTYPE_A    = PROP_TAG( PT_STRING8,   104)
+PR_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS = PROP_TAG( PT_TSTRING,   105)
+PR_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS_W   = PROP_TAG( PT_UNICODE,   105)
+PR_ORIGINAL_SENT_REPRESENTING_EMAIL_ADDRESS_A   = PROP_TAG( PT_STRING8,   105)
+PR_CONVERSATION_TOPIC                       = PROP_TAG( PT_TSTRING,   112)
+PR_CONVERSATION_TOPIC_W                     = PROP_TAG( PT_UNICODE,   112)
+PR_CONVERSATION_TOPIC_A                     = PROP_TAG( PT_STRING8,   112)
+PR_CONVERSATION_INDEX                       = PROP_TAG( PT_BINARY,    113)
+PR_ORIGINAL_DISPLAY_BCC                     = PROP_TAG( PT_TSTRING,   114)
+PR_ORIGINAL_DISPLAY_BCC_W                   = PROP_TAG( PT_UNICODE,   114)
+PR_ORIGINAL_DISPLAY_BCC_A                   = PROP_TAG( PT_STRING8,   114)
+PR_ORIGINAL_DISPLAY_CC                      = PROP_TAG( PT_TSTRING,   115)
+PR_ORIGINAL_DISPLAY_CC_W                    = PROP_TAG( PT_UNICODE,   115)
+PR_ORIGINAL_DISPLAY_CC_A                    = PROP_TAG( PT_STRING8,   115)
+PR_ORIGINAL_DISPLAY_TO                      = PROP_TAG( PT_TSTRING,   116)
+PR_ORIGINAL_DISPLAY_TO_W                    = PROP_TAG( PT_UNICODE,   116)
+PR_ORIGINAL_DISPLAY_TO_A                    = PROP_TAG( PT_STRING8,   116)
+PR_RECEIVED_BY_ADDRTYPE                     = PROP_TAG( PT_TSTRING,   117)
+PR_RECEIVED_BY_ADDRTYPE_W                   = PROP_TAG( PT_UNICODE,   117)
+PR_RECEIVED_BY_ADDRTYPE_A                   = PROP_TAG( PT_STRING8,   117)
+PR_RECEIVED_BY_EMAIL_ADDRESS                = PROP_TAG( PT_TSTRING,   118)
+PR_RECEIVED_BY_EMAIL_ADDRESS_W              = PROP_TAG( PT_UNICODE,   118)
+PR_RECEIVED_BY_EMAIL_ADDRESS_A              = PROP_TAG( PT_STRING8,   118)
+PR_RCVD_REPRESENTING_ADDRTYPE               = PROP_TAG( PT_TSTRING,   119)
+PR_RCVD_REPRESENTING_ADDRTYPE_W             = PROP_TAG( PT_UNICODE,   119)
+PR_RCVD_REPRESENTING_ADDRTYPE_A             = PROP_TAG( PT_STRING8,   119)
+PR_RCVD_REPRESENTING_EMAIL_ADDRESS          = PROP_TAG( PT_TSTRING,   120)
+PR_RCVD_REPRESENTING_EMAIL_ADDRESS_W        = PROP_TAG( PT_UNICODE,   120)
+PR_RCVD_REPRESENTING_EMAIL_ADDRESS_A        = PROP_TAG( PT_STRING8,   120)
+PR_ORIGINAL_AUTHOR_ADDRTYPE                 = PROP_TAG( PT_TSTRING,   121)
+PR_ORIGINAL_AUTHOR_ADDRTYPE_W               = PROP_TAG( PT_UNICODE,   121)
+PR_ORIGINAL_AUTHOR_ADDRTYPE_A               = PROP_TAG( PT_STRING8,   121)
+PR_ORIGINAL_AUTHOR_EMAIL_ADDRESS            = PROP_TAG( PT_TSTRING,   122)
+PR_ORIGINAL_AUTHOR_EMAIL_ADDRESS_W          = PROP_TAG( PT_UNICODE,   122)
+PR_ORIGINAL_AUTHOR_EMAIL_ADDRESS_A          = PROP_TAG( PT_STRING8,   122)
+PR_ORIGINALLY_INTENDED_RECIP_ADDRTYPE       = PROP_TAG( PT_TSTRING,   123)
+PR_ORIGINALLY_INTENDED_RECIP_ADDRTYPE_W     = PROP_TAG( PT_UNICODE,   123)
+PR_ORIGINALLY_INTENDED_RECIP_ADDRTYPE_A     = PROP_TAG( PT_STRING8,   123)
+PR_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS  = PROP_TAG( PT_TSTRING,   124)
+PR_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS_W    = PROP_TAG( PT_UNICODE,   124)
+PR_ORIGINALLY_INTENDED_RECIP_EMAIL_ADDRESS_A    = PROP_TAG( PT_STRING8,   124)
+PR_TRANSPORT_MESSAGE_HEADERS                = PROP_TAG(PT_TSTRING,    125)
+PR_TRANSPORT_MESSAGE_HEADERS_W              = PROP_TAG(PT_UNICODE,    125)
+PR_TRANSPORT_MESSAGE_HEADERS_A              = PROP_TAG(PT_STRING8,    125)
+PR_DELEGATION                               = PROP_TAG(PT_BINARY,     126)
+PR_TNEF_CORRELATION_KEY                     = PROP_TAG(PT_BINARY,     127)
+PR_BODY                                     = PROP_TAG( PT_TSTRING,   4096)
+PR_BODY_W                                   = PROP_TAG( PT_UNICODE,   4096)
+PR_BODY_A                                   = PROP_TAG( PT_STRING8,   4096)
+PR_BODY_HTML_A                              = PROP_TAG( PT_STRING8,   4115)
+PR_BODY_HTML_W                              = PROP_TAG( PT_UNICODE,   4115)
+
+PR_REPORT_TEXT                              = PROP_TAG( PT_TSTRING,   4097)
+PR_REPORT_TEXT_W                            = PROP_TAG( PT_UNICODE,   4097)
+PR_REPORT_TEXT_A                            = PROP_TAG( PT_STRING8,   4097)
+PR_ORIGINATOR_AND_DL_EXPANSION_HISTORY      = PROP_TAG( PT_BINARY,    4098)
+PR_REPORTING_DL_NAME                        = PROP_TAG( PT_BINARY,    4099)
+PR_REPORTING_MTA_CERTIFICATE                = PROP_TAG( PT_BINARY,    4100)
+PR_RTF_SYNC_BODY_CRC                        = PROP_TAG( PT_LONG,      4102)
+PR_RTF_SYNC_BODY_COUNT                      = PROP_TAG( PT_LONG,      4103)
+PR_RTF_SYNC_BODY_TAG                        = PROP_TAG( PT_TSTRING,   4104)
+PR_RTF_SYNC_BODY_TAG_W                      = PROP_TAG( PT_UNICODE,   4104)
+PR_RTF_SYNC_BODY_TAG_A                      = PROP_TAG( PT_STRING8,   4104)
+PR_RTF_COMPRESSED                           = PROP_TAG( PT_BINARY,    4105)
+PR_RTF_SYNC_PREFIX_COUNT                    = PROP_TAG( PT_LONG,      4112)
+PR_RTF_SYNC_TRAILING_COUNT                  = PROP_TAG( PT_LONG,      4113)
+PR_ORIGINALLY_INTENDED_RECIP_ENTRYID        = PROP_TAG( PT_BINARY,    4114)
+PR_CONTENT_INTEGRITY_CHECK                  = PROP_TAG( PT_BINARY,    3072)
+PR_EXPLICIT_CONVERSION                      = PROP_TAG( PT_LONG,      3073)
+PR_IPM_RETURN_REQUESTED                     = PROP_TAG( PT_BOOLEAN,   3074)
+PR_MESSAGE_TOKEN                            = PROP_TAG( PT_BINARY,    3075)
+PR_NDR_REASON_CODE                          = PROP_TAG( PT_LONG,      3076)
+PR_NDR_DIAG_CODE                            = PROP_TAG( PT_LONG,      3077)
+PR_NON_RECEIPT_NOTIFICATION_REQUESTED       = PROP_TAG( PT_BOOLEAN,   3078)
+PR_DELIVERY_POINT                           = PROP_TAG( PT_LONG,      3079)
+PR_ORIGINATOR_NON_DELIVERY_REPORT_REQUESTED = PROP_TAG( PT_BOOLEAN,   3080)
+PR_ORIGINATOR_REQUESTED_ALTERNATE_RECIPIENT = PROP_TAG( PT_BINARY,    3081)
+PR_PHYSICAL_DELIVERY_BUREAU_FAX_DELIVERY    = PROP_TAG( PT_BOOLEAN,   3082)
+PR_PHYSICAL_DELIVERY_MODE                   = PROP_TAG( PT_LONG,      3083)
+PR_PHYSICAL_DELIVERY_REPORT_REQUEST         = PROP_TAG( PT_LONG,      3084)
+PR_PHYSICAL_FORWARDING_ADDRESS              = PROP_TAG( PT_BINARY,    3085)
+PR_PHYSICAL_FORWARDING_ADDRESS_REQUESTED    = PROP_TAG( PT_BOOLEAN,   3086)
+PR_PHYSICAL_FORWARDING_PROHIBITED           = PROP_TAG( PT_BOOLEAN,   3087)
+PR_PHYSICAL_RENDITION_ATTRIBUTES            = PROP_TAG( PT_BINARY,    3088)
+PR_PROOF_OF_DELIVERY                        = PROP_TAG( PT_BINARY,    3089)
+PR_PROOF_OF_DELIVERY_REQUESTED              = PROP_TAG( PT_BOOLEAN,   3090)
+PR_RECIPIENT_CERTIFICATE                    = PROP_TAG( PT_BINARY,    3091)
+PR_RECIPIENT_NUMBER_FOR_ADVICE              = PROP_TAG( PT_TSTRING,   3092)
+PR_RECIPIENT_NUMBER_FOR_ADVICE_W            = PROP_TAG( PT_UNICODE,   3092)
+PR_RECIPIENT_NUMBER_FOR_ADVICE_A            = PROP_TAG( PT_STRING8,   3092)
+PR_RECIPIENT_TYPE                           = PROP_TAG( PT_LONG,      3093)
+PR_REGISTERED_MAIL_TYPE                     = PROP_TAG( PT_LONG,      3094)
+PR_REPLY_REQUESTED                          = PROP_TAG( PT_BOOLEAN,   3095)
+PR_REQUESTED_DELIVERY_METHOD                = PROP_TAG( PT_LONG,      3096)
+PR_SENDER_ENTRYID                           = PROP_TAG( PT_BINARY,    3097)
+PR_SENDER_NAME                              = PROP_TAG( PT_TSTRING,   3098)
+PR_SENDER_NAME_W                            = PROP_TAG( PT_UNICODE,   3098)
+PR_SENDER_NAME_A                            = PROP_TAG( PT_STRING8,   3098)
+PR_SUPPLEMENTARY_INFO                       = PROP_TAG( PT_TSTRING,   3099)
+PR_SUPPLEMENTARY_INFO_W                     = PROP_TAG( PT_UNICODE,   3099)
+PR_SUPPLEMENTARY_INFO_A                     = PROP_TAG( PT_STRING8,   3099)
+PR_TYPE_OF_MTS_USER                         = PROP_TAG( PT_LONG,      3100)
+PR_SENDER_SEARCH_KEY                        = PROP_TAG( PT_BINARY,    3101)
+PR_SENDER_ADDRTYPE                          = PROP_TAG( PT_TSTRING,   3102)
+PR_SENDER_ADDRTYPE_W                        = PROP_TAG( PT_UNICODE,   3102)
+PR_SENDER_ADDRTYPE_A                        = PROP_TAG( PT_STRING8,   3102)
+PR_SENDER_EMAIL_ADDRESS                     = PROP_TAG( PT_TSTRING,   3103)
+PR_SENDER_EMAIL_ADDRESS_W                   = PROP_TAG( PT_UNICODE,   3103)
+PR_SENDER_EMAIL_ADDRESS_A                   = PROP_TAG( PT_STRING8,   3103)
+PR_CURRENT_VERSION                          = PROP_TAG( PT_I8,        3584)
+PR_DELETE_AFTER_SUBMIT                      = PROP_TAG( PT_BOOLEAN,   3585)
+PR_DISPLAY_BCC                              = PROP_TAG( PT_TSTRING,   3586)
+PR_DISPLAY_BCC_W                            = PROP_TAG( PT_UNICODE,   3586)
+PR_DISPLAY_BCC_A                            = PROP_TAG( PT_STRING8,   3586)
+PR_DISPLAY_CC                               = PROP_TAG( PT_TSTRING,   3587)
+PR_DISPLAY_CC_W                             = PROP_TAG( PT_UNICODE,   3587)
+PR_DISPLAY_CC_A                             = PROP_TAG( PT_STRING8,   3587)
+PR_DISPLAY_TO                               = PROP_TAG( PT_TSTRING,   3588)
+PR_DISPLAY_TO_W                             = PROP_TAG( PT_UNICODE,   3588)
+PR_DISPLAY_TO_A                             = PROP_TAG( PT_STRING8,   3588)
+PR_PARENT_DISPLAY                           = PROP_TAG( PT_TSTRING,   3589)
+PR_PARENT_DISPLAY_W                         = PROP_TAG( PT_UNICODE,   3589)
+PR_PARENT_DISPLAY_A                         = PROP_TAG( PT_STRING8,   3589)
+PR_MESSAGE_DELIVERY_TIME                    = PROP_TAG( PT_SYSTIME,   3590)
+PR_MESSAGE_FLAGS                            = PROP_TAG( PT_LONG,      3591)
+PR_MESSAGE_SIZE                             = PROP_TAG( PT_LONG,      3592)
+PR_PARENT_ENTRYID                           = PROP_TAG( PT_BINARY,    3593)
+PR_SENTMAIL_ENTRYID                         = PROP_TAG( PT_BINARY,    3594)
+PR_CORRELATE                                = PROP_TAG( PT_BOOLEAN,   3596)
+PR_CORRELATE_MTSID                          = PROP_TAG( PT_BINARY,    3597)
+PR_DISCRETE_VALUES                          = PROP_TAG( PT_BOOLEAN,   3598)
+PR_RESPONSIBILITY                           = PROP_TAG( PT_BOOLEAN,   3599)
+PR_SPOOLER_STATUS                           = PROP_TAG( PT_LONG,      3600)
+PR_TRANSPORT_STATUS                         = PROP_TAG( PT_LONG,      3601)
+PR_MESSAGE_RECIPIENTS                       = PROP_TAG( PT_OBJECT,    3602)
+PR_MESSAGE_ATTACHMENTS                      = PROP_TAG( PT_OBJECT,    3603)
+PR_SUBMIT_FLAGS                             = PROP_TAG( PT_LONG,      3604)
+PR_RECIPIENT_STATUS                         = PROP_TAG( PT_LONG,      3605)
+PR_TRANSPORT_KEY                            = PROP_TAG( PT_LONG,      3606)
+PR_MSG_STATUS                               = PROP_TAG( PT_LONG,      3607)
+PR_MESSAGE_DOWNLOAD_TIME                    = PROP_TAG( PT_LONG,      3608)
+PR_CREATION_VERSION                         = PROP_TAG( PT_I8,        3609)
+PR_MODIFY_VERSION                           = PROP_TAG( PT_I8,        3610)
+PR_HASATTACH                                = PROP_TAG( PT_BOOLEAN,   3611)
+PR_BODY_CRC                                 = PROP_TAG( PT_LONG,      3612)
+PR_NORMALIZED_SUBJECT                       = PROP_TAG( PT_TSTRING,   3613)
+PR_NORMALIZED_SUBJECT_W                     = PROP_TAG( PT_UNICODE,   3613)
+PR_NORMALIZED_SUBJECT_A                     = PROP_TAG( PT_STRING8,   3613)
+PR_RTF_IN_SYNC                              = PROP_TAG( PT_BOOLEAN,   3615)
+PR_ATTACH_SIZE                              = PROP_TAG( PT_LONG,      3616)
+PR_ATTACH_NUM                               = PROP_TAG( PT_LONG,      3617)
+PR_PREPROCESS                               = PROP_TAG( PT_BOOLEAN,   3618)
+PR_ORIGINATING_MTA_CERTIFICATE              = PROP_TAG( PT_BINARY,    3621)
+PR_PROOF_OF_SUBMISSION                      = PROP_TAG( PT_BINARY,    3622)
+PR_ENTRYID                                  = PROP_TAG( PT_BINARY,    4095)
+PR_OBJECT_TYPE                              = PROP_TAG( PT_LONG,      4094)
+PR_ICON                                     = PROP_TAG( PT_BINARY,    4093)
+PR_MINI_ICON                                = PROP_TAG( PT_BINARY,    4092)
+PR_STORE_ENTRYID                            = PROP_TAG( PT_BINARY,    4091)
+PR_STORE_RECORD_KEY                         = PROP_TAG( PT_BINARY,    4090)
+PR_RECORD_KEY                               = PROP_TAG( PT_BINARY,    4089)
+PR_MAPPING_SIGNATURE                        = PROP_TAG( PT_BINARY,    4088)
+PR_ACCESS_LEVEL                             = PROP_TAG( PT_LONG,      4087)
+PR_INSTANCE_KEY                             = PROP_TAG( PT_BINARY,    4086)
+PR_ROW_TYPE                                 = PROP_TAG( PT_LONG,      4085)
+PR_ACCESS                                   = PROP_TAG( PT_LONG,      4084)
+PR_ROWID                                    = PROP_TAG( PT_LONG,      12288)
+PR_DISPLAY_NAME                             = PROP_TAG( PT_TSTRING,   12289)
+PR_DISPLAY_NAME_W                           = PROP_TAG( PT_UNICODE,   12289)
+PR_DISPLAY_NAME_A                           = PROP_TAG( PT_STRING8,   12289)
+PR_ADDRTYPE                                 = PROP_TAG( PT_TSTRING,   12290)
+PR_ADDRTYPE_W                               = PROP_TAG( PT_UNICODE,   12290)
+PR_ADDRTYPE_A                               = PROP_TAG( PT_STRING8,   12290)
+PR_EMAIL_ADDRESS                            = PROP_TAG( PT_TSTRING,   12291)
+PR_EMAIL_ADDRESS_W                          = PROP_TAG( PT_UNICODE,   12291)
+PR_EMAIL_ADDRESS_A                          = PROP_TAG( PT_STRING8,   12291)
+PR_COMMENT                                  = PROP_TAG( PT_TSTRING,   12292)
+PR_COMMENT_W                                = PROP_TAG( PT_UNICODE,   12292)
+PR_COMMENT_A                                = PROP_TAG( PT_STRING8,   12292)
+PR_DEPTH                                    = PROP_TAG( PT_LONG,      12293)
+PR_PROVIDER_DISPLAY                         = PROP_TAG( PT_TSTRING,   12294)
+PR_PROVIDER_DISPLAY_W                       = PROP_TAG( PT_UNICODE,   12294)
+PR_PROVIDER_DISPLAY_A                       = PROP_TAG( PT_STRING8,   12294)
+PR_CREATION_TIME                            = PROP_TAG( PT_SYSTIME,   12295)
+PR_LAST_MODIFICATION_TIME                   = PROP_TAG( PT_SYSTIME,   12296)
+PR_RESOURCE_FLAGS                           = PROP_TAG( PT_LONG,      12297)
+PR_PROVIDER_DLL_NAME                        = PROP_TAG( PT_TSTRING,   12298)
+PR_PROVIDER_DLL_NAME_W                      = PROP_TAG( PT_UNICODE,   12298)
+PR_PROVIDER_DLL_NAME_A                      = PROP_TAG( PT_STRING8,   12298)
+PR_SEARCH_KEY                               = PROP_TAG( PT_BINARY,    12299)
+PR_PROVIDER_UID                             = PROP_TAG( PT_BINARY,    12300)
+PR_PROVIDER_ORDINAL                         = PROP_TAG( PT_LONG,      12301)
+PR_FORM_VERSION                             = PROP_TAG(PT_TSTRING,    13057)
+PR_FORM_VERSION_W                           = PROP_TAG(PT_UNICODE,    13057)
+PR_FORM_VERSION_A                           = PROP_TAG(PT_STRING8,    13057)
+PR_FORM_CLSID                               = PROP_TAG(PT_CLSID,      13058)
+PR_FORM_CONTACT_NAME                        = PROP_TAG(PT_TSTRING,    13059)
+PR_FORM_CONTACT_NAME_W                      = PROP_TAG(PT_UNICODE,    13059)
+PR_FORM_CONTACT_NAME_A                      = PROP_TAG(PT_STRING8,    13059)
+PR_FORM_CATEGORY                            = PROP_TAG(PT_TSTRING,    13060)
+PR_FORM_CATEGORY_W                          = PROP_TAG(PT_UNICODE,    13060)
+PR_FORM_CATEGORY_A                          = PROP_TAG(PT_STRING8,    13060)
+PR_FORM_CATEGORY_SUB                        = PROP_TAG(PT_TSTRING,    13061)
+PR_FORM_CATEGORY_SUB_W                      = PROP_TAG(PT_UNICODE,    13061)
+PR_FORM_CATEGORY_SUB_A                      = PROP_TAG(PT_STRING8,    13061)
+PR_FORM_HOST_MAP                            = PROP_TAG(PT_MV_LONG,    13062)
+PR_FORM_HIDDEN                              = PROP_TAG(PT_BOOLEAN,    13063)
+PR_FORM_DESIGNER_NAME                       = PROP_TAG(PT_TSTRING,    13064)
+PR_FORM_DESIGNER_NAME_W                     = PROP_TAG(PT_UNICODE,    13064)
+PR_FORM_DESIGNER_NAME_A                     = PROP_TAG(PT_STRING8,    13064)
+PR_FORM_DESIGNER_GUID                       = PROP_TAG(PT_CLSID,      13065)
+PR_FORM_MESSAGE_BEHAVIOR                    = PROP_TAG(PT_LONG,       13066)
+PR_DEFAULT_STORE                            = PROP_TAG( PT_BOOLEAN,   13312)
+PR_STORE_SUPPORT_MASK                       = PROP_TAG( PT_LONG,      13325)
+PR_STORE_STATE                              = PROP_TAG( PT_LONG,      13326)
+PR_IPM_SUBTREE_SEARCH_KEY                   = PROP_TAG( PT_BINARY,    13328)
+PR_IPM_OUTBOX_SEARCH_KEY                    = PROP_TAG( PT_BINARY,    13329)
+PR_IPM_WASTEBASKET_SEARCH_KEY               = PROP_TAG( PT_BINARY,    13330)
+PR_IPM_SENTMAIL_SEARCH_KEY                  = PROP_TAG( PT_BINARY,    13331)
+PR_MDB_PROVIDER                             = PROP_TAG( PT_BINARY,    13332)
+PR_RECEIVE_FOLDER_SETTINGS                  = PROP_TAG( PT_OBJECT,    13333)
+PR_VALID_FOLDER_MASK                        = PROP_TAG( PT_LONG,      13791)
+PR_IPM_SUBTREE_ENTRYID                      = PROP_TAG( PT_BINARY,    13792)
+PR_IPM_OUTBOX_ENTRYID                       = PROP_TAG( PT_BINARY,    13794)
+PR_IPM_WASTEBASKET_ENTRYID                  = PROP_TAG( PT_BINARY,    13795)
+PR_IPM_SENTMAIL_ENTRYID                     = PROP_TAG( PT_BINARY,    13796)
+PR_VIEWS_ENTRYID                            = PROP_TAG( PT_BINARY,    13797)
+PR_COMMON_VIEWS_ENTRYID                     = PROP_TAG( PT_BINARY,    13798)
+# MAPI property tags (PR_*), transcribed from the MAPITAGS.H SDK header.
+# Each value packs a property type (PT_*) with a 16-bit property ID via
+# PROP_TAG (both defined earlier in this file).  _W/_A suffixed names are
+# the Unicode/ANSI variants of the same property ID; unsuffixed names bound
+# to another PR_* constant are straight aliases sharing that tag value.
+PR_FINDER_ENTRYID                           = PROP_TAG( PT_BINARY,    13799)
+# Container properties (IDs 13824+ == 0x3600 range).
+PR_CONTAINER_FLAGS                          = PROP_TAG( PT_LONG,      13824)
+PR_FOLDER_TYPE                              = PROP_TAG( PT_LONG,      13825)
+PR_CONTENT_COUNT                            = PROP_TAG( PT_LONG,      13826)
+PR_CONTENT_UNREAD                           = PROP_TAG( PT_LONG,      13827)
+PR_CREATE_TEMPLATES                         = PROP_TAG( PT_OBJECT,    13828)
+PR_DETAILS_TABLE                            = PROP_TAG( PT_OBJECT,    13829)
+PR_SEARCH                                   = PROP_TAG( PT_OBJECT,    13831)
+PR_SELECTABLE                               = PROP_TAG( PT_BOOLEAN,   13833)
+PR_SUBFOLDERS                               = PROP_TAG( PT_BOOLEAN,   13834)
+PR_STATUS                                   = PROP_TAG( PT_LONG,      13835)
+PR_ANR                                      = PROP_TAG( PT_TSTRING,   13836)
+PR_ANR_W                                    = PROP_TAG( PT_UNICODE,   13836)
+PR_ANR_A                                    = PROP_TAG( PT_STRING8,   13836)
+PR_CONTENTS_SORT_ORDER                      = PROP_TAG( PT_MV_LONG,   13837)
+PR_CONTAINER_HIERARCHY                      = PROP_TAG( PT_OBJECT,    13838)
+PR_CONTAINER_CONTENTS                       = PROP_TAG( PT_OBJECT,    13839)
+PR_FOLDER_ASSOCIATED_CONTENTS               = PROP_TAG( PT_OBJECT,    13840)
+PR_DEF_CREATE_DL                            = PROP_TAG( PT_BINARY,    13841)
+PR_DEF_CREATE_MAILUSER                      = PROP_TAG( PT_BINARY,    13842)
+PR_CONTAINER_CLASS                          = PROP_TAG( PT_TSTRING,   13843)
+PR_CONTAINER_CLASS_W                        = PROP_TAG( PT_UNICODE,   13843)
+PR_CONTAINER_CLASS_A                        = PROP_TAG( PT_STRING8,   13843)
+PR_CONTAINER_MODIFY_VERSION                 = PROP_TAG( PT_I8,        13844)
+PR_AB_PROVIDER_ID                           = PROP_TAG( PT_BINARY,    13845)
+PR_DEFAULT_VIEW_ENTRYID                     = PROP_TAG( PT_BINARY,    13846)
+PR_ASSOC_CONTENT_COUNT                      = PROP_TAG( PT_LONG,      13847)
+# Attachment properties (IDs 14080+ == 0x3700 range).
+PR_ATTACHMENT_X400_PARAMETERS               = PROP_TAG( PT_BINARY,    14080)
+PR_ATTACH_DATA_OBJ                          = PROP_TAG( PT_OBJECT,    14081)
+PR_ATTACH_DATA_BIN                          = PROP_TAG( PT_BINARY,    14081)
+PR_ATTACH_ENCODING                          = PROP_TAG( PT_BINARY,    14082)
+PR_ATTACH_EXTENSION                         = PROP_TAG( PT_TSTRING,   14083)
+PR_ATTACH_EXTENSION_W                       = PROP_TAG( PT_UNICODE,   14083)
+PR_ATTACH_EXTENSION_A                       = PROP_TAG( PT_STRING8,   14083)
+PR_ATTACH_FILENAME                          = PROP_TAG( PT_TSTRING,   14084)
+PR_ATTACH_FILENAME_W                        = PROP_TAG( PT_UNICODE,   14084)
+PR_ATTACH_FILENAME_A                        = PROP_TAG( PT_STRING8,   14084)
+PR_ATTACH_METHOD                            = PROP_TAG( PT_LONG,      14085)
+PR_ATTACH_LONG_FILENAME                     = PROP_TAG( PT_TSTRING,   14087)
+PR_ATTACH_LONG_FILENAME_W                   = PROP_TAG( PT_UNICODE,   14087)
+PR_ATTACH_LONG_FILENAME_A                   = PROP_TAG( PT_STRING8,   14087)
+PR_ATTACH_PATHNAME                          = PROP_TAG( PT_TSTRING,   14088)
+PR_ATTACH_PATHNAME_W                        = PROP_TAG( PT_UNICODE,   14088)
+PR_ATTACH_PATHNAME_A                        = PROP_TAG( PT_STRING8,   14088)
+PR_ATTACH_RENDERING                         = PROP_TAG( PT_BINARY,    14089)
+PR_ATTACH_TAG                               = PROP_TAG( PT_BINARY,    14090)
+PR_RENDERING_POSITION                       = PROP_TAG( PT_LONG,      14091)
+PR_ATTACH_TRANSPORT_NAME                    = PROP_TAG( PT_TSTRING,   14092)
+PR_ATTACH_TRANSPORT_NAME_W                  = PROP_TAG( PT_UNICODE,   14092)
+PR_ATTACH_TRANSPORT_NAME_A                  = PROP_TAG( PT_STRING8,   14092)
+PR_ATTACH_LONG_PATHNAME                     = PROP_TAG( PT_TSTRING,   14093)
+PR_ATTACH_LONG_PATHNAME_W                   = PROP_TAG( PT_UNICODE,   14093)
+PR_ATTACH_LONG_PATHNAME_A                   = PROP_TAG( PT_STRING8,   14093)
+PR_ATTACH_MIME_TAG                          = PROP_TAG( PT_TSTRING,   14094)
+PR_ATTACH_MIME_TAG_W                        = PROP_TAG( PT_UNICODE,   14094)
+PR_ATTACH_MIME_TAG_A                        = PROP_TAG( PT_STRING8,   14094)
+PR_ATTACH_ADDITIONAL_INFO                   = PROP_TAG( PT_BINARY,    14095)
+# Address-book / mail-user properties (IDs 14592+ == 0x3900 range and
+# 14848+ == 0x3A00 range).
+PR_DISPLAY_TYPE                             = PROP_TAG( PT_LONG,      14592)
+PR_TEMPLATEID                               = PROP_TAG( PT_BINARY,    14594)
+PR_PRIMARY_CAPABILITY                       = PROP_TAG( PT_BINARY,    14596)
+PR_7BIT_DISPLAY_NAME                        = PROP_TAG( PT_STRING8,   14847)
+PR_ACCOUNT                                  = PROP_TAG( PT_TSTRING,   14848)
+PR_ACCOUNT_W                                = PROP_TAG( PT_UNICODE,   14848)
+PR_ACCOUNT_A                                = PROP_TAG( PT_STRING8,   14848)
+PR_ALTERNATE_RECIPIENT                      = PROP_TAG( PT_BINARY,    14849)
+PR_CALLBACK_TELEPHONE_NUMBER                = PROP_TAG( PT_TSTRING,   14850)
+PR_CALLBACK_TELEPHONE_NUMBER_W              = PROP_TAG( PT_UNICODE,   14850)
+PR_CALLBACK_TELEPHONE_NUMBER_A              = PROP_TAG( PT_STRING8,   14850)
+PR_CONVERSION_PROHIBITED                    = PROP_TAG( PT_BOOLEAN,   14851)
+PR_DISCLOSE_RECIPIENTS                      = PROP_TAG( PT_BOOLEAN,   14852)
+PR_GENERATION                               = PROP_TAG( PT_TSTRING,   14853)
+PR_GENERATION_W                             = PROP_TAG( PT_UNICODE,   14853)
+PR_GENERATION_A                             = PROP_TAG( PT_STRING8,   14853)
+PR_GIVEN_NAME                               = PROP_TAG( PT_TSTRING,   14854)
+PR_GIVEN_NAME_W                             = PROP_TAG( PT_UNICODE,   14854)
+PR_GIVEN_NAME_A                             = PROP_TAG( PT_STRING8,   14854)
+PR_GOVERNMENT_ID_NUMBER                     = PROP_TAG( PT_TSTRING,   14855)
+PR_GOVERNMENT_ID_NUMBER_W                   = PROP_TAG( PT_UNICODE,   14855)
+PR_GOVERNMENT_ID_NUMBER_A                   = PROP_TAG( PT_STRING8,   14855)
+PR_BUSINESS_TELEPHONE_NUMBER                = PROP_TAG( PT_TSTRING,   14856)
+PR_BUSINESS_TELEPHONE_NUMBER_W              = PROP_TAG( PT_UNICODE,   14856)
+PR_BUSINESS_TELEPHONE_NUMBER_A              = PROP_TAG( PT_STRING8,   14856)
+PR_OFFICE_TELEPHONE_NUMBER                  = PR_BUSINESS_TELEPHONE_NUMBER
+PR_OFFICE_TELEPHONE_NUMBER_W                = PR_BUSINESS_TELEPHONE_NUMBER_W
+PR_OFFICE_TELEPHONE_NUMBER_A                = PR_BUSINESS_TELEPHONE_NUMBER_A
+PR_HOME_TELEPHONE_NUMBER                    = PROP_TAG( PT_TSTRING,   14857)
+PR_HOME_TELEPHONE_NUMBER_W                  = PROP_TAG( PT_UNICODE,   14857)
+PR_HOME_TELEPHONE_NUMBER_A                  = PROP_TAG( PT_STRING8,   14857)
+PR_INITIALS                                 = PROP_TAG( PT_TSTRING,   14858)
+PR_INITIALS_W                               = PROP_TAG( PT_UNICODE,   14858)
+PR_INITIALS_A                               = PROP_TAG( PT_STRING8,   14858)
+PR_KEYWORD                                  = PROP_TAG( PT_TSTRING,   14859)
+PR_KEYWORD_W                                = PROP_TAG( PT_UNICODE,   14859)
+PR_KEYWORD_A                                = PROP_TAG( PT_STRING8,   14859)
+PR_LANGUAGE                                 = PROP_TAG( PT_TSTRING,   14860)
+PR_LANGUAGE_W                               = PROP_TAG( PT_UNICODE,   14860)
+PR_LANGUAGE_A                               = PROP_TAG( PT_STRING8,   14860)
+PR_LOCATION                                 = PROP_TAG( PT_TSTRING,   14861)
+PR_LOCATION_W                               = PROP_TAG( PT_UNICODE,   14861)
+PR_LOCATION_A                               = PROP_TAG( PT_STRING8,   14861)
+PR_MAIL_PERMISSION                          = PROP_TAG( PT_BOOLEAN,   14862)
+PR_MHS_COMMON_NAME                          = PROP_TAG( PT_TSTRING,   14863)
+PR_MHS_COMMON_NAME_W                        = PROP_TAG( PT_UNICODE,   14863)
+PR_MHS_COMMON_NAME_A                        = PROP_TAG( PT_STRING8,   14863)
+PR_ORGANIZATIONAL_ID_NUMBER                 = PROP_TAG( PT_TSTRING,   14864)
+PR_ORGANIZATIONAL_ID_NUMBER_W               = PROP_TAG( PT_UNICODE,   14864)
+PR_ORGANIZATIONAL_ID_NUMBER_A               = PROP_TAG( PT_STRING8,   14864)
+PR_SURNAME                                  = PROP_TAG( PT_TSTRING,   14865)
+PR_SURNAME_W                                = PROP_TAG( PT_UNICODE,   14865)
+PR_SURNAME_A                                = PROP_TAG( PT_STRING8,   14865)
+PR_ORIGINAL_ENTRYID                         = PROP_TAG( PT_BINARY,    14866)
+PR_ORIGINAL_DISPLAY_NAME                    = PROP_TAG( PT_TSTRING,   14867)
+PR_ORIGINAL_DISPLAY_NAME_W                  = PROP_TAG( PT_UNICODE,   14867)
+PR_ORIGINAL_DISPLAY_NAME_A                  = PROP_TAG( PT_STRING8,   14867)
+PR_ORIGINAL_SEARCH_KEY                      = PROP_TAG( PT_BINARY,    14868)
+PR_POSTAL_ADDRESS                           = PROP_TAG( PT_TSTRING,   14869)
+PR_POSTAL_ADDRESS_W                         = PROP_TAG( PT_UNICODE,   14869)
+PR_POSTAL_ADDRESS_A                         = PROP_TAG( PT_STRING8,   14869)
+PR_COMPANY_NAME                             = PROP_TAG( PT_TSTRING,   14870)
+PR_COMPANY_NAME_W                           = PROP_TAG( PT_UNICODE,   14870)
+PR_COMPANY_NAME_A                           = PROP_TAG( PT_STRING8,   14870)
+PR_TITLE                                    = PROP_TAG( PT_TSTRING,   14871)
+PR_TITLE_W                                  = PROP_TAG( PT_UNICODE,   14871)
+PR_TITLE_A                                  = PROP_TAG( PT_STRING8,   14871)
+PR_DEPARTMENT_NAME                          = PROP_TAG( PT_TSTRING,   14872)
+PR_DEPARTMENT_NAME_W                        = PROP_TAG( PT_UNICODE,   14872)
+PR_DEPARTMENT_NAME_A                        = PROP_TAG( PT_STRING8,   14872)
+PR_OFFICE_LOCATION                          = PROP_TAG( PT_TSTRING,   14873)
+PR_OFFICE_LOCATION_W                        = PROP_TAG( PT_UNICODE,   14873)
+PR_OFFICE_LOCATION_A                        = PROP_TAG( PT_STRING8,   14873)
+PR_PRIMARY_TELEPHONE_NUMBER                 = PROP_TAG( PT_TSTRING,   14874)
+PR_PRIMARY_TELEPHONE_NUMBER_W               = PROP_TAG( PT_UNICODE,   14874)
+PR_PRIMARY_TELEPHONE_NUMBER_A               = PROP_TAG( PT_STRING8,   14874)
+PR_BUSINESS2_TELEPHONE_NUMBER               = PROP_TAG( PT_TSTRING,   14875)
+PR_BUSINESS2_TELEPHONE_NUMBER_W             = PROP_TAG( PT_UNICODE,   14875)
+PR_BUSINESS2_TELEPHONE_NUMBER_A             = PROP_TAG( PT_STRING8,   14875)
+PR_OFFICE2_TELEPHONE_NUMBER                 = PR_BUSINESS2_TELEPHONE_NUMBER
+PR_OFFICE2_TELEPHONE_NUMBER_W               = PR_BUSINESS2_TELEPHONE_NUMBER_W
+PR_OFFICE2_TELEPHONE_NUMBER_A               = PR_BUSINESS2_TELEPHONE_NUMBER_A
+PR_MOBILE_TELEPHONE_NUMBER                  = PROP_TAG( PT_TSTRING,   14876)
+PR_MOBILE_TELEPHONE_NUMBER_W                = PROP_TAG( PT_UNICODE,   14876)
+PR_MOBILE_TELEPHONE_NUMBER_A                = PROP_TAG( PT_STRING8,   14876)
+PR_CELLULAR_TELEPHONE_NUMBER                = PR_MOBILE_TELEPHONE_NUMBER
+PR_CELLULAR_TELEPHONE_NUMBER_W              = PR_MOBILE_TELEPHONE_NUMBER_W
+PR_CELLULAR_TELEPHONE_NUMBER_A              = PR_MOBILE_TELEPHONE_NUMBER_A
+PR_RADIO_TELEPHONE_NUMBER                   = PROP_TAG( PT_TSTRING,   14877)
+PR_RADIO_TELEPHONE_NUMBER_W                 = PROP_TAG( PT_UNICODE,   14877)
+PR_RADIO_TELEPHONE_NUMBER_A                 = PROP_TAG( PT_STRING8,   14877)
+PR_CAR_TELEPHONE_NUMBER                     = PROP_TAG( PT_TSTRING,   14878)
+PR_CAR_TELEPHONE_NUMBER_W                   = PROP_TAG( PT_UNICODE,   14878)
+PR_CAR_TELEPHONE_NUMBER_A                   = PROP_TAG( PT_STRING8,   14878)
+PR_OTHER_TELEPHONE_NUMBER                   = PROP_TAG( PT_TSTRING,   14879)
+PR_OTHER_TELEPHONE_NUMBER_W                 = PROP_TAG( PT_UNICODE,   14879)
+PR_OTHER_TELEPHONE_NUMBER_A                 = PROP_TAG( PT_STRING8,   14879)
+PR_TRANSMITABLE_DISPLAY_NAME                = PROP_TAG( PT_TSTRING,   14880)
+PR_TRANSMITABLE_DISPLAY_NAME_W              = PROP_TAG( PT_UNICODE,   14880)
+PR_TRANSMITABLE_DISPLAY_NAME_A              = PROP_TAG( PT_STRING8,   14880)
+PR_PAGER_TELEPHONE_NUMBER                   = PROP_TAG( PT_TSTRING,   14881)
+PR_PAGER_TELEPHONE_NUMBER_W                 = PROP_TAG( PT_UNICODE,   14881)
+PR_PAGER_TELEPHONE_NUMBER_A                 = PROP_TAG( PT_STRING8,   14881)
+PR_BEEPER_TELEPHONE_NUMBER                  = PR_PAGER_TELEPHONE_NUMBER
+PR_BEEPER_TELEPHONE_NUMBER_W                = PR_PAGER_TELEPHONE_NUMBER_W
+PR_BEEPER_TELEPHONE_NUMBER_A                = PR_PAGER_TELEPHONE_NUMBER_A
+PR_USER_CERTIFICATE                         = PROP_TAG( PT_BINARY,    14882)
+PR_PRIMARY_FAX_NUMBER                       = PROP_TAG( PT_TSTRING,   14883)
+PR_PRIMARY_FAX_NUMBER_W                     = PROP_TAG( PT_UNICODE,   14883)
+PR_PRIMARY_FAX_NUMBER_A                     = PROP_TAG( PT_STRING8,   14883)
+PR_BUSINESS_FAX_NUMBER                      = PROP_TAG( PT_TSTRING,   14884)
+PR_BUSINESS_FAX_NUMBER_W                    = PROP_TAG( PT_UNICODE,   14884)
+PR_BUSINESS_FAX_NUMBER_A                    = PROP_TAG( PT_STRING8,   14884)
+PR_HOME_FAX_NUMBER                          = PROP_TAG( PT_TSTRING,   14885)
+PR_HOME_FAX_NUMBER_W                        = PROP_TAG( PT_UNICODE,   14885)
+PR_HOME_FAX_NUMBER_A                        = PROP_TAG( PT_STRING8,   14885)
+PR_COUNTRY                                  = PROP_TAG( PT_TSTRING,   14886)
+PR_COUNTRY_W                                = PROP_TAG( PT_UNICODE,   14886)
+PR_COUNTRY_A                                = PROP_TAG( PT_STRING8,   14886)
+PR_BUSINESS_ADDRESS_COUNTRY                 = PR_COUNTRY
+PR_BUSINESS_ADDRESS_COUNTRY_W               = PR_COUNTRY_W
+PR_BUSINESS_ADDRESS_COUNTRY_A               = PR_COUNTRY_A
+PR_LOCALITY                                 = PROP_TAG( PT_TSTRING,   14887)
+PR_LOCALITY_W                               = PROP_TAG( PT_UNICODE,   14887)
+PR_LOCALITY_A                               = PROP_TAG( PT_STRING8,   14887)
+PR_BUSINESS_ADDRESS_CITY                    = PR_LOCALITY
+PR_BUSINESS_ADDRESS_CITY_W                  = PR_LOCALITY_W
+PR_BUSINESS_ADDRESS_CITY_A                  = PR_LOCALITY_A
+PR_STATE_OR_PROVINCE                        = PROP_TAG( PT_TSTRING,   14888)
+PR_STATE_OR_PROVINCE_W                      = PROP_TAG( PT_UNICODE,   14888)
+PR_STATE_OR_PROVINCE_A                      = PROP_TAG( PT_STRING8,   14888)
+PR_BUSINESS_ADDRESS_STATE_OR_PROVINCE       = PR_STATE_OR_PROVINCE
+PR_BUSINESS_ADDRESS_STATE_OR_PROVINCE_W     = PR_STATE_OR_PROVINCE_W
+PR_BUSINESS_ADDRESS_STATE_OR_PROVINCE_A     = PR_STATE_OR_PROVINCE_A
+PR_STREET_ADDRESS                           = PROP_TAG( PT_TSTRING,   14889)
+PR_STREET_ADDRESS_W                         = PROP_TAG( PT_UNICODE,   14889)
+PR_STREET_ADDRESS_A                         = PROP_TAG( PT_STRING8,   14889)
+PR_BUSINESS_ADDRESS_STREET                  = PR_STREET_ADDRESS
+PR_BUSINESS_ADDRESS_STREET_W                = PR_STREET_ADDRESS_W
+PR_BUSINESS_ADDRESS_STREET_A                = PR_STREET_ADDRESS_A
+PR_POSTAL_CODE                              = PROP_TAG( PT_TSTRING,   14890)
+PR_POSTAL_CODE_W                            = PROP_TAG( PT_UNICODE,   14890)
+PR_POSTAL_CODE_A                            = PROP_TAG( PT_STRING8,   14890)
+PR_BUSINESS_ADDRESS_POSTAL_CODE             = PR_POSTAL_CODE
+PR_BUSINESS_ADDRESS_POSTAL_CODE_W           = PR_POSTAL_CODE_W
+PR_BUSINESS_ADDRESS_POSTAL_CODE_A           = PR_POSTAL_CODE_A
+PR_POST_OFFICE_BOX                          = PROP_TAG( PT_TSTRING,   14891)
+PR_POST_OFFICE_BOX_W                        = PROP_TAG( PT_UNICODE,   14891)
+PR_POST_OFFICE_BOX_A                        = PROP_TAG( PT_STRING8,   14891)
+PR_BUSINESS_ADDRESS_POST_OFFICE_BOX         = PR_POST_OFFICE_BOX
+PR_BUSINESS_ADDRESS_POST_OFFICE_BOX_W       = PR_POST_OFFICE_BOX_W
+PR_BUSINESS_ADDRESS_POST_OFFICE_BOX_A       = PR_POST_OFFICE_BOX_A
+PR_TELEX_NUMBER                             = PROP_TAG( PT_TSTRING,   14892)
+PR_TELEX_NUMBER_W                           = PROP_TAG( PT_UNICODE,   14892)
+PR_TELEX_NUMBER_A                           = PROP_TAG( PT_STRING8,   14892)
+PR_ISDN_NUMBER                              = PROP_TAG( PT_TSTRING,   14893)
+PR_ISDN_NUMBER_W                            = PROP_TAG( PT_UNICODE,   14893)
+PR_ISDN_NUMBER_A                            = PROP_TAG( PT_STRING8,   14893)
+PR_ASSISTANT_TELEPHONE_NUMBER               = PROP_TAG( PT_TSTRING,   14894)
+PR_ASSISTANT_TELEPHONE_NUMBER_W             = PROP_TAG( PT_UNICODE,   14894)
+PR_ASSISTANT_TELEPHONE_NUMBER_A             = PROP_TAG( PT_STRING8,   14894)
+PR_HOME2_TELEPHONE_NUMBER                   = PROP_TAG( PT_TSTRING,   14895)
+PR_HOME2_TELEPHONE_NUMBER_W                 = PROP_TAG( PT_UNICODE,   14895)
+PR_HOME2_TELEPHONE_NUMBER_A                 = PROP_TAG( PT_STRING8,   14895)
+PR_ASSISTANT                                = PROP_TAG( PT_TSTRING,   14896)
+PR_ASSISTANT_W                              = PROP_TAG( PT_UNICODE,   14896)
+PR_ASSISTANT_A                              = PROP_TAG( PT_STRING8,   14896)
+PR_SEND_RICH_INFO                           = PROP_TAG( PT_BOOLEAN,   14912)
+PR_WEDDING_ANNIVERSARY                      = PROP_TAG( PT_SYSTIME, 14913)
+PR_BIRTHDAY                                 = PROP_TAG( PT_SYSTIME, 14914)
+PR_HOBBIES                                  = PROP_TAG( PT_TSTRING, 14915)
+PR_HOBBIES_W                                = PROP_TAG( PT_UNICODE, 14915)
+PR_HOBBIES_A                                = PROP_TAG( PT_STRING8, 14915)
+PR_MIDDLE_NAME                              = PROP_TAG( PT_TSTRING, 14916)
+PR_MIDDLE_NAME_W                            = PROP_TAG( PT_UNICODE, 14916)
+PR_MIDDLE_NAME_A                            = PROP_TAG( PT_STRING8, 14916)
+PR_DISPLAY_NAME_PREFIX                      = PROP_TAG( PT_TSTRING, 14917)
+PR_DISPLAY_NAME_PREFIX_W                    = PROP_TAG( PT_UNICODE, 14917)
+PR_DISPLAY_NAME_PREFIX_A                    = PROP_TAG( PT_STRING8, 14917)
+PR_PROFESSION                               = PROP_TAG( PT_TSTRING, 14918)
+PR_PROFESSION_W                             = PROP_TAG( PT_UNICODE, 14918)
+PR_PROFESSION_A                             = PROP_TAG( PT_STRING8, 14918)
+PR_PREFERRED_BY_NAME                        = PROP_TAG( PT_TSTRING, 14919)
+PR_PREFERRED_BY_NAME_W                      = PROP_TAG( PT_UNICODE, 14919)
+PR_PREFERRED_BY_NAME_A                      = PROP_TAG( PT_STRING8, 14919)
+PR_SPOUSE_NAME                              = PROP_TAG( PT_TSTRING, 14920)
+PR_SPOUSE_NAME_W                            = PROP_TAG( PT_UNICODE, 14920)
+PR_SPOUSE_NAME_A                            = PROP_TAG( PT_STRING8, 14920)
+PR_COMPUTER_NETWORK_NAME                    = PROP_TAG( PT_TSTRING, 14921)
+PR_COMPUTER_NETWORK_NAME_W                  = PROP_TAG( PT_UNICODE, 14921)
+PR_COMPUTER_NETWORK_NAME_A                  = PROP_TAG( PT_STRING8, 14921)
+PR_CUSTOMER_ID                              = PROP_TAG( PT_TSTRING, 14922)
+PR_CUSTOMER_ID_W                            = PROP_TAG( PT_UNICODE, 14922)
+PR_CUSTOMER_ID_A                            = PROP_TAG( PT_STRING8, 14922)
+PR_TTYTDD_PHONE_NUMBER                      = PROP_TAG( PT_TSTRING, 14923)
+PR_TTYTDD_PHONE_NUMBER_W                    = PROP_TAG( PT_UNICODE, 14923)
+PR_TTYTDD_PHONE_NUMBER_A                    = PROP_TAG( PT_STRING8, 14923)
+PR_FTP_SITE                                 = PROP_TAG( PT_TSTRING, 14924)
+PR_FTP_SITE_W                               = PROP_TAG( PT_UNICODE, 14924)
+PR_FTP_SITE_A                               = PROP_TAG( PT_STRING8, 14924)
+PR_GENDER                                   = PROP_TAG( PT_SHORT, 14925)
+PR_MANAGER_NAME                             = PROP_TAG( PT_TSTRING, 14926)
+PR_MANAGER_NAME_W                           = PROP_TAG( PT_UNICODE, 14926)
+PR_MANAGER_NAME_A                           = PROP_TAG( PT_STRING8, 14926)
+PR_NICKNAME                                 = PROP_TAG( PT_TSTRING, 14927)
+PR_NICKNAME_W                               = PROP_TAG( PT_UNICODE, 14927)
+PR_NICKNAME_A                               = PROP_TAG( PT_STRING8, 14927)
+PR_PERSONAL_HOME_PAGE                       = PROP_TAG( PT_TSTRING, 14928)
+PR_PERSONAL_HOME_PAGE_W                     = PROP_TAG( PT_UNICODE, 14928)
+PR_PERSONAL_HOME_PAGE_A                     = PROP_TAG( PT_STRING8, 14928)
+PR_BUSINESS_HOME_PAGE                       = PROP_TAG( PT_TSTRING, 14929)
+PR_BUSINESS_HOME_PAGE_W                     = PROP_TAG( PT_UNICODE, 14929)
+PR_BUSINESS_HOME_PAGE_A                     = PROP_TAG( PT_STRING8, 14929)
+PR_CONTACT_VERSION                          = PROP_TAG( PT_CLSID, 14930)
+PR_CONTACT_ENTRYIDS                         = PROP_TAG( PT_MV_BINARY, 14931)
+PR_CONTACT_ADDRTYPES                        = PROP_TAG( PT_MV_TSTRING, 14932)
+PR_CONTACT_ADDRTYPES_W                      = PROP_TAG( PT_MV_UNICODE, 14932)
+PR_CONTACT_ADDRTYPES_A                      = PROP_TAG( PT_MV_STRING8, 14932)
+PR_CONTACT_DEFAULT_ADDRESS_INDEX            = PROP_TAG( PT_LONG, 14933)
+PR_CONTACT_EMAIL_ADDRESSES                  = PROP_TAG( PT_MV_TSTRING, 14934)
+PR_CONTACT_EMAIL_ADDRESSES_W                = PROP_TAG( PT_MV_UNICODE, 14934)
+PR_CONTACT_EMAIL_ADDRESSES_A                = PROP_TAG( PT_MV_STRING8, 14934)
+PR_COMPANY_MAIN_PHONE_NUMBER                = PROP_TAG( PT_TSTRING, 14935)
+PR_COMPANY_MAIN_PHONE_NUMBER_W              = PROP_TAG( PT_UNICODE, 14935)
+PR_COMPANY_MAIN_PHONE_NUMBER_A              = PROP_TAG( PT_STRING8, 14935)
+PR_CHILDRENS_NAMES                          = PROP_TAG( PT_MV_TSTRING, 14936)
+PR_CHILDRENS_NAMES_W                        = PROP_TAG( PT_MV_UNICODE, 14936)
+PR_CHILDRENS_NAMES_A                        = PROP_TAG( PT_MV_STRING8, 14936)
+PR_HOME_ADDRESS_CITY                        = PROP_TAG( PT_TSTRING, 14937)
+PR_HOME_ADDRESS_CITY_W                      = PROP_TAG( PT_UNICODE, 14937)
+PR_HOME_ADDRESS_CITY_A                      = PROP_TAG( PT_STRING8, 14937)
+PR_HOME_ADDRESS_COUNTRY                     = PROP_TAG( PT_TSTRING, 14938)
+PR_HOME_ADDRESS_COUNTRY_W                   = PROP_TAG( PT_UNICODE, 14938)
+PR_HOME_ADDRESS_COUNTRY_A                   = PROP_TAG( PT_STRING8, 14938)
+PR_HOME_ADDRESS_POSTAL_CODE                 = PROP_TAG( PT_TSTRING, 14939)
+PR_HOME_ADDRESS_POSTAL_CODE_W               = PROP_TAG( PT_UNICODE, 14939)
+PR_HOME_ADDRESS_POSTAL_CODE_A               = PROP_TAG( PT_STRING8, 14939)
+PR_HOME_ADDRESS_STATE_OR_PROVINCE           = PROP_TAG( PT_TSTRING, 14940)
+PR_HOME_ADDRESS_STATE_OR_PROVINCE_W         = PROP_TAG( PT_UNICODE, 14940)
+PR_HOME_ADDRESS_STATE_OR_PROVINCE_A         = PROP_TAG( PT_STRING8, 14940)
+PR_HOME_ADDRESS_STREET                      = PROP_TAG( PT_TSTRING, 14941)
+PR_HOME_ADDRESS_STREET_W                    = PROP_TAG( PT_UNICODE, 14941)
+PR_HOME_ADDRESS_STREET_A                    = PROP_TAG( PT_STRING8, 14941)
+PR_HOME_ADDRESS_POST_OFFICE_BOX             = PROP_TAG( PT_TSTRING, 14942)
+PR_HOME_ADDRESS_POST_OFFICE_BOX_W           = PROP_TAG( PT_UNICODE, 14942)
+PR_HOME_ADDRESS_POST_OFFICE_BOX_A           = PROP_TAG( PT_STRING8, 14942)
+PR_OTHER_ADDRESS_CITY                       = PROP_TAG( PT_TSTRING, 14943)
+PR_OTHER_ADDRESS_CITY_W                     = PROP_TAG( PT_UNICODE, 14943)
+PR_OTHER_ADDRESS_CITY_A                     = PROP_TAG( PT_STRING8, 14943)
+PR_OTHER_ADDRESS_COUNTRY                    = PROP_TAG( PT_TSTRING, 14944)
+PR_OTHER_ADDRESS_COUNTRY_W                  = PROP_TAG( PT_UNICODE, 14944)
+PR_OTHER_ADDRESS_COUNTRY_A                  = PROP_TAG( PT_STRING8, 14944)
+PR_OTHER_ADDRESS_POSTAL_CODE                = PROP_TAG( PT_TSTRING, 14945)
+PR_OTHER_ADDRESS_POSTAL_CODE_W              = PROP_TAG( PT_UNICODE, 14945)
+PR_OTHER_ADDRESS_POSTAL_CODE_A              = PROP_TAG( PT_STRING8, 14945)
+PR_OTHER_ADDRESS_STATE_OR_PROVINCE          = PROP_TAG( PT_TSTRING, 14946)
+PR_OTHER_ADDRESS_STATE_OR_PROVINCE_W        = PROP_TAG( PT_UNICODE, 14946)
+PR_OTHER_ADDRESS_STATE_OR_PROVINCE_A        = PROP_TAG( PT_STRING8, 14946)
+PR_OTHER_ADDRESS_STREET                     = PROP_TAG( PT_TSTRING, 14947)
+PR_OTHER_ADDRESS_STREET_W                   = PROP_TAG( PT_UNICODE, 14947)
+PR_OTHER_ADDRESS_STREET_A                   = PROP_TAG( PT_STRING8, 14947)
+PR_OTHER_ADDRESS_POST_OFFICE_BOX            = PROP_TAG( PT_TSTRING, 14948)
+PR_OTHER_ADDRESS_POST_OFFICE_BOX_W          = PROP_TAG( PT_UNICODE, 14948)
+PR_OTHER_ADDRESS_POST_OFFICE_BOX_A          = PROP_TAG( PT_STRING8, 14948)
+# Profile-section / message-service properties (IDs 15616+ == 0x3D00 range).
+PR_STORE_PROVIDERS                          = PROP_TAG( PT_BINARY,    15616)
+PR_AB_PROVIDERS                             = PROP_TAG( PT_BINARY,    15617)
+PR_TRANSPORT_PROVIDERS                      = PROP_TAG( PT_BINARY,    15618)
+PR_DEFAULT_PROFILE                          = PROP_TAG( PT_BOOLEAN,   15620)
+PR_AB_SEARCH_PATH                           = PROP_TAG( PT_MV_BINARY, 15621)
+PR_AB_DEFAULT_DIR                           = PROP_TAG( PT_BINARY,    15622)
+PR_AB_DEFAULT_PAB                           = PROP_TAG( PT_BINARY,    15623)
+PR_FILTERING_HOOKS                          = PROP_TAG( PT_BINARY,    15624)
+PR_SERVICE_NAME                             = PROP_TAG( PT_TSTRING,   15625)
+PR_SERVICE_NAME_W                           = PROP_TAG( PT_UNICODE,   15625)
+PR_SERVICE_NAME_A                           = PROP_TAG( PT_STRING8,   15625)
+PR_SERVICE_DLL_NAME                         = PROP_TAG( PT_TSTRING,   15626)
+PR_SERVICE_DLL_NAME_W                       = PROP_TAG( PT_UNICODE,   15626)
+PR_SERVICE_DLL_NAME_A                       = PROP_TAG( PT_STRING8,   15626)
+PR_SERVICE_ENTRY_NAME                       = PROP_TAG( PT_STRING8,   15627)
+PR_SERVICE_UID                              = PROP_TAG( PT_BINARY,    15628)
+PR_SERVICE_EXTRA_UIDS                       = PROP_TAG( PT_BINARY,    15629)
+PR_SERVICES                                 = PROP_TAG( PT_BINARY,    15630)
+PR_SERVICE_SUPPORT_FILES                    = PROP_TAG( PT_MV_TSTRING, 15631)
+PR_SERVICE_SUPPORT_FILES_W                  = PROP_TAG( PT_MV_UNICODE, 15631)
+PR_SERVICE_SUPPORT_FILES_A                  = PROP_TAG( PT_MV_STRING8, 15631)
+PR_SERVICE_DELETE_FILES                     = PROP_TAG( PT_MV_TSTRING, 15632)
+PR_SERVICE_DELETE_FILES_W                   = PROP_TAG( PT_MV_UNICODE, 15632)
+PR_SERVICE_DELETE_FILES_A                   = PROP_TAG( PT_MV_STRING8, 15632)
+PR_AB_SEARCH_PATH_UPDATE                    = PROP_TAG( PT_BINARY,     15633)
+PR_PROFILE_NAME                             = PROP_TAG( PT_TSTRING,   15634)
+PR_PROFILE_NAME_A                           = PROP_TAG( PT_STRING8,   15634)
+PR_PROFILE_NAME_W                           = PROP_TAG( PT_UNICODE,   15634)
+# Status-object properties (IDs 15872+ == 0x3E00 range).
+PR_IDENTITY_DISPLAY                         = PROP_TAG( PT_TSTRING,   15872)
+PR_IDENTITY_DISPLAY_W                       = PROP_TAG( PT_UNICODE,   15872)
+PR_IDENTITY_DISPLAY_A                       = PROP_TAG( PT_STRING8,   15872)
+PR_IDENTITY_ENTRYID                         = PROP_TAG( PT_BINARY,    15873)
+PR_RESOURCE_METHODS                         = PROP_TAG( PT_LONG,      15874)
+PR_RESOURCE_TYPE                            = PROP_TAG( PT_LONG,      15875)
+PR_STATUS_CODE                              = PROP_TAG( PT_LONG,      15876)
+PR_IDENTITY_SEARCH_KEY                      = PROP_TAG( PT_BINARY,    15877)
+PR_OWN_STORE_ENTRYID                        = PROP_TAG( PT_BINARY,    15878)
+PR_RESOURCE_PATH                            = PROP_TAG( PT_TSTRING,   15879)
+PR_RESOURCE_PATH_W                          = PROP_TAG( PT_UNICODE,   15879)
+PR_RESOURCE_PATH_A                          = PROP_TAG( PT_STRING8,   15879)
+PR_STATUS_STRING                            = PROP_TAG( PT_TSTRING,   15880)
+PR_STATUS_STRING_W                          = PROP_TAG( PT_UNICODE,   15880)
+PR_STATUS_STRING_A                          = PROP_TAG( PT_STRING8,   15880)
+PR_X400_DEFERRED_DELIVERY_CANCEL            = PROP_TAG( PT_BOOLEAN,   15881)
+PR_HEADER_FOLDER_ENTRYID                    = PROP_TAG( PT_BINARY,    15882)
+PR_REMOTE_PROGRESS                          = PROP_TAG( PT_LONG,      15883)
+PR_REMOTE_PROGRESS_TEXT                     = PROP_TAG( PT_TSTRING,   15884)
+PR_REMOTE_PROGRESS_TEXT_W                   = PROP_TAG( PT_UNICODE,   15884)
+PR_REMOTE_PROGRESS_TEXT_A                   = PROP_TAG( PT_STRING8,   15884)
+PR_REMOTE_VALIDATE_OK                       = PROP_TAG( PT_BOOLEAN,   15885)
+# Display-table control properties (IDs 16128+ == 0x3F00 range).
+PR_CONTROL_FLAGS                            = PROP_TAG( PT_LONG,      16128)
+PR_CONTROL_STRUCTURE                        = PROP_TAG( PT_BINARY,    16129)
+PR_CONTROL_TYPE                             = PROP_TAG( PT_LONG,      16130)
+PR_DELTAX                                   = PROP_TAG( PT_LONG,      16131)
+PR_DELTAY                                   = PROP_TAG( PT_LONG,      16132)
+PR_XPOS                                     = PROP_TAG( PT_LONG,      16133)
+PR_YPOS                                     = PROP_TAG( PT_LONG,      16134)
+PR_CONTROL_ID                               = PROP_TAG( PT_BINARY,    16135)
+PR_INITIAL_DETAILS_PANE                     = PROP_TAG( PT_LONG,      16136)
+
+# Property-ID range (26608-26623 == 0x67F0-0x67FF) reserved for "secure"
+# profile properties such as stored credentials.
+PROP_ID_SECURE_MIN =                         26608
+PROP_ID_SECURE_MAX =                         26623
+
+# From EDKMDB.H
+# Base property IDs for the Exchange (EDK) ranges.  The PROP_TAG
+# definitions later in this file are expressed as offsets from these bases.
+pidExchangeXmitReservedMin		= 16352
+pidExchangeNonXmitReservedMin	= 26080
+pidProfileMin					= 26112
+pidStoreMin						= 26136
+pidFolderMin					= 26168
+pidMessageReadOnlyMin			= 26176
+pidMessageWriteableMin			= 26200
+pidAttachReadOnlyMin			= 26220
+pidSpecialMin					= 26224
+pidAdminMin						= 26256
+pidSecureProfileMin				= PROP_ID_SECURE_MIN
+
+# PR_PROFILE_* properties: Exchange provider settings kept in the MAPI
+# profile section, built as offsets from pidProfileMin/pidSpecialMin/
+# pidSecureProfileMin above.
+PR_PROFILE_VERSION				= PROP_TAG( PT_LONG, pidProfileMin+0)
+PR_PROFILE_CONFIG_FLAGS			= PROP_TAG( PT_LONG, pidProfileMin+1)
+PR_PROFILE_HOME_SERVER			= PROP_TAG( PT_STRING8, pidProfileMin+2)
+PR_PROFILE_HOME_SERVER_DN		= PROP_TAG( PT_STRING8, pidProfileMin+18)
+PR_PROFILE_HOME_SERVER_ADDRS	= PROP_TAG( PT_MV_STRING8, pidProfileMin+19)
+PR_PROFILE_USER					= PROP_TAG( PT_STRING8, pidProfileMin+3)
+PR_PROFILE_CONNECT_FLAGS		= PROP_TAG( PT_LONG, pidProfileMin+4)
+PR_PROFILE_TRANSPORT_FLAGS		= PROP_TAG( PT_LONG, pidProfileMin+5)
+PR_PROFILE_UI_STATE				= PROP_TAG( PT_LONG, pidProfileMin+6)
+PR_PROFILE_UNRESOLVED_NAME		= PROP_TAG( PT_STRING8, pidProfileMin+7)
+PR_PROFILE_UNRESOLVED_SERVER	= PROP_TAG( PT_STRING8, pidProfileMin+8)
+PR_PROFILE_BINDING_ORDER		= PROP_TAG( PT_STRING8, pidProfileMin+9)
+PR_PROFILE_MAX_RESTRICT			= PROP_TAG( PT_LONG, pidProfileMin+13)
+PR_PROFILE_AB_FILES_PATH		= PROP_TAG( PT_STRING8, pidProfileMin+14)
+PR_PROFILE_OFFLINE_STORE_PATH	= PROP_TAG( PT_STRING8, pidProfileMin+16)
+PR_PROFILE_OFFLINE_INFO			= PROP_TAG( PT_BINARY, pidProfileMin+17)
+PR_PROFILE_ADDR_INFO			= PROP_TAG( PT_BINARY, pidSpecialMin+23)
+PR_PROFILE_OPTIONS_DATA			= PROP_TAG( PT_BINARY, pidSpecialMin+25)
+PR_PROFILE_SECURE_MAILBOX		= PROP_TAG( PT_BINARY, pidSecureProfileMin + 0)
+PR_DISABLE_WINSOCK				= PROP_TAG( PT_LONG, pidProfileMin+24)
+PR_OST_ENCRYPTION				= PROP_TAG(PT_LONG, 26370)
+# NOTE(review): pidProfileMin+9 below repeats the ID used by
+# PR_PROFILE_BINDING_ORDER above, with a different type; this appears to be
+# inherited verbatim from EDKMDB.H -- verify against the SDK header.
+PR_PROFILE_OPEN_FLAGS			= PROP_TAG( PT_LONG, pidProfileMin+9)
+PR_PROFILE_TYPE					= PROP_TAG( PT_LONG, pidProfileMin+10)
+PR_PROFILE_MAILBOX				= PROP_TAG( PT_STRING8, pidProfileMin+11)
+PR_PROFILE_SERVER				= PROP_TAG( PT_STRING8, pidProfileMin+12)
+PR_PROFILE_SERVER_DN			= PROP_TAG( PT_STRING8, pidProfileMin+20)
+PR_PROFILE_FAVFLD_DISPLAY_NAME	= PROP_TAG(PT_STRING8, pidProfileMin+15)
+PR_PROFILE_FAVFLD_COMMENT		= PROP_TAG(PT_STRING8, pidProfileMin+21)
+PR_PROFILE_ALLPUB_DISPLAY_NAME	= PROP_TAG(PT_STRING8, pidProfileMin+22)
+PR_PROFILE_ALLPUB_COMMENT		= PROP_TAG(PT_STRING8, pidProfileMin+23)
+
+# Offline-store encryption flag values (single-bit masks expressed as
+# signed 32-bit decimals: -2147483648 == 0x80000000, 1073741824 ==
+# 0x40000000, 536870912 == 0x20000000).
+OSTF_NO_ENCRYPTION              = -2147483648
+OSTF_COMPRESSABLE_ENCRYPTION    = 1073741824
+OSTF_BEST_ENCRYPTION            = 536870912
+
+
+# Exchange store, folder, and replication properties (from EDKMDB.H),
+# built as offsets from the pid* base IDs defined earlier in this file.
+PR_NON_IPM_SUBTREE_ENTRYID				= PROP_TAG( PT_BINARY, pidStoreMin+8)
+PR_EFORMS_REGISTRY_ENTRYID				= PROP_TAG( PT_BINARY, pidStoreMin+9)
+PR_SPLUS_FREE_BUSY_ENTRYID				= PROP_TAG( PT_BINARY, pidStoreMin+10)
+PR_OFFLINE_ADDRBOOK_ENTRYID				= PROP_TAG( PT_BINARY, pidStoreMin+11)
+PR_EFORMS_FOR_LOCALE_ENTRYID			= PROP_TAG( PT_BINARY, pidStoreMin+12)
+PR_FREE_BUSY_FOR_LOCAL_SITE_ENTRYID		= PROP_TAG( PT_BINARY, pidStoreMin+13)
+PR_ADDRBOOK_FOR_LOCAL_SITE_ENTRYID		= PROP_TAG( PT_BINARY, pidStoreMin+14)
+PR_OFFLINE_MESSAGE_ENTRYID				= PROP_TAG( PT_BINARY, pidStoreMin+15)
+PR_IPM_FAVORITES_ENTRYID				= PROP_TAG( PT_BINARY, pidStoreMin+24)
+PR_IPM_PUBLIC_FOLDERS_ENTRYID			= PROP_TAG( PT_BINARY, pidStoreMin+25)
+PR_GW_MTSIN_ENTRYID				= PROP_TAG( PT_BINARY, pidStoreMin+16)
+PR_GW_MTSOUT_ENTRYID			= PROP_TAG( PT_BINARY, pidStoreMin+17)
+PR_TRANSFER_ENABLED				= PROP_TAG( PT_BOOLEAN, pidStoreMin+18)
+PR_TEST_LINE_SPEED				= PROP_TAG( PT_BINARY, pidStoreMin+19)
+PR_HIERARCHY_SYNCHRONIZER		= PROP_TAG( PT_OBJECT, pidStoreMin+20)
+PR_CONTENTS_SYNCHRONIZER		= PROP_TAG( PT_OBJECT, pidStoreMin+21)
+PR_COLLECTOR					= PROP_TAG( PT_OBJECT, pidStoreMin+22)
+PR_FAST_TRANSFER				= PROP_TAG( PT_OBJECT, pidStoreMin+23)
+PR_STORE_OFFLINE				= PROP_TAG( PT_BOOLEAN, pidStoreMin+26)
+PR_IN_TRANSIT					= PROP_TAG( PT_BOOLEAN, pidStoreMin)
+# Replication control properties and their STYLE_* / size-limit values.
+PR_REPLICATION_STYLE			= PROP_TAG( PT_LONG, pidAdminMin)
+PR_REPLICATION_SCHEDULE			= PROP_TAG( PT_BINARY, pidAdminMin+1)
+PR_REPLICATION_MESSAGE_PRIORITY = PROP_TAG( PT_LONG, pidAdminMin+2)
+PR_OVERALL_MSG_AGE_LIMIT		= PROP_TAG( PT_LONG, pidAdminMin+3 )
+PR_REPLICATION_ALWAYS_INTERVAL	= PROP_TAG( PT_LONG, pidAdminMin+4 )
+PR_REPLICATION_MSG_SIZE			= PROP_TAG( PT_LONG, pidAdminMin+5 )
+STYLE_ALWAYS_INTERVAL_DEFAULT	= 15
+REPLICATION_MESSAGE_SIZE_LIMIT_DEFAULT	= 100
+STYLE_NEVER				= 0
+STYLE_NORMAL			= 1
+STYLE_ALWAYS			= 2
+STYLE_DEFAULT			= -1
+# Incremental-change-synchronization (ICS) keys.
+PR_SOURCE_KEY					= PROP_TAG( PT_BINARY, pidExchangeNonXmitReservedMin+0)
+PR_PARENT_SOURCE_KEY			= PROP_TAG( PT_BINARY, pidExchangeNonXmitReservedMin+1)
+PR_CHANGE_KEY					= PROP_TAG( PT_BINARY, pidExchangeNonXmitReservedMin+2)
+PR_PREDECESSOR_CHANGE_LIST		= PROP_TAG( PT_BINARY, pidExchangeNonXmitReservedMin+3)
+PR_FOLDER_CHILD_COUNT			= PROP_TAG( PT_LONG, pidFolderMin)
+PR_RIGHTS						= PROP_TAG( PT_LONG, pidFolderMin+1)
+# NOTE(review): the ACL/RULES *_TABLE and *_DATA pairs below intentionally
+# share tag values (PT_OBJECT vs PT_BINARY views of the same IDs in
+# EDKMDB.H) -- verify against the SDK header.
+PR_ACL_TABLE					= PROP_TAG( PT_OBJECT, pidExchangeXmitReservedMin)
+PR_RULES_TABLE					= PROP_TAG( PT_OBJECT, pidExchangeXmitReservedMin+1)
+PR_HAS_RULES				= PROP_TAG( PT_BOOLEAN, pidFolderMin+2)
+PR_ADDRESS_BOOK_ENTRYID		= PROP_TAG( PT_BINARY, pidFolderMin+3)
+PR_ACL_DATA					= PROP_TAG( PT_BINARY, pidExchangeXmitReservedMin)
+PR_RULES_DATA				= PROP_TAG( PT_BINARY, pidExchangeXmitReservedMin+1)
+PR_FOLDER_DESIGN_FLAGS		= PROP_TAG( PT_LONG, pidExchangeXmitReservedMin+2)
+PR_DESIGN_IN_PROGRESS		= PROP_TAG( PT_BOOLEAN, pidExchangeXmitReservedMin+4)
+PR_SECURE_ORIGINATION		= PROP_TAG( PT_BOOLEAN, pidExchangeXmitReservedMin+5)
+PR_PUBLISH_IN_ADDRESS_BOOK	= PROP_TAG( PT_BOOLEAN, pidExchangeXmitReservedMin+6)
+PR_RESOLVE_METHOD			= PROP_TAG( PT_LONG,  pidExchangeXmitReservedMin+7)
+PR_ADDRESS_BOOK_DISPLAY_NAME	= PROP_TAG( PT_TSTRING, pidExchangeXmitReservedMin+8)
+PR_EFORMS_LOCALE_ID			= PROP_TAG( PT_LONG, pidExchangeXmitReservedMin+9)
+PR_REPLICA_LIST				= PROP_TAG( PT_BINARY, pidAdminMin+8)
+PR_OVERALL_AGE_LIMIT		= PROP_TAG( PT_LONG, pidAdminMin+9)
+# Allowed values for PR_RESOLVE_METHOD.
+RESOLVE_METHOD_DEFAULT			= 0
+RESOLVE_METHOD_LAST_WRITER_WINS	= 1
+RESOLVE_METHOD_NO_CONFLICT_NOTIFICATION = 2
+PR_PUBLIC_FOLDER_ENTRYID	= PROP_TAG( PT_BINARY, pidFolderMin+4)
+PR_HAS_NAMED_PROPERTIES			= PROP_TAG(PT_BOOLEAN, pidMessageReadOnlyMin+10)
+PR_CREATOR_NAME					= PROP_TAG(PT_TSTRING, pidExchangeXmitReservedMin+24)
+PR_CREATOR_ENTRYID				= PROP_TAG(PT_BINARY, pidExchangeXmitReservedMin+25)
+PR_LAST_MODIFIER_NAME			= PROP_TAG(PT_TSTRING, pidExchangeXmitReservedMin+26)
+PR_LAST_MODIFIER_ENTRYID		= PROP_TAG(PT_BINARY, pidExchangeXmitReservedMin+27)
+PR_HAS_DAMS						= PROP_TAG( PT_BOOLEAN, pidExchangeXmitReservedMin+10)
+PR_RULE_TRIGGER_HISTORY			= PROP_TAG( PT_BINARY, pidExchangeXmitReservedMin+18)
+PR_MOVE_TO_STORE_ENTRYID		= PROP_TAG( PT_BINARY, pidExchangeXmitReservedMin+19)
+PR_MOVE_TO_FOLDER_ENTRYID		= PROP_TAG( PT_BINARY, pidExchangeXmitReservedMin+20)
+PR_REPLICA_SERVER				= PROP_TAG(PT_TSTRING, pidMessageReadOnlyMin+4)
+PR_DEFERRED_SEND_NUMBER			= PROP_TAG( PT_LONG, pidExchangeXmitReservedMin+11)
+PR_DEFERRED_SEND_UNITS			= PROP_TAG( PT_LONG, pidExchangeXmitReservedMin+12)
+PR_EXPIRY_NUMBER				= PROP_TAG( PT_LONG, pidExchangeXmitReservedMin+13)
+PR_EXPIRY_UNITS					= PROP_TAG( PT_LONG, pidExchangeXmitReservedMin+14)
+PR_DEFERRED_SEND_TIME			= PROP_TAG( PT_SYSTIME, pidExchangeXmitReservedMin+15)
+PR_GW_ADMIN_OPERATIONS			= PROP_TAG( PT_LONG, pidMessageWriteableMin)
+PR_P1_CONTENT					= PROP_TAG( PT_BINARY, 4352)
+PR_P1_CONTENT_TYPE				= PROP_TAG( PT_BINARY, 4353)
+PR_CLIENT_ACTIONS		  		= PROP_TAG(PT_BINARY, pidMessageReadOnlyMin+5)
+PR_DAM_ORIGINAL_ENTRYID			= PROP_TAG(PT_BINARY, pidMessageReadOnlyMin+6)
+PR_DAM_BACK_PATCHED				= PROP_TAG( PT_BOOLEAN, pidMessageReadOnlyMin+7)
+PR_RULE_ERROR					= PROP_TAG(PT_LONG, pidMessageReadOnlyMin+8)
+PR_RULE_ACTION_TYPE				= PROP_TAG(PT_LONG, pidMessageReadOnlyMin+9)
+PR_RULE_ACTION_NUMBER			= PROP_TAG(PT_LONG, pidMessageReadOnlyMin+16)
+PR_RULE_FOLDER_ENTRYID			= PROP_TAG(PT_BINARY, pidMessageReadOnlyMin+17)
+PR_CONFLICT_ENTRYID				= PROP_TAG(PT_BINARY, pidExchangeXmitReservedMin+16)
+PR_MESSAGE_LOCALE_ID			= PROP_TAG(PT_LONG, pidExchangeXmitReservedMin+17)
+PR_STORAGE_QUOTA_LIMIT			= PROP_TAG(PT_LONG, pidExchangeXmitReservedMin+21)
+PR_EXCESS_STORAGE_USED			= PROP_TAG(PT_LONG, pidExchangeXmitReservedMin+22)
+PR_SVR_GENERATING_QUOTA_MSG		= PROP_TAG(PT_TSTRING, pidExchangeXmitReservedMin+23)
+PR_DELEGATED_BY_RULE			= PROP_TAG( PT_BOOLEAN, pidExchangeXmitReservedMin+3)
+MSGSTATUS_IN_CONFLICT			= 2048
+PR_IN_CONFLICT					= PROP_TAG(PT_BOOLEAN, pidAttachReadOnlyMin)
+PR_LONGTERM_ENTRYID_FROM_TABLE	= PROP_TAG(PT_BINARY, pidSpecialMin)
+PR_ORIGINATOR_NAME				= PROP_TAG( PT_TSTRING, pidMessageWriteableMin+3)
+PR_ORIGINATOR_ADDR				= PROP_TAG( PT_TSTRING, pidMessageWriteableMin+4)
+PR_ORIGINATOR_ADDRTYPE			= PROP_TAG( PT_TSTRING, pidMessageWriteableMin+5)
+PR_ORIGINATOR_ENTRYID			= PROP_TAG( PT_BINARY, pidMessageWriteableMin+6)
+PR_ARRIVAL_TIME					= PROP_TAG( PT_SYSTIME, pidMessageWriteableMin+7)
+PR_TRACE_INFO					= PROP_TAG( PT_BINARY, pidMessageWriteableMin+8)
+PR_INTERNAL_TRACE_INFO 			= PROP_TAG( PT_BINARY, pidMessageWriteableMin+18)
+PR_SUBJECT_TRACE_INFO			= PROP_TAG( PT_BINARY, pidMessageWriteableMin+9)
+PR_RECIPIENT_NUMBER				= PROP_TAG( PT_LONG, pidMessageWriteableMin+10)
+PR_MTS_SUBJECT_ID				= PROP_TAG(PT_BINARY, pidMessageWriteableMin+11)
+PR_REPORT_DESTINATION_NAME		= PROP_TAG(PT_TSTRING, pidMessageWriteableMin+12)
+PR_REPORT_DESTINATION_ENTRYID	= PROP_TAG(PT_BINARY, pidMessageWriteableMin+13)
+PR_CONTENT_SEARCH_KEY			= PROP_TAG(PT_BINARY, pidMessageWriteableMin+14)
+PR_FOREIGN_ID					= PROP_TAG(PT_BINARY, pidMessageWriteableMin+15)
+PR_FOREIGN_REPORT_ID			= PROP_TAG(PT_BINARY, pidMessageWriteableMin+16)
+PR_FOREIGN_SUBJECT_ID			= PROP_TAG(PT_BINARY, pidMessageWriteableMin+17)
+PR_MTS_ID						= PR_MESSAGE_SUBMISSION_ID
+PR_MTS_REPORT_ID				= PR_MESSAGE_SUBMISSION_ID
+
+PR_FOLDER_FLAGS                         = PROP_TAG( PT_LONG, pidAdminMin+24 )
+PR_LAST_ACCESS_TIME		=				PROP_TAG( PT_SYSTIME, pidAdminMin+25 )
+PR_RESTRICTION_COUNT                    = PROP_TAG( PT_LONG, pidAdminMin+26 )
+PR_CATEG_COUNT                          = PROP_TAG( PT_LONG, pidAdminMin+27 )
+PR_CACHED_COLUMN_COUNT                  = PROP_TAG( PT_LONG, pidAdminMin+28 )
+PR_NORMAL_MSG_W_ATTACH_COUNT    		= PROP_TAG( PT_LONG, pidAdminMin+29 )
+PR_ASSOC_MSG_W_ATTACH_COUNT             = PROP_TAG( PT_LONG, pidAdminMin+30 )
+PR_RECIPIENT_ON_NORMAL_MSG_COUNT        = PROP_TAG( PT_LONG, pidAdminMin+31 )
+PR_RECIPIENT_ON_ASSOC_MSG_COUNT 		= PROP_TAG( PT_LONG, pidAdminMin+32 )
+PR_ATTACH_ON_NORMAL_MSG_COUNT   		= PROP_TAG( PT_LONG, pidAdminMin+33 )
+PR_ATTACH_ON_ASSOC_MSG_COUNT    		= PROP_TAG( PT_LONG, pidAdminMin+34 )
+PR_NORMAL_MESSAGE_SIZE                  = PROP_TAG( PT_LONG, pidAdminMin+35 )
+PR_NORMAL_MESSAGE_SIZE_EXTENDED         = PROP_TAG( PT_I8, pidAdminMin+35 )
+PR_ASSOC_MESSAGE_SIZE                   = PROP_TAG( PT_LONG, pidAdminMin+36 )
+PR_ASSOC_MESSAGE_SIZE_EXTENDED          = PROP_TAG( PT_I8, pidAdminMin+36 )
+PR_FOLDER_PATHNAME                      = PROP_TAG(PT_TSTRING, pidAdminMin+37 )
+PR_OWNER_COUNT							= PROP_TAG( PT_LONG, pidAdminMin+38 )
+PR_CONTACT_COUNT						= PROP_TAG( PT_LONG, pidAdminMin+39 )
+
+PR_MESSAGE_SIZE_EXTENDED			= PROP_TAG(PT_I8, PROP_ID(PR_MESSAGE_SIZE))
+
+PR_USERFIELDS = PROP_TAG(PT_BINARY, 0x36e3)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapiutil.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapiutil.py
new file mode 100644
index 0000000..87ee340
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/mapi/mapiutil.py
@@ -0,0 +1,166 @@
+# General utilities for MAPI and MAPI objects.
+from types import TupleType, ListType, IntType, StringType
+from pywintypes import UnicodeType, TimeType
+import pythoncom
+import mapi, mapitags
+
+# Pre 2.2.1 compat.
+try: True, False
+except NameError: True = 1==1; False = 1==0
+
+prTable = {}
+def GetPropTagName(pt):
+	if not prTable:
+		for name, value in mapitags.__dict__.items():
+			if name[:3] == 'PR_':
+				# Store both the full ID (including type) and just the ID.
+				# This is so PR_FOO_A and PR_FOO_W are still differentiated,
+				# but should we get a PT_FOO with PT_ERROR set, we fallback
+				# to the ID.
+				prTable[value] = name
+				prTable[mapitags.PROP_ID(value)] = name
+	try:
+		try:
+			return prTable[pt]
+		except KeyError:
+			# Can't find it exactly - see if the raw ID exists.
+			return prTable[mapitags.PROP_ID(pt)]
+	except KeyError:
+		# hex() on a negative long emits a warning on this Python version;
+		# normalize the textual form manually to get the old behaviour.
+		ret = hex(long(pt))
+		# -0x8000000L -> 0x80000000
+		if ret[0]=='-': ret = ret[1:]
+		if ret[-1]=='L': ret = ret[:-1]
+		return ret
+
+mapiErrorTable = {}
+def GetScodeString(hr):
+	if not mapiErrorTable:
+		for name, value in mapi.__dict__.items():
+			if name[:7] in ['MAPI_E_', 'MAPI_W_']:
+				mapiErrorTable[value] = name
+	return mapiErrorTable.get(hr, pythoncom.GetScodeString(hr))
+
+
+ptTable = {}
+def GetMapiTypeName(propType):
+	"""Given a mapi type flag, return a string description of the type"""
+	if not ptTable:
+		for name, value in mapitags.__dict__.items():
+			if name[:3] == 'PT_':
+				ptTable[value] = name
+
+	rawType = propType & ~mapitags.MV_FLAG
+	return ptTable.get(rawType, str(hex(rawType)))
+
+def GetProperties(obj, propList):
+	"""Given a MAPI object and a list of properties, return a list of property values.
+	
+	Allows a single property to be passed, and the result is a single object.
+	
+	Each requested property can be an integer or a string.  If a string, it is
+	automatically converted to an integer via the GetIDsFromNames function.
+	
+	If the property fetch fails, the result is None.
+	"""
+	bRetList = 1
+	if type(propList) not in [TupleType, ListType]:
+		bRetList = 0
+		propList = (propList,)
+	realPropList = []
+	rc = []
+	for prop in propList:
+		if type(prop)!=IntType:	# Integer
+			props = ( (mapi.PS_PUBLIC_STRINGS, prop), )
+			propIds = obj.GetIDsFromNames(props, 0)
+			prop = mapitags.PROP_TAG( mapitags.PT_UNSPECIFIED, mapitags.PROP_ID(propIds[0]))
+		realPropList.append(prop)
+		
+	hr, data = obj.GetProps(realPropList,0)
+	if hr != 0:
+		data = None
+		return None
+	if bRetList:
+		return map( lambda(v): v[1], data )
+	else:
+		return data[0][1]
+
+def GetAllProperties(obj, make_tag_names = True):
+	tags = obj.GetPropList(0)
+	hr, data = obj.GetProps(tags)
+	ret = []
+	for tag, val in data:
+		if make_tag_names:
+			hr, tags, array = obj.GetNamesFromIDs( (tag,) )
+			if type(array[0][1])==type(u''):
+				name = array[0][1]
+			else:
+				name = GetPropTagName(tag)
+		else:
+			name = tag
+		ret.append((name, val))
+	return ret
+
+_MapiTypeMap = {
+    type(0.0): mapitags.PT_DOUBLE,
+    type(0): mapitags.PT_I4,
+    type(''): mapitags.PT_STRING8,
+    type(u''): mapitags.PT_UNICODE,
+    type(None): mapitags.PT_UNSPECIFIED,
+    # In Python 2.2.2, bool isn't a distinct type (type(1==1) is type(0)).
+}
+
+def SetPropertyValue(obj, prop, val):
+	if type(prop)!=IntType:
+		props = ( (mapi.PS_PUBLIC_STRINGS, prop), )
+		propIds = obj.GetIDsFromNames(props, mapi.MAPI_CREATE)
+		if val == (1==1) or val == (1==0):
+			type_tag = mapitags.PT_BOOLEAN
+		else:
+			type_tag = _MapiTypeMap.get(type(val))
+			if type_tag is None:
+				raise ValueError, "Don't know what to do with '%r' ('%s')" % (val, type(val))
+		prop = mapitags.PROP_TAG( type_tag, mapitags.PROP_ID(propIds[0]))
+	if val is None:
+		# Delete the property
+		obj.DeleteProps((prop,))
+	else:
+		obj.SetProps(((prop,val),))
+
+def SetProperties( msg, propDict):
+	""" Given a Python dictionary, set the objects properties.
+	
+	If the dictionary key is a string, then a property ID is queried;
+	otherwise the ID is assumed native.
+	
+	Coded for maximum efficiency wrt server calls - ie, maximum of
+	2 calls made to the object, regardless of the dictionary contents
+	(only 1 if dictionary full of int keys)
+	"""
+
+	newProps = []
+	# First pass over the properties we should get IDs for.
+	for key, val in propDict.items():
+		if type(key) in [StringType, UnicodeType]:
+			newProps.append((mapi.PS_PUBLIC_STRINGS, key))
+	# Query for the new IDs
+	if newProps: newIds = msg.GetIDsFromNames(newProps, mapi.MAPI_CREATE)
+	newIdNo = 0
+	newProps = []
+	for key, val in propDict.items():
+		if type(key) in [StringType, UnicodeType]:
+			type_val=type(val)
+			if type_val in [StringType, pywintypes.UnicodeType]:
+				tagType = mapitags.PT_UNICODE
+			elif type_val==IntType:
+				tagType = mapitags.PT_I4
+			elif type_val==TimeType:
+				tagType = mapitags.PT_SYSTIME
+			else:
+				raise ValueError, "The type of object %s(%s) can not be written" % (`val`,type_val)
+			key = mapitags.PROP_TAG(tagType, mapitags.PROP_ID(newIds[newIdNo]))
+			newIdNo = newIdNo + 1
+		newProps.append( (key, val) )
+	msg.SetProps(newProps)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/__init__.py
new file mode 100644
index 0000000..d882689
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/__init__.py
@@ -0,0 +1,4 @@
+# See if we have a special directory for the binaries (for developers)
+import win32com
+win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/browse_for_folder.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/browse_for_folder.py
new file mode 100644
index 0000000..dbf9128
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/browse_for_folder.py
@@ -0,0 +1,40 @@
+# A couple of samples using SHBrowseForFolder
+
+import sys, os
+from win32com.shell import shell, shellcon
+import win32gui
+
+# A callback procedure - called by SHBrowseForFolder
+def BrowseCallbackProc(hwnd, msg, lp, data):
+    if msg== shellcon.BFFM_INITIALIZED:
+        win32gui.SendMessage(hwnd, shellcon.BFFM_SETSELECTION, 1, data)
+    elif msg == shellcon.BFFM_SELCHANGED:
+        # Set the status text of the dialog to the selected path.
+        # For this message, 'lp' is the address of the PIDL.
+        pidl = shell.AddressAsPIDL(lp)
+        try:
+            path = shell.SHGetPathFromIDList(pidl)
+            win32gui.SendMessage(hwnd, shellcon.BFFM_SETSTATUSTEXT, 0, path)
+        except shell.error:
+            # No path for this PIDL
+            pass
+
+if __name__=='__main__':
+    # Demonstrate a dialog with the cwd selected as the default - this
+    # must be done via a callback function.
+    flags = shellcon.BIF_STATUSTEXT
+    shell.SHBrowseForFolder(0, # parent HWND
+                            None, # root PIDL.
+                            "Default of %s" % os.getcwd(), # title
+                            flags, # flags
+                            BrowseCallbackProc, # callback function
+                            os.getcwd() # 'data' param for the callback
+                            )
+    # Browse from this directory down only.
+    # Get the PIDL for the cwd.
+    desktop = shell.SHGetDesktopFolder()
+    cb, pidl, extra = desktop.ParseDisplayName(0, None, os.getcwd())
+    shell.SHBrowseForFolder(0, # parent HWND
+                            pidl, # root PIDL.
+                            "From %s down only" % os.getcwd(), # title
+                            )
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/column_provider.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/column_provider.py
new file mode 100644
index 0000000..bdfe845
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/column_provider.py
@@ -0,0 +1,105 @@
+# A sample shell column provider
+# Mainly ported from MSDN article:
+#  Using Shell Column Handlers for Detailed File Information, 
+#  Raymond Chen, Microsoft Corporation, February 2000
+#
+# To demonstrate:
+# * Execute this script to register the namespace.
+# * Open Windows Explorer
+# * Right-click an explorer column header - select "More"
+# * Locate column 'pyc size' or 'pyo size', and add it to the view.
+# This handler is providing that column data.
+import sys, os, stat
+import pythoncom
+from win32com.shell import shell, shellcon
+import commctrl
+import winerror
+from win32com.server.util import wrap
+from pywintypes import IID
+
+IPersist_Methods = ["GetClassID"]
+IColumnProvider_Methods = IPersist_Methods + \
+                          ["Initialize", "GetColumnInfo", "GetItemData"]
+
+class ColumnProvider:
+    _reg_progid_ = "Python.ShellExtension.ColumnProvider"
+    _reg_desc_ = "Python Sample Shell Extension (Column Provider)"
+    _reg_clsid_ = IID("{0F14101A-E05E-4070-BD54-83DFA58C3D68}")
+    _com_interfaces_ = [pythoncom.IID_IPersist,
+                        shell.IID_IColumnProvider,
+                        ]
+    _public_methods_ = IColumnProvider_Methods
+    # IPersist
+    def GetClassID(self):
+        return self._reg_clsid_
+    # IColumnProvider
+    def Initialize(self, colInit):
+        flags, reserved, name = colInit
+        print "ColumnProvider initializing for file", name
+    def GetColumnInfo(self, index):
+        # We support exactly 2 columns - 'pyc size' and 'pyo size'
+        if index in [0,1]:
+            # As per the MSDN sample, use our CLSID as the fmtid
+            if index==0:
+                ext = ".pyc"
+            else:
+                ext = ".pyo"
+            title = ext + " size"
+            description = "Size of compiled %s file" % ext
+            col_id = (self._reg_clsid_, # fmtid
+                     index)            # pid
+            col_info = (
+                    col_id, # scid
+                    pythoncom.VT_I4, # vt
+                    commctrl.LVCFMT_RIGHT, # fmt
+                    20, #cChars
+                    shellcon.SHCOLSTATE_TYPE_INT | \
+                    shellcon.SHCOLSTATE_SECONDARYUI, # csFlags
+                    title,
+                    description)
+            return col_info
+        return None # Indicate no more columns.
+    def GetItemData(self, colid, colData):
+        fmt_id, pid = colid
+        fmt_id==self._reg_clsid_
+        flags, attr, reserved, ext, name = colData
+        if ext.lower() not in [".py", ".pyw"]:
+            return None
+        if pid==0:
+            ext = ".pyc"
+        else:
+            ext = ".pyo"
+        check_file = os.path.splitext(name)[0] + ext
+        try:
+            st = os.stat(check_file)
+            return st[stat.ST_SIZE]
+        except OSError:
+            # No file
+            return None
+
+def DllRegisterServer():
+    import _winreg
+    # Special ColumnProvider key
+    key = _winreg.CreateKey(_winreg.HKEY_CLASSES_ROOT,
+                        "Folder\\ShellEx\\ColumnHandlers\\" + \
+                        str(ColumnProvider._reg_clsid_ ))
+    _winreg.SetValueEx(key, None, 0, _winreg.REG_SZ, ColumnProvider._reg_desc_)
+    print ColumnProvider._reg_desc_, "registration complete."
+
+def DllUnregisterServer():
+    import _winreg
+    try:
+        key = _winreg.DeleteKey(_winreg.HKEY_CLASSES_ROOT,
+                            "Folder\\ShellEx\\ColumnHandlers\\" + \
+                            str(ColumnProvider._reg_clsid_) )
+    except WindowsError, details:
+        import errno
+        if details.errno != errno.ENOENT:
+            raise
+    print ColumnProvider._reg_desc_, "unregistration complete."
+
+if __name__=='__main__':
+    from win32com.server import register
+    register.UseCommandLine(ColumnProvider,
+                   finalize_register = DllRegisterServer,
+                   finalize_unregister = DllUnregisterServer)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/context_menu.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/context_menu.py
new file mode 100644
index 0000000..28d78539
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/context_menu.py
@@ -0,0 +1,104 @@
+# A sample context menu handler.
+# Adds a 'Hello from Python' menu entry to .py files.  When clicked, a
+# simple message box is displayed.
+#
+# To demonstrate:
+# * Execute this script to register the context menu.
+# * Open Windows Explorer, and browse to a directory with a .py file.
+# * Right-Click on a .py file - locate and click on 'Hello from Python' on
+#   the context menu.
+
+import pythoncom
+from win32com.shell import shell, shellcon
+import win32gui
+import win32con
+
+IContextMenu_Methods = ["QueryContextMenu", "InvokeCommand", "GetCommandString"]
+IShellExtInit_Methods = ["Initialize"]
+
+class ShellExtension:
+    _reg_progid_ = "Python.ShellExtension.ContextMenu"
+    _reg_desc_ = "Python Sample Shell Extension (context menu)"
+    _reg_clsid_ = "{CED0336C-C9EE-4a7f-8D7F-C660393C381F}"
+    _com_interfaces_ = [shell.IID_IShellExtInit, shell.IID_IContextMenu]
+    _public_methods_ = IContextMenu_Methods + IShellExtInit_Methods
+
+    def Initialize(self, folder, dataobj, hkey):
+        print "Init", folder, dataobj, hkey
+        self.dataobj = dataobj
+
+    def QueryContextMenu(self, hMenu, indexMenu, idCmdFirst, idCmdLast, uFlags):
+        print "QCM", hMenu, indexMenu, idCmdFirst, idCmdLast, uFlags
+        # Query the items clicked on
+        format_etc = win32con.CF_HDROP, None, 1, -1, pythoncom.TYMED_HGLOBAL
+        sm = self.dataobj.GetData(format_etc)
+        num_files = shell.DragQueryFile(sm.data_handle, -1)
+        if num_files>1:
+            msg = "&Hello from Python (with %d files selected)" % num_files
+        else:
+            fname = shell.DragQueryFile(sm.data_handle, 0)
+            msg = "&Hello from Python (with '%s' selected)" % fname
+        idCmd = idCmdFirst
+        items = []
+        if (uFlags & 0x000F) == shellcon.CMF_NORMAL: # Check == here, since CMF_NORMAL=0
+            print "CMF_NORMAL..."
+            items.append(msg)
+        elif uFlags & shellcon.CMF_VERBSONLY:
+            print "CMF_VERBSONLY..."
+            items.append(msg + " - shortcut")
+        elif uFlags & shellcon.CMF_EXPLORE:
+            print "CMF_EXPLORE..."
+            items.append(msg + " - normal file, right-click in Explorer")
+        elif uFlags & CMF_DEFAULTONLY:
+            print "CMF_DEFAULTONLY...\r\n"
+        else:
+            print "** unknown flags", uFlags
+        win32gui.InsertMenu(hMenu, indexMenu,
+                            win32con.MF_SEPARATOR|win32con.MF_BYPOSITION,
+                            0, None)
+        indexMenu += 1
+        for item in items:
+            win32gui.InsertMenu(hMenu, indexMenu,
+                                win32con.MF_STRING|win32con.MF_BYPOSITION,
+                                idCmd, item)
+            indexMenu += 1
+            idCmd += 1
+
+        win32gui.InsertMenu(hMenu, indexMenu,
+                            win32con.MF_SEPARATOR|win32con.MF_BYPOSITION,
+                            0, None)
+        indexMenu += 1
+        return idCmd-idCmdFirst # Must return number of menu items we added.
+
+    def InvokeCommand(self, ci):
+        mask, hwnd, verb, params, dir, nShow, hotkey, hicon = ci
+        win32gui.MessageBox(hwnd, "Hello", "Wow", win32con.MB_OK)
+
+    def GetCommandString(self, cmd, typ):
+        return "Hello from Python!!"
+
+def DllRegisterServer():
+    import _winreg
+    key = _winreg.CreateKey(_winreg.HKEY_CLASSES_ROOT,
+                            "Python.File\\shellex")
+    subkey = _winreg.CreateKey(key, "ContextMenuHandlers")
+    subkey2 = _winreg.CreateKey(subkey, "PythonSample")
+    _winreg.SetValueEx(subkey2, None, 0, _winreg.REG_SZ, ShellExtension._reg_clsid_)
+    print ShellExtension._reg_desc_, "registration complete."
+
+def DllUnregisterServer():
+    import _winreg
+    try:
+        key = _winreg.DeleteKey(_winreg.HKEY_CLASSES_ROOT,
+                                "Python.File\\shellex\\ContextMenuHandlers\\PythonSample")
+    except WindowsError, details:
+        import errno
+        if details.errno != errno.ENOENT:
+            raise
+    print ShellExtension._reg_desc_, "unregistration complete."
+
+if __name__=='__main__':
+    from win32com.server import register
+    register.UseCommandLine(ShellExtension,
+                   finalize_register = DllRegisterServer,
+                   finalize_unregister = DllUnregisterServer)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/copy_hook.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/copy_hook.py
new file mode 100644
index 0000000..dd9613b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/copy_hook.py
@@ -0,0 +1,71 @@
+# A sample shell copy hook.
+
+# To demonstrate:
+# * Execute this script to register the context menu.
+# * Open Windows Explorer
+# * Attempt to move or copy a directory.
+# * Note our hook's dialog is displayed.
+import sys, os
+import pythoncom
+from win32com.shell import shell, shellcon
+import win32gui
+import win32con
+import winerror
+
+# Our shell extension.
+class ShellExtension:
+    _reg_progid_ = "Python.ShellExtension.CopyHook"
+    _reg_desc_ = "Python Sample Shell Extension (copy hook)"
+    _reg_clsid_ = "{1845b6ba-2bbd-4197-b930-46d8651497c1}"
+    _com_interfaces_ = [shell.IID_ICopyHook]
+    _public_methods_ = ["CopyCallBack"]
+    
+    def CopyCallBack(self, hwnd, func, flags,
+                     srcName, srcAttr, destName, destAttr):
+        # This function should return:
+        # IDYES Allows the operation. 
+        # IDNO Prevents the operation on this folder but continues with any other operations that have been approved (for example, a batch copy operation).  
+        # IDCANCEL Prevents the current operation and cancels any pending operations.  
+        print "CopyCallBack", hwnd, func, flags, srcName, srcAttr, destName, destAttr
+        return win32gui.MessageBox(hwnd, "Allow operation?", "CopyHook",
+                                   win32con.MB_YESNO)
+
+def DllRegisterServer():
+    import _winreg
+    key = _winreg.CreateKey(_winreg.HKEY_CLASSES_ROOT,
+                            "directory\\shellex\\CopyHookHandlers\\" +
+                            ShellExtension._reg_desc_)
+    _winreg.SetValueEx(key, None, 0, _winreg.REG_SZ, ShellExtension._reg_clsid_)
+    key = _winreg.CreateKey(_winreg.HKEY_CLASSES_ROOT,
+                            "*\\shellex\\CopyHookHandlers\\" +
+                            ShellExtension._reg_desc_)
+    _winreg.SetValueEx(key, None, 0, _winreg.REG_SZ, ShellExtension._reg_clsid_)
+    print ShellExtension._reg_desc_, "registration complete."
+
+def DllUnregisterServer():
+    import _winreg
+    try:
+        key = _winreg.DeleteKey(_winreg.HKEY_CLASSES_ROOT,
+                                "directory\\shellex\\CopyHookHandlers\\" +
+                            ShellExtension._reg_desc_)
+    except WindowsError, details:
+        import errno
+        if details.errno != errno.ENOENT:
+            raise
+    try:
+        key = _winreg.DeleteKey(_winreg.HKEY_CLASSES_ROOT,
+                                "*\\shellex\\CopyHookHandlers\\" +
+                            ShellExtension._reg_desc_)
+    except WindowsError, details:
+        import errno
+        if details.errno != errno.ENOENT:
+            raise
+    print ShellExtension._reg_desc_, "unregistration complete."
+
+if __name__=='__main__':
+    from win32com.server import register
+    register.UseCommandLine(ShellExtension,
+                   finalize_register = DllRegisterServer,
+                   finalize_unregister = DllUnregisterServer)
+#!/usr/bin/env python
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/icon_handler.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/icon_handler.py
new file mode 100644
index 0000000..e06389a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/icon_handler.py
@@ -0,0 +1,70 @@
+# A sample icon handler.  Sets the icon for Python files to a random
+# ICO file.  ICO files are found in the Python directory - generally there will
+# be 3 icons found.
+#
+# To demonstrate:
+# * Execute this script to register the context menu.
+# * Open Windows Explorer, and browse to a directory with a .py file.
+# * Note the pretty, random selection of icons!
+import sys, os
+import pythoncom
+from win32com.shell import shell, shellcon
+import win32gui
+import win32con
+import winerror
+
+# Use glob to locate ico files, and random.choice to pick one.
+import glob, random
+ico_files = glob.glob(os.path.join(sys.prefix, "*.ico"))
+if not ico_files:
+    ico_files = glob.glob(os.path.join(sys.prefix, "PC", "*.ico"))
+if not ico_files:
+    print "WARNING: Can't find any icon files"
+    
+# Our shell extension.
+IExtractIcon_Methods = "Extract GetIconLocation".split()
+IPersistFile_Methods = "IsDirty Load Save SaveCompleted GetCurFile".split()
+
+class ShellExtension:
+    _reg_progid_ = "Python.ShellExtension.IconHandler"
+    _reg_desc_ = "Python Sample Shell Extension (icon handler)"
+    _reg_clsid_ = "{a97e32d7-3b78-448c-b341-418120ea9227}"
+    _com_interfaces_ = [shell.IID_IExtractIcon, pythoncom.IID_IPersistFile]
+    _public_methods_ = IExtractIcon_Methods + IPersistFile_Methods
+    
+    def Load(self, filename, mode):
+        self.filename = filename
+        self.mode = mode
+
+    def GetIconLocation(self, flags):
+        # note - returning a single int will set the HRESULT (eg, S_FALSE,
+        # E_PENDING) - see MS docs for details.
+        return random.choice(ico_files), 0, 0
+
+    def Extract(self, fname, index, size):
+        return winerror.S_FALSE
+
+def DllRegisterServer():
+    import _winreg
+    key = _winreg.CreateKey(_winreg.HKEY_CLASSES_ROOT,
+                            "Python.File\\shellex")
+    subkey = _winreg.CreateKey(key, "IconHandler")
+    _winreg.SetValueEx(subkey, None, 0, _winreg.REG_SZ, ShellExtension._reg_clsid_)
+    print ShellExtension._reg_desc_, "registration complete."
+
+def DllUnregisterServer():
+    import _winreg
+    try:
+        key = _winreg.DeleteKey(_winreg.HKEY_CLASSES_ROOT,
+                                "Python.File\\shellex\\IconHandler")
+    except WindowsError, details:
+        import errno
+        if details.errno != errno.ENOENT:
+            raise
+    print ShellExtension._reg_desc_, "unregistration complete."
+
+if __name__=='__main__':
+    from win32com.server import register
+    register.UseCommandLine(ShellExtension,
+                   finalize_register = DllRegisterServer,
+                   finalize_unregister = DllUnregisterServer)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/shell_view.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/shell_view.py
new file mode 100644
index 0000000..f953d4c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/servers/shell_view.py
@@ -0,0 +1,455 @@
+# A sample shell namespace view
+
# To demonstrate:
+# * Execute this script to register the namespace.
+# * Open Windows Explorer, and locate the new "Python Path Shell Browser"
+#   folder off "My Computer"
+# * Browse this tree - .py files are shown expandable, with classes and
+#   methods selectable.  Selecting a Python file, or a class/method, will
+#   display the file using Scintilla.
+# Known problems:
+# * Classes and methods don't have icons - this is a demo, so we keep it small
+#   See icon_handler.py for examples of how to work with icons.
+#
+#
+# Notes on PIDLs
+# PIDLS are complicated, but fairly well documented in MSDN.  If you need to
+# do much with these shell extensions, you must understand their concept.
+# Here is a short-course, as it applies to this sample:
+# A PIDL identifies an item, much in the same way that a filename does
+# (however, the shell is not limited to displaying "files").
+# An "ItemID" is a single string, each being an item in the hierarchy.
+# A "PIDL" is a list of these strings.
+# All shell etc functions work with PIDLs, so even in the case where
+# an ItemID is conceptually used, a 1-item list is always used.
+# Conceptually, think of:
+#    pidl = pathname.split("\\") # pidl is a list of "parent" items.
#    # each item is a string 'item id', but these are never used directly
+# As there is no concept of passing a single item, to open a file using only
+# a relative filename, conceptually you would say:
+#   open_file([filename]) # Pass a single-itemed relative "PIDL"
+# and continuing the analogy, a "listdir" type function would return a list
+# of single-itemed lists - each list containing the relative PIDL of the child.
+#
+# Each PIDL entry is a binary string, and may contain any character.  For
+# PIDLs not created by you, they can not be interpreted - they are just
+# blobs.  PIDLs created by you (ie, children of your IShellFolder) can
+# store and interpret the string however makes most sense for your application.
+# (but within PIDL rules - they must be persistable, etc)
+# There is no reason that pickled strings, for example, couldn't be used
+# as an EntryID.
+# This application takes a simple approach - each PIDL is a string of form
+# "directory\0directory_name", "file\0file_name" or
# "object\0file_name\0class_name[.method_name]"
+# The first string in each example is literal (ie, the word 'directory',
+# 'file' or 'object', and every other string is variable.  We use '\0' as
+# a field sep just 'cos we can (and 'cos it can't possibly conflict with the
+# string content)
+
+import sys, os
+import thread
+import pyclbr
+import pythoncom
+import win32gui, win32api, win32con, winerror
+from win32com.shell import shell, shellcon
+from win32com.server.util import wrap, NewEnum
+from win32com.server.exception import COMException
+from win32com.util import IIDToInterfaceName
+from pywin.scintilla import scintillacon
+
+# Set this to 1 to cause debug version to be registered and used.  A debug
+# version will spew output to win32traceutil.
+debug=0
+
# Helper function to get a system IShellFolder interface, and the PIDL within
# that folder for an existing file/directory.
def GetFolderAndPIDLForPath(filename):
    """Return (IShellFolder, relative_pidl) for an existing file/directory.

    The returned pidl is a one-item list, so it remains a valid PIDL
    relative to the returned folder.
    """
    folder = shell.SHGetDesktopFolder()
    cchEaten, pidl, attr = folder.ParseDisplayName(
                                    0, None, os.path.abspath(filename))
    # Bind down through the parents, one child item at a time, until only
    # the leaf item is left.
    for item in pidl[:-1]:
        folder = folder.BindToObject([item], None, shell.IID_IShellFolder)
    return folder, pidl[-1:]
+
# A cache of pyclbr module objects, so we only parse a given filename once.
clbr_modules = {} # Indexed by path, item is dict as returned from pyclbr
def get_clbr_for_file(path):
    """Return the pyclbr description dict for the Python module at *path*.

    The result maps top-level names to pyclbr objects.  Results are cached
    in clbr_modules, so each file is parsed at most once.
    """
    try:
        return clbr_modules[path]
    except KeyError:
        # 'directory' rather than 'dir' - the old name shadowed the builtin.
        directory, filename = os.path.split(path)
        base = os.path.splitext(filename)[0]
        objects = pyclbr.readmodule_ex(base, [directory])
        clbr_modules[path] = objects
        return objects
+
# Our COM interfaces - the method names each interface exposes.
IOleWindow_Methods = ["GetWindow", "ContextSensitiveHelp"]
IShellView_Methods = IOleWindow_Methods + [
    "TranslateAccelerator", "EnableModeless", "UIActivate",
    "Refresh", "CreateViewWindow", "DestroyViewWindow",
    "GetCurrentInfo", "AddPropertySheetPages", "SaveViewState",
    "SelectItem", "GetItemObject",
]

IShellFolder_Methods = [
    "ParseDisplayName", "EnumObjects", "BindToObject",
    "BindToStorage", "CompareIDs", "CreateViewObject",
    "GetAttributesOf", "GetUIObjectOf", "GetDisplayNameOf",
    "SetNameOf",
]

IBrowserFrame_Methods = ["GetFrameOptions"]

IPersist_Methods = ["GetClassID"]
IPersistFolder_Methods = IPersist_Methods + ["Initialize"]
+
+# Base class for a shell folder.
+# All child classes use a simple PIDL of the form:
+#  "object_type\0object_name[\0extra ...]"
+class ShellFolderBase:
+    _com_interfaces_ = [shell.IID_IBrowserFrameOptions,
+                        pythoncom.IID_IPersist,
+                        shell.IID_IPersistFolder,
+                        shell.IID_IShellFolder,
+                        ]
+
+    _public_methods_ = IBrowserFrame_Methods + \
+                       IPersistFolder_Methods + \
+                       IShellFolder_Methods
+
+    def GetFrameOptions(self, mask):
+        #print "GetFrameOptions", self, mask
+        return 0
+    def ParseDisplayName(self, hwnd, reserved, displayName, attr):
+        print "ParseDisplayName", displayName
+        # return cchEaten, pidl, attr
+    def BindToStorage(self, pidl, bc, iid):
+        print "BTS", iid, IIDToInterfaceName(iid)
+    def BindToObject(self, pidl, bc, iid):
+        # We may be passed a set of relative PIDLs here - ie
+        # [pidl_of_dir, pidl_of_child_dir, pidl_of_file, pidl_of_function]
+        # But each of our PIDLs keeps the fully qualified name anyway - so
+        # just jump directly to the last.
+        final_pidl = pidl[-1]
+        typ, extra = final_pidl.split('\0', 1)
+        if typ == "directory":
+            klass = ShellFolderDirectory
+        elif typ == "file":
+            klass = ShellFolderFile
+        elif typ == "object":
+            klass = ShellFolderObject
+        else:
+            raise RuntimeError, "What is " + repr(typ)
+        ret = wrap(klass(extra), iid, useDispatcher = (debug>0))
+        return ret
+
+# A ShellFolder for an object with CHILDREN on the file system
+# Note that this means our "File" folder is *not* a 'FileSystem' folder,
# as its children (functions and classes) are not on the file system.
+# 
+class ShellFolderFileSystem(ShellFolderBase):
+    def _GetFolderAndPIDLForPIDL(self, my_idl):
+        typ, name = my_idl[0].split('\0')
+        return GetFolderAndPIDLForPath(name)
+    # Interface methods
+    def CompareIDs(self, param, id1, id2):
+        return cmp(id1, id2)
+    def GetUIObjectOf(self, hwndOwner, pidls, iid, inout):
+        # delegate to the shell.
+        assert len(pidls)==1, "oops - arent expecting more than one!)"
+        pidl = pidls[0]
+        folder, child_pidl = self._GetFolderAndPIDLForPIDL(pidl)
+        try:
+            inout, ret = folder.GetUIObjectOf(hwndOwner, [child_pidl], iid,
+                                              inout, pythoncom.IID_IUnknown)
+        except pythoncom.com_error, (hr, desc, exc, arg):
+            raise COMException(hresult=hr)
+        return inout, ret
+        # return object of IID
+    def GetDisplayNameOf(self, pidl, flags):
+        # delegate to the shell.
+        folder, child_pidl = self._GetFolderAndPIDLForPIDL(pidl)
+        ret = folder.GetDisplayNameOf(child_pidl, flags)
+        return ret
+    def GetAttributesOf(self, pidls, attrFlags):
+        ret_flags = -1
+        for pidl in pidls:
+            pidl = pidl[0] # ??
+            typ, name = pidl.split('\0')
+            flags = shellcon.SHGFI_ATTRIBUTES 
+            rc, info = shell.SHGetFileInfo(name, 0, flags)
+            hIcon, iIcon, dwAttr, name, typeName = info
+            # All our items, even files, have sub-items
+            extras = shellcon.SFGAO_HASSUBFOLDER | \
+                     shellcon.SFGAO_FOLDER | \
+                     shellcon.SFGAO_FILESYSANCESTOR
+            ret_flags &= (dwAttr | extras)
+        return ret_flags
+ 
class ShellFolderDirectory(ShellFolderFileSystem):
    """A shell folder exposing one file-system directory."""
    def __init__(self, path):
        self.path = os.path.abspath(path)

    def CreateViewObject(self, hwnd, iid):
        # delegate to the shell - explorer's own view of the real directory.
        folder, child_pidl = GetFolderAndPIDLForPath(self.path)
        return folder.CreateViewObject(hwnd, iid)

    def EnumObjects(self, hwndOwner, flags):
        # Children are sub-directories plus .py/.pyw files; everything else
        # is ignored.
        pidls = []
        for child in os.listdir(self.path):
            fqn = os.path.join(self.path, child)
            if os.path.isdir(fqn):
                pidls.append(["directory\0" + fqn])
            elif os.path.splitext(child)[1] in [".py", ".pyw"]:
                pidls.append(["file\0" + fqn])
        return NewEnum(pidls, iid=shell.IID_IEnumIDList,
                       useDispatcher=(debug>0))
+  
# As per comments above, even though this manages a file, it is *not* a
# ShellFolderFileSystem, as the children are not on the file system.
class ShellFolderFile(ShellFolderBase):
    def __init__(self, path):
        self.path = os.path.abspath(path)

    def EnumObjects(self, hwndOwner, flags):
        # One child per top-level name pyclbr finds in the file.
        pidls = [["object\0" + self.path + "\0" + name]
                 for name in get_clbr_for_file(self.path)]
        return NewEnum(pidls, iid=shell.IID_IEnumIDList,
                       useDispatcher=(debug>0))

    def GetAttributesOf(self, pidls, attrFlags):
        ret_flags = -1
        for pidl in pidls:
            assert len(pidl)==1, "Expecting relative pidls"
            typ, filename, obname = pidl[0].split('\0')
            ob = get_clbr_for_file(filename)[obname]
            flags = shellcon.SFGAO_BROWSABLE | shellcon.SFGAO_FOLDER | \
                    shellcon.SFGAO_FILESYSANCESTOR
            # Only items with a 'methods' dict (ie, classes) can expand.
            if hasattr(ob, "methods"):
                flags |= shellcon.SFGAO_HASSUBFOLDER
            ret_flags &= flags
        return ret_flags

    def GetDisplayNameOf(self, pidl, flags):
        assert len(pidl)==1, "Expecting relative PIDL"
        typ, fname, obname = pidl[0].split('\0')
        if flags & shellcon.SHGDN_INFOLDER:
            return obname
        # SHGDN_NORMAL is the default; no need to look at the SHGDN_FOR*
        # modifiers.
        return os.path.splitext(fname)[0] + "." + obname

    def CreateViewObject(self, hwnd, iid):
        # Display the file itself using our Scintilla view.
        return wrap(ScintillaShellView(hwnd, self.path),
                    useDispatcher=debug>0)
+
# A ShellFolder for our Python objects - a class, or a class's method.
class ShellFolderObject(ShellFolderBase):
    def __init__(self, details):
        # details is "path\0class_name[.method_name]"
        self.path, rest = details.split('\0')
        if rest.find(".")>0:
            self.class_name, self.method_name = rest.split(".")
        else:
            self.class_name = rest
            self.method_name = None

    def CreateViewObject(self, hwnd, iid):
        # Show the source file, jumping to the definition's line.
        item = get_clbr_for_file(self.path)[self.class_name]
        if self.method_name is None:
            lineno = item.lineno
        else:
            lineno = item.methods[self.method_name]
        return wrap(ScintillaShellView(hwnd, self.path, lineno),
                    useDispatcher=debug>0)

    def EnumObjects(self, hwndOwner, flags):
        assert self.method_name is None, "Should not be enuming methods!"
        methods = get_clbr_for_file(self.path)[self.class_name].methods
        pidls = [["object\0" + self.path + "\0" +
                  self.class_name + "." + func_name]
                 for func_name in methods.keys()]
        return NewEnum(pidls, iid=shell.IID_IEnumIDList,
                       useDispatcher=(debug>0))

    def GetDisplayNameOf(self, pidl, flags):
        assert len(pidl)==1, "Expecting relative PIDL"
        typ, fname, obname = pidl[0].split('\0')
        class_name, method_name = obname.split(".")
        if flags & shellcon.SHGDN_INFOLDER:
            return method_name
        # SHGDN_NORMAL is the default; ignore the SHGDN_FOR* modifiers.
        return os.path.splitext(fname)[0] + "." + obname

    def GetAttributesOf(self, pidls, attrFlags):
        flags = shellcon.SFGAO_BROWSABLE | shellcon.SFGAO_FOLDER | \
                shellcon.SFGAO_FILESYSANCESTOR
        ret_flags = -1
        for pidl in pidls:
            assert len(pidl)==1, "Expecting relative pidls"
            ret_flags &= flags
        return ret_flags
+
# The "Root" folder of our namespace.  As all children are directories,
# it is derived from ShellFolderFileSystem
# This is the only COM object actually registered and externally created.
class ShellFolderRoot(ShellFolderFileSystem):
    _reg_progid_ = "Python.ShellExtension.Folder"
    _reg_desc_ = "Python Path Shell Browser"
    _reg_clsid_ = "{f6287035-3074-4cb5-a8a6-d3c80e206944}"

    def GetClassID(self):
        return self._reg_clsid_

    def Initialize(self, pidl):
        # This is the PIDL of us, as created by the shell.  This is our
        # top-level ID.  All other items under us have PIDLs defined
        # by us - see the notes at the top of the file.
        #print "Initialize called with pidl", repr(pidl)
        pass

    def CreateViewObject(self, hwnd, iid):
        # The root itself has no view - only its children do.
        raise COMException(hresult=winerror.E_NOTIMPL)

    def EnumObjects(self, hwndOwner, flags):
        # One 'directory' child per existing directory on sys.path.
        items = [["directory\0" + p] for p in sys.path if os.path.isdir(p)]
        return NewEnum(items, iid=shell.IID_IEnumIDList,
                       useDispatcher=(debug>0))
+
# A Simple shell view implementation
# This uses scintilla to display a filename, and optionally jump to a line
# number.
class ScintillaShellView:
    _public_methods_ = IShellView_Methods
    _com_interfaces_ = [pythoncom.IID_IOleWindow,
                        shell.IID_IShellView,
                        ]
    def __init__(self, hwnd, filename, lineno = None):
        # hwnd - the parent window; filename - the file to display;
        # lineno - optional line to jump to once the file is loaded.
        self.filename = filename
        self.lineno = lineno
        self.hwnd_parent = hwnd
        self.hwnd = None
    def _SendSci(self, msg, wparam=0, lparam=0):
        # Helper - send a windows message to our scintilla control.
        return win32gui.SendMessage(self.hwnd, msg, wparam, lparam)
    # IShellView
    def CreateViewWindow(self, prev, settings, browser, rect):
        print "CreateViewWindow", prev, settings, browser, rect
        # Make sure scintilla.dll is loaded.  If not, find it on sys.path
        # (which it generally is for Pythonwin)
        try:
            win32api.GetModuleHandle("Scintilla.dll")
        except win32api.error:
            for p in sys.path:
                fname = os.path.join(p, "Scintilla.dll")
                if not os.path.isfile(fname):
                    fname = os.path.join(p, "Build", "Scintilla.dll")
                if os.path.isfile(fname):
                    win32api.LoadLibrary(fname)
                    break
            else:
                # Not on sys.path at all - we can't display anything.
                raise RuntimeError, "Can't find scintilla!"

        style = win32con.WS_CHILD | win32con.WS_VSCROLL | \
                win32con.WS_HSCROLL | win32con.WS_CLIPCHILDREN | \
                win32con.WS_VISIBLE
        self.hwnd = win32gui.CreateWindow("Scintilla", "Scintilla", style,
                              rect[0], rect[1], rect[2]-rect[0], rect[3]-rect[1],
                              self.hwnd_parent, 1000, 0, None)
        # "U" mode - universal newlines, so scintilla sees consistent EOLs.
        file_data = file(self.filename, "U").read()

        self._SetupLexer()
        self._SendSci(scintillacon.SCI_SETTEXT, 0, file_data)
        if self.lineno != None:
            self._SendSci(scintillacon.SCI_GOTOLINE, self.lineno)
        print "Scintilla's hwnd is", self.hwnd

    def _SetupLexer(self):
        # Configure the control for Python syntax coloring.  Each styles
        # entry is (format-tuple, background-or-None, scintilla-style-num);
        # of the format tuple, only [1] (bold/italic bits), [2] (size*20)
        # and [4] (fore colour) are used in the loop below.
        h = self.hwnd
        styles = [
            ((0, 0, 200, 0, 0x808080), None,     scintillacon.SCE_P_DEFAULT ),
            ((0, 2, 200, 0, 0x008000), None,     scintillacon.SCE_P_COMMENTLINE ),
            ((0, 2, 200, 0, 0x808080), None,     scintillacon.SCE_P_COMMENTBLOCK ),
            ((0, 0, 200, 0, 0x808000), None,     scintillacon.SCE_P_NUMBER ),
            ((0, 0, 200, 0, 0x008080), None,     scintillacon.SCE_P_STRING ),
            ((0, 0, 200, 0, 0x008080), None,     scintillacon.SCE_P_CHARACTER ),
            ((0, 0, 200, 0, 0x008080), None,     scintillacon.SCE_P_TRIPLE ),
            ((0, 0, 200, 0, 0x008080), None,     scintillacon.SCE_P_TRIPLEDOUBLE),
            ((0, 0, 200, 0, 0x000000), 0x008080, scintillacon.SCE_P_STRINGEOL),
            ((0, 1, 200, 0, 0x800000), None,     scintillacon.SCE_P_WORD),
            ((0, 1, 200, 0, 0xFF0000), None,     scintillacon.SCE_P_CLASSNAME ),
            ((0, 1, 200, 0, 0x808000), None,     scintillacon.SCE_P_DEFNAME),
            ((0, 0, 200, 0, 0x000000), None,     scintillacon.SCE_P_OPERATOR),
            ((0, 0, 200, 0, 0x000000), None,     scintillacon.SCE_P_IDENTIFIER ),
                 ]
        self._SendSci(scintillacon.SCI_SETLEXER, scintillacon.SCLEX_PYTHON, 0)
        self._SendSci(scintillacon.SCI_SETSTYLEBITS, 5)
        baseFormat = (-402653169, 0, 200, 0, 0, 0, 49, 'Courier New')
        for f, bg, stylenum in styles:
            self._SendSci(scintillacon.SCI_STYLESETFORE, stylenum, f[4])
            self._SendSci(scintillacon.SCI_STYLESETFONT, stylenum, baseFormat[7])
            if f[1] & 1: self._SendSci(scintillacon.SCI_STYLESETBOLD, stylenum, 1)
            else: self._SendSci(scintillacon.SCI_STYLESETBOLD, stylenum, 0)
            if f[1] & 2: self._SendSci(scintillacon.SCI_STYLESETITALIC, stylenum, 1)
            else: self._SendSci(scintillacon.SCI_STYLESETITALIC, stylenum, 0)
            self._SendSci(scintillacon.SCI_STYLESETSIZE, stylenum, int(baseFormat[2]/20))
            if bg is not None:
                self._SendSci(scintillacon.SCI_STYLESETBACK, stylenum, bg)
            self._SendSci(scintillacon.SCI_STYLESETEOLFILLED, stylenum, 1) # Only needed for unclosed strings.

    def DestroyViewWindow(self):
        win32gui.DestroyWindow(self.hwnd)
        self.hwnd = None
        print "Destroyed scintilla window"

    def TranslateAccelerator(self, msg):
        # S_FALSE - we didn't consume the message.
        return winerror.S_FALSE
+
+def DllRegisterServer():
+    import _winreg
+    key = _winreg.CreateKey(_winreg.HKEY_LOCAL_MACHINE,
+                            "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\" \
+                            "Explorer\\Desktop\\Namespace\\" + \
+                            ShellFolderRoot._reg_clsid_)
+    _winreg.SetValueEx(key, None, 0, _winreg.REG_SZ, ShellFolderRoot._reg_desc_)
+    # And special shell keys under our CLSID
+    key = _winreg.CreateKey(_winreg.HKEY_CLASSES_ROOT,
+                        "CLSID\\" + ShellFolderRoot._reg_clsid_ + "\\ShellFolder")
+    # 'Attributes' is an int stored as a binary! use struct
+    attr = shellcon.SFGAO_FOLDER | shellcon.SFGAO_HASSUBFOLDER | \
+           shellcon.SFGAO_BROWSABLE
+    import struct
+    s = struct.pack("i", attr)
+    _winreg.SetValueEx(key, "Attributes", 0, _winreg.REG_BINARY, s)
+    print ShellFolderRoot._reg_desc_, "registration complete."
+
+def DllUnregisterServer():
+    import _winreg
+    try:
+        key = _winreg.DeleteKey(_winreg.HKEY_LOCAL_MACHINE,
+                            "SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\" \
+                            "Explorer\\Desktop\\Namespace\\" + \
+                            ShellFolderRoot._reg_clsid_)
+    except WindowsError, details:
+        import errno
+        if details.errno != errno.ENOENT:
+            raise
+    print ShellFolderRoot._reg_desc_, "unregistration complete."
+
if __name__ == '__main__':
    # Register/unregister via the standard win32com command-line helper.
    from win32com.server import register
    register.UseCommandLine(ShellFolderRoot,
                            debug=debug,
                            finalize_register=DllRegisterServer,
                            finalize_unregister=DllUnregisterServer)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/shellexecuteex.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/shellexecuteex.py
new file mode 100644
index 0000000..f73c729
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/shellexecuteex.py
@@ -0,0 +1,15 @@
+from win32com.shell import shell, shellcon
+import win32con
+
+def ExplorePIDL():
+    pidl = shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_DESKTOP)
+    print "The desktop is at", shell.SHGetPathFromIDList(pidl)
+    shell.ShellExecuteEx(fMask=shellcon.SEE_MASK_NOCLOSEPROCESS,
+                         nShow=win32con.SW_NORMAL,
+                         lpClass="folder", 
+                         lpVerb="explore", 
+                         lpIDList=pidl)
+    print "Done!"
+
+if __name__=='__main__':
+    ExplorePIDL()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/viewstate.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/viewstate.py
new file mode 100644
index 0000000..eb82a52
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/viewstate.py
@@ -0,0 +1,41 @@
+"""
+Demonstrates how to propagate a folder's view state to all its subfolders
+The format of the ColInfo stream is apparently undocumented, but
+it can be read raw from one folder and copied to another's view state.
+"""
+
+from win32com.shell import shell, shellcon
+import pythoncom
+import os, sys
+
# Use the directory containing the Python executable as the 'template'
# folder whose saved Explorer view state will be copied to its subfolders.
template_folder=os.path.split(sys.executable)[0]
print 'Template folder:', template_folder
template_pidl=shell.SHILCreateFromPath(template_folder,0)[0]
# Property bag holding the folder's saved view settings.
template_pb=shell.SHGetViewStatePropertyBag(template_pidl, "Shell", shellcon.SHGVSPB_FOLDERNODEFAULTS, pythoncom.IID_IPropertyBag)

# Column info has to be read as a stream
# This may blow up if folder has never been opened in Explorer and has no ColInfo yet
template_iunk=template_pb.Read('ColInfo',pythoncom.VT_UNKNOWN)
template_stream=template_iunk.QueryInterface(pythoncom.IID_IStream)
# Stat()[2] is the stream size in bytes - read the whole blob.
streamsize=template_stream.Stat()[2]
template_colinfo=template_stream.Read(streamsize)
+
def update_colinfo(not_used, dir_name, fnames):
	# os.path.walk visitor: copy the template view state onto every
	# subdirectory found under dir_name.
	for fname in fnames:
		full_fname=os.path.join(dir_name,fname)
		if os.path.isdir(full_fname):
			print full_fname
			pidl=shell.SHILCreateFromPath(full_fname,0)[0]
			pb=shell.SHGetViewStatePropertyBag(pidl, "Shell", shellcon.SHGVSPB_FOLDERNODEFAULTS, pythoncom.IID_IPropertyBag)
			## not all folders already have column info, and we're replacing it anyway
			pb.Write('ColInfo', template_stream)
			# Re-read the property as a stream and overwrite its raw bytes
			# with the template's ColInfo blob.
			iunk=pb.Read('ColInfo',pythoncom.VT_UNKNOWN)
			s=iunk.QueryInterface(pythoncom.IID_IStream)
			s.Write(template_colinfo)
			s=None
			## attribute names read from registry, can't find any way to enumerate IPropertyBag
			for attr in ('Address','Buttons','Col','Vid','WFlags','FFlags','Sort','SortDir','ShowCmd','FolderType','Mode','Rev'):
				pb.Write(attr, template_pb.Read(attr))
			pb=None
os.path.walk(template_folder,update_colinfo,None)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/walk_shell_folders.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/walk_shell_folders.py
new file mode 100644
index 0000000..d6c2960c
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/demos/walk_shell_folders.py
@@ -0,0 +1,24 @@
+# A little sample that walks from the desktop into child
+# items.
+from win32com.shell import shell, shellcon
+
+def walk(folder, depth=2, indent=""):
+    try:
+        pidls = folder.EnumObjects(0, shellcon.SHCONTF_FOLDERS)
+    except shell.error:
+        # no items
+        return
+    for pidl in pidls:
+        dn = folder.GetDisplayNameOf(pidl, 
+                                     shellcon.SHGDN_NORMAL)
+        print indent, dn
+        if depth:
+            try:
+                child = folder.BindToObject(pidl, None, 
+                                      shell.IID_IShellFolder)
+            except shell.error:
+                pass
+            else:
+                walk(child, depth-1, indent+" ")
+
+walk(shell.SHGetDesktopFolder())
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/shell.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/shell.pyd
new file mode 100644
index 0000000..a52ebca
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/shell.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/shellcon.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/shellcon.py
new file mode 100644
index 0000000..8695998
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/shellcon.py
@@ -0,0 +1,956 @@
+# Generated by h2py from \mssdk\include\shlobj.h and shellapi.h
+WM_USER = 1024
+DROPEFFECT_NONE = 0
+DROPEFFECT_COPY = 1
+DROPEFFECT_MOVE = 2
+DROPEFFECT_LINK = 4
+DROPEFFECT_SCROLL = -2147483648
+
+FO_MOVE = 1
+FO_COPY = 2
+FO_DELETE = 3
+FO_RENAME = 4
+FOF_MULTIDESTFILES = 1
+FOF_CONFIRMMOUSE = 2
+FOF_SILENT = 4
+FOF_RENAMEONCOLLISION = 8
+FOF_NOCONFIRMATION = 16
+FOF_WANTMAPPINGHANDLE = 32
+FOF_ALLOWUNDO = 64
+FOF_FILESONLY = 128
+FOF_SIMPLEPROGRESS = 256
+FOF_NOCONFIRMMKDIR = 512
+FOF_NOERRORUI = 1024
+FOF_NOCOPYSECURITYATTRIBS = 2048
+PO_DELETE = 19
+PO_RENAME = 20
+PO_PORTCHANGE = 32
+PO_REN_PORT = 52
+SE_ERR_FNF = 2
+SE_ERR_PNF = 3
+SE_ERR_ACCESSDENIED = 5
+SE_ERR_OOM = 8
+SE_ERR_DLLNOTFOUND = 32
+SE_ERR_SHARE = 26
+SE_ERR_ASSOCINCOMPLETE = 27
+SE_ERR_DDETIMEOUT = 28
+SE_ERR_DDEFAIL = 29
+SE_ERR_DDEBUSY = 30
+SE_ERR_NOASSOC = 31
+SEE_MASK_CLASSNAME = 1
+SEE_MASK_CLASSKEY = 3
+SEE_MASK_IDLIST = 4
+SEE_MASK_INVOKEIDLIST = 12
+SEE_MASK_ICON = 16
+SEE_MASK_HOTKEY = 32
+SEE_MASK_NOCLOSEPROCESS = 64
+SEE_MASK_CONNECTNETDRV = 128
+SEE_MASK_FLAG_DDEWAIT = 256
+SEE_MASK_DOENVSUBST = 512
+SEE_MASK_FLAG_NO_UI = 1024
+SEE_MASK_UNICODE = 16384
+SEE_MASK_NO_CONSOLE = 32768
+SEE_MASK_ASYNCOK = 1048576
+SEE_MASK_HMONITOR = 2097152
+SEE_MASK_CLASSNAME = 1
+SEE_MASK_CLASSKEY = 3
+SEE_MASK_IDLIST = 4
+SEE_MASK_INVOKEIDLIST = 12
+SEE_MASK_ICON = 16
+SEE_MASK_HOTKEY = 32
+SEE_MASK_NOCLOSEPROCESS = 64
+SEE_MASK_CONNECTNETDRV = 128
+SEE_MASK_FLAG_DDEWAIT = 256
+SEE_MASK_DOENVSUBST = 512
+SEE_MASK_FLAG_NO_UI = 1024
+SEE_MASK_UNICODE = 16384
+SEE_MASK_NO_CONSOLE = 32768
+SEE_MASK_ASYNCOK = 1048576
+SEE_MASK_HMONITOR = 2097152
+SHERB_NOCONFIRMATION = 1
+SHERB_NOPROGRESSUI = 2
+SHERB_NOSOUND = 4
+NIM_ADD = 0
+NIM_MODIFY = 1
+NIM_DELETE = 2
+NIF_MESSAGE = 1
+NIF_ICON = 2
+NIF_TIP = 4
+SHGFI_ICON = 256
+SHGFI_DISPLAYNAME = 512
+SHGFI_TYPENAME = 1024
+SHGFI_ATTRIBUTES = 2048
+SHGFI_ICONLOCATION = 4096
+SHGFI_EXETYPE = 8192
+SHGFI_SYSICONINDEX = 16384
+SHGFI_LINKOVERLAY = 32768
+SHGFI_SELECTED = 65536
+SHGFI_ATTR_SPECIFIED = 131072
+SHGFI_LARGEICON = 0
+SHGFI_SMALLICON = 1
+SHGFI_OPENICON = 2
+SHGFI_SHELLICONSIZE = 4
+SHGFI_PIDL = 8
+SHGFI_USEFILEATTRIBUTES = 16
+SHGNLI_PIDL = 1
+SHGNLI_PREFIXNAME = 2
+SHGNLI_NOUNIQUE = 4
+PRINTACTION_OPEN = 0
+PRINTACTION_PROPERTIES = 1
+PRINTACTION_NETINSTALL = 2
+PRINTACTION_NETINSTALLLINK = 3
+PRINTACTION_TESTPAGE = 4
+PRINTACTION_OPENNETPRN = 5
+PRINTACTION_DOCUMENTDEFAULTS = 6
+PRINTACTION_SERVERPROPERTIES = 7
+
+
+CMF_NORMAL = 0
+CMF_DEFAULTONLY = 1
+CMF_VERBSONLY = 2
+CMF_EXPLORE = 4
+CMF_NOVERBS = 8
+CMF_CANRENAME = 16
+CMF_NODEFAULT = 32
+CMF_INCLUDESTATIC = 64
+CMF_RESERVED = -65536
GCS_VERBA = 0
GCS_HELPTEXTA = 1
GCS_VALIDATEA = 2
GCS_VERBW = 4
GCS_HELPTEXTW = 5
GCS_VALIDATEW = 6
GCS_UNICODE = 4
# h2py emitted both sides of the '#ifdef UNICODE' branch, so the ANSI (A)
# assignments below overwrite the Unicode (W) ones - the effective values
# of GCS_VERB/GCS_HELPTEXT/GCS_VALIDATE are the *A constants.
GCS_VERB = GCS_VERBW
GCS_HELPTEXT = GCS_HELPTEXTW
GCS_VALIDATE = GCS_VALIDATEW
GCS_VERB = GCS_VERBA
GCS_HELPTEXT = GCS_HELPTEXTA
GCS_VALIDATE = GCS_VALIDATEA
+CMDSTR_NEWFOLDERA = "NewFolder"
+CMDSTR_VIEWLISTA = "ViewList"
+CMDSTR_VIEWDETAILSA = "ViewDetails"
+CMDSTR_NEWFOLDER = CMDSTR_NEWFOLDERA
+CMDSTR_VIEWLIST = CMDSTR_VIEWLISTA
+CMDSTR_VIEWDETAILS = CMDSTR_VIEWDETAILSA
+CMIC_MASK_HOTKEY = SEE_MASK_HOTKEY
+CMIC_MASK_ICON = SEE_MASK_ICON
+CMIC_MASK_FLAG_NO_UI = SEE_MASK_FLAG_NO_UI
+CMIC_MASK_UNICODE = SEE_MASK_UNICODE
+CMIC_MASK_NO_CONSOLE = SEE_MASK_NO_CONSOLE
+CMIC_MASK_ASYNCOK = SEE_MASK_ASYNCOK
+CMIC_MASK_PTINVOKE = 536870912
+GIL_OPENICON = 1
+GIL_FORSHELL = 2
+GIL_ASYNC = 32
+GIL_SIMULATEDOC = 1
+GIL_PERINSTANCE = 2
+GIL_PERCLASS = 4
+GIL_NOTFILENAME = 8
+GIL_DONTCACHE = 16
+ISIOI_ICONFILE = 1
+ISIOI_ICONINDEX = 2
+ISIOI_SYSIMAGELISTINDEX = 4
+FO_MOVE = 1
+FO_COPY = 2
+FO_DELETE = 3
+FO_RENAME = 4
+FOF_MULTIDESTFILES = 1
+FOF_CONFIRMMOUSE = 2
+FOF_SILENT = 4
+FOF_RENAMEONCOLLISION = 8
+FOF_NOCONFIRMATION = 16
+FOF_WANTMAPPINGHANDLE = 32
+FOF_ALLOWUNDO = 64
+FOF_FILESONLY = 128
+FOF_SIMPLEPROGRESS = 256
+FOF_NOCONFIRMMKDIR = 512
+FOF_NOERRORUI = 1024
+FOF_NOCOPYSECURITYATTRIBS = 2048
+PO_DELETE = 19
+PO_RENAME = 20
+PO_PORTCHANGE = 32
+PO_REN_PORT = 52
+FVSIF_RECT = 1
+FVSIF_PINNED = 2
+FVSIF_NEWFAILED = 134217728
+FVSIF_NEWFILE = -2147483648
+FVSIF_CANVIEWIT = 1073741824
+FCIDM_SHVIEWFIRST = 0
+FCIDM_SHVIEWLAST = 32767
+FCIDM_BROWSERFIRST = 40960
+FCIDM_BROWSERLAST = 48896
+FCIDM_GLOBALFIRST = 32768
+FCIDM_GLOBALLAST = 40959
+FCIDM_MENU_FILE = (FCIDM_GLOBALFIRST+0)
+FCIDM_MENU_EDIT = (FCIDM_GLOBALFIRST+64)
+FCIDM_MENU_VIEW = (FCIDM_GLOBALFIRST+128)
+FCIDM_MENU_VIEW_SEP_OPTIONS = (FCIDM_GLOBALFIRST+129)
+FCIDM_MENU_TOOLS = (FCIDM_GLOBALFIRST+192)
+FCIDM_MENU_TOOLS_SEP_GOTO = (FCIDM_GLOBALFIRST+193)
+FCIDM_MENU_HELP = (FCIDM_GLOBALFIRST+256)
+FCIDM_MENU_FIND = (FCIDM_GLOBALFIRST+320)
+FCIDM_MENU_EXPLORE = (FCIDM_GLOBALFIRST+336)
+FCIDM_MENU_FAVORITES = (FCIDM_GLOBALFIRST+368)
+FCIDM_TOOLBAR = (FCIDM_BROWSERFIRST + 0)
+FCIDM_STATUS = (FCIDM_BROWSERFIRST + 1)
+IDC_OFFLINE_HAND = 103
+SBSP_DEFBROWSER = 0
+SBSP_SAMEBROWSER = 1
+SBSP_NEWBROWSER = 2
+SBSP_DEFMODE = 0
+SBSP_OPENMODE = 16
+SBSP_EXPLOREMODE = 32
+SBSP_ABSOLUTE = 0
+SBSP_RELATIVE = 4096
+SBSP_PARENT = 8192
+SBSP_NAVIGATEBACK = 16384
+SBSP_NAVIGATEFORWARD = 32768
+SBSP_ALLOW_AUTONAVIGATE = 65536
+SBSP_INITIATEDBYHLINKFRAME = -2147483648
+SBSP_REDIRECT = 1073741824
+SBSP_WRITENOHISTORY = 134217728
+SBSP_NOAUTOSELECT = 67108864
+FCW_STATUS = 1
+FCW_TOOLBAR = 2
+FCW_TREE = 3
+FCW_INTERNETBAR = 6
+FCW_PROGRESS = 8
+FCT_MERGE = 1
+FCT_CONFIGABLE = 2
+FCT_ADDTOEND = 4
+CDBOSC_SETFOCUS = 0
+CDBOSC_KILLFOCUS = 1
+CDBOSC_SELCHANGE = 2
+CDBOSC_RENAME = 3
+SVSI_DESELECT = 0
+SVSI_SELECT = 1
+SVSI_EDIT = 3
+SVSI_DESELECTOTHERS = 4
+SVSI_ENSUREVISIBLE = 8
+SVSI_FOCUSED = 16
+SVSI_TRANSLATEPT = 32
SVGIO_BACKGROUND = 0
SVGIO_SELECTION = 1
SVGIO_ALLVIEW = 2
# The h2py conversion left trailing commas on the next two lines, which
# made them 1-tuples ((3,) and (15,)) instead of ints - broken as bitmask
# values.  Fixed to plain ints.
SVGIO_CHECKED = 0x3
SVGIO_TYPE_MASK = 0xf
SVGIO_FLAG_VIEWORDER = -2147483648 # 0x80000000
+STRRET_WSTR = 0
+STRRET_OFFSET = 1
+STRRET_CSTR = 2
+CSIDL_DESKTOP = 0
+CSIDL_INTERNET = 1
+CSIDL_PROGRAMS = 2
+CSIDL_CONTROLS = 3
+CSIDL_PRINTERS = 4
+CSIDL_PERSONAL = 5
+CSIDL_FAVORITES = 6
+CSIDL_STARTUP = 7
+CSIDL_RECENT = 8
+CSIDL_SENDTO = 9
+CSIDL_BITBUCKET = 10
+CSIDL_STARTMENU = 11
+CSIDL_MYDOCUMENTS = 12
+CSIDL_MYMUSIC = 13
+CSIDL_MYVIDEO = 14
+CSIDL_DESKTOPDIRECTORY = 16
+CSIDL_DRIVES = 17
+CSIDL_NETWORK = 18
+CSIDL_NETHOOD = 19
+CSIDL_FONTS = 20
+CSIDL_TEMPLATES = 21
+CSIDL_COMMON_STARTMENU = 22
+CSIDL_COMMON_PROGRAMS = 23
+CSIDL_COMMON_STARTUP = 24
+CSIDL_COMMON_DESKTOPDIRECTORY = 25
+CSIDL_APPDATA = 26
+CSIDL_PRINTHOOD = 27
+CSIDL_LOCAL_APPDATA = 28
+CSIDL_ALTSTARTUP = 29
+CSIDL_COMMON_ALTSTARTUP = 30
+CSIDL_COMMON_FAVORITES = 31
+CSIDL_INTERNET_CACHE = 32
+CSIDL_COOKIES = 33
+CSIDL_HISTORY = 34
+CSIDL_COMMON_APPDATA = 35
+CSIDL_WINDOWS = 36
+CSIDL_SYSTEM = 37
+CSIDL_PROGRAM_FILES = 38
+CSIDL_MYPICTURES = 39
+CSIDL_PROFILE = 40
+CSIDL_SYSTEMX86 = 41
+CSIDL_PROGRAM_FILESX86 = 42
+CSIDL_PROGRAM_FILES_COMMON = 43
+CSIDL_PROGRAM_FILES_COMMONX86 = 44
+CSIDL_COMMON_TEMPLATES = 45
+CSIDL_COMMON_DOCUMENTS = 46
+CSIDL_COMMON_ADMINTOOLS = 47
+CSIDL_ADMINTOOLS  = 48
+CSIDL_CONNECTIONS = 49
+CSIDL_COMMON_MUSIC = 53
+CSIDL_COMMON_PICTURES = 54
+CSIDL_COMMON_VIDEO = 55
+CSIDL_RESOURCES = 56
+CSIDL_RESOURCES_LOCALIZED = 57
+CSIDL_COMMON_OEM_LINKS = 58
+CSIDL_CDBURN_AREA = 59
+# 60 unused
+CSIDL_COMPUTERSNEARME = 61
+
+BIF_RETURNONLYFSDIRS = 1
+BIF_DONTGOBELOWDOMAIN = 2
+BIF_STATUSTEXT = 4
+BIF_RETURNFSANCESTORS = 8
+BIF_EDITBOX = 16
+BIF_VALIDATE = 32
+BIF_BROWSEFORCOMPUTER = 4096
+BIF_BROWSEFORPRINTER = 8192
+BIF_BROWSEINCLUDEFILES = 16384
+BFFM_INITIALIZED = 1
+BFFM_SELCHANGED = 2
+BFFM_VALIDATEFAILEDA = 3
+BFFM_VALIDATEFAILEDW = 4
+BFFM_SETSTATUSTEXTA = (WM_USER + 100)
+BFFM_ENABLEOK = (WM_USER + 101)
+BFFM_SETSELECTIONA = (WM_USER + 102)
+BFFM_SETSELECTIONW = (WM_USER + 103)
+BFFM_SETSTATUSTEXTW = (WM_USER + 104)
+BFFM_SETSTATUSTEXT = BFFM_SETSTATUSTEXTW
+BFFM_SETSELECTION = BFFM_SETSELECTIONW
+BFFM_VALIDATEFAILED = BFFM_VALIDATEFAILEDW
+BFFM_SETSTATUSTEXT = BFFM_SETSTATUSTEXTA # ANSI branch of the flattened UNICODE #ifdef; overrides the W aliases above
+BFFM_SETSELECTION = BFFM_SETSELECTIONA # ANSI value wins
+BFFM_VALIDATEFAILED = BFFM_VALIDATEFAILEDA # ANSI value wins
+SFGAO_CANCOPY = DROPEFFECT_COPY
+SFGAO_CANMOVE = DROPEFFECT_MOVE
+SFGAO_CANLINK = DROPEFFECT_LINK
+SFGAO_CANRENAME = 16
+SFGAO_CANDELETE = 32
+SFGAO_HASPROPSHEET = 64
+SFGAO_DROPTARGET = 256
+SFGAO_CAPABILITYMASK = 375
+SFGAO_LINK = 65536
+SFGAO_SHARE = 131072
+SFGAO_READONLY = 262144
+SFGAO_GHOSTED = 524288
+SFGAO_HIDDEN = 524288
+SFGAO_DISPLAYATTRMASK = 983040
+SFGAO_FILESYSANCESTOR = 268435456
+SFGAO_FOLDER = 536870912
+SFGAO_FILESYSTEM = 1073741824
+SFGAO_HASSUBFOLDER = (-2147483648)
+SFGAO_CONTENTSMASK = (-2147483648)
+SFGAO_VALIDATE = 16777216
+SFGAO_REMOVABLE = 33554432
+SFGAO_COMPRESSED = 67108864
+SFGAO_BROWSABLE = 134217728
+SFGAO_NONENUMERATED = 1048576
+SFGAO_NEWCONTENT = 2097152
+SFGAO_STORAGE = 8
+DWFRF_NORMAL = 0
+DWFRF_DELETECONFIGDATA = 1
+DWFAF_HIDDEN = 1
+DBIM_MINSIZE = 1
+DBIM_MAXSIZE = 2
+DBIM_INTEGRAL = 4
+DBIM_ACTUAL = 8
+DBIM_TITLE = 16
+DBIM_MODEFLAGS = 32
+DBIM_BKCOLOR = 64
+DBIMF_NORMAL = 0
+DBIMF_VARIABLEHEIGHT = 8
+DBIMF_DEBOSSED = 32
+DBIMF_BKCOLOR = 64
+DBIF_VIEWMODE_NORMAL = 0
+DBIF_VIEWMODE_VERTICAL = 1
+DBIF_VIEWMODE_FLOATING = 2
+DBIF_VIEWMODE_TRANSPARENT = 4
+COMPONENT_TOP = (2147483647)
+COMP_TYPE_HTMLDOC = 0
+COMP_TYPE_PICTURE = 1
+COMP_TYPE_WEBSITE = 2
+COMP_TYPE_CONTROL = 3
+COMP_TYPE_CFHTML = 4
+COMP_TYPE_MAX = 4
+AD_APPLY_SAVE = 1
+AD_APPLY_HTMLGEN = 2
+AD_APPLY_REFRESH = 4
+AD_APPLY_ALL = (AD_APPLY_SAVE | AD_APPLY_HTMLGEN | AD_APPLY_REFRESH)
+AD_APPLY_FORCE = 8
+AD_APPLY_BUFFERED_REFRESH = 16
+WPSTYLE_CENTER = 0
+WPSTYLE_TILE = 1
+WPSTYLE_STRETCH = 2
+WPSTYLE_MAX = 3
+COMP_ELEM_TYPE = 1
+COMP_ELEM_CHECKED = 2
+COMP_ELEM_DIRTY = 4
+COMP_ELEM_NOSCROLL = 8
+COMP_ELEM_POS_LEFT = 16
+COMP_ELEM_POS_TOP = 32
+COMP_ELEM_SIZE_WIDTH = 64
+COMP_ELEM_SIZE_HEIGHT = 128
+COMP_ELEM_POS_ZINDEX = 256
+COMP_ELEM_SOURCE = 512
+COMP_ELEM_FRIENDLYNAME = 1024
+COMP_ELEM_SUBSCRIBEDURL = 2048
+ADDURL_SILENT = 0X0001
+CFSTR_SHELLIDLIST = "Shell IDList Array"
+CFSTR_SHELLIDLISTOFFSET = "Shell Object Offsets"
+CFSTR_NETRESOURCES = "Net Resource"
+CFSTR_FILEDESCRIPTORA = "FileGroupDescriptor"
+CFSTR_FILEDESCRIPTORW = "FileGroupDescriptorW"
+CFSTR_FILECONTENTS = "FileContents"
+CFSTR_FILENAMEA = "FileName"
+CFSTR_FILENAMEW = "FileNameW"
+CFSTR_PRINTERGROUP = "PrinterFriendlyName"
+CFSTR_FILENAMEMAPA = "FileNameMap"
+CFSTR_FILENAMEMAPW = "FileNameMapW"
+CFSTR_SHELLURL = "UniformResourceLocator"
+CFSTR_INETURLA = CFSTR_SHELLURL
+CFSTR_INETURLW = "UniformResourceLocatorW"
+CFSTR_PREFERREDDROPEFFECT = "Preferred DropEffect"
+CFSTR_PERFORMEDDROPEFFECT = "Performed DropEffect"
+CFSTR_PASTESUCCEEDED = "Paste Succeeded"
+CFSTR_INDRAGLOOP = "InShellDragLoop"
+CFSTR_DRAGCONTEXT = "DragContext"
+CFSTR_MOUNTEDVOLUME = "MountedVolume"
+CFSTR_PERSISTEDDATAOBJECT = "PersistedDataObject"
+CFSTR_TARGETCLSID = "TargetCLSID"
+CFSTR_LOGICALPERFORMEDDROPEFFECT = "Logical Performed DropEffect"
+CFSTR_AUTOPLAY_SHELLIDLISTS = "Autoplay Enumerated IDList Array"
+CFSTR_FILEDESCRIPTOR = CFSTR_FILEDESCRIPTORW
+CFSTR_FILENAME = CFSTR_FILENAMEW
+CFSTR_FILENAMEMAP = CFSTR_FILENAMEMAPW
+CFSTR_FILEDESCRIPTOR = CFSTR_FILEDESCRIPTORA # ANSI branch of the flattened UNICODE #ifdef; overrides the W aliases above
+CFSTR_FILENAME = CFSTR_FILENAMEA # ANSI value wins
+CFSTR_FILENAMEMAP = CFSTR_FILENAMEMAPA # ANSI value wins
+DVASPECT_SHORTNAME = 2
+SHCNE_RENAMEITEM = 1
+SHCNE_CREATE = 2
+SHCNE_DELETE = 4
+SHCNE_MKDIR = 8
+SHCNE_RMDIR = 16
+SHCNE_MEDIAINSERTED = 32
+SHCNE_MEDIAREMOVED = 64
+SHCNE_DRIVEREMOVED = 128
+SHCNE_DRIVEADD = 256
+SHCNE_NETSHARE = 512
+SHCNE_NETUNSHARE = 1024
+SHCNE_ATTRIBUTES = 2048
+SHCNE_UPDATEDIR = 4096
+SHCNE_UPDATEITEM = 8192
+SHCNE_SERVERDISCONNECT = 16384
+SHCNE_UPDATEIMAGE = 32768
+SHCNE_DRIVEADDGUI = 65536
+SHCNE_RENAMEFOLDER = 131072
+SHCNE_FREESPACE = 262144
+SHCNE_EXTENDED_EVENT = 67108864
+SHCNE_ASSOCCHANGED = 134217728
+SHCNE_DISKEVENTS = 145439
+SHCNE_GLOBALEVENTS = 201687520
+SHCNE_ALLEVENTS = 2147483647
+SHCNE_INTERRUPT = -2147483648
+SHCNEE_ORDERCHANGED = 2
+SHCNF_IDLIST = 0
+SHCNF_PATHA = 1
+SHCNF_PRINTERA = 2
+SHCNF_DWORD = 3
+SHCNF_PATHW = 5
+SHCNF_PRINTERW = 6
+SHCNF_TYPE = 255
+SHCNF_FLUSH = 4096
+SHCNF_FLUSHNOWAIT = 8192
+SHCNF_PATH = SHCNF_PATHW
+SHCNF_PRINTER = SHCNF_PRINTERW
+SHCNF_PATH = SHCNF_PATHA # ANSI branch of the flattened UNICODE #ifdef; overrides SHCNF_PATHW alias above
+SHCNF_PRINTER = SHCNF_PRINTERA # ANSI value wins
+QIF_CACHED = 1
+QIF_DONTEXPANDFOLDER = 2
+SHARD_PIDL = 1
+SHARD_PATHA = 2
+SHARD_PATHW = 3
+SHARD_PATH = SHARD_PATHW
+SHARD_PATH = SHARD_PATHA # ANSI branch of the flattened UNICODE #ifdef; overrides the SHARD_PATHW alias above
+SHGDFIL_FINDDATA = 1
+SHGDFIL_NETRESOURCE = 2
+SHGDFIL_DESCRIPTIONID = 3
+SHDID_ROOT_REGITEM = 1
+SHDID_FS_FILE = 2
+SHDID_FS_DIRECTORY = 3
+SHDID_FS_OTHER = 4
+SHDID_COMPUTER_DRIVE35 = 5
+SHDID_COMPUTER_DRIVE525 = 6
+SHDID_COMPUTER_REMOVABLE = 7
+SHDID_COMPUTER_FIXED = 8
+SHDID_COMPUTER_NETDRIVE = 9
+SHDID_COMPUTER_CDROM = 10
+SHDID_COMPUTER_RAMDISK = 11
+SHDID_COMPUTER_OTHER = 12
+SHDID_NET_DOMAIN = 13
+SHDID_NET_SERVER = 14
+SHDID_NET_SHARE = 15
+SHDID_NET_RESTOFNET = 16
+SHDID_NET_OTHER = 17
+PID_IS_URL = 2
+PID_IS_NAME = 4
+PID_IS_WORKINGDIR = 5
+PID_IS_HOTKEY = 6
+PID_IS_SHOWCMD = 7
+PID_IS_ICONINDEX = 8
+PID_IS_ICONFILE = 9
+PID_IS_WHATSNEW = 10
+PID_IS_AUTHOR = 11
+PID_IS_DESCRIPTION = 12
+PID_IS_COMMENT = 13
+PID_INTSITE_WHATSNEW = 2
+PID_INTSITE_AUTHOR = 3
+PID_INTSITE_LASTVISIT = 4
+PID_INTSITE_LASTMOD = 5
+PID_INTSITE_VISITCOUNT = 6
+PID_INTSITE_DESCRIPTION = 7
+PID_INTSITE_COMMENT = 8
+PID_INTSITE_FLAGS = 9
+PID_INTSITE_CONTENTLEN = 10
+PID_INTSITE_CONTENTCODE = 11
+PID_INTSITE_RECURSE = 12
+PID_INTSITE_WATCH = 13
+PID_INTSITE_SUBSCRIPTION = 14
+PID_INTSITE_URL = 15
+PID_INTSITE_TITLE = 16
+PID_INTSITE_CODEPAGE = 18
+PID_INTSITE_TRACKING = 19
+PIDISF_RECENTLYCHANGED = 1
+PIDISF_CACHEDSTICKY = 2
+PIDISF_CACHEIMAGES = 16
+PIDISF_FOLLOWALLLINKS = 32
+PIDISM_GLOBAL = 0
+PIDISM_WATCH = 1
+PIDISM_DONTWATCH = 2
+SSF_SHOWALLOBJECTS = 1
+SSF_SHOWEXTENSIONS = 2
+SSF_SHOWCOMPCOLOR = 8
+SSF_SHOWSYSFILES = 32
+SSF_DOUBLECLICKINWEBVIEW = 128
+SSF_SHOWATTRIBCOL = 256
+SSF_DESKTOPHTML = 512
+SSF_WIN95CLASSIC = 1024
+SSF_DONTPRETTYPATH = 2048
+SSF_SHOWINFOTIP = 8192
+SSF_MAPNETDRVBUTTON = 4096
+SSF_NOCONFIRMRECYCLE = 32768
+SSF_HIDEICONS = 16384
+
+ABM_NEW = 0
+ABM_REMOVE = 1
+ABM_QUERYPOS = 2
+ABM_SETPOS = 3
+ABM_GETSTATE = 4
+ABM_GETTASKBARPOS = 5
+ABM_ACTIVATE = 6
+ABM_GETAUTOHIDEBAR = 7
+ABM_SETAUTOHIDEBAR = 8
+ABM_WINDOWPOSCHANGED = 9
+ABN_STATECHANGE = 0
+ABN_POSCHANGED = 1
+ABN_FULLSCREENAPP = 2
+ABN_WINDOWARRANGE = 3
+ABS_AUTOHIDE = 1
+ABS_ALWAYSONTOP = 2
+ABE_LEFT = 0
+ABE_TOP = 1
+ABE_RIGHT = 2
+ABE_BOTTOM = 3
+def EIRESID(x): return (-1 * (int)(x))
+
+# Some manually added ones
+CSIDL_COMMON_APPDATA = 35
+CSIDL_LOCAL_APPDATA = 28
+
+SHCONTF_FOLDERS         = 32 # for shell browser
+SHCONTF_NONFOLDERS      = 64 # for default view
+SHCONTF_INCLUDEHIDDEN   = 128 # for hidden/system objects
+SHCONTF_INIT_ON_FIRST_NEXT  = 256
+SHCONTF_NETPRINTERSRCH      = 512
+SHCONTF_SHAREABLE           = 1024
+SHCONTF_STORAGE             = 2048
+
+SHGDN_NORMAL            = 0        # default (display purpose)
+SHGDN_INFOLDER          = 1        # displayed under a folder (relative)
+SHGDN_FOREDITING         = 4096   # for in-place editing
+SHGDN_INCLUDE_NONFILESYS = 8192   # if not set, display names for shell name space items that are not in the file system will fail.
+SHGDN_FORADDRESSBAR     = 16384   # for displaying in the address (drives dropdown) bar
+SHGDN_FORPARSING        = 32768   # for ParseDisplayName or path
+
+SHCONTF_FOLDERS         = 32       # for shell browser
+SHCONTF_NONFOLDERS      = 64       # for default view
+SHCONTF_INCLUDEHIDDEN   = 128      # for hidden/system objects
+
+BFO_NONE	= 0
+BFO_BROWSER_PERSIST_SETTINGS	= 1
+BFO_RENAME_FOLDER_OPTIONS_TOINTERNET	= 2
+BFO_BOTH_OPTIONS	= 4
+BIF_PREFER_INTERNET_SHORTCUT	= 8 # NOTE(review): sits in the BFO_ enum; SDK names this BFO_PREFER_INTERNET_SHORTCUT — name kept for compatibility, verify against callers
+BFO_BROWSE_NO_IN_NEW_PROCESS	= 16
+BFO_ENABLE_HYPERLINK_TRACKING	= 32
+BFO_USE_IE_OFFLINE_SUPPORT	= 64
+BFO_SUBSTITUE_INTERNET_START_PAGE	= 128
+BFO_USE_IE_LOGOBANDING	= 256
+BFO_ADD_IE_TOCAPTIONBAR	= 512
+BFO_USE_DIALUP_REF	= 1024
+BFO_USE_IE_TOOLBAR	= 2048
+BFO_NO_PARENT_FOLDER_SUPPORT	= 4096
+BFO_NO_REOPEN_NEXT_RESTART	= 8192
+BFO_GO_HOME_PAGE	= 16384
+BFO_PREFER_IEPROCESS	= 32768
+BFO_SHOW_NAVIGATION_CANCELLED	= 65536
+BFO_QUERY_ALL	= -1
+# From ShlGuid.h
+PID_FINDDATA = 0
+PID_NETRESOURCE = 1
+PID_DESCRIPTIONID = 2
+PID_WHICHFOLDER = 3
+PID_NETWORKLOCATION = 4
+PID_COMPUTERNAME = 5
+PID_DISPLACED_FROM = 2
+PID_DISPLACED_DATE = 3
+PID_SYNC_COPY_IN = 2
+PID_MISC_STATUS = 2
+PID_MISC_ACCESSCOUNT = 3
+PID_MISC_OWNER = 4
+PID_HTMLINFOTIPFILE = 5
+PID_MISC_PICS = 6
+PID_DISPLAY_PROPERTIES = 0
+PID_INTROTEXT = 1
+PIDSI_ARTIST = 2
+PIDSI_SONGTITLE = 3
+PIDSI_ALBUM = 4
+PIDSI_YEAR = 5
+PIDSI_COMMENT = 6
+PIDSI_TRACK = 7
+PIDSI_GENRE = 11
+PIDSI_LYRICS = 12
+PIDDRSI_PROTECTED = 2
+PIDDRSI_DESCRIPTION = 3
+PIDDRSI_PLAYCOUNT = 4
+PIDDRSI_PLAYSTARTS = 5
+PIDDRSI_PLAYEXPIRES = 6
+PIDVSI_STREAM_NAME = 2
+PIDVSI_FRAME_WIDTH = 3
+PIDVSI_FRAME_HEIGHT = 4
+PIDVSI_TIMELENGTH = 7
+PIDVSI_FRAME_COUNT = 5
+PIDVSI_FRAME_RATE = 6
+PIDVSI_DATA_RATE = 8
+PIDVSI_SAMPLE_SIZE = 9
+PIDVSI_COMPRESSION = 10
+PIDVSI_STREAM_NUMBER = 11
+PIDASI_FORMAT = 2
+PIDASI_TIMELENGTH = 3
+PIDASI_AVG_DATA_RATE = 4
+PIDASI_SAMPLE_RATE = 5
+PIDASI_SAMPLE_SIZE = 6
+PIDASI_CHANNEL_COUNT = 7
+PIDASI_STREAM_NUMBER = 8
+PIDASI_STREAM_NAME = 9
+PIDASI_COMPRESSION = 10
+PID_CONTROLPANEL_CATEGORY = 2
+PID_VOLUME_FREE = 2
+PID_VOLUME_CAPACITY = 3
+PID_VOLUME_FILESYSTEM = 4
+PID_SHARE_CSC_STATUS = 2
+PID_LINK_TARGET = 2
+PID_QUERY_RANK = 2
+# From PropIdl.h
+PROPSETFLAG_DEFAULT = ( 0 )
+PROPSETFLAG_NONSIMPLE = ( 1 )
+PROPSETFLAG_ANSI = ( 2 )
+PROPSETFLAG_UNBUFFERED = ( 4 )
+PROPSETFLAG_CASE_SENSITIVE = ( 8 )
+PROPSET_BEHAVIOR_CASE_SENSITIVE = ( 1 )
+PID_DICTIONARY = ( 0 )
+PID_CODEPAGE = ( 1 )
+PID_FIRST_USABLE = ( 2 )
+PID_FIRST_NAME_DEFAULT = ( 4095 )
+PID_LOCALE = ( (-2147483648) )
+PID_MODIFY_TIME = ( (-2147483647) )
+PID_SECURITY = ( (-2147483646) )
+PID_BEHAVIOR = ( (-2147483645) )
+PID_ILLEGAL = ( (-1) )
+PID_MIN_READONLY = ( (-2147483648) )
+PID_MAX_READONLY = ( (-1073741825) )
+PIDDI_THUMBNAIL = 2
+PIDSI_TITLE = 2
+PIDSI_SUBJECT = 3
+PIDSI_AUTHOR = 4
+PIDSI_KEYWORDS = 5
+PIDSI_COMMENTS = 6
+PIDSI_TEMPLATE = 7
+PIDSI_LASTAUTHOR = 8
+PIDSI_REVNUMBER = 9
+PIDSI_EDITTIME = 10
+PIDSI_LASTPRINTED = 11
+PIDSI_CREATE_DTM = 12
+PIDSI_LASTSAVE_DTM = 13
+PIDSI_PAGECOUNT = 14
+PIDSI_WORDCOUNT = 15
+PIDSI_CHARCOUNT = 16
+PIDSI_THUMBNAIL = 17
+PIDSI_APPNAME = 18
+PIDSI_DOC_SECURITY = 19
+PIDDSI_CATEGORY = 2
+PIDDSI_PRESFORMAT = 3
+PIDDSI_BYTECOUNT = 4
+PIDDSI_LINECOUNT = 5
+PIDDSI_PARCOUNT = 6
+PIDDSI_SLIDECOUNT = 7
+PIDDSI_NOTECOUNT = 8
+PIDDSI_HIDDENCOUNT = 9
+PIDDSI_MMCLIPCOUNT = 10
+PIDDSI_SCALE = 11
+PIDDSI_HEADINGPAIR = 12
+PIDDSI_DOCPARTS = 13
+PIDDSI_MANAGER = 14
+PIDDSI_COMPANY = 15
+PIDDSI_LINKSDIRTY = 16
+PIDMSI_EDITOR = 2
+PIDMSI_SUPPLIER = 3
+PIDMSI_SOURCE = 4
+PIDMSI_SEQUENCE_NO = 5
+PIDMSI_PROJECT = 6
+PIDMSI_STATUS = 7
+PIDMSI_OWNER = 8
+PIDMSI_RATING = 9
+PIDMSI_PRODUCTION = 10
+PIDMSI_COPYRIGHT = 11
+PRSPEC_INVALID = ( (-1) )
+PRSPEC_LPWSTR = ( 0 )
+PRSPEC_PROPID = ( 1 )
+# From ShObjIdl.h
+SHCIDS_ALLFIELDS = (-2147483648)
+SHCIDS_CANONICALONLY = 268435456
+SHCIDS_BITMASK = (-65536)
+SHCIDS_COLUMNMASK = 65535
+SFGAO_CANMONIKER = 4194304
+SFGAO_HASSTORAGE = 4194304
+SFGAO_STREAM = 4194304
+SFGAO_STORAGEANCESTOR = 8388608
+SFGAO_STORAGECAPMASK = 1891958792
+
+MAXPROPPAGES = 100
+PSP_DEFAULT = 0
+PSP_DLGINDIRECT = 1
+PSP_USEHICON = 2
+PSP_USEICONID = 4
+PSP_USETITLE = 8
+PSP_RTLREADING = 16
+PSP_HASHELP = 32
+PSP_USEREFPARENT = 64
+PSP_USECALLBACK = 128
+PSP_PREMATURE = 1024
+PSP_HIDEHEADER = 2048
+PSP_USEHEADERTITLE = 4096
+PSP_USEHEADERSUBTITLE = 8192
+PSP_USEFUSIONCONTEXT = 16384
+PSPCB_ADDREF = 0
+PSPCB_RELEASE = 1
+PSPCB_CREATE = 2
+
+PSH_DEFAULT = 0
+PSH_PROPTITLE = 1
+PSH_USEHICON = 2
+PSH_USEICONID = 4
+PSH_PROPSHEETPAGE = 8
+PSH_WIZARDHASFINISH = 16
+PSH_WIZARD = 32
+PSH_USEPSTARTPAGE = 64
+PSH_NOAPPLYNOW = 128
+PSH_USECALLBACK = 256
+PSH_HASHELP = 512
+PSH_MODELESS = 1024
+PSH_RTLREADING = 2048
+PSH_WIZARDCONTEXTHELP = 4096
+PSH_WIZARD97 = 8192
+PSH_WIZARD97 = 16777216 # IE5+ value (0x01000000); overrides the IE4 value (8192) defined above
+PSH_WATERMARK = 32768
+PSH_USEHBMWATERMARK = 65536
+PSH_USEHPLWATERMARK = 131072
+PSH_STRETCHWATERMARK = 262144
+PSH_HEADER = 524288
+PSH_USEHBMHEADER = 1048576
+PSH_USEPAGELANG = 2097152
+PSH_WIZARD_LITE = 4194304
+PSH_NOCONTEXTHELP = 33554432
+
+PSCB_INITIALIZED = 1
+PSCB_PRECREATE = 2
+PSCB_BUTTONPRESSED = 3
+PSNRET_NOERROR = 0
+PSNRET_INVALID = 1
+PSNRET_INVALID_NOCHANGEPAGE = 2
+PSNRET_MESSAGEHANDLED = 3
+
+PSWIZB_BACK = 1
+PSWIZB_NEXT = 2
+PSWIZB_FINISH = 4
+PSWIZB_DISABLEDFINISH = 8
+PSBTN_BACK = 0
+PSBTN_NEXT = 1
+PSBTN_FINISH = 2
+PSBTN_OK = 3
+PSBTN_APPLYNOW = 4
+PSBTN_CANCEL = 5
+PSBTN_HELP = 6
+PSBTN_MAX = 6
+
+ID_PSRESTARTWINDOWS = 2
+ID_PSREBOOTSYSTEM = (ID_PSRESTARTWINDOWS | 1)
+WIZ_CXDLG = 276
+WIZ_CYDLG = 140
+WIZ_CXBMP = 80
+WIZ_BODYX = 92
+WIZ_BODYCX = 184
+PROP_SM_CXDLG = 212
+PROP_SM_CYDLG = 188
+PROP_MED_CXDLG = 227
+PROP_MED_CYDLG = 215
+PROP_LG_CXDLG = 252
+PROP_LG_CYDLG = 218
+ISOLATION_AWARE_USE_STATIC_LIBRARY = 0
+ISOLATION_AWARE_BUILD_STATIC_LIBRARY = 0
+
+SHCOLSTATE_TYPE_STR	= 1
+SHCOLSTATE_TYPE_INT	= 2
+SHCOLSTATE_TYPE_DATE	= 3
+SHCOLSTATE_TYPEMASK	= 15
+SHCOLSTATE_ONBYDEFAULT	= 16
+SHCOLSTATE_SLOW	= 32
+SHCOLSTATE_EXTENDED	= 64
+SHCOLSTATE_SECONDARYUI	= 128
+SHCOLSTATE_HIDDEN	= 256
+SHCOLSTATE_PREFER_VARCMP	= 512
+
+FWF_AUTOARRANGE	= 1
+FWF_ABBREVIATEDNAMES	= 2
+FWF_SNAPTOGRID	= 4
+FWF_OWNERDATA	= 8
+FWF_BESTFITWINDOW	= 16
+FWF_DESKTOP	= 32
+FWF_SINGLESEL	= 64
+FWF_NOSUBFOLDERS	= 128
+FWF_TRANSPARENT	= 256
+FWF_NOCLIENTEDGE	= 512
+FWF_NOSCROLL	= 1024
+FWF_ALIGNLEFT	= 2048
+FWF_NOICONS	= 4096
+FWF_SHOWSELALWAYS	= 8192
+FWF_NOVISIBLE	= 16384
+FWF_SINGLECLICKACTIVATE	= 32768
+FWF_NOWEBVIEW	= 65536
+FWF_HIDEFILENAMES	= 131072
+FWF_CHECKSELECT	= 262144
+
+FVM_FIRST	= 1
+FVM_ICON	= 1
+FVM_SMALLICON	= 2
+FVM_LIST	= 3
+FVM_DETAILS	= 4
+FVM_THUMBNAIL	= 5
+FVM_TILE	= 6
+FVM_THUMBSTRIP	= 7
+
+SVUIA_DEACTIVATE	= 0
+SVUIA_ACTIVATE_NOFOCUS	= 1
+SVUIA_ACTIVATE_FOCUS	= 2
+SVUIA_INPLACEACTIVATE	= 3
+
+# SHChangeNotifyRegister flags
+SHCNRF_InterruptLevel = 1
+SHCNRF_ShellLevel = 2
+SHCNRF_RecursiveInterrupt = 4096
+SHCNRF_NewDelivery = 32768
+
+FD_CLSID            = 0x0001
+FD_SIZEPOINT        = 0x0002
+FD_ATTRIBUTES       = 0x0004
+FD_CREATETIME       = 0x0008
+FD_ACCESSTIME       = 0x0010
+FD_WRITESTIME       = 0x0020
+FD_FILESIZE         = 0x0040
+FD_PROGRESSUI       = 0x4000
+FD_LINKUI           = 0x8000
+
+# shlwapi stuff
+ASSOCF_INIT_NOREMAPCLSID           = 0x00000001  #  do not remap clsids to progids
+ASSOCF_INIT_BYEXENAME              = 0x00000002 # executable is being passed in
+ASSOCF_OPEN_BYEXENAME              = 0x00000002 # executable is being passed in
+ASSOCF_INIT_DEFAULTTOSTAR          = 0x00000004 # treat "*" as the BaseClass
+ASSOCF_INIT_DEFAULTTOFOLDER        = 0x00000008 # treat "Folder" as the BaseClass
+ASSOCF_NOUSERSETTINGS              = 0x00000010 #  dont use HKCU
+ASSOCF_NOTRUNCATE                  = 0x00000020 # dont truncate the return string
+ASSOCF_VERIFY                      = 0x00000040 #  verify data is accurate (DISK HITS)
+ASSOCF_REMAPRUNDLL                 = 0x00000080 # actually gets info about rundlls target if applicable
+ASSOCF_NOFIXUPS                    = 0x00000100 # do not attempt to fix errors if found
+ASSOCF_IGNOREBASECLASS             = 0x00000200 # dont recurse into the baseclass
+
+ASSOCSTR_COMMAND      = 1 # shell\verb\command string
+ASSOCSTR_EXECUTABLE = 2 # the executable part of command string
+ASSOCSTR_FRIENDLYDOCNAME = 3 # friendly name of the document type
+ASSOCSTR_FRIENDLYAPPNAME = 4 # friendly name of executable
+ASSOCSTR_NOOPEN = 5  # noopen value
+ASSOCSTR_SHELLNEWVALUE = 6 # query values under the shellnew key
+ASSOCSTR_DDECOMMAND = 7 # template for DDE commands
+ASSOCSTR_DDEIFEXEC = 8 # DDECOMMAND to use if just create a process
+ASSOCSTR_DDEAPPLICATION = 9 # Application name in DDE broadcast
+ASSOCSTR_DDETOPIC = 10 # Topic Name in DDE broadcast
+ASSOCSTR_INFOTIP = 11 # info tip for an item, or list of properties to create info tip from
+ASSOCSTR_QUICKTIP = 12 # same as ASSOCSTR_INFOTIP, except, this list contains only quickly retrievable properties
+ASSOCSTR_TILEINFO = 13 # similar to ASSOCSTR_INFOTIP - lists important properties for tileview
+ASSOCSTR_CONTENTTYPE = 14 # MIME Content type
+ASSOCSTR_DEFAULTICON = 15 # Default icon source
+ASSOCSTR_SHELLEXTENSION = 16 # Guid string pointing to the Shellex\Shellextensionhandler value.
+
+ASSOCKEY_SHELLEXECCLASS = 1 # the key that should be passed to ShellExec(hkeyClass)
+ASSOCKEY_APP = 2 # the "Application" key for the association
+ASSOCKEY_CLASS = 3 # the progid or class key
+ASSOCKEY_BASECLASS = 4 # the BaseClass key
+
+ASSOCDATA_MSIDESCRIPTOR = 1 # Component Descriptor to pass to MSI APIs
+ASSOCDATA_NOACTIVATEHANDLER = 2 # restrict attempts to activate window
+ASSOCDATA_QUERYCLASSSTORE = 3 # should check with the NT Class Store
+ASSOCDATA_HASPERUSERASSOC = 4 # defaults to user specified association
+ASSOCDATA_EDITFLAGS = 5 # Edit flags.
+ASSOCDATA_VALUE = 6 # use pszExtra as the Value name
+
+# flags used with SHGetViewStatePropertyBag
+SHGVSPB_PERUSER = 1
+SHGVSPB_ALLUSERS = 2
+SHGVSPB_PERFOLDER = 4
+SHGVSPB_ALLFOLDERS = 8
+SHGVSPB_INHERIT = 16
+SHGVSPB_ROAM = 32
+SHGVSPB_NOAUTODEFAULTS = 2147483648 # 0x80000000
+SHGVSPB_FOLDER = SHGVSPB_PERUSER | SHGVSPB_PERFOLDER
+SHGVSPB_FOLDERNODEFAULTS = SHGVSPB_PERUSER | SHGVSPB_PERFOLDER | SHGVSPB_NOAUTODEFAULTS
+SHGVSPB_USERDEFAULTS = SHGVSPB_PERUSER | SHGVSPB_ALLFOLDERS
+SHGVSPB_GLOBALDEAFAULTS = SHGVSPB_ALLUSERS | SHGVSPB_ALLFOLDERS # NOTE: "DEAFAULTS" misspelling preserved for pywin32 API compatibility (SDK name: SHGVSPB_GLOBALDEFAULTS)
+
+# IDeskband and related
+DBIM_MINSIZE    = 0x0001
+DBIM_MAXSIZE    = 0x0002
+DBIM_INTEGRAL   = 0x0004
+DBIM_ACTUAL     = 0x0008
+DBIM_TITLE      = 0x0010
+DBIM_MODEFLAGS  = 0x0020
+DBIM_BKCOLOR    = 0x0040
+
+DBIMF_NORMAL            = 0x0000
+DBIMF_VARIABLEHEIGHT    = 0x0008
+DBIMF_DEBOSSED          = 0x0020
+DBIMF_BKCOLOR           = 0x0040
+
+DBIF_VIEWMODE_NORMAL         = 0x0000
+DBIF_VIEWMODE_VERTICAL       = 0x0001
+DBIF_VIEWMODE_FLOATING       = 0x0002
+DBIF_VIEWMODE_TRANSPARENT    = 0x0004
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/link.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/link.py
new file mode 100644
index 0000000..04e6bf1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/link.py
@@ -0,0 +1,52 @@
+# link.py
+# From a demo by Mark Hammond, corrupted by Mike Fletcher
+# (and re-corrupted by Mark Hammond :-)
+from win32com.shell import shell
+import pythoncom, os
+
+class PyShortcut:
+	def __init__( self ):
+		self._base = pythoncom.CoCreateInstance(
+			shell.CLSID_ShellLink, None,
+			pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
+		)
+	def load( self, filename ):
+		# Get an IPersist interface
+		# which allows save/restore of object to/from files
+		self._base.QueryInterface( pythoncom.IID_IPersistFile ).Load( filename )
+	def save( self, filename ):
+		self._base.QueryInterface( pythoncom.IID_IPersistFile ).Save( filename, 0 )
+	def __getattr__( self, name ):
+		if name != "_base":
+			return getattr( self._base, name )
+
+if __name__=='__main__':
+	import sys
+	if len(sys.argv)<2:
+		print "Usage: %s LinkFile [path [, args[, description[, working_dir]]]]\n\nIf LinkFile does not exist, it will be created using the other args"
+		sys.exit(1)
+	file = sys.argv[1]
+	shortcut = PyShortcut()
+	if os.path.exists( file ):
+		# load and dump info from file...
+		shortcut.load( file )
+		# now print data...
+		print 'Shortcut in file %s to file:\n\t%s\nArguments:\n\t%s\nDescription:\n\t%s\nWorking Directory:\n\t%s\nItemIDs:\n\t<skipped>'%(
+			file,
+			shortcut.GetPath(shell.SLGP_SHORTPATH)[0],
+			shortcut.GetArguments(),
+			shortcut.GetDescription(),
+			shortcut.GetWorkingDirectory(),
+			#shortcut.GetIDList(),
+		)
+	else:
+		if len(sys.argv) <3:
+			print "Link file does not exist\nYou must supply the path, args, description and working_dir as args"
+			sys.exit(1)
+		# create the shortcut using rest of args...
+		data = map( None, sys.argv[2:], ("SetPath", "SetArguments", "SetDescription", "SetWorkingDirectory") )
+		for value, function in data:
+			if value and function:
+				# call function on each non-null value
+				getattr( shortcut, function)( value )
+		shortcut.save( file )
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testSHFileOperation.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testSHFileOperation.py
new file mode 100644
index 0000000..ec47986
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testSHFileOperation.py
@@ -0,0 +1,23 @@
+from win32com.shell import shell, shellcon
+import win32api
+import os
+
+def testSHFileOperation(file_cnt):
+    temp_dir=os.environ['temp']
+    orig_fnames=[win32api.GetTempFileName(temp_dir,'sfo')[0] for x in range(file_cnt)]
+    new_fnames=[os.path.join(temp_dir,'copy of '+os.path.split(orig_fnames[x])[1]) for x in range(file_cnt)]
+
+    pFrom='\0'.join(orig_fnames)
+    pTo='\0'.join(new_fnames)
+
+    shell.SHFileOperation((0, shellcon.FO_MOVE, pFrom, pTo, shellcon.FOF_MULTIDESTFILES|shellcon.FOF_NOCONFIRMATION))
+    for fname in orig_fnames:
+        assert not os.path.isfile(fname)
+        
+    for fname in new_fnames:
+        assert os.path.isfile(fname)
+        shell.SHFileOperation((0, shellcon.FO_DELETE, fname, None, shellcon.FOF_NOCONFIRMATION|shellcon.FOF_NOERRORUI))
+
+
+testSHFileOperation(10)
+testSHFileOperation(1)
\ No newline at end of file
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testShell.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testShell.py
new file mode 100644
index 0000000..303f9f5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testShell.py
@@ -0,0 +1,49 @@
+# dumpLink.py
+#
+# given a win32 shortcut file, dump all we know about it.
+import sys, os
+from win32com.shell import shell, shellcon
+import pythoncom
+import glob
+from win32com.storagecon import *
+
+
+def DumpLink(fname):
+	shellLink = pythoncom.CoCreateInstance(shell.CLSID_ShellLink, None, pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink)
+	persistFile = shellLink.QueryInterface(pythoncom.IID_IPersistFile)
+	persistFile.Load(fname,STGM_READ)
+	shellLink.Resolve(0, shell.SLR_ANY_MATCH | shell.SLR_NO_UI)
+	fname, findData = shellLink.GetPath(0)
+	print "Filename", fname, ", UNC=", shellLink.GetPath(shell.SLGP_UNCPRIORITY)[0]
+
+def FavDumper(nothing, path, names):
+	# called by os.path.walk
+	for name in names:
+		print name, 
+		try:
+			DumpLink(name)
+		except pythoncom.com_error:
+			print " - not a link"
+
+
+
+def DumpFavorites():
+	favfold = str(shell.SHGetSpecialFolderPath(0, shellcon.CSIDL_FAVORITES))
+	print "Your favourites are at", favfold
+	os.path.walk(favfold, FavDumper, None)
+
+	
+if __name__=='__main__':
+	if len(sys.argv)>1:
+		for fspec in sys.argv[1:]:
+			files = glob.glob(fspec)
+			if files:
+				for file in files:
+					print file
+					DumpLink(file)
+					print
+			else:
+				print "Can not find", fspec
+	else:
+		print "Dumping your favorites folder!"
+		DumpFavorites()
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testShellFolder.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testShellFolder.py
new file mode 100644
index 0000000..82d12640
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/shell/test/testShellFolder.py
@@ -0,0 +1,21 @@
+from win32com.shell import shell
+from win32com.shell.shellcon import *
+
+sf = shell.SHGetDesktopFolder()
+print "Shell Folder is", sf
+
+names = []
+for i in sf: # Magically calls EnumObjects
+    name = sf.GetDisplayNameOf(i, SHGDN_NORMAL)
+    names.append(name)
+
+# And get the enumerator manually    
+enum = sf.EnumObjects(0, SHCONTF_FOLDERS | SHCONTF_NONFOLDERS | SHCONTF_INCLUDEHIDDEN)
+num = 0
+for i in enum:
+    num += 1
+if num != len(names):
+    print "Should have got the same number of names!?"
+print "Found", len(names), "items on the desktop"
+for name in names:
+    print name
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/__init__.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/__init__.py
new file mode 100644
index 0000000..cb9037ee
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/__init__.py
@@ -0,0 +1,6 @@
+# This is a python package
+# __PackageSupportBuildPath__ not needed for distutil based builds,
+# but not everyone is there yet.
+import win32com
+win32com.__PackageSupportBuildPath__(__path__)
+
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/taskscheduler.pyd b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/taskscheduler.pyd
new file mode 100644
index 0000000..d700261
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/taskscheduler.pyd
Binary files differ
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask.py
new file mode 100644
index 0000000..9fb0c4d8
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask.py
@@ -0,0 +1,50 @@
+import pythoncom, sys, os, time, win32api
+from win32com.taskscheduler import taskscheduler
+task_name='test_addtask.job'
+ts=pythoncom.CoCreateInstance(taskscheduler.CLSID_CTaskScheduler,None,
+                              pythoncom.CLSCTX_INPROC_SERVER,taskscheduler.IID_ITaskScheduler)
+tasks=ts.Enum()
+for task in tasks:
+    print task
+if task_name in tasks:
+    print 'Deleting existing task '+task_name
+    ts.Delete(task_name)
+    
+t=ts.NewWorkItem(task_name)
+t.SetComment('rude comments')
+t.SetApplicationName(sys.executable)
+t.SetPriority(taskscheduler.REALTIME_PRIORITY_CLASS)
+t.SetParameters('-c"import win32ui,time;win32ui.MessageBox(\'hey bubba I am running\');"')
+t.SetWorkingDirectory(os.path.dirname(sys.executable))
+t.SetCreator('test_addtask.py')
+t.SetMaxRunTime(20000)  #milliseconds
+t.SetFlags(taskscheduler.TASK_FLAG_INTERACTIVE|taskscheduler.TASK_FLAG_RUN_ONLY_IF_LOGGED_ON)
+##               |taskscheduler.TASK_FLAG_DELETE_WHEN_DONE)  #task self destructs when no more future run times
+t.SetAccountInformation(win32api.GetUserName(),None)
+## None is only valid for local system acct or if task flags contain TASK_FLAG_RUN_ONLY_IF_LOGGED_ON
+t.SetWorkItemData('some binary garbage')
+
+run_time = time.localtime(time.time() + 60)
+tr_ind, tr=t.CreateTrigger()
+tt=tr.GetTrigger()
+
+## flags default to TASK_TRIGGER_FLAG_DISABLED (4)
+tt.Flags=taskscheduler.TASK_TRIGGER_FLAG_KILL_AT_DURATION_END
+tt.BeginYear=int(time.strftime('%Y',run_time))
+tt.BeginMonth=int(time.strftime('%m',run_time))
+tt.BeginDay=int(time.strftime('%d',run_time))
+tt.StartMinute=int(time.strftime('%M',run_time))
+tt.StartHour=int(time.strftime('%H',run_time))
+tt.MinutesInterval=1
+tt.MinutesDuration=5
+
+tt.TriggerType=taskscheduler.TASK_TIME_TRIGGER_MONTHLYDATE
+#months can contain multiples in a bitmask, use 1<<(month_nbr-1)
+tt.MonthlyDate_Months=1<<(int(time.strftime('%m',run_time))-1) ## corresponds to TASK_JANUARY..TASK_DECEMBER constants
+#days too
+tt.MonthlyDate_Days=1<<(int(time.strftime('%d',run_time))-1)
+tr.SetTrigger(tt)
+print t.GetTriggerString(tr_ind)
+
+pf=t.QueryInterface(pythoncom.IID_IPersistFile)
+pf.Save(None,1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask_1.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask_1.py
new file mode 100644
index 0000000..d366c997
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask_1.py
@@ -0,0 +1,53 @@
+import pythoncom, time, win32api
+from win32com.taskscheduler import taskscheduler
+test_task_name='test_addtask_1.job'
+
+ts=pythoncom.CoCreateInstance(taskscheduler.CLSID_CTaskScheduler,None,
+                              pythoncom.CLSCTX_INPROC_SERVER,taskscheduler.IID_ITaskScheduler)
+
+tasks=ts.Enum()
+for task in tasks:
+    print task
+if test_task_name in tasks:
+    print 'Deleting existing task '+test_task_name
+    ts.Delete(test_task_name)
+
+new_task=pythoncom.CoCreateInstance(taskscheduler.CLSID_CTask,None,
+                              pythoncom.CLSCTX_INPROC_SERVER,taskscheduler.IID_ITask)
+ts.AddWorkItem(test_task_name,new_task)  ## task object is modified in place
+
+new_task.SetFlags(taskscheduler.TASK_FLAG_INTERACTIVE|taskscheduler.TASK_FLAG_RUN_ONLY_IF_LOGGED_ON)
+new_task.SetIdleWait(1,10000)
+new_task.SetComment('test task with idle trigger')
+new_task.SetApplicationName('c:\\python23\\python.exe')
+new_task.SetPriority(taskscheduler.REALTIME_PRIORITY_CLASS)
+new_task.SetParameters('-c"import win32ui,time;win32ui.MessageBox(\'why aint you doing no work ?\');"')
+new_task.SetWorkingDirectory('c:\\python23')
+new_task.SetCreator('test_addtask_1.py')
+new_task.SetAccountInformation(win32api.GetUserName(),None)
+## None is only valid for local system acct or if Flags contain TASK_FLAG_RUN_ONLY_IF_LOGGED_ON
+
+
+run_time = time.localtime(time.time() + 30)
+end_time = time.localtime(time.time() + 60*60*24)
+
+tr_ind, tr=new_task.CreateTrigger()
+tt=tr.GetTrigger()
+tt.TriggerType=taskscheduler.TASK_EVENT_TRIGGER_ON_IDLE
+tt.Flags=taskscheduler.TASK_TRIGGER_FLAG_HAS_END_DATE
+
+tt.BeginYear=int(time.strftime('%Y',run_time))
+tt.BeginMonth=int(time.strftime('%m',run_time))
+tt.BeginDay=int(time.strftime('%d',run_time))
+tt.StartMinute=int(time.strftime('%M',run_time))
+tt.StartHour=int(time.strftime('%H',run_time))
+
+tt.EndYear=int(time.strftime('%Y',end_time))
+tt.EndMonth=int(time.strftime('%m',end_time))
+tt.EndDay=int(time.strftime('%d',end_time))
+
+tr.SetTrigger(tt)
+print  new_task.GetTriggerString(tr_ind)
+
+pf=new_task.QueryInterface(pythoncom.IID_IPersistFile)
+pf.Save(None,1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask_2.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask_2.py
new file mode 100644
index 0000000..2a44a5cd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_addtask_2.py
@@ -0,0 +1,41 @@
+# pywin32 Task Scheduler test: create a one-shot task that runs as the local
+# system account ~60 seconds from now and deletes itself when done.
+import pythoncom, time, win32api
+from win32com.taskscheduler import taskscheduler
+task_name='test_addtask_2.job'
+ts=pythoncom.CoCreateInstance(taskscheduler.CLSID_CTaskScheduler,None,
+                              pythoncom.CLSCTX_INPROC_SERVER,taskscheduler.IID_ITaskScheduler)
+# List existing tasks, and remove any stale copy of this test's job first.
+tasks=ts.Enum()
+for task in tasks:
+    print task
+if task_name in tasks:
+    print 'Deleting existing task '+task_name
+    ts.Delete(task_name)
+    
+t=ts.NewWorkItem(task_name)
+t.SetComment('Test a task running as local system acct')
+# NOTE(review): hard-coded c:\python23 paths inside a python_24 tree --
+# presumably leftover from the upstream pywin32 test; confirm before relying
+# on this test actually running.
+t.SetApplicationName('c:\\python23\\python.exe')
+t.SetPriority(taskscheduler.REALTIME_PRIORITY_CLASS)
+t.SetParameters('test_localsystem.py')
+t.SetWorkingDirectory('c:\\python23')
+t.SetCreator('test_addtask_2.py')
+t.SetMaxRunTime(20000)  #milliseconds
+t.SetFlags(taskscheduler.TASK_FLAG_DELETE_WHEN_DONE)
+t.SetAccountInformation('',None)    ## empty string for account name means to use local system
+## None is only valid for local system acct or if task flags contain TASK_FLAG_RUN_ONLY_IF_LOGGED_ON
+
+# Fire once, one minute from now.
+run_time = time.localtime(time.time() + 60)
+tr_ind, tr=t.CreateTrigger()
+
+tt=tr.GetTrigger()
+tt.Flags=0   ## flags for a new trigger default to TASK_TRIGGER_FLAG_DISABLED (4), make sure to clear them if not using any
+tt.TriggerType=taskscheduler.TASK_TIME_TRIGGER_ONCE
+tt.BeginYear=int(time.strftime('%Y',run_time))
+tt.BeginMonth=int(time.strftime('%m',run_time))
+tt.BeginDay=int(time.strftime('%d',run_time))
+tt.StartMinute=int(time.strftime('%M',run_time))
+tt.StartHour=int(time.strftime('%H',run_time))
+
+tr.SetTrigger(tt)
+print t.GetTriggerString(tr_ind)
+
+# Persist the task to disk via IPersistFile so the scheduler registers it.
+pf=t.QueryInterface(pythoncom.IID_IPersistFile)
+pf.Save(None,1)
diff --git a/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_localsystem.py b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_localsystem.py
new file mode 100644
index 0000000..2030a246
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site-packages/win32comext/taskscheduler/test/test_localsystem.py
@@ -0,0 +1,3 @@
+# Minimal payload executed by the scheduled task created in test_addtask_2.py;
+# writes a marker file to prove the task actually ran.
+f=open('test_localsystem.txt','w')
+f.write('I have run\n')
+f.close()
diff --git a/depot_tools/release/win/python_24/Lib/site.py b/depot_tools/release/win/python_24/Lib/site.py
new file mode 100644
index 0000000..2207ec5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/site.py
@@ -0,0 +1,406 @@
+"""Append module search paths for third-party packages to sys.path.
+
+****************************************************************
+* This module is automatically imported during initialization. *
+****************************************************************
+
+In earlier versions of Python (up to 1.5a3), scripts or modules that
+needed to use site-specific modules would place ``import site''
+somewhere near the top of their code.  Because of the automatic
+import, this is no longer necessary (but code that does it still
+works).
+
+This will append site-specific paths to the module search path.  On
+Unix, it starts with sys.prefix and sys.exec_prefix (if different) and
+appends lib/python<version>/site-packages as well as lib/site-python.
+On other platforms (mainly Mac and Windows), it uses just sys.prefix
+(and sys.exec_prefix, if different, but this is unlikely).  The
+resulting directories, if they exist, are appended to sys.path, and
+also inspected for path configuration files.
+
+A path configuration file is a file whose name has the form
+<package>.pth; its contents are additional directories (one per line)
+to be added to sys.path.  Non-existing directories (or
+non-directories) are never added to sys.path; no directory is added to
+sys.path more than once.  Blank lines and lines beginning with
+'#' are skipped. Lines starting with 'import' are executed.
+
+For example, suppose sys.prefix and sys.exec_prefix are set to
+/usr/local and there is a directory /usr/local/lib/python1.5/site-packages
+with three subdirectories, foo, bar and spam, and two path
+configuration files, foo.pth and bar.pth.  Assume foo.pth contains the
+following:
+
+  # foo package configuration
+  foo
+  bar
+  bletch
+
+and bar.pth contains:
+
+  # bar package configuration
+  bar
+
+Then the following directories are added to sys.path, in this order:
+
+  /usr/local/lib/python1.5/site-packages/bar
+  /usr/local/lib/python1.5/site-packages/foo
+
+Note that bletch is omitted because it doesn't exist; bar precedes foo
+because bar.pth comes alphabetically before foo.pth; and spam is
+omitted because it is not mentioned in either path configuration file.
+
+After these path manipulations, an attempt is made to import a module
+named sitecustomize, which can perform arbitrary additional
+site-specific customizations.  If this import fails with an
+ImportError exception, it is silently ignored.
+
+"""
+
+import sys
+import os
+import __builtin__
+
+
+def makepath(*paths):
+    """Join *paths*, absolutize, and return (abs_path, case_normalized_path).
+
+    The case-normalized form is used for duplicate detection on
+    case-insensitive file systems (e.g. Windows).
+    """
+    dir = os.path.abspath(os.path.join(*paths))
+    return dir, os.path.normcase(dir)
+
+def abs__file__():
+    """Make every loaded module's __file__ attribute an absolute path."""
+    for m in sys.modules.values():
+        try:
+            m.__file__ = os.path.abspath(m.__file__)
+        except AttributeError:
+            # Built-in modules (and None entries) have no __file__.
+            continue
+
+def removeduppaths():
+    """ Remove duplicate entries from sys.path along with making them
+    absolute"""
+    # This ensures that the initial path provided by the interpreter contains
+    # only absolute pathnames, even if we're running from the build directory.
+    L = []
+    known_paths = set()
+    for dir in sys.path:
+        # Filter out duplicate paths (on case-insensitive file systems also
+        # if they only differ in case); turn relative paths into absolute
+        # paths.
+        dir, dircase = makepath(dir)
+        if not dircase in known_paths:
+            L.append(dir)
+            known_paths.add(dircase)
+    # Replace in place so other references to sys.path see the change.
+    sys.path[:] = L
+    return known_paths
+
+# XXX This should not be part of site.py, since it is needed even when
+# using the -S option for Python.  See http://www.python.org/sf/586680
+def addbuilddir():
+    """Append ./build/lib.<platform> in case we're running in the build dir
+    (especially for Guido :-)"""
+    from distutils.util import get_platform
+    s = "build/lib.%s-%.3s" % (get_platform(), sys.version)
+    # sys.path[-1] is expected to be the Modules directory here (the caller
+    # checks this before invoking us), so its parent is the build root.
+    s = os.path.join(os.path.dirname(sys.path[-1]), s)
+    sys.path.append(s)
+
+def _init_pathinfo():
+    """Return a set containing all existing directory entries from sys.path"""
+    d = set()
+    for dir in sys.path:
+        try:
+            if os.path.isdir(dir):
+                dir, dircase = makepath(dir)
+                d.add(dircase)
+        except TypeError:
+            # Non-string sys.path entries (e.g. importer objects) are skipped.
+            continue
+    return d
+
+def addpackage(sitedir, name, known_paths):
+    """Add a new path to known_paths by combining sitedir and 'name' or execute
+    sitedir if it starts with 'import'"""
+    if known_paths is None:
+        # NOTE(review): the result of _init_pathinfo() is discarded here, so
+        # known_paths stays None and the membership test below would fail;
+        # upstream later fixed this to `known_paths = _init_pathinfo()`.
+        # Kept byte-identical to the vendored Python 2.4 source.
+        _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    fullname = os.path.join(sitedir, name)
+    try:
+        f = open(fullname, "rU")
+    except IOError:
+        return
+    try:
+        for line in f:
+            if line.startswith("#"):
+                continue
+            # .pth lines beginning with "import" are executed verbatim.
+            if line.startswith("import"):
+                exec line
+                continue
+            line = line.rstrip()
+            dir, dircase = makepath(sitedir, line)
+            if not dircase in known_paths and os.path.exists(dir):
+                sys.path.append(dir)
+                known_paths.add(dircase)
+    finally:
+        f.close()
+    if reset:
+        known_paths = None
+    return known_paths
+
+def addsitedir(sitedir, known_paths=None):
+    """Add 'sitedir' argument to sys.path if missing and handle .pth files in
+    'sitedir'"""
+    if known_paths is None:
+        known_paths = _init_pathinfo()
+        reset = 1
+    else:
+        reset = 0
+    sitedir, sitedircase = makepath(sitedir)
+    if not sitedircase in known_paths:
+        sys.path.append(sitedir)        # Add path component
+    try:
+        names = os.listdir(sitedir)
+    except os.error:
+        return
+    # Sort so .pth files are processed in deterministic (alphabetical) order.
+    names.sort()
+    for name in names:
+        if name.endswith(os.extsep + "pth"):
+            addpackage(sitedir, name, known_paths)
+    if reset:
+        known_paths = None
+    return known_paths
+
+def addsitepackages(known_paths):
+    """Add site-packages (and possibly site-python) to sys.path"""
+    prefixes = [sys.prefix]
+    if sys.exec_prefix != sys.prefix:
+        prefixes.append(sys.exec_prefix)
+    for prefix in prefixes:
+        if prefix:
+            # The candidate directories are platform dependent.
+            if sys.platform in ('os2emx', 'riscos'):
+                sitedirs = [os.path.join(prefix, "Lib", "site-packages")]
+            elif os.sep == '/':
+                sitedirs = [os.path.join(prefix,
+                                         "lib",
+                                         "python" + sys.version[:3],
+                                         "site-packages"),
+                            os.path.join(prefix, "lib", "site-python")]
+            else:
+                sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")]
+            if sys.platform == 'darwin':
+                # for framework builds *only* we add the standard Apple
+                # locations. Currently only per-user, but /Library and
+                # /Network/Library could be added too
+                if 'Python.framework' in prefix:
+                    home = os.environ.get('HOME')
+                    if home:
+                        sitedirs.append(
+                            os.path.join(home,
+                                         'Library',
+                                         'Python',
+                                         sys.version[:3],
+                                         'site-packages'))
+            for sitedir in sitedirs:
+                if os.path.isdir(sitedir):
+                    addsitedir(sitedir, known_paths)
+    # Always returns None; callers rebuild path info lazily afterwards.
+    return None
+
+
+def setBEGINLIBPATH():
+    """The OS/2 EMX port has optional extension modules that do double duty
+    as DLLs (and must use the .DLL file extension) for other extensions.
+    The library search path needs to be amended so these will be found
+    during module import.  Use BEGINLIBPATH so that these are at the start
+    of the library search path.
+
+    """
+    dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload")
+    libpath = os.environ['BEGINLIBPATH'].split(';')
+    # A trailing ';' in BEGINLIBPATH yields an empty last element; reuse
+    # that slot instead of appending a new one.
+    if libpath[-1]:
+        libpath.append(dllpath)
+    else:
+        libpath[-1] = dllpath
+    os.environ['BEGINLIBPATH'] = ';'.join(libpath)
+
+
+def setquit():
+    """Define new built-ins 'quit' and 'exit'.
+    These are simply strings that display a hint on how to exit.
+
+    """
+    # Dispatch on the path separator: ':' is classic Mac OS, '\\' is
+    # Windows, anything else is assumed Unix-like.
+    if os.sep == ':':
+        exit = 'Use Cmd-Q to quit.'
+    elif os.sep == '\\':
+        exit = 'Use Ctrl-Z plus Return to exit.'
+    else:
+        exit = 'Use Ctrl-D (i.e. EOF) to exit.'
+    __builtin__.quit = __builtin__.exit = exit
+
+
+class _Printer(object):
+    """interactive prompt objects for printing the license text, a list of
+    contributors and the copyright notice."""
+
+    # Number of lines shown per "page" in the interactive pager below.
+    MAXLINES = 23
+
+    def __init__(self, name, data, files=(), dirs=()):
+        # name: builtin name ('copyright', 'license', ...); data: fallback
+        # text used when none of files/dirs yields a readable file.
+        self.__name = name
+        self.__data = data
+        self.__files = files
+        self.__dirs = dirs
+        self.__lines = None
+
+    def __setup(self):
+        # Lazily load the text on first use: try each dir/file combination,
+        # fall back to the inline data.
+        if self.__lines:
+            return
+        data = None
+        for dir in self.__dirs:
+            for filename in self.__files:
+                filename = os.path.join(dir, filename)
+                try:
+                    fp = file(filename, "rU")
+                    data = fp.read()
+                    fp.close()
+                    break
+                except IOError:
+                    pass
+            if data:
+                break
+        if not data:
+            data = self.__data
+        self.__lines = data.split('\n')
+        self.__linecnt = len(self.__lines)
+
+    def __repr__(self):
+        # Short texts print in full; long ones print a usage hint instead.
+        self.__setup()
+        if len(self.__lines) <= self.MAXLINES:
+            return "\n".join(self.__lines)
+        else:
+            return "Type %s() to see the full %s text" % ((self.__name,)*2)
+
+    def __call__(self):
+        # Simple interactive pager: MAXLINES at a time, Return to continue,
+        # 'q' to stop.
+        self.__setup()
+        prompt = 'Hit Return for more, or q (and Return) to quit: '
+        lineno = 0
+        while 1:
+            try:
+                for i in range(lineno, lineno + self.MAXLINES):
+                    print self.__lines[i]
+            except IndexError:
+                # Ran off the end of the text.
+                break
+            else:
+                lineno += self.MAXLINES
+                key = None
+                while key is None:
+                    key = raw_input(prompt)
+                    if key not in ('', 'q'):
+                        key = None
+                if key == 'q':
+                    break
+
+def setcopyright():
+    """Set 'copyright' and 'credits' in __builtin__"""
+    __builtin__.copyright = _Printer("copyright", sys.copyright)
+    if sys.platform[:4] == 'java':
+        __builtin__.credits = _Printer(
+            "credits",
+            "Jython is maintained by the Jython developers (www.jython.org).")
+    else:
+        __builtin__.credits = _Printer(
+            "credits", """\
+    Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands
+    for supporting Python development.  See www.python.org for more information.""")
+    # 'license' prefers the on-disk LICENSE file next to the stdlib; the URL
+    # string is only the fallback.
+    here = os.path.dirname(os.__file__)
+    __builtin__.license = _Printer(
+        "license", "See http://www.python.org/%.3s/license.html" % sys.version,
+        ["LICENSE.txt", "LICENSE"],
+        [os.path.join(here, os.pardir), here, os.curdir])
+
+
+class _Helper(object):
+    """Define the built-in 'help'.
+    This is a wrapper around pydoc.help (with a twist).
+
+    """
+
+    def __repr__(self):
+        # Evaluating bare `help` at the prompt shows usage instead of the
+        # object repr.
+        return "Type help() for interactive help, " \
+               "or help(object) for help about object."
+    def __call__(self, *args, **kwds):
+        # Import pydoc lazily so interpreter startup doesn't pay for it.
+        import pydoc
+        return pydoc.help(*args, **kwds)
+
+def sethelper():
+    # Install the helper object as the 'help' builtin.
+    __builtin__.help = _Helper()
+
+def aliasmbcs():
+    """On Windows, some default encodings are not provided by Python,
+    while they are always available as "mbcs" in each locale. Make
+    them usable by aliasing to "mbcs" in such a case."""
+    if sys.platform == 'win32':
+        import locale, codecs
+        # NOTE(review): getdefaultlocale() may return (None, None), in which
+        # case enc.startswith below would raise AttributeError -- presumably
+        # never hit on win32; kept identical to the vendored source.
+        enc = locale.getdefaultlocale()[1]
+        if enc.startswith('cp'):            # "cp***" ?
+            try:
+                codecs.lookup(enc)
+            except LookupError:
+                # Codec missing: register the alias so lookups of enc
+                # resolve to the built-in "mbcs" codec.
+                import encodings
+                encodings._cache[enc] = encodings._unknown
+                encodings.aliases.aliases[enc] = 'mbcs'
+
+def setencoding():
+    """Set the string encoding used by the Unicode implementation.  The
+    default is 'ascii', but if you're willing to experiment, you can
+    change this."""
+    encoding = "ascii" # Default value set by _PyUnicode_Init()
+    # Both blocks below are deliberately disabled (if 0); they document the
+    # supported experiments without enabling them.
+    if 0:
+        # Enable to support locale aware default string encodings.
+        import locale
+        loc = locale.getdefaultlocale()
+        if loc[1]:
+            encoding = loc[1]
+    if 0:
+        # Enable to switch off string to Unicode coercion and implicit
+        # Unicode to string conversion.
+        encoding = "undefined"
+    if encoding != "ascii":
+        # On Non-Unicode builds this will raise an AttributeError...
+        sys.setdefaultencoding(encoding) # Needs Python Unicode build !
+
+
+def execsitecustomize():
+    """Run custom site specific code, if available."""
+    try:
+        import sitecustomize
+    except ImportError:
+        # No sitecustomize module installed -- silently ignored by design.
+        pass
+
+
+def main():
+    """Run all site-initialization steps in order (executed at import)."""
+    abs__file__()
+    paths_in_sys = removeduppaths()
+    # Special case: running from a CPython build directory (sys.path ends
+    # with the Modules dir) -- add the build output dir too.
+    if (os.name == "posix" and sys.path and
+        os.path.basename(sys.path[-1]) == "Modules"):
+        addbuilddir()
+    paths_in_sys = addsitepackages(paths_in_sys)
+    if sys.platform == 'os2emx':
+        setBEGINLIBPATH()
+    setquit()
+    setcopyright()
+    sethelper()
+    aliasmbcs()
+    setencoding()
+    execsitecustomize()
+    # Remove sys.setdefaultencoding() so that users cannot change the
+    # encoding after initialization.  The test for presence is needed when
+    # this module is run as a script, because this code is executed twice.
+    if hasattr(sys, "setdefaultencoding"):
+        del sys.setdefaultencoding
+
+# site.py runs its initialization at import time, by design.
+main()
+
+def _test():
+    """Print the resulting sys.path, for manual inspection when this module
+    is run as a script."""
+    print "sys.path = ["
+    for dir in sys.path:
+        print "    %r," % (dir,)
+    print "]"
+
+if __name__ == '__main__':
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/smtpd.py b/depot_tools/release/win/python_24/Lib/smtpd.py
new file mode 100644
index 0000000..c656ec76
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/smtpd.py
@@ -0,0 +1,549 @@
+#! /usr/bin/env python
+"""An RFC 2821 smtp proxy.
+
+Usage: %(program)s [options] [localhost:localport [remotehost:remoteport]]
+
+Options:
+
+    --nosetuid
+    -n
+        This program generally tries to setuid `nobody', unless this flag is
+        set.  The setuid call will fail if this program is not run as root (in
+        which case, use this flag).
+
+    --version
+    -V
+        Print the version number and exit.
+
+    --class classname
+    -c classname
+        Use `classname' as the concrete SMTP proxy class.  Uses `PureProxy' by
+        default.
+
+    --debug
+    -d
+        Turn on debugging prints.
+
+    --help
+    -h
+        Print this message and exit.
+
+Version: %(__version__)s
+
+If localhost is not given then `localhost' is used, and if localport is not
+given then 8025 is used.  If remotehost is not given then `localhost' is used,
+and if remoteport is not given, then 25 is used.
+"""
+
+
+# Overview:
+#
+# This file implements the minimal SMTP protocol as defined in RFC 821.  It
+# has a hierarchy of classes which implement the backend functionality for the
+# smtpd.  A number of classes are provided:
+#
+#   SMTPServer - the base class for the backend.  Raises NotImplementedError
+#   if you try to use it.
+#
+#   DebuggingServer - simply prints each message it receives on stdout.
+#
+#   PureProxy - Proxies all messages to a real smtpd which does final
+#   delivery.  One known problem with this class is that it doesn't handle
+#   SMTP errors from the backend server at all.  This should be fixed
+#   (contributions are welcome!).
+#
+#   MailmanProxy - An experimental hack to work with GNU Mailman
+#   <www.list.org>.  Using this server as your real incoming smtpd, your
+#   mailhost will automatically recognize and accept mail destined to Mailman
+#   lists when those lists are created.  Every message not destined for a list
+#   gets forwarded to a real backend smtpd, as with PureProxy.  Again, errors
+#   are not handled correctly yet.
+#
+# Please note that this script requires Python 2.0
+#
+# Author: Barry Warsaw <barry@python.org>
+#
+# TODO:
+#
+# - support mailbox delivery
+# - alias files
+# - ESMTP
+# - handle error codes from the backend smtpd
+
+import sys
+import os
+import errno
+import getopt
+import time
+import socket
+import asyncore
+import asynchat
+
+__all__ = ["SMTPServer","DebuggingServer","PureProxy","MailmanProxy"]
+
+program = sys.argv[0]
+__version__ = 'Python SMTP proxy version 0.2'
+
+
+class Devnull:
+    """File-like sink that discards everything; default DEBUGSTREAM target."""
+    def write(self, msg): pass
+    def flush(self): pass
+
+
+DEBUGSTREAM = Devnull()
+NEWLINE = '\n'
+EMPTYSTRING = ''
+COMMASPACE = ', '
+
+
+
+def usage(code, msg=''):
+    # Print the module docstring (interpolated with globals such as
+    # `program` and `__version__`), an optional message, then exit.
+    print >> sys.stderr, __doc__ % globals()
+    if msg:
+        print >> sys.stderr, msg
+    sys.exit(code)
+
+
+
+class SMTPChannel(asynchat.async_chat):
+    """One SMTP conversation with a single client connection.
+
+    Parses commands line by line (COMMAND state), collects the message body
+    after DATA (DATA state), and hands complete messages to
+    server.process_message().
+    """
+    # Channel states: parsing commands vs. collecting message body.
+    COMMAND = 0
+    DATA = 1
+
+    def __init__(self, server, conn, addr):
+        asynchat.async_chat.__init__(self, conn)
+        self.__server = server
+        self.__conn = conn
+        self.__addr = addr
+        self.__line = []            # accumulated chunks of the current line
+        self.__state = self.COMMAND
+        self.__greeting = 0         # HELO argument once greeted, else 0
+        self.__mailfrom = None      # sender from MAIL FROM
+        self.__rcpttos = []         # recipients from RCPT TO
+        self.__data = ''            # collected message body
+        self.__fqdn = socket.getfqdn()
+        self.__peer = conn.getpeername()
+        print >> DEBUGSTREAM, 'Peer:', repr(self.__peer)
+        self.push('220 %s %s' % (self.__fqdn, __version__))
+        self.set_terminator('\r\n')
+
+    # Overrides base class for convenience
+    def push(self, msg):
+        # Every SMTP response line is CRLF-terminated.
+        asynchat.async_chat.push(self, msg + '\r\n')
+
+    # Implementation of base class abstract method
+    def collect_incoming_data(self, data):
+        self.__line.append(data)
+
+    # Implementation of base class abstract method
+    def found_terminator(self):
+        # Called when a full line (or, in DATA state, the whole message
+        # terminated by CRLF.CRLF) has been received.
+        line = EMPTYSTRING.join(self.__line)
+        print >> DEBUGSTREAM, 'Data:', repr(line)
+        self.__line = []
+        if self.__state == self.COMMAND:
+            if not line:
+                self.push('500 Error: bad syntax')
+                return
+            method = None
+            i = line.find(' ')
+            if i < 0:
+                command = line.upper()
+                arg = None
+            else:
+                command = line[:i].upper()
+                arg = line[i+1:].strip()
+            # Dispatch to smtp_<COMMAND>; unknown verbs get a 502.
+            method = getattr(self, 'smtp_' + command, None)
+            if not method:
+                self.push('502 Error: command "%s" not implemented' % command)
+                return
+            method(arg)
+            return
+        else:
+            if self.__state != self.DATA:
+                self.push('451 Internal confusion')
+                return
+            # Remove extraneous carriage returns and de-transparency according
+            # to RFC 821, Section 4.5.2.
+            data = []
+            for text in line.split('\r\n'):
+                if text and text[0] == '.':
+                    data.append(text[1:])
+                else:
+                    data.append(text)
+            self.__data = NEWLINE.join(data)
+            status = self.__server.process_message(self.__peer,
+                                                   self.__mailfrom,
+                                                   self.__rcpttos,
+                                                   self.__data)
+            # Reset the transaction (but keep the greeting) for the next
+            # MAIL/RCPT/DATA sequence on this connection.
+            self.__rcpttos = []
+            self.__mailfrom = None
+            self.__state = self.COMMAND
+            self.set_terminator('\r\n')
+            if not status:
+                self.push('250 Ok')
+            else:
+                self.push(status)
+
+    # SMTP and ESMTP commands
+    def smtp_HELO(self, arg):
+        if not arg:
+            self.push('501 Syntax: HELO hostname')
+            return
+        if self.__greeting:
+            self.push('503 Duplicate HELO/EHLO')
+        else:
+            self.__greeting = arg
+            self.push('250 %s' % self.__fqdn)
+
+    def smtp_NOOP(self, arg):
+        if arg:
+            self.push('501 Syntax: NOOP')
+        else:
+            self.push('250 Ok')
+
+    def smtp_QUIT(self, arg):
+        # args is ignored
+        self.push('221 Bye')
+        self.close_when_done()
+
+    # factored
+    def __getaddr(self, keyword, arg):
+        # Extract the address following `keyword` ('FROM:' or 'TO:'),
+        # stripping optional angle brackets; returns None on mismatch.
+        address = None
+        keylen = len(keyword)
+        if arg[:keylen].upper() == keyword:
+            address = arg[keylen:].strip()
+            if not address:
+                pass
+            elif address[0] == '<' and address[-1] == '>' and address != '<>':
+                # Addresses can be in the form <person@dom.com> but watch out
+                # for null address, e.g. <>
+                address = address[1:-1]
+        return address
+
+    def smtp_MAIL(self, arg):
+        print >> DEBUGSTREAM, '===> MAIL', arg
+        address = self.__getaddr('FROM:', arg)
+        if not address:
+            self.push('501 Syntax: MAIL FROM:<address>')
+            return
+        if self.__mailfrom:
+            self.push('503 Error: nested MAIL command')
+            return
+        self.__mailfrom = address
+        print >> DEBUGSTREAM, 'sender:', self.__mailfrom
+        self.push('250 Ok')
+
+    def smtp_RCPT(self, arg):
+        print >> DEBUGSTREAM, '===> RCPT', arg
+        if not self.__mailfrom:
+            self.push('503 Error: need MAIL command')
+            return
+        address = self.__getaddr('TO:', arg)
+        if not address:
+            self.push('501 Syntax: RCPT TO: <address>')
+            return
+        self.__rcpttos.append(address)
+        print >> DEBUGSTREAM, 'recips:', self.__rcpttos
+        self.push('250 Ok')
+
+    def smtp_RSET(self, arg):
+        if arg:
+            self.push('501 Syntax: RSET')
+            return
+        # Resets the sender, recipients, and data, but not the greeting
+        self.__mailfrom = None
+        self.__rcpttos = []
+        self.__data = ''
+        self.__state = self.COMMAND
+        self.push('250 Ok')
+
+    def smtp_DATA(self, arg):
+        if not self.__rcpttos:
+            self.push('503 Error: need RCPT command')
+            return
+        if arg:
+            self.push('501 Syntax: DATA')
+            return
+        # Switch to body-collection mode; terminator is the RFC 821
+        # end-of-data sequence.
+        self.__state = self.DATA
+        self.set_terminator('\r\n.\r\n')
+        self.push('354 End data with <CR><LF>.<CR><LF>')
+
+
+
+class SMTPServer(asyncore.dispatcher):
+    """Listening socket that spawns an SMTPChannel per incoming connection.
+
+    Abstract: subclasses must override process_message().
+    """
+    def __init__(self, localaddr, remoteaddr):
+        # localaddr: (host, port) to bind; remoteaddr: upstream relay used
+        # by proxy subclasses.
+        self._localaddr = localaddr
+        self._remoteaddr = remoteaddr
+        asyncore.dispatcher.__init__(self)
+        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
+        # try to re-use a server port if possible
+        self.set_reuse_addr()
+        self.bind(localaddr)
+        self.listen(5)
+        print >> DEBUGSTREAM, \
+              '%s started at %s\n\tLocal addr: %s\n\tRemote addr:%s' % (
+            self.__class__.__name__, time.ctime(time.time()),
+            localaddr, remoteaddr)
+
+    def handle_accept(self):
+        conn, addr = self.accept()
+        print >> DEBUGSTREAM, 'Incoming connection from %s' % repr(addr)
+        # The channel registers itself with the asyncore loop; no reference
+        # needs to be kept here.
+        channel = SMTPChannel(self, conn, addr)
+
+    # API for "doing something useful with the message"
+    def process_message(self, peer, mailfrom, rcpttos, data):
+        """Override this abstract method to handle messages from the client.
+
+        peer is a tuple containing (ipaddr, port) of the client that made the
+        socket connection to our smtp port.
+
+        mailfrom is the raw address the client claims the message is coming
+        from.
+
+        rcpttos is a list of raw addresses the client wishes to deliver the
+        message to.
+
+        data is a string containing the entire full text of the message,
+        headers (if supplied) and all.  It has been `de-transparencied'
+        according to RFC 821, Section 4.5.2.  In other words, a line
+        containing a `.' followed by other text has had the leading dot
+        removed.
+
+        This function should return None, for a normal `250 Ok' response;
+        otherwise it returns the desired response string in RFC 821 format.
+
+        """
+        raise NotImplementedError
+
+
+
+class DebuggingServer(SMTPServer):
+    """SMTPServer that just prints each received message to stdout."""
+    # Do something with the gathered message
+    def process_message(self, peer, mailfrom, rcpttos, data):
+        inheaders = 1
+        lines = data.split('\n')
+        print '---------- MESSAGE FOLLOWS ----------'
+        for line in lines:
+            # headers first
+            if inheaders and not line:
+                # First blank line ends the header block; inject X-Peer there.
+                print 'X-Peer:', peer[0]
+                inheaders = 0
+            print line
+        print '------------ END MESSAGE ------------'
+
+
+
+class PureProxy(SMTPServer):
+    """SMTPServer that relays every message to the configured remote smtpd,
+    adding an X-Peer header after the existing headers."""
+    def process_message(self, peer, mailfrom, rcpttos, data):
+        lines = data.split('\n')
+        # Look for the last header
+        i = 0
+        for line in lines:
+            if not line:
+                break
+            i += 1
+        lines.insert(i, 'X-Peer: %s' % peer[0])
+        data = NEWLINE.join(lines)
+        refused = self._deliver(mailfrom, rcpttos, data)
+        # TBD: what to do with refused addresses?
+        print >> DEBUGSTREAM, 'we got some refusals:', refused
+
+    def _deliver(self, mailfrom, rcpttos, data):
+        # Relay via smtplib to self._remoteaddr; returns a dict mapping
+        # refused recipient -> (code, message), empty on full success.
+        import smtplib
+        refused = {}
+        try:
+            s = smtplib.SMTP()
+            s.connect(self._remoteaddr[0], self._remoteaddr[1])
+            try:
+                refused = s.sendmail(mailfrom, rcpttos, data)
+            finally:
+                s.quit()
+        except smtplib.SMTPRecipientsRefused, e:
+            print >> DEBUGSTREAM, 'got SMTPRecipientsRefused'
+            refused = e.recipients
+        except (socket.error, smtplib.SMTPException), e:
+            print >> DEBUGSTREAM, 'got', e.__class__
+            # All recipients were refused.  If the exception had an associated
+            # error code, use it.  Otherwise,fake it with a non-triggering
+            # exception code.
+            errcode = getattr(e, 'smtp_code', -1)
+            errmsg = getattr(e, 'smtp_error', 'ignore')
+            for r in rcpttos:
+                refused[r] = (errcode, errmsg)
+        return refused
+
+
+
+class MailmanProxy(PureProxy):
+    """Proxy that delivers list-addressed mail directly to GNU Mailman and
+    forwards everything else like PureProxy.  Requires the Mailman package."""
+    def process_message(self, peer, mailfrom, rcpttos, data):
+        from cStringIO import StringIO
+        from Mailman import Utils
+        from Mailman import Message
+        from Mailman import MailList
+        # If the message is to a Mailman mailing list, then we'll invoke the
+        # Mailman script directly, without going through the real smtpd.
+        # Otherwise we'll forward it to the local proxy for disposition.
+        listnames = []
+        for rcpt in rcpttos:
+            local = rcpt.lower().split('@')[0]
+            # We allow the following variations on the theme
+            #   listname
+            #   listname-admin
+            #   listname-owner
+            #   listname-request
+            #   listname-join
+            #   listname-leave
+            parts = local.split('-')
+            if len(parts) > 2:
+                continue
+            listname = parts[0]
+            if len(parts) == 2:
+                command = parts[1]
+            else:
+                command = ''
+            if not Utils.list_exists(listname) or command not in (
+                    '', 'admin', 'owner', 'request', 'join', 'leave'):
+                continue
+            listnames.append((rcpt, listname, command))
+        # Remove all list recipients from rcpttos and forward what we're not
+        # going to take care of ourselves.  Linear removal should be fine
+        # since we don't expect a large number of recipients.
+        for rcpt, listname, command in listnames:
+            rcpttos.remove(rcpt)
+        # If there's any non-list destined recipients left,
+        print >> DEBUGSTREAM, 'forwarding recips:', ' '.join(rcpttos)
+        if rcpttos:
+            refused = self._deliver(mailfrom, rcpttos, data)
+            # TBD: what to do with refused addresses?
+            print >> DEBUGSTREAM, 'we got refusals:', refused
+        # Now deliver directly to the list commands
+        mlists = {}
+        s = StringIO(data)
+        msg = Message.Message(s)
+        # These headers are required for the proper execution of Mailman.  All
+        # MTAs in existance seem to add these if the original message doesn't
+        # have them.
+        if not msg.getheader('from'):
+            msg['From'] = mailfrom
+        if not msg.getheader('date'):
+            msg['Date'] = time.ctime(time.time())
+        for rcpt, listname, command in listnames:
+            print >> DEBUGSTREAM, 'sending message to', rcpt
+            # Cache MailList objects so each list is opened at most once.
+            mlist = mlists.get(listname)
+            if not mlist:
+                mlist = MailList.MailList(listname, lock=0)
+                mlists[listname] = mlist
+            # dispatch on the type of command
+            if command == '':
+                # post
+                msg.Enqueue(mlist, tolist=1)
+            elif command == 'admin':
+                msg.Enqueue(mlist, toadmin=1)
+            elif command == 'owner':
+                msg.Enqueue(mlist, toowner=1)
+            elif command == 'request':
+                msg.Enqueue(mlist, torequest=1)
+            elif command in ('join', 'leave'):
+                # TBD: this is a hack!
+                if command == 'join':
+                    msg['Subject'] = 'subscribe'
+                else:
+                    msg['Subject'] = 'unsubscribe'
+                msg.Enqueue(mlist, torequest=1)
+
+
+
+class Options:
+    """Command-line option defaults; fields are overwritten by parseargs()."""
+    setuid = 1                  # drop privileges to 'nobody' unless -n given
+    classname = 'PureProxy'     # concrete SMTPServer subclass to instantiate
+
+
+
+def parseargs():
+    """Parse sys.argv into an Options instance.
+
+    Fills in setuid, classname, localhost/localport, remotehost/remoteport;
+    exits via usage() on any error.  Also enables DEBUGSTREAM with -d.
+    """
+    global DEBUGSTREAM
+    try:
+        opts, args = getopt.getopt(
+            sys.argv[1:], 'nVhc:d',
+            ['class=', 'nosetuid', 'version', 'help', 'debug'])
+    except getopt.error, e:
+        usage(1, e)
+
+    options = Options()
+    for opt, arg in opts:
+        if opt in ('-h', '--help'):
+            usage(0)
+        elif opt in ('-V', '--version'):
+            print >> sys.stderr, __version__
+            sys.exit(0)
+        elif opt in ('-n', '--nosetuid'):
+            options.setuid = 0
+        elif opt in ('-c', '--class'):
+            options.classname = arg
+        elif opt in ('-d', '--debug'):
+            DEBUGSTREAM = sys.stderr
+
+    # parse the rest of the arguments
+    if len(args) < 1:
+        localspec = 'localhost:8025'
+        remotespec = 'localhost:25'
+    elif len(args) < 2:
+        localspec = args[0]
+        remotespec = 'localhost:25'
+    elif len(args) < 3:
+        localspec = args[0]
+        remotespec = args[1]
+    else:
+        usage(1, 'Invalid arguments: %s' % COMMASPACE.join(args))
+
+    # split into host/port pairs
+    i = localspec.find(':')
+    if i < 0:
+        usage(1, 'Bad local spec: %s' % localspec)
+    options.localhost = localspec[:i]
+    try:
+        options.localport = int(localspec[i+1:])
+    except ValueError:
+        usage(1, 'Bad local port: %s' % localspec)
+    i = remotespec.find(':')
+    if i < 0:
+        usage(1, 'Bad remote spec: %s' % remotespec)
+    options.remotehost = remotespec[:i]
+    try:
+        options.remoteport = int(remotespec[i+1:])
+    except ValueError:
+        usage(1, 'Bad remote port: %s' % remotespec)
+    return options
+
+
+
+if __name__ == '__main__':
+    options = parseargs()
+    # Become nobody
+    if options.setuid:
+        try:
+            import pwd
+        except ImportError:
+            # pwd is Unix-only; on other platforms the user must pass -n.
+            print >> sys.stderr, \
+                  'Cannot import module "pwd"; try running with -n option.'
+            sys.exit(1)
+        nobody = pwd.getpwnam('nobody')[2]
+        try:
+            os.setuid(nobody)
+        except OSError, e:
+            if e.errno != errno.EPERM: raise
+            # Not running as root: setuid not permitted.
+            print >> sys.stderr, \
+                  'Cannot setuid "nobody"; try running with -n option.'
+            sys.exit(1)
+    # Resolve the proxy class: dotted names import the containing module,
+    # bare names are looked up in this module.
+    classname = options.classname
+    if "." in classname:
+        lastdot = classname.rfind(".")
+        mod = __import__(classname[:lastdot], globals(), locals(), [""])
+        classname = classname[lastdot+1:]
+    else:
+        import __main__ as mod
+    class_ = getattr(mod, classname)
+    proxy = class_((options.localhost, options.localport),
+                   (options.remotehost, options.remoteport))
+    try:
+        asyncore.loop()
+    except KeyboardInterrupt:
+        pass
diff --git a/depot_tools/release/win/python_24/Lib/smtplib.py b/depot_tools/release/win/python_24/Lib/smtplib.py
new file mode 100644
index 0000000..2e12483
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/smtplib.py
@@ -0,0 +1,736 @@
+#! /usr/bin/env python
+
+'''SMTP/ESMTP client class.
+
+This should follow RFC 821 (SMTP), RFC 1869 (ESMTP), RFC 2554 (SMTP
+Authentication) and RFC 2487 (Secure SMTP over TLS).
+
+Notes:
+
+Please remember, when doing ESMTP, that the names of the SMTP service
+extensions are NOT the same thing as the option keywords for the RCPT
+and MAIL commands!
+
+Example:
+
+  >>> import smtplib
+  >>> s=smtplib.SMTP("localhost")
+  >>> print s.help()
+  This is Sendmail version 8.8.4
+  Topics:
+      HELO    EHLO    MAIL    RCPT    DATA
+      RSET    NOOP    QUIT    HELP    VRFY
+      EXPN    VERB    ETRN    DSN
+  For more info use "HELP <topic>".
+  To report bugs in the implementation send email to
+      sendmail-bugs@sendmail.org.
+  For local information send email to Postmaster at your site.
+  End of HELP info
+  >>> s.putcmd("vrfy","someone@here")
+  >>> s.getreply()
+  (250, "Somebody OverHere <somebody@here.my.org>")
+  >>> s.quit()
+'''
+
+# Author: The Dragon De Monsyne <dragondm@integral.org>
+# ESMTP support, test code and doc fixes added by
+#     Eric S. Raymond <esr@thyrsus.com>
+# Better RFC 821 compliance (MAIL and RCPT, and CRLF in data)
+#     by Carey Evans <c.evans@clear.net.nz>, for picky mail servers.
+# RFC 2554 (authentication) support by Gerhard Haering <gerhard@bigfoot.de>.
+#
+# This was modified from the Python 1.5 library HTTP lib.
+
+import socket
+import re
+import rfc822
+import base64
+import hmac
+from email.base64MIME import encode as encode_base64
+from sys import stderr
+
__all__ = ["SMTPException","SMTPServerDisconnected","SMTPResponseException",
           "SMTPSenderRefused","SMTPRecipientsRefused","SMTPDataError",
           "SMTPConnectError","SMTPHeloError","SMTPAuthenticationError",
           "quoteaddr","quotedata","SMTP"]

# Standard SMTP port (RFC 821) and the line terminator SMTP requires.
SMTP_PORT = 25
CRLF="\r\n"

# Matches the obsolete "AUTH=mech1 mech2" form of the EHLO auth
# advertisement, which some servers still emit (see SMTP.ehlo()).
OLDSTYLE_AUTH = re.compile(r"auth=(.*)", re.I)
+
# Exception classes used by this module.
class SMTPException(Exception):
    """Root of the exception hierarchy raised by this module."""

class SMTPServerDisconnected(SMTPException):
    """Not connected to any SMTP server.

    Raised when the server unexpectedly drops the connection, or when a
    command is attempted on an SMTP instance that has not yet been
    connected to a server.
    """

class SMTPResponseException(SMTPException):
    """Base class for all exceptions that carry an SMTP error code.

    Raised in some instances when the SMTP server returns an error code.
    The numeric reply code is stored in the `smtp_code' attribute and the
    server's message text in `smtp_error'.
    """

    def __init__(self, code, msg):
        self.smtp_code, self.smtp_error = code, msg
        self.args = (code, msg)

class SMTPSenderRefused(SMTPResponseException):
    """Sender address refused.

    In addition to the attributes set on all SMTPResponseException
    exceptions, `sender' holds the address string the server rejected.
    """

    def __init__(self, code, msg, sender):
        self.smtp_code, self.smtp_error = code, msg
        self.sender = sender
        self.args = (code, msg, sender)

class SMTPRecipientsRefused(SMTPException):
    """All recipient addresses refused.

    Per-recipient errors are available through the `recipients'
    attribute, a dictionary of exactly the same sort as SMTP.sendmail()
    returns.
    """

    def __init__(self, recipients):
        self.recipients = recipients
        self.args = (recipients,)


class SMTPDataError(SMTPResponseException):
    """The SMTP server didn't accept the data."""

class SMTPConnectError(SMTPResponseException):
    """Error during connection establishment."""

class SMTPHeloError(SMTPResponseException):
    """The server refused our HELO reply."""

class SMTPAuthenticationError(SMTPResponseException):
    """Authentication error.

    Most probably the server didn't accept the username/password
    combination provided.
    """
+
class SSLFakeSocket:
    """Minimal socket-like adapter around an SSLObject.

    Only the operations smtplib itself needs are provided.
    """
    def __init__(self, realsock, sslobj):
        self.sslobj = sslobj
        self.realsock = realsock

    def send(self, str):
        # Push the data through the SSL layer and report it all as sent.
        self.sslobj.write(str)
        return len(str)

    # smtplib always hands over complete buffers, so sendall is the
    # very same operation.
    sendall = send

    def close(self):
        # Only the underlying real socket needs closing.
        self.realsock.close()
+
class SSLFakeFile:
    """A fake file-like object that really wraps a SSLObject.

    It only supports what is needed in smtplib: readline() and close().
    """
    def __init__(self, sslobj):
        self.sslobj = sslobj

    def readline(self):
        """Return one '\\n'-terminated line read from the SSL object.

        Reads one character at a time because the SSL layer provides no
        buffering of its own.  If the connection ends before a newline
        arrives, the partial line is returned.  (The original
        implementation spun forever here: at EOF read(1) returns ''
        repeatedly, so the `chr != "\\n"' test never terminated.)
        """
        str = ""
        chr = None
        while chr != "\n":
            chr = self.sslobj.read(1)
            if not chr:
                # EOF -- every further read() would also return ''.
                break
            str += chr
        return str

    def close(self):
        """No-op; the real socket is closed via SSLFakeSocket.close()."""
        pass
+
def quoteaddr(addr):
    """Quote a subset of the email addresses defined by RFC 821.

    Should be able to handle anything rfc822.parseaddr can handle.
    """
    parsed = (None, None)
    try:
        parsed = rfc822.parseaddr(addr)[1]
    except AttributeError:
        pass
    # (None, None) survives only when parsing failed (or parseaddr raised
    # AttributeError above); punt and wrap the raw input instead. -ddm
    if parsed == (None, None):
        return "<%s>" % addr
    return "<%s>" % parsed
+
def quotedata(data):
    """Quote data for email.

    Double leading '.', and change Unix newline '\\n', or Mac '\\r' into
    Internet CRLF end-of-line.
    """
    # First normalize every line ending to CRLF, then apply the RFC 821
    # dot-stuffing to lines that start with a period.
    normalized = re.sub(r'(?:\r\n|\n|\r(?!\n))', CRLF, data)
    return re.sub(r'(?m)^\.', '..', normalized)
+
+
class SMTP:
    """This class manages a connection to an SMTP or ESMTP server.
    SMTP Objects:
        SMTP objects have the following attributes:
            helo_resp
                This is the message given by the server in response to the
                most recent HELO command.

            ehlo_resp
                This is the message given by the server in response to the
                most recent EHLO command. This is usually multiline.

            does_esmtp
                This is a True value _after you do an EHLO command_, if the
                server supports ESMTP.

            esmtp_features
                This is a dictionary, which, if the server supports ESMTP,
                will _after you do an EHLO command_, contain the names of the
                SMTP service extensions this server supports, and their
                parameters (if any).

                Note, all extension names are mapped to lower case in the
                dictionary.

        See each method's docstrings for details.  In general, there is a
        method of the same name to perform each SMTP command.  There is also a
        method called 'sendmail' that will do an entire mail transaction.
        """
    # Class-level defaults; instances overwrite these as the session
    # progresses (connect(), helo(), ehlo()).
    debuglevel = 0
    file = None
    helo_resp = None
    ehlo_resp = None
    does_esmtp = 0

    def __init__(self, host = '', port = 0, local_hostname = None):
        """Initialize a new instance.

        If specified, `host' is the name of the remote host to which to
        connect.  If specified, `port' specifies the port to which to connect.
        By default, smtplib.SMTP_PORT is used.  An SMTPConnectError is raised
        if the specified `host' doesn't respond correctly.  If specified,
        `local_hostname` is used as the FQDN of the local host.  By default,
        the local hostname is found using socket.getfqdn().

        """
        self.esmtp_features = {}
        if host:
            (code, msg) = self.connect(host, port)
            if code != 220:
                raise SMTPConnectError(code, msg)
        if local_hostname is not None:
            self.local_hostname = local_hostname
        else:
            # RFC 2821 says we should use the fqdn in the EHLO/HELO verb, and
            # if that can't be calculated, that we should use a domain literal
            # instead (essentially an encoded IP address like [A.B.C.D]).
            fqdn = socket.getfqdn()
            if '.' in fqdn:
                self.local_hostname = fqdn
            else:
                # We can't find an fqdn hostname, so use a domain literal
                addr = socket.gethostbyname(socket.gethostname())
                self.local_hostname = '[%s]' % addr

    def set_debuglevel(self, debuglevel):
        """Set the debug output level.

        A non-false value results in debug messages for connection and for all
        messages sent to and received from the server.

        """
        self.debuglevel = debuglevel

    def connect(self, host='localhost', port = 0):
        """Connect to a host on a given port.

        If the hostname ends with a colon (`:') followed by a number, and
        there is no port specified, that suffix will be stripped off and the
        number interpreted as the port number to use.

        Note: This method is automatically invoked by __init__, if a host is
        specified during instantiation.

        """
        # Only split a "host:port" suffix when there is exactly one colon
        # (find == rfind), so bare IPv6-style strings are left alone.
        if not port and (host.find(':') == host.rfind(':')):
            i = host.rfind(':')
            if i >= 0:
                host, port = host[:i], host[i+1:]
                try: port = int(port)
                except ValueError:
                    raise socket.error, "nonnumeric port"
        if not port: port = SMTP_PORT
        if self.debuglevel > 0: print>>stderr, 'connect:', (host, port)
        msg = "getaddrinfo returns an empty list"
        self.sock = None
        # Try each (family, sockaddr) candidate from getaddrinfo until
        # one connects; keep the last error message for the final raise.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                self.sock = socket.socket(af, socktype, proto)
                if self.debuglevel > 0: print>>stderr, 'connect:', (host, port)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.debuglevel > 0: print>>stderr, 'connect fail:', (host, port)
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            raise socket.error, msg
        (code, msg) = self.getreply()
        if self.debuglevel > 0: print>>stderr, "connect:", msg
        return (code, msg)

    def send(self, str):
        """Send `str' to the server."""
        if self.debuglevel > 0: print>>stderr, 'send:', repr(str)
        if self.sock:
            try:
                self.sock.sendall(str)
            except socket.error:
                self.close()
                raise SMTPServerDisconnected('Server not connected')
        else:
            raise SMTPServerDisconnected('please run connect() first')

    def putcmd(self, cmd, args=""):
        """Send a command to the server."""
        if args == "":
            str = '%s%s' % (cmd, CRLF)
        else:
            str = '%s %s%s' % (cmd, args, CRLF)
        self.send(str)

    def getreply(self):
        """Get a reply from the server.

        Returns a tuple consisting of:

          - server response code (e.g. '250', or such, if all goes well)
            Note: returns -1 if it can't read response code.

          - server response string corresponding to response code (multiline
            responses are converted to a single, multiline string).

        Raises SMTPServerDisconnected if end-of-file is reached.
        """
        resp=[]
        if self.file is None:
            self.file = self.sock.makefile('rb')
        while 1:
            line = self.file.readline()
            if line == '':
                self.close()
                raise SMTPServerDisconnected("Connection unexpectedly closed")
            if self.debuglevel > 0: print>>stderr, 'reply:', repr(line)
            # Keep only the text after the "250-"/"250 " prefix.
            resp.append(line[4:].strip())
            code=line[:3]
            # Check that the error code is syntactically correct.
            # Don't attempt to read a continuation line if it is broken.
            try:
                errcode = int(code)
            except ValueError:
                errcode = -1
                break
            # Check if multiline response.
            if line[3:4]!="-":
                break

        errmsg = "\n".join(resp)
        if self.debuglevel > 0:
            print>>stderr, 'reply: retcode (%s); Msg: %s' % (errcode,errmsg)
        return errcode, errmsg

    def docmd(self, cmd, args=""):
        """Send a command, and return its response code."""
        self.putcmd(cmd,args)
        return self.getreply()

    # std smtp commands
    def helo(self, name=''):
        """SMTP 'helo' command.
        Hostname to send for this command defaults to the FQDN of the local
        host.
        """
        self.putcmd("helo", name or self.local_hostname)
        (code,msg)=self.getreply()
        self.helo_resp=msg
        return (code,msg)

    def ehlo(self, name=''):
        """ SMTP 'ehlo' command.
        Hostname to send for this command defaults to the FQDN of the local
        host.
        """
        self.esmtp_features = {}
        self.putcmd("ehlo", name or self.local_hostname)
        (code,msg)=self.getreply()
        # According to RFC1869 some (badly written)
        # MTA's will disconnect on an ehlo. Toss an exception if
        # that happens -ddm
        if code == -1 and len(msg) == 0:
            self.close()
            raise SMTPServerDisconnected("Server not connected")
        self.ehlo_resp=msg
        if code != 250:
            return (code,msg)
        self.does_esmtp=1
        #parse the ehlo response -ddm
        resp=self.ehlo_resp.split('\n')
        del resp[0]
        for each in resp:
            # To be able to communicate with as many SMTP servers as possible,
            # we have to take the old-style auth advertisement into account,
            # because:
            # 1) Else our SMTP feature parser gets confused.
            # 2) There are some servers that only advertise the auth methods we
            #    support using the old style.
            auth_match = OLDSTYLE_AUTH.match(each)
            if auth_match:
                # This doesn't remove duplicates, but that's no problem
                self.esmtp_features["auth"] = self.esmtp_features.get("auth", "") \
                        + " " + auth_match.groups(0)[0]
                continue

            # RFC 1869 requires a space between ehlo keyword and parameters.
            # It's actually stricter, in that only spaces are allowed between
            # parameters, but were not going to check for that here.  Note
            # that the space isn't present if there are no parameters.
            m=re.match(r'(?P<feature>[A-Za-z0-9][A-Za-z0-9\-]*) ?',each)
            if m:
                feature=m.group("feature").lower()
                params=m.string[m.end("feature"):].strip()
                if feature == "auth":
                    self.esmtp_features[feature] = self.esmtp_features.get(feature, "") \
                            + " " + params
                else:
                    self.esmtp_features[feature]=params
        return (code,msg)

    def has_extn(self, opt):
        """Does the server support a given SMTP service extension?"""
        return opt.lower() in self.esmtp_features

    def help(self, args=''):
        """SMTP 'help' command.
        Returns help text from server."""
        self.putcmd("help", args)
        return self.getreply()

    def rset(self):
        """SMTP 'rset' command -- resets session."""
        return self.docmd("rset")

    def noop(self):
        """SMTP 'noop' command -- doesn't do anything :>"""
        return self.docmd("noop")

    def mail(self,sender,options=[]):
        """SMTP 'mail' command -- begins mail xfer session."""
        optionlist = ''
        if options and self.does_esmtp:
            optionlist = ' ' + ' '.join(options)
        self.putcmd("mail", "FROM:%s%s" % (quoteaddr(sender) ,optionlist))
        return self.getreply()

    def rcpt(self,recip,options=[]):
        """SMTP 'rcpt' command -- indicates 1 recipient for this mail."""
        optionlist = ''
        if options and self.does_esmtp:
            optionlist = ' ' + ' '.join(options)
        self.putcmd("rcpt","TO:%s%s" % (quoteaddr(recip),optionlist))
        return self.getreply()

    def data(self,msg):
        """SMTP 'DATA' command -- sends message data to server.

        Automatically quotes lines beginning with a period per rfc821.
        Raises SMTPDataError if there is an unexpected reply to the
        DATA command; the return value from this method is the final
        response code received when all the data is sent.
        """
        self.putcmd("data")
        (code,repl)=self.getreply()
        if self.debuglevel >0 : print>>stderr, "data:", (code,repl)
        if code != 354:
            raise SMTPDataError(code,repl)
        else:
            # Dot-stuff the payload, make sure it ends with CRLF, then
            # terminate with the "." end-of-data marker.
            q = quotedata(msg)
            if q[-2:] != CRLF:
                q = q + CRLF
            q = q + "." + CRLF
            self.send(q)
            (code,msg)=self.getreply()
            if self.debuglevel >0 : print>>stderr, "data:", (code,msg)
            return (code,msg)

    def verify(self, address):
        """SMTP 'verify' command -- checks for address validity."""
        self.putcmd("vrfy", quoteaddr(address))
        return self.getreply()
    # a.k.a.
    vrfy=verify

    def expn(self, address):
        """SMTP 'expn' command -- expands a mailing-list address."""
        self.putcmd("expn", quoteaddr(address))
        return self.getreply()

    # some useful methods

    def login(self, user, password):
        """Log in on an SMTP server that requires authentication.

        The arguments are:
            - user:     The user name to authenticate with.
            - password: The password for the authentication.

        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.

        This method will return normally if the authentication was successful.

        This method may raise the following exceptions:

         SMTPHeloError            The server didn't reply properly to
                                  the helo greeting.
         SMTPAuthenticationError  The server didn't accept the username/
                                  password combination.
         SMTPException            No suitable authentication method was
                                  found.
        """

        def encode_cram_md5(challenge, user, password):
            challenge = base64.decodestring(challenge)
            response = user + " " + hmac.HMAC(password, challenge).hexdigest()
            return encode_base64(response, eol="")

        def encode_plain(user, password):
            return encode_base64("%s\0%s\0%s" % (user, user, password), eol="")


        AUTH_PLAIN = "PLAIN"
        AUTH_CRAM_MD5 = "CRAM-MD5"
        AUTH_LOGIN = "LOGIN"

        if self.helo_resp is None and self.ehlo_resp is None:
            if not (200 <= self.ehlo()[0] <= 299):
                (code, resp) = self.helo()
                if not (200 <= code <= 299):
                    raise SMTPHeloError(code, resp)

        if not self.has_extn("auth"):
            raise SMTPException("SMTP AUTH extension not supported by server.")

        # Authentication methods the server supports:
        authlist = self.esmtp_features["auth"].split()

        # List of authentication methods we support: from preferred to
        # less preferred methods. Except for the purpose of testing the weaker
        # ones, we prefer stronger methods like CRAM-MD5:
        preferred_auths = [AUTH_CRAM_MD5, AUTH_PLAIN, AUTH_LOGIN]

        # Determine the authentication method we'll use
        authmethod = None
        for method in preferred_auths:
            if method in authlist:
                authmethod = method
                break

        if authmethod == AUTH_CRAM_MD5:
            (code, resp) = self.docmd("AUTH", AUTH_CRAM_MD5)
            if code == 503:
                # 503 == 'Error: already authenticated'
                return (code, resp)
            (code, resp) = self.docmd(encode_cram_md5(resp, user, password))
        elif authmethod == AUTH_PLAIN:
            (code, resp) = self.docmd("AUTH",
                AUTH_PLAIN + " " + encode_plain(user, password))
        elif authmethod == AUTH_LOGIN:
            (code, resp) = self.docmd("AUTH",
                "%s %s" % (AUTH_LOGIN, encode_base64(user, eol="")))
            if code != 334:
                raise SMTPAuthenticationError(code, resp)
            (code, resp) = self.docmd(encode_base64(password, eol=""))
        elif authmethod is None:
            raise SMTPException("No suitable authentication method found.")
        # All authmethod branches fall through to this final check.
        if code not in [235, 503]:
            # 235 == 'Authentication successful'
            # 503 == 'Error: already authenticated'
            raise SMTPAuthenticationError(code, resp)
        return (code, resp)

    def starttls(self, keyfile = None, certfile = None):
        """Puts the connection to the SMTP server into TLS mode.

        If the server supports TLS, this will encrypt the rest of the SMTP
        session. If you provide the keyfile and certfile parameters,
        the identity of the SMTP server and client can be checked. This,
        however, depends on whether the socket module really checks the
        certificates.
        """
        (resp, reply) = self.docmd("STARTTLS")
        if resp == 220:
            # Swap the raw socket/file for SSL-wrapping fakes; subsequent
            # send()/getreply() calls go through the TLS layer.
            sslobj = socket.ssl(self.sock, keyfile, certfile)
            self.sock = SSLFakeSocket(self.sock, sslobj)
            self.file = SSLFakeFile(sslobj)
        return (resp, reply)

    def sendmail(self, from_addr, to_addrs, msg, mail_options=[],
                 rcpt_options=[]):
        """This command performs an entire mail transaction.

        The arguments are:
            - from_addr    : The address sending this mail.
            - to_addrs     : A list of addresses to send this mail to.  A bare
                             string will be treated as a list with 1 address.
            - msg          : The message to send.
            - mail_options : List of ESMTP options (such as 8bitmime) for the
                             mail command.
            - rcpt_options : List of ESMTP options (such as DSN commands) for
                             all the rcpt commands.

        If there has been no previous EHLO or HELO command this session, this
        method tries ESMTP EHLO first.  If the server does ESMTP, message size
        and each of the specified options will be passed to it.  If EHLO
        fails, HELO will be tried and ESMTP options suppressed.

        This method will return normally if the mail is accepted for at least
        one recipient.  It returns a dictionary, with one entry for each
        recipient that was refused.  Each entry contains a tuple of the SMTP
        error code and the accompanying error message sent by the server.

        This method may raise the following exceptions:

         SMTPHeloError          The server didn't reply properly to
                                the helo greeting.
         SMTPRecipientsRefused  The server rejected ALL recipients
                                (no mail was sent).
         SMTPSenderRefused      The server didn't accept the from_addr.
         SMTPDataError          The server replied with an unexpected
                                error code (other than a refusal of
                                a recipient).

        Note: the connection will be open even after an exception is raised.

        Example:

         >>> import smtplib
         >>> s=smtplib.SMTP("localhost")
         >>> tolist=["one@one.org","two@two.org","three@three.org","four@four.org"]
         >>> msg = '''\\
         ... From: Me@my.org
         ... Subject: testin'...
         ...
         ... This is a test '''
         >>> s.sendmail("me@my.org",tolist,msg)
         { "three@three.org" : ( 550 ,"User unknown" ) }
         >>> s.quit()

        In the above example, the message was accepted for delivery to three
        of the four addresses, and one was rejected, with the error code
        550.  If all addresses are accepted, then the method will return an
        empty dictionary.

        """
        if self.helo_resp is None and self.ehlo_resp is None:
            if not (200 <= self.ehlo()[0] <= 299):
                (code,resp) = self.helo()
                if not (200 <= code <= 299):
                    raise SMTPHeloError(code, resp)
        esmtp_opts = []
        if self.does_esmtp:
            # Hmmm? what's this? -ddm
            # self.esmtp_features['7bit']=""
            if self.has_extn('size'):
                esmtp_opts.append("size=%d" % len(msg))
            for option in mail_options:
                esmtp_opts.append(option)

        (code,resp) = self.mail(from_addr, esmtp_opts)
        if code != 250:
            self.rset()
            raise SMTPSenderRefused(code, resp, from_addr)
        senderrs={}
        if isinstance(to_addrs, basestring):
            to_addrs = [to_addrs]
        for each in to_addrs:
            (code,resp)=self.rcpt(each, rcpt_options)
            if (code != 250) and (code != 251):
                senderrs[each]=(code,resp)
        if len(senderrs)==len(to_addrs):
            # the server refused all our recipients
            self.rset()
            raise SMTPRecipientsRefused(senderrs)
        (code,resp) = self.data(msg)
        if code != 250:
            self.rset()
            raise SMTPDataError(code, resp)
        #if we got here then somebody got our mail
        return senderrs


    def close(self):
        """Close the connection to the SMTP server."""
        if self.file:
            self.file.close()
        self.file = None
        if self.sock:
            self.sock.close()
        self.sock = None


    def quit(self):
        """Terminate the SMTP session."""
        self.docmd("quit")
        self.close()
+
+
+# Test the sendmail method, which tests most of the others.
+# Note: This always sends to localhost.
+if __name__ == '__main__':
+    import sys
+
+    def prompt(prompt):
+        sys.stdout.write(prompt + ": ")
+        return sys.stdin.readline().strip()
+
+    fromaddr = prompt("From")
+    toaddrs  = prompt("To").split(',')
+    print "Enter message, end with ^D:"
+    msg = ''
+    while 1:
+        line = sys.stdin.readline()
+        if not line:
+            break
+        msg = msg + line
+    print "Message length is %d" % len(msg)
+
+    server = SMTP('localhost')
+    server.set_debuglevel(1)
+    server.sendmail(fromaddr, toaddrs, msg)
+    server.quit()
diff --git a/depot_tools/release/win/python_24/Lib/sndhdr.py b/depot_tools/release/win/python_24/Lib/sndhdr.py
new file mode 100644
index 0000000..df2ccf1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sndhdr.py
@@ -0,0 +1,228 @@
+"""Routines to help recognizing sound files.
+
+Function whathdr() recognizes various types of sound file headers.
+It understands almost all headers that SOX can decode.
+
+The return tuple contains the following items, in this order:
+- file type (as SOX understands it)
+- sampling rate (0 if unknown or hard to decode)
+- number of channels (0 if unknown or hard to decode)
+- number of frames in the file (-1 if unknown or hard to decode)
+- number of bits/sample, or 'U' for U-LAW, or 'A' for A-LAW
+
+If the file doesn't have a recognizable type, it returns None.
+If the file can't be opened, IOError is raised.
+
+To compute the total time, divide the number of frames by the
+sampling rate (a frame contains a sample for each channel).
+
+Function what() calls whathdr().  (It used to also use some
+heuristics for raw data, but this doesn't work very well.)
+
+Finally, the function test() is a simple main program that calls
+what() for all files mentioned on the argument list.  For directory
+arguments it calls what() for all files in that directory.  Default
+argument is "." (testing all files in the current directory).  The
+option -r tells it to recurse down directories found inside
+explicitly given directories.
+"""
+
+# The file structure is top-down except that the test program and its
+# subroutine come last.
+
+__all__ = ["what","whathdr"]
+
def what(filename):
    """Guess the type of a sound file"""
    # Delegates entirely to the header-based recognizer.
    return whathdr(filename)
+
+
def whathdr(filename):
    """Recognize sound headers.

    Returns the (type, rate, nchannels, nframes, bits_per_sample) tuple
    produced by the first matching recognizer in `tests', or None when no
    recognizer matches.  Raises IOError if the file cannot be opened.
    """
    f = open(filename, 'rb')
    try:
        h = f.read(512)
        for tf in tests:
            res = tf(h, f)
            if res:
                return res
        return None
    finally:
        # The original version leaked the file handle on every call;
        # always release it, even on early return or a recognizer error.
        # (try/finally rather than `with': this tree targets Python 2.4.)
        f.close()
+
+
#-----------------------------------#
# Subroutines per sound header type #
#-----------------------------------#

# Registry of recognizer functions; each test_*() below appends itself
# here, and whathdr() tries them in registration order.
tests = []
+
def test_aifc(h, f):
    """Recognize AIFF ('AIFF') and AIFF-C ('AIFC') files via the aifc module."""
    import aifc
    # Both variants live inside an IFF 'FORM' container.
    if h[:4] != 'FORM':
        return None
    if h[8:12] == 'AIFC':
        fmt = 'aifc'
    elif h[8:12] == 'AIFF':
        fmt = 'aiff'
    else:
        return None
    # Let the aifc module parse the full header from the start of the file.
    f.seek(0)
    try:
        a = aifc.openfp(f, 'r')
    except (EOFError, aifc.Error):
        return None
    return (fmt, a.getframerate(), a.getnchannels(), \
            a.getnframes(), 8*a.getsampwidth())

tests.append(test_aifc)
+
+
def test_au(h, f):
    """Recognize Sun/NeXT audio files (.au/.snd), either byte order."""
    # The magic determines the byte order used for the header words.
    if h[:4] == '.snd':
        getword = get_long_be
    elif h[:4] in ('\0ds.', 'dns.'):
        getword = get_long_le
    else:
        return None
    type = 'au'
    hdr_size = getword(h[4:8])
    data_size = getword(h[8:12])
    encoding = getword(h[12:16])
    rate = getword(h[16:20])
    nchannels = getword(h[20:24])
    # Map the AU encoding field onto bits- and bytes-per-sample.
    if encoding == 1:
        sample_bits, sample_size = 'U', 1    # 8-bit U-LAW
    elif encoding == 2:
        sample_bits, sample_size = 8, 1
    elif encoding == 3:
        sample_bits, sample_size = 16, 2
    else:
        sample_bits, sample_size = '?', 1
    frame_size = sample_size * nchannels
    return type, rate, nchannels, data_size/frame_size, sample_bits

tests.append(test_au)
+
+
def test_hcom(h, f):
    """Recognize Macintosh HCOM files ('FSSD' at 65, 'HCOM' at 128)."""
    if h[65:69] != 'FSSD' or h[128:132] != 'HCOM':
        return None
    # The rate divisor sits 16 bytes past the HCOM tag.
    divisor = get_long_be(h[144:148])
    return 'hcom', 22050/divisor, 1, -1, 8

tests.append(test_hcom)
+
+
def test_voc(h, f):
    """Recognize Creative Voice (VOC) files."""
    if h[:20] != 'Creative Voice File\032':
        return None
    # Offset of the first sound-data block, then its rate code byte.
    sbseek = get_short_le(h[20:22])
    rate = 0
    if 0 <= sbseek < 500 and h[sbseek] == '\1':
        ratecode = ord(h[sbseek+4])
        rate = int(1000000.0 / (256 - ratecode))
    return 'voc', rate, 1, -1, 8

tests.append(test_voc)
+
+
def test_wav(h, f):
    """Recognize RIFF/WAVE files."""
    # Expected preamble: 'RIFF' <len> 'WAVE' 'fmt ' <len>
    if not (h[:4] == 'RIFF' and h[8:12] == 'WAVE' and h[12:16] == 'fmt '):
        return None
    style = get_short_le(h[20:22])       # format tag (read, not reported)
    nchannels = get_short_le(h[22:24])
    rate = get_long_le(h[24:28])
    sample_bits = get_short_le(h[34:36])
    return 'wav', rate, nchannels, -1, sample_bits

tests.append(test_wav)
+
+
+def test_8svx(h, f):
+    if h[:4] != 'FORM' or h[8:12] != '8SVX':
+        return None
+    # Should decode it to get #channels -- assume always 1
+    return '8svx', 0, 1, 0, 8
+
+tests.append(test_8svx)
+
+
def test_sndt(h, f):
    """Recognize SNDT files by their 'SOUND' magic."""
    if h[:5] != 'SOUND':
        return None
    nsamples = get_long_le(h[8:12])
    rate = get_short_le(h[20:22])
    return 'sndt', rate, 1, nsamples, 8

tests.append(test_sndt)
+
+
def test_sndr(h, f):
    """Recognize (headerless) SNDR files by a plausible rate field."""
    if h[:2] != '\0\0':
        return None
    rate = get_short_le(h[2:4])
    # Only accept rates in a range a real SNDR file would use.
    if 4000 <= rate <= 25000:
        return 'sndr', rate, 1, -1, 8
    return None

tests.append(test_sndr)
+
+
#---------------------------------------------#
# Subroutines to extract numbers from strings #
#---------------------------------------------#

def get_long_be(s):
    """Decode a 32-bit big-endian unsigned integer from 4 chars."""
    b0, b1, b2, b3 = ord(s[0]), ord(s[1]), ord(s[2]), ord(s[3])
    return (b0 << 24) | (b1 << 16) | (b2 << 8) | b3

def get_long_le(s):
    """Decode a 32-bit little-endian unsigned integer from 4 chars."""
    b0, b1, b2, b3 = ord(s[0]), ord(s[1]), ord(s[2]), ord(s[3])
    return (b3 << 24) | (b2 << 16) | (b1 << 8) | b0

def get_short_be(s):
    """Decode a 16-bit big-endian unsigned integer from 2 chars."""
    return (ord(s[0]) << 8) | ord(s[1])

def get_short_le(s):
    """Decode a 16-bit little-endian unsigned integer from 2 chars."""
    return (ord(s[1]) << 8) | ord(s[0])
+
+
#--------------------#
# Small test program #
#--------------------#

def test():
    """Command-line driver: report the type of each file named in argv.

    A leading -r makes directory arguments recurse into subdirectories.
    """
    import sys
    recursive = 0
    if sys.argv[1:] and sys.argv[1] == '-r':
        del sys.argv[1:2]
        recursive = 1
    try:
        # No arguments means "test everything in the current directory".
        testall(sys.argv[1:] or ['.'], recursive, 1)
    except KeyboardInterrupt:
        sys.stderr.write('\n[Interrupted]\n')
        sys.exit(1)
+
def testall(list, recursive, toplevel):
    """Run what() over each name in `list', printing one line per file.

    Directories are expanded one level when named explicitly (`toplevel'
    true), or at every level when `recursive' is true; otherwise they are
    reported as skipped.
    """
    import sys
    import os
    for filename in list:
        if os.path.isdir(filename):
            print filename + '/:',
            if recursive or toplevel:
                print 'recursing down:'
                import glob
                names = glob.glob(os.path.join(filename, '*'))
                # Recurse with toplevel=0 so deeper directories obey -r.
                testall(names, recursive, 0)
            else:
                print '*** directory (use -r) ***'
        else:
            print filename + ':',
            sys.stdout.flush()
            try:
                print what(filename)
            except IOError:
                print '*** not found ***'
+
# Allow running this module directly as a command-line tool.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/socket.py b/depot_tools/release/win/python_24/Lib/socket.py
new file mode 100644
index 0000000..5fd51f4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/socket.py
@@ -0,0 +1,408 @@
+# Wrapper module for _socket, providing some additional facilities
+# implemented in Python.
+
+"""\
+This module provides socket operations and some related functions.
+On Unix, it supports IP (Internet Protocol) and Unix domain sockets.
+On other systems, it only supports IP. Functions specific for a
+socket are available as methods of the socket object.
+
+Functions:
+
+socket() -- create a new socket object
+socketpair() -- create a pair of new socket objects [*]
+fromfd() -- create a socket object from an open file descriptor [*]
+gethostname() -- return the current hostname
+gethostbyname() -- map a hostname to its IP number
+gethostbyaddr() -- map an IP number or hostname to DNS info
+getservbyname() -- map a service name and a protocol name to a port number
+getprotobyname() -- map a protocol name (e.g. 'tcp') to a number
+ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order
+htons(), htonl() -- convert 16, 32 bit int from host to network byte order
+inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format
+inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)
+ssl() -- secure socket layer support (only available if configured)
+socket.getdefaulttimeout() -- get the default timeout value
+socket.setdefaulttimeout() -- set the default timeout value
+
+ [*] not available on all platforms!
+
+Special objects:
+
+SocketType -- type object for socket objects
+error -- exception raised for I/O errors
+has_ipv6 -- boolean value indicating if IPv6 is supported
+
+Integer constants:
+
+AF_INET, AF_UNIX -- socket domains (first argument to socket() call)
+SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)
+
+Many other constants may be defined; these may be used in calls to
+the setsockopt() and getsockopt() methods.
+"""
+
+import _socket
+from _socket import *
+
+_have_ssl = False
+try:
+    import _ssl
+    from _ssl import *
+    _have_ssl = True
+except ImportError:
+    pass
+
+import os, sys
+
+try:
+    from errno import EBADF
+except ImportError:
+    EBADF = 9
+
+__all__ = ["getfqdn"]
+__all__.extend(os._get_exports_list(_socket))
+if _have_ssl:
+    __all__.extend(os._get_exports_list(_ssl))
+
# Keep a reference to the C-level socket type before it is shadowed by
# the _socketobject wrapper class assigned to `socket` further below.
_realsocket = socket
if _have_ssl:
    _realssl = ssl
    def ssl(sock, keyfile=None, certfile=None):
        # Accept either a raw _socket.socket or the Python-level wrapper;
        # the _ssl extension needs the underlying implementation object.
        if hasattr(sock, "_sock"):
            sock = sock._sock
        return _realssl(sock, keyfile, certfile)
+
# WSA error codes
if sys.platform.lower().startswith("win"):
    # Map Winsock error numbers to human-readable descriptions.
    errorTab = {
        10004: "The operation was interrupted.",
        10009: "A bad file handle was passed.",
        10013: "Permission denied.",
        10014: "A fault occurred on the network??", # WSAEFAULT
        10022: "An invalid operation was attempted.",
        10035: "The socket operation would block",
        10036: "A blocking operation is already in progress.",
        10048: "The network address is in use.",
        10054: "The connection has been reset.",
        10058: "The network has been shut down.",
        10060: "The operation timed out.",
        10061: "Connection refused.",
        10063: "The name is too long.",
        10064: "The host is down.",
        10065: "The host is unreachable.",
    }
    __all__.append("errorTab")
+
+
+
def getfqdn(name=''):
    """Get fully qualified domain name from name.

    An empty argument is interpreted as meaning the local host.

    First the hostname returned by gethostbyaddr() is checked, then
    possibly existing aliases. In case no FQDN is available, hostname
    as returned by gethostname() is returned.
    """
    name = name.strip()
    if not name or name == '0.0.0.0':
        name = gethostname()
    try:
        hostname, aliases, ipaddrs = gethostbyaddr(name)
    except error:
        # Reverse lookup failed; fall back to the name we were given
        # (or the local hostname determined above).
        return name
    # Prefer the first name containing a dot; the primary hostname is
    # considered before any aliases.
    for candidate in [hostname] + aliases:
        if '.' in candidate:
            return candidate
    return hostname
+
+
+#
+# These classes are used by the socket() defined on Windows and BeOS
+# platforms to provide a best-effort implementation of the cleanup
+# semantics needed when sockets can't be dup()ed.
+#
+# These are not actually used on other platforms.
+#
+
# Methods of the underlying socket object that the _socketobject
# wrapper forwards verbatim (wrapper code is generated below via exec).
_socketmethods = (
    'bind', 'connect', 'connect_ex', 'fileno', 'listen',
    'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
    'sendall', 'setblocking',
    'settimeout', 'gettimeout', 'shutdown')

# RISC OS sockets expose an extra method that must be forwarded too.
if sys.platform == "riscos":
    _socketmethods = _socketmethods + ('sleeptaskw',)
+
class _closedsocket(object):
    # Stand-in installed by _socketobject.close(): any operation on (or
    # attribute access of) a closed socket raises EBADF.  _dummy is also
    # referenced directly by _socketobject.close(), so its name is part
    # of this class's interface.
    __slots__ = []
    def _dummy(*args):
        raise error(EBADF, 'Bad file descriptor')
    send = recv = sendto = recvfrom = __getattr__ = _dummy
+
class _socketobject(object):

    __doc__ = _realsocket.__doc__

    # The data-transfer methods are bound directly from the underlying
    # socket in __init__ for speed; close() rebinds them to raise EBADF.
    __slots__ = ["_sock", "send", "recv", "sendto", "recvfrom",
                 "__weakref__"]

    def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, _sock=None):
        # _sock is used internally (by accept() and dup()) to wrap an
        # existing implementation socket instead of creating a new one.
        if _sock is None:
            _sock = _realsocket(family, type, proto)
        self._sock = _sock
        self.send = self._sock.send
        self.recv = self._sock.recv
        self.sendto = self._sock.sendto
        self.recvfrom = self._sock.recvfrom

    def close(self):
        # Drop the reference to the real socket (the OS socket is closed
        # when the last reference goes away) and make further calls fail.
        self._sock = _closedsocket()
        self.send = self.recv = self.sendto = self.recvfrom = self._sock._dummy
    close.__doc__ = _realsocket.close.__doc__

    def accept(self):
        # Wrap the accepted connection in a fresh _socketobject.
        sock, addr = self._sock.accept()
        return _socketobject(_sock=sock), addr
    accept.__doc__ = _realsocket.accept.__doc__

    def dup(self):
        """dup() -> socket object

        Return a new socket object connected to the same system resource."""
        return _socketobject(_sock=self._sock)

    def makefile(self, mode='r', bufsize=-1):
        """makefile([mode[, bufsize]]) -> file object

        Return a regular file object corresponding to the socket.  The mode
        and bufsize arguments are as for the built-in open() function."""
        return _fileobject(self._sock, mode, bufsize)

    # Generate simple pass-through wrappers for every name listed in
    # _socketmethods; exec is used so that each wrapper gets a proper
    # function name and the real socket method's docstring.
    _s = ("def %s(self, *args): return self._sock.%s(*args)\n\n"
          "%s.__doc__ = _realsocket.%s.__doc__\n")
    for _m in _socketmethods:
        exec _s % (_m, _m, _m, _m)
    del _m, _s

# Install the wrapper as the public socket type.
socket = SocketType = _socketobject
+
class _fileobject(object):
    """Faux file object attached to a socket object."""

    default_bufsize = 8192
    name = "<socket>"

    __slots__ = ["mode", "bufsize", "softspace",
                 # "closed" is a property, see below
                 "_sock", "_rbufsize", "_wbufsize", "_rbuf", "_wbuf"]

    def __init__(self, sock, mode='rb', bufsize=-1):
        # bufsize follows open() semantics: < 0 means default buffering,
        # 0 unbuffered, 1 line-buffered (writes), otherwise a byte count.
        self._sock = sock
        self.mode = mode # Not actually used in this version
        if bufsize < 0:
            bufsize = self.default_bufsize
        self.bufsize = bufsize
        self.softspace = False
        if bufsize == 0:
            self._rbufsize = 1
        elif bufsize == 1:
            # Line buffering only affects writes; reads still use the
            # default chunk size.
            self._rbufsize = self.default_bufsize
        else:
            self._rbufsize = bufsize
        self._wbufsize = bufsize
        self._rbuf = "" # A string
        self._wbuf = [] # A list of strings

    def _getclosed(self):
        return self._sock is None
    closed = property(_getclosed, doc="True if the file is closed")

    def close(self):
        # Flush pending writes, then drop the socket reference; the
        # socket itself is only closed when its last reference goes away.
        try:
            if self._sock:
                self.flush()
        finally:
            self._sock = None

    def __del__(self):
        try:
            self.close()
        except:
            # close() may fail if __init__ didn't complete
            pass

    def flush(self):
        # Send the accumulated write buffer with a single sendall() call.
        if self._wbuf:
            buffer = "".join(self._wbuf)
            self._wbuf = []
            self._sock.sendall(buffer)

    def fileno(self):
        return self._sock.fileno()

    def write(self, data):
        data = str(data) # XXX Should really reject non-string non-buffers
        if not data:
            return
        self._wbuf.append(data)
        # Flush when unbuffered, when line-buffered and a newline was
        # written, or when the buffer has grown past its limit.
        if (self._wbufsize == 0 or
            self._wbufsize == 1 and '\n' in data or
            self._get_wbuf_len() >= self._wbufsize):
            self.flush()

    def writelines(self, list):
        # XXX We could do better here for very long lists
        # XXX Should really reject non-string non-buffers
        self._wbuf.extend(filter(None, map(str, list)))
        if (self._wbufsize <= 1 or
            self._get_wbuf_len() >= self._wbufsize):
            self.flush()

    def _get_wbuf_len(self):
        # Total number of buffered, not-yet-sent bytes.
        buf_len = 0
        for x in self._wbuf:
            buf_len += len(x)
        return buf_len

    def read(self, size=-1):
        # `data` holds whatever was left over from a previous read.
        data = self._rbuf
        if size < 0:
            # Read until EOF
            buffers = []
            if data:
                buffers.append(data)
            self._rbuf = ""
            if self._rbufsize <= 1:
                recv_size = self.default_bufsize
            else:
                recv_size = self._rbufsize
            while True:
                data = self._sock.recv(recv_size)
                if not data:
                    break
                buffers.append(data)
            return "".join(buffers)
        else:
            # Read until size bytes or EOF seen, whichever comes first
            buf_len = len(data)
            if buf_len >= size:
                # Satisfy the request entirely from the read buffer.
                self._rbuf = data[size:]
                return data[:size]
            buffers = []
            if data:
                buffers.append(data)
            self._rbuf = ""
            while True:
                left = size - buf_len
                recv_size = max(self._rbufsize, left)
                data = self._sock.recv(recv_size)
                if not data:
                    break
                buffers.append(data)
                n = len(data)
                if n >= left:
                    # Got at least as much as requested; keep the excess
                    # in the read buffer for the next call.
                    self._rbuf = data[left:]
                    buffers[-1] = data[:left]
                    break
                buf_len += n
            return "".join(buffers)

    def readline(self, size=-1):
        data = self._rbuf
        if size < 0:
            # Read until \n or EOF, whichever comes first
            if self._rbufsize <= 1:
                # Speed up unbuffered case
                assert data == ""
                buffers = []
                recv = self._sock.recv
                while data != "\n":
                    data = recv(1)
                    if not data:
                        break
                    buffers.append(data)
                return "".join(buffers)
            nl = data.find('\n')
            if nl >= 0:
                # The buffered data already contains a full line.
                nl += 1
                self._rbuf = data[nl:]
                return data[:nl]
            buffers = []
            if data:
                buffers.append(data)
            self._rbuf = ""
            while True:
                data = self._sock.recv(self._rbufsize)
                if not data:
                    break
                buffers.append(data)
                nl = data.find('\n')
                if nl >= 0:
                    # Keep everything after the newline for later reads.
                    nl += 1
                    self._rbuf = data[nl:]
                    buffers[-1] = data[:nl]
                    break
            return "".join(buffers)
        else:
            # Read until size bytes or \n or EOF seen, whichever comes first
            nl = data.find('\n', 0, size)
            if nl >= 0:
                nl += 1
                self._rbuf = data[nl:]
                return data[:nl]
            buf_len = len(data)
            if buf_len >= size:
                self._rbuf = data[size:]
                return data[:size]
            buffers = []
            if data:
                buffers.append(data)
            self._rbuf = ""
            while True:
                data = self._sock.recv(self._rbufsize)
                if not data:
                    break
                buffers.append(data)
                left = size - buf_len
                nl = data.find('\n', 0, left)
                if nl >= 0:
                    nl += 1
                    self._rbuf = data[nl:]
                    buffers[-1] = data[:nl]
                    break
                n = len(data)
                if n >= left:
                    self._rbuf = data[left:]
                    buffers[-1] = data[:left]
                    break
                buf_len += n
            return "".join(buffers)

    def readlines(self, sizehint=0):
        # Collect whole lines until EOF, or until at least sizehint
        # bytes have been accumulated (if sizehint is nonzero).
        total = 0
        list = []
        while True:
            line = self.readline()
            if not line:
                break
            list.append(line)
            total += len(line)
            if sizehint and total >= sizehint:
                break
        return list

    # Iterator protocols

    def __iter__(self):
        return self

    def next(self):
        line = self.readline()
        if not line:
            raise StopIteration
        return line
diff --git a/depot_tools/release/win/python_24/Lib/sre.py b/depot_tools/release/win/python_24/Lib/sre.py
new file mode 100644
index 0000000..7969723e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sre.py
@@ -0,0 +1,309 @@
+#
+# Secret Labs' Regular Expression Engine
+#
+# re-compatible interface for the sre matching engine
+#
+# Copyright (c) 1998-2001 by Secret Labs AB.  All rights reserved.
+#
+# This version of the SRE library can be redistributed under CNRI's
+# Python 1.6 license.  For any other use, please contact Secret Labs
+# AB (info@pythonware.com).
+#
+# Portions of this engine have been developed in cooperation with
+# CNRI.  Hewlett-Packard provided funding for 1.6 integration and
+# other compatibility work.
+#
+
+r"""Support for regular expressions (RE).
+
+This module provides regular expression matching operations similar to
+those found in Perl.  It supports both 8-bit and Unicode strings; both
+the pattern and the strings being processed can contain null bytes and
+characters outside the US ASCII range.
+
+Regular expressions can contain both special and ordinary characters.
+Most ordinary characters, like "A", "a", or "0", are the simplest
+regular expressions; they simply match themselves.  You can
+concatenate ordinary characters, so last matches the string 'last'.
+
+The special characters are:
+    "."      Matches any character except a newline.
+    "^"      Matches the start of the string.
+    "$"      Matches the end of the string.
+    "*"      Matches 0 or more (greedy) repetitions of the preceding RE.
+             Greedy means that it will match as many repetitions as possible.
+    "+"      Matches 1 or more (greedy) repetitions of the preceding RE.
+    "?"      Matches 0 or 1 (greedy) of the preceding RE.
+    *?,+?,?? Non-greedy versions of the previous three special characters.
+    {m,n}    Matches from m to n repetitions of the preceding RE.
+    {m,n}?   Non-greedy version of the above.
+    "\\"      Either escapes special characters or signals a special sequence.
+    []       Indicates a set of characters.
+             A "^" as the first character indicates a complementing set.
+    "|"      A|B, creates an RE that will match either A or B.
+    (...)    Matches the RE inside the parentheses.
+             The contents can be retrieved or matched later in the string.
+    (?iLmsux) Set the I, L, M, S, U, or X flag for the RE (see below).
+    (?:...)  Non-grouping version of regular parentheses.
+    (?P<name>...) The substring matched by the group is accessible by name.
+    (?P=name)     Matches the text matched earlier by the group named name.
+    (?#...)  A comment; ignored.
+    (?=...)  Matches if ... matches next, but doesn't consume the string.
+    (?!...)  Matches if ... doesn't match next.
+
+The special sequences consist of "\\" and a character from the list
+below.  If the ordinary character is not on the list, then the
+resulting RE will match the second character.
+    \number  Matches the contents of the group of the same number.
+    \A       Matches only at the start of the string.
+    \Z       Matches only at the end of the string.
+    \b       Matches the empty string, but only at the start or end of a word.
+    \B       Matches the empty string, but not at the start or end of a word.
+    \d       Matches any decimal digit; equivalent to the set [0-9].
+    \D       Matches any non-digit character; equivalent to the set [^0-9].
+    \s       Matches any whitespace character; equivalent to [ \t\n\r\f\v].
+    \S       Matches any non-whitespace character; equiv. to [^ \t\n\r\f\v].
+    \w       Matches any alphanumeric character; equivalent to [a-zA-Z0-9_].
+             With LOCALE, it will match the set [0-9_] plus characters defined
+             as letters for the current locale.
+    \W       Matches the complement of \w.
+    \\       Matches a literal backslash.
+
+This module exports the following functions:
+    match    Match a regular expression pattern to the beginning of a string.
+    search   Search a string for the presence of a pattern.
+    sub      Substitute occurrences of a pattern found in a string.
+    subn     Same as sub, but also return the number of substitutions made.
+    split    Split a string by the occurrences of a pattern.
+    findall  Find all occurrences of a pattern in a string.
+    compile  Compile a pattern into a RegexObject.
+    purge    Clear the regular expression cache.
+    escape   Backslash all non-alphanumerics in a string.
+
+Some of the functions in this module take flags as optional parameters:
+    I  IGNORECASE  Perform case-insensitive matching.
+    L  LOCALE      Make \w, \W, \b, \B, dependent on the current locale.
+    M  MULTILINE   "^" matches the beginning of lines as well as the string.
+                   "$" matches the end of lines as well as the string.
+    S  DOTALL      "." matches any character at all, including the newline.
+    X  VERBOSE     Ignore whitespace and comments for nicer looking RE's.
+    U  UNICODE     Make \w, \W, \b, \B, dependent on the Unicode locale.
+
+This module also defines an exception 'error'.
+
+"""
+
+import sys
+import sre_compile
+import sre_parse
+
# public symbols
__all__ = [ "match", "search", "sub", "subn", "split", "findall",
    "compile", "purge", "template", "escape", "I", "L", "M", "S", "X",
    "U", "IGNORECASE", "LOCALE", "MULTILINE", "DOTALL", "VERBOSE",
    "UNICODE", "error" ]

__version__ = "2.2.1"

# flags (each single letter is a short alias for the spelled-out name)
I = IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE # ignore case
L = LOCALE = sre_compile.SRE_FLAG_LOCALE # assume current 8-bit locale
U = UNICODE = sre_compile.SRE_FLAG_UNICODE # assume unicode locale
M = MULTILINE = sre_compile.SRE_FLAG_MULTILINE # make anchors look for newline
S = DOTALL = sre_compile.SRE_FLAG_DOTALL # make dot match newline
X = VERBOSE = sre_compile.SRE_FLAG_VERBOSE # ignore whitespace and comments

# sre extensions (experimental, don't rely on these)
T = TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE # disable backtracking
DEBUG = sre_compile.SRE_FLAG_DEBUG # dump pattern after compilation

# sre exception
error = sre_compile.error
+
+# --------------------------------------------------------------------
+# public interface
+
def match(pattern, string, flags=0):
    """Try to apply the pattern at the start of the string, returning
    a match object, or None if no match was found."""
    compiled = _compile(pattern, flags)
    return compiled.match(string)
+
def search(pattern, string, flags=0):
    """Scan through string looking for a match to the pattern, returning
    a match object, or None if no match was found."""
    compiled = _compile(pattern, flags)
    return compiled.search(string)
+
def sub(pattern, repl, string, count=0):
    """Return the string obtained by replacing the leftmost
    non-overlapping occurrences of the pattern in string by the
    replacement repl.  repl can be either a string or a callable;
    if a callable, it's passed the match object and must return
    a replacement string to be used."""
    compiled = _compile(pattern, 0)
    return compiled.sub(repl, string, count)
+
def subn(pattern, repl, string, count=0):
    """Return a 2-tuple containing (new_string, number).
    new_string is the string obtained by replacing the leftmost
    non-overlapping occurrences of the pattern in the source
    string by the replacement repl.  number is the number of
    substitutions that were made. repl can be either a string or a
    callable; if a callable, it's passed the match object and must
    return a replacement string to be used."""
    compiled = _compile(pattern, 0)
    return compiled.subn(repl, string, count)
+
def split(pattern, string, maxsplit=0):
    """Split the source string by the occurrences of the pattern,
    returning a list containing the resulting substrings."""
    compiled = _compile(pattern, 0)
    return compiled.split(string, maxsplit)
+
def findall(pattern, string, flags=0):
    """Return a list of all non-overlapping matches in the string.

    If one or more groups are present in the pattern, return a
    list of groups; this will be a list of tuples if the pattern
    has more than one group.

    Empty matches are included in the result."""
    compiled = _compile(pattern, flags)
    return compiled.findall(string)
+
# finditer() requires iterator support, available since Python 2.2;
# it is only defined (and exported) on such interpreters.
if sys.hexversion >= 0x02020000:
    __all__.append("finditer")
    def finditer(pattern, string, flags=0):
        """Return an iterator over all non-overlapping matches in the
        string.  For each match, the iterator returns a match object.

        Empty matches are included in the result."""
        return _compile(pattern, flags).finditer(string)
+
def compile(pattern, flags=0):
    "Compile a regular expression pattern, returning a pattern object."
    # Thin public wrapper around the caching _compile() helper.
    return _compile(pattern, flags)
+
def purge():
    "Clear the regular expression cache"
    for cache in (_cache, _cache_repl):
        cache.clear()
+
def template(pattern, flags=0):
    "Compile a template pattern, returning a pattern object"
    # A template pattern is an ordinary pattern compiled with the
    # experimental TEMPLATE flag forced on.
    return _compile(pattern, TEMPLATE | flags)
+
def escape(pattern):
    "Escape all non-alphanumeric characters in pattern."
    # Build the result character by character; NUL must be spelled out
    # as an octal escape since "\<NUL>" is not a valid escape sequence.
    chars = []
    for c in pattern:
        if "a" <= c <= "z" or "A" <= c <= "Z" or "0" <= c <= "9":
            chars.append(c)
        elif c == "\000":
            chars.append("\\000")
        else:
            chars.append("\\" + c)
    # Joining on an empty slice of the input preserves its string type.
    return pattern[:0].join(chars)
+
+# --------------------------------------------------------------------
+# internals
+
# Compiled-pattern and compiled-replacement caches, keyed by
# (type, pattern, flags) and (repl, pattern) respectively.
_cache = {}
_cache_repl = {}

# Type object for compiled patterns (the C type has no public name).
_pattern_type = type(sre_compile.compile("", 0))

# Each cache is emptied entirely once it reaches this many entries.
_MAXCACHE = 100
+
def _compile(*key):
    # internal: compile pattern
    # The cache key includes the pattern's type so that str and unicode
    # patterns with equal text do not collide in the cache.
    cachekey = (type(key[0]),) + key
    p = _cache.get(cachekey)
    if p is not None:
        return p
    pattern, flags = key
    if isinstance(pattern, _pattern_type):
        # Already compiled; return it as-is (flags argument is ignored).
        return pattern
    if not sre_compile.isstring(pattern):
        raise TypeError, "first argument must be string or compiled pattern"
    try:
        p = sre_compile.compile(pattern, flags)
    except error, v:
        raise error, v # invalid expression
    if len(_cache) >= _MAXCACHE:
        # Crude eviction policy: drop the whole cache once it fills up.
        _cache.clear()
    _cache[cachekey] = p
    return p
+
def _compile_repl(*key):
    # internal: compile replacement pattern
    # Cached on the (repl, pattern) pair, mirroring _compile() above.
    p = _cache_repl.get(key)
    if p is not None:
        return p
    repl, pattern = key
    try:
        p = sre_parse.parse_template(repl, pattern)
    except error, v:
        raise error, v # invalid expression
    if len(_cache_repl) >= _MAXCACHE:
        # Same crude eviction policy as the pattern cache.
        _cache_repl.clear()
    _cache_repl[key] = p
    return p
+
def _expand(pattern, match, template):
    # internal: match.expand implementation hook
    parsed = sre_parse.parse_template(template, pattern)
    return sre_parse.expand_template(parsed, match)
+
def _subx(pattern, template):
    # internal: pattern.sub/subn implementation helper
    template = _compile_repl(template, pattern)
    groups, literals = template
    if not groups and len(literals) == 1:
        # No group references and a single literal piece: the
        # replacement is a constant string, so return it directly.
        return literals[0]
    def filter(match, template=template):
        return sre_parse.expand_template(template, match)
    return filter
+
# register myself for pickling

import copy_reg

def _pickle(p):
    # Pickle compiled patterns as their source text plus flags; the
    # pattern is recompiled (via the _compile cache) on unpickling.
    return _compile, (p.pattern, p.flags)

copy_reg.pickle(_pattern_type, _pickle, _compile)
+
+# --------------------------------------------------------------------
+# experimental stuff (see python-dev discussions for details)
+
class Scanner:
    # Experimental lexical scanner: takes a list of (pattern, action)
    # pairs and repeatedly matches them, in order, against a string.
    def __init__(self, lexicon, flags=0):
        from sre_constants import BRANCH, SUBPATTERN
        self.lexicon = lexicon
        # combine phrases into a compound pattern
        p = []
        s = sre_parse.Pattern()
        s.flags = flags
        for phrase, action in lexicon:
            # Wrap each phrase in its own numbered group so that
            # m.lastindex identifies which lexicon entry matched.
            p.append(sre_parse.SubPattern(s, [
                (SUBPATTERN, (len(p)+1, sre_parse.parse(phrase, flags))),
                ]))
        p = sre_parse.SubPattern(s, [(BRANCH, (None, p))])
        s.groups = len(p)
        self.scanner = sre_compile.compile(p)
    def scan(self, string):
        # Returns (tokens, remainder): the list of action results and
        # the unmatched tail of the string.
        result = []
        append = result.append
        match = self.scanner.scanner(string).match
        i = 0
        while 1:
            m = match()
            if not m:
                break
            j = m.end()
            if i == j:
                # Zero-width match: stop rather than loop forever.
                break
            action = self.lexicon[m.lastindex-1][1]
            if callable(action):
                # Callable actions receive the scanner (with .match set
                # to the match object) and the matched text, and may
                # transform or suppress the token.
                self.match = m
                action = action(self, m.group())
            if action is not None:
                append(action)
            i = j
        return result, string[i:]
diff --git a/depot_tools/release/win/python_24/Lib/sre_compile.py b/depot_tools/release/win/python_24/Lib/sre_compile.py
new file mode 100644
index 0000000..27ab1fe
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sre_compile.py
@@ -0,0 +1,520 @@
+#
+# Secret Labs' Regular Expression Engine
+#
+# convert template to internal format
+#
+# Copyright (c) 1997-2001 by Secret Labs AB.  All rights reserved.
+#
+# See the sre.py file for information on usage and redistribution.
+#
+
+"""Internal support module for sre"""
+
+import _sre, sys
+
+from sre_constants import *
+
# sanity check: the C accelerator module must match this release
assert _sre.MAGIC == MAGIC, "SRE module mismatch"

# MAXCODE: largest value that fits in a single code word
if _sre.CODESIZE == 2:
    MAXCODE = 65535
else:
    MAXCODE = 0xFFFFFFFFL
+
def _identityfunction(x):
    # default "fixup" function: returns its argument unchanged (used
    # where case-insensitive matching would otherwise lowercase it)
    return x
+
def _compile(code, pattern, flags):
    # internal: compile a (sub)pattern
    #
    # Walks the parsed (op, av) nodes of "pattern" and appends the
    # corresponding opcodes to the "code" list in place.  "flags" are
    # the SRE_FLAG_* bits.
    emit = code.append
    _len = len
    # membership sets for the opcode families handled below
    LITERAL_CODES = {LITERAL:1, NOT_LITERAL:1}
    REPEATING_CODES = {REPEAT:1, MIN_REPEAT:1, MAX_REPEAT:1}
    SUCCESS_CODES = {SUCCESS:1, FAILURE:1}
    ASSERT_CODES = {ASSERT:1, ASSERT_NOT:1}
    for op, av in pattern:
        if op in LITERAL_CODES:
            if flags & SRE_FLAG_IGNORECASE:
                # emit the _IGNORE variant with a lowercased literal
                emit(OPCODES[OP_IGNORE[op]])
                emit(_sre.getlower(av, flags))
            else:
                emit(OPCODES[op])
                emit(av)
        elif op is IN:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
                def fixup(literal, flags=flags):
                    return _sre.getlower(literal, flags)
            else:
                emit(OPCODES[op])
                fixup = _identityfunction
            # reserve a skip slot, backpatched once the set is emitted
            skip = _len(code); emit(0)
            _compile_charset(av, flags, code, fixup)
            code[skip] = _len(code) - skip
        elif op is ANY:
            if flags & SRE_FLAG_DOTALL:
                emit(OPCODES[ANY_ALL])
            else:
                emit(OPCODES[ANY])
        elif op in REPEATING_CODES:
            if flags & SRE_FLAG_TEMPLATE:
                raise error, "internal: unsupported template operator"
                # NOTE(review): everything below this raise is
                # unreachable dead code, kept as in the original.
                emit(OPCODES[REPEAT])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = _len(code) - skip
            elif _simple(av) and op is not REPEAT:
                # fixed width-1 body: use the cheaper *_REPEAT_ONE forms
                if op is MAX_REPEAT:
                    emit(OPCODES[REPEAT_ONE])
                else:
                    emit(OPCODES[MIN_REPEAT_ONE])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                emit(OPCODES[SUCCESS])
                code[skip] = _len(code) - skip
            else:
                # general repeat: body followed by MAX/MIN_UNTIL
                emit(OPCODES[REPEAT])
                skip = _len(code); emit(0)
                emit(av[0])
                emit(av[1])
                _compile(code, av[2], flags)
                code[skip] = _len(code) - skip
                if op is MAX_REPEAT:
                    emit(OPCODES[MAX_UNTIL])
                else:
                    emit(OPCODES[MIN_UNTIL])
        elif op is SUBPATTERN:
            # av[0] is the group number (false for non-capturing);
            # MARK opcodes record the group start/end positions
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2)
            # _compile_info(code, av[1], flags)
            _compile(code, av[1], flags)
            if av[0]:
                emit(OPCODES[MARK])
                emit((av[0]-1)*2+1)
        elif op in SUCCESS_CODES:
            emit(OPCODES[op])
        elif op in ASSERT_CODES:
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            if av[0] >= 0:
                emit(0) # look ahead
            else:
                # look-behind must know exactly how far to back up
                lo, hi = av[1].getwidth()
                if lo != hi:
                    raise error, "look-behind requires fixed-width pattern"
                emit(lo) # look behind
            _compile(code, av[1], flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
        elif op is CALL:
            emit(OPCODES[op])
            skip = _len(code); emit(0)
            _compile(code, av, flags)
            emit(OPCODES[SUCCESS])
            code[skip] = _len(code) - skip
        elif op is AT:
            # anchors: substitute multiline/locale/unicode variants
            emit(OPCODES[op])
            if flags & SRE_FLAG_MULTILINE:
                av = AT_MULTILINE.get(av, av)
            if flags & SRE_FLAG_LOCALE:
                av = AT_LOCALE.get(av, av)
            elif flags & SRE_FLAG_UNICODE:
                av = AT_UNICODE.get(av, av)
            emit(ATCODES[av])
        elif op is BRANCH:
            emit(OPCODES[op])
            tail = []
            tailappend = tail.append
            for av in av[1]:
                skip = _len(code); emit(0)
                # _compile_info(code, av, flags)
                _compile(code, av, flags)
                emit(OPCODES[JUMP])
                tailappend(_len(code)); emit(0)
                code[skip] = _len(code) - skip
            emit(0) # end of branch
            # backpatch every alternative's exit jump to this point
            for tail in tail:
                code[tail] = _len(code) - tail
        elif op is CATEGORY:
            emit(OPCODES[op])
            if flags & SRE_FLAG_LOCALE:
                av = CH_LOCALE[av]
            elif flags & SRE_FLAG_UNICODE:
                av = CH_UNICODE[av]
            emit(CHCODES[av])
        elif op is GROUPREF:
            if flags & SRE_FLAG_IGNORECASE:
                emit(OPCODES[OP_IGNORE[op]])
            else:
                emit(OPCODES[op])
            emit(av-1)
        elif op is GROUPREF_EXISTS:
            # conditional pattern: (?(group)yes|no)
            emit(OPCODES[op])
            emit((av[0]-1)*2)
            skipyes = _len(code); emit(0)
            _compile(code, av[1], flags)
            if av[2]:
                emit(OPCODES[JUMP])
                skipno = _len(code); emit(0)
                # +1 skips past the JUMP emitted for the "yes" branch
                code[skipyes] = _len(code) - skipyes + 1
                _compile(code, av[2], flags)
                code[skipno] = _len(code) - skipno
            else:
                code[skipyes] = _len(code) - skipyes + 1
        else:
            raise ValueError, ("unsupported operand type", op)
+
def _compile_charset(charset, flags, code, fixup=None):
    # compile charset subprogram: emit the (optimized) set items,
    # terminated by a FAILURE sentinel.  "fixup" normalizes literal
    # code points (lowercasing for case-insensitive sets).
    emit = code.append
    if fixup is None:
        fixup = _identityfunction
    for op, av in _optimize_charset(charset, fixup):
        emit(OPCODES[op])
        if op is NEGATE:
            pass
        elif op is LITERAL:
            emit(fixup(av))
        elif op is RANGE:
            emit(fixup(av[0]))
            emit(fixup(av[1]))
        elif op is CHARSET:
            code.extend(av)
        elif op is BIGCHARSET:
            code.extend(av)
        elif op is CATEGORY:
            # category code depends on locale/unicode matching mode
            if flags & SRE_FLAG_LOCALE:
                emit(CHCODES[CH_LOCALE[av]])
            elif flags & SRE_FLAG_UNICODE:
                emit(CHCODES[CH_UNICODE[av]])
            else:
                emit(CHCODES[av])
        else:
            raise error, "internal: unsupported set operator"
    emit(OPCODES[FAILURE])
+
def _optimize_charset(charset, fixup):
    # internal: optimize character set
    #
    # Collects the set members into a 256-entry bitmap, then re-emits
    # them as at most two LITERAL/RANGE items, or one CHARSET bitmap.
    # Returns the original charset unchanged when it cannot do better.
    out = []
    outappend = out.append
    charmap = [0]*256
    try:
        for op, av in charset:
            if op is NEGATE:
                outappend((op, av))
            elif op is LITERAL:
                charmap[fixup(av)] = 1
            elif op is RANGE:
                for i in range(fixup(av[0]), fixup(av[1])+1):
                    charmap[i] = 1
            elif op is CATEGORY:
                # XXX: could append to charmap tail
                return charset # cannot compress
    except IndexError:
        # character set contains unicode characters (code point > 255
        # overflows the 256-entry charmap)
        return _optimize_unicode(charset, fixup)
    # compress character map: collect runs of consecutive set bits
    i = p = n = 0
    runs = []
    runsappend = runs.append
    for c in charmap:
        if c:
            if n == 0:
                p = i
            n = n + 1
        elif n:
            runsappend((p, n))
            n = 0
        i = i + 1
    if n:
        runsappend((p, n))
    if len(runs) <= 2:
        # use literal/range
        for p, n in runs:
            if n == 1:
                outappend((LITERAL, p))
            else:
                outappend((RANGE, (p, p+n-1)))
        # only worth it if shorter than the original item list
        if len(out) < len(charset):
            return out
    else:
        # use bitmap
        data = _mk_bitmap(charmap)
        outappend((CHARSET, data))
        return out
    return charset
+
def _mk_bitmap(bits):
    # pack a 0/1 list into a list of code words (bitmap), least
    # significant bit first, one word per MAXCODE's worth of bits
    data = []
    dataappend = data.append
    if _sre.CODESIZE == 2:
        start = (1, 0)
    else:
        start = (1L, 0L)  # long literals: 32-bit words
    m, v = start
    for c in bits:
        if c:
            v = v + m
        m = m + m
        if m > MAXCODE:
            # current word is full: flush it and start the next
            dataappend(v)
            m, v = start
    return data
+
+# To represent a big charset, first a bitmap of all characters in the
+# set is constructed. Then, this bitmap is sliced into chunks of 256
+# characters, duplicate chunks are eliminated, and each chunk is
+# given a number. In the compiled expression, the charset is
+# represented by a 16-bit word sequence, consisting of one word for
+# the number of different chunks, a sequence of 256 bytes (128 words)
+# of chunk numbers indexed by their original chunk position, and a
+# sequence of chunks (16 words each).
+
+# Compression is normally good: in a typical charset, large ranges of
+# Unicode will be either completely excluded (e.g. if only cyrillic
+# letters are to be matched), or completely included (e.g. if large
+# subranges of Kanji match). These ranges will be represented by
+# chunks of all one-bits or all zero-bits.
+
+# Matching can be also done efficiently: the more significant byte of
+# the Unicode character is an index into the chunk number, and the
+# less significant byte is a bit index in the chunk (just like the
+# CHARSET matching).
+
+# In UCS-4 mode, the BIGCHARSET opcode still supports only subsets
+# of the basic multilingual plane; an efficient representation
+# for all of UTF-16 has not yet been developed. This means,
+# in particular, that negated charsets cannot be represented as
+# bigcharsets.
+
def _optimize_unicode(charset, fixup):
    # internal: build a BIGCHARSET item for a set containing non-Latin-1
    # characters (see the block comment above for the representation)
    try:
        import array
    except ImportError:
        return charset
    # one bit per BMP code point
    charmap = [0]*65536
    negate = 0
    try:
        for op, av in charset:
            if op is NEGATE:
                negate = 1
            elif op is LITERAL:
                charmap[fixup(av)] = 1
            elif op is RANGE:
                for i in xrange(fixup(av[0]), fixup(av[1])+1):
                    charmap[i] = 1
            elif op is CATEGORY:
                # XXX: could expand category
                return charset # cannot compress
    except IndexError:
        # non-BMP characters
        return charset
    if negate:
        if sys.maxunicode != 65535:
            # XXX: negation does not work with big charsets
            return charset
        for i in xrange(65536):
            charmap[i] = not charmap[i]
    # split the bitmap into 256-bit chunks, de-duplicating them;
    # "mapping" records which chunk serves each 256-character page
    comps = {}
    mapping = [0]*256
    block = 0
    data = []
    for i in xrange(256):
        chunk = tuple(charmap[i*256:(i+1)*256])
        new = comps.setdefault(chunk, block)
        mapping[i] = new
        if new == block:
            # first occurrence of this chunk: append its bitmap words
            block = block + 1
            data = data + _mk_bitmap(chunk)
    header = [block]
    if _sre.CODESIZE == 2:
        code = 'H'
    else:
        code = 'I'
    # Convert block indices to byte array of 256 bytes
    mapping = array.array('b', mapping).tostring()
    # Convert byte array to word array
    mapping = array.array(code, mapping)
    assert mapping.itemsize == _sre.CODESIZE
    header = header + mapping.tolist()
    data[0:0] = header
    return [(BIGCHARSET, data)]
+
def _simple(av):
    # check if av is a "simple" operator: a fixed, width-1 body that is
    # not a subpattern (such repeats can use the *_REPEAT_ONE opcodes)
    lo, hi = av[2].getwidth()
    if lo == 0 and hi == MAXREPEAT:
        raise error, "nothing to repeat"
    return lo == hi == 1 and av[2][0][0] != SUBPATTERN
+
def _compile_info(code, pattern, flags):
    # internal: compile an info block.  in the current version,
    # this contains min/max pattern width, and an optional literal
    # prefix or a character map
    lo, hi = pattern.getwidth()
    if lo == 0:
        return # not worth it
    # look for a literal prefix
    prefix = []
    prefixappend = prefix.append
    prefix_skip = 0
    charset = [] # not used
    charsetappend = charset.append
    if not (flags & SRE_FLAG_IGNORECASE):
        # look for literal prefix
        for op, av in pattern.data:
            if op is LITERAL:
                # prefix_skip counts the leading ungrouped literals
                if len(prefix) == prefix_skip:
                    prefix_skip = prefix_skip + 1
                prefixappend(av)
            elif op is SUBPATTERN and len(av[1]) == 1:
                # a group around a single literal still extends the prefix
                op, av = av[1][0]
                if op is LITERAL:
                    prefixappend(av)
                else:
                    break
            else:
                break
        # if no prefix, look for charset prefix
        if not prefix and pattern.data:
            op, av = pattern.data[0]
            if op is SUBPATTERN and av[1]:
                op, av = av[1][0]
                if op is LITERAL:
                    charsetappend((op, av))
                elif op is BRANCH:
                    # all alternatives must start with a literal for the
                    # charset prefix to apply (note the for/else)
                    c = []
                    cappend = c.append
                    for p in av[1]:
                        if not p:
                            break
                        op, av = p[0]
                        if op is LITERAL:
                            cappend((op, av))
                        else:
                            break
                    else:
                        charset = c
            elif op is BRANCH:
                c = []
                cappend = c.append
                for p in av[1]:
                    if not p:
                        break
                    op, av = p[0]
                    if op is LITERAL:
                        cappend((op, av))
                    else:
                        break
                else:
                    charset = c
            elif op is IN:
                charset = av
##     if prefix:
##         print "*** PREFIX", prefix, prefix_skip
##     if charset:
##         print "*** CHARSET", charset
    # add an info block
    emit = code.append
    emit(OPCODES[INFO])
    skip = len(code); emit(0)
    # literal flag
    mask = 0
    if prefix:
        mask = SRE_INFO_PREFIX
        if len(prefix) == prefix_skip == len(pattern.data):
            mask = mask + SRE_INFO_LITERAL
    elif charset:
        mask = mask + SRE_INFO_CHARSET
    emit(mask)
    # pattern length (min, then max; 0 here means "unbounded")
    if lo < MAXCODE:
        emit(lo)
    else:
        emit(MAXCODE)
        prefix = prefix[:MAXCODE]
    if hi < MAXCODE:
        emit(hi)
    else:
        emit(0)
    # add literal prefix
    if prefix:
        emit(len(prefix)) # length
        emit(prefix_skip) # skip
        code.extend(prefix)
        # generate overlap table (failure links for fast prefix search)
        table = [-1] + ([0]*len(prefix))
        for i in xrange(len(prefix)):
            table[i+1] = table[i]+1
            while table[i+1] > 0 and prefix[i] != prefix[table[i+1]-1]:
                table[i+1] = table[table[i+1]-1]+1
        code.extend(table[1:]) # don't store first entry
    elif charset:
        _compile_charset(charset, flags, code)
    code[skip] = len(code) - skip
+
# STRING_TYPES: types accepted as pattern source strings; include the
# unicode type only when this interpreter build provides it
try:
    unicode
except NameError:
    STRING_TYPES = (type(""),)
else:
    STRING_TYPES = (type(""), type(unicode("")))
+
def isstring(obj):
    # true (1) if obj is an instance of any supported string type;
    # isinstance accepts the STRING_TYPES tuple directly
    if isinstance(obj, STRING_TYPES):
        return 1
    return 0
+
def _code(p, flags):
    # internal: assemble the complete code list for a parsed pattern:
    # optional INFO block, pattern body, then a SUCCESS terminator

    flags = p.pattern.flags | flags
    code = []

    # compile info block
    _compile_info(code, p, flags)

    # compile the pattern
    _compile(code, p.data, flags)

    code.append(OPCODES[SUCCESS])

    return code
+
def compile(p, flags=0):
    # internal: convert pattern list to internal format

    if isstring(p):
        # compiling from source: parse it, keep the original string
        import sre_parse
        pattern = p
        p = sre_parse.parse(p, flags)
    else:
        pattern = None  # already a parsed SubPattern

    code = _code(p, flags)

    # print code

    # XXX: <fl> get rid of this limitation!
    if p.pattern.groups > 100:
        raise AssertionError(
            "sorry, but this version only supports 100 named groups"
            )

    # map in either direction
    groupindex = p.pattern.groupdict
    indexgroup = [None] * p.pattern.groups
    for k, i in groupindex.items():
        indexgroup[i] = k

    # hand off to the C engine; groups-1 excludes group 0 (whole match)
    return _sre.compile(
        pattern, flags, code,
        p.pattern.groups-1,
        groupindex, indexgroup
        )
diff --git a/depot_tools/release/win/python_24/Lib/sre_constants.py b/depot_tools/release/win/python_24/Lib/sre_constants.py
new file mode 100644
index 0000000..1863f48
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sre_constants.py
@@ -0,0 +1,261 @@
+#
+# Secret Labs' Regular Expression Engine
+#
+# various symbols used by the regular expression engine.
+# run this script to update the _sre include files!
+#
+# Copyright (c) 1998-2001 by Secret Labs AB.  All rights reserved.
+#
+# See the sre.py file for information on usage and redistribution.
+#
+
+"""Internal support module for sre"""
+
# update when constants are added or removed

MAGIC = 20031017  # must agree with _sre.MAGIC (checked in sre_compile)

# max code word in this release

MAXREPEAT = 65535

# SRE standard exception (access as sre.error)
# should this really be here?

class error(Exception):
    pass

# operators

FAILURE = "failure"
SUCCESS = "success"

ANY = "any"
ANY_ALL = "any_all"
ASSERT = "assert"
ASSERT_NOT = "assert_not"
AT = "at"
BIGCHARSET = "bigcharset"
BRANCH = "branch"
CALL = "call"
CATEGORY = "category"
CHARSET = "charset"
GROUPREF = "groupref"
GROUPREF_IGNORE = "groupref_ignore"
GROUPREF_EXISTS = "groupref_exists"
IN = "in"
IN_IGNORE = "in_ignore"
INFO = "info"
JUMP = "jump"
LITERAL = "literal"
LITERAL_IGNORE = "literal_ignore"
MARK = "mark"
MAX_REPEAT = "max_repeat"
MAX_UNTIL = "max_until"
MIN_REPEAT = "min_repeat"
MIN_UNTIL = "min_until"
NEGATE = "negate"
NOT_LITERAL = "not_literal"
NOT_LITERAL_IGNORE = "not_literal_ignore"
RANGE = "range"
REPEAT = "repeat"
REPEAT_ONE = "repeat_one"
SUBPATTERN = "subpattern"
MIN_REPEAT_ONE = "min_repeat_one"

# positions
AT_BEGINNING = "at_beginning"
AT_BEGINNING_LINE = "at_beginning_line"
AT_BEGINNING_STRING = "at_beginning_string"
AT_BOUNDARY = "at_boundary"
AT_NON_BOUNDARY = "at_non_boundary"
AT_END = "at_end"
AT_END_LINE = "at_end_line"
AT_END_STRING = "at_end_string"
AT_LOC_BOUNDARY = "at_loc_boundary"
AT_LOC_NON_BOUNDARY = "at_loc_non_boundary"
AT_UNI_BOUNDARY = "at_uni_boundary"
AT_UNI_NON_BOUNDARY = "at_uni_non_boundary"

# categories
CATEGORY_DIGIT = "category_digit"
CATEGORY_NOT_DIGIT = "category_not_digit"
CATEGORY_SPACE = "category_space"
CATEGORY_NOT_SPACE = "category_not_space"
CATEGORY_WORD = "category_word"
CATEGORY_NOT_WORD = "category_not_word"
CATEGORY_LINEBREAK = "category_linebreak"
CATEGORY_NOT_LINEBREAK = "category_not_linebreak"
CATEGORY_LOC_WORD = "category_loc_word"
CATEGORY_LOC_NOT_WORD = "category_loc_not_word"
CATEGORY_UNI_DIGIT = "category_uni_digit"
CATEGORY_UNI_NOT_DIGIT = "category_uni_not_digit"
CATEGORY_UNI_SPACE = "category_uni_space"
CATEGORY_UNI_NOT_SPACE = "category_uni_not_space"
CATEGORY_UNI_WORD = "category_uni_word"
CATEGORY_UNI_NOT_WORD = "category_uni_not_word"
CATEGORY_UNI_LINEBREAK = "category_uni_linebreak"
CATEGORY_UNI_NOT_LINEBREAK = "category_uni_not_linebreak"

# NOTE: list order defines the numeric code values (see makedict
# below); the generated sre_constants.h must match the C engine
OPCODES = [

    # failure=0 success=1 (just because it looks better that way :-)
    FAILURE, SUCCESS,

    ANY, ANY_ALL,
    ASSERT, ASSERT_NOT,
    AT,
    BRANCH,
    CALL,
    CATEGORY,
    CHARSET, BIGCHARSET,
    GROUPREF, GROUPREF_EXISTS, GROUPREF_IGNORE,
    IN, IN_IGNORE,
    INFO,
    JUMP,
    LITERAL, LITERAL_IGNORE,
    MARK,
    MAX_UNTIL,
    MIN_UNTIL,
    NOT_LITERAL, NOT_LITERAL_IGNORE,
    NEGATE,
    RANGE,
    REPEAT,
    REPEAT_ONE,
    SUBPATTERN,
    MIN_REPEAT_ONE

]

ATCODES = [
    AT_BEGINNING, AT_BEGINNING_LINE, AT_BEGINNING_STRING, AT_BOUNDARY,
    AT_NON_BOUNDARY, AT_END, AT_END_LINE, AT_END_STRING,
    AT_LOC_BOUNDARY, AT_LOC_NON_BOUNDARY, AT_UNI_BOUNDARY,
    AT_UNI_NON_BOUNDARY
]

CHCODES = [
    CATEGORY_DIGIT, CATEGORY_NOT_DIGIT, CATEGORY_SPACE,
    CATEGORY_NOT_SPACE, CATEGORY_WORD, CATEGORY_NOT_WORD,
    CATEGORY_LINEBREAK, CATEGORY_NOT_LINEBREAK, CATEGORY_LOC_WORD,
    CATEGORY_LOC_NOT_WORD, CATEGORY_UNI_DIGIT, CATEGORY_UNI_NOT_DIGIT,
    CATEGORY_UNI_SPACE, CATEGORY_UNI_NOT_SPACE, CATEGORY_UNI_WORD,
    CATEGORY_UNI_NOT_WORD, CATEGORY_UNI_LINEBREAK,
    CATEGORY_UNI_NOT_LINEBREAK
]
+
def makedict(list):
    # map each item of the sequence to its position index
    d = {}
    for i, item in enumerate(list):
        d[item] = i
    return d
+
# turn the symbolic lists into name -> numeric code dictionaries
OPCODES = makedict(OPCODES)
ATCODES = makedict(ATCODES)
CHCODES = makedict(CHCODES)

# replacement operations for "ignore case" mode
OP_IGNORE = {
    GROUPREF: GROUPREF_IGNORE,
    IN: IN_IGNORE,
    LITERAL: LITERAL_IGNORE,
    NOT_LITERAL: NOT_LITERAL_IGNORE
}

# anchor substitutions applied under the corresponding flags
AT_MULTILINE = {
    AT_BEGINNING: AT_BEGINNING_LINE,
    AT_END: AT_END_LINE
}

AT_LOCALE = {
    AT_BOUNDARY: AT_LOC_BOUNDARY,
    AT_NON_BOUNDARY: AT_LOC_NON_BOUNDARY
}

AT_UNICODE = {
    AT_BOUNDARY: AT_UNI_BOUNDARY,
    AT_NON_BOUNDARY: AT_UNI_NON_BOUNDARY
}

# category substitutions for LOCALE / UNICODE matching modes
CH_LOCALE = {
    CATEGORY_DIGIT: CATEGORY_DIGIT,
    CATEGORY_NOT_DIGIT: CATEGORY_NOT_DIGIT,
    CATEGORY_SPACE: CATEGORY_SPACE,
    CATEGORY_NOT_SPACE: CATEGORY_NOT_SPACE,
    CATEGORY_WORD: CATEGORY_LOC_WORD,
    CATEGORY_NOT_WORD: CATEGORY_LOC_NOT_WORD,
    CATEGORY_LINEBREAK: CATEGORY_LINEBREAK,
    CATEGORY_NOT_LINEBREAK: CATEGORY_NOT_LINEBREAK
}

CH_UNICODE = {
    CATEGORY_DIGIT: CATEGORY_UNI_DIGIT,
    CATEGORY_NOT_DIGIT: CATEGORY_UNI_NOT_DIGIT,
    CATEGORY_SPACE: CATEGORY_UNI_SPACE,
    CATEGORY_NOT_SPACE: CATEGORY_UNI_NOT_SPACE,
    CATEGORY_WORD: CATEGORY_UNI_WORD,
    CATEGORY_NOT_WORD: CATEGORY_UNI_NOT_WORD,
    CATEGORY_LINEBREAK: CATEGORY_UNI_LINEBREAK,
    CATEGORY_NOT_LINEBREAK: CATEGORY_UNI_NOT_LINEBREAK
}

# flags
SRE_FLAG_TEMPLATE = 1 # template mode (disable backtracking)
SRE_FLAG_IGNORECASE = 2 # case insensitive
SRE_FLAG_LOCALE = 4 # honour system locale
SRE_FLAG_MULTILINE = 8 # treat target as multiline string
SRE_FLAG_DOTALL = 16 # treat target as a single string
SRE_FLAG_UNICODE = 32 # use unicode locale
SRE_FLAG_VERBOSE = 64 # ignore whitespace and comments
SRE_FLAG_DEBUG = 128 # debugging

# flags for INFO primitive
SRE_INFO_PREFIX = 1 # has prefix
SRE_INFO_LITERAL = 2 # entire pattern is literal (given by prefix)
SRE_INFO_CHARSET = 4 # pattern starts with character from given set
+
if __name__ == "__main__":
    # running this module regenerates sre_constants.h for the C engine
    def dump(f, d, prefix):
        # write a name -> value mapping as #define lines, sorted by value
        items = d.items()
        items.sort(key=lambda a: a[1])
        for k, v in items:
            f.write("#define %s_%s %s\n" % (prefix, k.upper(), v))
    f = open("sre_constants.h", "w")
    f.write("""\
/*
 * Secret Labs' Regular Expression Engine
 *
 * regular expression matching engine
 *
 * NOTE: This file is generated by sre_constants.py.  If you need
 * to change anything in here, edit sre_constants.py and run it.
 *
 * Copyright (c) 1997-2001 by Secret Labs AB.  All rights reserved.
 *
 * See the _sre.c file for information on usage and redistribution.
 */

""")

    f.write("#define SRE_MAGIC %d\n" % MAGIC)

    dump(f, OPCODES, "SRE_OP")
    dump(f, ATCODES, "SRE")
    dump(f, CHCODES, "SRE")

    f.write("#define SRE_FLAG_TEMPLATE %d\n" % SRE_FLAG_TEMPLATE)
    f.write("#define SRE_FLAG_IGNORECASE %d\n" % SRE_FLAG_IGNORECASE)
    f.write("#define SRE_FLAG_LOCALE %d\n" % SRE_FLAG_LOCALE)
    f.write("#define SRE_FLAG_MULTILINE %d\n" % SRE_FLAG_MULTILINE)
    f.write("#define SRE_FLAG_DOTALL %d\n" % SRE_FLAG_DOTALL)
    f.write("#define SRE_FLAG_UNICODE %d\n" % SRE_FLAG_UNICODE)
    f.write("#define SRE_FLAG_VERBOSE %d\n" % SRE_FLAG_VERBOSE)

    f.write("#define SRE_INFO_PREFIX %d\n" % SRE_INFO_PREFIX)
    f.write("#define SRE_INFO_LITERAL %d\n" % SRE_INFO_LITERAL)
    f.write("#define SRE_INFO_CHARSET %d\n" % SRE_INFO_CHARSET)

    f.close()
    print "done"
diff --git a/depot_tools/release/win/python_24/Lib/sre_parse.py b/depot_tools/release/win/python_24/Lib/sre_parse.py
new file mode 100644
index 0000000..5066615
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sre_parse.py
@@ -0,0 +1,782 @@
+#
+# Secret Labs' Regular Expression Engine
+#
+# convert re-style regular expression to sre pattern
+#
+# Copyright (c) 1998-2001 by Secret Labs AB.  All rights reserved.
+#
+# See the sre.py file for information on usage and redistribution.
+#
+
+"""Internal support module for sre"""
+
+# XXX: show string offset and offending character for all errors
+
+import sys
+
+from sre_constants import *
+
# characters with special meaning outside a class / repeat operators
SPECIAL_CHARS = ".\\[{()*+?^$|"
REPEAT_CHARS = "*+?{"

DIGITS = tuple("0123456789")

OCTDIGITS = tuple("01234567")
HEXDIGITS = tuple("0123456789abcdefABCDEF")

WHITESPACE = tuple(" \t\n\r\v\f")

# simple escapes that stand for a single literal character
ESCAPES = {
    r"\a": (LITERAL, ord("\a")),
    r"\b": (LITERAL, ord("\b")),
    r"\f": (LITERAL, ord("\f")),
    r"\n": (LITERAL, ord("\n")),
    r"\r": (LITERAL, ord("\r")),
    r"\t": (LITERAL, ord("\t")),
    r"\v": (LITERAL, ord("\v")),
    r"\\": (LITERAL, ord("\\"))
}

# escapes with structural meaning: anchors and character classes
CATEGORIES = {
    r"\A": (AT, AT_BEGINNING_STRING), # start of string
    r"\b": (AT, AT_BOUNDARY),
    r"\B": (AT, AT_NON_BOUNDARY),
    r"\d": (IN, [(CATEGORY, CATEGORY_DIGIT)]),
    r"\D": (IN, [(CATEGORY, CATEGORY_NOT_DIGIT)]),
    r"\s": (IN, [(CATEGORY, CATEGORY_SPACE)]),
    r"\S": (IN, [(CATEGORY, CATEGORY_NOT_SPACE)]),
    r"\w": (IN, [(CATEGORY, CATEGORY_WORD)]),
    r"\W": (IN, [(CATEGORY, CATEGORY_NOT_WORD)]),
    r"\Z": (AT, AT_END_STRING), # end of string
}

# inline flag letters, as used in "(?i)" etc.
FLAGS = {
    # standard flags
    "i": SRE_FLAG_IGNORECASE,
    "L": SRE_FLAG_LOCALE,
    "m": SRE_FLAG_MULTILINE,
    "s": SRE_FLAG_DOTALL,
    "x": SRE_FLAG_VERBOSE,
    # extensions
    "t": SRE_FLAG_TEMPLATE,
    "u": SRE_FLAG_UNICODE,
}
+
class Pattern:
    # master pattern object.  keeps track of global attributes
    def __init__(self):
        self.flags = 0       # SRE_FLAG_* bits
        self.open = []       # group numbers opened but not yet closed
        self.groups = 1      # next free group number (0 is the whole match)
        self.groupdict = {}  # group name -> group number
    def opengroup(self, name=None):
        # allocate the next group number; register name if given
        gid = self.groups
        self.groups = gid + 1
        if name is not None:
            ogid = self.groupdict.get(name, None)
            if ogid is not None:
                raise error, ("redefinition of group name %s as group %d; "
                              "was group %d" % (repr(name), gid,  ogid))
            self.groupdict[name] = gid
        self.open.append(gid)
        return gid
    def closegroup(self, gid):
        self.open.remove(gid)
    def checkgroup(self, gid):
        # a group can be back-referenced once it is defined and closed
        return gid < self.groups and gid not in self.open
+
class SubPattern:
    # a subpattern, in intermediate form
    #
    # "data" is a list of (op, av) parse nodes; this class mostly
    # behaves like a list of those nodes, plus width computation and a
    # debug dump.
    def __init__(self, pattern, data=None):
        self.pattern = pattern  # owning Pattern (global attributes)
        if data is None:
            data = []
        self.data = data
        self.width = None  # cached (min, max) width, see getwidth()
    def dump(self, level=0):
        # debug helper: print the parse tree, indented by nesting level
        nl = 1
        seqtypes = type(()), type([])
        for op, av in self.data:
            print level*"  " + op,; nl = 0
            if op == "in":
                # member sublanguage
                print; nl = 1
                for op, a in av:
                    print (level+1)*"  " + op, a
            elif op == "branch":
                print; nl = 1
                i = 0
                for a in av[1]:
                    if i > 0:
                        print level*"  " + "or"
                    a.dump(level+1); nl = 1
                    i = i + 1
            elif type(av) in seqtypes:
                for a in av:
                    if isinstance(a, SubPattern):
                        if not nl: print
                        a.dump(level+1); nl = 1
                    else:
                        print a, ; nl = 0
            else:
                print av, ; nl = 0
            if not nl: print
    def __repr__(self):
        return repr(self.data)
    def __len__(self):
        return len(self.data)
    def __delitem__(self, index):
        del self.data[index]
    def __getitem__(self, index):
        return self.data[index]
    def __setitem__(self, index, code):
        self.data[index] = code
    def __getslice__(self, start, stop):
        # slices share the owning pattern but copy the node list
        return SubPattern(self.pattern, self.data[start:stop])
    def insert(self, index, code):
        self.data.insert(index, code)
    def append(self, code):
        self.data.append(code)
    def getwidth(self):
        # determine the width (min, max) for this subpattern
        if self.width:
            return self.width
        lo = hi = 0L  # longs: the running totals may exceed int range
        UNITCODES = (ANY, RANGE, IN, LITERAL, NOT_LITERAL, CATEGORY)
        REPEATCODES = (MIN_REPEAT, MAX_REPEAT)
        for op, av in self.data:
            if op is BRANCH:
                # branch width: min/max over all the alternatives
                i = sys.maxint
                j = 0
                for av in av[1]:
                    l, h = av.getwidth()
                    i = min(i, l)
                    j = max(j, h)
                lo = lo + i
                hi = hi + j
            elif op is CALL:
                i, j = av.getwidth()
                lo = lo + i
                hi = hi + j
            elif op is SUBPATTERN:
                i, j = av[1].getwidth()
                lo = lo + i
                hi = hi + j
            elif op in REPEATCODES:
                i, j = av[2].getwidth()
                lo = lo + long(i) * av[0]
                hi = hi + long(j) * av[1]
            elif op in UNITCODES:
                lo = lo + 1
                hi = hi + 1
            elif op == SUCCESS:
                break
        # clamp to the platform int range before caching
        self.width = int(min(lo, sys.maxint)), int(min(hi, sys.maxint))
        return self.width
+
class Tokenizer:
    # splits the pattern string into tokens: a single character, or a
    # backslash escape taken together with the following character
    def __init__(self, string):
        self.string = string
        self.index = 0
        self.__next()
    def __next(self):
        # advance; self.next holds the upcoming token (None at end)
        if self.index >= len(self.string):
            self.next = None
            return
        char = self.string[self.index]
        if char[0] == "\\":
            try:
                c = self.string[self.index + 1]
            except IndexError:
                raise error, "bogus escape (end of line)"
            char = char + c
        self.index = self.index + len(char)
        self.next = char
    def match(self, char, skip=1):
        # return 1 and (optionally) consume if the next token is char
        if char == self.next:
            if skip:
                self.__next()
            return 1
        return 0
    def get(self):
        # consume and return the next token
        this = self.next
        self.__next()
        return this
    def tell(self):
        # save position as (index, lookahead) for a later seek()
        return self.index, self.next
    def seek(self, index):
        # restore a position saved by tell()
        self.index, self.next = index
+
def isident(char):
    # true if char may start an identifier: an ASCII letter or "_"
    if "a" <= char <= "z":
        return True
    if "A" <= char <= "Z":
        return True
    return char == "_"
+
def isdigit(char):
    # true if char is an ASCII decimal digit
    return "0" <= char and char <= "9"
+
def isname(name):
    # check that a group name has identifier syntax: a letter or "_"
    # first, then letters, digits or "_"
    if not isident(name[0]):
        return False
    for char in name[1:]:
        if isident(char):
            continue
        if isdigit(char):
            continue
        return False
    return True
+
def _class_escape(source, escape):
    # handle escape code inside character class
    #
    # "escape" is the two-character token ("\\x"); further characters
    # are pulled from "source" as needed.  Returns a parse node,
    # usually (LITERAL, codepoint).
    code = ESCAPES.get(escape)
    if code:
        return code
    code = CATEGORIES.get(escape)
    if code:
        return code
    try:
        c = escape[1:2]
        if c == "x":
            # hexadecimal escape (exactly two digits)
            while source.next in HEXDIGITS and len(escape) < 4:
                escape = escape + source.get()
            escape = escape[2:]
            if len(escape) != 2:
                raise error, "bogus escape: %s" % repr("\\" + escape)
            return LITERAL, int(escape, 16) & 0xff
        elif c in OCTDIGITS:
            # octal escape (up to three digits)
            while source.next in OCTDIGITS and len(escape) < 4:
                escape = escape + source.get()
            escape = escape[1:]
            return LITERAL, int(escape, 8) & 0xff
        elif c in DIGITS:
            # no group references inside a class: \8 and \9 are bogus
            raise error, "bogus escape: %s" % repr(escape)
        if len(escape) == 2:
            # any other escaped character stands for itself
            return LITERAL, ord(escape[1])
    except ValueError:
        pass
    raise error, "bogus escape: %s" % repr(escape)
+
+def _escape(source, escape, state):
+    # handle escape code in expression
+    # Like _class_escape, but outside a character class \1..\99 may be a
+    # group backreference; state supplies group bookkeeping.
+    code = CATEGORIES.get(escape)
+    if code:
+        return code
+    code = ESCAPES.get(escape)
+    if code:
+        return code
+    try:
+        c = escape[1:2]
+        if c == "x":
+            # hexadecimal escape
+            while source.next in HEXDIGITS and len(escape) < 4:
+                escape = escape + source.get()
+            if len(escape) != 4:
+                raise ValueError
+            return LITERAL, int(escape[2:], 16) & 0xff
+        elif c == "0":
+            # octal escape
+            while source.next in OCTDIGITS and len(escape) < 4:
+                escape = escape + source.get()
+            return LITERAL, int(escape[1:], 8) & 0xff
+        elif c in DIGITS:
+            # octal escape *or* decimal group reference (sigh)
+            if source.next in DIGITS:
+                escape = escape + source.get()
+                if (escape[1] in OCTDIGITS and escape[2] in OCTDIGITS and
+                    source.next in OCTDIGITS):
+                    # got three octal digits; this is an octal escape
+                    escape = escape + source.get()
+                    return LITERAL, int(escape[1:], 8) & 0xff
+            # not an octal escape, so this is a group reference
+            group = int(escape[1:])
+            if group < state.groups:
+                if not state.checkgroup(group):
+                    # backreference to a group still being parsed
+                    raise error, "cannot refer to open group"
+                return GROUPREF, group
+            raise ValueError
+        if len(escape) == 2:
+            # any other single escaped character is a literal
+            return LITERAL, ord(escape[1])
+    except ValueError:
+        pass
+    raise error, "bogus escape: %s" % repr(escape)
+
+def _parse_sub(source, state, nested=1):
+    # parse an alternation: a|b|c
+    # nested=0 means we are at the top level, where a stray ")" is an
+    # error rather than the end of a group.
+
+    items = []
+    itemsappend = items.append
+    sourcematch = source.match
+    while 1:
+        itemsappend(_parse(source, state))
+        if sourcematch("|"):
+            continue
+        if not nested:
+            break
+        if not source.next or sourcematch(")", 0):
+            break
+        else:
+            raise error, "pattern not properly closed"
+
+    if len(items) == 1:
+        # no alternation at all; return the single branch unchanged
+        return items[0]
+
+    subpattern = SubPattern(state)
+    subpatternappend = subpattern.append
+
+    # check if all items share a common prefix
+    while 1:
+        prefix = None
+        for item in items:
+            if not item:
+                break
+            if prefix is None:
+                prefix = item[0]
+            elif item[0] != prefix:
+                break
+        else:
+            # all subitems start with a common "prefix".
+            # move it out of the branch
+            for item in items:
+                del item[0]
+            subpatternappend(prefix)
+            continue # check next one
+        break
+
+    # check if the branch can be replaced by a character set
+    for item in items:
+        if len(item) != 1 or item[0][0] != LITERAL:
+            break
+    else:
+        # we can store this as a character set instead of a
+        # branch (the compiler may optimize this even more)
+        set = []
+        setappend = set.append
+        for item in items:
+            setappend(item[0])
+        subpatternappend((IN, set))
+        return subpattern
+
+    # general case: emit a BRANCH node over the remaining alternatives
+    subpattern.append((BRANCH, (None, items)))
+    return subpattern
+
+def _parse_sub_cond(source, state, condgroup):
+    # parse the body of a conditional backreference group (?(id)yes|no):
+    # at most two branches, selected on whether group condgroup matched.
+    item_yes = _parse(source, state)
+    if source.match("|"):
+        item_no = _parse(source, state)
+        if source.match("|"):
+            raise error, "conditional backref with more than two branches"
+    else:
+        item_no = None
+    if source.next and not source.match(")", 0):
+        raise error, "pattern not properly closed"
+    subpattern = SubPattern(state)
+    subpattern.append((GROUPREF_EXISTS, (condgroup, item_yes, item_no)))
+    return subpattern
+
+def _parse(source, state):
+    # parse a simple pattern
+    # Consumes tokens from source until "|" or ")" (or end of input) and
+    # returns a SubPattern of (opcode, argument) tuples.  state carries
+    # flags and group bookkeeping and may be mutated (e.g. by inline
+    # flags like (?i)).
+    subpattern = SubPattern(state)
+
+    # precompute constants into local variables
+    subpatternappend = subpattern.append
+    sourceget = source.get
+    sourcematch = source.match
+    _len = len
+    PATTERNENDERS = ("|", ")")
+    ASSERTCHARS = ("=", "!", "<")
+    LOOKBEHINDASSERTCHARS = ("=", "!")
+    REPEATCODES = (MIN_REPEAT, MAX_REPEAT)
+
+    while 1:
+
+        if source.next in PATTERNENDERS:
+            break # end of subpattern
+        this = sourceget()
+        if this is None:
+            break # end of pattern
+
+        if state.flags & SRE_FLAG_VERBOSE:
+            # skip whitespace and comments
+            if this in WHITESPACE:
+                continue
+            if this == "#":
+                # "#" starts a comment that runs to end of line
+                while 1:
+                    this = sourceget()
+                    if this in (None, "\n"):
+                        break
+                continue
+
+        if this and this[0] not in SPECIAL_CHARS:
+            # ordinary character: emit as a literal
+            subpatternappend((LITERAL, ord(this)))
+
+        elif this == "[":
+            # character set
+            set = []
+            setappend = set.append
+##          if sourcematch(":"):
+##              pass # handle character classes
+            if sourcematch("^"):
+                setappend((NEGATE, None))
+            # check remaining characters
+            # (start is kept so that a "]" first in the class is literal)
+            start = set[:]
+            while 1:
+                this = sourceget()
+                if this == "]" and set != start:
+                    break
+                elif this and this[0] == "\\":
+                    code1 = _class_escape(source, this)
+                elif this:
+                    code1 = LITERAL, ord(this)
+                else:
+                    raise error, "unexpected end of regular expression"
+                if sourcematch("-"):
+                    # potential range
+                    this = sourceget()
+                    if this == "]":
+                        # trailing "-" before "]" is a literal dash
+                        if code1[0] is IN:
+                            code1 = code1[1][0]
+                        setappend(code1)
+                        setappend((LITERAL, ord("-")))
+                        break
+                    elif this:
+                        if this[0] == "\\":
+                            code2 = _class_escape(source, this)
+                        else:
+                            code2 = LITERAL, ord(this)
+                        if code1[0] != LITERAL or code2[0] != LITERAL:
+                            raise error, "bad character range"
+                        lo = code1[1]
+                        hi = code2[1]
+                        if hi < lo:
+                            raise error, "bad character range"
+                        setappend((RANGE, (lo, hi)))
+                    else:
+                        raise error, "unexpected end of regular expression"
+                else:
+                    if code1[0] is IN:
+                        code1 = code1[1][0]
+                    setappend(code1)
+
+            # XXX: <fl> should move set optimization to compiler!
+            if _len(set)==1 and set[0][0] is LITERAL:
+                subpatternappend(set[0]) # optimization
+            elif _len(set)==2 and set[0][0] is NEGATE and set[1][0] is LITERAL:
+                subpatternappend((NOT_LITERAL, set[1][1])) # optimization
+            else:
+                # XXX: <fl> should add charmap optimization here
+                subpatternappend((IN, set))
+
+        elif this and this[0] in REPEAT_CHARS:
+            # repeat previous item
+            if this == "?":
+                min, max = 0, 1
+            elif this == "*":
+                min, max = 0, MAXREPEAT
+
+            elif this == "+":
+                min, max = 1, MAXREPEAT
+            elif this == "{":
+                # {m,n} counted repeat; on malformed syntax fall back to
+                # treating "{" as a literal (see seek below)
+                here = source.tell()
+                min, max = 0, MAXREPEAT
+                lo = hi = ""
+                while source.next in DIGITS:
+                    lo = lo + source.get()
+                if sourcematch(","):
+                    while source.next in DIGITS:
+                        hi = hi + sourceget()
+                else:
+                    hi = lo
+                if not sourcematch("}"):
+                    subpatternappend((LITERAL, ord(this)))
+                    source.seek(here)
+                    continue
+                if lo:
+                    min = int(lo)
+                if hi:
+                    max = int(hi)
+                if max < min:
+                    raise error, "bad repeat interval"
+            else:
+                raise error, "not supported"
+            # figure out which item to repeat
+            if subpattern:
+                item = subpattern[-1:]
+            else:
+                item = None
+            if not item or (_len(item) == 1 and item[0][0] == AT):
+                # anchors such as ^ and $ cannot be repeated
+                raise error, "nothing to repeat"
+            if item[0][0] in REPEATCODES:
+                raise error, "multiple repeat"
+            if sourcematch("?"):
+                # trailing "?" makes the repeat non-greedy
+                subpattern[-1] = (MIN_REPEAT, (min, max, item))
+            else:
+                subpattern[-1] = (MAX_REPEAT, (min, max, item))
+
+        elif this == ".":
+            subpatternappend((ANY, None))
+
+        elif this == "(":
+            # group = 1: capturing, 2: anonymous, 0: not a group at all
+            group = 1
+            name = None
+            condgroup = None
+            if sourcematch("?"):
+                group = 0
+                # options
+                if sourcematch("P"):
+                    # python extensions
+                    if sourcematch("<"):
+                        # named group: skip forward to end of name
+                        name = ""
+                        while 1:
+                            char = sourceget()
+                            if char is None:
+                                raise error, "unterminated name"
+                            if char == ">":
+                                break
+                            name = name + char
+                        group = 1
+                        if not isname(name):
+                            raise error, "bad character in group name"
+                    elif sourcematch("="):
+                        # named backreference
+                        name = ""
+                        while 1:
+                            char = sourceget()
+                            if char is None:
+                                raise error, "unterminated name"
+                            if char == ")":
+                                break
+                            name = name + char
+                        if not isname(name):
+                            raise error, "bad character in group name"
+                        gid = state.groupdict.get(name)
+                        if gid is None:
+                            raise error, "unknown group name"
+                        subpatternappend((GROUPREF, gid))
+                        continue
+                    else:
+                        char = sourceget()
+                        if char is None:
+                            raise error, "unexpected end of pattern"
+                        raise error, "unknown specifier: ?P%s" % char
+                elif sourcematch(":"):
+                    # non-capturing group
+                    group = 2
+                elif sourcematch("#"):
+                    # comment
+                    while 1:
+                        if source.next is None or source.next == ")":
+                            break
+                        sourceget()
+                    if not sourcematch(")"):
+                        raise error, "unbalanced parenthesis"
+                    continue
+                elif source.next in ASSERTCHARS:
+                    # lookahead assertions
+                    char = sourceget()
+                    dir = 1
+                    if char == "<":
+                        if source.next not in LOOKBEHINDASSERTCHARS:
+                            raise error, "syntax error"
+                        dir = -1 # lookbehind
+                        char = sourceget()
+                    p = _parse_sub(source, state)
+                    if not sourcematch(")"):
+                        raise error, "unbalanced parenthesis"
+                    if char == "=":
+                        subpatternappend((ASSERT, (dir, p)))
+                    else:
+                        subpatternappend((ASSERT_NOT, (dir, p)))
+                    continue
+                elif sourcematch("("):
+                    # conditional backreference group
+                    condname = ""
+                    while 1:
+                        char = sourceget()
+                        if char is None:
+                            raise error, "unterminated name"
+                        if char == ")":
+                            break
+                        condname = condname + char
+                    group = 2
+                    if isname(condname):
+                        condgroup = state.groupdict.get(condname)
+                        if condgroup is None:
+                            raise error, "unknown group name"
+                    else:
+                        try:
+                            condgroup = int(condname)
+                        except ValueError:
+                            raise error, "bad character in group name"
+                else:
+                    # flags
+                    # inline flags such as (?i) mutate state.flags
+                    if not source.next in FLAGS:
+                        raise error, "unexpected end of pattern"
+                    while source.next in FLAGS:
+                        state.flags = state.flags | FLAGS[sourceget()]
+            if group:
+                # parse group contents
+                if group == 2:
+                    # anonymous group
+                    group = None
+                else:
+                    group = state.opengroup(name)
+                if condgroup:
+                    p = _parse_sub_cond(source, state, condgroup)
+                else:
+                    p = _parse_sub(source, state)
+                if not sourcematch(")"):
+                    raise error, "unbalanced parenthesis"
+                if group is not None:
+                    state.closegroup(group)
+                subpatternappend((SUBPATTERN, (group, p)))
+            else:
+                # (?...) form that produced no group: must close here
+                while 1:
+                    char = sourceget()
+                    if char is None:
+                        raise error, "unexpected end of pattern"
+                    if char == ")":
+                        break
+                    raise error, "unknown extension"
+
+        elif this == "^":
+            subpatternappend((AT, AT_BEGINNING))
+
+        elif this == "$":
+            subpattern.append((AT, AT_END))
+
+        elif this and this[0] == "\\":
+            code = _escape(source, this, state)
+            subpatternappend(code)
+
+        else:
+            raise error, "parser error"
+
+    return subpattern
+
+def parse(str, flags=0, pattern=None):
+    # parse 're' pattern into list of (opcode, argument) tuples
+    # Entry point: returns a SubPattern for the whole pattern string.
+
+    source = Tokenizer(str)
+
+    if pattern is None:
+        pattern = Pattern()
+    pattern.flags = flags
+    pattern.str = str
+
+    # nested=0: a stray ")" at top level is an error, not a group end
+    p = _parse_sub(source, pattern, 0)
+
+    tail = source.get()
+    if tail == ")":
+        raise error, "unbalanced parenthesis"
+    elif tail:
+        raise error, "bogus characters at end of regular expression"
+
+    if flags & SRE_FLAG_DEBUG:
+        p.dump()
+
+    if not (flags & SRE_FLAG_VERBOSE) and p.pattern.flags & SRE_FLAG_VERBOSE:
+        # the VERBOSE flag was switched on inside the pattern.  to be
+        # on the safe side, we'll parse the whole thing again...
+        return parse(str, p.pattern.flags)
+
+    return p
+
+def parse_template(source, pattern):
+    # parse 're' replacement string into list of literals and
+    # group references
+    # Returns (groups, literals): literals is a positional list with
+    # None holes; groups lists (index, group-number) pairs that fill
+    # those holes at expansion time (see expand_template).
+    s = Tokenizer(source)
+    sget = s.get
+    p = []
+    a = p.append
+    def literal(literal, p=p, pappend=a):
+        # append a literal, merging with a preceding literal run
+        if p and p[-1][0] is LITERAL:
+            p[-1] = LITERAL, p[-1][1] + literal
+        else:
+            pappend((LITERAL, literal))
+    # use an empty slice of source to detect str vs unicode templates
+    sep = source[:0]
+    if type(sep) is type(""):
+        makechar = chr
+    else:
+        makechar = unichr
+    while 1:
+        this = sget()
+        if this is None:
+            break # end of replacement string
+        if this and this[0] == "\\":
+            # group
+            c = this[1:2]
+            if c == "g":
+                # \g<name> or \g<number> reference
+                name = ""
+                if s.match("<"):
+                    while 1:
+                        char = sget()
+                        if char is None:
+                            raise error, "unterminated group name"
+                        if char == ">":
+                            break
+                        name = name + char
+                if not name:
+                    raise error, "bad group name"
+                try:
+                    index = int(name)
+                    if index < 0:
+                        raise error, "negative group number"
+                except ValueError:
+                    if not isname(name):
+                        raise error, "bad character in group name"
+                    try:
+                        index = pattern.groupindex[name]
+                    except KeyError:
+                        raise IndexError, "unknown group name"
+                a((MARK, index))
+            elif c == "0":
+                # \0 introduces an octal escape (up to two more digits)
+                if s.next in OCTDIGITS:
+                    this = this + sget()
+                    if s.next in OCTDIGITS:
+                        this = this + sget()
+                literal(makechar(int(this[1:], 8) & 0xff))
+            elif c in DIGITS:
+                # \N: three octal digits make an octal escape,
+                # otherwise a decimal group reference
+                isoctal = False
+                if s.next in DIGITS:
+                    this = this + sget()
+                    if (c in OCTDIGITS and this[2] in OCTDIGITS and
+                        s.next in OCTDIGITS):
+                        this = this + sget()
+                        isoctal = True
+                        literal(makechar(int(this[1:], 8) & 0xff))
+                if not isoctal:
+                    a((MARK, int(this[1:])))
+            else:
+                # other escapes: use known translation, else keep as-is
+                try:
+                    this = makechar(ESCAPES[this][1])
+                except KeyError:
+                    pass
+                literal(this)
+        else:
+            literal(this)
+    # convert template to groups and literals lists
+    i = 0
+    groups = []
+    groupsappend = groups.append
+    literals = [None] * len(p)
+    for c, s in p:
+        if c is MARK:
+            groupsappend((i, s))
+            # literal[i] is already None
+        else:
+            literals[i] = s
+        i = i + 1
+    return groups, literals
+
+def expand_template(template, match):
+    # Expand a (groups, literals) pair from parse_template against a
+    # match object, substituting each referenced group's matched text.
+    g = match.group
+    # empty slice of the subject keeps the str/unicode type for join
+    sep = match.string[:0]
+    groups, literals = template
+    literals = literals[:]
+    try:
+        for index, group in groups:
+            literals[index] = s = g(group)
+            if s is None:
+                raise error, "unmatched group"
+    except IndexError:
+        raise error, "invalid group reference"
+    return sep.join(literals)
diff --git a/depot_tools/release/win/python_24/Lib/stat.py b/depot_tools/release/win/python_24/Lib/stat.py
new file mode 100644
index 0000000..70750d8b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/stat.py
@@ -0,0 +1,86 @@
+"""Constants/functions for interpreting results of os.stat() and os.lstat().
+
+Suggested usage: from stat import *
+"""
+
+# XXX Strictly spoken, this module may have to be adapted for each POSIX
+# implementation; in practice, however, the numeric constants used by
+# stat() are almost universal (even for stat() emulations on non-UNIX
+# systems like MS-DOS).
+
+# Indices for stat struct members in tuple returned by os.stat()
+# (the same indices apply to os.lstat() and os.fstat() results)
+
+ST_MODE  = 0
+ST_INO   = 1
+ST_DEV   = 2
+ST_NLINK = 3
+ST_UID   = 4
+ST_GID   = 5
+ST_SIZE  = 6
+ST_ATIME = 7
+ST_MTIME = 8
+ST_CTIME = 9
+# Extract bits from the mode
+
+def S_IMODE(mode):
+    # permission bits: rwx for user/group/other plus setuid/setgid/sticky
+    return mode & 07777
+
+def S_IFMT(mode):
+    # file-type bits only; compare the result against the S_IF* constants
+    return mode & 0170000
+
+# Constants used as S_IFMT() for various file types
+# (not all are implemented on all systems)
+
+S_IFDIR  = 0040000
+S_IFCHR  = 0020000
+S_IFBLK  = 0060000
+S_IFREG  = 0100000
+S_IFIFO  = 0010000
+S_IFLNK  = 0120000
+S_IFSOCK = 0140000
+
+# Functions to test for each file type
+# Each predicate masks the mode with S_IFMT() and compares against the
+# corresponding S_IF* constant above.
+
+def S_ISDIR(mode):
+    return S_IFMT(mode) == S_IFDIR
+
+def S_ISCHR(mode):
+    return S_IFMT(mode) == S_IFCHR
+
+def S_ISBLK(mode):
+    return S_IFMT(mode) == S_IFBLK
+
+def S_ISREG(mode):
+    return S_IFMT(mode) == S_IFREG
+
+def S_ISFIFO(mode):
+    return S_IFMT(mode) == S_IFIFO
+
+def S_ISLNK(mode):
+    return S_IFMT(mode) == S_IFLNK
+
+def S_ISSOCK(mode):
+    return S_IFMT(mode) == S_IFSOCK
+
+# Names for permission bits
+
+S_ISUID = 04000    # set-user-ID on execution
+S_ISGID = 02000    # set-group-ID on execution
+S_ENFMT = S_ISGID  # record-locking enforcement shares the setgid bit
+S_ISVTX = 01000    # sticky bit
+S_IREAD = 00400    # Unix V7 synonym for S_IRUSR
+S_IWRITE = 00200   # Unix V7 synonym for S_IWUSR
+S_IEXEC = 00100    # Unix V7 synonym for S_IXUSR
+S_IRWXU = 00700    # mask for owner permissions
+S_IRUSR = 00400
+S_IWUSR = 00200
+S_IXUSR = 00100
+S_IRWXG = 00070    # mask for group permissions
+S_IRGRP = 00040
+S_IWGRP = 00020
+S_IXGRP = 00010
+S_IRWXO = 00007    # mask for others' permissions
+S_IROTH = 00004
+S_IWOTH = 00002
+S_IXOTH = 00001
diff --git a/depot_tools/release/win/python_24/Lib/statcache.py b/depot_tools/release/win/python_24/Lib/statcache.py
new file mode 100644
index 0000000..d478393
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/statcache.py
@@ -0,0 +1,82 @@
+"""Maintain a cache of stat() information on files.
+
+There are functions to reset the cache or to selectively remove items.
+"""
+
+# Emit the deprecation warning once at import time, then drop the module
+# reference so it does not leak into "from statcache import *".
+import warnings
+warnings.warn("The statcache module is obsolete.  Use os.stat() instead.",
+              DeprecationWarning)
+del warnings
+
+import os as _os
+from stat import *
+
+__all__ = ["stat","reset","forget","forget_prefix","forget_dir",
+           "forget_except_prefix","isdir"]
+
+# The cache.  Keys are pathnames, values are os.stat outcomes.
+# Remember that multiple threads may be calling this!  So, e.g., that
+# path in cache returns 1 doesn't mean the cache will still contain
+# path on the next line.  Code defensively.
+
+cache = {}
+
+def stat(path):
+    """Stat a file, possibly out of the cache."""
+    # get-then-set is not atomic: two threads may both miss and both
+    # call _os.stat(); harmless since both store equivalent results.
+    ret = cache.get(path, None)
+    if ret is None:
+        cache[path] = ret = _os.stat(path)
+    return ret
+
+def reset():
+    """Clear the cache."""
+    # Drops every cached stat result; subsequent stat() calls re-stat.
+    cache.clear()
+
+# For thread safety, always use forget() internally too.
+def forget(path):
+    """Remove a given item from the cache, if it exists."""
+    # EAFP: another thread may have removed path already, so tolerate
+    # a KeyError rather than test-then-delete.
+    try:
+        del cache[path]
+    except KeyError:
+        pass
+
+def forget_prefix(prefix):
+    """Remove all pathnames with a given prefix."""
+    # cache.keys() snapshots the keys, so deleting while looping is safe.
+    for path in cache.keys():
+        if path.startswith(prefix):
+            forget(path)
+
+def forget_dir(prefix):
+    """Forget a directory and all entries except for entries in subdirs."""
+
+    # Remove trailing separator, if any.  This is tricky to do in a
+    # x-platform way.  For example, Windows accepts both / and \ as
+    # separators, and if there's nothing *but* a separator we want to
+    # preserve that this is the root.  Only os.path has the platform
+    # knowledge we need.
+    from os.path import split, join
+    # join+split round-trip normalizes the prefix without the trailing
+    # separator while preserving a bare root path.
+    prefix = split(join(prefix, "xxx"))[0]
+    forget(prefix)
+    for path in cache.keys():
+        # First check that the path at least starts with the prefix, so
+        # that when it doesn't we can avoid paying for split().
+        if path.startswith(prefix) and split(path)[0] == prefix:
+            forget(path)
+
+def forget_except_prefix(prefix):
+    """Remove all pathnames except with a given prefix.
+
+    Normally used with prefix = '/' after a chdir().
+    """
+
+    # cache.keys() snapshots the keys, so deleting while looping is safe.
+    for path in cache.keys():
+        if not path.startswith(prefix):
+            forget(path)
+
+def isdir(path):
+    """Return True if directory, else False."""
+    # Uses the cached stat(); a nonexistent path raises os.error and is
+    # reported as not-a-directory.
+    try:
+        st = stat(path)
+    except _os.error:
+        return False
+    return S_ISDIR(st.st_mode)
diff --git a/depot_tools/release/win/python_24/Lib/statvfs.py b/depot_tools/release/win/python_24/Lib/statvfs.py
new file mode 100644
index 0000000..06a323f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/statvfs.py
@@ -0,0 +1,15 @@
+"""Constants for interpreting the results of os.statvfs() and os.fstatvfs()."""
+
+# Indices for statvfs struct members in the tuple returned by
+# os.statvfs() and os.fstatvfs().
+# Use e.g. result[F_BFREE] to pick a field out of the tuple.
+
+F_BSIZE   = 0           # Preferred file system block size
+F_FRSIZE  = 1           # Fundamental file system block size
+F_BLOCKS  = 2           # Total number of file system blocks (FRSIZE)
+F_BFREE   = 3           # Total number of free blocks
+F_BAVAIL  = 4           # Free blocks available to non-superuser
+F_FILES   = 5           # Total number of file nodes
+F_FFREE   = 6           # Total number of free file nodes
+F_FAVAIL  = 7           # Free nodes available to non-superuser
+F_FLAG    = 8           # Flags (see your local statvfs man page)
+F_NAMEMAX = 9           # Maximum file name length
diff --git a/depot_tools/release/win/python_24/Lib/string.py b/depot_tools/release/win/python_24/Lib/string.py
new file mode 100644
index 0000000..7c0e001
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/string.py
@@ -0,0 +1,531 @@
+"""A collection of string operations (most are no longer used).
+
+Warning: most of the code you see here isn't normally used nowadays.
+Beginning with Python 1.6, many of these functions are implemented as
+methods on the standard string object. They used to be implemented by
+a built-in module called strop, but strop is now obsolete itself.
+
+Public module variables:
+
+whitespace -- a string containing all characters considered whitespace
+lowercase -- a string containing all characters considered lowercase letters
+uppercase -- a string containing all characters considered uppercase letters
+letters -- a string containing all characters considered letters
+digits -- a string containing all characters considered decimal digits
+hexdigits -- a string containing all characters considered hexadecimal digits
+octdigits -- a string containing all characters considered octal digits
+punctuation -- a string containing all characters considered punctuation
+printable -- a string containing all characters considered printable
+
+"""
+
+# Some strings for ctype-style character classification
+whitespace = ' \t\n\r\v\f'
+lowercase = 'abcdefghijklmnopqrstuvwxyz'
+uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+letters = lowercase + uppercase
+ascii_lowercase = lowercase
+ascii_uppercase = uppercase
+ascii_letters = ascii_lowercase + ascii_uppercase
+digits = '0123456789'
+hexdigits = digits + 'abcdef' + 'ABCDEF'
+octdigits = '01234567'
+punctuation = """!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~"""
+printable = digits + letters + punctuation + whitespace
+
+# Case conversion helpers
+# Use str to convert Unicode literal in case of -U
+# Note that Cookie.py bogusly uses _idmap :(
+l = map(chr, xrange(256))
+_idmap = str('').join(l)
+del l
+
+# Functions which aren't available as string methods.
+
+# Capitalize the words in a string, e.g. " aBc  dEf " -> "Abc Def".
+# See also regsub.capwords().
+def capwords(s, sep=None):
+    """capwords(s, [sep]) -> string
+
+    Split the argument into words using split, capitalize each
+    word using capitalize, and join the capitalized words using
+    join. Note that this replaces runs of whitespace characters by
+    a single space.
+
+    """
+    return (sep or ' ').join([x.capitalize() for x in s.split(sep)])
+
+
+# Construct a translation string
+_idmapL = None
+def maketrans(fromstr, tostr):
+    """maketrans(frm, to) -> string
+
+    Return a translation table (a string of 256 bytes long)
+    suitable for use in string.translate.  The strings frm and to
+    must be of the same length.
+
+    """
+    if len(fromstr) != len(tostr):
+        raise ValueError, "maketrans arguments must have same length"
+    global _idmapL
+    if not _idmapL:
+        _idmapL = map(None, _idmap)
+    L = _idmapL[:]
+    fromstr = map(ord, fromstr)
+    for i in range(len(fromstr)):
+        L[fromstr[i]] = tostr[i]
+    return ''.join(L)
+
+
+
+####################################################################
+import re as _re
+
+class _multimap:
+    """Helper class for combining multiple mappings.
+
+    Used by .{safe_,}substitute() to combine the mapping and keyword
+    arguments.
+    """
+    def __init__(self, primary, secondary):
+        self._primary = primary
+        self._secondary = secondary
+
+    def __getitem__(self, key):
+        try:
+            return self._primary[key]
+        except KeyError:
+            return self._secondary[key]
+
+
+class _TemplateMetaclass(type):
+    pattern = r"""
+    %(delim)s(?:
+      (?P<escaped>%(delim)s) |   # Escape sequence of two delimiters
+      (?P<named>%(id)s)      |   # delimiter and a Python identifier
+      {(?P<braced>%(id)s)}   |   # delimiter and a braced identifier
+      (?P<invalid>)              # Other ill-formed delimiter exprs
+    )
+    """
+
+    def __init__(cls, name, bases, dct):
+        super(_TemplateMetaclass, cls).__init__(name, bases, dct)
+        if 'pattern' in dct:
+            pattern = cls.pattern
+        else:
+            pattern = _TemplateMetaclass.pattern % {
+                'delim' : _re.escape(cls.delimiter),
+                'id'    : cls.idpattern,
+                }
+        cls.pattern = _re.compile(pattern, _re.IGNORECASE | _re.VERBOSE)
+
+
+class Template:
+    """A string class for supporting $-substitutions."""
+    __metaclass__ = _TemplateMetaclass
+
+    delimiter = '$'
+    idpattern = r'[_a-z][_a-z0-9]*'
+
+    def __init__(self, template):
+        self.template = template
+
+    # Search for $$, $identifier, ${identifier}, and any bare $'s
+
+    def _invalid(self, mo):
+        i = mo.start('invalid')
+        lines = self.template[:i].splitlines(True)
+        if not lines:
+            colno = 1
+            lineno = 1
+        else:
+            colno = i - len(''.join(lines[:-1]))
+            lineno = len(lines)
+        raise ValueError('Invalid placeholder in string: line %d, col %d' %
+                         (lineno, colno))
+
+    def substitute(self, *args, **kws):
+        if len(args) > 1:
+            raise TypeError('Too many positional arguments')
+        if not args:
+            mapping = kws
+        elif kws:
+            mapping = _multimap(kws, args[0])
+        else:
+            mapping = args[0]
+        # Helper function for .sub()
+        def convert(mo):
+            # Check the most common path first.
+            named = mo.group('named') or mo.group('braced')
+            if named is not None:
+                val = mapping[named]
+                # We use this idiom instead of str() because the latter will
+                # fail if val is a Unicode containing non-ASCII characters.
+                return '%s' % val
+            if mo.group('escaped') is not None:
+                return self.delimiter
+            if mo.group('invalid') is not None:
+                self._invalid(mo)
+            raise ValueError('Unrecognized named group in pattern',
+                             self.pattern)
+        return self.pattern.sub(convert, self.template)
+
+    def safe_substitute(self, *args, **kws):
+        if len(args) > 1:
+            raise TypeError('Too many positional arguments')
+        if not args:
+            mapping = kws
+        elif kws:
+            mapping = _multimap(kws, args[0])
+        else:
+            mapping = args[0]
+        # Helper function for .sub()
+        def convert(mo):
+            named = mo.group('named')
+            if named is not None:
+                try:
+                    # We use this idiom instead of str() because the latter
+                    # will fail if val is a Unicode containing non-ASCII
+                    return '%s' % mapping[named]
+                except KeyError:
+                    return self.delimiter + named
+            braced = mo.group('braced')
+            if braced is not None:
+                try:
+                    return '%s' % mapping[braced]
+                except KeyError:
+                    return self.delimiter + '{' + braced + '}'
+            if mo.group('escaped') is not None:
+                return self.delimiter
+            if mo.group('invalid') is not None:
+                return self.delimiter
+            raise ValueError('Unrecognized named group in pattern',
+                             self.pattern)
+        return self.pattern.sub(convert, self.template)
+
+
+
+####################################################################
+# NOTE: Everything below here is deprecated.  Use string methods instead.
+# This stuff will go away in Python 3.0.
+
+# Backward compatible names for exceptions
+index_error = ValueError
+atoi_error = ValueError
+atof_error = ValueError
+atol_error = ValueError
+
+# convert UPPER CASE letters to lower case
+def lower(s):
+    """lower(s) -> string
+
+    Return a copy of the string s converted to lowercase.
+
+    """
+    return s.lower()
+
+# Convert lower case letters to UPPER CASE
+def upper(s):
+    """upper(s) -> string
+
+    Return a copy of the string s converted to uppercase.
+
+    """
+    return s.upper()
+
+# Swap lower case letters and UPPER CASE
+def swapcase(s):
+    """swapcase(s) -> string
+
+    Return a copy of the string s with upper case characters
+    converted to lowercase and vice versa.
+
+    """
+    return s.swapcase()
+
+# Strip leading and trailing tabs and spaces
+def strip(s, chars=None):
+    """strip(s [,chars]) -> string
+
+    Return a copy of the string s with leading and trailing
+    whitespace removed.
+    If chars is given and not None, remove characters in chars instead.
+    If chars is unicode, S will be converted to unicode before stripping.
+
+    """
+    return s.strip(chars)
+
+# Strip leading tabs and spaces
+def lstrip(s, chars=None):
+    """lstrip(s [,chars]) -> string
+
+    Return a copy of the string s with leading whitespace removed.
+    If chars is given and not None, remove characters in chars instead.
+
+    """
+    return s.lstrip(chars)
+
+# Strip trailing tabs and spaces
+def rstrip(s, chars=None):
+    """rstrip(s [,chars]) -> string
+
+    Return a copy of the string s with trailing whitespace removed.
+    If chars is given and not None, remove characters in chars instead.
+
+    """
+    return s.rstrip(chars)
+
+
+# Split a string into a list of space/tab-separated words
+def split(s, sep=None, maxsplit=-1):
+    """split(s [,sep [,maxsplit]]) -> list of strings
+
+    Return a list of the words in the string s, using sep as the
+    delimiter string.  If maxsplit is given, splits at no more than
+    maxsplit places (resulting in at most maxsplit+1 words).  If sep
+    is not specified or is None, any whitespace string is a separator.
+
+    (split and splitfields are synonymous)
+
+    """
+    return s.split(sep, maxsplit)
+splitfields = split
+
+# Split a string into a list of space/tab-separated words
+def rsplit(s, sep=None, maxsplit=-1):
+    """rsplit(s [,sep [,maxsplit]]) -> list of strings
+
+    Return a list of the words in the string s, using sep as the
+    delimiter string, starting at the end of the string and working
+    to the front.  If maxsplit is given, at most maxsplit splits are
+    done. If sep is not specified or is None, any whitespace string
+    is a separator.
+    """
+    return s.rsplit(sep, maxsplit)
+
+# Join fields with optional separator
+def join(words, sep = ' '):
+    """join(list [,sep]) -> string
+
+    Return a string composed of the words in list, with
+    intervening occurrences of sep.  The default separator is a
+    single space.
+
+    (joinfields and join are synonymous)
+
+    """
+    return sep.join(words)
+joinfields = join
+
+# Find substring, raise exception if not found
+def index(s, *args):
+    """index(s, sub [,start [,end]]) -> int
+
+    Like find but raises ValueError when the substring is not found.
+
+    """
+    return s.index(*args)
+
+# Find last substring, raise exception if not found
+def rindex(s, *args):
+    """rindex(s, sub [,start [,end]]) -> int
+
+    Like rfind but raises ValueError when the substring is not found.
+
+    """
+    return s.rindex(*args)
+
+# Count non-overlapping occurrences of substring
+def count(s, *args):
+    """count(s, sub[, start[,end]]) -> int
+
+    Return the number of occurrences of substring sub in string
+    s[start:end].  Optional arguments start and end are
+    interpreted as in slice notation.
+
+    """
+    return s.count(*args)
+
+# Find substring, return -1 if not found
+def find(s, *args):
+    """find(s, sub [,start [,end]]) -> in
+
+    Return the lowest index in s where substring sub is found,
+    such that sub is contained within s[start,end].  Optional
+    arguments start and end are interpreted as in slice notation.
+
+    Return -1 on failure.
+
+    """
+    return s.find(*args)
+
+# Find last substring, return -1 if not found
+def rfind(s, *args):
+    """rfind(s, sub [,start [,end]]) -> int
+
+    Return the highest index in s where substring sub is found,
+    such that sub is contained within s[start,end].  Optional
+    arguments start and end are interpreted as in slice notation.
+
+    Return -1 on failure.
+
+    """
+    return s.rfind(*args)
+
+# for a bit of speed
+_float = float
+_int = int
+_long = long
+
+# Convert string to float
+def atof(s):
+    """atof(s) -> float
+
+    Return the floating point number represented by the string s.
+
+    """
+    return _float(s)
+
+
+# Convert string to integer
+def atoi(s , base=10):
+    """atoi(s [,base]) -> int
+
+    Return the integer represented by the string s in the given
+    base, which defaults to 10.  The string s must consist of one
+    or more digits, possibly preceded by a sign.  If base is 0, it
+    is chosen from the leading characters of s, 0 for octal, 0x or
+    0X for hexadecimal.  If base is 16, a preceding 0x or 0X is
+    accepted.
+
+    """
+    return _int(s, base)
+
+
+# Convert string to long integer
+def atol(s, base=10):
+    """atol(s [,base]) -> long
+
+    Return the long integer represented by the string s in the
+    given base, which defaults to 10.  The string s must consist
+    of one or more digits, possibly preceded by a sign.  If base
+    is 0, it is chosen from the leading characters of s, 0 for
+    octal, 0x or 0X for hexadecimal.  If base is 16, a preceding
+    0x or 0X is accepted.  A trailing L or l is not accepted,
+    unless base is 0.
+
+    """
+    return _long(s, base)
+
+
+# Left-justify a string
+def ljust(s, width, *args):
+    """ljust(s, width[, fillchar]) -> string
+
+    Return a left-justified version of s, in a field of the
+    specified width, padded with spaces as needed.  The string is
+    never truncated.  If specified the fillchar is used instead of spaces.
+
+    """
+    return s.ljust(width, *args)
+
+# Right-justify a string
+def rjust(s, width, *args):
+    """rjust(s, width[, fillchar]) -> string
+
+    Return a right-justified version of s, in a field of the
+    specified width, padded with spaces as needed.  The string is
+    never truncated.  If specified the fillchar is used instead of spaces.
+
+    """
+    return s.rjust(width, *args)
+
+# Center a string
+def center(s, width, *args):
+    """center(s, width[, fillchar]) -> string
+
+    Return a center version of s, in a field of the specified
+    width. padded with spaces as needed.  The string is never
+    truncated.  If specified the fillchar is used instead of spaces.
+
+    """
+    return s.center(width, *args)
+
+# Zero-fill a number, e.g., (12, 3) --> '012' and (-3, 3) --> '-03'
+# Decadent feature: the argument may be a string or a number
+# (Use of this is deprecated; it should be a string as with ljust c.s.)
+def zfill(x, width):
+    """zfill(x, width) -> string
+
+    Pad a numeric string x with zeros on the left, to fill a field
+    of the specified width.  The string x is never truncated.
+
+    """
+    if not isinstance(x, basestring):
+        x = repr(x)
+    return x.zfill(width)
+
+# Expand tabs in a string.
+# Doesn't take non-printing chars into account, but does understand \n.
+def expandtabs(s, tabsize=8):
+    """expandtabs(s [,tabsize]) -> string
+
+    Return a copy of the string s with all tab characters replaced
+    by the appropriate number of spaces, depending on the current
+    column, and the tabsize (default 8).
+
+    """
+    return s.expandtabs(tabsize)
+
+# Character translation through look-up table.
+def translate(s, table, deletions=""):
+    """translate(s,table [,deletions]) -> string
+
+    Return a copy of the string s, where all characters occurring
+    in the optional argument deletions are removed, and the
+    remaining characters have been mapped through the given
+    translation table, which must be a string of length 256.  The
+    deletions argument is not allowed for Unicode strings.
+
+    """
+    if deletions:
+        return s.translate(table, deletions)
+    else:
+        # Add s[:0] so that if s is Unicode and table is an 8-bit string,
+        # table is converted to Unicode.  This means that table *cannot*
+        # be a dictionary -- for that feature, use u.translate() directly.
+        return s.translate(table + s[:0])
+
+# Capitalize a string, e.g. "aBc  dEf" -> "Abc  def".
+def capitalize(s):
+    """capitalize(s) -> string
+
+    Return a copy of the string s with only its first character
+    capitalized.
+
+    """
+    return s.capitalize()
+
+# Substring replacement (global)
+def replace(s, old, new, maxsplit=-1):
+    """replace (str, old, new[, maxsplit]) -> string
+
+    Return a copy of string str with all occurrences of substring
+    old replaced by new. If the optional argument maxsplit is
+    given, only the first maxsplit occurrences are replaced.
+
+    """
+    return s.replace(old, new, maxsplit)
+
+
+# Try importing optional built-in module "strop" -- if it exists,
+# it redefines some string operations that are 100-1000 times faster.
+# It also defines values for whitespace, lowercase and uppercase
+# that match <ctype.h>'s definitions.
+
+try:
+    from strop import maketrans, lowercase, uppercase, whitespace
+    letters = lowercase + uppercase
+except ImportError:
+    pass                                          # Use the original versions
diff --git a/depot_tools/release/win/python_24/Lib/stringold.py b/depot_tools/release/win/python_24/Lib/stringold.py
new file mode 100644
index 0000000..dd2d584
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/stringold.py
@@ -0,0 +1,430 @@
+# module 'string' -- A collection of string operations
+
+# Warning: most of the code you see here isn't normally used nowadays.  With
+# Python 1.6, many of these functions are implemented as methods on the
+# standard string object. They used to be implemented by a built-in module
+# called strop, but strop is now obsolete itself.
+
+"""Common string manipulations.
+
+Public module variables:
+
+whitespace -- a string containing all characters considered whitespace
+lowercase -- a string containing all characters considered lowercase letters
+uppercase -- a string containing all characters considered uppercase letters
+letters -- a string containing all characters considered letters
+digits -- a string containing all characters considered decimal digits
+hexdigits -- a string containing all characters considered hexadecimal digits
+octdigits -- a string containing all characters considered octal digits
+
+"""
+
+# Some strings for ctype-style character classification
+whitespace = ' \t\n\r\v\f'
+lowercase = 'abcdefghijklmnopqrstuvwxyz'
+uppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+letters = lowercase + uppercase
+digits = '0123456789'
+hexdigits = digits + 'abcdef' + 'ABCDEF'
+octdigits = '01234567'
+
+# Case conversion helpers
+_idmap = ''
+for i in range(256): _idmap = _idmap + chr(i)
+del i
+
+# Backward compatible names for exceptions
+index_error = ValueError
+atoi_error = ValueError
+atof_error = ValueError
+atol_error = ValueError
+
+# convert UPPER CASE letters to lower case
+def lower(s):
+    """lower(s) -> string
+
+    Return a copy of the string s converted to lowercase.
+
+    """
+    return s.lower()
+
+# Convert lower case letters to UPPER CASE
+def upper(s):
+    """upper(s) -> string
+
+    Return a copy of the string s converted to uppercase.
+
+    """
+    return s.upper()
+
+# Swap lower case letters and UPPER CASE
+def swapcase(s):
+    """swapcase(s) -> string
+
+    Return a copy of the string s with upper case characters
+    converted to lowercase and vice versa.
+
+    """
+    return s.swapcase()
+
+# Strip leading and trailing tabs and spaces
+def strip(s):
+    """strip(s) -> string
+
+    Return a copy of the string s with leading and trailing
+    whitespace removed.
+
+    """
+    return s.strip()
+
+# Strip leading tabs and spaces
+def lstrip(s):
+    """lstrip(s) -> string
+
+    Return a copy of the string s with leading whitespace removed.
+
+    """
+    return s.lstrip()
+
+# Strip trailing tabs and spaces
+def rstrip(s):
+    """rstrip(s) -> string
+
+    Return a copy of the string s with trailing whitespace
+    removed.
+
+    """
+    return s.rstrip()
+
+
+# Split a string into a list of space/tab-separated words
+def split(s, sep=None, maxsplit=0):
+    """split(str [,sep [,maxsplit]]) -> list of strings
+
+    Return a list of the words in the string s, using sep as the
+    delimiter string.  If maxsplit is nonzero, splits into at most
+    maxsplit words If sep is not specified, any whitespace string
+    is a separator.  Maxsplit defaults to 0.
+
+    (split and splitfields are synonymous)
+
+    """
+    return s.split(sep, maxsplit)
+splitfields = split
+
+# Join fields with optional separator
+def join(words, sep = ' '):
+    """join(list [,sep]) -> string
+
+    Return a string composed of the words in list, with
+    intervening occurrences of sep.  The default separator is a
+    single space.
+
+    (joinfields and join are synonymous)
+
+    """
+    return sep.join(words)
+joinfields = join
+
+# for a little bit of speed
+_apply = apply
+
+# Find substring, raise exception if not found
+def index(s, *args):
+    """index(s, sub [,start [,end]]) -> int
+
+    Like find but raises ValueError when the substring is not found.
+
+    """
+    return _apply(s.index, args)
+
+# Find last substring, raise exception if not found
+def rindex(s, *args):
+    """rindex(s, sub [,start [,end]]) -> int
+
+    Like rfind but raises ValueError when the substring is not found.
+
+    """
+    return _apply(s.rindex, args)
+
+# Count non-overlapping occurrences of substring
+def count(s, *args):
+    """count(s, sub[, start[,end]]) -> int
+
+    Return the number of occurrences of substring sub in string
+    s[start:end].  Optional arguments start and end are
+    interpreted as in slice notation.
+
+    """
+    return _apply(s.count, args)
+
+# Find substring, return -1 if not found
+def find(s, *args):
+    """find(s, sub [,start [,end]]) -> in
+
+    Return the lowest index in s where substring sub is found,
+    such that sub is contained within s[start,end].  Optional
+    arguments start and end are interpreted as in slice notation.
+
+    Return -1 on failure.
+
+    """
+    return _apply(s.find, args)
+
+# Find last substring, return -1 if not found
+def rfind(s, *args):
+    """rfind(s, sub [,start [,end]]) -> int
+
+    Return the highest index in s where substring sub is found,
+    such that sub is contained within s[start,end].  Optional
+    arguments start and end are interpreted as in slice notation.
+
+    Return -1 on failure.
+
+    """
+    return _apply(s.rfind, args)
+
+# for a bit of speed
+_float = float
+_int = int
+_long = long
+_StringType = type('')
+
+# Convert string to float
+def atof(s):
+    """atof(s) -> float
+
+    Return the floating point number represented by the string s.
+
+    """
+    if type(s) == _StringType:
+        return _float(s)
+    else:
+        raise TypeError('argument 1: expected string, %s found' %
+                        type(s).__name__)
+
+# Convert string to integer
+def atoi(*args):
+    """atoi(s [,base]) -> int
+
+    Return the integer represented by the string s in the given
+    base, which defaults to 10.  The string s must consist of one
+    or more digits, possibly preceded by a sign.  If base is 0, it
+    is chosen from the leading characters of s, 0 for octal, 0x or
+    0X for hexadecimal.  If base is 16, a preceding 0x or 0X is
+    accepted.
+
+    """
+    try:
+        s = args[0]
+    except IndexError:
+        raise TypeError('function requires at least 1 argument: %d given' %
+                        len(args))
+    # Don't catch type error resulting from too many arguments to int().  The
+    # error message isn't compatible but the error type is, and this function
+    # is complicated enough already.
+    if type(s) == _StringType:
+        return _apply(_int, args)
+    else:
+        raise TypeError('argument 1: expected string, %s found' %
+                        type(s).__name__)
+
+
+# Convert string to long integer
+def atol(*args):
+    """atol(s [,base]) -> long
+
+    Return the long integer represented by the string s in the
+    given base, which defaults to 10.  The string s must consist
+    of one or more digits, possibly preceded by a sign.  If base
+    is 0, it is chosen from the leading characters of s, 0 for
+    octal, 0x or 0X for hexadecimal.  If base is 16, a preceding
+    0x or 0X is accepted.  A trailing L or l is not accepted,
+    unless base is 0.
+
+    """
+    try:
+        s = args[0]
+    except IndexError:
+        raise TypeError('function requires at least 1 argument: %d given' %
+                        len(args))
+    # Don't catch type error resulting from too many arguments to long().  The
+    # error message isn't compatible but the error type is, and this function
+    # is complicated enough already.
+    if type(s) == _StringType:
+        return _apply(_long, args)
+    else:
+        raise TypeError('argument 1: expected string, %s found' %
+                        type(s).__name__)
+
+
+# Left-justify a string
+def ljust(s, width):
+    """ljust(s, width) -> string
+
+    Return a left-justified version of s, in a field of the
+    specified width, padded with spaces as needed.  The string is
+    never truncated.
+
+    """
+    n = width - len(s)
+    if n <= 0: return s
+    return s + ' '*n
+
+# Right-justify a string
+def rjust(s, width):
+    """rjust(s, width) -> string
+
+    Return a right-justified version of s, in a field of the
+    specified width, padded with spaces as needed.  The string is
+    never truncated.
+
+    """
+    n = width - len(s)
+    if n <= 0: return s
+    return ' '*n + s
+
+# Center a string
+def center(s, width):
+    """center(s, width) -> string
+
+    Return a center version of s, in a field of the specified
+    width. padded with spaces as needed.  The string is never
+    truncated.
+
+    """
+    n = width - len(s)
+    if n <= 0: return s
+    half = n/2
+    if n%2 and width%2:
+        # This ensures that center(center(s, i), j) = center(s, j)
+        half = half+1
+    return ' '*half +  s + ' '*(n-half)
+
+# Zero-fill a number, e.g., (12, 3) --> '012' and (-3, 3) --> '-03'
+# Decadent feature: the argument may be a string or a number
+# (Use of this is deprecated; it should be a string as with ljust c.s.)
+def zfill(x, width):
+    """zfill(x, width) -> string
+
+    Pad a numeric string x with zeros on the left, to fill a field
+    of the specified width.  The string x is never truncated.
+
+    """
+    if type(x) == type(''): s = x
+    else: s = repr(x)
+    n = len(s)
+    if n >= width: return s
+    sign = ''
+    if s[0] in ('-', '+'):
+        sign, s = s[0], s[1:]
+    return sign + '0'*(width-n) + s
+
+# Expand tabs in a string.
+# Doesn't take non-printing chars into account, but does understand \n.
+def expandtabs(s, tabsize=8):
+    """expandtabs(s [,tabsize]) -> string
+
+    Return a copy of the string s with all tab characters replaced
+    by the appropriate number of spaces, depending on the current
+    column, and the tabsize (default 8).
+
+    """
+    res = line = ''
+    for c in s:
+        if c == '\t':
+            c = ' '*(tabsize - len(line) % tabsize)
+        line = line + c
+        if c == '\n':
+            res = res + line
+            line = ''
+    return res + line
+
+# Character translation through look-up table.
+def translate(s, table, deletions=""):
+    """translate(s,table [,deletechars]) -> string
+
+    Return a copy of the string s, where all characters occurring
+    in the optional argument deletechars are removed, and the
+    remaining characters have been mapped through the given
+    translation table, which must be a string of length 256.
+
+    """
+    return s.translate(table, deletions)
+
+# Capitalize a string, e.g. "aBc  dEf" -> "Abc  def".
+def capitalize(s):
+    """capitalize(s) -> string
+
+    Return a copy of the string s with only its first character
+    capitalized.
+
+    """
+    return s.capitalize()
+
+# Capitalize the words in a string, e.g. " aBc  dEf " -> "Abc Def".
+# See also regsub.capwords().
+def capwords(s, sep=None):
+    """capwords(s, [sep]) -> string
+
+    Split the argument into words using split, capitalize each
+    word using capitalize, and join the capitalized words using
+    join. Note that this replaces runs of whitespace characters by
+    a single space.
+
+    """
+    return join(map(capitalize, s.split(sep)), sep or ' ')
+
+# Construct a translation string
+_idmapL = None
+def maketrans(fromstr, tostr):
+    """maketrans(frm, to) -> string
+
+    Return a translation table (a string of 256 bytes long)
+    suitable for use in string.translate.  The strings frm and to
+    must be of the same length.
+
+    """
+    if len(fromstr) != len(tostr):
+        raise ValueError, "maketrans arguments must have same length"
+    global _idmapL
+    if not _idmapL:
+        _idmapL = map(None, _idmap)
+    L = _idmapL[:]
+    fromstr = map(ord, fromstr)
+    for i in range(len(fromstr)):
+        L[fromstr[i]] = tostr[i]
+    return join(L, "")
+
+# Substring replacement (global)
+def replace(s, old, new, maxsplit=0):
+    """replace (str, old, new[, maxsplit]) -> string
+
+    Return a copy of string str with all occurrences of substring
+    old replaced by new. If the optional argument maxsplit is
+    given, only the first maxsplit occurrences are replaced.
+
+    """
+    return s.replace(old, new, maxsplit)
+
+
+# XXX: transitional
+#
+# If string objects do not have methods, then we need to use the old string.py
+# library, which uses strop for many more things than just the few outlined
+# below.
+try:
+    ''.upper
+except AttributeError:
+    from stringold import *
+
+# Try importing optional built-in module "strop" -- if it exists,
+# it redefines some string operations that are 100-1000 times faster.
+# It also defines values for whitespace, lowercase and uppercase
+# that match <ctype.h>'s definitions.
+
+try:
+    from strop import maketrans, lowercase, uppercase, whitespace
+    letters = lowercase + uppercase
+except ImportError:
+    pass                                          # Use the original versions
diff --git a/depot_tools/release/win/python_24/Lib/stringprep.py b/depot_tools/release/win/python_24/Lib/stringprep.py
new file mode 100644
index 0000000..f997a67
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/stringprep.py
@@ -0,0 +1,272 @@
+# This file is generated by mkstringprep.py. DO NOT EDIT.
+"""Library that exposes various tables found in the StringPrep RFC 3454.
+
+There are two kinds of tables: sets, for which a member test is provided,
+and mappings, for which a mapping function is provided.
+"""
+
+import unicodedata
+
+assert unicodedata.unidata_version == '3.2.0'
+
def in_table_a1(code):
    """Return True if *code* is in RFC 3454 table A.1 (unassigned).

    Members have Unicode category Cn (unassigned) and are not one of
    the noncharacters U+FDD0..U+FDEF or U+xxFFFE/U+xxFFFF.
    """
    if unicodedata.category(code) != 'Cn': return False
    c = ord(code)
    if 0xFDD0 <= c < 0xFDF0: return False
    return (c & 0xFFFF) not in (0xFFFE, 0xFFFF)
+
+
# RFC 3454 table B.1 (commonly mapped to nothing): U+00AD soft hyphen,
# various zero-width/joiner characters, and the variation selectors
# U+FE00..U+FE0F (range(65024, 65040)).
b1_set = set([173, 847, 6150, 6155, 6156, 6157, 8203, 8204, 8205, 8288, 65279] + range(65024,65040))
def in_table_b1(code):
    """Return True if *code* is in RFC 3454 table B.1."""
    return ord(code) in b1_set
+
+
+b3_exceptions = {
+0xb5:u'\u03bc', 0xdf:u'ss', 0x130:u'i\u0307', 0x149:u'\u02bcn',
+0x17f:u's', 0x1f0:u'j\u030c', 0x345:u'\u03b9', 0x37a:u' \u03b9',
+0x390:u'\u03b9\u0308\u0301', 0x3b0:u'\u03c5\u0308\u0301', 0x3c2:u'\u03c3', 0x3d0:u'\u03b2',
+0x3d1:u'\u03b8', 0x3d2:u'\u03c5', 0x3d3:u'\u03cd', 0x3d4:u'\u03cb',
+0x3d5:u'\u03c6', 0x3d6:u'\u03c0', 0x3f0:u'\u03ba', 0x3f1:u'\u03c1',
+0x3f2:u'\u03c3', 0x3f5:u'\u03b5', 0x587:u'\u0565\u0582', 0x1e96:u'h\u0331',
+0x1e97:u't\u0308', 0x1e98:u'w\u030a', 0x1e99:u'y\u030a', 0x1e9a:u'a\u02be',
+0x1e9b:u'\u1e61', 0x1f50:u'\u03c5\u0313', 0x1f52:u'\u03c5\u0313\u0300', 0x1f54:u'\u03c5\u0313\u0301',
+0x1f56:u'\u03c5\u0313\u0342', 0x1f80:u'\u1f00\u03b9', 0x1f81:u'\u1f01\u03b9', 0x1f82:u'\u1f02\u03b9',
+0x1f83:u'\u1f03\u03b9', 0x1f84:u'\u1f04\u03b9', 0x1f85:u'\u1f05\u03b9', 0x1f86:u'\u1f06\u03b9',
+0x1f87:u'\u1f07\u03b9', 0x1f88:u'\u1f00\u03b9', 0x1f89:u'\u1f01\u03b9', 0x1f8a:u'\u1f02\u03b9',
+0x1f8b:u'\u1f03\u03b9', 0x1f8c:u'\u1f04\u03b9', 0x1f8d:u'\u1f05\u03b9', 0x1f8e:u'\u1f06\u03b9',
+0x1f8f:u'\u1f07\u03b9', 0x1f90:u'\u1f20\u03b9', 0x1f91:u'\u1f21\u03b9', 0x1f92:u'\u1f22\u03b9',
+0x1f93:u'\u1f23\u03b9', 0x1f94:u'\u1f24\u03b9', 0x1f95:u'\u1f25\u03b9', 0x1f96:u'\u1f26\u03b9',
+0x1f97:u'\u1f27\u03b9', 0x1f98:u'\u1f20\u03b9', 0x1f99:u'\u1f21\u03b9', 0x1f9a:u'\u1f22\u03b9',
+0x1f9b:u'\u1f23\u03b9', 0x1f9c:u'\u1f24\u03b9', 0x1f9d:u'\u1f25\u03b9', 0x1f9e:u'\u1f26\u03b9',
+0x1f9f:u'\u1f27\u03b9', 0x1fa0:u'\u1f60\u03b9', 0x1fa1:u'\u1f61\u03b9', 0x1fa2:u'\u1f62\u03b9',
+0x1fa3:u'\u1f63\u03b9', 0x1fa4:u'\u1f64\u03b9', 0x1fa5:u'\u1f65\u03b9', 0x1fa6:u'\u1f66\u03b9',
+0x1fa7:u'\u1f67\u03b9', 0x1fa8:u'\u1f60\u03b9', 0x1fa9:u'\u1f61\u03b9', 0x1faa:u'\u1f62\u03b9',
+0x1fab:u'\u1f63\u03b9', 0x1fac:u'\u1f64\u03b9', 0x1fad:u'\u1f65\u03b9', 0x1fae:u'\u1f66\u03b9',
+0x1faf:u'\u1f67\u03b9', 0x1fb2:u'\u1f70\u03b9', 0x1fb3:u'\u03b1\u03b9', 0x1fb4:u'\u03ac\u03b9',
+0x1fb6:u'\u03b1\u0342', 0x1fb7:u'\u03b1\u0342\u03b9', 0x1fbc:u'\u03b1\u03b9', 0x1fbe:u'\u03b9',
+0x1fc2:u'\u1f74\u03b9', 0x1fc3:u'\u03b7\u03b9', 0x1fc4:u'\u03ae\u03b9', 0x1fc6:u'\u03b7\u0342',
+0x1fc7:u'\u03b7\u0342\u03b9', 0x1fcc:u'\u03b7\u03b9', 0x1fd2:u'\u03b9\u0308\u0300', 0x1fd3:u'\u03b9\u0308\u0301',
+0x1fd6:u'\u03b9\u0342', 0x1fd7:u'\u03b9\u0308\u0342', 0x1fe2:u'\u03c5\u0308\u0300', 0x1fe3:u'\u03c5\u0308\u0301',
+0x1fe4:u'\u03c1\u0313', 0x1fe6:u'\u03c5\u0342', 0x1fe7:u'\u03c5\u0308\u0342', 0x1ff2:u'\u1f7c\u03b9',
+0x1ff3:u'\u03c9\u03b9', 0x1ff4:u'\u03ce\u03b9', 0x1ff6:u'\u03c9\u0342', 0x1ff7:u'\u03c9\u0342\u03b9',
+0x1ffc:u'\u03c9\u03b9', 0x20a8:u'rs', 0x2102:u'c', 0x2103:u'\xb0c',
+0x2107:u'\u025b', 0x2109:u'\xb0f', 0x210b:u'h', 0x210c:u'h',
+0x210d:u'h', 0x2110:u'i', 0x2111:u'i', 0x2112:u'l',
+0x2115:u'n', 0x2116:u'no', 0x2119:u'p', 0x211a:u'q',
+0x211b:u'r', 0x211c:u'r', 0x211d:u'r', 0x2120:u'sm',
+0x2121:u'tel', 0x2122:u'tm', 0x2124:u'z', 0x2128:u'z',
+0x212c:u'b', 0x212d:u'c', 0x2130:u'e', 0x2131:u'f',
+0x2133:u'm', 0x213e:u'\u03b3', 0x213f:u'\u03c0', 0x2145:u'd',
+0x3371:u'hpa', 0x3373:u'au', 0x3375:u'ov', 0x3380:u'pa',
+0x3381:u'na', 0x3382:u'\u03bca', 0x3383:u'ma', 0x3384:u'ka',
+0x3385:u'kb', 0x3386:u'mb', 0x3387:u'gb', 0x338a:u'pf',
+0x338b:u'nf', 0x338c:u'\u03bcf', 0x3390:u'hz', 0x3391:u'khz',
+0x3392:u'mhz', 0x3393:u'ghz', 0x3394:u'thz', 0x33a9:u'pa',
+0x33aa:u'kpa', 0x33ab:u'mpa', 0x33ac:u'gpa', 0x33b4:u'pv',
+0x33b5:u'nv', 0x33b6:u'\u03bcv', 0x33b7:u'mv', 0x33b8:u'kv',
+0x33b9:u'mv', 0x33ba:u'pw', 0x33bb:u'nw', 0x33bc:u'\u03bcw',
+0x33bd:u'mw', 0x33be:u'kw', 0x33bf:u'mw', 0x33c0:u'k\u03c9',
+0x33c1:u'm\u03c9', 0x33c3:u'bq', 0x33c6:u'c\u2215kg', 0x33c7:u'co.',
+0x33c8:u'db', 0x33c9:u'gy', 0x33cb:u'hp', 0x33cd:u'kk',
+0x33ce:u'km', 0x33d7:u'ph', 0x33d9:u'ppm', 0x33da:u'pr',
+0x33dc:u'sv', 0x33dd:u'wb', 0xfb00:u'ff', 0xfb01:u'fi',
+0xfb02:u'fl', 0xfb03:u'ffi', 0xfb04:u'ffl', 0xfb05:u'st',
+0xfb06:u'st', 0xfb13:u'\u0574\u0576', 0xfb14:u'\u0574\u0565', 0xfb15:u'\u0574\u056b',
+0xfb16:u'\u057e\u0576', 0xfb17:u'\u0574\u056d', 0x1d400:u'a', 0x1d401:u'b',
+0x1d402:u'c', 0x1d403:u'd', 0x1d404:u'e', 0x1d405:u'f',
+0x1d406:u'g', 0x1d407:u'h', 0x1d408:u'i', 0x1d409:u'j',
+0x1d40a:u'k', 0x1d40b:u'l', 0x1d40c:u'm', 0x1d40d:u'n',
+0x1d40e:u'o', 0x1d40f:u'p', 0x1d410:u'q', 0x1d411:u'r',
+0x1d412:u's', 0x1d413:u't', 0x1d414:u'u', 0x1d415:u'v',
+0x1d416:u'w', 0x1d417:u'x', 0x1d418:u'y', 0x1d419:u'z',
+0x1d434:u'a', 0x1d435:u'b', 0x1d436:u'c', 0x1d437:u'd',
+0x1d438:u'e', 0x1d439:u'f', 0x1d43a:u'g', 0x1d43b:u'h',
+0x1d43c:u'i', 0x1d43d:u'j', 0x1d43e:u'k', 0x1d43f:u'l',
+0x1d440:u'm', 0x1d441:u'n', 0x1d442:u'o', 0x1d443:u'p',
+0x1d444:u'q', 0x1d445:u'r', 0x1d446:u's', 0x1d447:u't',
+0x1d448:u'u', 0x1d449:u'v', 0x1d44a:u'w', 0x1d44b:u'x',
+0x1d44c:u'y', 0x1d44d:u'z', 0x1d468:u'a', 0x1d469:u'b',
+0x1d46a:u'c', 0x1d46b:u'd', 0x1d46c:u'e', 0x1d46d:u'f',
+0x1d46e:u'g', 0x1d46f:u'h', 0x1d470:u'i', 0x1d471:u'j',
+0x1d472:u'k', 0x1d473:u'l', 0x1d474:u'm', 0x1d475:u'n',
+0x1d476:u'o', 0x1d477:u'p', 0x1d478:u'q', 0x1d479:u'r',
+0x1d47a:u's', 0x1d47b:u't', 0x1d47c:u'u', 0x1d47d:u'v',
+0x1d47e:u'w', 0x1d47f:u'x', 0x1d480:u'y', 0x1d481:u'z',
+0x1d49c:u'a', 0x1d49e:u'c', 0x1d49f:u'd', 0x1d4a2:u'g',
+0x1d4a5:u'j', 0x1d4a6:u'k', 0x1d4a9:u'n', 0x1d4aa:u'o',
+0x1d4ab:u'p', 0x1d4ac:u'q', 0x1d4ae:u's', 0x1d4af:u't',
+0x1d4b0:u'u', 0x1d4b1:u'v', 0x1d4b2:u'w', 0x1d4b3:u'x',
+0x1d4b4:u'y', 0x1d4b5:u'z', 0x1d4d0:u'a', 0x1d4d1:u'b',
+0x1d4d2:u'c', 0x1d4d3:u'd', 0x1d4d4:u'e', 0x1d4d5:u'f',
+0x1d4d6:u'g', 0x1d4d7:u'h', 0x1d4d8:u'i', 0x1d4d9:u'j',
+0x1d4da:u'k', 0x1d4db:u'l', 0x1d4dc:u'm', 0x1d4dd:u'n',
+0x1d4de:u'o', 0x1d4df:u'p', 0x1d4e0:u'q', 0x1d4e1:u'r',
+0x1d4e2:u's', 0x1d4e3:u't', 0x1d4e4:u'u', 0x1d4e5:u'v',
+0x1d4e6:u'w', 0x1d4e7:u'x', 0x1d4e8:u'y', 0x1d4e9:u'z',
+0x1d504:u'a', 0x1d505:u'b', 0x1d507:u'd', 0x1d508:u'e',
+0x1d509:u'f', 0x1d50a:u'g', 0x1d50d:u'j', 0x1d50e:u'k',
+0x1d50f:u'l', 0x1d510:u'm', 0x1d511:u'n', 0x1d512:u'o',
+0x1d513:u'p', 0x1d514:u'q', 0x1d516:u's', 0x1d517:u't',
+0x1d518:u'u', 0x1d519:u'v', 0x1d51a:u'w', 0x1d51b:u'x',
+0x1d51c:u'y', 0x1d538:u'a', 0x1d539:u'b', 0x1d53b:u'd',
+0x1d53c:u'e', 0x1d53d:u'f', 0x1d53e:u'g', 0x1d540:u'i',
+0x1d541:u'j', 0x1d542:u'k', 0x1d543:u'l', 0x1d544:u'm',
+0x1d546:u'o', 0x1d54a:u's', 0x1d54b:u't', 0x1d54c:u'u',
+0x1d54d:u'v', 0x1d54e:u'w', 0x1d54f:u'x', 0x1d550:u'y',
+0x1d56c:u'a', 0x1d56d:u'b', 0x1d56e:u'c', 0x1d56f:u'd',
+0x1d570:u'e', 0x1d571:u'f', 0x1d572:u'g', 0x1d573:u'h',
+0x1d574:u'i', 0x1d575:u'j', 0x1d576:u'k', 0x1d577:u'l',
+0x1d578:u'm', 0x1d579:u'n', 0x1d57a:u'o', 0x1d57b:u'p',
+0x1d57c:u'q', 0x1d57d:u'r', 0x1d57e:u's', 0x1d57f:u't',
+0x1d580:u'u', 0x1d581:u'v', 0x1d582:u'w', 0x1d583:u'x',
+0x1d584:u'y', 0x1d585:u'z', 0x1d5a0:u'a', 0x1d5a1:u'b',
+0x1d5a2:u'c', 0x1d5a3:u'd', 0x1d5a4:u'e', 0x1d5a5:u'f',
+0x1d5a6:u'g', 0x1d5a7:u'h', 0x1d5a8:u'i', 0x1d5a9:u'j',
+0x1d5aa:u'k', 0x1d5ab:u'l', 0x1d5ac:u'm', 0x1d5ad:u'n',
+0x1d5ae:u'o', 0x1d5af:u'p', 0x1d5b0:u'q', 0x1d5b1:u'r',
+0x1d5b2:u's', 0x1d5b3:u't', 0x1d5b4:u'u', 0x1d5b5:u'v',
+0x1d5b6:u'w', 0x1d5b7:u'x', 0x1d5b8:u'y', 0x1d5b9:u'z',
+0x1d5d4:u'a', 0x1d5d5:u'b', 0x1d5d6:u'c', 0x1d5d7:u'd',
+0x1d5d8:u'e', 0x1d5d9:u'f', 0x1d5da:u'g', 0x1d5db:u'h',
+0x1d5dc:u'i', 0x1d5dd:u'j', 0x1d5de:u'k', 0x1d5df:u'l',
+0x1d5e0:u'm', 0x1d5e1:u'n', 0x1d5e2:u'o', 0x1d5e3:u'p',
+0x1d5e4:u'q', 0x1d5e5:u'r', 0x1d5e6:u's', 0x1d5e7:u't',
+0x1d5e8:u'u', 0x1d5e9:u'v', 0x1d5ea:u'w', 0x1d5eb:u'x',
+0x1d5ec:u'y', 0x1d5ed:u'z', 0x1d608:u'a', 0x1d609:u'b',
+0x1d60a:u'c', 0x1d60b:u'd', 0x1d60c:u'e', 0x1d60d:u'f',
+0x1d60e:u'g', 0x1d60f:u'h', 0x1d610:u'i', 0x1d611:u'j',
+0x1d612:u'k', 0x1d613:u'l', 0x1d614:u'm', 0x1d615:u'n',
+0x1d616:u'o', 0x1d617:u'p', 0x1d618:u'q', 0x1d619:u'r',
+0x1d61a:u's', 0x1d61b:u't', 0x1d61c:u'u', 0x1d61d:u'v',
+0x1d61e:u'w', 0x1d61f:u'x', 0x1d620:u'y', 0x1d621:u'z',
+0x1d63c:u'a', 0x1d63d:u'b', 0x1d63e:u'c', 0x1d63f:u'd',
+0x1d640:u'e', 0x1d641:u'f', 0x1d642:u'g', 0x1d643:u'h',
+0x1d644:u'i', 0x1d645:u'j', 0x1d646:u'k', 0x1d647:u'l',
+0x1d648:u'm', 0x1d649:u'n', 0x1d64a:u'o', 0x1d64b:u'p',
+0x1d64c:u'q', 0x1d64d:u'r', 0x1d64e:u's', 0x1d64f:u't',
+0x1d650:u'u', 0x1d651:u'v', 0x1d652:u'w', 0x1d653:u'x',
+0x1d654:u'y', 0x1d655:u'z', 0x1d670:u'a', 0x1d671:u'b',
+0x1d672:u'c', 0x1d673:u'd', 0x1d674:u'e', 0x1d675:u'f',
+0x1d676:u'g', 0x1d677:u'h', 0x1d678:u'i', 0x1d679:u'j',
+0x1d67a:u'k', 0x1d67b:u'l', 0x1d67c:u'm', 0x1d67d:u'n',
+0x1d67e:u'o', 0x1d67f:u'p', 0x1d680:u'q', 0x1d681:u'r',
+0x1d682:u's', 0x1d683:u't', 0x1d684:u'u', 0x1d685:u'v',
+0x1d686:u'w', 0x1d687:u'x', 0x1d688:u'y', 0x1d689:u'z',
+0x1d6a8:u'\u03b1', 0x1d6a9:u'\u03b2', 0x1d6aa:u'\u03b3', 0x1d6ab:u'\u03b4',
+0x1d6ac:u'\u03b5', 0x1d6ad:u'\u03b6', 0x1d6ae:u'\u03b7', 0x1d6af:u'\u03b8',
+0x1d6b0:u'\u03b9', 0x1d6b1:u'\u03ba', 0x1d6b2:u'\u03bb', 0x1d6b3:u'\u03bc',
+0x1d6b4:u'\u03bd', 0x1d6b5:u'\u03be', 0x1d6b6:u'\u03bf', 0x1d6b7:u'\u03c0',
+0x1d6b8:u'\u03c1', 0x1d6b9:u'\u03b8', 0x1d6ba:u'\u03c3', 0x1d6bb:u'\u03c4',
+0x1d6bc:u'\u03c5', 0x1d6bd:u'\u03c6', 0x1d6be:u'\u03c7', 0x1d6bf:u'\u03c8',
+0x1d6c0:u'\u03c9', 0x1d6d3:u'\u03c3', 0x1d6e2:u'\u03b1', 0x1d6e3:u'\u03b2',
+0x1d6e4:u'\u03b3', 0x1d6e5:u'\u03b4', 0x1d6e6:u'\u03b5', 0x1d6e7:u'\u03b6',
+0x1d6e8:u'\u03b7', 0x1d6e9:u'\u03b8', 0x1d6ea:u'\u03b9', 0x1d6eb:u'\u03ba',
+0x1d6ec:u'\u03bb', 0x1d6ed:u'\u03bc', 0x1d6ee:u'\u03bd', 0x1d6ef:u'\u03be',
+0x1d6f0:u'\u03bf', 0x1d6f1:u'\u03c0', 0x1d6f2:u'\u03c1', 0x1d6f3:u'\u03b8',
+0x1d6f4:u'\u03c3', 0x1d6f5:u'\u03c4', 0x1d6f6:u'\u03c5', 0x1d6f7:u'\u03c6',
+0x1d6f8:u'\u03c7', 0x1d6f9:u'\u03c8', 0x1d6fa:u'\u03c9', 0x1d70d:u'\u03c3',
+0x1d71c:u'\u03b1', 0x1d71d:u'\u03b2', 0x1d71e:u'\u03b3', 0x1d71f:u'\u03b4',
+0x1d720:u'\u03b5', 0x1d721:u'\u03b6', 0x1d722:u'\u03b7', 0x1d723:u'\u03b8',
+0x1d724:u'\u03b9', 0x1d725:u'\u03ba', 0x1d726:u'\u03bb', 0x1d727:u'\u03bc',
+0x1d728:u'\u03bd', 0x1d729:u'\u03be', 0x1d72a:u'\u03bf', 0x1d72b:u'\u03c0',
+0x1d72c:u'\u03c1', 0x1d72d:u'\u03b8', 0x1d72e:u'\u03c3', 0x1d72f:u'\u03c4',
+0x1d730:u'\u03c5', 0x1d731:u'\u03c6', 0x1d732:u'\u03c7', 0x1d733:u'\u03c8',
+0x1d734:u'\u03c9', 0x1d747:u'\u03c3', 0x1d756:u'\u03b1', 0x1d757:u'\u03b2',
+0x1d758:u'\u03b3', 0x1d759:u'\u03b4', 0x1d75a:u'\u03b5', 0x1d75b:u'\u03b6',
+0x1d75c:u'\u03b7', 0x1d75d:u'\u03b8', 0x1d75e:u'\u03b9', 0x1d75f:u'\u03ba',
+0x1d760:u'\u03bb', 0x1d761:u'\u03bc', 0x1d762:u'\u03bd', 0x1d763:u'\u03be',
+0x1d764:u'\u03bf', 0x1d765:u'\u03c0', 0x1d766:u'\u03c1', 0x1d767:u'\u03b8',
+0x1d768:u'\u03c3', 0x1d769:u'\u03c4', 0x1d76a:u'\u03c5', 0x1d76b:u'\u03c6',
+0x1d76c:u'\u03c7', 0x1d76d:u'\u03c8', 0x1d76e:u'\u03c9', 0x1d781:u'\u03c3',
+0x1d790:u'\u03b1', 0x1d791:u'\u03b2', 0x1d792:u'\u03b3', 0x1d793:u'\u03b4',
+0x1d794:u'\u03b5', 0x1d795:u'\u03b6', 0x1d796:u'\u03b7', 0x1d797:u'\u03b8',
+0x1d798:u'\u03b9', 0x1d799:u'\u03ba', 0x1d79a:u'\u03bb', 0x1d79b:u'\u03bc',
+0x1d79c:u'\u03bd', 0x1d79d:u'\u03be', 0x1d79e:u'\u03bf', 0x1d79f:u'\u03c0',
+0x1d7a0:u'\u03c1', 0x1d7a1:u'\u03b8', 0x1d7a2:u'\u03c3', 0x1d7a3:u'\u03c4',
+0x1d7a4:u'\u03c5', 0x1d7a5:u'\u03c6', 0x1d7a6:u'\u03c7', 0x1d7a7:u'\u03c8',
+0x1d7a8:u'\u03c9', 0x1d7bb:u'\u03c3', }
+
def map_table_b3(code):
    """Map *code* per RFC 3454 table B.3 (case folding).

    Uses the generated b3_exceptions table first; characters not in
    it fall back to their simple lowercase mapping.
    """
    r = b3_exceptions.get(ord(code))
    if r is not None: return r
    return code.lower()
+
+
def map_table_b2(a):
    """Map *a* per RFC 3454 table B.2 (case folding used with NFKC).

    Folds via B.3, NFKC-normalizes, then folds and normalizes once
    more; the second-pass result is returned only when normalization
    changed the folded string, otherwise the first fold is returned.
    """
    al = map_table_b3(a)
    b = unicodedata.normalize("NFKC", al)
    bl = u"".join([map_table_b3(ch) for ch in b])
    c = unicodedata.normalize("NFKC", bl)
    if b != c:
        return c
    else:
        return al
+
+
def in_table_c11(code):
    """Table C.1.1: the ASCII space character U+0020."""
    return code == u" "


def in_table_c12(code):
    """Table C.1.2: non-ASCII space characters (category Zs, not U+0020)."""
    return unicodedata.category(code) == "Zs" and code != u" "

def in_table_c11_c12(code):
    """Union of tables C.1.1 and C.1.2: all space separators (Zs)."""
    return unicodedata.category(code) == "Zs"
+
+
def in_table_c21(code):
    """Table C.2.1: ASCII control characters (category Cc, ord < 128)."""
    return ord(code) < 128 and unicodedata.category(code) == "Cc"

# Non-Cc code points that RFC 3454 additionally treats as controls
# (e.g. ZWNJ/ZWJ U+200C/U+200D, line/paragraph separators
# U+2028/U+2029, BOM U+FEFF, interlinear annotation and tag characters).
c22_specials = set([1757, 1807, 6158, 8204, 8205, 8232, 8233, 65279] + range(8288,8292) + range(8298,8304) + range(65529,65533) + range(119155,119163))
def in_table_c22(code):
    """Table C.2.2: non-ASCII control characters."""
    c = ord(code)
    if c < 128: return False
    if unicodedata.category(code) == "Cc": return True
    return c in c22_specials

def in_table_c21_c22(code):
    """Union of tables C.2.1 and C.2.2: all control characters."""
    return unicodedata.category(code) == "Cc" or \
           ord(code) in c22_specials
+
+
def in_table_c3(code):
    """Table C.3: private use characters (category Co)."""
    return unicodedata.category(code) == "Co"


def in_table_c4(code):
    """Table C.4: noncharacter code points.

    Covers U+FDD0..U+FDEF plus the last two code points of every
    plane (U+xxFFFE and U+xxFFFF).
    """
    c = ord(code)
    if c < 0xFDD0: return False
    if c < 0xFDF0: return True
    return (ord(code) & 0xFFFF) in (0xFFFE, 0xFFFF)


def in_table_c5(code):
    """Table C.5: surrogate code points (category Cs)."""
    return unicodedata.category(code) == "Cs"


# Table C.6 (inappropriate for plain text): U+FFF9..U+FFFD.
c6_set = set(range(65529,65534))
def in_table_c6(code):
    """Return True if *code* is in RFC 3454 table C.6."""
    return ord(code) in c6_set


# Table C.7 (inappropriate for canonical representation):
# ideographic description characters U+2FF0..U+2FFB.
c7_set = set(range(12272,12284))
def in_table_c7(code):
    """Return True if *code* is in RFC 3454 table C.7."""
    return ord(code) in c7_set


# Table C.8 (change display properties or deprecated): includes the
# bidi marks U+200E/U+200F and embedding/override controls
# U+202A..U+202E (range(8234, 8239)).
c8_set = set([832, 833, 8206, 8207] + range(8234,8239) + range(8298,8304))
def in_table_c8(code):
    """Return True if *code* is in RFC 3454 table C.8."""
    return ord(code) in c8_set


# Table C.9 (tagging characters): U+E0001 and U+E0020..U+E007F.
c9_set = set([917505] + range(917536,917632))
def in_table_c9(code):
    """Return True if *code* is in RFC 3454 table C.9."""
    return ord(code) in c9_set
+
+
def in_table_d1(code):
    """Table D.1: characters with bidirectional category R or AL
    (right-to-left)."""
    return unicodedata.bidirectional(code) in ("R","AL")


def in_table_d2(code):
    """Table D.2: characters with bidirectional category L
    (left-to-right)."""
    return unicodedata.bidirectional(code) == "L"
diff --git a/depot_tools/release/win/python_24/Lib/subprocess.py b/depot_tools/release/win/python_24/Lib/subprocess.py
new file mode 100644
index 0000000..d115e87
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/subprocess.py
@@ -0,0 +1,1165 @@
+# subprocess - Subprocesses with accessible I/O streams
+#
+# For more information about this module, see PEP 324.
+#
+# Copyright (c) 2003-2004 by Peter Astrand <astrand@lysator.liu.se>
+#
+# By obtaining, using, and/or copying this software and/or its
+# associated documentation, you agree that you have read, understood,
+# and will comply with the following terms and conditions:
+#
+# Permission to use, copy, modify, and distribute this software and
+# its associated documentation for any purpose and without fee is
+# hereby granted, provided that the above copyright notice appears in
+# all copies, and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of the
+# author not be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior
+# permission.
+#
+# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
+# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS.
+# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, INDIRECT OR
+# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
+# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+# WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+r"""subprocess - Subprocesses with accessible I/O streams
+
+This module allows you to spawn processes, connect to their
+input/output/error pipes, and obtain their return codes.  This module
+intends to replace several other, older modules and functions, like:
+
+os.system
+os.spawn*
+os.popen*
+popen2.*
+commands.*
+
+Information about how the subprocess module can be used to replace these
+modules and functions can be found below.
+
+
+
+Using the subprocess module
+===========================
+This module defines one class called Popen:
+
+class Popen(args, bufsize=0, executable=None,
+            stdin=None, stdout=None, stderr=None,
+            preexec_fn=None, close_fds=False, shell=False,
+            cwd=None, env=None, universal_newlines=False,
+            startupinfo=None, creationflags=0):
+
+
+Arguments are:
+
+args should be a string, or a sequence of program arguments.  The
+program to execute is normally the first item in the args sequence or
+string, but can be explicitly set by using the executable argument.
+
+On UNIX, with shell=False (default): In this case, the Popen class
+uses os.execvp() to execute the child program.  args should normally
+be a sequence.  A string will be treated as a sequence with the string
+as the only item (the program to execute).
+
+On UNIX, with shell=True: If args is a string, it specifies the
+command string to execute through the shell.  If args is a sequence,
+the first item specifies the command string, and any additional items
+will be treated as additional shell arguments.
+
+On Windows: the Popen class uses CreateProcess() to execute the child
+program, which operates on strings.  If args is a sequence, it will be
+converted to a string using the list2cmdline method.  Please note that
+not all MS Windows applications interpret the command line the same
+way: The list2cmdline is designed for applications using the same
+rules as the MS C runtime.
+
+bufsize, if given, has the same meaning as the corresponding argument
+to the built-in open() function: 0 means unbuffered, 1 means line
+buffered, any other positive value means use a buffer of
+(approximately) that size.  A negative bufsize means to use the system
+default, which usually means fully buffered.  The default value for
+bufsize is 0 (unbuffered).
+
+stdin, stdout and stderr specify the executed programs' standard
+input, standard output and standard error file handles, respectively.
+Valid values are PIPE, an existing file descriptor (a positive
+integer), an existing file object, and None.  PIPE indicates that a
+new pipe to the child should be created.  With None, no redirection
+will occur; the child's file handles will be inherited from the
+parent.  Additionally, stderr can be STDOUT, which indicates that the
+stderr data from the applications should be captured into the same
+file handle as for stdout.
+
+If preexec_fn is set to a callable object, this object will be called
+in the child process just before the child is executed.
+
+If close_fds is true, all file descriptors except 0, 1 and 2 will be
+closed before the child process is executed.
+
+if shell is true, the specified command will be executed through the
+shell.
+
+If cwd is not None, the current directory will be changed to cwd
+before the child is executed.
+
+If env is not None, it defines the environment variables for the new
+process.
+
+If universal_newlines is true, the file objects stdout and stderr are
+opened as a text files, but lines may be terminated by any of '\n',
+the Unix end-of-line convention, '\r', the Macintosh convention or
+'\r\n', the Windows convention.  All of these external representations
+are seen as '\n' by the Python program.  Note: This feature is only
+available if Python is built with universal newline support (the
+default).  Also, the newlines attribute of the file objects stdout,
+stdin and stderr are not updated by the communicate() method.
+
+The startupinfo and creationflags, if given, will be passed to the
+underlying CreateProcess() function.  They can specify things such as
+appearance of the main window and priority for the new process.
+(Windows only)
+
+
+This module also defines two shortcut functions:
+
+call(*args, **kwargs):
+    Run command with arguments.  Wait for command to complete, then
+    return the returncode attribute.
+
+    The arguments are the same as for the Popen constructor.  Example:
+
+    retcode = call(["ls", "-l"])
+
+
+Exceptions
+----------
+Exceptions raised in the child process, before the new program has
+started to execute, will be re-raised in the parent.  Additionally,
+the exception object will have one extra attribute called
+'child_traceback', which is a string containing traceback information
+from the childs point of view.
+
+The most common exception raised is OSError.  This occurs, for
+example, when trying to execute a non-existent file.  Applications
+should prepare for OSErrors.
+
+A ValueError will be raised if Popen is called with invalid arguments.
+
+
+Security
+--------
+Unlike some other popen functions, this implementation will never call
+/bin/sh implicitly.  This means that all characters, including shell
+metacharacters, can safely be passed to child processes.
+
+
+Popen objects
+=============
+Instances of the Popen class have the following methods:
+
+poll()
+    Check if child process has terminated.  Returns returncode
+    attribute.
+
+wait()
+    Wait for child process to terminate.  Returns returncode attribute.
+
+communicate(input=None)
+    Interact with process: Send data to stdin.  Read data from stdout
+    and stderr, until end-of-file is reached.  Wait for process to
+    terminate.  The optional stdin argument should be a string to be
+    sent to the child process, or None, if no data should be sent to
+    the child.
+
+    communicate() returns a tuple (stdout, stderr).
+
+    Note: The data read is buffered in memory, so do not use this
+    method if the data size is large or unlimited.
+
+The following attributes are also available:
+
+stdin
+    If the stdin argument is PIPE, this attribute is a file object
+    that provides input to the child process.  Otherwise, it is None.
+
+stdout
+    If the stdout argument is PIPE, this attribute is a file object
+    that provides output from the child process.  Otherwise, it is
+    None.
+
+stderr
+    If the stderr argument is PIPE, this attribute is file object that
+    provides error output from the child process.  Otherwise, it is
+    None.
+
+pid
+    The process ID of the child process.
+
+returncode
+    The child return code.  A None value indicates that the process
+    hasn't terminated yet.  A negative value -N indicates that the
+    child was terminated by signal N (UNIX only).
+
+
+Replacing older functions with the subprocess module
+====================================================
+In this section, "a ==> b" means that b can be used as a replacement
+for a.
+
+Note: All functions in this section fail (more or less) silently if
+the executed program cannot be found; this module raises an OSError
+exception.
+
+In the following examples, we assume that the subprocess module is
+imported with "from subprocess import *".
+
+
+Replacing /bin/sh shell backquote
+---------------------------------
+output=`mycmd myarg`
+==>
+output = Popen(["mycmd", "myarg"], stdout=PIPE).communicate()[0]
+
+
+Replacing shell pipe line
+-------------------------
+output=`dmesg | grep hda`
+==>
+p1 = Popen(["dmesg"], stdout=PIPE)
+p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+output = p2.communicate()[0]
+
+
+Replacing os.system()
+---------------------
+sts = os.system("mycmd" + " myarg")
+==>
+p = Popen("mycmd" + " myarg", shell=True)
+sts = os.waitpid(p.pid, 0)
+
+Note:
+
+* Calling the program through the shell is usually not required.
+
+* It's easier to look at the returncode attribute than the
+  exitstatus.
+
+A more real-world example would look like this:
+
+try:
+    retcode = call("mycmd" + " myarg", shell=True)
+    if retcode < 0:
+        print >>sys.stderr, "Child was terminated by signal", -retcode
+    else:
+        print >>sys.stderr, "Child returned", retcode
+except OSError, e:
+    print >>sys.stderr, "Execution failed:", e
+
+
+Replacing os.spawn*
+-------------------
+P_NOWAIT example:
+
+pid = os.spawnlp(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+pid = Popen(["/bin/mycmd", "myarg"]).pid
+
+
+P_WAIT example:
+
+retcode = os.spawnlp(os.P_WAIT, "/bin/mycmd", "mycmd", "myarg")
+==>
+retcode = call(["/bin/mycmd", "myarg"])
+
+
+Vector example:
+
+os.spawnvp(os.P_NOWAIT, path, args)
+==>
+Popen([path] + args[1:])
+
+
+Environment example:
+
+os.spawnlpe(os.P_NOWAIT, "/bin/mycmd", "mycmd", "myarg", env)
+==>
+Popen(["/bin/mycmd", "myarg"], env={"PATH": "/usr/bin"})
+
+
+Replacing os.popen*
+-------------------
+pipe = os.popen(cmd, mode='r', bufsize)
+==>
+pipe = Popen(cmd, shell=True, bufsize=bufsize, stdout=PIPE).stdout
+
+pipe = os.popen(cmd, mode='w', bufsize)
+==>
+pipe = Popen(cmd, shell=True, bufsize=bufsize, stdin=PIPE).stdin
+
+
+(child_stdin, child_stdout) = os.popen2(cmd, mode, bufsize)
+==>
+p = Popen(cmd, shell=True, bufsize=bufsize,
+          stdin=PIPE, stdout=PIPE, close_fds=True)
+(child_stdin, child_stdout) = (p.stdin, p.stdout)
+
+
+(child_stdin,
+ child_stdout,
+ child_stderr) = os.popen3(cmd, mode, bufsize)
+==>
+p = Popen(cmd, shell=True, bufsize=bufsize,
+          stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=True)
+(child_stdin,
+ child_stdout,
+ child_stderr) = (p.stdin, p.stdout, p.stderr)
+
+
+(child_stdin, child_stdout_and_stderr) = os.popen4(cmd, mode, bufsize)
+==>
+p = Popen(cmd, shell=True, bufsize=bufsize,
+          stdin=PIPE, stdout=PIPE, stderr=STDOUT, close_fds=True)
+(child_stdin, child_stdout_and_stderr) = (p.stdin, p.stdout)
+
+
+Replacing popen2.*
+------------------
+Note: If the cmd argument to popen2 functions is a string, the command
+is executed through /bin/sh.  If it is a list, the command is directly
+executed.
+
+(child_stdout, child_stdin) = popen2.popen2("somestring", bufsize, mode)
+==>
+p = Popen(["somestring"], shell=True, bufsize=bufsize
+          stdin=PIPE, stdout=PIPE, close_fds=True)
+(child_stdout, child_stdin) = (p.stdout, p.stdin)
+
+
+(child_stdout, child_stdin) = popen2.popen2(["mycmd", "myarg"], bufsize, mode)
+==>
+p = Popen(["mycmd", "myarg"], bufsize=bufsize,
+          stdin=PIPE, stdout=PIPE, close_fds=True)
+(child_stdout, child_stdin) = (p.stdout, p.stdin)
+
+The popen2.Popen3 and popen3.Popen4 basically works as subprocess.Popen,
+except that:
+
+* subprocess.Popen raises an exception if the execution fails
+* the capturestderr argument is replaced with the stderr argument.
+* stdin=PIPE and stdout=PIPE must be specified.
+* popen2 closes all filedescriptors by default, but you have to specify
+  close_fds=True with subprocess.Popen.
+
+
+"""
+
+import sys
+mswindows = (sys.platform == "win32")
+
+import os
+import types
+import traceback
+
+if mswindows:
+    import threading
+    import msvcrt
+    if 0: # <-- change this to use pywin32 instead of the _subprocess driver
+        import pywintypes
+        from win32api import GetStdHandle, STD_INPUT_HANDLE, \
+                             STD_OUTPUT_HANDLE, STD_ERROR_HANDLE
+        from win32api import GetCurrentProcess, DuplicateHandle, \
+                             GetModuleFileName, GetVersion
+        from win32con import DUPLICATE_SAME_ACCESS, SW_HIDE
+        from win32pipe import CreatePipe
+        from win32process import CreateProcess, STARTUPINFO, \
+                                 GetExitCodeProcess, STARTF_USESTDHANDLES, \
+                                 STARTF_USESHOWWINDOW, CREATE_NEW_CONSOLE
+        from win32event import WaitForSingleObject, INFINITE, WAIT_OBJECT_0
+    else:
+        from _subprocess import *
+        class STARTUPINFO:
+            dwFlags = 0
+            hStdInput = None
+            hStdOutput = None
+            hStdError = None
+        class pywintypes:
+            error = IOError
+else:
+    import select
+    import errno
+    import fcntl
+    import pickle
+
+__all__ = ["Popen", "PIPE", "STDOUT", "call"]
+
+try:
+    MAXFD = os.sysconf("SC_OPEN_MAX")
+except:
+    MAXFD = 256
+
+# True/False does not exist on 2.2.0
+try:
+    False
+except NameError:
+    False = 0
+    True = 1
+
# Popen instances that have been created but not yet reaped; every new
# instance appends itself here (see Popen.__init__) and _cleanup()
# polls them on each subsequent Popen construction.
_active = []

def _cleanup():
    # Poll every known child so finished processes get noticed.
    # Iterates over a copy since poll() presumably removes finished
    # entries from _active -- TODO confirm against Popen.poll().
    for inst in _active[:]:
        inst.poll()

# Sentinel values for the stdin/stdout/stderr arguments of Popen:
# PIPE requests a new pipe to the child; STDOUT (valid for stderr
# only) routes the child's stderr into the same handle as stdout.
PIPE = -1
STDOUT = -2
+
+
def call(*args, **kwargs):
    """Run a command described by the Popen constructor arguments.

    Blocks until the child process finishes, then returns its
    returncode attribute.  Example:

    retcode = call(["ls", "-l"])
    """
    # Forward everything to Popen, then block until the child exits.
    process = Popen(*args, **kwargs)
    return process.wait()
+
+
def list2cmdline(seq):
    """
    Translate a sequence of arguments into a command line
    string, using the same rules as the MS C runtime:

    1) Arguments are delimited by white space, which is either a
       space or a tab.

    2) A string surrounded by double quotation marks is
       interpreted as a single argument, regardless of white space
       contained within.  A quoted string can be embedded in an
       argument.

    3) A double quotation mark preceded by a backslash is
       interpreted as a literal double quotation mark.

    4) Backslashes are interpreted literally, unless they
       immediately precede a double quotation mark.

    5) If backslashes immediately precede a double quotation mark,
       every pair of backslashes is interpreted as a literal
       backslash.  If the number of backslashes is odd, the last
       backslash escapes the next double quotation mark as
       described in rule 3.
    """

    # See
    # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
    result = []
    needquote = False
    for arg in seq:
        bs_buf = []

        # Add a space to separate this argument from the others
        if result:
            result.append(' ')

        # Bugfix vs. the original 2.4 code: an empty argument must be
        # quoted ("") or it silently disappears from the command line
        # (matches the fix adopted by later upstream Python versions).
        needquote = (" " in arg) or ("\t" in arg) or not arg
        if needquote:
            result.append('"')

        for c in arg:
            if c == '\\':
                # Don't know if we need to double yet.
                bs_buf.append(c)
            elif c == '"':
                # Double the pending backslashes, then add a \" so the
                # quotation mark comes out literal (rules 3 and 5).
                result.append('\\' * len(bs_buf)*2)
                bs_buf = []
                result.append('\\"')
            else:
                # Normal char: pending backslashes are literal (rule 4).
                if bs_buf:
                    result.extend(bs_buf)
                    bs_buf = []
                result.append(c)

        # Add remaining backslashes, if any.
        if bs_buf:
            result.extend(bs_buf)

        if needquote:
            # Trailing backslashes must be doubled so they do not
            # escape the closing quotation mark (rule 5).
            result.extend(bs_buf)
            result.append('"')

    return ''.join(result)
+
+
+class Popen(object):
    def __init__(self, args, bufsize=0, executable=None,
                 stdin=None, stdout=None, stderr=None,
                 preexec_fn=None, close_fds=False, shell=False,
                 cwd=None, env=None, universal_newlines=False,
                 startupinfo=None, creationflags=0):
        """Create new Popen instance.

        Validates platform-specific arguments, sets up the requested
        pipes, spawns the child process via _execute_child(), and wraps
        the parent ends of the pipes as the file objects self.stdin,
        self.stdout and self.stderr.
        """
        # Give previously created children a chance to be reaped.
        _cleanup()

        # bufsize is passed straight through to os.fdopen() below.
        if not isinstance(bufsize, (int, long)):
            raise TypeError("bufsize must be an integer")

        # Reject options that only make sense on the other platform.
        if mswindows:
            if preexec_fn is not None:
                raise ValueError("preexec_fn is not supported on Windows "
                                 "platforms")
            if close_fds:
                raise ValueError("close_fds is not supported on Windows "
                                 "platforms")
        else:
            # POSIX
            if startupinfo is not None:
                raise ValueError("startupinfo is only supported on Windows "
                                 "platforms")
            if creationflags != 0:
                raise ValueError("creationflags is only supported on Windows "
                                 "platforms")

        # Public attributes; the pipe wrappers are filled in below once
        # the handles exist, pid/returncode by _execute_child()/poll().
        self.stdin = None
        self.stdout = None
        self.stderr = None
        self.pid = None
        self.returncode = None
        self.universal_newlines = universal_newlines

        # Input and output objects. The general principle is like
        # this:
        #
        # Parent                   Child
        # ------                   -----
        # p2cwrite   ---stdin--->  p2cread
        # c2pread    <--stdout---  c2pwrite
        # errread    <--stderr---  errwrite
        #
        # On POSIX, the child objects are file descriptors.  On
        # Windows, these are Windows file handles.  The parent objects
        # are file descriptors on both platforms.  The parent objects
        # are None when not using PIPEs. The child objects are None
        # when not redirecting.

        (p2cread, p2cwrite,
         c2pread, c2pwrite,
         errread, errwrite) = self._get_handles(stdin, stdout, stderr)

        self._execute_child(args, executable, preexec_fn, close_fds,
                            cwd, env, universal_newlines,
                            startupinfo, creationflags, shell,
                            p2cread, p2cwrite,
                            c2pread, c2pwrite,
                            errread, errwrite)

        # Wrap the parent ends of the pipes in file objects.
        # NOTE(review): these truth-tests would treat file descriptor 0
        # as "no pipe"; presumably _get_handles never returns fd 0 here
        # -- verify (later upstream code compares against None).
        if p2cwrite:
            self.stdin = os.fdopen(p2cwrite, 'wb', bufsize)
        if c2pread:
            if universal_newlines:
                self.stdout = os.fdopen(c2pread, 'rU', bufsize)
            else:
                self.stdout = os.fdopen(c2pread, 'rb', bufsize)
        if errread:
            if universal_newlines:
                self.stderr = os.fdopen(errread, 'rU', bufsize)
            else:
                self.stderr = os.fdopen(errread, 'rb', bufsize)

        # Register for the module-level _cleanup() polling.
        _active.append(self)
+
+
+    def _translate_newlines(self, data):
+        data = data.replace("\r\n", "\n")
+        data = data.replace("\r", "\n")
+        return data
+
+
+    if mswindows:
+        #
+        # Windows methods
+        #
+        def _get_handles(self, stdin, stdout, stderr):
+            """Construct and return tupel with IO objects:
+            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+            """
+            if stdin == None and stdout == None and stderr == None:
+                return (None, None, None, None, None, None)
+
+            p2cread, p2cwrite = None, None
+            c2pread, c2pwrite = None, None
+            errread, errwrite = None, None
+
+            if stdin == None:
+                p2cread = GetStdHandle(STD_INPUT_HANDLE)
+            elif stdin == PIPE:
+                p2cread, p2cwrite = CreatePipe(None, 0)
+                # Detach and turn into fd
+                p2cwrite = p2cwrite.Detach()
+                p2cwrite = msvcrt.open_osfhandle(p2cwrite, 0)
+            elif type(stdin) == types.IntType:
+                p2cread = msvcrt.get_osfhandle(stdin)
+            else:
+                # Assuming file-like object
+                p2cread = msvcrt.get_osfhandle(stdin.fileno())
+            p2cread = self._make_inheritable(p2cread)
+
+            if stdout == None:
+                c2pwrite = GetStdHandle(STD_OUTPUT_HANDLE)
+            elif stdout == PIPE:
+                c2pread, c2pwrite = CreatePipe(None, 0)
+                # Detach and turn into fd
+                c2pread = c2pread.Detach()
+                c2pread = msvcrt.open_osfhandle(c2pread, 0)
+            elif type(stdout) == types.IntType:
+                c2pwrite = msvcrt.get_osfhandle(stdout)
+            else:
+                # Assuming file-like object
+                c2pwrite = msvcrt.get_osfhandle(stdout.fileno())
+            c2pwrite = self._make_inheritable(c2pwrite)
+
+            if stderr == None:
+                errwrite = GetStdHandle(STD_ERROR_HANDLE)
+            elif stderr == PIPE:
+                errread, errwrite = CreatePipe(None, 0)
+                # Detach and turn into fd
+                errread = errread.Detach()
+                errread = msvcrt.open_osfhandle(errread, 0)
+            elif stderr == STDOUT:
+                errwrite = c2pwrite
+            elif type(stderr) == types.IntType:
+                errwrite = msvcrt.get_osfhandle(stderr)
+            else:
+                # Assuming file-like object
+                errwrite = msvcrt.get_osfhandle(stderr.fileno())
+            errwrite = self._make_inheritable(errwrite)
+
+            return (p2cread, p2cwrite,
+                    c2pread, c2pwrite,
+                    errread, errwrite)
+
+
+        def _make_inheritable(self, handle):
+            """Return a duplicate of handle, which is inheritable"""
+            return DuplicateHandle(GetCurrentProcess(), handle,
+                                   GetCurrentProcess(), 0, 1,
+                                   DUPLICATE_SAME_ACCESS)
+
+
+        def _find_w9xpopen(self):
+            """Find and return absolut path to w9xpopen.exe"""
+            w9xpopen = os.path.join(os.path.dirname(GetModuleFileName(0)),
+                                    "w9xpopen.exe")
+            if not os.path.exists(w9xpopen):
+                # Eeek - file-not-found - possibly an embedding
+                # situation - see if we can locate it in sys.exec_prefix
+                w9xpopen = os.path.join(os.path.dirname(sys.exec_prefix),
+                                        "w9xpopen.exe")
+                if not os.path.exists(w9xpopen):
+                    raise RuntimeError("Cannot locate w9xpopen.exe, which is "
+                                       "needed for Popen to work with your "
+                                       "shell or platform.")
+            return w9xpopen
+
+
+        def _execute_child(self, args, executable, preexec_fn, close_fds,
+                           cwd, env, universal_newlines,
+                           startupinfo, creationflags, shell,
+                           p2cread, p2cwrite,
+                           c2pread, c2pwrite,
+                           errread, errwrite):
+            """Execute program (MS Windows version)"""
+
+            if not isinstance(args, types.StringTypes):
+                args = list2cmdline(args)
+
+            # Process startup details
+            default_startupinfo = STARTUPINFO()
+            if startupinfo == None:
+                startupinfo = default_startupinfo
+            if not None in (p2cread, c2pwrite, errwrite):
+                startupinfo.dwFlags |= STARTF_USESTDHANDLES
+                startupinfo.hStdInput = p2cread
+                startupinfo.hStdOutput = c2pwrite
+                startupinfo.hStdError = errwrite
+
+            if shell:
+                default_startupinfo.dwFlags |= STARTF_USESHOWWINDOW
+                default_startupinfo.wShowWindow = SW_HIDE
+                comspec = os.environ.get("COMSPEC", "cmd.exe")
+                args = comspec + " /c " + args
+                if (GetVersion() >= 0x80000000L or
+                        os.path.basename(comspec).lower() == "command.com"):
+                    # Win9x, or using command.com on NT. We need to
+                    # use the w9xpopen intermediate program. For more
+                    # information, see KB Q150956
+                    # (http://web.archive.org/web/20011105084002/http://support.microsoft.com/support/kb/articles/Q150/9/56.asp)
+                    w9xpopen = self._find_w9xpopen()
+                    args = '"%s" %s' % (w9xpopen, args)
+                    # Not passing CREATE_NEW_CONSOLE has been known to
+                    # cause random failures on win9x.  Specifically a
+                    # dialog: "Your program accessed mem currently in
+                    # use at xxx" and a hopeful warning about the
+                    # stability of your system.  Cost is Ctrl+C wont
+                    # kill children.
+                    creationflags |= CREATE_NEW_CONSOLE
+
+            # Start the process
+            try:
+                hp, ht, pid, tid = CreateProcess(executable, args,
+                                         # no special security
+                                         None, None,
+                                         # must inherit handles to pass std
+                                         # handles
+                                         1,
+                                         creationflags,
+                                         env,
+                                         cwd,
+                                         startupinfo)
+            except pywintypes.error, e:
+                # Translate pywintypes.error to WindowsError, which is
+                # a subclass of OSError.  FIXME: We should really
+                # translate errno using _sys_errlist (or simliar), but
+                # how can this be done from Python?
+                raise WindowsError(*e.args)
+
+            # Retain the process handle, but close the thread handle
+            self._handle = hp
+            self.pid = pid
+            ht.Close()
+
+            # Child is launched. Close the parent's copy of those pipe
+            # handles that only the child should have open.  You need
+            # to make sure that no handles to the write end of the
+            # output pipe are maintained in this process or else the
+            # pipe will not close when the child process exits and the
+            # ReadFile will hang.
+            if p2cread != None:
+                p2cread.Close()
+            if c2pwrite != None:
+                c2pwrite.Close()
+            if errwrite != None:
+                errwrite.Close()
+
+
+        def poll(self):
+            """Check if child process has terminated.  Returns returncode
+            attribute."""
+            if self.returncode == None:
+                if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
+                    self.returncode = GetExitCodeProcess(self._handle)
+                    _active.remove(self)
+            return self.returncode
+
+
+        def wait(self):
+            """Wait for child process to terminate.  Returns returncode
+            attribute."""
+            if self.returncode == None:
+                obj = WaitForSingleObject(self._handle, INFINITE)
+                self.returncode = GetExitCodeProcess(self._handle)
+                _active.remove(self)
+            return self.returncode
+
+
+        def _readerthread(self, fh, buffer):
+            buffer.append(fh.read())
+
+
+        def communicate(self, input=None):
+            """Interact with process: Send data to stdin.  Read data from
+            stdout and stderr, until end-of-file is reached.  Wait for
+            process to terminate.  The optional input argument should be a
+            string to be sent to the child process, or None, if no data
+            should be sent to the child.
+
+            communicate() returns a tuple (stdout, stderr)."""
+            stdout = None # Return
+            stderr = None # Return
+
+            if self.stdout:
+                stdout = []
+                stdout_thread = threading.Thread(target=self._readerthread,
+                                                 args=(self.stdout, stdout))
+                stdout_thread.setDaemon(True)
+                stdout_thread.start()
+            if self.stderr:
+                stderr = []
+                stderr_thread = threading.Thread(target=self._readerthread,
+                                                 args=(self.stderr, stderr))
+                stderr_thread.setDaemon(True)
+                stderr_thread.start()
+
+            if self.stdin:
+                if input != None:
+                    self.stdin.write(input)
+                self.stdin.close()
+
+            if self.stdout:
+                stdout_thread.join()
+            if self.stderr:
+                stderr_thread.join()
+
+            # All data exchanged.  Translate lists into strings.
+            if stdout != None:
+                stdout = stdout[0]
+            if stderr != None:
+                stderr = stderr[0]
+
+            # Translate newlines, if requested.  We cannot let the file
+            # object do the translation: It is based on stdio, which is
+            # impossible to combine with select (unless forcing no
+            # buffering).
+            if self.universal_newlines and hasattr(open, 'newlines'):
+                if stdout:
+                    stdout = self._translate_newlines(stdout)
+                if stderr:
+                    stderr = self._translate_newlines(stderr)
+
+            self.wait()
+            return (stdout, stderr)
+
+    else:
+        #
+        # POSIX methods
+        #
+        def _get_handles(self, stdin, stdout, stderr):
+            """Construct and return tupel with IO objects:
+            p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite
+            """
+            p2cread, p2cwrite = None, None
+            c2pread, c2pwrite = None, None
+            errread, errwrite = None, None
+
+            if stdin == None:
+                pass
+            elif stdin == PIPE:
+                p2cread, p2cwrite = os.pipe()
+            elif type(stdin) == types.IntType:
+                p2cread = stdin
+            else:
+                # Assuming file-like object
+                p2cread = stdin.fileno()
+
+            if stdout == None:
+                pass
+            elif stdout == PIPE:
+                c2pread, c2pwrite = os.pipe()
+            elif type(stdout) == types.IntType:
+                c2pwrite = stdout
+            else:
+                # Assuming file-like object
+                c2pwrite = stdout.fileno()
+
+            if stderr == None:
+                pass
+            elif stderr == PIPE:
+                errread, errwrite = os.pipe()
+            elif stderr == STDOUT:
+                errwrite = c2pwrite
+            elif type(stderr) == types.IntType:
+                errwrite = stderr
+            else:
+                # Assuming file-like object
+                errwrite = stderr.fileno()
+
+            return (p2cread, p2cwrite,
+                    c2pread, c2pwrite,
+                    errread, errwrite)
+
+
+        def _set_cloexec_flag(self, fd):
+            try:
+                cloexec_flag = fcntl.FD_CLOEXEC
+            except AttributeError:
+                cloexec_flag = 1
+
+            old = fcntl.fcntl(fd, fcntl.F_GETFD)
+            fcntl.fcntl(fd, fcntl.F_SETFD, old | cloexec_flag)
+
+
+        def _close_fds(self, but):
+            for i in range(3, MAXFD):
+                if i == but:
+                    continue
+                try:
+                    os.close(i)
+                except:
+                    pass
+
+
+        def _execute_child(self, args, executable, preexec_fn, close_fds,
+                           cwd, env, universal_newlines,
+                           startupinfo, creationflags, shell,
+                           p2cread, p2cwrite,
+                           c2pread, c2pwrite,
+                           errread, errwrite):
+            """Execute program (POSIX version)"""
+
+            if isinstance(args, types.StringTypes):
+                args = [args]
+
+            if shell:
+                args = ["/bin/sh", "-c"] + args
+
+            if executable == None:
+                executable = args[0]
+
+            # For transferring possible exec failure from child to parent
+            # The first char specifies the exception type: 0 means
+            # OSError, 1 means some other error.
+            errpipe_read, errpipe_write = os.pipe()
+            self._set_cloexec_flag(errpipe_write)
+
+            self.pid = os.fork()
+            if self.pid == 0:
+                # Child
+                try:
+                    # Close parent's pipe ends
+                    if p2cwrite:
+                        os.close(p2cwrite)
+                    if c2pread:
+                        os.close(c2pread)
+                    if errread:
+                        os.close(errread)
+                    os.close(errpipe_read)
+
+                    # Dup fds for child
+                    if p2cread:
+                        os.dup2(p2cread, 0)
+                    if c2pwrite:
+                        os.dup2(c2pwrite, 1)
+                    if errwrite:
+                        os.dup2(errwrite, 2)
+
+                    # Close pipe fds.  Make sure we doesn't close the same
+                    # fd more than once.
+                    if p2cread:
+                        os.close(p2cread)
+                    if c2pwrite and c2pwrite not in (p2cread,):
+                        os.close(c2pwrite)
+                    if errwrite and errwrite not in (p2cread, c2pwrite):
+                        os.close(errwrite)
+
+                    # Close all other fds, if asked for
+                    if close_fds:
+                        self._close_fds(but=errpipe_write)
+
+                    if cwd != None:
+                        os.chdir(cwd)
+
+                    if preexec_fn:
+                        apply(preexec_fn)
+
+                    if env == None:
+                        os.execvp(executable, args)
+                    else:
+                        os.execvpe(executable, args, env)
+
+                except:
+                    exc_type, exc_value, tb = sys.exc_info()
+                    # Save the traceback and attach it to the exception object
+                    exc_lines = traceback.format_exception(exc_type,
+                                                           exc_value,
+                                                           tb)
+                    exc_value.child_traceback = ''.join(exc_lines)
+                    os.write(errpipe_write, pickle.dumps(exc_value))
+
+                # This exitcode won't be reported to applications, so it
+                # really doesn't matter what we return.
+                os._exit(255)
+
+            # Parent
+            os.close(errpipe_write)
+            if p2cread and p2cwrite:
+                os.close(p2cread)
+            if c2pwrite and c2pread:
+                os.close(c2pwrite)
+            if errwrite and errread:
+                os.close(errwrite)
+
+            # Wait for exec to fail or succeed; possibly raising exception
+            data = os.read(errpipe_read, 1048576) # Exceptions limited to 1 MB
+            os.close(errpipe_read)
+            if data != "":
+                os.waitpid(self.pid, 0)
+                child_exception = pickle.loads(data)
+                raise child_exception
+
+
+        def _handle_exitstatus(self, sts):
+            if os.WIFSIGNALED(sts):
+                self.returncode = -os.WTERMSIG(sts)
+            elif os.WIFEXITED(sts):
+                self.returncode = os.WEXITSTATUS(sts)
+            else:
+                # Should never happen
+                raise RuntimeError("Unknown child exit status!")
+
+            _active.remove(self)
+
+
+        def poll(self):
+            """Check if child process has terminated.  Returns returncode
+            attribute."""
+            if self.returncode == None:
+                try:
+                    pid, sts = os.waitpid(self.pid, os.WNOHANG)
+                    if pid == self.pid:
+                        self._handle_exitstatus(sts)
+                except os.error:
+                    pass
+            return self.returncode
+
+
+        def wait(self):
+            """Wait for child process to terminate.  Returns returncode
+            attribute."""
+            if self.returncode == None:
+                pid, sts = os.waitpid(self.pid, 0)
+                self._handle_exitstatus(sts)
+            return self.returncode
+
+
+        def communicate(self, input=None):
+            """Interact with process: Send data to stdin.  Read data from
+            stdout and stderr, until end-of-file is reached.  Wait for
+            process to terminate.  The optional input argument should be a
+            string to be sent to the child process, or None, if no data
+            should be sent to the child.
+
+            communicate() returns a tuple (stdout, stderr)."""
+            read_set = []
+            write_set = []
+            stdout = None # Return
+            stderr = None # Return
+
+            if self.stdin:
+                # Flush stdio buffer.  This might block, if the user has
+                # been writing to .stdin in an uncontrolled fashion.
+                self.stdin.flush()
+                if input:
+                    write_set.append(self.stdin)
+                else:
+                    self.stdin.close()
+            if self.stdout:
+                read_set.append(self.stdout)
+                stdout = []
+            if self.stderr:
+                read_set.append(self.stderr)
+                stderr = []
+
+            while read_set or write_set:
+                rlist, wlist, xlist = select.select(read_set, write_set, [])
+
+                if self.stdin in wlist:
+                    # When select has indicated that the file is writable,
+                    # we can write up to PIPE_BUF bytes without risk
+                    # blocking.  POSIX defines PIPE_BUF >= 512
+                    bytes_written = os.write(self.stdin.fileno(), input[:512])
+                    input = input[bytes_written:]
+                    if not input:
+                        self.stdin.close()
+                        write_set.remove(self.stdin)
+
+                if self.stdout in rlist:
+                    data = os.read(self.stdout.fileno(), 1024)
+                    if data == "":
+                        self.stdout.close()
+                        read_set.remove(self.stdout)
+                    stdout.append(data)
+
+                if self.stderr in rlist:
+                    data = os.read(self.stderr.fileno(), 1024)
+                    if data == "":
+                        self.stderr.close()
+                        read_set.remove(self.stderr)
+                    stderr.append(data)
+
+            # All data exchanged.  Translate lists into strings.
+            if stdout != None:
+                stdout = ''.join(stdout)
+            if stderr != None:
+                stderr = ''.join(stderr)
+
+            # Translate newlines, if requested.  We cannot let the file
+            # object do the translation: It is based on stdio, which is
+            # impossible to combine with select (unless forcing no
+            # buffering).
+            if self.universal_newlines and hasattr(open, 'newlines'):
+                if stdout:
+                    stdout = self._translate_newlines(stdout)
+                if stderr:
+                    stderr = self._translate_newlines(stderr)
+
+            self.wait()
+            return (stdout, stderr)
+
+
+def _demo_posix():
+    #
+    # Example 1: Simple redirection: Get process list
+    #
+    plist = Popen(["ps"], stdout=PIPE).communicate()[0]
+    print "Process list:"
+    print plist
+
+    #
+    # Example 2: Change uid before executing child
+    #
+    if os.getuid() == 0:
+        p = Popen(["id"], preexec_fn=lambda: os.setuid(100))
+        p.wait()
+
+    #
+    # Example 3: Connecting several subprocesses
+    #
+    print "Looking for 'hda'..."
+    p1 = Popen(["dmesg"], stdout=PIPE)
+    p2 = Popen(["grep", "hda"], stdin=p1.stdout, stdout=PIPE)
+    print repr(p2.communicate()[0])
+
+    #
+    # Example 4: Catch execution error
+    #
+    print
+    print "Trying a weird file..."
+    try:
+        print Popen(["/this/path/does/not/exist"]).communicate()
+    except OSError, e:
+        if e.errno == errno.ENOENT:
+            print "The file didn't exist.  I thought so..."
+            print "Child traceback:"
+            print e.child_traceback
+        else:
+            print "Error", e.errno
+    else:
+        print >>sys.stderr, "Gosh.  No error."
+
+
+def _demo_windows():
+    #
+    # Example 1: Connecting several subprocesses
+    #
+    print "Looking for 'PROMPT' in set output..."
+    p1 = Popen("set", stdout=PIPE, shell=True)
+    p2 = Popen('find "PROMPT"', stdin=p1.stdout, stdout=PIPE)
+    print repr(p2.communicate()[0])
+
+    #
+    # Example 2: Simple execution of program
+    #
+    print "Executing calc..."
+    p = Popen("calc")
+    p.wait()
+
+
if __name__ == "__main__":
    # Running this module as a script exercises the demo appropriate
    # for the current platform (mswindows is set at import time).
    if mswindows:
        _demo_windows()
    else:
        _demo_posix()
diff --git a/depot_tools/release/win/python_24/Lib/sunau.py b/depot_tools/release/win/python_24/Lib/sunau.py
new file mode 100644
index 0000000..0a40295
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sunau.py
@@ -0,0 +1,474 @@
+"""Stuff to parse Sun and NeXT audio files.
+
+An audio file consists of a header followed by the data.  The structure
+of the header is as follows.
+
+        +---------------+
+        | magic word    |
+        +---------------+
+        | header size   |
+        +---------------+
+        | data size     |
+        +---------------+
+        | encoding      |
+        +---------------+
+        | sample rate   |
+        +---------------+
+        | # of channels |
+        +---------------+
+        | info          |
+        |               |
+        +---------------+
+
+The magic word consists of the 4 characters '.snd'.  Apart from the
+info field, all header fields are 4 bytes in size.  They are all
+32-bit unsigned integers encoded in big-endian byte order.
+
+The header size really gives the start of the data.
+The data size is the physical size of the data.  From the other
+parameters the number of frames can be calculated.
+The encoding gives the way in which audio samples are encoded.
+Possible values are listed below.
+The info field currently consists of an ASCII string giving a
+human-readable description of the audio file.  The info field is
+padded with NUL bytes to the header size.
+
+Usage.
+
+Reading audio files:
+        f = sunau.open(file, 'r')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods read(), seek(), and close().
+When the setpos() and rewind() methods are not used, the seek()
method is not necessary.
+
+This returns an instance of a class with the following public methods:
+        getnchannels()  -- returns number of audio channels (1 for
+                           mono, 2 for stereo)
+        getsampwidth()  -- returns sample width in bytes
+        getframerate()  -- returns sampling frequency
+        getnframes()    -- returns number of audio frames
+        getcomptype()   -- returns compression type ('NONE' or 'ULAW')
+        getcompname()   -- returns human-readable version of
+                           compression type ('not compressed' matches 'NONE')
+        getparams()     -- returns a tuple consisting of all of the
+                           above in the above order
+        getmarkers()    -- returns None (for compatibility with the
+                           aifc module)
+        getmark(id)     -- raises an error since the mark does not
+                           exist (for compatibility with the aifc module)
+        readframes(n)   -- returns at most n frames of audio
+        rewind()        -- rewind to the beginning of the audio stream
+        setpos(pos)     -- seek to the specified position
+        tell()          -- return the current position
+        close()         -- close the instance (make it unusable)
+The position returned by tell() and the position given to setpos()
+are compatible and have nothing to do with the actual position in the
+file.
+The close() method is called automatically when the class instance
+is destroyed.
+
+Writing audio files:
+        f = sunau.open(file, 'w')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods write(), tell(), seek(), and
+close().
+
+This returns an instance of a class with the following public methods:
+        setnchannels(n) -- set the number of channels
+        setsampwidth(n) -- set the sample width
+        setframerate(n) -- set the frame rate
+        setnframes(n)   -- set the number of frames
+        setcomptype(type, name)
+                        -- set the compression type and the
+                           human-readable compression type
+        setparams(tuple)-- set all parameters at once
+        tell()          -- return current position in output file
+        writeframesraw(data)
                        -- write audio frames without patching up the
+                           file header
+        writeframes(data)
+                        -- write audio frames and patch up the file header
+        close()         -- patch up the file header and close the
+                           output file
+You should set the parameters before the first writeframesraw or
+writeframes.  The total number of frames does not need to be set,
+but when it is set to the correct value, the header does not have to
+be patched up.
It is best to first set all parameters, perhaps excepting the
+compression type, and then write audio frames using writeframesraw.
+When all frames have been written, either call writeframes('') or
+close() to patch up the sizes in the header.
+The close() method is called automatically when the class instance
+is destroyed.
+"""
+
# Values for the header's 'encoding' field.
# from <multimedia/audio_filehdr.h>
AUDIO_FILE_MAGIC = 0x2e736e64
AUDIO_FILE_ENCODING_MULAW_8 = 1
AUDIO_FILE_ENCODING_LINEAR_8 = 2
AUDIO_FILE_ENCODING_LINEAR_16 = 3
AUDIO_FILE_ENCODING_LINEAR_24 = 4
AUDIO_FILE_ENCODING_LINEAR_32 = 5
AUDIO_FILE_ENCODING_FLOAT = 6
AUDIO_FILE_ENCODING_DOUBLE = 7
AUDIO_FILE_ENCODING_ADPCM_G721 = 23
AUDIO_FILE_ENCODING_ADPCM_G722 = 24
AUDIO_FILE_ENCODING_ADPCM_G723_3 = 25
AUDIO_FILE_ENCODING_ADPCM_G723_5 = 26
AUDIO_FILE_ENCODING_ALAW_8 = 27

# Sentinel data-size meaning "size unknown / stream".
# from <multimedia/audio_hdr.h>
AUDIO_UNKNOWN_SIZE = 0xFFFFFFFFL        # ((unsigned)(~0))

# Encodings this module can handle; others are rejected by initfp()
# with 'encoding not (yet) supported'.
_simple_encodings = [AUDIO_FILE_ENCODING_MULAW_8,
                     AUDIO_FILE_ENCODING_LINEAR_8,
                     AUDIO_FILE_ENCODING_LINEAR_16,
                     AUDIO_FILE_ENCODING_LINEAR_24,
                     AUDIO_FILE_ENCODING_LINEAR_32,
                     AUDIO_FILE_ENCODING_ALAW_8]
+
class Error(Exception):
    """Raised for malformed or unsupported Sun/NeXT audio files."""
    pass
+
+def _read_u32(file):
+    x = 0L
+    for i in range(4):
+        byte = file.read(1)
+        if byte == '':
+            raise EOFError
+        x = x*256 + ord(byte)
+    return x
+
+def _write_u32(file, x):
+    data = []
+    for i in range(4):
+        d, m = divmod(x, 256)
+        data.insert(0, m)
+        x = d
+    for i in range(4):
+        file.write(chr(int(data[i])))
+
+class Au_read:
+
+    def __init__(self, f):
+        if type(f) == type(''):
+            import __builtin__
+            f = __builtin__.open(f, 'rb')
+        self.initfp(f)
+
    def __del__(self):
        # Close the underlying file when the reader is garbage-collected.
        # NOTE(review): assumes self._file is always set; if initfp()
        # raised before assigning it, this would hit AttributeError
        # (which the interpreter swallows during __del__).
        if self._file:
            self.close()
+
    def initfp(self, file):
        """Parse the AU header from *file* and initialize reader state.

        Reads, in order: magic word, header size, data size, encoding,
        frame rate, channel count, then the optional NUL-padded info
        string.  Raises Error on a bad magic word, an implausible
        header size, or an encoding outside _simple_encodings.
        """
        self._file = file
        self._soundpos = 0
        magic = int(_read_u32(file))
        if magic != AUDIO_FILE_MAGIC:
            raise Error, 'bad magic number'
        self._hdr_size = int(_read_u32(file))
        if self._hdr_size < 24:
            # 24 bytes = the six fixed 4-byte header fields.
            raise Error, 'header size too small'
        if self._hdr_size > 100:
            raise Error, 'header size ridiculously large'
        self._data_size = _read_u32(file)
        if self._data_size != AUDIO_UNKNOWN_SIZE:
            self._data_size = int(self._data_size)
        self._encoding = int(_read_u32(file))
        if self._encoding not in _simple_encodings:
            raise Error, 'encoding not (yet) supported'
        if self._encoding in (AUDIO_FILE_ENCODING_MULAW_8,
                  AUDIO_FILE_ENCODING_ALAW_8):
            # u-law/A-law: one byte per sample on disk; width 2
            # presumably reflects the 16-bit decoded samples.
            self._sampwidth = 2
            self._framesize = 1
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_8:
            self._framesize = self._sampwidth = 1
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_16:
            self._framesize = self._sampwidth = 2
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_24:
            self._framesize = self._sampwidth = 3
        elif self._encoding == AUDIO_FILE_ENCODING_LINEAR_32:
            self._framesize = self._sampwidth = 4
        else:
            raise Error, 'unknown encoding'
        self._framerate = int(_read_u32(file))
        self._nchannels = int(_read_u32(file))
        # A frame holds one sample per channel.
        self._framesize = self._framesize * self._nchannels
        if self._hdr_size > 24:
            # Info string fills the rest of the header; trim at first NUL.
            self._info = file.read(self._hdr_size - 24)
            for i in range(len(self._info)):
                if self._info[i] == '\0':
                    self._info = self._info[:i]
                    break
        else:
            self._info = ''
+
+    def getfp(self):
+        return self._file
+
+    def getnchannels(self):
+        return self._nchannels
+
+    def getsampwidth(self):
+        return self._sampwidth
+
+    def getframerate(self):
+        return self._framerate
+
+    def getnframes(self):
+        if self._data_size == AUDIO_UNKNOWN_SIZE:
+            return AUDIO_UNKNOWN_SIZE
+        if self._encoding in _simple_encodings:
+            return self._data_size / self._framesize
+        return 0                # XXX--must do some arithmetic here
+
+    def getcomptype(self):
+        if self._encoding == AUDIO_FILE_ENCODING_MULAW_8:
+            return 'ULAW'
+        elif self._encoding == AUDIO_FILE_ENCODING_ALAW_8:
+            return 'ALAW'
+        else:
+            return 'NONE'
+
+    def getcompname(self):
+        if self._encoding == AUDIO_FILE_ENCODING_MULAW_8:
+            return 'CCITT G.711 u-law'
+        elif self._encoding == AUDIO_FILE_ENCODING_ALAW_8:
+            return 'CCITT G.711 A-law'
+        else:
+            return 'not compressed'
+
+    def getparams(self):
+        return self.getnchannels(), self.getsampwidth(), \
+                  self.getframerate(), self.getnframes(), \
+                  self.getcomptype(), self.getcompname()
+
+    def getmarkers(self):
+        return None
+
+    def getmark(self, id):
+        raise Error, 'no marks'
+
+    def readframes(self, nframes):
+        if self._encoding in _simple_encodings:
+            if nframes == AUDIO_UNKNOWN_SIZE:
+                data = self._file.read()
+            else:
+                data = self._file.read(nframes * self._framesize * self._nchannels)
+            if self._encoding == AUDIO_FILE_ENCODING_MULAW_8:
+                import audioop
+                data = audioop.ulaw2lin(data, self._sampwidth)
+            return data
+        return None             # XXX--not implemented yet
+
+    def rewind(self):
+        self._soundpos = 0
+        self._file.seek(self._hdr_size)
+
+    def tell(self):
+        return self._soundpos
+
+    def setpos(self, pos):
+        if pos < 0 or pos > self.getnframes():
+            raise Error, 'position not in range'
+        self._file.seek(pos * self._framesize + self._hdr_size)
+        self._soundpos = pos
+
+    def close(self):
+        self._file = None
+
class Au_write:
    """Write access to a Sun/NeXT audio (.au) file.

    Channels, sample width and frame rate must be configured before the
    first write; the header is emitted lazily by the first
    writeframesraw() call and patched on close() when the predicted data
    length turns out wrong.
    """

    def __init__(self, f):
        # Accept a filename as well as an already-open file object.
        if type(f) == type(''):
            import __builtin__
            f = __builtin__.open(f, 'wb')
        self.initfp(f)

    def __del__(self):
        if self._file:
            self.close()

    def initfp(self, file):
        """Reset all writer state for writing to `file`."""
        self._file = file
        self._framerate = 0
        self._nchannels = 0
        self._sampwidth = 0
        self._framesize = 0
        self._nframes = AUDIO_UNKNOWN_SIZE
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0
        self._info = ''
        self._comptype = 'ULAW' # default is U-law

    def setnchannels(self, nchannels):
        """Set the number of channels (1, 2 or 4)."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if nchannels not in (1, 2, 4):
            raise Error('only 1, 2, or 4 channels supported')
        self._nchannels = nchannels

    def getnchannels(self):
        """Return the number of channels; raise Error if not yet set."""
        if not self._nchannels:
            raise Error('number of channels not set')
        return self._nchannels

    def setsampwidth(self, sampwidth):
        """Set the sample width in bytes (1, 2 or 4)."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if sampwidth not in (1, 2, 4):
            raise Error('bad sample width')
        self._sampwidth = sampwidth

    def getsampwidth(self):
        """Return the sample width; raise Error if not yet set.

        BUG FIX: the original tested self._framerate here, so this
        raised (or not) based on the wrong parameter; test _sampwidth.
        """
        if not self._sampwidth:
            raise Error('sample width not specified')
        return self._sampwidth

    def setframerate(self, framerate):
        """Set the sampling rate in frames per second."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        self._framerate = framerate

    def getframerate(self):
        """Return the frame rate; raise Error if not yet set."""
        if not self._framerate:
            raise Error('frame rate not set')
        return self._framerate

    def setnframes(self, nframes):
        """Declare the expected total frame count (written to the header)."""
        if self._nframeswritten:
            raise Error('cannot change parameters after starting to write')
        if nframes < 0:
            raise Error('# of frames cannot be negative')
        self._nframes = nframes

    def getnframes(self):
        """Return the number of frames written so far."""
        return self._nframeswritten

    def setcomptype(self, type, name):
        """Set the compression type; only 'NONE' and 'ULAW' are writable.

        NOTE(review): getcompname() also knows 'ALAW', but it cannot be
        selected here.  The `name` argument is accepted for interface
        symmetry and ignored.
        """
        if type in ('NONE', 'ULAW'):
            self._comptype = type
        else:
            raise Error('unknown compression type')

    def getcomptype(self):
        """Return the configured compression type."""
        return self._comptype

    def getcompname(self):
        """Return a human-readable name for the compression type."""
        if self._comptype == 'ULAW':
            return 'CCITT G.711 u-law'
        elif self._comptype == 'ALAW':
            return 'CCITT G.711 A-law'
        else:
            return 'not compressed'

    def setparams(self, params):
        """Set all parameters at once.

        `params` is the (nchannels, sampwidth, framerate, nframes,
        comptype, compname) tuple as returned by getparams().  (The
        Python-2-only tuple-unpacking parameter syntax was replaced by
        an explicit unpack; callers still pass a single tuple.)
        """
        nchannels, sampwidth, framerate, nframes, comptype, compname = params
        self.setnchannels(nchannels)
        self.setsampwidth(sampwidth)
        self.setframerate(framerate)
        self.setnframes(nframes)
        self.setcomptype(comptype, compname)

    def getparams(self):
        """Return (nchannels, sampwidth, framerate, nframes, comptype,
        compname) in one tuple."""
        return self.getnchannels(), self.getsampwidth(), \
                  self.getframerate(), self.getnframes(), \
                  self.getcomptype(), self.getcompname()

    def tell(self):
        """Return the current position in frames (frames written)."""
        return self._nframeswritten

    def writeframesraw(self, data):
        """Write audio frames without patching the header afterwards.

        BUG FIX: the frame count is now computed from the data actually
        written.  For u-law output the linear input shrinks when
        encoded, so counting frames before conversion overstated
        _nframeswritten (the on-disk _framesize is set by
        _write_header).
        """
        self._ensure_header_written()
        if self._comptype == 'ULAW':
            import audioop
            data = audioop.lin2ulaw(data, self._sampwidth)
        nframes = len(data) // self._framesize
        self._file.write(data)
        self._nframeswritten = self._nframeswritten + nframes
        self._datawritten = self._datawritten + len(data)

    def writeframes(self, data):
        """Write frames, then patch the header if the declared data
        length no longer matches what was written."""
        self.writeframesraw(data)
        if self._nframeswritten != self._nframes or \
                  self._datalength != self._datawritten:
            self._patchheader()

    def close(self):
        """Flush pending header/data and detach from the underlying file.

        The file object itself is not closed, matching Au_read.close().
        """
        self._ensure_header_written()
        if self._nframeswritten != self._nframes or \
                  self._datalength != self._datawritten:
            self._patchheader()
        self._file.flush()
        self._file = None

    #
    # private methods
    #

    def _ensure_header_written(self):
        # Emit the header exactly once, just before the first data write.
        if not self._nframeswritten:
            if not self._nchannels:
                raise Error('# of channels not specified')
            if not self._sampwidth:
                raise Error('sample width not specified')
            if not self._framerate:
                raise Error('frame rate not specified')
            self._write_header()

    def _write_header(self):
        # Map the configured parameters onto an AU encoding code and the
        # on-disk bytes-per-frame.
        if self._comptype == 'NONE':
            if self._sampwidth == 1:
                encoding = AUDIO_FILE_ENCODING_LINEAR_8
                self._framesize = 1
            elif self._sampwidth == 2:
                encoding = AUDIO_FILE_ENCODING_LINEAR_16
                self._framesize = 2
            elif self._sampwidth == 4:
                encoding = AUDIO_FILE_ENCODING_LINEAR_32
                self._framesize = 4
            else:
                raise Error('internal error')
        elif self._comptype == 'ULAW':
            encoding = AUDIO_FILE_ENCODING_MULAW_8
            self._framesize = 1
        else:
            raise Error('internal error')
        self._framesize = self._framesize * self._nchannels
        _write_u32(self._file, AUDIO_FILE_MAGIC)
        # Header size: 24 fixed bytes plus the info string and at least
        # one NUL terminator, rounded up to a multiple of 8.
        header_size = 25 + len(self._info)
        header_size = (header_size + 7) & ~7
        _write_u32(self._file, header_size)
        if self._nframes == AUDIO_UNKNOWN_SIZE:
            length = AUDIO_UNKNOWN_SIZE
        else:
            length = self._nframes * self._framesize
        _write_u32(self._file, length)
        self._datalength = length
        _write_u32(self._file, encoding)
        _write_u32(self._file, self._framerate)
        _write_u32(self._file, self._nchannels)
        self._file.write(self._info)
        # Pad the info field with NULs out to the declared header size.
        self._file.write('\0'*(header_size - len(self._info) - 24))

    def _patchheader(self):
        """Rewrite the data-size field (offset 8) with the actual count,
        then return to the end of the file for further writes."""
        self._file.seek(8)
        _write_u32(self._file, self._datawritten)
        self._datalength = self._datawritten
        self._file.seek(0, 2)
+
def open(f, mode=None):
    """Open an AU stream and return an Au_read or Au_write object.

    `f` may be a filename or a file object.  When `mode` is omitted it
    is taken from f.mode if present, defaulting to 'rb'.
    """
    if mode is None:
        mode = getattr(f, 'mode', 'rb')
    if mode in ('r', 'rb'):
        return Au_read(f)
    if mode in ('w', 'wb'):
        return Au_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")

# Backwards-compatible alias.
openfp = open
diff --git a/depot_tools/release/win/python_24/Lib/sunaudio.py b/depot_tools/release/win/python_24/Lib/sunaudio.py
new file mode 100644
index 0000000..3b0ee279
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/sunaudio.py
@@ -0,0 +1,44 @@
+"""Interpret sun audio headers."""
+
+MAGIC = '.snd'
+
class error(Exception):
    """Raised by gethdr() for malformed or truncated sound headers."""
    pass
+
+
def get_long_be(s):
    """Convert a 4-char big-endian string to an integer."""
    value = 0
    for i in range(4):
        value = (value << 8) | ord(s[i])
    return value
+
+
def gethdr(fp):
    """Read a sound header from an open file."""
    if fp.read(4) != MAGIC:
        raise error('gethdr: bad magic word')
    # The five u32 fields follow the magic word in this fixed order.
    hdr_size, data_size, encoding, sample_rate, channels = [
        get_long_be(fp.read(4)) for _field in range(5)]
    excess = hdr_size - 24
    if excess < 0:
        raise error('gethdr: bad hdr_size')
    info = ''
    if excess > 0:
        info = fp.read(excess)
    return (data_size, encoding, sample_rate, channels, info)
+
+
+def printhdr(file):
+    """Read and print the sound header of a named file."""
+    hdr = gethdr(open(file, 'r'))
+    data_size, encoding, sample_rate, channels, info = hdr
+    while info[-1:] == '\0':
+        info = info[:-1]
+    print 'File name:  ', file
+    print 'Data size:  ', data_size
+    print 'Encoding:   ', encoding
+    print 'Sample rate:', sample_rate
+    print 'Channels:   ', channels
+    print 'Info:       ', repr(info)
diff --git a/depot_tools/release/win/python_24/Lib/symbol.py b/depot_tools/release/win/python_24/Lib/symbol.py
new file mode 100644
index 0000000..eaf5a25
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/symbol.py
@@ -0,0 +1,107 @@
+#! /usr/bin/env python
+
+"""Non-terminal symbols of Python grammar (from "graminit.h")."""
+
+#  This file is automatically generated; please don't muck it up!
+#
+#  To update the symbols in this file, 'cd' to the top directory of
+#  the python source tree after building the interpreter and run:
+#
+#    python Lib/symbol.py
+
+#--start constants--
+single_input = 256
+file_input = 257
+eval_input = 258
+decorator = 259
+decorators = 260
+funcdef = 261
+parameters = 262
+varargslist = 263
+fpdef = 264
+fplist = 265
+stmt = 266
+simple_stmt = 267
+small_stmt = 268
+expr_stmt = 269
+augassign = 270
+print_stmt = 271
+del_stmt = 272
+pass_stmt = 273
+flow_stmt = 274
+break_stmt = 275
+continue_stmt = 276
+return_stmt = 277
+yield_stmt = 278
+raise_stmt = 279
+import_stmt = 280
+import_name = 281
+import_from = 282
+import_as_name = 283
+dotted_as_name = 284
+import_as_names = 285
+dotted_as_names = 286
+dotted_name = 287
+global_stmt = 288
+exec_stmt = 289
+assert_stmt = 290
+compound_stmt = 291
+if_stmt = 292
+while_stmt = 293
+for_stmt = 294
+try_stmt = 295
+except_clause = 296
+suite = 297
+test = 298
+and_test = 299
+not_test = 300
+comparison = 301
+comp_op = 302
+expr = 303
+xor_expr = 304
+and_expr = 305
+shift_expr = 306
+arith_expr = 307
+term = 308
+factor = 309
+power = 310
+atom = 311
+listmaker = 312
+testlist_gexp = 313
+lambdef = 314
+trailer = 315
+subscriptlist = 316
+subscript = 317
+sliceop = 318
+exprlist = 319
+testlist = 320
+testlist_safe = 321
+dictmaker = 322
+classdef = 323
+arglist = 324
+argument = 325
+list_iter = 326
+list_for = 327
+list_if = 328
+gen_iter = 329
+gen_for = 330
+gen_if = 331
+testlist1 = 332
+encoding_decl = 333
+#--end constants--
+
# Build the reverse mapping (grammar symbol number -> symbol name) from
# the integer constants defined above.
sym_name = {}
for _name, _value in globals().items():
    if type(_value) is type(0):
        sym_name[_value] = _name
+
+
def main():
    """Regenerate this file's constants by delegating to token.main()."""
    import sys
    import token
    if len(sys.argv) == 1:
        # Default to regenerating Lib/symbol.py from the C grammar header.
        default_args = ["Include/graminit.h", "Lib/symbol.py"]
        sys.argv = sys.argv + default_args
    token.main()

if __name__ == "__main__":
    main()
diff --git a/depot_tools/release/win/python_24/Lib/symtable.py b/depot_tools/release/win/python_24/Lib/symtable.py
new file mode 100644
index 0000000..38042ae
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/symtable.py
@@ -0,0 +1,249 @@
+"""Interface to the compiler's internal symbol tables"""
+
+import _symtable
+from _symtable import USE, DEF_GLOBAL, DEF_LOCAL, DEF_PARAM, \
+     DEF_STAR, DEF_DOUBLESTAR, DEF_INTUPLE, DEF_FREE, \
+     DEF_FREE_GLOBAL, DEF_FREE_CLASS, DEF_IMPORT, DEF_BOUND, \
+     OPT_IMPORT_STAR, OPT_EXEC, OPT_BARE_EXEC
+
+import weakref
+
+__all__ = ["symtable", "SymbolTable", "newSymbolTable", "Class",
+           "Function", "Symbol"]
+
def symtable(code, filename, compile_type):
    """Return a SymbolTable wrapper for the top-level scope of `code`."""
    top = _symtable.symtable(code, filename, compile_type)[0]
    return newSymbolTable(top, filename)
+
class SymbolTableFactory:
    """Create and memoize SymbolTable wrappers for raw tables.

    Wrappers are cached weakly per (raw table, filename) pair, so
    repeated requests return the same object without keeping it alive
    forever.
    """

    def __init__(self):
        self.__memo = weakref.WeakValueDictionary()

    def new(self, table, filename):
        """Pick the wrapper class from the raw table's type."""
        if table.type == _symtable.TYPE_FUNCTION:
            return Function(table, filename)
        if table.type == _symtable.TYPE_CLASS:
            return Class(table, filename)
        return SymbolTable(table, filename)

    def __call__(self, table, filename):
        key = (table, filename)
        cached = self.__memo.get(key)
        if cached is None:
            cached = self.__memo[key] = self.new(table, filename)
        return cached

newSymbolTable = SymbolTableFactory()
+
def is_free(flags):
    """Return True if the flag bits describe a free variable.

    A name is considered free when it is used or flagged free while also
    being bound or declared global, or when it carries the
    class-free-variable flag.
    """
    bound_or_global = DEF_LOCAL | DEF_PARAM | DEF_GLOBAL
    if (flags & (USE | DEF_FREE)) and (flags & bound_or_global):
        return True
    return bool(flags & DEF_FREE_CLASS)
+
class SymbolTable:
    """Read-only wrapper around a raw _symtable table entry.

    Exposes the scope's name, type, and symbols.  Instances are created
    through newSymbolTable (a SymbolTableFactory), which selects the
    Function or Class subclass based on the raw table's type.
    """

    def __init__(self, raw_table, filename):
        self._table = raw_table        # raw table object from _symtable
        self._filename = filename      # source file the table was built from
        self._symbols = {}             # name -> Symbol cache used by lookup()

    def __repr__(self):
        # Include the subclass name (e.g. "Function ") unless this is the
        # base class.
        if self.__class__ == SymbolTable:
            kind = ""
        else:
            kind = "%s " % self.__class__.__name__

        # The top-level (module) table is conventionally named "global".
        if self._table.name == "global":
            return "<%sSymbolTable for module %s>" % (kind, self._filename)
        else:
            return "<%sSymbolTable for %s in %s>" % (kind, self._table.name,
                                                     self._filename)

    def get_type(self):
        """Return "module", "function" or "class" for this scope."""
        if self._table.type == _symtable.TYPE_MODULE:
            return "module"
        if self._table.type == _symtable.TYPE_FUNCTION:
            return "function"
        if self._table.type == _symtable.TYPE_CLASS:
            return "class"
        # NOTE(review): presumably the TYPE_* constants cover 1-3, which
        # would make this assert the only reachable outcome here; if it
        # passes, the method falls through and returns None.
        assert self._table.type in (1, 2, 3), \
               "unexpected type: %s" % self._table.type

    def get_id(self):
        """Return the numeric id of the underlying raw table."""
        return self._table.id

    def get_name(self):
        """Return the scope's name (function/class name, or "global")."""
        return self._table.name

    def get_lineno(self):
        """Return the line number recorded for this scope."""
        return self._table.lineno

    def is_optimized(self):
        """Return True for a function scope with no optimization blockers.

        NOTE(review): `optimized` holds the OPT_* bits for constructs
        that defeat local-namespace optimization (see has_exec /
        has_import_star), hence the `not`.
        """
        return bool(self._table.type == _symtable.TYPE_FUNCTION
                    and not self._table.optimized)

    def is_nested(self):
        """Return True if this scope is nested inside another one."""
        return bool(self._table.nested)

    def has_children(self):
        """Return True if this scope contains nested scopes."""
        return bool(self._table.children)

    def has_exec(self):
        """Return true if the scope uses exec"""
        return bool(self._table.optimized & (OPT_EXEC | OPT_BARE_EXEC))

    def has_import_star(self):
        """Return true if the scope uses import *"""
        return bool(self._table.optimized & OPT_IMPORT_STAR)

    def get_identifiers(self):
        """Return the list of names defined in this scope."""
        return self._table.symbols.keys()

    def lookup(self, name):
        """Return (and cache) the Symbol for `name`.

        Raises KeyError if the name is not present in this scope.
        """
        sym = self._symbols.get(name)
        if sym is None:
            flags = self._table.symbols[name]
            namespaces = self.__check_children(name)
            sym = self._symbols[name] = Symbol(name, flags, namespaces)
        return sym

    def get_symbols(self):
        """Return Symbol objects for every identifier in this scope."""
        return [self.lookup(ident) for ident in self.get_identifiers()]

    def __check_children(self, name):
        # Child tables whose name matches `name` are the namespaces bound
        # to that symbol (e.g. a nested function or class of that name).
        return [newSymbolTable(st, self._filename)
                for st in self._table.children
                if st.name == name]

    def get_children(self):
        """Return wrappers for all scopes nested directly in this one."""
        return [newSymbolTable(st, self._filename)
                for st in self._table.children]
+
class Function(SymbolTable):
    """Symbol table for a function scope.

    Adds categorized views of the scope's identifiers (parameters,
    locals, globals, free variables).  Each view is computed lazily and
    cached; the class-level None defaults are shadowed per instance on
    first use.
    """

    # Default values for instance variables
    __params = None
    __locals = None
    __frees = None
    __globals = None

    def __idents_matching(self, test_func):
        # Filter this scope's identifiers by a predicate over their flag
        # bits.
        return tuple([ident for ident in self.get_identifiers()
                      if test_func(self._table.symbols[ident])])

    def get_parameters(self):
        """Return a tuple of the function's parameter names."""
        if self.__params is None:
            self.__params = self.__idents_matching(lambda x:x & DEF_PARAM)
        return self.__params

    def get_locals(self):
        """Return a tuple of names bound locally in the function."""
        if self.__locals is None:
            self.__locals = self.__idents_matching(lambda x:x & DEF_BOUND)
        return self.__locals

    def get_globals(self):
        """Return a tuple of names that are global in the function."""
        if self.__globals is None:
            glob = DEF_GLOBAL | DEF_FREE_GLOBAL
            self.__globals = self.__idents_matching(lambda x:x & glob)
        return self.__globals

    def get_frees(self):
        """Return a tuple of the function's free (closure) variables."""
        if self.__frees is None:
            self.__frees = self.__idents_matching(is_free)
        return self.__frees
+
class Class(SymbolTable):
    """Symbol table for a class scope."""

    __methods = None   # lazily computed tuple of nested-scope names

    def get_methods(self):
        """Return a tuple of the names of scopes nested in the class
        body (i.e. its methods), without duplicates."""
        if self.__methods is None:
            seen = {}
            for child in self._table.children:
                seen[child.name] = 1
            self.__methods = tuple(seen)
        return self.__methods
+
class Symbol:
    """A single name within a scope, with predicate accessors over its
    _symtable flag bits."""

    def __init__(self, name, flags, namespaces=None):
        self.__name = name
        self.__flags = flags
        self.__namespaces = namespaces or ()

    def __repr__(self):
        return "<symbol '%s'>" % self.__name

    def get_name(self):
        """Return the symbol's name."""
        return self.__name

    def is_referenced(self):
        """Return True if the name is used in its scope."""
        return bool(self.__flags & _symtable.USE)

    def is_parameter(self):
        """Return True if the name is a function parameter."""
        return bool(self.__flags & DEF_PARAM)

    def is_global(self):
        """Return True if the name is global in its scope."""
        return bool(self.__flags & (DEF_GLOBAL | DEF_FREE_GLOBAL))

    def is_vararg(self):
        """Return True if the name is a *args parameter."""
        return bool(self.__flags & DEF_STAR)

    def is_keywordarg(self):
        """Return True if the name is a **kwargs parameter."""
        return bool(self.__flags & DEF_DOUBLESTAR)

    def is_local(self):
        """Return True if the name is bound in its scope."""
        return bool(self.__flags & DEF_BOUND)

    def is_free(self):
        """Return True if the name is free (defined in an enclosing
        scope) here."""
        flags = self.__flags
        if (flags & (USE | DEF_FREE)) \
           and (flags & (DEF_LOCAL | DEF_PARAM | DEF_GLOBAL)):
            return True
        return bool(flags & DEF_FREE_CLASS)

    def is_imported(self):
        """Return True if the name was bound by an import statement."""
        return bool(self.__flags & DEF_IMPORT)

    def is_assigned(self):
        """Return True if the name is assigned in its scope."""
        return bool(self.__flags & DEF_LOCAL)

    def is_in_tuple(self):
        """Return True if the name appears in a tuple parameter."""
        return bool(self.__flags & DEF_INTUPLE)

    def is_namespace(self):
        """Returns true if name binding introduces new namespace.

        If the name is used as the target of a function or class
        statement, this will be true.

        Note that a single name can be bound to multiple objects.  If
        is_namespace() is true, the name may also be bound to other
        objects, like an int or list, that does not introduce a new
        namespace.
        """
        return bool(self.__namespaces)

    def get_namespaces(self):
        """Return a list of namespaces bound to this name"""
        return self.__namespaces

    def get_namespace(self):
        """Returns the single namespace bound to this name.

        Raises ValueError if the name is bound to multiple namespaces.
        """
        if len(self.__namespaces) != 1:
            raise ValueError("name is bound to multiple namespaces")
        return self.__namespaces[0]
+
if __name__ == "__main__":
    # Demo/self-test: build the symbol table of this module's own source
    # and print each top-level identifier's locality and namespace status.
    import os, sys
    src = open(sys.argv[0]).read()
    mod = symtable(src, os.path.split(sys.argv[0])[1], "exec")
    for ident in mod.get_identifiers():
        info = mod.lookup(ident)
        print info, info.is_local(), info.is_namespace()
diff --git a/depot_tools/release/win/python_24/Lib/tabnanny.py b/depot_tools/release/win/python_24/Lib/tabnanny.py
new file mode 100644
index 0000000..f38a79f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/tabnanny.py
@@ -0,0 +1,325 @@
+#! /usr/bin/env python
+
+"""The Tab Nanny despises ambiguous indentation.  She knows no mercy.
+
+tabnanny -- Detection of ambiguous indentation
+
+For the time being this module is intended to be called as a script.
+However it is possible to import it into an IDE and use the function
+check() described below.
+
+Warning: The API provided by this module is likely to change in future
+releases; such changes may not be backward compatible.
+"""
+
+# Released to the public domain, by Tim Peters, 15 April 1998.
+
+# XXX Note: this is now a standard library module.
+# XXX The API needs to undergo changes however; the current code is too
+# XXX script-like.  This will be addressed later.
+
+__version__ = "6"
+
+import os
+import sys
+import getopt
+import tokenize
+if not hasattr(tokenize, 'NL'):
+    raise ValueError("tokenize.NL doesn't exist -- tokenize module too old")
+
+__all__ = ["check", "NannyNag", "process_tokens"]
+
+verbose = 0
+filename_only = 0
+
def errprint(*args):
    """Write the arguments to stderr, space-separated, newline-terminated."""
    sys.stderr.write(" ".join([str(arg) for arg in args]))
    sys.stderr.write("\n")
+
+def main():
+    global verbose, filename_only
+    try:
+        opts, args = getopt.getopt(sys.argv[1:], "qv")
+    except getopt.error, msg:
+        errprint(msg)
+        return
+    for o, a in opts:
+        if o == '-q':
+            filename_only = filename_only + 1
+        if o == '-v':
+            verbose = verbose + 1
+    if not args:
+        errprint("Usage:", sys.argv[0], "[-v] file_or_directory ...")
+        return
+    for arg in args:
+        check(arg)
+
class NannyNag(Exception):
    """
    Raised by process_tokens() when ambiguous indentation is detected.
    Captured and handled in check().
    """
    def __init__(self, lineno, msg, line):
        self.lineno = lineno
        self.msg = msg
        self.line = line

    def get_lineno(self):
        """Line number of the offending line."""
        return self.lineno

    def get_msg(self):
        """Diagnostic message describing the ambiguity."""
        return self.msg

    def get_line(self):
        """Text of the offending line."""
        return self.line
+
def check(file):
    """check(file_or_dir)

    If file_or_dir is a directory and not a symbolic link, then recursively
    descend the directory tree named by file_or_dir, checking all .py files
    along the way. If file_or_dir is an ordinary Python source file, it is
    checked for whitespace related problems. The diagnostic messages are
    written to standard output using the print statement.
    """

    if os.path.isdir(file) and not os.path.islink(file):
        if verbose:
            print "%r: listing directory" % (file,)
        names = os.listdir(file)
        for name in names:
            fullname = os.path.join(file, name)
            # Note the and/or precedence: recurse into real (non-symlink)
            # subdirectories, and check any entry ending in ".py".
            if (os.path.isdir(fullname) and
                not os.path.islink(fullname) or
                os.path.normcase(name[-3:]) == ".py"):
                check(fullname)
        return

    try:
        f = open(file)
    except IOError, msg:
        errprint("%r: I/O Error: %s" % (file, msg))
        return

    if verbose > 1:
        print "checking %r ..." % file

    # Tokenize the whole file; process_tokens() raises NannyNag on the
    # first ambiguous indent it finds.
    try:
        process_tokens(tokenize.generate_tokens(f.readline))

    except tokenize.TokenError, msg:
        errprint("%r: Token Error: %s" % (file, msg))
        return

    except NannyNag, nag:
        badline = nag.get_lineno()
        line = nag.get_line()
        if verbose:
            print "%r: *** Line %d: trouble in tab city! ***" % (file, badline)
            print "offending line: %r" % (line,)
            print nag.get_msg()
        else:
            # Terse mode: quote names containing spaces so output stays
            # machine-parseable; -q restricts output to the file name.
            if ' ' in file: file = '"' + file + '"'
            if filename_only: print file
            else: print file, badline, repr(line)
        return

    if verbose:
        print "%r: Clean bill of health." % (file,)
+
class Whitespace:
    """Normal-form analysis of a string's leading whitespace.

    Two leading-whitespace strings are "equal" when they yield the same
    indentation at every tab size, and one is "less" than another when
    it yields strictly smaller indentation at every tab size; `norm` is
    a canonical form that reduces the first test to a tuple comparison.
    """

    # the characters used for space and tab
    S, T = ' \t'

    # Members:
    #   raw        the original string
    #   n          number of leading whitespace characters in raw
    #   nt         number of tabs in raw[:n]
    #   norm       (count, trailing): count[i] is the number of times a
    #              run of exactly i spaces is followed by a tab in
    #              raw[:n]; trailing is the number of trailing spaces.
    #              It's A Theorem that m.indent_level(t) ==
    #              n.indent_level(t) for all t >= 1 iff m.norm == n.norm.
    #   is_simple  true iff raw[:n] has the form (T*)(S*)

    def __init__(self, ws):
        self.raw = ws
        S, T = Whitespace.S, Whitespace.T
        count = []
        run = total = tabs = 0   # current space run, chars seen, tabs seen
        for ch in self.raw:
            if ch == S:
                total = total + 1
                run = run + 1
            elif ch == T:
                total = total + 1
                tabs = tabs + 1
                # Record that a run of `run` spaces ended in a tab.
                if run >= len(count):
                    count.extend([0] * (run - len(count) + 1))
                count[run] = count[run] + 1
                run = 0
            else:
                break
        self.n = total
        self.nt = tabs
        self.norm = tuple(count), run
        self.is_simple = len(count) <= 1

    def longest_run_of_spaces(self):
        """Length of the longest contiguous run of spaces (whether or
        not it precedes a tab)."""
        count, trailing = self.norm
        longest = len(count) - 1
        if trailing > longest:
            longest = trailing
        return longest

    def indent_level(self, tabsize):
        """Indentation this whitespace produces at the given tab size.

        A run of i spaces followed by a tab advances to the next tab
        stop past those spaces, contributing (i//tabsize + 1) * tabsize
        columns; i//tabsize is 0 whenever i < tabsize, so the sum can
        start at i == tabsize.
        """
        count, trailing = self.norm
        extra = sum([(i // tabsize) * count[i]
                     for i in range(tabsize, len(count))])
        return trailing + tabsize * (extra + self.nt)

    def equal(self, other):
        """True iff self.indent_level(t) == other.indent_level(t) for
        all t >= 1."""
        return self.norm == other.norm

    def not_equal_witness(self, other):
        """Return [(ts, i1, i2), ...] for tab sizes ts where the indent
        i1 of self differs from the indent i2 of other.  Intended for
        use after equal() returned false, in which case at least one
        witness exists."""
        limit = max(self.longest_run_of_spaces(),
                    other.longest_run_of_spaces()) + 1
        witnesses = []
        for ts in range(1, limit + 1):
            mine = self.indent_level(ts)
            theirs = other.indent_level(ts)
            if mine != theirs:
                witnesses.append((ts, mine, theirs))
        return witnesses

    def less(self, other):
        """True iff self.indent_level(t) < other.indent_level(t) for all
        t >= 1 (algorithm due to Vincent Broman).

        For the common case where both strings have the simple form
        (T*)(S*), comparing lengths and tab counts suffices.
        """
        if self.n >= other.n:
            return False
        if self.is_simple and other.is_simple:
            return self.nt <= other.nt
        limit = max(self.longest_run_of_spaces(),
                    other.longest_run_of_spaces()) + 1
        # the self.n >= other.n test above already covered ts=1
        for ts in range(2, limit + 1):
            if self.indent_level(ts) >= other.indent_level(ts):
                return False
        return True

    def not_less_witness(self, other):
        """Return [(ts, i1, i2), ...] for tab sizes ts where self's
        indent i1 is >= other's indent i2.  Intended for use after
        less() returned false, in which case at least one witness
        exists."""
        limit = max(self.longest_run_of_spaces(),
                    other.longest_run_of_spaces()) + 1
        witnesses = []
        for ts in range(1, limit + 1):
            mine = self.indent_level(ts)
            theirs = other.indent_level(ts)
            if mine >= theirs:
                witnesses.append((ts, mine, theirs))
        return witnesses
+
def format_witnesses(w):
    """Return a readable phrase naming the witnessing tab sizes in w."""
    sizes = [str(tup[0]) for tup in w]
    label = "at tab size"
    if len(w) > 1:
        label = label + "s"
    return label + " " + ', '.join(sizes)
+
def process_tokens(tokens):
    """Scan a token stream for ambiguous indentation.

    `tokens` is an iterable of 5-tuples as produced by
    tokenize.generate_tokens().  Raises NannyNag when consecutive
    logical lines indent in a way whose relative depth depends on the
    tab size.
    """
    INDENT = tokenize.INDENT
    DEDENT = tokenize.DEDENT
    NEWLINE = tokenize.NEWLINE
    JUNK = tokenize.COMMENT, tokenize.NL
    # Stack of Whitespace objects, one per open indentation level,
    # seeded with the empty string for column 0.
    indents = [Whitespace("")]
    check_equal = 0

    for (type, token, start, end, line) in tokens:
        if type == NEWLINE:
            # a program statement, or ENDMARKER, will eventually follow,
            # after some (possibly empty) run of tokens of the form
            #     (NL | COMMENT)* (INDENT | DEDENT+)?
            # If an INDENT appears, setting check_equal is wrong, and will
            # be undone when we see the INDENT.
            check_equal = 1

        elif type == INDENT:
            check_equal = 0
            thisguy = Whitespace(token)
            # A new indent must be strictly greater at every tab size.
            if not indents[-1].less(thisguy):
                witness = indents[-1].not_less_witness(thisguy)
                msg = "indent not greater e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)
            indents.append(thisguy)

        elif type == DEDENT:
            # there's nothing we need to check here!  what's important is
            # that when the run of DEDENTs ends, the indentation of the
            # program statement (or ENDMARKER) that triggered the run is
            # equal to what's left at the top of the indents stack

            # Ouch!  This assert triggers if the last line of the source
            # is indented *and* lacks a newline -- then DEDENTs pop out
            # of thin air.
            # assert check_equal  # else no earlier NEWLINE, or an earlier INDENT
            check_equal = 1

            del indents[-1]

        elif check_equal and type not in JUNK:
            # this is the first "real token" following a NEWLINE, so it
            # must be the first token of the next program statement, or an
            # ENDMARKER; the "line" argument exposes the leading whitespace
            # for this statement; in the case of ENDMARKER, line is an empty
            # string, so will properly match the empty string with which the
            # "indents" stack was seeded
            check_equal = 0
            thisguy = Whitespace(line)
            if not indents[-1].equal(thisguy):
                witness = indents[-1].not_equal_witness(thisguy)
                msg = "indent not equal e.g. " + format_witnesses(witness)
                raise NannyNag(start[0], msg, line)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/tarfile.py b/depot_tools/release/win/python_24/Lib/tarfile.py
new file mode 100644
index 0000000..2d89bf9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/tarfile.py
@@ -0,0 +1,1974 @@
+#!/usr/bin/env python
+# -*- coding: iso-8859-1 -*-
+#-------------------------------------------------------------------
+# tarfile.py
+#-------------------------------------------------------------------
+# Copyright (C) 2002 Lars Gustäbel <lars@gustaebel.de>
+# All rights reserved.
+#
+# Permission  is  hereby granted,  free  of charge,  to  any person
+# obtaining a  copy of  this software  and associated documentation
+# files  (the  "Software"),  to   deal  in  the  Software   without
+# restriction,  including  without limitation  the  rights to  use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies  of  the  Software,  and to  permit  persons  to  whom the
+# Software  is  furnished  to  do  so,  subject  to  the  following
+# conditions:
+#
+# The above copyright  notice and this  permission notice shall  be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS  IS", WITHOUT WARRANTY OF ANY  KIND,
+# EXPRESS OR IMPLIED, INCLUDING  BUT NOT LIMITED TO  THE WARRANTIES
+# OF  MERCHANTABILITY,  FITNESS   FOR  A  PARTICULAR   PURPOSE  AND
+# NONINFRINGEMENT.  IN  NO  EVENT SHALL  THE  AUTHORS  OR COPYRIGHT
+# HOLDERS  BE LIABLE  FOR ANY  CLAIM, DAMAGES  OR OTHER  LIABILITY,
+# WHETHER  IN AN  ACTION OF  CONTRACT, TORT  OR OTHERWISE,  ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+"""Read from and write to tar format archives.
+"""
+
+__version__ = "$Revision: 1.21.2.1 $"
+# $Source: /cvsroot/python/python/dist/src/Lib/tarfile.py,v $
+
+version     = "0.6.4"
+__author__  = "Lars Gustäbel (lars@gustaebel.de)"
+__date__    = "$Date: 2005/03/03 23:15:03 $"
+__cvsid__   = "$Id: tarfile.py,v 1.21.2.1 2005/03/03 23:15:03 loewis Exp $"
+__credits__ = "Gustavo Niemeyer, Niels Gustäbel, Richard Townsend."
+
+#---------
+# Imports
+#---------
+import sys
+import os
+import shutil
+import stat
+import errno
+import time
+import struct
+
if sys.platform == 'mac':
    # This module needs work for MacOS9, especially in the area of pathname
    # handling. In many places it is assumed a simple substitution of / by the
    # local os.path.sep is good enough to convert pathnames, but this does not
    # work with the mac rooted:path:name versus :nonrooted:path:name syntax.
    # Refuse to import rather than silently corrupt archive member paths.
    raise ImportError, "tarfile does not work for platform==mac"
+
+try:
+    import grp, pwd
+except ImportError:
+    grp = pwd = None
+
+# from tarfile import *
+__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
+
#---------------------------------------------------------
# tar constants (field sizes and type flags come from the
# POSIX.1-1988 "ustar" header layout)
#---------------------------------------------------------
NUL        = "\0"               # the null character
BLOCKSIZE  = 512                # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20     # length of records
MAGIC      = "ustar"            # magic tar string
VERSION    = "00"               # version number

LENGTH_NAME    = 100            # maximum length of a filename
LENGTH_LINK    = 100            # maximum length of a linkname
LENGTH_PREFIX  = 155            # maximum length of the prefix field
MAXSIZE_MEMBER = 077777777777L  # maximum size of a file (11 octal digits)

# Values of the one-byte "typeflag" header field.
REGTYPE  = "0"                  # regular file
AREGTYPE = "\0"                 # regular file (old pre-POSIX encoding)
LNKTYPE  = "1"                  # link (inside tarfile)
SYMTYPE  = "2"                  # symbolic link
CHRTYPE  = "3"                  # character special device
BLKTYPE  = "4"                  # block special device
DIRTYPE  = "5"                  # directory
FIFOTYPE = "6"                  # fifo special device
CONTTYPE = "7"                  # contiguous file

GNUTYPE_LONGNAME = "L"          # GNU tar extension for longnames
GNUTYPE_LONGLINK = "K"          # GNU tar extension for longlink
GNUTYPE_SPARSE   = "S"          # GNU tar extension for sparse file

#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,  # file types that tarfile
                   SYMTYPE, DIRTYPE, FIFOTYPE,  # can cope with.
                   CONTTYPE, CHRTYPE, BLKTYPE,
                   GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
                   GNUTYPE_SPARSE)

REGULAR_TYPES = (REGTYPE, AREGTYPE,             # file types that somehow
                 CONTTYPE, GNUTYPE_SPARSE)      # represent regular files

#---------------------------------------------------------
# Bits used in the mode field, values in octal.
# (These mirror the stat-module S_* constants; kept local so the
# header encoding does not depend on the host's stat definitions.)
#---------------------------------------------------------
S_IFLNK = 0120000        # symbolic link
S_IFREG = 0100000        # regular file
S_IFBLK = 0060000        # block device
S_IFDIR = 0040000        # directory
S_IFCHR = 0020000        # character device
S_IFIFO = 0010000        # fifo

TSUID   = 04000          # set UID on execution
TSGID   = 02000          # set GID on execution
TSVTX   = 01000          # reserved

TUREAD  = 0400           # read by owner
TUWRITE = 0200           # write by owner
TUEXEC  = 0100           # execute/search by owner
TGREAD  = 0040           # read by group
TGWRITE = 0020           # write by group
TGEXEC  = 0010           # execute/search by group
TOREAD  = 0004           # read by other
TOWRITE = 0002           # write by other
TOEXEC  = 0001           # execute/search by other
+
+#---------------------------------------------------------
+# Some useful functions
+#---------------------------------------------------------
def nts(s):
    """Return the string buffer s with its trailing NUL padding removed.

       Tar header fields are fixed-width and padded with "\0" bytes;
       this strips that padding (embedded NULs are preserved).
    """
    return s.rstrip("\0")
+
def calc_chksum(buf):
    """Calculate the checksum for a member's 512-byte header block.

       The checksum is the simple sum of all header bytes, with the
       8-byte chksum field itself (bytes 148..155) counted as if it
       were filled with spaces -- hence the constant 8 * ord(" ") == 256.
    """
    before = sum(ord(ch) for ch in buf[:148])   # bytes before the chksum field
    after = sum(ord(ch) for ch in buf[156:])    # bytes after the chksum field
    return 256 + before + after
+
def copyfileobj(src, dst, length=None):
    """Copy length bytes from file object src to file object dst.

       If length is None, copy the entire remaining content.
       Raises IOError when src is exhausted before length bytes
       could be read.
    """
    if length == 0:
        return
    if length is None:
        # Unbounded copy: delegate to the standard library helper.
        shutil.copyfileobj(src, dst)
        return

    bufsize = 16 * 1024
    full_blocks, tail = divmod(length, bufsize)
    # Build the schedule of chunk sizes: n full buffers plus the remainder.
    sizes = [bufsize] * full_blocks
    if tail:
        sizes.append(tail)
    for want in sizes:
        chunk = src.read(want)
        if len(chunk) < want:
            raise IOError("end of file reached")
        dst.write(chunk)
+
# Lookup table for filemode(): one inner tuple per character of the
# "-rwxrwxrwx" output.  Within each inner tuple the first (bit, char)
# pair whose bits are all set in the mode wins; if none match, "-" is
# emitted.  The order of the setuid/setgid/sticky entries matters:
# e.g. exec+setuid -> "s", setuid without exec -> "S".
filemode_table = (
    ((S_IFLNK,      "l"),
     (S_IFREG,      "-"),
     (S_IFBLK,      "b"),
     (S_IFDIR,      "d"),
     (S_IFCHR,      "c"),
     (S_IFIFO,      "p")),

    ((TUREAD,       "r"),),
    ((TUWRITE,      "w"),),
    ((TUEXEC|TSUID, "s"),
     (TSUID,        "S"),
     (TUEXEC,       "x")),

    ((TGREAD,       "r"),),
    ((TGWRITE,      "w"),),
    ((TGEXEC|TSGID, "s"),
     (TSGID,        "S"),
     (TGEXEC,       "x")),

    ((TOREAD,       "r"),),
    ((TOWRITE,      "w"),),
    ((TOEXEC|TSVTX, "t"),
     (TSVTX,        "T"),
     (TOEXEC,       "x"))
)
+
def filemode(mode):
    """Convert a file's mode to an ls-style string of the
       form -rwxrwxrwx.
       Used by TarFile.list().
    """
    chars = []
    for alternatives in filemode_table:
        ch = "-"
        for bit, candidate in alternatives:
            # First alternative whose bits are fully set wins.
            if mode & bit == bit:
                ch = candidate
                break
        chars.append(ch)
    return "".join(chars)
+
# Tar archives always use "/" as the path separator.  On platforms whose
# native separator differs (e.g. Windows "\\"), normalize and then convert
# back to "/"; elsewhere plain os.path.normpath is sufficient.
if os.sep != "/":
    normpath = lambda path: os.path.normpath(path).replace(os.sep, "/")
else:
    normpath = os.path.normpath
+
class TarError(Exception):
    """Base class for all exceptions raised by this module."""

class ExtractError(TarError):
    """General exception for extract errors."""

class ReadError(TarError):
    """Exception for unreadable tar archives."""

class CompressionError(TarError):
    """Exception for unavailable compression methods."""

class StreamError(TarError):
    """Exception for unsupported operations on stream-like TarFiles."""
+
+#---------------------------
+# internal stream interface
+#---------------------------
class _LowLevelFile:
    """Low-level file object built directly on an OS file descriptor.

       Supports reading and writing only; used in place of a regular
       file object for streaming access.
    """

    def __init__(self, name, mode):
        # Translate the one-letter mode into os.open() flags.
        flags = {
            "r": os.O_RDONLY,
            "w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
        }[mode]
        # O_BINARY exists only on platforms that distinguish text/binary
        # (e.g. Windows); elsewhere getattr yields 0 and is a no-op.
        flags |= getattr(os, "O_BINARY", 0)
        self.fd = os.open(name, flags)

    def close(self):
        os.close(self.fd)

    def read(self, size):
        return os.read(self.fd, size)

    def write(self, s):
        os.write(self.fd, s)
+
class _Stream:
    """Class that serves as an adapter between TarFile and
       a stream-like object.  The stream-like object only
       needs to have a read() or write() method and is accessed
       blockwise.  Use of gzip or bzip2 compression is possible.
       A stream-like object could be for example: sys.stdin,
       sys.stdout, a socket, a tape device etc.

       _Stream is intended to be used only internally.
    """

    def __init__(self, name, mode, type, fileobj, bufsize):
        """Construct a _Stream object.

           name    -- archive name (also written into the gzip header)
           mode    -- "r" to read or "w" to write
           type    -- compression type: "tar" (none), "gz" or "bz2"
           fileobj -- stream to wrap, or None to open `name` directly
           bufsize -- size of the blocks read from / written to fileobj
        """
        # Remember whether we own fileobj, so close() knows whether
        # to close it.
        self._extfileobj = True
        if fileobj is None:
            fileobj = _LowLevelFile(name, mode)
            self._extfileobj = False

        self.name    = name or ""
        self.mode    = mode
        self.type    = type
        self.fileobj = fileobj
        self.bufsize = bufsize
        self.buf     = ""       # raw (compressed) byte buffer
        self.pos     = 0L       # logical (uncompressed) stream position
        self.closed  = False

        if type == "gz":
            try:
                import zlib
            except ImportError:
                raise CompressionError, "zlib module is not available"
            self.zlib = zlib
            # Running CRC of the uncompressed data; written in the
            # gzip trailer by close().
            self.crc = zlib.crc32("")
            if mode == "r":
                self._init_read_gz()
            else:
                self._init_write_gz()

        if type == "bz2":
            try:
                import bz2
            except ImportError:
                raise CompressionError, "bz2 module is not available"
            if mode == "r":
                self.dbuf = ""  # decompressed-data buffer
                self.cmp = bz2.BZ2Decompressor()
            else:
                self.cmp = bz2.BZ2Compressor()

    def __del__(self):
        # Best-effort flush/close if the user forgot to call close().
        if not self.closed:
            self.close()

    def _init_write_gz(self):
        """Initialize for writing with gzip compression.
        """
        # Raw deflate stream (negative wbits -> no zlib header); the
        # gzip framing is written by hand below.
        self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
                                            -self.zlib.MAX_WBITS,
                                            self.zlib.DEF_MEM_LEVEL,
                                            0)
        timestamp = struct.pack("<L", long(time.time()))
        # gzip member header: magic \037\213, CM=8 (deflate),
        # FLG=\010 (FNAME set), 4-byte mtime, XFL=2, OS=\377 (unknown).
        self.__write("\037\213\010\010%s\002\377" % timestamp)
        if self.name.endswith(".gz"):
            self.name = self.name[:-3]
        # NUL-terminated original file name (the FNAME field).
        self.__write(self.name + NUL)

    def write(self, s):
        """Write string s to the stream.
        """
        if self.type == "gz":
            self.crc = self.zlib.crc32(s, self.crc)
        self.pos += len(s)
        if self.type != "tar":
            s = self.cmp.compress(s)
        self.__write(s)

    def __write(self, s):
        """Write string s to the stream if a whole new block
           is ready to be written.
        """
        # Accumulate and flush in bufsize-sized pieces; a final partial
        # block stays buffered until close().
        self.buf += s
        while len(self.buf) > self.bufsize:
            self.fileobj.write(self.buf[:self.bufsize])
            self.buf = self.buf[self.bufsize:]

    def close(self):
        """Close the _Stream object. No operation should be
           done on it afterwards.
        """
        if self.closed:
            return

        if self.mode == "w" and self.type != "tar":
            # Flush whatever the compressor still holds.
            self.buf += self.cmp.flush()
        if self.mode == "w" and self.buf:
            self.fileobj.write(self.buf)
            self.buf = ""
            if self.type == "gz":
                # gzip trailer: CRC32 and uncompressed size modulo 2**32.
                self.fileobj.write(struct.pack("<l", self.crc))
                self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFFL))

        # Only close the underlying object if we created it ourselves.
        if not self._extfileobj:
            self.fileobj.close()

        self.closed = True

    def _init_read_gz(self):
        """Initialize for reading a gzip compressed fileobj.
        """
        self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
        self.dbuf = ""

        # taken from gzip.GzipFile with some alterations
        if self.__read(2) != "\037\213":
            raise ReadError, "not a gzip file"
        if self.__read(1) != "\010":
            raise CompressionError, "unsupported compression method"

        flag = ord(self.__read(1))
        self.__read(6)          # skip mtime(4), XFL, OS

        if flag & 4:
            # FEXTRA: 2-byte little-endian length, then that many bytes.
            xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
            # NOTE(review): this skips the extra field with the
            # decompressing read() rather than the raw __read(); the
            # FEXTRA bytes are part of the raw header per RFC 1952, so
            # this looks suspicious -- confirm before relying on
            # archives that set FEXTRA.
            self.read(xlen)
        if flag & 8:
            # FNAME: NUL-terminated original file name.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 16:
            # FCOMMENT: NUL-terminated comment.
            while True:
                s = self.__read(1)
                if not s or s == NUL:
                    break
        if flag & 2:
            # FHCRC: 2-byte header CRC.
            self.__read(2)

    def tell(self):
        """Return the stream's file pointer position.
        """
        return self.pos

    def seek(self, pos=0):
        """Set the stream's file pointer to pos. Negative seeking
           is forbidden.
        """
        if pos - self.pos >= 0:
            # Forward seek is implemented by reading and discarding.
            blocks, remainder = divmod(pos - self.pos, self.bufsize)
            for i in xrange(blocks):
                self.read(self.bufsize)
            self.read(remainder)
        else:
            raise StreamError, "seeking backwards is not allowed"
        return self.pos

    def read(self, size=None):
        """Return the next size number of bytes from the stream.
           If size is not defined, return all bytes of the stream
           up to EOF.
        """
        if size is None:
            t = []
            while True:
                buf = self._read(self.bufsize)
                if not buf:
                    break
                t.append(buf)
            buf = "".join(t)
        else:
            buf = self._read(size)
        self.pos += len(buf)
        return buf

    def _read(self, size):
        """Return size bytes from the stream.
        """
        if self.type == "tar":
            # No compression: pass straight through to the raw reader.
            return self.__read(size)

        # Decompress raw blocks into self.dbuf until size bytes are
        # available (or EOF).
        c = len(self.dbuf)
        t = [self.dbuf]
        while c < size:
            buf = self.__read(self.bufsize)
            if not buf:
                break
            buf = self.cmp.decompress(buf)
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.dbuf = t[size:]
        return t[:size]

    def __read(self, size):
        """Return size bytes from stream. If internal buffer is empty,
           read another block from the stream.
        """
        c = len(self.buf)
        t = [self.buf]
        while c < size:
            buf = self.fileobj.read(self.bufsize)
            if not buf:
                break
            t.append(buf)
            c += len(buf)
        t = "".join(t)
        self.buf = t[size:]
        return t[:size]
# class _Stream
+
+#------------------------
+# Extraction file object
+#------------------------
class ExFileObject(object):
    """File-like object for reading an archive member.
       Is returned by TarFile.extractfile(). Support for
       sparse files included.
    """

    def __init__(self, tarfile, tarinfo):
        self.fileobj = tarfile.fileobj
        self.name    = tarinfo.name
        self.mode    = "r"
        self.closed  = False
        self.offset  = tarinfo.offset_data   # member data start in the archive
        self.size    = tarinfo.size          # logical member size
        self.pos     = 0L                    # current position within the member
        self.linebuffer = ""                 # buffer used only by readline()
        # Bind read() to the sparse- or regular-file implementation once,
        # so the per-call dispatch cost is avoided.
        if tarinfo.issparse():
            self.sparse = tarinfo.sparse     # presumably a sparse-section map
                                             # built elsewhere in this module --
                                             # see _readsparsesection()
            self.read = self._readsparse
        else:
            self.read = self._readnormal

    def __read(self, size):
        """Overloadable read method.
        """
        return self.fileobj.read(size)

    def readline(self, size=-1):
        """Read a line with approx. size. If size is negative,
           read a whole line. readline() and read() must not
           be mixed up (!).
        """
        if size < 0:
            size = sys.maxint

        nl = self.linebuffer.find("\n")
        if nl >= 0:
            nl = min(nl, size)
        else:
            # No newline buffered yet: pull data in small chunks until one
            # shows up, size is exhausted, or EOF.
            size -= len(self.linebuffer)
            while (nl < 0 and size > 0):
                buf = self.read(min(size, 100))
                if not buf:
                    break
                self.linebuffer += buf
                size -= len(buf)
                nl = self.linebuffer.find("\n")
            if nl == -1:
                # EOF (or size limit) without a newline: return what we have.
                s = self.linebuffer
                self.linebuffer = ""
                return s
        buf = self.linebuffer[:nl]
        self.linebuffer = self.linebuffer[nl + 1:]
        # Strip trailing carriage returns so CRLF lines end in "\n" only.
        while buf[-1:] == "\r":
            buf = buf[:-1]
        return buf + "\n"

    def readlines(self):
        """Return a list with all (following) lines.
        """
        result = []
        while True:
            line = self.readline()
            if not line: break
            result.append(line)
        return result

    def _readnormal(self, size=None):
        """Read operation for regular files.
        """
        if self.closed:
            raise ValueError, "file is closed"
        # Re-seek every call: the archive's fileobj is shared with the
        # TarFile, so its position cannot be assumed.
        self.fileobj.seek(self.offset + self.pos)
        bytesleft = self.size - self.pos
        if size is None:
            bytestoread = bytesleft
        else:
            bytestoread = min(size, bytesleft)
        self.pos += bytestoread
        return self.__read(bytestoread)

    def _readsparse(self, size=None):
        """Read operation for sparse files.
        """
        if self.closed:
            raise ValueError, "file is closed"

        if size is None:
            size = self.size - self.pos

        data = []
        while size > 0:
            buf = self._readsparsesection(size)
            if not buf:
                break
            size -= len(buf)
            data.append(buf)
        return "".join(data)

    def _readsparsesection(self, size):
        """Read a single section of a sparse file.
        """
        # self.sparse.find(pos) is expected to return the section covering
        # pos, or None past EOF (implementation lives elsewhere in this
        # module, outside this class).
        section = self.sparse.find(self.pos)

        if section is None:
            return ""

        toread = min(size, section.offset + section.size - self.pos)
        if isinstance(section, _data):
            # Real data: map the logical position to its physical location.
            realpos = section.realpos + self.pos - section.offset
            self.pos += toread
            self.fileobj.seek(self.offset + realpos)
            return self.__read(toread)
        else:
            # Hole: synthesize NUL bytes without touching the archive.
            self.pos += toread
            return NUL * toread

    def tell(self):
        """Return the current file position.
        """
        return self.pos

    def seek(self, pos, whence=0):
        """Seek to a position in the file.
        """
        # Any buffered readline() data is invalid after a seek.
        self.linebuffer = ""
        if whence == 0:
            self.pos = min(max(pos, 0), self.size)
        if whence == 1:
            if pos < 0:
                self.pos = max(self.pos + pos, 0)
            else:
                self.pos = min(self.pos + pos, self.size)
        if whence == 2:
            self.pos = max(min(self.size + pos, self.size), 0)

    def close(self):
        """Close the file object.
        """
        self.closed = True
#class ExFileObject
+
+#------------------
+# Exported Classes
+#------------------
+class TarInfo(object):
+    """Informational class which holds the details about an
+       archive member given by a tar header block.
+       TarInfo objects are returned by TarFile.getmember(),
+       TarFile.getmembers() and TarFile.gettarinfo() and are
+       usually created internally.
+    """
+
+    def __init__(self, name=""):
+        """Construct a TarInfo object. name is the optional name
+           of the member.
+        """
+
+        self.name     = name       # member name (dirnames must end with '/')
+        self.mode     = 0666       # file permissions
+        self.uid      = 0          # user id
+        self.gid      = 0          # group id
+        self.size     = 0          # file size
+        self.mtime    = 0          # modification time
+        self.chksum   = 0          # header checksum
+        self.type     = REGTYPE    # member type
+        self.linkname = ""         # link name
+        self.uname    = "user"     # user name
+        self.gname    = "group"    # group name
+        self.devmajor = 0          #-
+        self.devminor = 0          #-for use with CHRTYPE and BLKTYPE
+        self.prefix   = ""         # prefix to filename or holding information
+                                   # about sparse files
+
+        self.offset   = 0          # the tar header starts here
+        self.offset_data = 0       # the file's data starts here
+
+    def __repr__(self):
+        return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
+
+    def frombuf(cls, buf):
+        """Construct a TarInfo object from a 512 byte string buffer.
+        """
+        tarinfo = cls()
+        tarinfo.name   = nts(buf[0:100])
+        tarinfo.mode   = int(buf[100:108], 8)
+        tarinfo.uid    = int(buf[108:116],8)
+        tarinfo.gid    = int(buf[116:124],8)
+
+        # There are two possible codings for the size field we
+        # have to discriminate, see comment in tobuf() below.
+        if buf[124] != chr(0200):
+            tarinfo.size = long(buf[124:136], 8)
+        else:
+            tarinfo.size = 0L
+            for i in range(11):
+                tarinfo.size <<= 8
+                tarinfo.size += ord(buf[125 + i])
+
+        tarinfo.mtime  = long(buf[136:148], 8)
+        tarinfo.chksum = int(buf[148:156], 8)
+        tarinfo.type   = buf[156:157]
+        tarinfo.linkname = nts(buf[157:257])
+        tarinfo.uname  = nts(buf[265:297])
+        tarinfo.gname  = nts(buf[297:329])
+        try:
+            tarinfo.devmajor = int(buf[329:337], 8)
+            tarinfo.devminor = int(buf[337:345], 8)
+        except ValueError:
+            tarinfo.devmajor = tarinfo.devmajor = 0
+        tarinfo.prefix = buf[345:500]
+
+        # The prefix field is used for filenames > 100 in
+        # the POSIX standard.
+        # name = prefix + '/' + name
+        if tarinfo.type != GNUTYPE_SPARSE:
+            tarinfo.name = normpath(os.path.join(nts(tarinfo.prefix), tarinfo.name))
+
+        # Directory names should have a '/' at the end.
+        if tarinfo.isdir() and tarinfo.name[-1:] != "/":
+            tarinfo.name += "/"
+        return tarinfo
+
+    frombuf = classmethod(frombuf)
+
+    def tobuf(self):
+        """Return a tar header block as a 512 byte string.
+        """
+        # Prefer the size to be encoded as 11 octal ascii digits
+        # which is the most portable. If the size exceeds this
+        # limit (>= 8 GB), encode it as an 88-bit value which is
+        # a GNU tar feature.
+        if self.size <= MAXSIZE_MEMBER:
+            size = "%011o" % self.size
+        else:
+            s = self.size
+            size = ""
+            for i in range(11):
+                size = chr(s & 0377) + size
+                s >>= 8
+            size = chr(0200) + size
+
+        # The following code was contributed by Detlef Lannert.
+        parts = []
+        for value, fieldsize in (
+                (self.name, 100),
+                ("%07o" % (self.mode & 07777), 8),
+                ("%07o" % self.uid, 8),
+                ("%07o" % self.gid, 8),
+                (size, 12),
+                ("%011o" % self.mtime, 12),
+                ("        ", 8),
+                (self.type, 1),
+                (self.linkname, 100),
+                (MAGIC, 6),
+                (VERSION, 2),
+                (self.uname, 32),
+                (self.gname, 32),
+                ("%07o" % self.devmajor, 8),
+                ("%07o" % self.devminor, 8),
+                (self.prefix, 155)
+            ):
+            l = len(value)
+            parts.append(value[:fieldsize] + (fieldsize - l) * NUL)
+
+        buf = "".join(parts)
+        chksum = calc_chksum(buf)
+        buf = buf[:148] + "%06o\0" % chksum + buf[155:]
+        buf += (BLOCKSIZE - len(buf)) * NUL
+        self.buf = buf
+        return buf
+
+    def isreg(self):
+        return self.type in REGULAR_TYPES
+    def isfile(self):
+        return self.isreg()
+    def isdir(self):
+        return self.type == DIRTYPE
+    def issym(self):
+        return self.type == SYMTYPE
+    def islnk(self):
+        return self.type == LNKTYPE
+    def ischr(self):
+        return self.type == CHRTYPE
+    def isblk(self):
+        return self.type == BLKTYPE
+    def isfifo(self):
+        return self.type == FIFOTYPE
+    def issparse(self):
+        return self.type == GNUTYPE_SPARSE
+    def isdev(self):
+        return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
+# class TarInfo
+
class TarFile(object):
    """The TarFile Class provides an interface to tar archives.
    """

    # Class-level defaults; instances or subclasses may override them.

    debug = 0                   # May be set from 0 (no msgs) to 3 (all msgs)

    dereference = False         # If true, add content of linked file to the
                                # tar file, else the link.

    ignore_zeros = False        # If true, skips empty or invalid blocks and
                                # continues processing.

    errorlevel = 0              # If 0, fatal errors only appear in debug
                                # messages (if debug >= 0). If > 0, errors
                                # are passed to the caller as exceptions.

    posix = False               # If True, generates POSIX.1-1990-compliant
                                # archives (no GNU extensions!)

    fileobject = ExFileObject   # factory used by extractfile()
+
    def __init__(self, name=None, mode="r", fileobj=None):
        """Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
           read from an existing archive, 'a' to append data to an existing
           file or 'w' to create a new file overwriting an existing one. `mode'
           defaults to 'r'.
           If `fileobj' is given, it is used for reading or writing data. If it
           can be determined, `mode' is overridden by `fileobj's mode.
           `fileobj' is not closed, when TarFile is closed.
        """
        self.name = name

        # Single-character membership test against the string "raw";
        # the length check guards against e.g. mode="ra" matching.
        if len(mode) > 1 or mode not in "raw":
            raise ValueError, "mode must be 'r', 'a' or 'w'"
        self._mode = mode
        # Map the logical mode onto the binary file mode used for I/O.
        self.mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]

        if not fileobj:
            fileobj = file(self.name, self.mode)
            self._extfileobj = False    # we own the file and must close it
        else:
            if self.name is None and hasattr(fileobj, "name"):
                self.name = fileobj.name
            if hasattr(fileobj, "mode"):
                self.mode = fileobj.mode
            self._extfileobj = True     # caller owns fileobj
        self.fileobj = fileobj

        # Init datastructures
        self.closed      = False
        self.members     = []       # list of members as TarInfo objects
        self._loaded     = False    # flag if all members have been read
        self.offset      = 0L       # current position in the archive file
        self.inodes      = {}       # dictionary caching the inodes of
                                    # archive members already added

        if self._mode == "r":
            # Read the first header eagerly so next() can hand it out.
            self.firstmember = None
            self.firstmember = self.next()

        if self._mode == "a":
            # Move to the end of the archive,
            # before the first empty block.
            self.firstmember = None
            while True:
                try:
                    tarinfo = self.next()
                except ReadError:
                    self.fileobj.seek(0)
                    break
                if tarinfo is None:
                    # Step back over the empty block just read.
                    self.fileobj.seek(- BLOCKSIZE, 1)
                    break

        if self._mode in "aw":
            self._loaded = True
+
+    #--------------------------------------------------------------------------
+    # Below are the classmethods which act as alternate constructors to the
+    # TarFile class. The open() method is the only one that is needed for
+    # public use; it is the "super"-constructor and is able to select an
+    # adequate "sub"-constructor for a particular compression using the mapping
+    # from OPEN_METH.
+    #
+    # This concept allows one to subclass TarFile without losing the comfort of
+    # the super-constructor. A sub-constructor is registered and made available
+    # by adding it to the mapping in OPEN_METH.
+
    def open(cls, name=None, mode="r", fileobj=None, bufsize=20*512):
        """Open a tar archive for reading, writing or appending. Return
           an appropriate TarFile class.

           mode:
           'r'          open for reading with transparent compression
           'r:'         open for reading exclusively uncompressed
           'r:gz'       open for reading with gzip compression
           'r:bz2'      open for reading with bzip2 compression
           'a' or 'a:'  open for appending
           'w' or 'w:'  open for writing without compression
           'w:gz'       open for writing with gzip compression
           'w:bz2'      open for writing with bzip2 compression
           'r|'         open an uncompressed stream of tar blocks for reading
           'r|gz'       open a gzip compressed stream of tar blocks
           'r|bz2'      open a bzip2 compressed stream of tar blocks
           'w|'         open an uncompressed stream for writing
           'w|gz'       open a gzip compressed stream for writing
           'w|bz2'      open a bzip2 compressed stream for writing
        """

        if not name and not fileobj:
            raise ValueError, "nothing to open"

        if ":" in mode:
            # "mode:comptype" -- seekable file with explicit compression.
            filemode, comptype = mode.split(":", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            # Select the *open() function according to
            # given compression.
            if comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
            else:
                raise CompressionError, "unknown compression type %r" % comptype
            return func(name, filemode, fileobj)

        elif "|" in mode:
            # "mode|comptype" -- non-seekable stream wrapped in _Stream.
            filemode, comptype = mode.split("|", 1)
            filemode = filemode or "r"
            comptype = comptype or "tar"

            if filemode not in "rw":
                raise ValueError, "mode must be 'r' or 'w'"

            t = cls(name, filemode,
                    _Stream(name, filemode, comptype, fileobj, bufsize))
            # The _Stream was created here, so TarFile must close it.
            t._extfileobj = False
            return t

        elif mode == "r":
            # Find out which *open() is appropriate for opening the file.
            # Try every registered sub-constructor in turn (transparent
            # compression detection).
            for comptype in cls.OPEN_METH:
                func = getattr(cls, cls.OPEN_METH[comptype])
                try:
                    return func(name, "r", fileobj)
                except (ReadError, CompressionError):
                    continue
            raise ReadError, "file could not be opened successfully"

        elif mode in "aw":
            return cls.taropen(name, mode, fileobj)

        raise ValueError, "undiscernible mode"

    open = classmethod(open)
+
+    def taropen(cls, name, mode="r", fileobj=None):
+        """Open uncompressed tar archive name for reading or writing.
+        """
+        if len(mode) > 1 or mode not in "raw":
+            raise ValueError, "mode must be 'r', 'a' or 'w'"
+        return cls(name, mode, fileobj)
+
+    taropen = classmethod(taropen)
+
+    def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9):
+        """Open gzip compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if len(mode) > 1 or mode not in "rw":
+            raise ValueError, "mode must be 'r' or 'w'"
+
+        # Fail with a tarfile-level error if this interpreter build lacks
+        # the gzip module (or a usable GzipFile class in it).
+        try:
+            import gzip
+            gzip.GzipFile
+        except (ImportError, AttributeError):
+            raise CompressionError, "gzip module is not available"
+
+        # Derive the logical archive name: strip the directory part and
+        # map ".tgz" -> ".tar" / drop a plain ".gz" suffix.
+        pre, ext = os.path.splitext(name)
+        pre = os.path.basename(pre)
+        if ext == ".tgz":
+            ext = ".tar"
+        if ext == ".gz":
+            ext = ""
+        tarname = pre + ext
+
+        if fileobj is None:
+            fileobj = file(name, mode + "b")
+
+        # When writing, GzipFile records `name' in the gzip header; store
+        # the derived tar name there instead of the on-disk filename.
+        if mode != "r":
+            name = tarname
+
+        try:
+            t = cls.taropen(tarname, mode,
+                gzip.GzipFile(name, mode, compresslevel, fileobj)
+            )
+        except IOError:
+            # GzipFile raises IOError when the gzip magic does not match.
+            raise ReadError, "not a gzip file"
+        # The GzipFile wrapper was created here, so close() must close it.
+        t._extfileobj = False
+        return t
+
+    gzopen = classmethod(gzopen)
+
+    def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9):
+        """Open bzip2 compressed tar archive name for reading or writing.
+           Appending is not allowed.
+        """
+        if len(mode) > 1 or mode not in "rw":
+            raise ValueError, "mode must be 'r' or 'w'."
+
+        try:
+            import bz2
+        except ImportError:
+            raise CompressionError, "bz2 module is not available"
+
+        # Derive the logical archive name: strip the directory part and
+        # map ".tbz2" -> ".tar" / drop a plain ".bz2" suffix.
+        pre, ext = os.path.splitext(name)
+        pre = os.path.basename(pre)
+        if ext == ".tbz2":
+            ext = ".tar"
+        if ext == ".bz2":
+            ext = ""
+        tarname = pre + ext
+
+        # Unlike GzipFile, BZ2File cannot wrap an existing file object.
+        if fileobj is not None:
+            raise ValueError, "no support for external file objects"
+
+        try:
+            t = cls.taropen(tarname, mode, bz2.BZ2File(name, mode, compresslevel=compresslevel))
+        except IOError:
+            # BZ2File raises IOError when `name' is not bzip2 data.
+            raise ReadError, "not a bzip2 file"
+        # The BZ2File was created here, so close() must close it.
+        t._extfileobj = False
+        return t
+
+    bz2open = classmethod(bz2open)
+
+    # All *open() methods are registered here.
+    # Maps the compression part of an "mode:comptype" string (see open())
+    # to the name of the classmethod that handles it. open() also probes
+    # these in order for plain mode "r".
+    OPEN_METH = {
+        "tar": "taropen",   # uncompressed tar
+        "gz":  "gzopen",    # gzip compressed tar
+        "bz2": "bz2open"    # bzip2 compressed tar
+    }
+
+    #--------------------------------------------------------------------------
+    # The public methods which TarFile provides:
+
+    def close(self):
+        """Close the TarFile. In write-mode, two finishing zero blocks are
+           appended to the archive.
+        """
+        if self.closed:
+            return
+
+        if self._mode in "aw":
+            self.fileobj.write(NUL * (BLOCKSIZE * 2))
+            self.offset += (BLOCKSIZE * 2)
+            # fill up the end with zero-blocks
+            # (like option -b20 for tar does)
+            blocks, remainder = divmod(self.offset, RECORDSIZE)
+            if remainder > 0:
+                self.fileobj.write(NUL * (RECORDSIZE - remainder))
+
+        if not self._extfileobj:
+            self.fileobj.close()
+        self.closed = True
+
+    def getmember(self, name):
+        """Return a TarInfo object for member `name'. If `name' can not be
+           found in the archive, KeyError is raised. If a member occurs more
+           than once in the archive, its last occurence is assumed to be the
+           most up-to-date version.
+        """
+        tarinfo = self._getmember(name)
+        if tarinfo is None:
+            raise KeyError, "filename %r not found" % name
+        return tarinfo
+
+    def getmembers(self):
+        """Return the members of the archive as a list of TarInfo objects. The
+           list has the same order as the members in the archive.
+        """
+        self._check()
+        if not self._loaded:    # if we want to obtain a list of
+            self._load()        # all members, we first have to
+                                # scan the whole archive.
+        return self.members
+
+    def getnames(self):
+        """Return the members of the archive as a list of their names. It has
+           the same order as the list returned by getmembers().
+        """
+        return [tarinfo.name for tarinfo in self.getmembers()]
+
+    def gettarinfo(self, name=None, arcname=None, fileobj=None):
+        """Create a TarInfo object for either the file `name' or the file
+           object `fileobj' (using os.fstat on its file descriptor). You can
+           modify some of the TarInfo's attributes before you add it using
+           addfile(). If given, `arcname' specifies an alternative name for the
+           file in the archive.
+           Returns None for file types that cannot be represented in a tar
+           archive (e.g. sockets).
+        """
+        self._check("aw")
+
+        # When fileobj is given, replace name by
+        # fileobj's real name.
+        if fileobj is not None:
+            name = fileobj.name
+
+        # Building the name of the member in the archive.
+        # Backward slashes are converted to forward slashes,
+        # Absolute paths are turned to relative paths.
+        if arcname is None:
+            arcname = name
+        arcname = normpath(arcname)
+        drv, arcname = os.path.splitdrive(arcname)
+        while arcname[0:1] == "/":
+            arcname = arcname[1:]
+
+        # Now, fill the TarInfo object with
+        # information specific for the file.
+        tarinfo = TarInfo()
+
+        # Use os.stat or os.lstat, depending on platform
+        # and if symlinks shall be resolved.
+        if fileobj is None:
+            if hasattr(os, "lstat") and not self.dereference:
+                statres = os.lstat(name)
+            else:
+                statres = os.stat(name)
+        else:
+            statres = os.fstat(fileobj.fileno())
+        linkname = ""
+
+        # Determine the member's typeflag from the stat mode bits.
+        stmd = statres.st_mode
+        if stat.S_ISREG(stmd):
+            inode = (statres.st_ino, statres.st_dev)
+            if inode in self.inodes and not self.dereference:
+                # Is it a hardlink to an already
+                # archived file?
+                type = LNKTYPE
+                linkname = self.inodes[inode]
+            else:
+                # The inode is added only if its valid.
+                # For win32 it is always 0.
+                type = REGTYPE
+                if inode[0]:
+                    self.inodes[inode] = arcname
+        elif stat.S_ISDIR(stmd):
+            type = DIRTYPE
+            if arcname[-1:] != "/":
+                arcname += "/"
+        elif stat.S_ISFIFO(stmd):
+            type = FIFOTYPE
+        elif stat.S_ISLNK(stmd):
+            type = SYMTYPE
+            linkname = os.readlink(name)
+        elif stat.S_ISCHR(stmd):
+            type = CHRTYPE
+        elif stat.S_ISBLK(stmd):
+            type = BLKTYPE
+        else:
+            # Sockets and other exotic types cannot be stored in a tar
+            # archive.
+            return None
+
+        # Fill the TarInfo object with all
+        # information we can get.
+        tarinfo.name  = arcname
+        tarinfo.mode  = stmd
+        tarinfo.uid   = statres.st_uid
+        tarinfo.gid   = statres.st_gid
+        if stat.S_ISDIR(stmd):
+            # For a directory, the size must be 0
+            tarinfo.size  = 0
+        else:
+            tarinfo.size = statres.st_size
+        tarinfo.mtime = statres.st_mtime
+        tarinfo.type  = type
+        tarinfo.linkname = linkname
+        # Map numeric ids to symbolic names where the platform provides
+        # the pwd/grp modules (they are None on win32).
+        if pwd:
+            try:
+                tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
+            except KeyError:
+                pass
+        if grp:
+            try:
+                tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
+            except KeyError:
+                pass
+
+        # Device nodes also carry major/minor numbers where supported.
+        if type in (CHRTYPE, BLKTYPE):
+            if hasattr(os, "major") and hasattr(os, "minor"):
+                tarinfo.devmajor = os.major(statres.st_rdev)
+                tarinfo.devminor = os.minor(statres.st_rdev)
+        return tarinfo
+
+    def list(self, verbose=True):
+        """Print a table of contents to sys.stdout. If `verbose' is False, only
+           the names of the members are printed. If it is True, an `ls -l'-like
+           output is produced.
+        """
+        self._check()
+
+        # The trailing commas keep each member's fields on a single line.
+        for tarinfo in self:
+            if verbose:
+                print filemode(tarinfo.mode),
+                print "%s/%s" % (tarinfo.uname or tarinfo.uid,
+                                 tarinfo.gname or tarinfo.gid),
+                if tarinfo.ischr() or tarinfo.isblk():
+                    # Device nodes show "major,minor" instead of a size.
+                    print "%10s" % ("%d,%d" \
+                                    % (tarinfo.devmajor, tarinfo.devminor)),
+                else:
+                    print "%10d" % tarinfo.size,
+                print "%d-%02d-%02d %02d:%02d:%02d" \
+                      % time.localtime(tarinfo.mtime)[:6],
+
+            print tarinfo.name,
+
+            if verbose:
+                # Show link targets the way `ls -l' does.
+                if tarinfo.issym():
+                    print "->", tarinfo.linkname,
+                if tarinfo.islnk():
+                    print "link to", tarinfo.linkname,
+            print
+
+    def add(self, name, arcname=None, recursive=True):
+        """Add the file `name' to the archive. `name' may be any type of file
+           (directory, fifo, symbolic link, etc.). If given, `arcname'
+           specifies an alternative name for the file in the archive.
+           Directories are added recursively by default. This can be avoided by
+           setting `recursive' to False.
+        """
+        self._check("aw")
+
+        if arcname is None:
+            arcname = name
+
+        # Skip if somebody tries to archive the archive...
+        if self.name is not None \
+            and os.path.abspath(name) == os.path.abspath(self.name):
+            self._dbg(2, "tarfile: Skipped %r" % name)
+            return
+
+        # Special case: The user wants to add the current
+        # working directory.
+        if name == ".":
+            if recursive:
+                if arcname == ".":
+                    arcname = ""
+                for f in os.listdir("."):
+                    self.add(f, os.path.join(arcname, f))
+            return
+
+        self._dbg(1, name)
+
+        # Create a TarInfo object from the file.
+        tarinfo = self.gettarinfo(name, arcname)
+
+        # gettarinfo() returns None for types tar cannot represent
+        # (e.g. sockets).
+        if tarinfo is None:
+            self._dbg(1, "tarfile: Unsupported type %r" % name)
+            return
+
+        # Append the tar header and data to the archive.
+        if tarinfo.isreg():
+            f = file(name, "rb")
+            self.addfile(tarinfo, f)
+            f.close()
+
+        # Links and special files consist of a header block only.
+        if tarinfo.type in (LNKTYPE, SYMTYPE, FIFOTYPE, CHRTYPE, BLKTYPE):
+            tarinfo.size = 0L
+            self.addfile(tarinfo)
+
+        if tarinfo.isdir():
+            self.addfile(tarinfo)
+            if recursive:
+                for f in os.listdir(name):
+                    self.add(os.path.join(name, f), os.path.join(arcname, f))
+
+    def addfile(self, tarinfo, fileobj=None):
+        """Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
+           given, tarinfo.size bytes are read from it and added to the archive.
+           You can create TarInfo objects using gettarinfo().
+           On Windows platforms, `fileobj' should always be opened with mode
+           'rb' to avoid irritation about the file size.
+        """
+        self._check("aw")
+
+        tarinfo.name = normpath(tarinfo.name)
+        if tarinfo.isdir():
+            # directories should end with '/'
+            tarinfo.name += "/"
+
+        if tarinfo.linkname:
+            tarinfo.linkname = normpath(tarinfo.linkname)
+
+        # Members >= 8 GB cannot be represented in a strictly POSIX
+        # header; in non-posix mode the oversized value is handled when
+        # the header buffer is generated (hence only a debug message).
+        if tarinfo.size > MAXSIZE_MEMBER:
+            if self.posix:
+                raise ValueError, "file is too large (>= 8 GB)"
+            else:
+                self._dbg(2, "tarfile: Created GNU tar largefile header")
+
+
+        # Overlong link target: POSIX mode must fail, GNU mode emits an
+        # extra LONGLINK member with the full target and truncates the
+        # header field.
+        if len(tarinfo.linkname) > LENGTH_LINK:
+            if self.posix:
+                raise ValueError, "linkname is too long (>%d)" \
+                                  % (LENGTH_LINK)
+            else:
+                self._create_gnulong(tarinfo.linkname, GNUTYPE_LONGLINK)
+                tarinfo.linkname = tarinfo.linkname[:LENGTH_LINK -1]
+                self._dbg(2, "tarfile: Created GNU tar extension LONGLINK")
+
+        # Overlong name: POSIX mode tries to split it into a prefix/name
+        # pair at a '/', GNU mode emits a LONGNAME member instead.
+        if len(tarinfo.name) > LENGTH_NAME:
+            if self.posix:
+                prefix = tarinfo.name[:LENGTH_PREFIX + 1]
+                while prefix and prefix[-1] != "/":
+                    prefix = prefix[:-1]
+
+                name = tarinfo.name[len(prefix):]
+                prefix = prefix[:-1]
+
+                if not prefix or len(name) > LENGTH_NAME:
+                    raise ValueError, "name is too long (>%d)" \
+                                      % (LENGTH_NAME)
+
+                tarinfo.name   = name
+                tarinfo.prefix = prefix
+            else:
+                self._create_gnulong(tarinfo.name, GNUTYPE_LONGNAME)
+                tarinfo.name = tarinfo.name[:LENGTH_NAME - 1]
+                self._dbg(2, "tarfile: Created GNU tar extension LONGNAME")
+
+        self.fileobj.write(tarinfo.tobuf())
+        self.offset += BLOCKSIZE
+
+        # If there's data to follow, append it.
+        if fileobj is not None:
+            copyfileobj(fileobj, self.fileobj, tarinfo.size)
+            blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+            if remainder > 0:
+                # Pad the last data block with NULs up to BLOCKSIZE.
+                self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+                blocks += 1
+            self.offset += blocks * BLOCKSIZE
+
+        self.members.append(tarinfo)
+
+    def extract(self, member, path=""):
+        """Extract a member from the archive to the current working directory,
+           using its full name. Its file information is extracted as accurately
+           as possible. `member' may be a filename or a TarInfo object. You can
+           specify a different directory using `path'.
+        """
+        self._check("r")
+
+        if isinstance(member, TarInfo):
+            tarinfo = member
+        else:
+            tarinfo = self.getmember(member)
+
+        # Prepare the link target for makelink().
+        if tarinfo.islnk():
+            tarinfo._link_target = os.path.join(path, tarinfo.linkname)
+
+        try:
+            self._extract_member(tarinfo, os.path.join(path, tarinfo.name))
+        except EnvironmentError, e:
+            # errorlevel 0 downgrades OS errors to debug messages.
+            if self.errorlevel > 0:
+                raise
+            else:
+                if e.filename is None:
+                    self._dbg(1, "tarfile: %s" % e.strerror)
+                else:
+                    self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
+        except ExtractError, e:
+            # ExtractError is only fatal at errorlevel 2 and above.
+            if self.errorlevel > 1:
+                raise
+            else:
+                self._dbg(1, "tarfile: %s" % e)
+
+    def extractfile(self, member):
+        """Extract a member from the archive as a file object. `member' may be
+           a filename or a TarInfo object. If `member' is a regular file, a
+           file-like object is returned. If `member' is a link, a file-like
+           object is constructed from the link's target. If `member' is none of
+           the above, None is returned.
+           The file-like object is read-only and provides the following
+           methods: read(), readline(), readlines(), seek() and tell()
+        """
+        self._check("r")
+
+        if isinstance(member, TarInfo):
+            tarinfo = member
+        else:
+            tarinfo = self.getmember(member)
+
+        if tarinfo.isreg():
+            # self.fileobject is the file-object factory configured on
+            # the class.
+            return self.fileobject(self, tarinfo)
+
+        elif tarinfo.type not in SUPPORTED_TYPES:
+            # If a member's type is unknown, it is treated as a
+            # regular file.
+            return self.fileobject(self, tarinfo)
+
+        elif tarinfo.islnk() or tarinfo.issym():
+            if isinstance(self.fileobj, _Stream):
+                # A small but ugly workaround for the case that someone tries
+                # to extract a (sym)link as a file-object from a non-seekable
+                # stream of tar blocks.
+                raise StreamError, "cannot extract (sym)link as file object"
+            else:
+                # A (sym)link's file object is its target's file object.
+                return self.extractfile(self._getmember(tarinfo.linkname,
+                                                        tarinfo))
+        else:
+            # If there's no data associated with the member (directory, chrdev,
+            # blkdev, etc.), return None instead of a file object.
+            return None
+
+    def _extract_member(self, tarinfo, targetpath):
+        """Extract the TarInfo object tarinfo to a physical
+           file called targetpath.
+        """
+        # Fetch the TarInfo object for the given name
+        # and build the destination pathname, replacing
+        # forward slashes to platform specific separators.
+        if targetpath[-1:] == "/":
+            targetpath = targetpath[:-1]
+        targetpath = os.path.normpath(targetpath)
+
+        # Create all upper directories.
+        upperdirs = os.path.dirname(targetpath)
+        if upperdirs and not os.path.exists(upperdirs):
+            # Synthesize a directory member carrying the extracted
+            # member's ownership/times for the missing parents.
+            ti = TarInfo()
+            ti.name  = upperdirs
+            ti.type  = DIRTYPE
+            ti.mode  = 0777
+            ti.mtime = tarinfo.mtime
+            ti.uid   = tarinfo.uid
+            ti.gid   = tarinfo.gid
+            ti.uname = tarinfo.uname
+            ti.gname = tarinfo.gname
+            try:
+                self._extract_member(ti, ti.name)
+            except:
+                # NOTE(review): this bare except deliberately ignores any
+                # failure (the following make*() call will fail with a
+                # clearer error), but it also swallows KeyboardInterrupt /
+                # SystemExit -- consider narrowing to EnvironmentError.
+                pass
+
+        if tarinfo.islnk() or tarinfo.issym():
+            self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
+        else:
+            self._dbg(1, tarinfo.name)
+
+        # Dispatch on the member type to the matching make*() method.
+        if tarinfo.isreg():
+            self.makefile(tarinfo, targetpath)
+        elif tarinfo.isdir():
+            self.makedir(tarinfo, targetpath)
+        elif tarinfo.isfifo():
+            self.makefifo(tarinfo, targetpath)
+        elif tarinfo.ischr() or tarinfo.isblk():
+            self.makedev(tarinfo, targetpath)
+        elif tarinfo.islnk() or tarinfo.issym():
+            self.makelink(tarinfo, targetpath)
+        elif tarinfo.type not in SUPPORTED_TYPES:
+            self.makeunknown(tarinfo, targetpath)
+        else:
+            self.makefile(tarinfo, targetpath)
+
+        # Restore metadata; a symlink's own mode/times cannot be set.
+        self.chown(tarinfo, targetpath)
+        if not tarinfo.issym():
+            self.chmod(tarinfo, targetpath)
+            self.utime(tarinfo, targetpath)
+
+    #--------------------------------------------------------------------------
+    # Below are the different file methods. They are called via
+    # _extract_member() when extract() is called. They can be replaced in a
+    # subclass to implement other functionality.
+
+    def makedir(self, tarinfo, targetpath):
+        """Make a directory called targetpath.
+        """
+        try:
+            os.mkdir(targetpath)
+        except EnvironmentError, e:
+            if e.errno != errno.EEXIST:
+                raise
+
+    def makefile(self, tarinfo, targetpath):
+        """Make a file called targetpath.
+        """
+        # Read the member's data through extractfile() and copy exactly
+        # tarinfo.size bytes into the target file.
+        source = self.extractfile(tarinfo)
+        target = file(targetpath, "wb")
+        copyfileobj(source, target)
+        source.close()
+        target.close()
+
+    def makeunknown(self, tarinfo, targetpath):
+        """Make a file from a TarInfo object with an unknown type
+           at targetpath.
+        """
+        self.makefile(tarinfo, targetpath)
+        self._dbg(1, "tarfile: Unknown file type %r, " \
+                     "extracted as regular file." % tarinfo.type)
+
+    def makefifo(self, tarinfo, targetpath):
+        """Make a fifo called targetpath.
+        """
+        if hasattr(os, "mkfifo"):
+            os.mkfifo(targetpath)
+        else:
+            raise ExtractError, "fifo not supported by system"
+
+    def makedev(self, tarinfo, targetpath):
+        """Make a character or block device called targetpath.
+        """
+        if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
+            raise ExtractError, "special devices not supported by system"
+
+        mode = tarinfo.mode
+        if tarinfo.isblk():
+            mode |= stat.S_IFBLK
+        else:
+            mode |= stat.S_IFCHR
+
+        os.mknod(targetpath, mode,
+                 os.makedev(tarinfo.devmajor, tarinfo.devminor))
+
+    def makelink(self, tarinfo, targetpath):
+        """Make a (symbolic) link called targetpath. If it cannot be created
+          (platform limitation), we try to make a copy of the referenced file
+          instead of a link.
+        """
+        linkpath = tarinfo.linkname
+        try:
+            if tarinfo.issym():
+                os.symlink(linkpath, targetpath)
+            else:
+                # See extract().
+                os.link(tarinfo._link_target, targetpath)
+        except AttributeError:
+            # os.symlink/os.link are missing on this platform; fall back
+            # to materializing a copy of the link's target.
+            if tarinfo.issym():
+                # A symlink target is relative to the link's directory.
+                linkpath = os.path.join(os.path.dirname(tarinfo.name),
+                                        linkpath)
+                linkpath = normpath(linkpath)
+
+            try:
+                # First choice: re-extract the target member from the
+                # archive itself.
+                self._extract_member(self.getmember(linkpath), targetpath)
+            except (EnvironmentError, KeyError), e:
+                # Last resort: copy an already extracted file from disk.
+                linkpath = os.path.normpath(linkpath)
+                try:
+                    shutil.copy2(linkpath, targetpath)
+                except EnvironmentError, e:
+                    raise IOError, "link could not be created"
+
+    def chown(self, tarinfo, targetpath):
+        """Set owner of targetpath according to tarinfo.
+        """
+        if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
+            # We have to be root to do so.
+            # Prefer the symbolic names; fall back to the numeric ids,
+            # then to our own gid/uid.
+            try:
+                g = grp.getgrnam(tarinfo.gname)[2]
+            except KeyError:
+                try:
+                    g = grp.getgrgid(tarinfo.gid)[2]
+                except KeyError:
+                    g = os.getgid()
+            try:
+                u = pwd.getpwnam(tarinfo.uname)[2]
+            except KeyError:
+                try:
+                    u = pwd.getpwuid(tarinfo.uid)[2]
+                except KeyError:
+                    u = os.getuid()
+            try:
+                if tarinfo.issym() and hasattr(os, "lchown"):
+                    # Change the link itself, not its target.
+                    os.lchown(targetpath, u, g)
+                else:
+                    if sys.platform != "os2emx":
+                        os.chown(targetpath, u, g)
+            except EnvironmentError, e:
+                raise ExtractError, "could not change owner"
+
+    def chmod(self, tarinfo, targetpath):
+        """Set file permissions of targetpath according to tarinfo.
+        """
+        if hasattr(os, 'chmod'):
+            try:
+                os.chmod(targetpath, tarinfo.mode)
+            except EnvironmentError, e:
+                raise ExtractError, "could not change mode"
+
+    def utime(self, tarinfo, targetpath):
+        """Set modification time of targetpath according to tarinfo.
+        """
+        if not hasattr(os, 'utime'):
+            return
+        if sys.platform == "win32" and tarinfo.isdir():
+            # According to msdn.microsoft.com, it is an error (EACCES)
+            # to use utime() on directories.
+            return
+        try:
+            os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
+        except EnvironmentError, e:
+            raise ExtractError, "could not change modification time"
+
+    #--------------------------------------------------------------------------
+
+    def next(self):
+        """Return the next member of the archive as a TarInfo object, when
+           TarFile is opened for reading. Return None if there is no more
+           available.
+        """
+        self._check("ra")
+        if self.firstmember is not None:
+            m = self.firstmember
+            self.firstmember = None
+            return m
+
+        # Read the next block.
+        self.fileobj.seek(self.offset)
+        while True:
+            buf = self.fileobj.read(BLOCKSIZE)
+            if not buf:
+                return None
+            try:
+                tarinfo = TarInfo.frombuf(buf)
+            except ValueError:
+                if self.ignore_zeros:
+                    if buf.count(NUL) == BLOCKSIZE:
+                        adj = "empty"
+                    else:
+                        adj = "invalid"
+                    self._dbg(2, "0x%X: %s block" % (self.offset, adj))
+                    self.offset += BLOCKSIZE
+                    continue
+                else:
+                    # Block is empty or unreadable.
+                    if self.offset == 0:
+                        # If the first block is invalid. That does not
+                        # look like a tar archive we can handle.
+                        raise ReadError,"empty, unreadable or compressed file"
+                    return None
+            break
+
+        # We shouldn't rely on this checksum, because some tar programs
+        # calculate it differently and it is merely validating the
+        # header block. We could just as well skip this part, which would
+        # have a slight effect on performance...
+        if tarinfo.chksum != calc_chksum(buf):
+            self._dbg(1, "tarfile: Bad Checksum %r" % tarinfo.name)
+
+        # Set the TarInfo object's offset to the current position of the
+        # TarFile and set self.offset to the position where the data blocks
+        # should begin.
+        tarinfo.offset = self.offset
+        self.offset += BLOCKSIZE
+
+        # Check if the TarInfo object has a typeflag for which a callback
+        # method is registered in the TYPE_METH. If so, then call it.
+        if tarinfo.type in self.TYPE_METH:
+            return self.TYPE_METH[tarinfo.type](self, tarinfo)
+
+        tarinfo.offset_data = self.offset
+        if tarinfo.isreg() or tarinfo.type not in SUPPORTED_TYPES:
+            # Skip the following data blocks.
+            self.offset += self._block(tarinfo.size)
+
+        if tarinfo.isreg() and tarinfo.name[:-1] == "/":
+            # some old tar programs don't know DIRTYPE
+            tarinfo.type = DIRTYPE
+
+        self.members.append(tarinfo)
+        return tarinfo
+
+    #--------------------------------------------------------------------------
+    # Below are some methods which are called for special typeflags in the
+    # next() method, e.g. for unwrapping GNU longname/longlink blocks. They
+    # are registered in TYPE_METH below. You can register your own methods
+    # with this mapping.
+    # A registered method is called with a TarInfo object as only argument.
+    #
+    # During its execution the method MUST perform the following tasks:
+    # 1. set tarinfo.offset_data to the position where the data blocks begin,
+    #    if there is data to follow.
+    # 2. set self.offset to the position where the next member's header will
+    #    begin.
+    # 3. append the tarinfo object to self.members, if it is supposed to appear
+    #    as a member of the TarFile object.
+    # 4. return tarinfo or another valid TarInfo object.
+
+    def proc_gnulong(self, tarinfo):
+        """Evaluate the blocks that hold a GNU longname
+           or longlink member.
+        """
+        # Read the payload: a null-terminated name spread over whole
+        # 512-byte blocks.
+        buf = ""
+        count = tarinfo.size
+        while count > 0:
+            block = self.fileobj.read(BLOCKSIZE)
+            buf += block
+            self.offset += BLOCKSIZE
+            count -= BLOCKSIZE
+
+        # Fetch the next header
+        # NOTE(review): assumes a valid member follows the longname
+        # blocks; in a truncated archive next() returns None here.
+        next = self.next()
+
+        # The extension applies to the following member; report the
+        # extension header's position as that member's offset.
+        next.offset = tarinfo.offset
+        if tarinfo.type == GNUTYPE_LONGNAME:
+            next.name = nts(buf)
+        elif tarinfo.type == GNUTYPE_LONGLINK:
+            next.linkname = nts(buf)
+
+        return next
+
+    def proc_sparse(self, tarinfo):
+        """Analyze a GNU sparse header plus extra headers.
+        """
+        buf = tarinfo.tobuf()
+        sp = _ringbuffer()
+        # The sparse struct array starts at byte 386 of the header block;
+        # each struct is two 12-byte octal fields (offset, numbytes).
+        pos = 386
+        lastpos = 0L
+        realpos = 0L
+        # There are 4 possible sparse structs in the
+        # first header.
+        for i in xrange(4):
+            try:
+                offset = int(buf[pos:pos + 12], 8)
+                numbytes = int(buf[pos + 12:pos + 24], 8)
+            except ValueError:
+                break
+            # Any gap between data regions is a hole.
+            if offset > lastpos:
+                sp.append(_hole(lastpos, offset - lastpos))
+            sp.append(_data(offset, numbytes, realpos))
+            realpos += numbytes
+            lastpos = offset + numbytes
+            pos += 24
+
+        # Byte 482 flags extended sparse headers; bytes 483..494 hold the
+        # file's real (expanded) size in octal.
+        isextended = ord(buf[482])
+        origsize = int(buf[483:495], 8)
+
+        # If the isextended flag is given,
+        # there are extra headers to process.
+        while isextended == 1:
+            buf = self.fileobj.read(BLOCKSIZE)
+            self.offset += BLOCKSIZE
+            pos = 0
+            # Each extension block carries up to 21 sparse structs.
+            for i in xrange(21):
+                try:
+                    offset = int(buf[pos:pos + 12], 8)
+                    numbytes = int(buf[pos + 12:pos + 24], 8)
+                except ValueError:
+                    break
+                if offset > lastpos:
+                    sp.append(_hole(lastpos, offset - lastpos))
+                sp.append(_data(offset, numbytes, realpos))
+                realpos += numbytes
+                lastpos = offset + numbytes
+                pos += 24
+            isextended = ord(buf[504])
+
+        # A trailing hole pads the file out to its original size.
+        if lastpos < origsize:
+            sp.append(_hole(lastpos, origsize - lastpos))
+
+        tarinfo.sparse = sp
+
+        # The archive stores only the non-hole bytes; advertise the
+        # original (expanded) size to callers.
+        tarinfo.offset_data = self.offset
+        self.offset += self._block(tarinfo.size)
+        tarinfo.size = origsize
+
+        self.members.append(tarinfo)
+        return tarinfo
+
+    # The type mapping for the next() method. The keys are single character
+    # strings, the typeflag. The values are methods which are called when
+    # next() encounters such a typeflag.
+    # Registered methods must honor the contract described in the comment
+    # block preceding proc_gnulong().
+    TYPE_METH = {
+        GNUTYPE_LONGNAME: proc_gnulong,
+        GNUTYPE_LONGLINK: proc_gnulong,
+        GNUTYPE_SPARSE:   proc_sparse
+    }
+
+    #--------------------------------------------------------------------------
+    # Little helper methods:
+
+    def _block(self, count):
+        """Round up a byte count by BLOCKSIZE and return it,
+           e.g. _block(834) => 1024.
+        """
+        blocks, remainder = divmod(count, BLOCKSIZE)
+        if remainder:
+            blocks += 1
+        return blocks * BLOCKSIZE
+
+    def _getmember(self, name, tarinfo=None):
+        """Find an archive member by name from bottom to top.
+           If tarinfo is given, it is used as the starting point.
+        """
+        # Ensure that all members have been loaded.
+        members = self.getmembers()
+
+        if tarinfo is None:
+            end = len(members)
+        else:
+            end = members.index(tarinfo)
+
+        for i in xrange(end - 1, -1, -1):
+            if name == members[i].name:
+                return members[i]
+
+    def _load(self):
+        """Read through the entire archive file and look for readable
+           members.
+        """
+        while True:
+            tarinfo = self.next()
+            if tarinfo is None:
+                break
+        self._loaded = True
+
+    def _check(self, mode=None):
+        """Check if TarFile is still open, and if the operation's mode
+           corresponds to TarFile's mode.
+        """
+        if self.closed:
+            raise IOError, "%s is closed" % self.__class__.__name__
+        if mode is not None and self._mode not in mode:
+            raise IOError, "bad operation for mode %r" % self._mode
+
+    def __iter__(self):
+        """Provide an iterator object.
+        """
+        if self._loaded:
+            return iter(self.members)
+        else:
+            return TarIter(self)
+
+    def _create_gnulong(self, name, type):
+        """Write a GNU longname/longlink member to the TarFile.
+           It consists of an extended tar header, with the length
+           of the longname as size, followed by data blocks,
+           which contain the longname as a null terminated string.
+        """
+        name += NUL
+
+        tarinfo = TarInfo()
+        tarinfo.name = "././@LongLink"
+        tarinfo.type = type
+        tarinfo.mode = 0
+        tarinfo.size = len(name)
+
+        # write extended header
+        self.fileobj.write(tarinfo.tobuf())
+        self.offset += BLOCKSIZE
+        # write name blocks
+        self.fileobj.write(name)
+        blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
+        if remainder > 0:
+            self.fileobj.write(NUL * (BLOCKSIZE - remainder))
+            blocks += 1
+        self.offset += blocks * BLOCKSIZE
+
+    def _dbg(self, level, msg):
+        """Write debugging output to sys.stderr.
+        """
+        if level <= self.debug:
+            print >> sys.stderr, msg
+# class TarFile
+
+class TarIter:
+    """Iterator Class.
+
+       for tarinfo in TarFile(...):
+           suite...
+    """
+
+    def __init__(self, tarfile):
+        """Construct a TarIter object.
+        """
+        self.tarfile = tarfile
+        self.index = 0
+    def __iter__(self):
+        """Return iterator object.
+        """
+        return self
+    def next(self):
+        """Return the next item using TarFile's next() method.
+           When all members have been read, set TarFile as _loaded.
+        """
+        # Fix for SF #1100429: Under rare circumstances it can
+        # happen that getmembers() is called during iteration,
+        # which will cause TarIter to stop prematurely.
+        if not self.tarfile._loaded:
+            tarinfo = self.tarfile.next()
+            if not tarinfo:
+                self.tarfile._loaded = True
+                raise StopIteration
+        else:
+            try:
+                tarinfo = self.tarfile.members[self.index]
+            except IndexError:
+                raise StopIteration
+        self.index += 1
+        return tarinfo
+
+# Helper classes for sparse file support
+class _section:
+    """Base class for _data and _hole.
+    """
+    def __init__(self, offset, size):
+        self.offset = offset
+        self.size = size
+    def __contains__(self, offset):
+        return self.offset <= offset < self.offset + self.size
+
class _data(_section):
    """A section of a sparse file that holds real data.
    """
    def __init__(self, offset, size, realpos):
        # realpos is where this section's bytes live inside the archive.
        self.realpos = realpos
        _section.__init__(self, offset, size)
+
class _hole(_section):
    """Represent a hole section in a sparse file.
    """
    # A hole carries no archive data; it needs no state beyond the
    # offset/size inherited from _section.
    pass
+
+class _ringbuffer(list):
+    """Ringbuffer class which increases performance
+       over a regular list.
+    """
+    def __init__(self):
+        self.idx = 0
+    def find(self, offset):
+        idx = self.idx
+        while True:
+            item = self[idx]
+            if offset in item:
+                break
+            idx += 1
+            if idx == len(self):
+                idx = 0
+            if idx == self.idx:
+                # End of File
+                return None
+        self.idx = idx
+        return item
+
+#---------------------------------------------
+# zipfile compatible TarFile class
+#---------------------------------------------
+TAR_PLAIN = 0           # zipfile.ZIP_STORED
+TAR_GZIPPED = 8         # zipfile.ZIP_DEFLATED
class TarFileCompat:
    """Present a TarFile through the interface of the standard
       module zipfile's ZipFile class.
    """
    def __init__(self, file, mode="r", compression=TAR_PLAIN):
        # Pick the opener matching the requested compression constant.
        if compression == TAR_PLAIN:
            self.tarfile = TarFile.taropen(file, mode)
        elif compression == TAR_GZIPPED:
            self.tarfile = TarFile.gzopen(file, mode)
        else:
            raise ValueError("unknown compression constant")
        if mode[0:1] == "r":
            # Decorate every member with zipfile-style attribute names.
            for member in self.tarfile.getmembers():
                member.filename = member.name
                member.file_size = member.size
                member.date_time = time.gmtime(member.mtime)[:6]
    def namelist(self):
        # Names of all regular members.
        return [member.name for member in self.infolist()]
    def infolist(self):
        # Only regular file members, mirroring zipfile's info records.
        return [member for member in self.tarfile.getmembers()
                if member.type in REGULAR_TYPES]
    def printdir(self):
        self.tarfile.list()
    def testzip(self):
        # Tar members carry no CRC, so there is nothing to verify.
        return
    def getinfo(self, name):
        return self.tarfile.getmember(name)
    def read(self, name):
        member = self.tarfile.getmember(name)
        return self.tarfile.extractfile(member).read()
    def write(self, filename, arcname=None, compress_type=None):
        self.tarfile.add(filename, arcname)
    def writestr(self, zinfo, bytes):
        import StringIO
        import calendar
        # Translate the zipfile-style info attributes back to tar names.
        zinfo.name = zinfo.filename
        zinfo.size = zinfo.file_size
        zinfo.mtime = calendar.timegm(zinfo.date_time)
        self.tarfile.addfile(zinfo, StringIO.StringIO(bytes))
    def close(self):
        self.tarfile.close()
+#class TarFileCompat
+
+#--------------------
+# exported functions
+#--------------------
def is_tarfile(name):
    """Tell whether name points to a tar archive that this module is
       able to handle: True if opening succeeds, False on TarError.
    """
    try:
        open(name).close()
    except TarError:
        return False
    return True
+
# Module-level convenience alias: tarfile.open() is TarFile.open().
# Note this shadows the builtin open() inside this module's namespace.
open = TarFile.open
diff --git a/depot_tools/release/win/python_24/Lib/telnetlib.py b/depot_tools/release/win/python_24/Lib/telnetlib.py
new file mode 100644
index 0000000..f073050b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/telnetlib.py
@@ -0,0 +1,655 @@
+"""TELNET client class.
+
+Based on RFC 854: TELNET Protocol Specification, by J. Postel and
+J. Reynolds
+
+Example:
+
+>>> from telnetlib import Telnet
+>>> tn = Telnet('www.python.org', 79)   # connect to finger port
+>>> tn.write('guido\r\n')
+>>> print tn.read_all()
+Login       Name               TTY         Idle    When    Where
+guido    Guido van Rossum      pts/2        <Dec  2 11:10> snag.cnri.reston..
+
+>>>
+
Note that read_some() won't read until eof -- it just reads some data
-- but it guarantees to read at least one byte unless EOF is hit.
+
+It is possible to pass a Telnet object to select.select() in order to
+wait until more data is available.  Note that in this case,
+read_eager() may return '' even if there was data on the socket,
+because the protocol negotiation may have eaten the data.  This is why
+EOFError is needed in some cases to distinguish between "no data" and
+"connection closed" (since the socket also appears ready for reading
+when it is closed).
+
+To do:
+- option negotiation
+- timeout should be intrinsic to the connection object instead of an
+  option on one of the read calls only
+
+"""
+
+
+# Imported modules
+import sys
+import socket
+import select
+
__all__ = ["Telnet"]

# Tunable parameters
DEBUGLEVEL = 0

# Telnet protocol defaults
TELNET_PORT = 23

# Telnet protocol characters (don't change)
IAC  = chr(255) # "Interpret As Command"
DONT = chr(254)
DO   = chr(253)
WONT = chr(252)
WILL = chr(251)
theNULL = chr(0)

SE  = chr(240)  # Subnegotiation End
NOP = chr(241)  # No Operation
DM  = chr(242)  # Data Mark
BRK = chr(243)  # Break
IP  = chr(244)  # Interrupt process
AO  = chr(245)  # Abort output
AYT = chr(246)  # Are You There
EC  = chr(247)  # Erase Character
EL  = chr(248)  # Erase Line
GA  = chr(249)  # Go Ahead
SB =  chr(250)  # Subnegotiation Begin


# Telnet protocol options code (don't change)
# These ones all come from arpa/telnet.h
BINARY = chr(0) # 8-bit data path
ECHO = chr(1) # echo
RCP = chr(2) # prepare to reconnect
SGA = chr(3) # suppress go ahead
NAMS = chr(4) # approximate message size
STATUS = chr(5) # give status
TM = chr(6) # timing mark
RCTE = chr(7) # remote controlled transmission and echo
NAOL = chr(8) # negotiate about output line width
NAOP = chr(9) # negotiate about output page size
NAOCRD = chr(10) # negotiate about CR disposition
NAOHTS = chr(11) # negotiate about horizontal tabstops
NAOHTD = chr(12) # negotiate about horizontal tab disposition
NAOFFD = chr(13) # negotiate about formfeed disposition
NAOVTS = chr(14) # negotiate about vertical tab stops
NAOVTD = chr(15) # negotiate about vertical tab disposition
NAOLFD = chr(16) # negotiate about output LF disposition
XASCII = chr(17) # extended ascii character set
LOGOUT = chr(18) # force logout
BM = chr(19) # byte macro
DET = chr(20) # data entry terminal
SUPDUP = chr(21) # supdup protocol
SUPDUPOUTPUT = chr(22) # supdup output
SNDLOC = chr(23) # send location
TTYPE = chr(24) # terminal type
EOR = chr(25) # end of record
TUID = chr(26) # TACACS user identification
OUTMRK = chr(27) # output marking
TTYLOC = chr(28) # terminal location number
VT3270REGIME = chr(29) # 3270 regime
X3PAD = chr(30) # X.3 PAD
NAWS = chr(31) # window size
TSPEED = chr(32) # terminal speed
LFLOW = chr(33) # remote flow control
LINEMODE = chr(34) # Linemode option
XDISPLOC = chr(35) # X Display Location
OLD_ENVIRON = chr(36) # Old - Environment variables
AUTHENTICATION = chr(37) # Authenticate
ENCRYPT = chr(38) # Encryption option
NEW_ENVIRON = chr(39) # New - Environment variables
# the following ones come from
# http://www.iana.org/assignments/telnet-options
# Unfortunately, that document does not assign identifiers
# to all of them, so we are making them up
TN3270E = chr(40) # TN3270E
XAUTH = chr(41) # XAUTH
CHARSET = chr(42) # CHARSET
RSP = chr(43) # Telnet Remote Serial Port
COM_PORT_OPTION = chr(44) # Com Port Control Option
SUPPRESS_LOCAL_ECHO = chr(45) # Telnet Suppress Local Echo
TLS = chr(46) # Telnet Start TLS
KERMIT = chr(47) # KERMIT
SEND_URL = chr(48) # SEND-URL
FORWARD_X = chr(49) # FORWARD_X
PRAGMA_LOGON = chr(138) # TELOPT PRAGMA LOGON
SSPI_LOGON = chr(139) # TELOPT SSPI LOGON
PRAGMA_HEARTBEAT = chr(140) # TELOPT PRAGMA HEARTBEAT
EXOPL = chr(255) # Extended-Options-List
NOOPT = chr(0)
+
class Telnet:

    """Telnet interface class.

    An instance of this class represents a connection to a telnet
    server.  The instance is initially not connected; the open()
    method must be used to establish a connection.  Alternatively, the
    host name and optional port number can be passed to the
    constructor, too.

    Don't try to reopen an already connected instance.

    This class has many read_*() methods.  Note that some of them
    raise EOFError when the end of the connection is read, because
    they can return an empty string for other reasons.  See the
    individual doc strings.

    read_until(expected, [timeout])
        Read until the expected string has been seen, or a timeout is
        hit (default is no timeout); may block.

    read_all()
        Read all data until EOF; may block.

    read_some()
        Read at least one byte or EOF; may block.

    read_very_eager()
        Read all data available already queued or on the socket,
        without blocking.

    read_eager()
        Read either data already queued or some data available on the
        socket, without blocking.

    read_lazy()
        Read all data in the raw queue (processing it first), without
        doing any socket I/O.

    read_very_lazy()
        Reads all data in the cooked queue, without doing any socket
        I/O.

    read_sb_data()
        Reads available data between SB ... SE sequence. Don't block.

    set_option_negotiation_callback(callback)
        Each time a telnet option is read on the input flow, this callback
        (if set) is called with the following parameters :
        callback(telnet socket, command, option)
            option will be chr(0) when there is no option.
        No other action is done afterwards by telnetlib.

    """

    def __init__(self, host=None, port=0):
        """Constructor.

        When called without arguments, create an unconnected instance.
        With a hostname argument, it connects the instance; a port
        number is optional.

        """
        self.debuglevel = DEBUGLEVEL
        self.host = host
        self.port = port
        self.sock = None        # socket object, created by open()
        self.rawq = ''          # raw bytes as received from the socket
        self.irawq = 0          # index of next unprocessed byte in rawq
        self.cookedq = ''       # data after IAC processing, ready to read
        self.eof = 0            # set once the remote end closes
        self.iacseq = '' # Buffer for IAC sequence.
        self.sb = 0 # flag for SB and SE sequence.
        self.sbdataq = ''       # accumulated SB ... SE subnegotiation data
        self.option_callback = None
        if host is not None:
            self.open(host, port)

    def open(self, host, port=0):
        """Connect to a host.

        The optional second argument is the port number, which
        defaults to the standard telnet port (23).

        Don't try to reopen an already connected instance.

        """
        self.eof = 0
        if not port:
            port = TELNET_PORT
        self.host = host
        self.port = port
        msg = "getaddrinfo returns an empty list"
        # Try each address returned by getaddrinfo until one connects;
        # msg is rebound to the last socket.error for the final raise.
        for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                self.sock = socket.socket(af, socktype, proto)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            raise socket.error, msg

    def __del__(self):
        """Destructor -- close the connection."""
        self.close()

    def msg(self, msg, *args):
        """Print a debug message, when the debug level is > 0.

        If extra arguments are present, they are substituted in the
        message using the standard string formatting operator.

        """
        if self.debuglevel > 0:
            print 'Telnet(%s,%d):' % (self.host, self.port),
            if args:
                print msg % args
            else:
                print msg

    def set_debuglevel(self, debuglevel):
        """Set the debug level.

        The higher it is, the more debug output you get (on sys.stdout).

        """
        self.debuglevel = debuglevel

    def close(self):
        """Close the connection."""
        if self.sock:
            self.sock.close()
        # NOTE: sock is reset to 0, not None; other methods only test
        # its truthiness, so both values behave the same here.
        self.sock = 0
        self.eof = 1
        self.iacseq = ''
        self.sb = 0

    def get_socket(self):
        """Return the socket object used internally."""
        return self.sock

    def fileno(self):
        """Return the fileno() of the socket object used internally."""
        return self.sock.fileno()

    def write(self, buffer):
        """Write a string to the socket, doubling any IAC characters.

        Can block if the connection is blocked.  May raise
        socket.error if the connection is closed.

        """
        if IAC in buffer:
            buffer = buffer.replace(IAC, IAC+IAC)
        self.msg("send %r", buffer)
        self.sock.sendall(buffer)

    def read_until(self, match, timeout=None):
        """Read until a given string is encountered or until timeout.

        When no match is found, return whatever is available instead,
        possibly the empty string.  Raise EOFError if the connection
        is closed and no cooked data is available.

        """
        n = len(match)
        self.process_rawq()
        i = self.cookedq.find(match)
        if i >= 0:
            i = i+n
            buf = self.cookedq[:i]
            self.cookedq = self.cookedq[i:]
            return buf
        # Block in select() until more data arrives or the timeout expires.
        s_reply = ([self], [], [])
        s_args = s_reply
        if timeout is not None:
            s_args = s_args + (timeout,)
        while not self.eof and select.select(*s_args) == s_reply:
            # Resume the search just before the old tail, in case the
            # match straddles the boundary of the new data.
            i = max(0, len(self.cookedq)-n)
            self.fill_rawq()
            self.process_rawq()
            i = self.cookedq.find(match, i)
            if i >= 0:
                i = i+n
                buf = self.cookedq[:i]
                self.cookedq = self.cookedq[i:]
                return buf
        return self.read_very_lazy()

    def read_all(self):
        """Read all data until EOF; block until connection closed."""
        self.process_rawq()
        while not self.eof:
            self.fill_rawq()
            self.process_rawq()
        buf = self.cookedq
        self.cookedq = ''
        return buf

    def read_some(self):
        """Read at least one byte of cooked data unless EOF is hit.

        Return '' if EOF is hit.  Block if no data is immediately
        available.

        """
        self.process_rawq()
        while not self.cookedq and not self.eof:
            self.fill_rawq()
            self.process_rawq()
        buf = self.cookedq
        self.cookedq = ''
        return buf

    def read_very_eager(self):
        """Read everything that's possible without blocking in I/O (eager).

        Raise EOFError if connection closed and no cooked data
        available.  Return '' if no cooked data available otherwise.
        Don't block unless in the midst of an IAC sequence.

        """
        self.process_rawq()
        while not self.eof and self.sock_avail():
            self.fill_rawq()
            self.process_rawq()
        return self.read_very_lazy()

    def read_eager(self):
        """Read readily available data.

        Raise EOFError if connection closed and no cooked data
        available.  Return '' if no cooked data available otherwise.
        Don't block unless in the midst of an IAC sequence.

        """
        self.process_rawq()
        while not self.cookedq and not self.eof and self.sock_avail():
            self.fill_rawq()
            self.process_rawq()
        return self.read_very_lazy()

    def read_lazy(self):
        """Process and return data that's already in the queues (lazy).

        Raise EOFError if connection closed and no data available.
        Return '' if no cooked data available otherwise.  Don't block
        unless in the midst of an IAC sequence.

        """
        self.process_rawq()
        return self.read_very_lazy()

    def read_very_lazy(self):
        """Return any data available in the cooked queue (very lazy).

        Raise EOFError if connection closed and no data available.
        Return '' if no cooked data available otherwise.  Don't block.

        """
        buf = self.cookedq
        self.cookedq = ''
        if not buf and self.eof and not self.rawq:
            raise EOFError, 'telnet connection closed'
        return buf

    def read_sb_data(self):
        """Return any data available in the SB ... SE queue.

        Return '' if no SB ... SE available. Should only be called
        after seeing a SB or SE command. When a new SB command is
        found, old unread SB data will be discarded. Don't block.

        """
        buf = self.sbdataq
        self.sbdataq = ''
        return buf

    def set_option_negotiation_callback(self, callback):
        """Provide a callback function called after each receipt of a telnet option."""
        self.option_callback = callback

    def process_rawq(self):
        """Transfer from raw queue to cooked queue.

        Set self.eof when connection is closed.  Don't block unless in
        the midst of an IAC sequence.

        """
        # buf[0] collects ordinary (cooked) data; buf[1] collects data
        # seen while inside an SB ... SE subnegotiation (self.sb == 1).
        buf = ['', '']
        try:
            while self.rawq:
                c = self.rawq_getchar()
                if not self.iacseq:
                    if c == theNULL:
                        continue
                    # \021 is ASCII DC1 (XON); discarded like NUL.
                    if c == "\021":
                        continue
                    if c != IAC:
                        buf[self.sb] = buf[self.sb] + c
                        continue
                    else:
                        self.iacseq += c
                elif len(self.iacseq) == 1:
                    'IAC: IAC CMD [OPTION only for WILL/WONT/DO/DONT]'
                    if c in (DO, DONT, WILL, WONT):
                        self.iacseq += c
                        continue

                    self.iacseq = ''
                    if c == IAC:
                        # Escaped IAC: a literal 255 byte in the data.
                        buf[self.sb] = buf[self.sb] + c
                    else:
                        if c == SB: # SB ... SE start.
                            self.sb = 1
                            self.sbdataq = ''
                        elif c == SE:
                            self.sb = 0
                            self.sbdataq = self.sbdataq + buf[1]
                            buf[1] = ''
                        if self.option_callback:
                            # Callback is supposed to look into
                            # the sbdataq
                            self.option_callback(self.sock, c, NOOPT)
                        else:
                            # We can't offer automatic processing of
                            # suboptions. Alas, we should not get any
                            # unless we did a WILL/DO before.
                            self.msg('IAC %d not recognized' % ord(c))
                elif len(self.iacseq) == 2:
                    cmd = self.iacseq[1]
                    self.iacseq = ''
                    opt = c
                    if cmd in (DO, DONT):
                        self.msg('IAC %s %d',
                            cmd == DO and 'DO' or 'DONT', ord(opt))
                        if self.option_callback:
                            self.option_callback(self.sock, cmd, opt)
                        else:
                            # Refuse every request by default.
                            self.sock.sendall(IAC + WONT + opt)
                    elif cmd in (WILL, WONT):
                        self.msg('IAC %s %d',
                            cmd == WILL and 'WILL' or 'WONT', ord(opt))
                        if self.option_callback:
                            self.option_callback(self.sock, cmd, opt)
                        else:
                            # Refuse every offer by default.
                            self.sock.sendall(IAC + DONT + opt)
        except EOFError: # raised by self.rawq_getchar()
            self.iacseq = '' # Reset on EOF
            self.sb = 0
            pass
        self.cookedq = self.cookedq + buf[0]
        self.sbdataq = self.sbdataq + buf[1]

    def rawq_getchar(self):
        """Get next char from raw queue.

        Block if no data is immediately available.  Raise EOFError
        when connection is closed.

        """
        if not self.rawq:
            self.fill_rawq()
            if self.eof:
                raise EOFError
        c = self.rawq[self.irawq]
        self.irawq = self.irawq + 1
        if self.irawq >= len(self.rawq):
            self.rawq = ''
            self.irawq = 0
        return c

    def fill_rawq(self):
        """Fill raw queue from exactly one recv() system call.

        Block if no data is immediately available.  Set self.eof when
        connection is closed.

        """
        if self.irawq >= len(self.rawq):
            self.rawq = ''
            self.irawq = 0
        # The buffer size should be fairly small so as to avoid quadratic
        # behavior in process_rawq() above
        buf = self.sock.recv(50)
        self.msg("recv %r", buf)
        self.eof = (not buf)
        self.rawq = self.rawq + buf

    def sock_avail(self):
        """Test whether data is available on the socket."""
        return select.select([self], [], [], 0) == ([self], [], [])

    def interact(self):
        """Interaction function, emulates a very dumb telnet client."""
        if sys.platform == "win32":
            # Windows cannot select() on stdin; use the threaded variant.
            self.mt_interact()
            return
        while 1:
            rfd, wfd, xfd = select.select([self, sys.stdin], [], [])
            if self in rfd:
                try:
                    text = self.read_eager()
                except EOFError:
                    print '*** Connection closed by remote host ***'
                    break
                if text:
                    sys.stdout.write(text)
                    sys.stdout.flush()
            if sys.stdin in rfd:
                line = sys.stdin.readline()
                if not line:
                    break
                self.write(line)

    def mt_interact(self):
        """Multithreaded version of interact()."""
        import thread
        thread.start_new_thread(self.listener, ())
        while 1:
            line = sys.stdin.readline()
            if not line:
                break
            self.write(line)

    def listener(self):
        """Helper for mt_interact() -- this executes in the other thread."""
        while 1:
            try:
                data = self.read_eager()
            except EOFError:
                print '*** Connection closed by remote host ***'
                return
            if data:
                sys.stdout.write(data)
            else:
                sys.stdout.flush()

    def expect(self, list, timeout=None):
        """Read until one from a list of a regular expressions matches.

        The first argument is a list of regular expressions, either
        compiled (re.RegexObject instances) or uncompiled (strings).
        The optional second argument is a timeout, in seconds; default
        is no timeout.

        Return a tuple of three items: the index in the list of the
        first regular expression that matches; the match object
        returned; and the text read up till and including the match.

        If EOF is read and no text was read, raise EOFError.
        Otherwise, when nothing matches, return (-1, None, text) where
        text is the text received so far (may be the empty string if a
        timeout happened).

        If a regular expression ends with a greedy match (e.g. '.*')
        or if more than one expression can match the same input, the
        results are undeterministic, and may depend on the I/O timing.

        """
        # The re module is imported lazily, and only when at least one
        # pattern still needs compiling.  (Note: the parameter name
        # 'list' shadows the builtin within this method.)
        re = None
        list = list[:]
        indices = range(len(list))
        for i in indices:
            if not hasattr(list[i], "search"):
                if not re: import re
                list[i] = re.compile(list[i])
        while 1:
            self.process_rawq()
            for i in indices:
                m = list[i].search(self.cookedq)
                if m:
                    e = m.end()
                    text = self.cookedq[:e]
                    self.cookedq = self.cookedq[e:]
                    return (i, m, text)
            if self.eof:
                break
            if timeout is not None:
                r, w, x = select.select([self.fileno()], [], [], timeout)
                if not r:
                    break
            self.fill_rawq()
        text = self.read_very_lazy()
        if not text and self.eof:
            raise EOFError
        return (-1, None, text)
+
+
def test():
    """Small interactive test driver for telnetlib.

    Usage: python telnetlib.py [-d] ... [host [port]]

    Default host is localhost; default port is 23.

    """
    debuglevel = 0
    # Count and consume leading -d flags.
    while sys.argv[1:] and sys.argv[1] == '-d':
        debuglevel += 1
        del sys.argv[1]
    host = 'localhost'
    port = 0
    args = sys.argv[1:]
    if args:
        host = args[0]
    if len(args) > 1:
        try:
            port = int(args[1])
        except ValueError:
            # Not numeric: resolve it as a service name.
            port = socket.getservbyname(args[1], 'tcp')
    conn = Telnet()
    conn.set_debuglevel(debuglevel)
    conn.open(host, port)
    conn.interact()
    conn.close()
+
# Run the interactive test client when executed as a script.
if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/tempfile.py b/depot_tools/release/win/python_24/Lib/tempfile.py
new file mode 100644
index 0000000..dd7e864
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/tempfile.py
@@ -0,0 +1,472 @@
+"""Temporary files.
+
+This module provides generic, low- and high-level interfaces for
+creating temporary files and directories.  The interfaces listed
+as "safe" just below can be used without fear of race conditions.
+Those listed as "unsafe" cannot, and are provided for backward
+compatibility only.
+
+This module also provides some data items to the user:
+
+  TMP_MAX  - maximum number of names that will be tried before
+             giving up.
+  template - the default prefix for all temporary names.
+             You may change this to control the default prefix.
+  tempdir  - If this is set to a string before the first use of
+             any routine from this module, it will be considered as
+             another candidate location to store temporary files.
+"""
+
+__all__ = [
+    "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
+    "mkstemp", "mkdtemp",                  # low level safe interfaces
+    "mktemp",                              # deprecated unsafe interface
+    "TMP_MAX", "gettempprefix",            # constants
+    "tempdir", "gettempdir"
+   ]
+
+
+# Imports.
+
+import os as _os
+import errno as _errno
+from random import Random as _Random
+
+if _os.name == 'mac':
+    import Carbon.Folder as _Folder
+    import Carbon.Folders as _Folders
+
try:
    import fcntl as _fcntl
except ImportError:
    # No fcntl module (e.g. Windows): close-on-exec cannot be set this
    # way, so fall back to a no-op.
    def _set_cloexec(fd):
        pass
else:
    def _set_cloexec(fd):
        # Mark fd close-on-exec so it is not inherited across exec()
        # in child processes.  Failure to read the flags is ignored.
        try:
            flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0)
        except IOError:
            pass
        else:
            # flags read successfully, modify
            flags |= _fcntl.FD_CLOEXEC
            _fcntl.fcntl(fd, _fcntl.F_SETFD, flags)
+
+
+try:
+    import thread as _thread
+except ImportError:
+    import dummy_thread as _thread
+_allocate_lock = _thread.allocate_lock
+
# os.open() flags: open read/write, create the file, and fail if it
# already exists (O_EXCL makes creation race-free).
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
if hasattr(_os, 'O_NOINHERIT'):
    # Windows: keep the handle from being inherited by child processes.
    _text_openflags |= _os.O_NOINHERIT
if hasattr(_os, 'O_NOFOLLOW'):
    # Refuse to follow a symlink at the final path component.
    _text_openflags |= _os.O_NOFOLLOW

_bin_openflags = _text_openflags
if hasattr(_os, 'O_BINARY'):
    # Windows: suppress CR/LF translation for binary-mode files.
    _bin_openflags |= _os.O_BINARY

# Maximum number of names tried before giving up (documented constant).
if hasattr(_os, 'TMP_MAX'):
    TMP_MAX = _os.TMP_MAX
else:
    TMP_MAX = 10000

# Default prefix for temporary names; users may rebind this.
template = "tmp"

tempdir = None

# Internal routines.

# Guards one-time lazy initialization of module globals (used by
# gettempdir() and _get_candidate_names()).
_once_lock = _allocate_lock()
+
+if hasattr(_os, "lstat"):
+    _stat = _os.lstat
+elif hasattr(_os, "stat"):
+    _stat = _os.stat
+else:
+    # Fallback.  All we need is something that raises os.error if the
+    # file doesn't exist.
+    def _stat(fn):
+        try:
+            f = open(fn)
+        except IOError:
+            raise _os.error
+        f.close()
+
def _exists(fn):
    """Return True if path fn exists (according to _stat), else False."""
    try:
        _stat(fn)
        return True
    except _os.error:
        return False
+
class _RandomNameSequence:
    """Iterator yielding an endless stream of unpredictable six-character
    strings that can safely be embedded in file names.  One instance may
    be shared by several threads; access to the RNG is serialized by a
    per-instance lock."""

    characters = ("abcdefghijklmnopqrstuvwxyz" +
                  "ABCDEFGHIJKLMNOPQRSTUVWXYZ" +
                  "0123456789-_")

    def __init__(self):
        # Lock serializing use of the random generator across threads.
        self.mutex = _allocate_lock()
        self.rng = _Random()
        self.normcase = _os.path.normcase

    def __iter__(self):
        return self

    def next(self):
        """Return the next random name (normcased, six characters)."""
        pick = self.rng.choice
        alphabet = self.characters

        self.mutex.acquire()
        try:
            picked = []
            for _unused in range(6):
                picked.append(pick(alphabet))
        finally:
            self.mutex.release()

        return self.normcase(''.join(picked))
+
def _candidate_tempdir_list():
    """Return the list of directories that _get_default_tempdir will
    probe, in order of preference."""

    candidates = []

    # Environment overrides come first.
    for var in ('TMPDIR', 'TEMP', 'TMP'):
        value = _os.getenv(var)
        if value:
            candidates.append(value)

    # Then platform-conventional locations.
    if _os.name == 'mac':
        try:
            fsr = _Folder.FSFindFolder(_Folders.kOnSystemDisk,
                                              _Folders.kTemporaryFolderType, 1)
            candidates.append(fsr.as_pathname())
        except _Folder.error:
            pass
    elif _os.name == 'riscos':
        value = _os.getenv('Wimp$ScrapDir')
        if value:
            candidates.append(value)
    elif _os.name == 'nt':
        candidates += [r'c:\temp', r'c:\tmp', r'\temp', r'\tmp']
    else:
        candidates += ['/tmp', '/var/tmp', '/usr/tmp']

    # As a last resort, fall back to the current directory.
    try:
        candidates.append(_os.getcwd())
    except (AttributeError, _os.error):
        candidates.append(_os.curdir)

    return candidates
+
def _get_default_tempdir():
    """Calculate the default directory to use for temporary files.
    This routine should be called exactly once.

    We determine whether or not a candidate temp dir is usable by
    trying to create and write to a file in that directory.  If this
    is successful, the test file is deleted.  To prevent denial of
    service, the name of the test file must be randomized."""

    namer = _RandomNameSequence()
    dirlist = _candidate_tempdir_list()
    flags = _text_openflags

    for dir in dirlist:
        if dir != _os.curdir:
            dir = _os.path.normcase(_os.path.abspath(dir))
        # Try only a few names per directory.
        for seq in xrange(100):
            name = namer.next()
            filename = _os.path.join(dir, name)
            try:
                # O_EXCL in flags guarantees we created the file ourselves.
                fd = _os.open(filename, flags, 0600)
                fp = _os.fdopen(fd, 'w')
                fp.write('blat')
                fp.close()
                _os.unlink(filename)
                del fp, fd
                return dir
            except (OSError, IOError), e:
                # EEXIST means the random name collided: retry with a new
                # name.  Any other error makes this directory unusable, so
                # move on to the next candidate.
                if e[0] != _errno.EEXIST:
                    break # no point trying more names in this directory
                pass
    raise IOError, (_errno.ENOENT,
                    ("No usable temporary directory found in %s" % dirlist))
+
# Shared _RandomNameSequence, created lazily under _once_lock.
_name_sequence = None

def _get_candidate_names():
    """Common setup sequence for all user-callable interfaces."""

    global _name_sequence
    if _name_sequence is None:
        # Double-checked locking: cheap unlocked test first, then re-test
        # under the lock so only one thread creates the shared sequence.
        _once_lock.acquire()
        try:
            if _name_sequence is None:
                _name_sequence = _RandomNameSequence()
        finally:
            _once_lock.release()
    return _name_sequence
+
+
def _mkstemp_inner(dir, pre, suf, flags):
    """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile.

    Tries up to TMP_MAX random names in 'dir'; returns (fd, abspath).
    Raises IOError(EEXIST) if every name collides."""

    names = _get_candidate_names()

    for seq in xrange(TMP_MAX):
        name = names.next()
        file = _os.path.join(dir, pre + name + suf)
        try:
            # Mode 0600 plus O_EXCL in flags: the file is private to the
            # creating user and cannot already exist, so this is race-free.
            fd = _os.open(file, flags, 0600)
            _set_cloexec(fd)
            return (fd, _os.path.abspath(file))
        except OSError, e:
            if e.errno == _errno.EEXIST:
                continue # try again
            raise

    raise IOError, (_errno.EEXIST, "No usable temporary file name found")
+
+
+# User visible interfaces.
+
def gettempprefix():
    """Accessor for tempfile.template: the default prefix used for
    temporary names."""
    return template
+
# NOTE: the redundant second "tempdir = None" that used to sit here was
# removed; the global is already initialized (to None) where it is first
# defined above, and nothing between assigns to it.

def gettempdir():
    """Accessor for tempfile.tempdir: return the default directory for
    temporary files, computing and caching it on first use.

    The result is stored in the module global 'tempdir'; a user may
    pre-set that global to force a particular directory.
    """
    global tempdir
    if tempdir is None:
        # Double-checked locking so only one thread runs the (relatively
        # expensive) directory probe in _get_default_tempdir().
        _once_lock.acquire()
        try:
            if tempdir is None:
                tempdir = _get_default_tempdir()
        finally:
            _once_lock.release()
    return tempdir
+
+def mkstemp(suffix="", prefix=template, dir=None, text=False):
+    """mkstemp([suffix, [prefix, [dir, [text]]]])
+    User-callable function to create and return a unique temporary
+    file.  The return value is a pair (fd, name) where fd is the
+    file descriptor returned by os.open, and name is the filename.
+
+    If 'suffix' is specified, the file name will end with that suffix,
+    otherwise there will be no suffix.
+
+    If 'prefix' is specified, the file name will begin with that prefix,
+    otherwise a default prefix is used.
+
+    If 'dir' is specified, the file will be created in that directory,
+    otherwise a default directory is used.
+
+    If 'text' is specified and true, the file is opened in text
+    mode.  Else (the default) the file is opened in binary mode.  On
+    some operating systems, this makes no difference.
+
+    The file is readable and writable only by the creating user ID.
+    If the operating system uses permission bits to indicate whether a
+    file is executable, the file is executable by no one. The file
+    descriptor is not inherited by children of this process.
+
+    Caller is responsible for deleting the file when done with it.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    if text:
+        flags = _text_openflags
+    else:
+        flags = _bin_openflags
+
+    return _mkstemp_inner(dir, prefix, suffix, flags)
+
+
+def mkdtemp(suffix="", prefix=template, dir=None):
+    """mkdtemp([suffix, [prefix, [dir]]])
+    User-callable function to create and return a unique temporary
+    directory.  The return value is the pathname of the directory.
+
+    Arguments are as for mkstemp, except that the 'text' argument is
+    not accepted.
+
+    The directory is readable, writable, and searchable only by the
+    creating user.
+
+    Caller is responsible for deleting the directory when done with it.
+    """
+
+    if dir is None:
+        dir = gettempdir()
+
+    names = _get_candidate_names()
+
+    for seq in xrange(TMP_MAX):
+        name = names.next()
+        file = _os.path.join(dir, prefix + name + suffix)
+        try:
+            _os.mkdir(file, 0700)
+            return file
+        except OSError, e:
+            if e.errno == _errno.EEXIST:
+                continue # try again
+            raise
+
+    raise IOError, (_errno.EEXIST, "No usable temporary directory name found")
+
+def mktemp(suffix="", prefix=template, dir=None):
+    """mktemp([suffix, [prefix, [dir]]])
+    User-callable function to return a unique temporary file name.  The
+    file is not created.
+
+    Arguments are as for mkstemp, except that the 'text' argument is
+    not accepted.
+
+    This function is unsafe and should not be used.  The file name
+    refers to a file that did not exist at some point, but by the time
+    you get around to creating it, someone else may have beaten you to
+    the punch.
+    """
+
+##    from warnings import warn as _warn
+##    _warn("mktemp is a potential security risk to your program",
+##          RuntimeWarning, stacklevel=2)
+
+    if dir is None:
+        dir = gettempdir()
+
+    names = _get_candidate_names()
+    for seq in xrange(TMP_MAX):
+        name = names.next()
+        file = _os.path.join(dir, prefix + name + suffix)
+        if not _exists(file):
+            return file
+
+    raise IOError, (_errno.EEXIST, "No usable temporary filename found")
+
class _TemporaryFileWrapper:
    """Temporary file wrapper

    This class provides a wrapper around files opened for
    temporary use.  In particular, it seeks to automatically
    remove the file when it is no longer needed.
    """

    def __init__(self, file, name):
        # file -- the underlying (already open) file object.
        # name -- path of the file; exposed to users as .name.
        self.file = file
        self.name = name
        # Guards against double-unlink (explicit close() plus __del__).
        self.close_called = False

    def __getattr__(self, name):
        # Delegate every other attribute to the wrapped file object, and
        # cache the result on self so __getattr__ fires only once per
        # attribute name.
        # NOTE(review): plain-integer attributes are deliberately NOT
        # cached -- presumably because integer-valued file attributes
        # (e.g. softspace) are mutable and must be re-fetched; confirm
        # before changing this condition.
        file = self.__dict__['file']
        a = getattr(file, name)
        if type(a) != type(0):
            setattr(self, name, a)
        return a

    # NT provides delete-on-close as a primitive, so we don't need
    # the wrapper to do anything special.  We still use it so that
    # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
    if _os.name != 'nt':

        # Cache the unlinker so we don't get spurious errors at
        # shutdown when the module-level "os" is None'd out.  Note
        # that this must be referenced as self.unlink, because the
        # name TemporaryFileWrapper may also get None'd out before
        # __del__ is called.
        unlink = _os.unlink

        def close(self):
            # Close the underlying file and delete it, exactly once.
            if not self.close_called:
                self.close_called = True
                self.file.close()
                self.unlink(self.name)

        def __del__(self):
            self.close()
+
def NamedTemporaryFile(mode='w+b', bufsize=-1, suffix="",
                       prefix=template, dir=None):
    """Create and return a temporary file.
    Arguments:
    'prefix', 'suffix', 'dir' -- as for mkstemp.
    'mode'    -- the mode argument to os.fdopen (default "w+b").
    'bufsize' -- the buffer size argument to os.fdopen (default -1).
    The file is created exactly as mkstemp() would create it.

    Returns a file-like wrapper object whose .name attribute holds the
    file's name.  The file is deleted automatically when it is closed.
    """

    if dir is None:
        dir = gettempdir()

    if 'b' in mode:
        flags = _bin_openflags
    else:
        flags = _text_openflags

    if _os.name == 'nt':
        # Windows deletes-on-close natively when O_TEMPORARY is set.
        flags |= _os.O_TEMPORARY

    (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
    wrapped = _os.fdopen(fd, mode, bufsize)
    return _TemporaryFileWrapper(wrapped, name)
+
if _os.name != 'posix' or _os.sys.platform == 'cygwin':
    # On non-POSIX and Cygwin systems, assume that we cannot unlink a file
    # while it is open.
    TemporaryFile = NamedTemporaryFile

else:
    def TemporaryFile(mode='w+b', bufsize=-1, suffix="",
                      prefix=template, dir=None):
        """Create and return a temporary file.
        Arguments:
        'prefix', 'suffix', 'directory' -- as for mkstemp.
        'mode' -- the mode argument to os.fdopen (default "w+b").
        'bufsize' -- the buffer size argument to os.fdopen (default -1).
        The file is created as mkstemp() would do it.

        Returns an object with a file-like interface.  The file has no
        name, and will cease to exist when it is closed.
        """

        if dir is None:
            dir = gettempdir()

        if 'b' in mode:
            flags = _bin_openflags
        else:
            flags = _text_openflags

        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
        try:
            # Unlink immediately: the anonymous file lives on while the
            # descriptor is open and vanishes when it is closed.
            _os.unlink(name)
            return _os.fdopen(fd, mode, bufsize)
        except:
            # fdopen/unlink failed -- don't leak the descriptor; the
            # original exception is re-raised below.
            _os.close(fd)
            raise
diff --git a/depot_tools/release/win/python_24/Lib/textwrap.py b/depot_tools/release/win/python_24/Lib/textwrap.py
new file mode 100644
index 0000000..7c6ad6902
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/textwrap.py
@@ -0,0 +1,354 @@
+"""Text wrapping and filling.
+"""
+
+# Copyright (C) 1999-2001 Gregory P. Ward.
+# Copyright (C) 2002, 2003 Python Software Foundation.
+# Written by Greg Ward <gward@python.net>
+
+__revision__ = "$Id: textwrap.py,v 1.35.4.1 2005/03/05 02:38:32 gward Exp $"
+
+import string, re
+
+# Do the right thing with boolean values for all known Python versions
+# (so this module can be copied to projects that don't depend on Python
+# 2.3, e.g. Optik and Docutils).
try:
    True, False
except NameError:
    # Pre-bool Pythons (before 2.2.1) lack True/False; integers are an
    # adequate stand-in for this module's purposes.
    (True, False) = (1, 0)

__all__ = ['TextWrapper', 'wrap', 'fill']

# Hardcode the recognized whitespace characters to the US-ASCII
# whitespace characters.  The main reason for doing this is that in
# ISO-8859-1, 0xa0 is non-breaking whitespace, so in certain locales
# that character winds up in string.whitespace.  Respecting
# string.whitespace in those cases would 1) make textwrap treat 0xa0 the
# same as any other whitespace char, which is clearly wrong (it's a
# *non-breaking* space), 2) possibly cause problems with Unicode,
# since 0xa0 is not in range(128).
_whitespace = '\t\n\x0b\x0c\r '
+
class TextWrapper:
    """
    Object for wrapping/filling text.  The public interface consists of
    the wrap() and fill() methods; the other methods are just there for
    subclasses to override in order to tweak the default behaviour.
    If you want to completely replace the main wrapping algorithm,
    you'll probably have to override _wrap_chunks().

    Several instance attributes control various aspects of wrapping:
      width (default: 70)
        the maximum width of wrapped lines (unless break_long_words
        is false)
      initial_indent (default: "")
        string that will be prepended to the first line of wrapped
        output.  Counts towards the line's width.
      subsequent_indent (default: "")
        string that will be prepended to all lines save the first
        of wrapped output; also counts towards each line's width.
      expand_tabs (default: true)
        Expand tabs in input text to spaces before further processing.
        Each tab will become 1 .. 8 spaces, depending on its position in
        its line.  If false, each tab is treated as a single character.
      replace_whitespace (default: true)
        Replace all whitespace characters in the input text by spaces
        after tab expansion.  Note that if expand_tabs is false and
        replace_whitespace is true, every tab will be converted to a
        single space!
      fix_sentence_endings (default: false)
        Ensure that sentence-ending punctuation is always followed
        by two spaces.  Off by default because the algorithm is
        (unavoidably) imperfect.
      break_long_words (default: true)
        Break words longer than 'width'.  If false, those words will not
        be broken, and some lines might be longer than 'width'.
    """

    # Translation table for str.translate: every recognized (US-ASCII)
    # whitespace character maps to a plain space.
    whitespace_trans = string.maketrans(_whitespace, ' ' * len(_whitespace))

    # Equivalent mapping for unicode.translate: code point -> ord(' ').
    unicode_whitespace_trans = {}
    uspace = ord(u' ')
    for x in map(ord, _whitespace):
        unicode_whitespace_trans[x] = uspace

    # This funky little regex is just the trick for splitting
    # text up into word-wrappable chunks.  E.g.
    #   "Hello there -- you goof-ball, use the -b option!"
    # splits into
    #   Hello/ /there/ /--/ /you/ /goof-/ball,/ /use/ /the/ /-b/ /option!
    # (after stripping out empty strings).
    wordsep_re = re.compile(
        r'(\s+|'                                  # any whitespace
        r'[^\s\w]*\w+[a-zA-Z]-(?=\w+[a-zA-Z])|'   # hyphenated words
        r'(?<=[\w\!\"\'\&\.\,\?])-{2,}(?=\w))')   # em-dash

    # XXX this is not locale- or charset-aware -- string.lowercase
    # is US-ASCII only (and therefore English-only)
    sentence_end_re = re.compile(r'[%s]'              # lowercase letter
                                 r'[\.\!\?]'          # sentence-ending punct.
                                 r'[\"\']?'           # optional end-of-quote
                                 % string.lowercase)


    def __init__(self,
                 width=70,
                 initial_indent="",
                 subsequent_indent="",
                 expand_tabs=True,
                 replace_whitespace=True,
                 fix_sentence_endings=False,
                 break_long_words=True):
        self.width = width
        self.initial_indent = initial_indent
        self.subsequent_indent = subsequent_indent
        self.expand_tabs = expand_tabs
        self.replace_whitespace = replace_whitespace
        self.fix_sentence_endings = fix_sentence_endings
        self.break_long_words = break_long_words


    # -- Private methods -----------------------------------------------
    # (possibly useful for subclasses to override)

    def _munge_whitespace(self, text):
        """_munge_whitespace(text : string) -> string

        Munge whitespace in text: expand tabs and convert all other
        whitespace characters to spaces.  Eg. " foo\tbar\n\nbaz"
        becomes " foo    bar  baz".
        """
        if self.expand_tabs:
            text = text.expandtabs()
        if self.replace_whitespace:
            # str and unicode need different translate() table types.
            if isinstance(text, str):
                text = text.translate(self.whitespace_trans)
            elif isinstance(text, unicode):
                text = text.translate(self.unicode_whitespace_trans)
        return text


    def _split(self, text):
        """_split(text : string) -> [string]

        Split the text to wrap into indivisible chunks.  Chunks are
        not quite the same as words; see wrap_chunks() for full
        details.  As an example, the text
          Look, goof-ball -- use the -b option!
        breaks into the following chunks:
          'Look,', ' ', 'goof-', 'ball', ' ', '--', ' ',
          'use', ' ', 'the', ' ', '-b', ' ', 'option!'
        """
        chunks = self.wordsep_re.split(text)
        # re.split() leaves empty strings between adjacent separators;
        # drop them.
        chunks = filter(None, chunks)
        return chunks

    def _fix_sentence_endings(self, chunks):
        """_fix_sentence_endings(chunks : [string])

        Correct for sentence endings buried in 'chunks'.  Eg. when the
        original text contains "... foo.\nBar ...", munge_whitespace()
        and split() will convert that to [..., "foo.", " ", "Bar", ...]
        which has one too few spaces; this method simply changes the one
        space to two.
        """
        i = 0
        pat = self.sentence_end_re
        while i < len(chunks)-1:
            if chunks[i+1] == " " and pat.search(chunks[i]):
                chunks[i+1] = "  "
                i += 2
            else:
                i += 1

    def _handle_long_word(self, chunks, cur_line, cur_len, width):
        """_handle_long_word(chunks : [string],
                             cur_line : [string],
                             cur_len : int, width : int)

        Handle a chunk of text (most likely a word, not whitespace) that
        is too long to fit in any line.
        """
        # Always put at least one character on the line (the max(..., 1))
        # so we cannot loop forever on a zero-width remainder.
        space_left = max(width - cur_len, 1)

        # If we're allowed to break long words, then do so: put as much
        # of the next chunk onto the current line as will fit.
        if self.break_long_words:
            cur_line.append(chunks[0][0:space_left])
            chunks[0] = chunks[0][space_left:]

        # Otherwise, we have to preserve the long word intact.  Only add
        # it to the current line if there's nothing already there --
        # that minimizes how much we violate the width constraint.
        elif not cur_line:
            cur_line.append(chunks.pop(0))

        # If we're not allowed to break long words, and there's already
        # text on the current line, do nothing.  Next time through the
        # main loop of _wrap_chunks(), we'll wind up here again, but
        # cur_len will be zero, so the next line will be entirely
        # devoted to the long word that we can't handle right now.

    def _wrap_chunks(self, chunks):
        """_wrap_chunks(chunks : [string]) -> [string]

        Wrap a sequence of text chunks and return a list of lines of
        length 'self.width' or less.  (If 'break_long_words' is false,
        some lines may be longer than this.)  Chunks correspond roughly
        to words and the whitespace between them: each chunk is
        indivisible (modulo 'break_long_words'), but a line break can
        come between any two chunks.  Chunks should not have internal
        whitespace; ie. a chunk is either all whitespace or a "word".
        Whitespace chunks will be removed from the beginning and end of
        lines, but apart from that whitespace is preserved.
        """
        lines = []
        if self.width <= 0:
            raise ValueError("invalid width %r (must be > 0)" % self.width)

        # Each pass through this loop consumes chunks from the front of
        # 'chunks' and produces (at most) one output line.
        while chunks:

            # Start the list of chunks that will make up the current line.
            # cur_len is just the length of all the chunks in cur_line.
            cur_line = []
            cur_len = 0

            # Figure out which static string will prefix this line.
            if lines:
                indent = self.subsequent_indent
            else:
                indent = self.initial_indent

            # Maximum width for this line.
            width = self.width - len(indent)

            # First chunk on line is whitespace -- drop it, unless this
            # is the very beginning of the text (ie. no lines started yet).
            if chunks[0].strip() == '' and lines:
                del chunks[0]

            while chunks:
                l = len(chunks[0])

                # Can at least squeeze this chunk onto the current line.
                if cur_len + l <= width:
                    cur_line.append(chunks.pop(0))
                    cur_len += l

                # Nope, this line is full.
                else:
                    break

            # The current line is full, and the next chunk is too big to
            # fit on *any* line (not just this one).
            if chunks and len(chunks[0]) > width:
                self._handle_long_word(chunks, cur_line, cur_len, width)

            # If the last chunk on this line is all whitespace, drop it.
            if cur_line and cur_line[-1].strip() == '':
                del cur_line[-1]

            # Convert current line back to a string and store it in list
            # of all lines (return value).
            if cur_line:
                lines.append(indent + ''.join(cur_line))

        return lines


    # -- Public interface ----------------------------------------------

    def wrap(self, text):
        """wrap(text : string) -> [string]

        Reformat the single paragraph in 'text' so it fits in lines of
        no more than 'self.width' columns, and return a list of wrapped
        lines.  Tabs in 'text' are expanded with string.expandtabs(),
        and all other whitespace characters (including newline) are
        converted to space.
        """
        text = self._munge_whitespace(text)
        indent = self.initial_indent  # NOTE(review): dead assignment -- never read; _wrap_chunks() selects the indent itself.
        chunks = self._split(text)
        if self.fix_sentence_endings:
            self._fix_sentence_endings(chunks)
        return self._wrap_chunks(chunks)

    def fill(self, text):
        """fill(text : string) -> string

        Reformat the single paragraph in 'text' to fit in lines of no
        more than 'self.width' columns, and return a new string
        containing the entire wrapped paragraph.
        """
        return "\n".join(self.wrap(text))
+
+
+# -- Convenience interface ---------------------------------------------
+
def wrap(text, width=70, **kwargs):
    """Wrap a single paragraph of text, returning a list of wrapped lines.

    Convenience front end to TextWrapper: reformats 'text' to fit in
    lines of at most 'width' columns.  By default tabs are expanded
    with string.expandtabs() and all other whitespace (including
    newlines) is converted to spaces.  Extra keyword arguments are
    passed straight to the TextWrapper constructor to customize
    wrapping behaviour.
    """
    return TextWrapper(width=width, **kwargs).wrap(text)
+
def fill(text, width=70, **kwargs):
    """Fill a single paragraph of text, returning a new string.

    Convenience front end to TextWrapper: reformats 'text' to fit in
    lines of at most 'width' columns and returns a single string
    containing the whole wrapped paragraph.  As with wrap(), tabs are
    expanded and other whitespace converted to spaces.  Extra keyword
    arguments are passed straight to the TextWrapper constructor.
    """
    return TextWrapper(width=width, **kwargs).fill(text)
+
+
+# -- Loosely related functionality -------------------------------------
+
def dedent(text):
    """dedent(text : string) -> string

    Remove any whitespace that can be uniformly removed from the left
    of every line in `text`.  Lines with no visible content are ignored
    when computing the common margin, but still have the margin sliced
    off.

    This can be used e.g. to make triple-quoted strings line up with
    the left edge of the screen, while still presenting them in
    indented form in the source code.
    """
    lines = text.expandtabs().split('\n')

    # Collect the leading-whitespace width of every line that has any
    # visible content; blank/whitespace-only lines don't constrain the
    # margin.
    indents = []
    for line in lines:
        stripped = line.lstrip()
        if stripped:
            indents.append(len(line) - len(stripped))

    if indents:
        margin = min(indents)
    else:
        margin = 0

    if margin:
        lines = [line[margin:] for line in lines]

    return '\n'.join(lines)
diff --git a/depot_tools/release/win/python_24/Lib/this.py b/depot_tools/release/win/python_24/Lib/this.py
new file mode 100644
index 0000000..37754b78
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/this.py
@@ -0,0 +1,28 @@
+s = """Gur Mra bs Clguba, ol Gvz Crgref
+
+Ornhgvshy vf orggre guna htyl.
+Rkcyvpvg vf orggre guna vzcyvpvg.
+Fvzcyr vf orggre guna pbzcyrk.
+Pbzcyrk vf orggre guna pbzcyvpngrq.
+Syng vf orggre guna arfgrq.
+Fcnefr vf orggre guna qrafr.
+Ernqnovyvgl pbhagf.
+Fcrpvny pnfrf nera'g fcrpvny rabhtu gb oernx gur ehyrf.
+Nygubhtu cenpgvpnyvgl orngf chevgl.
+Reebef fubhyq arire cnff fvyragyl.
+Hayrff rkcyvpvgyl fvyraprq.
+Va gur snpr bs nzovthvgl, ershfr gur grzcgngvba gb thrff.
+Gurer fubhyq or bar-- naq cersrenoyl bayl bar --boivbhf jnl gb qb vg.
+Nygubhtu gung jnl znl abg or boivbhf ng svefg hayrff lbh'er Qhgpu.
+Abj vf orggre guna arire.
+Nygubhtu arire vf bsgra orggre guna *evtug* abj.
+Vs gur vzcyrzragngvba vf uneq gb rkcynva, vg'f n onq vqrn.
+Vs gur vzcyrzragngvba vf rnfl gb rkcynva, vg znl or n tbbq vqrn.
+Anzrfcnprf ner bar ubaxvat terng vqrn -- yrg'f qb zber bs gubfr!"""
+
+d = {}
+for c in (65, 97):
+    for i in range(26):
+        d[chr(i+c)] = chr((i+13) % 26 + c)
+
+print "".join([d.get(c, c) for c in s])
diff --git a/depot_tools/release/win/python_24/Lib/threading.py b/depot_tools/release/win/python_24/Lib/threading.py
new file mode 100644
index 0000000..6def594
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/threading.py
@@ -0,0 +1,787 @@
+"""Thread module emulating a subset of Java's threading model."""
+
+import sys as _sys
+
+try:
+    import thread
+except ImportError:
+    del _sys.modules[__name__]
+    raise
+
+from time import time as _time, sleep as _sleep
+from traceback import format_exc as _format_exc
+from collections import deque
+
+# Rename some stuff so "from threading import *" is safe
+__all__ = ['activeCount', 'Condition', 'currentThread', 'enumerate', 'Event',
+           'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread',
+           'Timer', 'setprofile', 'settrace', 'local']
+
+# Keep underscore-prefixed aliases to the low-level thread primitives,
+# then delete the module name itself so it cannot leak via star imports.
+_start_new_thread = thread.start_new_thread
+_allocate_lock = thread.allocate_lock
+_get_ident = thread.get_ident
+ThreadError = thread.error
+del thread
+
+
+# Debug support (adapted from ihooks.py).
+# All the major classes here derive from _Verbose.  We force that to
+# be a new-style class so that all the major classes here are new-style.
+# This helps debugging (type(instance) is more revealing for instances
+# of new-style classes).
+
+# Module-wide default for the per-instance verbose flag below.
+_VERBOSE = False
+
+if __debug__:
+
+    class _Verbose(object):
+
+        def __init__(self, verbose=None):
+            if verbose is None:
+                verbose = _VERBOSE
+            self.__verbose = verbose
+
+        def _note(self, format, *args):
+            # Emit a debug line to stderr, tagged with the current
+            # thread's name, but only when this instance is verbose.
+            if self.__verbose:
+                format = format % args
+                format = "%s: %s\n" % (
+                    currentThread().getName(), format)
+                _sys.stderr.write(format)
+
+else:
+    # Disable this when using "python -O"
+    class _Verbose(object):
+        def __init__(self, verbose=None):
+            pass
+        def _note(self, *args):
+            pass
+
+# Support for profile and trace hooks
+
+_profile_hook = None
+_trace_hook = None
+
+def setprofile(func):
+    """Set a profile function that newly started threads will install
+    via sys.setprofile() (see Thread.__bootstrap)."""
+    global _profile_hook
+    _profile_hook = func
+
+def settrace(func):
+    """Set a trace function that newly started threads will install
+    via sys.settrace() (see Thread.__bootstrap)."""
+    global _trace_hook
+    _trace_hook = func
+
+# Synchronization classes
+
+# Lock is simply the raw, non-reentrant lock from the thread module.
+Lock = _allocate_lock
+
+def RLock(*args, **kwargs):
+    """Factory returning a new reentrant lock object."""
+    return _RLock(*args, **kwargs)
+
+class _RLock(_Verbose):
+    """A reentrant lock: the owning thread may call acquire() repeatedly,
+    and the lock is freed only after a matching number of release()s."""
+
+    def __init__(self, verbose=None):
+        _Verbose.__init__(self, verbose)
+        # __block is the underlying non-reentrant lock; __owner and
+        # __count record the owning thread and its recursion depth.
+        self.__block = _allocate_lock()
+        self.__owner = None
+        self.__count = 0
+
+    def __repr__(self):
+        return "<%s(%s, %d)>" % (
+                self.__class__.__name__,
+                self.__owner and self.__owner.getName(),
+                self.__count)
+
+    def acquire(self, blocking=1):
+        """Acquire the lock, recursively if this thread already owns it.
+        Returns 1 (or the underlying lock's result) on success, a false
+        value when non-blocking and the lock is held elsewhere."""
+        me = currentThread()
+        if self.__owner is me:
+            self.__count = self.__count + 1
+            if __debug__:
+                self._note("%s.acquire(%s): recursive success", self, blocking)
+            return 1
+        rc = self.__block.acquire(blocking)
+        if rc:
+            self.__owner = me
+            self.__count = 1
+            if __debug__:
+                self._note("%s.acquire(%s): initial success", self, blocking)
+        else:
+            if __debug__:
+                self._note("%s.acquire(%s): failure", self, blocking)
+        return rc
+
+    def release(self):
+        """Undo one acquire(); the lock is freed when the count hits zero."""
+        me = currentThread()
+        assert self.__owner is me, "release() of un-acquire()d lock"
+        self.__count = count = self.__count - 1
+        if not count:
+            self.__owner = None
+            self.__block.release()
+            if __debug__:
+                self._note("%s.release(): final release", self)
+        else:
+            if __debug__:
+                self._note("%s.release(): non-final release", self)
+
+    # Internal methods used by condition variables
+
+    def _acquire_restore(self, (count, owner)):
+        # Reacquire the lock and restore the recursion state previously
+        # saved by _release_save().
+        self.__block.acquire()
+        self.__count = count
+        self.__owner = owner
+        if __debug__:
+            self._note("%s._acquire_restore()", self)
+
+    def _release_save(self):
+        # Fully release the lock regardless of recursion depth, returning
+        # the (count, owner) state needed to restore it later.
+        if __debug__:
+            self._note("%s._release_save()", self)
+        count = self.__count
+        self.__count = 0
+        owner = self.__owner
+        self.__owner = None
+        self.__block.release()
+        return (count, owner)
+
+    def _is_owned(self):
+        # True when the calling thread currently holds this lock.
+        return self.__owner is currentThread()
+
+
+def Condition(*args, **kwargs):
+    """Factory returning a new condition variable, optionally bound to an
+    existing lock (a fresh RLock is created otherwise)."""
+    return _Condition(*args, **kwargs)
+
+class _Condition(_Verbose):
+
+    def __init__(self, lock=None, verbose=None):
+        _Verbose.__init__(self, verbose)
+        if lock is None:
+            lock = RLock()
+        self.__lock = lock
+        # Export the lock's acquire() and release() methods
+        self.acquire = lock.acquire
+        self.release = lock.release
+        # If the lock defines _release_save() and/or _acquire_restore(),
+        # these override the default implementations (which just call
+        # release() and acquire() on the lock).  Ditto for _is_owned().
+        try:
+            self._release_save = lock._release_save
+        except AttributeError:
+            pass
+        try:
+            self._acquire_restore = lock._acquire_restore
+        except AttributeError:
+            pass
+        try:
+            self._is_owned = lock._is_owned
+        except AttributeError:
+            pass
+        # One private waiter lock per thread currently blocked in wait().
+        self.__waiters = []
+
+    def __repr__(self):
+        return "<Condition(%s, %d)>" % (self.__lock, len(self.__waiters))
+
+    def _release_save(self):
+        self.__lock.release()           # No state to save
+
+    def _acquire_restore(self, x):
+        self.__lock.acquire()           # Ignore saved state
+
+    def _is_owned(self):
+        # Return True if lock is owned by currentThread.
+        # This method is called only if __lock doesn't have _is_owned().
+        if self.__lock.acquire(0):
+            self.__lock.release()
+            return False
+        else:
+            return True
+
+    def wait(self, timeout=None):
+        """Release the underlying lock, block until notified (or until the
+        optional timeout in seconds elapses), then reacquire the lock."""
+        assert self._is_owned(), "wait() of un-acquire()d lock"
+        # Each waiter blocks on its own freshly-acquired plain lock; a
+        # notifier wakes it by releasing that lock.
+        waiter = _allocate_lock()
+        waiter.acquire()
+        self.__waiters.append(waiter)
+        saved_state = self._release_save()
+        try:    # restore state no matter what (e.g., KeyboardInterrupt)
+            if timeout is None:
+                waiter.acquire()
+                if __debug__:
+                    self._note("%s.wait(): got it", self)
+            else:
+                # Balancing act:  We can't afford a pure busy loop, so we
+                # have to sleep; but if we sleep the whole timeout time,
+                # we'll be unresponsive.  The scheme here sleeps very
+                # little at first, longer as time goes on, but never longer
+                # than 20 times per second (or the timeout time remaining).
+                endtime = _time() + timeout
+                delay = 0.0005 # 500 us -> initial delay of 1 ms
+                while True:
+                    gotit = waiter.acquire(0)
+                    if gotit:
+                        break
+                    remaining = endtime - _time()
+                    if remaining <= 0:
+                        break
+                    delay = min(delay * 2, remaining, .05)
+                    _sleep(delay)
+                if not gotit:
+                    if __debug__:
+                        self._note("%s.wait(%s): timed out", self, timeout)
+                    # Timed out: withdraw our waiter lock; it may already be
+                    # gone if a notifier raced us, hence the ValueError pass.
+                    try:
+                        self.__waiters.remove(waiter)
+                    except ValueError:
+                        pass
+                else:
+                    if __debug__:
+                        self._note("%s.wait(%s): got it", self, timeout)
+        finally:
+            self._acquire_restore(saved_state)
+
+    def notify(self, n=1):
+        """Wake up to n threads waiting on this condition, if any."""
+        assert self._is_owned(), "notify() of un-acquire()d lock"
+        __waiters = self.__waiters
+        waiters = __waiters[:n]
+        if not waiters:
+            if __debug__:
+                self._note("%s.notify(): no waiters", self)
+            return
+        self._note("%s.notify(): notifying %d waiter%s", self, n,
+                   n!=1 and "s" or "")
+        for waiter in waiters:
+            waiter.release()
+            try:
+                __waiters.remove(waiter)
+            except ValueError:
+                pass
+
+    def notifyAll(self):
+        """Wake every thread currently waiting on this condition."""
+        self.notify(len(self.__waiters))
+
+
+def Semaphore(*args, **kwargs):
+    """Factory returning a new (unbounded) semaphore object."""
+    return _Semaphore(*args, **kwargs)
+
+class _Semaphore(_Verbose):
+
+    # After Tim Peters' semaphore class, but not quite the same (no maximum)
+
+    def __init__(self, value=1, verbose=None):
+        assert value >= 0, "Semaphore initial value must be >= 0"
+        _Verbose.__init__(self, verbose)
+        # The counter is guarded by a condition built on a plain
+        # (non-reentrant) lock.
+        self.__cond = Condition(Lock())
+        self.__value = value
+
+    def acquire(self, blocking=1):
+        """Decrement the counter, waiting until it is positive unless
+        blocking is false.  Returns True on success, False otherwise."""
+        rc = False
+        self.__cond.acquire()
+        while self.__value == 0:
+            if not blocking:
+                break
+            if __debug__:
+                self._note("%s.acquire(%s): blocked waiting, value=%s",
+                           self, blocking, self.__value)
+            self.__cond.wait()
+        else:
+            # while/else: runs only when the loop exits normally (counter
+            # positive), not via the non-blocking break above.
+            self.__value = self.__value - 1
+            if __debug__:
+                self._note("%s.acquire: success, value=%s",
+                           self, self.__value)
+            rc = True
+        self.__cond.release()
+        return rc
+
+    def release(self):
+        """Increment the counter and wake one waiting thread, if any."""
+        self.__cond.acquire()
+        self.__value = self.__value + 1
+        if __debug__:
+            self._note("%s.release: success, value=%s",
+                       self, self.__value)
+        self.__cond.notify()
+        self.__cond.release()
+
+
+def BoundedSemaphore(*args, **kwargs):
+    """Factory returning a semaphore whose counter may never exceed its
+    initial value."""
+    return _BoundedSemaphore(*args, **kwargs)
+
+class _BoundedSemaphore(_Semaphore):
+    """Semaphore that checks that # releases is <= # acquires"""
+    def __init__(self, value=1, verbose=None):
+        _Semaphore.__init__(self, value, verbose)
+        self._initial_value = value
+
+    def release(self):
+        # _Semaphore__value is the name-mangled private counter of the base.
+        if self._Semaphore__value >= self._initial_value:
+            raise ValueError, "Semaphore released too many times"
+        return _Semaphore.release(self)
+
+
+def Event(*args, **kwargs):
+    """Factory returning a new event object with its flag initially false."""
+    return _Event(*args, **kwargs)
+
+class _Event(_Verbose):
+
+    # After Tim Peters' event class (without is_posted())
+
+    def __init__(self, verbose=None):
+        _Verbose.__init__(self, verbose)
+        self.__cond = Condition(Lock())
+        self.__flag = False
+
+    def isSet(self):
+        """Return True if and only if the internal flag is set."""
+        return self.__flag
+
+    def set(self):
+        """Set the internal flag and wake all threads waiting on it."""
+        self.__cond.acquire()
+        try:
+            self.__flag = True
+            self.__cond.notifyAll()
+        finally:
+            self.__cond.release()
+
+    def clear(self):
+        """Reset the internal flag to false."""
+        self.__cond.acquire()
+        try:
+            self.__flag = False
+        finally:
+            self.__cond.release()
+
+    def wait(self, timeout=None):
+        """Block until the flag is set, or until the optional timeout (in
+        seconds) elapses; returns immediately if the flag is already set."""
+        self.__cond.acquire()
+        try:
+            if not self.__flag:
+                self.__cond.wait(timeout)
+        finally:
+            self.__cond.release()
+
+# Helper to generate new thread names
+_counter = 0
+def _newname(template="Thread-%d"):
+    """Return a new default thread name, e.g. "Thread-7"."""
+    global _counter
+    _counter = _counter + 1
+    return template % _counter
+
+# Active thread administration
+# _active maps thread idents to running Thread objects; _limbo holds
+# threads between start() and the moment the new OS thread registers
+# itself in __bootstrap().  Both dicts are guarded by _active_limbo_lock.
+_active_limbo_lock = _allocate_lock()
+_active = {}
+_limbo = {}
+
+
+# Main class for threads
+
+class Thread(_Verbose):
+    """A thread of control: run() (by default the constructor's target)
+    executes in a new OS thread after start(); the instance tracks
+    started/stopped state for join(), isAlive() and repr()."""
+
+    __initialized = False
+    # Need to store a reference to sys.exc_info for printing
+    # out exceptions when a thread tries to use a global var. during interp.
+    # shutdown and thus raises an exception about trying to perform some
+    # operation on/with a NoneType
+    __exc_info = _sys.exc_info
+
+    def __init__(self, group=None, target=None, name=None,
+                 args=(), kwargs={}, verbose=None):
+        assert group is None, "group argument must be None for now"
+        _Verbose.__init__(self, verbose)
+        self.__target = target
+        self.__name = str(name or _newname())
+        self.__args = args
+        self.__kwargs = kwargs
+        self.__daemonic = self._set_daemon()
+        self.__started = False
+        self.__stopped = False
+        # __block guards the stopped flag and lets join() wait on it.
+        self.__block = Condition(Lock())
+        self.__initialized = True
+        # sys.stderr is not stored in the class like
+        # sys.exc_info since it can be changed between instances
+        self.__stderr = _sys.stderr
+
+    def _set_daemon(self):
+        # Overridden in _MainThread and _DummyThread
+        return currentThread().isDaemon()
+
+    def __repr__(self):
+        assert self.__initialized, "Thread.__init__() was not called"
+        status = "initial"
+        if self.__started:
+            status = "started"
+        if self.__stopped:
+            status = "stopped"
+        if self.__daemonic:
+            status = status + " daemon"
+        return "<%s(%s, %s)>" % (self.__class__.__name__, self.__name, status)
+
+    def start(self):
+        """Arrange for run() to be invoked in a separate OS thread.
+        May be called at most once per Thread object."""
+        assert self.__initialized, "Thread.__init__() not called"
+        assert not self.__started, "thread already started"
+        if __debug__:
+            self._note("%s.start(): starting thread", self)
+        _active_limbo_lock.acquire()
+        # Park in _limbo until __bootstrap() re-registers us in _active.
+        _limbo[self] = self
+        _active_limbo_lock.release()
+        _start_new_thread(self.__bootstrap, ())
+        self.__started = True
+        _sleep(0.000001)    # 1 usec, to let the thread run (Solaris hack)
+
+    def run(self):
+        """Default thread body: call the constructor's target, if any.
+        Subclasses may override this instead of passing a target."""
+        if self.__target:
+            self.__target(*self.__args, **self.__kwargs)
+
+    def __bootstrap(self):
+        # Entry point executed in the new OS thread: registers the thread
+        # in _active, installs the trace/profile hooks, runs run(), and
+        # reports any stray exception before cleaning up.
+        try:
+            self.__started = True
+            _active_limbo_lock.acquire()
+            _active[_get_ident()] = self
+            del _limbo[self]
+            _active_limbo_lock.release()
+            if __debug__:
+                self._note("%s.__bootstrap(): thread started", self)
+
+            if _trace_hook:
+                self._note("%s.__bootstrap(): registering trace hook", self)
+                _sys.settrace(_trace_hook)
+            if _profile_hook:
+                self._note("%s.__bootstrap(): registering profile hook", self)
+                _sys.setprofile(_profile_hook)
+
+            try:
+                self.run()
+            except SystemExit:
+                if __debug__:
+                    self._note("%s.__bootstrap(): raised SystemExit", self)
+            except:
+                if __debug__:
+                    self._note("%s.__bootstrap(): unhandled exception", self)
+                # If sys.stderr is no more (most likely from interpreter
+                # shutdown) use self.__stderr.  Otherwise still use sys (as in
+                # _sys) in case sys.stderr was redefined since the creation of
+                # self.
+                if _sys:
+                    _sys.stderr.write("Exception in thread %s:\n%s\n" %
+                                      (self.getName(), _format_exc()))
+                else:
+                    # Do the best job possible w/o a huge amt. of code to
+                    # approximate a traceback (code ideas from
+                    # Lib/traceback.py)
+                    exc_type, exc_value, exc_tb = self.__exc_info()
+                    try:
+                        print>>self.__stderr, (
+                            "Exception in thread " + self.getName() +
+                            " (most likely raised during interpreter shutdown):")
+                        print>>self.__stderr, (
+                            "Traceback (most recent call last):")
+                        while exc_tb:
+                            print>>self.__stderr, (
+                                '  File "%s", line %s, in %s' %
+                                (exc_tb.tb_frame.f_code.co_filename,
+                                    exc_tb.tb_lineno,
+                                    exc_tb.tb_frame.f_code.co_name))
+                            exc_tb = exc_tb.tb_next
+                        print>>self.__stderr, ("%s: %s" % (exc_type, exc_value))
+                    # Make sure that exc_tb gets deleted since it is a memory
+                    # hog; deleting everything else is just for thoroughness
+                    finally:
+                        del exc_type, exc_value, exc_tb
+            else:
+                if __debug__:
+                    self._note("%s.__bootstrap(): normal return", self)
+        finally:
+            self.__stop()
+            try:
+                self.__delete()
+            except:
+                pass
+
+    def __stop(self):
+        # Mark the thread stopped and wake every thread blocked in join().
+        self.__block.acquire()
+        self.__stopped = True
+        self.__block.notifyAll()
+        self.__block.release()
+
+    def __delete(self):
+        "Remove current thread from the dict of currently running threads."
+
+        # Notes about running with dummy_thread:
+        #
+        # Must take care to not raise an exception if dummy_thread is being
+        # used (and thus this module is being used as an instance of
+        # dummy_threading).  dummy_thread.get_ident() always returns -1 since
+        # there is only one thread if dummy_thread is being used.  Thus
+        # len(_active) is always <= 1 here, and any Thread instance created
+        # overwrites the (if any) thread currently registered in _active.
+        #
+        # An instance of _MainThread is always created by 'threading'.  This
+        # gets overwritten the instant an instance of Thread is created; both
+        # threads return -1 from dummy_thread.get_ident() and thus have the
+        # same key in the dict.  So when the _MainThread instance created by
+        # 'threading' tries to clean itself up when atexit calls this method
+        # it gets a KeyError if another Thread instance was created.
+        #
+        # This all means that KeyError from trying to delete something from
+        # _active if dummy_threading is being used is a red herring.  But
+        # since it isn't if dummy_threading is *not* being used then don't
+        # hide the exception.
+
+        _active_limbo_lock.acquire()
+        try:
+            try:
+                del _active[_get_ident()]
+            except KeyError:
+                if 'dummy_threading' not in _sys.modules:
+                    raise
+        finally:
+            _active_limbo_lock.release()
+
+    def join(self, timeout=None):
+        """Wait until the thread terminates, or until the optional timeout
+        (in seconds) elapses.  Must not be called on the current thread or
+        before start()."""
+        assert self.__initialized, "Thread.__init__() not called"
+        assert self.__started, "cannot join thread before it is started"
+        assert self is not currentThread(), "cannot join current thread"
+        if __debug__:
+            if not self.__stopped:
+                self._note("%s.join(): waiting until thread stops", self)
+        self.__block.acquire()
+        if timeout is None:
+            while not self.__stopped:
+                self.__block.wait()
+            if __debug__:
+                self._note("%s.join(): thread stopped", self)
+        else:
+            deadline = _time() + timeout
+            while not self.__stopped:
+                delay = deadline - _time()
+                if delay <= 0:
+                    if __debug__:
+                        self._note("%s.join(): timed out", self)
+                    break
+                self.__block.wait(delay)
+            else:
+                if __debug__:
+                    self._note("%s.join(): thread stopped", self)
+        self.__block.release()
+
+    def getName(self):
+        """Return this thread's name."""
+        assert self.__initialized, "Thread.__init__() not called"
+        return self.__name
+
+    def setName(self, name):
+        """Set this thread's name (coerced to str)."""
+        assert self.__initialized, "Thread.__init__() not called"
+        self.__name = str(name)
+
+    def isAlive(self):
+        """Return True from just before run() starts until just after it ends."""
+        assert self.__initialized, "Thread.__init__() not called"
+        return self.__started and not self.__stopped
+
+    def isDaemon(self):
+        """Return this thread's daemon flag."""
+        assert self.__initialized, "Thread.__init__() not called"
+        return self.__daemonic
+
+    def setDaemon(self, daemonic):
+        """Set the daemon flag; only allowed before start()."""
+        assert self.__initialized, "Thread.__init__() not called"
+        assert not self.__started, "cannot set daemon status of active thread"
+        self.__daemonic = daemonic
+
+# The timer class was contributed by Itamar Shtull-Trauring
+
+def Timer(*args, **kwargs):
+    """Factory returning a thread that calls a function after an interval."""
+    return _Timer(*args, **kwargs)
+
+class _Timer(Thread):
+    """Call a function after a specified number of seconds:
+
+    t = Timer(30.0, f, args=[], kwargs={})
+    t.start()
+    t.cancel() # stop the timer's action if it's still waiting
+    """
+
+    def __init__(self, interval, function, args=[], kwargs={}):
+        Thread.__init__(self)
+        self.interval = interval
+        self.function = function
+        self.args = args
+        self.kwargs = kwargs
+        # Setting this event either cancels the timer or records that it
+        # has already fired.
+        self.finished = Event()
+
+    def cancel(self):
+        """Stop the timer if it hasn't finished yet"""
+        self.finished.set()
+
+    def run(self):
+        # Wait out the interval (or until cancelled); fire only when
+        # cancel() did not set the event first.
+        self.finished.wait(self.interval)
+        if not self.finished.isSet():
+            self.function(*self.args, **self.kwargs)
+        self.finished.set()
+
+# Special thread class to represent the main thread
+# This is garbage collected through an exit handler
+
+class _MainThread(Thread):
+
+    def __init__(self):
+        Thread.__init__(self, name="MainThread")
+        self._Thread__started = True
+        # Register directly in _active; the main thread never goes
+        # through start()/__bootstrap().
+        _active_limbo_lock.acquire()
+        _active[_get_ident()] = self
+        _active_limbo_lock.release()
+        import atexit
+        atexit.register(self.__exitfunc)
+
+    def _set_daemon(self):
+        # The main thread is never daemonic.
+        return False
+
+    def __exitfunc(self):
+        # Run via atexit: mark the main thread stopped, wait for every
+        # remaining non-daemon thread, then deregister.
+        self._Thread__stop()
+        t = _pickSomeNonDaemonThread()
+        if t:
+            if __debug__:
+                self._note("%s: waiting for other threads", self)
+        while t:
+            t.join()
+            t = _pickSomeNonDaemonThread()
+        if __debug__:
+            self._note("%s: exiting", self)
+        self._Thread__delete()
+
+def _pickSomeNonDaemonThread():
+    """Return an arbitrary live non-daemon thread, or None if none remain."""
+    for t in enumerate():
+        if not t.isDaemon() and t.isAlive():
+            return t
+    return None
+
+
+# Dummy thread class to represent threads not started here.
+# These aren't garbage collected when they die,
+# nor can they be waited for.
+# Their purpose is to return *something* from currentThread().
+# They are marked as daemon threads so we won't wait for them
+# when we exit (conform previous semantics).
+
+class _DummyThread(Thread):
+
+    def __init__(self):
+        Thread.__init__(self, name=_newname("Dummy-%d"))
+        self._Thread__started = True
+        # Register directly in _active; dummies never go through start().
+        _active_limbo_lock.acquire()
+        _active[_get_ident()] = self
+        _active_limbo_lock.release()
+
+    def _set_daemon(self):
+        # Dummies are daemonic, so interpreter exit does not wait for them.
+        return True
+
+    def join(self, timeout=None):
+        assert False, "cannot join a dummy thread"
+
+
+# Global API functions
+
+def currentThread():
+    """Return the Thread object for the calling thread; threads not
+    created through this module get wrapped in a _DummyThread."""
+    try:
+        return _active[_get_ident()]
+    except KeyError:
+        ##print "currentThread(): no current thread for", _get_ident()
+        return _DummyThread()
+
+def activeCount():
+    """Return the number of Thread objects currently alive (registered
+    plus those still in limbo between start() and first run)."""
+    _active_limbo_lock.acquire()
+    count = len(_active) + len(_limbo)
+    _active_limbo_lock.release()
+    return count
+
+def enumerate():
+    """Return a list of all Thread objects currently alive, including
+    those still in limbo between start() and first run."""
+    _active_limbo_lock.acquire()
+    active = _active.values() + _limbo.values()
+    _active_limbo_lock.release()
+    return active
+
+# Create the main thread object
+
+# Instantiating registers it in _active and installs the atexit handler
+# that waits for non-daemon threads (see _MainThread.__init__).
+_MainThread()
+
+# get thread-local implementation, either from the thread
+# module, or from the python fallback
+
+try:
+    from thread import _local as local
+except ImportError:
+    from _threading_local import local
+
+
+# Self-test code
+
+def _test():
+    """Smoke-test the primitives with a bounded producer/consumer queue."""
+
+    class BoundedQueue(_Verbose):
+
+        def __init__(self, limit):
+            _Verbose.__init__(self)
+            # One monitor lock shared by two conditions: rc signals
+            # "item available", wc signals "space available".
+            self.mon = RLock()
+            self.rc = Condition(self.mon)
+            self.wc = Condition(self.mon)
+            self.limit = limit
+            self.queue = deque()
+
+        def put(self, item):
+            # Append item, blocking while the queue is at its limit.
+            self.mon.acquire()
+            while len(self.queue) >= self.limit:
+                self._note("put(%s): queue full", item)
+                self.wc.wait()
+            self.queue.append(item)
+            self._note("put(%s): appended, length now %d",
+                       item, len(self.queue))
+            self.rc.notify()
+            self.mon.release()
+
+        def get(self):
+            # Pop the oldest item, blocking while the queue is empty.
+            self.mon.acquire()
+            while not self.queue:
+                self._note("get(): queue empty")
+                self.rc.wait()
+            item = self.queue.popleft()
+            self._note("get(): got %s, %d left", item, len(self.queue))
+            self.wc.notify()
+            self.mon.release()
+            return item
+
+    class ProducerThread(Thread):
+
+        def __init__(self, queue, quota):
+            Thread.__init__(self, name="Producer")
+            self.queue = queue
+            self.quota = quota
+
+        def run(self):
+            from random import random
+            counter = 0
+            while counter < self.quota:
+                counter = counter + 1
+                self.queue.put("%s.%d" % (self.getName(), counter))
+                _sleep(random() * 0.00001)
+
+
+    class ConsumerThread(Thread):
+
+        def __init__(self, queue, count):
+            Thread.__init__(self, name="Consumer")
+            self.queue = queue
+            self.count = count
+
+        def run(self):
+            while self.count > 0:
+                item = self.queue.get()
+                print item
+                self.count = self.count - 1
+
+    # NP producers each push NI items through a queue bounded at QL;
+    # one consumer drains all NP*NI items.
+    NP = 3
+    QL = 4
+    NI = 5
+
+    Q = BoundedQueue(QL)
+    P = []
+    for i in range(NP):
+        t = ProducerThread(Q, NI)
+        t.setName("Producer-%d" % (i+1))
+        P.append(t)
+    C = ConsumerThread(Q, NI*NP)
+    for t in P:
+        t.start()
+        _sleep(0.000001)
+    C.start()
+    for t in P:
+        t.join()
+    C.join()
+
+if __name__ == '__main__':
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/timeit.py b/depot_tools/release/win/python_24/Lib/timeit.py
new file mode 100644
index 0000000..8c0f7a5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/timeit.py
@@ -0,0 +1,285 @@
+#! /usr/bin/env python
+
+"""Tool for measuring execution time of small code snippets.
+
+This module avoids a number of common traps for measuring execution
+times.  See also Tim Peters' introduction to the Algorithms chapter in
+the Python Cookbook, published by O'Reilly.
+
+Library usage: see the Timer class.
+
+Command line usage:
+    python timeit.py [-n N] [-r N] [-s S] [-t] [-c] [-h] [statement]
+
+Options:
+  -n/--number N: how many times to execute 'statement' (default: see below)
+  -r/--repeat N: how many times to repeat the timer (default 3)
+  -s/--setup S: statement to be executed once initially (default 'pass')
+  -t/--time: use time.time() (default on Unix)
+  -c/--clock: use time.clock() (default on Windows)
+  -v/--verbose: print raw timing results; repeat for more digits precision
+  -h/--help: print this usage message and exit
+  statement: statement to be timed (default 'pass')
+
+A multi-line statement may be given by specifying each line as a
+separate argument; indented lines are possible by enclosing an
+argument in quotes and using leading spaces.  Multiple -s options are
+treated similarly.
+
+If -n is not given, a suitable number of loops is calculated by trying
+successive powers of 10 until the total time is at least 0.2 seconds.
+
+The difference in default timer function is because on Windows,
+clock() has microsecond granularity but time()'s granularity is 1/60th
+of a second; on Unix, clock() has 1/100th of a second granularity and
+time() is much more precise.  On either platform, the default timer
+functions measure wall clock time, not the CPU time.  This means that
+other processes running on the same computer may interfere with the
+timing.  The best thing to do when accurate timing is necessary is to
+repeat the timing a few times and use the best time.  The -r option is
+good for this; the default of 3 repetitions is probably enough in most
+cases.  On Unix, you can use clock() to measure CPU time.
+
+Note: there is a certain baseline overhead associated with executing a
+pass statement.  The code here doesn't try to hide it, but you should
+be aware of it.  The baseline overhead can be measured by invoking the
+program without arguments.
+
+The baseline overhead differs between Python versions!  Also, to
+fairly compare older Python versions to Python 2.3, you may want to
+use python -O for the older versions to avoid timing SET_LINENO
+instructions.
+"""
+
+import gc
+import sys
+import time
+try:
+    import itertools
+except ImportError:
+    # Must be an older Python version (see timeit() below)
+    itertools = None
+
+__all__ = ["Timer"]
+
+# Filename used for the generated code so tracebacks can be mapped back
+# to it (see Timer.print_exc).
+dummy_src_name = "<timeit-src>"
+default_number = 1000000
+default_repeat = 3
+
+if sys.platform == "win32":
+    # On Windows, the best timer is time.clock()
+    default_timer = time.clock
+else:
+    # On most other platforms the best timer is time.time()
+    default_timer = time.time
+
+# Don't change the indentation of the template; the reindent() calls
+# in Timer.__init__() depend on setup being indented 4 spaces and stmt
+# being indented 8 spaces.
+# The %(setup)s / %(stmt)s slots are filled per Timer instance and the
+# result compiled into the inner() timing function.
+template = """
+def inner(_it, _timer):
+    %(setup)s
+    _t0 = _timer()
+    for _i in _it:
+        %(stmt)s
+    _t1 = _timer()
+    return _t1 - _t0
+"""
+
+def reindent(src, indent):
+    """Helper to reindent a multi-line statement."""
+    return src.replace("\n", "\n" + " "*indent)
+
+class Timer:
+    """Class for timing execution speed of small code snippets.
+
+    The constructor takes a statement to be timed, an additional
+    statement used for setup, and a timer function.  Both statements
+    default to 'pass'; the timer function is platform-dependent (see
+    module doc string).
+
+    To measure the execution time of the first statement, use the
+    timeit() method.  The repeat() method is a convenience to call
+    timeit() multiple times and return a list of results.
+
+    The statements may contain newlines, as long as they don't contain
+    multi-line string literals.
+    """
+
+    def __init__(self, stmt="pass", setup="pass", timer=default_timer):
+        """Constructor.  See class doc string."""
+        self.timer = timer
+        stmt = reindent(stmt, 8)
+        setup = reindent(setup, 4)
+        src = template % {'stmt': stmt, 'setup': setup}
+        self.src = src # Save for traceback display
+        code = compile(src, dummy_src_name, "exec")
+        ns = {}
+        exec code in globals(), ns
+        self.inner = ns["inner"]
+
+    def print_exc(self, file=None):
+        """Helper to print a traceback from the timed code.
+
+        Typical use:
+
+            t = Timer(...)       # outside the try/except
+            try:
+                t.timeit(...)    # or t.repeat(...)
+            except:
+                t.print_exc()
+
+        The advantage over the standard traceback is that source lines
+        in the compiled template will be displayed.
+
+        The optional file argument directs where the traceback is
+        sent; it defaults to sys.stderr.
+        """
+        import linecache, traceback
+        linecache.cache[dummy_src_name] = (len(self.src),
+                                           None,
+                                           self.src.split("\n"),
+                                           dummy_src_name)
+        traceback.print_exc(file=file)
+
+    def timeit(self, number=default_number):
+        """Time 'number' executions of the main statement.
+
+        To be precise, this executes the setup statement once, and
+        then returns the time it takes to execute the main statement
+        a number of times, as a float measured in seconds.  The
+        argument is the number of times through the loop, defaulting
+        to one million.  The main statement, the setup statement and
+        the timer function to be used are passed to the constructor.
+        """
+        # itertools.repeat avoids materializing a 'number'-element list
+        # (itertools may be None on stripped-down builds; see import).
+        if itertools:
+            it = itertools.repeat(None, number)
+        else:
+            it = [None] * number
+        # Disable garbage collection during the measurement so GC pauses
+        # don't distort the timing; restore the previous state after.
+        gcold = gc.isenabled()
+        gc.disable()
+        timing = self.inner(it, self.timer)
+        if gcold:
+            gc.enable()
+        return timing
+
+    def repeat(self, repeat=default_repeat, number=default_number):
+        """Call timeit() a few times.
+
+        This is a convenience function that calls the timeit()
+        repeatedly, returning a list of results.  The first argument
+        specifies how many times to call timeit(), defaulting to 3;
+        the second argument specifies the number argument, defaulting
+        to one million.
+
+        Note: it's tempting to calculate mean and standard deviation
+        from the result vector and report these.  However, this is not
+        very useful.  In a typical case, the lowest value gives a
+        lower bound for how fast your machine can run the given code
+        snippet; higher values in the result vector are typically not
+        caused by variability in Python's speed, but by other
+        processes interfering with your timing accuracy.  So the min()
+        of the result is probably the only number you should be
+        interested in.  After that, you should look at the entire
+        vector and apply common sense rather than statistics.
+        """
+        r = []
+        for i in range(repeat):
+            t = self.timeit(number)
+            r.append(t)
+        return r
+
+def main(args=None):
+    """Main program, used when run as a script.
+
+    The optional argument specifies the command line to be parsed,
+    defaulting to sys.argv[1:].
+
+    The return value is an exit code to be passed to sys.exit(); it
+    may be None to indicate success.
+
+    When an exception happens during timing, a traceback is printed to
+    stderr and the return value is 1.  Exceptions at other times
+    (including the template compilation) are not caught.
+    """
+    if args is None:
+        args = sys.argv[1:]
+    import getopt
+    try:
+        opts, args = getopt.getopt(args, "n:s:r:tcvh",
+                                   ["number=", "setup=", "repeat=",
+                                    "time", "clock", "verbose", "help"])
+    except getopt.error, err:
+        print err
+        print "use -h/--help for command line help"
+        return 2
+    # Defaults; possibly overridden by the options parsed below.
+    timer = default_timer
+    stmt = "\n".join(args) or "pass"
+    number = 0 # auto-determine
+    setup = []
+    repeat = default_repeat
+    verbose = 0
+    precision = 3
+    for o, a in opts:
+        if o in ("-n", "--number"):
+            number = int(a)
+        if o in ("-s", "--setup"):
+            setup.append(a)
+        if o in ("-r", "--repeat"):
+            repeat = int(a)
+            if repeat <= 0:
+                repeat = 1
+        if o in ("-t", "--time"):
+            timer = time.time
+        if o in ("-c", "--clock"):
+            timer = time.clock
+        if o in ("-v", "--verbose"):
+            # Each extra -v adds one digit of reported precision.
+            if verbose:
+                precision += 1
+            verbose += 1
+        if o in ("-h", "--help"):
+            print __doc__,
+            return 0
+    setup = "\n".join(setup) or "pass"
+    # Include the current directory, so that local imports work (sys.path
+    # contains the directory of this script, rather than the current
+    # directory)
+    import os
+    sys.path.insert(0, os.curdir)
+    t = Timer(stmt, setup, timer)
+    if number == 0:
+        # determine number so that 0.2 <= total time < 2.0
+        for i in range(1, 10):
+            number = 10**i
+            try:
+                x = t.timeit(number)
+            except:
+                t.print_exc()
+                return 1
+            if verbose:
+                print "%d loops -> %.*g secs" % (number, precision, x)
+            if x >= 0.2:
+                break
+    try:
+        r = t.repeat(repeat, number)
+    except:
+        t.print_exc()
+        return 1
+    # Report the minimum; see Timer.repeat's doc string for why the
+    # lowest result is the meaningful one.
+    best = min(r)
+    if verbose:
+        print "raw times:", " ".join(["%.*g" % (precision, x) for x in r])
+    print "%d loops," % number,
+    usec = best * 1e6 / number
+    # Scale the per-loop time into the most readable unit.
+    if usec < 1000:
+        print "best of %d: %.*g usec per loop" % (repeat, precision, usec)
+    else:
+        msec = usec / 1000
+        if msec < 1000:
+            print "best of %d: %.*g msec per loop" % (repeat, precision, msec)
+        else:
+            sec = msec / 1000
+            print "best of %d: %.*g sec per loop" % (repeat, precision, sec)
+    return None
+
+if __name__ == "__main__":
+    sys.exit(main())
diff --git a/depot_tools/release/win/python_24/Lib/toaiff.py b/depot_tools/release/win/python_24/Lib/toaiff.py
new file mode 100644
index 0000000..3c8a02b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/toaiff.py
@@ -0,0 +1,107 @@
+"""Convert "arbitrary" sound files to AIFF (Apple and SGI's audio format).
+
+Input may be compressed.
+Uncompressed file type may be AIFF, WAV, VOC, 8SVX, NeXT/Sun, and others.
+An exception is raised if the file is not of a recognized type.
+Returned filename is either the input filename or a temporary filename;
+in the latter case the caller must ensure that it is removed.
+Other temporary files used are removed by the function.
+"""
+
+import os
+import tempfile
+import pipes
+import sndhdr
+
+__all__ = ["error", "toaiff"]
+
+# Map each sndhdr file-type name to a pipes.Template that converts that
+# format to AIFF via sox.  The '--' flag marks a stdin-to-stdout filter.
+table = {}
+
+t = pipes.Template()
+t.append('sox -t au - -t aiff -r 8000 -', '--')
+table['au'] = t
+
+# XXX The following is actually sub-optimal.
+# XXX The HCOM sampling rate can be 22k, 22k/2, 22k/3 or 22k/4.
+# XXX We must force the output sampling rate else the SGI won't play
+# XXX files sampled at 5.5k or 7.333k; however this means that files
+# XXX sampled at 11k are unnecessarily expanded.
+# XXX Similar comments apply to some other file types.
+t = pipes.Template()
+t.append('sox -t hcom - -t aiff -r 22050 -', '--')
+table['hcom'] = t
+
+t = pipes.Template()
+t.append('sox -t voc - -t aiff -r 11025 -', '--')
+table['voc'] = t
+
+t = pipes.Template()
+t.append('sox -t wav - -t aiff -', '--')
+table['wav'] = t
+
+t = pipes.Template()
+t.append('sox -t 8svx - -t aiff -r 16000 -', '--')
+table['8svx'] = t
+
+t = pipes.Template()
+t.append('sox -t sndt - -t aiff -r 16000 -', '--')
+table['sndt'] = t
+
+t = pipes.Template()
+t.append('sox -t sndr - -t aiff -r 16000 -', '--')
+table['sndr'] = t
+
+# Pipeline for transparently expanding compress(1)-ed (.Z) input files.
+uncompress = pipes.Template()
+uncompress.append('uncompress', '--')
+
+
+class error(Exception):
+    """Raised when decompression or conversion fails, or when the
+    input file's audio type is not recognized or supported."""
+    pass
+
+def toaiff(filename):
+    """Convert 'filename' to AIFF and return the resulting path.
+
+    The result is either the input filename itself (already AIFF) or a
+    temporary file which the caller must remove.  All other temporary
+    files are cleaned up here, even when conversion raises.
+    """
+    temps = []
+    ret = None
+    try:
+        ret = _toaiff(filename, temps)
+    finally:
+        # Remove every temporary except the one being returned.
+        for temp in temps[:]:
+            if temp != ret:
+                try:
+                    os.unlink(temp)
+                except os.error:
+                    pass
+                temps.remove(temp)
+    return ret
+
+def _toaiff(filename, temps):
+    # Worker for toaiff(); every temp file created is appended to
+    # 'temps' so the caller can clean up.
+    if filename[-2:] == '.Z':
+        # compress(1)-ed input: expand into a fresh temp file first.
+        (fd, fname) = tempfile.mkstemp()
+        os.close(fd)
+        temps.append(fname)
+        sts = uncompress.copy(filename, fname)
+        if sts:
+            raise error, filename + ': uncompress failed'
+    else:
+        fname = filename
+    try:
+        ftype = sndhdr.whathdr(fname)
+        if ftype:
+            ftype = ftype[0] # All we're interested in
+    except IOError, msg:
+        # Normalize the various historical IOError payload shapes
+        # ((errno, strerror) tuple or other object) to a plain string
+        # before re-raising as this module's 'error'.
+        if type(msg) == type(()) and len(msg) == 2 and \
+                type(msg[0]) == type(0) and type(msg[1]) == type(''):
+            msg = msg[1]
+        if type(msg) != type(''):
+            msg = repr(msg)
+        raise error, filename + ': ' + msg
+    if ftype == 'aiff':
+        # Already AIFF: return the (possibly uncompressed) file as is.
+        return fname
+    if ftype is None or not ftype in table:
+        raise error, '%s: unsupported audio file type %r' % (filename, ftype)
+    # Run the sox pipeline for this type into a new temp file.
+    (fd, temp) = tempfile.mkstemp()
+    os.close(fd)
+    temps.append(temp)
+    sts = table[ftype].copy(fname, temp)
+    if sts:
+        raise error, filename + ': conversion to aiff failed'
+    return temp
diff --git a/depot_tools/release/win/python_24/Lib/token.py b/depot_tools/release/win/python_24/Lib/token.py
new file mode 100644
index 0000000..c4db6c51
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/token.py
@@ -0,0 +1,141 @@
+#! /usr/bin/env python
+
+"""Token constants (from "token.h")."""
+
+#  This file is automatically generated; please don't muck it up!
+#
+#  To update the symbols in this file, 'cd' to the top directory of
+#  the python source tree after building the interpreter and run:
+#
+#    python Lib/token.py
+
+#--start constants--
+ENDMARKER = 0
+NAME = 1
+NUMBER = 2
+STRING = 3
+NEWLINE = 4
+INDENT = 5
+DEDENT = 6
+LPAR = 7
+RPAR = 8
+LSQB = 9
+RSQB = 10
+COLON = 11
+COMMA = 12
+SEMI = 13
+PLUS = 14
+MINUS = 15
+STAR = 16
+SLASH = 17
+VBAR = 18
+AMPER = 19
+LESS = 20
+GREATER = 21
+EQUAL = 22
+DOT = 23
+PERCENT = 24
+BACKQUOTE = 25
+LBRACE = 26
+RBRACE = 27
+EQEQUAL = 28
+NOTEQUAL = 29
+LESSEQUAL = 30
+GREATEREQUAL = 31
+TILDE = 32
+CIRCUMFLEX = 33
+LEFTSHIFT = 34
+RIGHTSHIFT = 35
+DOUBLESTAR = 36
+PLUSEQUAL = 37
+MINEQUAL = 38
+STAREQUAL = 39
+SLASHEQUAL = 40
+PERCENTEQUAL = 41
+AMPEREQUAL = 42
+VBAREQUAL = 43
+CIRCUMFLEXEQUAL = 44
+LEFTSHIFTEQUAL = 45
+RIGHTSHIFTEQUAL = 46
+DOUBLESTAREQUAL = 47
+DOUBLESLASH = 48
+DOUBLESLASHEQUAL = 49
+AT = 50
+OP = 51
+ERRORTOKEN = 52
+N_TOKENS = 53
+NT_OFFSET = 256
+#--end constants--
+
+# Reverse mapping: numeric token value -> symbolic name, built from the
+# integer constants defined above (underscored loop vars avoid polluting
+# the reverse map itself).
+tok_name = {}
+for _name, _value in globals().items():
+    if type(_value) is type(0):
+        tok_name[_value] = _name
+
+
+def ISTERMINAL(x):
+    """Return true if 'x' is a terminal token value (below NT_OFFSET)."""
+    return x < NT_OFFSET
+
+def ISNONTERMINAL(x):
+    """Return true if 'x' is a non-terminal (grammar symbol) value."""
+    return x >= NT_OFFSET
+
+def ISEOF(x):
+    """Return true if 'x' marks the end of input (ENDMARKER)."""
+    return x == ENDMARKER
+
+
+def main():
+    """Regenerate the constants block of this file from token.h.
+
+    argv[1] (optional) overrides the "Include/token.h" input path;
+    argv[2] (optional) overrides the "Lib/token.py" output path.
+    """
+    import re
+    import sys
+    args = sys.argv[1:]
+    inFileName = args and args[0] or "Include/token.h"
+    outFileName = "Lib/token.py"
+    if len(args) > 1:
+        outFileName = args[1]
+    try:
+        fp = open(inFileName)
+    except IOError, err:
+        sys.stdout.write("I/O error: %s\n" % str(err))
+        sys.exit(1)
+    lines = fp.read().split("\n")
+    fp.close()
+    # Match lines of the form "#define NAME 123" in token.h.
+    prog = re.compile(
+        "#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)",
+        re.IGNORECASE)
+    tokens = {}
+    for line in lines:
+        match = prog.match(line)
+        if match:
+            name, val = match.group(1, 2)
+            val = int(val)
+            tokens[val] = name          # reverse so we can sort them...
+    keys = tokens.keys()
+    keys.sort()
+    # load the output skeleton from the target:
+    try:
+        fp = open(outFileName)
+    except IOError, err:
+        sys.stderr.write("I/O error: %s\n" % str(err))
+        sys.exit(2)
+    format = fp.read().split("\n")
+    fp.close()
+    # Splice the regenerated constants between the marker comments.
+    try:
+        start = format.index("#--start constants--") + 1
+        end = format.index("#--end constants--")
+    except ValueError:
+        sys.stderr.write("target does not contain format markers")
+        sys.exit(3)
+    lines = []
+    for val in keys:
+        lines.append("%s = %d" % (tokens[val], val))
+    format[start:end] = lines
+    try:
+        fp = open(outFileName, 'w')
+    except IOError, err:
+        sys.stderr.write("I/O error: %s\n" % str(err))
+        sys.exit(4)
+    fp.write("\n".join(format))
+    fp.close()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/depot_tools/release/win/python_24/Lib/tokenize.py b/depot_tools/release/win/python_24/Lib/tokenize.py
new file mode 100644
index 0000000..9087e84
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/tokenize.py
@@ -0,0 +1,295 @@
+"""Tokenization help for Python programs.
+
+generate_tokens(readline) is a generator that breaks a stream of
+text into Python tokens.  It accepts a readline-like method which is called
+repeatedly to get the next line of input (or "" for EOF).  It generates
+5-tuples with these members:
+
+    the token type (see token.py)
+    the token (a string)
+    the starting (row, column) indices of the token (a 2-tuple of ints)
+    the ending (row, column) indices of the token (a 2-tuple of ints)
+    the original line (string)
+
+It is designed to match the working of the Python tokenizer exactly, except
+that it produces COMMENT tokens for comments and gives type OP for all
+operators
+
+Older entry points
+    tokenize_loop(readline, tokeneater)
+    tokenize(readline, tokeneater=printtoken)
+are the same, except instead of generating tokens, tokeneater is a callback
+function to which the 5 fields described above are passed as 5 arguments,
+each time a new token is found."""
+
+__author__ = 'Ka-Ping Yee <ping@lfw.org>'
+__credits__ = \
+    'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro'
+
+import string, re
+from token import *
+
+import token
+__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
+           "generate_tokens", "NL"]
+del x
+del token
+
+# Extend the token module's numbering with two pseudo-tokens the C
+# tokenizer does not emit: COMMENT and NL (a non-logical newline).
+COMMENT = N_TOKENS
+tok_name[COMMENT] = 'COMMENT'
+NL = N_TOKENS + 1
+tok_name[NL] = 'NL'
+N_TOKENS += 2
+
+# Helpers for composing the regular expressions below.
+def group(*choices): return '(' + '|'.join(choices) + ')'
+def any(*choices): return group(*choices) + '*'
+def maybe(*choices): return group(*choices) + '?'
+
+Whitespace = r'[ \f\t]*'
+Comment = r'#[^\r\n]*'
+Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
+Name = r'[a-zA-Z_]\w*'
+
+# Numeric literals (optional long suffix l/L, imaginary suffix j/J).
+Hexnumber = r'0[xX][\da-fA-F]*[lL]?'
+Octnumber = r'0[0-7]*[lL]?'
+Decnumber = r'[1-9]\d*[lL]?'
+Intnumber = group(Hexnumber, Octnumber, Decnumber)
+Exponent = r'[eE][-+]?\d+'
+Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
+Expfloat = r'\d+' + Exponent
+Floatnumber = group(Pointfloat, Expfloat)
+Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
+Number = group(Imagnumber, Floatnumber, Intnumber)
+
+# Tail end of ' string.
+Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
+# Tail end of " string.
+Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
+# Tail end of ''' string.
+Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
+# Tail end of """ string.
+Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
+Triple = group("[uU]?[rR]?'''", '[uU]?[rR]?"""')
+# Single-line ' or " string.
+String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
+               r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')
+
+# Because of leftmost-then-longest match semantics, be sure to put the
+# longest operators first (e.g., if = came before ==, == would get
+# recognized as two instances of =).
+Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
+                 r"//=?",
+                 r"[+\-*/%&|^=<>]=?",
+                 r"~")
+
+Bracket = '[][(){}]'
+Special = group(r'\r?\n', r'[:;.,`@]')
+Funny = group(Operator, Bracket, Special)
+
+PlainToken = group(Number, Funny, String, Name)
+Token = Ignore + PlainToken
+
+# First (or only) line of ' or " string.
+ContStr = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
+                group("'", r'\\\r?\n'),
+                r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
+                group('"', r'\\\r?\n'))
+PseudoExtras = group(r'\\\r?\n', Comment, Triple)
+PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)
+
+# Compile the master patterns once at import time.
+tokenprog, pseudoprog, single3prog, double3prog = map(
+    re.compile, (Token, PseudoToken, Single3, Double3))
+# Map a string opener (prefix + quote) to the compiled regex matching
+# the rest of the string; bare prefixes map to None (opener incomplete).
+endprogs = {"'": re.compile(Single), '"': re.compile(Double),
+            "'''": single3prog, '"""': double3prog,
+            "r'''": single3prog, 'r"""': double3prog,
+            "u'''": single3prog, 'u"""': double3prog,
+            "ur'''": single3prog, 'ur"""': double3prog,
+            "R'''": single3prog, 'R"""': double3prog,
+            "U'''": single3prog, 'U"""': double3prog,
+            "uR'''": single3prog, 'uR"""': double3prog,
+            "Ur'''": single3prog, 'Ur"""': double3prog,
+            "UR'''": single3prog, 'UR"""': double3prog,
+            'r': None, 'R': None, 'u': None, 'U': None}
+
+# Sets (as dicts) of every recognized triple- and single-quote opener,
+# including the u/r prefix combinations.
+triple_quoted = {}
+for t in ("'''", '"""',
+          "r'''", 'r"""', "R'''", 'R"""',
+          "u'''", 'u"""', "U'''", 'U"""',
+          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
+          "uR'''", 'uR"""', "UR'''", 'UR"""'):
+    triple_quoted[t] = t
+single_quoted = {}
+for t in ("'", '"',
+          "r'", 'r"', "R'", 'R"',
+          "u'", 'u"', "U'", 'U"',
+          "ur'", 'ur"', "Ur'", 'Ur"',
+          "uR'", 'uR"', "UR'", 'UR"' ):
+    single_quoted[t] = t
+
+# Number of columns per indentation tab stop.
+tabsize = 8
+
+# Raised when EOF is reached inside an unterminated string or statement.
+class TokenError(Exception): pass
+
+# May be raised by a tokeneater callback to stop tokenize_loop() early.
+class StopTokenizing(Exception): pass
+
+def printtoken(type, token, (srow, scol), (erow, ecol), line): # for testing
+    # Default tokeneater: dump "row,col-row,col: TYPE 'token'" to stdout.
+    print "%d,%d-%d,%d:\t%s\t%s" % \
+        (srow, scol, erow, ecol, tok_name[type], repr(token))
+
+def tokenize(readline, tokeneater=printtoken):
+    """
+    The tokenize() function accepts two parameters: one representing the
+    input stream, and one providing an output mechanism for tokenize().
+
+    The first parameter, readline, must be a callable object which provides
+    the same interface as the readline() method of built-in file objects.
+    Each call to the function should return one line of input as a string.
+
+    The second parameter, tokeneater, must also be a callable object. It is
+    called once for each token, with five arguments, corresponding to the
+    tuples generated by generate_tokens().
+    """
+    try:
+        tokenize_loop(readline, tokeneater)
+    except StopTokenizing:
+        # The tokeneater raised StopTokenizing to end tokenization
+        # early; this is normal, silent termination.
+        pass
+
+# backwards compatible interface
+def tokenize_loop(readline, tokeneater):
+    """Feed each 5-tuple from generate_tokens() to 'tokeneater'."""
+    for token_info in generate_tokens(readline):
+        tokeneater(*token_info)
+
+def generate_tokens(readline):
+    """
+    The generate_tokens() generator requires one argument, readline, which
+    must be a callable object which provides the same interface as the
+    readline() method of built-in file objects. Each call to the function
+    should return one line of input as a string.
+
+    The generator produces 5-tuples with these members: the token type; the
+    token string; a 2-tuple (srow, scol) of ints specifying the row and
+    column where the token begins in the source; a 2-tuple (erow, ecol) of
+    ints specifying the row and column where the token ends in the source;
+    and the line on which the token was found. The line passed is the
+    logical line; continuation lines are included.
+    """
+    # lnum: current line number; parenlev: bracket nesting depth;
+    # continued: previous line ended in a backslash continuation.
+    lnum = parenlev = continued = 0
+    namechars, numchars = string.ascii_letters + '_', '0123456789'
+    # contstr accumulates the text of an unfinished string literal;
+    # needcont means it must continue via backslash-newline (single
+    # quotes), contline accumulates the raw source lines.
+    contstr, needcont = '', 0
+    contline = None
+    # Stack of indentation columns; indents[-1] is the current level.
+    indents = [0]
+
+    while 1:                                   # loop over lines in stream
+        line = readline()
+        lnum = lnum + 1
+        pos, max = 0, len(line)
+
+        if contstr:                            # continued string
+            if not line:
+                raise TokenError, ("EOF in multi-line string", strstart)
+            endmatch = endprog.match(line)
+            if endmatch:
+                pos = end = endmatch.end(0)
+                yield (STRING, contstr + line[:end],
+                           strstart, (lnum, end), contline + line)
+                contstr, needcont = '', 0
+                contline = None
+            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
+                # Single-quoted string was not continued with a
+                # backslash: emit what we have as an error token.
+                yield (ERRORTOKEN, contstr + line,
+                           strstart, (lnum, len(line)), contline)
+                contstr = ''
+                contline = None
+                continue
+            else:
+                contstr = contstr + line
+                contline = contline + line
+                continue
+
+        elif parenlev == 0 and not continued:  # new statement
+            if not line: break
+            column = 0
+            while pos < max:                   # measure leading whitespace
+                if line[pos] == ' ': column = column + 1
+                elif line[pos] == '\t': column = (column/tabsize + 1)*tabsize
+                elif line[pos] == '\f': column = 0
+                else: break
+                pos = pos + 1
+            if pos == max: break
+
+            if line[pos] in '#\r\n':           # skip comments or blank lines
+                yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
+                           (lnum, pos), (lnum, len(line)), line)
+                continue
+
+            if column > indents[-1]:           # count indents or dedents
+                indents.append(column)
+                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
+            while column < indents[-1]:
+                indents = indents[:-1]
+                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)
+
+        else:                                  # continued statement
+            if not line:
+                raise TokenError, ("EOF in multi-line statement", (lnum, 0))
+            continued = 0
+
+        while pos < max:
+            pseudomatch = pseudoprog.match(line, pos)
+            if pseudomatch:                                # scan for tokens
+                start, end = pseudomatch.span(1)
+                spos, epos, pos = (lnum, start), (lnum, end), end
+                token, initial = line[start:end], line[start]
+
+                if initial in numchars or \
+                   (initial == '.' and token != '.'):      # ordinary number
+                    yield (NUMBER, token, spos, epos, line)
+                elif initial in '\r\n':
+                    # NL inside brackets, NEWLINE (statement end) outside.
+                    yield (parenlev > 0 and NL or NEWLINE,
+                               token, spos, epos, line)
+                elif initial == '#':
+                    yield (COMMENT, token, spos, epos, line)
+                elif token in triple_quoted:
+                    endprog = endprogs[token]
+                    endmatch = endprog.match(line, pos)
+                    if endmatch:                           # all on one line
+                        pos = endmatch.end(0)
+                        token = line[start:pos]
+                        yield (STRING, token, spos, (lnum, pos), line)
+                    else:
+                        strstart = (lnum, start)           # multiple lines
+                        contstr = line[start:]
+                        contline = line
+                        break
+                elif initial in single_quoted or \
+                    token[:2] in single_quoted or \
+                    token[:3] in single_quoted:
+                    if token[-1] == '\n':                  # continued string
+                        strstart = (lnum, start)
+                        endprog = (endprogs[initial] or endprogs[token[1]] or
+                                   endprogs[token[2]])
+                        contstr, needcont = line[start:], 1
+                        contline = line
+                        break
+                    else:                                  # ordinary string
+                        yield (STRING, token, spos, epos, line)
+                elif initial in namechars:                 # ordinary name
+                    yield (NAME, token, spos, epos, line)
+                elif initial == '\\':                      # continued stmt
+                    continued = 1
+                else:
+                    if initial in '([{': parenlev = parenlev + 1
+                    elif initial in ')]}': parenlev = parenlev - 1
+                    yield (OP, token, spos, epos, line)
+            else:
+                # Nothing matched at this position: emit the single
+                # character as an error token and advance.
+                yield (ERRORTOKEN, line[pos],
+                           (lnum, pos), (lnum, pos+1), line)
+                pos = pos + 1
+
+    for indent in indents[1:]:                 # pop remaining indent levels
+        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
+    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
+
+if __name__ == '__main__':                     # testing
+    import sys
+    # Tokenize the named file, or stdin when no argument is given.
+    if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline)
+    else: tokenize(sys.stdin.readline)
diff --git a/depot_tools/release/win/python_24/Lib/trace.py b/depot_tools/release/win/python_24/Lib/trace.py
new file mode 100644
index 0000000..f167fcf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/trace.py
@@ -0,0 +1,788 @@
+#!/usr/bin/env python
+
+# portions copyright 2001, Autonomous Zones Industries, Inc., all rights...
+# err...  reserved and offered to the public under the terms of the
+# Python 2.2 license.
+# Author: Zooko O'Whielacronx
+# http://zooko.com/
+# mailto:zooko@zooko.com
+#
+# Copyright 2000, Mojam Media, Inc., all rights reserved.
+# Author: Skip Montanaro
+#
+# Copyright 1999, Bioreason, Inc., all rights reserved.
+# Author: Andrew Dalke
+#
+# Copyright 1995-1997, Automatrix, Inc., all rights reserved.
+# Author: Skip Montanaro
+#
+# Copyright 1991-1995, Stichting Mathematisch Centrum, all rights reserved.
+#
+#
+# Permission to use, copy, modify, and distribute this Python software and
+# its associated documentation for any purpose without fee is hereby
+# granted, provided that the above copyright notice appears in all copies,
+# and that both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of neither Automatrix,
+# Bioreason or Mojam Media be used in advertising or publicity pertaining to
+# distribution of the software without specific, written prior permission.
+#
+"""program/module to trace Python program or function execution
+
+Sample use, command line:
+  trace.py -c -f counts --ignore-dir '$prefix' spam.py eggs
+  trace.py -t --ignore-dir '$prefix' spam.py eggs
+  trace.py --trackcalls spam.py eggs
+
+Sample use, programmatically
+   # create a Trace object, telling it what to ignore, and whether to
+   # do tracing or line-counting or both.
+   trace = trace.Trace(ignoredirs=[sys.prefix, sys.exec_prefix,], trace=0,
+                       count=1)
+   # run the new command using the given trace
+   trace.run('main()')
+   # make a report, telling it where you want output
+   r = trace.results()
+   r.write_results(show_missing=True)
+"""
+
+import linecache
+import os
+import re
+import sys
+import threading
+import token
+import tokenize
+import types
+import gc
+
+try:
+    import cPickle
+    pickle = cPickle
+except ImportError:
+    import pickle
+
+def usage(outfile):
+    """Write the command-line help text for this program to 'outfile'."""
+    # NOTE(review): the --coverdir line below reads "Directory where the
+    # report files." -- missing a verb ("go").  The text is part of the
+    # user-visible output, so it is deliberately left untouched here.
+    outfile.write("""Usage: %s [OPTIONS] <file> [ARGS]
+
+Meta-options:
+--help                Display this help then exit.
+--version             Output version information then exit.
+
+Otherwise, exactly one of the following three options must be given:
+-t, --trace           Print each line to sys.stdout before it is executed.
+-c, --count           Count the number of times each line is executed
+                      and write the counts to <module>.cover for each
+                      module executed, in the module's directory.
+                      See also `--coverdir', `--file', `--no-report' below.
+-l, --listfuncs       Keep track of which functions are executed at least
+                      once and write the results to sys.stdout after the
+                      program exits.
+-T, --trackcalls      Keep track of caller/called pairs and write the
+                      results to sys.stdout after the program exits.
+-r, --report          Generate a report from a counts file; do not execute
+                      any code.  `--file' must specify the results file to
+                      read, which must have been created in a previous run
+                      with `--count --file=FILE'.
+
+Modifiers:
+-f, --file=<file>     File to accumulate counts over several runs.
+-R, --no-report       Do not generate the coverage report files.
+                      Useful if you want to accumulate over several runs.
+-C, --coverdir=<dir>  Directory where the report files.  The coverage
+                      report for <package>.<module> is written to file
+                      <dir>/<package>/<module>.cover.
+-m, --missing         Annotate executable lines that were not executed
+                      with '>>>>>> '.
+-s, --summary         Write a brief summary on stdout for each file.
+                      (Can only be used with --count or --report.)
+
+Filters, may be repeated multiple times:
+--ignore-module=<mod> Ignore the given module and its submodules
+                      (if it is a package).
+--ignore-dir=<dir>    Ignore files in the given directory (multiple
+                      directories can be joined by os.pathsep).
+""" % sys.argv[0])
+
+PRAGMA_NOCOVER = "#pragma NO COVER"
+
+# Simple rx to find lines with no code.
+rx_blank = re.compile(r'^\s*(#.*)?$')
+
+class Ignore:
+    """Decides which (filename, modulename) pairs tracing should skip."""
+
+    def __init__(self, modules = None, dirs = None):
+        self._mods = modules or []
+        self._dirs = dirs or []
+
+        self._dirs = map(os.path.normpath, self._dirs)
+        # Cache of previous answers; '<string>' (exec'd code) is always
+        # ignored.
+        self._ignore = { '<string>': 1 }
+
+    def names(self, filename, modulename):
+        # Return 1 if this module/file should be ignored, else 0.
+        # Answers are memoized in self._ignore keyed by module name.
+        if self._ignore.has_key(modulename):
+            return self._ignore[modulename]
+
+        # haven't seen this one before, so see if the module name is
+        # on the ignore list.  Need to take some care since ignoring
+        # "cmp" mustn't mean ignoring "cmpcache" but ignoring
+        # "Spam" must also mean ignoring "Spam.Eggs".
+        for mod in self._mods:
+            if mod == modulename:  # Identical names, so ignore
+                self._ignore[modulename] = 1
+                return 1
+            # check if the module is a proper submodule of something on
+            # the ignore list
+            n = len(mod)
+            # (will not overflow since if the first n characters are the
+            # same and the name has not already occurred, then the size
+            # of "name" is greater than that of "mod")
+            if mod == modulename[:n] and modulename[n] == '.':
+                self._ignore[modulename] = 1
+                return 1
+
+        # Now check that __file__ isn't in one of the directories
+        if filename is None:
+            # must be a built-in, so we must ignore
+            self._ignore[modulename] = 1
+            return 1
+
+        # Ignore a file when it contains one of the ignorable paths
+        for d in self._dirs:
+            # The '+ os.sep' is to ensure that d is a parent directory,
+            # as compared to cases like:
+            #  d = "/usr/local"
+            #  filename = "/usr/local.py"
+            # or
+            #  d = "/usr/local.py"
+            #  filename = "/usr/local.py"
+            if filename.startswith(d + os.sep):
+                self._ignore[modulename] = 1
+                return 1
+
+        # Tried the different ways, so we don't ignore this module
+        self._ignore[modulename] = 0
+        return 0
+
+def modname(path):
+    """Return a plausible module name for the path."""
+
+    base = os.path.basename(path)
+    filename, ext = os.path.splitext(base)
+    return filename
+
+def fullmodname(path):
+    """Return a plausible module name for the path."""
+
+    # If the file 'path' is part of a package, then the filename isn't
+    # enough to uniquely identify it.  Try to do the right thing by
+    # looking in sys.path for the longest matching prefix.  We'll
+    # assume that the rest is the package name.
+
+    longest = ""
+    for dir in sys.path:
+        # NOTE(review): if path == dir exactly, path[len(dir)] raises
+        # IndexError -- presumably callers always pass file paths below
+        # a sys.path entry; confirm before relying on this.
+        if path.startswith(dir) and path[len(dir)] == os.path.sep:
+            if len(dir) > len(longest):
+                longest = dir
+
+    if longest:
+        base = path[len(longest) + 1:]
+    else:
+        base = path
+    # Turn the package-relative path into a dotted module name.
+    base = base.replace(os.sep, ".")
+    if os.altsep:
+        base = base.replace(os.altsep, ".")
+    filename, ext = os.path.splitext(base)
+    return filename
+
+class CoverageResults:
+    def __init__(self, counts=None, calledfuncs=None, infile=None,
+                 callers=None, outfile=None):
+        # counts maps (filename, lineno) -> execution count;
+        # calledfuncs and callers are used as sets (only keys matter).
+        # Inputs are copied so callers' dicts are never mutated.
+        self.counts = counts
+        if self.counts is None:
+            self.counts = {}
+        self.counter = self.counts.copy() # map (filename, lineno) to count
+        self.calledfuncs = calledfuncs
+        if self.calledfuncs is None:
+            self.calledfuncs = {}
+        self.calledfuncs = self.calledfuncs.copy()
+        self.callers = callers
+        if self.callers is None:
+            self.callers = {}
+        self.callers = self.callers.copy()
+        self.infile = infile
+        self.outfile = outfile
+        if self.infile:
+            # Try to merge existing counts file.
+            try:
+                counts, calledfuncs, callers = \
+                        pickle.load(open(self.infile, 'rb'))
+                self.update(self.__class__(counts, calledfuncs, callers))
+            except (IOError, EOFError, ValueError), err:
+                # A missing or corrupt counts file is non-fatal: start
+                # from scratch, but tell the user on stderr.
+                print >> sys.stderr, ("Skipping counts file %r: %s"
+                                      % (self.infile, err))
+
+    def update(self, other):
+        """Merge in the data from another CoverageResults"""
+        counts = self.counts
+        calledfuncs = self.calledfuncs
+        callers = self.callers
+        other_counts = other.counts
+        other_calledfuncs = other.calledfuncs
+        other_callers = other.callers
+
+        for key in other_counts.keys():
+            counts[key] = counts.get(key, 0) + other_counts[key]
+
+        for key in other_calledfuncs.keys():
+            calledfuncs[key] = 1
+
+        for key in other_callers.keys():
+            callers[key] = 1
+
+    def write_results(self, show_missing=True, summary=False, coverdir=None):
+        """
+        @param coverdir
+        """
+        if self.calledfuncs:
+            print
+            print "functions called:"
+            calls = self.calledfuncs.keys()
+            calls.sort()
+            for filename, modulename, funcname in calls:
+                print ("filename: %s, modulename: %s, funcname: %s"
+                       % (filename, modulename, funcname))
+
+        if self.callers:
+            print
+            print "calling relationships:"
+            calls = self.callers.keys()
+            calls.sort()
+            lastfile = lastcfile = ""
+            for ((pfile, pmod, pfunc), (cfile, cmod, cfunc)) in calls:
+                if pfile != lastfile:
+                    print
+                    print "***", pfile, "***"
+                    lastfile = pfile
+                    lastcfile = ""
+                if cfile != pfile and lastcfile != cfile:
+                    print "  -->", cfile
+                    lastcfile = cfile
+                print "    %s.%s -> %s.%s" % (pmod, pfunc, cmod, cfunc)
+
+        # turn the counts data ("(filename, lineno) = count") into something
+        # accessible on a per-file basis
+        per_file = {}
+        for filename, lineno in self.counts.keys():
+            lines_hit = per_file[filename] = per_file.get(filename, {})
+            lines_hit[lineno] = self.counts[(filename, lineno)]
+
+        # accumulate summary info, if needed
+        sums = {}
+
+        for filename, count in per_file.iteritems():
+            # skip some "files" we don't care about...
+            if filename == "<string>":
+                continue
+
+            if filename.endswith(".pyc") or filename.endswith(".pyo"):
+                filename = filename[:-1]
+
+            if coverdir is None:
+                dir = os.path.dirname(os.path.abspath(filename))
+                modulename = modname(filename)
+            else:
+                dir = coverdir
+                if not os.path.exists(dir):
+                    os.makedirs(dir)
+                modulename = fullmodname(filename)
+
+            # If desired, get a list of the line numbers which represent
+            # executable content (returned as a dict for better lookup speed)
+            if show_missing:
+                lnotab = find_executable_linenos(filename)
+            else:
+                lnotab = {}
+
+            source = linecache.getlines(filename)
+            coverpath = os.path.join(dir, modulename + ".cover")
+            n_hits, n_lines = self.write_results_file(coverpath, source,
+                                                      lnotab, count)
+
+            if summary and n_lines:
+                percent = int(100 * n_hits / n_lines)
+                sums[modulename] = n_lines, percent, modulename, filename
+
+        if summary and sums:
+            mods = sums.keys()
+            mods.sort()
+            print "lines   cov%   module   (path)"
+            for m in mods:
+                n_lines, percent, modulename, filename = sums[m]
+                print "%5d   %3d%%   %s   (%s)" % sums[m]
+
+        if self.outfile:
+            # try and store counts and module info into self.outfile
+            try:
+                pickle.dump((self.counts, self.calledfuncs, self.callers),
+                            open(self.outfile, 'wb'), 1)
+            except IOError, err:
+                print >> sys.stderr, "Can't save counts files because %s" % err
+
+    def write_results_file(self, path, lines, lnotab, lines_hit):
+        """Return a coverage results file in path."""
+
+        try:
+            outfile = open(path, "w")
+        except IOError, err:
+            print >> sys.stderr, ("trace: Could not open %r for writing: %s"
+                                  "- skipping" % (path, err))
+            return 0, 0
+
+        n_lines = 0
+        n_hits = 0
+        for i, line in enumerate(lines):
+            lineno = i + 1
+            # do the blank/comment match to try to mark more lines
+            # (help the reader find stuff that hasn't been covered)
+            if lineno in lines_hit:
+                outfile.write("%5d: " % lines_hit[lineno])
+                n_hits += 1
+                n_lines += 1
+            elif rx_blank.match(line):
+                outfile.write("       ")
+            else:
+                # lines preceded by no marks weren't hit
+                # Highlight them if so indicated, unless the line contains
+                # #pragma: NO COVER
+                if lineno in lnotab and not PRAGMA_NOCOVER in lines[i]:
+                    outfile.write(">>>>>> ")
+                    n_lines += 1
+                else:
+                    outfile.write("       ")
+            outfile.write(lines[i].expandtabs(8))
+        outfile.close()
+
+        return n_hits, n_lines
+
def find_lines_from_code(code, strs):
    """Return dict where keys are lines in the line number table."""
    linenos = {}

    # co_lnotab packs (bytecode delta, line delta) byte pairs; every
    # second byte is a line-number increment.  Items are 1-char strings
    # when co_lnotab is a str and ints when it is a bytes object, so
    # only call ord() when needed.  (Removed unused table_length and
    # docstring locals from the original.)
    line_increments = []
    for c in code.co_lnotab[1::2]:
        if isinstance(c, str):
            c = ord(c)
        line_increments.append(c)

    lineno = code.co_firstlineno
    for li in line_increments:
        lineno += li
        # Lines listed in 'strs' are (part of) docstrings -- skip them.
        if lineno not in strs:
            linenos[lineno] = 1

    return linenos
+
def find_lines(code, strs):
    """Return lineno dict for all code objects reachable from code."""
    # Start with the lines recorded directly in this code object.
    linenos = find_lines_from_code(code, strs)

    # Nested functions, classes and lambdas are stored as code objects
    # in co_consts; fold in their line numbers recursively.
    for const in code.co_consts:
        if isinstance(const, types.CodeType):
            linenos.update(find_lines(const, strs))
    return linenos
+
def find_strings(filename):
    """Return a dict of possible docstring positions.

    The dict maps each line number that contains only a string (or a
    part of a triple-quoted string) to 1.
    """
    positions = {}
    f = open(filename)
    # Seed with INDENT so that a module-level docstring -- the very
    # first token in the file -- is treated like one after an indent.
    prev_ttype = token.INDENT
    for tok in tokenize.generate_tokens(f.readline):
        ttype, tstr, start, end, line = tok
        if ttype == token.STRING and prev_ttype == token.INDENT:
            # Mark every line the (possibly multi-line) string spans.
            for lineno in range(start[0], end[0] + 1):
                positions[lineno] = 1
        prev_ttype = ttype
    f.close()
    return positions
+
def find_executable_linenos(filename):
    """Return dict where keys are line numbers in the line number table."""
    try:
        # Universal-newline mode so sources with Mac/DOS line endings
        # still compile cleanly.
        prog = open(filename, "rU").read()
    except IOError, err:
        # Unreadable source is not fatal -- warn and report no lines.
        print >> sys.stderr, ("Not printing coverage data for %r: %s"
                              % (filename, err))
        return {}
    code = compile(prog, filename, "exec")
    # Exclude lines that are (part of) docstrings from the result.
    strs = find_strings(filename)
    return find_lines(code, strs)
+
class Trace:
    """Traces execution of Python code, collecting line counts, called
    functions and/or caller/callee relationships depending on the
    constructor flags.  Collected data is exposed via results() as a
    CoverageResults object.
    """
    def __init__(self, count=1, trace=1, countfuncs=0, countcallers=0,
                 ignoremods=(), ignoredirs=(), infile=None, outfile=None):
        """
        @param count true iff it should count number of times each
                     line is executed
        @param trace true iff it should print out each line that is
                     being counted
        @param countfuncs true iff it should just output a list of
                     (filename, modulename, funcname,) for functions
                     that were called at least once;  This overrides
                     `count' and `trace'
        @param countcallers true iff it should record caller/callee
                     pairs;  This overrides all of the above
        @param ignoremods a list of the names of modules to ignore
        @param ignoredirs a list of the names of directories to ignore
                     all of the (recursive) contents of
        @param infile file from which to read stored counts to be
                     added into the results
        @param outfile file in which to write the results
        """
        self.infile = infile
        self.outfile = outfile
        self.ignore = Ignore(ignoremods, ignoredirs)
        self.counts = {}   # keys are (filename, linenumber)
        self.blabbed = {} # for debugging
        self.pathtobasename = {} # for memoizing os.path.basename
        self.donothing = 0
        self.trace = trace
        self._calledfuncs = {}
        self._callers = {}
        self._caller_cache = {}
        # Select the tracing strategy.  Caller tracking and function
        # counting need only 'call' events (a global trace function);
        # line tracing/counting additionally installs a local trace
        # function in each frame.
        if countcallers:
            self.globaltrace = self.globaltrace_trackcallers
        elif countfuncs:
            self.globaltrace = self.globaltrace_countfuncs
        elif trace and count:
            self.globaltrace = self.globaltrace_lt
            self.localtrace = self.localtrace_trace_and_count
        elif trace:
            self.globaltrace = self.globaltrace_lt
            self.localtrace = self.localtrace_trace
        elif count:
            self.globaltrace = self.globaltrace_lt
            self.localtrace = self.localtrace_count
        else:
            # Ahem -- do nothing?  Okay.
            self.donothing = 1

    def run(self, cmd):
        """Exec the string 'cmd' in __main__'s namespace with tracing
        installed (for this thread and, via threading.settrace, for
        threads started while it runs)."""
        import __main__
        dict = __main__.__dict__
        if not self.donothing:
            sys.settrace(self.globaltrace)
            threading.settrace(self.globaltrace)
        try:
            exec cmd in dict, dict
        finally:
            # Always uninstall the trace hooks, even if cmd raised.
            if not self.donothing:
                sys.settrace(None)
                threading.settrace(None)

    def runctx(self, cmd, globals=None, locals=None):
        """Like run(), but exec 'cmd' in the supplied globals/locals
        dicts (each defaulting to a fresh empty dict)."""
        if globals is None: globals = {}
        if locals is None: locals = {}
        if not self.donothing:
            sys.settrace(self.globaltrace)
            threading.settrace(self.globaltrace)
        try:
            exec cmd in globals, locals
        finally:
            if not self.donothing:
                sys.settrace(None)
                threading.settrace(None)

    def runfunc(self, func, *args, **kw):
        """Call func(*args, **kw) with tracing installed in this
        thread and return its result."""
        result = None
        if not self.donothing:
            sys.settrace(self.globaltrace)
        try:
            result = func(*args, **kw)
        finally:
            if not self.donothing:
                sys.settrace(None)
        return result

    def file_module_function_of(self, frame):
        """Return (filename, modulename, funcname) describing the code
        executing in 'frame'; funcname is prefixed with a class name
        when the owning class can be identified via gc referrers."""
        code = frame.f_code
        filename = code.co_filename
        if filename:
            modulename = modname(filename)
        else:
            modulename = None

        funcname = code.co_name
        clsname = None
        if code in self._caller_cache:
            if self._caller_cache[code] is not None:
                clsname = self._caller_cache[code]
        else:
            self._caller_cache[code] = None
            ## use of gc.get_referrers() was suggested by Michael Hudson
            # all functions which refer to this code object
            funcs = [f for f in gc.get_referrers(code)
                         if hasattr(f, "func_doc")]
            # require len(func) == 1 to avoid ambiguity caused by calls to
            # new.function(): "In the face of ambiguity, refuse the
            # temptation to guess."
            if len(funcs) == 1:
                dicts = [d for d in gc.get_referrers(funcs[0])
                             if isinstance(d, dict)]
                if len(dicts) == 1:
                    classes = [c for c in gc.get_referrers(dicts[0])
                                   if hasattr(c, "__bases__")]
                    if len(classes) == 1:
                        # ditto for new.classobj()
                        clsname = str(classes[0])
                        # cache the result - assumption is that new.* is
                        # not called later to disturb this relationship
                        # _caller_cache could be flushed if functions in
                        # the new module get called.
                        self._caller_cache[code] = clsname
        if clsname is not None:
            # final hack - module name shows up in str(cls), but we've already
            # computed module name, so remove it
            clsname = clsname.split(".")[1:]
            clsname = ".".join(clsname)
            funcname = "%s.%s" % (clsname, funcname)

        return filename, modulename, funcname

    def globaltrace_trackcallers(self, frame, why, arg):
        """Handler for call events.

        Adds information about who called who to the self._callers dict.
        """
        if why == 'call':
            # XXX Should do a better job of identifying methods
            this_func = self.file_module_function_of(frame)
            parent_func = self.file_module_function_of(frame.f_back)
            self._callers[(parent_func, this_func)] = 1

    def globaltrace_countfuncs(self, frame, why, arg):
        """Handler for call events.

        Adds (filename, modulename, funcname) to the self._calledfuncs dict.
        """
        if why == 'call':
            this_func = self.file_module_function_of(frame)
            self._calledfuncs[this_func] = 1

    def globaltrace_lt(self, frame, why, arg):
        """Handler for call events.

        If the code block being entered is to be ignored, returns `None',
        else returns self.localtrace.
        """
        if why == 'call':
            code = frame.f_code
            filename = code.co_filename
            if filename:
                # XXX modname() doesn't work right for packages, so
                # the ignore support won't work right for packages
                modulename = modname(filename)
                if modulename is not None:
                    ignore_it = self.ignore.names(filename, modulename)
                    if not ignore_it:
                        if self.trace:
                            print (" --- modulename: %s, funcname: %s"
                                   % (modulename, code.co_name))
                        return self.localtrace
            else:
                return None

    def localtrace_trace_and_count(self, frame, why, arg):
        """Handler for line events: count the line and echo it."""
        if why == "line":
            # record the file name and line number of every trace
            filename = frame.f_code.co_filename
            lineno = frame.f_lineno
            key = filename, lineno
            self.counts[key] = self.counts.get(key, 0) + 1

            bname = os.path.basename(filename)
            print "%s(%d): %s" % (bname, lineno,
                                  linecache.getline(filename, lineno)),
        return self.localtrace

    def localtrace_trace(self, frame, why, arg):
        """Handler for line events: echo each executed line."""
        if why == "line":
            # record the file name and line number of every trace
            filename = frame.f_code.co_filename
            lineno = frame.f_lineno

            bname = os.path.basename(filename)
            print "%s(%d): %s" % (bname, lineno,
                                  linecache.getline(filename, lineno)),
        return self.localtrace

    def localtrace_count(self, frame, why, arg):
        """Handler for line events: bump the (filename, lineno) count."""
        if why == "line":
            filename = frame.f_code.co_filename
            lineno = frame.f_lineno
            key = filename, lineno
            self.counts[key] = self.counts.get(key, 0) + 1
        return self.localtrace

    def results(self):
        """Return the collected data as a CoverageResults object."""
        return CoverageResults(self.counts, infile=self.infile,
                               outfile=self.outfile,
                               calledfuncs=self._calledfuncs,
                               callers=self._callers)
+
+def _err_exit(msg):
+    sys.stderr.write("%s: %s\n" % (sys.argv[0], msg))
+    sys.exit(1)
+
def main(argv=None):
    """Command-line entry point: parse options, then either report on
    an existing counts file or run the given program under a Trace
    object and optionally report afterwards.
    """
    import getopt

    if argv is None:
        argv = sys.argv
    try:
        opts, prog_argv = getopt.getopt(argv[1:], "tcrRf:d:msC:lT",
                                        ["help", "version", "trace", "count",
                                         "report", "no-report", "summary",
                                         "file=", "missing",
                                         "ignore-module=", "ignore-dir=",
                                         "coverdir=", "listfuncs",
                                         "trackcalls"])

    except getopt.error, msg:
        sys.stderr.write("%s: %s\n" % (sys.argv[0], msg))
        sys.stderr.write("Try `%s --help' for more information\n"
                         % sys.argv[0])
        sys.exit(1)

    # Option defaults.
    trace = 0
    count = 0
    report = 0
    no_report = 0
    counts_file = None
    missing = 0
    ignore_modules = []
    ignore_dirs = []
    coverdir = None
    summary = 0
    listfuncs = False
    countcallers = False

    for opt, val in opts:
        if opt == "--help":
            # usage() is defined elsewhere in this module.
            usage(sys.stdout)
            sys.exit(0)

        if opt == "--version":
            sys.stdout.write("trace 2.0\n")
            sys.exit(0)

        if opt == "-T" or opt == "--trackcalls":
            countcallers = True
            continue

        if opt == "-l" or opt == "--listfuncs":
            listfuncs = True
            continue

        if opt == "-t" or opt == "--trace":
            trace = 1
            continue

        if opt == "-c" or opt == "--count":
            count = 1
            continue

        if opt == "-r" or opt == "--report":
            report = 1
            continue

        if opt == "-R" or opt == "--no-report":
            no_report = 1
            continue

        if opt == "-f" or opt == "--file":
            counts_file = val
            continue

        if opt == "-m" or opt == "--missing":
            missing = 1
            continue

        if opt == "-C" or opt == "--coverdir":
            coverdir = val
            continue

        if opt == "-s" or opt == "--summary":
            summary = 1
            continue

        if opt == "--ignore-module":
            ignore_modules.append(val)
            continue

        if opt == "--ignore-dir":
            # The value may hold several os.pathsep-separated entries,
            # each of which may use environment variables and the
            # $prefix/$exec_prefix shorthands.
            for s in val.split(os.pathsep):
                s = os.path.expandvars(s)
                # should I also call expanduser? (after all, could use $HOME)

                s = s.replace("$prefix",
                              os.path.join(sys.prefix, "lib",
                                           "python" + sys.version[:3]))
                s = s.replace("$exec_prefix",
                              os.path.join(sys.exec_prefix, "lib",
                                           "python" + sys.version[:3]))
                s = os.path.normpath(s)
                ignore_dirs.append(s)
            continue

        assert 0, "Should never get here"

    # Validate the option combination before doing any work.
    if listfuncs and (count or trace):
        _err_exit("cannot specify both --listfuncs and (--trace or --count)")

    if not (count or trace or report or listfuncs or countcallers):
        _err_exit("must specify one of --trace, --count, --report, "
                  "--listfuncs, or --trackcalls")

    if report and no_report:
        _err_exit("cannot specify both --report and --no-report")

    if report and not counts_file:
        _err_exit("--report requires a --file")

    if no_report and len(prog_argv) == 0:
        _err_exit("missing name of file to run")

    # everything is ready
    if report:
        # Report-only mode: just load and print the saved counts.
        results = CoverageResults(infile=counts_file, outfile=counts_file)
        results.write_results(missing, summary=summary, coverdir=coverdir)
    else:
        # Run the target program with sys.argv/sys.path set up as if
        # it had been invoked directly.
        sys.argv = prog_argv
        progname = prog_argv[0]
        sys.path[0] = os.path.split(progname)[0]

        t = Trace(count, trace, countfuncs=listfuncs,
                  countcallers=countcallers, ignoremods=ignore_modules,
                  ignoredirs=ignore_dirs, infile=counts_file,
                  outfile=counts_file)
        try:
            t.run('execfile(%r)' % (progname,))
        except IOError, err:
            _err_exit("Cannot run file %r because: %s" % (sys.argv[0], err))
        except SystemExit:
            pass

        results = t.results()

        if not no_report:
            results.write_results(missing, summary=summary, coverdir=coverdir)
+
# Allow use both as an importable module and as a command-line script.
if __name__=='__main__':
    main()
diff --git a/depot_tools/release/win/python_24/Lib/traceback.py b/depot_tools/release/win/python_24/Lib/traceback.py
new file mode 100644
index 0000000..95cde2bd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/traceback.py
@@ -0,0 +1,297 @@
+"""Extract, format and print information about Python stack traces."""
+
+import linecache
+import sys
+import types
+
+__all__ = ['extract_stack', 'extract_tb', 'format_exception',
+           'format_exception_only', 'format_list', 'format_stack',
+           'format_tb', 'print_exc', 'format_exc', 'print_exception',
+           'print_last', 'print_stack', 'print_tb', 'tb_lineno']
+
+def _print(file, str='', terminator='\n'):
+    file.write(str+terminator)
+
+
def print_list(extracted_list, file=None):
    """Print pre-extracted traceback entries (as produced by
    extract_tb() or extract_stack()) as a formatted stack trace to
    'file', which defaults to sys.stderr."""
    if file is None:
        file = sys.stderr
    for filename, lineno, name, line in extracted_list:
        header = '  File "%s", line %d, in %s' % (filename, lineno, name)
        _print(file, header)
        if line:
            _print(file, '    %s' % line.strip())
+
def format_list(extracted_list):
    """Format pre-extracted traceback entries for printing.

    Takes a list of (filename, lineno, name, line) tuples as produced
    by extract_tb() or extract_stack() and returns one string per
    entry, each ending in a newline.  Entries whose source text is
    available get a second, indented line with that text.
    """
    formatted = []
    for filename, lineno, name, line in extracted_list:
        entry = '  File "%s", line %d, in %s\n' % (filename, lineno, name)
        if line:
            entry = entry + '    %s\n' % line.strip()
        formatted.append(entry)
    return formatted
+
+
def print_tb(tb, limit=None, file=None):
    """Print up to 'limit' stack trace entries from the traceback 'tb'.

    If 'limit' is omitted or None, all entries are printed (subject to
    sys.tracebacklimit when set).  If 'file' is omitted or None, the
    output goes to sys.stderr; otherwise 'file' should be an open file
    or file-like object with a write() method.
    """
    if file is None:
        file = sys.stderr
    if limit is None:
        limit = getattr(sys, 'tracebacklimit', None)
    count = 0
    while tb is not None and (limit is None or count < limit):
        frame = tb.tb_frame
        lineno = tb.tb_lineno
        code = frame.f_code
        filename = code.co_filename
        name = code.co_name
        _print(file,
               '  File "%s", line %d, in %s' % (filename, lineno, name))
        # Make sure we show current source, not a stale cached copy.
        linecache.checkcache(filename)
        line = linecache.getline(filename, lineno)
        if line:
            _print(file, '    ' + line.strip())
        tb = tb.tb_next
        count = count + 1
+
def format_tb(tb, limit = None):
    """A shorthand for 'format_list(extract_tb(tb, limit))'."""
    # Docstring fixed: it previously referred to extract_stack(f, limit),
    # which is what format_stack (not format_tb) delegates to.
    return format_list(extract_tb(tb, limit))
+
def extract_tb(tb, limit = None):
    """Return list of up to limit pre-processed entries from traceback.

    This is useful for alternate formatting of stack traces.  If
    'limit' is omitted or None, all entries are extracted.  Each entry
    is a (filename, line number, function name, text) quadruple -- the
    information usually printed for a stack trace.  The text has
    leading and trailing whitespace stripped and is None when the
    source is unavailable.
    """
    if limit is None:
        limit = getattr(sys, 'tracebacklimit', None)
    entries = []
    count = 0
    while tb is not None and (limit is None or count < limit):
        frame = tb.tb_frame
        lineno = tb.tb_lineno
        code = frame.f_code
        filename = code.co_filename
        name = code.co_name
        # Refresh the cache so edited files show current source.
        linecache.checkcache(filename)
        line = linecache.getline(filename, lineno)
        if line:
            line = line.strip()
        else:
            line = None
        entries.append((filename, lineno, name, line))
        tb = tb.tb_next
        count = count + 1
    return entries
+
+
def print_exception(etype, value, tb, limit=None, file=None):
    """Print exception information and up to 'limit' traceback entries
    from 'tb' to 'file' (default sys.stderr).

    Unlike print_tb(), this prints a "Traceback (most recent call
    last):" header when a traceback is supplied, prints the exception
    type and value after the stack trace, and -- for SyntaxError with
    suitably shaped value -- shows the offending line with a caret
    marking the approximate error position.
    """
    if file is None:
        file = sys.stderr
    if tb:
        _print(file, 'Traceback (most recent call last):')
        print_tb(tb, limit, file)
    lines = format_exception_only(etype, value)
    # All lines but the last are terminated with a space; the final
    # line gets no terminator at all (each line already ends in '\n').
    last = len(lines) - 1
    for index, line in enumerate(lines):
        if index < last:
            _print(file, line, ' ')
        else:
            _print(file, line, '')
+
def format_exception(etype, value, tb, limit = None):
    """Format a stack trace and the exception information.

    The arguments have the same meaning as for print_exception(), and
    the returned list of newline-terminated strings, when joined and
    printed, produces exactly the same text as print_exception().
    """
    result = []
    if tb:
        result.append('Traceback (most recent call last):\n')
        result.extend(format_tb(tb, limit))
    result.extend(format_exception_only(etype, value))
    return result
+
def format_exception_only(etype, value):
    """Format the exception part of a traceback.

    The arguments are the exception type and value such as given by
    sys.last_type and sys.last_value. The return value is a list of
    strings, each ending in a newline.  Normally, the list contains a
    single string; however, for SyntaxError exceptions, it contains
    several lines that (when printed) display detailed information
    about where the syntax error occurred.  The message indicating
    which exception occurred is always the last string in the list.
    """
    list = []
    # Old-style (classic) exception classes: show the bare class name
    # rather than the full repr of the class object.
    if type(etype) == types.ClassType:
        stype = etype.__name__
    else:
        stype = etype
    if value is None:
        list.append(str(stype) + '\n')
    else:
        if etype is SyntaxError:
            try:
                # A SyntaxError value can unpack into the message plus
                # its (filename, lineno, offset, text) details.
                msg, (filename, lineno, offset, line) = value
            except:
                # Value didn't have the expected shape; fall through
                # and format it like any other exception.
                pass
            else:
                if not filename: filename = "<string>"
                list.append('  File "%s", line %d\n' %
                            (filename, lineno))
                if line is not None:
                    i = 0
                    while i < len(line) and line[i].isspace():
                        i = i+1
                    list.append('    %s\n' % line.strip())
                    if offset is not None:
                        # Build the caret line, copying whitespace so
                        # the caret lines up under the error offset.
                        s = '    '
                        for c in line[i:offset-1]:
                            if c.isspace():
                                s = s + c
                            else:
                                s = s + ' '
                        list.append('%s^\n' % s)
                    value = msg
        s = _some_str(value)
        if s:
            list.append('%s: %s\n' % (str(stype), s))
        else:
            list.append('%s\n' % str(stype))
    return list
+
+def _some_str(value):
+    try:
+        return str(value)
+    except:
+        return '<unprintable %s object>' % type(value).__name__
+
+
def print_exc(limit=None, file=None):
    """Shorthand for 'print_exception(sys.exc_type, sys.exc_value, sys.exc_traceback, limit, file)'.
    (In fact, it uses sys.exc_info() to retrieve the same information
    in a thread-safe way.)"""
    if file is None:
        file = sys.stderr
    etype, value, tb = sys.exc_info()
    try:
        print_exception(etype, value, tb, limit, file)
    finally:
        # Break the traceback reference cycle.
        etype = value = tb = None
+
+
def format_exc(limit=None):
    """Like print_exc() but return a string."""
    etype, value, tb = sys.exc_info()
    try:
        pieces = format_exception(etype, value, tb, limit)
        return ''.join(pieces)
    finally:
        # Break the traceback reference cycle.
        etype = value = tb = None
+
+
def print_last(limit=None, file=None):
    """This is a shorthand for 'print_exception(sys.last_type,
    sys.last_value, sys.last_traceback, limit, file)'."""
    target = file
    if target is None:
        target = sys.stderr
    print_exception(sys.last_type, sys.last_value, sys.last_traceback,
                    limit, target)
+
+
def print_stack(f=None, limit=None, file=None):
    """Print a stack trace from its invocation point.

    The optional 'f' argument can be used to specify an alternate
    stack frame at which to start. The optional 'limit' and 'file'
    arguments have the same meaning as for print_exception().
    """
    frame = f
    if frame is None:
        # Raise and catch a throwaway exception to get at the
        # caller's frame.
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            frame = sys.exc_info()[2].tb_frame.f_back
    print_list(extract_stack(frame, limit), file)
+
def format_stack(f=None, limit=None):
    """Shorthand for 'format_list(extract_stack(f, limit))'."""
    frame = f
    if frame is None:
        # Raise and catch a throwaway exception to get at the
        # caller's frame.
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            frame = sys.exc_info()[2].tb_frame.f_back
    return format_list(extract_stack(frame, limit))
+
def extract_stack(f=None, limit = None):
    """Extract the raw traceback from the current stack frame.

    The return value has the same format as for extract_tb().  The
    optional 'f' and 'limit' arguments have the same meaning as for
    print_stack().  Each item in the list is a quadruple (filename,
    line number, function name, text), and the entries are in order
    from oldest to newest stack frame.
    """
    frame = f
    if frame is None:
        # Raise and catch a throwaway exception to get at the
        # caller's frame.
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            frame = sys.exc_info()[2].tb_frame.f_back
    if limit is None:
        limit = getattr(sys, 'tracebacklimit', None)
    entries = []
    count = 0
    while frame is not None and (limit is None or count < limit):
        lineno = frame.f_lineno
        code = frame.f_code
        filename = code.co_filename
        name = code.co_name
        linecache.checkcache(filename)
        line = linecache.getline(filename, lineno)
        if line:
            line = line.strip()
        else:
            line = None
        entries.append((filename, lineno, name, line))
        frame = frame.f_back
        count = count + 1
    # Walked newest-to-oldest; callers expect oldest first.
    entries.reverse()
    return entries
+
def tb_lineno(tb):
    """Return the line number of the traceback entry 'tb'.

    Obsolete in 2.3: the interpreter now computes tb.tb_lineno
    correctly itself.
    """
    return tb.tb_lineno
diff --git a/depot_tools/release/win/python_24/Lib/tty.py b/depot_tools/release/win/python_24/Lib/tty.py
new file mode 100644
index 0000000..a72eb675
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/tty.py
@@ -0,0 +1,36 @@
+"""Terminal utilities."""
+
+# Author: Steen Lumholt.
+
+from termios import *
+
+__all__ = ["setraw", "setcbreak"]
+
# Indexes into the mode list returned by tcgetattr(): input flags,
# output flags, control flags, local flags, input and output speeds,
# and the list of special control characters.
IFLAG = 0
OFLAG = 1
CFLAG = 2
LFLAG = 3
ISPEED = 4
OSPEED = 5
CC = 6
+
def setraw(fd, when=TCSAFLUSH):
    """Put the terminal on file descriptor 'fd' into raw mode.

    Disables echo, canonical input, signals, flow control, input
    stripping/translation, output post-processing and parity, selects
    8-bit characters, and delivers input byte-by-byte (VMIN=1,
    VTIME=0).  'when' is passed through to tcsetattr() and controls
    when the change takes effect.
    """
    mode = tcgetattr(fd)
    mode[IFLAG] &= ~(BRKINT | ICRNL | INPCK | ISTRIP | IXON)
    mode[OFLAG] &= ~OPOST
    mode[CFLAG] &= ~(CSIZE | PARENB)
    mode[CFLAG] |= CS8
    mode[LFLAG] &= ~(ECHO | ICANON | IEXTEN | ISIG)
    mode[CC][VMIN] = 1
    mode[CC][VTIME] = 0
    tcsetattr(fd, when, mode)
+
def setcbreak(fd, when=TCSAFLUSH):
    """Put the terminal on file descriptor 'fd' into cbreak mode.

    Turns off echo and canonical (line-at-a-time) input so each byte is
    delivered as soon as it is typed (VMIN=1, VTIME=0).  'when' is
    passed through to tcsetattr().
    """
    mode = tcgetattr(fd)
    mode[LFLAG] &= ~(ECHO | ICANON)
    mode[CC][VMIN] = 1
    mode[CC][VTIME] = 0
    tcsetattr(fd, when, mode)
diff --git a/depot_tools/release/win/python_24/Lib/types.py b/depot_tools/release/win/python_24/Lib/types.py
new file mode 100644
index 0000000..39812ac
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/types.py
@@ -0,0 +1,89 @@
+"""Define names for all type symbols known in the standard interpreter.
+
+Types that are part of optional modules (e.g. array) are not listed.
+"""
+import sys
+
# Iterators in Python aren't a matter of type but of protocol.  A large
# and changing number of builtin types implement *some* flavor of
# iterator.  Don't check the type!  Use hasattr to check for both
# "__iter__" and "next" attributes instead.

NoneType = type(None)
TypeType = type
ObjectType = object

IntType = int
LongType = long
FloatType = float
BooleanType = bool
# complex may be absent when the interpreter was built without
# complex-number support (hence the NameError guard).
try:
    ComplexType = complex
except NameError:
    pass

StringType = str

# StringTypes is already outdated.  Instead of writing "type(x) in
# types.StringTypes", you should use "isinstance(x, basestring)".  But
# we keep around for compatibility with Python 2.2.
try:
    UnicodeType = unicode
    StringTypes = (StringType, UnicodeType)
except NameError:
    StringTypes = (StringType,)

BufferType = buffer

TupleType = tuple
ListType = list
DictType = DictionaryType = dict

# _f, _g, _C and _x below exist only so their types can be captured;
# they are deleted at the bottom of the module.
def _f(): pass
FunctionType = type(_f)
LambdaType = type(lambda: None)         # Same as FunctionType
try:
    CodeType = type(_f.func_code)
except RuntimeError:
    # Execution in restricted environment
    pass

def _g():
    yield 1
GeneratorType = type(_g())

class _C:
    def _m(self): pass
ClassType = type(_C)
UnboundMethodType = type(_C._m)         # Same as MethodType
_x = _C()
InstanceType = type(_x)
MethodType = type(_x._m)

BuiltinFunctionType = type(len)
BuiltinMethodType = type([].append)     # Same as BuiltinFunctionType

ModuleType = type(sys)
FileType = file
XRangeType = xrange

# Raise and catch an exception so a live traceback object is available
# from which to take the traceback and frame types.
try:
    raise TypeError
except TypeError:
    try:
        tb = sys.exc_info()[2]
        TracebackType = type(tb)
        FrameType = type(tb.tb_frame)
    except AttributeError:
        # In the restricted environment, exc_info returns (None, None,
        # None) Then, tb.tb_frame gives an attribute error
        pass
    tb = None; del tb

SliceType = slice
EllipsisType = type(Ellipsis)

DictProxyType = type(TypeType.__dict__)
NotImplementedType = type(NotImplemented)

del sys, _f, _g, _C, _x                  # Not for export
diff --git a/depot_tools/release/win/python_24/Lib/tzparse.py b/depot_tools/release/win/python_24/Lib/tzparse.py
new file mode 100644
index 0000000..12468b5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/tzparse.py
@@ -0,0 +1,98 @@
+"""Parse a timezone specification."""
+
+# XXX Unfinished.
+# XXX Only the typical form "XXXhhYYY;ddd/hh,ddd/hh" is currently supported.
+
+import warnings
+warnings.warn(
+    "The tzparse module is obsolete and will disappear in the future",
+    DeprecationWarning)
+
# Pattern for the only TZ form understood: three-letter zone name,
# signed hour offset, three-letter DST zone name, then the DST
# start-day/hour and end-day/hour.
tzpat = ('^([A-Z][A-Z][A-Z])([-+]?[0-9]+)([A-Z][A-Z][A-Z]);'
          '([0-9]+)/([0-9]+),([0-9]+)/([0-9]+)$')

# Compiled lazily on first call to tzparse().
tzprog = None

def tzparse(tzstr):
    """Given a timezone spec, return a tuple of information
    (tzname, delta, dstname, daystart, hourstart, dayend, hourend),
    where 'tzname' is the name of the timezone, 'delta' is the offset
    in hours from GMT, 'dstname' is the name of the daylight-saving
    timezone, and 'daystart'/'hourstart' and 'dayend'/'hourend'
    specify the starting and ending points for daylight saving time.

    Raises ValueError if 'tzstr' does not match the expected syntax."""
    global tzprog
    if tzprog is None:
        import re
        tzprog = re.compile(tzpat)
    match = tzprog.match(tzstr)
    if not match:
        raise ValueError('not the TZ syntax I understand')
    subs = []
    for i in range(1, 8):
        subs.append(match.group(i))
    # Convert the numeric fields (offset, start day/hour, end day/hour).
    # int() replaces the original eval(): it is safe and, unlike eval(),
    # accepts values with leading zeros such as "09" (an invalid octal
    # literal to eval()).
    for i in (1, 3, 4, 5, 6):
        subs[i] = int(subs[i])
    [tzname, delta, dstname, daystart, hourstart, dayend, hourend] = subs
    return (tzname, delta, dstname, daystart, hourstart, dayend, hourend)
+
def tzlocaltime(secs, params):
    """Given a Unix time in seconds and a tuple of information about
    a timezone as returned by tzparse(), return the local time in the
    form (year, month, day, hour, min, sec, yday, wday, tzname)."""
    import time
    (tzname, delta, dstname, daystart, hourstart, dayend, hourend) = params
    # time.gmtime() yields (..., tm_wday, tm_yday, tm_isdst); the original
    # code unpacked yday and wday in the wrong order, which both scrambled
    # the returned tuple and compared the DST day-of-year window against
    # the weekday (so it could never match typical day ranges).
    year, month, days, hours, mins, secs, wday, yday, isdst = \
            time.gmtime(secs - delta*3600)
    # NOTE(review): tm_yday is already 1-based; the +1 is kept from the
    # original code -- confirm the intended window convention.
    if (daystart, hourstart) <= (yday+1, hours) < (dayend, hourend):
        tzname = dstname
        hours = hours + 1
    return year, month, days, hours, mins, secs, yday, wday, tzname
+
def tzset():
    """Determine the current timezone from the "TZ" environment variable."""
    global tzparams, timezone, altzone, daylight, tzname
    import os
    # Parse $TZ (a missing variable propagates as KeyError) and derive
    # the module-level globals from the result.
    tzparams = tzparse(os.environ['TZ'])
    timezone = tzparams[1] * 3600      # standard offset, in seconds
    altzone = timezone - 3600          # DST offset: one hour ahead
    daylight = 1
    tzname = (tzparams[0], tzparams[2])    # (standard name, DST name)
+
def isdst(secs):
    """Return true if daylight-saving time is in effect for the given
    Unix time in the current timezone."""
    import time
    (tzname, delta, dstname, daystart, hourstart, dayend, hourend) = \
            tzparams
    # time.gmtime() yields (..., tm_wday, tm_yday, tm_isdst); the original
    # code unpacked yday and wday swapped, so the day-of-year comparison
    # below was made against the weekday and the DST window could never
    # match typical day ranges.  Unpack in the real struct_time order.
    year, month, days, hours, mins, secs, yday, wday, isdst = \
            time.gmtime(secs - delta*3600)[:6] + \
            (time.gmtime(secs - delta*3600)[7],
             time.gmtime(secs - delta*3600)[6],
             time.gmtime(secs - delta*3600)[8])
    # NOTE(review): tm_yday is already 1-based; the +1 is kept from the
    # original code -- confirm the intended window convention.
    return (daystart, hourstart) <= (yday+1, hours) < (dayend, hourend)
+
+tzset()
+
def localtime(secs):
    """Get the local time in the current timezone."""
    # Delegates to tzlocaltime() with the globals established by tzset().
    return tzlocaltime(secs, tzparams)
+
def test():
    # Ad-hoc manual check: prints the local time for "now", for Jan 1,
    # and for days 85-95 and 265-275 (around the DST boundaries).
    from time import asctime, gmtime
    import time, sys
    now = time.time()
    x = localtime(now)
    # asctime() wants a 9-tuple; swap the trailing tzname for a 0 flag.
    tm = x[:-1] + (0,)
    print 'now =', now, '=', asctime(tm), x[-1]
    # Round down to midnight, optionally shifted by an eval()'d argv[1].
    now = now - now % (24*3600)
    if sys.argv[1:]: now = now + eval(sys.argv[1])
    x = gmtime(now)
    tm = x[:-1] + (0,)
    print 'gmtime =', now, '=', asctime(tm), 'yday =', x[-2]
    jan1 = now - x[-2]*24*3600
    x = localtime(jan1)
    tm = x[:-1] + (0,)
    print 'jan1 =', jan1, '=', asctime(tm), x[-1]
    for d in range(85, 95) + range(265, 275):
        t = jan1 + d*24*3600
        x = localtime(t)
        tm = x[:-1] + (0,)
        print 'd =', d, 't =', t, '=', asctime(tm), x[-1]
diff --git a/depot_tools/release/win/python_24/Lib/unittest.py b/depot_tools/release/win/python_24/Lib/unittest.py
new file mode 100644
index 0000000..efb2b025a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/unittest.py
@@ -0,0 +1,807 @@
+#!/usr/bin/env python
+'''
+Python unit testing framework, based on Erich Gamma's JUnit and Kent Beck's
+Smalltalk testing framework.
+
+This module contains the core framework classes that form the basis of
+specific test cases and suites (TestCase, TestSuite etc.), and also a
+text-based utility class for running the tests and reporting the results
+ (TextTestRunner).
+
+Simple usage:
+
+    import unittest
+
+    class IntegerArithmenticTestCase(unittest.TestCase):
+        def testAdd(self):  ## test method names begin 'test*'
+            self.assertEquals((1 + 2), 3)
+            self.assertEquals(0 + 1, 1)
+        def testMultiply(self):
+            self.assertEquals((0 * 10), 0)
+            self.assertEquals((5 * 8), 40)
+
+    if __name__ == '__main__':
+        unittest.main()
+
+Further information is available in the bundled documentation, and from
+
+  http://pyunit.sourceforge.net/
+
+Copyright (c) 1999-2003 Steve Purcell
+This module is free software, and you may redistribute it and/or modify
+it under the same terms as Python itself, so long as this copyright message
+and disclaimer are retained in their original form.
+
+IN NO EVENT SHALL THE AUTHOR BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT,
+SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OF
+THIS CODE, EVEN IF THE AUTHOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGE.
+
+THE AUTHOR SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
+PARTICULAR PURPOSE.  THE CODE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS,
+AND THERE IS NO OBLIGATION WHATSOEVER TO PROVIDE MAINTENANCE,
+SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
+'''
+
+__author__ = "Steve Purcell"
+__email__ = "stephen_purcell at yahoo dot com"
+__version__ = "#Revision: 1.63 $"[11:-2]
+
+import time
+import sys
+import traceback
+import os
+import types
+
+##############################################################################
+# Exported classes and functions
+##############################################################################
+__all__ = ['TestResult', 'TestCase', 'TestSuite', 'TextTestRunner',
+           'TestLoader', 'FunctionTestCase', 'main', 'defaultTestLoader']
+
+# Expose obsolete functions for backwards compatibility
+__all__.extend(['getTestCaseNames', 'makeSuite', 'findTestCases'])
+
+
+##############################################################################
+# Backward compatibility
+##############################################################################
# Pre-2.2 interpreters lack the bool constants and an isinstance()
# that accepts a tuple/list of classes; emulate both here.
if sys.version_info[:2] < (2, 2):
    False, True = 0, 1
    def isinstance(obj, clsinfo):
        # Re-implementation of the builtin that also accepts a tuple or
        # list of classes, mapping 'type' to the old ClassType.
        import __builtin__
        if type(clsinfo) in (types.TupleType, types.ListType):
            for cls in clsinfo:
                if cls is type: cls = types.ClassType
                if __builtin__.isinstance(obj, cls):
                    return 1
            return 0
        else: return __builtin__.isinstance(obj, clsinfo)
+
+
+##############################################################################
+# Test framework core
+##############################################################################
+
# All classes defined herein are 'new-style' classes, allowing use of 'super()'
__metaclass__ = type

def _strclass(cls):
    # Dotted "module.ClassName" label used by the __str__/__repr__
    # methods of the classes below.
    return "%s.%s" % (cls.__module__, cls.__name__)

# Marker looked for in frame globals by TestResult._is_relevant_tb_level()
# so unittest-internal frames can be stripped from reported tracebacks.
__unittest = 1
+
class TestResult:
    """Holder for test result information.

    Test results are automatically managed by the TestCase and TestSuite
    classes, and do not need to be explicitly manipulated by writers of tests.

    Each instance holds the total number of tests run, and collections of
    failures and errors that occurred among those test runs. The collections
    contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
    formatted traceback of the error that occurred.
    """
    def __init__(self):
        self.failures = []     # (test, formatted traceback) pairs
        self.errors = []       # (test, formatted traceback) pairs
        self.testsRun = 0
        self.shouldStop = 0

    def startTest(self, test):
        "Called when the given test is about to be run"
        self.testsRun = self.testsRun + 1

    def stopTest(self, test):
        "Called when the given test has been run"
        pass

    def addError(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info().
        """
        self.errors.append((test, self._exc_info_to_string(err, test)))

    def addFailure(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info()."""
        self.failures.append((test, self._exc_info_to_string(err, test)))

    def addSuccess(self, test):
        "Called when a test has completed successfully"
        pass

    def wasSuccessful(self):
        "Tells whether or not this result was a success"
        return len(self.failures) == len(self.errors) == 0

    def stop(self):
        "Indicates that the tests should be aborted"
        self.shouldStop = True

    def _exc_info_to_string(self, err, test):
        """Converts a sys.exc_info()-style tuple of values into a string."""
        exctype, value, tb = err
        # Skip test runner traceback levels
        while tb and self._is_relevant_tb_level(tb):
            tb = tb.tb_next
        if exctype is test.failureException:
            # Skip assert*() traceback levels
            length = self._count_relevant_tb_levels(tb)
            return ''.join(traceback.format_exception(exctype, value, tb, length))
        return ''.join(traceback.format_exception(exctype, value, tb))

    def _is_relevant_tb_level(self, tb):
        # Frames from unittest itself carry a '__unittest' module global.
        # Uses the 'in' operator instead of the deprecated, Python-2-only
        # dict.has_key() (supported since Python 2.2, required for 3.x).
        return '__unittest' in tb.tb_frame.f_globals

    def _count_relevant_tb_levels(self, tb):
        length = 0
        while tb and not self._is_relevant_tb_level(tb):
            length += 1
            tb = tb.tb_next
        return length

    def __repr__(self):
        return "<%s run=%i errors=%i failures=%i>" % \
               (_strclass(self.__class__), self.testsRun, len(self.errors),
                len(self.failures))
+
class TestCase:
    """A class whose instances are single test cases.

    By default, the test code itself should be placed in a method named
    'runTest'.

    If the fixture may be used for many test cases, create as
    many test methods as are needed. When instantiating such a TestCase
    subclass, specify in the constructor arguments the name of the test method
    that the instance is to execute.

    Test authors should subclass TestCase for their own tests. Construction
    and deconstruction of the test's environment ('fixture') can be
    implemented by overriding the 'setUp' and 'tearDown' methods respectively.

    If it is necessary to override the __init__ method, the base class
    __init__ method must always be called. It is important that subclasses
    should not change the signature of their __init__ method, since instances
    of the classes are instantiated automatically by parts of the framework
    in order to be run.
    """

    # This attribute determines which exception will be raised when
    # the instance's assertion methods fail; test methods raising this
    # exception will be deemed to have 'failed' rather than 'errored'

    failureException = AssertionError

    def __init__(self, methodName='runTest'):
        """Create an instance of the class that will use the named test
           method when executed. Raises a ValueError if the instance does
           not have a method with the specified name.
        """
        try:
            self.__testMethodName = methodName
            testMethod = getattr(self, methodName)
            # Capture the docstring now so shortDescription() does not
            # have to look the method up again later.
            self.__testMethodDoc = testMethod.__doc__
        except AttributeError:
            raise ValueError, "no such test method in %s: %s" % \
                  (self.__class__, methodName)

    def setUp(self):
        "Hook method for setting up the test fixture before exercising it."
        pass

    def tearDown(self):
        "Hook method for deconstructing the test fixture after testing it."
        pass

    def countTestCases(self):
        # A TestCase always represents exactly one test.
        return 1

    def defaultTestResult(self):
        return TestResult()

    def shortDescription(self):
        """Returns a one-line description of the test, or None if no
        description has been provided.

        The default implementation of this method returns the first line of
        the specified test method's docstring.
        """
        doc = self.__testMethodDoc
        return doc and doc.split("\n")[0].strip() or None

    def id(self):
        return "%s.%s" % (_strclass(self.__class__), self.__testMethodName)

    def __str__(self):
        return "%s (%s)" % (self.__testMethodName, _strclass(self.__class__))

    def __repr__(self):
        return "<%s testMethod=%s>" % \
               (_strclass(self.__class__), self.__testMethodName)

    def run(self, result=None):
        if result is None: result = self.defaultTestResult()
        result.startTest(self)
        testMethod = getattr(self, self.__testMethodName)
        try:
            try:
                self.setUp()
            except KeyboardInterrupt:
                raise
            except:
                # A broken setUp counts as an error and skips the test
                # (tearDown is NOT run in this case).
                result.addError(self, self.__exc_info())
                return

            ok = False
            try:
                testMethod()
                ok = True
            except self.failureException:
                # failureException (raised by the assert*() methods) is
                # recorded as a failure; everything else as an error.
                result.addFailure(self, self.__exc_info())
            except KeyboardInterrupt:
                raise
            except:
                result.addError(self, self.__exc_info())

            # tearDown always runs once setUp succeeded; a broken
            # tearDown turns a passing test into an error.
            try:
                self.tearDown()
            except KeyboardInterrupt:
                raise
            except:
                result.addError(self, self.__exc_info())
                ok = False
            if ok: result.addSuccess(self)
        finally:
            result.stopTest(self)

    def __call__(self, *args, **kwds):
        return self.run(*args, **kwds)

    def debug(self):
        """Run the test without collecting errors in a TestResult"""
        # Exceptions propagate straight to the caller here.
        self.setUp()
        getattr(self, self.__testMethodName)()
        self.tearDown()

    def __exc_info(self):
        """Return a version of sys.exc_info() with the traceback frame
           minimised; usually the top level of the traceback frame is not
           needed.
        """
        exctype, excvalue, tb = sys.exc_info()
        # Both branches currently return the same tuple; the Jython
        # check is vestigial.
        if sys.platform[:4] == 'java': ## tracebacks look different in Jython
            return (exctype, excvalue, tb)
        return (exctype, excvalue, tb)

    def fail(self, msg=None):
        """Fail immediately, with the given message."""
        raise self.failureException, msg

    def failIf(self, expr, msg=None):
        "Fail the test if the expression is true."
        if expr: raise self.failureException, msg

    def failUnless(self, expr, msg=None):
        """Fail the test unless the expression is true."""
        if not expr: raise self.failureException, msg

    def failUnlessRaises(self, excClass, callableObj, *args, **kwargs):
        """Fail unless an exception of class excClass is thrown
           by callableObj when invoked with arguments args and keyword
           arguments kwargs. If a different type of exception is
           thrown, it will not be caught, and the test case will be
           deemed to have suffered an error, exactly as for an
           unexpected exception.
        """
        try:
            callableObj(*args, **kwargs)
        except excClass:
            return
        else:
            if hasattr(excClass,'__name__'): excName = excClass.__name__
            else: excName = str(excClass)
            raise self.failureException, "%s not raised" % excName

    def failUnlessEqual(self, first, second, msg=None):
        """Fail if the two objects are unequal as determined by the '=='
           operator.
        """
        if not first == second:
            raise self.failureException, \
                  (msg or '%r != %r' % (first, second))

    def failIfEqual(self, first, second, msg=None):
        """Fail if the two objects are equal as determined by the '=='
           operator.
        """
        if first == second:
            raise self.failureException, \
                  (msg or '%r == %r' % (first, second))

    def failUnlessAlmostEqual(self, first, second, places=7, msg=None):
        """Fail if the two objects are unequal as determined by their
           difference rounded to the given number of decimal places
           (default 7) and comparing to zero.

           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most signficant digit).
        """
        if round(second-first, places) != 0:
            raise self.failureException, \
                  (msg or '%r != %r within %r places' % (first, second, places))

    def failIfAlmostEqual(self, first, second, places=7, msg=None):
        """Fail if the two objects are equal as determined by their
           difference rounded to the given number of decimal places
           (default 7) and comparing to zero.

           Note that decimal places (from zero) are usually not the same
           as significant digits (measured from the most signficant digit).
        """
        if round(second-first, places) == 0:
            raise self.failureException, \
                  (msg or '%r == %r within %r places' % (first, second, places))

    # Synonyms for assertion methods: the assert* spellings alias the
    # fail* implementations above.

    assertEqual = assertEquals = failUnlessEqual

    assertNotEqual = assertNotEquals = failIfEqual

    assertAlmostEqual = assertAlmostEquals = failUnlessAlmostEqual

    assertNotAlmostEqual = assertNotAlmostEquals = failIfAlmostEqual

    assertRaises = failUnlessRaises

    assert_ = assertTrue = failUnless

    assertFalse = failIf
+
+
+
class TestSuite:
    """A composite test made up of a number of TestCases.

    Create an instance, add test case instances to it, then hand the
    suite to a runner such as TextTestRunner.  The contained tests are
    executed in the order they were added and their results aggregated.
    Subclasses must remember to call the base class constructor.
    """
    def __init__(self, tests=()):
        self._tests = []
        self.addTests(tests)

    def __repr__(self):
        return "<%s tests=%s>" % (_strclass(self.__class__), self._tests)

    __str__ = __repr__

    def __iter__(self):
        return iter(self._tests)

    def countTestCases(self):
        # The total is the sum over the children, which may themselves
        # be suites.
        return sum([test.countTestCases() for test in self._tests])

    def addTest(self, test):
        self._tests.append(test)

    def addTests(self, tests):
        # Route every addition through addTest() so subclasses can hook it.
        for test in tests:
            self.addTest(test)

    def run(self, result):
        for test in self._tests:
            if result.shouldStop:
                break
            test(result)
        return result

    def __call__(self, *args, **kwds):
        return self.run(*args, **kwds)

    def debug(self):
        """Run the tests without collecting errors in a TestResult"""
        for test in self._tests:
            test.debug()
+
+
class FunctionTestCase(TestCase):
    """A test case that wraps a test function.

    Useful for slipping pre-existing test functions into the PyUnit
    framework.  Optional set-up and tidy-up callables may be supplied;
    as with TestCase, the tidy-up ('tearDown') function is always
    called if the set-up ('setUp') function ran successfully.
    """

    def __init__(self, testFunc, setUp=None, tearDown=None,
                 description=None):
        TestCase.__init__(self)
        self.__testFunc = testFunc
        self.__setUpFunc = setUp
        self.__tearDownFunc = tearDown
        self.__description = description

    def setUp(self):
        setUpFunc = self.__setUpFunc
        if setUpFunc is not None:
            setUpFunc()

    def tearDown(self):
        tearDownFunc = self.__tearDownFunc
        if tearDownFunc is not None:
            tearDownFunc()

    def runTest(self):
        self.__testFunc()

    def id(self):
        return self.__testFunc.__name__

    def __str__(self):
        return "%s (%s)" % (_strclass(self.__class__), self.__testFunc.__name__)

    def __repr__(self):
        return "<%s testFunc=%s>" % (_strclass(self.__class__), self.__testFunc)

    def shortDescription(self):
        # Prefer an explicit description; fall back to the first line of
        # the wrapped function's docstring, or None.
        if self.__description is not None:
            return self.__description
        doc = self.__testFunc.__doc__
        first = doc and doc.split("\n")[0].strip()
        return first or None
+
+
+
+##############################################################################
+# Locating and loading tests
+##############################################################################
+
class TestLoader:
    """This class is responsible for loading tests according to various
    criteria and returning them wrapped in a Test
    """
    # Method-name prefix that marks a method as a test.
    testMethodPrefix = 'test'
    # Comparison function used to order test names (the builtin cmp).
    sortTestMethodsUsing = cmp
    # Callable used to wrap a list of tests into a suite.
    suiteClass = TestSuite

    def loadTestsFromTestCase(self, testCaseClass):
        """Return a suite of all tests cases contained in testCaseClass"""
        if issubclass(testCaseClass, TestSuite):
            raise TypeError("Test cases should not be derived from TestSuite. Maybe you meant to derive from TestCase?")
        testCaseNames = self.getTestCaseNames(testCaseClass)
        # Fall back to the conventional single 'runTest' method when no
        # prefixed test methods were found.
        if not testCaseNames and hasattr(testCaseClass, 'runTest'):
            testCaseNames = ['runTest']
        return self.suiteClass(map(testCaseClass, testCaseNames))

    def loadTestsFromModule(self, module):
        """Return a suite of all tests cases contained in the given module"""
        tests = []
        for name in dir(module):
            obj = getattr(module, name)
            if (isinstance(obj, (type, types.ClassType)) and
                issubclass(obj, TestCase)):
                tests.append(self.loadTestsFromTestCase(obj))
        return self.suiteClass(tests)

    def loadTestsFromName(self, name, module=None):
        """Return a suite of all tests cases given a string specifier.

        The name may resolve either to a module, a test case class, a
        test method within a test case class, or a callable object which
        returns a TestCase or TestSuite instance.

        The method optionally resolves the names relative to a given module.
        """
        parts = name.split('.')
        if module is None:
            # Import the longest importable dotted prefix of 'name';
            # re-raise ImportError only when no prefix imports at all.
            parts_copy = parts[:]
            while parts_copy:
                try:
                    module = __import__('.'.join(parts_copy))
                    break
                except ImportError:
                    del parts_copy[-1]
                    if not parts_copy: raise
            parts = parts[1:]
        obj = module
        # Walk the remaining attribute path, remembering the parent so an
        # unbound test method can be turned back into a TestCase below.
        for part in parts:
            parent, obj = obj, getattr(obj, part)

        if type(obj) == types.ModuleType:
            return self.loadTestsFromModule(obj)
        elif (isinstance(obj, (type, types.ClassType)) and
              issubclass(obj, TestCase)):
            return self.loadTestsFromTestCase(obj)
        elif type(obj) == types.UnboundMethodType:
            return parent(obj.__name__)
        elif isinstance(obj, TestSuite):
            return obj
        elif callable(obj):
            test = obj()
            if not isinstance(test, (TestCase, TestSuite)):
                raise ValueError, \
                      "calling %s returned %s, not a test" % (obj,test)
            return test
        else:
            raise ValueError, "don't know how to make test from: %s" % obj

    def loadTestsFromNames(self, names, module=None):
        """Return a suite of all tests cases found using the given sequence
        of string specifiers. See 'loadTestsFromName()'.
        """
        suites = [self.loadTestsFromName(name, module) for name in names]
        return self.suiteClass(suites)

    def getTestCaseNames(self, testCaseClass):
        """Return a sorted sequence of method names found within testCaseClass
        """
        def isTestMethod(attrname, testCaseClass=testCaseClass, prefix=self.testMethodPrefix):
            return attrname.startswith(prefix) and callable(getattr(testCaseClass, attrname))
        testFnNames = filter(isTestMethod, dir(testCaseClass))
        # Also collect test methods found on base classes, skipping any
        # name already present in the list.
        for baseclass in testCaseClass.__bases__:
            for testFnName in self.getTestCaseNames(baseclass):
                if testFnName not in testFnNames:  # handle overridden methods
                    testFnNames.append(testFnName)
        if self.sortTestMethodsUsing:
            testFnNames.sort(self.sortTestMethodsUsing)
        return testFnNames
+
+
+
# Shared module-level TestLoader instance used as the default loader.
defaultTestLoader = TestLoader()
+
+
+##############################################################################
+# Patches for old functions: these functions should be considered obsolete
+##############################################################################
+
def _makeLoader(prefix, sortUsing, suiteClass=None):
    # Build a TestLoader configured with the given prefix, sort function
    # and (optionally) suite class; helper for the obsolete functions.
    loader = TestLoader()
    loader.sortTestMethodsUsing = sortUsing
    loader.testMethodPrefix = prefix
    if suiteClass:
        loader.suiteClass = suiteClass
    return loader
+
def getTestCaseNames(testCaseClass, prefix, sortUsing=cmp):
    # Obsolete: kept for backward compatibility; delegates to a
    # throwaway TestLoader.
    return _makeLoader(prefix, sortUsing).getTestCaseNames(testCaseClass)
+
def makeSuite(testCaseClass, prefix='test', sortUsing=cmp, suiteClass=TestSuite):
    # Obsolete: kept for backward compatibility; delegates to a
    # throwaway TestLoader.
    return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromTestCase(testCaseClass)
+
def findTestCases(module, prefix='test', sortUsing=cmp, suiteClass=TestSuite):
    # Obsolete: kept for backward compatibility; delegates to a
    # throwaway TestLoader.
    return _makeLoader(prefix, sortUsing, suiteClass).loadTestsFromModule(module)
+
+
+##############################################################################
+# Text UI
+##############################################################################
+
+class _WritelnDecorator:
+    """Used to decorate file-like objects with a handy 'writeln' method"""
+    def __init__(self,stream):
+        self.stream = stream
+
+    def __getattr__(self, attr):
+        return getattr(self.stream,attr)
+
+    def writeln(self, arg=None):
+        if arg: self.write(arg)
+        self.write('\n') # text-mode streams translate to \r\n if needed
+
+
class _TextTestResult(TestResult):
    """A test result class that can print formatted text results to a stream.

    Used by TextTestRunner.
    """
    separator1 = '=' * 70
    separator2 = '-' * 70

    def __init__(self, stream, descriptions, verbosity):
        TestResult.__init__(self)
        self.stream = stream
        self.descriptions = descriptions
        # verbosity 1 prints one dot per test; higher verbosity prints a
        # fully described line per test.
        self.dots = (verbosity == 1)
        self.showAll = (verbosity > 1)

    def getDescription(self, test):
        if not self.descriptions:
            return str(test)
        return test.shortDescription() or str(test)

    def startTest(self, test):
        TestResult.startTest(self, test)
        if self.showAll:
            self.stream.write(self.getDescription(test))
            self.stream.write(" ... ")

    def addSuccess(self, test):
        TestResult.addSuccess(self, test)
        if self.showAll:
            self.stream.writeln("ok")
        elif self.dots:
            self.stream.write('.')

    def addError(self, test, err):
        TestResult.addError(self, test, err)
        if self.showAll:
            self.stream.writeln("ERROR")
        elif self.dots:
            self.stream.write('E')

    def addFailure(self, test, err):
        TestResult.addFailure(self, test, err)
        if self.showAll:
            self.stream.writeln("FAIL")
        elif self.dots:
            self.stream.write('F')

    def printErrors(self):
        # Terminate the progress line before dumping details.
        if self.dots or self.showAll:
            self.stream.writeln()
        self.printErrorList('ERROR', self.errors)
        self.printErrorList('FAIL', self.failures)

    def printErrorList(self, flavour, errors):
        for test, err in errors:
            self.stream.writeln(self.separator1)
            self.stream.writeln("%s: %s" % (flavour, self.getDescription(test)))
            self.stream.writeln(self.separator2)
            self.stream.writeln("%s" % err)
+
+
+class TextTestRunner:
+    """A test runner class that displays results in textual form.
+
+    It prints out the names of tests as they are run, errors as they
+    occur, and a summary of the results at the end of the test run.
+    """
+    def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1):
+        # Decorate the output stream so result printing can use writeln().
+        self.stream = _WritelnDecorator(stream)
+        self.descriptions = descriptions
+        self.verbosity = verbosity
+
+    def _makeResult(self):
+        # Factory hook; subclasses may substitute a different result class.
+        return _TextTestResult(self.stream, self.descriptions, self.verbosity)
+
+    def run(self, test):
+        "Run the given test case or test suite."
+        result = self._makeResult()
+        startTime = time.time()
+        test(result)
+        stopTime = time.time()
+        timeTaken = stopTime - startTime
+        result.printErrors()
+        self.stream.writeln(result.separator2)
+        run = result.testsRun
+        # Pluralize "test" and report wall-clock duration.
+        self.stream.writeln("Ran %d test%s in %.3fs" %
+                            (run, run != 1 and "s" or "", timeTaken))
+        self.stream.writeln()
+        if not result.wasSuccessful():
+            # Summarize failure/error counts, e.g. "FAILED (failures=1, errors=2)".
+            self.stream.write("FAILED (")
+            failed, errored = map(len, (result.failures, result.errors))
+            if failed:
+                self.stream.write("failures=%d" % failed)
+            if errored:
+                if failed: self.stream.write(", ")
+                self.stream.write("errors=%d" % errored)
+            self.stream.writeln(")")
+        else:
+            self.stream.writeln("OK")
+        return result
+
+
+
+##############################################################################
+# Facilities for running tests from the command line
+##############################################################################
+
+class TestProgram:
+    """A command-line program that runs a set of tests; this is primarily
+       for making test modules conveniently executable.
+    """
+    USAGE = """\
+Usage: %(progName)s [options] [test] [...]
+
+Options:
+  -h, --help       Show this message
+  -v, --verbose    Verbose output
+  -q, --quiet      Minimal output
+
+Examples:
+  %(progName)s                               - run default set of tests
+  %(progName)s MyTestSuite                   - run suite 'MyTestSuite'
+  %(progName)s MyTestCase.testSomething      - run MyTestCase.testSomething
+  %(progName)s MyTestCase                    - run all 'test*' test methods
+                                               in MyTestCase
+"""
+    def __init__(self, module='__main__', defaultTest=None,
+                 argv=None, testRunner=None, testLoader=defaultTestLoader):
+        # Accept either a module object or a dotted module name; resolve
+        # the name to the innermost module object.
+        if type(module) == type(''):
+            self.module = __import__(module)
+            for part in module.split('.')[1:]:
+                self.module = getattr(self.module, part)
+        else:
+            self.module = module
+        if argv is None:
+            argv = sys.argv
+        self.verbosity = 1
+        self.defaultTest = defaultTest
+        self.testRunner = testRunner
+        self.testLoader = testLoader
+        self.progName = os.path.basename(argv[0])
+        self.parseArgs(argv)
+        # NOTE: constructing a TestProgram runs the tests immediately and
+        # exits the process via runTests() below.
+        self.runTests()
+
+    def usageExit(self, msg=None):
+        # Print an optional message plus usage text, then exit with status 2.
+        if msg: print msg
+        print self.USAGE % self.__dict__
+        sys.exit(2)
+
+    def parseArgs(self, argv):
+        # Parse command-line flags and decide which tests to load.
+        import getopt
+        try:
+            options, args = getopt.getopt(argv[1:], 'hHvq',
+                                          ['help','verbose','quiet'])
+            for opt, value in options:
+                if opt in ('-h','-H','--help'):
+                    self.usageExit()
+                if opt in ('-q','--quiet'):
+                    self.verbosity = 0
+                if opt in ('-v','--verbose'):
+                    self.verbosity = 2
+            # No names given and no default: run everything in the module.
+            if len(args) == 0 and self.defaultTest is None:
+                self.test = self.testLoader.loadTestsFromModule(self.module)
+                return
+            if len(args) > 0:
+                self.testNames = args
+            else:
+                self.testNames = (self.defaultTest,)
+            self.createTests()
+        except getopt.error, msg:
+            self.usageExit(msg)
+
+    def createTests(self):
+        # Resolve the collected dotted names into a test suite.
+        self.test = self.testLoader.loadTestsFromNames(self.testNames,
+                                                       self.module)
+
+    def runTests(self):
+        if self.testRunner is None:
+            self.testRunner = TextTestRunner(verbosity=self.verbosity)
+        result = self.testRunner.run(self.test)
+        # Exit status 0 on success, 1 on any failure/error.
+        sys.exit(not result.wasSuccessful())
+
+main = TestProgram
+
+
+##############################################################################
+# Executing this module from the command line
+##############################################################################
+
+if __name__ == "__main__":
+    # module=None: TestProgram imports and scans __main__ itself.
+    main(module=None)
diff --git a/depot_tools/release/win/python_24/Lib/urllib.py b/depot_tools/release/win/python_24/Lib/urllib.py
new file mode 100644
index 0000000..e2f01c5
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/urllib.py
@@ -0,0 +1,1426 @@
+"""Open an arbitrary URL.
+
+See the following document for more info on URLs:
+"Names and Addresses, URIs, URLs, URNs, URCs", at
+http://www.w3.org/pub/WWW/Addressing/Overview.html
+
+See also the HTTP spec (from which the error codes are derived):
+"HTTP - Hypertext Transfer Protocol", at
+http://www.w3.org/pub/WWW/Protocols/
+
+Related standards and specs:
+- RFC1808: the "relative URL" spec. (authoritative status)
+- RFC1738 - the "URL standard". (authoritative status)
+- RFC1630 - the "URI spec". (informational status)
+
+The object returned by URLopener().open(file) will differ per
+protocol.  All you know is that it has methods read(), readline(),
+readlines(), fileno(), close() and info().  The read*(), fileno()
+and close() methods work like those of open files.
+The info() method returns a mimetools.Message object which can be
+used to query various info about the object, if available.
+(mimetools.Message objects are queried with the getheader() method.)
+"""
+
+import string
+import socket
+import os
+import time
+import sys
+from urlparse import urljoin as basejoin
+
+__all__ = ["urlopen", "URLopener", "FancyURLopener", "urlretrieve",
+           "urlcleanup", "quote", "quote_plus", "unquote", "unquote_plus",
+           "urlencode", "url2pathname", "pathname2url", "splittag",
+           "localhost", "thishost", "ftperrors", "basejoin", "unwrap",
+           "splittype", "splithost", "splituser", "splitpasswd", "splitport",
+           "splitnport", "splitquery", "splitattr", "splitvalue",
+           "splitgophertype", "getproxies"]
+
+__version__ = '1.16'    # XXX This version is not always updated :-(
+
+MAXFTPCACHE = 10        # Trim the ftp cache beyond this size
+
+# Helper for non-unix systems
+if os.name == 'mac':
+    from macurl2path import url2pathname, pathname2url
+elif os.name == 'nt':
+    from nturl2path import url2pathname, pathname2url
+elif os.name == 'riscos':
+    from rourl2path import url2pathname, pathname2url
+else:
+    # Posix default: URL paths map directly onto filesystem paths,
+    # modulo percent-quoting.
+    def url2pathname(pathname):
+        return unquote(pathname)
+    def pathname2url(pathname):
+        return quote(pathname)
+
+# This really consists of two pieces:
+# (1) a class which handles opening of all sorts of URLs
+#     (plus assorted utilities etc.)
+# (2) a set of functions for parsing URLs
+# XXX Should these be separated out into different modules?
+
+
+# Shortcut for basic usage
+# Module-level opener cached by urlopen()/urlretrieve(); created lazily.
+_urlopener = None
+def urlopen(url, data=None, proxies=None):
+    """urlopen(url [, data]) -> open file-like object"""
+    global _urlopener
+    if proxies is not None:
+        # Explicit proxies: use a one-off opener, do not touch the cache.
+        opener = FancyURLopener(proxies=proxies)
+    elif not _urlopener:
+        opener = FancyURLopener()
+        _urlopener = opener
+    else:
+        opener = _urlopener
+    if data is None:
+        return opener.open(url)
+    else:
+        return opener.open(url, data)
+def urlretrieve(url, filename=None, reporthook=None, data=None):
+    # Copy a network object to a local file via the shared module-level
+    # opener; see URLopener.retrieve() for the (filename, headers) result.
+    global _urlopener
+    if not _urlopener:
+        _urlopener = FancyURLopener()
+    return _urlopener.retrieve(url, filename, reporthook, data)
+def urlcleanup():
+    # Remove temporary files left behind by the module-level opener.
+    if _urlopener:
+        _urlopener.cleanup()
+
+
+ftpcache = {}
+class URLopener:
+    """Class to open URLs.
+    This is a class rather than just a subroutine because we may need
+    more than one set of global protocol-specific options.
+    Note -- this is a base class for those who don't want the
+    automatic handling of errors type 302 (relocated) and 401
+    (authorization needed)."""
+
+    # Class-level default so cleanup() is safe even if __init__ never ran.
+    __tempfiles = None
+
+    version = "Python-urllib/%s" % __version__
+
+    # Constructor
+    def __init__(self, proxies=None, **x509):
+        if proxies is None:
+            proxies = getproxies()
+        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
+        self.proxies = proxies
+        # Optional client-certificate material for HTTPS (x509 keywords).
+        self.key_file = x509.get('key_file')
+        self.cert_file = x509.get('cert_file')
+        self.addheaders = [('User-agent', self.version)]
+        self.__tempfiles = []
+        self.__unlink = os.unlink # See cleanup()
+        self.tempcache = None
+        # Undocumented feature: if you assign {} to tempcache,
+        # it is used to cache files retrieved with
+        # self.retrieve().  This is not enabled by default
+        # since it does not work for changing documents (and I
+        # haven't got the logic to check expiration headers
+        # yet).
+        self.ftpcache = ftpcache
+        # Undocumented feature: you can use a different
+        # ftp cache by assigning to the .ftpcache member;
+        # in case you want logically independent URL openers
+        # XXX This is not threadsafe.  Bah.
+
+    def __del__(self):
+        self.close()
+
+    def close(self):
+        self.cleanup()
+
+    def cleanup(self):
+        # This code sometimes runs when the rest of this module
+        # has already been deleted, so it can't use any globals
+        # or import anything.
+        if self.__tempfiles:
+            for file in self.__tempfiles:
+                try:
+                    self.__unlink(file)
+                except OSError:
+                    pass
+            del self.__tempfiles[:]
+        if self.tempcache:
+            self.tempcache.clear()
+
+    def addheader(self, *args):
+        """Add a header to be used by the HTTP interface only
+        e.g. u.addheader('Accept', 'sound/basic')"""
+        self.addheaders.append(args)
+
+    # External interface
+    def open(self, fullurl, data=None):
+        """Use URLopener().open(file) instead of open(file, 'r')."""
+        fullurl = unwrap(toBytes(fullurl))
+        # Serve from the (optional) retrieve() cache when possible.
+        if self.tempcache and fullurl in self.tempcache:
+            filename, headers = self.tempcache[fullurl]
+            fp = open(filename, 'rb')
+            return addinfourl(fp, headers, fullurl)
+        urltype, url = splittype(fullurl)
+        if not urltype:
+            urltype = 'file'
+        if urltype in self.proxies:
+            proxy = self.proxies[urltype]
+            urltype, proxyhost = splittype(proxy)
+            host, selector = splithost(proxyhost)
+            url = (host, fullurl) # Signal special case to open_*()
+        else:
+            proxy = None
+        # Dispatch to the open_<scheme>() handler for this URL type.
+        name = 'open_' + urltype
+        self.type = urltype
+        name = name.replace('-', '_')
+        if not hasattr(self, name):
+            if proxy:
+                return self.open_unknown_proxy(proxy, fullurl, data)
+            else:
+                return self.open_unknown(fullurl, data)
+        try:
+            if data is None:
+                return getattr(self, name)(url)
+            else:
+                return getattr(self, name)(url, data)
+        except socket.error, msg:
+            # Re-raise socket errors as IOError, preserving the traceback.
+            raise IOError, ('socket error', msg), sys.exc_info()[2]
+
+    def open_unknown(self, fullurl, data=None):
+        """Overridable interface to open unknown URL type."""
+        type, url = splittype(fullurl)
+        raise IOError, ('url error', 'unknown url type', type)
+
+    def open_unknown_proxy(self, proxy, fullurl, data=None):
+        """Overridable interface to open unknown URL type."""
+        type, url = splittype(fullurl)
+        raise IOError, ('url error', 'invalid proxy for %s' % type, proxy)
+
+    # External interface
+    def retrieve(self, url, filename=None, reporthook=None, data=None):
+        """retrieve(url) returns (filename, headers) for a local object
+        or (tempfilename, headers) for a remote object."""
+        url = unwrap(toBytes(url))
+        if self.tempcache and url in self.tempcache:
+            return self.tempcache[url]
+        type, url1 = splittype(url)
+        # Local files need no copy: return the path directly.
+        if filename is None and (not type or type == 'file'):
+            try:
+                fp = self.open_local_file(url1)
+                hdrs = fp.info()
+                del fp
+                return url2pathname(splithost(url1)[1]), hdrs
+            except IOError, msg:
+                pass
+        fp = self.open(url, data)
+        headers = fp.info()
+        if filename:
+            tfp = open(filename, 'wb')
+        else:
+            # No target given: download into a temp file whose suffix
+            # mirrors the URL path's extension.
+            import tempfile
+            garbage, path = splittype(url)
+            garbage, path = splithost(path or "")
+            path, garbage = splitquery(path or "")
+            path, garbage = splitattr(path or "")
+            suffix = os.path.splitext(path)[1]
+            (fd, filename) = tempfile.mkstemp(suffix)
+            self.__tempfiles.append(filename)
+            tfp = os.fdopen(fd, 'wb')
+        result = filename, headers
+        if self.tempcache is not None:
+            self.tempcache[url] = result
+        bs = 1024*8
+        size = -1
+        blocknum = 1
+        # reporthook is called as reporthook(block number, block size,
+        # total size); total size is -1 when unknown.
+        if reporthook:
+            if "content-length" in headers:
+                # NOTE(review): relies on case-insensitive header lookup
+                # in the mimetools.Message mapping.
+                size = int(headers["Content-Length"])
+            reporthook(0, bs, size)
+        block = fp.read(bs)
+        if reporthook:
+            reporthook(1, bs, size)
+        while block:
+            tfp.write(block)
+            block = fp.read(bs)
+            blocknum = blocknum + 1
+            if reporthook:
+                reporthook(blocknum, bs, size)
+        fp.close()
+        tfp.close()
+        del fp
+        del tfp
+        return result
+
+    # Each method named open_<type> knows how to open that type of URL
+
+    def open_http(self, url, data=None):
+        """Use HTTP protocol."""
+        import httplib
+        user_passwd = None
+        if isinstance(url, str):
+            # Direct request: url is "//host/path".
+            host, selector = splithost(url)
+            if host:
+                user_passwd, host = splituser(host)
+                host = unquote(host)
+            realhost = host
+        else:
+            # Proxy request: url is a (proxyhost, full URL) pair set
+            # up by open().
+            host, selector = url
+            urltype, rest = splittype(selector)
+            url = rest
+            user_passwd = None
+            if urltype.lower() != 'http':
+                realhost = None
+            else:
+                realhost, rest = splithost(rest)
+                if realhost:
+                    user_passwd, realhost = splituser(realhost)
+                if user_passwd:
+                    selector = "%s://%s%s" % (urltype, realhost, rest)
+                if proxy_bypass(realhost):
+                    host = realhost
+
+            #print "proxy via http:", host, selector
+        if not host: raise IOError, ('http error', 'no host given')
+        if user_passwd:
+            import base64
+            auth = base64.encodestring(user_passwd).strip()
+        else:
+            auth = None
+        h = httplib.HTTP(host)
+        if data is not None:
+            # Presence of a data argument turns the request into a POST.
+            h.putrequest('POST', selector)
+            h.putheader('Content-type', 'application/x-www-form-urlencoded')
+            h.putheader('Content-length', '%d' % len(data))
+        else:
+            h.putrequest('GET', selector)
+        if auth: h.putheader('Authorization', 'Basic %s' % auth)
+        if realhost: h.putheader('Host', realhost)
+        for args in self.addheaders: h.putheader(*args)
+        h.endheaders()
+        if data is not None:
+            h.send(data)
+        errcode, errmsg, headers = h.getreply()
+        fp = h.getfile()
+        if errcode == 200:
+            return addinfourl(fp, headers, "http:" + url)
+        else:
+            # Non-200: let http_error() (or subclass handlers) decide.
+            if data is None:
+                return self.http_error(url, fp, errcode, errmsg, headers)
+            else:
+                return self.http_error(url, fp, errcode, errmsg, headers, data)
+
+    def http_error(self, url, fp, errcode, errmsg, headers, data=None):
+        """Handle http errors.
+        Derived class can override this, or provide specific handlers
+        named http_error_DDD where DDD is the 3-digit error code."""
+        # First check if there's a specific handler for this error
+        name = 'http_error_%d' % errcode
+        if hasattr(self, name):
+            method = getattr(self, name)
+            if data is None:
+                result = method(url, fp, errcode, errmsg, headers)
+            else:
+                result = method(url, fp, errcode, errmsg, headers, data)
+            if result: return result
+        return self.http_error_default(url, fp, errcode, errmsg, headers)
+
+    def http_error_default(self, url, fp, errcode, errmsg, headers):
+        """Default error handler: close the connection and raise IOError."""
+        void = fp.read()
+        fp.close()
+        raise IOError, ('http error', errcode, errmsg, headers)
+
+    # open_https is only defined when the interpreter was built with SSL.
+    if hasattr(socket, "ssl"):
+        def open_https(self, url, data=None):
+            """Use HTTPS protocol."""
+            import httplib
+            user_passwd = None
+            if isinstance(url, str):
+                host, selector = splithost(url)
+                if host:
+                    user_passwd, host = splituser(host)
+                    host = unquote(host)
+                realhost = host
+            else:
+                # Proxy case, mirroring open_http() above.
+                host, selector = url
+                urltype, rest = splittype(selector)
+                url = rest
+                user_passwd = None
+                if urltype.lower() != 'https':
+                    realhost = None
+                else:
+                    realhost, rest = splithost(rest)
+                    if realhost:
+                        user_passwd, realhost = splituser(realhost)
+                    if user_passwd:
+                        selector = "%s://%s%s" % (urltype, realhost, rest)
+                #print "proxy via https:", host, selector
+            if not host: raise IOError, ('https error', 'no host given')
+            if user_passwd:
+                import base64
+                auth = base64.encodestring(user_passwd).strip()
+            else:
+                auth = None
+            h = httplib.HTTPS(host, 0,
+                              key_file=self.key_file,
+                              cert_file=self.cert_file)
+            if data is not None:
+                h.putrequest('POST', selector)
+                h.putheader('Content-type',
+                            'application/x-www-form-urlencoded')
+                h.putheader('Content-length', '%d' % len(data))
+            else:
+                h.putrequest('GET', selector)
+            if auth: h.putheader('Authorization', 'Basic %s' % auth)
+            if realhost: h.putheader('Host', realhost)
+            for args in self.addheaders: h.putheader(*args)
+            h.endheaders()
+            if data is not None:
+                h.send(data)
+            errcode, errmsg, headers = h.getreply()
+            fp = h.getfile()
+            if errcode == 200:
+                return addinfourl(fp, headers, "https:" + url)
+            else:
+                if data is None:
+                    return self.http_error(url, fp, errcode, errmsg, headers)
+                else:
+                    return self.http_error(url, fp, errcode, errmsg, headers,
+                                           data)
+
+    def open_gopher(self, url):
+        """Use Gopher protocol."""
+        import gopherlib
+        host, selector = splithost(url)
+        if not host: raise IOError, ('gopher error', 'no host given')
+        host = unquote(host)
+        type, selector = splitgophertype(selector)
+        selector, query = splitquery(selector)
+        selector = unquote(selector)
+        if query:
+            query = unquote(query)
+            fp = gopherlib.send_query(selector, query, host)
+        else:
+            fp = gopherlib.send_selector(selector, host)
+        return addinfourl(fp, noheaders(), "gopher:" + url)
+
+    def open_file(self, url):
+        """Use local file or FTP depending on form of URL."""
+        # file://host/... with a non-local, non-empty host falls back to FTP.
+        if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
+            return self.open_ftp(url)
+        else:
+            return self.open_local_file(url)
+
+    def open_local_file(self, url):
+        """Use local file."""
+        import mimetypes, mimetools, email.Utils, StringIO
+        host, file = splithost(url)
+        localname = url2pathname(file)
+        try:
+            stats = os.stat(localname)
+        except OSError, e:
+            raise IOError(e.errno, e.strerror, e.filename)
+        size = stats.st_size
+        modified = email.Utils.formatdate(stats.st_mtime, usegmt=True)
+        mtype = mimetypes.guess_type(url)[0]
+        # Synthesize HTTP-style headers from the file's stat() info.
+        headers = mimetools.Message(StringIO.StringIO(
+            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
+            (mtype or 'text/plain', size, modified)))
+        if not host:
+            urlfile = file
+            if file[:1] == '/':
+                urlfile = 'file://' + file
+            return addinfourl(open(localname, 'rb'),
+                              headers, urlfile)
+        host, port = splitport(host)
+        # A named host is acceptable only if it resolves to this machine.
+        if not port \
+           and socket.gethostbyname(host) in (localhost(), thishost()):
+            urlfile = file
+            if file[:1] == '/':
+                urlfile = 'file://' + file
+            return addinfourl(open(localname, 'rb'),
+                              headers, urlfile)
+        raise IOError, ('local file error', 'not on local host')
+
+    def open_ftp(self, url):
+        """Use FTP protocol."""
+        import mimetypes, mimetools, StringIO
+        host, path = splithost(url)
+        if not host: raise IOError, ('ftp error', 'no host given')
+        host, port = splitport(host)
+        user, host = splituser(host)
+        if user: user, passwd = splitpasswd(user)
+        else: passwd = None
+        host = unquote(host)
+        user = unquote(user or '')
+        passwd = unquote(passwd or '')
+        host = socket.gethostbyname(host)
+        if not port:
+            import ftplib
+            port = ftplib.FTP_PORT
+        else:
+            port = int(port)
+        path, attrs = splitattr(path)
+        path = unquote(path)
+        dirs = path.split('/')
+        dirs, file = dirs[:-1], dirs[-1]
+        if dirs and not dirs[0]: dirs = dirs[1:]
+        if dirs and not dirs[0]: dirs[0] = '/'
+        # Connections are cached per (user, host, port, directory).
+        key = user, host, port, '/'.join(dirs)
+        # XXX thread unsafe!
+        if len(self.ftpcache) > MAXFTPCACHE:
+            # Prune the cache, rather arbitrarily
+            for k in self.ftpcache.keys():
+                if k != key:
+                    v = self.ftpcache[k]
+                    del self.ftpcache[k]
+                    v.close()
+        try:
+            if not key in self.ftpcache:
+                self.ftpcache[key] = \
+                    ftpwrapper(user, passwd, host, port, dirs)
+            # 'D' = directory listing, 'I' = binary file transfer;
+            # may be overridden by a ";type=x" URL attribute below.
+            if not file: type = 'D'
+            else: type = 'I'
+            for attr in attrs:
+                attr, value = splitvalue(attr)
+                if attr.lower() == 'type' and \
+                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
+                    type = value.upper()
+            (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
+            mtype = mimetypes.guess_type("ftp:" + url)[0]
+            headers = ""
+            if mtype:
+                headers += "Content-Type: %s\n" % mtype
+            if retrlen is not None and retrlen >= 0:
+                headers += "Content-Length: %d\n" % retrlen
+            headers = mimetools.Message(StringIO.StringIO(headers))
+            return addinfourl(fp, headers, "ftp:" + url)
+        except ftperrors(), msg:
+            raise IOError, ('ftp error', msg), sys.exc_info()[2]
+
+    def open_data(self, url, data=None):
+        """Use "data" URL."""
+        # ignore POSTed data
+        #
+        # syntax of data URLs:
+        # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
+        # mediatype := [ type "/" subtype ] *( ";" parameter )
+        # data      := *urlchar
+        # parameter := attribute "=" value
+        import StringIO, mimetools
+        try:
+            [type, data] = url.split(',', 1)
+        except ValueError:
+            raise IOError, ('data error', 'bad data URL')
+        if not type:
+            type = 'text/plain;charset=US-ASCII'
+        # A trailing ";token" without '=' is the encoding (e.g. ";base64").
+        semi = type.rfind(';')
+        if semi >= 0 and '=' not in type[semi:]:
+            encoding = type[semi+1:]
+            type = type[:semi]
+        else:
+            encoding = ''
+        msg = []
+        msg.append('Date: %s'%time.strftime('%a, %d %b %Y %T GMT',
+                                            time.gmtime(time.time())))
+        msg.append('Content-type: %s' % type)
+        if encoding == 'base64':
+            import base64
+            data = base64.decodestring(data)
+        else:
+            data = unquote(data)
+        msg.append('Content-length: %d' % len(data))
+        msg.append('')
+        msg.append(data)
+        msg = '\n'.join(msg)
+        f = StringIO.StringIO(msg)
+        headers = mimetools.Message(f, 0)
+        f.fileno = None     # needed for addinfourl
+        return addinfourl(f, headers, url)
+
+
+class FancyURLopener(URLopener):
+    """Derived class with handlers for errors we can handle (perhaps)."""
+
+    def __init__(self, *args, **kwargs):
+        URLopener.__init__(self, *args, **kwargs)
+        # Cache of (user, passwd) keyed by "realm@host" for basic auth.
+        self.auth_cache = {}
+        # Redirect-recursion guard used by http_error_302().
+        self.tries = 0
+        self.maxtries = 10
+
+    def http_error_default(self, url, fp, errcode, errmsg, headers):
+        """Default error handling -- don't raise an exception."""
+        return addinfourl(fp, headers, "http:" + url)
+
+    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
+        """Error 302 -- relocated (temporarily)."""
+        self.tries += 1
+        if self.maxtries and self.tries >= self.maxtries:
+            # Too many redirects: report as a synthetic 500 error.
+            if hasattr(self, "http_error_500"):
+                meth = self.http_error_500
+            else:
+                meth = self.http_error_default
+            self.tries = 0
+            return meth(url, fp, 500,
+                        "Internal Server Error: Redirect Recursion", headers)
+        result = self.redirect_internal(url, fp, errcode, errmsg, headers,
+                                        data)
+        self.tries = 0
+        return result
+
+    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
+        # Follow the Location (or legacy URI) header; returns None when
+        # the response carries neither.
+        if 'location' in headers:
+            newurl = headers['location']
+        elif 'uri' in headers:
+            newurl = headers['uri']
+        else:
+            return
+        void = fp.read()
+        fp.close()
+        # In case the server sent a relative URL, join with original:
+        newurl = basejoin(self.type + ":" + url, newurl)
+        return self.open(newurl)
+
+    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
+        """Error 301 -- also relocated (permanently)."""
+        return self.http_error_302(url, fp, errcode, errmsg, headers, data)
+
+    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
+        """Error 303 -- also relocated (essentially identical to 302)."""
+        return self.http_error_302(url, fp, errcode, errmsg, headers, data)
+
+    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
+        """Error 307 -- relocated, but turn POST into error."""
+        if data is None:
+            return self.http_error_302(url, fp, errcode, errmsg, headers, data)
+        else:
+            return self.http_error_default(url, fp, errcode, errmsg, headers)
+
+    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None):
+        """Error 401 -- authentication required.
+        See this URL for a description of the basic authentication scheme:
+        http://www.ics.uci.edu/pub/ietf/http/draft-ietf-http-v10-spec-00.txt"""
+        # The explicit base-class http_error_default() calls below raise
+        # IOError, so execution never continues past them.
+        if not 'www-authenticate' in headers:
+            URLopener.http_error_default(self, url, fp,
+                                         errcode, errmsg, headers)
+        stuff = headers['www-authenticate']
+        import re
+        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
+        if not match:
+            URLopener.http_error_default(self, url, fp,
+                                         errcode, errmsg, headers)
+        scheme, realm = match.groups()
+        # Only HTTP Basic authentication is supported here.
+        if scheme.lower() != 'basic':
+            URLopener.http_error_default(self, url, fp,
+                                         errcode, errmsg, headers)
+        name = 'retry_' + self.type + '_basic_auth'
+        if data is None:
+            return getattr(self,name)(url, realm)
+        else:
+            return getattr(self,name)(url, realm, data)
+
+    def retry_http_basic_auth(self, url, realm, data=None):
+        host, selector = splithost(url)
+        # Strip any credentials already embedded in the host part.
+        i = host.find('@') + 1
+        host = host[i:]
+        user, passwd = self.get_user_passwd(host, realm, i)
+        if not (user or passwd): return None
+        host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
+        newurl = 'http://' + host + selector
+        if data is None:
+            return self.open(newurl)
+        else:
+            return self.open(newurl, data)
+
+    def retry_https_basic_auth(self, url, realm, data=None):
+        host, selector = splithost(url)
+        i = host.find('@') + 1
+        host = host[i:]
+        user, passwd = self.get_user_passwd(host, realm, i)
+        if not (user or passwd): return None
+        host = quote(user, safe='') + ':' + quote(passwd, safe='') + '@' + host
+        newurl = '//' + host + selector
+        return self.open_https(newurl, data)
+
+    def get_user_passwd(self, host, realm, clear_cache = 0):
+        # Look up (or prompt for) credentials; clear_cache forces a re-prompt.
+        key = realm + '@' + host.lower()
+        if key in self.auth_cache:
+            if clear_cache:
+                del self.auth_cache[key]
+            else:
+                return self.auth_cache[key]
+        user, passwd = self.prompt_user_passwd(host, realm)
+        if user or passwd: self.auth_cache[key] = (user, passwd)
+        return user, passwd
+
+    def prompt_user_passwd(self, host, realm):
+        """Override this in a GUI environment!"""
+        import getpass
+        try:
+            user = raw_input("Enter username for %s at %s: " % (realm,
+                                                                host))
+            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
+                (user, realm, host))
+            return user, passwd
+        except KeyboardInterrupt:
+            # Interrupted prompt means "no credentials".
+            print
+            return None, None
+
+
+# Utility functions
+
+# Resolved lazily on first call and cached for the process lifetime.
+_localhost = None
+def localhost():
+    """Return the IP address of the magic hostname 'localhost'."""
+    global _localhost
+    if _localhost is None:
+        _localhost = socket.gethostbyname('localhost')
+    return _localhost
+
_thishost = None
def thishost():
    """Return the IP address of the current host (cached after first use)."""
    global _thishost
    if _thishost is not None:
        return _thishost
    _thishost = socket.gethostbyname(socket.gethostname())
    return _thishost
+
_ftperrors = None
def ftperrors():
    """Return the set of errors raised by the FTP class.

    ftplib is imported lazily and the tuple is cached.
    """
    global _ftperrors
    if _ftperrors is not None:
        return _ftperrors
    import ftplib
    _ftperrors = ftplib.all_errors
    return _ftperrors
+
_noheaders = None
def noheaders():
    """Return an empty mimetools.Message object (shared singleton)."""
    global _noheaders
    if _noheaders is not None:
        return _noheaders
    import mimetools
    import StringIO
    _noheaders = mimetools.Message(StringIO.StringIO(), 0)
    _noheaders.fp.close()   # Recycle file descriptor
    return _noheaders
+
+
+# Utility classes
+
class ftpwrapper:
    """Class used by open_ftp() for cache of open FTP connections."""

    def __init__(self, user, passwd, host, port, dirs):
        # Connection parameters are stored so init() can reconnect after
        # the server drops an idle control connection.
        self.user = user
        self.passwd = passwd
        self.host = host
        self.port = port
        self.dirs = dirs
        self.init()

    def init(self):
        # (Re)open the control connection, log in, and chdir into place.
        import ftplib
        self.busy = 0
        self.ftp = ftplib.FTP()
        self.ftp.connect(self.host, self.port)
        self.ftp.login(self.user, self.passwd)
        for dir in self.dirs:
            self.ftp.cwd(dir)

    def retrfile(self, file, type):
        # Retrieve `file` ('d'/'D' type means directory listing).  Returns
        # a (file-like object, length) pair; closing the file-like object
        # runs endtransfer() so the connection becomes reusable.
        import ftplib
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            # Control connection went away -- reconnect and retry once.
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Use nlst to see if the file exists at all
            try:
                self.ftp.nlst(file)
            except ftplib.error_perm, reason:
                raise IOError, ('ftp error', reason), sys.exc_info()[2]
            # Restore the transfer mode!
            self.ftp.voidcmd(cmd)
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn = self.ftp.ntransfercmd(cmd)
            except ftplib.error_perm, reason:
                # 550 means "not a plain file": fall through to a LIST
                # below; any other permanent error is fatal.
                if str(reason)[:3] != '550':
                    raise IOError, ('ftp error', reason), sys.exc_info()[2]
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing
            if file: cmd = 'LIST ' + file
            else: cmd = 'LIST'
            conn = self.ftp.ntransfercmd(cmd)
        self.busy = 1
        # Pass back both a suitably decorated object and a retrieval length
        return (addclosehook(conn[0].makefile('rb'),
                             self.endtransfer), conn[1])

    def endtransfer(self):
        # Drain the end-of-transfer response so the connection is reusable.
        if not self.busy:
            return
        self.busy = 0
        try:
            self.ftp.voidresp()
        except ftperrors():
            # Best effort: a broken connection will be rebuilt by init().
            pass

    def close(self):
        # Finish any pending transfer, then drop the control connection.
        self.endtransfer()
        try:
            self.ftp.close()
        except ftperrors():
            pass
+
class addbase:
    """Base class for addinfo and addclosehook.

    Wraps an open file-like object and forwards its read interface by
    binding the underlying bound methods directly onto the instance.
    """

    def __init__(self, fp):
        self.fp = fp
        self.read = self.fp.read
        self.readline = self.fp.readline
        # Optional parts of the file protocol are forwarded only when the
        # wrapped object actually provides them.
        for optional in ("readlines", "fileno"):
            if hasattr(self.fp, optional):
                setattr(self, optional, getattr(self.fp, optional))
        if hasattr(self.fp, "__iter__"):
            self.__iter__ = self.fp.__iter__
            if hasattr(self.fp, "next"):
                self.next = self.fp.next

    def __repr__(self):
        return '<%s at %r whose fp = %r>' % (self.__class__.__name__,
                                             id(self), self.fp)

    def close(self):
        # Drop the forwarded methods so use-after-close fails loudly, then
        # close the underlying file.
        self.read = None
        self.readline = None
        self.readlines = None
        self.fileno = None
        if self.fp:
            self.fp.close()
        self.fp = None

class addclosehook(addbase):
    """Class to add a close hook to an open file."""

    def __init__(self, fp, closehook, *hookargs):
        addbase.__init__(self, fp)
        self.closehook = closehook
        self.hookargs = hookargs

    def close(self):
        addbase.close(self)
        # Fire the hook exactly once, on the first close.
        hook = self.closehook
        if hook:
            hook(*self.hookargs)
            self.closehook = None
            self.hookargs = None

class addinfo(addbase):
    """class to add an info() method to an open file."""

    def __init__(self, fp, headers):
        addbase.__init__(self, fp)
        self.headers = headers

    def info(self):
        return self.headers

class addinfourl(addbase):
    """class to add info() and geturl() methods to an open file."""

    def __init__(self, fp, headers, url):
        addbase.__init__(self, fp)
        self.headers = headers
        self.url = url

    def info(self):
        return self.headers

    def geturl(self):
        return self.url
+
+
+# Utilities to parse URLs (most of these return None for missing parts):
+# unwrap('<URL:type://host/path>') --> 'type://host/path'
+# splittype('type:opaquestring') --> 'type', 'opaquestring'
+# splithost('//host[:port]/path') --> 'host[:port]', '/path'
+# splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'
+# splitpasswd('user:passwd') -> 'user', 'passwd'
+# splitport('host:port') --> 'host', 'port'
+# splitquery('/path?query') --> '/path', 'query'
+# splittag('/path#tag') --> '/path', 'tag'
+# splitattr('/path;attr1=value1;attr2=value2;...') ->
+#   '/path', ['attr1=value1', 'attr2=value2', ...]
+# splitvalue('attr=value') --> 'attr', 'value'
+# splitgophertype('/Xselector') --> 'X', 'selector'
+# unquote('abc%20def') -> 'abc def'
+# quote('abc def') -> 'abc%20def')
+
# Feature probe: Python 2 has a distinct ``unicode`` type.  Define
# _is_unicode() accordingly so the rest of the module need not care
# whether that type exists.
try:
    unicode
except NameError:
    def _is_unicode(x):
        # No unicode type in this interpreter, so nothing is unicode.
        return 0
else:
    def _is_unicode(x):
        return isinstance(x, unicode)
+
def toBytes(url):
    """toBytes(u"URL") --> 'URL'."""
    # Most URL schemes require ASCII. If that changes, the conversion
    # can be relaxed
    if not _is_unicode(url):
        return url
    try:
        return url.encode("ASCII")
    except UnicodeError:
        raise UnicodeError("URL " + repr(url) +
                           " contains non-ASCII characters")
+
def unwrap(url):
    """unwrap('<URL:type://host/path>') --> 'type://host/path'."""
    url = url.strip()
    # Peel off an optional <...> wrapper, then an optional URL: prefix.
    if url.startswith('<') and url.endswith('>') and len(url) >= 2:
        url = url[1:-1].strip()
    if url.startswith('URL:'):
        url = url[4:].strip()
    return url
+
_typeprog = None
def splittype(url):
    """splittype('type:opaquestring') --> 'type', 'opaquestring'."""
    global _typeprog
    if _typeprog is None:
        # Compile lazily and cache; a scheme is everything before the
        # first ':' that contains no '/' or ':'.
        import re
        _typeprog = re.compile('^([^/:]+):')

    found = _typeprog.match(url)
    if not found:
        return None, url
    scheme = found.group(1)
    return scheme.lower(), url[len(scheme) + 1:]
+
_hostprog = None
def splithost(url):
    """splithost('//host[:port]/path') --> 'host[:port]', '/path'."""
    global _hostprog
    if _hostprog is None:
        import re
        _hostprog = re.compile('^//([^/]*)(.*)$')

    found = _hostprog.match(url)
    if not found:
        return None, url
    return found.group(1, 2)
+
_userprog = None
def splituser(host):
    """splituser('user[:passwd]@host[:port]') --> 'user[:passwd]', 'host[:port]'."""
    global _userprog
    if _userprog is None:
        # Greedy first group: split on the LAST '@' in the string.
        import re
        _userprog = re.compile('^(.*)@(.*)$')

    found = _userprog.match(host)
    if not found:
        return None, host
    # Both halves may contain %xx escapes; undo them.
    return map(unquote, found.group(1, 2))
+
_passwdprog = None
def splitpasswd(user):
    """splitpasswd('user:passwd') -> 'user', 'passwd'."""
    global _passwdprog
    if _passwdprog is None:
        # Split on the FIRST ':' so the password may itself contain ':'.
        import re
        _passwdprog = re.compile('^([^:]*):(.*)$')

    found = _passwdprog.match(user)
    if not found:
        return user, None
    return found.group(1, 2)
+
_portprog = None
def splitport(host):
    """splitport('host:port') --> 'host', 'port'.

    The port must be all digits; otherwise the whole string is returned
    as the host with no port.
    """
    global _portprog
    if _portprog is None:
        import re
        _portprog = re.compile('^(.*):([0-9]+)$')

    found = _portprog.match(host)
    if found:
        return found.group(1, 2)
    return host, None
+
_nportprog = None
def splitnport(host, defport=-1):
    """Split host and port, returning numeric port.

    Return given default port if no ':' found; defaults to -1.
    Return numerical port if a valid number is found after ':'.
    Return None if ':' is present but not followed by a valid number."""
    global _nportprog
    if _nportprog is None:
        import re
        _nportprog = re.compile('^(.*):(.*)$')

    found = _nportprog.match(host)
    if not found:
        return host, defport
    name, port = found.group(1, 2)
    # Empty or non-numeric port text maps to None.
    nport = None
    if port:
        try:
            nport = int(port)
        except ValueError:
            nport = None
    return name, nport
+
_queryprog = None
def splitquery(url):
    """splitquery('/path?query') --> '/path', 'query'."""
    global _queryprog
    if _queryprog is None:
        # Greedy path group: split on the LAST '?'.
        import re
        _queryprog = re.compile('^(.*)\?([^?]*)$')

    found = _queryprog.match(url)
    if not found:
        return url, None
    return found.group(1, 2)
+
_tagprog = None
def splittag(url):
    """splittag('/path#tag') --> '/path', 'tag'."""
    global _tagprog
    if _tagprog is None:
        # Greedy path group: split on the LAST '#'.
        import re
        _tagprog = re.compile('^(.*)#([^#]*)$')

    found = _tagprog.match(url)
    if not found:
        return url, None
    return found.group(1, 2)
+
def splitattr(url):
    """splitattr('/path;attr1=value1;attr2=value2;...') ->
        '/path', ['attr1=value1', 'attr2=value2', ...]."""
    pieces = url.split(';')
    return pieces[0], pieces[1:]
+
_valueprog = None
def splitvalue(attr):
    """splitvalue('attr=value') --> 'attr', 'value'."""
    global _valueprog
    if _valueprog is None:
        # Split on the FIRST '=' so the value may itself contain '='.
        import re
        _valueprog = re.compile('^([^=]*)=(.*)$')

    found = _valueprog.match(attr)
    if not found:
        return attr, None
    return found.group(1, 2)
+
def splitgophertype(selector):
    """splitgophertype('/Xselector') --> 'X', 'selector'."""
    # Requires a leading '/' followed by at least one character (the type).
    if selector.startswith('/') and selector[1:2]:
        return selector[1], selector[2:]
    return None, selector
+
def unquote(s):
    """unquote('abc%20def') -> 'abc def'."""
    pieces = s.split('%')
    decoded = [pieces[0]]
    # Every remaining piece begins where a '%' was; decode its first two
    # characters as a hex byte, or restore the '%' verbatim on failure.
    for piece in pieces[1:]:
        if piece[1:2]:
            try:
                decoded.append(chr(int(piece[:2], 16)) + piece[2:])
            except ValueError:
                decoded.append('%' + piece)
        else:
            decoded.append('%' + piece)
    return "".join(decoded)
+
def unquote_plus(s):
    """unquote('%7e/abc+def') -> '~/abc def'"""
    # '+' encodes a space in query strings; map it first, then unescape.
    return unquote(s.replace('+', ' '))
+
# Characters that never need quoting in any part of a URL.
always_safe = ('ABCDEFGHIJKLMNOPQRSTUVWXYZ'
               'abcdefghijklmnopqrstuvwxyz'
               '0123456789' '_.-')

_fast_safe_test = always_safe + '/'
_fast_safe = None

def _fast_quote(s):
    """Quote *s* assuming the default safe set (always_safe plus '/')."""
    global _fast_safe
    if _fast_safe is None:
        # Build a membership table once; dict lookup beats a string scan.
        _fast_safe = {}
        for ch in _fast_safe_test:
            _fast_safe[ch] = ch
    chars = list(s)
    for pos in range(len(chars)):
        ch = chars[pos]
        if ch not in _fast_safe:
            chars[pos] = '%%%02X' % ord(ch)
    return ''.join(chars)

def quote(s, safe = '/'):
    """quote('abc def') -> 'abc%20def'

    Each part of a URL, e.g. the path info, the query, etc., has a
    different set of reserved characters that must be quoted.

    RFC 2396 Uniform Resource Identifiers (URI): Generic Syntax lists
    the following reserved characters.

    reserved    = ";" | "/" | "?" | ":" | "@" | "&" | "=" | "+" |
                  "$" | ","

    Each of these characters is reserved in some component of a URL,
    but not necessarily in all of them.

    By default, the quote function is intended for quoting the path
    section of a URL.  Thus, it will not encode '/'.  This character
    is reserved, but in typical usage the quote function is being
    called on a path where the existing slash characters are used as
    reserved characters.
    """
    safe = always_safe + safe
    # The common case (default safe set) takes the precomputed fast path.
    if _fast_safe_test == safe:
        return _fast_quote(s)
    quoted = []
    for ch in s:
        if ch in safe:
            quoted.append(ch)
        else:
            quoted.append('%%%02X' % ord(ch))
    return ''.join(quoted)

def quote_plus(s, safe = ''):
    """Quote the query fragment of a URL; replacing ' ' with '+'"""
    if ' ' not in s:
        return quote(s, safe)
    return '+'.join([quote(piece, safe) for piece in s.split(' ')])
+
+def urlencode(query,doseq=0):
+    """Encode a sequence of two-element tuples or dictionary into a URL query string.
+
+    If any values in the query arg are sequences and doseq is true, each
+    sequence element is converted to a separate parameter.
+
+    If the query arg is a sequence of two-element tuples, the order of the
+    parameters in the output will match the order of parameters in the
+    input.
+    """
+
+    if hasattr(query,"items"):
+        # mapping objects
+        query = query.items()
+    else:
+        # it's a bother at times that strings and string-like objects are
+        # sequences...
+        try:
+            # non-sequence items should not work with len()
+            # non-empty strings will fail this
+            if len(query) and not isinstance(query[0], tuple):
+                raise TypeError
+            # zero-length sequences of all types will get here and succeed,
+            # but that's a minor nit - since the original implementation
+            # allowed empty dicts that type of behavior probably should be
+            # preserved for consistency
+        except TypeError:
+            ty,va,tb = sys.exc_info()
+            raise TypeError, "not a valid non-string sequence or mapping object", tb
+
+    l = []
+    if not doseq:
+        # preserve old behavior
+        for k, v in query:
+            k = quote_plus(str(k))
+            v = quote_plus(str(v))
+            l.append(k + '=' + v)
+    else:
+        for k, v in query:
+            k = quote_plus(str(k))
+            if isinstance(v, str):
+                v = quote_plus(v)
+                l.append(k + '=' + v)
+            elif _is_unicode(v):
+                # is there a reasonable way to convert to ASCII?
+                # encode generates a string, but "replace" or "ignore"
+                # lose information and "strict" can raise UnicodeError
+                v = quote_plus(v.encode("ASCII","replace"))
+                l.append(k + '=' + v)
+            else:
+                try:
+                    # is this a sufficient test for sequence-ness?
+                    x = len(v)
+                except TypeError:
+                    # not a sequence
+                    v = quote_plus(str(v))
+                    l.append(k + '=' + v)
+                else:
+                    # loop over the sequence
+                    for elt in v:
+                        l.append(k + '=' + quote_plus(str(elt)))
+    return '&'.join(l)
+
+# Proxy handling
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention.  If you need a
    different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.

    """
    proxies = {}
    for name, value in os.environ.items():
        lowered = name.lower()
        # Empty values mean "no proxy" and are skipped.
        if value and lowered.endswith('_proxy'):
            proxies[lowered[:-6]] = value
    return proxies
+
# Platform-specific proxy discovery: Mac (Internet Config), Windows
# (registry), everything else (environment variables only).
if sys.platform == 'darwin':
    def getproxies_internetconfig():
        """Return a dictionary of scheme -> proxy server URL mappings.

        By convention the mac uses Internet Config to store
        proxies.  An HTTP proxy, for instance, is stored under
        the HttpProxy key.

        """
        try:
            import ic
        except ImportError:
            # Internet Config bindings unavailable -- report no proxies.
            return {}

        try:
            config = ic.IC()
        except ic.error:
            return {}
        proxies = {}
        # HTTP:
        if 'UseHTTPProxy' in config and config['UseHTTPProxy']:
            try:
                value = config['HTTPProxyHost']
            except ic.error:
                pass
            else:
                proxies['http'] = 'http://%s' % value
        # FTP: XXXX To be done.
        # Gopher: XXXX To be done.
        return proxies

    def proxy_bypass(x):
        # No bypass-list support in this implementation.
        return 0

    def getproxies():
        # Environment variables take precedence over Internet Config.
        return getproxies_environment() or getproxies_internetconfig()

elif os.name == 'nt':
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.

        """
        proxies = {}
        try:
            import _winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = _winreg.QueryValueEx(internetSettings,
                                               'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode but problems if not converted to ASCII
                proxyServer = str(_winreg.QueryValueEx(internetSettings,
                                                       'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        import re
                        if not re.match('^([^/:]+)://', address):
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            # proxies already set up to be empty so nothing to do
            pass
        return proxies

    def getproxies():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.

        """
        return getproxies_environment() or getproxies_registry()

    def proxy_bypass(host):
        # Return 1 when `host` matches the registry ProxyOverride list.
        try:
            import _winreg
            import re
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = _winreg.OpenKey(_winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = _winreg.QueryValueEx(internetSettings,
                                               'ProxyEnable')[0]
            proxyOverride = str(_winreg.QueryValueEx(internetSettings,
                                                     'ProxyOverride')[0])
            # ^^^^ Returned as Unicode but problems if not converted to ASCII
        except WindowsError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # try to make a host list from name and IP address.
        host = [host]
        try:
            addr = socket.gethostbyname(host[0])
            # NOTE(review): `addr != host` compares a string against a list
            # and is therefore always true; `addr != host[0]` looks intended.
            # Harmless today (worst case a duplicate entry) -- confirm
            # before changing.
            if addr != host:
                host.append(addr)
        except socket.error:
            pass
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        i = 0
        while i < len(proxyOverride):
            if proxyOverride[i] == '<local>':
                proxyOverride[i:i+1] = ['localhost',
                                        '127.0.0.1',
                                        socket.gethostname(),
                                        socket.gethostbyname(
                                            socket.gethostname())]
            i += 1
        # print proxyOverride
        # now check if we match one of the registry values.
        for test in proxyOverride:
            # Translate the glob-style override entry into a regex.
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            for val in host:
                # print "%s <--> %s" %( test, val )
                if re.match(test, val, re.I):
                    return 1
        return 0

else:
    # By default use environment variables
    getproxies = getproxies_environment

    def proxy_bypass(host):
        return 0
+
+# Test and time quote() and unquote()
def test1():
    """Round-trip all 256 byte values through quote()/unquote() and time it."""
    s = ''
    for i in range(256): s = s + chr(i)
    s = s*4
    t0 = time.time()
    qs = quote(s)
    uqs = unquote(qs)
    t1 = time.time()
    # unquote(quote(s)) must reproduce s exactly.
    if uqs != s:
        print 'Wrong!'
    print repr(s)
    print repr(qs)
    print repr(uqs)
    print round(t1 - t0, 3), 'sec'
+
+
def reporthook(blocknum, blocksize, totalsize):
    """Progress callback for urlretrieve(); prints one line per block."""
    # Report during remote transfers
    print "Block number: %d, Block size: %d, Total size: %d" % (
        blocknum, blocksize, totalsize)
+
+# Test program
def test(args=[]):
    """Fetch each URL in `args` (or a built-in default list) and print it."""
    # NOTE: the mutable [] default is never mutated -- when empty, a fresh
    # list is bound before append() -- so sharing across calls is harmless.
    if not args:
        args = [
            '/etc/passwd',
            'file:/etc/passwd',
            'file://localhost/etc/passwd',
            'ftp://ftp.python.org/pub/python/README',
##          'gopher://gopher.micro.umn.edu/1/',
            'http://www.python.org/index.html',
            ]
        # Only exercise https when this build supports it.
        if hasattr(URLopener, "open_https"):
            args.append('https://synergy.as.cmu.edu/~geek/')
    try:
        for url in args:
            print '-'*10, url, '-'*10
            fn, h = urlretrieve(url, None, reporthook)
            print fn
            if h:
                print '======'
                for k in h.keys(): print k + ':', h[k]
                print '======'
            fp = open(fn, 'rb')
            data = fp.read()
            del fp
            if '\r' in data:
                # Strip carriage returns so output looks the same everywhere.
                table = string.maketrans("", "")
                data = data.translate(table, "\r")
            print data
            fn, h = None, None
        print '-'*40
    finally:
        # Always remove urlretrieve()'s temporary files.
        urlcleanup()
+
def main():
    """Command-line entry point: -h help, -t self-test(s), else fetch URLs."""
    import getopt, sys
    try:
        opts, args = getopt.getopt(sys.argv[1:], "th")
    except getopt.error, msg:
        print msg
        print "Use -h for help"
        return
    t = 0
    for o, a in opts:
        if o == '-t':
            # Each -t raises the self-test level (two runs test1 as well).
            t = t + 1
        if o == '-h':
            print "Usage: python urllib.py [-t] [url ...]"
            print "-t runs self-test;",
            print "otherwise, contents of urls are printed"
            return
    if t:
        if t > 1:
            test1()
        test(args)
    else:
        if not args:
            print "Use -h for help"
        for url in args:
            print urlopen(url).read(),

# Run test program when run as a script
if __name__ == '__main__':
    main()
diff --git a/depot_tools/release/win/python_24/Lib/urllib2.py b/depot_tools/release/win/python_24/Lib/urllib2.py
new file mode 100644
index 0000000..3c777cde
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/urllib2.py
@@ -0,0 +1,1290 @@
+"""An extensible library for opening URLs using a variety of protocols
+
+The simplest way to use this module is to call the urlopen function,
+which accepts a string containing a URL or a Request object (described
+below).  It opens the URL and returns the results as file-like
+object; the returned object has some extra methods described below.
+
+The OpenerDirector manages a collection of Handler objects that do
+all the actual work.  Each Handler implements a particular protocol or
+option.  The OpenerDirector is a composite object that invokes the
+Handlers needed to open the requested URL.  For example, the
+HTTPHandler performs HTTP GET and POST requests and deals with
+non-error returns.  The HTTPRedirectHandler automatically deals with
+HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
+deals with digest authentication.
+
+urlopen(url, data=None) -- basic usage is the same as the original
+urllib.  pass the url and optionally data to post to an HTTP URL, and
+get a file-like object back.  One difference is that you can also pass
+a Request instance instead of URL.  Raises a URLError (subclass of
+IOError); for HTTP errors, raises an HTTPError, which can also be
+treated as a valid response.
+
+build_opener -- function that creates a new OpenerDirector instance.
+will install the default handlers.  accepts one or more Handlers as
+arguments, either instances or Handler classes that it will
+instantiate.  if one of the arguments is a subclass of the default
+handler, the argument will be installed instead of the default.
+
+install_opener -- installs a new opener as the default opener.
+
+objects of interest:
+OpenerDirector --
+
+Request -- an object that encapsulates the state of a request.  the
+state can be as simple as the URL.  it can also include extra HTTP
+headers, e.g. a User-Agent.
+
+BaseHandler --
+
+exceptions:
+URLError -- a subclass of IOError, individual protocols have their own
+specific subclass
+
+HTTPError -- also a valid HTTP response, so you can treat an HTTP error
+as an exceptional event or valid response
+
+internals:
+BaseHandler and parent
+_call_chain conventions
+
+Example usage:
+
+import urllib2
+
+# set up authentication info
+authinfo = urllib2.HTTPBasicAuthHandler()
+authinfo.add_password('realm', 'host', 'username', 'password')
+
+proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
+
+# build a new opener that adds authentication and caching FTP handlers
+opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
+
+# install it
+urllib2.install_opener(opener)
+
+f = urllib2.urlopen('http://www.python.org/')
+
+
+"""
+
+# XXX issues:
+# If an authentication error handler that tries to perform
+# authentication for some reason but fails, how should the error be
+# signalled?  The client needs to know the HTTP error code.  But if
+# the handler knows what the problem was, e.g., that it didn't know
+# the hash algorithm that was requested in the challenge, it would be
+# good to pass that information along to the client, too.
+
+# XXX to do:
+# name!
+# documentation (getting there)
+# complex proxies
+# abstract factory for opener
+# ftp errors aren't handled cleanly
+# gopher can return a socket.error
+# check digest against correct (i.e. non-apache) implementation
+
+import base64
+import ftplib
+import gopherlib
+import httplib
+import inspect
+import md5
+import mimetypes
+import mimetools
+import os
+import posixpath
+import random
+import re
+import sha
+import socket
+import sys
+import time
+import urlparse
+import bisect
+import cookielib
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+# not sure how many of these need to be gotten rid of
+from urllib import (unwrap, unquote, splittype, splithost,
+     addinfourl, splitport, splitgophertype, splitquery,
+     splitattr, ftpwrapper, noheaders, splituser, splitpasswd, splitvalue)
+
+# support for FileHandler, proxies via environment variables
+from urllib import localhost, url2pathname, getproxies
+
+__version__ = "2.4"
+
_opener = None
def urlopen(url, data=None):
    """Open *url* (a string or Request), optionally POSTing *data*.

    Uses the module-wide opener, building the default one on first use.
    """
    global _opener
    opener = _opener
    if opener is None:
        opener = _opener = build_opener()
    return opener.open(url, data)

def install_opener(opener):
    """Install *opener* as the module-wide opener used by urlopen()."""
    global _opener
    _opener = opener
+
+# do these error classes make sense?
+# make sure all of the IOError stuff is overridden.  we just want to be
+# subtypes.
+
class URLError(IOError):
    """Error raised by handlers when a URL cannot be opened.

    Subclasses IOError for compatibility but shares none of its
    implementation; ``args`` is populated only so EnvironmentError-style
    introspection does not break (there is no errno/strerror pair).
    """

    def __init__(self, reason):
        self.reason = reason
        self.args = (reason,)

    def __str__(self):
        return '<urlopen error %s>' % self.reason
+
class HTTPError(URLError, addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    __super_init = addinfourl.__init__

    def __init__(self, url, code, msg, hdrs, fp):
        self.code, self.msg, self.hdrs = code, msg, hdrs
        self.fp, self.filename = fp, url
        # addinfourl requires a usable file object; when the error carries
        # no response body, skip base-class initialisation entirely.
        if fp is not None:
            self.__super_init(fp, hdrs, url)

    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)
+
class GopherError(URLError):
    # URLError subclass reserved for gopher-protocol failures; adds no state.
    pass
+
+
class Request:
    """One URL request: url, optional POST data, and HTTP headers.

    origin_req_host/unverifiable support cookie processing (cookielib).
    The scheme/host/selector pieces are split out of the URL lazily by
    the get_* accessors and cached on private ``__r_*`` attributes.
    """

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.__original = unwrap(url)
        self.type = None
        # self.__r_type is what's left after doing the splittype
        self.host = None
        self.port = None
        self.data = data
        self.headers = {}
        # Copy via add_header() so keys get canonical capitalization.
        # NOTE: the mutable {} default is shared but only iterated here.
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = cookielib.request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable

    def __getattr__(self, attr):
        # XXX this is a fallback mechanism to guard against these
        # methods getting called in a non-standard order.  this may be
        # too complicated and/or unnecessary.
        # XXX should the __r_XXX attributes be public?
        # A missing mangled __r_<name> attribute triggers the matching
        # get_<name>() accessor, which computes and caches it.
        if attr[:12] == '_Request__r_':
            name = attr[12:]
            if hasattr(Request, 'get_' + name):
                getattr(self, 'get_' + name)()
                return getattr(self, attr)
        raise AttributeError, attr

    def get_method(self):
        # POST iff a request body is present; no other methods supported.
        if self.has_data():
            return "POST"
        else:
            return "GET"

    # XXX these helper methods are lame

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        return self.__original

    def get_type(self):
        # Lazily split the scheme off the original URL (cached).
        if self.type is None:
            self.type, self.__r_type = splittype(self.__original)
            if self.type is None:
                raise ValueError, "unknown url type: %s" % self.__original
        return self.type

    def get_host(self):
        # Lazily split the netloc out of the scheme-less URL (cached).
        if self.host is None:
            self.host, self.__r_host = splithost(self.__r_type)
            if self.host:
                self.host = unquote(self.host)
        return self.host

    def get_selector(self):
        return self.__r_host

    def set_proxy(self, host, type):
        # When proxied, the request line must carry the full original URL.
        self.host, self.type = host, type
        self.__r_host = self.__original

    def get_origin_req_host(self):
        return self.origin_req_host

    def is_unverifiable(self):
        return self.unverifiable

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        # Normal headers take precedence over unredirected ones.
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return hdrs.items()
+
class OpenerDirector:
    """Manage a chain of handlers that together open URLs.

    Handlers contribute <proto>_open, <proto>_request, <proto>_response
    and <proto>_error_<code> methods; open() runs the request
    processors, dispatches to the protocol's open methods, then runs
    the response processors.
    """

    def __init__(self):
        server_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', server_version)]
        # manage the individual handlers
        self.handlers = []
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        """Register every protocol method the handler defines."""
        added = False
        for meth in dir(handler):
            # method names look like '<protocol>_<condition>', e.g.
            # 'http_open', 'http_error_404', 'https_response'
            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                # 'kind' is the trailing status code, converted to int
                # when it parses as one (else left as a string such as
                # 'default')
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = getattr(self, "handle_"+condition)
            elif condition in ["response", "request"]:
                kind = protocol
                lookup = getattr(self, "process_"+condition)
            else:
                continue

            # keep each per-kind handler list sorted (handlers sort by
            # handler_order via BaseHandler.__lt__)
            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            # XXX why does self.handlers need to be sorted?
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Call meth_name on each handler registered under 'kind' until
        # one returns a non-None result; returns None when none does.
        # XXX raise an exception if no one else should try to handle
        # this url.  return None if you can't but someone else could.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)

            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None):
        """Open a URL string or Request object; return the response."""
        # accept a URL or a Request object
        if isinstance(fullurl, basestring):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.add_data(data)

        protocol = req.get_type()

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        # default_open handlers get first crack, then the protocol's
        # own *_open handlers, then unknown_open as the last resort
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.get_type()
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        """Dispatch an error to the registered *_error handlers."""
        if proto in ['http', 'https']:
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            # nothing handled the specific code: fall back to the
            # http_error_default handlers
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)
+
+# XXX probably also want an abstract factory that knows when it makes
+# sense to skip a superclass in favor of a subclass and when it might
+# make sense to include both
+
def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP and FTP.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(httplib, 'HTTPS'):
        # HTTPS support only exists when Python was built with SSL
        default_classes.append(HTTPSHandler)
    skip = []
    for klass in default_classes:
        for check in handlers:
            if inspect.isclass(check):
                if issubclass(check, klass):
                    # BUG FIX: record each default class at most once;
                    # appending duplicates made the remove() loop below
                    # raise ValueError when two supplied handlers
                    # overrode the same default class.
                    skip.append(klass)
                    break
            elif isinstance(check, klass):
                skip.append(klass)
                break
    for klass in skip:
        default_classes.remove(klass)

    # instantiate the surviving defaults, then the caller's handlers
    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if inspect.isclass(h):
            h = h()
        opener.add_handler(h)
    return opener
+
class BaseHandler:
    """Common base for opener handlers: parent bookkeeping plus the
    ordering protocol OpenerDirector uses to sort handler chains."""

    # default sort weight; lower values run earlier in the chain
    handler_order = 500

    def add_parent(self, parent):
        # remember the OpenerDirector this handler was registered with
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if hasattr(other, "handler_order"):
            return self.handler_order < other.handler_order
        # Custom user classes that predate handler_order sort after
        # the default handlers, preserving the historical behavior.
        return True
+
+
class HTTPErrorProcessor(BaseHandler):
    """Route non-success HTTP responses into the opener's error chain."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code = response.code
        msg = response.msg
        hdrs = response.info()

        # 200 (OK) and 206 (Partial Content) pass straight through;
        # anything else is handed to the registered error handlers,
        # whose return value (if any) replaces the response.
        if code not in (200, 206):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response
+
class HTTPDefaultErrorHandler(BaseHandler):
    """Last-resort HTTP error handler: raise the error as HTTPError."""

    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
+
class HTTPRedirectHandler(BaseHandler):
    """Follow 301/302/303/307 responses, with redirect-loop detection."""

    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST"):
            # Strictly (according to RFC 2616), 301 or 302 in response
            # to a POST MUST NOT cause a redirection without confirmation
            # from the user (of urllib2, in this case).  In practice,
            # essentially all clients do redirect in this case, so we
            # do the same.
            # The new request is marked unverifiable since the user did
            # not choose the redirected URL themselves.
            return Request(newurl,
                           headers=req.headers,
                           origin_req_host=req.get_origin_req_host(),
                           unverifiable=True)
        else:
            raise HTTPError(req.get_full_url(), code, msg, headers, fp)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if 'location' in headers:
            newurl = headers.getheaders('location')[0]
        elif 'uri' in headers:
            newurl = headers.getheaders('uri')[0]
        else:
            return
        # the Location value may be relative; resolve against the
        # original URL
        newurl = urlparse.urljoin(req.get_full_url(), newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.get_full_url(), code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"
+
class ProxyHandler(BaseHandler):
    """Rewrite requests so that they go through the configured proxies."""

    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        # 'proxies' maps scheme -> proxy URL; defaults to getproxies()
        # (the environment/system configuration)
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
        self.proxies = proxies
        # Install a '<scheme>_open' method for each configured scheme.
        # The lambda's default arguments bind the per-scheme values at
        # definition time (avoiding the late-binding closure pitfall).
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        """Point req at the proxy; add Basic proxy auth if the proxy
        URL carries user:password credentials."""
        orig_type = req.get_type()
        type, r_type = splittype(proxy)
        host, XXX = splithost(r_type)
        if '@' in host:
            # proxy URL of the form user[:password]@host
            user_pass, host = host.split('@', 1)
            if ':' in user_pass:
                user, password = user_pass.split(':', 1)
                user_pass = base64.encodestring('%s:%s' % (unquote(user),
                                                           unquote(password)))
                req.add_header('Proxy-authorization', 'Basic ' + user_pass)
        host = unquote(host)
        req.set_proxy(host, type)
        if orig_type == type:
            # let other handlers take care of it
            # XXX this only makes sense if the proxy is before the
            # other handlers
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            return self.parent.open(req)
+
+# feature suggested by Duncan Booth
+# XXX custom is not a good name
class CustomProxy:
    """A proxy-selection rule: a protocol, a predicate, and an address.

    Either pass a predicate function to the constructor or override
    handle() in a subclass.
    """

    def __init__(self, proto, func=None, proxy_addr=None):
        self.proto = proto
        self.func = func
        self.addr = proxy_addr

    def handle(self, req):
        # report a match (1) when the predicate accepts the request;
        # otherwise fall through, returning None
        predicate = self.func
        if predicate and predicate(req):
            return 1

    def get_proxy(self):
        # address that matching requests should be routed through
        return self.addr
+
class CustomProxyHandler(BaseHandler):
    """Route requests through the first CustomProxy whose rule matches."""

    # Proxies must be in front
    handler_order = 100

    def __init__(self, *proxies):
        # maps protocol -> list of CustomProxy objects
        self.proxies = {}
        # BUG FIX: proxies passed to the constructor used to be
        # silently discarded; register them just like add_proxy().
        for proxy in proxies:
            self.add_proxy(proxy)

    def proxy_open(self, req):
        """Re-open req through the first matching proxy, else None."""
        proto = req.get_type()
        try:
            proxies = self.proxies[proto]
        except KeyError:
            return None
        for p in proxies:
            if p.handle(req):
                # NOTE(review): Request.set_proxy takes (host, type);
                # this one-argument call looks broken — confirm before
                # relying on this handler.
                req.set_proxy(p.get_proxy())
                return self.parent.open(req)
        return None

    def do_proxy(self, p, req):
        # 'p' is unused; kept for interface compatibility
        return self.parent.open(req)

    def add_proxy(self, cpo):
        """Register a CustomProxy under its protocol."""
        if cpo.proto in self.proxies:
            self.proxies[cpo.proto].append(cpo)
        else:
            self.proxies[cpo.proto] = [cpo]
+
class HTTPPasswordMgr:
    """Store user/password pairs keyed by realm and URI prefix."""

    def __init__(self):
        # passwd: realm -> {tuple-of-reduced-URIs -> (user, password)}
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, basestring):
            uri = [uri]
        # normalize every URI so lookups compare like with like
        uri = tuple(map(self.reduce_uri, uri))
        if not realm in self.passwd:
            self.passwd[realm] = {}
        self.passwd[realm][uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        """Return (user, password) for realm/authuri, or (None, None)."""
        domains = self.passwd.get(realm, {})
        authuri = self.reduce_uri(authuri)
        for uris, authinfo in domains.iteritems():
            for uri in uris:
                # a stored URI matches any URI at or below it
                if self.is_suburi(uri, authuri):
                    return authinfo
        return None, None

    def reduce_uri(self, uri):
        """Accept netloc or URI and extract only the netloc and path"""
        parts = urlparse.urlparse(uri)
        if parts[1]:
            # full URI: (netloc, path or '/')
            return parts[1], parts[2] or '/'
        else:
            # bare netloc such as 'host:port'
            return parts[2], '/'

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        # same netloc: compare path prefixes
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False
+
+
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
    """Password manager that falls back to the default (None) realm."""

    def find_user_password(self, realm, authuri):
        # Try an exact realm match first; when nothing is stored for
        # that realm, retry the lookup under the default realm (None).
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is None:
            return HTTPPasswordMgr.find_user_password(self, None, authuri)
        return user, password
+
+
class AbstractBasicAuthHandler:
    """Shared machinery for HTTP Basic auth against servers and proxies.

    Subclasses supply self.auth_header and the http_error_40x hook.
    """

    # matches e.g. 'Basic realm="Foo"' -> groups ('Basic', 'Foo')
    rx = re.compile('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', re.I)

    # XXX there can actually be multiple auth-schemes in a
    # www-authenticate header.  should probably be a lot more careful
    # in parsing them to extract multiple alternatives

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        # expose the manager's add_password directly on the handler
        self.add_password = self.passwd.add_password

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # 'authreq' names the challenge header to inspect, e.g.
        # 'www-authenticate' or 'proxy-authenticate'
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)
        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, realm = mo.groups()
                if scheme.lower() == 'basic':
                    return self.retry_http_basic_auth(host, req, realm)

    def retry_http_basic_auth(self, host, req, realm):
        """Re-issue the request with Basic credentials, or return None."""
        user,pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = 'Basic %s' % base64.encodestring(raw).strip()
            # if this exact header was already sent, give up instead of
            # looping on repeated 401/407 responses
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_header(self.auth_header, auth)
            return self.parent.open(req)
        else:
            return None
+
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 401 challenges with Basic credentials for the host."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        # the netloc of the request URL identifies the protection space
        host = urlparse.urlparse(req.get_full_url())[1]
        return self.http_error_auth_reqed('www-authenticate',
                                          host, req, headers)
+
+
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 407 challenges with Basic credentials for the proxy."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # for proxies the request's host names the protection space
        host = req.get_host()
        return self.http_error_auth_reqed('proxy-authenticate',
                                          host, req, headers)
+
+
def randombytes(n):
    """Return n random bytes."""
    # Use /dev/urandom if it is available.  Fall back to random module
    # if not.  It might be worthwhile to extend this function to use
    # other platform-specific mechanisms for getting random bytes.
    if os.path.exists("/dev/urandom"):
        # Open in binary mode so no newline translation can corrupt or
        # shorten the random data, and close the handle even if the
        # read fails.
        f = open("/dev/urandom", "rb")
        try:
            s = f.read(n)
        finally:
            f.close()
        return s
    else:
        L = [chr(random.randrange(0, 256)) for i in range(n)]
        return "".join(L)
+
class AbstractDigestAuthHandler:
    """Shared implementation of HTTP Digest authentication.

    Subclasses supply self.auth_header and the http_error_40x hook.
    """

    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" supports is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        # expose the manager's add_password directly on the handler
        self.add_password = self.passwd.add_password
        # number of challenge rounds answered for the current request
        self.retried = 0
        # counter used to build the 'nc' value for qop="auth"
        self.nonce_count = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        """Answer a Digest challenge found in 'auth_header', if any."""
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.get_full_url(), 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            # the scheme is the first token of the challenge header
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)
            else:
                raise ValueError("AbstractDigestAuthHandler doesn't know "
                                 "about %s"%(scheme))

    def retry_http_digest_auth(self, req, auth):
        """Re-send the request with a Digest authorization header."""
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(parse_http_list(challenge))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            # sending the identical header again would just loop
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_header(self.auth_header, auth_val)
            resp = self.parent.open(req)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        dig = sha.new("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
                                       randombytes(8))).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        """Build the Digest header value from the parsed challenge, or
        return None when the challenge or credentials are unusable."""
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if user is None:
            return None

        # XXX not implemented yet
        if req.has_data():
            entdig = self.get_entity_digest(req.get_data(), chal)
        else:
            entdig = None

        # A1/A2 are the RFC 2617 digest inputs
        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.get_selector())
        if qop == 'auth':
            self.nonce_count += 1
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            # NOTE(review): any other qop value falls through with
            # 'respdig' unbound, so the format below raises
            # UnboundLocalError.
            pass

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.get_selector(),
                                  respdig)
        if opaque:
            base = base + ', opaque="%s"' % opaque
        if entdig:
            base = base + ', digest="%s"' % entdig
        if algorithm != 'MD5':
            base = base + ', algorithm="%s"' % algorithm
        if qop:
            base = base + ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: md5.new(x).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: sha.new(x).hexdigest()
        # XXX MD5-sess
        # NOTE(review): an unknown algorithm leaves H unbound, so the
        # return below raises UnboundLocalError instead of signalling
        # failure via (None, None) — confirm intended.
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None
+
+
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        # the netloc of the request URL names the protection space
        host = urlparse.urlparse(req.get_full_url())[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        # each completed round (success or give-up) resets the counter
        self.reset_retry_count()
        return retry
+
+
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """Answer 407 challenges with Digest credentials for the proxy."""

    auth_header = 'Proxy-Authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # for proxies the request's host names the protection space
        host = req.get_host()
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        # each completed round (success or give-up) resets the counter
        self.reset_retry_count()
        return retry
+
class AbstractHTTPHandler(BaseHandler):
    """Shared request-preparation and opening logic for HTTP and HTTPS."""

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        # forwarded to the httplib connection in do_open()
        self._debuglevel = level

    def do_request_(self, request):
        """Add the standard headers a request needs before it is sent."""
        host = request.get_host()
        if not host:
            raise URLError('no host given')

        if request.has_data():  # POST
            data = request.get_data()
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))

        scheme, sel = splittype(request.get_selector())
        sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            # for proxied requests the selector carries the real host
            request.add_unredirected_header('Host', sel_host or host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req):
        """Return an addinfourl object for the request, using http_class.

        http_class must implement the HTTPConnection API from httplib.
        The addinfourl return value is a file-like object.  It also
        has methods and attributes including:
            - info(): return a mimetools.Message object for the headers
            - geturl(): return the original request URL
            - code: HTTP status code
        """
        host = req.get_host()
        if not host:
            raise URLError('no host given')

        h = http_class(host) # will parse host:port
        h.set_debuglevel(self._debuglevel)

        # unredirected headers overwrite normal ones of the same name
        # in the dict actually sent on the wire
        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
            r = h.getresponse()
        except socket.error, err: # XXX what error?
            raise URLError(err)

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.

        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.

        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.

        r.recv = r.read
        fp = socket._fileobject(r)

        resp = addinfourl(fp, r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason
        return resp
+
+
class HTTPHandler(AbstractHTTPHandler):
    """Open http: URLs via httplib.HTTPConnection."""

    def http_open(self, req):
        return self.do_open(httplib.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_
+
if hasattr(httplib, 'HTTPS'):
    # HTTPSHandler only exists when the interpreter was built with SSL
    # support (httplib then exposes HTTPS/HTTPSConnection).
    class HTTPSHandler(AbstractHTTPHandler):
        """Open https: URLs via httplib.HTTPSConnection."""

        def https_open(self, req):
            return self.do_open(httplib.HTTPSConnection, req)

        https_request = AbstractHTTPHandler.do_request_
+
class HTTPCookieProcessor(BaseHandler):
    """Attach cookies to requests and harvest them from responses."""

    def __init__(self, cookiejar=None):
        if cookiejar is None:
            cookiejar = cookielib.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        # add any matching Cookie headers before the request is sent
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        # record Set-Cookie headers carried by the response
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response
+
class UnknownHandler(BaseHandler):
    """Last-resort handler: any unrecognized scheme is an error."""

    def unknown_open(self, req):
        type = req.get_type()
        raise URLError('unknown url type: %s' % type)
+
def parse_keqv_list(l):
    """Turn ['k1=v1', 'k2="v2"'] into {'k1': 'v1', 'k2': 'v2'}.

    Each element must contain '='; a value wrapped in double quotes
    has the quotes removed.  Keys are assumed not to repeat.
    """
    result = {}
    for item in l:
        key, value = item.split('=', 1)
        # strip one pair of surrounding double quotes, if present
        if value[0] == '"' and value[-1] == '"':
            value = value[1:-1]
        result[key] = value
    return result
+
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  Returns the elements with surrounding whitespace
    stripped; quote characters are kept.
    """
    # XXX this function could probably use more testing

    list = []
    end = len(s)
    i = 0
    inquote = 0
    start = 0
    while i < end:
        cur = s[i:]
        c = cur.find(',')
        q = cur.find('"')
        if c == -1:
            # no separator left: the remainder is the final element
            list.append(s[start:])
            break
        if q == -1:
            # no quotes remain, so this comma is a real separator
            if inquote:
                raise ValueError("unbalanced quotes")
            list.append(s[start:i+c])
            i = i + c + 1
            # BUG FIX: 'start' was not advanced here, so every later
            # element of an unquoted list wrongly began at the start
            # of a previous element.
            start = i
            continue
        if inquote:
            if q < c:
                # closing quote comes first: the comma ends the element
                list.append(s[start:i+c])
                i = i + c + 1
                start = i
                inquote = 0
            else:
                # the comma is inside the quoted string; skip to quote
                i = i + q
        else:
            if c < q:
                # plain element ending before the next quote
                list.append(s[start:i+c])
                i = i + c + 1
                start = i
            else:
                # opening quote: scan within the quoted string
                inquote = 1
                i = i + q + 1
    return [elt.strip() for elt in list]
+
class FileHandler(BaseHandler):
    """Serve file: URLs from the local filesystem (or punt to FTP)."""

    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.get_selector()
        if url[:2] == '//' and url[2:3] != '/':
            # 'file://host/...' form: re-dispatch as an ftp request
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None

    def get_names(self):
        # resolved lazily and cached on the class, not the instance
        if FileHandler.names is None:
            FileHandler.names = (socket.gethostbyname('localhost'),
                                 socket.gethostbyname(socket.gethostname()))
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        """Return an addinfourl for a local file with HTTP-style headers."""
        import email.Utils
        host = req.get_host()
        file = req.get_selector()
        localfile = url2pathname(file)
        stats = os.stat(localfile)
        size = stats.st_size
        modified = email.Utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(file)[0]
        headers = mimetools.Message(StringIO(
            'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified)))
        if host:
            host, port = splitport(host)
        # only serve the file when no host was given, or the host
        # resolves to this machine (and no explicit port was given)
        if not host or \
           (not port and socket.gethostbyname(host) in self.get_names()):
            return addinfourl(open(localfile, 'rb'),
                              headers, 'file:'+file)
        raise URLError('file not on local host')
+
class FTPHandler(BaseHandler):
    """Open ftp: URLs using ftplib via the ftpwrapper helper."""

    def ftp_open(self, req):
        host = req.get_host()
        if not host:
            raise IOError, ('ftp error', 'no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')

        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise URLError(msg)
        # Split the path into directory components plus a final file
        # name; ';attr=value' suffixes are handled separately below.
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs)
            # Default transfer type: binary ('I') when a file name is
            # present, directory listing ('D') otherwise; a ';type=x'
            # URL attribute overrides this.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            # Synthesize minimal HTTP-style headers for the response.
            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            # Re-raise any ftplib failure as IOError, preserving the
            # original traceback.
            raise IOError, ('ftp error', msg), sys.exc_info()[2]

    def connect_ftp(self, user, passwd, host, port, dirs):
        # Hook point: CacheFTPHandler overrides this to reuse connections.
        fw = ftpwrapper(user, passwd, host, port, dirs)
##        fw.ftp.set_debuglevel(1)
        return fw
+
class CacheFTPHandler(FTPHandler):
    """FTPHandler that keeps a bounded pool of recently used connections.

    XXX would be nice to have pluggable cache strategies.
    XXX this stuff is definitely not thread safe.
    """

    def __init__(self):
        self.cache = {}      # key -> live ftpwrapper connection
        self.timeout = {}    # key -> absolute expiry time
        self.soonest = 0     # earliest expiry among cached entries
        self.delay = 60      # seconds an idle connection stays cached
        self.max_conns = 16  # cap on simultaneously cached connections

    def setTimeout(self, t):
        """Set how long (in seconds) an idle connection is kept."""
        self.delay = t

    def setMaxConns(self, m):
        """Set the maximum number of cached connections."""
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs):
        """Return a cached connection for this endpoint, creating one
        and refreshing its expiry as needed."""
        key = user, host, port, '/'.join(dirs)
        if key not in self.cache:
            self.cache[key] = ftpwrapper(user, passwd, host, port, dirs)
        self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        """Evict expired connections, then enforce the size cap."""
        # First close and drop anything past its expiry time.
        t = time.time()
        if self.soonest <= t:
            for k, v in list(self.timeout.items()):
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
        # Bug fix: the original called min() unconditionally, raising
        # ValueError once every cached entry had expired.
        if self.timeout:
            self.soonest = min(self.timeout.values())
        else:
            self.soonest = 0

        # Then enforce the size cap by evicting the soonest-to-expire
        # entry.  '>=' (not '==') so a cap lowered via setMaxConns()
        # below the current size still triggers eviction.
        if len(self.cache) >= self.max_conns:
            for k, v in list(self.timeout.items()):
                if v == self.soonest:
                    # Bug fix: close the evicted connection instead of
                    # leaking the open socket.
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
                    break
            if self.timeout:
                self.soonest = min(self.timeout.values())
+
class GopherHandler(BaseHandler):
    """Open gopher: URLs via the gopherlib module."""

    def gopher_open(self, req):
        host = req.get_host()
        if not host:
            raise GopherError('no host given')
        host = unquote(host)
        gtype, selector = splitgophertype(req.get_selector())
        selector, query = splitquery(selector)
        selector = unquote(selector)
        if query:
            fp = gopherlib.send_query(selector, unquote(query), host)
        else:
            fp = gopherlib.send_selector(selector, host)
        return addinfourl(fp, noheaders(), req.get_full_url())
+
#bleck! don't use this yet
class OpenerFactory:
    """Experimental factory that assembles an OpenerDirector from a
    default set of handler classes."""

    default_handlers = [UnknownHandler, HTTPHandler,
                        HTTPDefaultErrorHandler, HTTPRedirectHandler,
                        FTPHandler, FileHandler]
    handlers = []
    replacement_handlers = []

    def add_handler(self, h):
        # Rebind (rather than mutate) so an instance's additions do not
        # leak into the class-level default list.
        self.handlers = self.handlers + [h]

    def replace_handler(self, h):
        pass

    def build_opener(self):
        """Instantiate each default handler and return the populated
        OpenerDirector."""
        opener = OpenerDirector()
        for ph in self.default_handlers:
            if inspect.isclass(ph):
                ph = ph()
            opener.add_handler(ph)
        # Bug fix: the original fell off the end and returned None,
        # making the factory useless to callers.
        return opener
diff --git a/depot_tools/release/win/python_24/Lib/urlparse.py b/depot_tools/release/win/python_24/Lib/urlparse.py
new file mode 100644
index 0000000..7fd1633
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/urlparse.py
@@ -0,0 +1,266 @@
+"""Parse (absolute and relative) URLs.
+
+See RFC 1808: "Relative Uniform Resource Locators", by R. Fielding,
+UC Irvine, June 1995.
+"""
+
__all__ = ["urlparse", "urlunparse", "urljoin", "urldefrag",
           "urlsplit", "urlunsplit"]

# A classification of schemes ('' means apply by default)
uses_relative = ['ftp', 'http', 'gopher', 'nntp', 'imap',
                               'wais', 'file', 'https', 'shttp', 'mms',
                               'prospero', 'rtsp', 'rtspu', '']
uses_netloc = ['ftp', 'http', 'gopher', 'nntp', 'telnet',
                             'imap', 'wais', 'file', 'mms', 'https', 'shttp',
                             'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '']
non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
                                  'telnet', 'wais', 'imap', 'snews', 'sip']
uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
                             'https', 'shttp', 'rtsp', 'rtspu', 'sip',
                             'mms', '']
uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
                            'gopher', 'rtsp', 'rtspu', 'sip', '']
uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
                               'nntp', 'wais', 'https', 'shttp', 'snews',
                               'file', 'prospero', '']

# Characters valid in scheme names
scheme_chars = ('abcdefghijklmnopqrstuvwxyz'
                'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
                '0123456789'
                '+-.')

# urlsplit() memoizes its results in _parse_cache; the whole cache is
# discarded once it reaches MAX_CACHE_SIZE entries (see clear_cache()).
MAX_CACHE_SIZE = 20
_parse_cache = {}
+
def clear_cache():
    """Clear the parse cache."""
    global _parse_cache
    # Rebind to a fresh dict; urlsplit() repopulates it lazily.
    _parse_cache = {}
+
+
def urlparse(url, scheme='', allow_fragments=1):
    """Parse a URL into 6 components:
    <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
    Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    scheme, netloc, url, query, fragment = \
        urlsplit(url, scheme, allow_fragments)
    params = ''
    # Only split ';params' off for schemes that actually use them.
    if scheme in uses_params and ';' in url:
        url, params = _splitparams(url)
    return scheme, netloc, url, params, query, fragment
+
+def _splitparams(url):
+    if '/'  in url:
+        i = url.find(';', url.rfind('/'))
+        if i < 0:
+            return url, ''
+    else:
+        i = url.find(';')
+    return url[:i], url[i+1:]
+
+def _splitnetloc(url, start=0):
+    for c in '/?#': # the order is important!
+        delim = url.find(c, start)
+        if delim >= 0:
+            break
+    else:
+        delim = len(url)
+    return url[start:delim], url[delim:]
+
def urlsplit(url, scheme='', allow_fragments=1):
    """Parse a URL into 5 components:
    <scheme>://<netloc>/<path>?<query>#<fragment>
    Return a 5-tuple: (scheme, netloc, path, query, fragment).
    Note that we don't break the components up in smaller bits
    (e.g. netloc is a single string) and we don't expand % escapes."""
    # Results are memoized per (url, scheme, allow_fragments) triple.
    key = url, scheme, allow_fragments
    cached = _parse_cache.get(key, None)
    if cached:
        return cached
    if len(_parse_cache) >= MAX_CACHE_SIZE: # avoid runaway growth
        clear_cache()
    netloc = query = fragment = ''
    i = url.find(':')
    if i > 0:
        if url[:i] == 'http': # optimize the common case
            scheme = url[:i].lower()
            url = url[i+1:]
            if url[:2] == '//':
                netloc, url = _splitnetloc(url, 2)
            if allow_fragments and '#' in url:
                url, fragment = url.split('#', 1)
            if '?' in url:
                url, query = url.split('?', 1)
            tuple = scheme, netloc, url, query, fragment
            _parse_cache[key] = tuple
            return tuple
        # Validate the candidate scheme: every character before ':'
        # must be a legal scheme character, else the ':' is part of
        # the path and the default scheme stands.
        for c in url[:i]:
            if c not in scheme_chars:
                break
        else:
            scheme, url = url[:i].lower(), url[i+1:]
    # General case: only split off netloc/fragment/query for schemes
    # classified (above) as using them.
    if scheme in uses_netloc and url[:2] == '//':
        netloc, url = _splitnetloc(url, 2)
    if allow_fragments and scheme in uses_fragment and '#' in url:
        url, fragment = url.split('#', 1)
    if scheme in uses_query and '?' in url:
        url, query = url.split('?', 1)
    tuple = scheme, netloc, url, query, fragment
    _parse_cache[key] = tuple
    return tuple
+
def urlunparse(parts):
    """Put a parsed URL back together again.  This may result in a
    slightly different, but equivalent URL, if the URL that was parsed
    originally had redundant delimiters, e.g. a ? with an empty query
    (the draft states that these are equivalent)."""
    scheme, netloc, url, params, query, fragment = parts
    if params:
        url = "%s;%s" % (url, params)
    return urlunsplit((scheme, netloc, url, query, fragment))
+
def urlunsplit(parts):
    """Combine a (scheme, netloc, url, query, fragment) 5-tuple back
    into a URL string."""
    scheme, netloc, url, query, fragment = parts
    if netloc or (scheme and scheme in uses_netloc and not url.startswith('//')):
        if url and not url.startswith('/'):
            url = '/' + url
        url = '//' + (netloc or '') + url
    if scheme:
        url = scheme + ':' + url
    if query:
        url += '?' + query
    if fragment:
        url += '#' + fragment
    return url
+
def urljoin(base, url, allow_fragments = 1):
    """Join a base URL and a possibly relative URL to form an absolute
    interpretation of the latter."""
    if not base:
        return url
    if not url:
        return base
    bscheme, bnetloc, bpath, bparams, bquery, bfragment = \
            urlparse(base, '', allow_fragments)
    scheme, netloc, path, params, query, fragment = \
            urlparse(url, bscheme, allow_fragments)
    # Different scheme, or a scheme that cannot be relative: url
    # stands on its own.
    if scheme != bscheme or scheme not in uses_relative:
        return url
    if scheme in uses_netloc:
        if netloc:
            return urlunparse((scheme, netloc, path,
                               params, query, fragment))
        netloc = bnetloc
    # Absolute path: inherit only scheme and netloc from the base.
    if path[:1] == '/':
        return urlunparse((scheme, netloc, path,
                           params, query, fragment))
    # Empty relative reference: keep everything from the base except
    # the fragment.
    if not (path or params or query):
        return urlunparse((scheme, netloc, bpath,
                           bparams, bquery, fragment))
    # Merge the base's directory with the relative path, then resolve
    # '.' and '..' segments (RFC 1808 style).
    segments = bpath.split('/')[:-1] + path.split('/')
    # XXX The stuff below is bogus in various ways...
    if segments[-1] == '.':
        segments[-1] = ''
    while '.' in segments:
        segments.remove('.')
    # Repeatedly collapse the leftmost resolvable 'name/..' pair; the
    # inner while/else ends the outer loop once none remains.
    while 1:
        i = 1
        n = len(segments) - 1
        while i < n:
            if (segments[i] == '..'
                and segments[i-1] not in ('', '..')):
                del segments[i-1:i+1]
                break
            i = i+1
        else:
            break
    # A trailing '..' leaves the result pointing at a directory.
    if segments == ['', '..']:
        segments[-1] = ''
    elif len(segments) >= 2 and segments[-1] == '..':
        segments[-2:] = ['']
    return urlunparse((scheme, netloc, '/'.join(segments),
                       params, query, fragment))
+
def urldefrag(url):
    """Removes any existing fragment from URL.

    Returns a tuple of the defragmented URL and the fragment.  If
    the URL contained no fragments, the second element is the
    empty string.
    """
    if '#' not in url:
        return url, ''
    s, n, p, a, q, frag = urlparse(url)
    return urlunparse((s, n, p, a, q, '')), frag
+
+
# Test vectors for test(): the first URL is the join base, each later
# line is a "relative = <URL:expected-absolute>" pair (from RFC 1808).
test_input = """
      http://a/b/c/d

      g:h        = <URL:g:h>
      http:g     = <URL:http://a/b/c/g>
      http:      = <URL:http://a/b/c/d>
      g          = <URL:http://a/b/c/g>
      ./g        = <URL:http://a/b/c/g>
      g/         = <URL:http://a/b/c/g/>
      /g         = <URL:http://a/g>
      //g        = <URL:http://g>
      ?y         = <URL:http://a/b/c/d?y>
      g?y        = <URL:http://a/b/c/g?y>
      g?y/./x    = <URL:http://a/b/c/g?y/./x>
      .          = <URL:http://a/b/c/>
      ./         = <URL:http://a/b/c/>
      ..         = <URL:http://a/b/>
      ../        = <URL:http://a/b/>
      ../g       = <URL:http://a/b/g>
      ../..      = <URL:http://a/>
      ../../g    = <URL:http://a/g>
      ../../../g = <URL:http://a/../g>
      ./../g     = <URL:http://a/b/g>
      ./g/.      = <URL:http://a/b/c/g/>
      /./g       = <URL:http://a/./g>
      g/./h      = <URL:http://a/b/c/g/h>
      g/../h     = <URL:http://a/b/c/h>
      http:g     = <URL:http://a/b/c/g>
      http:      = <URL:http://a/b/c/d>
      http:?y         = <URL:http://a/b/c/d?y>
      http:g?y        = <URL:http://a/b/c/g?y>
      http:g?y/./x    = <URL:http://a/b/c/g?y/./x>
"""
+
def test():
    """Drive urlparse()/urljoin() over test_input (or a file of URLs
    given on the command line) and report mismatches."""
    import sys
    base = ''
    if sys.argv[1:]:
        fn = sys.argv[1]
        if fn == '-':
            fp = sys.stdin
        else:
            fp = open(fn)
    else:
        import StringIO
        fp = StringIO.StringIO(test_input)
    while 1:
        line = fp.readline()
        if not line: break
        words = line.split()
        if not words:
            continue
        url = words[0]
        parts = urlparse(url)
        print '%-10s : %s' % (url, parts)
        # The first URL seen becomes the base for all later joins.
        abs = urljoin(base, url)
        if not base:
            base = abs
        wrapped = '<URL:%s>' % abs
        print '%-10s = %s' % (url, wrapped)
        if len(words) == 3 and words[1] == '=':
            if wrapped != words[2]:
                print 'EXPECTED', words[2], '!!!!!!!!!!'

if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/user.py b/depot_tools/release/win/python_24/Lib/user.py
new file mode 100644
index 0000000..e550e52
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/user.py
@@ -0,0 +1,45 @@
+"""Hook to allow user-specified customization code to run.
+
+As a policy, Python doesn't run user-specified code on startup of
+Python programs (interactive sessions execute the script specified in
+the PYTHONSTARTUP environment variable if it exists).
+
+However, some programs or sites may find it convenient to allow users
+to have a standard customization file, which gets run when a program
+requests it.  This module implements such a mechanism.  A program
+that wishes to use the mechanism must execute the statement
+
+    import user
+
+The user module looks for a file .pythonrc.py in the user's home
+directory and if it can be opened, execfile()s it in its own global
+namespace.  Errors during this phase are not caught; that's up to the
+program that imports the user module, if it wishes.
+
+The user's .pythonrc.py could conceivably test for sys.version if it
+wishes to do different things depending on the Python version.
+
+"""
+
import os

# Determine the user's home directory, falling back to the current
# directory when no platform-specific hint is available.
home = os.curdir                        # Default
if 'HOME' in os.environ:
    home = os.environ['HOME']
elif os.name == 'posix':
    home = os.path.expanduser("~/")
elif os.name == 'nt':                   # Contributed by Jeff Bauer
    if 'HOMEPATH' in os.environ:
        if 'HOMEDRIVE' in os.environ:
            home = os.environ['HOMEDRIVE'] + os.environ['HOMEPATH']
        else:
            home = os.environ['HOMEPATH']

pythonrc = os.path.join(home, ".pythonrc.py")
try:
    f = open(pythonrc)
except IOError:
    # No readable ~/.pythonrc.py: silently do nothing.
    pass
else:
    f.close()
    # Deliberately executes arbitrary user code in this module's
    # globals; errors propagate to whoever imported us (see docstring).
    execfile(pythonrc)
diff --git a/depot_tools/release/win/python_24/Lib/uu.py b/depot_tools/release/win/python_24/Lib/uu.py
new file mode 100644
index 0000000..f591798
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/uu.py
@@ -0,0 +1,191 @@
+#! /usr/bin/env python
+
+# Copyright 1994 by Lance Ellinghouse
+# Cathedral City, California Republic, United States of America.
+#                        All Rights Reserved
+# Permission to use, copy, modify, and distribute this software and its
+# documentation for any purpose and without fee is hereby granted,
+# provided that the above copyright notice appear in all copies and that
+# both that copyright notice and this permission notice appear in
+# supporting documentation, and that the name of Lance Ellinghouse
+# not be used in advertising or publicity pertaining to distribution
+# of the software without specific, written prior permission.
+# LANCE ELLINGHOUSE DISCLAIMS ALL WARRANTIES WITH REGARD TO
+# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+# FITNESS, IN NO EVENT SHALL LANCE ELLINGHOUSE CENTRUM BE LIABLE
+# FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+#
+# Modified by Jack Jansen, CWI, July 1995:
+# - Use binascii module to do the actual line-by-line conversion
+#   between ascii and binary. This results in a 1000-fold speedup. The C
+#   version is still 5 times faster, though.
+# - Arguments more compliant with python standard
+
+"""Implementation of the UUencode and UUdecode functions.
+
+encode(in_file, out_file [,name, mode])
+decode(in_file [, out_file, mode])
+"""
+
+import binascii
+import os
+import sys
+from types import StringType
+
+__all__ = ["Error", "encode", "decode"]
+
class Error(Exception):
    """Raised for uuencode/uudecode framing and I/O problems."""
+
def encode(in_file, out_file, name=None, mode=None):
    """Uuencode file.

    in_file and out_file may each be '-' (stdin/stdout), a pathname,
    or an open file object.  name and mode default to the input
    pathname's basename and permission bits when the input is a
    pathname, else to '-' and 0666.
    """
    #
    # If in_file is a pathname open it and change defaults
    #
    if in_file == '-':
        in_file = sys.stdin
    elif isinstance(in_file, StringType):
        if name is None:
            name = os.path.basename(in_file)
        if mode is None:
            try:
                mode = os.stat(in_file).st_mode
            except AttributeError:
                # Stat result without st_mode: keep the default mode.
                pass
        in_file = open(in_file, 'rb')
    #
    # Open out_file if it is a pathname
    #
    if out_file == '-':
        out_file = sys.stdout
    elif isinstance(out_file, StringType):
        out_file = open(out_file, 'w')
    #
    # Set defaults for name and mode
    #
    if name is None:
        name = '-'
    if mode is None:
        mode = 0666
    #
    # Write the data
    #
    out_file.write('begin %o %s\n' % ((mode&0777),name))
    # 45 input bytes per chunk -> one full-length uuencoded line each.
    str = in_file.read(45)
    while len(str) > 0:
        out_file.write(binascii.b2a_uu(str))
        str = in_file.read(45)
    out_file.write(' \nend\n')
+
+
def decode(in_file, out_file=None, mode=None, quiet=0):
    """Decode uuencoded file.

    in_file may be '-' (stdin), a pathname, or an open file object
    with readline().  out_file and mode default to the name and
    permission bits recorded in the 'begin' header.  Pass quiet to
    suppress warnings about lines from broken uuencoders.

    Raises Error when no valid 'begin' line is found, when the
    recorded output file already exists, or when input ends before
    'end'.
    """
    #
    # Open the input file, if needed.
    #
    if in_file == '-':
        in_file = sys.stdin
    elif isinstance(in_file, str):
        in_file = open(in_file)
    #
    # Read until a 'begin <octal-mode> <name>' header is found,
    # skipping any preceding junk lines.
    #
    while 1:
        hdr = in_file.readline()
        if not hdr:
            raise Error('No valid begin line found in input file')
        if hdr[:5] != 'begin':
            continue
        hdrfields = hdr.split(" ", 2)
        if len(hdrfields) == 3 and hdrfields[0] == 'begin':
            try:
                int(hdrfields[1], 8)
                break
            except ValueError:
                pass
    if out_file is None:
        out_file = hdrfields[2].rstrip()
        if os.path.exists(out_file):
            raise Error('Cannot overwrite existing file: %s' % out_file)
    if mode is None:
        mode = int(hdrfields[1], 8)
    #
    # Open the output file
    #
    if out_file == '-':
        out_file = sys.stdout
    elif isinstance(out_file, str):
        fp = open(out_file, 'wb')
        # Bug fix: the original called os.path.chmod (which does not
        # exist) inside 'except AttributeError: pass', so the recorded
        # mode was silently never applied.
        os.chmod(out_file, mode)
        out_file = fp
    #
    # Main decoding loop
    #
    s = in_file.readline()
    while s and s.strip() != 'end':
        try:
            data = binascii.a2b_uu(s)
        except binascii.Error:
            v = sys.exc_info()[1]
            # Workaround for broken uuencoders by /Fredrik Lundh:
            # trust the length byte and ignore trailing garbage.
            # ('//' keeps nbytes an integer on all Python versions.)
            nbytes = (((ord(s[0]) - 32) & 63) * 4 + 5) // 3
            data = binascii.a2b_uu(s[:nbytes])
            if not quiet:
                sys.stderr.write("Warning: %s\n" % str(v))
        out_file.write(data)
        s = in_file.readline()
    if not s:
        raise Error('Truncated input file')
+
def test():
    """uuencode/uudecode main program"""
    import getopt

    dopt = 0          # -d: decode instead of encode
    topt = 0          # -t: treat data as text
    input = sys.stdin
    output = sys.stdout
    ok = 1
    try:
        optlist, args = getopt.getopt(sys.argv[1:], 'dt')
    except getopt.error:
        ok = 0
    if not ok or len(args) > 2:
        print 'Usage:', sys.argv[0], '[-d] [-t] [input [output]]'
        print ' -d: Decode (in stead of encode)'
        print ' -t: data is text, encoded format unix-compatible text'
        sys.exit(1)

    for o, a in optlist:
        if o == '-d': dopt = 1
        if o == '-t': topt = 1

    if len(args) > 0:
        input = args[0]
    if len(args) > 1:
        output = args[1]

    if dopt:
        if topt:
            # Text-mode decode needs a named output file it can open
            # in text mode; stdout won't do.
            if isinstance(output, StringType):
                output = open(output, 'w')
            else:
                print sys.argv[0], ': cannot do -t to stdout'
                sys.exit(1)
        decode(input, output)
    else:
        if topt:
            # Text-mode encode likewise needs a named input file.
            if isinstance(input, StringType):
                input = open(input, 'r')
            else:
                print sys.argv[0], ': cannot do -t from stdin'
                sys.exit(1)
        encode(input, output)

if __name__ == '__main__':
    test()
diff --git a/depot_tools/release/win/python_24/Lib/warnings.py b/depot_tools/release/win/python_24/Lib/warnings.py
new file mode 100644
index 0000000..06d7685
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/warnings.py
@@ -0,0 +1,255 @@
+"""Python part of the warnings subsystem."""
+
+# Note: function level imports should *not* be used
+# in this module as it may cause import lock deadlock.
+# See bug 683658.
+import sys, types
+import linecache
+
+__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings",
+           "resetwarnings"]
+
+# filters contains a sequence of filter 5-tuples
+# The components of the 5-tuple are:
+# - an action: error, ignore, always, default, module, or once
+# - a compiled regex that must match the warning message
+# - a class representing the warning category
+# - a compiled regex that must match the module that is being warned
# - a line number for the line being warned about, or 0 to mean any line
# If either of the compiled regexes is None, match anything.
filters = []                 # active filter tuples, searched in order
defaultaction = "default"    # action applied when no filter matches
onceregistry = {}            # (text, category) pairs already shown for "once"
+
def warn(message, category=None, stacklevel=1):
    """Issue a warning, or maybe ignore it or raise an exception."""
    # A Warning instance carries its own category.
    if isinstance(message, Warning):
        category = message.__class__
    if category is None:
        category = UserWarning
    assert issubclass(category, Warning)
    # Locate the caller's frame so the warning is attributed to it.
    try:
        caller = sys._getframe(stacklevel)
    except ValueError:
        frame_globals = sys.__dict__
        lineno = 1
    else:
        frame_globals = caller.f_globals
        lineno = caller.f_lineno
    module = frame_globals.get('__name__', "<string>")
    filename = frame_globals.get('__file__')
    if filename:
        lowered = filename.lower()
        if lowered.endswith(".pyc") or lowered.endswith(".pyo"):
            # Point at the .py source rather than the compiled file.
            filename = filename[:-1]
    else:
        if module == "__main__":
            filename = sys.argv[0]
        if not filename:
            filename = module
    registry = frame_globals.setdefault("__warningregistry__", {})
    warn_explicit(message, category, filename, lineno, module, registry)
+
def warn_explicit(message, category, filename, lineno,
                  module=None, registry=None):
    """Low-level warning machinery: match (message, category, module,
    lineno) against the filters list and act on the first match."""
    if module is None:
        module = filename
        if module[-3:].lower() == ".py":
            module = module[:-3] # XXX What about leading pathname?
    if registry is None:
        registry = {}
    if isinstance(message, Warning):
        text = str(message)
        category = message.__class__
    else:
        text = message
        message = category(message)
    key = (text, category, lineno)
    # Quick test for common case
    if registry.get(key):
        return
    # Search the filters
    for item in filters:
        action, msg, cat, mod, ln = item
        if ((msg is None or msg.match(text)) and
            issubclass(category, cat) and
            (mod is None or mod.match(module)) and
            (ln == 0 or lineno == ln)):
            break
    else:
        action = defaultaction
    # Early exit actions
    if action == "ignore":
        registry[key] = 1
        return
    if action == "error":
        raise message
    # Other actions
    if action == "once":
        # Suppress further occurrences of this (text, category) pair
        # across all modules.
        registry[key] = 1
        oncekey = (text, category)
        if onceregistry.get(oncekey):
            return
        onceregistry[oncekey] = 1
    elif action == "always":
        pass
    elif action == "module":
        # Suppress repeats within this module regardless of line.
        registry[key] = 1
        altkey = (text, category, 0)
        if registry.get(altkey):
            return
        registry[altkey] = 1
    elif action == "default":
        registry[key] = 1
    else:
        # Unrecognized actions are errors
        raise RuntimeError(
              "Unrecognized action (%r) in warnings.filters:\n %s" %
              (action, item))
    # Print message and context
    showwarning(message, category, filename, lineno)
+
def showwarning(message, category, filename, lineno, file=None):
    """Hook to write a warning to a file; replace if you like."""
    stream = file
    if stream is None:
        stream = sys.stderr
    try:
        stream.write(formatwarning(message, category, filename, lineno))
    except IOError:
        # The stream (probably stderr) is invalid - the warning is lost.
        pass
+
def formatwarning(message, category, filename, lineno):
    """Function to format a warning the standard way."""
    text = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
    # Append the offending source line when linecache can find it.
    source_line = linecache.getline(filename, lineno).strip()
    if source_line:
        text = text + "  " + source_line + "\n"
    return text
+
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
                   append=0):
    """Insert an entry into the list of warnings filters (at the front).

    Use assertions to check that all arguments have the right type."""
    import re
    # NOTE: assert-based validation disappears under 'python -O'.
    assert action in ("error", "ignore", "always", "default", "module",
                      "once"), "invalid action: %r" % (action,)
    assert isinstance(message, basestring), "message must be a string"
    assert isinstance(category, types.ClassType), "category must be a class"
    assert issubclass(category, Warning), "category must be a Warning subclass"
    assert isinstance(module, basestring), "module must be a string"
    assert isinstance(lineno, int) and lineno >= 0, \
           "lineno must be an int >= 0"
    # message is matched case-insensitively, module case-sensitively.
    item = (action, re.compile(message, re.I), category,
            re.compile(module), lineno)
    if append:
        filters.append(item)
    else:
        filters.insert(0, item)
+
def simplefilter(action, category=Warning, lineno=0, append=0):
    """Insert a simple entry into the list of warnings filters (at the front).

    A simple filter matches all modules and messages.
    """
    valid_actions = ("error", "ignore", "always", "default", "module",
                     "once")
    assert action in valid_actions, "invalid action: %r" % (action,)
    assert isinstance(lineno, int) and lineno >= 0, \
           "lineno must be an int >= 0"
    # None for the message and module patterns means "match anything".
    entry = (action, None, category, None, lineno)
    if append:
        filters.append(entry)
    else:
        filters.insert(0, entry)
+
def resetwarnings():
    """Clear the list of warning filters, so that no filters are active."""
    # Empty in place so existing references to the list stay valid.
    del filters[:]
+
+class _OptionError(Exception):
+    """Exception used by option processing helpers."""
+    pass
+
+# Helper to process -W options passed via sys.warnoptions
+def _processoptions(args):
+    for arg in args:
+        try:
+            _setoption(arg)
+        except _OptionError, msg:
+            print >>sys.stderr, "Invalid -W option ignored:", msg
+
+# Helper for _processoptions()
+def _setoption(arg):
+    import re
+    parts = arg.split(':')
+    if len(parts) > 5:
+        raise _OptionError("too many fields (max 5): %r" % (arg,))
+    while len(parts) < 5:
+        parts.append('')
+    action, message, category, module, lineno = [s.strip()
+                                                 for s in parts]
+    action = _getaction(action)
+    message = re.escape(message)
+    category = _getcategory(category)
+    module = re.escape(module)
+    if module:
+        module = module + '$'
+    if lineno:
+        try:
+            lineno = int(lineno)
+            if lineno < 0:
+                raise ValueError
+        except (ValueError, OverflowError):
+            raise _OptionError("invalid lineno %r" % (lineno,))
+    else:
+        lineno = 0
+    filterwarnings(action, message, category, module, lineno)
+
+# Helper for _setoption()
+def _getaction(action):
+    if not action:
+        return "default"
+    if action == "all": return "always" # Alias
+    for a in ['default', 'always', 'ignore', 'module', 'once', 'error']:
+        if a.startswith(action):
+            return a
+    raise _OptionError("invalid action: %r" % (action,))
+
+# Helper for _setoption()
+def _getcategory(category):
+    import re
+    if not category:
+        return Warning
+    if re.match("^[a-zA-Z0-9_]+$", category):
+        try:
+            cat = eval(category)
+        except NameError:
+            raise _OptionError("unknown warning category: %r" % (category,))
+    else:
+        i = category.rfind(".")
+        module = category[:i]
+        klass = category[i+1:]
+        try:
+            m = __import__(module, None, None, [klass])
+        except ImportError:
+            raise _OptionError("invalid module name: %r" % (module,))
+        try:
+            cat = getattr(m, klass)
+        except AttributeError:
+            raise _OptionError("unknown warning category: %r" % (category,))
+    if (not isinstance(cat, types.ClassType) or
+        not issubclass(cat, Warning)):
+        raise _OptionError("invalid warning category: %r" % (category,))
+    return cat
+
# Module initialization
# Install filters requested via -W command-line options, then append
# (append=1, so explicit -W options keep precedence) default-ignore
# filters for warnings scheduled to disappear.
_processoptions(sys.warnoptions)
# XXX OverflowWarning should go away for Python 2.5.
simplefilter("ignore", category=OverflowWarning, append=1)
simplefilter("ignore", category=PendingDeprecationWarning, append=1)
diff --git a/depot_tools/release/win/python_24/Lib/wave.py b/depot_tools/release/win/python_24/Lib/wave.py
new file mode 100644
index 0000000..08c51ba9
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/wave.py
@@ -0,0 +1,489 @@
+"""Stuff to parse WAVE files.
+
+Usage.
+
+Reading WAVE files:
+      f = wave.open(file, 'r')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods read(), seek(), and close().
+When the setpos() and rewind() methods are not used, the seek()
method is not necessary.
+
+This returns an instance of a class with the following public methods:
+      getnchannels()  -- returns number of audio channels (1 for
+                         mono, 2 for stereo)
+      getsampwidth()  -- returns sample width in bytes
+      getframerate()  -- returns sampling frequency
+      getnframes()    -- returns number of audio frames
+      getcomptype()   -- returns compression type ('NONE' for linear samples)
+      getcompname()   -- returns human-readable version of
                         compression type ('not compressed' for linear samples)
+      getparams()     -- returns a tuple consisting of all of the
+                         above in the above order
+      getmarkers()    -- returns None (for compatibility with the
+                         aifc module)
+      getmark(id)     -- raises an error since the mark does not
+                         exist (for compatibility with the aifc module)
+      readframes(n)   -- returns at most n frames of audio
+      rewind()        -- rewind to the beginning of the audio stream
+      setpos(pos)     -- seek to the specified position
+      tell()          -- return the current position
+      close()         -- close the instance (make it unusable)
+The position returned by tell() and the position given to setpos()
+are compatible and have nothing to do with the actual position in the
+file.
+The close() method is called automatically when the class instance
+is destroyed.
+
+Writing WAVE files:
+      f = wave.open(file, 'w')
+where file is either the name of a file or an open file pointer.
+The open file pointer must have methods write(), tell(), seek(), and
+close().
+
+This returns an instance of a class with the following public methods:
+      setnchannels(n) -- set the number of channels
+      setsampwidth(n) -- set the sample width
+      setframerate(n) -- set the frame rate
+      setnframes(n)   -- set the number of frames
+      setcomptype(type, name)
+                      -- set the compression type and the
+                         human-readable compression type
+      setparams(tuple)
+                      -- set all parameters at once
+      tell()          -- return current position in output file
+      writeframesraw(data)
                      -- write audio frames without patching up the
+                         file header
+      writeframes(data)
+                      -- write audio frames and patch up the file header
+      close()         -- patch up the file header and close the
+                         output file
+You should set the parameters before the first writeframesraw or
+writeframes.  The total number of frames does not need to be set,
+but when it is set to the correct value, the header does not have to
+be patched up.
It is best to first set all parameters, except perhaps the
+compression type, and then write audio frames using writeframesraw.
+When all frames have been written, either call writeframes('') or
+close() to patch up the sizes in the header.
+The close() method is called automatically when the class instance
+is destroyed.
+"""
+
+import __builtin__
+
+__all__ = ["open", "openfp", "Error"]
+
class Error(Exception):
    """Raised on malformed WAVE data or misuse of this module's API."""
    pass
+
# Format tag for uncompressed PCM data, per the RIFF/WAVE specification.
WAVE_FORMAT_PCM = 0x0001

# array typecodes indexed by sample width in bytes (1, 2 and 4);
# widths 0 and 3 have no array representation.
_array_fmts = None, 'b', 'h', None, 'l'

# Determine endian-ness
import struct
if struct.pack("h", 1) == "\000\001":
    big_endian = 1
else:
    big_endian = 0
+
+from chunk import Chunk
+
class Wave_read:
    """Variables used in this class:

    These variables are available to the user through appropriate
    methods of this class:
    _file -- the open file with methods read(), close(), and seek()
              set through the __init__() method
    _nchannels -- the number of audio channels
              available through the getnchannels() method
    _nframes -- the number of audio frames
              available through the getnframes() method
    _sampwidth -- the number of bytes per audio sample
              available through the getsampwidth() method
    _framerate -- the sampling frequency
              available through the getframerate() method
    _comptype -- the AIFF-C compression type ('NONE' if AIFF)
              available through the getcomptype() method
    _compname -- the human-readable AIFF-C compression type
              available through the getcomptype() method
    _soundpos -- the position in the audio stream
              available through the tell() method, set through the
              setpos() method

    These variables are used internally only:
    _fmt_chunk_read -- 1 iff the FMT chunk has been read
    _data_seek_needed -- 1 iff positioned correctly in audio
              file for readframes()
    _data_chunk -- instantiation of a chunk class for the DATA chunk
    _framesize -- size of one frame in the file
    """

    def initfp(self, file):
        """Parse the RIFF/WAVE headers of *file* up to the 'data' chunk.

        Raises Error if the file is not a RIFF/WAVE file, or if the
        'data' chunk appears before (or without) the 'fmt ' chunk.
        """
        self._convert = None
        self._soundpos = 0
        # WAVE files are little-endian RIFF files.
        self._file = Chunk(file, bigendian = 0)
        if self._file.getname() != 'RIFF':
            raise Error, 'file does not start with RIFF id'
        if self._file.read(4) != 'WAVE':
            raise Error, 'not a WAVE file'
        self._fmt_chunk_read = 0
        self._data_chunk = None
        # Walk the sub-chunks until 'data' is found; anything other
        # than 'fmt ' or 'data' is skipped.
        while 1:
            self._data_seek_needed = 1
            try:
                chunk = Chunk(self._file, bigendian = 0)
            except EOFError:
                break
            chunkname = chunk.getname()
            if chunkname == 'fmt ':
                self._read_fmt_chunk(chunk)
                self._fmt_chunk_read = 1
            elif chunkname == 'data':
                if not self._fmt_chunk_read:
                    raise Error, 'data chunk before fmt chunk'
                self._data_chunk = chunk
                # _framesize was set by _read_fmt_chunk() above.
                self._nframes = chunk.chunksize // self._framesize
                self._data_seek_needed = 0
                break
            chunk.skip()
        if not self._fmt_chunk_read or not self._data_chunk:
            raise Error, 'fmt chunk and/or data chunk missing'

    def __init__(self, f):
        """Accept a filename or an already-open file object."""
        # Track whether we opened the file ourselves, so close() only
        # closes what we own.
        self._i_opened_the_file = None
        if isinstance(f, basestring):
            f = __builtin__.open(f, 'rb')
            self._i_opened_the_file = f
        # else, assume it is an open file object already
        self.initfp(f)

    def __del__(self):
        self.close()
    #
    # User visible methods.
    #
    def getfp(self):
        return self._file

    def rewind(self):
        # The real seek is deferred until the next readframes() call.
        self._data_seek_needed = 1
        self._soundpos = 0

    def close(self):
        """Close the file if we opened it and make the instance unusable."""
        if self._i_opened_the_file:
            self._i_opened_the_file.close()
            self._i_opened_the_file = None
        self._file = None

    def tell(self):
        # Position in frames, not bytes.
        return self._soundpos

    def getnchannels(self):
        return self._nchannels

    def getnframes(self):
        return self._nframes

    def getsampwidth(self):
        return self._sampwidth

    def getframerate(self):
        return self._framerate

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        return self._compname

    def getparams(self):
        """Return (nchannels, sampwidth, framerate, nframes, comptype, compname)."""
        return self.getnchannels(), self.getsampwidth(), \
               self.getframerate(), self.getnframes(), \
               self.getcomptype(), self.getcompname()

    def getmarkers(self):
        # For aifc-module compatibility; WAVE files have no marks.
        return None

    def getmark(self, id):
        # For aifc-module compatibility; WAVE files have no marks.
        raise Error, 'no marks'

    def setpos(self, pos):
        """Seek to frame number *pos* (the file seek happens lazily)."""
        if pos < 0 or pos > self._nframes:
            raise Error, 'position not in range'
        self._soundpos = pos
        self._data_seek_needed = 1

    def readframes(self, nframes):
        """Read and return at most *nframes* frames as a raw byte string."""
        if self._data_seek_needed:
            self._data_chunk.seek(0, 0)
            pos = self._soundpos * self._framesize
            if pos:
                self._data_chunk.seek(pos, 0)
            self._data_seek_needed = 0
        if nframes == 0:
            return ''
        if self._sampwidth > 1 and big_endian:
            # Samples are stored little-endian; on a big-endian host we
            # read into an array and byte-swap.
            # unfortunately the fromfile() method does not take
            # something that only looks like a file object, so
            # we have to reach into the innards of the chunk object
            import array
            chunk = self._data_chunk
            data = array.array(_array_fmts[self._sampwidth])
            nitems = nframes * self._nchannels
            # Clamp the request to what remains in the data chunk.
            if nitems * self._sampwidth > chunk.chunksize - chunk.size_read:
                nitems = (chunk.chunksize - chunk.size_read) / self._sampwidth
            data.fromfile(chunk.file.file, nitems)
            # "tell" data chunk how much was read
            chunk.size_read = chunk.size_read + nitems * self._sampwidth
            # do the same for the outermost chunk
            chunk = chunk.file
            chunk.size_read = chunk.size_read + nitems * self._sampwidth
            data.byteswap()
            data = data.tostring()
        else:
            data = self._data_chunk.read(nframes * self._framesize)
        if self._convert and data:
            data = self._convert(data)
        self._soundpos = self._soundpos + len(data) // (self._nchannels * self._sampwidth)
        return data

    #
    # Internal methods.
    #

    def _read_fmt_chunk(self, chunk):
        """Decode the little-endian 'fmt ' chunk; only PCM is supported."""
        wFormatTag, self._nchannels, self._framerate, dwAvgBytesPerSec, wBlockAlign = struct.unpack('<hhllh', chunk.read(14))
        if wFormatTag == WAVE_FORMAT_PCM:
            sampwidth = struct.unpack('<h', chunk.read(2))[0]
            # Round bits-per-sample up to whole bytes.
            self._sampwidth = (sampwidth + 7) // 8
        else:
            raise Error, 'unknown format: %r' % (wFormatTag,)
        self._framesize = self._nchannels * self._sampwidth
        self._comptype = 'NONE'
        self._compname = 'not compressed'
+
class Wave_write:
    """Variables used in this class:

    These variables are user settable through appropriate methods
    of this class:
    _file -- the open file with methods write(), close(), tell(), seek()
              set through the __init__() method
    _comptype -- the AIFF-C compression type ('NONE' in AIFF)
              set through the setcomptype() or setparams() method
    _compname -- the human-readable AIFF-C compression type
              set through the setcomptype() or setparams() method
    _nchannels -- the number of audio channels
              set through the setnchannels() or setparams() method
    _sampwidth -- the number of bytes per audio sample
              set through the setsampwidth() or setparams() method
    _framerate -- the sampling frequency
              set through the setframerate() or setparams() method
    _nframes -- the number of audio frames written to the header
              set through the setnframes() or setparams() method

    These variables are used internally only:
    _datalength -- the size of the audio samples written to the header
    _nframeswritten -- the number of frames actually written
    _datawritten -- the size of the audio samples actually written
    """

    def __init__(self, f):
        """Accept a filename or an already-open file object."""
        # Only close in close() what we opened ourselves.
        self._i_opened_the_file = None
        if isinstance(f, basestring):
            f = __builtin__.open(f, 'wb')
            self._i_opened_the_file = f
        self.initfp(f)

    def initfp(self, file):
        """Reset all writer state for *file*; all parameters start unset."""
        self._file = file
        self._convert = None
        self._nchannels = 0
        self._sampwidth = 0
        self._framerate = 0
        self._nframes = 0
        self._nframeswritten = 0
        self._datawritten = 0
        self._datalength = 0

    def __del__(self):
        self.close()

    #
    # User visible methods.
    #
    def setnchannels(self, nchannels):
        """Set the channel count; disallowed once data has been written."""
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if nchannels < 1:
            raise Error, 'bad # of channels'
        self._nchannels = nchannels

    def getnchannels(self):
        if not self._nchannels:
            raise Error, 'number of channels not set'
        return self._nchannels

    def setsampwidth(self, sampwidth):
        """Set the sample width in bytes (1-4); fixed once writing starts."""
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if sampwidth < 1 or sampwidth > 4:
            raise Error, 'bad sample width'
        self._sampwidth = sampwidth

    def getsampwidth(self):
        if not self._sampwidth:
            raise Error, 'sample width not set'
        return self._sampwidth

    def setframerate(self, framerate):
        """Set the sampling frequency; fixed once writing starts."""
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if framerate <= 0:
            raise Error, 'bad frame rate'
        self._framerate = framerate

    def getframerate(self):
        if not self._framerate:
            raise Error, 'frame rate not set'
        return self._framerate

    def setnframes(self, nframes):
        """Pre-declare the frame count written into the initial header."""
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        self._nframes = nframes

    def getnframes(self):
        # Note: returns the frames actually written so far, not the
        # value declared via setnframes().
        return self._nframeswritten

    def setcomptype(self, comptype, compname):
        """Set compression type and its human-readable name ('NONE' only)."""
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        if comptype not in ('NONE',):
            raise Error, 'unsupported compression type'
        self._comptype = comptype
        self._compname = compname

    def getcomptype(self):
        return self._comptype

    def getcompname(self):
        return self._compname

    def setparams(self, (nchannels, sampwidth, framerate, nframes, comptype, compname)):
        """Set all parameters at once from a 6-tuple (see getparams())."""
        if self._datawritten:
            raise Error, 'cannot change parameters after starting to write'
        self.setnchannels(nchannels)
        self.setsampwidth(sampwidth)
        self.setframerate(framerate)
        self.setnframes(nframes)
        self.setcomptype(comptype, compname)

    def getparams(self):
        """Return (nchannels, sampwidth, framerate, nframes, comptype, compname)."""
        if not self._nchannels or not self._sampwidth or not self._framerate:
            raise Error, 'not all parameters set'
        return self._nchannels, self._sampwidth, self._framerate, \
              self._nframes, self._comptype, self._compname

    def setmark(self, id, pos, name):
        # For aifc-module compatibility; WAVE files have no marks.
        raise Error, 'setmark() not supported'

    def getmark(self, id):
        raise Error, 'no marks'

    def getmarkers(self):
        return None

    def tell(self):
        # Position in frames written, not a byte offset in the file.
        return self._nframeswritten

    def writeframesraw(self, data):
        """Write audio frames without patching up the header afterwards."""
        self._ensure_header_written(len(data))
        nframes = len(data) // (self._sampwidth * self._nchannels)
        if self._convert:
            data = self._convert(data)
        if self._sampwidth > 1 and big_endian:
            # Samples must be stored little-endian; byte-swap on
            # big-endian hosts before writing.
            import array
            data = array.array(_array_fmts[self._sampwidth], data)
            data.byteswap()
            data.tofile(self._file)
            self._datawritten = self._datawritten + len(data) * self._sampwidth
        else:
            self._file.write(data)
            self._datawritten = self._datawritten + len(data)
        self._nframeswritten = self._nframeswritten + nframes

    def writeframes(self, data):
        """Write audio frames and correct the header sizes if needed."""
        self.writeframesraw(data)
        if self._datalength != self._datawritten:
            self._patchheader()

    def close(self):
        """Patch up the header if needed, flush, and release the file."""
        if self._file:
            self._ensure_header_written(0)
            if self._datalength != self._datawritten:
                self._patchheader()
            self._file.flush()
            self._file = None
        if self._i_opened_the_file:
            self._i_opened_the_file.close()
            self._i_opened_the_file = None

    #
    # Internal methods.
    #

    def _ensure_header_written(self, datasize):
        """Write the RIFF/WAVE header before the first data bytes go out."""
        if not self._datawritten:
            if not self._nchannels:
                raise Error, '# channels not specified'
            if not self._sampwidth:
                raise Error, 'sample width not specified'
            if not self._framerate:
                raise Error, 'sampling rate not specified'
            self._write_header(datasize)

    def _write_header(self, initlength):
        """Emit the RIFF/'fmt '/'data' headers, recording the size-field
        offsets so _patchheader() can rewrite them later (this requires
        the output file to be seekable -- presumably guaranteed by the
        caller; confirm for pipe-like outputs)."""
        self._file.write('RIFF')
        if not self._nframes:
            # No declared frame count: derive one from the first write.
            self._nframes = initlength / (self._nchannels * self._sampwidth)
        self._datalength = self._nframes * self._nchannels * self._sampwidth
        self._form_length_pos = self._file.tell()
        self._file.write(struct.pack('<l4s4slhhllhh4s',
            36 + self._datalength, 'WAVE', 'fmt ', 16,
            WAVE_FORMAT_PCM, self._nchannels, self._framerate,
            self._nchannels * self._framerate * self._sampwidth,
            self._nchannels * self._sampwidth,
            self._sampwidth * 8, 'data'))
        self._data_length_pos = self._file.tell()
        self._file.write(struct.pack('<l', self._datalength))

    def _patchheader(self):
        """Rewrite the RIFF and data size fields to match what was written."""
        if self._datawritten == self._datalength:
            return
        curpos = self._file.tell()
        self._file.seek(self._form_length_pos, 0)
        self._file.write(struct.pack('<l', 36 + self._datawritten))
        self._file.seek(self._data_length_pos, 0)
        self._file.write(struct.pack('<l', self._datawritten))
        self._file.seek(curpos, 0)
        self._datalength = self._datawritten
+
def open(f, mode=None):
    """Open a WAVE file for reading or writing.

    *f* is a filename or an open file object.  When *mode* is omitted it
    defaults to the file object's own mode attribute, or 'rb' if there
    is none.  Returns a Wave_read for 'r'/'rb', a Wave_write for
    'w'/'wb', and raises Error otherwise.
    """
    if mode is None:
        mode = getattr(f, 'mode', 'rb')
    if mode in ('r', 'rb'):
        return Wave_read(f)
    if mode in ('w', 'wb'):
        return Wave_write(f)
    raise Error("mode must be 'r', 'rb', 'w', or 'wb'")

openfp = open # B/W compatibility
diff --git a/depot_tools/release/win/python_24/Lib/weakref.py b/depot_tools/release/win/python_24/Lib/weakref.py
new file mode 100644
index 0000000..c0669b03
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/weakref.py
@@ -0,0 +1,307 @@
+"""Weak reference support for Python.
+
+This module is an implementation of PEP 205:
+
https://peps.python.org/pep-0205/
+"""
+
+# Naming convention: Variables named "wr" are weak reference objects;
+# they are called this instead of "ref" to avoid name collisions with
+# the module-global ref() function imported from _weakref.
+
+import UserDict
+
+from _weakref import (
+     getweakrefcount,
+     getweakrefs,
+     ref,
+     proxy,
+     CallableProxyType,
+     ProxyType,
+     ReferenceType)
+
+from exceptions import ReferenceError
+
+
+ProxyTypes = (ProxyType, CallableProxyType)
+
+__all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
+           "WeakKeyDictionary", "ReferenceType", "ProxyType",
+           "CallableProxyType", "ProxyTypes", "WeakValueDictionary"]
+
+
class WeakValueDictionary(UserDict.UserDict):
    """Mapping class that references values weakly.

    Entries in the dictionary will be discarded when no strong
    reference to the value exists anymore
    """
    # We inherit the constructor without worrying about the input
    # dictionary; since it uses our .update() method, we get the right
    # checks (if the other dictionary is a WeakValueDictionary,
    # objects are unwrapped on the way out, and we always wrap on the
    # way in).

    def __init__(self, *args, **kw):
        UserDict.UserDict.__init__(self, *args, **kw)
        # Shared callback invoked when a referent dies; *wr* is the
        # KeyedRef that carries its own dictionary key.  We hold only a
        # weak reference to self so the callback does not keep this
        # dictionary alive.
        def remove(wr, selfref=ref(self)):
            self = selfref()
            if self is not None:
                del self.data[wr.key]
        self._remove = remove

    def __getitem__(self, key):
        # A dead referent is indistinguishable from a missing key.
        o = self.data[key]()
        if o is None:
            raise KeyError, key
        else:
            return o

    def __contains__(self, key):
        try:
            o = self.data[key]()
        except KeyError:
            return False
        # A dead referent counts as absent.
        return o is not None

    def has_key(self, key):
        # Same semantics as __contains__ (kept for pre-`in` callers).
        try:
            o = self.data[key]()
        except KeyError:
            return False
        return o is not None

    def __repr__(self):
        return "<WeakValueDictionary at %s>" % id(self)

    def __setitem__(self, key, value):
        # Wrap the value so the shared _remove callback can find the key.
        self.data[key] = KeyedRef(value, self._remove, key)

    def copy(self):
        """Return a shallow copy, skipping entries whose values have died."""
        new = WeakValueDictionary()
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                new[key] = o
        return new

    def get(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            return default
        else:
            o = wr()
            if o is None:
                # The referent died but its callback has not pruned the
                # entry yet; report the default instead.
                return default
            else:
                return o

    def items(self):
        """Return a list of (key, value) pairs for the live entries."""
        L = []
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                L.append((key, o))
        return L

    def iteritems(self):
        for wr in self.data.itervalues():
            value = wr()
            if value is not None:
                yield wr.key, value

    def iterkeys(self):
        return self.data.iterkeys()

    def __iter__(self):
        return self.data.iterkeys()

    def itervalues(self):
        for wr in self.data.itervalues():
            obj = wr()
            if obj is not None:
                yield obj

    def popitem(self):
        """Remove and return an arbitrary live (key, value) pair."""
        while 1:
            key, wr = self.data.popitem()
            o = wr()
            if o is not None:
                return key, o

    def pop(self, key, *args):
        """Remove *key* and return its live value, or the optional default."""
        try:
            o = self.data.pop(key)()
        except KeyError:
            if args:
                return args[0]
            raise
        if o is None:
            # Entry existed but the referent died: treat as missing.
            raise KeyError, key
        else:
            return o

    def setdefault(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            self.data[key] = KeyedRef(default, self._remove, key)
            return default
        else:
            return wr()

    def update(self, dict=None, **kwargs):
        d = self.data
        if dict is not None:
            # Coerce sequences of pairs into a real dict first.
            if not hasattr(dict, "items"):
                dict = type({})(dict)
            for key, o in dict.items():
                d[key] = KeyedRef(o, self._remove, key)
        if len(kwargs):
            self.update(kwargs)

    def values(self):
        """Return a list of the live values."""
        L = []
        for wr in self.data.values():
            o = wr()
            if o is not None:
                L.append(o)
        return L
+
+
class KeyedRef(ref):
    """Weak reference that also remembers the dictionary key of its referent.

    WeakValueDictionary stores these so that one shared callback can
    recover the key from the reference itself rather than closing over a
    separate key per entry.
    """

    __slots__ = "key",

    def __new__(cls, ob, callback, key):
        # ref is immutable, so the key attribute is attached in __new__.
        self = ref.__new__(cls, ob, callback)
        self.key = key
        return self

    def __init__(self, ob, callback, key):
        # The extra *key* argument was consumed by __new__; just forward
        # the weak-reference arguments to the base class.
        super(KeyedRef, self).__init__(ob, callback)
+
+
class WeakKeyDictionary(UserDict.UserDict):
    """ Mapping class that references keys weakly.

    Entries in the dictionary will be discarded when there is no
    longer a strong reference to the key. This can be used to
    associate additional data with an object owned by other parts of
    an application without adding attributes to those objects. This
    can be especially useful with objects that override attribute
    accesses.
    """

    def __init__(self, dict=None):
        self.data = {}
        # Shared callback invoked when a key dies; *k* is the dead weak
        # reference itself, which is what self.data is keyed on.  Only a
        # weak reference to self is held so the callback does not keep
        # this dictionary alive.
        def remove(k, selfref=ref(self)):
            self = selfref()
            if self is not None:
                del self.data[k]
        self._remove = remove
        if dict is not None: self.update(dict)

    def __delitem__(self, key):
        # ref(key) without a callback compares equal to the stored
        # callback-carrying reference while *key* is alive.
        del self.data[ref(key)]

    def __getitem__(self, key):
        return self.data[ref(key)]

    def __repr__(self):
        return "<WeakKeyDictionary at %s>" % id(self)

    def __setitem__(self, key, value):
        self.data[ref(key, self._remove)] = value

    def copy(self):
        """Return a shallow copy, skipping entries whose keys have died."""
        new = WeakKeyDictionary()
        for key, value in self.data.items():
            o = key()
            if o is not None:
                new[o] = value
        return new

    def get(self, key, default=None):
        return self.data.get(ref(key),default)

    def has_key(self, key):
        # Objects that cannot be weakly referenced can never be keys.
        try:
            wr = ref(key)
        except TypeError:
            return 0
        return wr in self.data

    def __contains__(self, key):
        try:
            wr = ref(key)
        except TypeError:
            return 0
        return wr in self.data

    def items(self):
        """Return a list of (key, value) pairs for the live entries."""
        L = []
        for key, value in self.data.items():
            o = key()
            if o is not None:
                L.append((o, value))
        return L

    def iteritems(self):
        for wr, value in self.data.iteritems():
            key = wr()
            if key is not None:
                yield key, value

    def iterkeys(self):
        for wr in self.data.iterkeys():
            obj = wr()
            if obj is not None:
                yield obj

    def __iter__(self):
        return self.iterkeys()

    def itervalues(self):
        return self.data.itervalues()

    def keys(self):
        """Return a list of the live keys."""
        L = []
        for wr in self.data.keys():
            o = wr()
            if o is not None:
                L.append(o)
        return L

    def popitem(self):
        """Remove and return an arbitrary live (key, value) pair."""
        while 1:
            key, value = self.data.popitem()
            o = key()
            if o is not None:
                return o, value

    def pop(self, key, *args):
        return self.data.pop(ref(key), *args)

    def setdefault(self, key, default=None):
        return self.data.setdefault(ref(key, self._remove),default)

    def update(self, dict=None, **kwargs):
        d = self.data
        if dict is not None:
            # Coerce sequences of pairs into a real dict first.
            if not hasattr(dict, "items"):
                dict = type({})(dict)
            for key, value in dict.items():
                d[ref(key, self._remove)] = value
        if len(kwargs):
            self.update(kwargs)
diff --git a/depot_tools/release/win/python_24/Lib/webbrowser.py b/depot_tools/release/win/python_24/Lib/webbrowser.py
new file mode 100644
index 0000000..4750fe25
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/webbrowser.py
@@ -0,0 +1,370 @@
+"""Interfaces for launching and remotely controlling Web browsers."""
+
+import os
+import sys
+
+__all__ = ["Error", "open", "get", "register"]
+
+class Error(Exception):
+    """Raised when no runnable browser can be located or controlled."""
+    pass
+
+_browsers = {}          # Dictionary of available browser controllers
+_tryorder = []          # Preference order of available browsers
+
+def register(name, klass, instance=None):
+    """Register a browser connector and, optionally, connection."""
+    # Lookup is case-insensitive; value is [controller class, instance].
+    _browsers[name.lower()] = [klass, instance]
+
+def get(using=None):
+    """Return a browser launcher instance appropriate for the environment."""
+    if using is not None:
+        alternatives = [using]
+    else:
+        alternatives = _tryorder
+    for browser in alternatives:
+        if '%s' in browser:
+            # User gave us a command line, don't mess with it.
+            return GenericBrowser(browser)
+        else:
+            # User gave us a browser name.
+            try:
+                command = _browsers[browser.lower()]
+            except KeyError:
+                command = _synthesize(browser)
+            if command[1] is None:
+                return command[0]()
+            else:
+                return command[1]
+    raise Error("could not locate runnable browser")
+
+# Please note: the following definition hides a builtin function.
+
+def open(url, new=0, autoraise=1):
+    """Display *url* using the default browser.
+
+    new -- if true, ask the browser for a new window.
+    autoraise -- if true, attempt to raise the browser window.
+    """
+    get().open(url, new, autoraise)
+
+def open_new(url):
+    """Display *url* in a new window of the default browser."""
+    get().open(url, 1)
+
+
+def _synthesize(browser):
+    """Attempt to synthesize a controller based on existing controllers.
+
+    This is useful to create a controller when a user specifies a path to
+    an entry in the BROWSER environment variable -- we can copy a general
+    controller to operate using a specific installation of the desired
+    browser in this way.
+
+    If we can't create a controller in this way, or if there is no
+    executable for the requested browser, return [None, None].
+
+    """
+    if not os.path.exists(browser):
+        return [None, None]
+    name = os.path.basename(browser)
+    try:
+        command = _browsers[name.lower()]
+    except KeyError:
+        return [None, None]
+    # now attempt to clone to fit the new name:
+    controller = command[1]
+    if controller and name.lower() == controller.basename:
+        import copy
+        controller = copy.copy(controller)
+        controller.name = browser
+        controller.basename = os.path.basename(browser)
+        # Register under the full path so later lookups find the clone.
+        register(browser, None, controller)
+        return [None, controller]
+    return [None, None]
+
+
+def _iscommand(cmd):
+    """Return True if cmd can be found on the executable search path."""
+    path = os.environ.get("PATH")
+    if not path:
+        return False
+    for d in path.split(os.pathsep):
+        exe = os.path.join(d, cmd)
+        if os.path.isfile(exe):
+            return True
+    return False
+
+
+PROCESS_CREATION_DELAY = 4
+
+
+class GenericBrowser:
+    """Launcher for any browser invoked as "<command> <args with %s>"."""
+
+    def __init__(self, cmd):
+        # Split off the executable; the remainder should contain a '%s'
+        # placeholder for the URL.
+        self.name, self.args = cmd.split(None, 1)
+        self.basename = os.path.basename(self.name)
+
+    def open(self, url, new=0, autoraise=1):
+        # SECURITY NOTE(review): the URL is interpolated into a shell
+        # command run via os.system(); the assert below only rejects
+        # single quotes and is stripped under -O, so callers must not
+        # pass untrusted URLs here.
+        assert "'" not in url
+        command = "%s %s" % (self.name, self.args)
+        os.system(command % url)
+
+    def open_new(self, url):
+        self.open(url)
+
+
+class Netscape:
+    "Launcher class for Netscape browsers."
+    def __init__(self, name):
+        self.name = name
+        self.basename = os.path.basename(name)
+
+    def _remote(self, action, autoraise):
+        # Send *action* over the -remote protocol; if no browser is
+        # listening (non-zero exit status), start one in the background,
+        # wait, and retry once.  Returns true on success.
+        # (autoraise is assumed to be 0 or 1: it is used as a tuple index.)
+        raise_opt = ("-noraise", "-raise")[autoraise]
+        cmd = "%s %s -remote '%s' >/dev/null 2>&1" % (self.name,
+                                                      raise_opt,
+                                                      action)
+        rc = os.system(cmd)
+        if rc:
+            import time
+            os.system("%s &" % self.name)
+            time.sleep(PROCESS_CREATION_DELAY)
+            rc = os.system(cmd)
+        return not rc
+
+    def open(self, url, new=0, autoraise=1):
+        if new:
+            self._remote("openURL(%s, new-window)"%url, autoraise)
+        else:
+            self._remote("openURL(%s)" % url, autoraise)
+
+    def open_new(self, url):
+        self.open(url, 1)
+
+
+class Galeon:
+    """Launcher class for Galeon browsers."""
+    def __init__(self, name):
+        self.name = name
+        self.basename = os.path.basename(name)
+
+    def _remote(self, action, autoraise):
+        # Same start-and-retry dance as Netscape._remote; Galeon only
+        # has a --noraise flag (raising is its default behaviour).
+        raise_opt = ("--noraise", "")[autoraise]
+        cmd = "%s %s %s >/dev/null 2>&1" % (self.name, raise_opt, action)
+        rc = os.system(cmd)
+        if rc:
+            import time
+            os.system("%s >/dev/null 2>&1 &" % self.name)
+            time.sleep(PROCESS_CREATION_DELAY)
+            rc = os.system(cmd)
+        return not rc
+
+    def open(self, url, new=0, autoraise=1):
+        # -w opens a new window, -n reuses the existing one.
+        if new:
+            self._remote("-w '%s'" % url, autoraise)
+        else:
+            self._remote("-n '%s'" % url, autoraise)
+
+    def open_new(self, url):
+        self.open(url, 1)
+
+
+class Konqueror:
+    """Controller for the KDE File Manager (kfm, or Konqueror).
+
+    See http://developer.kde.org/documentation/other/kfmclient.html
+    for more information on the Konqueror remote-control interface.
+
+    """
+    def __init__(self):
+        # Prefer konqueror when present; fall back to the older kfm.
+        if _iscommand("konqueror"):
+            self.name = self.basename = "konqueror"
+        else:
+            self.name = self.basename = "kfm"
+
+    def _remote(self, action):
+        # Drive the browser through kfmclient; if nothing is listening,
+        # start the browser in the background and retry once.
+        cmd = "kfmclient %s >/dev/null 2>&1" % action
+        rc = os.system(cmd)
+        if rc:
+            import time
+            if self.basename == "konqueror":
+                os.system(self.name + " --silent &")
+            else:
+                os.system(self.name + " -d &")
+            time.sleep(PROCESS_CREATION_DELAY)
+            rc = os.system(cmd)
+        return not rc
+
+    def open(self, url, new=1, autoraise=1):
+        # XXX Currently I know no way to prevent KFM from
+        # opening a new win.
+        assert "'" not in url
+        self._remote("openURL '%s'" % url)
+
+    open_new = open
+
+
+class Grail:
+    # There should be a way to maintain a connection to Grail, but the
+    # Grail remote control protocol doesn't really allow that at this
+    # point.  It probably neverwill!
+    def _find_grail_rc(self):
+        import glob
+        import pwd
+        import socket
+        import tempfile
+        tempdir = os.path.join(tempfile.gettempdir(),
+                               ".grail-unix")
+        user = pwd.getpwuid(os.getuid())[0]
+        filename = os.path.join(tempdir, user + "-*")
+        maybes = glob.glob(filename)
+        if not maybes:
+            return None
+        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        for fn in maybes:
+            # need to PING each one until we find one that's live
+            try:
+                s.connect(fn)
+            except socket.error:
+                # no good; attempt to clean it out, but don't fail:
+                try:
+                    os.unlink(fn)
+                except IOError:
+                    pass
+            else:
+                return s
+
+    def _remote(self, action):
+        s = self._find_grail_rc()
+        if not s:
+            return 0
+        s.send(action)
+        s.close()
+        return 1
+
+    def open(self, url, new=0, autoraise=1):
+        if new:
+            self._remote("LOADNEW " + url)
+        else:
+            self._remote("LOAD " + url)
+
+    def open_new(self, url):
+        self.open(url, 1)
+
+
+class WindowsDefault:
+    """Launcher that hands the URL to the Windows shell association."""
+
+    def open(self, url, new=0, autoraise=1):
+        # os.startfile() delegates to the user's default URL handler;
+        # the 'new' and 'autoraise' hints cannot be honoured.
+        os.startfile(url)
+
+    def open_new(self, url):
+        self.open(url)
+
+#
+# Platform support for Unix
+#
+
+# This is the right test because all these Unix browsers require either
+# a console terminal or an X display to run.  Note that we cannot split
+# the TERM and DISPLAY cases, because we might be running Python from inside
+# an xterm.
+if os.environ.get("TERM") or os.environ.get("DISPLAY"):
+    _tryorder = ["links", "lynx", "w3m"]
+
+    # Easy cases first -- register console browsers if we have them.
+    if os.environ.get("TERM"):
+        # The Links browser <http://artax.karlin.mff.cuni.cz/~mikulas/links/>
+        if _iscommand("links"):
+            register("links", None, GenericBrowser("links '%s'"))
+        # The Lynx browser <http://lynx.browser.org/>
+        if _iscommand("lynx"):
+            register("lynx", None, GenericBrowser("lynx '%s'"))
+        # The w3m browser <http://ei5nazha.yz.yamagata-u.ac.jp/~aito/w3m/eng/>
+        if _iscommand("w3m"):
+            register("w3m", None, GenericBrowser("w3m '%s'"))
+
+    # X browsers have more in the way of options
+    if os.environ.get("DISPLAY"):
+        # Prepend graphical browsers so they take precedence over the
+        # console ones registered above.
+        _tryorder = ["galeon", "skipstone",
+                     "mozilla-firefox", "mozilla-firebird", "mozilla", "netscape",
+                     "kfm", "grail"] + _tryorder
+
+        # First, the Netscape series
+        for browser in ("mozilla-firefox", "mozilla-firebird",
+                        "mozilla", "netscape"):
+            if _iscommand(browser):
+                register(browser, None, Netscape(browser))
+
+        # Next, Mosaic -- old but still in use.
+        if _iscommand("mosaic"):
+            register("mosaic", None, GenericBrowser(
+                "mosaic '%s' >/dev/null &"))
+
+        # Gnome's Galeon
+        if _iscommand("galeon"):
+            register("galeon", None, Galeon("galeon"))
+
+        # Skipstone, another Gtk/Mozilla based browser
+        if _iscommand("skipstone"):
+            register("skipstone", None, GenericBrowser(
+                "skipstone '%s' >/dev/null &"))
+
+        # Konqueror/kfm, the KDE browser.
+        if _iscommand("kfm") or _iscommand("konqueror"):
+            register("kfm", Konqueror, Konqueror())
+
+        # Grail, the Python browser.
+        if _iscommand("grail"):
+            register("grail", Grail, None)
+
+
+class InternetConfig:
+    """MacOS launcher using the Internet Config ('ic') module."""
+
+    def open(self, url, new=0, autoraise=1):
+        # 'ic' is imported at module level further down, only where the
+        # import succeeds (MacOS); this class is only registered there.
+        ic.launchurl(url)
+
+    def open_new(self, url):
+        self.open(url)
+
+
+#
+# Platform support for Windows
+#
+
+if sys.platform[:3] == "win":
+    _tryorder = ["netscape", "windows-default"]
+    register("windows-default", WindowsDefault)
+
+#
+# Platform support for MacOS
+#
+
+try:
+    import ic
+except ImportError:
+    pass
+else:
+    # internet-config is the only supported controller on MacOS,
+    # so don't mess with the default!
+    _tryorder = ["internet-config"]
+    register("internet-config", InternetConfig)
+
+#
+# Platform support for OS/2
+#
+
+if sys.platform[:3] == "os2" and _iscommand("netscape.exe"):
+    _tryorder = ["os2netscape"]
+    register("os2netscape", None,
+             GenericBrowser("start netscape.exe %s"))
+
+# OK, now that we know what the default preference orders for each
+# platform are, allow user to override them with the BROWSER variable.
+#
+if "BROWSER" in os.environ:
+    # It's the user's responsibility to register handlers for any unknown
+    # browser referenced by this value, before calling open().
+    _tryorder = os.environ["BROWSER"].split(os.pathsep)
+
+# Auto-register any remaining preference entries that are real commands
+# on the search path, as generic single-quoted command lines.
+for cmd in _tryorder:
+    if not cmd.lower() in _browsers:
+        if _iscommand(cmd.lower()):
+            register(cmd.lower(), None, GenericBrowser(
+                "%s '%%s'" % cmd.lower()))
+cmd = None # to make del work if _tryorder was empty
+del cmd
+
+# Keep only entries that are registered or are raw '%s' command lines.
+_tryorder = filter(lambda x: x.lower() in _browsers
+                   or x.find("%s") > -1, _tryorder)
+# what to do if _tryorder is now empty?
diff --git a/depot_tools/release/win/python_24/Lib/whichdb.py b/depot_tools/release/win/python_24/Lib/whichdb.py
new file mode 100644
index 0000000..cc5ced3a
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/whichdb.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+"""Guess which db package to use to open a db file."""
+
+import os
+import struct
+import sys
+
+try:
+    import dbm
+    _dbmerror = dbm.error
+except ImportError:
+    # The dbm extension is platform-dependent; keep going without it so
+    # the magic-number based checks below still work.
+    dbm = None
+    # just some sort of valid exception which might be raised in the
+    # dbm test
+    _dbmerror = IOError
+
+def whichdb(filename):
+    """Guess which db package to use to open a db file.
+
+    Return values:
+
+    - None if the database file can't be read;
+    - empty string if the file can be read but can't be recognized
+    - the module name (e.g. "dbm" or "gdbm") if recognized.
+
+    Importing the given module may still fail, and opening the
+    database using that module may still fail.
+    """
+
+    # Check for dbm first -- this has a .pag and a .dir file
+    try:
+        f = open(filename + os.extsep + "pag", "rb")
+        f.close()
+        # dbm linked with gdbm on OS/2 doesn't have .dir file
+        if not (dbm.library == "GNU gdbm" and sys.platform == "os2emx"):
+            f = open(filename + os.extsep + "dir", "rb")
+            f.close()
+        return "dbm"
+    except IOError:
+        # some dbm emulations based on Berkeley DB generate a .db file
+        # some do not, but they should be caught by the dbhash checks
+        try:
+            f = open(filename + os.extsep + "db", "rb")
+            f.close()
+            # guarantee we can actually open the file using dbm
+            # kind of overkill, but since we are dealing with emulations
+            # it seems like a prudent step
+            if dbm is not None:
+                d = dbm.open(filename)
+                d.close()
+                return "dbm"
+        except (IOError, _dbmerror):
+            pass
+
+    # Check for dumbdbm next -- this has a .dir and a .dat file
+    try:
+        # First check for presence of files
+        os.stat(filename + os.extsep + "dat")
+        size = os.stat(filename + os.extsep + "dir").st_size
+        # dumbdbm files with no keys are empty
+        if size == 0:
+            return "dumbdbm"
+        f = open(filename + os.extsep + "dir", "rb")
+        try:
+            if f.read(1) in ["'", '"']:
+                return "dumbdbm"
+        finally:
+            f.close()
+    except (OSError, IOError):
+        pass
+
+    # See if the file exists, return None if not
+    try:
+        f = open(filename, "rb")
+    except IOError:
+        return None
+
+    # Read the start of the file -- the magic number
+    s16 = f.read(16)
+    f.close()
+    s = s16[0:4]
+
+    # Return "" if not at least 4 bytes
+    if len(s) != 4:
+        return ""
+
+    # Convert to 4-byte int in native byte order -- return "" if impossible
+    try:
+        (magic,) = struct.unpack("=l", s)
+    except struct.error:
+        return ""
+
+    # Check for GNU dbm
+    if magic == 0x13579ace:
+        return "gdbm"
+
+    # Check for old Berkeley db hash file format v2
+    if magic in (0x00061561, 0x61150600):
+        return "bsddb185"
+
+    # Later versions of Berkeley db hash file have a 12-byte pad in
+    # front of the file type
+    try:
+        (magic,) = struct.unpack("=l", s16[-4:])
+    except struct.error:
+        return ""
+
+    # Check for BSD hash
+    if magic in (0x00061561, 0x61150600):
+        return "dbhash"
+
+    # Unknown
+    return ""
+
+if __name__ == "__main__":
+    # Command-line utility: report the guessed db module for each file.
+    for filename in sys.argv[1:]:
+        print whichdb(filename) or "UNKNOWN", filename
diff --git a/depot_tools/release/win/python_24/Lib/whrandom.py b/depot_tools/release/win/python_24/Lib/whrandom.py
new file mode 100644
index 0000000..bc0d1a4
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/whrandom.py
@@ -0,0 +1,144 @@
+"""Wichmann-Hill random number generator.
+
+Wichmann, B. A. & Hill, I. D. (1982)
+Algorithm AS 183:
+An efficient and portable pseudo-random number generator
+Applied Statistics 31 (1982) 188-190
+
+see also:
+        Correction to Algorithm AS 183
+        Applied Statistics 33 (1984) 123
+
+        McLeod, A. I. (1985)
+        A remark on Algorithm AS 183
+        Applied Statistics 34 (1985), 198-200
+
+
+USE:
+whrandom.random()       yields double precision random numbers
+                        uniformly distributed between 0 and 1.
+
+whrandom.seed(x, y, z)  must be called before whrandom.random()
+                        to seed the generator
+
+There is also an interface to create multiple independent
+random generators, and to choose from other ranges.
+
+
+
+Multi-threading note: the random number generator used here is not
+thread-safe; it is possible that nearly simultaneous calls in
+different threads return the same random value.  To avoid this, you
+have to use a lock around all calls.  (I didn't want to slow this
+down in the serial case by using a lock here.)
+"""
+
+import warnings
+warnings.warn("the whrandom module is deprecated; please use the random module",
+              DeprecationWarning)
+
+# Translated by Guido van Rossum from C source provided by
+# Adrian Baddeley.
+
+
+class whrandom:
+    """Wichmann-Hill generator; state is three small integers (x, y, z)."""
+
+    def __init__(self, x = 0, y = 0, z = 0):
+        """Initialize an instance.
+        Without arguments, initialize from current time.
+        With arguments (x, y, z), initialize from them."""
+        self.seed(x, y, z)
+
+    def seed(self, x = 0, y = 0, z = 0):
+        """Set the seed from (x, y, z).
+        These must be integers in the range [0, 256)."""
+        if not type(x) == type(y) == type(z) == type(0):
+            raise TypeError, 'seeds must be integers'
+        if not (0 <= x < 256 and 0 <= y < 256 and 0 <= z < 256):
+            raise ValueError, 'seeds must be in range(0, 256)'
+        if 0 == x == y == z:
+            # Initialize from current time
+            import time
+            t = long(time.time() * 256)
+            # Fold the high bits into the low 24, then split into three
+            # bytes -- one per generator component.
+            t = int((t&0xffffff) ^ (t>>24))
+            t, x = divmod(t, 256)
+            t, y = divmod(t, 256)
+            t, z = divmod(t, 256)
+        # Zero is a poor seed, so substitute 1
+        self._seed = (x or 1, y or 1, z or 1)
+
+    def random(self):
+        """Get the next random number in the range [0.0, 1.0)."""
+        # This part is thread-unsafe:
+        # BEGIN CRITICAL SECTION
+        x, y, z = self._seed
+        #
+        # Three coupled linear congruential generators (AS 183 constants).
+        x = (171 * x) % 30269
+        y = (172 * y) % 30307
+        z = (170 * z) % 30323
+        #
+        self._seed = x, y, z
+        # END CRITICAL SECTION
+        #
+        return (x/30269.0 + y/30307.0 + z/30323.0) % 1.0
+
+    def uniform(self, a, b):
+        """Get a random number in the range [a, b)."""
+        return a + (b-a) * self.random()
+
+    def randint(self, a, b):
+        """Get a random integer in the range [a, b] including
+        both end points.
+
+        (Deprecated; use randrange below.)"""
+        return self.randrange(a, b+1)
+
+    def choice(self, seq):
+        """Choose a random element from a non-empty sequence."""
+        return seq[int(self.random() * len(seq))]
+
+    def randrange(self, start, stop=None, step=1, int=int, default=None):
+        """Choose a random item from range(start, stop[, step]).
+
+        This fixes the problem with randint() which includes the
+        endpoint; in Python this is usually not what you want.
+        Do not supply the 'int' and 'default' arguments."""
+        # 'int' and 'default' are bound as locals purely as a speed hack.
+        # This code is a bit messy to make it fast for the
+        # common case while still doing adequate error checking
+        istart = int(start)
+        if istart != start:
+            raise ValueError, "non-integer arg 1 for randrange()"
+        if stop is default:
+            if istart > 0:
+                return int(self.random() * istart)
+            raise ValueError, "empty range for randrange()"
+        istop = int(stop)
+        if istop != stop:
+            raise ValueError, "non-integer stop for randrange()"
+        if step == 1:
+            if istart < istop:
+                return istart + int(self.random() *
+                                   (istop - istart))
+            raise ValueError, "empty range for randrange()"
+        istep = int(step)
+        if istep != step:
+            raise ValueError, "non-integer step for randrange()"
+        # n = number of step-multiples in [istart, istop), rounded toward
+        # including any partial final step.
+        if istep > 0:
+            n = (istop - istart + istep - 1) / istep
+        elif istep < 0:
+            n = (istop - istart + istep + 1) / istep
+        else:
+            raise ValueError, "zero step for randrange()"
+
+        if n <= 0:
+            raise ValueError, "empty range for randrange()"
+        return istart + istep*int(self.random() * n)
+
+
+# Initialize from the current time
+_inst = whrandom()
+# Module-level convenience functions are bound methods of one shared
+# generator instance (not thread-safe; see the module docstring).
+seed = _inst.seed
+random = _inst.random
+uniform = _inst.uniform
+randint = _inst.randint
+choice = _inst.choice
+randrange = _inst.randrange
diff --git a/depot_tools/release/win/python_24/Lib/xdrlib.py b/depot_tools/release/win/python_24/Lib/xdrlib.py
new file mode 100644
index 0000000..1123090
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xdrlib.py
@@ -0,0 +1,285 @@
+"""Implements (a subset of) Sun XDR -- eXternal Data Representation.
+
+See: RFC 1014
+
+"""
+
+import struct
+try:
+    from cStringIO import StringIO as _StringIO
+except ImportError:
+    from StringIO import StringIO as _StringIO
+
+__all__ = ["Error", "Packer", "Unpacker", "ConversionError"]
+
+# exceptions
+class Error(Exception):
+    """Exception class for this module. Use:
+
+    except xdrlib.Error, var:
+        # var has the Error instance for the exception
+
+    Public ivars:
+        msg -- contains the message
+
+    """
+    def __init__(self, msg):
+        # msg: human-readable description of the failure.
+        self.msg = msg
+    def __repr__(self):
+        return repr(self.msg)
+    def __str__(self):
+        return str(self.msg)
+
+
+class ConversionError(Error):
+    """Raised when a value cannot be packed or unpacked as requested."""
+    pass
+
+
+
+class Packer:
+    """Pack various data representations into a buffer."""
+
+    def __init__(self):
+        self.reset()
+
+    def reset(self):
+        # Discard anything packed so far.
+        self.__buf = _StringIO()
+
+    def get_buffer(self):
+        return self.__buf.getvalue()
+    # backwards compatibility
+    get_buf = get_buffer
+
+    def pack_uint(self, x):
+        # XDR is big-endian; every basic item occupies 4 bytes.
+        self.__buf.write(struct.pack('>L', x))
+
+    pack_int = pack_uint
+    pack_enum = pack_int
+
+    def pack_bool(self, x):
+        if x: self.__buf.write('\0\0\0\1')
+        else: self.__buf.write('\0\0\0\0')
+
+    def pack_uhyper(self, x):
+        # 64-bit quantity packed as two 32-bit halves, high word first.
+        self.pack_uint(x>>32 & 0xffffffffL)
+        self.pack_uint(x & 0xffffffffL)
+
+    pack_hyper = pack_uhyper
+
+    def pack_float(self, x):
+        try: self.__buf.write(struct.pack('>f', x))
+        except struct.error, msg:
+            raise ConversionError, msg
+
+    def pack_double(self, x):
+        try: self.__buf.write(struct.pack('>d', x))
+        except struct.error, msg:
+            raise ConversionError, msg
+
+    def pack_fstring(self, n, s):
+        # Fixed-length string: truncate and NUL-pad to a 4-byte boundary.
+        if n < 0:
+            raise ValueError, 'fstring size must be nonnegative'
+        n = ((n+3)/4)*4
+        data = s[:n]
+        data = data + (n - len(data)) * '\0'
+        self.__buf.write(data)
+
+    pack_fopaque = pack_fstring
+
+    def pack_string(self, s):
+        # Variable-length string: 4-byte length prefix, then the bytes.
+        n = len(s)
+        self.pack_uint(n)
+        self.pack_fstring(n, s)
+
+    pack_opaque = pack_string
+    pack_bytes = pack_string
+
+    def pack_list(self, list, pack_item):
+        # Lists are encoded as (1, item)* followed by a 0 terminator.
+        for item in list:
+            self.pack_uint(1)
+            pack_item(item)
+        self.pack_uint(0)
+
+    def pack_farray(self, n, list, pack_item):
+        # Fixed-length array: no length on the wire, so enforce it here.
+        if len(list) != n:
+            raise ValueError, 'wrong array size'
+        for item in list:
+            pack_item(item)
+
+    def pack_array(self, list, pack_item):
+        n = len(list)
+        self.pack_uint(n)
+        self.pack_farray(n, list, pack_item)
+
+
+
+class Unpacker:
+    """Unpacks various data representations from the given buffer."""
+
+    def __init__(self, data):
+        self.reset(data)
+
+    def reset(self, data):
+        self.__buf = data
+        self.__pos = 0
+
+    def get_position(self):
+        return self.__pos
+
+    def set_position(self, position):
+        self.__pos = position
+
+    def get_buffer(self):
+        return self.__buf
+
+    def done(self):
+        # Sanity check: complain if the caller did not consume everything.
+        if self.__pos < len(self.__buf):
+            raise Error('unextracted data remains')
+
+    def unpack_uint(self):
+        i = self.__pos
+        self.__pos = j = i+4
+        data = self.__buf[i:j]
+        if len(data) < 4:
+            raise EOFError
+        x = struct.unpack('>L', data)[0]
+        try:
+            # Prefer a plain int; fall back to long where it won't fit.
+            return int(x)
+        except OverflowError:
+            return x
+
+    def unpack_int(self):
+        i = self.__pos
+        self.__pos = j = i+4
+        data = self.__buf[i:j]
+        if len(data) < 4:
+            raise EOFError
+        return struct.unpack('>l', data)[0]
+
+    unpack_enum = unpack_int
+    unpack_bool = unpack_int
+
+    def unpack_uhyper(self):
+        # 64-bit quantity arrives as two 32-bit halves, high word first.
+        hi = self.unpack_uint()
+        lo = self.unpack_uint()
+        return long(hi)<<32 | lo
+
+    def unpack_hyper(self):
+        # Reinterpret the unsigned 64-bit value as two's-complement.
+        x = self.unpack_uhyper()
+        if x >= 0x8000000000000000L:
+            x = x - 0x10000000000000000L
+        return x
+
+    def unpack_float(self):
+        i = self.__pos
+        self.__pos = j = i+4
+        data = self.__buf[i:j]
+        if len(data) < 4:
+            raise EOFError
+        return struct.unpack('>f', data)[0]
+
+    def unpack_double(self):
+        i = self.__pos
+        self.__pos = j = i+8
+        data = self.__buf[i:j]
+        if len(data) < 8:
+            raise EOFError
+        return struct.unpack('>d', data)[0]
+
+    def unpack_fstring(self, n):
+        if n < 0:
+            raise ValueError, 'fstring size must be nonnegative'
+        i = self.__pos
+        # Data is padded to a 4-byte boundary; skip the padding too.
+        j = i + (n+3)/4*4
+        if j > len(self.__buf):
+            raise EOFError
+        self.__pos = j
+        return self.__buf[i:i+n]
+
+    unpack_fopaque = unpack_fstring
+
+    def unpack_string(self):
+        n = self.unpack_uint()
+        return self.unpack_fstring(n)
+
+    unpack_opaque = unpack_string
+    unpack_bytes = unpack_string
+
+    def unpack_list(self, unpack_item):
+        # Lists are encoded as (1, item)* followed by a 0 terminator.
+        list = []
+        while 1:
+            x = self.unpack_uint()
+            if x == 0: break
+            if x != 1:
+                raise ConversionError, '0 or 1 expected, got %r' % (x,)
+            item = unpack_item()
+            list.append(item)
+        return list
+
+    def unpack_farray(self, n, unpack_item):
+        list = []
+        for i in range(n):
+            list.append(unpack_item())
+        return list
+
+    def unpack_array(self, unpack_item):
+        n = self.unpack_uint()
+        return self.unpack_farray(n, unpack_item)
+
+
+# test suite
+def _test():
+    # Round-trip self-test: pack one sample of each basic type, then
+    # unpack and check every value against its predicate.
+    p = Packer()
+    packtest = [
+        (p.pack_uint,    (9,)),
+        (p.pack_bool,    (None,)),
+        (p.pack_bool,    ('hello',)),
+        (p.pack_uhyper,  (45L,)),
+        (p.pack_float,   (1.9,)),
+        (p.pack_double,  (1.9,)),
+        (p.pack_string,  ('hello world',)),
+        (p.pack_list,    (range(5), p.pack_uint)),
+        (p.pack_array,   (['what', 'is', 'hapnin', 'doctor'], p.pack_string)),
+        ]
+    succeedlist = [1] * len(packtest)
+    count = 0
+    for method, args in packtest:
+        print 'pack test', count,
+        try:
+            method(*args)
+            print 'succeeded'
+        except ConversionError, var:
+            print 'ConversionError:', var.msg
+            succeedlist[count] = 0
+        count = count + 1
+    data = p.get_buffer()
+    # now verify
+    up = Unpacker(data)
+    unpacktest = [
+        (up.unpack_uint,   (), lambda x: x == 9),
+        (up.unpack_bool,   (), lambda x: not x),
+        (up.unpack_bool,   (), lambda x: x),
+        (up.unpack_uhyper, (), lambda x: x == 45L),
+        (up.unpack_float,  (), lambda x: 1.89 < x < 1.91),
+        (up.unpack_double, (), lambda x: 1.89 < x < 1.91),
+        (up.unpack_string, (), lambda x: x == 'hello world'),
+        (up.unpack_list,   (up.unpack_uint,), lambda x: x == range(5)),
+        (up.unpack_array,  (up.unpack_string,),
+         lambda x: x == ['what', 'is', 'hapnin', 'doctor']),
+        ]
+    count = 0
+    for method, args, pred in unpacktest:
+        print 'unpack test', count,
+        try:
+            # Skip verification of values whose packing already failed.
+            if succeedlist[count]:
+                x = method(*args)
+                print pred(x) and 'succeeded' or 'failed', ':', x
+            else:
+                print 'skipping'
+        except ConversionError, var:
+            print 'ConversionError:', var.msg
+        count = count + 1
+
+
+if __name__ == '__main__':
+    # Run the pack/unpack round-trip self-test when executed directly.
+    _test()
diff --git a/depot_tools/release/win/python_24/Lib/xml/__init__.py b/depot_tools/release/win/python_24/Lib/xml/__init__.py
new file mode 100644
index 0000000..6e4825b3
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/__init__.py
@@ -0,0 +1,42 @@
+"""Core XML support for Python.
+
+This package contains three sub-packages:
+
+dom -- The W3C Document Object Model.  This supports DOM Level 1 +
+       Namespaces.
+
+parsers -- Python wrappers for XML parsers (currently only supports Expat).
+
+sax -- The Simple API for XML, developed by XML-Dev, led by David
+       Megginson and ported to Python by Lars Marius Garshol.  This
+       supports the SAX 2 API.
+"""
+
+
+__all__ = ["dom", "parsers", "sax"]
+
+# When being checked-out without options, this has the form
+# "<dollar>Revision: x.y </dollar>"
+# When exported using -kv, it is "x.y".
+__version__ = "$Revision: 1.15 $".split()[-2:][0]
+
+
+_MINIMUM_XMLPLUS_VERSION = (0, 8, 4)
+
+
+try:
+    import _xmlplus
+except ImportError:
+    pass
+else:
+    try:
+        v = _xmlplus.version_info
+    except AttributeError:
+        # _xmlplus is too old; ignore it
+        pass
+    else:
+        if v >= _MINIMUM_XMLPLUS_VERSION:
+            # A sufficiently new PyXML is installed: transparently
+            # substitute it for this package in sys.modules.
+            import sys
+            sys.modules[__name__] = _xmlplus
+        else:
+            del v
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/NodeFilter.py b/depot_tools/release/win/python_24/Lib/xml/dom/NodeFilter.py
new file mode 100644
index 0000000..fc05245
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/NodeFilter.py
@@ -0,0 +1,27 @@
+# This is the Python mapping for interface NodeFilter from
+# DOM2-Traversal-Range. It contains only constants.
+
+class NodeFilter:
+    """
+    This is the DOM2 NodeFilter interface. It contains only constants.
+    """
+    # Return codes for acceptNode().
+    FILTER_ACCEPT = 1
+    FILTER_REJECT = 2
+    FILTER_SKIP   = 3
+
+    # whatToShow bit-mask values selecting which node types to visit.
+    SHOW_ALL                    = 0xFFFFFFFFL
+    SHOW_ELEMENT                = 0x00000001
+    SHOW_ATTRIBUTE              = 0x00000002
+    SHOW_TEXT                   = 0x00000004
+    SHOW_CDATA_SECTION          = 0x00000008
+    SHOW_ENTITY_REFERENCE       = 0x00000010
+    SHOW_ENTITY                 = 0x00000020
+    SHOW_PROCESSING_INSTRUCTION = 0x00000040
+    SHOW_COMMENT                = 0x00000080
+    SHOW_DOCUMENT               = 0x00000100
+    SHOW_DOCUMENT_TYPE          = 0x00000200
+    SHOW_DOCUMENT_FRAGMENT      = 0x00000400
+    SHOW_NOTATION               = 0x00000800
+
+    def acceptNode(self, node):
+        # Subclasses must override and return one of the FILTER_* codes.
+        raise NotImplementedError
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/__init__.py b/depot_tools/release/win/python_24/Lib/xml/dom/__init__.py
new file mode 100644
index 0000000..6363d006
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/__init__.py
@@ -0,0 +1,139 @@
+"""W3C Document Object Model implementation for Python.
+
+The Python mapping of the Document Object Model is documented in the
+Python Library Reference in the section on the xml.dom package.
+
+This package contains the following modules:
+
+minidom -- A simple implementation of the Level 1 DOM with namespace
+           support added (based on the Level 2 specification) and other
+           minor Level 2 functionality.
+
+pulldom -- DOM builder supporting on-demand tree-building for selected
+           subtrees of the document.
+
+"""
+
+
+class Node:
+    """Class giving the NodeType constants."""
+
+    # DOM implementations may use this as a base class for their own
+    # Node implementations.  If they don't, the constants defined here
+    # should still be used as the canonical definitions as they match
+    # the values given in the W3C recommendation.  Client code can
+    # safely refer to these values in all tests of Node.nodeType
+    # values.
+
+    # nodeType codes 1-12, matching the DOM recommendation.
+    ELEMENT_NODE                = 1
+    ATTRIBUTE_NODE              = 2
+    TEXT_NODE                   = 3
+    CDATA_SECTION_NODE          = 4
+    ENTITY_REFERENCE_NODE       = 5
+    ENTITY_NODE                 = 6
+    PROCESSING_INSTRUCTION_NODE = 7
+    COMMENT_NODE                = 8
+    DOCUMENT_NODE               = 9
+    DOCUMENT_TYPE_NODE          = 10
+    DOCUMENT_FRAGMENT_NODE      = 11
+    NOTATION_NODE               = 12
+
+
+# ExceptionCode values from the W3C DOM recommendation; each DOMException
+# subclass defined below exposes exactly one of these as its ``code``
+# class attribute.
+INDEX_SIZE_ERR                 = 1
+DOMSTRING_SIZE_ERR             = 2
+HIERARCHY_REQUEST_ERR          = 3
+WRONG_DOCUMENT_ERR             = 4
+INVALID_CHARACTER_ERR          = 5
+NO_DATA_ALLOWED_ERR            = 6
+NO_MODIFICATION_ALLOWED_ERR    = 7
+NOT_FOUND_ERR                  = 8
+NOT_SUPPORTED_ERR              = 9
+INUSE_ATTRIBUTE_ERR            = 10
+INVALID_STATE_ERR              = 11
+SYNTAX_ERR                     = 12
+INVALID_MODIFICATION_ERR       = 13
+NAMESPACE_ERR                  = 14
+INVALID_ACCESS_ERR             = 15
+VALIDATION_ERR                 = 16
+
+class DOMException(Exception):
+    """Abstract base class for DOM exceptions.
+    Exceptions with specific codes are specializations of this class."""
+
+    def __init__(self, *args, **kw):
+        # Abstract base: refuse direct instantiation.  Only the concrete
+        # subclasses (each of which carries a ``code`` class attribute)
+        # are meant to be raised.
+        if self.__class__ is DOMException:
+            raise RuntimeError(
+                "DOMException should not be instantiated directly")
+        Exception.__init__(self, *args, **kw)
+
+    def _get_code(self):
+        # Return the ExceptionCode value supplied by the concrete subclass.
+        return self.code
+
+
+# Concrete DOMException specializations, one per ExceptionCode value above.
+class IndexSizeErr(DOMException):
+    code = INDEX_SIZE_ERR
+
+class DomstringSizeErr(DOMException):
+    code = DOMSTRING_SIZE_ERR
+
+class HierarchyRequestErr(DOMException):
+    code = HIERARCHY_REQUEST_ERR
+
+class WrongDocumentErr(DOMException):
+    code = WRONG_DOCUMENT_ERR
+
+class InvalidCharacterErr(DOMException):
+    code = INVALID_CHARACTER_ERR
+
+class NoDataAllowedErr(DOMException):
+    code = NO_DATA_ALLOWED_ERR
+
+class NoModificationAllowedErr(DOMException):
+    code = NO_MODIFICATION_ALLOWED_ERR
+
+class NotFoundErr(DOMException):
+    code = NOT_FOUND_ERR
+
+class NotSupportedErr(DOMException):
+    code = NOT_SUPPORTED_ERR
+
+class InuseAttributeErr(DOMException):
+    code = INUSE_ATTRIBUTE_ERR
+
+class InvalidStateErr(DOMException):
+    code = INVALID_STATE_ERR
+
+class SyntaxErr(DOMException):
+    code = SYNTAX_ERR
+
+class InvalidModificationErr(DOMException):
+    code = INVALID_MODIFICATION_ERR
+
+class NamespaceErr(DOMException):
+    code = NAMESPACE_ERR
+
+class InvalidAccessErr(DOMException):
+    code = INVALID_ACCESS_ERR
+
+class ValidationErr(DOMException):
+    code = VALIDATION_ERR
+
+class UserDataHandler:
+    """Class giving the operation constants for UserDataHandler.handle()."""
+
+    # Based on DOM Level 3 (WD 9 April 2002)
+
+    # Operation codes identifying why handle() is being invoked.
+    NODE_CLONED   = 1
+    NODE_IMPORTED = 2
+    NODE_DELETED  = 3
+    NODE_RENAMED  = 4
+
+# Well-known namespace URIs used throughout the DOM implementations.
+XML_NAMESPACE = "http://www.w3.org/XML/1998/namespace"
+XMLNS_NAMESPACE = "http://www.w3.org/2000/xmlns/"
+XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml"
+# Sentinels meaning "no namespace" / "no prefix".
+EMPTY_NAMESPACE = None
+EMPTY_PREFIX = None
+
+# NOTE(review): this import sits at the bottom of the module, presumably to
+# avoid an import cycle with the implementation modules -- confirm.
+from domreg import getDOMImplementation,registerDOMImplementation
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/domreg.py b/depot_tools/release/win/python_24/Lib/xml/dom/domreg.py
new file mode 100644
index 0000000..684c436
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/domreg.py
@@ -0,0 +1,99 @@
+"""Registration facilities for DOM. This module should not be used
+directly. Instead, the functions getDOMImplementation and
+registerDOMImplementation should be imported from xml.dom."""
+
+from xml.dom.minicompat import *  # isinstance, StringTypes
+
+# This is a list of well-known implementations.  Well-known names
+# should be published by posting to xml-sig@python.org, and are
+# subsequently recorded in this file.
+
+# Maps well-known implementation name -> module providing it.
+well_known_implementations = {
+    'minidom':'xml.dom.minidom',
+    '4DOM': 'xml.dom.DOMImplementation',
+    }
+
+# DOM implementations not officially registered should register
+# themselves with their own names via registerDOMImplementation().
+
+# Maps explicitly registered name -> factory callable.
+registered = {}
+
+def registerDOMImplementation(name, factory):
+    """registerDOMImplementation(name, factory)
+
+    Register the factory function with the name. The factory function
+    should return an object which implements the DOMImplementation
+    interface. The factory function can either return the same object,
+    or a new one (e.g. if that implementation supports some
+    customization)."""
+
+    # Later getDOMImplementation(name=...) calls resolve through this table.
+    registered[name] = factory
+
+def _good_enough(dom, features):
+    "_good_enough(dom, features) -> Return 1 if the dom offers the features"
+    # *features* is a sequence of (feature, version) pairs; every single
+    # pair must be satisfied by dom.hasFeature() for the dom to qualify.
+    for f,v in features:
+        if not dom.hasFeature(f,v):
+            return 0
+    return 1
+
+def getDOMImplementation(name = None, features = ()):
+    """getDOMImplementation(name = None, features = ()) -> DOM implementation.
+
+    Return a suitable DOM implementation. The name is either
+    well-known, the module name of a DOM implementation, or None. If
+    it is not None, imports the corresponding module and returns
+    DOMImplementation object if the import succeeds.
+
+    If name is not given, consider the available implementations to
+    find one with the required feature set. If no implementation can
+    be found, raise an ImportError. The features list must be a sequence
+    of (feature, version) pairs which are passed to hasFeature."""
+
+    import os
+    creator = None
+    mod = well_known_implementations.get(name)
+    if mod:
+        # Well-known name: import its module and delegate to that module's
+        # own getDOMImplementation().
+        mod = __import__(mod, {}, {}, ['getDOMImplementation'])
+        return mod.getDOMImplementation()
+    elif name:
+        # Explicitly registered name: invoke the registered factory.
+        # (Raises KeyError if the name was never registered.)
+        return registered[name]()
+    elif os.environ.has_key("PYTHON_DOM"):
+        # Let the environment choose the default implementation.
+        return getDOMImplementation(name = os.environ["PYTHON_DOM"])
+
+    # User did not specify a name, try implementations in arbitrary
+    # order, returning the one that has the required features
+    if isinstance(features, StringTypes):
+        features = _parse_feature_string(features)
+    for creator in registered.values():
+        dom = creator()
+        if _good_enough(dom, features):
+            return dom
+
+    # Fall back to the well-known implementations, skipping any that
+    # fail to import or initialize.
+    for creator in well_known_implementations.keys():
+        try:
+            dom = getDOMImplementation(name = creator)
+        except StandardError: # typically ImportError, or AttributeError
+            continue
+        if _good_enough(dom, features):
+            return dom
+
+    raise ImportError,"no suitable DOM implementation found"
+
+def _parse_feature_string(s):
+    # Parse a string such as "core 2.0 load-ext" into a tuple of
+    # (feature, version) pairs, e.g. (("core", "2.0"), ("load-ext", None)).
+    # A token beginning with a digit is treated as the version of the
+    # feature named by the preceding token.
+    features = []
+    parts = s.split()
+    i = 0
+    length = len(parts)
+    while i < length:
+        feature = parts[i]
+        if feature[0] in "0123456789":
+            # A version token with no preceding feature name is malformed.
+            raise ValueError, "bad feature name: %r" % (feature,)
+        i = i + 1
+        version = None
+        if i < length:
+            v = parts[i]
+            if v[0] in "0123456789":
+                # Consume the version token belonging to this feature.
+                i = i + 1
+                version = v
+        features.append((feature, version))
+    return tuple(features)
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/expatbuilder.py b/depot_tools/release/win/python_24/Lib/xml/dom/expatbuilder.py
new file mode 100644
index 0000000..47d81fb
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/expatbuilder.py
@@ -0,0 +1,983 @@
+"""Facility to use the Expat parser to load a minidom instance
+from a string or file.
+
+This avoids all the overhead of SAX and pulldom to gain performance.
+"""
+
+# Warning!
+#
+# This module is tightly bound to the implementation details of the
+# minidom DOM and can't be used with other DOM implementations.  This
+# is due, in part, to a lack of appropriate methods in the DOM (there is
+# no way to create Entity and Notation nodes via the DOM Level 2
+# interface), and for performance.  The latter is the cause of some fairly
+# cryptic code.
+#
+# Performance hacks:
+#
+#   -  .character_data_handler() has an extra case in which continuing
+#      data is appended to an existing Text node; this can be a
+#      speedup since pyexpat can break up character data into multiple
+#      callbacks even though we set the buffer_text attribute on the
+#      parser.  This also gives us the advantage that we don't need a
+#      separate normalization pass.
+#
+#   -  Determining that a node exists is done using an identity comparison
+#      with None rather than a truth test; this avoids searching for and
+#      calling any methods on the node object if it exists.  (A rather
+#      nice speedup is achieved this way as well!)
+
+from xml.dom import xmlbuilder, minidom, Node
+from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX, XMLNS_NAMESPACE
+from xml.parsers import expat
+from xml.dom.minidom import _append_child, _set_attribute_node
+from xml.dom.NodeFilter import NodeFilter
+
+from xml.dom.minicompat import *
+
+# Frequently used nodeType values bound at module level for fast lookup.
+TEXT_NODE = Node.TEXT_NODE
+CDATA_SECTION_NODE = Node.CDATA_SECTION_NODE
+DOCUMENT_NODE = Node.DOCUMENT_NODE
+
+# Local aliases for the DOMBuilderFilter return codes.
+FILTER_ACCEPT = xmlbuilder.DOMBuilderFilter.FILTER_ACCEPT
+FILTER_REJECT = xmlbuilder.DOMBuilderFilter.FILTER_REJECT
+FILTER_SKIP = xmlbuilder.DOMBuilderFilter.FILTER_SKIP
+FILTER_INTERRUPT = xmlbuilder.DOMBuilderFilter.FILTER_INTERRUPT
+
+# Single shared DOMImplementation used to create all documents.
+theDOMImplementation = minidom.getDOMImplementation()
+
+# Expat typename -> TypeInfo
+_typeinfo_map = {
+    "CDATA":    minidom.TypeInfo(None, "cdata"),
+    "ENUM":     minidom.TypeInfo(None, "enumeration"),
+    "ENTITY":   minidom.TypeInfo(None, "entity"),
+    "ENTITIES": minidom.TypeInfo(None, "entities"),
+    "ID":       minidom.TypeInfo(None, "id"),
+    "IDREF":    minidom.TypeInfo(None, "idref"),
+    "IDREFS":   minidom.TypeInfo(None, "idrefs"),
+    "NMTOKEN":  minidom.TypeInfo(None, "nmtoken"),
+    "NMTOKENS": minidom.TypeInfo(None, "nmtokens"),
+    }
+
+class ElementInfo(NewStyle):
+    """Element-type information gathered from ELEMENT/ATTLIST declarations.
+
+    One instance per declared element type; backs minidom's schema queries.
+    """
+
+    __slots__ = '_attr_info', '_model', 'tagName'
+
+    def __init__(self, tagName, model=None):
+        self.tagName = tagName
+        # Attlist records appended by attlist_decl_handler(); layout is
+        # [None, name, None, None, default, 0, type, required].
+        self._attr_info = []
+        # Expat content-model tuple, or None if only an ATTLIST was seen.
+        self._model = model
+
+    def __getstate__(self):
+        # Explicit pickling support (instances have no __dict__ because
+        # of __slots__).
+        return self._attr_info, self._model, self.tagName
+
+    def __setstate__(self, state):
+        self._attr_info, self._model, self.tagName = state
+
+    def getAttributeType(self, aname):
+        # Return the TypeInfo for attribute *aname*; minidom._no_type if
+        # the attribute was never declared.
+        for info in self._attr_info:
+            if info[1] == aname:
+                t = info[-2]
+                if t[0] == "(":
+                    # Enumerated attribute types arrive as "(a|b|...)".
+                    return _typeinfo_map["ENUM"]
+                else:
+                    return _typeinfo_map[info[-2]]
+        return minidom._no_type
+
+    def getAttributeTypeNS(self, namespaceURI, localName):
+        # DTDs carry no namespace information, so nothing can be said.
+        return minidom._no_type
+
+    def isElementContent(self):
+        # True when the content model allows element content only
+        # (i.e. neither ANY nor mixed content).
+        if self._model:
+            type = self._model[0]
+            return type not in (expat.model.XML_CTYPE_ANY,
+                                expat.model.XML_CTYPE_MIXED)
+        else:
+            return False
+
+    def isEmpty(self):
+        # True when the element is declared EMPTY.
+        if self._model:
+            return self._model[0] == expat.model.XML_CTYPE_EMPTY
+        else:
+            return False
+
+    def isId(self, aname):
+        # True when attribute *aname* is declared with type ID.
+        for info in self._attr_info:
+            if info[1] == aname:
+                return info[-2] == "ID"
+        return False
+
+    def isIdNS(self, euri, ename, auri, aname):
+        # not sure this is meaningful
+        return self.isId((auri, aname))
+
+def _intern(builder, s):
+    # Return the canonical (interned) copy of *s* from the builder's
+    # per-parser intern table, inserting it first if absent.
+    return builder._intern_setdefault(s, s)
+
+def _parse_ns_name(builder, name):
+    # Split an expat namespace-expanded name ("uri localname" or
+    # "uri localname prefix") into (uri, localname, prefix, qname),
+    # interning every component through the builder's table.
+    assert ' ' in name
+    parts = name.split(' ')
+    intern = builder._intern_setdefault
+    if len(parts) == 3:
+        uri, localname, prefix = parts
+        prefix = intern(prefix, prefix)
+        qname = "%s:%s" % (prefix, localname)
+        qname = intern(qname, qname)
+        localname = intern(localname, localname)
+    else:
+        # No prefix: the qualified name is just the local name.
+        uri, localname = parts
+        prefix = EMPTY_PREFIX
+        qname = localname = intern(localname, localname)
+    return intern(uri, uri), localname, prefix, qname
+
+
+class ExpatBuilder:
+    """Document builder that uses Expat to build a ParsedXML.DOM document
+    instance."""
+
+    def __init__(self, options=None):
+        # *options* is an xmlbuilder.Options instance selecting which
+        # constructs (comments, CDATA sections, entities, ...) are kept.
+        if options is None:
+            options = xmlbuilder.Options()
+        self._options = options
+        if self._options.filter is not None:
+            self._filter = FilterVisibilityController(self._options.filter)
+        else:
+            self._filter = None
+            # This *really* doesn't do anything in this case, so
+            # override it with something fast & minimal.
+            # (The builtin ``id`` serves as a cheap one-argument no-op.)
+            self._finish_start_element = id
+        self._parser = None
+        self.reset()
+
+    def createParser(self):
+        """Create a new parser object."""
+        return expat.ParserCreate()
+
+    def getParser(self):
+        """Return the parser object, creating a new one if needed."""
+        if not self._parser:
+            self._parser = self.createParser()
+            # Cache the intern table's setdefault for fast string interning.
+            self._intern_setdefault = self._parser.intern.setdefault
+            self._parser.buffer_text = True
+            self._parser.ordered_attributes = True
+            self._parser.specified_attributes = True
+            self.install(self._parser)
+        return self._parser
+
+    def reset(self):
+        """Free all data structures used during DOM construction."""
+        self.document = theDOMImplementation.createDocument(
+            EMPTY_NAMESPACE, None, None)
+        self.curNode = self.document
+        self._elem_info = self.document._elem_info
+        self._cdata = False
+
+    def install(self, parser):
+        """Install the callbacks needed to build the DOM into the parser."""
+        # This creates circular references!
+        parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
+        parser.StartElementHandler = self.first_element_handler
+        parser.EndElementHandler = self.end_element_handler
+        parser.ProcessingInstructionHandler = self.pi_handler
+        if self._options.entities:
+            parser.EntityDeclHandler = self.entity_decl_handler
+        parser.NotationDeclHandler = self.notation_decl_handler
+        if self._options.comments:
+            parser.CommentHandler = self.comment_handler
+        if self._options.cdata_sections:
+            parser.StartCdataSectionHandler = self.start_cdata_section_handler
+            parser.EndCdataSectionHandler = self.end_cdata_section_handler
+            parser.CharacterDataHandler = self.character_data_handler_cdata
+        else:
+            parser.CharacterDataHandler = self.character_data_handler
+        parser.ExternalEntityRefHandler = self.external_entity_ref_handler
+        parser.XmlDeclHandler = self.xml_decl_handler
+        parser.ElementDeclHandler = self.element_decl_handler
+        parser.AttlistDeclHandler = self.attlist_decl_handler
+
+    def parseFile(self, file):
+        """Parse a document from a file object, returning the document
+        node."""
+        parser = self.getParser()
+        first_buffer = True
+        try:
+            while 1:
+                # Feed the parser in 16K chunks to bound memory use.
+                buffer = file.read(16*1024)
+                if not buffer:
+                    break
+                parser.Parse(buffer, 0)
+                if first_buffer and self.document.documentElement:
+                    # The internal DTD subset, if any, precedes the document
+                    # element, so the first chunk is enough to extract it.
+                    self._setup_subset(buffer)
+                first_buffer = False
+            parser.Parse("", True)
+        except ParseEscape:
+            # Raised when the filter interrupts the parse; keep what we have.
+            pass
+        doc = self.document
+        self.reset()
+        self._parser = None
+        return doc
+
+    def parseString(self, string):
+        """Parse a document from a string, returning the document node."""
+        parser = self.getParser()
+        try:
+            parser.Parse(string, True)
+            self._setup_subset(string)
+        except ParseEscape:
+            # Filter requested an interrupt; return the partial document.
+            pass
+        doc = self.document
+        self.reset()
+        self._parser = None
+        return doc
+
+    def _setup_subset(self, buffer):
+        """Load the internal subset if there might be one."""
+        if self.document.doctype:
+            extractor = InternalSubsetExtractor()
+            extractor.parseString(buffer)
+            subset = extractor.getSubset()
+            self.document.doctype.internalSubset = subset
+
+    def start_doctype_decl_handler(self, doctypeName, systemId, publicId,
+                                   has_internal_subset):
+        # Create the DocumentType node and attach it to the document.
+        doctype = self.document.implementation.createDocumentType(
+            doctypeName, publicId, systemId)
+        doctype.ownerDocument = self.document
+        self.document.childNodes.append(doctype)
+        self.document.doctype = doctype
+        if self._filter and self._filter.acceptNode(doctype) == FILTER_REJECT:
+            # Filter rejected the doctype: detach it and silence the
+            # declaration handlers that would have populated it.
+            self.document.doctype = None
+            del self.document.childNodes[-1]
+            doctype = None
+            self._parser.EntityDeclHandler = None
+            self._parser.NotationDeclHandler = None
+        if has_internal_subset:
+            if doctype is not None:
+                doctype.entities._seq = []
+                doctype.notations._seq = []
+            # Comments and PIs inside the internal subset are not part of
+            # the document tree; suppress them until the subset ends.
+            self._parser.CommentHandler = None
+            self._parser.ProcessingInstructionHandler = None
+            self._parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
+
+    def end_doctype_decl_handler(self):
+        # Restore the handlers suppressed by start_doctype_decl_handler().
+        if self._options.comments:
+            self._parser.CommentHandler = self.comment_handler
+        self._parser.ProcessingInstructionHandler = self.pi_handler
+        if not (self._elem_info or self._filter):
+            # With no element type info and no filter, the end-element
+            # bookkeeping is a no-op; stub it out with the builtin ``id``.
+            self._finish_end_element = id
+
+    def pi_handler(self, target, data):
+        node = self.document.createProcessingInstruction(target, data)
+        _append_child(self.curNode, node)
+        if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
+            self.curNode.removeChild(node)
+
+    def character_data_handler_cdata(self, data):
+        # Character-data handler used when CDATA sections are preserved.
+        childNodes = self.curNode.childNodes
+        if self._cdata:
+            if (  self._cdata_continue
+                  and childNodes[-1].nodeType == CDATA_SECTION_NODE):
+                # Continuation of the CDATA section currently being built.
+                childNodes[-1].appendData(data)
+                return
+            node = self.document.createCDATASection(data)
+            self._cdata_continue = True
+        elif childNodes and childNodes[-1].nodeType == TEXT_NODE:
+            # Merge with the preceding Text node: expat can split character
+            # data across callbacks even with buffer_text set, and merging
+            # here saves a separate normalization pass.
+            node = childNodes[-1]
+            value = node.data + data
+            d = node.__dict__
+            d['data'] = d['nodeValue'] = value
+            return
+        else:
+            # Build the Text node by poking its __dict__ directly; this
+            # deliberately bypasses minidom's attribute machinery for speed.
+            node = minidom.Text()
+            d = node.__dict__
+            d['data'] = d['nodeValue'] = data
+            d['ownerDocument'] = self.document
+        _append_child(self.curNode, node)
+
+    def character_data_handler(self, data):
+        # Fast-path character-data handler (CDATA sections not preserved).
+        childNodes = self.curNode.childNodes
+        if childNodes and childNodes[-1].nodeType == TEXT_NODE:
+            node = childNodes[-1]
+            d = node.__dict__
+            d['data'] = d['nodeValue'] = node.data + data
+            return
+        node = minidom.Text()
+        d = node.__dict__
+        # NOTE(review): for a fresh Text node this relies on minidom.Text
+        # providing a default ``data`` value to concatenate with -- confirm.
+        d['data'] = d['nodeValue'] = node.data + data
+        d['ownerDocument'] = self.document
+        _append_child(self.curNode, node)
+
+    def entity_decl_handler(self, entityName, is_parameter_entity, value,
+                            base, systemId, publicId, notationName):
+        if is_parameter_entity:
+            # we don't care about parameter entities for the DOM
+            return
+        if not self._options.entities:
+            return
+        node = self.document._create_entity(entityName, publicId,
+                                            systemId, notationName)
+        if value is not None:
+            # internal entity
+            # node *should* be readonly, but we'll cheat
+            child = self.document.createTextNode(value)
+            node.childNodes.append(child)
+        self.document.doctype.entities._seq.append(node)
+        if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
+            del self.document.doctype.entities._seq[-1]
+
+    def notation_decl_handler(self, notationName, base, systemId, publicId):
+        node = self.document._create_notation(notationName, publicId, systemId)
+        self.document.doctype.notations._seq.append(node)
+        # NOTE(review): this removes the notation when the filter *accepts*
+        # it -- the inverse of entity_decl_handler() above, which removes on
+        # FILTER_REJECT.  This looks inverted; confirm against upstream
+        # before relying on notation filtering.
+        if self._filter and self._filter.acceptNode(node) == FILTER_ACCEPT:
+            del self.document.doctype.notations._seq[-1]
+
+    def comment_handler(self, data):
+        node = self.document.createComment(data)
+        _append_child(self.curNode, node)
+        if self._filter and self._filter.acceptNode(node) == FILTER_REJECT:
+            self.curNode.removeChild(node)
+
+    def start_cdata_section_handler(self):
+        self._cdata = True
+        self._cdata_continue = False
+
+    def end_cdata_section_handler(self):
+        self._cdata = False
+        self._cdata_continue = False
+
+    def external_entity_ref_handler(self, context, base, systemId, publicId):
+        # External entities are not loaded; returning 1 tells expat to
+        # continue parsing.
+        return 1
+
+    def first_element_handler(self, name, attributes):
+        # One-shot handler for the root element: decides whether end-element
+        # bookkeeping can be stubbed out, then installs the normal handler.
+        if self._filter is None and not self._elem_info:
+            self._finish_end_element = id
+        self.getParser().StartElementHandler = self.start_element_handler
+        self.start_element_handler(name, attributes)
+
+    def start_element_handler(self, name, attributes):
+        node = self.document.createElement(name)
+        _append_child(self.curNode, node)
+        self.curNode = node
+
+        if attributes:
+            # ordered_attributes gives a flat [name1, value1, name2, ...]
+            # list; Attr nodes are built via __dict__ pokes for speed.
+            for i in range(0, len(attributes), 2):
+                a = minidom.Attr(attributes[i], EMPTY_NAMESPACE,
+                                 None, EMPTY_PREFIX)
+                value = attributes[i+1]
+                d = a.childNodes[0].__dict__
+                d['data'] = d['nodeValue'] = value
+                d = a.__dict__
+                d['value'] = d['nodeValue'] = value
+                d['ownerDocument'] = self.document
+                _set_attribute_node(node, a)
+
+        if node is not self.document.documentElement:
+            self._finish_start_element(node)
+
+    def _finish_start_element(self, node):
+        if self._filter:
+            # To be general, we'd have to call isSameNode(), but this
+            # is sufficient for minidom:
+            if node is self.document.documentElement:
+                return
+            filt = self._filter.startContainer(node)
+            if filt == FILTER_REJECT:
+                # ignore this node & all descendents
+                Rejecter(self)
+            elif filt == FILTER_SKIP:
+                # ignore this node, but make it's children become
+                # children of the parent node
+                Skipper(self)
+            else:
+                return
+            # For both REJECT and SKIP the node itself is discarded.
+            self.curNode = node.parentNode
+            node.parentNode.removeChild(node)
+            node.unlink()
+
+    # If this ever changes, Namespaces.end_element_handler() needs to
+    # be changed to match.
+    #
+    def end_element_handler(self, name):
+        curNode = self.curNode
+        self.curNode = curNode.parentNode
+        self._finish_end_element(curNode)
+
+    def _finish_end_element(self, curNode):
+        # Post-process a completed element: strip ignorable whitespace and
+        # apply the filter's acceptNode() verdict.
+        info = self._elem_info.get(curNode.tagName)
+        if info:
+            self._handle_white_text_nodes(curNode, info)
+        if self._filter:
+            if curNode is self.document.documentElement:
+                return
+            if self._filter.acceptNode(curNode) == FILTER_REJECT:
+                self.curNode.removeChild(curNode)
+                curNode.unlink()
+
+    def _handle_white_text_nodes(self, node, info):
+        if (self._options.whitespace_in_element_content
+            or not info.isElementContent()):
+            return
+
+        # We have element type information and should remove ignorable
+        # whitespace; identify for text nodes which contain only
+        # whitespace.
+        L = []
+        for child in node.childNodes:
+            if child.nodeType == TEXT_NODE and not child.data.strip():
+                L.append(child)
+
+        # Remove ignorable whitespace from the tree.
+        for child in L:
+            node.removeChild(child)
+
+    def element_decl_handler(self, name, model):
+        # Record the content model; an ATTLIST may already have created the
+        # ElementInfo, in which case only the model needs filling in.
+        info = self._elem_info.get(name)
+        if info is None:
+            self._elem_info[name] = ElementInfo(name, model)
+        else:
+            assert info._model is None
+            info._model = model
+
+    def attlist_decl_handler(self, elem, name, type, default, required):
+        info = self._elem_info.get(elem)
+        if info is None:
+            info = ElementInfo(elem)
+            self._elem_info[elem] = info
+        # Record layout consumed by ElementInfo.getAttributeType()/isId().
+        info._attr_info.append(
+            [None, name, None, None, default, 0, type, required])
+
+    def xml_decl_handler(self, version, encoding, standalone):
+        self.document.version = version
+        self.document.encoding = encoding
+        # This is still a little ugly, thanks to the pyexpat API. ;-(
+        # expat reports standalone as -1 when the declaration omitted it.
+        if standalone >= 0:
+            if standalone:
+                self.document.standalone = True
+            else:
+                self.document.standalone = False
+
+
+# Don't include FILTER_INTERRUPT, since that's checked separately
+# where allowed.
+# Legal return values from user-supplied filter callbacks.
+_ALLOWED_FILTER_RETURNS = (FILTER_ACCEPT, FILTER_REJECT, FILTER_SKIP)
+
+class FilterVisibilityController(NewStyle):
+    """Wrapper around a DOMBuilderFilter which implements the checks
+    to make the whatToShow filter attribute work."""
+
+    __slots__ = 'filter',
+
+    def __init__(self, filter):
+        self.filter = filter
+
+    def startContainer(self, node):
+        # Only consult the filter when the node's type is selected by the
+        # whatToShow bit-mask; everything else is implicitly accepted.
+        mask = self._nodetype_mask[node.nodeType]
+        if self.filter.whatToShow & mask:
+            val = self.filter.startContainer(node)
+            if val == FILTER_INTERRUPT:
+                # Abort the parse; caught by ExpatBuilder.parseFile/String.
+                raise ParseEscape
+            if val not in _ALLOWED_FILTER_RETURNS:
+                raise ValueError, \
+                      "startContainer() returned illegal value: " + repr(val)
+            return val
+        else:
+            return FILTER_ACCEPT
+
+    def acceptNode(self, node):
+        mask = self._nodetype_mask[node.nodeType]
+        if self.filter.whatToShow & mask:
+            val = self.filter.acceptNode(node)
+            if val == FILTER_INTERRUPT:
+                # Abort the parse; caught by ExpatBuilder.parseFile/String.
+                raise ParseEscape
+            if val == FILTER_SKIP:
+                # move all child nodes to the parent, and remove this node
+                parent = node.parentNode
+                for child in node.childNodes[:]:
+                    parent.appendChild(child)
+                # node is handled by the caller
+                return FILTER_REJECT
+            if val not in _ALLOWED_FILTER_RETURNS:
+                raise ValueError, \
+                      "acceptNode() returned illegal value: " + repr(val)
+            return val
+        else:
+            return FILTER_ACCEPT
+
+    # Maps Node.nodeType -> corresponding NodeFilter.SHOW_* mask bit.
+    _nodetype_mask = {
+        Node.ELEMENT_NODE:                NodeFilter.SHOW_ELEMENT,
+        Node.ATTRIBUTE_NODE:              NodeFilter.SHOW_ATTRIBUTE,
+        Node.TEXT_NODE:                   NodeFilter.SHOW_TEXT,
+        Node.CDATA_SECTION_NODE:          NodeFilter.SHOW_CDATA_SECTION,
+        Node.ENTITY_REFERENCE_NODE:       NodeFilter.SHOW_ENTITY_REFERENCE,
+        Node.ENTITY_NODE:                 NodeFilter.SHOW_ENTITY,
+        Node.PROCESSING_INSTRUCTION_NODE: NodeFilter.SHOW_PROCESSING_INSTRUCTION,
+        Node.COMMENT_NODE:                NodeFilter.SHOW_COMMENT,
+        Node.DOCUMENT_NODE:               NodeFilter.SHOW_DOCUMENT,
+        Node.DOCUMENT_TYPE_NODE:          NodeFilter.SHOW_DOCUMENT_TYPE,
+        Node.DOCUMENT_FRAGMENT_NODE:      NodeFilter.SHOW_DOCUMENT_FRAGMENT,
+        Node.NOTATION_NODE:               NodeFilter.SHOW_NOTATION,
+        }
+
+
+class FilterCrutch(NewStyle):
+    """Base helper that temporarily hijacks the parser's element handlers.
+
+    Subclasses (Rejecter, Skipper) use this to suppress or splice events
+    for a filtered subtree; _level tracks element nesting depth below the
+    filtered element.
+    """
+
+    __slots__ = '_builder', '_level', '_old_start', '_old_end'
+
+    def __init__(self, builder):
+        self._level = 0
+        self._builder = builder
+        parser = builder._parser
+        # Save the real handlers so they can be restored once the filtered
+        # subtree has been fully consumed.
+        self._old_start = parser.StartElementHandler
+        self._old_end = parser.EndElementHandler
+        parser.StartElementHandler = self.start_element_handler
+        parser.EndElementHandler = self.end_element_handler
+
+class Rejecter(FilterCrutch):
+    """Discards every event within a subtree rejected by the filter."""
+
+    __slots__ = ()
+
+    def __init__(self, builder):
+        FilterCrutch.__init__(self, builder)
+        parser = builder._parser
+        # Silence all content handlers so the rejected subtree produces
+        # no DOM nodes at all.
+        for name in ("ProcessingInstructionHandler",
+                     "CommentHandler",
+                     "CharacterDataHandler",
+                     "StartCdataSectionHandler",
+                     "EndCdataSectionHandler",
+                     "ExternalEntityRefHandler",
+                     ):
+            setattr(parser, name, None)
+
+    def start_element_handler(self, *args):
+        # Track nesting so we know when the rejected element itself closes.
+        self._level = self._level + 1
+
+    def end_element_handler(self, *args):
+        if self._level == 0:
+            # restore the old handlers
+            parser = self._builder._parser
+            self._builder.install(parser)
+            parser.StartElementHandler = self._old_start
+            parser.EndElementHandler = self._old_end
+        else:
+            self._level = self._level - 1
+
+class Skipper(FilterCrutch):
+    """Drops a skipped element itself while keeping its children."""
+
+    __slots__ = ()
+
+    def start_element_handler(self, *args):
+        node = self._builder.curNode
+        self._old_start(*args)
+        if self._builder.curNode is not node:
+            # A new element was actually created: one level deeper.
+            self._level = self._level + 1
+
+    def end_element_handler(self, *args):
+        if self._level == 0:
+            # We're popping back out of the node we're skipping, so we
+            # shouldn't need to do anything but reset the handlers.
+            self._builder._parser.StartElementHandler = self._old_start
+            self._builder._parser.EndElementHandler = self._old_end
+            self._builder = None
+        else:
+            self._level = self._level - 1
+            self._old_end(*args)
+
+
+# framework document used by the fragment builder.
+# Takes a string for the doctype, subset string, and namespace attrs string.
+
+# System id marking the synthetic entity that will carry the fragment text.
+_FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID = \
+    "http://xml.python.org/entities/fragment-builder/internal"
+
+# Wrapper-document template: the single %s is filled with the internal
+# system id right here; the escaped %%s placeholders survive as %s for the
+# later substitution of (doctype identifier, subset, namespace attrs).
+_FRAGMENT_BUILDER_TEMPLATE = (
+    '''\
+<!DOCTYPE wrapper
+  %%s [
+  <!ENTITY fragment-builder-internal
+    SYSTEM "%s">
+%%s
+]>
+<wrapper %%s
+>&fragment-builder-internal;</wrapper>'''
+    % _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID)
+
+
+class FragmentBuilder(ExpatBuilder):
+    """Builder which constructs document fragments given XML source
+    text and a context node.
+
+    The context node is expected to provide information about the
+    namespace declarations which are in scope at the start of the
+    fragment.
+    """
+
+    def __init__(self, context, options=None):
+        # The "original" document supplies the doctype used when the
+        # wrapper document is synthesized in parseString().
+        if context.nodeType == DOCUMENT_NODE:
+            self.originalDocument = context
+            self.context = context
+        else:
+            self.originalDocument = context.ownerDocument
+            self.context = context
+        ExpatBuilder.__init__(self, options)
+
+    def reset(self):
+        ExpatBuilder.reset(self)
+        # The resulting DocumentFragment; filled in by
+        # external_entity_ref_handler() during parsing.
+        self.fragment = None
+
+    def parseFile(self, file):
+        """Parse a document fragment from a file object, returning the
+        fragment node."""
+        return self.parseString(file.read())
+
+    def parseString(self, string):
+        """Parse a document fragment from a string, returning the
+        fragment node."""
+        # Stash the fragment source; it is consumed when expat expands
+        # the internal entity of the synthesized wrapper document.
+        self._source = string
+        parser = self.getParser()
+        doctype = self.originalDocument.doctype
+        ident = ""
+        if doctype:
+            subset = doctype.internalSubset or self._getDeclarations()
+            if doctype.publicId:
+                ident = ('PUBLIC "%s" "%s"'
+                         % (doctype.publicId, doctype.systemId))
+            elif doctype.systemId:
+                ident = 'SYSTEM "%s"' % doctype.systemId
+        else:
+            subset = ""
+        nsattrs = self._getNSattrs() # get ns decls from node's ancestors
+        document = _FRAGMENT_BUILDER_TEMPLATE % (ident, subset, nsattrs)
+        try:
+            parser.Parse(document, 1)
+        except:
+            # Bare except is deliberate: reset state, then re-raise
+            # whatever went wrong (Python 2.4 cannot combine
+            # try/except/finally in one statement).
+            self.reset()
+            raise
+        fragment = self.fragment
+        self.reset()
+##         self._parser = None
+        return fragment
+
+    def _getDeclarations(self):
+        """Re-create the internal subset from the DocumentType node.
+
+        This is only needed if we don't already have the
+        internalSubset as a string.
+        """
+        doctype = self.context.ownerDocument.doctype
+        s = ""
+        if doctype:
+            # Serialize notation declarations first, then entities,
+            # separating declarations with newline + two-space indent.
+            for i in range(doctype.notations.length):
+                notation = doctype.notations.item(i)
+                if s:
+                    s = s + "\n  "
+                s = "%s<!NOTATION %s" % (s, notation.nodeName)
+                if notation.publicId:
+                    s = '%s PUBLIC "%s"\n             "%s">' \
+                        % (s, notation.publicId, notation.systemId)
+                else:
+                    s = '%s SYSTEM "%s">' % (s, notation.systemId)
+            for i in range(doctype.entities.length):
+                entity = doctype.entities.item(i)
+                if s:
+                    s = s + "\n  "
+                s = "%s<!ENTITY %s" % (s, entity.nodeName)
+                if entity.publicId:
+                    s = '%s PUBLIC "%s"\n             "%s"' \
+                        % (s, entity.publicId, entity.systemId)
+                elif entity.systemId:
+                    s = '%s SYSTEM "%s"' % (s, entity.systemId)
+                else:
+                    # Internal entity: value is the text of its child.
+                    s = '%s "%s"' % (s, entity.firstChild.data)
+                if entity.notationName:
+                    s = "%s NOTATION %s" % (s, entity.notationName)
+                s = s + ">"
+        return s
+
+    def _getNSattrs(self):
+        # Non-namespace builder: nothing to declare.  Overridden in
+        # FragmentBuilderNS.
+        return ""
+
+    def external_entity_ref_handler(self, context, base, systemId, publicId):
+        if systemId == _FRAGMENT_BUILDER_INTERNAL_SYSTEM_ID:
+            # this entref is the one that we made to put the subtree
+            # in; all of our given input is parsed in here.
+            old_document = self.document
+            old_cur_node = self.curNode
+            parser = self._parser.ExternalEntityParserCreate(context)
+            # put the real document back, parse into the fragment to return
+            self.document = self.originalDocument
+            self.fragment = self.document.createDocumentFragment()
+            self.curNode = self.fragment
+            try:
+                parser.Parse(self._source, 1)
+            finally:
+                # Always restore builder state, even on parse errors.
+                self.curNode = old_cur_node
+                self.document = old_document
+                self._source = None
+            return -1
+        else:
+            return ExpatBuilder.external_entity_ref_handler(
+                self, context, base, systemId, publicId)
+
+
+class Namespaces:
+    """Mix-in class for builders; adds support for namespaces."""
+
+    def _initNamespaces(self):
+        # list of (prefix, uri) ns declarations.  Namespace attrs are
+        # constructed from this and added to the element's attrs.
+        self._ns_ordered_prefixes = []
+
+    def createParser(self):
+        """Create a new namespace-handling parser."""
+        # With namespace_separator=" ", expat reports namespaced names
+        # as space-separated fields; see start_element_handler() below.
+        parser = expat.ParserCreate(namespace_separator=" ")
+        parser.namespace_prefixes = True
+        return parser
+
+    def install(self, parser):
+        """Insert the namespace-handlers onto the parser."""
+        ExpatBuilder.install(self, parser)
+        # Only track xmlns declarations if the options ask for them.
+        if self._options.namespace_declarations:
+            parser.StartNamespaceDeclHandler = (
+                self.start_namespace_decl_handler)
+
+    def start_namespace_decl_handler(self, prefix, uri):
+        """Push this namespace declaration on our storage."""
+        self._ns_ordered_prefixes.append((prefix, uri))
+
+    def start_element_handler(self, name, attributes):
+        # A space in the name means expat gave us a namespaced name
+        # (see createParser); split it into its components.
+        if ' ' in name:
+            uri, localname, prefix, qname = _parse_ns_name(self, name)
+        else:
+            uri = EMPTY_NAMESPACE
+            qname = name
+            localname = None
+            prefix = EMPTY_PREFIX
+        node = minidom.Element(qname, uri, prefix, localname)
+        node.ownerDocument = self.document
+        _append_child(self.curNode, node)
+        self.curNode = node
+
+        # Synthesize xmlns attributes for declarations collected by
+        # start_namespace_decl_handler() (namespace_declarations only).
+        if self._ns_ordered_prefixes:
+            for prefix, uri in self._ns_ordered_prefixes:
+                if prefix:
+                    a = minidom.Attr(_intern(self, 'xmlns:' + prefix),
+                                     XMLNS_NAMESPACE, prefix, "xmlns")
+                else:
+                    a = minidom.Attr("xmlns", XMLNS_NAMESPACE,
+                                     "xmlns", EMPTY_PREFIX)
+                # Poke values straight into __dict__ to bypass
+                # Attr.__setattr__ bookkeeping, purely for speed.
+                d = a.childNodes[0].__dict__
+                d['data'] = d['nodeValue'] = uri
+                d = a.__dict__
+                d['value'] = d['nodeValue'] = uri
+                d['ownerDocument'] = self.document
+                _set_attribute_node(node, a)
+            del self._ns_ordered_prefixes[:]
+
+        # Attributes arrive as a flat [name1, value1, name2, value2, ...]
+        # list; build Attr nodes directly into the element's maps.
+        if attributes:
+            _attrs = node._attrs
+            _attrsNS = node._attrsNS
+            for i in range(0, len(attributes), 2):
+                aname = attributes[i]
+                value = attributes[i+1]
+                if ' ' in aname:
+                    uri, localname, prefix, qname = _parse_ns_name(self, aname)
+                    a = minidom.Attr(qname, uri, localname, prefix)
+                    _attrs[qname] = a
+                    _attrsNS[(uri, localname)] = a
+                else:
+                    a = minidom.Attr(aname, EMPTY_NAMESPACE,
+                                     aname, EMPTY_PREFIX)
+                    _attrs[aname] = a
+                    _attrsNS[(EMPTY_NAMESPACE, aname)] = a
+                # Same __dict__ fast path as above for the attr value
+                # and ownership links.
+                d = a.childNodes[0].__dict__
+                d['data'] = d['nodeValue'] = value
+                d = a.__dict__
+                d['ownerDocument'] = self.document
+                d['value'] = d['nodeValue'] = value
+                d['ownerElement'] = node
+
+    if __debug__:
+        # This only adds some asserts to the original
+        # end_element_handler(), so we only define this when -O is not
+        # used.  If changing one, be sure to check the other to see if
+        # it needs to be changed as well.
+        #
+        def end_element_handler(self, name):
+            curNode = self.curNode
+            if ' ' in name:
+                uri, localname, prefix, qname = _parse_ns_name(self, name)
+                assert (curNode.namespaceURI == uri
+                        and curNode.localName == localname
+                        and curNode.prefix == prefix), \
+                        "element stack messed up! (namespace)"
+            else:
+                assert curNode.nodeName == name, \
+                       "element stack messed up - bad nodeName"
+                assert curNode.namespaceURI == EMPTY_NAMESPACE, \
+                       "element stack messed up - bad namespaceURI"
+            self.curNode = curNode.parentNode
+            self._finish_end_element(curNode)
+
+
+class ExpatBuilderNS(Namespaces, ExpatBuilder):
+    """Document builder that supports namespaces."""
+
+    def reset(self):
+        # Base reset, plus re-arm the namespace bookkeeping from the
+        # Namespaces mix-in.
+        ExpatBuilder.reset(self)
+        self._initNamespaces()
+
+
+class FragmentBuilderNS(Namespaces, FragmentBuilder):
+    """Fragment builder that supports namespaces."""
+
+    def reset(self):
+        FragmentBuilder.reset(self)
+        self._initNamespaces()
+
+    def _getNSattrs(self):
+        """Return string of namespace attributes from this element and
+        ancestors."""
+        # XXX This needs to be re-written to walk the ancestors of the
+        # context to build up the namespace information from
+        # declarations, elements, and attributes found in context.
+        # Otherwise we have to store a bunch more data on the DOM
+        # (though that *might* be more reliable -- not clear).
+        attrs = ""
+        context = self.context
+        # L records prefixes already emitted; walking from the context
+        # upward means the innermost declaration of a prefix wins.
+        L = []
+        while context:
+            if hasattr(context, '_ns_prefix_uri'):
+                for prefix, uri in context._ns_prefix_uri.items():
+                    # add every new NS decl from context to L and attrs string
+                    if prefix in L:
+                        continue
+                    L.append(prefix)
+                    if prefix:
+                        declname = "xmlns:" + prefix
+                    else:
+                        declname = "xmlns"
+                    if attrs:
+                        attrs = "%s\n    %s='%s'" % (attrs, declname, uri)
+                    else:
+                        attrs = " %s='%s'" % (declname, uri)
+            context = context.parentNode
+        return attrs
+
+
+class ParseEscape(Exception):
+    """Exception raised to short-circuit parsing in InternalSubsetExtractor.
+
+    Raised as soon as the internal subset (or its absence) is known, so
+    the rest of the document is never processed.
+    """
+    pass
+
+class InternalSubsetExtractor(ExpatBuilder):
+    """XML processor which can rip out the internal document type subset."""
+
+    # Holds the extracted subset; a list of character chunks while
+    # parsing, then a normalized string (see end_doctype_decl_handler).
+    subset = None
+
+    def getSubset(self):
+        """Return the internal subset as a string."""
+        return self.subset
+
+    def parseFile(self, file):
+        # ParseEscape is the normal termination signal, not an error.
+        try:
+            ExpatBuilder.parseFile(self, file)
+        except ParseEscape:
+            pass
+
+    def parseString(self, string):
+        try:
+            ExpatBuilder.parseString(self, string)
+        except ParseEscape:
+            pass
+
+    def install(self, parser):
+        # Only doctype and first-element events matter; everything else
+        # is ignored, so don't install the full builder handler set.
+        parser.StartDoctypeDeclHandler = self.start_doctype_decl_handler
+        parser.StartElementHandler = self.start_element_handler
+
+    def start_doctype_decl_handler(self, name, publicId, systemId,
+                                   has_internal_subset):
+        if has_internal_subset:
+            # Collect the raw subset text chunk by chunk via the
+            # default handler until the doctype declaration ends.
+            parser = self.getParser()
+            self.subset = []
+            parser.DefaultHandler = self.subset.append
+            parser.EndDoctypeDeclHandler = self.end_doctype_decl_handler
+        else:
+            raise ParseEscape()
+
+    def end_doctype_decl_handler(self):
+        # Join the chunks and normalize line endings to "\n".
+        s = ''.join(self.subset).replace('\r\n', '\n').replace('\r', '\n')
+        self.subset = s
+        raise ParseEscape()
+
+    def start_element_handler(self, name, attrs):
+        # Document element reached without a doctype: nothing to extract.
+        raise ParseEscape()
+
+
+def parse(file, namespaces=1):
+    """Parse a document, returning the resulting Document node.
+
+    'file' may be either a file name or an open file object.
+    """
+    if namespaces:
+        builder = ExpatBuilderNS()
+    else:
+        builder = ExpatBuilder()
+
+    if isinstance(file, StringTypes):
+        fp = open(file, 'rb')
+        try:
+            result = builder.parseFile(fp)
+        finally:
+            fp.close()
+    else:
+        result = builder.parseFile(file)
+    return result
+
+
+def parseString(string, namespaces=1):
+    """Parse a document from a string, returning the resulting
+    Document node.
+    """
+    if namespaces:
+        builder = ExpatBuilderNS()
+    else:
+        builder = ExpatBuilder()
+    return builder.parseString(string)
+
+
+def parseFragment(file, context, namespaces=1):
+    """Parse a fragment of a document, given the context from which it
+    was originally extracted.  context should be the parent of the
+    node(s) which are in the fragment.
+
+    'file' may be either a file name or an open file object.
+    """
+    if namespaces:
+        builder = FragmentBuilderNS(context)
+    else:
+        builder = FragmentBuilder(context)
+
+    if isinstance(file, StringTypes):
+        fp = open(file, 'rb')
+        try:
+            result = builder.parseFile(fp)
+        finally:
+            fp.close()
+    else:
+        result = builder.parseFile(file)
+    return result
+
+
+def parseFragmentString(string, context, namespaces=1):
+    """Parse a fragment of a document from a string, given the context
+    from which it was originally extracted.  context should be the
+    parent of the node(s) which are in the fragment.
+    """
+    if namespaces:
+        builder = FragmentBuilderNS(context)
+    else:
+        builder = FragmentBuilder(context)
+    return builder.parseString(string)
+
+
+def makeBuilder(options):
+    """Create a builder based on an Options object."""
+    if options.namespaces:
+        return ExpatBuilderNS(options)
+    else:
+        return ExpatBuilder(options)
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/minicompat.py b/depot_tools/release/win/python_24/Lib/xml/dom/minicompat.py
new file mode 100644
index 0000000..9f2f8f7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/minicompat.py
@@ -0,0 +1,184 @@
+"""Python version compatibility support for minidom."""
+
+# This module should only be imported using "import *".
+#
+# The following names are defined:
+#
+#   isinstance    -- version of the isinstance() function that accepts
+#                    tuples as the second parameter regardless of the
+#                    Python version
+#
+#   NodeList      -- lightest possible NodeList implementation
+#
+#   EmptyNodeList -- lightest possible NodeList that is guaranteed to
+#                    remain empty (immutable)
+#
+#   StringTypes   -- tuple of defined string types
+#
+#   GetattrMagic  -- base class used to make _get_<attr> be magically
+#                    invoked when available
+#   defproperty   -- function used in conjunction with GetattrMagic;
+#                    using these together is needed to make them work
+#                    as efficiently as possible in both Python 2.2+
+#                    and older versions.  For example:
+#
+#                        class MyClass(GetattrMagic):
+#                            def _get_myattr(self):
+#                                return something
+#
+#                        defproperty(MyClass, "myattr",
+#                                    "return some value")
+#
+#                    For Python 2.2 and newer, this will construct a
+#                    property object on the class, which avoids
+#                    needing to override __getattr__().  It will only
+#                    work for read-only attributes.
+#
+#                    For older versions of Python, inheriting from
+#                    GetattrMagic will use the traditional
+#                    __getattr__() hackery to achieve the same effect,
+#                    but less efficiently.
+#
+#                    defproperty() should be used for each version of
+#                    the relevant _get_<property>() function.
+#
+#   NewStyle      -- base class to cause __slots__ to be honored in
+#                    the new world
+#
+#   True, False   -- only for Python 2.2 and earlier
+
+__all__ = ["NodeList", "EmptyNodeList", "NewStyle",
+           "StringTypes", "defproperty", "GetattrMagic"]
+
+import xml.dom
+
+# StringTypes: the native string types of this interpreter.  Probe for
+# the ``unicode`` builtin since pre-Unicode Pythons lack it.
+try:
+    unicode
+except NameError:
+    StringTypes = type(''),
+else:
+    StringTypes = type(''), type(unicode(''))
+
+
+# define True and False only if not defined as built-ins
+# (Python 2.2 and earlier); export them so "import *" provides them.
+try:
+    True
+except NameError:
+    True = 1
+    False = 0
+    __all__.extend(["True", "False"])
+
+
+# Older Pythons raise TypeError when isinstance() is given a tuple of
+# types; in that case shadow it with a wrapper that falls back to
+# testing each type individually.
+try:
+    isinstance('', StringTypes)
+except TypeError:
+    #
+    # Wrap isinstance() to make it compatible with the version in
+    # Python 2.2 and newer.
+    #
+    _isinstance = isinstance
+    def isinstance(obj, type_or_seq):
+        try:
+            return _isinstance(obj, type_or_seq)
+        except TypeError:
+            for t in type_or_seq:
+                if _isinstance(obj, t):
+                    return 1
+            return 0
+    # Export the replacement so "import *" picks it up.
+    __all__.append("isinstance")
+
+
+# On Python 2.2+ the builtin ``list`` is the list type itself and can be
+# subclassed; on older versions fall back to factory functions that
+# return plain lists (no .item() or .length support).
+if list is type([]):
+    class NodeList(list):
+        __slots__ = ()
+
+        def item(self, index):
+            # DOM-style indexed access; returns None (implicitly) when
+            # index is out of range.
+            if 0 <= index < len(self):
+                return self[index]
+
+        def _get_length(self):
+            return len(self)
+
+        def _set_length(self, value):
+            raise xml.dom.NoModificationAllowedErr(
+                "attempt to modify read-only attribute 'length'")
+
+        length = property(_get_length, _set_length,
+                          doc="The number of nodes in the NodeList.")
+
+        # Pickle as a plain list; required because __slots__ removes
+        # the instance __dict__ the default pickling would use.
+        def __getstate__(self):
+            return list(self)
+
+        def __setstate__(self, state):
+            self[:] = state
+
+    class EmptyNodeList(tuple):
+        __slots__ = ()
+
+        # Concatenation with an EmptyNodeList yields a real, mutable
+        # NodeList built from the other operand (self is always empty).
+        def __add__(self, other):
+            NL = NodeList()
+            NL.extend(other)
+            return NL
+
+        def __radd__(self, other):
+            NL = NodeList()
+            NL.extend(other)
+            return NL
+
+        def item(self, index):
+            return None
+
+        def _get_length(self):
+            return 0
+
+        def _set_length(self, value):
+            raise xml.dom.NoModificationAllowedErr(
+                "attempt to modify read-only attribute 'length'")
+
+        length = property(_get_length, _set_length,
+                          doc="The number of nodes in the NodeList.")
+
+else:
+    def NodeList():
+        return []
+
+    def EmptyNodeList():
+        return []
+
+
+# Pre-2.2 Pythons have no property(); emulate read-only computed
+# attributes via GetattrMagic.__getattr__ dispatching to _get_<name>().
+# On 2.2+ build real read-only property objects instead.
+try:
+    property
+except NameError:
+    def defproperty(klass, name, doc):
+        # taken care of by the base __getattr__()
+        pass
+
+    class GetattrMagic:
+        def __getattr__(self, key):
+            # Underscore names never dispatch to _get_*; raising early
+            # avoids recursive lookups for internal attributes.
+            if key.startswith("_"):
+                raise AttributeError, key
+
+            try:
+                get = getattr(self, "_get_" + key)
+            except AttributeError:
+                raise AttributeError, key
+            return get()
+
+    class NewStyle:
+        pass
+
+else:
+    def defproperty(klass, name, doc):
+        # Wrap the class's _get_<name>() in a read-only property whose
+        # setter always raises (per DOM read-only attribute semantics).
+        get = getattr(klass, ("_get_" + name)).im_func
+        def set(self, value, name=name):
+            raise xml.dom.NoModificationAllowedErr(
+                "attempt to modify read-only attribute " + repr(name))
+        assert not hasattr(klass, "_set_" + name), \
+               "expected not to find _set_" + name
+        prop = property(get, set, doc=doc)
+        setattr(klass, name, prop)
+
+    class GetattrMagic:
+        pass
+
+    NewStyle = object
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/minidom.py b/depot_tools/release/win/python_24/Lib/xml/dom/minidom.py
new file mode 100644
index 0000000..f17578ba
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/minidom.py
@@ -0,0 +1,1938 @@
+"""\
+minidom.py -- a lightweight DOM implementation.
+
+parse("foo.xml")
+
+parseString("<foo><bar/></foo>")
+
+Todo:
+=====
+ * convenience methods for getting elements and text.
+ * more testing
+ * bring some of the writer and linearizer code into conformance with this
+        interface
+ * SAX 2 namespaces
+"""
+
+import xml.dom
+
+from xml.dom import EMPTY_NAMESPACE, EMPTY_PREFIX, XMLNS_NAMESPACE, domreg
+from xml.dom.minicompat import *
+from xml.dom.xmlbuilder import DOMImplementationLS, DocumentLS
+
+# Cached tuple type object; presumably used for exact-type checks later
+# in this module -- no use visible in this chunk.
+_TupleType = type(())
+
+# This is used by the ID-cache invalidation checks; the list isn't
+# actually complete, since the nodes being checked will never be the
+# DOCUMENT_NODE or DOCUMENT_FRAGMENT_NODE.  (The node being checked is
+# the node being added or removed, not the node being modified.)
+#
+_nodeTypes_with_children = (xml.dom.Node.ELEMENT_NODE,
+                            xml.dom.Node.ENTITY_REFERENCE_NODE)
+
+
+class Node(xml.dom.Node, GetattrMagic):
+    """Base class for all minidom nodes.
+
+    Tree links (parentNode, siblings, ownerDocument) are stored as
+    plain attributes; the class-level values below are the defaults
+    for detached nodes.
+    """
+
+    namespaceURI = None # this is non-null only for elements and attributes
+    parentNode = None
+    ownerDocument = None
+    nextSibling = None
+    previousSibling = None
+
+    prefix = EMPTY_PREFIX # non-null only for NS elements and attributes
+
+    def __nonzero__(self):
+        # A node is always true, even when it has no children.
+        return True
+
+    def toxml(self, encoding = None):
+        """Return the XML for this node (and its subtree) as a string."""
+        return self.toprettyxml("", "", encoding)
+
+    def toprettyxml(self, indent="\t", newl="\n", encoding = None):
+        # indent = the indentation string to prepend, per level
+        # newl = the newline string to append
+        writer = _get_StringIO()
+        if encoding is not None:
+            import codecs
+            # Can't use codecs.getwriter to preserve 2.0 compatibility
+            writer = codecs.lookup(encoding)[3](writer)
+        if self.nodeType == Node.DOCUMENT_NODE:
+            # Can pass encoding only to document, to put it into XML header
+            self.writexml(writer, "", indent, newl, encoding)
+        else:
+            self.writexml(writer, "", indent, newl)
+        return writer.getvalue()
+
+    def hasChildNodes(self):
+        """Return True if this node has any children."""
+        if self.childNodes:
+            return True
+        else:
+            return False
+
+    # _get_* accessors back the read-only properties installed by
+    # defproperty() below (or GetattrMagic on very old Pythons).
+
+    def _get_childNodes(self):
+        return self.childNodes
+
+    def _get_firstChild(self):
+        # Returns None (implicitly) when there are no children.
+        if self.childNodes:
+            return self.childNodes[0]
+
+    def _get_lastChild(self):
+        if self.childNodes:
+            return self.childNodes[-1]
+
+    def insertBefore(self, newChild, refChild):
+        """Insert newChild before refChild (append when refChild is
+        None) and return newChild."""
+        if newChild.nodeType == self.DOCUMENT_FRAGMENT_NODE:
+            # Inserting a fragment inserts each of its children;
+            # iterate a copy since insertion reparents them.
+            for c in tuple(newChild.childNodes):
+                self.insertBefore(c, refChild)
+            ### The DOM does not clearly specify what to return in this case
+            return newChild
+        if newChild.nodeType not in self._child_node_types:
+            raise xml.dom.HierarchyRequestErr(
+                "%s cannot be child of %s" % (repr(newChild), repr(self)))
+        if newChild.parentNode is not None:
+            newChild.parentNode.removeChild(newChild)
+        if refChild is None:
+            self.appendChild(newChild)
+        else:
+            try:
+                index = self.childNodes.index(refChild)
+            except ValueError:
+                raise xml.dom.NotFoundErr()
+            if newChild.nodeType in _nodeTypes_with_children:
+                _clear_id_cache(self)
+            self.childNodes.insert(index, newChild)
+            newChild.nextSibling = refChild
+            refChild.previousSibling = newChild
+            if index:
+                node = self.childNodes[index-1]
+                node.nextSibling = newChild
+                newChild.previousSibling = node
+            else:
+                newChild.previousSibling = None
+            newChild.parentNode = self
+        return newChild
+
+    def appendChild(self, node):
+        """Add node to the end of this node's children and return it."""
+        if node.nodeType == self.DOCUMENT_FRAGMENT_NODE:
+            for c in tuple(node.childNodes):
+                self.appendChild(c)
+            ### The DOM does not clearly specify what to return in this case
+            return node
+        if node.nodeType not in self._child_node_types:
+            raise xml.dom.HierarchyRequestErr(
+                "%s cannot be child of %s" % (repr(node), repr(self)))
+        elif node.nodeType in _nodeTypes_with_children:
+            _clear_id_cache(self)
+        if node.parentNode is not None:
+            node.parentNode.removeChild(node)
+        _append_child(self, node)
+        node.nextSibling = None
+        return node
+
+    def replaceChild(self, newChild, oldChild):
+        """Replace oldChild with newChild and return oldChild."""
+        if newChild.nodeType == self.DOCUMENT_FRAGMENT_NODE:
+            # Replace with the fragment's children, in place.
+            refChild = oldChild.nextSibling
+            self.removeChild(oldChild)
+            return self.insertBefore(newChild, refChild)
+        if newChild.nodeType not in self._child_node_types:
+            raise xml.dom.HierarchyRequestErr(
+                "%s cannot be child of %s" % (repr(newChild), repr(self)))
+        if newChild.parentNode is not None:
+            newChild.parentNode.removeChild(newChild)
+        if newChild is oldChild:
+            # Nothing to do; note this path returns None.
+            return
+        try:
+            index = self.childNodes.index(oldChild)
+        except ValueError:
+            raise xml.dom.NotFoundErr()
+        self.childNodes[index] = newChild
+        newChild.parentNode = self
+        oldChild.parentNode = None
+        if (newChild.nodeType in _nodeTypes_with_children
+            or oldChild.nodeType in _nodeTypes_with_children):
+            _clear_id_cache(self)
+        newChild.nextSibling = oldChild.nextSibling
+        newChild.previousSibling = oldChild.previousSibling
+        oldChild.nextSibling = None
+        oldChild.previousSibling = None
+        if newChild.previousSibling:
+            newChild.previousSibling.nextSibling = newChild
+        if newChild.nextSibling:
+            newChild.nextSibling.previousSibling = newChild
+        return oldChild
+
+    def removeChild(self, oldChild):
+        """Detach oldChild from this node and return it."""
+        try:
+            self.childNodes.remove(oldChild)
+        except ValueError:
+            raise xml.dom.NotFoundErr()
+        if oldChild.nextSibling is not None:
+            oldChild.nextSibling.previousSibling = oldChild.previousSibling
+        if oldChild.previousSibling is not None:
+            oldChild.previousSibling.nextSibling = oldChild.nextSibling
+        oldChild.nextSibling = oldChild.previousSibling = None
+        if oldChild.nodeType in _nodeTypes_with_children:
+            _clear_id_cache(self)
+
+        oldChild.parentNode = None
+        return oldChild
+
+    def normalize(self):
+        """Merge adjacent Text children, drop empty ones, and recurse
+        into element children."""
+        L = []
+        for child in self.childNodes:
+            if child.nodeType == Node.TEXT_NODE:
+                data = child.data
+                if data and L and L[-1].nodeType == child.nodeType:
+                    # collapse text node
+                    node = L[-1]
+                    node.data = node.data + child.data
+                    node.nextSibling = child.nextSibling
+                    child.unlink()
+                elif data:
+                    if L:
+                        L[-1].nextSibling = child
+                        child.previousSibling = L[-1]
+                    else:
+                        child.previousSibling = None
+                    L.append(child)
+                else:
+                    # empty text node; discard
+                    child.unlink()
+            else:
+                if L:
+                    L[-1].nextSibling = child
+                    child.previousSibling = L[-1]
+                else:
+                    child.previousSibling = None
+                L.append(child)
+                if child.nodeType == Node.ELEMENT_NODE:
+                    child.normalize()
+        self.childNodes[:] = L
+
+    def cloneNode(self, deep):
+        """Return a copy of this node; deep copies the subtree too."""
+        return _clone_node(self, deep, self.ownerDocument or self)
+
+    def isSupported(self, feature, version):
+        return self.ownerDocument.implementation.hasFeature(feature, version)
+
+    def _get_localName(self):
+        # Overridden in Element and Attr where localName can be non-null
+        return None
+
+    # Node interfaces from Level 3 (WD 9 April 2002)
+
+    def isSameNode(self, other):
+        return self is other
+
+    def getInterface(self, feature):
+        if self.isSupported(feature, None):
+            return self
+        else:
+            return None
+
+    # The "user data" functions use a dictionary that is only present
+    # if some user data has been set, so be careful not to assume it
+    # exists.
+
+    def getUserData(self, key):
+        """Return the user data stored under key, or None."""
+        try:
+            return self._user_data[key][0]
+        except (AttributeError, KeyError):
+            return None
+
+    def setUserData(self, key, data, handler):
+        """Store (data, handler) under key; return the previous data
+        for that key, or None.  Passing data=None removes the entry."""
+        old = None
+        try:
+            d = self._user_data
+        except AttributeError:
+            # Lazily create the storage dict on first use.
+            d = {}
+            self._user_data = d
+        if d.has_key(key):
+            old = d[key][0]
+        if data is None:
+            # ignore handlers passed for None
+            handler = None
+            if old is not None:
+                del d[key]
+        else:
+            d[key] = (data, handler)
+        return old
+
+    def _call_user_data_handler(self, operation, src, dst):
+        # Notify every registered handler of a node operation.
+        if hasattr(self, "_user_data"):
+            for key, (data, handler) in self._user_data.items():
+                if handler is not None:
+                    handler.handle(operation, key, data, src, dst)
+
+    # minidom-specific API:
+
+    def unlink(self):
+        """Drop references to the surrounding tree and recursively
+        unlink children, so detached trees can be reclaimed."""
+        self.parentNode = self.ownerDocument = None
+        if self.childNodes:
+            for child in self.childNodes:
+                child.unlink()
+            self.childNodes = NodeList()
+        self.previousSibling = None
+        self.nextSibling = None
+
+defproperty(Node, "firstChild", doc="First child node, or None.")
+defproperty(Node, "lastChild",  doc="Last child node, or None.")
+defproperty(Node, "localName",  doc="Namespace-local name of this node.")
+
+
+def _append_child(self, node):
+    # fast path with less checks; usable by DOM builders if careful
+    # Writes go straight into __dict__ to bypass __setattr__ hooks
+    # (e.g. Attr.__setattr__); callers must maintain the remaining
+    # sibling/parent invariants (such as node.nextSibling) themselves.
+    childNodes = self.childNodes
+    if childNodes:
+        last = childNodes[-1]
+        node.__dict__["previousSibling"] = last
+        last.__dict__["nextSibling"] = node
+    childNodes.append(node)
+    node.__dict__["parentNode"] = self
+
+def _in_document(node):
+    # return True iff node is part of a document tree
+    while node is not None:
+        if node.nodeType == Node.DOCUMENT_NODE:
+            return True
+        node = node.parentNode
+    return False
+
+def _write_data(writer, data):
+    "Writes datachars to writer."
+    data = data.replace("&", "&amp;").replace("<", "&lt;")
+    data = data.replace("\"", "&quot;").replace(">", "&gt;")
+    writer.write(data)
+
+def _get_elements_by_tagName_helper(parent, name, rc):
+    for node in parent.childNodes:
+        if node.nodeType == Node.ELEMENT_NODE and \
+            (name == "*" or node.tagName == name):
+            rc.append(node)
+        _get_elements_by_tagName_helper(node, name, rc)
+    return rc
+
+def _get_elements_by_tagName_ns_helper(parent, nsURI, localName, rc):
+    for node in parent.childNodes:
+        if node.nodeType == Node.ELEMENT_NODE:
+            if ((localName == "*" or node.localName == localName) and
+                (nsURI == "*" or node.namespaceURI == nsURI)):
+                rc.append(node)
+            _get_elements_by_tagName_ns_helper(node, nsURI, localName, rc)
+    return rc
+
+class DocumentFragment(Node):
+    """Lightweight container for a group of sibling nodes."""
+
+    nodeType = Node.DOCUMENT_FRAGMENT_NODE
+    nodeName = "#document-fragment"
+    nodeValue = None
+    attributes = None
+    parentNode = None
+    # Node types a fragment may contain as direct children.
+    _child_node_types = (Node.ELEMENT_NODE,
+                         Node.TEXT_NODE,
+                         Node.CDATA_SECTION_NODE,
+                         Node.ENTITY_REFERENCE_NODE,
+                         Node.PROCESSING_INSTRUCTION_NODE,
+                         Node.COMMENT_NODE,
+                         Node.NOTATION_NODE)
+
+    def __init__(self):
+        self.childNodes = NodeList()
+
+
+class Attr(Node):
+    nodeType = Node.ATTRIBUTE_NODE
+    attributes = None
+    # Element this attribute is currently set on, if any.
+    ownerElement = None
+    specified = False
+    # Whether this attribute is a DTD-declared ID attribute.
+    _is_id = False
+
+    # Node types allowed as children (the value is modeled as a child).
+    _child_node_types = (Node.TEXT_NODE, Node.ENTITY_REFERENCE_NODE)
+
+    def __init__(self, qName, namespaceURI=EMPTY_NAMESPACE, localName=None,
+                 prefix=None):
+        # skip setattr for performance
+        d = self.__dict__
+        d["nodeName"] = d["name"] = qName
+        d["namespaceURI"] = namespaceURI
+        d["prefix"] = prefix
+        d['childNodes'] = NodeList()
+
+        # Add the single child node that represents the value of the attr
+        self.childNodes.append(Text())
+
+        # nodeValue and value are set elsewhere
+
+    def _get_localName(self):
+        return self.nodeName.split(":", 1)[-1]
+
+    def _get_name(self):
+        return self.name
+
+    def _get_specified(self):
+        return self.specified
+
+    def __setattr__(self, name, value):
+        d = self.__dict__
+        if name in ("value", "nodeValue"):
+            d["value"] = d["nodeValue"] = value
+            d2 = self.childNodes[0].__dict__
+            d2["data"] = d2["nodeValue"] = value
+            if self.ownerElement is not None:
+                _clear_id_cache(self.ownerElement)
+        elif name in ("name", "nodeName"):
+            d["name"] = d["nodeName"] = value
+            if self.ownerElement is not None:
+                _clear_id_cache(self.ownerElement)
+        else:
+            d[name] = value
+
+    def _set_prefix(self, prefix):
+        nsuri = self.namespaceURI
+        if prefix == "xmlns":
+            if nsuri and nsuri != XMLNS_NAMESPACE:
+                raise xml.dom.NamespaceErr(
+                    "illegal use of 'xmlns' prefix for the wrong namespace")
+        d = self.__dict__
+        d['prefix'] = prefix
+        if prefix is None:
+            newName = self.localName
+        else:
+            newName = "%s:%s" % (prefix, self.localName)
+        if self.ownerElement:
+            _clear_id_cache(self.ownerElement)
+        d['nodeName'] = d['name'] = newName
+
+    def _set_value(self, value):
+        d = self.__dict__
+        d['value'] = d['nodeValue'] = value
+        if self.ownerElement:
+            _clear_id_cache(self.ownerElement)
+        self.childNodes[0].data = value
+
+    def unlink(self):
+        # This implementation does not call the base implementation
+        # since most of that is not needed, and the expense of the
+        # method call is not warranted.  We duplicate the removal of
+        # children, but that's all we needed from the base class.
+        elem = self.ownerElement
+        if elem is not None:
+            del elem._attrs[self.nodeName]
+            del elem._attrsNS[(self.namespaceURI, self.localName)]
+            if self._is_id:
+                self._is_id = False
+                elem._magic_id_nodes -= 1
+                self.ownerDocument._magic_id_count -= 1
+        for child in self.childNodes:
+            child.unlink()
+        del self.childNodes[:]
+
+    def _get_isId(self):
+        if self._is_id:
+            return True
+        doc = self.ownerDocument
+        elem = self.ownerElement
+        if doc is None or elem is None:
+            return False
+
+        info = doc._get_elem_info(elem)
+        if info is None:
+            return False
+        if self.namespaceURI:
+            return info.isIdNS(self.namespaceURI, self.localName)
+        else:
+            return info.isId(self.nodeName)
+
+    def _get_schemaType(self):
+        doc = self.ownerDocument
+        elem = self.ownerElement
+        if doc is None or elem is None:
+            return _no_type
+
+        info = doc._get_elem_info(elem)
+        if info is None:
+            return _no_type
+        if self.namespaceURI:
+            return info.getAttributeTypeNS(self.namespaceURI, self.localName)
+        else:
+            return info.getAttributeType(self.nodeName)
+
+defproperty(Attr, "isId",       doc="True if this attribute is an ID.")
+defproperty(Attr, "localName",  doc="Namespace-local name of this attribute.")
+defproperty(Attr, "schemaType", doc="Schema type for this attribute.")
+
+
class NamedNodeMap(NewStyle, GetattrMagic):
    """The attribute list is a transient interface to the underlying
    dictionaries.  Mutations here will change the underlying element's
    dictionary.

    Ordering is imposed artificially and does not reflect the order of
    attributes as found in an input document.
    """

    __slots__ = ('_attrs', '_attrsNS', '_ownerElement')

    def __init__(self, attrs, attrsNS, ownerElement):
        # attrs maps qualified name -> Attr; attrsNS maps
        # (namespaceURI, localName) -> Attr.  Both dicts are owned by
        # the element; this object is just a view onto them.
        self._attrs = attrs
        self._attrsNS = attrsNS
        self._ownerElement = ownerElement

    def _get_length(self):
        return len(self._attrs)

    def item(self, index):
        # Relies on Python 2 dict.keys() returning an indexable list;
        # the ordering is arbitrary (see class docstring).
        try:
            return self[self._attrs.keys()[index]]
        except IndexError:
            return None

    def items(self):
        # (qualifiedName, value) pairs.
        L = []
        for node in self._attrs.values():
            L.append((node.nodeName, node.value))
        return L

    def itemsNS(self):
        # ((namespaceURI, localName), value) pairs.
        L = []
        for node in self._attrs.values():
            L.append(((node.namespaceURI, node.localName), node.value))
        return L

    def has_key(self, key):
        # A string key is looked up by qualified name; a tuple key by
        # (namespaceURI, localName).
        if isinstance(key, StringTypes):
            return self._attrs.has_key(key)
        else:
            return self._attrsNS.has_key(key)

    def keys(self):
        return self._attrs.keys()

    def keysNS(self):
        return self._attrsNS.keys()

    def values(self):
        return self._attrs.values()

    def get(self, name, value=None):
        return self._attrs.get(name, value)

    __len__ = _get_length

    def __cmp__(self, other):
        # Two maps viewing the same underlying attribute dict compare
        # equal; otherwise fall back to an arbitrary-but-stable order
        # (Python 2 comparison protocol).
        if self._attrs is getattr(other, "_attrs", None):
            return 0
        else:
            return cmp(id(self), id(other))

    def __getitem__(self, attname_or_tuple):
        # Tuple subscript -> namespace lookup; string -> name lookup.
        if isinstance(attname_or_tuple, _TupleType):
            return self._attrsNS[attname_or_tuple]
        else:
            return self._attrs[attname_or_tuple]

    # same as set
    def __setitem__(self, attname, value):
        if isinstance(value, StringTypes):
            try:
                node = self._attrs[attname]
            except KeyError:
                node = Attr(attname)
                node.ownerDocument = self._ownerElement.ownerDocument
                self.setNamedItem(node)
            node.value = value
        else:
            if not isinstance(value, Attr):
                raise TypeError, "value must be a string or Attr object"
            node = value
            self.setNamedItem(node)

    def getNamedItem(self, name):
        try:
            return self._attrs[name]
        except KeyError:
            return None

    def getNamedItemNS(self, namespaceURI, localName):
        try:
            return self._attrsNS[(namespaceURI, localName)]
        except KeyError:
            return None

    def removeNamedItem(self, name):
        # Removes from BOTH indexes and detaches the node from its
        # element; raises NotFoundErr when absent.
        n = self.getNamedItem(name)
        if n is not None:
            _clear_id_cache(self._ownerElement)
            del self._attrs[n.nodeName]
            del self._attrsNS[(n.namespaceURI, n.localName)]
            if n.__dict__.has_key('ownerElement'):
                n.__dict__['ownerElement'] = None
            return n
        else:
            raise xml.dom.NotFoundErr()

    def removeNamedItemNS(self, namespaceURI, localName):
        n = self.getNamedItemNS(namespaceURI, localName)
        if n is not None:
            _clear_id_cache(self._ownerElement)
            del self._attrsNS[(n.namespaceURI, n.localName)]
            del self._attrs[n.nodeName]
            if n.__dict__.has_key('ownerElement'):
                n.__dict__['ownerElement'] = None
            return n
        else:
            raise xml.dom.NotFoundErr()

    def setNamedItem(self, node):
        # Installs node into both indexes, unlinking any attribute that
        # previously had the same qualified name; returns the replaced
        # node (or None).
        if not isinstance(node, Attr):
            raise xml.dom.HierarchyRequestErr(
                "%s cannot be child of %s" % (repr(node), repr(self)))
        old = self._attrs.get(node.name)
        if old:
            old.unlink()
        self._attrs[node.name] = node
        self._attrsNS[(node.namespaceURI, node.localName)] = node
        node.ownerElement = self._ownerElement
        _clear_id_cache(node.ownerElement)
        return old

    def setNamedItemNS(self, node):
        return self.setNamedItem(node)

    def __delitem__(self, attname_or_tuple):
        # Attr.unlink() removes the node from the element's dicts.
        node = self[attname_or_tuple]
        _clear_id_cache(node.ownerElement)
        node.unlink()

    # Pickle support; needed explicitly because of __slots__.
    def __getstate__(self):
        return self._attrs, self._attrsNS, self._ownerElement

    def __setstate__(self, state):
        self._attrs, self._attrsNS, self._ownerElement = state
+
defproperty(NamedNodeMap, "length",
            doc="Number of nodes in the NamedNodeMap.")

# Alias for NamedNodeMap (historical name).
AttributeList = NamedNodeMap
+
+
class TypeInfo(NewStyle):
    """Schema type of an attribute or element: a (namespace, name) pair."""

    __slots__ = 'namespace', 'name'

    def __init__(self, namespace, name):
        self.namespace = namespace
        self.name = name

    def __repr__(self):
        if self.namespace:
            return "<TypeInfo %r (from %r)>" % (self.name, self.namespace)
        else:
            return "<TypeInfo %r>" % self.name

    def _get_name(self):
        return self.name

    def _get_namespace(self):
        return self.namespace

# Shared sentinel used wherever no schema type information is known.
_no_type = TypeInfo(None, None)
+
class Element(Node):
    """An element node: tag name, namespace information, attributes and
    children.

    Attributes are double-indexed in two dictionaries that every
    mutation path must keep in sync: _attrs keyed by qualified name and
    _attrsNS keyed by (namespaceURI, localName).
    """
    nodeType = Node.ELEMENT_NODE
    nodeValue = None
    schemaType = _no_type

    # Number of this element's attributes registered as IDs via
    # setIdAttributeNode().
    _magic_id_nodes = 0

    _child_node_types = (Node.ELEMENT_NODE,
                         Node.PROCESSING_INSTRUCTION_NODE,
                         Node.COMMENT_NODE,
                         Node.TEXT_NODE,
                         Node.CDATA_SECTION_NODE,
                         Node.ENTITY_REFERENCE_NODE)

    def __init__(self, tagName, namespaceURI=EMPTY_NAMESPACE, prefix=None,
                 localName=None):
        self.tagName = self.nodeName = tagName
        self.prefix = prefix
        self.namespaceURI = namespaceURI
        self.childNodes = NodeList()

        self._attrs = {}   # attributes are double-indexed:
        self._attrsNS = {} #    tagName -> Attribute
                           #    URI,localName -> Attribute
                           # in the future: consider lazy generation
                           # of attribute objects this is too tricky
                           # for now because of headaches with
                           # namespaces.

    def _get_localName(self):
        # The part of the qualified tag name after the optional prefix.
        return self.tagName.split(":", 1)[-1]

    def _get_tagName(self):
        return self.tagName

    def unlink(self):
        # Break the attr <-> element reference cycles, drop both
        # attribute dictionaries, then let Node.unlink handle children.
        for attr in self._attrs.values():
            attr.unlink()
        self._attrs = None
        self._attrsNS = None
        Node.unlink(self)

    def getAttribute(self, attname):
        # A missing attribute yields the empty string, not None.
        try:
            return self._attrs[attname].value
        except KeyError:
            return ""

    def getAttributeNS(self, namespaceURI, localName):
        try:
            return self._attrsNS[(namespaceURI, localName)].value
        except KeyError:
            return ""

    def setAttribute(self, attname, value):
        """Set attribute *attname* to string *value*, creating the Attr
        node if needed."""
        attr = self.getAttributeNode(attname)
        if attr is None:
            attr = Attr(attname)
            # for performance
            d = attr.__dict__
            d["value"] = d["nodeValue"] = value
            d["ownerDocument"] = self.ownerDocument
            self.setAttributeNode(attr)
        elif value != attr.value:
            d = attr.__dict__
            d["value"] = d["nodeValue"] = value
            if attr.isId:
                # The value of an ID attribute changed; cached ID
                # lookups are now stale.
                _clear_id_cache(self)

    def setAttributeNS(self, namespaceURI, qualifiedName, value):
        """Namespace-aware variant of setAttribute."""
        prefix, localname = _nssplit(qualifiedName)
        attr = self.getAttributeNodeNS(namespaceURI, localname)
        if attr is None:
            # for performance
            attr = Attr(qualifiedName, namespaceURI, localname, prefix)
            d = attr.__dict__
            d["prefix"] = prefix
            d["nodeName"] = qualifiedName
            d["value"] = d["nodeValue"] = value
            d["ownerDocument"] = self.ownerDocument
            self.setAttributeNode(attr)
        else:
            d = attr.__dict__
            if value != attr.value:
                d["value"] = d["nodeValue"] = value
                if attr.isId:
                    _clear_id_cache(self)
            if attr.prefix != prefix:
                # Same (namespace, localName) but a different prefix:
                # update the qualified name too.
                d["prefix"] = prefix
                d["nodeName"] = qualifiedName

    def getAttributeNode(self, attrname):
        return self._attrs.get(attrname)

    def getAttributeNodeNS(self, namespaceURI, localName):
        return self._attrsNS.get((namespaceURI, localName))

    def setAttributeNode(self, attr):
        """Add attribute node *attr*, replacing any existing attribute
        with the same name or same (namespace, localName).  Returns the
        replaced node, or None."""
        if attr.ownerElement not in (None, self):
            raise xml.dom.InuseAttributeErr("attribute node already owned")
        old1 = self._attrs.get(attr.name, None)
        if old1 is not None:
            self.removeAttributeNode(old1)
        old2 = self._attrsNS.get((attr.namespaceURI, attr.localName), None)
        if old2 is not None and old2 is not old1:
            self.removeAttributeNode(old2)
        _set_attribute_node(self, attr)

        if old1 is not attr:
            # It might have already been part of this node, in which case
            # it doesn't represent a change, and should not be returned.
            return old1
        if old2 is not attr:
            return old2

    setAttributeNodeNS = setAttributeNode

    def removeAttribute(self, name):
        try:
            attr = self._attrs[name]
        except KeyError:
            raise xml.dom.NotFoundErr()
        self.removeAttributeNode(attr)

    def removeAttributeNS(self, namespaceURI, localName):
        try:
            attr = self._attrsNS[(namespaceURI, localName)]
        except KeyError:
            raise xml.dom.NotFoundErr()
        self.removeAttributeNode(attr)

    def removeAttributeNode(self, node):
        if node is None:
            raise xml.dom.NotFoundErr()
        try:
            # Membership check only; node.unlink() below performs the
            # actual removal from both attribute dictionaries.
            self._attrs[node.name]
        except KeyError:
            raise xml.dom.NotFoundErr()
        _clear_id_cache(self)
        node.unlink()
        # Restore this since the node is still useful and otherwise
        # unlinked
        node.ownerDocument = self.ownerDocument

    removeAttributeNodeNS = removeAttributeNode

    def hasAttribute(self, name):
        return self._attrs.has_key(name)

    def hasAttributeNS(self, namespaceURI, localName):
        return self._attrsNS.has_key((namespaceURI, localName))

    def getElementsByTagName(self, name):
        return _get_elements_by_tagName_helper(self, name, NodeList())

    def getElementsByTagNameNS(self, namespaceURI, localName):
        return _get_elements_by_tagName_ns_helper(
            self, namespaceURI, localName, NodeList())

    def __repr__(self):
        return "<DOM Element: %s at %#x>" % (self.tagName, id(self))

    def writexml(self, writer, indent="", addindent="", newl=""):
        # indent = current indentation
        # addindent = indentation to add to higher levels
        # newl = newline string
        writer.write(indent+"<" + self.tagName)

        attrs = self._get_attributes()
        a_names = attrs.keys()
        a_names.sort()

        # Attributes are written in sorted-name order for deterministic
        # output; values are escaped via _write_data.
        for a_name in a_names:
            writer.write(" %s=\"" % a_name)
            _write_data(writer, attrs[a_name].value)
            writer.write("\"")
        if self.childNodes:
            writer.write(">%s"%(newl))
            for node in self.childNodes:
                node.writexml(writer,indent+addindent,addindent,newl)
            writer.write("%s</%s>%s" % (indent,self.tagName,newl))
        else:
            writer.write("/>%s"%(newl))

    def _get_attributes(self):
        # A fresh view object each call; mutations through it hit the
        # element's own dictionaries.
        return NamedNodeMap(self._attrs, self._attrsNS, self)

    def hasAttributes(self):
        if self._attrs:
            return True
        else:
            return False

    # DOM Level 3 attributes, based on the 22 Oct 2002 draft

    def setIdAttribute(self, name):
        idAttr = self.getAttributeNode(name)
        self.setIdAttributeNode(idAttr)

    def setIdAttributeNS(self, namespaceURI, localName):
        idAttr = self.getAttributeNodeNS(namespaceURI, localName)
        self.setIdAttributeNode(idAttr)

    def setIdAttributeNode(self, idAttr):
        """Declare *idAttr* (which must belong to this element) to be an
        ID attribute.  Raises NotFoundErr for a missing/foreign
        attribute and NoModificationAllowedErr when the element lives
        inside an entity reference."""
        if idAttr is None or not self.isSameNode(idAttr.ownerElement):
            raise xml.dom.NotFoundErr()
        if _get_containing_entref(self) is not None:
            raise xml.dom.NoModificationAllowedErr()
        if not idAttr._is_id:
            idAttr.__dict__['_is_id'] = True
            self._magic_id_nodes += 1
            self.ownerDocument._magic_id_count += 1
            _clear_id_cache(self)
+
# Computed properties backed by Element's _get_* accessors.
defproperty(Element, "attributes",
            doc="NamedNodeMap of attributes on the element.")
defproperty(Element, "localName",
            doc="Namespace-local name of this element.")
+
+
def _set_attribute_node(element, attr):
    """Install *attr* into both of *element*'s attribute indexes and
    make the element the attribute's owner."""
    _clear_id_cache(element)
    element._attrs[attr.name] = attr
    element._attrsNS[(attr.namespaceURI, attr.localName)] = attr

    # This creates a circular reference, but Element.unlink()
    # breaks the cycle since the references to the attribute
    # dictionaries are tossed.
    attr.__dict__['ownerElement'] = element
+
+
class Childless:
    """Mixin that makes childless-ness easy to implement and avoids
    the complexity of the Node methods that deal with children.
    """

    attributes = None
    # Shared EmptyNodeList instance; safe because every child-mutating
    # method below raises instead of modifying it.
    childNodes = EmptyNodeList()
    firstChild = None
    lastChild = None

    def _get_firstChild(self):
        return None

    def _get_lastChild(self):
        return None

    def appendChild(self, node):
        raise xml.dom.HierarchyRequestErr(
            self.nodeName + " nodes cannot have children")

    def hasChildNodes(self):
        return False

    def insertBefore(self, newChild, refChild):
        raise xml.dom.HierarchyRequestErr(
            self.nodeName + " nodes do not have children")

    def removeChild(self, oldChild):
        # Removing a nonexistent child is NotFoundErr, unlike the
        # HierarchyRequestErr used for the insertion methods.
        raise xml.dom.NotFoundErr(
            self.nodeName + " nodes do not have children")

    def replaceChild(self, newChild, oldChild):
        raise xml.dom.HierarchyRequestErr(
            self.nodeName + " nodes do not have children")
+
+
class ProcessingInstruction(Childless, Node):
    """A processing instruction node: <?target data?>."""
    nodeType = Node.PROCESSING_INSTRUCTION_NODE

    def __init__(self, target, data):
        self.target = self.nodeName = target
        self.data = self.nodeValue = data

    def _get_data(self):
        return self.data
    def _set_data(self, value):
        d = self.__dict__
        d['data'] = d['nodeValue'] = value

    def _get_target(self):
        return self.target
    def _set_target(self, value):
        d = self.__dict__
        d['target'] = d['nodeName'] = value

    def __setattr__(self, name, value):
        # Keep the aliased pairs data/nodeValue and target/nodeName in
        # sync regardless of which name is assigned.
        if name == "data" or name == "nodeValue":
            self.__dict__['data'] = self.__dict__['nodeValue'] = value
        elif name == "target" or name == "nodeName":
            self.__dict__['target'] = self.__dict__['nodeName'] = value
        else:
            self.__dict__[name] = value

    def writexml(self, writer, indent="", addindent="", newl=""):
        # NOTE(review): data is written unescaped, so a "?>" inside it
        # would produce ill-formed output.
        writer.write("%s<?%s %s?>%s" % (indent,self.target, self.data, newl))
+
+
class CharacterData(Childless, Node):
    """Base class for nodes that carry string data (Text, Comment, ...).

    The string is stored under both 'data' and 'nodeValue' in the
    instance dict; __setattr__ keeps the two entries identical.
    """
    def _get_length(self):
        return len(self.data)
    __len__ = _get_length

    def _get_data(self):
        return self.__dict__['data']
    def _set_data(self, data):
        d = self.__dict__
        d['data'] = d['nodeValue'] = data

    # nodeValue is simply another name for the same underlying string.
    _get_nodeValue = _get_data
    _set_nodeValue = _set_data

    def __setattr__(self, name, value):
        if name == "data" or name == "nodeValue":
            self.__dict__['data'] = self.__dict__['nodeValue'] = value
        else:
            self.__dict__[name] = value

    def __repr__(self):
        # Show at most the first 10 characters of the data.
        data = self.data
        if len(data) > 10:
            dotdotdot = "..."
        else:
            dotdotdot = ""
        return "<DOM %s node \"%s%s\">" % (
            self.__class__.__name__, data[0:10], dotdotdot)

    def substringData(self, offset, count):
        """Return up to *count* characters starting at *offset*; invalid
        bounds raise IndexSizeErr."""
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if count < 0:
            raise xml.dom.IndexSizeErr("count cannot be negative")
        return self.data[offset:offset+count]

    def appendData(self, arg):
        self.data = self.data + arg

    def insertData(self, offset, arg):
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if arg:
            self.data = "%s%s%s" % (
                self.data[:offset], arg, self.data[offset:])

    def deleteData(self, offset, count):
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if count < 0:
            raise xml.dom.IndexSizeErr("count cannot be negative")
        if count:
            self.data = self.data[:offset] + self.data[offset+count:]

    def replaceData(self, offset, count, arg):
        # Equivalent to deleting count characters at offset and
        # inserting arg there, done as a single assignment.
        if offset < 0:
            raise xml.dom.IndexSizeErr("offset cannot be negative")
        if offset >= len(self.data):
            raise xml.dom.IndexSizeErr("offset cannot be beyond end of data")
        if count < 0:
            raise xml.dom.IndexSizeErr("count cannot be negative")
        if count:
            self.data = "%s%s%s" % (
                self.data[:offset], arg, self.data[offset+count:])

defproperty(CharacterData, "length", doc="Length of the string data.")
+
+
class Text(CharacterData):
    # Make sure we don't add an instance __dict__ if we don't already
    # have one, at least when that's possible:
    # XXX this does not work, CharacterData is an old-style class
    # __slots__ = ()

    nodeType = Node.TEXT_NODE
    nodeName = "#text"
    attributes = None

    def splitText(self, offset):
        """Split this node at *offset*: keep data[:offset] here and
        return a new node of the same class holding data[offset:],
        inserted as the next sibling when this node has a parent."""
        if offset < 0 or offset > len(self.data):
            raise xml.dom.IndexSizeErr("illegal offset value")
        newText = self.__class__()
        newText.data = self.data[offset:]
        newText.ownerDocument = self.ownerDocument
        next = self.nextSibling
        # Only splice the new node in when we are actually linked into
        # a parent's child list.
        if self.parentNode and self in self.parentNode.childNodes:
            if next is None:
                self.parentNode.appendChild(newText)
            else:
                self.parentNode.insertBefore(newText, next)
        self.data = self.data[:offset]
        return newText

    def writexml(self, writer, indent="", addindent="", newl=""):
        # Text content is escaped on the way out via _write_data.
        _write_data(writer, "%s%s%s"%(indent, self.data, newl))

    # DOM Level 3 (WD 9 April 2002)

    def _get_wholeText(self):
        # Concatenate this node's data with that of all logically
        # adjacent text/CDATA siblings on both sides.
        L = [self.data]
        n = self.previousSibling
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                L.insert(0, n.data)
                n = n.previousSibling
            else:
                break
        n = self.nextSibling
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                L.append(n.data)
                n = n.nextSibling
            else:
                break
        return ''.join(L)

    def replaceWholeText(self, content):
        """Replace this node and every logically-adjacent text/CDATA
        sibling with a single node holding *content*.  Returns that
        node, or None when *content* is empty (everything is removed)."""
        # XXX This needs to be seriously changed if minidom ever
        # supports EntityReference nodes.
        parent = self.parentNode
        n = self.previousSibling
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                next = n.previousSibling
                parent.removeChild(n)
                n = next
            else:
                break
        n = self.nextSibling
        if not content:
            parent.removeChild(self)
        while n is not None:
            if n.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
                next = n.nextSibling
                parent.removeChild(n)
                n = next
            else:
                break
        if content:
            # Assign through __dict__ to bypass __setattr__ overhead.
            d = self.__dict__
            d['data'] = content
            d['nodeValue'] = content
            return self
        else:
            return None

    def _get_isWhitespaceInElementContent(self):
        # True only when the data is all whitespace AND the schema info
        # says the containing element has element-only content.
        if self.data.strip():
            return False
        elem = _get_containing_element(self)
        if elem is None:
            return False
        info = self.ownerDocument._get_elem_info(elem)
        if info is None:
            return False
        else:
            return info.isElementContent()

defproperty(Text, "isWhitespaceInElementContent",
            doc="True iff this text node contains only whitespace"
                " and is in element content.")
defproperty(Text, "wholeText",
            doc="The text of all logically-adjacent text nodes.")
+
+
def _get_containing_element(node):
    """Return the nearest ancestor of *node* that is an element, or
    None when no ancestor is an element."""
    ancestor = node.parentNode
    while ancestor is not None:
        if ancestor.nodeType == Node.ELEMENT_NODE:
            return ancestor
        ancestor = ancestor.parentNode
    return None
+
def _get_containing_entref(node):
    """Return the nearest ancestor of *node* that is an entity
    reference, or None when there is no such ancestor."""
    ancestor = node.parentNode
    while ancestor is not None:
        if ancestor.nodeType == Node.ENTITY_REFERENCE_NODE:
            return ancestor
        ancestor = ancestor.parentNode
    return None
+
+
class Comment(Childless, CharacterData):
    """An XML comment node: the text between <!-- and -->."""
    nodeType = Node.COMMENT_NODE
    nodeName = "#comment"

    def __init__(self, data):
        self.data = self.nodeValue = data

    def writexml(self, writer, indent="", addindent="", newl=""):
        # NOTE(review): data is written unescaped; a "--" inside it
        # would produce ill-formed XML.
        writer.write("%s<!--%s-->%s" % (indent, self.data, newl))
+
+
class CDATASection(Text):
    # Make sure we don't add an instance __dict__ if we don't already
    # have one, at least when that's possible:
    # XXX this does not work, Text is an old-style class
    # __slots__ = ()

    nodeType = Node.CDATA_SECTION_NODE
    nodeName = "#cdata-section"

    def writexml(self, writer, indent="", addindent="", newl=""):
        """Write the section verbatim.  indent/addindent/newl are
        accepted for signature compatibility but deliberately unused:
        CDATA content must not be reformatted or escaped."""
        if self.data.find("]]>") >= 0:
            raise ValueError("']]>' not allowed in a CDATA section")
        writer.write("<![CDATA[%s]]>" % self.data)
+
+
class ReadOnlySequentialNamedNodeMap(NewStyle, GetattrMagic):
    """A read-only NamedNodeMap backed by a simple sequence; used for
    the entities and notations maps of a DocumentType.  All mutating
    methods raise NoModificationAllowedErr."""

    __slots__ = '_seq',

    def __init__(self, seq=()):
        # seq should be a list or tuple
        self._seq = seq

    def __len__(self):
        return len(self._seq)

    def _get_length(self):
        return len(self._seq)

    def getNamedItem(self, name):
        # Linear scan; implicitly returns None when nothing matches.
        for n in self._seq:
            if n.nodeName == name:
                return n

    def getNamedItemNS(self, namespaceURI, localName):
        for n in self._seq:
            if n.namespaceURI == namespaceURI and n.localName == localName:
                return n

    def __getitem__(self, name_or_tuple):
        # Tuple subscript -> namespace lookup; anything else -> by name.
        if isinstance(name_or_tuple, _TupleType):
            node = self.getNamedItemNS(*name_or_tuple)
        else:
            node = self.getNamedItem(name_or_tuple)
        if node is None:
            raise KeyError, name_or_tuple
        return node

    def item(self, index):
        # DOM item() returns None (never raises) for out-of-range
        # indexes; negative indexes are rejected explicitly so Python's
        # from-the-end semantics don't leak through.
        if index < 0:
            return None
        try:
            return self._seq[index]
        except IndexError:
            return None

    def removeNamedItem(self, name):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")

    def removeNamedItemNS(self, namespaceURI, localName):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")

    def setNamedItem(self, node):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")

    def setNamedItemNS(self, node):
        raise xml.dom.NoModificationAllowedErr(
            "NamedNodeMap instance is read-only")

    # Pickle support; needed explicitly because of __slots__.
    def __getstate__(self):
        return [self._seq]

    def __setstate__(self, state):
        self._seq = state[0]

defproperty(ReadOnlySequentialNamedNodeMap, "length",
            doc="Number of entries in the NamedNodeMap.")
+
+
class Identified:
    """Mix-in class that supports the publicId and systemId attributes."""

    # XXX this does not work, this is an old-style class
    # __slots__ = 'publicId', 'systemId'

    def _identified_mixin_init(self, publicId, systemId):
        # Named helper (not __init__) so subclasses invoke it explicitly
        # alongside their own initialization.
        self.publicId = publicId
        self.systemId = systemId

    def _get_publicId(self):
        return self.publicId

    def _get_systemId(self):
        return self.systemId
+
class DocumentType(Identified, Childless, Node):
    """Representation of a <!DOCTYPE ...> declaration.

    Holds the doctype name plus read-only maps of the entities and
    notations declared in the DTD.  Doctype nodes are childless.
    """
    nodeType = Node.DOCUMENT_TYPE_NODE
    nodeValue = None
    name = None
    publicId = None
    systemId = None
    internalSubset = None

    def __init__(self, qualifiedName):
        self.entities = ReadOnlySequentialNamedNodeMap()
        self.notations = ReadOnlySequentialNamedNodeMap()
        if qualifiedName:
            prefix, localname = _nssplit(qualifiedName)
            self.name = localname
        self.nodeName = self.name

    def _get_internalSubset(self):
        return self.internalSubset

    def cloneNode(self, deep):
        """Return a copy of this doctype, or None when it is already
        owned by a document (owned doctypes cannot be cloned).

        When *deep* is true the entity and notation maps are copied as
        well, and each source node's user-data handlers are notified of
        the clone.
        """
        if self.ownerDocument is None:
            # it's ok
            clone = DocumentType(None)
            clone.name = self.name
            clone.nodeName = self.name
            operation = xml.dom.UserDataHandler.NODE_CLONED
            if deep:
                clone.entities._seq = []
                clone.notations._seq = []
                for n in self.notations._seq:
                    notation = Notation(n.nodeName, n.publicId, n.systemId)
                    clone.notations._seq.append(notation)
                    n._call_user_data_handler(operation, n, notation)
                for e in self.entities._seq:
                    entity = Entity(e.nodeName, e.publicId, e.systemId,
                                    e.notationName)
                    entity.actualEncoding = e.actualEncoding
                    entity.encoding = e.encoding
                    entity.version = e.version
                    clone.entities._seq.append(entity)
                    # Bug fix: notify the handler with the source entity
                    # itself.  The original code passed `n`, the loop
                    # variable left over from the notations loop above --
                    # the wrong source node, and an unbound NameError
                    # when there are no notations.  (CPython's
                    # xml.dom.minidom carries the same fix.)
                    e._call_user_data_handler(operation, e, entity)
            self._call_user_data_handler(operation, self, clone)
            return clone
        else:
            return None

    def writexml(self, writer, indent="", addindent="", newl=""):
        """Write the doctype declaration.  The indent/addindent/newl
        parameters are accepted for interface compatibility; the
        declaration uses its own fixed layout."""
        writer.write("<!DOCTYPE ")
        writer.write(self.name)
        if self.publicId:
            writer.write("\n  PUBLIC '%s'\n  '%s'"
                         % (self.publicId, self.systemId))
        elif self.systemId:
            writer.write("\n  SYSTEM '%s'" % self.systemId)
        if self.internalSubset is not None:
            writer.write(" [")
            writer.write(self.internalSubset)
            writer.write("]")
        writer.write(">\n")
+
class Entity(Identified, Node):
    """An entity declared in the DTD.

    Entity nodes are effectively read-only: although a childNodes list
    is created, every child-mutating method below raises
    HierarchyRequestErr.
    """
    attributes = None
    nodeType = Node.ENTITY_NODE
    nodeValue = None

    # Optional encoding/version information for the entity; None when
    # unknown (copied when a DocumentType is deep-cloned).
    actualEncoding = None
    encoding = None
    version = None

    def __init__(self, name, publicId, systemId, notation):
        self.nodeName = name
        # Name of the associated notation, or None.
        self.notationName = notation
        self.childNodes = NodeList()
        self._identified_mixin_init(publicId, systemId)

    def _get_actualEncoding(self):
        return self.actualEncoding

    def _get_encoding(self):
        return self.encoding

    def _get_version(self):
        return self.version

    def appendChild(self, newChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot append children to an entity node")

    def insertBefore(self, newChild, refChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot insert children below an entity node")

    def removeChild(self, oldChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot remove children from an entity node")

    def replaceChild(self, newChild, oldChild):
        raise xml.dom.HierarchyRequestErr(
            "cannot replace children of an entity node")
+
class Notation(Identified, Childless, Node):
    """DOM Notation node: a notation declared in the document's DTD.

    Exposed through DocumentType.notations; childless and read-only.
    """
    nodeType = Node.NOTATION_NODE
    nodeValue = None

    def __init__(self, name, publicId, systemId):
        self.nodeName = name
        self._identified_mixin_init(publicId, systemId)
+
+
class DOMImplementation(DOMImplementationLS):
    """Factory for Document and DocumentType nodes.

    Also answers hasFeature() queries for the DOM "core"/"xml" features
    (Levels 1-3) and the "ls-load" Load/Save extension.
    """

    # (feature, version) pairs this implementation supports; a None
    # version entry matches queries that pass no/empty version.
    _features = [("core", "1.0"),
                 ("core", "2.0"),
                 ("core", "3.0"),
                 ("core", None),
                 ("xml", "1.0"),
                 ("xml", "2.0"),
                 ("xml", "3.0"),
                 ("xml", None),
                 ("ls-load", "3.0"),
                 ("ls-load", None),
                 ]

    def hasFeature(self, feature, version):
        # An empty version string means "any version" per the DOM spec.
        if version == "":
            version = None
        return (feature.lower(), version) in self._features

    def createDocument(self, namespaceURI, qualifiedName, doctype):
        """Create a new Document.

        If any of namespaceURI/qualifiedName/doctype is non-None, a root
        element named *qualifiedName* is created and *doctype* (if given)
        is attached; otherwise an empty document is returned.
        Raises WrongDocumentErr if *doctype* already belongs to a tree,
        InvalidCharacterErr for a missing element name, and NamespaceErr
        for prefix/namespace mismatches.
        """
        if doctype and doctype.parentNode is not None:
            raise xml.dom.WrongDocumentErr(
                "doctype object owned by another DOM tree")
        doc = self._create_document()

        add_root_element = not (namespaceURI is None
                                and qualifiedName is None
                                and doctype is None)

        if not qualifiedName and add_root_element:
            # The spec is unclear what to raise here; SyntaxErr
            # would be the other obvious candidate. Since Xerces raises
            # InvalidCharacterErr, and since SyntaxErr is not listed
            # for createDocument, that seems to be the better choice.
            # XXX: need to check for illegal characters here and in
            # createElement.

            # DOM Level III clears this up when talking about the return value
            # of this function.  If namespaceURI, qName and DocType are
            # Null the document is returned without a document element
            # Otherwise if doctype or namespaceURI are not None
            # Then we go back to the above problem
            raise xml.dom.InvalidCharacterErr("Element with no name")

        if add_root_element:
            prefix, localname = _nssplit(qualifiedName)
            if prefix == "xml" \
               and namespaceURI != "http://www.w3.org/XML/1998/namespace":
                raise xml.dom.NamespaceErr("illegal use of 'xml' prefix")
            if prefix and not namespaceURI:
                raise xml.dom.NamespaceErr(
                    "illegal use of prefix without namespaces")
            element = doc.createElementNS(namespaceURI, qualifiedName)
            if doctype:
                doc.appendChild(doctype)
            doc.appendChild(element)

        if doctype:
            doctype.parentNode = doctype.ownerDocument = doc

        doc.doctype = doctype
        doc.implementation = self
        return doc

    def createDocumentType(self, qualifiedName, publicId, systemId):
        # The new doctype is unowned until attached via createDocument().
        doctype = DocumentType(qualifiedName)
        doctype.publicId = publicId
        doctype.systemId = systemId
        return doctype

    # DOM Level 3 (WD 9 April 2002)

    def getInterface(self, feature):
        # Return self for any feature we claim to support; there are no
        # specialized interface objects in this implementation.
        if self.hasFeature(feature, None):
            return self
        else:
            return None

    # internal
    def _create_document(self):
        return Document()
+
class ElementInfo(NewStyle):
    """Object that represents content-model information for an element.

    This implementation is not expected to be used in practice; DOM
    builders should provide implementations which do the right thing
    using information available to it.

    """

    __slots__ = 'tagName',

    def __init__(self, name):
        self.tagName = name

    def getAttributeType(self, aname):
        # No DTD information available, so report the "no type" marker.
        return _no_type

    def getAttributeTypeNS(self, namespaceURI, localName):
        return _no_type

    def isElementContent(self):
        """Returns true iff this element is declared to have an
        element content model."""
        return False

    def isEmpty(self):
        """Returns true iff this element is declared to have an EMPTY
        content model."""
        return False

    def isId(self, aname):
        """Returns true iff the named attribute is a DTD-style ID."""
        return False

    def isIdNS(self, namespaceURI, localName):
        """Returns true iff the identified attribute is a DTD-style ID."""
        return False

    # Pickle support: __slots__ classes have no instance __dict__, so the
    # single slot value is serialized directly.
    def __getstate__(self):
        return self.tagName

    def __setstate__(self, state):
        self.tagName = state
+
def _clear_id_cache(node):
    """Invalidate the getElementById() cache of *node*'s document.

    Called when the tree is mutated in a way that may change which
    elements carry ID attributes.  Nodes not attached to a document are
    ignored, since there is no cache to invalidate.
    """
    if node.nodeType == Node.DOCUMENT_NODE:
        doc = node
    elif _in_document(node):
        doc = node.ownerDocument
    else:
        return
    doc._id_cache.clear()
    doc._id_search_stack = None
+
class Document(Node, DocumentLS):
    """DOM Document node: the root of a minidom tree.

    Acts as the factory for all other node types via the create*()
    methods, and carries the Level 3 Load/Save behavior inherited from
    DocumentLS.
    """

    # The only node types the DOM allows directly under a Document.
    _child_node_types = (Node.ELEMENT_NODE, Node.PROCESSING_INSTRUCTION_NODE,
                         Node.COMMENT_NODE, Node.DOCUMENT_TYPE_NODE)

    nodeType = Node.DOCUMENT_NODE
    nodeName = "#document"
    nodeValue = None
    attributes = None
    doctype = None
    parentNode = None
    previousSibling = nextSibling = None

    implementation = DOMImplementation()

    # Document attributes from Level 3 (WD 9 April 2002)

    actualEncoding = None
    encoding = None
    standalone = None
    version = None
    strictErrorChecking = False
    errorHandler = None
    documentURI = None

    # Count of elements given an application-set ("magic") ID via
    # Element.setIdAttribute*(); consulted by getElementById().
    _magic_id_count = 0

    def __init__(self):
        self.childNodes = NodeList()
        # mapping of (namespaceURI, localName) -> ElementInfo
        #        and tagName -> ElementInfo
        self._elem_info = {}
        # Cache of id-value -> element, filled lazily by getElementById().
        self._id_cache = {}
        # Resumable depth-first search state for getElementById();
        # None means a search must (re)start from the document element.
        self._id_search_stack = None

    def _get_elem_info(self, element):
        # Look up content-model info: keyed by (namespaceURI, localName)
        # for namespaced elements, by tagName otherwise.
        if element.namespaceURI:
            key = element.namespaceURI, element.localName
        else:
            key = element.tagName
        return self._elem_info.get(key)

    # Accessors backing the computed-attribute protocol (see defproperty()).

    def _get_actualEncoding(self):
        return self.actualEncoding

    def _get_doctype(self):
        return self.doctype

    def _get_documentURI(self):
        return self.documentURI

    def _get_encoding(self):
        return self.encoding

    def _get_errorHandler(self):
        return self.errorHandler

    def _get_standalone(self):
        return self.standalone

    def _get_strictErrorChecking(self):
        return self.strictErrorChecking

    def _get_version(self):
        return self.version

    def appendChild(self, node):
        """Append *node*, enforcing Document's child-type restrictions and
        the at-most-one-document-element rule."""
        if node.nodeType not in self._child_node_types:
            raise xml.dom.HierarchyRequestErr(
                "%s cannot be child of %s" % (repr(node), repr(self)))
        if node.parentNode is not None:
            # This needs to be done before the next test since this
            # may *be* the document element, in which case it should
            # end up re-ordered to the end.
            node.parentNode.removeChild(node)

        if node.nodeType == Node.ELEMENT_NODE \
           and self._get_documentElement():
            raise xml.dom.HierarchyRequestErr(
                "two document elements disallowed")
        return Node.appendChild(self, node)

    def removeChild(self, oldChild):
        """Detach and return *oldChild*; raises NotFoundErr if it is not
        a child of this document."""
        try:
            self.childNodes.remove(oldChild)
        except ValueError:
            raise xml.dom.NotFoundErr()
        oldChild.nextSibling = oldChild.previousSibling = None
        oldChild.parentNode = None
        if self.documentElement is oldChild:
            self.documentElement = None

        return oldChild

    def _get_documentElement(self):
        # First element child, if any (falls through to None otherwise).
        for node in self.childNodes:
            if node.nodeType == Node.ELEMENT_NODE:
                return node

    def unlink(self):
        """Break internal reference cycles so the tree can be collected."""
        if self.doctype is not None:
            self.doctype.unlink()
            self.doctype = None
        Node.unlink(self)

    def cloneNode(self, deep):
        """Deep-copy this document; a shallow copy is meaningless for a
        Document, so a false *deep* returns None."""
        if not deep:
            return None
        clone = self.implementation.createDocument(None, None, None)
        clone.encoding = self.encoding
        clone.standalone = self.standalone
        clone.version = self.version
        for n in self.childNodes:
            childclone = _clone_node(n, deep, clone)
            assert childclone.ownerDocument.isSameNode(clone)
            clone.childNodes.append(childclone)
            if childclone.nodeType == Node.DOCUMENT_NODE:
                assert clone.documentElement is None
            elif childclone.nodeType == Node.DOCUMENT_TYPE_NODE:
                assert clone.doctype is None
                clone.doctype = childclone
            childclone.parentNode = clone
        self._call_user_data_handler(xml.dom.UserDataHandler.NODE_CLONED,
                                     self, clone)
        return clone

    # Node factories.  Each created node is owned by this document but
    # not attached to the tree until the caller inserts it.

    def createDocumentFragment(self):
        d = DocumentFragment()
        d.ownerDocument = self
        return d

    def createElement(self, tagName):
        e = Element(tagName)
        e.ownerDocument = self
        return e

    def createTextNode(self, data):
        if not isinstance(data, StringTypes):
            raise TypeError, "node contents must be a string"
        t = Text()
        t.data = data
        t.ownerDocument = self
        return t

    def createCDATASection(self, data):
        if not isinstance(data, StringTypes):
            raise TypeError, "node contents must be a string"
        c = CDATASection()
        c.data = data
        c.ownerDocument = self
        return c

    def createComment(self, data):
        c = Comment(data)
        c.ownerDocument = self
        return c

    def createProcessingInstruction(self, target, data):
        p = ProcessingInstruction(target, data)
        p.ownerDocument = self
        return p

    def createAttribute(self, qName):
        a = Attr(qName)
        a.ownerDocument = self
        a.value = ""
        return a

    def createElementNS(self, namespaceURI, qualifiedName):
        prefix, localName = _nssplit(qualifiedName)
        e = Element(qualifiedName, namespaceURI, prefix)
        e.ownerDocument = self
        return e

    def createAttributeNS(self, namespaceURI, qualifiedName):
        prefix, localName = _nssplit(qualifiedName)
        a = Attr(qualifiedName, namespaceURI, localName, prefix)
        a.ownerDocument = self
        a.value = ""
        return a

    # A couple of implementation-specific helpers to create node types
    # not supported by the W3C DOM specs:

    def _create_entity(self, name, publicId, systemId, notationName):
        e = Entity(name, publicId, systemId, notationName)
        e.ownerDocument = self
        return e

    def _create_notation(self, name, publicId, systemId):
        n = Notation(name, publicId, systemId)
        n.ownerDocument = self
        return n

    def getElementById(self, id):
        """Return the element whose ID-typed attribute equals *id*, else None.

        Results found along the way are cached in _id_cache, and the
        depth-first traversal state is kept in _id_search_stack so a later
        lookup resumes where the previous one stopped.
        """
        if self._id_cache.has_key(id):
            return self._id_cache[id]
        if not (self._elem_info or self._magic_id_count):
            # No DTD info and no setIdAttribute*() calls were made, so
            # nothing can be an ID attribute.
            return None

        stack = self._id_search_stack
        if stack is None:
            # we never searched before, or the cache has been cleared
            stack = [self.documentElement]
            self._id_search_stack = stack
        elif not stack:
            # Previous search was completed and cache is still valid;
            # no matching node.
            return None

        result = None
        while stack:
            node = stack.pop()
            # add child elements to stack for continued searching
            stack.extend([child for child in node.childNodes
                          if child.nodeType in _nodeTypes_with_children])
            # check this node
            info = self._get_elem_info(node)
            if info:
                # We have to process all ID attributes before
                # returning in order to get all the attributes set to
                # be IDs using Element.setIdAttribute*().
                for attr in node.attributes.values():
                    if attr.namespaceURI:
                        if info.isIdNS(attr.namespaceURI, attr.localName):
                            self._id_cache[attr.value] = node
                            if attr.value == id:
                                result = node
                            elif not node._magic_id_nodes:
                                break
                    elif info.isId(attr.name):
                        self._id_cache[attr.value] = node
                        if attr.value == id:
                            result = node
                        elif not node._magic_id_nodes:
                            break
                    elif attr._is_id:
                        self._id_cache[attr.value] = node
                        if attr.value == id:
                            result = node
                        elif node._magic_id_nodes == 1:
                            break
            elif node._magic_id_nodes:
                # No DTD info, but the application marked attributes on
                # this node as IDs via setIdAttribute*().
                for attr in node.attributes.values():
                    if attr._is_id:
                        self._id_cache[attr.value] = node
                        if attr.value == id:
                            result = node
            if result is not None:
                break
        return result

    def getElementsByTagName(self, name):
        return _get_elements_by_tagName_helper(self, name, NodeList())

    def getElementsByTagNameNS(self, namespaceURI, localName):
        return _get_elements_by_tagName_ns_helper(
            self, namespaceURI, localName, NodeList())

    def isSupported(self, feature, version):
        return self.implementation.hasFeature(feature, version)

    def importNode(self, node, deep):
        """Copy a node from another document into this one (DOM Level 2).
        Document and DocumentType nodes cannot be imported."""
        if node.nodeType == Node.DOCUMENT_NODE:
            raise xml.dom.NotSupportedErr("cannot import document nodes")
        elif node.nodeType == Node.DOCUMENT_TYPE_NODE:
            raise xml.dom.NotSupportedErr("cannot import document type nodes")
        return _clone_node(node, deep, self)

    def writexml(self, writer, indent="", addindent="", newl="",
                 encoding = None):
        """Write the document as XML, starting with an XML declaration."""
        if encoding is None:
            writer.write('<?xml version="1.0" ?>\n')
        else:
            writer.write('<?xml version="1.0" encoding="%s"?>\n' % encoding)
        for node in self.childNodes:
            node.writexml(writer, indent, addindent, newl)

    # DOM Level 3 (WD 9 April 2002)

    def renameNode(self, n, namespaceURI, name):
        """Rename an element or attribute node of this document in place.

        Raises WrongDocumentErr for nodes owned elsewhere, NotSupportedErr
        for other node types, and NamespaceErr for illegal 'xmlns' usage.
        Returns the (same) renamed node.
        """
        if n.ownerDocument is not self:
            raise xml.dom.WrongDocumentErr(
                "cannot rename nodes from other documents;\n"
                "expected %s,\nfound %s" % (self, n.ownerDocument))
        if n.nodeType not in (Node.ELEMENT_NODE, Node.ATTRIBUTE_NODE):
            raise xml.dom.NotSupportedErr(
                "renameNode() only applies to element and attribute nodes")
        if namespaceURI != EMPTY_NAMESPACE:
            if ':' in name:
                prefix, localName = name.split(':', 1)
                if (  prefix == "xmlns"
                      and namespaceURI != xml.dom.XMLNS_NAMESPACE):
                    raise xml.dom.NamespaceErr(
                        "illegal use of 'xmlns' prefix")
            else:
                if (  name == "xmlns"
                      and namespaceURI != xml.dom.XMLNS_NAMESPACE
                      and n.nodeType == Node.ATTRIBUTE_NODE):
                    raise xml.dom.NamespaceErr(
                        "illegal use of the 'xmlns' attribute")
                prefix = None
                localName = name
        else:
            prefix = None
            localName = None
        if n.nodeType == Node.ATTRIBUTE_NODE:
            # Detach the attribute from its element while renaming; it is
            # re-attached (with its ID-ness restored) below.
            element = n.ownerElement
            if element is not None:
                is_id = n._is_id
                element.removeAttributeNode(n)
        else:
            element = None
        # avoid __setattr__
        d = n.__dict__
        d['prefix'] = prefix
        d['localName'] = localName
        d['namespaceURI'] = namespaceURI
        d['nodeName'] = name
        if n.nodeType == Node.ELEMENT_NODE:
            d['tagName'] = name
        else:
            # attribute node
            d['name'] = name
            if element is not None:
                element.setAttributeNode(n)
                if is_id:
                    element.setIdAttributeNode(n)
        # It's not clear from a semantic perspective whether we should
        # call the user data handlers for the NODE_RENAMED event since
        # we're re-using the existing node.  The draft spec has been
        # interpreted as meaning "no, don't call the handler unless a
        # new node is created."
        return n
+
# Expose "documentElement" as a read-only computed attribute on Document,
# backed by Document._get_documentElement().
defproperty(Document, "documentElement",
            doc="Top-level element of this document.")
+
+
def _clone_node(node, deep, newOwnerDocument):
    """
    Clone a node and give it the new owner document.
    Called by Node.cloneNode and Document.importNode

    Arguments:
      node -- the node to copy
      deep -- if true, recursively clone children (and, for doctypes,
              entities and notations)
      newOwnerDocument -- the document that will own the clone

    Returns the new node.  Document nodes (and any unrecognized node
    type) raise NotSupportedErr; minidom handles those in the nodes' own
    cloneNode() methods instead.
    """
    # Cloning within the same document fires NODE_CLONED user-data
    # events; crossing documents fires NODE_IMPORTED.
    if node.ownerDocument.isSameNode(newOwnerDocument):
        operation = xml.dom.UserDataHandler.NODE_CLONED
    else:
        operation = xml.dom.UserDataHandler.NODE_IMPORTED
    if node.nodeType == Node.ELEMENT_NODE:
        clone = newOwnerDocument.createElementNS(node.namespaceURI,
                                                 node.nodeName)
        for attr in node.attributes.values():
            clone.setAttributeNS(attr.namespaceURI, attr.nodeName, attr.value)
            # Preserve the original 'specified' flag, which setAttributeNS()
            # has just overwritten.
            a = clone.getAttributeNodeNS(attr.namespaceURI, attr.localName)
            a.specified = attr.specified

        if deep:
            for child in node.childNodes:
                c = _clone_node(child, deep, newOwnerDocument)
                clone.appendChild(c)

    elif node.nodeType == Node.DOCUMENT_FRAGMENT_NODE:
        clone = newOwnerDocument.createDocumentFragment()
        if deep:
            for child in node.childNodes:
                c = _clone_node(child, deep, newOwnerDocument)
                clone.appendChild(c)

    elif node.nodeType == Node.TEXT_NODE:
        clone = newOwnerDocument.createTextNode(node.data)
    elif node.nodeType == Node.CDATA_SECTION_NODE:
        clone = newOwnerDocument.createCDATASection(node.data)
    elif node.nodeType == Node.PROCESSING_INSTRUCTION_NODE:
        clone = newOwnerDocument.createProcessingInstruction(node.target,
                                                             node.data)
    elif node.nodeType == Node.COMMENT_NODE:
        clone = newOwnerDocument.createComment(node.data)
    elif node.nodeType == Node.ATTRIBUTE_NODE:
        clone = newOwnerDocument.createAttributeNS(node.namespaceURI,
                                                   node.nodeName)
        clone.specified = True
        clone.value = node.value
    elif node.nodeType == Node.DOCUMENT_TYPE_NODE:
        assert node.ownerDocument is not newOwnerDocument
        operation = xml.dom.UserDataHandler.NODE_IMPORTED
        clone = newOwnerDocument.implementation.createDocumentType(
            node.name, node.publicId, node.systemId)
        clone.ownerDocument = newOwnerDocument
        if deep:
            clone.entities._seq = []
            clone.notations._seq = []
            for n in node.notations._seq:
                notation = Notation(n.nodeName, n.publicId, n.systemId)
                notation.ownerDocument = newOwnerDocument
                clone.notations._seq.append(notation)
                if hasattr(n, '_call_user_data_handler'):
                    n._call_user_data_handler(operation, n, notation)
            for e in node.entities._seq:
                entity = Entity(e.nodeName, e.publicId, e.systemId,
                                e.notationName)
                entity.actualEncoding = e.actualEncoding
                entity.encoding = e.encoding
                entity.version = e.version
                entity.ownerDocument = newOwnerDocument
                clone.entities._seq.append(entity)
                # Bug fix: the source node of this user-data event is the
                # entity being cloned ('e'), not 'n' -- 'n' is a leftover
                # from the notation loop above and is unbound (NameError)
                # when the DTD declares entities but no notations.
                if hasattr(e, '_call_user_data_handler'):
                    e._call_user_data_handler(operation, e, entity)
    else:
        # Note the cloning of Document and DocumentType nodes is
        # implementation specific.  minidom handles those cases
        # directly in the cloneNode() methods.
        raise xml.dom.NotSupportedErr("Cannot clone node %s" % repr(node))

    # Check for _call_user_data_handler() since this could conceivably
    # be used with other DOM implementations (one of the FourThought
    # DOMs, perhaps?).
    if hasattr(node, '_call_user_data_handler'):
        node._call_user_data_handler(operation, node, clone)
    return clone
+
+
+def _nssplit(qualifiedName):
+    fields = qualifiedName.split(':', 1)
+    if len(fields) == 2:
+        return fields
+    else:
+        return (None, fields[0])
+
+
def _get_StringIO():
    """Return a fresh string buffer for assembling XML output.

    The pure-Python StringIO module is used deliberately: cStringIO
    cannot hold Unicode strings.
    """
    import StringIO
    return StringIO.StringIO()
+
+def _do_pulldom_parse(func, args, kwargs):
+    events = func(*args, **kwargs)
+    toktype, rootNode = events.getEvent()
+    events.expandNode(rootNode)
+    events.clear()
+    return rootNode
+
def parse(file, parser=None, bufsize=None):
    """Parse a file into a DOM by filename or file object.

    With the default arguments the fast expat builder is used; supplying
    a SAX *parser* or a *bufsize* routes the work through pulldom
    instead.
    """
    if parser is not None or bufsize:
        from xml.dom import pulldom
        return _do_pulldom_parse(pulldom.parse, (file,),
                                 {'parser': parser, 'bufsize': bufsize})
    from xml.dom import expatbuilder
    return expatbuilder.parse(file)
+
def parseString(string, parser=None):
    """Build a DOM tree from an XML string.

    Uses the fast expat builder unless an explicit SAX *parser* is
    supplied, in which case pulldom drives the parse.
    """
    if parser is not None:
        from xml.dom import pulldom
        return _do_pulldom_parse(pulldom.parseString, (string,),
                                 {'parser': parser})
    from xml.dom import expatbuilder
    return expatbuilder.parseString(string)
+
def getDOMImplementation(features=None):
    """Return minidom's DOMImplementation singleton.

    If *features* is given (a feature string or a sequence of
    (feature, version) pairs), return None unless every requested
    feature is supported.
    """
    impl = Document.implementation
    if not features:
        return impl
    if isinstance(features, StringTypes):
        features = domreg._parse_feature_string(features)
    for feature, version in features:
        if not impl.hasFeature(feature, version):
            return None
    return impl
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/pulldom.py b/depot_tools/release/win/python_24/Lib/xml/dom/pulldom.py
new file mode 100644
index 0000000..18f49b501
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/pulldom.py
@@ -0,0 +1,351 @@
+import xml.sax
+import xml.sax.handler
+import types
+
# types.UnicodeType does not exist on interpreters built without Unicode
# support; fall back to plain strings only in that case.
try:
    _StringTypes = [types.StringType, types.UnicodeType]
except AttributeError:
    _StringTypes = [types.StringType]
+
# Event-type tokens: each queued event is a (token, node) pair, with one
# of these strings as the token.
START_ELEMENT = "START_ELEMENT"
END_ELEMENT = "END_ELEMENT"
COMMENT = "COMMENT"
START_DOCUMENT = "START_DOCUMENT"
END_DOCUMENT = "END_DOCUMENT"
PROCESSING_INSTRUCTION = "PROCESSING_INSTRUCTION"
IGNORABLE_WHITESPACE = "IGNORABLE_WHITESPACE"
CHARACTERS = "CHARACTERS"
+
class PullDOM(xml.sax.ContentHandler):
    """SAX ContentHandler that converts SAX callbacks into a queue of
    pulldom events, building DOM nodes as it goes.

    The event queue is a singly linked list of [event, next] cells rooted
    at firstEvent; lastEvent always references the final cell, so
    appending is O(1) and the consumer (DOMEventStream) pops from the
    front.
    """
    _locator = None
    document = None

    def __init__(self, documentFactory=None):
        from xml.dom import XML_NAMESPACE
        # Factory used to create the Document; defaults to minidom's
        # implementation (see startDocument()).
        self.documentFactory = documentFactory
        self.firstEvent = [None, None]
        self.lastEvent = self.firstEvent
        self.elementStack = []
        self.push = self.elementStack.append
        try:
            self.pop = self.elementStack.pop
        except AttributeError:
            # use class' pop instead
            pass
        self._ns_contexts = [{XML_NAMESPACE:'xml'}] # contains uri -> prefix dicts
        self._current_context = self._ns_contexts[-1]
        # Comments/PIs seen before the document node exists are parked
        # here and replayed by buildDocument().
        self.pending_events = []

    def pop(self):
        # Fallback used when the list type has no bound pop() method.
        result = self.elementStack[-1]
        del self.elementStack[-1]
        return result

    def setDocumentLocator(self, locator):
        self._locator = locator

    def startPrefixMapping(self, prefix, uri):
        # Record the declaration so startElementNS() can materialize it
        # as an xmlns attribute, and push a new namespace context.
        if not hasattr(self, '_xmlns_attrs'):
            self._xmlns_attrs = []
        self._xmlns_attrs.append((prefix or 'xmlns', uri))
        self._ns_contexts.append(self._current_context.copy())
        self._current_context[uri] = prefix or None

    def endPrefixMapping(self, prefix):
        self._current_context = self._ns_contexts.pop()

    def startElementNS(self, name, tagName , attrs):
        # Retrieve xml namespace declaration attributes.
        xmlns_uri = 'http://www.w3.org/2000/xmlns/'
        xmlns_attrs = getattr(self, '_xmlns_attrs', None)
        if xmlns_attrs is not None:
            for aname, value in xmlns_attrs:
                attrs._attrs[(xmlns_uri, aname)] = value
            self._xmlns_attrs = []
        uri, localname = name
        if uri:
            # When using namespaces, the reader may or may not
            # provide us with the original name. If not, create
            # *a* valid tagName from the current context.
            if tagName is None:
                prefix = self._current_context[uri]
                if prefix:
                    tagName = prefix + ":" + localname
                else:
                    tagName = localname
            if self.document:
                node = self.document.createElementNS(uri, tagName)
            else:
                node = self.buildDocument(uri, tagName)
        else:
            # When the tagname is not prefixed, it just appears as
            # localname
            if self.document:
                node = self.document.createElement(localname)
            else:
                node = self.buildDocument(None, localname)

        for aname,value in attrs.items():
            a_uri, a_localname = aname
            if a_uri == xmlns_uri:
                # Namespace declaration: qualified name is 'xmlns' or
                # 'xmlns:prefix'.
                if a_localname == 'xmlns':
                    qname = a_localname
                else:
                    qname = 'xmlns:' + a_localname
                attr = self.document.createAttributeNS(a_uri, qname)
                node.setAttributeNodeNS(attr)
            elif a_uri:
                prefix = self._current_context[a_uri]
                if prefix:
                    qname = prefix + ":" + a_localname
                else:
                    qname = a_localname
                attr = self.document.createAttributeNS(a_uri, qname)
                node.setAttributeNodeNS(attr)
            else:
                attr = self.document.createAttribute(a_localname)
                node.setAttributeNode(attr)
            attr.value = value

        self.lastEvent[1] = [(START_ELEMENT, node), None]
        self.lastEvent = self.lastEvent[1]
        self.push(node)

    def endElementNS(self, name, tagName):
        self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
        self.lastEvent = self.lastEvent[1]

    def startElement(self, name, attrs):
        # Non-namespace variant of startElementNS().
        if self.document:
            node = self.document.createElement(name)
        else:
            node = self.buildDocument(None, name)

        for aname,value in attrs.items():
            attr = self.document.createAttribute(aname)
            attr.value = value
            node.setAttributeNode(attr)

        self.lastEvent[1] = [(START_ELEMENT, node), None]
        self.lastEvent = self.lastEvent[1]
        self.push(node)

    def endElement(self, name):
        self.lastEvent[1] = [(END_ELEMENT, self.pop()), None]
        self.lastEvent = self.lastEvent[1]

    def comment(self, s):
        if self.document:
            node = self.document.createComment(s)
            self.lastEvent[1] = [(COMMENT, node), None]
            self.lastEvent = self.lastEvent[1]
        else:
            # No document yet: park the raw text for buildDocument().
            event = [(COMMENT, s), None]
            self.pending_events.append(event)

    def processingInstruction(self, target, data):
        if self.document:
            node = self.document.createProcessingInstruction(target, data)
            self.lastEvent[1] = [(PROCESSING_INSTRUCTION, node), None]
            self.lastEvent = self.lastEvent[1]
        else:
            # No document yet: park target/data for buildDocument().
            event = [(PROCESSING_INSTRUCTION, target, data), None]
            self.pending_events.append(event)

    def ignorableWhitespace(self, chars):
        node = self.document.createTextNode(chars)
        self.lastEvent[1] = [(IGNORABLE_WHITESPACE, node), None]
        self.lastEvent = self.lastEvent[1]

    def characters(self, chars):
        node = self.document.createTextNode(chars)
        self.lastEvent[1] = [(CHARACTERS, node), None]
        self.lastEvent = self.lastEvent[1]

    def startDocument(self):
        if self.documentFactory is None:
            import xml.dom.minidom
            self.documentFactory = xml.dom.minidom.Document.implementation

    def buildDocument(self, uri, tagname):
        # Can't do that in startDocument, since we need the tagname
        # XXX: obtain DocumentType
        node = self.documentFactory.createDocument(uri, tagname, None)
        self.document = node
        self.lastEvent[1] = [(START_DOCUMENT, node), None]
        self.lastEvent = self.lastEvent[1]
        self.push(node)
        # Put everything we have seen so far into the document
        for e in self.pending_events:
            if e[0][0] == PROCESSING_INSTRUCTION:
                _,target,data = e[0]
                n = self.document.createProcessingInstruction(target, data)
                e[0] = (PROCESSING_INSTRUCTION, n)
            elif e[0][0] == COMMENT:
                n = self.document.createComment(e[0][1])
                e[0] = (COMMENT, n)
            else:
                raise AssertionError("Unknown pending event ",e[0][0])
            self.lastEvent[1] = e
            self.lastEvent = e
        self.pending_events = None
        # Return the root element so the caller can use it directly.
        return node.firstChild

    def endDocument(self):
        self.lastEvent[1] = [(END_DOCUMENT, self.document), None]
        self.pop()

    def clear(self):
        "clear(): Explicitly release parsing structures"
        self.document = None
+
class ErrorHandler:
    """Default SAX error handler: report warnings, re-raise everything
    more serious so the parse stops."""

    def warning(self, exception):
        # Non-fatal condition: report it and let parsing continue.
        print(exception)

    def error(self, exception):
        raise exception

    def fatalError(self, exception):
        raise exception
+
class DOMEventStream:
    """Stream of (event, node) pairs pulled on demand from a SAX parser.

    When the parser supports the IncrementalParser interface (a feed()
    method), input is consumed bufsize bytes at a time as events are
    requested.  Otherwise the whole stream is parsed up front (_slurp)
    and the buffered events are replayed (_emit).
    """

    def __init__(self, stream, parser, bufsize):
        self.stream = stream
        self.parser = parser
        self.bufsize = bufsize
        # Without feed() we cannot parse incrementally; fall back to
        # parsing everything in one shot.
        if not hasattr(self.parser, 'feed'):
            self.getEvent = self._slurp
        self.reset()

    def reset(self):
        # Fresh PullDOM content handler; events accumulate in its
        # firstEvent/lastEvent linked list.
        self.pulldom = PullDOM()
        # This content handler relies on namespace support
        self.parser.setFeature(xml.sax.handler.feature_namespaces, 1)
        self.parser.setContentHandler(self.pulldom)

    def __getitem__(self, pos):
        # Old-style sequence iteration protocol: the position argument is
        # ignored; events are simply handed out until exhaustion.
        rc = self.getEvent()
        if rc:
            return rc
        raise IndexError

    def next(self):
        # Iterator protocol (Python 2 spelling).
        rc = self.getEvent()
        if rc:
            return rc
        raise StopIteration

    def __iter__(self):
        return self

    def expandNode(self, node):
        """Consume events until 'node' is complete, attaching every
        intervening node as one of its descendants."""
        event = self.getEvent()
        parents = [node]
        while event:
            token, cur_node = event
            # Seeing 'node' again means its END_ELEMENT arrived: done.
            if cur_node is node:
                return
            if token != END_ELEMENT:
                parents[-1].appendChild(cur_node)
            if token == START_ELEMENT:
                parents.append(cur_node)
            elif token == END_ELEMENT:
                del parents[-1]
            event = self.getEvent()

    def getEvent(self):
        """Return the next (event, node) pair, or None at end of input."""
        # use IncrementalParser interface, so we get the desired
        # pull effect
        # If the queue is empty, rewind lastEvent to the sentinel cell so
        # newly parsed events are linked where we will look for them.
        if not self.pulldom.firstEvent[1]:
            self.pulldom.lastEvent = self.pulldom.firstEvent
        while not self.pulldom.firstEvent[1]:
            buf = self.stream.read(self.bufsize)
            if not buf:
                self.parser.close()
                return None
            self.parser.feed(buf)
        # Pop the head cell of the [event, next] linked list.
        rc = self.pulldom.firstEvent[1][0]
        self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1]
        return rc

    def _slurp(self):
        """ Fallback replacement for getEvent() using the
            standard SAX2 interface, which means we slurp the
            SAX events into memory (no performance gain, but
            we are compatible to all SAX parsers).
        """
        self.parser.parse(self.stream)
        # Everything is buffered now; subsequent calls just replay.
        self.getEvent = self._emit
        return self._emit()

    def _emit(self):
        """ Fallback replacement for getEvent() that emits
            the events that _slurp() read previously.
        """
        rc = self.pulldom.firstEvent[1][0]
        self.pulldom.firstEvent[1] = self.pulldom.firstEvent[1][1]
        return rc

    def clear(self):
        """clear(): Explicitly release parsing objects"""
        self.pulldom.clear()
        del self.pulldom
        self.parser = None
        self.stream = None
+
class SAX2DOM(PullDOM):
    """Content handler that builds a fully connected DOM tree.

    PullDOM creates nodes but leaves them unattached; each handler here
    delegates to PullDOM and then immediately links the freshly created
    node to its parent.
    """

    def _append_open_element(self):
        # The element just opened is on top of the stack; its parent sits
        # directly beneath it.
        self.elementStack[-2].appendChild(self.elementStack[-1])

    def _append_last_node(self):
        # The node PullDOM created last is recorded in the most recent
        # event; its parent is the innermost open element.
        self.elementStack[-1].appendChild(self.lastEvent[0][1])

    def startElementNS(self, name, tagName, attrs):
        PullDOM.startElementNS(self, name, tagName, attrs)
        self._append_open_element()

    def startElement(self, name, attrs):
        PullDOM.startElement(self, name, attrs)
        self._append_open_element()

    def processingInstruction(self, target, data):
        PullDOM.processingInstruction(self, target, data)
        self._append_last_node()

    def ignorableWhitespace(self, chars):
        PullDOM.ignorableWhitespace(self, chars)
        self._append_last_node()

    def characters(self, chars):
        PullDOM.characters(self, chars)
        self._append_last_node()
+
+
# Default read size: just under 16 KiB.
default_bufsize = (2 ** 14) - 20

def parse(stream_or_string, parser=None, bufsize=None):
    """Parse a file-like object or a filename into a DOMEventStream.

    A default SAX parser is created when none is given.  NOTE(review):
    when a filename is passed, the file opened here is never explicitly
    closed -- DOMEventStream.clear() only drops the reference.
    """
    if bufsize is None:
        bufsize = default_bufsize
    if type(stream_or_string) in _StringTypes:
        stream = open(stream_or_string)
    else:
        stream = stream_or_string
    if not parser:
        parser = xml.sax.make_parser()
    return DOMEventStream(stream, parser, bufsize)
+
def parseString(string, parser=None):
    """Parse an in-memory XML document into a DOMEventStream.

    A default SAX parser is created when none is supplied.
    """
    # Prefer the C implementation of StringIO when available.
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    if not parser:
        parser = xml.sax.make_parser()
    # The document length is used as the buffer size, so a single read
    # delivers the whole string.
    return DOMEventStream(StringIO(string), parser, len(string))
diff --git a/depot_tools/release/win/python_24/Lib/xml/dom/xmlbuilder.py b/depot_tools/release/win/python_24/Lib/xml/dom/xmlbuilder.py
new file mode 100644
index 0000000..5c044128
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/dom/xmlbuilder.py
@@ -0,0 +1,388 @@
+"""Implementation of the DOM Level 3 'LS-Load' feature."""
+
+import copy
+import xml.dom
+
+from xml.dom.minicompat import *
+
+from xml.dom.NodeFilter import NodeFilter
+
+
+__all__ = ["DOMBuilder", "DOMEntityResolver", "DOMInputSource"]
+
+
class Options:
    """Features object that has variables set for each DOMBuilder feature.

    The DOMBuilder class uses an instance of this class to pass settings to
    the ExpatBuilder class.
    """

    # Note that the DOMBuilder class in LoadSave constrains which of these
    # values can be set using the DOM Level 3 LoadSave feature.

    namespaces = 1                  # NOTE(review): int where the rest are bools
    namespace_declarations = True
    validation = False
    external_parameter_entities = True
    external_general_entities = True
    external_dtd_subset = True
    validate_if_schema = False
    validate = False
    datatype_normalization = False
    create_entity_ref_nodes = True
    entities = True
    whitespace_in_element_content = True
    cdata_sections = True
    comments = True
    charset_overrides_xml_encoding = True
    infoset = False
    supported_mediatypes_only = False

    # Collaborators copied in per-parse by DOMBuilder.parse().
    errorHandler = None
    filter = None
+
+
class DOMBuilder:
    """DOM Level 3 'LS-Load' builder: parses XML input into DOM documents.

    Feature settings are accumulated on an Options instance and handed
    to xml.dom.expatbuilder at parse time.
    """

    entityResolver = None       # optional DOMEntityResolver
    errorHandler = None         # copied onto the per-parse options
    filter = None               # optional DOMBuilderFilter

    # Actions accepted by parseWithContext().
    ACTION_REPLACE = 1
    ACTION_APPEND_AS_CHILDREN = 2
    ACTION_INSERT_AFTER = 3
    ACTION_INSERT_BEFORE = 4

    _legal_actions = (ACTION_REPLACE, ACTION_APPEND_AS_CHILDREN,
                      ACTION_INSERT_AFTER, ACTION_INSERT_BEFORE)

    def __init__(self):
        self._options = Options()

    # DOM-style accessor pairs for the configurable collaborators.
    def _get_entityResolver(self):
        return self.entityResolver
    def _set_entityResolver(self, entityResolver):
        self.entityResolver = entityResolver

    def _get_errorHandler(self):
        return self.errorHandler
    def _set_errorHandler(self, errorHandler):
        self.errorHandler = errorHandler

    def _get_filter(self):
        return self.filter
    def _set_filter(self, filter):
        self.filter = filter

    def setFeature(self, name, state):
        """Turn a DOM LS feature on or off.

        Raises NotSupportedErr when a known feature cannot take the
        requested state, NotFoundErr for an unknown feature.
        """
        if self.supportsFeature(name):
            state = state and 1 or 0
            try:
                settings = self._settings[(_name_xform(name), state)]
            except KeyError:
                raise xml.dom.NotSupportedErr(
                    "unsupported feature: %r" % (name,))
            else:
                # Loop variable renamed from 'name' so it no longer
                # shadows the feature-name parameter.
                for option, value in settings:
                    setattr(self._options, option, value)
        else:
            raise xml.dom.NotFoundErr("unknown feature: " + repr(name))

    def supportsFeature(self, name):
        """Return true if the feature name maps to a known option."""
        return hasattr(self._options, _name_xform(name))

    def canSetFeature(self, name, state):
        """Return true if the (feature, state) combination is settable."""
        key = (_name_xform(name), state and 1 or 0)
        return key in self._settings

    # This dictionary maps from (feature,value) to a list of
    # (option,value) pairs that should be set on the Options object.
    # If a (feature,value) setting is not in this dictionary, it is
    # not supported by the DOMBuilder.
    #
    _settings = {
        ("namespace_declarations", 0): [
            ("namespace_declarations", 0)],
        ("namespace_declarations", 1): [
            ("namespace_declarations", 1)],
        ("validation", 0): [
            ("validation", 0)],
        ("external_general_entities", 0): [
            ("external_general_entities", 0)],
        ("external_general_entities", 1): [
            ("external_general_entities", 1)],
        ("external_parameter_entities", 0): [
            ("external_parameter_entities", 0)],
        ("external_parameter_entities", 1): [
            ("external_parameter_entities", 1)],
        ("validate_if_schema", 0): [
            ("validate_if_schema", 0)],
        ("create_entity_ref_nodes", 0): [
            ("create_entity_ref_nodes", 0)],
        ("create_entity_ref_nodes", 1): [
            ("create_entity_ref_nodes", 1)],
        ("entities", 0): [
            ("create_entity_ref_nodes", 0),
            ("entities", 0)],
        ("entities", 1): [
            ("entities", 1)],
        ("whitespace_in_element_content", 0): [
            ("whitespace_in_element_content", 0)],
        ("whitespace_in_element_content", 1): [
            ("whitespace_in_element_content", 1)],
        ("cdata_sections", 0): [
            ("cdata_sections", 0)],
        ("cdata_sections", 1): [
            ("cdata_sections", 1)],
        ("comments", 0): [
            ("comments", 0)],
        ("comments", 1): [
            ("comments", 1)],
        ("charset_overrides_xml_encoding", 0): [
            ("charset_overrides_xml_encoding", 0)],
        ("charset_overrides_xml_encoding", 1): [
            ("charset_overrides_xml_encoding", 1)],
        ("infoset", 0): [],
        ("infoset", 1): [
            ("namespace_declarations", 0),
            ("validate_if_schema", 0),
            ("create_entity_ref_nodes", 0),
            ("entities", 0),
            ("cdata_sections", 0),
            ("datatype_normalization", 1),
            ("whitespace_in_element_content", 1),
            ("comments", 1),
            ("charset_overrides_xml_encoding", 1)],
        ("supported_mediatypes_only", 0): [
            ("supported_mediatypes_only", 0)],
        ("namespaces", 0): [
            ("namespaces", 0)],
        ("namespaces", 1): [
            ("namespaces", 1)],
    }

    def getFeature(self, name):
        """Return a feature's current value.

        'infoset' has no stored option of its own; it is synthesized
        from the options it controls.  Unknown names raise NotFoundErr.
        """
        xname = _name_xform(name)
        try:
            return getattr(self._options, xname)
        except AttributeError:
            if name == "infoset":
                options = self._options
                return (options.datatype_normalization
                        and options.whitespace_in_element_content
                        and options.comments
                        and options.charset_overrides_xml_encoding
                        and not (options.namespace_declarations
                                 or options.validate_if_schema
                                 or options.create_entity_ref_nodes
                                 or options.entities
                                 or options.cdata_sections))
            raise xml.dom.NotFoundErr("feature %s not known" % repr(name))

    def parseURI(self, uri):
        """Resolve a URI to an input source and parse it."""
        if self.entityResolver:
            input = self.entityResolver.resolveEntity(None, uri)
        else:
            input = DOMEntityResolver().resolveEntity(None, uri)
        return self.parse(input)

    def parse(self, input):
        """Parse a DOMInputSource and return the resulting document."""
        options = copy.copy(self._options)
        options.filter = self.filter
        options.errorHandler = self.errorHandler
        fp = input.byteStream
        # Bug fix: the system id lives on the input source -- Options has
        # no systemId attribute, so the previous 'options.systemId'
        # raised AttributeError whenever byteStream was missing.
        if fp is None and input.systemId:
            import urllib2
            fp = urllib2.urlopen(input.systemId)
        return self._parse_bytestream(fp, options)

    def parseWithContext(self, input, cnode, action):
        """Parse relative to an existing node (not implemented)."""
        if action not in self._legal_actions:
            raise ValueError("not a legal action")
        raise NotImplementedError("Haven't written this yet...")

    def _parse_bytestream(self, stream, options):
        # Delegate the actual parsing to the expat-based builder.
        import xml.dom.expatbuilder
        builder = xml.dom.expatbuilder.makeBuilder(options)
        return builder.parseFile(stream)
+
+
+def _name_xform(name):
+    return name.lower().replace('-', '_')
+
+
class DOMEntityResolver(NewStyle):
    """Resolve a system id to a populated DOMInputSource via urllib2.

    A single opener is created lazily and cached for the resolver's
    lifetime.
    """

    __slots__ = '_opener',

    def resolveEntity(self, publicId, systemId):
        """Open systemId and return a DOMInputSource for it.

        The encoding is taken from the transport's Content-Type charset
        when available; baseURI is derived from the directory portion of
        the system id.
        """
        assert systemId is not None
        source = DOMInputSource()
        source.publicId = publicId
        source.systemId = systemId
        source.byteStream = self._get_opener().open(systemId)

        # determine the encoding if the transport provided it
        source.encoding = self._guess_media_encoding(source)

        # determine the base URI if we can
        import posixpath, urlparse
        parts = urlparse.urlparse(systemId)
        scheme, netloc, path, params, query, fragment = parts
        # XXX should we check the scheme here as well?
        if path and not path.endswith("/"):
            path = posixpath.dirname(path) + "/"
            parts = scheme, netloc, path, params, query, fragment
            source.baseURI = urlparse.urlunparse(parts)

        return source

    def _get_opener(self):
        # Lazily create and cache the opener (stored via __slots__).
        try:
            return self._opener
        except AttributeError:
            self._opener = self._create_opener()
            return self._opener

    def _create_opener(self):
        # Separate hook so subclasses can substitute a custom opener.
        import urllib2
        return urllib2.build_opener()

    def _guess_media_encoding(self, source):
        # Returns the charset parameter of the Content-Type header, if
        # any; falls through (returning None) otherwise.
        info = source.byteStream.info()
        if info.has_key("Content-Type"):
            for param in info.getplist():
                if param.startswith("charset="):
                    return param.split("=", 1)[1].lower()
+
+
class DOMInputSource(NewStyle):
    """DOM Level 3 LS input source.

    Holds the alternative ways a document can be supplied (byte stream,
    character stream, string data, system id) plus identifier and
    encoding metadata.  DOMBuilder.parse() reads byteStream first and
    falls back to retrieving systemId.
    """

    __slots__ = ('byteStream', 'characterStream', 'stringData',
                 'encoding', 'publicId', 'systemId', 'baseURI')

    def __init__(self):
        # Everything starts empty; callers fill in whichever they have.
        self.byteStream = None
        self.characterStream = None
        self.stringData = None
        self.encoding = None
        self.publicId = None
        self.systemId = None
        self.baseURI = None

    # DOM-spec accessor pairs for each attribute.
    def _get_byteStream(self):
        return self.byteStream
    def _set_byteStream(self, byteStream):
        self.byteStream = byteStream

    def _get_characterStream(self):
        return self.characterStream
    def _set_characterStream(self, characterStream):
        self.characterStream = characterStream

    def _get_stringData(self):
        return self.stringData
    def _set_stringData(self, data):
        self.stringData = data

    def _get_encoding(self):
        return self.encoding
    def _set_encoding(self, encoding):
        self.encoding = encoding

    def _get_publicId(self):
        return self.publicId
    def _set_publicId(self, publicId):
        self.publicId = publicId

    def _get_systemId(self):
        return self.systemId
    def _set_systemId(self, systemId):
        self.systemId = systemId

    def _get_baseURI(self):
        return self.baseURI
    def _set_baseURI(self, uri):
        self.baseURI = uri
+
+
class DOMBuilderFilter:
    """Filter interface that lets applications screen nodes while a DOM
    instance is being constructed.
    """

    # Concrete implementations only need to override acceptNode() and/or
    # startContainer(); this base accepts everything, so either method
    # may be implemented independently.

    # Return codes a filter hands back to the builder.
    FILTER_ACCEPT = 1
    FILTER_REJECT = 2
    FILTER_SKIP = 3
    FILTER_INTERRUPT = 4

    # By default every node type is shown to the filter.
    whatToShow = NodeFilter.SHOW_ALL

    def _get_whatToShow(self):
        return self.whatToShow

    def acceptNode(self, element):
        """Accept every completed node."""
        return self.FILTER_ACCEPT

    def startContainer(self, element):
        """Accept every element as it is opened."""
        return self.FILTER_ACCEPT

del NodeFilter
+
+
class DocumentLS:
    """Mixin to create documents that conform to the load/save spec."""

    # Only synchronous loading is supported; 'async' is effectively a
    # read-only False.
    async = False

    def _get_async(self):
        return False
    def _set_async(self, async):
        # Setting async to any true value is rejected outright.
        if async:
            raise xml.dom.NotSupportedErr(
                "asynchronous document loading is not supported")

    def abort(self):
        # What does it mean to "clear" a document?  Does the
        # documentElement disappear?
        raise NotImplementedError(
            "haven't figured out what this means yet")

    def load(self, uri):
        # DOM L3 LS: replace document content from a URI.  Unimplemented.
        raise NotImplementedError("haven't written this yet")

    def loadXML(self, source):
        # DOM L3 LS: replace document content from a string.  Unimplemented.
        raise NotImplementedError("haven't written this yet")

    def saveXML(self, snode):
        """Serialize 'snode' (or the whole document when None) to XML.

        Raises WrongDocumentErr if snode belongs to another document.
        """
        if snode is None:
            snode = self
        elif snode.ownerDocument is not self:
            raise xml.dom.WrongDocumentErr()
        return snode.toxml()
+
+
class DOMImplementationLS:
    """DOM Level 3 load/save implementation mixin (synchronous only)."""

    MODE_SYNCHRONOUS = 1
    MODE_ASYNCHRONOUS = 2

    def createDOMBuilder(self, mode, schemaType):
        """Return a synchronous DOMBuilder.

        schemaType must be None and mode must be MODE_SYNCHRONOUS; every
        other combination is rejected.
        """
        if schemaType is not None:
            raise xml.dom.NotSupportedErr(
                "schemaType not yet supported")
        if mode == self.MODE_ASYNCHRONOUS:
            raise xml.dom.NotSupportedErr(
                "asynchronous builders are not supported")
        if mode == self.MODE_SYNCHRONOUS:
            return DOMBuilder()
        raise ValueError("unknown value for mode")

    def createDOMWriter(self):
        """DOM LS serialization is not implemented."""
        raise NotImplementedError(
            "the writer interface hasn't been written yet!")

    def createDOMInputSource(self):
        """Return a fresh, empty DOMInputSource."""
        return DOMInputSource()
diff --git a/depot_tools/release/win/python_24/Lib/xml/parsers/__init__.py b/depot_tools/release/win/python_24/Lib/xml/parsers/__init__.py
new file mode 100644
index 0000000..eb314a3b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/parsers/__init__.py
@@ -0,0 +1,8 @@
+"""Python interfaces to XML parsers.
+
+This package contains one module:
+
+expat -- Python wrapper for James Clark's Expat parser, with namespace
+         support.
+
+"""
diff --git a/depot_tools/release/win/python_24/Lib/xml/parsers/expat.py b/depot_tools/release/win/python_24/Lib/xml/parsers/expat.py
new file mode 100644
index 0000000..d0a8b80
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/parsers/expat.py
@@ -0,0 +1,4 @@
+"""Interface to the Expat non-validating XML parser."""
+__version__ = '$Revision: 1.1 $'
+
+from pyexpat import *
diff --git a/depot_tools/release/win/python_24/Lib/xml/sax/__init__.py b/depot_tools/release/win/python_24/Lib/xml/sax/__init__.py
new file mode 100644
index 0000000..6b1b1ba
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/sax/__init__.py
@@ -0,0 +1,108 @@
+"""Simple API for XML (SAX) implementation for Python.
+
+This module provides an implementation of the SAX 2 interface;
+information about the Java version of the interface can be found at
+http://www.megginson.com/SAX/.  The Python version of the interface is
+documented at <...>.
+
+This package contains the following modules:
+
+handler -- Base classes and constants which define the SAX 2 API for
+           the 'client-side' of SAX for Python.
+
+saxutils -- Implementation of the convenience classes commonly used to
+            work with SAX.
+
+xmlreader -- Base classes and constants which define the SAX 2 API for
+             the parsers used with SAX for Python.
+
+expatreader -- Driver that allows use of the Expat parser with SAX.
+"""
+
+from xmlreader import InputSource
+from handler import ContentHandler, ErrorHandler
+from _exceptions import SAXException, SAXNotRecognizedException, \
+                        SAXParseException, SAXNotSupportedException, \
+                        SAXReaderNotAvailable
+
+
def parse(source, handler, errorHandler=ErrorHandler()):
    """Parse 'source' with a fresh default parser, delivering events to
    'handler' and errors to 'errorHandler'."""
    reader = make_parser()
    reader.setContentHandler(handler)
    reader.setErrorHandler(errorHandler)
    reader.parse(source)
+
def parseString(string, handler, errorHandler=ErrorHandler()):
    """Parse an in-memory document, delivering events to 'handler'."""
    # Prefer the C implementation of StringIO when it is available.
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO

    # Unlike parse(), an explicit None is tolerated here and replaced
    # with a default handler.
    if errorHandler is None:
        errorHandler = ErrorHandler()
    reader = make_parser()
    reader.setContentHandler(handler)
    reader.setErrorHandler(errorHandler)

    inpsrc = InputSource()
    inpsrc.setByteStream(StringIO(string))
    reader.parse(inpsrc)
+
# this is the parser list used by the make_parser function if no
# alternatives are given as parameters to the function

default_parser_list = ["xml.sax.expatreader"]

# tell modulefinder that importing sax potentially imports expatreader
_false = 0
if _false:
    import xml.sax.expatreader

import os, sys
# The PY_SAX_PARSER environment variable overrides the default driver
# list (comma-separated module names).
if os.environ.has_key("PY_SAX_PARSER"):
    default_parser_list = os.environ["PY_SAX_PARSER"].split(",")
del os

# On Jython, the python.xml.sax.parser registry property plays the same
# role as the environment variable above.
_key = "python.xml.sax.parser"
if sys.platform[:4] == "java" and sys.registry.containsKey(_key):
    default_parser_list = sys.registry.getProperty(_key).split(",")
+
+
def make_parser(parser_list = []):
    """Creates and returns a SAX parser.

    Creates the first parser it is able to instantiate of the ones
    given in the list created by doing parser_list +
    default_parser_list.  The lists must contain the names of Python
    modules containing both a SAX parser and a create_parser function.

    Raises SAXReaderNotAvailable when no driver can be instantiated.
    """

    for parser_name in parser_list + default_parser_list:
        try:
            return _create_parser(parser_name)
        except ImportError:
            # The unused exception binding ('except ImportError,e') was
            # removed.  sys must be imported locally: the module-level
            # sys is deleted at the bottom of this module.
            import sys
            if parser_name in sys.modules:
                # The parser module was found, but importing it
                # failed unexpectedly, pass this exception through
                raise
        except SAXReaderNotAvailable:
            # The parser module detected that it won't work properly,
            # so try the next one
            pass

    raise SAXReaderNotAvailable("No parsers found", None)
+
# --- Internal utility methods used by make_parser

if sys.platform[ : 4] == "java":
    # Jython: drivers must be loaded through Jython's import machinery.
    def _create_parser(parser_name):
        from org.python.core import imp
        drv_module = imp.importName(parser_name, 0, globals())
        return drv_module.create_parser()

else:
    # CPython: a plain __import__ of the driver module suffices.
    def _create_parser(parser_name):
        drv_module = __import__(parser_name,{},{},['create_parser'])
        return drv_module.create_parser()

# sys was only needed for the platform checks above.
del sys
diff --git a/depot_tools/release/win/python_24/Lib/xml/sax/_exceptions.py b/depot_tools/release/win/python_24/Lib/xml/sax/_exceptions.py
new file mode 100644
index 0000000..fdd614ae
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/sax/_exceptions.py
@@ -0,0 +1,131 @@
+"""Different kinds of SAX Exceptions"""
+import sys
+if sys.platform[:4] == "java":
+    from java.lang import Exception
+del sys
+
+# ===== SAXEXCEPTION =====
+
class SAXException(Exception):
    """Encapsulate an XML error or warning.

    Carries basic error or warning information from either the XML
    parser or the application.  Subclass it to add functionality or
    localization.  Although handlers on the ErrorHandler interface
    receive a SAXException, they are not required to raise it -- the
    information can simply be read out of it.
    """

    def __init__(self, msg, exception=None):
        """Store the required message and an optional wrapped exception."""
        Exception.__init__(self, msg)
        self._exception = exception
        self._msg = msg

    def getMessage(self):
        """Return a message for this exception."""
        return self._msg

    def getException(self):
        """Return the embedded exception, or None if there was none."""
        return self._exception

    def __str__(self):
        """The string form is simply the message."""
        return self._msg

    def __getitem__(self, ix):
        """Refuse indexing: Python 2's Exception defines __getitem__,
        which would otherwise make exception[ix] fail confusingly."""
        raise AttributeError("__getitem__")
+
+
+# ===== SAXPARSEEXCEPTION =====
+
class SAXParseException(SAXException):
    """Encapsulate an XML parse error or warning.

    Adds document-location information (system id, line, column) to
    SAXException.  Handlers receiving one are not obliged to raise it;
    they may just read the information and act accordingly.  Being a
    SAXException subclass, it can also wrap another exception.
    """

    def __init__(self, msg, exception, locator):
        """Create the exception; 'exception' may be None."""
        SAXException.__init__(self, msg, exception)
        self._locator = locator

        # Snapshot the locator's state now: by the time this exception
        # is caught, the objects behind the locator may already have
        # been torn down.
        self._systemId = self._locator.getSystemId()
        self._colnum = self._locator.getColumnNumber()
        self._linenum = self._locator.getLineNumber()

    def getColumnNumber(self):
        """The column number of the end of the text where the exception
        occurred."""
        return self._colnum

    def getLineNumber(self):
        """The line number of the end of the text where the exception
        occurred."""
        return self._linenum

    def getPublicId(self):
        """The public identifier of the entity where the exception
        occurred (read live from the locator, not cached)."""
        return self._locator.getPublicId()

    def getSystemId(self):
        """The system identifier of the entity where the exception
        occurred."""
        return self._systemId

    def __str__(self):
        """Format as 'systemId:line:column: message'."""
        sysid = self.getSystemId()
        linenum = self.getLineNumber()
        colnum = self.getColumnNumber()
        if sysid is None:
            sysid = "<unknown>"
        if linenum is None:
            linenum = "?"
        if colnum is None:
            colnum = "?"
        return "%s:%s:%s: %s" % (sysid, linenum, colnum, self._msg)
+
+
+# ===== SAXNOTRECOGNIZEDEXCEPTION =====
+
class SAXNotRecognizedException(SAXException):
    """Exception class for an unrecognized identifier.

    An XMLReader will raise this exception when it is confronted with an
    unrecognized feature or property. SAX applications and extensions may
    use this class for similar purposes."""
+
+
+# ===== SAXNOTSUPPORTEDEXCEPTION =====
+
class SAXNotSupportedException(SAXException):
    """Exception class for an unsupported operation.

    An XMLReader will raise this exception when a service it cannot
    perform is requested (specifically setting a state or value). SAX
    applications and extensions may use this class for similar
    purposes."""
+
+# ===== SAXREADERNOTAVAILABLE =====
+
class SAXReaderNotAvailable(SAXNotSupportedException):
    """Exception class for a missing driver.

    An XMLReader module (driver) should raise this exception when it
    is first imported, e.g. when a support module cannot be imported.
    It also may be raised during parsing, e.g. if executing an external
    program is not permitted."""
diff --git a/depot_tools/release/win/python_24/Lib/xml/sax/expatreader.py b/depot_tools/release/win/python_24/Lib/xml/sax/expatreader.py
new file mode 100644
index 0000000..ab8cbc1
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/sax/expatreader.py
@@ -0,0 +1,413 @@
+"""
+SAX driver for the pyexpat C module.  This driver works with
+pyexpat.__version__ == '2.22'.
+"""
+
+version = "0.20"
+
+from xml.sax._exceptions import *
+from xml.sax.handler import feature_validation, feature_namespaces
+from xml.sax.handler import feature_namespace_prefixes
+from xml.sax.handler import feature_external_ges, feature_external_pes
+from xml.sax.handler import feature_string_interning
+from xml.sax.handler import property_xml_string, property_interning_dict
+
+# xml.parsers.expat does not raise ImportError in Jython
+import sys
+if sys.platform[:4] == "java":
+    raise SAXReaderNotAvailable("expat not available in Java", None)
+del sys
+
+try:
+    from xml.parsers import expat
+except ImportError:
+    raise SAXReaderNotAvailable("expat not supported", None)
+else:
+    if not hasattr(expat, "ParserCreate"):
+        raise SAXReaderNotAvailable("expat not supported", None)
+from xml.sax import xmlreader, saxutils, handler
+
+AttributesImpl = xmlreader.AttributesImpl
+AttributesNSImpl = xmlreader.AttributesNSImpl
+
# If we're using a sufficiently recent version of Python, we can use
# weak references to avoid cycles between the parser and content
# handler, otherwise we'll just have to pretend.
try:
    # Probe for the C implementation; the public API is in weakref.
    import _weakref
except ImportError:
    # weakref is unavailable: fall back to an identity function, which
    # keeps a strong reference and therefore leaves the cycle in place.
    def _mkproxy(o):
        return o
else:
    import weakref
    _mkproxy = weakref.proxy
    del weakref, _weakref
+
+# --- ExpatLocator
+
class ExpatLocator(xmlreader.Locator):
    """Locator for use with the ExpatParser class.

    Only a weak proxy to the parser is stored, so handing this locator
    to the content handler does not create a reference cycle between
    the parser and the handler.
    """
    def __init__(self, parser):
        self._ref = _mkproxy(parser)

    def getColumnNumber(self):
        """Return expat's current error column, or None before parsing."""
        owner = self._ref
        if owner._parser is None:
            return None
        return owner._parser.ErrorColumnNumber

    def getLineNumber(self):
        """Return expat's current error line; defaults to 1 before parsing."""
        owner = self._ref
        if owner._parser is None:
            return 1
        return owner._parser.ErrorLineNumber

    def getPublicId(self):
        """Return the public id of the parser's current input source."""
        owner = self._ref
        if owner is None:
            return None
        return owner._source.getPublicId()

    def getSystemId(self):
        """Return the system id of the parser's current input source."""
        owner = self._ref
        if owner is None:
            return None
        return owner._source.getSystemId()
+
+
+# --- ExpatParser
+
class ExpatParser(xmlreader.IncrementalParser, xmlreader.Locator):
    """SAX driver for the pyexpat C module.

    Implements the IncrementalParser interface on top of pyexpat and
    doubles as its own Locator while parsing.
    """

    def __init__(self, namespaceHandling=0, bufsize=2**16-20):
        # namespaceHandling -- true enables namespace processing
        # bufsize -- read buffer size handed to IncrementalParser
        xmlreader.IncrementalParser.__init__(self, bufsize)
        self._source = xmlreader.InputSource()
        self._parser = None             # pyexpat parser, created by reset()
        self._namespaces = namespaceHandling
        self._lex_handler_prop = None   # value of the lexical-handler property
        self._parsing = 0               # true while a document is being parsed
        self._entity_stack = []         # saved (parser, source) pairs, see external_entity_ref
        self._external_ges = 1          # whether external general entities are read
        self._interning = None          # optional dict used to intern names

    # XMLReader methods

    def parse(self, source):
        "Parse an XML document from a URL or an InputSource."
        source = saxutils.prepare_input_source(source)

        self._source = source
        self.reset()
        # The locator holds only a weak proxy to this parser, avoiding a
        # reference cycle through the content handler.
        self._cont_handler.setDocumentLocator(ExpatLocator(self))
        xmlreader.IncrementalParser.parse(self, source)

    def prepareParser(self, source):
        # Record the document's system id so relative system identifiers
        # of external entities can be resolved against it.
        if source.getSystemId() != None:
            self._parser.SetBase(source.getSystemId())

    # Redefined setContentHandler to allow changing handlers during parsing

    def setContentHandler(self, handler):
        xmlreader.IncrementalParser.setContentHandler(self, handler)
        if self._parsing:
            # Rebind the expat callbacks that point at the old handler.
            self._reset_cont_handler()

    def getFeature(self, name):
        "Return the current setting of the named feature."
        if name == feature_namespaces:
            return self._namespaces
        elif name == feature_string_interning:
            return self._interning is not None
        elif name in (feature_validation, feature_external_pes,
                      feature_namespace_prefixes):
            # These are never supported by this driver.
            return 0
        elif name == feature_external_ges:
            return self._external_ges
        raise SAXNotRecognizedException("Feature '%s' not recognized" % name)

    def setFeature(self, name, state):
        "Set the named feature; only allowed while not parsing."
        if self._parsing:
            raise SAXNotSupportedException("Cannot set features while parsing")

        if name == feature_namespaces:
            self._namespaces = state
        elif name == feature_external_ges:
            self._external_ges = state
        elif name == feature_string_interning:
            if state:
                # Keep an existing interning dict if one is already set.
                if self._interning is None:
                    self._interning = {}
            else:
                self._interning = None
        elif name == feature_validation:
            if state:
                raise SAXNotSupportedException(
                    "expat does not support validation")
        elif name == feature_external_pes:
            if state:
                raise SAXNotSupportedException(
                    "expat does not read external parameter entities")
        elif name == feature_namespace_prefixes:
            if state:
                raise SAXNotSupportedException(
                    "expat does not report namespace prefixes")
        else:
            raise SAXNotRecognizedException(
                "Feature '%s' not recognized" % name)

    def getProperty(self, name):
        "Return the value of the named property."
        if name == handler.property_lexical_handler:
            return self._lex_handler_prop
        elif name == property_interning_dict:
            return self._interning
        elif name == property_xml_string:
            # Only available while parsing, and only when this pyexpat
            # build exposes GetInputContext.
            if self._parser:
                if hasattr(self._parser, "GetInputContext"):
                    return self._parser.GetInputContext()
                else:
                    raise SAXNotRecognizedException(
                        "This version of expat does not support getting"
                        " the XML string")
            else:
                raise SAXNotSupportedException(
                    "XML string cannot be returned when not parsing")
        raise SAXNotRecognizedException("Property '%s' not recognized" % name)

    def setProperty(self, name, value):
        "Set the value of the named property."
        if name == handler.property_lexical_handler:
            self._lex_handler_prop = value
            if self._parsing:
                # Rebind expat callbacks to the new lexical handler.
                self._reset_lex_handler_prop()
        elif name == property_interning_dict:
            self._interning = value
        elif name == property_xml_string:
            raise SAXNotSupportedException("Property '%s' cannot be set" %
                                           name)
        else:
            raise SAXNotRecognizedException("Property '%s' not recognized" %
                                            name)

    # IncrementalParser methods

    def feed(self, data, isFinal = 0):
        "Feed a chunk of data to the parser; starts the document lazily."
        if not self._parsing:
            self.reset()
            self._parsing = 1
            self._cont_handler.startDocument()

        try:
            # The isFinal parameter is internal to the expat reader.
            # If it is set to true, expat will check validity of the entire
            # document. When feeding chunks, they are not normally final -
            # except when invoked from close.
            self._parser.Parse(data, isFinal)
        except expat.error, e:
            exc = SAXParseException(expat.ErrorString(e.code), e, self)
            # FIXME: when to invoke error()?
            self._err_handler.fatalError(exc)

    def close(self):
        "Finish parsing: deliver the final chunk and endDocument."
        if self._entity_stack:
            # If we are completing an external entity, do nothing here
            return
        self.feed("", isFinal = 1)
        self._cont_handler.endDocument()
        self._parsing = 0
        # break cycle created by expat handlers pointing to our methods
        self._parser = None

    def _reset_cont_handler(self):
        # Point expat's PI and character-data callbacks at the current
        # content handler.
        self._parser.ProcessingInstructionHandler = \
                                    self._cont_handler.processingInstruction
        self._parser.CharacterDataHandler = self._cont_handler.characters

    def _reset_lex_handler_prop(self):
        # Point expat's lexical callbacks at the current lexical handler,
        # or clear them when the handler was removed.
        lex = self._lex_handler_prop
        parser = self._parser
        if lex is None:
            parser.CommentHandler = None
            parser.StartCdataSectionHandler = None
            parser.EndCdataSectionHandler = None
            parser.StartDoctypeDeclHandler = None
            parser.EndDoctypeDeclHandler = None
        else:
            parser.CommentHandler = lex.comment
            parser.StartCdataSectionHandler = lex.startCDATA
            parser.EndCdataSectionHandler = lex.endCDATA
            parser.StartDoctypeDeclHandler = self.start_doctype_decl
            parser.EndDoctypeDeclHandler = lex.endDTD

    def reset(self):
        "Create a fresh expat parser and (re)bind all callbacks."
        if self._namespaces:
            # " " is the separator: expat then reports names as
            # space-separated "uri localname" or "uri localname prefix"
            # strings (see start_element_ns).
            self._parser = expat.ParserCreate(None, " ",
                                              intern=self._interning)
            self._parser.namespace_prefixes = 1
            self._parser.StartElementHandler = self.start_element_ns
            self._parser.EndElementHandler = self.end_element_ns
        else:
            self._parser = expat.ParserCreate(intern = self._interning)
            self._parser.StartElementHandler = self.start_element
            self._parser.EndElementHandler = self.end_element

        self._reset_cont_handler()
        self._parser.UnparsedEntityDeclHandler = self.unparsed_entity_decl
        self._parser.NotationDeclHandler = self.notation_decl
        self._parser.StartNamespaceDeclHandler = self.start_namespace_decl
        self._parser.EndNamespaceDeclHandler = self.end_namespace_decl

        self._decl_handler_prop = None
        if self._lex_handler_prop:
            self._reset_lex_handler_prop()
#         self._parser.DefaultHandler =
#         self._parser.DefaultHandlerExpand =
#         self._parser.NotStandaloneHandler =
        self._parser.ExternalEntityRefHandler = self.external_entity_ref
        try:
            self._parser.SkippedEntityHandler = self.skipped_entity_handler
        except AttributeError:
            # This pyexpat does not support SkippedEntity
            pass
        self._parser.SetParamEntityParsing(
            expat.XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE)

        self._parsing = 0
        self._entity_stack = []

    # Locator methods

    def getColumnNumber(self):
        "Return expat's error column, or None before a parser exists."
        if self._parser is None:
            return None
        return self._parser.ErrorColumnNumber

    def getLineNumber(self):
        "Return expat's error line; defaults to 1 before a parser exists."
        if self._parser is None:
            return 1
        return self._parser.ErrorLineNumber

    def getPublicId(self):
        "Return the public id of the current input source."
        return self._source.getPublicId()

    def getSystemId(self):
        "Return the system id of the current input source."
        return self._source.getSystemId()

    # event handlers
    def start_element(self, name, attrs):
        # Non-namespace mode: forward the raw name and wrapped attributes.
        self._cont_handler.startElement(name, AttributesImpl(attrs))

    def end_element(self, name):
        self._cont_handler.endElement(name)

    def start_element_ns(self, name, attrs):
        # expat reports "uri localname prefix" (space-separated; parts may
        # be missing); convert to the SAX (uri, localname) tuple form.
        pair = name.split()
        if len(pair) == 1:
            # no namespace
            pair = (None, name)
        elif len(pair) == 3:
            # prefixed name: keep only (uri, localname)
            pair = pair[0], pair[1]
        else:
            # default namespace
            pair = tuple(pair)

        newattrs = {}
        qnames = {}
        for (aname, value) in attrs.items():
            parts = aname.split()
            length = len(parts)
            if length == 1:
                # no namespace
                qname = aname
                apair = (None, aname)
            elif length == 3:
                # prefixed name: rebuild the qname from prefix and localname
                qname = "%s:%s" % (parts[2], parts[1])
                apair = parts[0], parts[1]
            else:
                # default namespace
                qname = parts[1]
                apair = tuple(parts)

            newattrs[apair] = value
            qnames[apair] = qname

        self._cont_handler.startElementNS(pair, None,
                                          AttributesNSImpl(newattrs, qnames))

    def end_element_ns(self, name):
        # Same name conversion as start_element_ns.
        pair = name.split()
        if len(pair) == 1:
            pair = (None, name)
        elif len(pair) == 3:
            pair = pair[0], pair[1]
        else:
            pair = tuple(pair)

        self._cont_handler.endElementNS(pair, None)

    # this is not used (call directly to ContentHandler)
    def processing_instruction(self, target, data):
        self._cont_handler.processingInstruction(target, data)

    # this is not used (call directly to ContentHandler)
    def character_data(self, data):
        self._cont_handler.characters(data)

    def start_namespace_decl(self, prefix, uri):
        self._cont_handler.startPrefixMapping(prefix, uri)

    def end_namespace_decl(self, prefix):
        self._cont_handler.endPrefixMapping(prefix)

    def start_doctype_decl(self, name, sysid, pubid, has_internal_subset):
        # Only bound when a lexical handler is installed; see
        # _reset_lex_handler_prop.
        self._lex_handler_prop.startDTD(name, pubid, sysid)

    def unparsed_entity_decl(self, name, base, sysid, pubid, notation_name):
        self._dtd_handler.unparsedEntityDecl(name, pubid, sysid, notation_name)

    def notation_decl(self, name, base, sysid, pubid):
        self._dtd_handler.notationDecl(name, pubid, sysid)

    def external_entity_ref(self, context, base, sysid, pubid):
        # Returning 1 tells expat the entity was handled; 0 aborts parsing.
        if not self._external_ges:
            return 1

        source = self._ent_handler.resolveEntity(pubid, sysid)
        source = saxutils.prepare_input_source(source,
                                               self._source.getSystemId() or
                                               "")

        # Save the current parser/source and parse the entity with a
        # sub-parser sharing this parser's context.
        self._entity_stack.append((self._parser, self._source))
        self._parser = self._parser.ExternalEntityParserCreate(context)
        self._source = source

        try:
            xmlreader.IncrementalParser.parse(self, source)
        except:
            return 0  # FIXME: save error info here?

        # Restore the outer parser and source.
        (self._parser, self._source) = self._entity_stack[-1]
        del self._entity_stack[-1]
        return 1

    def skipped_entity_handler(self, name, is_pe):
        if is_pe:
            # The SAX spec requires to report skipped PEs with a '%'
            name = '%'+name
        self._cont_handler.skippedEntity(name)
+
+# ---
+
+def create_parser(*args, **kwargs):
+    return ExpatParser(*args, **kwargs)
+
+# ---
+
if __name__ == "__main__":
    # Manual smoke test: parse a sample document and echo it back out
    # through XMLGenerator, raising on any parse error.
    import xml.sax
    p = create_parser()
    p.setContentHandler(xml.sax.XMLGenerator())
    p.setErrorHandler(xml.sax.ErrorHandler())
    p.parse("../../../hamlet.xml")
diff --git a/depot_tools/release/win/python_24/Lib/xml/sax/handler.py b/depot_tools/release/win/python_24/Lib/xml/sax/handler.py
new file mode 100644
index 0000000..747f82b
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/sax/handler.py
@@ -0,0 +1,342 @@
+"""
+This module contains the core classes of version 2.0 of SAX for Python.
+This file provides only default classes with absolutely minimum
+functionality, from which drivers and applications can be subclassed.
+
+Many of these classes are empty and are included only as documentation
+of the interfaces.
+
+$Id: handler.py,v 1.11 2004/05/06 03:47:48 fdrake Exp $
+"""
+
+version = '2.0beta'
+
+#============================================================================
+#
+# HANDLER INTERFACES
+#
+#============================================================================
+
+# ===== ERRORHANDLER =====
+
class ErrorHandler:
    """Basic interface for SAX error handlers.

    If you create an object that implements this interface, then
    register the object with your XMLReader, the parser will call the
    methods in your object to report all warnings and errors. There
    are three levels of errors available: warnings, (possibly)
    recoverable errors, and unrecoverable errors. All methods take a
    SAXParseException as the only parameter."""

    def error(self, exception):
        "Handle a recoverable error."
        # Default behaviour: abort parsing by re-raising the exception.
        raise exception

    def fatalError(self, exception):
        "Handle a non-recoverable error."
        # Default behaviour: abort parsing by re-raising the exception.
        raise exception

    def warning(self, exception):
        "Handle a warning."
        # Default behaviour: print the warning and continue parsing
        # (Python 2 print statement; writes to stdout).
        print exception
+
+
+# ===== CONTENTHANDLER =====
+
class ContentHandler:
    """Receiver interface for logical document content events.

    This is the main SAX callback interface, and the one most important
    to applications; the order of events mirrors the order of the
    information in the document. Every method here is a no-op meant to
    be overridden by subclasses.
    """

    def __init__(self):
        self._locator = None  # Locator supplied by the parser, if any

    def setDocumentLocator(self, locator):
        """Store the locator the parser supplies for locating events.

        Parsers are strongly encouraged to supply a locator and must do
        so before any other event in this interface. The locator lets
        the application determine the end position of any
        document-related event (typically for its own error reporting);
        it is only guaranteed to be accurate while an event is actually
        being delivered, and should not be used at any other time.
        """
        self._locator = locator

    def startDocument(self):
        """Receive notification of the beginning of a document.

        Invoked exactly once, before any other event in this interface
        or in DTDHandler (except setDocumentLocator)."""

    def endDocument(self):
        """Receive notification of the end of a document.

        Invoked exactly once, as the very last event of the parse --
        either at end of input or after parsing was abandoned because
        of an unrecoverable error."""

    def startPrefixMapping(self, prefix, uri):
        """Begin the scope of a prefix-URI namespace mapping.

        Not needed for normal namespace processing (the reader expands
        prefixes automatically when the
        http://xml.org/sax/features/namespaces feature is true, the
        default), but required when the application must expand
        prefixes appearing in character data or attribute values.

        start/endPrefixMapping events are not guaranteed to nest
        properly relative to each other: all startPrefixMapping events
        arrive before the corresponding startElement, all
        endPrefixMapping events after the corresponding endElement, but
        their mutual order is undefined."""

    def endPrefixMapping(self, prefix):
        """End the scope of a prefix-URI mapping.

        Always delivered after the corresponding endElement event; see
        startPrefixMapping for details."""

    def startElement(self, name, attrs):
        """Signal the start of an element in non-namespace mode.

        name is the raw XML 1.0 element-type name as a string; attrs is
        an Attributes instance holding the element's attributes."""

    def endElement(self, name):
        """Signal the end of an element in non-namespace mode.

        name is the element-type name, exactly as in startElement."""

    def startElementNS(self, name, qname, attrs):
        """Signal the start of an element in namespace mode.

        name is a (uri, localname) tuple -- uri is None for elements
        without a namespace -- qname is the raw XML 1.0 name from the
        source document, and attrs is an Attributes instance holding
        the element's attributes."""

    def endElementNS(self, name, qname):
        """Signal the end of an element in namespace mode.

        name is the element-type tuple, exactly as in startElementNS."""

    def characters(self, content):
        """Receive a chunk of character data.

        Contiguous character data may be delivered in one chunk or
        split across several, but every chunk comes from a single
        external entity so the Locator stays meaningful."""

    def ignorableWhitespace(self, whitespace):
        """Receive ignorable whitespace in element content.

        Validating parsers must report such whitespace (W3C XML 1.0,
        section 2.10); non-validating parsers may, if they parse and
        use content models. Chunking rules match characters()."""

    def processingInstruction(self, target, data):
        """Receive a processing instruction.

        Invoked once per PI; PIs may occur before or after the main
        document element. XML declarations (XML 1.0, section 2.8) and
        text declarations (section 4.3.1) are never reported here."""

    def skippedEntity(self, name):
        """Receive notification of a skipped entity.

        Invoked once per skipped entity. Non-validating processors may
        skip entities whose declarations they have not seen (e.g.
        declared in an external DTD subset); any processor may skip
        external entities, depending on the
        http://xml.org/sax/features/external-general-entities and
        http://xml.org/sax/features/external-parameter-entities
        properties."""
+
+
+# ===== DTDHandler =====
+
class DTDHandler:
    """Receiver interface for basic DTD events.

    Covers only the DTD events required for basic parsing -- unparsed
    entities and notations. Both methods are no-ops meant to be
    overridden by subclasses.
    """

    def notationDecl(self, name, publicId, systemId):
        """Handle a notation declaration event."""

    def unparsedEntityDecl(self, name, publicId, systemId, ndata):
        """Handle an unparsed entity declaration event."""
+
+
+# ===== ENTITYRESOLVER =====
+
class EntityResolver:
    """Basic interface for resolving external entities.

    Register an implementing object with your Parser and it will be
    consulted to resolve every external entity. Note that
    DefaultHandler implements this interface with the default
    behaviour.
    """

    def resolveEntity(self, publicId, systemId):
        """Resolve an entity's system identifier.

        Returns either a system-identifier string to read from, or an
        InputSource; the default simply hands back systemId unchanged."""
        return systemId
+
+
+#============================================================================
+#
+# CORE FEATURES
+#
+#============================================================================
+
feature_namespaces = "http://xml.org/sax/features/namespaces"
# true: Perform Namespace processing (default).
# false: Optionally do not perform Namespace processing
#        (implies namespace-prefixes).
# access: (parsing) read-only; (not parsing) read/write

feature_namespace_prefixes = "http://xml.org/sax/features/namespace-prefixes"
# true: Report the original prefixed names and attributes used for Namespace
#       declarations.
# false: Do not report attributes used for Namespace declarations, and
#        optionally do not report original prefixed names (default).
# access: (parsing) read-only; (not parsing) read/write

feature_string_interning = "http://xml.org/sax/features/string-interning"
# true: All element names, prefixes, attribute names, Namespace URIs, and
#       local names are interned using the built-in intern function.
# false: Names are not necessarily interned, although they may be (default).
# access: (parsing) read-only; (not parsing) read/write

feature_validation = "http://xml.org/sax/features/validation"
# true: Report all validation errors (implies external-general-entities and
#       external-parameter-entities).
# false: Do not report validation errors.
# access: (parsing) read-only; (not parsing) read/write

feature_external_ges = "http://xml.org/sax/features/external-general-entities"
# true: Include all external general (text) entities.
# false: Do not include external general entities.
# access: (parsing) read-only; (not parsing) read/write

feature_external_pes = "http://xml.org/sax/features/external-parameter-entities"
# true: Include all external parameter entities, including the external
#       DTD subset.
# false: Do not include any external parameter entities, even the external
#        DTD subset.
# access: (parsing) read-only; (not parsing) read/write

# Complete list of the core feature names defined above.
all_features = [feature_namespaces,
                feature_namespace_prefixes,
                feature_string_interning,
                feature_validation,
                feature_external_ges,
                feature_external_pes]
+
+
+#============================================================================
+#
+# CORE PROPERTIES
+#
+#============================================================================
+
property_lexical_handler = "http://xml.org/sax/properties/lexical-handler"
# data type: xml.sax.sax2lib.LexicalHandler
# description: An optional extension handler for lexical events like comments.
# access: read/write

property_declaration_handler = "http://xml.org/sax/properties/declaration-handler"
# data type: xml.sax.sax2lib.DeclHandler
# description: An optional extension handler for DTD-related events other
#              than notations and unparsed entities.
# access: read/write

property_dom_node = "http://xml.org/sax/properties/dom-node"
# data type: org.w3c.dom.Node
# description: When parsing, the current DOM node being visited if this is
#              a DOM iterator; when not parsing, the root DOM node for
#              iteration.
# access: (parsing) read-only; (not parsing) read/write

property_xml_string = "http://xml.org/sax/properties/xml-string"
# data type: String
# description: The literal string of characters that was the source for
#              the current event.
# access: read-only

property_encoding = "http://www.python.org/sax/properties/encoding"
# data type: String
# description: The name of the encoding to assume for input data.
# access: write: set the encoding, e.g. established by a higher-level
#                protocol. May change during parsing (e.g. after
#                processing a META tag)
#         read:  return the current encoding (possibly established through
#                auto-detection).
# initial value: UTF-8
#

property_interning_dict = "http://www.python.org/sax/properties/interning-dict"
# data type: Dictionary
# description: The dictionary used to intern common strings in the document
# access: write: Request that the parser uses a specific dictionary, to
#                allow interning across different documents
#         read:  return the current interning dictionary, or None
#

# Complete list of the core property names defined above.
all_properties = [property_lexical_handler,
                  property_dom_node,
                  property_declaration_handler,
                  property_xml_string,
                  property_encoding,
                  property_interning_dict]
diff --git a/depot_tools/release/win/python_24/Lib/xml/sax/saxutils.py b/depot_tools/release/win/python_24/Lib/xml/sax/saxutils.py
new file mode 100644
index 0000000..582b008
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/sax/saxutils.py
@@ -0,0 +1,297 @@
+"""\
+A library of useful helper classes to the SAX classes, for the
+convenience of application and driver writers.
+"""
+
+import os, urlparse, urllib, types
+import handler
+import xmlreader
+
# Python 2.x compatibility: builds compiled without Unicode support have no
# types.UnicodeType, so fall back to plain strings only.
try:
    _StringTypes = [types.StringType, types.UnicodeType]
except AttributeError:
    _StringTypes = [types.StringType]

# See whether the xmlcharrefreplace error handler is
# supported
try:
    from codecs import xmlcharrefreplace_errors
    _error_handling = "xmlcharrefreplace"
    del xmlcharrefreplace_errors  # only probed for existence; not used directly
except ImportError:
    _error_handling = "strict"
+
def __dict_replace(s, d):
    """Replace substrings of a string using a dictionary.

    Each key of *d* found in *s* is replaced by its value, one key at a
    time (in dict iteration order).
    """
    result = s
    for old, new in d.items():
        result = result.replace(old, new)
    return result
+
def escape(data, entities={}):
    """Escape &, <, and > in a string of data.

    You can escape other strings of data by passing a dictionary as
    the optional entities parameter.  The keys and values must all be
    strings; each key will be replaced with its corresponding value.
    """

    # "&" is handled first so the "&" in the substitutions themselves
    # is not re-escaped.
    for char, entity in (("&", "&amp;"), (">", "&gt;"), ("<", "&lt;")):
        data = data.replace(char, entity)
    if entities:
        data = __dict_replace(data, entities)
    return data
+
def unescape(data, entities={}):
    """Unescape &amp;, &lt;, and &gt; in a string of data.

    You can unescape other strings of data by passing a dictionary as
    the optional entities parameter.  The keys and values must all be
    strings; each key will be replaced with its corresponding value.
    """
    data = data.replace("&lt;", "<").replace("&gt;", ">")
    if entities:
        data = __dict_replace(data, entities)
    # "&amp;" goes last so freshly produced "&" cannot recombine with
    # following text into a new entity reference.
    return data.replace("&amp;", "&")
+
def quoteattr(data, entities={}):
    """Escape and quote an attribute value.

    Escape &, <, and > in a string of data, then quote it for use as
    an attribute value.  The \" character will be escaped as well, if
    necessary.

    Tab, newline and carriage return are escaped as numeric character
    references so that they survive XML attribute-value normalization
    (which would otherwise replace them with spaces); this matches the
    fix later adopted upstream in CPython's xml.sax.saxutils.

    You can escape other strings of data by passing a dictionary as
    the optional entities parameter.  The keys and values must all be
    strings; each key will be replaced with its corresponding value.
    """
    # Work on a copy so the caller's dict (and the shared default) is
    # never mutated.
    entities = entities.copy()
    entities.update({'\n': '&#10;', '\r': '&#13;', '\t': '&#9;'})
    data = escape(data, entities)
    if '"' in data:
        if "'" in data:
            # Both quote kinds present: use double quotes and escape them.
            data = '"%s"' % data.replace('"', "&quot;")
        else:
            data = "'%s'" % data
    else:
        data = '"%s"' % data
    return data
+
+
class XMLGenerator(handler.ContentHandler):
    """SAX ContentHandler that writes the received event stream back out
    as an XML document on a file-like object."""

    def __init__(self, out=None, encoding="iso-8859-1"):
        # out: object with a write() method; defaults to sys.stdout.
        # encoding: declared in the XML prolog and used by _write() to
        # encode non-str text.
        if out is None:
            import sys
            out = sys.stdout
        handler.ContentHandler.__init__(self)
        self._out = out
        self._ns_contexts = [{}] # contains uri -> prefix dicts
        self._current_context = self._ns_contexts[-1]
        self._undeclared_ns_maps = []
        self._encoding = encoding

    def _write(self, text):
        # Byte strings pass through untouched; other text is encoded with
        # the configured encoding (xmlcharrefreplace when the codec error
        # handler is available, strict otherwise -- see module prelude).
        if isinstance(text, str):
            self._out.write(text)
        else:
            self._out.write(text.encode(self._encoding, _error_handling))

    # ContentHandler methods

    def startDocument(self):
        self._write('<?xml version="1.0" encoding="%s"?>\n' %
                        self._encoding)

    def startPrefixMapping(self, prefix, uri):
        # Push a snapshot of the current uri->prefix map, then record the
        # new mapping.
        # NOTE(review): the snapshot is pushed *before* the mutation, so
        # the mutated dict is the one that remains in _ns_contexts after
        # endPrefixMapping -- confirm nested prefix remappings restore
        # correctly.
        self._ns_contexts.append(self._current_context.copy())
        self._current_context[uri] = prefix
        self._undeclared_ns_maps.append((prefix, uri))

    def endPrefixMapping(self, prefix):
        self._current_context = self._ns_contexts[-1]
        del self._ns_contexts[-1]

    def startElement(self, name, attrs):
        self._write('<' + name)
        # note: the loop variable deliberately rebinds `name`
        for (name, value) in attrs.items():
            self._write(' %s=%s' % (name, quoteattr(value)))
        self._write('>')

    def endElement(self, name):
        self._write('</%s>' % name)

    def startElementNS(self, name, qname, attrs):
        if name[0] is None:
            # if the name was not namespace-scoped, use the unqualified part
            name = name[1]
        else:
            # else try to restore the original prefix from the namespace
            name = self._current_context[name[0]] + ":" + name[1]
        self._write('<' + name)

        # emit xmlns declarations for mappings not yet written out
        for pair in self._undeclared_ns_maps:
            self._write(' xmlns:%s="%s"' % pair)
        self._undeclared_ns_maps = []

        for (name, value) in attrs.items():
            name = self._current_context[name[0]] + ":" + name[1]
            self._write(' %s=%s' % (name, quoteattr(value)))
        self._write('>')

    def endElementNS(self, name, qname):
        if name[0] is None:
            name = name[1]
        else:
            name = self._current_context[name[0]] + ":" + name[1]
        self._write('</%s>' % name)

    def characters(self, content):
        self._write(escape(content))

    def ignorableWhitespace(self, content):
        # whitespace is written through unescaped
        self._write(content)

    def processingInstruction(self, target, data):
        self._write('<?%s %s?>' % (target, data))
+
+
class XMLFilterBase(xmlreader.XMLReader):
    """This class is designed to sit between an XMLReader and the
    client application's event handlers.  By default, it does nothing
    but pass requests up to the reader and events on to the handlers
    unmodified, but subclasses can override specific methods to modify
    the event stream or the configuration requests as they pass
    through."""

    def __init__(self, parent = None):
        # parent: the wrapped XMLReader; may also be supplied later via
        # setParent().
        xmlreader.XMLReader.__init__(self)
        self._parent = parent

    # ErrorHandler methods -- events forwarded to the registered handler

    def error(self, exception):
        self._err_handler.error(exception)

    def fatalError(self, exception):
        self._err_handler.fatalError(exception)

    def warning(self, exception):
        self._err_handler.warning(exception)

    # ContentHandler methods -- events forwarded to the registered handler

    def setDocumentLocator(self, locator):
        self._cont_handler.setDocumentLocator(locator)

    def startDocument(self):
        self._cont_handler.startDocument()

    def endDocument(self):
        self._cont_handler.endDocument()

    def startPrefixMapping(self, prefix, uri):
        self._cont_handler.startPrefixMapping(prefix, uri)

    def endPrefixMapping(self, prefix):
        self._cont_handler.endPrefixMapping(prefix)

    def startElement(self, name, attrs):
        self._cont_handler.startElement(name, attrs)

    def endElement(self, name):
        self._cont_handler.endElement(name)

    def startElementNS(self, name, qname, attrs):
        self._cont_handler.startElementNS(name, qname, attrs)

    def endElementNS(self, name, qname):
        self._cont_handler.endElementNS(name, qname)

    def characters(self, content):
        self._cont_handler.characters(content)

    def ignorableWhitespace(self, chars):
        self._cont_handler.ignorableWhitespace(chars)

    def processingInstruction(self, target, data):
        self._cont_handler.processingInstruction(target, data)

    def skippedEntity(self, name):
        self._cont_handler.skippedEntity(name)

    # DTDHandler methods -- events forwarded to the registered handler

    def notationDecl(self, name, publicId, systemId):
        self._dtd_handler.notationDecl(name, publicId, systemId)

    def unparsedEntityDecl(self, name, publicId, systemId, ndata):
        self._dtd_handler.unparsedEntityDecl(name, publicId, systemId, ndata)

    # EntityResolver methods

    def resolveEntity(self, publicId, systemId):
        return self._ent_handler.resolveEntity(publicId, systemId)

    # XMLReader methods -- requests delegated down to the parent reader

    def parse(self, source):
        # Install this filter as the parent's handler/resolver on every
        # axis so all events are routed through it, then start the parse.
        self._parent.setContentHandler(self)
        self._parent.setErrorHandler(self)
        self._parent.setEntityResolver(self)
        self._parent.setDTDHandler(self)
        self._parent.parse(source)

    def setLocale(self, locale):
        self._parent.setLocale(locale)

    def getFeature(self, name):
        return self._parent.getFeature(name)

    def setFeature(self, name, state):
        self._parent.setFeature(name, state)

    def getProperty(self, name):
        return self._parent.getProperty(name)

    def setProperty(self, name, value):
        self._parent.setProperty(name, value)

    # XMLFilter methods

    def getParent(self):
        return self._parent

    def setParent(self, parent):
        self._parent = parent
+
+# --- Utility functions
+
def prepare_input_source(source, base = ""):
    """This function takes an InputSource and an optional base URL and
    returns a fully resolved InputSource object ready for reading.

    `source` may be a system identifier string, a file-like object, or
    an InputSource; the first two are wrapped in an InputSource.  If no
    byte stream is attached, the system id is resolved against `base`:
    as a local file when one exists, otherwise as a URL opened with
    urllib.
    """

    if type(source) in _StringTypes:
        source = xmlreader.InputSource(source)
    elif hasattr(source, "read"):
        # File-like object: wrap it and, when available, record its name
        # as the system id.
        f = source
        source = xmlreader.InputSource()
        source.setByteStream(f)
        if hasattr(f, "name"):
            source.setSystemId(f.name)

    if source.getByteStream() is None:
        sysid = source.getSystemId()
        basehead = os.path.dirname(os.path.normpath(base))
        sysidfilename = os.path.join(basehead, sysid)
        if os.path.isfile(sysidfilename):
            source.setSystemId(sysidfilename)
            f = open(sysidfilename, "rb")
        else:
            # Not a local file: resolve relative to base and fetch as URL.
            # NOTE(review): the opened stream is handed to the InputSource
            # and never closed here -- presumably the parser/caller owns
            # it; confirm.
            source.setSystemId(urlparse.urljoin(base, sysid))
            f = urllib.urlopen(source.getSystemId())

        source.setByteStream(f)

    return source
diff --git a/depot_tools/release/win/python_24/Lib/xml/sax/xmlreader.py b/depot_tools/release/win/python_24/Lib/xml/sax/xmlreader.py
new file mode 100644
index 0000000..9a2361e
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xml/sax/xmlreader.py
@@ -0,0 +1,381 @@
+"""An XML Reader is the SAX 2 name for an XML parser. XML Parsers
+should be based on this code. """
+
+import handler
+
+from _exceptions import SAXNotSupportedException, SAXNotRecognizedException
+
+
+# ===== XMLREADER =====
+
class XMLReader:
    """Interface for reading an XML document using callbacks.

    XMLReader is the interface that an XML parser's SAX2 driver must
    implement. This interface allows an application to set and query
    features and properties in the parser, to register event handlers
    for document processing, and to initiate a document parse.

    All SAX interfaces are assumed to be synchronous: the parse
    methods must not return until parsing is complete, and readers
    must wait for an event-handler callback to return before reporting
    the next event."""

    def __init__(self):
        # Default no-op handlers; replaced via the set* methods below.
        self._cont_handler = handler.ContentHandler()
        self._dtd_handler = handler.DTDHandler()
        self._ent_handler = handler.EntityResolver()
        self._err_handler = handler.ErrorHandler()

    def parse(self, source):
        "Parse an XML document from a system identifier or an InputSource."
        raise NotImplementedError("This method must be implemented!")

    def getContentHandler(self):
        "Returns the current ContentHandler."
        return self._cont_handler

    def setContentHandler(self, handler):
        "Registers a new object to receive document content events."
        self._cont_handler = handler

    def getDTDHandler(self):
        "Returns the current DTD handler."
        return self._dtd_handler

    def setDTDHandler(self, handler):
        "Register an object to receive basic DTD-related events."
        self._dtd_handler = handler

    def getEntityResolver(self):
        "Returns the current EntityResolver."
        return self._ent_handler

    def setEntityResolver(self, resolver):
        "Register an object to resolve external entities."
        self._ent_handler = resolver

    def getErrorHandler(self):
        "Returns the current ErrorHandler."
        return self._err_handler

    def setErrorHandler(self, handler):
        "Register an object to receive error-message events."
        self._err_handler = handler

    def setLocale(self, locale):
        """Allow an application to set the locale for errors and warnings.

        SAX parsers are not required to provide localization for errors
        and warnings; if they cannot support the requested locale,
        however, they must throw a SAX exception. Applications may
        request a locale change in the middle of a parse."""
        raise SAXNotSupportedException("Locale support not implemented")

    # The base implementation recognizes no features or properties;
    # drivers override the following four methods to advertise support.

    def getFeature(self, name):
        "Looks up and returns the state of a SAX2 feature."
        raise SAXNotRecognizedException("Feature '%s' not recognized" % name)

    def setFeature(self, name, state):
        "Sets the state of a SAX2 feature."
        raise SAXNotRecognizedException("Feature '%s' not recognized" % name)

    def getProperty(self, name):
        "Looks up and returns the value of a SAX2 property."
        raise SAXNotRecognizedException("Property '%s' not recognized" % name)

    def setProperty(self, name, value):
        "Sets the value of a SAX2 property."
        raise SAXNotRecognizedException("Property '%s' not recognized" % name)
+
class IncrementalParser(XMLReader):
    """XMLReader extended with incremental (push-style) parsing.

    Support for this interface is optional, since not all underlying
    XML parsers can provide it.

    A freshly constructed parser accepts data through feed() right
    away.  Once a document has been ended with close(), reset() must be
    called before the parser will take new input, whether through feed
    or through parse.

    None of these methods may be invoked while a parse is in progress
    (that is, after parse has been called and before it returns).

    As a convenience to SAX 2.0 driver writers, parse() is implemented
    here in terms of feed, close and reset."""

    def __init__(self, bufsize=2**16):
        # bufsize: number of bytes requested per read() in parse().
        self._bufsize = bufsize
        XMLReader.__init__(self)

    def parse(self, source):
        # Resolve the input, let the driver prepare itself, then pump the
        # byte stream through feed() one buffer at a time.
        import saxutils
        source = saxutils.prepare_input_source(source)

        self.prepareParser(source)
        stream = source.getByteStream()
        chunk = stream.read(self._bufsize)
        while chunk != "":
            self.feed(chunk)
            chunk = stream.read(self._bufsize)
        self.close()

    def feed(self, data):
        """Hand raw XML text to the parser and have it processed,
        emitting the corresponding events.  XML constructs may be split
        across several calls to feed.

        feed may raise SAXException."""
        raise NotImplementedError("This method must be implemented!")

    def prepareParser(self, source):
        """Called by the parse implementation so the SAX 2.0 driver can
        set itself up for parsing."""
        raise NotImplementedError("prepareParser must be overridden!")

    def close(self):
        """Signal that the entire XML document has been delivered via
        feed, letting the parser run its final document checks and flush
        its internal data buffer.

        The parser will not be ready to parse another document until
        the reset method has been called.

        close may raise SAXException."""
        raise NotImplementedError("This method must be implemented!")

    def reset(self):
        """Make the parser ready for a new document after close().

        The results of calling parse or feed after close without an
        intervening reset are undefined."""
        raise NotImplementedError("This method must be implemented!")
+
+# ===== LOCATOR =====
+
class Locator:
    """Associates a SAX event with a position in the document.

    A locator's results are only meaningful while a DocumentHandler
    callback is executing; at any other time they are unpredictable.
    This base implementation reports everything as unknown.
    """

    def getColumnNumber(self):
        """Column number where the current event ends, or -1 if unknown."""
        return -1

    def getLineNumber(self):
        """Line number where the current event ends, or -1 if unknown."""
        return -1

    def getPublicId(self):
        """Public identifier for the current event, or None if unknown."""
        return None

    def getSystemId(self):
        """System identifier for the current event, or None if unknown."""
        return None
+
+# ===== INPUTSOURCE =====
+
class InputSource:
    """Bundle of everything an XMLReader needs in order to read an
    entity.

    May carry a public identifier, a system identifier, a byte stream
    (possibly with character-encoding information) and/or a character
    stream.

    Applications construct InputSource objects to hand to
    XMLReader.parse and to return from EntityResolver.resolveEntity.

    The object belongs to the application: an XMLReader must never
    modify an InputSource passed to it, though it may make and modify
    copies."""

    def __init__(self, system_id = None):
        self.__system_id = system_id
        self.__public_id = None
        self.__encoding = None
        self.__bytefile = None
        self.__charfile = None

    def setPublicId(self, public_id):
        """Set the public identifier of this InputSource."""
        self.__public_id = public_id

    def getPublicId(self):
        """Return the public identifier of this InputSource."""
        return self.__public_id

    def setSystemId(self, system_id):
        """Set the system identifier of this InputSource."""
        self.__system_id = system_id

    def getSystemId(self):
        """Return the system identifier of this InputSource."""
        return self.__system_id

    def setEncoding(self, encoding):
        """Set the character encoding of this InputSource.

        The encoding must be a string acceptable in an XML encoding
        declaration (section 4.3.3 of the XML recommendation).  It is
        ignored whenever the InputSource also carries a character
        stream."""
        self.__encoding = encoding

    def getEncoding(self):
        """Return the character encoding of this InputSource."""
        return self.__encoding

    def setByteStream(self, bytefile):
        """Attach a byte stream: a Python file-like object that performs
        no byte-to-character conversion.

        A SAX parser ignores the byte stream when a character stream is
        also present, but prefers a byte stream over opening a URI
        connection itself.  If the application knows the stream's
        character encoding it should record it with setEncoding."""
        self.__bytefile = bytefile

    def getByteStream(self):
        """Return the byte stream for this input source, or None.

        Its character encoding, when known, is available from
        getEncoding."""
        return self.__bytefile

    def setCharacterStream(self, charfile):
        """Attach a character stream: a Unicode-wrapped file-like object
        that performs conversion to Unicode strings.

        When a character stream is present, a SAX parser ignores any
        byte stream and will not open a URI connection to the system
        identifier."""
        self.__charfile = charfile

    def getCharacterStream(self):
        """Return the character stream for this input source, or None."""
        return self.__charfile
+
+# ===== ATTRIBUTESIMPL =====
+
class AttributesImpl:
    """Non-namespace-aware implementation of the SAX Attributes
    interface, backed by a plain {name: value} dictionary.

    Without namespace processing a qualified name is identical to the
    plain name, so the *ByQName/*ByName accessors are trivial here.
    """

    def __init__(self, attrs):
        """attrs should be of the form {name : value}."""
        self._attrs = attrs

    def getLength(self):
        """Return the number of attributes."""
        return len(self._attrs)

    def getType(self, name):
        # Without DTD information every attribute is reported as CDATA.
        return "CDATA"

    def getValue(self, name):
        """Return the value of attribute `name`; raises KeyError if absent."""
        return self._attrs[name]

    def getValueByQName(self, name):
        return self._attrs[name]

    def getNameByQName(self, name):
        # `in` / KeyError(...) instead of py2-only has_key / `raise K, v`;
        # both forms work on Python 2.4 and remain forward-compatible.
        if name not in self._attrs:
            raise KeyError(name)
        return name

    def getQNameByName(self, name):
        if name not in self._attrs:
            raise KeyError(name)
        return name

    def getNames(self):
        return self._attrs.keys()

    def getQNames(self):
        return self._attrs.keys()

    def __len__(self):
        return len(self._attrs)

    def __getitem__(self, name):
        return self._attrs[name]

    def keys(self):
        return self._attrs.keys()

    def has_key(self, name):
        # kept as part of the public API; implemented via `in`
        return name in self._attrs

    def __contains__(self, name):
        return name in self._attrs

    def get(self, name, alternative=None):
        """Return the value for `name`, or `alternative` when absent."""
        return self._attrs.get(name, alternative)

    def copy(self):
        # Shallow copy: the new object shares the underlying dict.
        return self.__class__(self._attrs)

    def items(self):
        return self._attrs.items()

    def values(self):
        return self._attrs.values()
+
+# ===== ATTRIBUTESNSIMPL =====
+
class AttributesNSImpl(AttributesImpl):
    """Namespace-aware implementation of the SAX Attributes interface.

    Attribute names are (ns_uri, localname) pairs; a separate dict maps
    each pair to its qualified (prefixed) name.
    """

    def __init__(self, attrs, qnames):
        """NS-aware implementation.

        attrs should be of the form {(ns_uri, lname): value, ...}.
        qnames of the form {(ns_uri, lname): qname, ...}."""
        self._attrs = attrs
        self._qnames = qnames

    def getValueByQName(self, name):
        """Return the value of the attribute with qualified name `name`.

        Raises KeyError when no attribute has that qualified name."""
        # linear scan: qnames maps pair -> qname, so reverse lookup
        for (nsname, qname) in self._qnames.items():
            if qname == name:
                return self._attrs[nsname]
        # KeyError(...) instead of py2-only `raise KeyError, name`;
        # valid on Python 2.4 and forward-compatible.
        raise KeyError(name)

    def getNameByQName(self, name):
        """Return the (ns_uri, lname) pair for qualified name `name`.

        Raises KeyError when no attribute has that qualified name."""
        for (nsname, qname) in self._qnames.items():
            if qname == name:
                return nsname
        raise KeyError(name)

    def getQNameByName(self, name):
        """Return the qualified name for the (ns_uri, lname) pair `name`."""
        return self._qnames[name]

    def getQNames(self):
        """Return the qualified names of all attributes."""
        return self._qnames.values()

    def copy(self):
        # Shallow copy: shares both underlying dicts.
        return self.__class__(self._attrs, self._qnames)
+
+
def _test():
    # Minimal smoke test: the interface classes must at least instantiate.
    XMLReader()
    IncrementalParser()
    Locator()

if __name__ == "__main__":
    _test()
diff --git a/depot_tools/release/win/python_24/Lib/xmllib.py b/depot_tools/release/win/python_24/Lib/xmllib.py
new file mode 100644
index 0000000..2a189cd
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xmllib.py
@@ -0,0 +1,929 @@
+"""A parser for XML, using the derived class as static DTD."""
+
+# Author: Sjoerd Mullender.
+
+import re
+import string
+
+import warnings
+warnings.warn("The xmllib module is obsolete.  Use xml.sax instead.", DeprecationWarning)
+del warnings
+
+version = '0.3'
+
class Error(RuntimeError):
    """Exception raised for XML parsing errors in this module."""
    pass
+
# Regular expressions used for parsing

_S = '[ \t\r\n]+'                       # white space
_opS = '[ \t\r\n]*'                     # optional white space
_Name = '[a-zA-Z_:][-a-zA-Z0-9._:]*'    # valid XML name
_QStr = "(?:'[^']*'|\"[^\"]*\")"        # quoted XML string
illegal = re.compile('[^\t\r\n -\176\240-\377]') # illegal chars in content
interesting = re.compile('[]&<]')       # chars that end a run of plain data

amp = re.compile('&')
ref = re.compile('&(' + _Name + '|#[0-9]+|#x[0-9a-fA-F]+)[^-a-zA-Z0-9._:]')
entityref = re.compile('&(?P<name>' + _Name + ')[^-a-zA-Z0-9._:]')
charref = re.compile('&#(?P<char>[0-9]+[^0-9]|x[0-9a-fA-F]+[^0-9a-fA-F])')
space = re.compile(_S + '$')
newline = re.compile('\n')

# attribute with optional value; unquoted values are tolerated here and
# diagnosed later when accept_unquoted_attributes is off
attrfind = re.compile(
    _S + '(?P<name>' + _Name + ')'
    '(' + _opS + '=' + _opS +
    '(?P<value>'+_QStr+'|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
starttagopen = re.compile('<' + _Name)
starttagend = re.compile(_opS + '(?P<slash>/?)>')
starttagmatch = re.compile('<(?P<tagname>'+_Name+')'
                      '(?P<attrs>(?:'+attrfind.pattern+')*)'+
                      starttagend.pattern)
endtagopen = re.compile('</')
endbracket = re.compile(_opS + '>')
endbracketfind = re.compile('(?:[^>\'"]|'+_QStr+')*>')
tagfind = re.compile(_Name)
cdataopen = re.compile(r'<!\[CDATA\[')
cdataclose = re.compile(r'\]\]>')
# this matches one of the following:
# SYSTEM SystemLiteral
# PUBLIC PubidLiteral SystemLiteral
_SystemLiteral = '(?P<%s>'+_QStr+')'
_PublicLiteral = '(?P<%s>"[-\'\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*"|' \
                        "'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
_ExternalId = '(?:SYSTEM|' \
                 'PUBLIC'+_S+_PublicLiteral%'pubid'+ \
              ')'+_S+_SystemLiteral%'syslit'
doctype = re.compile('<!DOCTYPE'+_S+'(?P<name>'+_Name+')'
                     '(?:'+_S+_ExternalId+')?'+_opS)
xmldecl = re.compile('<\?xml'+_S+
                     'version'+_opS+'='+_opS+'(?P<version>'+_QStr+')'+
                     '(?:'+_S+'encoding'+_opS+'='+_opS+
                        "(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
                        '"[A-Za-z][-A-Za-z0-9._]*"))?'
                     '(?:'+_S+'standalone'+_opS+'='+_opS+
                        '(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?'+
                     _opS+'\?>')
procopen = re.compile(r'<\?(?P<proc>' + _Name + ')' + _opS)
procclose = re.compile(_opS + r'\?>')
commentopen = re.compile('<!--')
commentclose = re.compile('-->')
doubledash = re.compile('--')
# maps whitespace in attribute values to plain spaces (value normalization)
attrtrans = string.maketrans(' \r\n\t', '    ')

# definitions for XML namespaces
_NCName = '[a-zA-Z_][-a-zA-Z0-9._]*'    # XML Name, minus the ":"
ncname = re.compile(_NCName + '$')
qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?' # optional prefix
                   '(?P<local>' + _NCName + ')$')

xmlns = re.compile('xmlns(?::(?P<ncname>'+_NCName+'))?$')
+
+# XML parser base class -- find tags and call handler functions.
+# Usage: p = XMLParser(); p.feed(data); ...; p.close().
+# The dtd is defined by deriving a class which defines methods with
+# special names to handle tags: start_foo and end_foo to handle <foo>
+# and </foo>, respectively.  The data between tags is passed to the
+# parser by calling self.handle_data() with some data as argument (the
+# data may be split up in arbitrary chunks).
+
+class XMLParser:
+    attributes = {}                     # default, to be overridden
+    elements = {}                       # default, to be overridden
+
+    # parsing options, settable using keyword args in __init__
+    __accept_unquoted_attributes = 0
+    __accept_missing_endtag_name = 0
+    __map_case = 0
+    __accept_utf8 = 0
+    __translate_attribute_references = 1
+
+    # Interface -- initialize and reset this instance
    def __init__(self, **kw):
        """Initialize and reset this instance.

        Recognized keyword options (all default to off, except
        translate_attribute_references which defaults to on -- see the
        class-level defaults above): accept_unquoted_attributes,
        accept_missing_endtag_name, map_case, accept_utf8,
        translate_attribute_references.
        """
        self.__fixed = 0
        # copy any supplied options over the class-level defaults
        if 'accept_unquoted_attributes' in kw:
            self.__accept_unquoted_attributes = kw['accept_unquoted_attributes']
        if 'accept_missing_endtag_name' in kw:
            self.__accept_missing_endtag_name = kw['accept_missing_endtag_name']
        if 'map_case' in kw:
            self.__map_case = kw['map_case']
        if 'accept_utf8' in kw:
            self.__accept_utf8 = kw['accept_utf8']
        if 'translate_attribute_references' in kw:
            self.__translate_attribute_references = kw['translate_attribute_references']
        self.reset()
+
    def __fixelements(self):
        # Build self.elements from the start_*/end_* handler methods found
        # on the instance and along the class hierarchy (see __fixdict).
        self.__fixed = 1
        self.elements = {}
        self.__fixdict(self.__dict__)
        self.__fixclass(self.__class__)
+
    def __fixclass(self, kl):
        # Collect handler methods from class kl, then recurse into bases so
        # subclasses take precedence over base classes.
        self.__fixdict(kl.__dict__)
        for k in kl.__bases__:
            self.__fixclass(k)
+
+    def __fixdict(self, dict):
+        for key in dict.keys():
+            if key[:6] == 'start_':
+                tag = key[6:]
+                start, end = self.elements.get(tag, (None, None))
+                if start is None:
+                    self.elements[tag] = getattr(self, key), end
+            elif key[:4] == 'end_':
+                tag = key[4:]
+                start, end = self.elements.get(tag, (None, None))
+                if end is None:
+                    self.elements[tag] = start, getattr(self, key)
+
+    # Interface -- reset this instance.  Loses all unprocessed data
    def reset(self):
        """Reset this instance.  Loses all unprocessed data."""
        self.rawdata = ''
        self.stack = []
        self.nomoretags = 0
        self.literal = 0
        self.lineno = 1
        self.__at_start = 1
        self.__seen_doctype = None
        self.__seen_starttag = 0
        self.__use_namespaces = 0
        self.__namespaces = {'xml':None}   # xml is implicitly declared
        # backward compatibility hack: if elements not overridden,
        # fill it in ourselves
        if self.elements is XMLParser.elements:
            self.__fixelements()
+
+    # For derived classes only -- enter literal mode (CDATA) till EOF
    def setnomoretags(self):
        """Enter literal mode (CDATA) until EOF; for derived classes only."""
        self.nomoretags = self.literal = 1
+
+    # For derived classes only -- enter literal mode (CDATA)
    def setliteral(self, *args):
        """Enter literal mode (CDATA); for derived classes only."""
        self.literal = 1
+
+    # Interface -- feed some data to the parser.  Call this as
+    # often as you want, with as little or as much text as you
+    # want (may include '\n').  (This just saves the text, all the
+    # processing is done by goahead().)
+    def feed(self, data):
+        self.rawdata = self.rawdata + data
+        self.goahead(0)
+
+    # Interface -- handle the remaining data
    def close(self):
        """Handle all remaining buffered data as if followed by EOF."""
        self.goahead(1)
        if self.__fixed:
            self.__fixed = 0
            # remove self.elements so that we don't leak
            del self.elements
+
+    # Interface -- translate references
    def translate_references(self, data, all = 1):
        """Translate character references (and, when `all` is true, entity
        references) in `data` and return the result.

        Malformed or unknown references trigger syntax_error() and are
        left in the output.  Entity replacement text is rescanned so it
        may itself contain references.
        """
        if not self.__translate_attribute_references:
            return data
        i = 0
        while 1:
            res = amp.search(data, i)
            if res is None:
                # no more ampersands -- translation complete
                return data
            s = res.start(0)
            res = ref.match(data, s)
            if res is None:
                self.syntax_error("bogus `&'")
                i = s+1
                continue
            i = res.end(0)
            str = res.group(1)  # NOTE(review): shadows the builtin `str`
            rescan = 0
            if str[0] == '#':
                # numeric character reference, decimal or hex
                if str[1] == 'x':
                    str = chr(int(str[2:], 16))
                else:
                    str = chr(int(str[1:]))
                if data[i - 1] != ';':
                    self.syntax_error("`;' missing after char reference")
                    i = i-1
            elif all:
                if str in self.entitydefs:
                    # known entity: substitute and rescan the replacement
                    str = self.entitydefs[str]
                    rescan = 1
                elif data[i - 1] != ';':
                    self.syntax_error("bogus `&'")
                    i = s + 1 # just past the &
                    continue
                else:
                    self.syntax_error("reference to unknown entity `&%s;'" % str)
                    str = '&' + str + ';'
            elif data[i - 1] != ';':
                self.syntax_error("bogus `&'")
                i = s + 1 # just past the &
                continue

            # when we get here, str contains the translated text and i points
            # to the end of the string that is to be replaced
            data = data[:s] + str + data[i:]
            if rescan:
                i = s
            else:
                i = s + len(str)
+
+    # Interface - return a dictionary of all namespaces currently valid
+    def getnamespace(self):
+        """Return a dict of prefix -> namespace for every namespace
+        declaration on the current stack of open elements; inner
+        declarations override outer ones with the same prefix.
+        """
+        nsdict = {}
+        for t, d, nst in self.stack:
+            nsdict.update(d)
+        return nsdict
+
+    # Internal -- handle data as far as reasonable.  May leave state
+    # and data to be processed by a subsequent call.  If 'end' is
+    # true, force handling all data as if followed by EOF marker.
+    def goahead(self, end):
+        """Main parsing loop: consume self.rawdata from the front.
+
+        Dispatches to the parse_* helpers for start/end tags,
+        comments, CDATA sections, processing instructions, the
+        <?xml?> declaration, DOCTYPE, and references, keeping
+        self.lineno in step with newlines consumed.  Incomplete
+        trailing input is stored back into self.rawdata for a later
+        call; when `end` is true any leftover data is forced out and
+        missing end tags are reported and synthesized.
+        """
+        rawdata = self.rawdata
+        i = 0
+        n = len(rawdata)
+        while i < n:
+            if i > 0:
+                self.__at_start = 0
+            if self.nomoretags:
+                # everything after this point is treated as raw data
+                data = rawdata[i:n]
+                self.handle_data(data)
+                self.lineno = self.lineno + data.count('\n')
+                i = n
+                break
+            # find the next markup character ('<', '&' or ']')
+            res = interesting.search(rawdata, i)
+            if res:
+                j = res.start(0)
+            else:
+                j = n
+            if i < j:
+                # plain character data between markup
+                data = rawdata[i:j]
+                if self.__at_start and space.match(data) is None:
+                    self.syntax_error('illegal data at start of file')
+                self.__at_start = 0
+                if not self.stack and space.match(data) is None:
+                    self.syntax_error('data not in content')
+                if not self.__accept_utf8 and illegal.search(data):
+                    self.syntax_error('illegal character in content')
+                self.handle_data(data)
+                self.lineno = self.lineno + data.count('\n')
+            i = j
+            if i == n: break
+            if rawdata[i] == '<':
+                # markup: try each construct that can begin with '<'
+                if starttagopen.match(rawdata, i):
+                    if self.literal:
+                        data = rawdata[i]
+                        self.handle_data(data)
+                        self.lineno = self.lineno + data.count('\n')
+                        i = i+1
+                        continue
+                    k = self.parse_starttag(i)
+                    if k < 0: break
+                    self.__seen_starttag = 1
+                    self.lineno = self.lineno + rawdata[i:k].count('\n')
+                    i = k
+                    continue
+                if endtagopen.match(rawdata, i):
+                    k = self.parse_endtag(i)
+                    if k < 0: break
+                    self.lineno = self.lineno + rawdata[i:k].count('\n')
+                    i =  k
+                    continue
+                if commentopen.match(rawdata, i):
+                    if self.literal:
+                        data = rawdata[i]
+                        self.handle_data(data)
+                        self.lineno = self.lineno + data.count('\n')
+                        i = i+1
+                        continue
+                    k = self.parse_comment(i)
+                    if k < 0: break
+                    self.lineno = self.lineno + rawdata[i:k].count('\n')
+                    i = k
+                    continue
+                if cdataopen.match(rawdata, i):
+                    k = self.parse_cdata(i)
+                    if k < 0: break
+                    self.lineno = self.lineno + rawdata[i:k].count('\n')
+                    i = k
+                    continue
+                res = xmldecl.match(rawdata, i)
+                if res:
+                    if not self.__at_start:
+                        self.syntax_error("<?xml?> declaration not at start of document")
+                    version, encoding, standalone = res.group('version',
+                                                              'encoding',
+                                                              'standalone')
+                    if version[1:-1] != '1.0':
+                        raise Error('only XML version 1.0 supported')
+                    if encoding: encoding = encoding[1:-1]
+                    if standalone: standalone = standalone[1:-1]
+                    self.handle_xml(encoding, standalone)
+                    i = res.end(0)
+                    continue
+                res = procopen.match(rawdata, i)
+                if res:
+                    k = self.parse_proc(i)
+                    if k < 0: break
+                    self.lineno = self.lineno + rawdata[i:k].count('\n')
+                    i = k
+                    continue
+                res = doctype.match(rawdata, i)
+                if res:
+                    if self.literal:
+                        data = rawdata[i]
+                        self.handle_data(data)
+                        self.lineno = self.lineno + data.count('\n')
+                        i = i+1
+                        continue
+                    if self.__seen_doctype:
+                        self.syntax_error('multiple DOCTYPE elements')
+                    if self.__seen_starttag:
+                        self.syntax_error('DOCTYPE not at beginning of document')
+                    k = self.parse_doctype(res)
+                    if k < 0: break
+                    self.__seen_doctype = res.group('name')
+                    if self.__map_case:
+                        self.__seen_doctype = self.__seen_doctype.lower()
+                    self.lineno = self.lineno + rawdata[i:k].count('\n')
+                    i = k
+                    continue
+            elif rawdata[i] == '&':
+                # reference: character reference or entity reference
+                if self.literal:
+                    data = rawdata[i]
+                    self.handle_data(data)
+                    i = i+1
+                    continue
+                res = charref.match(rawdata, i)
+                if res is not None:
+                    i = res.end(0)
+                    if rawdata[i-1] != ';':
+                        self.syntax_error("`;' missing in charref")
+                        i = i-1
+                    if not self.stack:
+                        self.syntax_error('data not in content')
+                    self.handle_charref(res.group('char')[:-1])
+                    self.lineno = self.lineno + res.group(0).count('\n')
+                    continue
+                res = entityref.match(rawdata, i)
+                if res is not None:
+                    i = res.end(0)
+                    if rawdata[i-1] != ';':
+                        self.syntax_error("`;' missing in entityref")
+                        i = i-1
+                    name = res.group('name')
+                    if self.__map_case:
+                        name = name.lower()
+                    if name in self.entitydefs:
+                        # splice in the replacement text and reparse it
+                        self.rawdata = rawdata = rawdata[:res.start(0)] + self.entitydefs[name] + rawdata[i:]
+                        n = len(rawdata)
+                        i = res.start(0)
+                    else:
+                        self.unknown_entityref(name)
+                    self.lineno = self.lineno + res.group(0).count('\n')
+                    continue
+            elif rawdata[i] == ']':
+                # ']' is only interesting because ']]>' is illegal in content
+                if self.literal:
+                    data = rawdata[i]
+                    self.handle_data(data)
+                    i = i+1
+                    continue
+                if n-i < 3:
+                    break
+                if cdataclose.match(rawdata, i):
+                    self.syntax_error("bogus `]]>'")
+                self.handle_data(rawdata[i])
+                i = i+1
+                continue
+            else:
+                raise Error('neither < nor & ??')
+            # We get here only if incomplete matches but
+            # nothing else
+            break
+        # end while
+        if i > 0:
+            self.__at_start = 0
+        if end and i < n:
+            # EOF reached with unparseable data left: emit it one
+            # character at a time, reporting each as bogus
+            data = rawdata[i]
+            self.syntax_error("bogus `%s'" % data)
+            if not self.__accept_utf8 and illegal.search(data):
+                self.syntax_error('illegal character in content')
+            self.handle_data(data)
+            self.lineno = self.lineno + data.count('\n')
+            self.rawdata = rawdata[i+1:]
+            return self.goahead(end)
+        self.rawdata = rawdata[i:]
+        if end:
+            if not self.__seen_starttag:
+                self.syntax_error('no elements in file')
+            if self.stack:
+                self.syntax_error('missing end tags')
+                while self.stack:
+                    self.finish_endtag(self.stack[-1][0])
+
+    # Internal -- parse comment, return length or -1 if not terminated
+    def parse_comment(self, i):
+        """Parse a comment starting at index `i` in self.rawdata.
+
+        Reports `--` inside the comment, a `--->` ending and illegal
+        characters via syntax_error(), passes the comment text to
+        handle_comment(), and returns the index just past the closing
+        delimiter, or -1 if the comment is not yet complete.
+        """
+        rawdata = self.rawdata
+        if rawdata[i:i+4] != '<!--':
+            raise Error('unexpected call to handle_comment')
+        res = commentclose.search(rawdata, i+4)
+        if res is None:
+            # comment not terminated yet; wait for more data
+            return -1
+        if doubledash.search(rawdata, i+4, res.start(0)):
+            self.syntax_error("`--' inside comment")
+        if rawdata[res.start(0)-1] == '-':
+            self.syntax_error('comment cannot end in three dashes')
+        if not self.__accept_utf8 and \
+           illegal.search(rawdata, i+4, res.start(0)):
+            self.syntax_error('illegal character in comment')
+        self.handle_comment(rawdata[i+4: res.start(0)])
+        return res.end(0)
+
+    # Internal -- handle DOCTYPE tag, return length or -1 if not terminated
+    def parse_doctype(self, res):
+        """Parse a <!DOCTYPE ...> declaration matched by `res`.
+
+        Extracts the document type name, public id and system literal
+        (quotes stripped, public id whitespace-normalized), skips an
+        optional internal subset in [...], calls handle_doctype(), and
+        returns the index past the closing '>', or -1 if the
+        declaration is not yet complete in self.rawdata.
+        """
+        rawdata = self.rawdata
+        n = len(rawdata)
+        name = res.group('name')
+        if self.__map_case:
+            name = name.lower()
+        pubid, syslit = res.group('pubid', 'syslit')
+        if pubid is not None:
+            pubid = pubid[1:-1]         # remove quotes
+            pubid = ' '.join(pubid.split()) # normalize
+        if syslit is not None: syslit = syslit[1:-1] # remove quotes
+        j = k = res.end(0)
+        if k >= n:
+            return -1
+        if rawdata[k] == '[':
+            # internal subset: scan to the matching ']' while ignoring
+            # markup nested in quoted literals
+            level = 0
+            k = k+1
+            dq = sq = 0   # inside double-quoted / single-quoted literal
+            while k < n:
+                c = rawdata[k]
+                if not sq and c == '"':
+                    dq = not dq
+                elif not dq and c == "'":
+                    sq = not sq
+                elif sq or dq:
+                    pass
+                elif level <= 0 and c == ']':
+                    res = endbracket.match(rawdata, k+1)
+                    if res is None:
+                        return -1
+                    self.handle_doctype(name, pubid, syslit, rawdata[j+1:k])
+                    return res.end(0)
+                elif c == '<':
+                    level = level + 1
+                elif c == '>':
+                    level = level - 1
+                    if level < 0:
+                        self.syntax_error("bogus `>' in DOCTYPE")
+                k = k+1
+        res = endbracketfind.match(rawdata, k)
+        if res is None:
+            return -1
+        if endbracket.match(rawdata, k) is None:
+            self.syntax_error('garbage in DOCTYPE')
+        self.handle_doctype(name, pubid, syslit, None)
+        return res.end(0)
+
+    # Internal -- handle CDATA tag, return length or -1 if not terminated
+    def parse_cdata(self, i):
+        """Parse a <![CDATA[...]]> section starting at index `i`.
+
+        Passes the raw section contents to handle_cdata() and returns
+        the index past the closing ']]>', or -1 if the section is not
+        yet terminated in self.rawdata.
+        """
+        rawdata = self.rawdata
+        if rawdata[i:i+9] != '<![CDATA[':
+            raise Error('unexpected call to parse_cdata')
+        res = cdataclose.search(rawdata, i+9)
+        if res is None:
+            return -1
+        if not self.__accept_utf8 and \
+           illegal.search(rawdata, i+9, res.start(0)):
+            self.syntax_error('illegal character in CDATA')
+        if not self.stack:
+            self.syntax_error('CDATA not in content')
+        self.handle_cdata(rawdata[i+9:res.start(0)])
+        return res.end(0)
+
+    # Attributes recognized on the old-style <?xml:namespace?> declaration.
+    __xml_namespace_attributes = {'ns':None, 'src':None, 'prefix':None}
+    # Internal -- handle a processing instruction tag
+    def parse_proc(self, i):
+        """Parse a processing instruction starting at index `i`.
+
+        Handles the deprecated <?xml:namespace ...?> declaration
+        specially (recording the prefix->ns mapping after validation);
+        all other PIs are passed to handle_proc().  Returns the index
+        past the closing '?>', or -1 if the PI is not yet terminated.
+        """
+        rawdata = self.rawdata
+        end = procclose.search(rawdata, i)
+        if end is None:
+            return -1
+        j = end.start(0)
+        if not self.__accept_utf8 and illegal.search(rawdata, i+2, j):
+            self.syntax_error('illegal character in processing instruction')
+        res = tagfind.match(rawdata, i+2)
+        if res is None:
+            raise Error('unexpected call to parse_proc')
+        k = res.end(0)
+        name = res.group(0)
+        if self.__map_case:
+            name = name.lower()
+        if name == 'xml:namespace':
+            self.syntax_error('old-fashioned namespace declaration')
+            self.__use_namespaces = -1
+            # namespace declaration
+            # this must come after the <?xml?> declaration (if any)
+            # and before the <!DOCTYPE> (if any).
+            if self.__seen_doctype or self.__seen_starttag:
+                self.syntax_error('xml:namespace declaration too late in document')
+            attrdict, namespace, k = self.parse_attributes(name, k, j)
+            if namespace:
+                self.syntax_error('namespace declaration inside namespace declaration')
+            for attrname in attrdict.keys():
+                if not attrname in self.__xml_namespace_attributes:
+                    self.syntax_error("unknown attribute `%s' in xml:namespace tag" % attrname)
+            if not 'ns' in attrdict or not 'prefix' in attrdict:
+                self.syntax_error('xml:namespace without required attributes')
+            prefix = attrdict.get('prefix')
+            if ncname.match(prefix) is None:
+                self.syntax_error('xml:namespace illegal prefix value')
+                return end.end(0)
+            if prefix in self.__namespaces:
+                self.syntax_error('xml:namespace prefix not unique')
+            self.__namespaces[prefix] = attrdict['ns']
+        else:
+            # PI target names beginning with 'xml' are reserved
+            if name.lower() == 'xml':
+                self.syntax_error('illegal processing instruction target name')
+            self.handle_proc(name, rawdata[k:j])
+        return end.end(0)
+
+    # Internal -- parse attributes between i and j
+    def parse_attributes(self, tag, i, j):
+        """Parse attributes of `tag` between indexes `i` and `j`.
+
+        Returns a 3-tuple (attrdict, namespace, i): attrdict maps
+        attribute name to its reference-translated value, namespace
+        maps ncname prefixes declared via xmlns attributes to their
+        values, and i is the index where attribute scanning stopped.
+        Missing values, unquoted values (unless accepted), '<' in a
+        value and duplicate attributes are reported via syntax_error().
+        """
+        rawdata = self.rawdata
+        attrdict = {}
+        namespace = {}
+        while i < j:
+            res = attrfind.match(rawdata, i)
+            if res is None:
+                break
+            attrname, attrvalue = res.group('name', 'value')
+            if self.__map_case:
+                attrname = attrname.lower()
+            i = res.end(0)
+            if attrvalue is None:
+                self.syntax_error("no value specified for attribute `%s'" % attrname)
+                attrvalue = attrname
+            elif attrvalue[:1] == "'" == attrvalue[-1:] or \
+                 attrvalue[:1] == '"' == attrvalue[-1:]:
+                # strip matching quotes
+                attrvalue = attrvalue[1:-1]
+            elif not self.__accept_unquoted_attributes:
+                self.syntax_error("attribute `%s' value not quoted" % attrname)
+            res = xmlns.match(attrname)
+            if res is not None:
+                # namespace declaration
+                ncname = res.group('ncname')
+                namespace[ncname or ''] = attrvalue or None
+                if not self.__use_namespaces:
+                    self.__use_namespaces = len(self.stack)+1
+                continue
+            if '<' in attrvalue:
+                self.syntax_error("`<' illegal in attribute value")
+            if attrname in attrdict:
+                self.syntax_error("attribute `%s' specified twice" % attrname)
+            attrvalue = attrvalue.translate(attrtrans)
+            attrdict[attrname] = self.translate_references(attrvalue)
+        return attrdict, namespace, i
+
+    # Internal -- handle starttag, return length or -1 if not terminated
+    def parse_starttag(self, i):
+        """Parse a start tag beginning at index `i` in self.rawdata.
+
+        Pushes (tagname, nsdict, nstag) on self.stack, resolves
+        namespace prefixes on the tag and its attributes when
+        namespace processing is active, fills in declared attribute
+        defaults from self.attributes, then dispatches through
+        finish_starttag() (and finish_endtag() for <tag/>).  Returns
+        the index past the tag, or -1 if the tag is incomplete.
+        """
+        rawdata = self.rawdata
+        # i points to start of tag
+        end = endbracketfind.match(rawdata, i+1)
+        if end is None:
+            return -1
+        tag = starttagmatch.match(rawdata, i)
+        if tag is None or tag.end(0) != end.end(0):
+            self.syntax_error('garbage in starttag')
+            return end.end(0)
+        nstag = tagname = tag.group('tagname')
+        if self.__map_case:
+            nstag = tagname = nstag.lower()
+        if not self.__seen_starttag and self.__seen_doctype and \
+           tagname != self.__seen_doctype:
+            self.syntax_error('starttag does not match DOCTYPE')
+        if self.__seen_starttag and not self.stack:
+            self.syntax_error('multiple elements on top level')
+        k, j = tag.span('attrs')
+        attrdict, nsdict, k = self.parse_attributes(tagname, k, j)
+        self.stack.append((tagname, nsdict, nstag))
+        if self.__use_namespaces:
+            res = qname.match(tagname)
+        else:
+            res = None
+        if res is not None:
+            # split prefix:local and resolve the prefix against the
+            # declarations in scope (innermost wins)
+            prefix, nstag = res.group('prefix', 'local')
+            if prefix is None:
+                prefix = ''
+            ns = None
+            for t, d, nst in self.stack:
+                if prefix in d:
+                    ns = d[prefix]
+            if ns is None and prefix != '':
+                ns = self.__namespaces.get(prefix)
+            if ns is not None:
+                nstag = ns + ' ' + nstag
+            elif prefix != '':
+                nstag = prefix + ':' + nstag # undo split
+            self.stack[-1] = tagname, nsdict, nstag
+        # translate namespace of attributes
+        attrnamemap = {} # map from new name to old name (used for error reporting)
+        for key in attrdict.keys():
+            attrnamemap[key] = key
+        if self.__use_namespaces:
+            nattrdict = {}
+            for key, val in attrdict.items():
+                okey = key
+                res = qname.match(key)
+                if res is not None:
+                    aprefix, key = res.group('prefix', 'local')
+                    if self.__map_case:
+                        key = key.lower()
+                    if aprefix is not None:
+                        ans = None
+                        for t, d, nst in self.stack:
+                            if aprefix in d:
+                                ans = d[aprefix]
+                        if ans is None:
+                            ans = self.__namespaces.get(aprefix)
+                        if ans is not None:
+                            key = ans + ' ' + key
+                        else:
+                            key = aprefix + ':' + key
+                nattrdict[key] = val
+                attrnamemap[key] = okey
+            attrdict = nattrdict
+        attributes = self.attributes.get(nstag)
+        if attributes is not None:
+            # check against the declared attribute list and supply defaults
+            for key in attrdict.keys():
+                if not key in attributes:
+                    self.syntax_error("unknown attribute `%s' in tag `%s'" % (attrnamemap[key], tagname))
+            for key, val in attributes.items():
+                if val is not None and not key in attrdict:
+                    attrdict[key] = val
+        method = self.elements.get(nstag, (None, None))[0]
+        self.finish_starttag(nstag, attrdict, method)
+        if tag.group('slash') == '/':
+            # empty-element tag <tag/>: close it immediately
+            self.finish_endtag(tagname)
+        return tag.end(0)
+
+    # Internal -- parse endtag
+    def parse_endtag(self, i):
+        """Parse an end tag beginning at index `i` in self.rawdata.
+
+        Determines the tag name (tolerating a missing name when
+        configured to), reports garbage before '>', and closes the
+        element via finish_endtag().  Returns the index past the tag,
+        or -1 if the tag is not yet complete.
+        """
+        rawdata = self.rawdata
+        end = endbracketfind.match(rawdata, i+1)
+        if end is None:
+            return -1
+        res = tagfind.match(rawdata, i+2)
+        if res is None:
+            if self.literal:
+                self.handle_data(rawdata[i])
+                return i+1
+            if not self.__accept_missing_endtag_name:
+                self.syntax_error('no name specified in end tag')
+            # assume the innermost open element is being closed
+            tag = self.stack[-1][0]
+            k = i+2
+        else:
+            tag = res.group(0)
+            if self.__map_case:
+                tag = tag.lower()
+            if self.literal:
+                # in literal mode only the matching end tag is markup
+                if not self.stack or tag != self.stack[-1][0]:
+                    self.handle_data(rawdata[i])
+                    return i+1
+            k = res.end(0)
+        if endbracket.match(rawdata, k) is None:
+            self.syntax_error('garbage in end tag')
+        self.finish_endtag(tag)
+        return end.end(0)
+
+    # Internal -- finish processing of start tag
+    def finish_starttag(self, tagname, attrdict, method):
+        """Dispatch a completed start tag: call handle_starttag() when
+        a registered start-tag `method` exists, otherwise
+        unknown_starttag().
+        """
+        if method is not None:
+            self.handle_starttag(tagname, method, attrdict)
+        else:
+            self.unknown_starttag(tagname, attrdict)
+
+    # Internal -- finish processing of end tag
+    def finish_endtag(self, tag):
+        """Close element `tag`, popping the stack down to (and
+        including) its entry.
+
+        A name-less tag closes the innermost element; an unopened tag
+        is reported and ignored.  Elements popped above the matching
+        one are reported as missing close tags.  Each popped element
+        is dispatched through its registered end-tag method or
+        unknown_endtag().
+        """
+        self.literal = 0
+        if not tag:
+            self.syntax_error('name-less end tag')
+            found = len(self.stack) - 1
+            if found < 0:
+                self.unknown_endtag(tag)
+                return
+        else:
+            # find the outermost-to-innermost last occurrence of tag
+            found = -1
+            for i in range(len(self.stack)):
+                if tag == self.stack[i][0]:
+                    found = i
+            if found == -1:
+                self.syntax_error('unopened end tag')
+                return
+        while len(self.stack) > found:
+            if found < len(self.stack) - 1:
+                self.syntax_error('missing close tag for %s' % self.stack[-1][2])
+            nstag = self.stack[-1][2]
+            method = self.elements.get(nstag, (None, None))[1]
+            if method is not None:
+                self.handle_endtag(nstag, method)
+            else:
+                self.unknown_endtag(nstag)
+            if self.__use_namespaces == len(self.stack):
+                # leaving the scope where namespace use began
+                self.__use_namespaces = 0
+            del self.stack[-1]
+
+    # Overridable -- handle xml processing instruction
+    def handle_xml(self, encoding, standalone):
+        """Called for the <?xml?> declaration; default does nothing."""
+        pass
+
+    # Overridable -- handle DOCTYPE
+    def handle_doctype(self, tag, pubid, syslit, data):
+        """Called for a <!DOCTYPE> declaration; default does nothing."""
+        pass
+
+    # Overridable -- handle start tag
+    def handle_starttag(self, tag, method, attrs):
+        """Invoke the registered start-tag `method` with the attribute
+        dict; subclasses may override to change dispatching."""
+        method(attrs)
+
+    # Overridable -- handle end tag
+    def handle_endtag(self, tag, method):
+        """Invoke the registered end-tag `method`; subclasses may
+        override to change dispatching."""
+        method()
+
+    # Example -- handle character reference, no need to override
+    def handle_charref(self, name):
+        """Decode character reference `name` ('ddd' or 'xhh') and feed
+        the character to handle_data(); out-of-range (not 0..255) or
+        unparseable references go to unknown_charref().
+        """
+        try:
+            if name[0] == 'x':
+                n = int(name[1:], 16)
+            else:
+                n = int(name)
+        except ValueError:
+            self.unknown_charref(name)
+            return
+        if not 0 <= n <= 255:
+            # chr() only covers 8-bit values here
+            self.unknown_charref(name)
+            return
+        self.handle_data(chr(n))
+
+    # Definition of entities -- derived classes may override
+    # Maps entity name to replacement text; the predefined XML
+    # entities are defined as character references so that their
+    # expansion is not re-parsed as markup.
+    entitydefs = {'lt': '&#60;',        # must use charref
+                  'gt': '&#62;',
+                  'amp': '&#38;',       # must use charref
+                  'quot': '&#34;',
+                  'apos': '&#39;',
+                  }
+
+    # Example -- handle data, should be overridden
+    def handle_data(self, data):
+        """Called with character data; default discards it."""
+        pass
+
+    # Example -- handle cdata, could be overridden
+    def handle_cdata(self, data):
+        """Called with CDATA section contents; default discards it."""
+        pass
+
+    # Example -- handle comment, could be overridden
+    def handle_comment(self, data):
+        """Called with comment contents; default discards it."""
+        pass
+
+    # Example -- handle processing instructions, could be overridden
+    def handle_proc(self, name, data):
+        """Called for a processing instruction; default discards it."""
+        pass
+
+    # Example -- handle relatively harmless syntax errors, could be overridden
+    def syntax_error(self, message):
+        """Report a recoverable syntax error; the default raises Error
+        with the current line number, aborting the parse."""
+        raise Error('Syntax error at line %d: %s' % (self.lineno, message))
+
+    # To be overridden -- handlers for unknown objects
+    def unknown_starttag(self, tag, attrs): pass
+    def unknown_endtag(self, tag): pass
+    def unknown_charref(self, ref): pass
+    # Note: the default for unknown entity references reports an error
+    # via syntax_error() rather than silently ignoring it.
+    def unknown_entityref(self, name):
+        self.syntax_error("reference to unknown entity `&%s;'" % name)
+
+
+class TestXMLParser(XMLParser):
+    """Demonstration subclass: prints every parser event to stdout.
+
+    Character data is accumulated in self.testdata and flushed (as a
+    'data:' line) whenever its repr grows long enough or another event
+    occurs, so output groups adjacent data chunks together.
+    """
+
+    def __init__(self, **kw):
+        self.testdata = ""
+        XMLParser.__init__(self, **kw)
+
+    def handle_xml(self, encoding, standalone):
+        self.flush()
+        print 'xml: encoding =',encoding,'standalone =',standalone
+
+    def handle_doctype(self, tag, pubid, syslit, data):
+        self.flush()
+        print 'DOCTYPE:',tag, repr(data)
+
+    def handle_data(self, data):
+        # buffer data; flush once the accumulated repr gets long
+        self.testdata = self.testdata + data
+        if len(repr(self.testdata)) >= 70:
+            self.flush()
+
+    def flush(self):
+        # print and reset any buffered character data
+        data = self.testdata
+        if data:
+            self.testdata = ""
+            print 'data:', repr(data)
+
+    def handle_cdata(self, data):
+        self.flush()
+        print 'cdata:', repr(data)
+
+    def handle_proc(self, name, data):
+        self.flush()
+        print 'processing:',name,repr(data)
+
+    def handle_comment(self, data):
+        self.flush()
+        r = repr(data)
+        if len(r) > 68:
+            # abbreviate long comments in the output
+            r = r[:32] + '...' + r[-32:]
+        print 'comment:', r
+
+    def syntax_error(self, message):
+        # report instead of raising so parsing continues
+        print 'error at line %d:' % self.lineno, message
+
+    def unknown_starttag(self, tag, attrs):
+        self.flush()
+        if not attrs:
+            print 'start tag: <' + tag + '>'
+        else:
+            print 'start tag: <' + tag,
+            for name, value in attrs.items():
+                print name + '=' + '"' + value + '"',
+            print '>'
+
+    def unknown_endtag(self, tag):
+        self.flush()
+        print 'end tag: </' + tag + '>'
+
+    def unknown_entityref(self, ref):
+        self.flush()
+        print '*** unknown entity ref: &' + ref + ';'
+
+    def unknown_charref(self, ref):
+        self.flush()
+        print '*** unknown char ref: &#' + ref + ';'
+
+    def close(self):
+        XMLParser.close(self)
+        self.flush()
+
+def test(args = None):
+    """Command-line driver: parse an XML file and print the events.
+
+    Options: -s use the silent base XMLParser instead of
+    TestXMLParser; -t feed the whole document at once and report the
+    elapsed time (otherwise data is fed one character at a time to
+    exercise incremental parsing).  The positional argument is the
+    file to parse ('-' for stdin, default 'test.xml').
+    """
+    import sys, getopt
+    from time import time
+
+    if not args:
+        args = sys.argv[1:]
+
+    opts, args = getopt.getopt(args, 'st')
+    klass = TestXMLParser
+    do_time = 0
+    for o, a in opts:
+        if o == '-s':
+            klass = XMLParser
+        elif o == '-t':
+            do_time = 1
+
+    if args:
+        file = args[0]
+    else:
+        file = 'test.xml'
+
+    if file == '-':
+        f = sys.stdin
+    else:
+        try:
+            f = open(file, 'r')
+        except IOError, msg:
+            print file, ":", msg
+            sys.exit(1)
+
+    data = f.read()
+    if f is not sys.stdin:
+        f.close()
+
+    x = klass()
+    t0 = time()
+    try:
+        if do_time:
+            # single feed: measures raw throughput
+            x.feed(data)
+            x.close()
+        else:
+            # char-at-a-time feed: exercises incremental parsing
+            for c in data:
+                x.feed(c)
+            x.close()
+    except Error, msg:
+        t1 = time()
+        print msg
+        if do_time:
+            print 'total time: %g' % (t1-t0)
+        sys.exit(1)
+    t1 = time()
+    if do_time:
+        print 'total time: %g' % (t1-t0)
+
+
+# Run the demo driver when executed as a script.
+if __name__ == '__main__':
+    test()
diff --git a/depot_tools/release/win/python_24/Lib/xmlrpclib.py b/depot_tools/release/win/python_24/Lib/xmlrpclib.py
new file mode 100644
index 0000000..8ff467b7
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/xmlrpclib.py
@@ -0,0 +1,1434 @@
+#
+# XML-RPC CLIENT LIBRARY
+# $Id: xmlrpclib.py,v 1.36.2.1 2005/02/11 17:59:58 fdrake Exp $
+#
+# an XML-RPC client interface for Python.
+#
+# the marshalling and response parser code can also be used to
+# implement XML-RPC servers.
+#
+# Notes:
+# this version is designed to work with Python 2.1 or newer.
+#
+# History:
+# 1999-01-14 fl  Created
+# 1999-01-15 fl  Changed dateTime to use localtime
+# 1999-01-16 fl  Added Binary/base64 element, default to RPC2 service
+# 1999-01-19 fl  Fixed array data element (from Skip Montanaro)
+# 1999-01-21 fl  Fixed dateTime constructor, etc.
+# 1999-02-02 fl  Added fault handling, handle empty sequences, etc.
+# 1999-02-10 fl  Fixed problem with empty responses (from Skip Montanaro)
+# 1999-06-20 fl  Speed improvements, pluggable parsers/transports (0.9.8)
+# 2000-11-28 fl  Changed boolean to check the truth value of its argument
+# 2001-02-24 fl  Added encoding/Unicode/SafeTransport patches
+# 2001-02-26 fl  Added compare support to wrappers (0.9.9/1.0b1)
+# 2001-03-28 fl  Make sure response tuple is a singleton
+# 2001-03-29 fl  Don't require empty params element (from Nicholas Riley)
+# 2001-06-10 fl  Folded in _xmlrpclib accelerator support (1.0b2)
+# 2001-08-20 fl  Base xmlrpclib.Error on built-in Exception (from Paul Prescod)
+# 2001-09-03 fl  Allow Transport subclass to override getparser
+# 2001-09-10 fl  Lazy import of urllib, cgi, xmllib (20x import speedup)
+# 2001-10-01 fl  Remove containers from memo cache when done with them
+# 2001-10-01 fl  Use faster escape method (80% dumps speedup)
+# 2001-10-02 fl  More dumps microtuning
+# 2001-10-04 fl  Make sure import expat gets a parser (from Guido van Rossum)
+# 2001-10-10 sm  Allow long ints to be passed as ints if they don't overflow
+# 2001-10-17 sm  Test for int and long overflow (allows use on 64-bit systems)
+# 2001-11-12 fl  Use repr() to marshal doubles (from Paul Felix)
+# 2002-03-17 fl  Avoid buffered read when possible (from James Rucker)
+# 2002-04-07 fl  Added pythondoc comments
+# 2002-04-16 fl  Added __str__ methods to datetime/binary wrappers
+# 2002-05-15 fl  Added error constants (from Andrew Kuchling)
+# 2002-06-27 fl  Merged with Python CVS version
+# 2002-10-22 fl  Added basic authentication (based on code from Phillip Eby)
+# 2003-01-22 sm  Add support for the bool type
+# 2003-02-27 gvr Remove apply calls
+# 2003-04-24 sm  Use cStringIO if available
+# 2003-04-25 ak  Add support for nil
+# 2003-06-15 gn  Add support for time.struct_time
+# 2003-07-12 gp  Correct marshalling of Faults
+# 2003-10-31 mvl Add multicall support
+# 2004-08-20 mvl Bump minimum supported Python version to 2.1
+#
+# Copyright (c) 1999-2002 by Secret Labs AB.
+# Copyright (c) 1999-2002 by Fredrik Lundh.
+#
+# info@pythonware.com
+# http://www.pythonware.com
+#
+# --------------------------------------------------------------------
+# The XML-RPC client interface is
+#
+# Copyright (c) 1999-2002 by Secret Labs AB
+# Copyright (c) 1999-2002 by Fredrik Lundh
+#
+# By obtaining, using, and/or copying this software and/or its
+# associated documentation, you agree that you have read, understood,
+# and will comply with the following terms and conditions:
+#
+# Permission to use, copy, modify, and distribute this software and
+# its associated documentation for any purpose and without fee is
+# hereby granted, provided that the above copyright notice appears in
+# all copies, and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of
+# Secret Labs AB or the author not be used in advertising or publicity
+# pertaining to distribution of the software without specific, written
+# prior permission.
+#
+# SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
+# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
+# ABILITY AND FITNESS.  IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR
+# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
+# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
+# OF THIS SOFTWARE.
+# --------------------------------------------------------------------
+
+#
+# things to look into some day:
+
+# TODO: sort out True/False/boolean issues for Python 2.3
+
+"""
+An XML-RPC client interface for Python.
+
+The marshalling and response parser code can also be used to
+implement XML-RPC servers.
+
+Exported exceptions:
+
+  Error          Base class for client errors
+  ProtocolError  Indicates an HTTP protocol error
+  ResponseError  Indicates a broken response package
+  Fault          Indicates an XML-RPC fault package
+
+Exported classes:
+
+  ServerProxy    Represents a logical connection to an XML-RPC server
+
+  MultiCall      Executor of boxcared xmlrpc requests
+  Boolean        boolean wrapper to generate a "boolean" XML-RPC value
+  DateTime       dateTime wrapper for an ISO 8601 string or time tuple or
+                 localtime integer value to generate a "dateTime.iso8601"
+                 XML-RPC value
+  Binary         binary data wrapper
+
+  SlowParser     Slow but safe standard parser (based on xmllib)
+  Marshaller     Generate an XML-RPC params chunk from a Python data structure
+  Unmarshaller   Unmarshal an XML-RPC response from incoming XML event message
+  Transport      Handles an HTTP transaction to an XML-RPC server
+  SafeTransport  Handles an HTTPS transaction to an XML-RPC server
+
+Exported constants:
+
+  True
+  False
+
+Exported functions:
+
+  boolean        Convert any Python value to an XML-RPC boolean
+  getparser      Create instance of the fastest available parser & attach
+                 to an unmarshalling object
+  dumps          Convert an argument tuple or a Fault instance to an XML-RPC
+                 request (or response, if the methodresponse option is used).
+  loads          Convert an XML-RPC packet to unmarshalled data plus a method
+                 name (None if not present).
+"""
+
+import re, string, time, operator
+
+from types import *
+
+# --------------------------------------------------------------------
+# Internal stuff
+
+# Feature probes: detect optional interpreter capabilities once at import
+# time so the rest of the module can branch on plain flags.
+try:
+    unicode
+except NameError:
+    unicode = None # unicode support not available
+
+try:
+    # true when the interpreter has a genuine bool type (presumably
+    # Python 2.3+; on older versions False itself raises NameError)
+    _bool_is_builtin = False.__class__.__name__ == "bool"
+except NameError:
+    _bool_is_builtin = 0
+
+def _decode(data, encoding, is8bit=re.compile("[\x80-\xff]").search):
+    # Decode a byte string to unicode, but only when it actually contains
+    # non-ASCII bytes (the compiled search is bound as a default argument
+    # so it is looked up once, not per call).  Pure-ASCII data is returned
+    # unchanged, and everything passes through when unicode is unavailable.
+    if unicode and encoding and is8bit(data):
+        data = unicode(data, encoding)
+    return data
+
+def escape(s, replace=string.replace):
+    # Escape the three characters that are significant in XML character
+    # data: & first (so later replacements are not double-escaped),
+    # then < and >.  Quotes are not escaped; values are emitted as
+    # element content, not attributes.
+    s = replace(s, "&", "&amp;")
+    s = replace(s, "<", "&lt;")
+    return replace(s, ">", "&gt;",)
+
+# _stringify: collapse unicode strings back to plain 8-bit strings when
+# they are pure ASCII, so callers get native str objects where possible.
+# Defined as a no-op when the interpreter lacks unicode support.
+if unicode:
+    def _stringify(string):
+        # convert to 7-bit ascii if possible
+        try:
+            return string.encode("ascii")
+        except UnicodeError:
+            return string
+else:
+    def _stringify(string):
+        return string
+
+__version__ = "1.0.1"
+
+# xmlrpc integer limits
+# XML-RPC <int>/<i4> values are signed 32-bit; the marshaller rejects
+# anything outside this range (see dump_int / dump_long).
+MAXINT =  2L**31-1
+MININT = -2L**31
+
+# --------------------------------------------------------------------
+# Error constants (from Dan Libby's specification at
+# http://xmlrpc-epi.sourceforge.net/specs/rfc.fault_codes.php)
+
+# Ranges of errors
+PARSE_ERROR       = -32700
+SERVER_ERROR      = -32600
+APPLICATION_ERROR = -32500
+SYSTEM_ERROR      = -32400
+TRANSPORT_ERROR   = -32300
+
+# Specific errors
+NOT_WELLFORMED_ERROR  = -32700
+UNSUPPORTED_ENCODING  = -32701
+INVALID_ENCODING_CHAR = -32702
+INVALID_XMLRPC        = -32600
+METHOD_NOT_FOUND      = -32601
+INVALID_METHOD_PARAMS = -32602
+INTERNAL_ERROR        = -32603
+
+# --------------------------------------------------------------------
+# Exceptions
+
+##
+# Base class for all kinds of client-side errors.
+
+class Error(Exception):
+    """Base class for client errors."""
+    def __str__(self):
+        # str() mirrors repr() so subclasses only customize __repr__.
+        return repr(self)
+
+##
+# Indicates an HTTP-level protocol error.  This is raised by the HTTP
+# transport layer, if the server returns an error code other than 200
+# (OK).
+#
+# @param url The target URL.
+# @param errcode The HTTP error code.
+# @param errmsg The HTTP error message.
+# @param headers The HTTP header dictionary.
+
+class ProtocolError(Error):
+    """Indicates an HTTP protocol error.
+
+    Attributes: url (target of the failed request), errcode (HTTP
+    status code), errmsg (HTTP reason phrase), headers (response
+    header dictionary).
+    """
+    def __init__(self, url, errcode, errmsg, headers):
+        Error.__init__(self)
+        self.url = url
+        self.errcode = errcode
+        self.errmsg = errmsg
+        self.headers = headers
+    def __repr__(self):
+        return (
+            "<ProtocolError for %s: %s %s>" %
+            (self.url, self.errcode, self.errmsg)
+            )
+
+##
+# Indicates a broken XML-RPC response package.  This exception is
+# raised by the unmarshalling layer, if the XML-RPC response is
+# malformed.
+
+class ResponseError(Error):
+    """Indicates a broken response package.
+
+    Raised by the unmarshalling layer when the XML-RPC response is
+    structurally malformed (see Unmarshaller.close).
+    """
+    pass
+
+##
+# Indicates an XML-RPC fault response package.  This exception is
+# raised by the unmarshalling layer, if the XML-RPC response contains
+# a fault string.  This exception can also used as a class, to
+# generate a fault XML-RPC message.
+#
+# @param faultCode The XML-RPC fault code.
+# @param faultString The XML-RPC fault string.
+
+class Fault(Error):
+    """Indicates an XML-RPC fault package.
+
+    Attributes: faultCode (integer fault code), faultString (human
+    readable fault description).
+    """
+    def __init__(self, faultCode, faultString, **extra):
+        # Extra keyword arguments are accepted for call-site
+        # flexibility but deliberately discarded.
+        Error.__init__(self)
+        self.faultCode = faultCode
+        self.faultString = faultString
+    def __repr__(self):
+        return (
+            "<Fault %s: %s>" %
+            (self.faultCode, repr(self.faultString))
+            )
+
+# --------------------------------------------------------------------
+# Special values
+
+##
+# Wrapper for XML-RPC boolean values.  Use the xmlrpclib.True and
+# xmlrpclib.False constants, or the xmlrpclib.boolean() function, to
+# generate boolean XML-RPC values.
+#
+# @param value A boolean value.  Any true value is interpreted as True,
+#              all other values are interpreted as False.
+
+# When the interpreter has a native bool, reuse it directly; otherwise
+# provide a wrapper class with the same truth semantics.  Either way the
+# module exports Boolean, boolean, True and False.
+if _bool_is_builtin:
+    boolean = Boolean = bool
+    # to avoid breaking code which references xmlrpclib.{True,False}
+    True, False = True, False
+else:
+    class Boolean:
+        """Boolean-value wrapper.
+
+        Use True or False to generate a "boolean" XML-RPC value.
+        """
+
+        def __init__(self, value = 0):
+            # operator.truth normalizes any value to 0 or 1.
+            self.value = operator.truth(value)
+
+        def encode(self, out):
+            # Emit the XML-RPC wire form; called via Marshaller.dump_instance.
+            out.write("<value><boolean>%d</boolean></value>\n" % self.value)
+
+        def __cmp__(self, other):
+            # Compare by truth value; another Boolean is unwrapped first.
+            if isinstance(other, Boolean):
+                other = other.value
+            return cmp(self.value, other)
+
+        def __repr__(self):
+            if self.value:
+                return "<Boolean True at %x>" % id(self)
+            else:
+                return "<Boolean False at %x>" % id(self)
+
+        def __int__(self):
+            return self.value
+
+        def __nonzero__(self):
+            return self.value
+
+    True, False = Boolean(1), Boolean(0)
+
+    ##
+    # Map true or false value to XML-RPC boolean values.
+    #
+    # @def boolean(value)
+    # @param value A boolean value.  Any true value is mapped to True,
+    #              all other values are mapped to False.
+    # @return xmlrpclib.True or xmlrpclib.False.
+    # @see Boolean
+    # @see True
+    # @see False
+
+    def boolean(value, _truefalse=(False, True)):
+        """Convert any Python value to XML-RPC 'boolean'."""
+        # The module-level singletons are bound as a default argument;
+        # truth(value) indexes 0 -> False, 1 -> True.
+        return _truefalse[operator.truth(value)]
+
+##
+# Wrapper for XML-RPC DateTime values.  This converts a time value to
+# the format used by XML-RPC.
+# <p>
+# The value can be given as a string in the format
+# "yyyymmddThh:mm:ss", as a 9-item time tuple (as returned by
+# time.localtime()), or an integer value (as returned by time.time()).
+# The wrapper uses time.localtime() to convert an integer to a time
+# tuple.
+#
+# @param value The time, given as an ISO 8601 string, a time
+#              tuple, or a integer time value.
+
+class DateTime:
+    """DateTime wrapper for an ISO 8601 string or time tuple or
+    localtime integer value to generate 'dateTime.iso8601' XML-RPC
+    value.
+
+    The value is stored internally as a "yyyymmddThh:mm:ss" string.
+    """
+
+    def __init__(self, value=0):
+        # Accept a preformatted string, a time tuple/struct_time, or a
+        # numeric timestamp (0 meaning "now"); everything non-string is
+        # normalized through strftime to the ISO 8601 wire format.
+        if not isinstance(value, StringType):
+            if not isinstance(value, (TupleType, time.struct_time)):
+                if value == 0:
+                    value = time.time()
+                value = time.localtime(value)
+            value = time.strftime("%Y%m%dT%H:%M:%S", value)
+        self.value = value
+
+    def __cmp__(self, other):
+        # Compare by the ISO string; another DateTime is unwrapped first.
+        if isinstance(other, DateTime):
+            other = other.value
+        return cmp(self.value, other)
+
+    ##
+    # Get date/time value.
+    #
+    # @return Date/time value, as an ISO 8601 string.
+
+    def __str__(self):
+        return self.value
+
+    def __repr__(self):
+        return "<DateTime %s at %x>" % (repr(self.value), id(self))
+
+    def decode(self, data):
+        # Set the value from XML element contents, trimming whitespace.
+        self.value = string.strip(data)
+
+    def encode(self, out):
+        # Emit the XML-RPC wire form; called via Marshaller.dump_instance.
+        out.write("<value><dateTime.iso8601>")
+        out.write(self.value)
+        out.write("</dateTime.iso8601></value>\n")
+
+def _datetime(data):
+    # decode xml element contents into a DateTime structure.
+    # (factory used by the unmarshallers, including the C accelerator)
+    value = DateTime()
+    value.decode(data)
+    return value
+
+##
+# Wrapper for binary data.  This can be used to transport any kind
+# of binary data over XML-RPC, using BASE64 encoding.
+#
+# @param data An 8-bit string containing arbitrary data.
+
+import base64
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+class Binary:
+    """Wrapper for binary data.
+
+    Carries an arbitrary 8-bit string; transported over XML-RPC as
+    base64-encoded element content.
+    """
+
+    def __init__(self, data=None):
+        self.data = data
+
+    ##
+    # Get buffer contents.
+    #
+    # @return Buffer contents, as an 8-bit string.
+
+    def __str__(self):
+        # Uninitialized wrappers (data=None) stringify as "".
+        return self.data or ""
+
+    def __cmp__(self, other):
+        # Compare by payload; another Binary is unwrapped first.
+        if isinstance(other, Binary):
+            other = other.data
+        return cmp(self.data, other)
+
+    def decode(self, data):
+        # Set the payload from base64-encoded XML element contents.
+        self.data = base64.decodestring(data)
+
+    def encode(self, out):
+        # Emit the XML-RPC wire form; base64.encode streams the payload
+        # so large buffers are not duplicated as one big string.
+        out.write("<value><base64>\n")
+        base64.encode(StringIO.StringIO(self.data), out)
+        out.write("</base64></value>\n")
+
+def _binary(data):
+    # decode xml element contents into a Binary structure
+    # (factory used by the unmarshallers, including the C accelerator)
+    value = Binary()
+    value.decode(data)
+    return value
+
+# Wrapper classes the marshaller serializes via their own encode()
+# method (see Marshaller.dump_instance).  Boolean is only a wrapper
+# class when the interpreter lacks a native bool.
+WRAPPERS = (DateTime, Binary)
+if not _bool_is_builtin:
+    WRAPPERS = WRAPPERS + (Boolean,)
+
+# --------------------------------------------------------------------
+# XML parsers
+
+# Probe for the optional C accelerator module.  Each component is probed
+# separately (AttributeError covers partial/old builds that lack one of
+# the attributes); the Fast* names are None when unavailable and the
+# pure-Python fallbacks are used instead (see getparser / dumps).
+try:
+    # optional xmlrpclib accelerator
+    import _xmlrpclib
+    FastParser = _xmlrpclib.Parser
+    FastUnmarshaller = _xmlrpclib.Unmarshaller
+except (AttributeError, ImportError):
+    FastParser = FastUnmarshaller = None
+
+try:
+    import _xmlrpclib
+    FastMarshaller = _xmlrpclib.Marshaller
+except (AttributeError, ImportError):
+    FastMarshaller = None
+
+#
+# the SGMLOP parser is about 15x faster than Python's builtin
+# XML parser.  SGMLOP sources can be downloaded from:
+#
+#     http://www.pythonware.com/products/xml/sgmlop.htm
+#
+
+try:
+    import sgmlop
+    if not hasattr(sgmlop, "XMLParser"):
+        raise ImportError
+except ImportError:
+    SgmlopParser = None # sgmlop accelerator not available
+else:
+    class SgmlopParser:
+        """Adapter wrapping the sgmlop C parser to the target-object
+        protocol used by the unmarshallers (xml/start/end/data events)."""
+
+        def __init__(self, target):
+
+            # setup callbacks
+            self.finish_starttag = target.start
+            self.finish_endtag = target.end
+            self.handle_data = target.data
+            self.handle_xml = target.xml
+
+            # activate parser
+            self.parser = sgmlop.XMLParser()
+            self.parser.register(self)
+            # bind feed directly to avoid a per-call indirection
+            self.feed = self.parser.feed
+            # the five predefined XML entities, expanded in handle_entityref
+            self.entity = {
+                "amp": "&", "gt": ">", "lt": "<",
+                "apos": "'", "quot": '"'
+                }
+
+        def close(self):
+            try:
+                self.parser.close()
+            finally:
+                self.parser = self.feed = None # nuke circular reference
+
+        def handle_proc(self, tag, attr):
+            # Processing instruction (<?xml ...?>): extract the declared
+            # encoding and forward it via the target's xml() event.
+            m = re.search("encoding\s*=\s*['\"]([^\"']+)[\"']", attr)
+            if m:
+                self.handle_xml(m.group(1), 1)
+
+        def handle_entityref(self, entity):
+            # <string> entity
+            try:
+                self.handle_data(self.entity[entity])
+            except KeyError:
+                # unknown entity: pass it through verbatim
+                self.handle_data("&%s;" % entity)
+
+try:
+    from xml.parsers import expat
+    if not hasattr(expat, "ParserCreate"):
+        raise ImportError
+except ImportError:
+    ExpatParser = None # expat not available
+else:
+    class ExpatParser:
+        # fast expat parser for Python 2.0 and later.  this is about
+        # 50% slower than sgmlop, on roundtrip testing
+        def __init__(self, target):
+            self._parser = parser = expat.ParserCreate(None, None)
+            self._target = target
+            parser.StartElementHandler = target.start
+            parser.EndElementHandler = target.end
+            parser.CharacterDataHandler = target.data
+            encoding = None
+            # when expat returns byte strings they are UTF-8; tell the
+            # target so it can decode element contents accordingly
+            if not parser.returns_unicode:
+                encoding = "utf-8"
+            target.xml(encoding, None)
+
+        def feed(self, data):
+            # incremental parse; 0 = more data may follow
+            self._parser.Parse(data, 0)
+
+        def close(self):
+            self._parser.Parse("", 1) # end of data
+            del self._target, self._parser # get rid of circular references
+
+class SlowParser:
+    """Default XML parser (based on xmllib.XMLParser)."""
+    # this is about 10 times slower than sgmlop, on roundtrip
+    # testing.
+    def __init__(self, target):
+        import xmllib # lazy subclassing (!)
+        # xmllib is grafted in as a base class on first use so that
+        # importing this module does not pay the xmllib import cost
+        # (see the 2001-09-10 history note above)
+        if xmllib.XMLParser not in SlowParser.__bases__:
+            SlowParser.__bases__ = (xmllib.XMLParser,)
+        self.handle_xml = target.xml
+        self.unknown_starttag = target.start
+        self.handle_data = target.data
+        self.handle_cdata = target.data
+        self.unknown_endtag = target.end
+        try:
+            xmllib.XMLParser.__init__(self, accept_utf8=1)
+        except TypeError:
+            xmllib.XMLParser.__init__(self) # pre-2.0
+
+# --------------------------------------------------------------------
+# XML-RPC marshalling and unmarshalling code
+
+##
+# XML-RPC marshaller.
+#
+# @param encoding Default encoding for 8-bit strings.  The default
+#     value is None (interpreted as UTF-8).
+# @see dumps
+
+class Marshaller:
+    """Generate an XML-RPC params chunk from a Python data structure.
+
+    Create a Marshaller instance for each set of parameters, and use
+    the "dumps" method to convert your data (represented as a tuple)
+    to an XML-RPC params chunk.  To write a fault response, pass a
+    Fault instance instead.  You may prefer to use the "dumps" module
+    function for this purpose.
+    """
+
+    # by the way, if you don't understand what's going on in here,
+    # that's perfectly ok.
+
+    def __init__(self, encoding=None, allow_none=0):
+        # memo tracks container ids currently being dumped, to detect
+        # recursive (self-referencing) structures
+        self.memo = {}
+        self.data = None
+        self.encoding = encoding
+        self.allow_none = allow_none
+
+    # maps Python type objects to dump_* methods; populated by the
+    # dispatch[...] = assignments below as the class body executes
+    dispatch = {}
+
+    def dumps(self, values):
+        """Marshal a tuple of parameters, or a Fault, to an XML string."""
+        out = []
+        write = out.append
+        dump = self.__dump
+        if isinstance(values, Fault):
+            # fault instance
+            write("<fault>\n")
+            dump({'faultCode': values.faultCode,
+                  'faultString': values.faultString},
+                 write)
+            write("</fault>\n")
+        else:
+            # parameter block
+            # FIXME: the xml-rpc specification allows us to leave out
+            # the entire <params> block if there are no parameters.
+            # however, changing this may break older code (including
+            # old versions of xmlrpclib.py), so this is better left as
+            # is for now.  See @XMLRPC3 for more information. /F
+            write("<params>\n")
+            for v in values:
+                write("<param>\n")
+                dump(v, write)
+                write("</param>\n")
+            write("</params>\n")
+        result = string.join(out, "")
+        return result
+
+    def __dump(self, value, write):
+        # Dispatch a single value to its type-specific dump method.
+        try:
+            f = self.dispatch[type(value)]
+        except KeyError:
+            raise TypeError, "cannot marshal %s objects" % type(value)
+        else:
+            f(self, value, write)
+
+    def dump_nil (self, value, write):
+        # None is an XML-RPC extension (<nil/>); only emitted on request.
+        if not self.allow_none:
+            raise TypeError, "cannot marshal None unless allow_none is enabled"
+        write("<value><nil/></value>")
+    dispatch[NoneType] = dump_nil
+
+    def dump_int(self, value, write):
+        # in case ints are > 32 bits
+        if value > MAXINT or value < MININT:
+            raise OverflowError, "int exceeds XML-RPC limits"
+        write("<value><int>")
+        write(str(value))
+        write("</int></value>\n")
+    dispatch[IntType] = dump_int
+
+    if _bool_is_builtin:
+        def dump_bool(self, value, write):
+            write("<value><boolean>")
+            write(value and "1" or "0")
+            write("</boolean></value>\n")
+        dispatch[bool] = dump_bool
+
+    def dump_long(self, value, write):
+        # longs are emitted as <int>, so they must fit in 32 bits too
+        if value > MAXINT or value < MININT:
+            raise OverflowError, "long int exceeds XML-RPC limits"
+        write("<value><int>")
+        write(str(int(value)))
+        write("</int></value>\n")
+    dispatch[LongType] = dump_long
+
+    def dump_double(self, value, write):
+        # repr() preserves full float precision (see 2001-11-12 note)
+        write("<value><double>")
+        write(repr(value))
+        write("</double></value>\n")
+    dispatch[FloatType] = dump_double
+
+    def dump_string(self, value, write, escape=escape):
+        write("<value><string>")
+        write(escape(value))
+        write("</string></value>\n")
+    dispatch[StringType] = dump_string
+
+    if unicode:
+        def dump_unicode(self, value, write, escape=escape):
+            # encode to the packet encoding before escaping
+            value = value.encode(self.encoding)
+            write("<value><string>")
+            write(escape(value))
+            write("</string></value>\n")
+        dispatch[UnicodeType] = dump_unicode
+
+    def dump_array(self, value, write):
+        # guard against self-referencing sequences via the memo
+        i = id(value)
+        if self.memo.has_key(i):
+            raise TypeError, "cannot marshal recursive sequences"
+        self.memo[i] = None
+        dump = self.__dump
+        write("<value><array><data>\n")
+        for v in value:
+            dump(v, write)
+        write("</data></array></value>\n")
+        del self.memo[i]
+    dispatch[TupleType] = dump_array
+    dispatch[ListType] = dump_array
+
+    def dump_struct(self, value, write, escape=escape):
+        # guard against self-referencing dictionaries via the memo
+        i = id(value)
+        if self.memo.has_key(i):
+            raise TypeError, "cannot marshal recursive dictionaries"
+        self.memo[i] = None
+        dump = self.__dump
+        write("<value><struct>\n")
+        for k, v in value.items():
+            write("<member>\n")
+            # struct member names must be strings; unicode keys are
+            # encoded, anything else is rejected
+            if type(k) is not StringType:
+                if unicode and type(k) is UnicodeType:
+                    k = k.encode(self.encoding)
+                else:
+                    raise TypeError, "dictionary key must be string"
+            write("<name>%s</name>\n" % escape(k))
+            dump(v, write)
+            write("</member>\n")
+        write("</struct></value>\n")
+        del self.memo[i]
+    dispatch[DictType] = dump_struct
+
+    def dump_instance(self, value, write):
+        # check for special wrappers
+        if value.__class__ in WRAPPERS:
+            # the wrapper's encode() calls self.write, so expose the
+            # write callable on the marshaller for the duration
+            self.write = write
+            value.encode(self)
+            del self.write
+        else:
+            # store instance attributes as a struct (really?)
+            self.dump_struct(value.__dict__, write)
+    dispatch[InstanceType] = dump_instance
+
+##
+# XML-RPC unmarshaller.
+#
+# @see loads
+
+class Unmarshaller:
+    """Unmarshal an XML-RPC response, based on incoming XML event
+    messages (start, data, end).  Call close() to get the resulting
+    data structure.
+
+    Note that this reader is fairly tolerant, and gladly accepts bogus
+    XML-RPC data without complaining (but not bogus XML).
+    """
+
+    # and again, if you don't understand what's going on in here,
+    # that's perfectly ok.
+
+    def __init__(self):
+        # _type: "params", "fault" or "methodName" once known
+        self._type = None
+        # _stack: decoded values; _marks: stack offsets where open
+        # arrays/structs begin; _data: text chunks of the current element
+        self._stack = []
+        self._marks = []
+        self._data = []
+        self._methodname = None
+        self._encoding = "utf-8"
+        self.append = self._stack.append
+
+    def close(self):
+        # return response tuple and target method
+        # (unclosed containers or no recognized top-level element mean
+        # the packet was malformed)
+        if self._type is None or self._marks:
+            raise ResponseError()
+        if self._type == "fault":
+            raise Fault(**self._stack[0])
+        return tuple(self._stack)
+
+    def getmethodname(self):
+        # method name from a methodCall packet, or None for responses
+        return self._methodname
+
+    #
+    # event handlers
+
+    def xml(self, encoding, standalone):
+        self._encoding = encoding
+        # FIXME: assert standalone == 1 ???
+
+    def start(self, tag, attrs):
+        # prepare to handle this element
+        if tag == "array" or tag == "struct":
+            # remember where this container's members start on the stack
+            self._marks.append(len(self._stack))
+        self._data = []
+        # _value flags a <value> element with no typed child so far;
+        # end_value treats such content as a string
+        self._value = (tag == "value")
+
+    def data(self, text):
+        self._data.append(text)
+
+    def end(self, tag, join=string.join):
+        # call the appropriate end tag handler
+        try:
+            f = self.dispatch[tag]
+        except KeyError:
+            pass # unknown tag ?
+        else:
+            return f(self, join(self._data, ""))
+
+    #
+    # accelerator support
+
+    def end_dispatch(self, tag, data):
+        # dispatch data
+        # (like end(), but the accelerator has already joined the text)
+        try:
+            f = self.dispatch[tag]
+        except KeyError:
+            pass # unknown tag ?
+        else:
+            return f(self, data)
+
+    #
+    # element decoders
+
+    # maps element tag -> end_* handler; populated by the
+    # dispatch[...] = assignments below as the class body executes
+    dispatch = {}
+
+    def end_nil (self, data):
+        self.append(None)
+        self._value = 0
+    dispatch["nil"] = end_nil
+
+    def end_boolean(self, data):
+        if data == "0":
+            self.append(False)
+        elif data == "1":
+            self.append(True)
+        else:
+            raise TypeError, "bad boolean value"
+        self._value = 0
+    dispatch["boolean"] = end_boolean
+
+    def end_int(self, data):
+        self.append(int(data))
+        self._value = 0
+    dispatch["i4"] = end_int
+    dispatch["int"] = end_int
+
+    def end_double(self, data):
+        self.append(float(data))
+        self._value = 0
+    dispatch["double"] = end_double
+
+    def end_string(self, data):
+        if self._encoding:
+            data = _decode(data, self._encoding)
+        self.append(_stringify(data))
+        self._value = 0
+    dispatch["string"] = end_string
+    dispatch["name"] = end_string # struct keys are always strings
+
+    def end_array(self, data):
+        mark = self._marks.pop()
+        # map arrays to Python lists
+        self._stack[mark:] = [self._stack[mark:]]
+        self._value = 0
+    dispatch["array"] = end_array
+
+    def end_struct(self, data):
+        mark = self._marks.pop()
+        # map structs to Python dictionaries
+        # (members sit on the stack as alternating name, value pairs)
+        dict = {}
+        items = self._stack[mark:]
+        for i in range(0, len(items), 2):
+            dict[_stringify(items[i])] = items[i+1]
+        self._stack[mark:] = [dict]
+        self._value = 0
+    dispatch["struct"] = end_struct
+
+    def end_base64(self, data):
+        value = Binary()
+        value.decode(data)
+        self.append(value)
+        self._value = 0
+    dispatch["base64"] = end_base64
+
+    def end_dateTime(self, data):
+        # NOTE(review): unlike the other end_* handlers this one does not
+        # reset self._value; matches the upstream source — confirm before
+        # changing.
+        value = DateTime()
+        value.decode(data)
+        self.append(value)
+    dispatch["dateTime.iso8601"] = end_dateTime
+
+    def end_value(self, data):
+        # if we stumble upon a value element with no internal
+        # elements, treat it as a string element
+        if self._value:
+            self.end_string(data)
+    dispatch["value"] = end_value
+
+    def end_params(self, data):
+        self._type = "params"
+    dispatch["params"] = end_params
+
+    def end_fault(self, data):
+        self._type = "fault"
+    dispatch["fault"] = end_fault
+
+    def end_methodName(self, data):
+        if self._encoding:
+            data = _decode(data, self._encoding)
+        self._methodname = data
+        self._type = "methodName" # no params
+    dispatch["methodName"] = end_methodName
+
+## Multicall support
+#
+
+class _MultiCallMethod:
+    # some lesser magic to store calls made to a MultiCall object
+    # for batch execution
+    def __init__(self, call_list, name):
+        self.__call_list = call_list
+        self.__name = name
+    def __getattr__(self, name):
+        # attribute access builds dotted method names, e.g. system.listMethods
+        return _MultiCallMethod(self.__call_list, "%s.%s" % (self.__name, name))
+    def __call__(self, *args):
+        # record the call instead of executing it; MultiCall.__call__
+        # sends the accumulated list in one request
+        self.__call_list.append((self.__name, args))
+
+class MultiCallIterator:
+    """Iterates over the results of a multicall. Exceptions are
+    thrown in response to xmlrpc faults."""
+
+    def __init__(self, results):
+        self.results = results
+
+    def __getitem__(self, i):
+        # each per-call result is either a fault struct (dict) or a
+        # one-element list holding the return value
+        item = self.results[i]
+        if type(item) == type({}):
+            raise Fault(item['faultCode'], item['faultString'])
+        elif type(item) == type([]):
+            return item[0]
+        else:
+            raise ValueError,\
+                  "unexpected type in multicall result"
+
+class MultiCall:
+    """server -> an object used to boxcar method calls
+
+    server should be a ServerProxy object.
+
+    Methods can be added to the MultiCall using normal
+    method call syntax e.g.:
+
+    multicall = MultiCall(server_proxy)
+    multicall.add(2,3)
+    multicall.get_address("Guido")
+
+    To execute the multicall, call the MultiCall object e.g.:
+
+    add_result, address = multicall()
+    """
+
+    def __init__(self, server):
+        self.__server = server
+        # accumulated (methodname, args) pairs, in call order
+        self.__call_list = []
+
+    def __repr__(self):
+        return "<MultiCall at %x>" % id(self)
+
+    __str__ = __repr__
+
+    def __getattr__(self, name):
+        # any attribute becomes a deferred method recorder
+        return _MultiCallMethod(self.__call_list, name)
+
+    def __call__(self):
+        # package the recorded calls in the system.multicall wire format
+        marshalled_list = []
+        for name, args in self.__call_list:
+            marshalled_list.append({'methodName' : name, 'params' : args})
+
+        return MultiCallIterator(self.__server.system.multicall(marshalled_list))
+
+# --------------------------------------------------------------------
+# convenience functions
+
+##
+# Create a parser object, and connect it to an unmarshalling instance.
+# This function picks the fastest available XML parser.
+#
+# return A (parser, unmarshaller) tuple.
+
+def getparser():
+    """getparser() -> parser, unmarshaller
+
+    Create an instance of the fastest available parser, and attach it
+    to an unmarshalling object.  Return both objects.
+    """
+    # preference order: C accelerator pair, then C parser with Python
+    # unmarshaller, then sgmlop, then expat, then the xmllib fallback
+    if FastParser and FastUnmarshaller:
+        target = FastUnmarshaller(True, False, _binary, _datetime, Fault)
+        parser = FastParser(target)
+    else:
+        target = Unmarshaller()
+        if FastParser:
+            parser = FastParser(target)
+        elif SgmlopParser:
+            parser = SgmlopParser(target)
+        elif ExpatParser:
+            parser = ExpatParser(target)
+        else:
+            parser = SlowParser(target)
+    return parser, target
+
+##
+# Convert a Python tuple or a Fault instance to an XML-RPC packet.
+#
+# @def dumps(params, **options)
+# @param params A tuple or Fault instance.
+# @keyparam methodname If given, create a methodCall request for
+#     this method name.
+# @keyparam methodresponse If given, create a methodResponse packet.
+#     If used with a tuple, the tuple must be a singleton (that is,
+#     it must contain exactly one element).
+# @keyparam encoding The packet encoding.
+# @return A string containing marshalled data.
+
def dumps(params, methodname=None, methodresponse=None, encoding=None,
          allow_none=0):
    """data [,options] -> marshalled data

    Convert an argument tuple or a Fault instance to an XML-RPC
    request (or response, if the methodresponse option is used).

    Keyword options:

        methodname: the method name for a methodCall packet

        methodresponse: true to create a methodResponse packet.
        If this option is used with a tuple, the tuple must be
        a singleton (i.e. it can contain only one element).

        encoding: the packet encoding (default is UTF-8)

        allow_none: passed through to the Python marshaller; if true,
        None values are marshalled as an extension element.

    All 8-bit strings in the data structure are assumed to use the
    packet encoding.  Unicode strings are automatically converted,
    where necessary.
    """

    assert isinstance(params, (TupleType, Fault)),\
           "argument must be tuple or Fault instance"

    # A Fault is always sent as a response; a response tuple must carry
    # exactly one value.
    if isinstance(params, Fault):
        methodresponse = 1
    elif methodresponse and isinstance(params, TupleType):
        assert len(params) == 1, "response tuple must be a singleton"

    if not encoding:
        encoding = "utf-8"

    # The C marshaller (when present) does not support allow_none.
    if FastMarshaller:
        marshaller = FastMarshaller(encoding)
    else:
        marshaller = Marshaller(encoding, allow_none)

    payload = marshaller.dumps(params)

    if encoding != "utf-8":
        xmlheader = "<?xml version='1.0' encoding='%s'?>\n" % str(encoding)
    else:
        xmlheader = "<?xml version='1.0'?>\n" # utf-8 is default

    # Standard XML-RPC wrappings.
    if methodname:
        # a method call
        if not isinstance(methodname, StringType):
            methodname = methodname.encode(encoding)
        fragments = (
            xmlheader,
            "<methodCall>\n"
            "<methodName>", methodname, "</methodName>\n",
            payload,
            "</methodCall>\n"
            )
    elif methodresponse:
        # a method response, or a fault structure
        fragments = (
            xmlheader,
            "<methodResponse>\n",
            payload,
            "</methodResponse>\n"
            )
    else:
        return payload # return as is
    return string.join(fragments, "")
+
+##
+# Convert an XML-RPC packet to a Python object.  If the XML-RPC packet
+# represents a fault condition, this function raises a Fault exception.
+#
+# @param data An XML-RPC packet, given as an 8-bit string.
+# @return A tuple containing the unpacked data, and the method name
+#     (None if not present).
+# @see Fault
+
def loads(data):
    """data -> unmarshalled data, method name

    Convert an XML-RPC packet to unmarshalled data plus a method
    name (None if not present).

    If the XML-RPC packet represents a fault condition, this function
    raises a Fault exception.
    """
    parser, unmarshaller = getparser()
    parser.feed(data)
    parser.close()
    return unmarshaller.close(), unmarshaller.getmethodname()
+
+
+# --------------------------------------------------------------------
+# request dispatcher
+
+class _Method:
+    # some magic to bind an XML-RPC method to an RPC server.
+    # supports "nested" methods (e.g. examples.getStateName)
+    def __init__(self, send, name):
+        self.__send = send
+        self.__name = name
+    def __getattr__(self, name):
+        return _Method(self.__send, "%s.%s" % (self.__name, name))
+    def __call__(self, *args):
+        return self.__send(self.__name, args)
+
+##
+# Standard transport class for XML-RPC over HTTP.
+# <p>
+# You can create custom transports by subclassing this method, and
+# overriding selected methods.
+
class Transport:
    """Handles an HTTP transaction to an XML-RPC server."""

    # client identifier (may be overridden by subclasses or instances)
    user_agent = "xmlrpclib.py/%s (by www.pythonware.com)" % __version__

    ##
    # Send a complete request, and parse the response.
    #
    # @param host Target host.
    # @param handler Target RPC handler.
    # @param request_body XML-RPC request body.
    # @param verbose Debugging flag.
    # @return Parsed response.
    # @raises ProtocolError if the HTTP status is not 200.

    def request(self, host, handler, request_body, verbose=0):
        # issue XML-RPC request

        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # Emit request line, headers and body in wire order; subclasses
        # may override any of these send_* hooks individually.
        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_user_agent(h)
        self.send_content(h, request_body)

        errcode, errmsg, headers = h.getreply()

        if errcode != 200:
            raise ProtocolError(
                host + handler,
                errcode, errmsg,
                headers
                )

        # Stash the flag on self so _parse_response can echo the body
        # when debugging is enabled.
        self.verbose = verbose

        # Grab the underlying socket if this httplib implementation
        # exposes it, so the response can be read straight off it.
        try:
            sock = h._conn.sock
        except AttributeError:
            sock = None

        return self._parse_response(h.getfile(), sock)

    ##
    # Create parser.
    #
    # @return A 2-tuple containing a parser and a unmarshaller.

    def getparser(self):
        # get parser and unmarshaller (module-level helper picks the
        # fastest implementation available)
        return getparser()

    ##
    # Get authorization info from host parameter
    # Host may be a string, or a (host, x509-dict) tuple; if a string,
    # it is checked for a "user:pw@host" format, and a "Basic
    # Authentication" header is added if appropriate.
    #
    # @param host Host descriptor (URL or (URL, x509 info) tuple).
    # @return A 3-tuple containing (actual host, extra headers,
    #     x509 info).  The header and x509 fields may be None.

    def get_host_info(self, host):

        x509 = {}
        if isinstance(host, TupleType):
            host, x509 = host

        import urllib
        auth, host = urllib.splituser(host)

        if auth:
            import base64
            # base64.encodestring inserts newlines every 76 chars;
            # strip all whitespace so the header value is one token.
            auth = base64.encodestring(urllib.unquote(auth))
            auth = string.join(string.split(auth), "") # get rid of whitespace
            extra_headers = [
                ("Authorization", "Basic " + auth)
                ]
        else:
            extra_headers = None

        return host, extra_headers, x509

    ##
    # Connect to server.
    #
    # @param host Target host.
    # @return A connection handle.

    def make_connection(self, host):
        # create a HTTP connection object from a host descriptor
        import httplib
        host, extra_headers, x509 = self.get_host_info(host)
        return httplib.HTTP(host)

    ##
    # Send request header.
    #
    # @param connection Connection handle.
    # @param handler Target RPC handler.
    # @param request_body XML-RPC body.

    def send_request(self, connection, handler, request_body):
        connection.putrequest("POST", handler)

    ##
    # Send host name (and any auth headers derived from it).
    #
    # @param connection Connection handle.
    # @param host Host name.

    def send_host(self, connection, host):
        host, extra_headers, x509 = self.get_host_info(host)
        connection.putheader("Host", host)
        if extra_headers:
            if isinstance(extra_headers, DictType):
                extra_headers = extra_headers.items()
            for key, value in extra_headers:
                connection.putheader(key, value)

    ##
    # Send user-agent identifier.
    #
    # @param connection Connection handle.

    def send_user_agent(self, connection):
        connection.putheader("User-Agent", self.user_agent)

    ##
    # Send request body (terminates the header section first).
    #
    # @param connection Connection handle.
    # @param request_body XML-RPC request body.

    def send_content(self, connection, request_body):
        connection.putheader("Content-Type", "text/xml")
        connection.putheader("Content-Length", str(len(request_body)))
        connection.endheaders()
        if request_body:
            connection.send(request_body)

    ##
    # Parse response.
    #
    # @param file Stream.
    # @return Response tuple and target method.

    def parse_response(self, file):
        # compatibility interface (no direct socket access)
        return self._parse_response(file, None)

    ##
    # Parse response (alternate interface).  This is similar to the
    # parse_response method, but also provides direct access to the
    # underlying socket object (where available).
    #
    # @param file Stream.
    # @param sock Socket handle (or None, if the socket object
    #    could not be accessed).
    # @return Response tuple and target method.

    def _parse_response(self, file, sock):
        # read response from input file/socket, and parse it

        p, u = self.getparser()

        # Read in 1 KB chunks, preferring the raw socket when we have it.
        # NOTE(review): relies on self.verbose having been set by
        # request() before this is reached.
        while 1:
            if sock:
                response = sock.recv(1024)
            else:
                response = file.read(1024)
            if not response:
                break
            if self.verbose:
                print "body:", repr(response)
            p.feed(response)

        file.close()
        p.close()

        return u.close()
+
+##
+# Standard transport class for XML-RPC over HTTPS.
+
class SafeTransport(Transport):
    """Handles an HTTPS transaction to an XML-RPC server."""

    # FIXME: mostly untested

    def make_connection(self, host):
        # Build an HTTPS connection from a host descriptor; host may be
        # a string or a (host, x509-dict) tuple carrying client
        # certificate keyword arguments.
        import httplib
        host, extra_headers, x509 = self.get_host_info(host)
        if not hasattr(httplib, "HTTPS"):
            # httplib only grows HTTPS when Python was built with SSL.
            raise NotImplementedError(
                "your version of httplib doesn't support HTTPS"
                )
        return httplib.HTTPS(host, None, **(x509 or {}))
+
+##
+# Standard server proxy.  This class establishes a virtual connection
+# to an XML-RPC server.
+# <p>
+# This class is available as ServerProxy and Server.  New code should
+# use ServerProxy, to avoid confusion.
+#
+# @def ServerProxy(uri, **options)
+# @param uri The connection point on the server.
+# @keyparam transport A transport factory, compatible with the
+#    standard transport class.
+# @keyparam encoding The default encoding used for 8-bit strings
+#    (default is UTF-8).
+# @keyparam verbose Use a true value to enable debugging output.
+#    (printed to standard output).
+# @see Transport
+
class ServerProxy:
    """uri [,options] -> a logical connection to an XML-RPC server

    uri is the connection point on the server, given as
    scheme://host/target.

    The standard implementation always supports the "http" scheme.  If
    SSL socket support is available (Python 2.0), it also supports
    "https".

    If the target part and the slash preceding it are both omitted,
    "/RPC2" is assumed.

    The following options can be given as keyword arguments:

        transport: a transport factory
        encoding: the request encoding (default is UTF-8)
        verbose: if true, transport-level debug output is enabled
        allow_none: if true, None values may be marshalled

    All 8-bit strings passed to the server proxy are assumed to use
    the given encoding.
    """

    def __init__(self, uri, transport=None, encoding=None, verbose=0,
                 allow_none=0):
        # establish a "logical" server connection

        # split the url into scheme, host and handler path
        import urllib
        type, uri = urllib.splittype(uri)
        if type not in ("http", "https"):
            raise IOError, "unsupported XML-RPC protocol"
        self.__host, self.__handler = urllib.splithost(uri)
        if not self.__handler:
            # default handler path per the XML-RPC convention
            self.__handler = "/RPC2"

        # pick a default transport matching the scheme unless the
        # caller supplied one
        if transport is None:
            if type == "https":
                transport = SafeTransport()
            else:
                transport = Transport()
        self.__transport = transport

        self.__encoding = encoding
        self.__verbose = verbose
        self.__allow_none = allow_none

    def __request(self, methodname, params):
        # call a method on the remote server

        request = dumps(params, methodname, encoding=self.__encoding,
                        allow_none=self.__allow_none)

        response = self.__transport.request(
            self.__host,
            self.__handler,
            request,
            verbose=self.__verbose
            )

        # single-value responses are unwrapped for convenience
        if len(response) == 1:
            response = response[0]

        return response

    def __repr__(self):
        return (
            "<ServerProxy for %s%s>" %
            (self.__host, self.__handler)
            )

    __str__ = __repr__

    def __getattr__(self, name):
        # magic method dispatcher: attribute access produces a callable
        # proxy bound to this connection's private __request method
        return _Method(self.__request, name)

    # note: to call a remote object with an non-standard name, use
    # result getattr(server, "strange-python-name")(args)
+
# compatibility

# Historical alias: older releases exposed this class as "Server".
# Keep it so existing callers continue to work; new code should use
# ServerProxy to avoid confusion with server-side classes.
Server = ServerProxy
+
+# --------------------------------------------------------------------
+# test code
+
if __name__ == "__main__":

    # simple test program (from the XML-RPC specification); contacts a
    # public XML-RPC time server, so it requires network access

    # server = ServerProxy("http://localhost:8000") # local server
    server = ServerProxy("http://time.xmlrpc.com/RPC2")

    print server

    # single call; Fault and ProtocolError both derive from Error
    try:
        print server.currentTime.getCurrentTime()
    except Error, v:
        print "ERROR", v

    # boxcar two calls into one HTTP round trip via MultiCall
    multi = MultiCall(server)
    multi.currentTime.getCurrentTime()
    multi.currentTime.getCurrentTime()
    try:
        for response in multi():
            print response
    except Error, v:
        print "ERROR", v
diff --git a/depot_tools/release/win/python_24/Lib/zipfile.py b/depot_tools/release/win/python_24/Lib/zipfile.py
new file mode 100644
index 0000000..93436cf
--- /dev/null
+++ b/depot_tools/release/win/python_24/Lib/zipfile.py
@@ -0,0 +1,615 @@
+"Read and write ZIP files."
+
+import struct, os, time
+import binascii
+
+try:
+    import zlib # We may need its compression method
+except ImportError:
+    zlib = None
+
+__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
+           "ZipInfo", "ZipFile", "PyZipFile"]
+
class BadZipfile(Exception):
    """Raised when a file is not a valid ZIP archive (bad magic number,
    CRC mismatch, or a truncated/malformed record)."""
    pass
error = BadZipfile      # Backward-compatible alias for this module's exception
+
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported

# Struct module formats for the fixed-size part of each record type.
# Note the lowercase "l" fields are unpacked as *signed* 32-bit values.
structEndArchive = "<4s4H2lH"     # 9 items, end of archive, 22 bytes
stringEndArchive = "PK\005\006"   # magic number for end of archive record
structCentralDir = "<4s4B4HlLL5HLl"# 19 items, central directory, 46 bytes
stringCentralDir = "PK\001\002"   # magic number for central directory
structFileHeader = "<4s2B4HlLL2H"  # 12 items, file header record, 30 bytes
stringFileHeader = "PK\003\004"   # magic number for file header

# indexes of entries in the central directory structure, i.e. into the
# tuple returned by struct.unpack(structCentralDir, ...)
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4                  # is this meaningful?
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18

# indexes of entries in the local file header structure, i.e. into the
# tuple returned by struct.unpack(structFileHeader, ...)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2                  # is this meaningful?
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
+
def is_zipfile(filename):
    """Quickly see if file is a ZIP file by checking the magic number.

    Returns True if the file contains a valid "end of central
    directory" record, False otherwise (including when the file cannot
    be opened or read).
    """
    try:
        fpin = open(filename, "rb")
        try:
            # _EndRecData returns None unless the magic number is found.
            endrec = _EndRecData(fpin)
        finally:
            # Close even when _EndRecData raises (e.g. seeking in a file
            # shorter than 22 bytes raises IOError), so the handle is
            # never leaked; the original only closed on the happy path.
            fpin.close()
        if endrec:
            return True                 # file has correct magic number
    except IOError:
        pass
    return False
+
def _EndRecData(fpin):
    """Return data from the "End of Central Directory" record, or None.

    The data is a list of the nine items in the ZIP "End of central dir"
    record followed by a tenth item, the archive comment, and an
    eleventh, the file seek offset of this record."""
    fpin.seek(-22, 2)               # Assume no archive comment.
    filesize = fpin.tell() + 22     # Get file size
    data = fpin.read()
    # Fast path: record is exactly the last 22 bytes and carries a
    # zero-length comment field (last two bytes are the comment length).
    if data[0:4] == stringEndArchive and data[-2:] == "\000\000":
        endrec = struct.unpack(structEndArchive, data)
        endrec = list(endrec)
        endrec.append("")               # Append the archive comment
        endrec.append(filesize - 22)    # Append the record start offset
        return endrec
    # Search the last END_BLOCK bytes of the file for the record signature.
    # The comment is appended to the ZIP file and has a 16 bit length.
    # So the comment may be up to 64K long.  We limit the search for the
    # signature to a few Kbytes at the end of the file for efficiency.
    # also, the signature must not appear in the comment.
    END_BLOCK = min(filesize, 1024 * 4)
    fpin.seek(filesize - END_BLOCK, 0)
    data = fpin.read()
    start = data.rfind(stringEndArchive)
    if start >= 0:     # Correct signature string was found
        endrec = struct.unpack(structEndArchive, data[start:start+22])
        endrec = list(endrec)
        comment = data[start+22:]
        # endrec[7] is the stored comment length; it must match the
        # number of bytes actually trailing the record.
        if endrec[7] == len(comment):     # Comment length checks out
            # Append the archive comment and start offset
            endrec.append(comment)
            endrec.append(filesize - END_BLOCK + start)
            return endrec
    return      # Error, return None
+
+
class ZipInfo:
    """Class with attributes describing each file in the ZIP archive."""

    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        self.orig_filename = filename   # Original file name in archive
        # Terminate the file name at the first null byte.  Null bytes in
        # file names are used as tricks by viruses in archives.
        null_byte = filename.find(chr(0))
        if null_byte >= 0:
            filename = filename[0:null_byte]
        # Ensure paths in generated ZIP files always use forward slashes
        # as the directory separator, as required by the ZIP format
        # specification.
        if os.sep != "/":
            filename = filename.replace(os.sep, "/")
        self.filename = filename        # Normalized file name
        self.date_time = date_time      # year, month, day, hour, min, sec
        # Standard values:
        self.compress_type = ZIP_STORED # Type of compression for the file
        self.comment = ""               # Comment for each file
        self.extra = ""                 # ZIP extra data
        self.create_system = 0          # System which created ZIP archive
        self.create_version = 20        # Version which created ZIP archive
        self.extract_version = 20       # Version needed to extract archive
        self.reserved = 0               # Must be zero
        self.flag_bits = 0              # ZIP flag bits
        self.volume = 0                 # Volume number of file header
        self.internal_attr = 0          # Internal attributes
        self.external_attr = 0          # External file attributes
        # Other attributes are set by class ZipFile:
        # header_offset         Byte offset to the file header
        # file_offset           Byte offset to the start of the file data
        # CRC                   CRC-32 of the uncompressed file
        # compress_size         Size of the compressed file
        # file_size             Size of the uncompressed file

    def FileHeader(self):
        """Return the per-file (local) header as a string."""
        # Pack date/time into the 16-bit DOS formats used by ZIP
        # (two-second resolution for seconds, years offset from 1980).
        dt = self.date_time
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & 0x08:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size
        header = struct.pack(structFileHeader, stringFileHeader,
                 self.extract_version, self.reserved, self.flag_bits,
                 self.compress_type, dostime, dosdate, CRC,
                 compress_size, file_size,
                 len(self.filename), len(self.extra))
        return header + self.filename + self.extra
+
+
class ZipFile:
    """ Class with methods to open, read, write, close, list zip files.

    z = ZipFile(file, mode="r", compression=ZIP_STORED)

    file: Either the path to the file, or a file-like object.
          If it is a path, the file will be opened and closed by ZipFile.
    mode: The mode can be either read "r", write "w" or append "a".
    compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
    """

    # Class-level default so attribute access is safe even if __init__
    # fails before assigning; __del__/close paths check it.
    fp = None                   # Set here since __del__ checks it
+
    def __init__(self, file, mode="r", compression=ZIP_STORED):
        """Open the ZIP file with mode read "r", write "w" or append "a".

        Raises RuntimeError for an unsupported compression method or
        mode, and (via open) IOError if a path cannot be opened.
        """
        # Validate the compression method up front.
        if compression == ZIP_STORED:
            pass
        elif compression == ZIP_DEFLATED:
            if not zlib:
                raise RuntimeError,\
                      "Compression requires the (missing) zlib module"
        else:
            raise RuntimeError, "That compression method is not supported"
        self.debug = 0  # Level of printing: 0 through 3
        self.NameToInfo = {}    # Find file info given name
        self.filelist = []      # List of ZipInfo instances for archive
        self.compression = compression  # Method of compression
        # Strip any "b" so "rb"/"wb"/"ab" behave like "r"/"w"/"a".
        self.mode = key = mode.replace('b', '')[0]

        # Check if we were passed a file-like object
        if isinstance(file, basestring):
            self._filePassed = 0
            self.filename = file
            modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
            self.fp = open(file, modeDict[mode])
        else:
            self._filePassed = 1
            self.fp = file
            self.filename = getattr(file, 'name', None)

        if key == 'r':
            self._GetContents()
        elif key == 'w':
            pass
        elif key == 'a':
            try:                        # See if file is a zip file
                self._RealGetContents()
                # seek to start of directory and overwrite
                self.fp.seek(self.start_dir, 0)
            except BadZipfile:          # file is not a zip file, just append
                self.fp.seek(0, 2)
        else:
            # Invalid mode: close the file we opened (but not one the
            # caller passed in) before raising.
            if not self._filePassed:
                self.fp.close()
                self.fp = None
            raise RuntimeError, 'Mode must be "r", "w" or "a"'
+
    def _GetContents(self):
        """Read the directory, making sure we close the file if the format
        is bad."""
        try:
            self._RealGetContents()
        except BadZipfile:
            # Only close handles we opened ourselves; a caller-supplied
            # file object stays under the caller's control.
            if not self._filePassed:
                self.fp.close()
                self.fp = None
            raise
+
    def _RealGetContents(self):
        """Read in the table of contents for the ZIP file.

        Populates self.filelist and self.NameToInfo from the central
        directory, then verifies each entry against its local file
        header.  Raises BadZipfile on any structural error.
        """
        fp = self.fp
        endrec = _EndRecData(fp)
        if not endrec:
            raise BadZipfile, "File is not a zip file"
        if self.debug > 1:
            print endrec
        size_cd = endrec[5]             # bytes in central directory
        offset_cd = endrec[6]   # offset of central directory
        self.comment = endrec[8]        # archive comment
        # endrec[9] is the offset of the "End of Central Dir" record
        x = endrec[9] - size_cd
        # "concat" is zero, unless zip was concatenated to another file
        concat = x - offset_cd
        if self.debug > 2:
            print "given, inferred, offset", offset_cd, x, concat
        # self.start_dir:  Position of start of central directory
        self.start_dir = offset_cd + concat
        fp.seek(self.start_dir, 0)
        total = 0
        # First pass: walk the central directory records.
        while total < size_cd:
            centdir = fp.read(46)
            total = total + 46
            if centdir[0:4] != stringCentralDir:
                raise BadZipfile, "Bad magic number for central directory"
            centdir = struct.unpack(structCentralDir, centdir)
            if self.debug > 2:
                print centdir
            filename = fp.read(centdir[_CD_FILENAME_LENGTH])
            # Create ZipInfo instance to store file information
            x = ZipInfo(filename)
            x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
            x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
            total = (total + centdir[_CD_FILENAME_LENGTH]
                     + centdir[_CD_EXTRA_FIELD_LENGTH]
                     + centdir[_CD_COMMENT_LENGTH])
            x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET] + concat
            # file_offset must be computed below...
            (x.create_version, x.create_system, x.extract_version, x.reserved,
                x.flag_bits, x.compress_type, t, d,
                x.CRC, x.compress_size, x.file_size) = centdir[1:12]
            x.volume, x.internal_attr, x.external_attr = centdir[15:18]
            # Convert DOS date/time code to (year, month, day, hour, min, sec)
            x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
                                     t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
            self.filelist.append(x)
            self.NameToInfo[x.filename] = x
            if self.debug > 2:
                print "total", total
        # Second pass: visit each local file header to locate the start
        # of the actual file data.
        for data in self.filelist:
            fp.seek(data.header_offset, 0)
            fheader = fp.read(30)
            if fheader[0:4] != stringFileHeader:
                raise BadZipfile, "Bad magic number for file header"
            fheader = struct.unpack(structFileHeader, fheader)
            # file_offset is computed here, since the extra field for
            # the central directory and for the local file header
            # refer to different fields, and they can have different
            # lengths
            data.file_offset = (data.header_offset + 30
                                + fheader[_FH_FILENAME_LENGTH]
                                + fheader[_FH_EXTRA_FIELD_LENGTH])
            fname = fp.read(fheader[_FH_FILENAME_LENGTH])
            if fname != data.orig_filename:
                raise RuntimeError, \
                      'File name in directory "%s" and header "%s" differ.' % (
                          data.orig_filename, fname)
+
+    def namelist(self):
+        """Return a list of file names in the archive."""
+        l = []
+        for data in self.filelist:
+            l.append(data.filename)
+        return l
+
    def infolist(self):
        """Return a list of class ZipInfo instances for files in the
        archive.

        Note: this returns the live internal list, not a copy.
        """
        return self.filelist
+
+    def printdir(self):
+        """Print a table of contents for the zip file."""
+        print "%-46s %19s %12s" % ("File Name", "Modified    ", "Size")
+        for zinfo in self.filelist:
+            date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time
+            print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
+
+    def testzip(self):
+        """Read all the files and check the CRC."""
+        for zinfo in self.filelist:
+            try:
+                self.read(zinfo.filename)       # Check CRC-32
+            except BadZipfile:
+                return zinfo.filename
+
    def getinfo(self, name):
        """Return the instance of ZipInfo given 'name'.

        Raises KeyError if 'name' is not in the archive.
        """
        return self.NameToInfo[name]
+
    def read(self, name):
        """Return file bytes (as a string) for name.

        Verifies the CRC-32 of the decompressed data; raises BadZipfile
        on mismatch or unsupported compression, RuntimeError if the
        archive is closed or was opened for writing only.
        """
        if self.mode not in ("r", "a"):
            raise RuntimeError, 'read() requires mode "r" or "a"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to read ZIP archive that was already closed"
        zinfo = self.getinfo(name)
        # Save and restore the file position so interleaved operations
        # on self.fp are not disturbed.
        filepos = self.fp.tell()
        self.fp.seek(zinfo.file_offset, 0)
        bytes = self.fp.read(zinfo.compress_size)
        self.fp.seek(filepos, 0)
        if zinfo.compress_type == ZIP_STORED:
            pass
        elif zinfo.compress_type == ZIP_DEFLATED:
            if not zlib:
                raise RuntimeError, \
                      "De-compression requires the (missing) zlib module"
            # zlib compress/decompress code by Jeremy Hylton of CNRI
            # wbits=-15: raw deflate stream, no zlib header/trailer.
            dc = zlib.decompressobj(-15)
            bytes = dc.decompress(bytes)
            # need to feed in unused pad byte so that zlib won't choke
            ex = dc.decompress('Z') + dc.flush()
            if ex:
                bytes = bytes + ex
        else:
            raise BadZipfile, \
                  "Unsupported compression method %d for file %s" % \
            (zinfo.compress_type, name)
        crc = binascii.crc32(bytes)
        if crc != zinfo.CRC:
            raise BadZipfile, "Bad CRC-32 for file %s" % name
        return bytes
+
    def _writecheck(self, zinfo):
        """Check for errors before writing a file to the archive.

        A duplicate name only warns (when debug is set); the other
        conditions raise RuntimeError, in the order checked below.
        """
        if zinfo.filename in self.NameToInfo:
            if self.debug:      # Warning for duplicate names
                print "Duplicate name:", zinfo.filename
        if self.mode not in ("w", "a"):
            raise RuntimeError, 'write() requires mode "w" or "a"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to write ZIP archive that was already closed"
        if zinfo.compress_type == ZIP_DEFLATED and not zlib:
            raise RuntimeError, \
                  "Compression requires the (missing) zlib module"
        if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
            raise RuntimeError, \
                  "That compression method is not supported"
+
    def write(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname.

        filename: path of the file to add (read in binary mode).
        arcname: name to store in the archive (defaults to filename).
        compress_type: overrides the archive-level compression method.
        """
        st = os.stat(filename)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            zinfo = ZipInfo(filename, date_time)
        else:
            zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16L      # Unix attributes
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type
        self._writecheck(zinfo)
        fp = open(filename, "rb")
        zinfo.flag_bits = 0x00
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        # Must overwrite CRC and sizes with correct data later
        zinfo.CRC = CRC = 0
        zinfo.compress_size = compress_size = 0
        zinfo.file_size = file_size = 0
        self.fp.write(zinfo.FileHeader())
        zinfo.file_offset = self.fp.tell()      # Start of file bytes
        if zinfo.compress_type == ZIP_DEFLATED:
            # wbits=-15: emit a raw deflate stream without zlib framing.
            cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                 zlib.DEFLATED, -15)
        else:
            cmpr = None
        # Stream the file through in 8 KB chunks, updating the running
        # CRC and sizes as we go.
        while 1:
            buf = fp.read(1024 * 8)
            if not buf:
                break
            file_size = file_size + len(buf)
            CRC = binascii.crc32(buf, CRC)
            if cmpr:
                buf = cmpr.compress(buf)
                compress_size = compress_size + len(buf)
            self.fp.write(buf)
        fp.close()
        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        # Seek backwards and patch the now-known CRC and file sizes into
        # the local header (offset 14 = CRC field within the header).
        position = self.fp.tell()       # Preserve current position in file
        self.fp.seek(zinfo.header_offset + 14, 0)
        self.fp.write(struct.pack("<lLL", zinfo.CRC, zinfo.compress_size,
              zinfo.file_size))
        self.fp.seek(position, 0)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
+
+    def writestr(self, zinfo_or_arcname, bytes):
+        """Write a file into the archive.  The contents are the string
+        'bytes'.  'zinfo_or_arcname' is either a ZipInfo instance or
+        the name of the file in the archive.  When only a name is given,
+        the current time becomes the timestamp and the archive-wide
+        compression setting is used."""
+        if not isinstance(zinfo_or_arcname, ZipInfo):
+            zinfo = ZipInfo(filename=zinfo_or_arcname,
+                            date_time=time.localtime(time.time()))
+            zinfo.compress_type = self.compression
+        else:
+            zinfo = zinfo_or_arcname
+        self._writecheck(zinfo)  # raises RuntimeError on closed archive / bad method
+        zinfo.file_size = len(bytes)            # Uncompressed size
+        zinfo.CRC = binascii.crc32(bytes)       # CRC-32 checksum
+        if zinfo.compress_type == ZIP_DEFLATED:
+            # wbits = -15: raw deflate stream, as the ZIP format requires.
+            co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
+                 zlib.DEFLATED, -15)
+            bytes = co.compress(bytes) + co.flush()
+            zinfo.compress_size = len(bytes)    # Compressed size
+        else:
+            zinfo.compress_size = zinfo.file_size
+        zinfo.header_offset = self.fp.tell()    # Start of header bytes
+        self.fp.write(zinfo.FileHeader())
+        zinfo.file_offset = self.fp.tell()      # Start of file bytes
+        self.fp.write(bytes)
+        # Flag bit 0x08 means a data descriptor follows the file data
+        # (used when a caller supplied a ZipInfo with that bit set).
+        if zinfo.flag_bits & 0x08:
+            # Write CRC and file sizes after the file data
+            self.fp.write(struct.pack("<lLL", zinfo.CRC, zinfo.compress_size,
+                  zinfo.file_size))
+        self.filelist.append(zinfo)
+        self.NameToInfo[zinfo.filename] = zinfo
+
+    def __del__(self):
+        """Call the "close()" method in case the user forgot."""
+        self.close()
+
+    def close(self):
+        """Close the file, and for mode "w" and "a" write the ending
+        records (the central directory and the end-of-archive record)."""
+        if self.fp is None:
+            return  # already closed; makes repeated close() / __del__ safe
+        if self.mode in ("w", "a"):             # write ending records
+            count = 0
+            pos1 = self.fp.tell()               # central directory starts here
+            for zinfo in self.filelist:         # write central directory
+                count = count + 1
+                dt = zinfo.date_time
+                # Pack (year, month, day) / (hour, minute, second) into
+                # MS-DOS date/time words; years are relative to 1980 and
+                # seconds have 2-second granularity.
+                dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
+                dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
+                centdir = struct.pack(structCentralDir,
+                  stringCentralDir, zinfo.create_version,
+                  zinfo.create_system, zinfo.extract_version, zinfo.reserved,
+                  zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
+                  zinfo.CRC, zinfo.compress_size, zinfo.file_size,
+                  len(zinfo.filename), len(zinfo.extra), len(zinfo.comment),
+                  0, zinfo.internal_attr, zinfo.external_attr,
+                  zinfo.header_offset)
+                self.fp.write(centdir)
+                # Variable-length fields follow the fixed central record.
+                self.fp.write(zinfo.filename)
+                self.fp.write(zinfo.extra)
+                self.fp.write(zinfo.comment)
+            pos2 = self.fp.tell()
+            # Write end-of-zip-archive record: disk numbers (0, 0), entry
+            # counts, central directory size and offset, empty comment.
+            endrec = struct.pack(structEndArchive, stringEndArchive,
+                     0, 0, count, count, pos2 - pos1, pos1, 0)
+            self.fp.write(endrec)
+            self.fp.flush()
+        # Only close the underlying stream if we opened it ourselves.
+        if not self._filePassed:
+            self.fp.close()
+        self.fp = None
+
+
+class PyZipFile(ZipFile):
+    """Class to create ZIP archives with Python library files and packages."""
+
+    def writepy(self, pathname, basename = ""):
+        """Add all files from "pathname" to the ZIP archive.
+
+        If pathname is a package directory, search the directory and
+        all package subdirectories recursively for all *.py and enter
+        the modules into the archive.  If pathname is a plain
+        directory, listdir *.py and enter all modules.  Else, pathname
+        must be a Python *.py file and the module will be put into the
+        archive.  Added modules are always module.pyo or module.pyc.
+        This method will compile the module.py into module.pyc if
+        necessary.
+
+        basename is the archive-internal directory prefix accumulated
+        across recursive calls.
+        """
+        dir, name = os.path.split(pathname)
+        if os.path.isdir(pathname):
+            initname = os.path.join(pathname, "__init__.py")
+            if os.path.isfile(initname):
+                # This is a package directory, add it
+                if basename:
+                    basename = "%s/%s" % (basename, name)
+                else:
+                    basename = name
+                if self.debug:
+                    print "Adding package in", pathname, "as", basename
+                # [0:-3] strips ".py"; _get_codename resolves the
+                # matching .pyc/.pyo (compiling if needed).
+                fname, arcname = self._get_codename(initname[0:-3], basename)
+                if self.debug:
+                    print "Adding", arcname
+                self.write(fname, arcname)
+                dirlist = os.listdir(pathname)
+                dirlist.remove("__init__.py")  # already added above
+                # Add all *.py files and package subdirectories
+                for filename in dirlist:
+                    path = os.path.join(pathname, filename)
+                    root, ext = os.path.splitext(filename)
+                    if os.path.isdir(path):
+                        if os.path.isfile(os.path.join(path, "__init__.py")):
+                            # This is a package directory, add it
+                            self.writepy(path, basename)  # Recursive call
+                    elif ext == ".py":
+                        fname, arcname = self._get_codename(path[0:-3],
+                                         basename)
+                        if self.debug:
+                            print "Adding", arcname
+                        self.write(fname, arcname)
+            else:
+                # This is NOT a package directory, add its files at top level
+                if self.debug:
+                    print "Adding files from directory", pathname
+                for filename in os.listdir(pathname):
+                    path = os.path.join(pathname, filename)
+                    root, ext = os.path.splitext(filename)
+                    if ext == ".py":
+                        fname, arcname = self._get_codename(path[0:-3],
+                                         basename)
+                        if self.debug:
+                            print "Adding", arcname
+                        self.write(fname, arcname)
+        else:
+            if pathname[-3:] != ".py":
+                raise RuntimeError, \
+                      'Files added with writepy() must end with ".py"'
+            fname, arcname = self._get_codename(pathname[0:-3], basename)
+            if self.debug:
+                print "Adding file", arcname
+            self.write(fname, arcname)
+
+    def _get_codename(self, pathname, basename):
+        """Return (filename, archivename) for the path.
+
+        Given a module name path, return the correct file path and
+        archive name, compiling if necessary.  For example, given
+        /python/lib/string, return (/python/lib/string.pyc, string).
+        """
+        file_py  = pathname + ".py"
+        file_pyc = pathname + ".pyc"
+        file_pyo = pathname + ".pyo"
+        # Prefer an up-to-date .pyo, then an up-to-date .pyc; otherwise
+        # (re)compile the .py into a fresh .pyc.
+        if os.path.isfile(file_pyo) and \
+                            os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
+            fname = file_pyo    # Use .pyo file
+        elif not os.path.isfile(file_pyc) or \
+             os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
+            import py_compile
+            if self.debug:
+                print "Compiling", file_py
+            try:
+                py_compile.compile(file_py, file_pyc, None, True)
+            except py_compile.PyCompileError,err:
+                # On compile error, print the message; fname below still
+                # points at the (possibly stale or missing) .pyc.
+                print err.msg
+            fname = file_pyc
+        else:
+            fname = file_pyc
+        archivename = os.path.split(fname)[1]
+        if basename:
+            archivename = "%s/%s" % (basename, archivename)
+        return (fname, archivename)
diff --git a/depot_tools/release/win/python_24/README.google b/depot_tools/release/win/python_24/README.google
new file mode 100644
index 0000000..5d92a96
--- /dev/null
+++ b/depot_tools/release/win/python_24/README.google
@@ -0,0 +1,75 @@
+This is a slightly customized version of Python 2.4.1.
+
+This version was built from the 2.4.1 sources at python.org.
+
+It was compiled using Visual Studio 7.1, after changing the build files
+to statically link the CRT (since some machines don't have msvcrt71.dll).
+Several compiler and linker optimization settings were also changed.  See
+below for full details.
+
+Chris Prince
+(cprince@google.com)
+
+
+
+BUILD INSTRUCTIONS
+
+--- Setup ---
+* Unpack the Python source (from python.org) to ROOT\Python-2.4.1
+* Unpack the bzip2 source to ROOT\bzip2-1.0.3
+* Unpack the zlib source to ROOT\zlib-1.2.2
+* Unpack the BSDDB source to ROOT\db-4.3.28.NC
+* Unpack the OpenSSL source to ROOT\openssl-0.9.7g
+
+See the Python build docs for where to download the source packages.
+Note that we don't bother supporting Tcl/Tk on Windows.
+
+* Modify the Python projects to use the downloaded versions:
+  - In PCBuild\bz2.vcproj, change "1.0.2" to "1.0.3"
+  - In PCBuild\zlib.vcproj, change "1.2.1" to "1.2.2"
+  - In PCBuild\_bsddb.vcproj, change "4.2.52" to "4.3.28.NC"
+      AND change "libdb42" to "libdb43"
+  - In PCBuild\build_ssl.py, add the full path to your perl.exe
+      (in main)
+
+* Build BSDDB using the instructions included with Python and BSDDB
+  (see PCBuild\readme.txt).
+
+--- Enable CRT Static Linking, and Optimizations ---
+
+* Under bzip2: in makefile.msc, change "-MD" to "-MT"
+* Under OpenSSL: in util\pl\VC-32.pl, change "/MD" to "/MT"
+* Under Python: in PCBuild\_ssl.mak, change "/MD" to "/MT"
+* Under zlib: in win32\makefile.msc, change "-MD" to "-MT"
+
+* Open the Python solution in VC and choose the Release configuration.
+  Select all projects (except _ssl and Solution Items).  Right-click,
+  select Properties, and:
+  - Change C/C++ -> Code Generation -> Runtime Library from "/MD" to "/MT"
+  - Enable "/OPT:REF" and "/OPT:ICF" under Linker -> Optimization.
+
+Now build the solution normally (F7).  Note that one project will fail
+to build (_tkinter).
+
+--- Create the distribution ---
+
+* Create an output directory OUTDIR.  Then from the Python directory:
+  - Copy PCBuild\py*.exe and PCBuild\py*.dll to OUTDIR\
+  - Copy PCBuild\*.pyd to OUTDIR\DLLs\
+  - Copy Lib\... to OUTDIR\Lib\
+  - To save space, delete some unnecessary files from OUTDIR:
+    + Lib\plat-*\... (none are applicable to Windows)
+    + Lib\lib-tk\...
+    + Lib\test\...
+    + Lib\bsddb\test\...
+    + Lib\email\test\...
+    + Lib\distutils\tests\...
+
+* If you want the ability to build extension DLLs, then also:
+  - Copy Include\... to OUTDIR\include\
+  - Copy PC\pyconfig.h to OUTDIR\include\
+  - Copy PCBuild\*.lib to OUTDIR\libs\
+
+--- Check that this worked ---
+
+* Run dumpbin on OUTDIR\* and OUTDIR\DLLs\*, and grep the output to
+  make sure "msvcrt" is not present.
diff --git a/depot_tools/release/win/python_24/Scripts/change-svn-wc-format b/depot_tools/release/win/python_24/Scripts/change-svn-wc-format
new file mode 100644
index 0000000..c472c77f
--- /dev/null
+++ b/depot_tools/release/win/python_24/Scripts/change-svn-wc-format
@@ -0,0 +1,352 @@
+#!/usr/bin/env python

+#

+# change-svn-wc-format.py: Change the format of a Subversion working copy.

+#

+# ====================================================================

+# Copyright (c) 2007 CollabNet.  All rights reserved.

+#

+# This software is licensed as described in the file COPYING, which

+# you should have received as part of this distribution.  The terms

+# are also available at http://subversion.tigris.org/license-1.html.

+# If newer versions of this license are posted there, you may use a

+# newer version instead, at your option.

+#

+# This software consists of voluntary contributions made by many

+# individuals.  For exact contribution history, see the revision

+# history and logs, available at http://subversion.tigris.org/.

+# ====================================================================

+

+import sys

+import os

+import getopt

+# Prefer gnu_getopt (allows options to follow positional arguments);
+# fall back to plain getopt on Python versions that lack it.
+try:

+  my_getopt = getopt.gnu_getopt

+except AttributeError:

+  my_getopt = getopt.getopt

+

+# Pretend we have true booleans on older python versions
+try:

+  True

+except:

+  True = 1

+  False = 0

+

+### The entries file parser in subversion/tests/cmdline/svntest/entry.py

+### handles the XML-based WC entries file format used by Subversion

+### 1.3 and lower.  It could be rolled into this script.

+

+# Working-copy format number written by each supported Subversion release.
+LATEST_FORMATS = { "1.4" : 8,

+                   "1.5" : 9 }

+

+def usage_and_exit(error_msg=None):

+  """Write usage information and exit.  If ERROR_MSG is provided, that

+  error message is printed first (to stderr), the usage info goes to

+  stderr, and the script exits with a non-zero status.  Otherwise,

+  usage info goes to stdout and the script exits with a zero status."""

+  progname = os.path.basename(sys.argv[0])

+

+  # and/or idiom: errors go to stderr, plain help goes to stdout.
+  stream = error_msg and sys.stderr or sys.stdout

+  if error_msg:

+    print >> stream, "ERROR: %s\n" % error_msg

+  print >> stream, """\

+usage: %s WC_PATH SVN_VERSION [--verbose] [--force] [--skip-unknown-format]

+       %s --help

+

+Change the format of a Subversion working copy to that of SVN_VERSION.

+

+  --skip-unknown-format    : skip directories with unknown working copy

+                             format and continue the update

+""" % (progname, progname)

+  sys.exit(error_msg and 1 or 0)

+

+def get_adm_dir():

+  """Return the name of Subversion's administrative directory,

+  adjusted for the SVN_ASP_DOT_NET_HACK environment variable.  See

+  <http://svn.collab.net/repos/svn/trunk/notes/asp-dot-net-hack.txt>

+  for details."""

+  # and/or idiom: "_svn" when the env var is set, ".svn" otherwise.
+  return os.environ.has_key("SVN_ASP_DOT_NET_HACK") and "_svn" or ".svn"

+

+class WCFormatConverter:

+  "Performs WC format conversions."

+  root_path = None              # top of the working copy to convert

+  error_on_unrecognized = True  # raise (vs. skip) on unknown WC formats

+  force = False                 # permit lossy conversions

+  verbosity = 0                 # 0 = quiet; higher values print progress

+

+  def write_dir_format(self, format_nbr, dirname, paths):

+    """Attempt to write the WC format FORMAT_NBR to the entries file

+    for DIRNAME.  Throws LossyConversionException when not in --force

+    mode, and unconvertable WC data is encountered.

+

+    Used as the visitor callback for os.path.walk (see

+    change_wc_format), so FORMAT_NBR arrives as the walk 'arg'."""

+

+    # Avoid iterating in unversioned directories.
+    if not get_adm_dir() in paths:

+      # Emptying the list in place prunes the walk below this directory.
+      del paths[:]

+      return

+

+    for path in paths:

+      # Process the entries file for this versioned directory.
+      if path == get_adm_dir():

+        if self.verbosity:

+          print "Processing directory '%s'" % dirname

+        entries = Entries(os.path.join(dirname, path, "entries"))

+

+        if self.verbosity:

+          print "Parsing file '%s'" % entries.path

+        try:

+          entries.parse(self.verbosity)

+        except UnrecognizedWCFormatException, e:

+          if self.error_on_unrecognized:

+            raise

+          # --skip-unknown-format: report and carry on.
+          print >>sys.stderr, "%s, skipping" % (e,)

+

+        if self.verbosity:

+          print "Checking whether WC format can be converted"

+        try:

+          entries.assert_valid_format(format_nbr, self.verbosity)

+        except LossyConversionException, e:

+          # In --force mode, ignore complaints about lossy conversion.
+          if self.force:

+            print "WARNING: WC format conversion will be lossy. Dropping "\

+                  "field(s) %s " % ", ".join(e.lossy_fields)

+          else:

+            raise

+

+        if self.verbosity:

+          print "Writing WC format"

+        entries.write_format(format_nbr)

+        # Only one admin dir per directory; no need to scan further.
+        break

+

+  def change_wc_format(self, format_nbr):

+    """Walk all paths in a WC tree, and change their format to

+    FORMAT_NBR.  Throws a LocalException subclass (see

+    write_dir_format) if the WC format should not be converted, or is

+    unrecognized."""

+    os.path.walk(self.root_path, self.write_dir_format, format_nbr)

+

+class Entries:

+  """Represents a .svn/entries file.

+

+  'The entries file' section in subversion/libsvn_wc/README is a

+  useful reference."""

+

+  # The name and index of each field composing an entry's record.
+  # The tuple index is the field's position within a record, so it is
+  # also the index into Entry.fields.
+  entry_fields = (

+    "name",

+    "kind",

+    "revision",

+    "url",

+    "repos",

+    "schedule",

+    "text-time",

+    "checksum",

+    "committed-date",

+    "committed-rev",

+    "last-author",

+    "has-props",

+    "has-prop-mods",

+    "cachable-props",

+    "present-props",

+    "conflict-old",

+    "conflict-new",

+    "conflict-wrk",

+    "prop-reject-file",

+    "copied",

+    "copyfrom-url",

+    "copyfrom-rev",

+    "deleted",

+    "absent",

+    "incomplete",

+    "uuid",

+    "lock-token",

+    "lock-owner",

+    "lock-comment",

+    "lock-creation-date",

+    "changelist",

+    "keep-local",

+    "working-size",

+    "depth",

+  )

+

+  def __init__(self, path):

+    self.path = path      # path of the entries file on disk

+    self.entries = []     # parsed Entry objects, filled in by parse()

+

+  def parse(self, verbosity=0):

+    """Parse the entries file.  Throw UnrecognizedWCFormatException if

+    the WC format is unrecognized."""

+

+    input = open(self.path, "r")

+

+    # Read and discard WC format number from INPUT.  Validate that it
+    # is a supported format for conversion.
+    format_line = input.readline()

+    try:

+      format_nbr = int(format_line)

+    except ValueError:

+      format_nbr = -1   # sentinel: never in LATEST_FORMATS.values()

+    if not format_nbr in LATEST_FORMATS.values():

+      raise UnrecognizedWCFormatException(format_nbr, self.path)

+

+    # Parse file into individual entries, to later inspect for
+    # non-convertable data.
+    entry = None

+    while True:

+      entry = self.parse_entry(input, verbosity)

+      if entry is None:

+        break

+      self.entries.append(entry)

+

+    input.close()

+

+  def assert_valid_format(self, format_nbr, verbosity=0):

+    # Re-raises LossyConversionException from any entry that carries
+    # data the target format cannot represent.
+    if verbosity >= 2:

+      print "Validating format for entries file '%s'" % self.path

+    for entry in self.entries:

+      if verbosity >= 3:

+        print "Validating format for entry '%s'" % entry.get_name()

+      try:

+        entry.assert_valid_format(format_nbr)

+      except LossyConversionException:

+        if verbosity >= 3:

+          print >> sys.stderr, "Offending entry:"

+          print >> sys.stderr, str(entry)

+        raise

+

+  def parse_entry(self, input, verbosity=0):

+    "Read an individual entry from INPUT stream.  Returns None at EOF."

+    entry = None

+

+    while True:

+      line = input.readline()

+      if line in ("", "\x0c\n"):

+        # EOF or end of entry terminator encountered.
+        break

+

+      if entry is None:

+        entry = Entry()

+

+      # Retain the field value, ditching its field terminator ("\x0a").
+      entry.fields.append(line[:-1])

+

+    if entry is not None and verbosity >= 3:

+      sys.stdout.write(str(entry))

+      print "-" * 76

+    return entry

+

+  def write_format(self, format_nbr):

+    # Temporarily make the file writable, overwrite just the leading
+    # format number in place (unbuffered, no truncation), then restore
+    # the read-only mode.
+    os.chmod(self.path, 0600)

+    output = open(self.path, "r+", 0)

+    output.write("%d" % format_nbr)

+    output.close()

+    os.chmod(self.path, 0400)

+

+class Entry:

+  "Describes an entry in a WC."

+

+  # The list of field indices within an entry's record which must be
+  # retained for 1.5 -> 1.4 migration (changelist, keep-local, and depth).
+  must_retain_fields = (30, 31, 33)

+

+  def __init__(self):

+    self.fields = []   # raw field values, indexed per Entries.entry_fields

+

+  def assert_valid_format(self, format_nbr):

+    "Assure that conversion will be non-lossy by examining fields."

+

+    # Check whether lossy conversion is being attempted.
+    lossy_fields = []

+    for field_index in self.must_retain_fields:

+      # Field is present (index within range) and non-empty.
+      if len(self.fields) - 1 >= field_index and self.fields[field_index]:

+        lossy_fields.append(Entries.entry_fields[field_index])

+    if lossy_fields:

+      raise LossyConversionException(lossy_fields,

+        "Lossy WC format conversion requested for entry '%s'\n"

+        "Data for the following field(s) is unsupported by older versions "

+        "of\nSubversion, and is likely to be subsequently discarded, and/or "

+        "have\nunexpected side-effects: %s\n\n"

+        "WC format conversion was cancelled, use the --force option to "

+        "override\nthe default behavior."

+        % (self.get_name(), ", ".join(lossy_fields)))

+

+  def get_name(self):

+    "Return the name of this entry (field 0), or '' if there are no fields."

+    return len(self.fields) > 0 and self.fields[0] or ""

+

+  def __str__(self):

+    "Return all fields from this entry as a multi-line string."

+    rep = ""

+    for i in range(0, len(self.fields)):

+      rep += "[%s] %s\n" % (Entries.entry_fields[i], self.fields[i])

+    return rep

+

+

+class LocalException(Exception):

+  """Root of local exception class hierarchy.

+

+  main() catches this base class to report errors without a traceback."""

+  pass

+

+class LossyConversionException(LocalException):

+  "Exception thrown when a lossy WC format conversion is requested."

+  # NOTE(review): the 'str' parameter shadows the builtin; kept as-is
+  # since renaming would alter vendored code.
+  def __init__(self, lossy_fields, str):

+    self.lossy_fields = lossy_fields   # field names that would be dropped

+    self.str = str                     # pre-formatted user-facing message

+  def __str__(self):

+    return self.str

+

+class UnrecognizedWCFormatException(LocalException):

+  "Exception thrown for working-copy formats this script cannot convert."

+  def __init__(self, format, path):

+    self.format = format   # the unrecognized format number (-1 if unparsable)

+    self.path = path       # the entries file it was read from

+  def __str__(self):

+    return "Unrecognized WC format %d in '%s'" % (self.format, self.path)

+

+

+def main():

+  # Parse the command line; any getopt failure is reported as usage.
+  try:

+    opts, args = my_getopt(sys.argv[1:], "vh?",

+                           ["debug", "force", "skip-unknown-format",

+                            "verbose", "help"])

+  except:

+    usage_and_exit("Unable to process arguments/options")

+

+  converter = WCFormatConverter()

+

+  # Process arguments.
+  if len(args) == 2:

+    converter.root_path = args[0]

+    svn_version = args[1]

+  else:

+    usage_and_exit()

+

+  # Process options.
+  debug = False

+  for opt, value in opts:

+    if opt in ("--help", "-h", "-?"):

+      usage_and_exit()

+    elif opt == "--force":

+      converter.force = True

+    elif opt == "--skip-unknown-format":

+      converter.error_on_unrecognized = False

+    elif opt in ("--verbose", "-v"):

+      converter.verbosity += 1

+    elif opt == "--debug":

+      debug = True

+    else:

+      usage_and_exit("Unknown option '%s'" % opt)

+

+  # Map the requested Subversion release to its WC format number.
+  try:

+    new_format_nbr = LATEST_FORMATS[svn_version]

+  except KeyError:

+    usage_and_exit("Unsupported version number '%s'" % svn_version)

+

+  try:

+    converter.change_wc_format(new_format_nbr)

+  except LocalException, e:

+    if debug:

+      raise   # keep the traceback when debugging

+    print >> sys.stderr, str(e)

+    sys.exit(1)

+

+  print "Converted WC at '%s' into format %d for Subversion %s" % \

+        (converter.root_path, new_format_nbr, svn_version)

+

+# Entry point when executed as a script (not when imported).
+if __name__ == "__main__":

+  main()

diff --git a/depot_tools/release/win/python_24/include/Python.h b/depot_tools/release/win/python_24/include/Python.h
new file mode 100644
index 0000000..2d48d2e
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/Python.h
@@ -0,0 +1,171 @@
+#ifndef Py_PYTHON_H
+#define Py_PYTHON_H
+/* Since this is a "meta-include" file, no #ifdef __cplusplus / extern "C" { */
+
+/* Include nearly all Python header files */
+
+#include "patchlevel.h"
+#include "pyconfig.h"
+
+/* Cyclic gc is always enabled, starting with release 2.3a1.  Supply the
+ * old symbol for the benefit of extension modules written before then
+ * that may be conditionalizing on it.  The core doesn't use it anymore.
+ */
+#ifndef WITH_CYCLE_GC
+#define WITH_CYCLE_GC 1
+#endif
+
+#include <limits.h>
+
+#ifndef UCHAR_MAX
+#error "Something's broken.  UCHAR_MAX should be defined in limits.h."
+#endif
+
+#if UCHAR_MAX != 255
+#error "Python's source code assumes C's unsigned char is an 8-bit type."
+#endif
+
+#if defined(__sgi) && defined(WITH_THREAD) && !defined(_SGI_MP_SOURCE)
+#define _SGI_MP_SOURCE
+#endif
+
+#include <stdio.h>
+#ifndef NULL
+#   error "Python.h requires that stdio.h define NULL."
+#endif
+
+#include <string.h>
+#include <errno.h>
+#include <stdlib.h>
+#ifdef HAVE_UNISTD_H
+#include <unistd.h>
+#endif
+
+/* For uintptr_t, intptr_t */
+#ifdef HAVE_STDDEF_H
+#include <stddef.h>
+#endif
+
+/* CAUTION:  Build setups should ensure that NDEBUG is defined on the
+ * compiler command line when building Python in release mode; else
+ * assert() calls won't be removed.
+ */
+#include <assert.h>
+
+#include "pyport.h"
+
+/* pyconfig.h or pyport.h may or may not define DL_IMPORT */
+#ifndef DL_IMPORT	/* declarations for DLL import/export */
+#define DL_IMPORT(RTYPE) RTYPE
+#endif
+#ifndef DL_EXPORT	/* declarations for DLL import/export */
+#define DL_EXPORT(RTYPE) RTYPE
+#endif
+
+/* Debug-mode build with pymalloc implies PYMALLOC_DEBUG.
+ *  PYMALLOC_DEBUG is in error if pymalloc is not in use.
+ */
+#if defined(Py_DEBUG) && defined(WITH_PYMALLOC) && !defined(PYMALLOC_DEBUG)
+#define PYMALLOC_DEBUG
+#endif
+#if defined(PYMALLOC_DEBUG) && !defined(WITH_PYMALLOC)
+#error "PYMALLOC_DEBUG requires WITH_PYMALLOC"
+#endif
+#include "pymem.h"
+
+#include "object.h"
+#include "objimpl.h"
+
+#include "pydebug.h"
+
+#include "unicodeobject.h"
+#include "intobject.h"
+#include "boolobject.h"
+#include "longobject.h"
+#include "floatobject.h"
+#ifndef WITHOUT_COMPLEX
+#include "complexobject.h"
+#endif
+#include "rangeobject.h"
+#include "stringobject.h"
+#include "bufferobject.h"
+#include "tupleobject.h"
+#include "listobject.h"
+#include "dictobject.h"
+#include "enumobject.h"
+#include "setobject.h"
+#include "methodobject.h"
+#include "moduleobject.h"
+#include "funcobject.h"
+#include "classobject.h"
+#include "fileobject.h"
+#include "cobject.h"
+#include "traceback.h"
+#include "sliceobject.h"
+#include "cellobject.h"
+#include "iterobject.h"
+#include "genobject.h"
+#include "descrobject.h"
+#include "weakrefobject.h"
+
+#include "codecs.h"
+#include "pyerrors.h"
+
+#include "pystate.h"
+
+#include "modsupport.h"
+#include "pythonrun.h"
+#include "ceval.h"
+#include "sysmodule.h"
+#include "intrcheck.h"
+#include "import.h"
+
+#include "abstract.h"
+
+#include "compile.h"
+#include "eval.h"
+
+#include "pystrtod.h"
+
+/* _Py_Mangle is defined in compile.c */
+PyAPI_FUNC(int) _Py_Mangle(char *p, char *name, \
+				 char *buffer, size_t maxlen);
+
+/* PyArg_GetInt is deprecated and should not be used, use PyArg_Parse(). */
+#define PyArg_GetInt(v, a)	PyArg_Parse((v), "i", (a))
+
+/* PyArg_NoArgs should not be necessary.
+   Set ml_flags in the PyMethodDef to METH_NOARGS. */
+#define PyArg_NoArgs(v)		PyArg_Parse(v, "")
+
+/* Convert a possibly signed character to a nonnegative int */
+/* XXX This assumes characters are 8 bits wide */
+#ifdef __CHAR_UNSIGNED__
+#define Py_CHARMASK(c)		(c)
+#else
+#define Py_CHARMASK(c)		((c) & 0xff)
+#endif
+
+#include "pyfpe.h"
+
+/* These definitions must match corresponding definitions in graminit.h.
+   There's code in compile.c that checks that they are the same. */
+#define Py_single_input 256
+#define Py_file_input 257
+#define Py_eval_input 258
+
+#ifdef HAVE_PTH
+/* GNU pth user-space thread support */
+#include <pth.h>
+#endif
+
+/* Define macros for inline documentation. */
+#define PyDoc_VAR(name) static char name[]
+#define PyDoc_STRVAR(name,str) PyDoc_VAR(name) = PyDoc_STR(str)
+#ifdef WITH_DOC_STRINGS
+#define PyDoc_STR(str) str
+#else
+#define PyDoc_STR(str) ""
+#endif
+
+#endif /* !Py_PYTHON_H */
diff --git a/depot_tools/release/win/python_24/include/abstract.h b/depot_tools/release/win/python_24/include/abstract.h
new file mode 100644
index 0000000..17ce105
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/abstract.h
@@ -0,0 +1,1223 @@
+#ifndef Py_ABSTRACTOBJECT_H
+#define Py_ABSTRACTOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Abstract Object Interface (many thanks to Jim Fulton) */
+
+/*
+   PROPOSAL: A Generic Python Object Interface for Python C Modules
+
+Problem
+
+  Python modules written in C that must access Python objects must do
+  so through routines whose interfaces are described by a set of
+  include files.  Unfortunately, these routines vary according to the
+  object accessed.  To use these routines, the C programmer must check
+  the type of the object being used and must call a routine based on
+  the object type.  For example, to access an element of a sequence,
+  the programmer must determine whether the sequence is a list or a
+  tuple:
+
+    if(is_tupleobject(o))
+      e=gettupleitem(o,i)
+    else if(is_listitem(o))
+      e=getlistitem(o,i)
+
+  If the programmer wants to get an item from another type of object
+  that provides sequence behavior, there is no clear way to do it
+  correctly.  
+
+  The persistent programmer may peruse object.h and find that the
+  _typeobject structure provides a means of invoking up to (currently
+  about) 41 special operators.  So, for example, a routine can get an
+  item from any object that provides sequence behavior. However, to
+  use this mechanism, the programmer must make their code dependent on
+  the current Python implementation.
+
+  Also, certain semantics, especially memory management semantics, may
+  differ by the type of object being used.  Unfortunately, these
+  semantics are not clearly described in the current include files.
+  An abstract interface providing more consistent semantics is needed. 
+
+Proposal
+
+  I propose the creation of a standard interface (with an associated
+  library of routines and/or macros) for generically obtaining the
+  services of Python objects.  This proposal can be viewed as one
+    component of a Python C interface consisting of several components.
+
+  From the viewpoint of C access to Python services, we have (as
+  suggested by Guido in off-line discussions):
+
+  - "Very high level layer": two or three functions that let you exec or
+    eval arbitrary Python code given as a string in a module whose name is
+    given, passing C values in and getting C values out using
+    mkvalue/getargs style format strings.  This does not require the user
+    to declare any variables of type "PyObject *".  This should be enough
+    to write a simple application that gets Python code from the user,
+    execs it, and returns the output or errors.  (Error handling must also
+    be part of this API.)
+
+  - "Abstract objects layer": which is the subject of this proposal.
+    It has many functions operating on objects, and lest you do many
+    things from C that you can also write in Python, without going
+    through the Python parser.
+
+  - "Concrete objects layer": This is the public type-dependent
+    interface provided by the standard built-in types, such as floats,
+    strings, and lists.  This interface exists and is currently
+    documented by the collection of include files provided with the
+    Python distributions.
+
+  From the point of view of Python accessing services provided by C
+  modules: 
+
+  - "Python module interface": this interface consist of the basic
+    routines used to define modules and their members.  Most of the
+    current extensions-writing guide deals with this interface.
+
+  - "Built-in object interface": this is the interface that a new
+    built-in type must provide and the mechanisms and rules that a
+    developer of a new built-in type must use and follow.
+
+  This proposal is a "first-cut" that is intended to spur
+  discussion. See especially the lists of notes.
+
+  The Python C object interface will provide four protocols: object,
+  numeric, sequence, and mapping.  Each protocol consists of a
+  collection of related operations.  If an operation that is not
+  provided by a particular type is invoked, then a standard exception,
+   NotImplementedError is raised with an operation name as an argument.
+  In addition, for convenience this interface defines a set of
+  constructors for building objects of built-in types.  This is needed
+  so new objects can be returned from C functions that otherwise treat
+  objects generically.
+
+Memory Management
+
+  For all of the functions described in this proposal, if a function
+  retains a reference to a Python object passed as an argument, then the
+  function will increase the reference count of the object.  It is
+  unnecessary for the caller to increase the reference count of an
+  argument in anticipation of the object's retention.
+
+  All Python objects returned from functions should be treated as new
+  objects.  Functions that return objects assume that the caller will
+  retain a reference and the reference count of the object has already
+  been incremented to account for this fact.  A caller that does not
+  retain a reference to an object that is returned from a function
+  must decrement the reference count of the object (using
+  DECREF(object)) to prevent memory leaks.
+
+  Note that the behavior mentioned here is different from the current
+  behavior for some objects (e.g. lists and tuples) when certain
+  type-specific routines are called directly (e.g. setlistitem).  The
+  proposed abstraction layer will provide a consistent memory
+  management interface, correcting for inconsistent behavior for some
+  built-in types.
+
+Protocols
+
+xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx*/
+
+/*  Object Protocol: */
+
+     /* Implemented elsewhere:
+
+     int PyObject_Print(PyObject *o, FILE *fp, int flags);
+
+         Print an object, o, on file, fp.  Returns -1 on
+	 error.  The flags argument is used to enable certain printing
+	 options. The only option currently supported is Py_Print_RAW. 
+
+         (What should be said about Py_Print_RAW?)	
+
+       */
+
+     /* Implemented elsewhere:
+
+     int PyObject_HasAttrString(PyObject *o, char *attr_name);
+
+         Returns 1 if o has the attribute attr_name, and 0 otherwise.
+	 This is equivalent to the Python expression:
+	 hasattr(o,attr_name). 
+
+	 This function always succeeds.
+
+       */
+
+     /* Implemented elsewhere:
+
+     PyObject* PyObject_GetAttrString(PyObject *o, char *attr_name);
+
+	 Retrieve an attribute named attr_name from object o.
+	 Returns the attribute value on success, or NULL on failure.
+	 This is the equivalent of the Python expression: o.attr_name.
+
+       */
+
+     /* Implemented elsewhere:
+
+     int PyObject_HasAttr(PyObject *o, PyObject *attr_name);
+
+         Returns 1 if o has the attribute attr_name, and 0 otherwise.
+	 This is equivalent to the Python expression:
+	 hasattr(o,attr_name). 
+
+	 This function always succeeds.
+
+       */
+
+     /* Implemented elsewhere:
+
+     PyObject* PyObject_GetAttr(PyObject *o, PyObject *attr_name);
+
+	 Retrieve an attribute named attr_name from object o.
+	 Returns the attribute value on success, or NULL on failure.
+	 This is the equivalent of the Python expression: o.attr_name.
+
+       */
+
+
+     /* Implemented elsewhere:
+
+     int PyObject_SetAttrString(PyObject *o, char *attr_name, PyObject *v);
+
+	 Set the value of the attribute named attr_name, for object o,
+	 to the value, v. Returns -1 on failure.  This is
+	 the equivalent of the Python statement: o.attr_name=v.
+
+       */
+
+     /* Implemented elsewhere:
+
+     int PyObject_SetAttr(PyObject *o, PyObject *attr_name, PyObject *v);
+
+	 Set the value of the attribute named attr_name, for object o,
+	 to the value, v. Returns -1 on failure.  This is
+	 the equivalent of the Python statement: o.attr_name=v.
+
+       */
+
+     /* implemented as a macro:
+
+     int PyObject_DelAttrString(PyObject *o, char *attr_name);
+
+	 Delete attribute named attr_name, for object o. Returns
+	 -1 on failure.  This is the equivalent of the Python
+	 statement: del o.attr_name.
+
+       */
+#define  PyObject_DelAttrString(O,A) PyObject_SetAttrString((O),(A),NULL)
+
+     /* implemented as a macro:
+
+     int PyObject_DelAttr(PyObject *o, PyObject *attr_name);
+
+	 Delete attribute named attr_name, for object o. Returns -1
+	 on failure.  This is the equivalent of the Python
+	 statement: del o.attr_name.
+
+       */
+#define  PyObject_DelAttr(O,A) PyObject_SetAttr((O),(A),NULL)
+
+     PyAPI_FUNC(int) PyObject_Cmp(PyObject *o1, PyObject *o2, int *result);
+
+       /*
+	 Compare the values of o1 and o2 using a routine provided by
+	 o1, if one exists, otherwise with a routine provided by o2.
+	 The result of the comparison is returned in result.  Returns
+	 -1 on failure.  This is the equivalent of the Python
+	 statement: result=cmp(o1,o2).
+
+       */
+
+     /* Implemented elsewhere:
+
+     int PyObject_Compare(PyObject *o1, PyObject *o2);
+
+	 Compare the values of o1 and o2 using a routine provided by
+	 o1, if one exists, otherwise with a routine provided by o2.
+	 Returns the result of the comparison on success.  On error,
+	 the value returned is undefined. This is equivalent to the
+	 Python expression: cmp(o1,o2).
+
+       */
+
+     /* Implemented elsewhere:
+
+     PyObject *PyObject_Repr(PyObject *o);
+
+	 Compute the string representation of object, o.  Returns the
+	 string representation on success, NULL on failure.  This is
+	 the equivalent of the Python expression: repr(o).
+
+	 Called by the repr() built-in function and by reverse quotes.
+
+       */
+
+     /* Implemented elsewhere:
+
+     PyObject *PyObject_Str(PyObject *o);
+
+	 Compute the string representation of object, o.  Returns the
+	 string representation on success, NULL on failure.  This is
+	 the equivalent of the Python expression: str(o).)
+
+	 Called by the str() built-in function and by the print
+	 statement.
+
+       */
+
+     /* Implemented elsewhere:
+
+     PyObject *PyObject_Unicode(PyObject *o);
+
+	 Compute the unicode representation of object, o.  Returns the
+	 unicode representation on success, NULL on failure.  This is
+	 the equivalent of the Python expression: unistr(o).)
+
+	 Called by the unistr() built-in function.
+
+       */
+
+     PyAPI_FUNC(int) PyCallable_Check(PyObject *o);
+
+       /*
+	 Determine if the object, o, is callable.  Return 1 if the
+	 object is callable and 0 otherwise.
+
+	 This function always succeeds.
+
+       */
+
+
+
+     PyAPI_FUNC(PyObject *) PyObject_Call(PyObject *callable_object,
+					 PyObject *args, PyObject *kw);
+
+       /*
+	 Call a callable Python object, callable_object, with
+	 arguments and keywords arguments.  The 'args' argument can not be
+	 NULL, but the 'kw' argument can be NULL.
+
+       */
+     
+     PyAPI_FUNC(PyObject *) PyObject_CallObject(PyObject *callable_object,
+                                               PyObject *args);
+
+       /*
+	 Call a callable Python object, callable_object, with
+	 arguments given by the tuple, args.  If no arguments are
+	 needed, then args may be NULL.  Returns the result of the
+	 call on success, or NULL on failure.  This is the equivalent
+	 of the Python expression: apply(o,args).
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyObject_CallFunction(PyObject *callable_object,
+                                                 char *format, ...);
+
+       /*
+	 Call a callable Python object, callable_object, with a
+	 variable number of C arguments. The C arguments are described
+	 using a mkvalue-style format string. The format may be NULL,
+	 indicating that no arguments are provided.  Returns the
+	 result of the call on success, or NULL on failure.  This is
+	 the equivalent of the Python expression: apply(o,args).
+
+       */
+
+
+     PyAPI_FUNC(PyObject *) PyObject_CallMethod(PyObject *o, char *m,
+                                               char *format, ...);
+
+       /*
+	 Call the method named m of object o with a variable number of
+	 C arguments.  The C arguments are described by a mkvalue
+	 format string.  The format may be NULL, indicating that no
+	 arguments are provided. Returns the result of the call on
+	 success, or NULL on failure.  This is the equivalent of the
+	 Python expression: o.method(args).
+       */
+
+
+     PyAPI_FUNC(PyObject *) PyObject_CallFunctionObjArgs(PyObject *callable,
+                                                        ...);
+
+       /*
+	 Call a callable Python object, callable_object, with a
+	 variable number of C arguments.  The C arguments are provided
+	 as PyObject * values, terminated by a NULL.  Returns the
+	 result of the call on success, or NULL on failure.  This is
+	 the equivalent of the Python expression: apply(o,args).
+       */
+
+
+     PyAPI_FUNC(PyObject *) PyObject_CallMethodObjArgs(PyObject *o,
+                                                      PyObject *m, ...);
+
+       /*
+	 Call the method named m of object o with a variable number of
+	 C arguments.  The C arguments are provided as PyObject *
+	 values, terminated by NULL.  Returns the result of the call
+	 on success, or NULL on failure.  This is the equivalent of
+	 the Python expression: o.method(args).
+       */
+
+
+     /* Implemented elsewhere:
+
+     long PyObject_Hash(PyObject *o);
+
+         Compute and return the hash, hash_value, of an object, o.  On
+	 failure, return -1.  This is the equivalent of the Python
+	 expression: hash(o).
+
+       */
+
+
+     /* Implemented elsewhere:
+
+     int PyObject_IsTrue(PyObject *o);
+
+	 Returns 1 if the object, o, is considered to be true, 0 if o is
+	 considered to be false and -1 on failure. This is equivalent to the
+	 Python expression: not not o
+
+       */
+
+     /* Implemented elsewhere:
+
+     int PyObject_Not(PyObject *o);
+
+	 Returns 0 if the object, o, is considered to be true, 1 if o is
+	 considered to be false and -1 on failure. This is equivalent to the
+	 Python expression: not o
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyObject_Type(PyObject *o);
+
+       /*
+	 On success, returns a type object corresponding to the object
+	 type of object o. On failure, returns NULL.  This is
+	 equivalent to the Python expression: type(o).
+       */
+
+     PyAPI_FUNC(int) PyObject_Size(PyObject *o);
+
+       /*
+         Return the size of object o.  If the object, o, provides
+	 both sequence and mapping protocols, the sequence size is
+	 returned. On error, -1 is returned.  This is the equivalent
+	 to the Python expression: len(o).
+
+       */
+
+       /* For DLL compatibility */
+#undef PyObject_Length
+     PyAPI_FUNC(int) PyObject_Length(PyObject *o);
+#define PyObject_Length PyObject_Size
+
+
+     PyAPI_FUNC(PyObject *) PyObject_GetItem(PyObject *o, PyObject *key);
+
+       /*
+	 Return element of o corresponding to the object, key, or NULL
+	 on failure. This is the equivalent of the Python expression:
+	 o[key].
+
+       */
+
+     PyAPI_FUNC(int) PyObject_SetItem(PyObject *o, PyObject *key, PyObject *v);
+
+       /*
+	 Map the object, key, to the value, v.  Returns
+	 -1 on failure.  This is the equivalent of the Python
+	 statement: o[key]=v.
+       */
+
+     PyAPI_FUNC(int) PyObject_DelItemString(PyObject *o, char *key);
+
+       /*
+         Remove the mapping for object, key, from the object *o.
+         Returns -1 on failure.  This is equivalent to
+         the Python statement: del o[key].
+       */
+
+     PyAPI_FUNC(int) PyObject_DelItem(PyObject *o, PyObject *key);
+
+       /*
+	 Delete the mapping for key from *o.  Returns -1 on failure.
+	 This is the equivalent of the Python statement: del o[key].
+       */
+
+     PyAPI_FUNC(int) PyObject_AsCharBuffer(PyObject *obj,
+					  const char **buffer,
+					  int *buffer_len);
+
+       /* 
+	  Takes an arbitrary object which must support the (character,
+	  single segment) buffer interface and returns a pointer to a
+	  read-only memory location useable as character based input
+	  for subsequent processing.
+
+	  0 is returned on success.  buffer and buffer_len are only
+	  set in case no error occurs. Otherwise, -1 is returned and
+	  an exception set.
+
+       */
+
+     PyAPI_FUNC(int) PyObject_CheckReadBuffer(PyObject *obj);
+
+      /*  
+	  Checks whether an arbitrary object supports the (character,
+	  single segment) buffer interface.  Returns 1 on success, 0
+	  on failure.
+
+      */
+
+     PyAPI_FUNC(int) PyObject_AsReadBuffer(PyObject *obj,
+					  const void **buffer,
+					  int *buffer_len);
+
+       /* 
+	  Same as PyObject_AsCharBuffer() except that this API expects
+	  (readable, single segment) buffer interface and returns a
+	  pointer to a read-only memory location which can contain
+	  arbitrary data.
+
+	  0 is returned on success.  buffer and buffer_len are only
+	  set in case no error occurs.  Otherwise, -1 is returned and
+	  an exception set.
+
+       */
+
+     PyAPI_FUNC(int) PyObject_AsWriteBuffer(PyObject *obj,
+					   void **buffer,
+					   int *buffer_len);
+
+       /* 
+	  Takes an arbitrary object which must support the (writeable,
+	  single segment) buffer interface and returns a pointer to a
+	  writeable memory location in buffer of size buffer_len.
+
+	  0 is returned on success.  buffer and buffer_len are only
+	  set in case no error occurs. Otherwise, -1 is returned and
+	  an exception set.
+
+       */
+
+/* Iterators */
+
+     PyAPI_FUNC(PyObject *) PyObject_GetIter(PyObject *);
+     /* Takes an object and returns an iterator for it.
+        This is typically a new iterator but if the argument
+	is an iterator, this returns itself. */
+
+#define PyIter_Check(obj) \
+    (PyType_HasFeature((obj)->ob_type, Py_TPFLAGS_HAVE_ITER) && \
+     (obj)->ob_type->tp_iternext != NULL)
+
+     PyAPI_FUNC(PyObject *) PyIter_Next(PyObject *);
+     /* Takes an iterator object and calls its tp_iternext slot,
+	returning the next value.  If the iterator is exhausted,
+	this returns NULL without setting an exception.
+	NULL with an exception means an error occurred. */
+
+/*  Number Protocol:*/
+
+     PyAPI_FUNC(int) PyNumber_Check(PyObject *o);
+
+       /*
+         Returns 1 if the object, o, provides numeric protocols, and
+	 false otherwise. 
+
+	 This function always succeeds.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Add(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of adding o1 and o2, or null on failure.
+	 This is the equivalent of the Python expression: o1+o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Subtract(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of subtracting o2 from o1, or null on
+	 failure.  This is the equivalent of the Python expression:
+	 o1-o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Multiply(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of multiplying o1 and o2, or null on
+	 failure.  This is the equivalent of the Python expression:
+	 o1*o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Divide(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of dividing o1 by o2, or null on failure.
+	 This is the equivalent of the Python expression: o1/o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_FloorDivide(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of dividing o1 by o2 giving an integral result,
+	 or null on failure.
+	 This is the equivalent of the Python expression: o1//o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_TrueDivide(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of dividing o1 by o2 giving a float result,
+	 or null on failure.
+	 This is the equivalent of the Python expression: o1/o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Remainder(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the remainder of dividing o1 by o2, or null on
+	 failure.  This is the equivalent of the Python expression:
+	 o1%o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Divmod(PyObject *o1, PyObject *o2);
+
+       /*
+	 See the built-in function divmod.  Returns NULL on failure.
+	 This is the equivalent of the Python expression:
+	 divmod(o1,o2).
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Power(PyObject *o1, PyObject *o2,
+                                          PyObject *o3);
+
+       /*
+	 See the built-in function pow.  Returns NULL on failure.
+	 This is the equivalent of the Python expression:
+	 pow(o1,o2,o3), where o3 is optional.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Negative(PyObject *o);
+
+       /*
+	 Returns the negation of o on success, or null on failure.
+	 This is the equivalent of the Python expression: -o.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Positive(PyObject *o);
+
+       /*
+         Returns the (what?) of o on success, or NULL on failure.
+	 This is the equivalent of the Python expression: +o.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Absolute(PyObject *o);
+
+       /*
+	 Returns the absolute value of o, or null on failure.  This is
+	 the equivalent of the Python expression: abs(o).
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Invert(PyObject *o);
+
+       /*
+	 Returns the bitwise negation of o on success, or NULL on
+	 failure.  This is the equivalent of the Python expression:
+	 ~o.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Lshift(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of left shifting o1 by o2 on success, or
+	 NULL on failure.  This is the equivalent of the Python
+	 expression: o1 << o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Rshift(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of right shifting o1 by o2 on success, or
+	 NULL on failure.  This is the equivalent of the Python
+	 expression: o1 >> o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_And(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of bitwise and of o1 and o2 on success, or
+	 NULL on failure. This is the equivalent of the Python
+	 expression: o1&o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Xor(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the bitwise exclusive or of o1 by o2 on success, or
+	 NULL on failure.  This is the equivalent of the Python
+	 expression: o1^o2.
+
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Or(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of bitwise or on o1 and o2 on success, or
+	 NULL on failure.  This is the equivalent of the Python
+	 expression: o1|o2.
+
+       */
+
+     /* Implemented elsewhere:
+
+     int PyNumber_Coerce(PyObject **p1, PyObject **p2);
+
+	 This function takes the addresses of two variables of type
+	 PyObject*.
+
+	 If the objects pointed to by *p1 and *p2 have the same type,
+	 increment their reference count and return 0 (success).
+	 If the objects can be converted to a common numeric type,
+	 replace *p1 and *p2 by their converted value (with 'new'
+	 reference counts), and return 0.
+	 If no conversion is possible, or if some other error occurs,
+	 return -1 (failure) and don't increment the reference counts.
+	 The call PyNumber_Coerce(&o1, &o2) is equivalent to the Python
+	 statement o1, o2 = coerce(o1, o2).
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Int(PyObject *o);
+
+       /*
+	 Returns the o converted to an integer object on success, or
+	 NULL on failure.  This is the equivalent of the Python
+	 expression: int(o).
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Long(PyObject *o);
+
+       /*
+	 Returns the o converted to a long integer object on success,
+	 or NULL on failure.  This is the equivalent of the Python
+	 expression: long(o).
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_Float(PyObject *o);
+
+       /*
+	 Returns the o converted to a float object on success, or NULL
+	 on failure.  This is the equivalent of the Python expression:
+	 float(o).
+       */
+	 
+/*  In-place variants of (some of) the above number protocol functions */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceAdd(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of adding o2 to o1, possibly in-place, or null
+	 on failure.  This is the equivalent of the Python expression:
+	 o1 += o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceSubtract(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of subtracting o2 from o1, possibly in-place or
+	 null on failure.  This is the equivalent of the Python expression:
+	 o1 -= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceMultiply(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of multiplying o1 by o2, possibly in-place, or
+	 null on failure.  This is the equivalent of the Python expression:
+	 o1 *= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceDivide(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of dividing o1 by o2, possibly in-place, or null
+	 on failure.  This is the equivalent of the Python expression:
+	 o1 /= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceFloorDivide(PyObject *o1,
+						       PyObject *o2);
+
+       /*
+	 Returns the result of dividing o1 by o2 giving an integral result,
+	 possibly in-place, or null on failure.
+	 This is the equivalent of the Python expression:
+	 o1 /= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceTrueDivide(PyObject *o1,
+						      PyObject *o2);
+
+       /*
+	 Returns the result of dividing o1 by o2 giving a float result,
+	 possibly in-place, or null on failure.
+	 This is the equivalent of the Python expression:
+	 o1 /= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceRemainder(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the remainder of dividing o1 by o2, possibly in-place, or
+	 null on failure.  This is the equivalent of the Python expression:
+	 o1 %= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlacePower(PyObject *o1, PyObject *o2,
+     						 PyObject *o3);
+
+       /*
+	 Returns the result of raising o1 to the power of o2, possibly
+	 in-place, or null on failure.  This is the equivalent of the Python
+	 expression: o1 **= o2, or pow(o1, o2, o3) if o3 is present.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceLshift(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of left shifting o1 by o2, possibly in-place, or
+	 null on failure.  This is the equivalent of the Python expression:
+	 o1 <<= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceRshift(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of right shifting o1 by o2, possibly in-place or
+	 null on failure.  This is the equivalent of the Python expression:
+	 o1 >>= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceAnd(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of bitwise and of o1 and o2, possibly in-place,
+	 or null on failure. This is the equivalent of the Python
+	 expression: o1 &= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceXor(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the bitwise exclusive or of o1 by o2, possibly in-place, or
+	 null on failure.  This is the equivalent of the Python expression:
+	 o1 ^= o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PyNumber_InPlaceOr(PyObject *o1, PyObject *o2);
+
+       /*
+	 Returns the result of bitwise or of o1 and o2, possibly in-place,
+	 or null on failure.  This is the equivalent of the Python
+	 expression: o1 |= o2.
+
+       */
+
+
+/*  Sequence protocol:*/
+
+     PyAPI_FUNC(int) PySequence_Check(PyObject *o);
+
+       /*
+         Return 1 if the object provides sequence protocol, and zero
+	 otherwise.  
+
+	 This function always succeeds.
+
+       */
+
+     PyAPI_FUNC(int) PySequence_Size(PyObject *o);
+
+       /*
+         Return the size of sequence object o, or -1 on failure.
+
+       */
+
+       /* For DLL compatibility */
+#undef PySequence_Length
+     PyAPI_FUNC(int) PySequence_Length(PyObject *o);
+#define PySequence_Length PySequence_Size
+
+
+     PyAPI_FUNC(PyObject *) PySequence_Concat(PyObject *o1, PyObject *o2);
+
+       /*
+	 Return the concatenation of o1 and o2 on success, and NULL on
+	 failure.   This is the equivalent of the Python
+	 expression: o1+o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PySequence_Repeat(PyObject *o, int count);
+
+       /*
+	 Return the result of repeating sequence object o count times,
+	 or NULL on failure.  This is the equivalent of the Python
+	 expression: o1*count.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PySequence_GetItem(PyObject *o, int i);
+
+       /*
+	 Return the ith element of o, or NULL on failure. This is the
+	 equivalent of the Python expression: o[i].
+       */
+
+     PyAPI_FUNC(PyObject *) PySequence_GetSlice(PyObject *o, int i1, int i2);
+
+       /*
+	 Return the slice of sequence object o between i1 and i2, or
+	 NULL on failure. This is the equivalent of the Python
+	 expression: o[i1:i2].
+
+       */
+
+     PyAPI_FUNC(int) PySequence_SetItem(PyObject *o, int i, PyObject *v);
+
+       /*
+	 Assign object v to the ith element of o.  Returns
+	 -1 on failure.  This is the equivalent of the Python
+	 statement: o[i]=v.
+
+       */
+
+     PyAPI_FUNC(int) PySequence_DelItem(PyObject *o, int i);
+
+       /*
+	 Delete the ith element of object v.  Returns
+	 -1 on failure.  This is the equivalent of the Python
+	 statement: del o[i].
+       */
+
+     PyAPI_FUNC(int) PySequence_SetSlice(PyObject *o, int i1, int i2,
+                                        PyObject *v);
+
+       /*
+         Assign the sequence object, v, to the slice in sequence
+	 object, o, from i1 to i2.  Returns -1 on failure. This is the
+	 equivalent of the Python statement: o[i1:i2]=v.
+       */
+
+     PyAPI_FUNC(int) PySequence_DelSlice(PyObject *o, int i1, int i2);
+
+       /*
+	 Delete the slice in sequence object, o, from i1 to i2.
+	 Returns -1 on failure. This is the equivalent of the Python
+	 statement: del o[i1:i2].
+       */
+
+     PyAPI_FUNC(PyObject *) PySequence_Tuple(PyObject *o);
+
+       /*
+	 Returns the sequence, o, as a tuple on success, and NULL on failure.
+	 This is equivalent to the Python expression: tuple(o)
+       */
+
+
+     PyAPI_FUNC(PyObject *) PySequence_List(PyObject *o);
+       /*
+	 Returns the sequence, o, as a list on success, and NULL on failure.
+	 This is equivalent to the Python expression: list(o)
+       */
+
+     PyAPI_FUNC(PyObject *) PySequence_Fast(PyObject *o, const char* m);
+       /*
+         Returns the sequence, o, as a tuple, unless it's already a
+         tuple or list.  Use PySequence_Fast_GET_ITEM to access the
+         members of this list, and PySequence_Fast_GET_SIZE to get its length.
+
+         Returns NULL on failure.  If the object does not support iteration,
+         raises a TypeError exception with m as the message text.
+       */
+
+#define PySequence_Fast_GET_SIZE(o) \
+	(PyList_Check(o) ? PyList_GET_SIZE(o) : PyTuple_GET_SIZE(o))
+       /*
+	 Return the size of o, assuming that o was returned by
+         PySequence_Fast and is not NULL.
+       */
+
+#define PySequence_Fast_GET_ITEM(o, i)\
+     (PyList_Check(o) ? PyList_GET_ITEM(o, i) : PyTuple_GET_ITEM(o, i))
+       /*
+	 Return the ith element of o, assuming that o was returned by
+         PySequence_Fast, and that i is within bounds.
+       */
+
+#define PySequence_ITEM(o, i)\
+	( o->ob_type->tp_as_sequence->sq_item(o, i) )
+       /* Assume tp_as_sequence and sq_item exist and that i does not
+	  need to be corrected for a negative index
+       */     
+
+#define PySequence_Fast_ITEMS(sf) \
+	(PyList_Check(sf) ? ((PyListObject *)(sf))->ob_item \
+			  : ((PyTupleObject *)(sf))->ob_item)
+	/* Return a pointer to the underlying item array for
+           an object returned by PySequence_Fast */
+
+     PyAPI_FUNC(int) PySequence_Count(PyObject *o, PyObject *value);
+
+       /*
+         Return the number of occurrences of value in o, that is,
+	 return the number of keys for which o[key]==value.  On
+	 failure, return -1.  This is equivalent to the Python
+	 expression: o.count(value).
+       */
+
+     PyAPI_FUNC(int) PySequence_Contains(PyObject *seq, PyObject *ob);
+       /*
+         Return -1 if error; 1 if ob in seq; 0 if ob not in seq.
+         Use __contains__ if possible, else _PySequence_IterSearch().
+       */
+
+#define PY_ITERSEARCH_COUNT    1
+#define PY_ITERSEARCH_INDEX    2
+#define PY_ITERSEARCH_CONTAINS 3
+     PyAPI_FUNC(int) _PySequence_IterSearch(PyObject *seq, PyObject *obj,
+     		    int operation);
+	/*
+	  Iterate over seq.  Result depends on the operation:
+	  PY_ITERSEARCH_COUNT:  return # of times obj appears in seq; -1 if
+	  	error.
+	  PY_ITERSEARCH_INDEX:  return 0-based index of first occurrence of
+	  	obj in seq; set ValueError and return -1 if none found;
+	  	also return -1 on error.
+	  PY_ITERSEARCH_CONTAINS:  return 1 if obj in seq, else 0; -1 on
+	  	error.
+	*/
+
+/* For DLL-level backwards compatibility */
+#undef PySequence_In
+     PyAPI_FUNC(int) PySequence_In(PyObject *o, PyObject *value);
+
+/* For source-level backwards compatibility */
+#define PySequence_In PySequence_Contains
+
+       /*
+	 Determine if o contains value.  If an item in o is equal to
+	 value, return 1, otherwise return 0.  On error, return -1.  This
+	 is equivalent to the Python expression: value in o.
+       */
+
+     PyAPI_FUNC(int) PySequence_Index(PyObject *o, PyObject *value);
+
+       /*
+	 Return the first index for which o[i]==value.  On error,
+	 return -1.    This is equivalent to the Python
+	 expression: o.index(value).
+       */
+
+/* In-place versions of some of the above Sequence functions. */
+
+     PyAPI_FUNC(PyObject *) PySequence_InPlaceConcat(PyObject *o1, PyObject *o2);
+
+       /*
+	 Append o2 to o1, in-place when possible. Return the resulting
+	 object, which could be o1, or NULL on failure.  This is the
+	 equivalent of the Python expression: o1 += o2.
+
+       */
+
+     PyAPI_FUNC(PyObject *) PySequence_InPlaceRepeat(PyObject *o, int count);
+
+       /*
+	 Repeat o1 by count, in-place when possible. Return the resulting
+	 object, which could be o1, or NULL on failure.  This is the
+	 equivalent of the Python expression: o1 *= count.
+
+       */
+
+/*  Mapping protocol:*/
+
+     PyAPI_FUNC(int) PyMapping_Check(PyObject *o);
+
+       /*
+         Return 1 if the object provides mapping protocol, and zero
+	 otherwise.  
+
+	 This function always succeeds.
+       */
+
+     PyAPI_FUNC(int) PyMapping_Size(PyObject *o);
+
+       /*
+         Returns the number of keys in object o on success, and -1 on
+	 failure.  For objects that do not provide mapping protocol,
+	 this is equivalent to the Python expression: len(o).
+       */
+
+       /* For DLL compatibility */
+#undef PyMapping_Length
+     PyAPI_FUNC(int) PyMapping_Length(PyObject *o);
+#define PyMapping_Length PyMapping_Size
+
+
+     /* implemented as a macro:
+
+     int PyMapping_DelItemString(PyObject *o, char *key);
+
+	 Remove the mapping for object, key, from the object *o.
+	 Returns -1 on failure.  This is equivalent to
+	 the Python statement: del o[key].
+       */
+#define PyMapping_DelItemString(O,K) PyObject_DelItemString((O),(K))
+
+     /* implemented as a macro:
+
+     int PyMapping_DelItem(PyObject *o, PyObject *key);
+
+	 Remove the mapping for object, key, from the object *o.
+	 Returns -1 on failure.  This is equivalent to
+	 the Python statement: del o[key].
+       */
+#define PyMapping_DelItem(O,K) PyObject_DelItem((O),(K))
+
+     PyAPI_FUNC(int) PyMapping_HasKeyString(PyObject *o, char *key);
+
+       /*
+	 On success, return 1 if the mapping object has the key, key,
+	 and 0 otherwise.  This is equivalent to the Python expression:
+	 o.has_key(key). 
+
+	 This function always succeeds.
+       */
+
+     PyAPI_FUNC(int) PyMapping_HasKey(PyObject *o, PyObject *key);
+
+       /*
+	 Return 1 if the mapping object has the key, key,
+	 and 0 otherwise.  This is equivalent to the Python expression:
+	 o.has_key(key). 
+
+	 This function always succeeds.
+
+       */
+
+     /* Implemented as macro:
+
+     PyObject *PyMapping_Keys(PyObject *o);
+
+         On success, return a list of the keys in object o.  On
+	 failure, return NULL. This is equivalent to the Python
+	 expression: o.keys().
+       */
+#define PyMapping_Keys(O) PyObject_CallMethod(O,"keys",NULL)
+
+     /* Implemented as macro:
+
+     PyObject *PyMapping_Values(PyObject *o);
+
+         On success, return a list of the values in object o.  On
+	 failure, return NULL. This is equivalent to the Python
+	 expression: o.values().
+       */
+#define PyMapping_Values(O) PyObject_CallMethod(O,"values",NULL)
+
+     /* Implemented as macro:
+
+     PyObject *PyMapping_Items(PyObject *o);
+
+         On success, return a list of the items in object o, where
+	 each item is a tuple containing a key-value pair.  On
+	 failure, return NULL. This is equivalent to the Python
+	 expression: o.items().
+
+       */
+#define PyMapping_Items(O) PyObject_CallMethod(O,"items",NULL)
+
+     PyAPI_FUNC(PyObject *) PyMapping_GetItemString(PyObject *o, char *key);
+
+       /*
+	 Return element of o corresponding to the object, key, or NULL
+	 on failure. This is the equivalent of the Python expression:
+	 o[key].
+       */
+
+     PyAPI_FUNC(int) PyMapping_SetItemString(PyObject *o, char *key,
+                                            PyObject *value);
+
+       /*
+	 Map the object, key, to the value, v.  Returns 
+	 -1 on failure.  This is the equivalent of the Python
+	 statement: o[key]=v.
+      */
+
+
+PyAPI_FUNC(int) PyObject_IsInstance(PyObject *object, PyObject *typeorclass);
+      /* isinstance(object, typeorclass) */
+
+PyAPI_FUNC(int) PyObject_IsSubclass(PyObject *object, PyObject *typeorclass);
+      /* issubclass(object, typeorclass) */
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* Py_ABSTRACTOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/bitset.h b/depot_tools/release/win/python_24/include/bitset.h
new file mode 100644
index 0000000..faeb419
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/bitset.h
@@ -0,0 +1,32 @@
+
+#ifndef Py_BITSET_H
+#define Py_BITSET_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Bitset interface */
+
+#define BYTE		char
+
+typedef BYTE *bitset;
+
+bitset newbitset(int nbits);
+void delbitset(bitset bs);
+#define testbit(ss, ibit) (((ss)[BIT2BYTE(ibit)] & BIT2MASK(ibit)) != 0)
+int addbit(bitset bs, int ibit); /* Returns 0 if already set */
+int samebitset(bitset bs1, bitset bs2, int nbits);
+void mergebitset(bitset bs1, bitset bs2, int nbits);
+
+#define BITSPERBYTE	(8*sizeof(BYTE))
+#define NBYTES(nbits)	(((nbits) + BITSPERBYTE - 1) / BITSPERBYTE)
+
+#define BIT2BYTE(ibit)	((ibit) / BITSPERBYTE)
+#define BIT2SHIFT(ibit)	((ibit) % BITSPERBYTE)
+#define BIT2MASK(ibit)	(1 << BIT2SHIFT(ibit))
+#define BYTE2BIT(ibyte)	((ibyte) * BITSPERBYTE)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_BITSET_H */
diff --git a/depot_tools/release/win/python_24/include/boolobject.h b/depot_tools/release/win/python_24/include/boolobject.h
new file mode 100644
index 0000000..7f9ad01
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/boolobject.h
@@ -0,0 +1,36 @@
+/* Boolean object interface */
+
+#ifndef Py_BOOLOBJECT_H
+#define Py_BOOLOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+typedef PyIntObject PyBoolObject;
+
+PyAPI_DATA(PyTypeObject) PyBool_Type;
+
+#define PyBool_Check(x) ((x)->ob_type == &PyBool_Type)
+
+/* Py_False and Py_True are the only two bools in existence.
+Don't forget to apply Py_INCREF() when returning either!!! */
+
+/* Don't use these directly */
+PyAPI_DATA(PyIntObject) _Py_ZeroStruct, _Py_TrueStruct;
+
+/* Use these macros */
+#define Py_False ((PyObject *) &_Py_ZeroStruct)
+#define Py_True ((PyObject *) &_Py_TrueStruct)
+
+/* Macros for returning Py_True or Py_False, respectively */
+#define Py_RETURN_TRUE return Py_INCREF(Py_True), Py_True
+#define Py_RETURN_FALSE return Py_INCREF(Py_False), Py_False
+
+/* Function to return a bool from a C long */
+PyAPI_FUNC(PyObject *) PyBool_FromLong(long);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_BOOLOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/bufferobject.h b/depot_tools/release/win/python_24/include/bufferobject.h
new file mode 100644
index 0000000..ed2c91db
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/bufferobject.h
@@ -0,0 +1,33 @@
+
+/* Buffer object interface */
+
+/* Note: the object's structure is private */
+
+#ifndef Py_BUFFEROBJECT_H
+#define Py_BUFFEROBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+PyAPI_DATA(PyTypeObject) PyBuffer_Type;
+
+#define PyBuffer_Check(op) ((op)->ob_type == &PyBuffer_Type)
+
+#define Py_END_OF_BUFFER	(-1)
+
+PyAPI_FUNC(PyObject *) PyBuffer_FromObject(PyObject *base,
+                                                 int offset, int size);
+PyAPI_FUNC(PyObject *) PyBuffer_FromReadWriteObject(PyObject *base,
+                                                          int offset,
+                                                          int size);
+
+PyAPI_FUNC(PyObject *) PyBuffer_FromMemory(void *ptr, int size);
+PyAPI_FUNC(PyObject *) PyBuffer_FromReadWriteMemory(void *ptr, int size);
+
+PyAPI_FUNC(PyObject *) PyBuffer_New(int size);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_BUFFEROBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/cStringIO.h b/depot_tools/release/win/python_24/include/cStringIO.h
new file mode 100644
index 0000000..66e1edb
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/cStringIO.h
@@ -0,0 +1,70 @@
+#ifndef Py_CSTRINGIO_H
+#define Py_CSTRINGIO_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+
+  This header provides access to cStringIO objects from C.
+  Functions are provided for calling cStringIO objects and
+  macros are provided for testing whether you have cStringIO 
+  objects.
+
+  Before calling any of the functions or macros, you must initialize
+  the routines with:
+
+    PycString_IMPORT
+
+  This would typically be done in your init function.
+
+*/
+#define PycString_IMPORT \
+  PycStringIO = (struct PycStringIO_CAPI*)PyCObject_Import("cStringIO", \
+                                                           "cStringIO_CAPI")
+
+/* Basic functions to manipulate cStringIO objects from C */
+
+static struct PycStringIO_CAPI {
+  
+ /* Read a string from an input object.  If the last argument
+    is -1, the remainder will be read.
+    */
+  int(*cread)(PyObject *, char **, int);
+
+ /* Read a line from an input object.  Returns the length of the read
+    line as an int and a pointer inside the object buffer as char** (so
+    the caller doesn't have to provide its own buffer as destination).
+    */
+  int(*creadline)(PyObject *, char **);
+
+  /* Write a string to an output object*/
+  int(*cwrite)(PyObject *, char *, int);
+
+  /* Get the output object as a Python string (returns new reference). */
+  PyObject *(*cgetvalue)(PyObject *);
+
+  /* Create a new output object */
+  PyObject *(*NewOutput)(int);
+
+  /* Create an input object from a Python string
+     (copies the Python string reference).
+     */
+  PyObject *(*NewInput)(PyObject *);
+
+  /* The Python types for cStringIO input and output objects.
+     Note that you can do input on an output object.
+     */
+  PyTypeObject *InputType, *OutputType;
+
+} *PycStringIO;
+
+/* These can be used to test if you have one */
+#define PycStringIO_InputCheck(O) \
+  ((O)->ob_type==PycStringIO->InputType)
+#define PycStringIO_OutputCheck(O) \
+  ((O)->ob_type==PycStringIO->OutputType)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_CSTRINGIO_H */
diff --git a/depot_tools/release/win/python_24/include/cellobject.h b/depot_tools/release/win/python_24/include/cellobject.h
new file mode 100644
index 0000000..fd186e2
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/cellobject.h
@@ -0,0 +1,28 @@
+/* Cell object interface */
+
+#ifndef Py_CELLOBJECT_H
+#define Py_CELLOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+	PyObject_HEAD
+	PyObject *ob_ref;	/* Content of the cell or NULL when empty */
+} PyCellObject;
+
+PyAPI_DATA(PyTypeObject) PyCell_Type;
+
+#define PyCell_Check(op) ((op)->ob_type == &PyCell_Type)
+
+PyAPI_FUNC(PyObject *) PyCell_New(PyObject *);
+PyAPI_FUNC(PyObject *) PyCell_Get(PyObject *);
+PyAPI_FUNC(int) PyCell_Set(PyObject *, PyObject *);
+
+#define PyCell_GET(op) (((PyCellObject *)(op))->ob_ref)
+#define PyCell_SET(op, v) (((PyCellObject *)(op))->ob_ref = v)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_CELLOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/ceval.h b/depot_tools/release/win/python_24/include/ceval.h
new file mode 100644
index 0000000..d9320e0
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/ceval.h
@@ -0,0 +1,154 @@
+#ifndef Py_CEVAL_H
+#define Py_CEVAL_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Interface to random parts in ceval.c */
+
+PyAPI_FUNC(PyObject *) PyEval_CallObjectWithKeywords(
+	PyObject *, PyObject *, PyObject *);
+
+/* DLL-level Backwards compatibility: */
+#undef PyEval_CallObject
+PyAPI_FUNC(PyObject *) PyEval_CallObject(PyObject *, PyObject *);
+
+/* Inline this */
+#define PyEval_CallObject(func,arg) \
+        PyEval_CallObjectWithKeywords(func, arg, (PyObject *)NULL)
+
+PyAPI_FUNC(PyObject *) PyEval_CallFunction(PyObject *obj, char *format, ...);
+PyAPI_FUNC(PyObject *) PyEval_CallMethod(PyObject *obj,
+                                        char *methodname, char *format, ...);
+
+PyAPI_FUNC(void) PyEval_SetProfile(Py_tracefunc, PyObject *);
+PyAPI_FUNC(void) PyEval_SetTrace(Py_tracefunc, PyObject *);
+
+struct _frame; /* Avoid including frameobject.h */
+
+PyAPI_FUNC(PyObject *) PyEval_GetBuiltins(void);
+PyAPI_FUNC(PyObject *) PyEval_GetGlobals(void);
+PyAPI_FUNC(PyObject *) PyEval_GetLocals(void);
+PyAPI_FUNC(struct _frame *) PyEval_GetFrame(void);
+PyAPI_FUNC(int) PyEval_GetRestricted(void);
+
+/* Look at the current frame's (if any) code's co_flags, and turn on
+   the corresponding compiler flags in cf->cf_flags.  Return 1 if any
+   flag was set, else return 0. */
+PyAPI_FUNC(int) PyEval_MergeCompilerFlags(PyCompilerFlags *cf);
+
+PyAPI_FUNC(int) Py_FlushLine(void);
+
+PyAPI_FUNC(int) Py_AddPendingCall(int (*func)(void *), void *arg);
+PyAPI_FUNC(int) Py_MakePendingCalls(void);
+
+/* Protection against deeply nested recursive calls */
+PyAPI_FUNC(void) Py_SetRecursionLimit(int);
+PyAPI_FUNC(int) Py_GetRecursionLimit(void);
+
+#define Py_EnterRecursiveCall(where)                                    \
+	    (_Py_MakeRecCheck(PyThreadState_GET()->recursion_depth) &&  \
+	     _Py_CheckRecursiveCall(where))
+#define Py_LeaveRecursiveCall()				\
+	    (--PyThreadState_GET()->recursion_depth)
+PyAPI_FUNC(int) _Py_CheckRecursiveCall(char *where);
+PyAPI_DATA(int) _Py_CheckRecursionLimit;
+#ifdef USE_STACKCHECK
+#  define _Py_MakeRecCheck(x)  (++(x) > --_Py_CheckRecursionLimit)
+#else
+#  define _Py_MakeRecCheck(x)  (++(x) > _Py_CheckRecursionLimit)
+#endif
+
+PyAPI_FUNC(char *) PyEval_GetFuncName(PyObject *);
+PyAPI_FUNC(char *) PyEval_GetFuncDesc(PyObject *);
+
+PyAPI_FUNC(PyObject *) PyEval_GetCallStats(PyObject *);
+PyAPI_FUNC(PyObject *) PyEval_EvalFrame(struct _frame *);
+
+/* this used to be handled on a per-thread basis - now just two globals */
+PyAPI_DATA(volatile int) _Py_Ticker;
+PyAPI_DATA(int) _Py_CheckInterval;
+
+/* Interface for threads.
+
+   A module that plans to do a blocking system call (or something else
+   that lasts a long time and doesn't touch Python data) can allow other
+   threads to run as follows:
+
+	...preparations here...
+	Py_BEGIN_ALLOW_THREADS
+	...blocking system call here...
+	Py_END_ALLOW_THREADS
+	...interpret result here...
+
+   The Py_BEGIN_ALLOW_THREADS/Py_END_ALLOW_THREADS pair expands to a
+   {}-surrounded block.
+   To leave the block in the middle (e.g., with return), you must insert
+   a line containing Py_BLOCK_THREADS before the return, e.g.
+
+	if (...premature_exit...) {
+		Py_BLOCK_THREADS
+		PyErr_SetFromErrno(PyExc_IOError);
+		return NULL;
+	}
+
+   An alternative is:
+
+	Py_BLOCK_THREADS
+	if (...premature_exit...) {
+		PyErr_SetFromErrno(PyExc_IOError);
+		return NULL;
+	}
+	Py_UNBLOCK_THREADS
+
+   For convenience, the value of 'errno' is restored across
+   Py_END_ALLOW_THREADS and Py_BLOCK_THREADS.
+
+   WARNING: NEVER NEST CALLS TO Py_BEGIN_ALLOW_THREADS AND
+   Py_END_ALLOW_THREADS!!!
+
+   The function PyEval_InitThreads() should be called only from
+   initthread() in "threadmodule.c".
+
+   Note that not yet all candidates have been converted to use this
+   mechanism!
+*/
+
+PyAPI_FUNC(PyThreadState *) PyEval_SaveThread(void);
+PyAPI_FUNC(void) PyEval_RestoreThread(PyThreadState *);
+
+#ifdef WITH_THREAD
+
+PyAPI_FUNC(int)  PyEval_ThreadsInitialized(void);
+PyAPI_FUNC(void) PyEval_InitThreads(void);
+PyAPI_FUNC(void) PyEval_AcquireLock(void);
+PyAPI_FUNC(void) PyEval_ReleaseLock(void);
+PyAPI_FUNC(void) PyEval_AcquireThread(PyThreadState *tstate);
+PyAPI_FUNC(void) PyEval_ReleaseThread(PyThreadState *tstate);
+PyAPI_FUNC(void) PyEval_ReInitThreads(void);
+
+#define Py_BEGIN_ALLOW_THREADS { \
+			PyThreadState *_save; \
+			_save = PyEval_SaveThread();
+#define Py_BLOCK_THREADS	PyEval_RestoreThread(_save);
+#define Py_UNBLOCK_THREADS	_save = PyEval_SaveThread();
+#define Py_END_ALLOW_THREADS	PyEval_RestoreThread(_save); \
+		 }
+
+#else /* !WITH_THREAD */
+
+#define Py_BEGIN_ALLOW_THREADS {
+#define Py_BLOCK_THREADS
+#define Py_UNBLOCK_THREADS
+#define Py_END_ALLOW_THREADS }
+
+#endif /* !WITH_THREAD */
+
+PyAPI_FUNC(int) _PyEval_SliceIndex(PyObject *, int *);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_CEVAL_H */
diff --git a/depot_tools/release/win/python_24/include/classobject.h b/depot_tools/release/win/python_24/include/classobject.h
new file mode 100644
index 0000000..8f8db7d
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/classobject.h
@@ -0,0 +1,81 @@
+
+/* Class object interface */
+
+/* Revealing some structures (not for general use) */
+
+#ifndef Py_CLASSOBJECT_H
+#define Py_CLASSOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    PyObject_HEAD
+    PyObject	*cl_bases;	/* A tuple of class objects */
+    PyObject	*cl_dict;	/* A dictionary */
+    PyObject	*cl_name;	/* A string */
+    /* The following three are functions or NULL */
+    PyObject	*cl_getattr;
+    PyObject	*cl_setattr;
+    PyObject	*cl_delattr;
+} PyClassObject;
+
+typedef struct {
+    PyObject_HEAD
+    PyClassObject *in_class;	/* The class object */
+    PyObject	  *in_dict;	/* A dictionary */
+    PyObject	  *in_weakreflist; /* List of weak references */
+} PyInstanceObject;
+
+typedef struct {
+    PyObject_HEAD
+    PyObject *im_func;   /* The callable object implementing the method */
+    PyObject *im_self;   /* The instance it is bound to, or NULL */
+    PyObject *im_class;  /* The class that asked for the method */
+    PyObject *im_weakreflist; /* List of weak references */
+} PyMethodObject;
+
+PyAPI_DATA(PyTypeObject) PyClass_Type, PyInstance_Type, PyMethod_Type;
+
+#define PyClass_Check(op) ((op)->ob_type == &PyClass_Type)
+#define PyInstance_Check(op) ((op)->ob_type == &PyInstance_Type)
+#define PyMethod_Check(op) ((op)->ob_type == &PyMethod_Type)
+
+PyAPI_FUNC(PyObject *) PyClass_New(PyObject *, PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyInstance_New(PyObject *, PyObject *,
+                                            PyObject *);
+PyAPI_FUNC(PyObject *) PyInstance_NewRaw(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyMethod_New(PyObject *, PyObject *, PyObject *);
+
+PyAPI_FUNC(PyObject *) PyMethod_Function(PyObject *);
+PyAPI_FUNC(PyObject *) PyMethod_Self(PyObject *);
+PyAPI_FUNC(PyObject *) PyMethod_Class(PyObject *);
+
+/* Look up attribute with name (a string) on instance object pinst, using
+ * only the instance and base class dicts.  If a descriptor is found in
+ * a class dict, the descriptor is returned without calling it.
+ * Returns NULL if nothing found, else a borrowed reference to the
+ * value associated with name in the dict in which name was found.
+ * The point of this routine is that it never calls arbitrary Python
+ * code, so is always "safe":  all it does is dict lookups.  The function
+ * can't fail, never sets an exception, and NULL is not an error (it just
+ * means "not found").
+ */
+PyAPI_FUNC(PyObject *) _PyInstance_Lookup(PyObject *pinst, PyObject *name);
+
+/* Macros for direct access to these values. Type checks are *not*
+   done, so use with care. */
+#define PyMethod_GET_FUNCTION(meth) \
+        (((PyMethodObject *)meth) -> im_func)
+#define PyMethod_GET_SELF(meth) \
+	(((PyMethodObject *)meth) -> im_self)
+#define PyMethod_GET_CLASS(meth) \
+	(((PyMethodObject *)meth) -> im_class)
+
+PyAPI_FUNC(int) PyClass_IsSubclass(PyObject *, PyObject *);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_CLASSOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/cobject.h b/depot_tools/release/win/python_24/include/cobject.h
new file mode 100644
index 0000000..ad23ac8d
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/cobject.h
@@ -0,0 +1,54 @@
+
+/* C objects to be exported from one extension module to another.
+ 
+   C objects are used for communication between extension modules.
+   They provide a way for an extension module to export a C interface
+   to other extension modules, so that extension modules can use the
+   Python import mechanism to link to one another.
+
+*/
+
+#ifndef Py_COBJECT_H
+#define Py_COBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_DATA(PyTypeObject) PyCObject_Type;
+
+#define PyCObject_Check(op) ((op)->ob_type == &PyCObject_Type)
+
+/* Create a PyCObject from a pointer to a C object and an optional
+   destructor function.  If the second argument is non-null, then it
+   will be called with the first argument if and when the PyCObject is
+   destroyed.
+
+*/
+PyAPI_FUNC(PyObject *) PyCObject_FromVoidPtr(
+	void *cobj, void (*destruct)(void*));
+
+
+/* Create a PyCObject from a pointer to a C object, a description object,
+   and an optional destructor function.  If the third argument is non-null,
+   then it will be called with the first and second arguments if and when 
+   the PyCObject is destroyed.
+*/
+PyAPI_FUNC(PyObject *) PyCObject_FromVoidPtrAndDesc(
+	void *cobj, void *desc, void (*destruct)(void*,void*));
+
+/* Retrieve a pointer to a C object from a PyCObject. */
+PyAPI_FUNC(void *) PyCObject_AsVoidPtr(PyObject *);
+
+/* Retrieve a pointer to a description object from a PyCObject. */
+PyAPI_FUNC(void *) PyCObject_GetDesc(PyObject *);
+
+/* Import a pointer to a C object from a module using a PyCObject. */
+PyAPI_FUNC(void *) PyCObject_Import(char *module_name, char *cobject_name);
+
+/* Modify a C object. Fails (==0) if object has a destructor. */
+PyAPI_FUNC(int) PyCObject_SetVoidPtr(PyObject *self, void *cobj);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_COBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/codecs.h b/depot_tools/release/win/python_24/include/codecs.h
new file mode 100644
index 0000000..82f18cdc
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/codecs.h
@@ -0,0 +1,153 @@
+#ifndef Py_CODECREGISTRY_H
+#define Py_CODECREGISTRY_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* ------------------------------------------------------------------------
+
+   Python Codec Registry and support functions
+
+
+Written by Marc-Andre Lemburg (mal@lemburg.com).
+
+Copyright (c) Corporation for National Research Initiatives.
+
+   ------------------------------------------------------------------------ */
+
+/* Register a new codec search function.
+
+   As side effect, this tries to load the encodings package, if not
+   yet done, to make sure that it is always first in the list of
+   search functions.
+
+   The search_function's refcount is incremented by this function. */
+
+PyAPI_FUNC(int) PyCodec_Register(
+       PyObject *search_function
+       );
+
+/* Codec register lookup API.
+
+   Looks up the given encoding and returns a tuple (encoder, decoder,
+   stream reader, stream writer) of functions which implement the
+   different aspects of processing the encoding.
+
+   The encoding string is looked up converted to all lower-case
+   characters. This makes encodings looked up through this mechanism
+   effectively case-insensitive.
+
+   If no codec is found, a KeyError is set and NULL returned. 
+
+   As side effect, this tries to load the encodings package, if not
+   yet done. This is part of the lazy load strategy for the encodings
+   package.
+
+ */
+
+PyAPI_FUNC(PyObject *) _PyCodec_Lookup(
+       const char *encoding
+       );
+
+/* Generic codec based encoding API.
+
+   object is passed through the encoder function found for the given
+   encoding using the error handling method defined by errors. errors
+   may be NULL to use the default method defined for the codec.
+   
+   Raises a LookupError in case no encoder can be found.
+
+ */
+
+PyAPI_FUNC(PyObject *) PyCodec_Encode(
+       PyObject *object,
+       const char *encoding,
+       const char *errors
+       );
+
+/* Generic codec based decoding API.
+
+   object is passed through the decoder function found for the given
+   encoding using the error handling method defined by errors. errors
+   may be NULL to use the default method defined for the codec.
+   
+   Raises a LookupError in case no encoder can be found.
+
+ */
+
+PyAPI_FUNC(PyObject *) PyCodec_Decode(
+       PyObject *object,
+       const char *encoding,
+       const char *errors
+       );
+
+/* --- Codec Lookup APIs -------------------------------------------------- 
+
+   All APIs return a codec object with incremented refcount and are
+   based on _PyCodec_Lookup().  The same comments w/r to the encoding
+   name also apply to these APIs.
+
+*/
+
+/* Get an encoder function for the given encoding. */
+
+PyAPI_FUNC(PyObject *) PyCodec_Encoder(
+       const char *encoding
+       );
+
+/* Get a decoder function for the given encoding. */
+
+PyAPI_FUNC(PyObject *) PyCodec_Decoder(
+       const char *encoding
+       );
+
+/* Get a StreamReader factory function for the given encoding. */
+
+PyAPI_FUNC(PyObject *) PyCodec_StreamReader(
+       const char *encoding,
+       PyObject *stream,
+       const char *errors
+       );
+
+/* Get a StreamWriter factory function for the given encoding. */
+
+PyAPI_FUNC(PyObject *) PyCodec_StreamWriter(
+       const char *encoding,
+       PyObject *stream,
+       const char *errors
+       );
+
+/* Unicode encoding error handling callback registry API */
+
+/* Register the error handling callback function error under the name
+   name. This function will be called by the codec when it encounters
+   unencodable characters/undecodable bytes and doesn't know the
+   callback name, when name is specified as the error parameter
+   in the call to the encode/decode function.
+   Return 0 on success, -1 on error */
+PyAPI_FUNC(int) PyCodec_RegisterError(const char *name, PyObject *error);
+
+/* Lookup the error handling callback function registered under the
+   name error. As a special case NULL can be passed, in which case
+   the error handling callback for "strict" will be returned. */
+PyAPI_FUNC(PyObject *) PyCodec_LookupError(const char *name);
+
+/* raise exc as an exception */
+PyAPI_FUNC(PyObject *) PyCodec_StrictErrors(PyObject *exc);
+
+/* ignore the unicode error, skipping the faulty input */
+PyAPI_FUNC(PyObject *) PyCodec_IgnoreErrors(PyObject *exc);
+
+/* replace the unicode error with ? or U+FFFD */
+PyAPI_FUNC(PyObject *) PyCodec_ReplaceErrors(PyObject *exc);
+
+/* replace the unicode encode error with XML character references */
+PyAPI_FUNC(PyObject *) PyCodec_XMLCharRefReplaceErrors(PyObject *exc);
+
+/* replace the unicode encode error with backslash escapes (\x, \u and \U) */
+PyAPI_FUNC(PyObject *) PyCodec_BackslashReplaceErrors(PyObject *exc);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_CODECREGISTRY_H */
diff --git a/depot_tools/release/win/python_24/include/compile.h b/depot_tools/release/win/python_24/include/compile.h
new file mode 100644
index 0000000..82bf708
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/compile.h
@@ -0,0 +1,87 @@
+
+/* Definitions for bytecode */
+
+#ifndef Py_COMPILE_H
+#define Py_COMPILE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Bytecode object */
+typedef struct {
+    PyObject_HEAD
+    int co_argcount;		/* #arguments, except *args */
+    int co_nlocals;		/* #local variables */
+    int co_stacksize;		/* #entries needed for evaluation stack */
+    int co_flags;		/* CO_..., see below */
+    PyObject *co_code;		/* instruction opcodes */
+    PyObject *co_consts;	/* list (constants used) */
+    PyObject *co_names;		/* list of strings (names used) */
+    PyObject *co_varnames;	/* tuple of strings (local variable names) */
+    PyObject *co_freevars;	/* tuple of strings (free variable names) */
+    PyObject *co_cellvars;      /* tuple of strings (cell variable names) */
+    /* The rest doesn't count for hash/cmp */
+    PyObject *co_filename;	/* string (where it was loaded from) */
+    PyObject *co_name;		/* string (name, for reference) */
+    int co_firstlineno;		/* first source line number */
+    PyObject *co_lnotab;	/* string (encoding addr<->lineno mapping) */
+} PyCodeObject;
+
+/* Masks for co_flags above */
+#define CO_OPTIMIZED	0x0001
+#define CO_NEWLOCALS	0x0002
+#define CO_VARARGS	0x0004
+#define CO_VARKEYWORDS	0x0008
+#define CO_NESTED       0x0010
+#define CO_GENERATOR    0x0020
+/* The CO_NOFREE flag is set if there are no free or cell variables.
+   This information is redundant, but it allows a single flag test
+   to determine whether there is any extra work to be done when the
+   call frame is set up.
+*/
+#define CO_NOFREE       0x0040
+/* XXX Temporary hack.  Until generators are a permanent part of the
+   language, we need a way for a code object to record that generators
+   were *possible* when it was compiled.  This is so code dynamically
+   compiled *by* a code object knows whether to allow yield stmts.  In
+   effect, this passes on the "from __future__ import generators" state
+   in effect when the code block was compiled. */
+#define CO_GENERATOR_ALLOWED    0x1000 /* no longer used in an essential way */
+#define CO_FUTURE_DIVISION    	0x2000
+
+PyAPI_DATA(PyTypeObject) PyCode_Type;
+
+#define PyCode_Check(op) ((op)->ob_type == &PyCode_Type)
+#define PyCode_GetNumFree(op) (PyTuple_GET_SIZE((op)->co_freevars))
+
+#define CO_MAXBLOCKS 20 /* Max static block nesting within a function */
+
+/* Public interface */
+struct _node; /* Declare the existence of this type */
+PyAPI_FUNC(PyCodeObject *) PyNode_Compile(struct _node *, const char *);
+PyAPI_FUNC(PyCodeObject *) PyCode_New(
+	int, int, int, int, PyObject *, PyObject *, PyObject *, PyObject *,
+	PyObject *, PyObject *, PyObject *, PyObject *, int, PyObject *); 
+        /* same as struct above */
+PyAPI_FUNC(int) PyCode_Addr2Line(PyCodeObject *, int);
+
+/* Future feature support */
+
+typedef struct {
+    int ff_found_docstring;
+    int ff_last_lineno;
+    int ff_features;
+} PyFutureFeatures;
+
+PyAPI_FUNC(PyFutureFeatures *) PyNode_Future(struct _node *, const char *);
+PyAPI_FUNC(PyCodeObject *) PyNode_CompileFlags(struct _node *, const char *,
+					      PyCompilerFlags *);
+
+#define FUTURE_NESTED_SCOPES "nested_scopes"
+#define FUTURE_GENERATORS "generators"
+#define FUTURE_DIVISION "division"
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_COMPILE_H */
diff --git a/depot_tools/release/win/python_24/include/complexobject.h b/depot_tools/release/win/python_24/include/complexobject.h
new file mode 100644
index 0000000..e59bf2c
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/complexobject.h
@@ -0,0 +1,58 @@
+/* Complex number structure */
+
+#ifndef Py_COMPLEXOBJECT_H
+#define Py_COMPLEXOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    double real;
+    double imag;
+} Py_complex;
+
+/* Operations on complex numbers from complexmodule.c */
+
+#define c_sum _Py_c_sum
+#define c_diff _Py_c_diff
+#define c_neg _Py_c_neg
+#define c_prod _Py_c_prod
+#define c_quot _Py_c_quot
+#define c_pow _Py_c_pow
+
+PyAPI_FUNC(Py_complex) c_sum(Py_complex, Py_complex);
+PyAPI_FUNC(Py_complex) c_diff(Py_complex, Py_complex);
+PyAPI_FUNC(Py_complex) c_neg(Py_complex);
+PyAPI_FUNC(Py_complex) c_prod(Py_complex, Py_complex);
+PyAPI_FUNC(Py_complex) c_quot(Py_complex, Py_complex);
+PyAPI_FUNC(Py_complex) c_pow(Py_complex, Py_complex);
+
+
+/* Complex object interface */
+
+/*
+PyComplexObject represents a complex number with double-precision
+real and imaginary parts.
+*/
+
+typedef struct {
+    PyObject_HEAD
+    Py_complex cval;
+} PyComplexObject;     
+
+PyAPI_DATA(PyTypeObject) PyComplex_Type;
+
+#define PyComplex_Check(op) PyObject_TypeCheck(op, &PyComplex_Type)
+#define PyComplex_CheckExact(op) ((op)->ob_type == &PyComplex_Type)
+
+PyAPI_FUNC(PyObject *) PyComplex_FromCComplex(Py_complex);
+PyAPI_FUNC(PyObject *) PyComplex_FromDoubles(double real, double imag);
+
+PyAPI_FUNC(double) PyComplex_RealAsDouble(PyObject *op);
+PyAPI_FUNC(double) PyComplex_ImagAsDouble(PyObject *op);
+PyAPI_FUNC(Py_complex) PyComplex_AsCComplex(PyObject *op);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_COMPLEXOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/datetime.h b/depot_tools/release/win/python_24/include/datetime.h
new file mode 100644
index 0000000..db0f3aa
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/datetime.h
@@ -0,0 +1,245 @@
+/*  datetime.h
+ */
+
+#ifndef DATETIME_H
+#define DATETIME_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Fields are packed into successive bytes, each viewed as unsigned and
+ * big-endian, unless otherwise noted:
+ *
+ * byte offset
+ *  0 		year     2 bytes, 1-9999
+ *  2		month    1 byte, 1-12
+ *  3 		day      1 byte, 1-31
+ *  4		hour     1 byte, 0-23
+ *  5 		minute   1 byte, 0-59
+ *  6 		second   1 byte, 0-59
+ *  7 		usecond  3 bytes, 0-999999
+ * 10
+ */
+
+/* # of bytes for year, month, and day. */
+#define _PyDateTime_DATE_DATASIZE 4
+
+/* # of bytes for hour, minute, second, and usecond. */
+#define _PyDateTime_TIME_DATASIZE 6
+
+/* # of bytes for year, month, day, hour, minute, second, and usecond. */
+#define _PyDateTime_DATETIME_DATASIZE 10
+
+
+typedef struct
+{
+	PyObject_HEAD
+	long hashcode;		/* -1 when unknown */
+	int days;		/* -MAX_DELTA_DAYS <= days <= MAX_DELTA_DAYS */
+	int seconds;		/* 0 <= seconds < 24*3600 is invariant */
+	int microseconds;	/* 0 <= microseconds < 1000000 is invariant */
+} PyDateTime_Delta;
+
+typedef struct
+{
+	PyObject_HEAD		/* a pure abstract base class */
+} PyDateTime_TZInfo;
+
+
+/* The datetime and time types have hashcodes, and an optional tzinfo member,
+ * present if and only if hastzinfo is true.
+ */
+#define _PyTZINFO_HEAD		\
+	PyObject_HEAD		\
+	long hashcode;		\
+	char hastzinfo;		/* boolean flag */
+
+/* No _PyDateTime_BaseTZInfo is allocated; it's just to have something
+ * convenient to cast to, when getting at the hastzinfo member of objects
+ * starting with _PyTZINFO_HEAD.
+ */
+typedef struct
+{
+	_PyTZINFO_HEAD
+} _PyDateTime_BaseTZInfo;
+
+/* All time objects are of PyDateTime_TimeType, but that can be allocated
+ * in two ways, with or without a tzinfo member.  Without is the same as
+ * tzinfo == None, but consumes less memory.  _PyDateTime_BaseTime is an
+ * internal struct used to allocate the right amount of space for the
+ * "without" case.
+ */
+#define _PyDateTime_TIMEHEAD	\
+	_PyTZINFO_HEAD		\
+	unsigned char data[_PyDateTime_TIME_DATASIZE];
+
+typedef struct
+{
+	_PyDateTime_TIMEHEAD
+} _PyDateTime_BaseTime;		/* hastzinfo false */
+
+typedef struct
+{
+	_PyDateTime_TIMEHEAD
+	PyObject *tzinfo;
+} PyDateTime_Time;		/* hastzinfo true */
+
+
+/* All datetime objects are of PyDateTime_DateTimeType, but that can be
+ * allocated in two ways too, just like for time objects above.  In addition,
+ * the plain date type is a base class for datetime, so it must also have
+ * a hastzinfo member (although it's unused there).
+ */
+typedef struct
+{
+	_PyTZINFO_HEAD
+	unsigned char data[_PyDateTime_DATE_DATASIZE];
+} PyDateTime_Date;
+
+#define _PyDateTime_DATETIMEHEAD	\
+	_PyTZINFO_HEAD			\
+	unsigned char data[_PyDateTime_DATETIME_DATASIZE];
+
+typedef struct
+{
+	_PyDateTime_DATETIMEHEAD
+} _PyDateTime_BaseDateTime;	/* hastzinfo false */
+
+typedef struct
+{
+	_PyDateTime_DATETIMEHEAD
+	PyObject *tzinfo;
+} PyDateTime_DateTime;		/* hastzinfo true */
+
+
+/* Apply for date and datetime instances. */
+#define PyDateTime_GET_YEAR(o)     ((((PyDateTime_Date*)o)->data[0] << 8) | \
+                                     ((PyDateTime_Date*)o)->data[1])
+#define PyDateTime_GET_MONTH(o)    (((PyDateTime_Date*)o)->data[2])
+#define PyDateTime_GET_DAY(o)      (((PyDateTime_Date*)o)->data[3])
+
+#define PyDateTime_DATE_GET_HOUR(o)        (((PyDateTime_DateTime*)o)->data[4])
+#define PyDateTime_DATE_GET_MINUTE(o)      (((PyDateTime_DateTime*)o)->data[5])
+#define PyDateTime_DATE_GET_SECOND(o)      (((PyDateTime_DateTime*)o)->data[6])
+#define PyDateTime_DATE_GET_MICROSECOND(o) 		\
+	((((PyDateTime_DateTime*)o)->data[7] << 16) |	\
+         (((PyDateTime_DateTime*)o)->data[8] << 8)  |	\
+          ((PyDateTime_DateTime*)o)->data[9])
+
+/* Apply for time instances. */
+#define PyDateTime_TIME_GET_HOUR(o)        (((PyDateTime_Time*)o)->data[0])
+#define PyDateTime_TIME_GET_MINUTE(o)      (((PyDateTime_Time*)o)->data[1])
+#define PyDateTime_TIME_GET_SECOND(o)      (((PyDateTime_Time*)o)->data[2])
+#define PyDateTime_TIME_GET_MICROSECOND(o) 		\
+	((((PyDateTime_Time*)o)->data[3] << 16) |	\
+         (((PyDateTime_Time*)o)->data[4] << 8)  |	\
+          ((PyDateTime_Time*)o)->data[5])
+
+
+/* Define structure for C API. */
+typedef struct {
+    /* type objects */
+    PyTypeObject *DateType;
+    PyTypeObject *DateTimeType;
+    PyTypeObject *TimeType;
+    PyTypeObject *DeltaType;
+    PyTypeObject *TZInfoType;
+
+    /* constructors */
+    PyObject *(*Date_FromDate)(int, int, int, PyTypeObject*);
+    PyObject *(*DateTime_FromDateAndTime)(int, int, int, int, int, int, int,
+            PyObject*, PyTypeObject*);
+    PyObject *(*Time_FromTime)(int, int, int, int, PyObject*, PyTypeObject*);
+    PyObject *(*Delta_FromDelta)(int, int, int, int, PyTypeObject*);
+
+    /* constructors for the DB API */
+    PyObject *(*DateTime_FromTimestamp)(PyObject*, PyObject*, PyObject*);
+    PyObject *(*Date_FromTimestamp)(PyObject*, PyObject*);
+
+} PyDateTime_CAPI;
+
+
+/* "magic" constant used to partially protect against developer mistakes. */
+#define DATETIME_API_MAGIC 0x414548d5
+
+#ifdef Py_BUILD_CORE
+
+/* Macros for type checking when building the Python core. */
+#define PyDate_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateType)
+#define PyDate_CheckExact(op) ((op)->ob_type == &PyDateTime_DateType)
+
+#define PyDateTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_DateTimeType)
+#define PyDateTime_CheckExact(op) ((op)->ob_type == &PyDateTime_DateTimeType)
+
+#define PyTime_Check(op) PyObject_TypeCheck(op, &PyDateTime_TimeType)
+#define PyTime_CheckExact(op) ((op)->ob_type == &PyDateTime_TimeType)
+
+#define PyDelta_Check(op) PyObject_TypeCheck(op, &PyDateTime_DeltaType)
+#define PyDelta_CheckExact(op) ((op)->ob_type == &PyDateTime_DeltaType)
+
+#define PyTZInfo_Check(op) PyObject_TypeCheck(op, &PyDateTime_TZInfoType)
+#define PyTZInfo_CheckExact(op) ((op)->ob_type == &PyDateTime_TZInfoType)
+
+#else
+
+/* Define global variable for the C API and a macro for setting it. */
+static PyDateTime_CAPI *PyDateTimeAPI;
+
+#define PyDateTime_IMPORT \
+        PyDateTimeAPI = (PyDateTime_CAPI*) PyCObject_Import("datetime", \
+                                                            "datetime_CAPI")
+
+/* This macro would be used if PyCObject_ImportEx() was created.
+#define PyDateTime_IMPORT \
+        PyDateTimeAPI = (PyDateTime_CAPI*) PyCObject_ImportEx("datetime", \
+                                                            "datetime_CAPI", \
+                                                            DATETIME_API_MAGIC)
+*/
+
+/* Macros for type checking when not building the Python core. */
+#define PyDate_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DateType)
+#define PyDate_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->DateType)
+
+#define PyDateTime_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DateTimeType)
+#define PyDateTime_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->DateTimeType)
+
+#define PyTime_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->TimeType)
+#define PyTime_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->TimeType)
+
+#define PyDelta_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->DeltaType)
+#define PyDelta_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->DeltaType)
+
+#define PyTZInfo_Check(op) PyObject_TypeCheck(op, PyDateTimeAPI->TZInfoType)
+#define PyTZInfo_CheckExact(op) ((op)->ob_type == PyDateTimeAPI->TZInfoType)
+
+/* Macros for accessing constructors in a simplified fashion. */
+#define PyDate_FromDate(year, month, day) \
+	PyDateTimeAPI->Date_FromDate(year, month, day, PyDateTimeAPI->DateType)
+
+#define PyDateTime_FromDateAndTime(year, month, day, hour, min, sec, usec) \
+	PyDateTimeAPI->DateTime_FromDateAndTime(year, month, day, hour, \
+		min, sec, usec, Py_None, PyDateTimeAPI->DateTimeType)
+
+#define PyTime_FromTime(hour, minute, second, usecond) \
+	PyDateTimeAPI->Time_FromTime(hour, minute, second, usecond, \
+		Py_None, PyDateTimeAPI->TimeType)
+
+#define PyDelta_FromDSU(days, seconds, useconds) \
+	PyDateTimeAPI->Delta_FromDelta(days, seconds, useconds, 1, \
+		PyDateTimeAPI->DeltaType)
+
+/* Macros supporting the DB API. */
+#define PyDateTime_FromTimestamp(args) \
+	PyDateTimeAPI->DateTime_FromTimestamp( \
+		(PyObject*) (PyDateTimeAPI->DateTimeType), args, NULL)
+
+#define PyDate_FromTimestamp(args) \
+	PyDateTimeAPI->Date_FromTimestamp( \
+		(PyObject*) (PyDateTimeAPI->DateType), args)
+
+#endif	/* Py_BUILD_CORE */
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/depot_tools/release/win/python_24/include/descrobject.h b/depot_tools/release/win/python_24/include/descrobject.h
new file mode 100644
index 0000000..a74af60
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/descrobject.h
@@ -0,0 +1,91 @@
+/* Descriptors */
+#ifndef Py_DESCROBJECT_H
+#define Py_DESCROBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef PyObject *(*getter)(PyObject *, void *);
+typedef int (*setter)(PyObject *, PyObject *, void *);
+
+typedef struct PyGetSetDef {
+	char *name;
+	getter get;
+	setter set;
+	char *doc;
+	void *closure;
+} PyGetSetDef;
+
+typedef PyObject *(*wrapperfunc)(PyObject *self, PyObject *args,
+				 void *wrapped);
+
+typedef PyObject *(*wrapperfunc_kwds)(PyObject *self, PyObject *args,
+				      void *wrapped, PyObject *kwds);
+
+struct wrapperbase {
+	char *name;
+	int offset;
+	void *function;
+	wrapperfunc wrapper;
+	char *doc;
+	int flags;
+	PyObject *name_strobj;
+};
+
+/* Flags for above struct */
+#define PyWrapperFlag_KEYWORDS 1 /* wrapper function takes keyword args */
+
+/* Various kinds of descriptor objects */
+
+#define PyDescr_COMMON \
+	PyObject_HEAD \
+	PyTypeObject *d_type; \
+	PyObject *d_name
+
+typedef struct {
+	PyDescr_COMMON;
+} PyDescrObject;
+
+typedef struct {
+	PyDescr_COMMON;
+	PyMethodDef *d_method;
+} PyMethodDescrObject;
+
+typedef struct {
+	PyDescr_COMMON;
+	struct PyMemberDef *d_member;
+} PyMemberDescrObject;
+
+typedef struct {
+	PyDescr_COMMON;
+	PyGetSetDef *d_getset;
+} PyGetSetDescrObject;
+
+typedef struct {
+	PyDescr_COMMON;
+	struct wrapperbase *d_base;
+	void *d_wrapped; /* This can be any function pointer */
+} PyWrapperDescrObject;
+
+PyAPI_DATA(PyTypeObject) PyWrapperDescr_Type;
+
+PyAPI_FUNC(PyObject *) PyDescr_NewMethod(PyTypeObject *, PyMethodDef *);
+PyAPI_FUNC(PyObject *) PyDescr_NewClassMethod(PyTypeObject *, PyMethodDef *);
+PyAPI_FUNC(PyObject *) PyDescr_NewMember(PyTypeObject *,
+					       struct PyMemberDef *);
+PyAPI_FUNC(PyObject *) PyDescr_NewGetSet(PyTypeObject *,
+					       struct PyGetSetDef *);
+PyAPI_FUNC(PyObject *) PyDescr_NewWrapper(PyTypeObject *,
+						struct wrapperbase *, void *);
+#define PyDescr_IsData(d) ((d)->ob_type->tp_descr_set != NULL)
+
+PyAPI_FUNC(PyObject *) PyDictProxy_New(PyObject *);
+PyAPI_FUNC(PyObject *) PyWrapper_New(PyObject *, PyObject *);
+
+
+PyAPI_DATA(PyTypeObject) PyProperty_Type;
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_DESCROBJECT_H */
+
diff --git a/depot_tools/release/win/python_24/include/dictobject.h b/depot_tools/release/win/python_24/include/dictobject.h
new file mode 100644
index 0000000..3da1273
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/dictobject.h
@@ -0,0 +1,134 @@
+#ifndef Py_DICTOBJECT_H
+#define Py_DICTOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Dictionary object type -- mapping from hashable object to object */
+
+/* The distribution includes a separate file, Objects/dictnotes.txt,
+   describing explorations into dictionary design and optimization.  
+   It covers typical dictionary use patterns, the parameters for
+   tuning dictionaries, and several ideas for possible optimizations.
+*/
+
+/*
+There are three kinds of slots in the table:
+
+1. Unused.  me_key == me_value == NULL
+   Does not hold an active (key, value) pair now and never did.  Unused can
+   transition to Active upon key insertion.  This is the only case in which
+   me_key is NULL, and is each slot's initial state.
+
+2. Active.  me_key != NULL and me_key != dummy and me_value != NULL
+   Holds an active (key, value) pair.  Active can transition to Dummy upon
+   key deletion.  This is the only case in which me_value != NULL.
+
+3. Dummy.  me_key == dummy and me_value == NULL
+   Previously held an active (key, value) pair, but that was deleted and an
+   active pair has not yet overwritten the slot.  Dummy can transition to
+   Active upon key insertion.  Dummy slots cannot be made Unused again
+   (cannot have me_key set to NULL), else the probe sequence in case of
+   collision would have no way to know they were once active.
+
+Note: .popitem() abuses the me_hash field of an Unused or Dummy slot to
+hold a search finger.  The me_hash field of Unused or Dummy slots has no
+meaning otherwise.
+*/
+
+/* PyDict_MINSIZE is the minimum size of a dictionary.  This many slots are
+ * allocated directly in the dict object (in the ma_smalltable member).
+ * It must be a power of 2, and at least 4.  8 allows dicts with no more
+ * than 5 active entries to live in ma_smalltable (and so avoid an
+ * additional malloc); instrumentation suggested this suffices for the
+ * majority of dicts (consisting mostly of usually-small instance dicts and
+ * usually-small dicts created to pass keyword arguments).
+ */
+#define PyDict_MINSIZE 8
+
+typedef struct {
+	long me_hash;      /* cached hash code of me_key */
+	PyObject *me_key;
+	PyObject *me_value;
+} PyDictEntry;
+
+/*
+To ensure the lookup algorithm terminates, there must be at least one Unused
+slot (NULL key) in the table.
+The value ma_fill is the number of non-NULL keys (sum of Active and Dummy);
+ma_used is the number of non-NULL, non-dummy keys (== the number of non-NULL
+values == the number of Active items).
+To avoid slowing down lookups on a near-full table, we resize the table when
+it's two-thirds full.
+*/
+typedef struct _dictobject PyDictObject;
+struct _dictobject {
+	PyObject_HEAD
+	int ma_fill;  /* # Active + # Dummy */
+	int ma_used;  /* # Active */
+
+	/* The table contains ma_mask + 1 slots, and that's a power of 2.
+	 * We store the mask instead of the size because the mask is more
+	 * frequently needed.
+	 */
+	int ma_mask;
+
+	/* ma_table points to ma_smalltable for small tables, else to
+	 * additional malloc'ed memory.  ma_table is never NULL!  This rule
+	 * saves repeated runtime null-tests in the workhorse getitem and
+	 * setitem calls.
+	 */
+	PyDictEntry *ma_table;
+	PyDictEntry *(*ma_lookup)(PyDictObject *mp, PyObject *key, long hash);
+	PyDictEntry ma_smalltable[PyDict_MINSIZE];
+};
+
+PyAPI_DATA(PyTypeObject) PyDict_Type;
+
+#define PyDict_Check(op) PyObject_TypeCheck(op, &PyDict_Type)
+#define PyDict_CheckExact(op) ((op)->ob_type == &PyDict_Type)
+
+PyAPI_FUNC(PyObject *) PyDict_New(void);
+PyAPI_FUNC(PyObject *) PyDict_GetItem(PyObject *mp, PyObject *key);
+PyAPI_FUNC(int) PyDict_SetItem(PyObject *mp, PyObject *key, PyObject *item);
+PyAPI_FUNC(int) PyDict_DelItem(PyObject *mp, PyObject *key);
+PyAPI_FUNC(void) PyDict_Clear(PyObject *mp);
+PyAPI_FUNC(int) PyDict_Next(
+	PyObject *mp, int *pos, PyObject **key, PyObject **value);
+PyAPI_FUNC(PyObject *) PyDict_Keys(PyObject *mp);
+PyAPI_FUNC(PyObject *) PyDict_Values(PyObject *mp);
+PyAPI_FUNC(PyObject *) PyDict_Items(PyObject *mp);
+PyAPI_FUNC(int) PyDict_Size(PyObject *mp);
+PyAPI_FUNC(PyObject *) PyDict_Copy(PyObject *mp);
+PyAPI_FUNC(int) PyDict_Contains(PyObject *mp, PyObject *key);
+
+/* PyDict_Update(mp, other) is equivalent to PyDict_Merge(mp, other, 1). */
+PyAPI_FUNC(int) PyDict_Update(PyObject *mp, PyObject *other);
+
+/* PyDict_Merge updates/merges from a mapping object (an object that
+   supports PyMapping_Keys() and PyObject_GetItem()).  If override is true,
+   the last occurrence of a key wins, else the first.  The Python
+   dict.update(other) is equivalent to PyDict_Merge(dict, other, 1).
+*/
+PyAPI_FUNC(int) PyDict_Merge(PyObject *mp,
+				   PyObject *other,
+				   int override);
+
+/* PyDict_MergeFromSeq2 updates/merges from an iterable object producing
+   iterable objects of length 2.  If override is true, the last occurrence
+   of a key wins, else the first.  The Python dict constructor dict(seq2)
+   is equivalent to dict={}; PyDict_MergeFromSeq(dict, seq2, 1).
+*/
+PyAPI_FUNC(int) PyDict_MergeFromSeq2(PyObject *d,
+					   PyObject *seq2,
+					   int override);
+
+PyAPI_FUNC(PyObject *) PyDict_GetItemString(PyObject *dp, const char *key);
+PyAPI_FUNC(int) PyDict_SetItemString(PyObject *dp, const char *key, PyObject *item);
+PyAPI_FUNC(int) PyDict_DelItemString(PyObject *dp, const char *key);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_DICTOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/enumobject.h b/depot_tools/release/win/python_24/include/enumobject.h
new file mode 100644
index 0000000..c14dbfc
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/enumobject.h
@@ -0,0 +1,17 @@
+#ifndef Py_ENUMOBJECT_H
+#define Py_ENUMOBJECT_H
+
+/* Enumerate Object */
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_DATA(PyTypeObject) PyEnum_Type;
+PyAPI_DATA(PyTypeObject) PyReversed_Type;
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* !Py_ENUMOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/errcode.h b/depot_tools/release/win/python_24/include/errcode.h
new file mode 100644
index 0000000..985911e
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/errcode.h
@@ -0,0 +1,35 @@
+#ifndef Py_ERRCODE_H
+#define Py_ERRCODE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Error codes passed around between file input, tokenizer, parser and
+   interpreter.  This is necessary so we can turn them into Python
+   exceptions at a higher level.  Note that some errors have a
+   slightly different meaning when passed from the tokenizer to the
+   parser than when passed from the parser to the interpreter; e.g.
+   the parser only returns E_EOF when it hits EOF immediately, and it
+   never returns E_OK. */
+
+#define E_OK		10	/* No error */
+#define E_EOF		11	/* End Of File */
+#define E_INTR		12	/* Interrupted */
+#define E_TOKEN		13	/* Bad token */
+#define E_SYNTAX	14	/* Syntax error */
+#define E_NOMEM		15	/* Ran out of memory */
+#define E_DONE		16	/* Parsing complete */
+#define E_ERROR		17	/* Execution error */
+#define E_TABSPACE	18	/* Inconsistent mixing of tabs and spaces */
+#define E_OVERFLOW      19	/* Node had too many children */
+#define E_TOODEEP	20	/* Too many indentation levels */
+#define E_DEDENT	21	/* No matching outer block for dedent */
+#define E_DECODE	22	/* Error in decoding into Unicode */
+#define E_EOFS		23	/* EOF in triple-quoted string */
+#define E_EOLS		24	/* EOL in single-quoted string */
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_ERRCODE_H */
diff --git a/depot_tools/release/win/python_24/include/eval.h b/depot_tools/release/win/python_24/include/eval.h
new file mode 100644
index 0000000..b78dfe0f
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/eval.h
@@ -0,0 +1,25 @@
+
+/* Interface to execute compiled code */
+
+#ifndef Py_EVAL_H
+#define Py_EVAL_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_FUNC(PyObject *) PyEval_EvalCode(PyCodeObject *, PyObject *, PyObject *);
+
+PyAPI_FUNC(PyObject *) PyEval_EvalCodeEx(PyCodeObject *co,
+					PyObject *globals,
+					PyObject *locals,
+					PyObject **args, int argc,
+					PyObject **kwds, int kwdc,
+					PyObject **defs, int defc,
+					PyObject *closure);
+
+PyAPI_FUNC(PyObject *) _PyEval_CallTracing(PyObject *func, PyObject *args);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_EVAL_H */
diff --git a/depot_tools/release/win/python_24/include/fileobject.h b/depot_tools/release/win/python_24/include/fileobject.h
new file mode 100644
index 0000000..ebbb521
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/fileobject.h
@@ -0,0 +1,63 @@
+
+/* File object interface */
+
+#ifndef Py_FILEOBJECT_H
+#define Py_FILEOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+	PyObject_HEAD
+	FILE *f_fp;
+	PyObject *f_name;
+	PyObject *f_mode;
+	int (*f_close)(FILE *);
+	int f_softspace;	/* Flag used by 'print' command */
+	int f_binary;		/* Flag which indicates whether the file is 
+				   open in binary (1) or text (0) mode */
+	char* f_buf;		/* Allocated readahead buffer */
+	char* f_bufend;		/* Points after last occupied position */
+	char* f_bufptr;		/* Current buffer position */
+	char *f_setbuf;		/* Buffer for setbuf(3) and setvbuf(3) */
+	int f_univ_newline;	/* Handle any newline convention */
+	int f_newlinetypes;	/* Types of newlines seen */
+	int f_skipnextlf;	/* Skip next \n */
+	PyObject *f_encoding;
+	PyObject *weakreflist; /* List of weak references */
+} PyFileObject;
+
+PyAPI_DATA(PyTypeObject) PyFile_Type;
+
+#define PyFile_Check(op) PyObject_TypeCheck(op, &PyFile_Type)
+#define PyFile_CheckExact(op) ((op)->ob_type == &PyFile_Type)
+
+PyAPI_FUNC(PyObject *) PyFile_FromString(char *, char *);
+PyAPI_FUNC(void) PyFile_SetBufSize(PyObject *, int);
+PyAPI_FUNC(int) PyFile_SetEncoding(PyObject *, const char *);
+PyAPI_FUNC(PyObject *) PyFile_FromFile(FILE *, char *, char *,
+                                             int (*)(FILE *));
+PyAPI_FUNC(FILE *) PyFile_AsFile(PyObject *);
+PyAPI_FUNC(PyObject *) PyFile_Name(PyObject *);
+PyAPI_FUNC(PyObject *) PyFile_GetLine(PyObject *, int);
+PyAPI_FUNC(int) PyFile_WriteObject(PyObject *, PyObject *, int);
+PyAPI_FUNC(int) PyFile_SoftSpace(PyObject *, int);
+PyAPI_FUNC(int) PyFile_WriteString(const char *, PyObject *);
+PyAPI_FUNC(int) PyObject_AsFileDescriptor(PyObject *);
+
+/* The default encoding used by the platform file system APIs
+   If non-NULL, this is different than the default encoding for strings
+*/
+PyAPI_DATA(const char *) Py_FileSystemDefaultEncoding;
+
+/* Routines to replace fread() and fgets() which accept any of \r, \n
+   or \r\n as line terminators.
+*/
+#define PY_STDIOTEXTMODE "b"
+char *Py_UniversalNewlineFgets(char *, int, FILE*, PyObject *);
+size_t Py_UniversalNewlineFread(char *, size_t, FILE *, PyObject *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_FILEOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/floatobject.h b/depot_tools/release/win/python_24/include/floatobject.h
new file mode 100644
index 0000000..9a2066f
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/floatobject.h
@@ -0,0 +1,95 @@
+
+/* Float object interface */
+
+/*
+PyFloatObject represents a (double precision) floating point number.
+*/
+
+#ifndef Py_FLOATOBJECT_H
+#define Py_FLOATOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    PyObject_HEAD
+    double ob_fval;
+} PyFloatObject;
+
+PyAPI_DATA(PyTypeObject) PyFloat_Type;
+
+#define PyFloat_Check(op) PyObject_TypeCheck(op, &PyFloat_Type)
+#define PyFloat_CheckExact(op) ((op)->ob_type == &PyFloat_Type)
+
+/* Return Python float from string PyObject.  Second argument ignored on
+   input, and, if non-NULL, NULL is stored into *junk (this tried to serve a
+   purpose once but can't be made to work as intended). */
+PyAPI_FUNC(PyObject *) PyFloat_FromString(PyObject*, char** junk);
+
+/* Return Python float from C double. */
+PyAPI_FUNC(PyObject *) PyFloat_FromDouble(double);
+
+/* Extract C double from Python float.  The macro version trades safety for
+   speed. */
+PyAPI_FUNC(double) PyFloat_AsDouble(PyObject *);
+#define PyFloat_AS_DOUBLE(op) (((PyFloatObject *)(op))->ob_fval)
+
+/* Write repr(v) into the char buffer argument, followed by null byte.  The
+   buffer must be "big enough"; >= 100 is very safe.
+   PyFloat_AsReprString(buf, x) strives to print enough digits so that
+   PyFloat_FromString(buf) then reproduces x exactly. */
+PyAPI_FUNC(void) PyFloat_AsReprString(char*, PyFloatObject *v);
+
+/* Write str(v) into the char buffer argument, followed by null byte.  The
+   buffer must be "big enough"; >= 100 is very safe.  Note that it's
+   unusual to be able to get back the float you started with from
+   PyFloat_AsString's result -- use PyFloat_AsReprString() if you want to
+   preserve precision across conversions. */
+PyAPI_FUNC(void) PyFloat_AsString(char*, PyFloatObject *v);
+
+/* _PyFloat_{Pack,Unpack}{4,8}
+ *
+ * The struct and pickle (at least) modules need an efficient platform-
+ * independent way to store floating-point values as byte strings.
+ * The Pack routines produce a string from a C double, and the Unpack
+ * routines produce a C double from such a string.  The suffix (4 or 8)
+ * specifies the number of bytes in the string.
+ *
+ * Excepting NaNs and infinities (which aren't handled correctly), the 4-
+ * byte format is identical to the IEEE-754 single precision format, and
+ * the 8-byte format to the IEEE-754 double precision format.  On non-
+ * IEEE platforms with more precision, or larger dynamic range, than
+ * 754 supports, not all values can be packed; on non-IEEE platforms with
+ * less precision, or smaller dynamic range, not all values can be
+ * unpacked.  What happens in such cases is partly accidental (alas).
+ */
+
+/* The pack routines write 4 or 8 bytes, starting at p.  le is a bool
+ * argument, true if you want the string in little-endian format (exponent
+ * last, at p+3 or p+7), false if you want big-endian format (exponent
+ * first, at p).
+ * Return value:  0 if all is OK, -1 if error (and an exception is
+ * set, most likely OverflowError).
+ * Bug:  What this does is undefined if x is a NaN or infinity.
+ * Bug:  -0.0 and +0.0 produce the same string.
+ */
+PyAPI_FUNC(int) _PyFloat_Pack4(double x, unsigned char *p, int le);
+PyAPI_FUNC(int) _PyFloat_Pack8(double x, unsigned char *p, int le);
+
+/* The unpack routines read 4 or 8 bytes, starting at p.  le is a bool
+ * argument, true if the string is in little-endian format (exponent
+ * last, at p+3 or p+7), false if big-endian (exponent first, at p).
+ * Return value:  The unpacked double.  On error, this is -1.0 and
+ * PyErr_Occurred() is true (and an exception is set, most likely
+ * OverflowError).
+ * Bug:  What this does is undefined if the string represents a NaN or
+ * infinity.
+ */
+PyAPI_FUNC(double) _PyFloat_Unpack4(const unsigned char *p, int le);
+PyAPI_FUNC(double) _PyFloat_Unpack8(const unsigned char *p, int le);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_FLOATOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/frameobject.h b/depot_tools/release/win/python_24/include/frameobject.h
new file mode 100644
index 0000000..7dc14e3
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/frameobject.h
@@ -0,0 +1,76 @@
+
+/* Frame object interface */
+
+#ifndef Py_FRAMEOBJECT_H
+#define Py_FRAMEOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    int b_type;			/* what kind of block this is */
+    int b_handler;		/* where to jump to find handler */
+    int b_level;		/* value stack level to pop to */
+} PyTryBlock;
+
+typedef struct _frame {
+    PyObject_VAR_HEAD
+    struct _frame *f_back;	/* previous frame, or NULL */
+    PyCodeObject *f_code;	/* code segment */
+    PyObject *f_builtins;	/* builtin symbol table (PyDictObject) */
+    PyObject *f_globals;	/* global symbol table (PyDictObject) */
+    PyObject *f_locals;		/* local symbol table (any mapping) */
+    PyObject **f_valuestack;	/* points after the last local */
+    /* Next free slot in f_valuestack.  Frame creation sets to f_valuestack.
+       Frame evaluation usually NULLs it, but a frame that yields sets it
+       to the current stack top. */
+    PyObject **f_stacktop;
+    PyObject *f_trace;		/* Trace function */
+    PyObject *f_exc_type, *f_exc_value, *f_exc_traceback;
+    PyThreadState *f_tstate;
+    int f_lasti;		/* Last instruction if called */
+    /* As of 2.3 f_lineno is only valid when tracing is active (i.e. when
+       f_trace is set) -- at other times use PyCode_Addr2Line instead. */
+    int f_lineno;		/* Current line number */
+    int f_restricted;		/* Flag set if restricted operations
+				   in this scope */
+    int f_iblock;		/* index in f_blockstack */
+    PyTryBlock f_blockstack[CO_MAXBLOCKS]; /* for try and loop blocks */
+    int f_nlocals;		/* number of locals */
+    int f_ncells;
+    int f_nfreevars;
+    int f_stacksize;		/* size of value stack */
+    PyObject *f_localsplus[1];	/* locals+stack, dynamically sized */
+} PyFrameObject;
+
+
+/* Standard object interface */
+
+PyAPI_DATA(PyTypeObject) PyFrame_Type;
+
+#define PyFrame_Check(op) ((op)->ob_type == &PyFrame_Type)
+
+PyAPI_FUNC(PyFrameObject *) PyFrame_New(PyThreadState *, PyCodeObject *,
+                                       PyObject *, PyObject *);
+
+
+/* The rest of the interface is specific for frame objects */
+
+/* Block management functions */
+
+PyAPI_FUNC(void) PyFrame_BlockSetup(PyFrameObject *, int, int, int);
+PyAPI_FUNC(PyTryBlock *) PyFrame_BlockPop(PyFrameObject *);
+
+/* Extend the value stack */
+
+PyAPI_FUNC(PyObject **) PyFrame_ExtendStack(PyFrameObject *, int, int);
+
+/* Conversions between "fast locals" and locals in dictionary */
+
+PyAPI_FUNC(void) PyFrame_LocalsToFast(PyFrameObject *, int);
+PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_FRAMEOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/funcobject.h b/depot_tools/release/win/python_24/include/funcobject.h
new file mode 100644
index 0000000..59c19bb
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/funcobject.h
@@ -0,0 +1,76 @@
+
+/* Function object interface */
+
+#ifndef Py_FUNCOBJECT_H
+#define Py_FUNCOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Function objects and code objects should not be confused with each other:
+ *
+ * Function objects are created by the execution of the 'def' statement.
+ * They reference a code object in their func_code attribute, which is a
+ * purely syntactic object, i.e. nothing more than a compiled version of some
+ * source code lines.  There is one code object per source code "fragment",
+ * but each code object can be referenced by zero or many function objects
+ * depending only on how many times the 'def' statement in the source was
+ * executed so far.
+ */
+
+typedef struct {
+    PyObject_HEAD
+    PyObject *func_code;	/* A code object */
+    PyObject *func_globals;	/* A dictionary (other mappings won't do) */
+    PyObject *func_defaults;	/* NULL or a tuple */
+    PyObject *func_closure;	/* NULL or a tuple of cell objects */
+    PyObject *func_doc;		/* The __doc__ attribute, can be anything */
+    PyObject *func_name;	/* The __name__ attribute, a string object */
+    PyObject *func_dict;	/* The __dict__ attribute, a dict or NULL */
+    PyObject *func_weakreflist;	/* List of weak references */
+    PyObject *func_module;	/* The __module__ attribute, can be anything */
+
+    /* Invariant:
+     *     func_closure contains the bindings for func_code->co_freevars, so
+     *     PyTuple_Size(func_closure) == PyCode_GetNumFree(func_code)
+     *     (func_closure may be NULL if PyCode_GetNumFree(func_code) == 0).
+     */
+} PyFunctionObject;
+
+PyAPI_DATA(PyTypeObject) PyFunction_Type;
+
+#define PyFunction_Check(op) ((op)->ob_type == &PyFunction_Type)
+
+PyAPI_FUNC(PyObject *) PyFunction_New(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyFunction_GetCode(PyObject *);
+PyAPI_FUNC(PyObject *) PyFunction_GetGlobals(PyObject *);
+PyAPI_FUNC(PyObject *) PyFunction_GetModule(PyObject *);
+PyAPI_FUNC(PyObject *) PyFunction_GetDefaults(PyObject *);
+PyAPI_FUNC(int) PyFunction_SetDefaults(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyFunction_GetClosure(PyObject *);
+PyAPI_FUNC(int) PyFunction_SetClosure(PyObject *, PyObject *);
+
+/* Macros for direct access to these values. Type checks are *not*
+   done, so use with care. */
+#define PyFunction_GET_CODE(func) \
+        (((PyFunctionObject *)func) -> func_code)
+#define PyFunction_GET_GLOBALS(func) \
+	(((PyFunctionObject *)func) -> func_globals)
+#define PyFunction_GET_MODULE(func) \
+	(((PyFunctionObject *)func) -> func_module)
+#define PyFunction_GET_DEFAULTS(func) \
+	(((PyFunctionObject *)func) -> func_defaults)
+#define PyFunction_GET_CLOSURE(func) \
+	(((PyFunctionObject *)func) -> func_closure)
+
+/* The classmethod and staticmethod types lives here, too */
+PyAPI_DATA(PyTypeObject) PyClassMethod_Type;
+PyAPI_DATA(PyTypeObject) PyStaticMethod_Type;
+
+PyAPI_FUNC(PyObject *) PyClassMethod_New(PyObject *);
+PyAPI_FUNC(PyObject *) PyStaticMethod_New(PyObject *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_FUNCOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/genobject.h b/depot_tools/release/win/python_24/include/genobject.h
new file mode 100644
index 0000000..f4226ed
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/genobject.h
@@ -0,0 +1,35 @@
+
+/* Generator object interface */
+
+#ifndef Py_GENOBJECT_H
+#define Py_GENOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+struct _frame; /* Avoid including frameobject.h */
+
+typedef struct {
+	PyObject_HEAD
+	/* The gi_ prefix is intended to remind of generator-iterator. */
+
+	struct _frame *gi_frame;
+
+	/* True if generator is being executed. */
+	int gi_running;
+
+	/* List of weak reference. */
+	PyObject *gi_weakreflist;
+} PyGenObject;
+
+PyAPI_DATA(PyTypeObject) PyGen_Type;
+
+#define PyGen_Check(op) PyObject_TypeCheck(op, &PyGen_Type)
+#define PyGen_CheckExact(op) ((op)->ob_type == &PyGen_Type)
+
+PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_GENOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/graminit.h b/depot_tools/release/win/python_24/include/graminit.h
new file mode 100644
index 0000000..a46cdc9
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/graminit.h
@@ -0,0 +1,78 @@
+#define single_input 256
+#define file_input 257
+#define eval_input 258
+#define decorator 259
+#define decorators 260
+#define funcdef 261
+#define parameters 262
+#define varargslist 263
+#define fpdef 264
+#define fplist 265
+#define stmt 266
+#define simple_stmt 267
+#define small_stmt 268
+#define expr_stmt 269
+#define augassign 270
+#define print_stmt 271
+#define del_stmt 272
+#define pass_stmt 273
+#define flow_stmt 274
+#define break_stmt 275
+#define continue_stmt 276
+#define return_stmt 277
+#define yield_stmt 278
+#define raise_stmt 279
+#define import_stmt 280
+#define import_name 281
+#define import_from 282
+#define import_as_name 283
+#define dotted_as_name 284
+#define import_as_names 285
+#define dotted_as_names 286
+#define dotted_name 287
+#define global_stmt 288
+#define exec_stmt 289
+#define assert_stmt 290
+#define compound_stmt 291
+#define if_stmt 292
+#define while_stmt 293
+#define for_stmt 294
+#define try_stmt 295
+#define except_clause 296
+#define suite 297
+#define test 298
+#define and_test 299
+#define not_test 300
+#define comparison 301
+#define comp_op 302
+#define expr 303
+#define xor_expr 304
+#define and_expr 305
+#define shift_expr 306
+#define arith_expr 307
+#define term 308
+#define factor 309
+#define power 310
+#define atom 311
+#define listmaker 312
+#define testlist_gexp 313
+#define lambdef 314
+#define trailer 315
+#define subscriptlist 316
+#define subscript 317
+#define sliceop 318
+#define exprlist 319
+#define testlist 320
+#define testlist_safe 321
+#define dictmaker 322
+#define classdef 323
+#define arglist 324
+#define argument 325
+#define list_iter 326
+#define list_for 327
+#define list_if 328
+#define gen_iter 329
+#define gen_for 330
+#define gen_if 331
+#define testlist1 332
+#define encoding_decl 333
diff --git a/depot_tools/release/win/python_24/include/grammar.h b/depot_tools/release/win/python_24/include/grammar.h
new file mode 100644
index 0000000..8426da3
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/grammar.h
@@ -0,0 +1,93 @@
+
+/* Grammar interface */
+
+#ifndef Py_GRAMMAR_H
+#define Py_GRAMMAR_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include "bitset.h" /* Sigh... */
+
+/* A label of an arc */
+
+typedef struct {
+    int		 lb_type;
+    char	*lb_str;
+} label;
+
+#define EMPTY 0		/* Label number 0 is by definition the empty label */
+
+/* A list of labels */
+
+typedef struct {
+    int		 ll_nlabels;
+    label	*ll_label;
+} labellist;
+
+/* An arc from one state to another */
+
+typedef struct {
+    short	a_lbl;		/* Label of this arc */
+    short	a_arrow;	/* State where this arc goes to */
+} arc;
+
+/* A state in a DFA */
+
+typedef struct {
+    int		 s_narcs;
+    arc		*s_arc;		/* Array of arcs */
+	
+    /* Optional accelerators */
+    int		 s_lower;	/* Lowest label index */
+    int		 s_upper;	/* Highest label index */
+    int		*s_accel;	/* Accelerator */
+    int		 s_accept;	/* Nonzero for accepting state */
+} state;
+
+/* A DFA */
+
+typedef struct {
+    int		 d_type;	/* Non-terminal this represents */
+    char	*d_name;	/* For printing */
+    int		 d_initial;	/* Initial state */
+    int		 d_nstates;
+    state	*d_state;	/* Array of states */
+    bitset	 d_first;
+} dfa;
+
+/* A grammar */
+
+typedef struct {
+    int		 g_ndfas;
+    dfa		*g_dfa;		/* Array of DFAs */
+    labellist	 g_ll;
+    int		 g_start;	/* Start symbol of the grammar */
+    int		 g_accel;	/* Set if accelerators present */
+} grammar;
+
+/* FUNCTIONS */
+
+grammar *newgrammar(int start);
+dfa *adddfa(grammar *g, int type, char *name);
+int addstate(dfa *d);
+void addarc(dfa *d, int from, int to, int lbl);
+dfa *PyGrammar_FindDFA(grammar *g, int type);
+
+int addlabel(labellist *ll, int type, char *str);
+int findlabel(labellist *ll, int type, char *str);
+char *PyGrammar_LabelRepr(label *lb);
+void translatelabels(grammar *g);
+
+void addfirstsets(grammar *g);
+
+void PyGrammar_AddAccelerators(grammar *g);
+void PyGrammar_RemoveAccelerators(grammar *);
+
+void printgrammar(grammar *g, FILE *fp);
+void printnonterminals(grammar *g, FILE *fp);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_GRAMMAR_H */
diff --git a/depot_tools/release/win/python_24/include/import.h b/depot_tools/release/win/python_24/include/import.h
new file mode 100644
index 0000000..9f1c2be2
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/import.h
@@ -0,0 +1,55 @@
+
+/* Module definition and import interface */
+
+#ifndef Py_IMPORT_H
+#define Py_IMPORT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_FUNC(long) PyImport_GetMagicNumber(void);
+PyAPI_FUNC(PyObject *) PyImport_ExecCodeModule(char *name, PyObject *co);
+PyAPI_FUNC(PyObject *) PyImport_ExecCodeModuleEx(
+	char *name, PyObject *co, char *pathname);
+PyAPI_FUNC(PyObject *) PyImport_GetModuleDict(void);
+PyAPI_FUNC(PyObject *) PyImport_AddModule(char *name);
+PyAPI_FUNC(PyObject *) PyImport_ImportModule(char *name);
+PyAPI_FUNC(PyObject *) PyImport_ImportModuleEx(
+	char *name, PyObject *globals, PyObject *locals, PyObject *fromlist);
+PyAPI_FUNC(PyObject *) PyImport_Import(PyObject *name);
+PyAPI_FUNC(PyObject *) PyImport_ReloadModule(PyObject *m);
+PyAPI_FUNC(void) PyImport_Cleanup(void);
+PyAPI_FUNC(int) PyImport_ImportFrozenModule(char *);
+
+PyAPI_FUNC(struct filedescr *) _PyImport_FindModule(
+	const char *, PyObject *, char *, size_t, FILE **, PyObject **);
+PyAPI_FUNC(int) _PyImport_IsScript(struct filedescr *);
+
+PyAPI_FUNC(PyObject *)_PyImport_FindExtension(char *, char *);
+PyAPI_FUNC(PyObject *)_PyImport_FixupExtension(char *, char *);
+
+struct _inittab {
+    char *name;
+    void (*initfunc)(void);
+};
+
+PyAPI_DATA(struct _inittab *) PyImport_Inittab;
+
+PyAPI_FUNC(int) PyImport_AppendInittab(char *name, void (*initfunc)(void));
+PyAPI_FUNC(int) PyImport_ExtendInittab(struct _inittab *newtab);
+
+struct _frozen {
+    char *name;
+    unsigned char *code;
+    int size;
+};
+
+/* Embedding apps may change this pointer to point to their favorite
+   collection of frozen modules: */
+
+PyAPI_DATA(struct _frozen *) PyImport_FrozenModules;
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_IMPORT_H */
diff --git a/depot_tools/release/win/python_24/include/intobject.h b/depot_tools/release/win/python_24/include/intobject.h
new file mode 100644
index 0000000..1bbd59c
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/intobject.h
@@ -0,0 +1,61 @@
+
+/* Integer object interface */
+
+/*
+PyIntObject represents a (long) integer.  This is an immutable object;
+an integer cannot change its value after creation.
+
+There are functions to create new integer objects, to test an object
+for integer-ness, and to get the integer value.  The latter functions
+return -1 and set errno to EBADF if the object is not a PyIntObject.
+None of the functions should be applied to nil objects.
+
+The type PyIntObject is (unfortunately) exposed here so we can declare
+_Py_TrueStruct and _Py_ZeroStruct in boolobject.h; don't use this.
+*/
+
+#ifndef Py_INTOBJECT_H
+#define Py_INTOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    PyObject_HEAD
+    long ob_ival;
+} PyIntObject;
+
+PyAPI_DATA(PyTypeObject) PyInt_Type;
+
+#define PyInt_Check(op) PyObject_TypeCheck(op, &PyInt_Type)
+#define PyInt_CheckExact(op) ((op)->ob_type == &PyInt_Type)
+
+PyAPI_FUNC(PyObject *) PyInt_FromString(char*, char**, int);
+#ifdef Py_USING_UNICODE
+PyAPI_FUNC(PyObject *) PyInt_FromUnicode(Py_UNICODE*, int, int);
+#endif
+PyAPI_FUNC(PyObject *) PyInt_FromLong(long);
+PyAPI_FUNC(long) PyInt_AsLong(PyObject *);
+PyAPI_FUNC(unsigned long) PyInt_AsUnsignedLongMask(PyObject *);
+#ifdef HAVE_LONG_LONG
+PyAPI_FUNC(unsigned PY_LONG_LONG) PyInt_AsUnsignedLongLongMask(PyObject *);
+#endif
+
+PyAPI_FUNC(long) PyInt_GetMax(void);
+
+/* Macro, trading safety for speed */
+#define PyInt_AS_LONG(op) (((PyIntObject *)(op))->ob_ival)
+
+/* These aren't really part of the Int object, but they're handy; the protos
+ * are necessary for systems that need the magic of PyAPI_FUNC and that want
+ * to have stropmodule as a dynamically loaded module instead of building it
+ * into the main Python shared library/DLL.  Guido thinks I'm weird for
+ * building it this way.  :-)  [cjh]
+ */
+PyAPI_FUNC(unsigned long) PyOS_strtoul(char *, char **, int);
+PyAPI_FUNC(long) PyOS_strtol(char *, char **, int);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/intrcheck.h b/depot_tools/release/win/python_24/include/intrcheck.h
new file mode 100644
index 0000000..3b67ed0d
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/intrcheck.h
@@ -0,0 +1,15 @@
+
+#ifndef Py_INTRCHECK_H
+#define Py_INTRCHECK_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_FUNC(int) PyOS_InterruptOccurred(void);
+PyAPI_FUNC(void) PyOS_InitInterrupts(void);
+PyAPI_FUNC(void) PyOS_AfterFork(void);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_INTRCHECK_H */
diff --git a/depot_tools/release/win/python_24/include/iterobject.h b/depot_tools/release/win/python_24/include/iterobject.h
new file mode 100644
index 0000000..c078ebb2
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/iterobject.h
@@ -0,0 +1,23 @@
+#ifndef Py_ITEROBJECT_H
+#define Py_ITEROBJECT_H
+/* Iterators (the basic kind, over a sequence) */
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_DATA(PyTypeObject) PySeqIter_Type;
+
+#define PySeqIter_Check(op) ((op)->ob_type == &PySeqIter_Type)
+
+PyAPI_FUNC(PyObject *) PySeqIter_New(PyObject *);
+
+PyAPI_DATA(PyTypeObject) PyCallIter_Type;
+
+#define PyCallIter_Check(op) ((op)->ob_type == &PyCallIter_Type)
+
+PyAPI_FUNC(PyObject *) PyCallIter_New(PyObject *, PyObject *);
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_ITEROBJECT_H */
+
diff --git a/depot_tools/release/win/python_24/include/listobject.h b/depot_tools/release/win/python_24/include/listobject.h
new file mode 100644
index 0000000..0999a821
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/listobject.h
@@ -0,0 +1,67 @@
+
+/* List object interface */
+
+/*
+Another generally useful object type is a list of object pointers.
+This is a mutable type: the list items can be changed, and items can be
+added or removed.  Out-of-range indices or non-list objects are ignored.
+
+*** WARNING *** PyList_SetItem does not increment the new item's reference
+count, but does decrement the reference count of the item it replaces,
+if not nil.  It does *decrement* the reference count if it is *not*
+inserted in the list.  Similarly, PyList_GetItem does not increment the
+returned item's reference count.
+*/
+
+#ifndef Py_LISTOBJECT_H
+#define Py_LISTOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    PyObject_VAR_HEAD
+    /* Vector of pointers to list elements.  list[0] is ob_item[0], etc. */
+    PyObject **ob_item;
+
+    /* ob_item contains space for 'allocated' elements.  The number
+     * currently in use is ob_size.
+     * Invariants:
+     *     0 <= ob_size <= allocated
+     *     len(list) == ob_size
+     *     ob_item == NULL implies ob_size == allocated == 0
+     * list.sort() temporarily sets allocated to -1 to detect mutations.
+     *
+     * Items must normally not be NULL, except during construction when
+     * the list is not yet visible outside the function that builds it.
+     */
+    int allocated;
+} PyListObject;
+
+PyAPI_DATA(PyTypeObject) PyList_Type;
+
+#define PyList_Check(op) PyObject_TypeCheck(op, &PyList_Type)
+#define PyList_CheckExact(op) ((op)->ob_type == &PyList_Type)
+
+PyAPI_FUNC(PyObject *) PyList_New(int size);
+PyAPI_FUNC(int) PyList_Size(PyObject *);
+PyAPI_FUNC(PyObject *) PyList_GetItem(PyObject *, int);
+PyAPI_FUNC(int) PyList_SetItem(PyObject *, int, PyObject *);
+PyAPI_FUNC(int) PyList_Insert(PyObject *, int, PyObject *);
+PyAPI_FUNC(int) PyList_Append(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyList_GetSlice(PyObject *, int, int);
+PyAPI_FUNC(int) PyList_SetSlice(PyObject *, int, int, PyObject *);
+PyAPI_FUNC(int) PyList_Sort(PyObject *);
+PyAPI_FUNC(int) PyList_Reverse(PyObject *);
+PyAPI_FUNC(PyObject *) PyList_AsTuple(PyObject *);
+PyAPI_FUNC(PyObject *) _PyList_Extend(PyListObject *, PyObject *);
+
+/* Macro, trading safety for speed */
+#define PyList_GET_ITEM(op, i) (((PyListObject *)(op))->ob_item[i])
+#define PyList_SET_ITEM(op, i, v) (((PyListObject *)(op))->ob_item[i] = (v))
+#define PyList_GET_SIZE(op)    (((PyListObject *)(op))->ob_size)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_LISTOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/longintrepr.h b/depot_tools/release/win/python_24/include/longintrepr.h
new file mode 100644
index 0000000..254076e
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/longintrepr.h
@@ -0,0 +1,63 @@
+#ifndef Py_LONGINTREPR_H
+#define Py_LONGINTREPR_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* This is published for the benefit of "friend" marshal.c only. */
+
+/* Parameters of the long integer representation.
+   These shouldn't have to be changed as C should guarantee that a short
+   contains at least 16 bits, but it's made changeable anyway.
+   Note: 'digit' should be able to hold 2*MASK+1, and 'twodigits'
+   should be able to hold the intermediate results in 'mul'
+   (at most (BASE-1)*(2*BASE+1) == MASK*(2*MASK+3)).
+   Also, x_sub assumes that 'digit' is an unsigned type, and overflow
+   is handled by taking the result mod 2**N for some N > SHIFT.
+   And, at some places it is assumed that MASK fits in an int, as well.
+   long_pow() requires that SHIFT be divisible by 5. */
+
+typedef unsigned short digit;
+typedef unsigned int wdigit; /* digit widened to parameter size */
+#define BASE_TWODIGITS_TYPE long
+typedef unsigned BASE_TWODIGITS_TYPE twodigits;
+typedef BASE_TWODIGITS_TYPE stwodigits; /* signed variant of twodigits */
+
+#define SHIFT	15
+#define BASE	((digit)1 << SHIFT)
+#define MASK	((int)(BASE - 1))
+
+#if SHIFT % 5 != 0
+#error "longobject.c requires that SHIFT be divisible by 5"
+#endif
+
+/* Long integer representation.
+   The absolute value of a number is equal to
+   	SUM(for i=0 through abs(ob_size)-1) ob_digit[i] * 2**(SHIFT*i)
+   Negative numbers are represented with ob_size < 0;
+   zero is represented by ob_size == 0.
+   In a normalized number, ob_digit[abs(ob_size)-1] (the most significant
+   digit) is never zero.  Also, in all cases, for all valid i,
+   	0 <= ob_digit[i] <= MASK.
+   The allocation function takes care of allocating extra memory
+   so that ob_digit[0] ... ob_digit[abs(ob_size)-1] are actually available.
+
+   CAUTION:  Generic code manipulating subtypes of PyVarObject has to
+   be aware that longs abuse ob_size's sign bit.
+*/
+
+struct _longobject {
+	PyObject_VAR_HEAD
+	digit ob_digit[1];
+};
+
+PyAPI_FUNC(PyLongObject *) _PyLong_New(int);
+
+/* Return a copy of src. */
+PyAPI_FUNC(PyObject *) _PyLong_Copy(PyLongObject *src);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_LONGINTREPR_H */
diff --git a/depot_tools/release/win/python_24/include/longobject.h b/depot_tools/release/win/python_24/include/longobject.h
new file mode 100644
index 0000000..209c082
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/longobject.h
@@ -0,0 +1,108 @@
+#ifndef Py_LONGOBJECT_H
+#define Py_LONGOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Long (arbitrary precision) integer object interface */
+
+typedef struct _longobject PyLongObject; /* Revealed in longintrepr.h */
+
+PyAPI_DATA(PyTypeObject) PyLong_Type;
+
+#define PyLong_Check(op) PyObject_TypeCheck(op, &PyLong_Type)
+#define PyLong_CheckExact(op) ((op)->ob_type == &PyLong_Type)
+
+PyAPI_FUNC(PyObject *) PyLong_FromLong(long);
+PyAPI_FUNC(PyObject *) PyLong_FromUnsignedLong(unsigned long);
+PyAPI_FUNC(PyObject *) PyLong_FromDouble(double);
+PyAPI_FUNC(long) PyLong_AsLong(PyObject *);
+PyAPI_FUNC(unsigned long) PyLong_AsUnsignedLong(PyObject *);
+PyAPI_FUNC(unsigned long) PyLong_AsUnsignedLongMask(PyObject *);
+
+/* _PyLong_AsScaledDouble returns a double x and an exponent e such that
+   the true value is approximately equal to x * 2**(SHIFT*e).  e is >= 0.
+   x is 0.0 if and only if the input is 0 (in which case, e and x are both
+   zeroes).  Overflow is impossible.  Note that the exponent returned must
+   be multiplied by SHIFT!  There may not be enough room in an int to store
+   e*SHIFT directly. */
+PyAPI_FUNC(double) _PyLong_AsScaledDouble(PyObject *vv, int *e);
+
+PyAPI_FUNC(double) PyLong_AsDouble(PyObject *);
+PyAPI_FUNC(PyObject *) PyLong_FromVoidPtr(void *);
+PyAPI_FUNC(void *) PyLong_AsVoidPtr(PyObject *);
+
+#ifdef HAVE_LONG_LONG
+PyAPI_FUNC(PyObject *) PyLong_FromLongLong(PY_LONG_LONG);
+PyAPI_FUNC(PyObject *) PyLong_FromUnsignedLongLong(unsigned PY_LONG_LONG);
+PyAPI_FUNC(PY_LONG_LONG) PyLong_AsLongLong(PyObject *);
+PyAPI_FUNC(unsigned PY_LONG_LONG) PyLong_AsUnsignedLongLong(PyObject *);
+PyAPI_FUNC(unsigned PY_LONG_LONG) PyLong_AsUnsignedLongLongMask(PyObject *);
+#endif /* HAVE_LONG_LONG */
+
+PyAPI_FUNC(PyObject *) PyLong_FromString(char *, char **, int);
+#ifdef Py_USING_UNICODE
+PyAPI_FUNC(PyObject *) PyLong_FromUnicode(Py_UNICODE*, int, int);
+#endif
+
+/* _PyLong_Sign.  Return 0 if v is 0, -1 if v < 0, +1 if v > 0.
+   v must not be NULL, and must be a normalized long.
+   There are no error cases.
+*/
+PyAPI_FUNC(int) _PyLong_Sign(PyObject *v);
+
+
+/* _PyLong_NumBits.  Return the number of bits needed to represent the
+   absolute value of a long.  For example, this returns 1 for 1 and -1, 2
+   for 2 and -2, and 2 for 3 and -3.  It returns 0 for 0.
+   v must not be NULL, and must be a normalized long.
+   (size_t)-1 is returned and OverflowError set if the true result doesn't
+   fit in a size_t.
+*/
+PyAPI_FUNC(size_t) _PyLong_NumBits(PyObject *v);
+
+/* _PyLong_FromByteArray:  View the n unsigned bytes as a binary integer in
+   base 256, and return a Python long with the same numeric value.
+   If n is 0, the integer is 0.  Else:
+   If little_endian is 1/true, bytes[n-1] is the MSB and bytes[0] the LSB;
+   else (little_endian is 0/false) bytes[0] is the MSB and bytes[n-1] the
+   LSB.
+   If is_signed is 0/false, view the bytes as a non-negative integer.
+   If is_signed is 1/true, view the bytes as a 2's-complement integer,
+   non-negative if bit 0x80 of the MSB is clear, negative if set.
+   Error returns:
+   + Return NULL with the appropriate exception set if there's not
+     enough memory to create the Python long.
+*/
+PyAPI_FUNC(PyObject *) _PyLong_FromByteArray(
+	const unsigned char* bytes, size_t n,
+	int little_endian, int is_signed);
+
+/* _PyLong_AsByteArray: Convert the least-significant 8*n bits of long
+   v to a base-256 integer, stored in array bytes.  Normally return 0,
+   return -1 on error.
+   If little_endian is 1/true, store the MSB at bytes[n-1] and the LSB at
+   bytes[0]; else (little_endian is 0/false) store the MSB at bytes[0] and
+   the LSB at bytes[n-1].
+   If is_signed is 0/false, it's an error if v < 0; else (v >= 0) n bytes
+   are filled and there's nothing special about bit 0x80 of the MSB.
+   If is_signed is 1/true, bytes is filled with the 2's-complement
+   representation of v's value.  Bit 0x80 of the MSB is the sign bit.
+   Error returns (-1):
+   + is_signed is 0 and v < 0.  TypeError is set in this case, and bytes
+     isn't altered.
+   + n isn't big enough to hold the full mathematical value of v.  For
+     example, if is_signed is 0 and there are more digits in the v than
+     fit in n; or if is_signed is 1, v < 0, and n is just 1 bit shy of
+     being large enough to hold a sign bit.  OverflowError is set in this
+     case, but bytes holds the least-significant n bytes of the true value.
+*/
+PyAPI_FUNC(int) _PyLong_AsByteArray(PyLongObject* v,
+	unsigned char* bytes, size_t n,
+	int little_endian, int is_signed);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_LONGOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/marshal.h b/depot_tools/release/win/python_24/include/marshal.h
new file mode 100644
index 0000000..fc491dd
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/marshal.h
@@ -0,0 +1,25 @@
+
+/* Interface for marshal.c */
+
+#ifndef Py_MARSHAL_H
+#define Py_MARSHAL_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define Py_MARSHAL_VERSION 1
+
+PyAPI_FUNC(void) PyMarshal_WriteLongToFile(long, FILE *, int);
+PyAPI_FUNC(void) PyMarshal_WriteObjectToFile(PyObject *, FILE *, int);
+PyAPI_FUNC(PyObject *) PyMarshal_WriteObjectToString(PyObject *, int);
+
+PyAPI_FUNC(long) PyMarshal_ReadLongFromFile(FILE *);
+PyAPI_FUNC(int) PyMarshal_ReadShortFromFile(FILE *);
+PyAPI_FUNC(PyObject *) PyMarshal_ReadObjectFromFile(FILE *);
+PyAPI_FUNC(PyObject *) PyMarshal_ReadLastObjectFromFile(FILE *);
+PyAPI_FUNC(PyObject *) PyMarshal_ReadObjectFromString(char *, int);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_MARSHAL_H */
diff --git a/depot_tools/release/win/python_24/include/metagrammar.h b/depot_tools/release/win/python_24/include/metagrammar.h
new file mode 100644
index 0000000..15c8ef8
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/metagrammar.h
@@ -0,0 +1,18 @@
+#ifndef Py_METAGRAMMAR_H
+#define Py_METAGRAMMAR_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+#define MSTART 256
+#define RULE 257
+#define RHS 258
+#define ALT 259
+#define ITEM 260
+#define ATOM 261
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_METAGRAMMAR_H */
diff --git a/depot_tools/release/win/python_24/include/methodobject.h b/depot_tools/release/win/python_24/include/methodobject.h
new file mode 100644
index 0000000..9736dc3
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/methodobject.h
@@ -0,0 +1,91 @@
+
+/* Method object interface */
+
+#ifndef Py_METHODOBJECT_H
+#define Py_METHODOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* This is about the type 'builtin_function_or_method',
+   not Python methods in user-defined classes.  See classobject.h
+   for the latter. */
+
+PyAPI_DATA(PyTypeObject) PyCFunction_Type;
+
+#define PyCFunction_Check(op) ((op)->ob_type == &PyCFunction_Type)
+
+typedef PyObject *(*PyCFunction)(PyObject *, PyObject *);
+typedef PyObject *(*PyCFunctionWithKeywords)(PyObject *, PyObject *,
+					     PyObject *);
+typedef PyObject *(*PyNoArgsFunction)(PyObject *);
+
+PyAPI_FUNC(PyCFunction) PyCFunction_GetFunction(PyObject *);
+PyAPI_FUNC(PyObject *) PyCFunction_GetSelf(PyObject *);
+PyAPI_FUNC(int) PyCFunction_GetFlags(PyObject *);
+
+/* Macros for direct access to these values. Type checks are *not*
+   done, so use with care. */
+#define PyCFunction_GET_FUNCTION(func) \
+        (((PyCFunctionObject *)func) -> m_ml -> ml_meth)
+#define PyCFunction_GET_SELF(func) \
+	(((PyCFunctionObject *)func) -> m_self)
+#define PyCFunction_GET_FLAGS(func) \
+	(((PyCFunctionObject *)func) -> m_ml -> ml_flags)
+PyAPI_FUNC(PyObject *) PyCFunction_Call(PyObject *, PyObject *, PyObject *);
+
+struct PyMethodDef {
+    char	*ml_name;	/* The name of the built-in function/method */
+    PyCFunction  ml_meth;	/* The C function that implements it */
+    int		 ml_flags;	/* Combination of METH_xxx flags, which mostly
+				   describe the args expected by the C func */
+    char	*ml_doc;	/* The __doc__ attribute, or NULL */
+};
+typedef struct PyMethodDef PyMethodDef;
+
+PyAPI_FUNC(PyObject *) Py_FindMethod(PyMethodDef[], PyObject *, char *);
+
+#define PyCFunction_New(ML, SELF) PyCFunction_NewEx((ML), (SELF), NULL)
+PyAPI_FUNC(PyObject *) PyCFunction_NewEx(PyMethodDef *, PyObject *, 
+					 PyObject *);
+
+/* Flag passed to newmethodobject */
+#define METH_OLDARGS  0x0000
+#define METH_VARARGS  0x0001
+#define METH_KEYWORDS 0x0002
+/* METH_NOARGS and METH_O must not be combined with the flags above. */
+#define METH_NOARGS   0x0004
+#define METH_O        0x0008
+
+/* METH_CLASS and METH_STATIC are a little different; these control
+   the construction of methods for a class.  These cannot be used for
+   functions in modules. */
+#define METH_CLASS    0x0010
+#define METH_STATIC   0x0020
+
+/* METH_COEXIST allows a method to be entered even though a slot has
+   already filled the entry.  When defined, the flag allows a separate
+   method, "__contains__" for example, to coexist with a defined 
+   slot like sq_contains. */
+
+#define METH_COEXIST   0x0040
+
+typedef struct PyMethodChain {
+    PyMethodDef *methods;		/* Methods of this type */
+    struct PyMethodChain *link;	/* NULL or base type */
+} PyMethodChain;
+
+PyAPI_FUNC(PyObject *) Py_FindMethodInChain(PyMethodChain *, PyObject *,
+                                                  char *);
+
+typedef struct {
+    PyObject_HEAD
+    PyMethodDef *m_ml; /* Description of the C function to call */
+    PyObject    *m_self; /* Passed as 'self' arg to the C func, can be NULL */
+    PyObject    *m_module; /* The __module__ attribute, can be anything */
+} PyCFunctionObject;
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_METHODOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/modsupport.h b/depot_tools/release/win/python_24/include/modsupport.h
new file mode 100644
index 0000000..2d67f3e
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/modsupport.h
@@ -0,0 +1,103 @@
+
+#ifndef Py_MODSUPPORT_H
+#define Py_MODSUPPORT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* Module support interface */
+
+#include <stdarg.h>
+
+PyAPI_FUNC(int) PyArg_Parse(PyObject *, char *, ...);
+PyAPI_FUNC(int) PyArg_ParseTuple(PyObject *, char *, ...);
+PyAPI_FUNC(int) PyArg_ParseTupleAndKeywords(PyObject *, PyObject *,
+                                                  char *, char **, ...);
+PyAPI_FUNC(int) PyArg_UnpackTuple(PyObject *, char *, int, int, ...);
+PyAPI_FUNC(PyObject *) Py_BuildValue(char *, ...);
+
+PyAPI_FUNC(int) PyArg_VaParse(PyObject *, char *, va_list);
+PyAPI_FUNC(int) PyArg_VaParseTupleAndKeywords(PyObject *, PyObject *,
+                                                  char *, char **, va_list);
+PyAPI_FUNC(PyObject *) Py_VaBuildValue(char *, va_list);
+
+PyAPI_FUNC(int) PyModule_AddObject(PyObject *, char *, PyObject *);
+PyAPI_FUNC(int) PyModule_AddIntConstant(PyObject *, char *, long);
+PyAPI_FUNC(int) PyModule_AddStringConstant(PyObject *, char *, char *);
+
+#define PYTHON_API_VERSION 1012
+#define PYTHON_API_STRING "1012"
+/* The API version is maintained (independently from the Python version)
+   so we can detect mismatches between the interpreter and dynamically
+   loaded modules.  These are diagnosed by an error message but
+   the module is still loaded (because the mismatch can only be tested
+   after loading the module).  The error message is intended to
+   explain the core dump a few seconds later.
+
+   The symbol PYTHON_API_STRING defines the same value as a string
+   literal.  *** PLEASE MAKE SURE THE DEFINITIONS MATCH. ***
+
+   Please add a line or two to the top of this log for each API
+   version change:
+
+   19-Aug-2002  GvR	1012	Changes to string object struct for
+   				interning changes, saving 3 bytes.
+
+   17-Jul-2001	GvR	1011	Descr-branch, just to be on the safe side
+
+   25-Jan-2001  FLD     1010    Parameters added to PyCode_New() and
+                                PyFrame_New(); Python 2.1a2
+
+   14-Mar-2000  GvR     1009    Unicode API added
+
+   3-Jan-1999	GvR	1007	Decided to change back!  (Don't reuse 1008!)
+
+   3-Dec-1998	GvR	1008	Python 1.5.2b1
+
+   18-Jan-1997	GvR	1007	string interning and other speedups
+
+   11-Oct-1996	GvR	renamed Py_Ellipses to Py_Ellipsis :-(
+
+   30-Jul-1996	GvR	Slice and ellipses syntax added
+
+   23-Jul-1996	GvR	For 1.4 -- better safe than sorry this time :-)
+
+   7-Nov-1995	GvR	Keyword arguments (should've been done at 1.3 :-( )
+
+   10-Jan-1995	GvR	Renamed globals to new naming scheme
+
+   9-Jan-1995	GvR	Initial version (incompatible with older API)
+*/
+
+#ifdef MS_WINDOWS
+/* Special defines for Windows versions used to live here.  Things
+   have changed, and the "Version" is now in a global string variable.
+   Reason for this is that this for easier branding of a "custom DLL"
+   without actually needing a recompile.  */
+#endif /* MS_WINDOWS */
+
+#ifdef Py_TRACE_REFS
+/* When we are tracing reference counts, rename Py_InitModule4 so
+   modules compiled with incompatible settings will generate a
+   link-time error. */
+#define Py_InitModule4 Py_InitModule4TraceRefs
+#endif
+
+PyAPI_FUNC(PyObject *) Py_InitModule4(char *name, PyMethodDef *methods,
+                                            char *doc, PyObject *self,
+                                            int apiver);
+
+#define Py_InitModule(name, methods) \
+	Py_InitModule4(name, methods, (char *)NULL, (PyObject *)NULL, \
+		       PYTHON_API_VERSION)
+
+#define Py_InitModule3(name, methods, doc) \
+	Py_InitModule4(name, methods, doc, (PyObject *)NULL, \
+		       PYTHON_API_VERSION)
+
+PyAPI_DATA(char *) _Py_PackageContext;
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_MODSUPPORT_H */
diff --git a/depot_tools/release/win/python_24/include/moduleobject.h b/depot_tools/release/win/python_24/include/moduleobject.h
new file mode 100644
index 0000000..0f67da3c
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/moduleobject.h
@@ -0,0 +1,24 @@
+
+/* Module object interface */
+
+#ifndef Py_MODULEOBJECT_H
+#define Py_MODULEOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_DATA(PyTypeObject) PyModule_Type;
+
+#define PyModule_Check(op) PyObject_TypeCheck(op, &PyModule_Type)
+#define PyModule_CheckExact(op) ((op)->ob_type == &PyModule_Type)
+
+PyAPI_FUNC(PyObject *) PyModule_New(char *);
+PyAPI_FUNC(PyObject *) PyModule_GetDict(PyObject *);
+PyAPI_FUNC(char *) PyModule_GetName(PyObject *);
+PyAPI_FUNC(char *) PyModule_GetFilename(PyObject *);
+PyAPI_FUNC(void) _PyModule_Clear(PyObject *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_MODULEOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/node.h b/depot_tools/release/win/python_24/include/node.h
new file mode 100644
index 0000000..4c64708
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/node.h
@@ -0,0 +1,39 @@
+
+/* Parse tree node interface */
+
+#ifndef Py_NODE_H
+#define Py_NODE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct _node {
+    short		n_type;
+    char		*n_str;
+    int			n_lineno;
+    int			n_nchildren;
+    struct _node	*n_child;
+} node;
+
+PyAPI_FUNC(node *) PyNode_New(int type);
+PyAPI_FUNC(int) PyNode_AddChild(node *n, int type,
+                                      char *str, int lineno);
+PyAPI_FUNC(void) PyNode_Free(node *n);
+
+/* Node access functions */
+#define NCH(n)		((n)->n_nchildren)
+	
+#define CHILD(n, i)	(&(n)->n_child[i])
+#define RCHILD(n, i)	(CHILD(n, NCH(n) + i))
+#define TYPE(n)		((n)->n_type)
+#define STR(n)		((n)->n_str)
+
+/* Assert that the type of a node is what we expect */
+#define REQ(n, type) assert(TYPE(n) == (type))
+
+PyAPI_FUNC(void) PyNode_ListTree(node *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_NODE_H */
diff --git a/depot_tools/release/win/python_24/include/object.h b/depot_tools/release/win/python_24/include/object.h
new file mode 100644
index 0000000..fd7c235
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/object.h
@@ -0,0 +1,807 @@
+#ifndef Py_OBJECT_H
+#define Py_OBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Object and type object interface */
+
+/*
+Objects are structures allocated on the heap.  Special rules apply to
+the use of objects to ensure they are properly garbage-collected.
+Objects are never allocated statically or on the stack; they must be
+accessed through special macros and functions only.  (Type objects are
+exceptions to the first rule; the standard types are represented by
+statically initialized type objects, although work on type/class unification
+for Python 2.2 made it possible to have heap-allocated type objects too).
+
+An object has a 'reference count' that is increased or decreased when a
+pointer to the object is copied or deleted; when the reference count
+reaches zero there are no references to the object left and it can be
+removed from the heap.
+
+An object has a 'type' that determines what it represents and what kind
+of data it contains.  An object's type is fixed when it is created.
+Types themselves are represented as objects; an object contains a
+pointer to the corresponding type object.  The type itself has a type
+pointer pointing to the object representing the type 'type', which
+contains a pointer to itself!
+
+Objects do not float around in memory; once allocated an object keeps
+the same size and address.  Objects that must hold variable-size data
+can contain pointers to variable-size parts of the object.  Not all
+objects of the same type have the same size; but the size cannot change
+after allocation.  (These restrictions are made so a reference to an
+object can be simply a pointer -- moving an object would require
+updating all the pointers, and changing an object's size would require
+moving it if there was another object right next to it.)
+
+Objects are always accessed through pointers of the type 'PyObject *'.
+The type 'PyObject' is a structure that only contains the reference count
+and the type pointer.  The actual memory allocated for an object
+contains other data that can only be accessed after casting the pointer
+to a pointer to a longer structure type.  This longer type must start
+with the reference count and type fields; the macro PyObject_HEAD should be
+used for this (to accommodate for future changes).  The implementation
+of a particular object type can cast the object pointer to the proper
+type and back.
+
+A standard interface exists for objects that contain an array of items
+whose size is determined when the object is allocated.
+*/
+
+/* Py_DEBUG implies Py_TRACE_REFS. */
+#if defined(Py_DEBUG) && !defined(Py_TRACE_REFS)
+#define Py_TRACE_REFS
+#endif
+
+/* Py_TRACE_REFS implies Py_REF_DEBUG. */
+#if defined(Py_TRACE_REFS) && !defined(Py_REF_DEBUG)
+#define Py_REF_DEBUG
+#endif
+
+#ifdef Py_TRACE_REFS
+/* Define pointers to support a doubly-linked list of all live heap objects. */
+#define _PyObject_HEAD_EXTRA		\
+	struct _object *_ob_next;	\
+	struct _object *_ob_prev;
+
+#define _PyObject_EXTRA_INIT 0, 0,
+
+#else
+#define _PyObject_HEAD_EXTRA
+#define _PyObject_EXTRA_INIT
+#endif
+
+/* PyObject_HEAD defines the initial segment of every PyObject. */
+#define PyObject_HEAD			\
+	_PyObject_HEAD_EXTRA		\
+	int ob_refcnt;			\
+	struct _typeobject *ob_type;
+
+#define PyObject_HEAD_INIT(type)	\
+	_PyObject_EXTRA_INIT		\
+	1, type,
+
+/* PyObject_VAR_HEAD defines the initial segment of all variable-size
+ * container objects.  These end with a declaration of an array with 1
+ * element, but enough space is malloc'ed so that the array actually
+ * has room for ob_size elements.  Note that ob_size is an element count,
+ * not necessarily a byte count.
+ */
+#define PyObject_VAR_HEAD		\
+	PyObject_HEAD			\
+	int ob_size; /* Number of items in variable part */
+
+/* Nothing is actually declared to be a PyObject, but every pointer to
+ * a Python object can be cast to a PyObject*.  This is inheritance built
+ * by hand.  Similarly every pointer to a variable-size Python object can,
+ * in addition, be cast to PyVarObject*.
+ */
+typedef struct _object {
+	PyObject_HEAD
+} PyObject;
+
+typedef struct {
+	PyObject_VAR_HEAD
+} PyVarObject;
+
+
+/*
+Type objects contain a string containing the type name (to help somewhat
+in debugging), the allocation parameters (see PyObject_New() and
+PyObject_NewVar()),
+and methods for accessing objects of the type.  Methods are optional, a
+nil pointer meaning that particular kind of access is not available for
+this type.  The Py_DECREF() macro uses the tp_dealloc method without
+checking for a nil pointer; it should always be implemented except if
+the implementation can guarantee that the reference count will never
+reach zero (e.g., for statically allocated type objects).
+
+NB: the methods for certain type groups are now contained in separate
+method blocks.
+*/
+
+typedef PyObject * (*unaryfunc)(PyObject *);
+typedef PyObject * (*binaryfunc)(PyObject *, PyObject *);
+typedef PyObject * (*ternaryfunc)(PyObject *, PyObject *, PyObject *);
+typedef int (*inquiry)(PyObject *);
+typedef int (*coercion)(PyObject **, PyObject **);
+typedef PyObject *(*intargfunc)(PyObject *, int);
+typedef PyObject *(*intintargfunc)(PyObject *, int, int);
+typedef int(*intobjargproc)(PyObject *, int, PyObject *);
+typedef int(*intintobjargproc)(PyObject *, int, int, PyObject *);
+typedef int(*objobjargproc)(PyObject *, PyObject *, PyObject *);
+typedef int (*getreadbufferproc)(PyObject *, int, void **);
+typedef int (*getwritebufferproc)(PyObject *, int, void **);
+typedef int (*getsegcountproc)(PyObject *, int *);
+typedef int (*getcharbufferproc)(PyObject *, int, const char **);
+typedef int (*objobjproc)(PyObject *, PyObject *);
+typedef int (*visitproc)(PyObject *, void *);
+typedef int (*traverseproc)(PyObject *, visitproc, void *);
+
+typedef struct {
+	/* For numbers without flag bit Py_TPFLAGS_CHECKTYPES set, all
+	   arguments are guaranteed to be of the object's type (modulo
+	   coercion hacks -- i.e. if the type's coercion function
+	   returns other types, then these are allowed as well).  Numbers that
+	   have the Py_TPFLAGS_CHECKTYPES flag bit set should check *both*
+	   arguments for proper type and implement the necessary conversions
+	   in the slot functions themselves. */
+
+	binaryfunc nb_add;
+	binaryfunc nb_subtract;
+	binaryfunc nb_multiply;
+	binaryfunc nb_divide;
+	binaryfunc nb_remainder;
+	binaryfunc nb_divmod;
+	ternaryfunc nb_power;
+	unaryfunc nb_negative;
+	unaryfunc nb_positive;
+	unaryfunc nb_absolute;
+	inquiry nb_nonzero;
+	unaryfunc nb_invert;
+	binaryfunc nb_lshift;
+	binaryfunc nb_rshift;
+	binaryfunc nb_and;
+	binaryfunc nb_xor;
+	binaryfunc nb_or;
+	coercion nb_coerce;
+	unaryfunc nb_int;
+	unaryfunc nb_long;
+	unaryfunc nb_float;
+	unaryfunc nb_oct;
+	unaryfunc nb_hex;
+	/* Added in release 2.0 */
+	binaryfunc nb_inplace_add;
+	binaryfunc nb_inplace_subtract;
+	binaryfunc nb_inplace_multiply;
+	binaryfunc nb_inplace_divide;
+	binaryfunc nb_inplace_remainder;
+	ternaryfunc nb_inplace_power;
+	binaryfunc nb_inplace_lshift;
+	binaryfunc nb_inplace_rshift;
+	binaryfunc nb_inplace_and;
+	binaryfunc nb_inplace_xor;
+	binaryfunc nb_inplace_or;
+
+	/* Added in release 2.2 */
+	/* The following require the Py_TPFLAGS_HAVE_CLASS flag */
+	binaryfunc nb_floor_divide;
+	binaryfunc nb_true_divide;
+	binaryfunc nb_inplace_floor_divide;
+	binaryfunc nb_inplace_true_divide;
+} PyNumberMethods;
+
+typedef struct {
+	inquiry sq_length;
+	binaryfunc sq_concat;
+	intargfunc sq_repeat;
+	intargfunc sq_item;
+	intintargfunc sq_slice;
+	intobjargproc sq_ass_item;
+	intintobjargproc sq_ass_slice;
+	objobjproc sq_contains;
+	/* Added in release 2.0 */
+	binaryfunc sq_inplace_concat;
+	intargfunc sq_inplace_repeat;
+} PySequenceMethods;
+
+typedef struct {
+	inquiry mp_length;
+	binaryfunc mp_subscript;
+	objobjargproc mp_ass_subscript;
+} PyMappingMethods;
+
+typedef struct {
+	getreadbufferproc bf_getreadbuffer;
+	getwritebufferproc bf_getwritebuffer;
+	getsegcountproc bf_getsegcount;
+	getcharbufferproc bf_getcharbuffer;
+} PyBufferProcs;
+
+
+typedef void (*freefunc)(void *);
+typedef void (*destructor)(PyObject *);
+typedef int (*printfunc)(PyObject *, FILE *, int);
+typedef PyObject *(*getattrfunc)(PyObject *, char *);
+typedef PyObject *(*getattrofunc)(PyObject *, PyObject *);
+typedef int (*setattrfunc)(PyObject *, char *, PyObject *);
+typedef int (*setattrofunc)(PyObject *, PyObject *, PyObject *);
+typedef int (*cmpfunc)(PyObject *, PyObject *);
+typedef PyObject *(*reprfunc)(PyObject *);
+typedef long (*hashfunc)(PyObject *);
+typedef PyObject *(*richcmpfunc) (PyObject *, PyObject *, int);
+typedef PyObject *(*getiterfunc) (PyObject *);
+typedef PyObject *(*iternextfunc) (PyObject *);
+typedef PyObject *(*descrgetfunc) (PyObject *, PyObject *, PyObject *);
+typedef int (*descrsetfunc) (PyObject *, PyObject *, PyObject *);
+typedef int (*initproc)(PyObject *, PyObject *, PyObject *);
+typedef PyObject *(*newfunc)(struct _typeobject *, PyObject *, PyObject *);
+typedef PyObject *(*allocfunc)(struct _typeobject *, int);
+
+typedef struct _typeobject {
+	PyObject_VAR_HEAD
+	char *tp_name; /* For printing, in format "<module>.<name>" */
+	int tp_basicsize, tp_itemsize; /* For allocation */
+
+	/* Methods to implement standard operations */
+
+	destructor tp_dealloc;
+	printfunc tp_print;
+	getattrfunc tp_getattr;
+	setattrfunc tp_setattr;
+	cmpfunc tp_compare;
+	reprfunc tp_repr;
+
+	/* Method suites for standard classes */
+
+	PyNumberMethods *tp_as_number;
+	PySequenceMethods *tp_as_sequence;
+	PyMappingMethods *tp_as_mapping;
+
+	/* More standard operations (here for binary compatibility) */
+
+	hashfunc tp_hash;
+	ternaryfunc tp_call;
+	reprfunc tp_str;
+	getattrofunc tp_getattro;
+	setattrofunc tp_setattro;
+
+	/* Functions to access object as input/output buffer */
+	PyBufferProcs *tp_as_buffer;
+
+	/* Flags to define presence of optional/expanded features */
+	long tp_flags;
+
+	char *tp_doc; /* Documentation string */
+
+	/* Assigned meaning in release 2.0 */
+	/* call function for all accessible objects */
+	traverseproc tp_traverse;
+
+	/* delete references to contained objects */
+	inquiry tp_clear;
+
+	/* Assigned meaning in release 2.1 */
+	/* rich comparisons */
+	richcmpfunc tp_richcompare;
+
+	/* weak reference enabler */
+	long tp_weaklistoffset;
+
+	/* Added in release 2.2 */
+	/* Iterators */
+	getiterfunc tp_iter;
+	iternextfunc tp_iternext;
+
+	/* Attribute descriptor and subclassing stuff */
+	struct PyMethodDef *tp_methods;
+	struct PyMemberDef *tp_members;
+	struct PyGetSetDef *tp_getset;
+	struct _typeobject *tp_base;
+	PyObject *tp_dict;
+	descrgetfunc tp_descr_get;
+	descrsetfunc tp_descr_set;
+	long tp_dictoffset;
+	initproc tp_init;
+	allocfunc tp_alloc;
+	newfunc tp_new;
+	freefunc tp_free; /* Low-level free-memory routine */
+	inquiry tp_is_gc; /* For PyObject_IS_GC */
+	PyObject *tp_bases;
+	PyObject *tp_mro; /* method resolution order */
+	PyObject *tp_cache;
+	PyObject *tp_subclasses;
+	PyObject *tp_weaklist;
+	destructor tp_del;
+
+#ifdef COUNT_ALLOCS
+	/* these must be last and never explicitly initialized */
+	int tp_allocs;
+	int tp_frees;
+	int tp_maxalloc;
+	struct _typeobject *tp_next;
+#endif
+} PyTypeObject;
+
+
+/* The *real* layout of a type object when allocated on the heap */
+typedef struct _heaptypeobject {
+	/* Note: there's a dependency on the order of these members
+	   in slotptr() in typeobject.c . */
+	PyTypeObject type;
+	PyNumberMethods as_number;
+	PyMappingMethods as_mapping;
+	PySequenceMethods as_sequence; /* as_sequence comes after as_mapping,
+					  so that the mapping wins when both
+					  the mapping and the sequence define
+					  a given operator (e.g. __getitem__).
+					  see add_operators() in typeobject.c . */
+	PyBufferProcs as_buffer;
+	PyObject *name, *slots;
+	/* here are optional user slots, followed by the members. */
+} PyHeapTypeObject;
+
+/* access macro to the members which are floating "behind" the object */
+#define PyHeapType_GET_MEMBERS(etype) \
+    ((PyMemberDef *)(((char *)etype) + (etype)->type.ob_type->tp_basicsize))
+
+
+/* Generic type check */
+PyAPI_FUNC(int) PyType_IsSubtype(PyTypeObject *, PyTypeObject *);
+#define PyObject_TypeCheck(ob, tp) \
+	((ob)->ob_type == (tp) || PyType_IsSubtype((ob)->ob_type, (tp)))
+
+PyAPI_DATA(PyTypeObject) PyType_Type; /* built-in 'type' */
+PyAPI_DATA(PyTypeObject) PyBaseObject_Type; /* built-in 'object' */
+PyAPI_DATA(PyTypeObject) PySuper_Type; /* built-in 'super' */
+
+#define PyType_Check(op) PyObject_TypeCheck(op, &PyType_Type)
+#define PyType_CheckExact(op) ((op)->ob_type == &PyType_Type)
+
+PyAPI_FUNC(int) PyType_Ready(PyTypeObject *);
+PyAPI_FUNC(PyObject *) PyType_GenericAlloc(PyTypeObject *, int);
+PyAPI_FUNC(PyObject *) PyType_GenericNew(PyTypeObject *,
+					       PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) _PyType_Lookup(PyTypeObject *, PyObject *);
+
+/* Generic operations on objects */
+PyAPI_FUNC(int) PyObject_Print(PyObject *, FILE *, int);
+PyAPI_FUNC(void) _PyObject_Dump(PyObject *);
+PyAPI_FUNC(PyObject *) PyObject_Repr(PyObject *);
+PyAPI_FUNC(PyObject *) PyObject_Str(PyObject *);
+#ifdef Py_USING_UNICODE
+PyAPI_FUNC(PyObject *) PyObject_Unicode(PyObject *);
+#endif
+PyAPI_FUNC(int) PyObject_Compare(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyObject_RichCompare(PyObject *, PyObject *, int);
+PyAPI_FUNC(int) PyObject_RichCompareBool(PyObject *, PyObject *, int);
+PyAPI_FUNC(PyObject *) PyObject_GetAttrString(PyObject *, char *);
+PyAPI_FUNC(int) PyObject_SetAttrString(PyObject *, char *, PyObject *);
+PyAPI_FUNC(int) PyObject_HasAttrString(PyObject *, char *);
+PyAPI_FUNC(PyObject *) PyObject_GetAttr(PyObject *, PyObject *);
+PyAPI_FUNC(int) PyObject_SetAttr(PyObject *, PyObject *, PyObject *);
+PyAPI_FUNC(int) PyObject_HasAttr(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject **) _PyObject_GetDictPtr(PyObject *);
+PyAPI_FUNC(PyObject *) PyObject_SelfIter(PyObject *);
+PyAPI_FUNC(PyObject *) PyObject_GenericGetAttr(PyObject *, PyObject *);
+PyAPI_FUNC(int) PyObject_GenericSetAttr(PyObject *,
+					      PyObject *, PyObject *);
+PyAPI_FUNC(long) PyObject_Hash(PyObject *);
+PyAPI_FUNC(int) PyObject_IsTrue(PyObject *);
+PyAPI_FUNC(int) PyObject_Not(PyObject *);
+PyAPI_FUNC(int) PyCallable_Check(PyObject *);
+PyAPI_FUNC(int) PyNumber_Coerce(PyObject **, PyObject **);
+PyAPI_FUNC(int) PyNumber_CoerceEx(PyObject **, PyObject **);
+
+PyAPI_FUNC(void) PyObject_ClearWeakRefs(PyObject *);
+
+/* A slot function whose address we need to compare */
+extern int _PyObject_SlotCompare(PyObject *, PyObject *);
+
+
+/* PyObject_Dir(obj) acts like Python __builtin__.dir(obj), returning a
+   list of strings.  PyObject_Dir(NULL) is like __builtin__.dir(),
+   returning the names of the current locals.  In this case, if there are
+   no current locals, NULL is returned, and PyErr_Occurred() is false.
+*/
+PyAPI_FUNC(PyObject *) PyObject_Dir(PyObject *);
+
+
+/* Helpers for printing recursive container types */
+PyAPI_FUNC(int) Py_ReprEnter(PyObject *);
+PyAPI_FUNC(void) Py_ReprLeave(PyObject *);
+
+/* Helpers for hash functions */
+PyAPI_FUNC(long) _Py_HashDouble(double);
+PyAPI_FUNC(long) _Py_HashPointer(void*);
+
+/* Helper for passing objects to printf and the like */
+#define PyObject_REPR(obj) PyString_AS_STRING(PyObject_Repr(obj))
+
+/* Flag bits for printing: */
+#define Py_PRINT_RAW	1	/* No string quotes etc. */
+
+/*
+Type flags (tp_flags)
+
+These flags are used to extend the type structure in a backwards-compatible
+fashion. Extensions can use the flags to indicate (and test) when a given
+type structure contains a new feature. The Python core will use these when
+introducing new functionality between major revisions (to avoid mid-version
+changes in the PYTHON_API_VERSION).
+
+Arbitration of the flag bit positions will need to be coordinated among
+all extension writers who publicly release their extensions (this will
+be fewer than you might expect!).
+
+Python 1.5.2 introduced the bf_getcharbuffer slot into PyBufferProcs.
+
+Type definitions should use Py_TPFLAGS_DEFAULT for their tp_flags value.
+
+Code can use PyType_HasFeature(type_ob, flag_value) to test whether the
+given type object has a specified feature.
+*/
+
+/* PyBufferProcs contains bf_getcharbuffer */
+#define Py_TPFLAGS_HAVE_GETCHARBUFFER  (1L<<0)
+
+/* PySequenceMethods contains sq_contains */
+#define Py_TPFLAGS_HAVE_SEQUENCE_IN (1L<<1)
+
+/* This is here for backwards compatibility.  Extensions that use the old GC
+ * API will still compile but the objects will not be tracked by the GC. */
+#define Py_TPFLAGS_GC 0 /* used to be (1L<<2) */
+
+/* PySequenceMethods and PyNumberMethods contain in-place operators */
+#define Py_TPFLAGS_HAVE_INPLACEOPS (1L<<3)
+
+/* PyNumberMethods do their own coercion */
+#define Py_TPFLAGS_CHECKTYPES (1L<<4)
+
+/* tp_richcompare is defined */
+#define Py_TPFLAGS_HAVE_RICHCOMPARE (1L<<5)
+
+/* Objects which are weakly referencable if their tp_weaklistoffset is >0 */
+#define Py_TPFLAGS_HAVE_WEAKREFS (1L<<6)
+
+/* tp_iter is defined */
+#define Py_TPFLAGS_HAVE_ITER (1L<<7)
+
+/* New members introduced by Python 2.2 exist */
+#define Py_TPFLAGS_HAVE_CLASS (1L<<8)
+
+/* Set if the type object is dynamically allocated */
+#define Py_TPFLAGS_HEAPTYPE (1L<<9)
+
+/* Set if the type allows subclassing */
+#define Py_TPFLAGS_BASETYPE (1L<<10)
+
+/* Set if the type is 'ready' -- fully initialized */
+#define Py_TPFLAGS_READY (1L<<12)
+
+/* Set while the type is being 'readied', to prevent recursive ready calls */
+#define Py_TPFLAGS_READYING (1L<<13)
+
+/* Objects support garbage collection (see objimp.h) */
+#define Py_TPFLAGS_HAVE_GC (1L<<14)
+
+/* These two bits are preserved for Stackless Python, next after this is 16 */
+#ifdef STACKLESS
+#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION (3L<<15)
+#else
+#define Py_TPFLAGS_HAVE_STACKLESS_EXTENSION 0
+#endif
+
+#define Py_TPFLAGS_DEFAULT  ( \
+                             Py_TPFLAGS_HAVE_GETCHARBUFFER | \
+                             Py_TPFLAGS_HAVE_SEQUENCE_IN | \
+                             Py_TPFLAGS_HAVE_INPLACEOPS | \
+                             Py_TPFLAGS_HAVE_RICHCOMPARE | \
+                             Py_TPFLAGS_HAVE_WEAKREFS | \
+                             Py_TPFLAGS_HAVE_ITER | \
+                             Py_TPFLAGS_HAVE_CLASS | \
+                             Py_TPFLAGS_HAVE_STACKLESS_EXTENSION | \
+                            0)
+
+#define PyType_HasFeature(t,f)  (((t)->tp_flags & (f)) != 0)
+
+
+/*
+The macros Py_INCREF(op) and Py_DECREF(op) are used to increment or decrement
+reference counts.  Py_DECREF calls the object's deallocator function when
+the refcount falls to 0; for
+objects that don't contain references to other objects or heap memory
+this can be the standard function free().  Both macros can be used
+wherever a void expression is allowed.  The argument must not be a
+NIL pointer.  If it may be NIL, use Py_XINCREF/Py_XDECREF instead.
+The macro _Py_NewReference(op) initializes the reference count to 1, and
+in special builds (Py_REF_DEBUG, Py_TRACE_REFS) performs additional
+bookkeeping appropriate to the special build.
+
+We assume that the reference count field can never overflow; this can
+be proven when the size of the field is the same as the pointer size, so
+we ignore the possibility.  Provided a C int is at least 32 bits (which
+is implicitly assumed in many parts of this code), that's enough for
+about 2**31 references to an object.
+
+XXX The following became out of date in Python 2.2, but I'm not sure
+XXX what the full truth is now.  Certainly, heap-allocated type objects
+XXX can and should be deallocated.
+Type objects should never be deallocated; the type pointer in an object
+is not considered to be a reference to the type object, to save
+complications in the deallocation function.  (This is actually a
+decision that's up to the implementer of each new type so if you want,
+you can count such references to the type object.)
+
+*** WARNING*** The Py_DECREF macro must have a side-effect-free argument
+since it may evaluate its argument multiple times.  (The alternative
+would be to make it a proper function or assign it to a global temporary
+variable first, both of which are slower; and in a multi-threaded
+environment the global variable trick is not safe.)
+*/
+
+/* First define a pile of simple helper macros, one set per special
+ * build symbol.  These either expand to the obvious things, or to
+ * nothing at all when the special mode isn't in effect.  The main
+ * macros can later be defined just once then, yet expand to different
+ * things depending on which special build options are and aren't in effect.
+ * Trust me <wink>:  while painful, this is 20x easier to understand than,
+ * e.g, defining _Py_NewReference five different times in a maze of nested
+ * #ifdefs (we used to do that -- it was impenetrable).
+ */
+#ifdef Py_REF_DEBUG
+PyAPI_DATA(long) _Py_RefTotal;
+PyAPI_FUNC(void) _Py_NegativeRefcount(const char *fname,
+					    int lineno, PyObject *op);
+#define _Py_INC_REFTOTAL	_Py_RefTotal++
+#define _Py_DEC_REFTOTAL	_Py_RefTotal--
+#define _Py_REF_DEBUG_COMMA	,
+#define _Py_CHECK_REFCNT(OP)					\
+{	if ((OP)->ob_refcnt < 0)				\
+		_Py_NegativeRefcount(__FILE__, __LINE__,	\
+				     (PyObject *)(OP));		\
+}
+#else
+#define _Py_INC_REFTOTAL
+#define _Py_DEC_REFTOTAL
+#define _Py_REF_DEBUG_COMMA
+#define _Py_CHECK_REFCNT(OP)	/* a semicolon */;
+#endif /* Py_REF_DEBUG */
+
+#ifdef COUNT_ALLOCS
+PyAPI_FUNC(void) inc_count(PyTypeObject *);
+#define _Py_INC_TPALLOCS(OP)	inc_count((OP)->ob_type)
+#define _Py_INC_TPFREES(OP)	(OP)->ob_type->tp_frees++
+#define _Py_DEC_TPFREES(OP)	(OP)->ob_type->tp_frees--
+#define _Py_COUNT_ALLOCS_COMMA	,
+#else
+#define _Py_INC_TPALLOCS(OP)
+#define _Py_INC_TPFREES(OP)
+#define _Py_DEC_TPFREES(OP)
+#define _Py_COUNT_ALLOCS_COMMA
+#endif /* COUNT_ALLOCS */
+
+#ifdef Py_TRACE_REFS
+/* Py_TRACE_REFS is such major surgery that we call external routines. */
+PyAPI_FUNC(void) _Py_NewReference(PyObject *);
+PyAPI_FUNC(void) _Py_ForgetReference(PyObject *);
+PyAPI_FUNC(void) _Py_Dealloc(PyObject *);
+PyAPI_FUNC(void) _Py_PrintReferences(FILE *);
+PyAPI_FUNC(void) _Py_PrintReferenceAddresses(FILE *);
+PyAPI_FUNC(void) _Py_AddToAllObjects(PyObject *, int force);
+
+#else
+/* Without Py_TRACE_REFS, there's little enough to do that we expand code
+ * inline.
+ */
+#define _Py_NewReference(op) (				\
+	_Py_INC_TPALLOCS(op) _Py_COUNT_ALLOCS_COMMA	\
+	_Py_INC_REFTOTAL  _Py_REF_DEBUG_COMMA		\
+	(op)->ob_refcnt = 1)
+
+#define _Py_ForgetReference(op) _Py_INC_TPFREES(op)
+
+#define _Py_Dealloc(op) (				\
+	_Py_INC_TPFREES(op) _Py_COUNT_ALLOCS_COMMA	\
+	(*(op)->ob_type->tp_dealloc)((PyObject *)(op)))
+#endif /* !Py_TRACE_REFS */
+
+#define Py_INCREF(op) (				\
+	_Py_INC_REFTOTAL  _Py_REF_DEBUG_COMMA	\
+	(op)->ob_refcnt++)
+
+#define Py_DECREF(op)					\
+	if (_Py_DEC_REFTOTAL  _Py_REF_DEBUG_COMMA	\
+	    --(op)->ob_refcnt != 0)			\
+		_Py_CHECK_REFCNT(op)			\
+	else						\
+		_Py_Dealloc((PyObject *)(op))
+
+#define Py_CLEAR(op)				\
+        do {                            	\
+                if (op) {			\
+                        PyObject *tmp = (PyObject *)(op);	\
+                        (op) = NULL;		\
+                        Py_DECREF(tmp);		\
+                }				\
+        } while (0)
+
+/* Macros to use in case the object pointer may be NULL: */
+#define Py_XINCREF(op) if ((op) == NULL) ; else Py_INCREF(op)
+#define Py_XDECREF(op) if ((op) == NULL) ; else Py_DECREF(op)
+
+/*
+These are provided as conveniences to Python runtime embedders, so that
+they can have object code that is not dependent on Python compilation flags.
+*/
+PyAPI_FUNC(void) Py_IncRef(PyObject *);
+PyAPI_FUNC(void) Py_DecRef(PyObject *);
+
+/*
+_Py_NoneStruct is an object of undefined type which can be used in contexts
+where NULL (nil) is not suitable (since NULL often means 'error').
+
+Don't forget to apply Py_INCREF() when returning this value!!!
+*/
+PyAPI_DATA(PyObject) _Py_NoneStruct; /* Don't use this directly */
+#define Py_None (&_Py_NoneStruct)
+
+/* Macro for returning Py_None from a function */
+#define Py_RETURN_NONE return Py_INCREF(Py_None), Py_None
+
+/*
+Py_NotImplemented is a singleton used to signal that an operation is
+not implemented for a given type combination.
+*/
+PyAPI_DATA(PyObject) _Py_NotImplementedStruct; /* Don't use this directly */
+#define Py_NotImplemented (&_Py_NotImplementedStruct)
+
+/* Rich comparison opcodes */
+#define Py_LT 0
+#define Py_LE 1
+#define Py_EQ 2
+#define Py_NE 3
+#define Py_GT 4
+#define Py_GE 5
+
+/* Maps Py_LT to Py_GT, ..., Py_GE to Py_LE.
+ * Defined in object.c.
+ */
+PyAPI_DATA(int) _Py_SwappedOp[];
+
+/*
+Define staticforward and statichere for source compatibility with old
+C extensions.
+
+The staticforward define was needed to support certain broken C
+compilers (notably SCO ODT 3.0, perhaps early AIX as well) that botched the
+static keyword when it was used with a forward declaration of a static
+initialized structure.  Standard C allows the forward declaration with
+static, and we've decided to stop catering to broken C compilers.
+(In fact, we expect that the compilers are all fixed eight years later.)
+*/
+
+#define staticforward static
+#define statichere static
+
+
+/*
+More conventions
+================
+
+Argument Checking
+-----------------
+
+Functions that take objects as arguments normally don't check for nil
+arguments, but they do check the type of the argument, and return an
+error if the function doesn't apply to the type.
+
+Failure Modes
+-------------
+
+Functions may fail for a variety of reasons, including running out of
+memory.  This is communicated to the caller in two ways: an error string
+is set (see errors.h), and the function result differs: functions that
+normally return a pointer return NULL for failure, functions returning
+an integer return -1 (which could be a legal return value too!), and
+other functions return 0 for success and -1 for failure.
+Callers should always check for errors before using the result.  If
+an error was set, the caller must either explicitly clear it, or pass
+the error on to its caller.
+
+Reference Counts
+----------------
+
+It takes a while to get used to the proper usage of reference counts.
+
+Functions that create an object set the reference count to 1; such new
+objects must be stored somewhere or destroyed again with Py_DECREF().
+Some functions that 'store' objects, such as PyTuple_SetItem() and
+PyList_SetItem(),
+don't increment the reference count of the object, since the most
+frequent use is to store a fresh object.  Functions that 'retrieve'
+objects, such as PyTuple_GetItem() and PyDict_GetItemString(), also
+don't increment
+the reference count, since most frequently the object is only looked at
+quickly.  Thus, to retrieve an object and store it again, the caller
+must call Py_INCREF() explicitly.
+
+NOTE: functions that 'consume' a reference count, like
+PyList_SetItem(), consume the reference even if the object wasn't
+successfully stored, to simplify error handling.
+
+It seems attractive to make other functions that take an object as
+argument consume a reference count; however, this may quickly get
+confusing (even the current practice is already confusing).  Consider
+it carefully, it may save lots of calls to Py_INCREF() and Py_DECREF() at
+times.
+*/
+
+
+/* Trashcan mechanism, thanks to Christian Tismer.
+
+When deallocating a container object, it's possible to trigger an unbounded
+chain of deallocations, as each Py_DECREF in turn drops the refcount on "the
+next" object in the chain to 0.  This can easily lead to stack faults, and
+especially in threads (which typically have less stack space to work with).
+
+A container object that participates in cyclic gc can avoid this by
+bracketing the body of its tp_dealloc function with a pair of macros:
+
+static void
+mytype_dealloc(mytype *p)
+{
+        ... declarations go here ...
+
+ 	PyObject_GC_UnTrack(p);	   // must untrack first
+	Py_TRASHCAN_SAFE_BEGIN(p)
+	... The body of the deallocator goes here, including all calls ...
+	... to Py_DECREF on contained objects.                         ...
+	Py_TRASHCAN_SAFE_END(p)
+}
+
+CAUTION:  Never return from the middle of the body!  If the body needs to
+"get out early", put a label immediately before the Py_TRASHCAN_SAFE_END
+call, and goto it.  Else the call-depth counter (see below) will stay
+above 0 forever, and the trashcan will never get emptied.
+
+How it works:  The BEGIN macro increments a call-depth counter.  So long
+as this counter is small, the body of the deallocator is run directly without
+further ado.  But if the counter gets large, it instead adds p to a list of
+objects to be deallocated later, skips the body of the deallocator, and
+resumes execution after the END macro.  The tp_dealloc routine then returns
+without deallocating anything (and so unbounded call-stack depth is avoided).
+
+When the call stack finishes unwinding again, code generated by the END macro
+notices this, and calls another routine to deallocate all the objects that
+may have been added to the list of deferred deallocations.  In effect, a
+chain of N deallocations is broken into N / PyTrash_UNWIND_LEVEL pieces,
+with the call stack never exceeding a depth of PyTrash_UNWIND_LEVEL.
+*/
+
+PyAPI_FUNC(void) _PyTrash_deposit_object(PyObject*);
+PyAPI_FUNC(void) _PyTrash_destroy_chain(void);
+PyAPI_DATA(int) _PyTrash_delete_nesting;
+PyAPI_DATA(PyObject *) _PyTrash_delete_later;
+
+#define PyTrash_UNWIND_LEVEL 50
+
+#define Py_TRASHCAN_SAFE_BEGIN(op) \
+	if (_PyTrash_delete_nesting < PyTrash_UNWIND_LEVEL) { \
+		++_PyTrash_delete_nesting;
+		/* The body of the deallocator is here. */
+#define Py_TRASHCAN_SAFE_END(op) \
+		--_PyTrash_delete_nesting; \
+		if (_PyTrash_delete_later && _PyTrash_delete_nesting <= 0) \
+			_PyTrash_destroy_chain(); \
+	} \
+	else \
+		_PyTrash_deposit_object((PyObject*)op);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_OBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/objimpl.h b/depot_tools/release/win/python_24/include/objimpl.h
new file mode 100644
index 0000000..5707e50
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/objimpl.h
@@ -0,0 +1,340 @@
+/* The PyObject_ memory family:  high-level object memory interfaces.
+   See pymem.h for the low-level PyMem_ family.
+*/
+
+#ifndef Py_OBJIMPL_H
+#define Py_OBJIMPL_H
+
+#include "pymem.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* BEWARE:
+
+   Each interface exports both functions and macros.  Extension modules should
+   use the functions, to ensure binary compatibility across Python versions.
+   Because the Python implementation is free to change internal details, and
+   the macros may (or may not) expose details for speed, if you do use the
+   macros you must recompile your extensions with each Python release.
+
+   Never mix calls to PyObject_ memory functions with calls to the platform
+   malloc/realloc/ calloc/free, or with calls to PyMem_.
+*/
+
+/*
+Functions and macros for modules that implement new object types.
+
+ - PyObject_New(type, typeobj) allocates memory for a new object of the given
+   type, and initializes part of it.  'type' must be the C structure type used
+   to represent the object, and 'typeobj' the address of the corresponding
+   type object.  Reference count and type pointer are filled in; the rest of
+   the bytes of the object are *undefined*!  The resulting expression type is
+   'type *'.  The size of the object is determined by the tp_basicsize field
+   of the type object.
+
+ - PyObject_NewVar(type, typeobj, n) is similar but allocates a variable-size
+   object with room for n items.  In addition to the refcount and type pointer
+   fields, this also fills in the ob_size field.
+
+ - PyObject_Del(op) releases the memory allocated for an object.  It does not
+   run a destructor -- it only frees the memory.  PyObject_Free is identical.
+
+ - PyObject_Init(op, typeobj) and PyObject_InitVar(op, typeobj, n) don't
+   allocate memory.  Instead of a 'type' parameter, they take a pointer to a
+   new object (allocated by an arbitrary allocator), and initialize its object
+   header fields.
+
+Note that objects created with PyObject_{New, NewVar} are allocated using the
+specialized Python allocator (implemented in obmalloc.c), if WITH_PYMALLOC is
+enabled.  In addition, a special debugging allocator is used if PYMALLOC_DEBUG
+is also #defined.
+
+In case a specific form of memory management is needed (for example, if you
+must use the platform malloc heap(s), or shared memory, or C++ local storage or
+operator new), you must first allocate the object with your custom allocator,
+then pass its pointer to PyObject_{Init, InitVar} for filling in its Python-
+specific fields:  reference count, type pointer, possibly others.  You should
+be aware that Python has no control over these objects because they don't
+cooperate with the Python memory manager.  Such objects may not be eligible
+for automatic garbage collection and you have to make sure that they are
+released accordingly whenever their destructor gets called (cf. the specific
+form of memory management you're using).
+
+Unless you have specific memory management requirements, use
+PyObject_{New, NewVar, Del}.
+*/
+
+/*
+ * Raw object memory interface
+ * ===========================
+ */
+
+/* Functions to call the same malloc/realloc/free as used by Python's
+   object allocator.  If WITH_PYMALLOC is enabled, these may differ from
+   the platform malloc/realloc/free.  The Python object allocator is
+   designed for fast, cache-conscious allocation of many "small" objects,
+   and with low hidden memory overhead.
+
+   PyObject_Malloc(0) returns a unique non-NULL pointer if possible.
+
+   PyObject_Realloc(NULL, n) acts like PyObject_Malloc(n).
+   PyObject_Realloc(p != NULL, 0) does not return NULL, or free the memory
+   at p.
+
+   Returned pointers must be checked for NULL explicitly; no action is
+   performed on failure other than to return NULL (no warning is printed, no
+   exception is set, etc).
+
+   For allocating objects, use PyObject_{New, NewVar} instead whenever
+   possible.  The PyObject_{Malloc, Realloc, Free} family is exposed
+   so that you can exploit Python's small-block allocator for non-object
+   uses.  If you must use these routines to allocate object memory, make sure
+   the object gets initialized via PyObject_{Init, InitVar} after obtaining
+   the raw memory.
+*/
+PyAPI_FUNC(void *) PyObject_Malloc(size_t);
+PyAPI_FUNC(void *) PyObject_Realloc(void *, size_t);
+PyAPI_FUNC(void) PyObject_Free(void *);
+
+
+/* Macros */
+#ifdef WITH_PYMALLOC
+#ifdef PYMALLOC_DEBUG
+PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes);
+PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes);
+PyAPI_FUNC(void) _PyObject_DebugFree(void *p);
+PyAPI_FUNC(void) _PyObject_DebugDumpAddress(const void *p);
+PyAPI_FUNC(void) _PyObject_DebugCheckAddress(const void *p);
+PyAPI_FUNC(void) _PyObject_DebugMallocStats(void);
+#define PyObject_MALLOC		_PyObject_DebugMalloc
+#define PyObject_Malloc		_PyObject_DebugMalloc
+#define PyObject_REALLOC	_PyObject_DebugRealloc
+#define PyObject_Realloc	_PyObject_DebugRealloc
+#define PyObject_FREE		_PyObject_DebugFree
+#define PyObject_Free		_PyObject_DebugFree
+
+#else	/* WITH_PYMALLOC && ! PYMALLOC_DEBUG */
+#define PyObject_MALLOC		PyObject_Malloc
+#define PyObject_REALLOC	PyObject_Realloc
+#define PyObject_FREE		PyObject_Free
+#endif
+
+#else	/* ! WITH_PYMALLOC */
+#define PyObject_MALLOC		PyMem_MALLOC
+#define PyObject_REALLOC	PyMem_REALLOC
+/* This is an odd one!  For backward compatibility with old extensions, the
+   PyMem "release memory" functions have to invoke the object allocator's
+   free() function.  When pymalloc isn't enabled, that leaves us using
+   the platform free(). */
+#define PyObject_FREE		free
+
+#endif	/* WITH_PYMALLOC */
+
+#define PyObject_Del		PyObject_Free
+#define PyObject_DEL		PyObject_FREE
+
+/* for source compatibility with 2.2 */
+#define _PyObject_Del		PyObject_Free
+
+/*
+ * Generic object allocator interface
+ * ==================================
+ */
+
+/* Functions */
+PyAPI_FUNC(PyObject *) PyObject_Init(PyObject *, PyTypeObject *);
+PyAPI_FUNC(PyVarObject *) PyObject_InitVar(PyVarObject *,
+                                                 PyTypeObject *, int);
+PyAPI_FUNC(PyObject *) _PyObject_New(PyTypeObject *);
+PyAPI_FUNC(PyVarObject *) _PyObject_NewVar(PyTypeObject *, int);
+
+#define PyObject_New(type, typeobj) \
+		( (type *) _PyObject_New(typeobj) )
+#define PyObject_NewVar(type, typeobj, n) \
+		( (type *) _PyObject_NewVar((typeobj), (n)) )
+
+/* Macros trading binary compatibility for speed. See also pymem.h.
+   Note that these macros expect non-NULL object pointers.*/
+#define PyObject_INIT(op, typeobj) \
+	( (op)->ob_type = (typeobj), _Py_NewReference((PyObject *)(op)), (op) )
+#define PyObject_INIT_VAR(op, typeobj, size) \
+	( (op)->ob_size = (size), PyObject_INIT((op), (typeobj)) )
+
+#define _PyObject_SIZE(typeobj) ( (typeobj)->tp_basicsize )
+
+/* _PyObject_VAR_SIZE returns the number of bytes (as size_t) allocated for a
+   vrbl-size object with nitems items, exclusive of gc overhead (if any).  The
+   value is rounded up to the closest multiple of sizeof(void *), in order to
+   ensure that pointer fields at the end of the object are correctly aligned
+   for the platform (this is of special importance for subclasses of, e.g.,
+   str or long, so that pointers can be stored after the embedded data).
+
+   Note that there's no memory wastage in doing this, as malloc has to
+   return (at worst) pointer-aligned memory anyway.
+*/
+#if ((SIZEOF_VOID_P - 1) & SIZEOF_VOID_P) != 0
+#   error "_PyObject_VAR_SIZE requires SIZEOF_VOID_P be a power of 2"
+#endif
+
+#define _PyObject_VAR_SIZE(typeobj, nitems)	\
+	(size_t)				\
+	( ( (typeobj)->tp_basicsize +		\
+	    (nitems)*(typeobj)->tp_itemsize +	\
+	    (SIZEOF_VOID_P - 1)			\
+	  ) & ~(SIZEOF_VOID_P - 1)		\
+	)
+
+#define PyObject_NEW(type, typeobj) \
+( (type *) PyObject_Init( \
+	(PyObject *) PyObject_MALLOC( _PyObject_SIZE(typeobj) ), (typeobj)) )
+
+#define PyObject_NEW_VAR(type, typeobj, n) \
+( (type *) PyObject_InitVar( \
+      (PyVarObject *) PyObject_MALLOC(_PyObject_VAR_SIZE((typeobj),(n)) ),\
+      (typeobj), (n)) )
+
+/* This example code implements an object constructor with a custom
+   allocator, where PyObject_New is inlined, and shows the important
+   distinction between two steps (at least):
+       1) the actual allocation of the object storage;
+       2) the initialization of the Python specific fields
+          in this storage with PyObject_{Init, InitVar}.
+
+   PyObject *
+   YourObject_New(...)
+   {
+       PyObject *op;
+
+       op = (PyObject *) Your_Allocator(_PyObject_SIZE(YourTypeStruct));
+       if (op == NULL)
+           return PyErr_NoMemory();
+
+       PyObject_Init(op, &YourTypeStruct);
+
+       op->ob_field = value;
+       ...
+       return op;
+   }
+
+   Note that in C++, the use of the new operator usually implies that
+   the 1st step is performed automatically for you, so in a C++ class
+   constructor you would start directly with PyObject_Init/InitVar
+*/
+
+/*
+ * Garbage Collection Support
+ * ==========================
+ */
+
+/* C equivalent of gc.collect(). */
+long PyGC_Collect(void);
+
+/* Test if a type has a GC head */
+#define PyType_IS_GC(t) PyType_HasFeature((t), Py_TPFLAGS_HAVE_GC)
+
+/* Test if an object has a GC head */
+#define PyObject_IS_GC(o) (PyType_IS_GC((o)->ob_type) && \
+	((o)->ob_type->tp_is_gc == NULL || (o)->ob_type->tp_is_gc(o)))
+
+PyAPI_FUNC(PyVarObject *) _PyObject_GC_Resize(PyVarObject *, int);
+#define PyObject_GC_Resize(type, op, n) \
+		( (type *) _PyObject_GC_Resize((PyVarObject *)(op), (n)) )
+
+/* for source compatibility with 2.2 */
+#define _PyObject_GC_Del PyObject_GC_Del
+
+/* GC information is stored BEFORE the object structure. */
+typedef union _gc_head {
+	struct {
+		union _gc_head *gc_next;
+		union _gc_head *gc_prev;
+		int gc_refs;
+	} gc;
+	long double dummy;  /* force worst-case alignment */
+} PyGC_Head;
+
+extern PyGC_Head *_PyGC_generation0;
+
+#define _Py_AS_GC(o) ((PyGC_Head *)(o)-1)
+
+#define _PyGC_REFS_UNTRACKED			(-2)
+#define _PyGC_REFS_REACHABLE			(-3)
+#define _PyGC_REFS_TENTATIVELY_UNREACHABLE	(-4)
+
+/* Tell the GC to track this object.  NB: While the object is tracked by the
+ * collector, it must be safe to call the ob_traverse method. */
+#define _PyObject_GC_TRACK(o) do { \
+	PyGC_Head *g = _Py_AS_GC(o); \
+	if (g->gc.gc_refs != _PyGC_REFS_UNTRACKED) \
+		Py_FatalError("GC object already tracked"); \
+	g->gc.gc_refs = _PyGC_REFS_REACHABLE; \
+	g->gc.gc_next = _PyGC_generation0; \
+	g->gc.gc_prev = _PyGC_generation0->gc.gc_prev; \
+	g->gc.gc_prev->gc.gc_next = g; \
+	_PyGC_generation0->gc.gc_prev = g; \
+    } while (0);
+
+/* Tell the GC to stop tracking this object.
+ * gc_next doesn't need to be set to NULL, but doing so is a good
+ * way to provoke memory errors if calling code is confused.
+ */
+#define _PyObject_GC_UNTRACK(o) do { \
+	PyGC_Head *g = _Py_AS_GC(o); \
+	assert(g->gc.gc_refs != _PyGC_REFS_UNTRACKED); \
+	g->gc.gc_refs = _PyGC_REFS_UNTRACKED; \
+	g->gc.gc_prev->gc.gc_next = g->gc.gc_next; \
+	g->gc.gc_next->gc.gc_prev = g->gc.gc_prev; \
+	g->gc.gc_next = NULL; \
+    } while (0);
+
+PyAPI_FUNC(PyObject *) _PyObject_GC_Malloc(size_t);
+PyAPI_FUNC(PyObject *) _PyObject_GC_New(PyTypeObject *);
+PyAPI_FUNC(PyVarObject *) _PyObject_GC_NewVar(PyTypeObject *, int);
+PyAPI_FUNC(void) PyObject_GC_Track(void *);
+PyAPI_FUNC(void) PyObject_GC_UnTrack(void *);
+PyAPI_FUNC(void) PyObject_GC_Del(void *);
+
+#define PyObject_GC_New(type, typeobj) \
+		( (type *) _PyObject_GC_New(typeobj) )
+#define PyObject_GC_NewVar(type, typeobj, n) \
+		( (type *) _PyObject_GC_NewVar((typeobj), (n)) )
+
+
+/* Utility macro to help write tp_traverse functions.
+ * To use this macro, the tp_traverse function must name its arguments
+ * "visit" and "arg".  This is intended to keep tp_traverse functions
+ * looking as much alike as possible.
+ */
+#define Py_VISIT(op)					\
+        do { 						\
+                if (op) {				\
+                        int vret = visit((op), arg);	\
+                        if (vret)			\
+                                return vret;		\
+                }					\
+        } while (0)
+
+/* This is here for the sake of backwards compatibility.  Extensions that
+ * use the old GC API will still compile but the objects will not be
+ * tracked by the GC. */
+#define PyGC_HEAD_SIZE 0
+#define PyObject_GC_Init(op)
+#define PyObject_GC_Fini(op)
+#define PyObject_AS_GC(op) (op)
+#define PyObject_FROM_GC(op) (op)
+
+
+/* Test if a type supports weak references */
+#define PyType_SUPPORTS_WEAKREFS(t) \
+        (PyType_HasFeature((t), Py_TPFLAGS_HAVE_WEAKREFS) \
+         && ((t)->tp_weaklistoffset > 0))
+
+#define PyObject_GET_WEAKREFS_LISTPTR(o) \
+	((PyObject **) (((char *) (o)) + (o)->ob_type->tp_weaklistoffset))
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_OBJIMPL_H */
diff --git a/depot_tools/release/win/python_24/include/opcode.h b/depot_tools/release/win/python_24/include/opcode.h
new file mode 100644
index 0000000..868512f
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/opcode.h
@@ -0,0 +1,153 @@
+#ifndef Py_OPCODE_H
+#define Py_OPCODE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Instruction opcodes for compiled code */
+
+#define STOP_CODE	0
+#define POP_TOP		1
+#define ROT_TWO		2
+#define ROT_THREE	3
+#define DUP_TOP		4
+#define ROT_FOUR	5
+#define NOP		9
+
+#define UNARY_POSITIVE	10
+#define UNARY_NEGATIVE	11
+#define UNARY_NOT	12
+#define UNARY_CONVERT	13
+
+#define UNARY_INVERT	15
+
+#define LIST_APPEND	18
+#define BINARY_POWER	19
+
+#define BINARY_MULTIPLY	20
+#define BINARY_DIVIDE	21
+#define BINARY_MODULO	22
+#define BINARY_ADD	23
+#define BINARY_SUBTRACT	24
+#define BINARY_SUBSCR	25
+#define BINARY_FLOOR_DIVIDE 26
+#define BINARY_TRUE_DIVIDE 27
+#define INPLACE_FLOOR_DIVIDE 28
+#define INPLACE_TRUE_DIVIDE 29
+
+#define SLICE		30
+/* Also uses 31-33 */
+
+#define STORE_SLICE	40
+/* Also uses 41-43 */
+
+#define DELETE_SLICE	50
+/* Also uses 51-53 */
+
+#define INPLACE_ADD	55
+#define INPLACE_SUBTRACT	56
+#define INPLACE_MULTIPLY	57
+#define INPLACE_DIVIDE	58
+#define INPLACE_MODULO	59
+#define STORE_SUBSCR	60
+#define DELETE_SUBSCR	61
+
+#define BINARY_LSHIFT	62
+#define BINARY_RSHIFT	63
+#define BINARY_AND	64
+#define BINARY_XOR	65
+#define BINARY_OR	66
+#define INPLACE_POWER	67
+#define GET_ITER	68
+
+#define PRINT_EXPR	70
+#define PRINT_ITEM	71
+#define PRINT_NEWLINE	72
+#define PRINT_ITEM_TO   73
+#define PRINT_NEWLINE_TO 74
+#define INPLACE_LSHIFT	75
+#define INPLACE_RSHIFT	76
+#define INPLACE_AND	77
+#define INPLACE_XOR	78
+#define INPLACE_OR	79
+#define BREAK_LOOP	80
+
+#define LOAD_LOCALS	82
+#define RETURN_VALUE	83
+#define IMPORT_STAR	84
+#define EXEC_STMT	85
+#define YIELD_VALUE	86
+
+#define POP_BLOCK	87
+#define END_FINALLY	88
+#define BUILD_CLASS	89
+
+#define HAVE_ARGUMENT	90	/* Opcodes from here have an argument: */
+
+#define STORE_NAME	90	/* Index in name list */
+#define DELETE_NAME	91	/* "" */
+#define UNPACK_SEQUENCE	92	/* Number of sequence items */
+#define FOR_ITER	93
+
+#define STORE_ATTR	95	/* Index in name list */
+#define DELETE_ATTR	96	/* "" */
+#define STORE_GLOBAL	97	/* "" */
+#define DELETE_GLOBAL	98	/* "" */
+#define DUP_TOPX	99	/* number of items to duplicate */
+#define LOAD_CONST	100	/* Index in const list */
+#define LOAD_NAME	101	/* Index in name list */
+#define BUILD_TUPLE	102	/* Number of tuple items */
+#define BUILD_LIST	103	/* Number of list items */
+#define BUILD_MAP	104	/* Always zero for now */
+#define LOAD_ATTR	105	/* Index in name list */
+#define COMPARE_OP	106	/* Comparison operator */
+#define IMPORT_NAME	107	/* Index in name list */
+#define IMPORT_FROM	108	/* Index in name list */
+
+#define JUMP_FORWARD	110	/* Number of bytes to skip */
+#define JUMP_IF_FALSE	111	/* "" */
+#define JUMP_IF_TRUE	112	/* "" */
+#define JUMP_ABSOLUTE	113	/* Target byte offset from beginning of code */
+
+#define LOAD_GLOBAL	116	/* Index in name list */
+
+#define CONTINUE_LOOP	119	/* Start of loop (absolute) */
+#define SETUP_LOOP	120	/* Target address (absolute) */
+#define SETUP_EXCEPT	121	/* "" */
+#define SETUP_FINALLY	122	/* "" */
+
+#define LOAD_FAST	124	/* Local variable number */
+#define STORE_FAST	125	/* Local variable number */
+#define DELETE_FAST	126	/* Local variable number */
+
+#define RAISE_VARARGS	130	/* Number of raise arguments (1, 2 or 3) */
+/* CALL_FUNCTION_XXX opcodes defined below depend on this definition */
+#define CALL_FUNCTION	131	/* #args + (#kwargs<<8) */
+#define MAKE_FUNCTION	132	/* #defaults */
+#define BUILD_SLICE 	133	/* Number of items */
+
+#define MAKE_CLOSURE    134     /* #free vars */
+#define LOAD_CLOSURE    135     /* Load free variable from closure */
+#define LOAD_DEREF      136     /* Load and dereference from closure cell */ 
+#define STORE_DEREF     137     /* Store into cell */ 
+
+/* The next 3 opcodes must be contiguous and satisfy
+   (CALL_FUNCTION_VAR - CALL_FUNCTION) & 3 == 1  */
+#define CALL_FUNCTION_VAR          140	/* #args + (#kwargs<<8) */
+#define CALL_FUNCTION_KW           141	/* #args + (#kwargs<<8) */
+#define CALL_FUNCTION_VAR_KW       142	/* #args + (#kwargs<<8) */
+
+/* Support for opargs more than 16 bits long */
+#define EXTENDED_ARG  143
+
+
+enum cmp_op {PyCmp_LT=Py_LT, PyCmp_LE=Py_LE, PyCmp_EQ=Py_EQ, PyCmp_NE=Py_NE, PyCmp_GT=Py_GT, PyCmp_GE=Py_GE,
+	     PyCmp_IN, PyCmp_NOT_IN, PyCmp_IS, PyCmp_IS_NOT, PyCmp_EXC_MATCH, PyCmp_BAD};
+
+#define HAS_ARG(op) ((op) >= HAVE_ARGUMENT)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_OPCODE_H */
diff --git a/depot_tools/release/win/python_24/include/osdefs.h b/depot_tools/release/win/python_24/include/osdefs.h
new file mode 100644
index 0000000..8190a752
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/osdefs.h
@@ -0,0 +1,51 @@
+#ifndef Py_OSDEFS_H
+#define Py_OSDEFS_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Operating system dependencies */
+
+/* Mod by chrish: QNX has WATCOM, but isn't DOS */
+#if !defined(__QNX__)
+#if defined(MS_WINDOWS) || defined(__BORLANDC__) || defined(__WATCOMC__) || defined(__DJGPP__) || defined(PYOS_OS2)
+#if defined(PYOS_OS2) && defined(PYCC_GCC)
+#define MAXPATHLEN 260
+#define SEP '/'
+#define ALTSEP '\\'
+#else
+#define SEP '\\'
+#define ALTSEP '/'
+#define MAXPATHLEN 256
+#endif
+#define DELIM ';'
+#endif
+#endif
+
+#ifdef RISCOS
+#define SEP '.'
+#define MAXPATHLEN 256
+#define DELIM ','
+#endif
+
+
+/* Filename separator */
+#ifndef SEP
+#define SEP '/'
+#endif
+
+/* Max pathname length */
+#ifndef MAXPATHLEN
+#define MAXPATHLEN 1024
+#endif
+
+/* Search path entry delimiter */
+#ifndef DELIM
+#define DELIM ':'
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_OSDEFS_H */
diff --git a/depot_tools/release/win/python_24/include/parsetok.h b/depot_tools/release/win/python_24/include/parsetok.h
new file mode 100644
index 0000000..b788566
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/parsetok.h
@@ -0,0 +1,48 @@
+
+/* Parser-tokenizer link interface */
+
+#ifndef Py_PARSETOK_H
+#define Py_PARSETOK_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct {
+    int error;
+    const char *filename;
+    int lineno;
+    int offset;
+    char *text;
+    int token;
+    int expected;
+} perrdetail;
+
+#if 0
+#define PyPARSE_YIELD_IS_KEYWORD	0x0001
+#endif
+
+#define PyPARSE_DONT_IMPLY_DEDENT	0x0002
+
+PyAPI_FUNC(node *) PyParser_ParseString(const char *, grammar *, int,
+                                              perrdetail *);
+PyAPI_FUNC(node *) PyParser_ParseFile (FILE *, const char *, grammar *, int,
+                                             char *, char *, perrdetail *);
+
+PyAPI_FUNC(node *) PyParser_ParseStringFlags(const char *, grammar *, int,
+                                              perrdetail *, int);
+PyAPI_FUNC(node *) PyParser_ParseFileFlags(FILE *, const char *, grammar *,
+						 int, char *, char *,
+						 perrdetail *, int);
+
+PyAPI_FUNC(node *) PyParser_ParseStringFlagsFilename(const char *,
+					      const char *,
+					      grammar *, int,
+                                              perrdetail *, int);
+
+/* Note that the following function is defined in pythonrun.c, not parsetok.c. */
+PyAPI_FUNC(void) PyParser_SetError(perrdetail *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PARSETOK_H */
diff --git a/depot_tools/release/win/python_24/include/patchlevel.h b/depot_tools/release/win/python_24/include/patchlevel.h
new file mode 100644
index 0000000..d39beef
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/patchlevel.h
@@ -0,0 +1,37 @@
+
+/* Newfangled version identification scheme.
+
+   This scheme was added in Python 1.5.2b2; before that time, only PATCHLEVEL
+   was available.  To test for presence of the scheme, test for
+   defined(PY_MAJOR_VERSION).
+
+   When the major or minor version changes, the VERSION variable in
+   configure.in must also be changed.
+
+   There is also (independent) API version information in modsupport.h.
+*/
+
+/* Values for PY_RELEASE_LEVEL */
+#define PY_RELEASE_LEVEL_ALPHA	0xA
+#define PY_RELEASE_LEVEL_BETA	0xB
+#define PY_RELEASE_LEVEL_GAMMA	0xC     /* For release candidates */
+#define PY_RELEASE_LEVEL_FINAL	0xF	/* Serial should be 0 here */
+					/* Higher for patch releases */
+
+/* Version parsed out into numeric values */
+#define PY_MAJOR_VERSION	2
+#define PY_MINOR_VERSION	4
+#define PY_MICRO_VERSION	1
+#define PY_RELEASE_LEVEL	PY_RELEASE_LEVEL_FINAL
+#define PY_RELEASE_SERIAL	0
+
+/* Version as a string */
+#define PY_VERSION		"2.4.1"
+
+/* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2.
+   Use this for numeric comparisons, e.g. #if PY_VERSION_HEX >= ... */
+#define PY_VERSION_HEX ((PY_MAJOR_VERSION << 24) | \
+			(PY_MINOR_VERSION << 16) | \
+			(PY_MICRO_VERSION <<  8) | \
+			(PY_RELEASE_LEVEL <<  4) | \
+			(PY_RELEASE_SERIAL << 0))
diff --git a/depot_tools/release/win/python_24/include/pgen.h b/depot_tools/release/win/python_24/include/pgen.h
new file mode 100644
index 0000000..8a325ed
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pgen.h
@@ -0,0 +1,18 @@
+#ifndef Py_PGEN_H
+#define Py_PGEN_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Parser generator interface */
+
+extern grammar *meta_grammar(void);
+
+struct _node;
+extern grammar *pgen(struct _node *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PGEN_H */
diff --git a/depot_tools/release/win/python_24/include/pgenheaders.h b/depot_tools/release/win/python_24/include/pgenheaders.h
new file mode 100644
index 0000000..2049ae3
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pgenheaders.h
@@ -0,0 +1,42 @@
+#ifndef Py_PGENHEADERS_H
+#define Py_PGENHEADERS_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Include files and extern declarations used by most of the parser. */
+
+#include "Python.h"
+
+PyAPI_FUNC(void) PySys_WriteStdout(const char *format, ...)
+			Py_GCC_ATTRIBUTE((format(printf, 1, 2)));
+PyAPI_FUNC(void) PySys_WriteStderr(const char *format, ...)
+			Py_GCC_ATTRIBUTE((format(printf, 1, 2)));
+
+#define addarc _Py_addarc
+#define addbit _Py_addbit
+#define adddfa _Py_adddfa
+#define addfirstsets _Py_addfirstsets
+#define addlabel _Py_addlabel
+#define addstate _Py_addstate
+#define delbitset _Py_delbitset
+#define dumptree _Py_dumptree
+#define findlabel _Py_findlabel
+#define mergebitset _Py_mergebitset
+#define meta_grammar _Py_meta_grammar
+#define newbitset _Py_newbitset
+#define newgrammar _Py_newgrammar
+#define pgen _Py_pgen
+#define printgrammar _Py_printgrammar
+#define printnonterminals _Py_printnonterminals
+#define printtree _Py_printtree
+#define samebitset _Py_samebitset
+#define showtree _Py_showtree
+#define tok_dump _Py_tok_dump
+#define translatelabels _Py_translatelabels
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PGENHEADERS_H */
diff --git a/depot_tools/release/win/python_24/include/py_curses.h b/depot_tools/release/win/python_24/include/py_curses.h
new file mode 100644
index 0000000..aaff4bd
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/py_curses.h
@@ -0,0 +1,176 @@
+
+#ifndef Py_CURSES_H
+#define Py_CURSES_H
+
+#ifdef __APPLE__
+/*
+** On Mac OS X 10.2 [n]curses.h and stdlib.h use different guards
+** against multiple definition of wchar_t.
+*/
+#ifdef	_BSD_WCHAR_T_DEFINED_
+#define _WCHAR_T
+#endif
+#endif
+
+#ifdef __FreeBSD__
+/*
+** On FreeBSD, [n]curses.h and stdlib.h/wchar.h use different guards
+** against multiple definition of wchar_t and wint_t.
+*/
+#ifdef	_XOPEN_SOURCE_EXTENDED
+#ifndef __FreeBSD_version
+#include <osreldate.h>
+#endif
+#if __FreeBSD_version >= 500000
+#ifndef __wchar_t
+#define __wchar_t
+#endif
+#ifndef __wint_t
+#define __wint_t
+#endif
+#else
+#ifndef _WCHAR_T
+#define _WCHAR_T
+#endif
+#ifndef _WINT_T
+#define _WINT_T
+#endif
+#endif
+#endif
+#endif
+
+#ifdef HAVE_NCURSES_H
+#include <ncurses.h>
+#else
+#include <curses.h>
+#ifdef HAVE_TERM_H
+/* for tigetstr, which is not declared in SysV curses */
+#include <term.h>
+#endif
+#endif
+
+#ifdef HAVE_NCURSES_H
+/* configure was checking <curses.h>, but we will
+   use <ncurses.h>, which has all these features. */
+#ifndef WINDOW_HAS_FLAGS
+#define WINDOW_HAS_FLAGS 1
+#endif
+#ifndef MVWDELCH_IS_EXPRESSION
+#define MVWDELCH_IS_EXPRESSION 1
+#endif
+#endif
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define PyCurses_API_pointers 4
+
+/* Type declarations */
+
+typedef struct {
+	PyObject_HEAD
+	WINDOW *win;
+} PyCursesWindowObject;
+
+#define PyCursesWindow_Check(v)	 ((v)->ob_type == &PyCursesWindow_Type)
+
+#ifdef CURSES_MODULE
+/* This section is used when compiling _cursesmodule.c */
+
+#else
+/* This section is used in modules that use the _cursesmodule API */
+
+static void **PyCurses_API;
+
+#define PyCursesWindow_Type (*(PyTypeObject *) PyCurses_API[0])
+#define PyCursesSetupTermCalled  {if (! ((int (*)(void))PyCurses_API[1]) () ) return NULL;}
+#define PyCursesInitialised      {if (! ((int (*)(void))PyCurses_API[2]) () ) return NULL;}
+#define PyCursesInitialisedColor {if (! ((int (*)(void))PyCurses_API[3]) () ) return NULL;}
+
+#define import_curses() \
+{ \
+  PyObject *module = PyImport_ImportModule("_curses"); \
+  if (module != NULL) { \
+    PyObject *module_dict = PyModule_GetDict(module); \
+    PyObject *c_api_object = PyDict_GetItemString(module_dict, "_C_API"); \
+    if (PyCObject_Check(c_api_object)) { \
+      PyCurses_API = (void **)PyCObject_AsVoidPtr(c_api_object); \
+    } \
+  } \
+}
+#endif
+
+/* general error messages */
+static char *catchall_ERR  = "curses function returned ERR";
+static char *catchall_NULL = "curses function returned NULL";
+
+/* Function Prototype Macros - They are ugly but very, very useful. ;-)
+
+   X - function name
+   TYPE - parameter Type
+   ERGSTR - format string for construction of the return value
+   PARSESTR - format string for argument parsing
+   */
+
+#define NoArgNoReturnFunction(X) \
+static PyObject *PyCurses_ ## X (PyObject *self) \
+{ \
+  PyCursesInitialised \
+  return PyCursesCheckERR(X(), # X); }
+
+#define NoArgOrFlagNoReturnFunction(X) \
+static PyObject *PyCurses_ ## X (PyObject *self, PyObject *args) \
+{ \
+  int flag = 0; \
+  PyCursesInitialised \
+  switch(PyTuple_Size(args)) { \
+  case 0: \
+    return PyCursesCheckERR(X(), # X); \
+  case 1: \
+    if (!PyArg_ParseTuple(args, "i;True(1) or False(0)", &flag)) return NULL; \
+    if (flag) return PyCursesCheckERR(X(), # X); \
+    else return PyCursesCheckERR(no ## X (), # X); \
+  default: \
+    PyErr_SetString(PyExc_TypeError, # X " requires 0 or 1 arguments"); \
+    return NULL; } }
+
+#define NoArgReturnIntFunction(X) \
+static PyObject *PyCurses_ ## X (PyObject *self) \
+{ \
+ PyCursesInitialised \
+ return PyInt_FromLong((long) X()); }
+
+
+#define NoArgReturnStringFunction(X) \
+static PyObject *PyCurses_ ## X (PyObject *self) \
+{ \
+  PyCursesInitialised \
+  return PyString_FromString(X()); }
+
+#define NoArgTrueFalseFunction(X) \
+static PyObject *PyCurses_ ## X (PyObject *self) \
+{ \
+  PyCursesInitialised \
+  if (X () == FALSE) { \
+    Py_INCREF(Py_False); \
+    return Py_False; \
+  } \
+  Py_INCREF(Py_True); \
+  return Py_True; }
+
+#define NoArgNoReturnVoidFunction(X) \
+static PyObject *PyCurses_ ## X (PyObject *self) \
+{ \
+  PyCursesInitialised \
+  X(); \
+  Py_INCREF(Py_None); \
+  return Py_None; }
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* !defined(Py_CURSES_H) */
+
+
diff --git a/depot_tools/release/win/python_24/include/pyconfig.h b/depot_tools/release/win/python_24/include/pyconfig.h
new file mode 100644
index 0000000..1979b7e
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pyconfig.h
@@ -0,0 +1,565 @@
+#ifndef Py_CONFIG_H
+#define Py_CONFIG_H
+
+/* pyconfig.h.  NOT Generated automatically by configure.
+
+This is a manually maintained version used for the Watcom,
+Borland and Microsoft Visual C++ compilers.  It is a
+standard part of the Python distribution.
+
+WINDOWS DEFINES:
+The code specific to Windows should be wrapped around one of
+the following #defines
+
+MS_WIN64 - Code specific to the MS Win64 API
+MS_WIN32 - Code specific to the MS Win32 (and Win64) API (obsolete, this covers all supported APIs)
+MS_WINDOWS - Code specific to Windows, but all versions.
+Py_ENABLE_SHARED - Code if the Python core is built as a DLL.
+
+Also note that neither "_M_IX86" nor "_MSC_VER" should be used for
+any purpose other than "Windows Intel x86 specific" and "Microsoft
+compiler specific".  Therefore, these should be very rare.
+
+
+NOTE: The following symbols are deprecated:
+NT, WIN32, USE_DL_EXPORT, USE_DL_IMPORT, DL_EXPORT, DL_IMPORT
+MS_CORE_DLL.
+
+*/
+
+#include <io.h>
+#define HAVE_SYS_UTIME_H
+#define HAVE_HYPOT
+#define HAVE_TEMPNAM
+#define HAVE_TMPFILE
+#define HAVE_TMPNAM
+#define HAVE_CLOCK
+#define HAVE_STRFTIME
+#define HAVE_STRERROR
+#define DONT_HAVE_SIG_ALARM
+#define DONT_HAVE_SIG_PAUSE
+#define LONG_BIT	32
+#define WORD_BIT 32
+#define PREFIX ""
+#define EXEC_PREFIX ""
+
+#define MS_WIN32 /* only support win32 and greater. */
+#define MS_WINDOWS
+#ifndef PYTHONPATH
+#	define PYTHONPATH ".\\DLLs;.\\lib;.\\lib\\plat-win;.\\lib\\lib-tk"
+#endif
+#define NT_THREADS
+#define WITH_THREAD
+#ifndef NETSCAPE_PI
+#define USE_SOCKET
+#endif
+
+/* Compiler specific defines */
+
+/* ------------------------------------------------------------------------*/
+/* Microsoft C defines _MSC_VER */
+#ifdef _MSC_VER
+
+/* We want COMPILER to expand to a string containing _MSC_VER's *value*.
+ * This is horridly tricky, because the stringization operator only works
+ * on macro arguments, and doesn't evaluate macros passed *as* arguments.
+ * Attempts simpler than the following appear doomed to produce "_MSC_VER"
+ * literally in the string.
+ */
+#define _Py_PASTE_VERSION(SUFFIX) \
+	("[MSC v." _Py_STRINGIZE(_MSC_VER) " " SUFFIX "]")
+/* e.g., this produces, after compile-time string catenation,
+ * 	("[MSC v.1200 32 bit (Intel)]")
+ *
+ * _Py_STRINGIZE(_MSC_VER) expands to
+ * _Py_STRINGIZE1((_MSC_VER)) expands to
+ * _Py_STRINGIZE2(_MSC_VER) but as this call is the result of token-pasting
+ *      it's scanned again for macros and so further expands to (under MSVC 6)
+ * _Py_STRINGIZE2(1200) which then expands to
+ * "1200"
+ */
+#define _Py_STRINGIZE(X) _Py_STRINGIZE1((X))
+#define _Py_STRINGIZE1(X) _Py_STRINGIZE2 ## X
+#define _Py_STRINGIZE2(X) #X
+
+/* MSVC defines _WINxx to differentiate the windows platform types
+
+   Note that for compatibility reasons _WIN32 is defined on Win32
+   *and* on Win64. For the same reasons, in Python, MS_WIN32 is
+   defined on Win32 *and* Win64. Win32 only code must therefore be
+   guarded as follows:
+   	#if defined(MS_WIN32) && !defined(MS_WIN64)
+*/
+#ifdef _WIN64
+#define MS_WIN64
+#endif
+
+/* set the COMPILER */
+#ifdef MS_WIN64
+#ifdef _M_IX86
+#define COMPILER _Py_PASTE_VERSION("64 bit (Intel)")
+#elif defined(_M_IA64)
+#define COMPILER _Py_PASTE_VERSION("64 bit (Itanium)")
+#elif defined(_M_AMD64)
+#define COMPILER _Py_PASTE_VERSION("64 bit (AMD64)")
+#else
+#define COMPILER _Py_PASTE_VERSION("64 bit (Unknown)")
+#endif
+#endif /* MS_WIN64 */
+
+#if defined(MS_WIN32) && !defined(MS_WIN64)
+#ifdef _M_IX86
+#define COMPILER _Py_PASTE_VERSION("32 bit (Intel)")
+#else
+#define COMPILER _Py_PASTE_VERSION("32 bit (Unknown)")
+#endif
+#endif /* MS_WIN32 && !MS_WIN64 */
+
+typedef int pid_t;
+#define hypot _hypot
+
+#include <float.h>
+#define Py_IS_NAN _isnan
+#define Py_IS_INFINITY(X) (!_finite(X) && !_isnan(X))
+
+#endif /* _MSC_VER */
+
+/* define some ANSI types that are not defined in earlier Win headers */
+#if defined(_MSC_VER) && _MSC_VER >= 1200
+/* This file only exists in VC 6.0 or higher */
+#include <basetsd.h>
+#endif
+
+/* ------------------------------------------------------------------------*/
+/* The Borland compiler defines __BORLANDC__ */
+/* XXX These defines are likely incomplete, but should be easy to fix. */
+#ifdef __BORLANDC__
+#define COMPILER "[Borland]"
+
+#ifdef _WIN32
+/* tested with BCC 5.5 (__BORLANDC__ >= 0x0550)
+ */
+
+typedef int pid_t;
+/* BCC55 seems to understand __declspec(dllimport), it is used in its
+   own header files (winnt.h, ...) - so we can do nothing and get the default*/
+
+#undef HAVE_SYS_UTIME_H
+#define HAVE_UTIME_H
+#define HAVE_DIRENT_H
+
+/* rename a few functions for the Borland compiler */
+#include <io.h>
+#define _chsize chsize
+#define _setmode setmode
+
+#else /* !_WIN32 */
+#error "Only Win32 and later are supported"
+#endif /* !_WIN32 */
+
+#endif /* BORLANDC */
+
+/* ------------------------------------------------------------------------*/
+/* egcs/gnu-win32 defines __GNUC__ and _WIN32 */
+#if defined(__GNUC__) && defined(_WIN32)
+/* XXX These defines are likely incomplete, but should be easy to fix.
+   They should be complete enough to build extension modules. */
+/* Suggested by Rene Liebscher <R.Liebscher@gmx.de> to avoid a GCC 2.91.*
+   bug that requires structure imports.  More recent versions of the
+   compiler don't exhibit this bug.
+*/
+#if (__GNUC__==2) && (__GNUC_MINOR__<=91)
+#warning "Please use an up-to-date version of gcc! (>2.91 recommended)"
+#endif
+
+#define COMPILER "[gcc]"
+#define hypot _hypot
+#define PY_LONG_LONG long long
+#endif /* GNUC */
+
+/* ------------------------------------------------------------------------*/
+/* lcc-win32 defines __LCC__ */
+#if defined(__LCC__)
+/* XXX These defines are likely incomplete, but should be easy to fix.
+   They should be complete enough to build extension modules. */
+
+#define COMPILER "[lcc-win32]"
+typedef int pid_t;
+/* __declspec() is supported here too - do nothing to get the defaults */
+
+#endif /* LCC */
+
+/* ------------------------------------------------------------------------*/
+/* End of compilers - finish up */
+
+#ifndef NO_STDIO_H
+#	include <stdio.h>
+#endif
+
+/* 64 bit ints are usually spelt __int64 unless compiler has overridden */
+#define HAVE_LONG_LONG 1
+#ifndef PY_LONG_LONG
+#	define PY_LONG_LONG __int64
+#endif
+
+/* For Windows the Python core is in a DLL by default.  Test
+Py_NO_ENABLE_SHARED to find out.  Also support MS_NO_COREDLL for b/w compat */
+#if !defined(MS_NO_COREDLL) && !defined(Py_NO_ENABLE_SHARED)
+#	define Py_ENABLE_SHARED 1 /* standard symbol for shared library */
+#	define MS_COREDLL	/* deprecated old symbol */
+#endif /* !MS_NO_COREDLL && ... */
+
+/* Deprecated USE_DL_EXPORT macro - please use Py_BUILD_CORE */
+#ifdef USE_DL_EXPORT
+#	define Py_BUILD_CORE
+#endif /* USE_DL_EXPORT */
+
+/*  All windows compilers that use this header support __declspec */
+#define HAVE_DECLSPEC_DLL
+
+/* For an MSVC DLL, we can nominate the .lib files used by extensions */
+#ifdef MS_COREDLL
+#	ifndef Py_BUILD_CORE /* not building the core - must be an ext */
+#		if defined(_MSC_VER)
+			/* So MSVC users need not specify the .lib file in
+			their Makefile (other compilers are generally
+			taken care of by distutils.) */
+#			ifdef _DEBUG
+#				pragma comment(lib,"python24_d.lib")
+#			else
+#				pragma comment(lib,"python24.lib")
+#			endif /* _DEBUG */
+#		endif /* _MSC_VER */
+#	endif /* Py_BUILD_CORE */
+#endif /* MS_COREDLL */
+
+#if defined(MS_WIN64)
+/* maintain "win32" sys.platform for backward compatibility of Python code,
+   the Win64 API should be close enough to the Win32 API to make this
+   preferable */
+#	define PLATFORM "win32"
+#	define SIZEOF_VOID_P 8
+#	define SIZEOF_TIME_T 8
+#	define SIZEOF_OFF_T 4
+#	define SIZEOF_FPOS_T 8
+#	define SIZEOF_HKEY 8
+/* configure.in defines HAVE_LARGEFILE_SUPPORT iff HAVE_LONG_LONG,
+   sizeof(off_t) > sizeof(long), and sizeof(PY_LONG_LONG) >= sizeof(off_t).
+   On Win64 the second condition is not true, but if fpos_t replaces off_t
+   then this is true. The uses of HAVE_LARGEFILE_SUPPORT imply that Win64
+   should define this. */
+#	define HAVE_LARGEFILE_SUPPORT
+#elif defined(MS_WIN32)
+#	define PLATFORM "win32"
+#	define HAVE_LARGEFILE_SUPPORT
+#	define SIZEOF_VOID_P 4
+#	define SIZEOF_TIME_T 4
+#	define SIZEOF_OFF_T 4
+#	define SIZEOF_FPOS_T 8
+#	define SIZEOF_HKEY 4
+#endif
+
+#ifdef _DEBUG
+#	define Py_DEBUG
+#endif
+
+
+#ifdef MS_WIN32
+
+#define SIZEOF_SHORT 2
+#define SIZEOF_INT 4
+#define SIZEOF_LONG 4
+#define SIZEOF_LONG_LONG 8
+
+/* VC 7.1 has them and VC 6.0 does not.  VC 6.0 has a version number of 1200.
+   If some compiler does not provide them, modify the #if appropriately. */
+#if defined(_MSC_VER)
+#if _MSC_VER > 1200
+#define HAVE_UINTPTR_T 1
+#define HAVE_INTPTR_T 1
+#endif  /* _MSC_VER > 1200  */ 
+#endif  /* _MSC_VER */
+
+#endif
+
+/* Fairly standard from here! */
+
+/* Define if on AIX 3.
+   System headers sometimes define this.
+   We just want to avoid a redefinition error message.  */
+#ifndef _ALL_SOURCE
+/* #undef _ALL_SOURCE */
+#endif
+
+/* Define to empty if the keyword does not work.  */
+/* #define const  */
+
+/* Define if you have dirent.h.  */
+/* #define DIRENT 1 */
+
+/* Define to the type of elements in the array set by `getgroups'.
+   Usually this is either `int' or `gid_t'.  */
+/* #undef GETGROUPS_T */
+
+/* Define to `int' if <sys/types.h> doesn't define.  */
+/* #undef gid_t */
+
+/* Define if your struct tm has tm_zone.  */
+/* #undef HAVE_TM_ZONE */
+
+/* Define if you don't have tm_zone but do have the external array
+   tzname.  */
+#define HAVE_TZNAME
+
+/* Define to `int' if <sys/types.h> doesn't define.  */
+/* #undef mode_t */
+
+/* Define if you don't have dirent.h, but have ndir.h.  */
+/* #undef NDIR */
+
+/* Define to `long' if <sys/types.h> doesn't define.  */
+/* #undef off_t */
+
+/* Define to `int' if <sys/types.h> doesn't define.  */
+/* #undef pid_t */
+
+/* Define if the system does not provide POSIX.1 features except
+   with this defined.  */
+/* #undef _POSIX_1_SOURCE */
+
+/* Define if you need to in order for stat and other things to work.  */
+/* #undef _POSIX_SOURCE */
+
+/* Define as the return type of signal handlers (int or void).  */
+#define RETSIGTYPE void
+
+/* Define to `unsigned' if <sys/types.h> doesn't define.  */
+/* #undef size_t */
+
+/* Define to `int' if <sys/types.h> doesn't define.  */
+#if _MSC_VER + 0 >= 1300
+/* VC.NET typedefs socklen_t in ws2tcpip.h. */
+#else
+#define socklen_t int
+#endif
+
+/* Define if you have the ANSI C header files.  */
+#define STDC_HEADERS 1
+
+/* Define if you don't have dirent.h, but have sys/dir.h.  */
+/* #undef SYSDIR */
+
+/* Define if you don't have dirent.h, but have sys/ndir.h.  */
+/* #undef SYSNDIR */
+
+/* Define if you can safely include both <sys/time.h> and <time.h>.  */
+/* #undef TIME_WITH_SYS_TIME */
+
+/* Define if your <sys/time.h> declares struct tm.  */
+/* #define TM_IN_SYS_TIME 1 */
+
+/* Define to `int' if <sys/types.h> doesn't define.  */
+/* #undef uid_t */
+
+/* Define if the closedir function returns void instead of int.  */
+/* #undef VOID_CLOSEDIR */
+
+/* Define if getpgrp() must be called as getpgrp(0)
+   and (consequently) setpgrp() as setpgrp(0, 0). */
+/* #undef GETPGRP_HAVE_ARGS */
+
+/* Define this if your time.h defines altzone */
+/* #define HAVE_ALTZONE */
+
+/* Define if you have the putenv function.  */
+#define HAVE_PUTENV
+
+/* Define if your compiler supports function prototypes */
+#define HAVE_PROTOTYPES
+
+/* Define if you can safely include both <sys/select.h> and <sys/time.h>
+   (which you can't on SCO ODT 3.0). */
+/* #undef SYS_SELECT_WITH_SYS_TIME */
+
+/* Define if you want documentation strings in extension modules */
+#define WITH_DOC_STRINGS 1
+
+/* Define if you want to compile in rudimentary thread support */
+/* #undef WITH_THREAD */
+
+/* Define if you want to use the GNU readline library */
+/* #define WITH_READLINE 1 */
+
+/* Define if you want to have a Unicode type. */
+#define Py_USING_UNICODE
+
+/* Define as the integral type used for Unicode representation. */
+#define PY_UNICODE_TYPE unsigned short
+
+/* Define as the size of the unicode type. */
+#define Py_UNICODE_SIZE SIZEOF_SHORT
+
+/* Define if you have a usable wchar_t type defined in wchar.h; usable
+   means wchar_t must be a 16-bit unsigned type. (see
+   Include/unicodeobject.h). */
+#if Py_UNICODE_SIZE == 2
+#define HAVE_USABLE_WCHAR_T
+
+/* Define to indicate that the Python Unicode representation can be passed
+   as-is to Win32 Wide API.  */
+#define Py_WIN_WIDE_FILENAMES
+#endif
+
+/* Use Python's own small-block memory-allocator. */
+#define WITH_PYMALLOC 1
+
+/* Define if you have clock.  */
+/* #define HAVE_CLOCK */
+
+/* Define when any dynamic module loading is enabled */
+#define HAVE_DYNAMIC_LOADING
+
+/* Define if you have ftime.  */
+#define HAVE_FTIME
+
+/* Define if you have getpeername.  */
+#define HAVE_GETPEERNAME
+
+/* Define if you have getpgrp.  */
+/* #undef HAVE_GETPGRP */
+
+/* Define if you have getpid.  */
+#define HAVE_GETPID
+
+/* Define if you have gettimeofday.  */
+/* #undef HAVE_GETTIMEOFDAY */
+
+/* Define if you have getwd.  */
+/* #undef HAVE_GETWD */
+
+/* Define if you have lstat.  */
+/* #undef HAVE_LSTAT */
+
+/* Define if you have the mktime function.  */
+#define HAVE_MKTIME
+
+/* Define if you have nice.  */
+/* #undef HAVE_NICE */
+
+/* Define if you have readlink.  */
+/* #undef HAVE_READLINK */
+
+/* Define if you have select.  */
+/* #undef HAVE_SELECT */
+
+/* Define if you have setpgid.  */
+/* #undef HAVE_SETPGID */
+
+/* Define if you have setpgrp.  */
+/* #undef HAVE_SETPGRP */
+
+/* Define if you have setsid.  */
+/* #undef HAVE_SETSID */
+
+/* Define if you have setvbuf.  */
+#define HAVE_SETVBUF
+
+/* Define if you have siginterrupt.  */
+/* #undef HAVE_SIGINTERRUPT */
+
+/* Define if you have symlink.  */
+/* #undef HAVE_SYMLINK */
+
+/* Define if you have tcgetpgrp.  */
+/* #undef HAVE_TCGETPGRP */
+
+/* Define if you have tcsetpgrp.  */
+/* #undef HAVE_TCSETPGRP */
+
+/* Define if you have times.  */
+/* #undef HAVE_TIMES */
+
+/* Define if you have uname.  */
+/* #undef HAVE_UNAME */
+
+/* Define if you have waitpid.  */
+/* #undef HAVE_WAITPID */
+
+/* Define to 1 if you have the `wcscoll' function. */
+#define HAVE_WCSCOLL 1
+
+/* Define if you have the <dlfcn.h> header file.  */
+/* #undef HAVE_DLFCN_H */
+
+/* Define if you have the <fcntl.h> header file.  */
+#define HAVE_FCNTL_H 1
+
+/* Define if you have the <stdarg.h> prototypes.  */
+#define HAVE_STDARG_PROTOTYPES
+
+/* Define if you have the <stddef.h> header file.  */
+#define HAVE_STDDEF_H 1
+
+/* Define if you have the <sys/audioio.h> header file.  */
+/* #undef HAVE_SYS_AUDIOIO_H */
+
+/* Define if you have the <sys/param.h> header file.  */
+/* #define HAVE_SYS_PARAM_H 1 */
+
+/* Define if you have the <sys/select.h> header file.  */
+/* #define HAVE_SYS_SELECT_H 1 */
+
+/* Define if you have the <sys/time.h> header file.  */
+/* #define HAVE_SYS_TIME_H 1 */
+
+/* Define if you have the <sys/times.h> header file.  */
+/* #define HAVE_SYS_TIMES_H 1 */
+
+/* Define if you have the <sys/un.h> header file.  */
+/* #define HAVE_SYS_UN_H 1 */
+
+/* Define if you have the <sys/utime.h> header file.  */
+/* #define HAVE_SYS_UTIME_H 1 */
+
+/* Define if you have the <sys/utsname.h> header file.  */
+/* #define HAVE_SYS_UTSNAME_H 1 */
+
+/* Define if you have the <thread.h> header file.  */
+/* #undef HAVE_THREAD_H */
+
+/* Define if you have the <unistd.h> header file.  */
+/* #define HAVE_UNISTD_H 1 */
+
+/* Define if you have the <utime.h> header file.  */
+/* #define HAVE_UTIME_H 1 */
+
+/* Define if the compiler provides a wchar.h header file. */
+#define HAVE_WCHAR_H 1
+
+/* Define if you have the dl library (-ldl).  */
+/* #undef HAVE_LIBDL */
+
+/* Define if you have the mpc library (-lmpc).  */
+/* #undef HAVE_LIBMPC */
+
+/* Define if you have the nsl library (-lnsl).  */
+#define HAVE_LIBNSL 1
+
+/* Define if you have the seq library (-lseq).  */
+/* #undef HAVE_LIBSEQ */
+
+/* Define if you have the socket library (-lsocket).  */
+#define HAVE_LIBSOCKET 1
+
+/* Define if you have the sun library (-lsun).  */
+/* #undef HAVE_LIBSUN */
+
+/* Define if you have the termcap library (-ltermcap).  */
+/* #undef HAVE_LIBTERMCAP */
+
+/* Define if you have the termlib library (-ltermlib).  */
+/* #undef HAVE_LIBTERMLIB */
+
+/* Define if you have the thread library (-lthread).  */
+/* #undef HAVE_LIBTHREAD */
+#endif /* !Py_CONFIG_H */
diff --git a/depot_tools/release/win/python_24/include/pydebug.h b/depot_tools/release/win/python_24/include/pydebug.h
new file mode 100644
index 0000000..f49a98e
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pydebug.h
@@ -0,0 +1,34 @@
+
+#ifndef Py_PYDEBUG_H
+#define Py_PYDEBUG_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_DATA(int) Py_DebugFlag;
+PyAPI_DATA(int) Py_VerboseFlag;
+PyAPI_DATA(int) Py_InteractiveFlag;
+PyAPI_DATA(int) Py_OptimizeFlag;
+PyAPI_DATA(int) Py_NoSiteFlag;
+PyAPI_DATA(int) Py_UseClassExceptionsFlag;
+PyAPI_DATA(int) Py_FrozenFlag;
+PyAPI_DATA(int) Py_TabcheckFlag;
+PyAPI_DATA(int) Py_UnicodeFlag;
+PyAPI_DATA(int) Py_IgnoreEnvironmentFlag;
+PyAPI_DATA(int) Py_DivisionWarningFlag;
+/* _XXX Py_QnewFlag should go away in 3.0.  It's true iff -Qnew is passed
+  on the command line, and is used in 2.2 by ceval.c to make all "/" divisions
+  true divisions (which they will be in 3.0). */
+PyAPI_DATA(int) _Py_QnewFlag;
+
+/* this is a wrapper around getenv() that pays attention to
+   Py_IgnoreEnvironmentFlag.  It should be used for getting variables like
+   PYTHONPATH and PYTHONHOME from the environment */
+#define Py_GETENV(s) (Py_IgnoreEnvironmentFlag ? NULL : getenv(s))
+
+PyAPI_FUNC(void) Py_FatalError(const char *message);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PYDEBUG_H */
diff --git a/depot_tools/release/win/python_24/include/pyerrors.h b/depot_tools/release/win/python_24/include/pyerrors.h
new file mode 100644
index 0000000..f433cc0
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pyerrors.h
@@ -0,0 +1,234 @@
+#ifndef Py_ERRORS_H
+#define Py_ERRORS_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Error handling definitions */
+
+PyAPI_FUNC(void) PyErr_SetNone(PyObject *);
+PyAPI_FUNC(void) PyErr_SetObject(PyObject *, PyObject *);
+PyAPI_FUNC(void) PyErr_SetString(PyObject *, const char *);
+PyAPI_FUNC(PyObject *) PyErr_Occurred(void);
+PyAPI_FUNC(void) PyErr_Clear(void);
+PyAPI_FUNC(void) PyErr_Fetch(PyObject **, PyObject **, PyObject **);
+PyAPI_FUNC(void) PyErr_Restore(PyObject *, PyObject *, PyObject *);
+
+/* Error testing and normalization */
+PyAPI_FUNC(int) PyErr_GivenExceptionMatches(PyObject *, PyObject *);
+PyAPI_FUNC(int) PyErr_ExceptionMatches(PyObject *);
+PyAPI_FUNC(void) PyErr_NormalizeException(PyObject**, PyObject**, PyObject**);
+
+
+/* Predefined exceptions */
+
+PyAPI_DATA(PyObject *) PyExc_Exception;
+PyAPI_DATA(PyObject *) PyExc_StopIteration;
+PyAPI_DATA(PyObject *) PyExc_StandardError;
+PyAPI_DATA(PyObject *) PyExc_ArithmeticError;
+PyAPI_DATA(PyObject *) PyExc_LookupError;
+
+PyAPI_DATA(PyObject *) PyExc_AssertionError;
+PyAPI_DATA(PyObject *) PyExc_AttributeError;
+PyAPI_DATA(PyObject *) PyExc_EOFError;
+PyAPI_DATA(PyObject *) PyExc_FloatingPointError;
+PyAPI_DATA(PyObject *) PyExc_EnvironmentError;
+PyAPI_DATA(PyObject *) PyExc_IOError;
+PyAPI_DATA(PyObject *) PyExc_OSError;
+PyAPI_DATA(PyObject *) PyExc_ImportError;
+PyAPI_DATA(PyObject *) PyExc_IndexError;
+PyAPI_DATA(PyObject *) PyExc_KeyError;
+PyAPI_DATA(PyObject *) PyExc_KeyboardInterrupt;
+PyAPI_DATA(PyObject *) PyExc_MemoryError;
+PyAPI_DATA(PyObject *) PyExc_NameError;
+PyAPI_DATA(PyObject *) PyExc_OverflowError;
+PyAPI_DATA(PyObject *) PyExc_RuntimeError;
+PyAPI_DATA(PyObject *) PyExc_NotImplementedError;
+PyAPI_DATA(PyObject *) PyExc_SyntaxError;
+PyAPI_DATA(PyObject *) PyExc_IndentationError;
+PyAPI_DATA(PyObject *) PyExc_TabError;
+PyAPI_DATA(PyObject *) PyExc_ReferenceError;
+PyAPI_DATA(PyObject *) PyExc_SystemError;
+PyAPI_DATA(PyObject *) PyExc_SystemExit;
+PyAPI_DATA(PyObject *) PyExc_TypeError;
+PyAPI_DATA(PyObject *) PyExc_UnboundLocalError;
+PyAPI_DATA(PyObject *) PyExc_UnicodeError;
+PyAPI_DATA(PyObject *) PyExc_UnicodeEncodeError;
+PyAPI_DATA(PyObject *) PyExc_UnicodeDecodeError;
+PyAPI_DATA(PyObject *) PyExc_UnicodeTranslateError;
+PyAPI_DATA(PyObject *) PyExc_ValueError;
+PyAPI_DATA(PyObject *) PyExc_ZeroDivisionError;
+#ifdef MS_WINDOWS
+PyAPI_DATA(PyObject *) PyExc_WindowsError;
+#endif
+#ifdef __VMS
+PyAPI_DATA(PyObject *) PyExc_VMSError;
+#endif
+
+PyAPI_DATA(PyObject *) PyExc_MemoryErrorInst;
+
+/* Predefined warning categories */
+PyAPI_DATA(PyObject *) PyExc_Warning;
+PyAPI_DATA(PyObject *) PyExc_UserWarning;
+PyAPI_DATA(PyObject *) PyExc_DeprecationWarning;
+PyAPI_DATA(PyObject *) PyExc_PendingDeprecationWarning;
+PyAPI_DATA(PyObject *) PyExc_SyntaxWarning;
+/* PyExc_OverflowWarning will go away for Python 2.5 */
+PyAPI_DATA(PyObject *) PyExc_OverflowWarning;
+PyAPI_DATA(PyObject *) PyExc_RuntimeWarning;
+PyAPI_DATA(PyObject *) PyExc_FutureWarning;
+
+
+/* Convenience functions */
+
+PyAPI_FUNC(int) PyErr_BadArgument(void);
+PyAPI_FUNC(PyObject *) PyErr_NoMemory(void);
+PyAPI_FUNC(PyObject *) PyErr_SetFromErrno(PyObject *);
+PyAPI_FUNC(PyObject *) PyErr_SetFromErrnoWithFilenameObject(
+	PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyErr_SetFromErrnoWithFilename(PyObject *, char *);
+#ifdef Py_WIN_WIDE_FILENAMES
+PyAPI_FUNC(PyObject *) PyErr_SetFromErrnoWithUnicodeFilename(
+	PyObject *, Py_UNICODE *);
+#endif /* Py_WIN_WIDE_FILENAMES */
+
+PyAPI_FUNC(PyObject *) PyErr_Format(PyObject *, const char *, ...)
+			Py_GCC_ATTRIBUTE((format(printf, 2, 3)));
+
+#ifdef MS_WINDOWS
+PyAPI_FUNC(PyObject *) PyErr_SetFromWindowsErrWithFilenameObject(
+	int, const char *);
+PyAPI_FUNC(PyObject *) PyErr_SetFromWindowsErrWithFilename(
+	int, const char *);
+#ifdef Py_WIN_WIDE_FILENAMES
+PyAPI_FUNC(PyObject *) PyErr_SetFromWindowsErrWithUnicodeFilename(
+	int, const Py_UNICODE *);
+#endif /* Py_WIN_WIDE_FILENAMES */
+PyAPI_FUNC(PyObject *) PyErr_SetFromWindowsErr(int);
+PyAPI_FUNC(PyObject *) PyErr_SetExcFromWindowsErrWithFilenameObject(
+	PyObject *,int, PyObject *);
+PyAPI_FUNC(PyObject *) PyErr_SetExcFromWindowsErrWithFilename(
+	PyObject *,int, const char *);
+#ifdef Py_WIN_WIDE_FILENAMES
+PyAPI_FUNC(PyObject *) PyErr_SetExcFromWindowsErrWithUnicodeFilename(
+	PyObject *,int, const Py_UNICODE *);
+#endif /* Py_WIN_WIDE_FILENAMES */
+PyAPI_FUNC(PyObject *) PyErr_SetExcFromWindowsErr(PyObject *, int);
+#endif /* MS_WINDOWS */
+
+/* Export the old function so that the existing API remains available: */
+PyAPI_FUNC(void) PyErr_BadInternalCall(void);
+PyAPI_FUNC(void) _PyErr_BadInternalCall(char *filename, int lineno);
+/* Mask the old API with a call to the new API for code compiled under
+   Python 2.0: */
+#define PyErr_BadInternalCall() _PyErr_BadInternalCall(__FILE__, __LINE__)
+
+/* Function to create a new exception */
+PyAPI_FUNC(PyObject *) PyErr_NewException(char *name, PyObject *base,
+                                         PyObject *dict);
+PyAPI_FUNC(void) PyErr_WriteUnraisable(PyObject *);
+
+/* Issue a warning or exception */
+PyAPI_FUNC(int) PyErr_Warn(PyObject *, char *);
+PyAPI_FUNC(int) PyErr_WarnExplicit(PyObject *, const char *,
+				   const char *, int, 
+				   const char *, PyObject *);
+
+/* In sigcheck.c or signalmodule.c */
+PyAPI_FUNC(int) PyErr_CheckSignals(void);
+PyAPI_FUNC(void) PyErr_SetInterrupt(void);
+
+/* Support for adding program text to SyntaxErrors */
+PyAPI_FUNC(void) PyErr_SyntaxLocation(const char *, int);
+PyAPI_FUNC(PyObject *) PyErr_ProgramText(const char *, int);
+
+#ifdef Py_USING_UNICODE
+/* The following functions are used to create and modify unicode
+   exceptions from C */
+
+/* create a UnicodeDecodeError object */
+PyAPI_FUNC(PyObject *) PyUnicodeDecodeError_Create(
+	const char *, const char *, int, int, int, const char *);
+
+/* create a UnicodeEncodeError object */
+PyAPI_FUNC(PyObject *) PyUnicodeEncodeError_Create(
+	const char *, const Py_UNICODE *, int, int, int, const char *);
+
+/* create a UnicodeTranslateError object */
+PyAPI_FUNC(PyObject *) PyUnicodeTranslateError_Create(
+	const Py_UNICODE *, int, int, int, const char *);
+
+/* get the encoding attribute */
+PyAPI_FUNC(PyObject *) PyUnicodeEncodeError_GetEncoding(PyObject *);
+PyAPI_FUNC(PyObject *) PyUnicodeDecodeError_GetEncoding(PyObject *);
+
+/* get the object attribute */
+PyAPI_FUNC(PyObject *) PyUnicodeEncodeError_GetObject(PyObject *);
+PyAPI_FUNC(PyObject *) PyUnicodeDecodeError_GetObject(PyObject *);
+PyAPI_FUNC(PyObject *) PyUnicodeTranslateError_GetObject(PyObject *);
+
+/* get the value of the start attribute (the int * may not be NULL)
+   return 0 on success, -1 on failure */
+PyAPI_FUNC(int) PyUnicodeEncodeError_GetStart(PyObject *, int *);
+PyAPI_FUNC(int) PyUnicodeDecodeError_GetStart(PyObject *, int *);
+PyAPI_FUNC(int) PyUnicodeTranslateError_GetStart(PyObject *, int *);
+
+/* assign a new value to the start attribute
+   return 0 on success, -1 on failure */
+PyAPI_FUNC(int) PyUnicodeEncodeError_SetStart(PyObject *, int);
+PyAPI_FUNC(int) PyUnicodeDecodeError_SetStart(PyObject *, int);
+PyAPI_FUNC(int) PyUnicodeTranslateError_SetStart(PyObject *, int);
+
+/* get the value of the end attribute (the int * may not be NULL)
+   return 0 on success, -1 on failure */
+PyAPI_FUNC(int) PyUnicodeEncodeError_GetEnd(PyObject *, int *);
+PyAPI_FUNC(int) PyUnicodeDecodeError_GetEnd(PyObject *, int *);
+PyAPI_FUNC(int) PyUnicodeTranslateError_GetEnd(PyObject *, int *);
+
+/* assign a new value to the end attribute
+   return 0 on success, -1 on failure */
+PyAPI_FUNC(int) PyUnicodeEncodeError_SetEnd(PyObject *, int);
+PyAPI_FUNC(int) PyUnicodeDecodeError_SetEnd(PyObject *, int);
+PyAPI_FUNC(int) PyUnicodeTranslateError_SetEnd(PyObject *, int);
+
+/* get the value of the reason attribute */
+PyAPI_FUNC(PyObject *) PyUnicodeEncodeError_GetReason(PyObject *);
+PyAPI_FUNC(PyObject *) PyUnicodeDecodeError_GetReason(PyObject *);
+PyAPI_FUNC(PyObject *) PyUnicodeTranslateError_GetReason(PyObject *);
+
+/* assign a new value to the reason attribute
+   return 0 on success, -1 on failure */
+PyAPI_FUNC(int) PyUnicodeEncodeError_SetReason(
+	PyObject *, const char *);
+PyAPI_FUNC(int) PyUnicodeDecodeError_SetReason(
+	PyObject *, const char *);
+PyAPI_FUNC(int) PyUnicodeTranslateError_SetReason(
+	PyObject *, const char *);
+#endif
+
+
+/* These APIs aren't really part of the error implementation, but
+   often needed to format error messages; the native C lib APIs are
+   not available on all platforms, which is why we provide emulations
+   for those platforms in Python/mysnprintf.c,
+   WARNING:  The return value of snprintf varies across platforms; do
+   not rely on any particular behavior; eventually the C99 defn may
+   be reliable.
+*/
+#if defined(MS_WIN32) && !defined(HAVE_SNPRINTF)
+# define HAVE_SNPRINTF
+# define snprintf _snprintf
+# define vsnprintf _vsnprintf
+#endif
+
+#include <stdarg.h>
+PyAPI_FUNC(int) PyOS_snprintf(char *str, size_t size, const char  *format, ...)
+			Py_GCC_ATTRIBUTE((format(printf, 3, 4)));
+PyAPI_FUNC(int) PyOS_vsnprintf(char *str, size_t size, const char  *format, va_list va)
+			Py_GCC_ATTRIBUTE((format(printf, 3, 0)));
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_ERRORS_H */
diff --git a/depot_tools/release/win/python_24/include/pyfpe.h b/depot_tools/release/win/python_24/include/pyfpe.h
new file mode 100644
index 0000000..19110ab
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pyfpe.h
@@ -0,0 +1,176 @@
+#ifndef Py_PYFPE_H
+#define Py_PYFPE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+     ---------------------------------------------------------------------  
+    /                       Copyright (c) 1996.                           \ 
+   |          The Regents of the University of California.                 |
+   |                        All rights reserved.                           |
+   |                                                                       |
+   |   Permission to use, copy, modify, and distribute this software for   |
+   |   any purpose without fee is hereby granted, provided that this en-   |
+   |   tire notice is included in all copies of any software which is or   |
+   |   includes  a  copy  or  modification  of  this software and in all   |
+   |   copies of the supporting documentation for such software.           |
+   |                                                                       |
+   |   This  work was produced at the University of California, Lawrence   |
+   |   Livermore National Laboratory under  contract  no.  W-7405-ENG-48   |
+   |   between  the  U.S.  Department  of  Energy and The Regents of the   |
+   |   University of California for the operation of UC LLNL.              |
+   |                                                                       |
+   |                              DISCLAIMER                               |
+   |                                                                       |
+   |   This  software was prepared as an account of work sponsored by an   |
+   |   agency of the United States Government. Neither the United States   |
+   |   Government  nor the University of California nor any of their em-   |
+   |   ployees, makes any warranty, express or implied, or  assumes  any   |
+   |   liability  or  responsibility  for the accuracy, completeness, or   |
+   |   usefulness of any information,  apparatus,  product,  or  process   |
+   |   disclosed,   or  represents  that  its  use  would  not  infringe   |
+   |   privately-owned rights. Reference herein to any specific  commer-   |
+   |   cial  products,  process,  or  service  by trade name, trademark,   |
+   |   manufacturer, or otherwise, does not  necessarily  constitute  or   |
+   |   imply  its endorsement, recommendation, or favoring by the United   |
+   |   States Government or the University of California. The views  and   |
+   |   opinions  of authors expressed herein do not necessarily state or   |
+   |   reflect those of the United States Government or  the  University   |
+   |   of  California,  and shall not be used for advertising or product   |
+    \  endorsement purposes.                                              / 
+     ---------------------------------------------------------------------  
+*/
+
+/*
+ *       Define macros for handling SIGFPE.
+ *       Lee Busby, LLNL, November, 1996
+ *       busby1@llnl.gov
+ * 
+ *********************************************
+ * Overview of the system for handling SIGFPE:
+ * 
+ * This file (Include/pyfpe.h) defines a couple of "wrapper" macros for
+ * insertion into your Python C code of choice. Their proper use is
+ * discussed below. The file Python/pyfpe.c defines a pair of global
+ * variables PyFPE_jbuf and PyFPE_counter which are used by the signal
+ * handler for SIGFPE to decide if a particular exception was protected
+ * by the macros. The signal handler itself, and code for enabling the
+ * generation of SIGFPE in the first place, is in a (new) Python module
+ * named fpectl. This module is standard in every respect. It can be loaded
+ * either statically or dynamically as you choose, and like any other
+ * Python module, has no effect until you import it.
+ * 
+ * In the general case, there are three steps toward handling SIGFPE in any
+ * Python code:
+ * 
+ * 1) Add the *_PROTECT macros to your C code as required to protect
+ *    dangerous floating point sections.
+ * 
+ * 2) Turn on the inclusion of the code by adding the ``--with-fpectl''
+ *    flag at the time you run configure.  If the fpectl or other modules
+ *    which use the *_PROTECT macros are to be dynamically loaded, be
+ *    sure they are compiled with WANT_SIGFPE_HANDLER defined.
+ * 
+ * 3) When python is built and running, import fpectl, and execute
+ *    fpectl.turnon_sigfpe(). This sets up the signal handler and enables
+ *    generation of SIGFPE whenever an exception occurs. From this point
+ *    on, any properly trapped SIGFPE should result in the Python
+ *    FloatingPointError exception.
+ * 
+ * Step 1 has been done already for the Python kernel code, and should be
+ * done soon for the NumPy array package.  Step 2 is usually done once at
+ * python install time. Python's behavior with respect to SIGFPE is not
+ * changed unless you also do step 3. Thus you can control this new
+ * facility at compile time, or run time, or both.
+ * 
+ ******************************** 
+ * Using the macros in your code:
+ * 
+ * static PyObject *foobar(PyObject *self,PyObject *args)
+ * {
+ *     ....
+ *     PyFPE_START_PROTECT("Error in foobar", return 0)
+ *     result = dangerous_op(somearg1, somearg2, ...);
+ *     PyFPE_END_PROTECT(result)
+ *     ....
+ * }
+ * 
+ * If a floating point error occurs in dangerous_op, foobar returns 0 (NULL),
+ * after setting the associated value of the FloatingPointError exception to
+ * "Error in foobar". ``Dangerous_op'' can be a single operation, or a block
+ * of code, function calls, or any combination, so long as no alternate
+ * return is possible before the PyFPE_END_PROTECT macro is reached.
+ * 
+ * The macros can only be used in a function context where an error return
+ * can be recognized as signaling a Python exception. (Generally, most
+ * functions that return a PyObject * will qualify.)
+ * 
+ * Guido's original design suggestion for PyFPE_START_PROTECT and
+ * PyFPE_END_PROTECT had them open and close a local block, with a locally
+ * defined jmp_buf and jmp_buf pointer. This would allow recursive nesting
+ * of the macros. The Ansi C standard makes it clear that such local
+ * variables need to be declared with the "volatile" type qualifier to keep
+ * setjmp from corrupting their values. Some current implementations seem
+ * to be more restrictive. For example, the HPUX man page for setjmp says
+ * 
+ *   Upon the return from a setjmp() call caused by a longjmp(), the
+ *   values of any non-static local variables belonging to the routine
+ *   from which setjmp() was called are undefined. Code which depends on
+ *   such values is not guaranteed to be portable.
+ * 
+ * I therefore decided on a more limited form of nesting, using a counter
+ * variable (PyFPE_counter) to keep track of any recursion.  If an exception
+ * occurs in an ``inner'' pair of macros, the return will apparently
+ * come from the outermost level.
+ * 
+ */
+
+#ifdef WANT_SIGFPE_HANDLER
+#include <signal.h>
+#include <setjmp.h>
+#include <math.h>
+extern jmp_buf PyFPE_jbuf;
+extern int PyFPE_counter;
+extern double PyFPE_dummy(void *);
+
+#define PyFPE_START_PROTECT(err_string, leave_stmt) \
+if (!PyFPE_counter++ && setjmp(PyFPE_jbuf)) { \
+	PyErr_SetString(PyExc_FloatingPointError, err_string); \
+	PyFPE_counter = 0; \
+	leave_stmt; \
+}
+
+/*
+ * This (following) is a heck of a way to decrement a counter. However,
+ * unless the macro argument is provided, code optimizers will sometimes move
+ * this statement so that it gets executed *before* the unsafe expression
+ * which we're trying to protect.  That pretty well messes things up,
+ * of course.
+ * 
+ * If the expression(s) you're trying to protect don't happen to return a
+ * value, you will need to manufacture a dummy result just to preserve the
+ * correct ordering of statements.  Note that the macro passes the address
+ * of its argument (so you need to give it something which is addressable).
+ * If your expression returns multiple results, pass the last such result
+ * to PyFPE_END_PROTECT.
+ * 
+ * Note that PyFPE_dummy returns a double, which is cast to int.
+ * This seeming insanity is to tickle the Floating Point Unit (FPU).
+ * If an exception has occurred in a preceding floating point operation,
+ * some architectures (notably Intel 80x86) will not deliver the interrupt
+ * until the *next* floating point operation.  This is painful if you've
+ * already decremented PyFPE_counter.
+ */
+#define PyFPE_END_PROTECT(v) PyFPE_counter -= (int)PyFPE_dummy(&(v));
+
+#else
+
+#define PyFPE_START_PROTECT(err_string, leave_stmt)
+#define PyFPE_END_PROTECT(v)
+
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PYFPE_H */
diff --git a/depot_tools/release/win/python_24/include/pygetopt.h b/depot_tools/release/win/python_24/include/pygetopt.h
new file mode 100644
index 0000000..80908be
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pygetopt.h
@@ -0,0 +1,17 @@
+
+#ifndef Py_PYGETOPT_H
+#define Py_PYGETOPT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_DATA(int) _PyOS_opterr;
+PyAPI_DATA(int) _PyOS_optind;
+PyAPI_DATA(char *) _PyOS_optarg;
+
+PyAPI_FUNC(int) _PyOS_GetOpt(int argc, char **argv, char *optstring);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PYGETOPT_H */
diff --git a/depot_tools/release/win/python_24/include/pymactoolbox.h b/depot_tools/release/win/python_24/include/pymactoolbox.h
new file mode 100644
index 0000000..92799e9
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pymactoolbox.h
@@ -0,0 +1,189 @@
+/*
+** pymactoolbox.h - globals defined in mactoolboxglue.c
+*/
+#ifndef Py_PYMACTOOLBOX_H
+#define Py_PYMACTOOLBOX_H
+#ifdef __cplusplus
+	extern "C" {
+#endif
+
+#include <Carbon/Carbon.h>
+#include <QuickTime/QuickTime.h>
+
+/*
+** Helper routines for error codes and such.
+*/
+char *PyMac_StrError(int);			/* strerror with mac errors */
+extern PyObject *PyMac_OSErrException;		/* Exception for OSErr */
+PyObject *PyMac_GetOSErrException(void);	/* Initialize & return it */
+PyObject *PyErr_Mac(PyObject *, int);		/* Exception with a mac error */
+PyObject *PyMac_Error(OSErr);			/* Uses PyMac_GetOSErrException */
+extern OSErr PyMac_GetFullPathname(FSSpec *, char *, int); /* convert
+							      fsspec->path */
+/*
+** These conversion routines are defined in mactoolboxglue.c itself.
+*/
+int PyMac_GetOSType(PyObject *, OSType *);	/* argument parser for OSType */
+PyObject *PyMac_BuildOSType(OSType);		/* Convert OSType to PyObject */
+
+PyObject *PyMac_BuildNumVersion(NumVersion);/* Convert NumVersion to PyObject */
+
+int PyMac_GetStr255(PyObject *, Str255);	/* argument parser for Str255 */
+PyObject *PyMac_BuildStr255(Str255);		/* Convert Str255 to PyObject */
+PyObject *PyMac_BuildOptStr255(Str255);		/* Convert Str255 to PyObject,
+						   NULL to None */
+
+int PyMac_GetRect(PyObject *, Rect *);		/* argument parser for Rect */
+PyObject *PyMac_BuildRect(Rect *);		/* Convert Rect to PyObject */
+
+int PyMac_GetPoint(PyObject *, Point *);	/* argument parser for Point */
+PyObject *PyMac_BuildPoint(Point);		/* Convert Point to PyObject */
+
+int PyMac_GetEventRecord(PyObject *, EventRecord *); /* argument parser for
+							EventRecord */
+PyObject *PyMac_BuildEventRecord(EventRecord *); /* Convert EventRecord to
+						    PyObject */
+
+int PyMac_GetFixed(PyObject *, Fixed *);	/* argument parser for Fixed */
+PyObject *PyMac_BuildFixed(Fixed);		/* Convert Fixed to PyObject */
+int PyMac_Getwide(PyObject *, wide *);		/* argument parser for wide */
+PyObject *PyMac_Buildwide(wide *);		/* Convert wide to PyObject */
+
+/*
+** The rest of the routines are implemented by extension modules. If they are
+** dynamically loaded mactoolboxglue will contain a stub implementation of the
+** routine, which imports the module, whereupon the module's init routine will
+** communicate the routine pointer back to the stub.
+** If USE_TOOLBOX_OBJECT_GLUE is not defined there is no glue code, and the
+** extension modules simply declare the routine. This is the case for static
+** builds (and could be the case for MacPython CFM builds, because CFM extension
+** modules can reference each other without problems).
+*/
+
+#ifdef USE_TOOLBOX_OBJECT_GLUE
+/*
+** These macros are used in the module init code. If we use toolbox object glue
+** it sets the function pointer to point to the real function.
+*/
+#define PyMac_INIT_TOOLBOX_OBJECT_NEW(object, rtn) { \
+	extern PyObject *(*PyMacGluePtr_##rtn)(object); \
+	PyMacGluePtr_##rtn = _##rtn; \
+}
+#define PyMac_INIT_TOOLBOX_OBJECT_CONVERT(object, rtn) { \
+	extern int (*PyMacGluePtr_##rtn)(PyObject *, object *); \
+	PyMacGluePtr_##rtn = _##rtn; \
+}
+#else
+/*
+** If we don't use toolbox object glue the init macros are empty. Moreover, we define
+** _xxx_New to be the same as xxx_New, and the code in mactoolboxglue isn't included.
+*/
+#define PyMac_INIT_TOOLBOX_OBJECT_NEW(object, rtn)
+#define PyMac_INIT_TOOLBOX_OBJECT_CONVERT(object, rtn)
+#endif /* USE_TOOLBOX_OBJECT_GLUE */
+
+/* macfs exports */
+int PyMac_GetFSSpec(PyObject *, FSSpec *);	/* argument parser for FSSpec */
+PyObject *PyMac_BuildFSSpec(FSSpec *);		/* Convert FSSpec to PyObject */
+
+int PyMac_GetFSRef(PyObject *, FSRef *);	/* argument parser for FSRef */
+PyObject *PyMac_BuildFSRef(FSRef *);		/* Convert FSRef to PyObject */
+
+/* AE exports */
+extern PyObject *AEDesc_New(AppleEvent *); /* XXXX Why passed by address?? */
+extern PyObject *AEDesc_NewBorrowed(AppleEvent *);
+extern int AEDesc_Convert(PyObject *, AppleEvent *);
+
+/* Cm exports */
+extern PyObject *CmpObj_New(Component);
+extern int CmpObj_Convert(PyObject *, Component *);
+extern PyObject *CmpInstObj_New(ComponentInstance);
+extern int CmpInstObj_Convert(PyObject *, ComponentInstance *);
+
+/* Ctl exports */
+extern PyObject *CtlObj_New(ControlHandle);
+extern int CtlObj_Convert(PyObject *, ControlHandle *);
+
+/* Dlg exports */
+extern PyObject *DlgObj_New(DialogPtr);
+extern int DlgObj_Convert(PyObject *, DialogPtr *);
+extern PyObject *DlgObj_WhichDialog(DialogPtr);
+
+/* Drag exports */
+extern PyObject *DragObj_New(DragReference);
+extern int DragObj_Convert(PyObject *, DragReference *);
+
+/* List exports */
+extern PyObject *ListObj_New(ListHandle);
+extern int ListObj_Convert(PyObject *, ListHandle *);
+
+/* Menu exports */
+extern PyObject *MenuObj_New(MenuHandle);
+extern int MenuObj_Convert(PyObject *, MenuHandle *);
+
+/* Qd exports */
+extern PyObject *GrafObj_New(GrafPtr);
+extern int GrafObj_Convert(PyObject *, GrafPtr *);
+extern PyObject *BMObj_New(BitMapPtr);
+extern int BMObj_Convert(PyObject *, BitMapPtr *);
+extern PyObject *QdRGB_New(RGBColor *);
+extern int QdRGB_Convert(PyObject *, RGBColor *);
+
+/* Qdoffs exports */
+extern PyObject *GWorldObj_New(GWorldPtr);
+extern int GWorldObj_Convert(PyObject *, GWorldPtr *);
+
+/* Qt exports */
+extern PyObject *TrackObj_New(Track);
+extern int TrackObj_Convert(PyObject *, Track *);
+extern PyObject *MovieObj_New(Movie);
+extern int MovieObj_Convert(PyObject *, Movie *);
+extern PyObject *MovieCtlObj_New(MovieController);
+extern int MovieCtlObj_Convert(PyObject *, MovieController *);
+extern PyObject *TimeBaseObj_New(TimeBase);
+extern int TimeBaseObj_Convert(PyObject *, TimeBase *);
+extern PyObject *UserDataObj_New(UserData);
+extern int UserDataObj_Convert(PyObject *, UserData *);
+extern PyObject *MediaObj_New(Media);
+extern int MediaObj_Convert(PyObject *, Media *);
+
+/* Res exports */
+extern PyObject *ResObj_New(Handle);
+extern int ResObj_Convert(PyObject *, Handle *);
+extern PyObject *OptResObj_New(Handle);
+extern int OptResObj_Convert(PyObject *, Handle *);
+
+/* TE exports */
+extern PyObject *TEObj_New(TEHandle);
+extern int TEObj_Convert(PyObject *, TEHandle *);
+
+/* Win exports */
+extern PyObject *WinObj_New(WindowPtr);
+extern int WinObj_Convert(PyObject *, WindowPtr *);
+extern PyObject *WinObj_WhichWindow(WindowPtr);
+
+/* CF exports */
+extern PyObject *CFObj_New(CFTypeRef);
+extern int CFObj_Convert(PyObject *, CFTypeRef *);
+extern PyObject *CFTypeRefObj_New(CFTypeRef);
+extern int CFTypeRefObj_Convert(PyObject *, CFTypeRef *);
+extern PyObject *CFStringRefObj_New(CFStringRef);
+extern int CFStringRefObj_Convert(PyObject *, CFStringRef *);
+extern PyObject *CFMutableStringRefObj_New(CFMutableStringRef);
+extern int CFMutableStringRefObj_Convert(PyObject *, CFMutableStringRef *);
+extern PyObject *CFArrayRefObj_New(CFArrayRef);
+extern int CFArrayRefObj_Convert(PyObject *, CFArrayRef *);
+extern PyObject *CFMutableArrayRefObj_New(CFMutableArrayRef);
+extern int CFMutableArrayRefObj_Convert(PyObject *, CFMutableArrayRef *);
+extern PyObject *CFDictionaryRefObj_New(CFDictionaryRef);
+extern int CFDictionaryRefObj_Convert(PyObject *, CFDictionaryRef *);
+extern PyObject *CFMutableDictionaryRefObj_New(CFMutableDictionaryRef);
+extern int CFMutableDictionaryRefObj_Convert(PyObject *, CFMutableDictionaryRef *);
+extern PyObject *CFURLRefObj_New(CFURLRef);
+extern int CFURLRefObj_Convert(PyObject *, CFURLRef *);
+extern int OptionalCFURLRefObj_Convert(PyObject *, CFURLRef *);
+
+#ifdef __cplusplus
+	}
+#endif
+#endif
diff --git a/depot_tools/release/win/python_24/include/pymem.h b/depot_tools/release/win/python_24/include/pymem.h
new file mode 100644
index 0000000..f8aef29
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pymem.h
@@ -0,0 +1,108 @@
+/* The PyMem_ family:  low-level memory allocation interfaces.
+   See objimpl.h for the PyObject_ memory family.
+*/
+
+#ifndef Py_PYMEM_H
+#define Py_PYMEM_H
+
+#include "pyport.h"
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* BEWARE:
+
+   Each interface exports both functions and macros.  Extension modules should
+   use the functions, to ensure binary compatibility across Python versions.
+   Because the Python implementation is free to change internal details, and
+   the macros may (or may not) expose details for speed, if you do use the
+   macros you must recompile your extensions with each Python release.
+
+   Never mix calls to PyMem_ with calls to the platform malloc/realloc/
+   calloc/free.  For example, on Windows different DLLs may end up using
+   different heaps, and if you use PyMem_Malloc you'll get the memory from the
+   heap used by the Python DLL; it could be a disaster if you free()'ed that
+   directly in your own extension.  Using PyMem_Free instead ensures Python
+   can return the memory to the proper heap.  As another example, in
+   PYMALLOC_DEBUG mode, Python wraps all calls to all PyMem_ and PyObject_
+   memory functions in special debugging wrappers that add additional
+   debugging info to dynamic memory blocks.  The system routines have no idea
+   what to do with that stuff, and the Python wrappers have no idea what to do
+   with raw blocks obtained directly by the system routines then.
+*/
+
+/*
+ * Raw memory interface
+ * ====================
+ */
+
+/* Functions
+
+   Functions supplying platform-independent semantics for malloc/realloc/
+   free.  These functions make sure that allocating 0 bytes returns a distinct
+   non-NULL pointer (whenever possible -- if we're flat out of memory, NULL
+   may be returned), even if the platform malloc and realloc don't.
+   Returned pointers must be checked for NULL explicitly.  No action is
+   performed on failure (no exception is set, no warning is printed, etc).
+*/
+
+PyAPI_FUNC(void *) PyMem_Malloc(size_t);
+PyAPI_FUNC(void *) PyMem_Realloc(void *, size_t);
+PyAPI_FUNC(void) PyMem_Free(void *);
+
+/* Starting from Python 1.6, the wrappers Py_{Malloc,Realloc,Free} are
+   no longer supported. They used to call PyErr_NoMemory() on failure. */
+
+/* Macros. */
+#ifdef PYMALLOC_DEBUG
+/* Redirect all memory operations to Python's debugging allocator. */
+#define PyMem_MALLOC		PyObject_MALLOC
+#define PyMem_REALLOC		PyObject_REALLOC
+
+#else	/* ! PYMALLOC_DEBUG */
+
+/* PyMem_MALLOC(0) means malloc(1). Some systems would return NULL
+   for malloc(0), which would be treated as an error. Some platforms
+   would return a pointer with no memory behind it, which would break
+   pymalloc. To solve these problems, allocate an extra byte. */
+#define PyMem_MALLOC(n)         malloc((n) ? (n) : 1)
+#define PyMem_REALLOC(p, n)     realloc((p), (n) ? (n) : 1)
+
+#endif	/* PYMALLOC_DEBUG */
+
+/* In order to avoid breaking old code mixing PyObject_{New, NEW} with
+   PyMem_{Del, DEL} and PyMem_{Free, FREE}, the PyMem "release memory"
+   functions have to be redirected to the object deallocator. */
+#define PyMem_FREE           	PyObject_FREE
+
+/*
+ * Type-oriented memory interface
+ * ==============================
+ *
+ * These are carried along for historical reasons.  There's rarely a good
+ * reason to use them anymore (you can just as easily do the multiply and
+ * cast yourself).
+ */
+
+#define PyMem_New(type, n) \
+	( (type *) PyMem_Malloc((n) * sizeof(type)) )
+#define PyMem_NEW(type, n) \
+	( (type *) PyMem_MALLOC((n) * sizeof(type)) )
+
+#define PyMem_Resize(p, type, n) \
+	( (p) = (type *) PyMem_Realloc((p), (n) * sizeof(type)) )
+#define PyMem_RESIZE(p, type, n) \
+	( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) )
+
+/* In order to avoid breaking old code mixing PyObject_{New, NEW} with
+   PyMem_{Del, DEL} and PyMem_{Free, FREE}, the PyMem "release memory"
+   functions have to be redirected to the object deallocator. */
+#define PyMem_Del		PyObject_Free
+#define PyMem_DEL		PyObject_FREE
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* !Py_PYMEM_H */
diff --git a/depot_tools/release/win/python_24/include/pyport.h b/depot_tools/release/win/python_24/include/pyport.h
new file mode 100644
index 0000000..f71b9f4a
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pyport.h
@@ -0,0 +1,637 @@
+#ifndef Py_PYPORT_H
+#define Py_PYPORT_H
+
+#include "pyconfig.h" /* include for defines */
+
+/**************************************************************************
+Symbols and macros to supply platform-independent interfaces to basic
+C language & library operations whose spellings vary across platforms.
+
+Please try to make documentation here as clear as possible:  by definition,
+the stuff here is trying to illuminate C's darkest corners.
+
+Config #defines referenced here:
+
+SIGNED_RIGHT_SHIFT_ZERO_FILLS
+Meaning:  To be defined iff i>>j does not extend the sign bit when i is a
+          signed integral type and i < 0.
+Used in:  Py_ARITHMETIC_RIGHT_SHIFT
+
+Py_DEBUG
+Meaning:  Extra checks compiled in for debug mode.
+Used in:  Py_SAFE_DOWNCAST
+
+HAVE_UINTPTR_T
+Meaning:  The C9X type uintptr_t is supported by the compiler
+Used in:  Py_uintptr_t
+
+HAVE_LONG_LONG
+Meaning:  The compiler supports the C type "long long"
+Used in:  PY_LONG_LONG
+
+**************************************************************************/
+
+
+/* For backward compatibility only. Obsolete, do not use. */
+#ifdef HAVE_PROTOTYPES
+#define Py_PROTO(x) x
+#else
+#define Py_PROTO(x) ()
+#endif
+#ifndef Py_FPROTO
+#define Py_FPROTO(x) Py_PROTO(x)
+#endif
+
+/* typedefs for some C9X-defined synonyms for integral types.
+ *
+ * The names in Python are exactly the same as the C9X names, except with a
+ * Py_ prefix.  Until C9X is universally implemented, this is the only way
+ * to ensure that Python gets reliable names that don't conflict with names
+ * in non-Python code that are playing their own tricks to define the C9X
+ * names.
+ *
+ * NOTE: don't go nuts here!  Python has no use for *most* of the C9X
+ * integral synonyms.  Only define the ones we actually need.
+ */
+
+#ifdef HAVE_LONG_LONG
+#ifndef PY_LONG_LONG
+#define PY_LONG_LONG long long
+#endif
+#endif /* HAVE_LONG_LONG */
+
+/* uintptr_t is the C9X name for an unsigned integral type such that a
+ * legitimate void* can be cast to uintptr_t and then back to void* again
+ * without loss of information.  Similarly for intptr_t, wrt a signed
+ * integral type.
+ */
+#ifdef HAVE_UINTPTR_T
+typedef uintptr_t	Py_uintptr_t;
+typedef intptr_t	Py_intptr_t;
+
+#elif SIZEOF_VOID_P <= SIZEOF_INT
+typedef unsigned int	Py_uintptr_t;
+typedef int		Py_intptr_t;
+
+#elif SIZEOF_VOID_P <= SIZEOF_LONG
+typedef unsigned long	Py_uintptr_t;
+typedef long		Py_intptr_t;
+
+#elif defined(HAVE_LONG_LONG) && (SIZEOF_VOID_P <= SIZEOF_LONG_LONG)
+typedef unsigned PY_LONG_LONG	Py_uintptr_t;
+typedef PY_LONG_LONG		Py_intptr_t;
+
+#else
+#   error "Python needs a typedef for Py_uintptr_t in pyport.h."
+#endif /* HAVE_UINTPTR_T */
+
+#include <stdlib.h>
+
+#include <math.h> /* Moved here from the math section, before extern "C" */
+
+/********************************************
+ * WRAPPER FOR <time.h> and/or <sys/time.h> *
+ ********************************************/
+
+#ifdef TIME_WITH_SYS_TIME
+#include <sys/time.h>
+#include <time.h>
+#else /* !TIME_WITH_SYS_TIME */
+#ifdef HAVE_SYS_TIME_H
+#include <sys/time.h>
+#else /* !HAVE_SYS_TIME_H */
+#include <time.h>
+#endif /* !HAVE_SYS_TIME_H */
+#endif /* !TIME_WITH_SYS_TIME */
+
+
+/******************************
+ * WRAPPER FOR <sys/select.h> *
+ ******************************/
+
+/* NB caller must include <sys/types.h> */
+
+#ifdef HAVE_SYS_SELECT_H
+
+#include <sys/select.h>
+
+#endif /* !HAVE_SYS_SELECT_H */
+
+/*******************************
+ * stat() and fstat() fiddling *
+ *******************************/
+
+/* We expect that stat and fstat exist on most systems.
+ *  It's confirmed on Unix, Mac and Windows.
+ *  If you don't have them, add
+ *      #define DONT_HAVE_STAT
+ * and/or
+ *      #define DONT_HAVE_FSTAT
+ * to your pyconfig.h. Python code beyond this should check HAVE_STAT and
+ * HAVE_FSTAT instead.
+ * Also
+ *      #define DONT_HAVE_SYS_STAT_H
+ * if <sys/stat.h> doesn't exist on your platform, and
+ *      #define HAVE_STAT_H
+ * if <stat.h> does (don't look at me -- this mess is inherited).
+ */
+#ifndef DONT_HAVE_STAT
+#define HAVE_STAT
+#endif
+
+#ifndef DONT_HAVE_FSTAT
+#define HAVE_FSTAT
+#endif
+
+#ifdef RISCOS
+#include <sys/types.h>
+#include "unixstuff.h"
+#endif
+
+#ifndef DONT_HAVE_SYS_STAT_H
+#if defined(PYOS_OS2) && defined(PYCC_GCC)
+#include <sys/types.h>
+#endif
+#include <sys/stat.h>
+#elif defined(HAVE_STAT_H)
+#include <stat.h>
+#endif
+
+#if defined(PYCC_VACPP)
+/* VisualAge C/C++ Failed to Define MountType Field in sys/stat.h */
+#define S_IFMT (S_IFDIR|S_IFCHR|S_IFREG)
+#endif
+
+#ifndef S_ISREG
+#define S_ISREG(x) (((x) & S_IFMT) == S_IFREG)
+#endif
+
+#ifndef S_ISDIR
+#define S_ISDIR(x) (((x) & S_IFMT) == S_IFDIR)
+#endif
+
+
+#ifdef __cplusplus
+/* Move this down here since some C++ #include's don't like to be included
+   inside an extern "C" */
+extern "C" {
+#endif
+
+
+/* Py_ARITHMETIC_RIGHT_SHIFT
+ * C doesn't define whether a right-shift of a signed integer sign-extends
+ * or zero-fills.  Here a macro to force sign extension:
+ * Py_ARITHMETIC_RIGHT_SHIFT(TYPE, I, J)
+ *    Return I >> J, forcing sign extension.
+ * Requirements:
+ *    I is of basic signed type TYPE (char, short, int, long, or long long).
+ *    TYPE is one of char, short, int, long, or long long, although long long
+ *    must not be used except on platforms that support it.
+ *    J is an integer >= 0 and strictly less than the number of bits in TYPE
+ *    (because C doesn't define what happens for J outside that range either).
+ * Caution:
+ *    I may be evaluated more than once.
+ */
+#ifdef SIGNED_RIGHT_SHIFT_ZERO_FILLS
+#define Py_ARITHMETIC_RIGHT_SHIFT(TYPE, I, J) \
+	((I) < 0 ? ~((~(unsigned TYPE)(I)) >> (J)) : (I) >> (J))
+#else
+#define Py_ARITHMETIC_RIGHT_SHIFT(TYPE, I, J) ((I) >> (J))
+#endif
+
+/* Py_FORCE_EXPANSION(X)
+ * "Simply" returns its argument.  However, macro expansions within the
+ * argument are evaluated.  This unfortunate trickery is needed to get
+ * token-pasting to work as desired in some cases.
+ */
+#define Py_FORCE_EXPANSION(X) X
+
+/* Py_SAFE_DOWNCAST(VALUE, WIDE, NARROW)
+ * Cast VALUE to type NARROW from type WIDE.  In Py_DEBUG mode, this
+ * assert-fails if any information is lost.
+ * Caution:
+ *    VALUE may be evaluated more than once.
+ */
+#ifdef Py_DEBUG
+#define Py_SAFE_DOWNCAST(VALUE, WIDE, NARROW) \
+	(assert((WIDE)(NARROW)(VALUE) == (VALUE)), (NARROW)(VALUE))
+#else
+#define Py_SAFE_DOWNCAST(VALUE, WIDE, NARROW) (NARROW)(VALUE)
+#endif
+
+/* Py_IS_NAN(X)
+ * Return 1 if float or double arg is a NaN, else 0.
+ * Caution:
+ *     X is evaluated more than once.
+ *     This may not work on all platforms.  Each platform has *some*
+ *     way to spell this, though -- override in pyconfig.h if you have
+ *     a platform where it doesn't work.
+ */
+#ifndef Py_IS_NAN
+#define Py_IS_NAN(X) ((X) != (X))
+#endif
+
+/* Py_IS_INFINITY(X)
+ * Return 1 if float or double arg is an infinity, else 0.
+ * Caution:
+ *    X is evaluated more than once.
+ *    This implementation may set the underflow flag if |X| is very small;
+ *    it really can't be implemented correctly (& easily) before C99.
+ *    Override in pyconfig.h if you have a better spelling on your platform.
+ */
+#ifndef Py_IS_INFINITY
+#define Py_IS_INFINITY(X) ((X) && (X)*0.5 == (X))
+#endif
+
+/* HUGE_VAL is supposed to expand to a positive double infinity.  Python
+ * uses Py_HUGE_VAL instead because some platforms are broken in this
+ * respect.  We used to embed code in pyport.h to try to worm around that,
+ * but different platforms are broken in conflicting ways.  If you're on
+ * a platform where HUGE_VAL is defined incorrectly, fiddle your Python
+ * config to #define Py_HUGE_VAL to something that works on your platform.
+ */
+#ifndef Py_HUGE_VAL
+#define Py_HUGE_VAL HUGE_VAL
+#endif
+
+/* Py_OVERFLOWED(X)
+ * Return 1 iff a libm function overflowed.  Set errno to 0 before calling
+ * a libm function, and invoke this macro after, passing the function
+ * result.
+ * Caution:
+ *    This isn't reliable.  C99 no longer requires libm to set errno under
+ *	  any exceptional condition, but does require +- HUGE_VAL return
+ *	  values on overflow.  A 754 box *probably* maps HUGE_VAL to a
+ *	  double infinity, and we're cool if that's so, unless the input
+ *	  was an infinity and an infinity is the expected result.  A C89
+ *	  system sets errno to ERANGE, so we check for that too.  We're
+ *	  out of luck if a C99 754 box doesn't map HUGE_VAL to +Inf, or
+ *	  if the returned result is a NaN, or if a C89 box returns HUGE_VAL
+ *	  in non-overflow cases.
+ *    X is evaluated more than once.
+ * Some platforms have better way to spell this, so expect some #ifdef'ery.
+ *
+ * OpenBSD uses 'isinf()' because a compiler bug on that platform causes
+ * the longer macro version to be mis-compiled. This isn't optimal, and
+ * should be removed once a newer compiler is available on that platform.
+ * The system that had the failure was running OpenBSD 3.2 on Intel, with
+ * gcc 2.95.3.
+ *
+ * According to Tim's checkin, the FreeBSD systems use isinf() to work
+ * around a FPE bug on that platform.
+ */
+#if defined(__FreeBSD__) || defined(__OpenBSD__)
+#define Py_OVERFLOWED(X) isinf(X)
+#else
+#define Py_OVERFLOWED(X) ((X) != 0.0 && (errno == ERANGE ||    \
+					 (X) == Py_HUGE_VAL || \
+					 (X) == -Py_HUGE_VAL))
+#endif
+
+/* Py_SET_ERRNO_ON_MATH_ERROR(x)
+ * If a libm function did not set errno, but it looks like the result
+ * overflowed or not-a-number, set errno to ERANGE or EDOM.  Set errno
+ * to 0 before calling a libm function, and invoke this macro after,
+ * passing the function result.
+ * Caution:
+ *    This isn't reliable.  See Py_OVERFLOWED comments.
+ *    X is evaluated more than once.
+ */
+#if defined(__FreeBSD__) || defined(__OpenBSD__)
+#define _Py_SET_EDOM_FOR_NAN(X) if (isnan(X)) errno = EDOM;
+#else
+#define _Py_SET_EDOM_FOR_NAN(X) ;
+#endif
+#define Py_SET_ERRNO_ON_MATH_ERROR(X) \
+	do { \
+		if (errno == 0) { \
+			if ((X) == Py_HUGE_VAL || (X) == -Py_HUGE_VAL) \
+				errno = ERANGE; \
+			else _Py_SET_EDOM_FOR_NAN(X) \
+		} \
+	} while(0)
+
+/* Py_SET_ERANGE_ON_OVERFLOW(x)
+ * An alias of Py_SET_ERRNO_ON_MATH_ERROR for backward-compatibility.
+ */
+#define Py_SET_ERANGE_IF_OVERFLOW(X) Py_SET_ERRNO_ON_MATH_ERROR(X)
+
+/* Py_ADJUST_ERANGE1(x)
+ * Py_ADJUST_ERANGE2(x, y)
+ * Set errno to 0 before calling a libm function, and invoke one of these
+ * macros after, passing the function result(s) (Py_ADJUST_ERANGE2 is useful
+ * for functions returning complex results).  This makes two kinds of
+ * adjustments to errno:  (A) If it looks like the platform libm set
+ * errno=ERANGE due to underflow, clear errno. (B) If it looks like the
+ * platform libm overflowed but didn't set errno, force errno to ERANGE.  In
+ * effect, we're trying to force a useful implementation of C89 errno
+ * behavior.
+ * Caution:
+ *    This isn't reliable.  See Py_OVERFLOWED comments.
+ *    X and Y may be evaluated more than once.
+ */
+#define Py_ADJUST_ERANGE1(X)						\
+	do {								\
+		if (errno == 0) {					\
+			if ((X) == Py_HUGE_VAL || (X) == -Py_HUGE_VAL)	\
+				errno = ERANGE;				\
+		}							\
+		else if (errno == ERANGE && (X) == 0.0)			\
+			errno = 0;					\
+	} while(0)
+
+#define Py_ADJUST_ERANGE2(X, Y)						\
+	do {								\
+		if ((X) == Py_HUGE_VAL || (X) == -Py_HUGE_VAL ||	\
+		    (Y) == Py_HUGE_VAL || (Y) == -Py_HUGE_VAL) {	\
+				if (errno == 0)				\
+					errno = ERANGE;			\
+		}							\
+		else if (errno == ERANGE)				\
+			errno = 0;					\
+	} while(0)
+
+/* Py_DEPRECATED(version)
+ * Declare a variable, type, or function deprecated.
+ * Usage:
+ *    extern int old_var Py_DEPRECATED(2.3);
+ *    typedef int T1 Py_DEPRECATED(2.4);
+ *    extern int x() Py_DEPRECATED(2.5);
+ */
+#if defined(__GNUC__) && (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1)
+#define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
+#else
+#define Py_DEPRECATED(VERSION_UNUSED)
+#endif
+
+/**************************************************************************
+Prototypes that are missing from the standard include files on some systems
+(and possibly only some versions of such systems.)
+
+Please be conservative with adding new ones, document them and enclose them
+in platform-specific #ifdefs.
+**************************************************************************/
+
+#ifdef SOLARIS
+/* Unchecked */
+extern int gethostname(char *, int);
+#endif
+
+#ifdef __BEOS__
+/* Unchecked */
+/* It's in the libs, but not the headers... - [cjh] */
+int shutdown( int, int );
+#endif
+
+#ifdef HAVE__GETPTY
+#include <sys/types.h>		/* we need to import mode_t */
+extern char * _getpty(int *, int, mode_t, int);
+#endif
+
+#if defined(HAVE_OPENPTY) || defined(HAVE_FORKPTY)
+#if !defined(HAVE_PTY_H) && !defined(HAVE_LIBUTIL_H)
+/* BSDI does not supply a prototype for the 'openpty' and 'forkpty'
+   functions, even though they are included in libutil. */
+#include <termios.h>
+extern int openpty(int *, int *, char *, struct termios *, struct winsize *);
+extern int forkpty(int *, char *, struct termios *, struct winsize *);
+#endif /* !defined(HAVE_PTY_H) && !defined(HAVE_LIBUTIL_H) */
+#endif /* defined(HAVE_OPENPTY) || defined(HAVE_FORKPTY) */
+
+
+/* These are pulled from various places. It isn't obvious on what platforms
+   they are necessary, nor what the exact prototype should look like (which
+   is likely to vary between platforms!) If you find you need one of these
+   declarations, please move them to a platform-specific block and include
+   proper prototypes. */
+#if 0
+
+/* From Modules/resource.c */
+extern int getrusage();
+extern int getpagesize();
+
+/* From Python/sysmodule.c and Modules/posixmodule.c */
+extern int fclose(FILE *);
+
+/* From Modules/posixmodule.c */
+extern int fdatasync(int);
+#endif /* 0 */
+
+
+/************************
+ * WRAPPER FOR <math.h> *
+ ************************/
+
+#ifndef HAVE_HYPOT
+extern double hypot(double, double);
+#endif
+
+
+/*******************************************************************
+On 4.4BSD-descendants, ctype functions serve the whole range of
+wchar_t character set rather than single byte code points only.
+This characteristic can break some operations of string object
+including str.upper() and str.split() on UTF-8 locales.  This
+workaround was provided by Tim Robbins of FreeBSD project.  He said
+the incompatibility will be fixed in FreeBSD 6.
+********************************************************************/
+
+#ifdef __FreeBSD__
+#include <osreldate.h>
+#if __FreeBSD_version > 500039
+#include <ctype.h>
+#include <wctype.h>
+#undef isalnum
+#define isalnum(c) iswalnum(btowc(c))
+#undef isalpha
+#define isalpha(c) iswalpha(btowc(c))
+#undef islower
+#define islower(c) iswlower(btowc(c))
+#undef isspace
+#define isspace(c) iswspace(btowc(c))
+#undef isupper
+#define isupper(c) iswupper(btowc(c))
+#undef tolower
+#define tolower(c) towlower(btowc(c))
+#undef toupper
+#define toupper(c) towupper(btowc(c))
+#endif
+#endif
+
+
+/* Declarations for symbol visibility.
+
+  PyAPI_FUNC(type): Declares a public Python API function and return type
+  PyAPI_DATA(type): Declares public Python data and its type
+  PyMODINIT_FUNC:   A Python module init function.  If these functions are
+                    inside the Python core, they are private to the core.
+                    If in an extension module, it may be declared with
+                    external linkage depending on the platform.
+
+  As a number of platforms support/require "__declspec(dllimport/dllexport)",
+  we support a HAVE_DECLSPEC_DLL macro to save duplication.
+*/
+
+/*
+  All windows ports, except cygwin, are handled in PC/pyconfig.h.
+
+  BeOS and cygwin are the only other autoconf platform requiring special
+  linkage handling and both of these use __declspec().
+*/
+#if defined(__CYGWIN__) || defined(__BEOS__)
+#	define HAVE_DECLSPEC_DLL
+#endif
+
+/* only get special linkage if built as shared or platform is Cygwin */
+#if defined(Py_ENABLE_SHARED) || defined(__CYGWIN__)
+#	if defined(HAVE_DECLSPEC_DLL)
+#		ifdef Py_BUILD_CORE
+#			define PyAPI_FUNC(RTYPE) __declspec(dllexport) RTYPE
+#			define PyAPI_DATA(RTYPE) extern __declspec(dllexport) RTYPE
+			/* module init functions inside the core need no external linkage */
+			/* except for Cygwin to handle embedding (FIXME: BeOS too?) */
+#			if defined(__CYGWIN__)
+#				define PyMODINIT_FUNC __declspec(dllexport) void
+#			else /* __CYGWIN__ */
+#				define PyMODINIT_FUNC void
+#			endif /* __CYGWIN__ */
+#		else /* Py_BUILD_CORE */
+			/* Building an extension module, or an embedded situation */
+			/* public Python functions and data are imported */
+			/* Under Cygwin, auto-import functions to prevent compilation */
+			/* failures similar to http://python.org/doc/FAQ.html#3.24 */
+#			if !defined(__CYGWIN__)
+#				define PyAPI_FUNC(RTYPE) __declspec(dllimport) RTYPE
+#			endif /* !__CYGWIN__ */
+#			define PyAPI_DATA(RTYPE) extern __declspec(dllimport) RTYPE
+			/* module init functions outside the core must be exported */
+#			if defined(__cplusplus)
+#				define PyMODINIT_FUNC extern "C" __declspec(dllexport) void
+#			else /* __cplusplus */
+#				define PyMODINIT_FUNC __declspec(dllexport) void
+#			endif /* __cplusplus */
+#		endif /* Py_BUILD_CORE */
+#	endif /* HAVE_DECLSPEC */
+#endif /* Py_ENABLE_SHARED */
+
+/* If no external linkage macros defined by now, create defaults */
+#ifndef PyAPI_FUNC
+#	define PyAPI_FUNC(RTYPE) RTYPE
+#endif
+#ifndef PyAPI_DATA
+#	define PyAPI_DATA(RTYPE) extern RTYPE
+#endif
+#ifndef PyMODINIT_FUNC
+#	if defined(__cplusplus)
+#		define PyMODINIT_FUNC extern "C" void
+#	else /* __cplusplus */
+#		define PyMODINIT_FUNC void
+#	endif /* __cplusplus */
+#endif
+
+/* Deprecated DL_IMPORT and DL_EXPORT macros */
+#if defined(Py_ENABLE_SHARED) && defined (HAVE_DECLSPEC_DLL)
+#	if defined(Py_BUILD_CORE)
+#		define DL_IMPORT(RTYPE) __declspec(dllexport) RTYPE
+#		define DL_EXPORT(RTYPE) __declspec(dllexport) RTYPE
+#	else
+#		define DL_IMPORT(RTYPE) __declspec(dllimport) RTYPE
+#		define DL_EXPORT(RTYPE) __declspec(dllexport) RTYPE
+#	endif
+#endif
+#ifndef DL_EXPORT
+#	define DL_EXPORT(RTYPE) RTYPE
+#endif
+#ifndef DL_IMPORT
+#	define DL_IMPORT(RTYPE) RTYPE
+#endif
+/* End of deprecated DL_* macros */
+
+/* If the fd manipulation macros aren't defined,
+   here is a set that should do the job */
+
+#if 0 /* disabled and probably obsolete */
+
+#ifndef	FD_SETSIZE
+#define	FD_SETSIZE	256
+#endif
+
+#ifndef FD_SET
+
+typedef long fd_mask;
+
+#define NFDBITS	(sizeof(fd_mask) * NBBY)	/* bits per mask */
+#ifndef howmany
+#define	howmany(x, y)	(((x)+((y)-1))/(y))
+#endif /* howmany */
+
+typedef	struct fd_set {
+	fd_mask	fds_bits[howmany(FD_SETSIZE, NFDBITS)];
+} fd_set;
+
+#define	FD_SET(n, p)	((p)->fds_bits[(n)/NFDBITS] |= (1 << ((n) % NFDBITS)))
+#define	FD_CLR(n, p)	((p)->fds_bits[(n)/NFDBITS] &= ~(1 << ((n) % NFDBITS)))
+#define	FD_ISSET(n, p)	((p)->fds_bits[(n)/NFDBITS] & (1 << ((n) % NFDBITS)))
+#define FD_ZERO(p)	memset((char *)(p), '\0', sizeof(*(p)))
+
+#endif /* FD_SET */
+
+#endif /* fd manipulation macros */
+
+
+/* limits.h constants that may be missing */
+
+#ifndef INT_MAX
+#define INT_MAX 2147483647
+#endif
+
+#ifndef LONG_MAX
+#if SIZEOF_LONG == 4
+#define LONG_MAX 0X7FFFFFFFL
+#elif SIZEOF_LONG == 8
+#define LONG_MAX 0X7FFFFFFFFFFFFFFFL
+#else
+#error "could not set LONG_MAX in pyport.h"
+#endif
+#endif
+
+#ifndef LONG_MIN
+#define LONG_MIN (-LONG_MAX-1)
+#endif
+
+#ifndef LONG_BIT
+#define LONG_BIT (8 * SIZEOF_LONG)
+#endif
+
+#if LONG_BIT != 8 * SIZEOF_LONG
+/* 04-Oct-2000 LONG_BIT is apparently (mis)defined as 64 on some recent
+ * 32-bit platforms using gcc.  We try to catch that here at compile-time
+ * rather than waiting for integer multiplication to trigger bogus
+ * overflows.
+ */
+#error "LONG_BIT definition appears wrong for platform (bad gcc/glibc config?)."
+#endif
+
+#ifdef __cplusplus
+}
+#endif
+
+/*
+ * Hide GCC attributes from compilers that don't support them.
+ */
+#if (!defined(__GNUC__) || __GNUC__ < 2 || \
+     (__GNUC__ == 2 && __GNUC_MINOR__ < 7) ) && \
+    !defined(RISCOS)
+#define Py_GCC_ATTRIBUTE(x)
+#else
+#define Py_GCC_ATTRIBUTE(x) __attribute__(x)
+#endif
+
+/* Eliminate end-of-loop code not reached warnings from SunPro C
+ * when using do{...}while(0) macros
+ */
+#ifdef __SUNPRO_C
+#pragma error_messages (off,E_END_OF_LOOP_CODE_NOT_REACHED)
+#endif
+
+#endif /* Py_PYPORT_H */
diff --git a/depot_tools/release/win/python_24/include/pystate.h b/depot_tools/release/win/python_24/include/pystate.h
new file mode 100644
index 0000000..1ff2626
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pystate.h
@@ -0,0 +1,185 @@
+
+/* Thread and interpreter state structures and their interfaces */
+
+
+#ifndef Py_PYSTATE_H
+#define Py_PYSTATE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* State shared between threads */
+
+struct _ts; /* Forward */
+struct _is; /* Forward */
+
+typedef struct _is {
+
+    struct _is *next;
+    struct _ts *tstate_head;
+
+    PyObject *modules;
+    PyObject *sysdict;
+    PyObject *builtins;
+
+    PyObject *codec_search_path;
+    PyObject *codec_search_cache;
+    PyObject *codec_error_registry;
+
+#ifdef HAVE_DLOPEN
+    int dlopenflags;
+#endif
+#ifdef WITH_TSC
+    int tscdump;
+#endif
+
+} PyInterpreterState;
+
+
+/* State unique per thread */
+
+struct _frame; /* Avoid including frameobject.h */
+
+/* Py_tracefunc return -1 when raising an exception, or 0 for success. */
+typedef int (*Py_tracefunc)(PyObject *, struct _frame *, int, PyObject *);
+
+/* The following values are used for 'what' for tracefunc functions: */
+#define PyTrace_CALL 0
+#define PyTrace_EXCEPTION 1
+#define PyTrace_LINE 2
+#define PyTrace_RETURN 3
+#define PyTrace_C_CALL 4
+#define PyTrace_C_EXCEPTION 5
+#define PyTrace_C_RETURN 6
+
+typedef struct _ts {
+
+    struct _ts *next;
+    PyInterpreterState *interp;
+
+    struct _frame *frame;
+    int recursion_depth;
+    int tracing;
+    int use_tracing;
+
+    Py_tracefunc c_profilefunc;
+    Py_tracefunc c_tracefunc;
+    PyObject *c_profileobj;
+    PyObject *c_traceobj;
+
+    PyObject *curexc_type;
+    PyObject *curexc_value;
+    PyObject *curexc_traceback;
+
+    PyObject *exc_type;
+    PyObject *exc_value;
+    PyObject *exc_traceback;
+
+    PyObject *dict;
+
+    /* tick_counter is incremented whenever the check_interval ticker
+     * reaches zero. The purpose is to give a useful measure of the number
+     * of interpreted bytecode instructions in a given thread.  This
+     * extremely lightweight statistic collector may be of interest to
+     * profilers (like psyco.jit()), although nothing in the core uses it.
+     */
+    int tick_counter;
+
+    int gilstate_counter;
+
+    PyObject *async_exc; /* Asynchronous exception to raise */
+    long thread_id; /* Thread id where this tstate was created */
+
+    /* XXX signal handlers should also be here */
+
+} PyThreadState;
+
+
+PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_New(void);
+PyAPI_FUNC(void) PyInterpreterState_Clear(PyInterpreterState *);
+PyAPI_FUNC(void) PyInterpreterState_Delete(PyInterpreterState *);
+
+PyAPI_FUNC(PyThreadState *) PyThreadState_New(PyInterpreterState *);
+PyAPI_FUNC(void) PyThreadState_Clear(PyThreadState *);
+PyAPI_FUNC(void) PyThreadState_Delete(PyThreadState *);
+#ifdef WITH_THREAD
+PyAPI_FUNC(void) PyThreadState_DeleteCurrent(void);
+#endif
+
+PyAPI_FUNC(PyThreadState *) PyThreadState_Get(void);
+PyAPI_FUNC(PyThreadState *) PyThreadState_Swap(PyThreadState *);
+PyAPI_FUNC(PyObject *) PyThreadState_GetDict(void);
+PyAPI_FUNC(int) PyThreadState_SetAsyncExc(long, PyObject *);
+
+
+/* Variable and macro for in-line access to current thread state */
+
+PyAPI_DATA(PyThreadState *) _PyThreadState_Current;
+
+#ifdef Py_DEBUG
+#define PyThreadState_GET() PyThreadState_Get()
+#else
+#define PyThreadState_GET() (_PyThreadState_Current)
+#endif
+
+typedef
+    enum {PyGILState_LOCKED, PyGILState_UNLOCKED}
+        PyGILState_STATE;
+
+/* Ensure that the current thread is ready to call the Python
+   C API, regardless of the current state of Python, or of its
+   thread lock.  This may be called as many times as desired
+   by a thread so long as each call is matched with a call to
+   PyGILState_Release().  In general, other thread-state APIs may
+   be used between _Ensure() and _Release() calls, so long as the
+   thread-state is restored to its previous state before the Release().
+   For example, normal use of the Py_BEGIN_ALLOW_THREADS/
+   Py_END_ALLOW_THREADS macros is acceptable.
+
+   The return value is an opaque "handle" to the thread state when
+   PyGILState_Ensure() was called, and must be passed to
+   PyGILState_Release() to ensure Python is left in the same state. Even
+   though recursive calls are allowed, these handles can *not* be shared -
+   each unique call to PyGILState_Ensure must save the handle for its
+   call to PyGILState_Release.
+
+   When the function returns, the current thread will hold the GIL.
+
+   Failure is a fatal error.
+*/
+PyAPI_FUNC(PyGILState_STATE) PyGILState_Ensure(void);
+
+/* Release any resources previously acquired.  After this call, Python's
+   state will be the same as it was prior to the corresponding
+   PyGILState_Ensure() call (but generally this state will be unknown to
+   the caller, hence the use of the GILState API.)
+
+   Every call to PyGILState_Ensure must be matched by a call to
+   PyGILState_Release on the same thread.
+*/
+PyAPI_FUNC(void) PyGILState_Release(PyGILState_STATE);
+
+/* Helper/diagnostic function - get the current thread state for
+   this thread.  May return NULL if no GILState API has been used
+   on the current thread.  Note the main thread always has such a
+   thread-state, even if no auto-thread-state call has been made
+   on the main thread.
+*/
+PyAPI_FUNC(PyThreadState *) PyGILState_GetThisThreadState(void);
+
+/* Routines for advanced debuggers, requested by David Beazley.
+   Don't use unless you know what you are doing! */
+PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_Head(void);
+PyAPI_FUNC(PyInterpreterState *) PyInterpreterState_Next(PyInterpreterState *);
+PyAPI_FUNC(PyThreadState *) PyInterpreterState_ThreadHead(PyInterpreterState *);
+PyAPI_FUNC(PyThreadState *) PyThreadState_Next(PyThreadState *);
+
+typedef struct _frame *(*PyThreadFrameGetter)(PyThreadState *self_);
+
+/* hook for PyEval_GetFrame(), requested for Psyco */
+PyAPI_DATA(PyThreadFrameGetter) _PyThreadState_GetFrame;
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PYSTATE_H */
diff --git a/depot_tools/release/win/python_24/include/pystrtod.h b/depot_tools/release/win/python_24/include/pystrtod.h
new file mode 100644
index 0000000..5314f26
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pystrtod.h
@@ -0,0 +1,18 @@
+#ifndef Py_STRTOD_H
+#define Py_STRTOD_H
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+PyAPI_FUNC(double) PyOS_ascii_strtod(const char *str, char **ptr);
+PyAPI_FUNC(double) PyOS_ascii_atof(const char *str);
+PyAPI_FUNC(char *) PyOS_ascii_formatd(char *buffer, int buf_len,  const char *format, double d);
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* !Py_STRTOD_H */
diff --git a/depot_tools/release/win/python_24/include/pythonrun.h b/depot_tools/release/win/python_24/include/pythonrun.h
new file mode 100644
index 0000000..33497ce
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pythonrun.h
@@ -0,0 +1,152 @@
+
+/* Interfaces to parse and execute pieces of python code */
+
+#ifndef Py_PYTHONRUN_H
+#define Py_PYTHONRUN_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define PyCF_MASK (CO_FUTURE_DIVISION)
+#define PyCF_MASK_OBSOLETE (CO_GENERATOR_ALLOWED | CO_NESTED)
+#define PyCF_SOURCE_IS_UTF8  0x0100
+#define PyCF_DONT_IMPLY_DEDENT 0x0200
+
+typedef struct {
+	int cf_flags;  /* bitmask of CO_xxx flags relevant to future */
+} PyCompilerFlags;
+
+PyAPI_FUNC(void) Py_SetProgramName(char *);
+PyAPI_FUNC(char *) Py_GetProgramName(void);
+
+PyAPI_FUNC(void) Py_SetPythonHome(char *);
+PyAPI_FUNC(char *) Py_GetPythonHome(void);
+
+PyAPI_FUNC(void) Py_Initialize(void);
+PyAPI_FUNC(void) Py_InitializeEx(int);
+PyAPI_FUNC(void) Py_Finalize(void);
+PyAPI_FUNC(int) Py_IsInitialized(void);
+PyAPI_FUNC(PyThreadState *) Py_NewInterpreter(void);
+PyAPI_FUNC(void) Py_EndInterpreter(PyThreadState *);
+
+PyAPI_FUNC(int) PyRun_AnyFile(FILE *, const char *);
+PyAPI_FUNC(int) PyRun_AnyFileEx(FILE *, const char *, int);
+
+PyAPI_FUNC(int) PyRun_AnyFileFlags(FILE *, const char *, PyCompilerFlags *);
+PyAPI_FUNC(int) PyRun_AnyFileExFlags(FILE *, const char *, int, PyCompilerFlags *);
+
+PyAPI_FUNC(int) PyRun_SimpleString(const char *);
+PyAPI_FUNC(int) PyRun_SimpleStringFlags(const char *, PyCompilerFlags *);
+PyAPI_FUNC(int) PyRun_SimpleFile(FILE *, const char *);
+PyAPI_FUNC(int) PyRun_SimpleFileEx(FILE *, const char *, int);
+PyAPI_FUNC(int) PyRun_SimpleFileExFlags(FILE *, const char *, int, PyCompilerFlags *);
+PyAPI_FUNC(int) PyRun_InteractiveOne(FILE *, const char *);
+PyAPI_FUNC(int) PyRun_InteractiveOneFlags(FILE *, const char *, PyCompilerFlags *);
+PyAPI_FUNC(int) PyRun_InteractiveLoop(FILE *, const char *);
+PyAPI_FUNC(int) PyRun_InteractiveLoopFlags(FILE *, const char *, PyCompilerFlags *);
+
+PyAPI_FUNC(struct _node *) PyParser_SimpleParseString(const char *, int);
+PyAPI_FUNC(struct _node *) PyParser_SimpleParseFile(FILE *, const char *, int);
+PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlags(const char *, int, int);
+PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlagsFilename(const char *,
+								  const char *,
+								  int,
+								  int);
+PyAPI_FUNC(struct _node *) PyParser_SimpleParseFileFlags(FILE *, const char *,
+							int, int);
+
+PyAPI_FUNC(PyObject *) PyRun_String(const char *, int, PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyRun_File(FILE *, const char *, int, PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) PyRun_FileEx(FILE *, const char *, int,
+				   PyObject *, PyObject *, int);
+PyAPI_FUNC(PyObject *) PyRun_StringFlags(const char *, int, PyObject *, PyObject *,
+					PyCompilerFlags *);
+PyAPI_FUNC(PyObject *) PyRun_FileFlags(FILE *, const char *, int, PyObject *, 
+				      PyObject *, PyCompilerFlags *);
+PyAPI_FUNC(PyObject *) PyRun_FileExFlags(FILE *, const char *, int, PyObject *, 
+					PyObject *, int, PyCompilerFlags *);
+
+PyAPI_FUNC(PyObject *) Py_CompileString(const char *, const char *, int);
+PyAPI_FUNC(PyObject *) Py_CompileStringFlags(const char *, const char *, int,
+					    PyCompilerFlags *);
+PyAPI_FUNC(struct symtable *) Py_SymtableString(const char *, const char *, int);
+
+PyAPI_FUNC(void) PyErr_Print(void);
+PyAPI_FUNC(void) PyErr_PrintEx(int);
+PyAPI_FUNC(void) PyErr_Display(PyObject *, PyObject *, PyObject *);
+
+PyAPI_FUNC(int) Py_AtExit(void (*func)(void));
+
+PyAPI_FUNC(void) Py_Exit(int);
+
+PyAPI_FUNC(int) Py_FdIsInteractive(FILE *, const char *);
+
+/* Bootstrap */
+PyAPI_FUNC(int) Py_Main(int argc, char **argv);
+
+/* In getpath.c */
+PyAPI_FUNC(char *) Py_GetProgramFullPath(void);
+PyAPI_FUNC(char *) Py_GetPrefix(void);
+PyAPI_FUNC(char *) Py_GetExecPrefix(void);
+PyAPI_FUNC(char *) Py_GetPath(void);
+
+/* In their own files */
+PyAPI_FUNC(const char *) Py_GetVersion(void);
+PyAPI_FUNC(const char *) Py_GetPlatform(void);
+PyAPI_FUNC(const char *) Py_GetCopyright(void);
+PyAPI_FUNC(const char *) Py_GetCompiler(void);
+PyAPI_FUNC(const char *) Py_GetBuildInfo(void);
+
+/* Internal -- various one-time initializations */
+PyAPI_FUNC(PyObject *) _PyBuiltin_Init(void);
+PyAPI_FUNC(PyObject *) _PySys_Init(void);
+PyAPI_FUNC(void) _PyImport_Init(void);
+PyAPI_FUNC(void) _PyExc_Init(void);
+PyAPI_FUNC(void) _PyImportHooks_Init(void);
+PyAPI_FUNC(int) _PyFrame_Init(void);
+PyAPI_FUNC(int) _PyInt_Init(void);
+
+/* Various internal finalizers */
+PyAPI_FUNC(void) _PyExc_Fini(void);
+PyAPI_FUNC(void) _PyImport_Fini(void);
+PyAPI_FUNC(void) PyMethod_Fini(void);
+PyAPI_FUNC(void) PyFrame_Fini(void);
+PyAPI_FUNC(void) PyCFunction_Fini(void);
+PyAPI_FUNC(void) PyTuple_Fini(void);
+PyAPI_FUNC(void) PyList_Fini(void);
+PyAPI_FUNC(void) PyString_Fini(void);
+PyAPI_FUNC(void) PyInt_Fini(void);
+PyAPI_FUNC(void) PyFloat_Fini(void);
+PyAPI_FUNC(void) PyOS_FiniInterrupts(void);
+
+/* Stuff with no proper home (yet) */
+PyAPI_FUNC(char *) PyOS_Readline(FILE *, FILE *, char *);
+PyAPI_DATA(int) (*PyOS_InputHook)(void);
+PyAPI_DATA(char) *(*PyOS_ReadlineFunctionPointer)(FILE *, FILE *, char *);
+PyAPI_DATA(PyThreadState*) _PyOS_ReadlineTState;
+
+/* Stack size, in "pointers" (so we get extra safety margins
+   on 64-bit platforms).  On a 32-bit platform, this translates
+   to an 8k margin. */
+#define PYOS_STACK_MARGIN 2048
+
+#if defined(WIN32) && !defined(MS_WIN64) && defined(_MSC_VER)
+/* Enable stack checking under Microsoft C */
+#define USE_STACKCHECK
+#endif
+
+#ifdef USE_STACKCHECK
+/* Check that we aren't overflowing our stack */
+PyAPI_FUNC(int) PyOS_CheckStack(void);
+#endif
+
+/* Signals */
+typedef void (*PyOS_sighandler_t)(int);
+PyAPI_FUNC(PyOS_sighandler_t) PyOS_getsig(int);
+PyAPI_FUNC(PyOS_sighandler_t) PyOS_setsig(int, PyOS_sighandler_t);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_PYTHONRUN_H */
diff --git a/depot_tools/release/win/python_24/include/pythread.h b/depot_tools/release/win/python_24/include/pythread.h
new file mode 100644
index 0000000..0fa8db0
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/pythread.h
@@ -0,0 +1,44 @@
+
+#ifndef Py_PYTHREAD_H
+#define Py_PYTHREAD_H
+
+#define NO_EXIT_PROG		/* don't define PyThread_exit_prog() */
+				/* (the result is no use of signals on SGI) */
+
+typedef void *PyThread_type_lock;
+typedef void *PyThread_type_sema;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_FUNC(void) PyThread_init_thread(void);
+PyAPI_FUNC(long) PyThread_start_new_thread(void (*)(void *), void *);
+PyAPI_FUNC(void) PyThread_exit_thread(void);
+PyAPI_FUNC(void) PyThread__PyThread_exit_thread(void);
+PyAPI_FUNC(long) PyThread_get_thread_ident(void);
+
+PyAPI_FUNC(PyThread_type_lock) PyThread_allocate_lock(void);
+PyAPI_FUNC(void) PyThread_free_lock(PyThread_type_lock);
+PyAPI_FUNC(int) PyThread_acquire_lock(PyThread_type_lock, int);
+#define WAIT_LOCK	1
+#define NOWAIT_LOCK	0
+PyAPI_FUNC(void) PyThread_release_lock(PyThread_type_lock);
+
+#ifndef NO_EXIT_PROG
+PyAPI_FUNC(void) PyThread_exit_prog(int);
+PyAPI_FUNC(void) PyThread__PyThread_exit_prog(int);
+#endif
+
+/* Thread Local Storage (TLS) API */
+PyAPI_FUNC(int) PyThread_create_key(void);
+PyAPI_FUNC(void) PyThread_delete_key(int);
+PyAPI_FUNC(int) PyThread_set_key_value(int, void *);
+PyAPI_FUNC(void *) PyThread_get_key_value(int);
+PyAPI_FUNC(void) PyThread_delete_key_value(int key);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* !Py_PYTHREAD_H */
diff --git a/depot_tools/release/win/python_24/include/rangeobject.h b/depot_tools/release/win/python_24/include/rangeobject.h
new file mode 100644
index 0000000..50aa061
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/rangeobject.h
@@ -0,0 +1,30 @@
+
+/* Range object interface */
+
+#ifndef Py_RANGEOBJECT_H
+#define Py_RANGEOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* This is about the type 'xrange', not the built-in function range(), which
+   returns regular lists. */
+
+/*
+A range object represents an integer range.  This is an immutable object;
+a range cannot change its value after creation.
+
+Range objects behave like the corresponding tuple objects except that
+they are represented by start, stop, and step data members.
+*/
+
+PyAPI_DATA(PyTypeObject) PyRange_Type;
+
+#define PyRange_Check(op) ((op)->ob_type == &PyRange_Type)
+
+PyAPI_FUNC(PyObject *) PyRange_New(long, long, long, int);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_RANGEOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/setobject.h b/depot_tools/release/win/python_24/include/setobject.h
new file mode 100644
index 0000000..cc2d683
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/setobject.h
@@ -0,0 +1,41 @@
+
+/* Set object interface */
+
+#ifndef Py_SETOBJECT_H
+#define Py_SETOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+This data structure is shared by set and frozenset objects.
+*/
+
+typedef struct {
+	PyObject_HEAD
+	PyObject *data;
+	long hash;	/* only used by frozenset objects */
+	PyObject *weakreflist; /* List of weak references */
+
+	/* Invariants:
+	 *     data is a dictionary whose values are all True.
+	 *     data points to the same dict for the whole life of the set.
+	 * For frozensets only:
+	 *     data is immutable.
+	 *     hash is the hash of the frozenset or -1 if not computed yet.
+	 */
+} PySetObject;
+
+PyAPI_DATA(PyTypeObject) PySet_Type;
+PyAPI_DATA(PyTypeObject) PyFrozenSet_Type;
+
+#define PyFrozenSet_CheckExact(ob) ((ob)->ob_type == &PyFrozenSet_Type)
+#define PyAnySet_Check(ob) \
+	((ob)->ob_type == &PySet_Type || (ob)->ob_type == &PyFrozenSet_Type || \
+	  PyType_IsSubtype((ob)->ob_type, &PySet_Type) || \
+	  PyType_IsSubtype((ob)->ob_type, &PyFrozenSet_Type))
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_SETOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/sliceobject.h b/depot_tools/release/win/python_24/include/sliceobject.h
new file mode 100644
index 0000000..fc80254
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/sliceobject.h
@@ -0,0 +1,42 @@
+#ifndef Py_SLICEOBJECT_H
+#define Py_SLICEOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* The unique ellipsis object "..." */
+
+PyAPI_DATA(PyObject) _Py_EllipsisObject; /* Don't use this directly */
+
+#define Py_Ellipsis (&_Py_EllipsisObject)
+
+/* Slice object interface */
+
+/*
+
+A slice object containing start, stop, and step data members (the
+names are from range).  After much talk with Guido, it was decided to
+let these be any arbitrary python type.  Py_None stands for omitted values.
+*/
+
+typedef struct {
+    PyObject_HEAD
+    PyObject *start, *stop, *step;	/* not NULL */
+} PySliceObject;
+
+PyAPI_DATA(PyTypeObject) PySlice_Type;
+
+#define PySlice_Check(op) ((op)->ob_type == &PySlice_Type)
+
+PyAPI_FUNC(PyObject *) PySlice_New(PyObject* start, PyObject* stop,
+                                  PyObject* step);
+PyAPI_FUNC(int) PySlice_GetIndices(PySliceObject *r, int length,
+                                  int *start, int *stop, int *step);
+PyAPI_FUNC(int) PySlice_GetIndicesEx(PySliceObject *r, int length,
+				    int *start, int *stop, 
+				    int *step, int *slicelength);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_SLICEOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/stringobject.h b/depot_tools/release/win/python_24/include/stringobject.h
new file mode 100644
index 0000000..0c7e5b6
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/stringobject.h
@@ -0,0 +1,183 @@
+
+/* String object interface */
+
+#ifndef Py_STRINGOBJECT_H
+#define Py_STRINGOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#include <stdarg.h>
+
+/*
+Type PyStringObject represents a character string.  An extra zero byte is
+reserved at the end to ensure it is zero-terminated, but a size is
+present so strings with null bytes in them can be represented.  This
+is an immutable object type.
+
+There are functions to create new string objects, to test
+an object for string-ness, and to get the
+string value.  The latter function returns a null pointer
+if the object is not of the proper type.
+There is a variant that takes an explicit size as well as a
+variant that assumes a zero-terminated string.  Note that none of the
+functions should be applied to nil objects.
+*/
+
+/* Caching the hash (ob_shash) saves recalculation of a string's hash value.
+   Interning strings (ob_sstate) tries to ensure that only one string
+   object with a given value exists, so equality tests can be one pointer
+   comparison.  This is generally restricted to strings that "look like"
+   Python identifiers, although the intern() builtin can be used to force
+   interning of any string.
+   Together, these sped the interpreter by up to 20%. */
+
+typedef struct {
+    PyObject_VAR_HEAD
+    long ob_shash;
+    int ob_sstate;
+    char ob_sval[1];
+
+    /* Invariants:
+     *     ob_sval contains space for 'ob_size+1' elements.
+     *     ob_sval[ob_size] == 0.
+     *     ob_shash is the hash of the string or -1 if not computed yet.
+     *     ob_sstate != 0 iff the string object is in stringobject.c's
+     *       'interned' dictionary; in this case the two references
+     *       from 'interned' to this object are *not counted* in ob_refcnt.
+     */
+} PyStringObject;
+
+#define SSTATE_NOT_INTERNED 0
+#define SSTATE_INTERNED_MORTAL 1
+#define SSTATE_INTERNED_IMMORTAL 2
+
+PyAPI_DATA(PyTypeObject) PyBaseString_Type;
+PyAPI_DATA(PyTypeObject) PyString_Type;
+
+#define PyString_Check(op) PyObject_TypeCheck(op, &PyString_Type)
+#define PyString_CheckExact(op) ((op)->ob_type == &PyString_Type)
+
+PyAPI_FUNC(PyObject *) PyString_FromStringAndSize(const char *, int);
+PyAPI_FUNC(PyObject *) PyString_FromString(const char *);
+PyAPI_FUNC(PyObject *) PyString_FromFormatV(const char*, va_list)
+				Py_GCC_ATTRIBUTE((format(printf, 1, 0)));
+PyAPI_FUNC(PyObject *) PyString_FromFormat(const char*, ...)
+				Py_GCC_ATTRIBUTE((format(printf, 1, 2)));
+PyAPI_FUNC(int) PyString_Size(PyObject *);
+PyAPI_FUNC(char *) PyString_AsString(PyObject *);
+PyAPI_FUNC(PyObject *) PyString_Repr(PyObject *, int);
+PyAPI_FUNC(void) PyString_Concat(PyObject **, PyObject *);
+PyAPI_FUNC(void) PyString_ConcatAndDel(PyObject **, PyObject *);
+PyAPI_FUNC(int) _PyString_Resize(PyObject **, int);
+PyAPI_FUNC(int) _PyString_Eq(PyObject *, PyObject*);
+PyAPI_FUNC(PyObject *) PyString_Format(PyObject *, PyObject *);
+PyAPI_FUNC(PyObject *) _PyString_FormatLong(PyObject*, int, int,
+						  int, char**, int*);
+PyAPI_FUNC(PyObject *) PyString_DecodeEscape(const char *, int, 
+						   const char *, int,
+						   const char *);
+
+PyAPI_FUNC(void) PyString_InternInPlace(PyObject **);
+PyAPI_FUNC(void) PyString_InternImmortal(PyObject **);
+PyAPI_FUNC(PyObject *) PyString_InternFromString(const char *);
+PyAPI_FUNC(void) _Py_ReleaseInternedStrings(void);
+
+/* Use only if you know it's a string */
+#define PyString_CHECK_INTERNED(op) (((PyStringObject *)(op))->ob_sstate)
+
+/* Macro, trading safety for speed */
+#define PyString_AS_STRING(op) (((PyStringObject *)(op))->ob_sval)
+#define PyString_GET_SIZE(op)  (((PyStringObject *)(op))->ob_size)
+
+/* _PyString_Join(sep, x) is like sep.join(x).  sep must be PyStringObject*,
+   x must be an iterable object. */
+PyAPI_FUNC(PyObject *) _PyString_Join(PyObject *sep, PyObject *x);
+
+/* --- Generic Codecs ----------------------------------------------------- */
+
+/* Create an object by decoding the encoded string s of the
+   given size. */
+
+PyAPI_FUNC(PyObject*) PyString_Decode(
+    const char *s,              /* encoded string */
+    int size,                   /* size of buffer */
+    const char *encoding,       /* encoding */
+    const char *errors          /* error handling */
+    );
+
+/* Encodes a char buffer of the given size and returns a 
+   Python object. */
+
+PyAPI_FUNC(PyObject*) PyString_Encode(
+    const char *s,              /* string char buffer */
+    int size,                   /* number of chars to encode */
+    const char *encoding,       /* encoding */
+    const char *errors          /* error handling */
+    );
+
+/* Encodes a string object and returns the result as Python 
+   object. */
+
+PyAPI_FUNC(PyObject*) PyString_AsEncodedObject(
+    PyObject *str,	 	/* string object */
+    const char *encoding,	/* encoding */
+    const char *errors		/* error handling */
+    );
+
+/* Encodes a string object and returns the result as Python string
+   object.   
+   
+   If the codec returns an Unicode object, the object is converted
+   back to a string using the default encoding.
+
+   DEPRECATED - use PyString_AsEncodedObject() instead. */
+
+PyAPI_FUNC(PyObject*) PyString_AsEncodedString(
+    PyObject *str,	 	/* string object */
+    const char *encoding,	/* encoding */
+    const char *errors		/* error handling */
+    );
+
+/* Decodes a string object and returns the result as Python 
+   object. */
+
+PyAPI_FUNC(PyObject*) PyString_AsDecodedObject(
+    PyObject *str,	 	/* string object */
+    const char *encoding,	/* encoding */
+    const char *errors		/* error handling */
+    );
+
+/* Decodes a string object and returns the result as Python string
+   object.  
+   
+   If the codec returns an Unicode object, the object is converted
+   back to a string using the default encoding.
+
+   DEPRECATED - use PyString_AsDecodedObject() instead. */
+
+PyAPI_FUNC(PyObject*) PyString_AsDecodedString(
+    PyObject *str,	 	/* string object */
+    const char *encoding,	/* encoding */
+    const char *errors		/* error handling */
+    );
+
+/* Provides access to the internal data buffer and size of a string
+   object or the default encoded version of an Unicode object. Passing
+   NULL as *len parameter will force the string buffer to be
+   0-terminated (passing a string with embedded NULL characters will
+   cause an exception).  */
+
+PyAPI_FUNC(int) PyString_AsStringAndSize(
+    register PyObject *obj,	/* string or Unicode object */
+    register char **s,		/* pointer to buffer variable */
+    register int *len		/* pointer to length variable or NULL
+				   (only possible for 0-terminated
+				   strings) */
+    );
+    
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_STRINGOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/structmember.h b/depot_tools/release/win/python_24/include/structmember.h
new file mode 100644
index 0000000..015ca19
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/structmember.h
@@ -0,0 +1,89 @@
+#ifndef Py_STRUCTMEMBER_H
+#define Py_STRUCTMEMBER_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Interface to map C struct members to Python object attributes */
+
+#include <stddef.h> /* For offsetof */
+
+/* The offsetof() macro calculates the offset of a structure member
+   in its structure.  Unfortunately this cannot be written down
+   portably, hence it is provided by a Standard C header file.
+   For pre-Standard C compilers, here is a version that usually works
+   (but watch out!): */
+
+#ifndef offsetof
+#define offsetof(type, member) ( (int) & ((type*)0) -> member )
+#endif
+
+/* An array of memberlist structures defines the name, type and offset
+   of selected members of a C structure.  These can be read by
+   PyMember_Get() and set by PyMember_Set() (except if their READONLY flag
+   is set).  The array must be terminated with an entry whose name
+   pointer is NULL. */
+
+struct memberlist {
+	/* Obsolete version, for binary backwards compatibility */
+	char *name;
+	int type;
+	int offset;
+	int flags;
+};
+
+typedef struct PyMemberDef {
+	/* Current version, use this */
+	char *name;
+	int type;
+	int offset;
+	int flags;
+	char *doc;
+} PyMemberDef;
+
+/* Types */
+#define T_SHORT		0
+#define T_INT		1
+#define T_LONG		2
+#define T_FLOAT		3
+#define T_DOUBLE	4
+#define T_STRING	5
+#define T_OBJECT	6
+/* XXX the ordering here is weird for binary compatibility */
+#define T_CHAR		7	/* 1-character string */
+#define T_BYTE		8	/* 8-bit signed int */
+/* unsigned variants: */
+#define T_UBYTE		9
+#define T_USHORT	10
+#define T_UINT		11
+#define T_ULONG		12
+
+/* Added by Jack: strings contained in the structure */
+#define T_STRING_INPLACE	13
+
+#define T_OBJECT_EX	16	/* Like T_OBJECT, but raises AttributeError
+				   when the value is NULL, instead of
+				   converting to None. */
+
+/* Flags */
+#define READONLY	1
+#define RO		READONLY		/* Shorthand */
+#define READ_RESTRICTED	2
+#define WRITE_RESTRICTED 4
+#define RESTRICTED	(READ_RESTRICTED | WRITE_RESTRICTED)
+
+
+/* Obsolete API, for binary backwards compatibility */
+PyAPI_FUNC(PyObject *) PyMember_Get(char *, struct memberlist *, char *);
+PyAPI_FUNC(int) PyMember_Set(char *, struct memberlist *, char *, PyObject *);
+
+/* Current API, use this */
+PyAPI_FUNC(PyObject *) PyMember_GetOne(char *, struct PyMemberDef *);
+PyAPI_FUNC(int) PyMember_SetOne(char *, struct PyMemberDef *, PyObject *);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_STRUCTMEMBER_H */
diff --git a/depot_tools/release/win/python_24/include/structseq.h b/depot_tools/release/win/python_24/include/structseq.h
new file mode 100644
index 0000000..e662916
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/structseq.h
@@ -0,0 +1,41 @@
+
+/* Tuple object interface */
+
+#ifndef Py_STRUCTSEQ_H
+#define Py_STRUCTSEQ_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+typedef struct PyStructSequence_Field {
+	char *name;
+	char *doc;
+} PyStructSequence_Field;
+
+typedef struct PyStructSequence_Desc {
+	char *name;
+	char *doc;
+	struct PyStructSequence_Field *fields;
+	int n_in_sequence;
+} PyStructSequence_Desc;
+
+extern char* PyStructSequence_UnnamedField;
+
+PyAPI_FUNC(void) PyStructSequence_InitType(PyTypeObject *type,
+					   PyStructSequence_Desc *desc);
+
+PyAPI_FUNC(PyObject *) PyStructSequence_New(PyTypeObject* type);
+
+typedef struct {
+	PyObject_VAR_HEAD
+	PyObject *ob_item[1];
+} PyStructSequence;
+
+/* Macro, *only* to be used to fill in brand new objects */
+#define PyStructSequence_SET_ITEM(op, i, v) \
+	(((PyStructSequence *)(op))->ob_item[i] = v)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_STRUCTSEQ_H */
diff --git a/depot_tools/release/win/python_24/include/symtable.h b/depot_tools/release/win/python_24/include/symtable.h
new file mode 100644
index 0000000..628c3e6
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/symtable.h
@@ -0,0 +1,102 @@
+#ifndef Py_SYMTABLE_H
+#define Py_SYMTABLE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* A symbol table is constructed each time PyNode_Compile() is
+   called.  The table walks the entire parse tree and identifies each
+   use or definition of a variable. 
+
+   The symbol table contains a dictionary for each code block in a
+   module: The symbol dictionary for the block.  They keys of these
+   dictionaries are the name of all variables used or defined in the
+   block; the integer values are used to store several flags,
+   e.g. DEF_PARAM indicates that a variable is a parameter to a
+   function. 
+*/
+
+struct _symtable_entry;
+
+struct symtable {
+	int st_pass;             /* pass == 1 or 2 */
+	const char *st_filename; /* name of file being compiled */
+	struct _symtable_entry *st_cur; /* current symbol table entry */
+	PyObject *st_symbols;    /* dictionary of symbol table entries */
+        PyObject *st_stack;      /* stack of namespace info */
+	PyObject *st_global;     /* borrowed ref to MODULE in st_symbols */
+	int st_nscopes;          /* number of scopes */
+	int st_errors;           /* number of errors */
+	char *st_private;        /* name of current class or NULL */
+	PyFutureFeatures *st_future; /* module's future features */
+};
+
+typedef struct _symtable_entry {
+	PyObject_HEAD
+	PyObject *ste_id;        /* int: key in st_symbols) */
+	PyObject *ste_symbols;   /* dict: name to flags) */
+	PyObject *ste_name;      /* string: name of scope */
+	PyObject *ste_varnames;  /* list of variable names */
+	PyObject *ste_children;  /* list of child ids */
+	int ste_type;            /* module, class, or function */
+	int ste_lineno;          /* first line of scope */
+	int ste_optimized;       /* true if namespace can't be optimized */
+	int ste_nested;          /* true if scope is nested */
+	int ste_child_free;      /* true if a child scope has free variables,
+				    including free refs to globals */
+	int ste_generator;       /* true if namespace is a generator */
+	int ste_opt_lineno;      /* lineno of last exec or import * */
+	int ste_tmpname;         /* temporary name counter */
+	struct symtable *ste_table;
+} PySymtableEntryObject;
+
+PyAPI_DATA(PyTypeObject) PySymtableEntry_Type;
+
+#define PySymtableEntry_Check(op) ((op)->ob_type == &PySymtableEntry_Type)
+
+PyAPI_FUNC(PyObject *) PySymtableEntry_New(struct symtable *,
+						 char *, int, int);
+
+PyAPI_FUNC(struct symtable *) PyNode_CompileSymtable(struct _node *, const char *);
+PyAPI_FUNC(void) PySymtable_Free(struct symtable *);
+
+
+#define TOP "global"
+
+/* Flags for def-use information */
+
+#define DEF_GLOBAL 1           /* global stmt */
+#define DEF_LOCAL 2            /* assignment in code block */
+#define DEF_PARAM 2<<1         /* formal parameter */
+#define USE 2<<2               /* name is used */
+#define DEF_STAR 2<<3          /* parameter is star arg */
+#define DEF_DOUBLESTAR 2<<4    /* parameter is star-star arg */
+#define DEF_INTUPLE 2<<5       /* name defined in tuple in parameters */
+#define DEF_FREE 2<<6          /* name used but not defined in nested scope */
+#define DEF_FREE_GLOBAL 2<<7   /* free variable is actually implicit global */
+#define DEF_FREE_CLASS 2<<8    /* free variable from class's method */
+#define DEF_IMPORT 2<<9        /* assignment occurred via import */
+
+#define DEF_BOUND (DEF_LOCAL | DEF_PARAM | DEF_IMPORT)
+
+#define TYPE_FUNCTION 1
+#define TYPE_CLASS 2
+#define TYPE_MODULE 3
+
+#define LOCAL 1
+#define GLOBAL_EXPLICIT 2
+#define GLOBAL_IMPLICIT 3
+#define FREE 4
+#define CELL 5
+
+#define OPT_IMPORT_STAR 1
+#define OPT_EXEC 2
+#define OPT_BARE_EXEC 4
+
+#define GENERATOR 1
+#define GENERATOR_EXPRESSION 2
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_SYMTABLE_H */
diff --git a/depot_tools/release/win/python_24/include/sysmodule.h b/depot_tools/release/win/python_24/include/sysmodule.h
new file mode 100644
index 0000000..1c9b187
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/sysmodule.h
@@ -0,0 +1,30 @@
+
+/* System module interface */
+
+#ifndef Py_SYSMODULE_H
+#define Py_SYSMODULE_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+PyAPI_FUNC(PyObject *) PySys_GetObject(char *);
+PyAPI_FUNC(int) PySys_SetObject(char *, PyObject *);
+PyAPI_FUNC(FILE *) PySys_GetFile(char *, FILE *);
+PyAPI_FUNC(void) PySys_SetArgv(int, char **);
+PyAPI_FUNC(void) PySys_SetPath(char *);
+
+PyAPI_FUNC(void) PySys_WriteStdout(const char *format, ...)
+			Py_GCC_ATTRIBUTE((format(printf, 1, 2)));
+PyAPI_FUNC(void) PySys_WriteStderr(const char *format, ...)
+			Py_GCC_ATTRIBUTE((format(printf, 1, 2)));
+
+PyAPI_DATA(PyObject *) _PySys_TraceFunc, *_PySys_ProfileFunc;
+PyAPI_DATA(int) _PySys_CheckInterval;
+
+PyAPI_FUNC(void) PySys_ResetWarnOptions(void);
+PyAPI_FUNC(void) PySys_AddWarnOption(char *);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_SYSMODULE_H */
diff --git a/depot_tools/release/win/python_24/include/timefuncs.h b/depot_tools/release/win/python_24/include/timefuncs.h
new file mode 100644
index 0000000..553142d
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/timefuncs.h
@@ -0,0 +1,23 @@
+/*  timefuncs.h
+ */
+
+/* Utility function related to timemodule.c. */
+
+#ifndef TIMEFUNCS_H
+#define TIMEFUNCS_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+/* Cast double x to time_t, but raise ValueError if x is too large
+ * to fit in a time_t.  ValueError is set on return iff the return
+ * value is (time_t)-1 and PyErr_Occurred().
+ */
+PyAPI_FUNC(time_t) _PyTime_DoubleToTimet(double x);
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif  /* TIMEFUNCS_H */
diff --git a/depot_tools/release/win/python_24/include/token.h b/depot_tools/release/win/python_24/include/token.h
new file mode 100644
index 0000000..4250000
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/token.h
@@ -0,0 +1,83 @@
+
+/* Token types */
+
+#ifndef Py_TOKEN_H
+#define Py_TOKEN_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+#define ENDMARKER	0
+#define NAME		1
+#define NUMBER		2
+#define STRING		3
+#define NEWLINE		4
+#define INDENT		5
+#define DEDENT		6
+#define LPAR		7
+#define RPAR		8
+#define LSQB		9
+#define RSQB		10
+#define COLON		11
+#define COMMA		12
+#define SEMI		13
+#define PLUS		14
+#define MINUS		15
+#define STAR		16
+#define SLASH		17
+#define VBAR		18
+#define AMPER		19
+#define LESS		20
+#define GREATER		21
+#define EQUAL		22
+#define DOT		23
+#define PERCENT		24
+#define BACKQUOTE	25
+#define LBRACE		26
+#define RBRACE		27
+#define EQEQUAL		28
+#define NOTEQUAL	29
+#define LESSEQUAL	30
+#define GREATEREQUAL	31
+#define TILDE		32
+#define CIRCUMFLEX	33
+#define LEFTSHIFT	34
+#define RIGHTSHIFT	35
+#define DOUBLESTAR	36
+#define PLUSEQUAL	37
+#define MINEQUAL	38
+#define STAREQUAL	39
+#define SLASHEQUAL	40
+#define PERCENTEQUAL	41
+#define AMPEREQUAL	42
+#define VBAREQUAL	43
+#define CIRCUMFLEXEQUAL	44
+#define LEFTSHIFTEQUAL	45
+#define RIGHTSHIFTEQUAL	46
+#define DOUBLESTAREQUAL	47
+#define DOUBLESLASH	48
+#define DOUBLESLASHEQUAL 49
+#define AT              50	
+/* Don't forget to update the table _PyParser_TokenNames in tokenizer.c! */
+#define OP		51
+#define ERRORTOKEN	52
+#define N_TOKENS	53
+
+/* Special definitions for cooperation with parser */
+
+#define NT_OFFSET		256
+
+#define ISTERMINAL(x)		((x) < NT_OFFSET)
+#define ISNONTERMINAL(x)	((x) >= NT_OFFSET)
+#define ISEOF(x)		((x) == ENDMARKER)
+
+
+PyAPI_DATA(char *) _PyParser_TokenNames[]; /* Token names */
+PyAPI_FUNC(int) PyToken_OneChar(int);
+PyAPI_FUNC(int) PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_TOKEN_H */
diff --git a/depot_tools/release/win/python_24/include/traceback.h b/depot_tools/release/win/python_24/include/traceback.h
new file mode 100644
index 0000000..6501600
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/traceback.h
@@ -0,0 +1,30 @@
+
+#ifndef Py_TRACEBACK_H
+#define Py_TRACEBACK_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+struct _frame;
+
+/* Traceback interface */
+
+typedef struct _traceback {
+	PyObject_HEAD
+	struct _traceback *tb_next;
+	struct _frame *tb_frame;
+	int tb_lasti;
+	int tb_lineno;
+} PyTracebackObject;
+
+PyAPI_FUNC(int) PyTraceBack_Here(struct _frame *);
+PyAPI_FUNC(int) PyTraceBack_Print(PyObject *, PyObject *);
+
+/* Reveal traceback type so we can typecheck traceback objects */
+PyAPI_DATA(PyTypeObject) PyTraceBack_Type;
+#define PyTraceBack_Check(v) ((v)->ob_type == &PyTraceBack_Type)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_TRACEBACK_H */
diff --git a/depot_tools/release/win/python_24/include/tupleobject.h b/depot_tools/release/win/python_24/include/tupleobject.h
new file mode 100644
index 0000000..6b60d62
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/tupleobject.h
@@ -0,0 +1,57 @@
+
+/* Tuple object interface */
+
+#ifndef Py_TUPLEOBJECT_H
+#define Py_TUPLEOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/*
+Another generally useful object type is a tuple of object pointers.
+For Python, this is an immutable type.  C code can change the tuple items
+(but not their number), and even use tuples are general-purpose arrays of
+object references, but in general only brand new tuples should be mutated,
+not ones that might already have been exposed to Python code.
+
+*** WARNING *** PyTuple_SetItem does not increment the new item's reference
+count, but does decrement the reference count of the item it replaces,
+if not nil.  It does *decrement* the reference count if it is *not*
+inserted in the tuple.  Similarly, PyTuple_GetItem does not increment the
+returned item's reference count.
+*/
+
+typedef struct {
+    PyObject_VAR_HEAD
+    PyObject *ob_item[1];
+
+    /* ob_item contains space for 'ob_size' elements.
+     * Items must normally not be NULL, except during construction when
+     * the tuple is not yet visible outside the function that builds it.
+     */
+} PyTupleObject;
+
+PyAPI_DATA(PyTypeObject) PyTuple_Type;
+
+#define PyTuple_Check(op) PyObject_TypeCheck(op, &PyTuple_Type)
+#define PyTuple_CheckExact(op) ((op)->ob_type == &PyTuple_Type)
+
+PyAPI_FUNC(PyObject *) PyTuple_New(int size);
+PyAPI_FUNC(int) PyTuple_Size(PyObject *);
+PyAPI_FUNC(PyObject *) PyTuple_GetItem(PyObject *, int);
+PyAPI_FUNC(int) PyTuple_SetItem(PyObject *, int, PyObject *);
+PyAPI_FUNC(PyObject *) PyTuple_GetSlice(PyObject *, int, int);
+PyAPI_FUNC(int) _PyTuple_Resize(PyObject **, int);
+PyAPI_FUNC(PyObject *) PyTuple_Pack(int, ...);
+
+/* Macro, trading safety for speed */
+#define PyTuple_GET_ITEM(op, i) (((PyTupleObject *)(op))->ob_item[i])
+#define PyTuple_GET_SIZE(op)    (((PyTupleObject *)(op))->ob_size)
+
+/* Macro, *only* to be used to fill in brand new tuples */
+#define PyTuple_SET_ITEM(op, i, v) (((PyTupleObject *)(op))->ob_item[i] = v)
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_TUPLEOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/ucnhash.h b/depot_tools/release/win/python_24/include/ucnhash.h
new file mode 100644
index 0000000..a664336
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/ucnhash.h
@@ -0,0 +1,29 @@
+/* Unicode name database interface */
+
+#ifndef Py_UCNHASH_H
+#define Py_UCNHASH_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* revised ucnhash CAPI interface (exported through a PyCObject) */
+
+typedef struct {
+
+    /* Size of this struct */
+    int size;
+
+    /* Get name for a given character code.  Returns non-zero if
+       success, zero if not.  Does not set Python exceptions. */
+    int (*getname)(Py_UCS4 code, char* buffer, int buflen);
+
+    /* Get character code for a given name.  Same error handling
+       as for getname. */
+    int (*getcode)(const char* name, int namelen, Py_UCS4* code);
+
+} _PyUnicode_Name_CAPI;
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_UCNHASH_H */
diff --git a/depot_tools/release/win/python_24/include/unicodeobject.h b/depot_tools/release/win/python_24/include/unicodeobject.h
new file mode 100644
index 0000000..6738cbd
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/unicodeobject.h
@@ -0,0 +1,1196 @@
+#ifndef Py_UNICODEOBJECT_H
+#define Py_UNICODEOBJECT_H
+
+/*
+
+Unicode implementation based on original code by Fredrik Lundh,
+modified by Marc-Andre Lemburg (mal@lemburg.com) according to the
+Unicode Integration Proposal (see file Misc/unicode.txt).
+
+Copyright (c) Corporation for National Research Initiatives.
+
+
+ Original header:
+ --------------------------------------------------------------------
+
+ * Yet another Unicode string type for Python.  This type supports the
+ * 16-bit Basic Multilingual Plane (BMP) only.
+ *
+ * Written by Fredrik Lundh, January 1999.
+ *
+ * Copyright (c) 1999 by Secret Labs AB.
+ * Copyright (c) 1999 by Fredrik Lundh.
+ *
+ * fredrik@pythonware.com
+ * http://www.pythonware.com
+ *
+ * --------------------------------------------------------------------
+ * This Unicode String Type is
+ * 
+ * Copyright (c) 1999 by Secret Labs AB
+ * Copyright (c) 1999 by Fredrik Lundh
+ * 
+ * By obtaining, using, and/or copying this software and/or its
+ * associated documentation, you agree that you have read, understood,
+ * and will comply with the following terms and conditions:
+ * 
+ * Permission to use, copy, modify, and distribute this software and its
+ * associated documentation for any purpose and without fee is hereby
+ * granted, provided that the above copyright notice appears in all
+ * copies, and that both that copyright notice and this permission notice
+ * appear in supporting documentation, and that the name of Secret Labs
+ * AB or the author not be used in advertising or publicity pertaining to
+ * distribution of the software without specific, written prior
+ * permission.
+ * 
+ * SECRET LABS AB AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO
+ * THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+ * FITNESS.  IN NO EVENT SHALL SECRET LABS AB OR THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
+ * OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ * -------------------------------------------------------------------- */
+
+#include <ctype.h>
+
+/* === Internal API ======================================================= */
+
+/* --- Internal Unicode Format -------------------------------------------- */
+
+#ifndef Py_USING_UNICODE
+
+#define PyUnicode_Check(op)                 0
+#define PyUnicode_CheckExact(op)            0
+
+#else
+
+/* FIXME: MvL's new implementation assumes that Py_UNICODE_SIZE is
+   properly set, but the default rules below doesn't set it.  I'll
+   sort this out some other day -- fredrik@pythonware.com */
+
+#ifndef Py_UNICODE_SIZE
+#error Must define Py_UNICODE_SIZE
+#endif
+
+/* Setting Py_UNICODE_WIDE enables UCS-4 storage.  Otherwise, Unicode
+   strings are stored as UCS-2 (with limited support for UTF-16) */
+
+#if Py_UNICODE_SIZE >= 4
+#define Py_UNICODE_WIDE
+#endif
+
+/* Set these flags if the platform has "wchar.h", "wctype.h" and the
+   wchar_t type is a 16-bit unsigned type */
+/* #define HAVE_WCHAR_H */
+/* #define HAVE_USABLE_WCHAR_T */
+
+/* Defaults for various platforms */
+#ifndef PY_UNICODE_TYPE
+
+/* Windows has a usable wchar_t type (unless we're using UCS-4) */
+# if defined(MS_WIN32) && Py_UNICODE_SIZE == 2
+#  define HAVE_USABLE_WCHAR_T
+#  define PY_UNICODE_TYPE wchar_t
+# endif
+
+# if defined(Py_UNICODE_WIDE)
+#  define PY_UNICODE_TYPE Py_UCS4
+# endif
+
+#endif
+
+/* If the compiler provides a wchar_t type we try to support it
+   through the interface functions PyUnicode_FromWideChar() and
+   PyUnicode_AsWideChar(). */
+
+#ifdef HAVE_USABLE_WCHAR_T
+# ifndef HAVE_WCHAR_H
+#  define HAVE_WCHAR_H
+# endif
+#endif
+
+#ifdef HAVE_WCHAR_H
+/* Work around a cosmetic bug in BSDI 4.x wchar.h; thanks to Thomas Wouters */
+# ifdef _HAVE_BSDI
+#  include <time.h>
+# endif
+#  include <wchar.h>
+#endif
+
+/*
+ * Use this typedef when you need to represent a UTF-16 surrogate pair
+ * as single unsigned integer.
+ */
+#if SIZEOF_INT >= 4 
+typedef unsigned int Py_UCS4; 
+#elif SIZEOF_LONG >= 4
+typedef unsigned long Py_UCS4; 
+#endif
+
+typedef PY_UNICODE_TYPE Py_UNICODE;
+
+/* --- UCS-2/UCS-4 Name Mangling ------------------------------------------ */
+
+/* Unicode API names are mangled to assure that UCS-2 and UCS-4 builds
+   produce different external names and thus cause import errors in
+   case Python interpreters and extensions with mixed compiled in
+   Unicode width assumptions are combined. */
+
+#ifndef Py_UNICODE_WIDE
+
+# define PyUnicode_AsASCIIString PyUnicodeUCS2_AsASCIIString
+# define PyUnicode_AsCharmapString PyUnicodeUCS2_AsCharmapString
+# define PyUnicode_AsEncodedObject PyUnicodeUCS2_AsEncodedObject
+# define PyUnicode_AsEncodedString PyUnicodeUCS2_AsEncodedString
+# define PyUnicode_AsLatin1String PyUnicodeUCS2_AsLatin1String
+# define PyUnicode_AsRawUnicodeEscapeString PyUnicodeUCS2_AsRawUnicodeEscapeString
+# define PyUnicode_AsUTF16String PyUnicodeUCS2_AsUTF16String
+# define PyUnicode_AsUTF8String PyUnicodeUCS2_AsUTF8String
+# define PyUnicode_AsUnicode PyUnicodeUCS2_AsUnicode
+# define PyUnicode_AsUnicodeEscapeString PyUnicodeUCS2_AsUnicodeEscapeString
+# define PyUnicode_AsWideChar PyUnicodeUCS2_AsWideChar
+# define PyUnicode_Compare PyUnicodeUCS2_Compare
+# define PyUnicode_Concat PyUnicodeUCS2_Concat
+# define PyUnicode_Contains PyUnicodeUCS2_Contains
+# define PyUnicode_Count PyUnicodeUCS2_Count
+# define PyUnicode_Decode PyUnicodeUCS2_Decode
+# define PyUnicode_DecodeASCII PyUnicodeUCS2_DecodeASCII
+# define PyUnicode_DecodeCharmap PyUnicodeUCS2_DecodeCharmap
+# define PyUnicode_DecodeLatin1 PyUnicodeUCS2_DecodeLatin1
+# define PyUnicode_DecodeRawUnicodeEscape PyUnicodeUCS2_DecodeRawUnicodeEscape
+# define PyUnicode_DecodeUTF16 PyUnicodeUCS2_DecodeUTF16
+# define PyUnicode_DecodeUTF16Stateful PyUnicodeUCS2_DecodeUTF16Stateful
+# define PyUnicode_DecodeUTF8 PyUnicodeUCS2_DecodeUTF8
+# define PyUnicode_DecodeUTF8Stateful PyUnicodeUCS2_DecodeUTF8Stateful
+# define PyUnicode_DecodeUnicodeEscape PyUnicodeUCS2_DecodeUnicodeEscape
+# define PyUnicode_Encode PyUnicodeUCS2_Encode
+# define PyUnicode_EncodeASCII PyUnicodeUCS2_EncodeASCII
+# define PyUnicode_EncodeCharmap PyUnicodeUCS2_EncodeCharmap
+# define PyUnicode_EncodeDecimal PyUnicodeUCS2_EncodeDecimal
+# define PyUnicode_EncodeLatin1 PyUnicodeUCS2_EncodeLatin1
+# define PyUnicode_EncodeRawUnicodeEscape PyUnicodeUCS2_EncodeRawUnicodeEscape
+# define PyUnicode_EncodeUTF16 PyUnicodeUCS2_EncodeUTF16
+# define PyUnicode_EncodeUTF8 PyUnicodeUCS2_EncodeUTF8
+# define PyUnicode_EncodeUnicodeEscape PyUnicodeUCS2_EncodeUnicodeEscape
+# define PyUnicode_Find PyUnicodeUCS2_Find
+# define PyUnicode_Format PyUnicodeUCS2_Format
+# define PyUnicode_FromEncodedObject PyUnicodeUCS2_FromEncodedObject
+# define PyUnicode_FromObject PyUnicodeUCS2_FromObject
+# define PyUnicode_FromOrdinal PyUnicodeUCS2_FromOrdinal
+# define PyUnicode_FromUnicode PyUnicodeUCS2_FromUnicode
+# define PyUnicode_FromWideChar PyUnicodeUCS2_FromWideChar
+# define PyUnicode_GetDefaultEncoding PyUnicodeUCS2_GetDefaultEncoding
+# define PyUnicode_GetMax PyUnicodeUCS2_GetMax
+# define PyUnicode_GetSize PyUnicodeUCS2_GetSize
+# define PyUnicode_Join PyUnicodeUCS2_Join
+# define PyUnicode_Replace PyUnicodeUCS2_Replace
+# define PyUnicode_Resize PyUnicodeUCS2_Resize
+# define PyUnicode_SetDefaultEncoding PyUnicodeUCS2_SetDefaultEncoding
+# define PyUnicode_Split PyUnicodeUCS2_Split
+# define PyUnicode_RSplit PyUnicodeUCS2_RSplit
+# define PyUnicode_Splitlines PyUnicodeUCS2_Splitlines
+# define PyUnicode_Tailmatch PyUnicodeUCS2_Tailmatch
+# define PyUnicode_Translate PyUnicodeUCS2_Translate
+# define PyUnicode_TranslateCharmap PyUnicodeUCS2_TranslateCharmap
+# define _PyUnicode_AsDefaultEncodedString _PyUnicodeUCS2_AsDefaultEncodedString
+# define _PyUnicode_Fini _PyUnicodeUCS2_Fini
+# define _PyUnicode_Init _PyUnicodeUCS2_Init
+# define _PyUnicode_IsAlpha _PyUnicodeUCS2_IsAlpha
+# define _PyUnicode_IsDecimalDigit _PyUnicodeUCS2_IsDecimalDigit
+# define _PyUnicode_IsDigit _PyUnicodeUCS2_IsDigit
+# define _PyUnicode_IsLinebreak _PyUnicodeUCS2_IsLinebreak
+# define _PyUnicode_IsLowercase _PyUnicodeUCS2_IsLowercase
+# define _PyUnicode_IsNumeric _PyUnicodeUCS2_IsNumeric
+# define _PyUnicode_IsTitlecase _PyUnicodeUCS2_IsTitlecase
+# define _PyUnicode_IsUppercase _PyUnicodeUCS2_IsUppercase
+# define _PyUnicode_IsWhitespace _PyUnicodeUCS2_IsWhitespace
+# define _PyUnicode_ToDecimalDigit _PyUnicodeUCS2_ToDecimalDigit
+# define _PyUnicode_ToDigit _PyUnicodeUCS2_ToDigit
+# define _PyUnicode_ToLowercase _PyUnicodeUCS2_ToLowercase
+# define _PyUnicode_ToNumeric _PyUnicodeUCS2_ToNumeric
+# define _PyUnicode_ToTitlecase _PyUnicodeUCS2_ToTitlecase
+# define _PyUnicode_ToUppercase _PyUnicodeUCS2_ToUppercase
+
+#else
+
+# define PyUnicode_AsASCIIString PyUnicodeUCS4_AsASCIIString
+# define PyUnicode_AsCharmapString PyUnicodeUCS4_AsCharmapString
+# define PyUnicode_AsEncodedObject PyUnicodeUCS4_AsEncodedObject
+# define PyUnicode_AsEncodedString PyUnicodeUCS4_AsEncodedString
+# define PyUnicode_AsLatin1String PyUnicodeUCS4_AsLatin1String
+# define PyUnicode_AsRawUnicodeEscapeString PyUnicodeUCS4_AsRawUnicodeEscapeString
+# define PyUnicode_AsUTF16String PyUnicodeUCS4_AsUTF16String
+# define PyUnicode_AsUTF8String PyUnicodeUCS4_AsUTF8String
+# define PyUnicode_AsUnicode PyUnicodeUCS4_AsUnicode
+# define PyUnicode_AsUnicodeEscapeString PyUnicodeUCS4_AsUnicodeEscapeString
+# define PyUnicode_AsWideChar PyUnicodeUCS4_AsWideChar
+# define PyUnicode_Compare PyUnicodeUCS4_Compare
+# define PyUnicode_Concat PyUnicodeUCS4_Concat
+# define PyUnicode_Contains PyUnicodeUCS4_Contains
+# define PyUnicode_Count PyUnicodeUCS4_Count
+# define PyUnicode_Decode PyUnicodeUCS4_Decode
+# define PyUnicode_DecodeASCII PyUnicodeUCS4_DecodeASCII
+# define PyUnicode_DecodeCharmap PyUnicodeUCS4_DecodeCharmap
+# define PyUnicode_DecodeLatin1 PyUnicodeUCS4_DecodeLatin1
+# define PyUnicode_DecodeRawUnicodeEscape PyUnicodeUCS4_DecodeRawUnicodeEscape
+# define PyUnicode_DecodeUTF16 PyUnicodeUCS4_DecodeUTF16
+# define PyUnicode_DecodeUTF16Stateful PyUnicodeUCS4_DecodeUTF16Stateful
+# define PyUnicode_DecodeUTF8 PyUnicodeUCS4_DecodeUTF8
+# define PyUnicode_DecodeUTF8Stateful PyUnicodeUCS4_DecodeUTF8Stateful
+# define PyUnicode_DecodeUnicodeEscape PyUnicodeUCS4_DecodeUnicodeEscape
+# define PyUnicode_Encode PyUnicodeUCS4_Encode
+# define PyUnicode_EncodeASCII PyUnicodeUCS4_EncodeASCII
+# define PyUnicode_EncodeCharmap PyUnicodeUCS4_EncodeCharmap
+# define PyUnicode_EncodeDecimal PyUnicodeUCS4_EncodeDecimal
+# define PyUnicode_EncodeLatin1 PyUnicodeUCS4_EncodeLatin1
+# define PyUnicode_EncodeRawUnicodeEscape PyUnicodeUCS4_EncodeRawUnicodeEscape
+# define PyUnicode_EncodeUTF16 PyUnicodeUCS4_EncodeUTF16
+# define PyUnicode_EncodeUTF8 PyUnicodeUCS4_EncodeUTF8
+# define PyUnicode_EncodeUnicodeEscape PyUnicodeUCS4_EncodeUnicodeEscape
+# define PyUnicode_Find PyUnicodeUCS4_Find
+# define PyUnicode_Format PyUnicodeUCS4_Format
+# define PyUnicode_FromEncodedObject PyUnicodeUCS4_FromEncodedObject
+# define PyUnicode_FromObject PyUnicodeUCS4_FromObject
+# define PyUnicode_FromOrdinal PyUnicodeUCS4_FromOrdinal
+# define PyUnicode_FromUnicode PyUnicodeUCS4_FromUnicode
+# define PyUnicode_FromWideChar PyUnicodeUCS4_FromWideChar
+# define PyUnicode_GetDefaultEncoding PyUnicodeUCS4_GetDefaultEncoding
+# define PyUnicode_GetMax PyUnicodeUCS4_GetMax
+# define PyUnicode_GetSize PyUnicodeUCS4_GetSize
+# define PyUnicode_Join PyUnicodeUCS4_Join
+# define PyUnicode_Replace PyUnicodeUCS4_Replace
+# define PyUnicode_Resize PyUnicodeUCS4_Resize
+# define PyUnicode_SetDefaultEncoding PyUnicodeUCS4_SetDefaultEncoding
+# define PyUnicode_Split PyUnicodeUCS4_Split
+# define PyUnicode_Splitlines PyUnicodeUCS4_Splitlines
+# define PyUnicode_Tailmatch PyUnicodeUCS4_Tailmatch
+# define PyUnicode_Translate PyUnicodeUCS4_Translate
+# define PyUnicode_TranslateCharmap PyUnicodeUCS4_TranslateCharmap
+# define _PyUnicode_AsDefaultEncodedString _PyUnicodeUCS4_AsDefaultEncodedString
+# define _PyUnicode_Fini _PyUnicodeUCS4_Fini
+# define _PyUnicode_Init _PyUnicodeUCS4_Init
+# define _PyUnicode_IsAlpha _PyUnicodeUCS4_IsAlpha
+# define _PyUnicode_IsDecimalDigit _PyUnicodeUCS4_IsDecimalDigit
+# define _PyUnicode_IsDigit _PyUnicodeUCS4_IsDigit
+# define _PyUnicode_IsLinebreak _PyUnicodeUCS4_IsLinebreak
+# define _PyUnicode_IsLowercase _PyUnicodeUCS4_IsLowercase
+# define _PyUnicode_IsNumeric _PyUnicodeUCS4_IsNumeric
+# define _PyUnicode_IsTitlecase _PyUnicodeUCS4_IsTitlecase
+# define _PyUnicode_IsUppercase _PyUnicodeUCS4_IsUppercase
+# define _PyUnicode_IsWhitespace _PyUnicodeUCS4_IsWhitespace
+# define _PyUnicode_ToDecimalDigit _PyUnicodeUCS4_ToDecimalDigit
+# define _PyUnicode_ToDigit _PyUnicodeUCS4_ToDigit
+# define _PyUnicode_ToLowercase _PyUnicodeUCS4_ToLowercase
+# define _PyUnicode_ToNumeric _PyUnicodeUCS4_ToNumeric
+# define _PyUnicode_ToTitlecase _PyUnicodeUCS4_ToTitlecase
+# define _PyUnicode_ToUppercase _PyUnicodeUCS4_ToUppercase
+
+
+#endif
+
+/* --- Internal Unicode Operations ---------------------------------------- */
+
+/* If you want Python to use the compiler's wctype.h functions instead
+   of the ones supplied with Python, define WANT_WCTYPE_FUNCTIONS or
+   configure Python using --with-wctype-functions.  This reduces the
+   interpreter's code size. */
+
+#if defined(HAVE_USABLE_WCHAR_T) && defined(WANT_WCTYPE_FUNCTIONS)
+
+#include <wctype.h>
+
+#define Py_UNICODE_ISSPACE(ch) iswspace(ch)
+
+#define Py_UNICODE_ISLOWER(ch) iswlower(ch)
+#define Py_UNICODE_ISUPPER(ch) iswupper(ch)
+#define Py_UNICODE_ISTITLE(ch) _PyUnicode_IsTitlecase(ch)
+#define Py_UNICODE_ISLINEBREAK(ch) _PyUnicode_IsLinebreak(ch)
+
+#define Py_UNICODE_TOLOWER(ch) towlower(ch)
+#define Py_UNICODE_TOUPPER(ch) towupper(ch)
+#define Py_UNICODE_TOTITLE(ch) _PyUnicode_ToTitlecase(ch)
+
+#define Py_UNICODE_ISDECIMAL(ch) _PyUnicode_IsDecimalDigit(ch)
+#define Py_UNICODE_ISDIGIT(ch) _PyUnicode_IsDigit(ch)
+#define Py_UNICODE_ISNUMERIC(ch) _PyUnicode_IsNumeric(ch)
+
+#define Py_UNICODE_TODECIMAL(ch) _PyUnicode_ToDecimalDigit(ch)
+#define Py_UNICODE_TODIGIT(ch) _PyUnicode_ToDigit(ch)
+#define Py_UNICODE_TONUMERIC(ch) _PyUnicode_ToNumeric(ch)
+
+#define Py_UNICODE_ISALPHA(ch) iswalpha(ch)
+
+#else
+
+#define Py_UNICODE_ISSPACE(ch) _PyUnicode_IsWhitespace(ch)
+
+#define Py_UNICODE_ISLOWER(ch) _PyUnicode_IsLowercase(ch)
+#define Py_UNICODE_ISUPPER(ch) _PyUnicode_IsUppercase(ch)
+#define Py_UNICODE_ISTITLE(ch) _PyUnicode_IsTitlecase(ch)
+#define Py_UNICODE_ISLINEBREAK(ch) _PyUnicode_IsLinebreak(ch)
+
+#define Py_UNICODE_TOLOWER(ch) _PyUnicode_ToLowercase(ch)
+#define Py_UNICODE_TOUPPER(ch) _PyUnicode_ToUppercase(ch)
+#define Py_UNICODE_TOTITLE(ch) _PyUnicode_ToTitlecase(ch)
+
+#define Py_UNICODE_ISDECIMAL(ch) _PyUnicode_IsDecimalDigit(ch)
+#define Py_UNICODE_ISDIGIT(ch) _PyUnicode_IsDigit(ch)
+#define Py_UNICODE_ISNUMERIC(ch) _PyUnicode_IsNumeric(ch)
+
+#define Py_UNICODE_TODECIMAL(ch) _PyUnicode_ToDecimalDigit(ch)
+#define Py_UNICODE_TODIGIT(ch) _PyUnicode_ToDigit(ch)
+#define Py_UNICODE_TONUMERIC(ch) _PyUnicode_ToNumeric(ch)
+
+#define Py_UNICODE_ISALPHA(ch) _PyUnicode_IsAlpha(ch)
+
+#endif
+
+#define Py_UNICODE_ISALNUM(ch) \
+       (Py_UNICODE_ISALPHA(ch) || \
+        Py_UNICODE_ISDECIMAL(ch) || \
+        Py_UNICODE_ISDIGIT(ch) || \
+        Py_UNICODE_ISNUMERIC(ch))
+
+#define Py_UNICODE_COPY(target, source, length)\
+    (memcpy((target), (source), (length)*sizeof(Py_UNICODE)))
+
+#define Py_UNICODE_FILL(target, value, length) do\
+    {int i; for (i = 0; i < (length); i++) (target)[i] = (value);}\
+    while (0)
+
+#define Py_UNICODE_MATCH(string, offset, substring)\
+    ((*((string)->str + (offset)) == *((substring)->str)) &&\
+     !memcmp((string)->str + (offset), (substring)->str,\
+             (substring)->length*sizeof(Py_UNICODE)))
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/* --- Unicode Type ------------------------------------------------------- */
+
+typedef struct {
+    PyObject_HEAD
+    int length;			/* Length of raw Unicode data in buffer */
+    Py_UNICODE *str;		/* Raw Unicode buffer */
+    long hash;			/* Hash value; -1 if not set */
+    PyObject *defenc;		/* (Default) Encoded version as Python
+				   string, or NULL; this is used for
+				   implementing the buffer protocol */
+} PyUnicodeObject;
+
+PyAPI_DATA(PyTypeObject) PyUnicode_Type;
+
+#define PyUnicode_Check(op) PyObject_TypeCheck(op, &PyUnicode_Type)
+#define PyUnicode_CheckExact(op) ((op)->ob_type == &PyUnicode_Type)
+
+/* Fast access macros */
+#define PyUnicode_GET_SIZE(op) \
+        (((PyUnicodeObject *)(op))->length)
+#define PyUnicode_GET_DATA_SIZE(op) \
+        (((PyUnicodeObject *)(op))->length * sizeof(Py_UNICODE))
+#define PyUnicode_AS_UNICODE(op) \
+        (((PyUnicodeObject *)(op))->str)
+#define PyUnicode_AS_DATA(op) \
+        ((const char *)((PyUnicodeObject *)(op))->str)
+
+/* --- Constants ---------------------------------------------------------- */
+
+/* This Unicode character will be used as replacement character during
+   decoding if the errors argument is set to "replace". Note: the
+   Unicode character U+FFFD is the official REPLACEMENT CHARACTER in
+   Unicode 3.0. */
+
+#define Py_UNICODE_REPLACEMENT_CHARACTER ((Py_UNICODE) 0xFFFD)
+
+/* === Public API ========================================================= */
+
+/* --- Plain Py_UNICODE --------------------------------------------------- */
+
+/* Create a Unicode Object from the Py_UNICODE buffer u of the given
+   size. 
+
+   u may be NULL which causes the contents to be undefined. It is the
+   user's responsibility to fill in the needed data afterwards. Note
+   that modifying the Unicode object contents after construction is
+   only allowed if u was set to NULL.
+
+   The buffer is copied into the new object. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode(
+    const Py_UNICODE *u,        /* Unicode buffer */
+    int size                    /* size of buffer */
+    );
+
+/* Return a read-only pointer to the Unicode object's internal
+   Py_UNICODE buffer. */
+
+PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+/* Get the length of the Unicode object. */
+
+PyAPI_FUNC(int) PyUnicode_GetSize(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+/* Get the maximum ordinal for a Unicode character. */
+PyAPI_FUNC(Py_UNICODE) PyUnicode_GetMax(void);
+
+/* Resize an already allocated Unicode object to the new size length.
+
+   *unicode is modified to point to the new (resized) object and 0
+   returned on success.
+
+   This API may only be called by the function which also called the
+   Unicode constructor. The refcount on the object must be 1. Otherwise,
+   an error is returned.
+
+   Error handling is implemented as follows: an exception is set, -1
+   is returned and *unicode left untouched.
+
+*/
+
+PyAPI_FUNC(int) PyUnicode_Resize(
+    PyObject **unicode,		/* Pointer to the Unicode object */
+    int length			/* New length */
+    );
+
+/* Coerce obj to an Unicode object and return a reference with
+   *incremented* refcount.
+
+   Coercion is done in the following way:
+
+   1. String and other char buffer compatible objects are decoded
+      under the assumptions that they contain data using the current
+      default encoding. Decoding is done in "strict" mode.
+
+   2. All other objects (including Unicode objects) raise an
+      exception.
+
+   The API returns NULL in case of an error. The caller is responsible
+   for decref'ing the returned objects.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_FromEncodedObject(
+    register PyObject *obj, 	/* Object */
+    const char *encoding,       /* encoding */
+    const char *errors          /* error handling */
+    );
+
+/* Coerce obj to an Unicode object and return a reference with
+   *incremented* refcount.
+   
+   Unicode objects are passed back as-is (subclasses are converted to
+   true Unicode objects), all other objects are delegated to
+   PyUnicode_FromEncodedObject(obj, NULL, "strict") which results in
+   using the default encoding as basis for decoding the object.
+
+   The API returns NULL in case of an error. The caller is responsible
+   for decref'ing the returned objects.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_FromObject(
+    register PyObject *obj 	/* Object */
+    );
+
+/* --- wchar_t support for platforms which support it --------------------- */
+
+#ifdef HAVE_WCHAR_H
+
+/* Create a Unicode Object from the wchar_t buffer w of the given
+   size.
+
+   The buffer is copied into the new object. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_FromWideChar(
+    register const wchar_t *w,  /* wchar_t buffer */
+    int size                    /* size of buffer */
+    );
+
+/* Copies the Unicode Object contents into the wchar_t buffer w.  At
+   most size wchar_t characters are copied.
+
+   Note that the resulting wchar_t string may or may not be
+   0-terminated.  It is the responsibility of the caller to make sure
+   that the wchar_t string is 0-terminated in case this is required by
+   the application.
+
+   Returns the number of wchar_t characters copied (excluding a
+   possibly trailing 0-termination character) or -1 in case of an
+   error. */
+
+PyAPI_FUNC(int) PyUnicode_AsWideChar(
+    PyUnicodeObject *unicode,   /* Unicode object */
+    register wchar_t *w,        /* wchar_t buffer */
+    int size                    /* size of buffer */
+    );
+
+#endif
+
+/* --- Unicode ordinals --------------------------------------------------- */
+
+/* Create a Unicode Object from the given Unicode code point ordinal. 
+ 
+   The ordinal must be in range(0x10000) on narrow Python builds
+   (UCS2), and range(0x110000) on wide builds (UCS4). A ValueError is
+   raised in case it is not.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_FromOrdinal(int ordinal);
+
+/* === Builtin Codecs ===================================================== 
+
+   Many of these APIs take two arguments encoding and errors. These
+   parameters encoding and errors have the same semantics as the ones
+   of the builtin unicode() API. 
+
+   Setting encoding to NULL causes the default encoding to be used.
+
+   Error handling is set by errors which may also be set to NULL
+   meaning to use the default handling defined for the codec. Default
+   error handling for all builtin codecs is "strict" (ValueErrors are
+   raised).
+
+   The codecs all use a similar interface. Only deviation from the
+   generic ones are documented.
+
+*/
+
+/* --- Manage the default encoding ---------------------------------------- */
+
+/* Return a Python string holding the default encoded value of the
+   Unicode object. 
+
+   The resulting string is cached in the Unicode object for subsequent
+   usage by this function. The cached version is needed to implement
+   the character buffer interface and will live (at least) as long as
+   the Unicode object itself.
+
+   The refcount of the string is *not* incremented.
+
+   *** Exported for internal use by the interpreter only !!! ***
+
+*/
+
+PyAPI_FUNC(PyObject *) _PyUnicode_AsDefaultEncodedString(
+    PyObject *, const char *);
+
+/* Returns the currently active default encoding.
+
+   The default encoding is currently implemented as run-time settable
+   process global.  This may change in future versions of the
+   interpreter to become a parameter which is managed on a per-thread
+   basis.
+   
+ */
+
+PyAPI_FUNC(const char*) PyUnicode_GetDefaultEncoding(void);
+
+/* Sets the currently active default encoding.
+
+   Returns 0 on success, -1 in case of an error.
+   
+ */
+
+PyAPI_FUNC(int) PyUnicode_SetDefaultEncoding(
+    const char *encoding	/* Encoding name in standard form */
+    );
+
+/* --- Generic Codecs ----------------------------------------------------- */
+
+/* Create a Unicode object by decoding the encoded string s of the
+   given size. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_Decode(
+    const char *s,              /* encoded string */
+    int size,                   /* size of buffer */
+    const char *encoding,       /* encoding */
+    const char *errors          /* error handling */
+    );
+
+/* Encodes a Py_UNICODE buffer of the given size and returns a 
+   Python string object. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_Encode(
+    const Py_UNICODE *s,        /* Unicode char buffer */
+    int size,                   /* number of Py_UNICODE chars to encode */
+    const char *encoding,       /* encoding */
+    const char *errors          /* error handling */
+    );
+
+/* Encodes a Unicode object and returns the result as Python
+   object. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsEncodedObject(
+    PyObject *unicode,	 	/* Unicode object */
+    const char *encoding,	/* encoding */
+    const char *errors		/* error handling */
+    );
+
+/* Encodes a Unicode object and returns the result as Python string
+   object. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsEncodedString(
+    PyObject *unicode,	 	/* Unicode object */
+    const char *encoding,	/* encoding */
+    const char *errors		/* error handling */
+    );
+
+/* --- UTF-7 Codecs ------------------------------------------------------- */
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeUTF7(
+    const char *string, 	/* UTF-7 encoded string */
+    int length,	 		/* size of string */
+    const char *errors		/* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeUTF7(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length,	 		/* number of Py_UNICODE chars to encode */
+    int encodeSetO,             /* force the encoder to encode characters in
+                                   Set O, as described in RFC2152 */
+    int encodeWhiteSpace,       /* force the encoder to encode space, tab,
+                                   carriage return and linefeed characters */
+    const char *errors		/* error handling */
+    );
+
+/* --- UTF-8 Codecs ------------------------------------------------------- */
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeUTF8(
+    const char *string, 	/* UTF-8 encoded string */
+    int length,	 		/* size of string */
+    const char *errors		/* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeUTF8Stateful(
+    const char *string, 	/* UTF-8 encoded string */
+    int length,	 		/* size of string */
+    const char *errors,		/* error handling */
+    int *consumed		/* bytes consumed */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsUTF8String(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeUTF8(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length,	 		/* number of Py_UNICODE chars to encode */
+    const char *errors		/* error handling */
+    );
+
+/* --- UTF-16 Codecs ------------------------------------------------------ */
+
+/* Decodes length bytes from a UTF-16 encoded buffer string and returns
+   the corresponding Unicode object.
+
+   errors (if non-NULL) defines the error handling. It defaults
+   to "strict". 
+
+   If byteorder is non-NULL, the decoder starts decoding using the
+   given byte order:
+
+	*byteorder == -1: little endian
+	*byteorder == 0:  native order
+	*byteorder == 1:  big endian
+
+   In native mode, the first two bytes of the stream are checked for a
+   BOM mark. If found, the BOM mark is analysed, the byte order
+   adjusted and the BOM skipped.  In the other modes, no BOM mark
+   interpretation is done. After completion, *byteorder is set to the
+   current byte order at the end of input data.
+
+   If byteorder is NULL, the codec starts in native order mode.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeUTF16(
+    const char *string, 	/* UTF-16 encoded string */
+    int length,	 		/* size of string */
+    const char *errors,		/* error handling */
+    int *byteorder		/* pointer to byteorder to use
+				   0=native;-1=LE,1=BE; updated on
+				   exit */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeUTF16Stateful(
+    const char *string, 	/* UTF-16 encoded string */
+    int length,	 		/* size of string */
+    const char *errors,		/* error handling */
+    int *byteorder,		/* pointer to byteorder to use
+				   0=native;-1=LE,1=BE; updated on
+				   exit */
+    int *consumed		/* bytes consumed */
+    );
+
+/* Returns a Python string using the UTF-16 encoding in native byte
+   order. The string always starts with a BOM mark.  */
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsUTF16String(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+/* Returns a Python string object holding the UTF-16 encoded value of
+   the Unicode data.
+
+   If byteorder is not 0, output is written according to the following
+   byte order:
+
+   byteorder == -1: little endian
+   byteorder == 0:  native byte order (writes a BOM mark)
+   byteorder == 1:  big endian
+
+   If byteorder is 0, the output string will always start with the
+   Unicode BOM mark (U+FEFF). In the other two modes, no BOM mark is
+   prepended.
+
+   Note that Py_UNICODE data is being interpreted as UTF-16 reduced to
+   UCS-2. This trick makes it possible to add full UTF-16 capabilities
+   at a later point without compromising the APIs.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeUTF16(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length,	 		/* number of Py_UNICODE chars to encode */
+    const char *errors,		/* error handling */
+    int byteorder		/* byteorder to use 0=BOM+native;-1=LE,1=BE */
+    );
+
+/* --- Unicode-Escape Codecs ---------------------------------------------- */
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeUnicodeEscape(
+    const char *string, 	/* Unicode-Escape encoded string */
+    int length,	 		/* size of string */
+    const char *errors		/* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsUnicodeEscapeString(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeUnicodeEscape(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length	 		/* Number of Py_UNICODE chars to encode */
+    );
+
+/* --- Raw-Unicode-Escape Codecs ------------------------------------------ */
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeRawUnicodeEscape(
+    const char *string, 	/* Raw-Unicode-Escape encoded string */
+    int length,	 		/* size of string */
+    const char *errors		/* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsRawUnicodeEscapeString(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeRawUnicodeEscape(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length	 		/* Number of Py_UNICODE chars to encode */
+    );
+
+/* --- Latin-1 Codecs ----------------------------------------------------- 
+
+   Note: Latin-1 corresponds to the first 256 Unicode ordinals.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeLatin1(
+    const char *string, 	/* Latin-1 encoded string */
+    int length,	 		/* size of string */
+    const char *errors		/* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsLatin1String(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeLatin1(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length,	 		/* Number of Py_UNICODE chars to encode */
+    const char *errors		/* error handling */
+    );
+
+/* --- ASCII Codecs ------------------------------------------------------- 
+
+   Only 7-bit ASCII data is accepted. All other codes generate errors.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeASCII(
+    const char *string, 	/* ASCII encoded string */
+    int length,	 		/* size of string */
+    const char *errors		/* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsASCIIString(
+    PyObject *unicode	 	/* Unicode object */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeASCII(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length,	 		/* Number of Py_UNICODE chars to encode */
+    const char *errors		/* error handling */
+    );
+
+/* --- Character Map Codecs ----------------------------------------------- 
+
+   This codec uses mappings to encode and decode characters. 
+
+   Decoding mappings must map single string characters to single
+   Unicode characters, integers (which are then interpreted as Unicode
+   ordinals) or None (meaning "undefined mapping" and causing an
+   error).
+
+   Encoding mappings must map single Unicode characters to single
+   string characters, integers (which are then interpreted as Latin-1
+   ordinals) or None (meaning "undefined mapping" and causing an
+   error).
+
+   If a character lookup fails with a LookupError, the character is
+   copied as-is meaning that its ordinal value will be interpreted as
+   Unicode or Latin-1 ordinal resp. Because of this mappings only need
+   to contain those mappings which map characters to different code
+   points.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeCharmap(
+    const char *string, 	/* Encoded string */
+    int length,	 		/* size of string */
+    PyObject *mapping,		/* character mapping 
+				   (char ordinal -> unicode ordinal) */
+    const char *errors		/* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsCharmapString(
+    PyObject *unicode,	 	/* Unicode object */
+    PyObject *mapping		/* character mapping 
+				   (unicode ordinal -> char ordinal) */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeCharmap(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length,	 		/* Number of Py_UNICODE chars to encode */
+    PyObject *mapping,		/* character mapping 
+				   (unicode ordinal -> char ordinal) */
+    const char *errors		/* error handling */
+    );
+
+/* Translate a Py_UNICODE buffer of the given length by applying a
+   character mapping table to it and return the resulting Unicode
+   object.
+
+   The mapping table must map Unicode ordinal integers to Unicode
+   ordinal integers or None (causing deletion of the character). 
+
+   Mapping tables may be dictionaries or sequences. Unmapped character
+   ordinals (ones which cause a LookupError) are left untouched and
+   are copied as-is.
+
+*/
+
+PyAPI_FUNC(PyObject *) PyUnicode_TranslateCharmap(
+    const Py_UNICODE *data, 	/* Unicode char buffer */
+    int length,	 		/* Number of Py_UNICODE chars to encode */
+    PyObject *table,		/* Translate table */
+    const char *errors		/* error handling */
+    );
+
+#ifdef MS_WIN32
+
+/* --- MBCS codecs for Windows -------------------------------------------- */
+
+PyAPI_FUNC(PyObject*) PyUnicode_DecodeMBCS(
+    const char *string,         /* MBCS encoded string */
+    int length,                 /* size of string */
+    const char *errors          /* error handling */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_AsMBCSString(
+    PyObject *unicode           /* Unicode object */
+    );
+
+PyAPI_FUNC(PyObject*) PyUnicode_EncodeMBCS(
+    const Py_UNICODE *data,     /* Unicode char buffer */
+    int length,                 /* Number of Py_UNICODE chars to encode */
+    const char *errors          /* error handling */
+    );
+
+#endif /* MS_WIN32 */
+
+/* --- Decimal Encoder ---------------------------------------------------- */
+
+/* Takes a Unicode string holding a decimal value and writes it into
+   an output buffer using standard ASCII digit codes.
+
+   The output buffer has to provide at least length+1 bytes of storage
+   area. The output string is 0-terminated.
+
+   The encoder converts whitespace to ' ', decimal characters to their
+   corresponding ASCII digit and all other Latin-1 characters except
+   \0 as-is. Characters outside this range (Unicode ordinals 1-256)
+   are treated as errors. This includes embedded NULL bytes.
+
+   Error handling is defined by the errors argument:
+
+      NULL or "strict": raise a ValueError
+      "ignore": ignore the wrong characters (these are not copied to the
+		output buffer)
+      "replace": replaces illegal characters with '?'
+
+   Returns 0 on success, -1 on failure.
+
+*/
+
+PyAPI_FUNC(int) PyUnicode_EncodeDecimal(
+    Py_UNICODE *s,		/* Unicode buffer */
+    int length,			/* Number of Py_UNICODE chars to encode */
+    char *output,		/* Output buffer; must have size >= length */
+    const char *errors		/* error handling */
+    );
+
+/* --- Methods & Slots ----------------------------------------------------
+
+   These are capable of handling Unicode objects and strings on input
+   (we refer to them as strings in the descriptions) and return
+   Unicode objects or integers as appropriate. */
+
+/* Concat two strings giving a new Unicode string. */
+
+PyAPI_FUNC(PyObject*) PyUnicode_Concat(
+    PyObject *left,	 	/* Left string */
+    PyObject *right	 	/* Right string */
+    );
+
+/* Split a string giving a list of Unicode strings.
+
+   If sep is NULL, splitting will be done at all whitespace
+   substrings. Otherwise, splits occur at the given separator.
+
+   At most maxsplit splits will be done. If negative, no limit is set.
+
+   Separators are not included in the resulting list.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_Split(
+    PyObject *s,		/* String to split */
+    PyObject *sep,		/* String separator */
+    int maxsplit		/* Maxsplit count */
+    );		
+
+/* Ditto, but split at line breaks.
+
+   CRLF is considered to be one line break. Line breaks are not
+   included in the resulting list. */
+    
+PyAPI_FUNC(PyObject*) PyUnicode_Splitlines(
+    PyObject *s,		/* String to split */
+    int keepends		/* If true, line end markers are included */
+    );		
+
+/* Split a string giving a list of Unicode strings.
+
+   If sep is NULL, splitting will be done at all whitespace
+   substrings. Otherwise, splits occur at the given separator.
+
+   At most maxsplit splits will be done. But unlike PyUnicode_Split
+   PyUnicode_RSplit splits from the end of the string. If negative,
+   no limit is set.
+
+   Separators are not included in the resulting list.
+
+*/
+
+PyAPI_FUNC(PyObject*) PyUnicode_RSplit(
+    PyObject *s,		/* String to split */
+    PyObject *sep,		/* String separator */
+    int maxsplit		/* Maxsplit count */
+    );		
+
+/* Translate a string by applying a character mapping table to it and
+   return the resulting Unicode object.
+
+   The mapping table must map Unicode ordinal integers to Unicode
+   ordinal integers or None (causing deletion of the character). 
+
+   Mapping tables may be dictionaries or sequences. Unmapped character
+   ordinals (ones which cause a LookupError) are left untouched and
+   are copied as-is.
+
+*/
+
+PyAPI_FUNC(PyObject *) PyUnicode_Translate(
+    PyObject *str,		/* String */ 
+    PyObject *table,		/* Translate table */
+    const char *errors		/* error handling */
+    );
+
+/* Join a sequence of strings using the given separator and return
+   the resulting Unicode string. */
+    
+PyAPI_FUNC(PyObject*) PyUnicode_Join(
+    PyObject *separator, 	/* Separator string */
+    PyObject *seq	 	/* Sequence object */
+    );
+
+/* Return 1 if substr matches str[start:end] at the given tail end, 0
+   otherwise. */
+
+PyAPI_FUNC(int) PyUnicode_Tailmatch(
+    PyObject *str,		/* String */ 
+    PyObject *substr,		/* Prefix or Suffix string */
+    int start,			/* Start index */
+    int end,			/* Stop index */
+    int direction		/* Tail end: -1 prefix, +1 suffix */
+    );
+
+/* Return the first position of substr in str[start:end] using the
+   given search direction or -1 if not found. -2 is returned in case
+   an error occurred and an exception is set. */
+
+PyAPI_FUNC(int) PyUnicode_Find(
+    PyObject *str,		/* String */ 
+    PyObject *substr,		/* Substring to find */
+    int start,			/* Start index */
+    int end,			/* Stop index */
+    int direction		/* Find direction: +1 forward, -1 backward */
+    );
+
+/* Count the number of occurrences of substr in str[start:end]. */
+
+PyAPI_FUNC(int) PyUnicode_Count(
+    PyObject *str,		/* String */ 
+    PyObject *substr,		/* Substring to count */
+    int start,			/* Start index */
+    int end			/* Stop index */
+    );
+
+/* Replace at most maxcount occurrences of substr in str with replstr
+   and return the resulting Unicode object. */
+
+PyAPI_FUNC(PyObject *) PyUnicode_Replace(
+    PyObject *str,		/* String */ 
+    PyObject *substr,		/* Substring to find */
+    PyObject *replstr,		/* Substring to replace */
+    int maxcount		/* Max. number of replacements to apply;
+				   -1 = all */
+    );
+
+/* Compare two strings and return -1, 0, 1 for less than, equal,
+   greater than resp. */
+
+PyAPI_FUNC(int) PyUnicode_Compare(
+    PyObject *left,		/* Left string */ 
+    PyObject *right		/* Right string */
+    );
+
+/* Apply an argument tuple or dictionary to a format string and return
+   the resulting Unicode string. */
+
+PyAPI_FUNC(PyObject *) PyUnicode_Format(
+    PyObject *format,		/* Format string */ 
+    PyObject *args		/* Argument tuple or dictionary */
+    );
+
+/* Checks whether element is contained in container and return 1/0
+   accordingly.
+
+   element has to coerce to a one-element Unicode string. -1 is
+   returned in case of an error. */
+
+PyAPI_FUNC(int) PyUnicode_Contains(
+    PyObject *container,	/* Container string */ 
+    PyObject *element		/* Element string */
+    );
+
+/* Externally visible for str.strip(unicode) */
+PyAPI_FUNC(PyObject *) _PyUnicode_XStrip(
+    PyUnicodeObject *self,
+    int striptype,
+    PyObject *sepobj
+    );
+
+/* === Characters Type APIs =============================================== */
+
+/* These should not be used directly. Use the Py_UNICODE_IS* and
+   Py_UNICODE_TO* macros instead. 
+
+   These APIs are implemented in Objects/unicodectype.c.
+
+*/
+
+PyAPI_FUNC(int) _PyUnicode_IsLowercase(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsUppercase(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsTitlecase(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsWhitespace(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsLinebreak(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(Py_UNICODE) _PyUnicode_ToLowercase(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(Py_UNICODE) _PyUnicode_ToUppercase(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(Py_UNICODE) _PyUnicode_ToTitlecase(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_ToDecimalDigit(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_ToDigit(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(double) _PyUnicode_ToNumeric(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsDecimalDigit(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsDigit(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsNumeric(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+PyAPI_FUNC(int) _PyUnicode_IsAlpha(
+    Py_UNICODE ch 	/* Unicode character */
+    );
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* Py_USING_UNICODE */
+#endif /* !Py_UNICODEOBJECT_H */
diff --git a/depot_tools/release/win/python_24/include/weakrefobject.h b/depot_tools/release/win/python_24/include/weakrefobject.h
new file mode 100644
index 0000000..daf490f
--- /dev/null
+++ b/depot_tools/release/win/python_24/include/weakrefobject.h
@@ -0,0 +1,75 @@
+/* Weak references objects for Python. */
+
+#ifndef Py_WEAKREFOBJECT_H
+#define Py_WEAKREFOBJECT_H
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+typedef struct _PyWeakReference PyWeakReference;
+
+/* PyWeakReference is the base struct for the Python ReferenceType, ProxyType,
+ * and CallableProxyType.
+ */
+struct _PyWeakReference {
+    PyObject_HEAD
+
+    /* The object to which this is a weak reference, or Py_None if none.
+     * Note that this is a stealth reference:  wr_object's refcount is
+     * not incremented to reflect this pointer.
+     */
+    PyObject *wr_object;
+
+    /* A callable to invoke when wr_object dies, or NULL if none. */
+    PyObject *wr_callback;
+
+    /* A cache for wr_object's hash code.  As usual for hashes, this is -1
+     * if the hash code isn't known yet.
+     */
+    long hash;
+
+    /* If wr_object is weakly referenced, wr_object has a doubly-linked NULL-
+     * terminated list of weak references to it.  These are the list pointers.
+     * If wr_object goes away, wr_object is set to Py_None, and these pointers
+     * have no meaning then.
+     */
+    PyWeakReference *wr_prev;
+    PyWeakReference *wr_next;
+};
+
+PyAPI_DATA(PyTypeObject) _PyWeakref_RefType;
+PyAPI_DATA(PyTypeObject) _PyWeakref_ProxyType;
+PyAPI_DATA(PyTypeObject) _PyWeakref_CallableProxyType;
+
+#define PyWeakref_CheckRef(op) PyObject_TypeCheck(op, &_PyWeakref_RefType)
+#define PyWeakref_CheckRefExact(op) \
+        ((op)->ob_type == &_PyWeakref_RefType)
+#define PyWeakref_CheckProxy(op) \
+        (((op)->ob_type == &_PyWeakref_ProxyType) || \
+         ((op)->ob_type == &_PyWeakref_CallableProxyType))
+
+/* This macro calls PyWeakref_CheckRef() last since that can involve a
+   function call; this makes it more likely that the function call
+   will be avoided. */
+#define PyWeakref_Check(op) \
+        (PyWeakref_CheckRef(op) || PyWeakref_CheckProxy(op))
+
+
+PyAPI_FUNC(PyObject *) PyWeakref_NewRef(PyObject *ob,
+                                              PyObject *callback);
+PyAPI_FUNC(PyObject *) PyWeakref_NewProxy(PyObject *ob,
+                                                PyObject *callback);
+PyAPI_FUNC(PyObject *) PyWeakref_GetObject(PyObject *ref);
+
+PyAPI_FUNC(long) _PyWeakref_GetWeakrefCount(PyWeakReference *head);
+
+PyAPI_FUNC(void) _PyWeakref_ClearRef(PyWeakReference *self);
+
+#define PyWeakref_GET_OBJECT(ref) (((PyWeakReference *)(ref))->wr_object)
+
+
+#ifdef __cplusplus
+}
+#endif
+#endif /* !Py_WEAKREFOBJECT_H */
diff --git a/depot_tools/release/win/python_24/libs/_bsddb.lib b/depot_tools/release/win/python_24/libs/_bsddb.lib
new file mode 100644
index 0000000..42da63c
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/_bsddb.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/_socket.lib b/depot_tools/release/win/python_24/libs/_socket.lib
new file mode 100644
index 0000000..9150154
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/_socket.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/_ssl.lib b/depot_tools/release/win/python_24/libs/_ssl.lib
new file mode 100644
index 0000000..94f6a2d
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/_ssl.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/_testcapi.lib b/depot_tools/release/win/python_24/libs/_testcapi.lib
new file mode 100644
index 0000000..9148db8d
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/_testcapi.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/bz2.lib b/depot_tools/release/win/python_24/libs/bz2.lib
new file mode 100644
index 0000000..07b255a
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/bz2.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/pyexpat.lib b/depot_tools/release/win/python_24/libs/pyexpat.lib
new file mode 100644
index 0000000..7f8da8b
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/pyexpat.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/python24.lib b/depot_tools/release/win/python_24/libs/python24.lib
new file mode 100644
index 0000000..6249d2b
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/python24.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/select.lib b/depot_tools/release/win/python_24/libs/select.lib
new file mode 100644
index 0000000..d8f689b
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/select.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/unicodedata.lib b/depot_tools/release/win/python_24/libs/unicodedata.lib
new file mode 100644
index 0000000..7830570d
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/unicodedata.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/winsound.lib b/depot_tools/release/win/python_24/libs/winsound.lib
new file mode 100644
index 0000000..ffd0937a
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/winsound.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/libs/zlib.lib b/depot_tools/release/win/python_24/libs/zlib.lib
new file mode 100644
index 0000000..999376f
--- /dev/null
+++ b/depot_tools/release/win/python_24/libs/zlib.lib
Binary files differ
diff --git a/depot_tools/release/win/python_24/python.exe b/depot_tools/release/win/python_24/python.exe
new file mode 100644
index 0000000..b2a494b
--- /dev/null
+++ b/depot_tools/release/win/python_24/python.exe
Binary files differ
diff --git a/depot_tools/release/win/python_24/python24.dll b/depot_tools/release/win/python_24/python24.dll
new file mode 100644
index 0000000..1466a9c2
--- /dev/null
+++ b/depot_tools/release/win/python_24/python24.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/pythoncom24.dll b/depot_tools/release/win/python_24/pythoncom24.dll
new file mode 100644
index 0000000..2056709a
--- /dev/null
+++ b/depot_tools/release/win/python_24/pythoncom24.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/pythonw.exe b/depot_tools/release/win/python_24/pythonw.exe
new file mode 100644
index 0000000..e6dfb12c
--- /dev/null
+++ b/depot_tools/release/win/python_24/pythonw.exe
Binary files differ
diff --git a/depot_tools/release/win/python_24/pywintypes24.dll b/depot_tools/release/win/python_24/pywintypes24.dll
new file mode 100644
index 0000000..149f244
--- /dev/null
+++ b/depot_tools/release/win/python_24/pywintypes24.dll
Binary files differ
diff --git a/depot_tools/release/win/python_24/setup_env.bat b/depot_tools/release/win/python_24/setup_env.bat
new file mode 100644
index 0000000..2234e78
--- /dev/null
+++ b/depot_tools/release/win/python_24/setup_env.bat
@@ -0,0 +1,8 @@
+:: This script must not rely on any external tools or PATH values.
+@echo OFF
+
+:: Let advanced users checkout the tools in just one P4 enlistment
+if "%SETUP_ENV_PYTHON_24%"=="done" goto :EOF
+set  SETUP_ENV_PYTHON_24=done
+
+set PATH=%PATH%;%~dp0
diff --git a/depot_tools/release/win/svn/README.google b/depot_tools/release/win/svn/README.google
new file mode 100644
index 0000000..0be1a47
--- /dev/null
+++ b/depot_tools/release/win/svn/README.google
@@ -0,0 +1,5 @@
+This directory contains a copy of the Windows Subversion client, built with SSL and other dependencies suitable for use with Google's Subversion repository at svn.corp.google.com.

+

+The source of this distribution is Pam Greene's web directory:

+http://www.corp.google.com/~pamg/gvn-windows.zip

+

diff --git a/depot_tools/release/win/svn/change-svn-wc-format b/depot_tools/release/win/svn/change-svn-wc-format
new file mode 100644
index 0000000..4b6e990
--- /dev/null
+++ b/depot_tools/release/win/svn/change-svn-wc-format
@@ -0,0 +1,2 @@
+#! /bin/sh
+exec $(cygpath 'c:\python24\python.exe') 'c:\python24\Scripts\change-svn-wc-format' "$@"
diff --git a/depot_tools/release/win/svn/change-svn-wc-format.bat b/depot_tools/release/win/svn/change-svn-wc-format.bat
new file mode 100644
index 0000000..3be105b3
--- /dev/null
+++ b/depot_tools/release/win/svn/change-svn-wc-format.bat
@@ -0,0 +1 @@
+@c:\python24\python.exe c:\python24\Scripts\change-svn-wc-format %*
diff --git a/depot_tools/release/win/svn/libapr-1.dll b/depot_tools/release/win/svn/libapr-1.dll
new file mode 100644
index 0000000..cf77bb1f
--- /dev/null
+++ b/depot_tools/release/win/svn/libapr-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libapriconv-1.dll b/depot_tools/release/win/svn/libapriconv-1.dll
new file mode 100644
index 0000000..3ec773c
--- /dev/null
+++ b/depot_tools/release/win/svn/libapriconv-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libaprutil-1.dll b/depot_tools/release/win/svn/libaprutil-1.dll
new file mode 100644
index 0000000..bb85898
--- /dev/null
+++ b/depot_tools/release/win/svn/libaprutil-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libeay32.dll b/depot_tools/release/win/svn/libeay32.dll
new file mode 100644
index 0000000..0d5a3c5c9
--- /dev/null
+++ b/depot_tools/release/win/svn/libeay32.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libeay32.dll.manifest b/depot_tools/release/win/svn/libeay32.dll.manifest
new file mode 100644
index 0000000..324c707e
--- /dev/null
+++ b/depot_tools/release/win/svn/libeay32.dll.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>

+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>

+  <dependency>

+    <dependentAssembly>

+      <assemblyIdentity type='win32' name='Microsoft.VC80.CRT' version='8.0.50727.762' processorArchitecture='x86' publicKeyToken='1fc8b3b9a1e18e3b' />

+    </dependentAssembly>

+  </dependency>

+</assembly>

diff --git a/depot_tools/release/win/svn/libsvn_client-1.dll b/depot_tools/release/win/svn/libsvn_client-1.dll
new file mode 100644
index 0000000..932eadd
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_client-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libsvn_delta-1.dll b/depot_tools/release/win/svn/libsvn_delta-1.dll
new file mode 100644
index 0000000..7560bf91
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_delta-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libsvn_diff-1.dll b/depot_tools/release/win/svn/libsvn_diff-1.dll
new file mode 100644
index 0000000..1364d28
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_diff-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libsvn_fs-1.dll b/depot_tools/release/win/svn/libsvn_fs-1.dll
new file mode 100644
index 0000000..5781a54
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_fs-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libsvn_ra-1.dll b/depot_tools/release/win/svn/libsvn_ra-1.dll
new file mode 100644
index 0000000..f7e7ecaf
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_ra-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libsvn_repos-1.dll b/depot_tools/release/win/svn/libsvn_repos-1.dll
new file mode 100644
index 0000000..75d44e9e
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_repos-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libsvn_subr-1.dll b/depot_tools/release/win/svn/libsvn_subr-1.dll
new file mode 100644
index 0000000..1892842f
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_subr-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/libsvn_wc-1.dll b/depot_tools/release/win/svn/libsvn_wc-1.dll
new file mode 100644
index 0000000..8cf2f112f
--- /dev/null
+++ b/depot_tools/release/win/svn/libsvn_wc-1.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/openssl.exe b/depot_tools/release/win/svn/openssl.exe
new file mode 100644
index 0000000..236f4e9
--- /dev/null
+++ b/depot_tools/release/win/svn/openssl.exe
Binary files differ
diff --git a/depot_tools/release/win/svn/openssl.exe.manifest b/depot_tools/release/win/svn/openssl.exe.manifest
new file mode 100644
index 0000000..324c707e
--- /dev/null
+++ b/depot_tools/release/win/svn/openssl.exe.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>

+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>

+  <dependency>

+    <dependentAssembly>

+      <assemblyIdentity type='win32' name='Microsoft.VC80.CRT' version='8.0.50727.762' processorArchitecture='x86' publicKeyToken='1fc8b3b9a1e18e3b' />

+    </dependentAssembly>

+  </dependency>

+</assembly>

diff --git a/depot_tools/release/win/svn/sqlite3.dll b/depot_tools/release/win/svn/sqlite3.dll
new file mode 100644
index 0000000..472769b
--- /dev/null
+++ b/depot_tools/release/win/svn/sqlite3.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/ssleay32.dll b/depot_tools/release/win/svn/ssleay32.dll
new file mode 100644
index 0000000..e1c4ffcc
--- /dev/null
+++ b/depot_tools/release/win/svn/ssleay32.dll
Binary files differ
diff --git a/depot_tools/release/win/svn/ssleay32.dll.manifest b/depot_tools/release/win/svn/ssleay32.dll.manifest
new file mode 100644
index 0000000..324c707e
--- /dev/null
+++ b/depot_tools/release/win/svn/ssleay32.dll.manifest
@@ -0,0 +1,8 @@
+<?xml version='1.0' encoding='UTF-8' standalone='yes'?>

+<assembly xmlns='urn:schemas-microsoft-com:asm.v1' manifestVersion='1.0'>

+  <dependency>

+    <dependentAssembly>

+      <assemblyIdentity type='win32' name='Microsoft.VC80.CRT' version='8.0.50727.762' processorArchitecture='x86' publicKeyToken='1fc8b3b9a1e18e3b' />

+    </dependentAssembly>

+  </dependency>

+</assembly>

diff --git a/depot_tools/release/win/svn/svn.exe b/depot_tools/release/win/svn/svn.exe
new file mode 100644
index 0000000..0fe9dae8
--- /dev/null
+++ b/depot_tools/release/win/svn/svn.exe
Binary files differ
diff --git a/depot_tools/release/win/svn/svnadmin.exe b/depot_tools/release/win/svn/svnadmin.exe
new file mode 100644
index 0000000..d1013300
--- /dev/null
+++ b/depot_tools/release/win/svn/svnadmin.exe
Binary files differ
diff --git a/depot_tools/release/win/svn/svndumpfilter.exe b/depot_tools/release/win/svn/svndumpfilter.exe
new file mode 100644
index 0000000..2bc2e79d
--- /dev/null
+++ b/depot_tools/release/win/svn/svndumpfilter.exe
Binary files differ
diff --git a/depot_tools/release/win/svn/svnlook.exe b/depot_tools/release/win/svn/svnlook.exe
new file mode 100644
index 0000000..e007308
--- /dev/null
+++ b/depot_tools/release/win/svn/svnlook.exe
Binary files differ
diff --git a/depot_tools/release/win/svn/svnserve.exe b/depot_tools/release/win/svn/svnserve.exe
new file mode 100644
index 0000000..04829a3
--- /dev/null
+++ b/depot_tools/release/win/svn/svnserve.exe
Binary files differ
diff --git a/depot_tools/release/win/svn/svnsync.exe b/depot_tools/release/win/svn/svnsync.exe
new file mode 100644
index 0000000..350fe6c
--- /dev/null
+++ b/depot_tools/release/win/svn/svnsync.exe
Binary files differ
diff --git a/depot_tools/release/win/svn/svnversion.exe b/depot_tools/release/win/svn/svnversion.exe
new file mode 100644
index 0000000..c7c49da
--- /dev/null
+++ b/depot_tools/release/win/svn/svnversion.exe
Binary files differ
diff --git a/depot_tools/release/win/wrappers/README b/depot_tools/release/win/wrappers/README
new file mode 100644
index 0000000..c18df2d5
--- /dev/null
+++ b/depot_tools/release/win/wrappers/README
@@ -0,0 +1,23 @@
+This package contains tools for working with a subversion depot.  It knows how
+to keep itself updated to the latest version of the depot_tools release.
+
+This package contains:
+
+  gclient
+    A tool for managing a workspace with modular dependencies that are each
+    checked out independently.  More info at http://go/gclient
+
+  gvn
+    A tool for review-then-commit source development.  More info at:
+    http://code.google.com/p/gvn/
+
+  svn
+    The shizniz of revision control systems.  More info at:
+    http://subversion.tigris.com/
+
+  upload
+    A tool to upload changelists.  More info at:
+    http://code.google.com/p/rietveld/
+
+To manually update this distribution, run bootstrap\update.bat, but if you run
+gclient.bat to checkout your working copy then you should be set.
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libapr-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libapr-1.dll
new file mode 100644
index 0000000..cf77bb1f
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libapr-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libapriconv-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libapriconv-1.dll
new file mode 100644
index 0000000..3ec773c
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libapriconv-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libaprutil-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libaprutil-1.dll
new file mode 100644
index 0000000..bb85898
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libaprutil-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libeay32.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libeay32.dll
new file mode 100644
index 0000000..0d5a3c5c9
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libeay32.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_client-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_client-1.dll
new file mode 100644
index 0000000..932eadd
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_client-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_delta-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_delta-1.dll
new file mode 100644
index 0000000..7560bf91
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_delta-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_diff-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_diff-1.dll
new file mode 100644
index 0000000..1364d28
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_diff-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_fs-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_fs-1.dll
new file mode 100644
index 0000000..5781a54
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_fs-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_ra-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_ra-1.dll
new file mode 100644
index 0000000..f7e7ecaf
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_ra-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_repos-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_repos-1.dll
new file mode 100644
index 0000000..75d44e9e
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_repos-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_subr-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_subr-1.dll
new file mode 100644
index 0000000..1892842f
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_subr-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_wc-1.dll b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_wc-1.dll
new file mode 100644
index 0000000..8cf2f112f
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/libsvn_wc-1.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/sqlite3.dll b/depot_tools/release/win/wrappers/bootstrap/svn/sqlite3.dll
new file mode 100644
index 0000000..472769b
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/sqlite3.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/ssleay32.dll b/depot_tools/release/win/wrappers/bootstrap/svn/ssleay32.dll
new file mode 100644
index 0000000..e1c4ffcc
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/ssleay32.dll
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/svn.exe b/depot_tools/release/win/wrappers/bootstrap/svn/svn.exe
new file mode 100644
index 0000000..0fe9dae8
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/svn.exe
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/svn/svnadmin.exe b/depot_tools/release/win/wrappers/bootstrap/svn/svnadmin.exe
new file mode 100644
index 0000000..d1013300
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/svn/svnadmin.exe
Binary files differ
diff --git a/depot_tools/release/win/wrappers/bootstrap/update.bat b/depot_tools/release/win/wrappers/bootstrap/update.bat
new file mode 100644
index 0000000..2ece616
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/update.bat
@@ -0,0 +1,12 @@
+@echo off
+
+setlocal
+set url="https://svn/chrome/branches/depot_tools_release_branch"
+set opt=-q
+
+:: we silently update the depot_tools when it already exists
+IF NOT EXIST %1. (
+  echo checking out latest depot_tools...
+)
+
+%~dp0svn\svn.exe co %opt% %url% %1
diff --git a/depot_tools/release/win/wrappers/bootstrap/update.unix b/depot_tools/release/win/wrappers/bootstrap/update.unix
new file mode 100644
index 0000000..62335dc
--- /dev/null
+++ b/depot_tools/release/win/wrappers/bootstrap/update.unix
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+url="https://svn.corp.google.com/chrome/branches/depot_tools_release_branch"
+opt=-q
+
+# we silently update the depot_tools when it already exists
+if [ ! -e $1 ]
+then
+  echo checking out latest depot_tools...
+fi
+
+exec gvn co $opt $url $1
diff --git a/depot_tools/release/win/wrappers/gcl b/depot_tools/release/win/wrappers/gcl
new file mode 100644
index 0000000..0d81818
--- /dev/null
+++ b/depot_tools/release/win/wrappers/gcl
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+base_dir=$(dirname "$0")
+
+exec "$base_dir/gcl.bat" "$@"
diff --git a/depot_tools/release/win/wrappers/gcl.bat b/depot_tools/release/win/wrappers/gcl.bat
new file mode 100644
index 0000000..7452634
--- /dev/null
+++ b/depot_tools/release/win/wrappers/gcl.bat
@@ -0,0 +1,6 @@
+@echo off
+
+setlocal
+set PATH=%~dp0..\release\svn;%PATH%
+
+%~dp0..\release\python_24\python.exe %~dp0..\release\gcl.py %*
diff --git a/depot_tools/release/win/wrappers/gclient b/depot_tools/release/win/wrappers/gclient
new file mode 100644
index 0000000..eb1db36
--- /dev/null
+++ b/depot_tools/release/win/wrappers/gclient
@@ -0,0 +1,14 @@
+#!/bin/sh

+

+base_dir=$(dirname "$0")

+

+if [ "x$OS" == "xWindows_NT" ]

+then

+  # gclient.bat will run the update and then gclient.py

+  exec "$base_dir/gclient.bat" "$@"

+else

+  # Update "../release" every time this script is run to stay up-to-date

+  # with the latest depot_tools release.

+  $base_dir/bootstrap/update.sh $base_dir/../release

+  exec /usr/bin/python2.4 $base_dir/../release/bin/gclient.py "$@"

+fi

diff --git a/depot_tools/release/win/wrappers/gclient.bat b/depot_tools/release/win/wrappers/gclient.bat
new file mode 100644
index 0000000..87920f6
--- /dev/null
+++ b/depot_tools/release/win/wrappers/gclient.bat
@@ -0,0 +1,7 @@
+@echo off
+
+:: Update "..\release" every time this script is run to stay up-to-date
+:: with the latest depot_tools release.
+call %~dp0bootstrap\update.bat %~dp0..\release
+
+%~dp0..\release\bin\gclient.bat %*
diff --git a/depot_tools/release/win/wrappers/hammer b/depot_tools/release/win/wrappers/hammer
new file mode 100644
index 0000000..cd44951
--- /dev/null
+++ b/depot_tools/release/win/wrappers/hammer
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+base_dir=$(dirname "$0")
+
+exec "$base_dir/hammer.bat" "$@"
diff --git a/depot_tools/release/win/wrappers/hammer.bat b/depot_tools/release/win/wrappers/hammer.bat
new file mode 100644
index 0000000..5e6b7ec
--- /dev/null
+++ b/depot_tools/release/win/wrappers/hammer.bat
@@ -0,0 +1,6 @@
+@echo off
+
+setlocal
+set PATH=%~dp0..\release\svn;%PATH%
+
+%~dp0..\third_party\python_24\python.exe %~dp0..\third_party\scons\scons.py %*
diff --git a/depot_tools/release/win/wrappers/svn b/depot_tools/release/win/wrappers/svn
new file mode 100644
index 0000000..c5370f2
--- /dev/null
+++ b/depot_tools/release/win/wrappers/svn
@@ -0,0 +1,5 @@
+#!/bin/sh
+
+base_dir=$(dirname "$0")
+
+exec "$base_dir/svn.bat" "$@"
diff --git a/depot_tools/release/win/wrappers/svn.bat b/depot_tools/release/win/wrappers/svn.bat
new file mode 100644
index 0000000..34983c1
--- /dev/null
+++ b/depot_tools/release/win/wrappers/svn.bat
@@ -0,0 +1 @@
+@%~dp0..\release\svn\svn.exe %*
diff --git a/depot_tools/win/bin/bootstrap/svn/libapr-1.dll b/depot_tools/win/bin/bootstrap/svn/libapr-1.dll
new file mode 100644
index 0000000..cf77bb1f
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libapr-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libapriconv-1.dll b/depot_tools/win/bin/bootstrap/svn/libapriconv-1.dll
new file mode 100644
index 0000000..3ec773c
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libapriconv-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libaprutil-1.dll b/depot_tools/win/bin/bootstrap/svn/libaprutil-1.dll
new file mode 100644
index 0000000..bb85898
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libaprutil-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libeay32.dll b/depot_tools/win/bin/bootstrap/svn/libeay32.dll
new file mode 100644
index 0000000..0d5a3c5c9
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libeay32.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_client-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_client-1.dll
new file mode 100644
index 0000000..932eadd
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_client-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_delta-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_delta-1.dll
new file mode 100644
index 0000000..7560bf91
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_delta-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_diff-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_diff-1.dll
new file mode 100644
index 0000000..1364d28
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_diff-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_fs-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_fs-1.dll
new file mode 100644
index 0000000..5781a54
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_fs-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_ra-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_ra-1.dll
new file mode 100644
index 0000000..f7e7ecaf
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_ra-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_repos-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_repos-1.dll
new file mode 100644
index 0000000..75d44e9e
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_repos-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_subr-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_subr-1.dll
new file mode 100644
index 0000000..1892842f
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_subr-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/libsvn_wc-1.dll b/depot_tools/win/bin/bootstrap/svn/libsvn_wc-1.dll
new file mode 100644
index 0000000..8cf2f112f
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/libsvn_wc-1.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/sqlite3.dll b/depot_tools/win/bin/bootstrap/svn/sqlite3.dll
new file mode 100644
index 0000000..472769b
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/sqlite3.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/ssleay32.dll b/depot_tools/win/bin/bootstrap/svn/ssleay32.dll
new file mode 100644
index 0000000..e1c4ffcc
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/ssleay32.dll
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/svn.exe b/depot_tools/win/bin/bootstrap/svn/svn.exe
new file mode 100644
index 0000000..0fe9dae8
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/svn.exe
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/svn/svnadmin.exe b/depot_tools/win/bin/bootstrap/svn/svnadmin.exe
new file mode 100644
index 0000000..d1013300
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/svn/svnadmin.exe
Binary files differ
diff --git a/depot_tools/win/bin/bootstrap/update.bat b/depot_tools/win/bin/bootstrap/update.bat
new file mode 100644
index 0000000..91a8319
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/update.bat
@@ -0,0 +1,12 @@
+@echo off
+
+setlocal
+set url="https://chrome-svn.corp.google.com/chrome/trunk/depot_tools/release/win"
+set opt=-q
+
+:: we silently update the depot_tools when it already exists
+IF NOT EXIST %1. (
+  echo checking out latest depot_tools...
+)
+
+%~dp0svn\svn.exe co %opt% %url% %1
diff --git a/depot_tools/win/bin/bootstrap/update.sh b/depot_tools/win/bin/bootstrap/update.sh
new file mode 100755
index 0000000..a835a87
--- /dev/null
+++ b/depot_tools/win/bin/bootstrap/update.sh
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+url="https://chrome-svn.corp.google.com/chrome/trunk/depot_tools/release/win"
+opt=-q
+
+# we silently update the depot_tools when it already exists
+if [ ! -e $1 ]
+then
+  echo checking out latest depot_tools...
+fi
+
+exec svn co $opt $url $1
diff --git a/depot_tools/win/bin/gclient b/depot_tools/win/bin/gclient
new file mode 100644
index 0000000..eb1db36
--- /dev/null
+++ b/depot_tools/win/bin/gclient
@@ -0,0 +1,14 @@
+#!/bin/sh

+

+base_dir=$(dirname "$0")

+

+if [ "x$OS" == "xWindows_NT" ]

+then

+  # gclient.bat will run the update and then gclient.py

+  exec "$base_dir/gclient.bat" "$@"

+else

+  # Update "../release" every time this script is run to stay up-to-date

+  # with the latest depot_tools release.

+  $base_dir/bootstrap/update.sh $base_dir/../release

+  exec /usr/bin/python2.4 $base_dir/../release/bin/gclient.py "$@"

+fi

diff --git a/depot_tools/win/bin/gclient.bat b/depot_tools/win/bin/gclient.bat
new file mode 100644
index 0000000..87920f6
--- /dev/null
+++ b/depot_tools/win/bin/gclient.bat
@@ -0,0 +1,7 @@
+@echo off
+
+:: Update "..\release" every time this script is run to stay up-to-date
+:: with the latest depot_tools release.
+call %~dp0bootstrap\update.bat %~dp0..\release
+
+%~dp0..\release\bin\gclient.bat %*